sqlsmith-1.2.1/configure.ac
AC_INIT(SQLsmith, 1.2.1, seltenreich@gmx.de, sqlsmith, https://github.com/anse1/sqlsmith/)
AC_LANG(C++)
AM_INIT_AUTOMAKE(-Wall -Werror foreign)
AC_PROG_CXX
AX_CXX_COMPILE_STDCXX_11(noext,mandatory)
AX_LIB_POSTGRESQL()
PKG_CHECK_MODULES(LIBPQXX, libpqxx >= 4.0)
PKG_CHECK_MODULES(MONETDB_MAPI, monetdb-mapi >= 11.23.0,
[AC_DEFINE([HAVE_MONETDB], [1], [define if the MonetDB client library is available])],
[]
)
AM_CONDITIONAL([DUT_MONETDB], [test x$pkg_failed = xno])
AX_BOOST_BASE()
AX_BOOST_REGEX
AC_SUBST(LIBPQXX_CFLAGS)
AC_SUBST(LIBPQXX_LIBS)
AC_SUBST(CONFIG_GIT_REVISION,
[m4_esyscmd_s([git describe --dirty --tags --always])])
AC_CHECK_LIB(sqlite3, sqlite3_open_v2)
AM_CONDITIONAL([DUT_SQLITE], [test x$ac_cv_lib_sqlite3_sqlite3_open_v2 = xyes])
AC_SUBST(REVISION)
AC_CONFIG_HEADERS(config.h)
AC_CONFIG_FILES([
Makefile
Doxyfile
])
AC_OUTPUT
sqlsmith-1.2.1/dump.xsl
<!-- XSLT stylesheet; its markup was lost in extraction. It renders the XML AST
     dump as a Graphviz "digraph ast { ... }": each node gets a label and one of
     the fill colors firebrick, red, salmon, wheat, springgreen, gainsboro or
     white (style=filled), and edges are emitted as "parent -> child". -->
sqlsmith-1.2.1/gitrev.h
#define GITREV "v1.2.1"
sqlsmith-1.2.1/log.cc
#include "config.h"
#include <iostream>
#include <map>
#include <typeinfo>
#ifndef HAVE_BOOST_REGEX
#include <regex>
#else
#include <boost/regex.hpp>
using boost::regex;
using boost::smatch;
using boost::regex_match;
#endif
#include <pqxx/pqxx>
extern "C" {
#include <unistd.h>
}
#include "log.hh"
#include "schema.hh"
#include "gitrev.h"
#include "impedance.hh"
#include "random.hh"
using namespace std;
using namespace pqxx;
struct stats_visitor : prod_visitor {
int nodes = 0;
int maxlevel = 0;
long retries = 0;
map<std::string, long> production_stats;
virtual void visit(struct prod *p) {
nodes++;
if (p->level > maxlevel)
maxlevel = p->level;
production_stats[typeid(*p).name()]++;
retries += p->retries;
}
void report() {
cerr << "production statistics" << endl;
vector<pair<std::string, long> > report;
for (auto p : production_stats)
report.push_back(p);
stable_sort(report.begin(), report.end(),
[](const pair<std::string, long> &a,
const pair<std::string, long> &b)
{ return a.second > b.second; });
for (auto p : report) {
cerr << p.second << "\t" << p.first << endl;
}
}
};
void stats_collecting_logger::generated(prod &query)
{
queries++;
stats_visitor v;
query.accept(&v);
sum_nodes += v.nodes;
sum_height += v.maxlevel;
sum_retries += v.retries;
}
void cerr_logger::report()
{
cerr << endl << "queries: " << queries << endl;
// << " (" << 1000.0*query_count/gen_time.count() << " gen/s, "
// << 1000.0*query_count/query_time.count() << " exec/s)" << endl;
cerr << "AST stats (avg): height = " << sum_height/queries
<< " nodes = " << sum_nodes/queries << endl;
vector<pair<std::string, long> > report;
for (auto e : errors) {
report.push_back(e);
}
stable_sort(report.begin(), report.end(),
[](const pair<std::string, long> &a,
const pair<std::string, long> &b)
{ return a.second > b.second; });
long err_count = 0;
for (auto e : report) {
err_count += e.second;
cerr << e.second << "\t" << e.first.substr(0,80) << endl;
}
cerr << "error rate: " << (float)err_count/(queries) << endl;
impedance::report();
}
void cerr_logger::generated(prod &p)
{
stats_collecting_logger::generated(p);
if ((10*columns-1) == queries%(10*columns))
report();
}
void cerr_logger::executed(prod &query)
{
(void)query;
if (columns-1 == (queries%columns)) {
cerr << endl;
}
cerr << ".";
}
void cerr_logger::error(prod &query, const dut::failure &e)
{
(void)query;
istringstream err(e.what());
string line;
if (columns-1 == (queries%columns)) {
cerr << endl;
}
getline(err, line);
errors[line]++;
if (dynamic_cast<const dut::timeout *>(&e))
cerr << "t";
else if (dynamic_cast<const dut::syntax *>(&e))
cerr << "S";
else if (dynamic_cast<const dut::broken *>(&e))
cerr << "C";
else
cerr << "e";
}
pqxx_logger::pqxx_logger(std::string target, std::string conninfo, struct schema &s)
{
c = make_shared<pqxx::connection>(conninfo);
work w(*c);
w.exec("set application_name to '" PACKAGE "::log';");
c->prepare("instance",
"insert into instance (rev, target, hostname, version, seed) "
"values ($1, $2, $3, $4, $5) returning id");
char hostname[1024];
gethostname(hostname, sizeof(hostname));
ostringstream seed;
seed << smith::rng;
result r = w.prepared("instance")(GITREV)(target)(hostname)(s.version)(seed.str()).exec();
id = r[0][0].as(id);
c->prepare("error",
"insert into error (id, msg, query, sqlstate) "
"values (" + to_string(id) + ", $1, $2, $3)");
w.exec("insert into stat (id) values (" + to_string(id) + ")");
c->prepare("stat",
"update stat set generated=$1, level=$2, nodes=$3, updated=now() "
", retries = $4, impedance = $5 "
"where id = " + to_string(id));
w.commit();
}
void pqxx_logger::error(prod &query, const dut::failure &e)
{
work w(*c);
ostringstream s;
s << query;
w.prepared("error")(e.what())(s.str())(e.sqlstate).exec();
w.commit();
}
void pqxx_logger::generated(prod &query)
{
stats_collecting_logger::generated(query);
if (999 == (queries%1000)) {
work w(*c);
ostringstream s;
impedance::report(s);
w.prepared("stat")(queries)(sum_height/queries)(sum_nodes/queries)(sum_retries/queries)(s.str()).exec();
w.commit();
}
}
sqlsmith-1.2.1/README.org
[[logo.png]]
* SQLsmith
: "I love the smell of coredumps in the morning"
** Description
SQLsmith is a random SQL query generator. Its paragon is [[https://embed.cs.utah.edu/csmith/][Csmith]],
which proved valuable for quality assurance in C compilers.
It currently supports generating queries for PostgreSQL, SQLite 3 and
MonetDB. To add support for another RDBMS, you need to implement two
classes: one providing schema information about the device under
test, and one providing connectivity to it.
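A minimal sketch of that division of labour is shown below. The class
and member names are hypothetical placeholders, not SQLsmith's actual
interfaces (those live in the project's headers, e.g. =schema.hh=);
the point is merely that one class loads catalog metadata while the
other executes a generated statement and reports failures.
: #include <stdexcept>
: #include <string>
: #include <vector>
:
: // Hypothetical sketch: schema side of a new backend.
: struct my_schema {
:   std::vector<std::string> tables;   // table/column/type metadata
:   void load() {
:     // query the backend's catalog and fill the metadata above
:   }
: };
:
: // Hypothetical sketch: connectivity side of a new backend.
: struct my_dut {
:   // Execute one generated statement; throw so errors can be classified.
:   void test(const std::string &stmt) {
:     (void)stmt;
:     throw std::runtime_error("not implemented");
:   }
: };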
Besides developers of the RDBMS products, users developing extensions
might also be interested in exposing their code to SQLsmith's random
workload.
Since 2015, it has found 118 bugs in alphas, betas and releases of the
aforementioned products, including security vulnerabilities in
released versions. Additional bugs were squashed in extensions and
libraries such as orafce and glibc.
https://github.com/anse1/sqlsmith/wiki#score-list
** Dependencies
- C++11
- libpqxx
optional:
- boost::regex in case your std::regex is broken
- SQLite3
- monetdb_mapi
** Building on Debian Jessie
: apt-get install build-essential autoconf autoconf-archive libpqxx-dev libboost-regex-dev libsqlite3-dev
: cd sqlsmith
: autoreconf -i # Avoid when building from a release tarball
: ./configure
: make
** Building on OSX
In order to build on Mac OSX, assuming you use Homebrew, run the following:
: brew install libpqxx automake libtool autoconf autoconf-archive pkg-config
: cd sqlsmith
: autoreconf -i # Avoid when building from a release tarball
: ./configure
: make
** Usage
SQLsmith connects to the target database both to retrieve the schema
for query generation and to execute the generated queries. Currently, all
generated statements are rolled back. Beware that SQLsmith does call
functions that could possibly have side-effects
(e.g. pg_terminate_backend). Use a suitably *underprivileged user*
for its connection to avoid this.
Example invocations:
: # testing Postgres
: sqlsmith --verbose --target="host=/tmp port=65432 dbname=regression"
: # testing SQLite
: sqlsmith --verbose --sqlite="file:$HOME/.mozilla/firefox/places.sqlite?mode=ro"
: # testing MonetDB
: sqlsmith --verbose --monetdb="mapi:monetdb://localhost:50000/smith"
The following options are currently supported:
| =--target=connstr= | target postgres database (default: libpq defaults) |
| =--sqlite=URI= | target SQLite3 database |
| =--monetdb=URI= | target MonetDB database |
| =--log-to=connstr= | postgres db for logging errors into (default: don't log) |
| =--verbose= | emit progress output |
| =--version= | show version information |
| =--seed=int= | seed RNG with specified integer instead of PID |
| =--dry-run= | print queries instead of executing them |
| =--max-queries=long= | terminate after generating this many queries |
| =--exclude-catalog= | don't generate queries using catalog relations |
| =--dump-all-queries= | dump queries as they are generated |
| =--dump-all-graphs= | dump generated ASTs for debugging |
| =--rng-state=string= | deserialize dumped rng state |
Sample output:
=--verbose= makes sqlsmith emit some progress indication to stderr. A
symbol is output for each query sent to the server. Currently the
following ones are generated:
| symbol | meaning | details |
|--------+-------------------+-----------------------------------------------|
| . | ok | Query generated and executed with ok sqlstate |
| S | syntax error | These are bugs in sqlsmith - please report |
| t | timeout | SQLsmith sets a statement timeout of 1s |
| C | broken connection | These happen when a query crashes the server |
| e | other error | |
When you test against an RDBMS that doesn't support some of SQLsmith's
grammar, there will be a burst of syntax errors on startup. These
should disappear after some time as SQLsmith blacklists productions
that consistently lead to errors.
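The bookkeeping behind this blacklisting is conceptually simple. The
sketch below is not SQLsmith's actual implementation (which presumably
lives behind the =impedance.hh= interface seen in =log.cc=); it only
illustrates, with hypothetical names, the per-production accounting
such a feedback loop needs: record how often each production succeeds
or fails, and stop scheduling productions that have never produced
anything but errors.
: #include <map>
: #include <string>
:
: // Hypothetical sketch of production blacklisting.
: struct outcome_counts { long ok = 0; long failed = 0; };
: static std::map<std::string, outcome_counts> outcomes;
:
: void record(const std::string &production, bool ok) {
:   if (ok) outcomes[production].ok++; else outcomes[production].failed++;
: }
:
: bool production_allowed(const std::string &production) {
:   const outcome_counts &c = outcomes[production];
:   return c.ok > 0 || c.failed < 100;  // give every production 100 tries
: }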
=--verbose= will also periodically emit error reports. In the
following example, these are mostly caused by the primitive type
system.
: queries: 39000 (202.399 gen/s, 298.942 exec/s)
: AST stats (avg): height = 5.599 nodes = 37.8489
: 82 ERROR: invalid regular expression: quantifier operand invalid
: 70 ERROR: canceling statement due to statement timeout
: 44 ERROR: operator does not exist: point = point
: 27 ERROR: operator does not exist: xml = xml
: 22 ERROR: cannot compare arrays of different element types
: 11 ERROR: could not determine which collation to use for string comparison
: 5 ERROR: invalid regular expression: nfa has too many states
: 4 ERROR: cache lookup failed for index 2619
: 4 ERROR: invalid regular expression: brackets [] not balanced
: 3 ERROR: operator does not exist: polygon = polygon
: 2 ERROR: invalid regular expression: parentheses () not balanced
: 1 ERROR: invalid regular expression: invalid character range
: error rate: 0.00705128
The only one that looks interesting here is the cache lookup one.
Taking a closer look at it reveals that it happens when you query a
certain catalog view like this:
: self=# select indexdef from pg_catalog.pg_indexes where indexdef is not NULL;
: FEHLER: cache lookup failed for index 2619
This is because the planner then puts =pg_get_indexdef(oid)= in a
context where it sees non-index-oids, which causes it to croak:
: QUERY PLAN
: ------------------------------------------------------------------------------------
: Hash Join (cost=17.60..30.65 rows=9 width=4)
: Hash Cond: (i.oid = x.indexrelid)
: -> Seq Scan on pg_class i (cost=0.00..12.52 rows=114 width=8)
: Filter: ((pg_get_indexdef(oid) IS NOT NULL) AND (relkind = 'i'::"char"))
: -> Hash (cost=17.31..17.31 rows=23 width=4)
: -> Hash Join (cost=12.52..17.31 rows=23 width=4)
: Hash Cond: (x.indrelid = c.oid)
: -> Seq Scan on pg_index x (cost=0.00..4.13 rows=113 width=8)
: -> Hash (cost=11.76..11.76 rows=61 width=8)
: -> Seq Scan on pg_class c (cost=0.00..11.76 rows=61 width=8)
: Filter: (relkind = ANY ('{r,m}'::"char"[]))
Now this is more of a curiosity than a bug, but it still illustrates
what debugging with the help of SQLsmith might look like.
** Large-scale testing
=--log-to= allows logging of hundreds of sqlsmith instances into a
central PostgreSQL database. [[./log.sql]] contains the schema sqlsmith
expects and some additional views to generate reports on the logged
contents.
It also contains a trigger to filter boring/known errors based on the
contents of the tables known and known_re. I periodically COPY my
filter tables for testing PostgreSQL into the files [[./known_re.txt]] and
[[./known.txt]] to serve as a starting point.
** Resources
- [[https://korte.credativ.com/~ase/sqlsmith-talk.pdf][Slides from PGConf.EU 2016]]
** License
SQLsmith is available under GPLv3. Use it at your own risk. It may
*damage your database* (one of the purposes of this tool /is/ to try
and break things). See the file [[COPYING]] for details.
** Authors
Andreas Seltenreich
Bo Tang
Sjoerd Mullender
[[ast.png]]
sqlsmith-1.2.1/depcomp
#! /bin/sh
# depcomp - compile a program generating dependencies as side-effects
scriptversion=2013-05-30.07; # UTC
# Copyright (C) 1999-2014 Free Software Foundation, Inc.
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2, or (at your option)
# any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# As a special exception to the GNU General Public License, if you
# distribute this file as part of a program that contains a
# configuration script generated by Autoconf, you may include it under
# the same distribution terms that you use for the rest of that program.
# Originally written by Alexandre Oliva .
case $1 in
'')
echo "$0: No command. Try '$0 --help' for more information." 1>&2
exit 1;
;;
-h | --h*)
cat <<\EOF
Usage: depcomp [--help] [--version] PROGRAM [ARGS]
Run PROGRAMS ARGS to compile a file, generating dependencies
as side-effects.
Environment variables:
depmode Dependency tracking mode.
source Source file read by 'PROGRAMS ARGS'.
object Object file output by 'PROGRAMS ARGS'.
DEPDIR directory where to store dependencies.
depfile Dependency file to output.
tmpdepfile Temporary file to use when outputting dependencies.
libtool Whether libtool is used (yes/no).
Report bugs to <bug-automake@gnu.org>.
EOF
exit $?
;;
-v | --v*)
echo "depcomp $scriptversion"
exit $?
;;
esac
# Get the directory component of the given path, and save it in the
# global variables '$dir'. Note that this directory component will
# be either empty or ending with a '/' character. This is deliberate.
set_dir_from ()
{
case $1 in
*/*) dir=`echo "$1" | sed -e 's|/[^/]*$|/|'`;;
*) dir=;;
esac
}
# Get the suffix-stripped basename of the given path, and save it in the
# global variable '$base'.
set_base_from ()
{
base=`echo "$1" | sed -e 's|^.*/||' -e 's/\.[^.]*$//'`
}
# If no dependency file was actually created by the compiler invocation,
# we still have to create a dummy depfile, to avoid errors with the
# Makefile "include basename.Plo" scheme.
make_dummy_depfile ()
{
echo "#dummy" > "$depfile"
}
# Factor out some common post-processing of the generated depfile.
# Requires the auxiliary global variable '$tmpdepfile' to be set.
aix_post_process_depfile ()
{
# If the compiler actually managed to produce a dependency file,
# post-process it.
if test -f "$tmpdepfile"; then
# Each line is of the form 'foo.o: dependency.h'.
# Do two passes, one to just change these to
# $object: dependency.h
# and one to simply output
# dependency.h:
# which is needed to avoid the deleted-header problem.
{ sed -e "s,^.*\.[$lower]*:,$object:," < "$tmpdepfile"
sed -e "s,^.*\.[$lower]*:[$tab ]*,," -e 's,$,:,' < "$tmpdepfile"
} > "$depfile"
rm -f "$tmpdepfile"
else
make_dummy_depfile
fi
}
# A tabulation character.
tab=' '
# A newline character.
nl='
'
# Character ranges might be problematic outside the C locale.
# These definitions help.
upper=ABCDEFGHIJKLMNOPQRSTUVWXYZ
lower=abcdefghijklmnopqrstuvwxyz
digits=0123456789
alpha=${upper}${lower}
if test -z "$depmode" || test -z "$source" || test -z "$object"; then
echo "depcomp: Variables source, object and depmode must be set" 1>&2
exit 1
fi
# Dependencies for sub/bar.o or sub/bar.obj go into sub/.deps/bar.Po.
depfile=${depfile-`echo "$object" |
sed 's|[^\\/]*$|'${DEPDIR-.deps}'/&|;s|\.\([^.]*\)$|.P\1|;s|Pobj$|Po|'`}
tmpdepfile=${tmpdepfile-`echo "$depfile" | sed 's/\.\([^.]*\)$/.T\1/'`}
rm -f "$tmpdepfile"
# Avoid interferences from the environment.
gccflag= dashmflag=
# Some modes work just like other modes, but use different flags. We
# parameterize here, but still list the modes in the big case below,
# to make depend.m4 easier to write. Note that we *cannot* use a case
# here, because this file can only contain one case statement.
if test "$depmode" = hp; then
# HP compiler uses -M and no extra arg.
gccflag=-M
depmode=gcc
fi
if test "$depmode" = dashXmstdout; then
# This is just like dashmstdout with a different argument.
dashmflag=-xM
depmode=dashmstdout
fi
cygpath_u="cygpath -u -f -"
if test "$depmode" = msvcmsys; then
# This is just like msvisualcpp but w/o cygpath translation.
# Just convert the backslash-escaped backslashes to single forward
# slashes to satisfy depend.m4
cygpath_u='sed s,\\\\,/,g'
depmode=msvisualcpp
fi
if test "$depmode" = msvc7msys; then
# This is just like msvc7 but w/o cygpath translation.
# Just convert the backslash-escaped backslashes to single forward
# slashes to satisfy depend.m4
cygpath_u='sed s,\\\\,/,g'
depmode=msvc7
fi
if test "$depmode" = xlc; then
# IBM C/C++ Compilers xlc/xlC can output gcc-like dependency information.
gccflag=-qmakedep=gcc,-MF
depmode=gcc
fi
case "$depmode" in
gcc3)
## gcc 3 implements dependency tracking that does exactly what
## we want. Yay! Note: for some reason libtool 1.4 doesn't like
## it if -MD -MP comes after the -MF stuff. Hmm.
## Unfortunately, FreeBSD c89 acceptance of flags depends upon
## the command line argument order; so add the flags where they
## appear in depend2.am. Note that the slowdown incurred here
## affects only configure: in makefiles, %FASTDEP% shortcuts this.
for arg
do
case $arg in
-c) set fnord "$@" -MT "$object" -MD -MP -MF "$tmpdepfile" "$arg" ;;
*) set fnord "$@" "$arg" ;;
esac
shift # fnord
shift # $arg
done
"$@"
stat=$?
if test $stat -ne 0; then
rm -f "$tmpdepfile"
exit $stat
fi
mv "$tmpdepfile" "$depfile"
;;
gcc)
## Note that this doesn't just cater to obsolete pre-3.x GCC compilers,
## but also to in-use compilers like IBM xlc/xlC and the HP C compiler.
## (see the conditional assignment to $gccflag above).
## There are various ways to get dependency output from gcc. Here's
## why we pick this rather obscure method:
## - Don't want to use -MD because we'd like the dependencies to end
## up in a subdir. Having to rename by hand is ugly.
## (We might end up doing this anyway to support other compilers.)
## - The DEPENDENCIES_OUTPUT environment variable makes gcc act like
## -MM, not -M (despite what the docs say). Also, it might not be
## supported by the other compilers which use the 'gcc' depmode.
## - Using -M directly means running the compiler twice (even worse
## than renaming).
if test -z "$gccflag"; then
gccflag=-MD,
fi
"$@" -Wp,"$gccflag$tmpdepfile"
stat=$?
if test $stat -ne 0; then
rm -f "$tmpdepfile"
exit $stat
fi
rm -f "$depfile"
echo "$object : \\" > "$depfile"
# The second -e expression handles DOS-style file names with drive
# letters.
sed -e 's/^[^:]*: / /' \
-e 's/^['$alpha']:\/[^:]*: / /' < "$tmpdepfile" >> "$depfile"
## This next piece of magic avoids the "deleted header file" problem.
## The problem is that when a header file which appears in a .P file
## is deleted, the dependency causes make to die (because there is
## typically no way to rebuild the header). We avoid this by adding
## dummy dependencies for each header file. Too bad gcc doesn't do
## this for us directly.
## Some versions of gcc put a space before the ':'. On the theory
## that the space means something, we add a space to the output as
## well. hp depmode also adds that space, but also prefixes the VPATH
## to the object. Take care to not repeat it in the output.
## Some versions of the HPUX 10.20 sed can't process this invocation
## correctly. Breaking it into two sed invocations is a workaround.
tr ' ' "$nl" < "$tmpdepfile" \
| sed -e 's/^\\$//' -e '/^$/d' -e "s|.*$object$||" -e '/:$/d' \
| sed -e 's/$/ :/' >> "$depfile"
rm -f "$tmpdepfile"
;;
hp)
# This case exists only to let depend.m4 do its work. It works by
# looking at the text of this script. This case will never be run,
# since it is checked for above.
exit 1
;;
sgi)
if test "$libtool" = yes; then
"$@" "-Wp,-MDupdate,$tmpdepfile"
else
"$@" -MDupdate "$tmpdepfile"
fi
stat=$?
if test $stat -ne 0; then
rm -f "$tmpdepfile"
exit $stat
fi
rm -f "$depfile"
if test -f "$tmpdepfile"; then # yes, the sourcefile depend on other files
echo "$object : \\" > "$depfile"
# Clip off the initial element (the dependent). Don't try to be
# clever and replace this with sed code, as IRIX sed won't handle
# lines with more than a fixed number of characters (4096 in
# IRIX 6.2 sed, 8192 in IRIX 6.5). We also remove comment lines;
# the IRIX cc adds comments like '#:fec' to the end of the
# dependency line.
tr ' ' "$nl" < "$tmpdepfile" \
| sed -e 's/^.*\.o://' -e 's/#.*$//' -e '/^$/ d' \
| tr "$nl" ' ' >> "$depfile"
echo >> "$depfile"
# The second pass generates a dummy entry for each header file.
tr ' ' "$nl" < "$tmpdepfile" \
| sed -e 's/^.*\.o://' -e 's/#.*$//' -e '/^$/ d' -e 's/$/:/' \
>> "$depfile"
else
make_dummy_depfile
fi
rm -f "$tmpdepfile"
;;
xlc)
# This case exists only to let depend.m4 do its work. It works by
# looking at the text of this script. This case will never be run,
# since it is checked for above.
exit 1
;;
aix)
# The C for AIX Compiler uses -M and outputs the dependencies
# in a .u file. In older versions, this file always lives in the
# current directory. Also, the AIX compiler puts '$object:' at the
# start of each line; $object doesn't have directory information.
# Version 6 uses the directory in both cases.
set_dir_from "$object"
set_base_from "$object"
if test "$libtool" = yes; then
tmpdepfile1=$dir$base.u
tmpdepfile2=$base.u
tmpdepfile3=$dir.libs/$base.u
"$@" -Wc,-M
else
tmpdepfile1=$dir$base.u
tmpdepfile2=$dir$base.u
tmpdepfile3=$dir$base.u
"$@" -M
fi
stat=$?
if test $stat -ne 0; then
rm -f "$tmpdepfile1" "$tmpdepfile2" "$tmpdepfile3"
exit $stat
fi
for tmpdepfile in "$tmpdepfile1" "$tmpdepfile2" "$tmpdepfile3"
do
test -f "$tmpdepfile" && break
done
aix_post_process_depfile
;;
tcc)
# tcc (Tiny C Compiler) understands '-MD -MF file' since version 0.9.26.
# FIXME: That version is still under development at the moment of writing.
# Make sure that this statement remains true also for stable, released
# versions.
# It will wrap lines (doesn't matter whether long or short) with a
# trailing '\', as in:
#
# foo.o : \
# foo.c \
# foo.h \
#
# It will put a trailing '\' even on the last line, and will use leading
# spaces rather than leading tabs (at least since its commit 0394caf7
# "Emit spaces for -MD").
"$@" -MD -MF "$tmpdepfile"
stat=$?
if test $stat -ne 0; then
rm -f "$tmpdepfile"
exit $stat
fi
rm -f "$depfile"
# Each non-empty line is of the form 'foo.o : \' or ' dep.h \'.
# We have to change lines of the first kind to '$object: \'.
sed -e "s|.*:|$object :|" < "$tmpdepfile" > "$depfile"
# And for each line of the second kind, we have to emit a 'dep.h:'
# dummy dependency, to avoid the deleted-header problem.
sed -n -e 's|^ *\(.*\) *\\$|\1:|p' < "$tmpdepfile" >> "$depfile"
rm -f "$tmpdepfile"
;;
## The order of this option in the case statement is important, since the
## shell code in configure will try each of these formats in the order
## listed in this file. A plain '-MD' option would be understood by many
## compilers, so we must ensure this comes after the gcc and icc options.
pgcc)
# Portland's C compiler understands '-MD'.
# Will always output deps to 'file.d' where file is the root name of the
# source file under compilation, even if file resides in a subdirectory.
# The object file name does not affect the name of the '.d' file.
# pgcc 10.2 will output
# foo.o: sub/foo.c sub/foo.h
# and will wrap long lines using '\' :
# foo.o: sub/foo.c ... \
# sub/foo.h ... \
# ...
set_dir_from "$object"
# Use the source, not the object, to determine the base name, since
# that's sadly what pgcc will do too.
set_base_from "$source"
tmpdepfile=$base.d
# For projects that build the same source file twice into different object
# files, the pgcc approach of using the *source* file root name can cause
# problems in parallel builds. Use a locking strategy to avoid stomping on
# the same $tmpdepfile.
lockdir=$base.d-lock
trap "
echo '$0: caught signal, cleaning up...' >&2
rmdir '$lockdir'
exit 1
" 1 2 13 15
numtries=100
i=$numtries
while test $i -gt 0; do
# mkdir is a portable test-and-set.
if mkdir "$lockdir" 2>/dev/null; then
# This process acquired the lock.
"$@" -MD
stat=$?
# Release the lock.
rmdir "$lockdir"
break
else
# If the lock is being held by a different process, wait
# until the winning process is done or we timeout.
while test -d "$lockdir" && test $i -gt 0; do
sleep 1
i=`expr $i - 1`
done
fi
i=`expr $i - 1`
done
trap - 1 2 13 15
if test $i -le 0; then
echo "$0: failed to acquire lock after $numtries attempts" >&2
echo "$0: check lockdir '$lockdir'" >&2
exit 1
fi
if test $stat -ne 0; then
rm -f "$tmpdepfile"
exit $stat
fi
rm -f "$depfile"
# Each line is of the form `foo.o: dependent.h',
# or `foo.o: dep1.h dep2.h \', or ` dep3.h dep4.h \'.
# Do two passes, one to just change these to
# `$object: dependent.h' and one to simply `dependent.h:'.
sed "s,^[^:]*:,$object :," < "$tmpdepfile" > "$depfile"
# Some versions of the HPUX 10.20 sed can't process this invocation
# correctly. Breaking it into two sed invocations is a workaround.
sed 's,^[^:]*: \(.*\)$,\1,;s/^\\$//;/^$/d;/:$/d' < "$tmpdepfile" \
| sed -e 's/$/ :/' >> "$depfile"
rm -f "$tmpdepfile"
;;
hp2)
# The "hp" stanza above does not work with aCC (C++) and HP's ia64
# compilers, which have integrated preprocessors. The correct option
# to use with these is +Maked; it writes dependencies to a file named
# 'foo.d', which lands next to the object file, wherever that
# happens to be.
# Much of this is similar to the tru64 case; see comments there.
set_dir_from "$object"
set_base_from "$object"
if test "$libtool" = yes; then
tmpdepfile1=$dir$base.d
tmpdepfile2=$dir.libs/$base.d
"$@" -Wc,+Maked
else
tmpdepfile1=$dir$base.d
tmpdepfile2=$dir$base.d
"$@" +Maked
fi
stat=$?
if test $stat -ne 0; then
rm -f "$tmpdepfile1" "$tmpdepfile2"
exit $stat
fi
for tmpdepfile in "$tmpdepfile1" "$tmpdepfile2"
do
test -f "$tmpdepfile" && break
done
if test -f "$tmpdepfile"; then
sed -e "s,^.*\.[$lower]*:,$object:," "$tmpdepfile" > "$depfile"
# Add 'dependent.h:' lines.
sed -ne '2,${
s/^ *//
s/ \\*$//
s/$/:/
p
}' "$tmpdepfile" >> "$depfile"
else
make_dummy_depfile
fi
rm -f "$tmpdepfile" "$tmpdepfile2"
;;
tru64)
# The Tru64 compiler uses -MD to generate dependencies as a side
# effect. 'cc -MD -o foo.o ...' puts the dependencies into 'foo.o.d'.
# At least on Alpha/Redhat 6.1, Compaq CCC V6.2-504 seems to put
# dependencies in 'foo.d' instead, so we check for that too.
# Subdirectories are respected.
set_dir_from "$object"
set_base_from "$object"
if test "$libtool" = yes; then
# Libtool generates 2 separate objects for the 2 libraries. These
# two compilations output dependencies in $dir.libs/$base.o.d and
# in $dir$base.o.d. We have to check for both files, because
# one of the two compilations can be disabled. We should prefer
# $dir$base.o.d over $dir.libs/$base.o.d because the latter is
# automatically cleaned when .libs/ is deleted, while ignoring
# the former would cause a distcleancheck panic.
tmpdepfile1=$dir$base.o.d # libtool 1.5
tmpdepfile2=$dir.libs/$base.o.d # Likewise.
tmpdepfile3=$dir.libs/$base.d # Compaq CCC V6.2-504
"$@" -Wc,-MD
else
tmpdepfile1=$dir$base.d
tmpdepfile2=$dir$base.d
tmpdepfile3=$dir$base.d
"$@" -MD
fi
stat=$?
if test $stat -ne 0; then
rm -f "$tmpdepfile1" "$tmpdepfile2" "$tmpdepfile3"
exit $stat
fi
for tmpdepfile in "$tmpdepfile1" "$tmpdepfile2" "$tmpdepfile3"
do
test -f "$tmpdepfile" && break
done
# Same post-processing that is required for AIX mode.
aix_post_process_depfile
;;
msvc7)
if test "$libtool" = yes; then
showIncludes=-Wc,-showIncludes
else
showIncludes=-showIncludes
fi
"$@" $showIncludes > "$tmpdepfile"
stat=$?
grep -v '^Note: including file: ' "$tmpdepfile"
if test $stat -ne 0; then
rm -f "$tmpdepfile"
exit $stat
fi
rm -f "$depfile"
echo "$object : \\" > "$depfile"
# The first sed program below extracts the file names and escapes
# backslashes for cygpath. The second sed program outputs the file
# name when reading, but also accumulates all include files in the
# hold buffer in order to output them again at the end. This only
# works with sed implementations that can handle large buffers.
sed < "$tmpdepfile" -n '
/^Note: including file: *\(.*\)/ {
s//\1/
s/\\/\\\\/g
p
}' | $cygpath_u | sort -u | sed -n '
s/ /\\ /g
s/\(.*\)/'"$tab"'\1 \\/p
s/.\(.*\) \\/\1:/
H
$ {
s/.*/'"$tab"'/
G
p
}' >> "$depfile"
echo >> "$depfile" # make sure the fragment doesn't end with a backslash
rm -f "$tmpdepfile"
;;
msvc7msys)
# This case exists only to let depend.m4 do its work. It works by
# looking at the text of this script. This case will never be run,
# since it is checked for above.
exit 1
;;
#nosideeffect)
# This comment above is used by automake to tell side-effect
# dependency tracking mechanisms from slower ones.
dashmstdout)
# Important note: in order to support this mode, a compiler *must*
# always write the preprocessed file to stdout, regardless of -o.
"$@" || exit $?
# Remove the call to Libtool.
if test "$libtool" = yes; then
while test "X$1" != 'X--mode=compile'; do
shift
done
shift
fi
# Remove '-o $object'.
IFS=" "
for arg
do
case $arg in
-o)
shift
;;
$object)
shift
;;
*)
set fnord "$@" "$arg"
shift # fnord
shift # $arg
;;
esac
done
test -z "$dashmflag" && dashmflag=-M
# Require at least two characters before searching for ':'
# in the target name. This is to cope with DOS-style filenames:
# a dependency such as 'c:/foo/bar' could be seen as target 'c' otherwise.
"$@" $dashmflag |
sed "s|^[$tab ]*[^:$tab ][^:][^:]*:[$tab ]*|$object: |" > "$tmpdepfile"
rm -f "$depfile"
cat < "$tmpdepfile" > "$depfile"
# Some versions of the HPUX 10.20 sed can't process this sed invocation
# correctly. Breaking it into two sed invocations is a workaround.
tr ' ' "$nl" < "$tmpdepfile" \
| sed -e 's/^\\$//' -e '/^$/d' -e '/:$/d' \
| sed -e 's/$/ :/' >> "$depfile"
rm -f "$tmpdepfile"
;;
dashXmstdout)
# This case only exists to satisfy depend.m4. It is never actually
# run, as this mode is specially recognized in the preamble.
exit 1
;;
makedepend)
"$@" || exit $?
# Remove any Libtool call
if test "$libtool" = yes; then
while test "X$1" != 'X--mode=compile'; do
shift
done
shift
fi
# X makedepend
shift
cleared=no eat=no
for arg
do
case $cleared in
no)
set ""; shift
cleared=yes ;;
esac
if test $eat = yes; then
eat=no
continue
fi
case "$arg" in
-D*|-I*)
set fnord "$@" "$arg"; shift ;;
# Strip any option that makedepend may not understand. Remove
# the object too, otherwise makedepend will parse it as a source file.
-arch)
eat=yes ;;
-*|$object)
;;
*)
set fnord "$@" "$arg"; shift ;;
esac
done
obj_suffix=`echo "$object" | sed 's/^.*\././'`
touch "$tmpdepfile"
${MAKEDEPEND-makedepend} -o"$obj_suffix" -f"$tmpdepfile" "$@"
rm -f "$depfile"
# makedepend may prepend the VPATH from the source file name to the object.
# No need to regex-escape $object, excess matching of '.' is harmless.
sed "s|^.*\($object *:\)|\1|" "$tmpdepfile" > "$depfile"
# Some versions of the HPUX 10.20 sed can't process the last invocation
# correctly. Breaking it into two sed invocations is a workaround.
sed '1,2d' "$tmpdepfile" \
| tr ' ' "$nl" \
| sed -e 's/^\\$//' -e '/^$/d' -e '/:$/d' \
| sed -e 's/$/ :/' >> "$depfile"
rm -f "$tmpdepfile" "$tmpdepfile".bak
;;
cpp)
# Important note: in order to support this mode, a compiler *must*
# always write the preprocessed file to stdout.
"$@" || exit $?
# Remove the call to Libtool.
if test "$libtool" = yes; then
while test "X$1" != 'X--mode=compile'; do
shift
done
shift
fi
# Remove '-o $object'.
IFS=" "
for arg
do
case $arg in
-o)
shift
;;
$object)
shift
;;
*)
set fnord "$@" "$arg"
shift # fnord
shift # $arg
;;
esac
done
"$@" -E \
| sed -n -e '/^# [0-9][0-9]* "\([^"]*\)".*/ s:: \1 \\:p' \
-e '/^#line [0-9][0-9]* "\([^"]*\)".*/ s:: \1 \\:p' \
| sed '$ s: \\$::' > "$tmpdepfile"
rm -f "$depfile"
echo "$object : \\" > "$depfile"
cat < "$tmpdepfile" >> "$depfile"
sed < "$tmpdepfile" '/^$/d;s/^ //;s/ \\$//;s/$/ :/' >> "$depfile"
rm -f "$tmpdepfile"
;;
msvisualcpp)
# Important note: in order to support this mode, a compiler *must*
# always write the preprocessed file to stdout.
"$@" || exit $?
# Remove the call to Libtool.
if test "$libtool" = yes; then
while test "X$1" != 'X--mode=compile'; do
shift
done
shift
fi
IFS=" "
for arg
do
case "$arg" in
-o)
shift
;;
$object)
shift
;;
"-Gm"|"/Gm"|"-Gi"|"/Gi"|"-ZI"|"/ZI")
set fnord "$@"
shift
shift
;;
*)
set fnord "$@" "$arg"
shift
shift
;;
esac
done
"$@" -E 2>/dev/null |
sed -n '/^#line [0-9][0-9]* "\([^"]*\)"/ s::\1:p' | $cygpath_u | sort -u > "$tmpdepfile"
rm -f "$depfile"
echo "$object : \\" > "$depfile"
sed < "$tmpdepfile" -n -e 's% %\\ %g' -e '/^\(.*\)$/ s::'"$tab"'\1 \\:p' >> "$depfile"
echo "$tab" >> "$depfile"
sed < "$tmpdepfile" -n -e 's% %\\ %g' -e '/^\(.*\)$/ s::\1\::p' >> "$depfile"
rm -f "$tmpdepfile"
;;
msvcmsys)
# This case exists only to let depend.m4 do its work. It works by
# looking at the text of this script. This case will never be run,
# since it is checked for above.
exit 1
;;
none)
exec "$@"
;;
*)
echo "Unknown depmode $depmode" 1>&2
exit 1
;;
esac
exit 0
# Local Variables:
# mode: shell-script
# sh-indentation: 2
# eval: (add-hook 'write-file-hooks 'time-stamp)
# time-stamp-start: "scriptversion="
# time-stamp-format: "%:y-%02m-%02d.%02H"
# time-stamp-time-zone: "UTC"
# time-stamp-end: "; # UTC"
# End:
sqlsmith-1.2.1/configure
#! /bin/sh
# Guess values for system-dependent variables and create Makefiles.
# Generated by GNU Autoconf 2.69 for SQLsmith 1.2.1.
#
# Report bugs to <seltenreich@gmx.de>.
#
#
# Copyright (C) 1992-1996, 1998-2012 Free Software Foundation, Inc.
#
#
# This configure script is free software; the Free Software Foundation
# gives unlimited permission to copy, distribute and modify it.
## -------------------- ##
## M4sh Initialization. ##
## -------------------- ##
# Be more Bourne compatible
DUALCASE=1; export DUALCASE # for MKS sh
if test -n "${ZSH_VERSION+set}" && (emulate sh) >/dev/null 2>&1; then :
emulate sh
NULLCMD=:
# Pre-4.2 versions of Zsh do word splitting on ${1+"$@"}, which
# is contrary to our usage. Disable this feature.
alias -g '${1+"$@"}'='"$@"'
setopt NO_GLOB_SUBST
else
case `(set -o) 2>/dev/null` in #(
*posix*) :
set -o posix ;; #(
*) :
;;
esac
fi
as_nl='
'
export as_nl
# Printing a long string crashes Solaris 7 /usr/bin/printf.
as_echo='\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\'
as_echo=$as_echo$as_echo$as_echo$as_echo$as_echo
as_echo=$as_echo$as_echo$as_echo$as_echo$as_echo$as_echo
# Prefer a ksh shell builtin over an external printf program on Solaris,
# but without wasting forks for bash or zsh.
if test -z "$BASH_VERSION$ZSH_VERSION" \
&& (test "X`print -r -- $as_echo`" = "X$as_echo") 2>/dev/null; then
as_echo='print -r --'
as_echo_n='print -rn --'
elif (test "X`printf %s $as_echo`" = "X$as_echo") 2>/dev/null; then
as_echo='printf %s\n'
as_echo_n='printf %s'
else
if test "X`(/usr/ucb/echo -n -n $as_echo) 2>/dev/null`" = "X-n $as_echo"; then
as_echo_body='eval /usr/ucb/echo -n "$1$as_nl"'
as_echo_n='/usr/ucb/echo -n'
else
as_echo_body='eval expr "X$1" : "X\\(.*\\)"'
as_echo_n_body='eval
arg=$1;
case $arg in #(
*"$as_nl"*)
expr "X$arg" : "X\\(.*\\)$as_nl";
arg=`expr "X$arg" : ".*$as_nl\\(.*\\)"`;;
esac;
expr "X$arg" : "X\\(.*\\)" | tr -d "$as_nl"
'
export as_echo_n_body
as_echo_n='sh -c $as_echo_n_body as_echo'
fi
export as_echo_body
as_echo='sh -c $as_echo_body as_echo'
fi
# The user is always right.
if test "${PATH_SEPARATOR+set}" != set; then
PATH_SEPARATOR=:
(PATH='/bin;/bin'; FPATH=$PATH; sh -c :) >/dev/null 2>&1 && {
(PATH='/bin:/bin'; FPATH=$PATH; sh -c :) >/dev/null 2>&1 ||
PATH_SEPARATOR=';'
}
fi
# IFS
# We need space, tab and new line, in precisely that order. Quoting is
# there to prevent editors from complaining about space-tab.
# (If _AS_PATH_WALK were called with IFS unset, it would disable word
# splitting by setting IFS to empty value.)
IFS=" "" $as_nl"
# Find who we are. Look in the path if we contain no directory separator.
as_myself=
case $0 in #((
*[\\/]* ) as_myself=$0 ;;
*) as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
for as_dir in $PATH
do
IFS=$as_save_IFS
test -z "$as_dir" && as_dir=.
test -r "$as_dir/$0" && as_myself=$as_dir/$0 && break
done
IFS=$as_save_IFS
;;
esac
# We did not find ourselves, most probably we were run as `sh COMMAND'
# in which case we are not to be found in the path.
if test "x$as_myself" = x; then
as_myself=$0
fi
if test ! -f "$as_myself"; then
$as_echo "$as_myself: error: cannot find myself; rerun with an absolute file name" >&2
exit 1
fi
# Unset variables that we do not need and which cause bugs (e.g. in
# pre-3.0 UWIN ksh). But do not cause bugs in bash 2.01; the "|| exit 1"
# suppresses any "Segmentation fault" message there. '((' could
# trigger a bug in pdksh 5.2.14.
for as_var in BASH_ENV ENV MAIL MAILPATH
do eval test x\${$as_var+set} = xset \
&& ( (unset $as_var) || exit 1) >/dev/null 2>&1 && unset $as_var || :
done
PS1='$ '
PS2='> '
PS4='+ '
# NLS nuisances.
LC_ALL=C
export LC_ALL
LANGUAGE=C
export LANGUAGE
# CDPATH.
(unset CDPATH) >/dev/null 2>&1 && unset CDPATH
# Use a proper internal environment variable to ensure we don't fall
# into an infinite loop, continuously re-executing ourselves.
if test x"${_as_can_reexec}" != xno && test "x$CONFIG_SHELL" != x; then
_as_can_reexec=no; export _as_can_reexec;
# We cannot yet assume a decent shell, so we have to provide a
# neutralization value for shells without unset; and this also
# works around shells that cannot unset nonexistent variables.
# Preserve -v and -x to the replacement shell.
BASH_ENV=/dev/null
ENV=/dev/null
(unset BASH_ENV) >/dev/null 2>&1 && unset BASH_ENV ENV
case $- in # ((((
*v*x* | *x*v* ) as_opts=-vx ;;
*v* ) as_opts=-v ;;
*x* ) as_opts=-x ;;
* ) as_opts= ;;
esac
exec $CONFIG_SHELL $as_opts "$as_myself" ${1+"$@"}
# Admittedly, this is quite paranoid, since all the known shells bail
# out after a failed `exec'.
$as_echo "$0: could not re-execute with $CONFIG_SHELL" >&2
as_fn_exit 255
fi
# We don't want this to propagate to other subprocesses.
{ _as_can_reexec=; unset _as_can_reexec;}
if test "x$CONFIG_SHELL" = x; then
as_bourne_compatible="if test -n \"\${ZSH_VERSION+set}\" && (emulate sh) >/dev/null 2>&1; then :
emulate sh
NULLCMD=:
# Pre-4.2 versions of Zsh do word splitting on \${1+\"\$@\"}, which
# is contrary to our usage. Disable this feature.
alias -g '\${1+\"\$@\"}'='\"\$@\"'
setopt NO_GLOB_SUBST
else
case \`(set -o) 2>/dev/null\` in #(
*posix*) :
set -o posix ;; #(
*) :
;;
esac
fi
"
as_required="as_fn_return () { (exit \$1); }
as_fn_success () { as_fn_return 0; }
as_fn_failure () { as_fn_return 1; }
as_fn_ret_success () { return 0; }
as_fn_ret_failure () { return 1; }
exitcode=0
as_fn_success || { exitcode=1; echo as_fn_success failed.; }
as_fn_failure && { exitcode=1; echo as_fn_failure succeeded.; }
as_fn_ret_success || { exitcode=1; echo as_fn_ret_success failed.; }
as_fn_ret_failure && { exitcode=1; echo as_fn_ret_failure succeeded.; }
if ( set x; as_fn_ret_success y && test x = \"\$1\" ); then :
else
exitcode=1; echo positional parameters were not saved.
fi
test x\$exitcode = x0 || exit 1
test -x / || exit 1"
as_suggested=" as_lineno_1=";as_suggested=$as_suggested$LINENO;as_suggested=$as_suggested" as_lineno_1a=\$LINENO
as_lineno_2=";as_suggested=$as_suggested$LINENO;as_suggested=$as_suggested" as_lineno_2a=\$LINENO
eval 'test \"x\$as_lineno_1'\$as_run'\" != \"x\$as_lineno_2'\$as_run'\" &&
test \"x\`expr \$as_lineno_1'\$as_run' + 1\`\" = \"x\$as_lineno_2'\$as_run'\"' || exit 1"
if (eval "$as_required") 2>/dev/null; then :
as_have_required=yes
else
as_have_required=no
fi
if test x$as_have_required = xyes && (eval "$as_suggested") 2>/dev/null; then :
else
as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
as_found=false
for as_dir in /bin$PATH_SEPARATOR/usr/bin$PATH_SEPARATOR$PATH
do
IFS=$as_save_IFS
test -z "$as_dir" && as_dir=.
as_found=:
case $as_dir in #(
/*)
for as_base in sh bash ksh sh5; do
# Try only shells that exist, to save several forks.
as_shell=$as_dir/$as_base
if { test -f "$as_shell" || test -f "$as_shell.exe"; } &&
{ $as_echo "$as_bourne_compatible""$as_required" | as_run=a "$as_shell"; } 2>/dev/null; then :
CONFIG_SHELL=$as_shell as_have_required=yes
if { $as_echo "$as_bourne_compatible""$as_suggested" | as_run=a "$as_shell"; } 2>/dev/null; then :
break 2
fi
fi
done;;
esac
as_found=false
done
$as_found || { if { test -f "$SHELL" || test -f "$SHELL.exe"; } &&
{ $as_echo "$as_bourne_compatible""$as_required" | as_run=a "$SHELL"; } 2>/dev/null; then :
CONFIG_SHELL=$SHELL as_have_required=yes
fi; }
IFS=$as_save_IFS
if test "x$CONFIG_SHELL" != x; then :
export CONFIG_SHELL
# We cannot yet assume a decent shell, so we have to provide a
# neutralization value for shells without unset; and this also
# works around shells that cannot unset nonexistent variables.
# Preserve -v and -x to the replacement shell.
BASH_ENV=/dev/null
ENV=/dev/null
(unset BASH_ENV) >/dev/null 2>&1 && unset BASH_ENV ENV
case $- in # ((((
*v*x* | *x*v* ) as_opts=-vx ;;
*v* ) as_opts=-v ;;
*x* ) as_opts=-x ;;
* ) as_opts= ;;
esac
exec $CONFIG_SHELL $as_opts "$as_myself" ${1+"$@"}
# Admittedly, this is quite paranoid, since all the known shells bail
# out after a failed `exec'.
$as_echo "$0: could not re-execute with $CONFIG_SHELL" >&2
exit 255
fi
if test x$as_have_required = xno; then :
$as_echo "$0: This script requires a shell more modern than all"
$as_echo "$0: the shells that I found on your system."
if test x${ZSH_VERSION+set} = xset ; then
$as_echo "$0: In particular, zsh $ZSH_VERSION has bugs and should"
$as_echo "$0: be upgraded to zsh 4.3.4 or later."
else
$as_echo "$0: Please tell bug-autoconf@gnu.org and seltenreich@gmx.de
$0: about your system, including any error possibly output
$0: before this message. Then install a modern shell, or
$0: manually run the script under such a shell if you do
$0: have one."
fi
exit 1
fi
fi
fi
SHELL=${CONFIG_SHELL-/bin/sh}
export SHELL
# Unset more variables known to interfere with behavior of common tools.
CLICOLOR_FORCE= GREP_OPTIONS=
unset CLICOLOR_FORCE GREP_OPTIONS
## --------------------- ##
## M4sh Shell Functions. ##
## --------------------- ##
# as_fn_unset VAR
# ---------------
# Portably unset VAR.
as_fn_unset ()
{
{ eval $1=; unset $1;}
}
as_unset=as_fn_unset
# as_fn_set_status STATUS
# -----------------------
# Set $? to STATUS, without forking.
as_fn_set_status ()
{
return $1
} # as_fn_set_status
# as_fn_exit STATUS
# -----------------
# Exit the shell with STATUS, even in a "trap 0" or "set -e" context.
as_fn_exit ()
{
set +e
as_fn_set_status $1
exit $1
} # as_fn_exit
# as_fn_mkdir_p
# -------------
# Create "$as_dir" as a directory, including parents if necessary.
as_fn_mkdir_p ()
{
case $as_dir in #(
-*) as_dir=./$as_dir;;
esac
test -d "$as_dir" || eval $as_mkdir_p || {
as_dirs=
while :; do
case $as_dir in #(
*\'*) as_qdir=`$as_echo "$as_dir" | sed "s/'/'\\\\\\\\''/g"`;; #'(
*) as_qdir=$as_dir;;
esac
as_dirs="'$as_qdir' $as_dirs"
as_dir=`$as_dirname -- "$as_dir" ||
$as_expr X"$as_dir" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \
X"$as_dir" : 'X\(//\)[^/]' \| \
X"$as_dir" : 'X\(//\)$' \| \
X"$as_dir" : 'X\(/\)' \| . 2>/dev/null ||
$as_echo X"$as_dir" |
sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{
s//\1/
q
}
/^X\(\/\/\)[^/].*/{
s//\1/
q
}
/^X\(\/\/\)$/{
s//\1/
q
}
/^X\(\/\).*/{
s//\1/
q
}
s/.*/./; q'`
test -d "$as_dir" && break
done
test -z "$as_dirs" || eval "mkdir $as_dirs"
} || test -d "$as_dir" || as_fn_error $? "cannot create directory $as_dir"
} # as_fn_mkdir_p
# as_fn_executable_p FILE
# -----------------------
# Test if FILE is an executable regular file.
as_fn_executable_p ()
{
test -f "$1" && test -x "$1"
} # as_fn_executable_p
# as_fn_append VAR VALUE
# ----------------------
# Append the text in VALUE to the end of the definition contained in VAR. Take
# advantage of any shell optimizations that allow amortized linear growth over
# repeated appends, instead of the typical quadratic growth present in naive
# implementations.
if (eval "as_var=1; as_var+=2; test x\$as_var = x12") 2>/dev/null; then :
eval 'as_fn_append ()
{
eval $1+=\$2
}'
else
as_fn_append ()
{
eval $1=\$$1\$2
}
fi # as_fn_append
# as_fn_arith ARG...
# ------------------
# Perform arithmetic evaluation on the ARGs, and store the result in the
# global $as_val. Take advantage of shells that can avoid forks. The arguments
# must be portable across $(()) and expr.
if (eval "test \$(( 1 + 1 )) = 2") 2>/dev/null; then :
eval 'as_fn_arith ()
{
as_val=$(( $* ))
}'
else
as_fn_arith ()
{
as_val=`expr "$@" || test $? -eq 1`
}
fi # as_fn_arith
# as_fn_error STATUS ERROR [LINENO LOG_FD]
# ----------------------------------------
# Output "`basename $0`: error: ERROR" to stderr. If LINENO and LOG_FD are
# provided, also output the error to LOG_FD, referencing LINENO. Then exit the
# script with STATUS, using 1 if that was 0.
as_fn_error ()
{
as_status=$1; test $as_status -eq 0 && as_status=1
if test "$4"; then
as_lineno=${as_lineno-"$3"} as_lineno_stack=as_lineno_stack=$as_lineno_stack
$as_echo "$as_me:${as_lineno-$LINENO}: error: $2" >&$4
fi
$as_echo "$as_me: error: $2" >&2
as_fn_exit $as_status
} # as_fn_error
if expr a : '\(a\)' >/dev/null 2>&1 &&
test "X`expr 00001 : '.*\(...\)'`" = X001; then
as_expr=expr
else
as_expr=false
fi
if (basename -- /) >/dev/null 2>&1 && test "X`basename -- / 2>&1`" = "X/"; then
as_basename=basename
else
as_basename=false
fi
if (as_dir=`dirname -- /` && test "X$as_dir" = X/) >/dev/null 2>&1; then
as_dirname=dirname
else
as_dirname=false
fi
as_me=`$as_basename -- "$0" ||
$as_expr X/"$0" : '.*/\([^/][^/]*\)/*$' \| \
X"$0" : 'X\(//\)$' \| \
X"$0" : 'X\(/\)' \| . 2>/dev/null ||
$as_echo X/"$0" |
sed '/^.*\/\([^/][^/]*\)\/*$/{
s//\1/
q
}
/^X\/\(\/\/\)$/{
s//\1/
q
}
/^X\/\(\/\).*/{
s//\1/
q
}
s/.*/./; q'`
# Avoid depending upon Character Ranges.
as_cr_letters='abcdefghijklmnopqrstuvwxyz'
as_cr_LETTERS='ABCDEFGHIJKLMNOPQRSTUVWXYZ'
as_cr_Letters=$as_cr_letters$as_cr_LETTERS
as_cr_digits='0123456789'
as_cr_alnum=$as_cr_Letters$as_cr_digits
as_lineno_1=$LINENO as_lineno_1a=$LINENO
as_lineno_2=$LINENO as_lineno_2a=$LINENO
eval 'test "x$as_lineno_1'$as_run'" != "x$as_lineno_2'$as_run'" &&
test "x`expr $as_lineno_1'$as_run' + 1`" = "x$as_lineno_2'$as_run'"' || {
# Blame Lee E. McMahon (1931-1989) for sed's syntax. :-)
sed -n '
p
/[$]LINENO/=
' <$as_myself |
sed '
s/[$]LINENO.*/&-/
t lineno
b
:lineno
N
:loop
s/[$]LINENO\([^'$as_cr_alnum'_].*\n\)\(.*\)/\2\1\2/
t loop
s/-\n.*//
' >$as_me.lineno &&
chmod +x "$as_me.lineno" ||
{ $as_echo "$as_me: error: cannot create $as_me.lineno; rerun with a POSIX shell" >&2; as_fn_exit 1; }
# If we had to re-execute with $CONFIG_SHELL, we're ensured to have
# already done that, so ensure we don't try to do so again and fall
# in an infinite loop. This has already happened in practice.
_as_can_reexec=no; export _as_can_reexec
# Don't try to exec as it changes $[0], causing all sort of problems
# (the dirname of $[0] is not the place where we might find the
# original and so on. Autoconf is especially sensitive to this).
. "./$as_me.lineno"
# Exit status is that of the last command.
exit
}
ECHO_C= ECHO_N= ECHO_T=
case `echo -n x` in #(((((
-n*)
case `echo 'xy\c'` in
*c*) ECHO_T=' ';; # ECHO_T is single tab character.
xy) ECHO_C='\c';;
*) echo `echo ksh88 bug on AIX 6.1` > /dev/null
ECHO_T=' ';;
esac;;
*)
ECHO_N='-n';;
esac
rm -f conf$$ conf$$.exe conf$$.file
if test -d conf$$.dir; then
rm -f conf$$.dir/conf$$.file
else
rm -f conf$$.dir
mkdir conf$$.dir 2>/dev/null
fi
if (echo >conf$$.file) 2>/dev/null; then
if ln -s conf$$.file conf$$ 2>/dev/null; then
as_ln_s='ln -s'
# ... but there are two gotchas:
# 1) On MSYS, both `ln -s file dir' and `ln file dir' fail.
# 2) DJGPP < 2.04 has no symlinks; `ln -s' creates a wrapper executable.
# In both cases, we have to default to `cp -pR'.
ln -s conf$$.file conf$$.dir 2>/dev/null && test ! -f conf$$.exe ||
as_ln_s='cp -pR'
elif ln conf$$.file conf$$ 2>/dev/null; then
as_ln_s=ln
else
as_ln_s='cp -pR'
fi
else
as_ln_s='cp -pR'
fi
rm -f conf$$ conf$$.exe conf$$.dir/conf$$.file conf$$.file
rmdir conf$$.dir 2>/dev/null
if mkdir -p . 2>/dev/null; then
as_mkdir_p='mkdir -p "$as_dir"'
else
test -d ./-p && rmdir ./-p
as_mkdir_p=false
fi
as_test_x='test -x'
as_executable_p=as_fn_executable_p
# Sed expression to map a string onto a valid CPP name.
as_tr_cpp="eval sed 'y%*$as_cr_letters%P$as_cr_LETTERS%;s%[^_$as_cr_alnum]%_%g'"
# Sed expression to map a string onto a valid variable name.
as_tr_sh="eval sed 'y%*+%pp%;s%[^_$as_cr_alnum]%_%g'"
test -n "$DJDIR" || exec 7<&0 &1
# Name of the host.
# hostname on some systems (SVR3.2, old GNU/Linux) returns a bogus exit status,
# so uname gets run too.
ac_hostname=`(hostname || uname -n) 2>/dev/null | sed 1q`
#
# Initializations.
#
ac_default_prefix=/usr/local
ac_clean_files=
ac_config_libobj_dir=.
LIBOBJS=
cross_compiling=no
subdirs=
MFLAGS=
MAKEFLAGS=
# Identity of this package.
PACKAGE_NAME='SQLsmith'
PACKAGE_TARNAME='sqlsmith'
PACKAGE_VERSION='1.2.1'
PACKAGE_STRING='SQLsmith 1.2.1'
PACKAGE_BUGREPORT='seltenreich@gmx.de'
PACKAGE_URL='https://github.com/anse1/sqlsmith/'
ac_subst_vars='am__EXEEXT_FALSE
am__EXEEXT_TRUE
LTLIBOBJS
LIBOBJS
REVISION
DUT_SQLITE_FALSE
DUT_SQLITE_TRUE
CONFIG_GIT_REVISION
BOOST_REGEX_LIB
am__fastdepCC_FALSE
am__fastdepCC_TRUE
CCDEPMODE
ac_ct_CC
CFLAGS
CC
BOOST_LDFLAGS
BOOST_CPPFLAGS
host_os
host_vendor
host_cpu
host
build_os
build_vendor
build_cpu
build
DUT_MONETDB_FALSE
DUT_MONETDB_TRUE
MONETDB_MAPI_LIBS
MONETDB_MAPI_CFLAGS
LIBPQXX_LIBS
LIBPQXX_CFLAGS
PKG_CONFIG_LIBDIR
PKG_CONFIG_PATH
PKG_CONFIG
POSTGRESQL_LIBS
POSTGRESQL_LDFLAGS
POSTGRESQL_CPPFLAGS
POSTGRESQL_VERSION
PG_CONFIG
HAVE_CXX11
am__fastdepCXX_FALSE
am__fastdepCXX_TRUE
CXXDEPMODE
am__nodep
AMDEPBACKSLASH
AMDEP_FALSE
AMDEP_TRUE
am__quote
am__include
DEPDIR
OBJEXT
EXEEXT
ac_ct_CXX
CPPFLAGS
LDFLAGS
CXXFLAGS
CXX
AM_BACKSLASH
AM_DEFAULT_VERBOSITY
AM_DEFAULT_V
AM_V
am__untar
am__tar
AMTAR
am__leading_dot
SET_MAKE
AWK
mkdir_p
MKDIR_P
INSTALL_STRIP_PROGRAM
STRIP
install_sh
MAKEINFO
AUTOHEADER
AUTOMAKE
AUTOCONF
ACLOCAL
VERSION
PACKAGE
CYGPATH_W
am__isrc
INSTALL_DATA
INSTALL_SCRIPT
INSTALL_PROGRAM
target_alias
host_alias
build_alias
LIBS
ECHO_T
ECHO_N
ECHO_C
DEFS
mandir
localedir
libdir
psdir
pdfdir
dvidir
htmldir
infodir
docdir
oldincludedir
includedir
runstatedir
localstatedir
sharedstatedir
sysconfdir
datadir
datarootdir
libexecdir
sbindir
bindir
program_transform_name
prefix
exec_prefix
PACKAGE_URL
PACKAGE_BUGREPORT
PACKAGE_STRING
PACKAGE_VERSION
PACKAGE_TARNAME
PACKAGE_NAME
PATH_SEPARATOR
SHELL'
ac_subst_files=''
ac_user_opts='
enable_option_checking
enable_silent_rules
enable_dependency_tracking
with_postgresql
with_boost
with_boost_libdir
with_boost_regex
'
ac_precious_vars='build_alias
host_alias
target_alias
CXX
CXXFLAGS
LDFLAGS
LIBS
CPPFLAGS
CCC
PKG_CONFIG
PKG_CONFIG_PATH
PKG_CONFIG_LIBDIR
LIBPQXX_CFLAGS
LIBPQXX_LIBS
MONETDB_MAPI_CFLAGS
MONETDB_MAPI_LIBS
CC
CFLAGS'
# Initialize some variables set by options.
ac_init_help=
ac_init_version=false
ac_unrecognized_opts=
ac_unrecognized_sep=
# The variables have the same names as the options, with
# dashes changed to underlines.
cache_file=/dev/null
exec_prefix=NONE
no_create=
no_recursion=
prefix=NONE
program_prefix=NONE
program_suffix=NONE
program_transform_name=s,x,x,
silent=
site=
srcdir=
verbose=
x_includes=NONE
x_libraries=NONE
# Installation directory options.
# These are left unexpanded so users can "make install exec_prefix=/foo"
# and all the variables that are supposed to be based on exec_prefix
# by default will actually change.
# Use braces instead of parens because sh, perl, etc. also accept them.
# (The list follows the same order as the GNU Coding Standards.)
bindir='${exec_prefix}/bin'
sbindir='${exec_prefix}/sbin'
libexecdir='${exec_prefix}/libexec'
datarootdir='${prefix}/share'
datadir='${datarootdir}'
sysconfdir='${prefix}/etc'
sharedstatedir='${prefix}/com'
localstatedir='${prefix}/var'
runstatedir='${localstatedir}/run'
includedir='${prefix}/include'
oldincludedir='/usr/include'
docdir='${datarootdir}/doc/${PACKAGE_TARNAME}'
infodir='${datarootdir}/info'
htmldir='${docdir}'
dvidir='${docdir}'
pdfdir='${docdir}'
psdir='${docdir}'
libdir='${exec_prefix}/lib'
localedir='${datarootdir}/locale'
mandir='${datarootdir}/man'
ac_prev=
ac_dashdash=
for ac_option
do
# If the previous option needs an argument, assign it.
if test -n "$ac_prev"; then
eval $ac_prev=\$ac_option
ac_prev=
continue
fi
case $ac_option in
*=?*) ac_optarg=`expr "X$ac_option" : '[^=]*=\(.*\)'` ;;
*=) ac_optarg= ;;
*) ac_optarg=yes ;;
esac
# Accept the important Cygnus configure options, so we can diagnose typos.
case $ac_dashdash$ac_option in
--)
ac_dashdash=yes ;;
-bindir | --bindir | --bindi | --bind | --bin | --bi)
ac_prev=bindir ;;
-bindir=* | --bindir=* | --bindi=* | --bind=* | --bin=* | --bi=*)
bindir=$ac_optarg ;;
-build | --build | --buil | --bui | --bu)
ac_prev=build_alias ;;
-build=* | --build=* | --buil=* | --bui=* | --bu=*)
build_alias=$ac_optarg ;;
-cache-file | --cache-file | --cache-fil | --cache-fi \
| --cache-f | --cache- | --cache | --cach | --cac | --ca | --c)
ac_prev=cache_file ;;
-cache-file=* | --cache-file=* | --cache-fil=* | --cache-fi=* \
| --cache-f=* | --cache-=* | --cache=* | --cach=* | --cac=* | --ca=* | --c=*)
cache_file=$ac_optarg ;;
--config-cache | -C)
cache_file=config.cache ;;
-datadir | --datadir | --datadi | --datad)
ac_prev=datadir ;;
-datadir=* | --datadir=* | --datadi=* | --datad=*)
datadir=$ac_optarg ;;
-datarootdir | --datarootdir | --datarootdi | --datarootd | --dataroot \
| --dataroo | --dataro | --datar)
ac_prev=datarootdir ;;
-datarootdir=* | --datarootdir=* | --datarootdi=* | --datarootd=* \
| --dataroot=* | --dataroo=* | --dataro=* | --datar=*)
datarootdir=$ac_optarg ;;
-disable-* | --disable-*)
ac_useropt=`expr "x$ac_option" : 'x-*disable-\(.*\)'`
# Reject names that are not valid shell variable names.
expr "x$ac_useropt" : ".*[^-+._$as_cr_alnum]" >/dev/null &&
as_fn_error $? "invalid feature name: $ac_useropt"
ac_useropt_orig=$ac_useropt
ac_useropt=`$as_echo "$ac_useropt" | sed 's/[-+.]/_/g'`
case $ac_user_opts in
*"
"enable_$ac_useropt"
"*) ;;
*) ac_unrecognized_opts="$ac_unrecognized_opts$ac_unrecognized_sep--disable-$ac_useropt_orig"
ac_unrecognized_sep=', ';;
esac
eval enable_$ac_useropt=no ;;
-docdir | --docdir | --docdi | --doc | --do)
ac_prev=docdir ;;
-docdir=* | --docdir=* | --docdi=* | --doc=* | --do=*)
docdir=$ac_optarg ;;
-dvidir | --dvidir | --dvidi | --dvid | --dvi | --dv)
ac_prev=dvidir ;;
-dvidir=* | --dvidir=* | --dvidi=* | --dvid=* | --dvi=* | --dv=*)
dvidir=$ac_optarg ;;
-enable-* | --enable-*)
ac_useropt=`expr "x$ac_option" : 'x-*enable-\([^=]*\)'`
# Reject names that are not valid shell variable names.
expr "x$ac_useropt" : ".*[^-+._$as_cr_alnum]" >/dev/null &&
as_fn_error $? "invalid feature name: $ac_useropt"
ac_useropt_orig=$ac_useropt
ac_useropt=`$as_echo "$ac_useropt" | sed 's/[-+.]/_/g'`
case $ac_user_opts in
*"
"enable_$ac_useropt"
"*) ;;
*) ac_unrecognized_opts="$ac_unrecognized_opts$ac_unrecognized_sep--enable-$ac_useropt_orig"
ac_unrecognized_sep=', ';;
esac
eval enable_$ac_useropt=\$ac_optarg ;;
-exec-prefix | --exec_prefix | --exec-prefix | --exec-prefi \
| --exec-pref | --exec-pre | --exec-pr | --exec-p | --exec- \
| --exec | --exe | --ex)
ac_prev=exec_prefix ;;
-exec-prefix=* | --exec_prefix=* | --exec-prefix=* | --exec-prefi=* \
| --exec-pref=* | --exec-pre=* | --exec-pr=* | --exec-p=* | --exec-=* \
| --exec=* | --exe=* | --ex=*)
exec_prefix=$ac_optarg ;;
-gas | --gas | --ga | --g)
# Obsolete; use --with-gas.
with_gas=yes ;;
-help | --help | --hel | --he | -h)
ac_init_help=long ;;
-help=r* | --help=r* | --hel=r* | --he=r* | -hr*)
ac_init_help=recursive ;;
-help=s* | --help=s* | --hel=s* | --he=s* | -hs*)
ac_init_help=short ;;
-host | --host | --hos | --ho)
ac_prev=host_alias ;;
-host=* | --host=* | --hos=* | --ho=*)
host_alias=$ac_optarg ;;
-htmldir | --htmldir | --htmldi | --htmld | --html | --htm | --ht)
ac_prev=htmldir ;;
-htmldir=* | --htmldir=* | --htmldi=* | --htmld=* | --html=* | --htm=* \
| --ht=*)
htmldir=$ac_optarg ;;
-includedir | --includedir | --includedi | --included | --include \
| --includ | --inclu | --incl | --inc)
ac_prev=includedir ;;
-includedir=* | --includedir=* | --includedi=* | --included=* | --include=* \
| --includ=* | --inclu=* | --incl=* | --inc=*)
includedir=$ac_optarg ;;
-infodir | --infodir | --infodi | --infod | --info | --inf)
ac_prev=infodir ;;
-infodir=* | --infodir=* | --infodi=* | --infod=* | --info=* | --inf=*)
infodir=$ac_optarg ;;
-libdir | --libdir | --libdi | --libd)
ac_prev=libdir ;;
-libdir=* | --libdir=* | --libdi=* | --libd=*)
libdir=$ac_optarg ;;
-libexecdir | --libexecdir | --libexecdi | --libexecd | --libexec \
| --libexe | --libex | --libe)
ac_prev=libexecdir ;;
-libexecdir=* | --libexecdir=* | --libexecdi=* | --libexecd=* | --libexec=* \
| --libexe=* | --libex=* | --libe=*)
libexecdir=$ac_optarg ;;
-localedir | --localedir | --localedi | --localed | --locale)
ac_prev=localedir ;;
-localedir=* | --localedir=* | --localedi=* | --localed=* | --locale=*)
localedir=$ac_optarg ;;
-localstatedir | --localstatedir | --localstatedi | --localstated \
| --localstate | --localstat | --localsta | --localst | --locals)
ac_prev=localstatedir ;;
-localstatedir=* | --localstatedir=* | --localstatedi=* | --localstated=* \
| --localstate=* | --localstat=* | --localsta=* | --localst=* | --locals=*)
localstatedir=$ac_optarg ;;
-mandir | --mandir | --mandi | --mand | --man | --ma | --m)
ac_prev=mandir ;;
-mandir=* | --mandir=* | --mandi=* | --mand=* | --man=* | --ma=* | --m=*)
mandir=$ac_optarg ;;
-nfp | --nfp | --nf)
# Obsolete; use --without-fp.
with_fp=no ;;
-no-create | --no-create | --no-creat | --no-crea | --no-cre \
| --no-cr | --no-c | -n)
no_create=yes ;;
-no-recursion | --no-recursion | --no-recursio | --no-recursi \
| --no-recurs | --no-recur | --no-recu | --no-rec | --no-re | --no-r)
no_recursion=yes ;;
-oldincludedir | --oldincludedir | --oldincludedi | --oldincluded \
| --oldinclude | --oldinclud | --oldinclu | --oldincl | --oldinc \
| --oldin | --oldi | --old | --ol | --o)
ac_prev=oldincludedir ;;
-oldincludedir=* | --oldincludedir=* | --oldincludedi=* | --oldincluded=* \
| --oldinclude=* | --oldinclud=* | --oldinclu=* | --oldincl=* | --oldinc=* \
| --oldin=* | --oldi=* | --old=* | --ol=* | --o=*)
oldincludedir=$ac_optarg ;;
-prefix | --prefix | --prefi | --pref | --pre | --pr | --p)
ac_prev=prefix ;;
-prefix=* | --prefix=* | --prefi=* | --pref=* | --pre=* | --pr=* | --p=*)
prefix=$ac_optarg ;;
-program-prefix | --program-prefix | --program-prefi | --program-pref \
| --program-pre | --program-pr | --program-p)
ac_prev=program_prefix ;;
-program-prefix=* | --program-prefix=* | --program-prefi=* \
| --program-pref=* | --program-pre=* | --program-pr=* | --program-p=*)
program_prefix=$ac_optarg ;;
-program-suffix | --program-suffix | --program-suffi | --program-suff \
| --program-suf | --program-su | --program-s)
ac_prev=program_suffix ;;
-program-suffix=* | --program-suffix=* | --program-suffi=* \
| --program-suff=* | --program-suf=* | --program-su=* | --program-s=*)
program_suffix=$ac_optarg ;;
-program-transform-name | --program-transform-name \
| --program-transform-nam | --program-transform-na \
| --program-transform-n | --program-transform- \
| --program-transform | --program-transfor \
| --program-transfo | --program-transf \
| --program-trans | --program-tran \
| --progr-tra | --program-tr | --program-t)
ac_prev=program_transform_name ;;
-program-transform-name=* | --program-transform-name=* \
| --program-transform-nam=* | --program-transform-na=* \
| --program-transform-n=* | --program-transform-=* \
| --program-transform=* | --program-transfor=* \
| --program-transfo=* | --program-transf=* \
| --program-trans=* | --program-tran=* \
| --progr-tra=* | --program-tr=* | --program-t=*)
program_transform_name=$ac_optarg ;;
-pdfdir | --pdfdir | --pdfdi | --pdfd | --pdf | --pd)
ac_prev=pdfdir ;;
-pdfdir=* | --pdfdir=* | --pdfdi=* | --pdfd=* | --pdf=* | --pd=*)
pdfdir=$ac_optarg ;;
-psdir | --psdir | --psdi | --psd | --ps)
ac_prev=psdir ;;
-psdir=* | --psdir=* | --psdi=* | --psd=* | --ps=*)
psdir=$ac_optarg ;;
-q | -quiet | --quiet | --quie | --qui | --qu | --q \
| -silent | --silent | --silen | --sile | --sil)
silent=yes ;;
-runstatedir | --runstatedir | --runstatedi | --runstated \
| --runstate | --runstat | --runsta | --runst | --runs \
| --run | --ru | --r)
ac_prev=runstatedir ;;
-runstatedir=* | --runstatedir=* | --runstatedi=* | --runstated=* \
| --runstate=* | --runstat=* | --runsta=* | --runst=* | --runs=* \
| --run=* | --ru=* | --r=*)
runstatedir=$ac_optarg ;;
-sbindir | --sbindir | --sbindi | --sbind | --sbin | --sbi | --sb)
ac_prev=sbindir ;;
-sbindir=* | --sbindir=* | --sbindi=* | --sbind=* | --sbin=* \
| --sbi=* | --sb=*)
sbindir=$ac_optarg ;;
-sharedstatedir | --sharedstatedir | --sharedstatedi \
| --sharedstated | --sharedstate | --sharedstat | --sharedsta \
| --sharedst | --shareds | --shared | --share | --shar \
| --sha | --sh)
ac_prev=sharedstatedir ;;
-sharedstatedir=* | --sharedstatedir=* | --sharedstatedi=* \
| --sharedstated=* | --sharedstate=* | --sharedstat=* | --sharedsta=* \
| --sharedst=* | --shareds=* | --shared=* | --share=* | --shar=* \
| --sha=* | --sh=*)
sharedstatedir=$ac_optarg ;;
-site | --site | --sit)
ac_prev=site ;;
-site=* | --site=* | --sit=*)
site=$ac_optarg ;;
-srcdir | --srcdir | --srcdi | --srcd | --src | --sr)
ac_prev=srcdir ;;
-srcdir=* | --srcdir=* | --srcdi=* | --srcd=* | --src=* | --sr=*)
srcdir=$ac_optarg ;;
-sysconfdir | --sysconfdir | --sysconfdi | --sysconfd | --sysconf \
| --syscon | --sysco | --sysc | --sys | --sy)
ac_prev=sysconfdir ;;
-sysconfdir=* | --sysconfdir=* | --sysconfdi=* | --sysconfd=* | --sysconf=* \
| --syscon=* | --sysco=* | --sysc=* | --sys=* | --sy=*)
sysconfdir=$ac_optarg ;;
-target | --target | --targe | --targ | --tar | --ta | --t)
ac_prev=target_alias ;;
-target=* | --target=* | --targe=* | --targ=* | --tar=* | --ta=* | --t=*)
target_alias=$ac_optarg ;;
-v | -verbose | --verbose | --verbos | --verbo | --verb)
verbose=yes ;;
-version | --version | --versio | --versi | --vers | -V)
ac_init_version=: ;;
-with-* | --with-*)
ac_useropt=`expr "x$ac_option" : 'x-*with-\([^=]*\)'`
# Reject names that are not valid shell variable names.
expr "x$ac_useropt" : ".*[^-+._$as_cr_alnum]" >/dev/null &&
as_fn_error $? "invalid package name: $ac_useropt"
ac_useropt_orig=$ac_useropt
ac_useropt=`$as_echo "$ac_useropt" | sed 's/[-+.]/_/g'`
case $ac_user_opts in
*"
"with_$ac_useropt"
"*) ;;
*) ac_unrecognized_opts="$ac_unrecognized_opts$ac_unrecognized_sep--with-$ac_useropt_orig"
ac_unrecognized_sep=', ';;
esac
eval with_$ac_useropt=\$ac_optarg ;;
-without-* | --without-*)
ac_useropt=`expr "x$ac_option" : 'x-*without-\(.*\)'`
# Reject names that are not valid shell variable names.
expr "x$ac_useropt" : ".*[^-+._$as_cr_alnum]" >/dev/null &&
as_fn_error $? "invalid package name: $ac_useropt"
ac_useropt_orig=$ac_useropt
ac_useropt=`$as_echo "$ac_useropt" | sed 's/[-+.]/_/g'`
case $ac_user_opts in
*"
"with_$ac_useropt"
"*) ;;
*) ac_unrecognized_opts="$ac_unrecognized_opts$ac_unrecognized_sep--without-$ac_useropt_orig"
ac_unrecognized_sep=', ';;
esac
eval with_$ac_useropt=no ;;
--x)
# Obsolete; use --with-x.
with_x=yes ;;
-x-includes | --x-includes | --x-include | --x-includ | --x-inclu \
| --x-incl | --x-inc | --x-in | --x-i)
ac_prev=x_includes ;;
-x-includes=* | --x-includes=* | --x-include=* | --x-includ=* | --x-inclu=* \
| --x-incl=* | --x-inc=* | --x-in=* | --x-i=*)
x_includes=$ac_optarg ;;
-x-libraries | --x-libraries | --x-librarie | --x-librari \
| --x-librar | --x-libra | --x-libr | --x-lib | --x-li | --x-l)
ac_prev=x_libraries ;;
-x-libraries=* | --x-libraries=* | --x-librarie=* | --x-librari=* \
| --x-librar=* | --x-libra=* | --x-libr=* | --x-lib=* | --x-li=* | --x-l=*)
x_libraries=$ac_optarg ;;
-*) as_fn_error $? "unrecognized option: \`$ac_option'
Try \`$0 --help' for more information"
;;
*=*)
ac_envvar=`expr "x$ac_option" : 'x\([^=]*\)='`
# Reject names that are not valid shell variable names.
case $ac_envvar in #(
'' | [0-9]* | *[!_$as_cr_alnum]* )
as_fn_error $? "invalid variable name: \`$ac_envvar'" ;;
esac
eval $ac_envvar=\$ac_optarg
export $ac_envvar ;;
*)
# FIXME: should be removed in autoconf 3.0.
$as_echo "$as_me: WARNING: you should use --build, --host, --target" >&2
expr "x$ac_option" : ".*[^-._$as_cr_alnum]" >/dev/null &&
$as_echo "$as_me: WARNING: invalid host type: $ac_option" >&2
: "${build_alias=$ac_option} ${host_alias=$ac_option} ${target_alias=$ac_option}"
;;
esac
done
if test -n "$ac_prev"; then
ac_option=--`echo $ac_prev | sed 's/_/-/g'`
as_fn_error $? "missing argument to $ac_option"
fi
if test -n "$ac_unrecognized_opts"; then
case $enable_option_checking in
no) ;;
fatal) as_fn_error $? "unrecognized options: $ac_unrecognized_opts" ;;
*) $as_echo "$as_me: WARNING: unrecognized options: $ac_unrecognized_opts" >&2 ;;
esac
fi
# Check all directory arguments for consistency.
for ac_var in exec_prefix prefix bindir sbindir libexecdir datarootdir \
datadir sysconfdir sharedstatedir localstatedir includedir \
oldincludedir docdir infodir htmldir dvidir pdfdir psdir \
libdir localedir mandir runstatedir
do
eval ac_val=\$$ac_var
# Remove trailing slashes.
case $ac_val in
*/ )
ac_val=`expr "X$ac_val" : 'X\(.*[^/]\)' \| "X$ac_val" : 'X\(.*\)'`
eval $ac_var=\$ac_val;;
esac
# Be sure to have absolute directory names.
case $ac_val in
[\\/$]* | ?:[\\/]* ) continue;;
NONE | '' ) case $ac_var in *prefix ) continue;; esac;;
esac
as_fn_error $? "expected an absolute directory name for --$ac_var: $ac_val"
done
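# Illustrative example (kept as a comment, hypothetical values): the check
# above accepts absolute names such as --prefix=/usr/local or, after shell
# expansion, --prefix=$HOME/sqlsmith, while a relative name like
# --bindir=./bin is rejected with "expected an absolute directory name".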
# There might be people who depend on the old broken behavior: `$host'
# used to hold the argument of --host etc.
# FIXME: To remove some day.
build=$build_alias
host=$host_alias
target=$target_alias
# FIXME: To remove some day.
if test "x$host_alias" != x; then
if test "x$build_alias" = x; then
cross_compiling=maybe
elif test "x$build_alias" != "x$host_alias"; then
cross_compiling=yes
fi
fi
ac_tool_prefix=
test -n "$host_alias" && ac_tool_prefix=$host_alias-
test "$silent" = yes && exec 6>/dev/null
ac_pwd=`pwd` && test -n "$ac_pwd" &&
ac_ls_di=`ls -di .` &&
ac_pwd_ls_di=`cd "$ac_pwd" && ls -di .` ||
as_fn_error $? "working directory cannot be determined"
test "X$ac_ls_di" = "X$ac_pwd_ls_di" ||
as_fn_error $? "pwd does not report name of working directory"
# Find the source files, if location was not specified.
if test -z "$srcdir"; then
ac_srcdir_defaulted=yes
# Try the directory containing this script, then the parent directory.
ac_confdir=`$as_dirname -- "$as_myself" ||
$as_expr X"$as_myself" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \
X"$as_myself" : 'X\(//\)[^/]' \| \
X"$as_myself" : 'X\(//\)$' \| \
X"$as_myself" : 'X\(/\)' \| . 2>/dev/null ||
$as_echo X"$as_myself" |
sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{
s//\1/
q
}
/^X\(\/\/\)[^/].*/{
s//\1/
q
}
/^X\(\/\/\)$/{
s//\1/
q
}
/^X\(\/\).*/{
s//\1/
q
}
s/.*/./; q'`
srcdir=$ac_confdir
if test ! -r "$srcdir/$ac_unique_file"; then
srcdir=..
fi
else
ac_srcdir_defaulted=no
fi
if test ! -r "$srcdir/$ac_unique_file"; then
test "$ac_srcdir_defaulted" = yes && srcdir="$ac_confdir or .."
as_fn_error $? "cannot find sources ($ac_unique_file) in $srcdir"
fi
ac_msg="sources are in $srcdir, but \`cd $srcdir' does not work"
ac_abs_confdir=`(
cd "$srcdir" && test -r "./$ac_unique_file" || as_fn_error $? "$ac_msg"
pwd)`
# When building in place, set srcdir=.
if test "$ac_abs_confdir" = "$ac_pwd"; then
srcdir=.
fi
# Remove unnecessary trailing slashes from srcdir.
# Double slashes in file names in object file debugging info
# mess up M-x gdb in Emacs.
case $srcdir in
*/) srcdir=`expr "X$srcdir" : 'X\(.*[^/]\)' \| "X$srcdir" : 'X\(.*\)'`;;
esac
for ac_var in $ac_precious_vars; do
eval ac_env_${ac_var}_set=\${${ac_var}+set}
eval ac_env_${ac_var}_value=\$${ac_var}
eval ac_cv_env_${ac_var}_set=\${${ac_var}+set}
eval ac_cv_env_${ac_var}_value=\$${ac_var}
done
#
# Report the --help message.
#
if test "$ac_init_help" = "long"; then
# Omit some internal or obsolete options to make the list less imposing.
# This message is too long to be a string in the A/UX 3.1 sh.
cat <<_ACEOF
\`configure' configures SQLsmith 1.2.1 to adapt to many kinds of systems.
Usage: $0 [OPTION]... [VAR=VALUE]...
To assign environment variables (e.g., CC, CFLAGS...), specify them as
VAR=VALUE. See below for descriptions of some of the useful variables.
Defaults for the options are specified in brackets.
Configuration:
-h, --help display this help and exit
--help=short display options specific to this package
--help=recursive display the short help of all the included packages
-V, --version display version information and exit
-q, --quiet, --silent do not print \`checking ...' messages
--cache-file=FILE cache test results in FILE [disabled]
-C, --config-cache alias for \`--cache-file=config.cache'
-n, --no-create do not create output files
--srcdir=DIR find the sources in DIR [configure dir or \`..']
Installation directories:
--prefix=PREFIX install architecture-independent files in PREFIX
[$ac_default_prefix]
--exec-prefix=EPREFIX install architecture-dependent files in EPREFIX
[PREFIX]
By default, \`make install' will install all the files in
\`$ac_default_prefix/bin', \`$ac_default_prefix/lib' etc. You can specify
an installation prefix other than \`$ac_default_prefix' using \`--prefix',
for instance \`--prefix=\$HOME'.
For better control, use the options below.
Fine tuning of the installation directories:
--bindir=DIR user executables [EPREFIX/bin]
--sbindir=DIR system admin executables [EPREFIX/sbin]
--libexecdir=DIR program executables [EPREFIX/libexec]
--sysconfdir=DIR read-only single-machine data [PREFIX/etc]
--sharedstatedir=DIR modifiable architecture-independent data [PREFIX/com]
--localstatedir=DIR modifiable single-machine data [PREFIX/var]
--runstatedir=DIR modifiable per-process data [LOCALSTATEDIR/run]
--libdir=DIR object code libraries [EPREFIX/lib]
--includedir=DIR C header files [PREFIX/include]
--oldincludedir=DIR C header files for non-gcc [/usr/include]
--datarootdir=DIR read-only arch.-independent data root [PREFIX/share]
--datadir=DIR read-only architecture-independent data [DATAROOTDIR]
--infodir=DIR info documentation [DATAROOTDIR/info]
--localedir=DIR locale-dependent data [DATAROOTDIR/locale]
--mandir=DIR man documentation [DATAROOTDIR/man]
--docdir=DIR documentation root [DATAROOTDIR/doc/sqlsmith]
--htmldir=DIR html documentation [DOCDIR]
--dvidir=DIR dvi documentation [DOCDIR]
--pdfdir=DIR pdf documentation [DOCDIR]
--psdir=DIR ps documentation [DOCDIR]
_ACEOF
cat <<\_ACEOF
Program names:
--program-prefix=PREFIX prepend PREFIX to installed program names
--program-suffix=SUFFIX append SUFFIX to installed program names
--program-transform-name=PROGRAM run sed PROGRAM on installed program names
System types:
--build=BUILD configure for building on BUILD [guessed]
--host=HOST cross-compile to build programs to run on HOST [BUILD]
_ACEOF
fi
if test -n "$ac_init_help"; then
case $ac_init_help in
short | recursive ) echo "Configuration of SQLsmith 1.2.1:";;
esac
cat <<\_ACEOF
Optional Features:
--disable-option-checking ignore unrecognized --enable/--with options
--disable-FEATURE do not include FEATURE (same as --enable-FEATURE=no)
--enable-FEATURE[=ARG] include FEATURE [ARG=yes]
--enable-silent-rules less verbose build output (undo: "make V=1")
--disable-silent-rules verbose build output (undo: "make V=0")
--enable-dependency-tracking
do not reject slow dependency extractors
--disable-dependency-tracking
speeds up one-time build
Optional Packages:
--with-PACKAGE[=ARG] use PACKAGE [ARG=yes]
--without-PACKAGE do not use PACKAGE (same as --with-PACKAGE=no)
--with-postgresql=[ARG] use PostgreSQL library [default=yes], optionally
specify path to pg_config
--with-boost[=ARG] use Boost library from a standard location
(ARG=yes), from the specified location (ARG=<path>),
or disable it (ARG=no) [ARG=yes]
--with-boost-libdir=LIB_DIR
Force given directory for boost libraries. Note that
this will override library path detection, so use
this parameter only if default library detection
fails and you know exactly where your boost
libraries are located.
--with-boost-regex[=special-lib]
use the Regex library from boost - it is possible to
specify a certain library for the linker e.g.
--with-boost-regex=boost_regex-gcc-mt-d-1_33_1
Some influential environment variables:
CXX C++ compiler command
CXXFLAGS C++ compiler flags
LDFLAGS linker flags, e.g. -L<lib dir> if you have libraries in a
nonstandard directory
LIBS libraries to pass to the linker, e.g. -l<library>
CPPFLAGS (Objective) C/C++ preprocessor flags, e.g. -I<include dir> if
you have headers in a nonstandard directory
PKG_CONFIG path to pkg-config utility
PKG_CONFIG_PATH
directories to add to pkg-config's search path
PKG_CONFIG_LIBDIR
path overriding pkg-config's built-in search path
LIBPQXX_CFLAGS
C compiler flags for LIBPQXX, overriding pkg-config
LIBPQXX_LIBS
linker flags for LIBPQXX, overriding pkg-config
MONETDB_MAPI_CFLAGS
C compiler flags for MONETDB_MAPI, overriding pkg-config
MONETDB_MAPI_LIBS
linker flags for MONETDB_MAPI, overriding pkg-config
CC C compiler command
CFLAGS C compiler flags
Use these variables to override the choices made by `configure' or to help
it to find libraries and programs with nonstandard names/locations.
Report bugs to <seltenreich@gmx.de>.
SQLsmith home page: <https://github.com/anse1/sqlsmith/>.
_ACEOF
ac_status=$?
fi
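# Illustrative example (kept as a comment, paths are hypothetical): combining
# the options documented above, a SQLsmith build might be configured as
#   ./configure --with-boost=/opt/boost \
#               --with-postgresql=/usr/local/bin/pg_config \
#               CXX=g++ CXXFLAGS='-O2 -g'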
if test "$ac_init_help" = "recursive"; then
# If there are subdirs, report their specific --help.
for ac_dir in : $ac_subdirs_all; do test "x$ac_dir" = x: && continue
test -d "$ac_dir" ||
{ cd "$srcdir" && ac_pwd=`pwd` && srcdir=. && test -d "$ac_dir"; } ||
continue
ac_builddir=.
case "$ac_dir" in
.) ac_dir_suffix= ac_top_builddir_sub=. ac_top_build_prefix= ;;
*)
ac_dir_suffix=/`$as_echo "$ac_dir" | sed 's|^\.[\\/]||'`
# A ".." for each directory in $ac_dir_suffix.
ac_top_builddir_sub=`$as_echo "$ac_dir_suffix" | sed 's|/[^\\/]*|/..|g;s|/||'`
case $ac_top_builddir_sub in
"") ac_top_builddir_sub=. ac_top_build_prefix= ;;
*) ac_top_build_prefix=$ac_top_builddir_sub/ ;;
esac ;;
esac
ac_abs_top_builddir=$ac_pwd
ac_abs_builddir=$ac_pwd$ac_dir_suffix
# for backward compatibility:
ac_top_builddir=$ac_top_build_prefix
case $srcdir in
.) # We are building in place.
ac_srcdir=.
ac_top_srcdir=$ac_top_builddir_sub
ac_abs_top_srcdir=$ac_pwd ;;
[\\/]* | ?:[\\/]* ) # Absolute name.
ac_srcdir=$srcdir$ac_dir_suffix;
ac_top_srcdir=$srcdir
ac_abs_top_srcdir=$srcdir ;;
*) # Relative name.
ac_srcdir=$ac_top_build_prefix$srcdir$ac_dir_suffix
ac_top_srcdir=$ac_top_build_prefix$srcdir
ac_abs_top_srcdir=$ac_pwd/$srcdir ;;
esac
ac_abs_srcdir=$ac_abs_top_srcdir$ac_dir_suffix
cd "$ac_dir" || { ac_status=$?; continue; }
# Check for guested configure.
if test -f "$ac_srcdir/configure.gnu"; then
echo &&
$SHELL "$ac_srcdir/configure.gnu" --help=recursive
elif test -f "$ac_srcdir/configure"; then
echo &&
$SHELL "$ac_srcdir/configure" --help=recursive
else
$as_echo "$as_me: WARNING: no configuration information is in $ac_dir" >&2
fi || ac_status=$?
cd "$ac_pwd" || { ac_status=$?; break; }
done
fi
test -n "$ac_init_help" && exit $ac_status
if $ac_init_version; then
cat <<\_ACEOF
SQLsmith configure 1.2.1
generated by GNU Autoconf 2.69
Copyright (C) 2012 Free Software Foundation, Inc.
This configure script is free software; the Free Software Foundation
gives unlimited permission to copy, distribute and modify it.
_ACEOF
exit
fi
## ------------------------ ##
## Autoconf initialization. ##
## ------------------------ ##
# ac_fn_cxx_try_compile LINENO
# ----------------------------
# Try to compile conftest.$ac_ext, and return whether this succeeded.
ac_fn_cxx_try_compile ()
{
as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack
rm -f conftest.$ac_objext
if { { ac_try="$ac_compile"
case "(($ac_try" in
*\"* | *\`* | *\\*) ac_try_echo=\$ac_try;;
*) ac_try_echo=$ac_try;;
esac
eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\""
$as_echo "$ac_try_echo"; } >&5
(eval "$ac_compile") 2>conftest.err
ac_status=$?
if test -s conftest.err; then
grep -v '^ *+' conftest.err >conftest.er1
cat conftest.er1 >&5
mv -f conftest.er1 conftest.err
fi
$as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
test $ac_status = 0; } && {
test -z "$ac_cxx_werror_flag" ||
test ! -s conftest.err
} && test -s conftest.$ac_objext; then :
ac_retval=0
else
$as_echo "$as_me: failed program was:" >&5
sed 's/^/| /' conftest.$ac_ext >&5
ac_retval=1
fi
eval $as_lineno_stack; ${as_lineno_stack:+:} unset as_lineno
as_fn_set_status $ac_retval
} # ac_fn_cxx_try_compile
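# Usage sketch (comment only): the helper above is called with the current
# line number and signals success through its exit status, e.g.
#   if ac_fn_cxx_try_compile "$LINENO"; then :
#     # conftest.$ac_ext compiled cleanly with $CXX
#   fi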
# ac_fn_c_try_compile LINENO
# --------------------------
# Try to compile conftest.$ac_ext, and return whether this succeeded.
ac_fn_c_try_compile ()
{
as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack
rm -f conftest.$ac_objext
if { { ac_try="$ac_compile"
case "(($ac_try" in
*\"* | *\`* | *\\*) ac_try_echo=\$ac_try;;
*) ac_try_echo=$ac_try;;
esac
eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\""
$as_echo "$ac_try_echo"; } >&5
(eval "$ac_compile") 2>conftest.err
ac_status=$?
if test -s conftest.err; then
grep -v '^ *+' conftest.err >conftest.er1
cat conftest.er1 >&5
mv -f conftest.er1 conftest.err
fi
$as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
test $ac_status = 0; } && {
test -z "$ac_c_werror_flag" ||
test ! -s conftest.err
} && test -s conftest.$ac_objext; then :
ac_retval=0
else
$as_echo "$as_me: failed program was:" >&5
sed 's/^/| /' conftest.$ac_ext >&5
ac_retval=1
fi
eval $as_lineno_stack; ${as_lineno_stack:+:} unset as_lineno
as_fn_set_status $ac_retval
} # ac_fn_c_try_compile
# ac_fn_cxx_try_link LINENO
# -------------------------
# Try to link conftest.$ac_ext, and return whether this succeeded.
ac_fn_cxx_try_link ()
{
as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack
rm -f conftest.$ac_objext conftest$ac_exeext
if { { ac_try="$ac_link"
case "(($ac_try" in
*\"* | *\`* | *\\*) ac_try_echo=\$ac_try;;
*) ac_try_echo=$ac_try;;
esac
eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\""
$as_echo "$ac_try_echo"; } >&5
(eval "$ac_link") 2>conftest.err
ac_status=$?
if test -s conftest.err; then
grep -v '^ *+' conftest.err >conftest.er1
cat conftest.er1 >&5
mv -f conftest.er1 conftest.err
fi
$as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
test $ac_status = 0; } && {
test -z "$ac_cxx_werror_flag" ||
test ! -s conftest.err
} && test -s conftest$ac_exeext && {
test "$cross_compiling" = yes ||
test -x conftest$ac_exeext
}; then :
ac_retval=0
else
$as_echo "$as_me: failed program was:" >&5
sed 's/^/| /' conftest.$ac_ext >&5
ac_retval=1
fi
# Delete the IPA/IPO (Inter Procedural Analysis/Optimization) information
# created by the PGI compiler (conftest_ipa8_conftest.oo), as it would
# interfere with the next link command; also delete a directory that is
# left behind by Apple's compiler. We do this before executing the actions.
rm -rf conftest.dSYM conftest_ipa8_conftest.oo
eval $as_lineno_stack; ${as_lineno_stack:+:} unset as_lineno
as_fn_set_status $ac_retval
} # ac_fn_cxx_try_link
cat >config.log <<_ACEOF
This file contains any messages produced by compilers while
running configure, to aid debugging if configure makes a mistake.
It was created by SQLsmith $as_me 1.2.1, which was
generated by GNU Autoconf 2.69. Invocation command line was
$ $0 $@
_ACEOF
exec 5>>config.log
{
cat <<_ASUNAME
## --------- ##
## Platform. ##
## --------- ##
hostname = `(hostname || uname -n) 2>/dev/null | sed 1q`
uname -m = `(uname -m) 2>/dev/null || echo unknown`
uname -r = `(uname -r) 2>/dev/null || echo unknown`
uname -s = `(uname -s) 2>/dev/null || echo unknown`
uname -v = `(uname -v) 2>/dev/null || echo unknown`
/usr/bin/uname -p = `(/usr/bin/uname -p) 2>/dev/null || echo unknown`
/bin/uname -X = `(/bin/uname -X) 2>/dev/null || echo unknown`
/bin/arch = `(/bin/arch) 2>/dev/null || echo unknown`
/usr/bin/arch -k = `(/usr/bin/arch -k) 2>/dev/null || echo unknown`
/usr/convex/getsysinfo = `(/usr/convex/getsysinfo) 2>/dev/null || echo unknown`
/usr/bin/hostinfo = `(/usr/bin/hostinfo) 2>/dev/null || echo unknown`
/bin/machine = `(/bin/machine) 2>/dev/null || echo unknown`
/usr/bin/oslevel = `(/usr/bin/oslevel) 2>/dev/null || echo unknown`
/bin/universe = `(/bin/universe) 2>/dev/null || echo unknown`
_ASUNAME
as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
for as_dir in $PATH
do
IFS=$as_save_IFS
test -z "$as_dir" && as_dir=.
$as_echo "PATH: $as_dir"
done
IFS=$as_save_IFS
} >&5
cat >&5 <<_ACEOF
## ----------- ##
## Core tests. ##
## ----------- ##
_ACEOF
# Keep a trace of the command line.
# Strip out --no-create and --no-recursion so they do not pile up.
# Strip out --silent because we don't want to record it for future runs.
# Also quote any args containing shell meta-characters.
# Make two passes to allow for proper duplicate-argument suppression.
ac_configure_args=
ac_configure_args0=
ac_configure_args1=
ac_must_keep_next=false
for ac_pass in 1 2
do
for ac_arg
do
case $ac_arg in
-no-create | --no-c* | -n | -no-recursion | --no-r*) continue ;;
-q | -quiet | --quiet | --quie | --qui | --qu | --q \
| -silent | --silent | --silen | --sile | --sil)
continue ;;
*\'*)
ac_arg=`$as_echo "$ac_arg" | sed "s/'/'\\\\\\\\''/g"` ;;
esac
case $ac_pass in
1) as_fn_append ac_configure_args0 " '$ac_arg'" ;;
2)
as_fn_append ac_configure_args1 " '$ac_arg'"
if test $ac_must_keep_next = true; then
ac_must_keep_next=false # Got value, back to normal.
else
case $ac_arg in
*=* | --config-cache | -C | -disable-* | --disable-* \
| -enable-* | --enable-* | -gas | --g* | -nfp | --nf* \
| -q | -quiet | --q* | -silent | --sil* | -v | -verb* \
| -with-* | --with-* | -without-* | --without-* | --x)
case "$ac_configure_args0 " in
"$ac_configure_args1"*" '$ac_arg' "* ) continue ;;
esac
;;
-* ) ac_must_keep_next=true ;;
esac
fi
as_fn_append ac_configure_args " '$ac_arg'"
;;
esac
done
done
{ ac_configure_args0=; unset ac_configure_args0;}
{ ac_configure_args1=; unset ac_configure_args1;}
# When interrupted or exit'd, cleanup temporary files, and complete
# config.log. We remove comments because anyway the quotes in there
# would cause problems or look ugly.
# WARNING: Use '\'' to represent an apostrophe within the trap.
# WARNING: Do not start the trap code with a newline, due to a FreeBSD 4.0 bug.
trap 'exit_status=$?
# Save into config.log some information that might help in debugging.
{
echo
$as_echo "## ---------------- ##
## Cache variables. ##
## ---------------- ##"
echo
# The following way of writing the cache mishandles newlines in values,
(
for ac_var in `(set) 2>&1 | sed -n '\''s/^\([a-zA-Z_][a-zA-Z0-9_]*\)=.*/\1/p'\''`; do
eval ac_val=\$$ac_var
case $ac_val in #(
*${as_nl}*)
case $ac_var in #(
*_cv_*) { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: cache variable $ac_var contains a newline" >&5
$as_echo "$as_me: WARNING: cache variable $ac_var contains a newline" >&2;} ;;
esac
case $ac_var in #(
_ | IFS | as_nl) ;; #(
BASH_ARGV | BASH_SOURCE) eval $ac_var= ;; #(
*) { eval $ac_var=; unset $ac_var;} ;;
esac ;;
esac
done
(set) 2>&1 |
case $as_nl`(ac_space='\'' '\''; set) 2>&1` in #(
*${as_nl}ac_space=\ *)
sed -n \
"s/'\''/'\''\\\\'\'''\''/g;
s/^\\([_$as_cr_alnum]*_cv_[_$as_cr_alnum]*\\)=\\(.*\\)/\\1='\''\\2'\''/p"
;; #(
*)
sed -n "/^[_$as_cr_alnum]*_cv_[_$as_cr_alnum]*=/p"
;;
esac |
sort
)
echo
$as_echo "## ----------------- ##
## Output variables. ##
## ----------------- ##"
echo
for ac_var in $ac_subst_vars
do
eval ac_val=\$$ac_var
case $ac_val in
*\'\''*) ac_val=`$as_echo "$ac_val" | sed "s/'\''/'\''\\\\\\\\'\'''\''/g"`;;
esac
$as_echo "$ac_var='\''$ac_val'\''"
done | sort
echo
if test -n "$ac_subst_files"; then
$as_echo "## ------------------- ##
## File substitutions. ##
## ------------------- ##"
echo
for ac_var in $ac_subst_files
do
eval ac_val=\$$ac_var
case $ac_val in
*\'\''*) ac_val=`$as_echo "$ac_val" | sed "s/'\''/'\''\\\\\\\\'\'''\''/g"`;;
esac
$as_echo "$ac_var='\''$ac_val'\''"
done | sort
echo
fi
if test -s confdefs.h; then
$as_echo "## ----------- ##
## confdefs.h. ##
## ----------- ##"
echo
cat confdefs.h
echo
fi
test "$ac_signal" != 0 &&
$as_echo "$as_me: caught signal $ac_signal"
$as_echo "$as_me: exit $exit_status"
} >&5
rm -f core *.core core.conftest.* &&
rm -f -r conftest* confdefs* conf$$* $ac_clean_files &&
exit $exit_status
' 0
for ac_signal in 1 2 13 15; do
trap 'ac_signal='$ac_signal'; as_fn_exit 1' $ac_signal
done
ac_signal=0
# confdefs.h avoids OS command line length limits that DEFS can exceed.
rm -f -r conftest* confdefs.h
$as_echo "/* confdefs.h */" > confdefs.h
# Predefined preprocessor variables.
cat >>confdefs.h <<_ACEOF
#define PACKAGE_NAME "$PACKAGE_NAME"
_ACEOF
cat >>confdefs.h <<_ACEOF
#define PACKAGE_TARNAME "$PACKAGE_TARNAME"
_ACEOF
cat >>confdefs.h <<_ACEOF
#define PACKAGE_VERSION "$PACKAGE_VERSION"
_ACEOF
cat >>confdefs.h <<_ACEOF
#define PACKAGE_STRING "$PACKAGE_STRING"
_ACEOF
cat >>confdefs.h <<_ACEOF
#define PACKAGE_BUGREPORT "$PACKAGE_BUGREPORT"
_ACEOF
cat >>confdefs.h <<_ACEOF
#define PACKAGE_URL "$PACKAGE_URL"
_ACEOF
# Let the site file select an alternate cache file if it wants to.
# Prefer an explicitly selected file to automatically selected ones.
ac_site_file1=NONE
ac_site_file2=NONE
if test -n "$CONFIG_SITE"; then
# We do not want a PATH search for config.site.
case $CONFIG_SITE in #((
-*) ac_site_file1=./$CONFIG_SITE;;
*/*) ac_site_file1=$CONFIG_SITE;;
*) ac_site_file1=./$CONFIG_SITE;;
esac
elif test "x$prefix" != xNONE; then
ac_site_file1=$prefix/share/config.site
ac_site_file2=$prefix/etc/config.site
else
ac_site_file1=$ac_default_prefix/share/config.site
ac_site_file2=$ac_default_prefix/etc/config.site
fi
for ac_site_file in "$ac_site_file1" "$ac_site_file2"
do
test "x$ac_site_file" = xNONE && continue
if test /dev/null != "$ac_site_file" && test -r "$ac_site_file"; then
{ $as_echo "$as_me:${as_lineno-$LINENO}: loading site script $ac_site_file" >&5
$as_echo "$as_me: loading site script $ac_site_file" >&6;}
sed 's/^/| /' "$ac_site_file" >&5
. "$ac_site_file" \
|| { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5
$as_echo "$as_me: error: in \`$ac_pwd':" >&2;}
as_fn_error $? "failed to load site script $ac_site_file
See \`config.log' for more details" "$LINENO" 5; }
fi
done
if test -r "$cache_file"; then
# Some versions of bash will fail to source /dev/null (special files
# actually), so we avoid doing that. DJGPP emulates it as a regular file.
if test /dev/null != "$cache_file" && test -f "$cache_file"; then
{ $as_echo "$as_me:${as_lineno-$LINENO}: loading cache $cache_file" >&5
$as_echo "$as_me: loading cache $cache_file" >&6;}
case $cache_file in
[\\/]* | ?:[\\/]* ) . "$cache_file";;
*) . "./$cache_file";;
esac
fi
else
{ $as_echo "$as_me:${as_lineno-$LINENO}: creating cache $cache_file" >&5
$as_echo "$as_me: creating cache $cache_file" >&6;}
>$cache_file
fi
# Check that the precious variables saved in the cache have kept the same
# value.
ac_cache_corrupted=false
for ac_var in $ac_precious_vars; do
eval ac_old_set=\$ac_cv_env_${ac_var}_set
eval ac_new_set=\$ac_env_${ac_var}_set
eval ac_old_val=\$ac_cv_env_${ac_var}_value
eval ac_new_val=\$ac_env_${ac_var}_value
case $ac_old_set,$ac_new_set in
set,)
{ $as_echo "$as_me:${as_lineno-$LINENO}: error: \`$ac_var' was set to \`$ac_old_val' in the previous run" >&5
$as_echo "$as_me: error: \`$ac_var' was set to \`$ac_old_val' in the previous run" >&2;}
ac_cache_corrupted=: ;;
,set)
{ $as_echo "$as_me:${as_lineno-$LINENO}: error: \`$ac_var' was not set in the previous run" >&5
$as_echo "$as_me: error: \`$ac_var' was not set in the previous run" >&2;}
ac_cache_corrupted=: ;;
,);;
*)
if test "x$ac_old_val" != "x$ac_new_val"; then
# differences in whitespace do not lead to failure.
ac_old_val_w=`echo x $ac_old_val`
ac_new_val_w=`echo x $ac_new_val`
if test "$ac_old_val_w" != "$ac_new_val_w"; then
{ $as_echo "$as_me:${as_lineno-$LINENO}: error: \`$ac_var' has changed since the previous run:" >&5
$as_echo "$as_me: error: \`$ac_var' has changed since the previous run:" >&2;}
ac_cache_corrupted=:
else
{ $as_echo "$as_me:${as_lineno-$LINENO}: warning: ignoring whitespace changes in \`$ac_var' since the previous run:" >&5
$as_echo "$as_me: warning: ignoring whitespace changes in \`$ac_var' since the previous run:" >&2;}
eval $ac_var=\$ac_old_val
fi
{ $as_echo "$as_me:${as_lineno-$LINENO}: former value: \`$ac_old_val'" >&5
$as_echo "$as_me: former value: \`$ac_old_val'" >&2;}
{ $as_echo "$as_me:${as_lineno-$LINENO}: current value: \`$ac_new_val'" >&5
$as_echo "$as_me: current value: \`$ac_new_val'" >&2;}
fi;;
esac
# Pass precious variables to config.status.
if test "$ac_new_set" = set; then
case $ac_new_val in
*\'*) ac_arg=$ac_var=`$as_echo "$ac_new_val" | sed "s/'/'\\\\\\\\''/g"` ;;
*) ac_arg=$ac_var=$ac_new_val ;;
esac
case " $ac_configure_args " in
*" '$ac_arg' "*) ;; # Avoid dups. Use of quotes ensures accuracy.
*) as_fn_append ac_configure_args " '$ac_arg'" ;;
esac
fi
done
if $ac_cache_corrupted; then
{ $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5
$as_echo "$as_me: error: in \`$ac_pwd':" >&2;}
{ $as_echo "$as_me:${as_lineno-$LINENO}: error: changes in the environment can compromise the build" >&5
$as_echo "$as_me: error: changes in the environment can compromise the build" >&2;}
as_fn_error $? "run \`make distclean' and/or \`rm $cache_file' and start over" "$LINENO" 5
fi
## -------------------- ##
## Main body of script. ##
## -------------------- ##
ac_ext=c
ac_cpp='$CPP $CPPFLAGS'
ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5'
ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5'
ac_compiler_gnu=$ac_cv_c_compiler_gnu
ac_ext=cpp
ac_cpp='$CXXCPP $CPPFLAGS'
ac_compile='$CXX -c $CXXFLAGS $CPPFLAGS conftest.$ac_ext >&5'
ac_link='$CXX -o conftest$ac_exeext $CXXFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5'
ac_compiler_gnu=$ac_cv_cxx_compiler_gnu
am__api_version='1.15'
ac_aux_dir=
for ac_dir in "$srcdir" "$srcdir/.." "$srcdir/../.."; do
if test -f "$ac_dir/install-sh"; then
ac_aux_dir=$ac_dir
ac_install_sh="$ac_aux_dir/install-sh -c"
break
elif test -f "$ac_dir/install.sh"; then
ac_aux_dir=$ac_dir
ac_install_sh="$ac_aux_dir/install.sh -c"
break
elif test -f "$ac_dir/shtool"; then
ac_aux_dir=$ac_dir
ac_install_sh="$ac_aux_dir/shtool install -c"
break
fi
done
if test -z "$ac_aux_dir"; then
as_fn_error $? "cannot find install-sh, install.sh, or shtool in \"$srcdir\" \"$srcdir/..\" \"$srcdir/../..\"" "$LINENO" 5
fi
# These three variables are undocumented and unsupported,
# and are intended to be withdrawn in a future Autoconf release.
# They can cause serious problems if a builder's source tree is in a directory
# whose full name contains unusual characters.
ac_config_guess="$SHELL $ac_aux_dir/config.guess" # Please don't use this var.
ac_config_sub="$SHELL $ac_aux_dir/config.sub" # Please don't use this var.
ac_configure="$SHELL $ac_aux_dir/configure" # Please don't use this var.
# Find a good install program. We prefer a C program (faster),
# so one script is as good as another. But avoid the broken or
# incompatible versions:
# SysV /etc/install, /usr/sbin/install
# SunOS /usr/etc/install
# IRIX /sbin/install
# AIX /bin/install
# AmigaOS /C/install, which installs bootblocks on floppy discs
# AIX 4 /usr/bin/installbsd, which doesn't work without a -g flag
# AFS /usr/afsws/bin/install, which mishandles nonexistent args
# SVR4 /usr/ucb/install, which tries to use the nonexistent group "staff"
# OS/2's system install, which has a completely different semantic
# ./install, which can be erroneously created by make from ./install.sh.
# Reject install programs that cannot install multiple files.
{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for a BSD-compatible install" >&5
$as_echo_n "checking for a BSD-compatible install... " >&6; }
if test -z "$INSTALL"; then
if ${ac_cv_path_install+:} false; then :
$as_echo_n "(cached) " >&6
else
as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
for as_dir in $PATH
do
IFS=$as_save_IFS
test -z "$as_dir" && as_dir=.
# Account for people who put trailing slashes in PATH elements.
case $as_dir/ in #((
./ | .// | /[cC]/* | \
/etc/* | /usr/sbin/* | /usr/etc/* | /sbin/* | /usr/afsws/bin/* | \
?:[\\/]os2[\\/]install[\\/]* | ?:[\\/]OS2[\\/]INSTALL[\\/]* | \
/usr/ucb/* ) ;;
*)
# OSF1 and SCO ODT 3.0 have their own names for install.
# Don't use installbsd from OSF since it installs stuff as root
# by default.
for ac_prog in ginstall scoinst install; do
for ac_exec_ext in '' $ac_executable_extensions; do
if as_fn_executable_p "$as_dir/$ac_prog$ac_exec_ext"; then
if test $ac_prog = install &&
grep dspmsg "$as_dir/$ac_prog$ac_exec_ext" >/dev/null 2>&1; then
# AIX install. It has an incompatible calling convention.
:
elif test $ac_prog = install &&
grep pwplus "$as_dir/$ac_prog$ac_exec_ext" >/dev/null 2>&1; then
# program-specific install script used by HP pwplus--don't use.
:
else
rm -rf conftest.one conftest.two conftest.dir
echo one > conftest.one
echo two > conftest.two
mkdir conftest.dir
if "$as_dir/$ac_prog$ac_exec_ext" -c conftest.one conftest.two "`pwd`/conftest.dir" &&
test -s conftest.one && test -s conftest.two &&
test -s conftest.dir/conftest.one &&
test -s conftest.dir/conftest.two
then
ac_cv_path_install="$as_dir/$ac_prog$ac_exec_ext -c"
break 3
fi
fi
fi
done
done
;;
esac
done
IFS=$as_save_IFS
rm -rf conftest.one conftest.two conftest.dir
fi
if test "${ac_cv_path_install+set}" = set; then
INSTALL=$ac_cv_path_install
else
# As a last resort, use the slow shell script. Don't cache a
# value for INSTALL within a source directory, because that will
# break other packages using the cache if that directory is
# removed, or if the value is a relative name.
INSTALL=$ac_install_sh
fi
fi
{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $INSTALL" >&5
$as_echo "$INSTALL" >&6; }
# Use test -z because SunOS4 sh mishandles braces in ${var-val}.
# It thinks the first close brace ends the variable substitution.
test -z "$INSTALL_PROGRAM" && INSTALL_PROGRAM='${INSTALL}'
test -z "$INSTALL_SCRIPT" && INSTALL_SCRIPT='${INSTALL}'
test -z "$INSTALL_DATA" && INSTALL_DATA='${INSTALL} -m 644'
{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether build environment is sane" >&5
$as_echo_n "checking whether build environment is sane... " >&6; }
# Reject unsafe characters in $srcdir or the absolute working directory
# name. Accept space and tab only in the latter.
am_lf='
'
case `pwd` in
*[\\\"\#\$\&\'\`$am_lf]*)
as_fn_error $? "unsafe absolute working directory name" "$LINENO" 5;;
esac
case $srcdir in
*[\\\"\#\$\&\'\`$am_lf\ \ ]*)
as_fn_error $? "unsafe srcdir value: '$srcdir'" "$LINENO" 5;;
esac
# Do 'set' in a subshell so we don't clobber the current shell's
# arguments. Must try -L first in case configure is actually a
# symlink; some systems play weird games with the mod time of symlinks
# (eg FreeBSD returns the mod time of the symlink's containing
# directory).
if (
am_has_slept=no
for am_try in 1 2; do
echo "timestamp, slept: $am_has_slept" > conftest.file
set X `ls -Lt "$srcdir/configure" conftest.file 2> /dev/null`
if test "$*" = "X"; then
# -L didn't work.
set X `ls -t "$srcdir/configure" conftest.file`
fi
if test "$*" != "X $srcdir/configure conftest.file" \
&& test "$*" != "X conftest.file $srcdir/configure"; then
# If neither matched, then we have a broken ls. This can happen
# if, for instance, CONFIG_SHELL is bash and it inherits a
# broken ls alias from the environment. This has actually
# happened. Such a system could not be considered "sane".
as_fn_error $? "ls -t appears to fail. Make sure there is not a broken
alias in your environment" "$LINENO" 5
fi
if test "$2" = conftest.file || test $am_try -eq 2; then
break
fi
# Just in case.
sleep 1
am_has_slept=yes
done
test "$2" = conftest.file
)
then
# Ok.
:
else
as_fn_error $? "newly created file is older than distributed files!
Check your system clock" "$LINENO" 5
fi
{ $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5
$as_echo "yes" >&6; }
# If we didn't sleep, we still need to ensure time stamps of config.status and
# generated files are strictly newer.
am_sleep_pid=
if grep 'slept: no' conftest.file >/dev/null 2>&1; then
( sleep 1 ) &
am_sleep_pid=$!
fi
rm -f conftest.file
test "$program_prefix" != NONE &&
program_transform_name="s&^&$program_prefix&;$program_transform_name"
# Use a double $ so make ignores it.
test "$program_suffix" != NONE &&
program_transform_name="s&\$&$program_suffix&;$program_transform_name"
# Double any \ or $.
# By default was `s,x,x', remove it if useless.
ac_script='s/[\\$]/&&/g;s/;s,x,x,$//'
program_transform_name=`$as_echo "$program_transform_name" | sed "$ac_script"`
# Expand $ac_aux_dir to an absolute path.
am_aux_dir=`cd "$ac_aux_dir" && pwd`
if test x"${MISSING+set}" != xset; then
case $am_aux_dir in
*\ * | *\ *)
MISSING="\${SHELL} \"$am_aux_dir/missing\"" ;;
*)
MISSING="\${SHELL} $am_aux_dir/missing" ;;
esac
fi
# Use eval to expand $SHELL
if eval "$MISSING --is-lightweight"; then
am_missing_run="$MISSING "
else
am_missing_run=
{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: 'missing' script is too old or missing" >&5
$as_echo "$as_me: WARNING: 'missing' script is too old or missing" >&2;}
fi
if test x"${install_sh+set}" != xset; then
case $am_aux_dir in
*\ * | *\ *)
install_sh="\${SHELL} '$am_aux_dir/install-sh'" ;;
*)
install_sh="\${SHELL} $am_aux_dir/install-sh"
esac
fi
# Installed binaries are usually stripped using 'strip' when the user
# run "make install-strip". However 'strip' might not be the right
# tool to use in cross-compilation environments, therefore Automake
# will honor the 'STRIP' environment variable to overrule this program.
if test "$cross_compiling" != no; then
if test -n "$ac_tool_prefix"; then
# Extract the first word of "${ac_tool_prefix}strip", so it can be a program name with args.
set dummy ${ac_tool_prefix}strip; ac_word=$2
{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
$as_echo_n "checking for $ac_word... " >&6; }
if ${ac_cv_prog_STRIP+:} false; then :
$as_echo_n "(cached) " >&6
else
if test -n "$STRIP"; then
ac_cv_prog_STRIP="$STRIP" # Let the user override the test.
else
as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
for as_dir in $PATH
do
IFS=$as_save_IFS
test -z "$as_dir" && as_dir=.
for ac_exec_ext in '' $ac_executable_extensions; do
if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then
ac_cv_prog_STRIP="${ac_tool_prefix}strip"
$as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
break 2
fi
done
done
IFS=$as_save_IFS
fi
fi
STRIP=$ac_cv_prog_STRIP
if test -n "$STRIP"; then
{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $STRIP" >&5
$as_echo "$STRIP" >&6; }
else
{ $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
$as_echo "no" >&6; }
fi
fi
if test -z "$ac_cv_prog_STRIP"; then
ac_ct_STRIP=$STRIP
# Extract the first word of "strip", so it can be a program name with args.
set dummy strip; ac_word=$2
{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
$as_echo_n "checking for $ac_word... " >&6; }
if ${ac_cv_prog_ac_ct_STRIP+:} false; then :
$as_echo_n "(cached) " >&6
else
if test -n "$ac_ct_STRIP"; then
ac_cv_prog_ac_ct_STRIP="$ac_ct_STRIP" # Let the user override the test.
else
as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
for as_dir in $PATH
do
IFS=$as_save_IFS
test -z "$as_dir" && as_dir=.
for ac_exec_ext in '' $ac_executable_extensions; do
if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then
ac_cv_prog_ac_ct_STRIP="strip"
$as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
break 2
fi
done
done
IFS=$as_save_IFS
fi
fi
ac_ct_STRIP=$ac_cv_prog_ac_ct_STRIP
if test -n "$ac_ct_STRIP"; then
{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_STRIP" >&5
$as_echo "$ac_ct_STRIP" >&6; }
else
{ $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
$as_echo "no" >&6; }
fi
if test "x$ac_ct_STRIP" = x; then
STRIP=":"
else
case $cross_compiling:$ac_tool_warned in
yes:)
{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5
$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;}
ac_tool_warned=yes ;;
esac
STRIP=$ac_ct_STRIP
fi
else
STRIP="$ac_cv_prog_STRIP"
fi
fi
INSTALL_STRIP_PROGRAM="\$(install_sh) -c -s"
{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for a thread-safe mkdir -p" >&5
$as_echo_n "checking for a thread-safe mkdir -p... " >&6; }
if test -z "$MKDIR_P"; then
if ${ac_cv_path_mkdir+:} false; then :
$as_echo_n "(cached) " >&6
else
as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
for as_dir in $PATH$PATH_SEPARATOR/opt/sfw/bin
do
IFS=$as_save_IFS
test -z "$as_dir" && as_dir=.
for ac_prog in mkdir gmkdir; do
for ac_exec_ext in '' $ac_executable_extensions; do
as_fn_executable_p "$as_dir/$ac_prog$ac_exec_ext" || continue
case `"$as_dir/$ac_prog$ac_exec_ext" --version 2>&1` in #(
'mkdir (GNU coreutils) '* | \
'mkdir (coreutils) '* | \
'mkdir (fileutils) '4.1*)
ac_cv_path_mkdir=$as_dir/$ac_prog$ac_exec_ext
break 3;;
esac
done
done
done
IFS=$as_save_IFS
fi
test -d ./--version && rmdir ./--version
if test "${ac_cv_path_mkdir+set}" = set; then
MKDIR_P="$ac_cv_path_mkdir -p"
else
# As a last resort, use the slow shell script. Don't cache a
# value for MKDIR_P within a source directory, because that will
# break other packages using the cache if that directory is
# removed, or if the value is a relative name.
MKDIR_P="$ac_install_sh -d"
fi
fi
{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $MKDIR_P" >&5
$as_echo "$MKDIR_P" >&6; }
for ac_prog in gawk mawk nawk awk
do
# Extract the first word of "$ac_prog", so it can be a program name with args.
set dummy $ac_prog; ac_word=$2
{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
$as_echo_n "checking for $ac_word... " >&6; }
if ${ac_cv_prog_AWK+:} false; then :
$as_echo_n "(cached) " >&6
else
if test -n "$AWK"; then
ac_cv_prog_AWK="$AWK" # Let the user override the test.
else
as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
for as_dir in $PATH
do
IFS=$as_save_IFS
test -z "$as_dir" && as_dir=.
for ac_exec_ext in '' $ac_executable_extensions; do
if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then
ac_cv_prog_AWK="$ac_prog"
$as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
break 2
fi
done
done
IFS=$as_save_IFS
fi
fi
AWK=$ac_cv_prog_AWK
if test -n "$AWK"; then
{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $AWK" >&5
$as_echo "$AWK" >&6; }
else
{ $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
$as_echo "no" >&6; }
fi
test -n "$AWK" && break
done
{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether ${MAKE-make} sets \$(MAKE)" >&5
$as_echo_n "checking whether ${MAKE-make} sets \$(MAKE)... " >&6; }
set x ${MAKE-make}
ac_make=`$as_echo "$2" | sed 's/+/p/g; s/[^a-zA-Z0-9_]/_/g'`
if eval \${ac_cv_prog_make_${ac_make}_set+:} false; then :
$as_echo_n "(cached) " >&6
else
cat >conftest.make <<\_ACEOF
SHELL = /bin/sh
all:
@echo '@@@%%%=$(MAKE)=@@@%%%'
_ACEOF
# GNU make sometimes prints "make[1]: Entering ...", which would confuse us.
case `${MAKE-make} -f conftest.make 2>/dev/null` in
*@@@%%%=?*=@@@%%%*)
eval ac_cv_prog_make_${ac_make}_set=yes;;
*)
eval ac_cv_prog_make_${ac_make}_set=no;;
esac
rm -f conftest.make
fi
if eval test \$ac_cv_prog_make_${ac_make}_set = yes; then
{ $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5
$as_echo "yes" >&6; }
SET_MAKE=
else
{ $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
$as_echo "no" >&6; }
SET_MAKE="MAKE=${MAKE-make}"
fi
rm -rf .tst 2>/dev/null
mkdir .tst 2>/dev/null
if test -d .tst; then
am__leading_dot=.
else
am__leading_dot=_
fi
rmdir .tst 2>/dev/null
# Check whether --enable-silent-rules was given.
if test "${enable_silent_rules+set}" = set; then :
enableval=$enable_silent_rules;
fi
case $enable_silent_rules in # (((
yes) AM_DEFAULT_VERBOSITY=0;;
no) AM_DEFAULT_VERBOSITY=1;;
*) AM_DEFAULT_VERBOSITY=1;;
esac
am_make=${MAKE-make}
{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether $am_make supports nested variables" >&5
$as_echo_n "checking whether $am_make supports nested variables... " >&6; }
if ${am_cv_make_support_nested_variables+:} false; then :
$as_echo_n "(cached) " >&6
else
if $as_echo 'TRUE=$(BAR$(V))
BAR0=false
BAR1=true
V=1
am__doit:
@$(TRUE)
.PHONY: am__doit' | $am_make -f - >/dev/null 2>&1; then
am_cv_make_support_nested_variables=yes
else
am_cv_make_support_nested_variables=no
fi
fi
{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $am_cv_make_support_nested_variables" >&5
$as_echo "$am_cv_make_support_nested_variables" >&6; }
if test $am_cv_make_support_nested_variables = yes; then
AM_V='$(V)'
AM_DEFAULT_V='$(AM_DEFAULT_VERBOSITY)'
else
AM_V=$AM_DEFAULT_VERBOSITY
AM_DEFAULT_V=$AM_DEFAULT_VERBOSITY
fi
AM_BACKSLASH='\'
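# Usage note (comment only): when nested variables are supported, the
# generated Makefiles honor the verbosity flag documented above, e.g.
#   make V=1   # show full build commands
#   make V=0   # silent rules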
if test "`cd $srcdir && pwd`" != "`pwd`"; then
# Use -I$(srcdir) only when $(srcdir) != ., so that make's output
# is not polluted with repeated "-I."
am__isrc=' -I$(srcdir)'
# test to see if srcdir already configured
if test -f $srcdir/config.status; then
as_fn_error $? "source directory already configured; run \"make distclean\" there first" "$LINENO" 5
fi
fi
# test whether we have cygpath
if test -z "$CYGPATH_W"; then
if (cygpath --version) >/dev/null 2>/dev/null; then
CYGPATH_W='cygpath -w'
else
CYGPATH_W=echo
fi
fi
# Define the identity of the package.
PACKAGE='sqlsmith'
VERSION='1.2.1'
cat >>confdefs.h <<_ACEOF
#define PACKAGE "$PACKAGE"
_ACEOF
cat >>confdefs.h <<_ACEOF
#define VERSION "$VERSION"
_ACEOF
# Some tools Automake needs.
ACLOCAL=${ACLOCAL-"${am_missing_run}aclocal-${am__api_version}"}
AUTOCONF=${AUTOCONF-"${am_missing_run}autoconf"}
AUTOMAKE=${AUTOMAKE-"${am_missing_run}automake-${am__api_version}"}
AUTOHEADER=${AUTOHEADER-"${am_missing_run}autoheader"}
MAKEINFO=${MAKEINFO-"${am_missing_run}makeinfo"}
# For better backward compatibility. To be removed once Automake 1.9.x
# dies out for good. For more background, see:
#
#
mkdir_p='$(MKDIR_P)'
# We need awk for the "check" target (and possibly the TAP driver). The
# system "awk" is bad on some platforms.
# Always define AMTAR for backward compatibility. Yes, it's still used
# in the wild :-( We should find a proper way to deprecate it ...
AMTAR='$${TAR-tar}'
# We'll loop over all known methods to create a tar archive until one works.
_am_tools='gnutar pax cpio none'
am__tar='$${TAR-tar} chof - "$$tardir"' am__untar='$${TAR-tar} xf -'
# POSIX will say in a future version that running "rm -f" with no argument
# is OK; and we want to be able to make that assumption in our Makefile
# recipes. So use an aggressive probe to check that the usage we want is
# actually supported "in the wild" to an acceptable degree.
# See automake bug#10828.
# To make any issue more visible, cause the running configure to be aborted
# by default if the 'rm' program in use doesn't match our expectations; the
# user can still override this though.
if rm -f && rm -fr && rm -rf; then : OK; else
cat >&2 <<'END'
Oops!
Your 'rm' program seems unable to run without file operands specified
on the command line, even when the '-f' option is present. This is contrary
to the behaviour of most rm programs out there, and not conforming with
the upcoming POSIX standard:
Please tell bug-automake@gnu.org about your system, including the value
of your $PATH and any error possibly output before this message. This
can help us improve future automake versions.
END
if test x"$ACCEPT_INFERIOR_RM_PROGRAM" = x"yes"; then
echo 'Configuration will proceed anyway, since you have set the' >&2
echo 'ACCEPT_INFERIOR_RM_PROGRAM variable to "yes"' >&2
echo >&2
else
cat >&2 <<'END'
Aborting the configuration process, to ensure you take notice of the issue.
You can download and install GNU coreutils to get an 'rm' implementation
that behaves properly: <http://www.gnu.org/software/coreutils/>.
If you want to complete the configuration process using your problematic
'rm' anyway, export the environment variable ACCEPT_INFERIOR_RM_PROGRAM
to "yes", and re-run configure.
END
as_fn_error $? "Your 'rm' program is bad, sorry." "$LINENO" 5
fi
fi
ac_ext=cpp
ac_cpp='$CXXCPP $CPPFLAGS'
ac_compile='$CXX -c $CXXFLAGS $CPPFLAGS conftest.$ac_ext >&5'
ac_link='$CXX -o conftest$ac_exeext $CXXFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5'
ac_compiler_gnu=$ac_cv_cxx_compiler_gnu
if test -z "$CXX"; then
if test -n "$CCC"; then
CXX=$CCC
else
if test -n "$ac_tool_prefix"; then
for ac_prog in g++ c++ gpp aCC CC cxx cc++ cl.exe FCC KCC RCC xlC_r xlC
do
# Extract the first word of "$ac_tool_prefix$ac_prog", so it can be a program name with args.
set dummy $ac_tool_prefix$ac_prog; ac_word=$2
{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
$as_echo_n "checking for $ac_word... " >&6; }
if ${ac_cv_prog_CXX+:} false; then :
$as_echo_n "(cached) " >&6
else
if test -n "$CXX"; then
ac_cv_prog_CXX="$CXX" # Let the user override the test.
else
as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
for as_dir in $PATH
do
IFS=$as_save_IFS
test -z "$as_dir" && as_dir=.
for ac_exec_ext in '' $ac_executable_extensions; do
if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then
ac_cv_prog_CXX="$ac_tool_prefix$ac_prog"
$as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
break 2
fi
done
done
IFS=$as_save_IFS
fi
fi
CXX=$ac_cv_prog_CXX
if test -n "$CXX"; then
{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $CXX" >&5
$as_echo "$CXX" >&6; }
else
{ $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
$as_echo "no" >&6; }
fi
test -n "$CXX" && break
done
fi
if test -z "$CXX"; then
ac_ct_CXX=$CXX
for ac_prog in g++ c++ gpp aCC CC cxx cc++ cl.exe FCC KCC RCC xlC_r xlC
do
# Extract the first word of "$ac_prog", so it can be a program name with args.
set dummy $ac_prog; ac_word=$2
{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
$as_echo_n "checking for $ac_word... " >&6; }
if ${ac_cv_prog_ac_ct_CXX+:} false; then :
$as_echo_n "(cached) " >&6
else
if test -n "$ac_ct_CXX"; then
ac_cv_prog_ac_ct_CXX="$ac_ct_CXX" # Let the user override the test.
else
as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
for as_dir in $PATH
do
IFS=$as_save_IFS
test -z "$as_dir" && as_dir=.
for ac_exec_ext in '' $ac_executable_extensions; do
if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then
ac_cv_prog_ac_ct_CXX="$ac_prog"
$as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
break 2
fi
done
done
IFS=$as_save_IFS
fi
fi
ac_ct_CXX=$ac_cv_prog_ac_ct_CXX
if test -n "$ac_ct_CXX"; then
{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_CXX" >&5
$as_echo "$ac_ct_CXX" >&6; }
else
{ $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
$as_echo "no" >&6; }
fi
test -n "$ac_ct_CXX" && break
done
if test "x$ac_ct_CXX" = x; then
CXX="g++"
else
case $cross_compiling:$ac_tool_warned in
yes:)
{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5
$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;}
ac_tool_warned=yes ;;
esac
CXX=$ac_ct_CXX
fi
fi
fi
fi
# Provide some information about the compiler.
$as_echo "$as_me:${as_lineno-$LINENO}: checking for C++ compiler version" >&5
set X $ac_compile
ac_compiler=$2
for ac_option in --version -v -V -qversion; do
{ { ac_try="$ac_compiler $ac_option >&5"
case "(($ac_try" in
*\"* | *\`* | *\\*) ac_try_echo=\$ac_try;;
*) ac_try_echo=$ac_try;;
esac
eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\""
$as_echo "$ac_try_echo"; } >&5
(eval "$ac_compiler $ac_option >&5") 2>conftest.err
ac_status=$?
if test -s conftest.err; then
sed '10a\
... rest of stderr output deleted ...
10q' conftest.err >conftest.er1
cat conftest.er1 >&5
fi
rm -f conftest.er1 conftest.err
$as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
test $ac_status = 0; }
done
cat confdefs.h - <<_ACEOF >conftest.$ac_ext
/* end confdefs.h. */
int
main ()
{
;
return 0;
}
_ACEOF
ac_clean_files_save=$ac_clean_files
ac_clean_files="$ac_clean_files a.out a.out.dSYM a.exe b.out"
# Try to create an executable without -o first, disregard a.out.
# It will help us diagnose broken compilers, and finding out an intuition
# of exeext.
{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether the C++ compiler works" >&5
$as_echo_n "checking whether the C++ compiler works... " >&6; }
ac_link_default=`$as_echo "$ac_link" | sed 's/ -o *conftest[^ ]*//'`
# The possible output files:
ac_files="a.out conftest.exe conftest a.exe a_out.exe b.out conftest.*"
ac_rmfiles=
for ac_file in $ac_files
do
case $ac_file in
*.$ac_ext | *.xcoff | *.tds | *.d | *.pdb | *.xSYM | *.bb | *.bbg | *.map | *.inf | *.dSYM | *.o | *.obj ) ;;
* ) ac_rmfiles="$ac_rmfiles $ac_file";;
esac
done
rm -f $ac_rmfiles
if { { ac_try="$ac_link_default"
case "(($ac_try" in
*\"* | *\`* | *\\*) ac_try_echo=\$ac_try;;
*) ac_try_echo=$ac_try;;
esac
eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\""
$as_echo "$ac_try_echo"; } >&5
(eval "$ac_link_default") 2>&5
ac_status=$?
$as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
test $ac_status = 0; }; then :
# Autoconf-2.13 could set the ac_cv_exeext variable to `no'.
# So ignore a value of `no', otherwise this would lead to `EXEEXT = no'
# in a Makefile. We should not override ac_cv_exeext if it was cached,
# so that the user can short-circuit this test for compilers unknown to
# Autoconf.
for ac_file in $ac_files ''
do
test -f "$ac_file" || continue
case $ac_file in
*.$ac_ext | *.xcoff | *.tds | *.d | *.pdb | *.xSYM | *.bb | *.bbg | *.map | *.inf | *.dSYM | *.o | *.obj )
;;
[ab].out )
# We found the default executable, but exeext='' is most
# certainly right.
break;;
*.* )
if test "${ac_cv_exeext+set}" = set && test "$ac_cv_exeext" != no;
then :; else
ac_cv_exeext=`expr "$ac_file" : '[^.]*\(\..*\)'`
fi
# We set ac_cv_exeext here because the later test for it is not
# safe: cross compilers may not add the suffix if given an `-o'
# argument, so we may need to know it at that point already.
# Even if this section looks crufty: it has the advantage of
# actually working.
break;;
* )
break;;
esac
done
test "$ac_cv_exeext" = no && ac_cv_exeext=
else
ac_file=''
fi
if test -z "$ac_file"; then :
{ $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
$as_echo "no" >&6; }
$as_echo "$as_me: failed program was:" >&5
sed 's/^/| /' conftest.$ac_ext >&5
{ { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5
$as_echo "$as_me: error: in \`$ac_pwd':" >&2;}
as_fn_error 77 "C++ compiler cannot create executables
See \`config.log' for more details" "$LINENO" 5; }
else
{ $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5
$as_echo "yes" >&6; }
fi
{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for C++ compiler default output file name" >&5
$as_echo_n "checking for C++ compiler default output file name... " >&6; }
{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_file" >&5
$as_echo "$ac_file" >&6; }
ac_exeext=$ac_cv_exeext
rm -f -r a.out a.out.dSYM a.exe conftest$ac_cv_exeext b.out
ac_clean_files=$ac_clean_files_save
{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for suffix of executables" >&5
$as_echo_n "checking for suffix of executables... " >&6; }
if { { ac_try="$ac_link"
case "(($ac_try" in
*\"* | *\`* | *\\*) ac_try_echo=\$ac_try;;
*) ac_try_echo=$ac_try;;
esac
eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\""
$as_echo "$ac_try_echo"; } >&5
(eval "$ac_link") 2>&5
ac_status=$?
$as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
test $ac_status = 0; }; then :
# If both `conftest.exe' and `conftest' are `present' (well, observable)
# catch `conftest.exe'. For instance with Cygwin, `ls conftest' will
# work properly (i.e., refer to `conftest.exe'), while it won't with
# `rm'.
for ac_file in conftest.exe conftest conftest.*; do
test -f "$ac_file" || continue
case $ac_file in
*.$ac_ext | *.xcoff | *.tds | *.d | *.pdb | *.xSYM | *.bb | *.bbg | *.map | *.inf | *.dSYM | *.o | *.obj ) ;;
*.* ) ac_cv_exeext=`expr "$ac_file" : '[^.]*\(\..*\)'`
break;;
* ) break;;
esac
done
else
{ { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5
$as_echo "$as_me: error: in \`$ac_pwd':" >&2;}
as_fn_error $? "cannot compute suffix of executables: cannot compile and link
See \`config.log' for more details" "$LINENO" 5; }
fi
rm -f conftest conftest$ac_cv_exeext
{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_exeext" >&5
$as_echo "$ac_cv_exeext" >&6; }
rm -f conftest.$ac_ext
EXEEXT=$ac_cv_exeext
ac_exeext=$EXEEXT
cat confdefs.h - <<_ACEOF >conftest.$ac_ext
/* end confdefs.h. */
#include <stdio.h>
int
main ()
{
FILE *f = fopen ("conftest.out", "w");
return ferror (f) || fclose (f) != 0;
;
return 0;
}
_ACEOF
ac_clean_files="$ac_clean_files conftest.out"
# Check that the compiler produces executables we can run. If not, either
# the compiler is broken, or we cross compile.
{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether we are cross compiling" >&5
$as_echo_n "checking whether we are cross compiling... " >&6; }
if test "$cross_compiling" != yes; then
{ { ac_try="$ac_link"
case "(($ac_try" in
*\"* | *\`* | *\\*) ac_try_echo=\$ac_try;;
*) ac_try_echo=$ac_try;;
esac
eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\""
$as_echo "$ac_try_echo"; } >&5
(eval "$ac_link") 2>&5
ac_status=$?
$as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
test $ac_status = 0; }
if { ac_try='./conftest$ac_cv_exeext'
{ { case "(($ac_try" in
*\"* | *\`* | *\\*) ac_try_echo=\$ac_try;;
*) ac_try_echo=$ac_try;;
esac
eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\""
$as_echo "$ac_try_echo"; } >&5
(eval "$ac_try") 2>&5
ac_status=$?
$as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
test $ac_status = 0; }; }; then
cross_compiling=no
else
if test "$cross_compiling" = maybe; then
cross_compiling=yes
else
{ { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5
$as_echo "$as_me: error: in \`$ac_pwd':" >&2;}
as_fn_error $? "cannot run C++ compiled programs.
If you meant to cross compile, use \`--host'.
See \`config.log' for more details" "$LINENO" 5; }
fi
fi
fi
{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $cross_compiling" >&5
$as_echo "$cross_compiling" >&6; }
rm -f conftest.$ac_ext conftest$ac_cv_exeext conftest.out
ac_clean_files=$ac_clean_files_save
{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for suffix of object files" >&5
$as_echo_n "checking for suffix of object files... " >&6; }
if ${ac_cv_objext+:} false; then :
$as_echo_n "(cached) " >&6
else
cat confdefs.h - <<_ACEOF >conftest.$ac_ext
/* end confdefs.h. */
int
main ()
{
;
return 0;
}
_ACEOF
rm -f conftest.o conftest.obj
if { { ac_try="$ac_compile"
case "(($ac_try" in
*\"* | *\`* | *\\*) ac_try_echo=\$ac_try;;
*) ac_try_echo=$ac_try;;
esac
eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\""
$as_echo "$ac_try_echo"; } >&5
(eval "$ac_compile") 2>&5
ac_status=$?
$as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
test $ac_status = 0; }; then :
for ac_file in conftest.o conftest.obj conftest.*; do
test -f "$ac_file" || continue;
case $ac_file in
*.$ac_ext | *.xcoff | *.tds | *.d | *.pdb | *.xSYM | *.bb | *.bbg | *.map | *.inf | *.dSYM ) ;;
*) ac_cv_objext=`expr "$ac_file" : '.*\.\(.*\)'`
break;;
esac
done
else
$as_echo "$as_me: failed program was:" >&5
sed 's/^/| /' conftest.$ac_ext >&5
{ { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5
$as_echo "$as_me: error: in \`$ac_pwd':" >&2;}
as_fn_error $? "cannot compute suffix of object files: cannot compile
See \`config.log' for more details" "$LINENO" 5; }
fi
rm -f conftest.$ac_cv_objext conftest.$ac_ext
fi
{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_objext" >&5
$as_echo "$ac_cv_objext" >&6; }
OBJEXT=$ac_cv_objext
ac_objext=$OBJEXT
{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether we are using the GNU C++ compiler" >&5
$as_echo_n "checking whether we are using the GNU C++ compiler... " >&6; }
if ${ac_cv_cxx_compiler_gnu+:} false; then :
$as_echo_n "(cached) " >&6
else
cat confdefs.h - <<_ACEOF >conftest.$ac_ext
/* end confdefs.h. */
int
main ()
{
#ifndef __GNUC__
choke me
#endif
;
return 0;
}
_ACEOF
if ac_fn_cxx_try_compile "$LINENO"; then :
ac_compiler_gnu=yes
else
ac_compiler_gnu=no
fi
rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext
ac_cv_cxx_compiler_gnu=$ac_compiler_gnu
fi
{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_cxx_compiler_gnu" >&5
$as_echo "$ac_cv_cxx_compiler_gnu" >&6; }
if test $ac_compiler_gnu = yes; then
GXX=yes
else
GXX=
fi
ac_test_CXXFLAGS=${CXXFLAGS+set}
ac_save_CXXFLAGS=$CXXFLAGS
{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether $CXX accepts -g" >&5
$as_echo_n "checking whether $CXX accepts -g... " >&6; }
if ${ac_cv_prog_cxx_g+:} false; then :
$as_echo_n "(cached) " >&6
else
ac_save_cxx_werror_flag=$ac_cxx_werror_flag
ac_cxx_werror_flag=yes
ac_cv_prog_cxx_g=no
CXXFLAGS="-g"
cat confdefs.h - <<_ACEOF >conftest.$ac_ext
/* end confdefs.h. */
int
main ()
{
;
return 0;
}
_ACEOF
if ac_fn_cxx_try_compile "$LINENO"; then :
ac_cv_prog_cxx_g=yes
else
CXXFLAGS=""
cat confdefs.h - <<_ACEOF >conftest.$ac_ext
/* end confdefs.h. */
int
main ()
{
;
return 0;
}
_ACEOF
if ac_fn_cxx_try_compile "$LINENO"; then :
else
ac_cxx_werror_flag=$ac_save_cxx_werror_flag
CXXFLAGS="-g"
cat confdefs.h - <<_ACEOF >conftest.$ac_ext
/* end confdefs.h. */
int
main ()
{
;
return 0;
}
_ACEOF
if ac_fn_cxx_try_compile "$LINENO"; then :
ac_cv_prog_cxx_g=yes
fi
rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext
fi
rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext
fi
rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext
ac_cxx_werror_flag=$ac_save_cxx_werror_flag
fi
{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_prog_cxx_g" >&5
$as_echo "$ac_cv_prog_cxx_g" >&6; }
if test "$ac_test_CXXFLAGS" = set; then
CXXFLAGS=$ac_save_CXXFLAGS
elif test $ac_cv_prog_cxx_g = yes; then
if test "$GXX" = yes; then
CXXFLAGS="-g -O2"
else
CXXFLAGS="-g"
fi
else
if test "$GXX" = yes; then
CXXFLAGS="-O2"
else
CXXFLAGS=
fi
fi
ac_ext=cpp
ac_cpp='$CXXCPP $CPPFLAGS'
ac_compile='$CXX -c $CXXFLAGS $CPPFLAGS conftest.$ac_ext >&5'
ac_link='$CXX -o conftest$ac_exeext $CXXFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5'
ac_compiler_gnu=$ac_cv_cxx_compiler_gnu
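# Editorial note (illustrative, hypothetical values): with CXX=g++ and
# CXXFLAGS="-g -O2", the ac_compile command above expands to roughly
#   g++ -c -g -O2 conftest.cpp >&5
# while ac_link additionally passes LDFLAGS, LIBS and -o conftest.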
DEPDIR="${am__leading_dot}deps"
ac_config_commands="$ac_config_commands depfiles"
am_make=${MAKE-make}
cat > confinc << 'END'
am__doit:
@echo this is the am__doit target
.PHONY: am__doit
END
# If we don't find an include directive, just comment out the code.
{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for style of include used by $am_make" >&5
$as_echo_n "checking for style of include used by $am_make... " >&6; }
am__include="#"
am__quote=
_am_result=none
# First try GNU make style include.
echo "include confinc" > confmf
# Ignore all kinds of additional output from 'make'.
case `$am_make -s -f confmf 2> /dev/null` in #(
*the\ am__doit\ target*)
am__include=include
am__quote=
_am_result=GNU
;;
esac
# Now try BSD make style include.
if test "$am__include" = "#"; then
echo '.include "confinc"' > confmf
case `$am_make -s -f confmf 2> /dev/null` in #(
*the\ am__doit\ target*)
am__include=.include
am__quote="\""
_am_result=BSD
;;
esac
fi
{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $_am_result" >&5
$as_echo "$_am_result" >&6; }
rm -f confinc confmf
# Check whether --enable-dependency-tracking was given.
if test "${enable_dependency_tracking+set}" = set; then :
enableval=$enable_dependency_tracking;
fi
if test "x$enable_dependency_tracking" != xno; then
am_depcomp="$ac_aux_dir/depcomp"
AMDEPBACKSLASH='\'
am__nodep='_no'
fi
if test "x$enable_dependency_tracking" != xno; then
AMDEP_TRUE=
AMDEP_FALSE='#'
else
AMDEP_TRUE='#'
AMDEP_FALSE=
fi
depcc="$CXX" am_compiler_list=
{ $as_echo "$as_me:${as_lineno-$LINENO}: checking dependency style of $depcc" >&5
$as_echo_n "checking dependency style of $depcc... " >&6; }
if ${am_cv_CXX_dependencies_compiler_type+:} false; then :
$as_echo_n "(cached) " >&6
else
if test -z "$AMDEP_TRUE" && test -f "$am_depcomp"; then
# We make a subdir and do the tests there. Otherwise we can end up
# making bogus files that we don't know about and never remove. For
# instance it was reported that on HP-UX the gcc test will end up
# making a dummy file named 'D' -- because '-MD' means "put the output
# in D".
rm -rf conftest.dir
mkdir conftest.dir
# Copy depcomp to subdir because otherwise we won't find it if we're
# using a relative directory.
cp "$am_depcomp" conftest.dir
cd conftest.dir
# We will build objects and dependencies in a subdirectory because
# it helps to detect inapplicable dependency modes. For instance
# both Tru64's cc and ICC support -MD to output dependencies as a
# side effect of compilation, but ICC will put the dependencies in
# the current directory while Tru64 will put them in the object
# directory.
mkdir sub
am_cv_CXX_dependencies_compiler_type=none
if test "$am_compiler_list" = ""; then
am_compiler_list=`sed -n 's/^#*\([a-zA-Z0-9]*\))$/\1/p' < ./depcomp`
fi
am__universal=false
case " $depcc " in #(
*\ -arch\ *\ -arch\ *) am__universal=true ;;
esac
for depmode in $am_compiler_list; do
# Setup a source with many dependencies, because some compilers
# like to wrap large dependency lists on column 80 (with \), and
# we should not choose a depcomp mode which is confused by this.
#
# We need to recreate these files for each test, as the compiler may
# overwrite some of them when testing with obscure command lines.
# This happens at least with the AIX C compiler.
: > sub/conftest.c
for i in 1 2 3 4 5 6; do
echo '#include "conftst'$i'.h"' >> sub/conftest.c
# Using ": > sub/conftst$i.h" creates only sub/conftst1.h with
# Solaris 10 /bin/sh.
echo '/* dummy */' > sub/conftst$i.h
done
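# Illustrative: after the loop above, sub/conftest.c consists of six lines of
# the form
#   #include "conftst1.h"
# through conftst6.h, each generated header being a one-line dummy comment.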
echo "${am__include} ${am__quote}sub/conftest.Po${am__quote}" > confmf
# We check with '-c' and '-o' for the sake of the "dashmstdout"
# mode. It turns out that the SunPro C++ compiler does not properly
# handle '-M -o', and we need to detect this. Also, some Intel
# versions had trouble with output in subdirs.
am__obj=sub/conftest.${OBJEXT-o}
am__minus_obj="-o $am__obj"
case $depmode in
gcc)
# This depmode causes a compiler race in universal mode.
test "$am__universal" = false || continue
;;
nosideeffect)
# After this tag, mechanisms are not by side-effect, so they'll
# only be used when explicitly requested.
if test "x$enable_dependency_tracking" = xyes; then
continue
else
break
fi
;;
msvc7 | msvc7msys | msvisualcpp | msvcmsys)
# This compiler won't grok '-c -o', but also, the minuso test has
# not run yet. These depmodes are late enough in the game, and
# so weak that their functioning should not be impacted.
am__obj=conftest.${OBJEXT-o}
am__minus_obj=
;;
none) break ;;
esac
if depmode=$depmode \
source=sub/conftest.c object=$am__obj \
depfile=sub/conftest.Po tmpdepfile=sub/conftest.TPo \
$SHELL ./depcomp $depcc -c $am__minus_obj sub/conftest.c \
>/dev/null 2>conftest.err &&
grep sub/conftst1.h sub/conftest.Po > /dev/null 2>&1 &&
grep sub/conftst6.h sub/conftest.Po > /dev/null 2>&1 &&
grep $am__obj sub/conftest.Po > /dev/null 2>&1 &&
${MAKE-make} -s -f confmf > /dev/null 2>&1; then
# icc doesn't choke on unknown options, it will just issue warnings
# or remarks (even with -Werror). So we grep stderr for any message
# that says an option was ignored or not supported.
# When given -MP, icc 7.0 and 7.1 complain thusly:
# icc: Command line warning: ignoring option '-M'; no argument required
# The diagnosis changed in icc 8.0:
# icc: Command line remark: option '-MP' not supported
if (grep 'ignoring option' conftest.err ||
grep 'not supported' conftest.err) >/dev/null 2>&1; then :; else
am_cv_CXX_dependencies_compiler_type=$depmode
break
fi
fi
done
cd ..
rm -rf conftest.dir
else
am_cv_CXX_dependencies_compiler_type=none
fi
fi
{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $am_cv_CXX_dependencies_compiler_type" >&5
$as_echo "$am_cv_CXX_dependencies_compiler_type" >&6; }
CXXDEPMODE=depmode=$am_cv_CXX_dependencies_compiler_type
if
test "x$enable_dependency_tracking" != xno \
&& test "$am_cv_CXX_dependencies_compiler_type" = gcc3; then
am__fastdepCXX_TRUE=
am__fastdepCXX_FALSE='#'
else
am__fastdepCXX_TRUE='#'
am__fastdepCXX_FALSE=
fi
ax_cxx_compile_cxx11_required=true
ac_ext=cpp
ac_cpp='$CXXCPP $CPPFLAGS'
ac_compile='$CXX -c $CXXFLAGS $CPPFLAGS conftest.$ac_ext >&5'
ac_link='$CXX -o conftest$ac_exeext $CXXFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5'
ac_compiler_gnu=$ac_cv_cxx_compiler_gnu
ac_success=no
{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether $CXX supports C++11 features by default" >&5
$as_echo_n "checking whether $CXX supports C++11 features by default... " >&6; }
if ${ax_cv_cxx_compile_cxx11+:} false; then :
$as_echo_n "(cached) " >&6
else
cat confdefs.h - <<_ACEOF >conftest.$ac_ext
/* end confdefs.h. */
// If the compiler admits that it is not ready for C++11, why torture it?
// Hopefully, this will speed up the test.
#ifndef __cplusplus
#error "This is not a C++ compiler"
#elif __cplusplus < 201103L
#error "This is not a C++11 compiler"
#else
namespace cxx11
{
namespace test_static_assert
{
template <typename T>
struct check
{
static_assert(sizeof(int) <= sizeof(T), "not big enough");
};
}
namespace test_final_override
{
struct Base
{
virtual void f() {}
};
struct Derived : public Base
{
virtual void f() override {}
};
}
namespace test_double_right_angle_brackets
{
template < typename T >
struct check {};
typedef check<void> single_type;
typedef check<check<void>> double_type;
typedef check<check<check<void>>> triple_type;
typedef check<check<check<check<void>>>> quadruple_type;
}
namespace test_decltype
{
int
f()
{
int a = 1;
decltype(a) b = 2;
return a + b;
}
}
namespace test_type_deduction
{
template < typename T1, typename T2 >
struct is_same
{
static const bool value = false;
};
template < typename T >
struct is_same<T, T>
{
static const bool value = true;
};
template < typename T1, typename T2 >
auto
add(T1 a1, T2 a2) -> decltype(a1 + a2)
{
return a1 + a2;
}
int
test(const int c, volatile int v)
{
static_assert(is_same<int, int>::value == true, "");
static_assert(is_same<int, long>::value == false, "");
static_assert(is_same<int, double>::value == false, "");
auto ac = c;
auto av = v;
auto sumi = ac + av + 'x';
auto sumf = ac + av + 1.0;
static_assert(is_same<decltype(ac), int>::value == true, "");
static_assert(is_same<decltype(av), int>::value == true, "");
static_assert(is_same<decltype(sumi), int>::value == true, "");
static_assert(is_same<decltype(sumi), decltype(sumf)>::value == false, "");
static_assert(is_same<decltype(add(c, v)), int>::value == true, "");
return (sumf > 0.0) ? sumi : add(c, v);
}
}
namespace test_noexcept
{
int f() { return 0; }
int g() noexcept { return 0; }
static_assert(noexcept(f()) == false, "");
static_assert(noexcept(g()) == true, "");
}
namespace test_constexpr
{
template < typename CharT >
unsigned long constexpr
strlen_c_r(const CharT *const s, const unsigned long acc) noexcept
{
return *s ? strlen_c_r(s + 1, acc + 1) : acc;
}
template < typename CharT >
unsigned long constexpr
strlen_c(const CharT *const s) noexcept
{
return strlen_c_r(s, 0UL);
}
static_assert(strlen_c("") == 0UL, "");
static_assert(strlen_c("1") == 1UL, "");
static_assert(strlen_c("example") == 7UL, "");
static_assert(strlen_c("another\0example") == 7UL, "");
}
namespace test_rvalue_references
{
template < int N >
struct answer
{
static constexpr int value = N;
};
answer<1> f(int&) { return answer<1>(); }
answer<2> f(const int&) { return answer<2>(); }
answer<3> f(int&&) { return answer<3>(); }
void
test()
{
int i = 0;
const int c = 0;
static_assert(decltype(f(i))::value == 1, "");
static_assert(decltype(f(c))::value == 2, "");
static_assert(decltype(f(0))::value == 3, "");
}
}
namespace test_uniform_initialization
{
struct test
{
static const int zero {};
static const int one {1};
};
static_assert(test::zero == 0, "");
static_assert(test::one == 1, "");
}
namespace test_lambdas
{
void
test1()
{
auto lambda1 = [](){};
auto lambda2 = lambda1;
lambda1();
lambda2();
}
int
test2()
{
auto a = [](int i, int j){ return i + j; }(1, 2);
auto b = []() -> int { return '0'; }();
auto c = [=](){ return a + b; }();
auto d = [&](){ return c; }();
auto e = [a, &b](int x) mutable {
const auto identity = [](int y){ return y; };
for (auto i = 0; i < a; ++i)
a += b--;
return x + identity(a + b);
}(0);
return a + b + c + d + e;
}
int
test3()
{
const auto nullary = [](){ return 0; };
const auto unary = [](int x){ return x; };
using nullary_t = decltype(nullary);
using unary_t = decltype(unary);
const auto higher1st = [](nullary_t f){ return f(); };
const auto higher2nd = [unary](nullary_t f1){
return [unary, f1](unary_t f2){ return f2(unary(f1())); };
};
return higher1st(nullary) + higher2nd(nullary)(unary);
}
}
namespace test_variadic_templates
{
template <int...>
struct sum;
template <int N0, int... N1toN>
struct sum<N0, N1toN...>
{
static constexpr auto value = N0 + sum<N1toN...>::value;
};
template <>
struct sum<>
{
static constexpr auto value = 0;
};
static_assert(sum<>::value == 0, "");
static_assert(sum<1>::value == 1, "");
static_assert(sum<23>::value == 23, "");
static_assert(sum<1, 2>::value == 3, "");
static_assert(sum<5, 5, 11>::value == 21, "");
static_assert(sum<2, 3, 5, 7, 11, 13>::value == 41, "");
}
// http://stackoverflow.com/questions/13728184/template-aliases-and-sfinae
// Clang 3.1 fails with headers of libstdc++ 4.8.3 when using std::function
// because of this.
namespace test_template_alias_sfinae
{
struct foo {};
template <typename T>
using member = typename T::member_type;
template <typename T>
void func(...) {}
template <typename T>
void func(member<T>*) {}
void test();
void test() { func<foo>(0); }
}
} // namespace cxx11
#endif // __cplusplus >= 201103L
_ACEOF
if ac_fn_cxx_try_compile "$LINENO"; then :
ax_cv_cxx_compile_cxx11=yes
else
ax_cv_cxx_compile_cxx11=no
fi
rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext
fi
{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ax_cv_cxx_compile_cxx11" >&5
$as_echo "$ax_cv_cxx_compile_cxx11" >&6; }
if test x$ax_cv_cxx_compile_cxx11 = xyes; then
ac_success=yes
fi
if test x$ac_success = xno; then
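# Editorial note (illustrative): the switches tried below are the usual
# spellings for enabling C++11 mode: -std=c++11/-std=c++0x for GCC and Clang,
# +std=c++11 for HP aCC, and "-h std=c++11" for the Cray compiler.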
for switch in -std=c++11 -std=c++0x +std=c++11 "-h std=c++11"; do
cachevar=`$as_echo "ax_cv_cxx_compile_cxx11_$switch" | $as_tr_sh`
{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether $CXX supports C++11 features with $switch" >&5
$as_echo_n "checking whether $CXX supports C++11 features with $switch... " >&6; }
if eval \${$cachevar+:} false; then :
$as_echo_n "(cached) " >&6
else
ac_save_CXX="$CXX"
CXX="$CXX $switch"
cat confdefs.h - <<_ACEOF >conftest.$ac_ext
/* end confdefs.h. */
// If the compiler admits that it is not ready for C++11, why torture it?
// Hopefully, this will speed up the test.
#ifndef __cplusplus
#error "This is not a C++ compiler"
#elif __cplusplus < 201103L
#error "This is not a C++11 compiler"
#else
namespace cxx11
{
namespace test_static_assert
{
template <typename T>
struct check
{
static_assert(sizeof(int) <= sizeof(T), "not big enough");
};
}
namespace test_final_override
{
struct Base
{
virtual void f() {}
};
struct Derived : public Base
{
virtual void f() override {}
};
}
namespace test_double_right_angle_brackets
{
template < typename T >
struct check {};
typedef check<void> single_type;
typedef check<check<void>> double_type;
typedef check<check<check<void>>> triple_type;
typedef check<check<check<check<void>>>> quadruple_type;
}
namespace test_decltype
{
int
f()
{
int a = 1;
decltype(a) b = 2;
return a + b;
}
}
namespace test_type_deduction
{
template < typename T1, typename T2 >
struct is_same
{
static const bool value = false;
};
template < typename T >
struct is_same<T, T>
{
static const bool value = true;
};
template < typename T1, typename T2 >
auto
add(T1 a1, T2 a2) -> decltype(a1 + a2)
{
return a1 + a2;
}
int
test(const int c, volatile int v)
{
static_assert(is_same<int, int>::value == true, "");
static_assert(is_same<int, long>::value == false, "");
static_assert(is_same<int, double>::value == false, "");
auto ac = c;
auto av = v;
auto sumi = ac + av + 'x';
auto sumf = ac + av + 1.0;
static_assert(is_same<decltype(ac), int>::value == true, "");
static_assert(is_same<decltype(av), int>::value == true, "");
static_assert(is_same<decltype(sumi), int>::value == true, "");
static_assert(is_same<decltype(sumi), decltype(sumf)>::value == false, "");
static_assert(is_same<decltype(add(c, v)), int>::value == true, "");
return (sumf > 0.0) ? sumi : add(c, v);
}
}
namespace test_noexcept
{
int f() { return 0; }
int g() noexcept { return 0; }
static_assert(noexcept(f()) == false, "");
static_assert(noexcept(g()) == true, "");
}
namespace test_constexpr
{
template < typename CharT >
unsigned long constexpr
strlen_c_r(const CharT *const s, const unsigned long acc) noexcept
{
return *s ? strlen_c_r(s + 1, acc + 1) : acc;
}
template < typename CharT >
unsigned long constexpr
strlen_c(const CharT *const s) noexcept
{
return strlen_c_r(s, 0UL);
}
static_assert(strlen_c("") == 0UL, "");
static_assert(strlen_c("1") == 1UL, "");
static_assert(strlen_c("example") == 7UL, "");
static_assert(strlen_c("another\0example") == 7UL, "");
}
namespace test_rvalue_references
{
template < int N >
struct answer
{
static constexpr int value = N;
};
answer<1> f(int&) { return answer<1>(); }
answer<2> f(const int&) { return answer<2>(); }
answer<3> f(int&&) { return answer<3>(); }
void
test()
{
int i = 0;
const int c = 0;
static_assert(decltype(f(i))::value == 1, "");
static_assert(decltype(f(c))::value == 2, "");
static_assert(decltype(f(0))::value == 3, "");
}
}
namespace test_uniform_initialization
{
struct test
{
static const int zero {};
static const int one {1};
};
static_assert(test::zero == 0, "");
static_assert(test::one == 1, "");
}
namespace test_lambdas
{
void
test1()
{
auto lambda1 = [](){};
auto lambda2 = lambda1;
lambda1();
lambda2();
}
int
test2()
{
auto a = [](int i, int j){ return i + j; }(1, 2);
auto b = []() -> int { return '0'; }();
auto c = [=](){ return a + b; }();
auto d = [&](){ return c; }();
auto e = [a, &b](int x) mutable {
const auto identity = [](int y){ return y; };
for (auto i = 0; i < a; ++i)
a += b--;
return x + identity(a + b);
}(0);
return a + b + c + d + e;
}
int
test3()
{
const auto nullary = [](){ return 0; };
const auto unary = [](int x){ return x; };
using nullary_t = decltype(nullary);
using unary_t = decltype(unary);
const auto higher1st = [](nullary_t f){ return f(); };
const auto higher2nd = [unary](nullary_t f1){
return [unary, f1](unary_t f2){ return f2(unary(f1())); };
};
return higher1st(nullary) + higher2nd(nullary)(unary);
}
}
namespace test_variadic_templates
{
template <int...>
struct sum;
template <int N0, int... N1toN>
struct sum<N0, N1toN...>
{
static constexpr auto value = N0 + sum<N1toN...>::value;
};
template <>
struct sum<>
{
static constexpr auto value = 0;
};
static_assert(sum<>::value == 0, "");
static_assert(sum<1>::value == 1, "");
static_assert(sum<23>::value == 23, "");
static_assert(sum<1, 2>::value == 3, "");
static_assert(sum<5, 5, 11>::value == 21, "");
static_assert(sum<2, 3, 5, 7, 11, 13>::value == 41, "");
}
// http://stackoverflow.com/questions/13728184/template-aliases-and-sfinae
// Clang 3.1 fails with headers of libstdc++ 4.8.3 when using std::function
// because of this.
namespace test_template_alias_sfinae
{
struct foo {};
template <typename T>
using member = typename T::member_type;
template <typename T>
void func(...) {}
template <typename T>
void func(member<T>*) {}
void test();
void test() { func<foo>(0); }
}
} // namespace cxx11
#endif // __cplusplus >= 201103L
_ACEOF
if ac_fn_cxx_try_compile "$LINENO"; then :
eval $cachevar=yes
else
eval $cachevar=no
fi
rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext
CXX="$ac_save_CXX"
fi
eval ac_res=\$$cachevar
{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5
$as_echo "$ac_res" >&6; }
if eval test x\$$cachevar = xyes; then
CXX="$CXX $switch"
if test -n "$CXXCPP" ; then
CXXCPP="$CXXCPP $switch"
fi
ac_success=yes
break
fi
done
fi
ac_ext=cpp
ac_cpp='$CXXCPP $CPPFLAGS'
ac_compile='$CXX -c $CXXFLAGS $CPPFLAGS conftest.$ac_ext >&5'
ac_link='$CXX -o conftest$ac_exeext $CXXFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5'
ac_compiler_gnu=$ac_cv_cxx_compiler_gnu
if test x$ax_cxx_compile_cxx11_required = xtrue; then
if test x$ac_success = xno; then
as_fn_error $? "*** A compiler with support for C++11 language features is required." "$LINENO" 5
fi
fi
if test x$ac_success = xno; then
HAVE_CXX11=0
{ $as_echo "$as_me:${as_lineno-$LINENO}: No compiler with C++11 support was found" >&5
$as_echo "$as_me: No compiler with C++11 support was found" >&6;}
else
HAVE_CXX11=1
$as_echo "#define HAVE_CXX11 1" >>confdefs.h
fi
# Check whether --with-postgresql was given.
if test "${with_postgresql+set}" = set; then :
withval=$with_postgresql;
if test "$withval" = "no"; then
want_postgresql="no"
elif test "$withval" = "yes"; then
want_postgresql="yes"
else
want_postgresql="yes"
PG_CONFIG="$withval"
fi
else
want_postgresql="yes"
fi
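# Illustrative usage (hypothetical path, not part of the generated test):
#   ./configure --with-postgresql=/usr/local/pgsql/bin/pg_config
# selects a specific pg_config instead of the first one found in $PATH.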
POSTGRESQL_CPPFLAGS=""
POSTGRESQL_LDFLAGS=""
POSTGRESQL_LIBS=""
POSTGRESQL_VERSION=""
if test "$want_postgresql" = "yes"; then
if test -z "$PG_CONFIG" -o test; then
# Extract the first word of "pg_config", so it can be a program name with args.
set dummy pg_config; ac_word=$2
{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
$as_echo_n "checking for $ac_word... " >&6; }
if ${ac_cv_path_PG_CONFIG+:} false; then :
$as_echo_n "(cached) " >&6
else
case $PG_CONFIG in
[\\/]* | ?:[\\/]*)
ac_cv_path_PG_CONFIG="$PG_CONFIG" # Let the user override the test with a path.
;;
*)
as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
for as_dir in $PATH
do
IFS=$as_save_IFS
test -z "$as_dir" && as_dir=.
for ac_exec_ext in '' $ac_executable_extensions; do
if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then
ac_cv_path_PG_CONFIG="$as_dir/$ac_word$ac_exec_ext"
$as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
break 2
fi
done
done
IFS=$as_save_IFS
;;
esac
fi
PG_CONFIG=$ac_cv_path_PG_CONFIG
if test -n "$PG_CONFIG"; then
{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $PG_CONFIG" >&5
$as_echo "$PG_CONFIG" >&6; }
else
{ $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
$as_echo "no" >&6; }
fi
fi
if test ! -x "$PG_CONFIG"; then
as_fn_error $? "$PG_CONFIG does not exist or it is not an exectuable file" "$LINENO" 5
PG_CONFIG="no"
found_postgresql="no"
fi
if test "$PG_CONFIG" != "no"; then
{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for PostgreSQL libraries" >&5
$as_echo_n "checking for PostgreSQL libraries... " >&6; }
POSTGRESQL_CPPFLAGS="-I`$PG_CONFIG --includedir`"
POSTGRESQL_LDFLAGS="-L`$PG_CONFIG --libdir`"
POSTGRESQL_LIBS="-lpq"
POSTGRESQL_VERSION=`$PG_CONFIG --version | sed -e 's#PostgreSQL ##'`
$as_echo "#define HAVE_POSTGRESQL 1" >>confdefs.h
found_postgresql="yes"
{ $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5
$as_echo "yes" >&6; }
else
found_postgresql="no"
{ $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
$as_echo "no" >&6; }
fi
fi
postgresql_version_req=
if test "$found_postgresql" = "yes" -a -n "$postgresql_version_req"; then
{ $as_echo "$as_me:${as_lineno-$LINENO}: checking if PostgreSQL version is >= $postgresql_version_req" >&5
$as_echo_n "checking if PostgreSQL version is >= $postgresql_version_req... " >&6; }
postgresql_version_req_major=`expr $postgresql_version_req : '\([0-9]*\)'`
postgresql_version_req_minor=`expr $postgresql_version_req : '[0-9]*\.\([0-9]*\)'`
postgresql_version_req_micro=`expr $postgresql_version_req : '[0-9]*\.[0-9]*\.\([0-9]*\)'`
if test "x$postgresql_version_req_micro" = "x"; then
postgresql_version_req_micro="0"
fi
postgresql_version_req_number=`expr $postgresql_version_req_major \* 1000000 \
\+ $postgresql_version_req_minor \* 1000 \
\+ $postgresql_version_req_micro`
postgresql_version_major=`expr $POSTGRESQL_VERSION : '\([0-9]*\)'`
postgresql_version_minor=`expr $POSTGRESQL_VERSION : '[0-9]*\.\([0-9]*\)'`
postgresql_version_micro=`expr $POSTGRESQL_VERSION : '[0-9]*\.[0-9]*\.\([0-9]*\)'`
if test "x$postgresql_version_micro" = "x"; then
postgresql_version_micro="0"
fi
postgresql_version_number=`expr $postgresql_version_major \* 1000000 \
\+ $postgresql_version_minor \* 1000 \
\+ $postgresql_version_micro`
postgresql_version_check=`expr $postgresql_version_number \>\= $postgresql_version_req_number`
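# Worked example of the encoding above (illustrative): a requirement of "9.3"
# becomes 9*1000000 + 3*1000 + 0 = 9003000, an installed "9.6.3" becomes
# 9006003, and the expr comparison 9006003 >= 9003000 yields 1 (version OK).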
if test "$postgresql_version_check" = "1"; then
{ $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5
$as_echo "yes" >&6; }
else
{ $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
$as_echo "no" >&6; }
$as_echo "#define HAVE_POSTGRESQL 0" >>confdefs.h
POSTGRESQL_CPPFLAGS=""
POSTGRESQL_LDFLAGS=""
POSTGRESQL_LIBS=""
fi
fi
if test "x$ac_cv_env_PKG_CONFIG_set" != "xset"; then
if test -n "$ac_tool_prefix"; then
# Extract the first word of "${ac_tool_prefix}pkg-config", so it can be a program name with args.
set dummy ${ac_tool_prefix}pkg-config; ac_word=$2
{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
$as_echo_n "checking for $ac_word... " >&6; }
if ${ac_cv_path_PKG_CONFIG+:} false; then :
$as_echo_n "(cached) " >&6
else
case $PKG_CONFIG in
[\\/]* | ?:[\\/]*)
ac_cv_path_PKG_CONFIG="$PKG_CONFIG" # Let the user override the test with a path.
;;
*)
as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
for as_dir in $PATH
do
IFS=$as_save_IFS
test -z "$as_dir" && as_dir=.
for ac_exec_ext in '' $ac_executable_extensions; do
if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then
ac_cv_path_PKG_CONFIG="$as_dir/$ac_word$ac_exec_ext"
$as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
break 2
fi
done
done
IFS=$as_save_IFS
;;
esac
fi
PKG_CONFIG=$ac_cv_path_PKG_CONFIG
if test -n "$PKG_CONFIG"; then
{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $PKG_CONFIG" >&5
$as_echo "$PKG_CONFIG" >&6; }
else
{ $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
$as_echo "no" >&6; }
fi
fi
if test -z "$ac_cv_path_PKG_CONFIG"; then
ac_pt_PKG_CONFIG=$PKG_CONFIG
# Extract the first word of "pkg-config", so it can be a program name with args.
set dummy pkg-config; ac_word=$2
{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
$as_echo_n "checking for $ac_word... " >&6; }
if ${ac_cv_path_ac_pt_PKG_CONFIG+:} false; then :
$as_echo_n "(cached) " >&6
else
case $ac_pt_PKG_CONFIG in
[\\/]* | ?:[\\/]*)
ac_cv_path_ac_pt_PKG_CONFIG="$ac_pt_PKG_CONFIG" # Let the user override the test with a path.
;;
*)
as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
for as_dir in $PATH
do
IFS=$as_save_IFS
test -z "$as_dir" && as_dir=.
for ac_exec_ext in '' $ac_executable_extensions; do
if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then
ac_cv_path_ac_pt_PKG_CONFIG="$as_dir/$ac_word$ac_exec_ext"
$as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
break 2
fi
done
done
IFS=$as_save_IFS
;;
esac
fi
ac_pt_PKG_CONFIG=$ac_cv_path_ac_pt_PKG_CONFIG
if test -n "$ac_pt_PKG_CONFIG"; then
{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_pt_PKG_CONFIG" >&5
$as_echo "$ac_pt_PKG_CONFIG" >&6; }
else
{ $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
$as_echo "no" >&6; }
fi
if test "x$ac_pt_PKG_CONFIG" = x; then
PKG_CONFIG=""
else
case $cross_compiling:$ac_tool_warned in
yes:)
{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5
$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;}
ac_tool_warned=yes ;;
esac
PKG_CONFIG=$ac_pt_PKG_CONFIG
fi
else
PKG_CONFIG="$ac_cv_path_PKG_CONFIG"
fi
fi
if test -n "$PKG_CONFIG"; then
_pkg_min_version=0.9.0
{ $as_echo "$as_me:${as_lineno-$LINENO}: checking pkg-config is at least version $_pkg_min_version" >&5
$as_echo_n "checking pkg-config is at least version $_pkg_min_version... " >&6; }
if $PKG_CONFIG --atleast-pkgconfig-version $_pkg_min_version; then
{ $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5
$as_echo "yes" >&6; }
else
{ $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
$as_echo "no" >&6; }
PKG_CONFIG=""
fi
fi
pkg_failed=no
{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for LIBPQXX" >&5
$as_echo_n "checking for LIBPQXX... " >&6; }
if test -n "$LIBPQXX_CFLAGS"; then
pkg_cv_LIBPQXX_CFLAGS="$LIBPQXX_CFLAGS"
elif test -n "$PKG_CONFIG"; then
if test -n "$PKG_CONFIG" && \
{ { $as_echo "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \"libpqxx >= 4.0\""; } >&5
($PKG_CONFIG --exists --print-errors "libpqxx >= 4.0") 2>&5
ac_status=$?
$as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
test $ac_status = 0; }; then
pkg_cv_LIBPQXX_CFLAGS=`$PKG_CONFIG --cflags "libpqxx >= 4.0" 2>/dev/null`
test "x$?" != "x0" && pkg_failed=yes
else
pkg_failed=yes
fi
else
pkg_failed=untried
fi
if test -n "$LIBPQXX_LIBS"; then
pkg_cv_LIBPQXX_LIBS="$LIBPQXX_LIBS"
elif test -n "$PKG_CONFIG"; then
if test -n "$PKG_CONFIG" && \
{ { $as_echo "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \"libpqxx >= 4.0\""; } >&5
($PKG_CONFIG --exists --print-errors "libpqxx >= 4.0") 2>&5
ac_status=$?
$as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
test $ac_status = 0; }; then
pkg_cv_LIBPQXX_LIBS=`$PKG_CONFIG --libs "libpqxx >= 4.0" 2>/dev/null`
test "x$?" != "x0" && pkg_failed=yes
else
pkg_failed=yes
fi
else
pkg_failed=untried
fi
if test $pkg_failed = yes; then
{ $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
$as_echo "no" >&6; }
if $PKG_CONFIG --atleast-pkgconfig-version 0.20; then
_pkg_short_errors_supported=yes
else
_pkg_short_errors_supported=no
fi
if test $_pkg_short_errors_supported = yes; then
LIBPQXX_PKG_ERRORS=`$PKG_CONFIG --short-errors --print-errors --cflags --libs "libpqxx >= 4.0" 2>&1`
else
LIBPQXX_PKG_ERRORS=`$PKG_CONFIG --print-errors --cflags --libs "libpqxx >= 4.0" 2>&1`
fi
# Put the nasty error message in config.log where it belongs
echo "$LIBPQXX_PKG_ERRORS" >&5
as_fn_error $? "Package requirements (libpqxx >= 4.0) were not met:
$LIBPQXX_PKG_ERRORS
Consider adjusting the PKG_CONFIG_PATH environment variable if you
installed software in a non-standard prefix.
Alternatively, you may set the environment variables LIBPQXX_CFLAGS
and LIBPQXX_LIBS to avoid the need to call pkg-config.
See the pkg-config man page for more details." "$LINENO" 5
elif test $pkg_failed = untried; then
{ $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
$as_echo "no" >&6; }
{ { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5
$as_echo "$as_me: error: in \`$ac_pwd':" >&2;}
as_fn_error $? "The pkg-config script could not be found or is too old. Make sure it
is in your PATH or set the PKG_CONFIG environment variable to the full
path to pkg-config.
Alternatively, you may set the environment variables LIBPQXX_CFLAGS
and LIBPQXX_LIBS to avoid the need to call pkg-config.
See the pkg-config man page for more details.
To get pkg-config, see <http://pkg-config.freedesktop.org/>.
See \`config.log' for more details" "$LINENO" 5; }
else
LIBPQXX_CFLAGS=$pkg_cv_LIBPQXX_CFLAGS
LIBPQXX_LIBS=$pkg_cv_LIBPQXX_LIBS
{ $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5
$as_echo "yes" >&6; }
fi
pkg_failed=no
{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for MONETDB_MAPI" >&5
$as_echo_n "checking for MONETDB_MAPI... " >&6; }
if test -n "$MONETDB_MAPI_CFLAGS"; then
pkg_cv_MONETDB_MAPI_CFLAGS="$MONETDB_MAPI_CFLAGS"
elif test -n "$PKG_CONFIG"; then
if test -n "$PKG_CONFIG" && \
{ { $as_echo "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \"monetdb-mapi >= 11.23.0\""; } >&5
($PKG_CONFIG --exists --print-errors "monetdb-mapi >= 11.23.0") 2>&5
ac_status=$?
$as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
test $ac_status = 0; }; then
pkg_cv_MONETDB_MAPI_CFLAGS=`$PKG_CONFIG --cflags "monetdb-mapi >= 11.23.0" 2>/dev/null`
test "x$?" != "x0" && pkg_failed=yes
else
pkg_failed=yes
fi
else
pkg_failed=untried
fi
if test -n "$MONETDB_MAPI_LIBS"; then
pkg_cv_MONETDB_MAPI_LIBS="$MONETDB_MAPI_LIBS"
elif test -n "$PKG_CONFIG"; then
if test -n "$PKG_CONFIG" && \
{ { $as_echo "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \"monetdb-mapi >= 11.23.0\""; } >&5
($PKG_CONFIG --exists --print-errors "monetdb-mapi >= 11.23.0") 2>&5
ac_status=$?
$as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
test $ac_status = 0; }; then
pkg_cv_MONETDB_MAPI_LIBS=`$PKG_CONFIG --libs "monetdb-mapi >= 11.23.0" 2>/dev/null`
test "x$?" != "x0" && pkg_failed=yes
else
pkg_failed=yes
fi
else
pkg_failed=untried
fi
if test $pkg_failed = yes; then
{ $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
$as_echo "no" >&6; }
if $PKG_CONFIG --atleast-pkgconfig-version 0.20; then
_pkg_short_errors_supported=yes
else
_pkg_short_errors_supported=no
fi
if test $_pkg_short_errors_supported = yes; then
MONETDB_MAPI_PKG_ERRORS=`$PKG_CONFIG --short-errors --print-errors --cflags --libs "monetdb-mapi >= 11.23.0" 2>&1`
else
MONETDB_MAPI_PKG_ERRORS=`$PKG_CONFIG --print-errors --cflags --libs "monetdb-mapi >= 11.23.0" 2>&1`
fi
# Put the nasty error message in config.log where it belongs
echo "$MONETDB_MAPI_PKG_ERRORS" >&5
elif test $pkg_failed = untried; then
{ $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
$as_echo "no" >&6; }
else
MONETDB_MAPI_CFLAGS=$pkg_cv_MONETDB_MAPI_CFLAGS
MONETDB_MAPI_LIBS=$pkg_cv_MONETDB_MAPI_LIBS
{ $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5
$as_echo "yes" >&6; }
$as_echo "#define HAVE_MONETDB 1" >>confdefs.h
fi
if test x$pkg_failed = xno; then
DUT_MONETDB_TRUE=
DUT_MONETDB_FALSE='#'
else
DUT_MONETDB_TRUE='#'
DUT_MONETDB_FALSE=
fi
# Make sure we can run config.sub.
$SHELL "$ac_aux_dir/config.sub" sun4 >/dev/null 2>&1 ||
as_fn_error $? "cannot run $SHELL $ac_aux_dir/config.sub" "$LINENO" 5
{ $as_echo "$as_me:${as_lineno-$LINENO}: checking build system type" >&5
$as_echo_n "checking build system type... " >&6; }
if ${ac_cv_build+:} false; then :
$as_echo_n "(cached) " >&6
else
ac_build_alias=$build_alias
test "x$ac_build_alias" = x &&
ac_build_alias=`$SHELL "$ac_aux_dir/config.guess"`
test "x$ac_build_alias" = x &&
as_fn_error $? "cannot guess build type; you must specify one" "$LINENO" 5
ac_cv_build=`$SHELL "$ac_aux_dir/config.sub" $ac_build_alias` ||
as_fn_error $? "$SHELL $ac_aux_dir/config.sub $ac_build_alias failed" "$LINENO" 5
fi
{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_build" >&5
$as_echo "$ac_cv_build" >&6; }
case $ac_cv_build in
*-*-*) ;;
*) as_fn_error $? "invalid value of canonical build" "$LINENO" 5;;
esac
build=$ac_cv_build
ac_save_IFS=$IFS; IFS='-'
set x $ac_cv_build
shift
build_cpu=$1
build_vendor=$2
shift; shift
# Remember, the first character of IFS is used to create $*,
# except with old shells:
build_os=$*
IFS=$ac_save_IFS
case $build_os in *\ *) build_os=`echo "$build_os" | sed 's/ /-/g'`;; esac
{ $as_echo "$as_me:${as_lineno-$LINENO}: checking host system type" >&5
$as_echo_n "checking host system type... " >&6; }
if ${ac_cv_host+:} false; then :
$as_echo_n "(cached) " >&6
else
if test "x$host_alias" = x; then
ac_cv_host=$ac_cv_build
else
ac_cv_host=`$SHELL "$ac_aux_dir/config.sub" $host_alias` ||
as_fn_error $? "$SHELL $ac_aux_dir/config.sub $host_alias failed" "$LINENO" 5
fi
fi
{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_host" >&5
$as_echo "$ac_cv_host" >&6; }
case $ac_cv_host in
*-*-*) ;;
*) as_fn_error $? "invalid value of canonical host" "$LINENO" 5;;
esac
host=$ac_cv_host
ac_save_IFS=$IFS; IFS='-'
set x $ac_cv_host
shift
host_cpu=$1
host_vendor=$2
shift; shift
# Remember, the first character of IFS is used to create $*,
# except with old shells:
host_os=$*
IFS=$ac_save_IFS
case $host_os in *\ *) host_os=`echo "$host_os" | sed 's/ /-/g'`;; esac
# Check whether --with-boost was given.
if test "${with_boost+set}" = set; then :
withval=$with_boost;
if test "$withval" = "no"; then
want_boost="no"
elif test "$withval" = "yes"; then
want_boost="yes"
ac_boost_path=""
else
want_boost="yes"
ac_boost_path="$withval"
fi
else
want_boost="yes"
fi
# Check whether --with-boost-libdir was given.
if test "${with_boost_libdir+set}" = set; then :
withval=$with_boost_libdir;
if test -d "$withval"
then
ac_boost_lib_path="$withval"
else
as_fn_error $? "--with-boost-libdir expected directory name" "$LINENO" 5
fi
else
ac_boost_lib_path=""
fi
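# Illustrative usage (hypothetical paths): a Boost tree outside the default
# prefixes can be selected with
#   ./configure --with-boost=/opt/boost_1_66_0 --with-boost-libdir=/opt/boost_1_66_0/lib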
if test "x$want_boost" = "xyes"; then
boost_lib_version_req=1.20.0
boost_lib_version_req_shorten=`expr $boost_lib_version_req : '\([0-9]*\.[0-9]*\)'`
boost_lib_version_req_major=`expr $boost_lib_version_req : '\([0-9]*\)'`
boost_lib_version_req_minor=`expr $boost_lib_version_req : '[0-9]*\.\([0-9]*\)'`
boost_lib_version_req_sub_minor=`expr $boost_lib_version_req : '[0-9]*\.[0-9]*\.\([0-9]*\)'`
if test "x$boost_lib_version_req_sub_minor" = "x" ; then
boost_lib_version_req_sub_minor="0"
fi
WANT_BOOST_VERSION=`expr $boost_lib_version_req_major \* 100000 \+ $boost_lib_version_req_minor \* 100 \+ $boost_lib_version_req_sub_minor`
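# Worked example (illustrative): the required 1.20.0 encodes to
# 1*100000 + 20*100 + 0 = 102000, matching the BOOST_VERSION scheme of
# <boost/version.hpp>, where e.g. Boost 1.65.1 reports 106501.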
{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for boostlib >= $boost_lib_version_req" >&5
$as_echo_n "checking for boostlib >= $boost_lib_version_req... " >&6; }
succeeded=no
libsubdirs="lib"
ax_arch=`uname -m`
case $ax_arch in
x86_64)
libsubdirs="lib64 libx32 lib lib64"
;;
ppc64|s390x|sparc64|aarch64|ppc64le)
libsubdirs="lib64 lib lib64"
;;
esac
libsubdirs="lib/${host_cpu}-${host_os} $libsubdirs"
case ${host_cpu} in
i?86)
libsubdirs="lib/i386-${host_os} $libsubdirs"
;;
esac
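# Illustrative expansion: on a typical x86_64-linux-gnu host the list above
# becomes "lib/x86_64-linux-gnu lib64 libx32 lib lib64"; duplicates are
# harmless because the first directory containing libboost_* wins.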
if test "$ac_boost_path" != ""; then
BOOST_CPPFLAGS="-I$ac_boost_path/include"
for ac_boost_path_tmp in $libsubdirs; do
if test -d "$ac_boost_path"/"$ac_boost_path_tmp" ; then
BOOST_LDFLAGS="-L$ac_boost_path/$ac_boost_path_tmp"
break
fi
done
elif test "$cross_compiling" != yes; then
for ac_boost_path_tmp in /usr /usr/local /opt /opt/local ; do
if test -d "$ac_boost_path_tmp/include/boost" && test -r "$ac_boost_path_tmp/include/boost"; then
for libsubdir in $libsubdirs ; do
if ls "$ac_boost_path_tmp/$libsubdir/libboost_"* >/dev/null 2>&1 ; then break; fi
done
BOOST_LDFLAGS="-L$ac_boost_path_tmp/$libsubdir"
BOOST_CPPFLAGS="-I$ac_boost_path_tmp/include"
break;
fi
done
fi
if test "$ac_boost_lib_path" != ""; then
BOOST_LDFLAGS="-L$ac_boost_lib_path"
fi
CPPFLAGS_SAVED="$CPPFLAGS"
CPPFLAGS="$CPPFLAGS $BOOST_CPPFLAGS"
export CPPFLAGS
LDFLAGS_SAVED="$LDFLAGS"
LDFLAGS="$LDFLAGS $BOOST_LDFLAGS"
export LDFLAGS
ac_ext=cpp
ac_cpp='$CXXCPP $CPPFLAGS'
ac_compile='$CXX -c $CXXFLAGS $CPPFLAGS conftest.$ac_ext >&5'
ac_link='$CXX -o conftest$ac_exeext $CXXFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5'
ac_compiler_gnu=$ac_cv_cxx_compiler_gnu
cat confdefs.h - <<_ACEOF >conftest.$ac_ext
/* end confdefs.h. */
#include <boost/version.hpp>
int
main ()
{
#if BOOST_VERSION >= $WANT_BOOST_VERSION
// Everything is okay
#else
# error Boost version is too old
#endif
;
return 0;
}
_ACEOF
if ac_fn_cxx_try_compile "$LINENO"; then :
{ $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5
$as_echo "yes" >&6; }
succeeded=yes
found_system=yes
fi
rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext
ac_ext=cpp
ac_cpp='$CXXCPP $CPPFLAGS'
ac_compile='$CXX -c $CXXFLAGS $CPPFLAGS conftest.$ac_ext >&5'
ac_link='$CXX -o conftest$ac_exeext $CXXFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5'
ac_compiler_gnu=$ac_cv_cxx_compiler_gnu
if test "x$succeeded" != "xyes"; then
CPPFLAGS="$CPPFLAGS_SAVED"
LDFLAGS="$LDFLAGS_SAVED"
BOOST_CPPFLAGS=
BOOST_LDFLAGS=
_version=0
if test "$ac_boost_path" != ""; then
if test -d "$ac_boost_path" && test -r "$ac_boost_path"; then
for i in `ls -d $ac_boost_path/include/boost-* 2>/dev/null`; do
_version_tmp=`echo $i | sed "s#$ac_boost_path##" | sed 's/\/include\/boost-//' | sed 's/_/./'`
V_CHECK=`expr $_version_tmp \> $_version`
if test "$V_CHECK" = "1" ; then
_version=$_version_tmp
fi
VERSION_UNDERSCORE=`echo $_version | sed 's/\./_/'`
BOOST_CPPFLAGS="-I$ac_boost_path/include/boost-$VERSION_UNDERSCORE"
done
if test -z "$BOOST_CPPFLAGS"; then
if test -d "$ac_boost_path/boost" && test -r "$ac_boost_path/boost"; then
BOOST_CPPFLAGS="-I$ac_boost_path"
fi
fi
fi
else
if test "$cross_compiling" != yes; then
for ac_boost_path in /usr /usr/local /opt /opt/local ; do
if test -d "$ac_boost_path" && test -r "$ac_boost_path"; then
for i in `ls -d $ac_boost_path/include/boost-* 2>/dev/null`; do
_version_tmp=`echo $i | sed "s#$ac_boost_path##" | sed 's/\/include\/boost-//' | sed 's/_/./'`
V_CHECK=`expr $_version_tmp \> $_version`
if test "$V_CHECK" = "1" ; then
_version=$_version_tmp
best_path=$ac_boost_path
fi
done
fi
done
VERSION_UNDERSCORE=`echo $_version | sed 's/\./_/'`
BOOST_CPPFLAGS="-I$best_path/include/boost-$VERSION_UNDERSCORE"
if test "$ac_boost_lib_path" = ""; then
for libsubdir in $libsubdirs ; do
if ls "$best_path/$libsubdir/libboost_"* >/dev/null 2>&1 ; then break; fi
done
BOOST_LDFLAGS="-L$best_path/$libsubdir"
fi
fi
if test "x$BOOST_ROOT" != "x"; then
for libsubdir in $libsubdirs ; do
if ls "$BOOST_ROOT/stage/$libsubdir/libboost_"* >/dev/null 2>&1 ; then break; fi
done
if test -d "$BOOST_ROOT" && test -r "$BOOST_ROOT" && test -d "$BOOST_ROOT/stage/$libsubdir" && test -r "$BOOST_ROOT/stage/$libsubdir"; then
version_dir=`expr //$BOOST_ROOT : '.*/\(.*\)'`
stage_version=`echo $version_dir | sed 's/boost_//' | sed 's/_/./g'`
stage_version_shorten=`expr $stage_version : '\([0-9]*\.[0-9]*\)'`
V_CHECK=`expr $stage_version_shorten \>\= $_version`
if test "$V_CHECK" = "1" -a "$ac_boost_lib_path" = "" ; then
{ $as_echo "$as_me:${as_lineno-$LINENO}: We will use a staged boost library from $BOOST_ROOT" >&5
$as_echo "$as_me: We will use a staged boost library from $BOOST_ROOT" >&6;}
BOOST_CPPFLAGS="-I$BOOST_ROOT"
BOOST_LDFLAGS="-L$BOOST_ROOT/stage/$libsubdir"
fi
fi
fi
fi
CPPFLAGS="$CPPFLAGS $BOOST_CPPFLAGS"
export CPPFLAGS
LDFLAGS="$LDFLAGS $BOOST_LDFLAGS"
export LDFLAGS
ac_ext=cpp
ac_cpp='$CXXCPP $CPPFLAGS'
ac_compile='$CXX -c $CXXFLAGS $CPPFLAGS conftest.$ac_ext >&5'
ac_link='$CXX -o conftest$ac_exeext $CXXFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5'
ac_compiler_gnu=$ac_cv_cxx_compiler_gnu
cat confdefs.h - <<_ACEOF >conftest.$ac_ext
/* end confdefs.h. */
#include <boost/version.hpp>
int
main ()
{
#if BOOST_VERSION >= $WANT_BOOST_VERSION
// Everything is okay
#else
# error Boost version is too old
#endif
;
return 0;
}
_ACEOF
if ac_fn_cxx_try_compile "$LINENO"; then :
{ $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5
$as_echo "yes" >&6; }
succeeded=yes
found_system=yes
fi
rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext
ac_ext=cpp
ac_cpp='$CXXCPP $CPPFLAGS'
ac_compile='$CXX -c $CXXFLAGS $CPPFLAGS conftest.$ac_ext >&5'
ac_link='$CXX -o conftest$ac_exeext $CXXFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5'
ac_compiler_gnu=$ac_cv_cxx_compiler_gnu
fi
if test "$succeeded" != "yes" ; then
if test "$_version" = "0" ; then
{ $as_echo "$as_me:${as_lineno-$LINENO}: We could not detect the boost libraries (version $boost_lib_version_req_shorten or higher). If you have a staged boost library (still not installed) please specify \$BOOST_ROOT in your environment and do not give a PATH to --with-boost option. If you are sure you have boost installed, then check your version number looking in . See http://randspringer.de/boost for more documentation." >&5
$as_echo "$as_me: We could not detect the boost libraries (version $boost_lib_version_req_shorten or higher). If you have a staged boost library (still not installed) please specify \$BOOST_ROOT in your environment and do not give a PATH to --with-boost option. If you are sure you have boost installed, then check your version number looking in . See http://randspringer.de/boost for more documentation." >&6;}
else
{ $as_echo "$as_me:${as_lineno-$LINENO}: Your boost libraries seems to old (version $_version)." >&5
$as_echo "$as_me: Your boost libraries seems to old (version $_version)." >&6;}
fi
# execute ACTION-IF-NOT-FOUND (if present):
:
else
$as_echo "#define HAVE_BOOST /**/" >>confdefs.h
# execute ACTION-IF-FOUND (if present):
:
fi
CPPFLAGS="$CPPFLAGS_SAVED"
LDFLAGS="$LDFLAGS_SAVED"
fi
ac_ext=c
ac_cpp='$CPP $CPPFLAGS'
ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5'
ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5'
ac_compiler_gnu=$ac_cv_c_compiler_gnu
if test -n "$ac_tool_prefix"; then
# Extract the first word of "${ac_tool_prefix}gcc", so it can be a program name with args.
set dummy ${ac_tool_prefix}gcc; ac_word=$2
{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
$as_echo_n "checking for $ac_word... " >&6; }
if ${ac_cv_prog_CC+:} false; then :
$as_echo_n "(cached) " >&6
else
if test -n "$CC"; then
ac_cv_prog_CC="$CC" # Let the user override the test.
else
as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
for as_dir in $PATH
do
IFS=$as_save_IFS
test -z "$as_dir" && as_dir=.
for ac_exec_ext in '' $ac_executable_extensions; do
if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then
ac_cv_prog_CC="${ac_tool_prefix}gcc"
$as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
break 2
fi
done
done
IFS=$as_save_IFS
fi
fi
CC=$ac_cv_prog_CC
if test -n "$CC"; then
{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $CC" >&5
$as_echo "$CC" >&6; }
else
{ $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
$as_echo "no" >&6; }
fi
fi
if test -z "$ac_cv_prog_CC"; then
ac_ct_CC=$CC
# Extract the first word of "gcc", so it can be a program name with args.
set dummy gcc; ac_word=$2
{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
$as_echo_n "checking for $ac_word... " >&6; }
if ${ac_cv_prog_ac_ct_CC+:} false; then :
$as_echo_n "(cached) " >&6
else
if test -n "$ac_ct_CC"; then
ac_cv_prog_ac_ct_CC="$ac_ct_CC" # Let the user override the test.
else
as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
for as_dir in $PATH
do
IFS=$as_save_IFS
test -z "$as_dir" && as_dir=.
for ac_exec_ext in '' $ac_executable_extensions; do
if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then
ac_cv_prog_ac_ct_CC="gcc"
$as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
break 2
fi
done
done
IFS=$as_save_IFS
fi
fi
ac_ct_CC=$ac_cv_prog_ac_ct_CC
if test -n "$ac_ct_CC"; then
{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_CC" >&5
$as_echo "$ac_ct_CC" >&6; }
else
{ $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
$as_echo "no" >&6; }
fi
if test "x$ac_ct_CC" = x; then
CC=""
else
case $cross_compiling:$ac_tool_warned in
yes:)
{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5
$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;}
ac_tool_warned=yes ;;
esac
CC=$ac_ct_CC
fi
else
CC="$ac_cv_prog_CC"
fi
if test -z "$CC"; then
if test -n "$ac_tool_prefix"; then
# Extract the first word of "${ac_tool_prefix}cc", so it can be a program name with args.
set dummy ${ac_tool_prefix}cc; ac_word=$2
{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
$as_echo_n "checking for $ac_word... " >&6; }
if ${ac_cv_prog_CC+:} false; then :
$as_echo_n "(cached) " >&6
else
if test -n "$CC"; then
ac_cv_prog_CC="$CC" # Let the user override the test.
else
as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
for as_dir in $PATH
do
IFS=$as_save_IFS
test -z "$as_dir" && as_dir=.
for ac_exec_ext in '' $ac_executable_extensions; do
if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then
ac_cv_prog_CC="${ac_tool_prefix}cc"
$as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
break 2
fi
done
done
IFS=$as_save_IFS
fi
fi
CC=$ac_cv_prog_CC
if test -n "$CC"; then
{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $CC" >&5
$as_echo "$CC" >&6; }
else
{ $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
$as_echo "no" >&6; }
fi
fi
fi
if test -z "$CC"; then
# Extract the first word of "cc", so it can be a program name with args.
set dummy cc; ac_word=$2
{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
$as_echo_n "checking for $ac_word... " >&6; }
if ${ac_cv_prog_CC+:} false; then :
$as_echo_n "(cached) " >&6
else
if test -n "$CC"; then
ac_cv_prog_CC="$CC" # Let the user override the test.
else
ac_prog_rejected=no
as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
for as_dir in $PATH
do
IFS=$as_save_IFS
test -z "$as_dir" && as_dir=.
for ac_exec_ext in '' $ac_executable_extensions; do
if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then
if test "$as_dir/$ac_word$ac_exec_ext" = "/usr/ucb/cc"; then
ac_prog_rejected=yes
continue
fi
ac_cv_prog_CC="cc"
$as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
break 2
fi
done
done
IFS=$as_save_IFS
if test $ac_prog_rejected = yes; then
# We found a bogon in the path, so make sure we never use it.
set dummy $ac_cv_prog_CC
shift
if test $# != 0; then
# We chose a different compiler from the bogus one.
# However, it has the same basename, so the bogon will be chosen
# first if we set CC to just the basename; use the full file name.
shift
ac_cv_prog_CC="$as_dir/$ac_word${1+' '}$@"
fi
fi
fi
fi
CC=$ac_cv_prog_CC
if test -n "$CC"; then
{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $CC" >&5
$as_echo "$CC" >&6; }
else
{ $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
$as_echo "no" >&6; }
fi
fi
if test -z "$CC"; then
if test -n "$ac_tool_prefix"; then
for ac_prog in cl.exe
do
# Extract the first word of "$ac_tool_prefix$ac_prog", so it can be a program name with args.
set dummy $ac_tool_prefix$ac_prog; ac_word=$2
{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
$as_echo_n "checking for $ac_word... " >&6; }
if ${ac_cv_prog_CC+:} false; then :
$as_echo_n "(cached) " >&6
else
if test -n "$CC"; then
ac_cv_prog_CC="$CC" # Let the user override the test.
else
as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
for as_dir in $PATH
do
IFS=$as_save_IFS
test -z "$as_dir" && as_dir=.
for ac_exec_ext in '' $ac_executable_extensions; do
if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then
ac_cv_prog_CC="$ac_tool_prefix$ac_prog"
$as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
break 2
fi
done
done
IFS=$as_save_IFS
fi
fi
CC=$ac_cv_prog_CC
if test -n "$CC"; then
{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $CC" >&5
$as_echo "$CC" >&6; }
else
{ $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
$as_echo "no" >&6; }
fi
test -n "$CC" && break
done
fi
if test -z "$CC"; then
ac_ct_CC=$CC
for ac_prog in cl.exe
do
# Extract the first word of "$ac_prog", so it can be a program name with args.
set dummy $ac_prog; ac_word=$2
{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
$as_echo_n "checking for $ac_word... " >&6; }
if ${ac_cv_prog_ac_ct_CC+:} false; then :
$as_echo_n "(cached) " >&6
else
if test -n "$ac_ct_CC"; then
ac_cv_prog_ac_ct_CC="$ac_ct_CC" # Let the user override the test.
else
as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
for as_dir in $PATH
do
IFS=$as_save_IFS
test -z "$as_dir" && as_dir=.
for ac_exec_ext in '' $ac_executable_extensions; do
if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then
ac_cv_prog_ac_ct_CC="$ac_prog"
$as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
break 2
fi
done
done
IFS=$as_save_IFS
fi
fi
ac_ct_CC=$ac_cv_prog_ac_ct_CC
if test -n "$ac_ct_CC"; then
{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_CC" >&5
$as_echo "$ac_ct_CC" >&6; }
else
{ $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
$as_echo "no" >&6; }
fi
test -n "$ac_ct_CC" && break
done
if test "x$ac_ct_CC" = x; then
CC=""
else
case $cross_compiling:$ac_tool_warned in
yes:)
{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5
$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;}
ac_tool_warned=yes ;;
esac
CC=$ac_ct_CC
fi
fi
fi
test -z "$CC" && { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5
$as_echo "$as_me: error: in \`$ac_pwd':" >&2;}
as_fn_error $? "no acceptable C compiler found in \$PATH
See \`config.log' for more details" "$LINENO" 5; }
# Provide some information about the compiler.
$as_echo "$as_me:${as_lineno-$LINENO}: checking for C compiler version" >&5
set X $ac_compile
ac_compiler=$2
for ac_option in --version -v -V -qversion; do
{ { ac_try="$ac_compiler $ac_option >&5"
case "(($ac_try" in
*\"* | *\`* | *\\*) ac_try_echo=\$ac_try;;
*) ac_try_echo=$ac_try;;
esac
eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\""
$as_echo "$ac_try_echo"; } >&5
(eval "$ac_compiler $ac_option >&5") 2>conftest.err
ac_status=$?
if test -s conftest.err; then
sed '10a\
... rest of stderr output deleted ...
10q' conftest.err >conftest.er1
cat conftest.er1 >&5
fi
rm -f conftest.er1 conftest.err
$as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
test $ac_status = 0; }
done
{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether we are using the GNU C compiler" >&5
$as_echo_n "checking whether we are using the GNU C compiler... " >&6; }
if ${ac_cv_c_compiler_gnu+:} false; then :
$as_echo_n "(cached) " >&6
else
cat confdefs.h - <<_ACEOF >conftest.$ac_ext
/* end confdefs.h. */
int
main ()
{
#ifndef __GNUC__
choke me
#endif
;
return 0;
}
_ACEOF
if ac_fn_c_try_compile "$LINENO"; then :
ac_compiler_gnu=yes
else
ac_compiler_gnu=no
fi
rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext
ac_cv_c_compiler_gnu=$ac_compiler_gnu
fi
{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_c_compiler_gnu" >&5
$as_echo "$ac_cv_c_compiler_gnu" >&6; }
if test $ac_compiler_gnu = yes; then
GCC=yes
else
GCC=
fi
ac_test_CFLAGS=${CFLAGS+set}
ac_save_CFLAGS=$CFLAGS
{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether $CC accepts -g" >&5
$as_echo_n "checking whether $CC accepts -g... " >&6; }
if ${ac_cv_prog_cc_g+:} false; then :
$as_echo_n "(cached) " >&6
else
ac_save_c_werror_flag=$ac_c_werror_flag
ac_c_werror_flag=yes
ac_cv_prog_cc_g=no
CFLAGS="-g"
cat confdefs.h - <<_ACEOF >conftest.$ac_ext
/* end confdefs.h. */
int
main ()
{
;
return 0;
}
_ACEOF
if ac_fn_c_try_compile "$LINENO"; then :
ac_cv_prog_cc_g=yes
else
CFLAGS=""
cat confdefs.h - <<_ACEOF >conftest.$ac_ext
/* end confdefs.h. */
int
main ()
{
;
return 0;
}
_ACEOF
if ac_fn_c_try_compile "$LINENO"; then :
else
ac_c_werror_flag=$ac_save_c_werror_flag
CFLAGS="-g"
cat confdefs.h - <<_ACEOF >conftest.$ac_ext
/* end confdefs.h. */
int
main ()
{
;
return 0;
}
_ACEOF
if ac_fn_c_try_compile "$LINENO"; then :
ac_cv_prog_cc_g=yes
fi
rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext
fi
rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext
fi
rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext
ac_c_werror_flag=$ac_save_c_werror_flag
fi
{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_prog_cc_g" >&5
$as_echo "$ac_cv_prog_cc_g" >&6; }
if test "$ac_test_CFLAGS" = set; then
CFLAGS=$ac_save_CFLAGS
elif test $ac_cv_prog_cc_g = yes; then
if test "$GCC" = yes; then
CFLAGS="-g -O2"
else
CFLAGS="-g"
fi
else
if test "$GCC" = yes; then
CFLAGS="-O2"
else
CFLAGS=
fi
fi
{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $CC option to accept ISO C89" >&5
$as_echo_n "checking for $CC option to accept ISO C89... " >&6; }
if ${ac_cv_prog_cc_c89+:} false; then :
$as_echo_n "(cached) " >&6
else
ac_cv_prog_cc_c89=no
ac_save_CC=$CC
cat confdefs.h - <<_ACEOF >conftest.$ac_ext
/* end confdefs.h. */
#include <stdarg.h>
#include <stdio.h>
struct stat;
/* Most of the following tests are stolen from RCS 5.7's src/conf.sh. */
struct buf { int x; };
FILE * (*rcsopen) (struct buf *, struct stat *, int);
static char *e (p, i)
char **p;
int i;
{
return p[i];
}
static char *f (char * (*g) (char **, int), char **p, ...)
{
char *s;
va_list v;
va_start (v,p);
s = g (p, va_arg (v,int));
va_end (v);
return s;
}
/* OSF 4.0 Compaq cc is some sort of almost-ANSI by default. It has
function prototypes and stuff, but not '\xHH' hex character constants.
These don't provoke an error unfortunately, instead are silently treated
as 'x'. The following induces an error, until -std is added to get
proper ANSI mode. Curiously '\x00'!='x' always comes out true, for an
array size at least. It's necessary to write '\x00'==0 to get something
that's true only with -std. */
int osf4_cc_array ['\x00' == 0 ? 1 : -1];
/* IBM C 6 for AIX is almost-ANSI by default, but it replaces macro parameters
inside strings and character constants. */
#define FOO(x) 'x'
int xlc6_cc_array[FOO(a) == 'x' ? 1 : -1];
int test (int i, double x);
struct s1 {int (*f) (int a);};
struct s2 {int (*f) (double a);};
int pairnames (int, char **, FILE *(*)(struct buf *, struct stat *, int), int, int);
int argc;
char **argv;
int
main ()
{
return f (e, argv, 0) != argv[0] || f (e, argv, 1) != argv[1];
;
return 0;
}
_ACEOF
for ac_arg in '' -qlanglvl=extc89 -qlanglvl=ansi -std \
-Ae "-Aa -D_HPUX_SOURCE" "-Xc -D__EXTENSIONS__"
do
CC="$ac_save_CC $ac_arg"
if ac_fn_c_try_compile "$LINENO"; then :
ac_cv_prog_cc_c89=$ac_arg
fi
rm -f core conftest.err conftest.$ac_objext
test "x$ac_cv_prog_cc_c89" != "xno" && break
done
rm -f conftest.$ac_ext
CC=$ac_save_CC
fi
# AC_CACHE_VAL
case "x$ac_cv_prog_cc_c89" in
x)
{ $as_echo "$as_me:${as_lineno-$LINENO}: result: none needed" >&5
$as_echo "none needed" >&6; } ;;
xno)
{ $as_echo "$as_me:${as_lineno-$LINENO}: result: unsupported" >&5
$as_echo "unsupported" >&6; } ;;
*)
CC="$CC $ac_cv_prog_cc_c89"
{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_prog_cc_c89" >&5
$as_echo "$ac_cv_prog_cc_c89" >&6; } ;;
esac
if test "x$ac_cv_prog_cc_c89" != xno; then :
fi
ac_ext=cpp
ac_cpp='$CXXCPP $CPPFLAGS'
ac_compile='$CXX -c $CXXFLAGS $CPPFLAGS conftest.$ac_ext >&5'
ac_link='$CXX -o conftest$ac_exeext $CXXFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5'
ac_compiler_gnu=$ac_cv_cxx_compiler_gnu
ac_ext=c
ac_cpp='$CPP $CPPFLAGS'
ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5'
ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5'
ac_compiler_gnu=$ac_cv_c_compiler_gnu
{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether $CC understands -c and -o together" >&5
$as_echo_n "checking whether $CC understands -c and -o together... " >&6; }
if ${am_cv_prog_cc_c_o+:} false; then :
$as_echo_n "(cached) " >&6
else
cat confdefs.h - <<_ACEOF >conftest.$ac_ext
/* end confdefs.h. */
int
main ()
{
;
return 0;
}
_ACEOF
# Make sure it works both with $CC and with simple cc.
# Following AC_PROG_CC_C_O, we do the test twice because some
# compilers refuse to overwrite an existing .o file with -o,
# though they will create one.
am_cv_prog_cc_c_o=yes
for am_i in 1 2; do
if { echo "$as_me:$LINENO: $CC -c conftest.$ac_ext -o conftest2.$ac_objext" >&5
($CC -c conftest.$ac_ext -o conftest2.$ac_objext) >&5 2>&5
ac_status=$?
echo "$as_me:$LINENO: \$? = $ac_status" >&5
(exit $ac_status); } \
&& test -f conftest2.$ac_objext; then
: OK
else
am_cv_prog_cc_c_o=no
break
fi
done
rm -f core conftest*
unset am_i
fi
{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $am_cv_prog_cc_c_o" >&5
$as_echo "$am_cv_prog_cc_c_o" >&6; }
if test "$am_cv_prog_cc_c_o" != yes; then
# Losing compiler, so override with the script.
# FIXME: It is wrong to rewrite CC.
# But if we don't then we get into trouble of one sort or another.
# A longer-term fix would be to have automake use am__CC in this case,
# and then we could set am__CC="\$(top_srcdir)/compile \$(CC)"
CC="$am_aux_dir/compile $CC"
fi
ac_ext=cpp
ac_cpp='$CXXCPP $CPPFLAGS'
ac_compile='$CXX -c $CXXFLAGS $CPPFLAGS conftest.$ac_ext >&5'
ac_link='$CXX -o conftest$ac_exeext $CXXFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5'
ac_compiler_gnu=$ac_cv_cxx_compiler_gnu
depcc="$CC" am_compiler_list=
{ $as_echo "$as_me:${as_lineno-$LINENO}: checking dependency style of $depcc" >&5
$as_echo_n "checking dependency style of $depcc... " >&6; }
if ${am_cv_CC_dependencies_compiler_type+:} false; then :
$as_echo_n "(cached) " >&6
else
if test -z "$AMDEP_TRUE" && test -f "$am_depcomp"; then
# We make a subdir and do the tests there. Otherwise we can end up
# making bogus files that we don't know about and never remove. For
# instance it was reported that on HP-UX the gcc test will end up
# making a dummy file named 'D' -- because '-MD' means "put the output
# in D".
rm -rf conftest.dir
mkdir conftest.dir
# Copy depcomp to subdir because otherwise we won't find it if we're
# using a relative directory.
cp "$am_depcomp" conftest.dir
cd conftest.dir
# We will build objects and dependencies in a subdirectory because
# it helps to detect inapplicable dependency modes. For instance
# both Tru64's cc and ICC support -MD to output dependencies as a
# side effect of compilation, but ICC will put the dependencies in
# the current directory while Tru64 will put them in the object
# directory.
mkdir sub
am_cv_CC_dependencies_compiler_type=none
if test "$am_compiler_list" = ""; then
am_compiler_list=`sed -n 's/^#*\([a-zA-Z0-9]*\))$/\1/p' < ./depcomp`
fi
am__universal=false
case " $depcc " in #(
*\ -arch\ *\ -arch\ *) am__universal=true ;;
esac
for depmode in $am_compiler_list; do
# Setup a source with many dependencies, because some compilers
# like to wrap large dependency lists on column 80 (with \), and
# we should not choose a depcomp mode which is confused by this.
#
# We need to recreate these files for each test, as the compiler may
# overwrite some of them when testing with obscure command lines.
# This happens at least with the AIX C compiler.
: > sub/conftest.c
for i in 1 2 3 4 5 6; do
echo '#include "conftst'$i'.h"' >> sub/conftest.c
# Using ": > sub/conftst$i.h" creates only sub/conftst1.h with
# Solaris 10 /bin/sh.
echo '/* dummy */' > sub/conftst$i.h
done
echo "${am__include} ${am__quote}sub/conftest.Po${am__quote}" > confmf
# We check with '-c' and '-o' for the sake of the "dashmstdout"
# mode. It turns out that the SunPro C++ compiler does not properly
# handle '-M -o', and we need to detect this. Also, some Intel
# versions had trouble with output in subdirs.
am__obj=sub/conftest.${OBJEXT-o}
am__minus_obj="-o $am__obj"
case $depmode in
gcc)
# This depmode causes a compiler race in universal mode.
test "$am__universal" = false || continue
;;
nosideeffect)
# After this tag, mechanisms are not by side-effect, so they'll
# only be used when explicitly requested.
if test "x$enable_dependency_tracking" = xyes; then
continue
else
break
fi
;;
msvc7 | msvc7msys | msvisualcpp | msvcmsys)
# This compiler won't grok '-c -o', but also, the minuso test has
# not run yet. These depmodes are late enough in the game, and
# so weak that their functioning should not be impacted.
am__obj=conftest.${OBJEXT-o}
am__minus_obj=
;;
none) break ;;
esac
if depmode=$depmode \
source=sub/conftest.c object=$am__obj \
depfile=sub/conftest.Po tmpdepfile=sub/conftest.TPo \
$SHELL ./depcomp $depcc -c $am__minus_obj sub/conftest.c \
>/dev/null 2>conftest.err &&
grep sub/conftst1.h sub/conftest.Po > /dev/null 2>&1 &&
grep sub/conftst6.h sub/conftest.Po > /dev/null 2>&1 &&
grep $am__obj sub/conftest.Po > /dev/null 2>&1 &&
${MAKE-make} -s -f confmf > /dev/null 2>&1; then
# icc doesn't choke on unknown options, it will just issue warnings
# or remarks (even with -Werror). So we grep stderr for any message
# that says an option was ignored or not supported.
# When given -MP, icc 7.0 and 7.1 complain thusly:
# icc: Command line warning: ignoring option '-M'; no argument required
# The diagnosis changed in icc 8.0:
# icc: Command line remark: option '-MP' not supported
if (grep 'ignoring option' conftest.err ||
grep 'not supported' conftest.err) >/dev/null 2>&1; then :; else
am_cv_CC_dependencies_compiler_type=$depmode
break
fi
fi
done
cd ..
rm -rf conftest.dir
else
am_cv_CC_dependencies_compiler_type=none
fi
fi
{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $am_cv_CC_dependencies_compiler_type" >&5
$as_echo "$am_cv_CC_dependencies_compiler_type" >&6; }
CCDEPMODE=depmode=$am_cv_CC_dependencies_compiler_type
if
test "x$enable_dependency_tracking" != xno \
&& test "$am_cv_CC_dependencies_compiler_type" = gcc3; then
am__fastdepCC_TRUE=
am__fastdepCC_FALSE='#'
else
am__fastdepCC_TRUE='#'
am__fastdepCC_FALSE=
fi
# Check whether --with-boost-regex was given.
if test "${with_boost_regex+set}" = set; then :
withval=$with_boost_regex;
if test "$withval" = "no"; then
want_boost="no"
elif test "$withval" = "yes"; then
want_boost="yes"
ax_boost_user_regex_lib=""
else
want_boost="yes"
ax_boost_user_regex_lib="$withval"
fi
else
want_boost="yes"
fi
if test "x$want_boost" = "xyes"; then
CPPFLAGS_SAVED="$CPPFLAGS"
CPPFLAGS="$CPPFLAGS $BOOST_CPPFLAGS"
export CPPFLAGS
LDFLAGS_SAVED="$LDFLAGS"
LDFLAGS="$LDFLAGS $BOOST_LDFLAGS"
export LDFLAGS
{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether the Boost::Regex library is available" >&5
$as_echo_n "checking whether the Boost::Regex library is available... " >&6; }
if ${ax_cv_boost_regex+:} false; then :
$as_echo_n "(cached) " >&6
else
ac_ext=cpp
ac_cpp='$CXXCPP $CPPFLAGS'
ac_compile='$CXX -c $CXXFLAGS $CPPFLAGS conftest.$ac_ext >&5'
ac_link='$CXX -o conftest$ac_exeext $CXXFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5'
ac_compiler_gnu=$ac_cv_cxx_compiler_gnu
cat confdefs.h - <<_ACEOF >conftest.$ac_ext
/* end confdefs.h. */
#include <boost/regex.hpp>
int
main ()
{
boost::regex r(); return 0;
;
return 0;
}
_ACEOF
if ac_fn_cxx_try_compile "$LINENO"; then :
ax_cv_boost_regex=yes
else
ax_cv_boost_regex=no
fi
rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext
ac_ext=cpp
ac_cpp='$CXXCPP $CPPFLAGS'
ac_compile='$CXX -c $CXXFLAGS $CPPFLAGS conftest.$ac_ext >&5'
ac_link='$CXX -o conftest$ac_exeext $CXXFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5'
ac_compiler_gnu=$ac_cv_cxx_compiler_gnu
fi
{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ax_cv_boost_regex" >&5
$as_echo "$ax_cv_boost_regex" >&6; }
if test "x$ax_cv_boost_regex" = "xyes"; then
$as_echo "#define HAVE_BOOST_REGEX /**/" >>confdefs.h
BOOSTLIBDIR=`echo $BOOST_LDFLAGS | sed -e 's/[^\/]*//'`
if test "x$ax_boost_user_regex_lib" = "x"; then
for libextension in `ls $BOOSTLIBDIR/libboost_regex*.so* $BOOSTLIBDIR/libboost_regex*.dylib* $BOOSTLIBDIR/libboost_regex*.a* 2>/dev/null | sed 's,.*/,,' | sed -e 's;^lib\(boost_regex.*\)\.so.*$;\1;' -e 's;^lib\(boost_regex.*\)\.dylib.*;\1;' -e 's;^lib\(boost_regex.*\)\.a.*$;\1;'` ; do
ax_lib=${libextension}
as_ac_Lib=`$as_echo "ac_cv_lib_$ax_lib''_exit" | $as_tr_sh`
{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for exit in -l$ax_lib" >&5
$as_echo_n "checking for exit in -l$ax_lib... " >&6; }
if eval \${$as_ac_Lib+:} false; then :
$as_echo_n "(cached) " >&6
else
ac_check_lib_save_LIBS=$LIBS
LIBS="-l$ax_lib $LIBS"
cat confdefs.h - <<_ACEOF >conftest.$ac_ext
/* end confdefs.h. */
/* Override any GCC internal prototype to avoid an error.
Use char because int might match the return type of a GCC
builtin and then its argument prototype would still apply. */
#ifdef __cplusplus
extern "C"
#endif
char exit ();
int
main ()
{
return exit ();
;
return 0;
}
_ACEOF
if ac_fn_cxx_try_link "$LINENO"; then :
eval "$as_ac_Lib=yes"
else
eval "$as_ac_Lib=no"
fi
rm -f core conftest.err conftest.$ac_objext \
conftest$ac_exeext conftest.$ac_ext
LIBS=$ac_check_lib_save_LIBS
fi
eval ac_res=\$$as_ac_Lib
{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5
$as_echo "$ac_res" >&6; }
if eval test \"x\$"$as_ac_Lib"\" = x"yes"; then :
BOOST_REGEX_LIB="-l$ax_lib"; link_regex="yes"; break
else
link_regex="no"
fi
done
if test "x$link_regex" != "xyes"; then
for libextension in `ls $BOOSTLIBDIR/boost_regex*.dll* $BOOSTLIBDIR/boost_regex*.a* 2>/dev/null | sed 's,.*/,,' | sed -e 's;^\(boost_regex.*\)\.dll.*$;\1;' -e 's;^\(boost_regex.*\)\.a.*$;\1;'` ; do
ax_lib=${libextension}
as_ac_Lib=`$as_echo "ac_cv_lib_$ax_lib''_exit" | $as_tr_sh`
{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for exit in -l$ax_lib" >&5
$as_echo_n "checking for exit in -l$ax_lib... " >&6; }
if eval \${$as_ac_Lib+:} false; then :
$as_echo_n "(cached) " >&6
else
ac_check_lib_save_LIBS=$LIBS
LIBS="-l$ax_lib $LIBS"
cat confdefs.h - <<_ACEOF >conftest.$ac_ext
/* end confdefs.h. */
/* Override any GCC internal prototype to avoid an error.
Use char because int might match the return type of a GCC
builtin and then its argument prototype would still apply. */
#ifdef __cplusplus
extern "C"
#endif
char exit ();
int
main ()
{
return exit ();
;
return 0;
}
_ACEOF
if ac_fn_cxx_try_link "$LINENO"; then :
eval "$as_ac_Lib=yes"
else
eval "$as_ac_Lib=no"
fi
rm -f core conftest.err conftest.$ac_objext \
conftest$ac_exeext conftest.$ac_ext
LIBS=$ac_check_lib_save_LIBS
fi
eval ac_res=\$$as_ac_Lib
{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5
$as_echo "$ac_res" >&6; }
if eval test \"x\$"$as_ac_Lib"\" = x"yes"; then :
BOOST_REGEX_LIB="-l$ax_lib"; link_regex="yes"; break
else
link_regex="no"
fi
done
fi
else
for ax_lib in $ax_boost_user_regex_lib boost_regex-$ax_boost_user_regex_lib; do
as_ac_Lib=`$as_echo "ac_cv_lib_$ax_lib''_main" | $as_tr_sh`
{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for main in -l$ax_lib" >&5
$as_echo_n "checking for main in -l$ax_lib... " >&6; }
if eval \${$as_ac_Lib+:} false; then :
$as_echo_n "(cached) " >&6
else
ac_check_lib_save_LIBS=$LIBS
LIBS="-l$ax_lib $LIBS"
cat confdefs.h - <<_ACEOF >conftest.$ac_ext
/* end confdefs.h. */
int
main ()
{
return main ();
;
return 0;
}
_ACEOF
if ac_fn_cxx_try_link "$LINENO"; then :
eval "$as_ac_Lib=yes"
else
eval "$as_ac_Lib=no"
fi
rm -f core conftest.err conftest.$ac_objext \
conftest$ac_exeext conftest.$ac_ext
LIBS=$ac_check_lib_save_LIBS
fi
eval ac_res=\$$as_ac_Lib
{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5
$as_echo "$ac_res" >&6; }
if eval test \"x\$"$as_ac_Lib"\" = x"yes"; then :
BOOST_REGEX_LIB="-l$ax_lib"; link_regex="yes"; break
else
link_regex="no"
fi
done
fi
if test "x$ax_lib" = "x"; then
as_fn_error $? "Could not find a version of the Boost::Regex library!" "$LINENO" 5
fi
if test "x$link_regex" != "xyes"; then
as_fn_error $? "Could not link against $ax_lib !" "$LINENO" 5
fi
fi
CPPFLAGS="$CPPFLAGS_SAVED"
LDFLAGS="$LDFLAGS_SAVED"
fi
CONFIG_GIT_REVISION=v1.2.1
{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for sqlite3_open_v2 in -lsqlite3" >&5
$as_echo_n "checking for sqlite3_open_v2 in -lsqlite3... " >&6; }
if ${ac_cv_lib_sqlite3_sqlite3_open_v2+:} false; then :
$as_echo_n "(cached) " >&6
else
ac_check_lib_save_LIBS=$LIBS
LIBS="-lsqlite3 $LIBS"
cat confdefs.h - <<_ACEOF >conftest.$ac_ext
/* end confdefs.h. */
/* Override any GCC internal prototype to avoid an error.
Use char because int might match the return type of a GCC
builtin and then its argument prototype would still apply. */
#ifdef __cplusplus
extern "C"
#endif
char sqlite3_open_v2 ();
int
main ()
{
return sqlite3_open_v2 ();
;
return 0;
}
_ACEOF
if ac_fn_cxx_try_link "$LINENO"; then :
ac_cv_lib_sqlite3_sqlite3_open_v2=yes
else
ac_cv_lib_sqlite3_sqlite3_open_v2=no
fi
rm -f core conftest.err conftest.$ac_objext \
conftest$ac_exeext conftest.$ac_ext
LIBS=$ac_check_lib_save_LIBS
fi
{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_lib_sqlite3_sqlite3_open_v2" >&5
$as_echo "$ac_cv_lib_sqlite3_sqlite3_open_v2" >&6; }
if test "x$ac_cv_lib_sqlite3_sqlite3_open_v2" = xyes; then :
cat >>confdefs.h <<_ACEOF
#define HAVE_LIBSQLITE3 1
_ACEOF
LIBS="-lsqlite3 $LIBS"
fi
if test x$ac_cv_lib_sqlite3_sqlite3_open_v2 = xyes; then
DUT_SQLITE_TRUE=
DUT_SQLITE_FALSE='#'
else
DUT_SQLITE_TRUE='#'
DUT_SQLITE_FALSE=
fi
ac_config_headers="$ac_config_headers config.h"
ac_config_files="$ac_config_files Makefile Doxyfile"
cat >confcache <<\_ACEOF
# This file is a shell script that caches the results of configure
# tests run on this system so they can be shared between configure
# scripts and configure runs, see configure's option --config-cache.
# It is not useful on other systems. If it contains results you don't
# want to keep, you may remove or edit it.
#
# config.status only pays attention to the cache file if you give it
# the --recheck option to rerun configure.
#
# `ac_cv_env_foo' variables (set or unset) will be overridden when
# loading this file, other *unset* `ac_cv_foo' will be assigned the
# following values.
_ACEOF
# The following way of writing the cache mishandles newlines in values,
# but we know of no workaround that is simple, portable, and efficient.
# So, we kill variables containing newlines.
# Ultrix sh set writes to stderr and can't be redirected directly,
# and sets the high bit in the cache file unless we assign to the vars.
(
for ac_var in `(set) 2>&1 | sed -n 's/^\([a-zA-Z_][a-zA-Z0-9_]*\)=.*/\1/p'`; do
eval ac_val=\$$ac_var
case $ac_val in #(
*${as_nl}*)
case $ac_var in #(
*_cv_*) { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: cache variable $ac_var contains a newline" >&5
$as_echo "$as_me: WARNING: cache variable $ac_var contains a newline" >&2;} ;;
esac
case $ac_var in #(
_ | IFS | as_nl) ;; #(
BASH_ARGV | BASH_SOURCE) eval $ac_var= ;; #(
*) { eval $ac_var=; unset $ac_var;} ;;
esac ;;
esac
done
(set) 2>&1 |
case $as_nl`(ac_space=' '; set) 2>&1` in #(
*${as_nl}ac_space=\ *)
# `set' does not quote correctly, so add quotes: double-quote
# substitution turns \\\\ into \\, and sed turns \\ into \.
sed -n \
"s/'/'\\\\''/g;
s/^\\([_$as_cr_alnum]*_cv_[_$as_cr_alnum]*\\)=\\(.*\\)/\\1='\\2'/p"
;; #(
*)
# `set' quotes correctly as required by POSIX, so do not add quotes.
sed -n "/^[_$as_cr_alnum]*_cv_[_$as_cr_alnum]*=/p"
;;
esac |
sort
) |
sed '
/^ac_cv_env_/b end
t clear
:clear
s/^\([^=]*\)=\(.*[{}].*\)$/test "${\1+set}" = set || &/
t end
s/^\([^=]*\)=\(.*\)$/\1=${\1=\2}/
:end' >>confcache
if diff "$cache_file" confcache >/dev/null 2>&1; then :; else
if test -w "$cache_file"; then
if test "x$cache_file" != "x/dev/null"; then
{ $as_echo "$as_me:${as_lineno-$LINENO}: updating cache $cache_file" >&5
$as_echo "$as_me: updating cache $cache_file" >&6;}
if test ! -f "$cache_file" || test -h "$cache_file"; then
cat confcache >"$cache_file"
else
case $cache_file in #(
*/* | ?:*)
mv -f confcache "$cache_file"$$ &&
mv -f "$cache_file"$$ "$cache_file" ;; #(
*)
mv -f confcache "$cache_file" ;;
esac
fi
fi
else
{ $as_echo "$as_me:${as_lineno-$LINENO}: not updating unwritable cache $cache_file" >&5
$as_echo "$as_me: not updating unwritable cache $cache_file" >&6;}
fi
fi
rm -f confcache
test "x$prefix" = xNONE && prefix=$ac_default_prefix
# Let make expand exec_prefix.
test "x$exec_prefix" = xNONE && exec_prefix='${prefix}'
DEFS=-DHAVE_CONFIG_H
ac_libobjs=
ac_ltlibobjs=
U=
for ac_i in : $LIBOBJS; do test "x$ac_i" = x: && continue
# 1. Remove the extension, and $U if already installed.
ac_script='s/\$U\././;s/\.o$//;s/\.obj$//'
ac_i=`$as_echo "$ac_i" | sed "$ac_script"`
# 2. Prepend LIBOBJDIR. When used with automake>=1.10 LIBOBJDIR
# will be set to the directory where LIBOBJS objects are built.
as_fn_append ac_libobjs " \${LIBOBJDIR}$ac_i\$U.$ac_objext"
as_fn_append ac_ltlibobjs " \${LIBOBJDIR}$ac_i"'$U.lo'
done
LIBOBJS=$ac_libobjs
LTLIBOBJS=$ac_ltlibobjs
{ $as_echo "$as_me:${as_lineno-$LINENO}: checking that generated files are newer than configure" >&5
$as_echo_n "checking that generated files are newer than configure... " >&6; }
if test -n "$am_sleep_pid"; then
# Hide warnings about reused PIDs.
wait $am_sleep_pid 2>/dev/null
fi
{ $as_echo "$as_me:${as_lineno-$LINENO}: result: done" >&5
$as_echo "done" >&6; }
if test -n "$EXEEXT"; then
am__EXEEXT_TRUE=
am__EXEEXT_FALSE='#'
else
am__EXEEXT_TRUE='#'
am__EXEEXT_FALSE=
fi
if test -z "${AMDEP_TRUE}" && test -z "${AMDEP_FALSE}"; then
as_fn_error $? "conditional \"AMDEP\" was never defined.
Usually this means the macro was only invoked conditionally." "$LINENO" 5
fi
if test -z "${am__fastdepCXX_TRUE}" && test -z "${am__fastdepCXX_FALSE}"; then
as_fn_error $? "conditional \"am__fastdepCXX\" was never defined.
Usually this means the macro was only invoked conditionally." "$LINENO" 5
fi
if test -z "${DUT_MONETDB_TRUE}" && test -z "${DUT_MONETDB_FALSE}"; then
as_fn_error $? "conditional \"DUT_MONETDB\" was never defined.
Usually this means the macro was only invoked conditionally." "$LINENO" 5
fi
if test -z "${am__fastdepCC_TRUE}" && test -z "${am__fastdepCC_FALSE}"; then
as_fn_error $? "conditional \"am__fastdepCC\" was never defined.
Usually this means the macro was only invoked conditionally." "$LINENO" 5
fi
if test -z "${DUT_SQLITE_TRUE}" && test -z "${DUT_SQLITE_FALSE}"; then
as_fn_error $? "conditional \"DUT_SQLITE\" was never defined.
Usually this means the macro was only invoked conditionally." "$LINENO" 5
fi
: "${CONFIG_STATUS=./config.status}"
ac_write_fail=0
ac_clean_files_save=$ac_clean_files
ac_clean_files="$ac_clean_files $CONFIG_STATUS"
{ $as_echo "$as_me:${as_lineno-$LINENO}: creating $CONFIG_STATUS" >&5
$as_echo "$as_me: creating $CONFIG_STATUS" >&6;}
as_write_fail=0
cat >$CONFIG_STATUS <<_ASEOF || as_write_fail=1
#! $SHELL
# Generated by $as_me.
# Run this file to recreate the current configuration.
# Compiler output produced by configure, useful for debugging
# configure, is in config.log if it exists.
debug=false
ac_cs_recheck=false
ac_cs_silent=false
SHELL=\${CONFIG_SHELL-$SHELL}
export SHELL
_ASEOF
cat >>$CONFIG_STATUS <<\_ASEOF || as_write_fail=1
## -------------------- ##
## M4sh Initialization. ##
## -------------------- ##
# Be more Bourne compatible
DUALCASE=1; export DUALCASE # for MKS sh
if test -n "${ZSH_VERSION+set}" && (emulate sh) >/dev/null 2>&1; then :
emulate sh
NULLCMD=:
# Pre-4.2 versions of Zsh do word splitting on ${1+"$@"}, which
# is contrary to our usage. Disable this feature.
alias -g '${1+"$@"}'='"$@"'
setopt NO_GLOB_SUBST
else
case `(set -o) 2>/dev/null` in #(
*posix*) :
set -o posix ;; #(
*) :
;;
esac
fi
as_nl='
'
export as_nl
# Printing a long string crashes Solaris 7 /usr/bin/printf.
as_echo='\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\'
as_echo=$as_echo$as_echo$as_echo$as_echo$as_echo
as_echo=$as_echo$as_echo$as_echo$as_echo$as_echo$as_echo
# Prefer a ksh shell builtin over an external printf program on Solaris,
# but without wasting forks for bash or zsh.
if test -z "$BASH_VERSION$ZSH_VERSION" \
&& (test "X`print -r -- $as_echo`" = "X$as_echo") 2>/dev/null; then
as_echo='print -r --'
as_echo_n='print -rn --'
elif (test "X`printf %s $as_echo`" = "X$as_echo") 2>/dev/null; then
as_echo='printf %s\n'
as_echo_n='printf %s'
else
if test "X`(/usr/ucb/echo -n -n $as_echo) 2>/dev/null`" = "X-n $as_echo"; then
as_echo_body='eval /usr/ucb/echo -n "$1$as_nl"'
as_echo_n='/usr/ucb/echo -n'
else
as_echo_body='eval expr "X$1" : "X\\(.*\\)"'
as_echo_n_body='eval
arg=$1;
case $arg in #(
*"$as_nl"*)
expr "X$arg" : "X\\(.*\\)$as_nl";
arg=`expr "X$arg" : ".*$as_nl\\(.*\\)"`;;
esac;
expr "X$arg" : "X\\(.*\\)" | tr -d "$as_nl"
'
export as_echo_n_body
as_echo_n='sh -c $as_echo_n_body as_echo'
fi
export as_echo_body
as_echo='sh -c $as_echo_body as_echo'
fi
# The user is always right.
if test "${PATH_SEPARATOR+set}" != set; then
PATH_SEPARATOR=:
(PATH='/bin;/bin'; FPATH=$PATH; sh -c :) >/dev/null 2>&1 && {
(PATH='/bin:/bin'; FPATH=$PATH; sh -c :) >/dev/null 2>&1 ||
PATH_SEPARATOR=';'
}
fi
# IFS
# We need space, tab and new line, in precisely that order. Quoting is
# there to prevent editors from complaining about space-tab.
# (If _AS_PATH_WALK were called with IFS unset, it would disable word
# splitting by setting IFS to empty value.)
IFS=" "" $as_nl"
# Find who we are. Look in the path if we contain no directory separator.
as_myself=
case $0 in #((
*[\\/]* ) as_myself=$0 ;;
*) as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
for as_dir in $PATH
do
IFS=$as_save_IFS
test -z "$as_dir" && as_dir=.
test -r "$as_dir/$0" && as_myself=$as_dir/$0 && break
done
IFS=$as_save_IFS
;;
esac
# We did not find ourselves, most probably we were run as `sh COMMAND'
# in which case we are not to be found in the path.
if test "x$as_myself" = x; then
as_myself=$0
fi
if test ! -f "$as_myself"; then
$as_echo "$as_myself: error: cannot find myself; rerun with an absolute file name" >&2
exit 1
fi
# Unset variables that we do not need and which cause bugs (e.g. in
# pre-3.0 UWIN ksh). But do not cause bugs in bash 2.01; the "|| exit 1"
# suppresses any "Segmentation fault" message there. '((' could
# trigger a bug in pdksh 5.2.14.
for as_var in BASH_ENV ENV MAIL MAILPATH
do eval test x\${$as_var+set} = xset \
&& ( (unset $as_var) || exit 1) >/dev/null 2>&1 && unset $as_var || :
done
PS1='$ '
PS2='> '
PS4='+ '
# NLS nuisances.
LC_ALL=C
export LC_ALL
LANGUAGE=C
export LANGUAGE
# CDPATH.
(unset CDPATH) >/dev/null 2>&1 && unset CDPATH
# as_fn_error STATUS ERROR [LINENO LOG_FD]
# ----------------------------------------
# Output "`basename $0`: error: ERROR" to stderr. If LINENO and LOG_FD are
# provided, also output the error to LOG_FD, referencing LINENO. Then exit the
# script with STATUS, using 1 if that was 0.
as_fn_error ()
{
as_status=$1; test $as_status -eq 0 && as_status=1
if test "$4"; then
as_lineno=${as_lineno-"$3"} as_lineno_stack=as_lineno_stack=$as_lineno_stack
$as_echo "$as_me:${as_lineno-$LINENO}: error: $2" >&$4
fi
$as_echo "$as_me: error: $2" >&2
as_fn_exit $as_status
} # as_fn_error
# as_fn_set_status STATUS
# -----------------------
# Set $? to STATUS, without forking.
as_fn_set_status ()
{
return $1
} # as_fn_set_status
# as_fn_exit STATUS
# -----------------
# Exit the shell with STATUS, even in a "trap 0" or "set -e" context.
as_fn_exit ()
{
set +e
as_fn_set_status $1
exit $1
} # as_fn_exit
# as_fn_unset VAR
# ---------------
# Portably unset VAR.
as_fn_unset ()
{
{ eval $1=; unset $1;}
}
as_unset=as_fn_unset
# as_fn_append VAR VALUE
# ----------------------
# Append the text in VALUE to the end of the definition contained in VAR. Take
# advantage of any shell optimizations that allow amortized linear growth over
# repeated appends, instead of the typical quadratic growth present in naive
# implementations.
if (eval "as_var=1; as_var+=2; test x\$as_var = x12") 2>/dev/null; then :
eval 'as_fn_append ()
{
eval $1+=\$2
}'
else
as_fn_append ()
{
eval $1=\$$1\$2
}
fi # as_fn_append
# as_fn_arith ARG...
# ------------------
# Perform arithmetic evaluation on the ARGs, and store the result in the
# global $as_val. Take advantage of shells that can avoid forks. The arguments
# must be portable across $(()) and expr.
if (eval "test \$(( 1 + 1 )) = 2") 2>/dev/null; then :
eval 'as_fn_arith ()
{
as_val=$(( $* ))
}'
else
as_fn_arith ()
{
as_val=`expr "$@" || test $? -eq 1`
}
fi # as_fn_arith
if expr a : '\(a\)' >/dev/null 2>&1 &&
test "X`expr 00001 : '.*\(...\)'`" = X001; then
as_expr=expr
else
as_expr=false
fi
if (basename -- /) >/dev/null 2>&1 && test "X`basename -- / 2>&1`" = "X/"; then
as_basename=basename
else
as_basename=false
fi
if (as_dir=`dirname -- /` && test "X$as_dir" = X/) >/dev/null 2>&1; then
as_dirname=dirname
else
as_dirname=false
fi
as_me=`$as_basename -- "$0" ||
$as_expr X/"$0" : '.*/\([^/][^/]*\)/*$' \| \
X"$0" : 'X\(//\)$' \| \
X"$0" : 'X\(/\)' \| . 2>/dev/null ||
$as_echo X/"$0" |
sed '/^.*\/\([^/][^/]*\)\/*$/{
s//\1/
q
}
/^X\/\(\/\/\)$/{
s//\1/
q
}
/^X\/\(\/\).*/{
s//\1/
q
}
s/.*/./; q'`
# Avoid depending upon Character Ranges.
as_cr_letters='abcdefghijklmnopqrstuvwxyz'
as_cr_LETTERS='ABCDEFGHIJKLMNOPQRSTUVWXYZ'
as_cr_Letters=$as_cr_letters$as_cr_LETTERS
as_cr_digits='0123456789'
as_cr_alnum=$as_cr_Letters$as_cr_digits
ECHO_C= ECHO_N= ECHO_T=
case `echo -n x` in #(((((
-n*)
case `echo 'xy\c'` in
*c*) ECHO_T=' ';; # ECHO_T is single tab character.
xy) ECHO_C='\c';;
*) echo `echo ksh88 bug on AIX 6.1` > /dev/null
ECHO_T=' ';;
esac;;
*)
ECHO_N='-n';;
esac
rm -f conf$$ conf$$.exe conf$$.file
if test -d conf$$.dir; then
rm -f conf$$.dir/conf$$.file
else
rm -f conf$$.dir
mkdir conf$$.dir 2>/dev/null
fi
if (echo >conf$$.file) 2>/dev/null; then
if ln -s conf$$.file conf$$ 2>/dev/null; then
as_ln_s='ln -s'
# ... but there are two gotchas:
# 1) On MSYS, both `ln -s file dir' and `ln file dir' fail.
# 2) DJGPP < 2.04 has no symlinks; `ln -s' creates a wrapper executable.
# In both cases, we have to default to `cp -pR'.
ln -s conf$$.file conf$$.dir 2>/dev/null && test ! -f conf$$.exe ||
as_ln_s='cp -pR'
elif ln conf$$.file conf$$ 2>/dev/null; then
as_ln_s=ln
else
as_ln_s='cp -pR'
fi
else
as_ln_s='cp -pR'
fi
rm -f conf$$ conf$$.exe conf$$.dir/conf$$.file conf$$.file
rmdir conf$$.dir 2>/dev/null
# as_fn_mkdir_p
# -------------
# Create "$as_dir" as a directory, including parents if necessary.
as_fn_mkdir_p ()
{
case $as_dir in #(
-*) as_dir=./$as_dir;;
esac
test -d "$as_dir" || eval $as_mkdir_p || {
as_dirs=
while :; do
case $as_dir in #(
*\'*) as_qdir=`$as_echo "$as_dir" | sed "s/'/'\\\\\\\\''/g"`;; #'(
*) as_qdir=$as_dir;;
esac
as_dirs="'$as_qdir' $as_dirs"
as_dir=`$as_dirname -- "$as_dir" ||
$as_expr X"$as_dir" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \
X"$as_dir" : 'X\(//\)[^/]' \| \
X"$as_dir" : 'X\(//\)$' \| \
X"$as_dir" : 'X\(/\)' \| . 2>/dev/null ||
$as_echo X"$as_dir" |
sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{
s//\1/
q
}
/^X\(\/\/\)[^/].*/{
s//\1/
q
}
/^X\(\/\/\)$/{
s//\1/
q
}
/^X\(\/\).*/{
s//\1/
q
}
s/.*/./; q'`
test -d "$as_dir" && break
done
test -z "$as_dirs" || eval "mkdir $as_dirs"
} || test -d "$as_dir" || as_fn_error $? "cannot create directory $as_dir"
} # as_fn_mkdir_p
if mkdir -p . 2>/dev/null; then
as_mkdir_p='mkdir -p "$as_dir"'
else
test -d ./-p && rmdir ./-p
as_mkdir_p=false
fi
# as_fn_executable_p FILE
# -----------------------
# Test if FILE is an executable regular file.
as_fn_executable_p ()
{
test -f "$1" && test -x "$1"
} # as_fn_executable_p
as_test_x='test -x'
as_executable_p=as_fn_executable_p
# Sed expression to map a string onto a valid CPP name.
as_tr_cpp="eval sed 'y%*$as_cr_letters%P$as_cr_LETTERS%;s%[^_$as_cr_alnum]%_%g'"
# Sed expression to map a string onto a valid variable name.
as_tr_sh="eval sed 'y%*+%pp%;s%[^_$as_cr_alnum]%_%g'"
exec 6>&1
## ----------------------------------- ##
## Main body of $CONFIG_STATUS script. ##
## ----------------------------------- ##
_ASEOF
test $as_write_fail = 0 && chmod +x $CONFIG_STATUS || ac_write_fail=1
cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1
# Save the log message, to keep $0 and so on meaningful, and to
# report actual input values of CONFIG_FILES etc. instead of their
# values after options handling.
ac_log="
This file was extended by SQLsmith $as_me 1.2.1, which was
generated by GNU Autoconf 2.69. Invocation command line was
CONFIG_FILES = $CONFIG_FILES
CONFIG_HEADERS = $CONFIG_HEADERS
CONFIG_LINKS = $CONFIG_LINKS
CONFIG_COMMANDS = $CONFIG_COMMANDS
$ $0 $@
on `(hostname || uname -n) 2>/dev/null | sed 1q`
"
_ACEOF
case $ac_config_files in *"
"*) set x $ac_config_files; shift; ac_config_files=$*;;
esac
case $ac_config_headers in *"
"*) set x $ac_config_headers; shift; ac_config_headers=$*;;
esac
cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1
# Files that config.status was made for.
config_files="$ac_config_files"
config_headers="$ac_config_headers"
config_commands="$ac_config_commands"
_ACEOF
cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1
ac_cs_usage="\
\`$as_me' instantiates files and other configuration actions
from templates according to the current configuration. Unless the files
and actions are specified as TAGs, all are instantiated by default.
Usage: $0 [OPTION]... [TAG]...
-h, --help print this help, then exit
-V, --version print version number and configuration settings, then exit
--config print configuration, then exit
-q, --quiet, --silent
do not print progress messages
-d, --debug don't remove temporary files
--recheck update $as_me by reconfiguring in the same conditions
--file=FILE[:TEMPLATE]
instantiate the configuration file FILE
--header=FILE[:TEMPLATE]
instantiate the configuration header FILE
Configuration files:
$config_files
Configuration headers:
$config_headers
Configuration commands:
$config_commands
Report bugs to <seltenreich@gmx.de>.
SQLsmith home page: <https://github.com/anse1/sqlsmith/>."
_ACEOF
cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1
ac_cs_config="`$as_echo "$ac_configure_args" | sed 's/^ //; s/[\\""\`\$]/\\\\&/g'`"
ac_cs_version="\\
SQLsmith config.status 1.2.1
configured by $0, generated by GNU Autoconf 2.69,
with options \\"\$ac_cs_config\\"
Copyright (C) 2012 Free Software Foundation, Inc.
This config.status script is free software; the Free Software Foundation
gives unlimited permission to copy, distribute and modify it."
ac_pwd='$ac_pwd'
srcdir='$srcdir'
INSTALL='$INSTALL'
MKDIR_P='$MKDIR_P'
AWK='$AWK'
test -n "\$AWK" || AWK=awk
_ACEOF
cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1
# The default lists apply if the user does not specify any file.
ac_need_defaults=:
while test $# != 0
do
case $1 in
--*=?*)
ac_option=`expr "X$1" : 'X\([^=]*\)='`
ac_optarg=`expr "X$1" : 'X[^=]*=\(.*\)'`
ac_shift=:
;;
--*=)
ac_option=`expr "X$1" : 'X\([^=]*\)='`
ac_optarg=
ac_shift=:
;;
*)
ac_option=$1
ac_optarg=$2
ac_shift=shift
;;
esac
case $ac_option in
# Handling of the options.
-recheck | --recheck | --rechec | --reche | --rech | --rec | --re | --r)
ac_cs_recheck=: ;;
--version | --versio | --versi | --vers | --ver | --ve | --v | -V )
$as_echo "$ac_cs_version"; exit ;;
--config | --confi | --conf | --con | --co | --c )
$as_echo "$ac_cs_config"; exit ;;
--debug | --debu | --deb | --de | --d | -d )
debug=: ;;
--file | --fil | --fi | --f )
$ac_shift
case $ac_optarg in
*\'*) ac_optarg=`$as_echo "$ac_optarg" | sed "s/'/'\\\\\\\\''/g"` ;;
'') as_fn_error $? "missing file argument" ;;
esac
as_fn_append CONFIG_FILES " '$ac_optarg'"
ac_need_defaults=false;;
--header | --heade | --head | --hea )
$ac_shift
case $ac_optarg in
*\'*) ac_optarg=`$as_echo "$ac_optarg" | sed "s/'/'\\\\\\\\''/g"` ;;
esac
as_fn_append CONFIG_HEADERS " '$ac_optarg'"
ac_need_defaults=false;;
--he | --h)
# Conflict between --help and --header
as_fn_error $? "ambiguous option: \`$1'
Try \`$0 --help' for more information.";;
--help | --hel | -h )
$as_echo "$ac_cs_usage"; exit ;;
-q | -quiet | --quiet | --quie | --qui | --qu | --q \
| -silent | --silent | --silen | --sile | --sil | --si | --s)
ac_cs_silent=: ;;
# This is an error.
-*) as_fn_error $? "unrecognized option: \`$1'
Try \`$0 --help' for more information." ;;
*) as_fn_append ac_config_targets " $1"
ac_need_defaults=false ;;
esac
shift
done
ac_configure_extra_args=
if $ac_cs_silent; then
exec 6>/dev/null
ac_configure_extra_args="$ac_configure_extra_args --silent"
fi
_ACEOF
cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1
if \$ac_cs_recheck; then
set X $SHELL '$0' $ac_configure_args \$ac_configure_extra_args --no-create --no-recursion
shift
\$as_echo "running CONFIG_SHELL=$SHELL \$*" >&6
CONFIG_SHELL='$SHELL'
export CONFIG_SHELL
exec "\$@"
fi
_ACEOF
cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1
exec 5>>config.log
{
echo
sed 'h;s/./-/g;s/^.../## /;s/...$/ ##/;p;x;p;x' <<_ASBOX
## Running $as_me. ##
_ASBOX
$as_echo "$ac_log"
} >&5
_ACEOF
cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1
#
# INIT-COMMANDS
#
AMDEP_TRUE="$AMDEP_TRUE" ac_aux_dir="$ac_aux_dir"
_ACEOF
cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1
# Handling of arguments.
for ac_config_target in $ac_config_targets
do
case $ac_config_target in
"depfiles") CONFIG_COMMANDS="$CONFIG_COMMANDS depfiles" ;;
"config.h") CONFIG_HEADERS="$CONFIG_HEADERS config.h" ;;
"Makefile") CONFIG_FILES="$CONFIG_FILES Makefile" ;;
"Doxyfile") CONFIG_FILES="$CONFIG_FILES Doxyfile" ;;
*) as_fn_error $? "invalid argument: \`$ac_config_target'" "$LINENO" 5;;
esac
done
# If the user did not use the arguments to specify the items to instantiate,
# then the envvar interface is used. Set only those that are not.
# We use the long form for the default assignment because of an extremely
# bizarre bug on SunOS 4.1.3.
if $ac_need_defaults; then
test "${CONFIG_FILES+set}" = set || CONFIG_FILES=$config_files
test "${CONFIG_HEADERS+set}" = set || CONFIG_HEADERS=$config_headers
test "${CONFIG_COMMANDS+set}" = set || CONFIG_COMMANDS=$config_commands
fi
# Have a temporary directory for convenience. Make it in the build tree
# simply because there is no reason against having it here, and in addition,
# creating and moving files from /tmp can sometimes cause problems.
# Hook for its removal unless debugging.
# Note that there is a small window in which the directory will not be cleaned:
# after its creation but before its name has been assigned to `$tmp'.
$debug ||
{
tmp= ac_tmp=
trap 'exit_status=$?
: "${ac_tmp:=$tmp}"
{ test ! -d "$ac_tmp" || rm -fr "$ac_tmp"; } && exit $exit_status
' 0
trap 'as_fn_exit 1' 1 2 13 15
}
# Create a (secure) tmp directory for tmp files.
{
tmp=`(umask 077 && mktemp -d "./confXXXXXX") 2>/dev/null` &&
test -d "$tmp"
} ||
{
tmp=./conf$$-$RANDOM
(umask 077 && mkdir "$tmp")
} || as_fn_error $? "cannot create a temporary directory in ." "$LINENO" 5
ac_tmp=$tmp
# Set up the scripts for CONFIG_FILES section.
# No need to generate them if there are no CONFIG_FILES.
# This happens for instance with `./config.status config.h'.
if test -n "$CONFIG_FILES"; then
ac_cr=`echo X | tr X '\015'`
# On cygwin, bash can eat \r inside `` if the user requested igncr.
# But we know of no other shell where ac_cr would be empty at this
# point, so we can use a bashism as a fallback.
if test "x$ac_cr" = x; then
eval ac_cr=\$\'\\r\'
fi
ac_cs_awk_cr=`$AWK 'BEGIN { print "a\rb" }' </dev/null 2>/dev/null`
if test "$ac_cs_awk_cr" = "a${ac_cr}b"; then
ac_cs_awk_cr='\\r'
else
ac_cs_awk_cr=$ac_cr
fi
echo 'BEGIN {' >"$ac_tmp/subs1.awk" &&
_ACEOF
{
echo "cat >conf$$subs.awk <<_ACEOF" &&
echo "$ac_subst_vars" | sed 's/.*/&!$&$ac_delim/' &&
echo "_ACEOF"
} >conf$$subs.sh ||
as_fn_error $? "could not make $CONFIG_STATUS" "$LINENO" 5
ac_delim_num=`echo "$ac_subst_vars" | grep -c '^'`
ac_delim='%!_!# '
for ac_last_try in false false false false false :; do
. ./conf$$subs.sh ||
as_fn_error $? "could not make $CONFIG_STATUS" "$LINENO" 5
ac_delim_n=`sed -n "s/.*$ac_delim\$/X/p" conf$$subs.awk | grep -c X`
if test $ac_delim_n = $ac_delim_num; then
break
elif $ac_last_try; then
as_fn_error $? "could not make $CONFIG_STATUS" "$LINENO" 5
else
ac_delim="$ac_delim!$ac_delim _$ac_delim!! "
fi
done
rm -f conf$$subs.sh
cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1
cat >>"\$ac_tmp/subs1.awk" <<\\_ACAWK &&
_ACEOF
sed -n '
h
s/^/S["/; s/!.*/"]=/
p
g
s/^[^!]*!//
:repl
t repl
s/'"$ac_delim"'$//
t delim
:nl
h
s/\(.\{148\}\)..*/\1/
t more1
s/["\\]/\\&/g; s/^/"/; s/$/\\n"\\/
p
n
b repl
:more1
s/["\\]/\\&/g; s/^/"/; s/$/"\\/
p
g
s/.\{148\}//
t nl
:delim
h
s/\(.\{148\}\)..*/\1/
t more2
s/["\\]/\\&/g; s/^/"/; s/$/"/
p
b
:more2
s/["\\]/\\&/g; s/^/"/; s/$/"\\/
p
g
s/.\{148\}//
t delim
' >$CONFIG_STATUS || ac_write_fail=1
rm -f conf$$subs.awk
cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1
_ACAWK
cat >>"\$ac_tmp/subs1.awk" <<_ACAWK &&
for (key in S) S_is_set[key] = 1
FS = ""
}
{
line = $ 0
nfields = split(line, field, "@")
substed = 0
len = length(field[1])
for (i = 2; i < nfields; i++) {
key = field[i]
keylen = length(key)
if (S_is_set[key]) {
value = S[key]
line = substr(line, 1, len) "" value "" substr(line, len + keylen + 3)
len += length(value) + length(field[++i])
substed = 1
} else
len += 1 + keylen
}
print line
}
_ACAWK
_ACEOF
cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1
if sed "s/$ac_cr//" < /dev/null > /dev/null 2>&1; then
sed "s/$ac_cr\$//; s/$ac_cr/$ac_cs_awk_cr/g"
else
cat
fi < "$ac_tmp/subs1.awk" > "$ac_tmp/subs.awk" \
|| as_fn_error $? "could not setup config files machinery" "$LINENO" 5
_ACEOF
# VPATH may cause trouble with some makes, so we remove sole $(srcdir),
# ${srcdir} and @srcdir@ entries from VPATH if srcdir is ".", strip leading and
# trailing colons and then remove the whole line if VPATH becomes empty
# (actually we leave an empty line to preserve line numbers).
if test "x$srcdir" = x.; then
ac_vpsub='/^[ ]*VPATH[ ]*=[ ]*/{
h
s///
s/^/:/
s/[ ]*$/:/
s/:\$(srcdir):/:/g
s/:\${srcdir}:/:/g
s/:@srcdir@:/:/g
s/^:*//
s/:*$//
x
s/\(=[ ]*\).*/\1/
G
s/\n//
s/^[^=]*=[ ]*$//
}'
fi
cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1
fi # test -n "$CONFIG_FILES"
# Set up the scripts for CONFIG_HEADERS section.
# No need to generate them if there are no CONFIG_HEADERS.
# This happens for instance with `./config.status Makefile'.
if test -n "$CONFIG_HEADERS"; then
cat >"$ac_tmp/defines.awk" <<\_ACAWK ||
BEGIN {
_ACEOF
# Transform confdefs.h into an awk script `defines.awk', embedded as
# here-document in config.status, that substitutes the proper values into
# config.h.in to produce config.h.
# Create a delimiter string that does not exist in confdefs.h, to ease
# handling of long lines.
ac_delim='%!_!# '
for ac_last_try in false false :; do
ac_tt=`sed -n "/$ac_delim/p" confdefs.h`
if test -z "$ac_tt"; then
break
elif $ac_last_try; then
as_fn_error $? "could not make $CONFIG_HEADERS" "$LINENO" 5
else
ac_delim="$ac_delim!$ac_delim _$ac_delim!! "
fi
done
# For the awk script, D is an array of macro values keyed by name,
# likewise P contains macro parameters if any. Preserve backslash
# newline sequences.
ac_word_re=[_$as_cr_Letters][_$as_cr_alnum]*
sed -n '
s/.\{148\}/&'"$ac_delim"'/g
t rset
:rset
s/^[ ]*#[ ]*define[ ][ ]*/ /
t def
d
:def
s/\\$//
t bsnl
s/["\\]/\\&/g
s/^ \('"$ac_word_re"'\)\(([^()]*)\)[ ]*\(.*\)/P["\1"]="\2"\
D["\1"]=" \3"/p
s/^ \('"$ac_word_re"'\)[ ]*\(.*\)/D["\1"]=" \2"/p
d
:bsnl
s/["\\]/\\&/g
s/^ \('"$ac_word_re"'\)\(([^()]*)\)[ ]*\(.*\)/P["\1"]="\2"\
D["\1"]=" \3\\\\\\n"\\/p
t cont
s/^ \('"$ac_word_re"'\)[ ]*\(.*\)/D["\1"]=" \2\\\\\\n"\\/p
t cont
d
:cont
n
s/.\{148\}/&'"$ac_delim"'/g
t clear
:clear
s/\\$//
t bsnlc
s/["\\]/\\&/g; s/^/"/; s/$/"/p
d
:bsnlc
s/["\\]/\\&/g; s/^/"/; s/$/\\\\\\n"\\/p
b cont
' >$CONFIG_STATUS || ac_write_fail=1
cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1
for (key in D) D_is_set[key] = 1
FS = ""
}
/^[\t ]*#[\t ]*(define|undef)[\t ]+$ac_word_re([\t (]|\$)/ {
line = \$ 0
split(line, arg, " ")
if (arg[1] == "#") {
defundef = arg[2]
mac1 = arg[3]
} else {
defundef = substr(arg[1], 2)
mac1 = arg[2]
}
split(mac1, mac2, "(") #)
macro = mac2[1]
prefix = substr(line, 1, index(line, defundef) - 1)
if (D_is_set[macro]) {
# Preserve the white space surrounding the "#".
print prefix "define", macro P[macro] D[macro]
next
} else {
# Replace #undef with comments. This is necessary, for example,
# in the case of _POSIX_SOURCE, which is predefined and required
# on some systems where configure will not decide to define it.
if (defundef == "undef") {
print "/*", prefix defundef, macro, "*/"
next
}
}
}
{ print }
_ACAWK
_ACEOF
cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1
as_fn_error $? "could not setup config headers machinery" "$LINENO" 5
fi # test -n "$CONFIG_HEADERS"
eval set X " :F $CONFIG_FILES :H $CONFIG_HEADERS :C $CONFIG_COMMANDS"
shift
for ac_tag
do
case $ac_tag in
:[FHLC]) ac_mode=$ac_tag; continue;;
esac
case $ac_mode$ac_tag in
:[FHL]*:*);;
:L* | :C*:*) as_fn_error $? "invalid tag \`$ac_tag'" "$LINENO" 5;;
:[FH]-) ac_tag=-:-;;
:[FH]*) ac_tag=$ac_tag:$ac_tag.in;;
esac
ac_save_IFS=$IFS
IFS=:
set x $ac_tag
IFS=$ac_save_IFS
shift
ac_file=$1
shift
case $ac_mode in
:L) ac_source=$1;;
:[FH])
ac_file_inputs=
for ac_f
do
case $ac_f in
-) ac_f="$ac_tmp/stdin";;
*) # Look for the file first in the build tree, then in the source tree
# (if the path is not absolute). The absolute path cannot be DOS-style,
# because $ac_f cannot contain `:'.
test -f "$ac_f" ||
case $ac_f in
[\\/$]*) false;;
*) test -f "$srcdir/$ac_f" && ac_f="$srcdir/$ac_f";;
esac ||
as_fn_error 1 "cannot find input file: \`$ac_f'" "$LINENO" 5;;
esac
case $ac_f in *\'*) ac_f=`$as_echo "$ac_f" | sed "s/'/'\\\\\\\\''/g"`;; esac
as_fn_append ac_file_inputs " '$ac_f'"
done
# Let's still pretend it is `configure' which instantiates (i.e., don't
# use $as_me), people would be surprised to read:
# /* config.h. Generated by config.status. */
configure_input='Generated from '`
$as_echo "$*" | sed 's|^[^:]*/||;s|:[^:]*/|, |g'
`' by configure.'
if test x"$ac_file" != x-; then
configure_input="$ac_file. $configure_input"
{ $as_echo "$as_me:${as_lineno-$LINENO}: creating $ac_file" >&5
$as_echo "$as_me: creating $ac_file" >&6;}
fi
# Neutralize special characters interpreted by sed in replacement strings.
case $configure_input in #(
*\&* | *\|* | *\\* )
ac_sed_conf_input=`$as_echo "$configure_input" |
sed 's/[\\\\&|]/\\\\&/g'`;; #(
*) ac_sed_conf_input=$configure_input;;
esac
case $ac_tag in
*:-:* | *:-) cat >"$ac_tmp/stdin" \
|| as_fn_error $? "could not create $ac_file" "$LINENO" 5 ;;
esac
;;
esac
ac_dir=`$as_dirname -- "$ac_file" ||
$as_expr X"$ac_file" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \
X"$ac_file" : 'X\(//\)[^/]' \| \
X"$ac_file" : 'X\(//\)$' \| \
X"$ac_file" : 'X\(/\)' \| . 2>/dev/null ||
$as_echo X"$ac_file" |
sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{
s//\1/
q
}
/^X\(\/\/\)[^/].*/{
s//\1/
q
}
/^X\(\/\/\)$/{
s//\1/
q
}
/^X\(\/\).*/{
s//\1/
q
}
s/.*/./; q'`
as_dir="$ac_dir"; as_fn_mkdir_p
ac_builddir=.
case "$ac_dir" in
.) ac_dir_suffix= ac_top_builddir_sub=. ac_top_build_prefix= ;;
*)
ac_dir_suffix=/`$as_echo "$ac_dir" | sed 's|^\.[\\/]||'`
# A ".." for each directory in $ac_dir_suffix.
ac_top_builddir_sub=`$as_echo "$ac_dir_suffix" | sed 's|/[^\\/]*|/..|g;s|/||'`
case $ac_top_builddir_sub in
"") ac_top_builddir_sub=. ac_top_build_prefix= ;;
*) ac_top_build_prefix=$ac_top_builddir_sub/ ;;
esac ;;
esac
ac_abs_top_builddir=$ac_pwd
ac_abs_builddir=$ac_pwd$ac_dir_suffix
# for backward compatibility:
ac_top_builddir=$ac_top_build_prefix
case $srcdir in
.) # We are building in place.
ac_srcdir=.
ac_top_srcdir=$ac_top_builddir_sub
ac_abs_top_srcdir=$ac_pwd ;;
[\\/]* | ?:[\\/]* ) # Absolute name.
ac_srcdir=$srcdir$ac_dir_suffix;
ac_top_srcdir=$srcdir
ac_abs_top_srcdir=$srcdir ;;
*) # Relative name.
ac_srcdir=$ac_top_build_prefix$srcdir$ac_dir_suffix
ac_top_srcdir=$ac_top_build_prefix$srcdir
ac_abs_top_srcdir=$ac_pwd/$srcdir ;;
esac
ac_abs_srcdir=$ac_abs_top_srcdir$ac_dir_suffix
case $ac_mode in
:F)
#
# CONFIG_FILE
#
case $INSTALL in
[\\/$]* | ?:[\\/]* ) ac_INSTALL=$INSTALL ;;
*) ac_INSTALL=$ac_top_build_prefix$INSTALL ;;
esac
ac_MKDIR_P=$MKDIR_P
case $MKDIR_P in
[\\/$]* | ?:[\\/]* ) ;;
*/*) ac_MKDIR_P=$ac_top_build_prefix$MKDIR_P ;;
esac
_ACEOF
cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1
# If the template does not know about datarootdir, expand it.
# FIXME: This hack should be removed a few years after 2.60.
ac_datarootdir_hack=; ac_datarootdir_seen=
ac_sed_dataroot='
/datarootdir/ {
p
q
}
/@datadir@/p
/@docdir@/p
/@infodir@/p
/@localedir@/p
/@mandir@/p'
case `eval "sed -n \"\$ac_sed_dataroot\" $ac_file_inputs"` in
*datarootdir*) ac_datarootdir_seen=yes;;
*@datadir@*|*@docdir@*|*@infodir@*|*@localedir@*|*@mandir@*)
{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $ac_file_inputs seems to ignore the --datarootdir setting" >&5
$as_echo "$as_me: WARNING: $ac_file_inputs seems to ignore the --datarootdir setting" >&2;}
_ACEOF
cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1
ac_datarootdir_hack='
s&@datadir@&$datadir&g
s&@docdir@&$docdir&g
s&@infodir@&$infodir&g
s&@localedir@&$localedir&g
s&@mandir@&$mandir&g
s&\\\${datarootdir}&$datarootdir&g' ;;
esac
_ACEOF
# Neutralize VPATH when `$srcdir' = `.'.
# Shell code in configure.ac might set extrasub.
# FIXME: do we really want to maintain this feature?
cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1
ac_sed_extra="$ac_vpsub
$extrasub
_ACEOF
cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1
:t
/@[a-zA-Z_][a-zA-Z_0-9]*@/!b
s|@configure_input@|$ac_sed_conf_input|;t t
s&@top_builddir@&$ac_top_builddir_sub&;t t
s&@top_build_prefix@&$ac_top_build_prefix&;t t
s&@srcdir@&$ac_srcdir&;t t
s&@abs_srcdir@&$ac_abs_srcdir&;t t
s&@top_srcdir@&$ac_top_srcdir&;t t
s&@abs_top_srcdir@&$ac_abs_top_srcdir&;t t
s&@builddir@&$ac_builddir&;t t
s&@abs_builddir@&$ac_abs_builddir&;t t
s&@abs_top_builddir@&$ac_abs_top_builddir&;t t
s&@INSTALL@&$ac_INSTALL&;t t
s&@MKDIR_P@&$ac_MKDIR_P&;t t
$ac_datarootdir_hack
"
eval sed \"\$ac_sed_extra\" "$ac_file_inputs" | $AWK -f "$ac_tmp/subs.awk" \
>$ac_tmp/out || as_fn_error $? "could not create $ac_file" "$LINENO" 5
test -z "$ac_datarootdir_hack$ac_datarootdir_seen" &&
{ ac_out=`sed -n '/\${datarootdir}/p' "$ac_tmp/out"`; test -n "$ac_out"; } &&
{ ac_out=`sed -n '/^[ ]*datarootdir[ ]*:*=/p' \
"$ac_tmp/out"`; test -z "$ac_out"; } &&
{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $ac_file contains a reference to the variable \`datarootdir'
which seems to be undefined. Please make sure it is defined" >&5
$as_echo "$as_me: WARNING: $ac_file contains a reference to the variable \`datarootdir'
which seems to be undefined. Please make sure it is defined" >&2;}
rm -f "$ac_tmp/stdin"
case $ac_file in
-) cat "$ac_tmp/out" && rm -f "$ac_tmp/out";;
*) rm -f "$ac_file" && mv "$ac_tmp/out" "$ac_file";;
esac \
|| as_fn_error $? "could not create $ac_file" "$LINENO" 5
;;
:H)
#
# CONFIG_HEADER
#
if test x"$ac_file" != x-; then
{
$as_echo "/* $configure_input */" \
&& eval '$AWK -f "$ac_tmp/defines.awk"' "$ac_file_inputs"
} >"$ac_tmp/config.h" \
|| as_fn_error $? "could not create $ac_file" "$LINENO" 5
if diff "$ac_file" "$ac_tmp/config.h" >/dev/null 2>&1; then
{ $as_echo "$as_me:${as_lineno-$LINENO}: $ac_file is unchanged" >&5
$as_echo "$as_me: $ac_file is unchanged" >&6;}
else
rm -f "$ac_file"
mv "$ac_tmp/config.h" "$ac_file" \
|| as_fn_error $? "could not create $ac_file" "$LINENO" 5
fi
else
$as_echo "/* $configure_input */" \
&& eval '$AWK -f "$ac_tmp/defines.awk"' "$ac_file_inputs" \
|| as_fn_error $? "could not create -" "$LINENO" 5
fi
# Compute "$ac_file"'s index in $config_headers.
_am_arg="$ac_file"
_am_stamp_count=1
for _am_header in $config_headers :; do
case $_am_header in
$_am_arg | $_am_arg:* )
break ;;
* )
_am_stamp_count=`expr $_am_stamp_count + 1` ;;
esac
done
echo "timestamp for $_am_arg" >`$as_dirname -- "$_am_arg" ||
$as_expr X"$_am_arg" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \
X"$_am_arg" : 'X\(//\)[^/]' \| \
X"$_am_arg" : 'X\(//\)$' \| \
X"$_am_arg" : 'X\(/\)' \| . 2>/dev/null ||
$as_echo X"$_am_arg" |
sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{
s//\1/
q
}
/^X\(\/\/\)[^/].*/{
s//\1/
q
}
/^X\(\/\/\)$/{
s//\1/
q
}
/^X\(\/\).*/{
s//\1/
q
}
s/.*/./; q'`/stamp-h$_am_stamp_count
;;
:C) { $as_echo "$as_me:${as_lineno-$LINENO}: executing $ac_file commands" >&5
$as_echo "$as_me: executing $ac_file commands" >&6;}
;;
esac
case $ac_file$ac_mode in
"depfiles":C) test x"$AMDEP_TRUE" != x"" || {
# Older Autoconf quotes --file arguments for eval, but not when files
# are listed without --file. Let's play safe and only enable the eval
# if we detect the quoting.
case $CONFIG_FILES in
*\'*) eval set x "$CONFIG_FILES" ;;
*) set x $CONFIG_FILES ;;
esac
shift
for mf
do
# Strip MF so we end up with the name of the file.
mf=`echo "$mf" | sed -e 's/:.*$//'`
# Check whether this is an Automake generated Makefile or not.
# We used to match only the files named 'Makefile.in', but
# some people rename them; so instead we look at the file content.
# Grep'ing the first line is not enough: some people post-process
# each Makefile.in and add a new line on top of each file to say so.
# Grep'ing the whole file is not good either: AIX grep has a line
# limit of 2048, but all sed's we know have understand at least 4000.
if sed -n 's,^#.*generated by automake.*,X,p' "$mf" | grep X >/dev/null 2>&1; then
dirpart=`$as_dirname -- "$mf" ||
$as_expr X"$mf" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \
X"$mf" : 'X\(//\)[^/]' \| \
X"$mf" : 'X\(//\)$' \| \
X"$mf" : 'X\(/\)' \| . 2>/dev/null ||
$as_echo X"$mf" |
sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{
s//\1/
q
}
/^X\(\/\/\)[^/].*/{
s//\1/
q
}
/^X\(\/\/\)$/{
s//\1/
q
}
/^X\(\/\).*/{
s//\1/
q
}
s/.*/./; q'`
else
continue
fi
# Extract the definition of DEPDIR, am__include, and am__quote
# from the Makefile without running 'make'.
DEPDIR=`sed -n 's/^DEPDIR = //p' < "$mf"`
test -z "$DEPDIR" && continue
am__include=`sed -n 's/^am__include = //p' < "$mf"`
test -z "$am__include" && continue
am__quote=`sed -n 's/^am__quote = //p' < "$mf"`
# Find all dependency output files, they are included files with
# $(DEPDIR) in their names. We invoke sed twice because it is the
# simplest approach to changing $(DEPDIR) to its actual value in the
# expansion.
for file in `sed -n "
s/^$am__include $am__quote\(.*(DEPDIR).*\)$am__quote"'$/\1/p' <"$mf" | \
sed -e 's/\$(DEPDIR)/'"$DEPDIR"'/g'`; do
# Make sure the directory exists.
test -f "$dirpart/$file" && continue
fdir=`$as_dirname -- "$file" ||
$as_expr X"$file" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \
X"$file" : 'X\(//\)[^/]' \| \
X"$file" : 'X\(//\)$' \| \
X"$file" : 'X\(/\)' \| . 2>/dev/null ||
$as_echo X"$file" |
sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{
s//\1/
q
}
/^X\(\/\/\)[^/].*/{
s//\1/
q
}
/^X\(\/\/\)$/{
s//\1/
q
}
/^X\(\/\).*/{
s//\1/
q
}
s/.*/./; q'`
as_dir=$dirpart/$fdir; as_fn_mkdir_p
# echo "creating $dirpart/$file"
echo '# dummy' > "$dirpart/$file"
done
done
}
;;
esac
done # for ac_tag
as_fn_exit 0
_ACEOF
ac_clean_files=$ac_clean_files_save
test $ac_write_fail = 0 ||
as_fn_error $? "write failure creating $CONFIG_STATUS" "$LINENO" 5
# configure is writing to config.log, and then calls config.status.
# config.status does its own redirection, appending to config.log.
# Unfortunately, on DOS this fails, as config.log is still kept open
# by configure, so config.status won't be able to write to it; its
# output is simply discarded. So we exec the FD to /dev/null,
# effectively closing config.log, so it can be properly (re)opened and
# appended to by config.status. When coming back to configure, we
# need to make the FD available again.
if test "$no_create" != yes; then
ac_cs_success=:
ac_config_status_args=
test "$silent" = yes &&
ac_config_status_args="$ac_config_status_args --quiet"
exec 5>/dev/null
$SHELL $CONFIG_STATUS $ac_config_status_args || ac_cs_success=false
exec 5>>config.log
# Use ||, not &&, to avoid exiting from the if with $? = 1, which
# would make configure fail if this is the last instruction.
$ac_cs_success || as_fn_exit 1
fi
if test -n "$ac_unrecognized_opts" && test "$enable_option_checking" != no; then
{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: unrecognized options: $ac_unrecognized_opts" >&5
$as_echo "$as_me: WARNING: unrecognized options: $ac_unrecognized_opts" >&2;}
fi
sqlsmith-1.2.1/relmodel.cc 0000644 0001750 0001750 00000000423 13272636632 012423 0000000 0000000 #include "relmodel.hh"
map<string, sqltype*> sqltype::typemap;
sqltype * sqltype::get(string n)
{
if (typemap.count(n))
return typemap[n];
else
return typemap[n] = new sqltype(n);
}
bool sqltype::consistent(struct sqltype *rvalue)
{
return this == rvalue;
}
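A minimal usage sketch of the interning above (not part of the distribution): the type names are illustrative, and only sqltype::get() and sqltype::consistent() from relmodel.hh are assumed.
#include <cassert>
#include "relmodel.hh"
// Because get() interns every name in the static typemap, repeated lookups of
// the same name return the same pointer, which is exactly what the pointer
// comparison in consistent() relies on.
static void typemap_example()
{
  sqltype *a = sqltype::get("integer");   // illustrative type name
  sqltype *b = sqltype::get("integer");
  sqltype *c = sqltype::get("text");
  assert(a->consistent(b));    // same interned instance
  assert(!a->consistent(c));   // different name, different instance
}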
sqlsmith-1.2.1/grammar.hh 0000644 0001750 0001750 00000020654 13272401105 012251 0000000 0000000 /// @file
/// @brief grammar: Top-level and unsorted grammar productions
#ifndef GRAMMAR_HH
#define GRAMMAR_HH
#include <string>
#include "relmodel.hh"
#include <memory>
#include "schema.hh"
#include "prod.hh"
#include "expr.hh"
using std::shared_ptr;
struct table_ref : prod {
vector<shared_ptr<named_relation> > refs;
static shared_ptr<table_ref> factory(prod *p);
table_ref(prod *p) : prod(p) { }
virtual ~table_ref() { }
};
struct table_or_query_name : table_ref {
virtual void out(std::ostream &out);
table_or_query_name(prod *p);
virtual ~table_or_query_name() { }
named_relation *t;
};
struct target_table : table_ref {
virtual void out(std::ostream &out);
target_table(prod *p, table *victim = 0);
virtual ~target_table() { }
table *victim_;
};
struct table_sample : table_ref {
virtual void out(std::ostream &out);
table_sample(prod *p);
virtual ~table_sample() { }
struct table *t;
private:
string method;
double percent;
};
struct table_subquery : table_ref {
bool is_lateral;
virtual void out(std::ostream &out);
shared_ptr<query_spec> query;
table_subquery(prod *p, bool lateral = false);
virtual ~table_subquery();
virtual void accept(prod_visitor *v);
};
struct lateral_subquery : table_subquery {
lateral_subquery(prod *p)
: table_subquery(p, true) { }
};
struct join_cond : prod {
static shared_ptr<join_cond> factory(prod *p, table_ref &lhs, table_ref &rhs);
join_cond(prod *p, table_ref &lhs, table_ref &rhs)
: prod(p) { (void) lhs; (void) rhs;}
};
struct simple_join_cond : join_cond {
std::string condition;
simple_join_cond(prod *p, table_ref &lhs, table_ref &rhs);
virtual void out(std::ostream &out);
};
struct expr_join_cond : join_cond {
struct scope joinscope;
shared_ptr<bool_expr> search;
expr_join_cond(prod *p, table_ref &lhs, table_ref &rhs);
virtual void out(std::ostream &out);
virtual void accept(prod_visitor *v) {
search->accept(v);
v->visit(this);
}
};
struct joined_table : table_ref {
virtual void out(std::ostream &out);
joined_table(prod *p);
std::string type;
std::string alias;
virtual std::string ident() { return alias; }
shared_ptr<table_ref> lhs;
shared_ptr<table_ref> rhs;
shared_ptr<join_cond> condition;
virtual ~joined_table() {
}
virtual void accept(prod_visitor *v) {
lhs->accept(v);
rhs->accept(v);
condition->accept(v);
v->visit(this);
}
};
struct from_clause : prod {
std::vector<shared_ptr<table_ref> > reflist;
virtual void out(std::ostream &out);
from_clause(prod *p);
~from_clause() { }
virtual void accept(prod_visitor *v) {
v->visit(this);
for (auto p : reflist)
p->accept(v);
}
};
struct select_list : prod {
std::vector<shared_ptr<value_expr> > value_exprs;
relation derived_table;
int columns = 0;
select_list(prod *p);
virtual void out(std::ostream &out);
~select_list() { }
virtual void accept(prod_visitor *v) {
v->visit(this);
for (auto p : value_exprs)
p->accept(v);
}
};
struct query_spec : prod {
std::string set_quantifier;
shared_ptr<struct from_clause> from_clause;
shared_ptr<struct select_list> select_list;
shared_ptr<bool_expr> search;
std::string limit_clause;
struct scope myscope;
virtual void out(std::ostream &out);
query_spec(prod *p, struct scope *s, bool lateral = 0);
virtual void accept(prod_visitor *v) {
v->visit(this);
select_list->accept(v);
from_clause->accept(v);
search->accept(v);
}
};
struct select_for_update : query_spec {
const char *lockmode;
virtual void out(std::ostream &out);
select_for_update(prod *p, struct scope *s, bool lateral = 0);
};
struct prepare_stmt : prod {
query_spec q;
static long seq;
long id;
virtual void out(std::ostream &out) {
out << "prepare prep" << id << " as " << q;
}
prepare_stmt(prod *p) : prod(p), q(p, scope) {
id = seq++;
}
virtual void accept(prod_visitor *v) {
v->visit(this);
q.accept(v);
}
};
struct modifying_stmt : prod {
table *victim;
struct scope myscope;
modifying_stmt(prod *p, struct scope *s, struct table *victim = 0);
// shared_ptr modifying_stmt::factory(prod *p, struct scope *s);
virtual void pick_victim();
};
struct delete_stmt : modifying_stmt {
shared_ptr<bool_expr> search;
delete_stmt(prod *p, struct scope *s, table *v);
virtual ~delete_stmt() { }
virtual void out(std::ostream &out) {
out << "delete from " << victim->ident();
indent(out);
out << "where " << std::endl << *search;
}
virtual void accept(prod_visitor *v) {
v->visit(this);
search->accept(v);
}
};
struct delete_returning : delete_stmt {
shared_ptr<struct select_list> select_list;
delete_returning(prod *p, struct scope *s, table *victim = 0);
virtual void out(std::ostream &out) {
delete_stmt::out(out);
out << std::endl << "returning " << *select_list;
}
virtual void accept(prod_visitor *v) {
v->visit(this);
search->accept(v);
select_list->accept(v);
}
};
struct insert_stmt : modifying_stmt {
vector<shared_ptr<value_expr> > value_exprs;
insert_stmt(prod *p, struct scope *s, table *victim = 0);
virtual ~insert_stmt() { }
virtual void out(std::ostream &out);
virtual void accept(prod_visitor *v) {
v->visit(this);
for (auto p : value_exprs) p->accept(v);
}
};
struct set_list : prod {
vector<shared_ptr<value_expr> > value_exprs;
vector<string> names;
set_list(prod *p, table *target);
virtual ~set_list() { }
virtual void out(std::ostream &out);
virtual void accept(prod_visitor *v) {
v->visit(this);
for (auto p : value_exprs) p->accept(v);
}
};
struct upsert_stmt : insert_stmt {
shared_ptr<struct set_list> set_list;
string constraint;
shared_ptr<bool_expr> search;
upsert_stmt(prod *p, struct scope *s, table *v = 0);
virtual void out(std::ostream &out) {
insert_stmt::out(out);
out << " on conflict on constraint " << constraint << " do update ";
out << *set_list << " where " << *search;
}
virtual void accept(prod_visitor *v) {
insert_stmt::accept(v);
set_list->accept(v);
search->accept(v);
}
virtual ~upsert_stmt() { }
};
struct update_stmt : modifying_stmt {
shared_ptr<bool_expr> search;
shared_ptr<struct set_list> set_list;
update_stmt(prod *p, struct scope *s, table *victim = 0);
virtual ~update_stmt() { }
virtual void out(std::ostream &out);
virtual void accept(prod_visitor *v) {
v->visit(this);
search->accept(v);
}
};
struct when_clause : prod {
bool matched;
shared_ptr<bool_expr> condition;
// shared_ptr merge_action;
when_clause(struct merge_stmt *p);
virtual ~when_clause() { }
static shared_ptr<when_clause> factory(struct merge_stmt *p);
virtual void out(std::ostream &out);
virtual void accept(prod_visitor *v);
};
struct when_clause_update : when_clause {
shared_ptr<struct set_list> set_list;
struct scope myscope;
when_clause_update(struct merge_stmt *p);
virtual ~when_clause_update() { }
virtual void out(std::ostream &out);
virtual void accept(prod_visitor *v);
};
struct when_clause_insert : when_clause {
vector<shared_ptr<value_expr> > exprs;
when_clause_insert(struct merge_stmt *p);
virtual ~when_clause_insert() { }
virtual void out(std::ostream &out);
virtual void accept(prod_visitor *v);
};
struct merge_stmt : modifying_stmt {
merge_stmt(prod *p, struct scope *s, table *victim = 0);
shared_ptr<table_ref> target_table_;
shared_ptr<table_ref> data_source;
shared_ptr<join_cond> join_condition;
vector<shared_ptr<when_clause> > clauselist;
virtual ~merge_stmt() { }
virtual void out(std::ostream &out);
virtual void accept(prod_visitor *v);
};
struct update_returning : update_stmt {
shared_ptr<struct select_list> select_list;
update_returning(prod *p, struct scope *s, table *victim = 0);
virtual void out(std::ostream &out) {
update_stmt::out(out);
out << std::endl << "returning " << *select_list;
}
virtual void accept(prod_visitor *v) {
v->visit(this);
search->accept(v);
set_list->accept(v);
select_list->accept(v);
}
};
shared_ptr<prod> statement_factory(struct scope *s);
struct common_table_expression : prod {
vector<shared_ptr<prod> > with_queries;
shared_ptr<prod> query;
vector<shared_ptr<named_relation> > refs;
struct scope myscope;
virtual void out(std::ostream &out);
virtual void accept(prod_visitor *v);
common_table_expression(prod *parent, struct scope *s);
};
#endif
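A hedged sketch of how the top-level entry point declared above might be driven; the helper name and the assumption that the caller already holds a populated scope are illustrative, not part of the project.
#include <iostream>
#include <memory>
#include "grammar.hh"
// Generate one random statement for an already-populated scope and print it.
void emit_one_statement(struct scope *s)
{
  std::shared_ptr<prod> stmt = statement_factory(s);
  stmt->out(std::cout);            // productions serialize themselves
  std::cout << ";" << std::endl;
}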
sqlsmith-1.2.1/random.cc 0000644 0001750 0001750 00000001177 13272636632 012107 0000000 0000000 #include "random.hh"
namespace smith {
std::mt19937_64 rng;
}
int d6() {
static std::uniform_int_distribution<> pick(1, 6);
return pick(smith::rng);
}
int d9() {
static std::uniform_int_distribution<> pick(1, 9);
return pick(smith::rng);
}
int d12() {
static std::uniform_int_distribution<> pick(1, 12);
return pick(smith::rng);
}
int d20() {
static std::uniform_int_distribution<> pick(1, 20);
return pick(smith::rng);
}
int d42() {
static std::uniform_int_distribution<> pick(1, 42);
return pick(smith::rng);
}
int d100() {
static std::uniform_int_distribution<> pick(1, 100);
return pick(smith::rng);
}
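A small sketch, assuming the dice helpers above are declared in random.hh: seed the shared PRNG once, then draw from it through the helpers. The seed value is arbitrary.
#include <iostream>
#include "random.hh"
void roll_some_dice()
{
  smith::rng.seed(42);             // illustrative fixed seed for reproducibility
  std::cout << "d6=" << d6()
            << " d20=" << d20()
            << " d100=" << d100() << std::endl;
}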
sqlsmith-1.2.1/schema.cc 0000644 0001750 0001750 00000002574 13272636632 012071 0000000 0000000 #include
#include "config.h"
#include "schema.hh"
#include "relmodel.hh"
#include <pqxx/pqxx>
#include "gitrev.h"
using namespace std;
using namespace pqxx;
void schema::generate_indexes() {
cerr << "Generating indexes...";
for (auto &type: types) {
assert(type);
for(auto &r: aggregates) {
if (type->consistent(r.restype))
aggregates_returning_type.insert(pair<sqltype*, routine*>(type, &r));
}
for(auto &r: routines) {
if (!type->consistent(r.restype))
continue;
routines_returning_type.insert(pair<sqltype*, routine*>(type, &r));
if(!r.argtypes.size())
parameterless_routines_returning_type.insert(pair<sqltype*, routine*>(type, &r));
}
for (auto &t: tables) {
for (auto &c: t.columns()) {
if (type->consistent(c.type)) {
tables_with_columns_of_type.insert(pair<sqltype*, table*>(type, &t));
break;
}
}
}
for (auto &concrete: types) {
if (type->consistent(concrete))
concrete_type.insert(pair<sqltype*, sqltype*>(type, concrete));
}
for (auto &o: operators) {
if (type->consistent(o.result))
operators_returning_type.insert(pair<sqltype*, op*>(type, &o));
}
}
for (auto &t: tables) {
if (t.is_base_table)
base_tables.push_back(&t);
}
cerr << "done." << endl;
assert(booltype);
assert(inttype);
assert(internaltype);
assert(arraytype);
}
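A hedged sketch of consuming one of the indexes built above: each index is a multimap keyed by sqltype*, so the candidates for a wanted type can be walked with equal_range. The assumption that routine exposes a name member is mine.
#include <iostream>
#include "schema.hh"
void list_bool_routines(schema &s)
{
  auto range = s.routines_returning_type.equal_range(s.booltype);
  for (auto it = range.first; it != range.second; ++it)
    std::cerr << it->second->name << std::endl;  // assumes routine has a name field
}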
sqlsmith-1.2.1/log-v1.0-to-v1.2.sql 0000644 0001750 0001750 00000002472 13272636744 013413 0000000 0000000 -- upgrade SQLsmith logging schema from 1.0 to 1.2
alter table stat add column impedance jsonb;
alter table instance add column seed text;
alter table error add column sqlstate text;
create or replace view impedance as
SELECT stat.id,
stat.generated,
stat.level,
stat.nodes,
stat.updated,
js.prod,
js.ok,
js.bad,
js.retries,
js.limited,
js.failed
FROM stat,
LATERAL jsonb_to_recordset(stat.impedance -> 'impedance')
js(prod text,
ok integer,
bad integer,
retries integer,
limited integer,
failed integer)
WHERE stat.impedance IS NOT NULL;
comment on view impedance is 'stat table with normalized jsonb';
create view impedance_report as
SELECT instance.rev,
impedance.prod,
sum(impedance.generated) AS generated,
sum(impedance.ok) AS ok,
sum(impedance.bad) AS bad,
sum(impedance.retries) AS retries,
sum(impedance.limited) AS limited,
sum(impedance.failed) AS failed
FROM impedance
JOIN instance USING (id)
WHERE instance.rev = (( SELECT instance_1.rev
FROM instance instance_1
ORDER BY instance_1.t DESC
LIMIT 1))
GROUP BY instance.rev, impedance.prod
ORDER BY sum(impedance.retries);
comment on view impedance_report is 'impedance report for latest revision';
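A hedged sketch of applying the migration above from C++ with libpqxx; the connection string and the script path are placeholders.
#include <fstream>
#include <sstream>
#include <pqxx/pqxx>
void apply_log_migration()
{
  std::ifstream f("log-v1.0-to-v1.2.sql");     // placeholder path
  std::stringstream sql;
  sql << f.rdbuf();                            // slurp the whole script
  pqxx::connection c("dbname=sqlsmith_log");   // placeholder connection string
  pqxx::work w(c);
  w.exec(sql.str());                           // run the script in one transaction
  w.commit();
}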
sqlsmith-1.2.1/expr.hh 0000644 0001750 0001750 00000011365 13272401105 011600 0000000 0000000 /// @file
/// @brief grammar: Value expression productions
#ifndef EXPR_HH
#define EXPR_HH
#include "prod.hh"
#include <memory>
using std::shared_ptr;
using std::vector;
using std::string;
struct value_expr: prod {
sqltype *type;
virtual void out(std::ostream &out) = 0;
virtual ~value_expr() { }
value_expr(prod *p) : prod(p) { }
static shared_ptr<value_expr> factory(prod *p, sqltype *type_constraint = 0);
};
struct case_expr : value_expr {
shared_ptr<value_expr> condition;
shared_ptr<value_expr> true_expr;
shared_ptr<value_expr> false_expr;
case_expr(prod *p, sqltype *type_constraint = 0);
virtual void out(std::ostream &out);
virtual void accept(prod_visitor *v);
};
struct funcall : value_expr {
routine *proc;
bool is_aggregate;
vector<shared_ptr<value_expr> > parms;
virtual void out(std::ostream &out);
virtual ~funcall() { }
funcall(prod *p, sqltype *type_constraint = 0, bool agg = 0);
virtual void accept(prod_visitor *v) {
v->visit(this);
for (auto p : parms)
p->accept(v);
}
};
struct atomic_subselect : value_expr {
table *tab;
column *col;
int offset;
routine *agg;
atomic_subselect(prod *p, sqltype *type_constraint = 0);
virtual void out(std::ostream &out);
};
struct const_expr: value_expr {
std::string expr;
const_expr(prod *p, sqltype *type_constraint = 0);
virtual void out(std::ostream &out) { out << expr; }
virtual ~const_expr() { }
};
struct column_reference: value_expr {
column_reference(prod *p, sqltype *type_constraint = 0);
virtual void out(std::ostream &out) { out << reference; }
std::string reference;
virtual ~column_reference() { }
};
struct coalesce : value_expr {
const char *abbrev_;
vector<shared_ptr<value_expr> > value_exprs;
virtual ~coalesce() { };
coalesce(prod *p, sqltype *type_constraint = 0, const char *abbrev = "coalesce");
virtual void out(std::ostream &out);
virtual void accept(prod_visitor *v) {
v->visit(this);
for (auto p : value_exprs)
p->accept(v);
}
};
struct nullif : coalesce {
virtual ~nullif() { };
nullif(prod *p, sqltype *type_constraint = 0)
: coalesce(p, type_constraint, "nullif")
{ };
};
struct bool_expr : value_expr {
virtual ~bool_expr() { }
bool_expr(prod *p) : value_expr(p) { type = scope->schema->booltype; }
static shared_ptr<bool_expr> factory(prod *p);
};
struct truth_value : bool_expr {
virtual ~truth_value() { }
const char *op;
virtual void out(std::ostream &out) { out << op; }
truth_value(prod *p) : bool_expr(p) {
op = ( (d6() < 4) ? scope->schema->true_literal : scope->schema->false_literal);
}
};
struct null_predicate : bool_expr {
virtual ~null_predicate() { }
const char *negate;
shared_ptr<value_expr> expr;
null_predicate(prod *p) : bool_expr(p) {
negate = ((d6()<4) ? "not " : "");
expr = value_expr::factory(this);
}
virtual void out(std::ostream &out) {
out << *expr << " is " << negate << "NULL";
}
virtual void accept(prod_visitor *v) {
v->visit(this);
expr->accept(v);
}
};
struct exists_predicate : bool_expr {
shared_ptr<query_spec> subquery;
virtual ~exists_predicate() { }
exists_predicate(prod *p);
virtual void out(std::ostream &out);
virtual void accept(prod_visitor *v);
};
struct bool_binop : bool_expr {
shared_ptr<value_expr> lhs, rhs;
bool_binop(prod *p) : bool_expr(p) { }
virtual void out(std::ostream &out) = 0;
virtual void accept(prod_visitor *v) {
v->visit(this);
lhs->accept(v);
rhs->accept(v);
}
};
struct bool_term : bool_binop {
virtual ~bool_term() { }
const char *op;
virtual void out(std::ostream &out) {
out << "(" << *lhs << ") ";
indent(out);
out << op << " (" << *rhs << ")";
}
bool_term(prod *p) : bool_binop(p)
{
op = ((d6()<4) ? "or" : "and");
lhs = bool_expr::factory(this);
rhs = bool_expr::factory(this);
}
};
struct distinct_pred : bool_binop {
distinct_pred(prod *p);
virtual ~distinct_pred() { };
virtual void out(std::ostream &o) {
o << *lhs << " is distinct from " << *rhs;
}
};
struct comparison_op : bool_binop {
op *oper;
comparison_op(prod *p);
virtual ~comparison_op() { };
virtual void out(std::ostream &o) {
o << *lhs << " " << oper->name << " " << *rhs;
}
};
struct window_function : value_expr {
virtual void out(std::ostream &out);
virtual ~window_function() { }
window_function(prod *p, sqltype *type_constraint);
vector<shared_ptr<value_expr> > partition_by;
vector<shared_ptr<value_expr> > order_by;
shared_ptr<funcall> aggregate;
static bool allowed(prod *pprod);
virtual void accept(prod_visitor *v) {
v->visit(this);
aggregate->accept(v);
for (auto p : partition_by)
p->accept(v);
for (auto p : order_by)
p->accept(v);
}
};
#endif
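A short sketch mirroring the constructors above, assuming only what this header declares: a caller that needs a boolean operand asks the factory for one and streams it.
#include <sstream>
#include <string>
#include "expr.hh"
std::string render_predicate(prod *parent)
{
  std::shared_ptr<bool_expr> e = bool_expr::factory(parent);
  std::ostringstream os;
  e->out(os);                      // every value_expr prints itself
  return os.str();
}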
sqlsmith-1.2.1/missing 0000755 0001750 0001750 00000015330 13073733000 011675 0000000 0000000 #! /bin/sh
# Common wrapper for a few potentially missing GNU programs.
scriptversion=2013-10-28.13; # UTC
# Copyright (C) 1996-2014 Free Software Foundation, Inc.
# Originally written by François Pinard, 1996.
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2, or (at your option)
# any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# As a special exception to the GNU General Public License, if you
# distribute this file as part of a program that contains a
# configuration script generated by Autoconf, you may include it under
# the same distribution terms that you use for the rest of that program.
if test $# -eq 0; then
echo 1>&2 "Try '$0 --help' for more information"
exit 1
fi
case $1 in
--is-lightweight)
# Used by our autoconf macros to check whether the available missing
# script is modern enough.
exit 0
;;
--run)
# Back-compat with the calling convention used by older automake.
shift
;;
-h|--h|--he|--hel|--help)
echo "\
$0 [OPTION]... PROGRAM [ARGUMENT]...
Run 'PROGRAM [ARGUMENT]...', returning a proper advice when this fails due
to PROGRAM being missing or too old.
Options:
-h, --help display this help and exit
-v, --version output version information and exit
Supported PROGRAM values:
aclocal autoconf autoheader autom4te automake makeinfo
bison yacc flex lex help2man
Version suffixes to PROGRAM as well as the prefixes 'gnu-', 'gnu', and
'g' are ignored when checking the name.
Send bug reports to <bug-automake@gnu.org>."
exit $?
;;
-v|--v|--ve|--ver|--vers|--versi|--versio|--version)
echo "missing $scriptversion (GNU Automake)"
exit $?
;;
-*)
echo 1>&2 "$0: unknown '$1' option"
echo 1>&2 "Try '$0 --help' for more information"
exit 1
;;
esac
# Run the given program, remember its exit status.
"$@"; st=$?
# If it succeeded, we are done.
test $st -eq 0 && exit 0
# Also exit now if it failed (or wasn't found), and '--version' was
# passed; such an option is passed most likely to detect whether the
# program is present and works.
case $2 in --version|--help) exit $st;; esac
# Exit code 63 means version mismatch. This often happens when the user
# tries to use an ancient version of a tool on a file that requires a
# minimum version.
if test $st -eq 63; then
msg="probably too old"
elif test $st -eq 127; then
# Program was missing.
msg="missing on your system"
else
# Program was found and executed, but failed. Give up.
exit $st
fi
perl_URL=http://www.perl.org/
flex_URL=http://flex.sourceforge.net/
gnu_software_URL=http://www.gnu.org/software
program_details ()
{
case $1 in
aclocal|automake)
echo "The '$1' program is part of the GNU Automake package:"
echo "<$gnu_software_URL/automake>"
echo "It also requires GNU Autoconf, GNU m4 and Perl in order to run:"
echo "<$gnu_software_URL/autoconf>"
echo "<$gnu_software_URL/m4/>"
echo "<$perl_URL>"
;;
autoconf|autom4te|autoheader)
echo "The '$1' program is part of the GNU Autoconf package:"
echo "<$gnu_software_URL/autoconf/>"
echo "It also requires GNU m4 and Perl in order to run:"
echo "<$gnu_software_URL/m4/>"
echo "<$perl_URL>"
;;
esac
}
give_advice ()
{
# Normalize program name to check for.
normalized_program=`echo "$1" | sed '
s/^gnu-//; t
s/^gnu//; t
s/^g//; t'`
printf '%s\n' "'$1' is $msg."
configure_deps="'configure.ac' or m4 files included by 'configure.ac'"
case $normalized_program in
autoconf*)
echo "You should only need it if you modified 'configure.ac',"
echo "or m4 files included by it."
program_details 'autoconf'
;;
autoheader*)
echo "You should only need it if you modified 'acconfig.h' or"
echo "$configure_deps."
program_details 'autoheader'
;;
automake*)
echo "You should only need it if you modified 'Makefile.am' or"
echo "$configure_deps."
program_details 'automake'
;;
aclocal*)
echo "You should only need it if you modified 'acinclude.m4' or"
echo "$configure_deps."
program_details 'aclocal'
;;
autom4te*)
echo "You might have modified some maintainer files that require"
echo "the 'autom4te' program to be rebuilt."
program_details 'autom4te'
;;
bison*|yacc*)
echo "You should only need it if you modified a '.y' file."
echo "You may want to install the GNU Bison package:"
echo "<$gnu_software_URL/bison/>"
;;
lex*|flex*)
echo "You should only need it if you modified a '.l' file."
echo "You may want to install the Fast Lexical Analyzer package:"
echo "<$flex_URL>"
;;
help2man*)
echo "You should only need it if you modified a dependency" \
"of a man page."
echo "You may want to install the GNU Help2man package:"
echo "<$gnu_software_URL/help2man/>"
;;
makeinfo*)
echo "You should only need it if you modified a '.texi' file, or"
echo "any other file indirectly affecting the aspect of the manual."
echo "You might want to install the Texinfo package:"
echo "<$gnu_software_URL/texinfo/>"
echo "The spurious makeinfo call might also be the consequence of"
echo "using a buggy 'make' (AIX, DU, IRIX), in which case you might"
echo "want to install GNU make:"
echo "<$gnu_software_URL/make/>"
;;
*)
echo "You might have modified some files without having the proper"
echo "tools for further handling them. Check the 'README' file, it"
echo "often tells you about the needed prerequisites for installing"
echo "this package. You may also peek at any GNU archive site, in"
echo "case some other package contains this missing '$1' program."
;;
esac
}
give_advice "$1" | sed -e '1s/^/WARNING: /' \
-e '2,$s/^/ /' >&2
# Propagate the correct exit status (expected to be 127 for a program
# not found, 63 for a program that failed due to version mismatch).
exit $st
# Local variables:
# eval: (add-hook 'write-file-hooks 'time-stamp)
# time-stamp-start: "scriptversion="
# time-stamp-format: "%:y-%02m-%02d.%02H"
# time-stamp-time-zone: "UTC"
# time-stamp-end: "; # UTC"
# End:
sqlsmith-1.2.1/Doxyfile.in 0000644 0001750 0001750 00000317402 13272401105 012415 0000000 0000000 # Doxyfile 1.8.11
# This file describes the settings to be used by the documentation system
# doxygen (www.doxygen.org) for a project.
#
# All text after a double hash (##) is considered a comment and is placed in
# front of the TAG it is preceding.
#
# All text after a single hash (#) is considered a comment and will be ignored.
# The format is:
# TAG = value [value, ...]
# For lists, items can also be appended using:
# TAG += value [value, ...]
# Values that contain spaces should be placed between quotes (\" \").
#---------------------------------------------------------------------------
# Project related configuration options
#---------------------------------------------------------------------------
# This tag specifies the encoding used for all characters in the config file
# that follow. The default is UTF-8 which is also the encoding used for all text
# before the first occurrence of this tag. Doxygen uses libiconv (or the iconv
# built into libc) for the transcoding. See http://www.gnu.org/software/libiconv
# for the list of possible encodings.
# The default value is: UTF-8.
DOXYFILE_ENCODING = UTF-8
# The PROJECT_NAME tag is a single word (or a sequence of words surrounded by
# double-quotes, unless you are using Doxywizard) that should identify the
# project for which the documentation is generated. This name is used in the
# title of most generated pages and in a few other places.
# The default value is: My Project.
PROJECT_NAME = "@PACKAGE_NAME@"
# The PROJECT_NUMBER tag can be used to enter a project or revision number. This
# could be handy for archiving the generated documentation or if some version
# control system is used.
PROJECT_NUMBER = "@CONFIG_GIT_REVISION@"
# Using the PROJECT_BRIEF tag one can provide an optional one line description
# for a project that appears at the top of each page and should give viewer a
# quick idea about the purpose of the project. Keep the description short.
PROJECT_BRIEF = "A random SQL query generator"
# With the PROJECT_LOGO tag one can specify a logo or an icon that is included
# in the documentation. The maximum height of the logo should not exceed 55
# pixels and the maximum width should not exceed 200 pixels. Doxygen will copy
# the logo to the output directory.
PROJECT_LOGO = "logo.png"
# The OUTPUT_DIRECTORY tag is used to specify the (relative or absolute) path
# into which the generated documentation will be written. If a relative path is
# entered, it will be relative to the location where doxygen was started. If
# left blank the current directory will be used.
OUTPUT_DIRECTORY =
# If the CREATE_SUBDIRS tag is set to YES then doxygen will create 4096 sub-
# directories (in 2 levels) under the output directory of each output format and
# will distribute the generated files over these directories. Enabling this
# option can be useful when feeding doxygen a huge amount of source files, where
# putting all generated files in the same directory would otherwise causes
# performance problems for the file system.
# The default value is: NO.
CREATE_SUBDIRS = NO
# If the ALLOW_UNICODE_NAMES tag is set to YES, doxygen will allow non-ASCII
# characters to appear in the names of generated files. If set to NO, non-ASCII
# characters will be escaped, for example _xE3_x81_x84 will be used for Unicode
# U+3044.
# The default value is: NO.
ALLOW_UNICODE_NAMES = NO
# The OUTPUT_LANGUAGE tag is used to specify the language in which all
# documentation generated by doxygen is written. Doxygen will use this
# information to generate all constant output in the proper language.
# Possible values are: Afrikaans, Arabic, Armenian, Brazilian, Catalan, Chinese,
# Chinese-Traditional, Croatian, Czech, Danish, Dutch, English (United States),
# Esperanto, Farsi (Persian), Finnish, French, German, Greek, Hungarian,
# Indonesian, Italian, Japanese, Japanese-en (Japanese with English messages),
# Korean, Korean-en (Korean with English messages), Latvian, Lithuanian,
# Macedonian, Norwegian, Persian (Farsi), Polish, Portuguese, Romanian, Russian,
# Serbian, Serbian-Cyrillic, Slovak, Slovene, Spanish, Swedish, Turkish,
# Ukrainian and Vietnamese.
# The default value is: English.
OUTPUT_LANGUAGE = English
# If the BRIEF_MEMBER_DESC tag is set to YES, doxygen will include brief member
# descriptions after the members that are listed in the file and class
# documentation (similar to Javadoc). Set to NO to disable this.
# The default value is: YES.
BRIEF_MEMBER_DESC = YES
# If the REPEAT_BRIEF tag is set to YES, doxygen will prepend the brief
# description of a member or function before the detailed description
#
# Note: If both HIDE_UNDOC_MEMBERS and BRIEF_MEMBER_DESC are set to NO, the
# brief descriptions will be completely suppressed.
# The default value is: YES.
REPEAT_BRIEF = YES
# This tag implements a quasi-intelligent brief description abbreviator that is
# used to form the text in various listings. Each string in this list, if found
# as the leading text of the brief description, will be stripped from the text
# and the result, after processing the whole list, is used as the annotated
# text. Otherwise, the brief description is used as-is. If left blank, the
# following values are used ($name is automatically replaced with the name of
# the entity):The $name class, The $name widget, The $name file, is, provides,
# specifies, contains, represents, a, an and the.
ABBREVIATE_BRIEF =
# If the ALWAYS_DETAILED_SEC and REPEAT_BRIEF tags are both set to YES then
# doxygen will generate a detailed section even if there is only a brief
# description.
# The default value is: NO.
ALWAYS_DETAILED_SEC = NO
# If the INLINE_INHERITED_MEMB tag is set to YES, doxygen will show all
# inherited members of a class in the documentation of that class as if those
# members were ordinary class members. Constructors, destructors and assignment
# operators of the base classes will not be shown.
# The default value is: NO.
INLINE_INHERITED_MEMB = NO
# If the FULL_PATH_NAMES tag is set to YES, doxygen will prepend the full path
# before files name in the file list and in the header files. If set to NO the
# shortest path that makes the file name unique will be used
# The default value is: YES.
FULL_PATH_NAMES = YES
# The STRIP_FROM_PATH tag can be used to strip a user-defined part of the path.
# Stripping is only done if one of the specified strings matches the left-hand
# part of the path. The tag can be used to show relative paths in the file list.
# If left blank the directory from which doxygen is run is used as the path to
# strip.
#
# Note that you can specify absolute paths here, but also relative paths, which
# will be relative from the directory where doxygen is started.
# This tag requires that the tag FULL_PATH_NAMES is set to YES.
STRIP_FROM_PATH =
# The STRIP_FROM_INC_PATH tag can be used to strip a user-defined part of the
# path mentioned in the documentation of a class, which tells the reader which
# header file to include in order to use a class. If left blank only the name of
# the header file containing the class definition is used. Otherwise one should
# specify the list of include paths that are normally passed to the compiler
# using the -I flag.
STRIP_FROM_INC_PATH =
# If the SHORT_NAMES tag is set to YES, doxygen will generate much shorter (but
# less readable) file names. This can be useful is your file systems doesn't
# support long names like on DOS, Mac, or CD-ROM.
# The default value is: NO.
SHORT_NAMES = NO
# If the JAVADOC_AUTOBRIEF tag is set to YES then doxygen will interpret the
# first line (until the first dot) of a Javadoc-style comment as the brief
# description. If set to NO, the Javadoc-style will behave just like regular Qt-
# style comments (thus requiring an explicit @brief command for a brief
# description.)
# The default value is: NO.
JAVADOC_AUTOBRIEF = YES
# If the QT_AUTOBRIEF tag is set to YES then doxygen will interpret the first
# line (until the first dot) of a Qt-style comment as the brief description. If
# set to NO, the Qt-style will behave just like regular Qt-style comments (thus
# requiring an explicit \brief command for a brief description.)
# The default value is: NO.
QT_AUTOBRIEF = NO
# The MULTILINE_CPP_IS_BRIEF tag can be set to YES to make doxygen treat a
# multi-line C++ special comment block (i.e. a block of //! or /// comments) as
# a brief description. This used to be the default behavior. The new default is
# to treat a multi-line C++ comment block as a detailed description. Set this
# tag to YES if you prefer the old behavior instead.
#
# Note that setting this tag to YES also means that rational rose comments are
# not recognized any more.
# The default value is: NO.
MULTILINE_CPP_IS_BRIEF = NO
# If the INHERIT_DOCS tag is set to YES then an undocumented member inherits the
# documentation from any documented member that it re-implements.
# The default value is: YES.
INHERIT_DOCS = YES
# If the SEPARATE_MEMBER_PAGES tag is set to YES then doxygen will produce a new
# page for each member. If set to NO, the documentation of a member will be part
# of the file/class/namespace that contains it.
# The default value is: NO.
SEPARATE_MEMBER_PAGES = NO
# The TAB_SIZE tag can be used to set the number of spaces in a tab. Doxygen
# uses this value to replace tabs by spaces in code fragments.
# Minimum value: 1, maximum value: 16, default value: 4.
TAB_SIZE = 8
# This tag can be used to specify a number of aliases that act as commands in
# the documentation. An alias has the form:
# name=value
# For example adding
# "sideeffect=@par Side Effects:\n"
# will allow you to put the command \sideeffect (or @sideeffect) in the
# documentation, which will result in a user-defined paragraph with heading
# "Side Effects:". You can put \n's in the value part of an alias to insert
# newlines.
ALIASES =
# This tag can be used to specify a number of word-keyword mappings (TCL only).
# A mapping has the form "name=value". For example adding "class=itcl::class"
# will allow you to use the command class in the itcl::class meaning.
TCL_SUBST =
# Set the OPTIMIZE_OUTPUT_FOR_C tag to YES if your project consists of C sources
# only. Doxygen will then generate output that is more tailored for C. For
# instance, some of the names that are used will be different. The list of all
# members will be omitted, etc.
# The default value is: NO.
OPTIMIZE_OUTPUT_FOR_C = NO
# Set the OPTIMIZE_OUTPUT_JAVA tag to YES if your project consists of Java or
# Python sources only. Doxygen will then generate output that is more tailored
# for that language. For instance, namespaces will be presented as packages,
# qualified scopes will look different, etc.
# The default value is: NO.
OPTIMIZE_OUTPUT_JAVA = NO
# Set the OPTIMIZE_FOR_FORTRAN tag to YES if your project consists of Fortran
# sources. Doxygen will then generate output that is tailored for Fortran.
# The default value is: NO.
OPTIMIZE_FOR_FORTRAN = NO
# Set the OPTIMIZE_OUTPUT_VHDL tag to YES if your project consists of VHDL
# sources. Doxygen will then generate output that is tailored for VHDL.
# The default value is: NO.
OPTIMIZE_OUTPUT_VHDL = NO
# Doxygen selects the parser to use depending on the extension of the files it
# parses. With this tag you can assign which parser to use for a given
# extension. Doxygen has a built-in mapping, but you can override or extend it
# using this tag. The format is ext=language, where ext is a file extension, and
# language is one of the parsers supported by doxygen: IDL, Java, Javascript,
# C#, C, C++, D, PHP, Objective-C, Python, Fortran (fixed format Fortran:
# FortranFixed, free formatted Fortran: FortranFree, unknown formatted Fortran:
# Fortran. In the later case the parser tries to guess whether the code is fixed
# or free formatted code, this is the default for Fortran type files), VHDL. For
# instance to make doxygen treat .inc files as Fortran files (default is PHP),
# and .f files as C (default is Fortran), use: inc=Fortran f=C.
#
# Note: For files without extension you can use no_extension as a placeholder.
#
# Note that for custom extensions you also need to set FILE_PATTERNS otherwise
# the files are not read by doxygen.
EXTENSION_MAPPING =
# If the MARKDOWN_SUPPORT tag is enabled then doxygen pre-processes all comments
# according to the Markdown format, which allows for more readable
# documentation. See http://daringfireball.net/projects/markdown/ for details.
# The output of markdown processing is further processed by doxygen, so you can
# mix doxygen, HTML, and XML commands with Markdown formatting. Disable only in
# case of backward compatibilities issues.
# The default value is: YES.
MARKDOWN_SUPPORT = YES
# When enabled doxygen tries to link words that correspond to documented
# classes, or namespaces to their corresponding documentation. Such a link can
# be prevented in individual cases by putting a % sign in front of the word or
# globally by setting AUTOLINK_SUPPORT to NO.
# The default value is: YES.
AUTOLINK_SUPPORT = YES
# If you use STL classes (i.e. std::string, std::vector, etc.) but do not want
# to include (a tag file for) the STL sources as input, then you should set this
# tag to YES in order to let doxygen match functions declarations and
# definitions whose arguments contain STL classes (e.g. func(std::string);
# versus func(std::string) {}). This also make the inheritance and collaboration
# diagrams that involve STL classes more complete and accurate.
# The default value is: NO.
BUILTIN_STL_SUPPORT = NO
# If you use Microsoft's C++/CLI language, you should set this option to YES to
# enable parsing support.
# The default value is: NO.
CPP_CLI_SUPPORT = NO
# Set the SIP_SUPPORT tag to YES if your project consists of sip (see:
# http://www.riverbankcomputing.co.uk/software/sip/intro) sources only. Doxygen
# will parse them like normal C++ but will assume all classes use public instead
# of private inheritance when no explicit protection keyword is present.
# The default value is: NO.
SIP_SUPPORT = NO
# For Microsoft's IDL there are propget and propput attributes to indicate
# getter and setter methods for a property. Setting this option to YES will make
# doxygen to replace the get and set methods by a property in the documentation.
# This will only work if the methods are indeed getting or setting a simple
# type. If this is not the case, or you want to show the methods anyway, you
# should set this option to NO.
# The default value is: YES.
IDL_PROPERTY_SUPPORT = YES
# If member grouping is used in the documentation and the DISTRIBUTE_GROUP_DOC
# tag is set to YES then doxygen will reuse the documentation of the first
# member in the group (if any) for the other members of the group. By default
# all members of a group must be documented explicitly.
# The default value is: NO.
DISTRIBUTE_GROUP_DOC = NO
# If one adds a struct or class to a group and this option is enabled, then also
# any nested class or struct is added to the same group. By default this option
# is disabled and one has to add nested compounds explicitly via \ingroup.
# The default value is: NO.
GROUP_NESTED_COMPOUNDS = NO
# Set the SUBGROUPING tag to YES to allow class member groups of the same type
# (for instance a group of public functions) to be put as a subgroup of that
# type (e.g. under the Public Functions section). Set it to NO to prevent
# subgrouping. Alternatively, this can be done per class using the
# \nosubgrouping command.
# The default value is: YES.
SUBGROUPING = YES
# When the INLINE_GROUPED_CLASSES tag is set to YES, classes, structs and unions
# are shown inside the group in which they are included (e.g. using \ingroup)
# instead of on a separate page (for HTML and Man pages) or section (for LaTeX
# and RTF).
#
# Note that this feature does not work in combination with
# SEPARATE_MEMBER_PAGES.
# The default value is: NO.
INLINE_GROUPED_CLASSES = NO
# When the INLINE_SIMPLE_STRUCTS tag is set to YES, structs, classes, and unions
# with only public data fields or simple typedef fields will be shown inline in
# the documentation of the scope in which they are defined (i.e. file,
# namespace, or group documentation), provided this scope is documented. If set
# to NO, structs, classes, and unions are shown on a separate page (for HTML and
# Man pages) or section (for LaTeX and RTF).
# The default value is: NO.
INLINE_SIMPLE_STRUCTS = NO
# When TYPEDEF_HIDES_STRUCT tag is enabled, a typedef of a struct, union, or
# enum is documented as struct, union, or enum with the name of the typedef. So
# typedef struct TypeS {} TypeT, will appear in the documentation as a struct
# with name TypeT. When disabled the typedef will appear as a member of a file,
# namespace, or class. And the struct will be named TypeS. This can typically be
# useful for C code in case the coding convention dictates that all compound
# types are typedef'ed and only the typedef is referenced, never the tag name.
# The default value is: NO.
TYPEDEF_HIDES_STRUCT = NO
# The size of the symbol lookup cache can be set using LOOKUP_CACHE_SIZE. This
# cache is used to resolve symbols given their name and scope. Since this can be
# an expensive process and often the same symbol appears multiple times in the
# code, doxygen keeps a cache of pre-resolved symbols. If the cache is too small
# doxygen will become slower. If the cache is too large, memory is wasted. The
# cache size is given by this formula: 2^(16+LOOKUP_CACHE_SIZE). The valid range
# is 0..9, the default is 0, corresponding to a cache size of 2^16=65536
# symbols. At the end of a run doxygen will report the cache usage and suggest
# the optimal cache size from a speed point of view.
# Minimum value: 0, maximum value: 9, default value: 0.
LOOKUP_CACHE_SIZE = 0
#---------------------------------------------------------------------------
# Build related configuration options
#---------------------------------------------------------------------------
# If the EXTRACT_ALL tag is set to YES, doxygen will assume all entities in
# documentation are documented, even if no documentation was available. Private
# class members and static file members will be hidden unless the
# EXTRACT_PRIVATE respectively EXTRACT_STATIC tags are set to YES.
# Note: This will also disable the warnings about undocumented members that are
# normally produced when WARNINGS is set to YES.
# The default value is: NO.
EXTRACT_ALL = NO
# If the EXTRACT_PRIVATE tag is set to YES, all private members of a class will
# be included in the documentation.
# The default value is: NO.
EXTRACT_PRIVATE = NO
# If the EXTRACT_PACKAGE tag is set to YES, all members with package or internal
# scope will be included in the documentation.
# The default value is: NO.
EXTRACT_PACKAGE = NO
# If the EXTRACT_STATIC tag is set to YES, all static members of a file will be
# included in the documentation.
# The default value is: NO.
EXTRACT_STATIC = NO
# If the EXTRACT_LOCAL_CLASSES tag is set to YES, classes (and structs) defined
# locally in source files will be included in the documentation. If set to NO,
# only classes defined in header files are included. Does not have any effect
# for Java sources.
# The default value is: YES.
EXTRACT_LOCAL_CLASSES = YES
# This flag is only useful for Objective-C code. If set to YES, local methods,
# which are defined in the implementation section but not in the interface are
# included in the documentation. If set to NO, only methods in the interface are
# included.
# The default value is: NO.
EXTRACT_LOCAL_METHODS = NO
# If this flag is set to YES, the members of anonymous namespaces will be
# extracted and appear in the documentation as a namespace called
# 'anonymous_namespace{file}', where file will be replaced with the base name of
# the file that contains the anonymous namespace. By default anonymous namespace
# are hidden.
# The default value is: NO.
EXTRACT_ANON_NSPACES = NO
# If the HIDE_UNDOC_MEMBERS tag is set to YES, doxygen will hide all
# undocumented members inside documented classes or files. If set to NO these
# members will be included in the various overviews, but no documentation
# section is generated. This option has no effect if EXTRACT_ALL is enabled.
# The default value is: NO.
HIDE_UNDOC_MEMBERS = NO
# If the HIDE_UNDOC_CLASSES tag is set to YES, doxygen will hide all
# undocumented classes that are normally visible in the class hierarchy. If set
# to NO, these classes will be included in the various overviews. This option
# has no effect if EXTRACT_ALL is enabled.
# The default value is: NO.
HIDE_UNDOC_CLASSES = NO
# If the HIDE_FRIEND_COMPOUNDS tag is set to YES, doxygen will hide all friend
# (class|struct|union) declarations. If set to NO, these declarations will be
# included in the documentation.
# The default value is: NO.
HIDE_FRIEND_COMPOUNDS = NO
# If the HIDE_IN_BODY_DOCS tag is set to YES, doxygen will hide any
# documentation blocks found inside the body of a function. If set to NO, these
# blocks will be appended to the function's detailed documentation block.
# The default value is: NO.
HIDE_IN_BODY_DOCS = NO
# The INTERNAL_DOCS tag determines if documentation that is typed after a
# \internal command is included. If the tag is set to NO then the documentation
# will be excluded. Set it to YES to include the internal documentation.
# The default value is: NO.
INTERNAL_DOCS = NO
# If the CASE_SENSE_NAMES tag is set to NO then doxygen will only generate file
# names in lower-case letters. If set to YES, upper-case letters are also
# allowed. This is useful if you have classes or files whose names only differ
# in case and if your file system supports case sensitive file names. Windows
# and Mac users are advised to set this option to NO.
# The default value is: system dependent.
CASE_SENSE_NAMES = YES
# If the HIDE_SCOPE_NAMES tag is set to NO then doxygen will show members with
# their full class and namespace scopes in the documentation. If set to YES, the
# scope will be hidden.
# The default value is: NO.
HIDE_SCOPE_NAMES = NO
# If the HIDE_COMPOUND_REFERENCE tag is set to NO (default) then doxygen will
# append additional text to a page's title, such as Class Reference. If set to
# YES the compound reference will be hidden.
# The default value is: NO.
HIDE_COMPOUND_REFERENCE= NO
# If the SHOW_INCLUDE_FILES tag is set to YES then doxygen will put a list of
# the files that are included by a file in the documentation of that file.
# The default value is: YES.
SHOW_INCLUDE_FILES = YES
# If the SHOW_GROUPED_MEMB_INC tag is set to YES then Doxygen will add for each
# grouped member an include statement to the documentation, telling the reader
# which file to include in order to use the member.
# The default value is: NO.
SHOW_GROUPED_MEMB_INC = NO
# If the FORCE_LOCAL_INCLUDES tag is set to YES then doxygen will list include
# files with double quotes in the documentation rather than with sharp brackets.
# The default value is: NO.
FORCE_LOCAL_INCLUDES = NO
# If the INLINE_INFO tag is set to YES then a tag [inline] is inserted in the
# documentation for inline members.
# The default value is: YES.
INLINE_INFO = YES
# If the SORT_MEMBER_DOCS tag is set to YES then doxygen will sort the
# (detailed) documentation of file and class members alphabetically by member
# name. If set to NO, the members will appear in declaration order.
# The default value is: YES.
SORT_MEMBER_DOCS = YES
# If the SORT_BRIEF_DOCS tag is set to YES then doxygen will sort the brief
# descriptions of file, namespace and class members alphabetically by member
# name. If set to NO, the members will appear in declaration order. Note that
# this will also influence the order of the classes in the class list.
# The default value is: NO.
SORT_BRIEF_DOCS = NO
# If the SORT_MEMBERS_CTORS_1ST tag is set to YES then doxygen will sort the
# (brief and detailed) documentation of class members so that constructors and
# destructors are listed first. If set to NO the constructors will appear in the
# respective orders defined by SORT_BRIEF_DOCS and SORT_MEMBER_DOCS.
# Note: If SORT_BRIEF_DOCS is set to NO this option is ignored for sorting brief
# member documentation.
# Note: If SORT_MEMBER_DOCS is set to NO this option is ignored for sorting
# detailed member documentation.
# The default value is: NO.
SORT_MEMBERS_CTORS_1ST = NO
# If the SORT_GROUP_NAMES tag is set to YES then doxygen will sort the hierarchy
# of group names into alphabetical order. If set to NO the group names will
# appear in their defined order.
# The default value is: NO.
SORT_GROUP_NAMES = NO
# If the SORT_BY_SCOPE_NAME tag is set to YES, the class list will be sorted by
# fully-qualified names, including namespaces. If set to NO, the class list will
# be sorted only by class name, not including the namespace part.
# Note: This option is not very useful if HIDE_SCOPE_NAMES is set to YES.
# Note: This option applies only to the class list, not to the alphabetical
# list.
# The default value is: NO.
SORT_BY_SCOPE_NAME = NO
# If the STRICT_PROTO_MATCHING option is enabled and doxygen fails to do proper
# type resolution of all parameters of a function it will reject a match between
# the prototype and the implementation of a member function even if there is
# only one candidate or it is obvious which candidate to choose by doing a
# simple string match. By disabling STRICT_PROTO_MATCHING doxygen will still
# accept a match between prototype and implementation in such cases.
# The default value is: NO.
STRICT_PROTO_MATCHING = NO
# The GENERATE_TODOLIST tag can be used to enable (YES) or disable (NO) the todo
# list. This list is created by putting \todo commands in the documentation.
# The default value is: YES.
GENERATE_TODOLIST = YES
# The GENERATE_TESTLIST tag can be used to enable (YES) or disable (NO) the test
# list. This list is created by putting \test commands in the documentation.
# The default value is: YES.
GENERATE_TESTLIST = YES
# The GENERATE_BUGLIST tag can be used to enable (YES) or disable (NO) the bug
# list. This list is created by putting \bug commands in the documentation.
# The default value is: YES.
GENERATE_BUGLIST = YES
# The GENERATE_DEPRECATEDLIST tag can be used to enable (YES) or disable (NO)
# the deprecated list. This list is created by putting \deprecated commands in
# the documentation.
# The default value is: YES.
GENERATE_DEPRECATEDLIST= YES
# The ENABLED_SECTIONS tag can be used to enable conditional documentation
# sections, marked by \if <section_label> ... \endif and \cond <section_label>
# ... \endcond blocks.
ENABLED_SECTIONS =
# The MAX_INITIALIZER_LINES tag determines the maximum number of lines that the
# initial value of a variable or macro / define can have for it to appear in the
# documentation. If the initializer consists of more lines than specified here
# it will be hidden. Use a value of 0 to hide initializers completely. The
# appearance of the value of individual variables and macros / defines can be
# controlled using \showinitializer or \hideinitializer command in the
# documentation regardless of this setting.
# Minimum value: 0, maximum value: 10000, default value: 30.
MAX_INITIALIZER_LINES = 30
# Set the SHOW_USED_FILES tag to NO to disable the list of files generated at
# the bottom of the documentation of classes and structs. If set to YES, the
# list will mention the files that were used to generate the documentation.
# The default value is: YES.
SHOW_USED_FILES = YES
# Set the SHOW_FILES tag to NO to disable the generation of the Files page. This
# will remove the Files entry from the Quick Index and from the Folder Tree View
# (if specified).
# The default value is: YES.
SHOW_FILES = YES
# Set the SHOW_NAMESPACES tag to NO to disable the generation of the Namespaces
# page. This will remove the Namespaces entry from the Quick Index and from the
# Folder Tree View (if specified).
# The default value is: YES.
SHOW_NAMESPACES = YES
# The FILE_VERSION_FILTER tag can be used to specify a program or script that
# doxygen should invoke to get the current version for each file (typically from
# the version control system). Doxygen will invoke the program by executing (via
# popen()) the command <command> <input-file>, where <command> is the value of the
# FILE_VERSION_FILTER tag, and <input-file> is the name of an input file provided
# by doxygen. Whatever the program writes to standard output is used as the file
# version. For an example see the documentation.
FILE_VERSION_FILTER =
# The LAYOUT_FILE tag can be used to specify a layout file which will be parsed
# by doxygen. The layout file controls the global structure of the generated
# output files in an output format independent way. To create the layout file
# that represents doxygen's defaults, run doxygen with the -l option. You can
# optionally specify a file name after the option, if omitted DoxygenLayout.xml
# will be used as the name of the layout file.
#
# Note that if you run doxygen from a directory containing a file called
# DoxygenLayout.xml, doxygen will parse it automatically even if the LAYOUT_FILE
# tag is left empty.
LAYOUT_FILE =
# The CITE_BIB_FILES tag can be used to specify one or more bib files containing
# the reference definitions. This must be a list of .bib files. The .bib
# extension is automatically appended if omitted. This requires the bibtex tool
# to be installed. See also http://en.wikipedia.org/wiki/BibTeX for more info.
# For LaTeX the style of the bibliography can be controlled using
# LATEX_BIB_STYLE. To use this feature you need bibtex and perl available in the
# search path. See also \cite for info how to create references.
CITE_BIB_FILES =
#---------------------------------------------------------------------------
# Configuration options related to warning and progress messages
#---------------------------------------------------------------------------
# The QUIET tag can be used to turn on/off the messages that are generated to
# standard output by doxygen. If QUIET is set to YES this implies that the
# messages are off.
# The default value is: NO.
QUIET = NO
# The WARNINGS tag can be used to turn on/off the warning messages that are
# generated to standard error (stderr) by doxygen. If WARNINGS is set to YES
# this implies that the warnings are on.
#
# Tip: Turn warnings on while writing the documentation.
# The default value is: YES.
WARNINGS = YES
# If the WARN_IF_UNDOCUMENTED tag is set to YES then doxygen will generate
# warnings for undocumented members. If EXTRACT_ALL is set to YES then this flag
# will automatically be disabled.
# The default value is: YES.
WARN_IF_UNDOCUMENTED = YES
# If the WARN_IF_DOC_ERROR tag is set to YES, doxygen will generate warnings for
# potential errors in the documentation, such as not documenting some parameters
# in a documented function, or documenting parameters that don't exist or using
# markup commands wrongly.
# The default value is: YES.
WARN_IF_DOC_ERROR = YES
# This WARN_NO_PARAMDOC option can be enabled to get warnings for functions that
# are documented, but have no documentation for their parameters or return
# value. If set to NO, doxygen will only warn about wrong or incomplete
# parameter documentation, but not about the absence of documentation.
# The default value is: NO.
WARN_NO_PARAMDOC = NO
# If the WARN_AS_ERROR tag is set to YES then doxygen will immediately stop when
# a warning is encountered.
# The default value is: NO.
WARN_AS_ERROR = NO
# The WARN_FORMAT tag determines the format of the warning messages that doxygen
# can produce. The string should contain the $file, $line, and $text tags, which
# will be replaced by the file and line number from which the warning originated
# and the warning text. Optionally the format may contain $version, which will
# be replaced by the version of the file (if it could be obtained via
# FILE_VERSION_FILTER)
# The default value is: $file:$line: $text.
WARN_FORMAT = "$file:$line: $text"
# The WARN_LOGFILE tag can be used to specify a file to which warning and error
# messages should be written. If left blank the output is written to standard
# error (stderr).
WARN_LOGFILE =
#---------------------------------------------------------------------------
# Configuration options related to the input files
#---------------------------------------------------------------------------
# The INPUT tag is used to specify the files and/or directories that contain
# documented source files. You may enter file names like myfile.cpp or
# directories like /usr/src/myproject. Separate the files or directories with
# spaces. See also FILE_PATTERNS and EXTENSION_MAPPING
# Note: If this tag is empty the current directory is searched.
INPUT = ./
# This tag can be used to specify the character encoding of the source files
# that doxygen parses. Internally doxygen uses the UTF-8 encoding. Doxygen uses
# libiconv (or the iconv built into libc) for the transcoding. See the libiconv
# documentation (see: http://www.gnu.org/software/libiconv) for the list of
# possible encodings.
# The default value is: UTF-8.
INPUT_ENCODING = UTF-8
# If the value of the INPUT tag contains directories, you can use the
# FILE_PATTERNS tag to specify one or more wildcard patterns (like *.cpp and
# *.h) to filter out the source-files in the directories.
#
# Note that for custom extensions or not directly supported extensions you also
# need to set EXTENSION_MAPPING for the extension otherwise the files are not
# read by doxygen.
#
# If left blank the following patterns are tested:*.c, *.cc, *.cxx, *.cpp,
# *.c++, *.java, *.ii, *.ixx, *.ipp, *.i++, *.inl, *.idl, *.ddl, *.odl, *.h,
# *.hh, *.hxx, *.hpp, *.h++, *.cs, *.d, *.php, *.php4, *.php5, *.phtml, *.inc,
# *.m, *.markdown, *.md, *.mm, *.dox, *.py, *.pyw, *.f90, *.f, *.for, *.tcl,
# *.vhd, *.vhdl, *.ucf, *.qsf, *.as and *.js.
FILE_PATTERNS =
# The RECURSIVE tag can be used to specify whether or not subdirectories should
# be searched for input files as well.
# The default value is: NO.
RECURSIVE = NO
# The EXCLUDE tag can be used to specify files and/or directories that should be
# excluded from the INPUT source files. This way you can easily exclude a
# subdirectory from a directory tree whose root is specified with the INPUT tag.
#
# Note that relative paths are relative to the directory from which doxygen is
# run.
EXCLUDE =
# The EXCLUDE_SYMLINKS tag can be used to select whether or not files or
# directories that are symbolic links (a Unix file system feature) are excluded
# from the input.
# The default value is: NO.
EXCLUDE_SYMLINKS = NO
# If the value of the INPUT tag contains directories, you can use the
# EXCLUDE_PATTERNS tag to specify one or more wildcard patterns to exclude
# certain files from those directories.
#
# Note that the wildcards are matched against the file with absolute path, so to
# exclude all test directories for example use the pattern */test/*
EXCLUDE_PATTERNS =
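# Commented-out example (hypothetical directory names) that would skip test and
# vendored code anywhere in the tree:
# EXCLUDE_PATTERNS = */test/* */third_party/*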
# The EXCLUDE_SYMBOLS tag can be used to specify one or more symbol names
# (namespaces, classes, functions, etc.) that should be excluded from the
# output. The symbol name can be a fully qualified name, a word, or if the
# wildcard * is used, a substring. Examples: ANamespace, AClass,
# AClass::ANamespace, ANamespace::*Test
#
# Note that the wildcards are matched against the file with absolute path, so to
# exclude all test directories use the pattern */test/*
EXCLUDE_SYMBOLS =
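# Commented-out example (hypothetical symbol names) hiding an implementation
# namespace and every symbol ending in Impl:
# EXCLUDE_SYMBOLS = detail::* *Impl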
# The EXAMPLE_PATH tag can be used to specify one or more files or directories
# that contain example code fragments that are included (see the \include
# command).
EXAMPLE_PATH =
# If the value of the EXAMPLE_PATH tag contains directories, you can use the
# EXAMPLE_PATTERNS tag to specify one or more wildcard pattern (like *.cpp and
# *.h) to filter out the source-files in the directories. If left blank all
# files are included.
EXAMPLE_PATTERNS =
# If the EXAMPLE_RECURSIVE tag is set to YES then subdirectories will be
# searched for input files to be used with the \include or \dontinclude commands
# irrespective of the value of the RECURSIVE tag.
# The default value is: NO.
EXAMPLE_RECURSIVE = NO
# The IMAGE_PATH tag can be used to specify one or more files or directories
# that contain images that are to be included in the documentation (see the
# \image command).
IMAGE_PATH =
# The INPUT_FILTER tag can be used to specify a program that doxygen should
# invoke to filter for each input file. Doxygen will invoke the filter program
# by executing (via popen()) the command:
#
# <filter> <input-file>
#
# where <filter> is the value of the INPUT_FILTER tag, and <input-file> is the
# name of an input file. Doxygen will then use the output that the filter
# program writes to standard output. If FILTER_PATTERNS is specified, this tag
# will be ignored.
#
# Note that the filter must not add or remove lines; it is applied before the
# code is scanned, but not when the output code is generated. If lines are added
# or removed, the anchors will not be placed correctly.
#
# Note that for custom extensions or not directly supported extensions you also
# need to set EXTENSION_MAPPING for the extension otherwise the files are not
# properly processed by doxygen.
INPUT_FILTER =
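# Commented-out sketch (my_filter.sh is a hypothetical script): doxygen would
# run "./my_filter.sh <input-file>" via popen() for every input file and parse
# whatever the script prints on standard output instead of the file itself:
# INPUT_FILTER = ./my_filter.sh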
# The FILTER_PATTERNS tag can be used to specify filters on a per file pattern
# basis. Doxygen will compare the file name with each pattern and apply the
# filter if there is a match. The filters are a list of the form: pattern=filter
# (like *.cpp=my_cpp_filter). See INPUT_FILTER for further information on how
# filters are used. If the FILTER_PATTERNS tag is empty or if none of the
# patterns match the file name, INPUT_FILTER is applied.
#
# Note that for custom extensions or not directly supported extensions you also
# need to set EXTENSION_MAPPING for the extension otherwise the files are not
# properly processed by doxygen.
FILTER_PATTERNS =
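# Commented-out sketch (my_py_filter.sh is a hypothetical script) applying a
# filter to Python sources only; all other files would bypass filtering since
# INPUT_FILTER is empty here:
# FILTER_PATTERNS = *.py=./my_py_filter.sh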
# If the FILTER_SOURCE_FILES tag is set to YES, the input filter (if set using
# INPUT_FILTER) will also be used to filter the input files that are used for
# producing the source files to browse (i.e. when SOURCE_BROWSER is set to YES).
# The default value is: NO.
FILTER_SOURCE_FILES = NO
# The FILTER_SOURCE_PATTERNS tag can be used to specify source filters per file
# pattern. A pattern will override the setting for FILTER_PATTERN (if any) and
# it is also possible to disable source filtering for a specific pattern using
# *.ext= (so without naming a filter).
# This tag requires that the tag FILTER_SOURCE_FILES is set to YES.
FILTER_SOURCE_PATTERNS =
# If the USE_MDFILE_AS_MAINPAGE tag refers to the name of a markdown file that
# is part of the input, its contents will be placed on the main page
# (index.html). This can be useful if you have a project on for instance GitHub
# and want to reuse the introduction page also for the doxygen output.
USE_MDFILE_AS_MAINPAGE =
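# Commented-out example (README.md is a hypothetical file that would also have
# to be picked up via INPUT and FILE_PATTERNS):
# USE_MDFILE_AS_MAINPAGE = README.md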
#---------------------------------------------------------------------------
# Configuration options related to source browsing
#---------------------------------------------------------------------------
# If the SOURCE_BROWSER tag is set to YES then a list of source files will be
# generated. Documented entities will be cross-referenced with these sources.
#
# Note: To get rid of all source code in the generated output, make sure that
# also VERBATIM_HEADERS is set to NO.
# The default value is: NO.
SOURCE_BROWSER = YES
# Setting the INLINE_SOURCES tag to YES will include the body of functions,
# classes and enums directly into the documentation.
# The default value is: NO.
INLINE_SOURCES = NO
# Setting the STRIP_CODE_COMMENTS tag to YES will instruct doxygen to hide any
# special comment blocks from generated source code fragments. Normal C, C++ and
# Fortran comments will always remain visible.
# The default value is: YES.
STRIP_CODE_COMMENTS = YES
# If the REFERENCED_BY_RELATION tag is set to YES then for each documented
# function all documented functions referencing it will be listed.
# The default value is: NO.
REFERENCED_BY_RELATION = YES
# If the REFERENCES_RELATION tag is set to YES then for each documented function
# all documented entities called/used by that function will be listed.
# The default value is: NO.
REFERENCES_RELATION = YES
# If the REFERENCES_LINK_SOURCE tag is set to YES and SOURCE_BROWSER tag is set
# to YES then the hyperlinks from functions in REFERENCES_RELATION and
# REFERENCED_BY_RELATION lists will link to the source code. Otherwise they will
# link to the documentation.
# The default value is: YES.
REFERENCES_LINK_SOURCE = YES
# If SOURCE_TOOLTIPS is enabled (the default) then hovering a hyperlink in the
# source code will show a tooltip with additional information such as prototype,
# brief description and links to the definition and documentation. Since this
# will make the HTML file larger and loading of large files a bit slower, you
# can opt to disable this feature.
# The default value is: YES.
# This tag requires that the tag SOURCE_BROWSER is set to YES.
SOURCE_TOOLTIPS = YES
# If the USE_HTAGS tag is set to YES then the references to source code will
# point to the HTML generated by the htags(1) tool instead of doxygen built-in
# source browser. The htags tool is part of GNU's global source tagging system
# (see http://www.gnu.org/software/global/global.html). You will need version
# 4.8.6 or higher.
#
# To use it do the following:
# - Install the latest version of global
# - Enable SOURCE_BROWSER and USE_HTAGS in the config file
# - Make sure the INPUT points to the root of the source tree
# - Run doxygen as normal
#
# Doxygen will invoke htags (and that will in turn invoke gtags), so these
# tools must be available from the command line (i.e. in the search path).
#
# The result: instead of the source browser generated by doxygen, the links to
# source code will now point to the output of htags.
# The default value is: NO.
# This tag requires that the tag SOURCE_BROWSER is set to YES.
USE_HTAGS = NO
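# Commented-out sketch of the settings the steps above refer to, assuming GNU
# global/htags is installed and available on the PATH:
# SOURCE_BROWSER = YES
# USE_HTAGS      = YES
# INPUT          = ./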
# If the VERBATIM_HEADERS tag is set to YES then doxygen will generate a
# verbatim copy of the header file for each class for which an include is
# specified. Set to NO to disable this.
# See also: Section \class.
# The default value is: YES.
VERBATIM_HEADERS = YES
# If the CLANG_ASSISTED_PARSING tag is set to YES then doxygen will use the
# clang parser (see: http://clang.llvm.org/) for more accurate parsing at the
# cost of reduced performance. This can be particularly helpful with template
# rich C++ code for which doxygen's built-in parser lacks the necessary type
# information.
# Note: The availability of this option depends on whether or not doxygen was
# generated with the -Duse-libclang=ON option for CMake.
# The default value is: NO.
CLANG_ASSISTED_PARSING = NO
# If clang assisted parsing is enabled you can provide the compiler with command
# line options that you would normally use when invoking the compiler. Note that
# the include paths will already be set by doxygen for the files and directories
# specified with INPUT and INCLUDE_PATH.
# This tag requires that the tag CLANG_ASSISTED_PARSING is set to YES.
CLANG_OPTIONS =
#---------------------------------------------------------------------------
# Configuration options related to the alphabetical class index
#---------------------------------------------------------------------------
# If the ALPHABETICAL_INDEX tag is set to YES, an alphabetical index of all
# compounds will be generated. Enable this if the project contains a lot of
# classes, structs, unions or interfaces.
# The default value is: YES.
ALPHABETICAL_INDEX = YES
# The COLS_IN_ALPHA_INDEX tag can be used to specify the number of columns in
# which the alphabetical index list will be split.
# Minimum value: 1, maximum value: 20, default value: 5.
# This tag requires that the tag ALPHABETICAL_INDEX is set to YES.
COLS_IN_ALPHA_INDEX = 5
# In case all classes in a project start with a common prefix, all classes will
# be put under the same header in the alphabetical index. The IGNORE_PREFIX tag
# can be used to specify a prefix (or a list of prefixes) that should be ignored
# while generating the index headers.
# This tag requires that the tag ALPHABETICAL_INDEX is set to YES.
IGNORE_PREFIX =
#---------------------------------------------------------------------------
# Configuration options related to the HTML output
#---------------------------------------------------------------------------
# If the GENERATE_HTML tag is set to YES, doxygen will generate HTML output
# The default value is: YES.
GENERATE_HTML = YES
# The HTML_OUTPUT tag is used to specify where the HTML docs will be put. If a
# relative path is entered the value of OUTPUT_DIRECTORY will be put in front of
# it.
# The default directory is: html.
# This tag requires that the tag GENERATE_HTML is set to YES.
HTML_OUTPUT = html
# The HTML_FILE_EXTENSION tag can be used to specify the file extension for each
# generated HTML page (for example: .htm, .php, .asp).
# The default value is: .html.
# This tag requires that the tag GENERATE_HTML is set to YES.
HTML_FILE_EXTENSION = .html
# The HTML_HEADER tag can be used to specify a user-defined HTML header file for
# each generated HTML page. If the tag is left blank doxygen will generate a
# standard header.
#
# For the HTML to be valid, the header file has to include any scripts and
# style sheets that doxygen needs, and these depend on the configuration
# options used (e.g. the setting GENERATE_TREEVIEW). It is highly recommended
# to start with a default header generated by running
# doxygen -w html new_header.html new_footer.html new_stylesheet.css
# YourConfigFile
# and then modify the file new_header.html. See also section "Doxygen usage"
# for information on how to generate the default header that doxygen normally
# uses.
# Note: The header is subject to change so you typically have to regenerate the
# default header when upgrading to a newer version of doxygen. For a description
# of the possible markers and block names see the documentation.
# This tag requires that the tag GENERATE_HTML is set to YES.
HTML_HEADER =
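# Commented-out sketch of the workflow described above, assuming this file is
# saved as Doxyfile and new_header.html is the generated template to customize:
#   doxygen -w html new_header.html new_footer.html new_stylesheet.css Doxyfile
# HTML_HEADER = new_header.html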
# The HTML_FOOTER tag can be used to specify a user-defined HTML footer for each
# generated HTML page. If the tag is left blank doxygen will generate a standard
# footer. See HTML_HEADER for more information on how to generate a default
# footer and what special commands can be used inside the footer. See also
# section "Doxygen usage" for information on how to generate the default footer
# that doxygen normally uses.
# This tag requires that the tag GENERATE_HTML is set to YES.
HTML_FOOTER =
# The HTML_STYLESHEET tag can be used to specify a user-defined cascading style
# sheet that is used by each HTML page. It can be used to fine-tune the look of
# the HTML output. If left blank doxygen will generate a default style sheet.
# See also section "Doxygen usage" for information on how to generate the style
# sheet that doxygen normally uses.
# Note: It is recommended to use HTML_EXTRA_STYLESHEET instead of this tag, as
# it is more robust and this tag (HTML_STYLESHEET) will in the future become
# obsolete.
# This tag requires that the tag GENERATE_HTML is set to YES.
HTML_STYLESHEET =
# The HTML_EXTRA_STYLESHEET tag can be used to specify additional user-defined
# cascading style sheets that are included after the standard style sheets
# created by doxygen. Using this option one can overrule certain style aspects.
# This is preferred over using HTML_STYLESHEET since it does not replace the
# standard style sheet and is therefore more robust against future updates.
# Doxygen will copy the style sheet files to the output directory.
# Note: The order of the extra style sheet files is of importance (e.g. the last
# style sheet in the list overrules the setting of the previous ones in the
# list). For an example see the documentation.
# This tag requires that the tag GENERATE_HTML is set to YES.
HTML_EXTRA_STYLESHEET =
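# Commented-out example (doxygen-tweaks.css is a hypothetical file) layering a
# small tweak on top of the default style sheet instead of replacing it:
# HTML_EXTRA_STYLESHEET = doxygen-tweaks.css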
# The HTML_EXTRA_FILES tag can be used to specify one or more extra images or
# other source files which should be copied to the HTML output directory. Note
# that these files will be copied to the base HTML output directory. Use the
# $relpath^ marker in the HTML_HEADER and/or HTML_FOOTER files to load these
# files. In the HTML_STYLESHEET file, use the file name only. Also note that the
# files will be copied as-is; there are no commands or markers available.
# This tag requires that the tag GENERATE_HTML is set to YES.
HTML_EXTRA_FILES =
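# Commented-out example (mylogo.png is a hypothetical file) copying a logo into
# the HTML output; a custom header could then reference it as
# <img src="$relpath^mylogo.png"/>:
# HTML_EXTRA_FILES = mylogo.png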
# The HTML_COLORSTYLE_HUE tag controls the color of the HTML output. Doxygen
# will adjust the colors in the style sheet and background images according to
# this color. Hue is specified as an angle on a colorwheel, see
# http://en.wikipedia.org/wiki/Hue for more information. For instance the value
# 0 represents red, 60 is yellow, 120 is green, 180 is cyan, 240 is blue, 300
# purple, and 360 is red again.
# Minimum value: 0, maximum value: 359, default value: 220.
# This tag requires that the tag GENERATE_HTML is set to YES.
HTML_COLORSTYLE_HUE = 220
# The HTML_COLORSTYLE_SAT tag controls the purity (or saturation) of the colors
# in the HTML output. For a value of 0 the output will use grayscales only. A
# value of 255 will produce the most vivid colors.
# Minimum value: 0, maximum value: 255, default value: 100.
# This tag requires that the tag GENERATE_HTML is set to YES.
HTML_COLORSTYLE_SAT = 100
# The HTML_COLORSTYLE_GAMMA tag controls the gamma correction applied to the
# luminance component of the colors in the HTML output. Values below 100
# gradually make the output lighter, whereas values above 100 make the output
# darker. The value divided by 100 is the actual gamma applied, so 80 represents
# a gamma of 0.8, the value 220 represents a gamma of 2.2, and 100 does not
# change the gamma.
# Minimum value: 40, maximum value: 240, default value: 80.
# This tag requires that the tag GENERATE_HTML is set to YES.
HTML_COLORSTYLE_GAMMA = 80
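# Worked example: the value 80 used here corresponds to a gamma of 80/100 = 0.8
# (slightly lighter output), while e.g. 150 would correspond to a gamma of 1.5
# (darker output).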
# If the HTML_TIMESTAMP tag is set to YES then the footer of each generated HTML
# page will contain the date and time when the page was generated. Setting this
# to YES can help to show when doxygen was last run and thus if the
# documentation is up to date.
# The default value is: NO.
# This tag requires that the tag GENERATE_HTML is set to YES.
HTML_TIMESTAMP = NO
# If the HTML_DYNAMIC_SECTIONS tag is set to YES then the generated HTML
# documentation will contain sections that can be hidden and shown after the
# page has loaded.
# The default value is: NO.
# This tag requires that the tag GENERATE_HTML is set to YES.
HTML_DYNAMIC_SECTIONS = NO
# With HTML_INDEX_NUM_ENTRIES one can control the preferred number of entries
# shown in the various tree structured indices initially; the user can expand
# and collapse entries dynamically later on. Doxygen will expand the tree to
# such a level that at most the specified number of entries are visible (unless
# a fully collapsed tree already exceeds this amount). So setting the number of
# entries to 1 will produce a fully collapsed tree by default. 0 is a special
# value representing an infinite number of entries and will result in a fully
# expanded tree by default.
# Minimum value: 0, maximum value: 9999, default value: 100.
# This tag requires that the tag GENERATE_HTML is set to YES.
HTML_INDEX_NUM_ENTRIES = 100
# If the GENERATE_DOCSET tag is set to YES, additional index files will be
# generated that can be used as input for Apple's Xcode 3 integrated development
# environment (see: http://developer.apple.com/tools/xcode/), introduced with
# OSX 10.5 (Leopard). To create a documentation set, doxygen will generate a
# Makefile in the HTML output directory. Running make will produce the docset in
# that directory and running make install will install the docset in
# ~/Library/Developer/Shared/Documentation/DocSets so that Xcode will find it at
# startup. See http://developer.apple.com/tools/creatingdocsetswithdoxygen.html
# for more information.
# The default value is: NO.
# This tag requires that the tag GENERATE_HTML is set to YES.
GENERATE_DOCSET = NO
# This tag determines the name of the docset feed. A documentation feed provides
# an umbrella under which multiple documentation sets from a single provider
# (such as a company or product suite) can be grouped.
# The default value is: Doxygen generated docs.
# This tag requires that the tag GENERATE_DOCSET is set to YES.
DOCSET_FEEDNAME = "Doxygen generated docs"
# This tag specifies a string that should uniquely identify the documentation
# set bundle. This should be a reverse domain-name style string, e.g.
# com.mycompany.MyDocSet. Doxygen will append .docset to the name.
# The default value is: org.doxygen.Project.
# This tag requires that the tag GENERATE_DOCSET is set to YES.
DOCSET_BUNDLE_ID = org.doxygen.Project
# The DOCSET_PUBLISHER_ID tag specifies a string that should uniquely identify
# the documentation publisher. This should be a reverse domain-name style
# string, e.g. com.mycompany.MyDocSet.documentation.
# The default value is: org.doxygen.Publisher.
# This tag requires that the tag GENERATE_DOCSET is set to YES.
DOCSET_PUBLISHER_ID = org.doxygen.Publisher
# The DOCSET_PUBLISHER_NAME tag identifies the documentation publisher.
# The default value is: Publisher.
# This tag requires that the tag GENERATE_DOCSET is set to YES.
DOCSET_PUBLISHER_NAME = Publisher
# If the GENERATE_HTMLHELP tag is set to YES then doxygen generates three
# additional HTML index files: index.hhp, index.hhc, and index.hhk. The
# index.hhp is a project file that can be read by Microsoft's HTML Help Workshop
# (see: http://www.microsoft.com/en-us/download/details.aspx?id=21138) on
# Windows.
#
# The HTML Help Workshop contains a compiler that can convert all HTML output
# generated by doxygen into a single compiled HTML file (.chm). Compiled HTML
# files are now used as the Windows 98 help format, and will replace the old
# Windows help format (.hlp) on all Windows platforms in the future. Compressed
# HTML files also contain an index, a table of contents, and you can search for
# words in the documentation. The HTML workshop also contains a viewer for
# compressed HTML files.
# The default value is: NO.
# This tag requires that the tag GENERATE_HTML is set to YES.
GENERATE_HTMLHELP = NO
# The CHM_FILE tag can be used to specify the file name of the resulting .chm
# file. You can add a path in front of the file if the result should not be
# written to the html output directory.
# This tag requires that the tag GENERATE_HTMLHELP is set to YES.
CHM_FILE =
# The HHC_LOCATION tag can be used to specify the location (absolute path
# including file name) of the HTML help compiler (hhc.exe). If non-empty,
# doxygen will try to run the HTML help compiler on the generated index.hhp.
# The file has to be specified with full path.
# This tag requires that the tag GENERATE_HTMLHELP is set to YES.
HHC_LOCATION =
# The GENERATE_CHI flag controls if a separate .chi index file is generated
# (YES) or that it should be included in the master .chm file (NO).
# The default value is: NO.
# This tag requires that the tag GENERATE_HTMLHELP is set to YES.
GENERATE_CHI = NO
# The CHM_INDEX_ENCODING is used to encode HtmlHelp index (hhk), content (hhc)
# and project file content.
# This tag requires that the tag GENERATE_HTMLHELP is set to YES.
CHM_INDEX_ENCODING =
# The BINARY_TOC flag controls whether a binary table of contents is generated
# (YES) or a normal table of contents (NO) in the .chm file. Furthermore it
# enables the Previous and Next buttons.
# The default value is: NO.
# This tag requires that the tag GENERATE_HTMLHELP is set to YES.
BINARY_TOC = NO
# The TOC_EXPAND flag can be set to YES to add extra items for group members to
# the table of contents of the HTML help documentation and to the tree view.
# The default value is: NO.
# This tag requires that the tag GENERATE_HTMLHELP is set to YES.
TOC_EXPAND = NO
# If the GENERATE_QHP tag is set to YES and both QHP_NAMESPACE and
# QHP_VIRTUAL_FOLDER are set, an additional index file will be generated that
# can be used as input for Qt's qhelpgenerator to generate a Qt Compressed Help
# (.qch) of the generated HTML documentation.
# The default value is: NO.
# This tag requires that the tag GENERATE_HTML is set to YES.
GENERATE_QHP = NO
# If the QHG_LOCATION tag is specified, the QCH_FILE tag can be used to specify
# the file name of the resulting .qch file. The path specified is relative to
# the HTML output folder.
# This tag requires that the tag GENERATE_QHP is set to YES.
QCH_FILE =
# The QHP_NAMESPACE tag specifies the namespace to use when generating Qt Help
# Project output. For more information please see Qt Help Project / Namespace
# (see: http://qt-project.org/doc/qt-4.8/qthelpproject.html#namespace).
# The default value is: org.doxygen.Project.
# This tag requires that the tag GENERATE_QHP is set to YES.
QHP_NAMESPACE = org.doxygen.Project
# The QHP_VIRTUAL_FOLDER tag specifies the namespace to use when generating Qt
# Help Project output. For more information please see Qt Help Project / Virtual
# Folders (see: http://qt-project.org/doc/qt-4.8/qthelpproject.html#virtual-
# folders).
# The default value is: doc.
# This tag requires that the tag GENERATE_QHP is set to YES.
QHP_VIRTUAL_FOLDER = doc
# If the QHP_CUST_FILTER_NAME tag is set, it specifies the name of a custom
# filter to add. For more information please see Qt Help Project / Custom
# Filters (see: http://qt-project.org/doc/qt-4.8/qthelpproject.html#custom-
# filters).
# This tag requires that the tag GENERATE_QHP is set to YES.
QHP_CUST_FILTER_NAME =
# The QHP_CUST_FILTER_ATTRS tag specifies the list of the attributes of the
# custom filter to add. For more information please see Qt Help Project / Custom
# Filters (see: http://qt-project.org/doc/qt-4.8/qthelpproject.html#custom-
# filters).
# This tag requires that the tag GENERATE_QHP is set to YES.
QHP_CUST_FILTER_ATTRS =
# The QHP_SECT_FILTER_ATTRS tag specifies the list of the attributes this
# project's filter section matches. Qt Help Project / Filter Attributes (see:
# http://qt-project.org/doc/qt-4.8/qthelpproject.html#filter-attributes).
# This tag requires that the tag GENERATE_QHP is set to YES.
QHP_SECT_FILTER_ATTRS =
# The QHG_LOCATION tag can be used to specify the location of Qt's
# qhelpgenerator. If non-empty doxygen will try to run qhelpgenerator on the
# generated .qhp file.
# This tag requires that the tag GENERATE_QHP is set to YES.
QHG_LOCATION =
# If the GENERATE_ECLIPSEHELP tag is set to YES, additional index files will be
# generated, together with the HTML files, they form an Eclipse help plugin. To
# install this plugin and make it available under the help contents menu in
# Eclipse, the contents of the directory containing the HTML and XML files needs
# to be copied into the plugins directory of eclipse. The name of the directory
# within the plugins directory should be the same as the ECLIPSE_DOC_ID value.
# After copying Eclipse needs to be restarted before the help appears.
# The default value is: NO.
# This tag requires that the tag GENERATE_HTML is set to YES.
GENERATE_ECLIPSEHELP = NO
# A unique identifier for the Eclipse help plugin. When installing the plugin
# the directory name containing the HTML and XML files should also have this
# name. Each documentation set should have its own identifier.
# The default value is: org.doxygen.Project.
# This tag requires that the tag GENERATE_ECLIPSEHELP is set to YES.
ECLIPSE_DOC_ID = org.doxygen.Project
# If you want full control over the layout of the generated HTML pages it might
# be necessary to disable the index and replace it with your own. The
# DISABLE_INDEX tag can be used to turn on/off the condensed index (tabs) at top
# of each HTML page. A value of NO enables the index and the value YES disables
# it. Since the tabs in the index contain the same information as the navigation
# tree, you can set this option to YES if you also set GENERATE_TREEVIEW to YES.
# The default value is: NO.
# This tag requires that the tag GENERATE_HTML is set to YES.
DISABLE_INDEX = NO
# The GENERATE_TREEVIEW tag is used to specify whether a tree-like index
# structure should be generated to display hierarchical information. If the tag
# value is set to YES, a side panel will be generated containing a tree-like
# index structure (just like the one that is generated for HTML Help). For this
# to work a browser that supports JavaScript, DHTML, CSS and frames is required
# (i.e. any modern browser). Windows users are probably better off using the
# HTML help feature. Via custom style sheets (see HTML_EXTRA_STYLESHEET) one can
# further fine-tune the look of the index. As an example, the default style
# sheet generated by doxygen has an example that shows how to put an image at
# the root of the tree instead of the PROJECT_NAME. Since the tree basically has
# the same information as the tab index, you could consider setting
# DISABLE_INDEX to YES when enabling this option.
# The default value is: NO.
# This tag requires that the tag GENERATE_HTML is set to YES.
GENERATE_TREEVIEW = NO
# The ENUM_VALUES_PER_LINE tag can be used to set the number of enum values that
# doxygen will group on one line in the generated HTML documentation.
#
# Note that a value of 0 will completely suppress the enum values from appearing
# in the overview section.
# Minimum value: 0, maximum value: 20, default value: 4.
# This tag requires that the tag GENERATE_HTML is set to YES.
ENUM_VALUES_PER_LINE = 4
# If the treeview is enabled (see GENERATE_TREEVIEW) then this tag can be used
# to set the initial width (in pixels) of the frame in which the tree is shown.
# Minimum value: 0, maximum value: 1500, default value: 250.
# This tag requires that the tag GENERATE_HTML is set to YES.
TREEVIEW_WIDTH = 250
# If the EXT_LINKS_IN_WINDOW option is set to YES, doxygen will open links to
# external symbols imported via tag files in a separate window.
# The default value is: NO.
# This tag requires that the tag GENERATE_HTML is set to YES.
EXT_LINKS_IN_WINDOW = NO
# Use this tag to change the font size of LaTeX formulas included as images in
# the HTML documentation. When you change the font size after a successful
# doxygen run you need to manually remove any form_*.png images from the HTML
# output directory to force them to be regenerated.
# Minimum value: 8, maximum value: 50, default value: 10.
# This tag requires that the tag GENERATE_HTML is set to YES.
FORMULA_FONTSIZE = 10
# Use the FORMULA_TRANSPARENT tag to determine whether or not the images
# generated for formulas are transparent PNGs. Transparent PNGs are not
# supported properly for IE 6.0, but are supported on all modern browsers.
#
# Note that when changing this option you need to delete any form_*.png files in
# the HTML output directory before the changes have effect.
# The default value is: YES.
# This tag requires that the tag GENERATE_HTML is set to YES.
FORMULA_TRANSPARENT = YES
# Enable the USE_MATHJAX option to render LaTeX formulas using MathJax (see
# http://www.mathjax.org) which uses client side Javascript for the rendering
# instead of using pre-rendered bitmaps. Use this if you do not have LaTeX
# installed or if you want the formulas to look prettier in the HTML output. When
# enabled you may also need to install MathJax separately and configure the path
# to it using the MATHJAX_RELPATH option.
# The default value is: NO.
# This tag requires that the tag GENERATE_HTML is set to YES.
USE_MATHJAX = NO
# When MathJax is enabled you can set the default output format to be used for
# the MathJax output. See the MathJax site (see:
# http://docs.mathjax.org/en/latest/output.html) for more details.
# Possible values are: HTML-CSS (which is slower, but has the best
# compatibility), NativeMML (i.e. MathML) and SVG.
# The default value is: HTML-CSS.
# This tag requires that the tag USE_MATHJAX is set to YES.
MATHJAX_FORMAT = HTML-CSS
# When MathJax is enabled you need to specify the location relative to the HTML
# output directory using the MATHJAX_RELPATH option. The destination directory
# should contain the MathJax.js script. For instance, if the mathjax directory
# is located at the same level as the HTML output directory, then
# MATHJAX_RELPATH should be ../mathjax. The default value points to the MathJax
# Content Delivery Network so you can quickly see the result without installing
# MathJax. However, it is strongly recommended to install a local copy of
# MathJax from http://www.mathjax.org before deployment.
# The default value is: http://cdn.mathjax.org/mathjax/latest.
# This tag requires that the tag USE_MATHJAX is set to YES.
MATHJAX_RELPATH = http://cdn.mathjax.org/mathjax/latest
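# Commented-out example for a local MathJax copy installed next to the html/
# output directory, as described above:
# MATHJAX_RELPATH = ../mathjax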
# The MATHJAX_EXTENSIONS tag can be used to specify one or more MathJax
# extension names that should be enabled during MathJax rendering. For example
# MATHJAX_EXTENSIONS = TeX/AMSmath TeX/AMSsymbols
# This tag requires that the tag USE_MATHJAX is set to YES.
MATHJAX_EXTENSIONS =
# The MATHJAX_CODEFILE tag can be used to specify a file with javascript pieces
# of code that will be used on startup of the MathJax code. See the MathJax site
# (see: http://docs.mathjax.org/en/latest/output.html) for more details. For an
# example see the documentation.
# This tag requires that the tag USE_MATHJAX is set to YES.
MATHJAX_CODEFILE =
# When the SEARCHENGINE tag is enabled doxygen will generate a search box for
# the HTML output. The underlying search engine uses javascript and DHTML and
# should work on any modern browser. Note that when using HTML help
# (GENERATE_HTMLHELP), Qt help (GENERATE_QHP), or docsets (GENERATE_DOCSET)
# there is already a search function so this one should typically be disabled.
# For large projects the javascript based search engine can be slow; if so,
# enabling SERVER_BASED_SEARCH may provide a better solution. It is possible to
# search using the keyboard; to jump to the search box use <access key> + S
# (what the <access key> is depends on the OS and browser, but it is typically
# <CTRL>, <ALT>/<option>