sleuthkit-4.11.1/000755 000765 000024 00000000000 14137073563 014415 5ustar00carrierstaff000000 000000 sleuthkit-4.11.1/man/000755 000765 000024 00000000000 14137073563 015170 5ustar00carrierstaff000000 000000 sleuthkit-4.11.1/configure.ac000644 000765 000024 00000027131 14137073414 016702 0ustar00carrierstaff000000 000000 dnl -*- Autoconf -*- dnl Process this file with autoconf to produce a configure script. AC_PREREQ(2.59) AC_INIT(sleuthkit, 4.11.1) m4_include([m4/ax_pthread.m4]) dnl include the version from 1.12.1. This will work for m4_include([m4/cppunit.m4]) m4_include([m4/ax_jni_include_dir.m4]) m4_include([m4/ac_prog_javac_works.m4]) m4_include([m4/ac_prog_javac.m4]) m4_include([m4/ac_prog_java_works.m4]) m4_include([m4/ac_prog_java.m4]) m4_include([m4/ax_cxx_compile_stdcxx.m4]) AC_CONFIG_SRCDIR([tsk/base/tsk_base.h]) AC_CONFIG_HEADERS([tsk/tsk_config.h]) AC_CONFIG_AUX_DIR(config) AM_INIT_AUTOMAKE([foreign tar-ustar]) AM_PATH_CPPUNIT(1.12.1) AM_CONDITIONAL([CPPUNIT],[test "x$no_cppunit" = x]) AM_PROG_LIBTOOL AM_MAINTAINER_MODE AC_CONFIG_MACRO_DIR([m4]) dnl Checks for programs. AC_PROG_CXX AX_CXX_COMPILE_STDCXX([14], [noext], [mandatory]) AC_PROG_CC AC_PROG_CPP AC_PROG_INSTALL AC_PROG_LN_S AC_PROG_MAKE_SET AC_PATH_PROG(PERL, perl) TSK_CHECK_PROG_PKGCONFIG dnl Checks for header files. AC_HEADER_STDC dnl AC_HEADER_MAJOR dnl AC_HEADER_SYS_WAIT dnl AC_CHECK_HEADERS([fcntl.h inttypes.h limits.h locale.h memory.h netinet/in.h stdint.h stdlib.h string.h sys/ioctl.h sys/param.h sys/time.h unistd.h utime.h wchar.h wctype.h]) AC_CHECK_HEADERS([err.h inttypes.h unistd.h stdint.h sys/param.h sys/resource.h]) dnl Checks for typedefs, structures, and compiler characteristics. AC_HEADER_STDBOOL AC_C_CONST AC_TYPE_UID_T AC_TYPE_MODE_T AC_TYPE_OFF_T AC_TYPE_SIZE_T dnl AC_CHECK_MEMBERS([struct stat.st_rdev]) dnl AC_HEADER_TIME dnl AC_STRUCT_TM dnl check for large file support AC_SYS_LARGEFILE dnl Checks for library functions. 
AC_FUNC_ALLOCA AC_FUNC_ERROR_AT_LINE dnl AC_FUNC_FORK AC_FUNC_FSEEKO AC_PROG_GCC_TRADITIONAL AC_FUNC_LSTAT AC_FUNC_LSTAT_FOLLOWS_SLASHED_SYMLINK dnl AC_FUNC_MALLOC dnl AC_FUNC_MBRTOWC dnl AC_FUNC_MEMCMP dnl AC_FUNC_MKTIME dnl AC_FUNC_MMAP dnl AC_FUNC_REALLOC AC_FUNC_SELECT_ARGTYPES dnl AC_FUNC_STAT AC_FUNC_UTIME_NULL AC_FUNC_VPRINTF dnl AC_CHECK_FUNCS([dup2 gethostname isascii iswprint memset munmap regcomp select setlocale strcasecmp strchr strdup strerror strndup strrchr strtol strtoul strtoull utime wcwidth]) AC_CHECK_FUNCS([ishexnumber err errx warn warnx vasprintf getrusage]) AC_CHECK_FUNCS([strlcpy strlcat]) AX_PTHREAD([ AC_DEFINE(HAVE_PTHREAD,1,[Define if you have POSIX threads libraries and header files.]) CLIBS="$PTHREAD_LIBS $LIBS" CPPFLAGS="$CPPFLAGS $PTHREAD_CFLAGS" LDFLAGS="$LDFLAGS $PTHREAD_CFLAGS" CC="$PTHREAD_CC"],[]) dnl Permit single-threaded builds AC_ARG_ENABLE([multithreading], [AS_HELP_STRING([--disable-multithreading], [Build without multithreading support])]) dnl Enable multithreading by default in the presence of pthread AS_IF([test "x$ax_pthread_ok" = "xyes" && test "x$enable_multithreading" != "xno"], [ax_multithread=yes], [ax_multithread=no]) case "$host" in *-*-mingw*) dnl Adding the native /usr/local is wrong for cross-compiling ;; *) dnl Not all compilers include /usr/local in the include and link path if test -d /usr/local/include; then CPPFLAGS="$CPPFLAGS -I/usr/local/include" LDFLAGS="$LDFLAGS -L/usr/local/lib" fi ;; esac dnl Add enable/disable option AC_ARG_ENABLE([java], [AS_HELP_STRING([--disable-java], [Do not build the java bindings or jar file])]) dnl Checks for libraries. dnl Some platforms will complain about missing included functions if libstdc++ is not included. 
AC_CHECK_LIB(stdc++, main, , AC_MSG_ERROR([missing libstdc++])) AC_CHECK_HEADERS(list, , , AC_MSG_ERROR([missing STL list class header])) AC_CHECK_HEADERS(map, , , AC_MSG_ERROR([missing STL map class header])) AC_CHECK_HEADERS(queue, , , AC_MSG_ERROR([missing STL queue class header])) AC_CHECK_HEADERS(set, , , AC_MSG_ERROR([missing STL set class header])) AC_CHECK_HEADERS(stack, , , AC_MSG_ERROR([missing STL stack class header])) AC_CHECK_HEADERS(streambuf, , , AC_MSG_ERROR([missing STL streambuf class header])) AC_CHECK_HEADERS(string, , , AC_MSG_ERROR([missing STL string class header])) AC_CHECK_HEADERS(vector, , , AC_MSG_ERROR([missing STL vector class header])) dnl Check for sqlite and its dependencies AS_IF([test "x$ac_cv_prog_PKGCONFIG" = "xyes"], [ SAVED_AX_PACKAGE_REQUIRES_PRIVATE="$AX_PACKAGE_REQUIRES_PRIVATE" TSK_PKG_CHECK_MODULES([SQLITE3], [], [sqlite3], [ CFLAGS="$CFLAGS $SQLITE3_CFLAGS" CXXFLAGS="$CXXFLAGS $SQLITE3_CFLAGS" LIBS="$LIBS $SQLITE3_LIBS" ], [ AX_PACKAGE_REQUIRES_PRIVATE="$SAVED_AX_PACKAGE_REQUIRES_PRIVATE" ax_sqlite3=no ] )] ) dnl needed for sqllite AC_CHECK_LIB(dl, dlopen) AC_CHECK_HEADERS([sqlite3.h], [AC_CHECK_LIB([sqlite3], [sqlite3_open])]) AS_IF([test "x$ac_cv_lib_sqlite3_sqlite3_open" = "xyes"], [ax_sqlite3=yes]) dnl Compile the bundled sqlite if there is no system one installed AC_MSG_CHECKING(which sqlite3 to use) AS_IF([test "x$ax_sqlite3" = "xyes"], [AC_MSG_RESULT([system]) PACKAGE_LIBS_PRIVATE="$PACKAGE_LIBS_PRIVATE -lsqlite3"], [AC_MSG_RESULT([bundled])]) AM_CONDITIONAL([HAVE_LIBSQLITE3], [test "x$ax_sqlite3" = "xyes"]) dnl Check if we should link with afflib TSK_OPT_DEP_CHECK([afflib], [], [], [afflib/afflib.h], [afflib], [af_open]) dnl Check if we should link with zlib TSK_OPT_DEP_CHECK([zlib], [ZLIB], [zlib], [zlib.h], [z], [inflate]) dnl Check if we should link with libewf TSK_OPT_DEP_CHECK([libewf], [EWF], [libewf], [libewf.h], [ewf], [libewf_get_version]) dnl Check if we should link with libvhdi 
TSK_OPT_DEP_CHECK([libvhdi], [VHDI], [libvhdi], [libvhdi.h], [vhdi], [libvhdi_get_version]) dnl Check if we should link with libvmdk TSK_OPT_DEP_CHECK([libvmdk], [VMDK], [libvmdk], [libvmdk.h], [vmdk], [libvmdk_get_version]) dnl check for cppunit AC_ARG_ENABLE([cppunit], [AS_HELP_STRING([--disable-cppunit], [Build without cppunit tests])]) ac_cv_cppunit=no AS_IF([test "x$enable_cppunit" != "xno"], [ AS_IF([test "x$ac_cv_prog_PKGCONFIG" = "xyes"], [ dnl IGNOREs keep cppunit out of .pc file, as it's for testing only TSK_PKG_CHECK_MODULES([CPPUNIT], [], [cppunit >= 1.12.1], [ac_cv_cppunit=yes], [ac_cv_cppunit=no], [IGNORE], [IGNORE]) ] ) AS_IF([test "x$ac_cv_cppunit" != "xyes"], [AM_PATH_CPPUNIT(1.12.1) AS_IF([test "x$no_cppunit" = x], [ac_cv_cppunit=yes])] ) AC_MSG_CHECKING([for TestRunner in -lcppunit]) SAVED_CFLAGS="$CFLAGS" SAVED_LDFLAGS="$LDFLAGS" CFLAGS="$CPPUNIT_CLFAGS" LDFLAGS="$CPPUNIT_LIBS" AC_LANG_PUSH([C++]) AC_LINK_IFELSE([AC_LANG_PROGRAM( [[#include ]], [[CppUnit::TextUi::TestRunner();]])], [ax_cv_cppunit=yes], [ax_cv_cppunit=no]) AC_LANG_POP([C++]) CFLAGS="$SAVED_CFLAGS" LDFLAGS="$SAVED_LDFLAGS" AC_MSG_RESULT([$ax_cv_cppunit]) ]) AM_CONDITIONAL([HAVE_CPPUNIT],[test "x$ac_cv_cppunit" = xyes]) dnl check for user online input AC_ARG_ENABLE([offline], [ AS_HELP_STRING([--enable-offline],[Turn on offline mode])], [case "${enableval}" in yes) offline=true ;; no) offline=false ;; *) AC_MSG_ERROR([bad value ${enableval} for --enable-online]) ;; esac],[offline=false]) AM_CONDITIONAL([OFFLINE], [test "x$offline" = xtrue]) dnl Test for the various java things that we need for bindings AS_IF([test "x$enable_java" != "xno"], [ dnl javac is needed to compile the JAR file AC_PROG_JAVAC if test "x$JAVAC" != x; then AX_JNI_INCLUDE_DIR for JNI_INCLUDE_DIR in $JNI_INCLUDE_DIRS do JNI_CPPFLAGS="$JNI_CPPFLAGS -I$JNI_INCLUDE_DIR" done dnl Export the paths so that the makefile gets them AC_SUBST(JNI_CPPFLAGS, $JNI_CPPFLAGS) fi dnl java is needed by ant dnl we had one report 
of a system with javac and not java AC_PROG_JAVA dnl Test is ant is available AC_PATH_PROG([ANT_FOUND], [ant], []) ]) dnl test enable_java dnl if we found everything we need, set ax_java_support for the dnl status message and set X_JNI for use in Makefile AS_IF([test "x$JNI_CPPFLAGS" != x && test "x$ANT_FOUND" != x && test "x$JAVA" != x], [ax_java_support=yes], [ax_java_support=no]) AM_CONDITIONAL([X_JNI],[test "x$ax_java_support" == "xyes"]) AC_CONFIG_COMMANDS([tsk/tsk_incs.h], [echo "#ifndef _TSK_INCS_H" > tsk/tsk_incs.h echo "#define _TSK_INCS_H" >> tsk/tsk_incs.h echo "// automatically by ./configure" >> tsk/tsk_incs.h echo "// Contains the config.h data needed by programs that use libtsk" >> tsk/tsk_incs.h echo "" >> tsk/tsk_incs.h if test x$ac_cv_header_unistd_h = xyes; then echo "#include " >> tsk/tsk_incs.h fi if test x$ac_cv_header_inttypes_h = xyes; then echo "#ifndef __STDC_FORMAT_MACROS" >> tsk/tsk_incs.h echo "#define __STDC_FORMAT_MACROS" >> tsk/tsk_incs.h echo "#endif" >> tsk/tsk_incs.h echo "#include " >> tsk/tsk_incs.h fi if test x$ac_cv_header_sys_param_h = xyes; then echo "#include " >> tsk/tsk_incs.h fi if test x$ax_multithread = xyes; then echo "#define TSK_MULTITHREAD_LIB // enable multithreading" >> tsk/tsk_incs.h fi echo "" >> tsk/tsk_incs.h echo "#endif" >> tsk/tsk_incs.h], [ac_cv_header_unistd_h=$ac_cv_header_unistd_h ac_cv_header_inttypes_h=$ac_cv_header_inttypes_h ac_cv_header_sys_param_h=$ac_cv_header_sys_param_h ax_multithread=$ax_multithread]) AC_MSG_CHECKING([if libtool needs -no-undefined flag to build shared libraries]) case "$host" in *-*-mingw*) dnl Add -no-undefined flag to LDFLAGS to let libtool build DLLs. AC_MSG_RESULT([yes]) LIBTSK_LDFLAGS="-no-undefined" AC_SUBST([LIBTSK_LDFLAGS]) ;; *) dnl No additional flags needed. 
AC_MSG_RESULT([no]) ;; esac dnl Dependencies for fiwalk AC_CHECK_FUNCS([getline]) AC_SEARCH_LIBS(regexec, [regex], , AC_MSG_ERROR([missing regex])) dnl OpenSSL support for encryption - currently disabled due to automatic test failures dnl AX_CHECK_OPENSSL( dnl [ax_openssl=yes] dnl LIBTSK_LDFLAGS="$LIBTSK_LDFLAGS $OPENSSL_LDFLAGS $OPENSSL_LIBS", dnl AC_SUBST([LIBTSK_LDFLAGS]) [ax_openssl=no] dnl [AC_MSG_ERROR([OpenSSL headers cannot be located. Consider using the --with-openssl option to specify an appropriate path.])] dnl ) dnl For the moment, disable the openssl library so the Travis test will pass dnl AS_IF([test "x$ax_openssl" = xyes], AC_DEFINE(HAVE_LIBOPENSSL,1, [Define if using opensll]), []) dnl Enable compliation warnings WARNINGS='-Wall -Wextra -Wno-unused-parameter' AC_SUBST(AM_CFLAGS, $WARNINGS) AC_SUBST(AM_CXXFLAGS, $WARNINGS) AC_CONFIG_FILES([ Makefile tsk/Makefile tsk/base/Makefile tsk/img/Makefile tsk/vs/Makefile tsk/fs/Makefile tsk/hashdb/Makefile tsk/auto/Makefile tsk/pool/Makefile tsk/util/Makefile tools/Makefile tools/imgtools/Makefile tools/vstools/Makefile tools/fstools/Makefile tools/hashtools/Makefile tools/srchtools/Makefile tools/autotools/Makefile tools/pooltools/Makefile tools/sorter/Makefile tools/timeline/Makefile tools/fiwalk/Makefile tools/fiwalk/src/Makefile tools/fiwalk/plugins/Makefile tests/Makefile samples/Makefile man/Makefile bindings/java/Makefile bindings/java/jni/Makefile case-uco/java/Makefile unit_tests/Makefile unit_tests/base/Makefile]) AC_OUTPUT dnl Print a summary dnl openssl is disabled, so removed line openssl support: $ax_openssl AC_MSG_NOTICE([ Building: afflib support: $ax_afflib libewf support: $ax_libewf zlib support: $ax_zlib libvhdi support: $ax_libvhdi libvmdk support: $ax_libvmdk Features: Java/JNI support: $ax_java_support Multithreading: $ax_multithread ]); sleuthkit-4.11.1/tools/000755 000765 000024 00000000000 14137073563 015555 5ustar00carrierstaff000000 000000 sleuthkit-4.11.1/licenses/000755 000765 
000024 00000000000 14137073557 016225 5ustar00carrierstaff000000 000000 sleuthkit-4.11.1/configure000755 000765 000024 00002716102 14137073441 016330 0ustar00carrierstaff000000 000000 #! /bin/sh # Guess values for system-dependent variables and create Makefiles. # Generated by GNU Autoconf 2.69 for sleuthkit 4.11.1. # # # Copyright (C) 1992-1996, 1998-2012 Free Software Foundation, Inc. # # # This configure script is free software; the Free Software Foundation # gives unlimited permission to copy, distribute and modify it. ## -------------------- ## ## M4sh Initialization. ## ## -------------------- ## # Be more Bourne compatible DUALCASE=1; export DUALCASE # for MKS sh if test -n "${ZSH_VERSION+set}" && (emulate sh) >/dev/null 2>&1; then : emulate sh NULLCMD=: # Pre-4.2 versions of Zsh do word splitting on ${1+"$@"}, which # is contrary to our usage. Disable this feature. alias -g '${1+"$@"}'='"$@"' setopt NO_GLOB_SUBST else case `(set -o) 2>/dev/null` in #( *posix*) : set -o posix ;; #( *) : ;; esac fi as_nl=' ' export as_nl # Printing a long string crashes Solaris 7 /usr/bin/printf. as_echo='\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\' as_echo=$as_echo$as_echo$as_echo$as_echo$as_echo as_echo=$as_echo$as_echo$as_echo$as_echo$as_echo$as_echo # Prefer a ksh shell builtin over an external printf program on Solaris, # but without wasting forks for bash or zsh. 
if test -z "$BASH_VERSION$ZSH_VERSION" \ && (test "X`print -r -- $as_echo`" = "X$as_echo") 2>/dev/null; then as_echo='print -r --' as_echo_n='print -rn --' elif (test "X`printf %s $as_echo`" = "X$as_echo") 2>/dev/null; then as_echo='printf %s\n' as_echo_n='printf %s' else if test "X`(/usr/ucb/echo -n -n $as_echo) 2>/dev/null`" = "X-n $as_echo"; then as_echo_body='eval /usr/ucb/echo -n "$1$as_nl"' as_echo_n='/usr/ucb/echo -n' else as_echo_body='eval expr "X$1" : "X\\(.*\\)"' as_echo_n_body='eval arg=$1; case $arg in #( *"$as_nl"*) expr "X$arg" : "X\\(.*\\)$as_nl"; arg=`expr "X$arg" : ".*$as_nl\\(.*\\)"`;; esac; expr "X$arg" : "X\\(.*\\)" | tr -d "$as_nl" ' export as_echo_n_body as_echo_n='sh -c $as_echo_n_body as_echo' fi export as_echo_body as_echo='sh -c $as_echo_body as_echo' fi # The user is always right. if test "${PATH_SEPARATOR+set}" != set; then PATH_SEPARATOR=: (PATH='/bin;/bin'; FPATH=$PATH; sh -c :) >/dev/null 2>&1 && { (PATH='/bin:/bin'; FPATH=$PATH; sh -c :) >/dev/null 2>&1 || PATH_SEPARATOR=';' } fi # IFS # We need space, tab and new line, in precisely that order. Quoting is # there to prevent editors from complaining about space-tab. # (If _AS_PATH_WALK were called with IFS unset, it would disable word # splitting by setting IFS to empty value.) IFS=" "" $as_nl" # Find who we are. Look in the path if we contain no directory separator. as_myself= case $0 in #(( *[\\/]* ) as_myself=$0 ;; *) as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. test -r "$as_dir/$0" && as_myself=$as_dir/$0 && break done IFS=$as_save_IFS ;; esac # We did not find ourselves, most probably we were run as `sh COMMAND' # in which case we are not to be found in the path. if test "x$as_myself" = x; then as_myself=$0 fi if test ! -f "$as_myself"; then $as_echo "$as_myself: error: cannot find myself; rerun with an absolute file name" >&2 exit 1 fi # Unset variables that we do not need and which cause bugs (e.g. 
in # pre-3.0 UWIN ksh). But do not cause bugs in bash 2.01; the "|| exit 1" # suppresses any "Segmentation fault" message there. '((' could # trigger a bug in pdksh 5.2.14. for as_var in BASH_ENV ENV MAIL MAILPATH do eval test x\${$as_var+set} = xset \ && ( (unset $as_var) || exit 1) >/dev/null 2>&1 && unset $as_var || : done PS1='$ ' PS2='> ' PS4='+ ' # NLS nuisances. LC_ALL=C export LC_ALL LANGUAGE=C export LANGUAGE # CDPATH. (unset CDPATH) >/dev/null 2>&1 && unset CDPATH # Use a proper internal environment variable to ensure we don't fall # into an infinite loop, continuously re-executing ourselves. if test x"${_as_can_reexec}" != xno && test "x$CONFIG_SHELL" != x; then _as_can_reexec=no; export _as_can_reexec; # We cannot yet assume a decent shell, so we have to provide a # neutralization value for shells without unset; and this also # works around shells that cannot unset nonexistent variables. # Preserve -v and -x to the replacement shell. BASH_ENV=/dev/null ENV=/dev/null (unset BASH_ENV) >/dev/null 2>&1 && unset BASH_ENV ENV case $- in # (((( *v*x* | *x*v* ) as_opts=-vx ;; *v* ) as_opts=-v ;; *x* ) as_opts=-x ;; * ) as_opts= ;; esac exec $CONFIG_SHELL $as_opts "$as_myself" ${1+"$@"} # Admittedly, this is quite paranoid, since all the known shells bail # out after a failed `exec'. $as_echo "$0: could not re-execute with $CONFIG_SHELL" >&2 as_fn_exit 255 fi # We don't want this to propagate to other subprocesses. { _as_can_reexec=; unset _as_can_reexec;} if test "x$CONFIG_SHELL" = x; then as_bourne_compatible="if test -n \"\${ZSH_VERSION+set}\" && (emulate sh) >/dev/null 2>&1; then : emulate sh NULLCMD=: # Pre-4.2 versions of Zsh do word splitting on \${1+\"\$@\"}, which # is contrary to our usage. Disable this feature. 
alias -g '\${1+\"\$@\"}'='\"\$@\"' setopt NO_GLOB_SUBST else case \`(set -o) 2>/dev/null\` in #( *posix*) : set -o posix ;; #( *) : ;; esac fi " as_required="as_fn_return () { (exit \$1); } as_fn_success () { as_fn_return 0; } as_fn_failure () { as_fn_return 1; } as_fn_ret_success () { return 0; } as_fn_ret_failure () { return 1; } exitcode=0 as_fn_success || { exitcode=1; echo as_fn_success failed.; } as_fn_failure && { exitcode=1; echo as_fn_failure succeeded.; } as_fn_ret_success || { exitcode=1; echo as_fn_ret_success failed.; } as_fn_ret_failure && { exitcode=1; echo as_fn_ret_failure succeeded.; } if ( set x; as_fn_ret_success y && test x = \"\$1\" ); then : else exitcode=1; echo positional parameters were not saved. fi test x\$exitcode = x0 || exit 1 test -x / || exit 1" as_suggested=" as_lineno_1=";as_suggested=$as_suggested$LINENO;as_suggested=$as_suggested" as_lineno_1a=\$LINENO as_lineno_2=";as_suggested=$as_suggested$LINENO;as_suggested=$as_suggested" as_lineno_2a=\$LINENO eval 'test \"x\$as_lineno_1'\$as_run'\" != \"x\$as_lineno_2'\$as_run'\" && test \"x\`expr \$as_lineno_1'\$as_run' + 1\`\" = \"x\$as_lineno_2'\$as_run'\"' || exit 1 test -n \"\${ZSH_VERSION+set}\${BASH_VERSION+set}\" || ( ECHO='\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\' ECHO=\$ECHO\$ECHO\$ECHO\$ECHO\$ECHO ECHO=\$ECHO\$ECHO\$ECHO\$ECHO\$ECHO\$ECHO PATH=/empty FPATH=/empty; export PATH FPATH test \"X\`printf %s \$ECHO\`\" = \"X\$ECHO\" \\ || test \"X\`print -r -- \$ECHO\`\" = \"X\$ECHO\" ) || exit 1 test \$(( 1 + 1 )) = 2 || exit 1" if (eval "$as_required") 2>/dev/null; then : as_have_required=yes else as_have_required=no fi if test x$as_have_required = xyes && (eval "$as_suggested") 2>/dev/null; then : else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR as_found=false for as_dir in /bin$PATH_SEPARATOR/usr/bin$PATH_SEPARATOR$PATH do 
IFS=$as_save_IFS test -z "$as_dir" && as_dir=. as_found=: case $as_dir in #( /*) for as_base in sh bash ksh sh5; do # Try only shells that exist, to save several forks. as_shell=$as_dir/$as_base if { test -f "$as_shell" || test -f "$as_shell.exe"; } && { $as_echo "$as_bourne_compatible""$as_required" | as_run=a "$as_shell"; } 2>/dev/null; then : CONFIG_SHELL=$as_shell as_have_required=yes if { $as_echo "$as_bourne_compatible""$as_suggested" | as_run=a "$as_shell"; } 2>/dev/null; then : break 2 fi fi done;; esac as_found=false done $as_found || { if { test -f "$SHELL" || test -f "$SHELL.exe"; } && { $as_echo "$as_bourne_compatible""$as_required" | as_run=a "$SHELL"; } 2>/dev/null; then : CONFIG_SHELL=$SHELL as_have_required=yes fi; } IFS=$as_save_IFS if test "x$CONFIG_SHELL" != x; then : export CONFIG_SHELL # We cannot yet assume a decent shell, so we have to provide a # neutralization value for shells without unset; and this also # works around shells that cannot unset nonexistent variables. # Preserve -v and -x to the replacement shell. BASH_ENV=/dev/null ENV=/dev/null (unset BASH_ENV) >/dev/null 2>&1 && unset BASH_ENV ENV case $- in # (((( *v*x* | *x*v* ) as_opts=-vx ;; *v* ) as_opts=-v ;; *x* ) as_opts=-x ;; * ) as_opts= ;; esac exec $CONFIG_SHELL $as_opts "$as_myself" ${1+"$@"} # Admittedly, this is quite paranoid, since all the known shells bail # out after a failed `exec'. $as_echo "$0: could not re-execute with $CONFIG_SHELL" >&2 exit 255 fi if test x$as_have_required = xno; then : $as_echo "$0: This script requires a shell more modern than all" $as_echo "$0: the shells that I found on your system." if test x${ZSH_VERSION+set} = xset ; then $as_echo "$0: In particular, zsh $ZSH_VERSION has bugs and should" $as_echo "$0: be upgraded to zsh 4.3.4 or later." else $as_echo "$0: Please tell bug-autoconf@gnu.org about your system, $0: including any error possibly output before this $0: message. 
Then install a modern shell, or manually run $0: the script under such a shell if you do have one." fi exit 1 fi fi fi SHELL=${CONFIG_SHELL-/bin/sh} export SHELL # Unset more variables known to interfere with behavior of common tools. CLICOLOR_FORCE= GREP_OPTIONS= unset CLICOLOR_FORCE GREP_OPTIONS ## --------------------- ## ## M4sh Shell Functions. ## ## --------------------- ## # as_fn_unset VAR # --------------- # Portably unset VAR. as_fn_unset () { { eval $1=; unset $1;} } as_unset=as_fn_unset # as_fn_set_status STATUS # ----------------------- # Set $? to STATUS, without forking. as_fn_set_status () { return $1 } # as_fn_set_status # as_fn_exit STATUS # ----------------- # Exit the shell with STATUS, even in a "trap 0" or "set -e" context. as_fn_exit () { set +e as_fn_set_status $1 exit $1 } # as_fn_exit # as_fn_mkdir_p # ------------- # Create "$as_dir" as a directory, including parents if necessary. as_fn_mkdir_p () { case $as_dir in #( -*) as_dir=./$as_dir;; esac test -d "$as_dir" || eval $as_mkdir_p || { as_dirs= while :; do case $as_dir in #( *\'*) as_qdir=`$as_echo "$as_dir" | sed "s/'/'\\\\\\\\''/g"`;; #'( *) as_qdir=$as_dir;; esac as_dirs="'$as_qdir' $as_dirs" as_dir=`$as_dirname -- "$as_dir" || $as_expr X"$as_dir" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \ X"$as_dir" : 'X\(//\)[^/]' \| \ X"$as_dir" : 'X\(//\)$' \| \ X"$as_dir" : 'X\(/\)' \| . 2>/dev/null || $as_echo X"$as_dir" | sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{ s//\1/ q } /^X\(\/\/\)[^/].*/{ s//\1/ q } /^X\(\/\/\)$/{ s//\1/ q } /^X\(\/\).*/{ s//\1/ q } s/.*/./; q'` test -d "$as_dir" && break done test -z "$as_dirs" || eval "mkdir $as_dirs" } || test -d "$as_dir" || as_fn_error $? "cannot create directory $as_dir" } # as_fn_mkdir_p # as_fn_executable_p FILE # ----------------------- # Test if FILE is an executable regular file. 
as_fn_executable_p () { test -f "$1" && test -x "$1" } # as_fn_executable_p # as_fn_append VAR VALUE # ---------------------- # Append the text in VALUE to the end of the definition contained in VAR. Take # advantage of any shell optimizations that allow amortized linear growth over # repeated appends, instead of the typical quadratic growth present in naive # implementations. if (eval "as_var=1; as_var+=2; test x\$as_var = x12") 2>/dev/null; then : eval 'as_fn_append () { eval $1+=\$2 }' else as_fn_append () { eval $1=\$$1\$2 } fi # as_fn_append # as_fn_arith ARG... # ------------------ # Perform arithmetic evaluation on the ARGs, and store the result in the # global $as_val. Take advantage of shells that can avoid forks. The arguments # must be portable across $(()) and expr. if (eval "test \$(( 1 + 1 )) = 2") 2>/dev/null; then : eval 'as_fn_arith () { as_val=$(( $* )) }' else as_fn_arith () { as_val=`expr "$@" || test $? -eq 1` } fi # as_fn_arith # as_fn_error STATUS ERROR [LINENO LOG_FD] # ---------------------------------------- # Output "`basename $0`: error: ERROR" to stderr. If LINENO and LOG_FD are # provided, also output the error to LOG_FD, referencing LINENO. Then exit the # script with STATUS, using 1 if that was 0. 
as_fn_error () { as_status=$1; test $as_status -eq 0 && as_status=1 if test "$4"; then as_lineno=${as_lineno-"$3"} as_lineno_stack=as_lineno_stack=$as_lineno_stack $as_echo "$as_me:${as_lineno-$LINENO}: error: $2" >&$4 fi $as_echo "$as_me: error: $2" >&2 as_fn_exit $as_status } # as_fn_error if expr a : '\(a\)' >/dev/null 2>&1 && test "X`expr 00001 : '.*\(...\)'`" = X001; then as_expr=expr else as_expr=false fi if (basename -- /) >/dev/null 2>&1 && test "X`basename -- / 2>&1`" = "X/"; then as_basename=basename else as_basename=false fi if (as_dir=`dirname -- /` && test "X$as_dir" = X/) >/dev/null 2>&1; then as_dirname=dirname else as_dirname=false fi as_me=`$as_basename -- "$0" || $as_expr X/"$0" : '.*/\([^/][^/]*\)/*$' \| \ X"$0" : 'X\(//\)$' \| \ X"$0" : 'X\(/\)' \| . 2>/dev/null || $as_echo X/"$0" | sed '/^.*\/\([^/][^/]*\)\/*$/{ s//\1/ q } /^X\/\(\/\/\)$/{ s//\1/ q } /^X\/\(\/\).*/{ s//\1/ q } s/.*/./; q'` # Avoid depending upon Character Ranges. as_cr_letters='abcdefghijklmnopqrstuvwxyz' as_cr_LETTERS='ABCDEFGHIJKLMNOPQRSTUVWXYZ' as_cr_Letters=$as_cr_letters$as_cr_LETTERS as_cr_digits='0123456789' as_cr_alnum=$as_cr_Letters$as_cr_digits as_lineno_1=$LINENO as_lineno_1a=$LINENO as_lineno_2=$LINENO as_lineno_2a=$LINENO eval 'test "x$as_lineno_1'$as_run'" != "x$as_lineno_2'$as_run'" && test "x`expr $as_lineno_1'$as_run' + 1`" = "x$as_lineno_2'$as_run'"' || { # Blame Lee E. McMahon (1931-1989) for sed's syntax. :-) sed -n ' p /[$]LINENO/= ' <$as_myself | sed ' s/[$]LINENO.*/&-/ t lineno b :lineno N :loop s/[$]LINENO\([^'$as_cr_alnum'_].*\n\)\(.*\)/\2\1\2/ t loop s/-\n.*// ' >$as_me.lineno && chmod +x "$as_me.lineno" || { $as_echo "$as_me: error: cannot create $as_me.lineno; rerun with a POSIX shell" >&2; as_fn_exit 1; } # If we had to re-execute with $CONFIG_SHELL, we're ensured to have # already done that, so ensure we don't try to do so again and fall # in an infinite loop. This has already happened in practice. 
_as_can_reexec=no; export _as_can_reexec # Don't try to exec as it changes $[0], causing all sort of problems # (the dirname of $[0] is not the place where we might find the # original and so on. Autoconf is especially sensitive to this). . "./$as_me.lineno" # Exit status is that of the last command. exit } ECHO_C= ECHO_N= ECHO_T= case `echo -n x` in #((((( -n*) case `echo 'xy\c'` in *c*) ECHO_T=' ';; # ECHO_T is single tab character. xy) ECHO_C='\c';; *) echo `echo ksh88 bug on AIX 6.1` > /dev/null ECHO_T=' ';; esac;; *) ECHO_N='-n';; esac rm -f conf$$ conf$$.exe conf$$.file if test -d conf$$.dir; then rm -f conf$$.dir/conf$$.file else rm -f conf$$.dir mkdir conf$$.dir 2>/dev/null fi if (echo >conf$$.file) 2>/dev/null; then if ln -s conf$$.file conf$$ 2>/dev/null; then as_ln_s='ln -s' # ... but there are two gotchas: # 1) On MSYS, both `ln -s file dir' and `ln file dir' fail. # 2) DJGPP < 2.04 has no symlinks; `ln -s' creates a wrapper executable. # In both cases, we have to default to `cp -pR'. ln -s conf$$.file conf$$.dir 2>/dev/null && test ! -f conf$$.exe || as_ln_s='cp -pR' elif ln conf$$.file conf$$ 2>/dev/null; then as_ln_s=ln else as_ln_s='cp -pR' fi else as_ln_s='cp -pR' fi rm -f conf$$ conf$$.exe conf$$.dir/conf$$.file conf$$.file rmdir conf$$.dir 2>/dev/null if mkdir -p . 2>/dev/null; then as_mkdir_p='mkdir -p "$as_dir"' else test -d ./-p && rmdir ./-p as_mkdir_p=false fi as_test_x='test -x' as_executable_p=as_fn_executable_p # Sed expression to map a string onto a valid CPP name. as_tr_cpp="eval sed 'y%*$as_cr_letters%P$as_cr_LETTERS%;s%[^_$as_cr_alnum]%_%g'" # Sed expression to map a string onto a valid variable name. as_tr_sh="eval sed 'y%*+%pp%;s%[^_$as_cr_alnum]%_%g'" SHELL=${CONFIG_SHELL-/bin/sh} test -n "$DJDIR" || exec 7<&0 &1 # Name of the host. # hostname on some systems (SVR3.2, old GNU/Linux) returns a bogus exit status, # so uname gets run too. ac_hostname=`(hostname || uname -n) 2>/dev/null | sed 1q` # # Initializations. 
# ac_default_prefix=/usr/local ac_clean_files= ac_config_libobj_dir=. LIBOBJS= cross_compiling=no subdirs= MFLAGS= MAKEFLAGS= # Identity of this package. PACKAGE_NAME='sleuthkit' PACKAGE_TARNAME='sleuthkit' PACKAGE_VERSION='4.11.1' PACKAGE_STRING='sleuthkit 4.11.1' PACKAGE_BUGREPORT='' PACKAGE_URL='' ac_unique_file="tsk/base/tsk_base.h" # Factoring default headers for most tests. ac_includes_default="\ #include #ifdef HAVE_SYS_TYPES_H # include #endif #ifdef HAVE_SYS_STAT_H # include #endif #ifdef STDC_HEADERS # include # include #else # ifdef HAVE_STDLIB_H # include # endif #endif #ifdef HAVE_STRING_H # if !defined STDC_HEADERS && defined HAVE_MEMORY_H # include # endif # include #endif #ifdef HAVE_STRINGS_H # include #endif #ifdef HAVE_INTTYPES_H # include #endif #ifdef HAVE_STDINT_H # include #endif #ifdef HAVE_UNISTD_H # include #endif" ac_header_list= ac_subst_vars='am__EXEEXT_FALSE am__EXEEXT_TRUE LTLIBOBJS AM_CXXFLAGS AM_CFLAGS LIBTSK_LDFLAGS X_JNI_FALSE X_JNI_TRUE ANT_FOUND uudecode JAVA JNI_CPPFLAGS _ACJNI_JAVAC JAVAC OFFLINE_FALSE OFFLINE_TRUE HAVE_CPPUNIT_FALSE HAVE_CPPUNIT_TRUE IGNORE VMDK_LIBS VMDK_CFLAGS VHDI_LIBS VHDI_CFLAGS EWF_LIBS EWF_CFLAGS ZLIB_LIBS ZLIB_CFLAGS HAVE_LIBSQLITE3_FALSE HAVE_LIBSQLITE3_TRUE AX_PACKAGE_REQUIRES_PRIVATE AX_PACKAGE_REQUIRES SQLITE3_LIBS SQLITE3_CFLAGS PTHREAD_CFLAGS PTHREAD_LIBS PTHREAD_CC ax_pthread_config LIBOBJS ALLOCA PACKAGE_LIBS_PRIVATE PKG_CONFIG_LIBDIR PKG_CONFIG_PATH PKG_CONFIG PKGCONFIG PERL HAVE_CXX14 CXXCPP am__fastdepCXX_FALSE am__fastdepCXX_TRUE CXXDEPMODE ac_ct_CXX CXXFLAGS CXX MAINT MAINTAINER_MODE_FALSE MAINTAINER_MODE_TRUE CPP LT_SYS_LIBRARY_PATH OTOOL64 OTOOL LIPO NMEDIT DSYMUTIL MANIFEST_TOOL RANLIB ac_ct_AR AR DLLTOOL OBJDUMP LN_S NM ac_ct_DUMPBIN DUMPBIN LD FGREP EGREP GREP SED am__fastdepCC_FALSE am__fastdepCC_TRUE CCDEPMODE am__nodep AMDEPBACKSLASH AMDEP_FALSE AMDEP_TRUE am__quote am__include DEPDIR OBJEXT EXEEXT ac_ct_CC CPPFLAGS LDFLAGS CFLAGS CC host_os host_vendor host_cpu host build_os 
build_vendor build_cpu build LIBTOOL CPPUNIT_FALSE CPPUNIT_TRUE CPPUNIT_LIBS CPPUNIT_CFLAGS CPPUNIT_CONFIG AM_BACKSLASH AM_DEFAULT_VERBOSITY AM_DEFAULT_V AM_V am__untar am__tar AMTAR am__leading_dot SET_MAKE AWK mkdir_p MKDIR_P INSTALL_STRIP_PROGRAM STRIP install_sh MAKEINFO AUTOHEADER AUTOMAKE AUTOCONF ACLOCAL VERSION PACKAGE CYGPATH_W am__isrc INSTALL_DATA INSTALL_SCRIPT INSTALL_PROGRAM target_alias host_alias build_alias LIBS ECHO_T ECHO_N ECHO_C DEFS mandir localedir libdir psdir pdfdir dvidir htmldir infodir docdir oldincludedir includedir localstatedir sharedstatedir sysconfdir datadir datarootdir libexecdir sbindir bindir program_transform_name prefix exec_prefix PACKAGE_URL PACKAGE_BUGREPORT PACKAGE_STRING PACKAGE_VERSION PACKAGE_TARNAME PACKAGE_NAME PATH_SEPARATOR SHELL' ac_subst_files='' ac_user_opts=' enable_option_checking enable_silent_rules with_cppunit_prefix with_cppunit_exec_prefix enable_shared enable_static with_pic enable_fast_install with_aix_soname enable_dependency_tracking with_gnu_ld with_sysroot enable_libtool_lock enable_maintainer_mode enable_largefile enable_multithreading enable_java with_afflib with_zlib with_libewf with_libvhdi with_libvmdk enable_cppunit enable_offline ' ac_precious_vars='build_alias host_alias target_alias CC CFLAGS LDFLAGS LIBS CPPFLAGS LT_SYS_LIBRARY_PATH CPP CXX CXXFLAGS CCC CXXCPP PKG_CONFIG PKG_CONFIG_PATH PKG_CONFIG_LIBDIR SQLITE3_CFLAGS SQLITE3_LIBS ZLIB_CFLAGS ZLIB_LIBS EWF_CFLAGS EWF_LIBS VHDI_CFLAGS VHDI_LIBS VMDK_CFLAGS VMDK_LIBS CPPUNIT_CFLAGS CPPUNIT_LIBS' # Initialize some variables set by options. ac_init_help= ac_init_version=false ac_unrecognized_opts= ac_unrecognized_sep= # The variables have the same names as the options, with # dashes changed to underlines. cache_file=/dev/null exec_prefix=NONE no_create= no_recursion= prefix=NONE program_prefix=NONE program_suffix=NONE program_transform_name=s,x,x, silent= site= srcdir= verbose= x_includes=NONE x_libraries=NONE # Installation directory options. 
# These are left unexpanded so users can "make install exec_prefix=/foo" # and all the variables that are supposed to be based on exec_prefix # by default will actually change. # Use braces instead of parens because sh, perl, etc. also accept them. # (The list follows the same order as the GNU Coding Standards.) bindir='${exec_prefix}/bin' sbindir='${exec_prefix}/sbin' libexecdir='${exec_prefix}/libexec' datarootdir='${prefix}/share' datadir='${datarootdir}' sysconfdir='${prefix}/etc' sharedstatedir='${prefix}/com' localstatedir='${prefix}/var' includedir='${prefix}/include' oldincludedir='/usr/include' docdir='${datarootdir}/doc/${PACKAGE_TARNAME}' infodir='${datarootdir}/info' htmldir='${docdir}' dvidir='${docdir}' pdfdir='${docdir}' psdir='${docdir}' libdir='${exec_prefix}/lib' localedir='${datarootdir}/locale' mandir='${datarootdir}/man' ac_prev= ac_dashdash= for ac_option do # If the previous option needs an argument, assign it. if test -n "$ac_prev"; then eval $ac_prev=\$ac_option ac_prev= continue fi case $ac_option in *=?*) ac_optarg=`expr "X$ac_option" : '[^=]*=\(.*\)'` ;; *=) ac_optarg= ;; *) ac_optarg=yes ;; esac # Accept the important Cygnus configure options, so we can diagnose typos. 
case $ac_dashdash$ac_option in --) ac_dashdash=yes ;; -bindir | --bindir | --bindi | --bind | --bin | --bi) ac_prev=bindir ;; -bindir=* | --bindir=* | --bindi=* | --bind=* | --bin=* | --bi=*) bindir=$ac_optarg ;; -build | --build | --buil | --bui | --bu) ac_prev=build_alias ;; -build=* | --build=* | --buil=* | --bui=* | --bu=*) build_alias=$ac_optarg ;; -cache-file | --cache-file | --cache-fil | --cache-fi \ | --cache-f | --cache- | --cache | --cach | --cac | --ca | --c) ac_prev=cache_file ;; -cache-file=* | --cache-file=* | --cache-fil=* | --cache-fi=* \ | --cache-f=* | --cache-=* | --cache=* | --cach=* | --cac=* | --ca=* | --c=*) cache_file=$ac_optarg ;; --config-cache | -C) cache_file=config.cache ;; -datadir | --datadir | --datadi | --datad) ac_prev=datadir ;; -datadir=* | --datadir=* | --datadi=* | --datad=*) datadir=$ac_optarg ;; -datarootdir | --datarootdir | --datarootdi | --datarootd | --dataroot \ | --dataroo | --dataro | --datar) ac_prev=datarootdir ;; -datarootdir=* | --datarootdir=* | --datarootdi=* | --datarootd=* \ | --dataroot=* | --dataroo=* | --dataro=* | --datar=*) datarootdir=$ac_optarg ;; -disable-* | --disable-*) ac_useropt=`expr "x$ac_option" : 'x-*disable-\(.*\)'` # Reject names that are not valid shell variable names. expr "x$ac_useropt" : ".*[^-+._$as_cr_alnum]" >/dev/null && as_fn_error $? 
"invalid feature name: $ac_useropt" ac_useropt_orig=$ac_useropt ac_useropt=`$as_echo "$ac_useropt" | sed 's/[-+.]/_/g'` case $ac_user_opts in *" "enable_$ac_useropt" "*) ;; *) ac_unrecognized_opts="$ac_unrecognized_opts$ac_unrecognized_sep--disable-$ac_useropt_orig" ac_unrecognized_sep=', ';; esac eval enable_$ac_useropt=no ;; -docdir | --docdir | --docdi | --doc | --do) ac_prev=docdir ;; -docdir=* | --docdir=* | --docdi=* | --doc=* | --do=*) docdir=$ac_optarg ;; -dvidir | --dvidir | --dvidi | --dvid | --dvi | --dv) ac_prev=dvidir ;; -dvidir=* | --dvidir=* | --dvidi=* | --dvid=* | --dvi=* | --dv=*) dvidir=$ac_optarg ;; -enable-* | --enable-*) ac_useropt=`expr "x$ac_option" : 'x-*enable-\([^=]*\)'` # Reject names that are not valid shell variable names. expr "x$ac_useropt" : ".*[^-+._$as_cr_alnum]" >/dev/null && as_fn_error $? "invalid feature name: $ac_useropt" ac_useropt_orig=$ac_useropt ac_useropt=`$as_echo "$ac_useropt" | sed 's/[-+.]/_/g'` case $ac_user_opts in *" "enable_$ac_useropt" "*) ;; *) ac_unrecognized_opts="$ac_unrecognized_opts$ac_unrecognized_sep--enable-$ac_useropt_orig" ac_unrecognized_sep=', ';; esac eval enable_$ac_useropt=\$ac_optarg ;; -exec-prefix | --exec_prefix | --exec-prefix | --exec-prefi \ | --exec-pref | --exec-pre | --exec-pr | --exec-p | --exec- \ | --exec | --exe | --ex) ac_prev=exec_prefix ;; -exec-prefix=* | --exec_prefix=* | --exec-prefix=* | --exec-prefi=* \ | --exec-pref=* | --exec-pre=* | --exec-pr=* | --exec-p=* | --exec-=* \ | --exec=* | --exe=* | --ex=*) exec_prefix=$ac_optarg ;; -gas | --gas | --ga | --g) # Obsolete; use --with-gas. 
with_gas=yes ;; -help | --help | --hel | --he | -h) ac_init_help=long ;; -help=r* | --help=r* | --hel=r* | --he=r* | -hr*) ac_init_help=recursive ;; -help=s* | --help=s* | --hel=s* | --he=s* | -hs*) ac_init_help=short ;; -host | --host | --hos | --ho) ac_prev=host_alias ;; -host=* | --host=* | --hos=* | --ho=*) host_alias=$ac_optarg ;; -htmldir | --htmldir | --htmldi | --htmld | --html | --htm | --ht) ac_prev=htmldir ;; -htmldir=* | --htmldir=* | --htmldi=* | --htmld=* | --html=* | --htm=* \ | --ht=*) htmldir=$ac_optarg ;; -includedir | --includedir | --includedi | --included | --include \ | --includ | --inclu | --incl | --inc) ac_prev=includedir ;; -includedir=* | --includedir=* | --includedi=* | --included=* | --include=* \ | --includ=* | --inclu=* | --incl=* | --inc=*) includedir=$ac_optarg ;; -infodir | --infodir | --infodi | --infod | --info | --inf) ac_prev=infodir ;; -infodir=* | --infodir=* | --infodi=* | --infod=* | --info=* | --inf=*) infodir=$ac_optarg ;; -libdir | --libdir | --libdi | --libd) ac_prev=libdir ;; -libdir=* | --libdir=* | --libdi=* | --libd=*) libdir=$ac_optarg ;; -libexecdir | --libexecdir | --libexecdi | --libexecd | --libexec \ | --libexe | --libex | --libe) ac_prev=libexecdir ;; -libexecdir=* | --libexecdir=* | --libexecdi=* | --libexecd=* | --libexec=* \ | --libexe=* | --libex=* | --libe=*) libexecdir=$ac_optarg ;; -localedir | --localedir | --localedi | --localed | --locale) ac_prev=localedir ;; -localedir=* | --localedir=* | --localedi=* | --localed=* | --locale=*) localedir=$ac_optarg ;; -localstatedir | --localstatedir | --localstatedi | --localstated \ | --localstate | --localstat | --localsta | --localst | --locals) ac_prev=localstatedir ;; -localstatedir=* | --localstatedir=* | --localstatedi=* | --localstated=* \ | --localstate=* | --localstat=* | --localsta=* | --localst=* | --locals=*) localstatedir=$ac_optarg ;; -mandir | --mandir | --mandi | --mand | --man | --ma | --m) ac_prev=mandir ;; -mandir=* | --mandir=* | --mandi=* | 
--mand=* | --man=* | --ma=* | --m=*) mandir=$ac_optarg ;; -nfp | --nfp | --nf) # Obsolete; use --without-fp. with_fp=no ;; -no-create | --no-create | --no-creat | --no-crea | --no-cre \ | --no-cr | --no-c | -n) no_create=yes ;; -no-recursion | --no-recursion | --no-recursio | --no-recursi \ | --no-recurs | --no-recur | --no-recu | --no-rec | --no-re | --no-r) no_recursion=yes ;; -oldincludedir | --oldincludedir | --oldincludedi | --oldincluded \ | --oldinclude | --oldinclud | --oldinclu | --oldincl | --oldinc \ | --oldin | --oldi | --old | --ol | --o) ac_prev=oldincludedir ;; -oldincludedir=* | --oldincludedir=* | --oldincludedi=* | --oldincluded=* \ | --oldinclude=* | --oldinclud=* | --oldinclu=* | --oldincl=* | --oldinc=* \ | --oldin=* | --oldi=* | --old=* | --ol=* | --o=*) oldincludedir=$ac_optarg ;; -prefix | --prefix | --prefi | --pref | --pre | --pr | --p) ac_prev=prefix ;; -prefix=* | --prefix=* | --prefi=* | --pref=* | --pre=* | --pr=* | --p=*) prefix=$ac_optarg ;; -program-prefix | --program-prefix | --program-prefi | --program-pref \ | --program-pre | --program-pr | --program-p) ac_prev=program_prefix ;; -program-prefix=* | --program-prefix=* | --program-prefi=* \ | --program-pref=* | --program-pre=* | --program-pr=* | --program-p=*) program_prefix=$ac_optarg ;; -program-suffix | --program-suffix | --program-suffi | --program-suff \ | --program-suf | --program-su | --program-s) ac_prev=program_suffix ;; -program-suffix=* | --program-suffix=* | --program-suffi=* \ | --program-suff=* | --program-suf=* | --program-su=* | --program-s=*) program_suffix=$ac_optarg ;; -program-transform-name | --program-transform-name \ | --program-transform-nam | --program-transform-na \ | --program-transform-n | --program-transform- \ | --program-transform | --program-transfor \ | --program-transfo | --program-transf \ | --program-trans | --program-tran \ | --progr-tra | --program-tr | --program-t) ac_prev=program_transform_name ;; -program-transform-name=* | 
--program-transform-name=* \ | --program-transform-nam=* | --program-transform-na=* \ | --program-transform-n=* | --program-transform-=* \ | --program-transform=* | --program-transfor=* \ | --program-transfo=* | --program-transf=* \ | --program-trans=* | --program-tran=* \ | --progr-tra=* | --program-tr=* | --program-t=*) program_transform_name=$ac_optarg ;; -pdfdir | --pdfdir | --pdfdi | --pdfd | --pdf | --pd) ac_prev=pdfdir ;; -pdfdir=* | --pdfdir=* | --pdfdi=* | --pdfd=* | --pdf=* | --pd=*) pdfdir=$ac_optarg ;; -psdir | --psdir | --psdi | --psd | --ps) ac_prev=psdir ;; -psdir=* | --psdir=* | --psdi=* | --psd=* | --ps=*) psdir=$ac_optarg ;; -q | -quiet | --quiet | --quie | --qui | --qu | --q \ | -silent | --silent | --silen | --sile | --sil) silent=yes ;; -sbindir | --sbindir | --sbindi | --sbind | --sbin | --sbi | --sb) ac_prev=sbindir ;; -sbindir=* | --sbindir=* | --sbindi=* | --sbind=* | --sbin=* \ | --sbi=* | --sb=*) sbindir=$ac_optarg ;; -sharedstatedir | --sharedstatedir | --sharedstatedi \ | --sharedstated | --sharedstate | --sharedstat | --sharedsta \ | --sharedst | --shareds | --shared | --share | --shar \ | --sha | --sh) ac_prev=sharedstatedir ;; -sharedstatedir=* | --sharedstatedir=* | --sharedstatedi=* \ | --sharedstated=* | --sharedstate=* | --sharedstat=* | --sharedsta=* \ | --sharedst=* | --shareds=* | --shared=* | --share=* | --shar=* \ | --sha=* | --sh=*) sharedstatedir=$ac_optarg ;; -site | --site | --sit) ac_prev=site ;; -site=* | --site=* | --sit=*) site=$ac_optarg ;; -srcdir | --srcdir | --srcdi | --srcd | --src | --sr) ac_prev=srcdir ;; -srcdir=* | --srcdir=* | --srcdi=* | --srcd=* | --src=* | --sr=*) srcdir=$ac_optarg ;; -sysconfdir | --sysconfdir | --sysconfdi | --sysconfd | --sysconf \ | --syscon | --sysco | --sysc | --sys | --sy) ac_prev=sysconfdir ;; -sysconfdir=* | --sysconfdir=* | --sysconfdi=* | --sysconfd=* | --sysconf=* \ | --syscon=* | --sysco=* | --sysc=* | --sys=* | --sy=*) sysconfdir=$ac_optarg ;; -target | --target | --targe | 
--targ | --tar | --ta | --t) ac_prev=target_alias ;; -target=* | --target=* | --targe=* | --targ=* | --tar=* | --ta=* | --t=*) target_alias=$ac_optarg ;; -v | -verbose | --verbose | --verbos | --verbo | --verb) verbose=yes ;; -version | --version | --versio | --versi | --vers | -V) ac_init_version=: ;; -with-* | --with-*) ac_useropt=`expr "x$ac_option" : 'x-*with-\([^=]*\)'` # Reject names that are not valid shell variable names. expr "x$ac_useropt" : ".*[^-+._$as_cr_alnum]" >/dev/null && as_fn_error $? "invalid package name: $ac_useropt" ac_useropt_orig=$ac_useropt ac_useropt=`$as_echo "$ac_useropt" | sed 's/[-+.]/_/g'` case $ac_user_opts in *" "with_$ac_useropt" "*) ;; *) ac_unrecognized_opts="$ac_unrecognized_opts$ac_unrecognized_sep--with-$ac_useropt_orig" ac_unrecognized_sep=', ';; esac eval with_$ac_useropt=\$ac_optarg ;; -without-* | --without-*) ac_useropt=`expr "x$ac_option" : 'x-*without-\(.*\)'` # Reject names that are not valid shell variable names. expr "x$ac_useropt" : ".*[^-+._$as_cr_alnum]" >/dev/null && as_fn_error $? "invalid package name: $ac_useropt" ac_useropt_orig=$ac_useropt ac_useropt=`$as_echo "$ac_useropt" | sed 's/[-+.]/_/g'` case $ac_user_opts in *" "with_$ac_useropt" "*) ;; *) ac_unrecognized_opts="$ac_unrecognized_opts$ac_unrecognized_sep--without-$ac_useropt_orig" ac_unrecognized_sep=', ';; esac eval with_$ac_useropt=no ;; --x) # Obsolete; use --with-x. 
with_x=yes ;; -x-includes | --x-includes | --x-include | --x-includ | --x-inclu \ | --x-incl | --x-inc | --x-in | --x-i) ac_prev=x_includes ;; -x-includes=* | --x-includes=* | --x-include=* | --x-includ=* | --x-inclu=* \ | --x-incl=* | --x-inc=* | --x-in=* | --x-i=*) x_includes=$ac_optarg ;; -x-libraries | --x-libraries | --x-librarie | --x-librari \ | --x-librar | --x-libra | --x-libr | --x-lib | --x-li | --x-l) ac_prev=x_libraries ;; -x-libraries=* | --x-libraries=* | --x-librarie=* | --x-librari=* \ | --x-librar=* | --x-libra=* | --x-libr=* | --x-lib=* | --x-li=* | --x-l=*) x_libraries=$ac_optarg ;; -*) as_fn_error $? "unrecognized option: \`$ac_option' Try \`$0 --help' for more information" ;; *=*) ac_envvar=`expr "x$ac_option" : 'x\([^=]*\)='` # Reject names that are not valid shell variable names. case $ac_envvar in #( '' | [0-9]* | *[!_$as_cr_alnum]* ) as_fn_error $? "invalid variable name: \`$ac_envvar'" ;; esac eval $ac_envvar=\$ac_optarg export $ac_envvar ;; *) # FIXME: should be removed in autoconf 3.0. $as_echo "$as_me: WARNING: you should use --build, --host, --target" >&2 expr "x$ac_option" : ".*[^-._$as_cr_alnum]" >/dev/null && $as_echo "$as_me: WARNING: invalid host type: $ac_option" >&2 : "${build_alias=$ac_option} ${host_alias=$ac_option} ${target_alias=$ac_option}" ;; esac done if test -n "$ac_prev"; then ac_option=--`echo $ac_prev | sed 's/_/-/g'` as_fn_error $? "missing argument to $ac_option" fi if test -n "$ac_unrecognized_opts"; then case $enable_option_checking in no) ;; fatal) as_fn_error $? "unrecognized options: $ac_unrecognized_opts" ;; *) $as_echo "$as_me: WARNING: unrecognized options: $ac_unrecognized_opts" >&2 ;; esac fi # Check all directory arguments for consistency. for ac_var in exec_prefix prefix bindir sbindir libexecdir datarootdir \ datadir sysconfdir sharedstatedir localstatedir includedir \ oldincludedir docdir infodir htmldir dvidir pdfdir psdir \ libdir localedir mandir do eval ac_val=\$$ac_var # Remove trailing slashes. 
case $ac_val in */ ) ac_val=`expr "X$ac_val" : 'X\(.*[^/]\)' \| "X$ac_val" : 'X\(.*\)'` eval $ac_var=\$ac_val;; esac # Be sure to have absolute directory names. case $ac_val in [\\/$]* | ?:[\\/]* ) continue;; NONE | '' ) case $ac_var in *prefix ) continue;; esac;; esac as_fn_error $? "expected an absolute directory name for --$ac_var: $ac_val" done # There might be people who depend on the old broken behavior: `$host' # used to hold the argument of --host etc. # FIXME: To remove some day. build=$build_alias host=$host_alias target=$target_alias # FIXME: To remove some day. if test "x$host_alias" != x; then if test "x$build_alias" = x; then cross_compiling=maybe elif test "x$build_alias" != "x$host_alias"; then cross_compiling=yes fi fi ac_tool_prefix= test -n "$host_alias" && ac_tool_prefix=$host_alias- test "$silent" = yes && exec 6>/dev/null ac_pwd=`pwd` && test -n "$ac_pwd" && ac_ls_di=`ls -di .` && ac_pwd_ls_di=`cd "$ac_pwd" && ls -di .` || as_fn_error $? "working directory cannot be determined" test "X$ac_ls_di" = "X$ac_pwd_ls_di" || as_fn_error $? "pwd does not report name of working directory" # Find the source files, if location was not specified. if test -z "$srcdir"; then ac_srcdir_defaulted=yes # Try the directory containing this script, then the parent directory. ac_confdir=`$as_dirname -- "$as_myself" || $as_expr X"$as_myself" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \ X"$as_myself" : 'X\(//\)[^/]' \| \ X"$as_myself" : 'X\(//\)$' \| \ X"$as_myself" : 'X\(/\)' \| . 2>/dev/null || $as_echo X"$as_myself" | sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{ s//\1/ q } /^X\(\/\/\)[^/].*/{ s//\1/ q } /^X\(\/\/\)$/{ s//\1/ q } /^X\(\/\).*/{ s//\1/ q } s/.*/./; q'` srcdir=$ac_confdir if test ! -r "$srcdir/$ac_unique_file"; then srcdir=.. fi else ac_srcdir_defaulted=no fi if test ! -r "$srcdir/$ac_unique_file"; then test "$ac_srcdir_defaulted" = yes && srcdir="$ac_confdir or .." as_fn_error $? 
"cannot find sources ($ac_unique_file) in $srcdir" fi ac_msg="sources are in $srcdir, but \`cd $srcdir' does not work" ac_abs_confdir=`( cd "$srcdir" && test -r "./$ac_unique_file" || as_fn_error $? "$ac_msg" pwd)` # When building in place, set srcdir=. if test "$ac_abs_confdir" = "$ac_pwd"; then srcdir=. fi # Remove unnecessary trailing slashes from srcdir. # Double slashes in file names in object file debugging info # mess up M-x gdb in Emacs. case $srcdir in */) srcdir=`expr "X$srcdir" : 'X\(.*[^/]\)' \| "X$srcdir" : 'X\(.*\)'`;; esac for ac_var in $ac_precious_vars; do eval ac_env_${ac_var}_set=\${${ac_var}+set} eval ac_env_${ac_var}_value=\$${ac_var} eval ac_cv_env_${ac_var}_set=\${${ac_var}+set} eval ac_cv_env_${ac_var}_value=\$${ac_var} done # # Report the --help message. # if test "$ac_init_help" = "long"; then # Omit some internal or obsolete options to make the list less imposing. # This message is too long to be a string in the A/UX 3.1 sh. cat <<_ACEOF \`configure' configures sleuthkit 4.11.1 to adapt to many kinds of systems. Usage: $0 [OPTION]... [VAR=VALUE]... To assign environment variables (e.g., CC, CFLAGS...), specify them as VAR=VALUE. See below for descriptions of some of the useful variables. Defaults for the options are specified in brackets. Configuration: -h, --help display this help and exit --help=short display options specific to this package --help=recursive display the short help of all the included packages -V, --version display version information and exit -q, --quiet, --silent do not print \`checking ...' 
messages --cache-file=FILE cache test results in FILE [disabled] -C, --config-cache alias for \`--cache-file=config.cache' -n, --no-create do not create output files --srcdir=DIR find the sources in DIR [configure dir or \`..'] Installation directories: --prefix=PREFIX install architecture-independent files in PREFIX [$ac_default_prefix] --exec-prefix=EPREFIX install architecture-dependent files in EPREFIX [PREFIX] By default, \`make install' will install all the files in \`$ac_default_prefix/bin', \`$ac_default_prefix/lib' etc. You can specify an installation prefix other than \`$ac_default_prefix' using \`--prefix', for instance \`--prefix=\$HOME'. For better control, use the options below. Fine tuning of the installation directories: --bindir=DIR user executables [EPREFIX/bin] --sbindir=DIR system admin executables [EPREFIX/sbin] --libexecdir=DIR program executables [EPREFIX/libexec] --sysconfdir=DIR read-only single-machine data [PREFIX/etc] --sharedstatedir=DIR modifiable architecture-independent data [PREFIX/com] --localstatedir=DIR modifiable single-machine data [PREFIX/var] --libdir=DIR object code libraries [EPREFIX/lib] --includedir=DIR C header files [PREFIX/include] --oldincludedir=DIR C header files for non-gcc [/usr/include] --datarootdir=DIR read-only arch.-independent data root [PREFIX/share] --datadir=DIR read-only architecture-independent data [DATAROOTDIR] --infodir=DIR info documentation [DATAROOTDIR/info] --localedir=DIR locale-dependent data [DATAROOTDIR/locale] --mandir=DIR man documentation [DATAROOTDIR/man] --docdir=DIR documentation root [DATAROOTDIR/doc/sleuthkit] --htmldir=DIR html documentation [DOCDIR] --dvidir=DIR dvi documentation [DOCDIR] --pdfdir=DIR pdf documentation [DOCDIR] --psdir=DIR ps documentation [DOCDIR] _ACEOF cat <<\_ACEOF Program names: --program-prefix=PREFIX prepend PREFIX to installed program names --program-suffix=SUFFIX append SUFFIX to installed program names --program-transform-name=PROGRAM run sed PROGRAM on 
installed program names System types: --build=BUILD configure for building on BUILD [guessed] --host=HOST cross-compile to build programs to run on HOST [BUILD] _ACEOF fi if test -n "$ac_init_help"; then case $ac_init_help in short | recursive ) echo "Configuration of sleuthkit 4.11.1:";; esac cat <<\_ACEOF Optional Features: --disable-option-checking ignore unrecognized --enable/--with options --disable-FEATURE do not include FEATURE (same as --enable-FEATURE=no) --enable-FEATURE[=ARG] include FEATURE [ARG=yes] --enable-silent-rules less verbose build output (undo: "make V=1") --disable-silent-rules verbose build output (undo: "make V=0") --enable-shared[=PKGS] build shared libraries [default=yes] --enable-static[=PKGS] build static libraries [default=yes] --enable-fast-install[=PKGS] optimize for fast installation [default=yes] --enable-dependency-tracking do not reject slow dependency extractors --disable-dependency-tracking speeds up one-time build --disable-libtool-lock avoid locking (might break parallel builds) --enable-maintainer-mode enable make rules and dependencies not useful (and sometimes confusing) to the casual installer --disable-largefile omit support for large files --disable-multithreading Build without multithreading support --disable-java Do not build the java bindings or jar file --disable-cppunit Build without cppunit tests --enable-offline Turn on offline mode Optional Packages: --with-PACKAGE[=ARG] use PACKAGE [ARG=yes] --without-PACKAGE do not use PACKAGE (same as --with-PACKAGE=no) --with-cppunit-prefix=PFX Prefix where CppUnit is installed (optional) --with-cppunit-exec-prefix=PFX Exec prefix where CppUnit is installed (optional) --with-pic[=PKGS] try to use only PIC/non-PIC objects [default=use both] --with-aix-soname=aix|svr4|both shared library versioning (aka "SONAME") variant to provide on AIX, [default=aix]. 
--with-gnu-ld assume the C compiler uses GNU ld [default=no] --with-sysroot[=DIR] Search for dependent libraries within DIR (or the compiler's sysroot if not specified). --without-afflib Do not use afflib even if it is installed --with-afflib=dir Specify that afflib is installed in directory 'dir' --without-zlib Do not use zlib even if it is installed --with-zlib=dir Specify that zlib is installed in directory 'dir' --without-libewf Do not use libewf even if it is installed --with-libewf=dir Specify that libewf is installed in directory 'dir' --without-libvhdi Do not use libvhdi even if it is installed --with-libvhdi=dir Specify that libvhdi is installed in directory 'dir' --without-libvmdk Do not use libvmdk even if it is installed --with-libvmdk=dir Specify that libvmdk is installed in directory 'dir' Some influential environment variables: CC C compiler command CFLAGS C compiler flags LDFLAGS linker flags, e.g. -L if you have libraries in a nonstandard directory LIBS libraries to pass to the linker, e.g. -l CPPFLAGS (Objective) C/C++ preprocessor flags, e.g. -I if you have headers in a nonstandard directory LT_SYS_LIBRARY_PATH User-defined run-time library search path. 
CPP C preprocessor CXX C++ compiler command CXXFLAGS C++ compiler flags CXXCPP C++ preprocessor PKG_CONFIG path to pkg-config utility PKG_CONFIG_PATH directories to add to pkg-config's search path PKG_CONFIG_LIBDIR path overriding pkg-config's built-in search path SQLITE3_CFLAGS C compiler flags for SQLITE3, overriding pkg-config SQLITE3_LIBS linker flags for SQLITE3, overriding pkg-config ZLIB_CFLAGS C compiler flags for ZLIB, overriding pkg-config ZLIB_LIBS linker flags for ZLIB, overriding pkg-config EWF_CFLAGS C compiler flags for EWF, overriding pkg-config EWF_LIBS linker flags for EWF, overriding pkg-config VHDI_CFLAGS C compiler flags for VHDI, overriding pkg-config VHDI_LIBS linker flags for VHDI, overriding pkg-config VMDK_CFLAGS C compiler flags for VMDK, overriding pkg-config VMDK_LIBS linker flags for VMDK, overriding pkg-config CPPUNIT_CFLAGS C compiler flags for CPPUNIT, overriding pkg-config CPPUNIT_LIBS linker flags for CPPUNIT, overriding pkg-config Use these variables to override the choices made by `configure' or to help it to find libraries and programs with nonstandard names/locations. Report bugs to the package provider. _ACEOF ac_status=$? fi if test "$ac_init_help" = "recursive"; then # If there are subdirs, report their specific --help. for ac_dir in : $ac_subdirs_all; do test "x$ac_dir" = x: && continue test -d "$ac_dir" || { cd "$srcdir" && ac_pwd=`pwd` && srcdir=. && test -d "$ac_dir"; } || continue ac_builddir=. case "$ac_dir" in .) ac_dir_suffix= ac_top_builddir_sub=. ac_top_build_prefix= ;; *) ac_dir_suffix=/`$as_echo "$ac_dir" | sed 's|^\.[\\/]||'` # A ".." for each directory in $ac_dir_suffix. ac_top_builddir_sub=`$as_echo "$ac_dir_suffix" | sed 's|/[^\\/]*|/..|g;s|/||'` case $ac_top_builddir_sub in "") ac_top_builddir_sub=. 
ac_top_build_prefix= ;; *) ac_top_build_prefix=$ac_top_builddir_sub/ ;; esac ;; esac ac_abs_top_builddir=$ac_pwd ac_abs_builddir=$ac_pwd$ac_dir_suffix # for backward compatibility: ac_top_builddir=$ac_top_build_prefix case $srcdir in .) # We are building in place. ac_srcdir=. ac_top_srcdir=$ac_top_builddir_sub ac_abs_top_srcdir=$ac_pwd ;; [\\/]* | ?:[\\/]* ) # Absolute name. ac_srcdir=$srcdir$ac_dir_suffix; ac_top_srcdir=$srcdir ac_abs_top_srcdir=$srcdir ;; *) # Relative name. ac_srcdir=$ac_top_build_prefix$srcdir$ac_dir_suffix ac_top_srcdir=$ac_top_build_prefix$srcdir ac_abs_top_srcdir=$ac_pwd/$srcdir ;; esac ac_abs_srcdir=$ac_abs_top_srcdir$ac_dir_suffix cd "$ac_dir" || { ac_status=$?; continue; } # Check for guested configure. if test -f "$ac_srcdir/configure.gnu"; then echo && $SHELL "$ac_srcdir/configure.gnu" --help=recursive elif test -f "$ac_srcdir/configure"; then echo && $SHELL "$ac_srcdir/configure" --help=recursive else $as_echo "$as_me: WARNING: no configuration information is in $ac_dir" >&2 fi || ac_status=$? cd "$ac_pwd" || { ac_status=$?; break; } done fi test -n "$ac_init_help" && exit $ac_status if $ac_init_version; then cat <<\_ACEOF sleuthkit configure 4.11.1 generated by GNU Autoconf 2.69 Copyright (C) 2012 Free Software Foundation, Inc. This configure script is free software; the Free Software Foundation gives unlimited permission to copy, distribute and modify it. _ACEOF exit fi ## ------------------------ ## ## Autoconf initialization. ## ## ------------------------ ## # ac_fn_c_try_compile LINENO # -------------------------- # Try to compile conftest.$ac_ext, and return whether this succeeded. 
ac_fn_c_try_compile () { as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack rm -f conftest.$ac_objext if { { ac_try="$ac_compile" case "(($ac_try" in *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; *) ac_try_echo=$ac_try;; esac eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" $as_echo "$ac_try_echo"; } >&5 (eval "$ac_compile") 2>conftest.err ac_status=$? if test -s conftest.err; then grep -v '^ *+' conftest.err >conftest.er1 cat conftest.er1 >&5 mv -f conftest.er1 conftest.err fi $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; } && { test -z "$ac_c_werror_flag" || test ! -s conftest.err } && test -s conftest.$ac_objext; then : ac_retval=0 else $as_echo "$as_me: failed program was:" >&5 sed 's/^/| /' conftest.$ac_ext >&5 ac_retval=1 fi eval $as_lineno_stack; ${as_lineno_stack:+:} unset as_lineno as_fn_set_status $ac_retval } # ac_fn_c_try_compile # ac_fn_c_try_link LINENO # ----------------------- # Try to link conftest.$ac_ext, and return whether this succeeded. ac_fn_c_try_link () { as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack rm -f conftest.$ac_objext conftest$ac_exeext if { { ac_try="$ac_link" case "(($ac_try" in *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; *) ac_try_echo=$ac_try;; esac eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" $as_echo "$ac_try_echo"; } >&5 (eval "$ac_link") 2>conftest.err ac_status=$? if test -s conftest.err; then grep -v '^ *+' conftest.err >conftest.er1 cat conftest.er1 >&5 mv -f conftest.er1 conftest.err fi $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; } && { test -z "$ac_c_werror_flag" || test ! 
-s conftest.err } && test -s conftest$ac_exeext && { test "$cross_compiling" = yes || test -x conftest$ac_exeext }; then : ac_retval=0 else $as_echo "$as_me: failed program was:" >&5 sed 's/^/| /' conftest.$ac_ext >&5 ac_retval=1 fi # Delete the IPA/IPO (Inter Procedural Analysis/Optimization) information # created by the PGI compiler (conftest_ipa8_conftest.oo), as it would # interfere with the next link command; also delete a directory that is # left behind by Apple's compiler. We do this before executing the actions. rm -rf conftest.dSYM conftest_ipa8_conftest.oo eval $as_lineno_stack; ${as_lineno_stack:+:} unset as_lineno as_fn_set_status $ac_retval } # ac_fn_c_try_link # ac_fn_c_check_header_compile LINENO HEADER VAR INCLUDES # ------------------------------------------------------- # Tests whether HEADER exists and can be compiled using the include files in # INCLUDES, setting the cache variable VAR accordingly. ac_fn_c_check_header_compile () { as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $2" >&5 $as_echo_n "checking for $2... " >&6; } if eval \${$3+:} false; then : $as_echo_n "(cached) " >&6 else cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ $4 #include <$2> _ACEOF if ac_fn_c_try_compile "$LINENO"; then : eval "$3=yes" else eval "$3=no" fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext fi eval ac_res=\$$3 { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5 $as_echo "$ac_res" >&6; } eval $as_lineno_stack; ${as_lineno_stack:+:} unset as_lineno } # ac_fn_c_check_header_compile # ac_fn_c_try_cpp LINENO # ---------------------- # Try to preprocess conftest.$ac_ext, and return whether this succeeded. 
ac_fn_c_try_cpp () { as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack if { { ac_try="$ac_cpp conftest.$ac_ext" case "(($ac_try" in *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; *) ac_try_echo=$ac_try;; esac eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" $as_echo "$ac_try_echo"; } >&5 (eval "$ac_cpp conftest.$ac_ext") 2>conftest.err ac_status=$? if test -s conftest.err; then grep -v '^ *+' conftest.err >conftest.er1 cat conftest.er1 >&5 mv -f conftest.er1 conftest.err fi $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; } > conftest.i && { test -z "$ac_c_preproc_warn_flag$ac_c_werror_flag" || test ! -s conftest.err }; then : ac_retval=0 else $as_echo "$as_me: failed program was:" >&5 sed 's/^/| /' conftest.$ac_ext >&5 ac_retval=1 fi eval $as_lineno_stack; ${as_lineno_stack:+:} unset as_lineno as_fn_set_status $ac_retval } # ac_fn_c_try_cpp # ac_fn_c_try_run LINENO # ---------------------- # Try to link conftest.$ac_ext, and return whether this succeeded. Assumes # that executables *can* be run. ac_fn_c_try_run () { as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack if { { ac_try="$ac_link" case "(($ac_try" in *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; *) ac_try_echo=$ac_try;; esac eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" $as_echo "$ac_try_echo"; } >&5 (eval "$ac_link") 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; } && { ac_try='./conftest$ac_exeext' { { case "(($ac_try" in *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; *) ac_try_echo=$ac_try;; esac eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" $as_echo "$ac_try_echo"; } >&5 (eval "$ac_try") 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? 
= $ac_status" >&5 test $ac_status = 0; }; }; then : ac_retval=0 else $as_echo "$as_me: program exited with status $ac_status" >&5 $as_echo "$as_me: failed program was:" >&5 sed 's/^/| /' conftest.$ac_ext >&5 ac_retval=$ac_status fi rm -rf conftest.dSYM conftest_ipa8_conftest.oo eval $as_lineno_stack; ${as_lineno_stack:+:} unset as_lineno as_fn_set_status $ac_retval } # ac_fn_c_try_run # ac_fn_c_check_func LINENO FUNC VAR # ---------------------------------- # Tests whether FUNC exists, setting the cache variable VAR accordingly ac_fn_c_check_func () { as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $2" >&5 $as_echo_n "checking for $2... " >&6; } if eval \${$3+:} false; then : $as_echo_n "(cached) " >&6 else cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ /* Define $2 to an innocuous variant, in case declares $2. For example, HP-UX 11i declares gettimeofday. */ #define $2 innocuous_$2 /* System header to define __stub macros and hopefully few prototypes, which can conflict with char $2 (); below. Prefer to if __STDC__ is defined, since exists even on freestanding compilers. */ #ifdef __STDC__ # include #else # include #endif #undef $2 /* Override any GCC internal prototype to avoid an error. Use char because int might match the return type of a GCC builtin and then its argument prototype would still apply. */ #ifdef __cplusplus extern "C" #endif char $2 (); /* The GNU C library defines this for functions which it implements to always fail with ENOSYS. Some functions are actually named something starting with __ and the normal name is an alias. 
*/ #if defined __stub_$2 || defined __stub___$2 choke me #endif int main () { return $2 (); ; return 0; } _ACEOF if ac_fn_c_try_link "$LINENO"; then : eval "$3=yes" else eval "$3=no" fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext fi eval ac_res=\$$3 { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5 $as_echo "$ac_res" >&6; } eval $as_lineno_stack; ${as_lineno_stack:+:} unset as_lineno } # ac_fn_c_check_func # ac_fn_cxx_try_compile LINENO # ---------------------------- # Try to compile conftest.$ac_ext, and return whether this succeeded. ac_fn_cxx_try_compile () { as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack rm -f conftest.$ac_objext if { { ac_try="$ac_compile" case "(($ac_try" in *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; *) ac_try_echo=$ac_try;; esac eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" $as_echo "$ac_try_echo"; } >&5 (eval "$ac_compile") 2>conftest.err ac_status=$? if test -s conftest.err; then grep -v '^ *+' conftest.err >conftest.er1 cat conftest.er1 >&5 mv -f conftest.er1 conftest.err fi $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; } && { test -z "$ac_cxx_werror_flag" || test ! -s conftest.err } && test -s conftest.$ac_objext; then : ac_retval=0 else $as_echo "$as_me: failed program was:" >&5 sed 's/^/| /' conftest.$ac_ext >&5 ac_retval=1 fi eval $as_lineno_stack; ${as_lineno_stack:+:} unset as_lineno as_fn_set_status $ac_retval } # ac_fn_cxx_try_compile # ac_fn_cxx_try_cpp LINENO # ------------------------ # Try to preprocess conftest.$ac_ext, and return whether this succeeded. 
ac_fn_cxx_try_cpp () { as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack if { { ac_try="$ac_cpp conftest.$ac_ext" case "(($ac_try" in *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; *) ac_try_echo=$ac_try;; esac eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" $as_echo "$ac_try_echo"; } >&5 (eval "$ac_cpp conftest.$ac_ext") 2>conftest.err ac_status=$? if test -s conftest.err; then grep -v '^ *+' conftest.err >conftest.er1 cat conftest.er1 >&5 mv -f conftest.er1 conftest.err fi $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; } > conftest.i && { test -z "$ac_cxx_preproc_warn_flag$ac_cxx_werror_flag" || test ! -s conftest.err }; then : ac_retval=0 else $as_echo "$as_me: failed program was:" >&5 sed 's/^/| /' conftest.$ac_ext >&5 ac_retval=1 fi eval $as_lineno_stack; ${as_lineno_stack:+:} unset as_lineno as_fn_set_status $ac_retval } # ac_fn_cxx_try_cpp # ac_fn_cxx_try_link LINENO # ------------------------- # Try to link conftest.$ac_ext, and return whether this succeeded. ac_fn_cxx_try_link () { as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack rm -f conftest.$ac_objext conftest$ac_exeext if { { ac_try="$ac_link" case "(($ac_try" in *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; *) ac_try_echo=$ac_try;; esac eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" $as_echo "$ac_try_echo"; } >&5 (eval "$ac_link") 2>conftest.err ac_status=$? if test -s conftest.err; then grep -v '^ *+' conftest.err >conftest.er1 cat conftest.er1 >&5 mv -f conftest.er1 conftest.err fi $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; } && { test -z "$ac_cxx_werror_flag" || test ! 
-s conftest.err } && test -s conftest$ac_exeext && { test "$cross_compiling" = yes || test -x conftest$ac_exeext }; then : ac_retval=0 else $as_echo "$as_me: failed program was:" >&5 sed 's/^/| /' conftest.$ac_ext >&5 ac_retval=1 fi # Delete the IPA/IPO (Inter Procedural Analysis/Optimization) information # created by the PGI compiler (conftest_ipa8_conftest.oo), as it would # interfere with the next link command; also delete a directory that is # left behind by Apple's compiler. We do this before executing the actions. rm -rf conftest.dSYM conftest_ipa8_conftest.oo eval $as_lineno_stack; ${as_lineno_stack:+:} unset as_lineno as_fn_set_status $ac_retval } # ac_fn_cxx_try_link # ac_fn_c_check_header_mongrel LINENO HEADER VAR INCLUDES # ------------------------------------------------------- # Tests whether HEADER exists, giving a warning if it cannot be compiled using # the include files in INCLUDES and setting the cache variable VAR # accordingly. ac_fn_c_check_header_mongrel () { as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack if eval \${$3+:} false; then : { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $2" >&5 $as_echo_n "checking for $2... " >&6; } if eval \${$3+:} false; then : $as_echo_n "(cached) " >&6 fi eval ac_res=\$$3 { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5 $as_echo "$ac_res" >&6; } else # Is the header compilable? { $as_echo "$as_me:${as_lineno-$LINENO}: checking $2 usability" >&5 $as_echo_n "checking $2 usability... " >&6; } cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ $4 #include <$2> _ACEOF if ac_fn_c_try_compile "$LINENO"; then : ac_header_compiler=yes else ac_header_compiler=no fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_header_compiler" >&5 $as_echo "$ac_header_compiler" >&6; } # Is the header present? 
{ $as_echo "$as_me:${as_lineno-$LINENO}: checking $2 presence" >&5 $as_echo_n "checking $2 presence... " >&6; } cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ #include <$2> _ACEOF if ac_fn_c_try_cpp "$LINENO"; then : ac_header_preproc=yes else ac_header_preproc=no fi rm -f conftest.err conftest.i conftest.$ac_ext { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_header_preproc" >&5 $as_echo "$ac_header_preproc" >&6; } # So? What about this header? case $ac_header_compiler:$ac_header_preproc:$ac_c_preproc_warn_flag in #(( yes:no: ) { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $2: accepted by the compiler, rejected by the preprocessor!" >&5 $as_echo "$as_me: WARNING: $2: accepted by the compiler, rejected by the preprocessor!" >&2;} { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $2: proceeding with the compiler's result" >&5 $as_echo "$as_me: WARNING: $2: proceeding with the compiler's result" >&2;} ;; no:yes:* ) { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $2: present but cannot be compiled" >&5 $as_echo "$as_me: WARNING: $2: present but cannot be compiled" >&2;} { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $2: check for missing prerequisite headers?" >&5 $as_echo "$as_me: WARNING: $2: check for missing prerequisite headers?" >&2;} { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $2: see the Autoconf documentation" >&5 $as_echo "$as_me: WARNING: $2: see the Autoconf documentation" >&2;} { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $2: section \"Present But Cannot Be Compiled\"" >&5 $as_echo "$as_me: WARNING: $2: section \"Present But Cannot Be Compiled\"" >&2;} { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $2: proceeding with the compiler's result" >&5 $as_echo "$as_me: WARNING: $2: proceeding with the compiler's result" >&2;} ;; esac { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $2" >&5 $as_echo_n "checking for $2... 
" >&6; } if eval \${$3+:} false; then : $as_echo_n "(cached) " >&6 else eval "$3=\$ac_header_compiler" fi eval ac_res=\$$3 { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5 $as_echo "$ac_res" >&6; } fi eval $as_lineno_stack; ${as_lineno_stack:+:} unset as_lineno } # ac_fn_c_check_header_mongrel # ac_fn_c_check_type LINENO TYPE VAR INCLUDES # ------------------------------------------- # Tests whether TYPE exists after having included INCLUDES, setting cache # variable VAR accordingly. ac_fn_c_check_type () { as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $2" >&5 $as_echo_n "checking for $2... " >&6; } if eval \${$3+:} false; then : $as_echo_n "(cached) " >&6 else eval "$3=no" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ $4 int main () { if (sizeof ($2)) return 0; ; return 0; } _ACEOF if ac_fn_c_try_compile "$LINENO"; then : cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ $4 int main () { if (sizeof (($2))) return 0; ; return 0; } _ACEOF if ac_fn_c_try_compile "$LINENO"; then : else eval "$3=yes" fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext fi eval ac_res=\$$3 { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5 $as_echo "$ac_res" >&6; } eval $as_lineno_stack; ${as_lineno_stack:+:} unset as_lineno } # ac_fn_c_check_type cat >config.log <<_ACEOF This file contains any messages produced by compilers while running configure, to aid debugging if configure makes a mistake. It was created by sleuthkit $as_me 4.11.1, which was generated by GNU Autoconf 2.69. Invocation command line was $ $0 $@ _ACEOF exec 5>>config.log { cat <<_ASUNAME ## --------- ## ## Platform. 
## ## --------- ## hostname = `(hostname || uname -n) 2>/dev/null | sed 1q` uname -m = `(uname -m) 2>/dev/null || echo unknown` uname -r = `(uname -r) 2>/dev/null || echo unknown` uname -s = `(uname -s) 2>/dev/null || echo unknown` uname -v = `(uname -v) 2>/dev/null || echo unknown` /usr/bin/uname -p = `(/usr/bin/uname -p) 2>/dev/null || echo unknown` /bin/uname -X = `(/bin/uname -X) 2>/dev/null || echo unknown` /bin/arch = `(/bin/arch) 2>/dev/null || echo unknown` /usr/bin/arch -k = `(/usr/bin/arch -k) 2>/dev/null || echo unknown` /usr/convex/getsysinfo = `(/usr/convex/getsysinfo) 2>/dev/null || echo unknown` /usr/bin/hostinfo = `(/usr/bin/hostinfo) 2>/dev/null || echo unknown` /bin/machine = `(/bin/machine) 2>/dev/null || echo unknown` /usr/bin/oslevel = `(/usr/bin/oslevel) 2>/dev/null || echo unknown` /bin/universe = `(/bin/universe) 2>/dev/null || echo unknown` _ASUNAME as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. $as_echo "PATH: $as_dir" done IFS=$as_save_IFS } >&5 cat >&5 <<_ACEOF ## ----------- ## ## Core tests. ## ## ----------- ## _ACEOF # Keep a trace of the command line. # Strip out --no-create and --no-recursion so they do not pile up. # Strip out --silent because we don't want to record it for future runs. # Also quote any args containing shell meta-characters. # Make two passes to allow for proper duplicate-argument suppression. 
ac_configure_args= ac_configure_args0= ac_configure_args1= ac_must_keep_next=false for ac_pass in 1 2 do for ac_arg do case $ac_arg in -no-create | --no-c* | -n | -no-recursion | --no-r*) continue ;; -q | -quiet | --quiet | --quie | --qui | --qu | --q \ | -silent | --silent | --silen | --sile | --sil) continue ;; *\'*) ac_arg=`$as_echo "$ac_arg" | sed "s/'/'\\\\\\\\''/g"` ;; esac case $ac_pass in 1) as_fn_append ac_configure_args0 " '$ac_arg'" ;; 2) as_fn_append ac_configure_args1 " '$ac_arg'" if test $ac_must_keep_next = true; then ac_must_keep_next=false # Got value, back to normal. else case $ac_arg in *=* | --config-cache | -C | -disable-* | --disable-* \ | -enable-* | --enable-* | -gas | --g* | -nfp | --nf* \ | -q | -quiet | --q* | -silent | --sil* | -v | -verb* \ | -with-* | --with-* | -without-* | --without-* | --x) case "$ac_configure_args0 " in "$ac_configure_args1"*" '$ac_arg' "* ) continue ;; esac ;; -* ) ac_must_keep_next=true ;; esac fi as_fn_append ac_configure_args " '$ac_arg'" ;; esac done done { ac_configure_args0=; unset ac_configure_args0;} { ac_configure_args1=; unset ac_configure_args1;} # When interrupted or exit'd, cleanup temporary files, and complete # config.log. We remove comments because anyway the quotes in there # would cause problems or look ugly. # WARNING: Use '\'' to represent an apostrophe within the trap. # WARNING: Do not start the trap code with a newline, due to a FreeBSD 4.0 bug. trap 'exit_status=$? # Save into config.log some information that might help in debugging. { echo $as_echo "## ---------------- ## ## Cache variables. 
## ## ---------------- ##" echo # The following way of writing the cache mishandles newlines in values, ( for ac_var in `(set) 2>&1 | sed -n '\''s/^\([a-zA-Z_][a-zA-Z0-9_]*\)=.*/\1/p'\''`; do eval ac_val=\$$ac_var case $ac_val in #( *${as_nl}*) case $ac_var in #( *_cv_*) { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: cache variable $ac_var contains a newline" >&5 $as_echo "$as_me: WARNING: cache variable $ac_var contains a newline" >&2;} ;; esac case $ac_var in #( _ | IFS | as_nl) ;; #( BASH_ARGV | BASH_SOURCE) eval $ac_var= ;; #( *) { eval $ac_var=; unset $ac_var;} ;; esac ;; esac done (set) 2>&1 | case $as_nl`(ac_space='\'' '\''; set) 2>&1` in #( *${as_nl}ac_space=\ *) sed -n \ "s/'\''/'\''\\\\'\'''\''/g; s/^\\([_$as_cr_alnum]*_cv_[_$as_cr_alnum]*\\)=\\(.*\\)/\\1='\''\\2'\''/p" ;; #( *) sed -n "/^[_$as_cr_alnum]*_cv_[_$as_cr_alnum]*=/p" ;; esac | sort ) echo $as_echo "## ----------------- ## ## Output variables. ## ## ----------------- ##" echo for ac_var in $ac_subst_vars do eval ac_val=\$$ac_var case $ac_val in *\'\''*) ac_val=`$as_echo "$ac_val" | sed "s/'\''/'\''\\\\\\\\'\'''\''/g"`;; esac $as_echo "$ac_var='\''$ac_val'\''" done | sort echo if test -n "$ac_subst_files"; then $as_echo "## ------------------- ## ## File substitutions. ## ## ------------------- ##" echo for ac_var in $ac_subst_files do eval ac_val=\$$ac_var case $ac_val in *\'\''*) ac_val=`$as_echo "$ac_val" | sed "s/'\''/'\''\\\\\\\\'\'''\''/g"`;; esac $as_echo "$ac_var='\''$ac_val'\''" done | sort echo fi if test -s confdefs.h; then $as_echo "## ----------- ## ## confdefs.h. 
## ## ----------- ##" echo cat confdefs.h echo fi test "$ac_signal" != 0 && $as_echo "$as_me: caught signal $ac_signal" $as_echo "$as_me: exit $exit_status" } >&5 rm -f core *.core core.conftest.* && rm -f -r conftest* confdefs* conf$$* $ac_clean_files && exit $exit_status ' 0 for ac_signal in 1 2 13 15; do trap 'ac_signal='$ac_signal'; as_fn_exit 1' $ac_signal done ac_signal=0 # confdefs.h avoids OS command line length limits that DEFS can exceed. rm -f -r conftest* confdefs.h $as_echo "/* confdefs.h */" > confdefs.h # Predefined preprocessor variables. cat >>confdefs.h <<_ACEOF #define PACKAGE_NAME "$PACKAGE_NAME" _ACEOF cat >>confdefs.h <<_ACEOF #define PACKAGE_TARNAME "$PACKAGE_TARNAME" _ACEOF cat >>confdefs.h <<_ACEOF #define PACKAGE_VERSION "$PACKAGE_VERSION" _ACEOF cat >>confdefs.h <<_ACEOF #define PACKAGE_STRING "$PACKAGE_STRING" _ACEOF cat >>confdefs.h <<_ACEOF #define PACKAGE_BUGREPORT "$PACKAGE_BUGREPORT" _ACEOF cat >>confdefs.h <<_ACEOF #define PACKAGE_URL "$PACKAGE_URL" _ACEOF # Let the site file select an alternate cache file if it wants to. # Prefer an explicitly selected file to automatically selected ones. ac_site_file1=NONE ac_site_file2=NONE if test -n "$CONFIG_SITE"; then # We do not want a PATH search for config.site. case $CONFIG_SITE in #(( -*) ac_site_file1=./$CONFIG_SITE;; */*) ac_site_file1=$CONFIG_SITE;; *) ac_site_file1=./$CONFIG_SITE;; esac elif test "x$prefix" != xNONE; then ac_site_file1=$prefix/share/config.site ac_site_file2=$prefix/etc/config.site else ac_site_file1=$ac_default_prefix/share/config.site ac_site_file2=$ac_default_prefix/etc/config.site fi for ac_site_file in "$ac_site_file1" "$ac_site_file2" do test "x$ac_site_file" = xNONE && continue if test /dev/null != "$ac_site_file" && test -r "$ac_site_file"; then { $as_echo "$as_me:${as_lineno-$LINENO}: loading site script $ac_site_file" >&5 $as_echo "$as_me: loading site script $ac_site_file" >&6;} sed 's/^/| /' "$ac_site_file" >&5 . 
"$ac_site_file" \ || { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5 $as_echo "$as_me: error: in \`$ac_pwd':" >&2;} as_fn_error $? "failed to load site script $ac_site_file See \`config.log' for more details" "$LINENO" 5; } fi done if test -r "$cache_file"; then # Some versions of bash will fail to source /dev/null (special files # actually), so we avoid doing that. DJGPP emulates it as a regular file. if test /dev/null != "$cache_file" && test -f "$cache_file"; then { $as_echo "$as_me:${as_lineno-$LINENO}: loading cache $cache_file" >&5 $as_echo "$as_me: loading cache $cache_file" >&6;} case $cache_file in [\\/]* | ?:[\\/]* ) . "$cache_file";; *) . "./$cache_file";; esac fi else { $as_echo "$as_me:${as_lineno-$LINENO}: creating cache $cache_file" >&5 $as_echo "$as_me: creating cache $cache_file" >&6;} >$cache_file fi as_fn_append ac_header_list " utime.h" # Check that the precious variables saved in the cache have kept the same # value. ac_cache_corrupted=false for ac_var in $ac_precious_vars; do eval ac_old_set=\$ac_cv_env_${ac_var}_set eval ac_new_set=\$ac_env_${ac_var}_set eval ac_old_val=\$ac_cv_env_${ac_var}_value eval ac_new_val=\$ac_env_${ac_var}_value case $ac_old_set,$ac_new_set in set,) { $as_echo "$as_me:${as_lineno-$LINENO}: error: \`$ac_var' was set to \`$ac_old_val' in the previous run" >&5 $as_echo "$as_me: error: \`$ac_var' was set to \`$ac_old_val' in the previous run" >&2;} ac_cache_corrupted=: ;; ,set) { $as_echo "$as_me:${as_lineno-$LINENO}: error: \`$ac_var' was not set in the previous run" >&5 $as_echo "$as_me: error: \`$ac_var' was not set in the previous run" >&2;} ac_cache_corrupted=: ;; ,);; *) if test "x$ac_old_val" != "x$ac_new_val"; then # differences in whitespace do not lead to failure. 
ac_old_val_w=`echo x $ac_old_val` ac_new_val_w=`echo x $ac_new_val` if test "$ac_old_val_w" != "$ac_new_val_w"; then { $as_echo "$as_me:${as_lineno-$LINENO}: error: \`$ac_var' has changed since the previous run:" >&5 $as_echo "$as_me: error: \`$ac_var' has changed since the previous run:" >&2;} ac_cache_corrupted=: else { $as_echo "$as_me:${as_lineno-$LINENO}: warning: ignoring whitespace changes in \`$ac_var' since the previous run:" >&5 $as_echo "$as_me: warning: ignoring whitespace changes in \`$ac_var' since the previous run:" >&2;} eval $ac_var=\$ac_old_val fi { $as_echo "$as_me:${as_lineno-$LINENO}: former value: \`$ac_old_val'" >&5 $as_echo "$as_me: former value: \`$ac_old_val'" >&2;} { $as_echo "$as_me:${as_lineno-$LINENO}: current value: \`$ac_new_val'" >&5 $as_echo "$as_me: current value: \`$ac_new_val'" >&2;} fi;; esac # Pass precious variables to config.status. if test "$ac_new_set" = set; then case $ac_new_val in *\'*) ac_arg=$ac_var=`$as_echo "$ac_new_val" | sed "s/'/'\\\\\\\\''/g"` ;; *) ac_arg=$ac_var=$ac_new_val ;; esac case " $ac_configure_args " in *" '$ac_arg' "*) ;; # Avoid dups. Use of quotes ensures accuracy. *) as_fn_append ac_configure_args " '$ac_arg'" ;; esac fi done if $ac_cache_corrupted; then { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5 $as_echo "$as_me: error: in \`$ac_pwd':" >&2;} { $as_echo "$as_me:${as_lineno-$LINENO}: error: changes in the environment can compromise the build" >&5 $as_echo "$as_me: error: changes in the environment can compromise the build" >&2;} as_fn_error $? "run \`make distclean' and/or \`rm $cache_file' and start over" "$LINENO" 5 fi ## -------------------- ## ## Main body of script. 
## ## -------------------- ## ac_ext=c ac_cpp='$CPP $CPPFLAGS' ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' ac_compiler_gnu=$ac_cv_c_compiler_gnu # =========================================================================== # http://www.gnu.org/software/autoconf-archive/ax_pthread.html # =========================================================================== # # SYNOPSIS # # AX_PTHREAD([ACTION-IF-FOUND[, ACTION-IF-NOT-FOUND]]) # # DESCRIPTION # # This macro figures out how to build C programs using POSIX threads. It # sets the PTHREAD_LIBS output variable to the threads library and linker # flags, and the PTHREAD_CFLAGS output variable to any special C compiler # flags that are needed. (The user can also force certain compiler # flags/libs to be tested by setting these environment variables.) # # Also sets PTHREAD_CC to any special C compiler that is needed for # multi-threaded programs (defaults to the value of CC otherwise). (This # is necessary on AIX to use the special cc_r compiler alias.) # # NOTE: You are assumed to not only compile your program with these flags, # but also link it with them as well. e.g. you should link with # $PTHREAD_CC $CFLAGS $PTHREAD_CFLAGS $LDFLAGS ... $PTHREAD_LIBS $LIBS # # If you are only building threads programs, you may wish to use these # variables in your default LIBS, CFLAGS, and CC: # # LIBS="$PTHREAD_LIBS $LIBS" # CFLAGS="$CFLAGS $PTHREAD_CFLAGS" # CC="$PTHREAD_CC" # # In addition, if the PTHREAD_CREATE_JOINABLE thread-attribute constant # has a nonstandard name, defines PTHREAD_CREATE_JOINABLE to that name # (e.g. PTHREAD_CREATE_UNDETACHED on AIX). # # ACTION-IF-FOUND is a list of shell commands to run if a threads library # is found, and ACTION-IF-NOT-FOUND is a list of commands to run it if it # is not found. If ACTION-IF-FOUND is not specified, the default action # will define HAVE_PTHREAD. 
# # Please let the authors know if this macro fails on any platform, or if # you have any other suggestions or comments. This macro was based on work # by SGJ on autoconf scripts for FFTW (http://www.fftw.org/) (with help # from M. Frigo), as well as ac_pthread and hb_pthread macros posted by # Alejandro Forero Cuervo to the autoconf macro repository. We are also # grateful for the helpful feedback of numerous users. # # LICENSE # # Copyright (c) 2008 Steven G. Johnson # # This program is free software: you can redistribute it and/or modify it # under the terms of the GNU General Public License as published by the # Free Software Foundation, either version 3 of the License, or (at your # option) any later version. # # This program is distributed in the hope that it will be useful, but # WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General # Public License for more details. # # You should have received a copy of the GNU General Public License along # with this program. If not, see . # # As a special exception, the respective Autoconf Macro's copyright owner # gives unlimited permission to copy, distribute and modify the configure # scripts that are the output of Autoconf when processing the Macro. You # need not follow the terms of the GNU General Public License when using # or distributing such scripts, even though portions of the text of the # Macro appear in them. The GNU General Public License (GPL) does govern # all other use of the material that constitutes the Autoconf Macro. # # This special exception to the GPL applies to versions of the Autoconf # Macro released by the Autoconf Archive. When you make and distribute a # modified version of the Autoconf Macro, you may extend this special # exception to the GPL to apply to your modified version as well. #serial 11 # This is what autoupdate's m4 run will expand. 
It fires # the warning (with _au_warn_XXX), outputs it into the # updated configure.ac (with AC_DIAGNOSE), and then outputs # the replacement expansion. # This is an auxiliary macro that is also run when # autoupdate runs m4. It simply calls m4_warning, but # we need a wrapper so that each warning is emitted only # once. We break the quoting in m4_warning's argument in # order to expand this macro's arguments, not AU_DEFUN's. # Finally, this is the expansion that is picked up by # autoconf. It tells the user to run autoupdate, and # then outputs the replacement expansion. We do not care # about autoupdate's warning because that contains # information on what to do *after* running autoupdate. # =========================================================================== # http://www.gnu.org/software/autoconf-archive/ax_jni_include_dir.html # =========================================================================== # # SYNOPSIS # # AX_JNI_INCLUDE_DIR # # DESCRIPTION # # AX_JNI_INCLUDE_DIR finds include directories needed for compiling # programs using the JNI interface. # # JNI include directories are usually in the Java distribution. This is # deduced from the value of $JAVA_HOME, $JAVAC, or the path to "javac", in # that order. When this macro completes, a list of directories is left in # the variable JNI_INCLUDE_DIRS. # # Example usage follows: # # AX_JNI_INCLUDE_DIR # # for JNI_INCLUDE_DIR in $JNI_INCLUDE_DIRS # do # CPPFLAGS="$CPPFLAGS -I$JNI_INCLUDE_DIR" # done # # If you want to force a specific compiler: # # - at the configure.in level, set JAVAC=yourcompiler before calling # AX_JNI_INCLUDE_DIR # # - at the configure level, setenv JAVAC # # Note: This macro can work with the autoconf M4 macros for Java programs. # This particular macro is not part of the original set of macros. 
# # LICENSE # # Copyright (c) 2008 Don Anderson # # Copying and distribution of this file, with or without modification, are # permitted in any medium without royalty provided the copyright notice # and this notice are preserved. This file is offered as-is, without any # warranty. # # TSK: This has been modifed to not error out if JNI things cannot be resolved #serial 11 # This is what autoupdate's m4 run will expand. It fires # the warning (with _au_warn_XXX), outputs it into the # updated configure.ac (with AC_DIAGNOSE), and then outputs # the replacement expansion. # This is an auxiliary macro that is also run when # autoupdate runs m4. It simply calls m4_warning, but # we need a wrapper so that each warning is emitted only # once. We break the quoting in m4_warning's argument in # order to expand this macro's arguments, not AU_DEFUN's. # Finally, this is the expansion that is picked up by # autoconf. It tells the user to run autoupdate, and # then outputs the replacement expansion. We do not care # about autoupdate's warning because that contains # information on what to do *after* running autoupdate. # _ACJNI_FOLLOW_SYMLINKS # Follows symbolic links on , # finally setting variable _ACJNI_FOLLOWED # ---------------------------------------- # _ACJNI # =========================================================================== # https://www.gnu.org/software/autoconf-archive/ax_cxx_compile_stdcxx.html # =========================================================================== # # SYNOPSIS # # AX_CXX_COMPILE_STDCXX(VERSION, [ext|noext], [mandatory|optional]) # # DESCRIPTION # # Check for baseline language coverage in the compiler for the specified # version of the C++ standard. If necessary, add switches to CXX and # CXXCPP to enable support. VERSION may be '11' (for the C++11 standard) # or '14' (for the C++14 standard). # # The second argument, if specified, indicates whether you insist on an # extended mode (e.g. 
-std=gnu++11) or a strict conformance mode (e.g. # -std=c++11). If neither is specified, you get whatever works, with # preference for an extended mode. # # The third argument, if specified 'mandatory' or if left unspecified, # indicates that baseline support for the specified C++ standard is # required and that the macro should error out if no mode with that # support is found. If specified 'optional', then configuration proceeds # regardless, after defining HAVE_CXX${VERSION} if and only if a # supporting mode is found. # # LICENSE # # Copyright (c) 2008 Benjamin Kosnik # Copyright (c) 2012 Zack Weinberg # Copyright (c) 2013 Roy Stogner # Copyright (c) 2014, 2015 Google Inc.; contributed by Alexey Sokolov # Copyright (c) 2015 Paul Norman # Copyright (c) 2015 Moritz Klammler # Copyright (c) 2016, 2018 Krzesimir Nowak # Copyright (c) 2019 Enji Cooper # # Copying and distribution of this file, with or without modification, are # permitted in any medium without royalty provided the copyright notice # and this notice are preserved. This file is offered as-is, without any # warranty. #serial 11 ac_config_headers="$ac_config_headers tsk/tsk_config.h" ac_aux_dir= for ac_dir in config "$srcdir"/config; do if test -f "$ac_dir/install-sh"; then ac_aux_dir=$ac_dir ac_install_sh="$ac_aux_dir/install-sh -c" break elif test -f "$ac_dir/install.sh"; then ac_aux_dir=$ac_dir ac_install_sh="$ac_aux_dir/install.sh -c" break elif test -f "$ac_dir/shtool"; then ac_aux_dir=$ac_dir ac_install_sh="$ac_aux_dir/shtool install -c" break fi done if test -z "$ac_aux_dir"; then as_fn_error $? "cannot find install-sh, install.sh, or shtool in config \"$srcdir\"/config" "$LINENO" 5 fi # These three variables are undocumented and unsupported, # and are intended to be withdrawn in a future Autoconf release. # They can cause serious problems if a builder's source tree is in a directory # whose full name contains unusual characters. 
ac_config_guess="$SHELL $ac_aux_dir/config.guess" # Please don't use this var. ac_config_sub="$SHELL $ac_aux_dir/config.sub" # Please don't use this var. ac_configure="$SHELL $ac_aux_dir/configure" # Please don't use this var. am__api_version='1.15' # Find a good install program. We prefer a C program (faster), # so one script is as good as another. But avoid the broken or # incompatible versions: # SysV /etc/install, /usr/sbin/install # SunOS /usr/etc/install # IRIX /sbin/install # AIX /bin/install # AmigaOS /C/install, which installs bootblocks on floppy discs # AIX 4 /usr/bin/installbsd, which doesn't work without a -g flag # AFS /usr/afsws/bin/install, which mishandles nonexistent args # SVR4 /usr/ucb/install, which tries to use the nonexistent group "staff" # OS/2's system install, which has a completely different semantic # ./install, which can be erroneously created by make from ./install.sh. # Reject install programs that cannot install multiple files. { $as_echo "$as_me:${as_lineno-$LINENO}: checking for a BSD-compatible install" >&5 $as_echo_n "checking for a BSD-compatible install... " >&6; } if test -z "$INSTALL"; then if ${ac_cv_path_install+:} false; then : $as_echo_n "(cached) " >&6 else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. # Account for people who put trailing slashes in PATH elements. case $as_dir/ in #(( ./ | .// | /[cC]/* | \ /etc/* | /usr/sbin/* | /usr/etc/* | /sbin/* | /usr/afsws/bin/* | \ ?:[\\/]os2[\\/]install[\\/]* | ?:[\\/]OS2[\\/]INSTALL[\\/]* | \ /usr/ucb/* ) ;; *) # OSF1 and SCO ODT 3.0 have their own names for install. # Don't use installbsd from OSF since it installs stuff as root # by default. for ac_prog in ginstall scoinst install; do for ac_exec_ext in '' $ac_executable_extensions; do if as_fn_executable_p "$as_dir/$ac_prog$ac_exec_ext"; then if test $ac_prog = install && grep dspmsg "$as_dir/$ac_prog$ac_exec_ext" >/dev/null 2>&1; then # AIX install. 
It has an incompatible calling convention. : elif test $ac_prog = install && grep pwplus "$as_dir/$ac_prog$ac_exec_ext" >/dev/null 2>&1; then # program-specific install script used by HP pwplus--don't use. : else rm -rf conftest.one conftest.two conftest.dir echo one > conftest.one echo two > conftest.two mkdir conftest.dir if "$as_dir/$ac_prog$ac_exec_ext" -c conftest.one conftest.two "`pwd`/conftest.dir" && test -s conftest.one && test -s conftest.two && test -s conftest.dir/conftest.one && test -s conftest.dir/conftest.two then ac_cv_path_install="$as_dir/$ac_prog$ac_exec_ext -c" break 3 fi fi fi done done ;; esac done IFS=$as_save_IFS rm -rf conftest.one conftest.two conftest.dir fi if test "${ac_cv_path_install+set}" = set; then INSTALL=$ac_cv_path_install else # As a last resort, use the slow shell script. Don't cache a # value for INSTALL within a source directory, because that will # break other packages using the cache if that directory is # removed, or if the value is a relative name. INSTALL=$ac_install_sh fi fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $INSTALL" >&5 $as_echo "$INSTALL" >&6; } # Use test -z because SunOS4 sh mishandles braces in ${var-val}. # It thinks the first close brace ends the variable substitution. test -z "$INSTALL_PROGRAM" && INSTALL_PROGRAM='${INSTALL}' test -z "$INSTALL_SCRIPT" && INSTALL_SCRIPT='${INSTALL}' test -z "$INSTALL_DATA" && INSTALL_DATA='${INSTALL} -m 644' { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether build environment is sane" >&5 $as_echo_n "checking whether build environment is sane... " >&6; } # Reject unsafe characters in $srcdir or the absolute working directory # name. Accept space and tab only in the latter. am_lf=' ' case `pwd` in *[\\\"\#\$\&\'\`$am_lf]*) as_fn_error $? "unsafe absolute working directory name" "$LINENO" 5;; esac case $srcdir in *[\\\"\#\$\&\'\`$am_lf\ \ ]*) as_fn_error $? 
"unsafe srcdir value: '$srcdir'" "$LINENO" 5;; esac # Do 'set' in a subshell so we don't clobber the current shell's # arguments. Must try -L first in case configure is actually a # symlink; some systems play weird games with the mod time of symlinks # (eg FreeBSD returns the mod time of the symlink's containing # directory). if ( am_has_slept=no for am_try in 1 2; do echo "timestamp, slept: $am_has_slept" > conftest.file set X `ls -Lt "$srcdir/configure" conftest.file 2> /dev/null` if test "$*" = "X"; then # -L didn't work. set X `ls -t "$srcdir/configure" conftest.file` fi if test "$*" != "X $srcdir/configure conftest.file" \ && test "$*" != "X conftest.file $srcdir/configure"; then # If neither matched, then we have a broken ls. This can happen # if, for instance, CONFIG_SHELL is bash and it inherits a # broken ls alias from the environment. This has actually # happened. Such a system could not be considered "sane". as_fn_error $? "ls -t appears to fail. Make sure there is not a broken alias in your environment" "$LINENO" 5 fi if test "$2" = conftest.file || test $am_try -eq 2; then break fi # Just in case. sleep 1 am_has_slept=yes done test "$2" = conftest.file ) then # Ok. : else as_fn_error $? "newly created file is older than distributed files! Check your system clock" "$LINENO" 5 fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 $as_echo "yes" >&6; } # If we didn't sleep, we still need to ensure time stamps of config.status and # generated files are strictly newer. am_sleep_pid= if grep 'slept: no' conftest.file >/dev/null 2>&1; then ( sleep 1 ) & am_sleep_pid=$! fi rm -f conftest.file test "$program_prefix" != NONE && program_transform_name="s&^&$program_prefix&;$program_transform_name" # Use a double $ so make ignores it. test "$program_suffix" != NONE && program_transform_name="s&\$&$program_suffix&;$program_transform_name" # Double any \ or $. # By default was `s,x,x', remove it if useless. 
ac_script='s/[\\$]/&&/g;s/;s,x,x,$//' program_transform_name=`$as_echo "$program_transform_name" | sed "$ac_script"` # Expand $ac_aux_dir to an absolute path. am_aux_dir=`cd "$ac_aux_dir" && pwd` if test x"${MISSING+set}" != xset; then case $am_aux_dir in *\ * | *\ *) MISSING="\${SHELL} \"$am_aux_dir/missing\"" ;; *) MISSING="\${SHELL} $am_aux_dir/missing" ;; esac fi # Use eval to expand $SHELL if eval "$MISSING --is-lightweight"; then am_missing_run="$MISSING " else am_missing_run= { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: 'missing' script is too old or missing" >&5 $as_echo "$as_me: WARNING: 'missing' script is too old or missing" >&2;} fi if test x"${install_sh+set}" != xset; then case $am_aux_dir in *\ * | *\ *) install_sh="\${SHELL} '$am_aux_dir/install-sh'" ;; *) install_sh="\${SHELL} $am_aux_dir/install-sh" esac fi # Installed binaries are usually stripped using 'strip' when the user # run "make install-strip". However 'strip' might not be the right # tool to use in cross-compilation environments, therefore Automake # will honor the 'STRIP' environment variable to overrule this program. if test "$cross_compiling" != no; then if test -n "$ac_tool_prefix"; then # Extract the first word of "${ac_tool_prefix}strip", so it can be a program name with args. set dummy ${ac_tool_prefix}strip; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if ${ac_cv_prog_STRIP+:} false; then : $as_echo_n "(cached) " >&6 else if test -n "$STRIP"; then ac_cv_prog_STRIP="$STRIP" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. 
for ac_exec_ext in '' $ac_executable_extensions; do if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_STRIP="${ac_tool_prefix}strip" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS fi fi STRIP=$ac_cv_prog_STRIP if test -n "$STRIP"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $STRIP" >&5 $as_echo "$STRIP" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi fi if test -z "$ac_cv_prog_STRIP"; then ac_ct_STRIP=$STRIP # Extract the first word of "strip", so it can be a program name with args. set dummy strip; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if ${ac_cv_prog_ac_ct_STRIP+:} false; then : $as_echo_n "(cached) " >&6 else if test -n "$ac_ct_STRIP"; then ac_cv_prog_ac_ct_STRIP="$ac_ct_STRIP" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. 
for ac_exec_ext in '' $ac_executable_extensions; do if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_ac_ct_STRIP="strip" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS fi fi ac_ct_STRIP=$ac_cv_prog_ac_ct_STRIP if test -n "$ac_ct_STRIP"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_STRIP" >&5 $as_echo "$ac_ct_STRIP" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi if test "x$ac_ct_STRIP" = x; then STRIP=":" else case $cross_compiling:$ac_tool_warned in yes:) { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 $as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} ac_tool_warned=yes ;; esac STRIP=$ac_ct_STRIP fi else STRIP="$ac_cv_prog_STRIP" fi fi INSTALL_STRIP_PROGRAM="\$(install_sh) -c -s" { $as_echo "$as_me:${as_lineno-$LINENO}: checking for a thread-safe mkdir -p" >&5 $as_echo_n "checking for a thread-safe mkdir -p... " >&6; } if test -z "$MKDIR_P"; then if ${ac_cv_path_mkdir+:} false; then : $as_echo_n "(cached) " >&6 else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH$PATH_SEPARATOR/opt/sfw/bin do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_prog in mkdir gmkdir; do for ac_exec_ext in '' $ac_executable_extensions; do as_fn_executable_p "$as_dir/$ac_prog$ac_exec_ext" || continue case `"$as_dir/$ac_prog$ac_exec_ext" --version 2>&1` in #( 'mkdir (GNU coreutils) '* | \ 'mkdir (coreutils) '* | \ 'mkdir (fileutils) '4.1*) ac_cv_path_mkdir=$as_dir/$ac_prog$ac_exec_ext break 3;; esac done done done IFS=$as_save_IFS fi test -d ./--version && rmdir ./--version if test "${ac_cv_path_mkdir+set}" = set; then MKDIR_P="$ac_cv_path_mkdir -p" else # As a last resort, use the slow shell script. 
Don't cache a # value for MKDIR_P within a source directory, because that will # break other packages using the cache if that directory is # removed, or if the value is a relative name. MKDIR_P="$ac_install_sh -d" fi fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $MKDIR_P" >&5 $as_echo "$MKDIR_P" >&6; } for ac_prog in gawk mawk nawk awk do # Extract the first word of "$ac_prog", so it can be a program name with args. set dummy $ac_prog; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if ${ac_cv_prog_AWK+:} false; then : $as_echo_n "(cached) " >&6 else if test -n "$AWK"; then ac_cv_prog_AWK="$AWK" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_AWK="$ac_prog" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS fi fi AWK=$ac_cv_prog_AWK if test -n "$AWK"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $AWK" >&5 $as_echo "$AWK" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi test -n "$AWK" && break done { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether ${MAKE-make} sets \$(MAKE)" >&5 $as_echo_n "checking whether ${MAKE-make} sets \$(MAKE)... " >&6; } set x ${MAKE-make} ac_make=`$as_echo "$2" | sed 's/+/p/g; s/[^a-zA-Z0-9_]/_/g'` if eval \${ac_cv_prog_make_${ac_make}_set+:} false; then : $as_echo_n "(cached) " >&6 else cat >conftest.make <<\_ACEOF SHELL = /bin/sh all: @echo '@@@%%%=$(MAKE)=@@@%%%' _ACEOF # GNU make sometimes prints "make[1]: Entering ...", which would confuse us. 
case `${MAKE-make} -f conftest.make 2>/dev/null` in *@@@%%%=?*=@@@%%%*) eval ac_cv_prog_make_${ac_make}_set=yes;; *) eval ac_cv_prog_make_${ac_make}_set=no;; esac rm -f conftest.make fi if eval test \$ac_cv_prog_make_${ac_make}_set = yes; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 $as_echo "yes" >&6; } SET_MAKE= else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } SET_MAKE="MAKE=${MAKE-make}" fi rm -rf .tst 2>/dev/null mkdir .tst 2>/dev/null if test -d .tst; then am__leading_dot=. else am__leading_dot=_ fi rmdir .tst 2>/dev/null # Check whether --enable-silent-rules was given. if test "${enable_silent_rules+set}" = set; then : enableval=$enable_silent_rules; fi case $enable_silent_rules in # ((( yes) AM_DEFAULT_VERBOSITY=0;; no) AM_DEFAULT_VERBOSITY=1;; *) AM_DEFAULT_VERBOSITY=1;; esac am_make=${MAKE-make} { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether $am_make supports nested variables" >&5 $as_echo_n "checking whether $am_make supports nested variables... " >&6; } if ${am_cv_make_support_nested_variables+:} false; then : $as_echo_n "(cached) " >&6 else if $as_echo 'TRUE=$(BAR$(V)) BAR0=false BAR1=true V=1 am__doit: @$(TRUE) .PHONY: am__doit' | $am_make -f - >/dev/null 2>&1; then am_cv_make_support_nested_variables=yes else am_cv_make_support_nested_variables=no fi fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $am_cv_make_support_nested_variables" >&5 $as_echo "$am_cv_make_support_nested_variables" >&6; } if test $am_cv_make_support_nested_variables = yes; then AM_V='$(V)' AM_DEFAULT_V='$(AM_DEFAULT_VERBOSITY)' else AM_V=$AM_DEFAULT_VERBOSITY AM_DEFAULT_V=$AM_DEFAULT_VERBOSITY fi AM_BACKSLASH='\' if test "`cd $srcdir && pwd`" != "`pwd`"; then # Use -I$(srcdir) only when $(srcdir) != ., so that make's output # is not polluted with repeated "-I." am__isrc=' -I$(srcdir)' # test to see if srcdir already configured if test -f $srcdir/config.status; then as_fn_error $? 
"source directory already configured; run \"make distclean\" there first" "$LINENO" 5 fi fi # test whether we have cygpath if test -z "$CYGPATH_W"; then if (cygpath --version) >/dev/null 2>/dev/null; then CYGPATH_W='cygpath -w' else CYGPATH_W=echo fi fi # Define the identity of the package. PACKAGE='sleuthkit' VERSION='4.11.1' cat >>confdefs.h <<_ACEOF #define PACKAGE "$PACKAGE" _ACEOF cat >>confdefs.h <<_ACEOF #define VERSION "$VERSION" _ACEOF # Some tools Automake needs. ACLOCAL=${ACLOCAL-"${am_missing_run}aclocal-${am__api_version}"} AUTOCONF=${AUTOCONF-"${am_missing_run}autoconf"} AUTOMAKE=${AUTOMAKE-"${am_missing_run}automake-${am__api_version}"} AUTOHEADER=${AUTOHEADER-"${am_missing_run}autoheader"} MAKEINFO=${MAKEINFO-"${am_missing_run}makeinfo"} # For better backward compatibility. To be removed once Automake 1.9.x # dies out for good. For more background, see: # # mkdir_p='$(MKDIR_P)' # We need awk for the "check" target (and possibly the TAP driver). The # system "awk" is bad on some platforms. # Always define AMTAR for backward compatibility. Yes, it's still used # in the wild :-( We should find a proper way to deprecate it ... AMTAR='$${TAR-tar}' # We'll loop over all known methods to create a tar archive until one works. _am_tools='gnutar plaintar pax cpio none' # The POSIX 1988 'ustar' format is defined with fixed-size fields. # There is notably a 21 bits limit for the UID and the GID. In fact, # the 'pax' utility can hang on bigger UID/GID (see automake bug#8343 # and bug#13588). am_max_uid=2097151 # 2^21 - 1 am_max_gid=$am_max_uid # The $UID and $GID variables are not portable, so we need to resort # to the POSIX-mandated id(1) utility. Errors in the 'id' calls # below are definitely unexpected, so allow the users to see them # (that is, avoid stderr redirection). 
am_uid=`id -u || echo unknown` am_gid=`id -g || echo unknown` { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether UID '$am_uid' is supported by ustar format" >&5 $as_echo_n "checking whether UID '$am_uid' is supported by ustar format... " >&6; } if test $am_uid -le $am_max_uid; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 $as_echo "yes" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } _am_tools=none fi { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether GID '$am_gid' is supported by ustar format" >&5 $as_echo_n "checking whether GID '$am_gid' is supported by ustar format... " >&6; } if test $am_gid -le $am_max_gid; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 $as_echo "yes" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } _am_tools=none fi { $as_echo "$as_me:${as_lineno-$LINENO}: checking how to create a ustar tar archive" >&5 $as_echo_n "checking how to create a ustar tar archive... " >&6; } # Go ahead even if we have the value already cached. We do so because we # need to set the values for the 'am__tar' and 'am__untar' variables. _am_tools=${am_cv_prog_tar_ustar-$_am_tools} for _am_tool in $_am_tools; do case $_am_tool in gnutar) for _am_tar in tar gnutar gtar; do { echo "$as_me:$LINENO: $_am_tar --version" >&5 ($_am_tar --version) >&5 2>&5 ac_status=$? echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); } && break done am__tar="$_am_tar --format=ustar -chf - "'"$$tardir"' am__tar_="$_am_tar --format=ustar -chf - "'"$tardir"' am__untar="$_am_tar -xf -" ;; plaintar) # Must skip GNU tar: if it does not support --format= it doesn't create # ustar tarball either. 
(tar --version) >/dev/null 2>&1 && continue am__tar='tar chf - "$$tardir"' am__tar_='tar chf - "$tardir"' am__untar='tar xf -' ;; pax) am__tar='pax -L -x ustar -w "$$tardir"' am__tar_='pax -L -x ustar -w "$tardir"' am__untar='pax -r' ;; cpio) am__tar='find "$$tardir" -print | cpio -o -H ustar -L' am__tar_='find "$tardir" -print | cpio -o -H ustar -L' am__untar='cpio -i -H ustar -d' ;; none) am__tar=false am__tar_=false am__untar=false ;; esac # If the value was cached, stop now. We just wanted to have am__tar # and am__untar set. test -n "${am_cv_prog_tar_ustar}" && break # tar/untar a dummy directory, and stop if the command works. rm -rf conftest.dir mkdir conftest.dir echo GrepMe > conftest.dir/file { echo "$as_me:$LINENO: tardir=conftest.dir && eval $am__tar_ >conftest.tar" >&5 (tardir=conftest.dir && eval $am__tar_ >conftest.tar) >&5 2>&5 ac_status=$? echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); } rm -rf conftest.dir if test -s conftest.tar; then { echo "$as_me:$LINENO: $am__untar &5 ($am__untar &5 2>&5 ac_status=$? echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); } { echo "$as_me:$LINENO: cat conftest.dir/file" >&5 (cat conftest.dir/file) >&5 2>&5 ac_status=$? echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); } grep GrepMe conftest.dir/file >/dev/null 2>&1 && break fi done rm -rf conftest.dir if ${am_cv_prog_tar_ustar+:} false; then : $as_echo_n "(cached) " >&6 else am_cv_prog_tar_ustar=$_am_tool fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $am_cv_prog_tar_ustar" >&5 $as_echo "$am_cv_prog_tar_ustar" >&6; } # POSIX will say in a future version that running "rm -f" with no argument # is OK; and we want to be able to make that assumption in our Makefile # recipes. So use an aggressive probe to check that the usage we want is # actually supported "in the wild" to an acceptable degree. # See automake bug#10828. 
# To make any issue more visible, cause the running configure to be aborted # by default if the 'rm' program in use doesn't match our expectations; the # user can still override this though. if rm -f && rm -fr && rm -rf; then : OK; else cat >&2 <<'END' Oops! Your 'rm' program seems unable to run without file operands specified on the command line, even when the '-f' option is present. This is contrary to the behaviour of most rm programs out there, and not conforming with the upcoming POSIX standard: Please tell bug-automake@gnu.org about your system, including the value of your $PATH and any error possibly output before this message. This can help us improve future automake versions. END if test x"$ACCEPT_INFERIOR_RM_PROGRAM" = x"yes"; then echo 'Configuration will proceed anyway, since you have set the' >&2 echo 'ACCEPT_INFERIOR_RM_PROGRAM variable to "yes"' >&2 echo >&2 else cat >&2 <<'END' Aborting the configuration process, to ensure you take notice of the issue. You can download and install GNU coreutils to get an 'rm' implementation that behaves properly: . If you want to complete the configuration process using your problematic 'rm' anyway, export the environment variable ACCEPT_INFERIOR_RM_PROGRAM to "yes", and re-run configure. END as_fn_error $? "Your 'rm' program is bad, sorry." "$LINENO" 5 fi fi # Check whether --with-cppunit-prefix was given. if test "${with_cppunit_prefix+set}" = set; then : withval=$with_cppunit_prefix; cppunit_config_prefix="$withval" else cppunit_config_prefix="" fi # Check whether --with-cppunit-exec-prefix was given. 
if test "${with_cppunit_exec_prefix+set}" = set; then : withval=$with_cppunit_exec_prefix; cppunit_config_exec_prefix="$withval" else cppunit_config_exec_prefix="" fi if test x$cppunit_config_exec_prefix != x ; then cppunit_config_args="$cppunit_config_args --exec-prefix=$cppunit_config_exec_prefix" if test x${CPPUNIT_CONFIG+set} != xset ; then CPPUNIT_CONFIG=$cppunit_config_exec_prefix/bin/cppunit-config fi fi if test x$cppunit_config_prefix != x ; then cppunit_config_args="$cppunit_config_args --prefix=$cppunit_config_prefix" if test x${CPPUNIT_CONFIG+set} != xset ; then CPPUNIT_CONFIG=$cppunit_config_prefix/bin/cppunit-config fi fi # Extract the first word of "cppunit-config", so it can be a program name with args. set dummy cppunit-config; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if ${ac_cv_path_CPPUNIT_CONFIG+:} false; then : $as_echo_n "(cached) " >&6 else case $CPPUNIT_CONFIG in [\\/]* | ?:[\\/]*) ac_cv_path_CPPUNIT_CONFIG="$CPPUNIT_CONFIG" # Let the user override the test with a path. ;; *) as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. 
for ac_exec_ext in '' $ac_executable_extensions; do if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_path_CPPUNIT_CONFIG="$as_dir/$ac_word$ac_exec_ext" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS test -z "$ac_cv_path_CPPUNIT_CONFIG" && ac_cv_path_CPPUNIT_CONFIG="no" ;; esac fi CPPUNIT_CONFIG=$ac_cv_path_CPPUNIT_CONFIG if test -n "$CPPUNIT_CONFIG"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $CPPUNIT_CONFIG" >&5 $as_echo "$CPPUNIT_CONFIG" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi cppunit_version_min=1.12.1 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for Cppunit - version >= $cppunit_version_min" >&5 $as_echo_n "checking for Cppunit - version >= $cppunit_version_min... " >&6; } no_cppunit="" if test "$CPPUNIT_CONFIG" = "no" ; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } no_cppunit=yes else CPPUNIT_CFLAGS=`$CPPUNIT_CONFIG --cflags` CPPUNIT_LIBS=`$CPPUNIT_CONFIG --libs` cppunit_version=`$CPPUNIT_CONFIG --version` cppunit_major_version=`echo $cppunit_version | \ sed 's/\([0-9]*\).\([0-9]*\).\([0-9]*\)/\1/'` cppunit_minor_version=`echo $cppunit_version | \ sed 's/\([0-9]*\).\([0-9]*\).\([0-9]*\)/\2/'` cppunit_micro_version=`echo $cppunit_version | \ sed 's/\([0-9]*\).\([0-9]*\).\([0-9]*\)/\3/'` cppunit_major_min=`echo $cppunit_version_min | \ sed 's/\([0-9]*\).\([0-9]*\).\([0-9]*\)/\1/'` if test "x${cppunit_major_min}" = "x" ; then cppunit_major_min=0 fi cppunit_minor_min=`echo $cppunit_version_min | \ sed 's/\([0-9]*\).\([0-9]*\).\([0-9]*\)/\2/'` if test "x${cppunit_minor_min}" = "x" ; then cppunit_minor_min=0 fi cppunit_micro_min=`echo $cppunit_version_min | \ sed 's/\([0-9]*\).\([0-9]*\).\([0-9]*\)/\3/'` if test "x${cppunit_micro_min}" = "x" ; then cppunit_micro_min=0 fi cppunit_version_proper=`expr \ $cppunit_major_version \> $cppunit_major_min \| \ $cppunit_major_version \= 
$cppunit_major_min \& \ $cppunit_minor_version \> $cppunit_minor_min \| \ $cppunit_major_version \= $cppunit_major_min \& \ $cppunit_minor_version \= $cppunit_minor_min \& \ $cppunit_micro_version \>= $cppunit_micro_min ` if test "$cppunit_version_proper" = "1" ; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $cppunit_major_version.$cppunit_minor_version.$cppunit_micro_version" >&5 $as_echo "$cppunit_major_version.$cppunit_minor_version.$cppunit_micro_version" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } no_cppunit=yes fi fi if test "x$no_cppunit" = x ; then : else CPPUNIT_CFLAGS="" CPPUNIT_LIBS="" : fi if test "x$no_cppunit" = x; then CPPUNIT_TRUE= CPPUNIT_FALSE='#' else CPPUNIT_TRUE='#' CPPUNIT_FALSE= fi case `pwd` in *\ * | *\ *) { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: Libtool does not cope well with whitespace in \`pwd\`" >&5 $as_echo "$as_me: WARNING: Libtool does not cope well with whitespace in \`pwd\`" >&2;} ;; esac macro_version='2.4.6' macro_revision='2.4.6' ltmain=$ac_aux_dir/ltmain.sh # Make sure we can run config.sub. $SHELL "$ac_aux_dir/config.sub" sun4 >/dev/null 2>&1 || as_fn_error $? "cannot run $SHELL $ac_aux_dir/config.sub" "$LINENO" 5 { $as_echo "$as_me:${as_lineno-$LINENO}: checking build system type" >&5 $as_echo_n "checking build system type... " >&6; } if ${ac_cv_build+:} false; then : $as_echo_n "(cached) " >&6 else ac_build_alias=$build_alias test "x$ac_build_alias" = x && ac_build_alias=`$SHELL "$ac_aux_dir/config.guess"` test "x$ac_build_alias" = x && as_fn_error $? "cannot guess build type; you must specify one" "$LINENO" 5 ac_cv_build=`$SHELL "$ac_aux_dir/config.sub" $ac_build_alias` || as_fn_error $? "$SHELL $ac_aux_dir/config.sub $ac_build_alias failed" "$LINENO" 5 fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_build" >&5 $as_echo "$ac_cv_build" >&6; } case $ac_cv_build in *-*-*) ;; *) as_fn_error $? 
"invalid value of canonical build" "$LINENO" 5;; esac build=$ac_cv_build ac_save_IFS=$IFS; IFS='-' set x $ac_cv_build shift build_cpu=$1 build_vendor=$2 shift; shift # Remember, the first character of IFS is used to create $*, # except with old shells: build_os=$* IFS=$ac_save_IFS case $build_os in *\ *) build_os=`echo "$build_os" | sed 's/ /-/g'`;; esac { $as_echo "$as_me:${as_lineno-$LINENO}: checking host system type" >&5 $as_echo_n "checking host system type... " >&6; } if ${ac_cv_host+:} false; then : $as_echo_n "(cached) " >&6 else if test "x$host_alias" = x; then ac_cv_host=$ac_cv_build else ac_cv_host=`$SHELL "$ac_aux_dir/config.sub" $host_alias` || as_fn_error $? "$SHELL $ac_aux_dir/config.sub $host_alias failed" "$LINENO" 5 fi fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_host" >&5 $as_echo "$ac_cv_host" >&6; } case $ac_cv_host in *-*-*) ;; *) as_fn_error $? "invalid value of canonical host" "$LINENO" 5;; esac host=$ac_cv_host ac_save_IFS=$IFS; IFS='-' set x $ac_cv_host shift host_cpu=$1 host_vendor=$2 shift; shift # Remember, the first character of IFS is used to create $*, # except with old shells: host_os=$* IFS=$ac_save_IFS case $host_os in *\ *) host_os=`echo "$host_os" | sed 's/ /-/g'`;; esac # Backslashify metacharacters that are still active within # double-quoted strings. sed_quote_subst='s/\(["`$\\]\)/\\\1/g' # Same as above, but do not quote variable references. double_quote_subst='s/\(["`\\]\)/\\\1/g' # Sed substitution to delay expansion of an escaped shell variable in a # double_quote_subst'ed string. delay_variable_subst='s/\\\\\\\\\\\$/\\\\\\$/g' # Sed substitution to delay expansion of an escaped single quote. 
delay_single_quote_subst='s/'\''/'\'\\\\\\\'\''/g' # Sed substitution to avoid accidental globbing in evaled expressions no_glob_subst='s/\*/\\\*/g' ECHO='\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\' ECHO=$ECHO$ECHO$ECHO$ECHO$ECHO ECHO=$ECHO$ECHO$ECHO$ECHO$ECHO$ECHO { $as_echo "$as_me:${as_lineno-$LINENO}: checking how to print strings" >&5 $as_echo_n "checking how to print strings... " >&6; } # Test print first, because it will be a builtin if present. if test "X`( print -r -- -n ) 2>/dev/null`" = X-n && \ test "X`print -r -- $ECHO 2>/dev/null`" = "X$ECHO"; then ECHO='print -r --' elif test "X`printf %s $ECHO 2>/dev/null`" = "X$ECHO"; then ECHO='printf %s\n' else # Use this function as a fallback that always works. func_fallback_echo () { eval 'cat <<_LTECHO_EOF $1 _LTECHO_EOF' } ECHO='func_fallback_echo' fi # func_echo_all arg... # Invoke $ECHO with all args, space-separated. func_echo_all () { $ECHO "" } case $ECHO in printf*) { $as_echo "$as_me:${as_lineno-$LINENO}: result: printf" >&5 $as_echo "printf" >&6; } ;; print*) { $as_echo "$as_me:${as_lineno-$LINENO}: result: print -r" >&5 $as_echo "print -r" >&6; } ;; *) { $as_echo "$as_me:${as_lineno-$LINENO}: result: cat" >&5 $as_echo "cat" >&6; } ;; esac DEPDIR="${am__leading_dot}deps" ac_config_commands="$ac_config_commands depfiles" am_make=${MAKE-make} cat > confinc << 'END' am__doit: @echo this is the am__doit target .PHONY: am__doit END # If we don't find an include directive, just comment out the code. { $as_echo "$as_me:${as_lineno-$LINENO}: checking for style of include used by $am_make" >&5 $as_echo_n "checking for style of include used by $am_make... " >&6; } am__include="#" am__quote= _am_result=none # First try GNU make style include. echo "include confinc" > confmf # Ignore all kinds of additional output from 'make'. 
case `$am_make -s -f confmf 2> /dev/null` in #( *the\ am__doit\ target*) am__include=include am__quote= _am_result=GNU ;; esac # Now try BSD make style include. if test "$am__include" = "#"; then echo '.include "confinc"' > confmf case `$am_make -s -f confmf 2> /dev/null` in #( *the\ am__doit\ target*) am__include=.include am__quote="\"" _am_result=BSD ;; esac fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $_am_result" >&5 $as_echo "$_am_result" >&6; } rm -f confinc confmf # Check whether --enable-dependency-tracking was given. if test "${enable_dependency_tracking+set}" = set; then : enableval=$enable_dependency_tracking; fi if test "x$enable_dependency_tracking" != xno; then am_depcomp="$ac_aux_dir/depcomp" AMDEPBACKSLASH='\' am__nodep='_no' fi if test "x$enable_dependency_tracking" != xno; then AMDEP_TRUE= AMDEP_FALSE='#' else AMDEP_TRUE='#' AMDEP_FALSE= fi ac_ext=c ac_cpp='$CPP $CPPFLAGS' ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' ac_compiler_gnu=$ac_cv_c_compiler_gnu if test -n "$ac_tool_prefix"; then # Extract the first word of "${ac_tool_prefix}gcc", so it can be a program name with args. set dummy ${ac_tool_prefix}gcc; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if ${ac_cv_prog_CC+:} false; then : $as_echo_n "(cached) " >&6 else if test -n "$CC"; then ac_cv_prog_CC="$CC" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. 
for ac_exec_ext in '' $ac_executable_extensions; do if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_CC="${ac_tool_prefix}gcc" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS fi fi CC=$ac_cv_prog_CC if test -n "$CC"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $CC" >&5 $as_echo "$CC" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi fi if test -z "$ac_cv_prog_CC"; then ac_ct_CC=$CC # Extract the first word of "gcc", so it can be a program name with args. set dummy gcc; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if ${ac_cv_prog_ac_ct_CC+:} false; then : $as_echo_n "(cached) " >&6 else if test -n "$ac_ct_CC"; then ac_cv_prog_ac_ct_CC="$ac_ct_CC" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. 
for ac_exec_ext in '' $ac_executable_extensions; do if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_ac_ct_CC="gcc" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS fi fi ac_ct_CC=$ac_cv_prog_ac_ct_CC if test -n "$ac_ct_CC"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_CC" >&5 $as_echo "$ac_ct_CC" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi if test "x$ac_ct_CC" = x; then CC="" else case $cross_compiling:$ac_tool_warned in yes:) { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 $as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} ac_tool_warned=yes ;; esac CC=$ac_ct_CC fi else CC="$ac_cv_prog_CC" fi if test -z "$CC"; then if test -n "$ac_tool_prefix"; then # Extract the first word of "${ac_tool_prefix}cc", so it can be a program name with args. set dummy ${ac_tool_prefix}cc; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if ${ac_cv_prog_CC+:} false; then : $as_echo_n "(cached) " >&6 else if test -n "$CC"; then ac_cv_prog_CC="$CC" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_CC="${ac_tool_prefix}cc" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS fi fi CC=$ac_cv_prog_CC if test -n "$CC"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $CC" >&5 $as_echo "$CC" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi fi fi if test -z "$CC"; then # Extract the first word of "cc", so it can be a program name with args. 
set dummy cc; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if ${ac_cv_prog_CC+:} false; then : $as_echo_n "(cached) " >&6 else if test -n "$CC"; then ac_cv_prog_CC="$CC" # Let the user override the test. else ac_prog_rejected=no as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then if test "$as_dir/$ac_word$ac_exec_ext" = "/usr/ucb/cc"; then ac_prog_rejected=yes continue fi ac_cv_prog_CC="cc" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS if test $ac_prog_rejected = yes; then # We found a bogon in the path, so make sure we never use it. set dummy $ac_cv_prog_CC shift if test $# != 0; then # We chose a different compiler from the bogus one. # However, it has the same basename, so the bogon will be chosen # first if we set CC to just the basename; use the full file name. shift ac_cv_prog_CC="$as_dir/$ac_word${1+' '}$@" fi fi fi fi CC=$ac_cv_prog_CC if test -n "$CC"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $CC" >&5 $as_echo "$CC" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi fi if test -z "$CC"; then if test -n "$ac_tool_prefix"; then for ac_prog in cl.exe do # Extract the first word of "$ac_tool_prefix$ac_prog", so it can be a program name with args. set dummy $ac_tool_prefix$ac_prog; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if ${ac_cv_prog_CC+:} false; then : $as_echo_n "(cached) " >&6 else if test -n "$CC"; then ac_cv_prog_CC="$CC" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. 
for ac_exec_ext in '' $ac_executable_extensions; do if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_CC="$ac_tool_prefix$ac_prog" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS fi fi CC=$ac_cv_prog_CC if test -n "$CC"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $CC" >&5 $as_echo "$CC" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi test -n "$CC" && break done fi if test -z "$CC"; then ac_ct_CC=$CC for ac_prog in cl.exe do # Extract the first word of "$ac_prog", so it can be a program name with args. set dummy $ac_prog; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if ${ac_cv_prog_ac_ct_CC+:} false; then : $as_echo_n "(cached) " >&6 else if test -n "$ac_ct_CC"; then ac_cv_prog_ac_ct_CC="$ac_ct_CC" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. 
for ac_exec_ext in '' $ac_executable_extensions; do if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_ac_ct_CC="$ac_prog" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS fi fi ac_ct_CC=$ac_cv_prog_ac_ct_CC if test -n "$ac_ct_CC"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_CC" >&5 $as_echo "$ac_ct_CC" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi test -n "$ac_ct_CC" && break done if test "x$ac_ct_CC" = x; then CC="" else case $cross_compiling:$ac_tool_warned in yes:) { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 $as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} ac_tool_warned=yes ;; esac CC=$ac_ct_CC fi fi fi test -z "$CC" && { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5 $as_echo "$as_me: error: in \`$ac_pwd':" >&2;} as_fn_error $? "no acceptable C compiler found in \$PATH See \`config.log' for more details" "$LINENO" 5; } # Provide some information about the compiler. $as_echo "$as_me:${as_lineno-$LINENO}: checking for C compiler version" >&5 set X $ac_compile ac_compiler=$2 for ac_option in --version -v -V -qversion; do { { ac_try="$ac_compiler $ac_option >&5" case "(($ac_try" in *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; *) ac_try_echo=$ac_try;; esac eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" $as_echo "$ac_try_echo"; } >&5 (eval "$ac_compiler $ac_option >&5") 2>conftest.err ac_status=$? if test -s conftest.err; then sed '10a\ ... rest of stderr output deleted ... 10q' conftest.err >conftest.er1 cat conftest.er1 >&5 fi rm -f conftest.er1 conftest.err $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; } done cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. 
*/ int main () { ; return 0; } _ACEOF ac_clean_files_save=$ac_clean_files ac_clean_files="$ac_clean_files a.out a.out.dSYM a.exe b.out" # Try to create an executable without -o first, disregard a.out. # It will help us diagnose broken compilers, and finding out an intuition # of exeext. { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether the C compiler works" >&5 $as_echo_n "checking whether the C compiler works... " >&6; } ac_link_default=`$as_echo "$ac_link" | sed 's/ -o *conftest[^ ]*//'` # The possible output files: ac_files="a.out conftest.exe conftest a.exe a_out.exe b.out conftest.*" ac_rmfiles= for ac_file in $ac_files do case $ac_file in *.$ac_ext | *.xcoff | *.tds | *.d | *.pdb | *.xSYM | *.bb | *.bbg | *.map | *.inf | *.dSYM | *.o | *.obj ) ;; * ) ac_rmfiles="$ac_rmfiles $ac_file";; esac done rm -f $ac_rmfiles if { { ac_try="$ac_link_default" case "(($ac_try" in *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; *) ac_try_echo=$ac_try;; esac eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" $as_echo "$ac_try_echo"; } >&5 (eval "$ac_link_default") 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; }; then : # Autoconf-2.13 could set the ac_cv_exeext variable to `no'. # So ignore a value of `no', otherwise this would lead to `EXEEXT = no' # in a Makefile. We should not override ac_cv_exeext if it was cached, # so that the user can short-circuit this test for compilers unknown to # Autoconf. for ac_file in $ac_files '' do test -f "$ac_file" || continue case $ac_file in *.$ac_ext | *.xcoff | *.tds | *.d | *.pdb | *.xSYM | *.bb | *.bbg | *.map | *.inf | *.dSYM | *.o | *.obj ) ;; [ab].out ) # We found the default executable, but exeext='' is most # certainly right. 
break;; *.* ) if test "${ac_cv_exeext+set}" = set && test "$ac_cv_exeext" != no; then :; else ac_cv_exeext=`expr "$ac_file" : '[^.]*\(\..*\)'` fi # We set ac_cv_exeext here because the later test for it is not # safe: cross compilers may not add the suffix if given an `-o' # argument, so we may need to know it at that point already. # Even if this section looks crufty: it has the advantage of # actually working. break;; * ) break;; esac done test "$ac_cv_exeext" = no && ac_cv_exeext= else ac_file='' fi if test -z "$ac_file"; then : { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } $as_echo "$as_me: failed program was:" >&5 sed 's/^/| /' conftest.$ac_ext >&5 { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5 $as_echo "$as_me: error: in \`$ac_pwd':" >&2;} as_fn_error 77 "C compiler cannot create executables See \`config.log' for more details" "$LINENO" 5; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 $as_echo "yes" >&6; } fi { $as_echo "$as_me:${as_lineno-$LINENO}: checking for C compiler default output file name" >&5 $as_echo_n "checking for C compiler default output file name... " >&6; } { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_file" >&5 $as_echo "$ac_file" >&6; } ac_exeext=$ac_cv_exeext rm -f -r a.out a.out.dSYM a.exe conftest$ac_cv_exeext b.out ac_clean_files=$ac_clean_files_save { $as_echo "$as_me:${as_lineno-$LINENO}: checking for suffix of executables" >&5 $as_echo_n "checking for suffix of executables... " >&6; } if { { ac_try="$ac_link" case "(($ac_try" in *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; *) ac_try_echo=$ac_try;; esac eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" $as_echo "$ac_try_echo"; } >&5 (eval "$ac_link") 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; }; then : # If both `conftest.exe' and `conftest' are `present' (well, observable) # catch `conftest.exe'. 
For instance with Cygwin, `ls conftest' will # work properly (i.e., refer to `conftest.exe'), while it won't with # `rm'. for ac_file in conftest.exe conftest conftest.*; do test -f "$ac_file" || continue case $ac_file in *.$ac_ext | *.xcoff | *.tds | *.d | *.pdb | *.xSYM | *.bb | *.bbg | *.map | *.inf | *.dSYM | *.o | *.obj ) ;; *.* ) ac_cv_exeext=`expr "$ac_file" : '[^.]*\(\..*\)'` break;; * ) break;; esac done else { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5 $as_echo "$as_me: error: in \`$ac_pwd':" >&2;} as_fn_error $? "cannot compute suffix of executables: cannot compile and link See \`config.log' for more details" "$LINENO" 5; } fi rm -f conftest conftest$ac_cv_exeext { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_exeext" >&5 $as_echo "$ac_cv_exeext" >&6; } rm -f conftest.$ac_ext EXEEXT=$ac_cv_exeext ac_exeext=$EXEEXT cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ #include int main () { FILE *f = fopen ("conftest.out", "w"); return ferror (f) || fclose (f) != 0; ; return 0; } _ACEOF ac_clean_files="$ac_clean_files conftest.out" # Check that the compiler produces executables we can run. If not, either # the compiler is broken, or we cross compile. { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether we are cross compiling" >&5 $as_echo_n "checking whether we are cross compiling... " >&6; } if test "$cross_compiling" != yes; then { { ac_try="$ac_link" case "(($ac_try" in *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; *) ac_try_echo=$ac_try;; esac eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" $as_echo "$ac_try_echo"; } >&5 (eval "$ac_link") 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? 
= $ac_status" >&5 test $ac_status = 0; } if { ac_try='./conftest$ac_cv_exeext' { { case "(($ac_try" in *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; *) ac_try_echo=$ac_try;; esac eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" $as_echo "$ac_try_echo"; } >&5 (eval "$ac_try") 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; }; }; then cross_compiling=no else if test "$cross_compiling" = maybe; then cross_compiling=yes else { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5 $as_echo "$as_me: error: in \`$ac_pwd':" >&2;} as_fn_error $? "cannot run C compiled programs. If you meant to cross compile, use \`--host'. See \`config.log' for more details" "$LINENO" 5; } fi fi fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $cross_compiling" >&5 $as_echo "$cross_compiling" >&6; } rm -f conftest.$ac_ext conftest$ac_cv_exeext conftest.out ac_clean_files=$ac_clean_files_save { $as_echo "$as_me:${as_lineno-$LINENO}: checking for suffix of object files" >&5 $as_echo_n "checking for suffix of object files... " >&6; } if ${ac_cv_objext+:} false; then : $as_echo_n "(cached) " >&6 else cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ int main () { ; return 0; } _ACEOF rm -f conftest.o conftest.obj if { { ac_try="$ac_compile" case "(($ac_try" in *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; *) ac_try_echo=$ac_try;; esac eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" $as_echo "$ac_try_echo"; } >&5 (eval "$ac_compile") 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? 
= $ac_status" >&5 test $ac_status = 0; }; then : for ac_file in conftest.o conftest.obj conftest.*; do test -f "$ac_file" || continue; case $ac_file in *.$ac_ext | *.xcoff | *.tds | *.d | *.pdb | *.xSYM | *.bb | *.bbg | *.map | *.inf | *.dSYM ) ;; *) ac_cv_objext=`expr "$ac_file" : '.*\.\(.*\)'` break;; esac done else $as_echo "$as_me: failed program was:" >&5 sed 's/^/| /' conftest.$ac_ext >&5 { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5 $as_echo "$as_me: error: in \`$ac_pwd':" >&2;} as_fn_error $? "cannot compute suffix of object files: cannot compile See \`config.log' for more details" "$LINENO" 5; } fi rm -f conftest.$ac_cv_objext conftest.$ac_ext fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_objext" >&5 $as_echo "$ac_cv_objext" >&6; } OBJEXT=$ac_cv_objext ac_objext=$OBJEXT { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether we are using the GNU C compiler" >&5 $as_echo_n "checking whether we are using the GNU C compiler... " >&6; } if ${ac_cv_c_compiler_gnu+:} false; then : $as_echo_n "(cached) " >&6 else cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ int main () { #ifndef __GNUC__ choke me #endif ; return 0; } _ACEOF if ac_fn_c_try_compile "$LINENO"; then : ac_compiler_gnu=yes else ac_compiler_gnu=no fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext ac_cv_c_compiler_gnu=$ac_compiler_gnu fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_c_compiler_gnu" >&5 $as_echo "$ac_cv_c_compiler_gnu" >&6; } if test $ac_compiler_gnu = yes; then GCC=yes else GCC= fi ac_test_CFLAGS=${CFLAGS+set} ac_save_CFLAGS=$CFLAGS { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether $CC accepts -g" >&5 $as_echo_n "checking whether $CC accepts -g... " >&6; } if ${ac_cv_prog_cc_g+:} false; then : $as_echo_n "(cached) " >&6 else ac_save_c_werror_flag=$ac_c_werror_flag ac_c_werror_flag=yes ac_cv_prog_cc_g=no CFLAGS="-g" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. 
*/ int main () { ; return 0; } _ACEOF if ac_fn_c_try_compile "$LINENO"; then : ac_cv_prog_cc_g=yes else CFLAGS="" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ int main () { ; return 0; } _ACEOF if ac_fn_c_try_compile "$LINENO"; then : else ac_c_werror_flag=$ac_save_c_werror_flag CFLAGS="-g" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ int main () { ; return 0; } _ACEOF if ac_fn_c_try_compile "$LINENO"; then : ac_cv_prog_cc_g=yes fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext ac_c_werror_flag=$ac_save_c_werror_flag fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_prog_cc_g" >&5 $as_echo "$ac_cv_prog_cc_g" >&6; } if test "$ac_test_CFLAGS" = set; then CFLAGS=$ac_save_CFLAGS elif test $ac_cv_prog_cc_g = yes; then if test "$GCC" = yes; then CFLAGS="-g -O2" else CFLAGS="-g" fi else if test "$GCC" = yes; then CFLAGS="-O2" else CFLAGS= fi fi { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $CC option to accept ISO C89" >&5 $as_echo_n "checking for $CC option to accept ISO C89... " >&6; } if ${ac_cv_prog_cc_c89+:} false; then : $as_echo_n "(cached) " >&6 else ac_cv_prog_cc_c89=no ac_save_CC=$CC cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ #include #include struct stat; /* Most of the following tests are stolen from RCS 5.7's src/conf.sh. */ struct buf { int x; }; FILE * (*rcsopen) (struct buf *, struct stat *, int); static char *e (p, i) char **p; int i; { return p[i]; } static char *f (char * (*g) (char **, int), char **p, ...) { char *s; va_list v; va_start (v,p); s = g (p, va_arg (v,int)); va_end (v); return s; } /* OSF 4.0 Compaq cc is some sort of almost-ANSI by default. It has function prototypes and stuff, but not '\xHH' hex character constants. These don't provoke an error unfortunately, instead are silently treated as 'x'. 
The following induces an error, until -std is added to get proper ANSI mode. Curiously '\x00'!='x' always comes out true, for an array size at least. It's necessary to write '\x00'==0 to get something that's true only with -std. */ int osf4_cc_array ['\x00' == 0 ? 1 : -1]; /* IBM C 6 for AIX is almost-ANSI by default, but it replaces macro parameters inside strings and character constants. */ #define FOO(x) 'x' int xlc6_cc_array[FOO(a) == 'x' ? 1 : -1]; int test (int i, double x); struct s1 {int (*f) (int a);}; struct s2 {int (*f) (double a);}; int pairnames (int, char **, FILE *(*)(struct buf *, struct stat *, int), int, int); int argc; char **argv; int main () { return f (e, argv, 0) != argv[0] || f (e, argv, 1) != argv[1]; ; return 0; } _ACEOF for ac_arg in '' -qlanglvl=extc89 -qlanglvl=ansi -std \ -Ae "-Aa -D_HPUX_SOURCE" "-Xc -D__EXTENSIONS__" do CC="$ac_save_CC $ac_arg" if ac_fn_c_try_compile "$LINENO"; then : ac_cv_prog_cc_c89=$ac_arg fi rm -f core conftest.err conftest.$ac_objext test "x$ac_cv_prog_cc_c89" != "xno" && break done rm -f conftest.$ac_ext CC=$ac_save_CC fi # AC_CACHE_VAL case "x$ac_cv_prog_cc_c89" in x) { $as_echo "$as_me:${as_lineno-$LINENO}: result: none needed" >&5 $as_echo "none needed" >&6; } ;; xno) { $as_echo "$as_me:${as_lineno-$LINENO}: result: unsupported" >&5 $as_echo "unsupported" >&6; } ;; *) CC="$CC $ac_cv_prog_cc_c89" { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_prog_cc_c89" >&5 $as_echo "$ac_cv_prog_cc_c89" >&6; } ;; esac if test "x$ac_cv_prog_cc_c89" != xno; then : fi ac_ext=c ac_cpp='$CPP $CPPFLAGS' ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' ac_compiler_gnu=$ac_cv_c_compiler_gnu ac_ext=c ac_cpp='$CPP $CPPFLAGS' ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' ac_compiler_gnu=$ac_cv_c_compiler_gnu { $as_echo 
"$as_me:${as_lineno-$LINENO}: checking whether $CC understands -c and -o together" >&5 $as_echo_n "checking whether $CC understands -c and -o together... " >&6; } if ${am_cv_prog_cc_c_o+:} false; then : $as_echo_n "(cached) " >&6 else cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ int main () { ; return 0; } _ACEOF # Make sure it works both with $CC and with simple cc. # Following AC_PROG_CC_C_O, we do the test twice because some # compilers refuse to overwrite an existing .o file with -o, # though they will create one. am_cv_prog_cc_c_o=yes for am_i in 1 2; do if { echo "$as_me:$LINENO: $CC -c conftest.$ac_ext -o conftest2.$ac_objext" >&5 ($CC -c conftest.$ac_ext -o conftest2.$ac_objext) >&5 2>&5 ac_status=$? echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); } \ && test -f conftest2.$ac_objext; then : OK else am_cv_prog_cc_c_o=no break fi done rm -f core conftest* unset am_i fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $am_cv_prog_cc_c_o" >&5 $as_echo "$am_cv_prog_cc_c_o" >&6; } if test "$am_cv_prog_cc_c_o" != yes; then # Losing compiler, so override with the script. # FIXME: It is wrong to rewrite CC. # But if we don't then we get into trouble of one sort or another. # A longer-term fix would be to have automake use am__CC in this case, # and then we could set am__CC="\$(top_srcdir)/compile \$(CC)" CC="$am_aux_dir/compile $CC" fi ac_ext=c ac_cpp='$CPP $CPPFLAGS' ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' ac_compiler_gnu=$ac_cv_c_compiler_gnu depcc="$CC" am_compiler_list= { $as_echo "$as_me:${as_lineno-$LINENO}: checking dependency style of $depcc" >&5 $as_echo_n "checking dependency style of $depcc... " >&6; } if ${am_cv_CC_dependencies_compiler_type+:} false; then : $as_echo_n "(cached) " >&6 else if test -z "$AMDEP_TRUE" && test -f "$am_depcomp"; then # We make a subdir and do the tests there. 
Otherwise we can end up # making bogus files that we don't know about and never remove. For # instance it was reported that on HP-UX the gcc test will end up # making a dummy file named 'D' -- because '-MD' means "put the output # in D". rm -rf conftest.dir mkdir conftest.dir # Copy depcomp to subdir because otherwise we won't find it if we're # using a relative directory. cp "$am_depcomp" conftest.dir cd conftest.dir # We will build objects and dependencies in a subdirectory because # it helps to detect inapplicable dependency modes. For instance # both Tru64's cc and ICC support -MD to output dependencies as a # side effect of compilation, but ICC will put the dependencies in # the current directory while Tru64 will put them in the object # directory. mkdir sub am_cv_CC_dependencies_compiler_type=none if test "$am_compiler_list" = ""; then am_compiler_list=`sed -n 's/^#*\([a-zA-Z0-9]*\))$/\1/p' < ./depcomp` fi am__universal=false case " $depcc " in #( *\ -arch\ *\ -arch\ *) am__universal=true ;; esac for depmode in $am_compiler_list; do # Setup a source with many dependencies, because some compilers # like to wrap large dependency lists on column 80 (with \), and # we should not choose a depcomp mode which is confused by this. # # We need to recreate these files for each test, as the compiler may # overwrite some of them when testing with obscure command lines. # This happens at least with the AIX C compiler. : > sub/conftest.c for i in 1 2 3 4 5 6; do echo '#include "conftst'$i'.h"' >> sub/conftest.c # Using ": > sub/conftst$i.h" creates only sub/conftst1.h with # Solaris 10 /bin/sh. echo '/* dummy */' > sub/conftst$i.h done echo "${am__include} ${am__quote}sub/conftest.Po${am__quote}" > confmf # We check with '-c' and '-o' for the sake of the "dashmstdout" # mode. It turns out that the SunPro C++ compiler does not properly # handle '-M -o', and we need to detect this. Also, some Intel # versions had trouble with output in subdirs. 
am__obj=sub/conftest.${OBJEXT-o} am__minus_obj="-o $am__obj" case $depmode in gcc) # This depmode causes a compiler race in universal mode. test "$am__universal" = false || continue ;; nosideeffect) # After this tag, mechanisms are not by side-effect, so they'll # only be used when explicitly requested. if test "x$enable_dependency_tracking" = xyes; then continue else break fi ;; msvc7 | msvc7msys | msvisualcpp | msvcmsys) # This compiler won't grok '-c -o', but also, the minuso test has # not run yet. These depmodes are late enough in the game, and # so weak that their functioning should not be impacted. am__obj=conftest.${OBJEXT-o} am__minus_obj= ;; none) break ;; esac if depmode=$depmode \ source=sub/conftest.c object=$am__obj \ depfile=sub/conftest.Po tmpdepfile=sub/conftest.TPo \ $SHELL ./depcomp $depcc -c $am__minus_obj sub/conftest.c \ >/dev/null 2>conftest.err && grep sub/conftst1.h sub/conftest.Po > /dev/null 2>&1 && grep sub/conftst6.h sub/conftest.Po > /dev/null 2>&1 && grep $am__obj sub/conftest.Po > /dev/null 2>&1 && ${MAKE-make} -s -f confmf > /dev/null 2>&1; then # icc doesn't choke on unknown options, it will just issue warnings # or remarks (even with -Werror). So we grep stderr for any message # that says an option was ignored or not supported. # When given -MP, icc 7.0 and 7.1 complain thusly: # icc: Command line warning: ignoring option '-M'; no argument required # The diagnosis changed in icc 8.0: # icc: Command line remark: option '-MP' not supported if (grep 'ignoring option' conftest.err || grep 'not supported' conftest.err) >/dev/null 2>&1; then :; else am_cv_CC_dependencies_compiler_type=$depmode break fi fi done cd .. 
rm -rf conftest.dir else am_cv_CC_dependencies_compiler_type=none fi fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $am_cv_CC_dependencies_compiler_type" >&5 $as_echo "$am_cv_CC_dependencies_compiler_type" >&6; } CCDEPMODE=depmode=$am_cv_CC_dependencies_compiler_type if test "x$enable_dependency_tracking" != xno \ && test "$am_cv_CC_dependencies_compiler_type" = gcc3; then am__fastdepCC_TRUE= am__fastdepCC_FALSE='#' else am__fastdepCC_TRUE='#' am__fastdepCC_FALSE= fi { $as_echo "$as_me:${as_lineno-$LINENO}: checking for a sed that does not truncate output" >&5 $as_echo_n "checking for a sed that does not truncate output... " >&6; } if ${ac_cv_path_SED+:} false; then : $as_echo_n "(cached) " >&6 else ac_script=s/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb/ for ac_i in 1 2 3 4 5 6 7; do ac_script="$ac_script$as_nl$ac_script" done echo "$ac_script" 2>/dev/null | sed 99q >conftest.sed { ac_script=; unset ac_script;} if test -z "$SED"; then ac_path_SED_found=false # Loop through the user's path and test for each of PROGNAME-LIST as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_prog in sed gsed; do for ac_exec_ext in '' $ac_executable_extensions; do ac_path_SED="$as_dir/$ac_prog$ac_exec_ext" as_fn_executable_p "$ac_path_SED" || continue # Check for GNU ac_path_SED and select it if it is found. 
# Check for GNU $ac_path_SED case `"$ac_path_SED" --version 2>&1` in *GNU*) ac_cv_path_SED="$ac_path_SED" ac_path_SED_found=:;; *) ac_count=0 $as_echo_n 0123456789 >"conftest.in" while : do cat "conftest.in" "conftest.in" >"conftest.tmp" mv "conftest.tmp" "conftest.in" cp "conftest.in" "conftest.nl" $as_echo '' >> "conftest.nl" "$ac_path_SED" -f conftest.sed < "conftest.nl" >"conftest.out" 2>/dev/null || break diff "conftest.out" "conftest.nl" >/dev/null 2>&1 || break as_fn_arith $ac_count + 1 && ac_count=$as_val if test $ac_count -gt ${ac_path_SED_max-0}; then # Best one so far, save it but keep looking for a better one ac_cv_path_SED="$ac_path_SED" ac_path_SED_max=$ac_count fi # 10*(2^10) chars as input seems more than enough test $ac_count -gt 10 && break done rm -f conftest.in conftest.tmp conftest.nl conftest.out;; esac $ac_path_SED_found && break 3 done done done IFS=$as_save_IFS if test -z "$ac_cv_path_SED"; then as_fn_error $? "no acceptable sed could be found in \$PATH" "$LINENO" 5 fi else ac_cv_path_SED=$SED fi fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_path_SED" >&5 $as_echo "$ac_cv_path_SED" >&6; } SED="$ac_cv_path_SED" rm -f conftest.sed test -z "$SED" && SED=sed Xsed="$SED -e 1s/^X//" { $as_echo "$as_me:${as_lineno-$LINENO}: checking for grep that handles long lines and -e" >&5 $as_echo_n "checking for grep that handles long lines and -e... " >&6; } if ${ac_cv_path_GREP+:} false; then : $as_echo_n "(cached) " >&6 else if test -z "$GREP"; then ac_path_GREP_found=false # Loop through the user's path and test for each of PROGNAME-LIST as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH$PATH_SEPARATOR/usr/xpg4/bin do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_prog in grep ggrep; do for ac_exec_ext in '' $ac_executable_extensions; do ac_path_GREP="$as_dir/$ac_prog$ac_exec_ext" as_fn_executable_p "$ac_path_GREP" || continue # Check for GNU ac_path_GREP and select it if it is found. 
# Check for GNU $ac_path_GREP case `"$ac_path_GREP" --version 2>&1` in *GNU*) ac_cv_path_GREP="$ac_path_GREP" ac_path_GREP_found=:;; *) ac_count=0 $as_echo_n 0123456789 >"conftest.in" while : do cat "conftest.in" "conftest.in" >"conftest.tmp" mv "conftest.tmp" "conftest.in" cp "conftest.in" "conftest.nl" $as_echo 'GREP' >> "conftest.nl" "$ac_path_GREP" -e 'GREP$' -e '-(cannot match)-' < "conftest.nl" >"conftest.out" 2>/dev/null || break diff "conftest.out" "conftest.nl" >/dev/null 2>&1 || break as_fn_arith $ac_count + 1 && ac_count=$as_val if test $ac_count -gt ${ac_path_GREP_max-0}; then # Best one so far, save it but keep looking for a better one ac_cv_path_GREP="$ac_path_GREP" ac_path_GREP_max=$ac_count fi # 10*(2^10) chars as input seems more than enough test $ac_count -gt 10 && break done rm -f conftest.in conftest.tmp conftest.nl conftest.out;; esac $ac_path_GREP_found && break 3 done done done IFS=$as_save_IFS if test -z "$ac_cv_path_GREP"; then as_fn_error $? "no acceptable grep could be found in $PATH$PATH_SEPARATOR/usr/xpg4/bin" "$LINENO" 5 fi else ac_cv_path_GREP=$GREP fi fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_path_GREP" >&5 $as_echo "$ac_cv_path_GREP" >&6; } GREP="$ac_cv_path_GREP" { $as_echo "$as_me:${as_lineno-$LINENO}: checking for egrep" >&5 $as_echo_n "checking for egrep... " >&6; } if ${ac_cv_path_EGREP+:} false; then : $as_echo_n "(cached) " >&6 else if echo a | $GREP -E '(a|b)' >/dev/null 2>&1 then ac_cv_path_EGREP="$GREP -E" else if test -z "$EGREP"; then ac_path_EGREP_found=false # Loop through the user's path and test for each of PROGNAME-LIST as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH$PATH_SEPARATOR/usr/xpg4/bin do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_prog in egrep; do for ac_exec_ext in '' $ac_executable_extensions; do ac_path_EGREP="$as_dir/$ac_prog$ac_exec_ext" as_fn_executable_p "$ac_path_EGREP" || continue # Check for GNU ac_path_EGREP and select it if it is found. 
# Check for GNU $ac_path_EGREP case `"$ac_path_EGREP" --version 2>&1` in *GNU*) ac_cv_path_EGREP="$ac_path_EGREP" ac_path_EGREP_found=:;; *) ac_count=0 $as_echo_n 0123456789 >"conftest.in" while : do cat "conftest.in" "conftest.in" >"conftest.tmp" mv "conftest.tmp" "conftest.in" cp "conftest.in" "conftest.nl" $as_echo 'EGREP' >> "conftest.nl" "$ac_path_EGREP" 'EGREP$' < "conftest.nl" >"conftest.out" 2>/dev/null || break diff "conftest.out" "conftest.nl" >/dev/null 2>&1 || break as_fn_arith $ac_count + 1 && ac_count=$as_val if test $ac_count -gt ${ac_path_EGREP_max-0}; then # Best one so far, save it but keep looking for a better one ac_cv_path_EGREP="$ac_path_EGREP" ac_path_EGREP_max=$ac_count fi # 10*(2^10) chars as input seems more than enough test $ac_count -gt 10 && break done rm -f conftest.in conftest.tmp conftest.nl conftest.out;; esac $ac_path_EGREP_found && break 3 done done done IFS=$as_save_IFS if test -z "$ac_cv_path_EGREP"; then as_fn_error $? "no acceptable egrep could be found in $PATH$PATH_SEPARATOR/usr/xpg4/bin" "$LINENO" 5 fi else ac_cv_path_EGREP=$EGREP fi fi fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_path_EGREP" >&5 $as_echo "$ac_cv_path_EGREP" >&6; } EGREP="$ac_cv_path_EGREP" { $as_echo "$as_me:${as_lineno-$LINENO}: checking for fgrep" >&5 $as_echo_n "checking for fgrep... " >&6; } if ${ac_cv_path_FGREP+:} false; then : $as_echo_n "(cached) " >&6 else if echo 'ab*c' | $GREP -F 'ab*c' >/dev/null 2>&1 then ac_cv_path_FGREP="$GREP -F" else if test -z "$FGREP"; then ac_path_FGREP_found=false # Loop through the user's path and test for each of PROGNAME-LIST as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH$PATH_SEPARATOR/usr/xpg4/bin do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_prog in fgrep; do for ac_exec_ext in '' $ac_executable_extensions; do ac_path_FGREP="$as_dir/$ac_prog$ac_exec_ext" as_fn_executable_p "$ac_path_FGREP" || continue # Check for GNU ac_path_FGREP and select it if it is found. 
# Check for GNU $ac_path_FGREP case `"$ac_path_FGREP" --version 2>&1` in *GNU*) ac_cv_path_FGREP="$ac_path_FGREP" ac_path_FGREP_found=:;; *) ac_count=0 $as_echo_n 0123456789 >"conftest.in" while : do cat "conftest.in" "conftest.in" >"conftest.tmp" mv "conftest.tmp" "conftest.in" cp "conftest.in" "conftest.nl" $as_echo 'FGREP' >> "conftest.nl" "$ac_path_FGREP" FGREP < "conftest.nl" >"conftest.out" 2>/dev/null || break diff "conftest.out" "conftest.nl" >/dev/null 2>&1 || break as_fn_arith $ac_count + 1 && ac_count=$as_val if test $ac_count -gt ${ac_path_FGREP_max-0}; then # Best one so far, save it but keep looking for a better one ac_cv_path_FGREP="$ac_path_FGREP" ac_path_FGREP_max=$ac_count fi # 10*(2^10) chars as input seems more than enough test $ac_count -gt 10 && break done rm -f conftest.in conftest.tmp conftest.nl conftest.out;; esac $ac_path_FGREP_found && break 3 done done done IFS=$as_save_IFS if test -z "$ac_cv_path_FGREP"; then as_fn_error $? "no acceptable fgrep could be found in $PATH$PATH_SEPARATOR/usr/xpg4/bin" "$LINENO" 5 fi else ac_cv_path_FGREP=$FGREP fi fi fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_path_FGREP" >&5 $as_echo "$ac_cv_path_FGREP" >&6; } FGREP="$ac_cv_path_FGREP" test -z "$GREP" && GREP=grep # Check whether --with-gnu-ld was given. if test "${with_gnu_ld+set}" = set; then : withval=$with_gnu_ld; test no = "$withval" || with_gnu_ld=yes else with_gnu_ld=no fi ac_prog=ld if test yes = "$GCC"; then # Check if gcc -print-prog-name=ld gives a path. { $as_echo "$as_me:${as_lineno-$LINENO}: checking for ld used by $CC" >&5 $as_echo_n "checking for ld used by $CC... " >&6; } case $host in *-*-mingw*) # gcc leaves a trailing carriage return, which upsets mingw ac_prog=`($CC -print-prog-name=ld) 2>&5 | tr -d '\015'` ;; *) ac_prog=`($CC -print-prog-name=ld) 2>&5` ;; esac case $ac_prog in # Accept absolute paths. 
[\\/]* | ?:[\\/]*) re_direlt='/[^/][^/]*/\.\./' # Canonicalize the pathname of ld ac_prog=`$ECHO "$ac_prog"| $SED 's%\\\\%/%g'` while $ECHO "$ac_prog" | $GREP "$re_direlt" > /dev/null 2>&1; do ac_prog=`$ECHO $ac_prog| $SED "s%$re_direlt%/%"` done test -z "$LD" && LD=$ac_prog ;; "") # If it fails, then pretend we aren't using GCC. ac_prog=ld ;; *) # If it is relative, then search for the first ld in PATH. with_gnu_ld=unknown ;; esac elif test yes = "$with_gnu_ld"; then { $as_echo "$as_me:${as_lineno-$LINENO}: checking for GNU ld" >&5 $as_echo_n "checking for GNU ld... " >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: checking for non-GNU ld" >&5 $as_echo_n "checking for non-GNU ld... " >&6; } fi if ${lt_cv_path_LD+:} false; then : $as_echo_n "(cached) " >&6 else if test -z "$LD"; then lt_save_ifs=$IFS; IFS=$PATH_SEPARATOR for ac_dir in $PATH; do IFS=$lt_save_ifs test -z "$ac_dir" && ac_dir=. if test -f "$ac_dir/$ac_prog" || test -f "$ac_dir/$ac_prog$ac_exeext"; then lt_cv_path_LD=$ac_dir/$ac_prog # Check to see if the program is GNU ld. I'd rather use --version, # but apparently some variants of GNU ld only accept -v. # Break only if it was the GNU/non-GNU ld that we prefer. case `"$lt_cv_path_LD" -v 2>&1 &5 $as_echo "$LD" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi test -z "$LD" && as_fn_error $? "no acceptable ld found in \$PATH" "$LINENO" 5 { $as_echo "$as_me:${as_lineno-$LINENO}: checking if the linker ($LD) is GNU ld" >&5 $as_echo_n "checking if the linker ($LD) is GNU ld... " >&6; } if ${lt_cv_prog_gnu_ld+:} false; then : $as_echo_n "(cached) " >&6 else # I'd rather use --version here, but apparently some GNU lds only accept -v. case `$LD -v 2>&1 &5 $as_echo "$lt_cv_prog_gnu_ld" >&6; } with_gnu_ld=$lt_cv_prog_gnu_ld { $as_echo "$as_me:${as_lineno-$LINENO}: checking for BSD- or MS-compatible name lister (nm)" >&5 $as_echo_n "checking for BSD- or MS-compatible name lister (nm)... 
" >&6; } if ${lt_cv_path_NM+:} false; then : $as_echo_n "(cached) " >&6 else if test -n "$NM"; then # Let the user override the test. lt_cv_path_NM=$NM else lt_nm_to_check=${ac_tool_prefix}nm if test -n "$ac_tool_prefix" && test "$build" = "$host"; then lt_nm_to_check="$lt_nm_to_check nm" fi for lt_tmp_nm in $lt_nm_to_check; do lt_save_ifs=$IFS; IFS=$PATH_SEPARATOR for ac_dir in $PATH /usr/ccs/bin/elf /usr/ccs/bin /usr/ucb /bin; do IFS=$lt_save_ifs test -z "$ac_dir" && ac_dir=. tmp_nm=$ac_dir/$lt_tmp_nm if test -f "$tmp_nm" || test -f "$tmp_nm$ac_exeext"; then # Check to see if the nm accepts a BSD-compat flag. # Adding the 'sed 1q' prevents false positives on HP-UX, which says: # nm: unknown option "B" ignored # Tru64's nm complains that /dev/null is an invalid object file # MSYS converts /dev/null to NUL, MinGW nm treats NUL as empty case $build_os in mingw*) lt_bad_file=conftest.nm/nofile ;; *) lt_bad_file=/dev/null ;; esac case `"$tmp_nm" -B $lt_bad_file 2>&1 | sed '1q'` in *$lt_bad_file* | *'Invalid file or object type'*) lt_cv_path_NM="$tmp_nm -B" break 2 ;; *) case `"$tmp_nm" -p /dev/null 2>&1 | sed '1q'` in */dev/null*) lt_cv_path_NM="$tmp_nm -p" break 2 ;; *) lt_cv_path_NM=${lt_cv_path_NM="$tmp_nm"} # keep the first match, but continue # so that we can try to find one that supports BSD flags ;; esac ;; esac fi done IFS=$lt_save_ifs done : ${lt_cv_path_NM=no} fi fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_path_NM" >&5 $as_echo "$lt_cv_path_NM" >&6; } if test no != "$lt_cv_path_NM"; then NM=$lt_cv_path_NM else # Didn't find any BSD compatible name lister, look for dumpbin. if test -n "$DUMPBIN"; then : # Let the user override the test. else if test -n "$ac_tool_prefix"; then for ac_prog in dumpbin "link -dump" do # Extract the first word of "$ac_tool_prefix$ac_prog", so it can be a program name with args. 
set dummy $ac_tool_prefix$ac_prog; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if ${ac_cv_prog_DUMPBIN+:} false; then : $as_echo_n "(cached) " >&6 else if test -n "$DUMPBIN"; then ac_cv_prog_DUMPBIN="$DUMPBIN" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_DUMPBIN="$ac_tool_prefix$ac_prog" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS fi fi DUMPBIN=$ac_cv_prog_DUMPBIN if test -n "$DUMPBIN"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $DUMPBIN" >&5 $as_echo "$DUMPBIN" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi test -n "$DUMPBIN" && break done fi if test -z "$DUMPBIN"; then ac_ct_DUMPBIN=$DUMPBIN for ac_prog in dumpbin "link -dump" do # Extract the first word of "$ac_prog", so it can be a program name with args. set dummy $ac_prog; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if ${ac_cv_prog_ac_ct_DUMPBIN+:} false; then : $as_echo_n "(cached) " >&6 else if test -n "$ac_ct_DUMPBIN"; then ac_cv_prog_ac_ct_DUMPBIN="$ac_ct_DUMPBIN" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. 
for ac_exec_ext in '' $ac_executable_extensions; do if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_ac_ct_DUMPBIN="$ac_prog" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS fi fi ac_ct_DUMPBIN=$ac_cv_prog_ac_ct_DUMPBIN if test -n "$ac_ct_DUMPBIN"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_DUMPBIN" >&5 $as_echo "$ac_ct_DUMPBIN" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi test -n "$ac_ct_DUMPBIN" && break done if test "x$ac_ct_DUMPBIN" = x; then DUMPBIN=":" else case $cross_compiling:$ac_tool_warned in yes:) { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 $as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} ac_tool_warned=yes ;; esac DUMPBIN=$ac_ct_DUMPBIN fi fi case `$DUMPBIN -symbols -headers /dev/null 2>&1 | sed '1q'` in *COFF*) DUMPBIN="$DUMPBIN -symbols -headers" ;; *) DUMPBIN=: ;; esac fi if test : != "$DUMPBIN"; then NM=$DUMPBIN fi fi test -z "$NM" && NM=nm { $as_echo "$as_me:${as_lineno-$LINENO}: checking the name lister ($NM) interface" >&5 $as_echo_n "checking the name lister ($NM) interface... 
" >&6; } if ${lt_cv_nm_interface+:} false; then : $as_echo_n "(cached) " >&6 else lt_cv_nm_interface="BSD nm" echo "int some_variable = 0;" > conftest.$ac_ext (eval echo "\"\$as_me:$LINENO: $ac_compile\"" >&5) (eval "$ac_compile" 2>conftest.err) cat conftest.err >&5 (eval echo "\"\$as_me:$LINENO: $NM \\\"conftest.$ac_objext\\\"\"" >&5) (eval "$NM \"conftest.$ac_objext\"" 2>conftest.err > conftest.out) cat conftest.err >&5 (eval echo "\"\$as_me:$LINENO: output\"" >&5) cat conftest.out >&5 if $GREP 'External.*some_variable' conftest.out > /dev/null; then lt_cv_nm_interface="MS dumpbin" fi rm -f conftest* fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_nm_interface" >&5 $as_echo "$lt_cv_nm_interface" >&6; } { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether ln -s works" >&5 $as_echo_n "checking whether ln -s works... " >&6; } LN_S=$as_ln_s if test "$LN_S" = "ln -s"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 $as_echo "yes" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no, using $LN_S" >&5 $as_echo "no, using $LN_S" >&6; } fi # find the maximum length of command line arguments { $as_echo "$as_me:${as_lineno-$LINENO}: checking the maximum length of command line arguments" >&5 $as_echo_n "checking the maximum length of command line arguments... " >&6; } if ${lt_cv_sys_max_cmd_len+:} false; then : $as_echo_n "(cached) " >&6 else i=0 teststring=ABCD case $build_os in msdosdjgpp*) # On DJGPP, this test can blow up pretty badly due to problems in libc # (any single argument exceeding 2000 bytes causes a buffer overrun # during glob expansion). Even if it were fixed, the result of this # check would be larger than it should be. lt_cv_sys_max_cmd_len=12288; # 12K is about right ;; gnu*) # Under GNU Hurd, this test is not required because there is # no limit to the length of command line arguments. 
# Libtool will interpret -1 as no limit whatsoever lt_cv_sys_max_cmd_len=-1; ;; cygwin* | mingw* | cegcc*) # On Win9x/ME, this test blows up -- it succeeds, but takes # about 5 minutes as the teststring grows exponentially. # Worse, since 9x/ME are not pre-emptively multitasking, # you end up with a "frozen" computer, even though with patience # the test eventually succeeds (with a max line length of 256k). # Instead, let's just punt: use the minimum linelength reported by # all of the supported platforms: 8192 (on NT/2K/XP). lt_cv_sys_max_cmd_len=8192; ;; mint*) # On MiNT this can take a long time and run out of memory. lt_cv_sys_max_cmd_len=8192; ;; amigaos*) # On AmigaOS with pdksh, this test takes hours, literally. # So we just punt and use a minimum line length of 8192. lt_cv_sys_max_cmd_len=8192; ;; bitrig* | darwin* | dragonfly* | freebsd* | netbsd* | openbsd*) # This has been around since 386BSD, at least. Likely further. if test -x /sbin/sysctl; then lt_cv_sys_max_cmd_len=`/sbin/sysctl -n kern.argmax` elif test -x /usr/sbin/sysctl; then lt_cv_sys_max_cmd_len=`/usr/sbin/sysctl -n kern.argmax` else lt_cv_sys_max_cmd_len=65536 # usable default for all BSDs fi # And add a safety zone lt_cv_sys_max_cmd_len=`expr $lt_cv_sys_max_cmd_len \/ 4` lt_cv_sys_max_cmd_len=`expr $lt_cv_sys_max_cmd_len \* 3` ;; interix*) # We know the value 262144 and hardcode it with a safety zone (like BSD) lt_cv_sys_max_cmd_len=196608 ;; os2*) # The test takes a long time on OS/2. lt_cv_sys_max_cmd_len=8192 ;; osf*) # Dr. Hans Ekkehard Plesser reports seeing a kernel panic running configure # due to this test when exec_disable_arg_limit is 1 on Tru64. It is not # nice to cause kernel panics so lets avoid the loop below. # First set a reasonable default. 
lt_cv_sys_max_cmd_len=16384 # if test -x /sbin/sysconfig; then case `/sbin/sysconfig -q proc exec_disable_arg_limit` in *1*) lt_cv_sys_max_cmd_len=-1 ;; esac fi ;; sco3.2v5*) lt_cv_sys_max_cmd_len=102400 ;; sysv5* | sco5v6* | sysv4.2uw2*) kargmax=`grep ARG_MAX /etc/conf/cf.d/stune 2>/dev/null` if test -n "$kargmax"; then lt_cv_sys_max_cmd_len=`echo $kargmax | sed 's/.*[ ]//'` else lt_cv_sys_max_cmd_len=32768 fi ;; *) lt_cv_sys_max_cmd_len=`(getconf ARG_MAX) 2> /dev/null` if test -n "$lt_cv_sys_max_cmd_len" && \ test undefined != "$lt_cv_sys_max_cmd_len"; then lt_cv_sys_max_cmd_len=`expr $lt_cv_sys_max_cmd_len \/ 4` lt_cv_sys_max_cmd_len=`expr $lt_cv_sys_max_cmd_len \* 3` else # Make teststring a little bigger before we do anything with it. # a 1K string should be a reasonable start. for i in 1 2 3 4 5 6 7 8; do teststring=$teststring$teststring done SHELL=${SHELL-${CONFIG_SHELL-/bin/sh}} # If test is not a shell built-in, we'll probably end up computing a # maximum length that is only half of the actual maximum length, but # we can't tell. while { test X`env echo "$teststring$teststring" 2>/dev/null` \ = "X$teststring$teststring"; } >/dev/null 2>&1 && test 17 != "$i" # 1/2 MB should be enough do i=`expr $i + 1` teststring=$teststring$teststring done # Only check the string length outside the loop. lt_cv_sys_max_cmd_len=`expr "X$teststring" : ".*" 2>&1` teststring= # Add a significant safety factor because C++ compilers can tack on # massive amounts of additional arguments before passing them to the # linker. It appears as though 1/2 is a usable value. 
lt_cv_sys_max_cmd_len=`expr $lt_cv_sys_max_cmd_len \/ 2` fi ;; esac fi if test -n "$lt_cv_sys_max_cmd_len"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_sys_max_cmd_len" >&5 $as_echo "$lt_cv_sys_max_cmd_len" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: none" >&5 $as_echo "none" >&6; } fi max_cmd_len=$lt_cv_sys_max_cmd_len : ${CP="cp -f"} : ${MV="mv -f"} : ${RM="rm -f"} if ( (MAIL=60; unset MAIL) || exit) >/dev/null 2>&1; then lt_unset=unset else lt_unset=false fi # test EBCDIC or ASCII case `echo X|tr X '\101'` in A) # ASCII based system # \n is not interpreted correctly by Solaris 8 /usr/ucb/tr lt_SP2NL='tr \040 \012' lt_NL2SP='tr \015\012 \040\040' ;; *) # EBCDIC based system lt_SP2NL='tr \100 \n' lt_NL2SP='tr \r\n \100\100' ;; esac { $as_echo "$as_me:${as_lineno-$LINENO}: checking how to convert $build file names to $host format" >&5 $as_echo_n "checking how to convert $build file names to $host format... " >&6; } if ${lt_cv_to_host_file_cmd+:} false; then : $as_echo_n "(cached) " >&6 else case $host in *-*-mingw* ) case $build in *-*-mingw* ) # actually msys lt_cv_to_host_file_cmd=func_convert_file_msys_to_w32 ;; *-*-cygwin* ) lt_cv_to_host_file_cmd=func_convert_file_cygwin_to_w32 ;; * ) # otherwise, assume *nix lt_cv_to_host_file_cmd=func_convert_file_nix_to_w32 ;; esac ;; *-*-cygwin* ) case $build in *-*-mingw* ) # actually msys lt_cv_to_host_file_cmd=func_convert_file_msys_to_cygwin ;; *-*-cygwin* ) lt_cv_to_host_file_cmd=func_convert_file_noop ;; * ) # otherwise, assume *nix lt_cv_to_host_file_cmd=func_convert_file_nix_to_cygwin ;; esac ;; * ) # unhandled hosts (and "normal" native builds) lt_cv_to_host_file_cmd=func_convert_file_noop ;; esac fi to_host_file_cmd=$lt_cv_to_host_file_cmd { $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_to_host_file_cmd" >&5 $as_echo "$lt_cv_to_host_file_cmd" >&6; } { $as_echo "$as_me:${as_lineno-$LINENO}: checking how to convert $build file names to toolchain format" >&5 $as_echo_n 
"checking how to convert $build file names to toolchain format... " >&6; } if ${lt_cv_to_tool_file_cmd+:} false; then : $as_echo_n "(cached) " >&6 else #assume ordinary cross tools, or native build. lt_cv_to_tool_file_cmd=func_convert_file_noop case $host in *-*-mingw* ) case $build in *-*-mingw* ) # actually msys lt_cv_to_tool_file_cmd=func_convert_file_msys_to_w32 ;; esac ;; esac fi to_tool_file_cmd=$lt_cv_to_tool_file_cmd { $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_to_tool_file_cmd" >&5 $as_echo "$lt_cv_to_tool_file_cmd" >&6; } { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $LD option to reload object files" >&5 $as_echo_n "checking for $LD option to reload object files... " >&6; } if ${lt_cv_ld_reload_flag+:} false; then : $as_echo_n "(cached) " >&6 else lt_cv_ld_reload_flag='-r' fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_ld_reload_flag" >&5 $as_echo "$lt_cv_ld_reload_flag" >&6; } reload_flag=$lt_cv_ld_reload_flag case $reload_flag in "" | " "*) ;; *) reload_flag=" $reload_flag" ;; esac reload_cmds='$LD$reload_flag -o $output$reload_objs' case $host_os in cygwin* | mingw* | pw32* | cegcc*) if test yes != "$GCC"; then reload_cmds=false fi ;; darwin*) if test yes = "$GCC"; then reload_cmds='$LTCC $LTCFLAGS -nostdlib $wl-r -o $output$reload_objs' else reload_cmds='$LD$reload_flag -o $output$reload_objs' fi ;; esac if test -n "$ac_tool_prefix"; then # Extract the first word of "${ac_tool_prefix}objdump", so it can be a program name with args. set dummy ${ac_tool_prefix}objdump; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if ${ac_cv_prog_OBJDUMP+:} false; then : $as_echo_n "(cached) " >&6 else if test -n "$OBJDUMP"; then ac_cv_prog_OBJDUMP="$OBJDUMP" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. 
for ac_exec_ext in '' $ac_executable_extensions; do if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_OBJDUMP="${ac_tool_prefix}objdump" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS fi fi OBJDUMP=$ac_cv_prog_OBJDUMP if test -n "$OBJDUMP"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $OBJDUMP" >&5 $as_echo "$OBJDUMP" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi fi if test -z "$ac_cv_prog_OBJDUMP"; then ac_ct_OBJDUMP=$OBJDUMP # Extract the first word of "objdump", so it can be a program name with args. set dummy objdump; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if ${ac_cv_prog_ac_ct_OBJDUMP+:} false; then : $as_echo_n "(cached) " >&6 else if test -n "$ac_ct_OBJDUMP"; then ac_cv_prog_ac_ct_OBJDUMP="$ac_ct_OBJDUMP" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. 
for ac_exec_ext in '' $ac_executable_extensions; do if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_ac_ct_OBJDUMP="objdump" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS fi fi ac_ct_OBJDUMP=$ac_cv_prog_ac_ct_OBJDUMP if test -n "$ac_ct_OBJDUMP"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_OBJDUMP" >&5 $as_echo "$ac_ct_OBJDUMP" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi if test "x$ac_ct_OBJDUMP" = x; then OBJDUMP="false" else case $cross_compiling:$ac_tool_warned in yes:) { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 $as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} ac_tool_warned=yes ;; esac OBJDUMP=$ac_ct_OBJDUMP fi else OBJDUMP="$ac_cv_prog_OBJDUMP" fi test -z "$OBJDUMP" && OBJDUMP=objdump { $as_echo "$as_me:${as_lineno-$LINENO}: checking how to recognize dependent libraries" >&5 $as_echo_n "checking how to recognize dependent libraries... " >&6; } if ${lt_cv_deplibs_check_method+:} false; then : $as_echo_n "(cached) " >&6 else lt_cv_file_magic_cmd='$MAGIC_CMD' lt_cv_file_magic_test_file= lt_cv_deplibs_check_method='unknown' # Need to set the preceding variable on all platforms that support # interlibrary dependencies. # 'none' -- dependencies not supported. # 'unknown' -- same as none, but documents that we really don't know. # 'pass_all' -- all dependencies passed with no checks. # 'test_compile' -- check by making test program. # 'file_magic [[regex]]' -- check by looking for files in library path # that responds to the $file_magic_cmd with a given extended regex. # If you have 'file' or equivalent on your system and you're not sure # whether 'pass_all' will *always* work, you probably want this one. 
case $host_os in aix[4-9]*) lt_cv_deplibs_check_method=pass_all ;; beos*) lt_cv_deplibs_check_method=pass_all ;; bsdi[45]*) lt_cv_deplibs_check_method='file_magic ELF [0-9][0-9]*-bit [ML]SB (shared object|dynamic lib)' lt_cv_file_magic_cmd='/usr/bin/file -L' lt_cv_file_magic_test_file=/shlib/libc.so ;; cygwin*) # func_win32_libid is a shell function defined in ltmain.sh lt_cv_deplibs_check_method='file_magic ^x86 archive import|^x86 DLL' lt_cv_file_magic_cmd='func_win32_libid' ;; mingw* | pw32*) # Base MSYS/MinGW do not provide the 'file' command needed by # func_win32_libid shell function, so use a weaker test based on 'objdump', # unless we find 'file', for example because we are cross-compiling. if ( file / ) >/dev/null 2>&1; then lt_cv_deplibs_check_method='file_magic ^x86 archive import|^x86 DLL' lt_cv_file_magic_cmd='func_win32_libid' else # Keep this pattern in sync with the one in func_win32_libid. lt_cv_deplibs_check_method='file_magic file format (pei*-i386(.*architecture: i386)?|pe-arm-wince|pe-x86-64)' lt_cv_file_magic_cmd='$OBJDUMP -f' fi ;; cegcc*) # use the weaker test based on 'objdump'. See mingw*. lt_cv_deplibs_check_method='file_magic file format pe-arm-.*little(.*architecture: arm)?' lt_cv_file_magic_cmd='$OBJDUMP -f' ;; darwin* | rhapsody*) lt_cv_deplibs_check_method=pass_all ;; freebsd* | dragonfly*) if echo __ELF__ | $CC -E - | $GREP __ELF__ > /dev/null; then case $host_cpu in i*86 ) # Not sure whether the presence of OpenBSD here was a mistake. # Let's accept both of them until this is cleared up. 
lt_cv_deplibs_check_method='file_magic (FreeBSD|OpenBSD|DragonFly)/i[3-9]86 (compact )?demand paged shared library' lt_cv_file_magic_cmd=/usr/bin/file lt_cv_file_magic_test_file=`echo /usr/lib/libc.so.*` ;; esac else lt_cv_deplibs_check_method=pass_all fi ;; haiku*) lt_cv_deplibs_check_method=pass_all ;; hpux10.20* | hpux11*) lt_cv_file_magic_cmd=/usr/bin/file case $host_cpu in ia64*) lt_cv_deplibs_check_method='file_magic (s[0-9][0-9][0-9]|ELF-[0-9][0-9]) shared object file - IA64' lt_cv_file_magic_test_file=/usr/lib/hpux32/libc.so ;; hppa*64*) lt_cv_deplibs_check_method='file_magic (s[0-9][0-9][0-9]|ELF[ -][0-9][0-9])(-bit)?( [LM]SB)? shared object( file)?[, -]* PA-RISC [0-9]\.[0-9]' lt_cv_file_magic_test_file=/usr/lib/pa20_64/libc.sl ;; *) lt_cv_deplibs_check_method='file_magic (s[0-9][0-9][0-9]|PA-RISC[0-9]\.[0-9]) shared library' lt_cv_file_magic_test_file=/usr/lib/libc.sl ;; esac ;; interix[3-9]*) # PIC code is broken on Interix 3.x, that's why |\.a not |_pic\.a here lt_cv_deplibs_check_method='match_pattern /lib[^/]+(\.so|\.a)$' ;; irix5* | irix6* | nonstopux*) case $LD in *-32|*"-32 ") libmagic=32-bit;; *-n32|*"-n32 ") libmagic=N32;; *-64|*"-64 ") libmagic=64-bit;; *) libmagic=never-match;; esac lt_cv_deplibs_check_method=pass_all ;; # This must be glibc/ELF. 
linux* | k*bsd*-gnu | kopensolaris*-gnu | gnu*) lt_cv_deplibs_check_method=pass_all ;; netbsd*) if echo __ELF__ | $CC -E - | $GREP __ELF__ > /dev/null; then lt_cv_deplibs_check_method='match_pattern /lib[^/]+(\.so\.[0-9]+\.[0-9]+|_pic\.a)$' else lt_cv_deplibs_check_method='match_pattern /lib[^/]+(\.so|_pic\.a)$' fi ;; newos6*) lt_cv_deplibs_check_method='file_magic ELF [0-9][0-9]*-bit [ML]SB (executable|dynamic lib)' lt_cv_file_magic_cmd=/usr/bin/file lt_cv_file_magic_test_file=/usr/lib/libnls.so ;; *nto* | *qnx*) lt_cv_deplibs_check_method=pass_all ;; openbsd* | bitrig*) if test -z "`echo __ELF__ | $CC -E - | $GREP __ELF__`"; then lt_cv_deplibs_check_method='match_pattern /lib[^/]+(\.so\.[0-9]+\.[0-9]+|\.so|_pic\.a)$' else lt_cv_deplibs_check_method='match_pattern /lib[^/]+(\.so\.[0-9]+\.[0-9]+|_pic\.a)$' fi ;; osf3* | osf4* | osf5*) lt_cv_deplibs_check_method=pass_all ;; rdos*) lt_cv_deplibs_check_method=pass_all ;; solaris*) lt_cv_deplibs_check_method=pass_all ;; sysv5* | sco3.2v5* | sco5v6* | unixware* | OpenUNIX* | sysv4*uw2*) lt_cv_deplibs_check_method=pass_all ;; sysv4 | sysv4.3*) case $host_vendor in motorola) lt_cv_deplibs_check_method='file_magic ELF [0-9][0-9]*-bit [ML]SB (shared object|dynamic lib) M[0-9][0-9]* Version [0-9]' lt_cv_file_magic_test_file=`echo /usr/lib/libc.so*` ;; ncr) lt_cv_deplibs_check_method=pass_all ;; sequent) lt_cv_file_magic_cmd='/bin/file' lt_cv_deplibs_check_method='file_magic ELF [0-9][0-9]*-bit [LM]SB (shared object|dynamic lib )' ;; sni) lt_cv_file_magic_cmd='/bin/file' lt_cv_deplibs_check_method="file_magic ELF [0-9][0-9]*-bit [LM]SB dynamic lib" lt_cv_file_magic_test_file=/lib/libc.so ;; siemens) lt_cv_deplibs_check_method=pass_all ;; pc) lt_cv_deplibs_check_method=pass_all ;; esac ;; tpf*) lt_cv_deplibs_check_method=pass_all ;; os2*) lt_cv_deplibs_check_method=pass_all ;; esac fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_deplibs_check_method" >&5 $as_echo "$lt_cv_deplibs_check_method" >&6; } file_magic_glob= 
want_nocaseglob=no if test "$build" = "$host"; then case $host_os in mingw* | pw32*) if ( shopt | grep nocaseglob ) >/dev/null 2>&1; then want_nocaseglob=yes else file_magic_glob=`echo aAbBcCdDeEfFgGhHiIjJkKlLmMnNoOpPqQrRsStTuUvVwWxXyYzZ | $SED -e "s/\(..\)/s\/[\1]\/[\1]\/g;/g"` fi ;; esac fi file_magic_cmd=$lt_cv_file_magic_cmd deplibs_check_method=$lt_cv_deplibs_check_method test -z "$deplibs_check_method" && deplibs_check_method=unknown if test -n "$ac_tool_prefix"; then # Extract the first word of "${ac_tool_prefix}dlltool", so it can be a program name with args. set dummy ${ac_tool_prefix}dlltool; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if ${ac_cv_prog_DLLTOOL+:} false; then : $as_echo_n "(cached) " >&6 else if test -n "$DLLTOOL"; then ac_cv_prog_DLLTOOL="$DLLTOOL" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_DLLTOOL="${ac_tool_prefix}dlltool" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS fi fi DLLTOOL=$ac_cv_prog_DLLTOOL if test -n "$DLLTOOL"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $DLLTOOL" >&5 $as_echo "$DLLTOOL" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi fi if test -z "$ac_cv_prog_DLLTOOL"; then ac_ct_DLLTOOL=$DLLTOOL # Extract the first word of "dlltool", so it can be a program name with args. set dummy dlltool; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... 
" >&6; } if ${ac_cv_prog_ac_ct_DLLTOOL+:} false; then : $as_echo_n "(cached) " >&6 else if test -n "$ac_ct_DLLTOOL"; then ac_cv_prog_ac_ct_DLLTOOL="$ac_ct_DLLTOOL" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_ac_ct_DLLTOOL="dlltool" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS fi fi ac_ct_DLLTOOL=$ac_cv_prog_ac_ct_DLLTOOL if test -n "$ac_ct_DLLTOOL"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_DLLTOOL" >&5 $as_echo "$ac_ct_DLLTOOL" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi if test "x$ac_ct_DLLTOOL" = x; then DLLTOOL="false" else case $cross_compiling:$ac_tool_warned in yes:) { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 $as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} ac_tool_warned=yes ;; esac DLLTOOL=$ac_ct_DLLTOOL fi else DLLTOOL="$ac_cv_prog_DLLTOOL" fi test -z "$DLLTOOL" && DLLTOOL=dlltool { $as_echo "$as_me:${as_lineno-$LINENO}: checking how to associate runtime and link libraries" >&5 $as_echo_n "checking how to associate runtime and link libraries... 
" >&6; } if ${lt_cv_sharedlib_from_linklib_cmd+:} false; then : $as_echo_n "(cached) " >&6 else lt_cv_sharedlib_from_linklib_cmd='unknown' case $host_os in cygwin* | mingw* | pw32* | cegcc*) # two different shell functions defined in ltmain.sh; # decide which one to use based on capabilities of $DLLTOOL case `$DLLTOOL --help 2>&1` in *--identify-strict*) lt_cv_sharedlib_from_linklib_cmd=func_cygming_dll_for_implib ;; *) lt_cv_sharedlib_from_linklib_cmd=func_cygming_dll_for_implib_fallback ;; esac ;; *) # fallback: assume linklib IS sharedlib lt_cv_sharedlib_from_linklib_cmd=$ECHO ;; esac fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_sharedlib_from_linklib_cmd" >&5 $as_echo "$lt_cv_sharedlib_from_linklib_cmd" >&6; } sharedlib_from_linklib_cmd=$lt_cv_sharedlib_from_linklib_cmd test -z "$sharedlib_from_linklib_cmd" && sharedlib_from_linklib_cmd=$ECHO if test -n "$ac_tool_prefix"; then for ac_prog in ar do # Extract the first word of "$ac_tool_prefix$ac_prog", so it can be a program name with args. set dummy $ac_tool_prefix$ac_prog; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if ${ac_cv_prog_AR+:} false; then : $as_echo_n "(cached) " >&6 else if test -n "$AR"; then ac_cv_prog_AR="$AR" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. 
for ac_exec_ext in '' $ac_executable_extensions; do if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_AR="$ac_tool_prefix$ac_prog" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS fi fi AR=$ac_cv_prog_AR if test -n "$AR"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $AR" >&5 $as_echo "$AR" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi test -n "$AR" && break done fi if test -z "$AR"; then ac_ct_AR=$AR for ac_prog in ar do # Extract the first word of "$ac_prog", so it can be a program name with args. set dummy $ac_prog; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if ${ac_cv_prog_ac_ct_AR+:} false; then : $as_echo_n "(cached) " >&6 else if test -n "$ac_ct_AR"; then ac_cv_prog_ac_ct_AR="$ac_ct_AR" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. 
for ac_exec_ext in '' $ac_executable_extensions; do if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_ac_ct_AR="$ac_prog" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS fi fi ac_ct_AR=$ac_cv_prog_ac_ct_AR if test -n "$ac_ct_AR"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_AR" >&5 $as_echo "$ac_ct_AR" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi test -n "$ac_ct_AR" && break done if test "x$ac_ct_AR" = x; then AR="false" else case $cross_compiling:$ac_tool_warned in yes:) { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 $as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} ac_tool_warned=yes ;; esac AR=$ac_ct_AR fi fi : ${AR=ar} : ${AR_FLAGS=cru} { $as_echo "$as_me:${as_lineno-$LINENO}: checking for archiver @FILE support" >&5 $as_echo_n "checking for archiver @FILE support... " >&6; } if ${lt_cv_ar_at_file+:} false; then : $as_echo_n "(cached) " >&6 else lt_cv_ar_at_file=no cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ int main () { ; return 0; } _ACEOF if ac_fn_c_try_compile "$LINENO"; then : echo conftest.$ac_objext > conftest.lst lt_ar_try='$AR $AR_FLAGS libconftest.a @conftest.lst >&5' { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$lt_ar_try\""; } >&5 (eval $lt_ar_try) 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; } if test 0 -eq "$ac_status"; then # Ensure the archiver fails upon bogus file names. rm -f conftest.$ac_objext libconftest.a { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$lt_ar_try\""; } >&5 (eval $lt_ar_try) 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? 
= $ac_status" >&5 test $ac_status = 0; } if test 0 -ne "$ac_status"; then lt_cv_ar_at_file=@ fi fi rm -f conftest.* libconftest.a fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_ar_at_file" >&5 $as_echo "$lt_cv_ar_at_file" >&6; } if test no = "$lt_cv_ar_at_file"; then archiver_list_spec= else archiver_list_spec=$lt_cv_ar_at_file fi if test -n "$ac_tool_prefix"; then # Extract the first word of "${ac_tool_prefix}strip", so it can be a program name with args. set dummy ${ac_tool_prefix}strip; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if ${ac_cv_prog_STRIP+:} false; then : $as_echo_n "(cached) " >&6 else if test -n "$STRIP"; then ac_cv_prog_STRIP="$STRIP" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_STRIP="${ac_tool_prefix}strip" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS fi fi STRIP=$ac_cv_prog_STRIP if test -n "$STRIP"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $STRIP" >&5 $as_echo "$STRIP" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi fi if test -z "$ac_cv_prog_STRIP"; then ac_ct_STRIP=$STRIP # Extract the first word of "strip", so it can be a program name with args. set dummy strip; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if ${ac_cv_prog_ac_ct_STRIP+:} false; then : $as_echo_n "(cached) " >&6 else if test -n "$ac_ct_STRIP"; then ac_cv_prog_ac_ct_STRIP="$ac_ct_STRIP" # Let the user override the test. 
else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_ac_ct_STRIP="strip" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS fi fi ac_ct_STRIP=$ac_cv_prog_ac_ct_STRIP if test -n "$ac_ct_STRIP"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_STRIP" >&5 $as_echo "$ac_ct_STRIP" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi if test "x$ac_ct_STRIP" = x; then STRIP=":" else case $cross_compiling:$ac_tool_warned in yes:) { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 $as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} ac_tool_warned=yes ;; esac STRIP=$ac_ct_STRIP fi else STRIP="$ac_cv_prog_STRIP" fi test -z "$STRIP" && STRIP=: if test -n "$ac_tool_prefix"; then # Extract the first word of "${ac_tool_prefix}ranlib", so it can be a program name with args. set dummy ${ac_tool_prefix}ranlib; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if ${ac_cv_prog_RANLIB+:} false; then : $as_echo_n "(cached) " >&6 else if test -n "$RANLIB"; then ac_cv_prog_RANLIB="$RANLIB" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. 
for ac_exec_ext in '' $ac_executable_extensions; do if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_RANLIB="${ac_tool_prefix}ranlib" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS fi fi RANLIB=$ac_cv_prog_RANLIB if test -n "$RANLIB"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $RANLIB" >&5 $as_echo "$RANLIB" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi fi if test -z "$ac_cv_prog_RANLIB"; then ac_ct_RANLIB=$RANLIB # Extract the first word of "ranlib", so it can be a program name with args. set dummy ranlib; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if ${ac_cv_prog_ac_ct_RANLIB+:} false; then : $as_echo_n "(cached) " >&6 else if test -n "$ac_ct_RANLIB"; then ac_cv_prog_ac_ct_RANLIB="$ac_ct_RANLIB" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. 
for ac_exec_ext in '' $ac_executable_extensions; do if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_ac_ct_RANLIB="ranlib" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS fi fi ac_ct_RANLIB=$ac_cv_prog_ac_ct_RANLIB if test -n "$ac_ct_RANLIB"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_RANLIB" >&5 $as_echo "$ac_ct_RANLIB" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi if test "x$ac_ct_RANLIB" = x; then RANLIB=":" else case $cross_compiling:$ac_tool_warned in yes:) { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 $as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} ac_tool_warned=yes ;; esac RANLIB=$ac_ct_RANLIB fi else RANLIB="$ac_cv_prog_RANLIB" fi test -z "$RANLIB" && RANLIB=: # Determine commands to create old-style static archives. old_archive_cmds='$AR $AR_FLAGS $oldlib$oldobjs' old_postinstall_cmds='chmod 644 $oldlib' old_postuninstall_cmds= if test -n "$RANLIB"; then case $host_os in bitrig* | openbsd*) old_postinstall_cmds="$old_postinstall_cmds~\$RANLIB -t \$tool_oldlib" ;; *) old_postinstall_cmds="$old_postinstall_cmds~\$RANLIB \$tool_oldlib" ;; esac old_archive_cmds="$old_archive_cmds~\$RANLIB \$tool_oldlib" fi case $host_os in darwin*) lock_old_archive_extraction=yes ;; *) lock_old_archive_extraction=no ;; esac # If no C compiler was specified, use CC. LTCC=${LTCC-"$CC"} # If no C compiler flags were specified, use CFLAGS. LTCFLAGS=${LTCFLAGS-"$CFLAGS"} # Allow CC to be a program name with arguments. compiler=$CC # Check for command to grab the raw symbol name followed by C symbol from nm. { $as_echo "$as_me:${as_lineno-$LINENO}: checking command to parse $NM output from $compiler object" >&5 $as_echo_n "checking command to parse $NM output from $compiler object... 
" >&6; } if ${lt_cv_sys_global_symbol_pipe+:} false; then : $as_echo_n "(cached) " >&6 else # These are sane defaults that work on at least a few old systems. # [They come from Ultrix. What could be older than Ultrix?!! ;)] # Character class describing NM global symbol codes. symcode='[BCDEGRST]' # Regexp to match symbols that can be accessed directly from C. sympat='\([_A-Za-z][_A-Za-z0-9]*\)' # Define system-specific variables. case $host_os in aix*) symcode='[BCDT]' ;; cygwin* | mingw* | pw32* | cegcc*) symcode='[ABCDGISTW]' ;; hpux*) if test ia64 = "$host_cpu"; then symcode='[ABCDEGRST]' fi ;; irix* | nonstopux*) symcode='[BCDEGRST]' ;; osf*) symcode='[BCDEGQRST]' ;; solaris*) symcode='[BDRT]' ;; sco3.2v5*) symcode='[DT]' ;; sysv4.2uw2*) symcode='[DT]' ;; sysv5* | sco5v6* | unixware* | OpenUNIX*) symcode='[ABDT]' ;; sysv4) symcode='[DFNSTU]' ;; esac # If we're using GNU nm, then use its standard symbol codes. case `$NM -V 2>&1` in *GNU* | *'with BFD'*) symcode='[ABCDGIRSTW]' ;; esac if test "$lt_cv_nm_interface" = "MS dumpbin"; then # Gets list of data symbols to import. lt_cv_sys_global_symbol_to_import="sed -n -e 's/^I .* \(.*\)$/\1/p'" # Adjust the below global symbol transforms to fixup imported variables. lt_cdecl_hook=" -e 's/^I .* \(.*\)$/extern __declspec(dllimport) char \1;/p'" lt_c_name_hook=" -e 's/^I .* \(.*\)$/ {\"\1\", (void *) 0},/p'" lt_c_name_lib_hook="\ -e 's/^I .* \(lib.*\)$/ {\"\1\", (void *) 0},/p'\ -e 's/^I .* \(.*\)$/ {\"lib\1\", (void *) 0},/p'" else # Disable hooks by default. lt_cv_sys_global_symbol_to_import= lt_cdecl_hook= lt_c_name_hook= lt_c_name_lib_hook= fi # Transform an extracted symbol line into a proper C declaration. # Some systems (esp. on ia64) link data and code symbols differently, # so use this general approach. 
lt_cv_sys_global_symbol_to_cdecl="sed -n"\ $lt_cdecl_hook\ " -e 's/^T .* \(.*\)$/extern int \1();/p'"\ " -e 's/^$symcode$symcode* .* \(.*\)$/extern char \1;/p'" # Transform an extracted symbol line into symbol name and symbol address lt_cv_sys_global_symbol_to_c_name_address="sed -n"\ $lt_c_name_hook\ " -e 's/^: \(.*\) .*$/ {\"\1\", (void *) 0},/p'"\ " -e 's/^$symcode$symcode* .* \(.*\)$/ {\"\1\", (void *) \&\1},/p'" # Transform an extracted symbol line into symbol name with lib prefix and # symbol address. lt_cv_sys_global_symbol_to_c_name_address_lib_prefix="sed -n"\ $lt_c_name_lib_hook\ " -e 's/^: \(.*\) .*$/ {\"\1\", (void *) 0},/p'"\ " -e 's/^$symcode$symcode* .* \(lib.*\)$/ {\"\1\", (void *) \&\1},/p'"\ " -e 's/^$symcode$symcode* .* \(.*\)$/ {\"lib\1\", (void *) \&\1},/p'" # Handle CRLF in mingw tool chain opt_cr= case $build_os in mingw*) opt_cr=`$ECHO 'x\{0,1\}' | tr x '\015'` # option cr in regexp ;; esac # Try without a prefix underscore, then with it. for ac_symprfx in "" "_"; do # Transform symcode, sympat, and symprfx into a raw symbol and a C symbol. symxfrm="\\1 $ac_symprfx\\2 \\2" # Write the raw and C identifiers. if test "$lt_cv_nm_interface" = "MS dumpbin"; then # Fake it for dumpbin and say T for any non-static function, # D for any global variable and I for any imported variable. # Also find C++ and __fastcall symbols from MSVC++, # which start with @ or ?. 
lt_cv_sys_global_symbol_pipe="$AWK '"\ " {last_section=section; section=\$ 3};"\ " /^COFF SYMBOL TABLE/{for(i in hide) delete hide[i]};"\ " /Section length .*#relocs.*(pick any)/{hide[last_section]=1};"\ " /^ *Symbol name *: /{split(\$ 0,sn,\":\"); si=substr(sn[2],2)};"\ " /^ *Type *: code/{print \"T\",si,substr(si,length(prfx))};"\ " /^ *Type *: data/{print \"I\",si,substr(si,length(prfx))};"\ " \$ 0!~/External *\|/{next};"\ " / 0+ UNDEF /{next}; / UNDEF \([^|]\)*()/{next};"\ " {if(hide[section]) next};"\ " {f=\"D\"}; \$ 0~/\(\).*\|/{f=\"T\"};"\ " {split(\$ 0,a,/\||\r/); split(a[2],s)};"\ " s[1]~/^[@?]/{print f,s[1],s[1]; next};"\ " s[1]~prfx {split(s[1],t,\"@\"); print f,t[1],substr(t[1],length(prfx))}"\ " ' prfx=^$ac_symprfx" else lt_cv_sys_global_symbol_pipe="sed -n -e 's/^.*[ ]\($symcode$symcode*\)[ ][ ]*$ac_symprfx$sympat$opt_cr$/$symxfrm/p'" fi lt_cv_sys_global_symbol_pipe="$lt_cv_sys_global_symbol_pipe | sed '/ __gnu_lto/d'" # Check to see that the pipe works correctly. pipe_works=no rm -f conftest* cat > conftest.$ac_ext <<_LT_EOF #ifdef __cplusplus extern "C" { #endif char nm_test_var; void nm_test_func(void); void nm_test_func(void){} #ifdef __cplusplus } #endif int main(){nm_test_var='a';nm_test_func();return(0);} _LT_EOF if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_compile\""; } >&5 (eval $ac_compile) 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; }; then # Now try to grab the symbols. nlist=conftest.nm if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$NM conftest.$ac_objext \| "$lt_cv_sys_global_symbol_pipe" \> $nlist\""; } >&5 (eval $NM conftest.$ac_objext \| "$lt_cv_sys_global_symbol_pipe" \> $nlist) 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; } && test -s "$nlist"; then # Try sorting and uniquifying the output. 
if sort "$nlist" | uniq > "$nlist"T; then mv -f "$nlist"T "$nlist" else rm -f "$nlist"T fi # Make sure that we snagged all the symbols we need. if $GREP ' nm_test_var$' "$nlist" >/dev/null; then if $GREP ' nm_test_func$' "$nlist" >/dev/null; then cat <<_LT_EOF > conftest.$ac_ext /* Keep this code in sync between libtool.m4, ltmain, lt_system.h, and tests. */ #if defined _WIN32 || defined __CYGWIN__ || defined _WIN32_WCE /* DATA imports from DLLs on WIN32 can't be const, because runtime relocations are performed -- see ld's documentation on pseudo-relocs. */ # define LT_DLSYM_CONST #elif defined __osf__ /* This system does not cope well with relocations in const data. */ # define LT_DLSYM_CONST #else # define LT_DLSYM_CONST const #endif #ifdef __cplusplus extern "C" { #endif _LT_EOF # Now generate the symbol file. eval "$lt_cv_sys_global_symbol_to_cdecl"' < "$nlist" | $GREP -v main >> conftest.$ac_ext' cat <<_LT_EOF >> conftest.$ac_ext /* The mapping between symbol names and symbols. */ LT_DLSYM_CONST struct { const char *name; void *address; } lt__PROGRAM__LTX_preloaded_symbols[] = { { "@PROGRAM@", (void *) 0 }, _LT_EOF $SED "s/^$symcode$symcode* .* \(.*\)$/ {\"\1\", (void *) \&\1},/" < "$nlist" | $GREP -v main >> conftest.$ac_ext cat <<\_LT_EOF >> conftest.$ac_ext {0, (void *) 0} }; /* This works around a problem in FreeBSD linker */ #ifdef FREEBSD_WORKAROUND static const void *lt_preloaded_setup() { return lt__PROGRAM__LTX_preloaded_symbols; } #endif #ifdef __cplusplus } #endif _LT_EOF # Now try linking the two files. mv conftest.$ac_objext conftstm.$ac_objext lt_globsym_save_LIBS=$LIBS lt_globsym_save_CFLAGS=$CFLAGS LIBS=conftstm.$ac_objext CFLAGS="$CFLAGS$lt_prog_compiler_no_builtin_flag" if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_link\""; } >&5 (eval $ac_link) 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? 
= $ac_status" >&5 test $ac_status = 0; } && test -s conftest$ac_exeext; then pipe_works=yes fi LIBS=$lt_globsym_save_LIBS CFLAGS=$lt_globsym_save_CFLAGS else echo "cannot find nm_test_func in $nlist" >&5 fi else echo "cannot find nm_test_var in $nlist" >&5 fi else echo "cannot run $lt_cv_sys_global_symbol_pipe" >&5 fi else echo "$progname: failed program was:" >&5 cat conftest.$ac_ext >&5 fi rm -rf conftest* conftst* # Do not use the global_symbol_pipe unless it works. if test yes = "$pipe_works"; then break else lt_cv_sys_global_symbol_pipe= fi done fi if test -z "$lt_cv_sys_global_symbol_pipe"; then lt_cv_sys_global_symbol_to_cdecl= fi if test -z "$lt_cv_sys_global_symbol_pipe$lt_cv_sys_global_symbol_to_cdecl"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: failed" >&5 $as_echo "failed" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: ok" >&5 $as_echo "ok" >&6; } fi # Response file support. if test "$lt_cv_nm_interface" = "MS dumpbin"; then nm_file_list_spec='@' elif $NM --help 2>/dev/null | grep '[@]FILE' >/dev/null; then nm_file_list_spec='@' fi { $as_echo "$as_me:${as_lineno-$LINENO}: checking for sysroot" >&5 $as_echo_n "checking for sysroot... " >&6; } # Check whether --with-sysroot was given. if test "${with_sysroot+set}" = set; then : withval=$with_sysroot; else with_sysroot=no fi lt_sysroot= case $with_sysroot in #( yes) if test yes = "$GCC"; then lt_sysroot=`$CC --print-sysroot 2>/dev/null` fi ;; #( /*) lt_sysroot=`echo "$with_sysroot" | sed -e "$sed_quote_subst"` ;; #( no|'') ;; #( *) { $as_echo "$as_me:${as_lineno-$LINENO}: result: $with_sysroot" >&5 $as_echo "$with_sysroot" >&6; } as_fn_error $? "The sysroot must be an absolute path." "$LINENO" 5 ;; esac { $as_echo "$as_me:${as_lineno-$LINENO}: result: ${lt_sysroot:-no}" >&5 $as_echo "${lt_sysroot:-no}" >&6; } { $as_echo "$as_me:${as_lineno-$LINENO}: checking for a working dd" >&5 $as_echo_n "checking for a working dd... 
" >&6; } if ${ac_cv_path_lt_DD+:} false; then : $as_echo_n "(cached) " >&6 else printf 0123456789abcdef0123456789abcdef >conftest.i cat conftest.i conftest.i >conftest2.i : ${lt_DD:=$DD} if test -z "$lt_DD"; then ac_path_lt_DD_found=false # Loop through the user's path and test for each of PROGNAME-LIST as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_prog in dd; do for ac_exec_ext in '' $ac_executable_extensions; do ac_path_lt_DD="$as_dir/$ac_prog$ac_exec_ext" as_fn_executable_p "$ac_path_lt_DD" || continue if "$ac_path_lt_DD" bs=32 count=1 conftest.out 2>/dev/null; then cmp -s conftest.i conftest.out \ && ac_cv_path_lt_DD="$ac_path_lt_DD" ac_path_lt_DD_found=: fi $ac_path_lt_DD_found && break 3 done done done IFS=$as_save_IFS if test -z "$ac_cv_path_lt_DD"; then : fi else ac_cv_path_lt_DD=$lt_DD fi rm -f conftest.i conftest2.i conftest.out fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_path_lt_DD" >&5 $as_echo "$ac_cv_path_lt_DD" >&6; } { $as_echo "$as_me:${as_lineno-$LINENO}: checking how to truncate binary pipes" >&5 $as_echo_n "checking how to truncate binary pipes... " >&6; } if ${lt_cv_truncate_bin+:} false; then : $as_echo_n "(cached) " >&6 else printf 0123456789abcdef0123456789abcdef >conftest.i cat conftest.i conftest.i >conftest2.i lt_cv_truncate_bin= if "$ac_cv_path_lt_DD" bs=32 count=1 conftest.out 2>/dev/null; then cmp -s conftest.i conftest.out \ && lt_cv_truncate_bin="$ac_cv_path_lt_DD bs=4096 count=1" fi rm -f conftest.i conftest2.i conftest.out test -z "$lt_cv_truncate_bin" && lt_cv_truncate_bin="$SED -e 4q" fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_truncate_bin" >&5 $as_echo "$lt_cv_truncate_bin" >&6; } # Calculate cc_basename. Skip known compiler wrappers and cross-prefix. 
func_cc_basename () { for cc_temp in $*""; do case $cc_temp in compile | *[\\/]compile | ccache | *[\\/]ccache ) ;; distcc | *[\\/]distcc | purify | *[\\/]purify ) ;; \-*) ;; *) break;; esac done func_cc_basename_result=`$ECHO "$cc_temp" | $SED "s%.*/%%; s%^$host_alias-%%"` } # Check whether --enable-libtool-lock was given. if test "${enable_libtool_lock+set}" = set; then : enableval=$enable_libtool_lock; fi test no = "$enable_libtool_lock" || enable_libtool_lock=yes # Some flags need to be propagated to the compiler or linker for good # libtool support. case $host in ia64-*-hpux*) # Find out what ABI is being produced by ac_compile, and set mode # options accordingly. echo 'int i;' > conftest.$ac_ext if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_compile\""; } >&5 (eval $ac_compile) 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; }; then case `/usr/bin/file conftest.$ac_objext` in *ELF-32*) HPUX_IA64_MODE=32 ;; *ELF-64*) HPUX_IA64_MODE=64 ;; esac fi rm -rf conftest* ;; *-*-irix6*) # Find out what ABI is being produced by ac_compile, and set linker # options accordingly. echo '#line '$LINENO' "configure"' > conftest.$ac_ext if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_compile\""; } >&5 (eval $ac_compile) 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; }; then if test yes = "$lt_cv_prog_gnu_ld"; then case `/usr/bin/file conftest.$ac_objext` in *32-bit*) LD="${LD-ld} -melf32bsmip" ;; *N32*) LD="${LD-ld} -melf32bmipn32" ;; *64-bit*) LD="${LD-ld} -melf64bmip" ;; esac else case `/usr/bin/file conftest.$ac_objext` in *32-bit*) LD="${LD-ld} -32" ;; *N32*) LD="${LD-ld} -n32" ;; *64-bit*) LD="${LD-ld} -64" ;; esac fi fi rm -rf conftest* ;; mips64*-*linux*) # Find out what ABI is being produced by ac_compile, and set linker # options accordingly. 
echo '#line '$LINENO' "configure"' > conftest.$ac_ext if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_compile\""; } >&5 (eval $ac_compile) 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; }; then emul=elf case `/usr/bin/file conftest.$ac_objext` in *32-bit*) emul="${emul}32" ;; *64-bit*) emul="${emul}64" ;; esac case `/usr/bin/file conftest.$ac_objext` in *MSB*) emul="${emul}btsmip" ;; *LSB*) emul="${emul}ltsmip" ;; esac case `/usr/bin/file conftest.$ac_objext` in *N32*) emul="${emul}n32" ;; esac LD="${LD-ld} -m $emul" fi rm -rf conftest* ;; x86_64-*kfreebsd*-gnu|x86_64-*linux*|powerpc*-*linux*| \ s390*-*linux*|s390*-*tpf*|sparc*-*linux*) # Find out what ABI is being produced by ac_compile, and set linker # options accordingly. Note that the listed cases only cover the # situations where additional linker options are needed (such as when # doing 32-bit compilation for a host where ld defaults to 64-bit, or # vice versa); the common cases where no linker options are needed do # not appear in the list. echo 'int i;' > conftest.$ac_ext if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_compile\""; } >&5 (eval $ac_compile) 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? 
= $ac_status" >&5 test $ac_status = 0; }; then case `/usr/bin/file conftest.o` in *32-bit*) case $host in x86_64-*kfreebsd*-gnu) LD="${LD-ld} -m elf_i386_fbsd" ;; x86_64-*linux*) case `/usr/bin/file conftest.o` in *x86-64*) LD="${LD-ld} -m elf32_x86_64" ;; *) LD="${LD-ld} -m elf_i386" ;; esac ;; powerpc64le-*linux*) LD="${LD-ld} -m elf32lppclinux" ;; powerpc64-*linux*) LD="${LD-ld} -m elf32ppclinux" ;; s390x-*linux*) LD="${LD-ld} -m elf_s390" ;; sparc64-*linux*) LD="${LD-ld} -m elf32_sparc" ;; esac ;; *64-bit*) case $host in x86_64-*kfreebsd*-gnu) LD="${LD-ld} -m elf_x86_64_fbsd" ;; x86_64-*linux*) LD="${LD-ld} -m elf_x86_64" ;; powerpcle-*linux*) LD="${LD-ld} -m elf64lppc" ;; powerpc-*linux*) LD="${LD-ld} -m elf64ppc" ;; s390*-*linux*|s390*-*tpf*) LD="${LD-ld} -m elf64_s390" ;; sparc*-*linux*) LD="${LD-ld} -m elf64_sparc" ;; esac ;; esac fi rm -rf conftest* ;; *-*-sco3.2v5*) # On SCO OpenServer 5, we need -belf to get full-featured binaries. SAVE_CFLAGS=$CFLAGS CFLAGS="$CFLAGS -belf" { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether the C compiler needs -belf" >&5 $as_echo_n "checking whether the C compiler needs -belf... " >&6; } if ${lt_cv_cc_needs_belf+:} false; then : $as_echo_n "(cached) " >&6 else ac_ext=c ac_cpp='$CPP $CPPFLAGS' ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' ac_compiler_gnu=$ac_cv_c_compiler_gnu cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. 
*/ int main () { ; return 0; } _ACEOF if ac_fn_c_try_link "$LINENO"; then : lt_cv_cc_needs_belf=yes else lt_cv_cc_needs_belf=no fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext ac_ext=c ac_cpp='$CPP $CPPFLAGS' ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' ac_compiler_gnu=$ac_cv_c_compiler_gnu fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_cc_needs_belf" >&5 $as_echo "$lt_cv_cc_needs_belf" >&6; } if test yes != "$lt_cv_cc_needs_belf"; then # this is probably gcc 2.8.0, egcs 1.0 or newer; no need for -belf CFLAGS=$SAVE_CFLAGS fi ;; *-*solaris*) # Find out what ABI is being produced by ac_compile, and set linker # options accordingly. echo 'int i;' > conftest.$ac_ext if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_compile\""; } >&5 (eval $ac_compile) 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; }; then case `/usr/bin/file conftest.o` in *64-bit*) case $lt_cv_prog_gnu_ld in yes*) case $host in i?86-*-solaris*|x86_64-*-solaris*) LD="${LD-ld} -m elf_x86_64" ;; sparc*-*-solaris*) LD="${LD-ld} -m elf64_sparc" ;; esac # GNU ld 2.21 introduced _sol2 emulations. Use them if available. if ${LD-ld} -V | grep _sol2 >/dev/null 2>&1; then LD=${LD-ld}_sol2 fi ;; *) if ${LD-ld} -64 -r -o conftest2.o conftest.o >/dev/null 2>&1; then LD="${LD-ld} -64" fi ;; esac ;; esac fi rm -rf conftest* ;; esac need_locks=$enable_libtool_lock if test -n "$ac_tool_prefix"; then # Extract the first word of "${ac_tool_prefix}mt", so it can be a program name with args. set dummy ${ac_tool_prefix}mt; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... 
" >&6; } if ${ac_cv_prog_MANIFEST_TOOL+:} false; then : $as_echo_n "(cached) " >&6 else if test -n "$MANIFEST_TOOL"; then ac_cv_prog_MANIFEST_TOOL="$MANIFEST_TOOL" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_MANIFEST_TOOL="${ac_tool_prefix}mt" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS fi fi MANIFEST_TOOL=$ac_cv_prog_MANIFEST_TOOL if test -n "$MANIFEST_TOOL"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $MANIFEST_TOOL" >&5 $as_echo "$MANIFEST_TOOL" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi fi if test -z "$ac_cv_prog_MANIFEST_TOOL"; then ac_ct_MANIFEST_TOOL=$MANIFEST_TOOL # Extract the first word of "mt", so it can be a program name with args. set dummy mt; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if ${ac_cv_prog_ac_ct_MANIFEST_TOOL+:} false; then : $as_echo_n "(cached) " >&6 else if test -n "$ac_ct_MANIFEST_TOOL"; then ac_cv_prog_ac_ct_MANIFEST_TOOL="$ac_ct_MANIFEST_TOOL" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. 
for ac_exec_ext in '' $ac_executable_extensions; do if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_ac_ct_MANIFEST_TOOL="mt" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS fi fi ac_ct_MANIFEST_TOOL=$ac_cv_prog_ac_ct_MANIFEST_TOOL if test -n "$ac_ct_MANIFEST_TOOL"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_MANIFEST_TOOL" >&5 $as_echo "$ac_ct_MANIFEST_TOOL" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi if test "x$ac_ct_MANIFEST_TOOL" = x; then MANIFEST_TOOL=":" else case $cross_compiling:$ac_tool_warned in yes:) { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 $as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} ac_tool_warned=yes ;; esac MANIFEST_TOOL=$ac_ct_MANIFEST_TOOL fi else MANIFEST_TOOL="$ac_cv_prog_MANIFEST_TOOL" fi test -z "$MANIFEST_TOOL" && MANIFEST_TOOL=mt { $as_echo "$as_me:${as_lineno-$LINENO}: checking if $MANIFEST_TOOL is a manifest tool" >&5 $as_echo_n "checking if $MANIFEST_TOOL is a manifest tool... " >&6; } if ${lt_cv_path_mainfest_tool+:} false; then : $as_echo_n "(cached) " >&6 else lt_cv_path_mainfest_tool=no echo "$as_me:$LINENO: $MANIFEST_TOOL '-?'" >&5 $MANIFEST_TOOL '-?' 2>conftest.err > conftest.out cat conftest.err >&5 if $GREP 'Manifest Tool' conftest.out > /dev/null; then lt_cv_path_mainfest_tool=yes fi rm -f conftest* fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_path_mainfest_tool" >&5 $as_echo "$lt_cv_path_mainfest_tool" >&6; } if test yes != "$lt_cv_path_mainfest_tool"; then MANIFEST_TOOL=: fi case $host_os in rhapsody* | darwin*) if test -n "$ac_tool_prefix"; then # Extract the first word of "${ac_tool_prefix}dsymutil", so it can be a program name with args. 
set dummy ${ac_tool_prefix}dsymutil; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if ${ac_cv_prog_DSYMUTIL+:} false; then : $as_echo_n "(cached) " >&6 else if test -n "$DSYMUTIL"; then ac_cv_prog_DSYMUTIL="$DSYMUTIL" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_DSYMUTIL="${ac_tool_prefix}dsymutil" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS fi fi DSYMUTIL=$ac_cv_prog_DSYMUTIL if test -n "$DSYMUTIL"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $DSYMUTIL" >&5 $as_echo "$DSYMUTIL" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi fi if test -z "$ac_cv_prog_DSYMUTIL"; then ac_ct_DSYMUTIL=$DSYMUTIL # Extract the first word of "dsymutil", so it can be a program name with args. set dummy dsymutil; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if ${ac_cv_prog_ac_ct_DSYMUTIL+:} false; then : $as_echo_n "(cached) " >&6 else if test -n "$ac_ct_DSYMUTIL"; then ac_cv_prog_ac_ct_DSYMUTIL="$ac_ct_DSYMUTIL" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. 
for ac_exec_ext in '' $ac_executable_extensions; do if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_ac_ct_DSYMUTIL="dsymutil" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS fi fi ac_ct_DSYMUTIL=$ac_cv_prog_ac_ct_DSYMUTIL if test -n "$ac_ct_DSYMUTIL"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_DSYMUTIL" >&5 $as_echo "$ac_ct_DSYMUTIL" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi if test "x$ac_ct_DSYMUTIL" = x; then DSYMUTIL=":" else case $cross_compiling:$ac_tool_warned in yes:) { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 $as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} ac_tool_warned=yes ;; esac DSYMUTIL=$ac_ct_DSYMUTIL fi else DSYMUTIL="$ac_cv_prog_DSYMUTIL" fi if test -n "$ac_tool_prefix"; then # Extract the first word of "${ac_tool_prefix}nmedit", so it can be a program name with args. set dummy ${ac_tool_prefix}nmedit; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if ${ac_cv_prog_NMEDIT+:} false; then : $as_echo_n "(cached) " >&6 else if test -n "$NMEDIT"; then ac_cv_prog_NMEDIT="$NMEDIT" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. 
for ac_exec_ext in '' $ac_executable_extensions; do if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_NMEDIT="${ac_tool_prefix}nmedit" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS fi fi NMEDIT=$ac_cv_prog_NMEDIT if test -n "$NMEDIT"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $NMEDIT" >&5 $as_echo "$NMEDIT" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi fi if test -z "$ac_cv_prog_NMEDIT"; then ac_ct_NMEDIT=$NMEDIT # Extract the first word of "nmedit", so it can be a program name with args. set dummy nmedit; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if ${ac_cv_prog_ac_ct_NMEDIT+:} false; then : $as_echo_n "(cached) " >&6 else if test -n "$ac_ct_NMEDIT"; then ac_cv_prog_ac_ct_NMEDIT="$ac_ct_NMEDIT" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. 
for ac_exec_ext in '' $ac_executable_extensions; do if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_ac_ct_NMEDIT="nmedit" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS fi fi ac_ct_NMEDIT=$ac_cv_prog_ac_ct_NMEDIT if test -n "$ac_ct_NMEDIT"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_NMEDIT" >&5 $as_echo "$ac_ct_NMEDIT" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi if test "x$ac_ct_NMEDIT" = x; then NMEDIT=":" else case $cross_compiling:$ac_tool_warned in yes:) { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 $as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} ac_tool_warned=yes ;; esac NMEDIT=$ac_ct_NMEDIT fi else NMEDIT="$ac_cv_prog_NMEDIT" fi if test -n "$ac_tool_prefix"; then # Extract the first word of "${ac_tool_prefix}lipo", so it can be a program name with args. set dummy ${ac_tool_prefix}lipo; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if ${ac_cv_prog_LIPO+:} false; then : $as_echo_n "(cached) " >&6 else if test -n "$LIPO"; then ac_cv_prog_LIPO="$LIPO" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. 
for ac_exec_ext in '' $ac_executable_extensions; do if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_LIPO="${ac_tool_prefix}lipo" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS fi fi LIPO=$ac_cv_prog_LIPO if test -n "$LIPO"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $LIPO" >&5 $as_echo "$LIPO" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi fi if test -z "$ac_cv_prog_LIPO"; then ac_ct_LIPO=$LIPO # Extract the first word of "lipo", so it can be a program name with args. set dummy lipo; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if ${ac_cv_prog_ac_ct_LIPO+:} false; then : $as_echo_n "(cached) " >&6 else if test -n "$ac_ct_LIPO"; then ac_cv_prog_ac_ct_LIPO="$ac_ct_LIPO" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. 
for ac_exec_ext in '' $ac_executable_extensions; do if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_ac_ct_LIPO="lipo" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS fi fi ac_ct_LIPO=$ac_cv_prog_ac_ct_LIPO if test -n "$ac_ct_LIPO"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_LIPO" >&5 $as_echo "$ac_ct_LIPO" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi if test "x$ac_ct_LIPO" = x; then LIPO=":" else case $cross_compiling:$ac_tool_warned in yes:) { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 $as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} ac_tool_warned=yes ;; esac LIPO=$ac_ct_LIPO fi else LIPO="$ac_cv_prog_LIPO" fi if test -n "$ac_tool_prefix"; then # Extract the first word of "${ac_tool_prefix}otool", so it can be a program name with args. set dummy ${ac_tool_prefix}otool; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if ${ac_cv_prog_OTOOL+:} false; then : $as_echo_n "(cached) " >&6 else if test -n "$OTOOL"; then ac_cv_prog_OTOOL="$OTOOL" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. 
for ac_exec_ext in '' $ac_executable_extensions; do if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_OTOOL="${ac_tool_prefix}otool" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS fi fi OTOOL=$ac_cv_prog_OTOOL if test -n "$OTOOL"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $OTOOL" >&5 $as_echo "$OTOOL" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi fi if test -z "$ac_cv_prog_OTOOL"; then ac_ct_OTOOL=$OTOOL # Extract the first word of "otool", so it can be a program name with args. set dummy otool; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if ${ac_cv_prog_ac_ct_OTOOL+:} false; then : $as_echo_n "(cached) " >&6 else if test -n "$ac_ct_OTOOL"; then ac_cv_prog_ac_ct_OTOOL="$ac_ct_OTOOL" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. 
for ac_exec_ext in '' $ac_executable_extensions; do if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_ac_ct_OTOOL="otool" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS fi fi ac_ct_OTOOL=$ac_cv_prog_ac_ct_OTOOL if test -n "$ac_ct_OTOOL"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_OTOOL" >&5 $as_echo "$ac_ct_OTOOL" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi if test "x$ac_ct_OTOOL" = x; then OTOOL=":" else case $cross_compiling:$ac_tool_warned in yes:) { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 $as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} ac_tool_warned=yes ;; esac OTOOL=$ac_ct_OTOOL fi else OTOOL="$ac_cv_prog_OTOOL" fi if test -n "$ac_tool_prefix"; then # Extract the first word of "${ac_tool_prefix}otool64", so it can be a program name with args. set dummy ${ac_tool_prefix}otool64; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if ${ac_cv_prog_OTOOL64+:} false; then : $as_echo_n "(cached) " >&6 else if test -n "$OTOOL64"; then ac_cv_prog_OTOOL64="$OTOOL64" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. 
for ac_exec_ext in '' $ac_executable_extensions; do if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_OTOOL64="${ac_tool_prefix}otool64" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS fi fi OTOOL64=$ac_cv_prog_OTOOL64 if test -n "$OTOOL64"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $OTOOL64" >&5 $as_echo "$OTOOL64" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi fi if test -z "$ac_cv_prog_OTOOL64"; then ac_ct_OTOOL64=$OTOOL64 # Extract the first word of "otool64", so it can be a program name with args. set dummy otool64; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if ${ac_cv_prog_ac_ct_OTOOL64+:} false; then : $as_echo_n "(cached) " >&6 else if test -n "$ac_ct_OTOOL64"; then ac_cv_prog_ac_ct_OTOOL64="$ac_ct_OTOOL64" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. 
for ac_exec_ext in '' $ac_executable_extensions; do if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_ac_ct_OTOOL64="otool64" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS fi fi ac_ct_OTOOL64=$ac_cv_prog_ac_ct_OTOOL64 if test -n "$ac_ct_OTOOL64"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_OTOOL64" >&5 $as_echo "$ac_ct_OTOOL64" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi if test "x$ac_ct_OTOOL64" = x; then OTOOL64=":" else case $cross_compiling:$ac_tool_warned in yes:) { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 $as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} ac_tool_warned=yes ;; esac OTOOL64=$ac_ct_OTOOL64 fi else OTOOL64="$ac_cv_prog_OTOOL64" fi { $as_echo "$as_me:${as_lineno-$LINENO}: checking for -single_module linker flag" >&5 $as_echo_n "checking for -single_module linker flag... " >&6; } if ${lt_cv_apple_cc_single_mod+:} false; then : $as_echo_n "(cached) " >&6 else lt_cv_apple_cc_single_mod=no if test -z "$LT_MULTI_MODULE"; then # By default we will add the -single_module flag. You can override # by either setting the environment variable LT_MULTI_MODULE # non-empty at configure time, or by adding -multi_module to the # link flags. rm -rf libconftest.dylib* echo "int foo(void){return 1;}" > conftest.c echo "$LTCC $LTCFLAGS $LDFLAGS -o libconftest.dylib \ -dynamiclib -Wl,-single_module conftest.c" >&5 $LTCC $LTCFLAGS $LDFLAGS -o libconftest.dylib \ -dynamiclib -Wl,-single_module conftest.c 2>conftest.err _lt_result=$? # If there is a non-empty error log, and "single_module" # appears in it, assume the flag caused a linker warning if test -s conftest.err && $GREP single_module conftest.err; then cat conftest.err >&5 # Otherwise, if the output was created with a 0 exit code from # the compiler, it worked. 
elif test -f libconftest.dylib && test 0 = "$_lt_result"; then lt_cv_apple_cc_single_mod=yes else cat conftest.err >&5 fi rm -rf libconftest.dylib* rm -f conftest.* fi fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_apple_cc_single_mod" >&5 $as_echo "$lt_cv_apple_cc_single_mod" >&6; } { $as_echo "$as_me:${as_lineno-$LINENO}: checking for -exported_symbols_list linker flag" >&5 $as_echo_n "checking for -exported_symbols_list linker flag... " >&6; } if ${lt_cv_ld_exported_symbols_list+:} false; then : $as_echo_n "(cached) " >&6 else lt_cv_ld_exported_symbols_list=no save_LDFLAGS=$LDFLAGS echo "_main" > conftest.sym LDFLAGS="$LDFLAGS -Wl,-exported_symbols_list,conftest.sym" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ int main () { ; return 0; } _ACEOF if ac_fn_c_try_link "$LINENO"; then : lt_cv_ld_exported_symbols_list=yes else lt_cv_ld_exported_symbols_list=no fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext LDFLAGS=$save_LDFLAGS fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_ld_exported_symbols_list" >&5 $as_echo "$lt_cv_ld_exported_symbols_list" >&6; } { $as_echo "$as_me:${as_lineno-$LINENO}: checking for -force_load linker flag" >&5 $as_echo_n "checking for -force_load linker flag... " >&6; } if ${lt_cv_ld_force_load+:} false; then : $as_echo_n "(cached) " >&6 else lt_cv_ld_force_load=no cat > conftest.c << _LT_EOF int forced_loaded() { return 2;} _LT_EOF echo "$LTCC $LTCFLAGS -c -o conftest.o conftest.c" >&5 $LTCC $LTCFLAGS -c -o conftest.o conftest.c 2>&5 echo "$AR cru libconftest.a conftest.o" >&5 $AR cru libconftest.a conftest.o 2>&5 echo "$RANLIB libconftest.a" >&5 $RANLIB libconftest.a 2>&5 cat > conftest.c << _LT_EOF int main() { return 0;} _LT_EOF echo "$LTCC $LTCFLAGS $LDFLAGS -o conftest conftest.c -Wl,-force_load,./libconftest.a" >&5 $LTCC $LTCFLAGS $LDFLAGS -o conftest conftest.c -Wl,-force_load,./libconftest.a 2>conftest.err _lt_result=$? 
if test -s conftest.err && $GREP force_load conftest.err; then cat conftest.err >&5 elif test -f conftest && test 0 = "$_lt_result" && $GREP forced_load conftest >/dev/null 2>&1; then lt_cv_ld_force_load=yes else cat conftest.err >&5 fi rm -f conftest.err libconftest.a conftest conftest.c rm -rf conftest.dSYM fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_ld_force_load" >&5 $as_echo "$lt_cv_ld_force_load" >&6; } case $host_os in rhapsody* | darwin1.[012]) _lt_dar_allow_undefined='$wl-undefined ${wl}suppress' ;; darwin1.*) _lt_dar_allow_undefined='$wl-flat_namespace $wl-undefined ${wl}suppress' ;; darwin*) # darwin 5.x on # if running on 10.5 or later, the deployment target defaults # to the OS version, if on x86, and 10.4, the deployment # target defaults to 10.4. Don't you love it? case ${MACOSX_DEPLOYMENT_TARGET-10.0},$host in 10.0,*86*-darwin8*|10.0,*-darwin[91]*) _lt_dar_allow_undefined='$wl-undefined ${wl}dynamic_lookup' ;; 10.[012][,.]*) _lt_dar_allow_undefined='$wl-flat_namespace $wl-undefined ${wl}suppress' ;; 10.*) _lt_dar_allow_undefined='$wl-undefined ${wl}dynamic_lookup' ;; esac ;; esac if test yes = "$lt_cv_apple_cc_single_mod"; then _lt_dar_single_mod='$single_module' fi if test yes = "$lt_cv_ld_exported_symbols_list"; then _lt_dar_export_syms=' $wl-exported_symbols_list,$output_objdir/$libname-symbols.expsym' else _lt_dar_export_syms='~$NMEDIT -s $output_objdir/$libname-symbols.expsym $lib' fi if test : != "$DSYMUTIL" && test no = "$lt_cv_ld_force_load"; then _lt_dsymutil='~$DSYMUTIL $lib || :' else _lt_dsymutil= fi ;; esac # func_munge_path_list VARIABLE PATH # ----------------------------------- # VARIABLE is name of variable containing _space_ separated list of # directories to be munged by the contents of PATH, which is string # having a format: # "DIR[:DIR]:" # string "DIR[ DIR]" will be prepended to VARIABLE # ":DIR[:DIR]" # string "DIR[ DIR]" will be appended to VARIABLE # "DIRP[:DIRP]::[DIRA:]DIRA" # string "DIRP[ DIRP]" will be 
prepended to VARIABLE and string # "DIRA[ DIRA]" will be appended to VARIABLE # "DIR[:DIR]" # VARIABLE will be replaced by "DIR[ DIR]" func_munge_path_list () { case x$2 in x) ;; *:) eval $1=\"`$ECHO $2 | $SED 's/:/ /g'` \$$1\" ;; x:*) eval $1=\"\$$1 `$ECHO $2 | $SED 's/:/ /g'`\" ;; *::*) eval $1=\"\$$1\ `$ECHO $2 | $SED -e 's/.*:://' -e 's/:/ /g'`\" eval $1=\"`$ECHO $2 | $SED -e 's/::.*//' -e 's/:/ /g'`\ \$$1\" ;; *) eval $1=\"`$ECHO $2 | $SED 's/:/ /g'`\" ;; esac } ac_ext=c ac_cpp='$CPP $CPPFLAGS' ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' ac_compiler_gnu=$ac_cv_c_compiler_gnu { $as_echo "$as_me:${as_lineno-$LINENO}: checking how to run the C preprocessor" >&5 $as_echo_n "checking how to run the C preprocessor... " >&6; } # On Suns, sometimes $CPP names a directory. if test -n "$CPP" && test -d "$CPP"; then CPP= fi if test -z "$CPP"; then if ${ac_cv_prog_CPP+:} false; then : $as_echo_n "(cached) " >&6 else # Double quotes because CPP needs to be expanded for CPP in "$CC -E" "$CC -E -traditional-cpp" "/lib/cpp" do ac_preproc_ok=false for ac_c_preproc_warn_flag in '' yes do # Use a header file that comes with gcc, so configuring glibc # with a fresh cross-compiler works. # Prefer to if __STDC__ is defined, since # exists even on freestanding compilers. # On the NeXT, cc -E runs the code through the compiler's parser, # not just through cpp. "Syntax error" is here to catch this case. cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ #ifdef __STDC__ # include #else # include #endif Syntax error _ACEOF if ac_fn_c_try_cpp "$LINENO"; then : else # Broken: fails on valid input. continue fi rm -f conftest.err conftest.i conftest.$ac_ext # OK, works on sane cases. Now check whether nonexistent headers # can be detected and how. cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. 
*/ #include _ACEOF if ac_fn_c_try_cpp "$LINENO"; then : # Broken: success on invalid input. continue else # Passes both tests. ac_preproc_ok=: break fi rm -f conftest.err conftest.i conftest.$ac_ext done # Because of `break', _AC_PREPROC_IFELSE's cleaning code was skipped. rm -f conftest.i conftest.err conftest.$ac_ext if $ac_preproc_ok; then : break fi done ac_cv_prog_CPP=$CPP fi CPP=$ac_cv_prog_CPP else ac_cv_prog_CPP=$CPP fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $CPP" >&5 $as_echo "$CPP" >&6; } ac_preproc_ok=false for ac_c_preproc_warn_flag in '' yes do # Use a header file that comes with gcc, so configuring glibc # with a fresh cross-compiler works. # Prefer to if __STDC__ is defined, since # exists even on freestanding compilers. # On the NeXT, cc -E runs the code through the compiler's parser, # not just through cpp. "Syntax error" is here to catch this case. cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ #ifdef __STDC__ # include #else # include #endif Syntax error _ACEOF if ac_fn_c_try_cpp "$LINENO"; then : else # Broken: fails on valid input. continue fi rm -f conftest.err conftest.i conftest.$ac_ext # OK, works on sane cases. Now check whether nonexistent headers # can be detected and how. cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ #include _ACEOF if ac_fn_c_try_cpp "$LINENO"; then : # Broken: success on invalid input. continue else # Passes both tests. ac_preproc_ok=: break fi rm -f conftest.err conftest.i conftest.$ac_ext done # Because of `break', _AC_PREPROC_IFELSE's cleaning code was skipped. rm -f conftest.i conftest.err conftest.$ac_ext if $ac_preproc_ok; then : else { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5 $as_echo "$as_me: error: in \`$ac_pwd':" >&2;} as_fn_error $? 
"C preprocessor \"$CPP\" fails sanity check See \`config.log' for more details" "$LINENO" 5; } fi ac_ext=c ac_cpp='$CPP $CPPFLAGS' ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' ac_compiler_gnu=$ac_cv_c_compiler_gnu { $as_echo "$as_me:${as_lineno-$LINENO}: checking for ANSI C header files" >&5 $as_echo_n "checking for ANSI C header files... " >&6; } if ${ac_cv_header_stdc+:} false; then : $as_echo_n "(cached) " >&6 else cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ #include #include #include #include int main () { ; return 0; } _ACEOF if ac_fn_c_try_compile "$LINENO"; then : ac_cv_header_stdc=yes else ac_cv_header_stdc=no fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext if test $ac_cv_header_stdc = yes; then # SunOS 4.x string.h does not declare mem*, contrary to ANSI. cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ #include _ACEOF if (eval "$ac_cpp conftest.$ac_ext") 2>&5 | $EGREP "memchr" >/dev/null 2>&1; then : else ac_cv_header_stdc=no fi rm -f conftest* fi if test $ac_cv_header_stdc = yes; then # ISC 2.0.2 stdlib.h does not declare free, contrary to ANSI. cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ #include _ACEOF if (eval "$ac_cpp conftest.$ac_ext") 2>&5 | $EGREP "free" >/dev/null 2>&1; then : else ac_cv_header_stdc=no fi rm -f conftest* fi if test $ac_cv_header_stdc = yes; then # /bin/cc in Irix-4.0.5 gets non-ANSI ctype macros unless using -ansi. if test "$cross_compiling" = yes; then : : else cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ #include #include #if ((' ' & 0x0FF) == 0x020) # define ISLOWER(c) ('a' <= (c) && (c) <= 'z') # define TOUPPER(c) (ISLOWER(c) ? 'A' + ((c) - 'a') : (c)) #else # define ISLOWER(c) \ (('a' <= (c) && (c) <= 'i') \ || ('j' <= (c) && (c) <= 'r') \ || ('s' <= (c) && (c) <= 'z')) # define TOUPPER(c) (ISLOWER(c) ? 
((c) | 0x40) : (c)) #endif #define XOR(e, f) (((e) && !(f)) || (!(e) && (f))) int main () { int i; for (i = 0; i < 256; i++) if (XOR (islower (i), ISLOWER (i)) || toupper (i) != TOUPPER (i)) return 2; return 0; } _ACEOF if ac_fn_c_try_run "$LINENO"; then : else ac_cv_header_stdc=no fi rm -f core *.core core.conftest.* gmon.out bb.out conftest$ac_exeext \ conftest.$ac_objext conftest.beam conftest.$ac_ext fi fi fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_header_stdc" >&5 $as_echo "$ac_cv_header_stdc" >&6; } if test $ac_cv_header_stdc = yes; then $as_echo "#define STDC_HEADERS 1" >>confdefs.h fi # On IRIX 5.3, sys/types and inttypes.h are conflicting. for ac_header in sys/types.h sys/stat.h stdlib.h string.h memory.h strings.h \ inttypes.h stdint.h unistd.h do : as_ac_Header=`$as_echo "ac_cv_header_$ac_header" | $as_tr_sh` ac_fn_c_check_header_compile "$LINENO" "$ac_header" "$as_ac_Header" "$ac_includes_default " if eval test \"x\$"$as_ac_Header"\" = x"yes"; then : cat >>confdefs.h <<_ACEOF #define `$as_echo "HAVE_$ac_header" | $as_tr_cpp` 1 _ACEOF fi done for ac_header in dlfcn.h do : ac_fn_c_check_header_compile "$LINENO" "dlfcn.h" "ac_cv_header_dlfcn_h" "$ac_includes_default " if test "x$ac_cv_header_dlfcn_h" = xyes; then : cat >>confdefs.h <<_ACEOF #define HAVE_DLFCN_H 1 _ACEOF fi done # Set options enable_dlopen=no enable_win32_dll=no # Check whether --enable-shared was given. if test "${enable_shared+set}" = set; then : enableval=$enable_shared; p=${PACKAGE-default} case $enableval in yes) enable_shared=yes ;; no) enable_shared=no ;; *) enable_shared=no # Look at the argument we got. We use all the common list separators. lt_save_ifs=$IFS; IFS=$IFS$PATH_SEPARATOR, for pkg in $enableval; do IFS=$lt_save_ifs if test "X$pkg" = "X$p"; then enable_shared=yes fi done IFS=$lt_save_ifs ;; esac else enable_shared=yes fi # Check whether --enable-static was given. 
if test "${enable_static+set}" = set; then : enableval=$enable_static; p=${PACKAGE-default} case $enableval in yes) enable_static=yes ;; no) enable_static=no ;; *) enable_static=no # Look at the argument we got. We use all the common list separators. lt_save_ifs=$IFS; IFS=$IFS$PATH_SEPARATOR, for pkg in $enableval; do IFS=$lt_save_ifs if test "X$pkg" = "X$p"; then enable_static=yes fi done IFS=$lt_save_ifs ;; esac else enable_static=yes fi # Check whether --with-pic was given. if test "${with_pic+set}" = set; then : withval=$with_pic; lt_p=${PACKAGE-default} case $withval in yes|no) pic_mode=$withval ;; *) pic_mode=default # Look at the argument we got. We use all the common list separators. lt_save_ifs=$IFS; IFS=$IFS$PATH_SEPARATOR, for lt_pkg in $withval; do IFS=$lt_save_ifs if test "X$lt_pkg" = "X$lt_p"; then pic_mode=yes fi done IFS=$lt_save_ifs ;; esac else pic_mode=default fi # Check whether --enable-fast-install was given. if test "${enable_fast_install+set}" = set; then : enableval=$enable_fast_install; p=${PACKAGE-default} case $enableval in yes) enable_fast_install=yes ;; no) enable_fast_install=no ;; *) enable_fast_install=no # Look at the argument we got. We use all the common list separators. lt_save_ifs=$IFS; IFS=$IFS$PATH_SEPARATOR, for pkg in $enableval; do IFS=$lt_save_ifs if test "X$pkg" = "X$p"; then enable_fast_install=yes fi done IFS=$lt_save_ifs ;; esac else enable_fast_install=yes fi shared_archive_member_spec= case $host,$enable_shared in power*-*-aix[5-9]*,yes) { $as_echo "$as_me:${as_lineno-$LINENO}: checking which variant of shared library versioning to provide" >&5 $as_echo_n "checking which variant of shared library versioning to provide... " >&6; } # Check whether --with-aix-soname was given. if test "${with_aix_soname+set}" = set; then : withval=$with_aix_soname; case $withval in aix|svr4|both) ;; *) as_fn_error $? 
"Unknown argument to --with-aix-soname" "$LINENO" 5 ;; esac lt_cv_with_aix_soname=$with_aix_soname else if ${lt_cv_with_aix_soname+:} false; then : $as_echo_n "(cached) " >&6 else lt_cv_with_aix_soname=aix fi with_aix_soname=$lt_cv_with_aix_soname fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $with_aix_soname" >&5 $as_echo "$with_aix_soname" >&6; } if test aix != "$with_aix_soname"; then # For the AIX way of multilib, we name the shared archive member # based on the bitwidth used, traditionally 'shr.o' or 'shr_64.o', # and 'shr.imp' or 'shr_64.imp', respectively, for the Import File. # Even when GNU compilers ignore OBJECT_MODE but need '-maix64' flag, # the AIX toolchain works better with OBJECT_MODE set (default 32). if test 64 = "${OBJECT_MODE-32}"; then shared_archive_member_spec=shr_64 else shared_archive_member_spec=shr fi fi ;; *) with_aix_soname=aix ;; esac # This can be used to rebuild libtool when needed LIBTOOL_DEPS=$ltmain # Always use our own libtool. LIBTOOL='$(SHELL) $(top_builddir)/libtool' test -z "$LN_S" && LN_S="ln -s" if test -n "${ZSH_VERSION+set}"; then setopt NO_GLOB_SUBST fi { $as_echo "$as_me:${as_lineno-$LINENO}: checking for objdir" >&5 $as_echo_n "checking for objdir... " >&6; } if ${lt_cv_objdir+:} false; then : $as_echo_n "(cached) " >&6 else rm -f .libs 2>/dev/null mkdir .libs 2>/dev/null if test -d .libs; then lt_cv_objdir=.libs else # MS-DOS does not allow filenames that begin with a dot. lt_cv_objdir=_libs fi rmdir .libs 2>/dev/null fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_objdir" >&5 $as_echo "$lt_cv_objdir" >&6; } objdir=$lt_cv_objdir cat >>confdefs.h <<_ACEOF #define LT_OBJDIR "$lt_cv_objdir/" _ACEOF case $host_os in aix3*) # AIX sometimes has problems with the GCC collect2 program. For some # reason, if we set the COLLECT_NAMES environment variable, the problems # vanish in a puff of smoke. 
if test set != "${COLLECT_NAMES+set}"; then COLLECT_NAMES= export COLLECT_NAMES fi ;; esac # Global variables: ofile=libtool can_build_shared=yes # All known linkers require a '.a' archive for static linking (except MSVC, # which needs '.lib'). libext=a with_gnu_ld=$lt_cv_prog_gnu_ld old_CC=$CC old_CFLAGS=$CFLAGS # Set sane defaults for various variables test -z "$CC" && CC=cc test -z "$LTCC" && LTCC=$CC test -z "$LTCFLAGS" && LTCFLAGS=$CFLAGS test -z "$LD" && LD=ld test -z "$ac_objext" && ac_objext=o func_cc_basename $compiler cc_basename=$func_cc_basename_result # Only perform the check for file, if the check method requires it test -z "$MAGIC_CMD" && MAGIC_CMD=file case $deplibs_check_method in file_magic*) if test "$file_magic_cmd" = '$MAGIC_CMD'; then { $as_echo "$as_me:${as_lineno-$LINENO}: checking for ${ac_tool_prefix}file" >&5 $as_echo_n "checking for ${ac_tool_prefix}file... " >&6; } if ${lt_cv_path_MAGIC_CMD+:} false; then : $as_echo_n "(cached) " >&6 else case $MAGIC_CMD in [\\/*] | ?:[\\/]*) lt_cv_path_MAGIC_CMD=$MAGIC_CMD # Let the user override the test with a path. ;; *) lt_save_MAGIC_CMD=$MAGIC_CMD lt_save_ifs=$IFS; IFS=$PATH_SEPARATOR ac_dummy="/usr/bin$PATH_SEPARATOR$PATH" for ac_dir in $ac_dummy; do IFS=$lt_save_ifs test -z "$ac_dir" && ac_dir=. if test -f "$ac_dir/${ac_tool_prefix}file"; then lt_cv_path_MAGIC_CMD=$ac_dir/"${ac_tool_prefix}file" if test -n "$file_magic_test_file"; then case $deplibs_check_method in "file_magic "*) file_magic_regex=`expr "$deplibs_check_method" : "file_magic \(.*\)"` MAGIC_CMD=$lt_cv_path_MAGIC_CMD if eval $file_magic_cmd \$file_magic_test_file 2> /dev/null | $EGREP "$file_magic_regex" > /dev/null; then : else cat <<_LT_EOF 1>&2 *** Warning: the command libtool uses to detect shared libraries, *** $file_magic_cmd, produces output that libtool cannot recognize. *** The result is that libtool may fail to recognize shared libraries *** as such. 
This will affect the creation of libtool libraries that *** depend on shared libraries, but programs linked with such libtool *** libraries will work regardless of this problem. Nevertheless, you *** may want to report the problem to your system manager and/or to *** bug-libtool@gnu.org _LT_EOF fi ;; esac fi break fi done IFS=$lt_save_ifs MAGIC_CMD=$lt_save_MAGIC_CMD ;; esac fi MAGIC_CMD=$lt_cv_path_MAGIC_CMD if test -n "$MAGIC_CMD"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $MAGIC_CMD" >&5 $as_echo "$MAGIC_CMD" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi if test -z "$lt_cv_path_MAGIC_CMD"; then if test -n "$ac_tool_prefix"; then { $as_echo "$as_me:${as_lineno-$LINENO}: checking for file" >&5 $as_echo_n "checking for file... " >&6; } if ${lt_cv_path_MAGIC_CMD+:} false; then : $as_echo_n "(cached) " >&6 else case $MAGIC_CMD in [\\/*] | ?:[\\/]*) lt_cv_path_MAGIC_CMD=$MAGIC_CMD # Let the user override the test with a path. ;; *) lt_save_MAGIC_CMD=$MAGIC_CMD lt_save_ifs=$IFS; IFS=$PATH_SEPARATOR ac_dummy="/usr/bin$PATH_SEPARATOR$PATH" for ac_dir in $ac_dummy; do IFS=$lt_save_ifs test -z "$ac_dir" && ac_dir=. if test -f "$ac_dir/file"; then lt_cv_path_MAGIC_CMD=$ac_dir/"file" if test -n "$file_magic_test_file"; then case $deplibs_check_method in "file_magic "*) file_magic_regex=`expr "$deplibs_check_method" : "file_magic \(.*\)"` MAGIC_CMD=$lt_cv_path_MAGIC_CMD if eval $file_magic_cmd \$file_magic_test_file 2> /dev/null | $EGREP "$file_magic_regex" > /dev/null; then : else cat <<_LT_EOF 1>&2 *** Warning: the command libtool uses to detect shared libraries, *** $file_magic_cmd, produces output that libtool cannot recognize. *** The result is that libtool may fail to recognize shared libraries *** as such. This will affect the creation of libtool libraries that *** depend on shared libraries, but programs linked with such libtool *** libraries will work regardless of this problem. 
Nevertheless, you *** may want to report the problem to your system manager and/or to *** bug-libtool@gnu.org _LT_EOF fi ;; esac fi break fi done IFS=$lt_save_ifs MAGIC_CMD=$lt_save_MAGIC_CMD ;; esac fi MAGIC_CMD=$lt_cv_path_MAGIC_CMD if test -n "$MAGIC_CMD"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $MAGIC_CMD" >&5 $as_echo "$MAGIC_CMD" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi else MAGIC_CMD=: fi fi fi ;; esac # Use C for the default configuration in the libtool script lt_save_CC=$CC ac_ext=c ac_cpp='$CPP $CPPFLAGS' ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' ac_compiler_gnu=$ac_cv_c_compiler_gnu # Source file extension for C test sources. ac_ext=c # Object file extension for compiled C test sources. objext=o objext=$objext # Code to be used in simple compile tests lt_simple_compile_test_code="int some_variable = 0;" # Code to be used in simple link tests lt_simple_link_test_code='int main(){return(0);}' # If no C compiler was specified, use CC. LTCC=${LTCC-"$CC"} # If no C compiler flags were specified, use CFLAGS. LTCFLAGS=${LTCFLAGS-"$CFLAGS"} # Allow CC to be a program name with arguments. compiler=$CC # Save the default compiler, since it gets overwritten when the other # tags are being tested, and _LT_TAGVAR(compiler, []) is a NOP. 
compiler_DEFAULT=$CC # save warnings/boilerplate of simple test code ac_outfile=conftest.$ac_objext echo "$lt_simple_compile_test_code" >conftest.$ac_ext eval "$ac_compile" 2>&1 >/dev/null | $SED '/^$/d; /^ *+/d' >conftest.err _lt_compiler_boilerplate=`cat conftest.err` $RM conftest* ac_outfile=conftest.$ac_objext echo "$lt_simple_link_test_code" >conftest.$ac_ext eval "$ac_link" 2>&1 >/dev/null | $SED '/^$/d; /^ *+/d' >conftest.err _lt_linker_boilerplate=`cat conftest.err` $RM -r conftest* if test -n "$compiler"; then lt_prog_compiler_no_builtin_flag= if test yes = "$GCC"; then case $cc_basename in nvcc*) lt_prog_compiler_no_builtin_flag=' -Xcompiler -fno-builtin' ;; *) lt_prog_compiler_no_builtin_flag=' -fno-builtin' ;; esac { $as_echo "$as_me:${as_lineno-$LINENO}: checking if $compiler supports -fno-rtti -fno-exceptions" >&5 $as_echo_n "checking if $compiler supports -fno-rtti -fno-exceptions... " >&6; } if ${lt_cv_prog_compiler_rtti_exceptions+:} false; then : $as_echo_n "(cached) " >&6 else lt_cv_prog_compiler_rtti_exceptions=no ac_outfile=conftest.$ac_objext echo "$lt_simple_compile_test_code" > conftest.$ac_ext lt_compiler_flag="-fno-rtti -fno-exceptions" ## exclude from sc_useless_quotes_in_assignment # Insert the option either (1) after the last *FLAGS variable, or # (2) before a word containing "conftest.", or (3) at the end. # Note that $ac_compile itself does not contain backslashes and begins # with a dollar sign (not a hyphen), so the echo should work correctly. # The option is referenced via a variable to avoid confusing sed. lt_compile=`echo "$ac_compile" | $SED \ -e 's:.*FLAGS}\{0,1\} :&$lt_compiler_flag :; t' \ -e 's: [^ ]*conftest\.: $lt_compiler_flag&:; t' \ -e 's:$: $lt_compiler_flag:'` (eval echo "\"\$as_me:$LINENO: $lt_compile\"" >&5) (eval "$lt_compile" 2>conftest.err) ac_status=$? cat conftest.err >&5 echo "$as_me:$LINENO: \$? 
= $ac_status" >&5 if (exit $ac_status) && test -s "$ac_outfile"; then # The compiler can only warn and ignore the option if not recognized # So say no if there are warnings other than the usual output. $ECHO "$_lt_compiler_boilerplate" | $SED '/^$/d' >conftest.exp $SED '/^$/d; /^ *+/d' conftest.err >conftest.er2 if test ! -s conftest.er2 || diff conftest.exp conftest.er2 >/dev/null; then lt_cv_prog_compiler_rtti_exceptions=yes fi fi $RM conftest* fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_prog_compiler_rtti_exceptions" >&5 $as_echo "$lt_cv_prog_compiler_rtti_exceptions" >&6; } if test yes = "$lt_cv_prog_compiler_rtti_exceptions"; then lt_prog_compiler_no_builtin_flag="$lt_prog_compiler_no_builtin_flag -fno-rtti -fno-exceptions" else : fi fi lt_prog_compiler_wl= lt_prog_compiler_pic= lt_prog_compiler_static= if test yes = "$GCC"; then lt_prog_compiler_wl='-Wl,' lt_prog_compiler_static='-static' case $host_os in aix*) # All AIX code is PIC. if test ia64 = "$host_cpu"; then # AIX 5 now supports IA64 processor lt_prog_compiler_static='-Bstatic' fi lt_prog_compiler_pic='-fPIC' ;; amigaos*) case $host_cpu in powerpc) # see comment about AmigaOS4 .so support lt_prog_compiler_pic='-fPIC' ;; m68k) # FIXME: we need at least 68020 code to build shared libraries, but # adding the '-m68020' flag to GCC prevents building anything better, # like '-m68040'. lt_prog_compiler_pic='-m68020 -resident32 -malways-restore-a4' ;; esac ;; beos* | irix5* | irix6* | nonstopux* | osf3* | osf4* | osf5*) # PIC is the default for these OSes. ;; mingw* | cygwin* | pw32* | os2* | cegcc*) # This hack is so that the source file can tell whether it is being # built for inclusion in a dll (and should export symbols for example). 
# Although the cygwin gcc ignores -fPIC, still need this for old-style # (--disable-auto-import) libraries lt_prog_compiler_pic='-DDLL_EXPORT' case $host_os in os2*) lt_prog_compiler_static='$wl-static' ;; esac ;; darwin* | rhapsody*) # PIC is the default on this platform # Common symbols not allowed in MH_DYLIB files lt_prog_compiler_pic='-fno-common' ;; haiku*) # PIC is the default for Haiku. # The "-static" flag exists, but is broken. lt_prog_compiler_static= ;; hpux*) # PIC is the default for 64-bit PA HP-UX, but not for 32-bit # PA HP-UX. On IA64 HP-UX, PIC is the default but the pic flag # sets the default TLS model and affects inlining. case $host_cpu in hppa*64*) # +Z the default ;; *) lt_prog_compiler_pic='-fPIC' ;; esac ;; interix[3-9]*) # Interix 3.x gcc -fpic/-fPIC options generate broken code. # Instead, we relocate shared libraries at runtime. ;; msdosdjgpp*) # Just because we use GCC doesn't mean we suddenly get shared libraries # on systems that don't support them. lt_prog_compiler_can_build_shared=no enable_shared=no ;; *nto* | *qnx*) # QNX uses GNU C++, but need to define -shared option too, otherwise # it will coredump. lt_prog_compiler_pic='-fPIC -shared' ;; sysv4*MP*) if test -d /usr/nec; then lt_prog_compiler_pic=-Kconform_pic fi ;; *) lt_prog_compiler_pic='-fPIC' ;; esac case $cc_basename in nvcc*) # Cuda Compiler Driver 2.2 lt_prog_compiler_wl='-Xlinker ' if test -n "$lt_prog_compiler_pic"; then lt_prog_compiler_pic="-Xcompiler $lt_prog_compiler_pic" fi ;; esac else # PORTME Check for flag to pass linker flags through the system compiler. 
case $host_os in aix*) lt_prog_compiler_wl='-Wl,' if test ia64 = "$host_cpu"; then # AIX 5 now supports IA64 processor lt_prog_compiler_static='-Bstatic' else lt_prog_compiler_static='-bnso -bI:/lib/syscalls.exp' fi ;; darwin* | rhapsody*) # PIC is the default on this platform # Common symbols not allowed in MH_DYLIB files lt_prog_compiler_pic='-fno-common' case $cc_basename in nagfor*) # NAG Fortran compiler lt_prog_compiler_wl='-Wl,-Wl,,' lt_prog_compiler_pic='-PIC' lt_prog_compiler_static='-Bstatic' ;; esac ;; mingw* | cygwin* | pw32* | os2* | cegcc*) # This hack is so that the source file can tell whether it is being # built for inclusion in a dll (and should export symbols for example). lt_prog_compiler_pic='-DDLL_EXPORT' case $host_os in os2*) lt_prog_compiler_static='$wl-static' ;; esac ;; hpux9* | hpux10* | hpux11*) lt_prog_compiler_wl='-Wl,' # PIC is the default for IA64 HP-UX and 64-bit HP-UX, but # not for PA HP-UX. case $host_cpu in hppa*64*|ia64*) # +Z the default ;; *) lt_prog_compiler_pic='+Z' ;; esac # Is there a better lt_prog_compiler_static that works with the bundled CC? lt_prog_compiler_static='$wl-a ${wl}archive' ;; irix5* | irix6* | nonstopux*) lt_prog_compiler_wl='-Wl,' # PIC (with -KPIC) is the default. lt_prog_compiler_static='-non_shared' ;; linux* | k*bsd*-gnu | kopensolaris*-gnu | gnu*) case $cc_basename in # old Intel for x86_64, which still supported -KPIC. ecc*) lt_prog_compiler_wl='-Wl,' lt_prog_compiler_pic='-KPIC' lt_prog_compiler_static='-static' ;; # icc used to be incompatible with GCC. # ICC 10 doesn't accept -KPIC any more. icc* | ifort*) lt_prog_compiler_wl='-Wl,' lt_prog_compiler_pic='-fPIC' lt_prog_compiler_static='-static' ;; # Lahey Fortran 8.1. 
lf95*) lt_prog_compiler_wl='-Wl,' lt_prog_compiler_pic='--shared' lt_prog_compiler_static='--static' ;; nagfor*) # NAG Fortran compiler lt_prog_compiler_wl='-Wl,-Wl,,' lt_prog_compiler_pic='-PIC' lt_prog_compiler_static='-Bstatic' ;; tcc*) # Fabrice Bellard et al's Tiny C Compiler lt_prog_compiler_wl='-Wl,' lt_prog_compiler_pic='-fPIC' lt_prog_compiler_static='-static' ;; pgcc* | pgf77* | pgf90* | pgf95* | pgfortran*) # Portland Group compilers (*not* the Pentium gcc compiler, # which looks to be a dead project) lt_prog_compiler_wl='-Wl,' lt_prog_compiler_pic='-fpic' lt_prog_compiler_static='-Bstatic' ;; ccc*) lt_prog_compiler_wl='-Wl,' # All Alpha code is PIC. lt_prog_compiler_static='-non_shared' ;; xl* | bgxl* | bgf* | mpixl*) # IBM XL C 8.0/Fortran 10.1, 11.1 on PPC and BlueGene lt_prog_compiler_wl='-Wl,' lt_prog_compiler_pic='-qpic' lt_prog_compiler_static='-qstaticlink' ;; *) case `$CC -V 2>&1 | sed 5q` in *Sun\ Ceres\ Fortran* | *Sun*Fortran*\ [1-7].* | *Sun*Fortran*\ 8.[0-3]*) # Sun Fortran 8.3 passes all unrecognized flags to the linker lt_prog_compiler_pic='-KPIC' lt_prog_compiler_static='-Bstatic' lt_prog_compiler_wl='' ;; *Sun\ F* | *Sun*Fortran*) lt_prog_compiler_pic='-KPIC' lt_prog_compiler_static='-Bstatic' lt_prog_compiler_wl='-Qoption ld ' ;; *Sun\ C*) # Sun C 5.9 lt_prog_compiler_pic='-KPIC' lt_prog_compiler_static='-Bstatic' lt_prog_compiler_wl='-Wl,' ;; *Intel*\ [CF]*Compiler*) lt_prog_compiler_wl='-Wl,' lt_prog_compiler_pic='-fPIC' lt_prog_compiler_static='-static' ;; *Portland\ Group*) lt_prog_compiler_wl='-Wl,' lt_prog_compiler_pic='-fpic' lt_prog_compiler_static='-Bstatic' ;; esac ;; esac ;; newsos6) lt_prog_compiler_pic='-KPIC' lt_prog_compiler_static='-Bstatic' ;; *nto* | *qnx*) # QNX uses GNU C++, but need to define -shared option too, otherwise # it will coredump. lt_prog_compiler_pic='-fPIC -shared' ;; osf3* | osf4* | osf5*) lt_prog_compiler_wl='-Wl,' # All OSF/1 code is PIC. 
lt_prog_compiler_static='-non_shared' ;; rdos*) lt_prog_compiler_static='-non_shared' ;; solaris*) lt_prog_compiler_pic='-KPIC' lt_prog_compiler_static='-Bstatic' case $cc_basename in f77* | f90* | f95* | sunf77* | sunf90* | sunf95*) lt_prog_compiler_wl='-Qoption ld ';; *) lt_prog_compiler_wl='-Wl,';; esac ;; sunos4*) lt_prog_compiler_wl='-Qoption ld ' lt_prog_compiler_pic='-PIC' lt_prog_compiler_static='-Bstatic' ;; sysv4 | sysv4.2uw2* | sysv4.3*) lt_prog_compiler_wl='-Wl,' lt_prog_compiler_pic='-KPIC' lt_prog_compiler_static='-Bstatic' ;; sysv4*MP*) if test -d /usr/nec; then lt_prog_compiler_pic='-Kconform_pic' lt_prog_compiler_static='-Bstatic' fi ;; sysv5* | unixware* | sco3.2v5* | sco5v6* | OpenUNIX*) lt_prog_compiler_wl='-Wl,' lt_prog_compiler_pic='-KPIC' lt_prog_compiler_static='-Bstatic' ;; unicos*) lt_prog_compiler_wl='-Wl,' lt_prog_compiler_can_build_shared=no ;; uts4*) lt_prog_compiler_pic='-pic' lt_prog_compiler_static='-Bstatic' ;; *) lt_prog_compiler_can_build_shared=no ;; esac fi case $host_os in # For platforms that do not support PIC, -DPIC is meaningless: *djgpp*) lt_prog_compiler_pic= ;; *) lt_prog_compiler_pic="$lt_prog_compiler_pic -DPIC" ;; esac { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $compiler option to produce PIC" >&5 $as_echo_n "checking for $compiler option to produce PIC... " >&6; } if ${lt_cv_prog_compiler_pic+:} false; then : $as_echo_n "(cached) " >&6 else lt_cv_prog_compiler_pic=$lt_prog_compiler_pic fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_prog_compiler_pic" >&5 $as_echo "$lt_cv_prog_compiler_pic" >&6; } lt_prog_compiler_pic=$lt_cv_prog_compiler_pic # # Check to make sure the PIC flag actually works. # if test -n "$lt_prog_compiler_pic"; then { $as_echo "$as_me:${as_lineno-$LINENO}: checking if $compiler PIC flag $lt_prog_compiler_pic works" >&5 $as_echo_n "checking if $compiler PIC flag $lt_prog_compiler_pic works... 
" >&6; } if ${lt_cv_prog_compiler_pic_works+:} false; then : $as_echo_n "(cached) " >&6 else lt_cv_prog_compiler_pic_works=no ac_outfile=conftest.$ac_objext echo "$lt_simple_compile_test_code" > conftest.$ac_ext lt_compiler_flag="$lt_prog_compiler_pic -DPIC" ## exclude from sc_useless_quotes_in_assignment # Insert the option either (1) after the last *FLAGS variable, or # (2) before a word containing "conftest.", or (3) at the end. # Note that $ac_compile itself does not contain backslashes and begins # with a dollar sign (not a hyphen), so the echo should work correctly. # The option is referenced via a variable to avoid confusing sed. lt_compile=`echo "$ac_compile" | $SED \ -e 's:.*FLAGS}\{0,1\} :&$lt_compiler_flag :; t' \ -e 's: [^ ]*conftest\.: $lt_compiler_flag&:; t' \ -e 's:$: $lt_compiler_flag:'` (eval echo "\"\$as_me:$LINENO: $lt_compile\"" >&5) (eval "$lt_compile" 2>conftest.err) ac_status=$? cat conftest.err >&5 echo "$as_me:$LINENO: \$? = $ac_status" >&5 if (exit $ac_status) && test -s "$ac_outfile"; then # The compiler can only warn and ignore the option if not recognized # So say no if there are warnings other than the usual output. $ECHO "$_lt_compiler_boilerplate" | $SED '/^$/d' >conftest.exp $SED '/^$/d; /^ *+/d' conftest.err >conftest.er2 if test ! -s conftest.er2 || diff conftest.exp conftest.er2 >/dev/null; then lt_cv_prog_compiler_pic_works=yes fi fi $RM conftest* fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_prog_compiler_pic_works" >&5 $as_echo "$lt_cv_prog_compiler_pic_works" >&6; } if test yes = "$lt_cv_prog_compiler_pic_works"; then case $lt_prog_compiler_pic in "" | " "*) ;; *) lt_prog_compiler_pic=" $lt_prog_compiler_pic" ;; esac else lt_prog_compiler_pic= lt_prog_compiler_can_build_shared=no fi fi # # Check to make sure the static flag actually works. 
# wl=$lt_prog_compiler_wl eval lt_tmp_static_flag=\"$lt_prog_compiler_static\" { $as_echo "$as_me:${as_lineno-$LINENO}: checking if $compiler static flag $lt_tmp_static_flag works" >&5 $as_echo_n "checking if $compiler static flag $lt_tmp_static_flag works... " >&6; } if ${lt_cv_prog_compiler_static_works+:} false; then : $as_echo_n "(cached) " >&6 else lt_cv_prog_compiler_static_works=no save_LDFLAGS=$LDFLAGS LDFLAGS="$LDFLAGS $lt_tmp_static_flag" echo "$lt_simple_link_test_code" > conftest.$ac_ext if (eval $ac_link 2>conftest.err) && test -s conftest$ac_exeext; then # The linker can only warn and ignore the option if not recognized # So say no if there are warnings if test -s conftest.err; then # Append any errors to the config.log. cat conftest.err 1>&5 $ECHO "$_lt_linker_boilerplate" | $SED '/^$/d' > conftest.exp $SED '/^$/d; /^ *+/d' conftest.err >conftest.er2 if diff conftest.exp conftest.er2 >/dev/null; then lt_cv_prog_compiler_static_works=yes fi else lt_cv_prog_compiler_static_works=yes fi fi $RM -r conftest* LDFLAGS=$save_LDFLAGS fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_prog_compiler_static_works" >&5 $as_echo "$lt_cv_prog_compiler_static_works" >&6; } if test yes = "$lt_cv_prog_compiler_static_works"; then : else lt_prog_compiler_static= fi { $as_echo "$as_me:${as_lineno-$LINENO}: checking if $compiler supports -c -o file.$ac_objext" >&5 $as_echo_n "checking if $compiler supports -c -o file.$ac_objext... " >&6; } if ${lt_cv_prog_compiler_c_o+:} false; then : $as_echo_n "(cached) " >&6 else lt_cv_prog_compiler_c_o=no $RM -r conftest 2>/dev/null mkdir conftest cd conftest mkdir out echo "$lt_simple_compile_test_code" > conftest.$ac_ext lt_compiler_flag="-o out/conftest2.$ac_objext" # Insert the option either (1) after the last *FLAGS variable, or # (2) before a word containing "conftest.", or (3) at the end. 
# Note that $ac_compile itself does not contain backslashes and begins # with a dollar sign (not a hyphen), so the echo should work correctly. lt_compile=`echo "$ac_compile" | $SED \ -e 's:.*FLAGS}\{0,1\} :&$lt_compiler_flag :; t' \ -e 's: [^ ]*conftest\.: $lt_compiler_flag&:; t' \ -e 's:$: $lt_compiler_flag:'` (eval echo "\"\$as_me:$LINENO: $lt_compile\"" >&5) (eval "$lt_compile" 2>out/conftest.err) ac_status=$? cat out/conftest.err >&5 echo "$as_me:$LINENO: \$? = $ac_status" >&5 if (exit $ac_status) && test -s out/conftest2.$ac_objext then # The compiler can only warn and ignore the option if not recognized # So say no if there are warnings $ECHO "$_lt_compiler_boilerplate" | $SED '/^$/d' > out/conftest.exp $SED '/^$/d; /^ *+/d' out/conftest.err >out/conftest.er2 if test ! -s out/conftest.er2 || diff out/conftest.exp out/conftest.er2 >/dev/null; then lt_cv_prog_compiler_c_o=yes fi fi chmod u+w . 2>&5 $RM conftest* # SGI C++ compiler will create directory out/ii_files/ for # template instantiation test -d out/ii_files && $RM out/ii_files/* && rmdir out/ii_files $RM out/* && rmdir out cd .. $RM -r conftest $RM conftest* fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_prog_compiler_c_o" >&5 $as_echo "$lt_cv_prog_compiler_c_o" >&6; } { $as_echo "$as_me:${as_lineno-$LINENO}: checking if $compiler supports -c -o file.$ac_objext" >&5 $as_echo_n "checking if $compiler supports -c -o file.$ac_objext... " >&6; } if ${lt_cv_prog_compiler_c_o+:} false; then : $as_echo_n "(cached) " >&6 else lt_cv_prog_compiler_c_o=no $RM -r conftest 2>/dev/null mkdir conftest cd conftest mkdir out echo "$lt_simple_compile_test_code" > conftest.$ac_ext lt_compiler_flag="-o out/conftest2.$ac_objext" # Insert the option either (1) after the last *FLAGS variable, or # (2) before a word containing "conftest.", or (3) at the end. # Note that $ac_compile itself does not contain backslashes and begins # with a dollar sign (not a hyphen), so the echo should work correctly. 
lt_compile=`echo "$ac_compile" | $SED \ -e 's:.*FLAGS}\{0,1\} :&$lt_compiler_flag :; t' \ -e 's: [^ ]*conftest\.: $lt_compiler_flag&:; t' \ -e 's:$: $lt_compiler_flag:'` (eval echo "\"\$as_me:$LINENO: $lt_compile\"" >&5) (eval "$lt_compile" 2>out/conftest.err) ac_status=$? cat out/conftest.err >&5 echo "$as_me:$LINENO: \$? = $ac_status" >&5 if (exit $ac_status) && test -s out/conftest2.$ac_objext then # The compiler can only warn and ignore the option if not recognized # So say no if there are warnings $ECHO "$_lt_compiler_boilerplate" | $SED '/^$/d' > out/conftest.exp $SED '/^$/d; /^ *+/d' out/conftest.err >out/conftest.er2 if test ! -s out/conftest.er2 || diff out/conftest.exp out/conftest.er2 >/dev/null; then lt_cv_prog_compiler_c_o=yes fi fi chmod u+w . 2>&5 $RM conftest* # SGI C++ compiler will create directory out/ii_files/ for # template instantiation test -d out/ii_files && $RM out/ii_files/* && rmdir out/ii_files $RM out/* && rmdir out cd .. $RM -r conftest $RM conftest* fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_prog_compiler_c_o" >&5 $as_echo "$lt_cv_prog_compiler_c_o" >&6; } hard_links=nottested if test no = "$lt_cv_prog_compiler_c_o" && test no != "$need_locks"; then # do not overwrite the value of need_locks provided by the user { $as_echo "$as_me:${as_lineno-$LINENO}: checking if we can lock with hard links" >&5 $as_echo_n "checking if we can lock with hard links... 
" >&6; } hard_links=yes $RM conftest* ln conftest.a conftest.b 2>/dev/null && hard_links=no touch conftest.a ln conftest.a conftest.b 2>&5 || hard_links=no ln conftest.a conftest.b 2>/dev/null && hard_links=no { $as_echo "$as_me:${as_lineno-$LINENO}: result: $hard_links" >&5 $as_echo "$hard_links" >&6; } if test no = "$hard_links"; then { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: '$CC' does not support '-c -o', so 'make -j' may be unsafe" >&5 $as_echo "$as_me: WARNING: '$CC' does not support '-c -o', so 'make -j' may be unsafe" >&2;} need_locks=warn fi else need_locks=no fi { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether the $compiler linker ($LD) supports shared libraries" >&5 $as_echo_n "checking whether the $compiler linker ($LD) supports shared libraries... " >&6; } runpath_var= allow_undefined_flag= always_export_symbols=no archive_cmds= archive_expsym_cmds= compiler_needs_object=no enable_shared_with_static_runtimes=no export_dynamic_flag_spec= export_symbols_cmds='$NM $libobjs $convenience | $global_symbol_pipe | $SED '\''s/.* //'\'' | sort | uniq > $export_symbols' hardcode_automatic=no hardcode_direct=no hardcode_direct_absolute=no hardcode_libdir_flag_spec= hardcode_libdir_separator= hardcode_minus_L=no hardcode_shlibpath_var=unsupported inherit_rpath=no link_all_deplibs=unknown module_cmds= module_expsym_cmds= old_archive_from_new_cmds= old_archive_from_expsyms_cmds= thread_safe_flag_spec= whole_archive_flag_spec= # include_expsyms should be a list of space-separated symbols to be *always* # included in the symbol list include_expsyms= # exclude_expsyms can be an extended regexp of symbols to exclude # it will be wrapped by ' (' and ')$', so one must not match beginning or # end of line. Example: 'a|bc|.*d.*' will exclude the symbols 'a' and 'bc', # as well as any symbol that contains 'd'. 
exclude_expsyms='_GLOBAL_OFFSET_TABLE_|_GLOBAL__F[ID]_.*' # Although _GLOBAL_OFFSET_TABLE_ is a valid symbol C name, most a.out # platforms (ab)use it in PIC code, but their linkers get confused if # the symbol is explicitly referenced. Since portable code cannot # rely on this symbol name, it's probably fine to never include it in # preloaded symbol tables. # Exclude shared library initialization/finalization symbols. extract_expsyms_cmds= case $host_os in cygwin* | mingw* | pw32* | cegcc*) # FIXME: the MSVC++ port hasn't been tested in a loooong time # When not using gcc, we currently assume that we are using # Microsoft Visual C++. if test yes != "$GCC"; then with_gnu_ld=no fi ;; interix*) # we just hope/assume this is gcc and not c89 (= MSVC++) with_gnu_ld=yes ;; openbsd* | bitrig*) with_gnu_ld=no ;; esac ld_shlibs=yes # On some targets, GNU ld is compatible enough with the native linker # that we're better off using the native interface for both. lt_use_gnu_ld_interface=no if test yes = "$with_gnu_ld"; then case $host_os in aix*) # The AIX port of GNU ld has always aspired to compatibility # with the native linker. However, as the warning in the GNU ld # block says, versions before 2.19.5* couldn't really create working # shared libraries, regardless of the interface used. case `$LD -v 2>&1` in *\ \(GNU\ Binutils\)\ 2.19.5*) ;; *\ \(GNU\ Binutils\)\ 2.[2-9]*) ;; *\ \(GNU\ Binutils\)\ [3-9]*) ;; *) lt_use_gnu_ld_interface=yes ;; esac ;; *) lt_use_gnu_ld_interface=yes ;; esac fi if test yes = "$lt_use_gnu_ld_interface"; then # If archive_cmds runs LD, not CC, wlarc should be empty wlarc='$wl' # Set some defaults for GNU ld with shared library support. These # are reset later if shared libraries are not supported. Putting them # here allows them to be overridden if necessary. runpath_var=LD_RUN_PATH hardcode_libdir_flag_spec='$wl-rpath $wl$libdir' export_dynamic_flag_spec='$wl--export-dynamic' # ancient GNU ld didn't support --whole-archive et. al. 
if $LD --help 2>&1 | $GREP 'no-whole-archive' > /dev/null; then whole_archive_flag_spec=$wlarc'--whole-archive$convenience '$wlarc'--no-whole-archive' else whole_archive_flag_spec= fi supports_anon_versioning=no case `$LD -v | $SED -e 's/(^)\+)\s\+//' 2>&1` in *GNU\ gold*) supports_anon_versioning=yes ;; *\ [01].* | *\ 2.[0-9].* | *\ 2.10.*) ;; # catch versions < 2.11 *\ 2.11.93.0.2\ *) supports_anon_versioning=yes ;; # RH7.3 ... *\ 2.11.92.0.12\ *) supports_anon_versioning=yes ;; # Mandrake 8.2 ... *\ 2.11.*) ;; # other 2.11 versions *) supports_anon_versioning=yes ;; esac # See if GNU ld supports shared libraries. case $host_os in aix[3-9]*) # On AIX/PPC, the GNU linker is very broken if test ia64 != "$host_cpu"; then ld_shlibs=no cat <<_LT_EOF 1>&2 *** Warning: the GNU linker, at least up to release 2.19, is reported *** to be unable to reliably create shared libraries on AIX. *** Therefore, libtool is disabling shared libraries support. If you *** really care for shared libraries, you may want to install binutils *** 2.20 or above, or modify your PATH so that a non-GNU linker is found. *** You will then need to restart the configuration process. 
_LT_EOF fi ;; amigaos*) case $host_cpu in powerpc) # see comment about AmigaOS4 .so support archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags $wl-soname $wl$soname -o $lib' archive_expsym_cmds='' ;; m68k) archive_cmds='$RM $output_objdir/a2ixlibrary.data~$ECHO "#define NAME $libname" > $output_objdir/a2ixlibrary.data~$ECHO "#define LIBRARY_ID 1" >> $output_objdir/a2ixlibrary.data~$ECHO "#define VERSION $major" >> $output_objdir/a2ixlibrary.data~$ECHO "#define REVISION $revision" >> $output_objdir/a2ixlibrary.data~$AR $AR_FLAGS $lib $libobjs~$RANLIB $lib~(cd $output_objdir && a2ixlibrary -32)' hardcode_libdir_flag_spec='-L$libdir' hardcode_minus_L=yes ;; esac ;; beos*) if $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then allow_undefined_flag=unsupported # Joseph Beckenbach says some releases of gcc # support --undefined. This deserves some investigation. FIXME archive_cmds='$CC -nostart $libobjs $deplibs $compiler_flags $wl-soname $wl$soname -o $lib' else ld_shlibs=no fi ;; cygwin* | mingw* | pw32* | cegcc*) # _LT_TAGVAR(hardcode_libdir_flag_spec, ) is actually meaningless, # as there is no search path for DLLs. hardcode_libdir_flag_spec='-L$libdir' export_dynamic_flag_spec='$wl--export-all-symbols' allow_undefined_flag=unsupported always_export_symbols=no enable_shared_with_static_runtimes=yes export_symbols_cmds='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[BCDGRS][ ]/s/.*[ ]\([^ ]*\)/\1 DATA/;s/^.*[ ]__nm__\([^ ]*\)[ ][^ ]*/\1 DATA/;/^I[ ]/d;/^[AITW][ ]/s/.* //'\'' | sort | uniq > $export_symbols' exclude_expsyms='[_]+GLOBAL_OFFSET_TABLE_|[_]+GLOBAL__[FID]_.*|[_]+head_[A-Za-z0-9_]+_dll|[A-Za-z0-9_]+_dll_iname' if $LD --help 2>&1 | $GREP 'auto-import' > /dev/null; then archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags -o $output_objdir/$soname $wl--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib' # If the export-symbols file already is a .def file, use it as # is; otherwise, prepend EXPORTS... 
archive_expsym_cmds='if test DEF = "`$SED -n -e '\''s/^[ ]*//'\'' -e '\''/^\(;.*\)*$/d'\'' -e '\''s/^\(EXPORTS\|LIBRARY\)\([ ].*\)*$/DEF/p'\'' -e q $export_symbols`" ; then cp $export_symbols $output_objdir/$soname.def; else echo EXPORTS > $output_objdir/$soname.def; cat $export_symbols >> $output_objdir/$soname.def; fi~ $CC -shared $output_objdir/$soname.def $libobjs $deplibs $compiler_flags -o $output_objdir/$soname $wl--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib' else ld_shlibs=no fi ;; haiku*) archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags $wl-soname $wl$soname -o $lib' link_all_deplibs=yes ;; os2*) hardcode_libdir_flag_spec='-L$libdir' hardcode_minus_L=yes allow_undefined_flag=unsupported shrext_cmds=.dll archive_cmds='$ECHO "LIBRARY ${soname%$shared_ext} INITINSTANCE TERMINSTANCE" > $output_objdir/$libname.def~ $ECHO "DESCRIPTION \"$libname\"" >> $output_objdir/$libname.def~ $ECHO "DATA MULTIPLE NONSHARED" >> $output_objdir/$libname.def~ $ECHO EXPORTS >> $output_objdir/$libname.def~ emxexp $libobjs | $SED /"_DLL_InitTerm"/d >> $output_objdir/$libname.def~ $CC -Zdll -Zcrtdll -o $output_objdir/$soname $libobjs $deplibs $compiler_flags $output_objdir/$libname.def~ emximp -o $lib $output_objdir/$libname.def' archive_expsym_cmds='$ECHO "LIBRARY ${soname%$shared_ext} INITINSTANCE TERMINSTANCE" > $output_objdir/$libname.def~ $ECHO "DESCRIPTION \"$libname\"" >> $output_objdir/$libname.def~ $ECHO "DATA MULTIPLE NONSHARED" >> $output_objdir/$libname.def~ $ECHO EXPORTS >> $output_objdir/$libname.def~ prefix_cmds="$SED"~ if test EXPORTS = "`$SED 1q $export_symbols`"; then prefix_cmds="$prefix_cmds -e 1d"; fi~ prefix_cmds="$prefix_cmds -e \"s/^\(.*\)$/_\1/g\""~ cat $export_symbols | $prefix_cmds >> $output_objdir/$libname.def~ $CC -Zdll -Zcrtdll -o $output_objdir/$soname $libobjs $deplibs $compiler_flags $output_objdir/$libname.def~ emximp -o $lib $output_objdir/$libname.def' old_archive_From_new_cmds='emximp -o $output_objdir/${libname}_dll.a 
$output_objdir/$libname.def' enable_shared_with_static_runtimes=yes ;; interix[3-9]*) hardcode_direct=no hardcode_shlibpath_var=no hardcode_libdir_flag_spec='$wl-rpath,$libdir' export_dynamic_flag_spec='$wl-E' # Hack: On Interix 3.x, we cannot compile PIC because of a broken gcc. # Instead, shared libraries are loaded at an image base (0x10000000 by # default) and relocated if they conflict, which is a slow very memory # consuming and fragmenting process. To avoid this, we pick a random, # 256 KiB-aligned image base between 0x50000000 and 0x6FFC0000 at link # time. Moving up from 0x10000000 also allows more sbrk(2) space. archive_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags $wl-h,$soname $wl--image-base,`expr ${RANDOM-$$} % 4096 / 2 \* 262144 + 1342177280` -o $lib' archive_expsym_cmds='sed "s|^|_|" $export_symbols >$output_objdir/$soname.expsym~$CC -shared $pic_flag $libobjs $deplibs $compiler_flags $wl-h,$soname $wl--retain-symbols-file,$output_objdir/$soname.expsym $wl--image-base,`expr ${RANDOM-$$} % 4096 / 2 \* 262144 + 1342177280` -o $lib' ;; gnu* | linux* | tpf* | k*bsd*-gnu | kopensolaris*-gnu) tmp_diet=no if test linux-dietlibc = "$host_os"; then case $cc_basename in diet\ *) tmp_diet=yes;; # linux-dietlibc with static linking (!diet-dyn) esac fi if $LD --help 2>&1 | $EGREP ': supported targets:.* elf' > /dev/null \ && test no = "$tmp_diet" then tmp_addflag=' $pic_flag' tmp_sharedflag='-shared' case $cc_basename,$host_cpu in pgcc*) # Portland Group C compiler whole_archive_flag_spec='$wl--whole-archive`for conv in $convenience\"\"; do test -n \"$conv\" && new_convenience=\"$new_convenience,$conv\"; done; func_echo_all \"$new_convenience\"` $wl--no-whole-archive' tmp_addflag=' $pic_flag' ;; pgf77* | pgf90* | pgf95* | pgfortran*) # Portland Group f77 and f90 compilers whole_archive_flag_spec='$wl--whole-archive`for conv in $convenience\"\"; do test -n \"$conv\" && new_convenience=\"$new_convenience,$conv\"; done; func_echo_all 
\"$new_convenience\"` $wl--no-whole-archive' tmp_addflag=' $pic_flag -Mnomain' ;; ecc*,ia64* | icc*,ia64*) # Intel C compiler on ia64 tmp_addflag=' -i_dynamic' ;; efc*,ia64* | ifort*,ia64*) # Intel Fortran compiler on ia64 tmp_addflag=' -i_dynamic -nofor_main' ;; ifc* | ifort*) # Intel Fortran compiler tmp_addflag=' -nofor_main' ;; lf95*) # Lahey Fortran 8.1 whole_archive_flag_spec= tmp_sharedflag='--shared' ;; nagfor*) # NAGFOR 5.3 tmp_sharedflag='-Wl,-shared' ;; xl[cC]* | bgxl[cC]* | mpixl[cC]*) # IBM XL C 8.0 on PPC (deal with xlf below) tmp_sharedflag='-qmkshrobj' tmp_addflag= ;; nvcc*) # Cuda Compiler Driver 2.2 whole_archive_flag_spec='$wl--whole-archive`for conv in $convenience\"\"; do test -n \"$conv\" && new_convenience=\"$new_convenience,$conv\"; done; func_echo_all \"$new_convenience\"` $wl--no-whole-archive' compiler_needs_object=yes ;; esac case `$CC -V 2>&1 | sed 5q` in *Sun\ C*) # Sun C 5.9 whole_archive_flag_spec='$wl--whole-archive`new_convenience=; for conv in $convenience\"\"; do test -z \"$conv\" || new_convenience=\"$new_convenience,$conv\"; done; func_echo_all \"$new_convenience\"` $wl--no-whole-archive' compiler_needs_object=yes tmp_sharedflag='-G' ;; *Sun\ F*) # Sun Fortran 8.3 tmp_sharedflag='-G' ;; esac archive_cmds='$CC '"$tmp_sharedflag""$tmp_addflag"' $libobjs $deplibs $compiler_flags $wl-soname $wl$soname -o $lib' if test yes = "$supports_anon_versioning"; then archive_expsym_cmds='echo "{ global:" > $output_objdir/$libname.ver~ cat $export_symbols | sed -e "s/\(.*\)/\1;/" >> $output_objdir/$libname.ver~ echo "local: *; };" >> $output_objdir/$libname.ver~ $CC '"$tmp_sharedflag""$tmp_addflag"' $libobjs $deplibs $compiler_flags $wl-soname $wl$soname $wl-version-script $wl$output_objdir/$libname.ver -o $lib' fi case $cc_basename in tcc*) export_dynamic_flag_spec='-rdynamic' ;; xlf* | bgf* | bgxlf* | mpixlf*) # IBM XL Fortran 10.1 on PPC cannot create shared libs itself whole_archive_flag_spec='--whole-archive$convenience 
--no-whole-archive' hardcode_libdir_flag_spec='$wl-rpath $wl$libdir' archive_cmds='$LD -shared $libobjs $deplibs $linker_flags -soname $soname -o $lib' if test yes = "$supports_anon_versioning"; then archive_expsym_cmds='echo "{ global:" > $output_objdir/$libname.ver~ cat $export_symbols | sed -e "s/\(.*\)/\1;/" >> $output_objdir/$libname.ver~ echo "local: *; };" >> $output_objdir/$libname.ver~ $LD -shared $libobjs $deplibs $linker_flags -soname $soname -version-script $output_objdir/$libname.ver -o $lib' fi ;; esac else ld_shlibs=no fi ;; netbsd*) if echo __ELF__ | $CC -E - | $GREP __ELF__ >/dev/null; then archive_cmds='$LD -Bshareable $libobjs $deplibs $linker_flags -o $lib' wlarc= else archive_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags $wl-soname $wl$soname -o $lib' archive_expsym_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags $wl-soname $wl$soname $wl-retain-symbols-file $wl$export_symbols -o $lib' fi ;; solaris*) if $LD -v 2>&1 | $GREP 'BFD 2\.8' > /dev/null; then ld_shlibs=no cat <<_LT_EOF 1>&2 *** Warning: The releases 2.8.* of the GNU linker cannot reliably *** create shared libraries on Solaris systems. Therefore, libtool *** is disabling shared libraries support. We urge you to upgrade GNU *** binutils to release 2.9.1 or newer. Another option is to modify *** your PATH or compiler configuration so that the native linker is *** used, and then restart. 
_LT_EOF elif $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then archive_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags $wl-soname $wl$soname -o $lib' archive_expsym_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags $wl-soname $wl$soname $wl-retain-symbols-file $wl$export_symbols -o $lib' else ld_shlibs=no fi ;; sysv5* | sco3.2v5* | sco5v6* | unixware* | OpenUNIX*) case `$LD -v 2>&1` in *\ [01].* | *\ 2.[0-9].* | *\ 2.1[0-5].*) ld_shlibs=no cat <<_LT_EOF 1>&2 *** Warning: Releases of the GNU linker prior to 2.16.91.0.3 cannot *** reliably create shared libraries on SCO systems. Therefore, libtool *** is disabling shared libraries support. We urge you to upgrade GNU *** binutils to release 2.16.91.0.3 or newer. Another option is to modify *** your PATH or compiler configuration so that the native linker is *** used, and then restart. _LT_EOF ;; *) # For security reasons, it is highly recommended that you always # use absolute paths for naming shared libraries, and exclude the # DT_RUNPATH tag from executables and libraries. But doing so # requires that you compile everything twice, which is a pain. 
if $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then hardcode_libdir_flag_spec='$wl-rpath $wl$libdir' archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags $wl-soname $wl$soname -o $lib' archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags $wl-soname $wl$soname $wl-retain-symbols-file $wl$export_symbols -o $lib' else ld_shlibs=no fi ;; esac ;; sunos4*) archive_cmds='$LD -assert pure-text -Bshareable -o $lib $libobjs $deplibs $linker_flags' wlarc= hardcode_direct=yes hardcode_shlibpath_var=no ;; *) if $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then archive_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags $wl-soname $wl$soname -o $lib' archive_expsym_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags $wl-soname $wl$soname $wl-retain-symbols-file $wl$export_symbols -o $lib' else ld_shlibs=no fi ;; esac if test no = "$ld_shlibs"; then runpath_var= hardcode_libdir_flag_spec= export_dynamic_flag_spec= whole_archive_flag_spec= fi else # PORTME fill in a description of your system's linker (not GNU ld) case $host_os in aix3*) allow_undefined_flag=unsupported always_export_symbols=yes archive_expsym_cmds='$LD -o $output_objdir/$soname $libobjs $deplibs $linker_flags -bE:$export_symbols -T512 -H512 -bM:SRE~$AR $AR_FLAGS $lib $output_objdir/$soname' # Note: this linker hardcodes the directories in LIBPATH if there # are no directories specified by -L. hardcode_minus_L=yes if test yes = "$GCC" && test -z "$lt_prog_compiler_static"; then # Neither direct hardcoding nor static linking is supported with a # broken collect2. hardcode_direct=unsupported fi ;; aix[4-9]*) if test ia64 = "$host_cpu"; then # On IA64, the linker does run time linking by default, so we don't # have to do anything special. aix_use_runtimelinking=no exp_sym_flag='-Bexport' no_entry_flag= else # If we're using GNU nm, then we don't want the "-C" option. # -C means demangle to GNU nm, but means don't demangle to AIX nm. 
# Without the "-l" option, or with the "-B" option, AIX nm treats # weak defined symbols like other global defined symbols, whereas # GNU nm marks them as "W". # While the 'weak' keyword is ignored in the Export File, we need # it in the Import File for the 'aix-soname' feature, so we have # to replace the "-B" option with "-P" for AIX nm. if $NM -V 2>&1 | $GREP 'GNU' > /dev/null; then export_symbols_cmds='$NM -Bpg $libobjs $convenience | awk '\''{ if (((\$ 2 == "T") || (\$ 2 == "D") || (\$ 2 == "B") || (\$ 2 == "W")) && (substr(\$ 3,1,1) != ".")) { if (\$ 2 == "W") { print \$ 3 " weak" } else { print \$ 3 } } }'\'' | sort -u > $export_symbols' else export_symbols_cmds='`func_echo_all $NM | $SED -e '\''s/B\([^B]*\)$/P\1/'\''` -PCpgl $libobjs $convenience | awk '\''{ if (((\$ 2 == "T") || (\$ 2 == "D") || (\$ 2 == "B") || (\$ 2 == "W") || (\$ 2 == "V") || (\$ 2 == "Z")) && (substr(\$ 1,1,1) != ".")) { if ((\$ 2 == "W") || (\$ 2 == "V") || (\$ 2 == "Z")) { print \$ 1 " weak" } else { print \$ 1 } } }'\'' | sort -u > $export_symbols' fi aix_use_runtimelinking=no # Test if we are trying to use run time linking or normal # AIX style linking. If -brtl is somewhere in LDFLAGS, we # have runtime linking enabled, and use it for executables. 
# For shared libraries, we enable/disable runtime linking # depending on the kind of the shared library created - # when "with_aix_soname,aix_use_runtimelinking" is: # "aix,no" lib.a(lib.so.V) shared, rtl:no, for executables # "aix,yes" lib.so shared, rtl:yes, for executables # lib.a static archive # "both,no" lib.so.V(shr.o) shared, rtl:yes # lib.a(lib.so.V) shared, rtl:no, for executables # "both,yes" lib.so.V(shr.o) shared, rtl:yes, for executables # lib.a(lib.so.V) shared, rtl:no # "svr4,*" lib.so.V(shr.o) shared, rtl:yes, for executables # lib.a static archive case $host_os in aix4.[23]|aix4.[23].*|aix[5-9]*) for ld_flag in $LDFLAGS; do if (test x-brtl = "x$ld_flag" || test x-Wl,-brtl = "x$ld_flag"); then aix_use_runtimelinking=yes break fi done if test svr4,no = "$with_aix_soname,$aix_use_runtimelinking"; then # With aix-soname=svr4, we create the lib.so.V shared archives only, # so we don't have lib.a shared libs to link our executables. # We have to force runtime linking in this case. aix_use_runtimelinking=yes LDFLAGS="$LDFLAGS -Wl,-brtl" fi ;; esac exp_sym_flag='-bexport' no_entry_flag='-bnoentry' fi # When large executables or shared objects are built, AIX ld can # have problems creating the table of contents. If linking a library # or program results in "error TOC overflow" add -mminimal-toc to # CXXFLAGS/CFLAGS for g++/gcc. In the cases where that is not # enough to fix the problem, add -Wl,-bbigtoc to LDFLAGS. archive_cmds='' hardcode_direct=yes hardcode_direct_absolute=yes hardcode_libdir_separator=':' link_all_deplibs=yes file_list_spec='$wl-f,' case $with_aix_soname,$aix_use_runtimelinking in aix,*) ;; # traditional, no import file svr4,* | *,yes) # use import file # The Import File defines what to hardcode. 
hardcode_direct=no hardcode_direct_absolute=no ;; esac if test yes = "$GCC"; then case $host_os in aix4.[012]|aix4.[012].*) # We only want to do this on AIX 4.2 and lower, the check # below for broken collect2 doesn't work under 4.3+ collect2name=`$CC -print-prog-name=collect2` if test -f "$collect2name" && strings "$collect2name" | $GREP resolve_lib_name >/dev/null then # We have reworked collect2 : else # We have old collect2 hardcode_direct=unsupported # It fails to find uninstalled libraries when the uninstalled # path is not listed in the libpath. Setting hardcode_minus_L # to unsupported forces relinking hardcode_minus_L=yes hardcode_libdir_flag_spec='-L$libdir' hardcode_libdir_separator= fi ;; esac shared_flag='-shared' if test yes = "$aix_use_runtimelinking"; then shared_flag="$shared_flag "'$wl-G' fi # Need to ensure runtime linking is disabled for the traditional # shared library, or the linker may eventually find shared libraries # /with/ Import File - we do not want to mix them. shared_flag_aix='-shared' shared_flag_svr4='-shared $wl-G' else # not using gcc if test ia64 = "$host_cpu"; then # VisualAge C++, Version 5.5 for AIX 5L for IA-64, Beta 3 Release # chokes on -Wl,-G. The following line is correct: shared_flag='-G' else if test yes = "$aix_use_runtimelinking"; then shared_flag='$wl-G' else shared_flag='$wl-bM:SRE' fi shared_flag_aix='$wl-bM:SRE' shared_flag_svr4='$wl-G' fi fi export_dynamic_flag_spec='$wl-bexpall' # It seems that -bexpall does not export symbols beginning with # underscore (_), so it is better to generate a list of symbols to export. always_export_symbols=yes if test aix,yes = "$with_aix_soname,$aix_use_runtimelinking"; then # Warning - without using the other runtime loading flags (-brtl), # -berok will link without error, but may produce a broken library. allow_undefined_flag='-berok' # Determine the default libpath from the value encoded in an # empty executable. 
if test set = "${lt_cv_aix_libpath+set}"; then aix_libpath=$lt_cv_aix_libpath else if ${lt_cv_aix_libpath_+:} false; then : $as_echo_n "(cached) " >&6 else cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ int main () { ; return 0; } _ACEOF if ac_fn_c_try_link "$LINENO"; then : lt_aix_libpath_sed=' /Import File Strings/,/^$/ { /^0/ { s/^0 *\([^ ]*\) *$/\1/ p } }' lt_cv_aix_libpath_=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` # Check for a 64-bit object if we didn't find anything. if test -z "$lt_cv_aix_libpath_"; then lt_cv_aix_libpath_=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` fi fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext if test -z "$lt_cv_aix_libpath_"; then lt_cv_aix_libpath_=/usr/lib:/lib fi fi aix_libpath=$lt_cv_aix_libpath_ fi hardcode_libdir_flag_spec='$wl-blibpath:$libdir:'"$aix_libpath" archive_expsym_cmds='$CC -o $output_objdir/$soname $libobjs $deplibs $wl'$no_entry_flag' $compiler_flags `if test -n "$allow_undefined_flag"; then func_echo_all "$wl$allow_undefined_flag"; else :; fi` $wl'$exp_sym_flag:\$export_symbols' '$shared_flag else if test ia64 = "$host_cpu"; then hardcode_libdir_flag_spec='$wl-R $libdir:/usr/lib:/lib' allow_undefined_flag="-z nodefs" archive_expsym_cmds="\$CC $shared_flag"' -o $output_objdir/$soname $libobjs $deplibs '"\$wl$no_entry_flag"' $compiler_flags $wl$allow_undefined_flag '"\$wl$exp_sym_flag:\$export_symbols" else # Determine the default libpath from the value encoded in an # empty executable. if test set = "${lt_cv_aix_libpath+set}"; then aix_libpath=$lt_cv_aix_libpath else if ${lt_cv_aix_libpath_+:} false; then : $as_echo_n "(cached) " >&6 else cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. 
*/ int main () { ; return 0; } _ACEOF if ac_fn_c_try_link "$LINENO"; then : lt_aix_libpath_sed=' /Import File Strings/,/^$/ { /^0/ { s/^0 *\([^ ]*\) *$/\1/ p } }' lt_cv_aix_libpath_=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` # Check for a 64-bit object if we didn't find anything. if test -z "$lt_cv_aix_libpath_"; then lt_cv_aix_libpath_=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` fi fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext if test -z "$lt_cv_aix_libpath_"; then lt_cv_aix_libpath_=/usr/lib:/lib fi fi aix_libpath=$lt_cv_aix_libpath_ fi hardcode_libdir_flag_spec='$wl-blibpath:$libdir:'"$aix_libpath" # Warning - without using the other run time loading flags, # -berok will link without error, but may produce a broken library. no_undefined_flag=' $wl-bernotok' allow_undefined_flag=' $wl-berok' if test yes = "$with_gnu_ld"; then # We only use this code for GNU lds that support --whole-archive. whole_archive_flag_spec='$wl--whole-archive$convenience $wl--no-whole-archive' else # Exported symbols can be pulled into shared objects from archives whole_archive_flag_spec='$convenience' fi archive_cmds_need_lc=yes archive_expsym_cmds='$RM -r $output_objdir/$realname.d~$MKDIR $output_objdir/$realname.d' # -brtl affects multiple linker settings, -berok does not and is overridden later compiler_flags_filtered='`func_echo_all "$compiler_flags " | $SED -e "s%-brtl\\([, ]\\)%-berok\\1%g"`' if test svr4 != "$with_aix_soname"; then # This is similar to how AIX traditionally builds its shared libraries. 
archive_expsym_cmds="$archive_expsym_cmds"'~$CC '$shared_flag_aix' -o $output_objdir/$realname.d/$soname $libobjs $deplibs $wl-bnoentry '$compiler_flags_filtered'$wl-bE:$export_symbols$allow_undefined_flag~$AR $AR_FLAGS $output_objdir/$libname$release.a $output_objdir/$realname.d/$soname' fi if test aix != "$with_aix_soname"; then archive_expsym_cmds="$archive_expsym_cmds"'~$CC '$shared_flag_svr4' -o $output_objdir/$realname.d/$shared_archive_member_spec.o $libobjs $deplibs $wl-bnoentry '$compiler_flags_filtered'$wl-bE:$export_symbols$allow_undefined_flag~$STRIP -e $output_objdir/$realname.d/$shared_archive_member_spec.o~( func_echo_all "#! $soname($shared_archive_member_spec.o)"; if test shr_64 = "$shared_archive_member_spec"; then func_echo_all "# 64"; else func_echo_all "# 32"; fi; cat $export_symbols ) > $output_objdir/$realname.d/$shared_archive_member_spec.imp~$AR $AR_FLAGS $output_objdir/$soname $output_objdir/$realname.d/$shared_archive_member_spec.o $output_objdir/$realname.d/$shared_archive_member_spec.imp' else # used by -dlpreopen to get the symbols archive_expsym_cmds="$archive_expsym_cmds"'~$MV $output_objdir/$realname.d/$soname $output_objdir' fi archive_expsym_cmds="$archive_expsym_cmds"'~$RM -r $output_objdir/$realname.d' fi fi ;; amigaos*) case $host_cpu in powerpc) # see comment about AmigaOS4 .so support archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags $wl-soname $wl$soname -o $lib' archive_expsym_cmds='' ;; m68k) archive_cmds='$RM $output_objdir/a2ixlibrary.data~$ECHO "#define NAME $libname" > $output_objdir/a2ixlibrary.data~$ECHO "#define LIBRARY_ID 1" >> $output_objdir/a2ixlibrary.data~$ECHO "#define VERSION $major" >> $output_objdir/a2ixlibrary.data~$ECHO "#define REVISION $revision" >> $output_objdir/a2ixlibrary.data~$AR $AR_FLAGS $lib $libobjs~$RANLIB $lib~(cd $output_objdir && a2ixlibrary -32)' hardcode_libdir_flag_spec='-L$libdir' hardcode_minus_L=yes ;; esac ;; bsdi[45]*) export_dynamic_flag_spec=-rdynamic ;; cygwin* | mingw* 
| pw32* | cegcc*) # When not using gcc, we currently assume that we are using # Microsoft Visual C++. # hardcode_libdir_flag_spec is actually meaningless, as there is # no search path for DLLs. case $cc_basename in cl*) # Native MSVC hardcode_libdir_flag_spec=' ' allow_undefined_flag=unsupported always_export_symbols=yes file_list_spec='@' # Tell ltmain to make .lib files, not .a files. libext=lib # Tell ltmain to make .dll files, not .so files. shrext_cmds=.dll # FIXME: Setting linknames here is a bad hack. archive_cmds='$CC -o $output_objdir/$soname $libobjs $compiler_flags $deplibs -Wl,-DLL,-IMPLIB:"$tool_output_objdir$libname.dll.lib"~linknames=' archive_expsym_cmds='if test DEF = "`$SED -n -e '\''s/^[ ]*//'\'' -e '\''/^\(;.*\)*$/d'\'' -e '\''s/^\(EXPORTS\|LIBRARY\)\([ ].*\)*$/DEF/p'\'' -e q $export_symbols`" ; then cp "$export_symbols" "$output_objdir/$soname.def"; echo "$tool_output_objdir$soname.def" > "$output_objdir/$soname.exp"; else $SED -e '\''s/^/-link -EXPORT:/'\'' < $export_symbols > $output_objdir/$soname.exp; fi~ $CC -o $tool_output_objdir$soname $libobjs $compiler_flags $deplibs "@$tool_output_objdir$soname.exp" -Wl,-DLL,-IMPLIB:"$tool_output_objdir$libname.dll.lib"~ linknames=' # The linker will not automatically build a static lib if we build a DLL. 
# _LT_TAGVAR(old_archive_from_new_cmds, )='true' enable_shared_with_static_runtimes=yes exclude_expsyms='_NULL_IMPORT_DESCRIPTOR|_IMPORT_DESCRIPTOR_.*' export_symbols_cmds='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[BCDGRS][ ]/s/.*[ ]\([^ ]*\)/\1,DATA/'\'' | $SED -e '\''/^[AITW][ ]/s/.*[ ]//'\'' | sort | uniq > $export_symbols' # Don't use ranlib old_postinstall_cmds='chmod 644 $oldlib' postlink_cmds='lt_outputfile="@OUTPUT@"~ lt_tool_outputfile="@TOOL_OUTPUT@"~ case $lt_outputfile in *.exe|*.EXE) ;; *) lt_outputfile=$lt_outputfile.exe lt_tool_outputfile=$lt_tool_outputfile.exe ;; esac~ if test : != "$MANIFEST_TOOL" && test -f "$lt_outputfile.manifest"; then $MANIFEST_TOOL -manifest "$lt_tool_outputfile.manifest" -outputresource:"$lt_tool_outputfile" || exit 1; $RM "$lt_outputfile.manifest"; fi' ;; *) # Assume MSVC wrapper hardcode_libdir_flag_spec=' ' allow_undefined_flag=unsupported # Tell ltmain to make .lib files, not .a files. libext=lib # Tell ltmain to make .dll files, not .so files. shrext_cmds=.dll # FIXME: Setting linknames here is a bad hack. archive_cmds='$CC -o $lib $libobjs $compiler_flags `func_echo_all "$deplibs" | $SED '\''s/ -lc$//'\''` -link -dll~linknames=' # The linker will automatically build a .lib file if we build a DLL. old_archive_from_new_cmds='true' # FIXME: Should let the user specify the lib program. 
old_archive_cmds='lib -OUT:$oldlib$oldobjs$old_deplibs' enable_shared_with_static_runtimes=yes ;; esac ;; darwin* | rhapsody*) archive_cmds_need_lc=no hardcode_direct=no hardcode_automatic=yes hardcode_shlibpath_var=unsupported if test yes = "$lt_cv_ld_force_load"; then whole_archive_flag_spec='`for conv in $convenience\"\"; do test -n \"$conv\" && new_convenience=\"$new_convenience $wl-force_load,$conv\"; done; func_echo_all \"$new_convenience\"`' else whole_archive_flag_spec='' fi link_all_deplibs=yes allow_undefined_flag=$_lt_dar_allow_undefined case $cc_basename in ifort*|nagfor*) _lt_dar_can_shared=yes ;; *) _lt_dar_can_shared=$GCC ;; esac if test yes = "$_lt_dar_can_shared"; then output_verbose_link_cmd=func_echo_all archive_cmds="\$CC -dynamiclib \$allow_undefined_flag -o \$lib \$libobjs \$deplibs \$compiler_flags -install_name \$rpath/\$soname \$verstring $_lt_dar_single_mod$_lt_dsymutil" module_cmds="\$CC \$allow_undefined_flag -o \$lib -bundle \$libobjs \$deplibs \$compiler_flags$_lt_dsymutil" archive_expsym_cmds="sed 's|^|_|' < \$export_symbols > \$output_objdir/\$libname-symbols.expsym~\$CC -dynamiclib \$allow_undefined_flag -o \$lib \$libobjs \$deplibs \$compiler_flags -install_name \$rpath/\$soname \$verstring $_lt_dar_single_mod$_lt_dar_export_syms$_lt_dsymutil" module_expsym_cmds="sed -e 's|^|_|' < \$export_symbols > \$output_objdir/\$libname-symbols.expsym~\$CC \$allow_undefined_flag -o \$lib -bundle \$libobjs \$deplibs \$compiler_flags$_lt_dar_export_syms$_lt_dsymutil" else ld_shlibs=no fi ;; dgux*) archive_cmds='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags' hardcode_libdir_flag_spec='-L$libdir' hardcode_shlibpath_var=no ;; # FreeBSD 2.2.[012] allows us to include c++rt0.o to get C++ constructor # support. Future versions do this automatically, but an explicit c++rt0.o # does not break anything, and helps significantly (at the cost of a little # extra space). 
freebsd2.2*) archive_cmds='$LD -Bshareable -o $lib $libobjs $deplibs $linker_flags /usr/lib/c++rt0.o' hardcode_libdir_flag_spec='-R$libdir' hardcode_direct=yes hardcode_shlibpath_var=no ;; # Unfortunately, older versions of FreeBSD 2 do not have this feature. freebsd2.*) archive_cmds='$LD -Bshareable -o $lib $libobjs $deplibs $linker_flags' hardcode_direct=yes hardcode_minus_L=yes hardcode_shlibpath_var=no ;; # FreeBSD 3 and greater uses gcc -shared to do shared libraries. freebsd* | dragonfly*) archive_cmds='$CC -shared $pic_flag -o $lib $libobjs $deplibs $compiler_flags' hardcode_libdir_flag_spec='-R$libdir' hardcode_direct=yes hardcode_shlibpath_var=no ;; hpux9*) if test yes = "$GCC"; then archive_cmds='$RM $output_objdir/$soname~$CC -shared $pic_flag $wl+b $wl$install_libdir -o $output_objdir/$soname $libobjs $deplibs $compiler_flags~test "x$output_objdir/$soname" = "x$lib" || mv $output_objdir/$soname $lib' else archive_cmds='$RM $output_objdir/$soname~$LD -b +b $install_libdir -o $output_objdir/$soname $libobjs $deplibs $linker_flags~test "x$output_objdir/$soname" = "x$lib" || mv $output_objdir/$soname $lib' fi hardcode_libdir_flag_spec='$wl+b $wl$libdir' hardcode_libdir_separator=: hardcode_direct=yes # hardcode_minus_L: Not really in the search PATH, # but as the default location of the library. hardcode_minus_L=yes export_dynamic_flag_spec='$wl-E' ;; hpux10*) if test yes,no = "$GCC,$with_gnu_ld"; then archive_cmds='$CC -shared $pic_flag $wl+h $wl$soname $wl+b $wl$install_libdir -o $lib $libobjs $deplibs $compiler_flags' else archive_cmds='$LD -b +h $soname +b $install_libdir -o $lib $libobjs $deplibs $linker_flags' fi if test no = "$with_gnu_ld"; then hardcode_libdir_flag_spec='$wl+b $wl$libdir' hardcode_libdir_separator=: hardcode_direct=yes hardcode_direct_absolute=yes export_dynamic_flag_spec='$wl-E' # hardcode_minus_L: Not really in the search PATH, # but as the default location of the library. 
hardcode_minus_L=yes fi ;; hpux11*) if test yes,no = "$GCC,$with_gnu_ld"; then case $host_cpu in hppa*64*) archive_cmds='$CC -shared $wl+h $wl$soname -o $lib $libobjs $deplibs $compiler_flags' ;; ia64*) archive_cmds='$CC -shared $pic_flag $wl+h $wl$soname $wl+nodefaultrpath -o $lib $libobjs $deplibs $compiler_flags' ;; *) archive_cmds='$CC -shared $pic_flag $wl+h $wl$soname $wl+b $wl$install_libdir -o $lib $libobjs $deplibs $compiler_flags' ;; esac else case $host_cpu in hppa*64*) archive_cmds='$CC -b $wl+h $wl$soname -o $lib $libobjs $deplibs $compiler_flags' ;; ia64*) archive_cmds='$CC -b $wl+h $wl$soname $wl+nodefaultrpath -o $lib $libobjs $deplibs $compiler_flags' ;; *) # Older versions of the 11.00 compiler do not understand -b yet # (HP92453-01 A.11.01.20 doesn't, HP92453-01 B.11.X.35175-35176.GP does) { $as_echo "$as_me:${as_lineno-$LINENO}: checking if $CC understands -b" >&5 $as_echo_n "checking if $CC understands -b... " >&6; } if ${lt_cv_prog_compiler__b+:} false; then : $as_echo_n "(cached) " >&6 else lt_cv_prog_compiler__b=no save_LDFLAGS=$LDFLAGS LDFLAGS="$LDFLAGS -b" echo "$lt_simple_link_test_code" > conftest.$ac_ext if (eval $ac_link 2>conftest.err) && test -s conftest$ac_exeext; then # The linker can only warn and ignore the option if not recognized # So say no if there are warnings if test -s conftest.err; then # Append any errors to the config.log. 
cat conftest.err 1>&5 $ECHO "$_lt_linker_boilerplate" | $SED '/^$/d' > conftest.exp $SED '/^$/d; /^ *+/d' conftest.err >conftest.er2 if diff conftest.exp conftest.er2 >/dev/null; then lt_cv_prog_compiler__b=yes fi else lt_cv_prog_compiler__b=yes fi fi $RM -r conftest* LDFLAGS=$save_LDFLAGS fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_prog_compiler__b" >&5 $as_echo "$lt_cv_prog_compiler__b" >&6; } if test yes = "$lt_cv_prog_compiler__b"; then archive_cmds='$CC -b $wl+h $wl$soname $wl+b $wl$install_libdir -o $lib $libobjs $deplibs $compiler_flags' else archive_cmds='$LD -b +h $soname +b $install_libdir -o $lib $libobjs $deplibs $linker_flags' fi ;; esac fi if test no = "$with_gnu_ld"; then hardcode_libdir_flag_spec='$wl+b $wl$libdir' hardcode_libdir_separator=: case $host_cpu in hppa*64*|ia64*) hardcode_direct=no hardcode_shlibpath_var=no ;; *) hardcode_direct=yes hardcode_direct_absolute=yes export_dynamic_flag_spec='$wl-E' # hardcode_minus_L: Not really in the search PATH, # but as the default location of the library. hardcode_minus_L=yes ;; esac fi ;; irix5* | irix6* | nonstopux*) if test yes = "$GCC"; then archive_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags $wl-soname $wl$soname `test -n "$verstring" && func_echo_all "$wl-set_version $wl$verstring"` $wl-update_registry $wl$output_objdir/so_locations -o $lib' # Try to use the -exported_symbol ld option, if it does not # work, assume that -exports_file does not work either and # implicitly export all symbols. # This should be the same for all languages, so no per-tag cache variable. { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether the $host_os linker accepts -exported_symbol" >&5 $as_echo_n "checking whether the $host_os linker accepts -exported_symbol... 
" >&6; } if ${lt_cv_irix_exported_symbol+:} false; then : $as_echo_n "(cached) " >&6 else save_LDFLAGS=$LDFLAGS LDFLAGS="$LDFLAGS -shared $wl-exported_symbol ${wl}foo $wl-update_registry $wl/dev/null" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ int foo (void) { return 0; } _ACEOF if ac_fn_c_try_link "$LINENO"; then : lt_cv_irix_exported_symbol=yes else lt_cv_irix_exported_symbol=no fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext LDFLAGS=$save_LDFLAGS fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_irix_exported_symbol" >&5 $as_echo "$lt_cv_irix_exported_symbol" >&6; } if test yes = "$lt_cv_irix_exported_symbol"; then archive_expsym_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags $wl-soname $wl$soname `test -n "$verstring" && func_echo_all "$wl-set_version $wl$verstring"` $wl-update_registry $wl$output_objdir/so_locations $wl-exports_file $wl$export_symbols -o $lib' fi else archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry $output_objdir/so_locations -o $lib' archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry $output_objdir/so_locations -exports_file $export_symbols -o $lib' fi archive_cmds_need_lc='no' hardcode_libdir_flag_spec='$wl-rpath $wl$libdir' hardcode_libdir_separator=: inherit_rpath=yes link_all_deplibs=yes ;; linux*) case $cc_basename in tcc*) # Fabrice Bellard et al's Tiny C Compiler ld_shlibs=yes archive_cmds='$CC -shared $pic_flag -o $lib $libobjs $deplibs $compiler_flags' ;; esac ;; netbsd*) if echo __ELF__ | $CC -E - | $GREP __ELF__ >/dev/null; then archive_cmds='$LD -Bshareable -o $lib $libobjs $deplibs $linker_flags' # a.out else archive_cmds='$LD -shared -o $lib $libobjs $deplibs $linker_flags' # ELF fi hardcode_libdir_flag_spec='-R$libdir' 
hardcode_direct=yes hardcode_shlibpath_var=no ;; newsos6) archive_cmds='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags' hardcode_direct=yes hardcode_libdir_flag_spec='$wl-rpath $wl$libdir' hardcode_libdir_separator=: hardcode_shlibpath_var=no ;; *nto* | *qnx*) ;; openbsd* | bitrig*) if test -f /usr/libexec/ld.so; then hardcode_direct=yes hardcode_shlibpath_var=no hardcode_direct_absolute=yes if test -z "`echo __ELF__ | $CC -E - | $GREP __ELF__`"; then archive_cmds='$CC -shared $pic_flag -o $lib $libobjs $deplibs $compiler_flags' archive_expsym_cmds='$CC -shared $pic_flag -o $lib $libobjs $deplibs $compiler_flags $wl-retain-symbols-file,$export_symbols' hardcode_libdir_flag_spec='$wl-rpath,$libdir' export_dynamic_flag_spec='$wl-E' else archive_cmds='$CC -shared $pic_flag -o $lib $libobjs $deplibs $compiler_flags' hardcode_libdir_flag_spec='$wl-rpath,$libdir' fi else ld_shlibs=no fi ;; os2*) hardcode_libdir_flag_spec='-L$libdir' hardcode_minus_L=yes allow_undefined_flag=unsupported shrext_cmds=.dll archive_cmds='$ECHO "LIBRARY ${soname%$shared_ext} INITINSTANCE TERMINSTANCE" > $output_objdir/$libname.def~ $ECHO "DESCRIPTION \"$libname\"" >> $output_objdir/$libname.def~ $ECHO "DATA MULTIPLE NONSHARED" >> $output_objdir/$libname.def~ $ECHO EXPORTS >> $output_objdir/$libname.def~ emxexp $libobjs | $SED /"_DLL_InitTerm"/d >> $output_objdir/$libname.def~ $CC -Zdll -Zcrtdll -o $output_objdir/$soname $libobjs $deplibs $compiler_flags $output_objdir/$libname.def~ emximp -o $lib $output_objdir/$libname.def' archive_expsym_cmds='$ECHO "LIBRARY ${soname%$shared_ext} INITINSTANCE TERMINSTANCE" > $output_objdir/$libname.def~ $ECHO "DESCRIPTION \"$libname\"" >> $output_objdir/$libname.def~ $ECHO "DATA MULTIPLE NONSHARED" >> $output_objdir/$libname.def~ $ECHO EXPORTS >> $output_objdir/$libname.def~ prefix_cmds="$SED"~ if test EXPORTS = "`$SED 1q $export_symbols`"; then prefix_cmds="$prefix_cmds -e 1d"; fi~ prefix_cmds="$prefix_cmds -e \"s/^\(.*\)$/_\1/g\""~ cat 
$export_symbols | $prefix_cmds >> $output_objdir/$libname.def~ $CC -Zdll -Zcrtdll -o $output_objdir/$soname $libobjs $deplibs $compiler_flags $output_objdir/$libname.def~ emximp -o $lib $output_objdir/$libname.def' old_archive_From_new_cmds='emximp -o $output_objdir/${libname}_dll.a $output_objdir/$libname.def' enable_shared_with_static_runtimes=yes ;; osf3*) if test yes = "$GCC"; then allow_undefined_flag=' $wl-expect_unresolved $wl\*' archive_cmds='$CC -shared$allow_undefined_flag $libobjs $deplibs $compiler_flags $wl-soname $wl$soname `test -n "$verstring" && func_echo_all "$wl-set_version $wl$verstring"` $wl-update_registry $wl$output_objdir/so_locations -o $lib' else allow_undefined_flag=' -expect_unresolved \*' archive_cmds='$CC -shared$allow_undefined_flag $libobjs $deplibs $compiler_flags -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry $output_objdir/so_locations -o $lib' fi archive_cmds_need_lc='no' hardcode_libdir_flag_spec='$wl-rpath $wl$libdir' hardcode_libdir_separator=: ;; osf4* | osf5*) # as osf3* with the addition of -msym flag if test yes = "$GCC"; then allow_undefined_flag=' $wl-expect_unresolved $wl\*' archive_cmds='$CC -shared$allow_undefined_flag $pic_flag $libobjs $deplibs $compiler_flags $wl-msym $wl-soname $wl$soname `test -n "$verstring" && func_echo_all "$wl-set_version $wl$verstring"` $wl-update_registry $wl$output_objdir/so_locations -o $lib' hardcode_libdir_flag_spec='$wl-rpath $wl$libdir' else allow_undefined_flag=' -expect_unresolved \*' archive_cmds='$CC -shared$allow_undefined_flag $libobjs $deplibs $compiler_flags -msym -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry $output_objdir/so_locations -o $lib' archive_expsym_cmds='for i in `cat $export_symbols`; do printf "%s %s\\n" -exported_symbol "\$i" >> $lib.exp; done; printf "%s\\n" "-hidden">> $lib.exp~ $CC -shared$allow_undefined_flag $wl-input $wl$lib.exp $compiler_flags $libobjs 
$deplibs -soname $soname `test -n "$verstring" && $ECHO "-set_version $verstring"` -update_registry $output_objdir/so_locations -o $lib~$RM $lib.exp' # Both c and cxx compiler support -rpath directly hardcode_libdir_flag_spec='-rpath $libdir' fi archive_cmds_need_lc='no' hardcode_libdir_separator=: ;; solaris*) no_undefined_flag=' -z defs' if test yes = "$GCC"; then wlarc='$wl' archive_cmds='$CC -shared $pic_flag $wl-z ${wl}text $wl-h $wl$soname -o $lib $libobjs $deplibs $compiler_flags' archive_expsym_cmds='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~ $CC -shared $pic_flag $wl-z ${wl}text $wl-M $wl$lib.exp $wl-h $wl$soname -o $lib $libobjs $deplibs $compiler_flags~$RM $lib.exp' else case `$CC -V 2>&1` in *"Compilers 5.0"*) wlarc='' archive_cmds='$LD -G$allow_undefined_flag -h $soname -o $lib $libobjs $deplibs $linker_flags' archive_expsym_cmds='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~ $LD -G$allow_undefined_flag -M $lib.exp -h $soname -o $lib $libobjs $deplibs $linker_flags~$RM $lib.exp' ;; *) wlarc='$wl' archive_cmds='$CC -G$allow_undefined_flag -h $soname -o $lib $libobjs $deplibs $compiler_flags' archive_expsym_cmds='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~ $CC -G$allow_undefined_flag -M $lib.exp -h $soname -o $lib $libobjs $deplibs $compiler_flags~$RM $lib.exp' ;; esac fi hardcode_libdir_flag_spec='-R$libdir' hardcode_shlibpath_var=no case $host_os in solaris2.[0-5] | solaris2.[0-5].*) ;; *) # The compiler driver will combine and reorder linker options, # but understands '-z linker_flag'. GCC discards it without '$wl', # but is careful enough not to reorder. # Supported since Solaris 2.6 (maybe 2.5.1?) 
if test yes = "$GCC"; then whole_archive_flag_spec='$wl-z ${wl}allextract$convenience $wl-z ${wl}defaultextract' else whole_archive_flag_spec='-z allextract$convenience -z defaultextract' fi ;; esac link_all_deplibs=yes ;; sunos4*) if test sequent = "$host_vendor"; then # Use $CC to link under sequent, because it throws in some extra .o # files that make .init and .fini sections work. archive_cmds='$CC -G $wl-h $soname -o $lib $libobjs $deplibs $compiler_flags' else archive_cmds='$LD -assert pure-text -Bstatic -o $lib $libobjs $deplibs $linker_flags' fi hardcode_libdir_flag_spec='-L$libdir' hardcode_direct=yes hardcode_minus_L=yes hardcode_shlibpath_var=no ;; sysv4) case $host_vendor in sni) archive_cmds='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags' hardcode_direct=yes # is this really true??? ;; siemens) ## LD is ld it makes a PLAMLIB ## CC just makes a GrossModule. archive_cmds='$LD -G -o $lib $libobjs $deplibs $linker_flags' reload_cmds='$CC -r -o $output$reload_objs' hardcode_direct=no ;; motorola) archive_cmds='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags' hardcode_direct=no #Motorola manual says yes, but my tests say they lie ;; esac runpath_var='LD_RUN_PATH' hardcode_shlibpath_var=no ;; sysv4.3*) archive_cmds='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags' hardcode_shlibpath_var=no export_dynamic_flag_spec='-Bexport' ;; sysv4*MP*) if test -d /usr/nec; then archive_cmds='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags' hardcode_shlibpath_var=no runpath_var=LD_RUN_PATH hardcode_runpath_var=yes ld_shlibs=yes fi ;; sysv4*uw2* | sysv5OpenUNIX* | sysv5UnixWare7.[01].[10]* | unixware7* | sco3.2v5.0.[024]*) no_undefined_flag='$wl-z,text' archive_cmds_need_lc=no hardcode_shlibpath_var=no runpath_var='LD_RUN_PATH' if test yes = "$GCC"; then archive_cmds='$CC -shared $wl-h,$soname -o $lib $libobjs $deplibs $compiler_flags' archive_expsym_cmds='$CC -shared $wl-Bexport:$export_symbols $wl-h,$soname -o $lib $libobjs 
$deplibs $compiler_flags' else archive_cmds='$CC -G $wl-h,$soname -o $lib $libobjs $deplibs $compiler_flags' archive_expsym_cmds='$CC -G $wl-Bexport:$export_symbols $wl-h,$soname -o $lib $libobjs $deplibs $compiler_flags' fi ;; sysv5* | sco3.2v5* | sco5v6*) # Note: We CANNOT use -z defs as we might desire, because we do not # link with -lc, and that would cause any symbols used from libc to # always be unresolved, which means just about no library would # ever link correctly. If we're not using GNU ld we use -z text # though, which does catch some bad symbols but isn't as heavy-handed # as -z defs. no_undefined_flag='$wl-z,text' allow_undefined_flag='$wl-z,nodefs' archive_cmds_need_lc=no hardcode_shlibpath_var=no hardcode_libdir_flag_spec='$wl-R,$libdir' hardcode_libdir_separator=':' link_all_deplibs=yes export_dynamic_flag_spec='$wl-Bexport' runpath_var='LD_RUN_PATH' if test yes = "$GCC"; then archive_cmds='$CC -shared $wl-h,$soname -o $lib $libobjs $deplibs $compiler_flags' archive_expsym_cmds='$CC -shared $wl-Bexport:$export_symbols $wl-h,$soname -o $lib $libobjs $deplibs $compiler_flags' else archive_cmds='$CC -G $wl-h,$soname -o $lib $libobjs $deplibs $compiler_flags' archive_expsym_cmds='$CC -G $wl-Bexport:$export_symbols $wl-h,$soname -o $lib $libobjs $deplibs $compiler_flags' fi ;; uts4*) archive_cmds='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags' hardcode_libdir_flag_spec='-L$libdir' hardcode_shlibpath_var=no ;; *) ld_shlibs=no ;; esac if test sni = "$host_vendor"; then case $host in sysv4 | sysv4.2uw2* | sysv4.3* | sysv5*) export_dynamic_flag_spec='$wl-Blargedynsym' ;; esac fi fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ld_shlibs" >&5 $as_echo "$ld_shlibs" >&6; } test no = "$ld_shlibs" && can_build_shared=no with_gnu_ld=$with_gnu_ld # # Do we need to explicitly link libc? 
# case "x$archive_cmds_need_lc" in x|xyes) # Assume -lc should be added archive_cmds_need_lc=yes if test yes,yes = "$GCC,$enable_shared"; then case $archive_cmds in *'~'*) # FIXME: we may have to deal with multi-command sequences. ;; '$CC '*) # Test whether the compiler implicitly links with -lc since on some # systems, -lgcc has to come before -lc. If gcc already passes -lc # to ld, don't add -lc before -lgcc. { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether -lc should be explicitly linked in" >&5 $as_echo_n "checking whether -lc should be explicitly linked in... " >&6; } if ${lt_cv_archive_cmds_need_lc+:} false; then : $as_echo_n "(cached) " >&6 else $RM conftest* echo "$lt_simple_compile_test_code" > conftest.$ac_ext if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_compile\""; } >&5 (eval $ac_compile) 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; } 2>conftest.err; then soname=conftest lib=conftest libobjs=conftest.$ac_objext deplibs= wl=$lt_prog_compiler_wl pic_flag=$lt_prog_compiler_pic compiler_flags=-v linker_flags=-v verstring= output_objdir=. libname=conftest lt_save_allow_undefined_flag=$allow_undefined_flag allow_undefined_flag= if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$archive_cmds 2\>\&1 \| $GREP \" -lc \" \>/dev/null 2\>\&1\""; } >&5 (eval $archive_cmds 2\>\&1 \| $GREP \" -lc \" \>/dev/null 2\>\&1) 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? 
= $ac_status" >&5 test $ac_status = 0; } then lt_cv_archive_cmds_need_lc=no else lt_cv_archive_cmds_need_lc=yes fi allow_undefined_flag=$lt_save_allow_undefined_flag else cat conftest.err 1>&5 fi $RM conftest* fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_archive_cmds_need_lc" >&5 $as_echo "$lt_cv_archive_cmds_need_lc" >&6; } archive_cmds_need_lc=$lt_cv_archive_cmds_need_lc ;; esac fi ;; esac { $as_echo "$as_me:${as_lineno-$LINENO}: checking dynamic linker characteristics" >&5 $as_echo_n "checking dynamic linker characteristics... " >&6; } if test yes = "$GCC"; then case $host_os in darwin*) lt_awk_arg='/^libraries:/,/LR/' ;; *) lt_awk_arg='/^libraries:/' ;; esac case $host_os in mingw* | cegcc*) lt_sed_strip_eq='s|=\([A-Za-z]:\)|\1|g' ;; *) lt_sed_strip_eq='s|=/|/|g' ;; esac lt_search_path_spec=`$CC -print-search-dirs | awk $lt_awk_arg | $SED -e "s/^libraries://" -e $lt_sed_strip_eq` case $lt_search_path_spec in *\;*) # if the path contains ";" then we assume it to be the separator # otherwise default to the standard path separator (i.e. ":") - it is # assumed that no part of a normal pathname contains ";" but that should # okay in the real world where ";" in dirpaths is itself problematic. lt_search_path_spec=`$ECHO "$lt_search_path_spec" | $SED 's/;/ /g'` ;; *) lt_search_path_spec=`$ECHO "$lt_search_path_spec" | $SED "s/$PATH_SEPARATOR/ /g"` ;; esac # Ok, now we have the path, separated by spaces, we can step through it # and add multilib dir if necessary... lt_tmp_lt_search_path_spec= lt_multi_os_dir=/`$CC $CPPFLAGS $CFLAGS $LDFLAGS -print-multi-os-directory 2>/dev/null` # ...but if some path component already ends with the multilib dir we assume # that all is fine and trust -print-search-dirs as is (GCC 4.2? or newer). 
case "$lt_multi_os_dir; $lt_search_path_spec " in "/; "* | "/.; "* | "/./; "* | *"$lt_multi_os_dir "* | *"$lt_multi_os_dir/ "*) lt_multi_os_dir= ;; esac for lt_sys_path in $lt_search_path_spec; do if test -d "$lt_sys_path$lt_multi_os_dir"; then lt_tmp_lt_search_path_spec="$lt_tmp_lt_search_path_spec $lt_sys_path$lt_multi_os_dir" elif test -n "$lt_multi_os_dir"; then test -d "$lt_sys_path" && \ lt_tmp_lt_search_path_spec="$lt_tmp_lt_search_path_spec $lt_sys_path" fi done lt_search_path_spec=`$ECHO "$lt_tmp_lt_search_path_spec" | awk ' BEGIN {RS = " "; FS = "/|\n";} { lt_foo = ""; lt_count = 0; for (lt_i = NF; lt_i > 0; lt_i--) { if ($lt_i != "" && $lt_i != ".") { if ($lt_i == "..") { lt_count++; } else { if (lt_count == 0) { lt_foo = "/" $lt_i lt_foo; } else { lt_count--; } } } } if (lt_foo != "") { lt_freq[lt_foo]++; } if (lt_freq[lt_foo] == 1) { print lt_foo; } }'` # AWK program above erroneously prepends '/' to C:/dos/paths # for these hosts. case $host_os in mingw* | cegcc*) lt_search_path_spec=`$ECHO "$lt_search_path_spec" |\ $SED 's|/\([A-Za-z]:\)|\1|g'` ;; esac sys_lib_search_path_spec=`$ECHO "$lt_search_path_spec" | $lt_NL2SP` else sys_lib_search_path_spec="/lib /usr/lib /usr/local/lib" fi library_names_spec= libname_spec='lib$name' soname_spec= shrext_cmds=.so postinstall_cmds= postuninstall_cmds= finish_cmds= finish_eval= shlibpath_var= shlibpath_overrides_runpath=unknown version_type=none dynamic_linker="$host_os ld.so" sys_lib_dlsearch_path_spec="/lib /usr/lib" need_lib_prefix=unknown hardcode_into_libs=no # when you set need_version to no, make sure it does not cause -set_version # flags to be left without arguments need_version=unknown case $host_os in aix3*) version_type=linux # correct to gnu/linux during the next big refactor library_names_spec='$libname$release$shared_ext$versuffix $libname.a' shlibpath_var=LIBPATH # AIX 3 has no versioning support, so we append a major version to the name. 
soname_spec='$libname$release$shared_ext$major' ;; aix[4-9]*) version_type=linux # correct to gnu/linux during the next big refactor need_lib_prefix=no need_version=no hardcode_into_libs=yes if test ia64 = "$host_cpu"; then # AIX 5 supports IA64 library_names_spec='$libname$release$shared_ext$major $libname$release$shared_ext$versuffix $libname$shared_ext' shlibpath_var=LD_LIBRARY_PATH else # With GCC up to 2.95.x, collect2 would create an import file # for dependence libraries. The import file would start with # the line '#! .'. This would cause the generated library to # depend on '.', always an invalid library. This was fixed in # development snapshots of GCC prior to 3.0. case $host_os in aix4 | aix4.[01] | aix4.[01].*) if { echo '#if __GNUC__ > 2 || (__GNUC__ == 2 && __GNUC_MINOR__ >= 97)' echo ' yes ' echo '#endif'; } | $CC -E - | $GREP yes > /dev/null; then : else can_build_shared=no fi ;; esac # Using Import Files as archive members, it is possible to support # filename-based versioning of shared library archives on AIX. While # this would work for both with and without runtime linking, it will # prevent static linking of such archives. So we do filename-based # shared library versioning with .so extension only, which is used # when both runtime linking and shared linking is enabled. # Unfortunately, runtime linking may impact performance, so we do # not want this to be the default eventually. Also, we use the # versioned .so libs for executables only if there is the -brtl # linker flag in LDFLAGS as well, or --with-aix-soname=svr4 only. 
# To allow for filename-based versioning support, we need to create # libNAME.so.V as an archive file, containing: # *) an Import File, referring to the versioned filename of the # archive as well as the shared archive member, telling the # bitwidth (32 or 64) of that shared object, and providing the # list of exported symbols of that shared object, eventually # decorated with the 'weak' keyword # *) the shared object with the F_LOADONLY flag set, to really avoid # it being seen by the linker. # At run time we better use the real file rather than another symlink, # but for link time we create the symlink libNAME.so -> libNAME.so.V case $with_aix_soname,$aix_use_runtimelinking in # AIX (on Power*) has no versioning support, so currently we cannot hardcode correct # soname into executable. Probably we can add versioning support to # collect2, so additional links can be useful in future. aix,yes) # traditional libtool dynamic_linker='AIX unversionable lib.so' # If using run time linking (on AIX 4.2 or later) use lib.so # instead of lib.a to let people know that these are not # typical AIX shared libraries. library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' ;; aix,no) # traditional AIX only dynamic_linker='AIX lib.a(lib.so.V)' # We preserve .a as extension for shared libraries through AIX4.2 # and later when we are not doing run time linking. library_names_spec='$libname$release.a $libname.a' soname_spec='$libname$release$shared_ext$major' ;; svr4,*) # full svr4 only dynamic_linker="AIX lib.so.V($shared_archive_member_spec.o)" library_names_spec='$libname$release$shared_ext$major $libname$shared_ext' # We do not specify a path in Import Files, so LIBPATH fires. 
shlibpath_overrides_runpath=yes ;; *,yes) # both, prefer svr4 dynamic_linker="AIX lib.so.V($shared_archive_member_spec.o), lib.a(lib.so.V)" library_names_spec='$libname$release$shared_ext$major $libname$shared_ext' # unpreferred sharedlib libNAME.a needs extra handling postinstall_cmds='test -n "$linkname" || linkname="$realname"~func_stripname "" ".so" "$linkname"~$install_shared_prog "$dir/$func_stripname_result.$libext" "$destdir/$func_stripname_result.$libext"~test -z "$tstripme" || test -z "$striplib" || $striplib "$destdir/$func_stripname_result.$libext"' postuninstall_cmds='for n in $library_names $old_library; do :; done~func_stripname "" ".so" "$n"~test "$func_stripname_result" = "$n" || func_append rmfiles " $odir/$func_stripname_result.$libext"' # We do not specify a path in Import Files, so LIBPATH fires. shlibpath_overrides_runpath=yes ;; *,no) # both, prefer aix dynamic_linker="AIX lib.a(lib.so.V), lib.so.V($shared_archive_member_spec.o)" library_names_spec='$libname$release.a $libname.a' soname_spec='$libname$release$shared_ext$major' # unpreferred sharedlib libNAME.so.V and symlink libNAME.so need extra handling postinstall_cmds='test -z "$dlname" || $install_shared_prog $dir/$dlname $destdir/$dlname~test -z "$tstripme" || test -z "$striplib" || $striplib $destdir/$dlname~test -n "$linkname" || linkname=$realname~func_stripname "" ".a" "$linkname"~(cd "$destdir" && $LN_S -f $dlname $func_stripname_result.so)' postuninstall_cmds='test -z "$dlname" || func_append rmfiles " $odir/$dlname"~for n in $old_library $library_names; do :; done~func_stripname "" ".a" "$n"~func_append rmfiles " $odir/$func_stripname_result.so"' ;; esac shlibpath_var=LIBPATH fi ;; amigaos*) case $host_cpu in powerpc) # Since July 2007 AmigaOS4 officially supports .so libraries. # When compiling the executable, add -use-dynld -Lsobjs: to the compileline. 
library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' ;; m68k) library_names_spec='$libname.ixlibrary $libname.a' # Create ${libname}_ixlibrary.a entries in /sys/libs. finish_eval='for lib in `ls $libdir/*.ixlibrary 2>/dev/null`; do libname=`func_echo_all "$lib" | $SED '\''s%^.*/\([^/]*\)\.ixlibrary$%\1%'\''`; $RM /sys/libs/${libname}_ixlibrary.a; $show "cd /sys/libs && $LN_S $lib ${libname}_ixlibrary.a"; cd /sys/libs && $LN_S $lib ${libname}_ixlibrary.a || exit 1; done' ;; esac ;; beos*) library_names_spec='$libname$shared_ext' dynamic_linker="$host_os ld.so" shlibpath_var=LIBRARY_PATH ;; bsdi[45]*) version_type=linux # correct to gnu/linux during the next big refactor need_version=no library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' soname_spec='$libname$release$shared_ext$major' finish_cmds='PATH="\$PATH:/sbin" ldconfig $libdir' shlibpath_var=LD_LIBRARY_PATH sys_lib_search_path_spec="/shlib /usr/lib /usr/X11/lib /usr/contrib/lib /lib /usr/local/lib" sys_lib_dlsearch_path_spec="/shlib /usr/lib /usr/local/lib" # the default ld.so.conf also contains /usr/contrib/lib and # /usr/X11R6/lib (/usr/X11 is a link to /usr/X11R6), but let us allow # libtool to hard-code these into programs ;; cygwin* | mingw* | pw32* | cegcc*) version_type=windows shrext_cmds=.dll need_version=no need_lib_prefix=no case $GCC,$cc_basename in yes,*) # gcc library_names_spec='$libname.dll.a' # DLL is installed to $(libdir)/../bin by postinstall_cmds postinstall_cmds='base_file=`basename \$file`~ dlpath=`$SHELL 2>&1 -c '\''. $dir/'\''\$base_file'\''i; echo \$dlname'\''`~ dldir=$destdir/`dirname \$dlpath`~ test -d \$dldir || mkdir -p \$dldir~ $install_prog $dir/$dlname \$dldir/$dlname~ chmod a+x \$dldir/$dlname~ if test -n '\''$stripme'\'' && test -n '\''$striplib'\''; then eval '\''$striplib \$dldir/$dlname'\'' || exit \$?; fi' postuninstall_cmds='dldll=`$SHELL 2>&1 -c '\''. 
$file; echo \$dlname'\''`~ dlpath=$dir/\$dldll~ $RM \$dlpath' shlibpath_overrides_runpath=yes case $host_os in cygwin*) # Cygwin DLLs use 'cyg' prefix rather than 'lib' soname_spec='`echo $libname | sed -e 's/^lib/cyg/'``echo $release | $SED -e 's/[.]/-/g'`$versuffix$shared_ext' sys_lib_search_path_spec="$sys_lib_search_path_spec /usr/lib/w32api" ;; mingw* | cegcc*) # MinGW DLLs use traditional 'lib' prefix soname_spec='$libname`echo $release | $SED -e 's/[.]/-/g'`$versuffix$shared_ext' ;; pw32*) # pw32 DLLs use 'pw' prefix rather than 'lib' library_names_spec='`echo $libname | sed -e 's/^lib/pw/'``echo $release | $SED -e 's/[.]/-/g'`$versuffix$shared_ext' ;; esac dynamic_linker='Win32 ld.exe' ;; *,cl*) # Native MSVC libname_spec='$name' soname_spec='$libname`echo $release | $SED -e 's/[.]/-/g'`$versuffix$shared_ext' library_names_spec='$libname.dll.lib' case $build_os in mingw*) sys_lib_search_path_spec= lt_save_ifs=$IFS IFS=';' for lt_path in $LIB do IFS=$lt_save_ifs # Let DOS variable expansion print the short 8.3 style file name. lt_path=`cd "$lt_path" 2>/dev/null && cmd //C "for %i in (".") do @echo %~si"` sys_lib_search_path_spec="$sys_lib_search_path_spec $lt_path" done IFS=$lt_save_ifs # Convert to MSYS style. sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | sed -e 's|\\\\|/|g' -e 's| \\([a-zA-Z]\\):| /\\1|g' -e 's|^ ||'` ;; cygwin*) # Convert to unix form, then to dos form, then back to unix form # but this time dos style (no spaces!) so that the unix form looks # like /cygdrive/c/PROGRA~1:/cygdr... sys_lib_search_path_spec=`cygpath --path --unix "$LIB"` sys_lib_search_path_spec=`cygpath --path --dos "$sys_lib_search_path_spec" 2>/dev/null` sys_lib_search_path_spec=`cygpath --path --unix "$sys_lib_search_path_spec" | $SED -e "s/$PATH_SEPARATOR/ /g"` ;; *) sys_lib_search_path_spec=$LIB if $ECHO "$sys_lib_search_path_spec" | $GREP ';[c-zC-Z]:/' >/dev/null; then # It is most probably a Windows format PATH. 
sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | $SED -e 's/;/ /g'` else sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | $SED -e "s/$PATH_SEPARATOR/ /g"` fi # FIXME: find the short name or the path components, as spaces are # common. (e.g. "Program Files" -> "PROGRA~1") ;; esac # DLL is installed to $(libdir)/../bin by postinstall_cmds postinstall_cmds='base_file=`basename \$file`~ dlpath=`$SHELL 2>&1 -c '\''. $dir/'\''\$base_file'\''i; echo \$dlname'\''`~ dldir=$destdir/`dirname \$dlpath`~ test -d \$dldir || mkdir -p \$dldir~ $install_prog $dir/$dlname \$dldir/$dlname' postuninstall_cmds='dldll=`$SHELL 2>&1 -c '\''. $file; echo \$dlname'\''`~ dlpath=$dir/\$dldll~ $RM \$dlpath' shlibpath_overrides_runpath=yes dynamic_linker='Win32 link.exe' ;; *) # Assume MSVC wrapper library_names_spec='$libname`echo $release | $SED -e 's/[.]/-/g'`$versuffix$shared_ext $libname.lib' dynamic_linker='Win32 ld.exe' ;; esac # FIXME: first we should search . and the directory the executable is in shlibpath_var=PATH ;; darwin* | rhapsody*) dynamic_linker="$host_os dyld" version_type=darwin need_lib_prefix=no need_version=no library_names_spec='$libname$release$major$shared_ext $libname$shared_ext' soname_spec='$libname$release$major$shared_ext' shlibpath_overrides_runpath=yes shlibpath_var=DYLD_LIBRARY_PATH shrext_cmds='`test .$module = .yes && echo .so || echo .dylib`' sys_lib_search_path_spec="$sys_lib_search_path_spec /usr/local/lib" sys_lib_dlsearch_path_spec='/usr/local/lib /lib /usr/lib' ;; dgux*) version_type=linux # correct to gnu/linux during the next big refactor need_lib_prefix=no need_version=no library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' soname_spec='$libname$release$shared_ext$major' shlibpath_var=LD_LIBRARY_PATH ;; freebsd* | dragonfly*) # DragonFly does not have aout. When/if they implement a new # versioning mechanism, adjust this. 
if test -x /usr/bin/objformat; then objformat=`/usr/bin/objformat` else case $host_os in freebsd[23].*) objformat=aout ;; *) objformat=elf ;; esac fi version_type=freebsd-$objformat case $version_type in freebsd-elf*) library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' soname_spec='$libname$release$shared_ext$major' need_version=no need_lib_prefix=no ;; freebsd-*) library_names_spec='$libname$release$shared_ext$versuffix $libname$shared_ext$versuffix' need_version=yes ;; esac shlibpath_var=LD_LIBRARY_PATH case $host_os in freebsd2.*) shlibpath_overrides_runpath=yes ;; freebsd3.[01]* | freebsdelf3.[01]*) shlibpath_overrides_runpath=yes hardcode_into_libs=yes ;; freebsd3.[2-9]* | freebsdelf3.[2-9]* | \ freebsd4.[0-5] | freebsdelf4.[0-5] | freebsd4.1.1 | freebsdelf4.1.1) shlibpath_overrides_runpath=no hardcode_into_libs=yes ;; *) # from 4.6 on, and DragonFly shlibpath_overrides_runpath=yes hardcode_into_libs=yes ;; esac ;; haiku*) version_type=linux # correct to gnu/linux during the next big refactor need_lib_prefix=no need_version=no dynamic_linker="$host_os runtime_loader" library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' soname_spec='$libname$release$shared_ext$major' shlibpath_var=LIBRARY_PATH shlibpath_overrides_runpath=no sys_lib_dlsearch_path_spec='/boot/home/config/lib /boot/common/lib /boot/system/lib' hardcode_into_libs=yes ;; hpux9* | hpux10* | hpux11*) # Give a soname corresponding to the major version so that dld.sl refuses to # link against other versions. version_type=sunos need_lib_prefix=no need_version=no case $host_cpu in ia64*) shrext_cmds='.so' hardcode_into_libs=yes dynamic_linker="$host_os dld.so" shlibpath_var=LD_LIBRARY_PATH shlibpath_overrides_runpath=yes # Unless +noenvvar is specified. 
library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' soname_spec='$libname$release$shared_ext$major' if test 32 = "$HPUX_IA64_MODE"; then sys_lib_search_path_spec="/usr/lib/hpux32 /usr/local/lib/hpux32 /usr/local/lib" sys_lib_dlsearch_path_spec=/usr/lib/hpux32 else sys_lib_search_path_spec="/usr/lib/hpux64 /usr/local/lib/hpux64" sys_lib_dlsearch_path_spec=/usr/lib/hpux64 fi ;; hppa*64*) shrext_cmds='.sl' hardcode_into_libs=yes dynamic_linker="$host_os dld.sl" shlibpath_var=LD_LIBRARY_PATH # How should we handle SHLIB_PATH shlibpath_overrides_runpath=yes # Unless +noenvvar is specified. library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' soname_spec='$libname$release$shared_ext$major' sys_lib_search_path_spec="/usr/lib/pa20_64 /usr/ccs/lib/pa20_64" sys_lib_dlsearch_path_spec=$sys_lib_search_path_spec ;; *) shrext_cmds='.sl' dynamic_linker="$host_os dld.sl" shlibpath_var=SHLIB_PATH shlibpath_overrides_runpath=no # +s is required to enable SHLIB_PATH library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' soname_spec='$libname$release$shared_ext$major' ;; esac # HP-UX runs *really* slowly unless shared libraries are mode 555, ... 
postinstall_cmds='chmod 555 $lib' # or fails outright, so override atomically: install_override_mode=555 ;; interix[3-9]*) version_type=linux # correct to gnu/linux during the next big refactor need_lib_prefix=no need_version=no library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' soname_spec='$libname$release$shared_ext$major' dynamic_linker='Interix 3.x ld.so.1 (PE, like ELF)' shlibpath_var=LD_LIBRARY_PATH shlibpath_overrides_runpath=no hardcode_into_libs=yes ;; irix5* | irix6* | nonstopux*) case $host_os in nonstopux*) version_type=nonstopux ;; *) if test yes = "$lt_cv_prog_gnu_ld"; then version_type=linux # correct to gnu/linux during the next big refactor else version_type=irix fi ;; esac need_lib_prefix=no need_version=no soname_spec='$libname$release$shared_ext$major' library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$release$shared_ext $libname$shared_ext' case $host_os in irix5* | nonstopux*) libsuff= shlibsuff= ;; *) case $LD in # libtool.m4 will add one of these switches to LD *-32|*"-32 "|*-melf32bsmip|*"-melf32bsmip ") libsuff= shlibsuff= libmagic=32-bit;; *-n32|*"-n32 "|*-melf32bmipn32|*"-melf32bmipn32 ") libsuff=32 shlibsuff=N32 libmagic=N32;; *-64|*"-64 "|*-melf64bmip|*"-melf64bmip ") libsuff=64 shlibsuff=64 libmagic=64-bit;; *) libsuff= shlibsuff= libmagic=never-match;; esac ;; esac shlibpath_var=LD_LIBRARY${shlibsuff}_PATH shlibpath_overrides_runpath=no sys_lib_search_path_spec="/usr/lib$libsuff /lib$libsuff /usr/local/lib$libsuff" sys_lib_dlsearch_path_spec="/usr/lib$libsuff /lib$libsuff" hardcode_into_libs=yes ;; # No shared lib support for Linux oldld, aout, or coff. linux*oldld* | linux*aout* | linux*coff*) dynamic_linker=no ;; linux*android*) version_type=none # Android doesn't support versioned libraries. 
need_lib_prefix=no need_version=no library_names_spec='$libname$release$shared_ext' soname_spec='$libname$release$shared_ext' finish_cmds= shlibpath_var=LD_LIBRARY_PATH shlibpath_overrides_runpath=yes # This implies no fast_install, which is unacceptable. # Some rework will be needed to allow for fast_install # before this can be enabled. hardcode_into_libs=yes dynamic_linker='Android linker' # Don't embed -rpath directories since the linker doesn't support them. hardcode_libdir_flag_spec='-L$libdir' ;; # This must be glibc/ELF. linux* | k*bsd*-gnu | kopensolaris*-gnu | gnu*) version_type=linux # correct to gnu/linux during the next big refactor need_lib_prefix=no need_version=no library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' soname_spec='$libname$release$shared_ext$major' finish_cmds='PATH="\$PATH:/sbin" ldconfig -n $libdir' shlibpath_var=LD_LIBRARY_PATH shlibpath_overrides_runpath=no # Some binutils ld are patched to set DT_RUNPATH if ${lt_cv_shlibpath_overrides_runpath+:} false; then : $as_echo_n "(cached) " >&6 else lt_cv_shlibpath_overrides_runpath=no save_LDFLAGS=$LDFLAGS save_libdir=$libdir eval "libdir=/foo; wl=\"$lt_prog_compiler_wl\"; \ LDFLAGS=\"\$LDFLAGS $hardcode_libdir_flag_spec\"" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ int main () { ; return 0; } _ACEOF if ac_fn_c_try_link "$LINENO"; then : if ($OBJDUMP -p conftest$ac_exeext) 2>/dev/null | grep "RUNPATH.*$libdir" >/dev/null; then : lt_cv_shlibpath_overrides_runpath=yes fi fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext LDFLAGS=$save_LDFLAGS libdir=$save_libdir fi shlibpath_overrides_runpath=$lt_cv_shlibpath_overrides_runpath # This implies no fast_install, which is unacceptable. # Some rework will be needed to allow for fast_install # before this can be enabled. 
hardcode_into_libs=yes # Ideally, we could use ldconfig to report *all* directories which are # searched for libraries, however this is still not possible. Aside from not # being certain /sbin/ldconfig is available, command # 'ldconfig -N -X -v | grep ^/' on 64bit Fedora does not report /usr/lib64, # even though it is searched at run-time. Try to do the best guess by # appending ld.so.conf contents (and includes) to the search path. if test -f /etc/ld.so.conf; then lt_ld_extra=`awk '/^include / { system(sprintf("cd /etc; cat %s 2>/dev/null", \$2)); skip = 1; } { if (!skip) print \$0; skip = 0; }' < /etc/ld.so.conf | $SED -e 's/#.*//;/^[ ]*hwcap[ ]/d;s/[:, ]/ /g;s/=[^=]*$//;s/=[^= ]* / /g;s/"//g;/^$/d' | tr '\n' ' '` sys_lib_dlsearch_path_spec="/lib /usr/lib $lt_ld_extra" fi # We used to test for /lib/ld.so.1 and disable shared libraries on # powerpc, because MkLinux only supported shared libraries with the # GNU dynamic linker. Since this was broken with cross compilers, # most powerpc-linux boxes support dynamic linking these days and # people can always --disable-shared, the test was removed, and we # assume the GNU/Linux dynamic linker is in use. 
dynamic_linker='GNU/Linux ld.so' ;; netbsd*) version_type=sunos need_lib_prefix=no need_version=no if echo __ELF__ | $CC -E - | $GREP __ELF__ >/dev/null; then library_names_spec='$libname$release$shared_ext$versuffix $libname$shared_ext$versuffix' finish_cmds='PATH="\$PATH:/sbin" ldconfig -m $libdir' dynamic_linker='NetBSD (a.out) ld.so' else library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' soname_spec='$libname$release$shared_ext$major' dynamic_linker='NetBSD ld.elf_so' fi shlibpath_var=LD_LIBRARY_PATH shlibpath_overrides_runpath=yes hardcode_into_libs=yes ;; newsos6) version_type=linux # correct to gnu/linux during the next big refactor library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' shlibpath_var=LD_LIBRARY_PATH shlibpath_overrides_runpath=yes ;; *nto* | *qnx*) version_type=qnx need_lib_prefix=no need_version=no library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' soname_spec='$libname$release$shared_ext$major' shlibpath_var=LD_LIBRARY_PATH shlibpath_overrides_runpath=no hardcode_into_libs=yes dynamic_linker='ldqnx.so' ;; openbsd* | bitrig*) version_type=sunos sys_lib_dlsearch_path_spec=/usr/lib need_lib_prefix=no if test -z "`echo __ELF__ | $CC -E - | $GREP __ELF__`"; then need_version=no else need_version=yes fi library_names_spec='$libname$release$shared_ext$versuffix $libname$shared_ext$versuffix' finish_cmds='PATH="\$PATH:/sbin" ldconfig -m $libdir' shlibpath_var=LD_LIBRARY_PATH shlibpath_overrides_runpath=yes ;; os2*) libname_spec='$name' version_type=windows shrext_cmds=.dll need_version=no need_lib_prefix=no # OS/2 can only load a DLL with a base name of 8 characters or less. soname_spec='`test -n "$os2dllname" && libname="$os2dllname"; v=$($ECHO $release$versuffix | tr -d .-); n=$($ECHO $libname | cut -b -$((8 - ${#v})) | tr . 
_); $ECHO $n$v`$shared_ext' library_names_spec='${libname}_dll.$libext' dynamic_linker='OS/2 ld.exe' shlibpath_var=BEGINLIBPATH sys_lib_search_path_spec="/lib /usr/lib /usr/local/lib" sys_lib_dlsearch_path_spec=$sys_lib_search_path_spec postinstall_cmds='base_file=`basename \$file`~ dlpath=`$SHELL 2>&1 -c '\''. $dir/'\''\$base_file'\''i; $ECHO \$dlname'\''`~ dldir=$destdir/`dirname \$dlpath`~ test -d \$dldir || mkdir -p \$dldir~ $install_prog $dir/$dlname \$dldir/$dlname~ chmod a+x \$dldir/$dlname~ if test -n '\''$stripme'\'' && test -n '\''$striplib'\''; then eval '\''$striplib \$dldir/$dlname'\'' || exit \$?; fi' postuninstall_cmds='dldll=`$SHELL 2>&1 -c '\''. $file; $ECHO \$dlname'\''`~ dlpath=$dir/\$dldll~ $RM \$dlpath' ;; osf3* | osf4* | osf5*) version_type=osf need_lib_prefix=no need_version=no soname_spec='$libname$release$shared_ext$major' library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' shlibpath_var=LD_LIBRARY_PATH sys_lib_search_path_spec="/usr/shlib /usr/ccs/lib /usr/lib/cmplrs/cc /usr/lib /usr/local/lib /var/shlib" sys_lib_dlsearch_path_spec=$sys_lib_search_path_spec ;; rdos*) dynamic_linker=no ;; solaris*) version_type=linux # correct to gnu/linux during the next big refactor need_lib_prefix=no need_version=no library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' soname_spec='$libname$release$shared_ext$major' shlibpath_var=LD_LIBRARY_PATH shlibpath_overrides_runpath=yes hardcode_into_libs=yes # ldd complains unless libraries are executable postinstall_cmds='chmod +x $lib' ;; sunos4*) version_type=sunos library_names_spec='$libname$release$shared_ext$versuffix $libname$shared_ext$versuffix' finish_cmds='PATH="\$PATH:/usr/etc" ldconfig $libdir' shlibpath_var=LD_LIBRARY_PATH shlibpath_overrides_runpath=yes if test yes = "$with_gnu_ld"; then need_lib_prefix=no fi need_version=yes ;; sysv4 | sysv4.3*) version_type=linux # correct to 
gnu/linux during the next big refactor library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' soname_spec='$libname$release$shared_ext$major' shlibpath_var=LD_LIBRARY_PATH case $host_vendor in sni) shlibpath_overrides_runpath=no need_lib_prefix=no runpath_var=LD_RUN_PATH ;; siemens) need_lib_prefix=no ;; motorola) need_lib_prefix=no need_version=no shlibpath_overrides_runpath=no sys_lib_search_path_spec='/lib /usr/lib /usr/ccs/lib' ;; esac ;; sysv4*MP*) if test -d /usr/nec; then version_type=linux # correct to gnu/linux during the next big refactor library_names_spec='$libname$shared_ext.$versuffix $libname$shared_ext.$major $libname$shared_ext' soname_spec='$libname$shared_ext.$major' shlibpath_var=LD_LIBRARY_PATH fi ;; sysv5* | sco3.2v5* | sco5v6* | unixware* | OpenUNIX* | sysv4*uw2*) version_type=sco need_lib_prefix=no need_version=no library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext $libname$shared_ext' soname_spec='$libname$release$shared_ext$major' shlibpath_var=LD_LIBRARY_PATH shlibpath_overrides_runpath=yes hardcode_into_libs=yes if test yes = "$with_gnu_ld"; then sys_lib_search_path_spec='/usr/local/lib /usr/gnu/lib /usr/ccs/lib /usr/lib /lib' else sys_lib_search_path_spec='/usr/ccs/lib /usr/lib' case $host_os in sco3.2v5*) sys_lib_search_path_spec="$sys_lib_search_path_spec /lib" ;; esac fi sys_lib_dlsearch_path_spec='/usr/lib' ;; tpf*) # TPF is a cross-target only. Preferred cross-host = GNU/Linux. 
version_type=linux # correct to gnu/linux during the next big refactor need_lib_prefix=no need_version=no library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' shlibpath_var=LD_LIBRARY_PATH shlibpath_overrides_runpath=no hardcode_into_libs=yes ;; uts4*) version_type=linux # correct to gnu/linux during the next big refactor library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' soname_spec='$libname$release$shared_ext$major' shlibpath_var=LD_LIBRARY_PATH ;; *) dynamic_linker=no ;; esac { $as_echo "$as_me:${as_lineno-$LINENO}: result: $dynamic_linker" >&5 $as_echo "$dynamic_linker" >&6; } test no = "$dynamic_linker" && can_build_shared=no variables_saved_for_relink="PATH $shlibpath_var $runpath_var" if test yes = "$GCC"; then variables_saved_for_relink="$variables_saved_for_relink GCC_EXEC_PREFIX COMPILER_PATH LIBRARY_PATH" fi if test set = "${lt_cv_sys_lib_search_path_spec+set}"; then sys_lib_search_path_spec=$lt_cv_sys_lib_search_path_spec fi if test set = "${lt_cv_sys_lib_dlsearch_path_spec+set}"; then sys_lib_dlsearch_path_spec=$lt_cv_sys_lib_dlsearch_path_spec fi # remember unaugmented sys_lib_dlsearch_path content for libtool script decls... configure_time_dlsearch_path=$sys_lib_dlsearch_path_spec # ... but it needs LT_SYS_LIBRARY_PATH munging for other configure-time code func_munge_path_list sys_lib_dlsearch_path_spec "$LT_SYS_LIBRARY_PATH" # to be used as default LT_SYS_LIBRARY_PATH value in generated libtool configure_time_lt_sys_library_path=$LT_SYS_LIBRARY_PATH { $as_echo "$as_me:${as_lineno-$LINENO}: checking how to hardcode library paths into programs" >&5 $as_echo_n "checking how to hardcode library paths into programs... " >&6; } hardcode_action= if test -n "$hardcode_libdir_flag_spec" || test -n "$runpath_var" || test yes = "$hardcode_automatic"; then # We can hardcode non-existent directories. 
if test no != "$hardcode_direct" && # If the only mechanism to avoid hardcoding is shlibpath_var, we # have to relink, otherwise we might link with an installed library # when we should be linking with a yet-to-be-installed one ## test no != "$_LT_TAGVAR(hardcode_shlibpath_var, )" && test no != "$hardcode_minus_L"; then # Linking always hardcodes the temporary library directory. hardcode_action=relink else # We can link without hardcoding, and we can hardcode nonexisting dirs. hardcode_action=immediate fi else # We cannot hardcode anything, or else we can only hardcode existing # directories. hardcode_action=unsupported fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $hardcode_action" >&5 $as_echo "$hardcode_action" >&6; } if test relink = "$hardcode_action" || test yes = "$inherit_rpath"; then # Fast installation is not supported enable_fast_install=no elif test yes = "$shlibpath_overrides_runpath" || test no = "$enable_shared"; then # Fast installation is not necessary enable_fast_install=needless fi if test yes != "$enable_dlopen"; then enable_dlopen=unknown enable_dlopen_self=unknown enable_dlopen_self_static=unknown else lt_cv_dlopen=no lt_cv_dlopen_libs= case $host_os in beos*) lt_cv_dlopen=load_add_on lt_cv_dlopen_libs= lt_cv_dlopen_self=yes ;; mingw* | pw32* | cegcc*) lt_cv_dlopen=LoadLibrary lt_cv_dlopen_libs= ;; cygwin*) lt_cv_dlopen=dlopen lt_cv_dlopen_libs= ;; darwin*) # if libdl is installed we need to link against it { $as_echo "$as_me:${as_lineno-$LINENO}: checking for dlopen in -ldl" >&5 $as_echo_n "checking for dlopen in -ldl... " >&6; } if ${ac_cv_lib_dl_dlopen+:} false; then : $as_echo_n "(cached) " >&6 else ac_check_lib_save_LIBS=$LIBS LIBS="-ldl $LIBS" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ /* Override any GCC internal prototype to avoid an error. Use char because int might match the return type of a GCC builtin and then its argument prototype would still apply. 
*/ #ifdef __cplusplus extern "C" #endif char dlopen (); int main () { return dlopen (); ; return 0; } _ACEOF if ac_fn_c_try_link "$LINENO"; then : ac_cv_lib_dl_dlopen=yes else ac_cv_lib_dl_dlopen=no fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext LIBS=$ac_check_lib_save_LIBS fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_lib_dl_dlopen" >&5 $as_echo "$ac_cv_lib_dl_dlopen" >&6; } if test "x$ac_cv_lib_dl_dlopen" = xyes; then : lt_cv_dlopen=dlopen lt_cv_dlopen_libs=-ldl else lt_cv_dlopen=dyld lt_cv_dlopen_libs= lt_cv_dlopen_self=yes fi ;; tpf*) # Don't try to run any link tests for TPF. We know it's impossible # because TPF is a cross-compiler, and we know how we open DSOs. lt_cv_dlopen=dlopen lt_cv_dlopen_libs= lt_cv_dlopen_self=no ;; *) ac_fn_c_check_func "$LINENO" "shl_load" "ac_cv_func_shl_load" if test "x$ac_cv_func_shl_load" = xyes; then : lt_cv_dlopen=shl_load else { $as_echo "$as_me:${as_lineno-$LINENO}: checking for shl_load in -ldld" >&5 $as_echo_n "checking for shl_load in -ldld... " >&6; } if ${ac_cv_lib_dld_shl_load+:} false; then : $as_echo_n "(cached) " >&6 else ac_check_lib_save_LIBS=$LIBS LIBS="-ldld $LIBS" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ /* Override any GCC internal prototype to avoid an error. Use char because int might match the return type of a GCC builtin and then its argument prototype would still apply. 
*/ #ifdef __cplusplus extern "C" #endif char shl_load (); int main () { return shl_load (); ; return 0; } _ACEOF if ac_fn_c_try_link "$LINENO"; then : ac_cv_lib_dld_shl_load=yes else ac_cv_lib_dld_shl_load=no fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext LIBS=$ac_check_lib_save_LIBS fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_lib_dld_shl_load" >&5 $as_echo "$ac_cv_lib_dld_shl_load" >&6; } if test "x$ac_cv_lib_dld_shl_load" = xyes; then : lt_cv_dlopen=shl_load lt_cv_dlopen_libs=-ldld else ac_fn_c_check_func "$LINENO" "dlopen" "ac_cv_func_dlopen" if test "x$ac_cv_func_dlopen" = xyes; then : lt_cv_dlopen=dlopen else { $as_echo "$as_me:${as_lineno-$LINENO}: checking for dlopen in -ldl" >&5 $as_echo_n "checking for dlopen in -ldl... " >&6; } if ${ac_cv_lib_dl_dlopen+:} false; then : $as_echo_n "(cached) " >&6 else ac_check_lib_save_LIBS=$LIBS LIBS="-ldl $LIBS" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ /* Override any GCC internal prototype to avoid an error. Use char because int might match the return type of a GCC builtin and then its argument prototype would still apply. */ #ifdef __cplusplus extern "C" #endif char dlopen (); int main () { return dlopen (); ; return 0; } _ACEOF if ac_fn_c_try_link "$LINENO"; then : ac_cv_lib_dl_dlopen=yes else ac_cv_lib_dl_dlopen=no fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext LIBS=$ac_check_lib_save_LIBS fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_lib_dl_dlopen" >&5 $as_echo "$ac_cv_lib_dl_dlopen" >&6; } if test "x$ac_cv_lib_dl_dlopen" = xyes; then : lt_cv_dlopen=dlopen lt_cv_dlopen_libs=-ldl else { $as_echo "$as_me:${as_lineno-$LINENO}: checking for dlopen in -lsvld" >&5 $as_echo_n "checking for dlopen in -lsvld... 
" >&6; } if ${ac_cv_lib_svld_dlopen+:} false; then : $as_echo_n "(cached) " >&6 else ac_check_lib_save_LIBS=$LIBS LIBS="-lsvld $LIBS" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ /* Override any GCC internal prototype to avoid an error. Use char because int might match the return type of a GCC builtin and then its argument prototype would still apply. */ #ifdef __cplusplus extern "C" #endif char dlopen (); int main () { return dlopen (); ; return 0; } _ACEOF if ac_fn_c_try_link "$LINENO"; then : ac_cv_lib_svld_dlopen=yes else ac_cv_lib_svld_dlopen=no fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext LIBS=$ac_check_lib_save_LIBS fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_lib_svld_dlopen" >&5 $as_echo "$ac_cv_lib_svld_dlopen" >&6; } if test "x$ac_cv_lib_svld_dlopen" = xyes; then : lt_cv_dlopen=dlopen lt_cv_dlopen_libs=-lsvld else { $as_echo "$as_me:${as_lineno-$LINENO}: checking for dld_link in -ldld" >&5 $as_echo_n "checking for dld_link in -ldld... " >&6; } if ${ac_cv_lib_dld_dld_link+:} false; then : $as_echo_n "(cached) " >&6 else ac_check_lib_save_LIBS=$LIBS LIBS="-ldld $LIBS" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ /* Override any GCC internal prototype to avoid an error. Use char because int might match the return type of a GCC builtin and then its argument prototype would still apply. 
*/ #ifdef __cplusplus extern "C" #endif char dld_link (); int main () { return dld_link (); ; return 0; } _ACEOF if ac_fn_c_try_link "$LINENO"; then : ac_cv_lib_dld_dld_link=yes else ac_cv_lib_dld_dld_link=no fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext LIBS=$ac_check_lib_save_LIBS fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_lib_dld_dld_link" >&5 $as_echo "$ac_cv_lib_dld_dld_link" >&6; } if test "x$ac_cv_lib_dld_dld_link" = xyes; then : lt_cv_dlopen=dld_link lt_cv_dlopen_libs=-ldld fi fi fi fi fi fi ;; esac if test no = "$lt_cv_dlopen"; then enable_dlopen=no else enable_dlopen=yes fi case $lt_cv_dlopen in dlopen) save_CPPFLAGS=$CPPFLAGS test yes = "$ac_cv_header_dlfcn_h" && CPPFLAGS="$CPPFLAGS -DHAVE_DLFCN_H" save_LDFLAGS=$LDFLAGS wl=$lt_prog_compiler_wl eval LDFLAGS=\"\$LDFLAGS $export_dynamic_flag_spec\" save_LIBS=$LIBS LIBS="$lt_cv_dlopen_libs $LIBS" { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether a program can dlopen itself" >&5 $as_echo_n "checking whether a program can dlopen itself... " >&6; } if ${lt_cv_dlopen_self+:} false; then : $as_echo_n "(cached) " >&6 else if test yes = "$cross_compiling"; then : lt_cv_dlopen_self=cross else lt_dlunknown=0; lt_dlno_uscore=1; lt_dlneed_uscore=2 lt_status=$lt_dlunknown cat > conftest.$ac_ext <<_LT_EOF #line $LINENO "configure" #include "confdefs.h" #if HAVE_DLFCN_H #include #endif #include #ifdef RTLD_GLOBAL # define LT_DLGLOBAL RTLD_GLOBAL #else # ifdef DL_GLOBAL # define LT_DLGLOBAL DL_GLOBAL # else # define LT_DLGLOBAL 0 # endif #endif /* We may have to define LT_DLLAZY_OR_NOW in the command line if we find out it does not work in some platform. 
*/ #ifndef LT_DLLAZY_OR_NOW # ifdef RTLD_LAZY # define LT_DLLAZY_OR_NOW RTLD_LAZY # else # ifdef DL_LAZY # define LT_DLLAZY_OR_NOW DL_LAZY # else # ifdef RTLD_NOW # define LT_DLLAZY_OR_NOW RTLD_NOW # else # ifdef DL_NOW # define LT_DLLAZY_OR_NOW DL_NOW # else # define LT_DLLAZY_OR_NOW 0 # endif # endif # endif # endif #endif /* When -fvisibility=hidden is used, assume the code has been annotated correspondingly for the symbols needed. */ #if defined __GNUC__ && (((__GNUC__ == 3) && (__GNUC_MINOR__ >= 3)) || (__GNUC__ > 3)) int fnord () __attribute__((visibility("default"))); #endif int fnord () { return 42; } int main () { void *self = dlopen (0, LT_DLGLOBAL|LT_DLLAZY_OR_NOW); int status = $lt_dlunknown; if (self) { if (dlsym (self,"fnord")) status = $lt_dlno_uscore; else { if (dlsym( self,"_fnord")) status = $lt_dlneed_uscore; else puts (dlerror ()); } /* dlclose (self); */ } else puts (dlerror ()); return status; } _LT_EOF if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_link\""; } >&5 (eval $ac_link) 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; } && test -s "conftest$ac_exeext" 2>/dev/null; then (./conftest; exit; ) >&5 2>/dev/null lt_status=$? case x$lt_status in x$lt_dlno_uscore) lt_cv_dlopen_self=yes ;; x$lt_dlneed_uscore) lt_cv_dlopen_self=yes ;; x$lt_dlunknown|x*) lt_cv_dlopen_self=no ;; esac else : # compilation failed lt_cv_dlopen_self=no fi fi rm -fr conftest* fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_dlopen_self" >&5 $as_echo "$lt_cv_dlopen_self" >&6; } if test yes = "$lt_cv_dlopen_self"; then wl=$lt_prog_compiler_wl eval LDFLAGS=\"\$LDFLAGS $lt_prog_compiler_static\" { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether a statically linked program can dlopen itself" >&5 $as_echo_n "checking whether a statically linked program can dlopen itself... 
" >&6; } if ${lt_cv_dlopen_self_static+:} false; then : $as_echo_n "(cached) " >&6 else if test yes = "$cross_compiling"; then : lt_cv_dlopen_self_static=cross else lt_dlunknown=0; lt_dlno_uscore=1; lt_dlneed_uscore=2 lt_status=$lt_dlunknown cat > conftest.$ac_ext <<_LT_EOF #line $LINENO "configure" #include "confdefs.h" #if HAVE_DLFCN_H #include #endif #include #ifdef RTLD_GLOBAL # define LT_DLGLOBAL RTLD_GLOBAL #else # ifdef DL_GLOBAL # define LT_DLGLOBAL DL_GLOBAL # else # define LT_DLGLOBAL 0 # endif #endif /* We may have to define LT_DLLAZY_OR_NOW in the command line if we find out it does not work in some platform. */ #ifndef LT_DLLAZY_OR_NOW # ifdef RTLD_LAZY # define LT_DLLAZY_OR_NOW RTLD_LAZY # else # ifdef DL_LAZY # define LT_DLLAZY_OR_NOW DL_LAZY # else # ifdef RTLD_NOW # define LT_DLLAZY_OR_NOW RTLD_NOW # else # ifdef DL_NOW # define LT_DLLAZY_OR_NOW DL_NOW # else # define LT_DLLAZY_OR_NOW 0 # endif # endif # endif # endif #endif /* When -fvisibility=hidden is used, assume the code has been annotated correspondingly for the symbols needed. */ #if defined __GNUC__ && (((__GNUC__ == 3) && (__GNUC_MINOR__ >= 3)) || (__GNUC__ > 3)) int fnord () __attribute__((visibility("default"))); #endif int fnord () { return 42; } int main () { void *self = dlopen (0, LT_DLGLOBAL|LT_DLLAZY_OR_NOW); int status = $lt_dlunknown; if (self) { if (dlsym (self,"fnord")) status = $lt_dlno_uscore; else { if (dlsym( self,"_fnord")) status = $lt_dlneed_uscore; else puts (dlerror ()); } /* dlclose (self); */ } else puts (dlerror ()); return status; } _LT_EOF if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_link\""; } >&5 (eval $ac_link) 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; } && test -s "conftest$ac_exeext" 2>/dev/null; then (./conftest; exit; ) >&5 2>/dev/null lt_status=$? 
case x$lt_status in x$lt_dlno_uscore) lt_cv_dlopen_self_static=yes ;; x$lt_dlneed_uscore) lt_cv_dlopen_self_static=yes ;; x$lt_dlunknown|x*) lt_cv_dlopen_self_static=no ;; esac else : # compilation failed lt_cv_dlopen_self_static=no fi fi rm -fr conftest* fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_dlopen_self_static" >&5 $as_echo "$lt_cv_dlopen_self_static" >&6; } fi CPPFLAGS=$save_CPPFLAGS LDFLAGS=$save_LDFLAGS LIBS=$save_LIBS ;; esac case $lt_cv_dlopen_self in yes|no) enable_dlopen_self=$lt_cv_dlopen_self ;; *) enable_dlopen_self=unknown ;; esac case $lt_cv_dlopen_self_static in yes|no) enable_dlopen_self_static=$lt_cv_dlopen_self_static ;; *) enable_dlopen_self_static=unknown ;; esac fi striplib= old_striplib= { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether stripping libraries is possible" >&5 $as_echo_n "checking whether stripping libraries is possible... " >&6; } if test -n "$STRIP" && $STRIP -V 2>&1 | $GREP "GNU strip" >/dev/null; then test -z "$old_striplib" && old_striplib="$STRIP --strip-debug" test -z "$striplib" && striplib="$STRIP --strip-unneeded" { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 $as_echo "yes" >&6; } else # FIXME - insert some real tests, host_os isn't really good enough case $host_os in darwin*) if test -n "$STRIP"; then striplib="$STRIP -x" old_striplib="$STRIP -S" { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 $as_echo "yes" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi ;; *) { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } ;; esac fi # Report what library types will actually be built { $as_echo "$as_me:${as_lineno-$LINENO}: checking if libtool supports shared libraries" >&5 $as_echo_n "checking if libtool supports shared libraries... 
" >&6; } { $as_echo "$as_me:${as_lineno-$LINENO}: result: $can_build_shared" >&5 $as_echo "$can_build_shared" >&6; } { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether to build shared libraries" >&5 $as_echo_n "checking whether to build shared libraries... " >&6; } test no = "$can_build_shared" && enable_shared=no # On AIX, shared libraries and static libraries use the same namespace, and # are all built from PIC. case $host_os in aix3*) test yes = "$enable_shared" && enable_static=no if test -n "$RANLIB"; then archive_cmds="$archive_cmds~\$RANLIB \$lib" postinstall_cmds='$RANLIB $lib' fi ;; aix[4-9]*) if test ia64 != "$host_cpu"; then case $enable_shared,$with_aix_soname,$aix_use_runtimelinking in yes,aix,yes) ;; # shared object as lib.so file only yes,svr4,*) ;; # shared object as lib.so archive member only yes,*) enable_static=no ;; # shared object in lib.a archive as well esac fi ;; esac { $as_echo "$as_me:${as_lineno-$LINENO}: result: $enable_shared" >&5 $as_echo "$enable_shared" >&6; } { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether to build static libraries" >&5 $as_echo_n "checking whether to build static libraries... " >&6; } # Make sure either enable_shared or enable_static is yes. test yes = "$enable_shared" || enable_static=yes { $as_echo "$as_me:${as_lineno-$LINENO}: result: $enable_static" >&5 $as_echo "$enable_static" >&6; } fi ac_ext=c ac_cpp='$CPP $CPPFLAGS' ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' ac_compiler_gnu=$ac_cv_c_compiler_gnu CC=$lt_save_CC ac_config_commands="$ac_config_commands libtool" # Only expand once: { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether to enable maintainer-specific portions of Makefiles" >&5 $as_echo_n "checking whether to enable maintainer-specific portions of Makefiles... " >&6; } # Check whether --enable-maintainer-mode was given. 
if test "${enable_maintainer_mode+set}" = set; then : enableval=$enable_maintainer_mode; USE_MAINTAINER_MODE=$enableval else USE_MAINTAINER_MODE=no fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $USE_MAINTAINER_MODE" >&5 $as_echo "$USE_MAINTAINER_MODE" >&6; } if test $USE_MAINTAINER_MODE = yes; then MAINTAINER_MODE_TRUE= MAINTAINER_MODE_FALSE='#' else MAINTAINER_MODE_TRUE='#' MAINTAINER_MODE_FALSE= fi MAINT=$MAINTAINER_MODE_TRUE ac_ext=cpp ac_cpp='$CXXCPP $CPPFLAGS' ac_compile='$CXX -c $CXXFLAGS $CPPFLAGS conftest.$ac_ext >&5' ac_link='$CXX -o conftest$ac_exeext $CXXFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' ac_compiler_gnu=$ac_cv_cxx_compiler_gnu if test -z "$CXX"; then if test -n "$CCC"; then CXX=$CCC else if test -n "$ac_tool_prefix"; then for ac_prog in g++ c++ gpp aCC CC cxx cc++ cl.exe FCC KCC RCC xlC_r xlC do # Extract the first word of "$ac_tool_prefix$ac_prog", so it can be a program name with args. set dummy $ac_tool_prefix$ac_prog; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if ${ac_cv_prog_CXX+:} false; then : $as_echo_n "(cached) " >&6 else if test -n "$CXX"; then ac_cv_prog_CXX="$CXX" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. 
for ac_exec_ext in '' $ac_executable_extensions; do if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_CXX="$ac_tool_prefix$ac_prog" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS fi fi CXX=$ac_cv_prog_CXX if test -n "$CXX"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $CXX" >&5 $as_echo "$CXX" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi test -n "$CXX" && break done fi if test -z "$CXX"; then ac_ct_CXX=$CXX for ac_prog in g++ c++ gpp aCC CC cxx cc++ cl.exe FCC KCC RCC xlC_r xlC do # Extract the first word of "$ac_prog", so it can be a program name with args. set dummy $ac_prog; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if ${ac_cv_prog_ac_ct_CXX+:} false; then : $as_echo_n "(cached) " >&6 else if test -n "$ac_ct_CXX"; then ac_cv_prog_ac_ct_CXX="$ac_ct_CXX" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. 
for ac_exec_ext in '' $ac_executable_extensions; do if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_ac_ct_CXX="$ac_prog" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS fi fi ac_ct_CXX=$ac_cv_prog_ac_ct_CXX if test -n "$ac_ct_CXX"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_CXX" >&5 $as_echo "$ac_ct_CXX" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi test -n "$ac_ct_CXX" && break done if test "x$ac_ct_CXX" = x; then CXX="g++" else case $cross_compiling:$ac_tool_warned in yes:) { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 $as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} ac_tool_warned=yes ;; esac CXX=$ac_ct_CXX fi fi fi fi # Provide some information about the compiler. $as_echo "$as_me:${as_lineno-$LINENO}: checking for C++ compiler version" >&5 set X $ac_compile ac_compiler=$2 for ac_option in --version -v -V -qversion; do { { ac_try="$ac_compiler $ac_option >&5" case "(($ac_try" in *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; *) ac_try_echo=$ac_try;; esac eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" $as_echo "$ac_try_echo"; } >&5 (eval "$ac_compiler $ac_option >&5") 2>conftest.err ac_status=$? if test -s conftest.err; then sed '10a\ ... rest of stderr output deleted ... 10q' conftest.err >conftest.er1 cat conftest.er1 >&5 fi rm -f conftest.er1 conftest.err $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; } done { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether we are using the GNU C++ compiler" >&5 $as_echo_n "checking whether we are using the GNU C++ compiler... " >&6; } if ${ac_cv_cxx_compiler_gnu+:} false; then : $as_echo_n "(cached) " >&6 else cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. 
*/ int main () { #ifndef __GNUC__ choke me #endif ; return 0; } _ACEOF if ac_fn_cxx_try_compile "$LINENO"; then : ac_compiler_gnu=yes else ac_compiler_gnu=no fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext ac_cv_cxx_compiler_gnu=$ac_compiler_gnu fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_cxx_compiler_gnu" >&5 $as_echo "$ac_cv_cxx_compiler_gnu" >&6; } if test $ac_compiler_gnu = yes; then GXX=yes else GXX= fi ac_test_CXXFLAGS=${CXXFLAGS+set} ac_save_CXXFLAGS=$CXXFLAGS { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether $CXX accepts -g" >&5 $as_echo_n "checking whether $CXX accepts -g... " >&6; } if ${ac_cv_prog_cxx_g+:} false; then : $as_echo_n "(cached) " >&6 else ac_save_cxx_werror_flag=$ac_cxx_werror_flag ac_cxx_werror_flag=yes ac_cv_prog_cxx_g=no CXXFLAGS="-g" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ int main () { ; return 0; } _ACEOF if ac_fn_cxx_try_compile "$LINENO"; then : ac_cv_prog_cxx_g=yes else CXXFLAGS="" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ int main () { ; return 0; } _ACEOF if ac_fn_cxx_try_compile "$LINENO"; then : else ac_cxx_werror_flag=$ac_save_cxx_werror_flag CXXFLAGS="-g" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. 
*/ int main () { ; return 0; } _ACEOF if ac_fn_cxx_try_compile "$LINENO"; then : ac_cv_prog_cxx_g=yes fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext ac_cxx_werror_flag=$ac_save_cxx_werror_flag fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_prog_cxx_g" >&5 $as_echo "$ac_cv_prog_cxx_g" >&6; } if test "$ac_test_CXXFLAGS" = set; then CXXFLAGS=$ac_save_CXXFLAGS elif test $ac_cv_prog_cxx_g = yes; then if test "$GXX" = yes; then CXXFLAGS="-g -O2" else CXXFLAGS="-g" fi else if test "$GXX" = yes; then CXXFLAGS="-O2" else CXXFLAGS= fi fi ac_ext=c ac_cpp='$CPP $CPPFLAGS' ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' ac_compiler_gnu=$ac_cv_c_compiler_gnu depcc="$CXX" am_compiler_list= { $as_echo "$as_me:${as_lineno-$LINENO}: checking dependency style of $depcc" >&5 $as_echo_n "checking dependency style of $depcc... " >&6; } if ${am_cv_CXX_dependencies_compiler_type+:} false; then : $as_echo_n "(cached) " >&6 else if test -z "$AMDEP_TRUE" && test -f "$am_depcomp"; then # We make a subdir and do the tests there. Otherwise we can end up # making bogus files that we don't know about and never remove. For # instance it was reported that on HP-UX the gcc test will end up # making a dummy file named 'D' -- because '-MD' means "put the output # in D". rm -rf conftest.dir mkdir conftest.dir # Copy depcomp to subdir because otherwise we won't find it if we're # using a relative directory. cp "$am_depcomp" conftest.dir cd conftest.dir # We will build objects and dependencies in a subdirectory because # it helps to detect inapplicable dependency modes. 
For instance # both Tru64's cc and ICC support -MD to output dependencies as a # side effect of compilation, but ICC will put the dependencies in # the current directory while Tru64 will put them in the object # directory. mkdir sub am_cv_CXX_dependencies_compiler_type=none if test "$am_compiler_list" = ""; then am_compiler_list=`sed -n 's/^#*\([a-zA-Z0-9]*\))$/\1/p' < ./depcomp` fi am__universal=false case " $depcc " in #( *\ -arch\ *\ -arch\ *) am__universal=true ;; esac for depmode in $am_compiler_list; do # Setup a source with many dependencies, because some compilers # like to wrap large dependency lists on column 80 (with \), and # we should not choose a depcomp mode which is confused by this. # # We need to recreate these files for each test, as the compiler may # overwrite some of them when testing with obscure command lines. # This happens at least with the AIX C compiler. : > sub/conftest.c for i in 1 2 3 4 5 6; do echo '#include "conftst'$i'.h"' >> sub/conftest.c # Using ": > sub/conftst$i.h" creates only sub/conftst1.h with # Solaris 10 /bin/sh. echo '/* dummy */' > sub/conftst$i.h done echo "${am__include} ${am__quote}sub/conftest.Po${am__quote}" > confmf # We check with '-c' and '-o' for the sake of the "dashmstdout" # mode. It turns out that the SunPro C++ compiler does not properly # handle '-M -o', and we need to detect this. Also, some Intel # versions had trouble with output in subdirs. am__obj=sub/conftest.${OBJEXT-o} am__minus_obj="-o $am__obj" case $depmode in gcc) # This depmode causes a compiler race in universal mode. test "$am__universal" = false || continue ;; nosideeffect) # After this tag, mechanisms are not by side-effect, so they'll # only be used when explicitly requested. if test "x$enable_dependency_tracking" = xyes; then continue else break fi ;; msvc7 | msvc7msys | msvisualcpp | msvcmsys) # This compiler won't grok '-c -o', but also, the minuso test has # not run yet. 
These depmodes are late enough in the game, and # so weak that their functioning should not be impacted. am__obj=conftest.${OBJEXT-o} am__minus_obj= ;; none) break ;; esac if depmode=$depmode \ source=sub/conftest.c object=$am__obj \ depfile=sub/conftest.Po tmpdepfile=sub/conftest.TPo \ $SHELL ./depcomp $depcc -c $am__minus_obj sub/conftest.c \ >/dev/null 2>conftest.err && grep sub/conftst1.h sub/conftest.Po > /dev/null 2>&1 && grep sub/conftst6.h sub/conftest.Po > /dev/null 2>&1 && grep $am__obj sub/conftest.Po > /dev/null 2>&1 && ${MAKE-make} -s -f confmf > /dev/null 2>&1; then # icc doesn't choke on unknown options, it will just issue warnings # or remarks (even with -Werror). So we grep stderr for any message # that says an option was ignored or not supported. # When given -MP, icc 7.0 and 7.1 complain thusly: # icc: Command line warning: ignoring option '-M'; no argument required # The diagnosis changed in icc 8.0: # icc: Command line remark: option '-MP' not supported if (grep 'ignoring option' conftest.err || grep 'not supported' conftest.err) >/dev/null 2>&1; then :; else am_cv_CXX_dependencies_compiler_type=$depmode break fi fi done cd .. 
rm -rf conftest.dir else am_cv_CXX_dependencies_compiler_type=none fi fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $am_cv_CXX_dependencies_compiler_type" >&5 $as_echo "$am_cv_CXX_dependencies_compiler_type" >&6; } CXXDEPMODE=depmode=$am_cv_CXX_dependencies_compiler_type if test "x$enable_dependency_tracking" != xno \ && test "$am_cv_CXX_dependencies_compiler_type" = gcc3; then am__fastdepCXX_TRUE= am__fastdepCXX_FALSE='#' else am__fastdepCXX_TRUE='#' am__fastdepCXX_FALSE= fi func_stripname_cnf () { case $2 in .*) func_stripname_result=`$ECHO "$3" | $SED "s%^$1%%; s%\\\\$2\$%%"`;; *) func_stripname_result=`$ECHO "$3" | $SED "s%^$1%%; s%$2\$%%"`;; esac } # func_stripname_cnf if test -n "$CXX" && ( test no != "$CXX" && ( (test g++ = "$CXX" && `g++ -v >/dev/null 2>&1` ) || (test g++ != "$CXX"))); then ac_ext=cpp ac_cpp='$CXXCPP $CPPFLAGS' ac_compile='$CXX -c $CXXFLAGS $CPPFLAGS conftest.$ac_ext >&5' ac_link='$CXX -o conftest$ac_exeext $CXXFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' ac_compiler_gnu=$ac_cv_cxx_compiler_gnu { $as_echo "$as_me:${as_lineno-$LINENO}: checking how to run the C++ preprocessor" >&5 $as_echo_n "checking how to run the C++ preprocessor... " >&6; } if test -z "$CXXCPP"; then if ${ac_cv_prog_CXXCPP+:} false; then : $as_echo_n "(cached) " >&6 else # Double quotes because CXXCPP needs to be expanded for CXXCPP in "$CXX -E" "/lib/cpp" do ac_preproc_ok=false for ac_cxx_preproc_warn_flag in '' yes do # Use a header file that comes with gcc, so configuring glibc # with a fresh cross-compiler works. # Prefer to if __STDC__ is defined, since # exists even on freestanding compilers. # On the NeXT, cc -E runs the code through the compiler's parser, # not just through cpp. "Syntax error" is here to catch this case. cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ #ifdef __STDC__ # include #else # include #endif Syntax error _ACEOF if ac_fn_cxx_try_cpp "$LINENO"; then : else # Broken: fails on valid input. 
continue fi rm -f conftest.err conftest.i conftest.$ac_ext # OK, works on sane cases. Now check whether nonexistent headers # can be detected and how. cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ #include _ACEOF if ac_fn_cxx_try_cpp "$LINENO"; then : # Broken: success on invalid input. continue else # Passes both tests. ac_preproc_ok=: break fi rm -f conftest.err conftest.i conftest.$ac_ext done # Because of `break', _AC_PREPROC_IFELSE's cleaning code was skipped. rm -f conftest.i conftest.err conftest.$ac_ext if $ac_preproc_ok; then : break fi done ac_cv_prog_CXXCPP=$CXXCPP fi CXXCPP=$ac_cv_prog_CXXCPP else ac_cv_prog_CXXCPP=$CXXCPP fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $CXXCPP" >&5 $as_echo "$CXXCPP" >&6; } ac_preproc_ok=false for ac_cxx_preproc_warn_flag in '' yes do # Use a header file that comes with gcc, so configuring glibc # with a fresh cross-compiler works. # Prefer to if __STDC__ is defined, since # exists even on freestanding compilers. # On the NeXT, cc -E runs the code through the compiler's parser, # not just through cpp. "Syntax error" is here to catch this case. cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ #ifdef __STDC__ # include #else # include #endif Syntax error _ACEOF if ac_fn_cxx_try_cpp "$LINENO"; then : else # Broken: fails on valid input. continue fi rm -f conftest.err conftest.i conftest.$ac_ext # OK, works on sane cases. Now check whether nonexistent headers # can be detected and how. cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ #include _ACEOF if ac_fn_cxx_try_cpp "$LINENO"; then : # Broken: success on invalid input. continue else # Passes both tests. ac_preproc_ok=: break fi rm -f conftest.err conftest.i conftest.$ac_ext done # Because of `break', _AC_PREPROC_IFELSE's cleaning code was skipped. 
rm -f conftest.i conftest.err conftest.$ac_ext if $ac_preproc_ok; then : else { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5 $as_echo "$as_me: error: in \`$ac_pwd':" >&2;} as_fn_error $? "C++ preprocessor \"$CXXCPP\" fails sanity check See \`config.log' for more details" "$LINENO" 5; } fi ac_ext=c ac_cpp='$CPP $CPPFLAGS' ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' ac_compiler_gnu=$ac_cv_c_compiler_gnu else _lt_caught_CXX_error=yes fi ac_ext=cpp ac_cpp='$CXXCPP $CPPFLAGS' ac_compile='$CXX -c $CXXFLAGS $CPPFLAGS conftest.$ac_ext >&5' ac_link='$CXX -o conftest$ac_exeext $CXXFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' ac_compiler_gnu=$ac_cv_cxx_compiler_gnu archive_cmds_need_lc_CXX=no allow_undefined_flag_CXX= always_export_symbols_CXX=no archive_expsym_cmds_CXX= compiler_needs_object_CXX=no export_dynamic_flag_spec_CXX= hardcode_direct_CXX=no hardcode_direct_absolute_CXX=no hardcode_libdir_flag_spec_CXX= hardcode_libdir_separator_CXX= hardcode_minus_L_CXX=no hardcode_shlibpath_var_CXX=unsupported hardcode_automatic_CXX=no inherit_rpath_CXX=no module_cmds_CXX= module_expsym_cmds_CXX= link_all_deplibs_CXX=unknown old_archive_cmds_CXX=$old_archive_cmds reload_flag_CXX=$reload_flag reload_cmds_CXX=$reload_cmds no_undefined_flag_CXX= whole_archive_flag_spec_CXX= enable_shared_with_static_runtimes_CXX=no # Source file extension for C++ test sources. ac_ext=cpp # Object file extension for compiled C++ test sources. objext=o objext_CXX=$objext # No sense in running all these tests if we already determined that # the CXX compiler isn't working. Some variables (like enable_shared) # are currently assumed to apply to all compilers on this platform, # and will be corrupted by setting them based on a non-working compiler. 
if test yes != "$_lt_caught_CXX_error"; then # Code to be used in simple compile tests lt_simple_compile_test_code="int some_variable = 0;" # Code to be used in simple link tests lt_simple_link_test_code='int main(int, char *[]) { return(0); }' # ltmain only uses $CC for tagged configurations so make sure $CC is set. # If no C compiler was specified, use CC. LTCC=${LTCC-"$CC"} # If no C compiler flags were specified, use CFLAGS. LTCFLAGS=${LTCFLAGS-"$CFLAGS"} # Allow CC to be a program name with arguments. compiler=$CC # save warnings/boilerplate of simple test code ac_outfile=conftest.$ac_objext echo "$lt_simple_compile_test_code" >conftest.$ac_ext eval "$ac_compile" 2>&1 >/dev/null | $SED '/^$/d; /^ *+/d' >conftest.err _lt_compiler_boilerplate=`cat conftest.err` $RM conftest* ac_outfile=conftest.$ac_objext echo "$lt_simple_link_test_code" >conftest.$ac_ext eval "$ac_link" 2>&1 >/dev/null | $SED '/^$/d; /^ *+/d' >conftest.err _lt_linker_boilerplate=`cat conftest.err` $RM -r conftest* # Allow CC to be a program name with arguments. lt_save_CC=$CC lt_save_CFLAGS=$CFLAGS lt_save_LD=$LD lt_save_GCC=$GCC GCC=$GXX lt_save_with_gnu_ld=$with_gnu_ld lt_save_path_LD=$lt_cv_path_LD if test -n "${lt_cv_prog_gnu_ldcxx+set}"; then lt_cv_prog_gnu_ld=$lt_cv_prog_gnu_ldcxx else $as_unset lt_cv_prog_gnu_ld fi if test -n "${lt_cv_path_LDCXX+set}"; then lt_cv_path_LD=$lt_cv_path_LDCXX else $as_unset lt_cv_path_LD fi test -z "${LDCXX+set}" || LD=$LDCXX CC=${CXX-"c++"} CFLAGS=$CXXFLAGS compiler=$CC compiler_CXX=$CC func_cc_basename $compiler cc_basename=$func_cc_basename_result if test -n "$compiler"; then # We don't want -fno-exception when compiling C++ code, so set the # no_builtin_flag separately if test yes = "$GXX"; then lt_prog_compiler_no_builtin_flag_CXX=' -fno-builtin' else lt_prog_compiler_no_builtin_flag_CXX= fi if test yes = "$GXX"; then # Set up default GNU C++ configuration # Check whether --with-gnu-ld was given. 
if test "${with_gnu_ld+set}" = set; then : withval=$with_gnu_ld; test no = "$withval" || with_gnu_ld=yes else with_gnu_ld=no fi ac_prog=ld if test yes = "$GCC"; then # Check if gcc -print-prog-name=ld gives a path. { $as_echo "$as_me:${as_lineno-$LINENO}: checking for ld used by $CC" >&5 $as_echo_n "checking for ld used by $CC... " >&6; } case $host in *-*-mingw*) # gcc leaves a trailing carriage return, which upsets mingw ac_prog=`($CC -print-prog-name=ld) 2>&5 | tr -d '\015'` ;; *) ac_prog=`($CC -print-prog-name=ld) 2>&5` ;; esac case $ac_prog in # Accept absolute paths. [\\/]* | ?:[\\/]*) re_direlt='/[^/][^/]*/\.\./' # Canonicalize the pathname of ld ac_prog=`$ECHO "$ac_prog"| $SED 's%\\\\%/%g'` while $ECHO "$ac_prog" | $GREP "$re_direlt" > /dev/null 2>&1; do ac_prog=`$ECHO $ac_prog| $SED "s%$re_direlt%/%"` done test -z "$LD" && LD=$ac_prog ;; "") # If it fails, then pretend we aren't using GCC. ac_prog=ld ;; *) # If it is relative, then search for the first ld in PATH. with_gnu_ld=unknown ;; esac elif test yes = "$with_gnu_ld"; then { $as_echo "$as_me:${as_lineno-$LINENO}: checking for GNU ld" >&5 $as_echo_n "checking for GNU ld... " >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: checking for non-GNU ld" >&5 $as_echo_n "checking for non-GNU ld... " >&6; } fi if ${lt_cv_path_LD+:} false; then : $as_echo_n "(cached) " >&6 else if test -z "$LD"; then lt_save_ifs=$IFS; IFS=$PATH_SEPARATOR for ac_dir in $PATH; do IFS=$lt_save_ifs test -z "$ac_dir" && ac_dir=. if test -f "$ac_dir/$ac_prog" || test -f "$ac_dir/$ac_prog$ac_exeext"; then lt_cv_path_LD=$ac_dir/$ac_prog # Check to see if the program is GNU ld. I'd rather use --version, # but apparently some variants of GNU ld only accept -v. # Break only if it was the GNU/non-GNU ld that we prefer. case `"$lt_cv_path_LD" -v 2>&1 &5 $as_echo "$LD" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi test -z "$LD" && as_fn_error $? 
"no acceptable ld found in \$PATH" "$LINENO" 5 { $as_echo "$as_me:${as_lineno-$LINENO}: checking if the linker ($LD) is GNU ld" >&5 $as_echo_n "checking if the linker ($LD) is GNU ld... " >&6; } if ${lt_cv_prog_gnu_ld+:} false; then : $as_echo_n "(cached) " >&6 else # I'd rather use --version here, but apparently some GNU lds only accept -v. case `$LD -v 2>&1 &5 $as_echo "$lt_cv_prog_gnu_ld" >&6; } with_gnu_ld=$lt_cv_prog_gnu_ld # Check if GNU C++ uses GNU ld as the underlying linker, since the # archiving commands below assume that GNU ld is being used. if test yes = "$with_gnu_ld"; then archive_cmds_CXX='$CC $pic_flag -shared -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags $wl-soname $wl$soname -o $lib' archive_expsym_cmds_CXX='$CC $pic_flag -shared -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags $wl-soname $wl$soname $wl-retain-symbols-file $wl$export_symbols -o $lib' hardcode_libdir_flag_spec_CXX='$wl-rpath $wl$libdir' export_dynamic_flag_spec_CXX='$wl--export-dynamic' # If archive_cmds runs LD, not CC, wlarc should be empty # XXX I think wlarc can be eliminated in ltcf-cxx, but I need to # investigate it a little bit more. (MM) wlarc='$wl' # ancient GNU ld didn't support --whole-archive et. al. if eval "`$CC -print-prog-name=ld` --help 2>&1" | $GREP 'no-whole-archive' > /dev/null; then whole_archive_flag_spec_CXX=$wlarc'--whole-archive$convenience '$wlarc'--no-whole-archive' else whole_archive_flag_spec_CXX= fi else with_gnu_ld=no wlarc= # A generic and very simple default shared library creation # command for GNU C++ for the case where it uses the native # linker, instead of GNU ld. If possible, this setting should # overridden to take advantage of the native linker features on # the platform it is being used on. 
archive_cmds_CXX='$CC -shared -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags -o $lib' fi # Commands to make compiler produce verbose output that lists # what "hidden" libraries, object files and flags are used when # linking a shared library. output_verbose_link_cmd='$CC -shared $CFLAGS -v conftest.$objext 2>&1 | $GREP -v "^Configured with:" | $GREP "\-L"' else GXX=no with_gnu_ld=no wlarc= fi # PORTME: fill in a description of your system's C++ link characteristics { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether the $compiler linker ($LD) supports shared libraries" >&5 $as_echo_n "checking whether the $compiler linker ($LD) supports shared libraries... " >&6; } ld_shlibs_CXX=yes case $host_os in aix3*) # FIXME: insert proper C++ library support ld_shlibs_CXX=no ;; aix[4-9]*) if test ia64 = "$host_cpu"; then # On IA64, the linker does run time linking by default, so we don't # have to do anything special. aix_use_runtimelinking=no exp_sym_flag='-Bexport' no_entry_flag= else aix_use_runtimelinking=no # Test if we are trying to use run time linking or normal # AIX style linking. If -brtl is somewhere in LDFLAGS, we # have runtime linking enabled, and use it for executables. 
# For shared libraries, we enable/disable runtime linking # depending on the kind of the shared library created - # when "with_aix_soname,aix_use_runtimelinking" is: # "aix,no" lib.a(lib.so.V) shared, rtl:no, for executables # "aix,yes" lib.so shared, rtl:yes, for executables # lib.a static archive # "both,no" lib.so.V(shr.o) shared, rtl:yes # lib.a(lib.so.V) shared, rtl:no, for executables # "both,yes" lib.so.V(shr.o) shared, rtl:yes, for executables # lib.a(lib.so.V) shared, rtl:no # "svr4,*" lib.so.V(shr.o) shared, rtl:yes, for executables # lib.a static archive case $host_os in aix4.[23]|aix4.[23].*|aix[5-9]*) for ld_flag in $LDFLAGS; do case $ld_flag in *-brtl*) aix_use_runtimelinking=yes break ;; esac done if test svr4,no = "$with_aix_soname,$aix_use_runtimelinking"; then # With aix-soname=svr4, we create the lib.so.V shared archives only, # so we don't have lib.a shared libs to link our executables. # We have to force runtime linking in this case. aix_use_runtimelinking=yes LDFLAGS="$LDFLAGS -Wl,-brtl" fi ;; esac exp_sym_flag='-bexport' no_entry_flag='-bnoentry' fi # When large executables or shared objects are built, AIX ld can # have problems creating the table of contents. If linking a library # or program results in "error TOC overflow" add -mminimal-toc to # CXXFLAGS/CFLAGS for g++/gcc. In the cases where that is not # enough to fix the problem, add -Wl,-bbigtoc to LDFLAGS. archive_cmds_CXX='' hardcode_direct_CXX=yes hardcode_direct_absolute_CXX=yes hardcode_libdir_separator_CXX=':' link_all_deplibs_CXX=yes file_list_spec_CXX='$wl-f,' case $with_aix_soname,$aix_use_runtimelinking in aix,*) ;; # no import file svr4,* | *,yes) # use import file # The Import File defines what to hardcode. 
hardcode_direct_CXX=no hardcode_direct_absolute_CXX=no ;; esac if test yes = "$GXX"; then case $host_os in aix4.[012]|aix4.[012].*) # We only want to do this on AIX 4.2 and lower, the check # below for broken collect2 doesn't work under 4.3+ collect2name=`$CC -print-prog-name=collect2` if test -f "$collect2name" && strings "$collect2name" | $GREP resolve_lib_name >/dev/null then # We have reworked collect2 : else # We have old collect2 hardcode_direct_CXX=unsupported # It fails to find uninstalled libraries when the uninstalled # path is not listed in the libpath. Setting hardcode_minus_L # to unsupported forces relinking hardcode_minus_L_CXX=yes hardcode_libdir_flag_spec_CXX='-L$libdir' hardcode_libdir_separator_CXX= fi esac shared_flag='-shared' if test yes = "$aix_use_runtimelinking"; then shared_flag=$shared_flag' $wl-G' fi # Need to ensure runtime linking is disabled for the traditional # shared library, or the linker may eventually find shared libraries # /with/ Import File - we do not want to mix them. shared_flag_aix='-shared' shared_flag_svr4='-shared $wl-G' else # not using gcc if test ia64 = "$host_cpu"; then # VisualAge C++, Version 5.5 for AIX 5L for IA-64, Beta 3 Release # chokes on -Wl,-G. The following line is correct: shared_flag='-G' else if test yes = "$aix_use_runtimelinking"; then shared_flag='$wl-G' else shared_flag='$wl-bM:SRE' fi shared_flag_aix='$wl-bM:SRE' shared_flag_svr4='$wl-G' fi fi export_dynamic_flag_spec_CXX='$wl-bexpall' # It seems that -bexpall does not export symbols beginning with # underscore (_), so it is better to generate a list of symbols to # export. always_export_symbols_CXX=yes if test aix,yes = "$with_aix_soname,$aix_use_runtimelinking"; then # Warning - without using the other runtime loading flags (-brtl), # -berok will link without error, but may produce a broken library. # The "-G" linker flag allows undefined symbols. 
no_undefined_flag_CXX='-bernotok' # Determine the default libpath from the value encoded in an empty # executable. if test set = "${lt_cv_aix_libpath+set}"; then aix_libpath=$lt_cv_aix_libpath else if ${lt_cv_aix_libpath__CXX+:} false; then : $as_echo_n "(cached) " >&6 else cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ int main () { ; return 0; } _ACEOF if ac_fn_cxx_try_link "$LINENO"; then : lt_aix_libpath_sed=' /Import File Strings/,/^$/ { /^0/ { s/^0 *\([^ ]*\) *$/\1/ p } }' lt_cv_aix_libpath__CXX=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` # Check for a 64-bit object if we didn't find anything. if test -z "$lt_cv_aix_libpath__CXX"; then lt_cv_aix_libpath__CXX=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` fi fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext if test -z "$lt_cv_aix_libpath__CXX"; then lt_cv_aix_libpath__CXX=/usr/lib:/lib fi fi aix_libpath=$lt_cv_aix_libpath__CXX fi hardcode_libdir_flag_spec_CXX='$wl-blibpath:$libdir:'"$aix_libpath" archive_expsym_cmds_CXX='$CC -o $output_objdir/$soname $libobjs $deplibs $wl'$no_entry_flag' $compiler_flags `if test -n "$allow_undefined_flag"; then func_echo_all "$wl$allow_undefined_flag"; else :; fi` $wl'$exp_sym_flag:\$export_symbols' '$shared_flag else if test ia64 = "$host_cpu"; then hardcode_libdir_flag_spec_CXX='$wl-R $libdir:/usr/lib:/lib' allow_undefined_flag_CXX="-z nodefs" archive_expsym_cmds_CXX="\$CC $shared_flag"' -o $output_objdir/$soname $libobjs $deplibs '"\$wl$no_entry_flag"' $compiler_flags $wl$allow_undefined_flag '"\$wl$exp_sym_flag:\$export_symbols" else # Determine the default libpath from the value encoded in an # empty executable. if test set = "${lt_cv_aix_libpath+set}"; then aix_libpath=$lt_cv_aix_libpath else if ${lt_cv_aix_libpath__CXX+:} false; then : $as_echo_n "(cached) " >&6 else cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. 
*/ int main () { ; return 0; } _ACEOF if ac_fn_cxx_try_link "$LINENO"; then : lt_aix_libpath_sed=' /Import File Strings/,/^$/ { /^0/ { s/^0 *\([^ ]*\) *$/\1/ p } }' lt_cv_aix_libpath__CXX=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` # Check for a 64-bit object if we didn't find anything. if test -z "$lt_cv_aix_libpath__CXX"; then lt_cv_aix_libpath__CXX=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` fi fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext if test -z "$lt_cv_aix_libpath__CXX"; then lt_cv_aix_libpath__CXX=/usr/lib:/lib fi fi aix_libpath=$lt_cv_aix_libpath__CXX fi hardcode_libdir_flag_spec_CXX='$wl-blibpath:$libdir:'"$aix_libpath" # Warning - without using the other run time loading flags, # -berok will link without error, but may produce a broken library. no_undefined_flag_CXX=' $wl-bernotok' allow_undefined_flag_CXX=' $wl-berok' if test yes = "$with_gnu_ld"; then # We only use this code for GNU lds that support --whole-archive. whole_archive_flag_spec_CXX='$wl--whole-archive$convenience $wl--no-whole-archive' else # Exported symbols can be pulled into shared objects from archives whole_archive_flag_spec_CXX='$convenience' fi archive_cmds_need_lc_CXX=yes archive_expsym_cmds_CXX='$RM -r $output_objdir/$realname.d~$MKDIR $output_objdir/$realname.d' # -brtl affects multiple linker settings, -berok does not and is overridden later compiler_flags_filtered='`func_echo_all "$compiler_flags " | $SED -e "s%-brtl\\([, ]\\)%-berok\\1%g"`' if test svr4 != "$with_aix_soname"; then # This is similar to how AIX traditionally builds its shared # libraries. Need -bnortl late, we may have -brtl in LDFLAGS. 
archive_expsym_cmds_CXX="$archive_expsym_cmds_CXX"'~$CC '$shared_flag_aix' -o $output_objdir/$realname.d/$soname $libobjs $deplibs $wl-bnoentry '$compiler_flags_filtered'$wl-bE:$export_symbols$allow_undefined_flag~$AR $AR_FLAGS $output_objdir/$libname$release.a $output_objdir/$realname.d/$soname' fi if test aix != "$with_aix_soname"; then archive_expsym_cmds_CXX="$archive_expsym_cmds_CXX"'~$CC '$shared_flag_svr4' -o $output_objdir/$realname.d/$shared_archive_member_spec.o $libobjs $deplibs $wl-bnoentry '$compiler_flags_filtered'$wl-bE:$export_symbols$allow_undefined_flag~$STRIP -e $output_objdir/$realname.d/$shared_archive_member_spec.o~( func_echo_all "#! $soname($shared_archive_member_spec.o)"; if test shr_64 = "$shared_archive_member_spec"; then func_echo_all "# 64"; else func_echo_all "# 32"; fi; cat $export_symbols ) > $output_objdir/$realname.d/$shared_archive_member_spec.imp~$AR $AR_FLAGS $output_objdir/$soname $output_objdir/$realname.d/$shared_archive_member_spec.o $output_objdir/$realname.d/$shared_archive_member_spec.imp' else # used by -dlpreopen to get the symbols archive_expsym_cmds_CXX="$archive_expsym_cmds_CXX"'~$MV $output_objdir/$realname.d/$soname $output_objdir' fi archive_expsym_cmds_CXX="$archive_expsym_cmds_CXX"'~$RM -r $output_objdir/$realname.d' fi fi ;; beos*) if $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then allow_undefined_flag_CXX=unsupported # Joseph Beckenbach says some releases of gcc # support --undefined. This deserves some investigation. FIXME archive_cmds_CXX='$CC -nostart $libobjs $deplibs $compiler_flags $wl-soname $wl$soname -o $lib' else ld_shlibs_CXX=no fi ;; chorus*) case $cc_basename in *) # FIXME: insert proper C++ library support ld_shlibs_CXX=no ;; esac ;; cygwin* | mingw* | pw32* | cegcc*) case $GXX,$cc_basename in ,cl* | no,cl*) # Native MSVC # hardcode_libdir_flag_spec is actually meaningless, as there is # no search path for DLLs. 
hardcode_libdir_flag_spec_CXX=' ' allow_undefined_flag_CXX=unsupported always_export_symbols_CXX=yes file_list_spec_CXX='@' # Tell ltmain to make .lib files, not .a files. libext=lib # Tell ltmain to make .dll files, not .so files. shrext_cmds=.dll # FIXME: Setting linknames here is a bad hack. archive_cmds_CXX='$CC -o $output_objdir/$soname $libobjs $compiler_flags $deplibs -Wl,-DLL,-IMPLIB:"$tool_output_objdir$libname.dll.lib"~linknames=' archive_expsym_cmds_CXX='if test DEF = "`$SED -n -e '\''s/^[ ]*//'\'' -e '\''/^\(;.*\)*$/d'\'' -e '\''s/^\(EXPORTS\|LIBRARY\)\([ ].*\)*$/DEF/p'\'' -e q $export_symbols`" ; then cp "$export_symbols" "$output_objdir/$soname.def"; echo "$tool_output_objdir$soname.def" > "$output_objdir/$soname.exp"; else $SED -e '\''s/^/-link -EXPORT:/'\'' < $export_symbols > $output_objdir/$soname.exp; fi~ $CC -o $tool_output_objdir$soname $libobjs $compiler_flags $deplibs "@$tool_output_objdir$soname.exp" -Wl,-DLL,-IMPLIB:"$tool_output_objdir$libname.dll.lib"~ linknames=' # The linker will not automatically build a static lib if we build a DLL. # _LT_TAGVAR(old_archive_from_new_cmds, CXX)='true' enable_shared_with_static_runtimes_CXX=yes # Don't use ranlib old_postinstall_cmds_CXX='chmod 644 $oldlib' postlink_cmds_CXX='lt_outputfile="@OUTPUT@"~ lt_tool_outputfile="@TOOL_OUTPUT@"~ case $lt_outputfile in *.exe|*.EXE) ;; *) lt_outputfile=$lt_outputfile.exe lt_tool_outputfile=$lt_tool_outputfile.exe ;; esac~ func_to_tool_file "$lt_outputfile"~ if test : != "$MANIFEST_TOOL" && test -f "$lt_outputfile.manifest"; then $MANIFEST_TOOL -manifest "$lt_tool_outputfile.manifest" -outputresource:"$lt_tool_outputfile" || exit 1; $RM "$lt_outputfile.manifest"; fi' ;; *) # g++ # _LT_TAGVAR(hardcode_libdir_flag_spec, CXX) is actually meaningless, # as there is no search path for DLLs. 
hardcode_libdir_flag_spec_CXX='-L$libdir' export_dynamic_flag_spec_CXX='$wl--export-all-symbols' allow_undefined_flag_CXX=unsupported always_export_symbols_CXX=no enable_shared_with_static_runtimes_CXX=yes if $LD --help 2>&1 | $GREP 'auto-import' > /dev/null; then archive_cmds_CXX='$CC -shared -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags -o $output_objdir/$soname $wl--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib' # If the export-symbols file already is a .def file, use it as # is; otherwise, prepend EXPORTS... archive_expsym_cmds_CXX='if test DEF = "`$SED -n -e '\''s/^[ ]*//'\'' -e '\''/^\(;.*\)*$/d'\'' -e '\''s/^\(EXPORTS\|LIBRARY\)\([ ].*\)*$/DEF/p'\'' -e q $export_symbols`" ; then cp $export_symbols $output_objdir/$soname.def; else echo EXPORTS > $output_objdir/$soname.def; cat $export_symbols >> $output_objdir/$soname.def; fi~ $CC -shared -nostdlib $output_objdir/$soname.def $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags -o $output_objdir/$soname $wl--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib' else ld_shlibs_CXX=no fi ;; esac ;; darwin* | rhapsody*) archive_cmds_need_lc_CXX=no hardcode_direct_CXX=no hardcode_automatic_CXX=yes hardcode_shlibpath_var_CXX=unsupported if test yes = "$lt_cv_ld_force_load"; then whole_archive_flag_spec_CXX='`for conv in $convenience\"\"; do test -n \"$conv\" && new_convenience=\"$new_convenience $wl-force_load,$conv\"; done; func_echo_all \"$new_convenience\"`' else whole_archive_flag_spec_CXX='' fi link_all_deplibs_CXX=yes allow_undefined_flag_CXX=$_lt_dar_allow_undefined case $cc_basename in ifort*|nagfor*) _lt_dar_can_shared=yes ;; *) _lt_dar_can_shared=$GCC ;; esac if test yes = "$_lt_dar_can_shared"; then output_verbose_link_cmd=func_echo_all archive_cmds_CXX="\$CC -dynamiclib \$allow_undefined_flag -o \$lib \$libobjs \$deplibs \$compiler_flags -install_name \$rpath/\$soname \$verstring $_lt_dar_single_mod$_lt_dsymutil" module_cmds_CXX="\$CC 
\$allow_undefined_flag -o \$lib -bundle \$libobjs \$deplibs \$compiler_flags$_lt_dsymutil" archive_expsym_cmds_CXX="sed 's|^|_|' < \$export_symbols > \$output_objdir/\$libname-symbols.expsym~\$CC -dynamiclib \$allow_undefined_flag -o \$lib \$libobjs \$deplibs \$compiler_flags -install_name \$rpath/\$soname \$verstring $_lt_dar_single_mod$_lt_dar_export_syms$_lt_dsymutil" module_expsym_cmds_CXX="sed -e 's|^|_|' < \$export_symbols > \$output_objdir/\$libname-symbols.expsym~\$CC \$allow_undefined_flag -o \$lib -bundle \$libobjs \$deplibs \$compiler_flags$_lt_dar_export_syms$_lt_dsymutil" if test yes != "$lt_cv_apple_cc_single_mod"; then archive_cmds_CXX="\$CC -r -keep_private_externs -nostdlib -o \$lib-master.o \$libobjs~\$CC -dynamiclib \$allow_undefined_flag -o \$lib \$lib-master.o \$deplibs \$compiler_flags -install_name \$rpath/\$soname \$verstring$_lt_dsymutil" archive_expsym_cmds_CXX="sed 's|^|_|' < \$export_symbols > \$output_objdir/\$libname-symbols.expsym~\$CC -r -keep_private_externs -nostdlib -o \$lib-master.o \$libobjs~\$CC -dynamiclib \$allow_undefined_flag -o \$lib \$lib-master.o \$deplibs \$compiler_flags -install_name \$rpath/\$soname \$verstring$_lt_dar_export_syms$_lt_dsymutil" fi else ld_shlibs_CXX=no fi ;; os2*) hardcode_libdir_flag_spec_CXX='-L$libdir' hardcode_minus_L_CXX=yes allow_undefined_flag_CXX=unsupported shrext_cmds=.dll archive_cmds_CXX='$ECHO "LIBRARY ${soname%$shared_ext} INITINSTANCE TERMINSTANCE" > $output_objdir/$libname.def~ $ECHO "DESCRIPTION \"$libname\"" >> $output_objdir/$libname.def~ $ECHO "DATA MULTIPLE NONSHARED" >> $output_objdir/$libname.def~ $ECHO EXPORTS >> $output_objdir/$libname.def~ emxexp $libobjs | $SED /"_DLL_InitTerm"/d >> $output_objdir/$libname.def~ $CC -Zdll -Zcrtdll -o $output_objdir/$soname $libobjs $deplibs $compiler_flags $output_objdir/$libname.def~ emximp -o $lib $output_objdir/$libname.def' archive_expsym_cmds_CXX='$ECHO "LIBRARY ${soname%$shared_ext} INITINSTANCE TERMINSTANCE" > 
$output_objdir/$libname.def~ $ECHO "DESCRIPTION \"$libname\"" >> $output_objdir/$libname.def~ $ECHO "DATA MULTIPLE NONSHARED" >> $output_objdir/$libname.def~ $ECHO EXPORTS >> $output_objdir/$libname.def~ prefix_cmds="$SED"~ if test EXPORTS = "`$SED 1q $export_symbols`"; then prefix_cmds="$prefix_cmds -e 1d"; fi~ prefix_cmds="$prefix_cmds -e \"s/^\(.*\)$/_\1/g\""~ cat $export_symbols | $prefix_cmds >> $output_objdir/$libname.def~ $CC -Zdll -Zcrtdll -o $output_objdir/$soname $libobjs $deplibs $compiler_flags $output_objdir/$libname.def~ emximp -o $lib $output_objdir/$libname.def' old_archive_From_new_cmds_CXX='emximp -o $output_objdir/${libname}_dll.a $output_objdir/$libname.def' enable_shared_with_static_runtimes_CXX=yes ;; dgux*) case $cc_basename in ec++*) # FIXME: insert proper C++ library support ld_shlibs_CXX=no ;; ghcx*) # Green Hills C++ Compiler # FIXME: insert proper C++ library support ld_shlibs_CXX=no ;; *) # FIXME: insert proper C++ library support ld_shlibs_CXX=no ;; esac ;; freebsd2.*) # C++ shared libraries reported to be fairly broken before # switch to ELF ld_shlibs_CXX=no ;; freebsd-elf*) archive_cmds_need_lc_CXX=no ;; freebsd* | dragonfly*) # FreeBSD 3 and later use GNU C++ and GNU ld with standard ELF # conventions ld_shlibs_CXX=yes ;; haiku*) archive_cmds_CXX='$CC -shared $libobjs $deplibs $compiler_flags $wl-soname $wl$soname -o $lib' link_all_deplibs_CXX=yes ;; hpux9*) hardcode_libdir_flag_spec_CXX='$wl+b $wl$libdir' hardcode_libdir_separator_CXX=: export_dynamic_flag_spec_CXX='$wl-E' hardcode_direct_CXX=yes hardcode_minus_L_CXX=yes # Not in the search PATH, # but as the default # location of the library. 
case $cc_basename in CC*) # FIXME: insert proper C++ library support ld_shlibs_CXX=no ;; aCC*) archive_cmds_CXX='$RM $output_objdir/$soname~$CC -b $wl+b $wl$install_libdir -o $output_objdir/$soname $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags~test "x$output_objdir/$soname" = "x$lib" || mv $output_objdir/$soname $lib' # Commands to make compiler produce verbose output that lists # what "hidden" libraries, object files and flags are used when # linking a shared library. # # There doesn't appear to be a way to prevent this compiler from # explicitly linking system object files so we need to strip them # from the output so that they don't get included in the library # dependencies. output_verbose_link_cmd='templist=`($CC -b $CFLAGS -v conftest.$objext 2>&1) | $EGREP "\-L"`; list= ; for z in $templist; do case $z in conftest.$objext) list="$list $z";; *.$objext);; *) list="$list $z";;esac; done; func_echo_all "$list"' ;; *) if test yes = "$GXX"; then archive_cmds_CXX='$RM $output_objdir/$soname~$CC -shared -nostdlib $pic_flag $wl+b $wl$install_libdir -o $output_objdir/$soname $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags~test "x$output_objdir/$soname" = "x$lib" || mv $output_objdir/$soname $lib' else # FIXME: insert proper C++ library support ld_shlibs_CXX=no fi ;; esac ;; hpux10*|hpux11*) if test no = "$with_gnu_ld"; then hardcode_libdir_flag_spec_CXX='$wl+b $wl$libdir' hardcode_libdir_separator_CXX=: case $host_cpu in hppa*64*|ia64*) ;; *) export_dynamic_flag_spec_CXX='$wl-E' ;; esac fi case $host_cpu in hppa*64*|ia64*) hardcode_direct_CXX=no hardcode_shlibpath_var_CXX=no ;; *) hardcode_direct_CXX=yes hardcode_direct_absolute_CXX=yes hardcode_minus_L_CXX=yes # Not in the search PATH, # but as the default # location of the library. 
;; esac case $cc_basename in CC*) # FIXME: insert proper C++ library support ld_shlibs_CXX=no ;; aCC*) case $host_cpu in hppa*64*) archive_cmds_CXX='$CC -b $wl+h $wl$soname -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags' ;; ia64*) archive_cmds_CXX='$CC -b $wl+h $wl$soname $wl+nodefaultrpath -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags' ;; *) archive_cmds_CXX='$CC -b $wl+h $wl$soname $wl+b $wl$install_libdir -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags' ;; esac # Commands to make compiler produce verbose output that lists # what "hidden" libraries, object files and flags are used when # linking a shared library. # # There doesn't appear to be a way to prevent this compiler from # explicitly linking system object files so we need to strip them # from the output so that they don't get included in the library # dependencies. output_verbose_link_cmd='templist=`($CC -b $CFLAGS -v conftest.$objext 2>&1) | $GREP "\-L"`; list= ; for z in $templist; do case $z in conftest.$objext) list="$list $z";; *.$objext);; *) list="$list $z";;esac; done; func_echo_all "$list"' ;; *) if test yes = "$GXX"; then if test no = "$with_gnu_ld"; then case $host_cpu in hppa*64*) archive_cmds_CXX='$CC -shared -nostdlib -fPIC $wl+h $wl$soname -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags' ;; ia64*) archive_cmds_CXX='$CC -shared -nostdlib $pic_flag $wl+h $wl$soname $wl+nodefaultrpath -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags' ;; *) archive_cmds_CXX='$CC -shared -nostdlib $pic_flag $wl+h $wl$soname $wl+b $wl$install_libdir -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags' ;; esac fi else # FIXME: insert proper C++ library support ld_shlibs_CXX=no fi ;; esac ;; interix[3-9]*) hardcode_direct_CXX=no hardcode_shlibpath_var_CXX=no hardcode_libdir_flag_spec_CXX='$wl-rpath,$libdir' export_dynamic_flag_spec_CXX='$wl-E' # Hack: On 
Interix 3.x, we cannot compile PIC because of a broken gcc. # Instead, shared libraries are loaded at an image base (0x10000000 by # default) and relocated if they conflict, which is a slow very memory # consuming and fragmenting process. To avoid this, we pick a random, # 256 KiB-aligned image base between 0x50000000 and 0x6FFC0000 at link # time. Moving up from 0x10000000 also allows more sbrk(2) space. archive_cmds_CXX='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags $wl-h,$soname $wl--image-base,`expr ${RANDOM-$$} % 4096 / 2 \* 262144 + 1342177280` -o $lib' archive_expsym_cmds_CXX='sed "s|^|_|" $export_symbols >$output_objdir/$soname.expsym~$CC -shared $pic_flag $libobjs $deplibs $compiler_flags $wl-h,$soname $wl--retain-symbols-file,$output_objdir/$soname.expsym $wl--image-base,`expr ${RANDOM-$$} % 4096 / 2 \* 262144 + 1342177280` -o $lib' ;; irix5* | irix6*) case $cc_basename in CC*) # SGI C++ archive_cmds_CXX='$CC -shared -all -multigot $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry $output_objdir/so_locations -o $lib' # Archives containing C++ object files must be created using # "CC -ar", where "CC" is the IRIX C++ compiler. This is # necessary to make sure instantiated templates are included # in the archive. 
old_archive_cmds_CXX='$CC -ar -WR,-u -o $oldlib $oldobjs' ;; *) if test yes = "$GXX"; then if test no = "$with_gnu_ld"; then archive_cmds_CXX='$CC -shared $pic_flag -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags $wl-soname $wl$soname `test -n "$verstring" && func_echo_all "$wl-set_version $wl$verstring"` $wl-update_registry $wl$output_objdir/so_locations -o $lib' else archive_cmds_CXX='$CC -shared $pic_flag -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags $wl-soname $wl$soname `test -n "$verstring" && func_echo_all "$wl-set_version $wl$verstring"` -o $lib' fi fi link_all_deplibs_CXX=yes ;; esac hardcode_libdir_flag_spec_CXX='$wl-rpath $wl$libdir' hardcode_libdir_separator_CXX=: inherit_rpath_CXX=yes ;; linux* | k*bsd*-gnu | kopensolaris*-gnu | gnu*) case $cc_basename in KCC*) # Kuck and Associates, Inc. (KAI) C++ Compiler # KCC will only create a shared library if the output file # ends with ".so" (or ".sl" for HP-UX), so rename the library # to its proper name (with version) after linking. archive_cmds_CXX='tempext=`echo $shared_ext | $SED -e '\''s/\([^()0-9A-Za-z{}]\)/\\\\\1/g'\''`; templib=`echo $lib | $SED -e "s/\$tempext\..*/.so/"`; $CC $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags --soname $soname -o \$templib; mv \$templib $lib' archive_expsym_cmds_CXX='tempext=`echo $shared_ext | $SED -e '\''s/\([^()0-9A-Za-z{}]\)/\\\\\1/g'\''`; templib=`echo $lib | $SED -e "s/\$tempext\..*/.so/"`; $CC $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags --soname $soname -o \$templib $wl-retain-symbols-file,$export_symbols; mv \$templib $lib' # Commands to make compiler produce verbose output that lists # what "hidden" libraries, object files and flags are used when # linking a shared library. 
# # There doesn't appear to be a way to prevent this compiler from # explicitly linking system object files so we need to strip them # from the output so that they don't get included in the library # dependencies. output_verbose_link_cmd='templist=`$CC $CFLAGS -v conftest.$objext -o libconftest$shared_ext 2>&1 | $GREP "ld"`; rm -f libconftest$shared_ext; list= ; for z in $templist; do case $z in conftest.$objext) list="$list $z";; *.$objext);; *) list="$list $z";;esac; done; func_echo_all "$list"' hardcode_libdir_flag_spec_CXX='$wl-rpath,$libdir' export_dynamic_flag_spec_CXX='$wl--export-dynamic' # Archives containing C++ object files must be created using # "CC -Bstatic", where "CC" is the KAI C++ compiler. old_archive_cmds_CXX='$CC -Bstatic -o $oldlib $oldobjs' ;; icpc* | ecpc* ) # Intel C++ with_gnu_ld=yes # version 8.0 and above of icpc choke on multiply defined symbols # if we add $predep_objects and $postdep_objects, however 7.1 and # earlier do not add the objects themselves. case `$CC -V 2>&1` in *"Version 7."*) archive_cmds_CXX='$CC -shared $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags $wl-soname $wl$soname -o $lib' archive_expsym_cmds_CXX='$CC -shared $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags $wl-soname $wl$soname $wl-retain-symbols-file $wl$export_symbols -o $lib' ;; *) # Version 8.0 or newer tmp_idyn= case $host_cpu in ia64*) tmp_idyn=' -i_dynamic';; esac archive_cmds_CXX='$CC -shared'"$tmp_idyn"' $libobjs $deplibs $compiler_flags $wl-soname $wl$soname -o $lib' archive_expsym_cmds_CXX='$CC -shared'"$tmp_idyn"' $libobjs $deplibs $compiler_flags $wl-soname $wl$soname $wl-retain-symbols-file $wl$export_symbols -o $lib' ;; esac archive_cmds_need_lc_CXX=no hardcode_libdir_flag_spec_CXX='$wl-rpath,$libdir' export_dynamic_flag_spec_CXX='$wl--export-dynamic' whole_archive_flag_spec_CXX='$wl--whole-archive$convenience $wl--no-whole-archive' ;; pgCC* | pgcpp*) # Portland Group C++ compiler case `$CC -V` in *pgCC\ 
[1-5].* | *pgcpp\ [1-5].*) prelink_cmds_CXX='tpldir=Template.dir~ rm -rf $tpldir~ $CC --prelink_objects --instantiation_dir $tpldir $objs $libobjs $compile_deplibs~ compile_command="$compile_command `find $tpldir -name \*.o | sort | $NL2SP`"' old_archive_cmds_CXX='tpldir=Template.dir~ rm -rf $tpldir~ $CC --prelink_objects --instantiation_dir $tpldir $oldobjs$old_deplibs~ $AR $AR_FLAGS $oldlib$oldobjs$old_deplibs `find $tpldir -name \*.o | sort | $NL2SP`~ $RANLIB $oldlib' archive_cmds_CXX='tpldir=Template.dir~ rm -rf $tpldir~ $CC --prelink_objects --instantiation_dir $tpldir $predep_objects $libobjs $deplibs $convenience $postdep_objects~ $CC -shared $pic_flag $predep_objects $libobjs $deplibs `find $tpldir -name \*.o | sort | $NL2SP` $postdep_objects $compiler_flags $wl-soname $wl$soname -o $lib' archive_expsym_cmds_CXX='tpldir=Template.dir~ rm -rf $tpldir~ $CC --prelink_objects --instantiation_dir $tpldir $predep_objects $libobjs $deplibs $convenience $postdep_objects~ $CC -shared $pic_flag $predep_objects $libobjs $deplibs `find $tpldir -name \*.o | sort | $NL2SP` $postdep_objects $compiler_flags $wl-soname $wl$soname $wl-retain-symbols-file $wl$export_symbols -o $lib' ;; *) # Version 6 and above use weak symbols archive_cmds_CXX='$CC -shared $pic_flag $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags $wl-soname $wl$soname -o $lib' archive_expsym_cmds_CXX='$CC -shared $pic_flag $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags $wl-soname $wl$soname $wl-retain-symbols-file $wl$export_symbols -o $lib' ;; esac hardcode_libdir_flag_spec_CXX='$wl--rpath $wl$libdir' export_dynamic_flag_spec_CXX='$wl--export-dynamic' whole_archive_flag_spec_CXX='$wl--whole-archive`for conv in $convenience\"\"; do test -n \"$conv\" && new_convenience=\"$new_convenience,$conv\"; done; func_echo_all \"$new_convenience\"` $wl--no-whole-archive' ;; cxx*) # Compaq C++ archive_cmds_CXX='$CC -shared $predep_objects $libobjs $deplibs $postdep_objects 
$compiler_flags $wl-soname $wl$soname -o $lib' archive_expsym_cmds_CXX='$CC -shared $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags $wl-soname $wl$soname -o $lib $wl-retain-symbols-file $wl$export_symbols' runpath_var=LD_RUN_PATH hardcode_libdir_flag_spec_CXX='-rpath $libdir' hardcode_libdir_separator_CXX=: # Commands to make compiler produce verbose output that lists # what "hidden" libraries, object files and flags are used when # linking a shared library. # # There doesn't appear to be a way to prevent this compiler from # explicitly linking system object files so we need to strip them # from the output so that they don't get included in the library # dependencies. output_verbose_link_cmd='templist=`$CC -shared $CFLAGS -v conftest.$objext 2>&1 | $GREP "ld"`; templist=`func_echo_all "$templist" | $SED "s/\(^.*ld.*\)\( .*ld .*$\)/\1/"`; list= ; for z in $templist; do case $z in conftest.$objext) list="$list $z";; *.$objext);; *) list="$list $z";;esac; done; func_echo_all "X$list" | $Xsed' ;; xl* | mpixl* | bgxl*) # IBM XL 8.0 on PPC, with GNU ld hardcode_libdir_flag_spec_CXX='$wl-rpath $wl$libdir' export_dynamic_flag_spec_CXX='$wl--export-dynamic' archive_cmds_CXX='$CC -qmkshrobj $libobjs $deplibs $compiler_flags $wl-soname $wl$soname -o $lib' if test yes = "$supports_anon_versioning"; then archive_expsym_cmds_CXX='echo "{ global:" > $output_objdir/$libname.ver~ cat $export_symbols | sed -e "s/\(.*\)/\1;/" >> $output_objdir/$libname.ver~ echo "local: *; };" >> $output_objdir/$libname.ver~ $CC -qmkshrobj $libobjs $deplibs $compiler_flags $wl-soname $wl$soname $wl-version-script $wl$output_objdir/$libname.ver -o $lib' fi ;; *) case `$CC -V 2>&1 | sed 5q` in *Sun\ C*) # Sun C++ 5.9 no_undefined_flag_CXX=' -zdefs' archive_cmds_CXX='$CC -G$allow_undefined_flag -h$soname -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags' archive_expsym_cmds_CXX='$CC -G$allow_undefined_flag -h$soname -o $lib $predep_objects $libobjs $deplibs 
$postdep_objects $compiler_flags $wl-retain-symbols-file $wl$export_symbols' hardcode_libdir_flag_spec_CXX='-R$libdir' whole_archive_flag_spec_CXX='$wl--whole-archive`new_convenience=; for conv in $convenience\"\"; do test -z \"$conv\" || new_convenience=\"$new_convenience,$conv\"; done; func_echo_all \"$new_convenience\"` $wl--no-whole-archive' compiler_needs_object_CXX=yes # Not sure whether something based on # $CC $CFLAGS -v conftest.$objext -o libconftest$shared_ext 2>&1 # would be better. output_verbose_link_cmd='func_echo_all' # Archives containing C++ object files must be created using # "CC -xar", where "CC" is the Sun C++ compiler. This is # necessary to make sure instantiated templates are included # in the archive. old_archive_cmds_CXX='$CC -xar -o $oldlib $oldobjs' ;; esac ;; esac ;; lynxos*) # FIXME: insert proper C++ library support ld_shlibs_CXX=no ;; m88k*) # FIXME: insert proper C++ library support ld_shlibs_CXX=no ;; mvs*) case $cc_basename in cxx*) # FIXME: insert proper C++ library support ld_shlibs_CXX=no ;; *) # FIXME: insert proper C++ library support ld_shlibs_CXX=no ;; esac ;; netbsd*) if echo __ELF__ | $CC -E - | $GREP __ELF__ >/dev/null; then archive_cmds_CXX='$LD -Bshareable -o $lib $predep_objects $libobjs $deplibs $postdep_objects $linker_flags' wlarc= hardcode_libdir_flag_spec_CXX='-R$libdir' hardcode_direct_CXX=yes hardcode_shlibpath_var_CXX=no fi # Workaround some broken pre-1.5 toolchains output_verbose_link_cmd='$CC -shared $CFLAGS -v conftest.$objext 2>&1 | $GREP conftest.$objext | $SED -e "s:-lgcc -lc -lgcc::"' ;; *nto* | *qnx*) ld_shlibs_CXX=yes ;; openbsd* | bitrig*) if test -f /usr/libexec/ld.so; then hardcode_direct_CXX=yes hardcode_shlibpath_var_CXX=no hardcode_direct_absolute_CXX=yes archive_cmds_CXX='$CC -shared $pic_flag $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags -o $lib' hardcode_libdir_flag_spec_CXX='$wl-rpath,$libdir' if test -z "`echo __ELF__ | $CC -E - | grep __ELF__`"; then 
archive_expsym_cmds_CXX='$CC -shared $pic_flag $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags $wl-retain-symbols-file,$export_symbols -o $lib' export_dynamic_flag_spec_CXX='$wl-E' whole_archive_flag_spec_CXX=$wlarc'--whole-archive$convenience '$wlarc'--no-whole-archive' fi output_verbose_link_cmd=func_echo_all else ld_shlibs_CXX=no fi ;; osf3* | osf4* | osf5*) case $cc_basename in KCC*) # Kuck and Associates, Inc. (KAI) C++ Compiler # KCC will only create a shared library if the output file # ends with ".so" (or ".sl" for HP-UX), so rename the library # to its proper name (with version) after linking. archive_cmds_CXX='tempext=`echo $shared_ext | $SED -e '\''s/\([^()0-9A-Za-z{}]\)/\\\\\1/g'\''`; templib=`echo "$lib" | $SED -e "s/\$tempext\..*/.so/"`; $CC $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags --soname $soname -o \$templib; mv \$templib $lib' hardcode_libdir_flag_spec_CXX='$wl-rpath,$libdir' hardcode_libdir_separator_CXX=: # Archives containing C++ object files must be created using # the KAI C++ compiler. 
case $host in osf3*) old_archive_cmds_CXX='$CC -Bstatic -o $oldlib $oldobjs' ;; *) old_archive_cmds_CXX='$CC -o $oldlib $oldobjs' ;; esac ;; RCC*) # Rational C++ 2.4.1 # FIXME: insert proper C++ library support ld_shlibs_CXX=no ;; cxx*) case $host in osf3*) allow_undefined_flag_CXX=' $wl-expect_unresolved $wl\*' archive_cmds_CXX='$CC -shared$allow_undefined_flag $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags $wl-soname $soname `test -n "$verstring" && func_echo_all "$wl-set_version $verstring"` -update_registry $output_objdir/so_locations -o $lib' hardcode_libdir_flag_spec_CXX='$wl-rpath $wl$libdir' ;; *) allow_undefined_flag_CXX=' -expect_unresolved \*' archive_cmds_CXX='$CC -shared$allow_undefined_flag $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags -msym -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry $output_objdir/so_locations -o $lib' archive_expsym_cmds_CXX='for i in `cat $export_symbols`; do printf "%s %s\\n" -exported_symbol "\$i" >> $lib.exp; done~ echo "-hidden">> $lib.exp~ $CC -shared$allow_undefined_flag $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags -msym -soname $soname $wl-input $wl$lib.exp `test -n "$verstring" && $ECHO "-set_version $verstring"` -update_registry $output_objdir/so_locations -o $lib~ $RM $lib.exp' hardcode_libdir_flag_spec_CXX='-rpath $libdir' ;; esac hardcode_libdir_separator_CXX=: # Commands to make compiler produce verbose output that lists # what "hidden" libraries, object files and flags are used when # linking a shared library. # # There doesn't appear to be a way to prevent this compiler from # explicitly linking system object files so we need to strip them # from the output so that they don't get included in the library # dependencies. 
output_verbose_link_cmd='templist=`$CC -shared $CFLAGS -v conftest.$objext 2>&1 | $GREP "ld" | $GREP -v "ld:"`; templist=`func_echo_all "$templist" | $SED "s/\(^.*ld.*\)\( .*ld.*$\)/\1/"`; list= ; for z in $templist; do case $z in conftest.$objext) list="$list $z";; *.$objext);; *) list="$list $z";;esac; done; func_echo_all "$list"' ;; *) if test yes,no = "$GXX,$with_gnu_ld"; then allow_undefined_flag_CXX=' $wl-expect_unresolved $wl\*' case $host in osf3*) archive_cmds_CXX='$CC -shared -nostdlib $allow_undefined_flag $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags $wl-soname $wl$soname `test -n "$verstring" && func_echo_all "$wl-set_version $wl$verstring"` $wl-update_registry $wl$output_objdir/so_locations -o $lib' ;; *) archive_cmds_CXX='$CC -shared $pic_flag -nostdlib $allow_undefined_flag $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags $wl-msym $wl-soname $wl$soname `test -n "$verstring" && func_echo_all "$wl-set_version $wl$verstring"` $wl-update_registry $wl$output_objdir/so_locations -o $lib' ;; esac hardcode_libdir_flag_spec_CXX='$wl-rpath $wl$libdir' hardcode_libdir_separator_CXX=: # Commands to make compiler produce verbose output that lists # what "hidden" libraries, object files and flags are used when # linking a shared library. 
output_verbose_link_cmd='$CC -shared $CFLAGS -v conftest.$objext 2>&1 | $GREP -v "^Configured with:" | $GREP "\-L"' else # FIXME: insert proper C++ library support ld_shlibs_CXX=no fi ;; esac ;; psos*) # FIXME: insert proper C++ library support ld_shlibs_CXX=no ;; sunos4*) case $cc_basename in CC*) # Sun C++ 4.x # FIXME: insert proper C++ library support ld_shlibs_CXX=no ;; lcc*) # Lucid # FIXME: insert proper C++ library support ld_shlibs_CXX=no ;; *) # FIXME: insert proper C++ library support ld_shlibs_CXX=no ;; esac ;; solaris*) case $cc_basename in CC* | sunCC*) # Sun C++ 4.2, 5.x and Centerline C++ archive_cmds_need_lc_CXX=yes no_undefined_flag_CXX=' -zdefs' archive_cmds_CXX='$CC -G$allow_undefined_flag -h$soname -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags' archive_expsym_cmds_CXX='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~ $CC -G$allow_undefined_flag $wl-M $wl$lib.exp -h$soname -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags~$RM $lib.exp' hardcode_libdir_flag_spec_CXX='-R$libdir' hardcode_shlibpath_var_CXX=no case $host_os in solaris2.[0-5] | solaris2.[0-5].*) ;; *) # The compiler driver will combine and reorder linker options, # but understands '-z linker_flag'. # Supported since Solaris 2.6 (maybe 2.5.1?) whole_archive_flag_spec_CXX='-z allextract$convenience -z defaultextract' ;; esac link_all_deplibs_CXX=yes output_verbose_link_cmd='func_echo_all' # Archives containing C++ object files must be created using # "CC -xar", where "CC" is the Sun C++ compiler. This is # necessary to make sure instantiated templates are included # in the archive. old_archive_cmds_CXX='$CC -xar -o $oldlib $oldobjs' ;; gcx*) # Green Hills C++ Compiler archive_cmds_CXX='$CC -shared $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags $wl-h $wl$soname -o $lib' # The C++ compiler must be used to create the archive. 
old_archive_cmds_CXX='$CC $LDFLAGS -archive -o $oldlib $oldobjs' ;; *) # GNU C++ compiler with Solaris linker if test yes,no = "$GXX,$with_gnu_ld"; then no_undefined_flag_CXX=' $wl-z ${wl}defs' if $CC --version | $GREP -v '^2\.7' > /dev/null; then archive_cmds_CXX='$CC -shared $pic_flag -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags $wl-h $wl$soname -o $lib' archive_expsym_cmds_CXX='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~ $CC -shared $pic_flag -nostdlib $wl-M $wl$lib.exp $wl-h $wl$soname -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags~$RM $lib.exp' # Commands to make compiler produce verbose output that lists # what "hidden" libraries, object files and flags are used when # linking a shared library. output_verbose_link_cmd='$CC -shared $CFLAGS -v conftest.$objext 2>&1 | $GREP -v "^Configured with:" | $GREP "\-L"' else # g++ 2.7 appears to require '-G' NOT '-shared' on this # platform. archive_cmds_CXX='$CC -G -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags $wl-h $wl$soname -o $lib' archive_expsym_cmds_CXX='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~ $CC -G -nostdlib $wl-M $wl$lib.exp $wl-h $wl$soname -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags~$RM $lib.exp' # Commands to make compiler produce verbose output that lists # what "hidden" libraries, object files and flags are used when # linking a shared library. 
output_verbose_link_cmd='$CC -G $CFLAGS -v conftest.$objext 2>&1 | $GREP -v "^Configured with:" | $GREP "\-L"' fi hardcode_libdir_flag_spec_CXX='$wl-R $wl$libdir' case $host_os in solaris2.[0-5] | solaris2.[0-5].*) ;; *) whole_archive_flag_spec_CXX='$wl-z ${wl}allextract$convenience $wl-z ${wl}defaultextract' ;; esac fi ;; esac ;; sysv4*uw2* | sysv5OpenUNIX* | sysv5UnixWare7.[01].[10]* | unixware7* | sco3.2v5.0.[024]*) no_undefined_flag_CXX='$wl-z,text' archive_cmds_need_lc_CXX=no hardcode_shlibpath_var_CXX=no runpath_var='LD_RUN_PATH' case $cc_basename in CC*) archive_cmds_CXX='$CC -G $wl-h,$soname -o $lib $libobjs $deplibs $compiler_flags' archive_expsym_cmds_CXX='$CC -G $wl-Bexport:$export_symbols $wl-h,$soname -o $lib $libobjs $deplibs $compiler_flags' ;; *) archive_cmds_CXX='$CC -shared $wl-h,$soname -o $lib $libobjs $deplibs $compiler_flags' archive_expsym_cmds_CXX='$CC -shared $wl-Bexport:$export_symbols $wl-h,$soname -o $lib $libobjs $deplibs $compiler_flags' ;; esac ;; sysv5* | sco3.2v5* | sco5v6*) # Note: We CANNOT use -z defs as we might desire, because we do not # link with -lc, and that would cause any symbols used from libc to # always be unresolved, which means just about no library would # ever link correctly. If we're not using GNU ld we use -z text # though, which does catch some bad symbols but isn't as heavy-handed # as -z defs. 
no_undefined_flag_CXX='$wl-z,text' allow_undefined_flag_CXX='$wl-z,nodefs' archive_cmds_need_lc_CXX=no hardcode_shlibpath_var_CXX=no hardcode_libdir_flag_spec_CXX='$wl-R,$libdir' hardcode_libdir_separator_CXX=':' link_all_deplibs_CXX=yes export_dynamic_flag_spec_CXX='$wl-Bexport' runpath_var='LD_RUN_PATH' case $cc_basename in CC*) archive_cmds_CXX='$CC -G $wl-h,$soname -o $lib $libobjs $deplibs $compiler_flags' archive_expsym_cmds_CXX='$CC -G $wl-Bexport:$export_symbols $wl-h,$soname -o $lib $libobjs $deplibs $compiler_flags' old_archive_cmds_CXX='$CC -Tprelink_objects $oldobjs~ '"$old_archive_cmds_CXX" reload_cmds_CXX='$CC -Tprelink_objects $reload_objs~ '"$reload_cmds_CXX" ;; *) archive_cmds_CXX='$CC -shared $wl-h,$soname -o $lib $libobjs $deplibs $compiler_flags' archive_expsym_cmds_CXX='$CC -shared $wl-Bexport:$export_symbols $wl-h,$soname -o $lib $libobjs $deplibs $compiler_flags' ;; esac ;; tandem*) case $cc_basename in NCC*) # NonStop-UX NCC 3.20 # FIXME: insert proper C++ library support ld_shlibs_CXX=no ;; *) # FIXME: insert proper C++ library support ld_shlibs_CXX=no ;; esac ;; vxworks*) # FIXME: insert proper C++ library support ld_shlibs_CXX=no ;; *) # FIXME: insert proper C++ library support ld_shlibs_CXX=no ;; esac { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ld_shlibs_CXX" >&5 $as_echo "$ld_shlibs_CXX" >&6; } test no = "$ld_shlibs_CXX" && can_build_shared=no GCC_CXX=$GXX LD_CXX=$LD ## CAVEAT EMPTOR: ## There is no encapsulation within the following macros, do not change ## the running order or otherwise move them around unless you know exactly ## what you are doing... 
# Dependencies to place before and after the object being linked: predep_objects_CXX= postdep_objects_CXX= predeps_CXX= postdeps_CXX= compiler_lib_search_path_CXX= cat > conftest.$ac_ext <<_LT_EOF class Foo { public: Foo (void) { a = 0; } private: int a; }; _LT_EOF _lt_libdeps_save_CFLAGS=$CFLAGS case "$CC $CFLAGS " in #( *\ -flto*\ *) CFLAGS="$CFLAGS -fno-lto" ;; *\ -fwhopr*\ *) CFLAGS="$CFLAGS -fno-whopr" ;; *\ -fuse-linker-plugin*\ *) CFLAGS="$CFLAGS -fno-use-linker-plugin" ;; esac if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_compile\""; } >&5 (eval $ac_compile) 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; }; then # Parse the compiler output and extract the necessary # objects, libraries and library flags. # Sentinel used to keep track of whether or not we are before # the conftest object file. pre_test_object_deps_done=no for p in `eval "$output_verbose_link_cmd"`; do case $prev$p in -L* | -R* | -l*) # Some compilers place space between "-{L,R}" and the path. # Remove the space. if test x-L = "$p" || test x-R = "$p"; then prev=$p continue fi # Expand the sysroot to ease extracting the directories later. if test -z "$prev"; then case $p in -L*) func_stripname_cnf '-L' '' "$p"; prev=-L; p=$func_stripname_result ;; -R*) func_stripname_cnf '-R' '' "$p"; prev=-R; p=$func_stripname_result ;; -l*) func_stripname_cnf '-l' '' "$p"; prev=-l; p=$func_stripname_result ;; esac fi case $p in =*) func_stripname_cnf '=' '' "$p"; p=$lt_sysroot$func_stripname_result ;; esac if test no = "$pre_test_object_deps_done"; then case $prev in -L | -R) # Internal compiler library paths should come after those # provided the user. The postdeps already come after the # user supplied libs so there is no need to process them. 
if test -z "$compiler_lib_search_path_CXX"; then compiler_lib_search_path_CXX=$prev$p else compiler_lib_search_path_CXX="${compiler_lib_search_path_CXX} $prev$p" fi ;; # The "-l" case would never come before the object being # linked, so don't bother handling this case. esac else if test -z "$postdeps_CXX"; then postdeps_CXX=$prev$p else postdeps_CXX="${postdeps_CXX} $prev$p" fi fi prev= ;; *.lto.$objext) ;; # Ignore GCC LTO objects *.$objext) # This assumes that the test object file only shows up # once in the compiler output. if test "$p" = "conftest.$objext"; then pre_test_object_deps_done=yes continue fi if test no = "$pre_test_object_deps_done"; then if test -z "$predep_objects_CXX"; then predep_objects_CXX=$p else predep_objects_CXX="$predep_objects_CXX $p" fi else if test -z "$postdep_objects_CXX"; then postdep_objects_CXX=$p else postdep_objects_CXX="$postdep_objects_CXX $p" fi fi ;; *) ;; # Ignore the rest. esac done # Clean up. rm -f a.out a.exe else echo "libtool.m4: error: problem compiling CXX test program" fi $RM -f confest.$objext CFLAGS=$_lt_libdeps_save_CFLAGS # PORTME: override above test on systems where it is broken case $host_os in interix[3-9]*) # Interix 3.5 installs completely hosed .la files for C++, so rather than # hack all around it, let's just trust "g++" to DTRT. predep_objects_CXX= postdep_objects_CXX= postdeps_CXX= ;; esac case " $postdeps_CXX " in *" -lc "*) archive_cmds_need_lc_CXX=no ;; esac compiler_lib_search_dirs_CXX= if test -n "${compiler_lib_search_path_CXX}"; then compiler_lib_search_dirs_CXX=`echo " ${compiler_lib_search_path_CXX}" | $SED -e 's! -L! !g' -e 's!^ !!'` fi lt_prog_compiler_wl_CXX= lt_prog_compiler_pic_CXX= lt_prog_compiler_static_CXX= # C++ specific cases for pic, static, wl, etc. if test yes = "$GXX"; then lt_prog_compiler_wl_CXX='-Wl,' lt_prog_compiler_static_CXX='-static' case $host_os in aix*) # All AIX code is PIC. 
if test ia64 = "$host_cpu"; then # AIX 5 now supports IA64 processor lt_prog_compiler_static_CXX='-Bstatic' fi lt_prog_compiler_pic_CXX='-fPIC' ;; amigaos*) case $host_cpu in powerpc) # see comment about AmigaOS4 .so support lt_prog_compiler_pic_CXX='-fPIC' ;; m68k) # FIXME: we need at least 68020 code to build shared libraries, but # adding the '-m68020' flag to GCC prevents building anything better, # like '-m68040'. lt_prog_compiler_pic_CXX='-m68020 -resident32 -malways-restore-a4' ;; esac ;; beos* | irix5* | irix6* | nonstopux* | osf3* | osf4* | osf5*) # PIC is the default for these OSes. ;; mingw* | cygwin* | os2* | pw32* | cegcc*) # This hack is so that the source file can tell whether it is being # built for inclusion in a dll (and should export symbols for example). # Although the cygwin gcc ignores -fPIC, still need this for old-style # (--disable-auto-import) libraries lt_prog_compiler_pic_CXX='-DDLL_EXPORT' case $host_os in os2*) lt_prog_compiler_static_CXX='$wl-static' ;; esac ;; darwin* | rhapsody*) # PIC is the default on this platform # Common symbols not allowed in MH_DYLIB files lt_prog_compiler_pic_CXX='-fno-common' ;; *djgpp*) # DJGPP does not support shared libraries at all lt_prog_compiler_pic_CXX= ;; haiku*) # PIC is the default for Haiku. # The "-static" flag exists, but is broken. lt_prog_compiler_static_CXX= ;; interix[3-9]*) # Interix 3.x gcc -fpic/-fPIC options generate broken code. # Instead, we relocate shared libraries at runtime. ;; sysv4*MP*) if test -d /usr/nec; then lt_prog_compiler_pic_CXX=-Kconform_pic fi ;; hpux*) # PIC is the default for 64-bit PA HP-UX, but not for 32-bit # PA HP-UX. On IA64 HP-UX, PIC is the default but the pic flag # sets the default TLS model and affects inlining. case $host_cpu in hppa*64*) ;; *) lt_prog_compiler_pic_CXX='-fPIC' ;; esac ;; *qnx* | *nto*) # QNX uses GNU C++, but need to define -shared option too, otherwise # it will coredump. 
lt_prog_compiler_pic_CXX='-fPIC -shared' ;; *) lt_prog_compiler_pic_CXX='-fPIC' ;; esac else case $host_os in aix[4-9]*) # All AIX code is PIC. if test ia64 = "$host_cpu"; then # AIX 5 now supports IA64 processor lt_prog_compiler_static_CXX='-Bstatic' else lt_prog_compiler_static_CXX='-bnso -bI:/lib/syscalls.exp' fi ;; chorus*) case $cc_basename in cxch68*) # Green Hills C++ Compiler # _LT_TAGVAR(lt_prog_compiler_static, CXX)="--no_auto_instantiation -u __main -u __premain -u _abort -r $COOL_DIR/lib/libOrb.a $MVME_DIR/lib/CC/libC.a $MVME_DIR/lib/classix/libcx.s.a" ;; esac ;; mingw* | cygwin* | os2* | pw32* | cegcc*) # This hack is so that the source file can tell whether it is being # built for inclusion in a dll (and should export symbols for example). lt_prog_compiler_pic_CXX='-DDLL_EXPORT' ;; dgux*) case $cc_basename in ec++*) lt_prog_compiler_pic_CXX='-KPIC' ;; ghcx*) # Green Hills C++ Compiler lt_prog_compiler_pic_CXX='-pic' ;; *) ;; esac ;; freebsd* | dragonfly*) # FreeBSD uses GNU C++ ;; hpux9* | hpux10* | hpux11*) case $cc_basename in CC*) lt_prog_compiler_wl_CXX='-Wl,' lt_prog_compiler_static_CXX='$wl-a ${wl}archive' if test ia64 != "$host_cpu"; then lt_prog_compiler_pic_CXX='+Z' fi ;; aCC*) lt_prog_compiler_wl_CXX='-Wl,' lt_prog_compiler_static_CXX='$wl-a ${wl}archive' case $host_cpu in hppa*64*|ia64*) # +Z the default ;; *) lt_prog_compiler_pic_CXX='+Z' ;; esac ;; *) ;; esac ;; interix*) # This is c89, which is MS Visual C++ (no shared libs) # Anyone wants to do a port? ;; irix5* | irix6* | nonstopux*) case $cc_basename in CC*) lt_prog_compiler_wl_CXX='-Wl,' lt_prog_compiler_static_CXX='-non_shared' # CC pic flag -KPIC is the default. ;; *) ;; esac ;; linux* | k*bsd*-gnu | kopensolaris*-gnu | gnu*) case $cc_basename in KCC*) # KAI C++ Compiler lt_prog_compiler_wl_CXX='--backend -Wl,' lt_prog_compiler_pic_CXX='-fPIC' ;; ecpc* ) # old Intel C++ for x86_64, which still supported -KPIC. 
lt_prog_compiler_wl_CXX='-Wl,' lt_prog_compiler_pic_CXX='-KPIC' lt_prog_compiler_static_CXX='-static' ;; icpc* ) # Intel C++, used to be incompatible with GCC. # ICC 10 doesn't accept -KPIC any more. lt_prog_compiler_wl_CXX='-Wl,' lt_prog_compiler_pic_CXX='-fPIC' lt_prog_compiler_static_CXX='-static' ;; pgCC* | pgcpp*) # Portland Group C++ compiler lt_prog_compiler_wl_CXX='-Wl,' lt_prog_compiler_pic_CXX='-fpic' lt_prog_compiler_static_CXX='-Bstatic' ;; cxx*) # Compaq C++ # Make sure the PIC flag is empty. It appears that all Alpha # Linux and Compaq Tru64 Unix objects are PIC. lt_prog_compiler_pic_CXX= lt_prog_compiler_static_CXX='-non_shared' ;; xlc* | xlC* | bgxl[cC]* | mpixl[cC]*) # IBM XL 8.0, 9.0 on PPC and BlueGene lt_prog_compiler_wl_CXX='-Wl,' lt_prog_compiler_pic_CXX='-qpic' lt_prog_compiler_static_CXX='-qstaticlink' ;; *) case `$CC -V 2>&1 | sed 5q` in *Sun\ C*) # Sun C++ 5.9 lt_prog_compiler_pic_CXX='-KPIC' lt_prog_compiler_static_CXX='-Bstatic' lt_prog_compiler_wl_CXX='-Qoption ld ' ;; esac ;; esac ;; lynxos*) ;; m88k*) ;; mvs*) case $cc_basename in cxx*) lt_prog_compiler_pic_CXX='-W c,exportall' ;; *) ;; esac ;; netbsd*) ;; *qnx* | *nto*) # QNX uses GNU C++, but need to define -shared option too, otherwise # it will coredump. lt_prog_compiler_pic_CXX='-fPIC -shared' ;; osf3* | osf4* | osf5*) case $cc_basename in KCC*) lt_prog_compiler_wl_CXX='--backend -Wl,' ;; RCC*) # Rational C++ 2.4.1 lt_prog_compiler_pic_CXX='-pic' ;; cxx*) # Digital/Compaq C++ lt_prog_compiler_wl_CXX='-Wl,' # Make sure the PIC flag is empty. It appears that all Alpha # Linux and Compaq Tru64 Unix objects are PIC. 
lt_prog_compiler_pic_CXX= lt_prog_compiler_static_CXX='-non_shared' ;; *) ;; esac ;; psos*) ;; solaris*) case $cc_basename in CC* | sunCC*) # Sun C++ 4.2, 5.x and Centerline C++ lt_prog_compiler_pic_CXX='-KPIC' lt_prog_compiler_static_CXX='-Bstatic' lt_prog_compiler_wl_CXX='-Qoption ld ' ;; gcx*) # Green Hills C++ Compiler lt_prog_compiler_pic_CXX='-PIC' ;; *) ;; esac ;; sunos4*) case $cc_basename in CC*) # Sun C++ 4.x lt_prog_compiler_pic_CXX='-pic' lt_prog_compiler_static_CXX='-Bstatic' ;; lcc*) # Lucid lt_prog_compiler_pic_CXX='-pic' ;; *) ;; esac ;; sysv5* | unixware* | sco3.2v5* | sco5v6* | OpenUNIX*) case $cc_basename in CC*) lt_prog_compiler_wl_CXX='-Wl,' lt_prog_compiler_pic_CXX='-KPIC' lt_prog_compiler_static_CXX='-Bstatic' ;; esac ;; tandem*) case $cc_basename in NCC*) # NonStop-UX NCC 3.20 lt_prog_compiler_pic_CXX='-KPIC' ;; *) ;; esac ;; vxworks*) ;; *) lt_prog_compiler_can_build_shared_CXX=no ;; esac fi case $host_os in # For platforms that do not support PIC, -DPIC is meaningless: *djgpp*) lt_prog_compiler_pic_CXX= ;; *) lt_prog_compiler_pic_CXX="$lt_prog_compiler_pic_CXX -DPIC" ;; esac { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $compiler option to produce PIC" >&5 $as_echo_n "checking for $compiler option to produce PIC... " >&6; } if ${lt_cv_prog_compiler_pic_CXX+:} false; then : $as_echo_n "(cached) " >&6 else lt_cv_prog_compiler_pic_CXX=$lt_prog_compiler_pic_CXX fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_prog_compiler_pic_CXX" >&5 $as_echo "$lt_cv_prog_compiler_pic_CXX" >&6; } lt_prog_compiler_pic_CXX=$lt_cv_prog_compiler_pic_CXX # # Check to make sure the PIC flag actually works. # if test -n "$lt_prog_compiler_pic_CXX"; then { $as_echo "$as_me:${as_lineno-$LINENO}: checking if $compiler PIC flag $lt_prog_compiler_pic_CXX works" >&5 $as_echo_n "checking if $compiler PIC flag $lt_prog_compiler_pic_CXX works... 
" >&6; } if ${lt_cv_prog_compiler_pic_works_CXX+:} false; then : $as_echo_n "(cached) " >&6 else lt_cv_prog_compiler_pic_works_CXX=no ac_outfile=conftest.$ac_objext echo "$lt_simple_compile_test_code" > conftest.$ac_ext lt_compiler_flag="$lt_prog_compiler_pic_CXX -DPIC" ## exclude from sc_useless_quotes_in_assignment # Insert the option either (1) after the last *FLAGS variable, or # (2) before a word containing "conftest.", or (3) at the end. # Note that $ac_compile itself does not contain backslashes and begins # with a dollar sign (not a hyphen), so the echo should work correctly. # The option is referenced via a variable to avoid confusing sed. lt_compile=`echo "$ac_compile" | $SED \ -e 's:.*FLAGS}\{0,1\} :&$lt_compiler_flag :; t' \ -e 's: [^ ]*conftest\.: $lt_compiler_flag&:; t' \ -e 's:$: $lt_compiler_flag:'` (eval echo "\"\$as_me:$LINENO: $lt_compile\"" >&5) (eval "$lt_compile" 2>conftest.err) ac_status=$? cat conftest.err >&5 echo "$as_me:$LINENO: \$? = $ac_status" >&5 if (exit $ac_status) && test -s "$ac_outfile"; then # The compiler can only warn and ignore the option if not recognized # So say no if there are warnings other than the usual output. $ECHO "$_lt_compiler_boilerplate" | $SED '/^$/d' >conftest.exp $SED '/^$/d; /^ *+/d' conftest.err >conftest.er2 if test ! -s conftest.er2 || diff conftest.exp conftest.er2 >/dev/null; then lt_cv_prog_compiler_pic_works_CXX=yes fi fi $RM conftest* fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_prog_compiler_pic_works_CXX" >&5 $as_echo "$lt_cv_prog_compiler_pic_works_CXX" >&6; } if test yes = "$lt_cv_prog_compiler_pic_works_CXX"; then case $lt_prog_compiler_pic_CXX in "" | " "*) ;; *) lt_prog_compiler_pic_CXX=" $lt_prog_compiler_pic_CXX" ;; esac else lt_prog_compiler_pic_CXX= lt_prog_compiler_can_build_shared_CXX=no fi fi # # Check to make sure the static flag actually works. 
# wl=$lt_prog_compiler_wl_CXX eval lt_tmp_static_flag=\"$lt_prog_compiler_static_CXX\" { $as_echo "$as_me:${as_lineno-$LINENO}: checking if $compiler static flag $lt_tmp_static_flag works" >&5 $as_echo_n "checking if $compiler static flag $lt_tmp_static_flag works... " >&6; } if ${lt_cv_prog_compiler_static_works_CXX+:} false; then : $as_echo_n "(cached) " >&6 else lt_cv_prog_compiler_static_works_CXX=no save_LDFLAGS=$LDFLAGS LDFLAGS="$LDFLAGS $lt_tmp_static_flag" echo "$lt_simple_link_test_code" > conftest.$ac_ext if (eval $ac_link 2>conftest.err) && test -s conftest$ac_exeext; then # The linker can only warn and ignore the option if not recognized # So say no if there are warnings if test -s conftest.err; then # Append any errors to the config.log. cat conftest.err 1>&5 $ECHO "$_lt_linker_boilerplate" | $SED '/^$/d' > conftest.exp $SED '/^$/d; /^ *+/d' conftest.err >conftest.er2 if diff conftest.exp conftest.er2 >/dev/null; then lt_cv_prog_compiler_static_works_CXX=yes fi else lt_cv_prog_compiler_static_works_CXX=yes fi fi $RM -r conftest* LDFLAGS=$save_LDFLAGS fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_prog_compiler_static_works_CXX" >&5 $as_echo "$lt_cv_prog_compiler_static_works_CXX" >&6; } if test yes = "$lt_cv_prog_compiler_static_works_CXX"; then : else lt_prog_compiler_static_CXX= fi { $as_echo "$as_me:${as_lineno-$LINENO}: checking if $compiler supports -c -o file.$ac_objext" >&5 $as_echo_n "checking if $compiler supports -c -o file.$ac_objext... " >&6; } if ${lt_cv_prog_compiler_c_o_CXX+:} false; then : $as_echo_n "(cached) " >&6 else lt_cv_prog_compiler_c_o_CXX=no $RM -r conftest 2>/dev/null mkdir conftest cd conftest mkdir out echo "$lt_simple_compile_test_code" > conftest.$ac_ext lt_compiler_flag="-o out/conftest2.$ac_objext" # Insert the option either (1) after the last *FLAGS variable, or # (2) before a word containing "conftest.", or (3) at the end. 
# Note that $ac_compile itself does not contain backslashes and begins # with a dollar sign (not a hyphen), so the echo should work correctly. lt_compile=`echo "$ac_compile" | $SED \ -e 's:.*FLAGS}\{0,1\} :&$lt_compiler_flag :; t' \ -e 's: [^ ]*conftest\.: $lt_compiler_flag&:; t' \ -e 's:$: $lt_compiler_flag:'` (eval echo "\"\$as_me:$LINENO: $lt_compile\"" >&5) (eval "$lt_compile" 2>out/conftest.err) ac_status=$? cat out/conftest.err >&5 echo "$as_me:$LINENO: \$? = $ac_status" >&5 if (exit $ac_status) && test -s out/conftest2.$ac_objext then # The compiler can only warn and ignore the option if not recognized # So say no if there are warnings $ECHO "$_lt_compiler_boilerplate" | $SED '/^$/d' > out/conftest.exp $SED '/^$/d; /^ *+/d' out/conftest.err >out/conftest.er2 if test ! -s out/conftest.er2 || diff out/conftest.exp out/conftest.er2 >/dev/null; then lt_cv_prog_compiler_c_o_CXX=yes fi fi chmod u+w . 2>&5 $RM conftest* # SGI C++ compiler will create directory out/ii_files/ for # template instantiation test -d out/ii_files && $RM out/ii_files/* && rmdir out/ii_files $RM out/* && rmdir out cd .. $RM -r conftest $RM conftest* fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_prog_compiler_c_o_CXX" >&5 $as_echo "$lt_cv_prog_compiler_c_o_CXX" >&6; } { $as_echo "$as_me:${as_lineno-$LINENO}: checking if $compiler supports -c -o file.$ac_objext" >&5 $as_echo_n "checking if $compiler supports -c -o file.$ac_objext... " >&6; } if ${lt_cv_prog_compiler_c_o_CXX+:} false; then : $as_echo_n "(cached) " >&6 else lt_cv_prog_compiler_c_o_CXX=no $RM -r conftest 2>/dev/null mkdir conftest cd conftest mkdir out echo "$lt_simple_compile_test_code" > conftest.$ac_ext lt_compiler_flag="-o out/conftest2.$ac_objext" # Insert the option either (1) after the last *FLAGS variable, or # (2) before a word containing "conftest.", or (3) at the end. 
# Note that $ac_compile itself does not contain backslashes and begins # with a dollar sign (not a hyphen), so the echo should work correctly. lt_compile=`echo "$ac_compile" | $SED \ -e 's:.*FLAGS}\{0,1\} :&$lt_compiler_flag :; t' \ -e 's: [^ ]*conftest\.: $lt_compiler_flag&:; t' \ -e 's:$: $lt_compiler_flag:'` (eval echo "\"\$as_me:$LINENO: $lt_compile\"" >&5) (eval "$lt_compile" 2>out/conftest.err) ac_status=$? cat out/conftest.err >&5 echo "$as_me:$LINENO: \$? = $ac_status" >&5 if (exit $ac_status) && test -s out/conftest2.$ac_objext then # The compiler can only warn and ignore the option if not recognized # So say no if there are warnings $ECHO "$_lt_compiler_boilerplate" | $SED '/^$/d' > out/conftest.exp $SED '/^$/d; /^ *+/d' out/conftest.err >out/conftest.er2 if test ! -s out/conftest.er2 || diff out/conftest.exp out/conftest.er2 >/dev/null; then lt_cv_prog_compiler_c_o_CXX=yes fi fi chmod u+w . 2>&5 $RM conftest* # SGI C++ compiler will create directory out/ii_files/ for # template instantiation test -d out/ii_files && $RM out/ii_files/* && rmdir out/ii_files $RM out/* && rmdir out cd .. $RM -r conftest $RM conftest* fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_prog_compiler_c_o_CXX" >&5 $as_echo "$lt_cv_prog_compiler_c_o_CXX" >&6; } hard_links=nottested if test no = "$lt_cv_prog_compiler_c_o_CXX" && test no != "$need_locks"; then # do not overwrite the value of need_locks provided by the user { $as_echo "$as_me:${as_lineno-$LINENO}: checking if we can lock with hard links" >&5 $as_echo_n "checking if we can lock with hard links... 
" >&6; } hard_links=yes $RM conftest* ln conftest.a conftest.b 2>/dev/null && hard_links=no touch conftest.a ln conftest.a conftest.b 2>&5 || hard_links=no ln conftest.a conftest.b 2>/dev/null && hard_links=no { $as_echo "$as_me:${as_lineno-$LINENO}: result: $hard_links" >&5 $as_echo "$hard_links" >&6; } if test no = "$hard_links"; then { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: '$CC' does not support '-c -o', so 'make -j' may be unsafe" >&5 $as_echo "$as_me: WARNING: '$CC' does not support '-c -o', so 'make -j' may be unsafe" >&2;} need_locks=warn fi else need_locks=no fi { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether the $compiler linker ($LD) supports shared libraries" >&5 $as_echo_n "checking whether the $compiler linker ($LD) supports shared libraries... " >&6; } export_symbols_cmds_CXX='$NM $libobjs $convenience | $global_symbol_pipe | $SED '\''s/.* //'\'' | sort | uniq > $export_symbols' exclude_expsyms_CXX='_GLOBAL_OFFSET_TABLE_|_GLOBAL__F[ID]_.*' case $host_os in aix[4-9]*) # If we're using GNU nm, then we don't want the "-C" option. # -C means demangle to GNU nm, but means don't demangle to AIX nm. # Without the "-l" option, or with the "-B" option, AIX nm treats # weak defined symbols like other global defined symbols, whereas # GNU nm marks them as "W". # While the 'weak' keyword is ignored in the Export File, we need # it in the Import File for the 'aix-soname' feature, so we have # to replace the "-B" option with "-P" for AIX nm. 
if $NM -V 2>&1 | $GREP 'GNU' > /dev/null; then export_symbols_cmds_CXX='$NM -Bpg $libobjs $convenience | awk '\''{ if (((\$ 2 == "T") || (\$ 2 == "D") || (\$ 2 == "B") || (\$ 2 == "W")) && (substr(\$ 3,1,1) != ".")) { if (\$ 2 == "W") { print \$ 3 " weak" } else { print \$ 3 } } }'\'' | sort -u > $export_symbols' else export_symbols_cmds_CXX='`func_echo_all $NM | $SED -e '\''s/B\([^B]*\)$/P\1/'\''` -PCpgl $libobjs $convenience | awk '\''{ if (((\$ 2 == "T") || (\$ 2 == "D") || (\$ 2 == "B") || (\$ 2 == "W") || (\$ 2 == "V") || (\$ 2 == "Z")) && (substr(\$ 1,1,1) != ".")) { if ((\$ 2 == "W") || (\$ 2 == "V") || (\$ 2 == "Z")) { print \$ 1 " weak" } else { print \$ 1 } } }'\'' | sort -u > $export_symbols' fi ;; pw32*) export_symbols_cmds_CXX=$ltdll_cmds ;; cygwin* | mingw* | cegcc*) case $cc_basename in cl*) exclude_expsyms_CXX='_NULL_IMPORT_DESCRIPTOR|_IMPORT_DESCRIPTOR_.*' ;; *) export_symbols_cmds_CXX='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[BCDGRS][ ]/s/.*[ ]\([^ ]*\)/\1 DATA/;s/^.*[ ]__nm__\([^ ]*\)[ ][^ ]*/\1 DATA/;/^I[ ]/d;/^[AITW][ ]/s/.* //'\'' | sort | uniq > $export_symbols' exclude_expsyms_CXX='[_]+GLOBAL_OFFSET_TABLE_|[_]+GLOBAL__[FID]_.*|[_]+head_[A-Za-z0-9_]+_dll|[A-Za-z0-9_]+_dll_iname' ;; esac ;; *) export_symbols_cmds_CXX='$NM $libobjs $convenience | $global_symbol_pipe | $SED '\''s/.* //'\'' | sort | uniq > $export_symbols' ;; esac { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ld_shlibs_CXX" >&5 $as_echo "$ld_shlibs_CXX" >&6; } test no = "$ld_shlibs_CXX" && can_build_shared=no with_gnu_ld_CXX=$with_gnu_ld # # Do we need to explicitly link libc? # case "x$archive_cmds_need_lc_CXX" in x|xyes) # Assume -lc should be added archive_cmds_need_lc_CXX=yes if test yes,yes = "$GCC,$enable_shared"; then case $archive_cmds_CXX in *'~'*) # FIXME: we may have to deal with multi-command sequences. ;; '$CC '*) # Test whether the compiler implicitly links with -lc since on some # systems, -lgcc has to come before -lc. 
If gcc already passes -lc # to ld, don't add -lc before -lgcc. { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether -lc should be explicitly linked in" >&5 $as_echo_n "checking whether -lc should be explicitly linked in... " >&6; } if ${lt_cv_archive_cmds_need_lc_CXX+:} false; then : $as_echo_n "(cached) " >&6 else $RM conftest* echo "$lt_simple_compile_test_code" > conftest.$ac_ext if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_compile\""; } >&5 (eval $ac_compile) 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; } 2>conftest.err; then soname=conftest lib=conftest libobjs=conftest.$ac_objext deplibs= wl=$lt_prog_compiler_wl_CXX pic_flag=$lt_prog_compiler_pic_CXX compiler_flags=-v linker_flags=-v verstring= output_objdir=. libname=conftest lt_save_allow_undefined_flag=$allow_undefined_flag_CXX allow_undefined_flag_CXX= if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$archive_cmds_CXX 2\>\&1 \| $GREP \" -lc \" \>/dev/null 2\>\&1\""; } >&5 (eval $archive_cmds_CXX 2\>\&1 \| $GREP \" -lc \" \>/dev/null 2\>\&1) 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; } then lt_cv_archive_cmds_need_lc_CXX=no else lt_cv_archive_cmds_need_lc_CXX=yes fi allow_undefined_flag_CXX=$lt_save_allow_undefined_flag else cat conftest.err 1>&5 fi $RM conftest* fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_archive_cmds_need_lc_CXX" >&5 $as_echo "$lt_cv_archive_cmds_need_lc_CXX" >&6; } archive_cmds_need_lc_CXX=$lt_cv_archive_cmds_need_lc_CXX ;; esac fi ;; esac { $as_echo "$as_me:${as_lineno-$LINENO}: checking dynamic linker characteristics" >&5 $as_echo_n "checking dynamic linker characteristics... 
" >&6; } library_names_spec= libname_spec='lib$name' soname_spec= shrext_cmds=.so postinstall_cmds= postuninstall_cmds= finish_cmds= finish_eval= shlibpath_var= shlibpath_overrides_runpath=unknown version_type=none dynamic_linker="$host_os ld.so" sys_lib_dlsearch_path_spec="/lib /usr/lib" need_lib_prefix=unknown hardcode_into_libs=no # when you set need_version to no, make sure it does not cause -set_version # flags to be left without arguments need_version=unknown case $host_os in aix3*) version_type=linux # correct to gnu/linux during the next big refactor library_names_spec='$libname$release$shared_ext$versuffix $libname.a' shlibpath_var=LIBPATH # AIX 3 has no versioning support, so we append a major version to the name. soname_spec='$libname$release$shared_ext$major' ;; aix[4-9]*) version_type=linux # correct to gnu/linux during the next big refactor need_lib_prefix=no need_version=no hardcode_into_libs=yes if test ia64 = "$host_cpu"; then # AIX 5 supports IA64 library_names_spec='$libname$release$shared_ext$major $libname$release$shared_ext$versuffix $libname$shared_ext' shlibpath_var=LD_LIBRARY_PATH else # With GCC up to 2.95.x, collect2 would create an import file # for dependence libraries. The import file would start with # the line '#! .'. This would cause the generated library to # depend on '.', always an invalid library. This was fixed in # development snapshots of GCC prior to 3.0. case $host_os in aix4 | aix4.[01] | aix4.[01].*) if { echo '#if __GNUC__ > 2 || (__GNUC__ == 2 && __GNUC_MINOR__ >= 97)' echo ' yes ' echo '#endif'; } | $CC -E - | $GREP yes > /dev/null; then : else can_build_shared=no fi ;; esac # Using Import Files as archive members, it is possible to support # filename-based versioning of shared library archives on AIX. While # this would work for both with and without runtime linking, it will # prevent static linking of such archives. 
So we do filename-based # shared library versioning with .so extension only, which is used # when both runtime linking and shared linking is enabled. # Unfortunately, runtime linking may impact performance, so we do # not want this to be the default eventually. Also, we use the # versioned .so libs for executables only if there is the -brtl # linker flag in LDFLAGS as well, or --with-aix-soname=svr4 only. # To allow for filename-based versioning support, we need to create # libNAME.so.V as an archive file, containing: # *) an Import File, referring to the versioned filename of the # archive as well as the shared archive member, telling the # bitwidth (32 or 64) of that shared object, and providing the # list of exported symbols of that shared object, eventually # decorated with the 'weak' keyword # *) the shared object with the F_LOADONLY flag set, to really avoid # it being seen by the linker. # At run time we better use the real file rather than another symlink, # but for link time we create the symlink libNAME.so -> libNAME.so.V case $with_aix_soname,$aix_use_runtimelinking in # AIX (on Power*) has no versioning support, so currently we cannot hardcode correct # soname into executable. Probably we can add versioning support to # collect2, so additional links can be useful in future. aix,yes) # traditional libtool dynamic_linker='AIX unversionable lib.so' # If using run time linking (on AIX 4.2 or later) use lib.so # instead of lib.a to let people know that these are not # typical AIX shared libraries. library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' ;; aix,no) # traditional AIX only dynamic_linker='AIX lib.a(lib.so.V)' # We preserve .a as extension for shared libraries through AIX4.2 # and later when we are not doing run time linking. 
library_names_spec='$libname$release.a $libname.a' soname_spec='$libname$release$shared_ext$major' ;; svr4,*) # full svr4 only dynamic_linker="AIX lib.so.V($shared_archive_member_spec.o)" library_names_spec='$libname$release$shared_ext$major $libname$shared_ext' # We do not specify a path in Import Files, so LIBPATH fires. shlibpath_overrides_runpath=yes ;; *,yes) # both, prefer svr4 dynamic_linker="AIX lib.so.V($shared_archive_member_spec.o), lib.a(lib.so.V)" library_names_spec='$libname$release$shared_ext$major $libname$shared_ext' # unpreferred sharedlib libNAME.a needs extra handling postinstall_cmds='test -n "$linkname" || linkname="$realname"~func_stripname "" ".so" "$linkname"~$install_shared_prog "$dir/$func_stripname_result.$libext" "$destdir/$func_stripname_result.$libext"~test -z "$tstripme" || test -z "$striplib" || $striplib "$destdir/$func_stripname_result.$libext"' postuninstall_cmds='for n in $library_names $old_library; do :; done~func_stripname "" ".so" "$n"~test "$func_stripname_result" = "$n" || func_append rmfiles " $odir/$func_stripname_result.$libext"' # We do not specify a path in Import Files, so LIBPATH fires. 
shlibpath_overrides_runpath=yes ;; *,no) # both, prefer aix dynamic_linker="AIX lib.a(lib.so.V), lib.so.V($shared_archive_member_spec.o)" library_names_spec='$libname$release.a $libname.a' soname_spec='$libname$release$shared_ext$major' # unpreferred sharedlib libNAME.so.V and symlink libNAME.so need extra handling postinstall_cmds='test -z "$dlname" || $install_shared_prog $dir/$dlname $destdir/$dlname~test -z "$tstripme" || test -z "$striplib" || $striplib $destdir/$dlname~test -n "$linkname" || linkname=$realname~func_stripname "" ".a" "$linkname"~(cd "$destdir" && $LN_S -f $dlname $func_stripname_result.so)' postuninstall_cmds='test -z "$dlname" || func_append rmfiles " $odir/$dlname"~for n in $old_library $library_names; do :; done~func_stripname "" ".a" "$n"~func_append rmfiles " $odir/$func_stripname_result.so"' ;; esac shlibpath_var=LIBPATH fi ;; amigaos*) case $host_cpu in powerpc) # Since July 2007 AmigaOS4 officially supports .so libraries. # When compiling the executable, add -use-dynld -Lsobjs: to the compileline. library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' ;; m68k) library_names_spec='$libname.ixlibrary $libname.a' # Create ${libname}_ixlibrary.a entries in /sys/libs. 
finish_eval='for lib in `ls $libdir/*.ixlibrary 2>/dev/null`; do libname=`func_echo_all "$lib" | $SED '\''s%^.*/\([^/]*\)\.ixlibrary$%\1%'\''`; $RM /sys/libs/${libname}_ixlibrary.a; $show "cd /sys/libs && $LN_S $lib ${libname}_ixlibrary.a"; cd /sys/libs && $LN_S $lib ${libname}_ixlibrary.a || exit 1; done' ;; esac ;; beos*) library_names_spec='$libname$shared_ext' dynamic_linker="$host_os ld.so" shlibpath_var=LIBRARY_PATH ;; bsdi[45]*) version_type=linux # correct to gnu/linux during the next big refactor need_version=no library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' soname_spec='$libname$release$shared_ext$major' finish_cmds='PATH="\$PATH:/sbin" ldconfig $libdir' shlibpath_var=LD_LIBRARY_PATH sys_lib_search_path_spec="/shlib /usr/lib /usr/X11/lib /usr/contrib/lib /lib /usr/local/lib" sys_lib_dlsearch_path_spec="/shlib /usr/lib /usr/local/lib" # the default ld.so.conf also contains /usr/contrib/lib and # /usr/X11R6/lib (/usr/X11 is a link to /usr/X11R6), but let us allow # libtool to hard-code these into programs ;; cygwin* | mingw* | pw32* | cegcc*) version_type=windows shrext_cmds=.dll need_version=no need_lib_prefix=no case $GCC,$cc_basename in yes,*) # gcc library_names_spec='$libname.dll.a' # DLL is installed to $(libdir)/../bin by postinstall_cmds postinstall_cmds='base_file=`basename \$file`~ dlpath=`$SHELL 2>&1 -c '\''. $dir/'\''\$base_file'\''i; echo \$dlname'\''`~ dldir=$destdir/`dirname \$dlpath`~ test -d \$dldir || mkdir -p \$dldir~ $install_prog $dir/$dlname \$dldir/$dlname~ chmod a+x \$dldir/$dlname~ if test -n '\''$stripme'\'' && test -n '\''$striplib'\''; then eval '\''$striplib \$dldir/$dlname'\'' || exit \$?; fi' postuninstall_cmds='dldll=`$SHELL 2>&1 -c '\''. 
$file; echo \$dlname'\''`~ dlpath=$dir/\$dldll~ $RM \$dlpath' shlibpath_overrides_runpath=yes case $host_os in cygwin*) # Cygwin DLLs use 'cyg' prefix rather than 'lib' soname_spec='`echo $libname | sed -e 's/^lib/cyg/'``echo $release | $SED -e 's/[.]/-/g'`$versuffix$shared_ext' ;; mingw* | cegcc*) # MinGW DLLs use traditional 'lib' prefix soname_spec='$libname`echo $release | $SED -e 's/[.]/-/g'`$versuffix$shared_ext' ;; pw32*) # pw32 DLLs use 'pw' prefix rather than 'lib' library_names_spec='`echo $libname | sed -e 's/^lib/pw/'``echo $release | $SED -e 's/[.]/-/g'`$versuffix$shared_ext' ;; esac dynamic_linker='Win32 ld.exe' ;; *,cl*) # Native MSVC libname_spec='$name' soname_spec='$libname`echo $release | $SED -e 's/[.]/-/g'`$versuffix$shared_ext' library_names_spec='$libname.dll.lib' case $build_os in mingw*) sys_lib_search_path_spec= lt_save_ifs=$IFS IFS=';' for lt_path in $LIB do IFS=$lt_save_ifs # Let DOS variable expansion print the short 8.3 style file name. lt_path=`cd "$lt_path" 2>/dev/null && cmd //C "for %i in (".") do @echo %~si"` sys_lib_search_path_spec="$sys_lib_search_path_spec $lt_path" done IFS=$lt_save_ifs # Convert to MSYS style. sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | sed -e 's|\\\\|/|g' -e 's| \\([a-zA-Z]\\):| /\\1|g' -e 's|^ ||'` ;; cygwin*) # Convert to unix form, then to dos form, then back to unix form # but this time dos style (no spaces!) so that the unix form looks # like /cygdrive/c/PROGRA~1:/cygdr... sys_lib_search_path_spec=`cygpath --path --unix "$LIB"` sys_lib_search_path_spec=`cygpath --path --dos "$sys_lib_search_path_spec" 2>/dev/null` sys_lib_search_path_spec=`cygpath --path --unix "$sys_lib_search_path_spec" | $SED -e "s/$PATH_SEPARATOR/ /g"` ;; *) sys_lib_search_path_spec=$LIB if $ECHO "$sys_lib_search_path_spec" | $GREP ';[c-zC-Z]:/' >/dev/null; then # It is most probably a Windows format PATH. 
sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | $SED -e 's/;/ /g'` else sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | $SED -e "s/$PATH_SEPARATOR/ /g"` fi # FIXME: find the short name or the path components, as spaces are # common. (e.g. "Program Files" -> "PROGRA~1") ;; esac # DLL is installed to $(libdir)/../bin by postinstall_cmds postinstall_cmds='base_file=`basename \$file`~ dlpath=`$SHELL 2>&1 -c '\''. $dir/'\''\$base_file'\''i; echo \$dlname'\''`~ dldir=$destdir/`dirname \$dlpath`~ test -d \$dldir || mkdir -p \$dldir~ $install_prog $dir/$dlname \$dldir/$dlname' postuninstall_cmds='dldll=`$SHELL 2>&1 -c '\''. $file; echo \$dlname'\''`~ dlpath=$dir/\$dldll~ $RM \$dlpath' shlibpath_overrides_runpath=yes dynamic_linker='Win32 link.exe' ;; *) # Assume MSVC wrapper library_names_spec='$libname`echo $release | $SED -e 's/[.]/-/g'`$versuffix$shared_ext $libname.lib' dynamic_linker='Win32 ld.exe' ;; esac # FIXME: first we should search . and the directory the executable is in shlibpath_var=PATH ;; darwin* | rhapsody*) dynamic_linker="$host_os dyld" version_type=darwin need_lib_prefix=no need_version=no library_names_spec='$libname$release$major$shared_ext $libname$shared_ext' soname_spec='$libname$release$major$shared_ext' shlibpath_overrides_runpath=yes shlibpath_var=DYLD_LIBRARY_PATH shrext_cmds='`test .$module = .yes && echo .so || echo .dylib`' sys_lib_dlsearch_path_spec='/usr/local/lib /lib /usr/lib' ;; dgux*) version_type=linux # correct to gnu/linux during the next big refactor need_lib_prefix=no need_version=no library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' soname_spec='$libname$release$shared_ext$major' shlibpath_var=LD_LIBRARY_PATH ;; freebsd* | dragonfly*) # DragonFly does not have aout. When/if they implement a new # versioning mechanism, adjust this. 
if test -x /usr/bin/objformat; then objformat=`/usr/bin/objformat` else case $host_os in freebsd[23].*) objformat=aout ;; *) objformat=elf ;; esac fi version_type=freebsd-$objformat case $version_type in freebsd-elf*) library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' soname_spec='$libname$release$shared_ext$major' need_version=no need_lib_prefix=no ;; freebsd-*) library_names_spec='$libname$release$shared_ext$versuffix $libname$shared_ext$versuffix' need_version=yes ;; esac shlibpath_var=LD_LIBRARY_PATH case $host_os in freebsd2.*) shlibpath_overrides_runpath=yes ;; freebsd3.[01]* | freebsdelf3.[01]*) shlibpath_overrides_runpath=yes hardcode_into_libs=yes ;; freebsd3.[2-9]* | freebsdelf3.[2-9]* | \ freebsd4.[0-5] | freebsdelf4.[0-5] | freebsd4.1.1 | freebsdelf4.1.1) shlibpath_overrides_runpath=no hardcode_into_libs=yes ;; *) # from 4.6 on, and DragonFly shlibpath_overrides_runpath=yes hardcode_into_libs=yes ;; esac ;; haiku*) version_type=linux # correct to gnu/linux during the next big refactor need_lib_prefix=no need_version=no dynamic_linker="$host_os runtime_loader" library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' soname_spec='$libname$release$shared_ext$major' shlibpath_var=LIBRARY_PATH shlibpath_overrides_runpath=no sys_lib_dlsearch_path_spec='/boot/home/config/lib /boot/common/lib /boot/system/lib' hardcode_into_libs=yes ;; hpux9* | hpux10* | hpux11*) # Give a soname corresponding to the major version so that dld.sl refuses to # link against other versions. version_type=sunos need_lib_prefix=no need_version=no case $host_cpu in ia64*) shrext_cmds='.so' hardcode_into_libs=yes dynamic_linker="$host_os dld.so" shlibpath_var=LD_LIBRARY_PATH shlibpath_overrides_runpath=yes # Unless +noenvvar is specified. 
library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' soname_spec='$libname$release$shared_ext$major' if test 32 = "$HPUX_IA64_MODE"; then sys_lib_search_path_spec="/usr/lib/hpux32 /usr/local/lib/hpux32 /usr/local/lib" sys_lib_dlsearch_path_spec=/usr/lib/hpux32 else sys_lib_search_path_spec="/usr/lib/hpux64 /usr/local/lib/hpux64" sys_lib_dlsearch_path_spec=/usr/lib/hpux64 fi ;; hppa*64*) shrext_cmds='.sl' hardcode_into_libs=yes dynamic_linker="$host_os dld.sl" shlibpath_var=LD_LIBRARY_PATH # How should we handle SHLIB_PATH shlibpath_overrides_runpath=yes # Unless +noenvvar is specified. library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' soname_spec='$libname$release$shared_ext$major' sys_lib_search_path_spec="/usr/lib/pa20_64 /usr/ccs/lib/pa20_64" sys_lib_dlsearch_path_spec=$sys_lib_search_path_spec ;; *) shrext_cmds='.sl' dynamic_linker="$host_os dld.sl" shlibpath_var=SHLIB_PATH shlibpath_overrides_runpath=no # +s is required to enable SHLIB_PATH library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' soname_spec='$libname$release$shared_ext$major' ;; esac # HP-UX runs *really* slowly unless shared libraries are mode 555, ... 
postinstall_cmds='chmod 555 $lib' # or fails outright, so override atomically: install_override_mode=555 ;; interix[3-9]*) version_type=linux # correct to gnu/linux during the next big refactor need_lib_prefix=no need_version=no library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' soname_spec='$libname$release$shared_ext$major' dynamic_linker='Interix 3.x ld.so.1 (PE, like ELF)' shlibpath_var=LD_LIBRARY_PATH shlibpath_overrides_runpath=no hardcode_into_libs=yes ;; irix5* | irix6* | nonstopux*) case $host_os in nonstopux*) version_type=nonstopux ;; *) if test yes = "$lt_cv_prog_gnu_ld"; then version_type=linux # correct to gnu/linux during the next big refactor else version_type=irix fi ;; esac need_lib_prefix=no need_version=no soname_spec='$libname$release$shared_ext$major' library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$release$shared_ext $libname$shared_ext' case $host_os in irix5* | nonstopux*) libsuff= shlibsuff= ;; *) case $LD in # libtool.m4 will add one of these switches to LD *-32|*"-32 "|*-melf32bsmip|*"-melf32bsmip ") libsuff= shlibsuff= libmagic=32-bit;; *-n32|*"-n32 "|*-melf32bmipn32|*"-melf32bmipn32 ") libsuff=32 shlibsuff=N32 libmagic=N32;; *-64|*"-64 "|*-melf64bmip|*"-melf64bmip ") libsuff=64 shlibsuff=64 libmagic=64-bit;; *) libsuff= shlibsuff= libmagic=never-match;; esac ;; esac shlibpath_var=LD_LIBRARY${shlibsuff}_PATH shlibpath_overrides_runpath=no sys_lib_search_path_spec="/usr/lib$libsuff /lib$libsuff /usr/local/lib$libsuff" sys_lib_dlsearch_path_spec="/usr/lib$libsuff /lib$libsuff" hardcode_into_libs=yes ;; # No shared lib support for Linux oldld, aout, or coff. linux*oldld* | linux*aout* | linux*coff*) dynamic_linker=no ;; linux*android*) version_type=none # Android doesn't support versioned libraries. 
need_lib_prefix=no need_version=no library_names_spec='$libname$release$shared_ext' soname_spec='$libname$release$shared_ext' finish_cmds= shlibpath_var=LD_LIBRARY_PATH shlibpath_overrides_runpath=yes # This implies no fast_install, which is unacceptable. # Some rework will be needed to allow for fast_install # before this can be enabled. hardcode_into_libs=yes dynamic_linker='Android linker' # Don't embed -rpath directories since the linker doesn't support them. hardcode_libdir_flag_spec_CXX='-L$libdir' ;; # This must be glibc/ELF. linux* | k*bsd*-gnu | kopensolaris*-gnu | gnu*) version_type=linux # correct to gnu/linux during the next big refactor need_lib_prefix=no need_version=no library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' soname_spec='$libname$release$shared_ext$major' finish_cmds='PATH="\$PATH:/sbin" ldconfig -n $libdir' shlibpath_var=LD_LIBRARY_PATH shlibpath_overrides_runpath=no # Some binutils ld are patched to set DT_RUNPATH if ${lt_cv_shlibpath_overrides_runpath+:} false; then : $as_echo_n "(cached) " >&6 else lt_cv_shlibpath_overrides_runpath=no save_LDFLAGS=$LDFLAGS save_libdir=$libdir eval "libdir=/foo; wl=\"$lt_prog_compiler_wl_CXX\"; \ LDFLAGS=\"\$LDFLAGS $hardcode_libdir_flag_spec_CXX\"" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ int main () { ; return 0; } _ACEOF if ac_fn_cxx_try_link "$LINENO"; then : if ($OBJDUMP -p conftest$ac_exeext) 2>/dev/null | grep "RUNPATH.*$libdir" >/dev/null; then : lt_cv_shlibpath_overrides_runpath=yes fi fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext LDFLAGS=$save_LDFLAGS libdir=$save_libdir fi shlibpath_overrides_runpath=$lt_cv_shlibpath_overrides_runpath # This implies no fast_install, which is unacceptable. # Some rework will be needed to allow for fast_install # before this can be enabled. 
hardcode_into_libs=yes # Ideally, we could use ldconfig to report *all* directores which are # searched for libraries, however this is still not possible. Aside from not # being certain /sbin/ldconfig is available, command # 'ldconfig -N -X -v | grep ^/' on 64bit Fedora does not report /usr/lib64, # even though it is searched at run-time. Try to do the best guess by # appending ld.so.conf contents (and includes) to the search path. if test -f /etc/ld.so.conf; then lt_ld_extra=`awk '/^include / { system(sprintf("cd /etc; cat %s 2>/dev/null", \$2)); skip = 1; } { if (!skip) print \$0; skip = 0; }' < /etc/ld.so.conf | $SED -e 's/#.*//;/^[ ]*hwcap[ ]/d;s/[:, ]/ /g;s/=[^=]*$//;s/=[^= ]* / /g;s/"//g;/^$/d' | tr '\n' ' '` sys_lib_dlsearch_path_spec="/lib /usr/lib $lt_ld_extra" fi # We used to test for /lib/ld.so.1 and disable shared libraries on # powerpc, because MkLinux only supported shared libraries with the # GNU dynamic linker. Since this was broken with cross compilers, # most powerpc-linux boxes support dynamic linking these days and # people can always --disable-shared, the test was removed, and we # assume the GNU/Linux dynamic linker is in use. 
dynamic_linker='GNU/Linux ld.so' ;; netbsd*) version_type=sunos need_lib_prefix=no need_version=no if echo __ELF__ | $CC -E - | $GREP __ELF__ >/dev/null; then library_names_spec='$libname$release$shared_ext$versuffix $libname$shared_ext$versuffix' finish_cmds='PATH="\$PATH:/sbin" ldconfig -m $libdir' dynamic_linker='NetBSD (a.out) ld.so' else library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' soname_spec='$libname$release$shared_ext$major' dynamic_linker='NetBSD ld.elf_so' fi shlibpath_var=LD_LIBRARY_PATH shlibpath_overrides_runpath=yes hardcode_into_libs=yes ;; newsos6) version_type=linux # correct to gnu/linux during the next big refactor library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' shlibpath_var=LD_LIBRARY_PATH shlibpath_overrides_runpath=yes ;; *nto* | *qnx*) version_type=qnx need_lib_prefix=no need_version=no library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' soname_spec='$libname$release$shared_ext$major' shlibpath_var=LD_LIBRARY_PATH shlibpath_overrides_runpath=no hardcode_into_libs=yes dynamic_linker='ldqnx.so' ;; openbsd* | bitrig*) version_type=sunos sys_lib_dlsearch_path_spec=/usr/lib need_lib_prefix=no if test -z "`echo __ELF__ | $CC -E - | $GREP __ELF__`"; then need_version=no else need_version=yes fi library_names_spec='$libname$release$shared_ext$versuffix $libname$shared_ext$versuffix' finish_cmds='PATH="\$PATH:/sbin" ldconfig -m $libdir' shlibpath_var=LD_LIBRARY_PATH shlibpath_overrides_runpath=yes ;; os2*) libname_spec='$name' version_type=windows shrext_cmds=.dll need_version=no need_lib_prefix=no # OS/2 can only load a DLL with a base name of 8 characters or less. soname_spec='`test -n "$os2dllname" && libname="$os2dllname"; v=$($ECHO $release$versuffix | tr -d .-); n=$($ECHO $libname | cut -b -$((8 - ${#v})) | tr . 
_); $ECHO $n$v`$shared_ext' library_names_spec='${libname}_dll.$libext' dynamic_linker='OS/2 ld.exe' shlibpath_var=BEGINLIBPATH sys_lib_search_path_spec="/lib /usr/lib /usr/local/lib" sys_lib_dlsearch_path_spec=$sys_lib_search_path_spec postinstall_cmds='base_file=`basename \$file`~ dlpath=`$SHELL 2>&1 -c '\''. $dir/'\''\$base_file'\''i; $ECHO \$dlname'\''`~ dldir=$destdir/`dirname \$dlpath`~ test -d \$dldir || mkdir -p \$dldir~ $install_prog $dir/$dlname \$dldir/$dlname~ chmod a+x \$dldir/$dlname~ if test -n '\''$stripme'\'' && test -n '\''$striplib'\''; then eval '\''$striplib \$dldir/$dlname'\'' || exit \$?; fi' postuninstall_cmds='dldll=`$SHELL 2>&1 -c '\''. $file; $ECHO \$dlname'\''`~ dlpath=$dir/\$dldll~ $RM \$dlpath' ;; osf3* | osf4* | osf5*) version_type=osf need_lib_prefix=no need_version=no soname_spec='$libname$release$shared_ext$major' library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' shlibpath_var=LD_LIBRARY_PATH sys_lib_search_path_spec="/usr/shlib /usr/ccs/lib /usr/lib/cmplrs/cc /usr/lib /usr/local/lib /var/shlib" sys_lib_dlsearch_path_spec=$sys_lib_search_path_spec ;; rdos*) dynamic_linker=no ;; solaris*) version_type=linux # correct to gnu/linux during the next big refactor need_lib_prefix=no need_version=no library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' soname_spec='$libname$release$shared_ext$major' shlibpath_var=LD_LIBRARY_PATH shlibpath_overrides_runpath=yes hardcode_into_libs=yes # ldd complains unless libraries are executable postinstall_cmds='chmod +x $lib' ;; sunos4*) version_type=sunos library_names_spec='$libname$release$shared_ext$versuffix $libname$shared_ext$versuffix' finish_cmds='PATH="\$PATH:/usr/etc" ldconfig $libdir' shlibpath_var=LD_LIBRARY_PATH shlibpath_overrides_runpath=yes if test yes = "$with_gnu_ld"; then need_lib_prefix=no fi need_version=yes ;; sysv4 | sysv4.3*) version_type=linux # correct to 
gnu/linux during the next big refactor library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' soname_spec='$libname$release$shared_ext$major' shlibpath_var=LD_LIBRARY_PATH case $host_vendor in sni) shlibpath_overrides_runpath=no need_lib_prefix=no runpath_var=LD_RUN_PATH ;; siemens) need_lib_prefix=no ;; motorola) need_lib_prefix=no need_version=no shlibpath_overrides_runpath=no sys_lib_search_path_spec='/lib /usr/lib /usr/ccs/lib' ;; esac ;; sysv4*MP*) if test -d /usr/nec; then version_type=linux # correct to gnu/linux during the next big refactor library_names_spec='$libname$shared_ext.$versuffix $libname$shared_ext.$major $libname$shared_ext' soname_spec='$libname$shared_ext.$major' shlibpath_var=LD_LIBRARY_PATH fi ;; sysv5* | sco3.2v5* | sco5v6* | unixware* | OpenUNIX* | sysv4*uw2*) version_type=sco need_lib_prefix=no need_version=no library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext $libname$shared_ext' soname_spec='$libname$release$shared_ext$major' shlibpath_var=LD_LIBRARY_PATH shlibpath_overrides_runpath=yes hardcode_into_libs=yes if test yes = "$with_gnu_ld"; then sys_lib_search_path_spec='/usr/local/lib /usr/gnu/lib /usr/ccs/lib /usr/lib /lib' else sys_lib_search_path_spec='/usr/ccs/lib /usr/lib' case $host_os in sco3.2v5*) sys_lib_search_path_spec="$sys_lib_search_path_spec /lib" ;; esac fi sys_lib_dlsearch_path_spec='/usr/lib' ;; tpf*) # TPF is a cross-target only. Preferred cross-host = GNU/Linux. 
version_type=linux # correct to gnu/linux during the next big refactor need_lib_prefix=no need_version=no library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' shlibpath_var=LD_LIBRARY_PATH shlibpath_overrides_runpath=no hardcode_into_libs=yes ;; uts4*) version_type=linux # correct to gnu/linux during the next big refactor library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' soname_spec='$libname$release$shared_ext$major' shlibpath_var=LD_LIBRARY_PATH ;; *) dynamic_linker=no ;; esac { $as_echo "$as_me:${as_lineno-$LINENO}: result: $dynamic_linker" >&5 $as_echo "$dynamic_linker" >&6; } test no = "$dynamic_linker" && can_build_shared=no variables_saved_for_relink="PATH $shlibpath_var $runpath_var" if test yes = "$GCC"; then variables_saved_for_relink="$variables_saved_for_relink GCC_EXEC_PREFIX COMPILER_PATH LIBRARY_PATH" fi if test set = "${lt_cv_sys_lib_search_path_spec+set}"; then sys_lib_search_path_spec=$lt_cv_sys_lib_search_path_spec fi if test set = "${lt_cv_sys_lib_dlsearch_path_spec+set}"; then sys_lib_dlsearch_path_spec=$lt_cv_sys_lib_dlsearch_path_spec fi # remember unaugmented sys_lib_dlsearch_path content for libtool script decls... configure_time_dlsearch_path=$sys_lib_dlsearch_path_spec # ... but it needs LT_SYS_LIBRARY_PATH munging for other configure-time code func_munge_path_list sys_lib_dlsearch_path_spec "$LT_SYS_LIBRARY_PATH" # to be used as default LT_SYS_LIBRARY_PATH value in generated libtool configure_time_lt_sys_library_path=$LT_SYS_LIBRARY_PATH { $as_echo "$as_me:${as_lineno-$LINENO}: checking how to hardcode library paths into programs" >&5 $as_echo_n "checking how to hardcode library paths into programs... " >&6; } hardcode_action_CXX= if test -n "$hardcode_libdir_flag_spec_CXX" || test -n "$runpath_var_CXX" || test yes = "$hardcode_automatic_CXX"; then # We can hardcode non-existent directories. 
if test no != "$hardcode_direct_CXX" && # If the only mechanism to avoid hardcoding is shlibpath_var, we # have to relink, otherwise we might link with an installed library # when we should be linking with a yet-to-be-installed one ## test no != "$_LT_TAGVAR(hardcode_shlibpath_var, CXX)" && test no != "$hardcode_minus_L_CXX"; then # Linking always hardcodes the temporary library directory. hardcode_action_CXX=relink else # We can link without hardcoding, and we can hardcode nonexisting dirs. hardcode_action_CXX=immediate fi else # We cannot hardcode anything, or else we can only hardcode existing # directories. hardcode_action_CXX=unsupported fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $hardcode_action_CXX" >&5 $as_echo "$hardcode_action_CXX" >&6; } if test relink = "$hardcode_action_CXX" || test yes = "$inherit_rpath_CXX"; then # Fast installation is not supported enable_fast_install=no elif test yes = "$shlibpath_overrides_runpath" || test no = "$enable_shared"; then # Fast installation is not necessary enable_fast_install=needless fi fi # test -n "$compiler" CC=$lt_save_CC CFLAGS=$lt_save_CFLAGS LDCXX=$LD LD=$lt_save_LD GCC=$lt_save_GCC with_gnu_ld=$lt_save_with_gnu_ld lt_cv_path_LDCXX=$lt_cv_path_LD lt_cv_path_LD=$lt_save_path_LD lt_cv_prog_gnu_ldcxx=$lt_cv_prog_gnu_ld lt_cv_prog_gnu_ld=$lt_save_with_gnu_ld fi # test yes != "$_lt_caught_CXX_error" ac_ext=c ac_cpp='$CPP $CPPFLAGS' ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' ac_compiler_gnu=$ac_cv_c_compiler_gnu ax_cxx_compile_alternatives="14 1y" ax_cxx_compile_cxx14_required=true ac_ext=cpp ac_cpp='$CXXCPP $CPPFLAGS' ac_compile='$CXX -c $CXXFLAGS $CPPFLAGS conftest.$ac_ext >&5' ac_link='$CXX -o conftest$ac_exeext $CXXFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' ac_compiler_gnu=$ac_cv_cxx_compiler_gnu ac_success=no if test x$ac_success = xno; then for alternative in 
${ax_cxx_compile_alternatives}; do for switch in -std=c++${alternative} +std=c++${alternative} "-h std=c++${alternative}"; do cachevar=`$as_echo "ax_cv_cxx_compile_cxx14_$switch" | $as_tr_sh` { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether $CXX supports C++14 features with $switch" >&5 $as_echo_n "checking whether $CXX supports C++14 features with $switch... " >&6; } if eval \${$cachevar+:} false; then : $as_echo_n "(cached) " >&6 else ac_save_CXX="$CXX" CXX="$CXX $switch" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ // If the compiler admits that it is not ready for C++11, why torture it? // Hopefully, this will speed up the test. #ifndef __cplusplus #error "This is not a C++ compiler" #elif __cplusplus < 201103L #error "This is not a C++11 compiler" #else namespace cxx11 { namespace test_static_assert { template struct check { static_assert(sizeof(int) <= sizeof(T), "not big enough"); }; } namespace test_final_override { struct Base { virtual ~Base() {} virtual void f() {} }; struct Derived : public Base { virtual ~Derived() override {} virtual void f() override {} }; } namespace test_double_right_angle_brackets { template < typename T > struct check {}; typedef check single_type; typedef check> double_type; typedef check>> triple_type; typedef check>>> quadruple_type; } namespace test_decltype { int f() { int a = 1; decltype(a) b = 2; return a + b; } } namespace test_type_deduction { template < typename T1, typename T2 > struct is_same { static const bool value = false; }; template < typename T > struct is_same { static const bool value = true; }; template < typename T1, typename T2 > auto add(T1 a1, T2 a2) -> decltype(a1 + a2) { return a1 + a2; } int test(const int c, volatile int v) { static_assert(is_same::value == true, ""); static_assert(is_same::value == false, ""); static_assert(is_same::value == false, ""); auto ac = c; auto av = v; auto sumi = ac + av + 'x'; auto sumf = ac + av + 1.0; static_assert(is_same::value == true, 
""); static_assert(is_same::value == true, ""); static_assert(is_same::value == true, ""); static_assert(is_same::value == false, ""); static_assert(is_same::value == true, ""); return (sumf > 0.0) ? sumi : add(c, v); } } namespace test_noexcept { int f() { return 0; } int g() noexcept { return 0; } static_assert(noexcept(f()) == false, ""); static_assert(noexcept(g()) == true, ""); } namespace test_constexpr { template < typename CharT > unsigned long constexpr strlen_c_r(const CharT *const s, const unsigned long acc) noexcept { return *s ? strlen_c_r(s + 1, acc + 1) : acc; } template < typename CharT > unsigned long constexpr strlen_c(const CharT *const s) noexcept { return strlen_c_r(s, 0UL); } static_assert(strlen_c("") == 0UL, ""); static_assert(strlen_c("1") == 1UL, ""); static_assert(strlen_c("example") == 7UL, ""); static_assert(strlen_c("another\0example") == 7UL, ""); } namespace test_rvalue_references { template < int N > struct answer { static constexpr int value = N; }; answer<1> f(int&) { return answer<1>(); } answer<2> f(const int&) { return answer<2>(); } answer<3> f(int&&) { return answer<3>(); } void test() { int i = 0; const int c = 0; static_assert(decltype(f(i))::value == 1, ""); static_assert(decltype(f(c))::value == 2, ""); static_assert(decltype(f(0))::value == 3, ""); } } namespace test_uniform_initialization { struct test { static const int zero {}; static const int one {1}; }; static_assert(test::zero == 0, ""); static_assert(test::one == 1, ""); } namespace test_lambdas { void test1() { auto lambda1 = [](){}; auto lambda2 = lambda1; lambda1(); lambda2(); } int test2() { auto a = [](int i, int j){ return i + j; }(1, 2); auto b = []() -> int { return '0'; }(); auto c = [=](){ return a + b; }(); auto d = [&](){ return c; }(); auto e = [a, &b](int x) mutable { const auto identity = [](int y){ return y; }; for (auto i = 0; i < a; ++i) a += b--; return x + identity(a + b); }(0); return a + b + c + d + e; } int test3() { const auto nullary = 
[](){ return 0; }; const auto unary = [](int x){ return x; }; using nullary_t = decltype(nullary); using unary_t = decltype(unary); const auto higher1st = [](nullary_t f){ return f(); }; const auto higher2nd = [unary](nullary_t f1){ return [unary, f1](unary_t f2){ return f2(unary(f1())); }; }; return higher1st(nullary) + higher2nd(nullary)(unary); } } namespace test_variadic_templates { template struct sum; template struct sum { static constexpr auto value = N0 + sum::value; }; template <> struct sum<> { static constexpr auto value = 0; }; static_assert(sum<>::value == 0, ""); static_assert(sum<1>::value == 1, ""); static_assert(sum<23>::value == 23, ""); static_assert(sum<1, 2>::value == 3, ""); static_assert(sum<5, 5, 11>::value == 21, ""); static_assert(sum<2, 3, 5, 7, 11, 13>::value == 41, ""); } // http://stackoverflow.com/questions/13728184/template-aliases-and-sfinae // Clang 3.1 fails with headers of libstd++ 4.8.3 when using std::function // because of this. namespace test_template_alias_sfinae { struct foo {}; template using member = typename T::member_type; template void func(...) {} template void func(member*) {} void test(); void test() { func(0); } } } // namespace cxx11 #endif // __cplusplus >= 201103L // If the compiler admits that it is not ready for C++14, why torture it? // Hopefully, this will speed up the test. #ifndef __cplusplus #error "This is not a C++ compiler" #elif __cplusplus < 201402L #error "This is not a C++14 compiler" #else namespace cxx14 { namespace test_polymorphic_lambdas { int test() { const auto lambda = [](auto&&... args){ const auto istiny = [](auto x){ return (sizeof(x) == 1UL) ? 1 : 0; }; const int aretiny[] = { istiny(args)... 
}; return aretiny[0]; }; return lambda(1, 1L, 1.0f, '1'); } } namespace test_binary_literals { constexpr auto ivii = 0b0000000000101010; static_assert(ivii == 42, "wrong value"); } namespace test_generalized_constexpr { template < typename CharT > constexpr unsigned long strlen_c(const CharT *const s) noexcept { auto length = 0UL; for (auto p = s; *p; ++p) ++length; return length; } static_assert(strlen_c("") == 0UL, ""); static_assert(strlen_c("x") == 1UL, ""); static_assert(strlen_c("test") == 4UL, ""); static_assert(strlen_c("another\0test") == 7UL, ""); } namespace test_lambda_init_capture { int test() { auto x = 0; const auto lambda1 = [a = x](int b){ return a + b; }; const auto lambda2 = [a = lambda1(x)](){ return a; }; return lambda2(); } } namespace test_digit_separators { constexpr auto ten_million = 100'000'000; static_assert(ten_million == 100000000, ""); } namespace test_return_type_deduction { auto f(int& x) { return x; } decltype(auto) g(int& x) { return x; } template < typename T1, typename T2 > struct is_same { static constexpr auto value = false; }; template < typename T > struct is_same { static constexpr auto value = true; }; int test() { auto x = 0; static_assert(is_same::value, ""); static_assert(is_same::value, ""); return x; } } } // namespace cxx14 #endif // __cplusplus >= 201402L _ACEOF if ac_fn_cxx_try_compile "$LINENO"; then : eval $cachevar=yes else eval $cachevar=no fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext CXX="$ac_save_CXX" fi eval ac_res=\$$cachevar { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5 $as_echo "$ac_res" >&6; } if eval test x\$$cachevar = xyes; then CXX="$CXX $switch" if test -n "$CXXCPP" ; then CXXCPP="$CXXCPP $switch" fi ac_success=yes break fi done if test x$ac_success = xyes; then break fi done fi ac_ext=c ac_cpp='$CPP $CPPFLAGS' ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' 
ac_compiler_gnu=$ac_cv_c_compiler_gnu if test x$ax_cxx_compile_cxx14_required = xtrue; then if test x$ac_success = xno; then as_fn_error $? "*** A compiler with support for C++14 language features is required." "$LINENO" 5 fi fi if test x$ac_success = xno; then HAVE_CXX14=0 { $as_echo "$as_me:${as_lineno-$LINENO}: No compiler with C++14 support was found" >&5 $as_echo "$as_me: No compiler with C++14 support was found" >&6;} else HAVE_CXX14=1 $as_echo "#define HAVE_CXX14 1" >>confdefs.h fi ac_ext=c ac_cpp='$CPP $CPPFLAGS' ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' ac_compiler_gnu=$ac_cv_c_compiler_gnu if test -n "$ac_tool_prefix"; then # Extract the first word of "${ac_tool_prefix}gcc", so it can be a program name with args. set dummy ${ac_tool_prefix}gcc; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if ${ac_cv_prog_CC+:} false; then : $as_echo_n "(cached) " >&6 else if test -n "$CC"; then ac_cv_prog_CC="$CC" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_CC="${ac_tool_prefix}gcc" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS fi fi CC=$ac_cv_prog_CC if test -n "$CC"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $CC" >&5 $as_echo "$CC" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi fi if test -z "$ac_cv_prog_CC"; then ac_ct_CC=$CC # Extract the first word of "gcc", so it can be a program name with args. set dummy gcc; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... 
" >&6; } if ${ac_cv_prog_ac_ct_CC+:} false; then : $as_echo_n "(cached) " >&6 else if test -n "$ac_ct_CC"; then ac_cv_prog_ac_ct_CC="$ac_ct_CC" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_ac_ct_CC="gcc" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS fi fi ac_ct_CC=$ac_cv_prog_ac_ct_CC if test -n "$ac_ct_CC"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_CC" >&5 $as_echo "$ac_ct_CC" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi if test "x$ac_ct_CC" = x; then CC="" else case $cross_compiling:$ac_tool_warned in yes:) { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 $as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} ac_tool_warned=yes ;; esac CC=$ac_ct_CC fi else CC="$ac_cv_prog_CC" fi if test -z "$CC"; then if test -n "$ac_tool_prefix"; then # Extract the first word of "${ac_tool_prefix}cc", so it can be a program name with args. set dummy ${ac_tool_prefix}cc; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if ${ac_cv_prog_CC+:} false; then : $as_echo_n "(cached) " >&6 else if test -n "$CC"; then ac_cv_prog_CC="$CC" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. 
for ac_exec_ext in '' $ac_executable_extensions; do if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_CC="${ac_tool_prefix}cc" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS fi fi CC=$ac_cv_prog_CC if test -n "$CC"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $CC" >&5 $as_echo "$CC" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi fi fi if test -z "$CC"; then # Extract the first word of "cc", so it can be a program name with args. set dummy cc; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if ${ac_cv_prog_CC+:} false; then : $as_echo_n "(cached) " >&6 else if test -n "$CC"; then ac_cv_prog_CC="$CC" # Let the user override the test. else ac_prog_rejected=no as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then if test "$as_dir/$ac_word$ac_exec_ext" = "/usr/ucb/cc"; then ac_prog_rejected=yes continue fi ac_cv_prog_CC="cc" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS if test $ac_prog_rejected = yes; then # We found a bogon in the path, so make sure we never use it. set dummy $ac_cv_prog_CC shift if test $# != 0; then # We chose a different compiler from the bogus one. # However, it has the same basename, so the bogon will be chosen # first if we set CC to just the basename; use the full file name. 
shift ac_cv_prog_CC="$as_dir/$ac_word${1+' '}$@" fi fi fi fi CC=$ac_cv_prog_CC if test -n "$CC"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $CC" >&5 $as_echo "$CC" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi fi if test -z "$CC"; then if test -n "$ac_tool_prefix"; then for ac_prog in cl.exe do # Extract the first word of "$ac_tool_prefix$ac_prog", so it can be a program name with args. set dummy $ac_tool_prefix$ac_prog; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if ${ac_cv_prog_CC+:} false; then : $as_echo_n "(cached) " >&6 else if test -n "$CC"; then ac_cv_prog_CC="$CC" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_CC="$ac_tool_prefix$ac_prog" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS fi fi CC=$ac_cv_prog_CC if test -n "$CC"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $CC" >&5 $as_echo "$CC" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi test -n "$CC" && break done fi if test -z "$CC"; then ac_ct_CC=$CC for ac_prog in cl.exe do # Extract the first word of "$ac_prog", so it can be a program name with args. set dummy $ac_prog; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if ${ac_cv_prog_ac_ct_CC+:} false; then : $as_echo_n "(cached) " >&6 else if test -n "$ac_ct_CC"; then ac_cv_prog_ac_ct_CC="$ac_ct_CC" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. 
for ac_exec_ext in '' $ac_executable_extensions; do if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_ac_ct_CC="$ac_prog" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS fi fi ac_ct_CC=$ac_cv_prog_ac_ct_CC if test -n "$ac_ct_CC"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_CC" >&5 $as_echo "$ac_ct_CC" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi test -n "$ac_ct_CC" && break done if test "x$ac_ct_CC" = x; then CC="" else case $cross_compiling:$ac_tool_warned in yes:) { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 $as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} ac_tool_warned=yes ;; esac CC=$ac_ct_CC fi fi fi test -z "$CC" && { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5 $as_echo "$as_me: error: in \`$ac_pwd':" >&2;} as_fn_error $? "no acceptable C compiler found in \$PATH See \`config.log' for more details" "$LINENO" 5; } # Provide some information about the compiler. $as_echo "$as_me:${as_lineno-$LINENO}: checking for C compiler version" >&5 set X $ac_compile ac_compiler=$2 for ac_option in --version -v -V -qversion; do { { ac_try="$ac_compiler $ac_option >&5" case "(($ac_try" in *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; *) ac_try_echo=$ac_try;; esac eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" $as_echo "$ac_try_echo"; } >&5 (eval "$ac_compiler $ac_option >&5") 2>conftest.err ac_status=$? if test -s conftest.err; then sed '10a\ ... rest of stderr output deleted ... 10q' conftest.err >conftest.er1 cat conftest.er1 >&5 fi rm -f conftest.er1 conftest.err $as_echo "$as_me:${as_lineno-$LINENO}: \$? 
= $ac_status" >&5 test $ac_status = 0; } done { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether we are using the GNU C compiler" >&5 $as_echo_n "checking whether we are using the GNU C compiler... " >&6; } if ${ac_cv_c_compiler_gnu+:} false; then : $as_echo_n "(cached) " >&6 else cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ int main () { #ifndef __GNUC__ choke me #endif ; return 0; } _ACEOF if ac_fn_c_try_compile "$LINENO"; then : ac_compiler_gnu=yes else ac_compiler_gnu=no fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext ac_cv_c_compiler_gnu=$ac_compiler_gnu fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_c_compiler_gnu" >&5 $as_echo "$ac_cv_c_compiler_gnu" >&6; } if test $ac_compiler_gnu = yes; then GCC=yes else GCC= fi ac_test_CFLAGS=${CFLAGS+set} ac_save_CFLAGS=$CFLAGS { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether $CC accepts -g" >&5 $as_echo_n "checking whether $CC accepts -g... " >&6; } if ${ac_cv_prog_cc_g+:} false; then : $as_echo_n "(cached) " >&6 else ac_save_c_werror_flag=$ac_c_werror_flag ac_c_werror_flag=yes ac_cv_prog_cc_g=no CFLAGS="-g" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ int main () { ; return 0; } _ACEOF if ac_fn_c_try_compile "$LINENO"; then : ac_cv_prog_cc_g=yes else CFLAGS="" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ int main () { ; return 0; } _ACEOF if ac_fn_c_try_compile "$LINENO"; then : else ac_c_werror_flag=$ac_save_c_werror_flag CFLAGS="-g" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. 
*/ int main () { ; return 0; } _ACEOF if ac_fn_c_try_compile "$LINENO"; then : ac_cv_prog_cc_g=yes fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext ac_c_werror_flag=$ac_save_c_werror_flag fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_prog_cc_g" >&5 $as_echo "$ac_cv_prog_cc_g" >&6; } if test "$ac_test_CFLAGS" = set; then CFLAGS=$ac_save_CFLAGS elif test $ac_cv_prog_cc_g = yes; then if test "$GCC" = yes; then CFLAGS="-g -O2" else CFLAGS="-g" fi else if test "$GCC" = yes; then CFLAGS="-O2" else CFLAGS= fi fi { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $CC option to accept ISO C89" >&5 $as_echo_n "checking for $CC option to accept ISO C89... " >&6; } if ${ac_cv_prog_cc_c89+:} false; then : $as_echo_n "(cached) " >&6 else ac_cv_prog_cc_c89=no ac_save_CC=$CC cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ #include #include struct stat; /* Most of the following tests are stolen from RCS 5.7's src/conf.sh. */ struct buf { int x; }; FILE * (*rcsopen) (struct buf *, struct stat *, int); static char *e (p, i) char **p; int i; { return p[i]; } static char *f (char * (*g) (char **, int), char **p, ...) { char *s; va_list v; va_start (v,p); s = g (p, va_arg (v,int)); va_end (v); return s; } /* OSF 4.0 Compaq cc is some sort of almost-ANSI by default. It has function prototypes and stuff, but not '\xHH' hex character constants. These don't provoke an error unfortunately, instead are silently treated as 'x'. The following induces an error, until -std is added to get proper ANSI mode. Curiously '\x00'!='x' always comes out true, for an array size at least. It's necessary to write '\x00'==0 to get something that's true only with -std. */ int osf4_cc_array ['\x00' == 0 ? 1 : -1]; /* IBM C 6 for AIX is almost-ANSI by default, but it replaces macro parameters inside strings and character constants. 
*/ #define FOO(x) 'x' int xlc6_cc_array[FOO(a) == 'x' ? 1 : -1]; int test (int i, double x); struct s1 {int (*f) (int a);}; struct s2 {int (*f) (double a);}; int pairnames (int, char **, FILE *(*)(struct buf *, struct stat *, int), int, int); int argc; char **argv; int main () { return f (e, argv, 0) != argv[0] || f (e, argv, 1) != argv[1]; ; return 0; } _ACEOF for ac_arg in '' -qlanglvl=extc89 -qlanglvl=ansi -std \ -Ae "-Aa -D_HPUX_SOURCE" "-Xc -D__EXTENSIONS__" do CC="$ac_save_CC $ac_arg" if ac_fn_c_try_compile "$LINENO"; then : ac_cv_prog_cc_c89=$ac_arg fi rm -f core conftest.err conftest.$ac_objext test "x$ac_cv_prog_cc_c89" != "xno" && break done rm -f conftest.$ac_ext CC=$ac_save_CC fi # AC_CACHE_VAL case "x$ac_cv_prog_cc_c89" in x) { $as_echo "$as_me:${as_lineno-$LINENO}: result: none needed" >&5 $as_echo "none needed" >&6; } ;; xno) { $as_echo "$as_me:${as_lineno-$LINENO}: result: unsupported" >&5 $as_echo "unsupported" >&6; } ;; *) CC="$CC $ac_cv_prog_cc_c89" { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_prog_cc_c89" >&5 $as_echo "$ac_cv_prog_cc_c89" >&6; } ;; esac if test "x$ac_cv_prog_cc_c89" != xno; then : fi ac_ext=c ac_cpp='$CPP $CPPFLAGS' ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' ac_compiler_gnu=$ac_cv_c_compiler_gnu ac_ext=c ac_cpp='$CPP $CPPFLAGS' ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' ac_compiler_gnu=$ac_cv_c_compiler_gnu { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether $CC understands -c and -o together" >&5 $as_echo_n "checking whether $CC understands -c and -o together... " >&6; } if ${am_cv_prog_cc_c_o+:} false; then : $as_echo_n "(cached) " >&6 else cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ int main () { ; return 0; } _ACEOF # Make sure it works both with $CC and with simple cc. 
# Following AC_PROG_CC_C_O, we do the test twice because some # compilers refuse to overwrite an existing .o file with -o, # though they will create one. am_cv_prog_cc_c_o=yes for am_i in 1 2; do if { echo "$as_me:$LINENO: $CC -c conftest.$ac_ext -o conftest2.$ac_objext" >&5 ($CC -c conftest.$ac_ext -o conftest2.$ac_objext) >&5 2>&5 ac_status=$? echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); } \ && test -f conftest2.$ac_objext; then : OK else am_cv_prog_cc_c_o=no break fi done rm -f core conftest* unset am_i fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $am_cv_prog_cc_c_o" >&5 $as_echo "$am_cv_prog_cc_c_o" >&6; } if test "$am_cv_prog_cc_c_o" != yes; then # Losing compiler, so override with the script. # FIXME: It is wrong to rewrite CC. # But if we don't then we get into trouble of one sort or another. # A longer-term fix would be to have automake use am__CC in this case, # and then we could set am__CC="\$(top_srcdir)/compile \$(CC)" CC="$am_aux_dir/compile $CC" fi ac_ext=c ac_cpp='$CPP $CPPFLAGS' ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' ac_compiler_gnu=$ac_cv_c_compiler_gnu depcc="$CC" am_compiler_list= { $as_echo "$as_me:${as_lineno-$LINENO}: checking dependency style of $depcc" >&5 $as_echo_n "checking dependency style of $depcc... " >&6; } if ${am_cv_CC_dependencies_compiler_type+:} false; then : $as_echo_n "(cached) " >&6 else if test -z "$AMDEP_TRUE" && test -f "$am_depcomp"; then # We make a subdir and do the tests there. Otherwise we can end up # making bogus files that we don't know about and never remove. For # instance it was reported that on HP-UX the gcc test will end up # making a dummy file named 'D' -- because '-MD' means "put the output # in D". rm -rf conftest.dir mkdir conftest.dir # Copy depcomp to subdir because otherwise we won't find it if we're # using a relative directory. 
cp "$am_depcomp" conftest.dir cd conftest.dir # We will build objects and dependencies in a subdirectory because # it helps to detect inapplicable dependency modes. For instance # both Tru64's cc and ICC support -MD to output dependencies as a # side effect of compilation, but ICC will put the dependencies in # the current directory while Tru64 will put them in the object # directory. mkdir sub am_cv_CC_dependencies_compiler_type=none if test "$am_compiler_list" = ""; then am_compiler_list=`sed -n 's/^#*\([a-zA-Z0-9]*\))$/\1/p' < ./depcomp` fi am__universal=false case " $depcc " in #( *\ -arch\ *\ -arch\ *) am__universal=true ;; esac for depmode in $am_compiler_list; do # Setup a source with many dependencies, because some compilers # like to wrap large dependency lists on column 80 (with \), and # we should not choose a depcomp mode which is confused by this. # # We need to recreate these files for each test, as the compiler may # overwrite some of them when testing with obscure command lines. # This happens at least with the AIX C compiler. : > sub/conftest.c for i in 1 2 3 4 5 6; do echo '#include "conftst'$i'.h"' >> sub/conftest.c # Using ": > sub/conftst$i.h" creates only sub/conftst1.h with # Solaris 10 /bin/sh. echo '/* dummy */' > sub/conftst$i.h done echo "${am__include} ${am__quote}sub/conftest.Po${am__quote}" > confmf # We check with '-c' and '-o' for the sake of the "dashmstdout" # mode. It turns out that the SunPro C++ compiler does not properly # handle '-M -o', and we need to detect this. Also, some Intel # versions had trouble with output in subdirs. am__obj=sub/conftest.${OBJEXT-o} am__minus_obj="-o $am__obj" case $depmode in gcc) # This depmode causes a compiler race in universal mode. test "$am__universal" = false || continue ;; nosideeffect) # After this tag, mechanisms are not by side-effect, so they'll # only be used when explicitly requested. 
if test "x$enable_dependency_tracking" = xyes; then continue else break fi ;; msvc7 | msvc7msys | msvisualcpp | msvcmsys) # This compiler won't grok '-c -o', but also, the minuso test has # not run yet. These depmodes are late enough in the game, and # so weak that their functioning should not be impacted. am__obj=conftest.${OBJEXT-o} am__minus_obj= ;; none) break ;; esac if depmode=$depmode \ source=sub/conftest.c object=$am__obj \ depfile=sub/conftest.Po tmpdepfile=sub/conftest.TPo \ $SHELL ./depcomp $depcc -c $am__minus_obj sub/conftest.c \ >/dev/null 2>conftest.err && grep sub/conftst1.h sub/conftest.Po > /dev/null 2>&1 && grep sub/conftst6.h sub/conftest.Po > /dev/null 2>&1 && grep $am__obj sub/conftest.Po > /dev/null 2>&1 && ${MAKE-make} -s -f confmf > /dev/null 2>&1; then # icc doesn't choke on unknown options, it will just issue warnings # or remarks (even with -Werror). So we grep stderr for any message # that says an option was ignored or not supported. # When given -MP, icc 7.0 and 7.1 complain thusly: # icc: Command line warning: ignoring option '-M'; no argument required # The diagnosis changed in icc 8.0: # icc: Command line remark: option '-MP' not supported if (grep 'ignoring option' conftest.err || grep 'not supported' conftest.err) >/dev/null 2>&1; then :; else am_cv_CC_dependencies_compiler_type=$depmode break fi fi done cd .. 
rm -rf conftest.dir else am_cv_CC_dependencies_compiler_type=none fi fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $am_cv_CC_dependencies_compiler_type" >&5 $as_echo "$am_cv_CC_dependencies_compiler_type" >&6; } CCDEPMODE=depmode=$am_cv_CC_dependencies_compiler_type if test "x$enable_dependency_tracking" != xno \ && test "$am_cv_CC_dependencies_compiler_type" = gcc3; then am__fastdepCC_TRUE= am__fastdepCC_FALSE='#' else am__fastdepCC_TRUE='#' am__fastdepCC_FALSE= fi ac_ext=c ac_cpp='$CPP $CPPFLAGS' ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' ac_compiler_gnu=$ac_cv_c_compiler_gnu { $as_echo "$as_me:${as_lineno-$LINENO}: checking how to run the C preprocessor" >&5 $as_echo_n "checking how to run the C preprocessor... " >&6; } # On Suns, sometimes $CPP names a directory. if test -n "$CPP" && test -d "$CPP"; then CPP= fi if test -z "$CPP"; then if ${ac_cv_prog_CPP+:} false; then : $as_echo_n "(cached) " >&6 else # Double quotes because CPP needs to be expanded for CPP in "$CC -E" "$CC -E -traditional-cpp" "/lib/cpp" do ac_preproc_ok=false for ac_c_preproc_warn_flag in '' yes do # Use a header file that comes with gcc, so configuring glibc # with a fresh cross-compiler works. # Prefer to if __STDC__ is defined, since # exists even on freestanding compilers. # On the NeXT, cc -E runs the code through the compiler's parser, # not just through cpp. "Syntax error" is here to catch this case. cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ #ifdef __STDC__ # include #else # include #endif Syntax error _ACEOF if ac_fn_c_try_cpp "$LINENO"; then : else # Broken: fails on valid input. continue fi rm -f conftest.err conftest.i conftest.$ac_ext # OK, works on sane cases. Now check whether nonexistent headers # can be detected and how. cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. 
*/ #include _ACEOF if ac_fn_c_try_cpp "$LINENO"; then : # Broken: success on invalid input. continue else # Passes both tests. ac_preproc_ok=: break fi rm -f conftest.err conftest.i conftest.$ac_ext done # Because of `break', _AC_PREPROC_IFELSE's cleaning code was skipped. rm -f conftest.i conftest.err conftest.$ac_ext if $ac_preproc_ok; then : break fi done ac_cv_prog_CPP=$CPP fi CPP=$ac_cv_prog_CPP else ac_cv_prog_CPP=$CPP fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $CPP" >&5 $as_echo "$CPP" >&6; } ac_preproc_ok=false for ac_c_preproc_warn_flag in '' yes do # Use a header file that comes with gcc, so configuring glibc # with a fresh cross-compiler works. # Prefer to if __STDC__ is defined, since # exists even on freestanding compilers. # On the NeXT, cc -E runs the code through the compiler's parser, # not just through cpp. "Syntax error" is here to catch this case. cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ #ifdef __STDC__ # include #else # include #endif Syntax error _ACEOF if ac_fn_c_try_cpp "$LINENO"; then : else # Broken: fails on valid input. continue fi rm -f conftest.err conftest.i conftest.$ac_ext # OK, works on sane cases. Now check whether nonexistent headers # can be detected and how. cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ #include _ACEOF if ac_fn_c_try_cpp "$LINENO"; then : # Broken: success on invalid input. continue else # Passes both tests. ac_preproc_ok=: break fi rm -f conftest.err conftest.i conftest.$ac_ext done # Because of `break', _AC_PREPROC_IFELSE's cleaning code was skipped. rm -f conftest.i conftest.err conftest.$ac_ext if $ac_preproc_ok; then : else { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5 $as_echo "$as_me: error: in \`$ac_pwd':" >&2;} as_fn_error $? 
"C preprocessor \"$CPP\" fails sanity check See \`config.log' for more details" "$LINENO" 5; } fi ac_ext=c ac_cpp='$CPP $CPPFLAGS' ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' ac_compiler_gnu=$ac_cv_c_compiler_gnu { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether ln -s works" >&5 $as_echo_n "checking whether ln -s works... " >&6; } LN_S=$as_ln_s if test "$LN_S" = "ln -s"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 $as_echo "yes" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no, using $LN_S" >&5 $as_echo "no, using $LN_S" >&6; } fi { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether ${MAKE-make} sets \$(MAKE)" >&5 $as_echo_n "checking whether ${MAKE-make} sets \$(MAKE)... " >&6; } set x ${MAKE-make} ac_make=`$as_echo "$2" | sed 's/+/p/g; s/[^a-zA-Z0-9_]/_/g'` if eval \${ac_cv_prog_make_${ac_make}_set+:} false; then : $as_echo_n "(cached) " >&6 else cat >conftest.make <<\_ACEOF SHELL = /bin/sh all: @echo '@@@%%%=$(MAKE)=@@@%%%' _ACEOF # GNU make sometimes prints "make[1]: Entering ...", which would confuse us. case `${MAKE-make} -f conftest.make 2>/dev/null` in *@@@%%%=?*=@@@%%%*) eval ac_cv_prog_make_${ac_make}_set=yes;; *) eval ac_cv_prog_make_${ac_make}_set=no;; esac rm -f conftest.make fi if eval test \$ac_cv_prog_make_${ac_make}_set = yes; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 $as_echo "yes" >&6; } SET_MAKE= else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } SET_MAKE="MAKE=${MAKE-make}" fi # Extract the first word of "perl", so it can be a program name with args. set dummy perl; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... 
" >&6; } if ${ac_cv_path_PERL+:} false; then : $as_echo_n "(cached) " >&6 else case $PERL in [\\/]* | ?:[\\/]*) ac_cv_path_PERL="$PERL" # Let the user override the test with a path. ;; *) as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_path_PERL="$as_dir/$ac_word$ac_exec_ext" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS ;; esac fi PERL=$ac_cv_path_PERL if test -n "$PERL"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $PERL" >&5 $as_echo "$PERL" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi # Extract the first word of "pkg-config", so it can be a program name with args. set dummy pkg-config; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if ${ac_cv_prog_PKGCONFIG+:} false; then : $as_echo_n "(cached) " >&6 else if test -n "$PKGCONFIG"; then ac_cv_prog_PKGCONFIG="$PKGCONFIG" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. 
for ac_exec_ext in '' $ac_executable_extensions; do if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_PKGCONFIG="yes" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS test -z "$ac_cv_prog_PKGCONFIG" && ac_cv_prog_PKGCONFIG="no" fi fi PKGCONFIG=$ac_cv_prog_PKGCONFIG if test -n "$PKGCONFIG"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $PKGCONFIG" >&5 $as_echo "$PKGCONFIG" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi if test "x$ac_cv_prog_PKGCONFIG" = "xyes"; then : if test "x$ac_cv_env_PKG_CONFIG_set" != "xset"; then if test -n "$ac_tool_prefix"; then # Extract the first word of "${ac_tool_prefix}pkg-config", so it can be a program name with args. set dummy ${ac_tool_prefix}pkg-config; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if ${ac_cv_path_PKG_CONFIG+:} false; then : $as_echo_n "(cached) " >&6 else case $PKG_CONFIG in [\\/]* | ?:[\\/]*) ac_cv_path_PKG_CONFIG="$PKG_CONFIG" # Let the user override the test with a path. ;; *) as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_path_PKG_CONFIG="$as_dir/$ac_word$ac_exec_ext" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS ;; esac fi PKG_CONFIG=$ac_cv_path_PKG_CONFIG if test -n "$PKG_CONFIG"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $PKG_CONFIG" >&5 $as_echo "$PKG_CONFIG" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi fi if test -z "$ac_cv_path_PKG_CONFIG"; then ac_pt_PKG_CONFIG=$PKG_CONFIG # Extract the first word of "pkg-config", so it can be a program name with args. 
set dummy pkg-config; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if ${ac_cv_path_ac_pt_PKG_CONFIG+:} false; then : $as_echo_n "(cached) " >&6 else case $ac_pt_PKG_CONFIG in [\\/]* | ?:[\\/]*) ac_cv_path_ac_pt_PKG_CONFIG="$ac_pt_PKG_CONFIG" # Let the user override the test with a path. ;; *) as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_path_ac_pt_PKG_CONFIG="$as_dir/$ac_word$ac_exec_ext" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS ;; esac fi ac_pt_PKG_CONFIG=$ac_cv_path_ac_pt_PKG_CONFIG if test -n "$ac_pt_PKG_CONFIG"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_pt_PKG_CONFIG" >&5 $as_echo "$ac_pt_PKG_CONFIG" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi if test "x$ac_pt_PKG_CONFIG" = x; then PKG_CONFIG="" else case $cross_compiling:$ac_tool_warned in yes:) { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 $as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} ac_tool_warned=yes ;; esac PKG_CONFIG=$ac_pt_PKG_CONFIG fi else PKG_CONFIG="$ac_cv_path_PKG_CONFIG" fi fi if test -n "$PKG_CONFIG"; then _pkg_min_version=0.9.0 { $as_echo "$as_me:${as_lineno-$LINENO}: checking pkg-config is at least version $_pkg_min_version" >&5 $as_echo_n "checking pkg-config is at least version $_pkg_min_version... 
" >&6; } if $PKG_CONFIG --atleast-pkgconfig-version $_pkg_min_version; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 $as_echo "yes" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } PKG_CONFIG="" fi fi if test "x$enable_shared" != "xyes"; then : PKG_CONFIG="$PKG_CONFIG --static" fi fi PACKAGE_LIBS_PRIVATE= { $as_echo "$as_me:${as_lineno-$LINENO}: checking for ANSI C header files" >&5 $as_echo_n "checking for ANSI C header files... " >&6; } if ${ac_cv_header_stdc+:} false; then : $as_echo_n "(cached) " >&6 else cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ #include #include #include #include int main () { ; return 0; } _ACEOF if ac_fn_c_try_compile "$LINENO"; then : ac_cv_header_stdc=yes else ac_cv_header_stdc=no fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext if test $ac_cv_header_stdc = yes; then # SunOS 4.x string.h does not declare mem*, contrary to ANSI. cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ #include _ACEOF if (eval "$ac_cpp conftest.$ac_ext") 2>&5 | $EGREP "memchr" >/dev/null 2>&1; then : else ac_cv_header_stdc=no fi rm -f conftest* fi if test $ac_cv_header_stdc = yes; then # ISC 2.0.2 stdlib.h does not declare free, contrary to ANSI. cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ #include _ACEOF if (eval "$ac_cpp conftest.$ac_ext") 2>&5 | $EGREP "free" >/dev/null 2>&1; then : else ac_cv_header_stdc=no fi rm -f conftest* fi if test $ac_cv_header_stdc = yes; then # /bin/cc in Irix-4.0.5 gets non-ANSI ctype macros unless using -ansi. if test "$cross_compiling" = yes; then : : else cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ #include #include #if ((' ' & 0x0FF) == 0x020) # define ISLOWER(c) ('a' <= (c) && (c) <= 'z') # define TOUPPER(c) (ISLOWER(c) ? 
'A' + ((c) - 'a') : (c)) #else # define ISLOWER(c) \ (('a' <= (c) && (c) <= 'i') \ || ('j' <= (c) && (c) <= 'r') \ || ('s' <= (c) && (c) <= 'z')) # define TOUPPER(c) (ISLOWER(c) ? ((c) | 0x40) : (c)) #endif #define XOR(e, f) (((e) && !(f)) || (!(e) && (f))) int main () { int i; for (i = 0; i < 256; i++) if (XOR (islower (i), ISLOWER (i)) || toupper (i) != TOUPPER (i)) return 2; return 0; } _ACEOF if ac_fn_c_try_run "$LINENO"; then : else ac_cv_header_stdc=no fi rm -f core *.core core.conftest.* gmon.out bb.out conftest$ac_exeext \ conftest.$ac_objext conftest.beam conftest.$ac_ext fi fi fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_header_stdc" >&5 $as_echo "$ac_cv_header_stdc" >&6; } if test $ac_cv_header_stdc = yes; then $as_echo "#define STDC_HEADERS 1" >>confdefs.h fi for ac_header in err.h inttypes.h unistd.h stdint.h sys/param.h sys/resource.h do : as_ac_Header=`$as_echo "ac_cv_header_$ac_header" | $as_tr_sh` ac_fn_c_check_header_mongrel "$LINENO" "$ac_header" "$as_ac_Header" "$ac_includes_default" if eval test \"x\$"$as_ac_Header"\" = x"yes"; then : cat >>confdefs.h <<_ACEOF #define `$as_echo "HAVE_$ac_header" | $as_tr_cpp` 1 _ACEOF fi done { $as_echo "$as_me:${as_lineno-$LINENO}: checking for stdbool.h that conforms to C99" >&5 $as_echo_n "checking for stdbool.h that conforms to C99... " >&6; } if ${ac_cv_header_stdbool_h+:} false; then : $as_echo_n "(cached) " >&6 else cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ #include #ifndef bool "error: bool is not defined" #endif #ifndef false "error: false is not defined" #endif #if false "error: false is not 0" #endif #ifndef true "error: true is not defined" #endif #if true != 1 "error: true is not 1" #endif #ifndef __bool_true_false_are_defined "error: __bool_true_false_are_defined is not defined" #endif struct s { _Bool s: 1; _Bool t; } s; char a[true == 1 ? 1 : -1]; char b[false == 0 ? 1 : -1]; char c[__bool_true_false_are_defined == 1 ? 1 : -1]; char d[(bool) 0.5 == true ? 
1 : -1]; /* See body of main program for 'e'. */ char f[(_Bool) 0.0 == false ? 1 : -1]; char g[true]; char h[sizeof (_Bool)]; char i[sizeof s.t]; enum { j = false, k = true, l = false * true, m = true * 256 }; /* The following fails for HP aC++/ANSI C B3910B A.05.55 [Dec 04 2003]. */ _Bool n[m]; char o[sizeof n == m * sizeof n[0] ? 1 : -1]; char p[-1 - (_Bool) 0 < 0 && -1 - (bool) 0 < 0 ? 1 : -1]; /* Catch a bug in an HP-UX C compiler. See http://gcc.gnu.org/ml/gcc-patches/2003-12/msg02303.html http://lists.gnu.org/archive/html/bug-coreutils/2005-11/msg00161.html */ _Bool q = true; _Bool *pq = &q; int main () { bool e = &s; *pq |= q; *pq |= ! q; /* Refer to every declared value, to avoid compiler optimizations. */ return (!a + !b + !c + !d + !e + !f + !g + !h + !i + !!j + !k + !!l + !m + !n + !o + !p + !q + !pq); ; return 0; } _ACEOF if ac_fn_c_try_compile "$LINENO"; then : ac_cv_header_stdbool_h=yes else ac_cv_header_stdbool_h=no fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_header_stdbool_h" >&5 $as_echo "$ac_cv_header_stdbool_h" >&6; } ac_fn_c_check_type "$LINENO" "_Bool" "ac_cv_type__Bool" "$ac_includes_default" if test "x$ac_cv_type__Bool" = xyes; then : cat >>confdefs.h <<_ACEOF #define HAVE__BOOL 1 _ACEOF fi if test $ac_cv_header_stdbool_h = yes; then $as_echo "#define HAVE_STDBOOL_H 1" >>confdefs.h fi { $as_echo "$as_me:${as_lineno-$LINENO}: checking for an ANSI C-conforming const" >&5 $as_echo_n "checking for an ANSI C-conforming const... " >&6; } if ${ac_cv_c_const+:} false; then : $as_echo_n "(cached) " >&6 else cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ int main () { #ifndef __cplusplus /* Ultrix mips cc rejects this sort of thing. */ typedef int charset[2]; const charset cs = { 0, 0 }; /* SunOS 4.1.1 cc rejects this. */ char const *const *pcpcc; char **ppc; /* NEC SVR4.0.2 mips cc rejects this. 
*/ struct point {int x, y;}; static struct point const zero = {0,0}; /* AIX XL C 1.02.0.0 rejects this. It does not let you subtract one const X* pointer from another in an arm of an if-expression whose if-part is not a constant expression */ const char *g = "string"; pcpcc = &g + (g ? g-g : 0); /* HPUX 7.0 cc rejects these. */ ++pcpcc; ppc = (char**) pcpcc; pcpcc = (char const *const *) ppc; { /* SCO 3.2v4 cc rejects this sort of thing. */ char tx; char *t = &tx; char const *s = 0 ? (char *) 0 : (char const *) 0; *t++ = 0; if (s) return 0; } { /* Someone thinks the Sun supposedly-ANSI compiler will reject this. */ int x[] = {25, 17}; const int *foo = &x[0]; ++foo; } { /* Sun SC1.0 ANSI compiler rejects this -- but not the above. */ typedef const int *iptr; iptr p = 0; ++p; } { /* AIX XL C 1.02.0.0 rejects this sort of thing, saying "k.c", line 2.27: 1506-025 (S) Operand must be a modifiable lvalue. */ struct s { int j; const int *ap[3]; } bx; struct s *b = &bx; b->j = 5; } { /* ULTRIX-32 V3.1 (Rev 9) vcc rejects this */ const int foo = 10; if (!foo) return 0; } return !cs[0] && !zero.x; #endif ; return 0; } _ACEOF if ac_fn_c_try_compile "$LINENO"; then : ac_cv_c_const=yes else ac_cv_c_const=no fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_c_const" >&5 $as_echo "$ac_cv_c_const" >&6; } if test $ac_cv_c_const = no; then $as_echo "#define const /**/" >>confdefs.h fi { $as_echo "$as_me:${as_lineno-$LINENO}: checking for uid_t in sys/types.h" >&5 $as_echo_n "checking for uid_t in sys/types.h... " >&6; } if ${ac_cv_type_uid_t+:} false; then : $as_echo_n "(cached) " >&6 else cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. 
*/ #include _ACEOF if (eval "$ac_cpp conftest.$ac_ext") 2>&5 | $EGREP "uid_t" >/dev/null 2>&1; then : ac_cv_type_uid_t=yes else ac_cv_type_uid_t=no fi rm -f conftest* fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_type_uid_t" >&5 $as_echo "$ac_cv_type_uid_t" >&6; } if test $ac_cv_type_uid_t = no; then $as_echo "#define uid_t int" >>confdefs.h $as_echo "#define gid_t int" >>confdefs.h fi ac_fn_c_check_type "$LINENO" "mode_t" "ac_cv_type_mode_t" "$ac_includes_default" if test "x$ac_cv_type_mode_t" = xyes; then : else cat >>confdefs.h <<_ACEOF #define mode_t int _ACEOF fi ac_fn_c_check_type "$LINENO" "off_t" "ac_cv_type_off_t" "$ac_includes_default" if test "x$ac_cv_type_off_t" = xyes; then : else cat >>confdefs.h <<_ACEOF #define off_t long int _ACEOF fi ac_fn_c_check_type "$LINENO" "size_t" "ac_cv_type_size_t" "$ac_includes_default" if test "x$ac_cv_type_size_t" = xyes; then : else cat >>confdefs.h <<_ACEOF #define size_t unsigned int _ACEOF fi # Check whether --enable-largefile was given. if test "${enable_largefile+set}" = set; then : enableval=$enable_largefile; fi if test "$enable_largefile" != no; then { $as_echo "$as_me:${as_lineno-$LINENO}: checking for special C compiler options needed for large files" >&5 $as_echo_n "checking for special C compiler options needed for large files... " >&6; } if ${ac_cv_sys_largefile_CC+:} false; then : $as_echo_n "(cached) " >&6 else ac_cv_sys_largefile_CC=no if test "$GCC" != yes; then ac_save_CC=$CC while :; do # IRIX 6.2 and later do not support large files by default, # so use the C compiler's -n32 option if that helps. cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ #include /* Check that off_t can represent 2**63 - 1 correctly. We can't simply define LARGE_OFF_T to be 9223372036854775807, since some C++ compilers masquerading as C compilers incorrectly reject 9223372036854775807. 
*/ #define LARGE_OFF_T (((off_t) 1 << 62) - 1 + ((off_t) 1 << 62)) int off_t_is_large[(LARGE_OFF_T % 2147483629 == 721 && LARGE_OFF_T % 2147483647 == 1) ? 1 : -1]; int main () { ; return 0; } _ACEOF if ac_fn_c_try_compile "$LINENO"; then : break fi rm -f core conftest.err conftest.$ac_objext CC="$CC -n32" if ac_fn_c_try_compile "$LINENO"; then : ac_cv_sys_largefile_CC=' -n32'; break fi rm -f core conftest.err conftest.$ac_objext break done CC=$ac_save_CC rm -f conftest.$ac_ext fi fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_sys_largefile_CC" >&5 $as_echo "$ac_cv_sys_largefile_CC" >&6; } if test "$ac_cv_sys_largefile_CC" != no; then CC=$CC$ac_cv_sys_largefile_CC fi { $as_echo "$as_me:${as_lineno-$LINENO}: checking for _FILE_OFFSET_BITS value needed for large files" >&5 $as_echo_n "checking for _FILE_OFFSET_BITS value needed for large files... " >&6; } if ${ac_cv_sys_file_offset_bits+:} false; then : $as_echo_n "(cached) " >&6 else while :; do cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ #include /* Check that off_t can represent 2**63 - 1 correctly. We can't simply define LARGE_OFF_T to be 9223372036854775807, since some C++ compilers masquerading as C compilers incorrectly reject 9223372036854775807. */ #define LARGE_OFF_T (((off_t) 1 << 62) - 1 + ((off_t) 1 << 62)) int off_t_is_large[(LARGE_OFF_T % 2147483629 == 721 && LARGE_OFF_T % 2147483647 == 1) ? 1 : -1]; int main () { ; return 0; } _ACEOF if ac_fn_c_try_compile "$LINENO"; then : ac_cv_sys_file_offset_bits=no; break fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ #define _FILE_OFFSET_BITS 64 #include /* Check that off_t can represent 2**63 - 1 correctly. We can't simply define LARGE_OFF_T to be 9223372036854775807, since some C++ compilers masquerading as C compilers incorrectly reject 9223372036854775807. 
*/ #define LARGE_OFF_T (((off_t) 1 << 62) - 1 + ((off_t) 1 << 62)) int off_t_is_large[(LARGE_OFF_T % 2147483629 == 721 && LARGE_OFF_T % 2147483647 == 1) ? 1 : -1]; int main () { ; return 0; } _ACEOF if ac_fn_c_try_compile "$LINENO"; then : ac_cv_sys_file_offset_bits=64; break fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext ac_cv_sys_file_offset_bits=unknown break done fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_sys_file_offset_bits" >&5 $as_echo "$ac_cv_sys_file_offset_bits" >&6; } case $ac_cv_sys_file_offset_bits in #( no | unknown) ;; *) cat >>confdefs.h <<_ACEOF #define _FILE_OFFSET_BITS $ac_cv_sys_file_offset_bits _ACEOF ;; esac rm -rf conftest* if test $ac_cv_sys_file_offset_bits = unknown; then { $as_echo "$as_me:${as_lineno-$LINENO}: checking for _LARGE_FILES value needed for large files" >&5 $as_echo_n "checking for _LARGE_FILES value needed for large files... " >&6; } if ${ac_cv_sys_large_files+:} false; then : $as_echo_n "(cached) " >&6 else while :; do cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ #include /* Check that off_t can represent 2**63 - 1 correctly. We can't simply define LARGE_OFF_T to be 9223372036854775807, since some C++ compilers masquerading as C compilers incorrectly reject 9223372036854775807. */ #define LARGE_OFF_T (((off_t) 1 << 62) - 1 + ((off_t) 1 << 62)) int off_t_is_large[(LARGE_OFF_T % 2147483629 == 721 && LARGE_OFF_T % 2147483647 == 1) ? 1 : -1]; int main () { ; return 0; } _ACEOF if ac_fn_c_try_compile "$LINENO"; then : ac_cv_sys_large_files=no; break fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ #define _LARGE_FILES 1 #include /* Check that off_t can represent 2**63 - 1 correctly. We can't simply define LARGE_OFF_T to be 9223372036854775807, since some C++ compilers masquerading as C compilers incorrectly reject 9223372036854775807. 
*/ #define LARGE_OFF_T (((off_t) 1 << 62) - 1 + ((off_t) 1 << 62)) int off_t_is_large[(LARGE_OFF_T % 2147483629 == 721 && LARGE_OFF_T % 2147483647 == 1) ? 1 : -1]; int main () { ; return 0; } _ACEOF if ac_fn_c_try_compile "$LINENO"; then : ac_cv_sys_large_files=1; break fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext ac_cv_sys_large_files=unknown break done fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_sys_large_files" >&5 $as_echo "$ac_cv_sys_large_files" >&6; } case $ac_cv_sys_large_files in #( no | unknown) ;; *) cat >>confdefs.h <<_ACEOF #define _LARGE_FILES $ac_cv_sys_large_files _ACEOF ;; esac rm -rf conftest* fi fi # The Ultrix 4.2 mips builtin alloca declared by alloca.h only works # for constant arguments. Useless! { $as_echo "$as_me:${as_lineno-$LINENO}: checking for working alloca.h" >&5 $as_echo_n "checking for working alloca.h... " >&6; } if ${ac_cv_working_alloca_h+:} false; then : $as_echo_n "(cached) " >&6 else cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ #include int main () { char *p = (char *) alloca (2 * sizeof (int)); if (p) return 0; ; return 0; } _ACEOF if ac_fn_c_try_link "$LINENO"; then : ac_cv_working_alloca_h=yes else ac_cv_working_alloca_h=no fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_working_alloca_h" >&5 $as_echo "$ac_cv_working_alloca_h" >&6; } if test $ac_cv_working_alloca_h = yes; then $as_echo "#define HAVE_ALLOCA_H 1" >>confdefs.h fi { $as_echo "$as_me:${as_lineno-$LINENO}: checking for alloca" >&5 $as_echo_n "checking for alloca... " >&6; } if ${ac_cv_func_alloca_works+:} false; then : $as_echo_n "(cached) " >&6 else cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. 
*/ #ifdef __GNUC__ # define alloca __builtin_alloca #else # ifdef _MSC_VER # include # define alloca _alloca # else # ifdef HAVE_ALLOCA_H # include # else # ifdef _AIX #pragma alloca # else # ifndef alloca /* predefined by HP cc +Olibcalls */ void *alloca (size_t); # endif # endif # endif # endif #endif int main () { char *p = (char *) alloca (1); if (p) return 0; ; return 0; } _ACEOF if ac_fn_c_try_link "$LINENO"; then : ac_cv_func_alloca_works=yes else ac_cv_func_alloca_works=no fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_func_alloca_works" >&5 $as_echo "$ac_cv_func_alloca_works" >&6; } if test $ac_cv_func_alloca_works = yes; then $as_echo "#define HAVE_ALLOCA 1" >>confdefs.h else # The SVR3 libPW and SVR4 libucb both contain incompatible functions # that cause trouble. Some versions do not even contain alloca or # contain a buggy version. If you still want to use their alloca, # use ar to extract alloca.o from them instead of compiling alloca.c. ALLOCA=\${LIBOBJDIR}alloca.$ac_objext $as_echo "#define C_ALLOCA 1" >>confdefs.h { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether \`alloca.c' needs Cray hooks" >&5 $as_echo_n "checking whether \`alloca.c' needs Cray hooks... " >&6; } if ${ac_cv_os_cray+:} false; then : $as_echo_n "(cached) " >&6 else cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ #if defined CRAY && ! 
defined CRAY2 webecray #else wenotbecray #endif _ACEOF if (eval "$ac_cpp conftest.$ac_ext") 2>&5 | $EGREP "webecray" >/dev/null 2>&1; then : ac_cv_os_cray=yes else ac_cv_os_cray=no fi rm -f conftest* fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_os_cray" >&5 $as_echo "$ac_cv_os_cray" >&6; } if test $ac_cv_os_cray = yes; then for ac_func in _getb67 GETB67 getb67; do as_ac_var=`$as_echo "ac_cv_func_$ac_func" | $as_tr_sh` ac_fn_c_check_func "$LINENO" "$ac_func" "$as_ac_var" if eval test \"x\$"$as_ac_var"\" = x"yes"; then : cat >>confdefs.h <<_ACEOF #define CRAY_STACKSEG_END $ac_func _ACEOF break fi done fi { $as_echo "$as_me:${as_lineno-$LINENO}: checking stack direction for C alloca" >&5 $as_echo_n "checking stack direction for C alloca... " >&6; } if ${ac_cv_c_stack_direction+:} false; then : $as_echo_n "(cached) " >&6 else if test "$cross_compiling" = yes; then : ac_cv_c_stack_direction=0 else cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ $ac_includes_default int find_stack_direction (int *addr, int depth) { int dir, dummy = 0; if (! addr) addr = &dummy; *addr = addr < &dummy ? 1 : addr == &dummy ? 0 : -1; dir = depth ? find_stack_direction (addr, depth - 1) : 0; return dir + dummy; } int main (int argc, char **argv) { return find_stack_direction (0, argc + !argv + 20) < 0; } _ACEOF if ac_fn_c_try_run "$LINENO"; then : ac_cv_c_stack_direction=1 else ac_cv_c_stack_direction=-1 fi rm -f core *.core core.conftest.* gmon.out bb.out conftest$ac_exeext \ conftest.$ac_objext conftest.beam conftest.$ac_ext fi fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_c_stack_direction" >&5 $as_echo "$ac_cv_c_stack_direction" >&6; } cat >>confdefs.h <<_ACEOF #define STACK_DIRECTION $ac_cv_c_stack_direction _ACEOF fi { $as_echo "$as_me:${as_lineno-$LINENO}: checking for error_at_line" >&5 $as_echo_n "checking for error_at_line... 
" >&6; } if ${ac_cv_lib_error_at_line+:} false; then : $as_echo_n "(cached) " >&6 else cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ #include int main () { error_at_line (0, 0, "", 0, "an error occurred"); ; return 0; } _ACEOF if ac_fn_c_try_link "$LINENO"; then : ac_cv_lib_error_at_line=yes else ac_cv_lib_error_at_line=no fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_lib_error_at_line" >&5 $as_echo "$ac_cv_lib_error_at_line" >&6; } if test $ac_cv_lib_error_at_line = no; then case " $LIBOBJS " in *" error.$ac_objext "* ) ;; *) LIBOBJS="$LIBOBJS error.$ac_objext" ;; esac fi { $as_echo "$as_me:${as_lineno-$LINENO}: checking for _LARGEFILE_SOURCE value needed for large files" >&5 $as_echo_n "checking for _LARGEFILE_SOURCE value needed for large files... " >&6; } if ${ac_cv_sys_largefile_source+:} false; then : $as_echo_n "(cached) " >&6 else while :; do cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ #include /* for off_t */ #include int main () { int (*fp) (FILE *, off_t, int) = fseeko; return fseeko (stdin, 0, 0) && fp (stdin, 0, 0); ; return 0; } _ACEOF if ac_fn_c_try_link "$LINENO"; then : ac_cv_sys_largefile_source=no; break fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. 
*/ #define _LARGEFILE_SOURCE 1 #include /* for off_t */ #include int main () { int (*fp) (FILE *, off_t, int) = fseeko; return fseeko (stdin, 0, 0) && fp (stdin, 0, 0); ; return 0; } _ACEOF if ac_fn_c_try_link "$LINENO"; then : ac_cv_sys_largefile_source=1; break fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext ac_cv_sys_largefile_source=unknown break done fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_sys_largefile_source" >&5 $as_echo "$ac_cv_sys_largefile_source" >&6; } case $ac_cv_sys_largefile_source in #( no | unknown) ;; *) cat >>confdefs.h <<_ACEOF #define _LARGEFILE_SOURCE $ac_cv_sys_largefile_source _ACEOF ;; esac rm -rf conftest* # We used to try defining _XOPEN_SOURCE=500 too, to work around a bug # in glibc 2.1.3, but that breaks too many other things. # If you want fseeko and ftello with glibc, upgrade to a fixed glibc. if test $ac_cv_sys_largefile_source != unknown; then $as_echo "#define HAVE_FSEEKO 1" >>confdefs.h fi if test $ac_cv_c_compiler_gnu = yes; then { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether $CC needs -traditional" >&5 $as_echo_n "checking whether $CC needs -traditional... " >&6; } if ${ac_cv_prog_gcc_traditional+:} false; then : $as_echo_n "(cached) " >&6 else ac_pattern="Autoconf.*'x'" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ #include Autoconf TIOCGETP _ACEOF if (eval "$ac_cpp conftest.$ac_ext") 2>&5 | $EGREP "$ac_pattern" >/dev/null 2>&1; then : ac_cv_prog_gcc_traditional=yes else ac_cv_prog_gcc_traditional=no fi rm -f conftest* if test $ac_cv_prog_gcc_traditional = no; then cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. 
*/ #include Autoconf TCGETA _ACEOF if (eval "$ac_cpp conftest.$ac_ext") 2>&5 | $EGREP "$ac_pattern" >/dev/null 2>&1; then : ac_cv_prog_gcc_traditional=yes fi rm -f conftest* fi fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_prog_gcc_traditional" >&5 $as_echo "$ac_cv_prog_gcc_traditional" >&6; } if test $ac_cv_prog_gcc_traditional = yes; then CC="$CC -traditional" fi fi { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether lstat correctly handles trailing slash" >&5 $as_echo_n "checking whether lstat correctly handles trailing slash... " >&6; } if ${ac_cv_func_lstat_dereferences_slashed_symlink+:} false; then : $as_echo_n "(cached) " >&6 else rm -f conftest.sym conftest.file echo >conftest.file if test "$as_ln_s" = "ln -s" && ln -s conftest.file conftest.sym; then if test "$cross_compiling" = yes; then : ac_cv_func_lstat_dereferences_slashed_symlink=no else cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ $ac_includes_default int main () { struct stat sbuf; /* Linux will dereference the symlink and fail, as required by POSIX. That is better in the sense that it means we will not have to compile and use the lstat wrapper. */ return lstat ("conftest.sym/", &sbuf) == 0; ; return 0; } _ACEOF if ac_fn_c_try_run "$LINENO"; then : ac_cv_func_lstat_dereferences_slashed_symlink=yes else ac_cv_func_lstat_dereferences_slashed_symlink=no fi rm -f core *.core core.conftest.* gmon.out bb.out conftest$ac_exeext \ conftest.$ac_objext conftest.beam conftest.$ac_ext fi else # If the `ln -s' command failed, then we probably don't even # have an lstat function. 
ac_cv_func_lstat_dereferences_slashed_symlink=no fi rm -f conftest.sym conftest.file fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_func_lstat_dereferences_slashed_symlink" >&5 $as_echo "$ac_cv_func_lstat_dereferences_slashed_symlink" >&6; } test $ac_cv_func_lstat_dereferences_slashed_symlink = yes && cat >>confdefs.h <<_ACEOF #define LSTAT_FOLLOWS_SLASHED_SYMLINK 1 _ACEOF if test "x$ac_cv_func_lstat_dereferences_slashed_symlink" = xno; then case " $LIBOBJS " in *" lstat.$ac_objext "* ) ;; *) LIBOBJS="$LIBOBJS lstat.$ac_objext" ;; esac fi { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether lstat accepts an empty string" >&5 $as_echo_n "checking whether lstat accepts an empty string... " >&6; } if ${ac_cv_func_lstat_empty_string_bug+:} false; then : $as_echo_n "(cached) " >&6 else if test "$cross_compiling" = yes; then : ac_cv_func_lstat_empty_string_bug=yes else cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ $ac_includes_default int main () { struct stat sbuf; return lstat ("", &sbuf) == 0; ; return 0; } _ACEOF if ac_fn_c_try_run "$LINENO"; then : ac_cv_func_lstat_empty_string_bug=no else ac_cv_func_lstat_empty_string_bug=yes fi rm -f core *.core core.conftest.* gmon.out bb.out conftest$ac_exeext \ conftest.$ac_objext conftest.beam conftest.$ac_ext fi fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_func_lstat_empty_string_bug" >&5 $as_echo "$ac_cv_func_lstat_empty_string_bug" >&6; } if test $ac_cv_func_lstat_empty_string_bug = yes; then case " $LIBOBJS " in *" lstat.$ac_objext "* ) ;; *) LIBOBJS="$LIBOBJS lstat.$ac_objext" ;; esac cat >>confdefs.h <<_ACEOF #define HAVE_LSTAT_EMPTY_STRING_BUG 1 _ACEOF fi { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether lstat correctly handles trailing slash" >&5 $as_echo_n "checking whether lstat correctly handles trailing slash... 
" >&6; } if ${ac_cv_func_lstat_dereferences_slashed_symlink+:} false; then : $as_echo_n "(cached) " >&6 else rm -f conftest.sym conftest.file echo >conftest.file if test "$as_ln_s" = "ln -s" && ln -s conftest.file conftest.sym; then if test "$cross_compiling" = yes; then : ac_cv_func_lstat_dereferences_slashed_symlink=no else cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ $ac_includes_default int main () { struct stat sbuf; /* Linux will dereference the symlink and fail, as required by POSIX. That is better in the sense that it means we will not have to compile and use the lstat wrapper. */ return lstat ("conftest.sym/", &sbuf) == 0; ; return 0; } _ACEOF if ac_fn_c_try_run "$LINENO"; then : ac_cv_func_lstat_dereferences_slashed_symlink=yes else ac_cv_func_lstat_dereferences_slashed_symlink=no fi rm -f core *.core core.conftest.* gmon.out bb.out conftest$ac_exeext \ conftest.$ac_objext conftest.beam conftest.$ac_ext fi else # If the `ln -s' command failed, then we probably don't even # have an lstat function. 
ac_cv_func_lstat_dereferences_slashed_symlink=no fi rm -f conftest.sym conftest.file fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_func_lstat_dereferences_slashed_symlink" >&5 $as_echo "$ac_cv_func_lstat_dereferences_slashed_symlink" >&6; } test $ac_cv_func_lstat_dereferences_slashed_symlink = yes && cat >>confdefs.h <<_ACEOF #define LSTAT_FOLLOWS_SLASHED_SYMLINK 1 _ACEOF if test "x$ac_cv_func_lstat_dereferences_slashed_symlink" = xno; then case " $LIBOBJS " in *" lstat.$ac_objext "* ) ;; *) LIBOBJS="$LIBOBJS lstat.$ac_objext" ;; esac fi for ac_header in sys/select.h sys/socket.h do : as_ac_Header=`$as_echo "ac_cv_header_$ac_header" | $as_tr_sh` ac_fn_c_check_header_mongrel "$LINENO" "$ac_header" "$as_ac_Header" "$ac_includes_default" if eval test \"x\$"$as_ac_Header"\" = x"yes"; then : cat >>confdefs.h <<_ACEOF #define `$as_echo "HAVE_$ac_header" | $as_tr_cpp` 1 _ACEOF fi done { $as_echo "$as_me:${as_lineno-$LINENO}: checking types of arguments for select" >&5 $as_echo_n "checking types of arguments for select... " >&6; } if ${ac_cv_func_select_args+:} false; then : $as_echo_n "(cached) " >&6 else for ac_arg234 in 'fd_set *' 'int *' 'void *'; do for ac_arg1 in 'int' 'size_t' 'unsigned long int' 'unsigned int'; do for ac_arg5 in 'struct timeval *' 'const struct timeval *'; do cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ $ac_includes_default #ifdef HAVE_SYS_SELECT_H # include #endif #ifdef HAVE_SYS_SOCKET_H # include #endif int main () { extern int select ($ac_arg1, $ac_arg234, $ac_arg234, $ac_arg234, $ac_arg5); ; return 0; } _ACEOF if ac_fn_c_try_compile "$LINENO"; then : ac_cv_func_select_args="$ac_arg1,$ac_arg234,$ac_arg5"; break 3 fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext done done done # Provide a safe default value. 
: "${ac_cv_func_select_args=int,int *,struct timeval *}" fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_func_select_args" >&5 $as_echo "$ac_cv_func_select_args" >&6; } ac_save_IFS=$IFS; IFS=',' set dummy `echo "$ac_cv_func_select_args" | sed 's/\*/\*/g'` IFS=$ac_save_IFS shift cat >>confdefs.h <<_ACEOF #define SELECT_TYPE_ARG1 $1 _ACEOF cat >>confdefs.h <<_ACEOF #define SELECT_TYPE_ARG234 ($2) _ACEOF cat >>confdefs.h <<_ACEOF #define SELECT_TYPE_ARG5 ($3) _ACEOF rm -f conftest* for ac_header in $ac_header_list do : as_ac_Header=`$as_echo "ac_cv_header_$ac_header" | $as_tr_sh` ac_fn_c_check_header_compile "$LINENO" "$ac_header" "$as_ac_Header" "$ac_includes_default " if eval test \"x\$"$as_ac_Header"\" = x"yes"; then : cat >>confdefs.h <<_ACEOF #define `$as_echo "HAVE_$ac_header" | $as_tr_cpp` 1 _ACEOF fi done { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether utime accepts a null argument" >&5 $as_echo_n "checking whether utime accepts a null argument... " >&6; } if ${ac_cv_func_utime_null+:} false; then : $as_echo_n "(cached) " >&6 else rm -f conftest.data; >conftest.data # Sequent interprets utime(file, 0) to mean use start of epoch. Wrong. if test "$cross_compiling" = yes; then : ac_cv_func_utime_null='guessing yes' else cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ $ac_includes_default #ifdef HAVE_UTIME_H # include #endif int main () { struct stat s, t; return ! 
(stat ("conftest.data", &s) == 0 && utime ("conftest.data", 0) == 0 && stat ("conftest.data", &t) == 0 && t.st_mtime >= s.st_mtime && t.st_mtime - s.st_mtime < 120); ; return 0; } _ACEOF if ac_fn_c_try_run "$LINENO"; then : ac_cv_func_utime_null=yes else ac_cv_func_utime_null=no fi rm -f core *.core core.conftest.* gmon.out bb.out conftest$ac_exeext \ conftest.$ac_objext conftest.beam conftest.$ac_ext fi fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_func_utime_null" >&5 $as_echo "$ac_cv_func_utime_null" >&6; } if test "x$ac_cv_func_utime_null" != xno; then ac_cv_func_utime_null=yes $as_echo "#define HAVE_UTIME_NULL 1" >>confdefs.h fi rm -f conftest.data for ac_func in vprintf do : ac_fn_c_check_func "$LINENO" "vprintf" "ac_cv_func_vprintf" if test "x$ac_cv_func_vprintf" = xyes; then : cat >>confdefs.h <<_ACEOF #define HAVE_VPRINTF 1 _ACEOF ac_fn_c_check_func "$LINENO" "_doprnt" "ac_cv_func__doprnt" if test "x$ac_cv_func__doprnt" = xyes; then : $as_echo "#define HAVE_DOPRNT 1" >>confdefs.h fi fi done for ac_func in ishexnumber err errx warn warnx vasprintf getrusage do : as_ac_var=`$as_echo "ac_cv_func_$ac_func" | $as_tr_sh` ac_fn_c_check_func "$LINENO" "$ac_func" "$as_ac_var" if eval test \"x\$"$as_ac_var"\" = x"yes"; then : cat >>confdefs.h <<_ACEOF #define `$as_echo "HAVE_$ac_func" | $as_tr_cpp` 1 _ACEOF fi done for ac_func in strlcpy strlcat do : as_ac_var=`$as_echo "ac_cv_func_$ac_func" | $as_tr_sh` ac_fn_c_check_func "$LINENO" "$ac_func" "$as_ac_var" if eval test \"x\$"$as_ac_var"\" = x"yes"; then : cat >>confdefs.h <<_ACEOF #define `$as_echo "HAVE_$ac_func" | $as_tr_cpp` 1 _ACEOF fi done ac_ext=c ac_cpp='$CPP $CPPFLAGS' ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' ac_compiler_gnu=$ac_cv_c_compiler_gnu ax_pthread_ok=no # We used to check for pthread.h first, but this fails if pthread.h # requires special compiler flags (e.g. 
on True64 or Sequent). # It gets checked for in the link test anyway. # First of all, check if the user has set any of the PTHREAD_LIBS, # etcetera environment variables, and if threads linking works using # them: if test x"$PTHREAD_LIBS$PTHREAD_CFLAGS" != x; then save_CFLAGS="$CFLAGS" CFLAGS="$CFLAGS $PTHREAD_CFLAGS" save_LIBS="$LIBS" LIBS="$PTHREAD_LIBS $LIBS" { $as_echo "$as_me:${as_lineno-$LINENO}: checking for pthread_join in LIBS=$PTHREAD_LIBS with CFLAGS=$PTHREAD_CFLAGS" >&5 $as_echo_n "checking for pthread_join in LIBS=$PTHREAD_LIBS with CFLAGS=$PTHREAD_CFLAGS... " >&6; } cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ /* Override any GCC internal prototype to avoid an error. Use char because int might match the return type of a GCC builtin and then its argument prototype would still apply. */ #ifdef __cplusplus extern "C" #endif char pthread_join (); int main () { return pthread_join (); ; return 0; } _ACEOF if ac_fn_c_try_link "$LINENO"; then : ax_pthread_ok=yes fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ax_pthread_ok" >&5 $as_echo "$ax_pthread_ok" >&6; } if test x"$ax_pthread_ok" = xno; then PTHREAD_LIBS="" PTHREAD_CFLAGS="" fi LIBS="$save_LIBS" CFLAGS="$save_CFLAGS" fi # We must check for the threads library under a number of different # names; the ordering is very important because some systems # (e.g. DEC) have both -lpthread and -lpthreads, where one of the # libraries is broken (non-POSIX). # Create a list of thread flags to try. Items starting with a "-" are # C compiler flags, and other items are library names, except for "none" # which indicates that we try without any flags at all, and "pthread-config" # which is a program returning the flags for the Pth emulation library. 
ax_pthread_flags="pthreads none -Kthread -kthread lthread -pthread lpthread -pthreads -mthreads pthread --thread-safe -mt pthread-config" # The ordering *is* (sometimes) important. Some notes on the # individual items follow: # pthreads: AIX (must check this before -lpthread) # none: in case threads are in libc; should be tried before -Kthread and # other compiler flags to prevent continual compiler warnings # -Kthread: Sequent (threads in libc, but -Kthread needed for pthread.h) # -kthread: FreeBSD kernel threads (preferred to -pthread since SMP-able) # lthread: LinuxThreads port on FreeBSD (also preferred to -pthread) # -pthread: Linux/gcc (kernel threads), BSD/gcc (userland threads) # -pthreads: Solaris/gcc # -mthreads: Mingw32/gcc, Lynx/gcc # -mt: Sun Workshop C (may only link SunOS threads [-lthread], but it # doesn't hurt to check since this sometimes defines pthreads too; # also defines -D_REENTRANT) # ... -mt is also the pthreads flag for HP/aCC # pthread: Linux, etcetera # --thread-safe: KAI C++ # pthread-config: use pthread-config program (for GNU Pth library) case "${host_cpu}-${host_os}" in *solaris*) # On Solaris (at least, for some versions), libc contains stubbed # (non-functional) versions of the pthreads routines, so link-based # tests will erroneously succeed. (We need to link with -pthreads/-mt/ # -lpthread.) (The stubs are missing pthread_cleanup_push, or rather # a function called by this macro, so we could check for that, but # who knows whether they'll stub that too in a future libc.) So, # we'll just look for -pthreads and -lpthread first: ax_pthread_flags="-pthreads pthread -mt -pthread $ax_pthread_flags" ;; *-darwin*) ax_pthread_flags="-pthread $ax_pthread_flags" ;; esac if test x"$ax_pthread_ok" = xno; then for flag in $ax_pthread_flags; do case $flag in none) { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether pthreads work without any flags" >&5 $as_echo_n "checking whether pthreads work without any flags... 
" >&6; } ;; -*) { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether pthreads work with $flag" >&5 $as_echo_n "checking whether pthreads work with $flag... " >&6; } PTHREAD_CFLAGS="$flag" ;; pthread-config) # Extract the first word of "pthread-config", so it can be a program name with args. set dummy pthread-config; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if ${ac_cv_prog_ax_pthread_config+:} false; then : $as_echo_n "(cached) " >&6 else if test -n "$ax_pthread_config"; then ac_cv_prog_ax_pthread_config="$ax_pthread_config" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_ax_pthread_config="yes" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS test -z "$ac_cv_prog_ax_pthread_config" && ac_cv_prog_ax_pthread_config="no" fi fi ax_pthread_config=$ac_cv_prog_ax_pthread_config if test -n "$ax_pthread_config"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ax_pthread_config" >&5 $as_echo "$ax_pthread_config" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi if test x"$ax_pthread_config" = xno; then continue; fi PTHREAD_CFLAGS="`pthread-config --cflags`" PTHREAD_LIBS="`pthread-config --ldflags` `pthread-config --libs`" ;; *) { $as_echo "$as_me:${as_lineno-$LINENO}: checking for the pthreads library -l$flag" >&5 $as_echo_n "checking for the pthreads library -l$flag... " >&6; } PTHREAD_LIBS="-l$flag" ;; esac save_LIBS="$LIBS" save_CFLAGS="$CFLAGS" LIBS="$PTHREAD_LIBS $LIBS" CFLAGS="$CFLAGS $PTHREAD_CFLAGS" # Check for various functions. We must include pthread.h, # since some functions may be macros. 
(On the Sequent, we # need a special flag -Kthread to make this header compile.) # We check for pthread_join because it is in -lpthread on IRIX # while pthread_create is in libc. We check for pthread_attr_init # due to DEC craziness with -lpthreads. We check for # pthread_cleanup_push because it is one of the few pthread # functions on Solaris that doesn't have a non-functional libc stub. # We try pthread_create on general principles. cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ #include static void routine(void* a) {a=0;} static void* start_routine(void* a) {return a;} int main () { pthread_t th; pthread_attr_t attr; pthread_create(&th,0,start_routine,0); pthread_join(th, 0); pthread_attr_init(&attr); pthread_cleanup_push(routine, 0); pthread_cleanup_pop(0); ; return 0; } _ACEOF if ac_fn_c_try_link "$LINENO"; then : ax_pthread_ok=yes fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext LIBS="$save_LIBS" CFLAGS="$save_CFLAGS" { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ax_pthread_ok" >&5 $as_echo "$ax_pthread_ok" >&6; } if test "x$ax_pthread_ok" = xyes; then break; fi PTHREAD_LIBS="" PTHREAD_CFLAGS="" done fi # Various other checks: if test "x$ax_pthread_ok" = xyes; then save_LIBS="$LIBS" LIBS="$PTHREAD_LIBS $LIBS" save_CFLAGS="$CFLAGS" CFLAGS="$CFLAGS $PTHREAD_CFLAGS" # Detect AIX lossage: JOINABLE attribute is called UNDETACHED. { $as_echo "$as_me:${as_lineno-$LINENO}: checking for joinable pthread attribute" >&5 $as_echo_n "checking for joinable pthread attribute... " >&6; } attr_name=unknown for attr in PTHREAD_CREATE_JOINABLE PTHREAD_CREATE_UNDETACHED; do cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. 
*/ #include int main () { int attr=$attr; return attr; ; return 0; } _ACEOF if ac_fn_c_try_link "$LINENO"; then : attr_name=$attr; break fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext done { $as_echo "$as_me:${as_lineno-$LINENO}: result: $attr_name" >&5 $as_echo "$attr_name" >&6; } if test "$attr_name" != PTHREAD_CREATE_JOINABLE; then cat >>confdefs.h <<_ACEOF #define PTHREAD_CREATE_JOINABLE $attr_name _ACEOF fi { $as_echo "$as_me:${as_lineno-$LINENO}: checking if more special flags are required for pthreads" >&5 $as_echo_n "checking if more special flags are required for pthreads... " >&6; } flag=no case "${host_cpu}-${host_os}" in *-aix* | *-freebsd* | *-darwin*) flag="-D_THREAD_SAFE";; *solaris* | *-osf* | *-hpux*) flag="-D_REENTRANT";; esac { $as_echo "$as_me:${as_lineno-$LINENO}: result: ${flag}" >&5 $as_echo "${flag}" >&6; } if test "x$flag" != xno; then PTHREAD_CFLAGS="$flag $PTHREAD_CFLAGS" fi LIBS="$save_LIBS" CFLAGS="$save_CFLAGS" # More AIX lossage: must compile with xlc_r or cc_r if test x"$GCC" != xyes; then for ac_prog in xlc_r cc_r do # Extract the first word of "$ac_prog", so it can be a program name with args. set dummy $ac_prog; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if ${ac_cv_prog_PTHREAD_CC+:} false; then : $as_echo_n "(cached) " >&6 else if test -n "$PTHREAD_CC"; then ac_cv_prog_PTHREAD_CC="$PTHREAD_CC" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. 
for ac_exec_ext in '' $ac_executable_extensions; do if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_PTHREAD_CC="$ac_prog" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS fi fi PTHREAD_CC=$ac_cv_prog_PTHREAD_CC if test -n "$PTHREAD_CC"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $PTHREAD_CC" >&5 $as_echo "$PTHREAD_CC" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi test -n "$PTHREAD_CC" && break done test -n "$PTHREAD_CC" || PTHREAD_CC="${CC}" else PTHREAD_CC=$CC fi else PTHREAD_CC="$CC" fi # Finally, execute ACTION-IF-FOUND/ACTION-IF-NOT-FOUND: if test x"$ax_pthread_ok" = xyes; then $as_echo "#define HAVE_PTHREAD 1" >>confdefs.h CLIBS="$PTHREAD_LIBS $LIBS" CPPFLAGS="$CPPFLAGS $PTHREAD_CFLAGS" LDFLAGS="$LDFLAGS $PTHREAD_CFLAGS" CC="$PTHREAD_CC" : else ax_pthread_ok=no fi ac_ext=c ac_cpp='$CPP $CPPFLAGS' ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' ac_compiler_gnu=$ac_cv_c_compiler_gnu # Check whether --enable-multithreading was given. if test "${enable_multithreading+set}" = set; then : enableval=$enable_multithreading; fi if test "x$ax_pthread_ok" = "xyes" && test "x$enable_multithreading" != "xno"; then : ax_multithread=yes else ax_multithread=no fi case "$host" in *-*-mingw*) ;; *) if test -d /usr/local/include; then CPPFLAGS="$CPPFLAGS -I/usr/local/include" LDFLAGS="$LDFLAGS -L/usr/local/lib" fi ;; esac # Check whether --enable-java was given. if test "${enable_java+set}" = set; then : enableval=$enable_java; fi { $as_echo "$as_me:${as_lineno-$LINENO}: checking for main in -lstdc++" >&5 $as_echo_n "checking for main in -lstdc++... 
" >&6; } if ${ac_cv_lib_stdcpp_main+:} false; then : $as_echo_n "(cached) " >&6 else ac_check_lib_save_LIBS=$LIBS LIBS="-lstdc++ $LIBS" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ int main () { return main (); ; return 0; } _ACEOF if ac_fn_c_try_link "$LINENO"; then : ac_cv_lib_stdcpp_main=yes else ac_cv_lib_stdcpp_main=no fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext LIBS=$ac_check_lib_save_LIBS fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_lib_stdcpp_main" >&5 $as_echo "$ac_cv_lib_stdcpp_main" >&6; } if test "x$ac_cv_lib_stdcpp_main" = xyes; then : cat >>confdefs.h <<_ACEOF #define HAVE_LIBSTDC__ 1 _ACEOF LIBS="-lstdc++ $LIBS" else as_fn_error $? "missing libstdc++" "$LINENO" 5 fi for ac_header in list do : ac_fn_c_check_header_compile "$LINENO" "list" "ac_cv_header_list" "as_fn_error $? \"missing STL list class header\" \"$LINENO\" 5 " if test "x$ac_cv_header_list" = xyes; then : cat >>confdefs.h <<_ACEOF #define HAVE_LIST 1 _ACEOF fi done for ac_header in map do : ac_fn_c_check_header_compile "$LINENO" "map" "ac_cv_header_map" "as_fn_error $? \"missing STL map class header\" \"$LINENO\" 5 " if test "x$ac_cv_header_map" = xyes; then : cat >>confdefs.h <<_ACEOF #define HAVE_MAP 1 _ACEOF fi done for ac_header in queue do : ac_fn_c_check_header_compile "$LINENO" "queue" "ac_cv_header_queue" "as_fn_error $? \"missing STL queue class header\" \"$LINENO\" 5 " if test "x$ac_cv_header_queue" = xyes; then : cat >>confdefs.h <<_ACEOF #define HAVE_QUEUE 1 _ACEOF fi done for ac_header in set do : ac_fn_c_check_header_compile "$LINENO" "set" "ac_cv_header_set" "as_fn_error $? \"missing STL set class header\" \"$LINENO\" 5 " if test "x$ac_cv_header_set" = xyes; then : cat >>confdefs.h <<_ACEOF #define HAVE_SET 1 _ACEOF fi done for ac_header in stack do : ac_fn_c_check_header_compile "$LINENO" "stack" "ac_cv_header_stack" "as_fn_error $? 
\"missing STL stack class header\" \"$LINENO\" 5 " if test "x$ac_cv_header_stack" = xyes; then : cat >>confdefs.h <<_ACEOF #define HAVE_STACK 1 _ACEOF fi done for ac_header in streambuf do : ac_fn_c_check_header_compile "$LINENO" "streambuf" "ac_cv_header_streambuf" "as_fn_error $? \"missing STL streambuf class header\" \"$LINENO\" 5 " if test "x$ac_cv_header_streambuf" = xyes; then : cat >>confdefs.h <<_ACEOF #define HAVE_STREAMBUF 1 _ACEOF fi done for ac_header in string do : ac_fn_c_check_header_compile "$LINENO" "string" "ac_cv_header_string" "as_fn_error $? \"missing STL string class header\" \"$LINENO\" 5 " if test "x$ac_cv_header_string" = xyes; then : cat >>confdefs.h <<_ACEOF #define HAVE_STRING 1 _ACEOF fi done for ac_header in vector do : ac_fn_c_check_header_compile "$LINENO" "vector" "ac_cv_header_vector" "as_fn_error $? \"missing STL vector class header\" \"$LINENO\" 5 " if test "x$ac_cv_header_vector" = xyes; then : cat >>confdefs.h <<_ACEOF #define HAVE_VECTOR 1 _ACEOF fi done if test "x$ac_cv_prog_PKGCONFIG" = "xyes"; then : SAVED_AX_PACKAGE_REQUIRES_PRIVATE="$AX_PACKAGE_REQUIRES_PRIVATE" AX_PACKAGE_REQUIRES="$AX_PACKAGE_REQUIRES " AX_PACKAGE_REQUIRES_PRIVATE="$AX_PACKAGE_REQUIRES_PRIVATE sqlite3" pkg_failed=no { $as_echo "$as_me:${as_lineno-$LINENO}: checking for sqlite3" >&5 $as_echo_n "checking for sqlite3... " >&6; } if test -n "$SQLITE3_CFLAGS"; then pkg_cv_SQLITE3_CFLAGS="$SQLITE3_CFLAGS" elif test -n "$PKG_CONFIG"; then if test -n "$PKG_CONFIG" && \ { { $as_echo "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \" sqlite3\""; } >&5 ($PKG_CONFIG --exists --print-errors " sqlite3") 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; }; then pkg_cv_SQLITE3_CFLAGS=`$PKG_CONFIG --cflags " sqlite3" 2>/dev/null` test "x$?" 
!= "x0" && pkg_failed=yes else pkg_failed=yes fi else pkg_failed=untried fi if test -n "$SQLITE3_LIBS"; then pkg_cv_SQLITE3_LIBS="$SQLITE3_LIBS" elif test -n "$PKG_CONFIG"; then if test -n "$PKG_CONFIG" && \ { { $as_echo "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \" sqlite3\""; } >&5 ($PKG_CONFIG --exists --print-errors " sqlite3") 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; }; then pkg_cv_SQLITE3_LIBS=`$PKG_CONFIG --libs " sqlite3" 2>/dev/null` test "x$?" != "x0" && pkg_failed=yes else pkg_failed=yes fi else pkg_failed=untried fi if test $pkg_failed = yes; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } if $PKG_CONFIG --atleast-pkgconfig-version 0.20; then _pkg_short_errors_supported=yes else _pkg_short_errors_supported=no fi if test $_pkg_short_errors_supported = yes; then SQLITE3_PKG_ERRORS=`$PKG_CONFIG --short-errors --print-errors --cflags --libs " sqlite3" 2>&1` else SQLITE3_PKG_ERRORS=`$PKG_CONFIG --print-errors --cflags --libs " sqlite3" 2>&1` fi # Put the nasty error message in config.log where it belongs echo "$SQLITE3_PKG_ERRORS" >&5 AX_PACKAGE_REQUIRES_PRIVATE="$SAVED_AX_PACKAGE_REQUIRES_PRIVATE" ax_sqlite3=no elif test $pkg_failed = untried; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } AX_PACKAGE_REQUIRES_PRIVATE="$SAVED_AX_PACKAGE_REQUIRES_PRIVATE" ax_sqlite3=no else SQLITE3_CFLAGS=$pkg_cv_SQLITE3_CFLAGS SQLITE3_LIBS=$pkg_cv_SQLITE3_LIBS { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 $as_echo "yes" >&6; } CFLAGS="$CFLAGS $SQLITE3_CFLAGS" CXXFLAGS="$CXXFLAGS $SQLITE3_CFLAGS" LIBS="$LIBS $SQLITE3_LIBS" fi # Substitute output. fi { $as_echo "$as_me:${as_lineno-$LINENO}: checking for dlopen in -ldl" >&5 $as_echo_n "checking for dlopen in -ldl... 
" >&6; } if ${ac_cv_lib_dl_dlopen+:} false; then : $as_echo_n "(cached) " >&6 else ac_check_lib_save_LIBS=$LIBS LIBS="-ldl $LIBS" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ /* Override any GCC internal prototype to avoid an error. Use char because int might match the return type of a GCC builtin and then its argument prototype would still apply. */ #ifdef __cplusplus extern "C" #endif char dlopen (); int main () { return dlopen (); ; return 0; } _ACEOF if ac_fn_c_try_link "$LINENO"; then : ac_cv_lib_dl_dlopen=yes else ac_cv_lib_dl_dlopen=no fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext LIBS=$ac_check_lib_save_LIBS fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_lib_dl_dlopen" >&5 $as_echo "$ac_cv_lib_dl_dlopen" >&6; } if test "x$ac_cv_lib_dl_dlopen" = xyes; then : cat >>confdefs.h <<_ACEOF #define HAVE_LIBDL 1 _ACEOF LIBS="-ldl $LIBS" fi for ac_header in sqlite3.h do : ac_fn_c_check_header_mongrel "$LINENO" "sqlite3.h" "ac_cv_header_sqlite3_h" "$ac_includes_default" if test "x$ac_cv_header_sqlite3_h" = xyes; then : cat >>confdefs.h <<_ACEOF #define HAVE_SQLITE3_H 1 _ACEOF { $as_echo "$as_me:${as_lineno-$LINENO}: checking for sqlite3_open in -lsqlite3" >&5 $as_echo_n "checking for sqlite3_open in -lsqlite3... " >&6; } if ${ac_cv_lib_sqlite3_sqlite3_open+:} false; then : $as_echo_n "(cached) " >&6 else ac_check_lib_save_LIBS=$LIBS LIBS="-lsqlite3 $LIBS" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ /* Override any GCC internal prototype to avoid an error. Use char because int might match the return type of a GCC builtin and then its argument prototype would still apply. 
*/ #ifdef __cplusplus extern "C" #endif char sqlite3_open (); int main () { return sqlite3_open (); ; return 0; } _ACEOF if ac_fn_c_try_link "$LINENO"; then : ac_cv_lib_sqlite3_sqlite3_open=yes else ac_cv_lib_sqlite3_sqlite3_open=no fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext LIBS=$ac_check_lib_save_LIBS fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_lib_sqlite3_sqlite3_open" >&5 $as_echo "$ac_cv_lib_sqlite3_sqlite3_open" >&6; } if test "x$ac_cv_lib_sqlite3_sqlite3_open" = xyes; then : cat >>confdefs.h <<_ACEOF #define HAVE_LIBSQLITE3 1 _ACEOF LIBS="-lsqlite3 $LIBS" fi fi done if test "x$ac_cv_lib_sqlite3_sqlite3_open" = "xyes"; then : ax_sqlite3=yes fi { $as_echo "$as_me:${as_lineno-$LINENO}: checking which sqlite3 to use" >&5 $as_echo_n "checking which sqlite3 to use... " >&6; } if test "x$ax_sqlite3" = "xyes"; then : { $as_echo "$as_me:${as_lineno-$LINENO}: result: system" >&5 $as_echo "system" >&6; } PACKAGE_LIBS_PRIVATE="$PACKAGE_LIBS_PRIVATE -lsqlite3" else { $as_echo "$as_me:${as_lineno-$LINENO}: result: bundled" >&5 $as_echo "bundled" >&6; } fi if test "x$ax_sqlite3" = "xyes"; then HAVE_LIBSQLITE3_TRUE= HAVE_LIBSQLITE3_FALSE='#' else HAVE_LIBSQLITE3_TRUE='#' HAVE_LIBSQLITE3_FALSE= fi # Check whether --with-afflib was given. if test "${with_afflib+set}" = set; then : withval=$with_afflib; else with_afflib=yes fi ax_afflib=no if test "x$with_afflib" != "xno"; then : SAVED_CPPFLAGS="$CPPFLAGS" SAVED_CFLAGS="$CFLAGS" SAVED_CXXFLAGS="$CXXFLAGS" SAVED_LDFLAGS="$LDFLAGS" SAVED_LIBS="$LIBS" if test "x$with_afflib" = "xyes"; then : else if test -d "$with_afflib/include"; then : CPPFLAGS="$CPPFLAGS -I$with_afflib/include" LDFLAGS="$LDFLAGS -L$with_afflib/lib" else { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5 $as_echo "$as_me: error: in \`$ac_pwd':" >&2;} as_fn_error $? 
"afflib directory not found at $with_afflib See \`config.log' for more details" "$LINENO" 5; } fi fi for ac_header in afflib/afflib.h do : ac_fn_c_check_header_mongrel "$LINENO" "afflib/afflib.h" "ac_cv_header_afflib_afflib_h" "$ac_includes_default" if test "x$ac_cv_header_afflib_afflib_h" = xyes; then : cat >>confdefs.h <<_ACEOF #define HAVE_AFFLIB_AFFLIB_H 1 _ACEOF { $as_echo "$as_me:${as_lineno-$LINENO}: checking for af_open in -lafflib" >&5 $as_echo_n "checking for af_open in -lafflib... " >&6; } if ${ac_cv_lib_afflib_af_open+:} false; then : $as_echo_n "(cached) " >&6 else ac_check_lib_save_LIBS=$LIBS LIBS="-lafflib $LIBS" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ /* Override any GCC internal prototype to avoid an error. Use char because int might match the return type of a GCC builtin and then its argument prototype would still apply. */ #ifdef __cplusplus extern "C" #endif char af_open (); int main () { return af_open (); ; return 0; } _ACEOF if ac_fn_c_try_link "$LINENO"; then : ac_cv_lib_afflib_af_open=yes else ac_cv_lib_afflib_af_open=no fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext LIBS=$ac_check_lib_save_LIBS fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_lib_afflib_af_open" >&5 $as_echo "$ac_cv_lib_afflib_af_open" >&6; } if test "x$ac_cv_lib_afflib_af_open" = xyes; then : cat >>confdefs.h <<_ACEOF #define HAVE_LIBAFFLIB 1 _ACEOF LIBS="-lafflib $LIBS" fi fi done if test "x$ac_cv_lib_afflib_af_open" = "xyes"; then : if test "x$ax_afflib" = "xyes"; then : CPPFLAGS="$SAVED_CPPFLAGS" else ax_afflib=yes PACKAGE_LIBS_PRIVATE="$PACKAGE_LIBS_PRIVATE -lafflib" fi else CPPFLAGS="$SAVED_CPPFLAGS" CFLAGS="$SAVED_CFLAGS" CXXFLAGS="$SAVED_CXXFLAGS" LDFLAGS="$SAVED_LDFLAGS" LIBS="$SAVED_LIBS" ax_afflib=no fi fi # Check whether --with-zlib was given. 
if test "${with_zlib+set}" = set; then : withval=$with_zlib; else with_zlib=yes fi ax_zlib=no if test "x$with_zlib" != "xno"; then : SAVED_CPPFLAGS="$CPPFLAGS" SAVED_CFLAGS="$CFLAGS" SAVED_CXXFLAGS="$CXXFLAGS" SAVED_LDFLAGS="$LDFLAGS" SAVED_LIBS="$LIBS" if test "x$with_zlib" = "xyes"; then : if test "x$ac_cv_prog_PKGCONFIG" = "xyes"; then : SAVED_AX_PACKAGE_REQUIRES="$AX_PACKAGE_REQUIRES" SAVED_AX_PACKAGE_REQUIRES_PRIVATE="$AX_PACKAGE_REQUIRES_PRIVATE" AX_PACKAGE_REQUIRES="$AX_PACKAGE_REQUIRES " AX_PACKAGE_REQUIRES_PRIVATE="$AX_PACKAGE_REQUIRES_PRIVATE zlib" pkg_failed=no { $as_echo "$as_me:${as_lineno-$LINENO}: checking for zlib" >&5 $as_echo_n "checking for zlib... " >&6; } if test -n "$ZLIB_CFLAGS"; then pkg_cv_ZLIB_CFLAGS="$ZLIB_CFLAGS" elif test -n "$PKG_CONFIG"; then if test -n "$PKG_CONFIG" && \ { { $as_echo "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \" zlib\""; } >&5 ($PKG_CONFIG --exists --print-errors " zlib") 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; }; then pkg_cv_ZLIB_CFLAGS=`$PKG_CONFIG --cflags " zlib" 2>/dev/null` test "x$?" != "x0" && pkg_failed=yes else pkg_failed=yes fi else pkg_failed=untried fi if test -n "$ZLIB_LIBS"; then pkg_cv_ZLIB_LIBS="$ZLIB_LIBS" elif test -n "$PKG_CONFIG"; then if test -n "$PKG_CONFIG" && \ { { $as_echo "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \" zlib\""; } >&5 ($PKG_CONFIG --exists --print-errors " zlib") 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; }; then pkg_cv_ZLIB_LIBS=`$PKG_CONFIG --libs " zlib" 2>/dev/null` test "x$?" 
!= "x0" && pkg_failed=yes else pkg_failed=yes fi else pkg_failed=untried fi if test $pkg_failed = yes; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } if $PKG_CONFIG --atleast-pkgconfig-version 0.20; then _pkg_short_errors_supported=yes else _pkg_short_errors_supported=no fi if test $_pkg_short_errors_supported = yes; then ZLIB_PKG_ERRORS=`$PKG_CONFIG --short-errors --print-errors --cflags --libs " zlib" 2>&1` else ZLIB_PKG_ERRORS=`$PKG_CONFIG --print-errors --cflags --libs " zlib" 2>&1` fi # Put the nasty error message in config.log where it belongs echo "$ZLIB_PKG_ERRORS" >&5 AX_PACKAGE_REQUIRES="$SAVED_AX_PACKAGE_REQUIRES" AX_PACKAGE_REQUIRES_PRIVATE="$SAVED_AX_PACKAGE_REQUIRES_PRIVATE" ax_zlib=no elif test $pkg_failed = untried; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } AX_PACKAGE_REQUIRES="$SAVED_AX_PACKAGE_REQUIRES" AX_PACKAGE_REQUIRES_PRIVATE="$SAVED_AX_PACKAGE_REQUIRES_PRIVATE" ax_zlib=no else ZLIB_CFLAGS=$pkg_cv_ZLIB_CFLAGS ZLIB_LIBS=$pkg_cv_ZLIB_LIBS { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 $as_echo "yes" >&6; } CPPFLAGS="$CPPFLAGS $ZLIB_CFLAGS" CFLAGS="$CFLAGS $ZLIB_CFLAGS" CXXFLAGS="$CXXFLAGS $ZLIB_CFLAGS" LIBS="$LIBS $ZLIB_LIBS" ax_zlib=yes fi # Substitute output. fi else if test -d "$with_zlib/include"; then : CPPFLAGS="$CPPFLAGS -I$with_zlib/include" LDFLAGS="$LDFLAGS -L$with_zlib/lib" else { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5 $as_echo "$as_me: error: in \`$ac_pwd':" >&2;} as_fn_error $? "zlib directory not found at $with_zlib See \`config.log' for more details" "$LINENO" 5; } fi fi for ac_header in zlib.h do : ac_fn_c_check_header_mongrel "$LINENO" "zlib.h" "ac_cv_header_zlib_h" "$ac_includes_default" if test "x$ac_cv_header_zlib_h" = xyes; then : cat >>confdefs.h <<_ACEOF #define HAVE_ZLIB_H 1 _ACEOF { $as_echo "$as_me:${as_lineno-$LINENO}: checking for inflate in -lz" >&5 $as_echo_n "checking for inflate in -lz... 
" >&6; } if ${ac_cv_lib_z_inflate+:} false; then : $as_echo_n "(cached) " >&6 else ac_check_lib_save_LIBS=$LIBS LIBS="-lz $LIBS" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ /* Override any GCC internal prototype to avoid an error. Use char because int might match the return type of a GCC builtin and then its argument prototype would still apply. */ #ifdef __cplusplus extern "C" #endif char inflate (); int main () { return inflate (); ; return 0; } _ACEOF if ac_fn_c_try_link "$LINENO"; then : ac_cv_lib_z_inflate=yes else ac_cv_lib_z_inflate=no fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext LIBS=$ac_check_lib_save_LIBS fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_lib_z_inflate" >&5 $as_echo "$ac_cv_lib_z_inflate" >&6; } if test "x$ac_cv_lib_z_inflate" = xyes; then : cat >>confdefs.h <<_ACEOF #define HAVE_LIBZ 1 _ACEOF LIBS="-lz $LIBS" fi fi done if test "x$ac_cv_lib_z_inflate" = "xyes"; then : if test "x$ax_zlib" = "xyes"; then : CPPFLAGS="$SAVED_CPPFLAGS" else ax_zlib=yes PACKAGE_LIBS_PRIVATE="$PACKAGE_LIBS_PRIVATE -lz" fi else CPPFLAGS="$SAVED_CPPFLAGS" CFLAGS="$SAVED_CFLAGS" CXXFLAGS="$SAVED_CXXFLAGS" LDFLAGS="$SAVED_LDFLAGS" LIBS="$SAVED_LIBS" ax_zlib=no fi fi # Check whether --with-libewf was given. if test "${with_libewf+set}" = set; then : withval=$with_libewf; else with_libewf=yes fi ax_libewf=no if test "x$with_libewf" != "xno"; then : SAVED_CPPFLAGS="$CPPFLAGS" SAVED_CFLAGS="$CFLAGS" SAVED_CXXFLAGS="$CXXFLAGS" SAVED_LDFLAGS="$LDFLAGS" SAVED_LIBS="$LIBS" if test "x$with_libewf" = "xyes"; then : if test "x$ac_cv_prog_PKGCONFIG" = "xyes"; then : SAVED_AX_PACKAGE_REQUIRES="$AX_PACKAGE_REQUIRES" SAVED_AX_PACKAGE_REQUIRES_PRIVATE="$AX_PACKAGE_REQUIRES_PRIVATE" AX_PACKAGE_REQUIRES="$AX_PACKAGE_REQUIRES " AX_PACKAGE_REQUIRES_PRIVATE="$AX_PACKAGE_REQUIRES_PRIVATE libewf" pkg_failed=no { $as_echo "$as_me:${as_lineno-$LINENO}: checking for libewf" >&5 $as_echo_n "checking for libewf... 
" >&6; } if test -n "$EWF_CFLAGS"; then pkg_cv_EWF_CFLAGS="$EWF_CFLAGS" elif test -n "$PKG_CONFIG"; then if test -n "$PKG_CONFIG" && \ { { $as_echo "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \" libewf\""; } >&5 ($PKG_CONFIG --exists --print-errors " libewf") 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; }; then pkg_cv_EWF_CFLAGS=`$PKG_CONFIG --cflags " libewf" 2>/dev/null` test "x$?" != "x0" && pkg_failed=yes else pkg_failed=yes fi else pkg_failed=untried fi if test -n "$EWF_LIBS"; then pkg_cv_EWF_LIBS="$EWF_LIBS" elif test -n "$PKG_CONFIG"; then if test -n "$PKG_CONFIG" && \ { { $as_echo "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \" libewf\""; } >&5 ($PKG_CONFIG --exists --print-errors " libewf") 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; }; then pkg_cv_EWF_LIBS=`$PKG_CONFIG --libs " libewf" 2>/dev/null` test "x$?" != "x0" && pkg_failed=yes else pkg_failed=yes fi else pkg_failed=untried fi if test $pkg_failed = yes; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } if $PKG_CONFIG --atleast-pkgconfig-version 0.20; then _pkg_short_errors_supported=yes else _pkg_short_errors_supported=no fi if test $_pkg_short_errors_supported = yes; then EWF_PKG_ERRORS=`$PKG_CONFIG --short-errors --print-errors --cflags --libs " libewf" 2>&1` else EWF_PKG_ERRORS=`$PKG_CONFIG --print-errors --cflags --libs " libewf" 2>&1` fi # Put the nasty error message in config.log where it belongs echo "$EWF_PKG_ERRORS" >&5 AX_PACKAGE_REQUIRES="$SAVED_AX_PACKAGE_REQUIRES" AX_PACKAGE_REQUIRES_PRIVATE="$SAVED_AX_PACKAGE_REQUIRES_PRIVATE" ax_libewf=no elif test $pkg_failed = untried; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } AX_PACKAGE_REQUIRES="$SAVED_AX_PACKAGE_REQUIRES" AX_PACKAGE_REQUIRES_PRIVATE="$SAVED_AX_PACKAGE_REQUIRES_PRIVATE" ax_libewf=no else 
EWF_CFLAGS=$pkg_cv_EWF_CFLAGS EWF_LIBS=$pkg_cv_EWF_LIBS { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 $as_echo "yes" >&6; } CPPFLAGS="$CPPFLAGS $EWF_CFLAGS" CFLAGS="$CFLAGS $EWF_CFLAGS" CXXFLAGS="$CXXFLAGS $EWF_CFLAGS" LIBS="$LIBS $EWF_LIBS" ax_libewf=yes fi # Substitute output. fi else if test -d "$with_libewf/include"; then : CPPFLAGS="$CPPFLAGS -I$with_libewf/include" LDFLAGS="$LDFLAGS -L$with_libewf/lib" else { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5 $as_echo "$as_me: error: in \`$ac_pwd':" >&2;} as_fn_error $? "libewf directory not found at $with_libewf See \`config.log' for more details" "$LINENO" 5; } fi fi for ac_header in libewf.h do : ac_fn_c_check_header_mongrel "$LINENO" "libewf.h" "ac_cv_header_libewf_h" "$ac_includes_default" if test "x$ac_cv_header_libewf_h" = xyes; then : cat >>confdefs.h <<_ACEOF #define HAVE_LIBEWF_H 1 _ACEOF { $as_echo "$as_me:${as_lineno-$LINENO}: checking for libewf_get_version in -lewf" >&5 $as_echo_n "checking for libewf_get_version in -lewf... " >&6; } if ${ac_cv_lib_ewf_libewf_get_version+:} false; then : $as_echo_n "(cached) " >&6 else ac_check_lib_save_LIBS=$LIBS LIBS="-lewf $LIBS" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ /* Override any GCC internal prototype to avoid an error. Use char because int might match the return type of a GCC builtin and then its argument prototype would still apply. 
*/ #ifdef __cplusplus extern "C" #endif char libewf_get_version (); int main () { return libewf_get_version (); ; return 0; } _ACEOF if ac_fn_c_try_link "$LINENO"; then : ac_cv_lib_ewf_libewf_get_version=yes else ac_cv_lib_ewf_libewf_get_version=no fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext LIBS=$ac_check_lib_save_LIBS fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_lib_ewf_libewf_get_version" >&5 $as_echo "$ac_cv_lib_ewf_libewf_get_version" >&6; } if test "x$ac_cv_lib_ewf_libewf_get_version" = xyes; then : cat >>confdefs.h <<_ACEOF #define HAVE_LIBEWF 1 _ACEOF LIBS="-lewf $LIBS" fi fi done if test "x$ac_cv_lib_ewf_libewf_get_version" = "xyes"; then : if test "x$ax_libewf" = "xyes"; then : CPPFLAGS="$SAVED_CPPFLAGS" else ax_libewf=yes PACKAGE_LIBS_PRIVATE="$PACKAGE_LIBS_PRIVATE -lewf" fi else CPPFLAGS="$SAVED_CPPFLAGS" CFLAGS="$SAVED_CFLAGS" CXXFLAGS="$SAVED_CXXFLAGS" LDFLAGS="$SAVED_LDFLAGS" LIBS="$SAVED_LIBS" ax_libewf=no fi fi # Check whether --with-libvhdi was given. if test "${with_libvhdi+set}" = set; then : withval=$with_libvhdi; else with_libvhdi=yes fi ax_libvhdi=no if test "x$with_libvhdi" != "xno"; then : SAVED_CPPFLAGS="$CPPFLAGS" SAVED_CFLAGS="$CFLAGS" SAVED_CXXFLAGS="$CXXFLAGS" SAVED_LDFLAGS="$LDFLAGS" SAVED_LIBS="$LIBS" if test "x$with_libvhdi" = "xyes"; then : if test "x$ac_cv_prog_PKGCONFIG" = "xyes"; then : SAVED_AX_PACKAGE_REQUIRES="$AX_PACKAGE_REQUIRES" SAVED_AX_PACKAGE_REQUIRES_PRIVATE="$AX_PACKAGE_REQUIRES_PRIVATE" AX_PACKAGE_REQUIRES="$AX_PACKAGE_REQUIRES " AX_PACKAGE_REQUIRES_PRIVATE="$AX_PACKAGE_REQUIRES_PRIVATE libvhdi" pkg_failed=no { $as_echo "$as_me:${as_lineno-$LINENO}: checking for libvhdi" >&5 $as_echo_n "checking for libvhdi... 
" >&6; } if test -n "$VHDI_CFLAGS"; then pkg_cv_VHDI_CFLAGS="$VHDI_CFLAGS" elif test -n "$PKG_CONFIG"; then if test -n "$PKG_CONFIG" && \ { { $as_echo "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \" libvhdi\""; } >&5 ($PKG_CONFIG --exists --print-errors " libvhdi") 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; }; then pkg_cv_VHDI_CFLAGS=`$PKG_CONFIG --cflags " libvhdi" 2>/dev/null` test "x$?" != "x0" && pkg_failed=yes else pkg_failed=yes fi else pkg_failed=untried fi if test -n "$VHDI_LIBS"; then pkg_cv_VHDI_LIBS="$VHDI_LIBS" elif test -n "$PKG_CONFIG"; then if test -n "$PKG_CONFIG" && \ { { $as_echo "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \" libvhdi\""; } >&5 ($PKG_CONFIG --exists --print-errors " libvhdi") 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; }; then pkg_cv_VHDI_LIBS=`$PKG_CONFIG --libs " libvhdi" 2>/dev/null` test "x$?" != "x0" && pkg_failed=yes else pkg_failed=yes fi else pkg_failed=untried fi if test $pkg_failed = yes; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } if $PKG_CONFIG --atleast-pkgconfig-version 0.20; then _pkg_short_errors_supported=yes else _pkg_short_errors_supported=no fi if test $_pkg_short_errors_supported = yes; then VHDI_PKG_ERRORS=`$PKG_CONFIG --short-errors --print-errors --cflags --libs " libvhdi" 2>&1` else VHDI_PKG_ERRORS=`$PKG_CONFIG --print-errors --cflags --libs " libvhdi" 2>&1` fi # Put the nasty error message in config.log where it belongs echo "$VHDI_PKG_ERRORS" >&5 AX_PACKAGE_REQUIRES="$SAVED_AX_PACKAGE_REQUIRES" AX_PACKAGE_REQUIRES_PRIVATE="$SAVED_AX_PACKAGE_REQUIRES_PRIVATE" ax_libvhdi=no elif test $pkg_failed = untried; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } AX_PACKAGE_REQUIRES="$SAVED_AX_PACKAGE_REQUIRES" AX_PACKAGE_REQUIRES_PRIVATE="$SAVED_AX_PACKAGE_REQUIRES_PRIVATE" ax_libvhdi=no 
else VHDI_CFLAGS=$pkg_cv_VHDI_CFLAGS VHDI_LIBS=$pkg_cv_VHDI_LIBS { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 $as_echo "yes" >&6; } CPPFLAGS="$CPPFLAGS $VHDI_CFLAGS" CFLAGS="$CFLAGS $VHDI_CFLAGS" CXXFLAGS="$CXXFLAGS $VHDI_CFLAGS" LIBS="$LIBS $VHDI_LIBS" ax_libvhdi=yes fi # Substitute output. fi else if test -d "$with_libvhdi/include"; then : CPPFLAGS="$CPPFLAGS -I$with_libvhdi/include" LDFLAGS="$LDFLAGS -L$with_libvhdi/lib" else { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5 $as_echo "$as_me: error: in \`$ac_pwd':" >&2;} as_fn_error $? "libvhdi directory not found at $with_libvhdi See \`config.log' for more details" "$LINENO" 5; } fi fi for ac_header in libvhdi.h do : ac_fn_c_check_header_mongrel "$LINENO" "libvhdi.h" "ac_cv_header_libvhdi_h" "$ac_includes_default" if test "x$ac_cv_header_libvhdi_h" = xyes; then : cat >>confdefs.h <<_ACEOF #define HAVE_LIBVHDI_H 1 _ACEOF { $as_echo "$as_me:${as_lineno-$LINENO}: checking for libvhdi_get_version in -lvhdi" >&5 $as_echo_n "checking for libvhdi_get_version in -lvhdi... " >&6; } if ${ac_cv_lib_vhdi_libvhdi_get_version+:} false; then : $as_echo_n "(cached) " >&6 else ac_check_lib_save_LIBS=$LIBS LIBS="-lvhdi $LIBS" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ /* Override any GCC internal prototype to avoid an error. Use char because int might match the return type of a GCC builtin and then its argument prototype would still apply. 
*/ #ifdef __cplusplus extern "C" #endif char libvhdi_get_version (); int main () { return libvhdi_get_version (); ; return 0; } _ACEOF if ac_fn_c_try_link "$LINENO"; then : ac_cv_lib_vhdi_libvhdi_get_version=yes else ac_cv_lib_vhdi_libvhdi_get_version=no fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext LIBS=$ac_check_lib_save_LIBS fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_lib_vhdi_libvhdi_get_version" >&5 $as_echo "$ac_cv_lib_vhdi_libvhdi_get_version" >&6; } if test "x$ac_cv_lib_vhdi_libvhdi_get_version" = xyes; then : cat >>confdefs.h <<_ACEOF #define HAVE_LIBVHDI 1 _ACEOF LIBS="-lvhdi $LIBS" fi fi done if test "x$ac_cv_lib_vhdi_libvhdi_get_version" = "xyes"; then : if test "x$ax_libvhdi" = "xyes"; then : CPPFLAGS="$SAVED_CPPFLAGS" else ax_libvhdi=yes PACKAGE_LIBS_PRIVATE="$PACKAGE_LIBS_PRIVATE -lvhdi" fi else CPPFLAGS="$SAVED_CPPFLAGS" CFLAGS="$SAVED_CFLAGS" CXXFLAGS="$SAVED_CXXFLAGS" LDFLAGS="$SAVED_LDFLAGS" LIBS="$SAVED_LIBS" ax_libvhdi=no fi fi # Check whether --with-libvmdk was given. if test "${with_libvmdk+set}" = set; then : withval=$with_libvmdk; else with_libvmdk=yes fi ax_libvmdk=no if test "x$with_libvmdk" != "xno"; then : SAVED_CPPFLAGS="$CPPFLAGS" SAVED_CFLAGS="$CFLAGS" SAVED_CXXFLAGS="$CXXFLAGS" SAVED_LDFLAGS="$LDFLAGS" SAVED_LIBS="$LIBS" if test "x$with_libvmdk" = "xyes"; then : if test "x$ac_cv_prog_PKGCONFIG" = "xyes"; then : SAVED_AX_PACKAGE_REQUIRES="$AX_PACKAGE_REQUIRES" SAVED_AX_PACKAGE_REQUIRES_PRIVATE="$AX_PACKAGE_REQUIRES_PRIVATE" AX_PACKAGE_REQUIRES="$AX_PACKAGE_REQUIRES " AX_PACKAGE_REQUIRES_PRIVATE="$AX_PACKAGE_REQUIRES_PRIVATE libvmdk" pkg_failed=no { $as_echo "$as_me:${as_lineno-$LINENO}: checking for libvmdk" >&5 $as_echo_n "checking for libvmdk... 
" >&6; } if test -n "$VMDK_CFLAGS"; then pkg_cv_VMDK_CFLAGS="$VMDK_CFLAGS" elif test -n "$PKG_CONFIG"; then if test -n "$PKG_CONFIG" && \ { { $as_echo "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \" libvmdk\""; } >&5 ($PKG_CONFIG --exists --print-errors " libvmdk") 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; }; then pkg_cv_VMDK_CFLAGS=`$PKG_CONFIG --cflags " libvmdk" 2>/dev/null` test "x$?" != "x0" && pkg_failed=yes else pkg_failed=yes fi else pkg_failed=untried fi if test -n "$VMDK_LIBS"; then pkg_cv_VMDK_LIBS="$VMDK_LIBS" elif test -n "$PKG_CONFIG"; then if test -n "$PKG_CONFIG" && \ { { $as_echo "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \" libvmdk\""; } >&5 ($PKG_CONFIG --exists --print-errors " libvmdk") 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; }; then pkg_cv_VMDK_LIBS=`$PKG_CONFIG --libs " libvmdk" 2>/dev/null` test "x$?" != "x0" && pkg_failed=yes else pkg_failed=yes fi else pkg_failed=untried fi if test $pkg_failed = yes; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } if $PKG_CONFIG --atleast-pkgconfig-version 0.20; then _pkg_short_errors_supported=yes else _pkg_short_errors_supported=no fi if test $_pkg_short_errors_supported = yes; then VMDK_PKG_ERRORS=`$PKG_CONFIG --short-errors --print-errors --cflags --libs " libvmdk" 2>&1` else VMDK_PKG_ERRORS=`$PKG_CONFIG --print-errors --cflags --libs " libvmdk" 2>&1` fi # Put the nasty error message in config.log where it belongs echo "$VMDK_PKG_ERRORS" >&5 AX_PACKAGE_REQUIRES="$SAVED_AX_PACKAGE_REQUIRES" AX_PACKAGE_REQUIRES_PRIVATE="$SAVED_AX_PACKAGE_REQUIRES_PRIVATE" ax_libvmdk=no elif test $pkg_failed = untried; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } AX_PACKAGE_REQUIRES="$SAVED_AX_PACKAGE_REQUIRES" AX_PACKAGE_REQUIRES_PRIVATE="$SAVED_AX_PACKAGE_REQUIRES_PRIVATE" ax_libvmdk=no 
else VMDK_CFLAGS=$pkg_cv_VMDK_CFLAGS VMDK_LIBS=$pkg_cv_VMDK_LIBS { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 $as_echo "yes" >&6; } CPPFLAGS="$CPPFLAGS $VMDK_CFLAGS" CFLAGS="$CFLAGS $VMDK_CFLAGS" CXXFLAGS="$CXXFLAGS $VMDK_CFLAGS" LIBS="$LIBS $VMDK_LIBS" ax_libvmdk=yes fi # Substitute output. fi else if test -d "$with_libvmdk/include"; then : CPPFLAGS="$CPPFLAGS -I$with_libvmdk/include" LDFLAGS="$LDFLAGS -L$with_libvmdk/lib" else { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5 $as_echo "$as_me: error: in \`$ac_pwd':" >&2;} as_fn_error $? "libvmdk directory not found at $with_libvmdk See \`config.log' for more details" "$LINENO" 5; } fi fi for ac_header in libvmdk.h do : ac_fn_c_check_header_mongrel "$LINENO" "libvmdk.h" "ac_cv_header_libvmdk_h" "$ac_includes_default" if test "x$ac_cv_header_libvmdk_h" = xyes; then : cat >>confdefs.h <<_ACEOF #define HAVE_LIBVMDK_H 1 _ACEOF { $as_echo "$as_me:${as_lineno-$LINENO}: checking for libvmdk_get_version in -lvmdk" >&5 $as_echo_n "checking for libvmdk_get_version in -lvmdk... " >&6; } if ${ac_cv_lib_vmdk_libvmdk_get_version+:} false; then : $as_echo_n "(cached) " >&6 else ac_check_lib_save_LIBS=$LIBS LIBS="-lvmdk $LIBS" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ /* Override any GCC internal prototype to avoid an error. Use char because int might match the return type of a GCC builtin and then its argument prototype would still apply. 
*/ #ifdef __cplusplus extern "C" #endif char libvmdk_get_version (); int main () { return libvmdk_get_version (); ; return 0; } _ACEOF if ac_fn_c_try_link "$LINENO"; then : ac_cv_lib_vmdk_libvmdk_get_version=yes else ac_cv_lib_vmdk_libvmdk_get_version=no fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext LIBS=$ac_check_lib_save_LIBS fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_lib_vmdk_libvmdk_get_version" >&5 $as_echo "$ac_cv_lib_vmdk_libvmdk_get_version" >&6; } if test "x$ac_cv_lib_vmdk_libvmdk_get_version" = xyes; then : cat >>confdefs.h <<_ACEOF #define HAVE_LIBVMDK 1 _ACEOF LIBS="-lvmdk $LIBS" fi fi done if test "x$ac_cv_lib_vmdk_libvmdk_get_version" = "xyes"; then : if test "x$ax_libvmdk" = "xyes"; then : CPPFLAGS="$SAVED_CPPFLAGS" else ax_libvmdk=yes PACKAGE_LIBS_PRIVATE="$PACKAGE_LIBS_PRIVATE -lvmdk" fi else CPPFLAGS="$SAVED_CPPFLAGS" CFLAGS="$SAVED_CFLAGS" CXXFLAGS="$SAVED_CXXFLAGS" LDFLAGS="$SAVED_LDFLAGS" LIBS="$SAVED_LIBS" ax_libvmdk=no fi fi # Check whether --enable-cppunit was given. if test "${enable_cppunit+set}" = set; then : enableval=$enable_cppunit; fi ac_cv_cppunit=no if test "x$enable_cppunit" != "xno"; then : if test "x$ac_cv_prog_PKGCONFIG" = "xyes"; then : IGNORE="$IGNORE " IGNORE="$IGNORE cppunit >= 1.12.1" pkg_failed=no { $as_echo "$as_me:${as_lineno-$LINENO}: checking for cppunit >= 1.12.1" >&5 $as_echo_n "checking for cppunit >= 1.12.1... " >&6; } if test -n "$CPPUNIT_CFLAGS"; then pkg_cv_CPPUNIT_CFLAGS="$CPPUNIT_CFLAGS" elif test -n "$PKG_CONFIG"; then if test -n "$PKG_CONFIG" && \ { { $as_echo "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \" cppunit >= 1.12.1\""; } >&5 ($PKG_CONFIG --exists --print-errors " cppunit >= 1.12.1") 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; }; then pkg_cv_CPPUNIT_CFLAGS=`$PKG_CONFIG --cflags " cppunit >= 1.12.1" 2>/dev/null` test "x$?" 
!= "x0" && pkg_failed=yes else pkg_failed=yes fi else pkg_failed=untried fi if test -n "$CPPUNIT_LIBS"; then pkg_cv_CPPUNIT_LIBS="$CPPUNIT_LIBS" elif test -n "$PKG_CONFIG"; then if test -n "$PKG_CONFIG" && \ { { $as_echo "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \" cppunit >= 1.12.1\""; } >&5 ($PKG_CONFIG --exists --print-errors " cppunit >= 1.12.1") 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; }; then pkg_cv_CPPUNIT_LIBS=`$PKG_CONFIG --libs " cppunit >= 1.12.1" 2>/dev/null` test "x$?" != "x0" && pkg_failed=yes else pkg_failed=yes fi else pkg_failed=untried fi if test $pkg_failed = yes; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } if $PKG_CONFIG --atleast-pkgconfig-version 0.20; then _pkg_short_errors_supported=yes else _pkg_short_errors_supported=no fi if test $_pkg_short_errors_supported = yes; then CPPUNIT_PKG_ERRORS=`$PKG_CONFIG --short-errors --print-errors --cflags --libs " cppunit >= 1.12.1" 2>&1` else CPPUNIT_PKG_ERRORS=`$PKG_CONFIG --print-errors --cflags --libs " cppunit >= 1.12.1" 2>&1` fi # Put the nasty error message in config.log where it belongs echo "$CPPUNIT_PKG_ERRORS" >&5 ac_cv_cppunit=no elif test $pkg_failed = untried; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } ac_cv_cppunit=no else CPPUNIT_CFLAGS=$pkg_cv_CPPUNIT_CFLAGS CPPUNIT_LIBS=$pkg_cv_CPPUNIT_LIBS { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 $as_echo "yes" >&6; } ac_cv_cppunit=yes fi # Substitute output. fi if test "x$ac_cv_cppunit" != "xyes"; then : # Check whether --with-cppunit-prefix was given. if test "${with_cppunit_prefix+set}" = set; then : withval=$with_cppunit_prefix; cppunit_config_prefix="$withval" else cppunit_config_prefix="" fi # Check whether --with-cppunit-exec-prefix was given. 
if test "${with_cppunit_exec_prefix+set}" = set; then : withval=$with_cppunit_exec_prefix; cppunit_config_exec_prefix="$withval" else cppunit_config_exec_prefix="" fi if test x$cppunit_config_exec_prefix != x ; then cppunit_config_args="$cppunit_config_args --exec-prefix=$cppunit_config_exec_prefix" if test x${CPPUNIT_CONFIG+set} != xset ; then CPPUNIT_CONFIG=$cppunit_config_exec_prefix/bin/cppunit-config fi fi if test x$cppunit_config_prefix != x ; then cppunit_config_args="$cppunit_config_args --prefix=$cppunit_config_prefix" if test x${CPPUNIT_CONFIG+set} != xset ; then CPPUNIT_CONFIG=$cppunit_config_prefix/bin/cppunit-config fi fi # Extract the first word of "cppunit-config", so it can be a program name with args. set dummy cppunit-config; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if ${ac_cv_path_CPPUNIT_CONFIG+:} false; then : $as_echo_n "(cached) " >&6 else case $CPPUNIT_CONFIG in [\\/]* | ?:[\\/]*) ac_cv_path_CPPUNIT_CONFIG="$CPPUNIT_CONFIG" # Let the user override the test with a path. ;; *) as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. 
for ac_exec_ext in '' $ac_executable_extensions; do if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_path_CPPUNIT_CONFIG="$as_dir/$ac_word$ac_exec_ext" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS test -z "$ac_cv_path_CPPUNIT_CONFIG" && ac_cv_path_CPPUNIT_CONFIG="no" ;; esac fi CPPUNIT_CONFIG=$ac_cv_path_CPPUNIT_CONFIG if test -n "$CPPUNIT_CONFIG"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $CPPUNIT_CONFIG" >&5 $as_echo "$CPPUNIT_CONFIG" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi cppunit_version_min=1.12.1 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for Cppunit - version >= $cppunit_version_min" >&5 $as_echo_n "checking for Cppunit - version >= $cppunit_version_min... " >&6; } no_cppunit="" if test "$CPPUNIT_CONFIG" = "no" ; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } no_cppunit=yes else CPPUNIT_CFLAGS=`$CPPUNIT_CONFIG --cflags` CPPUNIT_LIBS=`$CPPUNIT_CONFIG --libs` cppunit_version=`$CPPUNIT_CONFIG --version` cppunit_major_version=`echo $cppunit_version | \ sed 's/\([0-9]*\).\([0-9]*\).\([0-9]*\)/\1/'` cppunit_minor_version=`echo $cppunit_version | \ sed 's/\([0-9]*\).\([0-9]*\).\([0-9]*\)/\2/'` cppunit_micro_version=`echo $cppunit_version | \ sed 's/\([0-9]*\).\([0-9]*\).\([0-9]*\)/\3/'` cppunit_major_min=`echo $cppunit_version_min | \ sed 's/\([0-9]*\).\([0-9]*\).\([0-9]*\)/\1/'` if test "x${cppunit_major_min}" = "x" ; then cppunit_major_min=0 fi cppunit_minor_min=`echo $cppunit_version_min | \ sed 's/\([0-9]*\).\([0-9]*\).\([0-9]*\)/\2/'` if test "x${cppunit_minor_min}" = "x" ; then cppunit_minor_min=0 fi cppunit_micro_min=`echo $cppunit_version_min | \ sed 's/\([0-9]*\).\([0-9]*\).\([0-9]*\)/\3/'` if test "x${cppunit_micro_min}" = "x" ; then cppunit_micro_min=0 fi cppunit_version_proper=`expr \ $cppunit_major_version \> $cppunit_major_min \| \ $cppunit_major_version \= 
$cppunit_major_min \& \ $cppunit_minor_version \> $cppunit_minor_min \| \ $cppunit_major_version \= $cppunit_major_min \& \ $cppunit_minor_version \= $cppunit_minor_min \& \ $cppunit_micro_version \>= $cppunit_micro_min ` if test "$cppunit_version_proper" = "1" ; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $cppunit_major_version.$cppunit_minor_version.$cppunit_micro_version" >&5 $as_echo "$cppunit_major_version.$cppunit_minor_version.$cppunit_micro_version" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } no_cppunit=yes fi fi if test "x$no_cppunit" = x ; then : else CPPUNIT_CFLAGS="" CPPUNIT_LIBS="" : fi if test "x$no_cppunit" = x; then : ac_cv_cppunit=yes fi fi { $as_echo "$as_me:${as_lineno-$LINENO}: checking for TestRunner in -lcppunit" >&5 $as_echo_n "checking for TestRunner in -lcppunit... " >&6; } SAVED_CFLAGS="$CFLAGS" SAVED_LDFLAGS="$LDFLAGS" CFLAGS="$CPPUNIT_CLFAGS" LDFLAGS="$CPPUNIT_LIBS" ac_ext=cpp ac_cpp='$CXXCPP $CPPFLAGS' ac_compile='$CXX -c $CXXFLAGS $CPPFLAGS conftest.$ac_ext >&5' ac_link='$CXX -o conftest$ac_exeext $CXXFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' ac_compiler_gnu=$ac_cv_cxx_compiler_gnu cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. 
*/ #include int main () { CppUnit::TextUi::TestRunner(); ; return 0; } _ACEOF if ac_fn_cxx_try_link "$LINENO"; then : ax_cv_cppunit=yes else ax_cv_cppunit=no fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext ac_ext=c ac_cpp='$CPP $CPPFLAGS' ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' ac_compiler_gnu=$ac_cv_c_compiler_gnu CFLAGS="$SAVED_CFLAGS" LDFLAGS="$SAVED_LDFLAGS" { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ax_cv_cppunit" >&5 $as_echo "$ax_cv_cppunit" >&6; } fi if test "x$ac_cv_cppunit" = xyes; then HAVE_CPPUNIT_TRUE= HAVE_CPPUNIT_FALSE='#' else HAVE_CPPUNIT_TRUE='#' HAVE_CPPUNIT_FALSE= fi # Check whether --enable-offline was given. if test "${enable_offline+set}" = set; then : enableval=$enable_offline; case "${enableval}" in yes) offline=true ;; no) offline=false ;; *) as_fn_error $? "bad value ${enableval} for --enable-online" "$LINENO" 5 ;; esac else offline=false fi if test "x$offline" = xtrue; then OFFLINE_TRUE= OFFLINE_FALSE='#' else OFFLINE_TRUE='#' OFFLINE_FALSE= fi if test "x$enable_java" != "xno"; then : if test "x$JAVAPREFIX" = x; then test "x$JAVAC" = x && for ac_prog in "gcj$EXEEXT -C" guavac$EXEEXT jikes$EXEEXT javac$EXEEXT do # Extract the first word of "$ac_prog", so it can be a program name with args. set dummy $ac_prog; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if ${ac_cv_prog_JAVAC+:} false; then : $as_echo_n "(cached) " >&6 else if test -n "$JAVAC"; then ac_cv_prog_JAVAC="$JAVAC" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. 
for ac_exec_ext in '' $ac_executable_extensions; do if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_JAVAC="$ac_prog" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS fi fi JAVAC=$ac_cv_prog_JAVAC if test -n "$JAVAC"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $JAVAC" >&5 $as_echo "$JAVAC" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi test -n "$JAVAC" && break done else test "x$JAVAC" = x && for ac_prog in "gcj$EXEEXT -C" guavac$EXEEXT jikes$EXEEXT javac$EXEEXT do # Extract the first word of "$ac_prog", so it can be a program name with args. set dummy $ac_prog; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if ${ac_cv_prog_JAVAC+:} false; then : $as_echo_n "(cached) " >&6 else if test -n "$JAVAC"; then ac_cv_prog_JAVAC="$JAVAC" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_JAVAC="$ac_prog" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS fi fi JAVAC=$ac_cv_prog_JAVAC if test -n "$JAVAC"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $JAVAC" >&5 $as_echo "$JAVAC" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi test -n "$JAVAC" && break done test -n "$JAVAC" || JAVAC="$JAVAPREFIX" fi test "x$JAVAC" = x && { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: no acceptable Java compiler found in \$PATH" >&5 $as_echo "$as_me: WARNING: no acceptable Java compiler found in \$PATH" >&2;} { $as_echo "$as_me:${as_lineno-$LINENO}: checking if $JAVAC works" >&5 $as_echo_n "checking if $JAVAC works... 
" >&6; } if ${ac_cv_prog_javac_works+:} false; then : $as_echo_n "(cached) " >&6 else JAVA_TEST=Test.java CLASS_TEST=Test.class cat << \EOF > $JAVA_TEST /* #line 21593 "configure" */ public class Test { } EOF if { ac_try='$JAVAC $JAVACFLAGS $JAVA_TEST' { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_try\""; } >&5 (eval $ac_try) 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; }; } >/dev/null 2>&1; then ac_cv_prog_javac_works=yes else { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: The Java compiler $JAVAC failed (see config.log, check the CLASSPATH?)" >&5 $as_echo "$as_me: WARNING: The Java compiler $JAVAC failed (see config.log, check the CLASSPATH?)" >&2;} echo "configure: failed program was:" >&5 cat $JAVA_TEST >&5 fi rm -f $JAVA_TEST $CLASS_TEST fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_prog_javac_works" >&5 $as_echo "$ac_cv_prog_javac_works" >&6; } if test "x$JAVAC" != x; then JNI_INCLUDE_DIRS="" if test "x$JAVA_HOME" != x; then _JTOPDIR="$JAVA_HOME" else if test "x$JAVAC" = x; then JAVAC=javac fi # Extract the first word of "$JAVAC", so it can be a program name with args. set dummy $JAVAC; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if ${ac_cv_path__ACJNI_JAVAC+:} false; then : $as_echo_n "(cached) " >&6 else case $_ACJNI_JAVAC in [\\/]* | ?:[\\/]*) ac_cv_path__ACJNI_JAVAC="$_ACJNI_JAVAC" # Let the user override the test with a path. ;; *) as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. 
for ac_exec_ext in '' $ac_executable_extensions; do if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_path__ACJNI_JAVAC="$as_dir/$ac_word$ac_exec_ext" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS test -z "$ac_cv_path__ACJNI_JAVAC" && ac_cv_path__ACJNI_JAVAC="no" ;; esac fi _ACJNI_JAVAC=$ac_cv_path__ACJNI_JAVAC if test -n "$_ACJNI_JAVAC"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $_ACJNI_JAVAC" >&5 $as_echo "$_ACJNI_JAVAC" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi if test "x$_ACJNI_JAVAC" = xno; then $as_echo "cannot find JDK; try setting \$JAVAC or \$JAVA_HOME" else # find the include directory relative to the javac executable _cur=""$_ACJNI_JAVAC"" while ls -ld "$_cur" 2>/dev/null | grep " -> " >/dev/null; do { $as_echo "$as_me:${as_lineno-$LINENO}: checking symlink for $_cur" >&5 $as_echo_n "checking symlink for $_cur... " >&6; } _slink=`ls -ld "$_cur" | sed 's/.* -> //'` case "$_slink" in /*) _cur="$_slink";; # 'X' avoids triggering unwanted echo options. *) _cur=`echo "X$_cur" | sed -e 's/^X//' -e 's:[^/]*$::'`"$_slink";; esac { $as_echo "$as_me:${as_lineno-$LINENO}: result: $_cur" >&5 $as_echo "$_cur" >&6; } done _ACJNI_FOLLOWED="$_cur" _JTOPDIR=`echo "$_ACJNI_FOLLOWED" | sed -e 's://*:/:g' -e 's:/[^/]*$::'` fi fi case "$host_os" in darwin*) # Apple JDK is at /System location and has headers symlinked elsewhere case "$_JTOPDIR" in /System/Library/Frameworks/JavaVM.framework/*) _JTOPDIR=`echo "$_JTOPDIR" | sed -e 's:/[^/]*$::'` _JINC="$_JTOPDIR/Headers";; *) _JINC="$_JTOPDIR/include";; esac;; *) _JINC="$_JTOPDIR/include";; esac $as_echo "$as_me:${as_lineno-$LINENO}: _JTOPDIR=$_JTOPDIR" >&5 $as_echo "$as_me:${as_lineno-$LINENO}: _JINC=$_JINC" >&5 # On Mac OS X 10.6.4, jni.h is a symlink: # /System/Library/Frameworks/JavaVM.framework/Versions/Current/Headers/jni.h # -> ../../CurrentJDK/Headers/jni.h. 
as_ac_File=`$as_echo "ac_cv_file_$_JINC/jni.h" | $as_tr_sh` { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $_JINC/jni.h" >&5 $as_echo_n "checking for $_JINC/jni.h... " >&6; } if eval \${$as_ac_File+:} false; then : $as_echo_n "(cached) " >&6 else test "$cross_compiling" = yes && as_fn_error $? "cannot check for file existence when cross compiling" "$LINENO" 5 if test -r "$_JINC/jni.h"; then eval "$as_ac_File=yes" else eval "$as_ac_File=no" fi fi eval ac_res=\$$as_ac_File { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5 $as_echo "$ac_res" >&6; } if eval test \"x\$"$as_ac_File"\" = x"yes"; then : JNI_INCLUDE_DIRS="$JNI_INCLUDE_DIRS $_JINC" else _JTOPDIR=`echo "$_JTOPDIR" | sed -e 's:/[^/]*$::'` as_ac_File=`$as_echo "ac_cv_file_$_JTOPDIR/include/jni.h" | $as_tr_sh` { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $_JTOPDIR/include/jni.h" >&5 $as_echo_n "checking for $_JTOPDIR/include/jni.h... " >&6; } if eval \${$as_ac_File+:} false; then : $as_echo_n "(cached) " >&6 else test "$cross_compiling" = yes && as_fn_error $? 
"cannot check for file existence when cross compiling" "$LINENO" 5 if test -r "$_JTOPDIR/include/jni.h"; then eval "$as_ac_File=yes" else eval "$as_ac_File=no" fi fi eval ac_res=\$$as_ac_File { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5 $as_echo "$ac_res" >&6; } if eval test \"x\$"$as_ac_File"\" = x"yes"; then : JNI_INCLUDE_DIRS="$JNI_INCLUDE_DIRS $_JTOPDIR/include" else $as_echo "cannot find JDK header files" fi fi # get the likely subdirectories for system specific java includes case "$host_os" in bsdi*) _JNI_INC_SUBDIRS="bsdos";; freebsd*) _JNI_INC_SUBDIRS="freebsd";; darwin*) _JNI_INC_SUBDIRS="darwin";; linux*) _JNI_INC_SUBDIRS="linux genunix";; osf*) _JNI_INC_SUBDIRS="alpha";; solaris*) _JNI_INC_SUBDIRS="solaris";; mingw*) _JNI_INC_SUBDIRS="win32";; cygwin*) _JNI_INC_SUBDIRS="win32";; *) _JNI_INC_SUBDIRS="genunix";; esac # add any subdirectories that are present for JINCSUBDIR in $_JNI_INC_SUBDIRS do if test -d "$_JTOPDIR/include/$JINCSUBDIR"; then JNI_INCLUDE_DIRS="$JNI_INCLUDE_DIRS $_JTOPDIR/include/$JINCSUBDIR" fi done for JNI_INCLUDE_DIR in $JNI_INCLUDE_DIRS do JNI_CPPFLAGS="$JNI_CPPFLAGS -I$JNI_INCLUDE_DIR" done JNI_CPPFLAGS=$JNI_CPPFLAGS fi if test x$JAVAPREFIX = x; then test x$JAVA = x && for ac_prog in kaffe$EXEEXT java$EXEEXT do # Extract the first word of "$ac_prog", so it can be a program name with args. set dummy $ac_prog; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if ${ac_cv_prog_JAVA+:} false; then : $as_echo_n "(cached) " >&6 else if test -n "$JAVA"; then ac_cv_prog_JAVA="$JAVA" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. 
for ac_exec_ext in '' $ac_executable_extensions; do if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_JAVA="$ac_prog" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS fi fi JAVA=$ac_cv_prog_JAVA if test -n "$JAVA"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $JAVA" >&5 $as_echo "$JAVA" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi test -n "$JAVA" && break done else test x$JAVA = x && for ac_prog in kaffe$EXEEXT java$EXEEXT do # Extract the first word of "$ac_prog", so it can be a program name with args. set dummy $ac_prog; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if ${ac_cv_prog_JAVA+:} false; then : $as_echo_n "(cached) " >&6 else if test -n "$JAVA"; then ac_cv_prog_JAVA="$JAVA" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_JAVA="$ac_prog" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS fi fi JAVA=$ac_cv_prog_JAVA if test -n "$JAVA"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $JAVA" >&5 $as_echo "$JAVA" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi test -n "$JAVA" && break done test -n "$JAVA" || JAVA="$JAVAPREFIX" fi test x$JAVA = x && { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: no acceptable Java virtual machine found in \$PATH" >&5 $as_echo "$as_me: WARNING: no acceptable Java virtual machine found in \$PATH" >&2;} # Extract the first word of "uudecode$EXEEXT", so it can be a program name with args. 
set dummy uudecode$EXEEXT; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if ${ac_cv_prog_uudecode+:} false; then : $as_echo_n "(cached) " >&6 else if test -n "$uudecode"; then ac_cv_prog_uudecode="$uudecode" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_uudecode="yes" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS fi fi uudecode=$ac_cv_prog_uudecode if test -n "$uudecode"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $uudecode" >&5 $as_echo "$uudecode" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi if test x$JAVA != x; then if test x$uudecode = xyes; then { $as_echo "$as_me:${as_lineno-$LINENO}: checking if uudecode can decode base 64 file" >&5 $as_echo_n "checking if uudecode can decode base 64 file... 
" >&6; } if ${ac_cv_prog_uudecode_base64+:} false; then : $as_echo_n "(cached) " >&6 else cat << \EOF > Test.uue begin-base64 644 Test.class yv66vgADAC0AFQcAAgEABFRlc3QHAAQBABBqYXZhL2xhbmcvT2JqZWN0AQAE bWFpbgEAFihbTGphdmEvbGFuZy9TdHJpbmc7KVYBAARDb2RlAQAPTGluZU51 bWJlclRhYmxlDAAKAAsBAARleGl0AQAEKEkpVgoADQAJBwAOAQAQamF2YS9s YW5nL1N5c3RlbQEABjxpbml0PgEAAygpVgwADwAQCgADABEBAApTb3VyY2VG aWxlAQAJVGVzdC5qYXZhACEAAQADAAAAAAACAAkABQAGAAEABwAAACEAAQAB AAAABQO4AAyxAAAAAQAIAAAACgACAAAACgAEAAsAAQAPABAAAQAHAAAAIQAB AAEAAAAFKrcAErEAAAABAAgAAAAKAAIAAAAEAAQABAABABMAAAACABQ= ==== EOF if uudecode$EXEEXT Test.uue; then ac_cv_prog_uudecode_base64=yes else echo "configure: 21937: uudecode had trouble decoding base 64 file 'Test.uue'" >&5 echo "configure: failed file was:" >&5 cat Test.uue >&5 ac_cv_prog_uudecode_base64=no fi rm -f Test.uue fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_prog_uudecode_base64" >&5 $as_echo "$ac_cv_prog_uudecode_base64" >&6; } fi if test x$ac_cv_prog_uudecode_base64 != xyes; then rm -f Test.class { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: I have to compile Test.class from scratch" >&5 $as_echo "$as_me: WARNING: I have to compile Test.class from scratch" >&2;} if test x$ac_cv_prog_javac_works = xno; then { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: Cannot compile java source. $JAVAC does not work properly" >&5 $as_echo "$as_me: WARNING: Cannot compile java source. $JAVAC does not work properly" >&2;} fi if test x$ac_cv_prog_javac_works = x; then if test "x$JAVAPREFIX" = x; then test "x$JAVAC" = x && for ac_prog in "gcj$EXEEXT -C" guavac$EXEEXT jikes$EXEEXT javac$EXEEXT do # Extract the first word of "$ac_prog", so it can be a program name with args. set dummy $ac_prog; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... 
" >&6; } if ${ac_cv_prog_JAVAC+:} false; then : $as_echo_n "(cached) " >&6 else if test -n "$JAVAC"; then ac_cv_prog_JAVAC="$JAVAC" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_JAVAC="$ac_prog" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS fi fi JAVAC=$ac_cv_prog_JAVAC if test -n "$JAVAC"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $JAVAC" >&5 $as_echo "$JAVAC" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi test -n "$JAVAC" && break done else test "x$JAVAC" = x && for ac_prog in "gcj$EXEEXT -C" guavac$EXEEXT jikes$EXEEXT javac$EXEEXT do # Extract the first word of "$ac_prog", so it can be a program name with args. set dummy $ac_prog; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if ${ac_cv_prog_JAVAC+:} false; then : $as_echo_n "(cached) " >&6 else if test -n "$JAVAC"; then ac_cv_prog_JAVAC="$JAVAC" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. 
for ac_exec_ext in '' $ac_executable_extensions; do if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_JAVAC="$ac_prog" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS fi fi JAVAC=$ac_cv_prog_JAVAC if test -n "$JAVAC"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $JAVAC" >&5 $as_echo "$JAVAC" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi test -n "$JAVAC" && break done test -n "$JAVAC" || JAVAC="$JAVAPREFIX" fi test "x$JAVAC" = x && { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: no acceptable Java compiler found in \$PATH" >&5 $as_echo "$as_me: WARNING: no acceptable Java compiler found in \$PATH" >&2;} { $as_echo "$as_me:${as_lineno-$LINENO}: checking if $JAVAC works" >&5 $as_echo_n "checking if $JAVAC works... " >&6; } if ${ac_cv_prog_javac_works+:} false; then : $as_echo_n "(cached) " >&6 else JAVA_TEST=Test.java CLASS_TEST=Test.class cat << \EOF > $JAVA_TEST /* #line 22057 "configure" */ public class Test { } EOF if { ac_try='$JAVAC $JAVACFLAGS $JAVA_TEST' { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_try\""; } >&5 (eval $ac_try) 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; }; } >/dev/null 2>&1; then ac_cv_prog_javac_works=yes else { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: The Java compiler $JAVAC failed (see config.log, check the CLASSPATH?)" >&5 $as_echo "$as_me: WARNING: The Java compiler $JAVAC failed (see config.log, check the CLASSPATH?)" >&2;} echo "configure: failed program was:" >&5 cat $JAVA_TEST >&5 fi rm -f $JAVA_TEST $CLASS_TEST fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_prog_javac_works" >&5 $as_echo "$ac_cv_prog_javac_works" >&6; } fi fi { $as_echo "$as_me:${as_lineno-$LINENO}: checking if $JAVA works" >&5 $as_echo_n "checking if $JAVA works... 
" >&6; } if ${ac_cv_prog_java_works+:} false; then : $as_echo_n "(cached) " >&6 else JAVA_TEST=Test.java CLASS_TEST=Test.class TEST=Test cat << \EOF > $JAVA_TEST /* [#]line 22093 "configure" */ public class Test { public static void main (String args[]) { System.exit(0); } } EOF if test x$ac_cv_prog_uudecode_base64 != xyes; then if { ac_try='$JAVAC $JAVACFLAGS $JAVA_TEST' { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_try\""; } >&5 (eval $ac_try) 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; }; } && test -s $CLASS_TEST; then : else echo "configure: failed program was:" >&5 cat $JAVA_TEST >&5 { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: The Java compiler $JAVAC failed (see config.log, check the CLASSPATH?)" >&5 $as_echo "$as_me: WARNING: The Java compiler $JAVAC failed (see config.log, check the CLASSPATH?)" >&2;} fi fi if { ac_try='$JAVA $JAVAFLAGS $TEST' { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_try\""; } >&5 (eval $ac_try) 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; }; } >/dev/null 2>&1; then ac_cv_prog_java_works=yes else echo "configure: failed program was:" >&5 cat $JAVA_TEST >&5 { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: The Java VM $JAVA failed (see config.log, check the CLASSPATH?)" >&5 $as_echo "$as_me: WARNING: The Java VM $JAVA failed (see config.log, check the CLASSPATH?)" >&2;} fi rm -fr $JAVA_TEST $CLASS_TEST Test.uue fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_prog_java_works" >&5 $as_echo "$ac_cv_prog_java_works" >&6; } fi # Extract the first word of "ant", so it can be a program name with args. set dummy ant; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... 
" >&6; } if ${ac_cv_path_ANT_FOUND+:} false; then : $as_echo_n "(cached) " >&6 else case $ANT_FOUND in [\\/]* | ?:[\\/]*) ac_cv_path_ANT_FOUND="$ANT_FOUND" # Let the user override the test with a path. ;; *) as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_path_ANT_FOUND="$as_dir/$ac_word$ac_exec_ext" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS ;; esac fi ANT_FOUND=$ac_cv_path_ANT_FOUND if test -n "$ANT_FOUND"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ANT_FOUND" >&5 $as_echo "$ANT_FOUND" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi fi if test "x$JNI_CPPFLAGS" != x && test "x$ANT_FOUND" != x && test "x$JAVA" != x; then : ax_java_support=yes else ax_java_support=no fi if test "x$ax_java_support" == "xyes"; then X_JNI_TRUE= X_JNI_FALSE='#' else X_JNI_TRUE='#' X_JNI_FALSE= fi ac_config_commands="$ac_config_commands tsk/tsk_incs.h" { $as_echo "$as_me:${as_lineno-$LINENO}: checking if libtool needs -no-undefined flag to build shared libraries" >&5 $as_echo_n "checking if libtool needs -no-undefined flag to build shared libraries... " >&6; } case "$host" in *-*-mingw*) { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 $as_echo "yes" >&6; } LIBTSK_LDFLAGS="-no-undefined" ;; *) { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } ;; esac for ac_func in getline do : ac_fn_c_check_func "$LINENO" "getline" "ac_cv_func_getline" if test "x$ac_cv_func_getline" = xyes; then : cat >>confdefs.h <<_ACEOF #define HAVE_GETLINE 1 _ACEOF fi done { $as_echo "$as_me:${as_lineno-$LINENO}: checking for library containing regexec" >&5 $as_echo_n "checking for library containing regexec... 
" >&6; } if ${ac_cv_search_regexec+:} false; then : $as_echo_n "(cached) " >&6 else ac_func_search_save_LIBS=$LIBS cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ /* Override any GCC internal prototype to avoid an error. Use char because int might match the return type of a GCC builtin and then its argument prototype would still apply. */ #ifdef __cplusplus extern "C" #endif char regexec (); int main () { return regexec (); ; return 0; } _ACEOF for ac_lib in '' regex; do if test -z "$ac_lib"; then ac_res="none required" else ac_res=-l$ac_lib LIBS="-l$ac_lib $ac_func_search_save_LIBS" fi if ac_fn_c_try_link "$LINENO"; then : ac_cv_search_regexec=$ac_res fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext if ${ac_cv_search_regexec+:} false; then : break fi done if ${ac_cv_search_regexec+:} false; then : else ac_cv_search_regexec=no fi rm conftest.$ac_ext LIBS=$ac_func_search_save_LIBS fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_search_regexec" >&5 $as_echo "$ac_cv_search_regexec" >&6; } ac_res=$ac_cv_search_regexec if test "$ac_res" != no; then : test "$ac_res" = "none required" || LIBS="$ac_res $LIBS" else as_fn_error $? 
"missing regex" "$LINENO" 5 fi ax_openssl=no WARNINGS='-Wall -Wextra -Wno-unused-parameter' AM_CFLAGS=$WARNINGS AM_CXXFLAGS=$WARNINGS ac_config_files="$ac_config_files Makefile tsk/Makefile tsk/base/Makefile tsk/img/Makefile tsk/vs/Makefile tsk/fs/Makefile tsk/hashdb/Makefile tsk/auto/Makefile tsk/pool/Makefile tsk/util/Makefile tools/Makefile tools/imgtools/Makefile tools/vstools/Makefile tools/fstools/Makefile tools/hashtools/Makefile tools/srchtools/Makefile tools/autotools/Makefile tools/pooltools/Makefile tools/sorter/Makefile tools/timeline/Makefile tools/fiwalk/Makefile tools/fiwalk/src/Makefile tools/fiwalk/plugins/Makefile tests/Makefile samples/Makefile man/Makefile bindings/java/Makefile bindings/java/jni/Makefile case-uco/java/Makefile unit_tests/Makefile unit_tests/base/Makefile" cat >confcache <<\_ACEOF # This file is a shell script that caches the results of configure # tests run on this system so they can be shared between configure # scripts and configure runs, see configure's option --config-cache. # It is not useful on other systems. If it contains results you don't # want to keep, you may remove or edit it. # # config.status only pays attention to the cache file if you give it # the --recheck option to rerun configure. # # `ac_cv_env_foo' variables (set or unset) will be overridden when # loading this file, other *unset* `ac_cv_foo' will be assigned the # following values. _ACEOF # The following way of writing the cache mishandles newlines in values, # but we know of no workaround that is simple, portable, and efficient. # So, we kill variables containing newlines. # Ultrix sh set writes to stderr and can't be redirected directly, # and sets the high bit in the cache file unless we assign to the vars. 
( for ac_var in `(set) 2>&1 | sed -n 's/^\([a-zA-Z_][a-zA-Z0-9_]*\)=.*/\1/p'`; do eval ac_val=\$$ac_var case $ac_val in #( *${as_nl}*) case $ac_var in #( *_cv_*) { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: cache variable $ac_var contains a newline" >&5 $as_echo "$as_me: WARNING: cache variable $ac_var contains a newline" >&2;} ;; esac case $ac_var in #( _ | IFS | as_nl) ;; #( BASH_ARGV | BASH_SOURCE) eval $ac_var= ;; #( *) { eval $ac_var=; unset $ac_var;} ;; esac ;; esac done (set) 2>&1 | case $as_nl`(ac_space=' '; set) 2>&1` in #( *${as_nl}ac_space=\ *) # `set' does not quote correctly, so add quotes: double-quote # substitution turns \\\\ into \\, and sed turns \\ into \. sed -n \ "s/'/'\\\\''/g; s/^\\([_$as_cr_alnum]*_cv_[_$as_cr_alnum]*\\)=\\(.*\\)/\\1='\\2'/p" ;; #( *) # `set' quotes correctly as required by POSIX, so do not add quotes. sed -n "/^[_$as_cr_alnum]*_cv_[_$as_cr_alnum]*=/p" ;; esac | sort ) | sed ' /^ac_cv_env_/b end t clear :clear s/^\([^=]*\)=\(.*[{}].*\)$/test "${\1+set}" = set || &/ t end s/^\([^=]*\)=\(.*\)$/\1=${\1=\2}/ :end' >>confcache if diff "$cache_file" confcache >/dev/null 2>&1; then :; else if test -w "$cache_file"; then if test "x$cache_file" != "x/dev/null"; then { $as_echo "$as_me:${as_lineno-$LINENO}: updating cache $cache_file" >&5 $as_echo "$as_me: updating cache $cache_file" >&6;} if test ! -f "$cache_file" || test -h "$cache_file"; then cat confcache >"$cache_file" else case $cache_file in #( */* | ?:*) mv -f confcache "$cache_file"$$ && mv -f "$cache_file"$$ "$cache_file" ;; #( *) mv -f confcache "$cache_file" ;; esac fi fi else { $as_echo "$as_me:${as_lineno-$LINENO}: not updating unwritable cache $cache_file" >&5 $as_echo "$as_me: not updating unwritable cache $cache_file" >&6;} fi fi rm -f confcache test "x$prefix" = xNONE && prefix=$ac_default_prefix # Let make expand exec_prefix. 
test "x$exec_prefix" = xNONE && exec_prefix='${prefix}' DEFS=-DHAVE_CONFIG_H ac_libobjs= ac_ltlibobjs= U= for ac_i in : $LIBOBJS; do test "x$ac_i" = x: && continue # 1. Remove the extension, and $U if already installed. ac_script='s/\$U\././;s/\.o$//;s/\.obj$//' ac_i=`$as_echo "$ac_i" | sed "$ac_script"` # 2. Prepend LIBOBJDIR. When used with automake>=1.10 LIBOBJDIR # will be set to the directory where LIBOBJS objects are built. as_fn_append ac_libobjs " \${LIBOBJDIR}$ac_i\$U.$ac_objext" as_fn_append ac_ltlibobjs " \${LIBOBJDIR}$ac_i"'$U.lo' done LIBOBJS=$ac_libobjs LTLIBOBJS=$ac_ltlibobjs { $as_echo "$as_me:${as_lineno-$LINENO}: checking that generated files are newer than configure" >&5 $as_echo_n "checking that generated files are newer than configure... " >&6; } if test -n "$am_sleep_pid"; then # Hide warnings about reused PIDs. wait $am_sleep_pid 2>/dev/null fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: done" >&5 $as_echo "done" >&6; } if test -n "$EXEEXT"; then am__EXEEXT_TRUE= am__EXEEXT_FALSE='#' else am__EXEEXT_TRUE='#' am__EXEEXT_FALSE= fi if test -z "${CPPUNIT_TRUE}" && test -z "${CPPUNIT_FALSE}"; then as_fn_error $? "conditional \"CPPUNIT\" was never defined. Usually this means the macro was only invoked conditionally." "$LINENO" 5 fi if test -z "${AMDEP_TRUE}" && test -z "${AMDEP_FALSE}"; then as_fn_error $? "conditional \"AMDEP\" was never defined. Usually this means the macro was only invoked conditionally." "$LINENO" 5 fi if test -z "${am__fastdepCC_TRUE}" && test -z "${am__fastdepCC_FALSE}"; then as_fn_error $? "conditional \"am__fastdepCC\" was never defined. Usually this means the macro was only invoked conditionally." "$LINENO" 5 fi if test -z "${MAINTAINER_MODE_TRUE}" && test -z "${MAINTAINER_MODE_FALSE}"; then as_fn_error $? "conditional \"MAINTAINER_MODE\" was never defined. Usually this means the macro was only invoked conditionally." 
"$LINENO" 5 fi if test -z "${am__fastdepCXX_TRUE}" && test -z "${am__fastdepCXX_FALSE}"; then as_fn_error $? "conditional \"am__fastdepCXX\" was never defined. Usually this means the macro was only invoked conditionally." "$LINENO" 5 fi if test -z "${am__fastdepCC_TRUE}" && test -z "${am__fastdepCC_FALSE}"; then as_fn_error $? "conditional \"am__fastdepCC\" was never defined. Usually this means the macro was only invoked conditionally." "$LINENO" 5 fi if test -z "${HAVE_LIBSQLITE3_TRUE}" && test -z "${HAVE_LIBSQLITE3_FALSE}"; then as_fn_error $? "conditional \"HAVE_LIBSQLITE3\" was never defined. Usually this means the macro was only invoked conditionally." "$LINENO" 5 fi if test -z "${HAVE_CPPUNIT_TRUE}" && test -z "${HAVE_CPPUNIT_FALSE}"; then as_fn_error $? "conditional \"HAVE_CPPUNIT\" was never defined. Usually this means the macro was only invoked conditionally." "$LINENO" 5 fi if test -z "${OFFLINE_TRUE}" && test -z "${OFFLINE_FALSE}"; then as_fn_error $? "conditional \"OFFLINE\" was never defined. Usually this means the macro was only invoked conditionally." "$LINENO" 5 fi if test -z "${X_JNI_TRUE}" && test -z "${X_JNI_FALSE}"; then as_fn_error $? "conditional \"X_JNI\" was never defined. Usually this means the macro was only invoked conditionally." "$LINENO" 5 fi : "${CONFIG_STATUS=./config.status}" ac_write_fail=0 ac_clean_files_save=$ac_clean_files ac_clean_files="$ac_clean_files $CONFIG_STATUS" { $as_echo "$as_me:${as_lineno-$LINENO}: creating $CONFIG_STATUS" >&5 $as_echo "$as_me: creating $CONFIG_STATUS" >&6;} as_write_fail=0 cat >$CONFIG_STATUS <<_ASEOF || as_write_fail=1 #! $SHELL # Generated by $as_me. # Run this file to recreate the current configuration. # Compiler output produced by configure, useful for debugging # configure, is in config.log if it exists. 
debug=false ac_cs_recheck=false ac_cs_silent=false SHELL=\${CONFIG_SHELL-$SHELL} export SHELL _ASEOF cat >>$CONFIG_STATUS <<\_ASEOF || as_write_fail=1 ## -------------------- ## ## M4sh Initialization. ## ## -------------------- ## # Be more Bourne compatible DUALCASE=1; export DUALCASE # for MKS sh if test -n "${ZSH_VERSION+set}" && (emulate sh) >/dev/null 2>&1; then : emulate sh NULLCMD=: # Pre-4.2 versions of Zsh do word splitting on ${1+"$@"}, which # is contrary to our usage. Disable this feature. alias -g '${1+"$@"}'='"$@"' setopt NO_GLOB_SUBST else case `(set -o) 2>/dev/null` in #( *posix*) : set -o posix ;; #( *) : ;; esac fi as_nl=' ' export as_nl # Printing a long string crashes Solaris 7 /usr/bin/printf. as_echo='\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\' as_echo=$as_echo$as_echo$as_echo$as_echo$as_echo as_echo=$as_echo$as_echo$as_echo$as_echo$as_echo$as_echo # Prefer a ksh shell builtin over an external printf program on Solaris, # but without wasting forks for bash or zsh. if test -z "$BASH_VERSION$ZSH_VERSION" \ && (test "X`print -r -- $as_echo`" = "X$as_echo") 2>/dev/null; then as_echo='print -r --' as_echo_n='print -rn --' elif (test "X`printf %s $as_echo`" = "X$as_echo") 2>/dev/null; then as_echo='printf %s\n' as_echo_n='printf %s' else if test "X`(/usr/ucb/echo -n -n $as_echo) 2>/dev/null`" = "X-n $as_echo"; then as_echo_body='eval /usr/ucb/echo -n "$1$as_nl"' as_echo_n='/usr/ucb/echo -n' else as_echo_body='eval expr "X$1" : "X\\(.*\\)"' as_echo_n_body='eval arg=$1; case $arg in #( *"$as_nl"*) expr "X$arg" : "X\\(.*\\)$as_nl"; arg=`expr "X$arg" : ".*$as_nl\\(.*\\)"`;; esac; expr "X$arg" : "X\\(.*\\)" | tr -d "$as_nl" ' export as_echo_n_body as_echo_n='sh -c $as_echo_n_body as_echo' fi export as_echo_body as_echo='sh -c $as_echo_body as_echo' fi # The user is always right. 
if test "${PATH_SEPARATOR+set}" != set; then PATH_SEPARATOR=: (PATH='/bin;/bin'; FPATH=$PATH; sh -c :) >/dev/null 2>&1 && { (PATH='/bin:/bin'; FPATH=$PATH; sh -c :) >/dev/null 2>&1 || PATH_SEPARATOR=';' } fi # IFS # We need space, tab and new line, in precisely that order. Quoting is # there to prevent editors from complaining about space-tab. # (If _AS_PATH_WALK were called with IFS unset, it would disable word # splitting by setting IFS to empty value.) IFS=" "" $as_nl" # Find who we are. Look in the path if we contain no directory separator. as_myself= case $0 in #(( *[\\/]* ) as_myself=$0 ;; *) as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. test -r "$as_dir/$0" && as_myself=$as_dir/$0 && break done IFS=$as_save_IFS ;; esac # We did not find ourselves, most probably we were run as `sh COMMAND' # in which case we are not to be found in the path. if test "x$as_myself" = x; then as_myself=$0 fi if test ! -f "$as_myself"; then $as_echo "$as_myself: error: cannot find myself; rerun with an absolute file name" >&2 exit 1 fi # Unset variables that we do not need and which cause bugs (e.g. in # pre-3.0 UWIN ksh). But do not cause bugs in bash 2.01; the "|| exit 1" # suppresses any "Segmentation fault" message there. '((' could # trigger a bug in pdksh 5.2.14. for as_var in BASH_ENV ENV MAIL MAILPATH do eval test x\${$as_var+set} = xset \ && ( (unset $as_var) || exit 1) >/dev/null 2>&1 && unset $as_var || : done PS1='$ ' PS2='> ' PS4='+ ' # NLS nuisances. LC_ALL=C export LC_ALL LANGUAGE=C export LANGUAGE # CDPATH. (unset CDPATH) >/dev/null 2>&1 && unset CDPATH # as_fn_error STATUS ERROR [LINENO LOG_FD] # ---------------------------------------- # Output "`basename $0`: error: ERROR" to stderr. If LINENO and LOG_FD are # provided, also output the error to LOG_FD, referencing LINENO. Then exit the # script with STATUS, using 1 if that was 0. 
as_fn_error () { as_status=$1; test $as_status -eq 0 && as_status=1 if test "$4"; then as_lineno=${as_lineno-"$3"} as_lineno_stack=as_lineno_stack=$as_lineno_stack $as_echo "$as_me:${as_lineno-$LINENO}: error: $2" >&$4 fi $as_echo "$as_me: error: $2" >&2 as_fn_exit $as_status } # as_fn_error # as_fn_set_status STATUS # ----------------------- # Set $? to STATUS, without forking. as_fn_set_status () { return $1 } # as_fn_set_status # as_fn_exit STATUS # ----------------- # Exit the shell with STATUS, even in a "trap 0" or "set -e" context. as_fn_exit () { set +e as_fn_set_status $1 exit $1 } # as_fn_exit # as_fn_unset VAR # --------------- # Portably unset VAR. as_fn_unset () { { eval $1=; unset $1;} } as_unset=as_fn_unset # as_fn_append VAR VALUE # ---------------------- # Append the text in VALUE to the end of the definition contained in VAR. Take # advantage of any shell optimizations that allow amortized linear growth over # repeated appends, instead of the typical quadratic growth present in naive # implementations. if (eval "as_var=1; as_var+=2; test x\$as_var = x12") 2>/dev/null; then : eval 'as_fn_append () { eval $1+=\$2 }' else as_fn_append () { eval $1=\$$1\$2 } fi # as_fn_append # as_fn_arith ARG... # ------------------ # Perform arithmetic evaluation on the ARGs, and store the result in the # global $as_val. Take advantage of shells that can avoid forks. The arguments # must be portable across $(()) and expr. if (eval "test \$(( 1 + 1 )) = 2") 2>/dev/null; then : eval 'as_fn_arith () { as_val=$(( $* )) }' else as_fn_arith () { as_val=`expr "$@" || test $? 
-eq 1` } fi # as_fn_arith if expr a : '\(a\)' >/dev/null 2>&1 && test "X`expr 00001 : '.*\(...\)'`" = X001; then as_expr=expr else as_expr=false fi if (basename -- /) >/dev/null 2>&1 && test "X`basename -- / 2>&1`" = "X/"; then as_basename=basename else as_basename=false fi if (as_dir=`dirname -- /` && test "X$as_dir" = X/) >/dev/null 2>&1; then as_dirname=dirname else as_dirname=false fi as_me=`$as_basename -- "$0" || $as_expr X/"$0" : '.*/\([^/][^/]*\)/*$' \| \ X"$0" : 'X\(//\)$' \| \ X"$0" : 'X\(/\)' \| . 2>/dev/null || $as_echo X/"$0" | sed '/^.*\/\([^/][^/]*\)\/*$/{ s//\1/ q } /^X\/\(\/\/\)$/{ s//\1/ q } /^X\/\(\/\).*/{ s//\1/ q } s/.*/./; q'` # Avoid depending upon Character Ranges. as_cr_letters='abcdefghijklmnopqrstuvwxyz' as_cr_LETTERS='ABCDEFGHIJKLMNOPQRSTUVWXYZ' as_cr_Letters=$as_cr_letters$as_cr_LETTERS as_cr_digits='0123456789' as_cr_alnum=$as_cr_Letters$as_cr_digits ECHO_C= ECHO_N= ECHO_T= case `echo -n x` in #((((( -n*) case `echo 'xy\c'` in *c*) ECHO_T=' ';; # ECHO_T is single tab character. xy) ECHO_C='\c';; *) echo `echo ksh88 bug on AIX 6.1` > /dev/null ECHO_T=' ';; esac;; *) ECHO_N='-n';; esac rm -f conf$$ conf$$.exe conf$$.file if test -d conf$$.dir; then rm -f conf$$.dir/conf$$.file else rm -f conf$$.dir mkdir conf$$.dir 2>/dev/null fi if (echo >conf$$.file) 2>/dev/null; then if ln -s conf$$.file conf$$ 2>/dev/null; then as_ln_s='ln -s' # ... but there are two gotchas: # 1) On MSYS, both `ln -s file dir' and `ln file dir' fail. # 2) DJGPP < 2.04 has no symlinks; `ln -s' creates a wrapper executable. # In both cases, we have to default to `cp -pR'. ln -s conf$$.file conf$$.dir 2>/dev/null && test ! -f conf$$.exe || as_ln_s='cp -pR' elif ln conf$$.file conf$$ 2>/dev/null; then as_ln_s=ln else as_ln_s='cp -pR' fi else as_ln_s='cp -pR' fi rm -f conf$$ conf$$.exe conf$$.dir/conf$$.file conf$$.file rmdir conf$$.dir 2>/dev/null # as_fn_mkdir_p # ------------- # Create "$as_dir" as a directory, including parents if necessary. 
as_fn_mkdir_p () { case $as_dir in #( -*) as_dir=./$as_dir;; esac test -d "$as_dir" || eval $as_mkdir_p || { as_dirs= while :; do case $as_dir in #( *\'*) as_qdir=`$as_echo "$as_dir" | sed "s/'/'\\\\\\\\''/g"`;; #'( *) as_qdir=$as_dir;; esac as_dirs="'$as_qdir' $as_dirs" as_dir=`$as_dirname -- "$as_dir" || $as_expr X"$as_dir" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \ X"$as_dir" : 'X\(//\)[^/]' \| \ X"$as_dir" : 'X\(//\)$' \| \ X"$as_dir" : 'X\(/\)' \| . 2>/dev/null || $as_echo X"$as_dir" | sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{ s//\1/ q } /^X\(\/\/\)[^/].*/{ s//\1/ q } /^X\(\/\/\)$/{ s//\1/ q } /^X\(\/\).*/{ s//\1/ q } s/.*/./; q'` test -d "$as_dir" && break done test -z "$as_dirs" || eval "mkdir $as_dirs" } || test -d "$as_dir" || as_fn_error $? "cannot create directory $as_dir" } # as_fn_mkdir_p if mkdir -p . 2>/dev/null; then as_mkdir_p='mkdir -p "$as_dir"' else test -d ./-p && rmdir ./-p as_mkdir_p=false fi # as_fn_executable_p FILE # ----------------------- # Test if FILE is an executable regular file. as_fn_executable_p () { test -f "$1" && test -x "$1" } # as_fn_executable_p as_test_x='test -x' as_executable_p=as_fn_executable_p # Sed expression to map a string onto a valid CPP name. as_tr_cpp="eval sed 'y%*$as_cr_letters%P$as_cr_LETTERS%;s%[^_$as_cr_alnum]%_%g'" # Sed expression to map a string onto a valid variable name. as_tr_sh="eval sed 'y%*+%pp%;s%[^_$as_cr_alnum]%_%g'" exec 6>&1 ## ----------------------------------- ## ## Main body of $CONFIG_STATUS script. ## ## ----------------------------------- ## _ASEOF test $as_write_fail = 0 && chmod +x $CONFIG_STATUS || ac_write_fail=1 cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 # Save the log message, to keep $0 and so on meaningful, and to # report actual input values of CONFIG_FILES etc. instead of their # values after options handling. ac_log=" This file was extended by sleuthkit $as_me 4.11.1, which was generated by GNU Autoconf 2.69. 
Invocation command line was CONFIG_FILES = $CONFIG_FILES CONFIG_HEADERS = $CONFIG_HEADERS CONFIG_LINKS = $CONFIG_LINKS CONFIG_COMMANDS = $CONFIG_COMMANDS $ $0 $@ on `(hostname || uname -n) 2>/dev/null | sed 1q` " _ACEOF case $ac_config_files in *" "*) set x $ac_config_files; shift; ac_config_files=$*;; esac case $ac_config_headers in *" "*) set x $ac_config_headers; shift; ac_config_headers=$*;; esac cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 # Files that config.status was made for. config_files="$ac_config_files" config_headers="$ac_config_headers" config_commands="$ac_config_commands" _ACEOF cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 ac_cs_usage="\ \`$as_me' instantiates files and other configuration actions from templates according to the current configuration. Unless the files and actions are specified as TAGs, all are instantiated by default. Usage: $0 [OPTION]... [TAG]... -h, --help print this help, then exit -V, --version print version number and configuration settings, then exit --config print configuration, then exit -q, --quiet, --silent do not print progress messages -d, --debug don't remove temporary files --recheck update $as_me by reconfiguring in the same conditions --file=FILE[:TEMPLATE] instantiate the configuration file FILE --header=FILE[:TEMPLATE] instantiate the configuration header FILE Configuration files: $config_files Configuration headers: $config_headers Configuration commands: $config_commands Report bugs to the package provider." _ACEOF cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 ac_cs_config="`$as_echo "$ac_configure_args" | sed 's/^ //; s/[\\""\`\$]/\\\\&/g'`" ac_cs_version="\\ sleuthkit config.status 4.11.1 configured by $0, generated by GNU Autoconf 2.69, with options \\"\$ac_cs_config\\" Copyright (C) 2012 Free Software Foundation, Inc. This config.status script is free software; the Free Software Foundation gives unlimited permission to copy, distribute and modify it." 
ac_pwd='$ac_pwd' srcdir='$srcdir' INSTALL='$INSTALL' MKDIR_P='$MKDIR_P' AWK='$AWK' test -n "\$AWK" || AWK=awk _ACEOF cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 # The default lists apply if the user does not specify any file. ac_need_defaults=: while test $# != 0 do case $1 in --*=?*) ac_option=`expr "X$1" : 'X\([^=]*\)='` ac_optarg=`expr "X$1" : 'X[^=]*=\(.*\)'` ac_shift=: ;; --*=) ac_option=`expr "X$1" : 'X\([^=]*\)='` ac_optarg= ac_shift=: ;; *) ac_option=$1 ac_optarg=$2 ac_shift=shift ;; esac case $ac_option in # Handling of the options. -recheck | --recheck | --rechec | --reche | --rech | --rec | --re | --r) ac_cs_recheck=: ;; --version | --versio | --versi | --vers | --ver | --ve | --v | -V ) $as_echo "$ac_cs_version"; exit ;; --config | --confi | --conf | --con | --co | --c ) $as_echo "$ac_cs_config"; exit ;; --debug | --debu | --deb | --de | --d | -d ) debug=: ;; --file | --fil | --fi | --f ) $ac_shift case $ac_optarg in *\'*) ac_optarg=`$as_echo "$ac_optarg" | sed "s/'/'\\\\\\\\''/g"` ;; '') as_fn_error $? "missing file argument" ;; esac as_fn_append CONFIG_FILES " '$ac_optarg'" ac_need_defaults=false;; --header | --heade | --head | --hea ) $ac_shift case $ac_optarg in *\'*) ac_optarg=`$as_echo "$ac_optarg" | sed "s/'/'\\\\\\\\''/g"` ;; esac as_fn_append CONFIG_HEADERS " '$ac_optarg'" ac_need_defaults=false;; --he | --h) # Conflict between --help and --header as_fn_error $? "ambiguous option: \`$1' Try \`$0 --help' for more information.";; --help | --hel | -h ) $as_echo "$ac_cs_usage"; exit ;; -q | -quiet | --quiet | --quie | --qui | --qu | --q \ | -silent | --silent | --silen | --sile | --sil | --si | --s) ac_cs_silent=: ;; # This is an error. -*) as_fn_error $? "unrecognized option: \`$1' Try \`$0 --help' for more information." 
;; *) as_fn_append ac_config_targets " $1" ac_need_defaults=false ;; esac shift done ac_configure_extra_args= if $ac_cs_silent; then exec 6>/dev/null ac_configure_extra_args="$ac_configure_extra_args --silent" fi _ACEOF cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 if \$ac_cs_recheck; then set X $SHELL '$0' $ac_configure_args \$ac_configure_extra_args --no-create --no-recursion shift \$as_echo "running CONFIG_SHELL=$SHELL \$*" >&6 CONFIG_SHELL='$SHELL' export CONFIG_SHELL exec "\$@" fi _ACEOF cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 exec 5>>config.log { echo sed 'h;s/./-/g;s/^.../## /;s/...$/ ##/;p;x;p;x' <<_ASBOX ## Running $as_me. ## _ASBOX $as_echo "$ac_log" } >&5 _ACEOF cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 # # INIT-COMMANDS # AMDEP_TRUE="$AMDEP_TRUE" ac_aux_dir="$ac_aux_dir" # The HP-UX ksh and POSIX shell print the target directory to stdout # if CDPATH is set. (unset CDPATH) >/dev/null 2>&1 && unset CDPATH sed_quote_subst='$sed_quote_subst' double_quote_subst='$double_quote_subst' delay_variable_subst='$delay_variable_subst' macro_version='`$ECHO "$macro_version" | $SED "$delay_single_quote_subst"`' macro_revision='`$ECHO "$macro_revision" | $SED "$delay_single_quote_subst"`' enable_shared='`$ECHO "$enable_shared" | $SED "$delay_single_quote_subst"`' enable_static='`$ECHO "$enable_static" | $SED "$delay_single_quote_subst"`' pic_mode='`$ECHO "$pic_mode" | $SED "$delay_single_quote_subst"`' enable_fast_install='`$ECHO "$enable_fast_install" | $SED "$delay_single_quote_subst"`' shared_archive_member_spec='`$ECHO "$shared_archive_member_spec" | $SED "$delay_single_quote_subst"`' SHELL='`$ECHO "$SHELL" | $SED "$delay_single_quote_subst"`' ECHO='`$ECHO "$ECHO" | $SED "$delay_single_quote_subst"`' PATH_SEPARATOR='`$ECHO "$PATH_SEPARATOR" | $SED "$delay_single_quote_subst"`' host_alias='`$ECHO "$host_alias" | $SED "$delay_single_quote_subst"`' host='`$ECHO "$host" | $SED "$delay_single_quote_subst"`' host_os='`$ECHO "$host_os" | $SED 
"$delay_single_quote_subst"`' build_alias='`$ECHO "$build_alias" | $SED "$delay_single_quote_subst"`' build='`$ECHO "$build" | $SED "$delay_single_quote_subst"`' build_os='`$ECHO "$build_os" | $SED "$delay_single_quote_subst"`' SED='`$ECHO "$SED" | $SED "$delay_single_quote_subst"`' Xsed='`$ECHO "$Xsed" | $SED "$delay_single_quote_subst"`' GREP='`$ECHO "$GREP" | $SED "$delay_single_quote_subst"`' EGREP='`$ECHO "$EGREP" | $SED "$delay_single_quote_subst"`' FGREP='`$ECHO "$FGREP" | $SED "$delay_single_quote_subst"`' LD='`$ECHO "$LD" | $SED "$delay_single_quote_subst"`' NM='`$ECHO "$NM" | $SED "$delay_single_quote_subst"`' LN_S='`$ECHO "$LN_S" | $SED "$delay_single_quote_subst"`' max_cmd_len='`$ECHO "$max_cmd_len" | $SED "$delay_single_quote_subst"`' ac_objext='`$ECHO "$ac_objext" | $SED "$delay_single_quote_subst"`' exeext='`$ECHO "$exeext" | $SED "$delay_single_quote_subst"`' lt_unset='`$ECHO "$lt_unset" | $SED "$delay_single_quote_subst"`' lt_SP2NL='`$ECHO "$lt_SP2NL" | $SED "$delay_single_quote_subst"`' lt_NL2SP='`$ECHO "$lt_NL2SP" | $SED "$delay_single_quote_subst"`' lt_cv_to_host_file_cmd='`$ECHO "$lt_cv_to_host_file_cmd" | $SED "$delay_single_quote_subst"`' lt_cv_to_tool_file_cmd='`$ECHO "$lt_cv_to_tool_file_cmd" | $SED "$delay_single_quote_subst"`' reload_flag='`$ECHO "$reload_flag" | $SED "$delay_single_quote_subst"`' reload_cmds='`$ECHO "$reload_cmds" | $SED "$delay_single_quote_subst"`' OBJDUMP='`$ECHO "$OBJDUMP" | $SED "$delay_single_quote_subst"`' deplibs_check_method='`$ECHO "$deplibs_check_method" | $SED "$delay_single_quote_subst"`' file_magic_cmd='`$ECHO "$file_magic_cmd" | $SED "$delay_single_quote_subst"`' file_magic_glob='`$ECHO "$file_magic_glob" | $SED "$delay_single_quote_subst"`' want_nocaseglob='`$ECHO "$want_nocaseglob" | $SED "$delay_single_quote_subst"`' DLLTOOL='`$ECHO "$DLLTOOL" | $SED "$delay_single_quote_subst"`' sharedlib_from_linklib_cmd='`$ECHO "$sharedlib_from_linklib_cmd" | $SED "$delay_single_quote_subst"`' AR='`$ECHO "$AR" | $SED 
"$delay_single_quote_subst"`' AR_FLAGS='`$ECHO "$AR_FLAGS" | $SED "$delay_single_quote_subst"`' archiver_list_spec='`$ECHO "$archiver_list_spec" | $SED "$delay_single_quote_subst"`' STRIP='`$ECHO "$STRIP" | $SED "$delay_single_quote_subst"`' RANLIB='`$ECHO "$RANLIB" | $SED "$delay_single_quote_subst"`' old_postinstall_cmds='`$ECHO "$old_postinstall_cmds" | $SED "$delay_single_quote_subst"`' old_postuninstall_cmds='`$ECHO "$old_postuninstall_cmds" | $SED "$delay_single_quote_subst"`' old_archive_cmds='`$ECHO "$old_archive_cmds" | $SED "$delay_single_quote_subst"`' lock_old_archive_extraction='`$ECHO "$lock_old_archive_extraction" | $SED "$delay_single_quote_subst"`' CC='`$ECHO "$CC" | $SED "$delay_single_quote_subst"`' CFLAGS='`$ECHO "$CFLAGS" | $SED "$delay_single_quote_subst"`' compiler='`$ECHO "$compiler" | $SED "$delay_single_quote_subst"`' GCC='`$ECHO "$GCC" | $SED "$delay_single_quote_subst"`' lt_cv_sys_global_symbol_pipe='`$ECHO "$lt_cv_sys_global_symbol_pipe" | $SED "$delay_single_quote_subst"`' lt_cv_sys_global_symbol_to_cdecl='`$ECHO "$lt_cv_sys_global_symbol_to_cdecl" | $SED "$delay_single_quote_subst"`' lt_cv_sys_global_symbol_to_import='`$ECHO "$lt_cv_sys_global_symbol_to_import" | $SED "$delay_single_quote_subst"`' lt_cv_sys_global_symbol_to_c_name_address='`$ECHO "$lt_cv_sys_global_symbol_to_c_name_address" | $SED "$delay_single_quote_subst"`' lt_cv_sys_global_symbol_to_c_name_address_lib_prefix='`$ECHO "$lt_cv_sys_global_symbol_to_c_name_address_lib_prefix" | $SED "$delay_single_quote_subst"`' lt_cv_nm_interface='`$ECHO "$lt_cv_nm_interface" | $SED "$delay_single_quote_subst"`' nm_file_list_spec='`$ECHO "$nm_file_list_spec" | $SED "$delay_single_quote_subst"`' lt_sysroot='`$ECHO "$lt_sysroot" | $SED "$delay_single_quote_subst"`' lt_cv_truncate_bin='`$ECHO "$lt_cv_truncate_bin" | $SED "$delay_single_quote_subst"`' objdir='`$ECHO "$objdir" | $SED "$delay_single_quote_subst"`' MAGIC_CMD='`$ECHO "$MAGIC_CMD" | $SED "$delay_single_quote_subst"`' 
lt_prog_compiler_no_builtin_flag='`$ECHO "$lt_prog_compiler_no_builtin_flag" | $SED "$delay_single_quote_subst"`' lt_prog_compiler_pic='`$ECHO "$lt_prog_compiler_pic" | $SED "$delay_single_quote_subst"`' lt_prog_compiler_wl='`$ECHO "$lt_prog_compiler_wl" | $SED "$delay_single_quote_subst"`' lt_prog_compiler_static='`$ECHO "$lt_prog_compiler_static" | $SED "$delay_single_quote_subst"`' lt_cv_prog_compiler_c_o='`$ECHO "$lt_cv_prog_compiler_c_o" | $SED "$delay_single_quote_subst"`' need_locks='`$ECHO "$need_locks" | $SED "$delay_single_quote_subst"`' MANIFEST_TOOL='`$ECHO "$MANIFEST_TOOL" | $SED "$delay_single_quote_subst"`' DSYMUTIL='`$ECHO "$DSYMUTIL" | $SED "$delay_single_quote_subst"`' NMEDIT='`$ECHO "$NMEDIT" | $SED "$delay_single_quote_subst"`' LIPO='`$ECHO "$LIPO" | $SED "$delay_single_quote_subst"`' OTOOL='`$ECHO "$OTOOL" | $SED "$delay_single_quote_subst"`' OTOOL64='`$ECHO "$OTOOL64" | $SED "$delay_single_quote_subst"`' libext='`$ECHO "$libext" | $SED "$delay_single_quote_subst"`' shrext_cmds='`$ECHO "$shrext_cmds" | $SED "$delay_single_quote_subst"`' extract_expsyms_cmds='`$ECHO "$extract_expsyms_cmds" | $SED "$delay_single_quote_subst"`' archive_cmds_need_lc='`$ECHO "$archive_cmds_need_lc" | $SED "$delay_single_quote_subst"`' enable_shared_with_static_runtimes='`$ECHO "$enable_shared_with_static_runtimes" | $SED "$delay_single_quote_subst"`' export_dynamic_flag_spec='`$ECHO "$export_dynamic_flag_spec" | $SED "$delay_single_quote_subst"`' whole_archive_flag_spec='`$ECHO "$whole_archive_flag_spec" | $SED "$delay_single_quote_subst"`' compiler_needs_object='`$ECHO "$compiler_needs_object" | $SED "$delay_single_quote_subst"`' old_archive_from_new_cmds='`$ECHO "$old_archive_from_new_cmds" | $SED "$delay_single_quote_subst"`' old_archive_from_expsyms_cmds='`$ECHO "$old_archive_from_expsyms_cmds" | $SED "$delay_single_quote_subst"`' archive_cmds='`$ECHO "$archive_cmds" | $SED "$delay_single_quote_subst"`' archive_expsym_cmds='`$ECHO "$archive_expsym_cmds" | $SED 
"$delay_single_quote_subst"`' module_cmds='`$ECHO "$module_cmds" | $SED "$delay_single_quote_subst"`' module_expsym_cmds='`$ECHO "$module_expsym_cmds" | $SED "$delay_single_quote_subst"`' with_gnu_ld='`$ECHO "$with_gnu_ld" | $SED "$delay_single_quote_subst"`' allow_undefined_flag='`$ECHO "$allow_undefined_flag" | $SED "$delay_single_quote_subst"`' no_undefined_flag='`$ECHO "$no_undefined_flag" | $SED "$delay_single_quote_subst"`' hardcode_libdir_flag_spec='`$ECHO "$hardcode_libdir_flag_spec" | $SED "$delay_single_quote_subst"`' hardcode_libdir_separator='`$ECHO "$hardcode_libdir_separator" | $SED "$delay_single_quote_subst"`' hardcode_direct='`$ECHO "$hardcode_direct" | $SED "$delay_single_quote_subst"`' hardcode_direct_absolute='`$ECHO "$hardcode_direct_absolute" | $SED "$delay_single_quote_subst"`' hardcode_minus_L='`$ECHO "$hardcode_minus_L" | $SED "$delay_single_quote_subst"`' hardcode_shlibpath_var='`$ECHO "$hardcode_shlibpath_var" | $SED "$delay_single_quote_subst"`' hardcode_automatic='`$ECHO "$hardcode_automatic" | $SED "$delay_single_quote_subst"`' inherit_rpath='`$ECHO "$inherit_rpath" | $SED "$delay_single_quote_subst"`' link_all_deplibs='`$ECHO "$link_all_deplibs" | $SED "$delay_single_quote_subst"`' always_export_symbols='`$ECHO "$always_export_symbols" | $SED "$delay_single_quote_subst"`' export_symbols_cmds='`$ECHO "$export_symbols_cmds" | $SED "$delay_single_quote_subst"`' exclude_expsyms='`$ECHO "$exclude_expsyms" | $SED "$delay_single_quote_subst"`' include_expsyms='`$ECHO "$include_expsyms" | $SED "$delay_single_quote_subst"`' prelink_cmds='`$ECHO "$prelink_cmds" | $SED "$delay_single_quote_subst"`' postlink_cmds='`$ECHO "$postlink_cmds" | $SED "$delay_single_quote_subst"`' file_list_spec='`$ECHO "$file_list_spec" | $SED "$delay_single_quote_subst"`' variables_saved_for_relink='`$ECHO "$variables_saved_for_relink" | $SED "$delay_single_quote_subst"`' need_lib_prefix='`$ECHO "$need_lib_prefix" | $SED "$delay_single_quote_subst"`' 
need_version='`$ECHO "$need_version" | $SED "$delay_single_quote_subst"`' version_type='`$ECHO "$version_type" | $SED "$delay_single_quote_subst"`' runpath_var='`$ECHO "$runpath_var" | $SED "$delay_single_quote_subst"`' shlibpath_var='`$ECHO "$shlibpath_var" | $SED "$delay_single_quote_subst"`' shlibpath_overrides_runpath='`$ECHO "$shlibpath_overrides_runpath" | $SED "$delay_single_quote_subst"`' libname_spec='`$ECHO "$libname_spec" | $SED "$delay_single_quote_subst"`' library_names_spec='`$ECHO "$library_names_spec" | $SED "$delay_single_quote_subst"`' soname_spec='`$ECHO "$soname_spec" | $SED "$delay_single_quote_subst"`' install_override_mode='`$ECHO "$install_override_mode" | $SED "$delay_single_quote_subst"`' postinstall_cmds='`$ECHO "$postinstall_cmds" | $SED "$delay_single_quote_subst"`' postuninstall_cmds='`$ECHO "$postuninstall_cmds" | $SED "$delay_single_quote_subst"`' finish_cmds='`$ECHO "$finish_cmds" | $SED "$delay_single_quote_subst"`' finish_eval='`$ECHO "$finish_eval" | $SED "$delay_single_quote_subst"`' hardcode_into_libs='`$ECHO "$hardcode_into_libs" | $SED "$delay_single_quote_subst"`' sys_lib_search_path_spec='`$ECHO "$sys_lib_search_path_spec" | $SED "$delay_single_quote_subst"`' configure_time_dlsearch_path='`$ECHO "$configure_time_dlsearch_path" | $SED "$delay_single_quote_subst"`' configure_time_lt_sys_library_path='`$ECHO "$configure_time_lt_sys_library_path" | $SED "$delay_single_quote_subst"`' hardcode_action='`$ECHO "$hardcode_action" | $SED "$delay_single_quote_subst"`' enable_dlopen='`$ECHO "$enable_dlopen" | $SED "$delay_single_quote_subst"`' enable_dlopen_self='`$ECHO "$enable_dlopen_self" | $SED "$delay_single_quote_subst"`' enable_dlopen_self_static='`$ECHO "$enable_dlopen_self_static" | $SED "$delay_single_quote_subst"`' old_striplib='`$ECHO "$old_striplib" | $SED "$delay_single_quote_subst"`' striplib='`$ECHO "$striplib" | $SED "$delay_single_quote_subst"`' compiler_lib_search_dirs='`$ECHO "$compiler_lib_search_dirs" | $SED 
"$delay_single_quote_subst"`' predep_objects='`$ECHO "$predep_objects" | $SED "$delay_single_quote_subst"`' postdep_objects='`$ECHO "$postdep_objects" | $SED "$delay_single_quote_subst"`' predeps='`$ECHO "$predeps" | $SED "$delay_single_quote_subst"`' postdeps='`$ECHO "$postdeps" | $SED "$delay_single_quote_subst"`' compiler_lib_search_path='`$ECHO "$compiler_lib_search_path" | $SED "$delay_single_quote_subst"`' LD_CXX='`$ECHO "$LD_CXX" | $SED "$delay_single_quote_subst"`' reload_flag_CXX='`$ECHO "$reload_flag_CXX" | $SED "$delay_single_quote_subst"`' reload_cmds_CXX='`$ECHO "$reload_cmds_CXX" | $SED "$delay_single_quote_subst"`' old_archive_cmds_CXX='`$ECHO "$old_archive_cmds_CXX" | $SED "$delay_single_quote_subst"`' compiler_CXX='`$ECHO "$compiler_CXX" | $SED "$delay_single_quote_subst"`' GCC_CXX='`$ECHO "$GCC_CXX" | $SED "$delay_single_quote_subst"`' lt_prog_compiler_no_builtin_flag_CXX='`$ECHO "$lt_prog_compiler_no_builtin_flag_CXX" | $SED "$delay_single_quote_subst"`' lt_prog_compiler_pic_CXX='`$ECHO "$lt_prog_compiler_pic_CXX" | $SED "$delay_single_quote_subst"`' lt_prog_compiler_wl_CXX='`$ECHO "$lt_prog_compiler_wl_CXX" | $SED "$delay_single_quote_subst"`' lt_prog_compiler_static_CXX='`$ECHO "$lt_prog_compiler_static_CXX" | $SED "$delay_single_quote_subst"`' lt_cv_prog_compiler_c_o_CXX='`$ECHO "$lt_cv_prog_compiler_c_o_CXX" | $SED "$delay_single_quote_subst"`' archive_cmds_need_lc_CXX='`$ECHO "$archive_cmds_need_lc_CXX" | $SED "$delay_single_quote_subst"`' enable_shared_with_static_runtimes_CXX='`$ECHO "$enable_shared_with_static_runtimes_CXX" | $SED "$delay_single_quote_subst"`' export_dynamic_flag_spec_CXX='`$ECHO "$export_dynamic_flag_spec_CXX" | $SED "$delay_single_quote_subst"`' whole_archive_flag_spec_CXX='`$ECHO "$whole_archive_flag_spec_CXX" | $SED "$delay_single_quote_subst"`' compiler_needs_object_CXX='`$ECHO "$compiler_needs_object_CXX" | $SED "$delay_single_quote_subst"`' old_archive_from_new_cmds_CXX='`$ECHO "$old_archive_from_new_cmds_CXX" | 
$SED "$delay_single_quote_subst"`' old_archive_from_expsyms_cmds_CXX='`$ECHO "$old_archive_from_expsyms_cmds_CXX" | $SED "$delay_single_quote_subst"`' archive_cmds_CXX='`$ECHO "$archive_cmds_CXX" | $SED "$delay_single_quote_subst"`' archive_expsym_cmds_CXX='`$ECHO "$archive_expsym_cmds_CXX" | $SED "$delay_single_quote_subst"`' module_cmds_CXX='`$ECHO "$module_cmds_CXX" | $SED "$delay_single_quote_subst"`' module_expsym_cmds_CXX='`$ECHO "$module_expsym_cmds_CXX" | $SED "$delay_single_quote_subst"`' with_gnu_ld_CXX='`$ECHO "$with_gnu_ld_CXX" | $SED "$delay_single_quote_subst"`' allow_undefined_flag_CXX='`$ECHO "$allow_undefined_flag_CXX" | $SED "$delay_single_quote_subst"`' no_undefined_flag_CXX='`$ECHO "$no_undefined_flag_CXX" | $SED "$delay_single_quote_subst"`' hardcode_libdir_flag_spec_CXX='`$ECHO "$hardcode_libdir_flag_spec_CXX" | $SED "$delay_single_quote_subst"`' hardcode_libdir_separator_CXX='`$ECHO "$hardcode_libdir_separator_CXX" | $SED "$delay_single_quote_subst"`' hardcode_direct_CXX='`$ECHO "$hardcode_direct_CXX" | $SED "$delay_single_quote_subst"`' hardcode_direct_absolute_CXX='`$ECHO "$hardcode_direct_absolute_CXX" | $SED "$delay_single_quote_subst"`' hardcode_minus_L_CXX='`$ECHO "$hardcode_minus_L_CXX" | $SED "$delay_single_quote_subst"`' hardcode_shlibpath_var_CXX='`$ECHO "$hardcode_shlibpath_var_CXX" | $SED "$delay_single_quote_subst"`' hardcode_automatic_CXX='`$ECHO "$hardcode_automatic_CXX" | $SED "$delay_single_quote_subst"`' inherit_rpath_CXX='`$ECHO "$inherit_rpath_CXX" | $SED "$delay_single_quote_subst"`' link_all_deplibs_CXX='`$ECHO "$link_all_deplibs_CXX" | $SED "$delay_single_quote_subst"`' always_export_symbols_CXX='`$ECHO "$always_export_symbols_CXX" | $SED "$delay_single_quote_subst"`' export_symbols_cmds_CXX='`$ECHO "$export_symbols_cmds_CXX" | $SED "$delay_single_quote_subst"`' exclude_expsyms_CXX='`$ECHO "$exclude_expsyms_CXX" | $SED "$delay_single_quote_subst"`' include_expsyms_CXX='`$ECHO "$include_expsyms_CXX" | $SED 
"$delay_single_quote_subst"`' prelink_cmds_CXX='`$ECHO "$prelink_cmds_CXX" | $SED "$delay_single_quote_subst"`' postlink_cmds_CXX='`$ECHO "$postlink_cmds_CXX" | $SED "$delay_single_quote_subst"`' file_list_spec_CXX='`$ECHO "$file_list_spec_CXX" | $SED "$delay_single_quote_subst"`' hardcode_action_CXX='`$ECHO "$hardcode_action_CXX" | $SED "$delay_single_quote_subst"`' compiler_lib_search_dirs_CXX='`$ECHO "$compiler_lib_search_dirs_CXX" | $SED "$delay_single_quote_subst"`' predep_objects_CXX='`$ECHO "$predep_objects_CXX" | $SED "$delay_single_quote_subst"`' postdep_objects_CXX='`$ECHO "$postdep_objects_CXX" | $SED "$delay_single_quote_subst"`' predeps_CXX='`$ECHO "$predeps_CXX" | $SED "$delay_single_quote_subst"`' postdeps_CXX='`$ECHO "$postdeps_CXX" | $SED "$delay_single_quote_subst"`' compiler_lib_search_path_CXX='`$ECHO "$compiler_lib_search_path_CXX" | $SED "$delay_single_quote_subst"`' LTCC='$LTCC' LTCFLAGS='$LTCFLAGS' compiler='$compiler_DEFAULT' # A function that is used when there is no print builtin or printf. func_fallback_echo () { eval 'cat <<_LTECHO_EOF \$1 _LTECHO_EOF' } # Quote evaled strings. 
for var in SHELL \ ECHO \ PATH_SEPARATOR \ SED \ GREP \ EGREP \ FGREP \ LD \ NM \ LN_S \ lt_SP2NL \ lt_NL2SP \ reload_flag \ OBJDUMP \ deplibs_check_method \ file_magic_cmd \ file_magic_glob \ want_nocaseglob \ DLLTOOL \ sharedlib_from_linklib_cmd \ AR \ AR_FLAGS \ archiver_list_spec \ STRIP \ RANLIB \ CC \ CFLAGS \ compiler \ lt_cv_sys_global_symbol_pipe \ lt_cv_sys_global_symbol_to_cdecl \ lt_cv_sys_global_symbol_to_import \ lt_cv_sys_global_symbol_to_c_name_address \ lt_cv_sys_global_symbol_to_c_name_address_lib_prefix \ lt_cv_nm_interface \ nm_file_list_spec \ lt_cv_truncate_bin \ lt_prog_compiler_no_builtin_flag \ lt_prog_compiler_pic \ lt_prog_compiler_wl \ lt_prog_compiler_static \ lt_cv_prog_compiler_c_o \ need_locks \ MANIFEST_TOOL \ DSYMUTIL \ NMEDIT \ LIPO \ OTOOL \ OTOOL64 \ shrext_cmds \ export_dynamic_flag_spec \ whole_archive_flag_spec \ compiler_needs_object \ with_gnu_ld \ allow_undefined_flag \ no_undefined_flag \ hardcode_libdir_flag_spec \ hardcode_libdir_separator \ exclude_expsyms \ include_expsyms \ file_list_spec \ variables_saved_for_relink \ libname_spec \ library_names_spec \ soname_spec \ install_override_mode \ finish_eval \ old_striplib \ striplib \ compiler_lib_search_dirs \ predep_objects \ postdep_objects \ predeps \ postdeps \ compiler_lib_search_path \ LD_CXX \ reload_flag_CXX \ compiler_CXX \ lt_prog_compiler_no_builtin_flag_CXX \ lt_prog_compiler_pic_CXX \ lt_prog_compiler_wl_CXX \ lt_prog_compiler_static_CXX \ lt_cv_prog_compiler_c_o_CXX \ export_dynamic_flag_spec_CXX \ whole_archive_flag_spec_CXX \ compiler_needs_object_CXX \ with_gnu_ld_CXX \ allow_undefined_flag_CXX \ no_undefined_flag_CXX \ hardcode_libdir_flag_spec_CXX \ hardcode_libdir_separator_CXX \ exclude_expsyms_CXX \ include_expsyms_CXX \ file_list_spec_CXX \ compiler_lib_search_dirs_CXX \ predep_objects_CXX \ postdep_objects_CXX \ predeps_CXX \ postdeps_CXX \ compiler_lib_search_path_CXX; do case \`eval \\\\\$ECHO \\\\""\\\\\$\$var"\\\\"\` in *[\\\\\\\`\\"\\\$]*) 
eval "lt_\$var=\\\\\\"\\\`\\\$ECHO \\"\\\$\$var\\" | \\\$SED \\"\\\$sed_quote_subst\\"\\\`\\\\\\"" ## exclude from sc_prohibit_nested_quotes ;; *) eval "lt_\$var=\\\\\\"\\\$\$var\\\\\\"" ;; esac done # Double-quote double-evaled strings. for var in reload_cmds \ old_postinstall_cmds \ old_postuninstall_cmds \ old_archive_cmds \ extract_expsyms_cmds \ old_archive_from_new_cmds \ old_archive_from_expsyms_cmds \ archive_cmds \ archive_expsym_cmds \ module_cmds \ module_expsym_cmds \ export_symbols_cmds \ prelink_cmds \ postlink_cmds \ postinstall_cmds \ postuninstall_cmds \ finish_cmds \ sys_lib_search_path_spec \ configure_time_dlsearch_path \ configure_time_lt_sys_library_path \ reload_cmds_CXX \ old_archive_cmds_CXX \ old_archive_from_new_cmds_CXX \ old_archive_from_expsyms_cmds_CXX \ archive_cmds_CXX \ archive_expsym_cmds_CXX \ module_cmds_CXX \ module_expsym_cmds_CXX \ export_symbols_cmds_CXX \ prelink_cmds_CXX \ postlink_cmds_CXX; do case \`eval \\\\\$ECHO \\\\""\\\\\$\$var"\\\\"\` in *[\\\\\\\`\\"\\\$]*) eval "lt_\$var=\\\\\\"\\\`\\\$ECHO \\"\\\$\$var\\" | \\\$SED -e \\"\\\$double_quote_subst\\" -e \\"\\\$sed_quote_subst\\" -e \\"\\\$delay_variable_subst\\"\\\`\\\\\\"" ## exclude from sc_prohibit_nested_quotes ;; *) eval "lt_\$var=\\\\\\"\\\$\$var\\\\\\"" ;; esac done ac_aux_dir='$ac_aux_dir' # See if we are running on zsh, and set the options that allow our # commands through without removal of \ escapes INIT. if test -n "\${ZSH_VERSION+set}"; then setopt NO_GLOB_SUBST fi PACKAGE='$PACKAGE' VERSION='$VERSION' RM='$RM' ofile='$ofile' ac_cv_header_unistd_h=$ac_cv_header_unistd_h ac_cv_header_inttypes_h=$ac_cv_header_inttypes_h ac_cv_header_sys_param_h=$ac_cv_header_sys_param_h ax_multithread=$ax_multithread _ACEOF cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 # Handling of arguments. 
for ac_config_target in $ac_config_targets do case $ac_config_target in "tsk/tsk_config.h") CONFIG_HEADERS="$CONFIG_HEADERS tsk/tsk_config.h" ;; "depfiles") CONFIG_COMMANDS="$CONFIG_COMMANDS depfiles" ;; "libtool") CONFIG_COMMANDS="$CONFIG_COMMANDS libtool" ;; "tsk/tsk_incs.h") CONFIG_COMMANDS="$CONFIG_COMMANDS tsk/tsk_incs.h" ;; "Makefile") CONFIG_FILES="$CONFIG_FILES Makefile" ;; "tsk/Makefile") CONFIG_FILES="$CONFIG_FILES tsk/Makefile" ;; "tsk/base/Makefile") CONFIG_FILES="$CONFIG_FILES tsk/base/Makefile" ;; "tsk/img/Makefile") CONFIG_FILES="$CONFIG_FILES tsk/img/Makefile" ;; "tsk/vs/Makefile") CONFIG_FILES="$CONFIG_FILES tsk/vs/Makefile" ;; "tsk/fs/Makefile") CONFIG_FILES="$CONFIG_FILES tsk/fs/Makefile" ;; "tsk/hashdb/Makefile") CONFIG_FILES="$CONFIG_FILES tsk/hashdb/Makefile" ;; "tsk/auto/Makefile") CONFIG_FILES="$CONFIG_FILES tsk/auto/Makefile" ;; "tsk/pool/Makefile") CONFIG_FILES="$CONFIG_FILES tsk/pool/Makefile" ;; "tsk/util/Makefile") CONFIG_FILES="$CONFIG_FILES tsk/util/Makefile" ;; "tools/Makefile") CONFIG_FILES="$CONFIG_FILES tools/Makefile" ;; "tools/imgtools/Makefile") CONFIG_FILES="$CONFIG_FILES tools/imgtools/Makefile" ;; "tools/vstools/Makefile") CONFIG_FILES="$CONFIG_FILES tools/vstools/Makefile" ;; "tools/fstools/Makefile") CONFIG_FILES="$CONFIG_FILES tools/fstools/Makefile" ;; "tools/hashtools/Makefile") CONFIG_FILES="$CONFIG_FILES tools/hashtools/Makefile" ;; "tools/srchtools/Makefile") CONFIG_FILES="$CONFIG_FILES tools/srchtools/Makefile" ;; "tools/autotools/Makefile") CONFIG_FILES="$CONFIG_FILES tools/autotools/Makefile" ;; "tools/pooltools/Makefile") CONFIG_FILES="$CONFIG_FILES tools/pooltools/Makefile" ;; "tools/sorter/Makefile") CONFIG_FILES="$CONFIG_FILES tools/sorter/Makefile" ;; "tools/timeline/Makefile") CONFIG_FILES="$CONFIG_FILES tools/timeline/Makefile" ;; "tools/fiwalk/Makefile") CONFIG_FILES="$CONFIG_FILES tools/fiwalk/Makefile" ;; "tools/fiwalk/src/Makefile") CONFIG_FILES="$CONFIG_FILES tools/fiwalk/src/Makefile" ;; 
"tools/fiwalk/plugins/Makefile") CONFIG_FILES="$CONFIG_FILES tools/fiwalk/plugins/Makefile" ;; "tests/Makefile") CONFIG_FILES="$CONFIG_FILES tests/Makefile" ;; "samples/Makefile") CONFIG_FILES="$CONFIG_FILES samples/Makefile" ;; "man/Makefile") CONFIG_FILES="$CONFIG_FILES man/Makefile" ;; "bindings/java/Makefile") CONFIG_FILES="$CONFIG_FILES bindings/java/Makefile" ;; "bindings/java/jni/Makefile") CONFIG_FILES="$CONFIG_FILES bindings/java/jni/Makefile" ;; "case-uco/java/Makefile") CONFIG_FILES="$CONFIG_FILES case-uco/java/Makefile" ;; "unit_tests/Makefile") CONFIG_FILES="$CONFIG_FILES unit_tests/Makefile" ;; "unit_tests/base/Makefile") CONFIG_FILES="$CONFIG_FILES unit_tests/base/Makefile" ;; *) as_fn_error $? "invalid argument: \`$ac_config_target'" "$LINENO" 5;; esac done # If the user did not use the arguments to specify the items to instantiate, # then the envvar interface is used. Set only those that are not. # We use the long form for the default assignment because of an extremely # bizarre bug on SunOS 4.1.3. if $ac_need_defaults; then test "${CONFIG_FILES+set}" = set || CONFIG_FILES=$config_files test "${CONFIG_HEADERS+set}" = set || CONFIG_HEADERS=$config_headers test "${CONFIG_COMMANDS+set}" = set || CONFIG_COMMANDS=$config_commands fi # Have a temporary directory for convenience. Make it in the build tree # simply because there is no reason against having it here, and in addition, # creating and moving files from /tmp can sometimes cause problems. # Hook for its removal unless debugging. # Note that there is a small window in which the directory will not be cleaned: # after its creation but before its name has been assigned to `$tmp'. $debug || { tmp= ac_tmp= trap 'exit_status=$? : "${ac_tmp:=$tmp}" { test ! -d "$ac_tmp" || rm -fr "$ac_tmp"; } && exit $exit_status ' 0 trap 'as_fn_exit 1' 1 2 13 15 } # Create a (secure) tmp directory for tmp files. 
{ tmp=`(umask 077 && mktemp -d "./confXXXXXX") 2>/dev/null` && test -d "$tmp" } || { tmp=./conf$$-$RANDOM (umask 077 && mkdir "$tmp") } || as_fn_error $? "cannot create a temporary directory in ." "$LINENO" 5 ac_tmp=$tmp # Set up the scripts for CONFIG_FILES section. # No need to generate them if there are no CONFIG_FILES. # This happens for instance with `./config.status config.h'. if test -n "$CONFIG_FILES"; then ac_cr=`echo X | tr X '\015'` # On cygwin, bash can eat \r inside `` if the user requested igncr. # But we know of no other shell where ac_cr would be empty at this # point, so we can use a bashism as a fallback. if test "x$ac_cr" = x; then eval ac_cr=\$\'\\r\' fi ac_cs_awk_cr=`$AWK 'BEGIN { print "a\rb" }' /dev/null` if test "$ac_cs_awk_cr" = "a${ac_cr}b"; then ac_cs_awk_cr='\\r' else ac_cs_awk_cr=$ac_cr fi echo 'BEGIN {' >"$ac_tmp/subs1.awk" && _ACEOF { echo "cat >conf$$subs.awk <<_ACEOF" && echo "$ac_subst_vars" | sed 's/.*/&!$&$ac_delim/' && echo "_ACEOF" } >conf$$subs.sh || as_fn_error $? "could not make $CONFIG_STATUS" "$LINENO" 5 ac_delim_num=`echo "$ac_subst_vars" | grep -c '^'` ac_delim='%!_!# ' for ac_last_try in false false false false false :; do . ./conf$$subs.sh || as_fn_error $? "could not make $CONFIG_STATUS" "$LINENO" 5 ac_delim_n=`sed -n "s/.*$ac_delim\$/X/p" conf$$subs.awk | grep -c X` if test $ac_delim_n = $ac_delim_num; then break elif $ac_last_try; then as_fn_error $? "could not make $CONFIG_STATUS" "$LINENO" 5 else ac_delim="$ac_delim!$ac_delim _$ac_delim!! 
" fi done rm -f conf$$subs.sh cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 cat >>"\$ac_tmp/subs1.awk" <<\\_ACAWK && _ACEOF sed -n ' h s/^/S["/; s/!.*/"]=/ p g s/^[^!]*!// :repl t repl s/'"$ac_delim"'$// t delim :nl h s/\(.\{148\}\)..*/\1/ t more1 s/["\\]/\\&/g; s/^/"/; s/$/\\n"\\/ p n b repl :more1 s/["\\]/\\&/g; s/^/"/; s/$/"\\/ p g s/.\{148\}// t nl :delim h s/\(.\{148\}\)..*/\1/ t more2 s/["\\]/\\&/g; s/^/"/; s/$/"/ p b :more2 s/["\\]/\\&/g; s/^/"/; s/$/"\\/ p g s/.\{148\}// t delim ' >$CONFIG_STATUS || ac_write_fail=1 rm -f conf$$subs.awk cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 _ACAWK cat >>"\$ac_tmp/subs1.awk" <<_ACAWK && for (key in S) S_is_set[key] = 1 FS = "" } { line = $ 0 nfields = split(line, field, "@") substed = 0 len = length(field[1]) for (i = 2; i < nfields; i++) { key = field[i] keylen = length(key) if (S_is_set[key]) { value = S[key] line = substr(line, 1, len) "" value "" substr(line, len + keylen + 3) len += length(value) + length(field[++i]) substed = 1 } else len += 1 + keylen } print line } _ACAWK _ACEOF cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 if sed "s/$ac_cr//" < /dev/null > /dev/null 2>&1; then sed "s/$ac_cr\$//; s/$ac_cr/$ac_cs_awk_cr/g" else cat fi < "$ac_tmp/subs1.awk" > "$ac_tmp/subs.awk" \ || as_fn_error $? "could not setup config files machinery" "$LINENO" 5 _ACEOF # VPATH may cause trouble with some makes, so we remove sole $(srcdir), # ${srcdir} and @srcdir@ entries from VPATH if srcdir is ".", strip leading and # trailing colons and then remove the whole line if VPATH becomes empty # (actually we leave an empty line to preserve line numbers). if test "x$srcdir" = x.; then ac_vpsub='/^[ ]*VPATH[ ]*=[ ]*/{ h s/// s/^/:/ s/[ ]*$/:/ s/:\$(srcdir):/:/g s/:\${srcdir}:/:/g s/:@srcdir@:/:/g s/^:*// s/:*$// x s/\(=[ ]*\).*/\1/ G s/\n// s/^[^=]*=[ ]*$// }' fi cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 fi # test -n "$CONFIG_FILES" # Set up the scripts for CONFIG_HEADERS section. 
# No need to generate them if there are no CONFIG_HEADERS. # This happens for instance with `./config.status Makefile'. if test -n "$CONFIG_HEADERS"; then cat >"$ac_tmp/defines.awk" <<\_ACAWK || BEGIN { _ACEOF # Transform confdefs.h into an awk script `defines.awk', embedded as # here-document in config.status, that substitutes the proper values into # config.h.in to produce config.h. # Create a delimiter string that does not exist in confdefs.h, to ease # handling of long lines. ac_delim='%!_!# ' for ac_last_try in false false :; do ac_tt=`sed -n "/$ac_delim/p" confdefs.h` if test -z "$ac_tt"; then break elif $ac_last_try; then as_fn_error $? "could not make $CONFIG_HEADERS" "$LINENO" 5 else ac_delim="$ac_delim!$ac_delim _$ac_delim!! " fi done # For the awk script, D is an array of macro values keyed by name, # likewise P contains macro parameters if any. Preserve backslash # newline sequences. ac_word_re=[_$as_cr_Letters][_$as_cr_alnum]* sed -n ' s/.\{148\}/&'"$ac_delim"'/g t rset :rset s/^[ ]*#[ ]*define[ ][ ]*/ / t def d :def s/\\$// t bsnl s/["\\]/\\&/g s/^ \('"$ac_word_re"'\)\(([^()]*)\)[ ]*\(.*\)/P["\1"]="\2"\ D["\1"]=" \3"/p s/^ \('"$ac_word_re"'\)[ ]*\(.*\)/D["\1"]=" \2"/p d :bsnl s/["\\]/\\&/g s/^ \('"$ac_word_re"'\)\(([^()]*)\)[ ]*\(.*\)/P["\1"]="\2"\ D["\1"]=" \3\\\\\\n"\\/p t cont s/^ \('"$ac_word_re"'\)[ ]*\(.*\)/D["\1"]=" \2\\\\\\n"\\/p t cont d :cont n s/.\{148\}/&'"$ac_delim"'/g t clear :clear s/\\$// t bsnlc s/["\\]/\\&/g; s/^/"/; s/$/"/p d :bsnlc s/["\\]/\\&/g; s/^/"/; s/$/\\\\\\n"\\/p b cont ' >$CONFIG_STATUS || ac_write_fail=1 cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 for (key in D) D_is_set[key] = 1 FS = "" } /^[\t ]*#[\t ]*(define|undef)[\t ]+$ac_word_re([\t (]|\$)/ { line = \$ 0 split(line, arg, " ") if (arg[1] == "#") { defundef = arg[2] mac1 = arg[3] } else { defundef = substr(arg[1], 2) mac1 = arg[2] } split(mac1, mac2, "(") #) macro = mac2[1] prefix = substr(line, 1, index(line, defundef) - 1) if (D_is_set[macro]) { # Preserve the 
white space surrounding the "#". print prefix "define", macro P[macro] D[macro] next } else { # Replace #undef with comments. This is necessary, for example, # in the case of _POSIX_SOURCE, which is predefined and required # on some systems where configure will not decide to define it. if (defundef == "undef") { print "/*", prefix defundef, macro, "*/" next } } } { print } _ACAWK _ACEOF cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 as_fn_error $? "could not setup config headers machinery" "$LINENO" 5 fi # test -n "$CONFIG_HEADERS" eval set X " :F $CONFIG_FILES :H $CONFIG_HEADERS :C $CONFIG_COMMANDS" shift for ac_tag do case $ac_tag in :[FHLC]) ac_mode=$ac_tag; continue;; esac case $ac_mode$ac_tag in :[FHL]*:*);; :L* | :C*:*) as_fn_error $? "invalid tag \`$ac_tag'" "$LINENO" 5;; :[FH]-) ac_tag=-:-;; :[FH]*) ac_tag=$ac_tag:$ac_tag.in;; esac ac_save_IFS=$IFS IFS=: set x $ac_tag IFS=$ac_save_IFS shift ac_file=$1 shift case $ac_mode in :L) ac_source=$1;; :[FH]) ac_file_inputs= for ac_f do case $ac_f in -) ac_f="$ac_tmp/stdin";; *) # Look for the file first in the build tree, then in the source tree # (if the path is not absolute). The absolute path cannot be DOS-style, # because $ac_f cannot contain `:'. test -f "$ac_f" || case $ac_f in [\\/$]*) false;; *) test -f "$srcdir/$ac_f" && ac_f="$srcdir/$ac_f";; esac || as_fn_error 1 "cannot find input file: \`$ac_f'" "$LINENO" 5;; esac case $ac_f in *\'*) ac_f=`$as_echo "$ac_f" | sed "s/'/'\\\\\\\\''/g"`;; esac as_fn_append ac_file_inputs " '$ac_f'" done # Let's still pretend it is `configure' which instantiates (i.e., don't # use $as_me), people would be surprised to read: # /* config.h. Generated by config.status. */ configure_input='Generated from '` $as_echo "$*" | sed 's|^[^:]*/||;s|:[^:]*/|, |g' `' by configure.' if test x"$ac_file" != x-; then configure_input="$ac_file. 
$configure_input" { $as_echo "$as_me:${as_lineno-$LINENO}: creating $ac_file" >&5 $as_echo "$as_me: creating $ac_file" >&6;} fi # Neutralize special characters interpreted by sed in replacement strings. case $configure_input in #( *\&* | *\|* | *\\* ) ac_sed_conf_input=`$as_echo "$configure_input" | sed 's/[\\\\&|]/\\\\&/g'`;; #( *) ac_sed_conf_input=$configure_input;; esac case $ac_tag in *:-:* | *:-) cat >"$ac_tmp/stdin" \ || as_fn_error $? "could not create $ac_file" "$LINENO" 5 ;; esac ;; esac ac_dir=`$as_dirname -- "$ac_file" || $as_expr X"$ac_file" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \ X"$ac_file" : 'X\(//\)[^/]' \| \ X"$ac_file" : 'X\(//\)$' \| \ X"$ac_file" : 'X\(/\)' \| . 2>/dev/null || $as_echo X"$ac_file" | sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{ s//\1/ q } /^X\(\/\/\)[^/].*/{ s//\1/ q } /^X\(\/\/\)$/{ s//\1/ q } /^X\(\/\).*/{ s//\1/ q } s/.*/./; q'` as_dir="$ac_dir"; as_fn_mkdir_p ac_builddir=. case "$ac_dir" in .) ac_dir_suffix= ac_top_builddir_sub=. ac_top_build_prefix= ;; *) ac_dir_suffix=/`$as_echo "$ac_dir" | sed 's|^\.[\\/]||'` # A ".." for each directory in $ac_dir_suffix. ac_top_builddir_sub=`$as_echo "$ac_dir_suffix" | sed 's|/[^\\/]*|/..|g;s|/||'` case $ac_top_builddir_sub in "") ac_top_builddir_sub=. ac_top_build_prefix= ;; *) ac_top_build_prefix=$ac_top_builddir_sub/ ;; esac ;; esac ac_abs_top_builddir=$ac_pwd ac_abs_builddir=$ac_pwd$ac_dir_suffix # for backward compatibility: ac_top_builddir=$ac_top_build_prefix case $srcdir in .) # We are building in place. ac_srcdir=. ac_top_srcdir=$ac_top_builddir_sub ac_abs_top_srcdir=$ac_pwd ;; [\\/]* | ?:[\\/]* ) # Absolute name. ac_srcdir=$srcdir$ac_dir_suffix; ac_top_srcdir=$srcdir ac_abs_top_srcdir=$srcdir ;; *) # Relative name. 
ac_srcdir=$ac_top_build_prefix$srcdir$ac_dir_suffix ac_top_srcdir=$ac_top_build_prefix$srcdir ac_abs_top_srcdir=$ac_pwd/$srcdir ;; esac ac_abs_srcdir=$ac_abs_top_srcdir$ac_dir_suffix case $ac_mode in :F) # # CONFIG_FILE # case $INSTALL in [\\/$]* | ?:[\\/]* ) ac_INSTALL=$INSTALL ;; *) ac_INSTALL=$ac_top_build_prefix$INSTALL ;; esac ac_MKDIR_P=$MKDIR_P case $MKDIR_P in [\\/$]* | ?:[\\/]* ) ;; */*) ac_MKDIR_P=$ac_top_build_prefix$MKDIR_P ;; esac _ACEOF cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 # If the template does not know about datarootdir, expand it. # FIXME: This hack should be removed a few years after 2.60. ac_datarootdir_hack=; ac_datarootdir_seen= ac_sed_dataroot=' /datarootdir/ { p q } /@datadir@/p /@docdir@/p /@infodir@/p /@localedir@/p /@mandir@/p' case `eval "sed -n \"\$ac_sed_dataroot\" $ac_file_inputs"` in *datarootdir*) ac_datarootdir_seen=yes;; *@datadir@*|*@docdir@*|*@infodir@*|*@localedir@*|*@mandir@*) { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $ac_file_inputs seems to ignore the --datarootdir setting" >&5 $as_echo "$as_me: WARNING: $ac_file_inputs seems to ignore the --datarootdir setting" >&2;} _ACEOF cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 ac_datarootdir_hack=' s&@datadir@&$datadir&g s&@docdir@&$docdir&g s&@infodir@&$infodir&g s&@localedir@&$localedir&g s&@mandir@&$mandir&g s&\\\${datarootdir}&$datarootdir&g' ;; esac _ACEOF # Neutralize VPATH when `$srcdir' = `.'. # Shell code in configure.ac might set extrasub. # FIXME: do we really want to maintain this feature? 
cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 ac_sed_extra="$ac_vpsub $extrasub _ACEOF cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 :t /@[a-zA-Z_][a-zA-Z_0-9]*@/!b s|@configure_input@|$ac_sed_conf_input|;t t s&@top_builddir@&$ac_top_builddir_sub&;t t s&@top_build_prefix@&$ac_top_build_prefix&;t t s&@srcdir@&$ac_srcdir&;t t s&@abs_srcdir@&$ac_abs_srcdir&;t t s&@top_srcdir@&$ac_top_srcdir&;t t s&@abs_top_srcdir@&$ac_abs_top_srcdir&;t t s&@builddir@&$ac_builddir&;t t s&@abs_builddir@&$ac_abs_builddir&;t t s&@abs_top_builddir@&$ac_abs_top_builddir&;t t s&@INSTALL@&$ac_INSTALL&;t t s&@MKDIR_P@&$ac_MKDIR_P&;t t $ac_datarootdir_hack " eval sed \"\$ac_sed_extra\" "$ac_file_inputs" | $AWK -f "$ac_tmp/subs.awk" \ >$ac_tmp/out || as_fn_error $? "could not create $ac_file" "$LINENO" 5 test -z "$ac_datarootdir_hack$ac_datarootdir_seen" && { ac_out=`sed -n '/\${datarootdir}/p' "$ac_tmp/out"`; test -n "$ac_out"; } && { ac_out=`sed -n '/^[ ]*datarootdir[ ]*:*=/p' \ "$ac_tmp/out"`; test -z "$ac_out"; } && { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $ac_file contains a reference to the variable \`datarootdir' which seems to be undefined. Please make sure it is defined" >&5 $as_echo "$as_me: WARNING: $ac_file contains a reference to the variable \`datarootdir' which seems to be undefined. Please make sure it is defined" >&2;} rm -f "$ac_tmp/stdin" case $ac_file in -) cat "$ac_tmp/out" && rm -f "$ac_tmp/out";; *) rm -f "$ac_file" && mv "$ac_tmp/out" "$ac_file";; esac \ || as_fn_error $? "could not create $ac_file" "$LINENO" 5 ;; :H) # # CONFIG_HEADER # if test x"$ac_file" != x-; then { $as_echo "/* $configure_input */" \ && eval '$AWK -f "$ac_tmp/defines.awk"' "$ac_file_inputs" } >"$ac_tmp/config.h" \ || as_fn_error $? 
"could not create $ac_file" "$LINENO" 5 if diff "$ac_file" "$ac_tmp/config.h" >/dev/null 2>&1; then { $as_echo "$as_me:${as_lineno-$LINENO}: $ac_file is unchanged" >&5 $as_echo "$as_me: $ac_file is unchanged" >&6;} else rm -f "$ac_file" mv "$ac_tmp/config.h" "$ac_file" \ || as_fn_error $? "could not create $ac_file" "$LINENO" 5 fi else $as_echo "/* $configure_input */" \ && eval '$AWK -f "$ac_tmp/defines.awk"' "$ac_file_inputs" \ || as_fn_error $? "could not create -" "$LINENO" 5 fi # Compute "$ac_file"'s index in $config_headers. _am_arg="$ac_file" _am_stamp_count=1 for _am_header in $config_headers :; do case $_am_header in $_am_arg | $_am_arg:* ) break ;; * ) _am_stamp_count=`expr $_am_stamp_count + 1` ;; esac done echo "timestamp for $_am_arg" >`$as_dirname -- "$_am_arg" || $as_expr X"$_am_arg" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \ X"$_am_arg" : 'X\(//\)[^/]' \| \ X"$_am_arg" : 'X\(//\)$' \| \ X"$_am_arg" : 'X\(/\)' \| . 2>/dev/null || $as_echo X"$_am_arg" | sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{ s//\1/ q } /^X\(\/\/\)[^/].*/{ s//\1/ q } /^X\(\/\/\)$/{ s//\1/ q } /^X\(\/\).*/{ s//\1/ q } s/.*/./; q'`/stamp-h$_am_stamp_count ;; :C) { $as_echo "$as_me:${as_lineno-$LINENO}: executing $ac_file commands" >&5 $as_echo "$as_me: executing $ac_file commands" >&6;} ;; esac case $ac_file$ac_mode in "depfiles":C) test x"$AMDEP_TRUE" != x"" || { # Older Autoconf quotes --file arguments for eval, but not when files # are listed without --file. Let's play safe and only enable the eval # if we detect the quoting. case $CONFIG_FILES in *\'*) eval set x "$CONFIG_FILES" ;; *) set x $CONFIG_FILES ;; esac shift for mf do # Strip MF so we end up with the name of the file. mf=`echo "$mf" | sed -e 's/:.*$//'` # Check whether this is an Automake generated Makefile or not. # We used to match only the files named 'Makefile.in', but # some people rename them; so instead we look at the file content. 
# Grep'ing the first line is not enough: some people post-process # each Makefile.in and add a new line on top of each file to say so. # Grep'ing the whole file is not good either: AIX grep has a line # limit of 2048, but all sed's we know have understand at least 4000. if sed -n 's,^#.*generated by automake.*,X,p' "$mf" | grep X >/dev/null 2>&1; then dirpart=`$as_dirname -- "$mf" || $as_expr X"$mf" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \ X"$mf" : 'X\(//\)[^/]' \| \ X"$mf" : 'X\(//\)$' \| \ X"$mf" : 'X\(/\)' \| . 2>/dev/null || $as_echo X"$mf" | sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{ s//\1/ q } /^X\(\/\/\)[^/].*/{ s//\1/ q } /^X\(\/\/\)$/{ s//\1/ q } /^X\(\/\).*/{ s//\1/ q } s/.*/./; q'` else continue fi # Extract the definition of DEPDIR, am__include, and am__quote # from the Makefile without running 'make'. DEPDIR=`sed -n 's/^DEPDIR = //p' < "$mf"` test -z "$DEPDIR" && continue am__include=`sed -n 's/^am__include = //p' < "$mf"` test -z "$am__include" && continue am__quote=`sed -n 's/^am__quote = //p' < "$mf"` # Find all dependency output files, they are included files with # $(DEPDIR) in their names. We invoke sed twice because it is the # simplest approach to changing $(DEPDIR) to its actual value in the # expansion. for file in `sed -n " s/^$am__include $am__quote\(.*(DEPDIR).*\)$am__quote"'$/\1/p' <"$mf" | \ sed -e 's/\$(DEPDIR)/'"$DEPDIR"'/g'`; do # Make sure the directory exists. test -f "$dirpart/$file" && continue fdir=`$as_dirname -- "$file" || $as_expr X"$file" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \ X"$file" : 'X\(//\)[^/]' \| \ X"$file" : 'X\(//\)$' \| \ X"$file" : 'X\(/\)' \| . 
2>/dev/null || $as_echo X"$file" | sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{ s//\1/ q } /^X\(\/\/\)[^/].*/{ s//\1/ q } /^X\(\/\/\)$/{ s//\1/ q } /^X\(\/\).*/{ s//\1/ q } s/.*/./; q'` as_dir=$dirpart/$fdir; as_fn_mkdir_p # echo "creating $dirpart/$file" echo '# dummy' > "$dirpart/$file" done done } ;; "libtool":C) # See if we are running on zsh, and set the options that allow our # commands through without removal of \ escapes. if test -n "${ZSH_VERSION+set}"; then setopt NO_GLOB_SUBST fi cfgfile=${ofile}T trap "$RM \"$cfgfile\"; exit 1" 1 2 15 $RM "$cfgfile" cat <<_LT_EOF >> "$cfgfile" #! $SHELL # Generated automatically by $as_me ($PACKAGE) $VERSION # Libtool was configured on host `(hostname || uname -n) 2>/dev/null | sed 1q`: # NOTE: Changes made to this file will be lost: look at ltmain.sh. # Provide generalized library-building support services. # Written by Gordon Matzigkeit, 1996 # Copyright (C) 2014 Free Software Foundation, Inc. # This is free software; see the source for copying conditions. There is NO # warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. # GNU Libtool is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of of the License, or # (at your option) any later version. # # As a special exception to the GNU General Public License, if you # distribute this file as part of a program or library that is built # using GNU Libtool, you may include this file under the same # distribution terms that you use for the rest of that program. # # GNU Libtool is distributed in the hope that it will be useful, but # WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see . 
# The names of the tagged configurations supported by this script. available_tags='CXX ' # Configured defaults for sys_lib_dlsearch_path munging. : \${LT_SYS_LIBRARY_PATH="$configure_time_lt_sys_library_path"} # ### BEGIN LIBTOOL CONFIG # Which release of libtool.m4 was used? macro_version=$macro_version macro_revision=$macro_revision # Whether or not to build shared libraries. build_libtool_libs=$enable_shared # Whether or not to build static libraries. build_old_libs=$enable_static # What type of objects to build. pic_mode=$pic_mode # Whether or not to optimize for fast installation. fast_install=$enable_fast_install # Shared archive member basename,for filename based shared library versioning on AIX. shared_archive_member_spec=$shared_archive_member_spec # Shell to use when invoking shell scripts. SHELL=$lt_SHELL # An echo program that protects backslashes. ECHO=$lt_ECHO # The PATH separator for the build system. PATH_SEPARATOR=$lt_PATH_SEPARATOR # The host system. host_alias=$host_alias host=$host host_os=$host_os # The build system. build_alias=$build_alias build=$build build_os=$build_os # A sed program that does not truncate output. SED=$lt_SED # Sed that helps us avoid accidentally triggering echo(1) options like -n. Xsed="\$SED -e 1s/^X//" # A grep program that handles long lines. GREP=$lt_GREP # An ERE matcher. EGREP=$lt_EGREP # A literal string matcher. FGREP=$lt_FGREP # A BSD- or MS-compatible name lister. NM=$lt_NM # Whether we need soft or hard links. LN_S=$lt_LN_S # What is the maximum length of a command? max_cmd_len=$max_cmd_len # Object file suffix (normally "o"). objext=$ac_objext # Executable file suffix (normally ""). exeext=$exeext # whether the shell understands "unset". lt_unset=$lt_unset # turn spaces into newlines. SP2NL=$lt_lt_SP2NL # turn newlines into spaces. NL2SP=$lt_lt_NL2SP # convert \$build file names to \$host format. to_host_file_cmd=$lt_cv_to_host_file_cmd # convert \$build files to toolchain format. 
to_tool_file_cmd=$lt_cv_to_tool_file_cmd # An object symbol dumper. OBJDUMP=$lt_OBJDUMP # Method to check whether dependent libraries are shared objects. deplibs_check_method=$lt_deplibs_check_method # Command to use when deplibs_check_method = "file_magic". file_magic_cmd=$lt_file_magic_cmd # How to find potential files when deplibs_check_method = "file_magic". file_magic_glob=$lt_file_magic_glob # Find potential files using nocaseglob when deplibs_check_method = "file_magic". want_nocaseglob=$lt_want_nocaseglob # DLL creation program. DLLTOOL=$lt_DLLTOOL # Command to associate shared and link libraries. sharedlib_from_linklib_cmd=$lt_sharedlib_from_linklib_cmd # The archiver. AR=$lt_AR # Flags to create an archive. AR_FLAGS=$lt_AR_FLAGS # How to feed a file listing to the archiver. archiver_list_spec=$lt_archiver_list_spec # A symbol stripping program. STRIP=$lt_STRIP # Commands used to install an old-style archive. RANLIB=$lt_RANLIB old_postinstall_cmds=$lt_old_postinstall_cmds old_postuninstall_cmds=$lt_old_postuninstall_cmds # Whether to use a lock for old archive extraction. lock_old_archive_extraction=$lock_old_archive_extraction # A C compiler. LTCC=$lt_CC # LTCC compiler flags. LTCFLAGS=$lt_CFLAGS # Take the output of nm and produce a listing of raw symbols and C names. global_symbol_pipe=$lt_lt_cv_sys_global_symbol_pipe # Transform the output of nm in a proper C declaration. global_symbol_to_cdecl=$lt_lt_cv_sys_global_symbol_to_cdecl # Transform the output of nm into a list of symbols to manually relocate. global_symbol_to_import=$lt_lt_cv_sys_global_symbol_to_import # Transform the output of nm in a C name address pair. global_symbol_to_c_name_address=$lt_lt_cv_sys_global_symbol_to_c_name_address # Transform the output of nm in a C name address pair when lib prefix is needed. global_symbol_to_c_name_address_lib_prefix=$lt_lt_cv_sys_global_symbol_to_c_name_address_lib_prefix # The name lister interface. 
nm_interface=$lt_lt_cv_nm_interface # Specify filename containing input files for \$NM. nm_file_list_spec=$lt_nm_file_list_spec # The root where to search for dependent libraries,and where our libraries should be installed. lt_sysroot=$lt_sysroot # Command to truncate a binary pipe. lt_truncate_bin=$lt_lt_cv_truncate_bin # The name of the directory that contains temporary libtool files. objdir=$objdir # Used to examine libraries when file_magic_cmd begins with "file". MAGIC_CMD=$MAGIC_CMD # Must we lock files when doing compilation? need_locks=$lt_need_locks # Manifest tool. MANIFEST_TOOL=$lt_MANIFEST_TOOL # Tool to manipulate archived DWARF debug symbol files on Mac OS X. DSYMUTIL=$lt_DSYMUTIL # Tool to change global to local symbols on Mac OS X. NMEDIT=$lt_NMEDIT # Tool to manipulate fat objects and archives on Mac OS X. LIPO=$lt_LIPO # ldd/readelf like tool for Mach-O binaries on Mac OS X. OTOOL=$lt_OTOOL # ldd/readelf like tool for 64 bit Mach-O binaries on Mac OS X 10.4. OTOOL64=$lt_OTOOL64 # Old archive suffix (normally "a"). libext=$libext # Shared library suffix (normally ".so"). shrext_cmds=$lt_shrext_cmds # The commands to extract the exported symbol list from a shared archive. extract_expsyms_cmds=$lt_extract_expsyms_cmds # Variables whose values should be saved in libtool wrapper scripts and # restored at link time. variables_saved_for_relink=$lt_variables_saved_for_relink # Do we need the "lib" prefix for modules? need_lib_prefix=$need_lib_prefix # Do we need a version for libraries? need_version=$need_version # Library versioning type. version_type=$version_type # Shared library runtime path variable. runpath_var=$runpath_var # Shared library path variable. shlibpath_var=$shlibpath_var # Is shlibpath searched before the hard-coded library search path? shlibpath_overrides_runpath=$shlibpath_overrides_runpath # Format of library name prefix. libname_spec=$lt_libname_spec # List of archive names. First name is the real one, the rest are links. 
# The last name is the one that the linker finds with -lNAME library_names_spec=$lt_library_names_spec # The coded name of the library, if different from the real name. soname_spec=$lt_soname_spec # Permission mode override for installation of shared libraries. install_override_mode=$lt_install_override_mode # Command to use after installation of a shared archive. postinstall_cmds=$lt_postinstall_cmds # Command to use after uninstallation of a shared archive. postuninstall_cmds=$lt_postuninstall_cmds # Commands used to finish a libtool library installation in a directory. finish_cmds=$lt_finish_cmds # As "finish_cmds", except a single script fragment to be evaled but # not shown. finish_eval=$lt_finish_eval # Whether we should hardcode library paths into libraries. hardcode_into_libs=$hardcode_into_libs # Compile-time system search path for libraries. sys_lib_search_path_spec=$lt_sys_lib_search_path_spec # Detected run-time system search path for libraries. sys_lib_dlsearch_path_spec=$lt_configure_time_dlsearch_path # Explicit LT_SYS_LIBRARY_PATH set during ./configure time. configure_time_lt_sys_library_path=$lt_configure_time_lt_sys_library_path # Whether dlopen is supported. dlopen_support=$enable_dlopen # Whether dlopen of programs is supported. dlopen_self=$enable_dlopen_self # Whether dlopen of statically linked programs is supported. dlopen_self_static=$enable_dlopen_self_static # Commands to strip libraries. old_striplib=$lt_old_striplib striplib=$lt_striplib # The linker used to build libraries. LD=$lt_LD # How to create reloadable object files. reload_flag=$lt_reload_flag reload_cmds=$lt_reload_cmds # Commands used to build an old-style archive. old_archive_cmds=$lt_old_archive_cmds # A language specific compiler. CC=$lt_compiler # Is the compiler the GNU compiler? with_gcc=$GCC # Compiler flag to turn off builtin functions. no_builtin_flag=$lt_lt_prog_compiler_no_builtin_flag # Additional compiler flags for building library objects. 
pic_flag=$lt_lt_prog_compiler_pic # How to pass a linker flag through the compiler. wl=$lt_lt_prog_compiler_wl # Compiler flag to prevent dynamic linking. link_static_flag=$lt_lt_prog_compiler_static # Does compiler simultaneously support -c and -o options? compiler_c_o=$lt_lt_cv_prog_compiler_c_o # Whether or not to add -lc for building shared libraries. build_libtool_need_lc=$archive_cmds_need_lc # Whether or not to disallow shared libs when runtime libs are static. allow_libtool_libs_with_static_runtimes=$enable_shared_with_static_runtimes # Compiler flag to allow reflexive dlopens. export_dynamic_flag_spec=$lt_export_dynamic_flag_spec # Compiler flag to generate shared objects directly from archives. whole_archive_flag_spec=$lt_whole_archive_flag_spec # Whether the compiler copes with passing no objects directly. compiler_needs_object=$lt_compiler_needs_object # Create an old-style archive from a shared archive. old_archive_from_new_cmds=$lt_old_archive_from_new_cmds # Create a temporary old-style archive to link instead of a shared archive. old_archive_from_expsyms_cmds=$lt_old_archive_from_expsyms_cmds # Commands used to build a shared archive. archive_cmds=$lt_archive_cmds archive_expsym_cmds=$lt_archive_expsym_cmds # Commands used to build a loadable module if different from building # a shared archive. module_cmds=$lt_module_cmds module_expsym_cmds=$lt_module_expsym_cmds # Whether we are building with GNU ld or not. with_gnu_ld=$lt_with_gnu_ld # Flag that allows shared libraries with undefined symbols to be built. allow_undefined_flag=$lt_allow_undefined_flag # Flag that enforces no undefined symbols. no_undefined_flag=$lt_no_undefined_flag # Flag to hardcode \$libdir into a binary during linking. # This must work even if \$libdir does not exist hardcode_libdir_flag_spec=$lt_hardcode_libdir_flag_spec # Whether we need a single "-rpath" flag with a separated argument. 
hardcode_libdir_separator=$lt_hardcode_libdir_separator # Set to "yes" if using DIR/libNAME\$shared_ext during linking hardcodes # DIR into the resulting binary. hardcode_direct=$hardcode_direct # Set to "yes" if using DIR/libNAME\$shared_ext during linking hardcodes # DIR into the resulting binary and the resulting library dependency is # "absolute",i.e impossible to change by setting \$shlibpath_var if the # library is relocated. hardcode_direct_absolute=$hardcode_direct_absolute # Set to "yes" if using the -LDIR flag during linking hardcodes DIR # into the resulting binary. hardcode_minus_L=$hardcode_minus_L # Set to "yes" if using SHLIBPATH_VAR=DIR during linking hardcodes DIR # into the resulting binary. hardcode_shlibpath_var=$hardcode_shlibpath_var # Set to "yes" if building a shared library automatically hardcodes DIR # into the library and all subsequent libraries and executables linked # against it. hardcode_automatic=$hardcode_automatic # Set to yes if linker adds runtime paths of dependent libraries # to runtime path list. inherit_rpath=$inherit_rpath # Whether libtool must link a program against all its dependency libraries. link_all_deplibs=$link_all_deplibs # Set to "yes" if exported symbols are required. always_export_symbols=$always_export_symbols # The commands to list exported symbols. export_symbols_cmds=$lt_export_symbols_cmds # Symbols that should not be listed in the preloaded symbols. exclude_expsyms=$lt_exclude_expsyms # Symbols that must always be exported. include_expsyms=$lt_include_expsyms # Commands necessary for linking programs (against libraries) with templates. prelink_cmds=$lt_prelink_cmds # Commands necessary for finishing linking programs. postlink_cmds=$lt_postlink_cmds # Specify filename containing input files. file_list_spec=$lt_file_list_spec # How to hardcode a shared library path into an executable. hardcode_action=$hardcode_action # The directories searched by this compiler when creating a shared library. 
compiler_lib_search_dirs=$lt_compiler_lib_search_dirs # Dependencies to place before and after the objects being linked to # create a shared library. predep_objects=$lt_predep_objects postdep_objects=$lt_postdep_objects predeps=$lt_predeps postdeps=$lt_postdeps # The library search path used internally by the compiler when linking # a shared library. compiler_lib_search_path=$lt_compiler_lib_search_path # ### END LIBTOOL CONFIG _LT_EOF cat <<'_LT_EOF' >> "$cfgfile" # ### BEGIN FUNCTIONS SHARED WITH CONFIGURE # func_munge_path_list VARIABLE PATH # ----------------------------------- # VARIABLE is name of variable containing _space_ separated list of # directories to be munged by the contents of PATH, which is string # having a format: # "DIR[:DIR]:" # string "DIR[ DIR]" will be prepended to VARIABLE # ":DIR[:DIR]" # string "DIR[ DIR]" will be appended to VARIABLE # "DIRP[:DIRP]::[DIRA:]DIRA" # string "DIRP[ DIRP]" will be prepended to VARIABLE and string # "DIRA[ DIRA]" will be appended to VARIABLE # "DIR[:DIR]" # VARIABLE will be replaced by "DIR[ DIR]" func_munge_path_list () { case x$2 in x) ;; *:) eval $1=\"`$ECHO $2 | $SED 's/:/ /g'` \$$1\" ;; x:*) eval $1=\"\$$1 `$ECHO $2 | $SED 's/:/ /g'`\" ;; *::*) eval $1=\"\$$1\ `$ECHO $2 | $SED -e 's/.*:://' -e 's/:/ /g'`\" eval $1=\"`$ECHO $2 | $SED -e 's/::.*//' -e 's/:/ /g'`\ \$$1\" ;; *) eval $1=\"`$ECHO $2 | $SED 's/:/ /g'`\" ;; esac } # Calculate cc_basename. Skip known compiler wrappers and cross-prefix. func_cc_basename () { for cc_temp in $*""; do case $cc_temp in compile | *[\\/]compile | ccache | *[\\/]ccache ) ;; distcc | *[\\/]distcc | purify | *[\\/]purify ) ;; \-*) ;; *) break;; esac done func_cc_basename_result=`$ECHO "$cc_temp" | $SED "s%.*/%%; s%^$host_alias-%%"` } # ### END FUNCTIONS SHARED WITH CONFIGURE _LT_EOF case $host_os in aix3*) cat <<\_LT_EOF >> "$cfgfile" # AIX sometimes has problems with the GCC collect2 program. 
For some # reason, if we set the COLLECT_NAMES environment variable, the problems # vanish in a puff of smoke. if test set != "${COLLECT_NAMES+set}"; then COLLECT_NAMES= export COLLECT_NAMES fi _LT_EOF ;; esac ltmain=$ac_aux_dir/ltmain.sh # We use sed instead of cat because bash on DJGPP gets confused if # if finds mixed CR/LF and LF-only lines. Since sed operates in # text mode, it properly converts lines to CR/LF. This bash problem # is reportedly fixed, but why not run on old versions too? sed '$q' "$ltmain" >> "$cfgfile" \ || (rm -f "$cfgfile"; exit 1) mv -f "$cfgfile" "$ofile" || (rm -f "$ofile" && cp "$cfgfile" "$ofile" && rm -f "$cfgfile") chmod +x "$ofile" cat <<_LT_EOF >> "$ofile" # ### BEGIN LIBTOOL TAG CONFIG: CXX # The linker used to build libraries. LD=$lt_LD_CXX # How to create reloadable object files. reload_flag=$lt_reload_flag_CXX reload_cmds=$lt_reload_cmds_CXX # Commands used to build an old-style archive. old_archive_cmds=$lt_old_archive_cmds_CXX # A language specific compiler. CC=$lt_compiler_CXX # Is the compiler the GNU compiler? with_gcc=$GCC_CXX # Compiler flag to turn off builtin functions. no_builtin_flag=$lt_lt_prog_compiler_no_builtin_flag_CXX # Additional compiler flags for building library objects. pic_flag=$lt_lt_prog_compiler_pic_CXX # How to pass a linker flag through the compiler. wl=$lt_lt_prog_compiler_wl_CXX # Compiler flag to prevent dynamic linking. link_static_flag=$lt_lt_prog_compiler_static_CXX # Does compiler simultaneously support -c and -o options? compiler_c_o=$lt_lt_cv_prog_compiler_c_o_CXX # Whether or not to add -lc for building shared libraries. build_libtool_need_lc=$archive_cmds_need_lc_CXX # Whether or not to disallow shared libs when runtime libs are static. allow_libtool_libs_with_static_runtimes=$enable_shared_with_static_runtimes_CXX # Compiler flag to allow reflexive dlopens. export_dynamic_flag_spec=$lt_export_dynamic_flag_spec_CXX # Compiler flag to generate shared objects directly from archives. 
whole_archive_flag_spec=$lt_whole_archive_flag_spec_CXX # Whether the compiler copes with passing no objects directly. compiler_needs_object=$lt_compiler_needs_object_CXX # Create an old-style archive from a shared archive. old_archive_from_new_cmds=$lt_old_archive_from_new_cmds_CXX # Create a temporary old-style archive to link instead of a shared archive. old_archive_from_expsyms_cmds=$lt_old_archive_from_expsyms_cmds_CXX # Commands used to build a shared archive. archive_cmds=$lt_archive_cmds_CXX archive_expsym_cmds=$lt_archive_expsym_cmds_CXX # Commands used to build a loadable module if different from building # a shared archive. module_cmds=$lt_module_cmds_CXX module_expsym_cmds=$lt_module_expsym_cmds_CXX # Whether we are building with GNU ld or not. with_gnu_ld=$lt_with_gnu_ld_CXX # Flag that allows shared libraries with undefined symbols to be built. allow_undefined_flag=$lt_allow_undefined_flag_CXX # Flag that enforces no undefined symbols. no_undefined_flag=$lt_no_undefined_flag_CXX # Flag to hardcode \$libdir into a binary during linking. # This must work even if \$libdir does not exist hardcode_libdir_flag_spec=$lt_hardcode_libdir_flag_spec_CXX # Whether we need a single "-rpath" flag with a separated argument. hardcode_libdir_separator=$lt_hardcode_libdir_separator_CXX # Set to "yes" if using DIR/libNAME\$shared_ext during linking hardcodes # DIR into the resulting binary. hardcode_direct=$hardcode_direct_CXX # Set to "yes" if using DIR/libNAME\$shared_ext during linking hardcodes # DIR into the resulting binary and the resulting library dependency is # "absolute",i.e impossible to change by setting \$shlibpath_var if the # library is relocated. hardcode_direct_absolute=$hardcode_direct_absolute_CXX # Set to "yes" if using the -LDIR flag during linking hardcodes DIR # into the resulting binary. hardcode_minus_L=$hardcode_minus_L_CXX # Set to "yes" if using SHLIBPATH_VAR=DIR during linking hardcodes DIR # into the resulting binary. 
hardcode_shlibpath_var=$hardcode_shlibpath_var_CXX # Set to "yes" if building a shared library automatically hardcodes DIR # into the library and all subsequent libraries and executables linked # against it. hardcode_automatic=$hardcode_automatic_CXX # Set to yes if linker adds runtime paths of dependent libraries # to runtime path list. inherit_rpath=$inherit_rpath_CXX # Whether libtool must link a program against all its dependency libraries. link_all_deplibs=$link_all_deplibs_CXX # Set to "yes" if exported symbols are required. always_export_symbols=$always_export_symbols_CXX # The commands to list exported symbols. export_symbols_cmds=$lt_export_symbols_cmds_CXX # Symbols that should not be listed in the preloaded symbols. exclude_expsyms=$lt_exclude_expsyms_CXX # Symbols that must always be exported. include_expsyms=$lt_include_expsyms_CXX # Commands necessary for linking programs (against libraries) with templates. prelink_cmds=$lt_prelink_cmds_CXX # Commands necessary for finishing linking programs. postlink_cmds=$lt_postlink_cmds_CXX # Specify filename containing input files. file_list_spec=$lt_file_list_spec_CXX # How to hardcode a shared library path into an executable. hardcode_action=$hardcode_action_CXX # The directories searched by this compiler when creating a shared library. compiler_lib_search_dirs=$lt_compiler_lib_search_dirs_CXX # Dependencies to place before and after the objects being linked to # create a shared library. predep_objects=$lt_predep_objects_CXX postdep_objects=$lt_postdep_objects_CXX predeps=$lt_predeps_CXX postdeps=$lt_postdeps_CXX # The library search path used internally by the compiler when linking # a shared library. 
compiler_lib_search_path=$lt_compiler_lib_search_path_CXX # ### END LIBTOOL TAG CONFIG: CXX _LT_EOF ;; "tsk/tsk_incs.h":C) echo "#ifndef _TSK_INCS_H" > tsk/tsk_incs.h echo "#define _TSK_INCS_H" >> tsk/tsk_incs.h echo "// automatically by ./configure" >> tsk/tsk_incs.h echo "// Contains the config.h data needed by programs that use libtsk" >> tsk/tsk_incs.h echo "" >> tsk/tsk_incs.h if test x$ac_cv_header_unistd_h = xyes; then echo "#include " >> tsk/tsk_incs.h fi if test x$ac_cv_header_inttypes_h = xyes; then echo "#ifndef __STDC_FORMAT_MACROS" >> tsk/tsk_incs.h echo "#define __STDC_FORMAT_MACROS" >> tsk/tsk_incs.h echo "#endif" >> tsk/tsk_incs.h echo "#include " >> tsk/tsk_incs.h fi if test x$ac_cv_header_sys_param_h = xyes; then echo "#include " >> tsk/tsk_incs.h fi if test x$ax_multithread = xyes; then echo "#define TSK_MULTITHREAD_LIB // enable multithreading" >> tsk/tsk_incs.h fi echo "" >> tsk/tsk_incs.h echo "#endif" >> tsk/tsk_incs.h ;; esac done # for ac_tag as_fn_exit 0 _ACEOF ac_clean_files=$ac_clean_files_save test $ac_write_fail = 0 || as_fn_error $? "write failure creating $CONFIG_STATUS" "$LINENO" 5 # configure is writing to config.log, and then calls config.status. # config.status does its own redirection, appending to config.log. # Unfortunately, on DOS this fails, as config.log is still kept open # by configure, so config.status won't be able to write to it; its # output is simply discarded. So we exec the FD to /dev/null, # effectively closing config.log, so it can be properly (re)opened and # appended to by config.status. When coming back to configure, we # need to make the FD available again. if test "$no_create" != yes; then ac_cs_success=: ac_config_status_args= test "$silent" = yes && ac_config_status_args="$ac_config_status_args --quiet" exec 5>/dev/null $SHELL $CONFIG_STATUS $ac_config_status_args || ac_cs_success=false exec 5>>config.log # Use ||, not &&, to avoid exiting from the if with $? 
= 1, which # would make configure fail if this is the last instruction. $ac_cs_success || as_fn_exit 1 fi if test -n "$ac_unrecognized_opts" && test "$enable_option_checking" != no; then { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: unrecognized options: $ac_unrecognized_opts" >&5 $as_echo "$as_me: WARNING: unrecognized options: $ac_unrecognized_opts" >&2;} fi { $as_echo "$as_me:${as_lineno-$LINENO}: Building: afflib support: $ax_afflib libewf support: $ax_libewf zlib support: $ax_zlib libvhdi support: $ax_libvhdi libvmdk support: $ax_libvmdk Features: Java/JNI support: $ax_java_support Multithreading: $ax_multithread " >&5 $as_echo "$as_me: Building: afflib support: $ax_afflib libewf support: $ax_libewf zlib support: $ax_zlib libvhdi support: $ax_libvhdi libvmdk support: $ax_libvmdk Features: Java/JNI support: $ax_java_support Multithreading: $ax_multithread " >&6;}; sleuthkit-4.11.1/unit_tests/000755 000765 000024 00000000000 14137073563 016616 5ustar00carrierstaff000000 000000 sleuthkit-4.11.1/config/000755 000765 000024 00000000000 14137073563 015662 5ustar00carrierstaff000000 000000 sleuthkit-4.11.1/NEWS.txt000644 000765 000024 00000260540 14137073414 015734 0ustar00carrierstaff000000 000000 ---------------- VERSION 4.11.1 -------------- C/C++: - Several fixes from Joachim Metz - NTFS Decompression bug fix from Kim Stone and Joel Uckelman Java: - Fixed connection leak when making OS Accounts in bridge - OsAccount updates for instance types and special Windows SIDs - Fixed issue with duplicate value in Japanese timeline translation ---------------- VERSION 4.11.0 -------------- C/C++: - Added checks at various layers to detect encrypted file systems and disks to give more useful error messages. - Added checks to detect file formats that are not supported (such as AD1, ZIP, etc.) to give more useful error messages. - Added tsk_imageinfo tool that detects if an image is supported by TSK and if it is encrypted. - Add numerous bound checks from Joachim Metz. 
- Clarified licenses as pointed out by Joachim Metz. Java: - Updated from Schema 8.6 to 9.1. - Added tables and classes for OS Accounts and Realms (Domains). - Added tables and classes for Host Addresses (IP, MAC, etc.). - Added tables and classes for Analysis Results vs Data Artifacts by adding onto BlackboardArtifacts. - Added tables and classes for Host and Person to make it easier to group data sources. - Added static types for standard artifact types. - Added File Attribute table to allow custom information to be stored for each file. - Made ordering of getting lock and connection consistent. - Made the findFile methods more efficient by using extension (which is indexed). ---------------- VERSION 4.10.2 -------------- C/C++ - Added support for Ext4 inline data Java - New Blackboard Artifacts for ALEAPP/ILEAPP, Yara, Geo Area, etc. - Upgraded to PostgreSQL JDBC Driver 42.2.18 - Added SHA256 to files table in DB and added utility calculation methods. - Changed TimelineManager to make events for any artifact with a time stamp - Added Japanese translations - Fixed sychronization bug in getUniquePath ---------------- VERSION 4.10.1 -------------- C/C++: - Changed Windows build to use Nuget for libewf, libvmdk, libvhdi. - Fixed compiler warnings - Clarrified licenses and added Apache license to distribution - Improved error handling for out of memory issues - Rejistry++ memory leak fixes Java: - Localized for Japanese ---------------- VERSION 4.10.0 -------------- C/C++: - Removed PostgreSQL code (that was used only by Java code) - Added Java callback support so that database inserts are done in Java. Java: - Added methods and callbacks as required to allow database population to happen in Java instead of C/C++. - Added support to allow Autopsy streaming ingest where files are added in batches. - Added TaggingManager class and concept of a TagSet to support ProjectVic categories. - Fixed changes to normalization and validation of emails and phone numbers. 
- Added a CASE/UCO JAR file that creates JSON-LD based on TSK objects. ---------------- VERSION 4.9.0 -------------- C/C++ - Removed framework project. Use Autopsy instead if you need an analysis framework. - Various fixes from Google-based fuzzing. - Ensure all reads (even big ones) are sector aligned when reading from Windows device. - Ensure all command line tools support new pool command line arguments. - Create virtual files for APFS unallocated space - HFS fix to display type Java: - More artifact helper methods - More artifacts and attributes for drones and GPS coordinates - Updated TimelineManager to insert GPS artifacts into events table ---------------- VERSION 4.8.0 -------------- C/C++ - Pool layer was added to support APFS. NOTE: API is likely to change. - Limited APFS support added in libtsk and some of the command line tools. -- Encryption support is not complete. -- Blackbag Technologies submitted the initial PR. Basis Technology did some minor refactoring. - Refactoring and minor fixes to logical imager - Various bug fixes from Google fuzzing efforts and Jonathan B from Afarsec - Fixed infinite NTFS loop from cyclical attribute lists. Reported by X. - File system bug fixes from uckelman-sf on github Database: - DB schema was updated to support pools - Added concept of JSON in Blackboard Attributes - Schema supports cascading deletes to enable data source deletion Java: - Added Pool class and associated infrastructure - Added methods to support deleting data sources from database - Removed JavaFX as a dependency by refactoring the recently introduced timeline filtering classes. - Added attachment support to the blackboard helper package. ---------------- VERSION 4.7.0 -------------- C/C++: - DB schema was expanded to store tsk_events and related tables. Time-based data is automatically added when files and artifacts are created. Used by Autopsy timeline. - Logical Imager can save files as individual files instead of in VHD (saves space). 
- Logical imager produces log of results - Logical Imager refactor - Removed PRIuOFF and other macros that caused problems with signed/unsigned printing. For example, TSK_OFF_T is a signed value and PRIuOFF would cause problems as it printed a negative number as a big positive number. Java - Travis and Debian package use OpenJDK instead of OracleJDK - New Blackboard Helper packages (blackboardutils) to make it easier to make artifacts. - Blackboard scope was expanded, including the new postArtifact() method that adds event data to database and broadcasts an event to listeners. - SleuthkitCase now has an EventBus for database-related events. - New TimelineManager and associated filter classes to support new events table ---------------- VERSION 4.6.7 -------------- C/C++ Code: - First release of new logical imager tool - VHD image writer fixes for out of space scenarios Java: - Expand Communications Manager API - Performance improvement for SleuthkitCase.addLocalFile() ---------------- VERSION 4.6.6 -------------- C/C++ Code: - Acquisition deteails are set in DB for E01 files - Fix NTFS decompression issue (from Joe Sylve) - Image reading fix when cache fails (Joe Sylve) - Fix HFS+ issue with large catalog files (Joe Sylve) - Fix free memory issue in srch_strings (Derrick Karpo) Java: - Fix so that local files can be relative - More Blackboard artifacts and attributes for web data - Added methods to CaseDbManager to enable checking for and modifying tables. 
- APIs to get and set acquisition details - Added methods to add volume and file systems to database - Added method to add LayoutFile for allocated files - Changed handling of JNI handles to better support multiple cases ---------------- VERSION 4.6.5 -------------- C/C++ Code: - HFS boundary check fix - New fields for hash values and acquisition details in case database - Store "created schema version" in case database Java Code: - New artifacts and attributes defined - Fixed bug in SleuthkitCase.getContentById() for data sources - Fixed bug in LayoutFile.read() that could allow reading past end offile ---------------- VERSION 4.6.4 -------------- Java Code: - Increase max statements in database to prevent errors under load - Have a max timeout for SQLite retries ---------------- VERSION 4.6.3 -------------- C/C++ Code: - Hashdb bug fixes for corrupt indexes and 0 hashes - New code for testing power of number in ExtX code Java Code: - New class that allows generic database access - New methods that check for duplicate artifacts - Added caches for frequently used content Database Schema: - Added Examiner table - Tags are now associated with Examiners - Changed parent_path for logical files to be consistent with FS files. ---------------- VERSION 4.6.2 -------------- C/C++ Code: - Various compiler warning fixes - Added small delay into image writer to not starve other threads Java: - Added more locking to ensure that handles were not closed while other threads were using them. - Added APIs to support more queries by data source - Added memory-based caching when detecting if an object has children or not. ---------------- VERSION 4.6.1 -------------- C/C++ Code: - Lots of bounds checking fixes from Google's fuzzing tests. Thanks Google. - Cleanup and fixes from uckelman-sf and others - PostgreSQL, libvhdi, & libvmdk are supported for Linux / OS X - Fixed display of NTFS GUID in istat - report from Eric Zimmerman. 
- NTFS istat shows details about all FILE_NAME attributes, not just the first. report from Eric Zimmerman. Java: - Reports can be URLs - Reports are Content - Added APIs for graph view of communications - JNI library is extracted to name with user name in it to avoid conflicts Database: - Version upgraded from to 8.0 because Reports are now Content ---------------- VERSION 4.6.0 -------------- New Features - New Communications related Java classes and database tables. - Java build updates for Autopsy Linux build - Blackboard artifacts are now Content objects in Java and part of tsk_objects table in database. - Increased cache sizes. - Lots of bounds checking fixes from Google's fuzzing tests. Thanks Google. - HFS fix from uckelman-sf. ---------------- VERSION 4.5.0 -------------- New Features: - Support for LZVN compressed HFS files (from Joel Uckelman) - Use sector size from E01 (helps with 4k sector sizes) - More specific version number of DB schema - New Local Directory type in DB to differentiate with Virtual Directories - All blackboard artifacts in DB are now 'content'. Attachments can now be children of their parent message. - Added extension as a column in tsk_files table. Bug Fixes: - Faster resolving of HFS hard links - Lots of fixes from Google Fuzzing efforts. ---------------- VERSION 4.4.2 -------------- New Features: - usnjls tool for NTFS USN log (from noxdafox) - Added index to mime type column in DB - Use local SQLite3 if it exists (from uckelman-sf) - Blackboard Artifacts have a shortDescription metho Bug Fixes: - Fix for highest HFS+ inum lookup (from uckelman-sf) - Fix ISO9660 crash - various performance fixes and added thread safety checks ---------------- VERSION 4.4.1 -------------- - New Features: -- Can create a sparse VHD file when reading a local drive with new IMAGE_WRITER structure. Currently being used by Autopsy, but no TSK command line tools. - Bug fixes: -- Lots of cleanup and fixes. 
Including: -- memory leaks -- UTF8 and UTF16 cleanup -- Missing NTFS files (in fairly rare cases) -- Really long folder structures and database inserts ---------------- VERSION 4.4.0 -------------- - Compiling in Windows now uses Visual Studio 2015 - tsk_loaddb now adds new files for slack space and JNI was upgraded accordingly. ---------------- VERSION 4.3.1 -------------- - NTFS works on 4k sectors - Added support in Java to store local files in encoded form (XORed) - Added Java Account object into datamodel - Added notion of a review status to blackboard artifacts - Upgraded version of PostgreSQL - Various minor bug fixes ---------------- VERSION 4.3.0 -------------- - PostgreSQL support (Windows only) - New Release_NoLibs Visual Studio target - Support for virtual machine formats via libvmdk and libvhdi (Windows only) - Schema updates (data sources table, mime type, attributes store type) - tsk_img_open can take externally created TSK_IMG_INFO - Various minor bug fixes ---------------- VERSION 4.2.0 -------------- - ExFAT support added - New database schema - New Sqlite hash database - Various bug fixes - NTFS pays more attention to sequence and loads metadata only if it matches. - Added secondary hash database index ---------------- VERSION 4.1.3 -------------- - fixed bug that could crash UFS/ExtX in inode_lookup. - More bounds checking in ISO9660 code - Image layer bounds checking - Update version of SQLITE-JDBC - changed how java loads native libraries - Config file for YAFFS2 spare area - New method in image layer to return names - Yaffs2 cleanup. - Escape all strings in SQLite database - SQLite code uses NTFS sequence number to match parent IDs ---------------- VERSION 4.1.2 -------------- Core: - Fixed more visual studio projects to work on 64-bit - TskAutoDB considers not finding a VS/FS a critical error. Java: - added method to Image to perform sanity check on image sizes. fiwalk: - Fixed compile error on Linux etc. 
---------------- VERSION 4.1.1 -------------- Core: - Added FILE_SHARE_WRITE to all windows open calls. - removed unused methods in CRC code that caused compile errors. - Added NTFS FNAME times to time2 struct in TSK_FS_META to make them easier to access -- should have done this a long time ago! - fls -m and tsk_gettimes output NTFS FNAME times to output for timelines. - hfind with EnCase hashsets works when DB is specified (and not only index) - TskAuto now goes into UNALLOC partitions by default too. - Added support to automatically find all Cellebrite raw dump files given the name of the first image. - Added 64-bit windows targets to VisualStudio files. - Added NTFS sequence to parent address in directory and directory itself. - Updated SQLite code to use sequence when finding parent object ID. Java: - Java bindings JAR files now have native libraries in them. - Logical files are added with a transaction ---------------- VERSION 4.1.0 -------------- Core: - Added YAFFS2 support (patch from viaForensics). - Added Ext4 support (patch from kfairbanks) - changed all include paths to be 'tsk' instead of 'tsk3' -- IMPORTANT FOR ALL DEVELOPERS! Framework: - Added Linux and MAC support. - Added L01 support. - Added APIs to find files by name, path and extension. - Removed deprecated TskFile::getAttributes methods. - moved code around for AutoBuild tool support. Java Bindings: - added DerivedFile datamodel support - added a public method to Content to add ability to close() its tsk handle before the object is gc'd - added faster skip() and random seek support to ReadContentInputStream - refactored datamodel by pushing common methods up to AbstractFile - fixed minor memory leaks - improved regression testing framework for java bindings datamodel ---------------- VERSION 4.0.2 -------------- Core: New Features: - Added fiwalk tool from Simson. Not supported in Visual Studio yet. Bug Fixes: - Fixed fcat to work on NTFS files (still doesn't support ADS though). 
- Fixed HFS+ support in tsk_loaddb / SQLite -- root directory was not added. - NTFS code now looks at all MFT entries when listing directory contents. It used to only look at unallocated entries for orphan files. This fixes an image that had allocated files missing from the directory b-tree. - NTFS code uses sequence number when searching MFT entries for all files. - Libewf detection code change to support v2 API more reliably (ID: 3596212). - NTFS $SII code could crash in rare cases if $SDS was multiple of block size. Framework: - Added new API to TskImgDB that returns the base name of an image. - Numerous performance improvements to framework. - Removed requirement in framework to specify module extension in pipeline configuration file. - Added blackboard artifacts to represent both operating system and network service user accounts. Java Bindings: - added more APIs to find files by name, path and where clause - added API to get currently processed dir when image is being added, - added API to return specific types of children of image, volume system, volume, file system. - moved more common methods up to Content interface - deprecated context of blackboard attributes, - deprecated SleuthkitCase.runQuery() and SleuthkitCase.closeRunQuery() - fixed ReadContentInputStream bugs (ignoring offset into a buffer, implementing available() ) - methods that are lazy loading are now thread safe - Hash class is now thread-safe - use more PreparedStatements to improve performance - changed source level from java 1.6 to 1.7 - Throw exceptions from C++ side better ---------------- VERSION 4.0.1 -------------- New Features: - Can open raw Windows devices with write mode sharing. - More DOS partition types are displayed. - Added fcat tool that takes in file name and exports content (equivalent to using ifind and icat together). - Added new API to TskImgDB that returns hash value associated with carved files. 
- performance improvements with FAT code (maps and dir_add) - performance improvements with NTFS code (maps) - added AONLY flag to block_walk - Updated blkls and blkcalc to use AONLY flag -- MUCH faster. Bug Fixes: - Fixed mactime issue where it could choose the wrong timezone that did not follow daylight savings times. - Fixed file size of alternate data streams in framework. - Incorporated memory leak fixes and raw device fixes from ADF Solutions. ---------------- VERSION 4.0.0 -------------- New Features: - Added multithreaded support - Added C++ wrapper classes - Added JNI bindings / Java data model classes - 3314047: Added utf8-specific versions of 'toid' methods for img,vs,fs types - 3184429: More consistent printing of unset times (all zeros instead of 1970) - New database design that allows for multiple images in the same database - GPT volume system tries other sector sizes if first attempt fails. - Added hash calculation and lookup to AutoDB and JNI. - Upgraded SQLite to 3.7.9. - Added Framework in (windows-only) - EnCase hash support - Libewf v2 support (it is now non-beta) - First file in a raw split or E01 can be specified and the rest of the files are found. - mactime displays times as 0 if the time is not set (instead of 1970) - Changed behavior of 'mactime -y' to use ISO8601 format. - Updated HFS+ code from ATC-NY. - FAT orphan file improvements to reduce false positives. - TskAuto better reports errors. - Upgrade build projects from Visual Studio 2008 to 2010. Bug Fixes: - Relaxed checking when conflict exists between DOS and GPT partitions. Had a Mac image that was failing to resolve which partition table to use. ---------------- VERSION 3.2.3 -------------- New Features: - new TskAuto method (handleNotification()) that gets verbose messages that allow for debugging when the class makes decisions. 
- DOS partitions are loaded even if an extended partition fails to load - new TskAuto::findFilesInFs(TSK_FS_INFO *) method - Need to only specify first E01 file and the rest are found - Changed docs license to non-commercial - Unicode conversion routines fix invalid UTF-16 text during conversion - Added '-d' to tsk_recover to specify directory to recover Bug Fixes: - Added check to fatfs_open to compare first sectors of FAT if we used backup boot sector and verify it is FAT32. - More checks to make sure that FAT short names are valid ASCII - 3406523: Mactime size sanity check - 3393960: hfind reading of Windows input file - 3316603: Error reading last blocks of RAW CD images - Fixed bugs in how directories and files were detected in TskAuto ---------------- VERSION 3.2.2 -------------- Bug Fixes - 3213886: ISO9660 directory hole not advancing - 3173095 contd: Updated checks so that tougher FAT checks are applied to deleted directories. - 3303678: Image type in Sqlite DB is now not always 0 - 3303679: Deleted FAT files have more name cleanup in short names New Features: - 3213888: RAW CD format - Auto class accepts TSK_IMG_INFO as argument - Copies of split image file names are stored in TSK so that the caller can free them before TSK_IMG_INFO is freed. ---------------- VERSION 3.2.1 -------------- Bug Fixes - 3108272: fls arguments for -d and -u - 3105539: compile error issues because of SQlite and pthreads - 3173095: missing FAT files because of invalid dates. - 3184419: mingw compile errors. - 3191391: surround file name in quotes in mactime -d csv output New Features: - A single dummy entry is added to the SQlite DB if no volume exists so that all programs can assume that there will be at least one volume in the table. - 3184455: allow srcdir != builddir ---------------- VERSION 3.2.0 -------------- Bug Fixes - 3043092: Minor logic errors with ifind code. - FAT performance fix when looking for parent directories in $OrphanFiles. 
- 3052302: Crash on NTFS/UFS detection test because of corrupt data -- tsk_malloc error. - 3088447: Error adding attribute because of run collision. Solved by assigning unique IDs. New Features: - 3012324: Name mangling moved out of library into outer tools so that they can see control characters if they want to. Patch by Anthony Lawrence. - 2993806: ENUM values have a specified NONE value if you don't want to specify any special flags. Patch by Anthony Lawrence. - 3026989: Add -e and -s flags to img_cat. patch by Simson Garfinkel. - 2941805: Add case sensitive flag to fsstat in HFS. Patch by Rob Joyce. - 3017764: Changed how default NTFS $DATA attribute was named. Now it has no name, while it previously had a fake name of "$Data". - New TskAuto class. - New tsk_loaddb, tsk_recover, tsk_comparedir, and tsk_gettimes tools. ---------------- VERSION 3.1.3 -------------- Bug Fixes - 3006733: FAT directory listings were slow because the inner code was not stopping when it found the parent directory. - Adjusted sanity / testing code on FAT directory entries to allow non-ascii in extensions and reject entries with lots of 0s. - 3023606: Ext2 / ffs corrupted file names. - Applied NTFS SID fixes from Mandiant. - ntfs_load_secure() memory leak patch from Michael Cohen ---------------- VERSION 3.1.2 -------------- Bug Fixes - 2982426: FAT directory listings were slow because the entire image was being scanned for parent directory information. - 2982965: fs_attr length bug fix. - 2988619: mmls -B display error. - 2988330: ntfs SII cluster size increment bug - 2991487: Zeroed content in NTFS files that were not fully initialized. - 2993767: Slow FAT listings of OrphanFiles because hunt for parent directory resulted in many searches for OrphanFiles. Added cache of OrphanFiles. - 2999567: ifind was not stopping after first hit. - 2993804: read past end of file did not always return -1. 
---------------- VERSION 3.1.1 -------------- Bug Fixes - 2954703: ISO9660 missing files because duplicate files had same starting block. - 2954707: ISO9660 missing some files with zero length and duplicate starting block. Also changed behavior of how multiple volume descriptors are processed. - 2955898: Orphan files not found if no deleted file names exist. - 2955899: NTFS internal setting of USED flag. - 2972721: Sorter fails with hash lookup if '-l' is given. - 2941813: Reverse HFS case sensitive flags (internal fix only) - 2954448: Debian package typo fixes, etc. - 2975245: sorter ignores realloc entries to reduce misleading mismatch entries and duplicate entries. ---------------- VERSION 3.1.0 -------------- New Features and Changes - 2206285: HFS+ can now be read. Lots of tracker items about this. Thanks to Rob Joyce and ATC-NY for many of the patches and reports. - 2677069: DOS Safety Partitions in GPT Volume Systems are better detected instead of reporting multiple VSs. - Windows executables can be build in Visual Studio w/out needing other image format libraries. - 2367426: Uninitialized file space is shown if slack space is requested. - 2677107 All image formats supported by AFFLIB can be accessed by specifying the "afflib" type. - 2206265: sigfind can now process non-raw files. - 2206331: Indirect block addresses are now available in the library and command line tools. They are stored in a different attribute. - Removed 'docs' files and moved them to the wiki. - Removed disk_stat and disk_sreset because they were out of date and hdparm now has the same functionality. - 2874854: Image layer tools now support non-512 byte device sector sizes. Users can specify sector size using the -b argument to the command line tools. This has several consequences: -- 'mmls -b' is now 'mmls -B'. Similarly with istat -b. -- Changed command line format for '-o' so that sector size is specified only via -b and not using '-o 62@4096'. 
- 2874852: Sanity checking on partition table entries is relaxed and only first couple of partitions are checked to make sure that they can fit into the image. - 2895607: NTFS SID data is available in the library and 'istat'. - 2206341: AFF encrypted images now give more proper error message if password is not given. - 2351426: mactime is now distributed with Windows execs. Developer-level Changes - Abstracted name comparison to file system-specific function. - Added support in mactime to read body files with comment lines. - 2596153: Changed img_open arguments, similar to getopt(). - 2797169: tsk_fs_make_ls is now supported as an external library function. Now named tsk_fs_meta_make_ls. - 2908510: Nanosecond resolution of timestamps is now available. - 2914255: Version info is now available in .h files in both string and integer form. Bug Fixes: - 2568528: incorrect adjustment of attribute FILLER offset. - 2596397: Incorrect date sorting in mactime. - 2708195: Errors when doing long reads in fragmented attributes. - Fixed typo bugs in sorter (reported via e-mail by Drew Hunt). - 2734458: added orphan cache map to prevent slow NTFS listing times. - 2655831: Sorter now knows about the ext2 and ext3 types. - 2725799: ifind not converting UTF16 names properly on Windows because it was using endian ordering of file system and not local system. - 2662168: warning messages on macs when reading the raw character device. - 2778170: incorrect read size on resident attributes. - 2777633: missing second resolution on FAT creation times. - Added the READ_SHARE option to the CreateFile command for split image files. Patch by Christopher Siwy. - 2786963: NTFS compression infinite loop fix. - 2645156: FAT / blkls error getting slack because allocsize was being set too small (and other values were not being reset). - 2367426: Zeros are set for VDL slack on NTFS files. - 2796945: Infinite loop in fs_attr. - 2821031: Missing fls -m fields. 
- 2840345: Extended DOS partitions in extended partitions are now marked as Meta. - 2848162: Reading attributes at offsets that are on boundary of run fragment. - 2824457: Fixed issue reading last block of file system with blkcat. - 2891285: Fixed issue that prevented reads from the last block of a file system when using the POSIX-style API. - 2825690: Fixed issue that prevented blkls -A from working. - 2901365: Allow FAT files to have a 0 wdate. - 2900761: Added FAT directory sanity checks to prevent infinite loops. - 2895607: Fixed various memory leaks. - 2907248: Fixed image layer cache crash. - 2905750: all file system read() functions now return -1 when offset given is past end of file. ---------------- VERSION 3.0.1 -------------- 11/11/08: Bug Fix: Fixed crashing bug in ifind on FAT file system. Bug: 2265927 11/11/08: Bug Fix: Fixed crashing bug in istat on ExtX $OrphanFiles dir. Bug: 2266104 11/26/08: Update: Updated fls man page. 11/30/08: Update: Removed TODO file and using tracker for bugs and feature requests. 12/29/08: Bug Fix: Fixed incorrectly setting block status in file_walk for compressed files (Bug: 2475246) 12/29/08: Bug Fix: removed fs_info field from FS_META because it was not being set and should have been removed in 3.0. Reported by Rob Joyce and Judson Powers. 12/29/08: Bug Fix: orphan files and NTFS files found via parent directory have an unknown file name type (instead of being equal to meta type). (Bug: 2389901). Reported by Barry Grundy. 1/12/09: Bug Fix: Fixed ISO9660 bug where large directory contents were not displayed. (Bug: 2503552). Reported by Tom Black. 1/24/09: Bug Fix: Fixed bug 2534449 where extra NTFS files were shown if the MFT address was changed to 0 because fs_dir_add was checking the address and name. Reported by Andy Bontoft. 1/29/09: Update: Fixed fix for bug 2534449. The fix is in ifind instead of fs_dir_add(). 2/2/09: Update: Added RPM spec file from Morgan Weetmam. 
---------------- VERSION 3.0.0 -------------- 0/00/00: Update: Many, many, many API changes. 2/14/08: Update: Added mmcat tool. 2/26/08: Update: Added flags to mmls to specify partition types. 3/1/08: Update: Major update of man pages. 4/14/08: Bug Fix: Fixed the calculation of "actual" last block. Off by 1 error. Reported by steve. 5/23/08: Bug Fix: Incorrect malloc return check in srch_strings. reported by Petri Latvala. 5/29/08: Bug Fix: Fixed endian ordering bug in ISO9660 code. Reported by Eduardo Aguiar de Oliveira. 6/17/08: Update: 'sorter' now uses the ifind method for finding deleted NTFS files (like Autopsy) does instead of relying on fls. Reported by John Lehr. 6/17/08: Update: 'ifind -p' reports data on ADS. 7/10/08: Update: FAT looks for a backup boot sector in FAT32 if magic is 0 7/21/08: Bug Fix: Changed define of strcasecmp to _stricmp instead of _strnicmp in Windows. (reported by Darren Bilby). 7/21/08: Bug Fix: Fall back to open "\\.\" image files on Windows with SHARE_WRITE access so that drive devices can be opened. (reported by Darren Bilby). 8/20/08: Bug Fix: Look for Windows objects when opening files in Cygwin, not just Win32. Reported by Par Osterberg Medina. 8/21/08: Update: Renamed library and install header files to have a '3' in them to allow parallel installations of v2 and v3. Suggested by Simson Garfinkel. 8/22/08: Update: Added -b option to sorter to specify minimum file size to process. Suggested by Jeff Kell. 8/22/08: Update: Added libewf as a requirement to build win32 so that E01 files are supported. 8/29/08: Update: Added initial mingw patches for cross compiling and Windows. Patches by Michael Cohen. 9/X/08: Update: Added ability to access attributes 9/6/08: Update: Added image layer cache. 9/12/08: Bug Fix: Fixed crash from incorrectly cleared value in FS_DIR structure. Reported and patched by Jason Miller. 9/13/08: Update: Changed d* tool names to blk*. 
9/17/08: Update: Finished mingw support so that both tools and library work with Unicode file name support. 9/22/08: Update: Added new HFS+ code from Judson Powers and Rob Joyce (ATC-NY) 9/24/08: Bug Fix: Fixed some cygwin compile errors about types on Cygwin. Reported by Phil Peacock. 9/25/08: Bug Fix: Added O_BINARY to open() in raw and split because Cygwin was having problems. Reported by Mark Stam. 10/1/08: Update: Added ifndef to TSK_USE_HFS define to allow people to define it on the command line. Patch by RB. ---------------- VERSION 2.52 -------------- 2/12/08: Bug Fix: Fixed warning messages in mactime about non-Numeric data. Reported by Pope. 2/19/08: Bug Fix: Added #define to tsk_base_i.h to define LARGEFILE64_SOURCE based on LARGEFILE_SOURCE for older Linux systems. 2/20/08: Bug Fix: Updated afflib references and code. 3/13/08: Update: Added more fixes to auto* so that AFF will compile on more systems. I have confirmed that AFFLIB 3.1.3 will run with OS X 10.4.11. 3/14/08: Bug Fix: Added checks to FAT code that calcs size of directories. If starting cluster of deleted dir points into a cluster chain, then problems can occur. Reported by John Ward. 3/19/08: Update: I have verified that this compiles with libewf-20070512. 3/21/08: Bug Fix: Deleted Ext/FFS directories were not being recursed into. This case was rare (because typically the metadata are wiped), but possible. Reported by JWalker. 3/24/08: Update: I have verified that this compiles with libewf-20080322. Updates from Joachim Metz. 3/26/08: Update: Changed some of the header file design for the tools so that the define settings in tsk_config.h can be used (for large files). 3/28/08: Update: Added config.h reference to srch_strings to get the LARGEFILE support. 4/5/08: Update: Improved inode argument number parsing function. ---------------- VERSION 2.51 -------------- 1/30/08: Bug Fix: Fixed potential infinite loop in fls_lib.c. Patch by Nathaniel Pierce. 
2/7/08: Bug Fix: Defined some of the new constants that are used in disktools because older Linux distros did not define them. Reported by Russell Reynolds. 2/7/08: Bug Fix: Modified autoconf to check for large file build requirements and look for new 48-bit structures needed by disktools. Both of these were causing problems on older Linux distros. 2/7/08: Update: hfind will normalize hash values in database so that they are case insensitive. ---------------- VERSION 2.50 -------------- 12/19/07: Update: Finished upgrade to autotools building design. No longer include file, afflib, libewf. Resulted in many source code layout changes and sorter now searches for md5, sha1, etc. ---------------- VERSION 2.10 -------------- 7/12/07: Update: 0s are returned for AFF pages that were not imaged. 7/31/07: Bug Fix: ifind -p could crash if a deleted file name was found that did not point to a valid meta data structure. (Reported by Andy Bontoft) 8/5/07: Update: Added NSRL support back into sorter. 8/15/07: Update: Errors are given if supplied sector offset is larger than disk image. Reported by Simson Garfinkel. 8/16/07: Update: Renamed MD5 and SHA1 functions to TSK_MD5_.. and TSK_SHA_.... 8/16/07: Update: tsk_error_get() does not reset the error messages. 9/26/07: Bug Fix: Changed FATFS check for valid dentries to consider second values of 30. Reported by Alessandro Camillo. 10/18/07: Update: inode_walk for NTFS and FAT will not abort if data corruption is found in one entry -- instead they will just skip it. 10/18/07: Update: tsk_os.h uses standard gcc system names instead of TSK specific ones. 10/18/07: Update: Updated raw.c to use ioctl commands on OS X to get size of raw device because it does not work with SEEK_END. Patch by Rob Joyce. 10/31/07: Update: Finished upgrade to fatfs_file_walk_off so that walking can start at a specific offset. Also finished upgrade that caches FAT run list to make the fatfs_file_walk_off more efficient. 
11/14/07: Update: Fixed few places where off_t was being used instead of OFF_T. Reported by GiHan Kim. 11/14/07: Update: Fixed a memory leak in aff.c to free AFF_INFO. Reported by GiHan Kim. 11/24/07: Update: Finished review and update of ISO9660 code. 11/26/07: Bug Fix: Fixed 64-bit calculation in HFS+ code. Submitted by Rob Joyce. 11/29/07: Update: removed linking of srch_strings.c and libtsk. Reported by kwizart. 11/30/07: Update: Made a #define TSK_USE_HFS compile flag for incorporating the HFS support (flag is in src/fstools/fs_tools_i.h) 11/30/07: Update: restricted the FAT dentry sanity checks to verify space padding in the name and latin-only extensions. 12/5/07: Bug Fix: fs_read_file_int had a bug that ignored the type passed for NTFS files. Reported by Dave Collett. 12/12/07: Update: Changed the FAT dentry sanity checks to allow spaces in volume labels and do more checking on the attribute flag. ---------------- VERSION 2.09 -------------- 4/6/07: Bug Fix: Infinite loop in ext2 and ffs istat code because of using unsigned size_t variable. Reported by Makoto Shiotsuki. 4/16/07: Bug Fix: Changed use of fseek() to fseeko() in hashtools. Patch by Andy Bontoft. 4/16/07: Bug Fix: Changed Win32 SetFilePointer to use LARGE_INTEGER. Reported by Kim GiHan. 4/19/07: Bug Fix: Not all FAT orphan files were being found because of an offset error. 4/26/07: Bug Fix: ils -O was not working (link value not being checked). Reported by Christian Perst. 4/27/07: Bug Fix: ils -r was showing UNUSED inodes. Reported by Christian Perst. 5/10/07: Update: Redefined the USED and UNUSED flags for NTFS so that UNUSED is set when no attributes exist. 5/16/07: Bug Fix: Fixed several bounds checking bugs that may cause a crash if the disk image is corrupt. 
Reported by Tim Newsham (iSec Partners) 5/17/07: Update: Updated AFFLIB to 2.2.11 5/17/07: Update: Updated libewf to libewf-20070512 5/17/07: Update: Updated file to 4.20 5/29/07: Update: Removed NTFS SID/SDS contributed code because it causes crashes on some systems and its output is not entirely clear. (most recent bug reported by Andy Scott) 6/11/07: Update: Updated AFFLIB to 2.2.12. 6/12/07: Bug Fix: ifind -p was not reporting back info on the allocated name when one existed (because strtok was overwriting the name when the search continued). Reported by Andy Bontoft. 6/13/07: Update: Updated file to 4.21 ---------------- VERSION 2.08 -------------- 12/19/06: Bug Fix: ifind_path was not setting *result when root inode was searched for. patch by David Collett. 12/29/06: Update: Removed 'strncpy' in ntfs.c to manual assignment of text for '$Data' and 'N/A' for performance reasons. 1/11/07: Update: Added duname to FS_INFO that contains a string of name for a file system's data unit -- Cluster for example. 1/19/07: Bug Fix: ifind_path was returning an error even after some files were found. Errors are now ignored if a file was found. Reported by Michael Cohen. 1/26/07: Bug Fix: Fixed calculation of inode numbers in fatfs.c (reported by Simson Garfinkel). 2/1/07: Update: Changed aff-install to support symlinked directory. 2/1/07: Update: img_open modified so that it does not report errors for s3:// and http:// files that do not exist. 2/5/07: Update: updated *_read() return values to look for "<0" instead of simply "== -1". (suggested by Simson Garfinkel). 2/8/07: Update: removed typedef for uintptr in WIN32 code. 2/13/07: Update: Applied patch from Kim Kulak to update HFS+ code to internal design changes. 2/16/07: Update: Renamed many of the external data structures and flags so that they start with TSK_ or tsk_ to prevent name collisions. 
2/16/07: Update: Moved MD5 and SHA1 routines and binaries to auxtools instead of hashtools so that they are more easy to access. 2/16/07: Update: started redesign and port of hashtools. 2/21/07: Update: Changed inode_walk callback API to remove the flags variable -- this was redundant since flags are also in TSK_FS_INODE. Same for TSK_FS_DENT. 3/7/07: Bug Fix: fs_read_file failed for NTFS resident files. Reported by Michael Cohen. 3/8/07: Bug Fix: FATFS assumed a 512-byte sector in a couple of locations. 3/13/07: Update: Finished hashtools update. 3/13/07: Update: dcat reads block by block instead of all at once. 3/23/07: Update: Change ntfs_load_secure to allocate all of its needed memory at once instead of doing reallocs. 3/23/07: Update: Updated AFFLIB to 2.2.0 3/24/07: Bug Fix: Fixed many locations where return value from strtoull was not being properly checked and therefore invalid numbers were not being detected. 3/24/07: Bug Fix: A couple of error messages in ntfs_file_walk should have been converted to _RECOVER when the _RECOVERY flag was given. 3/24/07: Update: Changed behavior of ntfs_file_walk. If no type is given, then a default type is chosen for files and dirs. Now, no error is generated if that type does not exist -- similar to how no error is generated if a FAT file has 0 file size. 3/26/07: Update: cleaned up and documented fs_data code more. 3/29/07: Update: Updated AFF to 2.2.2. 3/29/07: Update: Updated install scripts for afflib, libewf, and file to touch files so that the auto* files are in the correct time stamp order. 4/5/07: Bug Fix: Added sanity checks to offsets and addresses in ExtX and UFS group descriptors. Reported by Simson Garfinkel. ---------------- VERSION 2.07 -------------- 9/6/06: Update: Changed TCHAR and _T to TSK_TCHAR and _TSK_T to avoid conflicts with other libraries. 9/18/06: Update: Added tsk_list_* functions and structures. 9/18/06: Update: Added checks for recursive FAT directories. 
9/20/06: Update: Changed FS_META_* flags for LINK and UNLINK and moved them to ILS_? flags. 9/20/06: Update: added flags to ils to find only orphan inodes. 9/20/06: Update: Added Orphan support for FAT, NTFS, UFS, Ext2, ISO. 9/20/06: Update: File walk actions now have a flag to identify if a block is SPARSE or not (used to identify if the address being passed is valid or made up). 9/21/06: Update: Added file size sanity check to fatfs_is_dentry and fixed assignment of fatfs->clustcnt. 9/21/06: Update: block_, inode, and dent_walk functions now do more flag checking and make sure that some things are set instead of making the calling code do it. 9/21/06: Update: Added checks for recursive (infinite loop) NTFS, UFS, ExtX, and ISO9660 directories. 9/21/06: Update Added checks to make sure that walking the FAT for files and directories would result in an infinite loop (if FAT is corrupt). 9/21/06: Update: Added -a and -A to dls to specify allocated and unallocated blocks to display. 9/21/06: Update: Updated AFFLIB to 1.6.31. 9/22/06: Update: added a fs_read_file() function that allows you to read random parts of a file. 10/10/06: Update: Improved performance of fs_read_file() and added new FS_FLAG_META_COMP and FS_FLAG_DATA_COMP flags to show if a file and data are using file system-level compression (NTFS only). 10/18/06: Bug fix: in fs_data_put_run, added a check to see if the head was null before looking up. An extra error message was being created for nothing. 10/18/06: Bug Fix: Added a check to the compression buffer to see if it is null in _done(). 10/25/06: Bug Fix: Added some more bounds checks to NTFS uncompression code. 11/3/06: Bug Fix: added check to dcat_lib in case the number of blocks requested is too large. 11/07/06: Update: Added fs_read_file_noid wrapper around fs_read_file interface. 11/09/06: Update: Updated AFF to 1.7.1 11/17/06: Update: Updated libewf to 20061008-1 11/17/06: Bug Fix: Fixed attribute lookup bug in fs_data_lookup. 
Patch by David Collett. 11/21/06: Bug Fix: Fixed fs_data loops that were stopping when they hit an unused attribute. Patch by David Collett. 11/21/06: Bug Fix: sorter no longer clears the path when it starts. THis was causing errors on Cygwin because OpenSSL libraries could not be found. 11/22/06: Update: Added a tskGetVersion() function to return the string of the current version. 11/29/06: Update: Added more tsk_error_resets to more places to prevent extra error messages from being displayed. 11/30/06: Update: Added Caching to the getFAT function and to fs_read. 12/1/06: Update: Changed TSK_LIST to a reverse sorted list of buckets. 12/5/06: Bug Fix: Fixed FS_DATA_INUSE infinite loop bug. 12/5/06: Bug Fix: Fixed infinite loop bug with NTFS decompression code. 12/5/06: Update: Added NULL check to fs_inode_free (from Michael Cohen). 12/5/06: Update: Updated ifind_path so that an allocated name will be shown if one exists -- do not exit if we find simply an unallocated entry with an address of 0. Suggested by David Collett. 12/6/06: Update: Updated file to version 4.18. 12/6/06: Update: Updated libaff to 2.0a10 and changed build process accordingly. 12/7/06: Update: Added a tsk_error_get() function that returns a string with the error messages -- can be used instead of tsk_error_print. 12/7/06: Update: fixed some memory leaks in FAT and NTFS code. 12/11/06: Bug Fix: fatfs_open error message code referenced a value that was in freed memory -- reordered statements. 12/15/06: Update: Include VCProj files in build. ---------------- VERSION 2.06 -------------- 8/11/06: Bug Fix: Added back in ASCII/UTF-8 checks to remove control characters in file names. 8/11/06: Bug Fix: Added support for fast sym links in UFS1 8/11/06: Update: Redesigned the endian support so that getuX takes only the endian flag so that the Unicode design could be changed as well. 8/11/06: Update: Redesigned the Unicode support so that there is a tsk_UTF... routine instead of fs_UTF... 
8/11/06: Update: Updated GPT to fully convert UTF16 to UTF8. 8/11/06: Update: There is now only one aux_tools header file to include instead of libauxtools and/or aux_lib, which were nearly identical. 8/16/06: Bug Fix: ntfs_dent_walk could segfault if two consecutive unallocated entries were found that had an MFT entry address of 0. Reported by Robert-Jan Mora. 8/16/06: Update: Changed a lot of the header files and reduced them so that it is easier to use the library and only one header file needs to be included. 8/21/06: Update: mmtools had char * instead of void * for walk callback 8/22/06: Update: Added fs_load_file function that returns a buffer full with the contents of a file. 8/23/06: Update: Upgraded AFFLIB to 1.6.31 and libewf to 20060820-1. 8/25/06: Update: Created printf wrappers so that output is UTF-16 on Windows and UTF-8 on Unix. 8/25/06: Update: Continued port to Windows by starting to use more TCHARS and defining needed macros for the Unix side. 8/25/06: Bug Fix: Fixed crash that could occur because of SDS code in NTFS. (reported by Simson Garfinkel) (BUG: 1546925). 8/25/06: Bug Fix: Fixed crash that could occur because path stack became corrupt with deep directories or corrupt images. (reported by Simson Garfinkel) (BUG: 1546926). 8/25/06: Bug Fix: Fixed infinite loop that could occur when trying to determine size of FAT directory when the FAT has a loop in it. (BUG: 1546929) 8/25/06: Update: Improved FAT checking code to look for '.' and '..' entries when inode value is replaced during dent_walk. 8/29/06: Update: Finished Win32 port and changes to handle UTF-16 vs UTF-8 inputs. 8/29/06: Update: Created a parse_inum function to handle parsing inode addresses from command line. 8/30/06: Update: Made progname a local variable instead of global. 8/31/06: Bug Fix: Fixed a sizeof() error with the memset in fatfs_inode_walk for the sect_alloc buffer. 
8/31/06: Update: if mktime in dos2unixtime returns any negative value, then the return value is set to 0. Windows and glibc seem to have different return values. ---------------- VERSION 2.05 -------------- 5/15/06: Bug Fix: Fixed a bug in img_cat that could cause it to go into an infinite loop. (BUG: 1489284) 5/16/06: Update: Fixed printf statements in tsk_error.c that caused warning messages for some compilers. Reported by Jason DePriest. 5/17/06: Update: created a union of file system-specific file times in FS_INFO (Patch by Wyatt Banks) 5/22/06: Bug Fix: Updated libewf to 20060520 to fix bug with reported image size. (BUG: 1489287) 5/22/06: Bug Fix: Updated AFFLIB to 1.6.24 so that TSK could compile in CYGWIN. (BUG: 1493013) 5/22/06: Update: Fixed some more printf statements that were causing compile warnings. 5/23/06: Update: Added a file existence check to img_open to make error message more accurate. 5/23/06: Update: Usage messages had extra "Supported image types message". 5/25/06: Update: Added block / page range to fsstat for raw and swapfs. 6/5/06: Update: fixed some typos in the output messages of sigfind (reported by Jelle Smet) 6/9/06: Update: Added HFS+ template to sigfind (Patch by Wyatt Banks) 6/9/06: Update: Added ntfs and HFS template to sigfind. 6/19/06: Update: Begin Windows Visual Studio port 6/22/06: Update: Updated a myflags check in ntfs.c (reported by Wyatt Banks) 6/28/06: Update: Incorporated NTFS compression patch from I.D.E.A.L. 6/28/06: Update: Incorporated NTFS SID patch from I.D.E.A.L. 6/28/06: Bug Fix: A segfault could occur with NTFS if no inode was loaded in the dent_walk code. (Reported by Pope). 7/5/06: Update: Added tsk_error_reset function and updated code to use it. 7/5/06: Update: Added more sanity checks to the DOS partitions code. 7/10/06: Update: Upgraded libewf to version 20060708. 
7/10/06: Update: Upgraded AFFLIB to version 1.6.28 7/10/06: Update: added 'list' option to usage message so that file system, image, volume system types are listed only if '-x list' is given. Suggested by kenshin. 7/10/06: Update: Compressed NTFS files use the compression unit size specified in the header. 7/10/06: Update: Added -R flag to icat to suppress recovery warnings and use this flag in sorter to prevent FAT recovery messages from filling up screen. 7/10/06: Update: file_walk functions now return FS_ERR_RECOVERY error codes for most cases if the RECOVERY flag is set -- this allows the errors to be more easily suppressed. 7/12/06: Update: Removed individual libraries and now make a single static libtsk.a library. 7/12/06: Update: Cleaned up top-level Makefile. Use '-C' flag (suggested by kenshin). 7/14/06: Update: Fixed and redesigned some of the new NTFS compression code. Changed variable names. 7/20/06: Update: Fixed an NTFS compression bug if a sub-block was not compressed. 7/21/06: Update: Made NTFS compression code thread friendly. ---------------- VERSION 2.04 -------------- 12/1/05: Bug Fix: Fixed a bug in the verbose output of img_open that would crash if no type or offset was given. Reported and patched by Wyatt Banks. 12/20/05: Bug Fix: An NTFS directory index sanity check used 356 instead of 365 when calculating an upper bound on the times. Reported by Wyatt Banks. 12/23/05: Bug Fix: Two printf statements in istat for NTFS printed to stdout instead of a specific file handle. Reported by Wyatt Banks. 1/22/06: Bug Fix: fsstat, imgstat and dcalc were using a char instead of int for the return value of getopt, which caused some systems to not execute the programs. (internal fix and later reported by Bernhard Reiter) 2/23/06: Update: added support for FreeBSD 6. 2/27/06: Bug Fix: Indirect blocks would not be found by ifind with UFS and Ext2. Reported by Nelson G. Mejias-Diaz. (BUG: 1440075) 3/9/06: Update: Added AFF image file support. 
3/14/06: Bug Fix: If the first directory entry of a UFS or ExtX block was unallocated, then later entries may not be shown. Reported by John Langezaal. (BUG: 1449655) 4/3/06: Update: Finished the improved error handling. Many internal changes, not many external changes. error() function no longer used and instead tsk_err variables and function are used. This makes the library more powerful. 4/5/06: Update: The byte offset for a volume is now passed to the mm_ and fs_ functions instead of img_open. This allows img_info to be used for multiple volumes at the same time. This required some mm_ changes. 4/5/06: Update: All TSK libraries are written to the lib directory. 4/6/06: Update: Added FS_FLAG_DATA_RES flag to identify data that are resident in ntfs_data_walk (suggested by Michael Cohen). 4/6/06: Update: The partition code (media Management) now checks that a partition starts before the end of the image file. There are currently no checks about the end of the partition though. 4/6/06: Update: The media management code now shows unpartitioned space as such from the end of the last partition to the end of the image file (using the image file size). (Suggested by Wyatt Banks). 4/7/06: Update: New version of ISO9660 code from Wyatt Banks and Crucial Security added and other code updated to allow CDs to be analyzed. 4/7/06: There was a conflict with guessuXX with mmtools and fstools. Renamed to mm_guessXX and fs_guessXX. 4/10/06: Upgraded AFFLIB to 1.5.6 4/12/06: Added version of libewf and support for it in imgtools 4/13/06: Added new img_cat tool to extract raw data from an image format. 4/24/06: Upgraded AFFLIB to 1.5.12 4/24/06: split and raw check if the image is a directory 4/24/06: Updated libewf to 20060423-1 4/26/06: Updated makedefs to work with SunOS 5.10 5/3/06: Added iso9660 patch from Wyatt Banks so that version number is not printed with file name. 
5/4/06: Updated error checking in icat, istat, fatfs_dent, and ntfs_dent 5/8/06: Updated libewf to 20060505-1 to fix some gcc 2 compile errors. 5/9/06: Updated AFFLIB to 1.6.18 5/11/06: Cleaned up error handling (removed %m and unused legacy code) 5/11/06: Updated AFFLIB to 1.6.23 ---------------- VERSION 2.03 -------------- 7/26/05: Update: Removed incorrect print_version() statement from fs_tools.h (reported by Jaime Chang) 7/26/05: Update: Renamed libraries to start with "lib" 7/26/05: Update: Removed the logfp variable for verbose statements and instead use only stderr. 8/12/05: Update: If time is 0, then it is put as 00:00:00 instead of the default 1970 or 1980 time. 8/13/05: Update: Added Unicode support for FAT and NTFS (Supported by I.D.E.A.L. Technology Corp). 9/2/05: Update: Added Unicode support for UFS and ExtX. Non-printable ASCII characters are no longer replaced with '^.'. 9/2/05: Update: Improved the directory entry sanity checks for UFS and ExtX. 9/2/05: Update: Upgraded file to version 4.15. 9/2/05: Update: The dent_walk code of all file systems does not abort if a sub-directory is encountered with an error. If it is the top directory explicitly called, then it still gives an error. 9/2/05: Bug Fix: MD5 and SHA-1 values were incorrect under AMD64 systems because the incorrect variable sizes were being used. (reported by: Regis Friend Cassidy. BUG: 1280966) 9/2/05: Update: Changed all licenses in TSK to Common Public License (except those that were already IBM Public License). 9/15/05: Bug Fix: The Unicode names would not be displayed if the FAT short name entry was using code pages. The ASCII name check was removed, which may lead to more false positives during inode_walk. 10/05/05: Update: improved the sector size check when the FAT boot sector is read (check for specific values besides just mod 512). 10/12/05: Update: The ASCII name check was added back into FAT, but the check no longer looks for values over 0x80. 
10/12/05: Update: The inode_walk function in FAT skips clusters that are allocated to files. This makes it much faster, but it will now not find unallocated directory entries in the slack space of allocated files. 10/13/05: Update: sorter updated to handle unicode in HTML output. ---------------- VERSION 2.02 -------------- 4/27/05: Bug Fix: the sizes of 'id' were not consistent in the front-end and library functions for icat and ffind. Reported by John Ward. 5/16/05: Bug Fix: fls could segfault in FAT if short name did not exist. There was also a bug where the long file name variable (fatfs->lfn_len) was not reset after processing a directory and the next entry could incorrectly get the long name. Reported by Jaime Chang. BUG: 1203673. 5/18/05: Update: Updated makedefs to support Darwin 8 (OS X Tiger) 5/23/05: Bug Fix: ntfs_dent_walk would not always stop when WALK_STOP was returned. This caused some issues with previous versions of ifind. This was fixed. 5/24/05: Bug Fix: Would not compile under Suse because it had header file conflicts for the size of int64_t. Reported by: Andrea Ghirardini. BUG: 1203676 5/25/05: Update: Fixed some memory leaks in fstools (reported by Jaime Chang). 6/13/05: Update: Compiled with g++ to get better warning messages. Fixed many signed versus unsigned comparisons, -1 assignments to unsigned vars, and some other minor internal issues. 6/13/05: Bug Fix: if UFS or FFS found a valid dentry in unallocated space, it could have a documented length that is larger than the remaining unallocated space. This would cause an allocated name to be skipped. BUG: 1210204 Reported by Christopher Betz. 6/13/05: Update: Improved design of all dent code so that there are no more global variables. 6/13/05: Update: Improved design of FAT dent code so that FATFS_INFO does not keep track of long file name information. 6/13/05: Bug Fix: If a cluster in a directory started with a strange dentry, then FAT inode_walk would skip it. 
The fix is to make sure that all directory sectors are processed. (BUG: 1203669). Reported by Jaime Chang. 6/14/05: Update: Changed design of FS_INODE so that it contains the inode address and the inode_walk action was changed to remove inum as an argument. 6/15/05: Update: Added 'ils -o' back in as 'ils -O' to list open and deleted files. 6/15/05: Update: Added '-m' flag to mactime so that it prints the month as a number instead of its name. 7/2/05: Bug Fix: If an NTFS file did not have a $DATA or $IDX_* attribute, then fls would not print it. The file had no content, but the name should be shown. (BUG: 1231515) (Reported by Fuerst) ---------------- VERSION 2.01 -------------- 3/24/05: Bug Fix: ffind would fail if the directory had two non-printable chars. The handling of non-printable chars was changed to replace with '^.'. (BUG: 1170310) (reported by Brian Baskin) 3/24/05: Bug Fix: icat would not print the output to stdout when split images were used. There was a bug in the image closing process of icat. (BUG: 1170309) (reported by Brian Baskin) 3/24/05: Update: Changed the header files in fstools to make fs_lib.h more self contained. 4/1/05: Bug Fix: Imgtools byte offset with many leading 0s could cause issues. (BUG: 1174977) 4/1/05: Update: Removed test check in mmtools/dos.c for value cluster size because too many partition tables have that as a valid field. Now it checks only OEM name. 4/8/05: Update: Updated usage of 'strtoul' to 'strtoull' for blocks and inodes. ---------------- VERSION 2.00 -------------- 1/6/05: Update: Added '-b' flag to 'mmls' so that sizes can be printed in bytes. Suggested and a patch proposed by Matt Kucenski 1/6/05: Update: Define DADDR_T, INUM_T, OFF_T, PNUM_T as a static size and use those to store values in data structures. Updated print statements as well. 1/6/05: Update: FAT now supports larger images because the inode address space is 64-bits. 1/6/05: Moved guess and get functions to misc from mmtools and fstools. 
1/7/05: Update: Added imgtools with support for "raw" and "split" layers. All fstools have been updated. 1/7/05: Update: removed dtime from ils output 1/9/05: Update: FAT code reads in clusters instead of sectors to be faster (suggested by David Collett) 1/9/05: Update: mmtools uses imgtools for split images etc. 1/10/05: Update: Removed usage of global variables when using file_walk internally. 1/10/05: Update: mmls BSD will use the next sector automatically if the wrong one is given instead of giving an error. 1/10/05: Update: Updated file to version 4.12 1/11/05: Update: Added autodetect to file system tools. 1/11/05: Update: Changed names to specify file system type (not OS-based) 1/11/05: Update: Added '-t' option to fsstat to give just the type. 1/11/05: Update: Added autodetect to mmls 1/17/05: Update: Added the 'mmstat' tool that gives the type of volume system. 1/17/05: Update: Now using CVS for local version control - added date stamps to all files. 2/20/05: Bug Fix: ils / istat would go into an infinite loop if the attribute list had an entry with a length of 0. Reported by Angus Marshall (BUG: 1144846) 3/2/05: Update: non-printable letters in ExtX/UFS file names are now replaced by a '.' 3/2/05: Update: Made file system tools more library friendly by making stubs for each application. 3/4/05: Update: Redesigned the diskstat tool and created the disksreset tool to remove the HPA temporarily. 3/4/05: Update: Added imgstat tool that displays image format details 3/7/05: Bug Fix: In fsstat on ExtX, the final group would have an incorrect _percentage_ of free blocks value (although the actual number was correct). Reported by Knut Eckstein. (BUG: 1158620) 3/11/05: Update: Renamed diskstat, disksreset, sstrings, and imgstat to disk_stat, disk_sreset, srch_strings, and img_stat to make the names more clear. 3/13/05: Bug Fix: The verbose output for fatfs_file_walk had an incorrect sector address. Reported by Rudolph Pereira. 
3/13/05: Bug Fix: The beta version had compiling problems on FreeBSD because of a naming clash with the new 'fls' functions. (reported by secman) ---------------- VERSION 1.74 -------------- 11/18/04: Bug Fix: FreeBSD 5 would produce incorrect 'icat' output for Ext2/3 & UFS1 images because it used a 64-bit on-disk address. reported by neutrino neutrino. (BUG: 1068771) 11/30/04: Bug Fix: The makefile in disktools would generate an error on some systems (Cygwin) because of an extra entry. Reported by Vajira Ganepola (BUG: 1076029) ---------------- VERSION 1.73 -------------- 09/09/04: Update: Added journal support for EXT3FS and added jls and jcat tools. 09/13/04: Updated: Added the major and minor device numbers to EXTxFS istat. 09/13/04: Update: Added EXTxFS orphan code to 'fsstat' 09/24/04: Update: Fixed incorrect usage of 'ptr' and "" in action of ntfs_dent.c. Did not affect any code, but could have in the future. Reported by Pete Winkler. 09/25/04: Update: Added UFS flags to fsstat 09/26/04: Update: All fragments are printed for indirect block pointer addresses in UFS istat. 09/29/04: Update: Print extended UFS2 attributes in 'istat' 10/07/04: Bug Fix: Changed usage of (int) to (uintptr_t) for pointer arithmetic. Caused issues with Debian Sarge. (BUG: 1049352) - turned out to be from changes made to package version so that it would compile in 64-bit system (BUG: 928278). 10/11/04: Update: Added diskstat to check for HPA on linux systems. 10/13/04: Update: Added root directory location to FAT32 fsstat output 10/17/04: Bug Fix: EXTxFS superblock location would not be printed for images in fsstat that did not have sparse superblock (which is rare) (BUG: 1049355) 10/17/04: Update: Added sigfind tool to find binary signatures. 10/27/04: Bug Fix: NTFS is_clust_alloc returned an error when loading $MFT that had attribute list entry. Now I assume that clusters referred to by the $MFT are allocated until the $MFT is loaded. (BUG: 1055862). 
10/28/04: Bug Fix: Check to see if an attribute with the same name exists instead of relying on id only. (ntfs_proc_attrseq) Affects the processing of attribute lists. Reported by Szakacsits Szabolcs, Matt Kucenski, & Gene Meltser (BUG: 1055862) 10/28/04: Update: Removed usage of mylseek in fstools for all systems (Bug: 928278) ---------------- VERSION 1.72 -------------- 07/31/04: Update: Added flag to mft_lookup so that ifind can run in noabort mode and it will not stop when it finds an invalid magic value. 08/01/04: Update: Removed previous change and removed MAGIC check entirely. XP doesn't even care if the Magic is corrupt, so neither does TSK. The update sequence check should find an invalid MFT entry. 08/01/04: Update: Added error message to 'ifind' if none of the search options are given. 08/05/04: Bug Fix: Fixed g_curdirptr recursive error by clearing the value when dent_walk had to abort because a deleted directory could not be recovered. (BUG: 1004329) Reported by epsilon@yahoo.com 08/16/04: Update: Added a sanity check to fatfs.c fat2unixtime to check if the year is > 137 (which is the overflow date for the 32-bit UNIX time). 08/16/04: Update: Added first version of sstrings from binutils-2.15 08/20/04: Bug Fix: Fixed a bug where the group number for block 0 of an EXT2FS file system would report -1. 'dstat' no longer displays value when it is not part of a block group. (BUG: 1013227) 8/24/04: Update: If an attribute list entry is found with an invalid MFT entry address, then it is ignored instead of an error being generated and exiting. 8/26/04: Update: Changed internal design of NTFS to make is_clust_alloc 8/26/04: Update: If an attribute list entry is found with an invalid MFT entry address AND the entry is unallocated, then no error message is printed, it is just ignored or logged in verbose mode. 
8/29/04: Update: Added support for 32-bit GID and UID in EXTxFS 8/30/04: Bug Fix: ntfs_dent_walk was adding 24 extra bytes to the size of the index record for the final record processing (calc of list_len) (BUG: 1019321) (reported and debugging help from Matt Kucenski). 8/30/04: Bug Fix: fs_data_lookup was using an id of 0 as a wild card, but 0 is a legit id value and this could cause confusion. To solve this, a new FS_FLAG_FILE_NOID flag was added and a new fs_data_lookup_noid function that will not use the id to lookup values. (BUG: 1019690) (reported and debugging help from Matt Kucenski) 8/30/04: Update: modified fs_data_lookup_noid to return unnamed data attribute if that type is requested (instead of just relying on id value in attributes) 8/31/04: Update: Updated file to v4.10, which seems to fix the CYGWIN compile problem. 9/1/04: Update: Added more DOS partition types to mmls (submitted by Matt Kucenski) 9/2/04: Update: Added EXT3FS extended attributes and Posix ACL to istat output. 9/2/04: Update: Added free inode and block counts per group to fsstat for EXT2FS. 9/7/04: Bug Fix: FreeBSD compile error for PRIx printf stuff in mmtools/gpt.c ---------------- VERSION 1.71 -------------- 06/05/04: Update: Added sanity checks in fat to unix time conversion so that invalid times are set to 0. 06/08/04: Bug Fix: Added a type cast when size is assigned in FAT and removed the assignment to a 32-bit signed variable (which was no longer needed). (Bug: 966839) 06/09/04: Bug Fix: Added a type cast to the 'getuX' macros because some compilers were assuming it was signed (Bug: 966839). 06/11/04: Update: Changed NTFS magic check to use the aa55 at the end and fixed the name of the original "magic" value to oemname. The oemname is now printed in fsstat. 06/12/04: Bug Fix: The NTFS serial number was being printed with bytes in the wrong order in the fsstat output. (BUG: 972207) 06/12/04: Update: The begin offset value in index header for NTFS was 16-bits instead of 32-bits. 
06/22/04: Update: Created a library for the MD5 and SHA1 functions so that it can be incorporated into other tools. Also renamed some of the indexing tools that hfind uses. 06/23/04: Update: Changed output of 'istat' for NTFS images. Added more data from $STANDARD_INFORMATION. 07/13/04: Update: Changed output of 'istat' for NTFS images again. Moved more data to the $FILE_NAME section and added new data. 07/13/04: Update: Changed code for processing NTFS runs and no longer check for the offset to be 0 in ntfs_make_data_run(). This could have prevented some sparse files from being processed. 07/13/04: Update: Added flags for compressed and encrypted NTFS files. They are not decrypted or uncompressed yet, just identified. They cannot be displayed from 'icat', but the known layout is given in 'istat'. 07/18/04: Bug Fix: Sometimes, 'icat' would report an error about an existing FILLER entry in an NTFS attribute. This was traced to instances when it was run on a non-base file record. There is now a check for that to not show the error. (BUG: 993459) 07/19/04: Bug Fix: A run of -1 may exist for sparse files in non-NT versions of NTFS. Changed check for this. reported by Matthew Kucenski. (BUG: 994024). 07/24/04: Bug Fix: NTFS attribute names were missing (rarely) on some files because the code assumed they would always be at offset 64 for non-res attributes (Bug: 996981). 07/24/04: Update: Made listing of unallocated NTFS file names less strict. There was a check for file name length versus stream length. 07/24/04: Update: Added $OBJECT_ID output to 'istat' 07/24/04: Update: Fixed ntfs.c compile warning about constant too large in time conversion code. 07/25/04: Update: Added attribute list contents to NTFS 'istat' output 07/25/04: Bug Fix: Not all slack space was being shown with 'dls -s'. It was documented that this occurs, but it is not what would be expected. (BUG: 997800). 
07/25/04: Update: Changed output format of 'dls -s' so that it sends zeros where the file content was. Therefore the output is now a multiple of the data unit size. Also removed limitation to FAT & NTFS. 07/25/04: Update: 'dcalc' now has the '-s' option to calculate the original location of data from a slack space image (dls -s). (from Chris Betz). 07/26/04: Update: Created the fs_os.h file and adjusted some of the header files for the PRI macros (C99). Created defines for OSes that do not have the macros already defined. 07/26/04: Non-release bug fix: Fixed file record size bug introduced with recent changes. 07/27/04: Update: Added GPT support to mmls. 07/29/04: Update: Added '-p' flag to 'ifind' to find deleted NTFS files that point to the given parent directory. Added '-l and -z' as well. ---------------- VERSION 1.70 -------------- 04/21/04: Update: Changed attribute and mode for FAT 'istat' so that actual FAT attributes are used instead of UNIX translation. 04/21/04: Update: The FAT 'istat' output better handles Long File Name entry 04/21/04: Update: The FAT 'istat' output better handles Volume Label entry 04/21/04: Update: Allowed the FAT volume label entry to be displayed with 'ils' 04/21/04: Update: Allowed the FAT volume label entry to be displayed with 'fls' 04/24/04: Update: 'dstat' on a FAT cluster now shows the cluster address in addition to the sector address. 04/24/04: Update: Added the cluster range to the FAT 'fsstat' output 05/01/04: Update: Improved the FAT version autodetect code. 05/02/04: Update: Removed 'H' flag from 'icat'. 05/02/04: Update: Changed all of the FS_FLAG_XXX variables in the file system tools to constants that are specific to the usage (NAME, DATA, META, FILE). 05/03/04: Update: fatfs_inode_walk now goes by sectors instead of clusters to get more dentries from slack space. 05/03/04: Bug Fix: The allocation status of FAT dentries was set only by the flag and not the allocation status of the cluster it is located in. 
(BUG: 947112) 05/03/04: Update: Improved comments and variable names in FAT code 05/03/04: Update: Added '-r' flag to 'icat' for deleted file recovery 05/03/04: Update: Added RECOVERY flag to file_walk for deleted file recovery 05/03/04: Update: Added FAT file recovery. 05/03/04: Update: Removed '-H' flag from 'icat'. Default is to display holes. 05/03/04: Update: 'fls -r' will recurse down deleted directories in FAT 05/03/04: Update: 'fsstat' reports FAT clusters that are marked as BAD 05/03/04: Update: 'istat' for FAT now shows recovery clusters for deleted files. 05/04/04: Update: Added output to 'fsstat' for FAT file systems by adding a list of BAD sectors and improving the amount of layout information. I also changed some of the internal variables. 05/08/04: Update: Removed addr_bsize from FS_INFO, moved block_frags to FFS_INFO, modified dcat output only data unit size. 05/20/04: Update: Added RECOVERY flag to 'ifind' so that it can find the data units that are allocated to deleted files 05/20/04: Update: Added icat recovery options to 'sorter'. 05/20/04: Update: Improved the naming convention in sorter for the 'ils' dead files. 05/21/04: Update: Added outlook to sorter rules (from David Berger) 05/27/04: Bug Fix: Added to mylseek.c so that it compiles with Fedora Core 2 (Patch by Angus Marshall) (BUG: 961908). 05/27/04: Update: Changed the letter with 'fls -l' for FIFO to 'p' instead of 'f' (reported by Dave Henkewick). 05/28/04: Update: Added '-u' flag to 'dcat' so that the data unit size can be specified for raw, swap, and dls image types. 05/28/04: Update: Changed the size argument of 'dcat' to be number of data units instead of size in bytes (suggestion by Harald Katzer). ---------------- VERSION 1.69 -------------- 03/06/04: Update: Fixed some memory leaks in ext2fs_close. reported by Paul Bakker. 03/10/04: Bug Fix: If the '-s' flag was used with 'icat' on a EXT2FS or FFS file system, then a large amount of extra data came out. Reported by epsion. 
(BUG: 913874) 03/10/04: Bug Fix: One of the verbose outputs in ext2fs.c was being sent to STDOUT instead of logfp. (BUG: 913875) 04/14/04: Update: Added more data to fsstat output of FAT file system. 04/15/04: Bug Fix: The last sector of a FAT file system may not be analyzed. (BUG: 935976) 04/16/04: Update: Added full support for swap and raw by making the standard files and functions for them instead of the hack in dcat. Suggested by (and initial patch by) Paul Baker. 04/18/04: Update: Changed error messages in EXT2/3FS code to be extXfs. 04/18/04: Update: Upgraded to version 4.09 of 'file'. This will help fix some of the problems people have had compiling it under OS X 10.3. 04/18/04: Update: Added compiling support for SFU 3.5 (Microsoft). Patches from an anonymous person. ---------------- VERSION 1.68 -------------- 01/20/04: Bug Fix: FAT times were an hour too fast during daylight savings. Now use mktime() instead of manual calculation. Reported by Randall Shane. (BUG: 880606) 02/01/04: Update: 'hfind -i' now reports the header entry as an invalid entry. The first header row was ignored. 02/20/04: Bug Fix: indirect block pointer blocks would not be identified by the ifind tool. Reported by Knut Eckstein (BUG: 902709) 03/01/04: Update: Added fs->seek_pos check to fs_read_random. ---------------- VERSION 1.67 -------------- 11/15/03: Bug Fix: Added support for OS X 10.3 to src/makedefs. (BUG: 843029) 11/16/03: Bug Fix: Mac partition tables could generate an error if there were VOID-type partitions. (BUG: 843366) 11/21/03: Update: Changed NOABORT messages to verbose messages, so invalid data is not printed during 'ifind' searches. 11/30/03: Bug Fix: icat would not hide the 'holes' if '-h' was given because the _UNALLOC flag was always being passed to file_walk. (reported by Knut Eckstein). (BUG: 851873) 11/30/03: Bug Fix: NTFS data_walk was not using _ALLOC and _UNALLOC flags and other code that called it was not either. 
(BUG: 851895) 11/30/03: Bug Fix: Not all needed commands were using _UNALLOC when they called file_walk (although for most cases it did not matter because sparse files would not be found in a directory for example). (Bug: 851897) 12/09/03: Bug Fix: FFS and EXT2FS code was using OFF_T type instead of size_t for the size of the file. This could result in a file > 2GB as being a negative size on some systems (BUG: 856957). 12/26/03: Bug Fix: ffind would crash for root directory of FAT image. Added NULL check and added a NULL name to fake root directory entry. (BUG: 871219) 01/05/04: Bug Fix: The clustcnt value for FAT was incorrectly calculated and was too large for FAT12 and FAT16 by 32 sectors. This could produce extra entries in the 'fsstat' output when the FAT is dumped. (BUG: 871220) 01/05/04: Bug Fix: ils, fls, and istat were not printing the full size of files that are > 2GB. (reported by Knut Eckstein) (BUG: 871457) 01/05/04: Bug Fix: The EXT2FS and EXT3FS code was not using the i_dir_acl value as the upper 32-bits of regular files that are > 2GB (BUG: 871458) 01/06/04: Mitigation: An error was reported where sorter would error that icat was being passed a '-1' argument. I can't find how that would happen, so I added quotes to all arguments so that the next time it occurs, the error is more useful (BUG: 845840). 01/06/04: Update: Incorporated patch from Charles Seeger so that 'cc' can be used and compile time warnings are fixed with Sun 'cc'. 01/06/04: Update: Upgraded file from v3.41 to v4.07 ---------------- VERSION 1.66 -------------- 09/02/03: Bug Fix: Would not compile under OpenBSD 3 because fs_tools.h & mm_tools was missing a defined statement (reported by Randy - m0th_man) NOTE: Bugs now will have an entry into the Source Forge bug tracking system. 10/13/03: Bug Fix: buffer was not being cleared between uses and length incorrectly set in NTFS resulted in false deleted file names being shown when the '-r' flag was given. 
The extra entries were from the previous directory. (BUG: 823057) 10/13/03: Bug Fix: The results of 'sorter' varied depending on the version of Perl and the system. If the file output matched more than one, sorter could not guarantee which would match. Therefore, results were different for some files and some machines. 'sorter' now enforces the ordering based on the order they are in the configuration file. The entries at the end of the file have priority over the first entries (generic rules to specific rules). (BUG: 823057) 10/14/03: Update: 'mmls' prints 'MS LVM' with partition type 0x42 now. 10/25/03: Bug Fix: NTFS could have a null pointer crash if the image was very corrupt and $Data was not found for the MFT. 11/10/03: Bug Fix: NTFS 'ffind' would only report the file name and not the attribute name because the type and id were ignored. ffind and ntfs_dent were updated - found during NTFS keyword search test. (Bug: 831579) 11/12/03: Update: added support for Solaris x86 partition tables to 'mmls' 11/12/03: Update: Modified the sparc data structure to add the correct location of the 'sanity' magic value. 11/15/03: Update: Added '-s' flag to 'icat' so that slack space is also displayed. ---------------- VERSION 1.65 -------------- 08/03/03: Bug Fix: 'sorter' now checks for inode values that are too small to avoid 'icat' errors about invalid inode values. 08/19/03: Update: 'raw' is now a valid type for 'dcat'. 08/21/03: Update: mactime and sorter look for perl5.6.0 first. 08/21/03: Update: Removed NSRL support from 'sorter' until a better way to identify the known good and known bad files is found 08/21/03: Bug Fix: The file path replaces < and > with HTML encoding for HTML output (ils names were not being shown) 08/25/03: Update: Added 'nsrl.txt' describing why the NSRL functionality was removed. 08/27/03: Update: Improved code in 'mactime' to reduce warnings when '-w' is used with Perl ('exists' checks on arrays). 
08/27/03: Update: Improved code in 'sorter' to reduce warnings when '-w' is used with Perl (inode_int for NTFS). ---------------- VERSION 1.64 -------------- 08/01/03: Docs Fix: The Sun VTOC was documented as Virtual TOC and it should be Volume TOC (Jake @ UMASS). 08/02/03: Bug Fix: Some compilers complained about verbose logging assignment in 'mmls' (Ralf Spenneberg). ---------------- VERSION 1.63 -------------- 06/13/03; Update: Added 'mmtools' directory with 'dos' partitions and 'mmls'. 06/18/03: Update: Updated the documents in the 'doc' directory 06/19/03: Update: Updated error message for EXT3FS magic check 06/27/03: Update: Added slot & table number to mmls 07/08/03: Update: Added mac support to mmtools 07/11/03: Bug Fix: 'sorter' was not processing all unallocated meta data structures because of a regexp error. (reported by Jeff Reava) 07/16/03: Update: Added support for FreeBSD5 07/16/03: Update: Added BSD disk labels to mmtools 07/28/03: Update: Relaxed requirements for DOS directory entries, the wtime can be zero (reported by Adam Uccello). 07/30/03: Update: Added SUN VTOC to mmtools 07/31/03: Update: Added NetBSD support (adam@monkeybyte.org) 08/01/03: Update: Added more sanity checks to FAT so that it would not try and process NTFS images that have the same MAGIC value ---------------- VERSION 1.62 -------------- 04/11/03: Bug Fix: 'fsstat' for an FFS file system could report data fragments in the last group that were larger than the maximum fragment 04/11/03: Bug Fix: 'ffs' allows the image to not be a multiple of the block size. A read error occurred when it tried to read the last fragments since a whole block could not be read. 04/15/03: Update: Added debug statements to FAT code. 
04/26/03: Update: Added verbose statements to FAT code 04/26/03: Update: Added NOABORT flag to dls -s 04/26/03: Update: Added stderr messages for errors that are not aborted because of NOABORT 05/27/03: Update: Added 'mask' field to FATFS_INFO structure and changed code in fatfs.c to use it. 05/27/03: Update: isdentry now checks the starting cluster to see if it is a valid size. 05/27/03: Bug Fix: Added a sanitizer to 'sorter' to remove invalid chars from the 'file' output and reduce the warnings from Perl. 05/28/03: Bug Fix: Improved sanitize expression in 'sorter' 05/28/03: Update: Added '-d' option to 'mactime' to allow output to be given in comma delimited format for importing into a spread sheet or other graphing tool 06/09/03: Update: Added hourly summary / indexing to mactime 06/09/03: Bug Fix: sorter would not allow linux-ext3 fstype ---------------- VERSION 1.61 -------------- 02/05/03: Update: Started addition of image thumbnails to sorter 03/05/03: Update: Updated 'file' to version 3.41 03/16/03: Update: Added comments and NULL check to 'ifind' 03/16/03: Bug Fix: Added a valid magic of 0 for MFT entries. This was found in an XP image. 03/26/03: Bug Fix: fls would crash for an inode of 0 and a clock skew was given. fixed the bug in fls.c (debug help from Josep Homs) 03/26/03: Update: Added more verbose comments to ntfs_dent.c. 03/26/03: Bug Fix: 'ifind' for a path could return a result that was shorter than the requested name (strncmp was used) 03/26/03: Update: Short FAT names can be used in 'ifind -n' and error messages were improved 03/26/03: Bug Fix: A final NTFS Index Buffer was not always processed in ntfs_dent.c, which resulted in files not being shown. This was fixed with debugging help from Matthew Shannon. 
03/27/03: Update: Added an 'index.html' for image thumbnails in sorter and added a 'details' link from the thumbnail to the images.html file 03/27/03: Update: 'sorter' can now take a directory inode to start processing 03/27/03: Update: added '-z' flag when running 'file' in 'sorter' so that compressed file contents are reported 03/27/03: Update: added '-i' flag to 'mactime' that creates a daily summary of events 03/27/03: Update: Added support for Version 2 of the NSRL in 'hfind' 04/01/03: Update: Added support for Hash Keeper to 'hfind' 04/01/03: Update: Added '-e' flag to 'hfind' for extended info (currently hashkeeper only) ---------------- VERSION 1.60 -------------- 10/31/02: Bug Fix: the unmounting status of EXT2FS in the 'fsstat' command was not correct (reported by Stephane Denis). 11/24/02: Bug Fix: The -v argument was not allowed on istat or fls (Michael Stone) 11/24/02: Bug Fix: When doing an 'ifind' on a UNIX fs, it could abort if it looked at an unallocated inode with invalid indirect block pointers. This was fixed by adding a "NOABORT" flag to the walk code and adding error checks in the file system code instead of relying on the fs_io code. (suggested by Micael Stone) 11/26/02: Update: ifind has a '-n' argument that allows one to specify a file name it and it searches to find the meta data structure for it (suggested by William Salusky). 11/26/02: Update: Now that there is a '-n' flag with 'ifind', the '-d' flag was added to specify the data unit address. The old syntax of giving the data_unit at the end is no longer supported. 11/27/02: Update: Added sanity checks on meta data and data unit addresses earlier in the code. 
12/12/02: Update: Added additional debug statements to NTFS code 12/19/02: Update: Moved 'hash' directory to 'hashtools' 12/19/02: Update: Started development of 'hfind' 12/31/02: Update: Improved verbose debug statements to show full 64-bit offsets 01/02/03: Update: Finished development of 'hfind' with ability to update for next version of NSRL (which may have a different format) 01/05/03: Bug Fix: FFS and EXT2FS symbolic link destinations where not properly NULL terminated and some extra chars were appended in 'fls' (later reported by Thorsten Zachmann) 01/06/03: Bug Fix: getu64() was not properly masking byte sizes and some data was being lost. This caused incorrect times to be displayed in some NTFS files. 01/06/03: Bug Fix: ifind reported incorrect ownership for some UNIX file systems if the end fragments were allocated to a different file than the first ones were. 01/07/03: Update: Renamed the src/mactime directory to src/timeline. 01/07/03: Update: Updated README and man pages for hfind and sorter 01/12/03: Bug Fix: ntfs_mft_lookup was casting a 64-bit value to a 32-bit variable. This caused MFT Magic errors. Reported and debugged by Keven Murphy 01/12/03: Update: Added verbose argument to 'fls' 01/12/03: Bug Fix: '-V' argument to 'istat' was doing verbose instead of version 01/13/03: Update: Changed static sizes of OFF_T and DADDR_T in Linux version to the actual 'off_t' and 'daddr_t' types 01/23/03: Update: Changed use of strtok_r to strtok in ifind.c so that Mac 10.1 could compile (Dave Goldsmith). 01/28/03: Update: Improved code in 'hfind' and 'sorter' to handle files with spaces in the path (Dave Goldsmith). ---------------- VERSION 1.52 -------------- 09/24/02: Bug Fix: Memory leak in ntfs_dent_idxentry(), ntfs_find_file(), and ntfs_dent_walk() 09/24/02: Update: Removal of index sequences for index buffers is now done using upd_off, which will allow for NTFS to move the structure in the future. 
09/26/02: Update: Added create time for NTFS / STANDARD_INFO to istat output. 09/26/02: Update: Changed the method that the NTFS time is converted to UNIX time. Should be more efficient. 10/09/02: Update: dcat error changed. 10/02/02: Update: Includes a Beta version of 'sorter' ---------------- VERSION 1.51 -------------- 09/10/02: Bug Fix: Fixed a design bug that would not allow attribute lists in $MFT. This bug would generate an error that complained about an invalid MFT entry in attribute list. 09/10/02: Update: The size of files and directories is now calculated after each time proc_attrseq() is called so that it is more up to date when dealing with attribute lists. The size has the sizes of all $Data, $IDX_ROOT, and $IDX_ALLOC streams. 09/10/02: Update: The maxinum number of MFT entries is now calculated each time an MFT entry is processed while loading the MFT. This allows us to reflect what the maximum possible MFT entry is at that given point based on how many attribute lists have been processed. 09/10/02: Update: Added file version 3.39 to distro (bigger magic files) (Salusky) 09/10/02: Bug Fix: fs_data was wasting memory when it was allocated 09/10/02: Update: added a fs_data_alloc() function 09/12/02: Bug Fix: Do not give an error if an attribute list of an unallocated file points to an MFT that no longer claims it is a member of the list. 09/12/02: Update: No longer need version to remove update sequence values from on-disk buffers 09/19/02: Bug Fix: fixed memory leak in ntfs_load_ver() 09/19/02: Bug Fix: Update sequence errors were displayed because of a bug that occurred when an MFT entry crossed a run in $MFT. Only occurred with 512-byte clusters and an odd number of clusters in a run. 09/19/02: Update: New argument to ils, istat, and fls that allows user to specify a time skew in seconds of the compromised system. Originated from discussion at DFRWS II. 
09/19/02: Update: Added '-h' argument to mactime to display header info ---------------- VERSION 1.50 -------------- 04/21/02: icat now displays idxroot attribute for NTFS directories 04/21/02: fs_dent_print functions now are passed the FS_DATA structure instead of the extra inode and name strings. (NTFS) 04/21/02: fs_dent_print functions display alternate data stream size instead of the default data size (NTFS) 04/24/02: Fixed bug in istat that displayed too many fragments with ffs images 04/24/02: Fixed bug in istat that did not display sparse files correctly 04/24/02: fsstat of FFS images now identifies the fragments at the beginning of cyl groups as data fragments. 04/26/02: Fixed bug in ext2fs_dent_parse_block that did not advance the directory entry pointer far enough each time 04/26/02: Fixed bug in ext2fs_dent_parse_block so that gave an error if a file name was exactly 255 chars 04/29/02: Removed the getX functions from get.c as they are now macros 05/11/02: Added support for lowercase flag in FAT 05/11/02: Added support for sequence values (NTFS) 05/13/02: Added FS_FLAG_META for FAT 05/13/02: Changed ifind so that it looks the block up to identify if it is a meta data block when an inode can not be found 05/13/02: Added a conditional to ifind so that it handles sparse files better 05/19/02: Changed icat so that the default attribute type is set in the file_walk function 05/20/02: ils and dls now use boundary inode & block values if too large or small are given 05/21/02: istat now displays all NTFS times 05/21/02: Created functions to just display date and time 05/24/02: moved istat functionality to the specific file system file 05/25/02: added linux-ext3 flag, but no new features 05/25/02: Added sha1 (so Autopsy can use the NIST SW Database) 05/26/02: Fixed bug with FAT that did not return all slack space on file_walk 05/26/02: Added '-s' flag to dls to extract slack space of FAT and NTFS 06/07/02: fixed _timezone variable so correct times are shown in 
CYGWIN 06/11/02: *_copy_inode now sets the flags for the inode 06/11/02: fixed bug in mactimes that displayed a duplicate entry with time because of header entries in body file 06/12/02: Added ntfs.README doc 06/16/02: Added a comment to file Makefile to make it easier to compile for an IR CD. 06/18/02: Fixed NTFS bug that showed ADS when only deleted files were supposed to be shown (when ADS in directory) 06/19/02: added the day of the week to the mactime output (Tan) 07/09/02: Fixed bug that added extra chars to end of symlink destination 07/17/02: 1.50 Released ---------------- VERSION 1.00 -------------- - Integrated TCT-1.09 and TCTUTILs-1.01 - Fixed bug in bcat if size is not given with type of swap. - Added platform indep by including the structures of each file system type - Added flags for large file support under linux - blockcalc was off by 1 if calculated using the raw block number and not the one that lazarus spits out (which start at 1) - Changed the inode_walk and block_walk functions slightly to return a value so that a walk can be ended in the middle of it. - FAT support added - Improved ifind to better handle fragments - '-z' flag to fls and istat now use the time zone string instead of integer value. - no longer prepend / in _dent - verify that '-m' directory in fls ends with a '/' - identify the destination of sym links - fsstat tool added - fixed caching bug with FAT12 when the value overlapped cache entries - added mactime - removed the value in fls when printing mac format (inode is now printed in mactime) - renamed src/misc directory to src/hash (it only has md5 and will have sha) - renamed aux directory to misc (Windows doesn't allow aux as a name ??) 
- Added support for Cygwin - Use the flags in super block of EXT2FS to identify v1 or v2 - removed file system types of linux1 and linux2 and linux - added file system type of linux-ext2 (as ext3 is becoming more popular) - bug in file command that reported seek error for object files and STDIN sleuthkit-4.11.1/tsk/000755 000765 000024 00000000000 14137073560 015213 5ustar00carrierstaff000000 000000 sleuthkit-4.11.1/tests/000755 000765 000024 00000000000 14137073563 015557 5ustar00carrierstaff000000 000000 sleuthkit-4.11.1/INSTALL.txt000644 000765 000024 00000010352 14137073413 016257 0ustar00carrierstaff000000 000000 The Sleuth Kit http://www.sleuthkit.org/sleuthkit Installation Instructions Last Modified: Oct 2012 REQUIREMENTS ============================================================================= Tested Platform: - FreeBSD 2-6.* - Linux 2.* - OpenBSD 2-3.* - Mac OS X - SunOS 4-5.* - Windows Build System (to compile from a source distribution): - C/C++ compiler (C++ 14 required) - GNU Make - Java compiler / JDK (if you want the java bindings) Development System (to extend TSK or compile from the repository): - GNU autoconf, automake, and libtool - Plus the build system requirements Optional Programs: - Autopsy: Provides a graphical HTML-based interface to The Sleuth Kit (which makes it much easier to use). Install this AFTER installing The Sleuth Kit. Available at: http://www.sleuthkit.org/autopsy Optional Libraries: There are optional features that TSK can use if you have installed them before you build and install TSK. - AFFLIB: Allows you to process disk images that are stored in the AFF format. Version 3.3.6 has been tested to compile and work with this release. Available at: http://www.afflib.org - LibEWF: Allows you to process disk images that are stored in the Expert Witness format (EnCase Format). Version 20130128 has been tested to compile and work with this release. 
It is the last stable release of libewf and therefore the only one that we currently support. You can download it from: https://github.com/sleuthkit/libewf_64bit The official repository is available here, but there is not a package of the last stable release: https://github.com/libyal/libewf Available at: http://sourceforge.net/projects/libewf/ INSTALLATION ============================================================================= Refer to the README_win32.txt file for details on Windows. The Sleuth Kit uses the GNU autotools for building and installation. There are a few steps to this process. First, run the 'configure' script in the root TSK directory. See the CONFIGURE OPTIONS section for useful arguments that can be given to 'configure. $ ./configure If there were no errors, then run 'make'. If you do not have a 'configure' script, then it is probably because you cloned the source code repository. If so, you will need to have automake, autoconf, and libtool installed and you can create the configure script using the 'bootstrap' script in the root directory. $ make The 'make' process will take a while and will build the TSK tools. When this process is complete, the libraries and executables will be located in the TSK sub-directories. To install them, type 'make install'. $ make install By default, this will copy everything in to the /usr/local/ structure. So, the executables will be in '/usr/local/bin'. This directory will need to be in your PATH if you want to run the TSK commands without specifying '/usr/local/bin' everytime. If you get an error like: libtool: Version mismatch error. This is libtool 2.2.10, but the libtool: definition of this LT_INIT comes from libtool 2.2.4. libtool: You should recreate aclocal.m4 with macros from libtool 2.2.10 libtool: and run autoconf again. Run: ./bootstrap and then go back to running configure and make. To run 'bootstrap', you'll need to have the autotools installed (see the list at the top of this page). 
CONFIGURE OPTIONS ----------------------------------------------------------------------------- There are some arguments to 'configure' that you can supply to customize the setup. Currently, they focus on the optional disk image format libraries. --without-afflib: Supply this if you want TSK to ignore AFFLIB even if it is installed. --with-afflib=dir: Supply this if you want TSK to look in 'dir' for the AFFLIB installation (the directory should have 'lib' and 'include' directories in it). --without-ewf: Supply this if you want TSK to ignore libewf even if it is installed. --with-libewf=dir: Supply this if you want TSK to look in 'dir' for the libewf installation (the directory should have 'lib' and 'include' directories in it). ----------------------------------------------------------------------------- Brian Carrier carrier sleuthkit org sleuthkit-4.11.1/docs/000755 000765 000024 00000000000 14137073557 015350 5ustar00carrierstaff000000 000000 sleuthkit-4.11.1/case-uco/000755 000765 000024 00000000000 14137073556 016116 5ustar00carrierstaff000000 000000 sleuthkit-4.11.1/Makefile.am000644 000765 000024 00000005640 14137073413 016450 0ustar00carrierstaff000000 000000 # File that we want to include in the dist EXTRA_DIST = README_win32.txt README.md INSTALL.txt ChangeLog.txt NEWS.txt API-CHANGES.txt \ licenses/README.md licenses/GNUv2-COPYING licenses/GNUv3-COPYING licenses/IBM-LICENSE \ licenses/Apache-LICENSE-2.0.txt licenses/cpl1.0.txt licenses/bsd.txt licenses/mit.txt \ m4/*.m4 \ docs/README.txt \ packages/sleuthkit.spec \ win32/BUILDING.txt \ win32/*/*.vcxproj \ win32/tsk-win.sln \ win32/NugetPackages.props \ win32/docs/* \ bindings/java/README.txt \ bindings/java/*.xml \ bindings/java/doxygen/Doxyfile \ bindings/java/doxygen/*.dox \ bindings/java/doxygen/*.html \ bindings/java/nbproject/project.xml \ bindings/java/src/org/sleuthkit/datamodel/*.java \ bindings/java/src/org/sleuthkit/datamodel/*.html \ bindings/java/src/org/sleuthkit/datamodel/*.properties \ 
bindings/java/src/org/sleuthkit/datamodel/blackboardutils/*.java \ bindings/java/src/org/sleuthkit/datamodel/blackboardutils/attributes/*.java \ bindings/java/src/org/sleuthkit/datamodel/Examples/*.java \ bindings/java/src/*.html \ case-uco/java/*.xml \ case-uco/java/*.md \ case-uco/java/nbproject/*.xml \ case-uco/java/nbproject/*.properties \ case-uco/java/src/org/sleuthkit/caseuco/*.java \ case-uco/java/test/org/sleuthkit/caseuco/*.java ACLOCAL_AMFLAGS = -I m4 # directories to compile if CPPUNIT UNIT_TESTS=unit_tests endif # Compile java bindings if all of the dependencies existed if X_JNI JAVA_BINDINGS=bindings/java JAVA_CASEUCO=case-uco/java else JAVA_BINDINGS= JAVA_CASEUCO= endif SUBDIRS = tsk tools tests samples man $(UNIT_TESTS) $(JAVA_BINDINGS) $(JAVA_CASEUCO) nobase_include_HEADERS = tsk/libtsk.h tsk/tsk_incs.h \ tsk/base/tsk_base.h tsk/base/tsk_os.h \ tsk/img/tsk_img.h tsk/vs/tsk_vs.h tsk/img/pool.hpp \ tsk/vs/tsk_bsd.h tsk/vs/tsk_dos.h tsk/vs/tsk_gpt.h \ tsk/vs/tsk_mac.h tsk/vs/tsk_sun.h \ tsk/fs/tsk_fs.h tsk/fs/tsk_ffs.h tsk/fs/tsk_ext2fs.h tsk/fs/tsk_fatfs.h \ tsk/fs/tsk_ntfs.h tsk/fs/tsk_iso9660.h tsk/fs/tsk_hfs.h tsk/fs/tsk_yaffs.h \ tsk/fs/tsk_apfs.h tsk/fs/tsk_apfs.hpp tsk/fs/apfs_fs.h tsk/fs/apfs_fs.hpp tsk/fs/apfs_compat.hpp \ tsk/fs/decmpfs.h tsk/fs/tsk_exfatfs.h tsk/fs/tsk_fatxxfs.h \ tsk/hashdb/tsk_hashdb.h tsk/auto/tsk_auto.h \ tsk/auto/tsk_is_image_supported.h tsk/auto/guid.h \ tsk/pool/tsk_pool.h tsk/pool/tsk_pool.hpp tsk/pool/tsk_apfs.h tsk/pool/tsk_apfs.hpp \ tsk/pool/pool_compat.hpp tsk/pool/apfs_pool_compat.hpp \ tsk/util/crypto.hpp tsk/util/lw_shared_ptr.hpp tsk/util/span.hpp \ tsk/util/detect_encryption.h nobase_dist_data_DATA = tsk/sorter/default.sort tsk/sorter/freebsd.sort \ tsk/sorter/images.sort tsk/sorter/linux.sort tsk/sorter/openbsd.sort \ tsk/sorter/solaris.sort tsk/sorter/windows.sort api-docs: doxygen tsk/docs/Doxyfile cd bindings/java/doxygen; doxygen Doxyfile man-html: cd man;build-html sleuthkit-4.11.1/README.md000644 
000765 000024 00000021575 14137073413 015700 0ustar00carrierstaff000000 000000 [![Build Status](https://travis-ci.org/sleuthkit/sleuthkit.svg?branch=develop)](https://travis-ci.org/sleuthkit/sleuthkit) [![Build status](https://ci.appveyor.com/api/projects/status/8f7ljj8s2lh5sqfv?svg=true)](https://ci.appveyor.com/project/bcarrier/sleuthkit) # [The Sleuth Kit](http://www.sleuthkit.org/sleuthkit) ## INTRODUCTION The Sleuth Kit is an open source forensic toolkit for analyzing Microsoft and UNIX file systems and disks. The Sleuth Kit enables investigators to identify and recover evidence from images acquired during incident response or from live systems. The Sleuth Kit is open source, which allows investigators to verify the actions of the tool or customize it to specific needs. The Sleuth Kit uses code from the file system analysis tools of The Coroner's Toolkit (TCT) by Wietse Venema and Dan Farmer. The TCT code was modified for platform independence. In addition, support was added for the NTFS (see [wiki/ntfs](http://wiki.sleuthkit.org/index.php?title=FAT_Implementation_Notes)) and FAT (see [wiki/fat](http://wiki.sleuthkit.org/index.php?title=NTFS_Implementation_Notes)) file systems. Previously, The Sleuth Kit was called The @stake Sleuth Kit (TASK). The Sleuth Kit is now independent of any commercial or academic organizations. It is recommended that these command line tools can be used with the Autopsy Forensic Browser. Autopsy, (http://www.sleuthkit.org/autopsy), is a graphical interface to the tools of The Sleuth Kit and automates many of the procedures and provides features such as image searching and MD5 image integrity checks. As with any investigation tool, any results found with The Sleuth Kit should be be recreated with a second tool to verify the data. ## OVERVIEW The Sleuth Kit allows one to analyze a disk or file system image created by 'dd', or a similar application that creates a raw image. These tools are low-level and each performs a single task. 
When used together, they can perform a full analysis. For a more detailed description of these tools, refer to [wiki/filesystem](http://wiki.sleuthkit.org/index.php?title=TSK_Tool_Overview). The tools are briefly described in a file system layered approach. Each tool name begins with a letter that is assigned to the layer. ### File System Layer: A disk contains one or more partitions (or slices). Each of these partitions contain a file system. Examples of file systems include the Berkeley Fast File System (FFS), Extended 2 File System (EXT2FS), File Allocation Table (FAT), and New Technologies File System (NTFS). The fsstat tool displays file system details in an ASCII format. Examples of data in this display include volume name, last mounting time, and the details about each "group" in UNIX file systems. ### Content Layer (block): The content layer of a file system contains the actual file content, or data. Data is stored in large chunks, with names such as blocks, fragments, and clusters. All tools in this layer begin with the letters 'blk'. The blkcat tool can be used to display the contents of a specific unit of the file system (similar to what 'dd' can do with a few arguments). The unit size is file system dependent. The 'blkls' tool displays the contents of all unallocated units of a file system, resulting in a stream of bytes of deleted content. The output can be searched for deleted file content. The 'blkcalc' program allows one to identify the unit location in the original image of a unit in the 'blkls' generated image. A new feature of The Sleuth Kit from TCT is the '-l' argument to 'blkls' (or 'unrm' in TCT). This argument lists the details for data units, similar to the 'ils' command. The 'blkstat' tool displays the statistics of a specific data unit (including allocation status and group number). ### Metadata Layer (inode): The metadata layer describes a file or directory. 
This layer contains descriptive data such as dates and size as well as the addresses of the data units. This layer describes the file in terms that the computer can process efficiently. The structures that the data is stored in have names such as inode and directory entry. All tools in this layer begin with an 'i'. The 'ils' program lists some values of the metadata structures. By default, it will only list the unallocated ones. The 'istat' displays metadata information in an ASCII format about a specific structure. New to The Sleuth Kit is that 'istat' will display the destination of symbolic links. The 'icat' function displays the contents of the data units allocated to the metadata structure (similar to the UNIX cat(1) command). The 'ifind' tool will identify which metadata structure has allocated a given content unit or file name. Refer to the [ntfs wiki](http://wiki.sleuthkit.org/index.php?title=NTFS_Implementation_Notes) for information on addressing metadata attributes in NTFS. ### Human Interface Layer (file): The human interface layer allows one to interact with files in a manner that is more convenient than directly with the metadata layer. In some operating systems there are separate structures for the metadata and human interface layers while others combine them. All tools in this layer begin with the letter 'f'. The 'fls' program lists file and directory names. This tool will display the names of deleted files as well. The 'ffind' program will identify the name of the file that has allocated a given metadata structure. With some file systems, deleted files will be identified. #### Time Line Generation Time lines are useful to quickly get a picture of file activity. Using The Sleuth Kit a time line of file MAC times can be easily made. The mactime (TCT) program takes as input the 'body' file that was generated by fls and ils. To get data on allocated and unallocated file names, use 'fls -rm dir' and for unallocated inodes use 'ils -m'. 
Note that the behavior of these tools are different than in TCT. For more information, refer to [wiki/mactime](http://wiki.sleuthkit.org/index.php?title=Mactime). #### Hash Databases Hash databases are used to quickly identify if a file is known. The MD5 or SHA-1 hash of a file is taken and a database is used to identify if it has been seen before. This allows identification to occur even if a file has been renamed. The Sleuth Kit includes the 'md5' and 'sha1' tools to generate hashes of files and other data. Also included is the 'hfind' tool. The 'hfind' tool allows one to create an index of a hash database and perform quick lookups using a binary search algorithm. The 'hfind' tool can perform lookups on the NIST National Software Reference Library (NSRL) (www.nsrl.nist.gov) and files created from the 'md5' or 'md5sum' command. Refer to the [wiki/hfind](http://wiki.sleuthkit.org/index.php?title=Hfind) file for more details. #### File Type Categories Different types of files typically have different internal structure. The 'file' command comes with most versions of UNIX and a copy is also distributed with The Sleuth Kit. This is used to identify the type of file or other data regardless of its name and extension. It can even be used on a given data unit to help identify what file used that unit for storage. Note that the 'file' command typically uses data in the first bytes of a file so it may not be able to identify a file type based on the middle blocks or clusters. The 'sorter' program in The Sleuth Kit will use other Sleuth Kit tools to sort the files in a file system image into categories. The categories are based on rule sets in configuration files. The 'sorter' tool will also use hash databases to flag known bad files and ignore known good files. Refer to the [wiki/sorter](http://wiki.sleuthkit.org/index.php?title=Sorter) file for more details. ## LICENSE There are a variety of licenses used in TSK based on where they were first developed. 
The licenses are located in the [licenses directory](https://github.com/sleuthkit/sleuthkit/tree/develop/licenses). - The file system tools (in the [tools/fstools](https://github.com/sleuthkit/sleuthkit/tree/develop/tools/fstools) directory) are released under the IBM open source license and Common Public License. - srch_strings and fiwalk are released under the GNU Public License - Other tools in the tools directory are Common Public License - The modifications to 'mactime' from the original 'mactime' in TCT and 'mac-daddy' are released under the Common Public License. The library uses utilities that were released under MIT and BSD 3-clause. ## INSTALL For installation instructions, refer to the INSTALL.txt document. ## OTHER DOCS The [wiki](http://wiki.sleuthkit.org/index.php?title=Main_Page) contains documents that describe the provided tools in more detail. The Sleuth Kit Informer is a newsletter that contains new documentation and articles. > www.sleuthkit.org/informer/ ## MAILING LIST Mailing lists exist on SourceForge, for both users and a low-volume announcements list. > http://sourceforge.net/mail/?group_id=55685 Brian Carrier carrier at sleuthkit dot org sleuthkit-4.11.1/API-CHANGES.txt000644 000765 000024 00000001155 14137073413 016631 0ustar00carrierstaff000000 000000 Changes to make once we are ready to do a backwards incompatible change. - TSK_SERVICE_ACCOUNT to TSK_ACCOUNT - HashDB to use new TSK_BASE_HASHDB enum instead of its own ENUM - Java SleuthkitCase.addArtifactType should return different if artifact already exists or getArtifactId should.... - Java SleuthkitCase.findFilesWhere should return AbstractFile like findFiles - getUniquePath() should not throw exception. - findFilesInImage should return an enum like TskDB methods differentiating if any data was found or not. 
- remove addImageInfo in db_Sqlite that does not take MD5, and/or make it take IMG_INFO as argumentsleuthkit-4.11.1/samples/000755 000765 000024 00000000000 14137073563 016061 5ustar00carrierstaff000000 000000 sleuthkit-4.11.1/packages/000755 000765 000024 00000000000 14137073557 016176 5ustar00carrierstaff000000 000000 sleuthkit-4.11.1/m4/000755 000765 000024 00000000000 14137073557 014740 5ustar00carrierstaff000000 000000 sleuthkit-4.11.1/ChangeLog.txt000644 000765 000024 00000000601 14137073413 016774 0ustar00carrierstaff000000 000000 This program does not distribute an official ChangeLog file. You can generate one from the subversion repository though using the following command: svn log http://svn.sleuthkit.org/repos/sleuthkit/ For a specific release, try something like: svn log http://svn.sleuthkit.org/repos/sleuthkit/tags/sleuthkit-3.0.0 and replace 3.0.0 with the version you are interested in. sleuthkit-4.11.1/bindings/000755 000765 000024 00000000000 14137073556 016214 5ustar00carrierstaff000000 000000 sleuthkit-4.11.1/win32/000755 000765 000024 00000000000 14137073557 015362 5ustar00carrierstaff000000 000000 sleuthkit-4.11.1/README_win32.txt000644 000765 000024 00000003351 14137073413 017131 0ustar00carrierstaff000000 000000 The Sleuth Kit Win32 README File http://www.sleuthkit.org/sleuthkit Last Modified: Jan 2014 ==================================================================== The Sleuth Kit (TSK) runs on Windows. If you simply want the executables, you can download them from the www.sleuthkit.org website. If you want to build your own executables, you have two options. 1) Microsoft Visual Studio. The VS solution file is in the win32 directory. Refer to the win32\BUILDING.txt file for details for building the 32-bit and 64-bit versions. 2) mingw32. See below for more details. 
--------------------------------------------------------------- MINGW32 If you're using mingw32 on Linux, simply give the "--host=i586-mingw32msvc" argument when running the './configure' script and use 'make' to compile. If you're using mingw32 on Windows, './configure' and 'make' will work directly. Note that to compile the Java bindings you will need to have a JDK to be installed, and by default the Oracle JDK on Windows is installed in a path such as C:\Program Files\Java\jdk1.6.0_16\. GNU autotools (which is used if you do a mingw32 compile, but not a Visual Studio compile) do not handle paths containing spaces, so you will need to copy the JDK to a directory without spaces in the name, such as C:\jdk1.6.0_16\, then add C:\jdk1.6.0_16\bin to $PATH before running './configure' Note also that libtool may fail on mingw32 on Windows if C:\Windows\system32 is on $PATH before /usr/bin. The fix is to have the C:\Windows directories at the _end_ of your mingw $PATH. ------------------------------------------------------------------- carrier sleuthkit org Brian Carrier sleuthkit-4.11.1/Makefile.in000644 000765 000024 00000101237 14137073437 016466 0ustar00carrierstaff000000 000000 # Makefile.in generated by automake 1.15.1 from Makefile.am. # @configure_input@ # Copyright (C) 1994-2017 Free Software Foundation, Inc. # This Makefile.in is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY, to the extent permitted by law; without # even the implied warranty of MERCHANTABILITY or FITNESS FOR A # PARTICULAR PURPOSE. 
@SET_MAKE@ VPATH = @srcdir@ am__is_gnu_make = { \ if test -z '$(MAKELEVEL)'; then \ false; \ elif test -n '$(MAKE_HOST)'; then \ true; \ elif test -n '$(MAKE_VERSION)' && test -n '$(CURDIR)'; then \ true; \ else \ false; \ fi; \ } am__make_running_with_option = \ case $${target_option-} in \ ?) ;; \ *) echo "am__make_running_with_option: internal error: invalid" \ "target option '$${target_option-}' specified" >&2; \ exit 1;; \ esac; \ has_opt=no; \ sane_makeflags=$$MAKEFLAGS; \ if $(am__is_gnu_make); then \ sane_makeflags=$$MFLAGS; \ else \ case $$MAKEFLAGS in \ *\\[\ \ ]*) \ bs=\\; \ sane_makeflags=`printf '%s\n' "$$MAKEFLAGS" \ | sed "s/$$bs$$bs[$$bs $$bs ]*//g"`;; \ esac; \ fi; \ skip_next=no; \ strip_trailopt () \ { \ flg=`printf '%s\n' "$$flg" | sed "s/$$1.*$$//"`; \ }; \ for flg in $$sane_makeflags; do \ test $$skip_next = yes && { skip_next=no; continue; }; \ case $$flg in \ *=*|--*) continue;; \ -*I) strip_trailopt 'I'; skip_next=yes;; \ -*I?*) strip_trailopt 'I';; \ -*O) strip_trailopt 'O'; skip_next=yes;; \ -*O?*) strip_trailopt 'O';; \ -*l) strip_trailopt 'l'; skip_next=yes;; \ -*l?*) strip_trailopt 'l';; \ -[dEDm]) skip_next=yes;; \ -[JT]) skip_next=yes;; \ esac; \ case $$flg in \ *$$target_option*) has_opt=yes; break;; \ esac; \ done; \ test $$has_opt = yes am__make_dryrun = (target_option=n; $(am__make_running_with_option)) am__make_keepgoing = (target_option=k; $(am__make_running_with_option)) pkgdatadir = $(datadir)/@PACKAGE@ pkgincludedir = $(includedir)/@PACKAGE@ pkglibdir = $(libdir)/@PACKAGE@ pkglibexecdir = $(libexecdir)/@PACKAGE@ am__cd = CDPATH="$${ZSH_VERSION+.}$(PATH_SEPARATOR)" && cd install_sh_DATA = $(install_sh) -c -m 644 install_sh_PROGRAM = $(install_sh) -c install_sh_SCRIPT = $(install_sh) -c INSTALL_HEADER = $(INSTALL_DATA) transform = $(program_transform_name) NORMAL_INSTALL = : PRE_INSTALL = : POST_INSTALL = : NORMAL_UNINSTALL = : PRE_UNINSTALL = : POST_UNINSTALL = : build_triplet = @build@ host_triplet = @host@ subdir = . 
ACLOCAL_M4 = $(top_srcdir)/aclocal.m4 am__aclocal_m4_deps = $(top_srcdir)/m4/ax_pkg_check_modules.m4 \ $(top_srcdir)/m4/tsk_opt_dep_check.m4 \ $(top_srcdir)/m4/ax_pthread.m4 $(top_srcdir)/m4/cppunit.m4 \ $(top_srcdir)/m4/ax_jni_include_dir.m4 \ $(top_srcdir)/m4/ac_prog_javac_works.m4 \ $(top_srcdir)/m4/ac_prog_javac.m4 \ $(top_srcdir)/m4/ac_prog_java_works.m4 \ $(top_srcdir)/m4/ac_prog_java.m4 \ $(top_srcdir)/m4/ax_cxx_compile_stdcxx.m4 \ $(top_srcdir)/configure.ac am__configure_deps = $(am__aclocal_m4_deps) $(CONFIGURE_DEPENDENCIES) \ $(ACLOCAL_M4) DIST_COMMON = $(srcdir)/Makefile.am $(top_srcdir)/configure \ $(am__configure_deps) $(nobase_dist_data_DATA) \ $(nobase_include_HEADERS) $(am__DIST_COMMON) am__CONFIG_DISTCLEAN_FILES = config.status config.cache config.log \ configure.lineno config.status.lineno mkinstalldirs = $(install_sh) -d CONFIG_HEADER = $(top_builddir)/tsk/tsk_config.h CONFIG_CLEAN_FILES = CONFIG_CLEAN_VPATH_FILES = AM_V_P = $(am__v_P_@AM_V@) am__v_P_ = $(am__v_P_@AM_DEFAULT_V@) am__v_P_0 = false am__v_P_1 = : AM_V_GEN = $(am__v_GEN_@AM_V@) am__v_GEN_ = $(am__v_GEN_@AM_DEFAULT_V@) am__v_GEN_0 = @echo " GEN " $@; am__v_GEN_1 = AM_V_at = $(am__v_at_@AM_V@) am__v_at_ = $(am__v_at_@AM_DEFAULT_V@) am__v_at_0 = @ am__v_at_1 = SOURCES = DIST_SOURCES = RECURSIVE_TARGETS = all-recursive check-recursive cscopelist-recursive \ ctags-recursive dvi-recursive html-recursive info-recursive \ install-data-recursive install-dvi-recursive \ install-exec-recursive install-html-recursive \ install-info-recursive install-pdf-recursive \ install-ps-recursive install-recursive installcheck-recursive \ installdirs-recursive pdf-recursive ps-recursive \ tags-recursive uninstall-recursive am__can_run_installinfo = \ case $$AM_UPDATE_INFO_DIR in \ n|no|NO) false;; \ *) (install-info --version) >/dev/null 2>&1;; \ esac am__vpath_adj_setup = srcdirstrip=`echo "$(srcdir)" | sed 's|.|.|g'`; am__vpath_adj = case $$p in \ $(srcdir)/*) f=`echo "$$p" | sed "s|^$$srcdirstrip/||"`;; 
\ *) f=$$p;; \ esac; am__strip_dir = f=`echo $$p | sed -e 's|^.*/||'`; am__install_max = 40 am__nobase_strip_setup = \ srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*|]/\\\\&/g'` am__nobase_strip = \ for p in $$list; do echo "$$p"; done | sed -e "s|$$srcdirstrip/||" am__nobase_list = $(am__nobase_strip_setup); \ for p in $$list; do echo "$$p $$p"; done | \ sed "s| $$srcdirstrip/| |;"' / .*\//!s/ .*/ ./; s,\( .*\)/[^/]*$$,\1,' | \ $(AWK) 'BEGIN { files["."] = "" } { files[$$2] = files[$$2] " " $$1; \ if (++n[$$2] == $(am__install_max)) \ { print $$2, files[$$2]; n[$$2] = 0; files[$$2] = "" } } \ END { for (dir in files) print dir, files[dir] }' am__base_list = \ sed '$$!N;$$!N;$$!N;$$!N;$$!N;$$!N;$$!N;s/\n/ /g' | \ sed '$$!N;$$!N;$$!N;$$!N;s/\n/ /g' am__uninstall_files_from_dir = { \ test -z "$$files" \ || { test ! -d "$$dir" && test ! -f "$$dir" && test ! -r "$$dir"; } \ || { echo " ( cd '$$dir' && rm -f" $$files ")"; \ $(am__cd) "$$dir" && rm -f $$files; }; \ } am__installdirs = "$(DESTDIR)$(datadir)" "$(DESTDIR)$(includedir)" DATA = $(nobase_dist_data_DATA) HEADERS = $(nobase_include_HEADERS) RECURSIVE_CLEAN_TARGETS = mostlyclean-recursive clean-recursive \ distclean-recursive maintainer-clean-recursive am__recursive_targets = \ $(RECURSIVE_TARGETS) \ $(RECURSIVE_CLEAN_TARGETS) \ $(am__extra_recursive_targets) AM_RECURSIVE_TARGETS = $(am__recursive_targets:-recursive=) TAGS CTAGS \ cscope distdir dist dist-all distcheck am__tagged_files = $(HEADERS) $(SOURCES) $(TAGS_FILES) $(LISP) # Read a list of newline-separated strings from the standard input, # and print each of them once, without duplicates. Input order is # *not* preserved. am__uniquify_input = $(AWK) '\ BEGIN { nonempty = 0; } \ { items[$$0] = 1; nonempty = 1; } \ END { if (nonempty) { for (i in items) print i; }; } \ ' # Make sure the list of sources is unique. This is necessary because, # e.g., the same source file might be shared among _SOURCES variables # for different programs/libraries. 
am__define_uniq_tagged_files = \ list='$(am__tagged_files)'; \ unique=`for i in $$list; do \ if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \ done | $(am__uniquify_input)` ETAGS = etags CTAGS = ctags CSCOPE = cscope DIST_SUBDIRS = tsk tools tests samples man unit_tests bindings/java \ case-uco/java am__DIST_COMMON = $(srcdir)/Makefile.in $(top_srcdir)/config/compile \ $(top_srcdir)/config/config.guess \ $(top_srcdir)/config/config.sub \ $(top_srcdir)/config/install-sh $(top_srcdir)/config/ltmain.sh \ $(top_srcdir)/config/missing config/compile \ config/config.guess config/config.sub config/install-sh \ config/ltmain.sh config/missing DISTFILES = $(DIST_COMMON) $(DIST_SOURCES) $(TEXINFOS) $(EXTRA_DIST) distdir = $(PACKAGE)-$(VERSION) top_distdir = $(distdir) am__remove_distdir = \ if test -d "$(distdir)"; then \ find "$(distdir)" -type d ! -perm -200 -exec chmod u+w {} ';' \ && rm -rf "$(distdir)" \ || { sleep 5 && rm -rf "$(distdir)"; }; \ else :; fi am__post_remove_distdir = $(am__remove_distdir) am__relativize = \ dir0=`pwd`; \ sed_first='s,^\([^/]*\)/.*$$,\1,'; \ sed_rest='s,^[^/]*/*,,'; \ sed_last='s,^.*/\([^/]*\)$$,\1,'; \ sed_butlast='s,/*[^/]*$$,,'; \ while test -n "$$dir1"; do \ first=`echo "$$dir1" | sed -e "$$sed_first"`; \ if test "$$first" != "."; then \ if test "$$first" = ".."; then \ dir2=`echo "$$dir0" | sed -e "$$sed_last"`/"$$dir2"; \ dir0=`echo "$$dir0" | sed -e "$$sed_butlast"`; \ else \ first2=`echo "$$dir2" | sed -e "$$sed_first"`; \ if test "$$first2" = "$$first"; then \ dir2=`echo "$$dir2" | sed -e "$$sed_rest"`; \ else \ dir2="../$$dir2"; \ fi; \ dir0="$$dir0"/"$$first"; \ fi; \ fi; \ dir1=`echo "$$dir1" | sed -e "$$sed_rest"`; \ done; \ reldir="$$dir2" DIST_ARCHIVES = $(distdir).tar.gz GZIP_ENV = --best DIST_TARGETS = dist-gzip distuninstallcheck_listfiles = find . 
-type f -print am__distuninstallcheck_listfiles = $(distuninstallcheck_listfiles) \ | sed 's|^\./|$(prefix)/|' | grep -v '$(infodir)/dir$$' distcleancheck_listfiles = find . -type f -print ACLOCAL = @ACLOCAL@ ALLOCA = @ALLOCA@ AMTAR = @AMTAR@ AM_CFLAGS = @AM_CFLAGS@ AM_CXXFLAGS = @AM_CXXFLAGS@ AM_DEFAULT_VERBOSITY = @AM_DEFAULT_VERBOSITY@ ANT_FOUND = @ANT_FOUND@ AR = @AR@ AUTOCONF = @AUTOCONF@ AUTOHEADER = @AUTOHEADER@ AUTOMAKE = @AUTOMAKE@ AWK = @AWK@ AX_PACKAGE_REQUIRES = @AX_PACKAGE_REQUIRES@ AX_PACKAGE_REQUIRES_PRIVATE = @AX_PACKAGE_REQUIRES_PRIVATE@ CC = @CC@ CCDEPMODE = @CCDEPMODE@ CFLAGS = @CFLAGS@ CPP = @CPP@ CPPFLAGS = @CPPFLAGS@ CPPUNIT_CFLAGS = @CPPUNIT_CFLAGS@ CPPUNIT_CONFIG = @CPPUNIT_CONFIG@ CPPUNIT_LIBS = @CPPUNIT_LIBS@ CXX = @CXX@ CXXCPP = @CXXCPP@ CXXDEPMODE = @CXXDEPMODE@ CXXFLAGS = @CXXFLAGS@ CYGPATH_W = @CYGPATH_W@ DEFS = @DEFS@ DEPDIR = @DEPDIR@ DLLTOOL = @DLLTOOL@ DSYMUTIL = @DSYMUTIL@ DUMPBIN = @DUMPBIN@ ECHO_C = @ECHO_C@ ECHO_N = @ECHO_N@ ECHO_T = @ECHO_T@ EGREP = @EGREP@ EWF_CFLAGS = @EWF_CFLAGS@ EWF_LIBS = @EWF_LIBS@ EXEEXT = @EXEEXT@ FGREP = @FGREP@ GREP = @GREP@ HAVE_CXX14 = @HAVE_CXX14@ IGNORE = @IGNORE@ INSTALL = @INSTALL@ INSTALL_DATA = @INSTALL_DATA@ INSTALL_PROGRAM = @INSTALL_PROGRAM@ INSTALL_SCRIPT = @INSTALL_SCRIPT@ INSTALL_STRIP_PROGRAM = @INSTALL_STRIP_PROGRAM@ JAVA = @JAVA@ JAVAC = @JAVAC@ JNI_CPPFLAGS = @JNI_CPPFLAGS@ LD = @LD@ LDFLAGS = @LDFLAGS@ LIBOBJS = @LIBOBJS@ LIBS = @LIBS@ LIBTOOL = @LIBTOOL@ LIBTSK_LDFLAGS = @LIBTSK_LDFLAGS@ LIPO = @LIPO@ LN_S = @LN_S@ LTLIBOBJS = @LTLIBOBJS@ LT_SYS_LIBRARY_PATH = @LT_SYS_LIBRARY_PATH@ MAINT = @MAINT@ MAKEINFO = @MAKEINFO@ MANIFEST_TOOL = @MANIFEST_TOOL@ MKDIR_P = @MKDIR_P@ NM = @NM@ NMEDIT = @NMEDIT@ OBJDUMP = @OBJDUMP@ OBJEXT = @OBJEXT@ OTOOL = @OTOOL@ OTOOL64 = @OTOOL64@ PACKAGE = @PACKAGE@ PACKAGE_BUGREPORT = @PACKAGE_BUGREPORT@ PACKAGE_LIBS_PRIVATE = @PACKAGE_LIBS_PRIVATE@ PACKAGE_NAME = @PACKAGE_NAME@ PACKAGE_STRING = @PACKAGE_STRING@ PACKAGE_TARNAME = @PACKAGE_TARNAME@ 
PACKAGE_URL = @PACKAGE_URL@ PACKAGE_VERSION = @PACKAGE_VERSION@ PATH_SEPARATOR = @PATH_SEPARATOR@ PERL = @PERL@ PKGCONFIG = @PKGCONFIG@ PKG_CONFIG = @PKG_CONFIG@ PKG_CONFIG_LIBDIR = @PKG_CONFIG_LIBDIR@ PKG_CONFIG_PATH = @PKG_CONFIG_PATH@ PTHREAD_CC = @PTHREAD_CC@ PTHREAD_CFLAGS = @PTHREAD_CFLAGS@ PTHREAD_LIBS = @PTHREAD_LIBS@ RANLIB = @RANLIB@ SED = @SED@ SET_MAKE = @SET_MAKE@ SHELL = @SHELL@ SQLITE3_CFLAGS = @SQLITE3_CFLAGS@ SQLITE3_LIBS = @SQLITE3_LIBS@ STRIP = @STRIP@ VERSION = @VERSION@ VHDI_CFLAGS = @VHDI_CFLAGS@ VHDI_LIBS = @VHDI_LIBS@ VMDK_CFLAGS = @VMDK_CFLAGS@ VMDK_LIBS = @VMDK_LIBS@ ZLIB_CFLAGS = @ZLIB_CFLAGS@ ZLIB_LIBS = @ZLIB_LIBS@ _ACJNI_JAVAC = @_ACJNI_JAVAC@ abs_builddir = @abs_builddir@ abs_srcdir = @abs_srcdir@ abs_top_builddir = @abs_top_builddir@ abs_top_srcdir = @abs_top_srcdir@ ac_ct_AR = @ac_ct_AR@ ac_ct_CC = @ac_ct_CC@ ac_ct_CXX = @ac_ct_CXX@ ac_ct_DUMPBIN = @ac_ct_DUMPBIN@ am__include = @am__include@ am__leading_dot = @am__leading_dot@ am__quote = @am__quote@ am__tar = @am__tar@ am__untar = @am__untar@ ax_pthread_config = @ax_pthread_config@ bindir = @bindir@ build = @build@ build_alias = @build_alias@ build_cpu = @build_cpu@ build_os = @build_os@ build_vendor = @build_vendor@ builddir = @builddir@ datadir = @datadir@ datarootdir = @datarootdir@ docdir = @docdir@ dvidir = @dvidir@ exec_prefix = @exec_prefix@ host = @host@ host_alias = @host_alias@ host_cpu = @host_cpu@ host_os = @host_os@ host_vendor = @host_vendor@ htmldir = @htmldir@ includedir = @includedir@ infodir = @infodir@ install_sh = @install_sh@ libdir = @libdir@ libexecdir = @libexecdir@ localedir = @localedir@ localstatedir = @localstatedir@ mandir = @mandir@ mkdir_p = @mkdir_p@ oldincludedir = @oldincludedir@ pdfdir = @pdfdir@ prefix = @prefix@ program_transform_name = @program_transform_name@ psdir = @psdir@ sbindir = @sbindir@ sharedstatedir = @sharedstatedir@ srcdir = @srcdir@ sysconfdir = @sysconfdir@ target_alias = @target_alias@ top_build_prefix = @top_build_prefix@ 
top_builddir = @top_builddir@ top_srcdir = @top_srcdir@ uudecode = @uudecode@ # File that we want to include in the dist EXTRA_DIST = README_win32.txt README.md INSTALL.txt ChangeLog.txt NEWS.txt API-CHANGES.txt \ licenses/README.md licenses/GNUv2-COPYING licenses/GNUv3-COPYING licenses/IBM-LICENSE \ licenses/Apache-LICENSE-2.0.txt licenses/cpl1.0.txt licenses/bsd.txt licenses/mit.txt \ m4/*.m4 \ docs/README.txt \ packages/sleuthkit.spec \ win32/BUILDING.txt \ win32/*/*.vcxproj \ win32/tsk-win.sln \ win32/NugetPackages.props \ win32/docs/* \ bindings/java/README.txt \ bindings/java/*.xml \ bindings/java/doxygen/Doxyfile \ bindings/java/doxygen/*.dox \ bindings/java/doxygen/*.html \ bindings/java/nbproject/project.xml \ bindings/java/src/org/sleuthkit/datamodel/*.java \ bindings/java/src/org/sleuthkit/datamodel/*.html \ bindings/java/src/org/sleuthkit/datamodel/*.properties \ bindings/java/src/org/sleuthkit/datamodel/blackboardutils/*.java \ bindings/java/src/org/sleuthkit/datamodel/blackboardutils/attributes/*.java \ bindings/java/src/org/sleuthkit/datamodel/Examples/*.java \ bindings/java/src/*.html \ case-uco/java/*.xml \ case-uco/java/*.md \ case-uco/java/nbproject/*.xml \ case-uco/java/nbproject/*.properties \ case-uco/java/src/org/sleuthkit/caseuco/*.java \ case-uco/java/test/org/sleuthkit/caseuco/*.java ACLOCAL_AMFLAGS = -I m4 # directories to compile @CPPUNIT_TRUE@UNIT_TESTS = unit_tests @X_JNI_FALSE@JAVA_BINDINGS = # Compile java bindings if all of the dependencies existed @X_JNI_TRUE@JAVA_BINDINGS = bindings/java @X_JNI_FALSE@JAVA_CASEUCO = @X_JNI_TRUE@JAVA_CASEUCO = case-uco/java SUBDIRS = tsk tools tests samples man $(UNIT_TESTS) $(JAVA_BINDINGS) $(JAVA_CASEUCO) nobase_include_HEADERS = tsk/libtsk.h tsk/tsk_incs.h \ tsk/base/tsk_base.h tsk/base/tsk_os.h \ tsk/img/tsk_img.h tsk/vs/tsk_vs.h tsk/img/pool.hpp \ tsk/vs/tsk_bsd.h tsk/vs/tsk_dos.h tsk/vs/tsk_gpt.h \ tsk/vs/tsk_mac.h tsk/vs/tsk_sun.h \ tsk/fs/tsk_fs.h tsk/fs/tsk_ffs.h tsk/fs/tsk_ext2fs.h 
tsk/fs/tsk_fatfs.h \ tsk/fs/tsk_ntfs.h tsk/fs/tsk_iso9660.h tsk/fs/tsk_hfs.h tsk/fs/tsk_yaffs.h \ tsk/fs/tsk_apfs.h tsk/fs/tsk_apfs.hpp tsk/fs/apfs_fs.h tsk/fs/apfs_fs.hpp tsk/fs/apfs_compat.hpp \ tsk/fs/decmpfs.h tsk/fs/tsk_exfatfs.h tsk/fs/tsk_fatxxfs.h \ tsk/hashdb/tsk_hashdb.h tsk/auto/tsk_auto.h \ tsk/auto/tsk_is_image_supported.h tsk/auto/guid.h \ tsk/pool/tsk_pool.h tsk/pool/tsk_pool.hpp tsk/pool/tsk_apfs.h tsk/pool/tsk_apfs.hpp \ tsk/pool/pool_compat.hpp tsk/pool/apfs_pool_compat.hpp \ tsk/util/crypto.hpp tsk/util/lw_shared_ptr.hpp tsk/util/span.hpp \ tsk/util/detect_encryption.h nobase_dist_data_DATA = tsk/sorter/default.sort tsk/sorter/freebsd.sort \ tsk/sorter/images.sort tsk/sorter/linux.sort tsk/sorter/openbsd.sort \ tsk/sorter/solaris.sort tsk/sorter/windows.sort all: all-recursive .SUFFIXES: am--refresh: Makefile @: $(srcdir)/Makefile.in: @MAINTAINER_MODE_TRUE@ $(srcdir)/Makefile.am $(am__configure_deps) @for dep in $?; do \ case '$(am__configure_deps)' in \ *$$dep*) \ echo ' cd $(srcdir) && $(AUTOMAKE) --foreign'; \ $(am__cd) $(srcdir) && $(AUTOMAKE) --foreign \ && exit 0; \ exit 1;; \ esac; \ done; \ echo ' cd $(top_srcdir) && $(AUTOMAKE) --foreign Makefile'; \ $(am__cd) $(top_srcdir) && \ $(AUTOMAKE) --foreign Makefile Makefile: $(srcdir)/Makefile.in $(top_builddir)/config.status @case '$?' 
in \ *config.status*) \ echo ' $(SHELL) ./config.status'; \ $(SHELL) ./config.status;; \ *) \ echo ' cd $(top_builddir) && $(SHELL) ./config.status $@ $(am__depfiles_maybe)'; \ cd $(top_builddir) && $(SHELL) ./config.status $@ $(am__depfiles_maybe);; \ esac; $(top_builddir)/config.status: $(top_srcdir)/configure $(CONFIG_STATUS_DEPENDENCIES) $(SHELL) ./config.status --recheck $(top_srcdir)/configure: @MAINTAINER_MODE_TRUE@ $(am__configure_deps) $(am__cd) $(srcdir) && $(AUTOCONF) $(ACLOCAL_M4): @MAINTAINER_MODE_TRUE@ $(am__aclocal_m4_deps) $(am__cd) $(srcdir) && $(ACLOCAL) $(ACLOCAL_AMFLAGS) $(am__aclocal_m4_deps): mostlyclean-libtool: -rm -f *.lo clean-libtool: -rm -rf .libs _libs distclean-libtool: -rm -f libtool config.lt install-nobase_dist_dataDATA: $(nobase_dist_data_DATA) @$(NORMAL_INSTALL) @list='$(nobase_dist_data_DATA)'; test -n "$(datadir)" || list=; \ if test -n "$$list"; then \ echo " $(MKDIR_P) '$(DESTDIR)$(datadir)'"; \ $(MKDIR_P) "$(DESTDIR)$(datadir)" || exit 1; \ fi; \ $(am__nobase_list) | while read dir files; do \ xfiles=; for file in $$files; do \ if test -f "$$file"; then xfiles="$$xfiles $$file"; \ else xfiles="$$xfiles $(srcdir)/$$file"; fi; done; \ test -z "$$xfiles" || { \ test "x$$dir" = x. 
|| { \ echo " $(MKDIR_P) '$(DESTDIR)$(datadir)/$$dir'"; \ $(MKDIR_P) "$(DESTDIR)$(datadir)/$$dir"; }; \ echo " $(INSTALL_DATA) $$xfiles '$(DESTDIR)$(datadir)/$$dir'"; \ $(INSTALL_DATA) $$xfiles "$(DESTDIR)$(datadir)/$$dir" || exit $$?; }; \ done uninstall-nobase_dist_dataDATA: @$(NORMAL_UNINSTALL) @list='$(nobase_dist_data_DATA)'; test -n "$(datadir)" || list=; \ $(am__nobase_strip_setup); files=`$(am__nobase_strip)`; \ dir='$(DESTDIR)$(datadir)'; $(am__uninstall_files_from_dir) install-nobase_includeHEADERS: $(nobase_include_HEADERS) @$(NORMAL_INSTALL) @list='$(nobase_include_HEADERS)'; test -n "$(includedir)" || list=; \ if test -n "$$list"; then \ echo " $(MKDIR_P) '$(DESTDIR)$(includedir)'"; \ $(MKDIR_P) "$(DESTDIR)$(includedir)" || exit 1; \ fi; \ $(am__nobase_list) | while read dir files; do \ xfiles=; for file in $$files; do \ if test -f "$$file"; then xfiles="$$xfiles $$file"; \ else xfiles="$$xfiles $(srcdir)/$$file"; fi; done; \ test -z "$$xfiles" || { \ test "x$$dir" = x. || { \ echo " $(MKDIR_P) '$(DESTDIR)$(includedir)/$$dir'"; \ $(MKDIR_P) "$(DESTDIR)$(includedir)/$$dir"; }; \ echo " $(INSTALL_HEADER) $$xfiles '$(DESTDIR)$(includedir)/$$dir'"; \ $(INSTALL_HEADER) $$xfiles "$(DESTDIR)$(includedir)/$$dir" || exit $$?; }; \ done uninstall-nobase_includeHEADERS: @$(NORMAL_UNINSTALL) @list='$(nobase_include_HEADERS)'; test -n "$(includedir)" || list=; \ $(am__nobase_strip_setup); files=`$(am__nobase_strip)`; \ dir='$(DESTDIR)$(includedir)'; $(am__uninstall_files_from_dir) # This directory's subdirectories are mostly independent; you can cd # into them and run 'make' without going through this Makefile. # To change the values of 'make' variables: instead of editing Makefiles, # (1) if the variable is set in 'config.status', edit 'config.status' # (which will cause the Makefiles to be regenerated when you run 'make'); # (2) otherwise, pass the desired values on the 'make' command line. 
$(am__recursive_targets): @fail=; \ if $(am__make_keepgoing); then \ failcom='fail=yes'; \ else \ failcom='exit 1'; \ fi; \ dot_seen=no; \ target=`echo $@ | sed s/-recursive//`; \ case "$@" in \ distclean-* | maintainer-clean-*) list='$(DIST_SUBDIRS)' ;; \ *) list='$(SUBDIRS)' ;; \ esac; \ for subdir in $$list; do \ echo "Making $$target in $$subdir"; \ if test "$$subdir" = "."; then \ dot_seen=yes; \ local_target="$$target-am"; \ else \ local_target="$$target"; \ fi; \ ($(am__cd) $$subdir && $(MAKE) $(AM_MAKEFLAGS) $$local_target) \ || eval $$failcom; \ done; \ if test "$$dot_seen" = "no"; then \ $(MAKE) $(AM_MAKEFLAGS) "$$target-am" || exit 1; \ fi; test -z "$$fail" ID: $(am__tagged_files) $(am__define_uniq_tagged_files); mkid -fID $$unique tags: tags-recursive TAGS: tags tags-am: $(TAGS_DEPENDENCIES) $(am__tagged_files) set x; \ here=`pwd`; \ if ($(ETAGS) --etags-include --version) >/dev/null 2>&1; then \ include_option=--etags-include; \ empty_fix=.; \ else \ include_option=--include; \ empty_fix=; \ fi; \ list='$(SUBDIRS)'; for subdir in $$list; do \ if test "$$subdir" = .; then :; else \ test ! -f $$subdir/TAGS || \ set "$$@" "$$include_option=$$here/$$subdir/TAGS"; \ fi; \ done; \ $(am__define_uniq_tagged_files); \ shift; \ if test -z "$(ETAGS_ARGS)$$*$$unique"; then :; else \ test -n "$$unique" || unique=$$empty_fix; \ if test $$# -gt 0; then \ $(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \ "$$@" $$unique; \ else \ $(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \ $$unique; \ fi; \ fi ctags: ctags-recursive CTAGS: ctags ctags-am: $(TAGS_DEPENDENCIES) $(am__tagged_files) $(am__define_uniq_tagged_files); \ test -z "$(CTAGS_ARGS)$$unique" \ || $(CTAGS) $(CTAGSFLAGS) $(AM_CTAGSFLAGS) $(CTAGS_ARGS) \ $$unique GTAGS: here=`$(am__cd) $(top_builddir) && pwd` \ && $(am__cd) $(top_srcdir) \ && gtags -i $(GTAGS_ARGS) "$$here" cscope: cscope.files test ! 
-s cscope.files \ || $(CSCOPE) -b -q $(AM_CSCOPEFLAGS) $(CSCOPEFLAGS) -i cscope.files $(CSCOPE_ARGS) clean-cscope: -rm -f cscope.files cscope.files: clean-cscope cscopelist cscopelist: cscopelist-recursive cscopelist-am: $(am__tagged_files) list='$(am__tagged_files)'; \ case "$(srcdir)" in \ [\\/]* | ?:[\\/]*) sdir="$(srcdir)" ;; \ *) sdir=$(subdir)/$(srcdir) ;; \ esac; \ for i in $$list; do \ if test -f "$$i"; then \ echo "$(subdir)/$$i"; \ else \ echo "$$sdir/$$i"; \ fi; \ done >> $(top_builddir)/cscope.files distclean-tags: -rm -f TAGS ID GTAGS GRTAGS GSYMS GPATH tags -rm -f cscope.out cscope.in.out cscope.po.out cscope.files distdir: $(DISTFILES) $(am__remove_distdir) test -d "$(distdir)" || mkdir "$(distdir)" @srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \ topsrcdirstrip=`echo "$(top_srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \ list='$(DISTFILES)'; \ dist_files=`for file in $$list; do echo $$file; done | \ sed -e "s|^$$srcdirstrip/||;t" \ -e "s|^$$topsrcdirstrip/|$(top_builddir)/|;t"`; \ case $$dist_files in \ */*) $(MKDIR_P) `echo "$$dist_files" | \ sed '/\//!d;s|^|$(distdir)/|;s,/[^/]*$$,,' | \ sort -u` ;; \ esac; \ for file in $$dist_files; do \ if test -f $$file || test -d $$file; then d=.; else d=$(srcdir); fi; \ if test -d $$d/$$file; then \ dir=`echo "/$$file" | sed -e 's,/[^/]*$$,,'`; \ if test -d "$(distdir)/$$file"; then \ find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \ fi; \ if test -d $(srcdir)/$$file && test $$d != $(srcdir); then \ cp -fpR $(srcdir)/$$file "$(distdir)$$dir" || exit 1; \ find "$(distdir)/$$file" -type d ! 
-perm -700 -exec chmod u+rwx {} \;; \ fi; \ cp -fpR $$d/$$file "$(distdir)$$dir" || exit 1; \ else \ test -f "$(distdir)/$$file" \ || cp -p $$d/$$file "$(distdir)/$$file" \ || exit 1; \ fi; \ done @list='$(DIST_SUBDIRS)'; for subdir in $$list; do \ if test "$$subdir" = .; then :; else \ $(am__make_dryrun) \ || test -d "$(distdir)/$$subdir" \ || $(MKDIR_P) "$(distdir)/$$subdir" \ || exit 1; \ dir1=$$subdir; dir2="$(distdir)/$$subdir"; \ $(am__relativize); \ new_distdir=$$reldir; \ dir1=$$subdir; dir2="$(top_distdir)"; \ $(am__relativize); \ new_top_distdir=$$reldir; \ echo " (cd $$subdir && $(MAKE) $(AM_MAKEFLAGS) top_distdir="$$new_top_distdir" distdir="$$new_distdir" \\"; \ echo " am__remove_distdir=: am__skip_length_check=: am__skip_mode_fix=: distdir)"; \ ($(am__cd) $$subdir && \ $(MAKE) $(AM_MAKEFLAGS) \ top_distdir="$$new_top_distdir" \ distdir="$$new_distdir" \ am__remove_distdir=: \ am__skip_length_check=: \ am__skip_mode_fix=: \ distdir) \ || exit 1; \ fi; \ done -test -n "$(am__skip_mode_fix)" \ || find "$(distdir)" -type d ! -perm -755 \ -exec chmod u+rwx,go+rx {} \; -o \ ! -type d ! -perm -444 -links 1 -exec chmod a+r {} \; -o \ ! -type d ! -perm -400 -exec chmod a+r {} \; -o \ ! -type d ! -perm -444 -exec $(install_sh) -c -m a+r {} {} \; \ || chmod -R a+r "$(distdir)" dist-gzip: distdir tardir=$(distdir) && $(am__tar) | eval GZIP= gzip $(GZIP_ENV) -c >$(distdir).tar.gz $(am__post_remove_distdir) dist-bzip2: distdir tardir=$(distdir) && $(am__tar) | BZIP2=$${BZIP2--9} bzip2 -c >$(distdir).tar.bz2 $(am__post_remove_distdir) dist-lzip: distdir tardir=$(distdir) && $(am__tar) | lzip -c $${LZIP_OPT--9} >$(distdir).tar.lz $(am__post_remove_distdir) dist-xz: distdir tardir=$(distdir) && $(am__tar) | XZ_OPT=$${XZ_OPT--e} xz -c >$(distdir).tar.xz $(am__post_remove_distdir) dist-tarZ: distdir @echo WARNING: "Support for distribution archives compressed with" \ "legacy program 'compress' is deprecated." 
>&2 @echo WARNING: "It will be removed altogether in Automake 2.0" >&2 tardir=$(distdir) && $(am__tar) | compress -c >$(distdir).tar.Z $(am__post_remove_distdir) dist-shar: distdir @echo WARNING: "Support for shar distribution archives is" \ "deprecated." >&2 @echo WARNING: "It will be removed altogether in Automake 2.0" >&2 shar $(distdir) | eval GZIP= gzip $(GZIP_ENV) -c >$(distdir).shar.gz $(am__post_remove_distdir) dist-zip: distdir -rm -f $(distdir).zip zip -rq $(distdir).zip $(distdir) $(am__post_remove_distdir) dist dist-all: $(MAKE) $(AM_MAKEFLAGS) $(DIST_TARGETS) am__post_remove_distdir='@:' $(am__post_remove_distdir) # This target untars the dist file and tries a VPATH configuration. Then # it guarantees that the distribution is self-contained by making another # tarfile. distcheck: dist case '$(DIST_ARCHIVES)' in \ *.tar.gz*) \ eval GZIP= gzip $(GZIP_ENV) -dc $(distdir).tar.gz | $(am__untar) ;;\ *.tar.bz2*) \ bzip2 -dc $(distdir).tar.bz2 | $(am__untar) ;;\ *.tar.lz*) \ lzip -dc $(distdir).tar.lz | $(am__untar) ;;\ *.tar.xz*) \ xz -dc $(distdir).tar.xz | $(am__untar) ;;\ *.tar.Z*) \ uncompress -c $(distdir).tar.Z | $(am__untar) ;;\ *.shar.gz*) \ eval GZIP= gzip $(GZIP_ENV) -dc $(distdir).shar.gz | unshar ;;\ *.zip*) \ unzip $(distdir).zip ;;\ esac chmod -R a-w $(distdir) chmod u+w $(distdir) mkdir $(distdir)/_build $(distdir)/_build/sub $(distdir)/_inst chmod a-w $(distdir) test -d $(distdir)/_build || exit 0; \ dc_install_base=`$(am__cd) $(distdir)/_inst && pwd | sed -e 's,^[^:\\/]:[\\/],/,'` \ && dc_destdir="$${TMPDIR-/tmp}/am-dc-$$$$/" \ && am__cwd=`pwd` \ && $(am__cd) $(distdir)/_build/sub \ && ../../configure \ $(AM_DISTCHECK_CONFIGURE_FLAGS) \ $(DISTCHECK_CONFIGURE_FLAGS) \ --srcdir=../.. 
--prefix="$$dc_install_base" \ && $(MAKE) $(AM_MAKEFLAGS) \ && $(MAKE) $(AM_MAKEFLAGS) dvi \ && $(MAKE) $(AM_MAKEFLAGS) check \ && $(MAKE) $(AM_MAKEFLAGS) install \ && $(MAKE) $(AM_MAKEFLAGS) installcheck \ && $(MAKE) $(AM_MAKEFLAGS) uninstall \ && $(MAKE) $(AM_MAKEFLAGS) distuninstallcheck_dir="$$dc_install_base" \ distuninstallcheck \ && chmod -R a-w "$$dc_install_base" \ && ({ \ (cd ../.. && umask 077 && mkdir "$$dc_destdir") \ && $(MAKE) $(AM_MAKEFLAGS) DESTDIR="$$dc_destdir" install \ && $(MAKE) $(AM_MAKEFLAGS) DESTDIR="$$dc_destdir" uninstall \ && $(MAKE) $(AM_MAKEFLAGS) DESTDIR="$$dc_destdir" \ distuninstallcheck_dir="$$dc_destdir" distuninstallcheck; \ } || { rm -rf "$$dc_destdir"; exit 1; }) \ && rm -rf "$$dc_destdir" \ && $(MAKE) $(AM_MAKEFLAGS) dist \ && rm -rf $(DIST_ARCHIVES) \ && $(MAKE) $(AM_MAKEFLAGS) distcleancheck \ && cd "$$am__cwd" \ || exit 1 $(am__post_remove_distdir) @(echo "$(distdir) archives ready for distribution: "; \ list='$(DIST_ARCHIVES)'; for i in $$list; do echo $$i; done) | \ sed -e 1h -e 1s/./=/g -e 1p -e 1x -e '$$p' -e '$$x' distuninstallcheck: @test -n '$(distuninstallcheck_dir)' || { \ echo 'ERROR: trying to run $@ with an empty' \ '$$(distuninstallcheck_dir)' >&2; \ exit 1; \ }; \ $(am__cd) '$(distuninstallcheck_dir)' || { \ echo 'ERROR: cannot chdir into $(distuninstallcheck_dir)' >&2; \ exit 1; \ }; \ test `$(am__distuninstallcheck_listfiles) | wc -l` -eq 0 \ || { echo "ERROR: files left after uninstall:" ; \ if test -n "$(DESTDIR)"; then \ echo " (check DESTDIR support)"; \ fi ; \ $(distuninstallcheck_listfiles) ; \ exit 1; } >&2 distcleancheck: distclean @if test '$(srcdir)' = . 
; then \ echo "ERROR: distcleancheck can only run from a VPATH build" ; \ exit 1 ; \ fi @test `$(distcleancheck_listfiles) | wc -l` -eq 0 \ || { echo "ERROR: files left in build directory after distclean:" ; \ $(distcleancheck_listfiles) ; \ exit 1; } >&2 check-am: all-am check: check-recursive all-am: Makefile $(DATA) $(HEADERS) installdirs: installdirs-recursive installdirs-am: for dir in "$(DESTDIR)$(datadir)" "$(DESTDIR)$(includedir)"; do \ test -z "$$dir" || $(MKDIR_P) "$$dir"; \ done install: install-recursive install-exec: install-exec-recursive install-data: install-data-recursive uninstall: uninstall-recursive install-am: all-am @$(MAKE) $(AM_MAKEFLAGS) install-exec-am install-data-am installcheck: installcheck-recursive install-strip: if test -z '$(STRIP)'; then \ $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ install; \ else \ $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ "INSTALL_PROGRAM_ENV=STRIPPROG='$(STRIP)'" install; \ fi mostlyclean-generic: clean-generic: distclean-generic: -test -z "$(CONFIG_CLEAN_FILES)" || rm -f $(CONFIG_CLEAN_FILES) -test . = "$(srcdir)" || test -z "$(CONFIG_CLEAN_VPATH_FILES)" || rm -f $(CONFIG_CLEAN_VPATH_FILES) maintainer-clean-generic: @echo "This command is intended for maintainers to use" @echo "it deletes files that may require special tools to rebuild." 
clean: clean-recursive clean-am: clean-generic clean-libtool mostlyclean-am distclean: distclean-recursive -rm -f $(am__CONFIG_DISTCLEAN_FILES) -rm -f Makefile distclean-am: clean-am distclean-generic distclean-libtool \ distclean-tags dvi: dvi-recursive dvi-am: html: html-recursive html-am: info: info-recursive info-am: install-data-am: install-nobase_dist_dataDATA \ install-nobase_includeHEADERS install-dvi: install-dvi-recursive install-dvi-am: install-exec-am: install-html: install-html-recursive install-html-am: install-info: install-info-recursive install-info-am: install-man: install-pdf: install-pdf-recursive install-pdf-am: install-ps: install-ps-recursive install-ps-am: installcheck-am: maintainer-clean: maintainer-clean-recursive -rm -f $(am__CONFIG_DISTCLEAN_FILES) -rm -rf $(top_srcdir)/autom4te.cache -rm -f Makefile maintainer-clean-am: distclean-am maintainer-clean-generic mostlyclean: mostlyclean-recursive mostlyclean-am: mostlyclean-generic mostlyclean-libtool pdf: pdf-recursive pdf-am: ps: ps-recursive ps-am: uninstall-am: uninstall-nobase_dist_dataDATA \ uninstall-nobase_includeHEADERS .MAKE: $(am__recursive_targets) install-am install-strip .PHONY: $(am__recursive_targets) CTAGS GTAGS TAGS all all-am \ am--refresh check check-am clean clean-cscope clean-generic \ clean-libtool cscope cscopelist-am ctags ctags-am dist \ dist-all dist-bzip2 dist-gzip dist-lzip dist-shar dist-tarZ \ dist-xz dist-zip distcheck distclean distclean-generic \ distclean-libtool distclean-tags distcleancheck distdir \ distuninstallcheck dvi dvi-am html html-am info info-am \ install install-am install-data install-data-am install-dvi \ install-dvi-am install-exec install-exec-am install-html \ install-html-am install-info install-info-am install-man \ install-nobase_dist_dataDATA install-nobase_includeHEADERS \ install-pdf install-pdf-am install-ps install-ps-am \ install-strip installcheck installcheck-am installdirs \ installdirs-am maintainer-clean 
maintainer-clean-generic \ mostlyclean mostlyclean-generic mostlyclean-libtool pdf pdf-am \ ps ps-am tags tags-am uninstall uninstall-am \ uninstall-nobase_dist_dataDATA uninstall-nobase_includeHEADERS .PRECIOUS: Makefile api-docs: doxygen tsk/docs/Doxyfile cd bindings/java/doxygen; doxygen Doxyfile man-html: cd man;build-html # Tell versions [3.59,3.63) of GNU make to not export all variables. # Otherwise a system limit (for SysV at least) may be exceeded. .NOEXPORT: sleuthkit-4.11.1/aclocal.m4000644 000765 000024 00001356555 14137073434 016276 0ustar00carrierstaff000000 000000 # generated automatically by aclocal 1.15.1 -*- Autoconf -*- # Copyright (C) 1996-2017 Free Software Foundation, Inc. # This file is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY, to the extent permitted by law; without # even the implied warranty of MERCHANTABILITY or FITNESS FOR A # PARTICULAR PURPOSE. m4_ifndef([AC_CONFIG_MACRO_DIRS], [m4_defun([_AM_CONFIG_MACRO_DIRS], [])m4_defun([AC_CONFIG_MACRO_DIRS], [_AM_CONFIG_MACRO_DIRS($@)])]) m4_ifndef([AC_AUTOCONF_VERSION], [m4_copy([m4_PACKAGE_VERSION], [AC_AUTOCONF_VERSION])])dnl m4_if(m4_defn([AC_AUTOCONF_VERSION]), [2.69],, [m4_warning([this file was generated for autoconf 2.69. You have another version of autoconf. It may work, but is not guaranteed to. If you have problems, you may need to regenerate the build system entirely. To do so, use the procedure documented by the package, typically 'autoreconf'.])]) # libtool.m4 - Configure libtool for the host system. -*-Autoconf-*- # # Copyright (C) 1996-2001, 2003-2015 Free Software Foundation, Inc. 
# Written by Gordon Matzigkeit, 1996 # # This file is free software; the Free Software Foundation gives # unlimited permission to copy and/or distribute it, with or without # modifications, as long as this notice is preserved. m4_define([_LT_COPYING], [dnl # Copyright (C) 2014 Free Software Foundation, Inc. # This is free software; see the source for copying conditions. There is NO # warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. # GNU Libtool is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of of the License, or # (at your option) any later version. # # As a special exception to the GNU General Public License, if you # distribute this file as part of a program or library that is built # using GNU Libtool, you may include this file under the same # distribution terms that you use for the rest of that program. # # GNU Libtool is distributed in the hope that it will be useful, but # WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see . ]) # serial 58 LT_INIT # LT_PREREQ(VERSION) # ------------------ # Complain and exit if this libtool version is less that VERSION. 
m4_defun([LT_PREREQ], [m4_if(m4_version_compare(m4_defn([LT_PACKAGE_VERSION]), [$1]), -1, [m4_default([$3], [m4_fatal([Libtool version $1 or higher is required], 63)])], [$2])]) # _LT_CHECK_BUILDDIR # ------------------ # Complain if the absolute build directory name contains unusual characters m4_defun([_LT_CHECK_BUILDDIR], [case `pwd` in *\ * | *\ *) AC_MSG_WARN([Libtool does not cope well with whitespace in `pwd`]) ;; esac ]) # LT_INIT([OPTIONS]) # ------------------ AC_DEFUN([LT_INIT], [AC_PREREQ([2.62])dnl We use AC_PATH_PROGS_FEATURE_CHECK AC_REQUIRE([AC_CONFIG_AUX_DIR_DEFAULT])dnl AC_BEFORE([$0], [LT_LANG])dnl AC_BEFORE([$0], [LT_OUTPUT])dnl AC_BEFORE([$0], [LTDL_INIT])dnl m4_require([_LT_CHECK_BUILDDIR])dnl dnl Autoconf doesn't catch unexpanded LT_ macros by default: m4_pattern_forbid([^_?LT_[A-Z_]+$])dnl m4_pattern_allow([^(_LT_EOF|LT_DLGLOBAL|LT_DLLAZY_OR_NOW|LT_MULTI_MODULE)$])dnl dnl aclocal doesn't pull ltoptions.m4, ltsugar.m4, or ltversion.m4 dnl unless we require an AC_DEFUNed macro: AC_REQUIRE([LTOPTIONS_VERSION])dnl AC_REQUIRE([LTSUGAR_VERSION])dnl AC_REQUIRE([LTVERSION_VERSION])dnl AC_REQUIRE([LTOBSOLETE_VERSION])dnl m4_require([_LT_PROG_LTMAIN])dnl _LT_SHELL_INIT([SHELL=${CONFIG_SHELL-/bin/sh}]) dnl Parse OPTIONS _LT_SET_OPTIONS([$0], [$1]) # This can be used to rebuild libtool when needed LIBTOOL_DEPS=$ltmain # Always use our own libtool. LIBTOOL='$(SHELL) $(top_builddir)/libtool' AC_SUBST(LIBTOOL)dnl _LT_SETUP # Only expand once: m4_define([LT_INIT]) ])# LT_INIT # Old names: AU_ALIAS([AC_PROG_LIBTOOL], [LT_INIT]) AU_ALIAS([AM_PROG_LIBTOOL], [LT_INIT]) dnl aclocal-1.4 backwards compatibility: dnl AC_DEFUN([AC_PROG_LIBTOOL], []) dnl AC_DEFUN([AM_PROG_LIBTOOL], []) # _LT_PREPARE_CC_BASENAME # ----------------------- m4_defun([_LT_PREPARE_CC_BASENAME], [ # Calculate cc_basename. Skip known compiler wrappers and cross-prefix. 
func_cc_basename () { for cc_temp in @S|@*""; do case $cc_temp in compile | *[[\\/]]compile | ccache | *[[\\/]]ccache ) ;; distcc | *[[\\/]]distcc | purify | *[[\\/]]purify ) ;; \-*) ;; *) break;; esac done func_cc_basename_result=`$ECHO "$cc_temp" | $SED "s%.*/%%; s%^$host_alias-%%"` } ])# _LT_PREPARE_CC_BASENAME # _LT_CC_BASENAME(CC) # ------------------- # It would be clearer to call AC_REQUIREs from _LT_PREPARE_CC_BASENAME, # but that macro is also expanded into generated libtool script, which # arranges for $SED and $ECHO to be set by different means. m4_defun([_LT_CC_BASENAME], [m4_require([_LT_PREPARE_CC_BASENAME])dnl AC_REQUIRE([_LT_DECL_SED])dnl AC_REQUIRE([_LT_PROG_ECHO_BACKSLASH])dnl func_cc_basename $1 cc_basename=$func_cc_basename_result ]) # _LT_FILEUTILS_DEFAULTS # ---------------------- # It is okay to use these file commands and assume they have been set # sensibly after 'm4_require([_LT_FILEUTILS_DEFAULTS])'. m4_defun([_LT_FILEUTILS_DEFAULTS], [: ${CP="cp -f"} : ${MV="mv -f"} : ${RM="rm -f"} ])# _LT_FILEUTILS_DEFAULTS # _LT_SETUP # --------- m4_defun([_LT_SETUP], [AC_REQUIRE([AC_CANONICAL_HOST])dnl AC_REQUIRE([AC_CANONICAL_BUILD])dnl AC_REQUIRE([_LT_PREPARE_SED_QUOTE_VARS])dnl AC_REQUIRE([_LT_PROG_ECHO_BACKSLASH])dnl _LT_DECL([], [PATH_SEPARATOR], [1], [The PATH separator for the build system])dnl dnl _LT_DECL([], [host_alias], [0], [The host system])dnl _LT_DECL([], [host], [0])dnl _LT_DECL([], [host_os], [0])dnl dnl _LT_DECL([], [build_alias], [0], [The build system])dnl _LT_DECL([], [build], [0])dnl _LT_DECL([], [build_os], [0])dnl dnl AC_REQUIRE([AC_PROG_CC])dnl AC_REQUIRE([LT_PATH_LD])dnl AC_REQUIRE([LT_PATH_NM])dnl dnl AC_REQUIRE([AC_PROG_LN_S])dnl test -z "$LN_S" && LN_S="ln -s" _LT_DECL([], [LN_S], [1], [Whether we need soft or hard links])dnl dnl AC_REQUIRE([LT_CMD_MAX_LEN])dnl _LT_DECL([objext], [ac_objext], [0], [Object file suffix (normally "o")])dnl _LT_DECL([], [exeext], [0], [Executable file suffix (normally "")])dnl dnl 
m4_require([_LT_FILEUTILS_DEFAULTS])dnl m4_require([_LT_CHECK_SHELL_FEATURES])dnl m4_require([_LT_PATH_CONVERSION_FUNCTIONS])dnl m4_require([_LT_CMD_RELOAD])dnl m4_require([_LT_CHECK_MAGIC_METHOD])dnl m4_require([_LT_CHECK_SHAREDLIB_FROM_LINKLIB])dnl m4_require([_LT_CMD_OLD_ARCHIVE])dnl m4_require([_LT_CMD_GLOBAL_SYMBOLS])dnl m4_require([_LT_WITH_SYSROOT])dnl m4_require([_LT_CMD_TRUNCATE])dnl _LT_CONFIG_LIBTOOL_INIT([ # See if we are running on zsh, and set the options that allow our # commands through without removal of \ escapes INIT. if test -n "\${ZSH_VERSION+set}"; then setopt NO_GLOB_SUBST fi ]) if test -n "${ZSH_VERSION+set}"; then setopt NO_GLOB_SUBST fi _LT_CHECK_OBJDIR m4_require([_LT_TAG_COMPILER])dnl case $host_os in aix3*) # AIX sometimes has problems with the GCC collect2 program. For some # reason, if we set the COLLECT_NAMES environment variable, the problems # vanish in a puff of smoke. if test set != "${COLLECT_NAMES+set}"; then COLLECT_NAMES= export COLLECT_NAMES fi ;; esac # Global variables: ofile=libtool can_build_shared=yes # All known linkers require a '.a' archive for static linking (except MSVC, # which needs '.lib'). libext=a with_gnu_ld=$lt_cv_prog_gnu_ld old_CC=$CC old_CFLAGS=$CFLAGS # Set sane defaults for various variables test -z "$CC" && CC=cc test -z "$LTCC" && LTCC=$CC test -z "$LTCFLAGS" && LTCFLAGS=$CFLAGS test -z "$LD" && LD=ld test -z "$ac_objext" && ac_objext=o _LT_CC_BASENAME([$compiler]) # Only perform the check for file, if the check method requires it test -z "$MAGIC_CMD" && MAGIC_CMD=file case $deplibs_check_method in file_magic*) if test "$file_magic_cmd" = '$MAGIC_CMD'; then _LT_PATH_MAGIC fi ;; esac # Use C for the default configuration in the libtool script LT_SUPPORTED_TAG([CC]) _LT_LANG_C_CONFIG _LT_LANG_DEFAULT_CONFIG _LT_CONFIG_COMMANDS ])# _LT_SETUP # _LT_PREPARE_SED_QUOTE_VARS # -------------------------- # Define a few sed substitution that help us do robust quoting. 
m4_defun([_LT_PREPARE_SED_QUOTE_VARS], [# Backslashify metacharacters that are still active within # double-quoted strings. sed_quote_subst='s/\([["`$\\]]\)/\\\1/g' # Same as above, but do not quote variable references. double_quote_subst='s/\([["`\\]]\)/\\\1/g' # Sed substitution to delay expansion of an escaped shell variable in a # double_quote_subst'ed string. delay_variable_subst='s/\\\\\\\\\\\$/\\\\\\$/g' # Sed substitution to delay expansion of an escaped single quote. delay_single_quote_subst='s/'\''/'\'\\\\\\\'\''/g' # Sed substitution to avoid accidental globbing in evaled expressions no_glob_subst='s/\*/\\\*/g' ]) # _LT_PROG_LTMAIN # --------------- # Note that this code is called both from 'configure', and 'config.status' # now that we use AC_CONFIG_COMMANDS to generate libtool. Notably, # 'config.status' has no value for ac_aux_dir unless we are using Automake, # so we pass a copy along to make sure it has a sensible value anyway. m4_defun([_LT_PROG_LTMAIN], [m4_ifdef([AC_REQUIRE_AUX_FILE], [AC_REQUIRE_AUX_FILE([ltmain.sh])])dnl _LT_CONFIG_LIBTOOL_INIT([ac_aux_dir='$ac_aux_dir']) ltmain=$ac_aux_dir/ltmain.sh ])# _LT_PROG_LTMAIN # So that we can recreate a full libtool script including additional # tags, we accumulate the chunks of code to send to AC_CONFIG_COMMANDS # in macros and then make a single call at the end using the 'libtool' # label. # _LT_CONFIG_LIBTOOL_INIT([INIT-COMMANDS]) # ---------------------------------------- # Register INIT-COMMANDS to be passed to AC_CONFIG_COMMANDS later. m4_define([_LT_CONFIG_LIBTOOL_INIT], [m4_ifval([$1], [m4_append([_LT_OUTPUT_LIBTOOL_INIT], [$1 ])])]) # Initialize. m4_define([_LT_OUTPUT_LIBTOOL_INIT]) # _LT_CONFIG_LIBTOOL([COMMANDS]) # ------------------------------ # Register COMMANDS to be passed to AC_CONFIG_COMMANDS later. m4_define([_LT_CONFIG_LIBTOOL], [m4_ifval([$1], [m4_append([_LT_OUTPUT_LIBTOOL_COMMANDS], [$1 ])])]) # Initialize. 
m4_define([_LT_OUTPUT_LIBTOOL_COMMANDS]) # _LT_CONFIG_SAVE_COMMANDS([COMMANDS], [INIT_COMMANDS]) # ----------------------------------------------------- m4_defun([_LT_CONFIG_SAVE_COMMANDS], [_LT_CONFIG_LIBTOOL([$1]) _LT_CONFIG_LIBTOOL_INIT([$2]) ]) # _LT_FORMAT_COMMENT([COMMENT]) # ----------------------------- # Add leading comment marks to the start of each line, and a trailing # full-stop to the whole comment if one is not present already. m4_define([_LT_FORMAT_COMMENT], [m4_ifval([$1], [ m4_bpatsubst([m4_bpatsubst([$1], [^ *], [# ])], [['`$\]], [\\\&])]m4_bmatch([$1], [[!?.]$], [], [.]) )]) # _LT_DECL([CONFIGNAME], VARNAME, VALUE, [DESCRIPTION], [IS-TAGGED?]) # ------------------------------------------------------------------- # CONFIGNAME is the name given to the value in the libtool script. # VARNAME is the (base) name used in the configure script. # VALUE may be 0, 1 or 2 for a computed quote escaped value based on # VARNAME. Any other value will be used directly. m4_define([_LT_DECL], [lt_if_append_uniq([lt_decl_varnames], [$2], [, ], [lt_dict_add_subkey([lt_decl_dict], [$2], [libtool_name], [m4_ifval([$1], [$1], [$2])]) lt_dict_add_subkey([lt_decl_dict], [$2], [value], [$3]) m4_ifval([$4], [lt_dict_add_subkey([lt_decl_dict], [$2], [description], [$4])]) lt_dict_add_subkey([lt_decl_dict], [$2], [tagged?], [m4_ifval([$5], [yes], [no])])]) ]) # _LT_TAGDECL([CONFIGNAME], VARNAME, VALUE, [DESCRIPTION]) # -------------------------------------------------------- m4_define([_LT_TAGDECL], [_LT_DECL([$1], [$2], [$3], [$4], [yes])]) # lt_decl_tag_varnames([SEPARATOR], [VARNAME1...]) # ------------------------------------------------ m4_define([lt_decl_tag_varnames], [_lt_decl_filter([tagged?], [yes], $@)]) # _lt_decl_filter(SUBKEY, VALUE, [SEPARATOR], [VARNAME1..]) # --------------------------------------------------------- m4_define([_lt_decl_filter], [m4_case([$#], [0], [m4_fatal([$0: too few arguments: $#])], [1], [m4_fatal([$0: too few arguments: $#: $1])], [2], 
[lt_dict_filter([lt_decl_dict], [$1], [$2], [], lt_decl_varnames)], [3], [lt_dict_filter([lt_decl_dict], [$1], [$2], [$3], lt_decl_varnames)], [lt_dict_filter([lt_decl_dict], $@)])[]dnl ]) # lt_decl_quote_varnames([SEPARATOR], [VARNAME1...]) # -------------------------------------------------- m4_define([lt_decl_quote_varnames], [_lt_decl_filter([value], [1], $@)]) # lt_decl_dquote_varnames([SEPARATOR], [VARNAME1...]) # --------------------------------------------------- m4_define([lt_decl_dquote_varnames], [_lt_decl_filter([value], [2], $@)]) # lt_decl_varnames_tagged([SEPARATOR], [VARNAME1...]) # --------------------------------------------------- m4_define([lt_decl_varnames_tagged], [m4_assert([$# <= 2])dnl _$0(m4_quote(m4_default([$1], [[, ]])), m4_ifval([$2], [[$2]], [m4_dquote(lt_decl_tag_varnames)]), m4_split(m4_normalize(m4_quote(_LT_TAGS)), [ ]))]) m4_define([_lt_decl_varnames_tagged], [m4_ifval([$3], [lt_combine([$1], [$2], [_], $3)])]) # lt_decl_all_varnames([SEPARATOR], [VARNAME1...]) # ------------------------------------------------ m4_define([lt_decl_all_varnames], [_$0(m4_quote(m4_default([$1], [[, ]])), m4_if([$2], [], m4_quote(lt_decl_varnames), m4_quote(m4_shift($@))))[]dnl ]) m4_define([_lt_decl_all_varnames], [lt_join($@, lt_decl_varnames_tagged([$1], lt_decl_tag_varnames([[, ]], m4_shift($@))))dnl ]) # _LT_CONFIG_STATUS_DECLARE([VARNAME]) # ------------------------------------ # Quote a variable value, and forward it to 'config.status' so that its # declaration there will have the same value as in 'configure'. VARNAME # must have a single quote delimited value for this to work. m4_define([_LT_CONFIG_STATUS_DECLARE], [$1='`$ECHO "$][$1" | $SED "$delay_single_quote_subst"`']) # _LT_CONFIG_STATUS_DECLARATIONS # ------------------------------ # We delimit libtool config variables with single quotes, so when # we write them to config.status, we have to be sure to quote all # embedded single quotes properly. 
In configure, this macro expands # each variable declared with _LT_DECL (and _LT_TAGDECL) into: # # ='`$ECHO "$" | $SED "$delay_single_quote_subst"`' m4_defun([_LT_CONFIG_STATUS_DECLARATIONS], [m4_foreach([_lt_var], m4_quote(lt_decl_all_varnames), [m4_n([_LT_CONFIG_STATUS_DECLARE(_lt_var)])])]) # _LT_LIBTOOL_TAGS # ---------------- # Output comment and list of tags supported by the script m4_defun([_LT_LIBTOOL_TAGS], [_LT_FORMAT_COMMENT([The names of the tagged configurations supported by this script])dnl available_tags='_LT_TAGS'dnl ]) # _LT_LIBTOOL_DECLARE(VARNAME, [TAG]) # ----------------------------------- # Extract the dictionary values for VARNAME (optionally with TAG) and # expand to a commented shell variable setting: # # # Some comment about what VAR is for. # visible_name=$lt_internal_name m4_define([_LT_LIBTOOL_DECLARE], [_LT_FORMAT_COMMENT(m4_quote(lt_dict_fetch([lt_decl_dict], [$1], [description])))[]dnl m4_pushdef([_libtool_name], m4_quote(lt_dict_fetch([lt_decl_dict], [$1], [libtool_name])))[]dnl m4_case(m4_quote(lt_dict_fetch([lt_decl_dict], [$1], [value])), [0], [_libtool_name=[$]$1], [1], [_libtool_name=$lt_[]$1], [2], [_libtool_name=$lt_[]$1], [_libtool_name=lt_dict_fetch([lt_decl_dict], [$1], [value])])[]dnl m4_ifval([$2], [_$2])[]m4_popdef([_libtool_name])[]dnl ]) # _LT_LIBTOOL_CONFIG_VARS # ----------------------- # Produce commented declarations of non-tagged libtool config variables # suitable for insertion in the LIBTOOL CONFIG section of the 'libtool' # script. Tagged libtool config variables (even for the LIBTOOL CONFIG # section) are produced by _LT_LIBTOOL_TAG_VARS. 
m4_defun([_LT_LIBTOOL_CONFIG_VARS], [m4_foreach([_lt_var], m4_quote(_lt_decl_filter([tagged?], [no], [], lt_decl_varnames)), [m4_n([_LT_LIBTOOL_DECLARE(_lt_var)])])]) # _LT_LIBTOOL_TAG_VARS(TAG) # ------------------------- m4_define([_LT_LIBTOOL_TAG_VARS], [m4_foreach([_lt_var], m4_quote(lt_decl_tag_varnames), [m4_n([_LT_LIBTOOL_DECLARE(_lt_var, [$1])])])]) # _LT_TAGVAR(VARNAME, [TAGNAME]) # ------------------------------ m4_define([_LT_TAGVAR], [m4_ifval([$2], [$1_$2], [$1])]) # _LT_CONFIG_COMMANDS # ------------------- # Send accumulated output to $CONFIG_STATUS. Thanks to the lists of # variables for single and double quote escaping we saved from calls # to _LT_DECL, we can put quote escaped variables declarations # into 'config.status', and then the shell code to quote escape them in # for loops in 'config.status'. Finally, any additional code accumulated # from calls to _LT_CONFIG_LIBTOOL_INIT is expanded. m4_defun([_LT_CONFIG_COMMANDS], [AC_PROVIDE_IFELSE([LT_OUTPUT], dnl If the libtool generation code has been placed in $CONFIG_LT, dnl instead of duplicating it all over again into config.status, dnl then we will have config.status run $CONFIG_LT later, so it dnl needs to know what name is stored there: [AC_CONFIG_COMMANDS([libtool], [$SHELL $CONFIG_LT || AS_EXIT(1)], [CONFIG_LT='$CONFIG_LT'])], dnl If the libtool generation code is destined for config.status, dnl expand the accumulated commands and init code now: [AC_CONFIG_COMMANDS([libtool], [_LT_OUTPUT_LIBTOOL_COMMANDS], [_LT_OUTPUT_LIBTOOL_COMMANDS_INIT])]) ])#_LT_CONFIG_COMMANDS # Initialize. m4_define([_LT_OUTPUT_LIBTOOL_COMMANDS_INIT], [ # The HP-UX ksh and POSIX shell print the target directory to stdout # if CDPATH is set. 
(unset CDPATH) >/dev/null 2>&1 && unset CDPATH sed_quote_subst='$sed_quote_subst' double_quote_subst='$double_quote_subst' delay_variable_subst='$delay_variable_subst' _LT_CONFIG_STATUS_DECLARATIONS LTCC='$LTCC' LTCFLAGS='$LTCFLAGS' compiler='$compiler_DEFAULT' # A function that is used when there is no print builtin or printf. func_fallback_echo () { eval 'cat <<_LTECHO_EOF \$[]1 _LTECHO_EOF' } # Quote evaled strings. for var in lt_decl_all_varnames([[ \ ]], lt_decl_quote_varnames); do case \`eval \\\\\$ECHO \\\\""\\\\\$\$var"\\\\"\` in *[[\\\\\\\`\\"\\\$]]*) eval "lt_\$var=\\\\\\"\\\`\\\$ECHO \\"\\\$\$var\\" | \\\$SED \\"\\\$sed_quote_subst\\"\\\`\\\\\\"" ## exclude from sc_prohibit_nested_quotes ;; *) eval "lt_\$var=\\\\\\"\\\$\$var\\\\\\"" ;; esac done # Double-quote double-evaled strings. for var in lt_decl_all_varnames([[ \ ]], lt_decl_dquote_varnames); do case \`eval \\\\\$ECHO \\\\""\\\\\$\$var"\\\\"\` in *[[\\\\\\\`\\"\\\$]]*) eval "lt_\$var=\\\\\\"\\\`\\\$ECHO \\"\\\$\$var\\" | \\\$SED -e \\"\\\$double_quote_subst\\" -e \\"\\\$sed_quote_subst\\" -e \\"\\\$delay_variable_subst\\"\\\`\\\\\\"" ## exclude from sc_prohibit_nested_quotes ;; *) eval "lt_\$var=\\\\\\"\\\$\$var\\\\\\"" ;; esac done _LT_OUTPUT_LIBTOOL_INIT ]) # _LT_GENERATED_FILE_INIT(FILE, [COMMENT]) # ------------------------------------ # Generate a child script FILE with all initialization necessary to # reuse the environment learned by the parent script, and make the # file executable. If COMMENT is supplied, it is inserted after the # '#!' sequence but before initialization text begins. After this # macro, additional text can be appended to FILE to form the body of # the child script. The macro ends with non-zero status if the # file could not be fully written (such as if the disk is full). 
m4_ifdef([AS_INIT_GENERATED], [m4_defun([_LT_GENERATED_FILE_INIT],[AS_INIT_GENERATED($@)])], [m4_defun([_LT_GENERATED_FILE_INIT], [m4_require([AS_PREPARE])]dnl [m4_pushdef([AS_MESSAGE_LOG_FD])]dnl [lt_write_fail=0 cat >$1 <<_ASEOF || lt_write_fail=1 #! $SHELL # Generated by $as_me. $2 SHELL=\${CONFIG_SHELL-$SHELL} export SHELL _ASEOF cat >>$1 <<\_ASEOF || lt_write_fail=1 AS_SHELL_SANITIZE _AS_PREPARE exec AS_MESSAGE_FD>&1 _ASEOF test 0 = "$lt_write_fail" && chmod +x $1[]dnl m4_popdef([AS_MESSAGE_LOG_FD])])])# _LT_GENERATED_FILE_INIT # LT_OUTPUT # --------- # This macro allows early generation of the libtool script (before # AC_OUTPUT is called), incase it is used in configure for compilation # tests. AC_DEFUN([LT_OUTPUT], [: ${CONFIG_LT=./config.lt} AC_MSG_NOTICE([creating $CONFIG_LT]) _LT_GENERATED_FILE_INIT(["$CONFIG_LT"], [# Run this file to recreate a libtool stub with the current configuration.]) cat >>"$CONFIG_LT" <<\_LTEOF lt_cl_silent=false exec AS_MESSAGE_LOG_FD>>config.log { echo AS_BOX([Running $as_me.]) } >&AS_MESSAGE_LOG_FD lt_cl_help="\ '$as_me' creates a local libtool stub from the current configuration, for use in further configure time tests before the real libtool is generated. Usage: $[0] [[OPTIONS]] -h, --help print this help, then exit -V, --version print version number, then exit -q, --quiet do not print progress messages -d, --debug don't remove temporary files Report bugs to ." lt_cl_version="\ m4_ifset([AC_PACKAGE_NAME], [AC_PACKAGE_NAME ])config.lt[]dnl m4_ifset([AC_PACKAGE_VERSION], [ AC_PACKAGE_VERSION]) configured by $[0], generated by m4_PACKAGE_STRING. Copyright (C) 2011 Free Software Foundation, Inc. This config.lt script is free software; the Free Software Foundation gives unlimited permision to copy, distribute and modify it." 
while test 0 != $[#] do case $[1] in --version | --v* | -V ) echo "$lt_cl_version"; exit 0 ;; --help | --h* | -h ) echo "$lt_cl_help"; exit 0 ;; --debug | --d* | -d ) debug=: ;; --quiet | --q* | --silent | --s* | -q ) lt_cl_silent=: ;; -*) AC_MSG_ERROR([unrecognized option: $[1] Try '$[0] --help' for more information.]) ;; *) AC_MSG_ERROR([unrecognized argument: $[1] Try '$[0] --help' for more information.]) ;; esac shift done if $lt_cl_silent; then exec AS_MESSAGE_FD>/dev/null fi _LTEOF cat >>"$CONFIG_LT" <<_LTEOF _LT_OUTPUT_LIBTOOL_COMMANDS_INIT _LTEOF cat >>"$CONFIG_LT" <<\_LTEOF AC_MSG_NOTICE([creating $ofile]) _LT_OUTPUT_LIBTOOL_COMMANDS AS_EXIT(0) _LTEOF chmod +x "$CONFIG_LT" # configure is writing to config.log, but config.lt does its own redirection, # appending to config.log, which fails on DOS, as config.log is still kept # open by configure. Here we exec the FD to /dev/null, effectively closing # config.log, so it can be properly (re)opened and appended to by config.lt. lt_cl_success=: test yes = "$silent" && lt_config_lt_args="$lt_config_lt_args --quiet" exec AS_MESSAGE_LOG_FD>/dev/null $SHELL "$CONFIG_LT" $lt_config_lt_args || lt_cl_success=false exec AS_MESSAGE_LOG_FD>>config.log $lt_cl_success || AS_EXIT(1) ])# LT_OUTPUT # _LT_CONFIG(TAG) # --------------- # If TAG is the built-in tag, create an initial libtool script with a # default configuration from the untagged config vars. Otherwise add code # to config.status for appending the configuration named by TAG from the # matching tagged config vars. m4_defun([_LT_CONFIG], [m4_require([_LT_FILEUTILS_DEFAULTS])dnl _LT_CONFIG_SAVE_COMMANDS([ m4_define([_LT_TAG], m4_if([$1], [], [C], [$1]))dnl m4_if(_LT_TAG, [C], [ # See if we are running on zsh, and set the options that allow our # commands through without removal of \ escapes. if test -n "${ZSH_VERSION+set}"; then setopt NO_GLOB_SUBST fi cfgfile=${ofile}T trap "$RM \"$cfgfile\"; exit 1" 1 2 15 $RM "$cfgfile" cat <<_LT_EOF >> "$cfgfile" #! 
$SHELL # Generated automatically by $as_me ($PACKAGE) $VERSION # Libtool was configured on host `(hostname || uname -n) 2>/dev/null | sed 1q`: # NOTE: Changes made to this file will be lost: look at ltmain.sh. # Provide generalized library-building support services. # Written by Gordon Matzigkeit, 1996 _LT_COPYING _LT_LIBTOOL_TAGS # Configured defaults for sys_lib_dlsearch_path munging. : \${LT_SYS_LIBRARY_PATH="$configure_time_lt_sys_library_path"} # ### BEGIN LIBTOOL CONFIG _LT_LIBTOOL_CONFIG_VARS _LT_LIBTOOL_TAG_VARS # ### END LIBTOOL CONFIG _LT_EOF cat <<'_LT_EOF' >> "$cfgfile" # ### BEGIN FUNCTIONS SHARED WITH CONFIGURE _LT_PREPARE_MUNGE_PATH_LIST _LT_PREPARE_CC_BASENAME # ### END FUNCTIONS SHARED WITH CONFIGURE _LT_EOF case $host_os in aix3*) cat <<\_LT_EOF >> "$cfgfile" # AIX sometimes has problems with the GCC collect2 program. For some # reason, if we set the COLLECT_NAMES environment variable, the problems # vanish in a puff of smoke. if test set != "${COLLECT_NAMES+set}"; then COLLECT_NAMES= export COLLECT_NAMES fi _LT_EOF ;; esac _LT_PROG_LTMAIN # We use sed instead of cat because bash on DJGPP gets confused if # if finds mixed CR/LF and LF-only lines. Since sed operates in # text mode, it properly converts lines to CR/LF. This bash problem # is reportedly fixed, but why not run on old versions too? sed '$q' "$ltmain" >> "$cfgfile" \ || (rm -f "$cfgfile"; exit 1) mv -f "$cfgfile" "$ofile" || (rm -f "$ofile" && cp "$cfgfile" "$ofile" && rm -f "$cfgfile") chmod +x "$ofile" ], [cat <<_LT_EOF >> "$ofile" dnl Unfortunately we have to use $1 here, since _LT_TAG is not expanded dnl in a comment (ie after a #). 
# ### BEGIN LIBTOOL TAG CONFIG: $1 _LT_LIBTOOL_TAG_VARS(_LT_TAG) # ### END LIBTOOL TAG CONFIG: $1 _LT_EOF ])dnl /m4_if ], [m4_if([$1], [], [ PACKAGE='$PACKAGE' VERSION='$VERSION' RM='$RM' ofile='$ofile'], []) ])dnl /_LT_CONFIG_SAVE_COMMANDS ])# _LT_CONFIG # LT_SUPPORTED_TAG(TAG) # --------------------- # Trace this macro to discover what tags are supported by the libtool # --tag option, using: # autoconf --trace 'LT_SUPPORTED_TAG:$1' AC_DEFUN([LT_SUPPORTED_TAG], []) # C support is built-in for now m4_define([_LT_LANG_C_enabled], []) m4_define([_LT_TAGS], []) # LT_LANG(LANG) # ------------- # Enable libtool support for the given language if not already enabled. AC_DEFUN([LT_LANG], [AC_BEFORE([$0], [LT_OUTPUT])dnl m4_case([$1], [C], [_LT_LANG(C)], [C++], [_LT_LANG(CXX)], [Go], [_LT_LANG(GO)], [Java], [_LT_LANG(GCJ)], [Fortran 77], [_LT_LANG(F77)], [Fortran], [_LT_LANG(FC)], [Windows Resource], [_LT_LANG(RC)], [m4_ifdef([_LT_LANG_]$1[_CONFIG], [_LT_LANG($1)], [m4_fatal([$0: unsupported language: "$1"])])])dnl ])# LT_LANG # _LT_LANG(LANGNAME) # ------------------ m4_defun([_LT_LANG], [m4_ifdef([_LT_LANG_]$1[_enabled], [], [LT_SUPPORTED_TAG([$1])dnl m4_append([_LT_TAGS], [$1 ])dnl m4_define([_LT_LANG_]$1[_enabled], [])dnl _LT_LANG_$1_CONFIG($1)])dnl ])# _LT_LANG m4_ifndef([AC_PROG_GO], [ # NOTE: This macro has been submitted for inclusion into # # GNU Autoconf as AC_PROG_GO. When it is available in # # a released version of Autoconf we should remove this # # macro and use it instead. 
# m4_defun([AC_PROG_GO], [AC_LANG_PUSH(Go)dnl AC_ARG_VAR([GOC], [Go compiler command])dnl AC_ARG_VAR([GOFLAGS], [Go compiler flags])dnl _AC_ARG_VAR_LDFLAGS()dnl AC_CHECK_TOOL(GOC, gccgo) if test -z "$GOC"; then if test -n "$ac_tool_prefix"; then AC_CHECK_PROG(GOC, [${ac_tool_prefix}gccgo], [${ac_tool_prefix}gccgo]) fi fi if test -z "$GOC"; then AC_CHECK_PROG(GOC, gccgo, gccgo, false) fi ])#m4_defun ])#m4_ifndef # _LT_LANG_DEFAULT_CONFIG # ----------------------- m4_defun([_LT_LANG_DEFAULT_CONFIG], [AC_PROVIDE_IFELSE([AC_PROG_CXX], [LT_LANG(CXX)], [m4_define([AC_PROG_CXX], defn([AC_PROG_CXX])[LT_LANG(CXX)])]) AC_PROVIDE_IFELSE([AC_PROG_F77], [LT_LANG(F77)], [m4_define([AC_PROG_F77], defn([AC_PROG_F77])[LT_LANG(F77)])]) AC_PROVIDE_IFELSE([AC_PROG_FC], [LT_LANG(FC)], [m4_define([AC_PROG_FC], defn([AC_PROG_FC])[LT_LANG(FC)])]) dnl The call to [A][M_PROG_GCJ] is quoted like that to stop aclocal dnl pulling things in needlessly. AC_PROVIDE_IFELSE([AC_PROG_GCJ], [LT_LANG(GCJ)], [AC_PROVIDE_IFELSE([A][M_PROG_GCJ], [LT_LANG(GCJ)], [AC_PROVIDE_IFELSE([LT_PROG_GCJ], [LT_LANG(GCJ)], [m4_ifdef([AC_PROG_GCJ], [m4_define([AC_PROG_GCJ], defn([AC_PROG_GCJ])[LT_LANG(GCJ)])]) m4_ifdef([A][M_PROG_GCJ], [m4_define([A][M_PROG_GCJ], defn([A][M_PROG_GCJ])[LT_LANG(GCJ)])]) m4_ifdef([LT_PROG_GCJ], [m4_define([LT_PROG_GCJ], defn([LT_PROG_GCJ])[LT_LANG(GCJ)])])])])]) AC_PROVIDE_IFELSE([AC_PROG_GO], [LT_LANG(GO)], [m4_define([AC_PROG_GO], defn([AC_PROG_GO])[LT_LANG(GO)])]) AC_PROVIDE_IFELSE([LT_PROG_RC], [LT_LANG(RC)], [m4_define([LT_PROG_RC], defn([LT_PROG_RC])[LT_LANG(RC)])]) ])# _LT_LANG_DEFAULT_CONFIG # Obsolete macros: AU_DEFUN([AC_LIBTOOL_CXX], [LT_LANG(C++)]) AU_DEFUN([AC_LIBTOOL_F77], [LT_LANG(Fortran 77)]) AU_DEFUN([AC_LIBTOOL_FC], [LT_LANG(Fortran)]) AU_DEFUN([AC_LIBTOOL_GCJ], [LT_LANG(Java)]) AU_DEFUN([AC_LIBTOOL_RC], [LT_LANG(Windows Resource)]) dnl aclocal-1.4 backwards compatibility: dnl AC_DEFUN([AC_LIBTOOL_CXX], []) dnl AC_DEFUN([AC_LIBTOOL_F77], []) dnl 
AC_DEFUN([AC_LIBTOOL_FC], []) dnl AC_DEFUN([AC_LIBTOOL_GCJ], []) dnl AC_DEFUN([AC_LIBTOOL_RC], []) # _LT_TAG_COMPILER # ---------------- m4_defun([_LT_TAG_COMPILER], [AC_REQUIRE([AC_PROG_CC])dnl _LT_DECL([LTCC], [CC], [1], [A C compiler])dnl _LT_DECL([LTCFLAGS], [CFLAGS], [1], [LTCC compiler flags])dnl _LT_TAGDECL([CC], [compiler], [1], [A language specific compiler])dnl _LT_TAGDECL([with_gcc], [GCC], [0], [Is the compiler the GNU compiler?])dnl # If no C compiler was specified, use CC. LTCC=${LTCC-"$CC"} # If no C compiler flags were specified, use CFLAGS. LTCFLAGS=${LTCFLAGS-"$CFLAGS"} # Allow CC to be a program name with arguments. compiler=$CC ])# _LT_TAG_COMPILER # _LT_COMPILER_BOILERPLATE # ------------------------ # Check for compiler boilerplate output or warnings with # the simple compiler test code. m4_defun([_LT_COMPILER_BOILERPLATE], [m4_require([_LT_DECL_SED])dnl ac_outfile=conftest.$ac_objext echo "$lt_simple_compile_test_code" >conftest.$ac_ext eval "$ac_compile" 2>&1 >/dev/null | $SED '/^$/d; /^ *+/d' >conftest.err _lt_compiler_boilerplate=`cat conftest.err` $RM conftest* ])# _LT_COMPILER_BOILERPLATE # _LT_LINKER_BOILERPLATE # ---------------------- # Check for linker boilerplate output or warnings with # the simple link test code. 
m4_defun([_LT_LINKER_BOILERPLATE], [m4_require([_LT_DECL_SED])dnl ac_outfile=conftest.$ac_objext echo "$lt_simple_link_test_code" >conftest.$ac_ext eval "$ac_link" 2>&1 >/dev/null | $SED '/^$/d; /^ *+/d' >conftest.err _lt_linker_boilerplate=`cat conftest.err` $RM -r conftest* ])# _LT_LINKER_BOILERPLATE # _LT_REQUIRED_DARWIN_CHECKS # ------------------------- m4_defun_once([_LT_REQUIRED_DARWIN_CHECKS],[ case $host_os in rhapsody* | darwin*) AC_CHECK_TOOL([DSYMUTIL], [dsymutil], [:]) AC_CHECK_TOOL([NMEDIT], [nmedit], [:]) AC_CHECK_TOOL([LIPO], [lipo], [:]) AC_CHECK_TOOL([OTOOL], [otool], [:]) AC_CHECK_TOOL([OTOOL64], [otool64], [:]) _LT_DECL([], [DSYMUTIL], [1], [Tool to manipulate archived DWARF debug symbol files on Mac OS X]) _LT_DECL([], [NMEDIT], [1], [Tool to change global to local symbols on Mac OS X]) _LT_DECL([], [LIPO], [1], [Tool to manipulate fat objects and archives on Mac OS X]) _LT_DECL([], [OTOOL], [1], [ldd/readelf like tool for Mach-O binaries on Mac OS X]) _LT_DECL([], [OTOOL64], [1], [ldd/readelf like tool for 64 bit Mach-O binaries on Mac OS X 10.4]) AC_CACHE_CHECK([for -single_module linker flag],[lt_cv_apple_cc_single_mod], [lt_cv_apple_cc_single_mod=no if test -z "$LT_MULTI_MODULE"; then # By default we will add the -single_module flag. You can override # by either setting the environment variable LT_MULTI_MODULE # non-empty at configure time, or by adding -multi_module to the # link flags. rm -rf libconftest.dylib* echo "int foo(void){return 1;}" > conftest.c echo "$LTCC $LTCFLAGS $LDFLAGS -o libconftest.dylib \ -dynamiclib -Wl,-single_module conftest.c" >&AS_MESSAGE_LOG_FD $LTCC $LTCFLAGS $LDFLAGS -o libconftest.dylib \ -dynamiclib -Wl,-single_module conftest.c 2>conftest.err _lt_result=$? 
# If there is a non-empty error log, and "single_module" # appears in it, assume the flag caused a linker warning if test -s conftest.err && $GREP single_module conftest.err; then cat conftest.err >&AS_MESSAGE_LOG_FD # Otherwise, if the output was created with a 0 exit code from # the compiler, it worked. elif test -f libconftest.dylib && test 0 = "$_lt_result"; then lt_cv_apple_cc_single_mod=yes else cat conftest.err >&AS_MESSAGE_LOG_FD fi rm -rf libconftest.dylib* rm -f conftest.* fi]) AC_CACHE_CHECK([for -exported_symbols_list linker flag], [lt_cv_ld_exported_symbols_list], [lt_cv_ld_exported_symbols_list=no save_LDFLAGS=$LDFLAGS echo "_main" > conftest.sym LDFLAGS="$LDFLAGS -Wl,-exported_symbols_list,conftest.sym" AC_LINK_IFELSE([AC_LANG_PROGRAM([],[])], [lt_cv_ld_exported_symbols_list=yes], [lt_cv_ld_exported_symbols_list=no]) LDFLAGS=$save_LDFLAGS ]) AC_CACHE_CHECK([for -force_load linker flag],[lt_cv_ld_force_load], [lt_cv_ld_force_load=no cat > conftest.c << _LT_EOF int forced_loaded() { return 2;} _LT_EOF echo "$LTCC $LTCFLAGS -c -o conftest.o conftest.c" >&AS_MESSAGE_LOG_FD $LTCC $LTCFLAGS -c -o conftest.o conftest.c 2>&AS_MESSAGE_LOG_FD echo "$AR cru libconftest.a conftest.o" >&AS_MESSAGE_LOG_FD $AR cru libconftest.a conftest.o 2>&AS_MESSAGE_LOG_FD echo "$RANLIB libconftest.a" >&AS_MESSAGE_LOG_FD $RANLIB libconftest.a 2>&AS_MESSAGE_LOG_FD cat > conftest.c << _LT_EOF int main() { return 0;} _LT_EOF echo "$LTCC $LTCFLAGS $LDFLAGS -o conftest conftest.c -Wl,-force_load,./libconftest.a" >&AS_MESSAGE_LOG_FD $LTCC $LTCFLAGS $LDFLAGS -o conftest conftest.c -Wl,-force_load,./libconftest.a 2>conftest.err _lt_result=$? 
if test -s conftest.err && $GREP force_load conftest.err; then cat conftest.err >&AS_MESSAGE_LOG_FD elif test -f conftest && test 0 = "$_lt_result" && $GREP forced_load conftest >/dev/null 2>&1; then lt_cv_ld_force_load=yes else cat conftest.err >&AS_MESSAGE_LOG_FD fi rm -f conftest.err libconftest.a conftest conftest.c rm -rf conftest.dSYM ]) case $host_os in rhapsody* | darwin1.[[012]]) _lt_dar_allow_undefined='$wl-undefined ${wl}suppress' ;; darwin1.*) _lt_dar_allow_undefined='$wl-flat_namespace $wl-undefined ${wl}suppress' ;; darwin*) # darwin 5.x on # if running on 10.5 or later, the deployment target defaults # to the OS version, if on x86, and 10.4, the deployment # target defaults to 10.4. Don't you love it? case ${MACOSX_DEPLOYMENT_TARGET-10.0},$host in 10.0,*86*-darwin8*|10.0,*-darwin[[91]]*) _lt_dar_allow_undefined='$wl-undefined ${wl}dynamic_lookup' ;; 10.[[012]][[,.]]*) _lt_dar_allow_undefined='$wl-flat_namespace $wl-undefined ${wl}suppress' ;; 10.*) _lt_dar_allow_undefined='$wl-undefined ${wl}dynamic_lookup' ;; esac ;; esac if test yes = "$lt_cv_apple_cc_single_mod"; then _lt_dar_single_mod='$single_module' fi if test yes = "$lt_cv_ld_exported_symbols_list"; then _lt_dar_export_syms=' $wl-exported_symbols_list,$output_objdir/$libname-symbols.expsym' else _lt_dar_export_syms='~$NMEDIT -s $output_objdir/$libname-symbols.expsym $lib' fi if test : != "$DSYMUTIL" && test no = "$lt_cv_ld_force_load"; then _lt_dsymutil='~$DSYMUTIL $lib || :' else _lt_dsymutil= fi ;; esac ]) # _LT_DARWIN_LINKER_FEATURES([TAG]) # --------------------------------- # Checks for linker and compiler features on darwin m4_defun([_LT_DARWIN_LINKER_FEATURES], [ m4_require([_LT_REQUIRED_DARWIN_CHECKS]) _LT_TAGVAR(archive_cmds_need_lc, $1)=no _LT_TAGVAR(hardcode_direct, $1)=no _LT_TAGVAR(hardcode_automatic, $1)=yes _LT_TAGVAR(hardcode_shlibpath_var, $1)=unsupported if test yes = "$lt_cv_ld_force_load"; then _LT_TAGVAR(whole_archive_flag_spec, $1)='`for conv in $convenience\"\"; do test 
-n \"$conv\" && new_convenience=\"$new_convenience $wl-force_load,$conv\"; done; func_echo_all \"$new_convenience\"`' m4_case([$1], [F77], [_LT_TAGVAR(compiler_needs_object, $1)=yes], [FC], [_LT_TAGVAR(compiler_needs_object, $1)=yes]) else _LT_TAGVAR(whole_archive_flag_spec, $1)='' fi _LT_TAGVAR(link_all_deplibs, $1)=yes _LT_TAGVAR(allow_undefined_flag, $1)=$_lt_dar_allow_undefined case $cc_basename in ifort*|nagfor*) _lt_dar_can_shared=yes ;; *) _lt_dar_can_shared=$GCC ;; esac if test yes = "$_lt_dar_can_shared"; then output_verbose_link_cmd=func_echo_all _LT_TAGVAR(archive_cmds, $1)="\$CC -dynamiclib \$allow_undefined_flag -o \$lib \$libobjs \$deplibs \$compiler_flags -install_name \$rpath/\$soname \$verstring $_lt_dar_single_mod$_lt_dsymutil" _LT_TAGVAR(module_cmds, $1)="\$CC \$allow_undefined_flag -o \$lib -bundle \$libobjs \$deplibs \$compiler_flags$_lt_dsymutil" _LT_TAGVAR(archive_expsym_cmds, $1)="sed 's|^|_|' < \$export_symbols > \$output_objdir/\$libname-symbols.expsym~\$CC -dynamiclib \$allow_undefined_flag -o \$lib \$libobjs \$deplibs \$compiler_flags -install_name \$rpath/\$soname \$verstring $_lt_dar_single_mod$_lt_dar_export_syms$_lt_dsymutil" _LT_TAGVAR(module_expsym_cmds, $1)="sed -e 's|^|_|' < \$export_symbols > \$output_objdir/\$libname-symbols.expsym~\$CC \$allow_undefined_flag -o \$lib -bundle \$libobjs \$deplibs \$compiler_flags$_lt_dar_export_syms$_lt_dsymutil" m4_if([$1], [CXX], [ if test yes != "$lt_cv_apple_cc_single_mod"; then _LT_TAGVAR(archive_cmds, $1)="\$CC -r -keep_private_externs -nostdlib -o \$lib-master.o \$libobjs~\$CC -dynamiclib \$allow_undefined_flag -o \$lib \$lib-master.o \$deplibs \$compiler_flags -install_name \$rpath/\$soname \$verstring$_lt_dsymutil" _LT_TAGVAR(archive_expsym_cmds, $1)="sed 's|^|_|' < \$export_symbols > \$output_objdir/\$libname-symbols.expsym~\$CC -r -keep_private_externs -nostdlib -o \$lib-master.o \$libobjs~\$CC -dynamiclib \$allow_undefined_flag -o \$lib \$lib-master.o \$deplibs \$compiler_flags 
-install_name \$rpath/\$soname \$verstring$_lt_dar_export_syms$_lt_dsymutil" fi ],[]) else _LT_TAGVAR(ld_shlibs, $1)=no fi ]) # _LT_SYS_MODULE_PATH_AIX([TAGNAME]) # ---------------------------------- # Links a minimal program and checks the executable # for the system default hardcoded library path. In most cases, # this is /usr/lib:/lib, but when the MPI compilers are used # the location of the communication and MPI libs are included too. # If we don't find anything, use the default library path according # to the aix ld manual. # Store the results from the different compilers for each TAGNAME. # Allow to override them for all tags through lt_cv_aix_libpath. m4_defun([_LT_SYS_MODULE_PATH_AIX], [m4_require([_LT_DECL_SED])dnl if test set = "${lt_cv_aix_libpath+set}"; then aix_libpath=$lt_cv_aix_libpath else AC_CACHE_VAL([_LT_TAGVAR([lt_cv_aix_libpath_], [$1])], [AC_LINK_IFELSE([AC_LANG_PROGRAM],[ lt_aix_libpath_sed='[ /Import File Strings/,/^$/ { /^0/ { s/^0 *\([^ ]*\) *$/\1/ p } }]' _LT_TAGVAR([lt_cv_aix_libpath_], [$1])=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` # Check for a 64-bit object if we didn't find anything. if test -z "$_LT_TAGVAR([lt_cv_aix_libpath_], [$1])"; then _LT_TAGVAR([lt_cv_aix_libpath_], [$1])=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` fi],[]) if test -z "$_LT_TAGVAR([lt_cv_aix_libpath_], [$1])"; then _LT_TAGVAR([lt_cv_aix_libpath_], [$1])=/usr/lib:/lib fi ]) aix_libpath=$_LT_TAGVAR([lt_cv_aix_libpath_], [$1]) fi ])# _LT_SYS_MODULE_PATH_AIX # _LT_SHELL_INIT(ARG) # ------------------- m4_define([_LT_SHELL_INIT], [m4_divert_text([M4SH-INIT], [$1 ])])# _LT_SHELL_INIT # _LT_PROG_ECHO_BACKSLASH # ----------------------- # Find how we can fake an echo command that does not interpret backslash. 
# In particular, with Autoconf 2.60 or later we add some code to the start # of the generated configure script that will find a shell with a builtin # printf (that we can use as an echo command). m4_defun([_LT_PROG_ECHO_BACKSLASH], [ECHO='\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\' ECHO=$ECHO$ECHO$ECHO$ECHO$ECHO ECHO=$ECHO$ECHO$ECHO$ECHO$ECHO$ECHO AC_MSG_CHECKING([how to print strings]) # Test print first, because it will be a builtin if present. if test "X`( print -r -- -n ) 2>/dev/null`" = X-n && \ test "X`print -r -- $ECHO 2>/dev/null`" = "X$ECHO"; then ECHO='print -r --' elif test "X`printf %s $ECHO 2>/dev/null`" = "X$ECHO"; then ECHO='printf %s\n' else # Use this function as a fallback that always works. func_fallback_echo () { eval 'cat <<_LTECHO_EOF $[]1 _LTECHO_EOF' } ECHO='func_fallback_echo' fi # func_echo_all arg... # Invoke $ECHO with all args, space-separated. func_echo_all () { $ECHO "$*" } case $ECHO in printf*) AC_MSG_RESULT([printf]) ;; print*) AC_MSG_RESULT([print -r]) ;; *) AC_MSG_RESULT([cat]) ;; esac m4_ifdef([_AS_DETECT_SUGGESTED], [_AS_DETECT_SUGGESTED([ test -n "${ZSH_VERSION+set}${BASH_VERSION+set}" || ( ECHO='\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\' ECHO=$ECHO$ECHO$ECHO$ECHO$ECHO ECHO=$ECHO$ECHO$ECHO$ECHO$ECHO$ECHO PATH=/empty FPATH=/empty; export PATH FPATH test "X`printf %s $ECHO`" = "X$ECHO" \ || test "X`print -r -- $ECHO`" = "X$ECHO" )])]) _LT_DECL([], [SHELL], [1], [Shell to use when invoking shell scripts]) _LT_DECL([], [ECHO], [1], [An echo program that protects backslashes]) ])# _LT_PROG_ECHO_BACKSLASH # _LT_WITH_SYSROOT # ---------------- AC_DEFUN([_LT_WITH_SYSROOT], [AC_MSG_CHECKING([for sysroot]) AC_ARG_WITH([sysroot], [AS_HELP_STRING([--with-sysroot@<:@=DIR@:>@], [Search for dependent libraries within DIR (or the compiler's sysroot if not specified).])], [], [with_sysroot=no]) dnl lt_sysroot will always 
be passed unquoted. We quote it here dnl in case the user passed a directory name. lt_sysroot= case $with_sysroot in #( yes) if test yes = "$GCC"; then lt_sysroot=`$CC --print-sysroot 2>/dev/null` fi ;; #( /*) lt_sysroot=`echo "$with_sysroot" | sed -e "$sed_quote_subst"` ;; #( no|'') ;; #( *) AC_MSG_RESULT([$with_sysroot]) AC_MSG_ERROR([The sysroot must be an absolute path.]) ;; esac AC_MSG_RESULT([${lt_sysroot:-no}]) _LT_DECL([], [lt_sysroot], [0], [The root where to search for ]dnl [dependent libraries, and where our libraries should be installed.])]) # _LT_ENABLE_LOCK # --------------- m4_defun([_LT_ENABLE_LOCK], [AC_ARG_ENABLE([libtool-lock], [AS_HELP_STRING([--disable-libtool-lock], [avoid locking (might break parallel builds)])]) test no = "$enable_libtool_lock" || enable_libtool_lock=yes # Some flags need to be propagated to the compiler or linker for good # libtool support. case $host in ia64-*-hpux*) # Find out what ABI is being produced by ac_compile, and set mode # options accordingly. echo 'int i;' > conftest.$ac_ext if AC_TRY_EVAL(ac_compile); then case `/usr/bin/file conftest.$ac_objext` in *ELF-32*) HPUX_IA64_MODE=32 ;; *ELF-64*) HPUX_IA64_MODE=64 ;; esac fi rm -rf conftest* ;; *-*-irix6*) # Find out what ABI is being produced by ac_compile, and set linker # options accordingly. echo '[#]line '$LINENO' "configure"' > conftest.$ac_ext if AC_TRY_EVAL(ac_compile); then if test yes = "$lt_cv_prog_gnu_ld"; then case `/usr/bin/file conftest.$ac_objext` in *32-bit*) LD="${LD-ld} -melf32bsmip" ;; *N32*) LD="${LD-ld} -melf32bmipn32" ;; *64-bit*) LD="${LD-ld} -melf64bmip" ;; esac else case `/usr/bin/file conftest.$ac_objext` in *32-bit*) LD="${LD-ld} -32" ;; *N32*) LD="${LD-ld} -n32" ;; *64-bit*) LD="${LD-ld} -64" ;; esac fi fi rm -rf conftest* ;; mips64*-*linux*) # Find out what ABI is being produced by ac_compile, and set linker # options accordingly. 
echo '[#]line '$LINENO' "configure"' > conftest.$ac_ext if AC_TRY_EVAL(ac_compile); then emul=elf case `/usr/bin/file conftest.$ac_objext` in *32-bit*) emul="${emul}32" ;; *64-bit*) emul="${emul}64" ;; esac case `/usr/bin/file conftest.$ac_objext` in *MSB*) emul="${emul}btsmip" ;; *LSB*) emul="${emul}ltsmip" ;; esac case `/usr/bin/file conftest.$ac_objext` in *N32*) emul="${emul}n32" ;; esac LD="${LD-ld} -m $emul" fi rm -rf conftest* ;; x86_64-*kfreebsd*-gnu|x86_64-*linux*|powerpc*-*linux*| \ s390*-*linux*|s390*-*tpf*|sparc*-*linux*) # Find out what ABI is being produced by ac_compile, and set linker # options accordingly. Note that the listed cases only cover the # situations where additional linker options are needed (such as when # doing 32-bit compilation for a host where ld defaults to 64-bit, or # vice versa); the common cases where no linker options are needed do # not appear in the list. echo 'int i;' > conftest.$ac_ext if AC_TRY_EVAL(ac_compile); then case `/usr/bin/file conftest.o` in *32-bit*) case $host in x86_64-*kfreebsd*-gnu) LD="${LD-ld} -m elf_i386_fbsd" ;; x86_64-*linux*) case `/usr/bin/file conftest.o` in *x86-64*) LD="${LD-ld} -m elf32_x86_64" ;; *) LD="${LD-ld} -m elf_i386" ;; esac ;; powerpc64le-*linux*) LD="${LD-ld} -m elf32lppclinux" ;; powerpc64-*linux*) LD="${LD-ld} -m elf32ppclinux" ;; s390x-*linux*) LD="${LD-ld} -m elf_s390" ;; sparc64-*linux*) LD="${LD-ld} -m elf32_sparc" ;; esac ;; *64-bit*) case $host in x86_64-*kfreebsd*-gnu) LD="${LD-ld} -m elf_x86_64_fbsd" ;; x86_64-*linux*) LD="${LD-ld} -m elf_x86_64" ;; powerpcle-*linux*) LD="${LD-ld} -m elf64lppc" ;; powerpc-*linux*) LD="${LD-ld} -m elf64ppc" ;; s390*-*linux*|s390*-*tpf*) LD="${LD-ld} -m elf64_s390" ;; sparc*-*linux*) LD="${LD-ld} -m elf64_sparc" ;; esac ;; esac fi rm -rf conftest* ;; *-*-sco3.2v5*) # On SCO OpenServer 5, we need -belf to get full-featured binaries. 
SAVE_CFLAGS=$CFLAGS CFLAGS="$CFLAGS -belf" AC_CACHE_CHECK([whether the C compiler needs -belf], lt_cv_cc_needs_belf, [AC_LANG_PUSH(C) AC_LINK_IFELSE([AC_LANG_PROGRAM([[]],[[]])],[lt_cv_cc_needs_belf=yes],[lt_cv_cc_needs_belf=no]) AC_LANG_POP]) if test yes != "$lt_cv_cc_needs_belf"; then # this is probably gcc 2.8.0, egcs 1.0 or newer; no need for -belf CFLAGS=$SAVE_CFLAGS fi ;; *-*solaris*) # Find out what ABI is being produced by ac_compile, and set linker # options accordingly. echo 'int i;' > conftest.$ac_ext if AC_TRY_EVAL(ac_compile); then case `/usr/bin/file conftest.o` in *64-bit*) case $lt_cv_prog_gnu_ld in yes*) case $host in i?86-*-solaris*|x86_64-*-solaris*) LD="${LD-ld} -m elf_x86_64" ;; sparc*-*-solaris*) LD="${LD-ld} -m elf64_sparc" ;; esac # GNU ld 2.21 introduced _sol2 emulations. Use them if available. if ${LD-ld} -V | grep _sol2 >/dev/null 2>&1; then LD=${LD-ld}_sol2 fi ;; *) if ${LD-ld} -64 -r -o conftest2.o conftest.o >/dev/null 2>&1; then LD="${LD-ld} -64" fi ;; esac ;; esac fi rm -rf conftest* ;; esac need_locks=$enable_libtool_lock ])# _LT_ENABLE_LOCK # _LT_PROG_AR # ----------- m4_defun([_LT_PROG_AR], [AC_CHECK_TOOLS(AR, [ar], false) : ${AR=ar} : ${AR_FLAGS=cru} _LT_DECL([], [AR], [1], [The archiver]) _LT_DECL([], [AR_FLAGS], [1], [Flags to create an archive]) AC_CACHE_CHECK([for archiver @FILE support], [lt_cv_ar_at_file], [lt_cv_ar_at_file=no AC_COMPILE_IFELSE([AC_LANG_PROGRAM], [echo conftest.$ac_objext > conftest.lst lt_ar_try='$AR $AR_FLAGS libconftest.a @conftest.lst >&AS_MESSAGE_LOG_FD' AC_TRY_EVAL([lt_ar_try]) if test 0 -eq "$ac_status"; then # Ensure the archiver fails upon bogus file names. 
rm -f conftest.$ac_objext libconftest.a AC_TRY_EVAL([lt_ar_try]) if test 0 -ne "$ac_status"; then lt_cv_ar_at_file=@ fi fi rm -f conftest.* libconftest.a ]) ]) if test no = "$lt_cv_ar_at_file"; then archiver_list_spec= else archiver_list_spec=$lt_cv_ar_at_file fi _LT_DECL([], [archiver_list_spec], [1], [How to feed a file listing to the archiver]) ])# _LT_PROG_AR # _LT_CMD_OLD_ARCHIVE # ------------------- m4_defun([_LT_CMD_OLD_ARCHIVE], [_LT_PROG_AR AC_CHECK_TOOL(STRIP, strip, :) test -z "$STRIP" && STRIP=: _LT_DECL([], [STRIP], [1], [A symbol stripping program]) AC_CHECK_TOOL(RANLIB, ranlib, :) test -z "$RANLIB" && RANLIB=: _LT_DECL([], [RANLIB], [1], [Commands used to install an old-style archive]) # Determine commands to create old-style static archives. old_archive_cmds='$AR $AR_FLAGS $oldlib$oldobjs' old_postinstall_cmds='chmod 644 $oldlib' old_postuninstall_cmds= if test -n "$RANLIB"; then case $host_os in bitrig* | openbsd*) old_postinstall_cmds="$old_postinstall_cmds~\$RANLIB -t \$tool_oldlib" ;; *) old_postinstall_cmds="$old_postinstall_cmds~\$RANLIB \$tool_oldlib" ;; esac old_archive_cmds="$old_archive_cmds~\$RANLIB \$tool_oldlib" fi case $host_os in darwin*) lock_old_archive_extraction=yes ;; *) lock_old_archive_extraction=no ;; esac _LT_DECL([], [old_postinstall_cmds], [2]) _LT_DECL([], [old_postuninstall_cmds], [2]) _LT_TAGDECL([], [old_archive_cmds], [2], [Commands used to build an old-style archive]) _LT_DECL([], [lock_old_archive_extraction], [0], [Whether to use a lock for old archive extraction]) ])# _LT_CMD_OLD_ARCHIVE # _LT_COMPILER_OPTION(MESSAGE, VARIABLE-NAME, FLAGS, # [OUTPUT-FILE], [ACTION-SUCCESS], [ACTION-FAILURE]) # ---------------------------------------------------------------- # Check whether the given compiler option works AC_DEFUN([_LT_COMPILER_OPTION], [m4_require([_LT_FILEUTILS_DEFAULTS])dnl m4_require([_LT_DECL_SED])dnl AC_CACHE_CHECK([$1], [$2], [$2=no m4_if([$4], , [ac_outfile=conftest.$ac_objext], [ac_outfile=$4]) echo 
"$lt_simple_compile_test_code" > conftest.$ac_ext lt_compiler_flag="$3" ## exclude from sc_useless_quotes_in_assignment # Insert the option either (1) after the last *FLAGS variable, or # (2) before a word containing "conftest.", or (3) at the end. # Note that $ac_compile itself does not contain backslashes and begins # with a dollar sign (not a hyphen), so the echo should work correctly. # The option is referenced via a variable to avoid confusing sed. lt_compile=`echo "$ac_compile" | $SED \ -e 's:.*FLAGS}\{0,1\} :&$lt_compiler_flag :; t' \ -e 's: [[^ ]]*conftest\.: $lt_compiler_flag&:; t' \ -e 's:$: $lt_compiler_flag:'` (eval echo "\"\$as_me:$LINENO: $lt_compile\"" >&AS_MESSAGE_LOG_FD) (eval "$lt_compile" 2>conftest.err) ac_status=$? cat conftest.err >&AS_MESSAGE_LOG_FD echo "$as_me:$LINENO: \$? = $ac_status" >&AS_MESSAGE_LOG_FD if (exit $ac_status) && test -s "$ac_outfile"; then # The compiler can only warn and ignore the option if not recognized # So say no if there are warnings other than the usual output. $ECHO "$_lt_compiler_boilerplate" | $SED '/^$/d' >conftest.exp $SED '/^$/d; /^ *+/d' conftest.err >conftest.er2 if test ! 
-s conftest.er2 || diff conftest.exp conftest.er2 >/dev/null; then $2=yes fi fi $RM conftest* ]) if test yes = "[$]$2"; then m4_if([$5], , :, [$5]) else m4_if([$6], , :, [$6]) fi ])# _LT_COMPILER_OPTION # Old name: AU_ALIAS([AC_LIBTOOL_COMPILER_OPTION], [_LT_COMPILER_OPTION]) dnl aclocal-1.4 backwards compatibility: dnl AC_DEFUN([AC_LIBTOOL_COMPILER_OPTION], []) # _LT_LINKER_OPTION(MESSAGE, VARIABLE-NAME, FLAGS, # [ACTION-SUCCESS], [ACTION-FAILURE]) # ---------------------------------------------------- # Check whether the given linker option works AC_DEFUN([_LT_LINKER_OPTION], [m4_require([_LT_FILEUTILS_DEFAULTS])dnl m4_require([_LT_DECL_SED])dnl AC_CACHE_CHECK([$1], [$2], [$2=no save_LDFLAGS=$LDFLAGS LDFLAGS="$LDFLAGS $3" echo "$lt_simple_link_test_code" > conftest.$ac_ext if (eval $ac_link 2>conftest.err) && test -s conftest$ac_exeext; then # The linker can only warn and ignore the option if not recognized # So say no if there are warnings if test -s conftest.err; then # Append any errors to the config.log. 
cat conftest.err 1>&AS_MESSAGE_LOG_FD $ECHO "$_lt_linker_boilerplate" | $SED '/^$/d' > conftest.exp $SED '/^$/d; /^ *+/d' conftest.err >conftest.er2 if diff conftest.exp conftest.er2 >/dev/null; then $2=yes fi else $2=yes fi fi $RM -r conftest* LDFLAGS=$save_LDFLAGS ]) if test yes = "[$]$2"; then m4_if([$4], , :, [$4]) else m4_if([$5], , :, [$5]) fi ])# _LT_LINKER_OPTION # Old name: AU_ALIAS([AC_LIBTOOL_LINKER_OPTION], [_LT_LINKER_OPTION]) dnl aclocal-1.4 backwards compatibility: dnl AC_DEFUN([AC_LIBTOOL_LINKER_OPTION], []) # LT_CMD_MAX_LEN #--------------- AC_DEFUN([LT_CMD_MAX_LEN], [AC_REQUIRE([AC_CANONICAL_HOST])dnl # find the maximum length of command line arguments AC_MSG_CHECKING([the maximum length of command line arguments]) AC_CACHE_VAL([lt_cv_sys_max_cmd_len], [dnl i=0 teststring=ABCD case $build_os in msdosdjgpp*) # On DJGPP, this test can blow up pretty badly due to problems in libc # (any single argument exceeding 2000 bytes causes a buffer overrun # during glob expansion). Even if it were fixed, the result of this # check would be larger than it should be. lt_cv_sys_max_cmd_len=12288; # 12K is about right ;; gnu*) # Under GNU Hurd, this test is not required because there is # no limit to the length of command line arguments. # Libtool will interpret -1 as no limit whatsoever lt_cv_sys_max_cmd_len=-1; ;; cygwin* | mingw* | cegcc*) # On Win9x/ME, this test blows up -- it succeeds, but takes # about 5 minutes as the teststring grows exponentially. # Worse, since 9x/ME are not pre-emptively multitasking, # you end up with a "frozen" computer, even though with patience # the test eventually succeeds (with a max line length of 256k). # Instead, let's just punt: use the minimum linelength reported by # all of the supported platforms: 8192 (on NT/2K/XP). lt_cv_sys_max_cmd_len=8192; ;; mint*) # On MiNT this can take a long time and run out of memory. lt_cv_sys_max_cmd_len=8192; ;; amigaos*) # On AmigaOS with pdksh, this test takes hours, literally. 
# So we just punt and use a minimum line length of 8192. lt_cv_sys_max_cmd_len=8192; ;; bitrig* | darwin* | dragonfly* | freebsd* | netbsd* | openbsd*) # This has been around since 386BSD, at least. Likely further. if test -x /sbin/sysctl; then lt_cv_sys_max_cmd_len=`/sbin/sysctl -n kern.argmax` elif test -x /usr/sbin/sysctl; then lt_cv_sys_max_cmd_len=`/usr/sbin/sysctl -n kern.argmax` else lt_cv_sys_max_cmd_len=65536 # usable default for all BSDs fi # And add a safety zone lt_cv_sys_max_cmd_len=`expr $lt_cv_sys_max_cmd_len \/ 4` lt_cv_sys_max_cmd_len=`expr $lt_cv_sys_max_cmd_len \* 3` ;; interix*) # We know the value 262144 and hardcode it with a safety zone (like BSD) lt_cv_sys_max_cmd_len=196608 ;; os2*) # The test takes a long time on OS/2. lt_cv_sys_max_cmd_len=8192 ;; osf*) # Dr. Hans Ekkehard Plesser reports seeing a kernel panic running configure # due to this test when exec_disable_arg_limit is 1 on Tru64. It is not # nice to cause kernel panics so lets avoid the loop below. # First set a reasonable default. lt_cv_sys_max_cmd_len=16384 # if test -x /sbin/sysconfig; then case `/sbin/sysconfig -q proc exec_disable_arg_limit` in *1*) lt_cv_sys_max_cmd_len=-1 ;; esac fi ;; sco3.2v5*) lt_cv_sys_max_cmd_len=102400 ;; sysv5* | sco5v6* | sysv4.2uw2*) kargmax=`grep ARG_MAX /etc/conf/cf.d/stune 2>/dev/null` if test -n "$kargmax"; then lt_cv_sys_max_cmd_len=`echo $kargmax | sed 's/.*[[ ]]//'` else lt_cv_sys_max_cmd_len=32768 fi ;; *) lt_cv_sys_max_cmd_len=`(getconf ARG_MAX) 2> /dev/null` if test -n "$lt_cv_sys_max_cmd_len" && \ test undefined != "$lt_cv_sys_max_cmd_len"; then lt_cv_sys_max_cmd_len=`expr $lt_cv_sys_max_cmd_len \/ 4` lt_cv_sys_max_cmd_len=`expr $lt_cv_sys_max_cmd_len \* 3` else # Make teststring a little bigger before we do anything with it. # a 1K string should be a reasonable start. 
for i in 1 2 3 4 5 6 7 8; do teststring=$teststring$teststring done SHELL=${SHELL-${CONFIG_SHELL-/bin/sh}} # If test is not a shell built-in, we'll probably end up computing a # maximum length that is only half of the actual maximum length, but # we can't tell. while { test X`env echo "$teststring$teststring" 2>/dev/null` \ = "X$teststring$teststring"; } >/dev/null 2>&1 && test 17 != "$i" # 1/2 MB should be enough do i=`expr $i + 1` teststring=$teststring$teststring done # Only check the string length outside the loop. lt_cv_sys_max_cmd_len=`expr "X$teststring" : ".*" 2>&1` teststring= # Add a significant safety factor because C++ compilers can tack on # massive amounts of additional arguments before passing them to the # linker. It appears as though 1/2 is a usable value. lt_cv_sys_max_cmd_len=`expr $lt_cv_sys_max_cmd_len \/ 2` fi ;; esac ]) if test -n "$lt_cv_sys_max_cmd_len"; then AC_MSG_RESULT($lt_cv_sys_max_cmd_len) else AC_MSG_RESULT(none) fi max_cmd_len=$lt_cv_sys_max_cmd_len _LT_DECL([], [max_cmd_len], [0], [What is the maximum length of a command?]) ])# LT_CMD_MAX_LEN # Old name: AU_ALIAS([AC_LIBTOOL_SYS_MAX_CMD_LEN], [LT_CMD_MAX_LEN]) dnl aclocal-1.4 backwards compatibility: dnl AC_DEFUN([AC_LIBTOOL_SYS_MAX_CMD_LEN], []) # _LT_HEADER_DLFCN # ---------------- m4_defun([_LT_HEADER_DLFCN], [AC_CHECK_HEADERS([dlfcn.h], [], [], [AC_INCLUDES_DEFAULT])dnl ])# _LT_HEADER_DLFCN # _LT_TRY_DLOPEN_SELF (ACTION-IF-TRUE, ACTION-IF-TRUE-W-USCORE, # ACTION-IF-FALSE, ACTION-IF-CROSS-COMPILING) # ---------------------------------------------------------------- m4_defun([_LT_TRY_DLOPEN_SELF], [m4_require([_LT_HEADER_DLFCN])dnl if test yes = "$cross_compiling"; then : [$4] else lt_dlunknown=0; lt_dlno_uscore=1; lt_dlneed_uscore=2 lt_status=$lt_dlunknown cat > conftest.$ac_ext <<_LT_EOF [#line $LINENO "configure" #include "confdefs.h" #if HAVE_DLFCN_H #include #endif #include #ifdef RTLD_GLOBAL # define LT_DLGLOBAL RTLD_GLOBAL #else # ifdef DL_GLOBAL # define LT_DLGLOBAL 
DL_GLOBAL # else # define LT_DLGLOBAL 0 # endif #endif /* We may have to define LT_DLLAZY_OR_NOW in the command line if we find out it does not work in some platform. */ #ifndef LT_DLLAZY_OR_NOW # ifdef RTLD_LAZY # define LT_DLLAZY_OR_NOW RTLD_LAZY # else # ifdef DL_LAZY # define LT_DLLAZY_OR_NOW DL_LAZY # else # ifdef RTLD_NOW # define LT_DLLAZY_OR_NOW RTLD_NOW # else # ifdef DL_NOW # define LT_DLLAZY_OR_NOW DL_NOW # else # define LT_DLLAZY_OR_NOW 0 # endif # endif # endif # endif #endif /* When -fvisibility=hidden is used, assume the code has been annotated correspondingly for the symbols needed. */ #if defined __GNUC__ && (((__GNUC__ == 3) && (__GNUC_MINOR__ >= 3)) || (__GNUC__ > 3)) int fnord () __attribute__((visibility("default"))); #endif int fnord () { return 42; } int main () { void *self = dlopen (0, LT_DLGLOBAL|LT_DLLAZY_OR_NOW); int status = $lt_dlunknown; if (self) { if (dlsym (self,"fnord")) status = $lt_dlno_uscore; else { if (dlsym( self,"_fnord")) status = $lt_dlneed_uscore; else puts (dlerror ()); } /* dlclose (self); */ } else puts (dlerror ()); return status; }] _LT_EOF if AC_TRY_EVAL(ac_link) && test -s "conftest$ac_exeext" 2>/dev/null; then (./conftest; exit; ) >&AS_MESSAGE_LOG_FD 2>/dev/null lt_status=$? 
case x$lt_status in x$lt_dlno_uscore) $1 ;; x$lt_dlneed_uscore) $2 ;; x$lt_dlunknown|x*) $3 ;; esac else : # compilation failed $3 fi fi rm -fr conftest* ])# _LT_TRY_DLOPEN_SELF # LT_SYS_DLOPEN_SELF # ------------------ AC_DEFUN([LT_SYS_DLOPEN_SELF], [m4_require([_LT_HEADER_DLFCN])dnl if test yes != "$enable_dlopen"; then enable_dlopen=unknown enable_dlopen_self=unknown enable_dlopen_self_static=unknown else lt_cv_dlopen=no lt_cv_dlopen_libs= case $host_os in beos*) lt_cv_dlopen=load_add_on lt_cv_dlopen_libs= lt_cv_dlopen_self=yes ;; mingw* | pw32* | cegcc*) lt_cv_dlopen=LoadLibrary lt_cv_dlopen_libs= ;; cygwin*) lt_cv_dlopen=dlopen lt_cv_dlopen_libs= ;; darwin*) # if libdl is installed we need to link against it AC_CHECK_LIB([dl], [dlopen], [lt_cv_dlopen=dlopen lt_cv_dlopen_libs=-ldl],[ lt_cv_dlopen=dyld lt_cv_dlopen_libs= lt_cv_dlopen_self=yes ]) ;; tpf*) # Don't try to run any link tests for TPF. We know it's impossible # because TPF is a cross-compiler, and we know how we open DSOs. 
lt_cv_dlopen=dlopen lt_cv_dlopen_libs= lt_cv_dlopen_self=no ;; *) AC_CHECK_FUNC([shl_load], [lt_cv_dlopen=shl_load], [AC_CHECK_LIB([dld], [shl_load], [lt_cv_dlopen=shl_load lt_cv_dlopen_libs=-ldld], [AC_CHECK_FUNC([dlopen], [lt_cv_dlopen=dlopen], [AC_CHECK_LIB([dl], [dlopen], [lt_cv_dlopen=dlopen lt_cv_dlopen_libs=-ldl], [AC_CHECK_LIB([svld], [dlopen], [lt_cv_dlopen=dlopen lt_cv_dlopen_libs=-lsvld], [AC_CHECK_LIB([dld], [dld_link], [lt_cv_dlopen=dld_link lt_cv_dlopen_libs=-ldld]) ]) ]) ]) ]) ]) ;; esac if test no = "$lt_cv_dlopen"; then enable_dlopen=no else enable_dlopen=yes fi case $lt_cv_dlopen in dlopen) save_CPPFLAGS=$CPPFLAGS test yes = "$ac_cv_header_dlfcn_h" && CPPFLAGS="$CPPFLAGS -DHAVE_DLFCN_H" save_LDFLAGS=$LDFLAGS wl=$lt_prog_compiler_wl eval LDFLAGS=\"\$LDFLAGS $export_dynamic_flag_spec\" save_LIBS=$LIBS LIBS="$lt_cv_dlopen_libs $LIBS" AC_CACHE_CHECK([whether a program can dlopen itself], lt_cv_dlopen_self, [dnl _LT_TRY_DLOPEN_SELF( lt_cv_dlopen_self=yes, lt_cv_dlopen_self=yes, lt_cv_dlopen_self=no, lt_cv_dlopen_self=cross) ]) if test yes = "$lt_cv_dlopen_self"; then wl=$lt_prog_compiler_wl eval LDFLAGS=\"\$LDFLAGS $lt_prog_compiler_static\" AC_CACHE_CHECK([whether a statically linked program can dlopen itself], lt_cv_dlopen_self_static, [dnl _LT_TRY_DLOPEN_SELF( lt_cv_dlopen_self_static=yes, lt_cv_dlopen_self_static=yes, lt_cv_dlopen_self_static=no, lt_cv_dlopen_self_static=cross) ]) fi CPPFLAGS=$save_CPPFLAGS LDFLAGS=$save_LDFLAGS LIBS=$save_LIBS ;; esac case $lt_cv_dlopen_self in yes|no) enable_dlopen_self=$lt_cv_dlopen_self ;; *) enable_dlopen_self=unknown ;; esac case $lt_cv_dlopen_self_static in yes|no) enable_dlopen_self_static=$lt_cv_dlopen_self_static ;; *) enable_dlopen_self_static=unknown ;; esac fi _LT_DECL([dlopen_support], [enable_dlopen], [0], [Whether dlopen is supported]) _LT_DECL([dlopen_self], [enable_dlopen_self], [0], [Whether dlopen of programs is supported]) _LT_DECL([dlopen_self_static], [enable_dlopen_self_static], [0], 
[Whether dlopen of statically linked programs is supported]) ])# LT_SYS_DLOPEN_SELF # Old name: AU_ALIAS([AC_LIBTOOL_DLOPEN_SELF], [LT_SYS_DLOPEN_SELF]) dnl aclocal-1.4 backwards compatibility: dnl AC_DEFUN([AC_LIBTOOL_DLOPEN_SELF], []) # _LT_COMPILER_C_O([TAGNAME]) # --------------------------- # Check to see if options -c and -o are simultaneously supported by compiler. # This macro does not hard code the compiler like AC_PROG_CC_C_O. m4_defun([_LT_COMPILER_C_O], [m4_require([_LT_DECL_SED])dnl m4_require([_LT_FILEUTILS_DEFAULTS])dnl m4_require([_LT_TAG_COMPILER])dnl AC_CACHE_CHECK([if $compiler supports -c -o file.$ac_objext], [_LT_TAGVAR(lt_cv_prog_compiler_c_o, $1)], [_LT_TAGVAR(lt_cv_prog_compiler_c_o, $1)=no $RM -r conftest 2>/dev/null mkdir conftest cd conftest mkdir out echo "$lt_simple_compile_test_code" > conftest.$ac_ext lt_compiler_flag="-o out/conftest2.$ac_objext" # Insert the option either (1) after the last *FLAGS variable, or # (2) before a word containing "conftest.", or (3) at the end. # Note that $ac_compile itself does not contain backslashes and begins # with a dollar sign (not a hyphen), so the echo should work correctly. lt_compile=`echo "$ac_compile" | $SED \ -e 's:.*FLAGS}\{0,1\} :&$lt_compiler_flag :; t' \ -e 's: [[^ ]]*conftest\.: $lt_compiler_flag&:; t' \ -e 's:$: $lt_compiler_flag:'` (eval echo "\"\$as_me:$LINENO: $lt_compile\"" >&AS_MESSAGE_LOG_FD) (eval "$lt_compile" 2>out/conftest.err) ac_status=$? cat out/conftest.err >&AS_MESSAGE_LOG_FD echo "$as_me:$LINENO: \$? = $ac_status" >&AS_MESSAGE_LOG_FD if (exit $ac_status) && test -s out/conftest2.$ac_objext then # The compiler can only warn and ignore the option if not recognized # So say no if there are warnings $ECHO "$_lt_compiler_boilerplate" | $SED '/^$/d' > out/conftest.exp $SED '/^$/d; /^ *+/d' out/conftest.err >out/conftest.er2 if test ! -s out/conftest.er2 || diff out/conftest.exp out/conftest.er2 >/dev/null; then _LT_TAGVAR(lt_cv_prog_compiler_c_o, $1)=yes fi fi chmod u+w . 
2>&AS_MESSAGE_LOG_FD $RM conftest* # SGI C++ compiler will create directory out/ii_files/ for # template instantiation test -d out/ii_files && $RM out/ii_files/* && rmdir out/ii_files $RM out/* && rmdir out cd .. $RM -r conftest $RM conftest* ]) _LT_TAGDECL([compiler_c_o], [lt_cv_prog_compiler_c_o], [1], [Does compiler simultaneously support -c and -o options?]) ])# _LT_COMPILER_C_O # _LT_COMPILER_FILE_LOCKS([TAGNAME]) # ---------------------------------- # Check to see if we can do hard links to lock some files if needed m4_defun([_LT_COMPILER_FILE_LOCKS], [m4_require([_LT_ENABLE_LOCK])dnl m4_require([_LT_FILEUTILS_DEFAULTS])dnl _LT_COMPILER_C_O([$1]) hard_links=nottested if test no = "$_LT_TAGVAR(lt_cv_prog_compiler_c_o, $1)" && test no != "$need_locks"; then # do not overwrite the value of need_locks provided by the user AC_MSG_CHECKING([if we can lock with hard links]) hard_links=yes $RM conftest* ln conftest.a conftest.b 2>/dev/null && hard_links=no touch conftest.a ln conftest.a conftest.b 2>&5 || hard_links=no ln conftest.a conftest.b 2>/dev/null && hard_links=no AC_MSG_RESULT([$hard_links]) if test no = "$hard_links"; then AC_MSG_WARN(['$CC' does not support '-c -o', so 'make -j' may be unsafe]) need_locks=warn fi else need_locks=no fi _LT_DECL([], [need_locks], [1], [Must we lock files when doing compilation?]) ])# _LT_COMPILER_FILE_LOCKS # _LT_CHECK_OBJDIR # ---------------- m4_defun([_LT_CHECK_OBJDIR], [AC_CACHE_CHECK([for objdir], [lt_cv_objdir], [rm -f .libs 2>/dev/null mkdir .libs 2>/dev/null if test -d .libs; then lt_cv_objdir=.libs else # MS-DOS does not allow filenames that begin with a dot. 
lt_cv_objdir=_libs fi rmdir .libs 2>/dev/null]) objdir=$lt_cv_objdir _LT_DECL([], [objdir], [0], [The name of the directory that contains temporary libtool files])dnl m4_pattern_allow([LT_OBJDIR])dnl AC_DEFINE_UNQUOTED([LT_OBJDIR], "$lt_cv_objdir/", [Define to the sub-directory where libtool stores uninstalled libraries.]) ])# _LT_CHECK_OBJDIR # _LT_LINKER_HARDCODE_LIBPATH([TAGNAME]) # -------------------------------------- # Check hardcoding attributes. m4_defun([_LT_LINKER_HARDCODE_LIBPATH], [AC_MSG_CHECKING([how to hardcode library paths into programs]) _LT_TAGVAR(hardcode_action, $1)= if test -n "$_LT_TAGVAR(hardcode_libdir_flag_spec, $1)" || test -n "$_LT_TAGVAR(runpath_var, $1)" || test yes = "$_LT_TAGVAR(hardcode_automatic, $1)"; then # We can hardcode non-existent directories. if test no != "$_LT_TAGVAR(hardcode_direct, $1)" && # If the only mechanism to avoid hardcoding is shlibpath_var, we # have to relink, otherwise we might link with an installed library # when we should be linking with a yet-to-be-installed one ## test no != "$_LT_TAGVAR(hardcode_shlibpath_var, $1)" && test no != "$_LT_TAGVAR(hardcode_minus_L, $1)"; then # Linking always hardcodes the temporary library directory. _LT_TAGVAR(hardcode_action, $1)=relink else # We can link without hardcoding, and we can hardcode nonexisting dirs. _LT_TAGVAR(hardcode_action, $1)=immediate fi else # We cannot hardcode anything, or else we can only hardcode existing # directories. 
_LT_TAGVAR(hardcode_action, $1)=unsupported fi AC_MSG_RESULT([$_LT_TAGVAR(hardcode_action, $1)]) if test relink = "$_LT_TAGVAR(hardcode_action, $1)" || test yes = "$_LT_TAGVAR(inherit_rpath, $1)"; then # Fast installation is not supported enable_fast_install=no elif test yes = "$shlibpath_overrides_runpath" || test no = "$enable_shared"; then # Fast installation is not necessary enable_fast_install=needless fi _LT_TAGDECL([], [hardcode_action], [0], [How to hardcode a shared library path into an executable]) ])# _LT_LINKER_HARDCODE_LIBPATH # _LT_CMD_STRIPLIB # ---------------- m4_defun([_LT_CMD_STRIPLIB], [m4_require([_LT_DECL_EGREP]) striplib= old_striplib= AC_MSG_CHECKING([whether stripping libraries is possible]) if test -n "$STRIP" && $STRIP -V 2>&1 | $GREP "GNU strip" >/dev/null; then test -z "$old_striplib" && old_striplib="$STRIP --strip-debug" test -z "$striplib" && striplib="$STRIP --strip-unneeded" AC_MSG_RESULT([yes]) else # FIXME - insert some real tests, host_os isn't really good enough case $host_os in darwin*) if test -n "$STRIP"; then striplib="$STRIP -x" old_striplib="$STRIP -S" AC_MSG_RESULT([yes]) else AC_MSG_RESULT([no]) fi ;; *) AC_MSG_RESULT([no]) ;; esac fi _LT_DECL([], [old_striplib], [1], [Commands to strip libraries]) _LT_DECL([], [striplib], [1]) ])# _LT_CMD_STRIPLIB # _LT_PREPARE_MUNGE_PATH_LIST # --------------------------- # Make sure func_munge_path_list() is defined correctly. 
m4_defun([_LT_PREPARE_MUNGE_PATH_LIST], [[# func_munge_path_list VARIABLE PATH # ----------------------------------- # VARIABLE is name of variable containing _space_ separated list of # directories to be munged by the contents of PATH, which is string # having a format: # "DIR[:DIR]:" # string "DIR[ DIR]" will be prepended to VARIABLE # ":DIR[:DIR]" # string "DIR[ DIR]" will be appended to VARIABLE # "DIRP[:DIRP]::[DIRA:]DIRA" # string "DIRP[ DIRP]" will be prepended to VARIABLE and string # "DIRA[ DIRA]" will be appended to VARIABLE # "DIR[:DIR]" # VARIABLE will be replaced by "DIR[ DIR]" func_munge_path_list () { case x@S|@2 in x) ;; *:) eval @S|@1=\"`$ECHO @S|@2 | $SED 's/:/ /g'` \@S|@@S|@1\" ;; x:*) eval @S|@1=\"\@S|@@S|@1 `$ECHO @S|@2 | $SED 's/:/ /g'`\" ;; *::*) eval @S|@1=\"\@S|@@S|@1\ `$ECHO @S|@2 | $SED -e 's/.*:://' -e 's/:/ /g'`\" eval @S|@1=\"`$ECHO @S|@2 | $SED -e 's/::.*//' -e 's/:/ /g'`\ \@S|@@S|@1\" ;; *) eval @S|@1=\"`$ECHO @S|@2 | $SED 's/:/ /g'`\" ;; esac } ]])# _LT_PREPARE_PATH_LIST # _LT_SYS_DYNAMIC_LINKER([TAG]) # ----------------------------- # PORTME Fill in your ld.so characteristics m4_defun([_LT_SYS_DYNAMIC_LINKER], [AC_REQUIRE([AC_CANONICAL_HOST])dnl m4_require([_LT_DECL_EGREP])dnl m4_require([_LT_FILEUTILS_DEFAULTS])dnl m4_require([_LT_DECL_OBJDUMP])dnl m4_require([_LT_DECL_SED])dnl m4_require([_LT_CHECK_SHELL_FEATURES])dnl m4_require([_LT_PREPARE_MUNGE_PATH_LIST])dnl AC_MSG_CHECKING([dynamic linker characteristics]) m4_if([$1], [], [ if test yes = "$GCC"; then case $host_os in darwin*) lt_awk_arg='/^libraries:/,/LR/' ;; *) lt_awk_arg='/^libraries:/' ;; esac case $host_os in mingw* | cegcc*) lt_sed_strip_eq='s|=\([[A-Za-z]]:\)|\1|g' ;; *) lt_sed_strip_eq='s|=/|/|g' ;; esac lt_search_path_spec=`$CC -print-search-dirs | awk $lt_awk_arg | $SED -e "s/^libraries://" -e $lt_sed_strip_eq` case $lt_search_path_spec in *\;*) # if the path contains ";" then we assume it to be the separator # otherwise default to the standard path separator (i.e. 
":") - it is # assumed that no part of a normal pathname contains ";" but that should # okay in the real world where ";" in dirpaths is itself problematic. lt_search_path_spec=`$ECHO "$lt_search_path_spec" | $SED 's/;/ /g'` ;; *) lt_search_path_spec=`$ECHO "$lt_search_path_spec" | $SED "s/$PATH_SEPARATOR/ /g"` ;; esac # Ok, now we have the path, separated by spaces, we can step through it # and add multilib dir if necessary... lt_tmp_lt_search_path_spec= lt_multi_os_dir=/`$CC $CPPFLAGS $CFLAGS $LDFLAGS -print-multi-os-directory 2>/dev/null` # ...but if some path component already ends with the multilib dir we assume # that all is fine and trust -print-search-dirs as is (GCC 4.2? or newer). case "$lt_multi_os_dir; $lt_search_path_spec " in "/; "* | "/.; "* | "/./; "* | *"$lt_multi_os_dir "* | *"$lt_multi_os_dir/ "*) lt_multi_os_dir= ;; esac for lt_sys_path in $lt_search_path_spec; do if test -d "$lt_sys_path$lt_multi_os_dir"; then lt_tmp_lt_search_path_spec="$lt_tmp_lt_search_path_spec $lt_sys_path$lt_multi_os_dir" elif test -n "$lt_multi_os_dir"; then test -d "$lt_sys_path" && \ lt_tmp_lt_search_path_spec="$lt_tmp_lt_search_path_spec $lt_sys_path" fi done lt_search_path_spec=`$ECHO "$lt_tmp_lt_search_path_spec" | awk ' BEGIN {RS = " "; FS = "/|\n";} { lt_foo = ""; lt_count = 0; for (lt_i = NF; lt_i > 0; lt_i--) { if ($lt_i != "" && $lt_i != ".") { if ($lt_i == "..") { lt_count++; } else { if (lt_count == 0) { lt_foo = "/" $lt_i lt_foo; } else { lt_count--; } } } } if (lt_foo != "") { lt_freq[[lt_foo]]++; } if (lt_freq[[lt_foo]] == 1) { print lt_foo; } }'` # AWK program above erroneously prepends '/' to C:/dos/paths # for these hosts. 
case $host_os in mingw* | cegcc*) lt_search_path_spec=`$ECHO "$lt_search_path_spec" |\ $SED 's|/\([[A-Za-z]]:\)|\1|g'` ;; esac sys_lib_search_path_spec=`$ECHO "$lt_search_path_spec" | $lt_NL2SP` else sys_lib_search_path_spec="/lib /usr/lib /usr/local/lib" fi]) library_names_spec= libname_spec='lib$name' soname_spec= shrext_cmds=.so postinstall_cmds= postuninstall_cmds= finish_cmds= finish_eval= shlibpath_var= shlibpath_overrides_runpath=unknown version_type=none dynamic_linker="$host_os ld.so" sys_lib_dlsearch_path_spec="/lib /usr/lib" need_lib_prefix=unknown hardcode_into_libs=no # when you set need_version to no, make sure it does not cause -set_version # flags to be left without arguments need_version=unknown AC_ARG_VAR([LT_SYS_LIBRARY_PATH], [User-defined run-time library search path.]) case $host_os in aix3*) version_type=linux # correct to gnu/linux during the next big refactor library_names_spec='$libname$release$shared_ext$versuffix $libname.a' shlibpath_var=LIBPATH # AIX 3 has no versioning support, so we append a major version to the name. soname_spec='$libname$release$shared_ext$major' ;; aix[[4-9]]*) version_type=linux # correct to gnu/linux during the next big refactor need_lib_prefix=no need_version=no hardcode_into_libs=yes if test ia64 = "$host_cpu"; then # AIX 5 supports IA64 library_names_spec='$libname$release$shared_ext$major $libname$release$shared_ext$versuffix $libname$shared_ext' shlibpath_var=LD_LIBRARY_PATH else # With GCC up to 2.95.x, collect2 would create an import file # for dependence libraries. The import file would start with # the line '#! .'. This would cause the generated library to # depend on '.', always an invalid library. This was fixed in # development snapshots of GCC prior to 3.0. 
case $host_os in aix4 | aix4.[[01]] | aix4.[[01]].*) if { echo '#if __GNUC__ > 2 || (__GNUC__ == 2 && __GNUC_MINOR__ >= 97)' echo ' yes ' echo '#endif'; } | $CC -E - | $GREP yes > /dev/null; then : else can_build_shared=no fi ;; esac # Using Import Files as archive members, it is possible to support # filename-based versioning of shared library archives on AIX. While # this would work for both with and without runtime linking, it will # prevent static linking of such archives. So we do filename-based # shared library versioning with .so extension only, which is used # when both runtime linking and shared linking is enabled. # Unfortunately, runtime linking may impact performance, so we do # not want this to be the default eventually. Also, we use the # versioned .so libs for executables only if there is the -brtl # linker flag in LDFLAGS as well, or --with-aix-soname=svr4 only. # To allow for filename-based versioning support, we need to create # libNAME.so.V as an archive file, containing: # *) an Import File, referring to the versioned filename of the # archive as well as the shared archive member, telling the # bitwidth (32 or 64) of that shared object, and providing the # list of exported symbols of that shared object, eventually # decorated with the 'weak' keyword # *) the shared object with the F_LOADONLY flag set, to really avoid # it being seen by the linker. # At run time we better use the real file rather than another symlink, # but for link time we create the symlink libNAME.so -> libNAME.so.V case $with_aix_soname,$aix_use_runtimelinking in # AIX (on Power*) has no versioning support, so currently we cannot hardcode correct # soname into executable. Probably we can add versioning support to # collect2, so additional links can be useful in future. 
aix,yes) # traditional libtool dynamic_linker='AIX unversionable lib.so' # If using run time linking (on AIX 4.2 or later) use lib.so # instead of lib.a to let people know that these are not # typical AIX shared libraries. library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' ;; aix,no) # traditional AIX only dynamic_linker='AIX lib.a[(]lib.so.V[)]' # We preserve .a as extension for shared libraries through AIX4.2 # and later when we are not doing run time linking. library_names_spec='$libname$release.a $libname.a' soname_spec='$libname$release$shared_ext$major' ;; svr4,*) # full svr4 only dynamic_linker="AIX lib.so.V[(]$shared_archive_member_spec.o[)]" library_names_spec='$libname$release$shared_ext$major $libname$shared_ext' # We do not specify a path in Import Files, so LIBPATH fires. shlibpath_overrides_runpath=yes ;; *,yes) # both, prefer svr4 dynamic_linker="AIX lib.so.V[(]$shared_archive_member_spec.o[)], lib.a[(]lib.so.V[)]" library_names_spec='$libname$release$shared_ext$major $libname$shared_ext' # unpreferred sharedlib libNAME.a needs extra handling postinstall_cmds='test -n "$linkname" || linkname="$realname"~func_stripname "" ".so" "$linkname"~$install_shared_prog "$dir/$func_stripname_result.$libext" "$destdir/$func_stripname_result.$libext"~test -z "$tstripme" || test -z "$striplib" || $striplib "$destdir/$func_stripname_result.$libext"' postuninstall_cmds='for n in $library_names $old_library; do :; done~func_stripname "" ".so" "$n"~test "$func_stripname_result" = "$n" || func_append rmfiles " $odir/$func_stripname_result.$libext"' # We do not specify a path in Import Files, so LIBPATH fires. 
shlibpath_overrides_runpath=yes ;; *,no) # both, prefer aix dynamic_linker="AIX lib.a[(]lib.so.V[)], lib.so.V[(]$shared_archive_member_spec.o[)]" library_names_spec='$libname$release.a $libname.a' soname_spec='$libname$release$shared_ext$major' # unpreferred sharedlib libNAME.so.V and symlink libNAME.so need extra handling postinstall_cmds='test -z "$dlname" || $install_shared_prog $dir/$dlname $destdir/$dlname~test -z "$tstripme" || test -z "$striplib" || $striplib $destdir/$dlname~test -n "$linkname" || linkname=$realname~func_stripname "" ".a" "$linkname"~(cd "$destdir" && $LN_S -f $dlname $func_stripname_result.so)' postuninstall_cmds='test -z "$dlname" || func_append rmfiles " $odir/$dlname"~for n in $old_library $library_names; do :; done~func_stripname "" ".a" "$n"~func_append rmfiles " $odir/$func_stripname_result.so"' ;; esac shlibpath_var=LIBPATH fi ;; amigaos*) case $host_cpu in powerpc) # Since July 2007 AmigaOS4 officially supports .so libraries. # When compiling the executable, add -use-dynld -Lsobjs: to the compileline. library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' ;; m68k) library_names_spec='$libname.ixlibrary $libname.a' # Create ${libname}_ixlibrary.a entries in /sys/libs. 
finish_eval='for lib in `ls $libdir/*.ixlibrary 2>/dev/null`; do libname=`func_echo_all "$lib" | $SED '\''s%^.*/\([[^/]]*\)\.ixlibrary$%\1%'\''`; $RM /sys/libs/${libname}_ixlibrary.a; $show "cd /sys/libs && $LN_S $lib ${libname}_ixlibrary.a"; cd /sys/libs && $LN_S $lib ${libname}_ixlibrary.a || exit 1; done' ;; esac ;; beos*) library_names_spec='$libname$shared_ext' dynamic_linker="$host_os ld.so" shlibpath_var=LIBRARY_PATH ;; bsdi[[45]]*) version_type=linux # correct to gnu/linux during the next big refactor need_version=no library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' soname_spec='$libname$release$shared_ext$major' finish_cmds='PATH="\$PATH:/sbin" ldconfig $libdir' shlibpath_var=LD_LIBRARY_PATH sys_lib_search_path_spec="/shlib /usr/lib /usr/X11/lib /usr/contrib/lib /lib /usr/local/lib" sys_lib_dlsearch_path_spec="/shlib /usr/lib /usr/local/lib" # the default ld.so.conf also contains /usr/contrib/lib and # /usr/X11R6/lib (/usr/X11 is a link to /usr/X11R6), but let us allow # libtool to hard-code these into programs ;; cygwin* | mingw* | pw32* | cegcc*) version_type=windows shrext_cmds=.dll need_version=no need_lib_prefix=no case $GCC,$cc_basename in yes,*) # gcc library_names_spec='$libname.dll.a' # DLL is installed to $(libdir)/../bin by postinstall_cmds postinstall_cmds='base_file=`basename \$file`~ dlpath=`$SHELL 2>&1 -c '\''. $dir/'\''\$base_file'\''i; echo \$dlname'\''`~ dldir=$destdir/`dirname \$dlpath`~ test -d \$dldir || mkdir -p \$dldir~ $install_prog $dir/$dlname \$dldir/$dlname~ chmod a+x \$dldir/$dlname~ if test -n '\''$stripme'\'' && test -n '\''$striplib'\''; then eval '\''$striplib \$dldir/$dlname'\'' || exit \$?; fi' postuninstall_cmds='dldll=`$SHELL 2>&1 -c '\''. 
$file; echo \$dlname'\''`~ dlpath=$dir/\$dldll~ $RM \$dlpath' shlibpath_overrides_runpath=yes case $host_os in cygwin*) # Cygwin DLLs use 'cyg' prefix rather than 'lib' soname_spec='`echo $libname | sed -e 's/^lib/cyg/'``echo $release | $SED -e 's/[[.]]/-/g'`$versuffix$shared_ext' m4_if([$1], [],[ sys_lib_search_path_spec="$sys_lib_search_path_spec /usr/lib/w32api"]) ;; mingw* | cegcc*) # MinGW DLLs use traditional 'lib' prefix soname_spec='$libname`echo $release | $SED -e 's/[[.]]/-/g'`$versuffix$shared_ext' ;; pw32*) # pw32 DLLs use 'pw' prefix rather than 'lib' library_names_spec='`echo $libname | sed -e 's/^lib/pw/'``echo $release | $SED -e 's/[[.]]/-/g'`$versuffix$shared_ext' ;; esac dynamic_linker='Win32 ld.exe' ;; *,cl*) # Native MSVC libname_spec='$name' soname_spec='$libname`echo $release | $SED -e 's/[[.]]/-/g'`$versuffix$shared_ext' library_names_spec='$libname.dll.lib' case $build_os in mingw*) sys_lib_search_path_spec= lt_save_ifs=$IFS IFS=';' for lt_path in $LIB do IFS=$lt_save_ifs # Let DOS variable expansion print the short 8.3 style file name. lt_path=`cd "$lt_path" 2>/dev/null && cmd //C "for %i in (".") do @echo %~si"` sys_lib_search_path_spec="$sys_lib_search_path_spec $lt_path" done IFS=$lt_save_ifs # Convert to MSYS style. sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | sed -e 's|\\\\|/|g' -e 's| \\([[a-zA-Z]]\\):| /\\1|g' -e 's|^ ||'` ;; cygwin*) # Convert to unix form, then to dos form, then back to unix form # but this time dos style (no spaces!) so that the unix form looks # like /cygdrive/c/PROGRA~1:/cygdr... sys_lib_search_path_spec=`cygpath --path --unix "$LIB"` sys_lib_search_path_spec=`cygpath --path --dos "$sys_lib_search_path_spec" 2>/dev/null` sys_lib_search_path_spec=`cygpath --path --unix "$sys_lib_search_path_spec" | $SED -e "s/$PATH_SEPARATOR/ /g"` ;; *) sys_lib_search_path_spec=$LIB if $ECHO "$sys_lib_search_path_spec" | [$GREP ';[c-zC-Z]:/' >/dev/null]; then # It is most probably a Windows format PATH. 
sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | $SED -e 's/;/ /g'` else sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | $SED -e "s/$PATH_SEPARATOR/ /g"` fi # FIXME: find the short name or the path components, as spaces are # common. (e.g. "Program Files" -> "PROGRA~1") ;; esac # DLL is installed to $(libdir)/../bin by postinstall_cmds postinstall_cmds='base_file=`basename \$file`~ dlpath=`$SHELL 2>&1 -c '\''. $dir/'\''\$base_file'\''i; echo \$dlname'\''`~ dldir=$destdir/`dirname \$dlpath`~ test -d \$dldir || mkdir -p \$dldir~ $install_prog $dir/$dlname \$dldir/$dlname' postuninstall_cmds='dldll=`$SHELL 2>&1 -c '\''. $file; echo \$dlname'\''`~ dlpath=$dir/\$dldll~ $RM \$dlpath' shlibpath_overrides_runpath=yes dynamic_linker='Win32 link.exe' ;; *) # Assume MSVC wrapper library_names_spec='$libname`echo $release | $SED -e 's/[[.]]/-/g'`$versuffix$shared_ext $libname.lib' dynamic_linker='Win32 ld.exe' ;; esac # FIXME: first we should search . and the directory the executable is in shlibpath_var=PATH ;; darwin* | rhapsody*) dynamic_linker="$host_os dyld" version_type=darwin need_lib_prefix=no need_version=no library_names_spec='$libname$release$major$shared_ext $libname$shared_ext' soname_spec='$libname$release$major$shared_ext' shlibpath_overrides_runpath=yes shlibpath_var=DYLD_LIBRARY_PATH shrext_cmds='`test .$module = .yes && echo .so || echo .dylib`' m4_if([$1], [],[ sys_lib_search_path_spec="$sys_lib_search_path_spec /usr/local/lib"]) sys_lib_dlsearch_path_spec='/usr/local/lib /lib /usr/lib' ;; dgux*) version_type=linux # correct to gnu/linux during the next big refactor need_lib_prefix=no need_version=no library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' soname_spec='$libname$release$shared_ext$major' shlibpath_var=LD_LIBRARY_PATH ;; freebsd* | dragonfly*) # DragonFly does not have aout. When/if they implement a new # versioning mechanism, adjust this. 
if test -x /usr/bin/objformat; then objformat=`/usr/bin/objformat` else case $host_os in freebsd[[23]].*) objformat=aout ;; *) objformat=elf ;; esac fi version_type=freebsd-$objformat case $version_type in freebsd-elf*) library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' soname_spec='$libname$release$shared_ext$major' need_version=no need_lib_prefix=no ;; freebsd-*) library_names_spec='$libname$release$shared_ext$versuffix $libname$shared_ext$versuffix' need_version=yes ;; esac shlibpath_var=LD_LIBRARY_PATH case $host_os in freebsd2.*) shlibpath_overrides_runpath=yes ;; freebsd3.[[01]]* | freebsdelf3.[[01]]*) shlibpath_overrides_runpath=yes hardcode_into_libs=yes ;; freebsd3.[[2-9]]* | freebsdelf3.[[2-9]]* | \ freebsd4.[[0-5]] | freebsdelf4.[[0-5]] | freebsd4.1.1 | freebsdelf4.1.1) shlibpath_overrides_runpath=no hardcode_into_libs=yes ;; *) # from 4.6 on, and DragonFly shlibpath_overrides_runpath=yes hardcode_into_libs=yes ;; esac ;; haiku*) version_type=linux # correct to gnu/linux during the next big refactor need_lib_prefix=no need_version=no dynamic_linker="$host_os runtime_loader" library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' soname_spec='$libname$release$shared_ext$major' shlibpath_var=LIBRARY_PATH shlibpath_overrides_runpath=no sys_lib_dlsearch_path_spec='/boot/home/config/lib /boot/common/lib /boot/system/lib' hardcode_into_libs=yes ;; hpux9* | hpux10* | hpux11*) # Give a soname corresponding to the major version so that dld.sl refuses to # link against other versions. version_type=sunos need_lib_prefix=no need_version=no case $host_cpu in ia64*) shrext_cmds='.so' hardcode_into_libs=yes dynamic_linker="$host_os dld.so" shlibpath_var=LD_LIBRARY_PATH shlibpath_overrides_runpath=yes # Unless +noenvvar is specified. 
library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' soname_spec='$libname$release$shared_ext$major' if test 32 = "$HPUX_IA64_MODE"; then sys_lib_search_path_spec="/usr/lib/hpux32 /usr/local/lib/hpux32 /usr/local/lib" sys_lib_dlsearch_path_spec=/usr/lib/hpux32 else sys_lib_search_path_spec="/usr/lib/hpux64 /usr/local/lib/hpux64" sys_lib_dlsearch_path_spec=/usr/lib/hpux64 fi ;; hppa*64*) shrext_cmds='.sl' hardcode_into_libs=yes dynamic_linker="$host_os dld.sl" shlibpath_var=LD_LIBRARY_PATH # How should we handle SHLIB_PATH shlibpath_overrides_runpath=yes # Unless +noenvvar is specified. library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' soname_spec='$libname$release$shared_ext$major' sys_lib_search_path_spec="/usr/lib/pa20_64 /usr/ccs/lib/pa20_64" sys_lib_dlsearch_path_spec=$sys_lib_search_path_spec ;; *) shrext_cmds='.sl' dynamic_linker="$host_os dld.sl" shlibpath_var=SHLIB_PATH shlibpath_overrides_runpath=no # +s is required to enable SHLIB_PATH library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' soname_spec='$libname$release$shared_ext$major' ;; esac # HP-UX runs *really* slowly unless shared libraries are mode 555, ... 
postinstall_cmds='chmod 555 $lib' # or fails outright, so override atomically: install_override_mode=555 ;; interix[[3-9]]*) version_type=linux # correct to gnu/linux during the next big refactor need_lib_prefix=no need_version=no library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' soname_spec='$libname$release$shared_ext$major' dynamic_linker='Interix 3.x ld.so.1 (PE, like ELF)' shlibpath_var=LD_LIBRARY_PATH shlibpath_overrides_runpath=no hardcode_into_libs=yes ;; irix5* | irix6* | nonstopux*) case $host_os in nonstopux*) version_type=nonstopux ;; *) if test yes = "$lt_cv_prog_gnu_ld"; then version_type=linux # correct to gnu/linux during the next big refactor else version_type=irix fi ;; esac need_lib_prefix=no need_version=no soname_spec='$libname$release$shared_ext$major' library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$release$shared_ext $libname$shared_ext' case $host_os in irix5* | nonstopux*) libsuff= shlibsuff= ;; *) case $LD in # libtool.m4 will add one of these switches to LD *-32|*"-32 "|*-melf32bsmip|*"-melf32bsmip ") libsuff= shlibsuff= libmagic=32-bit;; *-n32|*"-n32 "|*-melf32bmipn32|*"-melf32bmipn32 ") libsuff=32 shlibsuff=N32 libmagic=N32;; *-64|*"-64 "|*-melf64bmip|*"-melf64bmip ") libsuff=64 shlibsuff=64 libmagic=64-bit;; *) libsuff= shlibsuff= libmagic=never-match;; esac ;; esac shlibpath_var=LD_LIBRARY${shlibsuff}_PATH shlibpath_overrides_runpath=no sys_lib_search_path_spec="/usr/lib$libsuff /lib$libsuff /usr/local/lib$libsuff" sys_lib_dlsearch_path_spec="/usr/lib$libsuff /lib$libsuff" hardcode_into_libs=yes ;; # No shared lib support for Linux oldld, aout, or coff. linux*oldld* | linux*aout* | linux*coff*) dynamic_linker=no ;; linux*android*) version_type=none # Android doesn't support versioned libraries. 
need_lib_prefix=no need_version=no library_names_spec='$libname$release$shared_ext' soname_spec='$libname$release$shared_ext' finish_cmds= shlibpath_var=LD_LIBRARY_PATH shlibpath_overrides_runpath=yes # This implies no fast_install, which is unacceptable. # Some rework will be needed to allow for fast_install # before this can be enabled. hardcode_into_libs=yes dynamic_linker='Android linker' # Don't embed -rpath directories since the linker doesn't support them. _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-L$libdir' ;; # This must be glibc/ELF. linux* | k*bsd*-gnu | kopensolaris*-gnu | gnu*) version_type=linux # correct to gnu/linux during the next big refactor need_lib_prefix=no need_version=no library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' soname_spec='$libname$release$shared_ext$major' finish_cmds='PATH="\$PATH:/sbin" ldconfig -n $libdir' shlibpath_var=LD_LIBRARY_PATH shlibpath_overrides_runpath=no # Some binutils ld are patched to set DT_RUNPATH AC_CACHE_VAL([lt_cv_shlibpath_overrides_runpath], [lt_cv_shlibpath_overrides_runpath=no save_LDFLAGS=$LDFLAGS save_libdir=$libdir eval "libdir=/foo; wl=\"$_LT_TAGVAR(lt_prog_compiler_wl, $1)\"; \ LDFLAGS=\"\$LDFLAGS $_LT_TAGVAR(hardcode_libdir_flag_spec, $1)\"" AC_LINK_IFELSE([AC_LANG_PROGRAM([],[])], [AS_IF([ ($OBJDUMP -p conftest$ac_exeext) 2>/dev/null | grep "RUNPATH.*$libdir" >/dev/null], [lt_cv_shlibpath_overrides_runpath=yes])]) LDFLAGS=$save_LDFLAGS libdir=$save_libdir ]) shlibpath_overrides_runpath=$lt_cv_shlibpath_overrides_runpath # This implies no fast_install, which is unacceptable. # Some rework will be needed to allow for fast_install # before this can be enabled. hardcode_into_libs=yes # Ideally, we could use ldconfig to report *all* directores which are # searched for libraries, however this is still not possible. 
Aside from not # being certain /sbin/ldconfig is available, command # 'ldconfig -N -X -v | grep ^/' on 64bit Fedora does not report /usr/lib64, # even though it is searched at run-time. Try to do the best guess by # appending ld.so.conf contents (and includes) to the search path. if test -f /etc/ld.so.conf; then lt_ld_extra=`awk '/^include / { system(sprintf("cd /etc; cat %s 2>/dev/null", \[$]2)); skip = 1; } { if (!skip) print \[$]0; skip = 0; }' < /etc/ld.so.conf | $SED -e 's/#.*//;/^[ ]*hwcap[ ]/d;s/[:, ]/ /g;s/=[^=]*$//;s/=[^= ]* / /g;s/"//g;/^$/d' | tr '\n' ' '` sys_lib_dlsearch_path_spec="/lib /usr/lib $lt_ld_extra" fi # We used to test for /lib/ld.so.1 and disable shared libraries on # powerpc, because MkLinux only supported shared libraries with the # GNU dynamic linker. Since this was broken with cross compilers, # most powerpc-linux boxes support dynamic linking these days and # people can always --disable-shared, the test was removed, and we # assume the GNU/Linux dynamic linker is in use. 
dynamic_linker='GNU/Linux ld.so' ;; netbsd*) version_type=sunos need_lib_prefix=no need_version=no if echo __ELF__ | $CC -E - | $GREP __ELF__ >/dev/null; then library_names_spec='$libname$release$shared_ext$versuffix $libname$shared_ext$versuffix' finish_cmds='PATH="\$PATH:/sbin" ldconfig -m $libdir' dynamic_linker='NetBSD (a.out) ld.so' else library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' soname_spec='$libname$release$shared_ext$major' dynamic_linker='NetBSD ld.elf_so' fi shlibpath_var=LD_LIBRARY_PATH shlibpath_overrides_runpath=yes hardcode_into_libs=yes ;; newsos6) version_type=linux # correct to gnu/linux during the next big refactor library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' shlibpath_var=LD_LIBRARY_PATH shlibpath_overrides_runpath=yes ;; *nto* | *qnx*) version_type=qnx need_lib_prefix=no need_version=no library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' soname_spec='$libname$release$shared_ext$major' shlibpath_var=LD_LIBRARY_PATH shlibpath_overrides_runpath=no hardcode_into_libs=yes dynamic_linker='ldqnx.so' ;; openbsd* | bitrig*) version_type=sunos sys_lib_dlsearch_path_spec=/usr/lib need_lib_prefix=no if test -z "`echo __ELF__ | $CC -E - | $GREP __ELF__`"; then need_version=no else need_version=yes fi library_names_spec='$libname$release$shared_ext$versuffix $libname$shared_ext$versuffix' finish_cmds='PATH="\$PATH:/sbin" ldconfig -m $libdir' shlibpath_var=LD_LIBRARY_PATH shlibpath_overrides_runpath=yes ;; os2*) libname_spec='$name' version_type=windows shrext_cmds=.dll need_version=no need_lib_prefix=no # OS/2 can only load a DLL with a base name of 8 characters or less. soname_spec='`test -n "$os2dllname" && libname="$os2dllname"; v=$($ECHO $release$versuffix | tr -d .-); n=$($ECHO $libname | cut -b -$((8 - ${#v})) | tr . 
_); $ECHO $n$v`$shared_ext' library_names_spec='${libname}_dll.$libext' dynamic_linker='OS/2 ld.exe' shlibpath_var=BEGINLIBPATH sys_lib_search_path_spec="/lib /usr/lib /usr/local/lib" sys_lib_dlsearch_path_spec=$sys_lib_search_path_spec postinstall_cmds='base_file=`basename \$file`~ dlpath=`$SHELL 2>&1 -c '\''. $dir/'\''\$base_file'\''i; $ECHO \$dlname'\''`~ dldir=$destdir/`dirname \$dlpath`~ test -d \$dldir || mkdir -p \$dldir~ $install_prog $dir/$dlname \$dldir/$dlname~ chmod a+x \$dldir/$dlname~ if test -n '\''$stripme'\'' && test -n '\''$striplib'\''; then eval '\''$striplib \$dldir/$dlname'\'' || exit \$?; fi' postuninstall_cmds='dldll=`$SHELL 2>&1 -c '\''. $file; $ECHO \$dlname'\''`~ dlpath=$dir/\$dldll~ $RM \$dlpath' ;; osf3* | osf4* | osf5*) version_type=osf need_lib_prefix=no need_version=no soname_spec='$libname$release$shared_ext$major' library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' shlibpath_var=LD_LIBRARY_PATH sys_lib_search_path_spec="/usr/shlib /usr/ccs/lib /usr/lib/cmplrs/cc /usr/lib /usr/local/lib /var/shlib" sys_lib_dlsearch_path_spec=$sys_lib_search_path_spec ;; rdos*) dynamic_linker=no ;; solaris*) version_type=linux # correct to gnu/linux during the next big refactor need_lib_prefix=no need_version=no library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' soname_spec='$libname$release$shared_ext$major' shlibpath_var=LD_LIBRARY_PATH shlibpath_overrides_runpath=yes hardcode_into_libs=yes # ldd complains unless libraries are executable postinstall_cmds='chmod +x $lib' ;; sunos4*) version_type=sunos library_names_spec='$libname$release$shared_ext$versuffix $libname$shared_ext$versuffix' finish_cmds='PATH="\$PATH:/usr/etc" ldconfig $libdir' shlibpath_var=LD_LIBRARY_PATH shlibpath_overrides_runpath=yes if test yes = "$with_gnu_ld"; then need_lib_prefix=no fi need_version=yes ;; sysv4 | sysv4.3*) version_type=linux # correct to 
gnu/linux during the next big refactor library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' soname_spec='$libname$release$shared_ext$major' shlibpath_var=LD_LIBRARY_PATH case $host_vendor in sni) shlibpath_overrides_runpath=no need_lib_prefix=no runpath_var=LD_RUN_PATH ;; siemens) need_lib_prefix=no ;; motorola) need_lib_prefix=no need_version=no shlibpath_overrides_runpath=no sys_lib_search_path_spec='/lib /usr/lib /usr/ccs/lib' ;; esac ;; sysv4*MP*) if test -d /usr/nec; then version_type=linux # correct to gnu/linux during the next big refactor library_names_spec='$libname$shared_ext.$versuffix $libname$shared_ext.$major $libname$shared_ext' soname_spec='$libname$shared_ext.$major' shlibpath_var=LD_LIBRARY_PATH fi ;; sysv5* | sco3.2v5* | sco5v6* | unixware* | OpenUNIX* | sysv4*uw2*) version_type=sco need_lib_prefix=no need_version=no library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext $libname$shared_ext' soname_spec='$libname$release$shared_ext$major' shlibpath_var=LD_LIBRARY_PATH shlibpath_overrides_runpath=yes hardcode_into_libs=yes if test yes = "$with_gnu_ld"; then sys_lib_search_path_spec='/usr/local/lib /usr/gnu/lib /usr/ccs/lib /usr/lib /lib' else sys_lib_search_path_spec='/usr/ccs/lib /usr/lib' case $host_os in sco3.2v5*) sys_lib_search_path_spec="$sys_lib_search_path_spec /lib" ;; esac fi sys_lib_dlsearch_path_spec='/usr/lib' ;; tpf*) # TPF is a cross-target only. Preferred cross-host = GNU/Linux. 
version_type=linux # correct to gnu/linux during the next big refactor need_lib_prefix=no need_version=no library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' shlibpath_var=LD_LIBRARY_PATH shlibpath_overrides_runpath=no hardcode_into_libs=yes ;; uts4*) version_type=linux # correct to gnu/linux during the next big refactor library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' soname_spec='$libname$release$shared_ext$major' shlibpath_var=LD_LIBRARY_PATH ;; *) dynamic_linker=no ;; esac AC_MSG_RESULT([$dynamic_linker]) test no = "$dynamic_linker" && can_build_shared=no variables_saved_for_relink="PATH $shlibpath_var $runpath_var" if test yes = "$GCC"; then variables_saved_for_relink="$variables_saved_for_relink GCC_EXEC_PREFIX COMPILER_PATH LIBRARY_PATH" fi if test set = "${lt_cv_sys_lib_search_path_spec+set}"; then sys_lib_search_path_spec=$lt_cv_sys_lib_search_path_spec fi if test set = "${lt_cv_sys_lib_dlsearch_path_spec+set}"; then sys_lib_dlsearch_path_spec=$lt_cv_sys_lib_dlsearch_path_spec fi # remember unaugmented sys_lib_dlsearch_path content for libtool script decls... configure_time_dlsearch_path=$sys_lib_dlsearch_path_spec # ... 
but it needs LT_SYS_LIBRARY_PATH munging for other configure-time code func_munge_path_list sys_lib_dlsearch_path_spec "$LT_SYS_LIBRARY_PATH" # to be used as default LT_SYS_LIBRARY_PATH value in generated libtool configure_time_lt_sys_library_path=$LT_SYS_LIBRARY_PATH _LT_DECL([], [variables_saved_for_relink], [1], [Variables whose values should be saved in libtool wrapper scripts and restored at link time]) _LT_DECL([], [need_lib_prefix], [0], [Do we need the "lib" prefix for modules?]) _LT_DECL([], [need_version], [0], [Do we need a version for libraries?]) _LT_DECL([], [version_type], [0], [Library versioning type]) _LT_DECL([], [runpath_var], [0], [Shared library runtime path variable]) _LT_DECL([], [shlibpath_var], [0],[Shared library path variable]) _LT_DECL([], [shlibpath_overrides_runpath], [0], [Is shlibpath searched before the hard-coded library search path?]) _LT_DECL([], [libname_spec], [1], [Format of library name prefix]) _LT_DECL([], [library_names_spec], [1], [[List of archive names. First name is the real one, the rest are links. 
The last name is the one that the linker finds with -lNAME]]) _LT_DECL([], [soname_spec], [1], [[The coded name of the library, if different from the real name]]) _LT_DECL([], [install_override_mode], [1], [Permission mode override for installation of shared libraries]) _LT_DECL([], [postinstall_cmds], [2], [Command to use after installation of a shared archive]) _LT_DECL([], [postuninstall_cmds], [2], [Command to use after uninstallation of a shared archive]) _LT_DECL([], [finish_cmds], [2], [Commands used to finish a libtool library installation in a directory]) _LT_DECL([], [finish_eval], [1], [[As "finish_cmds", except a single script fragment to be evaled but not shown]]) _LT_DECL([], [hardcode_into_libs], [0], [Whether we should hardcode library paths into libraries]) _LT_DECL([], [sys_lib_search_path_spec], [2], [Compile-time system search path for libraries]) _LT_DECL([sys_lib_dlsearch_path_spec], [configure_time_dlsearch_path], [2], [Detected run-time system search path for libraries]) _LT_DECL([], [configure_time_lt_sys_library_path], [2], [Explicit LT_SYS_LIBRARY_PATH set during ./configure time]) ])# _LT_SYS_DYNAMIC_LINKER # _LT_PATH_TOOL_PREFIX(TOOL) # -------------------------- # find a file program that can recognize shared library AC_DEFUN([_LT_PATH_TOOL_PREFIX], [m4_require([_LT_DECL_EGREP])dnl AC_MSG_CHECKING([for $1]) AC_CACHE_VAL(lt_cv_path_MAGIC_CMD, [case $MAGIC_CMD in [[\\/*] | ?:[\\/]*]) lt_cv_path_MAGIC_CMD=$MAGIC_CMD # Let the user override the test with a path. ;; *) lt_save_MAGIC_CMD=$MAGIC_CMD lt_save_ifs=$IFS; IFS=$PATH_SEPARATOR dnl $ac_dummy forces splitting on constant user-supplied paths. dnl POSIX.2 word splitting is done only on the output of word expansions, dnl not every word. This closes a longstanding sh security hole. ac_dummy="m4_if([$2], , $PATH, [$2])" for ac_dir in $ac_dummy; do IFS=$lt_save_ifs test -z "$ac_dir" && ac_dir=. 
if test -f "$ac_dir/$1"; then lt_cv_path_MAGIC_CMD=$ac_dir/"$1" if test -n "$file_magic_test_file"; then case $deplibs_check_method in "file_magic "*) file_magic_regex=`expr "$deplibs_check_method" : "file_magic \(.*\)"` MAGIC_CMD=$lt_cv_path_MAGIC_CMD if eval $file_magic_cmd \$file_magic_test_file 2> /dev/null | $EGREP "$file_magic_regex" > /dev/null; then : else cat <<_LT_EOF 1>&2 *** Warning: the command libtool uses to detect shared libraries, *** $file_magic_cmd, produces output that libtool cannot recognize. *** The result is that libtool may fail to recognize shared libraries *** as such. This will affect the creation of libtool libraries that *** depend on shared libraries, but programs linked with such libtool *** libraries will work regardless of this problem. Nevertheless, you *** may want to report the problem to your system manager and/or to *** bug-libtool@gnu.org _LT_EOF fi ;; esac fi break fi done IFS=$lt_save_ifs MAGIC_CMD=$lt_save_MAGIC_CMD ;; esac]) MAGIC_CMD=$lt_cv_path_MAGIC_CMD if test -n "$MAGIC_CMD"; then AC_MSG_RESULT($MAGIC_CMD) else AC_MSG_RESULT(no) fi _LT_DECL([], [MAGIC_CMD], [0], [Used to examine libraries when file_magic_cmd begins with "file"])dnl ])# _LT_PATH_TOOL_PREFIX # Old name: AU_ALIAS([AC_PATH_TOOL_PREFIX], [_LT_PATH_TOOL_PREFIX]) dnl aclocal-1.4 backwards compatibility: dnl AC_DEFUN([AC_PATH_TOOL_PREFIX], []) # _LT_PATH_MAGIC # -------------- # find a file program that can recognize a shared library m4_defun([_LT_PATH_MAGIC], [_LT_PATH_TOOL_PREFIX(${ac_tool_prefix}file, /usr/bin$PATH_SEPARATOR$PATH) if test -z "$lt_cv_path_MAGIC_CMD"; then if test -n "$ac_tool_prefix"; then _LT_PATH_TOOL_PREFIX(file, /usr/bin$PATH_SEPARATOR$PATH) else MAGIC_CMD=: fi fi ])# _LT_PATH_MAGIC # LT_PATH_LD # ---------- # find the pathname to the GNU or non-GNU linker AC_DEFUN([LT_PATH_LD], [AC_REQUIRE([AC_PROG_CC])dnl AC_REQUIRE([AC_CANONICAL_HOST])dnl AC_REQUIRE([AC_CANONICAL_BUILD])dnl m4_require([_LT_DECL_SED])dnl 
m4_require([_LT_DECL_EGREP])dnl m4_require([_LT_PROG_ECHO_BACKSLASH])dnl AC_ARG_WITH([gnu-ld], [AS_HELP_STRING([--with-gnu-ld], [assume the C compiler uses GNU ld @<:@default=no@:>@])], [test no = "$withval" || with_gnu_ld=yes], [with_gnu_ld=no])dnl ac_prog=ld if test yes = "$GCC"; then # Check if gcc -print-prog-name=ld gives a path. AC_MSG_CHECKING([for ld used by $CC]) case $host in *-*-mingw*) # gcc leaves a trailing carriage return, which upsets mingw ac_prog=`($CC -print-prog-name=ld) 2>&5 | tr -d '\015'` ;; *) ac_prog=`($CC -print-prog-name=ld) 2>&5` ;; esac case $ac_prog in # Accept absolute paths. [[\\/]]* | ?:[[\\/]]*) re_direlt='/[[^/]][[^/]]*/\.\./' # Canonicalize the pathname of ld ac_prog=`$ECHO "$ac_prog"| $SED 's%\\\\%/%g'` while $ECHO "$ac_prog" | $GREP "$re_direlt" > /dev/null 2>&1; do ac_prog=`$ECHO $ac_prog| $SED "s%$re_direlt%/%"` done test -z "$LD" && LD=$ac_prog ;; "") # If it fails, then pretend we aren't using GCC. ac_prog=ld ;; *) # If it is relative, then search for the first ld in PATH. with_gnu_ld=unknown ;; esac elif test yes = "$with_gnu_ld"; then AC_MSG_CHECKING([for GNU ld]) else AC_MSG_CHECKING([for non-GNU ld]) fi AC_CACHE_VAL(lt_cv_path_LD, [if test -z "$LD"; then lt_save_ifs=$IFS; IFS=$PATH_SEPARATOR for ac_dir in $PATH; do IFS=$lt_save_ifs test -z "$ac_dir" && ac_dir=. if test -f "$ac_dir/$ac_prog" || test -f "$ac_dir/$ac_prog$ac_exeext"; then lt_cv_path_LD=$ac_dir/$ac_prog # Check to see if the program is GNU ld. I'd rather use --version, # but apparently some variants of GNU ld only accept -v. # Break only if it was the GNU/non-GNU ld that we prefer. 
case `"$lt_cv_path_LD" -v 2>&1 &1 conftest.i cat conftest.i conftest.i >conftest2.i : ${lt_DD:=$DD} AC_PATH_PROGS_FEATURE_CHECK([lt_DD], [dd], [if "$ac_path_lt_DD" bs=32 count=1 conftest.out 2>/dev/null; then cmp -s conftest.i conftest.out \ && ac_cv_path_lt_DD="$ac_path_lt_DD" ac_path_lt_DD_found=: fi]) rm -f conftest.i conftest2.i conftest.out]) ])# _LT_PATH_DD # _LT_CMD_TRUNCATE # ---------------- # find command to truncate a binary pipe m4_defun([_LT_CMD_TRUNCATE], [m4_require([_LT_PATH_DD]) AC_CACHE_CHECK([how to truncate binary pipes], [lt_cv_truncate_bin], [printf 0123456789abcdef0123456789abcdef >conftest.i cat conftest.i conftest.i >conftest2.i lt_cv_truncate_bin= if "$ac_cv_path_lt_DD" bs=32 count=1 conftest.out 2>/dev/null; then cmp -s conftest.i conftest.out \ && lt_cv_truncate_bin="$ac_cv_path_lt_DD bs=4096 count=1" fi rm -f conftest.i conftest2.i conftest.out test -z "$lt_cv_truncate_bin" && lt_cv_truncate_bin="$SED -e 4q"]) _LT_DECL([lt_truncate_bin], [lt_cv_truncate_bin], [1], [Command to truncate a binary pipe]) ])# _LT_CMD_TRUNCATE # _LT_CHECK_MAGIC_METHOD # ---------------------- # how to check for library dependencies # -- PORTME fill in with the dynamic library characteristics m4_defun([_LT_CHECK_MAGIC_METHOD], [m4_require([_LT_DECL_EGREP]) m4_require([_LT_DECL_OBJDUMP]) AC_CACHE_CHECK([how to recognize dependent libraries], lt_cv_deplibs_check_method, [lt_cv_file_magic_cmd='$MAGIC_CMD' lt_cv_file_magic_test_file= lt_cv_deplibs_check_method='unknown' # Need to set the preceding variable on all platforms that support # interlibrary dependencies. # 'none' -- dependencies not supported. # 'unknown' -- same as none, but documents that we really don't know. # 'pass_all' -- all dependencies passed with no checks. # 'test_compile' -- check by making test program. # 'file_magic [[regex]]' -- check by looking for files in library path # that responds to the $file_magic_cmd with a given extended regex. 
# If you have 'file' or equivalent on your system and you're not sure # whether 'pass_all' will *always* work, you probably want this one. case $host_os in aix[[4-9]]*) lt_cv_deplibs_check_method=pass_all ;; beos*) lt_cv_deplibs_check_method=pass_all ;; bsdi[[45]]*) lt_cv_deplibs_check_method='file_magic ELF [[0-9]][[0-9]]*-bit [[ML]]SB (shared object|dynamic lib)' lt_cv_file_magic_cmd='/usr/bin/file -L' lt_cv_file_magic_test_file=/shlib/libc.so ;; cygwin*) # func_win32_libid is a shell function defined in ltmain.sh lt_cv_deplibs_check_method='file_magic ^x86 archive import|^x86 DLL' lt_cv_file_magic_cmd='func_win32_libid' ;; mingw* | pw32*) # Base MSYS/MinGW do not provide the 'file' command needed by # func_win32_libid shell function, so use a weaker test based on 'objdump', # unless we find 'file', for example because we are cross-compiling. if ( file / ) >/dev/null 2>&1; then lt_cv_deplibs_check_method='file_magic ^x86 archive import|^x86 DLL' lt_cv_file_magic_cmd='func_win32_libid' else # Keep this pattern in sync with the one in func_win32_libid. lt_cv_deplibs_check_method='file_magic file format (pei*-i386(.*architecture: i386)?|pe-arm-wince|pe-x86-64)' lt_cv_file_magic_cmd='$OBJDUMP -f' fi ;; cegcc*) # use the weaker test based on 'objdump'. See mingw*. lt_cv_deplibs_check_method='file_magic file format pe-arm-.*little(.*architecture: arm)?' lt_cv_file_magic_cmd='$OBJDUMP -f' ;; darwin* | rhapsody*) lt_cv_deplibs_check_method=pass_all ;; freebsd* | dragonfly*) if echo __ELF__ | $CC -E - | $GREP __ELF__ > /dev/null; then case $host_cpu in i*86 ) # Not sure whether the presence of OpenBSD here was a mistake. # Let's accept both of them until this is cleared up. 
lt_cv_deplibs_check_method='file_magic (FreeBSD|OpenBSD|DragonFly)/i[[3-9]]86 (compact )?demand paged shared library' lt_cv_file_magic_cmd=/usr/bin/file lt_cv_file_magic_test_file=`echo /usr/lib/libc.so.*` ;; esac else lt_cv_deplibs_check_method=pass_all fi ;; haiku*) lt_cv_deplibs_check_method=pass_all ;; hpux10.20* | hpux11*) lt_cv_file_magic_cmd=/usr/bin/file case $host_cpu in ia64*) lt_cv_deplibs_check_method='file_magic (s[[0-9]][[0-9]][[0-9]]|ELF-[[0-9]][[0-9]]) shared object file - IA64' lt_cv_file_magic_test_file=/usr/lib/hpux32/libc.so ;; hppa*64*) [lt_cv_deplibs_check_method='file_magic (s[0-9][0-9][0-9]|ELF[ -][0-9][0-9])(-bit)?( [LM]SB)? shared object( file)?[, -]* PA-RISC [0-9]\.[0-9]'] lt_cv_file_magic_test_file=/usr/lib/pa20_64/libc.sl ;; *) lt_cv_deplibs_check_method='file_magic (s[[0-9]][[0-9]][[0-9]]|PA-RISC[[0-9]]\.[[0-9]]) shared library' lt_cv_file_magic_test_file=/usr/lib/libc.sl ;; esac ;; interix[[3-9]]*) # PIC code is broken on Interix 3.x, that's why |\.a not |_pic\.a here lt_cv_deplibs_check_method='match_pattern /lib[[^/]]+(\.so|\.a)$' ;; irix5* | irix6* | nonstopux*) case $LD in *-32|*"-32 ") libmagic=32-bit;; *-n32|*"-n32 ") libmagic=N32;; *-64|*"-64 ") libmagic=64-bit;; *) libmagic=never-match;; esac lt_cv_deplibs_check_method=pass_all ;; # This must be glibc/ELF. 
linux* | k*bsd*-gnu | kopensolaris*-gnu | gnu*) lt_cv_deplibs_check_method=pass_all ;; netbsd*) if echo __ELF__ | $CC -E - | $GREP __ELF__ > /dev/null; then lt_cv_deplibs_check_method='match_pattern /lib[[^/]]+(\.so\.[[0-9]]+\.[[0-9]]+|_pic\.a)$' else lt_cv_deplibs_check_method='match_pattern /lib[[^/]]+(\.so|_pic\.a)$' fi ;; newos6*) lt_cv_deplibs_check_method='file_magic ELF [[0-9]][[0-9]]*-bit [[ML]]SB (executable|dynamic lib)' lt_cv_file_magic_cmd=/usr/bin/file lt_cv_file_magic_test_file=/usr/lib/libnls.so ;; *nto* | *qnx*) lt_cv_deplibs_check_method=pass_all ;; openbsd* | bitrig*) if test -z "`echo __ELF__ | $CC -E - | $GREP __ELF__`"; then lt_cv_deplibs_check_method='match_pattern /lib[[^/]]+(\.so\.[[0-9]]+\.[[0-9]]+|\.so|_pic\.a)$' else lt_cv_deplibs_check_method='match_pattern /lib[[^/]]+(\.so\.[[0-9]]+\.[[0-9]]+|_pic\.a)$' fi ;; osf3* | osf4* | osf5*) lt_cv_deplibs_check_method=pass_all ;; rdos*) lt_cv_deplibs_check_method=pass_all ;; solaris*) lt_cv_deplibs_check_method=pass_all ;; sysv5* | sco3.2v5* | sco5v6* | unixware* | OpenUNIX* | sysv4*uw2*) lt_cv_deplibs_check_method=pass_all ;; sysv4 | sysv4.3*) case $host_vendor in motorola) lt_cv_deplibs_check_method='file_magic ELF [[0-9]][[0-9]]*-bit [[ML]]SB (shared object|dynamic lib) M[[0-9]][[0-9]]* Version [[0-9]]' lt_cv_file_magic_test_file=`echo /usr/lib/libc.so*` ;; ncr) lt_cv_deplibs_check_method=pass_all ;; sequent) lt_cv_file_magic_cmd='/bin/file' lt_cv_deplibs_check_method='file_magic ELF [[0-9]][[0-9]]*-bit [[LM]]SB (shared object|dynamic lib )' ;; sni) lt_cv_file_magic_cmd='/bin/file' lt_cv_deplibs_check_method="file_magic ELF [[0-9]][[0-9]]*-bit [[LM]]SB dynamic lib" lt_cv_file_magic_test_file=/lib/libc.so ;; siemens) lt_cv_deplibs_check_method=pass_all ;; pc) lt_cv_deplibs_check_method=pass_all ;; esac ;; tpf*) lt_cv_deplibs_check_method=pass_all ;; os2*) lt_cv_deplibs_check_method=pass_all ;; esac ]) file_magic_glob= want_nocaseglob=no if test "$build" = "$host"; then case $host_os in mingw* | 
pw32*) if ( shopt | grep nocaseglob ) >/dev/null 2>&1; then want_nocaseglob=yes else file_magic_glob=`echo aAbBcCdDeEfFgGhHiIjJkKlLmMnNoOpPqQrRsStTuUvVwWxXyYzZ | $SED -e "s/\(..\)/s\/[[\1]]\/[[\1]]\/g;/g"` fi ;; esac fi file_magic_cmd=$lt_cv_file_magic_cmd deplibs_check_method=$lt_cv_deplibs_check_method test -z "$deplibs_check_method" && deplibs_check_method=unknown _LT_DECL([], [deplibs_check_method], [1], [Method to check whether dependent libraries are shared objects]) _LT_DECL([], [file_magic_cmd], [1], [Command to use when deplibs_check_method = "file_magic"]) _LT_DECL([], [file_magic_glob], [1], [How to find potential files when deplibs_check_method = "file_magic"]) _LT_DECL([], [want_nocaseglob], [1], [Find potential files using nocaseglob when deplibs_check_method = "file_magic"]) ])# _LT_CHECK_MAGIC_METHOD # LT_PATH_NM # ---------- # find the pathname to a BSD- or MS-compatible name lister AC_DEFUN([LT_PATH_NM], [AC_REQUIRE([AC_PROG_CC])dnl AC_CACHE_CHECK([for BSD- or MS-compatible name lister (nm)], lt_cv_path_NM, [if test -n "$NM"; then # Let the user override the test. lt_cv_path_NM=$NM else lt_nm_to_check=${ac_tool_prefix}nm if test -n "$ac_tool_prefix" && test "$build" = "$host"; then lt_nm_to_check="$lt_nm_to_check nm" fi for lt_tmp_nm in $lt_nm_to_check; do lt_save_ifs=$IFS; IFS=$PATH_SEPARATOR for ac_dir in $PATH /usr/ccs/bin/elf /usr/ccs/bin /usr/ucb /bin; do IFS=$lt_save_ifs test -z "$ac_dir" && ac_dir=. tmp_nm=$ac_dir/$lt_tmp_nm if test -f "$tmp_nm" || test -f "$tmp_nm$ac_exeext"; then # Check to see if the nm accepts a BSD-compat flag. 
# Adding the 'sed 1q' prevents false positives on HP-UX, which says: # nm: unknown option "B" ignored # Tru64's nm complains that /dev/null is an invalid object file # MSYS converts /dev/null to NUL, MinGW nm treats NUL as empty case $build_os in mingw*) lt_bad_file=conftest.nm/nofile ;; *) lt_bad_file=/dev/null ;; esac case `"$tmp_nm" -B $lt_bad_file 2>&1 | sed '1q'` in *$lt_bad_file* | *'Invalid file or object type'*) lt_cv_path_NM="$tmp_nm -B" break 2 ;; *) case `"$tmp_nm" -p /dev/null 2>&1 | sed '1q'` in */dev/null*) lt_cv_path_NM="$tmp_nm -p" break 2 ;; *) lt_cv_path_NM=${lt_cv_path_NM="$tmp_nm"} # keep the first match, but continue # so that we can try to find one that supports BSD flags ;; esac ;; esac fi done IFS=$lt_save_ifs done : ${lt_cv_path_NM=no} fi]) if test no != "$lt_cv_path_NM"; then NM=$lt_cv_path_NM else # Didn't find any BSD compatible name lister, look for dumpbin. if test -n "$DUMPBIN"; then : # Let the user override the test. else AC_CHECK_TOOLS(DUMPBIN, [dumpbin "link -dump"], :) case `$DUMPBIN -symbols -headers /dev/null 2>&1 | sed '1q'` in *COFF*) DUMPBIN="$DUMPBIN -symbols -headers" ;; *) DUMPBIN=: ;; esac fi AC_SUBST([DUMPBIN]) if test : != "$DUMPBIN"; then NM=$DUMPBIN fi fi test -z "$NM" && NM=nm AC_SUBST([NM]) _LT_DECL([], [NM], [1], [A BSD- or MS-compatible name lister])dnl AC_CACHE_CHECK([the name lister ($NM) interface], [lt_cv_nm_interface], [lt_cv_nm_interface="BSD nm" echo "int some_variable = 0;" > conftest.$ac_ext (eval echo "\"\$as_me:$LINENO: $ac_compile\"" >&AS_MESSAGE_LOG_FD) (eval "$ac_compile" 2>conftest.err) cat conftest.err >&AS_MESSAGE_LOG_FD (eval echo "\"\$as_me:$LINENO: $NM \\\"conftest.$ac_objext\\\"\"" >&AS_MESSAGE_LOG_FD) (eval "$NM \"conftest.$ac_objext\"" 2>conftest.err > conftest.out) cat conftest.err >&AS_MESSAGE_LOG_FD (eval echo "\"\$as_me:$LINENO: output\"" >&AS_MESSAGE_LOG_FD) cat conftest.out >&AS_MESSAGE_LOG_FD if $GREP 'External.*some_variable' conftest.out > /dev/null; then lt_cv_nm_interface="MS 
dumpbin" fi rm -f conftest*]) ])# LT_PATH_NM # Old names: AU_ALIAS([AM_PROG_NM], [LT_PATH_NM]) AU_ALIAS([AC_PROG_NM], [LT_PATH_NM]) dnl aclocal-1.4 backwards compatibility: dnl AC_DEFUN([AM_PROG_NM], []) dnl AC_DEFUN([AC_PROG_NM], []) # _LT_CHECK_SHAREDLIB_FROM_LINKLIB # -------------------------------- # how to determine the name of the shared library # associated with a specific link library. # -- PORTME fill in with the dynamic library characteristics m4_defun([_LT_CHECK_SHAREDLIB_FROM_LINKLIB], [m4_require([_LT_DECL_EGREP]) m4_require([_LT_DECL_OBJDUMP]) m4_require([_LT_DECL_DLLTOOL]) AC_CACHE_CHECK([how to associate runtime and link libraries], lt_cv_sharedlib_from_linklib_cmd, [lt_cv_sharedlib_from_linklib_cmd='unknown' case $host_os in cygwin* | mingw* | pw32* | cegcc*) # two different shell functions defined in ltmain.sh; # decide which one to use based on capabilities of $DLLTOOL case `$DLLTOOL --help 2>&1` in *--identify-strict*) lt_cv_sharedlib_from_linklib_cmd=func_cygming_dll_for_implib ;; *) lt_cv_sharedlib_from_linklib_cmd=func_cygming_dll_for_implib_fallback ;; esac ;; *) # fallback: assume linklib IS sharedlib lt_cv_sharedlib_from_linklib_cmd=$ECHO ;; esac ]) sharedlib_from_linklib_cmd=$lt_cv_sharedlib_from_linklib_cmd test -z "$sharedlib_from_linklib_cmd" && sharedlib_from_linklib_cmd=$ECHO _LT_DECL([], [sharedlib_from_linklib_cmd], [1], [Command to associate shared and link libraries]) ])# _LT_CHECK_SHAREDLIB_FROM_LINKLIB # _LT_PATH_MANIFEST_TOOL # ---------------------- # locate the manifest tool m4_defun([_LT_PATH_MANIFEST_TOOL], [AC_CHECK_TOOL(MANIFEST_TOOL, mt, :) test -z "$MANIFEST_TOOL" && MANIFEST_TOOL=mt AC_CACHE_CHECK([if $MANIFEST_TOOL is a manifest tool], [lt_cv_path_mainfest_tool], [lt_cv_path_mainfest_tool=no echo "$as_me:$LINENO: $MANIFEST_TOOL '-?'" >&AS_MESSAGE_LOG_FD $MANIFEST_TOOL '-?' 
2>conftest.err > conftest.out cat conftest.err >&AS_MESSAGE_LOG_FD if $GREP 'Manifest Tool' conftest.out > /dev/null; then lt_cv_path_mainfest_tool=yes fi rm -f conftest*]) if test yes != "$lt_cv_path_mainfest_tool"; then MANIFEST_TOOL=: fi _LT_DECL([], [MANIFEST_TOOL], [1], [Manifest tool])dnl ])# _LT_PATH_MANIFEST_TOOL # _LT_DLL_DEF_P([FILE]) # --------------------- # True iff FILE is a Windows DLL '.def' file. # Keep in sync with func_dll_def_p in the libtool script AC_DEFUN([_LT_DLL_DEF_P], [dnl test DEF = "`$SED -n dnl -e '\''s/^[[ ]]*//'\'' dnl Strip leading whitespace -e '\''/^\(;.*\)*$/d'\'' dnl Delete empty lines and comments -e '\''s/^\(EXPORTS\|LIBRARY\)\([[ ]].*\)*$/DEF/p'\'' dnl -e q dnl Only consider the first "real" line $1`" dnl ])# _LT_DLL_DEF_P # LT_LIB_M # -------- # check for math library AC_DEFUN([LT_LIB_M], [AC_REQUIRE([AC_CANONICAL_HOST])dnl LIBM= case $host in *-*-beos* | *-*-cegcc* | *-*-cygwin* | *-*-haiku* | *-*-pw32* | *-*-darwin*) # These system don't have libm, or don't need it ;; *-ncr-sysv4.3*) AC_CHECK_LIB(mw, _mwvalidcheckl, LIBM=-lmw) AC_CHECK_LIB(m, cos, LIBM="$LIBM -lm") ;; *) AC_CHECK_LIB(m, cos, LIBM=-lm) ;; esac AC_SUBST([LIBM]) ])# LT_LIB_M # Old name: AU_ALIAS([AC_CHECK_LIBM], [LT_LIB_M]) dnl aclocal-1.4 backwards compatibility: dnl AC_DEFUN([AC_CHECK_LIBM], []) # _LT_COMPILER_NO_RTTI([TAGNAME]) # ------------------------------- m4_defun([_LT_COMPILER_NO_RTTI], [m4_require([_LT_TAG_COMPILER])dnl _LT_TAGVAR(lt_prog_compiler_no_builtin_flag, $1)= if test yes = "$GCC"; then case $cc_basename in nvcc*) _LT_TAGVAR(lt_prog_compiler_no_builtin_flag, $1)=' -Xcompiler -fno-builtin' ;; *) _LT_TAGVAR(lt_prog_compiler_no_builtin_flag, $1)=' -fno-builtin' ;; esac _LT_COMPILER_OPTION([if $compiler supports -fno-rtti -fno-exceptions], lt_cv_prog_compiler_rtti_exceptions, [-fno-rtti -fno-exceptions], [], [_LT_TAGVAR(lt_prog_compiler_no_builtin_flag, $1)="$_LT_TAGVAR(lt_prog_compiler_no_builtin_flag, $1) -fno-rtti -fno-exceptions"]) fi 
_LT_TAGDECL([no_builtin_flag], [lt_prog_compiler_no_builtin_flag], [1], [Compiler flag to turn off builtin functions]) ])# _LT_COMPILER_NO_RTTI # _LT_CMD_GLOBAL_SYMBOLS # ---------------------- m4_defun([_LT_CMD_GLOBAL_SYMBOLS], [AC_REQUIRE([AC_CANONICAL_HOST])dnl AC_REQUIRE([AC_PROG_CC])dnl AC_REQUIRE([AC_PROG_AWK])dnl AC_REQUIRE([LT_PATH_NM])dnl AC_REQUIRE([LT_PATH_LD])dnl m4_require([_LT_DECL_SED])dnl m4_require([_LT_DECL_EGREP])dnl m4_require([_LT_TAG_COMPILER])dnl # Check for command to grab the raw symbol name followed by C symbol from nm. AC_MSG_CHECKING([command to parse $NM output from $compiler object]) AC_CACHE_VAL([lt_cv_sys_global_symbol_pipe], [ # These are sane defaults that work on at least a few old systems. # [They come from Ultrix. What could be older than Ultrix?!! ;)] # Character class describing NM global symbol codes. symcode='[[BCDEGRST]]' # Regexp to match symbols that can be accessed directly from C. sympat='\([[_A-Za-z]][[_A-Za-z0-9]]*\)' # Define system-specific variables. case $host_os in aix*) symcode='[[BCDT]]' ;; cygwin* | mingw* | pw32* | cegcc*) symcode='[[ABCDGISTW]]' ;; hpux*) if test ia64 = "$host_cpu"; then symcode='[[ABCDEGRST]]' fi ;; irix* | nonstopux*) symcode='[[BCDEGRST]]' ;; osf*) symcode='[[BCDEGQRST]]' ;; solaris*) symcode='[[BDRT]]' ;; sco3.2v5*) symcode='[[DT]]' ;; sysv4.2uw2*) symcode='[[DT]]' ;; sysv5* | sco5v6* | unixware* | OpenUNIX*) symcode='[[ABDT]]' ;; sysv4) symcode='[[DFNSTU]]' ;; esac # If we're using GNU nm, then use its standard symbol codes. case `$NM -V 2>&1` in *GNU* | *'with BFD'*) symcode='[[ABCDGIRSTW]]' ;; esac if test "$lt_cv_nm_interface" = "MS dumpbin"; then # Gets list of data symbols to import. lt_cv_sys_global_symbol_to_import="sed -n -e 's/^I .* \(.*\)$/\1/p'" # Adjust the below global symbol transforms to fixup imported variables. 
lt_cdecl_hook=" -e 's/^I .* \(.*\)$/extern __declspec(dllimport) char \1;/p'" lt_c_name_hook=" -e 's/^I .* \(.*\)$/ {\"\1\", (void *) 0},/p'" lt_c_name_lib_hook="\ -e 's/^I .* \(lib.*\)$/ {\"\1\", (void *) 0},/p'\ -e 's/^I .* \(.*\)$/ {\"lib\1\", (void *) 0},/p'" else # Disable hooks by default. lt_cv_sys_global_symbol_to_import= lt_cdecl_hook= lt_c_name_hook= lt_c_name_lib_hook= fi # Transform an extracted symbol line into a proper C declaration. # Some systems (esp. on ia64) link data and code symbols differently, # so use this general approach. lt_cv_sys_global_symbol_to_cdecl="sed -n"\ $lt_cdecl_hook\ " -e 's/^T .* \(.*\)$/extern int \1();/p'"\ " -e 's/^$symcode$symcode* .* \(.*\)$/extern char \1;/p'" # Transform an extracted symbol line into symbol name and symbol address lt_cv_sys_global_symbol_to_c_name_address="sed -n"\ $lt_c_name_hook\ " -e 's/^: \(.*\) .*$/ {\"\1\", (void *) 0},/p'"\ " -e 's/^$symcode$symcode* .* \(.*\)$/ {\"\1\", (void *) \&\1},/p'" # Transform an extracted symbol line into symbol name with lib prefix and # symbol address. lt_cv_sys_global_symbol_to_c_name_address_lib_prefix="sed -n"\ $lt_c_name_lib_hook\ " -e 's/^: \(.*\) .*$/ {\"\1\", (void *) 0},/p'"\ " -e 's/^$symcode$symcode* .* \(lib.*\)$/ {\"\1\", (void *) \&\1},/p'"\ " -e 's/^$symcode$symcode* .* \(.*\)$/ {\"lib\1\", (void *) \&\1},/p'" # Handle CRLF in mingw tool chain opt_cr= case $build_os in mingw*) opt_cr=`$ECHO 'x\{0,1\}' | tr x '\015'` # option cr in regexp ;; esac # Try without a prefix underscore, then with it. for ac_symprfx in "" "_"; do # Transform symcode, sympat, and symprfx into a raw symbol and a C symbol. symxfrm="\\1 $ac_symprfx\\2 \\2" # Write the raw and C identifiers. if test "$lt_cv_nm_interface" = "MS dumpbin"; then # Fake it for dumpbin and say T for any non-static function, # D for any global variable and I for any imported variable. # Also find C++ and __fastcall symbols from MSVC++, # which start with @ or ?. 
lt_cv_sys_global_symbol_pipe="$AWK ['"\ " {last_section=section; section=\$ 3};"\ " /^COFF SYMBOL TABLE/{for(i in hide) delete hide[i]};"\ " /Section length .*#relocs.*(pick any)/{hide[last_section]=1};"\ " /^ *Symbol name *: /{split(\$ 0,sn,\":\"); si=substr(sn[2],2)};"\ " /^ *Type *: code/{print \"T\",si,substr(si,length(prfx))};"\ " /^ *Type *: data/{print \"I\",si,substr(si,length(prfx))};"\ " \$ 0!~/External *\|/{next};"\ " / 0+ UNDEF /{next}; / UNDEF \([^|]\)*()/{next};"\ " {if(hide[section]) next};"\ " {f=\"D\"}; \$ 0~/\(\).*\|/{f=\"T\"};"\ " {split(\$ 0,a,/\||\r/); split(a[2],s)};"\ " s[1]~/^[@?]/{print f,s[1],s[1]; next};"\ " s[1]~prfx {split(s[1],t,\"@\"); print f,t[1],substr(t[1],length(prfx))}"\ " ' prfx=^$ac_symprfx]" else lt_cv_sys_global_symbol_pipe="sed -n -e 's/^.*[[ ]]\($symcode$symcode*\)[[ ]][[ ]]*$ac_symprfx$sympat$opt_cr$/$symxfrm/p'" fi lt_cv_sys_global_symbol_pipe="$lt_cv_sys_global_symbol_pipe | sed '/ __gnu_lto/d'" # Check to see that the pipe works correctly. pipe_works=no rm -f conftest* cat > conftest.$ac_ext <<_LT_EOF #ifdef __cplusplus extern "C" { #endif char nm_test_var; void nm_test_func(void); void nm_test_func(void){} #ifdef __cplusplus } #endif int main(){nm_test_var='a';nm_test_func();return(0);} _LT_EOF if AC_TRY_EVAL(ac_compile); then # Now try to grab the symbols. nlist=conftest.nm if AC_TRY_EVAL(NM conftest.$ac_objext \| "$lt_cv_sys_global_symbol_pipe" \> $nlist) && test -s "$nlist"; then # Try sorting and uniquifying the output. if sort "$nlist" | uniq > "$nlist"T; then mv -f "$nlist"T "$nlist" else rm -f "$nlist"T fi # Make sure that we snagged all the symbols we need. if $GREP ' nm_test_var$' "$nlist" >/dev/null; then if $GREP ' nm_test_func$' "$nlist" >/dev/null; then cat <<_LT_EOF > conftest.$ac_ext /* Keep this code in sync between libtool.m4, ltmain, lt_system.h, and tests. 
*/ #if defined _WIN32 || defined __CYGWIN__ || defined _WIN32_WCE /* DATA imports from DLLs on WIN32 can't be const, because runtime relocations are performed -- see ld's documentation on pseudo-relocs. */ # define LT@&t@_DLSYM_CONST #elif defined __osf__ /* This system does not cope well with relocations in const data. */ # define LT@&t@_DLSYM_CONST #else # define LT@&t@_DLSYM_CONST const #endif #ifdef __cplusplus extern "C" { #endif _LT_EOF # Now generate the symbol file. eval "$lt_cv_sys_global_symbol_to_cdecl"' < "$nlist" | $GREP -v main >> conftest.$ac_ext' cat <<_LT_EOF >> conftest.$ac_ext /* The mapping between symbol names and symbols. */ LT@&t@_DLSYM_CONST struct { const char *name; void *address; } lt__PROGRAM__LTX_preloaded_symbols[[]] = { { "@PROGRAM@", (void *) 0 }, _LT_EOF $SED "s/^$symcode$symcode* .* \(.*\)$/ {\"\1\", (void *) \&\1},/" < "$nlist" | $GREP -v main >> conftest.$ac_ext cat <<\_LT_EOF >> conftest.$ac_ext {0, (void *) 0} }; /* This works around a problem in FreeBSD linker */ #ifdef FREEBSD_WORKAROUND static const void *lt_preloaded_setup() { return lt__PROGRAM__LTX_preloaded_symbols; } #endif #ifdef __cplusplus } #endif _LT_EOF # Now try linking the two files. mv conftest.$ac_objext conftstm.$ac_objext lt_globsym_save_LIBS=$LIBS lt_globsym_save_CFLAGS=$CFLAGS LIBS=conftstm.$ac_objext CFLAGS="$CFLAGS$_LT_TAGVAR(lt_prog_compiler_no_builtin_flag, $1)" if AC_TRY_EVAL(ac_link) && test -s conftest$ac_exeext; then pipe_works=yes fi LIBS=$lt_globsym_save_LIBS CFLAGS=$lt_globsym_save_CFLAGS else echo "cannot find nm_test_func in $nlist" >&AS_MESSAGE_LOG_FD fi else echo "cannot find nm_test_var in $nlist" >&AS_MESSAGE_LOG_FD fi else echo "cannot run $lt_cv_sys_global_symbol_pipe" >&AS_MESSAGE_LOG_FD fi else echo "$progname: failed program was:" >&AS_MESSAGE_LOG_FD cat conftest.$ac_ext >&5 fi rm -rf conftest* conftst* # Do not use the global_symbol_pipe unless it works. 
if test yes = "$pipe_works"; then break else lt_cv_sys_global_symbol_pipe= fi done ]) if test -z "$lt_cv_sys_global_symbol_pipe"; then lt_cv_sys_global_symbol_to_cdecl= fi if test -z "$lt_cv_sys_global_symbol_pipe$lt_cv_sys_global_symbol_to_cdecl"; then AC_MSG_RESULT(failed) else AC_MSG_RESULT(ok) fi # Response file support. if test "$lt_cv_nm_interface" = "MS dumpbin"; then nm_file_list_spec='@' elif $NM --help 2>/dev/null | grep '[[@]]FILE' >/dev/null; then nm_file_list_spec='@' fi _LT_DECL([global_symbol_pipe], [lt_cv_sys_global_symbol_pipe], [1], [Take the output of nm and produce a listing of raw symbols and C names]) _LT_DECL([global_symbol_to_cdecl], [lt_cv_sys_global_symbol_to_cdecl], [1], [Transform the output of nm in a proper C declaration]) _LT_DECL([global_symbol_to_import], [lt_cv_sys_global_symbol_to_import], [1], [Transform the output of nm into a list of symbols to manually relocate]) _LT_DECL([global_symbol_to_c_name_address], [lt_cv_sys_global_symbol_to_c_name_address], [1], [Transform the output of nm in a C name address pair]) _LT_DECL([global_symbol_to_c_name_address_lib_prefix], [lt_cv_sys_global_symbol_to_c_name_address_lib_prefix], [1], [Transform the output of nm in a C name address pair when lib prefix is needed]) _LT_DECL([nm_interface], [lt_cv_nm_interface], [1], [The name lister interface]) _LT_DECL([], [nm_file_list_spec], [1], [Specify filename containing input files for $NM]) ]) # _LT_CMD_GLOBAL_SYMBOLS # _LT_COMPILER_PIC([TAGNAME]) # --------------------------- m4_defun([_LT_COMPILER_PIC], [m4_require([_LT_TAG_COMPILER])dnl _LT_TAGVAR(lt_prog_compiler_wl, $1)= _LT_TAGVAR(lt_prog_compiler_pic, $1)= _LT_TAGVAR(lt_prog_compiler_static, $1)= m4_if([$1], [CXX], [ # C++ specific cases for pic, static, wl, etc. if test yes = "$GXX"; then _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,' _LT_TAGVAR(lt_prog_compiler_static, $1)='-static' case $host_os in aix*) # All AIX code is PIC. 
if test ia64 = "$host_cpu"; then # AIX 5 now supports IA64 processor _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic' fi _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fPIC' ;; amigaos*) case $host_cpu in powerpc) # see comment about AmigaOS4 .so support _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fPIC' ;; m68k) # FIXME: we need at least 68020 code to build shared libraries, but # adding the '-m68020' flag to GCC prevents building anything better, # like '-m68040'. _LT_TAGVAR(lt_prog_compiler_pic, $1)='-m68020 -resident32 -malways-restore-a4' ;; esac ;; beos* | irix5* | irix6* | nonstopux* | osf3* | osf4* | osf5*) # PIC is the default for these OSes. ;; mingw* | cygwin* | os2* | pw32* | cegcc*) # This hack is so that the source file can tell whether it is being # built for inclusion in a dll (and should export symbols for example). # Although the cygwin gcc ignores -fPIC, still need this for old-style # (--disable-auto-import) libraries m4_if([$1], [GCJ], [], [_LT_TAGVAR(lt_prog_compiler_pic, $1)='-DDLL_EXPORT']) case $host_os in os2*) _LT_TAGVAR(lt_prog_compiler_static, $1)='$wl-static' ;; esac ;; darwin* | rhapsody*) # PIC is the default on this platform # Common symbols not allowed in MH_DYLIB files _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fno-common' ;; *djgpp*) # DJGPP does not support shared libraries at all _LT_TAGVAR(lt_prog_compiler_pic, $1)= ;; haiku*) # PIC is the default for Haiku. # The "-static" flag exists, but is broken. _LT_TAGVAR(lt_prog_compiler_static, $1)= ;; interix[[3-9]]*) # Interix 3.x gcc -fpic/-fPIC options generate broken code. # Instead, we relocate shared libraries at runtime. ;; sysv4*MP*) if test -d /usr/nec; then _LT_TAGVAR(lt_prog_compiler_pic, $1)=-Kconform_pic fi ;; hpux*) # PIC is the default for 64-bit PA HP-UX, but not for 32-bit # PA HP-UX. On IA64 HP-UX, PIC is the default but the pic flag # sets the default TLS model and affects inlining. 
case $host_cpu in hppa*64*) ;; *) _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fPIC' ;; esac ;; *qnx* | *nto*) # QNX uses GNU C++, but need to define -shared option too, otherwise # it will coredump. _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fPIC -shared' ;; *) _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fPIC' ;; esac else case $host_os in aix[[4-9]]*) # All AIX code is PIC. if test ia64 = "$host_cpu"; then # AIX 5 now supports IA64 processor _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic' else _LT_TAGVAR(lt_prog_compiler_static, $1)='-bnso -bI:/lib/syscalls.exp' fi ;; chorus*) case $cc_basename in cxch68*) # Green Hills C++ Compiler # _LT_TAGVAR(lt_prog_compiler_static, $1)="--no_auto_instantiation -u __main -u __premain -u _abort -r $COOL_DIR/lib/libOrb.a $MVME_DIR/lib/CC/libC.a $MVME_DIR/lib/classix/libcx.s.a" ;; esac ;; mingw* | cygwin* | os2* | pw32* | cegcc*) # This hack is so that the source file can tell whether it is being # built for inclusion in a dll (and should export symbols for example). m4_if([$1], [GCJ], [], [_LT_TAGVAR(lt_prog_compiler_pic, $1)='-DDLL_EXPORT']) ;; dgux*) case $cc_basename in ec++*) _LT_TAGVAR(lt_prog_compiler_pic, $1)='-KPIC' ;; ghcx*) # Green Hills C++ Compiler _LT_TAGVAR(lt_prog_compiler_pic, $1)='-pic' ;; *) ;; esac ;; freebsd* | dragonfly*) # FreeBSD uses GNU C++ ;; hpux9* | hpux10* | hpux11*) case $cc_basename in CC*) _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,' _LT_TAGVAR(lt_prog_compiler_static, $1)='$wl-a ${wl}archive' if test ia64 != "$host_cpu"; then _LT_TAGVAR(lt_prog_compiler_pic, $1)='+Z' fi ;; aCC*) _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,' _LT_TAGVAR(lt_prog_compiler_static, $1)='$wl-a ${wl}archive' case $host_cpu in hppa*64*|ia64*) # +Z the default ;; *) _LT_TAGVAR(lt_prog_compiler_pic, $1)='+Z' ;; esac ;; *) ;; esac ;; interix*) # This is c89, which is MS Visual C++ (no shared libs) # Anyone wants to do a port? 
;; irix5* | irix6* | nonstopux*) case $cc_basename in CC*) _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,' _LT_TAGVAR(lt_prog_compiler_static, $1)='-non_shared' # CC pic flag -KPIC is the default. ;; *) ;; esac ;; linux* | k*bsd*-gnu | kopensolaris*-gnu | gnu*) case $cc_basename in KCC*) # KAI C++ Compiler _LT_TAGVAR(lt_prog_compiler_wl, $1)='--backend -Wl,' _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fPIC' ;; ecpc* ) # old Intel C++ for x86_64, which still supported -KPIC. _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,' _LT_TAGVAR(lt_prog_compiler_pic, $1)='-KPIC' _LT_TAGVAR(lt_prog_compiler_static, $1)='-static' ;; icpc* ) # Intel C++, used to be incompatible with GCC. # ICC 10 doesn't accept -KPIC any more. _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,' _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fPIC' _LT_TAGVAR(lt_prog_compiler_static, $1)='-static' ;; pgCC* | pgcpp*) # Portland Group C++ compiler _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,' _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fpic' _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic' ;; cxx*) # Compaq C++ # Make sure the PIC flag is empty. It appears that all Alpha # Linux and Compaq Tru64 Unix objects are PIC. _LT_TAGVAR(lt_prog_compiler_pic, $1)= _LT_TAGVAR(lt_prog_compiler_static, $1)='-non_shared' ;; xlc* | xlC* | bgxl[[cC]]* | mpixl[[cC]]*) # IBM XL 8.0, 9.0 on PPC and BlueGene _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,' _LT_TAGVAR(lt_prog_compiler_pic, $1)='-qpic' _LT_TAGVAR(lt_prog_compiler_static, $1)='-qstaticlink' ;; *) case `$CC -V 2>&1 | sed 5q` in *Sun\ C*) # Sun C++ 5.9 _LT_TAGVAR(lt_prog_compiler_pic, $1)='-KPIC' _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic' _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Qoption ld ' ;; esac ;; esac ;; lynxos*) ;; m88k*) ;; mvs*) case $cc_basename in cxx*) _LT_TAGVAR(lt_prog_compiler_pic, $1)='-W c,exportall' ;; *) ;; esac ;; netbsd*) ;; *qnx* | *nto*) # QNX uses GNU C++, but need to define -shared option too, otherwise # it will coredump. 
_LT_TAGVAR(lt_prog_compiler_pic, $1)='-fPIC -shared' ;; osf3* | osf4* | osf5*) case $cc_basename in KCC*) _LT_TAGVAR(lt_prog_compiler_wl, $1)='--backend -Wl,' ;; RCC*) # Rational C++ 2.4.1 _LT_TAGVAR(lt_prog_compiler_pic, $1)='-pic' ;; cxx*) # Digital/Compaq C++ _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,' # Make sure the PIC flag is empty. It appears that all Alpha # Linux and Compaq Tru64 Unix objects are PIC. _LT_TAGVAR(lt_prog_compiler_pic, $1)= _LT_TAGVAR(lt_prog_compiler_static, $1)='-non_shared' ;; *) ;; esac ;; psos*) ;; solaris*) case $cc_basename in CC* | sunCC*) # Sun C++ 4.2, 5.x and Centerline C++ _LT_TAGVAR(lt_prog_compiler_pic, $1)='-KPIC' _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic' _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Qoption ld ' ;; gcx*) # Green Hills C++ Compiler _LT_TAGVAR(lt_prog_compiler_pic, $1)='-PIC' ;; *) ;; esac ;; sunos4*) case $cc_basename in CC*) # Sun C++ 4.x _LT_TAGVAR(lt_prog_compiler_pic, $1)='-pic' _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic' ;; lcc*) # Lucid _LT_TAGVAR(lt_prog_compiler_pic, $1)='-pic' ;; *) ;; esac ;; sysv5* | unixware* | sco3.2v5* | sco5v6* | OpenUNIX*) case $cc_basename in CC*) _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,' _LT_TAGVAR(lt_prog_compiler_pic, $1)='-KPIC' _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic' ;; esac ;; tandem*) case $cc_basename in NCC*) # NonStop-UX NCC 3.20 _LT_TAGVAR(lt_prog_compiler_pic, $1)='-KPIC' ;; *) ;; esac ;; vxworks*) ;; *) _LT_TAGVAR(lt_prog_compiler_can_build_shared, $1)=no ;; esac fi ], [ if test yes = "$GCC"; then _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,' _LT_TAGVAR(lt_prog_compiler_static, $1)='-static' case $host_os in aix*) # All AIX code is PIC. 
if test ia64 = "$host_cpu"; then # AIX 5 now supports IA64 processor _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic' fi _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fPIC' ;; amigaos*) case $host_cpu in powerpc) # see comment about AmigaOS4 .so support _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fPIC' ;; m68k) # FIXME: we need at least 68020 code to build shared libraries, but # adding the '-m68020' flag to GCC prevents building anything better, # like '-m68040'. _LT_TAGVAR(lt_prog_compiler_pic, $1)='-m68020 -resident32 -malways-restore-a4' ;; esac ;; beos* | irix5* | irix6* | nonstopux* | osf3* | osf4* | osf5*) # PIC is the default for these OSes. ;; mingw* | cygwin* | pw32* | os2* | cegcc*) # This hack is so that the source file can tell whether it is being # built for inclusion in a dll (and should export symbols for example). # Although the cygwin gcc ignores -fPIC, still need this for old-style # (--disable-auto-import) libraries m4_if([$1], [GCJ], [], [_LT_TAGVAR(lt_prog_compiler_pic, $1)='-DDLL_EXPORT']) case $host_os in os2*) _LT_TAGVAR(lt_prog_compiler_static, $1)='$wl-static' ;; esac ;; darwin* | rhapsody*) # PIC is the default on this platform # Common symbols not allowed in MH_DYLIB files _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fno-common' ;; haiku*) # PIC is the default for Haiku. # The "-static" flag exists, but is broken. _LT_TAGVAR(lt_prog_compiler_static, $1)= ;; hpux*) # PIC is the default for 64-bit PA HP-UX, but not for 32-bit # PA HP-UX. On IA64 HP-UX, PIC is the default but the pic flag # sets the default TLS model and affects inlining. case $host_cpu in hppa*64*) # +Z the default ;; *) _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fPIC' ;; esac ;; interix[[3-9]]*) # Interix 3.x gcc -fpic/-fPIC options generate broken code. # Instead, we relocate shared libraries at runtime. ;; msdosdjgpp*) # Just because we use GCC doesn't mean we suddenly get shared libraries # on systems that don't support them. 
_LT_TAGVAR(lt_prog_compiler_can_build_shared, $1)=no enable_shared=no ;; *nto* | *qnx*) # QNX uses GNU C++, but need to define -shared option too, otherwise # it will coredump. _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fPIC -shared' ;; sysv4*MP*) if test -d /usr/nec; then _LT_TAGVAR(lt_prog_compiler_pic, $1)=-Kconform_pic fi ;; *) _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fPIC' ;; esac case $cc_basename in nvcc*) # Cuda Compiler Driver 2.2 _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Xlinker ' if test -n "$_LT_TAGVAR(lt_prog_compiler_pic, $1)"; then _LT_TAGVAR(lt_prog_compiler_pic, $1)="-Xcompiler $_LT_TAGVAR(lt_prog_compiler_pic, $1)" fi ;; esac else # PORTME Check for flag to pass linker flags through the system compiler. case $host_os in aix*) _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,' if test ia64 = "$host_cpu"; then # AIX 5 now supports IA64 processor _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic' else _LT_TAGVAR(lt_prog_compiler_static, $1)='-bnso -bI:/lib/syscalls.exp' fi ;; darwin* | rhapsody*) # PIC is the default on this platform # Common symbols not allowed in MH_DYLIB files _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fno-common' case $cc_basename in nagfor*) # NAG Fortran compiler _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,-Wl,,' _LT_TAGVAR(lt_prog_compiler_pic, $1)='-PIC' _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic' ;; esac ;; mingw* | cygwin* | pw32* | os2* | cegcc*) # This hack is so that the source file can tell whether it is being # built for inclusion in a dll (and should export symbols for example). m4_if([$1], [GCJ], [], [_LT_TAGVAR(lt_prog_compiler_pic, $1)='-DDLL_EXPORT']) case $host_os in os2*) _LT_TAGVAR(lt_prog_compiler_static, $1)='$wl-static' ;; esac ;; hpux9* | hpux10* | hpux11*) _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,' # PIC is the default for IA64 HP-UX and 64-bit HP-UX, but # not for PA HP-UX. 
case $host_cpu in hppa*64*|ia64*) # +Z the default ;; *) _LT_TAGVAR(lt_prog_compiler_pic, $1)='+Z' ;; esac # Is there a better lt_prog_compiler_static that works with the bundled CC? _LT_TAGVAR(lt_prog_compiler_static, $1)='$wl-a ${wl}archive' ;; irix5* | irix6* | nonstopux*) _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,' # PIC (with -KPIC) is the default. _LT_TAGVAR(lt_prog_compiler_static, $1)='-non_shared' ;; linux* | k*bsd*-gnu | kopensolaris*-gnu | gnu*) case $cc_basename in # old Intel for x86_64, which still supported -KPIC. ecc*) _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,' _LT_TAGVAR(lt_prog_compiler_pic, $1)='-KPIC' _LT_TAGVAR(lt_prog_compiler_static, $1)='-static' ;; # icc used to be incompatible with GCC. # ICC 10 doesn't accept -KPIC any more. icc* | ifort*) _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,' _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fPIC' _LT_TAGVAR(lt_prog_compiler_static, $1)='-static' ;; # Lahey Fortran 8.1. lf95*) _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,' _LT_TAGVAR(lt_prog_compiler_pic, $1)='--shared' _LT_TAGVAR(lt_prog_compiler_static, $1)='--static' ;; nagfor*) # NAG Fortran compiler _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,-Wl,,' _LT_TAGVAR(lt_prog_compiler_pic, $1)='-PIC' _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic' ;; tcc*) # Fabrice Bellard et al's Tiny C Compiler _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,' _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fPIC' _LT_TAGVAR(lt_prog_compiler_static, $1)='-static' ;; pgcc* | pgf77* | pgf90* | pgf95* | pgfortran*) # Portland Group compilers (*not* the Pentium gcc compiler, # which looks to be a dead project) _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,' _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fpic' _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic' ;; ccc*) _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,' # All Alpha code is PIC. 
_LT_TAGVAR(lt_prog_compiler_static, $1)='-non_shared' ;; xl* | bgxl* | bgf* | mpixl*) # IBM XL C 8.0/Fortran 10.1, 11.1 on PPC and BlueGene _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,' _LT_TAGVAR(lt_prog_compiler_pic, $1)='-qpic' _LT_TAGVAR(lt_prog_compiler_static, $1)='-qstaticlink' ;; *) case `$CC -V 2>&1 | sed 5q` in *Sun\ Ceres\ Fortran* | *Sun*Fortran*\ [[1-7]].* | *Sun*Fortran*\ 8.[[0-3]]*) # Sun Fortran 8.3 passes all unrecognized flags to the linker _LT_TAGVAR(lt_prog_compiler_pic, $1)='-KPIC' _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic' _LT_TAGVAR(lt_prog_compiler_wl, $1)='' ;; *Sun\ F* | *Sun*Fortran*) _LT_TAGVAR(lt_prog_compiler_pic, $1)='-KPIC' _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic' _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Qoption ld ' ;; *Sun\ C*) # Sun C 5.9 _LT_TAGVAR(lt_prog_compiler_pic, $1)='-KPIC' _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic' _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,' ;; *Intel*\ [[CF]]*Compiler*) _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,' _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fPIC' _LT_TAGVAR(lt_prog_compiler_static, $1)='-static' ;; *Portland\ Group*) _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,' _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fpic' _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic' ;; esac ;; esac ;; newsos6) _LT_TAGVAR(lt_prog_compiler_pic, $1)='-KPIC' _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic' ;; *nto* | *qnx*) # QNX uses GNU C++, but need to define -shared option too, otherwise # it will coredump. _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fPIC -shared' ;; osf3* | osf4* | osf5*) _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,' # All OSF/1 code is PIC. 
_LT_TAGVAR(lt_prog_compiler_static, $1)='-non_shared' ;; rdos*) _LT_TAGVAR(lt_prog_compiler_static, $1)='-non_shared' ;; solaris*) _LT_TAGVAR(lt_prog_compiler_pic, $1)='-KPIC' _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic' case $cc_basename in f77* | f90* | f95* | sunf77* | sunf90* | sunf95*) _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Qoption ld ';; *) _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,';; esac ;; sunos4*) _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Qoption ld ' _LT_TAGVAR(lt_prog_compiler_pic, $1)='-PIC' _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic' ;; sysv4 | sysv4.2uw2* | sysv4.3*) _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,' _LT_TAGVAR(lt_prog_compiler_pic, $1)='-KPIC' _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic' ;; sysv4*MP*) if test -d /usr/nec; then _LT_TAGVAR(lt_prog_compiler_pic, $1)='-Kconform_pic' _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic' fi ;; sysv5* | unixware* | sco3.2v5* | sco5v6* | OpenUNIX*) _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,' _LT_TAGVAR(lt_prog_compiler_pic, $1)='-KPIC' _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic' ;; unicos*) _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,' _LT_TAGVAR(lt_prog_compiler_can_build_shared, $1)=no ;; uts4*) _LT_TAGVAR(lt_prog_compiler_pic, $1)='-pic' _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic' ;; *) _LT_TAGVAR(lt_prog_compiler_can_build_shared, $1)=no ;; esac fi ]) case $host_os in # For platforms that do not support PIC, -DPIC is meaningless: *djgpp*) _LT_TAGVAR(lt_prog_compiler_pic, $1)= ;; *) _LT_TAGVAR(lt_prog_compiler_pic, $1)="$_LT_TAGVAR(lt_prog_compiler_pic, $1)@&t@m4_if([$1],[],[ -DPIC],[m4_if([$1],[CXX],[ -DPIC],[])])" ;; esac AC_CACHE_CHECK([for $compiler option to produce PIC], [_LT_TAGVAR(lt_cv_prog_compiler_pic, $1)], [_LT_TAGVAR(lt_cv_prog_compiler_pic, $1)=$_LT_TAGVAR(lt_prog_compiler_pic, $1)]) _LT_TAGVAR(lt_prog_compiler_pic, $1)=$_LT_TAGVAR(lt_cv_prog_compiler_pic, $1) # # Check to make sure the PIC flag actually works. 
# if test -n "$_LT_TAGVAR(lt_prog_compiler_pic, $1)"; then _LT_COMPILER_OPTION([if $compiler PIC flag $_LT_TAGVAR(lt_prog_compiler_pic, $1) works], [_LT_TAGVAR(lt_cv_prog_compiler_pic_works, $1)], [$_LT_TAGVAR(lt_prog_compiler_pic, $1)@&t@m4_if([$1],[],[ -DPIC],[m4_if([$1],[CXX],[ -DPIC],[])])], [], [case $_LT_TAGVAR(lt_prog_compiler_pic, $1) in "" | " "*) ;; *) _LT_TAGVAR(lt_prog_compiler_pic, $1)=" $_LT_TAGVAR(lt_prog_compiler_pic, $1)" ;; esac], [_LT_TAGVAR(lt_prog_compiler_pic, $1)= _LT_TAGVAR(lt_prog_compiler_can_build_shared, $1)=no]) fi _LT_TAGDECL([pic_flag], [lt_prog_compiler_pic], [1], [Additional compiler flags for building library objects]) _LT_TAGDECL([wl], [lt_prog_compiler_wl], [1], [How to pass a linker flag through the compiler]) # # Check to make sure the static flag actually works. # wl=$_LT_TAGVAR(lt_prog_compiler_wl, $1) eval lt_tmp_static_flag=\"$_LT_TAGVAR(lt_prog_compiler_static, $1)\" _LT_LINKER_OPTION([if $compiler static flag $lt_tmp_static_flag works], _LT_TAGVAR(lt_cv_prog_compiler_static_works, $1), $lt_tmp_static_flag, [], [_LT_TAGVAR(lt_prog_compiler_static, $1)=]) _LT_TAGDECL([link_static_flag], [lt_prog_compiler_static], [1], [Compiler flag to prevent dynamic linking]) ])# _LT_COMPILER_PIC # _LT_LINKER_SHLIBS([TAGNAME]) # ---------------------------- # See if the linker supports building shared libraries. 
m4_defun([_LT_LINKER_SHLIBS], [AC_REQUIRE([LT_PATH_LD])dnl AC_REQUIRE([LT_PATH_NM])dnl m4_require([_LT_PATH_MANIFEST_TOOL])dnl m4_require([_LT_FILEUTILS_DEFAULTS])dnl m4_require([_LT_DECL_EGREP])dnl m4_require([_LT_DECL_SED])dnl m4_require([_LT_CMD_GLOBAL_SYMBOLS])dnl m4_require([_LT_TAG_COMPILER])dnl AC_MSG_CHECKING([whether the $compiler linker ($LD) supports shared libraries]) m4_if([$1], [CXX], [ _LT_TAGVAR(export_symbols_cmds, $1)='$NM $libobjs $convenience | $global_symbol_pipe | $SED '\''s/.* //'\'' | sort | uniq > $export_symbols' _LT_TAGVAR(exclude_expsyms, $1)=['_GLOBAL_OFFSET_TABLE_|_GLOBAL__F[ID]_.*'] case $host_os in aix[[4-9]]*) # If we're using GNU nm, then we don't want the "-C" option. # -C means demangle to GNU nm, but means don't demangle to AIX nm. # Without the "-l" option, or with the "-B" option, AIX nm treats # weak defined symbols like other global defined symbols, whereas # GNU nm marks them as "W". # While the 'weak' keyword is ignored in the Export File, we need # it in the Import File for the 'aix-soname' feature, so we have # to replace the "-B" option with "-P" for AIX nm. 
if $NM -V 2>&1 | $GREP 'GNU' > /dev/null; then _LT_TAGVAR(export_symbols_cmds, $1)='$NM -Bpg $libobjs $convenience | awk '\''{ if (((\$ 2 == "T") || (\$ 2 == "D") || (\$ 2 == "B") || (\$ 2 == "W")) && ([substr](\$ 3,1,1) != ".")) { if (\$ 2 == "W") { print \$ 3 " weak" } else { print \$ 3 } } }'\'' | sort -u > $export_symbols' else _LT_TAGVAR(export_symbols_cmds, $1)='`func_echo_all $NM | $SED -e '\''s/B\([[^B]]*\)$/P\1/'\''` -PCpgl $libobjs $convenience | awk '\''{ if (((\$ 2 == "T") || (\$ 2 == "D") || (\$ 2 == "B") || (\$ 2 == "W") || (\$ 2 == "V") || (\$ 2 == "Z")) && ([substr](\$ 1,1,1) != ".")) { if ((\$ 2 == "W") || (\$ 2 == "V") || (\$ 2 == "Z")) { print \$ 1 " weak" } else { print \$ 1 } } }'\'' | sort -u > $export_symbols' fi ;; pw32*) _LT_TAGVAR(export_symbols_cmds, $1)=$ltdll_cmds ;; cygwin* | mingw* | cegcc*) case $cc_basename in cl*) _LT_TAGVAR(exclude_expsyms, $1)='_NULL_IMPORT_DESCRIPTOR|_IMPORT_DESCRIPTOR_.*' ;; *) _LT_TAGVAR(export_symbols_cmds, $1)='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[[BCDGRS]][[ ]]/s/.*[[ ]]\([[^ ]]*\)/\1 DATA/;s/^.*[[ ]]__nm__\([[^ ]]*\)[[ ]][[^ ]]*/\1 DATA/;/^I[[ ]]/d;/^[[AITW]][[ ]]/s/.* //'\'' | sort | uniq > $export_symbols' _LT_TAGVAR(exclude_expsyms, $1)=['[_]+GLOBAL_OFFSET_TABLE_|[_]+GLOBAL__[FID]_.*|[_]+head_[A-Za-z0-9_]+_dll|[A-Za-z0-9_]+_dll_iname'] ;; esac ;; *) _LT_TAGVAR(export_symbols_cmds, $1)='$NM $libobjs $convenience | $global_symbol_pipe | $SED '\''s/.* //'\'' | sort | uniq > $export_symbols' ;; esac ], [ runpath_var= _LT_TAGVAR(allow_undefined_flag, $1)= _LT_TAGVAR(always_export_symbols, $1)=no _LT_TAGVAR(archive_cmds, $1)= _LT_TAGVAR(archive_expsym_cmds, $1)= _LT_TAGVAR(compiler_needs_object, $1)=no _LT_TAGVAR(enable_shared_with_static_runtimes, $1)=no _LT_TAGVAR(export_dynamic_flag_spec, $1)= _LT_TAGVAR(export_symbols_cmds, $1)='$NM $libobjs $convenience | $global_symbol_pipe | $SED '\''s/.* //'\'' | sort | uniq > $export_symbols' _LT_TAGVAR(hardcode_automatic, $1)=no 
_LT_TAGVAR(hardcode_direct, $1)=no _LT_TAGVAR(hardcode_direct_absolute, $1)=no _LT_TAGVAR(hardcode_libdir_flag_spec, $1)= _LT_TAGVAR(hardcode_libdir_separator, $1)= _LT_TAGVAR(hardcode_minus_L, $1)=no _LT_TAGVAR(hardcode_shlibpath_var, $1)=unsupported _LT_TAGVAR(inherit_rpath, $1)=no _LT_TAGVAR(link_all_deplibs, $1)=unknown _LT_TAGVAR(module_cmds, $1)= _LT_TAGVAR(module_expsym_cmds, $1)= _LT_TAGVAR(old_archive_from_new_cmds, $1)= _LT_TAGVAR(old_archive_from_expsyms_cmds, $1)= _LT_TAGVAR(thread_safe_flag_spec, $1)= _LT_TAGVAR(whole_archive_flag_spec, $1)= # include_expsyms should be a list of space-separated symbols to be *always* # included in the symbol list _LT_TAGVAR(include_expsyms, $1)= # exclude_expsyms can be an extended regexp of symbols to exclude # it will be wrapped by ' (' and ')$', so one must not match beginning or # end of line. Example: 'a|bc|.*d.*' will exclude the symbols 'a' and 'bc', # as well as any symbol that contains 'd'. _LT_TAGVAR(exclude_expsyms, $1)=['_GLOBAL_OFFSET_TABLE_|_GLOBAL__F[ID]_.*'] # Although _GLOBAL_OFFSET_TABLE_ is a valid symbol C name, most a.out # platforms (ab)use it in PIC code, but their linkers get confused if # the symbol is explicitly referenced. Since portable code cannot # rely on this symbol name, it's probably fine to never include it in # preloaded symbol tables. # Exclude shared library initialization/finalization symbols. dnl Note also adjust exclude_expsyms for C++ above. extract_expsyms_cmds= case $host_os in cygwin* | mingw* | pw32* | cegcc*) # FIXME: the MSVC++ port hasn't been tested in a loooong time # When not using gcc, we currently assume that we are using # Microsoft Visual C++. 
if test yes != "$GCC"; then with_gnu_ld=no fi ;; interix*) # we just hope/assume this is gcc and not c89 (= MSVC++) with_gnu_ld=yes ;; openbsd* | bitrig*) with_gnu_ld=no ;; esac _LT_TAGVAR(ld_shlibs, $1)=yes # On some targets, GNU ld is compatible enough with the native linker # that we're better off using the native interface for both. lt_use_gnu_ld_interface=no if test yes = "$with_gnu_ld"; then case $host_os in aix*) # The AIX port of GNU ld has always aspired to compatibility # with the native linker. However, as the warning in the GNU ld # block says, versions before 2.19.5* couldn't really create working # shared libraries, regardless of the interface used. case `$LD -v 2>&1` in *\ \(GNU\ Binutils\)\ 2.19.5*) ;; *\ \(GNU\ Binutils\)\ 2.[[2-9]]*) ;; *\ \(GNU\ Binutils\)\ [[3-9]]*) ;; *) lt_use_gnu_ld_interface=yes ;; esac ;; *) lt_use_gnu_ld_interface=yes ;; esac fi if test yes = "$lt_use_gnu_ld_interface"; then # If archive_cmds runs LD, not CC, wlarc should be empty wlarc='$wl' # Set some defaults for GNU ld with shared library support. These # are reset later if shared libraries are not supported. Putting them # here allows them to be overridden if necessary. runpath_var=LD_RUN_PATH _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='$wl-rpath $wl$libdir' _LT_TAGVAR(export_dynamic_flag_spec, $1)='$wl--export-dynamic' # ancient GNU ld didn't support --whole-archive et. al. if $LD --help 2>&1 | $GREP 'no-whole-archive' > /dev/null; then _LT_TAGVAR(whole_archive_flag_spec, $1)=$wlarc'--whole-archive$convenience '$wlarc'--no-whole-archive' else _LT_TAGVAR(whole_archive_flag_spec, $1)= fi supports_anon_versioning=no case `$LD -v | $SED -e 's/([^)]\+)\s\+//' 2>&1` in *GNU\ gold*) supports_anon_versioning=yes ;; *\ [[01]].* | *\ 2.[[0-9]].* | *\ 2.10.*) ;; # catch versions < 2.11 *\ 2.11.93.0.2\ *) supports_anon_versioning=yes ;; # RH7.3 ... *\ 2.11.92.0.12\ *) supports_anon_versioning=yes ;; # Mandrake 8.2 ... 
*\ 2.11.*) ;; # other 2.11 versions *) supports_anon_versioning=yes ;; esac # See if GNU ld supports shared libraries. case $host_os in aix[[3-9]]*) # On AIX/PPC, the GNU linker is very broken if test ia64 != "$host_cpu"; then _LT_TAGVAR(ld_shlibs, $1)=no cat <<_LT_EOF 1>&2 *** Warning: the GNU linker, at least up to release 2.19, is reported *** to be unable to reliably create shared libraries on AIX. *** Therefore, libtool is disabling shared libraries support. If you *** really care for shared libraries, you may want to install binutils *** 2.20 or above, or modify your PATH so that a non-GNU linker is found. *** You will then need to restart the configuration process. _LT_EOF fi ;; amigaos*) case $host_cpu in powerpc) # see comment about AmigaOS4 .so support _LT_TAGVAR(archive_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags $wl-soname $wl$soname -o $lib' _LT_TAGVAR(archive_expsym_cmds, $1)='' ;; m68k) _LT_TAGVAR(archive_cmds, $1)='$RM $output_objdir/a2ixlibrary.data~$ECHO "#define NAME $libname" > $output_objdir/a2ixlibrary.data~$ECHO "#define LIBRARY_ID 1" >> $output_objdir/a2ixlibrary.data~$ECHO "#define VERSION $major" >> $output_objdir/a2ixlibrary.data~$ECHO "#define REVISION $revision" >> $output_objdir/a2ixlibrary.data~$AR $AR_FLAGS $lib $libobjs~$RANLIB $lib~(cd $output_objdir && a2ixlibrary -32)' _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-L$libdir' _LT_TAGVAR(hardcode_minus_L, $1)=yes ;; esac ;; beos*) if $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then _LT_TAGVAR(allow_undefined_flag, $1)=unsupported # Joseph Beckenbach says some releases of gcc # support --undefined. This deserves some investigation. FIXME _LT_TAGVAR(archive_cmds, $1)='$CC -nostart $libobjs $deplibs $compiler_flags $wl-soname $wl$soname -o $lib' else _LT_TAGVAR(ld_shlibs, $1)=no fi ;; cygwin* | mingw* | pw32* | cegcc*) # _LT_TAGVAR(hardcode_libdir_flag_spec, $1) is actually meaningless, # as there is no search path for DLLs. 
_LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-L$libdir' _LT_TAGVAR(export_dynamic_flag_spec, $1)='$wl--export-all-symbols' _LT_TAGVAR(allow_undefined_flag, $1)=unsupported _LT_TAGVAR(always_export_symbols, $1)=no _LT_TAGVAR(enable_shared_with_static_runtimes, $1)=yes _LT_TAGVAR(export_symbols_cmds, $1)='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[[BCDGRS]][[ ]]/s/.*[[ ]]\([[^ ]]*\)/\1 DATA/;s/^.*[[ ]]__nm__\([[^ ]]*\)[[ ]][[^ ]]*/\1 DATA/;/^I[[ ]]/d;/^[[AITW]][[ ]]/s/.* //'\'' | sort | uniq > $export_symbols' _LT_TAGVAR(exclude_expsyms, $1)=['[_]+GLOBAL_OFFSET_TABLE_|[_]+GLOBAL__[FID]_.*|[_]+head_[A-Za-z0-9_]+_dll|[A-Za-z0-9_]+_dll_iname'] if $LD --help 2>&1 | $GREP 'auto-import' > /dev/null; then _LT_TAGVAR(archive_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags -o $output_objdir/$soname $wl--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib' # If the export-symbols file already is a .def file, use it as # is; otherwise, prepend EXPORTS... _LT_TAGVAR(archive_expsym_cmds, $1)='if _LT_DLL_DEF_P([$export_symbols]); then cp $export_symbols $output_objdir/$soname.def; else echo EXPORTS > $output_objdir/$soname.def; cat $export_symbols >> $output_objdir/$soname.def; fi~ $CC -shared $output_objdir/$soname.def $libobjs $deplibs $compiler_flags -o $output_objdir/$soname $wl--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib' else _LT_TAGVAR(ld_shlibs, $1)=no fi ;; haiku*) _LT_TAGVAR(archive_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags $wl-soname $wl$soname -o $lib' _LT_TAGVAR(link_all_deplibs, $1)=yes ;; os2*) _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-L$libdir' _LT_TAGVAR(hardcode_minus_L, $1)=yes _LT_TAGVAR(allow_undefined_flag, $1)=unsupported shrext_cmds=.dll _LT_TAGVAR(archive_cmds, $1)='$ECHO "LIBRARY ${soname%$shared_ext} INITINSTANCE TERMINSTANCE" > $output_objdir/$libname.def~ $ECHO "DESCRIPTION \"$libname\"" >> $output_objdir/$libname.def~ $ECHO "DATA MULTIPLE NONSHARED" >> $output_objdir/$libname.def~ 
$ECHO EXPORTS >> $output_objdir/$libname.def~ emxexp $libobjs | $SED /"_DLL_InitTerm"/d >> $output_objdir/$libname.def~ $CC -Zdll -Zcrtdll -o $output_objdir/$soname $libobjs $deplibs $compiler_flags $output_objdir/$libname.def~ emximp -o $lib $output_objdir/$libname.def' _LT_TAGVAR(archive_expsym_cmds, $1)='$ECHO "LIBRARY ${soname%$shared_ext} INITINSTANCE TERMINSTANCE" > $output_objdir/$libname.def~ $ECHO "DESCRIPTION \"$libname\"" >> $output_objdir/$libname.def~ $ECHO "DATA MULTIPLE NONSHARED" >> $output_objdir/$libname.def~ $ECHO EXPORTS >> $output_objdir/$libname.def~ prefix_cmds="$SED"~ if test EXPORTS = "`$SED 1q $export_symbols`"; then prefix_cmds="$prefix_cmds -e 1d"; fi~ prefix_cmds="$prefix_cmds -e \"s/^\(.*\)$/_\1/g\""~ cat $export_symbols | $prefix_cmds >> $output_objdir/$libname.def~ $CC -Zdll -Zcrtdll -o $output_objdir/$soname $libobjs $deplibs $compiler_flags $output_objdir/$libname.def~ emximp -o $lib $output_objdir/$libname.def' _LT_TAGVAR(old_archive_From_new_cmds, $1)='emximp -o $output_objdir/${libname}_dll.a $output_objdir/$libname.def' _LT_TAGVAR(enable_shared_with_static_runtimes, $1)=yes ;; interix[[3-9]]*) _LT_TAGVAR(hardcode_direct, $1)=no _LT_TAGVAR(hardcode_shlibpath_var, $1)=no _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='$wl-rpath,$libdir' _LT_TAGVAR(export_dynamic_flag_spec, $1)='$wl-E' # Hack: On Interix 3.x, we cannot compile PIC because of a broken gcc. # Instead, shared libraries are loaded at an image base (0x10000000 by # default) and relocated if they conflict, which is a slow very memory # consuming and fragmenting process. To avoid this, we pick a random, # 256 KiB-aligned image base between 0x50000000 and 0x6FFC0000 at link # time. Moving up from 0x10000000 also allows more sbrk(2) space. 
_LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags $wl-h,$soname $wl--image-base,`expr ${RANDOM-$$} % 4096 / 2 \* 262144 + 1342177280` -o $lib' _LT_TAGVAR(archive_expsym_cmds, $1)='sed "s|^|_|" $export_symbols >$output_objdir/$soname.expsym~$CC -shared $pic_flag $libobjs $deplibs $compiler_flags $wl-h,$soname $wl--retain-symbols-file,$output_objdir/$soname.expsym $wl--image-base,`expr ${RANDOM-$$} % 4096 / 2 \* 262144 + 1342177280` -o $lib' ;; gnu* | linux* | tpf* | k*bsd*-gnu | kopensolaris*-gnu) tmp_diet=no if test linux-dietlibc = "$host_os"; then case $cc_basename in diet\ *) tmp_diet=yes;; # linux-dietlibc with static linking (!diet-dyn) esac fi if $LD --help 2>&1 | $EGREP ': supported targets:.* elf' > /dev/null \ && test no = "$tmp_diet" then tmp_addflag=' $pic_flag' tmp_sharedflag='-shared' case $cc_basename,$host_cpu in pgcc*) # Portland Group C compiler _LT_TAGVAR(whole_archive_flag_spec, $1)='$wl--whole-archive`for conv in $convenience\"\"; do test -n \"$conv\" && new_convenience=\"$new_convenience,$conv\"; done; func_echo_all \"$new_convenience\"` $wl--no-whole-archive' tmp_addflag=' $pic_flag' ;; pgf77* | pgf90* | pgf95* | pgfortran*) # Portland Group f77 and f90 compilers _LT_TAGVAR(whole_archive_flag_spec, $1)='$wl--whole-archive`for conv in $convenience\"\"; do test -n \"$conv\" && new_convenience=\"$new_convenience,$conv\"; done; func_echo_all \"$new_convenience\"` $wl--no-whole-archive' tmp_addflag=' $pic_flag -Mnomain' ;; ecc*,ia64* | icc*,ia64*) # Intel C compiler on ia64 tmp_addflag=' -i_dynamic' ;; efc*,ia64* | ifort*,ia64*) # Intel Fortran compiler on ia64 tmp_addflag=' -i_dynamic -nofor_main' ;; ifc* | ifort*) # Intel Fortran compiler tmp_addflag=' -nofor_main' ;; lf95*) # Lahey Fortran 8.1 _LT_TAGVAR(whole_archive_flag_spec, $1)= tmp_sharedflag='--shared' ;; nagfor*) # NAGFOR 5.3 tmp_sharedflag='-Wl,-shared' ;; xl[[cC]]* | bgxl[[cC]]* | mpixl[[cC]]*) # IBM XL C 8.0 on PPC (deal with xlf below) 
tmp_sharedflag='-qmkshrobj' tmp_addflag= ;; nvcc*) # Cuda Compiler Driver 2.2 _LT_TAGVAR(whole_archive_flag_spec, $1)='$wl--whole-archive`for conv in $convenience\"\"; do test -n \"$conv\" && new_convenience=\"$new_convenience,$conv\"; done; func_echo_all \"$new_convenience\"` $wl--no-whole-archive' _LT_TAGVAR(compiler_needs_object, $1)=yes ;; esac case `$CC -V 2>&1 | sed 5q` in *Sun\ C*) # Sun C 5.9 _LT_TAGVAR(whole_archive_flag_spec, $1)='$wl--whole-archive`new_convenience=; for conv in $convenience\"\"; do test -z \"$conv\" || new_convenience=\"$new_convenience,$conv\"; done; func_echo_all \"$new_convenience\"` $wl--no-whole-archive' _LT_TAGVAR(compiler_needs_object, $1)=yes tmp_sharedflag='-G' ;; *Sun\ F*) # Sun Fortran 8.3 tmp_sharedflag='-G' ;; esac _LT_TAGVAR(archive_cmds, $1)='$CC '"$tmp_sharedflag""$tmp_addflag"' $libobjs $deplibs $compiler_flags $wl-soname $wl$soname -o $lib' if test yes = "$supports_anon_versioning"; then _LT_TAGVAR(archive_expsym_cmds, $1)='echo "{ global:" > $output_objdir/$libname.ver~ cat $export_symbols | sed -e "s/\(.*\)/\1;/" >> $output_objdir/$libname.ver~ echo "local: *; };" >> $output_objdir/$libname.ver~ $CC '"$tmp_sharedflag""$tmp_addflag"' $libobjs $deplibs $compiler_flags $wl-soname $wl$soname $wl-version-script $wl$output_objdir/$libname.ver -o $lib' fi case $cc_basename in tcc*) _LT_TAGVAR(export_dynamic_flag_spec, $1)='-rdynamic' ;; xlf* | bgf* | bgxlf* | mpixlf*) # IBM XL Fortran 10.1 on PPC cannot create shared libs itself _LT_TAGVAR(whole_archive_flag_spec, $1)='--whole-archive$convenience --no-whole-archive' _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='$wl-rpath $wl$libdir' _LT_TAGVAR(archive_cmds, $1)='$LD -shared $libobjs $deplibs $linker_flags -soname $soname -o $lib' if test yes = "$supports_anon_versioning"; then _LT_TAGVAR(archive_expsym_cmds, $1)='echo "{ global:" > $output_objdir/$libname.ver~ cat $export_symbols | sed -e "s/\(.*\)/\1;/" >> $output_objdir/$libname.ver~ echo "local: *; };" >> 
$output_objdir/$libname.ver~ $LD -shared $libobjs $deplibs $linker_flags -soname $soname -version-script $output_objdir/$libname.ver -o $lib' fi ;; esac else _LT_TAGVAR(ld_shlibs, $1)=no fi ;; netbsd*) if echo __ELF__ | $CC -E - | $GREP __ELF__ >/dev/null; then _LT_TAGVAR(archive_cmds, $1)='$LD -Bshareable $libobjs $deplibs $linker_flags -o $lib' wlarc= else _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags $wl-soname $wl$soname -o $lib' _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags $wl-soname $wl$soname $wl-retain-symbols-file $wl$export_symbols -o $lib' fi ;; solaris*) if $LD -v 2>&1 | $GREP 'BFD 2\.8' > /dev/null; then _LT_TAGVAR(ld_shlibs, $1)=no cat <<_LT_EOF 1>&2 *** Warning: The releases 2.8.* of the GNU linker cannot reliably *** create shared libraries on Solaris systems. Therefore, libtool *** is disabling shared libraries support. We urge you to upgrade GNU *** binutils to release 2.9.1 or newer. Another option is to modify *** your PATH or compiler configuration so that the native linker is *** used, and then restart. _LT_EOF elif $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags $wl-soname $wl$soname -o $lib' _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags $wl-soname $wl$soname $wl-retain-symbols-file $wl$export_symbols -o $lib' else _LT_TAGVAR(ld_shlibs, $1)=no fi ;; sysv5* | sco3.2v5* | sco5v6* | unixware* | OpenUNIX*) case `$LD -v 2>&1` in *\ [[01]].* | *\ 2.[[0-9]].* | *\ 2.1[[0-5]].*) _LT_TAGVAR(ld_shlibs, $1)=no cat <<_LT_EOF 1>&2 *** Warning: Releases of the GNU linker prior to 2.16.91.0.3 cannot *** reliably create shared libraries on SCO systems. Therefore, libtool *** is disabling shared libraries support. We urge you to upgrade GNU *** binutils to release 2.16.91.0.3 or newer. 
Another option is to modify *** your PATH or compiler configuration so that the native linker is *** used, and then restart. _LT_EOF ;; *) # For security reasons, it is highly recommended that you always # use absolute paths for naming shared libraries, and exclude the # DT_RUNPATH tag from executables and libraries. But doing so # requires that you compile everything twice, which is a pain. if $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='$wl-rpath $wl$libdir' _LT_TAGVAR(archive_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags $wl-soname $wl$soname -o $lib' _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags $wl-soname $wl$soname $wl-retain-symbols-file $wl$export_symbols -o $lib' else _LT_TAGVAR(ld_shlibs, $1)=no fi ;; esac ;; sunos4*) _LT_TAGVAR(archive_cmds, $1)='$LD -assert pure-text -Bshareable -o $lib $libobjs $deplibs $linker_flags' wlarc= _LT_TAGVAR(hardcode_direct, $1)=yes _LT_TAGVAR(hardcode_shlibpath_var, $1)=no ;; *) if $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags $wl-soname $wl$soname -o $lib' _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags $wl-soname $wl$soname $wl-retain-symbols-file $wl$export_symbols -o $lib' else _LT_TAGVAR(ld_shlibs, $1)=no fi ;; esac if test no = "$_LT_TAGVAR(ld_shlibs, $1)"; then runpath_var= _LT_TAGVAR(hardcode_libdir_flag_spec, $1)= _LT_TAGVAR(export_dynamic_flag_spec, $1)= _LT_TAGVAR(whole_archive_flag_spec, $1)= fi else # PORTME fill in a description of your system's linker (not GNU ld) case $host_os in aix3*) _LT_TAGVAR(allow_undefined_flag, $1)=unsupported _LT_TAGVAR(always_export_symbols, $1)=yes _LT_TAGVAR(archive_expsym_cmds, $1)='$LD -o $output_objdir/$soname $libobjs $deplibs $linker_flags -bE:$export_symbols -T512 -H512 -bM:SRE~$AR $AR_FLAGS $lib 
$output_objdir/$soname' # Note: this linker hardcodes the directories in LIBPATH if there # are no directories specified by -L. _LT_TAGVAR(hardcode_minus_L, $1)=yes if test yes = "$GCC" && test -z "$lt_prog_compiler_static"; then # Neither direct hardcoding nor static linking is supported with a # broken collect2. _LT_TAGVAR(hardcode_direct, $1)=unsupported fi ;; aix[[4-9]]*) if test ia64 = "$host_cpu"; then # On IA64, the linker does run time linking by default, so we don't # have to do anything special. aix_use_runtimelinking=no exp_sym_flag='-Bexport' no_entry_flag= else # If we're using GNU nm, then we don't want the "-C" option. # -C means demangle to GNU nm, but means don't demangle to AIX nm. # Without the "-l" option, or with the "-B" option, AIX nm treats # weak defined symbols like other global defined symbols, whereas # GNU nm marks them as "W". # While the 'weak' keyword is ignored in the Export File, we need # it in the Import File for the 'aix-soname' feature, so we have # to replace the "-B" option with "-P" for AIX nm. if $NM -V 2>&1 | $GREP 'GNU' > /dev/null; then _LT_TAGVAR(export_symbols_cmds, $1)='$NM -Bpg $libobjs $convenience | awk '\''{ if (((\$ 2 == "T") || (\$ 2 == "D") || (\$ 2 == "B") || (\$ 2 == "W")) && ([substr](\$ 3,1,1) != ".")) { if (\$ 2 == "W") { print \$ 3 " weak" } else { print \$ 3 } } }'\'' | sort -u > $export_symbols' else _LT_TAGVAR(export_symbols_cmds, $1)='`func_echo_all $NM | $SED -e '\''s/B\([[^B]]*\)$/P\1/'\''` -PCpgl $libobjs $convenience | awk '\''{ if (((\$ 2 == "T") || (\$ 2 == "D") || (\$ 2 == "B") || (\$ 2 == "W") || (\$ 2 == "V") || (\$ 2 == "Z")) && ([substr](\$ 1,1,1) != ".")) { if ((\$ 2 == "W") || (\$ 2 == "V") || (\$ 2 == "Z")) { print \$ 1 " weak" } else { print \$ 1 } } }'\'' | sort -u > $export_symbols' fi aix_use_runtimelinking=no # Test if we are trying to use run time linking or normal # AIX style linking. If -brtl is somewhere in LDFLAGS, we # have runtime linking enabled, and use it for executables. 
# For shared libraries, we enable/disable runtime linking # depending on the kind of the shared library created - # when "with_aix_soname,aix_use_runtimelinking" is: # "aix,no" lib.a(lib.so.V) shared, rtl:no, for executables # "aix,yes" lib.so shared, rtl:yes, for executables # lib.a static archive # "both,no" lib.so.V(shr.o) shared, rtl:yes # lib.a(lib.so.V) shared, rtl:no, for executables # "both,yes" lib.so.V(shr.o) shared, rtl:yes, for executables # lib.a(lib.so.V) shared, rtl:no # "svr4,*" lib.so.V(shr.o) shared, rtl:yes, for executables # lib.a static archive case $host_os in aix4.[[23]]|aix4.[[23]].*|aix[[5-9]]*) for ld_flag in $LDFLAGS; do if (test x-brtl = "x$ld_flag" || test x-Wl,-brtl = "x$ld_flag"); then aix_use_runtimelinking=yes break fi done if test svr4,no = "$with_aix_soname,$aix_use_runtimelinking"; then # With aix-soname=svr4, we create the lib.so.V shared archives only, # so we don't have lib.a shared libs to link our executables. # We have to force runtime linking in this case. aix_use_runtimelinking=yes LDFLAGS="$LDFLAGS -Wl,-brtl" fi ;; esac exp_sym_flag='-bexport' no_entry_flag='-bnoentry' fi # When large executables or shared objects are built, AIX ld can # have problems creating the table of contents. If linking a library # or program results in "error TOC overflow" add -mminimal-toc to # CXXFLAGS/CFLAGS for g++/gcc. In the cases where that is not # enough to fix the problem, add -Wl,-bbigtoc to LDFLAGS. _LT_TAGVAR(archive_cmds, $1)='' _LT_TAGVAR(hardcode_direct, $1)=yes _LT_TAGVAR(hardcode_direct_absolute, $1)=yes _LT_TAGVAR(hardcode_libdir_separator, $1)=':' _LT_TAGVAR(link_all_deplibs, $1)=yes _LT_TAGVAR(file_list_spec, $1)='$wl-f,' case $with_aix_soname,$aix_use_runtimelinking in aix,*) ;; # traditional, no import file svr4,* | *,yes) # use import file # The Import File defines what to hardcode. 
_LT_TAGVAR(hardcode_direct, $1)=no _LT_TAGVAR(hardcode_direct_absolute, $1)=no ;; esac if test yes = "$GCC"; then case $host_os in aix4.[[012]]|aix4.[[012]].*) # We only want to do this on AIX 4.2 and lower, the check # below for broken collect2 doesn't work under 4.3+ collect2name=`$CC -print-prog-name=collect2` if test -f "$collect2name" && strings "$collect2name" | $GREP resolve_lib_name >/dev/null then # We have reworked collect2 : else # We have old collect2 _LT_TAGVAR(hardcode_direct, $1)=unsupported # It fails to find uninstalled libraries when the uninstalled # path is not listed in the libpath. Setting hardcode_minus_L # to unsupported forces relinking _LT_TAGVAR(hardcode_minus_L, $1)=yes _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-L$libdir' _LT_TAGVAR(hardcode_libdir_separator, $1)= fi ;; esac shared_flag='-shared' if test yes = "$aix_use_runtimelinking"; then shared_flag="$shared_flag "'$wl-G' fi # Need to ensure runtime linking is disabled for the traditional # shared library, or the linker may eventually find shared libraries # /with/ Import File - we do not want to mix them. shared_flag_aix='-shared' shared_flag_svr4='-shared $wl-G' else # not using gcc if test ia64 = "$host_cpu"; then # VisualAge C++, Version 5.5 for AIX 5L for IA-64, Beta 3 Release # chokes on -Wl,-G. The following line is correct: shared_flag='-G' else if test yes = "$aix_use_runtimelinking"; then shared_flag='$wl-G' else shared_flag='$wl-bM:SRE' fi shared_flag_aix='$wl-bM:SRE' shared_flag_svr4='$wl-G' fi fi _LT_TAGVAR(export_dynamic_flag_spec, $1)='$wl-bexpall' # It seems that -bexpall does not export symbols beginning with # underscore (_), so it is better to generate a list of symbols to export. _LT_TAGVAR(always_export_symbols, $1)=yes if test aix,yes = "$with_aix_soname,$aix_use_runtimelinking"; then # Warning - without using the other runtime loading flags (-brtl), # -berok will link without error, but may produce a broken library. 
_LT_TAGVAR(allow_undefined_flag, $1)='-berok' # Determine the default libpath from the value encoded in an # empty executable. _LT_SYS_MODULE_PATH_AIX([$1]) _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='$wl-blibpath:$libdir:'"$aix_libpath" _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -o $output_objdir/$soname $libobjs $deplibs $wl'$no_entry_flag' $compiler_flags `if test -n "$allow_undefined_flag"; then func_echo_all "$wl$allow_undefined_flag"; else :; fi` $wl'$exp_sym_flag:\$export_symbols' '$shared_flag else if test ia64 = "$host_cpu"; then _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='$wl-R $libdir:/usr/lib:/lib' _LT_TAGVAR(allow_undefined_flag, $1)="-z nodefs" _LT_TAGVAR(archive_expsym_cmds, $1)="\$CC $shared_flag"' -o $output_objdir/$soname $libobjs $deplibs '"\$wl$no_entry_flag"' $compiler_flags $wl$allow_undefined_flag '"\$wl$exp_sym_flag:\$export_symbols" else # Determine the default libpath from the value encoded in an # empty executable. _LT_SYS_MODULE_PATH_AIX([$1]) _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='$wl-blibpath:$libdir:'"$aix_libpath" # Warning - without using the other run time loading flags, # -berok will link without error, but may produce a broken library. _LT_TAGVAR(no_undefined_flag, $1)=' $wl-bernotok' _LT_TAGVAR(allow_undefined_flag, $1)=' $wl-berok' if test yes = "$with_gnu_ld"; then # We only use this code for GNU lds that support --whole-archive. 
_LT_TAGVAR(whole_archive_flag_spec, $1)='$wl--whole-archive$convenience $wl--no-whole-archive' else # Exported symbols can be pulled into shared objects from archives _LT_TAGVAR(whole_archive_flag_spec, $1)='$convenience' fi _LT_TAGVAR(archive_cmds_need_lc, $1)=yes _LT_TAGVAR(archive_expsym_cmds, $1)='$RM -r $output_objdir/$realname.d~$MKDIR $output_objdir/$realname.d' # -brtl affects multiple linker settings, -berok does not and is overridden later compiler_flags_filtered='`func_echo_all "$compiler_flags " | $SED -e "s%-brtl\\([[, ]]\\)%-berok\\1%g"`' if test svr4 != "$with_aix_soname"; then # This is similar to how AIX traditionally builds its shared libraries. _LT_TAGVAR(archive_expsym_cmds, $1)="$_LT_TAGVAR(archive_expsym_cmds, $1)"'~$CC '$shared_flag_aix' -o $output_objdir/$realname.d/$soname $libobjs $deplibs $wl-bnoentry '$compiler_flags_filtered'$wl-bE:$export_symbols$allow_undefined_flag~$AR $AR_FLAGS $output_objdir/$libname$release.a $output_objdir/$realname.d/$soname' fi if test aix != "$with_aix_soname"; then _LT_TAGVAR(archive_expsym_cmds, $1)="$_LT_TAGVAR(archive_expsym_cmds, $1)"'~$CC '$shared_flag_svr4' -o $output_objdir/$realname.d/$shared_archive_member_spec.o $libobjs $deplibs $wl-bnoentry '$compiler_flags_filtered'$wl-bE:$export_symbols$allow_undefined_flag~$STRIP -e $output_objdir/$realname.d/$shared_archive_member_spec.o~( func_echo_all "#! 
$soname($shared_archive_member_spec.o)"; if test shr_64 = "$shared_archive_member_spec"; then func_echo_all "# 64"; else func_echo_all "# 32"; fi; cat $export_symbols ) > $output_objdir/$realname.d/$shared_archive_member_spec.imp~$AR $AR_FLAGS $output_objdir/$soname $output_objdir/$realname.d/$shared_archive_member_spec.o $output_objdir/$realname.d/$shared_archive_member_spec.imp' else # used by -dlpreopen to get the symbols _LT_TAGVAR(archive_expsym_cmds, $1)="$_LT_TAGVAR(archive_expsym_cmds, $1)"'~$MV $output_objdir/$realname.d/$soname $output_objdir' fi _LT_TAGVAR(archive_expsym_cmds, $1)="$_LT_TAGVAR(archive_expsym_cmds, $1)"'~$RM -r $output_objdir/$realname.d' fi fi ;; amigaos*) case $host_cpu in powerpc) # see comment about AmigaOS4 .so support _LT_TAGVAR(archive_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags $wl-soname $wl$soname -o $lib' _LT_TAGVAR(archive_expsym_cmds, $1)='' ;; m68k) _LT_TAGVAR(archive_cmds, $1)='$RM $output_objdir/a2ixlibrary.data~$ECHO "#define NAME $libname" > $output_objdir/a2ixlibrary.data~$ECHO "#define LIBRARY_ID 1" >> $output_objdir/a2ixlibrary.data~$ECHO "#define VERSION $major" >> $output_objdir/a2ixlibrary.data~$ECHO "#define REVISION $revision" >> $output_objdir/a2ixlibrary.data~$AR $AR_FLAGS $lib $libobjs~$RANLIB $lib~(cd $output_objdir && a2ixlibrary -32)' _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-L$libdir' _LT_TAGVAR(hardcode_minus_L, $1)=yes ;; esac ;; bsdi[[45]]*) _LT_TAGVAR(export_dynamic_flag_spec, $1)=-rdynamic ;; cygwin* | mingw* | pw32* | cegcc*) # When not using gcc, we currently assume that we are using # Microsoft Visual C++. # hardcode_libdir_flag_spec is actually meaningless, as there is # no search path for DLLs. case $cc_basename in cl*) # Native MSVC _LT_TAGVAR(hardcode_libdir_flag_spec, $1)=' ' _LT_TAGVAR(allow_undefined_flag, $1)=unsupported _LT_TAGVAR(always_export_symbols, $1)=yes _LT_TAGVAR(file_list_spec, $1)='@' # Tell ltmain to make .lib files, not .a files. 
libext=lib # Tell ltmain to make .dll files, not .so files. shrext_cmds=.dll # FIXME: Setting linknames here is a bad hack. _LT_TAGVAR(archive_cmds, $1)='$CC -o $output_objdir/$soname $libobjs $compiler_flags $deplibs -Wl,-DLL,-IMPLIB:"$tool_output_objdir$libname.dll.lib"~linknames=' _LT_TAGVAR(archive_expsym_cmds, $1)='if _LT_DLL_DEF_P([$export_symbols]); then cp "$export_symbols" "$output_objdir/$soname.def"; echo "$tool_output_objdir$soname.def" > "$output_objdir/$soname.exp"; else $SED -e '\''s/^/-link -EXPORT:/'\'' < $export_symbols > $output_objdir/$soname.exp; fi~ $CC -o $tool_output_objdir$soname $libobjs $compiler_flags $deplibs "@$tool_output_objdir$soname.exp" -Wl,-DLL,-IMPLIB:"$tool_output_objdir$libname.dll.lib"~ linknames=' # The linker will not automatically build a static lib if we build a DLL. # _LT_TAGVAR(old_archive_from_new_cmds, $1)='true' _LT_TAGVAR(enable_shared_with_static_runtimes, $1)=yes _LT_TAGVAR(exclude_expsyms, $1)='_NULL_IMPORT_DESCRIPTOR|_IMPORT_DESCRIPTOR_.*' _LT_TAGVAR(export_symbols_cmds, $1)='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[[BCDGRS]][[ ]]/s/.*[[ ]]\([[^ ]]*\)/\1,DATA/'\'' | $SED -e '\''/^[[AITW]][[ ]]/s/.*[[ ]]//'\'' | sort | uniq > $export_symbols' # Don't use ranlib _LT_TAGVAR(old_postinstall_cmds, $1)='chmod 644 $oldlib' _LT_TAGVAR(postlink_cmds, $1)='lt_outputfile="@OUTPUT@"~ lt_tool_outputfile="@TOOL_OUTPUT@"~ case $lt_outputfile in *.exe|*.EXE) ;; *) lt_outputfile=$lt_outputfile.exe lt_tool_outputfile=$lt_tool_outputfile.exe ;; esac~ if test : != "$MANIFEST_TOOL" && test -f "$lt_outputfile.manifest"; then $MANIFEST_TOOL -manifest "$lt_tool_outputfile.manifest" -outputresource:"$lt_tool_outputfile" || exit 1; $RM "$lt_outputfile.manifest"; fi' ;; *) # Assume MSVC wrapper _LT_TAGVAR(hardcode_libdir_flag_spec, $1)=' ' _LT_TAGVAR(allow_undefined_flag, $1)=unsupported # Tell ltmain to make .lib files, not .a files. libext=lib # Tell ltmain to make .dll files, not .so files. 
shrext_cmds=.dll # FIXME: Setting linknames here is a bad hack. _LT_TAGVAR(archive_cmds, $1)='$CC -o $lib $libobjs $compiler_flags `func_echo_all "$deplibs" | $SED '\''s/ -lc$//'\''` -link -dll~linknames=' # The linker will automatically build a .lib file if we build a DLL. _LT_TAGVAR(old_archive_from_new_cmds, $1)='true' # FIXME: Should let the user specify the lib program. _LT_TAGVAR(old_archive_cmds, $1)='lib -OUT:$oldlib$oldobjs$old_deplibs' _LT_TAGVAR(enable_shared_with_static_runtimes, $1)=yes ;; esac ;; darwin* | rhapsody*) _LT_DARWIN_LINKER_FEATURES($1) ;; dgux*) _LT_TAGVAR(archive_cmds, $1)='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags' _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-L$libdir' _LT_TAGVAR(hardcode_shlibpath_var, $1)=no ;; # FreeBSD 2.2.[012] allows us to include c++rt0.o to get C++ constructor # support. Future versions do this automatically, but an explicit c++rt0.o # does not break anything, and helps significantly (at the cost of a little # extra space). freebsd2.2*) _LT_TAGVAR(archive_cmds, $1)='$LD -Bshareable -o $lib $libobjs $deplibs $linker_flags /usr/lib/c++rt0.o' _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-R$libdir' _LT_TAGVAR(hardcode_direct, $1)=yes _LT_TAGVAR(hardcode_shlibpath_var, $1)=no ;; # Unfortunately, older versions of FreeBSD 2 do not have this feature. freebsd2.*) _LT_TAGVAR(archive_cmds, $1)='$LD -Bshareable -o $lib $libobjs $deplibs $linker_flags' _LT_TAGVAR(hardcode_direct, $1)=yes _LT_TAGVAR(hardcode_minus_L, $1)=yes _LT_TAGVAR(hardcode_shlibpath_var, $1)=no ;; # FreeBSD 3 and greater uses gcc -shared to do shared libraries. 
freebsd* | dragonfly*) _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag -o $lib $libobjs $deplibs $compiler_flags' _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-R$libdir' _LT_TAGVAR(hardcode_direct, $1)=yes _LT_TAGVAR(hardcode_shlibpath_var, $1)=no ;; hpux9*) if test yes = "$GCC"; then _LT_TAGVAR(archive_cmds, $1)='$RM $output_objdir/$soname~$CC -shared $pic_flag $wl+b $wl$install_libdir -o $output_objdir/$soname $libobjs $deplibs $compiler_flags~test "x$output_objdir/$soname" = "x$lib" || mv $output_objdir/$soname $lib' else _LT_TAGVAR(archive_cmds, $1)='$RM $output_objdir/$soname~$LD -b +b $install_libdir -o $output_objdir/$soname $libobjs $deplibs $linker_flags~test "x$output_objdir/$soname" = "x$lib" || mv $output_objdir/$soname $lib' fi _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='$wl+b $wl$libdir' _LT_TAGVAR(hardcode_libdir_separator, $1)=: _LT_TAGVAR(hardcode_direct, $1)=yes # hardcode_minus_L: Not really in the search PATH, # but as the default location of the library. _LT_TAGVAR(hardcode_minus_L, $1)=yes _LT_TAGVAR(export_dynamic_flag_spec, $1)='$wl-E' ;; hpux10*) if test yes,no = "$GCC,$with_gnu_ld"; then _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag $wl+h $wl$soname $wl+b $wl$install_libdir -o $lib $libobjs $deplibs $compiler_flags' else _LT_TAGVAR(archive_cmds, $1)='$LD -b +h $soname +b $install_libdir -o $lib $libobjs $deplibs $linker_flags' fi if test no = "$with_gnu_ld"; then _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='$wl+b $wl$libdir' _LT_TAGVAR(hardcode_libdir_separator, $1)=: _LT_TAGVAR(hardcode_direct, $1)=yes _LT_TAGVAR(hardcode_direct_absolute, $1)=yes _LT_TAGVAR(export_dynamic_flag_spec, $1)='$wl-E' # hardcode_minus_L: Not really in the search PATH, # but as the default location of the library. 
_LT_TAGVAR(hardcode_minus_L, $1)=yes fi ;; hpux11*) if test yes,no = "$GCC,$with_gnu_ld"; then case $host_cpu in hppa*64*) _LT_TAGVAR(archive_cmds, $1)='$CC -shared $wl+h $wl$soname -o $lib $libobjs $deplibs $compiler_flags' ;; ia64*) _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag $wl+h $wl$soname $wl+nodefaultrpath -o $lib $libobjs $deplibs $compiler_flags' ;; *) _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag $wl+h $wl$soname $wl+b $wl$install_libdir -o $lib $libobjs $deplibs $compiler_flags' ;; esac else case $host_cpu in hppa*64*) _LT_TAGVAR(archive_cmds, $1)='$CC -b $wl+h $wl$soname -o $lib $libobjs $deplibs $compiler_flags' ;; ia64*) _LT_TAGVAR(archive_cmds, $1)='$CC -b $wl+h $wl$soname $wl+nodefaultrpath -o $lib $libobjs $deplibs $compiler_flags' ;; *) m4_if($1, [], [ # Older versions of the 11.00 compiler do not understand -b yet # (HP92453-01 A.11.01.20 doesn't, HP92453-01 B.11.X.35175-35176.GP does) _LT_LINKER_OPTION([if $CC understands -b], _LT_TAGVAR(lt_cv_prog_compiler__b, $1), [-b], [_LT_TAGVAR(archive_cmds, $1)='$CC -b $wl+h $wl$soname $wl+b $wl$install_libdir -o $lib $libobjs $deplibs $compiler_flags'], [_LT_TAGVAR(archive_cmds, $1)='$LD -b +h $soname +b $install_libdir -o $lib $libobjs $deplibs $linker_flags'])], [_LT_TAGVAR(archive_cmds, $1)='$CC -b $wl+h $wl$soname $wl+b $wl$install_libdir -o $lib $libobjs $deplibs $compiler_flags']) ;; esac fi if test no = "$with_gnu_ld"; then _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='$wl+b $wl$libdir' _LT_TAGVAR(hardcode_libdir_separator, $1)=: case $host_cpu in hppa*64*|ia64*) _LT_TAGVAR(hardcode_direct, $1)=no _LT_TAGVAR(hardcode_shlibpath_var, $1)=no ;; *) _LT_TAGVAR(hardcode_direct, $1)=yes _LT_TAGVAR(hardcode_direct_absolute, $1)=yes _LT_TAGVAR(export_dynamic_flag_spec, $1)='$wl-E' # hardcode_minus_L: Not really in the search PATH, # but as the default location of the library. 
_LT_TAGVAR(hardcode_minus_L, $1)=yes ;; esac fi ;; irix5* | irix6* | nonstopux*) if test yes = "$GCC"; then _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags $wl-soname $wl$soname `test -n "$verstring" && func_echo_all "$wl-set_version $wl$verstring"` $wl-update_registry $wl$output_objdir/so_locations -o $lib' # Try to use the -exported_symbol ld option, if it does not # work, assume that -exports_file does not work either and # implicitly export all symbols. # This should be the same for all languages, so no per-tag cache variable. AC_CACHE_CHECK([whether the $host_os linker accepts -exported_symbol], [lt_cv_irix_exported_symbol], [save_LDFLAGS=$LDFLAGS LDFLAGS="$LDFLAGS -shared $wl-exported_symbol ${wl}foo $wl-update_registry $wl/dev/null" AC_LINK_IFELSE( [AC_LANG_SOURCE( [AC_LANG_CASE([C], [[int foo (void) { return 0; }]], [C++], [[int foo (void) { return 0; }]], [Fortran 77], [[ subroutine foo end]], [Fortran], [[ subroutine foo end]])])], [lt_cv_irix_exported_symbol=yes], [lt_cv_irix_exported_symbol=no]) LDFLAGS=$save_LDFLAGS]) if test yes = "$lt_cv_irix_exported_symbol"; then _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags $wl-soname $wl$soname `test -n "$verstring" && func_echo_all "$wl-set_version $wl$verstring"` $wl-update_registry $wl$output_objdir/so_locations $wl-exports_file $wl$export_symbols -o $lib' fi else _LT_TAGVAR(archive_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry $output_objdir/so_locations -o $lib' _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry $output_objdir/so_locations -exports_file $export_symbols -o $lib' fi _LT_TAGVAR(archive_cmds_need_lc, $1)='no' _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='$wl-rpath $wl$libdir' 
_LT_TAGVAR(hardcode_libdir_separator, $1)=: _LT_TAGVAR(inherit_rpath, $1)=yes _LT_TAGVAR(link_all_deplibs, $1)=yes ;; linux*) case $cc_basename in tcc*) # Fabrice Bellard et al's Tiny C Compiler _LT_TAGVAR(ld_shlibs, $1)=yes _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag -o $lib $libobjs $deplibs $compiler_flags' ;; esac ;; netbsd*) if echo __ELF__ | $CC -E - | $GREP __ELF__ >/dev/null; then _LT_TAGVAR(archive_cmds, $1)='$LD -Bshareable -o $lib $libobjs $deplibs $linker_flags' # a.out else _LT_TAGVAR(archive_cmds, $1)='$LD -shared -o $lib $libobjs $deplibs $linker_flags' # ELF fi _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-R$libdir' _LT_TAGVAR(hardcode_direct, $1)=yes _LT_TAGVAR(hardcode_shlibpath_var, $1)=no ;; newsos6) _LT_TAGVAR(archive_cmds, $1)='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags' _LT_TAGVAR(hardcode_direct, $1)=yes _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='$wl-rpath $wl$libdir' _LT_TAGVAR(hardcode_libdir_separator, $1)=: _LT_TAGVAR(hardcode_shlibpath_var, $1)=no ;; *nto* | *qnx*) ;; openbsd* | bitrig*) if test -f /usr/libexec/ld.so; then _LT_TAGVAR(hardcode_direct, $1)=yes _LT_TAGVAR(hardcode_shlibpath_var, $1)=no _LT_TAGVAR(hardcode_direct_absolute, $1)=yes if test -z "`echo __ELF__ | $CC -E - | $GREP __ELF__`"; then _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag -o $lib $libobjs $deplibs $compiler_flags' _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $pic_flag -o $lib $libobjs $deplibs $compiler_flags $wl-retain-symbols-file,$export_symbols' _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='$wl-rpath,$libdir' _LT_TAGVAR(export_dynamic_flag_spec, $1)='$wl-E' else _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag -o $lib $libobjs $deplibs $compiler_flags' _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='$wl-rpath,$libdir' fi else _LT_TAGVAR(ld_shlibs, $1)=no fi ;; os2*) _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-L$libdir' _LT_TAGVAR(hardcode_minus_L, $1)=yes _LT_TAGVAR(allow_undefined_flag, $1)=unsupported shrext_cmds=.dll 
_LT_TAGVAR(archive_cmds, $1)='$ECHO "LIBRARY ${soname%$shared_ext} INITINSTANCE TERMINSTANCE" > $output_objdir/$libname.def~ $ECHO "DESCRIPTION \"$libname\"" >> $output_objdir/$libname.def~ $ECHO "DATA MULTIPLE NONSHARED" >> $output_objdir/$libname.def~ $ECHO EXPORTS >> $output_objdir/$libname.def~ emxexp $libobjs | $SED /"_DLL_InitTerm"/d >> $output_objdir/$libname.def~ $CC -Zdll -Zcrtdll -o $output_objdir/$soname $libobjs $deplibs $compiler_flags $output_objdir/$libname.def~ emximp -o $lib $output_objdir/$libname.def' _LT_TAGVAR(archive_expsym_cmds, $1)='$ECHO "LIBRARY ${soname%$shared_ext} INITINSTANCE TERMINSTANCE" > $output_objdir/$libname.def~ $ECHO "DESCRIPTION \"$libname\"" >> $output_objdir/$libname.def~ $ECHO "DATA MULTIPLE NONSHARED" >> $output_objdir/$libname.def~ $ECHO EXPORTS >> $output_objdir/$libname.def~ prefix_cmds="$SED"~ if test EXPORTS = "`$SED 1q $export_symbols`"; then prefix_cmds="$prefix_cmds -e 1d"; fi~ prefix_cmds="$prefix_cmds -e \"s/^\(.*\)$/_\1/g\""~ cat $export_symbols | $prefix_cmds >> $output_objdir/$libname.def~ $CC -Zdll -Zcrtdll -o $output_objdir/$soname $libobjs $deplibs $compiler_flags $output_objdir/$libname.def~ emximp -o $lib $output_objdir/$libname.def' _LT_TAGVAR(old_archive_From_new_cmds, $1)='emximp -o $output_objdir/${libname}_dll.a $output_objdir/$libname.def' _LT_TAGVAR(enable_shared_with_static_runtimes, $1)=yes ;; osf3*) if test yes = "$GCC"; then _LT_TAGVAR(allow_undefined_flag, $1)=' $wl-expect_unresolved $wl\*' _LT_TAGVAR(archive_cmds, $1)='$CC -shared$allow_undefined_flag $libobjs $deplibs $compiler_flags $wl-soname $wl$soname `test -n "$verstring" && func_echo_all "$wl-set_version $wl$verstring"` $wl-update_registry $wl$output_objdir/so_locations -o $lib' else _LT_TAGVAR(allow_undefined_flag, $1)=' -expect_unresolved \*' _LT_TAGVAR(archive_cmds, $1)='$CC -shared$allow_undefined_flag $libobjs $deplibs $compiler_flags -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` 
-update_registry $output_objdir/so_locations -o $lib' fi _LT_TAGVAR(archive_cmds_need_lc, $1)='no' _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='$wl-rpath $wl$libdir' _LT_TAGVAR(hardcode_libdir_separator, $1)=: ;; osf4* | osf5*) # as osf3* with the addition of -msym flag if test yes = "$GCC"; then _LT_TAGVAR(allow_undefined_flag, $1)=' $wl-expect_unresolved $wl\*' _LT_TAGVAR(archive_cmds, $1)='$CC -shared$allow_undefined_flag $pic_flag $libobjs $deplibs $compiler_flags $wl-msym $wl-soname $wl$soname `test -n "$verstring" && func_echo_all "$wl-set_version $wl$verstring"` $wl-update_registry $wl$output_objdir/so_locations -o $lib' _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='$wl-rpath $wl$libdir' else _LT_TAGVAR(allow_undefined_flag, $1)=' -expect_unresolved \*' _LT_TAGVAR(archive_cmds, $1)='$CC -shared$allow_undefined_flag $libobjs $deplibs $compiler_flags -msym -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry $output_objdir/so_locations -o $lib' _LT_TAGVAR(archive_expsym_cmds, $1)='for i in `cat $export_symbols`; do printf "%s %s\\n" -exported_symbol "\$i" >> $lib.exp; done; printf "%s\\n" "-hidden">> $lib.exp~ $CC -shared$allow_undefined_flag $wl-input $wl$lib.exp $compiler_flags $libobjs $deplibs -soname $soname `test -n "$verstring" && $ECHO "-set_version $verstring"` -update_registry $output_objdir/so_locations -o $lib~$RM $lib.exp' # Both c and cxx compiler support -rpath directly _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-rpath $libdir' fi _LT_TAGVAR(archive_cmds_need_lc, $1)='no' _LT_TAGVAR(hardcode_libdir_separator, $1)=: ;; solaris*) _LT_TAGVAR(no_undefined_flag, $1)=' -z defs' if test yes = "$GCC"; then wlarc='$wl' _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag $wl-z ${wl}text $wl-h $wl$soname -o $lib $libobjs $deplibs $compiler_flags' _LT_TAGVAR(archive_expsym_cmds, $1)='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~ $CC -shared 
$pic_flag $wl-z ${wl}text $wl-M $wl$lib.exp $wl-h $wl$soname -o $lib $libobjs $deplibs $compiler_flags~$RM $lib.exp' else case `$CC -V 2>&1` in *"Compilers 5.0"*) wlarc='' _LT_TAGVAR(archive_cmds, $1)='$LD -G$allow_undefined_flag -h $soname -o $lib $libobjs $deplibs $linker_flags' _LT_TAGVAR(archive_expsym_cmds, $1)='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~ $LD -G$allow_undefined_flag -M $lib.exp -h $soname -o $lib $libobjs $deplibs $linker_flags~$RM $lib.exp' ;; *) wlarc='$wl' _LT_TAGVAR(archive_cmds, $1)='$CC -G$allow_undefined_flag -h $soname -o $lib $libobjs $deplibs $compiler_flags' _LT_TAGVAR(archive_expsym_cmds, $1)='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~ $CC -G$allow_undefined_flag -M $lib.exp -h $soname -o $lib $libobjs $deplibs $compiler_flags~$RM $lib.exp' ;; esac fi _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-R$libdir' _LT_TAGVAR(hardcode_shlibpath_var, $1)=no case $host_os in solaris2.[[0-5]] | solaris2.[[0-5]].*) ;; *) # The compiler driver will combine and reorder linker options, # but understands '-z linker_flag'. GCC discards it without '$wl', # but is careful enough not to reorder. # Supported since Solaris 2.6 (maybe 2.5.1?) if test yes = "$GCC"; then _LT_TAGVAR(whole_archive_flag_spec, $1)='$wl-z ${wl}allextract$convenience $wl-z ${wl}defaultextract' else _LT_TAGVAR(whole_archive_flag_spec, $1)='-z allextract$convenience -z defaultextract' fi ;; esac _LT_TAGVAR(link_all_deplibs, $1)=yes ;; sunos4*) if test sequent = "$host_vendor"; then # Use $CC to link under sequent, because it throws in some extra .o # files that make .init and .fini sections work. 
_LT_TAGVAR(archive_cmds, $1)='$CC -G $wl-h $soname -o $lib $libobjs $deplibs $compiler_flags' else _LT_TAGVAR(archive_cmds, $1)='$LD -assert pure-text -Bstatic -o $lib $libobjs $deplibs $linker_flags' fi _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-L$libdir' _LT_TAGVAR(hardcode_direct, $1)=yes _LT_TAGVAR(hardcode_minus_L, $1)=yes _LT_TAGVAR(hardcode_shlibpath_var, $1)=no ;; sysv4) case $host_vendor in sni) _LT_TAGVAR(archive_cmds, $1)='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags' _LT_TAGVAR(hardcode_direct, $1)=yes # is this really true??? ;; siemens) ## LD is ld it makes a PLAMLIB ## CC just makes a GrossModule. _LT_TAGVAR(archive_cmds, $1)='$LD -G -o $lib $libobjs $deplibs $linker_flags' _LT_TAGVAR(reload_cmds, $1)='$CC -r -o $output$reload_objs' _LT_TAGVAR(hardcode_direct, $1)=no ;; motorola) _LT_TAGVAR(archive_cmds, $1)='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags' _LT_TAGVAR(hardcode_direct, $1)=no #Motorola manual says yes, but my tests say they lie ;; esac runpath_var='LD_RUN_PATH' _LT_TAGVAR(hardcode_shlibpath_var, $1)=no ;; sysv4.3*) _LT_TAGVAR(archive_cmds, $1)='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags' _LT_TAGVAR(hardcode_shlibpath_var, $1)=no _LT_TAGVAR(export_dynamic_flag_spec, $1)='-Bexport' ;; sysv4*MP*) if test -d /usr/nec; then _LT_TAGVAR(archive_cmds, $1)='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags' _LT_TAGVAR(hardcode_shlibpath_var, $1)=no runpath_var=LD_RUN_PATH hardcode_runpath_var=yes _LT_TAGVAR(ld_shlibs, $1)=yes fi ;; sysv4*uw2* | sysv5OpenUNIX* | sysv5UnixWare7.[[01]].[[10]]* | unixware7* | sco3.2v5.0.[[024]]*) _LT_TAGVAR(no_undefined_flag, $1)='$wl-z,text' _LT_TAGVAR(archive_cmds_need_lc, $1)=no _LT_TAGVAR(hardcode_shlibpath_var, $1)=no runpath_var='LD_RUN_PATH' if test yes = "$GCC"; then _LT_TAGVAR(archive_cmds, $1)='$CC -shared $wl-h,$soname -o $lib $libobjs $deplibs $compiler_flags' _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $wl-Bexport:$export_symbols $wl-h,$soname -o 
$lib $libobjs $deplibs $compiler_flags' else _LT_TAGVAR(archive_cmds, $1)='$CC -G $wl-h,$soname -o $lib $libobjs $deplibs $compiler_flags' _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -G $wl-Bexport:$export_symbols $wl-h,$soname -o $lib $libobjs $deplibs $compiler_flags' fi ;; sysv5* | sco3.2v5* | sco5v6*) # Note: We CANNOT use -z defs as we might desire, because we do not # link with -lc, and that would cause any symbols used from libc to # always be unresolved, which means just about no library would # ever link correctly. If we're not using GNU ld we use -z text # though, which does catch some bad symbols but isn't as heavy-handed # as -z defs. _LT_TAGVAR(no_undefined_flag, $1)='$wl-z,text' _LT_TAGVAR(allow_undefined_flag, $1)='$wl-z,nodefs' _LT_TAGVAR(archive_cmds_need_lc, $1)=no _LT_TAGVAR(hardcode_shlibpath_var, $1)=no _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='$wl-R,$libdir' _LT_TAGVAR(hardcode_libdir_separator, $1)=':' _LT_TAGVAR(link_all_deplibs, $1)=yes _LT_TAGVAR(export_dynamic_flag_spec, $1)='$wl-Bexport' runpath_var='LD_RUN_PATH' if test yes = "$GCC"; then _LT_TAGVAR(archive_cmds, $1)='$CC -shared $wl-h,$soname -o $lib $libobjs $deplibs $compiler_flags' _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $wl-Bexport:$export_symbols $wl-h,$soname -o $lib $libobjs $deplibs $compiler_flags' else _LT_TAGVAR(archive_cmds, $1)='$CC -G $wl-h,$soname -o $lib $libobjs $deplibs $compiler_flags' _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -G $wl-Bexport:$export_symbols $wl-h,$soname -o $lib $libobjs $deplibs $compiler_flags' fi ;; uts4*) _LT_TAGVAR(archive_cmds, $1)='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags' _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-L$libdir' _LT_TAGVAR(hardcode_shlibpath_var, $1)=no ;; *) _LT_TAGVAR(ld_shlibs, $1)=no ;; esac if test sni = "$host_vendor"; then case $host in sysv4 | sysv4.2uw2* | sysv4.3* | sysv5*) _LT_TAGVAR(export_dynamic_flag_spec, $1)='$wl-Blargedynsym' ;; esac fi fi ]) AC_MSG_RESULT([$_LT_TAGVAR(ld_shlibs, $1)]) test 
no = "$_LT_TAGVAR(ld_shlibs, $1)" && can_build_shared=no _LT_TAGVAR(with_gnu_ld, $1)=$with_gnu_ld _LT_DECL([], [libext], [0], [Old archive suffix (normally "a")])dnl _LT_DECL([], [shrext_cmds], [1], [Shared library suffix (normally ".so")])dnl _LT_DECL([], [extract_expsyms_cmds], [2], [The commands to extract the exported symbol list from a shared archive]) # # Do we need to explicitly link libc? # case "x$_LT_TAGVAR(archive_cmds_need_lc, $1)" in x|xyes) # Assume -lc should be added _LT_TAGVAR(archive_cmds_need_lc, $1)=yes if test yes,yes = "$GCC,$enable_shared"; then case $_LT_TAGVAR(archive_cmds, $1) in *'~'*) # FIXME: we may have to deal with multi-command sequences. ;; '$CC '*) # Test whether the compiler implicitly links with -lc since on some # systems, -lgcc has to come before -lc. If gcc already passes -lc # to ld, don't add -lc before -lgcc. AC_CACHE_CHECK([whether -lc should be explicitly linked in], [lt_cv_]_LT_TAGVAR(archive_cmds_need_lc, $1), [$RM conftest* echo "$lt_simple_compile_test_code" > conftest.$ac_ext if AC_TRY_EVAL(ac_compile) 2>conftest.err; then soname=conftest lib=conftest libobjs=conftest.$ac_objext deplibs= wl=$_LT_TAGVAR(lt_prog_compiler_wl, $1) pic_flag=$_LT_TAGVAR(lt_prog_compiler_pic, $1) compiler_flags=-v linker_flags=-v verstring= output_objdir=. 
libname=conftest lt_save_allow_undefined_flag=$_LT_TAGVAR(allow_undefined_flag, $1) _LT_TAGVAR(allow_undefined_flag, $1)= if AC_TRY_EVAL(_LT_TAGVAR(archive_cmds, $1) 2\>\&1 \| $GREP \" -lc \" \>/dev/null 2\>\&1) then lt_cv_[]_LT_TAGVAR(archive_cmds_need_lc, $1)=no else lt_cv_[]_LT_TAGVAR(archive_cmds_need_lc, $1)=yes fi _LT_TAGVAR(allow_undefined_flag, $1)=$lt_save_allow_undefined_flag else cat conftest.err 1>&5 fi $RM conftest* ]) _LT_TAGVAR(archive_cmds_need_lc, $1)=$lt_cv_[]_LT_TAGVAR(archive_cmds_need_lc, $1) ;; esac fi ;; esac _LT_TAGDECL([build_libtool_need_lc], [archive_cmds_need_lc], [0], [Whether or not to add -lc for building shared libraries]) _LT_TAGDECL([allow_libtool_libs_with_static_runtimes], [enable_shared_with_static_runtimes], [0], [Whether or not to disallow shared libs when runtime libs are static]) _LT_TAGDECL([], [export_dynamic_flag_spec], [1], [Compiler flag to allow reflexive dlopens]) _LT_TAGDECL([], [whole_archive_flag_spec], [1], [Compiler flag to generate shared objects directly from archives]) _LT_TAGDECL([], [compiler_needs_object], [1], [Whether the compiler copes with passing no objects directly]) _LT_TAGDECL([], [old_archive_from_new_cmds], [2], [Create an old-style archive from a shared archive]) _LT_TAGDECL([], [old_archive_from_expsyms_cmds], [2], [Create a temporary old-style archive to link instead of a shared archive]) _LT_TAGDECL([], [archive_cmds], [2], [Commands used to build a shared archive]) _LT_TAGDECL([], [archive_expsym_cmds], [2]) _LT_TAGDECL([], [module_cmds], [2], [Commands used to build a loadable module if different from building a shared archive.]) _LT_TAGDECL([], [module_expsym_cmds], [2]) _LT_TAGDECL([], [with_gnu_ld], [1], [Whether we are building with GNU ld or not]) _LT_TAGDECL([], [allow_undefined_flag], [1], [Flag that allows shared libraries with undefined symbols to be built]) _LT_TAGDECL([], [no_undefined_flag], [1], [Flag that enforces no undefined symbols]) _LT_TAGDECL([], 
[hardcode_libdir_flag_spec], [1], [Flag to hardcode $libdir into a binary during linking. This must work even if $libdir does not exist]) _LT_TAGDECL([], [hardcode_libdir_separator], [1], [Whether we need a single "-rpath" flag with a separated argument]) _LT_TAGDECL([], [hardcode_direct], [0], [Set to "yes" if using DIR/libNAME$shared_ext during linking hardcodes DIR into the resulting binary]) _LT_TAGDECL([], [hardcode_direct_absolute], [0], [Set to "yes" if using DIR/libNAME$shared_ext during linking hardcodes DIR into the resulting binary and the resulting library dependency is "absolute", i.e impossible to change by setting $shlibpath_var if the library is relocated]) _LT_TAGDECL([], [hardcode_minus_L], [0], [Set to "yes" if using the -LDIR flag during linking hardcodes DIR into the resulting binary]) _LT_TAGDECL([], [hardcode_shlibpath_var], [0], [Set to "yes" if using SHLIBPATH_VAR=DIR during linking hardcodes DIR into the resulting binary]) _LT_TAGDECL([], [hardcode_automatic], [0], [Set to "yes" if building a shared library automatically hardcodes DIR into the library and all subsequent libraries and executables linked against it]) _LT_TAGDECL([], [inherit_rpath], [0], [Set to yes if linker adds runtime paths of dependent libraries to runtime path list]) _LT_TAGDECL([], [link_all_deplibs], [0], [Whether libtool must link a program against all its dependency libraries]) _LT_TAGDECL([], [always_export_symbols], [0], [Set to "yes" if exported symbols are required]) _LT_TAGDECL([], [export_symbols_cmds], [2], [The commands to list exported symbols]) _LT_TAGDECL([], [exclude_expsyms], [1], [Symbols that should not be listed in the preloaded symbols]) _LT_TAGDECL([], [include_expsyms], [1], [Symbols that must always be exported]) _LT_TAGDECL([], [prelink_cmds], [2], [Commands necessary for linking programs (against libraries) with templates]) _LT_TAGDECL([], [postlink_cmds], [2], [Commands necessary for finishing linking programs]) _LT_TAGDECL([], 
[file_list_spec], [1], [Specify filename containing input files]) dnl FIXME: Not yet implemented dnl _LT_TAGDECL([], [thread_safe_flag_spec], [1], dnl [Compiler flag to generate thread safe objects]) ])# _LT_LINKER_SHLIBS # _LT_LANG_C_CONFIG([TAG]) # ------------------------ # Ensure that the configuration variables for a C compiler are suitably # defined. These variables are subsequently used by _LT_CONFIG to write # the compiler configuration to 'libtool'. m4_defun([_LT_LANG_C_CONFIG], [m4_require([_LT_DECL_EGREP])dnl lt_save_CC=$CC AC_LANG_PUSH(C) # Source file extension for C test sources. ac_ext=c # Object file extension for compiled C test sources. objext=o _LT_TAGVAR(objext, $1)=$objext # Code to be used in simple compile tests lt_simple_compile_test_code="int some_variable = 0;" # Code to be used in simple link tests lt_simple_link_test_code='int main(){return(0);}' _LT_TAG_COMPILER # Save the default compiler, since it gets overwritten when the other # tags are being tested, and _LT_TAGVAR(compiler, []) is a NOP. compiler_DEFAULT=$CC # save warnings/boilerplate of simple test code _LT_COMPILER_BOILERPLATE _LT_LINKER_BOILERPLATE if test -n "$compiler"; then _LT_COMPILER_NO_RTTI($1) _LT_COMPILER_PIC($1) _LT_COMPILER_C_O($1) _LT_COMPILER_FILE_LOCKS($1) _LT_LINKER_SHLIBS($1) _LT_SYS_DYNAMIC_LINKER($1) _LT_LINKER_HARDCODE_LIBPATH($1) LT_SYS_DLOPEN_SELF _LT_CMD_STRIPLIB # Report what library types will actually be built AC_MSG_CHECKING([if libtool supports shared libraries]) AC_MSG_RESULT([$can_build_shared]) AC_MSG_CHECKING([whether to build shared libraries]) test no = "$can_build_shared" && enable_shared=no # On AIX, shared libraries and static libraries use the same namespace, and # are all built from PIC. 
case $host_os in aix3*) test yes = "$enable_shared" && enable_static=no if test -n "$RANLIB"; then archive_cmds="$archive_cmds~\$RANLIB \$lib" postinstall_cmds='$RANLIB $lib' fi ;; aix[[4-9]]*) if test ia64 != "$host_cpu"; then case $enable_shared,$with_aix_soname,$aix_use_runtimelinking in yes,aix,yes) ;; # shared object as lib.so file only yes,svr4,*) ;; # shared object as lib.so archive member only yes,*) enable_static=no ;; # shared object in lib.a archive as well esac fi ;; esac AC_MSG_RESULT([$enable_shared]) AC_MSG_CHECKING([whether to build static libraries]) # Make sure either enable_shared or enable_static is yes. test yes = "$enable_shared" || enable_static=yes AC_MSG_RESULT([$enable_static]) _LT_CONFIG($1) fi AC_LANG_POP CC=$lt_save_CC ])# _LT_LANG_C_CONFIG # _LT_LANG_CXX_CONFIG([TAG]) # -------------------------- # Ensure that the configuration variables for a C++ compiler are suitably # defined. These variables are subsequently used by _LT_CONFIG to write # the compiler configuration to 'libtool'. 
m4_defun([_LT_LANG_CXX_CONFIG], [m4_require([_LT_FILEUTILS_DEFAULTS])dnl m4_require([_LT_DECL_EGREP])dnl m4_require([_LT_PATH_MANIFEST_TOOL])dnl if test -n "$CXX" && ( test no != "$CXX" && ( (test g++ = "$CXX" && `g++ -v >/dev/null 2>&1` ) || (test g++ != "$CXX"))); then AC_PROG_CXXCPP else _lt_caught_CXX_error=yes fi AC_LANG_PUSH(C++) _LT_TAGVAR(archive_cmds_need_lc, $1)=no _LT_TAGVAR(allow_undefined_flag, $1)= _LT_TAGVAR(always_export_symbols, $1)=no _LT_TAGVAR(archive_expsym_cmds, $1)= _LT_TAGVAR(compiler_needs_object, $1)=no _LT_TAGVAR(export_dynamic_flag_spec, $1)= _LT_TAGVAR(hardcode_direct, $1)=no _LT_TAGVAR(hardcode_direct_absolute, $1)=no _LT_TAGVAR(hardcode_libdir_flag_spec, $1)= _LT_TAGVAR(hardcode_libdir_separator, $1)= _LT_TAGVAR(hardcode_minus_L, $1)=no _LT_TAGVAR(hardcode_shlibpath_var, $1)=unsupported _LT_TAGVAR(hardcode_automatic, $1)=no _LT_TAGVAR(inherit_rpath, $1)=no _LT_TAGVAR(module_cmds, $1)= _LT_TAGVAR(module_expsym_cmds, $1)= _LT_TAGVAR(link_all_deplibs, $1)=unknown _LT_TAGVAR(old_archive_cmds, $1)=$old_archive_cmds _LT_TAGVAR(reload_flag, $1)=$reload_flag _LT_TAGVAR(reload_cmds, $1)=$reload_cmds _LT_TAGVAR(no_undefined_flag, $1)= _LT_TAGVAR(whole_archive_flag_spec, $1)= _LT_TAGVAR(enable_shared_with_static_runtimes, $1)=no # Source file extension for C++ test sources. ac_ext=cpp # Object file extension for compiled C++ test sources. objext=o _LT_TAGVAR(objext, $1)=$objext # No sense in running all these tests if we already determined that # the CXX compiler isn't working. Some variables (like enable_shared) # are currently assumed to apply to all compilers on this platform, # and will be corrupted by setting them based on a non-working compiler. 
if test yes != "$_lt_caught_CXX_error"; then # Code to be used in simple compile tests lt_simple_compile_test_code="int some_variable = 0;" # Code to be used in simple link tests lt_simple_link_test_code='int main(int, char *[[]]) { return(0); }' # ltmain only uses $CC for tagged configurations so make sure $CC is set. _LT_TAG_COMPILER # save warnings/boilerplate of simple test code _LT_COMPILER_BOILERPLATE _LT_LINKER_BOILERPLATE # Allow CC to be a program name with arguments. lt_save_CC=$CC lt_save_CFLAGS=$CFLAGS lt_save_LD=$LD lt_save_GCC=$GCC GCC=$GXX lt_save_with_gnu_ld=$with_gnu_ld lt_save_path_LD=$lt_cv_path_LD if test -n "${lt_cv_prog_gnu_ldcxx+set}"; then lt_cv_prog_gnu_ld=$lt_cv_prog_gnu_ldcxx else $as_unset lt_cv_prog_gnu_ld fi if test -n "${lt_cv_path_LDCXX+set}"; then lt_cv_path_LD=$lt_cv_path_LDCXX else $as_unset lt_cv_path_LD fi test -z "${LDCXX+set}" || LD=$LDCXX CC=${CXX-"c++"} CFLAGS=$CXXFLAGS compiler=$CC _LT_TAGVAR(compiler, $1)=$CC _LT_CC_BASENAME([$compiler]) if test -n "$compiler"; then # We don't want -fno-exception when compiling C++ code, so set the # no_builtin_flag separately if test yes = "$GXX"; then _LT_TAGVAR(lt_prog_compiler_no_builtin_flag, $1)=' -fno-builtin' else _LT_TAGVAR(lt_prog_compiler_no_builtin_flag, $1)= fi if test yes = "$GXX"; then # Set up default GNU C++ configuration LT_PATH_LD # Check if GNU C++ uses GNU ld as the underlying linker, since the # archiving commands below assume that GNU ld is being used. 
if test yes = "$with_gnu_ld"; then _LT_TAGVAR(archive_cmds, $1)='$CC $pic_flag -shared -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags $wl-soname $wl$soname -o $lib' _LT_TAGVAR(archive_expsym_cmds, $1)='$CC $pic_flag -shared -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags $wl-soname $wl$soname $wl-retain-symbols-file $wl$export_symbols -o $lib' _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='$wl-rpath $wl$libdir' _LT_TAGVAR(export_dynamic_flag_spec, $1)='$wl--export-dynamic' # If archive_cmds runs LD, not CC, wlarc should be empty # XXX I think wlarc can be eliminated in ltcf-cxx, but I need to # investigate it a little bit more. (MM) wlarc='$wl' # ancient GNU ld didn't support --whole-archive et. al. if eval "`$CC -print-prog-name=ld` --help 2>&1" | $GREP 'no-whole-archive' > /dev/null; then _LT_TAGVAR(whole_archive_flag_spec, $1)=$wlarc'--whole-archive$convenience '$wlarc'--no-whole-archive' else _LT_TAGVAR(whole_archive_flag_spec, $1)= fi else with_gnu_ld=no wlarc= # A generic and very simple default shared library creation # command for GNU C++ for the case where it uses the native # linker, instead of GNU ld. If possible, this setting should # overridden to take advantage of the native linker features on # the platform it is being used on. _LT_TAGVAR(archive_cmds, $1)='$CC -shared -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags -o $lib' fi # Commands to make compiler produce verbose output that lists # what "hidden" libraries, object files and flags are used when # linking a shared library. 
output_verbose_link_cmd='$CC -shared $CFLAGS -v conftest.$objext 2>&1 | $GREP -v "^Configured with:" | $GREP "\-L"' else GXX=no with_gnu_ld=no wlarc= fi # PORTME: fill in a description of your system's C++ link characteristics AC_MSG_CHECKING([whether the $compiler linker ($LD) supports shared libraries]) _LT_TAGVAR(ld_shlibs, $1)=yes case $host_os in aix3*) # FIXME: insert proper C++ library support _LT_TAGVAR(ld_shlibs, $1)=no ;; aix[[4-9]]*) if test ia64 = "$host_cpu"; then # On IA64, the linker does run time linking by default, so we don't # have to do anything special. aix_use_runtimelinking=no exp_sym_flag='-Bexport' no_entry_flag= else aix_use_runtimelinking=no # Test if we are trying to use run time linking or normal # AIX style linking. If -brtl is somewhere in LDFLAGS, we # have runtime linking enabled, and use it for executables. # For shared libraries, we enable/disable runtime linking # depending on the kind of the shared library created - # when "with_aix_soname,aix_use_runtimelinking" is: # "aix,no" lib.a(lib.so.V) shared, rtl:no, for executables # "aix,yes" lib.so shared, rtl:yes, for executables # lib.a static archive # "both,no" lib.so.V(shr.o) shared, rtl:yes # lib.a(lib.so.V) shared, rtl:no, for executables # "both,yes" lib.so.V(shr.o) shared, rtl:yes, for executables # lib.a(lib.so.V) shared, rtl:no # "svr4,*" lib.so.V(shr.o) shared, rtl:yes, for executables # lib.a static archive case $host_os in aix4.[[23]]|aix4.[[23]].*|aix[[5-9]]*) for ld_flag in $LDFLAGS; do case $ld_flag in *-brtl*) aix_use_runtimelinking=yes break ;; esac done if test svr4,no = "$with_aix_soname,$aix_use_runtimelinking"; then # With aix-soname=svr4, we create the lib.so.V shared archives only, # so we don't have lib.a shared libs to link our executables. # We have to force runtime linking in this case. 
aix_use_runtimelinking=yes LDFLAGS="$LDFLAGS -Wl,-brtl" fi ;; esac exp_sym_flag='-bexport' no_entry_flag='-bnoentry' fi # When large executables or shared objects are built, AIX ld can # have problems creating the table of contents. If linking a library # or program results in "error TOC overflow" add -mminimal-toc to # CXXFLAGS/CFLAGS for g++/gcc. In the cases where that is not # enough to fix the problem, add -Wl,-bbigtoc to LDFLAGS. _LT_TAGVAR(archive_cmds, $1)='' _LT_TAGVAR(hardcode_direct, $1)=yes _LT_TAGVAR(hardcode_direct_absolute, $1)=yes _LT_TAGVAR(hardcode_libdir_separator, $1)=':' _LT_TAGVAR(link_all_deplibs, $1)=yes _LT_TAGVAR(file_list_spec, $1)='$wl-f,' case $with_aix_soname,$aix_use_runtimelinking in aix,*) ;; # no import file svr4,* | *,yes) # use import file # The Import File defines what to hardcode. _LT_TAGVAR(hardcode_direct, $1)=no _LT_TAGVAR(hardcode_direct_absolute, $1)=no ;; esac if test yes = "$GXX"; then case $host_os in aix4.[[012]]|aix4.[[012]].*) # We only want to do this on AIX 4.2 and lower, the check # below for broken collect2 doesn't work under 4.3+ collect2name=`$CC -print-prog-name=collect2` if test -f "$collect2name" && strings "$collect2name" | $GREP resolve_lib_name >/dev/null then # We have reworked collect2 : else # We have old collect2 _LT_TAGVAR(hardcode_direct, $1)=unsupported # It fails to find uninstalled libraries when the uninstalled # path is not listed in the libpath. Setting hardcode_minus_L # to unsupported forces relinking _LT_TAGVAR(hardcode_minus_L, $1)=yes _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-L$libdir' _LT_TAGVAR(hardcode_libdir_separator, $1)= fi esac shared_flag='-shared' if test yes = "$aix_use_runtimelinking"; then shared_flag=$shared_flag' $wl-G' fi # Need to ensure runtime linking is disabled for the traditional # shared library, or the linker may eventually find shared libraries # /with/ Import File - we do not want to mix them. 
shared_flag_aix='-shared' shared_flag_svr4='-shared $wl-G' else # not using gcc if test ia64 = "$host_cpu"; then # VisualAge C++, Version 5.5 for AIX 5L for IA-64, Beta 3 Release # chokes on -Wl,-G. The following line is correct: shared_flag='-G' else if test yes = "$aix_use_runtimelinking"; then shared_flag='$wl-G' else shared_flag='$wl-bM:SRE' fi shared_flag_aix='$wl-bM:SRE' shared_flag_svr4='$wl-G' fi fi _LT_TAGVAR(export_dynamic_flag_spec, $1)='$wl-bexpall' # It seems that -bexpall does not export symbols beginning with # underscore (_), so it is better to generate a list of symbols to # export. _LT_TAGVAR(always_export_symbols, $1)=yes if test aix,yes = "$with_aix_soname,$aix_use_runtimelinking"; then # Warning - without using the other runtime loading flags (-brtl), # -berok will link without error, but may produce a broken library. # The "-G" linker flag allows undefined symbols. _LT_TAGVAR(no_undefined_flag, $1)='-bernotok' # Determine the default libpath from the value encoded in an empty # executable. _LT_SYS_MODULE_PATH_AIX([$1]) _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='$wl-blibpath:$libdir:'"$aix_libpath" _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -o $output_objdir/$soname $libobjs $deplibs $wl'$no_entry_flag' $compiler_flags `if test -n "$allow_undefined_flag"; then func_echo_all "$wl$allow_undefined_flag"; else :; fi` $wl'$exp_sym_flag:\$export_symbols' '$shared_flag else if test ia64 = "$host_cpu"; then _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='$wl-R $libdir:/usr/lib:/lib' _LT_TAGVAR(allow_undefined_flag, $1)="-z nodefs" _LT_TAGVAR(archive_expsym_cmds, $1)="\$CC $shared_flag"' -o $output_objdir/$soname $libobjs $deplibs '"\$wl$no_entry_flag"' $compiler_flags $wl$allow_undefined_flag '"\$wl$exp_sym_flag:\$export_symbols" else # Determine the default libpath from the value encoded in an # empty executable. 
_LT_SYS_MODULE_PATH_AIX([$1]) _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='$wl-blibpath:$libdir:'"$aix_libpath" # Warning - without using the other run time loading flags, # -berok will link without error, but may produce a broken library. _LT_TAGVAR(no_undefined_flag, $1)=' $wl-bernotok' _LT_TAGVAR(allow_undefined_flag, $1)=' $wl-berok' if test yes = "$with_gnu_ld"; then # We only use this code for GNU lds that support --whole-archive. _LT_TAGVAR(whole_archive_flag_spec, $1)='$wl--whole-archive$convenience $wl--no-whole-archive' else # Exported symbols can be pulled into shared objects from archives _LT_TAGVAR(whole_archive_flag_spec, $1)='$convenience' fi _LT_TAGVAR(archive_cmds_need_lc, $1)=yes _LT_TAGVAR(archive_expsym_cmds, $1)='$RM -r $output_objdir/$realname.d~$MKDIR $output_objdir/$realname.d' # -brtl affects multiple linker settings, -berok does not and is overridden later compiler_flags_filtered='`func_echo_all "$compiler_flags " | $SED -e "s%-brtl\\([[, ]]\\)%-berok\\1%g"`' if test svr4 != "$with_aix_soname"; then # This is similar to how AIX traditionally builds its shared # libraries. Need -bnortl late, we may have -brtl in LDFLAGS. _LT_TAGVAR(archive_expsym_cmds, $1)="$_LT_TAGVAR(archive_expsym_cmds, $1)"'~$CC '$shared_flag_aix' -o $output_objdir/$realname.d/$soname $libobjs $deplibs $wl-bnoentry '$compiler_flags_filtered'$wl-bE:$export_symbols$allow_undefined_flag~$AR $AR_FLAGS $output_objdir/$libname$release.a $output_objdir/$realname.d/$soname' fi if test aix != "$with_aix_soname"; then _LT_TAGVAR(archive_expsym_cmds, $1)="$_LT_TAGVAR(archive_expsym_cmds, $1)"'~$CC '$shared_flag_svr4' -o $output_objdir/$realname.d/$shared_archive_member_spec.o $libobjs $deplibs $wl-bnoentry '$compiler_flags_filtered'$wl-bE:$export_symbols$allow_undefined_flag~$STRIP -e $output_objdir/$realname.d/$shared_archive_member_spec.o~( func_echo_all "#! 
$soname($shared_archive_member_spec.o)"; if test shr_64 = "$shared_archive_member_spec"; then func_echo_all "# 64"; else func_echo_all "# 32"; fi; cat $export_symbols ) > $output_objdir/$realname.d/$shared_archive_member_spec.imp~$AR $AR_FLAGS $output_objdir/$soname $output_objdir/$realname.d/$shared_archive_member_spec.o $output_objdir/$realname.d/$shared_archive_member_spec.imp' else # used by -dlpreopen to get the symbols _LT_TAGVAR(archive_expsym_cmds, $1)="$_LT_TAGVAR(archive_expsym_cmds, $1)"'~$MV $output_objdir/$realname.d/$soname $output_objdir' fi _LT_TAGVAR(archive_expsym_cmds, $1)="$_LT_TAGVAR(archive_expsym_cmds, $1)"'~$RM -r $output_objdir/$realname.d' fi fi ;; beos*) if $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then _LT_TAGVAR(allow_undefined_flag, $1)=unsupported # Joseph Beckenbach says some releases of gcc # support --undefined. This deserves some investigation. FIXME _LT_TAGVAR(archive_cmds, $1)='$CC -nostart $libobjs $deplibs $compiler_flags $wl-soname $wl$soname -o $lib' else _LT_TAGVAR(ld_shlibs, $1)=no fi ;; chorus*) case $cc_basename in *) # FIXME: insert proper C++ library support _LT_TAGVAR(ld_shlibs, $1)=no ;; esac ;; cygwin* | mingw* | pw32* | cegcc*) case $GXX,$cc_basename in ,cl* | no,cl*) # Native MSVC # hardcode_libdir_flag_spec is actually meaningless, as there is # no search path for DLLs. _LT_TAGVAR(hardcode_libdir_flag_spec, $1)=' ' _LT_TAGVAR(allow_undefined_flag, $1)=unsupported _LT_TAGVAR(always_export_symbols, $1)=yes _LT_TAGVAR(file_list_spec, $1)='@' # Tell ltmain to make .lib files, not .a files. libext=lib # Tell ltmain to make .dll files, not .so files. shrext_cmds=.dll # FIXME: Setting linknames here is a bad hack. 
_LT_TAGVAR(archive_cmds, $1)='$CC -o $output_objdir/$soname $libobjs $compiler_flags $deplibs -Wl,-DLL,-IMPLIB:"$tool_output_objdir$libname.dll.lib"~linknames=' _LT_TAGVAR(archive_expsym_cmds, $1)='if _LT_DLL_DEF_P([$export_symbols]); then cp "$export_symbols" "$output_objdir/$soname.def"; echo "$tool_output_objdir$soname.def" > "$output_objdir/$soname.exp"; else $SED -e '\''s/^/-link -EXPORT:/'\'' < $export_symbols > $output_objdir/$soname.exp; fi~ $CC -o $tool_output_objdir$soname $libobjs $compiler_flags $deplibs "@$tool_output_objdir$soname.exp" -Wl,-DLL,-IMPLIB:"$tool_output_objdir$libname.dll.lib"~ linknames=' # The linker will not automatically build a static lib if we build a DLL. # _LT_TAGVAR(old_archive_from_new_cmds, $1)='true' _LT_TAGVAR(enable_shared_with_static_runtimes, $1)=yes # Don't use ranlib _LT_TAGVAR(old_postinstall_cmds, $1)='chmod 644 $oldlib' _LT_TAGVAR(postlink_cmds, $1)='lt_outputfile="@OUTPUT@"~ lt_tool_outputfile="@TOOL_OUTPUT@"~ case $lt_outputfile in *.exe|*.EXE) ;; *) lt_outputfile=$lt_outputfile.exe lt_tool_outputfile=$lt_tool_outputfile.exe ;; esac~ func_to_tool_file "$lt_outputfile"~ if test : != "$MANIFEST_TOOL" && test -f "$lt_outputfile.manifest"; then $MANIFEST_TOOL -manifest "$lt_tool_outputfile.manifest" -outputresource:"$lt_tool_outputfile" || exit 1; $RM "$lt_outputfile.manifest"; fi' ;; *) # g++ # _LT_TAGVAR(hardcode_libdir_flag_spec, $1) is actually meaningless, # as there is no search path for DLLs. 
_LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-L$libdir' _LT_TAGVAR(export_dynamic_flag_spec, $1)='$wl--export-all-symbols' _LT_TAGVAR(allow_undefined_flag, $1)=unsupported _LT_TAGVAR(always_export_symbols, $1)=no _LT_TAGVAR(enable_shared_with_static_runtimes, $1)=yes if $LD --help 2>&1 | $GREP 'auto-import' > /dev/null; then _LT_TAGVAR(archive_cmds, $1)='$CC -shared -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags -o $output_objdir/$soname $wl--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib' # If the export-symbols file already is a .def file, use it as # is; otherwise, prepend EXPORTS... _LT_TAGVAR(archive_expsym_cmds, $1)='if _LT_DLL_DEF_P([$export_symbols]); then cp $export_symbols $output_objdir/$soname.def; else echo EXPORTS > $output_objdir/$soname.def; cat $export_symbols >> $output_objdir/$soname.def; fi~ $CC -shared -nostdlib $output_objdir/$soname.def $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags -o $output_objdir/$soname $wl--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib' else _LT_TAGVAR(ld_shlibs, $1)=no fi ;; esac ;; darwin* | rhapsody*) _LT_DARWIN_LINKER_FEATURES($1) ;; os2*) _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-L$libdir' _LT_TAGVAR(hardcode_minus_L, $1)=yes _LT_TAGVAR(allow_undefined_flag, $1)=unsupported shrext_cmds=.dll _LT_TAGVAR(archive_cmds, $1)='$ECHO "LIBRARY ${soname%$shared_ext} INITINSTANCE TERMINSTANCE" > $output_objdir/$libname.def~ $ECHO "DESCRIPTION \"$libname\"" >> $output_objdir/$libname.def~ $ECHO "DATA MULTIPLE NONSHARED" >> $output_objdir/$libname.def~ $ECHO EXPORTS >> $output_objdir/$libname.def~ emxexp $libobjs | $SED /"_DLL_InitTerm"/d >> $output_objdir/$libname.def~ $CC -Zdll -Zcrtdll -o $output_objdir/$soname $libobjs $deplibs $compiler_flags $output_objdir/$libname.def~ emximp -o $lib $output_objdir/$libname.def' _LT_TAGVAR(archive_expsym_cmds, $1)='$ECHO "LIBRARY ${soname%$shared_ext} INITINSTANCE TERMINSTANCE" > $output_objdir/$libname.def~ $ECHO 
"DESCRIPTION \"$libname\"" >> $output_objdir/$libname.def~ $ECHO "DATA MULTIPLE NONSHARED" >> $output_objdir/$libname.def~ $ECHO EXPORTS >> $output_objdir/$libname.def~ prefix_cmds="$SED"~ if test EXPORTS = "`$SED 1q $export_symbols`"; then prefix_cmds="$prefix_cmds -e 1d"; fi~ prefix_cmds="$prefix_cmds -e \"s/^\(.*\)$/_\1/g\""~ cat $export_symbols | $prefix_cmds >> $output_objdir/$libname.def~ $CC -Zdll -Zcrtdll -o $output_objdir/$soname $libobjs $deplibs $compiler_flags $output_objdir/$libname.def~ emximp -o $lib $output_objdir/$libname.def' _LT_TAGVAR(old_archive_From_new_cmds, $1)='emximp -o $output_objdir/${libname}_dll.a $output_objdir/$libname.def' _LT_TAGVAR(enable_shared_with_static_runtimes, $1)=yes ;; dgux*) case $cc_basename in ec++*) # FIXME: insert proper C++ library support _LT_TAGVAR(ld_shlibs, $1)=no ;; ghcx*) # Green Hills C++ Compiler # FIXME: insert proper C++ library support _LT_TAGVAR(ld_shlibs, $1)=no ;; *) # FIXME: insert proper C++ library support _LT_TAGVAR(ld_shlibs, $1)=no ;; esac ;; freebsd2.*) # C++ shared libraries reported to be fairly broken before # switch to ELF _LT_TAGVAR(ld_shlibs, $1)=no ;; freebsd-elf*) _LT_TAGVAR(archive_cmds_need_lc, $1)=no ;; freebsd* | dragonfly*) # FreeBSD 3 and later use GNU C++ and GNU ld with standard ELF # conventions _LT_TAGVAR(ld_shlibs, $1)=yes ;; haiku*) _LT_TAGVAR(archive_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags $wl-soname $wl$soname -o $lib' _LT_TAGVAR(link_all_deplibs, $1)=yes ;; hpux9*) _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='$wl+b $wl$libdir' _LT_TAGVAR(hardcode_libdir_separator, $1)=: _LT_TAGVAR(export_dynamic_flag_spec, $1)='$wl-E' _LT_TAGVAR(hardcode_direct, $1)=yes _LT_TAGVAR(hardcode_minus_L, $1)=yes # Not in the search PATH, # but as the default # location of the library. 
case $cc_basename in CC*) # FIXME: insert proper C++ library support _LT_TAGVAR(ld_shlibs, $1)=no ;; aCC*) _LT_TAGVAR(archive_cmds, $1)='$RM $output_objdir/$soname~$CC -b $wl+b $wl$install_libdir -o $output_objdir/$soname $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags~test "x$output_objdir/$soname" = "x$lib" || mv $output_objdir/$soname $lib' # Commands to make compiler produce verbose output that lists # what "hidden" libraries, object files and flags are used when # linking a shared library. # # There doesn't appear to be a way to prevent this compiler from # explicitly linking system object files so we need to strip them # from the output so that they don't get included in the library # dependencies. output_verbose_link_cmd='templist=`($CC -b $CFLAGS -v conftest.$objext 2>&1) | $EGREP "\-L"`; list= ; for z in $templist; do case $z in conftest.$objext) list="$list $z";; *.$objext);; *) list="$list $z";;esac; done; func_echo_all "$list"' ;; *) if test yes = "$GXX"; then _LT_TAGVAR(archive_cmds, $1)='$RM $output_objdir/$soname~$CC -shared -nostdlib $pic_flag $wl+b $wl$install_libdir -o $output_objdir/$soname $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags~test "x$output_objdir/$soname" = "x$lib" || mv $output_objdir/$soname $lib' else # FIXME: insert proper C++ library support _LT_TAGVAR(ld_shlibs, $1)=no fi ;; esac ;; hpux10*|hpux11*) if test no = "$with_gnu_ld"; then _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='$wl+b $wl$libdir' _LT_TAGVAR(hardcode_libdir_separator, $1)=: case $host_cpu in hppa*64*|ia64*) ;; *) _LT_TAGVAR(export_dynamic_flag_spec, $1)='$wl-E' ;; esac fi case $host_cpu in hppa*64*|ia64*) _LT_TAGVAR(hardcode_direct, $1)=no _LT_TAGVAR(hardcode_shlibpath_var, $1)=no ;; *) _LT_TAGVAR(hardcode_direct, $1)=yes _LT_TAGVAR(hardcode_direct_absolute, $1)=yes _LT_TAGVAR(hardcode_minus_L, $1)=yes # Not in the search PATH, # but as the default # location of the library. 
;; esac case $cc_basename in CC*) # FIXME: insert proper C++ library support _LT_TAGVAR(ld_shlibs, $1)=no ;; aCC*) case $host_cpu in hppa*64*) _LT_TAGVAR(archive_cmds, $1)='$CC -b $wl+h $wl$soname -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags' ;; ia64*) _LT_TAGVAR(archive_cmds, $1)='$CC -b $wl+h $wl$soname $wl+nodefaultrpath -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags' ;; *) _LT_TAGVAR(archive_cmds, $1)='$CC -b $wl+h $wl$soname $wl+b $wl$install_libdir -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags' ;; esac # Commands to make compiler produce verbose output that lists # what "hidden" libraries, object files and flags are used when # linking a shared library. # # There doesn't appear to be a way to prevent this compiler from # explicitly linking system object files so we need to strip them # from the output so that they don't get included in the library # dependencies. output_verbose_link_cmd='templist=`($CC -b $CFLAGS -v conftest.$objext 2>&1) | $GREP "\-L"`; list= ; for z in $templist; do case $z in conftest.$objext) list="$list $z";; *.$objext);; *) list="$list $z";;esac; done; func_echo_all "$list"' ;; *) if test yes = "$GXX"; then if test no = "$with_gnu_ld"; then case $host_cpu in hppa*64*) _LT_TAGVAR(archive_cmds, $1)='$CC -shared -nostdlib -fPIC $wl+h $wl$soname -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags' ;; ia64*) _LT_TAGVAR(archive_cmds, $1)='$CC -shared -nostdlib $pic_flag $wl+h $wl$soname $wl+nodefaultrpath -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags' ;; *) _LT_TAGVAR(archive_cmds, $1)='$CC -shared -nostdlib $pic_flag $wl+h $wl$soname $wl+b $wl$install_libdir -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags' ;; esac fi else # FIXME: insert proper C++ library support _LT_TAGVAR(ld_shlibs, $1)=no fi ;; esac ;; interix[[3-9]]*) _LT_TAGVAR(hardcode_direct, $1)=no 
_LT_TAGVAR(hardcode_shlibpath_var, $1)=no _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='$wl-rpath,$libdir' _LT_TAGVAR(export_dynamic_flag_spec, $1)='$wl-E' # Hack: On Interix 3.x, we cannot compile PIC because of a broken gcc. # Instead, shared libraries are loaded at an image base (0x10000000 by # default) and relocated if they conflict, which is a slow very memory # consuming and fragmenting process. To avoid this, we pick a random, # 256 KiB-aligned image base between 0x50000000 and 0x6FFC0000 at link # time. Moving up from 0x10000000 also allows more sbrk(2) space. _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags $wl-h,$soname $wl--image-base,`expr ${RANDOM-$$} % 4096 / 2 \* 262144 + 1342177280` -o $lib' _LT_TAGVAR(archive_expsym_cmds, $1)='sed "s|^|_|" $export_symbols >$output_objdir/$soname.expsym~$CC -shared $pic_flag $libobjs $deplibs $compiler_flags $wl-h,$soname $wl--retain-symbols-file,$output_objdir/$soname.expsym $wl--image-base,`expr ${RANDOM-$$} % 4096 / 2 \* 262144 + 1342177280` -o $lib' ;; irix5* | irix6*) case $cc_basename in CC*) # SGI C++ _LT_TAGVAR(archive_cmds, $1)='$CC -shared -all -multigot $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry $output_objdir/so_locations -o $lib' # Archives containing C++ object files must be created using # "CC -ar", where "CC" is the IRIX C++ compiler. This is # necessary to make sure instantiated templates are included # in the archive. 
_LT_TAGVAR(old_archive_cmds, $1)='$CC -ar -WR,-u -o $oldlib $oldobjs' ;; *) if test yes = "$GXX"; then if test no = "$with_gnu_ld"; then _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags $wl-soname $wl$soname `test -n "$verstring" && func_echo_all "$wl-set_version $wl$verstring"` $wl-update_registry $wl$output_objdir/so_locations -o $lib' else _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags $wl-soname $wl$soname `test -n "$verstring" && func_echo_all "$wl-set_version $wl$verstring"` -o $lib' fi fi _LT_TAGVAR(link_all_deplibs, $1)=yes ;; esac _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='$wl-rpath $wl$libdir' _LT_TAGVAR(hardcode_libdir_separator, $1)=: _LT_TAGVAR(inherit_rpath, $1)=yes ;; linux* | k*bsd*-gnu | kopensolaris*-gnu | gnu*) case $cc_basename in KCC*) # Kuck and Associates, Inc. (KAI) C++ Compiler # KCC will only create a shared library if the output file # ends with ".so" (or ".sl" for HP-UX), so rename the library # to its proper name (with version) after linking. _LT_TAGVAR(archive_cmds, $1)='tempext=`echo $shared_ext | $SED -e '\''s/\([[^()0-9A-Za-z{}]]\)/\\\\\1/g'\''`; templib=`echo $lib | $SED -e "s/\$tempext\..*/.so/"`; $CC $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags --soname $soname -o \$templib; mv \$templib $lib' _LT_TAGVAR(archive_expsym_cmds, $1)='tempext=`echo $shared_ext | $SED -e '\''s/\([[^()0-9A-Za-z{}]]\)/\\\\\1/g'\''`; templib=`echo $lib | $SED -e "s/\$tempext\..*/.so/"`; $CC $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags --soname $soname -o \$templib $wl-retain-symbols-file,$export_symbols; mv \$templib $lib' # Commands to make compiler produce verbose output that lists # what "hidden" libraries, object files and flags are used when # linking a shared library. 
# # There doesn't appear to be a way to prevent this compiler from # explicitly linking system object files so we need to strip them # from the output so that they don't get included in the library # dependencies. output_verbose_link_cmd='templist=`$CC $CFLAGS -v conftest.$objext -o libconftest$shared_ext 2>&1 | $GREP "ld"`; rm -f libconftest$shared_ext; list= ; for z in $templist; do case $z in conftest.$objext) list="$list $z";; *.$objext);; *) list="$list $z";;esac; done; func_echo_all "$list"' _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='$wl-rpath,$libdir' _LT_TAGVAR(export_dynamic_flag_spec, $1)='$wl--export-dynamic' # Archives containing C++ object files must be created using # "CC -Bstatic", where "CC" is the KAI C++ compiler. _LT_TAGVAR(old_archive_cmds, $1)='$CC -Bstatic -o $oldlib $oldobjs' ;; icpc* | ecpc* ) # Intel C++ with_gnu_ld=yes # version 8.0 and above of icpc choke on multiply defined symbols # if we add $predep_objects and $postdep_objects, however 7.1 and # earlier do not add the objects themselves. 
case `$CC -V 2>&1` in *"Version 7."*) _LT_TAGVAR(archive_cmds, $1)='$CC -shared $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags $wl-soname $wl$soname -o $lib' _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags $wl-soname $wl$soname $wl-retain-symbols-file $wl$export_symbols -o $lib' ;; *) # Version 8.0 or newer tmp_idyn= case $host_cpu in ia64*) tmp_idyn=' -i_dynamic';; esac _LT_TAGVAR(archive_cmds, $1)='$CC -shared'"$tmp_idyn"' $libobjs $deplibs $compiler_flags $wl-soname $wl$soname -o $lib' _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared'"$tmp_idyn"' $libobjs $deplibs $compiler_flags $wl-soname $wl$soname $wl-retain-symbols-file $wl$export_symbols -o $lib' ;; esac _LT_TAGVAR(archive_cmds_need_lc, $1)=no _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='$wl-rpath,$libdir' _LT_TAGVAR(export_dynamic_flag_spec, $1)='$wl--export-dynamic' _LT_TAGVAR(whole_archive_flag_spec, $1)='$wl--whole-archive$convenience $wl--no-whole-archive' ;; pgCC* | pgcpp*) # Portland Group C++ compiler case `$CC -V` in *pgCC\ [[1-5]].* | *pgcpp\ [[1-5]].*) _LT_TAGVAR(prelink_cmds, $1)='tpldir=Template.dir~ rm -rf $tpldir~ $CC --prelink_objects --instantiation_dir $tpldir $objs $libobjs $compile_deplibs~ compile_command="$compile_command `find $tpldir -name \*.o | sort | $NL2SP`"' _LT_TAGVAR(old_archive_cmds, $1)='tpldir=Template.dir~ rm -rf $tpldir~ $CC --prelink_objects --instantiation_dir $tpldir $oldobjs$old_deplibs~ $AR $AR_FLAGS $oldlib$oldobjs$old_deplibs `find $tpldir -name \*.o | sort | $NL2SP`~ $RANLIB $oldlib' _LT_TAGVAR(archive_cmds, $1)='tpldir=Template.dir~ rm -rf $tpldir~ $CC --prelink_objects --instantiation_dir $tpldir $predep_objects $libobjs $deplibs $convenience $postdep_objects~ $CC -shared $pic_flag $predep_objects $libobjs $deplibs `find $tpldir -name \*.o | sort | $NL2SP` $postdep_objects $compiler_flags $wl-soname $wl$soname -o $lib' _LT_TAGVAR(archive_expsym_cmds, $1)='tpldir=Template.dir~ 
rm -rf $tpldir~ $CC --prelink_objects --instantiation_dir $tpldir $predep_objects $libobjs $deplibs $convenience $postdep_objects~ $CC -shared $pic_flag $predep_objects $libobjs $deplibs `find $tpldir -name \*.o | sort | $NL2SP` $postdep_objects $compiler_flags $wl-soname $wl$soname $wl-retain-symbols-file $wl$export_symbols -o $lib' ;; *) # Version 6 and above use weak symbols _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags $wl-soname $wl$soname -o $lib' _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $pic_flag $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags $wl-soname $wl$soname $wl-retain-symbols-file $wl$export_symbols -o $lib' ;; esac _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='$wl--rpath $wl$libdir' _LT_TAGVAR(export_dynamic_flag_spec, $1)='$wl--export-dynamic' _LT_TAGVAR(whole_archive_flag_spec, $1)='$wl--whole-archive`for conv in $convenience\"\"; do test -n \"$conv\" && new_convenience=\"$new_convenience,$conv\"; done; func_echo_all \"$new_convenience\"` $wl--no-whole-archive' ;; cxx*) # Compaq C++ _LT_TAGVAR(archive_cmds, $1)='$CC -shared $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags $wl-soname $wl$soname -o $lib' _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags $wl-soname $wl$soname -o $lib $wl-retain-symbols-file $wl$export_symbols' runpath_var=LD_RUN_PATH _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-rpath $libdir' _LT_TAGVAR(hardcode_libdir_separator, $1)=: # Commands to make compiler produce verbose output that lists # what "hidden" libraries, object files and flags are used when # linking a shared library. # # There doesn't appear to be a way to prevent this compiler from # explicitly linking system object files so we need to strip them # from the output so that they don't get included in the library # dependencies. 
output_verbose_link_cmd='templist=`$CC -shared $CFLAGS -v conftest.$objext 2>&1 | $GREP "ld"`; templist=`func_echo_all "$templist" | $SED "s/\(^.*ld.*\)\( .*ld .*$\)/\1/"`; list= ; for z in $templist; do case $z in conftest.$objext) list="$list $z";; *.$objext);; *) list="$list $z";;esac; done; func_echo_all "X$list" | $Xsed' ;; xl* | mpixl* | bgxl*) # IBM XL 8.0 on PPC, with GNU ld _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='$wl-rpath $wl$libdir' _LT_TAGVAR(export_dynamic_flag_spec, $1)='$wl--export-dynamic' _LT_TAGVAR(archive_cmds, $1)='$CC -qmkshrobj $libobjs $deplibs $compiler_flags $wl-soname $wl$soname -o $lib' if test yes = "$supports_anon_versioning"; then _LT_TAGVAR(archive_expsym_cmds, $1)='echo "{ global:" > $output_objdir/$libname.ver~ cat $export_symbols | sed -e "s/\(.*\)/\1;/" >> $output_objdir/$libname.ver~ echo "local: *; };" >> $output_objdir/$libname.ver~ $CC -qmkshrobj $libobjs $deplibs $compiler_flags $wl-soname $wl$soname $wl-version-script $wl$output_objdir/$libname.ver -o $lib' fi ;; *) case `$CC -V 2>&1 | sed 5q` in *Sun\ C*) # Sun C++ 5.9 _LT_TAGVAR(no_undefined_flag, $1)=' -zdefs' _LT_TAGVAR(archive_cmds, $1)='$CC -G$allow_undefined_flag -h$soname -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags' _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -G$allow_undefined_flag -h$soname -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags $wl-retain-symbols-file $wl$export_symbols' _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-R$libdir' _LT_TAGVAR(whole_archive_flag_spec, $1)='$wl--whole-archive`new_convenience=; for conv in $convenience\"\"; do test -z \"$conv\" || new_convenience=\"$new_convenience,$conv\"; done; func_echo_all \"$new_convenience\"` $wl--no-whole-archive' _LT_TAGVAR(compiler_needs_object, $1)=yes # Not sure whether something based on # $CC $CFLAGS -v conftest.$objext -o libconftest$shared_ext 2>&1 # would be better. 
output_verbose_link_cmd='func_echo_all' # Archives containing C++ object files must be created using # "CC -xar", where "CC" is the Sun C++ compiler. This is # necessary to make sure instantiated templates are included # in the archive. _LT_TAGVAR(old_archive_cmds, $1)='$CC -xar -o $oldlib $oldobjs' ;; esac ;; esac ;; lynxos*) # FIXME: insert proper C++ library support _LT_TAGVAR(ld_shlibs, $1)=no ;; m88k*) # FIXME: insert proper C++ library support _LT_TAGVAR(ld_shlibs, $1)=no ;; mvs*) case $cc_basename in cxx*) # FIXME: insert proper C++ library support _LT_TAGVAR(ld_shlibs, $1)=no ;; *) # FIXME: insert proper C++ library support _LT_TAGVAR(ld_shlibs, $1)=no ;; esac ;; netbsd*) if echo __ELF__ | $CC -E - | $GREP __ELF__ >/dev/null; then _LT_TAGVAR(archive_cmds, $1)='$LD -Bshareable -o $lib $predep_objects $libobjs $deplibs $postdep_objects $linker_flags' wlarc= _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-R$libdir' _LT_TAGVAR(hardcode_direct, $1)=yes _LT_TAGVAR(hardcode_shlibpath_var, $1)=no fi # Workaround some broken pre-1.5 toolchains output_verbose_link_cmd='$CC -shared $CFLAGS -v conftest.$objext 2>&1 | $GREP conftest.$objext | $SED -e "s:-lgcc -lc -lgcc::"' ;; *nto* | *qnx*) _LT_TAGVAR(ld_shlibs, $1)=yes ;; openbsd* | bitrig*) if test -f /usr/libexec/ld.so; then _LT_TAGVAR(hardcode_direct, $1)=yes _LT_TAGVAR(hardcode_shlibpath_var, $1)=no _LT_TAGVAR(hardcode_direct_absolute, $1)=yes _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags -o $lib' _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='$wl-rpath,$libdir' if test -z "`echo __ELF__ | $CC -E - | grep __ELF__`"; then _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $pic_flag $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags $wl-retain-symbols-file,$export_symbols -o $lib' _LT_TAGVAR(export_dynamic_flag_spec, $1)='$wl-E' _LT_TAGVAR(whole_archive_flag_spec, $1)=$wlarc'--whole-archive$convenience '$wlarc'--no-whole-archive' fi 
output_verbose_link_cmd=func_echo_all else _LT_TAGVAR(ld_shlibs, $1)=no fi ;; osf3* | osf4* | osf5*) case $cc_basename in KCC*) # Kuck and Associates, Inc. (KAI) C++ Compiler # KCC will only create a shared library if the output file # ends with ".so" (or ".sl" for HP-UX), so rename the library # to its proper name (with version) after linking. _LT_TAGVAR(archive_cmds, $1)='tempext=`echo $shared_ext | $SED -e '\''s/\([[^()0-9A-Za-z{}]]\)/\\\\\1/g'\''`; templib=`echo "$lib" | $SED -e "s/\$tempext\..*/.so/"`; $CC $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags --soname $soname -o \$templib; mv \$templib $lib' _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='$wl-rpath,$libdir' _LT_TAGVAR(hardcode_libdir_separator, $1)=: # Archives containing C++ object files must be created using # the KAI C++ compiler. case $host in osf3*) _LT_TAGVAR(old_archive_cmds, $1)='$CC -Bstatic -o $oldlib $oldobjs' ;; *) _LT_TAGVAR(old_archive_cmds, $1)='$CC -o $oldlib $oldobjs' ;; esac ;; RCC*) # Rational C++ 2.4.1 # FIXME: insert proper C++ library support _LT_TAGVAR(ld_shlibs, $1)=no ;; cxx*) case $host in osf3*) _LT_TAGVAR(allow_undefined_flag, $1)=' $wl-expect_unresolved $wl\*' _LT_TAGVAR(archive_cmds, $1)='$CC -shared$allow_undefined_flag $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags $wl-soname $soname `test -n "$verstring" && func_echo_all "$wl-set_version $verstring"` -update_registry $output_objdir/so_locations -o $lib' _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='$wl-rpath $wl$libdir' ;; *) _LT_TAGVAR(allow_undefined_flag, $1)=' -expect_unresolved \*' _LT_TAGVAR(archive_cmds, $1)='$CC -shared$allow_undefined_flag $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags -msym -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry $output_objdir/so_locations -o $lib' _LT_TAGVAR(archive_expsym_cmds, $1)='for i in `cat $export_symbols`; do printf "%s %s\\n" -exported_symbol "\$i" >> $lib.exp; done~ 
echo "-hidden">> $lib.exp~ $CC -shared$allow_undefined_flag $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags -msym -soname $soname $wl-input $wl$lib.exp `test -n "$verstring" && $ECHO "-set_version $verstring"` -update_registry $output_objdir/so_locations -o $lib~ $RM $lib.exp' _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-rpath $libdir' ;; esac _LT_TAGVAR(hardcode_libdir_separator, $1)=: # Commands to make compiler produce verbose output that lists # what "hidden" libraries, object files and flags are used when # linking a shared library. # # There doesn't appear to be a way to prevent this compiler from # explicitly linking system object files so we need to strip them # from the output so that they don't get included in the library # dependencies. output_verbose_link_cmd='templist=`$CC -shared $CFLAGS -v conftest.$objext 2>&1 | $GREP "ld" | $GREP -v "ld:"`; templist=`func_echo_all "$templist" | $SED "s/\(^.*ld.*\)\( .*ld.*$\)/\1/"`; list= ; for z in $templist; do case $z in conftest.$objext) list="$list $z";; *.$objext);; *) list="$list $z";;esac; done; func_echo_all "$list"' ;; *) if test yes,no = "$GXX,$with_gnu_ld"; then _LT_TAGVAR(allow_undefined_flag, $1)=' $wl-expect_unresolved $wl\*' case $host in osf3*) _LT_TAGVAR(archive_cmds, $1)='$CC -shared -nostdlib $allow_undefined_flag $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags $wl-soname $wl$soname `test -n "$verstring" && func_echo_all "$wl-set_version $wl$verstring"` $wl-update_registry $wl$output_objdir/so_locations -o $lib' ;; *) _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag -nostdlib $allow_undefined_flag $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags $wl-msym $wl-soname $wl$soname `test -n "$verstring" && func_echo_all "$wl-set_version $wl$verstring"` $wl-update_registry $wl$output_objdir/so_locations -o $lib' ;; esac _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='$wl-rpath $wl$libdir' _LT_TAGVAR(hardcode_libdir_separator, $1)=: # Commands to make 
compiler produce verbose output that lists # what "hidden" libraries, object files and flags are used when # linking a shared library. output_verbose_link_cmd='$CC -shared $CFLAGS -v conftest.$objext 2>&1 | $GREP -v "^Configured with:" | $GREP "\-L"' else # FIXME: insert proper C++ library support _LT_TAGVAR(ld_shlibs, $1)=no fi ;; esac ;; psos*) # FIXME: insert proper C++ library support _LT_TAGVAR(ld_shlibs, $1)=no ;; sunos4*) case $cc_basename in CC*) # Sun C++ 4.x # FIXME: insert proper C++ library support _LT_TAGVAR(ld_shlibs, $1)=no ;; lcc*) # Lucid # FIXME: insert proper C++ library support _LT_TAGVAR(ld_shlibs, $1)=no ;; *) # FIXME: insert proper C++ library support _LT_TAGVAR(ld_shlibs, $1)=no ;; esac ;; solaris*) case $cc_basename in CC* | sunCC*) # Sun C++ 4.2, 5.x and Centerline C++ _LT_TAGVAR(archive_cmds_need_lc,$1)=yes _LT_TAGVAR(no_undefined_flag, $1)=' -zdefs' _LT_TAGVAR(archive_cmds, $1)='$CC -G$allow_undefined_flag -h$soname -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags' _LT_TAGVAR(archive_expsym_cmds, $1)='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~ $CC -G$allow_undefined_flag $wl-M $wl$lib.exp -h$soname -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags~$RM $lib.exp' _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-R$libdir' _LT_TAGVAR(hardcode_shlibpath_var, $1)=no case $host_os in solaris2.[[0-5]] | solaris2.[[0-5]].*) ;; *) # The compiler driver will combine and reorder linker options, # but understands '-z linker_flag'. # Supported since Solaris 2.6 (maybe 2.5.1?) _LT_TAGVAR(whole_archive_flag_spec, $1)='-z allextract$convenience -z defaultextract' ;; esac _LT_TAGVAR(link_all_deplibs, $1)=yes output_verbose_link_cmd='func_echo_all' # Archives containing C++ object files must be created using # "CC -xar", where "CC" is the Sun C++ compiler. 
This is # necessary to make sure instantiated templates are included # in the archive. _LT_TAGVAR(old_archive_cmds, $1)='$CC -xar -o $oldlib $oldobjs' ;; gcx*) # Green Hills C++ Compiler _LT_TAGVAR(archive_cmds, $1)='$CC -shared $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags $wl-h $wl$soname -o $lib' # The C++ compiler must be used to create the archive. _LT_TAGVAR(old_archive_cmds, $1)='$CC $LDFLAGS -archive -o $oldlib $oldobjs' ;; *) # GNU C++ compiler with Solaris linker if test yes,no = "$GXX,$with_gnu_ld"; then _LT_TAGVAR(no_undefined_flag, $1)=' $wl-z ${wl}defs' if $CC --version | $GREP -v '^2\.7' > /dev/null; then _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags $wl-h $wl$soname -o $lib' _LT_TAGVAR(archive_expsym_cmds, $1)='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~ $CC -shared $pic_flag -nostdlib $wl-M $wl$lib.exp $wl-h $wl$soname -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags~$RM $lib.exp' # Commands to make compiler produce verbose output that lists # what "hidden" libraries, object files and flags are used when # linking a shared library. output_verbose_link_cmd='$CC -shared $CFLAGS -v conftest.$objext 2>&1 | $GREP -v "^Configured with:" | $GREP "\-L"' else # g++ 2.7 appears to require '-G' NOT '-shared' on this # platform. 
_LT_TAGVAR(archive_cmds, $1)='$CC -G -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags $wl-h $wl$soname -o $lib' _LT_TAGVAR(archive_expsym_cmds, $1)='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~ $CC -G -nostdlib $wl-M $wl$lib.exp $wl-h $wl$soname -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags~$RM $lib.exp' # Commands to make compiler produce verbose output that lists # what "hidden" libraries, object files and flags are used when # linking a shared library. output_verbose_link_cmd='$CC -G $CFLAGS -v conftest.$objext 2>&1 | $GREP -v "^Configured with:" | $GREP "\-L"' fi _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='$wl-R $wl$libdir' case $host_os in solaris2.[[0-5]] | solaris2.[[0-5]].*) ;; *) _LT_TAGVAR(whole_archive_flag_spec, $1)='$wl-z ${wl}allextract$convenience $wl-z ${wl}defaultextract' ;; esac fi ;; esac ;; sysv4*uw2* | sysv5OpenUNIX* | sysv5UnixWare7.[[01]].[[10]]* | unixware7* | sco3.2v5.0.[[024]]*) _LT_TAGVAR(no_undefined_flag, $1)='$wl-z,text' _LT_TAGVAR(archive_cmds_need_lc, $1)=no _LT_TAGVAR(hardcode_shlibpath_var, $1)=no runpath_var='LD_RUN_PATH' case $cc_basename in CC*) _LT_TAGVAR(archive_cmds, $1)='$CC -G $wl-h,$soname -o $lib $libobjs $deplibs $compiler_flags' _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -G $wl-Bexport:$export_symbols $wl-h,$soname -o $lib $libobjs $deplibs $compiler_flags' ;; *) _LT_TAGVAR(archive_cmds, $1)='$CC -shared $wl-h,$soname -o $lib $libobjs $deplibs $compiler_flags' _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $wl-Bexport:$export_symbols $wl-h,$soname -o $lib $libobjs $deplibs $compiler_flags' ;; esac ;; sysv5* | sco3.2v5* | sco5v6*) # Note: We CANNOT use -z defs as we might desire, because we do not # link with -lc, and that would cause any symbols used from libc to # always be unresolved, which means just about no library would # ever link correctly. 
If we're not using GNU ld we use -z text # though, which does catch some bad symbols but isn't as heavy-handed # as -z defs. _LT_TAGVAR(no_undefined_flag, $1)='$wl-z,text' _LT_TAGVAR(allow_undefined_flag, $1)='$wl-z,nodefs' _LT_TAGVAR(archive_cmds_need_lc, $1)=no _LT_TAGVAR(hardcode_shlibpath_var, $1)=no _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='$wl-R,$libdir' _LT_TAGVAR(hardcode_libdir_separator, $1)=':' _LT_TAGVAR(link_all_deplibs, $1)=yes _LT_TAGVAR(export_dynamic_flag_spec, $1)='$wl-Bexport' runpath_var='LD_RUN_PATH' case $cc_basename in CC*) _LT_TAGVAR(archive_cmds, $1)='$CC -G $wl-h,$soname -o $lib $libobjs $deplibs $compiler_flags' _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -G $wl-Bexport:$export_symbols $wl-h,$soname -o $lib $libobjs $deplibs $compiler_flags' _LT_TAGVAR(old_archive_cmds, $1)='$CC -Tprelink_objects $oldobjs~ '"$_LT_TAGVAR(old_archive_cmds, $1)" _LT_TAGVAR(reload_cmds, $1)='$CC -Tprelink_objects $reload_objs~ '"$_LT_TAGVAR(reload_cmds, $1)" ;; *) _LT_TAGVAR(archive_cmds, $1)='$CC -shared $wl-h,$soname -o $lib $libobjs $deplibs $compiler_flags' _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $wl-Bexport:$export_symbols $wl-h,$soname -o $lib $libobjs $deplibs $compiler_flags' ;; esac ;; tandem*) case $cc_basename in NCC*) # NonStop-UX NCC 3.20 # FIXME: insert proper C++ library support _LT_TAGVAR(ld_shlibs, $1)=no ;; *) # FIXME: insert proper C++ library support _LT_TAGVAR(ld_shlibs, $1)=no ;; esac ;; vxworks*) # FIXME: insert proper C++ library support _LT_TAGVAR(ld_shlibs, $1)=no ;; *) # FIXME: insert proper C++ library support _LT_TAGVAR(ld_shlibs, $1)=no ;; esac AC_MSG_RESULT([$_LT_TAGVAR(ld_shlibs, $1)]) test no = "$_LT_TAGVAR(ld_shlibs, $1)" && can_build_shared=no _LT_TAGVAR(GCC, $1)=$GXX _LT_TAGVAR(LD, $1)=$LD ## CAVEAT EMPTOR: ## There is no encapsulation within the following macros, do not change ## the running order or otherwise move them around unless you know exactly ## what you are doing... 
_LT_SYS_HIDDEN_LIBDEPS($1)
_LT_COMPILER_PIC($1)
_LT_COMPILER_C_O($1)
_LT_COMPILER_FILE_LOCKS($1)
_LT_LINKER_SHLIBS($1)
_LT_SYS_DYNAMIC_LINKER($1)
_LT_LINKER_HARDCODE_LIBPATH($1)

_LT_CONFIG($1)
fi # test -n "$compiler"

# Restore the C-language settings saved before the C++ probes ran; the C++
# results were recorded in the CXX-tagged variables by _LT_CONFIG above.
CC=$lt_save_CC
CFLAGS=$lt_save_CFLAGS
LDCXX=$LD
LD=$lt_save_LD
GCC=$lt_save_GCC
with_gnu_ld=$lt_save_with_gnu_ld
lt_cv_path_LDCXX=$lt_cv_path_LD
lt_cv_path_LD=$lt_save_path_LD
lt_cv_prog_gnu_ldcxx=$lt_cv_prog_gnu_ld
lt_cv_prog_gnu_ld=$lt_save_with_gnu_ld
fi # test yes != "$_lt_caught_CXX_error"

AC_LANG_POP
])# _LT_LANG_CXX_CONFIG


# _LT_FUNC_STRIPNAME_CNF
# ----------------------
# func_stripname_cnf prefix suffix name
# strip PREFIX and SUFFIX off of NAME.
# PREFIX and SUFFIX must not contain globbing or regex special
# characters, hashes, percent signs, but SUFFIX may contain a leading
# dot (in which case that matches only a dot).
#
# This function is identical to the (non-XSI) version of func_stripname,
# except this one can be used by m4 code that may be executed by configure,
# rather than the libtool script.
m4_defun([_LT_FUNC_STRIPNAME_CNF],[dnl
AC_REQUIRE([_LT_DECL_SED])
AC_REQUIRE([_LT_PROG_ECHO_BACKSLASH])
# NOTE: @S|@ is the Autoconf quadrigraph for '$', so @S|@1/@S|@2/@S|@3
# expand to the shell positional parameters $1/$2/$3 in configure.
func_stripname_cnf ()
{
  case @S|@2 in
  .*) func_stripname_result=`$ECHO "@S|@3" | $SED "s%^@S|@1%%; s%\\\\@S|@2\$%%"`;;
  *)  func_stripname_result=`$ECHO "@S|@3" | $SED "s%^@S|@1%%; s%@S|@2\$%%"`;;
  esac
} # func_stripname_cnf
])# _LT_FUNC_STRIPNAME_CNF


# _LT_SYS_HIDDEN_LIBDEPS([TAGNAME])
# ---------------------------------
# Figure out "hidden" library dependencies from verbose
# compiler output when linking a shared library.
# Parse the compiler output and extract the necessary
# objects, libraries and library flags.
m4_defun([_LT_SYS_HIDDEN_LIBDEPS],
[m4_require([_LT_FILEUTILS_DEFAULTS])dnl
AC_REQUIRE([_LT_FUNC_STRIPNAME_CNF])dnl
# Dependencies to place before and after the object being linked:
_LT_TAGVAR(predep_objects, $1)=
_LT_TAGVAR(postdep_objects, $1)=
_LT_TAGVAR(predeps, $1)=
_LT_TAGVAR(postdeps, $1)=
_LT_TAGVAR(compiler_lib_search_path, $1)=

dnl we can't use the lt_simple_compile_test_code here,
dnl because it contains code intended for an executable,
dnl not a library.  It's possible we should let each
dnl tag define a new lt_????_link_test_code variable,
dnl but it's only used here...
m4_if([$1], [], [cat > conftest.$ac_ext <<_LT_EOF
int a;
void foo (void) { a = 0; }
_LT_EOF
], [$1], [CXX], [cat > conftest.$ac_ext <<_LT_EOF
class Foo
{
public:
  Foo (void) { a = 0; }
private:
  int a;
};
_LT_EOF
], [$1], [F77], [cat > conftest.$ac_ext <<_LT_EOF
      subroutine foo
      implicit none
      integer*4 a
      a=0
      return
      end
_LT_EOF
], [$1], [FC], [cat > conftest.$ac_ext <<_LT_EOF
      subroutine foo
      implicit none
      integer a
      a=0
      return
      end
_LT_EOF
], [$1], [GCJ], [cat > conftest.$ac_ext <<_LT_EOF
public class foo {
  private int a;
  public void bar (void) {
    a = 0;
  }
};
_LT_EOF
], [$1], [GO], [cat > conftest.$ac_ext <<_LT_EOF
package foo
func foo() {
}
_LT_EOF
])

_lt_libdeps_save_CFLAGS=$CFLAGS
case "$CC $CFLAGS " in #(
*\ -flto*\ *) CFLAGS="$CFLAGS -fno-lto" ;;
*\ -fwhopr*\ *) CFLAGS="$CFLAGS -fno-whopr" ;;
*\ -fuse-linker-plugin*\ *) CFLAGS="$CFLAGS -fno-use-linker-plugin" ;;
esac

dnl Parse the compiler output and extract the necessary
dnl objects, libraries and library flags.
if AC_TRY_EVAL(ac_compile); then
  # Parse the compiler output and extract the necessary
  # objects, libraries and library flags.

  # Sentinel used to keep track of whether or not we are before
  # the conftest object file.
  pre_test_object_deps_done=no

  for p in `eval "$output_verbose_link_cmd"`; do
    case $prev$p in

    -L* | -R* | -l*)
       # Some compilers place space between "-{L,R}" and the path.
       # Remove the space.
       if test x-L = "$p" ||
          test x-R = "$p"; then
	 prev=$p
	 continue
       fi

       # Expand the sysroot to ease extracting the directories later.
       if test -z "$prev"; then
         case $p in
         -L*) func_stripname_cnf '-L' '' "$p"; prev=-L; p=$func_stripname_result ;;
         -R*) func_stripname_cnf '-R' '' "$p"; prev=-R; p=$func_stripname_result ;;
         -l*) func_stripname_cnf '-l' '' "$p"; prev=-l; p=$func_stripname_result ;;
         esac
       fi
       case $p in
       =*) func_stripname_cnf '=' '' "$p"; p=$lt_sysroot$func_stripname_result ;;
       esac
       if test no = "$pre_test_object_deps_done"; then
	 case $prev in
	 -L | -R)
	   # Internal compiler library paths should come after those
	   # provided the user.  The postdeps already come after the
	   # user supplied libs so there is no need to process them.
	   if test -z "$_LT_TAGVAR(compiler_lib_search_path, $1)"; then
	     _LT_TAGVAR(compiler_lib_search_path, $1)=$prev$p
	   else
	     _LT_TAGVAR(compiler_lib_search_path, $1)="${_LT_TAGVAR(compiler_lib_search_path, $1)} $prev$p"
	   fi
	   ;;
	 # The "-l" case would never come before the object being
	 # linked, so don't bother handling this case.
	 esac
       else
	 if test -z "$_LT_TAGVAR(postdeps, $1)"; then
	   _LT_TAGVAR(postdeps, $1)=$prev$p
	 else
	   _LT_TAGVAR(postdeps, $1)="${_LT_TAGVAR(postdeps, $1)} $prev$p"
	 fi
       fi
       prev=
       ;;

    *.lto.$objext) ;; # Ignore GCC LTO objects
    *.$objext)
       # This assumes that the test object file only shows up
       # once in the compiler output.
       if test "$p" = "conftest.$objext"; then
	 pre_test_object_deps_done=yes
	 continue
       fi

       if test no = "$pre_test_object_deps_done"; then
	 if test -z "$_LT_TAGVAR(predep_objects, $1)"; then
	   _LT_TAGVAR(predep_objects, $1)=$p
	 else
	   _LT_TAGVAR(predep_objects, $1)="$_LT_TAGVAR(predep_objects, $1) $p"
	 fi
       else
	 if test -z "$_LT_TAGVAR(postdep_objects, $1)"; then
	   _LT_TAGVAR(postdep_objects, $1)=$p
	 else
	   _LT_TAGVAR(postdep_objects, $1)="$_LT_TAGVAR(postdep_objects, $1) $p"
	 fi
       fi
       ;;

    *) ;; # Ignore the rest.

    esac
  done

  # Clean up.
  rm -f a.out a.exe
else
  echo "libtool.m4: error: problem compiling $1 test program"
fi

# Remove the test object created above.  (Fixed: this previously said
# 'confest.$objext', a typo that left conftest.$objext behind after
# every configure run.)
$RM -f conftest.$objext
CFLAGS=$_lt_libdeps_save_CFLAGS

# PORTME: override above test on systems where it is broken
m4_if([$1], [CXX],
[case $host_os in
interix[[3-9]]*)
  # Interix 3.5 installs completely hosed .la files for C++, so rather than
  # hack all around it, let's just trust "g++" to DTRT.
  _LT_TAGVAR(predep_objects,$1)=
  _LT_TAGVAR(postdep_objects,$1)=
  _LT_TAGVAR(postdeps,$1)=
  ;;
esac
])

case " $_LT_TAGVAR(postdeps, $1) " in
*" -lc "*) _LT_TAGVAR(archive_cmds_need_lc, $1)=no ;;
esac
 _LT_TAGVAR(compiler_lib_search_dirs, $1)=
if test -n "${_LT_TAGVAR(compiler_lib_search_path, $1)}"; then
 _LT_TAGVAR(compiler_lib_search_dirs, $1)=`echo " ${_LT_TAGVAR(compiler_lib_search_path, $1)}" | $SED -e 's! -L! !g' -e 's!^ !!'`
fi
_LT_TAGDECL([], [compiler_lib_search_dirs], [1],
    [The directories searched by this compiler when creating a shared library])
_LT_TAGDECL([], [predep_objects], [1],
    [Dependencies to place before and after the objects being linked to
    create a shared library])
_LT_TAGDECL([], [postdep_objects], [1])
_LT_TAGDECL([], [predeps], [1])
_LT_TAGDECL([], [postdeps], [1])
_LT_TAGDECL([], [compiler_lib_search_path], [1],
    [The library search path used internally by the compiler when linking
    a shared library])
])# _LT_SYS_HIDDEN_LIBDEPS


# _LT_LANG_F77_CONFIG([TAG])
# --------------------------
# Ensure that the configuration variables for a Fortran 77 compiler are
# suitably defined.  These variables are subsequently used by _LT_CONFIG
# to write the compiler configuration to 'libtool'.
m4_defun([_LT_LANG_F77_CONFIG], [AC_LANG_PUSH(Fortran 77) if test -z "$F77" || test no = "$F77"; then _lt_disable_F77=yes fi _LT_TAGVAR(archive_cmds_need_lc, $1)=no _LT_TAGVAR(allow_undefined_flag, $1)= _LT_TAGVAR(always_export_symbols, $1)=no _LT_TAGVAR(archive_expsym_cmds, $1)= _LT_TAGVAR(export_dynamic_flag_spec, $1)= _LT_TAGVAR(hardcode_direct, $1)=no _LT_TAGVAR(hardcode_direct_absolute, $1)=no _LT_TAGVAR(hardcode_libdir_flag_spec, $1)= _LT_TAGVAR(hardcode_libdir_separator, $1)= _LT_TAGVAR(hardcode_minus_L, $1)=no _LT_TAGVAR(hardcode_automatic, $1)=no _LT_TAGVAR(inherit_rpath, $1)=no _LT_TAGVAR(module_cmds, $1)= _LT_TAGVAR(module_expsym_cmds, $1)= _LT_TAGVAR(link_all_deplibs, $1)=unknown _LT_TAGVAR(old_archive_cmds, $1)=$old_archive_cmds _LT_TAGVAR(reload_flag, $1)=$reload_flag _LT_TAGVAR(reload_cmds, $1)=$reload_cmds _LT_TAGVAR(no_undefined_flag, $1)= _LT_TAGVAR(whole_archive_flag_spec, $1)= _LT_TAGVAR(enable_shared_with_static_runtimes, $1)=no # Source file extension for f77 test sources. ac_ext=f # Object file extension for compiled f77 test sources. objext=o _LT_TAGVAR(objext, $1)=$objext # No sense in running all these tests if we already determined that # the F77 compiler isn't working. Some variables (like enable_shared) # are currently assumed to apply to all compilers on this platform, # and will be corrupted by setting them based on a non-working compiler. if test yes != "$_lt_disable_F77"; then # Code to be used in simple compile tests lt_simple_compile_test_code="\ subroutine t return end " # Code to be used in simple link tests lt_simple_link_test_code="\ program t end " # ltmain only uses $CC for tagged configurations so make sure $CC is set. _LT_TAG_COMPILER # save warnings/boilerplate of simple test code _LT_COMPILER_BOILERPLATE _LT_LINKER_BOILERPLATE # Allow CC to be a program name with arguments. 
lt_save_CC=$CC lt_save_GCC=$GCC lt_save_CFLAGS=$CFLAGS CC=${F77-"f77"} CFLAGS=$FFLAGS compiler=$CC _LT_TAGVAR(compiler, $1)=$CC _LT_CC_BASENAME([$compiler]) GCC=$G77 if test -n "$compiler"; then AC_MSG_CHECKING([if libtool supports shared libraries]) AC_MSG_RESULT([$can_build_shared]) AC_MSG_CHECKING([whether to build shared libraries]) test no = "$can_build_shared" && enable_shared=no # On AIX, shared libraries and static libraries use the same namespace, and # are all built from PIC. case $host_os in aix3*) test yes = "$enable_shared" && enable_static=no if test -n "$RANLIB"; then archive_cmds="$archive_cmds~\$RANLIB \$lib" postinstall_cmds='$RANLIB $lib' fi ;; aix[[4-9]]*) if test ia64 != "$host_cpu"; then case $enable_shared,$with_aix_soname,$aix_use_runtimelinking in yes,aix,yes) ;; # shared object as lib.so file only yes,svr4,*) ;; # shared object as lib.so archive member only yes,*) enable_static=no ;; # shared object in lib.a archive as well esac fi ;; esac AC_MSG_RESULT([$enable_shared]) AC_MSG_CHECKING([whether to build static libraries]) # Make sure either enable_shared or enable_static is yes. test yes = "$enable_shared" || enable_static=yes AC_MSG_RESULT([$enable_static]) _LT_TAGVAR(GCC, $1)=$G77 _LT_TAGVAR(LD, $1)=$LD ## CAVEAT EMPTOR: ## There is no encapsulation within the following macros, do not change ## the running order or otherwise move them around unless you know exactly ## what you are doing... _LT_COMPILER_PIC($1) _LT_COMPILER_C_O($1) _LT_COMPILER_FILE_LOCKS($1) _LT_LINKER_SHLIBS($1) _LT_SYS_DYNAMIC_LINKER($1) _LT_LINKER_HARDCODE_LIBPATH($1) _LT_CONFIG($1) fi # test -n "$compiler" GCC=$lt_save_GCC CC=$lt_save_CC CFLAGS=$lt_save_CFLAGS fi # test yes != "$_lt_disable_F77" AC_LANG_POP ])# _LT_LANG_F77_CONFIG # _LT_LANG_FC_CONFIG([TAG]) # ------------------------- # Ensure that the configuration variables for a Fortran compiler are # suitably defined. 
These variables are subsequently used by _LT_CONFIG # to write the compiler configuration to 'libtool'. m4_defun([_LT_LANG_FC_CONFIG], [AC_LANG_PUSH(Fortran) if test -z "$FC" || test no = "$FC"; then _lt_disable_FC=yes fi _LT_TAGVAR(archive_cmds_need_lc, $1)=no _LT_TAGVAR(allow_undefined_flag, $1)= _LT_TAGVAR(always_export_symbols, $1)=no _LT_TAGVAR(archive_expsym_cmds, $1)= _LT_TAGVAR(export_dynamic_flag_spec, $1)= _LT_TAGVAR(hardcode_direct, $1)=no _LT_TAGVAR(hardcode_direct_absolute, $1)=no _LT_TAGVAR(hardcode_libdir_flag_spec, $1)= _LT_TAGVAR(hardcode_libdir_separator, $1)= _LT_TAGVAR(hardcode_minus_L, $1)=no _LT_TAGVAR(hardcode_automatic, $1)=no _LT_TAGVAR(inherit_rpath, $1)=no _LT_TAGVAR(module_cmds, $1)= _LT_TAGVAR(module_expsym_cmds, $1)= _LT_TAGVAR(link_all_deplibs, $1)=unknown _LT_TAGVAR(old_archive_cmds, $1)=$old_archive_cmds _LT_TAGVAR(reload_flag, $1)=$reload_flag _LT_TAGVAR(reload_cmds, $1)=$reload_cmds _LT_TAGVAR(no_undefined_flag, $1)= _LT_TAGVAR(whole_archive_flag_spec, $1)= _LT_TAGVAR(enable_shared_with_static_runtimes, $1)=no # Source file extension for fc test sources. ac_ext=${ac_fc_srcext-f} # Object file extension for compiled fc test sources. objext=o _LT_TAGVAR(objext, $1)=$objext # No sense in running all these tests if we already determined that # the FC compiler isn't working. Some variables (like enable_shared) # are currently assumed to apply to all compilers on this platform, # and will be corrupted by setting them based on a non-working compiler. if test yes != "$_lt_disable_FC"; then # Code to be used in simple compile tests lt_simple_compile_test_code="\ subroutine t return end " # Code to be used in simple link tests lt_simple_link_test_code="\ program t end " # ltmain only uses $CC for tagged configurations so make sure $CC is set. _LT_TAG_COMPILER # save warnings/boilerplate of simple test code _LT_COMPILER_BOILERPLATE _LT_LINKER_BOILERPLATE # Allow CC to be a program name with arguments. 
lt_save_CC=$CC lt_save_GCC=$GCC lt_save_CFLAGS=$CFLAGS CC=${FC-"f95"} CFLAGS=$FCFLAGS compiler=$CC GCC=$ac_cv_fc_compiler_gnu _LT_TAGVAR(compiler, $1)=$CC _LT_CC_BASENAME([$compiler]) if test -n "$compiler"; then AC_MSG_CHECKING([if libtool supports shared libraries]) AC_MSG_RESULT([$can_build_shared]) AC_MSG_CHECKING([whether to build shared libraries]) test no = "$can_build_shared" && enable_shared=no # On AIX, shared libraries and static libraries use the same namespace, and # are all built from PIC. case $host_os in aix3*) test yes = "$enable_shared" && enable_static=no if test -n "$RANLIB"; then archive_cmds="$archive_cmds~\$RANLIB \$lib" postinstall_cmds='$RANLIB $lib' fi ;; aix[[4-9]]*) if test ia64 != "$host_cpu"; then case $enable_shared,$with_aix_soname,$aix_use_runtimelinking in yes,aix,yes) ;; # shared object as lib.so file only yes,svr4,*) ;; # shared object as lib.so archive member only yes,*) enable_static=no ;; # shared object in lib.a archive as well esac fi ;; esac AC_MSG_RESULT([$enable_shared]) AC_MSG_CHECKING([whether to build static libraries]) # Make sure either enable_shared or enable_static is yes. test yes = "$enable_shared" || enable_static=yes AC_MSG_RESULT([$enable_static]) _LT_TAGVAR(GCC, $1)=$ac_cv_fc_compiler_gnu _LT_TAGVAR(LD, $1)=$LD ## CAVEAT EMPTOR: ## There is no encapsulation within the following macros, do not change ## the running order or otherwise move them around unless you know exactly ## what you are doing... _LT_SYS_HIDDEN_LIBDEPS($1) _LT_COMPILER_PIC($1) _LT_COMPILER_C_O($1) _LT_COMPILER_FILE_LOCKS($1) _LT_LINKER_SHLIBS($1) _LT_SYS_DYNAMIC_LINKER($1) _LT_LINKER_HARDCODE_LIBPATH($1) _LT_CONFIG($1) fi # test -n "$compiler" GCC=$lt_save_GCC CC=$lt_save_CC CFLAGS=$lt_save_CFLAGS fi # test yes != "$_lt_disable_FC" AC_LANG_POP ])# _LT_LANG_FC_CONFIG # _LT_LANG_GCJ_CONFIG([TAG]) # -------------------------- # Ensure that the configuration variables for the GNU Java Compiler compiler # are suitably defined. 
# NOTE(review): vendored GNU Libtool 2.4.6 macro code; regenerate with
# aclocal/libtoolize rather than editing by hand.
# These variables are subsequently used by _LT_CONFIG
# to write the compiler configuration to 'libtool'.
m4_defun([_LT_LANG_GCJ_CONFIG],
[AC_REQUIRE([LT_PROG_GCJ])dnl
AC_LANG_SAVE

# Source file extension for Java test sources.
ac_ext=java

# Object file extension for compiled Java test sources.
objext=o
_LT_TAGVAR(objext, $1)=$objext

# Code to be used in simple compile tests
lt_simple_compile_test_code="class foo {}"

# Code to be used in simple link tests
lt_simple_link_test_code='public class conftest { public static void main(String[[]] argv) {}; }'

# ltmain only uses $CC for tagged configurations so make sure $CC is set.
_LT_TAG_COMPILER

# save warnings/boilerplate of simple test code
_LT_COMPILER_BOILERPLATE
_LT_LINKER_BOILERPLATE

# Allow CC to be a program name with arguments.
lt_save_CC=$CC
lt_save_CFLAGS=$CFLAGS
lt_save_GCC=$GCC
GCC=yes
CC=${GCJ-"gcj"}
CFLAGS=$GCJFLAGS
compiler=$CC
_LT_TAGVAR(compiler, $1)=$CC
_LT_TAGVAR(LD, $1)=$LD
_LT_CC_BASENAME([$compiler])

# GCJ did not exist at the time GCC didn't implicitly link libc in.
_LT_TAGVAR(archive_cmds_need_lc, $1)=no

_LT_TAGVAR(old_archive_cmds, $1)=$old_archive_cmds
_LT_TAGVAR(reload_flag, $1)=$reload_flag
_LT_TAGVAR(reload_cmds, $1)=$reload_cmds

if test -n "$compiler"; then
  _LT_COMPILER_NO_RTTI($1)
  _LT_COMPILER_PIC($1)
  _LT_COMPILER_C_O($1)
  _LT_COMPILER_FILE_LOCKS($1)
  _LT_LINKER_SHLIBS($1)
  _LT_LINKER_HARDCODE_LIBPATH($1)

  _LT_CONFIG($1)
fi

AC_LANG_RESTORE

GCC=$lt_save_GCC
CC=$lt_save_CC
CFLAGS=$lt_save_CFLAGS
])# _LT_LANG_GCJ_CONFIG


# _LT_LANG_GO_CONFIG([TAG])
# --------------------------
# Ensure that the configuration variables for the GNU Go compiler
# are suitably defined.  These variables are subsequently used by _LT_CONFIG
# to write the compiler configuration to 'libtool'.
m4_defun([_LT_LANG_GO_CONFIG],
[AC_REQUIRE([LT_PROG_GO])dnl
AC_LANG_SAVE

# Source file extension for Go test sources.
ac_ext=go

# Object file extension for compiled Go test sources.
objext=o
_LT_TAGVAR(objext, $1)=$objext

# Code to be used in simple compile tests
lt_simple_compile_test_code="package main; func main() { }"

# Code to be used in simple link tests
lt_simple_link_test_code='package main; func main() { }'

# ltmain only uses $CC for tagged configurations so make sure $CC is set.
_LT_TAG_COMPILER

# save warnings/boilerplate of simple test code
_LT_COMPILER_BOILERPLATE
_LT_LINKER_BOILERPLATE

# Allow CC to be a program name with arguments.
lt_save_CC=$CC
lt_save_CFLAGS=$CFLAGS
lt_save_GCC=$GCC
GCC=yes
CC=${GOC-"gccgo"}
CFLAGS=$GOFLAGS
compiler=$CC
_LT_TAGVAR(compiler, $1)=$CC
_LT_TAGVAR(LD, $1)=$LD
_LT_CC_BASENAME([$compiler])

# Go did not exist at the time GCC didn't implicitly link libc in.
_LT_TAGVAR(archive_cmds_need_lc, $1)=no

_LT_TAGVAR(old_archive_cmds, $1)=$old_archive_cmds
_LT_TAGVAR(reload_flag, $1)=$reload_flag
_LT_TAGVAR(reload_cmds, $1)=$reload_cmds

if test -n "$compiler"; then
  _LT_COMPILER_NO_RTTI($1)
  _LT_COMPILER_PIC($1)
  _LT_COMPILER_C_O($1)
  _LT_COMPILER_FILE_LOCKS($1)
  _LT_LINKER_SHLIBS($1)
  _LT_LINKER_HARDCODE_LIBPATH($1)

  _LT_CONFIG($1)
fi

AC_LANG_RESTORE

GCC=$lt_save_GCC
CC=$lt_save_CC
CFLAGS=$lt_save_CFLAGS
])# _LT_LANG_GO_CONFIG


# _LT_LANG_RC_CONFIG([TAG])
# -------------------------
# Ensure that the configuration variables for the Windows resource compiler
# are suitably defined.  These variables are subsequently used by _LT_CONFIG
# to write the compiler configuration to 'libtool'.
m4_defun([_LT_LANG_RC_CONFIG],
[AC_REQUIRE([LT_PROG_RC])dnl
AC_LANG_SAVE

# Source file extension for RC test sources.
ac_ext=rc

# Object file extension for compiled RC test sources.
objext=o
_LT_TAGVAR(objext, $1)=$objext

# Code to be used in simple compile tests
lt_simple_compile_test_code='sample MENU { MENUITEM "&Soup", 100, CHECKED }'

# Code to be used in simple link tests
lt_simple_link_test_code=$lt_simple_compile_test_code

# ltmain only uses $CC for tagged configurations so make sure $CC is set.
_LT_TAG_COMPILER

# save warnings/boilerplate of simple test code
_LT_COMPILER_BOILERPLATE
_LT_LINKER_BOILERPLATE

# Allow CC to be a program name with arguments.
lt_save_CC=$CC
lt_save_CFLAGS=$CFLAGS
lt_save_GCC=$GCC
GCC=
CC=${RC-"windres"}
CFLAGS=
compiler=$CC
_LT_TAGVAR(compiler, $1)=$CC
_LT_CC_BASENAME([$compiler])
_LT_TAGVAR(lt_cv_prog_compiler_c_o, $1)=yes

if test -n "$compiler"; then
  :
  _LT_CONFIG($1)
fi

GCC=$lt_save_GCC
AC_LANG_RESTORE
CC=$lt_save_CC
CFLAGS=$lt_save_CFLAGS
])# _LT_LANG_RC_CONFIG


# LT_PROG_GCJ
# -----------
AC_DEFUN([LT_PROG_GCJ],
[m4_ifdef([AC_PROG_GCJ], [AC_PROG_GCJ],
  [m4_ifdef([A][M_PROG_GCJ], [A][M_PROG_GCJ],
    [AC_CHECK_TOOL(GCJ, gcj,)
     test set = "${GCJFLAGS+set}" || GCJFLAGS="-g -O2"
     AC_SUBST(GCJFLAGS)])])[]dnl
])

# Old name:
AU_ALIAS([LT_AC_PROG_GCJ], [LT_PROG_GCJ])
dnl aclocal-1.4 backwards compatibility:
dnl AC_DEFUN([LT_AC_PROG_GCJ], [])


# LT_PROG_GO
# ----------
AC_DEFUN([LT_PROG_GO],
[AC_CHECK_TOOL(GOC, gccgo,)
])


# LT_PROG_RC
# ----------
AC_DEFUN([LT_PROG_RC],
[AC_CHECK_TOOL(RC, windres,)
])

# Old name:
AU_ALIAS([LT_AC_PROG_RC], [LT_PROG_RC])
dnl aclocal-1.4 backwards compatibility:
dnl AC_DEFUN([LT_AC_PROG_RC], [])


# _LT_DECL_EGREP
# --------------
# If we don't have a new enough Autoconf to choose the best grep
# available, choose the one first in the user's PATH.
m4_defun([_LT_DECL_EGREP],
[AC_REQUIRE([AC_PROG_EGREP])dnl
AC_REQUIRE([AC_PROG_FGREP])dnl
test -z "$GREP" && GREP=grep
_LT_DECL([], [GREP], [1], [A grep program that handles long lines])
_LT_DECL([], [EGREP], [1], [An ERE matcher])
_LT_DECL([], [FGREP], [1], [A literal string matcher])
dnl Non-bleeding-edge autoconf doesn't subst GREP, so do it here too
AC_SUBST([GREP])
])


# _LT_DECL_OBJDUMP
# --------------
# If we don't have a new enough Autoconf to choose the best objdump
# available, choose the one first in the user's PATH.
# NOTE(review): vendored GNU Libtool 2.4.6 macro code; regenerate with
# aclocal/libtoolize rather than editing by hand.
m4_defun([_LT_DECL_OBJDUMP],
[AC_CHECK_TOOL(OBJDUMP, objdump, false)
test -z "$OBJDUMP" && OBJDUMP=objdump
_LT_DECL([], [OBJDUMP], [1], [An object symbol dumper])
AC_SUBST([OBJDUMP])
])

# _LT_DECL_DLLTOOL
# ----------------
# Ensure DLLTOOL variable is set.
m4_defun([_LT_DECL_DLLTOOL],
[AC_CHECK_TOOL(DLLTOOL, dlltool, false)
test -z "$DLLTOOL" && DLLTOOL=dlltool
_LT_DECL([], [DLLTOOL], [1], [DLL creation program])
AC_SUBST([DLLTOOL])
])

# _LT_DECL_SED
# ------------
# Check for a fully-functional sed program, that truncates
# as few characters as possible.  Prefer GNU sed if found.
m4_defun([_LT_DECL_SED],
[AC_PROG_SED
test -z "$SED" && SED=sed
Xsed="$SED -e 1s/^X//"
_LT_DECL([], [SED], [1], [A sed program that does not truncate output])
_LT_DECL([], [Xsed], ["\$SED -e 1s/^X//"],
    [Sed that helps us avoid accidentally triggering echo(1) options like -n])
])# _LT_DECL_SED

m4_ifndef([AC_PROG_SED], [
# NOTE: This macro has been submitted for inclusion into   #
#  GNU Autoconf as AC_PROG_SED.  When it is available in   #
#  a released version of Autoconf we should remove this    #
#  macro and use it instead.                               #

m4_defun([AC_PROG_SED],
[AC_MSG_CHECKING([for a sed that does not truncate output])
AC_CACHE_VAL(lt_cv_path_SED,
[# Loop through the user's path and test for sed and gsed.
# Then use that list of sed's as ones to test for truncation.
as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
for as_dir in $PATH
do
  IFS=$as_save_IFS
  test -z "$as_dir" && as_dir=.
  for lt_ac_prog in sed gsed; do
    for ac_exec_ext in '' $ac_executable_extensions; do
      if $as_executable_p "$as_dir/$lt_ac_prog$ac_exec_ext"; then
        lt_ac_sed_list="$lt_ac_sed_list $as_dir/$lt_ac_prog$ac_exec_ext"
      fi
    done
  done
done
IFS=$as_save_IFS
lt_ac_max=0
lt_ac_count=0
# Add /usr/xpg4/bin/sed as it is typically found on Solaris
# along with /bin/sed that truncates output.
for lt_ac_sed in $lt_ac_sed_list /usr/xpg4/bin/sed; do
  test ! -f "$lt_ac_sed" && continue
  cat /dev/null > conftest.in
  lt_ac_count=0
  echo $ECHO_N "0123456789$ECHO_C" >conftest.in
  # Check for GNU sed and select it if it is found.
  if "$lt_ac_sed" --version 2>&1 < /dev/null | grep 'GNU' > /dev/null; then
    lt_cv_path_SED=$lt_ac_sed
    break
  fi
  while true; do
    cat conftest.in conftest.in >conftest.tmp
    mv conftest.tmp conftest.in
    cp conftest.in conftest.nl
    echo >>conftest.nl
    $lt_ac_sed -e 's/a$//' < conftest.nl >conftest.out || break
    cmp -s conftest.out conftest.nl || break
    # 10000 chars as input seems more than enough
    test 10 -lt "$lt_ac_count" && break
    lt_ac_count=`expr $lt_ac_count + 1`
    if test "$lt_ac_count" -gt "$lt_ac_max"; then
      lt_ac_max=$lt_ac_count
      lt_cv_path_SED=$lt_ac_sed
    fi
  done
done
])
SED=$lt_cv_path_SED
AC_SUBST([SED])
AC_MSG_RESULT([$SED])
])#AC_PROG_SED
])#m4_ifndef

# Old name:
AU_ALIAS([LT_AC_PROG_SED], [AC_PROG_SED])
dnl aclocal-1.4 backwards compatibility:
dnl AC_DEFUN([LT_AC_PROG_SED], [])

# _LT_CHECK_SHELL_FEATURES
# ------------------------
# Find out whether the shell is Bourne or XSI compatible,
# or has some other useful features.
m4_defun([_LT_CHECK_SHELL_FEATURES],
[if ( (MAIL=60; unset MAIL) || exit) >/dev/null 2>&1; then
  lt_unset=unset
else
  lt_unset=false
fi
_LT_DECL([], [lt_unset], [0], [whether the shell understands "unset"])dnl

# test EBCDIC or ASCII
case `echo X|tr X '\101'` in
 A) # ASCII based system
    # \n is not interpreted correctly by Solaris 8 /usr/ucb/tr
  lt_SP2NL='tr \040 \012'
  lt_NL2SP='tr \015\012 \040\040'
  ;;
 *) # EBCDIC based system
  lt_SP2NL='tr \100 \n'
  lt_NL2SP='tr \r\n \100\100'
  ;;
esac
_LT_DECL([SP2NL], [lt_SP2NL], [1], [turn spaces into newlines])dnl
_LT_DECL([NL2SP], [lt_NL2SP], [1], [turn newlines into spaces])dnl
])# _LT_CHECK_SHELL_FEATURES


# _LT_PATH_CONVERSION_FUNCTIONS
# -----------------------------
# Determine what file name conversion functions should be used by
# func_to_host_file (and, implicitly, by func_to_host_path).
# NOTE(review): vendored GNU Libtool 2.4.6 macro code; regenerate with
# aclocal/libtoolize rather than editing by hand.
# These are needed
# for certain cross-compile configurations and native mingw.
m4_defun([_LT_PATH_CONVERSION_FUNCTIONS],
[AC_REQUIRE([AC_CANONICAL_HOST])dnl
AC_REQUIRE([AC_CANONICAL_BUILD])dnl
AC_MSG_CHECKING([how to convert $build file names to $host format])
AC_CACHE_VAL(lt_cv_to_host_file_cmd,
[case $host in
  *-*-mingw* )
    case $build in
      *-*-mingw* ) # actually msys
        lt_cv_to_host_file_cmd=func_convert_file_msys_to_w32
        ;;
      *-*-cygwin* )
        lt_cv_to_host_file_cmd=func_convert_file_cygwin_to_w32
        ;;
      * ) # otherwise, assume *nix
        lt_cv_to_host_file_cmd=func_convert_file_nix_to_w32
        ;;
    esac
    ;;
  *-*-cygwin* )
    case $build in
      *-*-mingw* ) # actually msys
        lt_cv_to_host_file_cmd=func_convert_file_msys_to_cygwin
        ;;
      *-*-cygwin* )
        lt_cv_to_host_file_cmd=func_convert_file_noop
        ;;
      * ) # otherwise, assume *nix
        lt_cv_to_host_file_cmd=func_convert_file_nix_to_cygwin
        ;;
    esac
    ;;
  * ) # unhandled hosts (and "normal" native builds)
    lt_cv_to_host_file_cmd=func_convert_file_noop
    ;;
esac
])
to_host_file_cmd=$lt_cv_to_host_file_cmd
AC_MSG_RESULT([$lt_cv_to_host_file_cmd])
_LT_DECL([to_host_file_cmd], [lt_cv_to_host_file_cmd],
         [0], [convert $build file names to $host format])dnl

AC_MSG_CHECKING([how to convert $build file names to toolchain format])
AC_CACHE_VAL(lt_cv_to_tool_file_cmd,
[#assume ordinary cross tools, or native build.
lt_cv_to_tool_file_cmd=func_convert_file_noop
case $host in
  *-*-mingw* )
    case $build in
      *-*-mingw* ) # actually msys
        lt_cv_to_tool_file_cmd=func_convert_file_msys_to_w32
        ;;
    esac
    ;;
esac
])
to_tool_file_cmd=$lt_cv_to_tool_file_cmd
AC_MSG_RESULT([$lt_cv_to_tool_file_cmd])
_LT_DECL([to_tool_file_cmd], [lt_cv_to_tool_file_cmd],
         [0], [convert $build files to toolchain format])dnl
])# _LT_PATH_CONVERSION_FUNCTIONS

# Helper functions for option handling.                    -*- Autoconf -*-
#
#   Copyright (C) 2004-2005, 2007-2009, 2011-2015 Free Software
#   Foundation, Inc.
#   Written by Gary V. Vaughan, 2004
#
# This file is free software; the Free Software Foundation gives
# unlimited permission to copy and/or distribute it, with or without
# modifications, as long as this notice is preserved.

# serial 8 ltoptions.m4

# This is to help aclocal find these macros, as it can't see m4_define.
AC_DEFUN([LTOPTIONS_VERSION], [m4_if([1])])


# _LT_MANGLE_OPTION(MACRO-NAME, OPTION-NAME)
# ------------------------------------------
m4_define([_LT_MANGLE_OPTION],
[[_LT_OPTION_]m4_bpatsubst($1__$2, [[^a-zA-Z0-9_]], [_])])


# _LT_SET_OPTION(MACRO-NAME, OPTION-NAME)
# ---------------------------------------
# Set option OPTION-NAME for macro MACRO-NAME, and if there is a
# matching handler defined, dispatch to it.  Other OPTION-NAMEs are
# saved as a flag.
m4_define([_LT_SET_OPTION],
[m4_define(_LT_MANGLE_OPTION([$1], [$2]))dnl
m4_ifdef(_LT_MANGLE_DEFUN([$1], [$2]),
        _LT_MANGLE_DEFUN([$1], [$2]),
    [m4_warning([Unknown $1 option '$2'])])[]dnl
])


# _LT_IF_OPTION(MACRO-NAME, OPTION-NAME, IF-SET, [IF-NOT-SET])
# ------------------------------------------------------------
# Execute IF-SET if OPTION is set, IF-NOT-SET otherwise.
m4_define([_LT_IF_OPTION],
[m4_ifdef(_LT_MANGLE_OPTION([$1], [$2]), [$3], [$4])])


# _LT_UNLESS_OPTIONS(MACRO-NAME, OPTION-LIST, IF-NOT-SET)
# -------------------------------------------------------
# Execute IF-NOT-SET unless all options in OPTION-LIST for MACRO-NAME
# are set.
m4_define([_LT_UNLESS_OPTIONS],
[m4_foreach([_LT_Option], m4_split(m4_normalize([$2])),
            [m4_ifdef(_LT_MANGLE_OPTION([$1], _LT_Option),
                      [m4_define([$0_found])])])[]dnl
m4_ifdef([$0_found], [m4_undefine([$0_found])], [$3
])[]dnl
])


# _LT_SET_OPTIONS(MACRO-NAME, OPTION-LIST)
# ----------------------------------------
# OPTION-LIST is a space-separated list of Libtool options associated
# with MACRO-NAME.  If any OPTION has a matching handler declared with
# LT_OPTION_DEFINE, dispatch to that macro; otherwise complain about
# the unknown option and exit.
m4_defun([_LT_SET_OPTIONS],
[# Set options
m4_foreach([_LT_Option], m4_split(m4_normalize([$2])),
    [_LT_SET_OPTION([$1], _LT_Option)])

m4_if([$1],[LT_INIT],[
  dnl
  dnl Simply set some default values (i.e off) if boolean options were not
  dnl specified:
  _LT_UNLESS_OPTIONS([LT_INIT], [dlopen], [enable_dlopen=no
  ])
  _LT_UNLESS_OPTIONS([LT_INIT], [win32-dll], [enable_win32_dll=no
  ])
  dnl
  dnl If no reference was made to various pairs of opposing options, then
  dnl we run the default mode handler for the pair.  For example, if neither
  dnl 'shared' nor 'disable-shared' was passed, we enable building of shared
  dnl archives by default:
  _LT_UNLESS_OPTIONS([LT_INIT], [shared disable-shared], [_LT_ENABLE_SHARED])
  _LT_UNLESS_OPTIONS([LT_INIT], [static disable-static], [_LT_ENABLE_STATIC])
  _LT_UNLESS_OPTIONS([LT_INIT], [pic-only no-pic], [_LT_WITH_PIC])
  _LT_UNLESS_OPTIONS([LT_INIT], [fast-install disable-fast-install],
                     [_LT_ENABLE_FAST_INSTALL])
  _LT_UNLESS_OPTIONS([LT_INIT], [aix-soname=aix aix-soname=both aix-soname=svr4],
                     [_LT_WITH_AIX_SONAME([aix])])
  ])
])# _LT_SET_OPTIONS


# _LT_MANGLE_DEFUN(MACRO-NAME, OPTION-NAME)
# -----------------------------------------
m4_define([_LT_MANGLE_DEFUN],
[[_LT_OPTION_DEFUN_]m4_bpatsubst(m4_toupper([$1__$2]), [[^A-Z0-9_]], [_])])


# LT_OPTION_DEFINE(MACRO-NAME, OPTION-NAME, CODE)
# -----------------------------------------------
m4_define([LT_OPTION_DEFINE],
[m4_define(_LT_MANGLE_DEFUN([$1], [$2]), [$3])[]dnl
])# LT_OPTION_DEFINE


# dlopen
# ------
LT_OPTION_DEFINE([LT_INIT], [dlopen], [enable_dlopen=yes
])

AU_DEFUN([AC_LIBTOOL_DLOPEN],
[_LT_SET_OPTION([LT_INIT], [dlopen])
AC_DIAGNOSE([obsolete],
[$0: Remove this warning and the call to _LT_SET_OPTION when you
put the 'dlopen' option into LT_INIT's first parameter.])
])

dnl aclocal-1.4 backwards compatibility:
dnl AC_DEFUN([AC_LIBTOOL_DLOPEN], [])


# win32-dll
# ---------
# Declare package support for building win32 dll's.
# NOTE(review): vendored GNU Libtool 2.4.6 option handlers; regenerate with
# aclocal/libtoolize rather than editing by hand.
LT_OPTION_DEFINE([LT_INIT], [win32-dll],
[enable_win32_dll=yes

case $host in
*-*-cygwin* | *-*-mingw* | *-*-pw32* | *-*-cegcc*)
  AC_CHECK_TOOL(AS, as, false)
  AC_CHECK_TOOL(DLLTOOL, dlltool, false)
  AC_CHECK_TOOL(OBJDUMP, objdump, false)
  ;;
esac

test -z "$AS" && AS=as
_LT_DECL([], [AS],      [1], [Assembler program])dnl

test -z "$DLLTOOL" && DLLTOOL=dlltool
_LT_DECL([], [DLLTOOL], [1], [DLL creation program])dnl

test -z "$OBJDUMP" && OBJDUMP=objdump
_LT_DECL([], [OBJDUMP], [1], [Object dumper program])dnl
])# win32-dll

AU_DEFUN([AC_LIBTOOL_WIN32_DLL],
[AC_REQUIRE([AC_CANONICAL_HOST])dnl
_LT_SET_OPTION([LT_INIT], [win32-dll])
AC_DIAGNOSE([obsolete],
[$0: Remove this warning and the call to _LT_SET_OPTION when you
put the 'win32-dll' option into LT_INIT's first parameter.])
])

dnl aclocal-1.4 backwards compatibility:
dnl AC_DEFUN([AC_LIBTOOL_WIN32_DLL], [])


# _LT_ENABLE_SHARED([DEFAULT])
# ----------------------------
# implement the --enable-shared flag, and supports the 'shared' and
# 'disable-shared' LT_INIT options.
# DEFAULT is either 'yes' or 'no'.  If omitted, it defaults to 'yes'.
m4_define([_LT_ENABLE_SHARED],
[m4_define([_LT_ENABLE_SHARED_DEFAULT], [m4_if($1, no, no, yes)])dnl
AC_ARG_ENABLE([shared],
    [AS_HELP_STRING([--enable-shared@<:@=PKGS@:>@],
        [build shared libraries @<:@default=]_LT_ENABLE_SHARED_DEFAULT[@:>@])],
    [p=${PACKAGE-default}
    case $enableval in
    yes) enable_shared=yes ;;
    no) enable_shared=no ;;
    *)
      enable_shared=no
      # Look at the argument we got.  We use all the common list separators.
      lt_save_ifs=$IFS; IFS=$IFS$PATH_SEPARATOR,
      for pkg in $enableval; do
        IFS=$lt_save_ifs
        if test "X$pkg" = "X$p"; then
          enable_shared=yes
        fi
      done
      IFS=$lt_save_ifs
      ;;
    esac],
    [enable_shared=]_LT_ENABLE_SHARED_DEFAULT)

    _LT_DECL([build_libtool_libs], [enable_shared], [0],
        [Whether or not to build shared libraries])
])# _LT_ENABLE_SHARED

LT_OPTION_DEFINE([LT_INIT], [shared], [_LT_ENABLE_SHARED([yes])])
LT_OPTION_DEFINE([LT_INIT], [disable-shared], [_LT_ENABLE_SHARED([no])])

# Old names:
AC_DEFUN([AC_ENABLE_SHARED],
[_LT_SET_OPTION([LT_INIT], m4_if([$1], [no], [disable-])[shared])
])

AC_DEFUN([AC_DISABLE_SHARED],
[_LT_SET_OPTION([LT_INIT], [disable-shared])
])

AU_DEFUN([AM_ENABLE_SHARED], [AC_ENABLE_SHARED($@)])
AU_DEFUN([AM_DISABLE_SHARED], [AC_DISABLE_SHARED($@)])

dnl aclocal-1.4 backwards compatibility:
dnl AC_DEFUN([AM_ENABLE_SHARED], [])
dnl AC_DEFUN([AM_DISABLE_SHARED], [])


# _LT_ENABLE_STATIC([DEFAULT])
# ----------------------------
# implement the --enable-static flag, and support the 'static' and
# 'disable-static' LT_INIT options.
# DEFAULT is either 'yes' or 'no'.  If omitted, it defaults to 'yes'.
m4_define([_LT_ENABLE_STATIC],
[m4_define([_LT_ENABLE_STATIC_DEFAULT], [m4_if($1, no, no, yes)])dnl
AC_ARG_ENABLE([static],
    [AS_HELP_STRING([--enable-static@<:@=PKGS@:>@],
        [build static libraries @<:@default=]_LT_ENABLE_STATIC_DEFAULT[@:>@])],
    [p=${PACKAGE-default}
    case $enableval in
    yes) enable_static=yes ;;
    no) enable_static=no ;;
    *)
     enable_static=no
      # Look at the argument we got.  We use all the common list separators.
      lt_save_ifs=$IFS; IFS=$IFS$PATH_SEPARATOR,
      for pkg in $enableval; do
        IFS=$lt_save_ifs
        if test "X$pkg" = "X$p"; then
          enable_static=yes
        fi
      done
      IFS=$lt_save_ifs
      ;;
    esac],
    [enable_static=]_LT_ENABLE_STATIC_DEFAULT)

    _LT_DECL([build_old_libs], [enable_static], [0],
        [Whether or not to build static libraries])
])# _LT_ENABLE_STATIC

LT_OPTION_DEFINE([LT_INIT], [static], [_LT_ENABLE_STATIC([yes])])
LT_OPTION_DEFINE([LT_INIT], [disable-static], [_LT_ENABLE_STATIC([no])])

# Old names:
AC_DEFUN([AC_ENABLE_STATIC],
[_LT_SET_OPTION([LT_INIT], m4_if([$1], [no], [disable-])[static])
])

AC_DEFUN([AC_DISABLE_STATIC],
[_LT_SET_OPTION([LT_INIT], [disable-static])
])

AU_DEFUN([AM_ENABLE_STATIC], [AC_ENABLE_STATIC($@)])
AU_DEFUN([AM_DISABLE_STATIC], [AC_DISABLE_STATIC($@)])

dnl aclocal-1.4 backwards compatibility:
dnl AC_DEFUN([AM_ENABLE_STATIC], [])
dnl AC_DEFUN([AM_DISABLE_STATIC], [])


# _LT_ENABLE_FAST_INSTALL([DEFAULT])
# ----------------------------------
# implement the --enable-fast-install flag, and support the 'fast-install'
# and 'disable-fast-install' LT_INIT options.
# DEFAULT is either 'yes' or 'no'.  If omitted, it defaults to 'yes'.
m4_define([_LT_ENABLE_FAST_INSTALL],
[m4_define([_LT_ENABLE_FAST_INSTALL_DEFAULT], [m4_if($1, no, no, yes)])dnl
AC_ARG_ENABLE([fast-install],
    [AS_HELP_STRING([--enable-fast-install@<:@=PKGS@:>@],
    [optimize for fast installation @<:@default=]_LT_ENABLE_FAST_INSTALL_DEFAULT[@:>@])],
    [p=${PACKAGE-default}
    case $enableval in
    yes) enable_fast_install=yes ;;
    no) enable_fast_install=no ;;
    *)
      enable_fast_install=no
      # Look at the argument we got.  We use all the common list separators.
      lt_save_ifs=$IFS; IFS=$IFS$PATH_SEPARATOR,
      for pkg in $enableval; do
        IFS=$lt_save_ifs
        if test "X$pkg" = "X$p"; then
          enable_fast_install=yes
        fi
      done
      IFS=$lt_save_ifs
      ;;
    esac],
    [enable_fast_install=]_LT_ENABLE_FAST_INSTALL_DEFAULT)

_LT_DECL([fast_install], [enable_fast_install], [0],
         [Whether or not to optimize for fast installation])dnl
])# _LT_ENABLE_FAST_INSTALL

LT_OPTION_DEFINE([LT_INIT], [fast-install], [_LT_ENABLE_FAST_INSTALL([yes])])
LT_OPTION_DEFINE([LT_INIT], [disable-fast-install], [_LT_ENABLE_FAST_INSTALL([no])])

# Old names:
AU_DEFUN([AC_ENABLE_FAST_INSTALL],
[_LT_SET_OPTION([LT_INIT], m4_if([$1], [no], [disable-])[fast-install])
AC_DIAGNOSE([obsolete],
[$0: Remove this warning and the call to _LT_SET_OPTION when you put
the 'fast-install' option into LT_INIT's first parameter.])
])

AU_DEFUN([AC_DISABLE_FAST_INSTALL],
[_LT_SET_OPTION([LT_INIT], [disable-fast-install])
AC_DIAGNOSE([obsolete],
[$0: Remove this warning and the call to _LT_SET_OPTION when you put
the 'disable-fast-install' option into LT_INIT's first parameter.])
])

dnl aclocal-1.4 backwards compatibility:
dnl AC_DEFUN([AC_ENABLE_FAST_INSTALL], [])
dnl AC_DEFUN([AM_DISABLE_FAST_INSTALL], [])


# _LT_WITH_AIX_SONAME([DEFAULT])
# ----------------------------------
# implement the --with-aix-soname flag, and support the `aix-soname=aix'
# and `aix-soname=both' and `aix-soname=svr4' LT_INIT options.  DEFAULT
# is either `aix', `both' or `svr4'.  If omitted, it defaults to `aix'.
# NOTE(review): vendored GNU Libtool 2.4.6 option handlers; regenerate with
# aclocal/libtoolize rather than editing by hand.
m4_define([_LT_WITH_AIX_SONAME],
[m4_define([_LT_WITH_AIX_SONAME_DEFAULT], [m4_if($1, svr4, svr4, m4_if($1, both, both, aix))])dnl
shared_archive_member_spec=
case $host,$enable_shared in
power*-*-aix[[5-9]]*,yes)
  AC_MSG_CHECKING([which variant of shared library versioning to provide])
  AC_ARG_WITH([aix-soname],
    [AS_HELP_STRING([--with-aix-soname=aix|svr4|both],
      [shared library versioning (aka "SONAME") variant to provide on AIX, @<:@default=]_LT_WITH_AIX_SONAME_DEFAULT[@:>@.])],
    [case $withval in
     aix|svr4|both)
       ;;
     *)
       AC_MSG_ERROR([Unknown argument to --with-aix-soname])
       ;;
     esac
     lt_cv_with_aix_soname=$with_aix_soname],
    [AC_CACHE_VAL([lt_cv_with_aix_soname],
      [lt_cv_with_aix_soname=]_LT_WITH_AIX_SONAME_DEFAULT)
     with_aix_soname=$lt_cv_with_aix_soname])
  AC_MSG_RESULT([$with_aix_soname])
  if test aix != "$with_aix_soname"; then
    # For the AIX way of multilib, we name the shared archive member
    # based on the bitwidth used, traditionally 'shr.o' or 'shr_64.o',
    # and 'shr.imp' or 'shr_64.imp', respectively, for the Import File.
    # Even when GNU compilers ignore OBJECT_MODE but need '-maix64' flag,
    # the AIX toolchain works better with OBJECT_MODE set (default 32).
    if test 64 = "${OBJECT_MODE-32}"; then
      shared_archive_member_spec=shr_64
    else
      shared_archive_member_spec=shr
    fi
  fi
  ;;
*)
  with_aix_soname=aix
  ;;
esac

_LT_DECL([], [shared_archive_member_spec], [0],
    [Shared archive member basename, for filename based shared library versioning on AIX])dnl
])# _LT_WITH_AIX_SONAME

LT_OPTION_DEFINE([LT_INIT], [aix-soname=aix], [_LT_WITH_AIX_SONAME([aix])])
LT_OPTION_DEFINE([LT_INIT], [aix-soname=both], [_LT_WITH_AIX_SONAME([both])])
LT_OPTION_DEFINE([LT_INIT], [aix-soname=svr4], [_LT_WITH_AIX_SONAME([svr4])])


# _LT_WITH_PIC([MODE])
# --------------------
# implement the --with-pic flag, and support the 'pic-only' and 'no-pic'
# LT_INIT options.
# MODE is either 'yes' or 'no'.  If omitted, it defaults to 'both'.
m4_define([_LT_WITH_PIC],
[AC_ARG_WITH([pic],
    [AS_HELP_STRING([--with-pic@<:@=PKGS@:>@],
        [try to use only PIC/non-PIC objects @<:@default=use both@:>@])],
    [lt_p=${PACKAGE-default}
    case $withval in
    yes|no) pic_mode=$withval ;;
    *)
      pic_mode=default
      # Look at the argument we got.  We use all the common list separators.
      lt_save_ifs=$IFS; IFS=$IFS$PATH_SEPARATOR,
      for lt_pkg in $withval; do
        IFS=$lt_save_ifs
        if test "X$lt_pkg" = "X$lt_p"; then
          pic_mode=yes
        fi
      done
      IFS=$lt_save_ifs
      ;;
    esac],
    [pic_mode=m4_default([$1], [default])])

_LT_DECL([], [pic_mode], [0], [What type of objects to build])dnl
])# _LT_WITH_PIC

LT_OPTION_DEFINE([LT_INIT], [pic-only], [_LT_WITH_PIC([yes])])
LT_OPTION_DEFINE([LT_INIT], [no-pic], [_LT_WITH_PIC([no])])

# Old name:
AU_DEFUN([AC_LIBTOOL_PICMODE],
[_LT_SET_OPTION([LT_INIT], [pic-only])
AC_DIAGNOSE([obsolete],
[$0: Remove this warning and the call to _LT_SET_OPTION when you
put the 'pic-only' option into LT_INIT's first parameter.])
])

dnl aclocal-1.4 backwards compatibility:
dnl AC_DEFUN([AC_LIBTOOL_PICMODE], [])

m4_define([_LTDL_MODE], [])
LT_OPTION_DEFINE([LTDL_INIT], [nonrecursive],
                 [m4_define([_LTDL_MODE], [nonrecursive])])
LT_OPTION_DEFINE([LTDL_INIT], [recursive],
                 [m4_define([_LTDL_MODE], [recursive])])
LT_OPTION_DEFINE([LTDL_INIT], [subproject],
                 [m4_define([_LTDL_MODE], [subproject])])

m4_define([_LTDL_TYPE], [])
LT_OPTION_DEFINE([LTDL_INIT], [installable],
                 [m4_define([_LTDL_TYPE], [installable])])
LT_OPTION_DEFINE([LTDL_INIT], [convenience],
                 [m4_define([_LTDL_TYPE], [convenience])])

# ltsugar.m4 -- libtool m4 base layer.                         -*-Autoconf-*-
#
# Copyright (C) 2004-2005, 2007-2008, 2011-2015 Free Software
# Foundation, Inc.
# Written by Gary V. Vaughan, 2004
#
# This file is free software; the Free Software Foundation gives
# unlimited permission to copy and/or distribute it, with or without
# modifications, as long as this notice is preserved.

# serial 6 ltsugar.m4

# This is to help aclocal find these macros, as it can't see m4_define.
# NOTE(review): vendored GNU Libtool ltsugar.m4 (serial 6) list/dict helpers;
# regenerate with aclocal/libtoolize rather than editing by hand.
AC_DEFUN([LTSUGAR_VERSION], [m4_if([0.1])])


# lt_join(SEP, ARG1, [ARG2...])
# -----------------------------
# Produce ARG1SEPARG2...SEPARGn, omitting [] arguments and their
# associated separator.
# Needed until we can rely on m4_join from Autoconf 2.62, since all earlier
# versions in m4sugar had bugs.
m4_define([lt_join],
[m4_if([$#], [1], [],
       [$#], [2], [[$2]],
       [m4_if([$2], [], [], [[$2]_])$0([$1], m4_shift(m4_shift($@)))])])
m4_define([_lt_join],
[m4_if([$#$2], [2], [],
       [m4_if([$2], [], [], [[$1$2]])$0([$1], m4_shift(m4_shift($@)))])])


# lt_car(LIST)
# lt_cdr(LIST)
# ------------
# Manipulate m4 lists.
# These macros are necessary as long as will still need to support
# Autoconf-2.59, which quotes differently.
m4_define([lt_car], [[$1]])
m4_define([lt_cdr],
[m4_if([$#], 0, [m4_fatal([$0: cannot be called without arguments])],
       [$#], 1, [],
       [m4_dquote(m4_shift($@))])])
m4_define([lt_unquote], $1)


# lt_append(MACRO-NAME, STRING, [SEPARATOR])
# ------------------------------------------
# Redefine MACRO-NAME to hold its former content plus 'SEPARATOR''STRING'.
# Note that neither SEPARATOR nor STRING are expanded; they are appended
# to MACRO-NAME as is (leaving the expansion for when MACRO-NAME is invoked).
# No SEPARATOR is output if MACRO-NAME was previously undefined (different
# than defined and empty).
#
# This macro is needed until we can rely on Autoconf 2.62, since earlier
# versions of m4sugar mistakenly expanded SEPARATOR but not STRING.
m4_define([lt_append],
[m4_define([$1],
           m4_ifdef([$1], [m4_defn([$1])[$3]])[$2])])


# lt_combine(SEP, PREFIX-LIST, INFIX, SUFFIX1, [SUFFIX2...])
# ----------------------------------------------------------
# Produce a SEP delimited list of all paired combinations of elements of
# PREFIX-LIST with SUFFIX1 through SUFFIXn.  Each element of the list
# has the form PREFIXmINFIXSUFFIXn.
# Needed until we can rely on m4_combine added in Autoconf 2.62.
m4_define([lt_combine],
[m4_if(m4_eval([$# > 3]), [1],
       [m4_pushdef([_Lt_sep], [m4_define([_Lt_sep], m4_defn([lt_car]))])]]dnl
[[m4_foreach([_Lt_prefix], [$2],
             [m4_foreach([_Lt_suffix],
                ]m4_dquote(m4_dquote(m4_shift(m4_shift(m4_shift($@)))))[,
        [_Lt_sep([$1])[]m4_defn([_Lt_prefix])[$3]m4_defn([_Lt_suffix])])])])])


# lt_if_append_uniq(MACRO-NAME, VARNAME, [SEPARATOR], [UNIQ], [NOT-UNIQ])
# -----------------------------------------------------------------------
# Iff MACRO-NAME does not yet contain VARNAME, then append it (delimited
# by SEPARATOR if supplied) and expand UNIQ, else NOT-UNIQ.
m4_define([lt_if_append_uniq],
[m4_ifdef([$1],
          [m4_if(m4_index([$3]m4_defn([$1])[$3], [$3$2$3]), [-1],
                 [lt_append([$1], [$2], [$3])$4],
                 [$5])],
          [lt_append([$1], [$2], [$3])$4])])


# lt_dict_add(DICT, KEY, VALUE)
# -----------------------------
m4_define([lt_dict_add],
[m4_define([$1($2)], [$3])])


# lt_dict_add_subkey(DICT, KEY, SUBKEY, VALUE)
# --------------------------------------------
m4_define([lt_dict_add_subkey],
[m4_define([$1($2:$3)], [$4])])


# lt_dict_fetch(DICT, KEY, [SUBKEY])
# ----------------------------------
m4_define([lt_dict_fetch],
[m4_ifval([$3],
        m4_ifdef([$1($2:$3)], [m4_defn([$1($2:$3)])]),
    m4_ifdef([$1($2)], [m4_defn([$1($2)])]))])


# lt_if_dict_fetch(DICT, KEY, [SUBKEY], VALUE, IF-TRUE, [IF-FALSE])
# -----------------------------------------------------------------
m4_define([lt_if_dict_fetch],
[m4_if(lt_dict_fetch([$1], [$2], [$3]), [$4],
        [$5],
    [$6])])


# lt_dict_filter(DICT, [SUBKEY], VALUE, [SEPARATOR], KEY, [...])
# --------------------------------------------------------------
m4_define([lt_dict_filter],
[m4_if([$5], [], [],
  [lt_join(m4_quote(m4_default([$4], [[, ]])),
           lt_unquote(m4_split(m4_normalize(m4_foreach(_Lt_key, lt_car([m4_shiftn(4, $@)]),
                      [lt_if_dict_fetch([$1], _Lt_key, [$2], [$3], [_Lt_key ])])))))])[]dnl
])

# ltversion.m4 -- version numbers                       -*- Autoconf -*-
#
#   Copyright (C) 2004, 2011-2015 Free Software Foundation, Inc.
#   Written by Scott James Remnant, 2004
#
# This file is free software; the Free Software Foundation gives
# unlimited permission to copy and/or distribute it, with or without
# modifications, as long as this notice is preserved.

# @configure_input@

# serial 4179 ltversion.m4
# This file is part of GNU Libtool

# NOTE(review): version stamps for the vendored libtool macros (release
# 2.4.6); keep in sync with the bundled ltmain.sh when upgrading libtool.
m4_define([LT_PACKAGE_VERSION], [2.4.6])
m4_define([LT_PACKAGE_REVISION], [2.4.6])

AC_DEFUN([LTVERSION_VERSION],
[macro_version='2.4.6'
macro_revision='2.4.6'
_LT_DECL(, macro_version, 0, [Which release of libtool.m4 was used?])
_LT_DECL(, macro_revision, 0)
])

# lt~obsolete.m4 -- aclocal satisfying obsolete definitions.    -*-Autoconf-*-
#
#   Copyright (C) 2004-2005, 2007, 2009, 2011-2015 Free Software
#   Foundation, Inc.
#   Written by Scott James Remnant, 2004.
#
# This file is free software; the Free Software Foundation gives
# unlimited permission to copy and/or distribute it, with or without
# modifications, as long as this notice is preserved.

# serial 5 lt~obsolete.m4

# These exist entirely to fool aclocal when bootstrapping libtool.
#
# In the past libtool.m4 has provided macros via AC_DEFUN (or AU_DEFUN),
# which have later been changed to m4_define as they aren't part of the
# exported API, or moved to Autoconf or Automake where they belong.
#
# The trouble is, aclocal is a bit thick.  It'll see the old AC_DEFUN
# in /usr/share/aclocal/libtool.m4 and remember it, then when it sees us
# using a macro with the same name in our local m4/libtool.m4 it'll
# pull the old libtool.m4 in (it doesn't see our shiny new m4_define
# and doesn't know about Autoconf macros at all.)
#
# So we provide this file, which has a silly filename so it's always
# included after everything else.  This provides aclocal with the
# AC_DEFUNs it wants, but when m4 processes it, it doesn't do anything
# because those macros already exist, or will be overwritten later.
# We use AC_DEFUN over AU_DEFUN for compatibility with aclocal-1.6.
#
# Anytime we withdraw an AC_DEFUN or AU_DEFUN, remember to add it here.
# Yes, that means every name once taken will need to remain here until
# we give up compatibility with versions before 1.7, at which point
# we need to keep only those names which we still refer to.

# This is to help aclocal find these macros, as it can't see m4_define.
AC_DEFUN([LTOBSOLETE_VERSION], [m4_if([1])])

dnl NOTE(review): each stub below defines the obsolete macro name only if
dnl it is not already defined, and each stub expands to nothing.  They
dnl exist purely so aclocal resolves the names locally instead of pulling
dnl in a stale system-wide libtool.m4.
m4_ifndef([AC_LIBTOOL_LINKER_OPTION],	[AC_DEFUN([AC_LIBTOOL_LINKER_OPTION])])
m4_ifndef([AC_PROG_EGREP],		[AC_DEFUN([AC_PROG_EGREP])])
m4_ifndef([_LT_AC_PROG_ECHO_BACKSLASH],	[AC_DEFUN([_LT_AC_PROG_ECHO_BACKSLASH])])
m4_ifndef([_LT_AC_SHELL_INIT],		[AC_DEFUN([_LT_AC_SHELL_INIT])])
m4_ifndef([_LT_AC_SYS_LIBPATH_AIX],	[AC_DEFUN([_LT_AC_SYS_LIBPATH_AIX])])
m4_ifndef([_LT_PROG_LTMAIN],		[AC_DEFUN([_LT_PROG_LTMAIN])])
m4_ifndef([_LT_AC_TAGVAR],		[AC_DEFUN([_LT_AC_TAGVAR])])
m4_ifndef([AC_LTDL_ENABLE_INSTALL],	[AC_DEFUN([AC_LTDL_ENABLE_INSTALL])])
m4_ifndef([AC_LTDL_PREOPEN],		[AC_DEFUN([AC_LTDL_PREOPEN])])
m4_ifndef([_LT_AC_SYS_COMPILER],	[AC_DEFUN([_LT_AC_SYS_COMPILER])])
m4_ifndef([_LT_AC_LOCK],		[AC_DEFUN([_LT_AC_LOCK])])
m4_ifndef([AC_LIBTOOL_SYS_OLD_ARCHIVE],	[AC_DEFUN([AC_LIBTOOL_SYS_OLD_ARCHIVE])])
m4_ifndef([_LT_AC_TRY_DLOPEN_SELF],	[AC_DEFUN([_LT_AC_TRY_DLOPEN_SELF])])
m4_ifndef([AC_LIBTOOL_PROG_CC_C_O],	[AC_DEFUN([AC_LIBTOOL_PROG_CC_C_O])])
m4_ifndef([AC_LIBTOOL_SYS_HARD_LINK_LOCKS], [AC_DEFUN([AC_LIBTOOL_SYS_HARD_LINK_LOCKS])])
m4_ifndef([AC_LIBTOOL_OBJDIR],		[AC_DEFUN([AC_LIBTOOL_OBJDIR])])
m4_ifndef([AC_LTDL_OBJDIR],		[AC_DEFUN([AC_LTDL_OBJDIR])])
m4_ifndef([AC_LIBTOOL_PROG_LD_HARDCODE_LIBPATH], [AC_DEFUN([AC_LIBTOOL_PROG_LD_HARDCODE_LIBPATH])])
m4_ifndef([AC_LIBTOOL_SYS_LIB_STRIP],	[AC_DEFUN([AC_LIBTOOL_SYS_LIB_STRIP])])
m4_ifndef([AC_PATH_MAGIC],		[AC_DEFUN([AC_PATH_MAGIC])])
m4_ifndef([AC_PROG_LD_GNU],		[AC_DEFUN([AC_PROG_LD_GNU])])
m4_ifndef([AC_PROG_LD_RELOAD_FLAG],	[AC_DEFUN([AC_PROG_LD_RELOAD_FLAG])])
m4_ifndef([AC_DEPLIBS_CHECK_METHOD],	[AC_DEFUN([AC_DEPLIBS_CHECK_METHOD])])
m4_ifndef([AC_LIBTOOL_PROG_COMPILER_NO_RTTI], [AC_DEFUN([AC_LIBTOOL_PROG_COMPILER_NO_RTTI])])
m4_ifndef([AC_LIBTOOL_SYS_GLOBAL_SYMBOL_PIPE], [AC_DEFUN([AC_LIBTOOL_SYS_GLOBAL_SYMBOL_PIPE])])
m4_ifndef([AC_LIBTOOL_PROG_COMPILER_PIC], [AC_DEFUN([AC_LIBTOOL_PROG_COMPILER_PIC])])
m4_ifndef([AC_LIBTOOL_PROG_LD_SHLIBS],	[AC_DEFUN([AC_LIBTOOL_PROG_LD_SHLIBS])])
m4_ifndef([AC_LIBTOOL_POSTDEP_PREDEP],	[AC_DEFUN([AC_LIBTOOL_POSTDEP_PREDEP])])
m4_ifndef([LT_AC_PROG_EGREP],		[AC_DEFUN([LT_AC_PROG_EGREP])])
m4_ifndef([LT_AC_PROG_SED],		[AC_DEFUN([LT_AC_PROG_SED])])
m4_ifndef([_LT_CC_BASENAME],		[AC_DEFUN([_LT_CC_BASENAME])])
m4_ifndef([_LT_COMPILER_BOILERPLATE],	[AC_DEFUN([_LT_COMPILER_BOILERPLATE])])
m4_ifndef([_LT_LINKER_BOILERPLATE],	[AC_DEFUN([_LT_LINKER_BOILERPLATE])])
m4_ifndef([_AC_PROG_LIBTOOL],		[AC_DEFUN([_AC_PROG_LIBTOOL])])
m4_ifndef([AC_LIBTOOL_SETUP],		[AC_DEFUN([AC_LIBTOOL_SETUP])])
m4_ifndef([_LT_AC_CHECK_DLFCN],		[AC_DEFUN([_LT_AC_CHECK_DLFCN])])
m4_ifndef([AC_LIBTOOL_SYS_DYNAMIC_LINKER], [AC_DEFUN([AC_LIBTOOL_SYS_DYNAMIC_LINKER])])
m4_ifndef([_LT_AC_TAGCONFIG],		[AC_DEFUN([_LT_AC_TAGCONFIG])])
m4_ifndef([AC_DISABLE_FAST_INSTALL],	[AC_DEFUN([AC_DISABLE_FAST_INSTALL])])
m4_ifndef([_LT_AC_LANG_CXX],		[AC_DEFUN([_LT_AC_LANG_CXX])])
m4_ifndef([_LT_AC_LANG_F77],		[AC_DEFUN([_LT_AC_LANG_F77])])
m4_ifndef([_LT_AC_LANG_GCJ],		[AC_DEFUN([_LT_AC_LANG_GCJ])])
m4_ifndef([AC_LIBTOOL_LANG_C_CONFIG],	[AC_DEFUN([AC_LIBTOOL_LANG_C_CONFIG])])
m4_ifndef([_LT_AC_LANG_C_CONFIG],	[AC_DEFUN([_LT_AC_LANG_C_CONFIG])])
m4_ifndef([AC_LIBTOOL_LANG_CXX_CONFIG],	[AC_DEFUN([AC_LIBTOOL_LANG_CXX_CONFIG])])
m4_ifndef([_LT_AC_LANG_CXX_CONFIG],	[AC_DEFUN([_LT_AC_LANG_CXX_CONFIG])])
m4_ifndef([AC_LIBTOOL_LANG_F77_CONFIG],	[AC_DEFUN([AC_LIBTOOL_LANG_F77_CONFIG])])
m4_ifndef([_LT_AC_LANG_F77_CONFIG],	[AC_DEFUN([_LT_AC_LANG_F77_CONFIG])])
m4_ifndef([AC_LIBTOOL_LANG_GCJ_CONFIG],	[AC_DEFUN([AC_LIBTOOL_LANG_GCJ_CONFIG])])
m4_ifndef([_LT_AC_LANG_GCJ_CONFIG],	[AC_DEFUN([_LT_AC_LANG_GCJ_CONFIG])])
m4_ifndef([AC_LIBTOOL_LANG_RC_CONFIG],	[AC_DEFUN([AC_LIBTOOL_LANG_RC_CONFIG])])
m4_ifndef([_LT_AC_LANG_RC_CONFIG],	[AC_DEFUN([_LT_AC_LANG_RC_CONFIG])])
m4_ifndef([AC_LIBTOOL_CONFIG],		[AC_DEFUN([AC_LIBTOOL_CONFIG])])
m4_ifndef([_LT_AC_FILE_LTDLL_C],	[AC_DEFUN([_LT_AC_FILE_LTDLL_C])])
m4_ifndef([_LT_REQUIRED_DARWIN_CHECKS],	[AC_DEFUN([_LT_REQUIRED_DARWIN_CHECKS])])
m4_ifndef([_LT_AC_PROG_CXXCPP],		[AC_DEFUN([_LT_AC_PROG_CXXCPP])])
m4_ifndef([_LT_PREPARE_SED_QUOTE_VARS],	[AC_DEFUN([_LT_PREPARE_SED_QUOTE_VARS])])
m4_ifndef([_LT_PROG_ECHO_BACKSLASH],	[AC_DEFUN([_LT_PROG_ECHO_BACKSLASH])])
m4_ifndef([_LT_PROG_F77],		[AC_DEFUN([_LT_PROG_F77])])
m4_ifndef([_LT_PROG_FC],		[AC_DEFUN([_LT_PROG_FC])])
m4_ifndef([_LT_PROG_CXX],		[AC_DEFUN([_LT_PROG_CXX])])

# pkg.m4 - Macros to locate and utilise pkg-config.   -*- Autoconf -*-
# serial 12 (pkg-config-0.29.2)

dnl NOTE(review): vendored from pkg-config 0.29.2; the author email
dnl addresses below were stripped by an earlier text extraction.
dnl Copyright © 2004 Scott James Remnant .
dnl Copyright © 2012-2015 Dan Nicholson
dnl
dnl This program is free software; you can redistribute it and/or modify
dnl it under the terms of the GNU General Public License as published by
dnl the Free Software Foundation; either version 2 of the License, or
dnl (at your option) any later version.
dnl
dnl This program is distributed in the hope that it will be useful, but
dnl WITHOUT ANY WARRANTY; without even the implied warranty of
dnl MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
dnl General Public License for more details.
dnl
dnl You should have received a copy of the GNU General Public License
dnl along with this program; if not, write to the Free Software
dnl Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA
dnl 02111-1307, USA.
dnl
dnl As a special exception to the GNU General Public License, if you
dnl distribute this file as part of a program that contains a
dnl configuration script generated by Autoconf, you may include it under
dnl the same distribution terms that you use for the rest of that
dnl program.
dnl PKG_PREREQ(MIN-VERSION)
dnl -----------------------
dnl Since: 0.29
dnl
dnl Verify that the version of the pkg-config macros are at least
dnl MIN-VERSION. Unlike PKG_PROG_PKG_CONFIG, which checks the user's
dnl installed version of pkg-config, this checks the developer's version
dnl of pkg.m4 when generating configure.
dnl
dnl To ensure that this macro is defined, also add:
dnl m4_ifndef([PKG_PREREQ],
dnl    [m4_fatal([must install pkg-config 0.29 or later before running autoconf/autogen])])
dnl
dnl See the "Since" comment for each macro you use to see what version
dnl of the macros you require.
dnl
dnl NOTE(review): this is an autoconf-time check only; it expands to an
dnl m4_fatal (aborting autoconf) rather than to any configure-time code.
m4_defun([PKG_PREREQ],
[m4_define([PKG_MACROS_VERSION], [0.29.2])
m4_if(m4_version_compare(PKG_MACROS_VERSION, [$1]), -1,
    [m4_fatal([pkg.m4 version $1 or higher is required but ]PKG_MACROS_VERSION[ found])])
])dnl PKG_PREREQ

dnl PKG_PROG_PKG_CONFIG([MIN-VERSION])
dnl ----------------------------------
dnl Since: 0.16
dnl
dnl Search for the pkg-config tool and set the PKG_CONFIG variable to
dnl first found in the path. Checks that the version of pkg-config found
dnl is at least MIN-VERSION. If MIN-VERSION is not specified, 0.9.0 is
dnl used since that's the first version where most current features of
dnl pkg-config existed.
AC_DEFUN([PKG_PROG_PKG_CONFIG],
[m4_pattern_forbid([^_?PKG_[A-Z_]+$])
m4_pattern_allow([^PKG_CONFIG(_(PATH|LIBDIR|SYSROOT_DIR|ALLOW_SYSTEM_(CFLAGS|LIBS)))?$])
m4_pattern_allow([^PKG_CONFIG_(DISABLE_UNINSTALLED|TOP_BUILD_DIR|DEBUG_SPEW)$])
AC_ARG_VAR([PKG_CONFIG], [path to pkg-config utility])
AC_ARG_VAR([PKG_CONFIG_PATH], [directories to add to pkg-config's search path])
AC_ARG_VAR([PKG_CONFIG_LIBDIR], [path overriding pkg-config's built-in search path])

dnl Only search PATH if the user did not set PKG_CONFIG in the environment.
if test "x$ac_cv_env_PKG_CONFIG_set" != "xset"; then
	AC_PATH_TOOL([PKG_CONFIG], [pkg-config])
fi
if test -n "$PKG_CONFIG"; then
	_pkg_min_version=m4_default([$1], [0.9.0])
	AC_MSG_CHECKING([pkg-config is at least version $_pkg_min_version])
	if $PKG_CONFIG --atleast-pkgconfig-version $_pkg_min_version; then
		AC_MSG_RESULT([yes])
	else
		AC_MSG_RESULT([no])
		dnl Too old: behave as if pkg-config was not found at all.
		PKG_CONFIG=""
	fi
fi[]dnl
])dnl PKG_PROG_PKG_CONFIG

dnl PKG_CHECK_EXISTS(MODULES, [ACTION-IF-FOUND], [ACTION-IF-NOT-FOUND])
dnl -------------------------------------------------------------------
dnl Since: 0.18
dnl
dnl Check to see whether a particular set of modules exists. Similar to
dnl PKG_CHECK_MODULES(), but does not set variables or print errors.
dnl
dnl Please remember that m4 expands AC_REQUIRE([PKG_PROG_PKG_CONFIG])
dnl only at the first occurence in configure.ac, so if the first place
dnl it's called might be skipped (such as if it is within an "if", you
dnl have to call PKG_CHECK_EXISTS manually
AC_DEFUN([PKG_CHECK_EXISTS],
[AC_REQUIRE([PKG_PROG_PKG_CONFIG])dnl
if test -n "$PKG_CONFIG" && \
    AC_RUN_LOG([$PKG_CONFIG --exists --print-errors "$1"]); then
  m4_default([$2], [:])
m4_ifvaln([$3], [else
  $3])dnl
fi])

dnl _PKG_CONFIG([VARIABLE], [COMMAND], [MODULES])
dnl ---------------------------------------------
dnl Internal wrapper calling pkg-config via PKG_CONFIG and setting
dnl pkg_failed based on the result.
dnl NOTE(review): sets pkg_failed to one of no/yes/untried; PKG_CHECK_MODULES
dnl below dispatches on that value.  A user-supplied $$1 (e.g. FOO_CFLAGS
dnl from the environment) short-circuits the pkg-config query entirely.
m4_define([_PKG_CONFIG],
[if test -n "$$1"; then
    pkg_cv_[]$1="$$1"
 elif test -n "$PKG_CONFIG"; then
    PKG_CHECK_EXISTS([$3],
                     [pkg_cv_[]$1=`$PKG_CONFIG --[]$2 "$3" 2>/dev/null`
		      test "x$?" != "x0" && pkg_failed=yes ],
		     [pkg_failed=yes])
 else
    pkg_failed=untried
fi[]dnl
])dnl _PKG_CONFIG

dnl _PKG_SHORT_ERRORS_SUPPORTED
dnl ---------------------------
dnl Internal check to see if pkg-config supports short errors.
AC_DEFUN([_PKG_SHORT_ERRORS_SUPPORTED],
[AC_REQUIRE([PKG_PROG_PKG_CONFIG])
if $PKG_CONFIG --atleast-pkgconfig-version 0.20; then
        _pkg_short_errors_supported=yes
else
        _pkg_short_errors_supported=no
fi[]dnl
])dnl _PKG_SHORT_ERRORS_SUPPORTED


dnl PKG_CHECK_MODULES(VARIABLE-PREFIX, MODULES, [ACTION-IF-FOUND],
dnl   [ACTION-IF-NOT-FOUND])
dnl --------------------------------------------------------------
dnl Since: 0.4.0
dnl
dnl Note that if there is a possibility the first call to
dnl PKG_CHECK_MODULES might not happen, you should be sure to include an
dnl explicit call to PKG_PROG_PKG_CONFIG in your configure.ac
AC_DEFUN([PKG_CHECK_MODULES],
[AC_REQUIRE([PKG_PROG_PKG_CONFIG])dnl
AC_ARG_VAR([$1][_CFLAGS], [C compiler flags for $1, overriding pkg-config])dnl
AC_ARG_VAR([$1][_LIBS], [linker flags for $1, overriding pkg-config])dnl

pkg_failed=no
AC_MSG_CHECKING([for $2])

_PKG_CONFIG([$1][_CFLAGS], [cflags], [$2])
_PKG_CONFIG([$1][_LIBS], [libs], [$2])

m4_define([_PKG_TEXT], [Alternatively, you may set the environment variables $1[]_CFLAGS
and $1[]_LIBS to avoid the need to call pkg-config.
See the pkg-config man page for more details.])

if test $pkg_failed = yes; then
        AC_MSG_RESULT([no])
        _PKG_SHORT_ERRORS_SUPPORTED
        if test $_pkg_short_errors_supported = yes; then
	        $1[]_PKG_ERRORS=`$PKG_CONFIG --short-errors --print-errors --cflags --libs "$2" 2>&1`
        else
	        $1[]_PKG_ERRORS=`$PKG_CONFIG --print-errors --cflags --libs "$2" 2>&1`
        fi
	# Put the nasty error message in config.log where it belongs
	echo "$$1[]_PKG_ERRORS" >&AS_MESSAGE_LOG_FD

	m4_default([$4], [AC_MSG_ERROR(
[Package requirements ($2) were not met:

$$1_PKG_ERRORS

Consider adjusting the PKG_CONFIG_PATH environment variable if you
installed software in a non-standard prefix.

_PKG_TEXT])[]dnl
        ])
elif test $pkg_failed = untried; then
        AC_MSG_RESULT([no])
	m4_default([$4], [AC_MSG_FAILURE(
[The pkg-config script could not be found or is too old.  Make sure it
is in your PATH or set the PKG_CONFIG environment variable to the full
path to pkg-config.

_PKG_TEXT

To get pkg-config, see .])[]dnl
        ])
else
	$1[]_CFLAGS=$pkg_cv_[]$1[]_CFLAGS
	$1[]_LIBS=$pkg_cv_[]$1[]_LIBS
        AC_MSG_RESULT([yes])
	$3
fi[]dnl
])dnl PKG_CHECK_MODULES


dnl PKG_CHECK_MODULES_STATIC(VARIABLE-PREFIX, MODULES, [ACTION-IF-FOUND],
dnl   [ACTION-IF-NOT-FOUND])
dnl ---------------------------------------------------------------------
dnl Since: 0.29
dnl
dnl Checks for existence of MODULES and gathers its build flags with
dnl static libraries enabled. Sets VARIABLE-PREFIX_CFLAGS from --cflags
dnl and VARIABLE-PREFIX_LIBS from --libs.
dnl
dnl Note that if there is a possibility the first call to
dnl PKG_CHECK_MODULES_STATIC might not happen, you should be sure to
dnl include an explicit call to PKG_PROG_PKG_CONFIG in your
dnl configure.ac.
dnl NOTE(review): temporarily rewrites PKG_CONFIG to pass --static, then
dnl restores the caller's value, so ordinary PKG_CHECK_MODULES does the work.
AC_DEFUN([PKG_CHECK_MODULES_STATIC],
[AC_REQUIRE([PKG_PROG_PKG_CONFIG])dnl
_save_PKG_CONFIG=$PKG_CONFIG
PKG_CONFIG="$PKG_CONFIG --static"
PKG_CHECK_MODULES($@)
PKG_CONFIG=$_save_PKG_CONFIG[]dnl
])dnl PKG_CHECK_MODULES_STATIC


dnl PKG_INSTALLDIR([DIRECTORY])
dnl -------------------------
dnl Since: 0.27
dnl
dnl Substitutes the variable pkgconfigdir as the location where a module
dnl should install pkg-config .pc files. By default the directory is
dnl $libdir/pkgconfig, but the default can be changed by passing
dnl DIRECTORY. The user can override through the --with-pkgconfigdir
dnl parameter.
AC_DEFUN([PKG_INSTALLDIR],
[m4_pushdef([pkg_default], [m4_default([$1], ['${libdir}/pkgconfig'])])
m4_pushdef([pkg_description],
    [pkg-config installation directory @<:@]pkg_default[@:>@])
AC_ARG_WITH([pkgconfigdir],
    [AS_HELP_STRING([--with-pkgconfigdir], pkg_description)],,
    [with_pkgconfigdir=]pkg_default)
AC_SUBST([pkgconfigdir], [$with_pkgconfigdir])
m4_popdef([pkg_default])
m4_popdef([pkg_description])
])dnl PKG_INSTALLDIR


dnl PKG_NOARCH_INSTALLDIR([DIRECTORY])
dnl --------------------------------
dnl Since: 0.27
dnl
dnl Substitutes the variable noarch_pkgconfigdir as the location where a
dnl module should install arch-independent pkg-config .pc files. By
dnl default the directory is $datadir/pkgconfig, but the default can be
dnl changed by passing DIRECTORY. The user can override through the
dnl --with-noarch-pkgconfigdir parameter.
AC_DEFUN([PKG_NOARCH_INSTALLDIR],
[m4_pushdef([pkg_default], [m4_default([$1], ['${datadir}/pkgconfig'])])
m4_pushdef([pkg_description],
    [pkg-config arch-independent installation directory @<:@]pkg_default[@:>@])
AC_ARG_WITH([noarch-pkgconfigdir],
    [AS_HELP_STRING([--with-noarch-pkgconfigdir], pkg_description)],,
    [with_noarch_pkgconfigdir=]pkg_default)
AC_SUBST([noarch_pkgconfigdir], [$with_noarch_pkgconfigdir])
m4_popdef([pkg_default])
m4_popdef([pkg_description])
])dnl PKG_NOARCH_INSTALLDIR


dnl PKG_CHECK_VAR(VARIABLE, MODULE, CONFIG-VARIABLE,
dnl [ACTION-IF-FOUND], [ACTION-IF-NOT-FOUND])
dnl -------------------------------------------
dnl Since: 0.28
dnl
dnl Retrieves the value of the pkg-config variable for the given module.
AC_DEFUN([PKG_CHECK_VAR],
[AC_REQUIRE([PKG_PROG_PKG_CONFIG])dnl
AC_ARG_VAR([$1], [value of $3 for $2, overriding pkg-config])dnl

_PKG_CONFIG([$1], [variable="][$3]["], [$2])
AS_VAR_COPY([$1], [pkg_cv_][$1])

AS_VAR_IF([$1], [""], [$5], [$4])dnl
])dnl PKG_CHECK_VAR

# Copyright (C) 2002-2017 Free Software Foundation, Inc.
#
# This file is free software; the Free Software Foundation
# gives unlimited permission to copy and/or distribute it,
# with or without modifications, as long as this notice is preserved.

# AM_AUTOMAKE_VERSION(VERSION)
# ----------------------------
# Automake X.Y traces this macro to ensure aclocal.m4 has been
# generated from the m4 files accompanying Automake X.Y.
# (This private macro should not be called outside this file.)
AC_DEFUN([AM_AUTOMAKE_VERSION],
[am__api_version='1.15'
dnl Some users find AM_AUTOMAKE_VERSION and mistake it for a way to
dnl require some minimum version.  Point them to the right macro.
m4_if([$1], [1.15.1], [],
      [AC_FATAL([Do not call $0, use AM_INIT_AUTOMAKE([$1]).])])dnl
])

# _AM_AUTOCONF_VERSION(VERSION)
# -----------------------------
# aclocal traces this macro to find the Autoconf version.
# This is a private macro too.
# Using m4_define simplifies
# the logic in aclocal, which can simply ignore this definition.
m4_define([_AM_AUTOCONF_VERSION], [])

# AM_SET_CURRENT_AUTOMAKE_VERSION
# -------------------------------
# Call AM_AUTOMAKE_VERSION and AM_AUTOMAKE_VERSION so they can be traced.
# This function is AC_REQUIREd by AM_INIT_AUTOMAKE.
AC_DEFUN([AM_SET_CURRENT_AUTOMAKE_VERSION],
[AM_AUTOMAKE_VERSION([1.15.1])dnl
m4_ifndef([AC_AUTOCONF_VERSION],
  [m4_copy([m4_PACKAGE_VERSION], [AC_AUTOCONF_VERSION])])dnl
_AM_AUTOCONF_VERSION(m4_defn([AC_AUTOCONF_VERSION]))])

# AM_AUX_DIR_EXPAND                                         -*- Autoconf -*-

# Copyright (C) 2001-2017 Free Software Foundation, Inc.
#
# This file is free software; the Free Software Foundation
# gives unlimited permission to copy and/or distribute it,
# with or without modifications, as long as this notice is preserved.

# For projects using AC_CONFIG_AUX_DIR([foo]), Autoconf sets
# $ac_aux_dir to '$srcdir/foo'.  In other projects, it is set to
# '$srcdir', '$srcdir/..', or '$srcdir/../..'.
#
# Of course, Automake must honor this variable whenever it calls a
# tool from the auxiliary directory.  The problem is that $srcdir (and
# therefore $ac_aux_dir as well) can be either absolute or relative,
# depending on how configure is run.  This is pretty annoying, since
# it makes $ac_aux_dir quite unusable in subdirectories: in the top
# source directory, any form will work fine, but in subdirectories a
# relative path needs to be adjusted first.
#
# $ac_aux_dir/missing
#    fails when called from a subdirectory if $ac_aux_dir is relative
# $top_srcdir/$ac_aux_dir/missing
#    fails if $ac_aux_dir is absolute,
#    fails when called from a subdirectory in a VPATH build with
#          a relative $ac_aux_dir
#
# The reason of the latter failure is that $top_srcdir and $ac_aux_dir
# are both prefixed by $srcdir.  In an in-source build this is usually
# harmless because $srcdir is '.', but things will broke when you
# start a VPATH build or use an absolute $srcdir.
#
# So we could use something similar to $top_srcdir/$ac_aux_dir/missing,
# iff we strip the leading $srcdir from $ac_aux_dir.  That would be:
#   am_aux_dir='\$(top_srcdir)/'`expr "$ac_aux_dir" : "$srcdir//*\(.*\)"`
# and then we would define $MISSING as
#   MISSING="\${SHELL} $am_aux_dir/missing"
# This will work as long as MISSING is not called from configure, because
# unfortunately $(top_srcdir) has no meaning in configure.
# However there are other variables, like CC, which are often used in
# configure, and could therefore not use this "fixed" $ac_aux_dir.
#
# Another solution, used here, is to always expand $ac_aux_dir to an
# absolute PATH.  The drawback is that using absolute paths prevent a
# configured tree to be moved without reconfiguration.

AC_DEFUN([AM_AUX_DIR_EXPAND],
[AC_REQUIRE([AC_CONFIG_AUX_DIR_DEFAULT])dnl
# Expand $ac_aux_dir to an absolute path.
am_aux_dir=`cd "$ac_aux_dir" && pwd`
])

# AM_CONDITIONAL                                            -*- Autoconf -*-

# Copyright (C) 1997-2017 Free Software Foundation, Inc.
#
# This file is free software; the Free Software Foundation
# gives unlimited permission to copy and/or distribute it,
# with or without modifications, as long as this notice is preserved.

# AM_CONDITIONAL(NAME, SHELL-CONDITION)
# -------------------------------------
# Define a conditional.
# NOTE(review): defines NAME_TRUE/NAME_FALSE substitutions ('' vs '#')
# that Automake-generated Makefiles use to enable/disable rule blocks;
# the CONFIG_COMMANDS_PRE hook catches conditionals that were declared
# but never evaluated.
AC_DEFUN([AM_CONDITIONAL],
[AC_PREREQ([2.52])dnl
 m4_if([$1], [TRUE],  [AC_FATAL([$0: invalid condition: $1])],
       [$1], [FALSE], [AC_FATAL([$0: invalid condition: $1])])dnl
AC_SUBST([$1_TRUE])dnl
AC_SUBST([$1_FALSE])dnl
_AM_SUBST_NOTMAKE([$1_TRUE])dnl
_AM_SUBST_NOTMAKE([$1_FALSE])dnl
m4_define([_AM_COND_VALUE_$1], [$2])dnl
if $2; then
  $1_TRUE=
  $1_FALSE='#'
else
  $1_TRUE='#'
  $1_FALSE=
fi
AC_CONFIG_COMMANDS_PRE(
[if test -z "${$1_TRUE}" && test -z "${$1_FALSE}"; then
  AC_MSG_ERROR([[conditional "$1" was never defined.
Usually this means the macro was only invoked conditionally.]])
fi])])

# Copyright (C) 1999-2017 Free Software Foundation, Inc.
#
# This file is free software; the Free Software Foundation
# gives unlimited permission to copy and/or distribute it,
# with or without modifications, as long as this notice is preserved.

# There are a few dirty hacks below to avoid letting 'AC_PROG_CC' be
# written in clear, in which case automake, when reading aclocal.m4,
# will think it sees a *use*, and therefore will trigger all it's
# C support machinery.  Also note that it means that autoscan, seeing
# CC etc. in the Makefile, will ask for an AC_PROG_CC use...


# _AM_DEPENDENCIES(NAME)
# ----------------------
# See how the compiler implements dependency checking.
# NAME is "CC", "CXX", "OBJC", "OBJCXX", "UPC", or "GJC".
# We try a few techniques and use that to set a single cache variable.
#
# We don't AC_REQUIRE the corresponding AC_PROG_CC since the latter was
# modified to invoke _AM_DEPENDENCIES(CC); we would have a circular
# dependency, and given that the user is not expected to run this macro,
# just rely on AC_PROG_CC.
AC_DEFUN([_AM_DEPENDENCIES],
[AC_REQUIRE([AM_SET_DEPDIR])dnl
AC_REQUIRE([AM_OUTPUT_DEPENDENCY_COMMANDS])dnl
AC_REQUIRE([AM_MAKE_INCLUDE])dnl
AC_REQUIRE([AM_DEP_TRACK])dnl

m4_if([$1], [CC],   [depcc="$CC"   am_compiler_list=],
      [$1], [CXX],  [depcc="$CXX"  am_compiler_list=],
      [$1], [OBJC], [depcc="$OBJC" am_compiler_list='gcc3 gcc'],
      [$1], [OBJCXX], [depcc="$OBJCXX" am_compiler_list='gcc3 gcc'],
      [$1], [UPC],  [depcc="$UPC"  am_compiler_list=],
      [$1], [GCJ],  [depcc="$GCJ"  am_compiler_list='gcc3 gcc'],
                    [depcc="$$1"   am_compiler_list=])

AC_CACHE_CHECK([dependency style of $depcc],
               [am_cv_$1_dependencies_compiler_type],
[if test -z "$AMDEP_TRUE" && test -f "$am_depcomp"; then
  # We make a subdir and do the tests there.  Otherwise we can end up
  # making bogus files that we don't know about and never remove.  For
  # instance it was reported that on HP-UX the gcc test will end up
  # making a dummy file named 'D' -- because '-MD' means "put the output
  # in D".
  rm -rf conftest.dir
  mkdir conftest.dir
  # Copy depcomp to subdir because otherwise we won't find it if we're
  # using a relative directory.
  cp "$am_depcomp" conftest.dir
  cd conftest.dir
  # We will build objects and dependencies in a subdirectory because
  # it helps to detect inapplicable dependency modes.  For instance
  # both Tru64's cc and ICC support -MD to output dependencies as a
  # side effect of compilation, but ICC will put the dependencies in
  # the current directory while Tru64 will put them in the object
  # directory.
  mkdir sub

  am_cv_$1_dependencies_compiler_type=none
  if test "$am_compiler_list" = ""; then
     am_compiler_list=`sed -n ['s/^#*\([a-zA-Z0-9]*\))$/\1/p'] < ./depcomp`
  fi
  am__universal=false
  m4_case([$1], [CC],
    [case " $depcc " in #(
     *\ -arch\ *\ -arch\ *) am__universal=true ;;
     esac],
    [CXX],
    [case " $depcc " in #(
     *\ -arch\ *\ -arch\ *) am__universal=true ;;
     esac])

  for depmode in $am_compiler_list; do
    # Setup a source with many dependencies, because some compilers
    # like to wrap large dependency lists on column 80 (with \), and
    # we should not choose a depcomp mode which is confused by this.
    #
    # We need to recreate these files for each test, as the compiler may
    # overwrite some of them when testing with obscure command lines.
    # This happens at least with the AIX C compiler.
    : > sub/conftest.c
    for i in 1 2 3 4 5 6; do
      echo '#include "conftst'$i'.h"' >> sub/conftest.c
      # Using ": > sub/conftst$i.h" creates only sub/conftst1.h with
      # Solaris 10 /bin/sh.
      echo '/* dummy */' > sub/conftst$i.h
    done
    echo "${am__include} ${am__quote}sub/conftest.Po${am__quote}" > confmf

    # We check with '-c' and '-o' for the sake of the "dashmstdout"
    # mode.  It turns out that the SunPro C++ compiler does not properly
    # handle '-M -o', and we need to detect this.  Also, some Intel
    # versions had trouble with output in subdirs.
    am__obj=sub/conftest.${OBJEXT-o}
    am__minus_obj="-o $am__obj"
    case $depmode in
    gcc)
      # This depmode causes a compiler race in universal mode.
      test "$am__universal" = false || continue
      ;;
    nosideeffect)
      # After this tag, mechanisms are not by side-effect, so they'll
      # only be used when explicitly requested.
      if test "x$enable_dependency_tracking" = xyes; then
	continue
      else
	break
      fi
      ;;
    msvc7 | msvc7msys | msvisualcpp | msvcmsys)
      # This compiler won't grok '-c -o', but also, the minuso test has
      # not run yet.  These depmodes are late enough in the game, and
      # so weak that their functioning should not be impacted.
      am__obj=conftest.${OBJEXT-o}
      am__minus_obj=
      ;;
    none) break ;;
    esac
    if depmode=$depmode \
       source=sub/conftest.c object=$am__obj \
       depfile=sub/conftest.Po tmpdepfile=sub/conftest.TPo \
       $SHELL ./depcomp $depcc -c $am__minus_obj sub/conftest.c \
         >/dev/null 2>conftest.err &&
       grep sub/conftst1.h sub/conftest.Po > /dev/null 2>&1 &&
       grep sub/conftst6.h sub/conftest.Po > /dev/null 2>&1 &&
       grep $am__obj sub/conftest.Po > /dev/null 2>&1 &&
       ${MAKE-make} -s -f confmf > /dev/null 2>&1; then
      # icc doesn't choke on unknown options, it will just issue warnings
      # or remarks (even with -Werror).  So we grep stderr for any message
      # that says an option was ignored or not supported.
      # When given -MP, icc 7.0 and 7.1 complain thusly:
      #   icc: Command line warning: ignoring option '-M'; no argument required
      # The diagnosis changed in icc 8.0:
      #   icc: Command line remark: option '-MP' not supported
      if (grep 'ignoring option' conftest.err ||
          grep 'not supported' conftest.err) >/dev/null 2>&1; then :; else
        am_cv_$1_dependencies_compiler_type=$depmode
        break
      fi
    fi
  done

  cd ..
  rm -rf conftest.dir
else
  am_cv_$1_dependencies_compiler_type=none
fi
])
AC_SUBST([$1DEPMODE], [depmode=$am_cv_$1_dependencies_compiler_type])
AM_CONDITIONAL([am__fastdep$1], [
  test "x$enable_dependency_tracking" != xno \
  && test "$am_cv_$1_dependencies_compiler_type" = gcc3])
])


# AM_SET_DEPDIR
# -------------
# Choose a directory name for dependency files.
# This macro is AC_REQUIREd in _AM_DEPENDENCIES.
AC_DEFUN([AM_SET_DEPDIR],
[AC_REQUIRE([AM_SET_LEADING_DOT])dnl
AC_SUBST([DEPDIR], ["${am__leading_dot}deps"])dnl
])

# AM_DEP_TRACK
# ------------
# Expose the --enable/--disable-dependency-tracking configure options
# and set am_depcomp/AMDEP accordingly (tracking is on by default).
AC_DEFUN([AM_DEP_TRACK],
[AC_ARG_ENABLE([dependency-tracking], [dnl
AS_HELP_STRING(
  [--enable-dependency-tracking],
  [do not reject slow dependency extractors])
AS_HELP_STRING(
  [--disable-dependency-tracking],
  [speeds up one-time build])])
if test "x$enable_dependency_tracking" != xno; then
  am_depcomp="$ac_aux_dir/depcomp"
  AMDEPBACKSLASH='\'
  am__nodep='_no'
fi
AM_CONDITIONAL([AMDEP], [test "x$enable_dependency_tracking" != xno])
AC_SUBST([AMDEPBACKSLASH])dnl
_AM_SUBST_NOTMAKE([AMDEPBACKSLASH])dnl
AC_SUBST([am__nodep])dnl
_AM_SUBST_NOTMAKE([am__nodep])dnl
])

# Generate code to set up dependency tracking.              -*- Autoconf -*-

# Copyright (C) 1999-2017 Free Software Foundation, Inc.
#
# This file is free software; the Free Software Foundation
# gives unlimited permission to copy and/or distribute it,
# with or without modifications, as long as this notice is preserved.

# _AM_OUTPUT_DEPENDENCY_COMMANDS
# ------------------------------
# Scans every Automake-generated Makefile named in $CONFIG_FILES and
# pre-creates a dummy '.Po'/'.Plo' dependency stub for each file the
# Makefile will later include, so the first 'make' run does not fail on
# missing include files.
AC_DEFUN([_AM_OUTPUT_DEPENDENCY_COMMANDS],
[{
  # Older Autoconf quotes --file arguments for eval, but not when files
  # are listed without --file.  Let's play safe and only enable the eval
  # if we detect the quoting.
  case $CONFIG_FILES in
  *\'*) eval set x "$CONFIG_FILES" ;;
  *)   set x $CONFIG_FILES ;;
  esac
  shift
  for mf
  do
    # Strip MF so we end up with the name of the file.
    mf=`echo "$mf" | sed -e 's/:.*$//'`
    # Check whether this is an Automake generated Makefile or not.
    # We used to match only the files named 'Makefile.in', but
    # some people rename them; so instead we look at the file content.
    # Grep'ing the first line is not enough: some people post-process
    # each Makefile.in and add a new line on top of each file to say so.
    # Grep'ing the whole file is not good either: AIX grep has a line
    # limit of 2048, but all sed's we know have understand at least 4000.
    if sed -n 's,^#.*generated by automake.*,X,p' "$mf" | grep X >/dev/null 2>&1; then
      dirpart=`AS_DIRNAME("$mf")`
    else
      continue
    fi
    # Extract the definition of DEPDIR, am__include, and am__quote
    # from the Makefile without running 'make'.
    DEPDIR=`sed -n 's/^DEPDIR = //p' < "$mf"`
    test -z "$DEPDIR" && continue
    am__include=`sed -n 's/^am__include = //p' < "$mf"`
    test -z "$am__include" && continue
    am__quote=`sed -n 's/^am__quote = //p' < "$mf"`
    # Find all dependency output files, they are included files with
    # $(DEPDIR) in their names.  We invoke sed twice because it is the
    # simplest approach to changing $(DEPDIR) to its actual value in the
    # expansion.
    for file in `sed -n "
      s/^$am__include $am__quote\(.*(DEPDIR).*\)$am__quote"'$/\1/p' <"$mf" | \
	 sed -e 's/\$(DEPDIR)/'"$DEPDIR"'/g'`; do
      # Make sure the directory exists.
      test -f "$dirpart/$file" && continue
      fdir=`AS_DIRNAME(["$file"])`
      AS_MKDIR_P([$dirpart/$fdir])
      # echo "creating $dirpart/$file"
      echo '# dummy' > "$dirpart/$file"
    done
  done
}
])# _AM_OUTPUT_DEPENDENCY_COMMANDS


# AM_OUTPUT_DEPENDENCY_COMMANDS
# -----------------------------
# This macro should only be invoked once -- use via AC_REQUIRE.
#
# This code is only required when automatic dependency tracking
# is enabled.  FIXME.  This creates each '.P' file that we will
# need in order to bootstrap the dependency handling code.
AC_DEFUN([AM_OUTPUT_DEPENDENCY_COMMANDS],
[AC_CONFIG_COMMANDS([depfiles],
     [test x"$AMDEP_TRUE" != x"" || _AM_OUTPUT_DEPENDENCY_COMMANDS],
     [AMDEP_TRUE="$AMDEP_TRUE" ac_aux_dir="$ac_aux_dir"])
])

# Do all the work for Automake.                             -*- Autoconf -*-

# Copyright (C) 1996-2017 Free Software Foundation, Inc.
#
# This file is free software; the Free Software Foundation
# gives unlimited permission to copy and/or distribute it,
# with or without modifications, as long as this notice is preserved.

# This macro actually does too much.  Some checks are only needed if
# your package does certain things.  But this isn't really a big deal.
dnl Redefine AC_PROG_CC to automatically invoke _AM_PROG_CC_C_O.
m4_define([AC_PROG_CC],
m4_defn([AC_PROG_CC])
[_AM_PROG_CC_C_O
])

# AM_INIT_AUTOMAKE(PACKAGE, VERSION, [NO-DEFINE])
# AM_INIT_AUTOMAKE([OPTIONS])
# -----------------------------------------------
# The call with PACKAGE and VERSION arguments is the old style
# call (pre autoconf-2.50), which is being phased out.  PACKAGE
# and VERSION should now be passed to AC_INIT and removed from
# the call to AM_INIT_AUTOMAKE.
# We support both call styles for the transition.  After
# the next Automake release, Autoconf can make the AC_INIT
# arguments mandatory, and then we can depend on a new Autoconf
# release and drop the old call support.
AC_DEFUN([AM_INIT_AUTOMAKE],
[AC_PREREQ([2.65])dnl
dnl Autoconf wants to disallow AM_ names.  We explicitly allow
dnl the ones we care about.
m4_pattern_allow([^AM_[A-Z]+FLAGS$])dnl
AC_REQUIRE([AM_SET_CURRENT_AUTOMAKE_VERSION])dnl
AC_REQUIRE([AC_PROG_INSTALL])dnl
if test "`cd $srcdir && pwd`" != "`pwd`"; then
  # Use -I$(srcdir) only when $(srcdir) != ., so that make's output
  # is not polluted with repeated "-I."
  AC_SUBST([am__isrc], [' -I$(srcdir)'])_AM_SUBST_NOTMAKE([am__isrc])dnl
  # test to see if srcdir already configured
  if test -f $srcdir/config.status; then
    AC_MSG_ERROR([source directory already configured; run "make distclean" there first])
  fi
fi

# test whether we have cygpath
if test -z "$CYGPATH_W"; then
  if (cygpath --version) >/dev/null 2>/dev/null; then
    CYGPATH_W='cygpath -w'
  else
    CYGPATH_W=echo
  fi
fi
AC_SUBST([CYGPATH_W])

# Define the identity of the package.
dnl Distinguish between old-style and new-style calls.
m4_ifval([$2],
[AC_DIAGNOSE([obsolete],
             [$0: two- and three-arguments forms are deprecated.])
m4_ifval([$3], [_AM_SET_OPTION([no-define])])dnl
 AC_SUBST([PACKAGE], [$1])dnl
 AC_SUBST([VERSION], [$2])],
[_AM_SET_OPTIONS([$1])dnl
dnl Diagnose old-style AC_INIT with new-style AM_AUTOMAKE_INIT.
m4_if(
  m4_ifdef([AC_PACKAGE_NAME], [ok]):m4_ifdef([AC_PACKAGE_VERSION], [ok]),
  [ok:ok],,
  [m4_fatal([AC_INIT should be called with package and version arguments])])dnl
 AC_SUBST([PACKAGE], ['AC_PACKAGE_TARNAME'])dnl
 AC_SUBST([VERSION], ['AC_PACKAGE_VERSION'])])dnl

_AM_IF_OPTION([no-define],,
[AC_DEFINE_UNQUOTED([PACKAGE], ["$PACKAGE"], [Name of package])
 AC_DEFINE_UNQUOTED([VERSION], ["$VERSION"], [Version number of package])])dnl

# Some tools Automake needs.
AC_REQUIRE([AM_SANITY_CHECK])dnl
AC_REQUIRE([AC_ARG_PROGRAM])dnl
AM_MISSING_PROG([ACLOCAL], [aclocal-${am__api_version}])
AM_MISSING_PROG([AUTOCONF], [autoconf])
AM_MISSING_PROG([AUTOMAKE], [automake-${am__api_version}])
AM_MISSING_PROG([AUTOHEADER], [autoheader])
AM_MISSING_PROG([MAKEINFO], [makeinfo])
AC_REQUIRE([AM_PROG_INSTALL_SH])dnl
AC_REQUIRE([AM_PROG_INSTALL_STRIP])dnl
AC_REQUIRE([AC_PROG_MKDIR_P])dnl
# For better backward compatibility.  To be removed once Automake 1.9.x
# dies out for good.  For more background, see:
# 
dnl NOTE(review): the URL that followed "see:" above was lost when this
dnl archive was converted to text; it pointed at the automake list archives.
AC_SUBST([mkdir_p], ['$(MKDIR_P)'])
# We need awk for the "check" target (and possibly the TAP driver).  The
# system "awk" is bad on some platforms.
AC_REQUIRE([AC_PROG_AWK])dnl
AC_REQUIRE([AC_PROG_MAKE_SET])dnl
AC_REQUIRE([AM_SET_LEADING_DOT])dnl
_AM_IF_OPTION([tar-ustar], [_AM_PROG_TAR([ustar])],
	      [_AM_IF_OPTION([tar-pax], [_AM_PROG_TAR([pax])],
			     [_AM_PROG_TAR([v7])])])
_AM_IF_OPTION([no-dependencies],,
[AC_PROVIDE_IFELSE([AC_PROG_CC],
		  [_AM_DEPENDENCIES([CC])],
		  [m4_define([AC_PROG_CC],
			     m4_defn([AC_PROG_CC])[_AM_DEPENDENCIES([CC])])])dnl
AC_PROVIDE_IFELSE([AC_PROG_CXX],
		  [_AM_DEPENDENCIES([CXX])],
		  [m4_define([AC_PROG_CXX],
			     m4_defn([AC_PROG_CXX])[_AM_DEPENDENCIES([CXX])])])dnl
AC_PROVIDE_IFELSE([AC_PROG_OBJC],
		  [_AM_DEPENDENCIES([OBJC])],
		  [m4_define([AC_PROG_OBJC],
			     m4_defn([AC_PROG_OBJC])[_AM_DEPENDENCIES([OBJC])])])dnl
AC_PROVIDE_IFELSE([AC_PROG_OBJCXX],
		  [_AM_DEPENDENCIES([OBJCXX])],
		  [m4_define([AC_PROG_OBJCXX],
			     m4_defn([AC_PROG_OBJCXX])[_AM_DEPENDENCIES([OBJCXX])])])dnl
])
AC_REQUIRE([AM_SILENT_RULES])dnl
dnl The testsuite driver may need to know about EXEEXT, so add the
dnl 'am__EXEEXT' conditional if _AM_COMPILER_EXEEXT was seen.  This
dnl macro is hooked onto _AC_COMPILER_EXEEXT early, see below.
AC_CONFIG_COMMANDS_PRE(dnl
[m4_provide_if([_AM_COMPILER_EXEEXT],
  [AM_CONDITIONAL([am__EXEEXT], [test -n "$EXEEXT"])])])dnl

# POSIX will say in a future version that running "rm -f" with no argument
# is OK; and we want to be able to make that assumption in our Makefile
# recipes.  So use an aggressive probe to check that the usage we want is
# actually supported "in the wild" to an acceptable degree.
# See automake bug#10828.
# To make any issue more visible, cause the running configure to be aborted
# by default if the 'rm' program in use doesn't match our expectations; the
# user can still override this though.
if rm -f && rm -fr && rm -rf; then : OK; else
  cat >&2 <<'END'
Oops!

Your 'rm' program seems unable to run without file operands specified
on the command line, even when the '-f' option is present.  This is contrary
to the behaviour of most rm programs out there, and not conforming with
the upcoming POSIX standard: 

Please tell bug-automake@gnu.org about your system, including the value
of your $PATH and any error possibly output before this message.  This
can help us improve future automake versions.

END
  if test x"$ACCEPT_INFERIOR_RM_PROGRAM" = x"yes"; then
    echo 'Configuration will proceed anyway, since you have set the' >&2
    echo 'ACCEPT_INFERIOR_RM_PROGRAM variable to "yes"' >&2
    echo >&2
  else
    cat >&2 <<'END'
Aborting the configuration process, to ensure you take notice of the issue.

You can download and install GNU coreutils to get an 'rm' implementation
that behaves properly: .

If you want to complete the configuration process using your problematic
'rm' anyway, export the environment variable ACCEPT_INFERIOR_RM_PROGRAM
to "yes", and re-run configure.

END
    AC_MSG_ERROR([Your 'rm' program is bad, sorry.])
  fi
fi
dnl The trailing newline in this macro's definition is deliberate, for
dnl backward compatibility and to allow trailing 'dnl'-style comments
dnl after the AM_INIT_AUTOMAKE invocation. See automake bug#16841.
])

dnl Hook into '_AC_COMPILER_EXEEXT' early to learn its expansion.  Do not
dnl add the conditional right here, as _AC_COMPILER_EXEEXT may be further
dnl mangled by Autoconf and run in a shell conditional statement.
m4_define([_AC_COMPILER_EXEEXT],
m4_defn([_AC_COMPILER_EXEEXT])[m4_provide([_AM_COMPILER_EXEEXT])])

# When config.status generates a header, we must update the stamp-h file.
# This file resides in the same directory as the config header
# that is generated.  The stamp files are numbered to have different names.

# Autoconf calls _AC_AM_CONFIG_HEADER_HOOK (when defined) in the
# loop where config.status creates the headers, so we can generate
# our stamp files there.
AC_DEFUN([_AC_AM_CONFIG_HEADER_HOOK],
[# Compute $1's index in $config_headers.
_am_arg=$1
_am_stamp_count=1
for _am_header in $config_headers :; do
  case $_am_header in
    $_am_arg | $_am_arg:* )
      break ;;
    * )
      _am_stamp_count=`expr $_am_stamp_count + 1` ;;
  esac
done
echo "timestamp for $_am_arg" >`AS_DIRNAME(["$_am_arg"])`/stamp-h[]$_am_stamp_count])

# Copyright (C) 2001-2017 Free Software Foundation, Inc.
#
# This file is free software; the Free Software Foundation
# gives unlimited permission to copy and/or distribute it,
# with or without modifications, as long as this notice is preserved.

# AM_PROG_INSTALL_SH
# ------------------
# Define $install_sh.
AC_DEFUN([AM_PROG_INSTALL_SH],
[AC_REQUIRE([AM_AUX_DIR_EXPAND])dnl
if test x"${install_sh+set}" != xset; then
  case $am_aux_dir in
  *\ * | *\	*)
    install_sh="\${SHELL} '$am_aux_dir/install-sh'" ;;
  *)
    install_sh="\${SHELL} $am_aux_dir/install-sh"
  esac
fi
AC_SUBST([install_sh])])

# Copyright (C) 2003-2017 Free Software Foundation, Inc.
#
# This file is free software; the Free Software Foundation
# gives unlimited permission to copy and/or distribute it,
# with or without modifications, as long as this notice is preserved.

# Check whether the underlying file-system supports filenames
# with a leading dot.  For instance MS-DOS doesn't.
AC_DEFUN([AM_SET_LEADING_DOT],
[rm -rf .tst 2>/dev/null
mkdir .tst 2>/dev/null
if test -d .tst; then
  am__leading_dot=.
else
  am__leading_dot=_
fi
rmdir .tst 2>/dev/null
AC_SUBST([am__leading_dot])])

# Add --enable-maintainer-mode option to configure.         -*- Autoconf -*-
# From Jim Meyering

# Copyright (C) 1996-2017 Free Software Foundation, Inc.
#
# This file is free software; the Free Software Foundation
# gives unlimited permission to copy and/or distribute it,
# with or without modifications, as long as this notice is preserved.

# AM_MAINTAINER_MODE([DEFAULT-MODE])
# ----------------------------------
# Control maintainer-specific portions of Makefiles.
# Default is to disable them, unless 'enable' is passed literally.
# For symmetry, 'disable' may be passed as well.
Anyway, the user # can override the default with the --enable/--disable switch. AC_DEFUN([AM_MAINTAINER_MODE], [m4_case(m4_default([$1], [disable]), [enable], [m4_define([am_maintainer_other], [disable])], [disable], [m4_define([am_maintainer_other], [enable])], [m4_define([am_maintainer_other], [enable]) m4_warn([syntax], [unexpected argument to AM@&t@_MAINTAINER_MODE: $1])]) AC_MSG_CHECKING([whether to enable maintainer-specific portions of Makefiles]) dnl maintainer-mode's default is 'disable' unless 'enable' is passed AC_ARG_ENABLE([maintainer-mode], [AS_HELP_STRING([--]am_maintainer_other[-maintainer-mode], am_maintainer_other[ make rules and dependencies not useful (and sometimes confusing) to the casual installer])], [USE_MAINTAINER_MODE=$enableval], [USE_MAINTAINER_MODE=]m4_if(am_maintainer_other, [enable], [no], [yes])) AC_MSG_RESULT([$USE_MAINTAINER_MODE]) AM_CONDITIONAL([MAINTAINER_MODE], [test $USE_MAINTAINER_MODE = yes]) MAINT=$MAINTAINER_MODE_TRUE AC_SUBST([MAINT])dnl ] ) # Check to see how 'make' treats includes. -*- Autoconf -*- # Copyright (C) 2001-2017 Free Software Foundation, Inc. # # This file is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. # AM_MAKE_INCLUDE() # ----------------- # Check to see how make treats includes. AC_DEFUN([AM_MAKE_INCLUDE], [am_make=${MAKE-make} cat > confinc << 'END' am__doit: @echo this is the am__doit target .PHONY: am__doit END # If we don't find an include directive, just comment out the code. AC_MSG_CHECKING([for style of include used by $am_make]) am__include="#" am__quote= _am_result=none # First try GNU make style include. echo "include confinc" > confmf # Ignore all kinds of additional output from 'make'. case `$am_make -s -f confmf 2> /dev/null` in #( *the\ am__doit\ target*) am__include=include am__quote= _am_result=GNU ;; esac # Now try BSD make style include. 
if test "$am__include" = "#"; then echo '.include "confinc"' > confmf case `$am_make -s -f confmf 2> /dev/null` in #( *the\ am__doit\ target*) am__include=.include am__quote="\"" _am_result=BSD ;; esac fi AC_SUBST([am__include]) AC_SUBST([am__quote]) AC_MSG_RESULT([$_am_result]) rm -f confinc confmf ]) # Fake the existence of programs that GNU maintainers use. -*- Autoconf -*- # Copyright (C) 1997-2017 Free Software Foundation, Inc. # # This file is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. # AM_MISSING_PROG(NAME, PROGRAM) # ------------------------------ AC_DEFUN([AM_MISSING_PROG], [AC_REQUIRE([AM_MISSING_HAS_RUN]) $1=${$1-"${am_missing_run}$2"} AC_SUBST($1)]) # AM_MISSING_HAS_RUN # ------------------ # Define MISSING if not defined so far and test if it is modern enough. # If it is, set am_missing_run to use it, otherwise, to nothing. AC_DEFUN([AM_MISSING_HAS_RUN], [AC_REQUIRE([AM_AUX_DIR_EXPAND])dnl AC_REQUIRE_AUX_FILE([missing])dnl if test x"${MISSING+set}" != xset; then case $am_aux_dir in *\ * | *\ *) MISSING="\${SHELL} \"$am_aux_dir/missing\"" ;; *) MISSING="\${SHELL} $am_aux_dir/missing" ;; esac fi # Use eval to expand $SHELL if eval "$MISSING --is-lightweight"; then am_missing_run="$MISSING " else am_missing_run= AC_MSG_WARN(['missing' script is too old or missing]) fi ]) # Helper functions for option handling. -*- Autoconf -*- # Copyright (C) 2001-2017 Free Software Foundation, Inc. # # This file is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. # _AM_MANGLE_OPTION(NAME) # ----------------------- AC_DEFUN([_AM_MANGLE_OPTION], [[_AM_OPTION_]m4_bpatsubst($1, [[^a-zA-Z0-9_]], [_])]) # _AM_SET_OPTION(NAME) # -------------------- # Set option NAME. Presently that only means defining a flag for this option. 
AC_DEFUN([_AM_SET_OPTION], [m4_define(_AM_MANGLE_OPTION([$1]), [1])]) # _AM_SET_OPTIONS(OPTIONS) # ------------------------ # OPTIONS is a space-separated list of Automake options. AC_DEFUN([_AM_SET_OPTIONS], [m4_foreach_w([_AM_Option], [$1], [_AM_SET_OPTION(_AM_Option)])]) # _AM_IF_OPTION(OPTION, IF-SET, [IF-NOT-SET]) # ------------------------------------------- # Execute IF-SET if OPTION is set, IF-NOT-SET otherwise. AC_DEFUN([_AM_IF_OPTION], [m4_ifset(_AM_MANGLE_OPTION([$1]), [$2], [$3])]) # Copyright (C) 1999-2017 Free Software Foundation, Inc. # # This file is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. # _AM_PROG_CC_C_O # --------------- # Like AC_PROG_CC_C_O, but changed for automake. We rewrite AC_PROG_CC # to automatically call this. AC_DEFUN([_AM_PROG_CC_C_O], [AC_REQUIRE([AM_AUX_DIR_EXPAND])dnl AC_REQUIRE_AUX_FILE([compile])dnl AC_LANG_PUSH([C])dnl AC_CACHE_CHECK( [whether $CC understands -c and -o together], [am_cv_prog_cc_c_o], [AC_LANG_CONFTEST([AC_LANG_PROGRAM([])]) # Make sure it works both with $CC and with simple cc. # Following AC_PROG_CC_C_O, we do the test twice because some # compilers refuse to overwrite an existing .o file with -o, # though they will create one. am_cv_prog_cc_c_o=yes for am_i in 1 2; do if AM_RUN_LOG([$CC -c conftest.$ac_ext -o conftest2.$ac_objext]) \ && test -f conftest2.$ac_objext; then : OK else am_cv_prog_cc_c_o=no break fi done rm -f core conftest* unset am_i]) if test "$am_cv_prog_cc_c_o" != yes; then # Losing compiler, so override with the script. # FIXME: It is wrong to rewrite CC. # But if we don't then we get into trouble of one sort or another. # A longer-term fix would be to have automake use am__CC in this case, # and then we could set am__CC="\$(top_srcdir)/compile \$(CC)" CC="$am_aux_dir/compile $CC" fi AC_LANG_POP([C])]) # For backward compatibility. 
AC_DEFUN_ONCE([AM_PROG_CC_C_O], [AC_REQUIRE([AC_PROG_CC])]) # Copyright (C) 2001-2017 Free Software Foundation, Inc. # # This file is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. # AM_RUN_LOG(COMMAND) # ------------------- # Run COMMAND, save the exit status in ac_status, and log it. # (This has been adapted from Autoconf's _AC_RUN_LOG macro.) AC_DEFUN([AM_RUN_LOG], [{ echo "$as_me:$LINENO: $1" >&AS_MESSAGE_LOG_FD ($1) >&AS_MESSAGE_LOG_FD 2>&AS_MESSAGE_LOG_FD ac_status=$? echo "$as_me:$LINENO: \$? = $ac_status" >&AS_MESSAGE_LOG_FD (exit $ac_status); }]) # Check to make sure that the build environment is sane. -*- Autoconf -*- # Copyright (C) 1996-2017 Free Software Foundation, Inc. # # This file is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. # AM_SANITY_CHECK # --------------- AC_DEFUN([AM_SANITY_CHECK], [AC_MSG_CHECKING([whether build environment is sane]) # Reject unsafe characters in $srcdir or the absolute working directory # name. Accept space and tab only in the latter. am_lf=' ' case `pwd` in *[[\\\"\#\$\&\'\`$am_lf]]*) AC_MSG_ERROR([unsafe absolute working directory name]);; esac case $srcdir in *[[\\\"\#\$\&\'\`$am_lf\ \ ]]*) AC_MSG_ERROR([unsafe srcdir value: '$srcdir']);; esac # Do 'set' in a subshell so we don't clobber the current shell's # arguments. Must try -L first in case configure is actually a # symlink; some systems play weird games with the mod time of symlinks # (eg FreeBSD returns the mod time of the symlink's containing # directory). if ( am_has_slept=no for am_try in 1 2; do echo "timestamp, slept: $am_has_slept" > conftest.file set X `ls -Lt "$srcdir/configure" conftest.file 2> /dev/null` if test "$[*]" = "X"; then # -L didn't work. 
set X `ls -t "$srcdir/configure" conftest.file` fi if test "$[*]" != "X $srcdir/configure conftest.file" \ && test "$[*]" != "X conftest.file $srcdir/configure"; then # If neither matched, then we have a broken ls. This can happen # if, for instance, CONFIG_SHELL is bash and it inherits a # broken ls alias from the environment. This has actually # happened. Such a system could not be considered "sane". AC_MSG_ERROR([ls -t appears to fail. Make sure there is not a broken alias in your environment]) fi if test "$[2]" = conftest.file || test $am_try -eq 2; then break fi # Just in case. sleep 1 am_has_slept=yes done test "$[2]" = conftest.file ) then # Ok. : else AC_MSG_ERROR([newly created file is older than distributed files! Check your system clock]) fi AC_MSG_RESULT([yes]) # If we didn't sleep, we still need to ensure time stamps of config.status and # generated files are strictly newer. am_sleep_pid= if grep 'slept: no' conftest.file >/dev/null 2>&1; then ( sleep 1 ) & am_sleep_pid=$! fi AC_CONFIG_COMMANDS_PRE( [AC_MSG_CHECKING([that generated files are newer than configure]) if test -n "$am_sleep_pid"; then # Hide warnings about reused PIDs. wait $am_sleep_pid 2>/dev/null fi AC_MSG_RESULT([done])]) rm -f conftest.file ]) # Copyright (C) 2009-2017 Free Software Foundation, Inc. # # This file is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. # AM_SILENT_RULES([DEFAULT]) # -------------------------- # Enable less verbose build rules; with the default set to DEFAULT # ("yes" being less verbose, "no" or empty being verbose). 
AC_DEFUN([AM_SILENT_RULES], [AC_ARG_ENABLE([silent-rules], [dnl AS_HELP_STRING( [--enable-silent-rules], [less verbose build output (undo: "make V=1")]) AS_HELP_STRING( [--disable-silent-rules], [verbose build output (undo: "make V=0")])dnl ]) case $enable_silent_rules in @%:@ ((( yes) AM_DEFAULT_VERBOSITY=0;; no) AM_DEFAULT_VERBOSITY=1;; *) AM_DEFAULT_VERBOSITY=m4_if([$1], [yes], [0], [1]);; esac dnl dnl A few 'make' implementations (e.g., NonStop OS and NextStep) dnl do not support nested variable expansions. dnl See automake bug#9928 and bug#10237. am_make=${MAKE-make} AC_CACHE_CHECK([whether $am_make supports nested variables], [am_cv_make_support_nested_variables], [if AS_ECHO([['TRUE=$(BAR$(V)) BAR0=false BAR1=true V=1 am__doit: @$(TRUE) .PHONY: am__doit']]) | $am_make -f - >/dev/null 2>&1; then am_cv_make_support_nested_variables=yes else am_cv_make_support_nested_variables=no fi]) if test $am_cv_make_support_nested_variables = yes; then dnl Using '$V' instead of '$(V)' breaks IRIX make. AM_V='$(V)' AM_DEFAULT_V='$(AM_DEFAULT_VERBOSITY)' else AM_V=$AM_DEFAULT_VERBOSITY AM_DEFAULT_V=$AM_DEFAULT_VERBOSITY fi AC_SUBST([AM_V])dnl AM_SUBST_NOTMAKE([AM_V])dnl AC_SUBST([AM_DEFAULT_V])dnl AM_SUBST_NOTMAKE([AM_DEFAULT_V])dnl AC_SUBST([AM_DEFAULT_VERBOSITY])dnl AM_BACKSLASH='\' AC_SUBST([AM_BACKSLASH])dnl _AM_SUBST_NOTMAKE([AM_BACKSLASH])dnl ]) # Copyright (C) 2001-2017 Free Software Foundation, Inc. # # This file is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. # AM_PROG_INSTALL_STRIP # --------------------- # One issue with vendor 'install' (even GNU) is that you can't # specify the program used to strip binaries. This is especially # annoying in cross-compiling environments, where the build's strip # is unlikely to handle the host's binaries. 
# Fortunately install-sh will honor a STRIPPROG variable, so we # always use install-sh in "make install-strip", and initialize # STRIPPROG with the value of the STRIP variable (set by the user). AC_DEFUN([AM_PROG_INSTALL_STRIP], [AC_REQUIRE([AM_PROG_INSTALL_SH])dnl # Installed binaries are usually stripped using 'strip' when the user # run "make install-strip". However 'strip' might not be the right # tool to use in cross-compilation environments, therefore Automake # will honor the 'STRIP' environment variable to overrule this program. dnl Don't test for $cross_compiling = yes, because it might be 'maybe'. if test "$cross_compiling" != no; then AC_CHECK_TOOL([STRIP], [strip], :) fi INSTALL_STRIP_PROGRAM="\$(install_sh) -c -s" AC_SUBST([INSTALL_STRIP_PROGRAM])]) # Copyright (C) 2006-2017 Free Software Foundation, Inc. # # This file is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. # _AM_SUBST_NOTMAKE(VARIABLE) # --------------------------- # Prevent Automake from outputting VARIABLE = @VARIABLE@ in Makefile.in. # This macro is traced by Automake. AC_DEFUN([_AM_SUBST_NOTMAKE]) # AM_SUBST_NOTMAKE(VARIABLE) # -------------------------- # Public sister of _AM_SUBST_NOTMAKE. AC_DEFUN([AM_SUBST_NOTMAKE], [_AM_SUBST_NOTMAKE($@)]) # Check how to create a tarball. -*- Autoconf -*- # Copyright (C) 2004-2017 Free Software Foundation, Inc. # # This file is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. # _AM_PROG_TAR(FORMAT) # -------------------- # Check how to create a tarball in format FORMAT. # FORMAT should be one of 'v7', 'ustar', or 'pax'. # # Substitute a variable $(am__tar) that is a command # writing to stdout a FORMAT-tarball containing the directory # $tardir. 
# tardir=directory && $(am__tar) > result.tar # # Substitute a variable $(am__untar) that extract such # a tarball read from stdin. # $(am__untar) < result.tar # AC_DEFUN([_AM_PROG_TAR], [# Always define AMTAR for backward compatibility. Yes, it's still used # in the wild :-( We should find a proper way to deprecate it ... AC_SUBST([AMTAR], ['$${TAR-tar}']) # We'll loop over all known methods to create a tar archive until one works. _am_tools='gnutar m4_if([$1], [ustar], [plaintar]) pax cpio none' m4_if([$1], [v7], [am__tar='$${TAR-tar} chof - "$$tardir"' am__untar='$${TAR-tar} xf -'], [m4_case([$1], [ustar], [# The POSIX 1988 'ustar' format is defined with fixed-size fields. # There is notably a 21 bits limit for the UID and the GID. In fact, # the 'pax' utility can hang on bigger UID/GID (see automake bug#8343 # and bug#13588). am_max_uid=2097151 # 2^21 - 1 am_max_gid=$am_max_uid # The $UID and $GID variables are not portable, so we need to resort # to the POSIX-mandated id(1) utility. Errors in the 'id' calls # below are definitely unexpected, so allow the users to see them # (that is, avoid stderr redirection). am_uid=`id -u || echo unknown` am_gid=`id -g || echo unknown` AC_MSG_CHECKING([whether UID '$am_uid' is supported by ustar format]) if test $am_uid -le $am_max_uid; then AC_MSG_RESULT([yes]) else AC_MSG_RESULT([no]) _am_tools=none fi AC_MSG_CHECKING([whether GID '$am_gid' is supported by ustar format]) if test $am_gid -le $am_max_gid; then AC_MSG_RESULT([yes]) else AC_MSG_RESULT([no]) _am_tools=none fi], [pax], [], [m4_fatal([Unknown tar format])]) AC_MSG_CHECKING([how to create a $1 tar archive]) # Go ahead even if we have the value already cached. We do so because we # need to set the values for the 'am__tar' and 'am__untar' variables. 
_am_tools=${am_cv_prog_tar_$1-$_am_tools} for _am_tool in $_am_tools; do case $_am_tool in gnutar) for _am_tar in tar gnutar gtar; do AM_RUN_LOG([$_am_tar --version]) && break done am__tar="$_am_tar --format=m4_if([$1], [pax], [posix], [$1]) -chf - "'"$$tardir"' am__tar_="$_am_tar --format=m4_if([$1], [pax], [posix], [$1]) -chf - "'"$tardir"' am__untar="$_am_tar -xf -" ;; plaintar) # Must skip GNU tar: if it does not support --format= it doesn't create # ustar tarball either. (tar --version) >/dev/null 2>&1 && continue am__tar='tar chf - "$$tardir"' am__tar_='tar chf - "$tardir"' am__untar='tar xf -' ;; pax) am__tar='pax -L -x $1 -w "$$tardir"' am__tar_='pax -L -x $1 -w "$tardir"' am__untar='pax -r' ;; cpio) am__tar='find "$$tardir" -print | cpio -o -H $1 -L' am__tar_='find "$tardir" -print | cpio -o -H $1 -L' am__untar='cpio -i -H $1 -d' ;; none) am__tar=false am__tar_=false am__untar=false ;; esac # If the value was cached, stop now. We just wanted to have am__tar # and am__untar set. test -n "${am_cv_prog_tar_$1}" && break # tar/untar a dummy directory, and stop if the command works. 
rm -rf conftest.dir mkdir conftest.dir echo GrepMe > conftest.dir/file AM_RUN_LOG([tardir=conftest.dir && eval $am__tar_ >conftest.tar]) rm -rf conftest.dir if test -s conftest.tar; then AM_RUN_LOG([$am__untar /dev/null 2>&1 && break fi done rm -rf conftest.dir AC_CACHE_VAL([am_cv_prog_tar_$1], [am_cv_prog_tar_$1=$_am_tool]) AC_MSG_RESULT([$am_cv_prog_tar_$1])]) AC_SUBST([am__tar]) AC_SUBST([am__untar]) ]) # _AM_PROG_TAR m4_include([m4/ax_pkg_check_modules.m4]) m4_include([m4/tsk_opt_dep_check.m4]) sleuthkit-4.11.1/win32/callback-cpp-sample/000755 000765 000024 00000000000 14137073557 021155 5ustar00carrierstaff000000 000000 sleuthkit-4.11.1/win32/mmstat/000755 000765 000024 00000000000 14137073557 016667 5ustar00carrierstaff000000 000000 sleuthkit-4.11.1/win32/blkcat/000755 000765 000024 00000000000 14137073557 016622 5ustar00carrierstaff000000 000000 sleuthkit-4.11.1/win32/libtsk/000755 000765 000024 00000000000 14137073557 016652 5ustar00carrierstaff000000 000000 sleuthkit-4.11.1/win32/jls/000755 000765 000024 00000000000 14137073557 016152 5ustar00carrierstaff000000 000000 sleuthkit-4.11.1/win32/pstat/000755 000765 000024 00000000000 14137073557 016515 5ustar00carrierstaff000000 000000 sleuthkit-4.11.1/win32/tsk_loaddb/000755 000765 000024 00000000000 14137073557 017470 5ustar00carrierstaff000000 000000 sleuthkit-4.11.1/win32/jcat/000755 000765 000024 00000000000 14137073557 016303 5ustar00carrierstaff000000 000000 sleuthkit-4.11.1/win32/tsk_logical_imager/000755 000765 000024 00000000000 14137073557 021201 5ustar00carrierstaff000000 000000 sleuthkit-4.11.1/win32/tsk_gettimes/000755 000765 000024 00000000000 14137073557 020064 5ustar00carrierstaff000000 000000 sleuthkit-4.11.1/win32/ffind/000755 000765 000024 00000000000 14137073557 016450 5ustar00carrierstaff000000 000000 sleuthkit-4.11.1/win32/ils/000755 000765 000024 00000000000 14137073557 016151 5ustar00carrierstaff000000 000000 sleuthkit-4.11.1/win32/posix-cpp-sample/000755 000765 000024 00000000000 
14137073557 020563 5ustar00carrierstaff000000 000000 sleuthkit-4.11.1/win32/docs/000755 000765 000024 00000000000 14137073557 016312 5ustar00carrierstaff000000 000000 sleuthkit-4.11.1/win32/rejistry++/000755 000765 000024 00000000000 14137073557 017363 5ustar00carrierstaff000000 000000 sleuthkit-4.11.1/win32/tsk_comparedir/000755 000765 000024 00000000000 14137073557 020370 5ustar00carrierstaff000000 000000 sleuthkit-4.11.1/win32/callback-sample/000755 000765 000024 00000000000 14137073557 020375 5ustar00carrierstaff000000 000000 sleuthkit-4.11.1/win32/hfind/000755 000765 000024 00000000000 14137073557 016452 5ustar00carrierstaff000000 000000 sleuthkit-4.11.1/win32/posix-sample/000755 000765 000024 00000000000 14137073557 020003 5ustar00carrierstaff000000 000000 sleuthkit-4.11.1/win32/ifind/000755 000765 000024 00000000000 14137073557 016453 5ustar00carrierstaff000000 000000 sleuthkit-4.11.1/win32/tsk_recover/000755 000765 000024 00000000000 14137073557 017710 5ustar00carrierstaff000000 000000 sleuthkit-4.11.1/win32/tsk_jni/000755 000765 000024 00000000000 14137073557 017023 5ustar00carrierstaff000000 000000 sleuthkit-4.11.1/win32/NugetPackages.props000644 000765 000024 00000004606 14137073414 021166 0ustar00carrierstaff000000 000000 $(SolutionDir)\packages\libvmdk.20200810.0.0\lib\native\include $(SolutionDir)\packages\libvmdk.20200810.0.0\lib\native\lib\Win32 $(SolutionDir)\packages\libvmdk.20200810.0.0\lib\native\lib\x64 $(SolutionDir)\packages\libvhdi.20200810.0.0\lib\native\include $(SolutionDir)\packages\libvhdi.20200810.0.0\lib\native\lib\Win32 $(SolutionDir)\packages\libvhdi.20200810.0.0\lib\native\lib\x64 $(SolutionDir)\packages\sleuthkit-libewf.20130416.0.0\build\native\include $(SolutionDir)\packages\sleuthkit-libewf.20130416.0.0\build\native\msvscpp\Release $(SolutionDir)\packages\sleuthkit-libewf.20130416.0.0\build\native\msvscpp\x64\Release $(SolutionDir)\packages\zlib_native.1.2.11\build\native\include 
$(SolutionDir)\packages\zlib_native.1.2.11\build\native\lib\Win32\Release $(SolutionDir)\packages\zlib_native.1.2.11\build\native\lib\x64\Release $(SolutionDir)\packages\zlib_native.redist.1.2.11\build\native\bin\Win32\Release $(SolutionDir)\packages\zlib_native.redist.1.2.11\build\native\bin\x64\Release $(SolutionDir)\packages\openssl-vc140-vc141-x86_64.1.1.5\build\native\include; $(SolutionDir)\packages\openssl-vc140-vc141-x86_64.1.1.5\build\native\lib\vc140\x64; $(LibVmdkInclude);$(LibVhdiInclude);$(LibEwfInclude);$(ZlibInclude);$(OpenSslInclude); $(LibVmdkLib);$(LibVhdiLib);$(LibEwfLib);$(ZlibLib);$(OpenSslLib); sleuthkit-4.11.1/win32/tsk_imageinfo/000755 000765 000024 00000000000 14137073557 020201 5ustar00carrierstaff000000 000000 sleuthkit-4.11.1/win32/fls/000755 000765 000024 00000000000 14137073557 016146 5ustar00carrierstaff000000 000000 sleuthkit-4.11.1/win32/fsstat/000755 000765 000024 00000000000 14137073557 016666 5ustar00carrierstaff000000 000000 sleuthkit-4.11.1/win32/icat/000755 000765 000024 00000000000 14137073557 016302 5ustar00carrierstaff000000 000000 sleuthkit-4.11.1/win32/img_stat/000755 000765 000024 00000000000 14137073557 017171 5ustar00carrierstaff000000 000000 sleuthkit-4.11.1/win32/BUILDING.txt000755 000765 000024 00000003670 14137073414 017321 0ustar00carrierstaff000000 000000 Last Updated: 18 October 2016 This file describes how to build TSK using Visual Studio 2015 (see README_win32.txt for instructions on building the win32 libraries and executables from Linux). Installing Visual Studio If you do not have a copy of Visual Studio, you can use the free Community Edition: http://www.microsoft.com/express/vc/ When installing it, choose to do a custom installation to ensure that Visual C++ is added. You will also need to choose support to make XP executables. Building There are four build targets: - Debug_NoLibs and Release_NoLibs do not depend on any third-party libraries. 
- Debug and Release depend on libewf, libvmdk, libvhdi, and zlib NuGet packages so that E01 images as well as VMDK and VHD virtual machine formats are supported. Note: The following instructions are for 64 bit versions to TSK only. ------------------------------------------------------------------------ Debug and Release Targets The steps below outline the process required to compile the Debug and Release targets. 1) If you want to build libtsk_jni for the Java JNI bindings, then set the JDK_HOME environment variable to point to the top directory of your Java SDK. 2) Open the TSK Visual Studio Solution file, tsk-win.sln, in the win32 directory. 3) Compile a Debug, Debug_NoLibs, or Release version of the libraries and executables. The resulting libraries and executables on a 32-bit build will be put in win32/Debug, win32/Debug_NoLibs, or win32/Release as appropriate. A 64-bit build will put them into the win32/x64 folders. You can change the type of build using the pulldown in Visual Studio and switching between Win32 and x64. 4) Note that the libraries and executables will depend on the libewf, libvmdk, libvhdi, and zlib DLL files (which are copied to the TSK build directories). 
------------------------------------------------------------------- carrier sleuthkit org Brian Carrier sleuthkit-4.11.1/win32/fcat/000755 000765 000024 00000000000 14137073557 016277 5ustar00carrierstaff000000 000000 sleuthkit-4.11.1/win32/mmls/000755 000765 000024 00000000000 14137073557 016332 5ustar00carrierstaff000000 000000 sleuthkit-4.11.1/win32/istat/000755 000765 000024 00000000000 14137073557 016506 5ustar00carrierstaff000000 000000 sleuthkit-4.11.1/win32/mmcat/000755 000765 000024 00000000000 14137073557 016463 5ustar00carrierstaff000000 000000 sleuthkit-4.11.1/win32/img_cat/000755 000765 000024 00000000000 14137073557 016765 5ustar00carrierstaff000000 000000 sleuthkit-4.11.1/win32/blkstat/000755 000765 000024 00000000000 14137073557 017026 5ustar00carrierstaff000000 000000 sleuthkit-4.11.1/win32/tsk-win.sln000644 000765 000024 00000155151 14137073414 017474 0ustar00carrierstaff000000 000000  Microsoft Visual Studio Solution File, Format Version 12.00 # Visual Studio 14 VisualStudioVersion = 14.0.25420.1 MinimumVisualStudioVersion = 10.0.40219.1 Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "img_stat", "img_stat\img_stat.vcxproj", "{48F52EA8-A5D1-4BF4-B774-6ECFCB0CE3C9}" ProjectSection(ProjectDependencies) = postProject {76EFC06C-1F64-4478-ABE8-79832716B393} = {76EFC06C-1F64-4478-ABE8-79832716B393} EndProjectSection EndProject Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "mmls", "mmls\mmls.vcxproj", "{712DD83B-786E-485E-83C7-7197DD851B78}" ProjectSection(ProjectDependencies) = postProject {76EFC06C-1F64-4478-ABE8-79832716B393} = {76EFC06C-1F64-4478-ABE8-79832716B393} EndProjectSection EndProject Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "img_cat", "img_cat\img_cat.vcxproj", "{671D843F-4DFA-4CB8-8BC9-D44E7F4ECF1E}" ProjectSection(ProjectDependencies) = postProject {76EFC06C-1F64-4478-ABE8-79832716B393} = {76EFC06C-1F64-4478-ABE8-79832716B393} EndProjectSection EndProject Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "mmstat", 
"mmstat\mmstat.vcxproj", "{5D75FBFB-539A-4014-ACEB-520BB16F5BFC}" ProjectSection(ProjectDependencies) = postProject {76EFC06C-1F64-4478-ABE8-79832716B393} = {76EFC06C-1F64-4478-ABE8-79832716B393} EndProjectSection EndProject Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "fls", "fls\fls.vcxproj", "{58DA1042-AC19-4779-AC1A-AA8EEB3A4524}" ProjectSection(ProjectDependencies) = postProject {76EFC06C-1F64-4478-ABE8-79832716B393} = {76EFC06C-1F64-4478-ABE8-79832716B393} EndProjectSection EndProject Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "fsstat", "fsstat\fsstat.vcxproj", "{D1E6567A-4F65-4832-8018-D33B3CB4692B}" ProjectSection(ProjectDependencies) = postProject {76EFC06C-1F64-4478-ABE8-79832716B393} = {76EFC06C-1F64-4478-ABE8-79832716B393} EndProjectSection EndProject Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "blkcat", "blkcat\blkcat.vcxproj", "{A2BEA467-A4CC-4FA6-9C74-587498E35467}" ProjectSection(ProjectDependencies) = postProject {76EFC06C-1F64-4478-ABE8-79832716B393} = {76EFC06C-1F64-4478-ABE8-79832716B393} EndProjectSection EndProject Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "blkstat", "blkstat\blkstat.vcxproj", "{FBB66156-9A54-4713-A801-C507BE7A3AE3}" ProjectSection(ProjectDependencies) = postProject {76EFC06C-1F64-4478-ABE8-79832716B393} = {76EFC06C-1F64-4478-ABE8-79832716B393} EndProjectSection EndProject Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "blkcalc", "blkcalc\blkcalc.vcxproj", "{46B82840-9832-466F-8568-132407CA3853}" ProjectSection(ProjectDependencies) = postProject {76EFC06C-1F64-4478-ABE8-79832716B393} = {76EFC06C-1F64-4478-ABE8-79832716B393} EndProjectSection EndProject Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "blkls", "blkls\blkls.vcxproj", "{48D98A0A-BF9C-4D7E-9AF8-E4CAE8437997}" ProjectSection(ProjectDependencies) = postProject {76EFC06C-1F64-4478-ABE8-79832716B393} = {76EFC06C-1F64-4478-ABE8-79832716B393} EndProjectSection EndProject Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = 
"ffind", "ffind\ffind.vcxproj", "{7C132953-1700-42FF-9F61-A814C9F2C758}" ProjectSection(ProjectDependencies) = postProject {76EFC06C-1F64-4478-ABE8-79832716B393} = {76EFC06C-1F64-4478-ABE8-79832716B393} EndProjectSection EndProject Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "icat", "icat\icat.vcxproj", "{38D89022-2C83-4436-A333-375A2E3E7BB0}" ProjectSection(ProjectDependencies) = postProject {76EFC06C-1F64-4478-ABE8-79832716B393} = {76EFC06C-1F64-4478-ABE8-79832716B393} EndProjectSection EndProject Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "ifind", "ifind\ifind.vcxproj", "{52251CB2-65A3-421B-9CB4-7DAC13BB3758}" ProjectSection(ProjectDependencies) = postProject {76EFC06C-1F64-4478-ABE8-79832716B393} = {76EFC06C-1F64-4478-ABE8-79832716B393} EndProjectSection EndProject Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "ils", "ils\ils.vcxproj", "{62C97F5E-64DD-4623-9563-747C4C173348}" ProjectSection(ProjectDependencies) = postProject {76EFC06C-1F64-4478-ABE8-79832716B393} = {76EFC06C-1F64-4478-ABE8-79832716B393} EndProjectSection EndProject Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "istat", "istat\istat.vcxproj", "{D7643AD7-8518-4B3E-8F3F-F11258D9540E}" ProjectSection(ProjectDependencies) = postProject {76EFC06C-1F64-4478-ABE8-79832716B393} = {76EFC06C-1F64-4478-ABE8-79832716B393} EndProjectSection EndProject Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "jcat", "jcat\jcat.vcxproj", "{44A003BE-400D-4434-AFED-64D8E3B448D9}" ProjectSection(ProjectDependencies) = postProject {76EFC06C-1F64-4478-ABE8-79832716B393} = {76EFC06C-1F64-4478-ABE8-79832716B393} EndProjectSection EndProject Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "jls", "jls\jls.vcxproj", "{C52F935E-1FD2-443C-A181-27908DAB3BC8}" ProjectSection(ProjectDependencies) = postProject {76EFC06C-1F64-4478-ABE8-79832716B393} = {76EFC06C-1F64-4478-ABE8-79832716B393} EndProjectSection EndProject Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "hfind", 
"hfind\hfind.vcxproj", "{0B127AE3-0C18-4EEF-AB20-A0693E6AA822}" ProjectSection(ProjectDependencies) = postProject {76EFC06C-1F64-4478-ABE8-79832716B393} = {76EFC06C-1F64-4478-ABE8-79832716B393} EndProjectSection EndProject Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "mmcat", "mmcat\mmcat.vcxproj", "{A15F1E4F-951A-403E-B746-2A6D63D9C416}" ProjectSection(ProjectDependencies) = postProject {76EFC06C-1F64-4478-ABE8-79832716B393} = {76EFC06C-1F64-4478-ABE8-79832716B393} EndProjectSection EndProject Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "callback-sample", "callback-sample\callback-sample.vcxproj", "{6CE3D593-E90D-4CC1-A66B-694AC909F6B8}" ProjectSection(ProjectDependencies) = postProject {76EFC06C-1F64-4478-ABE8-79832716B393} = {76EFC06C-1F64-4478-ABE8-79832716B393} EndProjectSection EndProject Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "posix-sample", "posix-sample\posix-sample.vcxproj", "{1BA0B9E8-F135-494F-9CF5-86427C1F6E41}" ProjectSection(ProjectDependencies) = postProject {76EFC06C-1F64-4478-ABE8-79832716B393} = {76EFC06C-1F64-4478-ABE8-79832716B393} EndProjectSection EndProject Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "tsk_recover", "tsk_recover\tsk_recover.vcxproj", "{06D707E5-68FF-4FC4-AFD0-C84584E32F47}" ProjectSection(ProjectDependencies) = postProject {76EFC06C-1F64-4478-ABE8-79832716B393} = {76EFC06C-1F64-4478-ABE8-79832716B393} EndProjectSection EndProject Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "tsk_loaddb", "tsk_loaddb\tsk_loaddb.vcxproj", "{96AFC6D4-A3DC-44D4-8F55-F74E1D21798C}" ProjectSection(ProjectDependencies) = postProject {76EFC06C-1F64-4478-ABE8-79832716B393} = {76EFC06C-1F64-4478-ABE8-79832716B393} EndProjectSection EndProject Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "tsk_comparedir", "tsk_comparedir\tsk_compare.vcxproj", "{8EE881F4-78DC-49C7-8845-E842358AC0FA}" ProjectSection(ProjectDependencies) = postProject {76EFC06C-1F64-4478-ABE8-79832716B393} = 
{76EFC06C-1F64-4478-ABE8-79832716B393} EndProjectSection EndProject Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "callback-cpp-sample", "callback-cpp-sample\callback-cpp-sample.vcxproj", "{3B32F1BE-9686-4DC9-8197-F734D146E9F8}" ProjectSection(ProjectDependencies) = postProject {76EFC06C-1F64-4478-ABE8-79832716B393} = {76EFC06C-1F64-4478-ABE8-79832716B393} EndProjectSection EndProject Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "posix-cpp-sample", "posix-cpp-sample\posix-cpp-sample.vcxproj", "{5594DC0E-191C-4F2A-83FE-97F53A9C1222}" ProjectSection(ProjectDependencies) = postProject {76EFC06C-1F64-4478-ABE8-79832716B393} = {76EFC06C-1F64-4478-ABE8-79832716B393} EndProjectSection EndProject Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "tsk_gettimes", "tsk_gettimes\tsk_gettimes.vcxproj", "{11A8927C-F971-4104-A286-5DC11C25E2EC}" ProjectSection(ProjectDependencies) = postProject {76EFC06C-1F64-4478-ABE8-79832716B393} = {76EFC06C-1F64-4478-ABE8-79832716B393} EndProjectSection EndProject Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "libtsk_jni", "tsk_jni\tsk_jni.vcxproj", "{62D88133-09F6-4E13-B39F-36FCEFBE4FAF}" ProjectSection(ProjectDependencies) = postProject {76EFC06C-1F64-4478-ABE8-79832716B393} = {76EFC06C-1F64-4478-ABE8-79832716B393} EndProjectSection EndProject Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "libtsk", "libtsk\libtsk.vcxproj", "{76EFC06C-1F64-4478-ABE8-79832716B393}" EndProject Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "fcat", "fcat\fcat.vcxproj", "{E4A40368-152D-4D54-9E2E-4B140212F98F}" ProjectSection(ProjectDependencies) = postProject {76EFC06C-1F64-4478-ABE8-79832716B393} = {76EFC06C-1F64-4478-ABE8-79832716B393} EndProjectSection EndProject Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "tsk_logical_imager", "tsk_logical_imager\tsk_logical_imager.vcxproj", "{38429B36-9802-42DE-90DD-DA692F7412C2}" ProjectSection(ProjectDependencies) = postProject {C41ACD23-6D88-4999-B79D-7E7828B2DBDE} = 
{C41ACD23-6D88-4999-B79D-7E7828B2DBDE} EndProjectSection EndProject Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "Rejistry++", "rejistry++\Rejistry++.vcxproj", "{C41ACD23-6D88-4999-B79D-7E7828B2DBDE}" EndProject Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "pstat", "pstat\pstat.vcxproj", "{5D75FBFB-539A-4014-ACEB-520BB1451F00}" EndProject Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "tsk_imageinfo", "tsk_imageinfo\tsk_imageinfo.vcxproj", "{09E82DEB-4BC3-44B5-A9F6-4D65F885F88C}" EndProject Global GlobalSection(SolutionConfigurationPlatforms) = preSolution Debug_NoLibs|Win32 = Debug_NoLibs|Win32 Debug_NoLibs|x64 = Debug_NoLibs|x64 Debug|Win32 = Debug|Win32 Debug|x64 = Debug|x64 Release_NoLibs|Win32 = Release_NoLibs|Win32 Release_NoLibs|x64 = Release_NoLibs|x64 Release|Win32 = Release|Win32 Release|x64 = Release|x64 EndGlobalSection GlobalSection(ProjectConfigurationPlatforms) = postSolution {48F52EA8-A5D1-4BF4-B774-6ECFCB0CE3C9}.Debug_NoLibs|Win32.ActiveCfg = Debug_NoLibs|Win32 {48F52EA8-A5D1-4BF4-B774-6ECFCB0CE3C9}.Debug_NoLibs|Win32.Build.0 = Debug_NoLibs|Win32 {48F52EA8-A5D1-4BF4-B774-6ECFCB0CE3C9}.Debug_NoLibs|x64.ActiveCfg = Debug_NoLibs|x64 {48F52EA8-A5D1-4BF4-B774-6ECFCB0CE3C9}.Debug_NoLibs|x64.Build.0 = Debug_NoLibs|x64 {48F52EA8-A5D1-4BF4-B774-6ECFCB0CE3C9}.Debug|Win32.ActiveCfg = Debug|Win32 {48F52EA8-A5D1-4BF4-B774-6ECFCB0CE3C9}.Debug|Win32.Build.0 = Debug|Win32 {48F52EA8-A5D1-4BF4-B774-6ECFCB0CE3C9}.Debug|x64.ActiveCfg = Debug|x64 {48F52EA8-A5D1-4BF4-B774-6ECFCB0CE3C9}.Debug|x64.Build.0 = Debug|x64 {48F52EA8-A5D1-4BF4-B774-6ECFCB0CE3C9}.Release_NoLibs|Win32.ActiveCfg = Release_NoLibs|Win32 {48F52EA8-A5D1-4BF4-B774-6ECFCB0CE3C9}.Release_NoLibs|Win32.Build.0 = Release_NoLibs|Win32 {48F52EA8-A5D1-4BF4-B774-6ECFCB0CE3C9}.Release_NoLibs|x64.ActiveCfg = Release_NoLibs|x64 {48F52EA8-A5D1-4BF4-B774-6ECFCB0CE3C9}.Release_NoLibs|x64.Build.0 = Release_NoLibs|x64 {48F52EA8-A5D1-4BF4-B774-6ECFCB0CE3C9}.Release|Win32.ActiveCfg = Release|Win32 
{48F52EA8-A5D1-4BF4-B774-6ECFCB0CE3C9}.Release|Win32.Build.0 = Release|Win32 {48F52EA8-A5D1-4BF4-B774-6ECFCB0CE3C9}.Release|x64.ActiveCfg = Release|x64 {48F52EA8-A5D1-4BF4-B774-6ECFCB0CE3C9}.Release|x64.Build.0 = Release|x64 {712DD83B-786E-485E-83C7-7197DD851B78}.Debug_NoLibs|Win32.ActiveCfg = Debug_NoLibs|Win32 {712DD83B-786E-485E-83C7-7197DD851B78}.Debug_NoLibs|Win32.Build.0 = Debug_NoLibs|Win32 {712DD83B-786E-485E-83C7-7197DD851B78}.Debug_NoLibs|x64.ActiveCfg = Debug_NoLibs|x64 {712DD83B-786E-485E-83C7-7197DD851B78}.Debug_NoLibs|x64.Build.0 = Debug_NoLibs|x64 {712DD83B-786E-485E-83C7-7197DD851B78}.Debug|Win32.ActiveCfg = Debug|Win32 {712DD83B-786E-485E-83C7-7197DD851B78}.Debug|Win32.Build.0 = Debug|Win32 {712DD83B-786E-485E-83C7-7197DD851B78}.Debug|x64.ActiveCfg = Debug|x64 {712DD83B-786E-485E-83C7-7197DD851B78}.Debug|x64.Build.0 = Debug|x64 {712DD83B-786E-485E-83C7-7197DD851B78}.Release_NoLibs|Win32.ActiveCfg = Release_NoLibs|Win32 {712DD83B-786E-485E-83C7-7197DD851B78}.Release_NoLibs|Win32.Build.0 = Release_NoLibs|Win32 {712DD83B-786E-485E-83C7-7197DD851B78}.Release_NoLibs|x64.ActiveCfg = Release_NoLibs|x64 {712DD83B-786E-485E-83C7-7197DD851B78}.Release_NoLibs|x64.Build.0 = Release_NoLibs|x64 {712DD83B-786E-485E-83C7-7197DD851B78}.Release|Win32.ActiveCfg = Release|Win32 {712DD83B-786E-485E-83C7-7197DD851B78}.Release|Win32.Build.0 = Release|Win32 {712DD83B-786E-485E-83C7-7197DD851B78}.Release|x64.ActiveCfg = Release|x64 {712DD83B-786E-485E-83C7-7197DD851B78}.Release|x64.Build.0 = Release|x64 {671D843F-4DFA-4CB8-8BC9-D44E7F4ECF1E}.Debug_NoLibs|Win32.ActiveCfg = Debug_NoLibs|Win32 {671D843F-4DFA-4CB8-8BC9-D44E7F4ECF1E}.Debug_NoLibs|Win32.Build.0 = Debug_NoLibs|Win32 {671D843F-4DFA-4CB8-8BC9-D44E7F4ECF1E}.Debug_NoLibs|x64.ActiveCfg = Debug_NoLibs|x64 {671D843F-4DFA-4CB8-8BC9-D44E7F4ECF1E}.Debug_NoLibs|x64.Build.0 = Debug_NoLibs|x64 {671D843F-4DFA-4CB8-8BC9-D44E7F4ECF1E}.Debug|Win32.ActiveCfg = Debug|Win32 {671D843F-4DFA-4CB8-8BC9-D44E7F4ECF1E}.Debug|Win32.Build.0 
= Debug|Win32 {671D843F-4DFA-4CB8-8BC9-D44E7F4ECF1E}.Debug|x64.ActiveCfg = Debug|x64 {671D843F-4DFA-4CB8-8BC9-D44E7F4ECF1E}.Debug|x64.Build.0 = Debug|x64 {671D843F-4DFA-4CB8-8BC9-D44E7F4ECF1E}.Release_NoLibs|Win32.ActiveCfg = Release_NoLibs|Win32 {671D843F-4DFA-4CB8-8BC9-D44E7F4ECF1E}.Release_NoLibs|Win32.Build.0 = Release_NoLibs|Win32 {671D843F-4DFA-4CB8-8BC9-D44E7F4ECF1E}.Release_NoLibs|x64.ActiveCfg = Release_NoLibs|x64 {671D843F-4DFA-4CB8-8BC9-D44E7F4ECF1E}.Release_NoLibs|x64.Build.0 = Release_NoLibs|x64 {671D843F-4DFA-4CB8-8BC9-D44E7F4ECF1E}.Release|Win32.ActiveCfg = Release|Win32 {671D843F-4DFA-4CB8-8BC9-D44E7F4ECF1E}.Release|Win32.Build.0 = Release|Win32 {671D843F-4DFA-4CB8-8BC9-D44E7F4ECF1E}.Release|x64.ActiveCfg = Release|x64 {671D843F-4DFA-4CB8-8BC9-D44E7F4ECF1E}.Release|x64.Build.0 = Release|x64 {5D75FBFB-539A-4014-ACEB-520BB16F5BFC}.Debug_NoLibs|Win32.ActiveCfg = Debug_NoLibs|Win32 {5D75FBFB-539A-4014-ACEB-520BB16F5BFC}.Debug_NoLibs|Win32.Build.0 = Debug_NoLibs|Win32 {5D75FBFB-539A-4014-ACEB-520BB16F5BFC}.Debug_NoLibs|x64.ActiveCfg = Debug_NoLibs|x64 {5D75FBFB-539A-4014-ACEB-520BB16F5BFC}.Debug_NoLibs|x64.Build.0 = Debug_NoLibs|x64 {5D75FBFB-539A-4014-ACEB-520BB16F5BFC}.Debug|Win32.ActiveCfg = Debug|Win32 {5D75FBFB-539A-4014-ACEB-520BB16F5BFC}.Debug|Win32.Build.0 = Debug|Win32 {5D75FBFB-539A-4014-ACEB-520BB16F5BFC}.Debug|x64.ActiveCfg = Debug|x64 {5D75FBFB-539A-4014-ACEB-520BB16F5BFC}.Debug|x64.Build.0 = Debug|x64 {5D75FBFB-539A-4014-ACEB-520BB16F5BFC}.Release_NoLibs|Win32.ActiveCfg = Release_NoLibs|Win32 {5D75FBFB-539A-4014-ACEB-520BB16F5BFC}.Release_NoLibs|Win32.Build.0 = Release_NoLibs|Win32 {5D75FBFB-539A-4014-ACEB-520BB16F5BFC}.Release_NoLibs|x64.ActiveCfg = Release_NoLibs|x64 {5D75FBFB-539A-4014-ACEB-520BB16F5BFC}.Release_NoLibs|x64.Build.0 = Release_NoLibs|x64 {5D75FBFB-539A-4014-ACEB-520BB16F5BFC}.Release|Win32.ActiveCfg = Release|Win32 {5D75FBFB-539A-4014-ACEB-520BB16F5BFC}.Release|Win32.Build.0 = Release|Win32 
{5D75FBFB-539A-4014-ACEB-520BB16F5BFC}.Release|x64.ActiveCfg = Release|x64 {5D75FBFB-539A-4014-ACEB-520BB16F5BFC}.Release|x64.Build.0 = Release|x64 {58DA1042-AC19-4779-AC1A-AA8EEB3A4524}.Debug_NoLibs|Win32.ActiveCfg = Debug_NoLibs|Win32 {58DA1042-AC19-4779-AC1A-AA8EEB3A4524}.Debug_NoLibs|Win32.Build.0 = Debug_NoLibs|Win32 {58DA1042-AC19-4779-AC1A-AA8EEB3A4524}.Debug_NoLibs|x64.ActiveCfg = Debug_NoLibs|x64 {58DA1042-AC19-4779-AC1A-AA8EEB3A4524}.Debug_NoLibs|x64.Build.0 = Debug_NoLibs|x64 {58DA1042-AC19-4779-AC1A-AA8EEB3A4524}.Debug|Win32.ActiveCfg = Debug|Win32 {58DA1042-AC19-4779-AC1A-AA8EEB3A4524}.Debug|Win32.Build.0 = Debug|Win32 {58DA1042-AC19-4779-AC1A-AA8EEB3A4524}.Debug|x64.ActiveCfg = Debug|x64 {58DA1042-AC19-4779-AC1A-AA8EEB3A4524}.Debug|x64.Build.0 = Debug|x64 {58DA1042-AC19-4779-AC1A-AA8EEB3A4524}.Release_NoLibs|Win32.ActiveCfg = Release_NoLibs|Win32 {58DA1042-AC19-4779-AC1A-AA8EEB3A4524}.Release_NoLibs|Win32.Build.0 = Release_NoLibs|Win32 {58DA1042-AC19-4779-AC1A-AA8EEB3A4524}.Release_NoLibs|x64.ActiveCfg = Release_NoLibs|x64 {58DA1042-AC19-4779-AC1A-AA8EEB3A4524}.Release_NoLibs|x64.Build.0 = Release_NoLibs|x64 {58DA1042-AC19-4779-AC1A-AA8EEB3A4524}.Release|Win32.ActiveCfg = Release|Win32 {58DA1042-AC19-4779-AC1A-AA8EEB3A4524}.Release|Win32.Build.0 = Release|Win32 {58DA1042-AC19-4779-AC1A-AA8EEB3A4524}.Release|x64.ActiveCfg = Release|x64 {58DA1042-AC19-4779-AC1A-AA8EEB3A4524}.Release|x64.Build.0 = Release|x64 {D1E6567A-4F65-4832-8018-D33B3CB4692B}.Debug_NoLibs|Win32.ActiveCfg = Debug_NoLibs|Win32 {D1E6567A-4F65-4832-8018-D33B3CB4692B}.Debug_NoLibs|Win32.Build.0 = Debug_NoLibs|Win32 {D1E6567A-4F65-4832-8018-D33B3CB4692B}.Debug_NoLibs|x64.ActiveCfg = Debug_NoLibs|x64 {D1E6567A-4F65-4832-8018-D33B3CB4692B}.Debug_NoLibs|x64.Build.0 = Debug_NoLibs|x64 {D1E6567A-4F65-4832-8018-D33B3CB4692B}.Debug|Win32.ActiveCfg = Debug|Win32 {D1E6567A-4F65-4832-8018-D33B3CB4692B}.Debug|Win32.Build.0 = Debug|Win32 {D1E6567A-4F65-4832-8018-D33B3CB4692B}.Debug|x64.ActiveCfg = 
Debug|x64 {D1E6567A-4F65-4832-8018-D33B3CB4692B}.Debug|x64.Build.0 = Debug|x64 {D1E6567A-4F65-4832-8018-D33B3CB4692B}.Release_NoLibs|Win32.ActiveCfg = Release_NoLibs|Win32 {D1E6567A-4F65-4832-8018-D33B3CB4692B}.Release_NoLibs|Win32.Build.0 = Release_NoLibs|Win32 {D1E6567A-4F65-4832-8018-D33B3CB4692B}.Release_NoLibs|x64.ActiveCfg = Release_NoLibs|x64 {D1E6567A-4F65-4832-8018-D33B3CB4692B}.Release_NoLibs|x64.Build.0 = Release_NoLibs|x64 {D1E6567A-4F65-4832-8018-D33B3CB4692B}.Release|Win32.ActiveCfg = Release|Win32 {D1E6567A-4F65-4832-8018-D33B3CB4692B}.Release|Win32.Build.0 = Release|Win32 {D1E6567A-4F65-4832-8018-D33B3CB4692B}.Release|x64.ActiveCfg = Release|x64 {D1E6567A-4F65-4832-8018-D33B3CB4692B}.Release|x64.Build.0 = Release|x64 {A2BEA467-A4CC-4FA6-9C74-587498E35467}.Debug_NoLibs|Win32.ActiveCfg = Debug_NoLibs|Win32 {A2BEA467-A4CC-4FA6-9C74-587498E35467}.Debug_NoLibs|Win32.Build.0 = Debug_NoLibs|Win32 {A2BEA467-A4CC-4FA6-9C74-587498E35467}.Debug_NoLibs|x64.ActiveCfg = Debug_NoLibs|x64 {A2BEA467-A4CC-4FA6-9C74-587498E35467}.Debug_NoLibs|x64.Build.0 = Debug_NoLibs|x64 {A2BEA467-A4CC-4FA6-9C74-587498E35467}.Debug|Win32.ActiveCfg = Debug|Win32 {A2BEA467-A4CC-4FA6-9C74-587498E35467}.Debug|Win32.Build.0 = Debug|Win32 {A2BEA467-A4CC-4FA6-9C74-587498E35467}.Debug|x64.ActiveCfg = Debug|x64 {A2BEA467-A4CC-4FA6-9C74-587498E35467}.Debug|x64.Build.0 = Debug|x64 {A2BEA467-A4CC-4FA6-9C74-587498E35467}.Release_NoLibs|Win32.ActiveCfg = Release_NoLibs|Win32 {A2BEA467-A4CC-4FA6-9C74-587498E35467}.Release_NoLibs|Win32.Build.0 = Release_NoLibs|Win32 {A2BEA467-A4CC-4FA6-9C74-587498E35467}.Release_NoLibs|x64.ActiveCfg = Release_NoLibs|x64 {A2BEA467-A4CC-4FA6-9C74-587498E35467}.Release_NoLibs|x64.Build.0 = Release_NoLibs|x64 {A2BEA467-A4CC-4FA6-9C74-587498E35467}.Release|Win32.ActiveCfg = Release|Win32 {A2BEA467-A4CC-4FA6-9C74-587498E35467}.Release|Win32.Build.0 = Release|Win32 {A2BEA467-A4CC-4FA6-9C74-587498E35467}.Release|x64.ActiveCfg = Release|x64 
{A2BEA467-A4CC-4FA6-9C74-587498E35467}.Release|x64.Build.0 = Release|x64 {FBB66156-9A54-4713-A801-C507BE7A3AE3}.Debug_NoLibs|Win32.ActiveCfg = Debug_NoLibs|Win32 {FBB66156-9A54-4713-A801-C507BE7A3AE3}.Debug_NoLibs|Win32.Build.0 = Debug_NoLibs|Win32 {FBB66156-9A54-4713-A801-C507BE7A3AE3}.Debug_NoLibs|x64.ActiveCfg = Debug_NoLibs|x64 {FBB66156-9A54-4713-A801-C507BE7A3AE3}.Debug_NoLibs|x64.Build.0 = Debug_NoLibs|x64 {FBB66156-9A54-4713-A801-C507BE7A3AE3}.Debug|Win32.ActiveCfg = Debug|Win32 {FBB66156-9A54-4713-A801-C507BE7A3AE3}.Debug|Win32.Build.0 = Debug|Win32 {FBB66156-9A54-4713-A801-C507BE7A3AE3}.Debug|x64.ActiveCfg = Debug|x64 {FBB66156-9A54-4713-A801-C507BE7A3AE3}.Debug|x64.Build.0 = Debug|x64 {FBB66156-9A54-4713-A801-C507BE7A3AE3}.Release_NoLibs|Win32.ActiveCfg = Release_NoLibs|Win32 {FBB66156-9A54-4713-A801-C507BE7A3AE3}.Release_NoLibs|Win32.Build.0 = Release_NoLibs|Win32 {FBB66156-9A54-4713-A801-C507BE7A3AE3}.Release_NoLibs|x64.ActiveCfg = Release_NoLibs|x64 {FBB66156-9A54-4713-A801-C507BE7A3AE3}.Release_NoLibs|x64.Build.0 = Release_NoLibs|x64 {FBB66156-9A54-4713-A801-C507BE7A3AE3}.Release|Win32.ActiveCfg = Release|Win32 {FBB66156-9A54-4713-A801-C507BE7A3AE3}.Release|Win32.Build.0 = Release|Win32 {FBB66156-9A54-4713-A801-C507BE7A3AE3}.Release|x64.ActiveCfg = Release|x64 {FBB66156-9A54-4713-A801-C507BE7A3AE3}.Release|x64.Build.0 = Release|x64 {46B82840-9832-466F-8568-132407CA3853}.Debug_NoLibs|Win32.ActiveCfg = Debug_NoLibs|Win32 {46B82840-9832-466F-8568-132407CA3853}.Debug_NoLibs|Win32.Build.0 = Debug_NoLibs|Win32 {46B82840-9832-466F-8568-132407CA3853}.Debug_NoLibs|x64.ActiveCfg = Debug_NoLibs|x64 {46B82840-9832-466F-8568-132407CA3853}.Debug_NoLibs|x64.Build.0 = Debug_NoLibs|x64 {46B82840-9832-466F-8568-132407CA3853}.Debug|Win32.ActiveCfg = Debug|Win32 {46B82840-9832-466F-8568-132407CA3853}.Debug|Win32.Build.0 = Debug|Win32 {46B82840-9832-466F-8568-132407CA3853}.Debug|x64.ActiveCfg = Debug|x64 {46B82840-9832-466F-8568-132407CA3853}.Debug|x64.Build.0 = 
Debug|x64 {46B82840-9832-466F-8568-132407CA3853}.Release_NoLibs|Win32.ActiveCfg = Release_NoLibs|Win32 {46B82840-9832-466F-8568-132407CA3853}.Release_NoLibs|Win32.Build.0 = Release_NoLibs|Win32 {46B82840-9832-466F-8568-132407CA3853}.Release_NoLibs|x64.ActiveCfg = Release_NoLibs|x64 {46B82840-9832-466F-8568-132407CA3853}.Release_NoLibs|x64.Build.0 = Release_NoLibs|x64 {46B82840-9832-466F-8568-132407CA3853}.Release|Win32.ActiveCfg = Release|Win32 {46B82840-9832-466F-8568-132407CA3853}.Release|Win32.Build.0 = Release|Win32 {46B82840-9832-466F-8568-132407CA3853}.Release|x64.ActiveCfg = Release|x64 {46B82840-9832-466F-8568-132407CA3853}.Release|x64.Build.0 = Release|x64 {48D98A0A-BF9C-4D7E-9AF8-E4CAE8437997}.Debug_NoLibs|Win32.ActiveCfg = Debug_NoLibs|Win32 {48D98A0A-BF9C-4D7E-9AF8-E4CAE8437997}.Debug_NoLibs|Win32.Build.0 = Debug_NoLibs|Win32 {48D98A0A-BF9C-4D7E-9AF8-E4CAE8437997}.Debug_NoLibs|x64.ActiveCfg = Debug_NoLibs|x64 {48D98A0A-BF9C-4D7E-9AF8-E4CAE8437997}.Debug_NoLibs|x64.Build.0 = Debug_NoLibs|x64 {48D98A0A-BF9C-4D7E-9AF8-E4CAE8437997}.Debug|Win32.ActiveCfg = Debug|Win32 {48D98A0A-BF9C-4D7E-9AF8-E4CAE8437997}.Debug|Win32.Build.0 = Debug|Win32 {48D98A0A-BF9C-4D7E-9AF8-E4CAE8437997}.Debug|x64.ActiveCfg = Debug|x64 {48D98A0A-BF9C-4D7E-9AF8-E4CAE8437997}.Debug|x64.Build.0 = Debug|x64 {48D98A0A-BF9C-4D7E-9AF8-E4CAE8437997}.Release_NoLibs|Win32.ActiveCfg = Release_NoLibs|Win32 {48D98A0A-BF9C-4D7E-9AF8-E4CAE8437997}.Release_NoLibs|Win32.Build.0 = Release_NoLibs|Win32 {48D98A0A-BF9C-4D7E-9AF8-E4CAE8437997}.Release_NoLibs|x64.ActiveCfg = Release_NoLibs|x64 {48D98A0A-BF9C-4D7E-9AF8-E4CAE8437997}.Release_NoLibs|x64.Build.0 = Release_NoLibs|x64 {48D98A0A-BF9C-4D7E-9AF8-E4CAE8437997}.Release|Win32.ActiveCfg = Release|Win32 {48D98A0A-BF9C-4D7E-9AF8-E4CAE8437997}.Release|Win32.Build.0 = Release|Win32 {48D98A0A-BF9C-4D7E-9AF8-E4CAE8437997}.Release|x64.ActiveCfg = Release|x64 {48D98A0A-BF9C-4D7E-9AF8-E4CAE8437997}.Release|x64.Build.0 = Release|x64 
{7C132953-1700-42FF-9F61-A814C9F2C758}.Debug_NoLibs|Win32.ActiveCfg = Debug_NoLibs|Win32 {7C132953-1700-42FF-9F61-A814C9F2C758}.Debug_NoLibs|Win32.Build.0 = Debug_NoLibs|Win32 {7C132953-1700-42FF-9F61-A814C9F2C758}.Debug_NoLibs|x64.ActiveCfg = Debug_NoLibs|x64 {7C132953-1700-42FF-9F61-A814C9F2C758}.Debug_NoLibs|x64.Build.0 = Debug_NoLibs|x64 {7C132953-1700-42FF-9F61-A814C9F2C758}.Debug|Win32.ActiveCfg = Debug|Win32 {7C132953-1700-42FF-9F61-A814C9F2C758}.Debug|Win32.Build.0 = Debug|Win32 {7C132953-1700-42FF-9F61-A814C9F2C758}.Debug|x64.ActiveCfg = Debug|x64 {7C132953-1700-42FF-9F61-A814C9F2C758}.Debug|x64.Build.0 = Debug|x64 {7C132953-1700-42FF-9F61-A814C9F2C758}.Release_NoLibs|Win32.ActiveCfg = Release_NoLibs|Win32 {7C132953-1700-42FF-9F61-A814C9F2C758}.Release_NoLibs|Win32.Build.0 = Release_NoLibs|Win32 {7C132953-1700-42FF-9F61-A814C9F2C758}.Release_NoLibs|x64.ActiveCfg = Release_NoLibs|x64 {7C132953-1700-42FF-9F61-A814C9F2C758}.Release_NoLibs|x64.Build.0 = Release_NoLibs|x64 {7C132953-1700-42FF-9F61-A814C9F2C758}.Release|Win32.ActiveCfg = Release|Win32 {7C132953-1700-42FF-9F61-A814C9F2C758}.Release|Win32.Build.0 = Release|Win32 {7C132953-1700-42FF-9F61-A814C9F2C758}.Release|x64.ActiveCfg = Release|x64 {7C132953-1700-42FF-9F61-A814C9F2C758}.Release|x64.Build.0 = Release|x64 {38D89022-2C83-4436-A333-375A2E3E7BB0}.Debug_NoLibs|Win32.ActiveCfg = Debug_NoLibs|Win32 {38D89022-2C83-4436-A333-375A2E3E7BB0}.Debug_NoLibs|Win32.Build.0 = Debug_NoLibs|Win32 {38D89022-2C83-4436-A333-375A2E3E7BB0}.Debug_NoLibs|x64.ActiveCfg = Debug_NoLibs|x64 {38D89022-2C83-4436-A333-375A2E3E7BB0}.Debug_NoLibs|x64.Build.0 = Debug_NoLibs|x64 {38D89022-2C83-4436-A333-375A2E3E7BB0}.Debug|Win32.ActiveCfg = Debug|Win32 {38D89022-2C83-4436-A333-375A2E3E7BB0}.Debug|Win32.Build.0 = Debug|Win32 {38D89022-2C83-4436-A333-375A2E3E7BB0}.Debug|x64.ActiveCfg = Debug|x64 {38D89022-2C83-4436-A333-375A2E3E7BB0}.Debug|x64.Build.0 = Debug|x64 {38D89022-2C83-4436-A333-375A2E3E7BB0}.Release_NoLibs|Win32.ActiveCfg = 
Release_NoLibs|Win32 {38D89022-2C83-4436-A333-375A2E3E7BB0}.Release_NoLibs|Win32.Build.0 = Release_NoLibs|Win32 {38D89022-2C83-4436-A333-375A2E3E7BB0}.Release_NoLibs|x64.ActiveCfg = Release_NoLibs|x64 {38D89022-2C83-4436-A333-375A2E3E7BB0}.Release_NoLibs|x64.Build.0 = Release_NoLibs|x64 {38D89022-2C83-4436-A333-375A2E3E7BB0}.Release|Win32.ActiveCfg = Release|Win32 {38D89022-2C83-4436-A333-375A2E3E7BB0}.Release|Win32.Build.0 = Release|Win32 {38D89022-2C83-4436-A333-375A2E3E7BB0}.Release|x64.ActiveCfg = Release|x64 {38D89022-2C83-4436-A333-375A2E3E7BB0}.Release|x64.Build.0 = Release|x64 {52251CB2-65A3-421B-9CB4-7DAC13BB3758}.Debug_NoLibs|Win32.ActiveCfg = Debug_NoLibs|Win32 {52251CB2-65A3-421B-9CB4-7DAC13BB3758}.Debug_NoLibs|Win32.Build.0 = Debug_NoLibs|Win32 {52251CB2-65A3-421B-9CB4-7DAC13BB3758}.Debug_NoLibs|x64.ActiveCfg = Debug_NoLibs|x64 {52251CB2-65A3-421B-9CB4-7DAC13BB3758}.Debug_NoLibs|x64.Build.0 = Debug_NoLibs|x64 {52251CB2-65A3-421B-9CB4-7DAC13BB3758}.Debug|Win32.ActiveCfg = Debug|Win32 {52251CB2-65A3-421B-9CB4-7DAC13BB3758}.Debug|Win32.Build.0 = Debug|Win32 {52251CB2-65A3-421B-9CB4-7DAC13BB3758}.Debug|x64.ActiveCfg = Debug|x64 {52251CB2-65A3-421B-9CB4-7DAC13BB3758}.Debug|x64.Build.0 = Debug|x64 {52251CB2-65A3-421B-9CB4-7DAC13BB3758}.Release_NoLibs|Win32.ActiveCfg = Release_NoLibs|Win32 {52251CB2-65A3-421B-9CB4-7DAC13BB3758}.Release_NoLibs|Win32.Build.0 = Release_NoLibs|Win32 {52251CB2-65A3-421B-9CB4-7DAC13BB3758}.Release_NoLibs|x64.ActiveCfg = Release_NoLibs|x64 {52251CB2-65A3-421B-9CB4-7DAC13BB3758}.Release_NoLibs|x64.Build.0 = Release_NoLibs|x64 {52251CB2-65A3-421B-9CB4-7DAC13BB3758}.Release|Win32.ActiveCfg = Release|Win32 {52251CB2-65A3-421B-9CB4-7DAC13BB3758}.Release|Win32.Build.0 = Release|Win32 {52251CB2-65A3-421B-9CB4-7DAC13BB3758}.Release|x64.ActiveCfg = Release|x64 {52251CB2-65A3-421B-9CB4-7DAC13BB3758}.Release|x64.Build.0 = Release|x64 {62C97F5E-64DD-4623-9563-747C4C173348}.Debug_NoLibs|Win32.ActiveCfg = Debug_NoLibs|Win32 
{62C97F5E-64DD-4623-9563-747C4C173348}.Debug_NoLibs|Win32.Build.0 = Debug_NoLibs|Win32 {62C97F5E-64DD-4623-9563-747C4C173348}.Debug_NoLibs|x64.ActiveCfg = Debug_NoLibs|x64 {62C97F5E-64DD-4623-9563-747C4C173348}.Debug_NoLibs|x64.Build.0 = Debug_NoLibs|x64 {62C97F5E-64DD-4623-9563-747C4C173348}.Debug|Win32.ActiveCfg = Debug|Win32 {62C97F5E-64DD-4623-9563-747C4C173348}.Debug|Win32.Build.0 = Debug|Win32 {62C97F5E-64DD-4623-9563-747C4C173348}.Debug|x64.ActiveCfg = Debug|x64 {62C97F5E-64DD-4623-9563-747C4C173348}.Debug|x64.Build.0 = Debug|x64 {62C97F5E-64DD-4623-9563-747C4C173348}.Release_NoLibs|Win32.ActiveCfg = Release_NoLibs|Win32 {62C97F5E-64DD-4623-9563-747C4C173348}.Release_NoLibs|Win32.Build.0 = Release_NoLibs|Win32 {62C97F5E-64DD-4623-9563-747C4C173348}.Release_NoLibs|x64.ActiveCfg = Release_NoLibs|x64 {62C97F5E-64DD-4623-9563-747C4C173348}.Release_NoLibs|x64.Build.0 = Release_NoLibs|x64 {62C97F5E-64DD-4623-9563-747C4C173348}.Release|Win32.ActiveCfg = Release|Win32 {62C97F5E-64DD-4623-9563-747C4C173348}.Release|Win32.Build.0 = Release|Win32 {62C97F5E-64DD-4623-9563-747C4C173348}.Release|x64.ActiveCfg = Release|x64 {62C97F5E-64DD-4623-9563-747C4C173348}.Release|x64.Build.0 = Release|x64 {D7643AD7-8518-4B3E-8F3F-F11258D9540E}.Debug_NoLibs|Win32.ActiveCfg = Debug_NoLibs|Win32 {D7643AD7-8518-4B3E-8F3F-F11258D9540E}.Debug_NoLibs|Win32.Build.0 = Debug_NoLibs|Win32 {D7643AD7-8518-4B3E-8F3F-F11258D9540E}.Debug_NoLibs|x64.ActiveCfg = Debug_NoLibs|x64 {D7643AD7-8518-4B3E-8F3F-F11258D9540E}.Debug_NoLibs|x64.Build.0 = Debug_NoLibs|x64 {D7643AD7-8518-4B3E-8F3F-F11258D9540E}.Debug|Win32.ActiveCfg = Debug|Win32 {D7643AD7-8518-4B3E-8F3F-F11258D9540E}.Debug|Win32.Build.0 = Debug|Win32 {D7643AD7-8518-4B3E-8F3F-F11258D9540E}.Debug|x64.ActiveCfg = Debug|x64 {D7643AD7-8518-4B3E-8F3F-F11258D9540E}.Debug|x64.Build.0 = Debug|x64 {D7643AD7-8518-4B3E-8F3F-F11258D9540E}.Release_NoLibs|Win32.ActiveCfg = Release_NoLibs|Win32 {D7643AD7-8518-4B3E-8F3F-F11258D9540E}.Release_NoLibs|Win32.Build.0 
= Release_NoLibs|Win32 {D7643AD7-8518-4B3E-8F3F-F11258D9540E}.Release_NoLibs|x64.ActiveCfg = Release_NoLibs|x64 {D7643AD7-8518-4B3E-8F3F-F11258D9540E}.Release_NoLibs|x64.Build.0 = Release_NoLibs|x64 {D7643AD7-8518-4B3E-8F3F-F11258D9540E}.Release|Win32.ActiveCfg = Release|Win32 {D7643AD7-8518-4B3E-8F3F-F11258D9540E}.Release|Win32.Build.0 = Release|Win32 {D7643AD7-8518-4B3E-8F3F-F11258D9540E}.Release|x64.ActiveCfg = Release|x64 {D7643AD7-8518-4B3E-8F3F-F11258D9540E}.Release|x64.Build.0 = Release|x64 {44A003BE-400D-4434-AFED-64D8E3B448D9}.Debug_NoLibs|Win32.ActiveCfg = Debug_NoLibs|Win32 {44A003BE-400D-4434-AFED-64D8E3B448D9}.Debug_NoLibs|Win32.Build.0 = Debug_NoLibs|Win32 {44A003BE-400D-4434-AFED-64D8E3B448D9}.Debug_NoLibs|x64.ActiveCfg = Debug_NoLibs|x64 {44A003BE-400D-4434-AFED-64D8E3B448D9}.Debug_NoLibs|x64.Build.0 = Debug_NoLibs|x64 {44A003BE-400D-4434-AFED-64D8E3B448D9}.Debug|Win32.ActiveCfg = Debug|Win32 {44A003BE-400D-4434-AFED-64D8E3B448D9}.Debug|Win32.Build.0 = Debug|Win32 {44A003BE-400D-4434-AFED-64D8E3B448D9}.Debug|x64.ActiveCfg = Debug|x64 {44A003BE-400D-4434-AFED-64D8E3B448D9}.Debug|x64.Build.0 = Debug|x64 {44A003BE-400D-4434-AFED-64D8E3B448D9}.Release_NoLibs|Win32.ActiveCfg = Release_NoLibs|Win32 {44A003BE-400D-4434-AFED-64D8E3B448D9}.Release_NoLibs|Win32.Build.0 = Release_NoLibs|Win32 {44A003BE-400D-4434-AFED-64D8E3B448D9}.Release_NoLibs|x64.ActiveCfg = Release_NoLibs|x64 {44A003BE-400D-4434-AFED-64D8E3B448D9}.Release_NoLibs|x64.Build.0 = Release_NoLibs|x64 {44A003BE-400D-4434-AFED-64D8E3B448D9}.Release|Win32.ActiveCfg = Release|Win32 {44A003BE-400D-4434-AFED-64D8E3B448D9}.Release|Win32.Build.0 = Release|Win32 {44A003BE-400D-4434-AFED-64D8E3B448D9}.Release|x64.ActiveCfg = Release|x64 {44A003BE-400D-4434-AFED-64D8E3B448D9}.Release|x64.Build.0 = Release|x64 {C52F935E-1FD2-443C-A181-27908DAB3BC8}.Debug_NoLibs|Win32.ActiveCfg = Debug_NoLibs|Win32 {C52F935E-1FD2-443C-A181-27908DAB3BC8}.Debug_NoLibs|Win32.Build.0 = Debug_NoLibs|Win32 
{C52F935E-1FD2-443C-A181-27908DAB3BC8}.Debug_NoLibs|x64.ActiveCfg = Debug_NoLibs|x64 {C52F935E-1FD2-443C-A181-27908DAB3BC8}.Debug_NoLibs|x64.Build.0 = Debug_NoLibs|x64 {C52F935E-1FD2-443C-A181-27908DAB3BC8}.Debug|Win32.ActiveCfg = Debug|Win32 {C52F935E-1FD2-443C-A181-27908DAB3BC8}.Debug|Win32.Build.0 = Debug|Win32 {C52F935E-1FD2-443C-A181-27908DAB3BC8}.Debug|x64.ActiveCfg = Debug|x64 {C52F935E-1FD2-443C-A181-27908DAB3BC8}.Debug|x64.Build.0 = Debug|x64 {C52F935E-1FD2-443C-A181-27908DAB3BC8}.Release_NoLibs|Win32.ActiveCfg = Release_NoLibs|Win32 {C52F935E-1FD2-443C-A181-27908DAB3BC8}.Release_NoLibs|Win32.Build.0 = Release_NoLibs|Win32 {C52F935E-1FD2-443C-A181-27908DAB3BC8}.Release_NoLibs|x64.ActiveCfg = Release_NoLibs|x64 {C52F935E-1FD2-443C-A181-27908DAB3BC8}.Release_NoLibs|x64.Build.0 = Release_NoLibs|x64 {C52F935E-1FD2-443C-A181-27908DAB3BC8}.Release|Win32.ActiveCfg = Release|Win32 {C52F935E-1FD2-443C-A181-27908DAB3BC8}.Release|Win32.Build.0 = Release|Win32 {C52F935E-1FD2-443C-A181-27908DAB3BC8}.Release|x64.ActiveCfg = Release|x64 {C52F935E-1FD2-443C-A181-27908DAB3BC8}.Release|x64.Build.0 = Release|x64 {0B127AE3-0C18-4EEF-AB20-A0693E6AA822}.Debug_NoLibs|Win32.ActiveCfg = Debug_NoLibs|Win32 {0B127AE3-0C18-4EEF-AB20-A0693E6AA822}.Debug_NoLibs|Win32.Build.0 = Debug_NoLibs|Win32 {0B127AE3-0C18-4EEF-AB20-A0693E6AA822}.Debug_NoLibs|x64.ActiveCfg = Debug_NoLibs|x64 {0B127AE3-0C18-4EEF-AB20-A0693E6AA822}.Debug_NoLibs|x64.Build.0 = Debug_NoLibs|x64 {0B127AE3-0C18-4EEF-AB20-A0693E6AA822}.Debug|Win32.ActiveCfg = Debug|Win32 {0B127AE3-0C18-4EEF-AB20-A0693E6AA822}.Debug|Win32.Build.0 = Debug|Win32 {0B127AE3-0C18-4EEF-AB20-A0693E6AA822}.Debug|x64.ActiveCfg = Debug|x64 {0B127AE3-0C18-4EEF-AB20-A0693E6AA822}.Debug|x64.Build.0 = Debug|x64 {0B127AE3-0C18-4EEF-AB20-A0693E6AA822}.Release_NoLibs|Win32.ActiveCfg = Release_NoLibs|Win32 {0B127AE3-0C18-4EEF-AB20-A0693E6AA822}.Release_NoLibs|Win32.Build.0 = Release_NoLibs|Win32 
{0B127AE3-0C18-4EEF-AB20-A0693E6AA822}.Release_NoLibs|x64.ActiveCfg = Release_NoLibs|x64 {0B127AE3-0C18-4EEF-AB20-A0693E6AA822}.Release_NoLibs|x64.Build.0 = Release_NoLibs|x64 {0B127AE3-0C18-4EEF-AB20-A0693E6AA822}.Release|Win32.ActiveCfg = Release|Win32 {0B127AE3-0C18-4EEF-AB20-A0693E6AA822}.Release|Win32.Build.0 = Release|Win32 {0B127AE3-0C18-4EEF-AB20-A0693E6AA822}.Release|x64.ActiveCfg = Release|x64 {0B127AE3-0C18-4EEF-AB20-A0693E6AA822}.Release|x64.Build.0 = Release|x64 {A15F1E4F-951A-403E-B746-2A6D63D9C416}.Debug_NoLibs|Win32.ActiveCfg = Debug_NoLibs|Win32 {A15F1E4F-951A-403E-B746-2A6D63D9C416}.Debug_NoLibs|Win32.Build.0 = Debug_NoLibs|Win32 {A15F1E4F-951A-403E-B746-2A6D63D9C416}.Debug_NoLibs|x64.ActiveCfg = Debug_NoLibs|x64 {A15F1E4F-951A-403E-B746-2A6D63D9C416}.Debug_NoLibs|x64.Build.0 = Debug_NoLibs|x64 {A15F1E4F-951A-403E-B746-2A6D63D9C416}.Debug|Win32.ActiveCfg = Debug|Win32 {A15F1E4F-951A-403E-B746-2A6D63D9C416}.Debug|Win32.Build.0 = Debug|Win32 {A15F1E4F-951A-403E-B746-2A6D63D9C416}.Debug|x64.ActiveCfg = Debug|x64 {A15F1E4F-951A-403E-B746-2A6D63D9C416}.Debug|x64.Build.0 = Debug|x64 {A15F1E4F-951A-403E-B746-2A6D63D9C416}.Release_NoLibs|Win32.ActiveCfg = Release_NoLibs|Win32 {A15F1E4F-951A-403E-B746-2A6D63D9C416}.Release_NoLibs|Win32.Build.0 = Release_NoLibs|Win32 {A15F1E4F-951A-403E-B746-2A6D63D9C416}.Release_NoLibs|x64.ActiveCfg = Release_NoLibs|x64 {A15F1E4F-951A-403E-B746-2A6D63D9C416}.Release_NoLibs|x64.Build.0 = Release_NoLibs|x64 {A15F1E4F-951A-403E-B746-2A6D63D9C416}.Release|Win32.ActiveCfg = Release|Win32 {A15F1E4F-951A-403E-B746-2A6D63D9C416}.Release|Win32.Build.0 = Release|Win32 {A15F1E4F-951A-403E-B746-2A6D63D9C416}.Release|x64.ActiveCfg = Release|x64 {A15F1E4F-951A-403E-B746-2A6D63D9C416}.Release|x64.Build.0 = Release|x64 {6CE3D593-E90D-4CC1-A66B-694AC909F6B8}.Debug_NoLibs|Win32.ActiveCfg = Debug_NoLibs|Win32 {6CE3D593-E90D-4CC1-A66B-694AC909F6B8}.Debug_NoLibs|Win32.Build.0 = Debug_NoLibs|Win32 
{6CE3D593-E90D-4CC1-A66B-694AC909F6B8}.Debug_NoLibs|x64.ActiveCfg = Debug_NoLibs|x64 {6CE3D593-E90D-4CC1-A66B-694AC909F6B8}.Debug_NoLibs|x64.Build.0 = Debug_NoLibs|x64 {6CE3D593-E90D-4CC1-A66B-694AC909F6B8}.Debug|Win32.ActiveCfg = Debug|Win32 {6CE3D593-E90D-4CC1-A66B-694AC909F6B8}.Debug|Win32.Build.0 = Debug|Win32 {6CE3D593-E90D-4CC1-A66B-694AC909F6B8}.Debug|x64.ActiveCfg = Debug|x64 {6CE3D593-E90D-4CC1-A66B-694AC909F6B8}.Debug|x64.Build.0 = Debug|x64 {6CE3D593-E90D-4CC1-A66B-694AC909F6B8}.Release_NoLibs|Win32.ActiveCfg = Release_NoLibs|Win32 {6CE3D593-E90D-4CC1-A66B-694AC909F6B8}.Release_NoLibs|Win32.Build.0 = Release_NoLibs|Win32 {6CE3D593-E90D-4CC1-A66B-694AC909F6B8}.Release_NoLibs|x64.ActiveCfg = Release_NoLibs|x64 {6CE3D593-E90D-4CC1-A66B-694AC909F6B8}.Release_NoLibs|x64.Build.0 = Release_NoLibs|x64 {6CE3D593-E90D-4CC1-A66B-694AC909F6B8}.Release|Win32.ActiveCfg = Release|Win32 {6CE3D593-E90D-4CC1-A66B-694AC909F6B8}.Release|Win32.Build.0 = Release|Win32 {6CE3D593-E90D-4CC1-A66B-694AC909F6B8}.Release|x64.ActiveCfg = Release|x64 {6CE3D593-E90D-4CC1-A66B-694AC909F6B8}.Release|x64.Build.0 = Release|x64 {1BA0B9E8-F135-494F-9CF5-86427C1F6E41}.Debug_NoLibs|Win32.ActiveCfg = Debug_NoLibs|Win32 {1BA0B9E8-F135-494F-9CF5-86427C1F6E41}.Debug_NoLibs|Win32.Build.0 = Debug_NoLibs|Win32 {1BA0B9E8-F135-494F-9CF5-86427C1F6E41}.Debug_NoLibs|x64.ActiveCfg = Debug_NoLibs|x64 {1BA0B9E8-F135-494F-9CF5-86427C1F6E41}.Debug_NoLibs|x64.Build.0 = Debug_NoLibs|x64 {1BA0B9E8-F135-494F-9CF5-86427C1F6E41}.Debug|Win32.ActiveCfg = Debug|Win32 {1BA0B9E8-F135-494F-9CF5-86427C1F6E41}.Debug|Win32.Build.0 = Debug|Win32 {1BA0B9E8-F135-494F-9CF5-86427C1F6E41}.Debug|x64.ActiveCfg = Debug|x64 {1BA0B9E8-F135-494F-9CF5-86427C1F6E41}.Debug|x64.Build.0 = Debug|x64 {1BA0B9E8-F135-494F-9CF5-86427C1F6E41}.Release_NoLibs|Win32.ActiveCfg = Release_NoLibs|Win32 {1BA0B9E8-F135-494F-9CF5-86427C1F6E41}.Release_NoLibs|Win32.Build.0 = Release_NoLibs|Win32 
{1BA0B9E8-F135-494F-9CF5-86427C1F6E41}.Release_NoLibs|x64.ActiveCfg = Release_NoLibs|x64 {1BA0B9E8-F135-494F-9CF5-86427C1F6E41}.Release_NoLibs|x64.Build.0 = Release_NoLibs|x64 {1BA0B9E8-F135-494F-9CF5-86427C1F6E41}.Release|Win32.ActiveCfg = Release|Win32 {1BA0B9E8-F135-494F-9CF5-86427C1F6E41}.Release|Win32.Build.0 = Release|Win32 {1BA0B9E8-F135-494F-9CF5-86427C1F6E41}.Release|x64.ActiveCfg = Release|x64 {1BA0B9E8-F135-494F-9CF5-86427C1F6E41}.Release|x64.Build.0 = Release|x64 {06D707E5-68FF-4FC4-AFD0-C84584E32F47}.Debug_NoLibs|Win32.ActiveCfg = Debug_NoLibs|Win32 {06D707E5-68FF-4FC4-AFD0-C84584E32F47}.Debug_NoLibs|Win32.Build.0 = Debug_NoLibs|Win32 {06D707E5-68FF-4FC4-AFD0-C84584E32F47}.Debug_NoLibs|x64.ActiveCfg = Debug_NoLibs|x64 {06D707E5-68FF-4FC4-AFD0-C84584E32F47}.Debug_NoLibs|x64.Build.0 = Debug_NoLibs|x64 {06D707E5-68FF-4FC4-AFD0-C84584E32F47}.Debug|Win32.ActiveCfg = Debug|Win32 {06D707E5-68FF-4FC4-AFD0-C84584E32F47}.Debug|Win32.Build.0 = Debug|Win32 {06D707E5-68FF-4FC4-AFD0-C84584E32F47}.Debug|x64.ActiveCfg = Debug|x64 {06D707E5-68FF-4FC4-AFD0-C84584E32F47}.Debug|x64.Build.0 = Debug|x64 {06D707E5-68FF-4FC4-AFD0-C84584E32F47}.Release_NoLibs|Win32.ActiveCfg = Release_NoLibs|Win32 {06D707E5-68FF-4FC4-AFD0-C84584E32F47}.Release_NoLibs|Win32.Build.0 = Release_NoLibs|Win32 {06D707E5-68FF-4FC4-AFD0-C84584E32F47}.Release_NoLibs|x64.ActiveCfg = Release_NoLibs|x64 {06D707E5-68FF-4FC4-AFD0-C84584E32F47}.Release_NoLibs|x64.Build.0 = Release_NoLibs|x64 {06D707E5-68FF-4FC4-AFD0-C84584E32F47}.Release|Win32.ActiveCfg = Release|Win32 {06D707E5-68FF-4FC4-AFD0-C84584E32F47}.Release|Win32.Build.0 = Release|Win32 {06D707E5-68FF-4FC4-AFD0-C84584E32F47}.Release|x64.ActiveCfg = Release|x64 {06D707E5-68FF-4FC4-AFD0-C84584E32F47}.Release|x64.Build.0 = Release|x64 {96AFC6D4-A3DC-44D4-8F55-F74E1D21798C}.Debug_NoLibs|Win32.ActiveCfg = Debug_NoLibs|Win32 {96AFC6D4-A3DC-44D4-8F55-F74E1D21798C}.Debug_NoLibs|Win32.Build.0 = Debug_NoLibs|Win32 
{96AFC6D4-A3DC-44D4-8F55-F74E1D21798C}.Debug_NoLibs|x64.ActiveCfg = Debug_NoLibs|x64 {96AFC6D4-A3DC-44D4-8F55-F74E1D21798C}.Debug_NoLibs|x64.Build.0 = Debug_NoLibs|x64 {96AFC6D4-A3DC-44D4-8F55-F74E1D21798C}.Debug|Win32.ActiveCfg = Debug|Win32 {96AFC6D4-A3DC-44D4-8F55-F74E1D21798C}.Debug|Win32.Build.0 = Debug|Win32 {96AFC6D4-A3DC-44D4-8F55-F74E1D21798C}.Debug|x64.ActiveCfg = Debug|x64 {96AFC6D4-A3DC-44D4-8F55-F74E1D21798C}.Debug|x64.Build.0 = Debug|x64 {96AFC6D4-A3DC-44D4-8F55-F74E1D21798C}.Release_NoLibs|Win32.ActiveCfg = Release_NoLibs|Win32 {96AFC6D4-A3DC-44D4-8F55-F74E1D21798C}.Release_NoLibs|Win32.Build.0 = Release_NoLibs|Win32 {96AFC6D4-A3DC-44D4-8F55-F74E1D21798C}.Release_NoLibs|x64.ActiveCfg = Release_NoLibs|x64 {96AFC6D4-A3DC-44D4-8F55-F74E1D21798C}.Release_NoLibs|x64.Build.0 = Release_NoLibs|x64 {96AFC6D4-A3DC-44D4-8F55-F74E1D21798C}.Release|Win32.ActiveCfg = Release|Win32 {96AFC6D4-A3DC-44D4-8F55-F74E1D21798C}.Release|Win32.Build.0 = Release|Win32 {96AFC6D4-A3DC-44D4-8F55-F74E1D21798C}.Release|x64.ActiveCfg = Release|x64 {96AFC6D4-A3DC-44D4-8F55-F74E1D21798C}.Release|x64.Build.0 = Release|x64 {8EE881F4-78DC-49C7-8845-E842358AC0FA}.Debug_NoLibs|Win32.ActiveCfg = Debug_NoLibs|Win32 {8EE881F4-78DC-49C7-8845-E842358AC0FA}.Debug_NoLibs|Win32.Build.0 = Debug_NoLibs|Win32 {8EE881F4-78DC-49C7-8845-E842358AC0FA}.Debug_NoLibs|x64.ActiveCfg = Debug_NoLibs|x64 {8EE881F4-78DC-49C7-8845-E842358AC0FA}.Debug_NoLibs|x64.Build.0 = Debug_NoLibs|x64 {8EE881F4-78DC-49C7-8845-E842358AC0FA}.Debug|Win32.ActiveCfg = Debug|Win32 {8EE881F4-78DC-49C7-8845-E842358AC0FA}.Debug|Win32.Build.0 = Debug|Win32 {8EE881F4-78DC-49C7-8845-E842358AC0FA}.Debug|x64.ActiveCfg = Debug|x64 {8EE881F4-78DC-49C7-8845-E842358AC0FA}.Debug|x64.Build.0 = Debug|x64 {8EE881F4-78DC-49C7-8845-E842358AC0FA}.Release_NoLibs|Win32.ActiveCfg = Release_NoLibs|Win32 {8EE881F4-78DC-49C7-8845-E842358AC0FA}.Release_NoLibs|Win32.Build.0 = Release_NoLibs|Win32 
{8EE881F4-78DC-49C7-8845-E842358AC0FA}.Release_NoLibs|x64.ActiveCfg = Release_NoLibs|x64 {8EE881F4-78DC-49C7-8845-E842358AC0FA}.Release_NoLibs|x64.Build.0 = Release_NoLibs|x64 {8EE881F4-78DC-49C7-8845-E842358AC0FA}.Release|Win32.ActiveCfg = Release|Win32 {8EE881F4-78DC-49C7-8845-E842358AC0FA}.Release|Win32.Build.0 = Release|Win32 {8EE881F4-78DC-49C7-8845-E842358AC0FA}.Release|x64.ActiveCfg = Release|x64 {8EE881F4-78DC-49C7-8845-E842358AC0FA}.Release|x64.Build.0 = Release|x64 {3B32F1BE-9686-4DC9-8197-F734D146E9F8}.Debug_NoLibs|Win32.ActiveCfg = Debug_NoLibs|Win32 {3B32F1BE-9686-4DC9-8197-F734D146E9F8}.Debug_NoLibs|Win32.Build.0 = Debug_NoLibs|Win32 {3B32F1BE-9686-4DC9-8197-F734D146E9F8}.Debug_NoLibs|x64.ActiveCfg = Debug_NoLibs|x64 {3B32F1BE-9686-4DC9-8197-F734D146E9F8}.Debug_NoLibs|x64.Build.0 = Debug_NoLibs|x64 {3B32F1BE-9686-4DC9-8197-F734D146E9F8}.Debug|Win32.ActiveCfg = Debug|Win32 {3B32F1BE-9686-4DC9-8197-F734D146E9F8}.Debug|Win32.Build.0 = Debug|Win32 {3B32F1BE-9686-4DC9-8197-F734D146E9F8}.Debug|x64.ActiveCfg = Debug|x64 {3B32F1BE-9686-4DC9-8197-F734D146E9F8}.Debug|x64.Build.0 = Debug|x64 {3B32F1BE-9686-4DC9-8197-F734D146E9F8}.Release_NoLibs|Win32.ActiveCfg = Release_NoLibs|Win32 {3B32F1BE-9686-4DC9-8197-F734D146E9F8}.Release_NoLibs|Win32.Build.0 = Release_NoLibs|Win32 {3B32F1BE-9686-4DC9-8197-F734D146E9F8}.Release_NoLibs|x64.ActiveCfg = Release_NoLibs|x64 {3B32F1BE-9686-4DC9-8197-F734D146E9F8}.Release_NoLibs|x64.Build.0 = Release_NoLibs|x64 {3B32F1BE-9686-4DC9-8197-F734D146E9F8}.Release|Win32.ActiveCfg = Release|Win32 {3B32F1BE-9686-4DC9-8197-F734D146E9F8}.Release|Win32.Build.0 = Release|Win32 {3B32F1BE-9686-4DC9-8197-F734D146E9F8}.Release|x64.ActiveCfg = Release|x64 {3B32F1BE-9686-4DC9-8197-F734D146E9F8}.Release|x64.Build.0 = Release|x64 {5594DC0E-191C-4F2A-83FE-97F53A9C1222}.Debug_NoLibs|Win32.ActiveCfg = Debug_NoLibs|Win32 {5594DC0E-191C-4F2A-83FE-97F53A9C1222}.Debug_NoLibs|Win32.Build.0 = Debug_NoLibs|Win32 
{5594DC0E-191C-4F2A-83FE-97F53A9C1222}.Debug_NoLibs|x64.ActiveCfg = Debug_NoLibs|x64 {5594DC0E-191C-4F2A-83FE-97F53A9C1222}.Debug_NoLibs|x64.Build.0 = Debug_NoLibs|x64 {5594DC0E-191C-4F2A-83FE-97F53A9C1222}.Debug|Win32.ActiveCfg = Debug|Win32 {5594DC0E-191C-4F2A-83FE-97F53A9C1222}.Debug|Win32.Build.0 = Debug|Win32 {5594DC0E-191C-4F2A-83FE-97F53A9C1222}.Debug|x64.ActiveCfg = Debug|x64 {5594DC0E-191C-4F2A-83FE-97F53A9C1222}.Debug|x64.Build.0 = Debug|x64 {5594DC0E-191C-4F2A-83FE-97F53A9C1222}.Release_NoLibs|Win32.ActiveCfg = Release_NoLibs|Win32 {5594DC0E-191C-4F2A-83FE-97F53A9C1222}.Release_NoLibs|Win32.Build.0 = Release_NoLibs|Win32 {5594DC0E-191C-4F2A-83FE-97F53A9C1222}.Release_NoLibs|x64.ActiveCfg = Release_NoLibs|x64 {5594DC0E-191C-4F2A-83FE-97F53A9C1222}.Release_NoLibs|x64.Build.0 = Release_NoLibs|x64 {5594DC0E-191C-4F2A-83FE-97F53A9C1222}.Release|Win32.ActiveCfg = Release|Win32 {5594DC0E-191C-4F2A-83FE-97F53A9C1222}.Release|Win32.Build.0 = Release|Win32 {5594DC0E-191C-4F2A-83FE-97F53A9C1222}.Release|x64.ActiveCfg = Release|x64 {5594DC0E-191C-4F2A-83FE-97F53A9C1222}.Release|x64.Build.0 = Release|x64 {11A8927C-F971-4104-A286-5DC11C25E2EC}.Debug_NoLibs|Win32.ActiveCfg = Debug_NoLibs|Win32 {11A8927C-F971-4104-A286-5DC11C25E2EC}.Debug_NoLibs|Win32.Build.0 = Debug_NoLibs|Win32 {11A8927C-F971-4104-A286-5DC11C25E2EC}.Debug_NoLibs|x64.ActiveCfg = Debug_NoLibs|x64 {11A8927C-F971-4104-A286-5DC11C25E2EC}.Debug_NoLibs|x64.Build.0 = Debug_NoLibs|x64 {11A8927C-F971-4104-A286-5DC11C25E2EC}.Debug|Win32.ActiveCfg = Debug|Win32 {11A8927C-F971-4104-A286-5DC11C25E2EC}.Debug|Win32.Build.0 = Debug|Win32 {11A8927C-F971-4104-A286-5DC11C25E2EC}.Debug|x64.ActiveCfg = Debug|x64 {11A8927C-F971-4104-A286-5DC11C25E2EC}.Debug|x64.Build.0 = Debug|x64 {11A8927C-F971-4104-A286-5DC11C25E2EC}.Release_NoLibs|Win32.ActiveCfg = Release_NoLibs|Win32 {11A8927C-F971-4104-A286-5DC11C25E2EC}.Release_NoLibs|Win32.Build.0 = Release_NoLibs|Win32 
{11A8927C-F971-4104-A286-5DC11C25E2EC}.Release_NoLibs|x64.ActiveCfg = Release_NoLibs|x64 {11A8927C-F971-4104-A286-5DC11C25E2EC}.Release_NoLibs|x64.Build.0 = Release_NoLibs|x64 {11A8927C-F971-4104-A286-5DC11C25E2EC}.Release|Win32.ActiveCfg = Release|Win32 {11A8927C-F971-4104-A286-5DC11C25E2EC}.Release|Win32.Build.0 = Release|Win32 {11A8927C-F971-4104-A286-5DC11C25E2EC}.Release|x64.ActiveCfg = Release|x64 {11A8927C-F971-4104-A286-5DC11C25E2EC}.Release|x64.Build.0 = Release|x64 {62D88133-09F6-4E13-B39F-36FCEFBE4FAF}.Debug_NoLibs|Win32.ActiveCfg = Debug_NoLibs|Win32 {62D88133-09F6-4E13-B39F-36FCEFBE4FAF}.Debug_NoLibs|Win32.Build.0 = Debug_NoLibs|Win32 {62D88133-09F6-4E13-B39F-36FCEFBE4FAF}.Debug_NoLibs|x64.ActiveCfg = Debug_NoLibs|x64 {62D88133-09F6-4E13-B39F-36FCEFBE4FAF}.Debug_NoLibs|x64.Build.0 = Debug_NoLibs|x64 {62D88133-09F6-4E13-B39F-36FCEFBE4FAF}.Debug|Win32.ActiveCfg = Debug|Win32 {62D88133-09F6-4E13-B39F-36FCEFBE4FAF}.Debug|Win32.Build.0 = Debug|Win32 {62D88133-09F6-4E13-B39F-36FCEFBE4FAF}.Debug|x64.ActiveCfg = Debug|x64 {62D88133-09F6-4E13-B39F-36FCEFBE4FAF}.Debug|x64.Build.0 = Debug|x64 {62D88133-09F6-4E13-B39F-36FCEFBE4FAF}.Release_NoLibs|Win32.ActiveCfg = Release_NoLibs|Win32 {62D88133-09F6-4E13-B39F-36FCEFBE4FAF}.Release_NoLibs|Win32.Build.0 = Release_NoLibs|Win32 {62D88133-09F6-4E13-B39F-36FCEFBE4FAF}.Release_NoLibs|x64.ActiveCfg = Release_NoLibs|x64 {62D88133-09F6-4E13-B39F-36FCEFBE4FAF}.Release_NoLibs|x64.Build.0 = Release_NoLibs|x64 {62D88133-09F6-4E13-B39F-36FCEFBE4FAF}.Release|Win32.ActiveCfg = Release|Win32 {62D88133-09F6-4E13-B39F-36FCEFBE4FAF}.Release|Win32.Build.0 = Release|Win32 {62D88133-09F6-4E13-B39F-36FCEFBE4FAF}.Release|x64.ActiveCfg = Release|x64 {62D88133-09F6-4E13-B39F-36FCEFBE4FAF}.Release|x64.Build.0 = Release|x64 {76EFC06C-1F64-4478-ABE8-79832716B393}.Debug_NoLibs|Win32.ActiveCfg = Debug_NoLibs|Win32 {76EFC06C-1F64-4478-ABE8-79832716B393}.Debug_NoLibs|Win32.Build.0 = Debug_NoLibs|Win32 
{76EFC06C-1F64-4478-ABE8-79832716B393}.Debug_NoLibs|x64.ActiveCfg = Debug_NoLibs|x64 {76EFC06C-1F64-4478-ABE8-79832716B393}.Debug_NoLibs|x64.Build.0 = Debug_NoLibs|x64 {76EFC06C-1F64-4478-ABE8-79832716B393}.Debug|Win32.ActiveCfg = Debug|Win32 {76EFC06C-1F64-4478-ABE8-79832716B393}.Debug|Win32.Build.0 = Debug|Win32 {76EFC06C-1F64-4478-ABE8-79832716B393}.Debug|x64.ActiveCfg = Debug|x64 {76EFC06C-1F64-4478-ABE8-79832716B393}.Debug|x64.Build.0 = Debug|x64 {76EFC06C-1F64-4478-ABE8-79832716B393}.Release_NoLibs|Win32.ActiveCfg = Release_NoLibs|Win32 {76EFC06C-1F64-4478-ABE8-79832716B393}.Release_NoLibs|Win32.Build.0 = Release_NoLibs|Win32 {76EFC06C-1F64-4478-ABE8-79832716B393}.Release_NoLibs|x64.ActiveCfg = Release_NoLibs|x64 {76EFC06C-1F64-4478-ABE8-79832716B393}.Release_NoLibs|x64.Build.0 = Release_NoLibs|x64 {76EFC06C-1F64-4478-ABE8-79832716B393}.Release|Win32.ActiveCfg = Release|Win32 {76EFC06C-1F64-4478-ABE8-79832716B393}.Release|Win32.Build.0 = Release|Win32 {76EFC06C-1F64-4478-ABE8-79832716B393}.Release|x64.ActiveCfg = Release|x64 {76EFC06C-1F64-4478-ABE8-79832716B393}.Release|x64.Build.0 = Release|x64 {E4A40368-152D-4D54-9E2E-4B140212F98F}.Debug_NoLibs|Win32.ActiveCfg = Debug_NoLibs|Win32 {E4A40368-152D-4D54-9E2E-4B140212F98F}.Debug_NoLibs|Win32.Build.0 = Debug_NoLibs|Win32 {E4A40368-152D-4D54-9E2E-4B140212F98F}.Debug_NoLibs|x64.ActiveCfg = Debug_NoLibs|x64 {E4A40368-152D-4D54-9E2E-4B140212F98F}.Debug_NoLibs|x64.Build.0 = Debug_NoLibs|x64 {E4A40368-152D-4D54-9E2E-4B140212F98F}.Debug|Win32.ActiveCfg = Debug|Win32 {E4A40368-152D-4D54-9E2E-4B140212F98F}.Debug|Win32.Build.0 = Debug|Win32 {E4A40368-152D-4D54-9E2E-4B140212F98F}.Debug|x64.ActiveCfg = Debug|x64 {E4A40368-152D-4D54-9E2E-4B140212F98F}.Debug|x64.Build.0 = Debug|x64 {E4A40368-152D-4D54-9E2E-4B140212F98F}.Release_NoLibs|Win32.ActiveCfg = Release_NoLibs|Win32 {E4A40368-152D-4D54-9E2E-4B140212F98F}.Release_NoLibs|Win32.Build.0 = Release_NoLibs|Win32 
{E4A40368-152D-4D54-9E2E-4B140212F98F}.Release_NoLibs|x64.ActiveCfg = Release_NoLibs|x64 {E4A40368-152D-4D54-9E2E-4B140212F98F}.Release_NoLibs|x64.Build.0 = Release_NoLibs|x64 {E4A40368-152D-4D54-9E2E-4B140212F98F}.Release|Win32.ActiveCfg = Release|Win32 {E4A40368-152D-4D54-9E2E-4B140212F98F}.Release|Win32.Build.0 = Release|Win32 {E4A40368-152D-4D54-9E2E-4B140212F98F}.Release|x64.ActiveCfg = Release|x64 {E4A40368-152D-4D54-9E2E-4B140212F98F}.Release|x64.Build.0 = Release|x64 {38429B36-9802-42DE-90DD-DA692F7412C2}.Debug_NoLibs|Win32.ActiveCfg = Debug_NoLibs|Win32 {38429B36-9802-42DE-90DD-DA692F7412C2}.Debug_NoLibs|Win32.Build.0 = Debug_NoLibs|Win32 {38429B36-9802-42DE-90DD-DA692F7412C2}.Debug_NoLibs|x64.ActiveCfg = Debug_NoLibs|x64 {38429B36-9802-42DE-90DD-DA692F7412C2}.Debug_NoLibs|x64.Build.0 = Debug_NoLibs|x64 {38429B36-9802-42DE-90DD-DA692F7412C2}.Debug|Win32.ActiveCfg = Debug|Win32 {38429B36-9802-42DE-90DD-DA692F7412C2}.Debug|x64.ActiveCfg = Debug|x64 {38429B36-9802-42DE-90DD-DA692F7412C2}.Debug|x64.Build.0 = Debug|x64 {38429B36-9802-42DE-90DD-DA692F7412C2}.Release_NoLibs|Win32.ActiveCfg = Release_NoLibs|Win32 {38429B36-9802-42DE-90DD-DA692F7412C2}.Release_NoLibs|Win32.Build.0 = Release_NoLibs|Win32 {38429B36-9802-42DE-90DD-DA692F7412C2}.Release_NoLibs|x64.ActiveCfg = Release_NoLibs|x64 {38429B36-9802-42DE-90DD-DA692F7412C2}.Release_NoLibs|x64.Build.0 = Release_NoLibs|x64 {38429B36-9802-42DE-90DD-DA692F7412C2}.Release|Win32.ActiveCfg = Release|Win32 {38429B36-9802-42DE-90DD-DA692F7412C2}.Release|x64.ActiveCfg = Release|x64 {38429B36-9802-42DE-90DD-DA692F7412C2}.Release|x64.Build.0 = Release|x64 {C41ACD23-6D88-4999-B79D-7E7828B2DBDE}.Debug_NoLibs|Win32.ActiveCfg = Debug_NoLibs|Win32 {C41ACD23-6D88-4999-B79D-7E7828B2DBDE}.Debug_NoLibs|Win32.Build.0 = Debug_NoLibs|Win32 {C41ACD23-6D88-4999-B79D-7E7828B2DBDE}.Debug_NoLibs|x64.ActiveCfg = Debug_NoLibs|x64 {C41ACD23-6D88-4999-B79D-7E7828B2DBDE}.Debug_NoLibs|x64.Build.0 = Debug_NoLibs|x64 
{C41ACD23-6D88-4999-B79D-7E7828B2DBDE}.Debug|Win32.ActiveCfg = Debug|Win32 {C41ACD23-6D88-4999-B79D-7E7828B2DBDE}.Debug|Win32.Build.0 = Debug|Win32 {C41ACD23-6D88-4999-B79D-7E7828B2DBDE}.Debug|x64.ActiveCfg = Debug|x64 {C41ACD23-6D88-4999-B79D-7E7828B2DBDE}.Debug|x64.Build.0 = Debug|x64 {C41ACD23-6D88-4999-B79D-7E7828B2DBDE}.Release_NoLibs|Win32.ActiveCfg = Release_NoLibs|Win32 {C41ACD23-6D88-4999-B79D-7E7828B2DBDE}.Release_NoLibs|Win32.Build.0 = Release_NoLibs|Win32 {C41ACD23-6D88-4999-B79D-7E7828B2DBDE}.Release_NoLibs|x64.ActiveCfg = Release_NoLibs|x64 {C41ACD23-6D88-4999-B79D-7E7828B2DBDE}.Release_NoLibs|x64.Build.0 = Release_NoLibs|x64 {C41ACD23-6D88-4999-B79D-7E7828B2DBDE}.Release|Win32.ActiveCfg = Release|Win32 {C41ACD23-6D88-4999-B79D-7E7828B2DBDE}.Release|Win32.Build.0 = Release|Win32 {C41ACD23-6D88-4999-B79D-7E7828B2DBDE}.Release|x64.ActiveCfg = Release|x64 {C41ACD23-6D88-4999-B79D-7E7828B2DBDE}.Release|x64.Build.0 = Release|x64 {5D75FBFB-539A-4014-ACEB-520BB1451F00}.Debug_NoLibs|Win32.ActiveCfg = Debug_NoLibs|Win32 {5D75FBFB-539A-4014-ACEB-520BB1451F00}.Debug_NoLibs|Win32.Build.0 = Debug_NoLibs|Win32 {5D75FBFB-539A-4014-ACEB-520BB1451F00}.Debug_NoLibs|x64.ActiveCfg = Debug_NoLibs|x64 {5D75FBFB-539A-4014-ACEB-520BB1451F00}.Debug_NoLibs|x64.Build.0 = Debug_NoLibs|x64 {5D75FBFB-539A-4014-ACEB-520BB1451F00}.Debug|Win32.ActiveCfg = Debug|Win32 {5D75FBFB-539A-4014-ACEB-520BB1451F00}.Debug|Win32.Build.0 = Debug|Win32 {5D75FBFB-539A-4014-ACEB-520BB1451F00}.Debug|x64.ActiveCfg = Debug|x64 {5D75FBFB-539A-4014-ACEB-520BB1451F00}.Debug|x64.Build.0 = Debug|x64 {5D75FBFB-539A-4014-ACEB-520BB1451F00}.Release_NoLibs|Win32.ActiveCfg = Release_NoLibs|Win32 {5D75FBFB-539A-4014-ACEB-520BB1451F00}.Release_NoLibs|Win32.Build.0 = Release_NoLibs|Win32 {5D75FBFB-539A-4014-ACEB-520BB1451F00}.Release_NoLibs|x64.ActiveCfg = Release_NoLibs|x64 {5D75FBFB-539A-4014-ACEB-520BB1451F00}.Release_NoLibs|x64.Build.0 = Release_NoLibs|x64 
{5D75FBFB-539A-4014-ACEB-520BB1451F00}.Release|Win32.ActiveCfg = Release|Win32 {5D75FBFB-539A-4014-ACEB-520BB1451F00}.Release|Win32.Build.0 = Release|Win32 {5D75FBFB-539A-4014-ACEB-520BB1451F00}.Release|x64.ActiveCfg = Release|x64 {5D75FBFB-539A-4014-ACEB-520BB1451F00}.Release|x64.Build.0 = Release|x64 {09E82DEB-4BC3-44B5-A9F6-4D65F885F88C}.Debug_NoLibs|Win32.ActiveCfg = Debug_NoLibs|Win32 {09E82DEB-4BC3-44B5-A9F6-4D65F885F88C}.Debug_NoLibs|Win32.Build.0 = Debug_NoLibs|Win32 {09E82DEB-4BC3-44B5-A9F6-4D65F885F88C}.Debug_NoLibs|x64.ActiveCfg = Debug_NoLibs|x64 {09E82DEB-4BC3-44B5-A9F6-4D65F885F88C}.Debug_NoLibs|x64.Build.0 = Debug_NoLibs|x64 {09E82DEB-4BC3-44B5-A9F6-4D65F885F88C}.Debug|Win32.ActiveCfg = Debug|Win32 {09E82DEB-4BC3-44B5-A9F6-4D65F885F88C}.Debug|Win32.Build.0 = Debug|Win32 {09E82DEB-4BC3-44B5-A9F6-4D65F885F88C}.Debug|x64.ActiveCfg = Debug|x64 {09E82DEB-4BC3-44B5-A9F6-4D65F885F88C}.Debug|x64.Build.0 = Debug|x64 {09E82DEB-4BC3-44B5-A9F6-4D65F885F88C}.Release_NoLibs|Win32.ActiveCfg = Release_NoLibs|Win32 {09E82DEB-4BC3-44B5-A9F6-4D65F885F88C}.Release_NoLibs|Win32.Build.0 = Release_NoLibs|Win32 {09E82DEB-4BC3-44B5-A9F6-4D65F885F88C}.Release_NoLibs|x64.ActiveCfg = Release_NoLibs|x64 {09E82DEB-4BC3-44B5-A9F6-4D65F885F88C}.Release_NoLibs|x64.Build.0 = Release_NoLibs|x64 {09E82DEB-4BC3-44B5-A9F6-4D65F885F88C}.Release|Win32.ActiveCfg = Release|Win32 {09E82DEB-4BC3-44B5-A9F6-4D65F885F88C}.Release|Win32.Build.0 = Release|Win32 {09E82DEB-4BC3-44B5-A9F6-4D65F885F88C}.Release|x64.ActiveCfg = Release|x64 {09E82DEB-4BC3-44B5-A9F6-4D65F885F88C}.Release|x64.Build.0 = Release|x64 EndGlobalSection GlobalSection(SolutionProperties) = preSolution HideSolutionNode = FALSE EndGlobalSection EndGlobal sleuthkit-4.11.1/win32/blkls/000755 000765 000024 00000000000 14137073557 016471 5ustar00carrierstaff000000 000000 sleuthkit-4.11.1/win32/blkcalc/000755 000765 000024 00000000000 14137073557 016755 5ustar00carrierstaff000000 000000 
sleuthkit-4.11.1/win32/blkcalc/blkcalc.vcxproj000755 000765 000024 00000047365 14137073414 021777 0ustar00carrierstaff000000 000000  Debug_NoLibs Win32 Debug_NoLibs x64 Debug Win32 Debug x64 Release_NoLibs Win32 Release_NoLibs x64 Release Win32 Release x64 {46B82840-9832-466F-8568-132407CA3853} blkcalc Win32Proj 8.1 Application Unicode v140_xp Application Unicode v140_xp Application Unicode true v140_xp Application Unicode true v140_xp Application Unicode true v140_xp Application Unicode true v140_xp Application Unicode v140_xp Application Unicode v140_xp <_ProjectFileVersion>10.0.30319.1 $(SolutionDir)$(Configuration)\ $(OutDir) $(IntDir) $(IntDir) true true $(SolutionDir)$(Configuration)\ $(SolutionDir)$(Configuration)\ $(OutDir) $(OutDir) $(IntDir) $(IntDir) $(IntDir) $(IntDir) false false false false $(SolutionDir)$(Configuration)\ $(OutDir) $(IntDir) $(IntDir) true true Disabled $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) WIN32;_CRT_SECURE_NO_WARNINGS;_DEBUG;_CONSOLE;_ITERATOR_DEBUG_LEVEL=2;WINVER=0x0601;%(PreprocessorDefinitions) true EnableFastChecks MultiThreadedDebugDLL Level3 EditAndContinue libvhdi.lib;libvmdk.lib;libewf.lib;zlib.lib;%(AdditionalDependencies) $(TskNugetLibs);%(AdditionalLibraryDirectories) true Console false MachineX86 Console Disabled $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) WIN32;_CRT_SECURE_NO_WARNINGS;_DEBUG;_CONSOLE;WINVER=0x0601;%(PreprocessorDefinitions) EnableFastChecks MultiThreadedDebugDLL Level3 ProgramDatabase libvhdi.lib;libvmdk.lib;libewf.lib;zlib.lib;%(AdditionalDependencies) $(TskNugetLibs);%(AdditionalLibraryDirectories) true Console false Console $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) WIN32;_CRT_SECURE_NO_WARNINGS;NDEBUG;_CONSOLE;WINVER=0x0501;%(PreprocessorDefinitions) MultiThreadedDLL Level3 ProgramDatabase libvhdi.lib;libvmdk.lib;libewf.lib;zlib.lib;%(AdditionalDependencies) $(TskNugetLibs);%(AdditionalLibraryDirectories) true Console true true false MachineX86 Console 
$(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) WIN32;_CRT_SECURE_NO_WARNINGS;NDEBUG;_CONSOLE;WINVER=0x0601;%(PreprocessorDefinitions) MultiThreaded Level3 ProgramDatabase false %(AdditionalDependencies) %(AdditionalLibraryDirectories) true Console true true false MachineX86 Console $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) WIN32;_CRT_SECURE_NO_WARNINGS;NDEBUG;_CONSOLE;WINVER=0x0601;%(PreprocessorDefinitions) MultiThreadedDLL Level3 ProgramDatabase libvhdi.lib;libvmdk.lib;libewf.lib;zlib.lib;libcrypto.lib;%(AdditionalDependencies) $(TskNugetLibs);%(AdditionalLibraryDirectories) true Console true true false Console $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) WIN32;_CRT_SECURE_NO_WARNINGS;NDEBUG;_CONSOLE;WINVER=0x0601;%(PreprocessorDefinitions) MultiThreaded Level3 ProgramDatabase false %(AdditionalDependencies) %(AdditionalLibraryDirectories) true Console true true false Console Disabled $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) WIN32;_CRT_SECURE_NO_WARNINGS;_DEBUG;_CONSOLE;WINVER=0x0601;%(PreprocessorDefinitions) true EnableFastChecks MultiThreadedDebugDLL Level3 EditAndContinue %(AdditionalLibraryDirectories) true Console false MachineX86 Console Disabled $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) WIN32;_CRT_SECURE_NO_WARNINGS;_DEBUG;_CONSOLE;WINVER=0x0601;%(PreprocessorDefinitions) EnableFastChecks MultiThreadedDebug Level3 ProgramDatabase %(AdditionalLibraryDirectories) true Console false Console {76efc06c-1f64-4478-abe8-79832716b393} false sleuthkit-4.11.1/win32/blkls/blkls.vcxproj000755 000765 000024 00000047253 14137073414 021223 0ustar00carrierstaff000000 000000  Debug_NoLibs Win32 Debug_NoLibs x64 Debug Win32 Debug x64 Release_NoLibs Win32 Release_NoLibs x64 Release Win32 Release x64 {48D98A0A-BF9C-4D7E-9AF8-E4CAE8437997} blkls Win32Proj Application Unicode v140_xp Application Unicode v140_xp Application Unicode true v140_xp Application Unicode true v140_xp Application Unicode true v140_xp Application Unicode 
true v140_xp Application Unicode v140_xp Application Unicode v140_xp <_ProjectFileVersion>10.0.30319.1 $(SolutionDir)$(Configuration)\ $(OutDir) $(IntDir) $(IntDir) true true $(SolutionDir)$(Configuration)\ $(SolutionDir)$(Configuration)\ $(OutDir) $(OutDir) $(IntDir) $(IntDir) $(IntDir) $(IntDir) false false false false $(SolutionDir)$(Configuration)\ $(OutDir) $(IntDir) $(IntDir) true true Disabled $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) WIN32;_CRT_SECURE_NO_WARNINGS;_DEBUG;_CONSOLE;_ITERATOR_DEBUG_LEVEL=2;WINVER=0x0601;%(PreprocessorDefinitions) true EnableFastChecks MultiThreadedDebugDLL Level3 EditAndContinue libvhdi.lib;libvmdk.lib;libewf.lib;zlib.lib;%(AdditionalDependencies) $(TskNugetLibs);%(AdditionalLibraryDirectories) true Console false MachineX86 Console Disabled $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) WIN32;_CRT_SECURE_NO_WARNINGS;_DEBUG;_CONSOLE;WINVER=0x0601;%(PreprocessorDefinitions) EnableFastChecks MultiThreadedDebugDLL Level3 ProgramDatabase libvhdi.lib;libvmdk.lib;libewf.lib;zlib.lib;%(AdditionalDependencies) $(TskNugetLibs);%(AdditionalLibraryDirectories) true Console false Console $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) WIN32;_CRT_SECURE_NO_WARNINGS;NDEBUG;_CONSOLE;WINVER=0x0501;%(PreprocessorDefinitions) MultiThreadedDLL Level3 ProgramDatabase libvhdi.lib;libvmdk.lib;libewf.lib;zlib.lib;%(AdditionalDependencies) $(TskNugetLibs);%(AdditionalLibraryDirectories) true Console true true false MachineX86 Console $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) WIN32;_CRT_SECURE_NO_WARNINGS;NDEBUG;_CONSOLE;WINVER=0x0601;%(PreprocessorDefinitions) MultiThreaded Level3 ProgramDatabase false %(AdditionalDependencies) %(AdditionalLibraryDirectories) true Console true true false MachineX86 Console $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) WIN32;_CRT_SECURE_NO_WARNINGS;NDEBUG;_CONSOLE;WINVER=0x0601;%(PreprocessorDefinitions) MultiThreadedDLL Level3 ProgramDatabase 
libvhdi.lib;libvmdk.lib;libewf.lib;zlib.lib;libcrypto.lib;%(AdditionalDependencies) $(TskNugetLibs);%(AdditionalLibraryDirectories) true Console true true false Console $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) WIN32;_CRT_SECURE_NO_WARNINGS;NDEBUG;_CONSOLE;WINVER=0x0601;%(PreprocessorDefinitions) MultiThreaded Level3 ProgramDatabase false %(AdditionalDependencies) %(AdditionalLibraryDirectories) true Console true true false Console Disabled $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) WIN32;_CRT_SECURE_NO_WARNINGS;_DEBUG;_CONSOLE;WINVER=0x0601;%(PreprocessorDefinitions) true EnableFastChecks MultiThreadedDebugDLL Level3 EditAndContinue %(AdditionalLibraryDirectories) true Console false MachineX86 Console Disabled $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) WIN32;_CRT_SECURE_NO_WARNINGS;_DEBUG;_CONSOLE;WINVER=0x0601;%(PreprocessorDefinitions) EnableFastChecks MultiThreadedDebug Level3 ProgramDatabase %(AdditionalLibraryDirectories) true Console false Console {76efc06c-1f64-4478-abe8-79832716b393} false sleuthkit-4.11.1/win32/blkstat/blkstat.vcxproj000755 000765 000024 00000047257 14137073414 022121 0ustar00carrierstaff000000 000000  Debug_NoLibs Win32 Debug_NoLibs x64 Debug Win32 Debug x64 Release_NoLibs Win32 Release_NoLibs x64 Release Win32 Release x64 {FBB66156-9A54-4713-A801-C507BE7A3AE3} blkstat Win32Proj Application Unicode v140_xp Application Unicode v140_xp Application Unicode true v140_xp Application Unicode true v140_xp Application Unicode true v140_xp Application Unicode true v140_xp Application Unicode v140_xp Application Unicode v140_xp <_ProjectFileVersion>10.0.30319.1 $(SolutionDir)$(Configuration)\ $(OutDir) $(IntDir) $(IntDir) true true $(SolutionDir)$(Configuration)\ $(SolutionDir)$(Configuration)\ $(OutDir) $(OutDir) $(IntDir) $(IntDir) $(IntDir) $(IntDir) false false false false $(SolutionDir)$(Configuration)\ $(OutDir) $(IntDir) $(IntDir) true true Disabled $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) 
WIN32;_CRT_SECURE_NO_WARNINGS;_DEBUG;_CONSOLE;_ITERATOR_DEBUG_LEVEL=2;WINVER=0x0601;%(PreprocessorDefinitions) true EnableFastChecks MultiThreadedDebugDLL Level3 EditAndContinue libvhdi.lib;libvmdk.lib;libewf.lib;zlib.lib;%(AdditionalDependencies) $(TskNugetLibs);%(AdditionalLibraryDirectories) true Console false MachineX86 Console Disabled $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) WIN32;_CRT_SECURE_NO_WARNINGS;_DEBUG;_CONSOLE;WINVER=0x0601;%(PreprocessorDefinitions) EnableFastChecks MultiThreadedDebugDLL Level3 ProgramDatabase libvhdi.lib;libvmdk.lib;libewf.lib;zlib.lib;%(AdditionalDependencies) $(TskNugetLibs);%(AdditionalLibraryDirectories) true Console false Console $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) WIN32;_CRT_SECURE_NO_WARNINGS;NDEBUG;_CONSOLE;WINVER=0x0501;%(PreprocessorDefinitions) MultiThreadedDLL Level3 ProgramDatabase libvhdi.lib;libvmdk.lib;libewf.lib;zlib.lib;%(AdditionalDependencies) $(TskNugetLibs);%(AdditionalLibraryDirectories) true Console true true false MachineX86 Console $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) WIN32;_CRT_SECURE_NO_WARNINGS;NDEBUG;_CONSOLE;WINVER=0x0601;%(PreprocessorDefinitions) MultiThreaded Level3 ProgramDatabase false %(AdditionalDependencies) %(AdditionalLibraryDirectories) true Console true true false MachineX86 Console $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) WIN32;_CRT_SECURE_NO_WARNINGS;NDEBUG;_CONSOLE;WINVER=0x0601;%(PreprocessorDefinitions) MultiThreadedDLL Level3 ProgramDatabase libvhdi.lib;libvmdk.lib;libewf.lib;zlib.lib;libcrypto.lib;%(AdditionalDependencies) $(TskNugetLibs);%(AdditionalLibraryDirectories) true Console true true false Console $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) WIN32;_CRT_SECURE_NO_WARNINGS;NDEBUG;_CONSOLE;WINVER=0x0601;%(PreprocessorDefinitions) MultiThreaded Level3 ProgramDatabase false %(AdditionalDependencies) %(AdditionalLibraryDirectories) true Console true true false Console Disabled 
$(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) WIN32;_CRT_SECURE_NO_WARNINGS;_DEBUG;_CONSOLE;WINVER=0x0601;%(PreprocessorDefinitions) true EnableFastChecks MultiThreadedDebugDLL Level3 EditAndContinue %(AdditionalLibraryDirectories) true Console false MachineX86 Console Disabled $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) WIN32;_CRT_SECURE_NO_WARNINGS;_DEBUG;_CONSOLE;WINVER=0x0601;%(PreprocessorDefinitions) EnableFastChecks MultiThreadedDebug Level3 ProgramDatabase %(AdditionalLibraryDirectories) true Console false Console {76efc06c-1f64-4478-abe8-79832716b393} false sleuthkit-4.11.1/win32/img_cat/img_cat.vcxproj000755 000765 000024 00000047242 14137073414 022011 0ustar00carrierstaff000000 000000  Debug_NoLibs Win32 Debug_NoLibs x64 Debug Win32 Debug x64 Release_NoLibs Win32 Release_NoLibs x64 Release Win32 Release x64 {671D843F-4DFA-4CB8-8BC9-D44E7F4ECF1E} img_cat Win32Proj Application Unicode v140_xp Application Unicode v140_xp Application Unicode true v140_xp Application Unicode true v140_xp Application Unicode true v140_xp Application Unicode true v140_xp Application Unicode v140_xp Application Unicode v140_xp <_ProjectFileVersion>10.0.30319.1 $(SolutionDir)$(Configuration)\ $(OutDir) $(IntDir) $(IntDir) true true $(SolutionDir)$(Configuration)\ $(SolutionDir)$(Configuration)\ $(OutDir) $(OutDir) $(IntDir) $(IntDir) $(IntDir) $(IntDir) false false false false $(SolutionDir)$(Configuration)\ $(OutDir) $(IntDir) $(IntDir) true true Disabled $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) _CRT_SECURE_NO_DEPRECATE;_CRT_SECURE_NO_WARNINGS;WINVER=0x0601;%(PreprocessorDefinitions) true EnableFastChecks MultiThreadedDebugDLL Level3 EditAndContinue libvhdi.lib;libvmdk.lib;libewf.lib;zlib.lib;%(AdditionalDependencies) $(TskNugetLibs);%(AdditionalLibraryDirectories) true Console false MachineX86 Console Disabled $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) _CRT_SECURE_NO_DEPRECATE;_CRT_SECURE_NO_WARNINGS;WINVER=0x0601;%(PreprocessorDefinitions) 
EnableFastChecks MultiThreadedDebugDLL Level3 ProgramDatabase libvhdi.lib;libvmdk.lib;libewf.lib;zlib.lib;%(AdditionalDependencies) $(TskNugetLibs);%(AdditionalLibraryDirectories) true Console false Console $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) _CRT_SECURE_NO_DEPRECATE;_CRT_SECURE_NO_WARNINGS;WINVER=0x0501;%(PreprocessorDefinitions) MultiThreadedDLL Level3 ProgramDatabase libvhdi.lib;libvmdk.lib;libewf.lib;zlib.lib;%(AdditionalDependencies) $(TskNugetLibs);%(AdditionalLibraryDirectories) true Console true true false MachineX86 Console $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) _CRT_SECURE_NO_DEPRECATE;_CRT_SECURE_NO_WARNINGS;WINVER=0x0601;%(PreprocessorDefinitions) MultiThreaded Level3 ProgramDatabase false %(AdditionalDependencies) %(AdditionalLibraryDirectories) true Console true true false MachineX86 Console $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) _CRT_SECURE_NO_DEPRECATE;_CRT_SECURE_NO_WARNINGS;WINVER=0x0601;%(PreprocessorDefinitions) MultiThreadedDLL Level3 ProgramDatabase libvhdi.lib;libvmdk.lib;libewf.lib;zlib.lib;%(AdditionalDependencies) $(TskNugetLibs);%(AdditionalLibraryDirectories) true Console true true false Console $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) _CRT_SECURE_NO_DEPRECATE;_CRT_SECURE_NO_WARNINGS;WINVER=0x0601;%(PreprocessorDefinitions) MultiThreaded Level3 ProgramDatabase false %(AdditionalDependencies) %(AdditionalLibraryDirectories) true Console true true false Console Disabled $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) _CRT_SECURE_NO_DEPRECATE;_CRT_SECURE_NO_WARNINGS;WINVER=0x0601;%(PreprocessorDefinitions) true EnableFastChecks MultiThreadedDebugDLL Level3 EditAndContinue %(AdditionalLibraryDirectories) true Console false MachineX86 Console Disabled $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) _CRT_SECURE_NO_DEPRECATE;_CRT_SECURE_NO_WARNINGS;WINVER=0x0601;%(PreprocessorDefinitions) EnableFastChecks MultiThreadedDebug Level3 ProgramDatabase %(AdditionalLibraryDirectories) 
true Console false Console {76efc06c-1f64-4478-abe8-79832716b393} false sleuthkit-4.11.1/win32/mmcat/mmcat.vcxproj000755 000765 000024 00000047501 14137073414 021203 0ustar00carrierstaff000000 000000  Debug_NoLibs Win32 Debug_NoLibs x64 Debug Win32 Debug x64 Release_NoLibs Win32 Release_NoLibs x64 Release Win32 Release x64 {A15F1E4F-951A-403E-B746-2A6D63D9C416} mmcat Win32Proj Application Unicode v140_xp Application Unicode v140_xp Application Unicode true v140_xp Application Unicode true v140_xp Application Unicode true v140_xp Application Unicode true v140_xp Application Unicode v140_xp Application Unicode v140_xp <_ProjectFileVersion>10.0.30319.1 $(SolutionDir)$(Configuration)\ $(OutDir) $(IntDir) $(IntDir) true true $(SolutionDir)$(Configuration)\ $(SolutionDir)$(Configuration)\ $(OutDir) $(OutDir) $(IntDir) $(IntDir) $(IntDir) $(IntDir) false false false false $(SolutionDir)$(Configuration)\ $(OutDir) $(IntDir) $(IntDir) true true Disabled $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) _CRT_SECURE_NO_WARNINGS;_CRT_SECURE_NO_DEPRECATE;WIN32;_DEBUG;_CONSOLE;_ITERATOR_DEBUG_LEVEL=2;WINVER=0x0601;%(PreprocessorDefinitions) true EnableFastChecks MultiThreadedDebugDLL Level3 EditAndContinue libvhdi.lib;libvmdk.lib;libewf.lib;zlib.lib;%(AdditionalDependencies) $(TskNugetLibs);%(AdditionalLibraryDirectories) true Console false MachineX86 Console Disabled $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) _CRT_SECURE_NO_WARNINGS;_CRT_SECURE_NO_DEPRECATE;WIN32;_DEBUG;_CONSOLE;WINVER=0x0601;%(PreprocessorDefinitions) EnableFastChecks MultiThreadedDebugDLL Level3 ProgramDatabase libvhdi.lib;libvmdk.lib;libewf.lib;zlib.lib;%(AdditionalDependencies) $(TskNugetLibs);%(AdditionalLibraryDirectories) true Console false Console $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) _CRT_SECURE_NO_WARNINGS;_CRT_SECURE_NO_DEPRECATE;WIN32;NDEBUG;_CONSOLE;WINVER=0x0501;%(PreprocessorDefinitions) MultiThreadedDLL Level3 ProgramDatabase 
libvhdi.lib;libvmdk.lib;libewf.lib;zlib.lib;%(AdditionalDependencies) $(TskNugetLibs);%(AdditionalLibraryDirectories) true Console true true false MachineX86 Console $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) _CRT_SECURE_NO_WARNINGS;_CRT_SECURE_NO_DEPRECATE;WIN32;NDEBUG;_CONSOLE;WINVER=0x0601;%(PreprocessorDefinitions) MultiThreaded Level3 ProgramDatabase false %(AdditionalDependencies) %(AdditionalLibraryDirectories) true Console true true false MachineX86 Console $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) _CRT_SECURE_NO_WARNINGS;_CRT_SECURE_NO_DEPRECATE;WIN32;NDEBUG;_CONSOLE;WINVER=0x0601;%(PreprocessorDefinitions) MultiThreadedDLL Level3 ProgramDatabase libvhdi.lib;libvmdk.lib;libewf.lib;zlib.lib;libcrypto.lib;%(AdditionalDependencies) $(TskNugetLibs);%(AdditionalLibraryDirectories) true Console true true false Console $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) _CRT_SECURE_NO_WARNINGS;_CRT_SECURE_NO_DEPRECATE;WIN32;NDEBUG;_CONSOLE;WINVER=0x0601;%(PreprocessorDefinitions) MultiThreaded Level3 ProgramDatabase false %(AdditionalDependencies) %(AdditionalLibraryDirectories) true Console true true false Console Disabled $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) _CRT_SECURE_NO_WARNINGS;WIN32;_DEBUG;_CONSOLE;WINVER=0x0601;%(PreprocessorDefinitions) true EnableFastChecks MultiThreadedDebugDLL Level3 EditAndContinue %(AdditionalLibraryDirectories) true Console false MachineX86 Console Disabled $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) _CRT_SECURE_NO_WARNINGS;WIN32;_DEBUG;_CONSOLE;WINVER=0x0601;%(PreprocessorDefinitions) EnableFastChecks MultiThreadedDebug Level3 ProgramDatabase %(AdditionalLibraryDirectories) true Console false Console {76efc06c-1f64-4478-abe8-79832716b393} false sleuthkit-4.11.1/win32/istat/istat.vcxproj000755 000765 000024 00000050273 14137073414 021251 0ustar00carrierstaff000000 000000  Debug_NoLibs Win32 Debug_NoLibs x64 Debug Win32 Debug x64 Release_NoLibs Win32 Release_NoLibs x64 Release Win32 
Release x64 {D7643AD7-8518-4B3E-8F3F-F11258D9540E} istat Win32Proj Application Unicode v140_xp Application Unicode v140_xp Application Unicode true v140_xp Application Unicode true v140_xp Application Unicode true v140_xp Application Unicode true v140_xp Application Unicode v140_xp Application Unicode v140_xp <_ProjectFileVersion>10.0.30319.1 $(SolutionDir)$(Configuration)\ $(OutDir) $(IntDir) $(IntDir) true true $(SolutionDir)$(Configuration)\ $(SolutionDir)$(Configuration)\ $(OutDir) $(OutDir) $(IntDir) $(IntDir) $(IntDir) $(IntDir) false false false false $(SolutionDir)$(Configuration)\ $(OutDir) $(IntDir) $(IntDir) true true Disabled $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) _CRT_SECURE_NO_DEPRECATE;_CRT_SECURE_NO_WARNINGS;_ITERATOR_DEBUG_LEVEL=2;WINVER=0x0601;%(PreprocessorDefinitions) true EnableFastChecks MultiThreadedDebugDLL Level3 EditAndContinue 4200 libvhdi.lib;libvmdk.lib;libewf.lib;zlib.lib;%(AdditionalDependencies) $(TskNugetLibs);%(AdditionalLibraryDirectories) true Console false MachineX86 Console Disabled $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) _CRT_SECURE_NO_DEPRECATE;_CRT_SECURE_NO_WARNINGS;WINVER=0x0601;%(PreprocessorDefinitions) EnableFastChecks MultiThreadedDebugDLL Level3 ProgramDatabase 4200 libvhdi.lib;libvmdk.lib;libewf.lib;zlib.lib;%(AdditionalDependencies) $(TskNugetLibs);%(AdditionalLibraryDirectories) true Console false Console $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) _CRT_SECURE_NO_DEPRECATE;_CRT_SECURE_NO_WARNINGS;WINVER=0x0501;%(PreprocessorDefinitions) MultiThreadedDLL Level3 ProgramDatabase 4200 libvhdi.lib;libvmdk.lib;libewf.lib;zlib.lib;%(AdditionalDependencies) $(TskNugetLibs);%(AdditionalLibraryDirectories) true Console true true false MachineX86 Console $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) _CRT_SECURE_NO_DEPRECATE;_CRT_SECURE_NO_WARNINGS;WINVER=0x0601;%(PreprocessorDefinitions) MultiThreaded Level3 ProgramDatabase false 4200 %(AdditionalDependencies) 
%(AdditionalLibraryDirectories) true Console true true false MachineX86 Console $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) _CRT_SECURE_NO_DEPRECATE;_CRT_SECURE_NO_WARNINGS;WINVER=0x0601;%(PreprocessorDefinitions) MultiThreadedDLL Level3 ProgramDatabase 4200 libvhdi.lib;libvmdk.lib;libewf.lib;zlib.lib;libcrypto.lib;%(AdditionalDependencies) $(TskNugetLibs);%(AdditionalLibraryDirectories) true Console true true false Console $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) _CRT_SECURE_NO_DEPRECATE;_CRT_SECURE_NO_WARNINGS;WINVER=0x0601;%(PreprocessorDefinitions) MultiThreaded Level3 ProgramDatabase false 4200 %(AdditionalDependencies) %(AdditionalLibraryDirectories) true Console true true false Console Disabled $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) _CRT_SECURE_NO_DEPRECATE;_CRT_SECURE_NO_WARNINGS;WINVER=0x0601;%(PreprocessorDefinitions) true EnableFastChecks MultiThreadedDebugDLL Level3 EditAndContinue 4200 %(AdditionalLibraryDirectories) true Console false MachineX86 Console Disabled $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) _CRT_SECURE_NO_DEPRECATE;_CRT_SECURE_NO_WARNINGS;WINVER=0x0601;%(PreprocessorDefinitions) EnableFastChecks MultiThreadedDebug Level3 ProgramDatabase 4200 %(AdditionalLibraryDirectories) true Console false Console {76efc06c-1f64-4478-abe8-79832716b393} false sleuthkit-4.11.1/win32/mmls/mmls.vcxproj000755 000765 000024 00000047251 14137073414 020723 0ustar00carrierstaff000000 000000  Debug_NoLibs Win32 Debug_NoLibs x64 Debug Win32 Debug x64 Release_NoLibs Win32 Release_NoLibs x64 Release Win32 Release x64 {712DD83B-786E-485E-83C7-7197DD851B78} mmls Win32Proj Application Unicode v140_xp Application Unicode v140_xp Application Unicode true v140_xp Application Unicode true v140_xp Application Unicode true v140_xp Application Unicode true v140_xp Application Unicode v140_xp Application Unicode v140_xp <_ProjectFileVersion>10.0.30319.1 $(SolutionDir)$(Configuration)\ $(OutDir) $(IntDir) $(IntDir) true true 
$(SolutionDir)$(Configuration)\ $(SolutionDir)$(Configuration)\ $(OutDir) $(OutDir) $(IntDir) $(IntDir) $(IntDir) $(IntDir) false false false false $(SolutionDir)$(Configuration)\ $(OutDir) $(IntDir) $(IntDir) true true Disabled $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) WIN32;_CRT_SECURE_NO_WARNINGS;_DEBUG;_CONSOLE;_ITERATOR_DEBUG_LEVEL=2;WINVER=0x0601;%(PreprocessorDefinitions) true EnableFastChecks MultiThreadedDebugDLL Level3 EditAndContinue libvhdi.lib;libvmdk.lib;libewf.lib;zlib.lib;%(AdditionalDependencies) $(TskNugetLibs);%(AdditionalLibraryDirectories) true Console false MachineX86 Console Disabled $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) WIN32;_CRT_SECURE_NO_WARNINGS;_DEBUG;_CONSOLE;WINVER=0x0601;%(PreprocessorDefinitions) EnableFastChecks MultiThreadedDebugDLL Level3 ProgramDatabase libvhdi.lib;libvmdk.lib;libewf.lib;zlib.lib;%(AdditionalDependencies) $(TskNugetLibs);%(AdditionalLibraryDirectories) true Console false Console $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) WIN32;_CRT_SECURE_NO_WARNINGS;NDEBUG;_CONSOLE;WINVER=0x0501;%(PreprocessorDefinitions) MultiThreadedDLL Level3 ProgramDatabase libvhdi.lib;libvmdk.lib;libewf.lib;zlib.lib;%(AdditionalDependencies) $(TskNugetLibs);%(AdditionalLibraryDirectories) true Console true true false MachineX86 Console $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) WIN32;_CRT_SECURE_NO_WARNINGS;NDEBUG;_CONSOLE;WINVER=0x0601;%(PreprocessorDefinitions) MultiThreaded Level3 ProgramDatabase false %(AdditionalDependencies) %(AdditionalLibraryDirectories) true Console true true false MachineX86 Console $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) WIN32;_CRT_SECURE_NO_WARNINGS;NDEBUG;_CONSOLE;WINVER=0x0601;%(PreprocessorDefinitions) MultiThreadedDLL Level3 ProgramDatabase libvhdi.lib;libvmdk.lib;libewf.lib;zlib.lib;libcrypto.lib;%(AdditionalDependencies) $(TskNugetLibs);%(AdditionalLibraryDirectories) true Console true true false Console 
$(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) WIN32;_CRT_SECURE_NO_WARNINGS;NDEBUG;_CONSOLE;WINVER=0x0601;%(PreprocessorDefinitions) MultiThreaded Level3 ProgramDatabase false %(AdditionalDependencies) %(AdditionalLibraryDirectories) true Console true true false Console Disabled $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) WIN32;_CRT_SECURE_NO_WARNINGS;_DEBUG;_CONSOLE;WINVER=0x0601;%(PreprocessorDefinitions) true EnableFastChecks MultiThreadedDebugDLL Level3 EditAndContinue %(AdditionalLibraryDirectories) true Console false MachineX86 Console Disabled $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) WIN32;_CRT_SECURE_NO_WARNINGS;_DEBUG;_CONSOLE;WINVER=0x0601;%(PreprocessorDefinitions) EnableFastChecks MultiThreadedDebug Level3 ProgramDatabase %(AdditionalLibraryDirectories) true Console false Console {76efc06c-1f64-4478-abe8-79832716b393} false sleuthkit-4.11.1/win32/fcat/fcat.vcxproj000755 000765 000024 00000047251 14137073414 020635 0ustar00carrierstaff000000 000000  Debug_NoLibs Win32 Debug_NoLibs x64 Debug Win32 Debug x64 Release_NoLibs Win32 Release_NoLibs x64 Release Win32 Release x64 {E4A40368-152D-4D54-9E2E-4B140212F98F} fcat Win32Proj Application Unicode v140_xp Application Unicode v140_xp Application Unicode true v140_xp Application Unicode true v140_xp Application Unicode true v140_xp Application Unicode true v140_xp Application Unicode v140_xp Application Unicode v140_xp <_ProjectFileVersion>10.0.30319.1 $(SolutionDir)$(Configuration)\ $(OutDir) $(IntDir) $(IntDir) true true $(SolutionDir)$(Configuration)\ $(SolutionDir)$(Configuration)\ $(OutDir) $(OutDir) $(IntDir) $(IntDir) $(IntDir) $(IntDir) false false false false $(SolutionDir)$(Configuration)\ $(OutDir) $(IntDir) $(IntDir) true true Disabled $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) WIN32;_CRT_SECURE_NO_WARNINGS;_DEBUG;_CONSOLE;_ITERATOR_DEBUG_LEVEL=2;WINVER=0x0601;%(PreprocessorDefinitions) true EnableFastChecks MultiThreadedDebugDLL Level3 EditAndContinue 
libvhdi.lib;libvmdk.lib;libewf.lib;zlib.lib;%(AdditionalDependencies) $(TskNugetLibs);%(AdditionalLibraryDirectories) true Console false MachineX86 Console Disabled $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) WIN32;_CRT_SECURE_NO_WARNINGS;_DEBUG;_CONSOLE;WINVER=0x0601;%(PreprocessorDefinitions) EnableFastChecks MultiThreadedDebugDLL Level3 ProgramDatabase libvhdi.lib;libvmdk.lib;libewf.lib;zlib.lib;%(AdditionalDependencies) $(TskNugetLibs);%(AdditionalLibraryDirectories) true Console false Console $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) WIN32;_CRT_SECURE_NO_WARNINGS;NDEBUG;_CONSOLE;WINVER=0x0501;%(PreprocessorDefinitions) MultiThreadedDLL Level3 ProgramDatabase libvhdi.lib;libvmdk.lib;libewf.lib;zlib.lib;%(AdditionalDependencies) $(TskNugetLibs);%(AdditionalLibraryDirectories) true Console true true false MachineX86 Console $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) WIN32;_CRT_SECURE_NO_WARNINGS;NDEBUG;_CONSOLE;WINVER=0x0601;%(PreprocessorDefinitions) MultiThreaded Level3 ProgramDatabase false %(AdditionalDependencies) %(AdditionalLibraryDirectories) true Console true true false MachineX86 Console $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) WIN32;_CRT_SECURE_NO_WARNINGS;NDEBUG;_CONSOLE;WINVER=0x0601;%(PreprocessorDefinitions) MultiThreadedDLL Level3 ProgramDatabase libvhdi.lib;libvmdk.lib;libewf.lib;zlib.lib;libcrypto.lib;%(AdditionalDependencies) $(TskNugetLibs);%(AdditionalLibraryDirectories) true Console true true false Console $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) WIN32;_CRT_SECURE_NO_WARNINGS;NDEBUG;_CONSOLE;WINVER=0x0601;%(PreprocessorDefinitions) MultiThreaded Level3 ProgramDatabase false %(AdditionalDependencies) %(AdditionalLibraryDirectories) true Console true true false Console Disabled $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) WIN32;_CRT_SECURE_NO_WARNINGS;_DEBUG;_CONSOLE;WINVER=0x0601;%(PreprocessorDefinitions) true EnableFastChecks MultiThreadedDebugDLL Level3 EditAndContinue 
%(AdditionalLibraryDirectories) true Console false MachineX86 Console Disabled $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) WIN32;_CRT_SECURE_NO_WARNINGS;_DEBUG;_CONSOLE;WINVER=0x0601;%(PreprocessorDefinitions) EnableFastChecks MultiThreadedDebug Level3 ProgramDatabase %(AdditionalLibraryDirectories) true Console false Console {76efc06c-1f64-4478-abe8-79832716b393} false sleuthkit-4.11.1/win32/img_stat/img_stat.vcxproj000755 000765 000024 00000047214 14137073414 022420 0ustar00carrierstaff000000 000000  Debug_NoLibs Win32 Debug_NoLibs x64 Debug Win32 Debug x64 Release_NoLibs Win32 Release_NoLibs x64 Release Win32 Release x64 {48F52EA8-A5D1-4BF4-B774-6ECFCB0CE3C9} img_stat Win32Proj Application Unicode v140_xp Application Unicode v140_xp Application Unicode true v140_xp Application Unicode true v140_xp Application Unicode true v140_xp Application Unicode true v140_xp Application Unicode v140_xp Application Unicode v140_xp <_ProjectFileVersion>10.0.30319.1 $(SolutionDir)$(Configuration)\ $(OutDir) $(IntDir) $(IntDir) true true $(SolutionDir)$(Configuration)\ $(SolutionDir)$(Configuration)\ $(OutDir) $(OutDir) $(IntDir) $(IntDir) $(IntDir) $(IntDir) false false false false $(SolutionDir)$(Configuration)\ $(OutDir) $(IntDir) $(IntDir) true true Disabled $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) WIN32;_CRT_SECURE_NO_WARNINGS;_DEBUG;_CONSOLE;WINVER=0x0601;%(PreprocessorDefinitions) true EnableFastChecks MultiThreadedDebugDLL Level3 EditAndContinue libvhdi.lib;libvmdk.lib;libewf.lib;zlib.lib;%(AdditionalDependencies) $(TskNugetLibs);%(AdditionalLibraryDirectories) true Console false MachineX86 Console Disabled $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) WIN32;_CRT_SECURE_NO_WARNINGS;_DEBUG;_CONSOLE;WINVER=0x0601;%(PreprocessorDefinitions) EnableFastChecks MultiThreadedDebugDLL Level3 ProgramDatabase libvhdi.lib;libvmdk.lib;libewf.lib;zlib.lib;%(AdditionalDependencies) $(TskNugetLibs);%(AdditionalLibraryDirectories) true Console false Console 
$(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) WIN32;_CRT_SECURE_NO_WARNINGS;NDEBUG;_CONSOLE;WINVER=0x0501;%(PreprocessorDefinitions) MultiThreadedDLL Level3 ProgramDatabase libvhdi.lib;libvmdk.lib;libewf.lib;zlib.lib;%(AdditionalDependencies) $(TskNugetLibs);%(AdditionalLibraryDirectories) true Console true true false MachineX86 Console $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) WIN32;_CRT_SECURE_NO_WARNINGS;NDEBUG;_CONSOLE;WINVER=0x0601;%(PreprocessorDefinitions) MultiThreaded Level3 ProgramDatabase false %(AdditionalDependencies) %(AdditionalLibraryDirectories) true Console true true false MachineX86 Console $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) WIN32;_CRT_SECURE_NO_WARNINGS;NDEBUG;_CONSOLE;WINVER=0x0601;%(PreprocessorDefinitions) MultiThreadedDLL Level3 ProgramDatabase libvhdi.lib;libvmdk.lib;libewf.lib;zlib.lib;%(AdditionalDependencies) $(TskNugetLibs);%(AdditionalLibraryDirectories) true Console true true false Console $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) WIN32;_CRT_SECURE_NO_WARNINGS;NDEBUG;_CONSOLE;WINVER=0x0601;%(PreprocessorDefinitions) MultiThreaded Level3 ProgramDatabase false %(AdditionalDependencies) %(AdditionalLibraryDirectories) true Console true true false Console Disabled $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) WIN32;_CRT_SECURE_NO_WARNINGS;_DEBUG;_CONSOLE;WINVER=0x0601;%(PreprocessorDefinitions) true EnableFastChecks MultiThreadedDebugDLL Level3 EditAndContinue %(AdditionalLibraryDirectories) true Console false MachineX86 Console Disabled $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) WIN32;_CRT_SECURE_NO_WARNINGS;_DEBUG;_CONSOLE;WINVER=0x0601;%(PreprocessorDefinitions) EnableFastChecks MultiThreadedDebug Level3 ProgramDatabase %(AdditionalLibraryDirectories) true Console false Console {76efc06c-1f64-4478-abe8-79832716b393} false sleuthkit-4.11.1/win32/icat/icat.vcxproj000755 000765 000024 00000050241 14137073414 020634 0ustar00carrierstaff000000 000000  Debug_NoLibs Win32 
Debug_NoLibs x64 Debug Win32 Debug x64 Release_NoLibs Win32 Release_NoLibs x64 Release Win32 Release x64 {38D89022-2C83-4436-A333-375A2E3E7BB0} icat Win32Proj Application Unicode v140_xp Application Unicode v140_xp Application Unicode true v140_xp Application Unicode true v140_xp Application Unicode true v140_xp Application Unicode true v140_xp Application Unicode v140_xp Application Unicode v140_xp <_ProjectFileVersion>10.0.30319.1 $(SolutionDir)$(Configuration)\ $(OutDir) $(IntDir) $(IntDir) true true $(SolutionDir)$(Configuration)\ $(SolutionDir)$(Configuration)\ $(OutDir) $(OutDir) $(IntDir) $(IntDir) $(IntDir) $(IntDir) false false false false $(SolutionDir)$(Configuration)\ $(OutDir) $(IntDir) $(IntDir) true true Disabled $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) WIN32;_CRT_SECURE_NO_WARNINGS;_DEBUG;_CONSOLE;_ITERATOR_DEBUG_LEVEL=2;WINVER=0x0601;%(PreprocessorDefinitions) true EnableFastChecks MultiThreadedDebugDLL Level3 EditAndContinue 4200 libvhdi.lib;libvmdk.lib;libewf.lib;zlib.lib;%(AdditionalDependencies) $(TskNugetLibs);%(AdditionalLibraryDirectories) true Console false MachineX86 Console Disabled $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) WIN32;_CRT_SECURE_NO_WARNINGS;_DEBUG;_CONSOLE;WINVER=0x0601;%(PreprocessorDefinitions) EnableFastChecks MultiThreadedDebugDLL Level3 ProgramDatabase 4200 libvhdi.lib;libvmdk.lib;libewf.lib;zlib.lib;%(AdditionalDependencies) $(TskNugetLibs);%(AdditionalLibraryDirectories) true Console false Console $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) WIN32;_CRT_SECURE_NO_WARNINGS;NDEBUG;_CONSOLE;WINVER=0x0501;%(PreprocessorDefinitions) MultiThreadedDLL Level3 ProgramDatabase 4200 libvhdi.lib;libvmdk.lib;libewf.lib;zlib.lib;%(AdditionalDependencies) $(TskNugetLibs);%(AdditionalLibraryDirectories) true Console true true false MachineX86 Console $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) WIN32;_CRT_SECURE_NO_WARNINGS;NDEBUG;_CONSOLE;WINVER=0x0601;%(PreprocessorDefinitions) MultiThreaded 
Level3 ProgramDatabase false 4200 %(AdditionalDependencies) %(AdditionalLibraryDirectories) true Console true true false MachineX86 Console $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) WIN32;_CRT_SECURE_NO_WARNINGS;NDEBUG;_CONSOLE;WINVER=0x0601;%(PreprocessorDefinitions) MultiThreadedDLL Level3 ProgramDatabase 4200 libvhdi.lib;libvmdk.lib;libewf.lib;zlib.lib;libcrypto.lib;%(AdditionalDependencies) $(TskNugetLibs);%(AdditionalLibraryDirectories) true Console true true false Console $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) WIN32;_CRT_SECURE_NO_WARNINGS;NDEBUG;_CONSOLE;WINVER=0x0601;%(PreprocessorDefinitions) MultiThreaded Level3 ProgramDatabase false 4200 %(AdditionalDependencies) %(AdditionalLibraryDirectories) true Console true true false Console Disabled $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) WIN32;_CRT_SECURE_NO_WARNINGS;_DEBUG;_CONSOLE;WINVER=0x0601;%(PreprocessorDefinitions) true EnableFastChecks MultiThreadedDebugDLL Level3 EditAndContinue 4200 %(AdditionalLibraryDirectories) true Console false MachineX86 Console Disabled $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) WIN32;_CRT_SECURE_NO_WARNINGS;_DEBUG;_CONSOLE;WINVER=0x0601;%(PreprocessorDefinitions) EnableFastChecks MultiThreadedDebug Level3 ProgramDatabase 4200 %(AdditionalLibraryDirectories) true Console false Console {76efc06c-1f64-4478-abe8-79832716b393} false sleuthkit-4.11.1/win32/fsstat/fsstat.vcxproj000755 000765 000024 00000050245 14137073414 021610 0ustar00carrierstaff000000 000000  Debug_NoLibs Win32 Debug_NoLibs x64 Debug Win32 Debug x64 Release_NoLibs Win32 Release_NoLibs x64 Release Win32 Release x64 {D1E6567A-4F65-4832-8018-D33B3CB4692B} fsstat Win32Proj Application Unicode v140_xp Application Unicode v140_xp Application Unicode true v140_xp Application Unicode true v140_xp Application Unicode true v140_xp Application Unicode true v140_xp Application Unicode v140_xp Application Unicode v140_xp <_ProjectFileVersion>10.0.30319.1 
$(SolutionDir)$(Configuration)\ $(OutDir) $(IntDir) $(IntDir) true true $(SolutionDir)$(Configuration)\ $(SolutionDir)$(Configuration)\ $(OutDir) $(OutDir) $(IntDir) $(IntDir) $(IntDir) $(IntDir) false false false false $(SolutionDir)$(Configuration)\ $(OutDir) $(IntDir) $(IntDir) true true Disabled $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) WIN32;_CRT_SECURE_NO_WARNINGS;_DEBUG;_CONSOLE;_ITERATOR_DEBUG_LEVEL=2;WINVER=0x0601;%(PreprocessorDefinitions) true EnableFastChecks MultiThreadedDebugDLL Level3 EditAndContinue 4200 libvhdi.lib;libvmdk.lib;libewf.lib;zlib.lib;%(AdditionalDependencies) $(TskNugetLibs);%(AdditionalLibraryDirectories) true Console false MachineX86 Console Disabled $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) WIN32;_CRT_SECURE_NO_WARNINGS;_DEBUG;_CONSOLE;WINVER=0x0601;%(PreprocessorDefinitions) EnableFastChecks MultiThreadedDebugDLL Level3 ProgramDatabase 4200 libvhdi.lib;libvmdk.lib;libewf.lib;zlib.lib;%(AdditionalDependencies) $(TskNugetLibs);%(AdditionalLibraryDirectories) true Console false Console $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) WIN32;_CRT_SECURE_NO_WARNINGS;NDEBUG;_CONSOLE;WINVER=0x0501;%(PreprocessorDefinitions) MultiThreadedDLL Level3 ProgramDatabase 4200 libvhdi.lib;libvmdk.lib;libewf.lib;zlib.lib;%(AdditionalDependencies) $(TskNugetLibs);%(AdditionalLibraryDirectories) true Console true true false MachineX86 Console $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) WIN32;_CRT_SECURE_NO_WARNINGS;NDEBUG;_CONSOLE;WINVER=0x0601;%(PreprocessorDefinitions) MultiThreaded Level3 ProgramDatabase false 4200 %(AdditionalDependencies) %(AdditionalLibraryDirectories) true Console true true false MachineX86 Console $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) WIN32;_CRT_SECURE_NO_WARNINGS;NDEBUG;_CONSOLE;WINVER=0x0601;%(PreprocessorDefinitions) MultiThreadedDLL Level3 ProgramDatabase 4200 libvhdi.lib;libvmdk.lib;libewf.lib;zlib.lib;libcrypto.lib;%(AdditionalDependencies) 
$(TskNugetLibs);%(AdditionalLibraryDirectories) true Console true true false Console $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) WIN32;_CRT_SECURE_NO_WARNINGS;NDEBUG;_CONSOLE;WINVER=0x0601;%(PreprocessorDefinitions) MultiThreaded Level3 ProgramDatabase false 4200 %(AdditionalDependencies) %(AdditionalLibraryDirectories) true Console true true false Console Disabled $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) WIN32;_CRT_SECURE_NO_WARNINGS;_DEBUG;_CONSOLE;WINVER=0x0601;%(PreprocessorDefinitions) true EnableFastChecks MultiThreadedDebugDLL Level3 EditAndContinue 4200 %(AdditionalLibraryDirectories) true Console false MachineX86 Console Disabled $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) WIN32;_CRT_SECURE_NO_WARNINGS;_DEBUG;_CONSOLE;WINVER=0x0601;%(PreprocessorDefinitions) EnableFastChecks MultiThreadedDebug Level3 ProgramDatabase 4200 %(AdditionalLibraryDirectories) true Console false Console {76efc06c-1f64-4478-abe8-79832716b393} false sleuthkit-4.11.1/win32/fls/fls.vcxproj000755 000765 000024 00000050267 14137073414 020354 0ustar00carrierstaff000000 000000  Debug_NoLibs Win32 Debug_NoLibs x64 Debug Win32 Debug x64 Release_NoLibs Win32 Release_NoLibs x64 Release Win32 Release x64 {58DA1042-AC19-4779-AC1A-AA8EEB3A4524} fls Win32Proj Application Unicode v140_xp Application Unicode v140_xp Application Unicode true v140_xp Application Unicode true v140_xp Application Unicode true v140_xp Application Unicode true v140_xp Application Unicode v140_xp Application Unicode v140_xp <_ProjectFileVersion>10.0.30319.1 $(SolutionDir)$(Configuration)\ $(OutDir) $(IntDir) $(IntDir) true true $(SolutionDir)$(Configuration)\ $(SolutionDir)$(Configuration)\ $(OutDir) $(OutDir) $(IntDir) $(IntDir) $(IntDir) $(IntDir) false false false false $(SolutionDir)$(Configuration)\ $(OutDir) $(IntDir) $(IntDir) true true Disabled $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) 
_CRT_SECURE_NO_DEPRECATE;_CRT_SECURE_NO_WARNINGS;_ITERATOR_DEBUG_LEVEL=2;WINVER=0x0601;%(PreprocessorDefinitions) true EnableFastChecks MultiThreadedDebugDLL Level3 EditAndContinue 4200 libvhdi.lib;libvmdk.lib;libewf.lib;zlib.lib;%(AdditionalDependencies) $(TskNugetLibs);%(AdditionalLibraryDirectories) true Console false MachineX86 Console Disabled $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) _CRT_SECURE_NO_DEPRECATE;_CRT_SECURE_NO_WARNINGS;WINVER=0x0601;%(PreprocessorDefinitions) EnableFastChecks MultiThreadedDebugDLL Level3 ProgramDatabase 4200 libvhdi.lib;libvmdk.lib;libewf.lib;zlib.lib;%(AdditionalDependencies) $(TskNugetLibs);%(AdditionalLibraryDirectories) true Console false Console $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) _CRT_SECURE_NO_DEPRECATE;_CRT_SECURE_NO_WARNINGS;WINVER=0x0501;%(PreprocessorDefinitions) MultiThreadedDLL Level3 ProgramDatabase 4200 libvhdi.lib;libvmdk.lib;libewf.lib;zlib.lib;%(AdditionalDependencies) $(TskNugetLibs);%(AdditionalLibraryDirectories) true Console true true false MachineX86 Console $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) _CRT_SECURE_NO_DEPRECATE;_CRT_SECURE_NO_WARNINGS;WINVER=0x0601;%(PreprocessorDefinitions) MultiThreaded Level3 ProgramDatabase false 4200 %(AdditionalDependencies) %(AdditionalLibraryDirectories) true Console true true false MachineX86 Console $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) _CRT_SECURE_NO_DEPRECATE;_CRT_SECURE_NO_WARNINGS;WINVER=0x0601;%(PreprocessorDefinitions) MultiThreadedDLL Level3 ProgramDatabase 4200 libvhdi.lib;libvmdk.lib;libewf.lib;zlib.lib;libcrypto.lib;%(AdditionalDependencies) $(TskNugetLibs);%(AdditionalLibraryDirectories) true Console true true false Console $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) _CRT_SECURE_NO_DEPRECATE;_CRT_SECURE_NO_WARNINGS;WINVER=0x0601;%(PreprocessorDefinitions) MultiThreaded Level3 ProgramDatabase false 4200 %(AdditionalDependencies) %(AdditionalLibraryDirectories) true Console true true false Console 
Disabled $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) _CRT_SECURE_NO_DEPRECATE;_CRT_SECURE_NO_WARNINGS;WINVER=0x0601;%(PreprocessorDefinitions) true EnableFastChecks MultiThreadedDebugDLL Level3 EditAndContinue 4200 %(AdditionalLibraryDirectories) true Console false MachineX86 Console Disabled $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) _CRT_SECURE_NO_DEPRECATE;_CRT_SECURE_NO_WARNINGS;WINVER=0x0601;%(PreprocessorDefinitions) EnableFastChecks MultiThreadedDebug Level3 ProgramDatabase 4200 %(AdditionalLibraryDirectories) true Console false Console {76efc06c-1f64-4478-abe8-79832716b393} false sleuthkit-4.11.1/win32/tsk_imageinfo/tsk_imageinfo.vcxproj000644 000765 000024 00000047015 14137073414 024434 0ustar00carrierstaff000000 000000  Debug_NoLibs Win32 Debug_NoLibs x64 Debug Win32 Debug x64 Release_NoLibs Win32 Release_NoLibs x64 Release Win32 Release x64 {09E82DEB-4BC3-44B5-A9F6-4D65F885F88C} tsk_imageinfo 8.1 Application Unicode true v140_xp Application Unicode false v140_xp Application Unicode true true v140_xp Application Unicode false true v140_xp Application Unicode false true v140_xp Application Unicode false true v140_xp Application Unicode true v140_xp Application Unicode false v140_xp <_ProjectFileVersion>10.0.30319.1 $(SolutionDir)$(Configuration)\ $(OutDir) $(IntDir) $(IntDir) true true $(SolutionDir)$(Configuration)\ $(SolutionDir)$(Configuration)\ $(OutDir) $(OutDir) $(IntDir) $(IntDir) $(IntDir) $(IntDir) false false false false $(SolutionDir)$(Configuration)\ $(OutDir) $(IntDir) $(IntDir) true true false false Disabled $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) WIN32;_CRT_SECURE_NO_WARNINGS;_DEBUG;_CRT_SECURE_NO_DEPRECATE;_ITERATOR_DEBUG_LEVEL=2;WINVER=0x0601;%(PreprocessorDefinitions) MultiThreadedDebugDLL Level3 ProgramDatabase libvhdi.lib;libvmdk.lib;libewf.lib;zlib.lib;%(AdditionalDependencies) $(TskNugetLibs);%(AdditionalLibraryDirectories) true true MachineX86 Console Disabled 
$(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) WIN32;_CRT_SECURE_NO_WARNINGS;_DEBUG;_CRT_SECURE_NO_DEPRECATE;WINVER=0x0601;%(PreprocessorDefinitions) MultiThreadedDebugDLL Level3 ProgramDatabase libvhdi.lib;libvmdk.lib;libewf.lib;zlib.lib;%(AdditionalDependencies) $(TskNugetLibs);%(AdditionalLibraryDirectories) true true Console $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) WIN32;_CRT_SECURE_NO_WARNINGS;NDEBUG;_CRT_SECURE_NO_DEPRECATE;WINVER=0x0501;%(PreprocessorDefinitions) MultiThreadedDLL Level3 ProgramDatabase libvhdi.lib;libvmdk.lib;libewf.lib;zlib.lib;%(AdditionalDependencies) $(TskNugetLibs);%(AdditionalLibraryDirectories) true MachineX86 Console $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) WIN32;_CRT_SECURE_NO_WARNINGS;NDEBUG;_CRT_SECURE_NO_DEPRECATE;WINVER=0x0601;%(PreprocessorDefinitions) MultiThreaded Level3 ProgramDatabase false Sync false true %(AdditionalDependencies) %(AdditionalLibraryDirectories) true MachineX86 Console $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) WIN32;_CRT_SECURE_NO_WARNINGS;NDEBUG;_CRT_SECURE_NO_DEPRECATE;WINVER=0x0601;%(PreprocessorDefinitions) MultiThreadedDLL Level3 ProgramDatabase libvhdi.lib;libvmdk.lib;libewf.lib;zlib.lib;libcrypto.lib;%(AdditionalDependencies) $(TskNugetLibs);%(AdditionalLibraryDirectories) true true true Console $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) WIN32;_CRT_SECURE_NO_WARNINGS;NDEBUG;_CRT_SECURE_NO_DEPRECATE;WINVER=0x0601;%(PreprocessorDefinitions) MultiThreaded NotUsing Level3 ProgramDatabase false false true Async %(AdditionalDependencies) %(AdditionalLibraryDirectories) true true true Console Disabled $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) WIN32;_CRT_SECURE_NO_WARNINGS;_DEBUG;_CRT_SECURE_NO_DEPRECATE;WINVER=0x0601;%(PreprocessorDefinitions) MultiThreadedDebugDLL Level3 ProgramDatabase %(AdditionalLibraryDirectories) true true MachineX86 Console Disabled $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) 
WIN32;_CRT_SECURE_NO_WARNINGS;_DEBUG;_CRT_SECURE_NO_DEPRECATE;WINVER=0x0601;%(PreprocessorDefinitions) MultiThreadedDebug Level3 ProgramDatabase %(AdditionalLibraryDirectories) true true Console true true true true true true {76efc06c-1f64-4478-abe8-79832716b393} false sleuthkit-4.11.1/win32/tsk_jni/tsk_jni.vcxproj000755 000765 000024 00000055321 14137073414 022102 0ustar00carrierstaff000000 000000  Debug_NoLibs Win32 Debug_NoLibs x64 Debug Win32 Debug x64 Release_NoLibs Win32 Release_NoLibs x64 Release Win32 Release x64 libtsk_jni {62D88133-09F6-4E13-B39F-36FCEFBE4FAF} tsk_jni Win32Proj DynamicLibrary Unicode v140_xp DynamicLibrary Unicode v140_xp DynamicLibrary false false Unicode true v140_xp DynamicLibrary false false Unicode true v140_xp DynamicLibrary false false Unicode true v140_xp DynamicLibrary false false Unicode true v140_xp DynamicLibrary Unicode v140_xp DynamicLibrary Unicode v140_xp <_ProjectFileVersion>10.0.30319.1 $(SolutionDir)$(Configuration)\ $(OutDir) $(IntDir) $(IntDir) true true $(SolutionDir)$(Configuration)\ $(SolutionDir)$(Configuration)\ $(OutDir) $(OutDir) $(IntDir) $(IntDir) $(IntDir) $(IntDir) false false false false $(SolutionDir)$(Configuration)\ $(OutDir) $(IntDir) $(IntDir) true true Disabled $(JDK_HOME)\include;$(JDK_HOME)\include\win32;$(ProjectDir)\..\..;$(ProjectDir)\..\..\tsk\hashdb;$(TskNugetIncludes);%(AdditionalIncludeDirectories) WIN32;_CRT_SECURE_NO_WARNINGS;_DEBUG;_WINDOWS;_USRDLL;HAVE_LIBEWF;TSK_JNI_EXPORTS;_CRT_SECURE_NO_WARNINGS;WINVER=0x0601;%(PreprocessorDefinitions) true EnableFastChecks MultiThreadedDebugDLL Level3 EditAndContinue false libvhdi.lib;libvmdk.lib;libewf.lib;zlib.lib;%(AdditionalDependencies) $(TskNugetLibs);%(AdditionalLibraryDirectories) true Console MachineX86 true $(TargetDir)$(TargetName).map Windows Disabled $(JDK_HOME)\include;$(JDK_HOME)\include\win32;$(ProjectDir)\..\..;$(TskNugetIncludes);%(AdditionalIncludeDirectories) 
WIN32;_CRT_SECURE_NO_WARNINGS;_DEBUG;_WINDOWS;_USRDLL;HAVE_LIBEWF;TSK_JNI_EXPORTS;_CRT_SECURE_NO_WARNINGS;WINVER=0x0601;%(PreprocessorDefinitions) EnableFastChecks MultiThreadedDebugDLL Level3 ProgramDatabase false libvhdi.lib;libvmdk.lib;libewf.lib;zlib.lib;%(AdditionalDependencies) $(TskNugetLibs);%(AdditionalLibraryDirectories) true Console true $(TargetDir)$(TargetName).map Console MaxSpeed true $(JDK_HOME)\include;$(JDK_HOME)\include\win32;$(ProjectDir)\..\..;$(ProjectDir)\..\..\tsk\hashdb;$(TskNugetIncludes);%(AdditionalIncludeDirectories) WIN32;_CRT_SECURE_NO_WARNINGS;NDEBUG;_WINDOWS;_USRDLL;HAVE_LIBEWF;TSK_JNI_EXPORTS;_CRT_SECURE_NO_WARNINGS;WINVER=0x0501;%(PreprocessorDefinitions) MultiThreadedDLL true Level3 libvhdi.lib;libvmdk.lib;libewf.lib;zlib.lib;%(AdditionalDependencies) $(TskNugetLibs);%(AdditionalLibraryDirectories) false Console true true MachineX86 true $(TargetDir)$(TargetName).map Windows MaxSpeed true $(JDK_HOME)\include;$(JDK_HOME)\include\win32;$(ProjectDir)\..\..;$(ProjectDir)\..\..\tsk\hashdb;%(AdditionalIncludeDirectories) WIN32;_CRT_SECURE_NO_WARNINGS;NDEBUG;_WINDOWS;_USRDLL;TSK_JNI_EXPORTS;_CRT_SECURE_NO_WARNINGS;WINVER=0x0601;%(PreprocessorDefinitions) MultiThreaded true Level3 false %(AdditionalDependencies) %(AdditionalLibraryDirectories) false Console true true MachineX86 true $(TargetDir)$(TargetName).map Windows MaxSpeed true $(JDK_HOME)\include;$(JDK_HOME)\include\win32;$(ProjectDir)\..\..;$(TskNugetIncludes);%(AdditionalIncludeDirectories) WIN32;_CRT_SECURE_NO_WARNINGS;NDEBUG;_WINDOWS;_USRDLL;TSK_JNI_EXPORTS;HAVE_LIBEWF;HAVE_LIBOPENSSL;WINVER=0x0601;%(PreprocessorDefinitions) MultiThreadedDLL true Level3 libvhdi.lib;libvmdk.lib;libewf.lib;zlib.lib;libcrypto.lib;%(AdditionalDependencies) $(TskNugetLibs);%(AdditionalLibraryDirectories) false Console true true true $(TargetDir)$(TargetName).map Console MaxSpeed true $(JDK_HOME)\include;$(JDK_HOME)\include\win32;$(ProjectDir)\..\..;%(AdditionalIncludeDirectories) 
WIN32;_CRT_SECURE_NO_WARNINGS;NDEBUG;_WINDOWS;_USRDLL;TSK_JNI_EXPORTS;_CRT_SECURE_NO_WARNINGS;WINVER=0x0601;%(PreprocessorDefinitions) MultiThreaded true Level3 false %(AdditionalDependencies) %(AdditionalLibraryDirectories) false Console true true true $(TargetDir)$(TargetName).map Console Disabled $(JDK_HOME)\include;$(JDK_HOME)\include\win32;$(ProjectDir)\..\..;%(AdditionalIncludeDirectories) WIN32;_CRT_SECURE_NO_WARNINGS;_DEBUG;_WINDOWS;_USRDLL;TSK_JNI_EXPORTS;_CRT_SECURE_NO_WARNINGS;WINVER=0x0601;%(PreprocessorDefinitions) true EnableFastChecks MultiThreadedDebugDLL Level3 EditAndContinue false %(AdditionalLibraryDirectories) true Console MachineX86 true $(TargetDir)$(TargetName).map Windows Disabled $(JDK_HOME)\include;$(JDK_HOME)\include\win32;$(ProjectDir)\..\..;%(AdditionalIncludeDirectories) WIN32;_CRT_SECURE_NO_WARNINGS;_DEBUG;_WINDOWS;_USRDLL;TSK_JNI_EXPORTS;_CRT_SECURE_NO_WARNINGS;WINVER=0x0601;%(PreprocessorDefinitions) EnableFastChecks MultiThreadedDebug Level3 ProgramDatabase false %(AdditionalLibraryDirectories) true Console true $(TargetDir)$(TargetName).map Console {76efc06c-1f64-4478-abe8-79832716b393} false sleuthkit-4.11.1/win32/tsk_recover/tsk_recover.vcxproj000755 000765 000024 00000045726 14137073414 023664 0ustar00carrierstaff000000 000000  Debug_NoLibs Win32 Debug_NoLibs x64 Debug Win32 Debug x64 Release_NoLibs Win32 Release_NoLibs x64 Release Win32 Release x64 {06D707E5-68FF-4FC4-AFD0-C84584E32F47} tsk_recover Win32Proj Application false v140_xp Application false Unicode v140_xp Application Unicode true v140_xp Application Unicode true v140_xp Application Unicode true v140_xp Application Unicode true v140_xp Application Unicode v140_xp Application Unicode v140_xp <_ProjectFileVersion>10.0.30319.1 $(SolutionDir)$(Configuration)\ $(OutDir) $(IntDir) $(IntDir) true true $(SolutionDir)$(Configuration)\ $(SolutionDir)$(Configuration)\ $(OutDir) $(OutDir) $(IntDir) $(IntDir) $(IntDir) $(IntDir) false false false false 
$(SolutionDir)$(Configuration)\ $(OutDir) $(IntDir) $(IntDir) Disabled $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) WIN32;_CRT_SECURE_NO_WARNINGS;_DEBUG;_CONSOLE;_CRT_SECURE_NO_DEPRECATE;_ITERATOR_DEBUG_LEVEL=2;WINVER=0x0601;%(PreprocessorDefinitions) true EnableFastChecks MultiThreadedDebugDLL Level3 EditAndContinue libvhdi.lib;libvmdk.lib;libewf.lib;zlib.lib;%(AdditionalDependencies) $(TskNugetLibs);%(AdditionalLibraryDirectories) true Console MachineX86 Console Disabled $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) WIN32;_CRT_SECURE_NO_WARNINGS;_DEBUG;_CONSOLE;_CRT_SECURE_NO_DEPRECATE;WINVER=0x0601;%(PreprocessorDefinitions) EnableFastChecks MultiThreadedDebugDLL Level3 ProgramDatabase libvhdi.lib;libvmdk.lib;libewf.lib;zlib.lib;%(AdditionalDependencies) $(TskNugetLibs);%(AdditionalLibraryDirectories) true Console Console MaxSpeed true $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) WIN32;_CRT_SECURE_NO_WARNINGS;NDEBUG;_CONSOLE;_CRT_SECURE_NO_DEPRECATE;WINVER=0x0501;%(PreprocessorDefinitions) MultiThreadedDLL true Level3 ProgramDatabase libvhdi.lib;libvmdk.lib;libewf.lib;zlib.lib;%(AdditionalDependencies) $(TskNugetLibs);%(AdditionalLibraryDirectories) true Console true true MachineX86 Console MaxSpeed true $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) WIN32;_CRT_SECURE_NO_WARNINGS;NDEBUG;_CONSOLE;_CRT_SECURE_NO_DEPRECATE;WINVER=0x0601;%(PreprocessorDefinitions) MultiThreaded true Level3 ProgramDatabase false %(AdditionalDependencies) %(AdditionalLibraryDirectories) true Console true true MachineX86 Console MaxSpeed true $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) WIN32;_CRT_SECURE_NO_WARNINGS;NDEBUG;_CONSOLE;_CRT_SECURE_NO_DEPRECATE;WINVER=0x0601;%(PreprocessorDefinitions) MultiThreadedDLL true Level3 ProgramDatabase libvhdi.lib;libvmdk.lib;libewf.lib;zlib.lib;libcrypto.lib;%(AdditionalDependencies) $(TskNugetLibs);%(AdditionalLibraryDirectories) true Console true true Console MaxSpeed true 
$(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) WIN32;_CRT_SECURE_NO_WARNINGS;NDEBUG;_CONSOLE;_CRT_SECURE_NO_DEPRECATE;WINVER=0x0601;%(PreprocessorDefinitions) MultiThreaded true Level3 ProgramDatabase false %(AdditionalDependencies) %(AdditionalLibraryDirectories) true Console true true Console $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) WIN32;_CRT_SECURE_NO_WARNINGS;_DEBUG;_CONSOLE;_CRT_SECURE_NO_DEPRECATE;WINVER=0x0601;%(PreprocessorDefinitions) true EnableFastChecks MultiThreadedDebugDLL EditAndContinue Disabled %(AdditionalLibraryDirectories) Console $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) WIN32;_CRT_SECURE_NO_WARNINGS;_DEBUG;_CONSOLE;_CRT_SECURE_NO_DEPRECATE;WINVER=0x0601;%(PreprocessorDefinitions) EnableFastChecks MultiThreadedDebug ProgramDatabase Disabled %(AdditionalLibraryDirectories) Console {76efc06c-1f64-4478-abe8-79832716b393} false sleuthkit-4.11.1/win32/ifind/ifind.vcxproj000755 000765 000024 00000047303 14137073414 021163 0ustar00carrierstaff000000 000000  Debug_NoLibs Win32 Debug_NoLibs x64 Debug Win32 Debug x64 Release_NoLibs Win32 Release_NoLibs x64 Release Win32 Release x64 {52251CB2-65A3-421B-9CB4-7DAC13BB3758} ifind Win32Proj Application Unicode v140_xp Application Unicode v140_xp Application Unicode true v140_xp Application Unicode true v140_xp Application Unicode true v140_xp Application Unicode true v140_xp Application Unicode v140_xp Application Unicode v140_xp <_ProjectFileVersion>10.0.30319.1 $(SolutionDir)$(Configuration)\ $(OutDir) $(IntDir) $(IntDir) true true $(SolutionDir)$(Configuration)\ $(SolutionDir)$(Configuration)\ $(OutDir) $(OutDir) $(IntDir) $(IntDir) $(IntDir) $(IntDir) false false false false $(SolutionDir)$(Configuration)\ $(OutDir) $(IntDir) $(IntDir) true true Disabled $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) _CRT_SECURE_NO_DEPRECATE;_CRT_SECURE_NO_WARNINGS;_ITERATOR_DEBUG_LEVEL=2;WINVER=0x0601;%(PreprocessorDefinitions) true EnableFastChecks MultiThreadedDebugDLL Level3 
EditAndContinue libvhdi.lib;libvmdk.lib;libewf.lib;zlib.lib;%(AdditionalDependencies) $(TskNugetLibs);%(AdditionalLibraryDirectories) true Console false MachineX86 Console Disabled $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) _CRT_SECURE_NO_DEPRECATE;_CRT_SECURE_NO_WARNINGS;WINVER=0x0601;%(PreprocessorDefinitions) EnableFastChecks MultiThreadedDebugDLL Level3 ProgramDatabase libvhdi.lib;libvmdk.lib;libewf.lib;zlib.lib;%(AdditionalDependencies) $(TskNugetLibs);%(AdditionalLibraryDirectories) true Console false Console $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) _CRT_SECURE_NO_DEPRECATE;_CRT_SECURE_NO_WARNINGS;WINVER=0x0501;%(PreprocessorDefinitions) MultiThreadedDLL Level3 ProgramDatabase libvhdi.lib;libvmdk.lib;libewf.lib;zlib.lib;%(AdditionalDependencies) $(TskNugetLibs);%(AdditionalLibraryDirectories) true Console true true false MachineX86 Console $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) _CRT_SECURE_NO_DEPRECATE;_CRT_SECURE_NO_WARNINGS;WINVER=0x0601;%(PreprocessorDefinitions) MultiThreaded Level3 ProgramDatabase false %(AdditionalDependencies) %(AdditionalLibraryDirectories) true Console true true false MachineX86 Console $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) _CRT_SECURE_NO_DEPRECATE;_CRT_SECURE_NO_WARNINGS;WINVER=0x0601;%(PreprocessorDefinitions) MultiThreadedDLL Level3 ProgramDatabase libvhdi.lib;libvmdk.lib;libewf.lib;zlib.lib;libcrypto.lib;%(AdditionalDependencies) $(TskNugetLibs);%(AdditionalLibraryDirectories) true Console true true false Console $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) _CRT_SECURE_NO_DEPRECATE;_CRT_SECURE_NO_WARNINGS;WINVER=0x0601;%(PreprocessorDefinitions) MultiThreaded Level3 ProgramDatabase false %(AdditionalDependencies) %(AdditionalLibraryDirectories) true Console true true false Console Disabled $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) _CRT_SECURE_NO_DEPRECATE;_CRT_SECURE_NO_WARNINGS;WINVER=0x0601;%(PreprocessorDefinitions) true EnableFastChecks 
MultiThreadedDebugDLL Level3 EditAndContinue %(AdditionalLibraryDirectories) true Console false MachineX86 Console Disabled $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) _CRT_SECURE_NO_DEPRECATE;_CRT_SECURE_NO_WARNINGS;WINVER=0x0601;%(PreprocessorDefinitions) EnableFastChecks MultiThreadedDebug Level3 ProgramDatabase %(AdditionalLibraryDirectories) true Console false Console {76efc06c-1f64-4478-abe8-79832716b393} false sleuthkit-4.11.1/win32/posix-sample/posix-sample.vcxproj000755 000765 000024 00000047507 14137073414 024051 0ustar00carrierstaff000000 000000  Debug_NoLibs Win32 Debug_NoLibs x64 Debug Win32 Debug x64 Release_NoLibs Win32 Release_NoLibs x64 Release Win32 Release x64 {1BA0B9E8-F135-494F-9CF5-86427C1F6E41} posixsample Win32Proj Application Unicode v140_xp Application Unicode v140_xp Application Unicode true v140_xp Application Unicode true v140_xp Application Unicode true v140_xp Application Unicode true v140_xp Application Unicode v140_xp Application Unicode v140_xp <_ProjectFileVersion>10.0.30319.1 $(SolutionDir)$(Configuration)\ $(OutDir) $(IntDir) $(IntDir) true true $(SolutionDir)$(Configuration)\ $(SolutionDir)$(Configuration)\ $(OutDir) $(OutDir) $(IntDir) $(IntDir) $(IntDir) $(IntDir) false false false false $(SolutionDir)$(Configuration)\ $(OutDir) $(IntDir) $(IntDir) true true Disabled $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) _CRT_SECURE_NO_WARNINGS;_CRT_SECURE_NO_DEPRECATE;WIN32;_DEBUG;_CONSOLE;_ITERATOR_DEBUG_LEVEL=2;WINVER=0x0601;%(PreprocessorDefinitions) true EnableFastChecks MultiThreadedDebugDLL Level3 EditAndContinue libvhdi.lib;libvmdk.lib;libewf.lib;zlib.lib;%(AdditionalDependencies) $(TskNugetLibs);%(AdditionalLibraryDirectories) true Console false MachineX86 Console Disabled $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) _CRT_SECURE_NO_WARNINGS;_CRT_SECURE_NO_DEPRECATE;WIN32;_DEBUG;_CONSOLE;WINVER=0x0601;%(PreprocessorDefinitions) EnableFastChecks MultiThreadedDebugDLL Level3 ProgramDatabase 
libvhdi.lib;libvmdk.lib;libewf.lib;zlib.lib;%(AdditionalDependencies) $(TskNugetLibs);%(AdditionalLibraryDirectories) true Console false Console $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) _CRT_SECURE_NO_WARNINGS;_CRT_SECURE_NO_DEPRECATE;WIN32;NDEBUG;_CONSOLE;WINVER=0x0501;%(PreprocessorDefinitions) MultiThreadedDLL Level3 ProgramDatabase libvhdi.lib;libvmdk.lib;libewf.lib;zlib.lib;%(AdditionalDependencies) $(TskNugetLibs);%(AdditionalLibraryDirectories) true Console true true false MachineX86 Console $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) _CRT_SECURE_NO_WARNINGS;_CRT_SECURE_NO_DEPRECATE;WIN32;NDEBUG;_CONSOLE;WINVER=0x0601;%(PreprocessorDefinitions) MultiThreaded Level3 ProgramDatabase false %(AdditionalDependencies) %(AdditionalLibraryDirectories) true Console true true false MachineX86 Console $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) _CRT_SECURE_NO_WARNINGS;_CRT_SECURE_NO_DEPRECATE;WIN32;NDEBUG;_CONSOLE;WINVER=0x0601;%(PreprocessorDefinitions) MultiThreadedDLL Level3 ProgramDatabase libvhdi.lib;libvmdk.lib;libewf.lib;zlib.lib;libcrypto.lib;%(AdditionalDependencies) $(TskNugetLibs);%(AdditionalLibraryDirectories) true Console true true false Console $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) _CRT_SECURE_NO_WARNINGS;_CRT_SECURE_NO_DEPRECATE;WIN32;NDEBUG;_CONSOLE;WINVER=0x0601;%(PreprocessorDefinitions) MultiThreaded Level3 ProgramDatabase false %(AdditionalDependencies) %(AdditionalLibraryDirectories) true Console true true false Console Disabled $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) _CRT_SECURE_NO_WARNINGS;WIN32;_DEBUG;_CONSOLE;WINVER=0x0601;%(PreprocessorDefinitions) true EnableFastChecks MultiThreadedDebugDLL Level3 EditAndContinue %(AdditionalLibraryDirectories) true Console false MachineX86 Console Disabled $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) _CRT_SECURE_NO_WARNINGS;WIN32;_DEBUG;_CONSOLE;WINVER=0x0601;%(PreprocessorDefinitions) EnableFastChecks MultiThreadedDebug Level3 
ProgramDatabase %(AdditionalLibraryDirectories) true Console false Console {76efc06c-1f64-4478-abe8-79832716b393} false sleuthkit-4.11.1/win32/hfind/hfind.vcxproj000755 000765 000024 00000047237 14137073414 021167 0ustar00carrierstaff000000 000000  Debug_NoLibs Win32 Debug_NoLibs x64 Debug Win32 Debug x64 Release_NoLibs Win32 Release_NoLibs x64 Release Win32 Release x64 {0B127AE3-0C18-4EEF-AB20-A0693E6AA822} hfind Win32Proj Application Unicode v140_xp Application Unicode v140_xp Application Unicode true v140_xp Application Unicode true v140_xp Application Unicode true v140_xp Application Unicode true v140_xp Application Unicode v140_xp Application Unicode v140_xp <_ProjectFileVersion>10.0.30319.1 $(SolutionDir)$(Configuration)\ $(OutDir) $(IntDir) $(IntDir) true true $(SolutionDir)$(Configuration)\ $(SolutionDir)$(Configuration)\ $(OutDir) $(OutDir) $(IntDir) $(IntDir) $(IntDir) $(IntDir) false false false false $(SolutionDir)$(Configuration)\ $(OutDir) $(IntDir) $(IntDir) true true Disabled $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) WIN32;_CRT_SECURE_NO_WARNINGS;_DEBUG;_CONSOLE;_ITERATOR_DEBUG_LEVEL=2;WINVER=0x0601;%(PreprocessorDefinitions) true EnableFastChecks MultiThreadedDebugDLL Level3 EditAndContinue libvhdi.lib;libvmdk.lib;libewf.lib;zlib.lib;%(AdditionalDependencies) $(TskNugetLibs);%(AdditionalLibraryDirectories) true Console false MachineX86 Console Disabled $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) WIN32;_CRT_SECURE_NO_WARNINGS;_DEBUG;_CONSOLE;WINVER=0x0601;%(PreprocessorDefinitions) EnableFastChecks MultiThreadedDebugDLL Level3 ProgramDatabase libvhdi.lib;libvmdk.lib;libewf.lib;zlib.lib;%(AdditionalDependencies) $(TskNugetLibs);%(AdditionalLibraryDirectories) true Console false Console $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) WIN32;_CRT_SECURE_NO_WARNINGS;NDEBUG;_CONSOLE;WINVER=0x0501;%(PreprocessorDefinitions) MultiThreadedDLL Level3 ProgramDatabase libvhdi.lib;libvmdk.lib;libewf.lib;zlib.lib;%(AdditionalDependencies) 
$(TskNugetLibs);%(AdditionalLibraryDirectories) true Console true true false MachineX86 Console $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) WIN32;_CRT_SECURE_NO_WARNINGS;NDEBUG;_CONSOLE;WINVER=0x0601;%(PreprocessorDefinitions) MultiThreaded Level3 ProgramDatabase false %(AdditionalDependencies) %(AdditionalLibraryDirectories) true Console true true false MachineX86 Console $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) WIN32;_CRT_SECURE_NO_WARNINGS;NDEBUG;_CONSOLE;WINVER=0x0601;%(PreprocessorDefinitions) MultiThreadedDLL Level3 ProgramDatabase libvhdi.lib;libvmdk.lib;libewf.lib;zlib.lib;%(AdditionalDependencies) $(TskNugetLibs);%(AdditionalLibraryDirectories) true Console true true false Console $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) WIN32;_CRT_SECURE_NO_WARNINGS;NDEBUG;_CONSOLE;WINVER=0x0601;%(PreprocessorDefinitions) MultiThreaded Level3 ProgramDatabase false %(AdditionalDependencies) %(AdditionalLibraryDirectories) true Console true true false Console Disabled $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) WIN32;_CRT_SECURE_NO_WARNINGS;_DEBUG;_CONSOLE;WINVER=0x0601;%(PreprocessorDefinitions) true EnableFastChecks MultiThreadedDebugDLL Level3 EditAndContinue %(AdditionalLibraryDirectories) true Console false MachineX86 Console Disabled $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) WIN32;_CRT_SECURE_NO_WARNINGS;_DEBUG;_CONSOLE;WINVER=0x0601;%(PreprocessorDefinitions) EnableFastChecks MultiThreadedDebug Level3 ProgramDatabase %(AdditionalLibraryDirectories) true Console false Console {76efc06c-1f64-4478-abe8-79832716b393} false sleuthkit-4.11.1/win32/callback-sample/callback-sample.vcxproj000755 000765 000024 00000047267 14137073414 025040 0ustar00carrierstaff000000 000000  Debug_NoLibs Win32 Debug_NoLibs x64 Debug Win32 Debug x64 Release_NoLibs Win32 Release_NoLibs x64 Release Win32 Release x64 {6CE3D593-E90D-4CC1-A66B-694AC909F6B8} callbacksample Win32Proj Application Unicode v140_xp Application Unicode v140_xp 
Application Unicode true v140_xp Application Unicode true v140_xp Application Unicode true v140_xp Application Unicode true v140_xp Application Unicode v140_xp Application Unicode v140_xp <_ProjectFileVersion>10.0.30319.1 $(SolutionDir)$(Configuration)\ $(OutDir) $(IntDir) $(IntDir) true true $(SolutionDir)$(Configuration)\ $(SolutionDir)$(Configuration)\ $(OutDir) $(OutDir) $(IntDir) $(IntDir) $(IntDir) $(IntDir) false false false false $(SolutionDir)$(Configuration)\ $(OutDir) $(IntDir) $(IntDir) true true Disabled $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) WIN32;_CRT_SECURE_NO_WARNINGS;_DEBUG;_CONSOLE;_ITERATOR_DEBUG_LEVEL=2;WINVER=0x0601;%(PreprocessorDefinitions) true EnableFastChecks MultiThreadedDebugDLL Level3 EditAndContinue libvhdi.lib;libvmdk.lib;libewf.lib;zlib.lib;%(AdditionalDependencies) $(TskNugetLibs);%(AdditionalLibraryDirectories) true Console false MachineX86 Console Disabled $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) WIN32;_CRT_SECURE_NO_WARNINGS;_DEBUG;_CONSOLE;WINVER=0x0601;%(PreprocessorDefinitions) EnableFastChecks MultiThreadedDebugDLL Level3 ProgramDatabase libvhdi.lib;libvmdk.lib;libewf.lib;zlib.lib;%(AdditionalDependencies) $(TskNugetLibs);%(AdditionalLibraryDirectories) true Console false Console $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) WIN32;_CRT_SECURE_NO_WARNINGS;NDEBUG;_CONSOLE;WINVER=0x0501;%(PreprocessorDefinitions) MultiThreadedDLL Level3 ProgramDatabase libvhdi.lib;libvmdk.lib;libewf.lib;zlib.lib;%(AdditionalDependencies) $(TskNugetLibs);%(AdditionalLibraryDirectories) true Console true true false MachineX86 Console $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) WIN32;_CRT_SECURE_NO_WARNINGS;NDEBUG;_CONSOLE;WINVER=0x0601;%(PreprocessorDefinitions) MultiThreaded Level3 ProgramDatabase false %(AdditionalDependencies) %(AdditionalLibraryDirectories) true Console true true false MachineX86 Console $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) 
WIN32;_CRT_SECURE_NO_WARNINGS;NDEBUG;_CONSOLE;WINVER=0x0601;%(PreprocessorDefinitions) MultiThreadedDLL Level3 ProgramDatabase libvhdi.lib;libvmdk.lib;libewf.lib;zlib.lib;libcrypto.lib;%(AdditionalDependencies) $(TskNugetLibs);%(AdditionalLibraryDirectories) true Console true true false Console $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) WIN32;_CRT_SECURE_NO_WARNINGS;NDEBUG;_CONSOLE;WINVER=0x0601;%(PreprocessorDefinitions) MultiThreaded Level3 ProgramDatabase false %(AdditionalDependencies) %(AdditionalLibraryDirectories) true Console true true false Console Disabled $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) WIN32;_CRT_SECURE_NO_WARNINGS;_DEBUG;_CONSOLE;WINVER=0x0601;%(PreprocessorDefinitions) true EnableFastChecks MultiThreadedDebugDLL Level3 EditAndContinue %(AdditionalLibraryDirectories) true Console false MachineX86 Console Disabled $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) WIN32;_CRT_SECURE_NO_WARNINGS;_DEBUG;_CONSOLE;WINVER=0x0601;%(PreprocessorDefinitions) EnableFastChecks MultiThreadedDebug Level3 ProgramDatabase %(AdditionalLibraryDirectories) true Console false Console {76efc06c-1f64-4478-abe8-79832716b393} false sleuthkit-4.11.1/win32/tsk_comparedir/tsk_compare.vcxproj000755 000765 000024 00000044060 14137073414 024313 0ustar00carrierstaff000000 000000  Debug_NoLibs Win32 Debug_NoLibs x64 Debug Win32 Debug x64 Release_NoLibs Win32 Release_NoLibs x64 Release Win32 Release x64 tsk_comparedir {8EE881F4-78DC-49C7-8845-E842358AC0FA} tsk_compare Application v140_xp Application Unicode v140_xp Application MultiByte true v140_xp Application MultiByte true v140_xp Application Unicode true v140_xp Application Unicode true v140_xp Application MultiByte v140_xp Application Unicode v140_xp <_ProjectFileVersion>10.0.30319.1 $(SolutionDir)$(Configuration)\ $(OutDir) $(IntDir) $(IntDir) $(SolutionDir)$(Configuration)\ $(SolutionDir)$(Configuration)\ $(OutDir) $(OutDir) $(IntDir) $(IntDir) $(IntDir) $(IntDir) $(SolutionDir)$(Configuration)\ 
$(OutDir) $(IntDir) $(IntDir) true true Disabled $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) _CRT_SECURE_NO_WARNINGS;_CRT_SECURE_NO_DEPRECATE;_ITERATOR_DEBUG_LEVEL=2;WINVER=0x0601;%(PreprocessorDefinitions) true EnableFastChecks MultiThreadedDebugDLL Level3 EditAndContinue libvhdi.lib;libvmdk.lib;libewf.lib;zlib.lib;%(AdditionalDependencies) $(TskNugetLibs);%(AdditionalLibraryDirectories) true MachineX86 Console Disabled $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) _CRT_SECURE_NO_WARNINGS;_CRT_SECURE_NO_DEPRECATE;WINVER=0x0601;%(PreprocessorDefinitions) EnableFastChecks MultiThreadedDebugDLL Level3 ProgramDatabase libvhdi.lib;libvmdk.lib;libewf.lib;zlib.lib;%(AdditionalDependencies) $(TskNugetLibs);%(AdditionalLibraryDirectories) true Console MaxSpeed true $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) MultiThreadedDLL true Level3 ProgramDatabase _CRT_SECURE_NO_WARNINGS;_USING_V110_SDK71_;%(PreprocessorDefinitions) libvhdi.lib;libvmdk.lib;libewf.lib;zlib.lib;%(AdditionalDependencies) $(TskNugetLibs);%(AdditionalLibraryDirectories) true true true MachineX86 Console MaxSpeed true $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) MultiThreaded true Level3 ProgramDatabase false _CRT_SECURE_NO_WARNINGS;_USING_V110_SDK71_;%(PreprocessorDefinitions) %(AdditionalDependencies) %(AdditionalLibraryDirectories) true true true MachineX86 Console MaxSpeed true $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) MultiThreadedDLL true Level3 ProgramDatabase _CRT_SECURE_NO_WARNINGS;_USING_V110_SDK71_;%(PreprocessorDefinitions) libvhdi.lib;libvmdk.lib;libewf.lib;zlib.lib;libcrypto.lib;%(AdditionalDependencies) $(TskNugetLibs);%(AdditionalLibraryDirectories) true true true Console MaxSpeed true $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) MultiThreaded true Level3 ProgramDatabase false _CRT_SECURE_NO_WARNINGS;_USING_V110_SDK71_;%(PreprocessorDefinitions) %(AdditionalDependencies) %(AdditionalLibraryDirectories) true true true Console 
$(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) _CRT_SECURE_NO_WARNINGS;_CRT_SECURE_NO_DEPRECATE;WINVER=0x0601;%(PreprocessorDefinitions) true EnableFastChecks MultiThreadedDebugDLL Level1 EditAndContinue Disabled %(AdditionalLibraryDirectories) true true NotSet Console $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) _CRT_SECURE_NO_WARNINGS;_CRT_SECURE_NO_DEPRECATE;WINVER=0x0601;%(PreprocessorDefinitions) EnableFastChecks MultiThreadedDebug Level1 ProgramDatabase Disabled %(AdditionalLibraryDirectories) true true Console {76efc06c-1f64-4478-abe8-79832716b393} false sleuthkit-4.11.1/win32/rejistry++/Rejistry++.vcxproj000644 000765 000024 00000036403 14137073414 022737 0ustar00carrierstaff000000 000000  Debug_NoLibs Win32 Debug_NoLibs x64 Debug Win32 Debug x64 Release_NoLibs Win32 Release_NoLibs x64 Release Win32 Release x64 {C41ACD23-6D88-4999-B79D-7E7828B2DBDE} rejistry StaticLibrary true Unicode v140_xp StaticLibrary true Unicode v140_xp StaticLibrary true Unicode v140 StaticLibrary true Unicode v140_xp StaticLibrary false true Unicode v140_xp StaticLibrary false true Unicode v140_xp StaticLibrary false true Unicode v140 StaticLibrary false true Unicode v140 $(OutDir) $(IntDir) $(OutDir) $(IntDir) $(OutDir) $(IntDir) $(OutDir) $(IntDir) $(OutDir) $(IntDir) $(OutDir) $(IntDir) Level3 Disabled true Level3 Disabled MultiThreadedDebugDLL true Level3 Disabled true Level3 Disabled MultiThreadedDebug true Level3 MaxSpeed true true true true true Level3 MaxSpeed true true MultiThreaded true true true Level3 MaxSpeed true true true true true Level3 MaxSpeed true true MultiThreaded true true true sleuthkit-4.11.1/win32/docs/README-win32.txt000755 000765 000024 00000004424 14137073414 020747 0ustar00carrierstaff000000 000000 The Sleuth Kit Windows Executables http://www.sleuthkit.org/sleuthkit Brian Carrier [carrier@sleuthkit.org] Last Updated: July 2012 ====================================================================== This zip file contains the Microsoft 
Windows executables for The Sleuth Kit. The full source code (including Visual Studio Solution files) and documentation can be downloaded from: http://www.sleuthkit.org These are distributed under the IBM Public License and the Common Public License, which can be found in the licenses folder. NOTES The dll files in the zip file are required to run the executables. They must be either in the same directory as the executables or in the path. There have been reports of the exe files not running on some systems and they give the error "The system cannot execute the specified program". This occurs because the system can't find the needed dll files. Installing the "Microsoft Visual C++ 2008 SP1 Redistributable Package (x86)" seems to fix the problem. It can be downloaded from Microsoft: http://www.microsoft.com/downloads/en/confirmation.aspx?FamilyID=A5C84275-3B97-4AB7-A40D-3802B2AF5FC2&displaylang=en mactime.pl requires a Windows port of Perl to be installed. If you have the ".pl" extension associated with Perl, you should be able to run "mactime.pl" from the command line. Otherwise, you may need to run it as "perl mactime.pl". Examples of Windows ports of Perl include: - ActivePerl (http://www.activestate.com/activeperl/) - Strawberry Perl (http://strawberryperl.com/) CURRENT LIMITATIONS The tools do not currently support globbing, which means that you cannot use 'fls img.*' on a split image. Windows does not automatically expand the '*' to all file names. However, most split images can now be used in The Sleuth Kit by simply specifying the first segment's path. These programs can be run on a live system, if you use the \\.\PhysicalDrive0 syntax. Note though, that you may get errors or the file system type may not be detected because the data being read is out of sync with cached versions of the data. Unicode characters are not always properly displayed in the command shell. The AFF image formats are not supported. 
sleuthkit-4.11.1/win32/posix-cpp-sample/posix-cpp-sample.vcxproj000755 000765 000024 00000046702 14137073414 025405 0ustar00carrierstaff000000 000000  Debug_NoLibs Win32 Debug_NoLibs x64 Debug Win32 Debug x64 Release_NoLibs Win32 Release_NoLibs x64 Release Win32 Release x64 {5594DC0E-191C-4F2A-83FE-97F53A9C1222} posixcppsample Win32Proj Application Unicode v140_xp Application Unicode v140_xp Application Unicode true v140_xp Application Unicode true v140_xp Application Unicode true v140_xp Application Unicode true v140_xp Application Unicode v140_xp Application Unicode v140_xp <_ProjectFileVersion>10.0.30319.1 $(SolutionDir)$(Configuration)\ $(OutDir) $(IntDir) $(IntDir) true true $(SolutionDir)$(Configuration)\ $(SolutionDir)$(Configuration)\ $(OutDir) $(OutDir) $(IntDir) $(IntDir) $(IntDir) $(IntDir) false false false false $(SolutionDir)$(Configuration)\ $(OutDir) $(IntDir) $(IntDir) true true Disabled $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) _CRT_SECURE_NO_WARNINGS;_CRT_SECURE_NO_DEPRECATE;WIN32;_DEBUG;_CONSOLE;_ITERATOR_DEBUG_LEVEL=2;WINVER=0x0601;%(PreprocessorDefinitions) true EnableFastChecks MultiThreadedDebugDLL Level3 EditAndContinue libvhdi.lib;libvmdk.lib;libewf.lib;zlib.lib;%(AdditionalDependencies) $(TskNugetLibs);%(AdditionalLibraryDirectories) true Console MachineX86 Console Disabled $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) _CRT_SECURE_NO_WARNINGS;_CRT_SECURE_NO_DEPRECATE;WIN32;_DEBUG;_CONSOLE;WINVER=0x0601;%(PreprocessorDefinitions) EnableFastChecks MultiThreadedDebugDLL Level3 ProgramDatabase libvhdi.lib;libvmdk.lib;libewf.lib;zlib.lib;%(AdditionalDependencies) $(TskNugetLibs);%(AdditionalLibraryDirectories) true Console Console MaxSpeed true $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) _CRT_SECURE_NO_WARNINGS;_CRT_SECURE_NO_DEPRECATE;WIN32;NDEBUG;_CONSOLE;WINVER=0x0501;%(PreprocessorDefinitions) MultiThreadedDLL true Level3 ProgramDatabase libvhdi.lib;libvmdk.lib;libewf.lib;zlib.lib;%(AdditionalDependencies) 
$(TskNugetLibs);%(AdditionalLibraryDirectories) true Console true true MachineX86 Console MaxSpeed true $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) _CRT_SECURE_NO_WARNINGS;_CRT_SECURE_NO_DEPRECATE;WIN32;NDEBUG;_CONSOLE;WINVER=0x0601;%(PreprocessorDefinitions) MultiThreaded true Level3 ProgramDatabase false %(AdditionalDependencies) %(AdditionalLibraryDirectories) true Console true true MachineX86 Console MaxSpeed true $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) _CRT_SECURE_NO_WARNINGS;_CRT_SECURE_NO_DEPRECATE;WIN32;NDEBUG;_CONSOLE;WINVER=0x0601;%(PreprocessorDefinitions) MultiThreadedDLL true Level3 ProgramDatabase libvhdi.lib;libvmdk.lib;libewf.lib;zlib.lib;libcrypto.lib;%(AdditionalDependencies) $(TskNugetLibs);%(AdditionalLibraryDirectories) true Console true true Console MaxSpeed true $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) _CRT_SECURE_NO_WARNINGS;_CRT_SECURE_NO_DEPRECATE;WIN32;NDEBUG;_CONSOLE;WINVER=0x0601;%(PreprocessorDefinitions) MultiThreaded true Level3 ProgramDatabase false %(AdditionalDependencies) %(AdditionalLibraryDirectories) true Console true true Console Disabled $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) _CRT_SECURE_NO_WARNINGS;WIN32;_DEBUG;_CONSOLE;WINVER=0x0601;%(PreprocessorDefinitions) true EnableFastChecks MultiThreadedDebugDLL Level3 EditAndContinue %(AdditionalLibraryDirectories) true Console MachineX86 Console Disabled $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) _CRT_SECURE_NO_WARNINGS;WIN32;_DEBUG;_CONSOLE;WINVER=0x0601;%(PreprocessorDefinitions) EnableFastChecks MultiThreadedDebug Level3 ProgramDatabase %(AdditionalLibraryDirectories) true Console Console {76efc06c-1f64-4478-abe8-79832716b393} false sleuthkit-4.11.1/win32/ils/ils.vcxproj000755 000765 000024 00000047217 14137073414 020363 0ustar00carrierstaff000000 000000  Debug_NoLibs Win32 Debug_NoLibs x64 Debug Win32 Debug x64 Release_NoLibs Win32 Release_NoLibs x64 Release Win32 Release x64 {62C97F5E-64DD-4623-9563-747C4C173348} ils 
Win32Proj Application Unicode v140_xp Application Unicode v140_xp Application Unicode true v140_xp Application Unicode true v140_xp Application Unicode true v140_xp Application Unicode true v140_xp Application Unicode v140_xp Application Unicode v140_xp <_ProjectFileVersion>10.0.30319.1 $(SolutionDir)$(Configuration)\ $(OutDir) $(IntDir) $(IntDir) true true $(SolutionDir)$(Configuration)\ $(SolutionDir)$(Configuration)\ $(OutDir) $(OutDir) $(IntDir) $(IntDir) $(IntDir) $(IntDir) false false false false $(SolutionDir)$(Configuration)\ $(OutDir) $(IntDir) $(IntDir) true true Disabled $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) WIN32;_CRT_SECURE_NO_WARNINGS;_DEBUG;_CONSOLE;WINVER=0x0601;%(PreprocessorDefinitions) true EnableFastChecks MultiThreadedDebugDLL Level3 EditAndContinue libvhdi.lib;libvmdk.lib;libewf.lib;zlib.lib;%(AdditionalDependencies) $(TskNugetLibs);%(AdditionalLibraryDirectories) true Console false MachineX86 Console Disabled $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) WIN32;_CRT_SECURE_NO_WARNINGS;_DEBUG;_CONSOLE;WINVER=0x0601;%(PreprocessorDefinitions) EnableFastChecks MultiThreadedDebugDLL Level3 ProgramDatabase libvhdi.lib;libvmdk.lib;libewf.lib;zlib.lib;%(AdditionalDependencies) $(TskNugetLibs);%(AdditionalLibraryDirectories) true Console false Console $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) WIN32;_CRT_SECURE_NO_WARNINGS;NDEBUG;_CONSOLE;WINVER=0x0501;%(PreprocessorDefinitions) MultiThreadedDLL Level3 ProgramDatabase libvhdi.lib;libvmdk.lib;libewf.lib;zlib.lib;%(AdditionalDependencies) $(TskNugetLibs);%(AdditionalLibraryDirectories) true Console true true false MachineX86 Console $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) WIN32;_CRT_SECURE_NO_WARNINGS;NDEBUG;_CONSOLE;WINVER=0x0601;%(PreprocessorDefinitions) MultiThreaded Level3 ProgramDatabase false %(AdditionalDependencies) %(AdditionalLibraryDirectories) true Console true true false MachineX86 Console $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) 
WIN32;_CRT_SECURE_NO_WARNINGS;NDEBUG;_CONSOLE;WINVER=0x0601;%(PreprocessorDefinitions) MultiThreadedDLL Level3 ProgramDatabase libvhdi.lib;libvmdk.lib;libewf.lib;zlib.lib;libcrypto.lib;%(AdditionalDependencies) $(TskNugetLibs);%(AdditionalLibraryDirectories) true Console true true false Console $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) WIN32;_CRT_SECURE_NO_WARNINGS;NDEBUG;_CONSOLE;WINVER=0x0601;%(PreprocessorDefinitions) MultiThreaded Level3 ProgramDatabase false %(AdditionalDependencies) %(AdditionalLibraryDirectories) true Console true true false Console Disabled $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) WIN32;_CRT_SECURE_NO_WARNINGS;_DEBUG;_CONSOLE;WINVER=0x0601;%(PreprocessorDefinitions) true EnableFastChecks MultiThreadedDebugDLL Level3 EditAndContinue %(AdditionalLibraryDirectories) true Console false MachineX86 Console Disabled $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) WIN32;_CRT_SECURE_NO_WARNINGS;_DEBUG;_CONSOLE;WINVER=0x0601;%(PreprocessorDefinitions) EnableFastChecks MultiThreadedDebug Level3 ProgramDatabase %(AdditionalLibraryDirectories) true Console false Console {76efc06c-1f64-4478-abe8-79832716b393} false sleuthkit-4.11.1/win32/ffind/ffind.vcxproj000755 000765 000024 00000047253 14137073414 021161 0ustar00carrierstaff000000 000000  Debug_NoLibs Win32 Debug_NoLibs x64 Debug Win32 Debug x64 Release_NoLibs Win32 Release_NoLibs x64 Release Win32 Release x64 {7C132953-1700-42FF-9F61-A814C9F2C758} ffind Win32Proj Application Unicode v140_xp Application Unicode v140_xp Application Unicode true v140_xp Application Unicode true v140_xp Application Unicode true v140_xp Application Unicode true v140_xp Application Unicode v140_xp Application Unicode v140_xp <_ProjectFileVersion>10.0.30319.1 $(SolutionDir)$(Configuration)\ $(OutDir) $(IntDir) $(IntDir) true true $(SolutionDir)$(Configuration)\ $(SolutionDir)$(Configuration)\ $(OutDir) $(OutDir) $(IntDir) $(IntDir) $(IntDir) $(IntDir) false false false false 
$(SolutionDir)$(Configuration)\ $(OutDir) $(IntDir) $(IntDir) true true Disabled $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) WIN32;_CRT_SECURE_NO_WARNINGS;_DEBUG;_CONSOLE;_ITERATOR_DEBUG_LEVEL=2;WINVER=0x0601;%(PreprocessorDefinitions) true EnableFastChecks MultiThreadedDebugDLL Level3 EditAndContinue libvhdi.lib;libvmdk.lib;libewf.lib;zlib.lib;%(AdditionalDependencies) $(TskNugetLibs);%(AdditionalLibraryDirectories) true Console false MachineX86 Console Disabled $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) WIN32;_CRT_SECURE_NO_WARNINGS;_DEBUG;_CONSOLE;WINVER=0x0601;%(PreprocessorDefinitions) EnableFastChecks MultiThreadedDebugDLL Level3 ProgramDatabase libvhdi.lib;libvmdk.lib;libewf.lib;zlib.lib;%(AdditionalDependencies) $(TskNugetLibs);%(AdditionalLibraryDirectories) true Console false Console $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) WIN32;_CRT_SECURE_NO_WARNINGS;NDEBUG;_CONSOLE;WINVER=0x0501;%(PreprocessorDefinitions) MultiThreadedDLL Level3 ProgramDatabase libvhdi.lib;libvmdk.lib;libewf.lib;zlib.lib;%(AdditionalDependencies) $(TskNugetLibs);%(AdditionalLibraryDirectories) true Console true true false MachineX86 Console $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) WIN32;_CRT_SECURE_NO_WARNINGS;NDEBUG;_CONSOLE;WINVER=0x0601;%(PreprocessorDefinitions) MultiThreaded Level3 ProgramDatabase false %(AdditionalDependencies) %(AdditionalLibraryDirectories) true Console true true false MachineX86 Console $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) WIN32;_CRT_SECURE_NO_WARNINGS;NDEBUG;_CONSOLE;WINVER=0x0601;%(PreprocessorDefinitions) MultiThreadedDLL Level3 ProgramDatabase libvhdi.lib;libvmdk.lib;libewf.lib;zlib.lib;libcrypto.lib;%(AdditionalDependencies) $(TskNugetLibs);%(AdditionalLibraryDirectories) true Console true true false Console $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) WIN32;_CRT_SECURE_NO_WARNINGS;NDEBUG;_CONSOLE;WINVER=0x0601;%(PreprocessorDefinitions) MultiThreaded Level3 ProgramDatabase false 
%(AdditionalDependencies) %(AdditionalLibraryDirectories) true Console true true false Console Disabled $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) WIN32;_CRT_SECURE_NO_WARNINGS;_DEBUG;_CONSOLE;WINVER=0x0601;%(PreprocessorDefinitions) true EnableFastChecks MultiThreadedDebugDLL Level3 EditAndContinue %(AdditionalLibraryDirectories) true Console false MachineX86 Console Disabled $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) WIN32;_CRT_SECURE_NO_WARNINGS;_DEBUG;_CONSOLE;WINVER=0x0601;%(PreprocessorDefinitions) EnableFastChecks MultiThreadedDebug Level3 ProgramDatabase %(AdditionalLibraryDirectories) true Console false Console {76efc06c-1f64-4478-abe8-79832716b393} false sleuthkit-4.11.1/win32/tsk_gettimes/tsk_gettimes.vcxproj000755 000765 000024 00000046752 14137073414 024214 0ustar00carrierstaff000000 000000  Debug_NoLibs Win32 Debug_NoLibs x64 Debug Win32 Debug x64 Release_NoLibs Win32 Release_NoLibs x64 Release Win32 Release x64 {11A8927C-F971-4104-A286-5DC11C25E2EC} tsk_gettimes ManagedCProj Application Unicode true v140_xp Application Unicode false v140_xp Application Unicode true true v140_xp Application Unicode false true v140_xp Application Unicode false true v140_xp Application Unicode false true v140_xp Application Unicode true v140_xp Application Unicode false v140_xp <_ProjectFileVersion>10.0.30319.1 $(SolutionDir)$(Configuration)\ $(OutDir) $(IntDir) $(IntDir) true true $(SolutionDir)$(Configuration)\ $(SolutionDir)$(Configuration)\ $(OutDir) $(OutDir) $(IntDir) $(IntDir) $(IntDir) $(IntDir) false false false false $(SolutionDir)$(Configuration)\ $(OutDir) $(IntDir) $(IntDir) true true false false Disabled $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) WIN32;_CRT_SECURE_NO_WARNINGS;_DEBUG;_CRT_SECURE_NO_DEPRECATE;_ITERATOR_DEBUG_LEVEL=2;WINVER=0x0601;%(PreprocessorDefinitions) MultiThreadedDebugDLL Level3 ProgramDatabase libvhdi.lib;libvmdk.lib;libewf.lib;zlib.lib;%(AdditionalDependencies) 
$(TskNugetLibs);%(AdditionalLibraryDirectories) true true MachineX86 Console Disabled $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) WIN32;_CRT_SECURE_NO_WARNINGS;_DEBUG;_CRT_SECURE_NO_DEPRECATE;WINVER=0x0601;%(PreprocessorDefinitions) MultiThreadedDebugDLL Level3 ProgramDatabase libvhdi.lib;libvmdk.lib;libewf.lib;zlib.lib;%(AdditionalDependencies) $(TskNugetLibs);%(AdditionalLibraryDirectories) true true Console $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) WIN32;_CRT_SECURE_NO_WARNINGS;NDEBUG;_CRT_SECURE_NO_DEPRECATE;WINVER=0x0501;%(PreprocessorDefinitions) MultiThreadedDLL Level3 ProgramDatabase libvhdi.lib;libvmdk.lib;libewf.lib;zlib.lib;%(AdditionalDependencies) $(TskNugetLibs);%(AdditionalLibraryDirectories) true MachineX86 Console $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) WIN32;_CRT_SECURE_NO_WARNINGS;NDEBUG;_CRT_SECURE_NO_DEPRECATE;WINVER=0x0601;%(PreprocessorDefinitions) MultiThreaded Level3 ProgramDatabase false Sync false true %(AdditionalDependencies) %(AdditionalLibraryDirectories) true MachineX86 Console $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) WIN32;_CRT_SECURE_NO_WARNINGS;NDEBUG;_CRT_SECURE_NO_DEPRECATE;WINVER=0x0601;%(PreprocessorDefinitions) MultiThreadedDLL Level3 ProgramDatabase libvhdi.lib;libvmdk.lib;libewf.lib;zlib.lib;libcrypto.lib;%(AdditionalDependencies) $(TskNugetLibs);%(AdditionalLibraryDirectories) true true true Console $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) WIN32;_CRT_SECURE_NO_WARNINGS;NDEBUG;_CRT_SECURE_NO_DEPRECATE;WINVER=0x0601;%(PreprocessorDefinitions) MultiThreaded NotUsing Level3 ProgramDatabase false false true Async %(AdditionalDependencies) %(AdditionalLibraryDirectories) true true true Console Disabled $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) WIN32;_CRT_SECURE_NO_WARNINGS;_DEBUG;_CRT_SECURE_NO_DEPRECATE;WINVER=0x0601;%(PreprocessorDefinitions) MultiThreadedDebugDLL Level3 ProgramDatabase %(AdditionalLibraryDirectories) true true MachineX86 Console Disabled 
$(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) WIN32;_CRT_SECURE_NO_WARNINGS;_DEBUG;_CRT_SECURE_NO_DEPRECATE;WINVER=0x0601;%(PreprocessorDefinitions) MultiThreadedDebug Level3 ProgramDatabase %(AdditionalLibraryDirectories) true true Console true true true true true true {76efc06c-1f64-4478-abe8-79832716b393} false sleuthkit-4.11.1/win32/tsk_logical_imager/tsk_logical_imager.vcxproj000755 000765 000024 00000053755 14137073414 026447 0ustar00carrierstaff000000 000000  Debug_NoLibs Win32 Debug_NoLibs x64 Debug Win32 Debug x64 Release_NoLibs Win32 Release_NoLibs x64 Release Win32 Release x64 {38429B36-9802-42DE-90DD-DA692F7412C2} tsk_logical_imager Application NotSet v140_xp Application Unicode v140_xp Application MultiByte true v140_xp Application MultiByte true v140_xp Application Unicode true v140_xp Application Unicode true v140_xp Application NotSet v140_xp Application Unicode v140_xp <_ProjectFileVersion>10.0.30319.1 $(SolutionDir)$(Configuration)\ $(OutDir) $(IntDir) $(IntDir) $(SolutionDir)$(Configuration)\ $(SolutionDir)$(Configuration)\ $(OutDir) $(OutDir) $(IntDir) $(IntDir) $(IntDir) $(IntDir) $(SolutionDir)$(Configuration)\ $(OutDir) $(IntDir) $(IntDir) Disabled $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) true EnableFastChecks MultiThreadedDebugDLL Level3 EditAndContinue _CRT_SECURE_NO_WARNINGS;_CRT_SECURE_NO_DEPRECATE;_ITERATOR_DEBUG_LEVEL=2; /utf-8 libvhdi.lib;libvmdk.lib;libewf.lib;zlib.lib;Shlwapi.lib;Ws2_32.lib;comsuppw.lib;%(AdditionalDependencies) $(TskNugetLibs);%(AdditionalLibraryDirectories) true MachineX86 Console Disabled $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) EnableFastChecks MultiThreadedDebugDLL Level3 ProgramDatabase _CRT_SECURE_NO_WARNINGS;_CRT_SECURE_NO_DEPRECATE; /utf-8 libvhdi.lib;libvmdk.lib;libewf.lib;zlib.lib;Shlwapi.lib;Ws2_32.lib;comsuppw.lib;%(AdditionalDependencies) $(TskNugetLibs);%(AdditionalLibraryDirectories) true Console MaxSpeed true $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) 
MultiThreadedDLL true Level3 ProgramDatabase _CRT_SECURE_NO_WARNINGS;_CRT_SECURE_NO_DEPRECATE;_MBCS;WINVER=0x0501;%(PreprocessorDefinitions) /utf-8 libvhdi.lib;libvmdk.lib;libewf.lib;zlib.lib;Shlwapi.lib;Ws2_32.lib;comsuppw.lib;%(AdditionalDependencies) $(TskNugetLibs);%(AdditionalLibraryDirectories) true true true MachineX86 Console MaxSpeed true $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) MultiThreaded true Level3 ProgramDatabase _CRT_SECURE_NO_WARNINGS;_CRT_SECURE_NO_DEPRECATE;_MBCS;WINVER=0x0601;%(PreprocessorDefinitions) false /utf-8 Shlwapi.lib;Ws2_32.lib;comsuppw.lib;%(AdditionalDependencies) %(AdditionalLibraryDirectories) true true true MachineX86 Console MaxSpeed true $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) MultiThreadedDLL true Level3 ProgramDatabase _CRT_SECURE_NO_WARNINGS;_CRT_SECURE_NO_DEPRECATE;_MBCS;WINVER=0x0601;%(PreprocessorDefinitions) /utf-8 libvhdi.lib;libvmdk.lib;libewf.lib;zlib.lib;libcrypto.lib;Shlwapi.lib;Ws2_32.lib;comsuppw.lib;%(AdditionalDependencies) $(TskNugetLibs);%(AdditionalLibraryDirectories) true true true Console MaxSpeed true $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) MultiThreaded true Level3 ProgramDatabase _CRT_SECURE_NO_WARNINGS;_CRT_SECURE_NO_DEPRECATE;_MBCS;WINVER=0x0601;%(PreprocessorDefinitions) false /utf-8 Shlwapi.lib;Ws2_32.lib;comsuppw.lib;%(AdditionalDependencies) %(AdditionalLibraryDirectories) true true true Console Disabled $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) true EnableFastChecks MultiThreadedDebugDLL Level3 EditAndContinue _CRT_SECURE_NO_WARNINGS;_USING_V110_SDK71_;%(PreprocessorDefinitions) /utf-8 %(AdditionalLibraryDirectories) true MachineX86 Console Shlwapi.lib;Ws2_32.lib;comsuppw.lib;%(AdditionalDependencies) Disabled $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) EnableFastChecks MultiThreadedDebug Level3 ProgramDatabase _CRT_SECURE_NO_WARNINGS;_USING_V110_SDK71_;%(PreprocessorDefinitions) /utf-8 %(AdditionalOptions) %(AdditionalLibraryDirectories) 
true Console Mpr.lib;Shlwapi.lib;Ws2_32.lib;comsuppw.lib;%(AdditionalDependencies) {76efc06c-1f64-4478-abe8-79832716b393} false {c41acd23-6d88-4999-b79d-7e7828b2dbde} sleuthkit-4.11.1/win32/jcat/jcat.vcxproj000755 000765 000024 00000047301 14137073414 020641 0ustar00carrierstaff000000 000000  Debug_NoLibs Win32 Debug_NoLibs x64 Debug Win32 Debug x64 Release_NoLibs Win32 Release_NoLibs x64 Release Win32 Release x64 {44A003BE-400D-4434-AFED-64D8E3B448D9} jcat Win32Proj Application Unicode v140_xp Application Unicode v140_xp Application Unicode true v140_xp Application Unicode true v140_xp Application Unicode true v140_xp Application Unicode true v140_xp Application Unicode v140_xp Application Unicode v140_xp <_ProjectFileVersion>10.0.30319.1 $(SolutionDir)$(Configuration)\ $(OutDir) $(IntDir) $(IntDir) true true $(SolutionDir)$(Configuration)\ $(SolutionDir)$(Configuration)\ $(OutDir) $(OutDir) $(IntDir) $(IntDir) $(IntDir) $(IntDir) false false false false $(SolutionDir)$(Configuration)\ $(OutDir) $(IntDir) $(IntDir) true true Disabled $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) _CRT_SECURE_NO_DEPRECATE;_CRT_SECURE_NO_WARNINGS;_ITERATOR_DEBUG_LEVEL=2;WINVER=0x0601;%(PreprocessorDefinitions) true EnableFastChecks MultiThreadedDebugDLL Level3 EditAndContinue libvhdi.lib;libvmdk.lib;libewf.lib;zlib.lib;%(AdditionalDependencies) $(TskNugetLibs);%(AdditionalLibraryDirectories) true Console false MachineX86 Console Disabled $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) _CRT_SECURE_NO_DEPRECATE;_CRT_SECURE_NO_WARNINGS;WINVER=0x0601;%(PreprocessorDefinitions) EnableFastChecks MultiThreadedDebugDLL Level3 ProgramDatabase libvhdi.lib;libvmdk.lib;libewf.lib;zlib.lib;%(AdditionalDependencies) $(TskNugetLibs);%(AdditionalLibraryDirectories) true Console false Console $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) _CRT_SECURE_NO_DEPRECATE;_CRT_SECURE_NO_WARNINGS;WINVER=0x0501;%(PreprocessorDefinitions) MultiThreadedDLL Level3 ProgramDatabase 
libvhdi.lib;libvmdk.lib;libewf.lib;zlib.lib;%(AdditionalDependencies) $(TskNugetLibs);%(AdditionalLibraryDirectories) true Console true true false MachineX86 Console $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) _CRT_SECURE_NO_DEPRECATE;_CRT_SECURE_NO_WARNINGS;WINVER=0x0601;%(PreprocessorDefinitions) MultiThreaded Level3 ProgramDatabase false %(AdditionalDependencies) %(AdditionalLibraryDirectories) true Console true true false MachineX86 Console $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) _CRT_SECURE_NO_DEPRECATE;_CRT_SECURE_NO_WARNINGS;WINVER=0x0601;%(PreprocessorDefinitions) MultiThreadedDLL Level3 ProgramDatabase libvhdi.lib;libvmdk.lib;libewf.lib;zlib.lib;libcrypto.lib;%(AdditionalDependencies) $(TskNugetLibs);%(AdditionalLibraryDirectories) true Console true true false Console $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) _CRT_SECURE_NO_DEPRECATE;_CRT_SECURE_NO_WARNINGS;WINVER=0x0601;%(PreprocessorDefinitions) MultiThreaded Level3 ProgramDatabase false %(AdditionalDependencies) %(AdditionalLibraryDirectories) true Console true true false Console Disabled $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) _CRT_SECURE_NO_DEPRECATE;_CRT_SECURE_NO_WARNINGS;WINVER=0x0601;%(PreprocessorDefinitions) true EnableFastChecks MultiThreadedDebugDLL Level3 EditAndContinue %(AdditionalLibraryDirectories) true Console false MachineX86 Console Disabled $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) _CRT_SECURE_NO_DEPRECATE;_CRT_SECURE_NO_WARNINGS;WINVER=0x0601;%(PreprocessorDefinitions) EnableFastChecks MultiThreadedDebug Level3 ProgramDatabase %(AdditionalLibraryDirectories) true Console false Console {76efc06c-1f64-4478-abe8-79832716b393} false sleuthkit-4.11.1/win32/tsk_loaddb/tsk_loaddb.vcxproj000755 000765 000024 00000043203 14137073414 023210 0ustar00carrierstaff000000 000000  Debug_NoLibs Win32 Debug_NoLibs x64 Debug Win32 Debug x64 Release_NoLibs Win32 Release_NoLibs x64 Release Win32 Release x64 {96AFC6D4-A3DC-44D4-8F55-F74E1D21798C} 
tsk_loaddb Application NotSet v140_xp Application Unicode v140_xp Application MultiByte true v140_xp Application MultiByte true v140_xp Application Unicode true v140_xp Application Unicode true v140_xp Application NotSet v140_xp Application Unicode v140_xp <_ProjectFileVersion>10.0.30319.1 $(SolutionDir)$(Configuration)\ $(OutDir) $(IntDir) $(IntDir) $(SolutionDir)$(Configuration)\ $(SolutionDir)$(Configuration)\ $(OutDir) $(OutDir) $(IntDir) $(IntDir) $(IntDir) $(IntDir) $(SolutionDir)$(Configuration)\ $(OutDir) $(IntDir) $(IntDir) Disabled $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) true EnableFastChecks MultiThreadedDebugDLL Level3 EditAndContinue _CRT_SECURE_NO_WARNINGS;_CRT_SECURE_NO_DEPRECATE;_ITERATOR_DEBUG_LEVEL=2; libvhdi.lib;libvmdk.lib;libewf.lib;zlib.lib;%(AdditionalDependencies) $(TskNugetLibs);%(AdditionalLibraryDirectories) true MachineX86 Console Disabled $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) EnableFastChecks MultiThreadedDebugDLL Level3 ProgramDatabase _CRT_SECURE_NO_WARNINGS;_CRT_SECURE_NO_DEPRECATE; libvhdi.lib;libvmdk.lib;libewf.lib;zlib.lib;%(AdditionalDependencies) $(TskNugetLibs);%(AdditionalLibraryDirectories) true Console MaxSpeed true $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) MultiThreadedDLL true Level3 ProgramDatabase _CRT_SECURE_NO_WARNINGS;_CRT_SECURE_NO_DEPRECATE;_MBCS;WINVER=0x0501;%(PreprocessorDefinitions) libvhdi.lib;libvmdk.lib;libewf.lib;zlib.lib;%(AdditionalDependencies) $(TskNugetLibs);%(AdditionalLibraryDirectories) true true true MachineX86 Console MaxSpeed true $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) MultiThreaded true Level3 ProgramDatabase _CRT_SECURE_NO_WARNINGS;_CRT_SECURE_NO_DEPRECATE;_MBCS;WINVER=0x0601;%(PreprocessorDefinitions) false %(AdditionalDependencies) %(AdditionalLibraryDirectories) true true true MachineX86 Console MaxSpeed true $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) MultiThreadedDLL true Level3 ProgramDatabase 
_CRT_SECURE_NO_WARNINGS;_CRT_SECURE_NO_DEPRECATE;_MBCS;WINVER=0x0601;%(PreprocessorDefinitions) libvhdi.lib;libvmdk.lib;libewf.lib;zlib.lib;libcrypto.lib;%(AdditionalDependencies) $(TskNugetLibs);%(AdditionalLibraryDirectories) true true true Console MaxSpeed true $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) MultiThreaded true Level3 ProgramDatabase _CRT_SECURE_NO_WARNINGS;_CRT_SECURE_NO_DEPRECATE;_MBCS;WINVER=0x0601;%(PreprocessorDefinitions) false %(AdditionalDependencies) %(AdditionalLibraryDirectories) true true true Console Disabled $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) true EnableFastChecks MultiThreadedDebugDLL Level3 EditAndContinue _CRT_SECURE_NO_WARNINGS;_USING_V110_SDK71_;%(PreprocessorDefinitions) %(AdditionalLibraryDirectories) true MachineX86 Console Disabled $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) EnableFastChecks MultiThreadedDebug Level3 ProgramDatabase _CRT_SECURE_NO_WARNINGS;_USING_V110_SDK71_;%(PreprocessorDefinitions) %(AdditionalLibraryDirectories) true Console {76efc06c-1f64-4478-abe8-79832716b393} false sleuthkit-4.11.1/win32/pstat/pstat.vcxproj000755 000765 000024 00000047255 14137073414 021275 0ustar00carrierstaff000000 000000  Debug_NoLibs Win32 Debug_NoLibs x64 Debug Win32 Debug x64 Release_NoLibs Win32 Release_NoLibs x64 Release Win32 Release x64 {5D75FBFB-539A-4014-ACEB-520BB1451F00} pstat Win32Proj Application Unicode v140_xp Application Unicode v140_xp Application Unicode true v140_xp Application Unicode true v140_xp Application Unicode true v140_xp Application Unicode true v140_xp Application Unicode v140_xp Application Unicode v140_xp <_ProjectFileVersion>10.0.30319.1 $(SolutionDir)$(Configuration)\ $(OutDir) $(IntDir) $(IntDir) true true $(SolutionDir)$(Configuration)\ $(SolutionDir)$(Configuration)\ $(OutDir) $(OutDir) $(IntDir) $(IntDir) $(IntDir) $(IntDir) false false false false $(SolutionDir)$(Configuration)\ $(OutDir) $(IntDir) $(IntDir) true true Disabled 
$(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) WIN32;_CRT_SECURE_NO_WARNINGS;_DEBUG;_CONSOLE;_ITERATOR_DEBUG_LEVEL=2;WINVER=0x0601;%(PreprocessorDefinitions) true EnableFastChecks MultiThreadedDebugDLL Level3 EditAndContinue libvhdi.lib;libvmdk.lib;libewf.lib;zlib.lib;%(AdditionalDependencies) $(TskNugetLibs);%(AdditionalLibraryDirectories) true Console false MachineX86 Console Disabled $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) WIN32;_CRT_SECURE_NO_WARNINGS;_DEBUG;_CONSOLE;WINVER=0x0601;%(PreprocessorDefinitions) EnableFastChecks MultiThreadedDebugDLL Level3 ProgramDatabase libvhdi.lib;libvmdk.lib;libewf.lib;zlib.lib;%(AdditionalDependencies) $(TskNugetLibs);%(AdditionalLibraryDirectories) true Console false Console $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) WIN32;_CRT_SECURE_NO_WARNINGS;NDEBUG;_CONSOLE;WINVER=0x0501;%(PreprocessorDefinitions) MultiThreadedDLL Level3 ProgramDatabase libvhdi.lib;libvmdk.lib;libewf.lib;zlib.lib;%(AdditionalDependencies) $(TskNugetLibs);%(AdditionalLibraryDirectories) true Console true true false MachineX86 Console $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) WIN32;_CRT_SECURE_NO_WARNINGS;NDEBUG;_CONSOLE;WINVER=0x0601;%(PreprocessorDefinitions) MultiThreaded Level3 ProgramDatabase false %(AdditionalDependencies) %(AdditionalLibraryDirectories) true Console true true false MachineX86 Console $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) WIN32;_CRT_SECURE_NO_WARNINGS;NDEBUG;_CONSOLE;WINVER=0x0601;%(PreprocessorDefinitions) MultiThreadedDLL Level3 ProgramDatabase libvhdi.lib;libvmdk.lib;libewf.lib;zlib.lib;libcrypto.lib;%(AdditionalDependencies) $(TskNugetLibs);%(AdditionalLibraryDirectories) true Console true true false Console $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) WIN32;_CRT_SECURE_NO_WARNINGS;NDEBUG;_CONSOLE;WINVER=0x0601;%(PreprocessorDefinitions) MultiThreaded Level3 ProgramDatabase false %(AdditionalDependencies) %(AdditionalLibraryDirectories) true Console true true false 
Console Disabled $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) WIN32;_CRT_SECURE_NO_WARNINGS;_DEBUG;_CONSOLE;WINVER=0x0601;%(PreprocessorDefinitions) true EnableFastChecks MultiThreadedDebugDLL Level3 EditAndContinue %(AdditionalLibraryDirectories) true Console false MachineX86 Console Disabled $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) WIN32;_CRT_SECURE_NO_WARNINGS;_DEBUG;_CONSOLE;WINVER=0x0601;%(PreprocessorDefinitions) EnableFastChecks MultiThreadedDebug Level3 ProgramDatabase %(AdditionalLibraryDirectories) true Console false Console {76efc06c-1f64-4478-abe8-79832716b393} false sleuthkit-4.11.1/win32/jls/jls.vcxproj000755 000765 000024 00000047247 14137073414 020370 0ustar00carrierstaff000000 000000  Debug_NoLibs Win32 Debug_NoLibs x64 Debug Win32 Debug x64 Release_NoLibs Win32 Release_NoLibs x64 Release Win32 Release x64 {C52F935E-1FD2-443C-A181-27908DAB3BC8} jls Win32Proj Application Unicode v140_xp Application Unicode v140_xp Application Unicode true v140_xp Application Unicode true v140_xp Application Unicode true v140_xp Application Unicode true v140_xp Application Unicode v140_xp Application Unicode v140_xp <_ProjectFileVersion>10.0.30319.1 $(SolutionDir)$(Configuration)\ $(OutDir) $(IntDir) $(IntDir) true true $(SolutionDir)$(Configuration)\ $(SolutionDir)$(Configuration)\ $(OutDir) $(OutDir) $(IntDir) $(IntDir) $(IntDir) $(IntDir) false false false false $(SolutionDir)$(Configuration)\ $(OutDir) $(IntDir) $(IntDir) true true Disabled $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) WIN32;_CRT_SECURE_NO_WARNINGS;_DEBUG;_CONSOLE;_ITERATOR_DEBUG_LEVEL=2;WINVER=0x0601;%(PreprocessorDefinitions) true EnableFastChecks MultiThreadedDebugDLL Level3 EditAndContinue libvhdi.lib;libvmdk.lib;libewf.lib;zlib.lib;%(AdditionalDependencies) $(TskNugetLibs);%(AdditionalLibraryDirectories) true Console false MachineX86 Console Disabled $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) 
WIN32;_CRT_SECURE_NO_WARNINGS;_DEBUG;_CONSOLE;WINVER=0x0601;%(PreprocessorDefinitions) EnableFastChecks MultiThreadedDebugDLL Level3 ProgramDatabase libvhdi.lib;libvmdk.lib;libewf.lib;zlib.lib;%(AdditionalDependencies) $(TskNugetLibs);%(AdditionalLibraryDirectories) true Console false Console $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) WIN32;_CRT_SECURE_NO_WARNINGS;NDEBUG;_CONSOLE;WINVER=0x0501;%(PreprocessorDefinitions) MultiThreadedDLL Level3 ProgramDatabase libvhdi.lib;libvmdk.lib;libewf.lib;zlib.lib;%(AdditionalDependencies) $(TskNugetLibs);%(AdditionalLibraryDirectories) true Console true true false MachineX86 Console $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) WIN32;_CRT_SECURE_NO_WARNINGS;NDEBUG;_CONSOLE;WINVER=0x0601;%(PreprocessorDefinitions) MultiThreaded Level3 ProgramDatabase false %(AdditionalDependencies) %(AdditionalLibraryDirectories) true Console true true false MachineX86 Console $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) WIN32;_CRT_SECURE_NO_WARNINGS;NDEBUG;_CONSOLE;WINVER=0x0601;%(PreprocessorDefinitions) MultiThreadedDLL Level3 ProgramDatabase libvhdi.lib;libvmdk.lib;libewf.lib;zlib.lib;libcrypto.lib;%(AdditionalDependencies) $(TskNugetLibs);%(AdditionalLibraryDirectories) true Console true true false Console $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) WIN32;_CRT_SECURE_NO_WARNINGS;NDEBUG;_CONSOLE;WINVER=0x0601;%(PreprocessorDefinitions) MultiThreaded Level3 ProgramDatabase false %(AdditionalDependencies) %(AdditionalLibraryDirectories) true Console true true false Console Disabled $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) WIN32;_CRT_SECURE_NO_WARNINGS;_DEBUG;_CONSOLE;WINVER=0x0601;%(PreprocessorDefinitions) true EnableFastChecks MultiThreadedDebugDLL Level3 EditAndContinue %(AdditionalLibraryDirectories) true Console false MachineX86 Console Disabled $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) WIN32;_CRT_SECURE_NO_WARNINGS;_DEBUG;_CONSOLE;WINVER=0x0601;%(PreprocessorDefinitions) 
EnableFastChecks MultiThreadedDebug Level3 ProgramDatabase %(AdditionalLibraryDirectories) true Console false Console {76efc06c-1f64-4478-abe8-79832716b393} false sleuthkit-4.11.1/win32/libtsk/libtsk.vcxproj000755 000765 000024 00000070516 14137073414 021563 0ustar00carrierstaff000000 000000  Debug_NoLibs Win32 Debug_NoLibs x64 Debug Win32 Debug x64 Release_NoLibs Win32 Release_NoLibs x64 Release Win32 Release x64 {76EFC06C-1F64-4478-ABE8-79832716B393} libtsk Win32Proj StaticLibrary Unicode v140_xp StaticLibrary Unicode v140_xp StaticLibrary Unicode true v140_xp StaticLibrary Unicode true v140_xp StaticLibrary Unicode true v140_xp StaticLibrary Unicode true v140_xp StaticLibrary Unicode v140_xp StaticLibrary Unicode v140_xp <_ProjectFileVersion>10.0.30319.1 $(SolutionDir)$(Configuration)\ $(OutDir) $(IntDir) $(IntDir) $(SolutionDir)$(Configuration)\ $(SolutionDir)$(Configuration)\ $(OutDir) $(OutDir) $(IntDir) $(IntDir) $(IntDir) $(IntDir) $(SolutionDir)$(Configuration)\ $(OutDir) $(IntDir) $(IntDir) Disabled $(ProjectDir)\..\..\;$(TskNugetIncludes);%(AdditionalIncludeDirectories) NOMINMAX;_CRT_SECURE_NO_DEPRECATE;GUID_WINDOWS;_CRT_SECURE_NO_WARNINGS;HAVE_LIBEWF;HAVE_LIBVMDK;HAVE_LIBVHDI;HAVE_LIBZ;WIN32;_DEBUG;_LIB;_ITERATOR_DEBUG_LEVEL=2;WINVER=0x0601;%(PreprocessorDefinitions) true EnableFastChecks MultiThreadedDebugDLL Level3 ProgramDatabase Default 4200;4814;5030 copy "$(LibEwfLib)\libewf.dll" "$(OutDir)" copy "$(ZlibDll)\zlib.dll" "$(OutDir)" copy "$(LibVmdkLib)\libvmdk.dll" "$(OutDir)" copy "$(LibVhdiLib)\libvhdi.dll" "$(OutDir)" Disabled $(ProjectDir)\..\..\;$(TskNugetIncludes);%(AdditionalIncludeDirectories) NOMINMAX;_CRT_SECURE_NO_DEPRECATE;GUID_WINDOWS;_CRT_SECURE_NO_WARNINGS;HAVE_LIBEWF;HAVE_LIBVMDK;HAVE_LIBVHDI;HAVE_LIBZ;WIN32;_DEBUG;_LIB;WINVER=0x0601;%(PreprocessorDefinitions) EnableFastChecks MultiThreadedDebugDLL Level3 ProgramDatabase 4200;4814;5030 copy "$(LibEwfLib)\libewf.dll" "$(OutDir)" copy "$(ZlibDll)\zlib.dll" "$(OutDir)" copy 
"$(LibVmdkLib)\libvmdk.dll" "$(OutDir)" copy "$(LibVhdiLib)\libvhdi.dll" "$(OutDir)" MaxSpeed true $(ProjectDir)\..\..\;$(TskNugetIncludes);%(AdditionalIncludeDirectories) NOMINMAX;_CRT_SECURE_NO_DEPRECATE;GUID_WINDOWS;_CRT_SECURE_NO_WARNINGS;HAVE_LIBEWF;HAVE_LIBZ;HAVE_LIBVMDK;HAVE_LIBVHDI;WIN32;NDEBUG;_LIB;WINVER=0x0501;%(PreprocessorDefinitions) MultiThreadedDLL true Level3 ProgramDatabase 4200;4814;5030 copy "$(LibEwfLib)\libewf.dll" "$(OutDir)" copy "$(ZlibDll)\zlib.dll" "$(OutDir)" copy "$(LibVmdkLib)\libvmdk.dll" "$(OutDir)" copy "$(LibVhdiLib)\libvhdi.dll" "$(OutDir)" xcopy /E /Y "$(UniversalCRTSdkDir)\redist\ucrt\DLLS\$(PlatformTarget)" "$(OutDir)" xcopy /E /Y "$(VCInstallDir)\redist\$(PlatformTarget)\Microsoft.VC140.CRT" "$(OutDir)" MaxSpeed true $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) NOMINMAX;_CRT_SECURE_NO_DEPRECATE;GUID_WINDOWS;_CRT_SECURE_NO_WARNINGS;WIN32;NDEBUG;_LIB;WINVER=0x0601;%(PreprocessorDefinitions) MultiThreaded true Level3 ProgramDatabase false 4200;4814;5030 MaxSpeed true $(ProjectDir)\..\..\;$(TskNugetIncludes);%(AdditionalIncludeDirectories) NOMINMAX;_CRT_SECURE_NO_DEPRECATE;GUID_WINDOWS;_CRT_SECURE_NO_WARNINGS;HAVE_LIBEWF;HAVE_LIBOPENSSL;HAVE_LIBVMDK;HAVE_LIBVHDI;HAVE_LIBZ;WIN32;NDEBUG;_LIB;WINVER=0x0601;%(PreprocessorDefinitions) MultiThreadedDLL true Level3 ProgramDatabase 4200;4814;5030 copy "$(LibEwfLib)\libewf.dll" "$(OutDir)" copy "$(ZlibDll)\zlib.dll" "$(OutDir)" copy "$(LibVmdkLib)\libvmdk.dll" "$(OutDir)" copy "$(LibVhdiLib)\libvhdi.dll" "$(OutDir)" xcopy /E /Y "$(UniversalCRTSdkDir)\redist\ucrt\DLLS\$(PlatformTarget)" "$(OutDir)" xcopy /E /Y "$(VCInstallDir)\redist\$(PlatformTarget)\Microsoft.VC140.CRT" "$(OutDir)" MaxSpeed true $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) NOMINMAX;_CRT_SECURE_NO_DEPRECATE;GUID_WINDOWS;_CRT_SECURE_NO_WARNINGS;WIN32;NDEBUG;_LIB;WINVER=0x0601;%(PreprocessorDefinitions) MultiThreaded true Level3 ProgramDatabase false 4200;4814;5030 Disabled 
$(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) NOMINMAX;_CRT_SECURE_NO_DEPRECATE;GUID_WINDOWS;_CRT_SECURE_NO_WARNINGS;WIN32;_DEBUG;_LIB;WINVER=0x0601;%(PreprocessorDefinitions) true EnableFastChecks MultiThreadedDebugDLL Level3 EditAndContinue 4200;4814;5030 Disabled $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) NOMINMAX;_CRT_SECURE_NO_DEPRECATE;GUID_WINDOWS;_CRT_SECURE_NO_WARNINGS;WIN32;_DEBUG;_LIB;WINVER=0x0601;%(PreprocessorDefinitions) EnableFastChecks MultiThreadedDebug Level3 ProgramDatabase 4200;4814;5030 This project references NuGet package(s) that are missing on this computer. Use NuGet Package Restore to download them. For more information, see http://go.microsoft.com/fwlink/?LinkID=322105. The missing file is {0}. sleuthkit-4.11.1/win32/blkcat/blkcat.vcxproj000755 000765 000024 00000047255 14137073414 021507 0ustar00carrierstaff000000 000000  Debug_NoLibs Win32 Debug_NoLibs x64 Debug Win32 Debug x64 Release_NoLibs Win32 Release_NoLibs x64 Release Win32 Release x64 {A2BEA467-A4CC-4FA6-9C74-587498E35467} blkcat Win32Proj Application Unicode v140_xp Application Unicode v140_xp Application Unicode true v140_xp Application Unicode true v140_xp Application Unicode true v140_xp Application Unicode true v140_xp Application Unicode v140_xp Application Unicode v140_xp <_ProjectFileVersion>10.0.30319.1 $(SolutionDir)$(Configuration)\ $(OutDir) $(IntDir) $(IntDir) true true $(SolutionDir)$(Configuration)\ $(SolutionDir)$(Configuration)\ $(OutDir) $(OutDir) $(IntDir) $(IntDir) $(IntDir) $(IntDir) false false false false $(SolutionDir)$(Configuration)\ $(OutDir) $(IntDir) $(IntDir) true true Disabled $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) WIN32;_CRT_SECURE_NO_WARNINGS;_DEBUG;_CONSOLE;_ITERATOR_DEBUG_LEVEL=2;WINVER=0x0601;%(PreprocessorDefinitions) true EnableFastChecks MultiThreadedDebugDLL Level3 EditAndContinue libvhdi.lib;libvmdk.lib;libewf.lib;zlib.lib;%(AdditionalDependencies) $(TskNugetLibs);%(AdditionalLibraryDirectories) true 
Console false MachineX86 Console Disabled $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) WIN32;_CRT_SECURE_NO_WARNINGS;_DEBUG;_CONSOLE;WINVER=0x0601;%(PreprocessorDefinitions) EnableFastChecks MultiThreadedDebugDLL Level3 ProgramDatabase libvhdi.lib;libvmdk.lib;libewf.lib;zlib.lib;%(AdditionalDependencies) $(TskNugetLibs);%(AdditionalLibraryDirectories) true Console false Console $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) WIN32;_CRT_SECURE_NO_WARNINGS;NDEBUG;_CONSOLE;WINVER=0x0501;%(PreprocessorDefinitions) MultiThreadedDLL Level3 ProgramDatabase libvhdi.lib;libvmdk.lib;libewf.lib;zlib.lib;%(AdditionalDependencies) $(TskNugetLibs);%(AdditionalLibraryDirectories) true Console true true false MachineX86 Console $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) WIN32;_CRT_SECURE_NO_WARNINGS;NDEBUG;_CONSOLE;WINVER=0x0601;%(PreprocessorDefinitions) MultiThreaded Level3 ProgramDatabase false %(AdditionalDependencies) %(AdditionalLibraryDirectories) true Console true true false MachineX86 Console $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) WIN32;_CRT_SECURE_NO_WARNINGS;NDEBUG;_CONSOLE;WINVER=0x0601;%(PreprocessorDefinitions) MultiThreadedDLL Level3 ProgramDatabase libvhdi.lib;libvmdk.lib;libewf.lib;zlib.lib;libcrypto.lib;%(AdditionalDependencies) $(TskNugetLibs);%(AdditionalLibraryDirectories) true Console true true false Console $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) WIN32;_CRT_SECURE_NO_WARNINGS;NDEBUG;_CONSOLE;WINVER=0x0601;%(PreprocessorDefinitions) MultiThreaded Level3 ProgramDatabase false %(AdditionalDependencies) %(AdditionalLibraryDirectories) true Console true true false Console Disabled $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) WIN32;_CRT_SECURE_NO_WARNINGS;_DEBUG;_CONSOLE;WINVER=0x0601;%(PreprocessorDefinitions) true EnableFastChecks MultiThreadedDebugDLL Level3 EditAndContinue %(AdditionalLibraryDirectories) true Console false MachineX86 Console Disabled $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) 
WIN32;_CRT_SECURE_NO_WARNINGS;_DEBUG;_CONSOLE;WINVER=0x0601;%(PreprocessorDefinitions) EnableFastChecks MultiThreadedDebug Level3 ProgramDatabase %(AdditionalLibraryDirectories) true Console false Console {76efc06c-1f64-4478-abe8-79832716b393} false sleuthkit-4.11.1/win32/mmstat/mmstat.vcxproj000755 000765 000024 00000047255 14137073414 021621 0ustar00carrierstaff000000 000000  Debug_NoLibs Win32 Debug_NoLibs x64 Debug Win32 Debug x64 Release_NoLibs Win32 Release_NoLibs x64 Release Win32 Release x64 {5D75FBFB-539A-4014-ACEB-520BB16F5BFC} mmstat Win32Proj Application Unicode v140_xp Application Unicode v140_xp Application Unicode true v140_xp Application Unicode true v140_xp Application Unicode true v140_xp Application Unicode true v140_xp Application Unicode v140_xp Application Unicode v140_xp <_ProjectFileVersion>10.0.30319.1 $(SolutionDir)$(Configuration)\ $(OutDir) $(IntDir) $(IntDir) true true $(SolutionDir)$(Configuration)\ $(SolutionDir)$(Configuration)\ $(OutDir) $(OutDir) $(IntDir) $(IntDir) $(IntDir) $(IntDir) false false false false $(SolutionDir)$(Configuration)\ $(OutDir) $(IntDir) $(IntDir) true true Disabled $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) WIN32;_CRT_SECURE_NO_WARNINGS;_DEBUG;_CONSOLE;_ITERATOR_DEBUG_LEVEL=2;WINVER=0x0601;%(PreprocessorDefinitions) true EnableFastChecks MultiThreadedDebugDLL Level3 EditAndContinue libvhdi.lib;libvmdk.lib;libewf.lib;zlib.lib;%(AdditionalDependencies) $(TskNugetLibs);%(AdditionalLibraryDirectories) true Console false MachineX86 Console Disabled $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) WIN32;_CRT_SECURE_NO_WARNINGS;_DEBUG;_CONSOLE;WINVER=0x0601;%(PreprocessorDefinitions) EnableFastChecks MultiThreadedDebugDLL Level3 ProgramDatabase libvhdi.lib;libvmdk.lib;libewf.lib;zlib.lib;%(AdditionalDependencies) $(TskNugetLibs);%(AdditionalLibraryDirectories) true Console false Console $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) 
WIN32;_CRT_SECURE_NO_WARNINGS;NDEBUG;_CONSOLE;WINVER=0x0501;%(PreprocessorDefinitions) MultiThreadedDLL Level3 ProgramDatabase libvhdi.lib;libvmdk.lib;libewf.lib;zlib.lib;%(AdditionalDependencies) $(TskNugetLibs);%(AdditionalLibraryDirectories) true Console true true false MachineX86 Console $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) WIN32;_CRT_SECURE_NO_WARNINGS;NDEBUG;_CONSOLE;WINVER=0x0601;%(PreprocessorDefinitions) MultiThreaded Level3 ProgramDatabase false %(AdditionalDependencies) %(AdditionalLibraryDirectories) true Console true true false MachineX86 Console $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) WIN32;_CRT_SECURE_NO_WARNINGS;NDEBUG;_CONSOLE;WINVER=0x0601;%(PreprocessorDefinitions) MultiThreadedDLL Level3 ProgramDatabase libvhdi.lib;libvmdk.lib;libewf.lib;zlib.lib;libcrypto.lib;%(AdditionalDependencies) $(TskNugetLibs);%(AdditionalLibraryDirectories) true Console true true false Console $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) WIN32;_CRT_SECURE_NO_WARNINGS;NDEBUG;_CONSOLE;WINVER=0x0601;%(PreprocessorDefinitions) MultiThreaded Level3 ProgramDatabase false %(AdditionalDependencies) %(AdditionalLibraryDirectories) true Console true true false Console Disabled $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) WIN32;_CRT_SECURE_NO_WARNINGS;_DEBUG;_CONSOLE;WINVER=0x0601;%(PreprocessorDefinitions) true EnableFastChecks MultiThreadedDebugDLL Level3 EditAndContinue %(AdditionalLibraryDirectories) true Console false MachineX86 Console Disabled $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) WIN32;_CRT_SECURE_NO_WARNINGS;_DEBUG;_CONSOLE;WINVER=0x0601;%(PreprocessorDefinitions) EnableFastChecks MultiThreadedDebug Level3 ProgramDatabase %(AdditionalLibraryDirectories) true Console false Console {76efc06c-1f64-4478-abe8-79832716b393} false sleuthkit-4.11.1/win32/callback-cpp-sample/callback-cpp-sample.vcxproj000755 000765 000024 00000047060 14137073414 026367 0ustar00carrierstaff000000 000000  Debug_NoLibs Win32 Debug_NoLibs x64 
Debug Win32 Debug x64 Release_NoLibs Win32 Release_NoLibs x64 Release Win32 Release x64 {3B32F1BE-9686-4DC9-8197-F734D146E9F8} callbackcppsample Win32Proj Application Unicode v140_xp Application Unicode v140_xp Application Unicode true v140_xp Application Unicode true v140_xp Application Unicode true v140_xp Application Unicode true v140_xp Application Unicode v140_xp Application Unicode v140_xp <_ProjectFileVersion>10.0.30319.1 $(SolutionDir)$(Configuration)\ $(OutDir) $(IntDir) $(IntDir) true true $(SolutionDir)$(Configuration)\ $(SolutionDir)$(Configuration)\ $(OutDir) $(OutDir) $(IntDir) $(IntDir) $(IntDir) $(IntDir) false false false false $(SolutionDir)$(Configuration)\ $(OutDir) $(IntDir) $(IntDir) true true Disabled $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) WIN32;_CRT_SECURE_NO_WARNINGS;_DEBUG;_WINDOWS;_ITERATOR_DEBUG_LEVEL=2;WINVER=0x0601;%(PreprocessorDefinitions) true EnableFastChecks MultiThreadedDebugDLL Level3 EditAndContinue libvhdi.lib;libvmdk.lib;libewf.lib;zlib.lib;%(AdditionalDependencies) $(TskNugetLibs);%(AdditionalLibraryDirectories) true Console MachineX86 Console Disabled $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) WIN32;_CRT_SECURE_NO_WARNINGS;_DEBUG;_WINDOWS;WINVER=0x0601;%(PreprocessorDefinitions) EnableFastChecks MultiThreadedDebugDLL Level3 ProgramDatabase libvhdi.lib;libvmdk.lib;libewf.lib;zlib.lib;%(AdditionalDependencies) $(TskNugetLibs);%(AdditionalLibraryDirectories) true Console Console MaxSpeed true $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) WIN32;_CRT_SECURE_NO_WARNINGS;NDEBUG;_WINDOWS;WINVER=0x0501;%(PreprocessorDefinitions) MultiThreadedDLL true Level3 ProgramDatabase libvhdi.lib;libvmdk.lib;libewf.lib;zlib.lib;%(AdditionalDependencies) $(TskNugetLibs);%(AdditionalLibraryDirectories) true Console true true MachineX86 Console MaxSpeed true $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) WIN32;_CRT_SECURE_NO_WARNINGS;NDEBUG;_WINDOWS;WINVER=0x0601;%(PreprocessorDefinitions) MultiThreaded true 
Level3 ProgramDatabase false %(AdditionalDependencies) %(AdditionalLibraryDirectories) true Console true true MachineX86 Console MaxSpeed true $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) WIN32;_CRT_SECURE_NO_WARNINGS;NDEBUG;_WINDOWS;WINVER=0x0601;%(PreprocessorDefinitions) MultiThreadedDLL true Level3 ProgramDatabase libvhdi.lib;libvmdk.lib;libewf.lib;zlib.lib;libcrypto.lib;%(AdditionalDependencies) $(TskNugetLibs);%(AdditionalLibraryDirectories) true Console true true Console MaxSpeed true $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) WIN32;_CRT_SECURE_NO_WARNINGS;NDEBUG;_WINDOWS;WINVER=0x0601;%(PreprocessorDefinitions) MultiThreaded true Level3 ProgramDatabase false %(AdditionalDependencies) %(AdditionalLibraryDirectories) true Console true true Console Disabled $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) WIN32;_CRT_SECURE_NO_WARNINGS;_DEBUG;_WINDOWS;WINVER=0x0601;%(PreprocessorDefinitions) true EnableFastChecks MultiThreadedDebugDLL Level3 EditAndContinue %(AdditionalLibraryDirectories) true Console false MachineX86 Console Disabled $(ProjectDir)\..\..\;%(AdditionalIncludeDirectories) WIN32;_CRT_SECURE_NO_WARNINGS;_DEBUG;_WINDOWS;WINVER=0x0601;%(PreprocessorDefinitions) EnableFastChecks MultiThreadedDebug Level3 ProgramDatabase %(AdditionalLibraryDirectories) true Console false Console {76efc06c-1f64-4478-abe8-79832716b393} false sleuthkit-4.11.1/bindings/java/000755 000765 000024 00000000000 14137073564 017134 5ustar00carrierstaff000000 000000 sleuthkit-4.11.1/bindings/java/build-windows.xml000644 000765 000024 00000007217 14137073413 022445 0ustar00carrierstaff000000 000000 sleuthkit-4.11.1/bindings/java/ivysettings.xml000644 000765 000024 00000000424 14137073413 022237 0ustar00carrierstaff000000 000000 sleuthkit-4.11.1/bindings/java/build-unix.xml000644 000765 000024 00000011022 14137073413 021723 0ustar00carrierstaff000000 000000 sleuthkit-4.11.1/bindings/java/build.xml000644 000765 000024 00000021040 14137073430 020742 
0ustar00carrierstaff000000 000000 Sleuthkit Java DataModel sleuthkit-4.11.1/bindings/java/ivy.xml000644 000765 000024 00000002117 14137073413 020457 0ustar00carrierstaff000000 000000 sleuthkit-4.11.1/bindings/java/Makefile.am000644 000765 000024 00000000502 14137073413 021156 0ustar00carrierstaff000000 000000 # Compile the sub directories SUBDIRS = jni tsk_jar = $(top_builddir)/bindings/java/dist/sleuthkit-$(PACKAGE_VERSION).jar jardir = $(prefix)/share/java jar_DATA = $(tsk_jar) if OFFLINE ant_args=-Doffline=true else endif $(tsk_jar): all-local: ant dist $(ant_args) CLEANFILES = $(tsk_jar) clean-local: ant clean sleuthkit-4.11.1/bindings/java/nbproject/000755 000765 000024 00000000000 14137073557 021124 5ustar00carrierstaff000000 000000 sleuthkit-4.11.1/bindings/java/README.txt000644 000765 000024 00000005125 14137073413 020626 0ustar00carrierstaff000000 000000 Sleuth Kit Java Bindings Overview The core functionality of the Sleuth Kit is in the C/C++ library. The functionality is made available to Java applications by using JNI. The theory is that a SQLite database is created by the C++ library and then it is queried by native Java code. JNI methods exist to make the database and to read file content (and other raw data that is too large to fit into the database). To use the Java bindings, you must have the Sleuth Kit datamodel JAR file compiled and have compiled the associated dynamic library from the C/C++ code. Requirements: * Java JDK * Ant * Jar files as listed in ivy.xml (which will get downloaded automatically) The following jar files must be on the classpath for building and running. Version details can be found in ivy.xml. They will be automatically downloaded if you do not compile in offline mode. * sqlite-jdbc * postgresql-jdbc * c3p0 Building the Dynamic Library (for JNI) The win32 Visual Studio solution has a tsk_jni project that will build the JNI dll. 
To use this project, you will need to have JDK_HOME environment variable set to the root directory of JDK. On non-windows environments, it should just build as part of running ./configure and make. If the needed Java components are not found, it will not be built. This library will depend on libewf, zlib, and other libraries that TSK was built to depend on. In Windows, the core of TSK (libtsk) is a static library that is fully embedded in the libtsk_jni.dll file. On non-Windows environments, libtsk_jni will depend on the libtsk dynamic library. Building The Jar File Build with the default ant target (by running 'ant'). This will download the required libraries (using ivy) and place the jar file in the dist folder along with the needed dll and library files. Using the Jar file and Library There are two categories of things that need to be in the right place: - The Jar file needs to be on the CLASSPATH. - The libewf and zlib dynamic libraries need to be loadable. The TSK JNI native library is inside of the Jar file and it will depend on the libewf and zlib libraries. On a Unix-like platform, that means that if you did a 'make install' with libewf and zlib, you should be OK. On Windows, you should copy these dlls to a place that is found based on the rules of Windows library loading. Note that these locations are based on the rules of Windows loading them and not necessarily based on java's loading paths. Refer to the javadocs for details on using the API: http://sleuthkit.org/sleuthkit/docs/jni-docs/ ------------ Brian Carrier Jan 2014 sleuthkit-4.11.1/bindings/java/doxygen/000755 000765 000024 00000000000 14137073557 020613 5ustar00carrierstaff000000 000000 sleuthkit-4.11.1/bindings/java/jni/000755 000765 000024 00000000000 14137073564 017714 5ustar00carrierstaff000000 000000 sleuthkit-4.11.1/bindings/java/Makefile.in000644 000765 000024 00000053532 14137073437 021210 0ustar00carrierstaff000000 000000 # Makefile.in generated by automake 1.15.1 from Makefile.am. 
# @configure_input@ # Copyright (C) 1994-2017 Free Software Foundation, Inc. # This Makefile.in is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY, to the extent permitted by law; without # even the implied warranty of MERCHANTABILITY or FITNESS FOR A # PARTICULAR PURPOSE. @SET_MAKE@ VPATH = @srcdir@ am__is_gnu_make = { \ if test -z '$(MAKELEVEL)'; then \ false; \ elif test -n '$(MAKE_HOST)'; then \ true; \ elif test -n '$(MAKE_VERSION)' && test -n '$(CURDIR)'; then \ true; \ else \ false; \ fi; \ } am__make_running_with_option = \ case $${target_option-} in \ ?) ;; \ *) echo "am__make_running_with_option: internal error: invalid" \ "target option '$${target_option-}' specified" >&2; \ exit 1;; \ esac; \ has_opt=no; \ sane_makeflags=$$MAKEFLAGS; \ if $(am__is_gnu_make); then \ sane_makeflags=$$MFLAGS; \ else \ case $$MAKEFLAGS in \ *\\[\ \ ]*) \ bs=\\; \ sane_makeflags=`printf '%s\n' "$$MAKEFLAGS" \ | sed "s/$$bs$$bs[$$bs $$bs ]*//g"`;; \ esac; \ fi; \ skip_next=no; \ strip_trailopt () \ { \ flg=`printf '%s\n' "$$flg" | sed "s/$$1.*$$//"`; \ }; \ for flg in $$sane_makeflags; do \ test $$skip_next = yes && { skip_next=no; continue; }; \ case $$flg in \ *=*|--*) continue;; \ -*I) strip_trailopt 'I'; skip_next=yes;; \ -*I?*) strip_trailopt 'I';; \ -*O) strip_trailopt 'O'; skip_next=yes;; \ -*O?*) strip_trailopt 'O';; \ -*l) strip_trailopt 'l'; skip_next=yes;; \ -*l?*) strip_trailopt 'l';; \ -[dEDm]) skip_next=yes;; \ -[JT]) skip_next=yes;; \ esac; \ case $$flg in \ *$$target_option*) has_opt=yes; break;; \ esac; \ done; \ test $$has_opt = yes am__make_dryrun = (target_option=n; $(am__make_running_with_option)) am__make_keepgoing = (target_option=k; $(am__make_running_with_option)) pkgdatadir = $(datadir)/@PACKAGE@ pkgincludedir = $(includedir)/@PACKAGE@ 
pkglibdir = $(libdir)/@PACKAGE@ pkglibexecdir = $(libexecdir)/@PACKAGE@ am__cd = CDPATH="$${ZSH_VERSION+.}$(PATH_SEPARATOR)" && cd install_sh_DATA = $(install_sh) -c -m 644 install_sh_PROGRAM = $(install_sh) -c install_sh_SCRIPT = $(install_sh) -c INSTALL_HEADER = $(INSTALL_DATA) transform = $(program_transform_name) NORMAL_INSTALL = : PRE_INSTALL = : POST_INSTALL = : NORMAL_UNINSTALL = : PRE_UNINSTALL = : POST_UNINSTALL = : build_triplet = @build@ host_triplet = @host@ subdir = bindings/java ACLOCAL_M4 = $(top_srcdir)/aclocal.m4 am__aclocal_m4_deps = $(top_srcdir)/m4/ax_pkg_check_modules.m4 \ $(top_srcdir)/m4/tsk_opt_dep_check.m4 \ $(top_srcdir)/m4/ax_pthread.m4 $(top_srcdir)/m4/cppunit.m4 \ $(top_srcdir)/m4/ax_jni_include_dir.m4 \ $(top_srcdir)/m4/ac_prog_javac_works.m4 \ $(top_srcdir)/m4/ac_prog_javac.m4 \ $(top_srcdir)/m4/ac_prog_java_works.m4 \ $(top_srcdir)/m4/ac_prog_java.m4 \ $(top_srcdir)/m4/ax_cxx_compile_stdcxx.m4 \ $(top_srcdir)/configure.ac am__configure_deps = $(am__aclocal_m4_deps) $(CONFIGURE_DEPENDENCIES) \ $(ACLOCAL_M4) DIST_COMMON = $(srcdir)/Makefile.am $(am__DIST_COMMON) mkinstalldirs = $(install_sh) -d CONFIG_HEADER = $(top_builddir)/tsk/tsk_config.h CONFIG_CLEAN_FILES = CONFIG_CLEAN_VPATH_FILES = AM_V_P = $(am__v_P_@AM_V@) am__v_P_ = $(am__v_P_@AM_DEFAULT_V@) am__v_P_0 = false am__v_P_1 = : AM_V_GEN = $(am__v_GEN_@AM_V@) am__v_GEN_ = $(am__v_GEN_@AM_DEFAULT_V@) am__v_GEN_0 = @echo " GEN " $@; am__v_GEN_1 = AM_V_at = $(am__v_at_@AM_V@) am__v_at_ = $(am__v_at_@AM_DEFAULT_V@) am__v_at_0 = @ am__v_at_1 = SOURCES = DIST_SOURCES = RECURSIVE_TARGETS = all-recursive check-recursive cscopelist-recursive \ ctags-recursive dvi-recursive html-recursive info-recursive \ install-data-recursive install-dvi-recursive \ install-exec-recursive install-html-recursive \ install-info-recursive install-pdf-recursive \ install-ps-recursive install-recursive installcheck-recursive \ installdirs-recursive pdf-recursive ps-recursive \ tags-recursive 
uninstall-recursive am__can_run_installinfo = \ case $$AM_UPDATE_INFO_DIR in \ n|no|NO) false;; \ *) (install-info --version) >/dev/null 2>&1;; \ esac am__vpath_adj_setup = srcdirstrip=`echo "$(srcdir)" | sed 's|.|.|g'`; am__vpath_adj = case $$p in \ $(srcdir)/*) f=`echo "$$p" | sed "s|^$$srcdirstrip/||"`;; \ *) f=$$p;; \ esac; am__strip_dir = f=`echo $$p | sed -e 's|^.*/||'`; am__install_max = 40 am__nobase_strip_setup = \ srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*|]/\\\\&/g'` am__nobase_strip = \ for p in $$list; do echo "$$p"; done | sed -e "s|$$srcdirstrip/||" am__nobase_list = $(am__nobase_strip_setup); \ for p in $$list; do echo "$$p $$p"; done | \ sed "s| $$srcdirstrip/| |;"' / .*\//!s/ .*/ ./; s,\( .*\)/[^/]*$$,\1,' | \ $(AWK) 'BEGIN { files["."] = "" } { files[$$2] = files[$$2] " " $$1; \ if (++n[$$2] == $(am__install_max)) \ { print $$2, files[$$2]; n[$$2] = 0; files[$$2] = "" } } \ END { for (dir in files) print dir, files[dir] }' am__base_list = \ sed '$$!N;$$!N;$$!N;$$!N;$$!N;$$!N;$$!N;s/\n/ /g' | \ sed '$$!N;$$!N;$$!N;$$!N;s/\n/ /g' am__uninstall_files_from_dir = { \ test -z "$$files" \ || { test ! -d "$$dir" && test ! -f "$$dir" && test ! -r "$$dir"; } \ || { echo " ( cd '$$dir' && rm -f" $$files ")"; \ $(am__cd) "$$dir" && rm -f $$files; }; \ } am__installdirs = "$(DESTDIR)$(jardir)" DATA = $(jar_DATA) RECURSIVE_CLEAN_TARGETS = mostlyclean-recursive clean-recursive \ distclean-recursive maintainer-clean-recursive am__recursive_targets = \ $(RECURSIVE_TARGETS) \ $(RECURSIVE_CLEAN_TARGETS) \ $(am__extra_recursive_targets) AM_RECURSIVE_TARGETS = $(am__recursive_targets:-recursive=) TAGS CTAGS \ distdir am__tagged_files = $(HEADERS) $(SOURCES) $(TAGS_FILES) $(LISP) # Read a list of newline-separated strings from the standard input, # and print each of them once, without duplicates. Input order is # *not* preserved. 
am__uniquify_input = $(AWK) '\ BEGIN { nonempty = 0; } \ { items[$$0] = 1; nonempty = 1; } \ END { if (nonempty) { for (i in items) print i; }; } \ ' # Make sure the list of sources is unique. This is necessary because, # e.g., the same source file might be shared among _SOURCES variables # for different programs/libraries. am__define_uniq_tagged_files = \ list='$(am__tagged_files)'; \ unique=`for i in $$list; do \ if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \ done | $(am__uniquify_input)` ETAGS = etags CTAGS = ctags DIST_SUBDIRS = $(SUBDIRS) am__DIST_COMMON = $(srcdir)/Makefile.in DISTFILES = $(DIST_COMMON) $(DIST_SOURCES) $(TEXINFOS) $(EXTRA_DIST) am__relativize = \ dir0=`pwd`; \ sed_first='s,^\([^/]*\)/.*$$,\1,'; \ sed_rest='s,^[^/]*/*,,'; \ sed_last='s,^.*/\([^/]*\)$$,\1,'; \ sed_butlast='s,/*[^/]*$$,,'; \ while test -n "$$dir1"; do \ first=`echo "$$dir1" | sed -e "$$sed_first"`; \ if test "$$first" != "."; then \ if test "$$first" = ".."; then \ dir2=`echo "$$dir0" | sed -e "$$sed_last"`/"$$dir2"; \ dir0=`echo "$$dir0" | sed -e "$$sed_butlast"`; \ else \ first2=`echo "$$dir2" | sed -e "$$sed_first"`; \ if test "$$first2" = "$$first"; then \ dir2=`echo "$$dir2" | sed -e "$$sed_rest"`; \ else \ dir2="../$$dir2"; \ fi; \ dir0="$$dir0"/"$$first"; \ fi; \ fi; \ dir1=`echo "$$dir1" | sed -e "$$sed_rest"`; \ done; \ reldir="$$dir2" ACLOCAL = @ACLOCAL@ ALLOCA = @ALLOCA@ AMTAR = @AMTAR@ AM_CFLAGS = @AM_CFLAGS@ AM_CXXFLAGS = @AM_CXXFLAGS@ AM_DEFAULT_VERBOSITY = @AM_DEFAULT_VERBOSITY@ ANT_FOUND = @ANT_FOUND@ AR = @AR@ AUTOCONF = @AUTOCONF@ AUTOHEADER = @AUTOHEADER@ AUTOMAKE = @AUTOMAKE@ AWK = @AWK@ AX_PACKAGE_REQUIRES = @AX_PACKAGE_REQUIRES@ AX_PACKAGE_REQUIRES_PRIVATE = @AX_PACKAGE_REQUIRES_PRIVATE@ CC = @CC@ CCDEPMODE = @CCDEPMODE@ CFLAGS = @CFLAGS@ CPP = @CPP@ CPPFLAGS = @CPPFLAGS@ CPPUNIT_CFLAGS = @CPPUNIT_CFLAGS@ CPPUNIT_CONFIG = @CPPUNIT_CONFIG@ CPPUNIT_LIBS = @CPPUNIT_LIBS@ CXX = @CXX@ CXXCPP = @CXXCPP@ CXXDEPMODE = @CXXDEPMODE@ CXXFLAGS = 
@CXXFLAGS@ CYGPATH_W = @CYGPATH_W@ DEFS = @DEFS@ DEPDIR = @DEPDIR@ DLLTOOL = @DLLTOOL@ DSYMUTIL = @DSYMUTIL@ DUMPBIN = @DUMPBIN@ ECHO_C = @ECHO_C@ ECHO_N = @ECHO_N@ ECHO_T = @ECHO_T@ EGREP = @EGREP@ EWF_CFLAGS = @EWF_CFLAGS@ EWF_LIBS = @EWF_LIBS@ EXEEXT = @EXEEXT@ FGREP = @FGREP@ GREP = @GREP@ HAVE_CXX14 = @HAVE_CXX14@ IGNORE = @IGNORE@ INSTALL = @INSTALL@ INSTALL_DATA = @INSTALL_DATA@ INSTALL_PROGRAM = @INSTALL_PROGRAM@ INSTALL_SCRIPT = @INSTALL_SCRIPT@ INSTALL_STRIP_PROGRAM = @INSTALL_STRIP_PROGRAM@ JAVA = @JAVA@ JAVAC = @JAVAC@ JNI_CPPFLAGS = @JNI_CPPFLAGS@ LD = @LD@ LDFLAGS = @LDFLAGS@ LIBOBJS = @LIBOBJS@ LIBS = @LIBS@ LIBTOOL = @LIBTOOL@ LIBTSK_LDFLAGS = @LIBTSK_LDFLAGS@ LIPO = @LIPO@ LN_S = @LN_S@ LTLIBOBJS = @LTLIBOBJS@ LT_SYS_LIBRARY_PATH = @LT_SYS_LIBRARY_PATH@ MAINT = @MAINT@ MAKEINFO = @MAKEINFO@ MANIFEST_TOOL = @MANIFEST_TOOL@ MKDIR_P = @MKDIR_P@ NM = @NM@ NMEDIT = @NMEDIT@ OBJDUMP = @OBJDUMP@ OBJEXT = @OBJEXT@ OTOOL = @OTOOL@ OTOOL64 = @OTOOL64@ PACKAGE = @PACKAGE@ PACKAGE_BUGREPORT = @PACKAGE_BUGREPORT@ PACKAGE_LIBS_PRIVATE = @PACKAGE_LIBS_PRIVATE@ PACKAGE_NAME = @PACKAGE_NAME@ PACKAGE_STRING = @PACKAGE_STRING@ PACKAGE_TARNAME = @PACKAGE_TARNAME@ PACKAGE_URL = @PACKAGE_URL@ PACKAGE_VERSION = @PACKAGE_VERSION@ PATH_SEPARATOR = @PATH_SEPARATOR@ PERL = @PERL@ PKGCONFIG = @PKGCONFIG@ PKG_CONFIG = @PKG_CONFIG@ PKG_CONFIG_LIBDIR = @PKG_CONFIG_LIBDIR@ PKG_CONFIG_PATH = @PKG_CONFIG_PATH@ PTHREAD_CC = @PTHREAD_CC@ PTHREAD_CFLAGS = @PTHREAD_CFLAGS@ PTHREAD_LIBS = @PTHREAD_LIBS@ RANLIB = @RANLIB@ SED = @SED@ SET_MAKE = @SET_MAKE@ SHELL = @SHELL@ SQLITE3_CFLAGS = @SQLITE3_CFLAGS@ SQLITE3_LIBS = @SQLITE3_LIBS@ STRIP = @STRIP@ VERSION = @VERSION@ VHDI_CFLAGS = @VHDI_CFLAGS@ VHDI_LIBS = @VHDI_LIBS@ VMDK_CFLAGS = @VMDK_CFLAGS@ VMDK_LIBS = @VMDK_LIBS@ ZLIB_CFLAGS = @ZLIB_CFLAGS@ ZLIB_LIBS = @ZLIB_LIBS@ _ACJNI_JAVAC = @_ACJNI_JAVAC@ abs_builddir = @abs_builddir@ abs_srcdir = @abs_srcdir@ abs_top_builddir = @abs_top_builddir@ abs_top_srcdir = @abs_top_srcdir@ ac_ct_AR = 
@ac_ct_AR@ ac_ct_CC = @ac_ct_CC@ ac_ct_CXX = @ac_ct_CXX@ ac_ct_DUMPBIN = @ac_ct_DUMPBIN@ am__include = @am__include@ am__leading_dot = @am__leading_dot@ am__quote = @am__quote@ am__tar = @am__tar@ am__untar = @am__untar@ ax_pthread_config = @ax_pthread_config@ bindir = @bindir@ build = @build@ build_alias = @build_alias@ build_cpu = @build_cpu@ build_os = @build_os@ build_vendor = @build_vendor@ builddir = @builddir@ datadir = @datadir@ datarootdir = @datarootdir@ docdir = @docdir@ dvidir = @dvidir@ exec_prefix = @exec_prefix@ host = @host@ host_alias = @host_alias@ host_cpu = @host_cpu@ host_os = @host_os@ host_vendor = @host_vendor@ htmldir = @htmldir@ includedir = @includedir@ infodir = @infodir@ install_sh = @install_sh@ libdir = @libdir@ libexecdir = @libexecdir@ localedir = @localedir@ localstatedir = @localstatedir@ mandir = @mandir@ mkdir_p = @mkdir_p@ oldincludedir = @oldincludedir@ pdfdir = @pdfdir@ prefix = @prefix@ program_transform_name = @program_transform_name@ psdir = @psdir@ sbindir = @sbindir@ sharedstatedir = @sharedstatedir@ srcdir = @srcdir@ sysconfdir = @sysconfdir@ target_alias = @target_alias@ top_build_prefix = @top_build_prefix@ top_builddir = @top_builddir@ top_srcdir = @top_srcdir@ uudecode = @uudecode@ # Compile the sub directories SUBDIRS = jni tsk_jar = $(top_builddir)/bindings/java/dist/sleuthkit-$(PACKAGE_VERSION).jar jardir = $(prefix)/share/java jar_DATA = $(tsk_jar) @OFFLINE_TRUE@ant_args = -Doffline=true CLEANFILES = $(tsk_jar) all: all-recursive .SUFFIXES: $(srcdir)/Makefile.in: @MAINTAINER_MODE_TRUE@ $(srcdir)/Makefile.am $(am__configure_deps) @for dep in $?; do \ case '$(am__configure_deps)' in \ *$$dep*) \ ( cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh ) \ && { if test -f $@; then exit 0; else break; fi; }; \ exit 1;; \ esac; \ done; \ echo ' cd $(top_srcdir) && $(AUTOMAKE) --foreign bindings/java/Makefile'; \ $(am__cd) $(top_srcdir) && \ $(AUTOMAKE) --foreign bindings/java/Makefile Makefile: 
$(srcdir)/Makefile.in $(top_builddir)/config.status @case '$?' in \ *config.status*) \ cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh;; \ *) \ echo ' cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe)'; \ cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe);; \ esac; $(top_builddir)/config.status: $(top_srcdir)/configure $(CONFIG_STATUS_DEPENDENCIES) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(top_srcdir)/configure: @MAINTAINER_MODE_TRUE@ $(am__configure_deps) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(ACLOCAL_M4): @MAINTAINER_MODE_TRUE@ $(am__aclocal_m4_deps) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(am__aclocal_m4_deps): mostlyclean-libtool: -rm -f *.lo clean-libtool: -rm -rf .libs _libs install-jarDATA: $(jar_DATA) @$(NORMAL_INSTALL) @list='$(jar_DATA)'; test -n "$(jardir)" || list=; \ if test -n "$$list"; then \ echo " $(MKDIR_P) '$(DESTDIR)$(jardir)'"; \ $(MKDIR_P) "$(DESTDIR)$(jardir)" || exit 1; \ fi; \ for p in $$list; do \ if test -f "$$p"; then d=; else d="$(srcdir)/"; fi; \ echo "$$d$$p"; \ done | $(am__base_list) | \ while read files; do \ echo " $(INSTALL_DATA) $$files '$(DESTDIR)$(jardir)'"; \ $(INSTALL_DATA) $$files "$(DESTDIR)$(jardir)" || exit $$?; \ done uninstall-jarDATA: @$(NORMAL_UNINSTALL) @list='$(jar_DATA)'; test -n "$(jardir)" || list=; \ files=`for p in $$list; do echo $$p; done | sed -e 's|^.*/||'`; \ dir='$(DESTDIR)$(jardir)'; $(am__uninstall_files_from_dir) # This directory's subdirectories are mostly independent; you can cd # into them and run 'make' without going through this Makefile. # To change the values of 'make' variables: instead of editing Makefiles, # (1) if the variable is set in 'config.status', edit 'config.status' # (which will cause the Makefiles to be regenerated when you run 'make'); # (2) otherwise, pass the desired values on the 'make' command line. 
$(am__recursive_targets): @fail=; \ if $(am__make_keepgoing); then \ failcom='fail=yes'; \ else \ failcom='exit 1'; \ fi; \ dot_seen=no; \ target=`echo $@ | sed s/-recursive//`; \ case "$@" in \ distclean-* | maintainer-clean-*) list='$(DIST_SUBDIRS)' ;; \ *) list='$(SUBDIRS)' ;; \ esac; \ for subdir in $$list; do \ echo "Making $$target in $$subdir"; \ if test "$$subdir" = "."; then \ dot_seen=yes; \ local_target="$$target-am"; \ else \ local_target="$$target"; \ fi; \ ($(am__cd) $$subdir && $(MAKE) $(AM_MAKEFLAGS) $$local_target) \ || eval $$failcom; \ done; \ if test "$$dot_seen" = "no"; then \ $(MAKE) $(AM_MAKEFLAGS) "$$target-am" || exit 1; \ fi; test -z "$$fail" ID: $(am__tagged_files) $(am__define_uniq_tagged_files); mkid -fID $$unique tags: tags-recursive TAGS: tags tags-am: $(TAGS_DEPENDENCIES) $(am__tagged_files) set x; \ here=`pwd`; \ if ($(ETAGS) --etags-include --version) >/dev/null 2>&1; then \ include_option=--etags-include; \ empty_fix=.; \ else \ include_option=--include; \ empty_fix=; \ fi; \ list='$(SUBDIRS)'; for subdir in $$list; do \ if test "$$subdir" = .; then :; else \ test ! 
-f $$subdir/TAGS || \ set "$$@" "$$include_option=$$here/$$subdir/TAGS"; \ fi; \ done; \ $(am__define_uniq_tagged_files); \ shift; \ if test -z "$(ETAGS_ARGS)$$*$$unique"; then :; else \ test -n "$$unique" || unique=$$empty_fix; \ if test $$# -gt 0; then \ $(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \ "$$@" $$unique; \ else \ $(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \ $$unique; \ fi; \ fi ctags: ctags-recursive CTAGS: ctags ctags-am: $(TAGS_DEPENDENCIES) $(am__tagged_files) $(am__define_uniq_tagged_files); \ test -z "$(CTAGS_ARGS)$$unique" \ || $(CTAGS) $(CTAGSFLAGS) $(AM_CTAGSFLAGS) $(CTAGS_ARGS) \ $$unique GTAGS: here=`$(am__cd) $(top_builddir) && pwd` \ && $(am__cd) $(top_srcdir) \ && gtags -i $(GTAGS_ARGS) "$$here" cscopelist: cscopelist-recursive cscopelist-am: $(am__tagged_files) list='$(am__tagged_files)'; \ case "$(srcdir)" in \ [\\/]* | ?:[\\/]*) sdir="$(srcdir)" ;; \ *) sdir=$(subdir)/$(srcdir) ;; \ esac; \ for i in $$list; do \ if test -f "$$i"; then \ echo "$(subdir)/$$i"; \ else \ echo "$$sdir/$$i"; \ fi; \ done >> $(top_builddir)/cscope.files distclean-tags: -rm -f TAGS ID GTAGS GRTAGS GSYMS GPATH tags distdir: $(DISTFILES) @srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \ topsrcdirstrip=`echo "$(top_srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \ list='$(DISTFILES)'; \ dist_files=`for file in $$list; do echo $$file; done | \ sed -e "s|^$$srcdirstrip/||;t" \ -e "s|^$$topsrcdirstrip/|$(top_builddir)/|;t"`; \ case $$dist_files in \ */*) $(MKDIR_P) `echo "$$dist_files" | \ sed '/\//!d;s|^|$(distdir)/|;s,/[^/]*$$,,' | \ sort -u` ;; \ esac; \ for file in $$dist_files; do \ if test -f $$file || test -d $$file; then d=.; else d=$(srcdir); fi; \ if test -d $$d/$$file; then \ dir=`echo "/$$file" | sed -e 's,/[^/]*$$,,'`; \ if test -d "$(distdir)/$$file"; then \ find "$(distdir)/$$file" -type d ! 
-perm -700 -exec chmod u+rwx {} \;; \ fi; \ if test -d $(srcdir)/$$file && test $$d != $(srcdir); then \ cp -fpR $(srcdir)/$$file "$(distdir)$$dir" || exit 1; \ find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \ fi; \ cp -fpR $$d/$$file "$(distdir)$$dir" || exit 1; \ else \ test -f "$(distdir)/$$file" \ || cp -p $$d/$$file "$(distdir)/$$file" \ || exit 1; \ fi; \ done @list='$(DIST_SUBDIRS)'; for subdir in $$list; do \ if test "$$subdir" = .; then :; else \ $(am__make_dryrun) \ || test -d "$(distdir)/$$subdir" \ || $(MKDIR_P) "$(distdir)/$$subdir" \ || exit 1; \ dir1=$$subdir; dir2="$(distdir)/$$subdir"; \ $(am__relativize); \ new_distdir=$$reldir; \ dir1=$$subdir; dir2="$(top_distdir)"; \ $(am__relativize); \ new_top_distdir=$$reldir; \ echo " (cd $$subdir && $(MAKE) $(AM_MAKEFLAGS) top_distdir="$$new_top_distdir" distdir="$$new_distdir" \\"; \ echo " am__remove_distdir=: am__skip_length_check=: am__skip_mode_fix=: distdir)"; \ ($(am__cd) $$subdir && \ $(MAKE) $(AM_MAKEFLAGS) \ top_distdir="$$new_top_distdir" \ distdir="$$new_distdir" \ am__remove_distdir=: \ am__skip_length_check=: \ am__skip_mode_fix=: \ distdir) \ || exit 1; \ fi; \ done check-am: all-am check: check-recursive all-am: Makefile $(DATA) all-local installdirs: installdirs-recursive installdirs-am: for dir in "$(DESTDIR)$(jardir)"; do \ test -z "$$dir" || $(MKDIR_P) "$$dir"; \ done install: install-recursive install-exec: install-exec-recursive install-data: install-data-recursive uninstall: uninstall-recursive install-am: all-am @$(MAKE) $(AM_MAKEFLAGS) install-exec-am install-data-am installcheck: installcheck-recursive install-strip: if test -z '$(STRIP)'; then \ $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ install; \ else \ $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ 
"INSTALL_PROGRAM_ENV=STRIPPROG='$(STRIP)'" install; \ fi mostlyclean-generic: clean-generic: -test -z "$(CLEANFILES)" || rm -f $(CLEANFILES) distclean-generic: -test -z "$(CONFIG_CLEAN_FILES)" || rm -f $(CONFIG_CLEAN_FILES) -test . = "$(srcdir)" || test -z "$(CONFIG_CLEAN_VPATH_FILES)" || rm -f $(CONFIG_CLEAN_VPATH_FILES) maintainer-clean-generic: @echo "This command is intended for maintainers to use" @echo "it deletes files that may require special tools to rebuild." clean: clean-recursive clean-am: clean-generic clean-libtool clean-local mostlyclean-am distclean: distclean-recursive -rm -f Makefile distclean-am: clean-am distclean-generic distclean-tags dvi: dvi-recursive dvi-am: html: html-recursive html-am: info: info-recursive info-am: install-data-am: install-jarDATA install-dvi: install-dvi-recursive install-dvi-am: install-exec-am: install-html: install-html-recursive install-html-am: install-info: install-info-recursive install-info-am: install-man: install-pdf: install-pdf-recursive install-pdf-am: install-ps: install-ps-recursive install-ps-am: installcheck-am: maintainer-clean: maintainer-clean-recursive -rm -f Makefile maintainer-clean-am: distclean-am maintainer-clean-generic mostlyclean: mostlyclean-recursive mostlyclean-am: mostlyclean-generic mostlyclean-libtool pdf: pdf-recursive pdf-am: ps: ps-recursive ps-am: uninstall-am: uninstall-jarDATA .MAKE: $(am__recursive_targets) install-am install-strip .PHONY: $(am__recursive_targets) CTAGS GTAGS TAGS all all-am all-local \ check check-am clean clean-generic clean-libtool clean-local \ cscopelist-am ctags ctags-am distclean distclean-generic \ distclean-libtool distclean-tags distdir dvi dvi-am html \ html-am info info-am install install-am install-data \ install-data-am install-dvi install-dvi-am install-exec \ install-exec-am install-html install-html-am install-info \ install-info-am install-jarDATA install-man install-pdf \ install-pdf-am install-ps install-ps-am install-strip \ installcheck 
installcheck-am installdirs installdirs-am \ maintainer-clean maintainer-clean-generic mostlyclean \ mostlyclean-generic mostlyclean-libtool pdf pdf-am ps ps-am \ tags tags-am uninstall uninstall-am uninstall-jarDATA .PRECIOUS: Makefile $(tsk_jar): all-local: ant dist $(ant_args) clean-local: ant clean # Tell versions [3.59,3.63) of GNU make to not export all variables. # Otherwise a system limit (for SysV at least) may be exceeded. .NOEXPORT: sleuthkit-4.11.1/bindings/java/src/000755 000765 000024 00000000000 14137073560 017717 5ustar00carrierstaff000000 000000 sleuthkit-4.11.1/bindings/java/src/org/000755 000765 000024 00000000000 14137073556 020513 5ustar00carrierstaff000000 000000 sleuthkit-4.11.1/bindings/java/src/overview.html000644 000765 000024 00000000674 14137073413 022457 0ustar00carrierstaff000000 000000

These classes allow Java programs to access data extracted by The Sleuth Kit.

The Sleuth Kit is primarily a C/C++ library and set of command line tools. These classes allow programs to obtain the data that TSK can produce. The typical steps would be to use JNI to cause the TSK library to create and populate a SQLite database. The Java classes then directly open the SQLite database and perform queries on it.

sleuthkit-4.11.1/bindings/java/src/org/sleuthkit/000755 000765 000024 00000000000 14137073556 022527 5ustar00carrierstaff000000 000000 sleuthkit-4.11.1/bindings/java/src/org/sleuthkit/datamodel/000755 000765 000024 00000000000 14137073560 024454 5ustar00carrierstaff000000 000000 sleuthkit-4.11.1/bindings/java/src/org/sleuthkit/datamodel/TagName.java000644 000765 000024 00000011335 14137073413 026633 0ustar00carrierstaff000000 000000 /* * Sleuth Kit Data Model * * Copyright 2013-2020 Basis Technology Corp. * Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.sleuthkit.datamodel; import java.io.Serializable; import java.util.HashMap; import java.util.Objects; /** * Instances of this class are data transfer objects (DTOs) that represent the * names (and related properties) a user can select from to apply a tag to * content or a blackboard artifact. 
*/ public class TagName implements Comparable, Serializable { private static final long serialVersionUID = 1L; public enum HTML_COLOR { NONE("None", ""), //NON-NLS WHITE("White", "#FFFFFF"), //NON-NLS SILVER("Silver", "#C0C0C0"), //NON-NLS GRAY("Gray", "#808080"), //NON-NLS BLACK("Black", "#000000"), //NON-NLS RED("Red", "#FF0000"), //NON-NLS MAROON("Maron", "#800000"), //NON-NLS YELLOW("Yellow", "#FFFF00"), //NON-NLS OLIVE("Olive", "#808000"), //NON-NLS LIME("Lime", "#00FF00"), //NON-NLS GREEN("Green", "#008000"), //NON-NLS AQUA("Aqua", "#00FFFF"), //NON-NLS TEAL("Teal", "#008080"), //NON-NLS BLUE("Blue", "#0000FF"), //NON-NLS NAVY("Navy", "#000080"), //NON-NLS FUCHSIA("Fuchsia", "#FF00FF"), //NON-NLS PURPLE("Purple", "#800080"); //NON-NLS private final static HashMap colorMap = new HashMap(); private final String name; private final String hexString; static { for (HTML_COLOR color : HTML_COLOR.values()) { colorMap.put(color.getName(), color); } } HTML_COLOR(String name, String hexString) { this.hexString = hexString; this.name = name; } String getName() { return name; } public String getRgbValue() { return hexString; } public static HTML_COLOR getColorByName(String colorName) { if (colorMap.containsKey(colorName)) { return colorMap.get(colorName); } else { return NONE; } } } private final long id; private final String displayName; private final String description; private final HTML_COLOR color; private final TskData.FileKnown knownStatus; private final long tagSetId; private final int rank; // Clients of the org.sleuthkit.datamodel package should not directly create these objects. 
TagName(long id, String displayName, String description, HTML_COLOR color, TskData.FileKnown knownStatus, long tagSetId, int rank) { this.id = id; this.displayName = displayName; this.description = description; this.color = color; this.knownStatus = knownStatus; this.tagSetId = tagSetId; this.rank = rank; } public long getId() { return id; } public String getDisplayName() { return displayName; } public String getDescription() { return description; } public HTML_COLOR getColor() { return color; } public TskData.FileKnown getKnownStatus() { return knownStatus; } long getTagSetId() { return tagSetId; } public int getRank() { return rank; } /** * Compares two TagName objects by comparing their display names. * * @param other The other TagName to compare this TagName to * * @return the result of calling compareTo on the displayNames */ @Override public int compareTo(TagName other) { return this.getDisplayName().compareTo(other.getDisplayName()); } @Override public int hashCode() { int hash = 5; hash = 89 * hash + (int) (this.id ^ (this.id >>> 32)); hash = 89 * hash + (this.displayName != null ? this.displayName.hashCode() : 0); hash = 89 * hash + (this.description != null ? this.description.hashCode() : 0); hash = 89 * hash + (this.color != null ? this.color.hashCode() : 0); hash = 89 * hash + (this.knownStatus != null ? 
this.knownStatus.hashCode() : 0); hash = 89 * hash + (int) (this.id ^ (this.tagSetId >>> 32)); return hash; } @Override public boolean equals(Object obj) { if (obj == null) { return false; } if (getClass() != obj.getClass()) { return false; } final TagName other = (TagName) obj; return (this.id == other.getId() && Objects.equals(this.displayName, other.getDisplayName()) && Objects.equals(this.description, other.getDescription()) && Objects.equals(this.color, other.getColor()) && Objects.equals(this.knownStatus, other.getKnownStatus()) && this.tagSetId == other.getTagSetId()); } } sleuthkit-4.11.1/bindings/java/src/org/sleuthkit/datamodel/CaseDatabaseFactory.java000644 000765 000024 00000120472 14137073414 031153 0ustar00carrierstaff000000 000000 /* * Sleuth Kit Data Model * * Copyright 2020-2021 Basis Technology Corp. * Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.sleuthkit.datamodel; import java.io.File; import java.io.UnsupportedEncodingException; import java.net.URLEncoder; import java.sql.Connection; import java.sql.DriverManager; import java.sql.SQLException; import java.sql.Statement; import java.util.Properties; import java.util.logging.Logger; import java.util.logging.Level; import org.sleuthkit.datamodel.SQLHelper.PostgreSQLHelper; import org.sleuthkit.datamodel.SQLHelper.SQLiteHelper; /** * Creates a SQLite or PostgreSQL case database. 
*/ class CaseDatabaseFactory { private static final Logger logger = Logger.getLogger(CaseDatabaseFactory.class.getName()); private final SQLHelper dbQueryHelper; private final DbCreationHelper dbCreationHelper; /** * Create a new SQLite case * * @param dbPath Full path to the database */ CaseDatabaseFactory(String dbPath) { this.dbQueryHelper = new SQLiteHelper(); this.dbCreationHelper = new SQLiteDbCreationHelper(dbPath); } /** * Create a new PostgreSQL case * * @param caseName The name of the case. It will be used to create a case * database name that can be safely used in SQL commands * and will not be subject to name collisions on the case * database server. Use getDatabaseName to get the * created name. * @param info The information to connect to the database. */ CaseDatabaseFactory(String caseName, CaseDbConnectionInfo info) { this.dbQueryHelper = new PostgreSQLHelper(); this.dbCreationHelper = new PostgreSQLDbCreationHelper(caseName, info); } /** * Creates and initializes the case database. * Currently the case must be reopened after creation. 
* * @throws TskCoreException */ void createCaseDatabase() throws TskCoreException { createDatabase(); initializeSchema(); } /** * Create the database itself (if necessary) * * @throws TskCoreException */ private void createDatabase() throws TskCoreException { dbCreationHelper.createDatabase(); } /** * Initialize the database schema * * @throws TskCoreException */ private void initializeSchema() throws TskCoreException { try (Connection conn = dbCreationHelper.getConnection()) { // Perform any needed steps before creating the tables dbCreationHelper.performPreInitialization(conn); // Add schema version addDbInfo(conn); // Add tables addTables(conn); dbCreationHelper.performPostTableInitialization(conn); // Add indexes addIndexes(conn); } catch (SQLException ex) { throw new TskCoreException("Error initializing case database", ex); } } /** * Create and populate the db_info tables * * @param conn the database connection * * @throws TskCoreException */ private void addDbInfo(Connection conn) throws TskCoreException { CaseDbSchemaVersionNumber version = SleuthkitCase.CURRENT_DB_SCHEMA_VERSION; long tskVersionNum = SleuthkitJNI.getSleuthkitVersion(); // This is the current version of TSK try (Statement stmt = conn.createStatement()) { stmt.execute("CREATE TABLE tsk_db_info (schema_ver INTEGER, tsk_ver INTEGER, schema_minor_ver INTEGER)"); stmt.execute("INSERT INTO tsk_db_info (schema_ver, tsk_ver, schema_minor_ver) VALUES (" + version.getMajor() + ", " + tskVersionNum + ", " + version.getMinor() + ");"); stmt.execute("CREATE TABLE tsk_db_info_extended (name TEXT PRIMARY KEY, value TEXT NOT NULL);"); stmt.execute("INSERT INTO tsk_db_info_extended (name, value) VALUES ('TSK_VERSION', '" + tskVersionNum + "');"); stmt.execute("INSERT INTO tsk_db_info_extended (name, value) VALUES ('SCHEMA_MAJOR_VERSION', '" + version.getMajor() + "');"); stmt.execute("INSERT INTO tsk_db_info_extended (name, value) VALUES ('SCHEMA_MINOR_VERSION', '" + version.getMinor() + "');"); 
stmt.execute("INSERT INTO tsk_db_info_extended (name, value) VALUES ('CREATION_SCHEMA_MAJOR_VERSION', '" + version.getMajor() + "');"); stmt.execute("INSERT INTO tsk_db_info_extended (name, value) VALUES ('CREATION_SCHEMA_MINOR_VERSION', '" + version.getMinor() + "');"); } catch (SQLException ex) { throw new TskCoreException("Error initializing db_info tables", ex); } } /** * Add and initialize the database tables * * @param conn the database connection * * @throws TskCoreException */ private void addTables(Connection conn) throws TskCoreException { try (Statement stmt = conn.createStatement()) { createTskObjects(stmt); createHostTables(stmt); createAccountTables(stmt); createFileTables(stmt); createArtifactTables(stmt); createAnalysisResultsTables(stmt); createTagTables(stmt); createIngestTables(stmt); createEventTables(stmt); createAttributeTables(stmt); createAccountInstancesAndArtifacts(stmt); } catch (SQLException ex) { throw new TskCoreException("Error initializing tables", ex); } } // tsk_objects is referenced by many other tables and should be created first private void createTskObjects(Statement stmt) throws SQLException { // The UNIQUE here on the object ID is to create an index stmt.execute("CREATE TABLE tsk_objects (obj_id " + dbQueryHelper.getPrimaryKey() + " PRIMARY KEY, par_obj_id " + dbQueryHelper.getBigIntType() + ", type INTEGER NOT NULL, UNIQUE (obj_id), FOREIGN KEY (par_obj_id) REFERENCES tsk_objects (obj_id) ON DELETE CASCADE)"); } private void createFileTables(Statement stmt) throws SQLException { stmt.execute("CREATE TABLE tsk_image_info (obj_id " + dbQueryHelper.getPrimaryKey() + " PRIMARY KEY, type INTEGER, ssize INTEGER, " + "tzone TEXT, size " + dbQueryHelper.getBigIntType() + ", md5 TEXT, sha1 TEXT, sha256 TEXT, display_name TEXT, " + "FOREIGN KEY(obj_id) REFERENCES tsk_objects(obj_id) ON DELETE CASCADE)"); stmt.execute("CREATE TABLE tsk_image_names (obj_id " + dbQueryHelper.getBigIntType() + " NOT NULL, name TEXT NOT NULL, " + "sequence 
INTEGER NOT NULL, FOREIGN KEY(obj_id) REFERENCES tsk_objects(obj_id) ON DELETE CASCADE)"); stmt.execute("CREATE TABLE tsk_vs_info (obj_id " + dbQueryHelper.getPrimaryKey() + " PRIMARY KEY, vs_type INTEGER NOT NULL, " + "img_offset " + dbQueryHelper.getBigIntType() + " NOT NULL, block_size " + dbQueryHelper.getBigIntType() + " NOT NULL, " + "FOREIGN KEY(obj_id) REFERENCES tsk_objects(obj_id) ON DELETE CASCADE)"); stmt.execute("CREATE TABLE tsk_vs_parts (obj_id " + dbQueryHelper.getPrimaryKey() + " PRIMARY KEY, " + "addr " + dbQueryHelper.getBigIntType() + " NOT NULL, start " + dbQueryHelper.getBigIntType() + " NOT NULL, " + "length " + dbQueryHelper.getBigIntType() + " NOT NULL, " + dbQueryHelper.getVSDescColName() + " TEXT, " + "flags INTEGER NOT NULL, FOREIGN KEY(obj_id) REFERENCES tsk_objects(obj_id) ON DELETE CASCADE);"); stmt.execute("CREATE TABLE tsk_pool_info (obj_id " + dbQueryHelper.getPrimaryKey() + " PRIMARY KEY, " + "pool_type INTEGER NOT NULL, FOREIGN KEY(obj_id) REFERENCES tsk_objects(obj_id) ON DELETE CASCADE);"); stmt.execute("CREATE TABLE data_source_info (obj_id " + dbQueryHelper.getBigIntType() + " PRIMARY KEY, device_id TEXT NOT NULL, " + "time_zone TEXT NOT NULL, acquisition_details TEXT, added_date_time "+ dbQueryHelper.getBigIntType() + ", " + "acquisition_tool_settings TEXT, acquisition_tool_name TEXT, acquisition_tool_version TEXT, " + "host_id " + dbQueryHelper.getBigIntType() + " NOT NULL, " + "FOREIGN KEY(host_id) REFERENCES tsk_hosts(id), " + "FOREIGN KEY(obj_id) REFERENCES tsk_objects(obj_id) ON DELETE CASCADE)"); stmt.execute("CREATE TABLE tsk_fs_info (obj_id " + dbQueryHelper.getPrimaryKey() + " PRIMARY KEY, " + "data_source_obj_id " + dbQueryHelper.getBigIntType() + " NOT NULL, " + "img_offset " + dbQueryHelper.getBigIntType() + " NOT NULL, fs_type INTEGER NOT NULL, " + "block_size " + dbQueryHelper.getBigIntType() + " NOT NULL, " + "block_count " + dbQueryHelper.getBigIntType() + " NOT NULL, root_inum " + 
dbQueryHelper.getBigIntType() + " NOT NULL, " + "first_inum " + dbQueryHelper.getBigIntType() + " NOT NULL, last_inum " + dbQueryHelper.getBigIntType() + " NOT NULL, " + "display_name TEXT, " + "FOREIGN KEY(data_source_obj_id) REFERENCES data_source_info(obj_id) ON DELETE CASCADE, " + "FOREIGN KEY(obj_id) REFERENCES tsk_objects(obj_id) ON DELETE CASCADE)"); stmt.execute("CREATE TABLE tsk_files (obj_id " + dbQueryHelper.getPrimaryKey() + " PRIMARY KEY, " + "fs_obj_id " + dbQueryHelper.getBigIntType() + ", data_source_obj_id " + dbQueryHelper.getBigIntType() + " NOT NULL, " + "attr_type INTEGER, attr_id INTEGER, " + "name TEXT NOT NULL, meta_addr " + dbQueryHelper.getBigIntType() + ", meta_seq " + dbQueryHelper.getBigIntType() + ", " + "type INTEGER, has_layout INTEGER, has_path INTEGER, " + "dir_type INTEGER, meta_type INTEGER, dir_flags INTEGER, meta_flags INTEGER, size " + dbQueryHelper.getBigIntType() + ", " + "ctime " + dbQueryHelper.getBigIntType() + ", " + "crtime " + dbQueryHelper.getBigIntType() + ", atime " + dbQueryHelper.getBigIntType() + ", " + "mtime " + dbQueryHelper.getBigIntType() + ", mode INTEGER, uid INTEGER, gid INTEGER, md5 TEXT, sha256 TEXT, " + "known INTEGER, " + "parent_path TEXT, mime_type TEXT, extension TEXT, " + "owner_uid TEXT DEFAULT NULL, " + "os_account_obj_id " + dbQueryHelper.getBigIntType() + " DEFAULT NULL, " + "FOREIGN KEY(obj_id) REFERENCES tsk_objects(obj_id) ON DELETE CASCADE, " + "FOREIGN KEY(fs_obj_id) REFERENCES tsk_fs_info(obj_id) ON DELETE CASCADE, " + "FOREIGN KEY(data_source_obj_id) REFERENCES data_source_info(obj_id) ON DELETE CASCADE, " + "FOREIGN KEY(os_account_obj_id) REFERENCES tsk_os_accounts(os_account_obj_id) ON DELETE SET NULL) " ); stmt.execute("CREATE TABLE file_encoding_types (encoding_type INTEGER PRIMARY KEY, name TEXT NOT NULL)"); stmt.execute("CREATE TABLE tsk_files_path (obj_id " + dbQueryHelper.getPrimaryKey() + " PRIMARY KEY, path TEXT NOT NULL, " + "encoding_type INTEGER NOT NULL, FOREIGN 
KEY(encoding_type) references file_encoding_types(encoding_type), " + "FOREIGN KEY(obj_id) REFERENCES tsk_objects(obj_id) ON DELETE CASCADE)"); stmt.execute("CREATE TABLE tsk_files_derived (obj_id " + dbQueryHelper.getPrimaryKey() + " PRIMARY KEY, " + "derived_id " + dbQueryHelper.getBigIntType() + " NOT NULL, rederive TEXT, " + "FOREIGN KEY(obj_id) REFERENCES tsk_objects(obj_id) ON DELETE CASCADE)"); stmt.execute("CREATE TABLE tsk_files_derived_method (derived_id " + dbQueryHelper.getPrimaryKey() + " PRIMARY KEY, " + "tool_name TEXT NOT NULL, tool_version TEXT NOT NULL, other TEXT)"); stmt.execute("CREATE TABLE tsk_file_layout (obj_id " + dbQueryHelper.getBigIntType() + " NOT NULL, " + "byte_start " + dbQueryHelper.getBigIntType() + " NOT NULL, byte_len " + dbQueryHelper.getBigIntType() + " NOT NULL, " + "sequence INTEGER NOT NULL, FOREIGN KEY(obj_id) REFERENCES tsk_objects(obj_id) ON DELETE CASCADE);"); stmt.execute("CREATE TABLE reports (obj_id " + dbQueryHelper.getPrimaryKey() + " PRIMARY KEY, path TEXT NOT NULL, " + "crtime INTEGER NOT NULL, src_module_name TEXT NOT NULL, report_name TEXT NOT NULL, " + "FOREIGN KEY(obj_id) REFERENCES tsk_objects(obj_id) ON DELETE CASCADE);"); } private void createArtifactTables(Statement stmt) throws SQLException { stmt.execute("CREATE TABLE blackboard_artifact_types (artifact_type_id " + dbQueryHelper.getPrimaryKey() + " PRIMARY KEY, " + "type_name TEXT NOT NULL, display_name TEXT," + "category_type INTEGER DEFAULT 0)"); stmt.execute("CREATE TABLE blackboard_attribute_types (attribute_type_id " + dbQueryHelper.getPrimaryKey() + " PRIMARY KEY, " + "type_name TEXT NOT NULL, display_name TEXT, value_type INTEGER NOT NULL)"); stmt.execute("CREATE TABLE review_statuses (review_status_id INTEGER PRIMARY KEY, " + "review_status_name TEXT NOT NULL, " + "display_name TEXT NOT NULL)"); stmt.execute("CREATE TABLE blackboard_artifacts (artifact_id " + dbQueryHelper.getPrimaryKey() + " PRIMARY KEY, " + "obj_id " + 
dbQueryHelper.getBigIntType() + " NOT NULL, " + "artifact_obj_id " + dbQueryHelper.getBigIntType() + " NOT NULL, " + "data_source_obj_id " + dbQueryHelper.getBigIntType() + ", " + "artifact_type_id " + dbQueryHelper.getBigIntType() + " NOT NULL, " + "review_status_id INTEGER NOT NULL, " + "UNIQUE (artifact_obj_id)," + "FOREIGN KEY(obj_id) REFERENCES tsk_objects(obj_id) ON DELETE CASCADE, " + "FOREIGN KEY(artifact_obj_id) REFERENCES tsk_objects(obj_id) ON DELETE CASCADE, " + "FOREIGN KEY(data_source_obj_id) REFERENCES tsk_objects(obj_id) ON DELETE CASCADE, " + "FOREIGN KEY(artifact_type_id) REFERENCES blackboard_artifact_types(artifact_type_id), " + "FOREIGN KEY(review_status_id) REFERENCES review_statuses(review_status_id))"); /* Binary representation of BYTEA is a bunch of bytes, which could * include embedded nulls so we have to pay attention to field length. * http://www.postgresql.org/docs/9.4/static/libpq-example.html */ stmt.execute("CREATE TABLE blackboard_attributes (artifact_id " + dbQueryHelper.getBigIntType() + " NOT NULL, " + "artifact_type_id " + dbQueryHelper.getBigIntType() + " NOT NULL, " + "source TEXT, context TEXT, attribute_type_id " + dbQueryHelper.getBigIntType() + " NOT NULL, " + "value_type INTEGER NOT NULL, value_byte " + dbQueryHelper.getBlobType() + ", " + "value_text TEXT, value_int32 INTEGER, value_int64 " + dbQueryHelper.getBigIntType() + ", value_double NUMERIC(20, 10), " + "FOREIGN KEY(artifact_id) REFERENCES blackboard_artifacts(artifact_id) ON DELETE CASCADE, " + "FOREIGN KEY(artifact_type_id) REFERENCES blackboard_artifact_types(artifact_type_id), " + "FOREIGN KEY(attribute_type_id) REFERENCES blackboard_attribute_types(attribute_type_id))"); } private void createAnalysisResultsTables(Statement stmt) throws SQLException { stmt.execute("CREATE TABLE tsk_analysis_results (artifact_obj_id " + dbQueryHelper.getBigIntType() + " PRIMARY KEY, " + "conclusion TEXT, " + "significance INTEGER NOT NULL, " + "priority INTEGER NOT NULL, " + 
"configuration TEXT, justification TEXT, " + "ignore_score INTEGER DEFAULT 0, " // boolean + "FOREIGN KEY(artifact_obj_id) REFERENCES blackboard_artifacts(artifact_obj_id) ON DELETE CASCADE" + ")"); stmt.execute("CREATE TABLE tsk_aggregate_score( obj_id " + dbQueryHelper.getBigIntType() + " PRIMARY KEY, " + "data_source_obj_id " + dbQueryHelper.getBigIntType() + ", " + "significance INTEGER NOT NULL, " + "priority INTEGER NOT NULL, " + "UNIQUE (obj_id)," + "FOREIGN KEY(obj_id) REFERENCES tsk_objects(obj_id) ON DELETE CASCADE, " + "FOREIGN KEY(data_source_obj_id) REFERENCES tsk_objects(obj_id) ON DELETE CASCADE " + ")"); } private void createTagTables(Statement stmt) throws SQLException { stmt.execute("CREATE TABLE tsk_tag_sets (tag_set_id " + dbQueryHelper.getPrimaryKey() + " PRIMARY KEY, name TEXT UNIQUE)"); stmt.execute("CREATE TABLE tag_names (tag_name_id " + dbQueryHelper.getPrimaryKey() + " PRIMARY KEY, display_name TEXT UNIQUE, " + "description TEXT NOT NULL, color TEXT NOT NULL, knownStatus INTEGER NOT NULL," + " tag_set_id " + dbQueryHelper.getBigIntType() + ", rank INTEGER, FOREIGN KEY(tag_set_id) REFERENCES tsk_tag_sets(tag_set_id) ON DELETE SET NULL)"); stmt.execute("CREATE TABLE tsk_examiners (examiner_id " + dbQueryHelper.getPrimaryKey() + " PRIMARY KEY, " + "login_name TEXT NOT NULL, display_name TEXT, UNIQUE(login_name))"); stmt.execute("CREATE TABLE content_tags (tag_id " + dbQueryHelper.getPrimaryKey() + " PRIMARY KEY, " + "obj_id " + dbQueryHelper.getBigIntType() + " NOT NULL, tag_name_id " + dbQueryHelper.getBigIntType() + " NOT NULL, " + "comment TEXT NOT NULL, begin_byte_offset " + dbQueryHelper.getBigIntType() + " NOT NULL, " + "end_byte_offset " + dbQueryHelper.getBigIntType() + " NOT NULL, " + "examiner_id " + dbQueryHelper.getBigIntType() + ", " + "FOREIGN KEY(examiner_id) REFERENCES tsk_examiners(examiner_id) ON DELETE CASCADE, " + "FOREIGN KEY(obj_id) REFERENCES tsk_objects(obj_id) ON DELETE CASCADE, " + "FOREIGN KEY(tag_name_id) 
REFERENCES tag_names(tag_name_id) ON DELETE CASCADE)"); stmt.execute("CREATE TABLE blackboard_artifact_tags (tag_id " + dbQueryHelper.getPrimaryKey() + " PRIMARY KEY, " + "artifact_id " + dbQueryHelper.getBigIntType() + " NOT NULL, tag_name_id " + dbQueryHelper.getBigIntType() + " NOT NULL, " + "comment TEXT NOT NULL, examiner_id " + dbQueryHelper.getBigIntType() + ", " + "FOREIGN KEY(examiner_id) REFERENCES tsk_examiners(examiner_id) ON DELETE CASCADE, " + "FOREIGN KEY(artifact_id) REFERENCES blackboard_artifacts(artifact_id) ON DELETE CASCADE, " + "FOREIGN KEY(tag_name_id) REFERENCES tag_names(tag_name_id) ON DELETE CASCADE)"); } /** * Add indexes * * @param conn the database connection * @throws TskCoreException */ private void addIndexes(Connection conn) throws TskCoreException { try (Statement stmt = conn.createStatement()) { // tsk_objects index stmt.execute("CREATE INDEX parObjId ON tsk_objects(par_obj_id)"); // file layout index stmt.execute("CREATE INDEX layout_objID ON tsk_file_layout(obj_id)"); // blackboard indexes stmt.execute("CREATE INDEX artifact_objID ON blackboard_artifacts(obj_id)"); stmt.execute("CREATE INDEX artifact_artifact_objID ON blackboard_artifacts(artifact_obj_id)"); stmt.execute("CREATE INDEX artifact_typeID ON blackboard_artifacts(artifact_type_id)"); stmt.execute("CREATE INDEX attrsArtifactID ON blackboard_attributes(artifact_id)"); //file type indexes stmt.execute("CREATE INDEX mime_type ON tsk_files(dir_type,mime_type,type)"); stmt.execute("CREATE INDEX file_extension ON tsk_files(extension)"); // account indexes stmt.execute("CREATE INDEX relationships_account1 ON account_relationships(account1_id)"); stmt.execute("CREATE INDEX relationships_account2 ON account_relationships(account2_id)"); stmt.execute("CREATE INDEX relationships_relationship_source_obj_id ON account_relationships(relationship_source_obj_id)"); stmt.execute("CREATE INDEX relationships_date_time ON account_relationships(date_time)"); stmt.execute("CREATE INDEX 
relationships_relationship_type ON account_relationships(relationship_type)"); stmt.execute("CREATE INDEX relationships_data_source_obj_id ON account_relationships(data_source_obj_id)"); //tsk_events indices stmt.execute("CREATE INDEX events_data_source_obj_id ON tsk_event_descriptions(data_source_obj_id)"); stmt.execute("CREATE INDEX events_content_obj_id ON tsk_event_descriptions(content_obj_id)"); stmt.execute("CREATE INDEX events_artifact_id ON tsk_event_descriptions(artifact_id)"); stmt.execute("CREATE INDEX events_sub_type_time ON tsk_events(event_type_id, time)"); stmt.execute("CREATE INDEX events_time ON tsk_events(time)"); // analysis results and scores indices stmt.execute("CREATE INDEX score_significance_priority ON tsk_aggregate_score(significance, priority)"); stmt.execute("CREATE INDEX score_datasource_obj_id ON tsk_aggregate_score(data_source_obj_id)"); stmt.execute("CREATE INDEX tsk_file_attributes_obj_id ON tsk_file_attributes(obj_id)"); } catch (SQLException ex) { throw new TskCoreException("Error initializing db_info tables", ex); } } private void createIngestTables(Statement stmt) throws SQLException { stmt.execute("CREATE TABLE ingest_module_types (type_id INTEGER PRIMARY KEY, type_name TEXT NOT NULL)"); stmt.execute("CREATE TABLE ingest_job_status_types (type_id INTEGER PRIMARY KEY, type_name TEXT NOT NULL)"); stmt.execute("CREATE TABLE ingest_modules (ingest_module_id " + dbQueryHelper.getPrimaryKey() + " PRIMARY KEY, " + "display_name TEXT NOT NULL, unique_name TEXT UNIQUE NOT NULL, type_id INTEGER NOT NULL, " + "version TEXT NOT NULL, FOREIGN KEY(type_id) REFERENCES ingest_module_types(type_id) ON DELETE CASCADE);"); stmt.execute("CREATE TABLE ingest_jobs (ingest_job_id " + dbQueryHelper.getPrimaryKey() + " PRIMARY KEY, " + "obj_id " + dbQueryHelper.getBigIntType() + " NOT NULL, host_name TEXT NOT NULL, " + "start_date_time " + dbQueryHelper.getBigIntType() + " NOT NULL, " + "end_date_time " + dbQueryHelper.getBigIntType() + " NOT NULL, 
status_id INTEGER NOT NULL, " + "settings_dir TEXT, FOREIGN KEY(obj_id) REFERENCES tsk_objects(obj_id) ON DELETE CASCADE, " + "FOREIGN KEY(status_id) REFERENCES ingest_job_status_types(type_id) ON DELETE CASCADE);"); stmt.execute("CREATE TABLE ingest_job_modules (ingest_job_id INTEGER, ingest_module_id INTEGER, " + "pipeline_position INTEGER, PRIMARY KEY(ingest_job_id, ingest_module_id), " + "FOREIGN KEY(ingest_job_id) REFERENCES ingest_jobs(ingest_job_id) ON DELETE CASCADE, " + "FOREIGN KEY(ingest_module_id) REFERENCES ingest_modules(ingest_module_id) ON DELETE CASCADE);"); } private void createHostTables(Statement stmt) throws SQLException { stmt.execute("CREATE TABLE tsk_persons (id " + dbQueryHelper.getPrimaryKey() + " PRIMARY KEY, " + "name TEXT NOT NULL, " // person name + "UNIQUE(name)) "); // References tsk_persons stmt.execute("CREATE TABLE tsk_hosts (id " + dbQueryHelper.getPrimaryKey() + " PRIMARY KEY, " + "name TEXT NOT NULL, " // host name + "db_status INTEGER DEFAULT 0, " // active/merged/deleted + "person_id INTEGER, " + "merged_into " + dbQueryHelper.getBigIntType() + ", " + "FOREIGN KEY(person_id) REFERENCES tsk_persons(id) ON DELETE SET NULL, " + "FOREIGN KEY(merged_into) REFERENCES tsk_hosts(id) ON DELETE CASCADE, " + "UNIQUE(name)) "); stmt.execute("CREATE TABLE tsk_host_addresses (id " + dbQueryHelper.getPrimaryKey() + " PRIMARY KEY, " + "address_type INTEGER NOT NULL, " + "address TEXT NOT NULL, " + "UNIQUE(address_type, address)) "); stmt.execute("CREATE TABLE tsk_host_address_map (id " + dbQueryHelper.getPrimaryKey() + " PRIMARY KEY, " + "host_id " + dbQueryHelper.getBigIntType() + " NOT NULL, " + "addr_obj_id " + dbQueryHelper.getBigIntType() + " NOT NULL, " + "source_obj_id " + dbQueryHelper.getBigIntType() + ", " // object id of the source where this mapping was found. 
+ "time " + dbQueryHelper.getBigIntType() + ", " // time at which the mapping existed + "UNIQUE(host_id, addr_obj_id, time), " + "FOREIGN KEY(host_id) REFERENCES tsk_hosts(id) ON DELETE CASCADE, " + "FOREIGN KEY(addr_obj_id) REFERENCES tsk_host_addresses(id), " + "FOREIGN KEY(source_obj_id) REFERENCES tsk_objects(obj_id) ON DELETE SET NULL )"); // stores associations between DNS name and IP address stmt.execute("CREATE TABLE tsk_host_address_dns_ip_map (id " + dbQueryHelper.getPrimaryKey() + " PRIMARY KEY, " + "dns_address_id " + dbQueryHelper.getBigIntType() + " NOT NULL, " + "ip_address_id " + dbQueryHelper.getBigIntType() + " NOT NULL, " + "source_obj_id " + dbQueryHelper.getBigIntType() + ", " + "time " + dbQueryHelper.getBigIntType() + ", " // time at which the mapping existed + "UNIQUE(dns_address_id, ip_address_id, time), " + "FOREIGN KEY(dns_address_id) REFERENCES tsk_host_addresses(id) ON DELETE CASCADE, " + "FOREIGN KEY(ip_address_id) REFERENCES tsk_host_addresses(id) ON DELETE CASCADE," + "FOREIGN KEY(source_obj_id) REFERENCES tsk_objects(obj_id) ON DELETE SET NULL )"); // maps an address to an content/item using it stmt.execute("CREATE TABLE tsk_host_address_usage (id " + dbQueryHelper.getPrimaryKey() + " PRIMARY KEY, " + "addr_obj_id " + dbQueryHelper.getBigIntType() + " NOT NULL, " + "obj_id " + dbQueryHelper.getBigIntType() + " NOT NULL, " // obj id of the content/item using the address + "data_source_obj_id " + dbQueryHelper.getBigIntType() + " NOT NULL, " // data source where the usage was found + "UNIQUE(addr_obj_id, obj_id), " + "FOREIGN KEY(addr_obj_id) REFERENCES tsk_host_addresses(id) ON DELETE CASCADE, " + "FOREIGN KEY(data_source_obj_id) REFERENCES tsk_objects(obj_id) ON DELETE CASCADE, " + "FOREIGN KEY(obj_id) REFERENCES tsk_objects(obj_id) ON DELETE CASCADE )"); } // Must be called after tsk_persons, tsk_hosts and tsk_objects have been created. 
private void createAccountTables(Statement stmt) throws SQLException { stmt.execute("CREATE TABLE account_types (account_type_id " + dbQueryHelper.getPrimaryKey() + " PRIMARY KEY, " + "type_name TEXT UNIQUE NOT NULL, display_name TEXT NOT NULL)"); // References account_types stmt.execute("CREATE TABLE accounts (account_id " + dbQueryHelper.getPrimaryKey() + " PRIMARY KEY, " + "account_type_id INTEGER NOT NULL, account_unique_identifier TEXT NOT NULL, " + "UNIQUE(account_type_id, account_unique_identifier), " + "FOREIGN KEY(account_type_id) REFERENCES account_types(account_type_id))"); // References accounts, tsk_objects stmt.execute("CREATE TABLE account_relationships (relationship_id " + dbQueryHelper.getPrimaryKey() + " PRIMARY KEY, " + "account1_id INTEGER NOT NULL, account2_id INTEGER NOT NULL, " + "relationship_source_obj_id " + dbQueryHelper.getBigIntType() + " NOT NULL, " + "date_time " + dbQueryHelper.getBigIntType() + ", relationship_type INTEGER NOT NULL, " + "data_source_obj_id " + dbQueryHelper.getBigIntType() + " NOT NULL, " + "UNIQUE(account1_id, account2_id, relationship_source_obj_id), " + "FOREIGN KEY(account1_id) REFERENCES accounts(account_id), " + "FOREIGN KEY(account2_id) REFERENCES accounts(account_id), " + "FOREIGN KEY(relationship_source_obj_id) REFERENCES tsk_objects(obj_id) ON DELETE CASCADE, " + "FOREIGN KEY(data_source_obj_id) REFERENCES tsk_objects(obj_id) ON DELETE CASCADE)"); // References tsk_hosts stmt.execute("CREATE TABLE tsk_os_account_realms (id " + dbQueryHelper.getPrimaryKey() + " PRIMARY KEY, " + "realm_name TEXT DEFAULT NULL, " // realm name - for a domain realm, may be null + "realm_addr TEXT DEFAULT NULL, " // a sid/uid or some some other identifier, may be null + "realm_signature TEXT NOT NULL, " // Signature exists only to prevent duplicates. 
It is made up of realm address/name and scope host + "scope_host_id " + dbQueryHelper.getBigIntType() + " DEFAULT NULL, " // if the realm scope is a single host + "scope_confidence INTEGER, " // indicates whether we know for sure the realm scope or if we are inferring it + "db_status INTEGER DEFAULT 0, " // active/merged/deleted + "merged_into " + dbQueryHelper.getBigIntType() + " DEFAULT NULL, " + "UNIQUE(realm_signature), " + "FOREIGN KEY(scope_host_id) REFERENCES tsk_hosts(id) ON DELETE CASCADE," + "FOREIGN KEY(merged_into) REFERENCES tsk_os_account_realms(id) ON DELETE CASCADE )"); // References tsk_objects, tsk_os_account_realms, tsk_persons stmt.execute("CREATE TABLE tsk_os_accounts (os_account_obj_id " + dbQueryHelper.getBigIntType() + " PRIMARY KEY, " + "login_name TEXT DEFAULT NULL, " // login name, if available, may be null + "full_name TEXT DEFAULT NULL, " // full name, if available, may be null + "realm_id " + dbQueryHelper.getBigIntType() + " NOT NULL, " // realm for the account + "addr TEXT DEFAULT NULL, " // SID/UID, if available + "signature TEXT NOT NULL, " // This exists only to prevent duplicates. It is either the addr or the login_name whichever is not null. + "status INTEGER, " // enabled/disabled/deleted + "type INTEGER, " // service/interactive + "created_date " + dbQueryHelper.getBigIntType() + " DEFAULT NULL, " + "db_status INTEGER DEFAULT 0, " // active/merged/deleted + "merged_into " + dbQueryHelper.getBigIntType() + " DEFAULT NULL, " + "UNIQUE(signature, realm_id), " + "FOREIGN KEY(os_account_obj_id) REFERENCES tsk_objects(obj_id) ON DELETE CASCADE, " + "FOREIGN KEY(realm_id) REFERENCES tsk_os_account_realms(id) ON DELETE CASCADE," + "FOREIGN KEY(merged_into) REFERENCES tsk_os_accounts(os_account_obj_id) ON DELETE CASCADE )"); } // Must be called after createAccountTables() and blackboard_attribute_types, blackboard_artifacts creation. 
	/**
	 * Creates the OS-account attribute/instance tables and the data-artifacts
	 * table. Depends on tables created earlier (tsk_os_accounts, tsk_hosts,
	 * tsk_objects, blackboard_attribute_types, blackboard_artifacts).
	 *
	 * @param stmt An open statement on the new case database.
	 *
	 * @throws SQLException If any CREATE TABLE statement fails.
	 */
	private void createAccountInstancesAndArtifacts(Statement stmt) throws SQLException {

		// References tsk_os_accounts, tsk_hosts, tsk_objects, blackboard_attribute_types
		stmt.execute("CREATE TABLE tsk_os_account_attributes (id " + dbQueryHelper.getPrimaryKey() + " PRIMARY KEY, "
				+ "os_account_obj_id " + dbQueryHelper.getBigIntType() + " NOT NULL, "
				+ "host_id " + dbQueryHelper.getBigIntType() + ", "
				+ "source_obj_id " + dbQueryHelper.getBigIntType() + ", "
				+ "attribute_type_id " + dbQueryHelper.getBigIntType() + " NOT NULL, "
				+ "value_type INTEGER NOT NULL, "
				+ "value_byte " + dbQueryHelper.getBlobType() + ", "
				+ "value_text TEXT, "
				+ "value_int32 INTEGER, value_int64 " + dbQueryHelper.getBigIntType() + ", "
				+ "value_double NUMERIC(20, 10), "
				+ "FOREIGN KEY(os_account_obj_id) REFERENCES tsk_os_accounts(os_account_obj_id) ON DELETE CASCADE, "
				+ "FOREIGN KEY(host_id) REFERENCES tsk_hosts(id) ON DELETE CASCADE, "
				+ "FOREIGN KEY(source_obj_id) REFERENCES tsk_objects(obj_id) ON DELETE SET NULL, "
				+ "FOREIGN KEY(attribute_type_id) REFERENCES blackboard_attribute_types(attribute_type_id))");

		// References tsk_os_accounts, tsk_objects, tsk_hosts
		stmt.execute("CREATE TABLE tsk_os_account_instances (id " + dbQueryHelper.getPrimaryKey() + " PRIMARY KEY, "
				+ "os_account_obj_id " + dbQueryHelper.getBigIntType() + " NOT NULL, "
				+ "data_source_obj_id " + dbQueryHelper.getBigIntType() + " NOT NULL, "
				+ "instance_type INTEGER NOT NULL, " // PerformedActionOn/ReferencedOn
				+ "UNIQUE(os_account_obj_id, data_source_obj_id), "
				+ "FOREIGN KEY(os_account_obj_id) REFERENCES tsk_os_accounts(os_account_obj_id) ON DELETE CASCADE, "
				+ "FOREIGN KEY(data_source_obj_id) REFERENCES tsk_objects(obj_id) ON DELETE CASCADE ) ");

		// References blackboard_artifacts, tsk_os_accounts
		stmt.execute("CREATE TABLE tsk_data_artifacts ( "
				+ "artifact_obj_id " + dbQueryHelper.getBigIntType() + " PRIMARY KEY, "
				+ "os_account_obj_id " + dbQueryHelper.getBigIntType() + ", "
				+ "FOREIGN KEY(artifact_obj_id) REFERENCES blackboard_artifacts(artifact_obj_id) ON DELETE CASCADE, "
				+ "FOREIGN KEY(os_account_obj_id) REFERENCES tsk_os_accounts(os_account_obj_id) ON DELETE SET NULL) ");
	}

	/**
	 * Creates the timeline event tables (tsk_event_types,
	 * tsk_event_descriptions, tsk_events).
	 *
	 * @param stmt An open statement on the new case database.
	 *
	 * @throws SQLException If any CREATE TABLE statement fails.
	 */
	private void createEventTables(Statement stmt) throws SQLException {
		stmt.execute("CREATE TABLE tsk_event_types ("
				+ " event_type_id " + dbQueryHelper.getPrimaryKey() + " PRIMARY KEY,"
				+ " display_name TEXT UNIQUE NOT NULL , "
				+ " super_type_id INTEGER REFERENCES tsk_event_types(event_type_id) )");

		/*
		 * Regarding the timeline event tables schema, note that several columns
		 * in the tsk_event_descriptions table seem, at first glance, to be
		 * attributes of events rather than their descriptions and would appear
		 * to belong in tsk_events table instead. The rationale for putting the
		 * data source object ID, content object ID, artifact ID and the flags
		 * indicating whether or not the event source has a hash set hit or is
		 * tagged were motivated by the fact that these attributes are identical
		 * for each event in a set of file system file MAC time events. The
		 * decision was made to avoid duplication and save space by placing this
		 * data in the tsk_event-descriptions table.
		 */
		stmt.execute(
				"CREATE TABLE tsk_event_descriptions ( "
				+ " event_description_id " + dbQueryHelper.getPrimaryKey() + " PRIMARY KEY, "
				+ " full_description TEXT NOT NULL, "
				+ " med_description TEXT, "
				+ " short_description TEXT,"
				+ " data_source_obj_id " + dbQueryHelper.getBigIntType() + " NOT NULL, "
				+ " content_obj_id " + dbQueryHelper.getBigIntType() + " NOT NULL, "
				+ " artifact_id " + dbQueryHelper.getBigIntType() + ", "
				+ " hash_hit INTEGER NOT NULL, " //boolean
				+ " tagged INTEGER NOT NULL, " //boolean
				+ " FOREIGN KEY(data_source_obj_id) REFERENCES data_source_info(obj_id) ON DELETE CASCADE, "
				+ " FOREIGN KEY(content_obj_id) REFERENCES tsk_objects(obj_id) ON DELETE CASCADE, "
				+ " FOREIGN KEY(artifact_id) REFERENCES blackboard_artifacts(artifact_id) ON DELETE CASCADE,"
				+ " UNIQUE (full_description, content_obj_id, artifact_id))");

		stmt.execute(
				"CREATE TABLE tsk_events ("
				+ " event_id " + dbQueryHelper.getPrimaryKey() + " PRIMARY KEY, "
				+ " event_type_id " + dbQueryHelper.getBigIntType() + " NOT NULL REFERENCES tsk_event_types(event_type_id) ,"
				+ " event_description_id " + dbQueryHelper.getBigIntType() + " NOT NULL REFERENCES tsk_event_descriptions(event_description_id) ON DELETE CASCADE ,"
				+ " time " + dbQueryHelper.getBigIntType() + " NOT NULL , "
				+ " UNIQUE (event_type_id, event_description_id, time))");
	}

	/**
	 * Creates the file attributes table.
	 *
	 * @param stmt An open statement on the new case database.
	 *
	 * @throws SQLException If the CREATE TABLE statement fails.
	 */
	private void createAttributeTables(Statement stmt) throws SQLException {
		/*
		 * Binary representation of BYTEA is a bunch of bytes, which could
		 * include embedded nulls so we have to pay attention to field length.
		 * http://www.postgresql.org/docs/9.4/static/libpq-example.html
		 */
		stmt.execute("CREATE TABLE tsk_file_attributes ( id " + dbQueryHelper.getPrimaryKey() + " PRIMARY KEY, "
				+ "obj_id " + dbQueryHelper.getBigIntType() + " NOT NULL, "
				+ "attribute_type_id " + dbQueryHelper.getBigIntType() + " NOT NULL, "
				+ "value_type INTEGER NOT NULL, value_byte " + dbQueryHelper.getBlobType() + ", "
				+ "value_text TEXT, value_int32 INTEGER, value_int64 " + dbQueryHelper.getBigIntType() + ", value_double NUMERIC(20, 10), "
				+ "FOREIGN KEY(obj_id) REFERENCES tsk_files(obj_id) ON DELETE CASCADE, "
				+ "FOREIGN KEY(attribute_type_id) REFERENCES blackboard_attribute_types(attribute_type_id))");
	}

	/**
	 * Helper class for holding code unique to each database type.
	 */
	private abstract class DbCreationHelper {

		/**
		 * Create the database itself (if necessary)
		 *
		 * @throws TskCoreException
		 */
		abstract void createDatabase() throws TskCoreException;

		/**
		 * Get an connection to the case database
		 *
		 * @return the connection
		 */
		abstract Connection getConnection() throws TskCoreException;

		/**
		 * Do any needed initialization before creating the tables. This is
		 * where SQLite pragmas are set up.
		 *
		 * @param conn The database connection
		 *
		 * @throws TskCoreException
		 */
		abstract void performPreInitialization(Connection conn) throws TskCoreException;

		/**
		 * Do any additional steps after the tables are created.
		 *
		 * @param conn The database connection
		 * @throws TskCoreException
		 */
		abstract void performPostTableInitialization(Connection conn) throws TskCoreException;
	}

	/**
	 * Implements the PostgreSQL-specific methods for creating the case
	 */
	private class PostgreSQLDbCreationHelper extends DbCreationHelper {

		private final static String JDBC_BASE_URI = "jdbc:postgresql://"; // NON-NLS
		private final static String JDBC_DRIVER = "org.postgresql.Driver"; // NON-NLS

		// Name of the case database to create and the server connection info.
		final private String caseName;
		final private CaseDbConnectionInfo info;

		PostgreSQLDbCreationHelper(String caseName, CaseDbConnectionInfo info) {
			this.caseName = caseName;
			this.info = info;
		}

		@Override
		void createDatabase() throws TskCoreException{
			// NOTE(review): caseName is concatenated into the CREATE DATABASE
			// statement (inside double quotes). PostgreSQL cannot parameterize
			// DDL identifiers; presumably caseName is sanitized upstream —
			// confirm against callers.
			try(Connection conn = getPostgresConnection();
					Statement stmt = conn.createStatement()) {
				stmt.execute("CREATE DATABASE \"" + caseName + "\" WITH ENCODING='UTF8'");
			} catch (SQLException ex) {
				throw new TskCoreException("Error creating PostgreSQL case " + caseName, ex);
			}
		}

		@Override
		Connection getConnection() throws TskCoreException {
			// Connect to the newly created case database.
			return getConnection(caseName);
		}

		/**
		 * Connects to the "postgres" database for creating new databases.
		 *
		 * @return the connection to the "postgres" database
		 */
		Connection getPostgresConnection() throws TskCoreException {
			return getConnection("postgres");
		}

		/**
		 * Connects to an existing database with the given name.
		 *
		 * @param databaseName the name of the database
		 *
		 * @return the connection to the database
		 */
		Connection getConnection(String databaseName) throws TskCoreException {
			// URL-encode the database name so special characters survive in the JDBC URL.
			String encodedDbName;
			try {
				encodedDbName = URLEncoder.encode(databaseName, "UTF-8");
			} catch (UnsupportedEncodingException ex) {
				// Print the warning and continue with the unencoded name
				logger.log(Level.WARNING, "Error encoding database name " + databaseName, ex);
				encodedDbName = databaseName;
			}

			// Build jdbc:postgresql://host:port/dbname
			StringBuilder url = new StringBuilder();
			url.append(JDBC_BASE_URI)
					.append(info.getHost())
					.append(":")
					.append(info.getPort())
					.append('/') // NON-NLS
					.append(encodedDbName);

			Connection conn;
			try {
				Properties props = new Properties();
				props.setProperty("user", info.getUserName()); // NON-NLS
				props.setProperty("password", info.getPassword()); // NON-NLS

				Class.forName(JDBC_DRIVER);
				conn = DriverManager.getConnection(url.toString(), props);
			} catch (ClassNotFoundException | SQLException ex) {
				throw new TskCoreException("Failed to acquire ephemeral connection to PostgreSQL database " + databaseName, ex); // NON-NLS
			}
			return conn;
		}

		@Override
		void performPreInitialization(Connection conn) throws TskCoreException {
			// Nothing to do here for PostgreSQL
		}

		@Override
		void performPostTableInitialization(Connection conn) throws TskCoreException {
			// Start artifact IDs at the most negative long; see schema docs for rationale.
			try (Statement stmt = conn.createStatement()) {
				stmt.execute("ALTER SEQUENCE blackboard_artifacts_artifact_id_seq minvalue -9223372036854775808 restart with -9223372036854775808");
			} catch (SQLException ex) {
				throw new TskCoreException("Error altering artifact ID sequence", ex);
			}
		}
	}

	/**
	 * Implements the SQLite-specific methods for creating the case
	 */
	private class SQLiteDbCreationHelper extends DbCreationHelper {

		// Pragmas applied by performPreInitialization() before table creation.
		private final static String PRAGMA_SYNC_OFF = "PRAGMA synchronous = OFF"; // NON-NLS
		private final static String PRAGMA_READ_UNCOMMITTED_TRUE = "PRAGMA read_uncommitted = True"; // NON-NLS
		private final static String PRAGMA_ENCODING_UTF8 = "PRAGMA encoding = 'UTF-8'"; // NON-NLS
		private final static String PRAGMA_PAGE_SIZE_4096 = "PRAGMA page_size = 4096"; // NON-NLS
		private final static String PRAGMA_FOREIGN_KEYS_ON = "PRAGMA foreign_keys = ON"; // NON-NLS

		private final static String JDBC_DRIVER = "org.sqlite.JDBC"; // NON-NLS
		private final static String JDBC_BASE_URI = "jdbc:sqlite:"; // NON-NLS

		// Absolute path of the SQLite case database file.
		String dbPath;

		SQLiteDbCreationHelper(String dbPath) {
			this.dbPath = dbPath;
		}

		@Override
		void createDatabase() throws TskCoreException {
			// SQLite doesn't need to explicitly create the case database but we will
			// check that the folder exists and the database does not
			File dbFile = new File(dbPath);
			if (dbFile.exists()) {
				throw new TskCoreException("Case database already exists : " + dbPath);
			}

			if (dbFile.getParentFile() != null && !dbFile.getParentFile().exists()) {
				throw new TskCoreException("Case database folder does not exist : " + dbFile.getParent());
			}
		}

		@Override
		Connection getConnection() throws TskCoreException {
			// Build jdbc:sqlite:<path>; the driver creates the file on first connect.
			StringBuilder url = new StringBuilder();
			url.append(JDBC_BASE_URI)
					.append(dbPath);

			Connection conn;
			try {
				Class.forName(JDBC_DRIVER);
				conn = DriverManager.getConnection(url.toString());
			} catch (ClassNotFoundException | SQLException ex) {
				throw new TskCoreException("Failed to acquire ephemeral connection SQLite database " + dbPath, ex); // NON-NLS
			}
			return conn;
		}

		@Override
		void performPreInitialization(Connection conn) throws TskCoreException {
			// Apply the pragmas before any tables exist; encoding and page size
			// can only be set on a fresh database.
			try (Statement stmt = conn.createStatement()) {
				stmt.execute(PRAGMA_SYNC_OFF);
				stmt.execute(PRAGMA_READ_UNCOMMITTED_TRUE);
				stmt.execute(PRAGMA_ENCODING_UTF8);
				stmt.execute(PRAGMA_PAGE_SIZE_4096);
				stmt.execute(PRAGMA_FOREIGN_KEYS_ON);
			} catch (SQLException ex) {
				throw new TskCoreException("Error setting pragmas", ex);
			}
		}

		@Override
		void performPostTableInitialization(Connection conn) throws TskCoreException {
			// Nothing to do here for SQLite
		}
	}
}
sleuthkit-4.11.1/bindings/java/src/org/sleuthkit/datamodel/CarvingResult.java000755 000765 000024 00000005716 14137073413 
030120 0ustar00carrierstaff000000 000000 /* * Sleuth Kit Data Model * * Copyright 2011-2016 Basis Technology Corp. * Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.sleuthkit.datamodel; import java.util.ArrayList; import java.util.List; /** * A carving result consisting of a set of carved files and the parent from * which the files were carved. */ public final class CarvingResult { private final Content parent; private final List carvedFiles; /** * Constructs a carving result consisting of a set of carved files and the * parent from which the files were carved. * * @param parent The parent of the set of carved files in the carving * result. * @param carvedFiles The set of carved files in the carving result. */ public CarvingResult(Content parent, List carvedFiles) { this.parent = parent; this.carvedFiles = new ArrayList(carvedFiles); } /** * Gets the parent of the carved files in a carving result. * * @return The parent of the set of carved files in the carving result. */ final Content getParent() { return parent; } /** * Gets the carved files in a carving result. * * @return The set of carved files in the carving result. */ final List getCarvedFiles() { return carvedFiles; } /** * A carved file. */ public final static class CarvedFile { private final String name; private final long sizeInBytes; private final List layoutInParent; /** * Constructs a carved file. * * @param name The name of the file. 
* @param sizeInBytes The size of the file in bytes. * @param layoutInParent The layout of the file within its parent. */ public CarvedFile(String name, long sizeInBytes, List layoutInParent) { this.name = name; this.sizeInBytes = sizeInBytes; this.layoutInParent = layoutInParent; } /** * Gets the name of the carved file. * * @return The file name. */ final String getName() { return name; } /** * Gets the size of the carved file. * * @return The size of the file in bytes. */ final long getSizeInBytes() { return sizeInBytes; } /** * Gets the layout of the carved file within its parent. * * @return A list of TskRange objects representing the layout of the * carved file within its parent. */ final List getLayoutInParent() { return layoutInParent; } } } sleuthkit-4.11.1/bindings/java/src/org/sleuthkit/datamodel/OsAccountManager.java000755 000765 000024 00000202425 14137073414 030516 0ustar00carrierstaff000000 000000 /* * Sleuth Kit Data Model * * Copyright 2020-2021 Basis Technology Corp. * Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
*/ package org.sleuthkit.datamodel; import com.google.common.base.Strings; import org.apache.commons.lang3.StringUtils; import java.sql.PreparedStatement; import java.sql.ResultSet; import java.sql.SQLException; import java.sql.Statement; import java.sql.Types; import java.util.Collections; import java.util.ArrayList; import java.util.List; import java.util.NavigableSet; import java.util.Objects; import java.util.Optional; import java.util.UUID; import java.util.concurrent.ConcurrentSkipListSet; import java.util.stream.Collectors; import org.sleuthkit.datamodel.BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE; import org.sleuthkit.datamodel.OsAccount.OsAccountStatus; import org.sleuthkit.datamodel.OsAccount.OsAccountType; import org.sleuthkit.datamodel.OsAccount.OsAccountAttribute; import org.sleuthkit.datamodel.SleuthkitCase.CaseDbConnection; import org.sleuthkit.datamodel.SleuthkitCase.CaseDbTransaction; import org.sleuthkit.datamodel.TskEvent.OsAccountsUpdatedTskEvent; import static org.sleuthkit.datamodel.WindowsAccountUtils.getWindowsSpecialSidName; import static org.sleuthkit.datamodel.WindowsAccountUtils.isWindowsSpecialSid; /** * Responsible for creating/updating/retrieving the OS accounts for files and * artifacts. */ public final class OsAccountManager { private final SleuthkitCase db; private final Object osAcctInstancesCacheLock; private final NavigableSet osAccountInstanceCache; /** * Construct a OsUserManager for the given SleuthkitCase. * * @param skCase The SleuthkitCase * */ OsAccountManager(SleuthkitCase skCase) { db = skCase; osAcctInstancesCacheLock = new Object(); osAccountInstanceCache = new ConcurrentSkipListSet<>(); } /** * Creates an OS account with given unique id and given realm id. If an * account already exists with the given id, then the existing OS account is * returned. * * @param uniqueAccountId Account sid/uid. * @param realm Account realm. * * @return OsAccount. 
* * @throws TskCoreException If there is an error in creating the OSAccount. * */ OsAccount newOsAccount(String uniqueAccountId, OsAccountRealm realm) throws TskCoreException { // ensure unique id is provided if (Strings.isNullOrEmpty(uniqueAccountId)) { throw new TskCoreException("Cannot create OS account with null uniqueId."); } if (realm == null) { throw new TskCoreException("Cannot create OS account without a realm."); } CaseDbTransaction trans = db.beginTransaction(); try { // try to create account try { OsAccount account = newOsAccount(uniqueAccountId, null, realm, OsAccount.OsAccountStatus.UNKNOWN, trans); trans.commit(); trans = null; return account; } catch (SQLException ex) { // Close the transaction before moving on trans.rollback(); trans = null; // Create may fail if an OsAccount already exists. Optional osAccount = this.getOsAccountByAddr(uniqueAccountId, realm); if (osAccount.isPresent()) { return osAccount.get(); } // create failed for some other reason, throw an exception throw new TskCoreException(String.format("Error creating OsAccount with uniqueAccountId = %s in realm id = %d", uniqueAccountId, realm.getRealmId()), ex); } } finally { if (trans != null) { trans.rollback(); } } } /** * Creates an OS account with Windows-specific data. If an account already * exists with the given id or realm/login, then the existing OS account is * returned. * * If the account realm already exists, but is missing the address or the * realm name, the realm is updated. * * @param sid Account sid/uid, can be null if loginName is * supplied. * @param loginName Login name, can be null if sid is supplied. * @param realmName Realm within which the accountId or login name is * unique. Can be null if sid is supplied. * @param referringHost Host referring the account. * @param realmScope Realm scope. * * @return OsAccount. * * @throws TskCoreException If there is an error in * creating the OSAccount. 
* @throws OsAccountManager.NotUserSIDException If the given SID is not a * user SID. * */ public OsAccount newWindowsOsAccount(String sid, String loginName, String realmName, Host referringHost, OsAccountRealm.RealmScope realmScope) throws TskCoreException, NotUserSIDException { if (realmScope == null) { throw new TskCoreException("RealmScope cannot be null. Use UNKNOWN if scope is not known."); } if (referringHost == null) { throw new TskCoreException("A referring host is required to create an account."); } // ensure at least one of the two is supplied - unique id or a login name if (StringUtils.isBlank(sid) && StringUtils.isBlank(loginName)) { throw new TskCoreException("Cannot create OS account with both uniqueId and loginName as null."); } // Realm name is required if the sid is null. if (StringUtils.isBlank(sid) && StringUtils.isBlank(realmName)) { throw new TskCoreException("Realm name or SID is required to create a Windows account."); } if (!StringUtils.isBlank(sid) && !WindowsAccountUtils.isWindowsUserSid(sid)) { throw new OsAccountManager.NotUserSIDException(String.format("SID = %s is not a user SID.", sid)); } // get the realm for the account, and update it if it is missing addr or name. Optional realmOptional; try (CaseDbConnection connection = db.getConnection()) { realmOptional = db.getOsAccountRealmManager().getAndUpdateWindowsRealm(sid, realmName, referringHost, connection); } OsAccountRealm realm; if (realmOptional.isPresent()) { realm = realmOptional.get(); } else { // realm was not found, create it. realm = db.getOsAccountRealmManager().newWindowsRealm(sid, realmName, referringHost, realmScope); } return newWindowsOsAccount(sid, loginName, realm); } /** * Creates an OS account with Windows-specific data. If an account already * exists with the given id or realm/login, then the existing OS account is * returned. * * @param sid Account sid/uid, can be null if loginName is supplied. * @param loginName Login name, can be null if sid is supplied. 
* @param realm The associated realm. * * @return OsAccount. * * @throws TskCoreException If there is an error in * creating the OSAccount. * @throws OsAccountManager.NotUserSIDException If the given SID is not a * user SID. * */ public OsAccount newWindowsOsAccount(String sid, String loginName, OsAccountRealm realm) throws TskCoreException, NotUserSIDException { // ensure at least one of the two is supplied - unique id or a login name if (StringUtils.isBlank(sid) && StringUtils.isBlank(loginName)) { throw new TskCoreException("Cannot create OS account with both uniqueId and loginName as null."); } if (!StringUtils.isBlank(sid) && !WindowsAccountUtils.isWindowsUserSid(sid)) { throw new OsAccountManager.NotUserSIDException(String.format("SID = %s is not a user SID.", sid)); } CaseDbTransaction trans = db.beginTransaction(); try { // try to create account try { OsAccount account = newOsAccount(sid, loginName, realm, OsAccount.OsAccountStatus.UNKNOWN, trans); // If the SID indicates a special windows account, then set its full name. if (!StringUtils.isBlank(sid) && isWindowsSpecialSid(sid)) { String fullName = getWindowsSpecialSidName(sid); if (StringUtils.isNotBlank(fullName)) { OsAccountUpdateResult updateResult = updateStandardOsAccountAttributes(account, fullName, null, null, null, trans); if (updateResult.getUpdatedAccount().isPresent()) { account = updateResult.getUpdatedAccount().get(); } } } trans.commit(); trans = null; return account; } catch (SQLException ex) { // Rollback the transaction before proceeding trans.rollback(); trans = null; // Create may fail if an OsAccount already exists. 
Optional osAccount; // First search for account by uniqueId if (!Strings.isNullOrEmpty(sid)) { osAccount = getOsAccountByAddr(sid, realm); if (osAccount.isPresent()) { return osAccount.get(); } } // search by loginName if (!Strings.isNullOrEmpty(loginName)) { osAccount = getOsAccountByLoginName(loginName, realm); if (osAccount.isPresent()) { return osAccount.get(); } } // create failed for some other reason, throw an exception throw new TskCoreException(String.format("Error creating OsAccount with sid = %s, loginName = %s, realm = %s, referring host = %s", (sid != null) ? sid : "Null", (loginName != null) ? loginName : "Null", (!realm.getRealmNames().isEmpty()) ? realm.getRealmNames().get(0) : "Null", realm.getScopeHost().isPresent() ? realm.getScopeHost().get().getName() : "Null"), ex); } } finally { if (trans != null) { trans.rollback(); } } } /** * Creates a OS account with the given uid, name, and realm. * * @param uniqueId Account sid/uid. May be null. * @param loginName Login name. May be null only if SID is not null. * @param realm Realm. * @param accountStatus Account status. * @param trans Open transaction to use. * * @return OS account. * * @throws TskCoreException If there is an error creating the account. */ private OsAccount newOsAccount(String uniqueId, String loginName, OsAccountRealm realm, OsAccount.OsAccountStatus accountStatus, CaseDbTransaction trans) throws TskCoreException, SQLException { if (Objects.isNull(realm)) { throw new TskCoreException("Cannot create an OS Account, realm is NULL."); } String signature = getOsAccountSignature(uniqueId, loginName); OsAccount account; CaseDbConnection connection = trans.getConnection(); // first create a tsk_object for the OsAccount. // RAMAN TODO: need to get the correct parent obj id. // Create an Object Directory parent and used its id. 
long parentObjId = 0; int objTypeId = TskData.ObjectType.OS_ACCOUNT.getObjectType(); long osAccountObjId = db.addObject(parentObjId, objTypeId, connection); String accountInsertSQL = "INSERT INTO tsk_os_accounts(os_account_obj_id, login_name, realm_id, addr, signature, status)" + " VALUES (?, ?, ?, ?, ?, ?)"; // NON-NLS PreparedStatement preparedStatement = connection.getPreparedStatement(accountInsertSQL, Statement.NO_GENERATED_KEYS); preparedStatement.clearParameters(); preparedStatement.setLong(1, osAccountObjId); preparedStatement.setString(2, loginName); preparedStatement.setLong(3, realm.getRealmId()); preparedStatement.setString(4, uniqueId); preparedStatement.setString(5, signature); preparedStatement.setInt(6, accountStatus.getId()); connection.executeUpdate(preparedStatement); account = new OsAccount(db, osAccountObjId, realm.getRealmId(), loginName, uniqueId, signature, null, null, null, accountStatus, OsAccount.OsAccountDbStatus.ACTIVE); trans.registerAddedOsAccount(account); return account; } /** * Get the OS account with the given unique id. * * @param addr Account sid/uid. * @param host Host for account realm, may be null. * * @return Optional with OsAccount, Optional.empty if no matching account is * found. * * @throws TskCoreException If there is an error getting the account. */ private Optional getOsAccountByAddr(String addr, Host host) throws TskCoreException { try (CaseDbConnection connection = db.getConnection()) { return getOsAccountByAddr(addr, host, connection); } } /** * Gets the OS account for the given unique id. * * @param uniqueId Account SID/uid. * @param host Host to match the realm, may be null. * @param connection Database connection to use. * * @return Optional with OsAccount, Optional.empty if no account with * matching uniqueId is found. * * @throws TskCoreException */ private Optional getOsAccountByAddr(String uniqueId, Host host, CaseDbConnection connection) throws TskCoreException { String whereHostClause = (host == null) ? 
" 1 = 1 " : " ( realms.scope_host_id = " + host.getHostId() + " OR realms.scope_host_id IS NULL) "; String queryString = "SELECT accounts.os_account_obj_id as os_account_obj_id, accounts.login_name, accounts.full_name, " + " accounts.realm_id, accounts.addr, accounts.signature, " + " accounts.type, accounts.status, accounts.admin, accounts.created_date, accounts.db_status, " + " realms.realm_name as realm_name, realms.realm_addr as realm_addr, realms.realm_signature, realms.scope_host_id, realms.scope_confidence, realms.db_status as realm_db_status " + " FROM tsk_os_accounts as accounts" + " LEFT JOIN tsk_os_account_realms as realms" + " ON accounts.realm_id = realms.id" + " WHERE " + whereHostClause + " AND accounts.db_status = " + OsAccount.OsAccountDbStatus.ACTIVE.getId() + " AND LOWER(accounts.addr) = LOWER('" + uniqueId + "')"; db.acquireSingleUserCaseReadLock(); try (Statement s = connection.createStatement(); ResultSet rs = connection.executeQuery(s, queryString)) { if (!rs.next()) { return Optional.empty(); // no match found } else { return Optional.of(osAccountFromResultSet(rs)); } } catch (SQLException ex) { throw new TskCoreException(String.format("Error getting OS account for unique id = %s and host = %s", uniqueId, (host != null ? host.getName() : "null")), ex); } finally { db.releaseSingleUserCaseReadLock(); } } /** * Gets an active OS Account by the realm and unique id. * * @param uniqueId Account unique id. * @param realm Account realm. * * @return Optional with OsAccount, Optional.empty, if no user is found with * matching realm and unique id. 
* * @throws TskCoreException */ Optional getOsAccountByAddr(String uniqueId, OsAccountRealm realm) throws TskCoreException { String queryString = "SELECT * FROM tsk_os_accounts" + " WHERE LOWER(addr) = LOWER('" + uniqueId + "')" + " AND db_status = " + OsAccount.OsAccountDbStatus.ACTIVE.getId() + " AND realm_id = " + realm.getRealmId(); db.acquireSingleUserCaseReadLock(); try (CaseDbConnection connection = this.db.getConnection(); Statement s = connection.createStatement(); ResultSet rs = connection.executeQuery(s, queryString)) { if (!rs.next()) { return Optional.empty(); // no match found } else { return Optional.of(osAccountFromResultSet(rs)); } } catch (SQLException ex) { throw new TskCoreException(String.format("Error getting OS account for realm = %s and uniqueId = %s.", (realm != null) ? realm.getSignature() : "NULL", uniqueId), ex); } finally { db.releaseSingleUserCaseReadLock(); } } /** * Gets a OS Account by the realm and login name. * * @param loginName Login name. * @param realm Account realm. * * @return Optional with OsAccount, Optional.empty, if no user is found with * matching realm and login name. * * @throws TskCoreException */ Optional getOsAccountByLoginName(String loginName, OsAccountRealm realm) throws TskCoreException { String queryString = "SELECT * FROM tsk_os_accounts" + " WHERE LOWER(login_name) = LOWER('" + loginName + "')" + " AND db_status = " + OsAccount.OsAccountDbStatus.ACTIVE.getId() + " AND realm_id = " + realm.getRealmId(); db.acquireSingleUserCaseReadLock(); try (CaseDbConnection connection = this.db.getConnection(); Statement s = connection.createStatement(); ResultSet rs = connection.executeQuery(s, queryString)) { if (!rs.next()) { return Optional.empty(); // no match found } else { return Optional.of(osAccountFromResultSet(rs)); } } catch (SQLException ex) { throw new TskCoreException(String.format("Error getting OS account for realm = %s and loginName = %s.", (realm != null) ? 
realm.getSignature() : "NULL", loginName), ex); } finally { db.releaseSingleUserCaseReadLock(); } } /** * Get the OS Account with the given object id. * * @param osAccountObjId Object id for the account. * * @return OsAccount. * * @throws TskCoreException If there is an error getting the account. */ public OsAccount getOsAccountByObjectId(long osAccountObjId) throws TskCoreException { try (CaseDbConnection connection = this.db.getConnection()) { return getOsAccountByObjectId(osAccountObjId, connection); } } /** * Get the OsAccount with the given object id. * * @param osAccountObjId Object id for the account. * @param connection Database connection to use. * * @return OsAccount. * * @throws TskCoreException If there is an error getting the account. */ OsAccount getOsAccountByObjectId(long osAccountObjId, CaseDbConnection connection) throws TskCoreException { String queryString = "SELECT * FROM tsk_os_accounts" + " WHERE os_account_obj_id = " + osAccountObjId; db.acquireSingleUserCaseReadLock(); try (Statement s = connection.createStatement(); ResultSet rs = connection.executeQuery(s, queryString)) { if (!rs.next()) { throw new TskCoreException(String.format("No account found with obj id = %d ", osAccountObjId)); } else { return osAccountFromResultSet(rs); } } catch (SQLException ex) { throw new TskCoreException(String.format("Error getting account with obj id = %d ", osAccountObjId), ex); } finally { db.releaseSingleUserCaseReadLock(); } } /** * Records that an OsAccount was used or referenced on a given data source. * This data is automatically recorded when a file or DataArtifact is * created. * * Use this method to explicitly record the association when: - Parsing * account information (such as in the registry) because the account may * already exist in the database, but the account did not create any files. * Therefore, no instance for it would be automatically created, even though * you found data about it. 
- You want to associate more than one OsAccount * with a DataArtifact. Call this for each OsAccount not specified in * 'newDataArtifact()'. * * This method does nothing if the instance is already recorded. * * @param osAccount Account for which an instance needs to be added. * @param dataSource Data source where the instance is found. * @param instanceType Instance type. * * @return OsAccountInstance Existing or newly created account instance. * * @throws TskCoreException If there is an error creating the account * instance. */ public OsAccountInstance newOsAccountInstance(OsAccount osAccount, DataSource dataSource, OsAccountInstance.OsAccountInstanceType instanceType) throws TskCoreException { if (osAccount == null) { throw new TskCoreException("Cannot create account instance with null account."); } if (dataSource == null) { throw new TskCoreException("Cannot create account instance with null data source."); } /* * Check the cache of OS account instances for an existing instance for * this OS account and data source. Note that the account instance * created here has a bogus instance ID. This is possible since the * instance ID is not considered in the equals() and hashCode() methods * of this class. */ OsAccountInstance bogus = new OsAccountInstance(db, 0, osAccount.getId(), dataSource.getId(), instanceType); synchronized (osAcctInstancesCacheLock) { if (osAccountInstanceCache.contains(bogus)) { // since we checked for contains(bogus), floor(bogus) should return the exact match and not a less than match. return osAccountInstanceCache.floor(bogus); } } try (CaseDbConnection connection = this.db.getConnection()) { return newOsAccountInstance(osAccount.getId(), dataSource.getId(), instanceType, connection); } } /** * Adds a row to the tsk_os_account_instances table. Does nothing if the * instance already exists in the table. * * @param osAccountId Account id for which an instance needs to be * added. 
	 * @param dataSourceObjId Data source id where the instance is found.
	 * @param instanceType    Instance type.
	 * @param connection      The current database connection.
	 *
	 * @return OsAccountInstance Existing or newly created account instance.
	 *
	 * @throws TskCoreException If there is an error creating the account
	 *                          instance.
	 */
	OsAccountInstance newOsAccountInstance(long osAccountId, long dataSourceObjId, OsAccountInstance.OsAccountInstanceType instanceType, CaseDbConnection connection) throws TskCoreException {
		/*
		 * Check the cache of OS account instances for an existing instance for
		 * this OS account and data source. Note that the account instance
		 * created here has a bogus instance ID. This is possible since the
		 * instance ID is not considered in the equals() and hashCode() methods
		 * of this class.
		 */
		OsAccountInstance bogus = new OsAccountInstance(db, 0, osAccountId, dataSourceObjId, instanceType);
		synchronized (osAcctInstancesCacheLock) {
			if (osAccountInstanceCache.contains(bogus)) {
				// since we checked for contains(bogus), floor(bogus) should return the exact match and not a less than match.
				return osAccountInstanceCache.floor(bogus);
			}
		}

		/*
		 * Create the OS account instance.
		 */
		db.acquireSingleUserCaseWriteLock();
		try {
			// INSERT OR IGNORE (or DB-specific equivalent) so a pre-existing
			// row is a silent no-op rather than a constraint violation.
			String accountInsertSQL = db.getInsertOrIgnoreSQL("INTO tsk_os_account_instances(os_account_obj_id, data_source_obj_id, instance_type)"
					+ " VALUES (?, ?, ?)"); // NON-NLS
			PreparedStatement preparedStatement = connection.getPreparedStatement(accountInsertSQL, Statement.RETURN_GENERATED_KEYS);
			preparedStatement.clearParameters();
			preparedStatement.setLong(1, osAccountId);
			preparedStatement.setLong(2, dataSourceObjId);
			preparedStatement.setInt(3, instanceType.getId());
			connection.executeUpdate(preparedStatement);
			try (ResultSet resultSet = preparedStatement.getGeneratedKeys();) {
				if (resultSet.next()) {
					OsAccountInstance accountInstance = new OsAccountInstance(db, resultSet.getLong(1), osAccountId, dataSourceObjId, instanceType);
					synchronized (osAcctInstancesCacheLock) {
						osAccountInstanceCache.add(accountInstance);
					}
					/*
					 * There is a potential issue here. The cache of OS account
					 * instances is an optimization and was not intended to be
					 * used as an authoritative indicator of whether or not a
					 * particular OS account instance was already added to the
					 * case. In fact, the entire cache is flushed during merge
					 * operations. But regardless, there is a check-then-act
					 * race condition for multi-user cases, with or without the
					 * cache. And although the case database schema and the SQL
					 * returned by getInsertOrIgnoreSQL() seamlessly prevents
					 * duplicates in the case database, a valid row ID is
					 * returned here even if the INSERT is not done. So the
					 * bottom line is that a redundant event may be published
					 * from time to time.
					 */
					db.fireTSKEvent(new TskEvent.OsAcctInstancesAddedTskEvent(Collections.singletonList(accountInstance)));
					return accountInstance;
				} else {
					throw new TskCoreException(String.format("Could not get autogen key after row insert for OS account instance. OS account object id = %d, data source object id = %d", osAccountId, dataSourceObjId));
				}
			}
		} catch (SQLException ex) {
			throw new TskCoreException(String.format("Error adding OS account instance for OS account object id = %d, data source object id = %d", osAccountId, dataSourceObjId), ex);
		} finally {
			db.releaseSingleUserCaseWriteLock();
		}
	}

	/**
	 * Get all accounts that had an instance on the specified host.
	 *
	 * @param host Host for which to look accounts for.
	 *
	 * @return Set of OsAccounts, may be empty.
	 *
	 * @throws org.sleuthkit.datamodel.TskCoreException
	 */
	public List getOsAccounts(Host host) throws TskCoreException {
		// Only ACTIVE accounts that have at least one instance on a data
		// source belonging to the given host are returned.
		String queryString = "SELECT * FROM tsk_os_accounts accounts "
				+ "WHERE accounts.os_account_obj_id IN "
				+ "(SELECT instances.os_account_obj_id "
				+ "FROM tsk_os_account_instances instances "
				+ "INNER JOIN data_source_info datasources ON datasources.obj_id = instances.data_source_obj_id "
				+ "WHERE datasources.host_id = " + host.getHostId() + ") "
				+ "AND accounts.db_status = " + OsAccount.OsAccountDbStatus.ACTIVE.getId();

		db.acquireSingleUserCaseReadLock();
		try (CaseDbConnection connection = this.db.getConnection();
				Statement s = connection.createStatement();
				ResultSet rs = connection.executeQuery(s, queryString)) {
			List accounts = new ArrayList<>();
			while (rs.next()) {
				accounts.add(osAccountFromResultSet(rs));
			}
			return accounts;
		} catch (SQLException ex) {
			throw new TskCoreException(String.format("Error getting OS accounts for host id = %d", host.getHostId()), ex);
		} finally {
			db.releaseSingleUserCaseReadLock();
		}
	}

	/**
	 * Get all accounts that had an instance on the specified data source.
	 *
	 * @param dataSourceId Data source id for which to look accounts for.
	 *
	 * @return Set of OsAccounts, may be empty.
	 *
	 * @throws org.sleuthkit.datamodel.TskCoreException
	 */
	public List getOsAccountsByDataSourceObjId(long dataSourceId) throws TskCoreException {
		// Only ACTIVE accounts with an instance on this data source.
		String queryString = "SELECT * FROM tsk_os_accounts acc "
				+ "WHERE acc.os_account_obj_id IN "
				+ "(SELECT instance.os_account_obj_id "
				+ "FROM tsk_os_account_instances instance "
				+ "WHERE instance.data_source_obj_id = " + dataSourceId + ") "
				+ "AND acc.db_status = " + OsAccount.OsAccountDbStatus.ACTIVE.getId();

		db.acquireSingleUserCaseReadLock();
		try (CaseDbConnection connection = this.db.getConnection();
				Statement s = connection.createStatement();
				ResultSet rs = connection.executeQuery(s, queryString)) {
			List accounts = new ArrayList<>();
			while (rs.next()) {
				accounts.add(osAccountFromResultSet(rs));
			}
			return accounts;
		} catch (SQLException ex) {
			throw new TskCoreException(String.format("Error getting OS accounts for data source id = %d", dataSourceId), ex);
		} finally {
			db.releaseSingleUserCaseReadLock();
		}
	}

	/**
	 * Merge all OS accounts from sourceRealm into destRealm. After this call: -
	 * sourceRealm's accounts will have been moved or merged - References to
	 * sourceRealm accounts will be updated - sourceRealm will still exist, but
	 * will be empty
	 *
	 * @param sourceRealm The source realm.
	 * @param destRealm   The destination realm.
	 * @param trans       The current transaction.
	 *
	 * @throws TskCoreException
	 */
	void mergeOsAccountsForRealms(OsAccountRealm sourceRealm, OsAccountRealm destRealm, CaseDbTransaction trans) throws TskCoreException {
		List destinationAccounts = getOsAccounts(destRealm, trans.getConnection());
		List sourceAccounts = getOsAccounts(sourceRealm, trans.getConnection());

		for (OsAccount sourceAccount : sourceAccounts) {
			// First a check for the case where the source account has both the login name and unique ID set and
			// we have separate matches in the destination account for both. If we find this case, we need to first merge
			// the two accounts in the destination realm. This will ensure that all source accounts match at most one
			// destination account.
			// Note that we only merge accounts based on login name if the unique ID is empty.
			if (sourceAccount.getAddr().isPresent() && sourceAccount.getLoginName().isPresent()) {
				// Optional.equals() compares the contained values (both must be present and equal).
				List duplicateDestAccounts = destinationAccounts.stream()
						.filter(p -> p.getAddr().equals(sourceAccount.getAddr())
								|| (p.getLoginName().equals(sourceAccount.getLoginName()) && (!p.getAddr().isPresent())))
						.collect(Collectors.toList());
				if (duplicateDestAccounts.size() > 1) {
					// Collapse all duplicates into the first destination match
					// before matching the source account against it.
					OsAccount combinedDestAccount = duplicateDestAccounts.get(0);
					duplicateDestAccounts.remove(combinedDestAccount);
					for (OsAccount dupeDestAccount : duplicateDestAccounts) {
						mergeOsAccounts(dupeDestAccount, combinedDestAccount, trans);
					}
				}
			}

			// Look for matching destination account
			OsAccount matchingDestAccount = null;

			// First look for matching unique id
			if (sourceAccount.getAddr().isPresent()) {
				List matchingDestAccounts = destinationAccounts.stream()
						.filter(p -> p.getAddr().equals(sourceAccount.getAddr()))
						.collect(Collectors.toList());
				if (!matchingDestAccounts.isEmpty()) {
					matchingDestAccount = matchingDestAccounts.get(0);
				}
			}

			// If a match wasn't found yet, look for a matching login name.
			// We will merge only if:
			// - We didn't already find a unique ID match
			// - The source account has no unique ID OR the destination account has no unique ID
			if (matchingDestAccount == null && sourceAccount.getLoginName().isPresent()) {
				List matchingDestAccounts = destinationAccounts.stream()
						.filter(p -> (p.getLoginName().equals(sourceAccount.getLoginName())
								&& ((!sourceAccount.getAddr().isPresent()) || (!p.getAddr().isPresent()))))
						.collect(Collectors.toList());
				if (!matchingDestAccounts.isEmpty()) {
					matchingDestAccount = matchingDestAccounts.get(0);
				}
			}

			// If we found a match, merge the accounts. Otherwise simply update the realm id
			if (matchingDestAccount != null) {
				mergeOsAccounts(sourceAccount, matchingDestAccount, trans);
			} else {
				String query = "UPDATE tsk_os_accounts SET realm_id = " + destRealm.getRealmId() + " WHERE os_account_obj_id = " + sourceAccount.getId();
				try (Statement s = trans.getConnection().createStatement()) {
					s.executeUpdate(query);
				} catch (SQLException ex) {
					throw new TskCoreException("Error executing SQL update: " + query, ex);
				}
				trans.registerChangedOsAccount(sourceAccount);
			}
		}
	}

	/**
	 * Merges data between two accounts so that only one is active at the end
	 * and all references are to it. Data from the destination account will take
	 * priority. Basic operation: - Update the destination if source has names,
	 * etc. not already in the destination - Update any references to the source
	 * (such as in tsk_files) to point to destination - Mark the source as
	 * "MERGED" and it will not come back in future queries.
	 *
	 * @param sourceAccount The source account.
	 * @param destAccount   The destination account.
	 * @param trans         The current transaction.
	 *
	 * @throws TskCoreException
	 */
	private void mergeOsAccounts(OsAccount sourceAccount, OsAccount destAccount, CaseDbTransaction trans) throws TskCoreException {

		String query = ""; // kept in scope so the failing statement can be reported
		try (Statement s = trans.getConnection().createStatement()) {

			// Update all references
			query = makeOsAccountUpdateQuery("tsk_os_account_attributes", sourceAccount, destAccount);
			s.executeUpdate(query);

			// tsk_os_account_instances has a unique constraint on os_account_obj_id, data_source_obj_id, host_id,
			// so delete any rows that would be duplicates.
			query = "DELETE FROM tsk_os_account_instances "
					+ "WHERE id IN ( "
					+ "SELECT "
					+ " sourceAccountInstance.id "
					+ "FROM "
					+ " tsk_os_account_instances destAccountInstance "
					+ "INNER JOIN tsk_os_account_instances sourceAccountInstance ON destAccountInstance.data_source_obj_id = sourceAccountInstance.data_source_obj_id "
					+ "WHERE destAccountInstance.os_account_obj_id = " + destAccount.getId()
					+ " AND sourceAccountInstance.os_account_obj_id = " + sourceAccount.getId() + " )";
			s.executeUpdate(query);

			query = makeOsAccountUpdateQuery("tsk_os_account_instances", sourceAccount, destAccount);
			s.executeUpdate(query);
			// Cache entries now refer to moved/deleted rows; flush the whole cache.
			synchronized (osAcctInstancesCacheLock) {
				osAccountInstanceCache.clear();
			}

			query = makeOsAccountUpdateQuery("tsk_files", sourceAccount, destAccount);
			s.executeUpdate(query);

			query = makeOsAccountUpdateQuery("tsk_data_artifacts", sourceAccount, destAccount);
			s.executeUpdate(query);

			// Update the source account. Make a dummy signature to prevent problems with the unique constraint.
			String mergedSignature = makeMergedOsAccountSignature();
			query = "UPDATE tsk_os_accounts SET merged_into = " + destAccount.getId()
					+ ", db_status = " + OsAccount.OsAccountDbStatus.MERGED.getId()
					+ ", signature = '" + mergedSignature + "' "
					+ " WHERE os_account_obj_id = " + sourceAccount.getId();

			s.executeUpdate(query);
			trans.registerDeletedOsAccount(sourceAccount.getId());

			// Merge and update the destination account. Note that this must be done after updating
			// the source account to prevent conflicts when merging two accounts in the
			// same realm.
			mergeOsAccountObjectsAndUpdateDestAccount(sourceAccount, destAccount, trans);
		} catch (SQLException ex) {
			throw new TskCoreException("Error executing SQL update: " + query, ex);
		}
	}

	/**
	 * Create a random signature for accounts that have been merged.
	 *
	 * @return The random signature.
	 */
	private String makeMergedOsAccountSignature() {
		// UUID keeps the dummy signature unique, satisfying the table's
		// unique constraint on signature.
		return "MERGED " + UUID.randomUUID().toString();
	}

	/**
	 * Create the query to update the os account column to the merged account.
	 *
	 * @param tableName     Name of table to update.
	 * @param sourceAccount The source account.
	 * @param destAccount   The destination account.
	 *
	 * @return The query.
	 */
	private String makeOsAccountUpdateQuery(String tableName, OsAccount sourceAccount, OsAccount destAccount) {
		return "UPDATE " + tableName + " SET os_account_obj_id = " + destAccount.getId() + " WHERE os_account_obj_id = " + sourceAccount.getId();
	}

	/**
	 * Copy all fields from sourceAccount that are not set in destAccount.
	 *
	 * Updates the dest account in the database.
	 *
	 * @param sourceAccount The source account.
	 * @param destAccount   The destination account.
	 * @param trans         Transaction to use for database operations.
	 *
	 * @return OsAccount Updated account.
	 */
	private OsAccount mergeOsAccountObjectsAndUpdateDestAccount(OsAccount sourceAccount, OsAccount destAccount, CaseDbTransaction trans) throws TskCoreException {

		OsAccount mergedDestAccount = destAccount;

		String destLoginName = null;
		String destAddr = null;

		// Copy any fields that aren't set in the destination to the value from the source account.
		if (!destAccount.getLoginName().isPresent() && sourceAccount.getLoginName().isPresent()) {
			destLoginName = sourceAccount.getLoginName().get();
		}

		if (!destAccount.getAddr().isPresent() && sourceAccount.getAddr().isPresent()) {
			destAddr = sourceAccount.getAddr().get();
		}

		// update the dest account core
		OsAccountUpdateResult updateStatus = this.updateOsAccountCore(destAccount, destAddr, destLoginName, trans);

		if (updateStatus.getUpdateStatusCode() == OsAccountUpdateStatus.UPDATED && updateStatus.getUpdatedAccount().isPresent()) {
			mergedDestAccount = updateStatus.getUpdatedAccount().get();
		}

		String destFullName = null;
		Long destCreationTime = null;
		if (!destAccount.getFullName().isPresent() && sourceAccount.getFullName().isPresent()) {
			destFullName = sourceAccount.getFullName().get();
		}

		if (!destAccount.getCreationTime().isPresent() && sourceAccount.getCreationTime().isPresent()) {
			destCreationTime = sourceAccount.getCreationTime().get();
		}

		// update the dest account properties
		updateStatus = this.updateStandardOsAccountAttributes(destAccount, destFullName, null, null, destCreationTime, trans);
		if (updateStatus.getUpdateStatusCode() == OsAccountUpdateStatus.UPDATED && updateStatus.getUpdatedAccount().isPresent()) {
			mergedDestAccount = updateStatus.getUpdatedAccount().get();
		}

		return mergedDestAccount;
	}

	/**
	 * Get all active accounts associated with the given realm.
	 *
	 * @param realm      Realm for which to look accounts for.
	 * @param connection Current database connection.
	 *
	 * @return Set of OsAccounts, may be empty.
	 *
	 * @throws org.sleuthkit.datamodel.TskCoreException
	 */
	private List getOsAccounts(OsAccountRealm realm, CaseDbConnection connection) throws TskCoreException {
		String queryString = "SELECT * FROM tsk_os_accounts"
				+ " WHERE realm_id = " + realm.getRealmId()
				+ " AND db_status = " + OsAccount.OsAccountDbStatus.ACTIVE.getId()
				+ " ORDER BY os_account_obj_id";

		// NOTE(review): unlike the public query methods, no case read lock is
		// taken here — presumably because callers already hold a transaction
		// or lock; confirm against the callers.
		try (Statement s = connection.createStatement();
				ResultSet rs = connection.executeQuery(s, queryString)) {
			List accounts = new ArrayList<>();
			while (rs.next()) {
				accounts.add(osAccountFromResultSet(rs));
			}
			return accounts;
		} catch (SQLException ex) {
			throw new TskCoreException(String.format("Error getting OS accounts for realm id = %d", realm.getRealmId()), ex);
		}
	}

	/**
	 * Get all active accounts.
	 *
	 * @return Set of OsAccounts, may be empty.
	 *
	 * @throws org.sleuthkit.datamodel.TskCoreException
	 */
	public List getOsAccounts() throws TskCoreException {
		String queryString = "SELECT * FROM tsk_os_accounts"
				+ " WHERE db_status = " + OsAccount.OsAccountDbStatus.ACTIVE.getId();

		db.acquireSingleUserCaseReadLock();
		try (CaseDbConnection connection = this.db.getConnection();
				Statement s = connection.createStatement();
				ResultSet rs = connection.executeQuery(s, queryString)) {
			List accounts = new ArrayList<>();
			while (rs.next()) {
				accounts.add(osAccountFromResultSet(rs));
			}
			return accounts;
		} catch (SQLException ex) {
			throw new TskCoreException(String.format("Error getting OS accounts"), ex);
		} finally {
			db.releaseSingleUserCaseReadLock();
		}
	}

	/**
	 * Gets an OS account using Windows-specific data.
	 *
	 * @param sid           Account SID, maybe null if loginName is supplied.
	 * @param loginName     Login name, maybe null if sid is supplied.
	 * @param realmName     Realm within which the accountId or login name is
	 *                      unique. Can be null if sid is supplied.
	 * @param referringHost Host referring the account.
	 *
	 * @return Optional with OsAccount, Optional.empty if no matching OsAccount
	 * is found.
	 *
	 * @throws TskCoreException    If there is an error getting the account.
	 * @throws NotUserSIDException If the given SID is not a user SID.
	 */
	public Optional getWindowsOsAccount(String sid, String loginName, String realmName, Host referringHost) throws TskCoreException, NotUserSIDException {

		if (referringHost == null) {
			throw new TskCoreException("A referring host is required to get an account.");
		}

		// ensure at least one of the two is supplied - sid or a login name
		if (StringUtils.isBlank(sid) && StringUtils.isBlank(loginName)) {
			throw new TskCoreException("Cannot get an OS account with both SID and loginName as null.");
		}

		// first get the realm for the given sid
		Optional realm = db.getOsAccountRealmManager().getWindowsRealm(sid, realmName, referringHost);
		if (!realm.isPresent()) {
			// No realm means no account can exist for it.
			return Optional.empty();
		}

		// search by SID
		if (!Strings.isNullOrEmpty(sid)) {
			if (!WindowsAccountUtils.isWindowsUserSid(sid)) {
				throw new OsAccountManager.NotUserSIDException(String.format("SID = %s is not a user SID.", sid));
			}

			Optional account = this.getOsAccountByAddr(sid, realm.get());
			if (account.isPresent()) {
				return account;
			}
		}

		// search by login name (fallback when no SID match was found)
		return this.getOsAccountByLoginName(loginName, realm.get());
	}

	/**
	 * Adds rows to the tsk_os_account_attributes table for the given set of
	 * attributes.
	 *
	 * @param account           Account for which the attributes is being added.
	 * @param accountAttributes List of attributes to add.
	 *
	 * @throws TskCoreException
	 */
	public void addExtendedOsAccountAttributes(OsAccount account, List accountAttributes) throws TskCoreException {

		synchronized (account) { // synchronized to prevent multiple threads trying to add osAccount attributes concurrently to the same osAccount.
			db.acquireSingleUserCaseWriteLock();

			try (CaseDbConnection connection = db.getConnection()) {
				for (OsAccountAttribute accountAttribute : accountAttributes) {

					String attributeInsertSQL = "INSERT INTO tsk_os_account_attributes(os_account_obj_id, host_id, source_obj_id, attribute_type_id, value_type, value_byte, value_text, value_int32, value_int64, value_double)"
							+ " VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)"; // NON-NLS

					PreparedStatement preparedStatement = connection.getPreparedStatement(attributeInsertSQL, Statement.RETURN_GENERATED_KEYS);
					preparedStatement.clearParameters();

					preparedStatement.setLong(1, account.getId());
					// host_id and source_obj_id are nullable; bind SQL NULL
					// when the Optional is empty.
					if (accountAttribute.getHostId().isPresent()) {
						preparedStatement.setLong(2, accountAttribute.getHostId().get());
					} else {
						preparedStatement.setNull(2, java.sql.Types.NULL);
					}
					if (accountAttribute.getSourceObjectId().isPresent()) {
						preparedStatement.setLong(3, accountAttribute.getSourceObjectId().get());
					} else {
						preparedStatement.setNull(3, java.sql.Types.NULL);
					}

					preparedStatement.setLong(4, accountAttribute.getAttributeType().getTypeID());
					preparedStatement.setLong(5, accountAttribute.getAttributeType().getValueType().getType());

					// Exactly one of the value_* columns is populated,
					// selected by the attribute's value type; the rest are
					// bound to null.
					if (accountAttribute.getAttributeType().getValueType() == TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.BYTE) {
						preparedStatement.setBytes(6, accountAttribute.getValueBytes());
					} else {
						preparedStatement.setBytes(6, null);
					}

					if (accountAttribute.getAttributeType().getValueType() == TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING
							|| accountAttribute.getAttributeType().getValueType() == TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.JSON) {
						preparedStatement.setString(7, accountAttribute.getValueString());
					} else {
						preparedStatement.setString(7, null);
					}
					if (accountAttribute.getAttributeType().getValueType() == TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.INTEGER) {
						preparedStatement.setInt(8, accountAttribute.getValueInt());
					} else {
						preparedStatement.setNull(8, java.sql.Types.NULL);
					}
					if (accountAttribute.getAttributeType().getValueType() == TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DATETIME
							|| accountAttribute.getAttributeType().getValueType() == TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.LONG) {
						preparedStatement.setLong(9, accountAttribute.getValueLong());
					} else {
						preparedStatement.setNull(9, java.sql.Types.NULL);
					}
					if (accountAttribute.getAttributeType().getValueType() == TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DOUBLE) {
						preparedStatement.setDouble(10, accountAttribute.getValueDouble());
					} else {
						preparedStatement.setNull(10, java.sql.Types.NULL);
					}

					connection.executeUpdate(preparedStatement);
				}
			} catch (SQLException ex) {
				throw new TskCoreException(String.format("Error adding OS Account attribute for account id = %d", account.getId()), ex);
			} finally {
				db.releaseSingleUserCaseWriteLock();
			}

			// set the attribute list in account to the most current list from the database
			List currentAttribsList = getOsAccountAttributes(account);
			account.setAttributesInternal(currentAttribsList);
		}
		fireChangeEvent(account);
	}

	/**
	 * Get the OS account attributes for the given account.
	 *
	 * @param account Account to get the attributes for.
	 *
	 * @return List of attributes, may be an empty list.
	 *
	 * @throws TskCoreException
	 */
	List getOsAccountAttributes(OsAccount account) throws TskCoreException {

		// LEFT JOIN: attributes without a host (host_id NULL) are still returned.
		String queryString = "SELECT attributes.os_account_obj_id as os_account_obj_id, attributes.host_id as host_id, attributes.source_obj_id as source_obj_id, "
				+ " attributes.attribute_type_id as attribute_type_id, attributes.value_type as value_type, attributes.value_byte as value_byte, "
				+ " attributes.value_text as value_text, attributes.value_int32 as value_int32, attributes.value_int64 as value_int64, attributes.value_double as value_double, "
				+ " hosts.id, hosts.name as host_name, hosts.db_status as host_status "
				+ " FROM tsk_os_account_attributes as attributes"
				+ "		LEFT JOIN tsk_hosts as hosts "
				+ " ON attributes.host_id = hosts.id "
				+ " WHERE os_account_obj_id = " + account.getId();

		db.acquireSingleUserCaseReadLock();
		try (CaseDbConnection connection = this.db.getConnection();
				Statement s = connection.createStatement();
				ResultSet rs = connection.executeQuery(s, queryString)) {

			List attributes = new ArrayList<>();
			while (rs.next()) {

				// host and sourceContent are optional; rs.wasNull() detects
				// the NULL case after reading the long column.
				Host host = null;
				long hostId = rs.getLong("host_id");
				if (!rs.wasNull()) {
					host = new Host(hostId, rs.getString("host_name"), Host.HostDbStatus.fromID(rs.getInt("host_status")));
				}

				Content sourceContent = null;
				long sourceObjId = rs.getLong("source_obj_id");
				if (!rs.wasNull()) {
					sourceContent = this.db.getContentById(sourceObjId);
				}
				BlackboardAttribute.Type attributeType = db.getAttributeType(rs.getInt("attribute_type_id"));
				OsAccountAttribute attribute = account.new OsAccountAttribute(attributeType, rs.getInt("value_int32"), rs.getLong("value_int64"),
						rs.getDouble("value_double"), rs.getString("value_text"), rs.getBytes("value_byte"),
						db, account, host, sourceContent);

				attributes.add(attribute);
			}
			return attributes;
		} catch (SQLException ex) {
			throw new TskCoreException(String.format("Error getting OS account attributes for account obj id = %d", account.getId()), ex);
		} finally {
			db.releaseSingleUserCaseReadLock();
		}
	}

	/**
	 * Gets the OS account instances for a given OS account.
	 *
	 * @param account The OS account.
	 *
	 * @return The OS account instances, may be an empty list.
	 *
	 * @throws TskCoreException
	 */
	List getOsAccountInstances(OsAccount account) throws TskCoreException {
		String whereClause = "tsk_os_account_instances.os_account_obj_id = " + account.getId();
		return getOsAccountInstances(whereClause);
	}

	/**
	 * Gets the OS account instances with the given instance IDs.
	 *
	 * @param instanceIDs The instance IDs.
	 *
	 * @return The OS account instances.
	 *
	 * @throws TskCoreException Thrown if there is an error querying the case
	 *                          database.
	 */
	public List getOsAccountInstances(List instanceIDs) throws TskCoreException {
		// Build a comma-separated id list for an IN (...) clause.
		String instanceIds = instanceIDs.stream().map(id -> id.toString()).collect(Collectors.joining(","));
		String whereClause = "tsk_os_account_instances.id IN (" + instanceIds + ")";
		return getOsAccountInstances(whereClause);
	}

	/**
	 * Gets the OS account instances that satisfy the given SQL WHERE clause.
	 *
	 * @param whereClause The SQL WHERE clause.
	 *
	 * @return The OS account instances.
	 *
	 * @throws TskCoreException Thrown if there is an error querying the case
	 *                          database.
	 */
	private List getOsAccountInstances(String whereClause) throws TskCoreException {
		List osAcctInstances = new ArrayList<>();
		String querySQL = "SELECT * FROM tsk_os_account_instances WHERE " + whereClause;
		db.acquireSingleUserCaseReadLock();
		try (CaseDbConnection connection = db.getConnection();
				PreparedStatement preparedStatement = connection.getPreparedStatement(querySQL, Statement.NO_GENERATED_KEYS);
				ResultSet results = connection.executeQuery(preparedStatement)) {
			while (results.next()) {
				long instanceId = results.getLong("id");
				long osAccountObjID = results.getLong("os_account_obj_id");
				long dataSourceObjId = results.getLong("data_source_obj_id");
				int instanceType = results.getInt("instance_type");
				osAcctInstances.add(new OsAccountInstance(db, instanceId, osAccountObjID, dataSourceObjId, OsAccountInstance.OsAccountInstanceType.fromID(instanceType)));
			}
		} catch (SQLException ex) {
			throw new TskCoreException("Failed to get OsAccountInstances (SQL = " + querySQL + ")", ex);
		} finally {
			db.releaseSingleUserCaseReadLock();
		}
		return osAcctInstances;
	}

	/**
	 * Updates the properties of the specified account in the database.
	 *
	 * A column is updated only if a non-null value has been specified.
	 *
	 * @param osAccount     OsAccount that needs to be updated in the database.
	 * @param fullName      Full name, may be null.
	 * @param accountType   Account type, may be null
	 * @param accountStatus Account status, may be null.
	 * @param creationTime  Creation time, may be null.
	 *
	 * @return OsAccountUpdateResult Account update status, and updated account.
	 *
	 * @throws TskCoreException If there is a database error or if the updated
	 *                          information conflicts with an existing account.
	 */
	public OsAccountUpdateResult updateStandardOsAccountAttributes(OsAccount osAccount, String fullName, OsAccountType accountType, OsAccountStatus accountStatus, Long creationTime) throws TskCoreException {
		CaseDbTransaction trans = db.beginTransaction();
		try {
			OsAccountUpdateResult updateStatus = updateStandardOsAccountAttributes(osAccount, fullName, accountType, accountStatus, creationTime, trans);

			trans.commit();
			trans = null; // null-out so the finally block does not roll back a committed transaction
			return updateStatus;
		} finally {
			if (trans != null) {
				trans.rollback();
			}
		}
	}

	/**
	 * Updates the properties of the specified account in the database.
	 *
	 * A column is updated only if a non-null value has been specified.
	 *
	 * @param osAccount     OsAccount that needs to be updated in the database.
	 * @param fullName      Full name, may be null.
	 * @param accountType   Account type, may be null
	 * @param accountStatus Account status, may be null.
	 * @param creationTime  Creation time, may be null.
	 * @param trans         Transaction to use for database operation.
	 *
	 * @return OsAccountUpdateResult Account update status, and updated account.
	 *
	 * @throws TskCoreException If there is a database error or if the updated
	 *                          information conflicts with an existing account.
	 */
	OsAccountUpdateResult updateStandardOsAccountAttributes(OsAccount osAccount, String fullName, OsAccountType accountType, OsAccountStatus accountStatus, Long creationTime, CaseDbTransaction trans) throws TskCoreException {

		OsAccountUpdateStatus updateStatusCode = OsAccountUpdateStatus.NO_CHANGE;

		try {
			CaseDbConnection connection = trans.getConnection();

			if (!StringUtils.isBlank(fullName)) {
				updateAccountColumn(osAccount.getId(), "full_name", fullName, connection);
				updateStatusCode = OsAccountUpdateStatus.UPDATED;
			}

			// NOTE(review): accountType/accountStatus are enum-typed here while
			// updateAccountColumn dispatches only on String/Long/Integer —
			// confirm against the upstream source whether their numeric ids
			// are expected (generics in this extraction appear stripped).
			if (Objects.nonNull(accountType)) {
				updateAccountColumn(osAccount.getId(), "type", accountType, connection);
				updateStatusCode = OsAccountUpdateStatus.UPDATED;
			}

			if (Objects.nonNull(accountStatus)) {
				updateAccountColumn(osAccount.getId(), "status", accountStatus, connection);
				updateStatusCode = OsAccountUpdateStatus.UPDATED;
			}

			if (Objects.nonNull(creationTime)) {
				updateAccountColumn(osAccount.getId(), "created_date", creationTime, connection);
				updateStatusCode = OsAccountUpdateStatus.UPDATED;
			}

			// if nothing has been changed, return
			if (updateStatusCode == OsAccountUpdateStatus.NO_CHANGE) {
				return new OsAccountUpdateResult(updateStatusCode, null);
			}

			// get the updated account from database
			OsAccount updatedAccount = getOsAccountByObjectId(osAccount.getId(), connection);

			// register the updated account with the transaction to fire off an event
			trans.registerChangedOsAccount(updatedAccount);

			return new OsAccountUpdateResult(updateStatusCode, updatedAccount);

		} catch (SQLException ex) {
			throw new TskCoreException(String.format("Error updating account with addr = %s, account id = %d", osAccount.getAddr().orElse("Unknown"), osAccount.getId()), ex);
		}
	}

	/**
	 * Updates specified column in the tsk_os_accounts table to the specified
	 * value.
	 *
	 * @param <T>          Type of value - must be a String, Long or an Integer.
	 * @param accountObjId Object id of the account to be updated.
	 * @param colName      Name of column to be updated.
	 * @param colValue     New column value.
	 * @param connection   Database connection to use.
	 *
	 * @throws SQLException     If there is an error updating the database.
	 * @throws TskCoreException If the value type is not handled.
	 */
	// NOTE(review): the generic type parameter declaration appears stripped by
	// this extraction (the javadoc documents a <T>); verify against upstream.
	private void updateAccountColumn(long accountObjId, String colName, T colValue, CaseDbConnection connection) throws SQLException, TskCoreException {

		String updateSQL = "UPDATE tsk_os_accounts "
				+ " SET " + colName + " = ? "
				+ " WHERE os_account_obj_id = ?";

		db.acquireSingleUserCaseWriteLock();
		try {
			PreparedStatement preparedStatement = connection.getPreparedStatement(updateSQL, Statement.NO_GENERATED_KEYS);
			preparedStatement.clearParameters();

			if (Objects.isNull(colValue)) {
				preparedStatement.setNull(1, Types.NULL); // handle null value
			} else {
				// Dispatch on runtime type; anything else is rejected.
				if (colValue instanceof String) {
					preparedStatement.setString(1, (String) colValue);
				} else if (colValue instanceof Long) {
					preparedStatement.setLong(1, (Long) colValue);
				} else if (colValue instanceof Integer) {
					preparedStatement.setInt(1, (Integer) colValue);
				} else {
					throw new TskCoreException(String.format("Unhandled column data type received while updating the account (%d) ", accountObjId));
				}
			}

			preparedStatement.setLong(2, accountObjId);

			connection.executeUpdate(preparedStatement);
		} finally {
			db.releaseSingleUserCaseWriteLock();
		}
	}

	/**
	 * Updates the signature of the specified account, if the db status of the
	 * account is active.
	 *
	 * @param accountObjId Object id of the account to be updated.
	 * @param signature    New signature.
	 * @param connection   Database connection to use.
	 *
	 * @throws SQLException If there is an error updating the database.
	 */
	private void updateAccountSignature(long accountObjId, String signature, CaseDbConnection connection) throws SQLException {

		// CASE expression leaves the signature untouched for non-ACTIVE
		// (e.g. merged) accounts.
		String updateSQL = "UPDATE tsk_os_accounts SET "
				+ " signature = "
				+ "   CASE WHEN db_status = " + OsAccount.OsAccountDbStatus.ACTIVE.getId() + " THEN ? ELSE signature END "
				+ " WHERE os_account_obj_id = ?"; // 8

		PreparedStatement preparedStatement = connection.getPreparedStatement(updateSQL, Statement.NO_GENERATED_KEYS);
		preparedStatement.clearParameters();

		preparedStatement.setString(1, signature);
		preparedStatement.setLong(2, accountObjId);

		connection.executeUpdate(preparedStatement);
	}

	/**
	 * Update the address and/or login name for the specified account in the
	 * database. Also update the realm addr/name if needed.
	 *
	 * A column is updated only if its current value is null and a non-null
	 * value has been specified.
	 *
	 *
	 * @param osAccount     OsAccount that needs to be updated in the database.
	 * @param accountSid    Account SID, may be null.
	 * @param loginName     Login name, may be null.
	 * @param realmName     Realm name for the account.
	 * @param referringHost Host.
	 *
	 * @return OsAccountUpdateResult Account update status, and the updated
	 *         account.
	 *
	 * @throws TskCoreException If there is a database error or if the updated
	 *                          information conflicts with an existing account.
	 */
	public OsAccountUpdateResult updateCoreWindowsOsAccountAttributes(OsAccount osAccount, String accountSid, String loginName, String realmName, Host referringHost) throws TskCoreException, NotUserSIDException {
		CaseDbTransaction trans = db.beginTransaction();
		try {
			OsAccountUpdateResult updateStatus = this.updateCoreWindowsOsAccountAttributes(osAccount, accountSid, loginName, realmName, referringHost, trans);

			trans.commit();
			trans = null; // null-out so the finally block does not roll back a committed transaction
			return updateStatus;
		} finally {
			if (trans != null) {
				trans.rollback();
			}
		}
	}

	/**
	 * Update the address and/or login name for the specified account in the
	 * database. Also update the realm addr/name if needed.
	 *
	 * A column is updated only if it's current value is null and a non-null
	 * value has been specified.
	 *
	 * @param osAccount OsAccount that needs to be updated in the database.
	 * @param accountSid Account SID, may be null.
	 * @param loginName Login name, may be null.
	 * @param realmName Account realm name.
May be null if accountSid is not * null. * * @return OsAccountUpdateResult Account update status, and the updated * account. * * @throws TskCoreException If there is a database error or if the updated * information conflicts with an existing account. */ private OsAccountUpdateResult updateCoreWindowsOsAccountAttributes(OsAccount osAccount, String accountSid, String loginName, String realmName, Host referringHost, CaseDbTransaction trans) throws TskCoreException, NotUserSIDException { // first get and update the realm - if we have the info to find the realm if (!StringUtils.isBlank(accountSid) || !StringUtils.isBlank(realmName)) { db.getOsAccountRealmManager().getAndUpdateWindowsRealm(accountSid, realmName, referringHost, trans.getConnection()); } // now update the account core data OsAccountUpdateResult updateStatus = this.updateOsAccountCore(osAccount, accountSid, loginName, trans); return updateStatus; } /** * Update the address and/or login name for the specified account in the * database. * * A column is updated only if its current value is null and a non-null * value has been specified. * * * NOTE: Will not merge accounts if the updated information conflicts with * an existing account (such as adding an ID to an account that has only a * name and there already being an account with that ID). * * @param osAccount OsAccount that needs to be updated in the database. * @param address Account address, may be null. * @param loginName Login name, may be null. * * @return OsAccountUpdateResult Account update status, and the updated * account. * * @throws TskCoreException If there is a database error or if the updated * information conflicts with an existing account. 
*/ private OsAccountUpdateResult updateOsAccountCore(OsAccount osAccount, String address, String loginName, CaseDbTransaction trans) throws TskCoreException { OsAccountUpdateStatus updateStatusCode = OsAccountUpdateStatus.NO_CHANGE; OsAccount updatedAccount; try { CaseDbConnection connection = trans.getConnection(); // if a new addr is provided and the account already has an address, and they are not the same, throw an exception if (!StringUtils.isBlank(address) && !StringUtils.isBlank(osAccount.getAddr().orElse(null)) && !address.equalsIgnoreCase(osAccount.getAddr().orElse(""))) { throw new TskCoreException(String.format("Account (%d) already has an address (%s), address cannot be updated.", osAccount.getId(), osAccount.getAddr().orElse("NULL"))); } if (StringUtils.isBlank(osAccount.getAddr().orElse(null)) && !StringUtils.isBlank(address)) { updateAccountColumn(osAccount.getId(), "addr", address, connection); updateStatusCode = OsAccountUpdateStatus.UPDATED; } if (StringUtils.isBlank(osAccount.getLoginName().orElse(null)) && !StringUtils.isBlank(loginName)) { updateAccountColumn(osAccount.getId(), "login_name", loginName, connection); updateStatusCode = OsAccountUpdateStatus.UPDATED; } // if nothing is changed, return if (updateStatusCode == OsAccountUpdateStatus.NO_CHANGE) { return new OsAccountUpdateResult(updateStatusCode, osAccount); } // update signature if needed, based on the most current addr/loginName OsAccount currAccount = getOsAccountByObjectId(osAccount.getId(), connection); String newAddress = currAccount.getAddr().orElse(null); String newLoginName = currAccount.getLoginName().orElse(null); String newSignature = getOsAccountSignature(newAddress, newLoginName); updateAccountSignature(osAccount.getId(), newSignature, connection); // get the updated account from database updatedAccount = getOsAccountByObjectId(osAccount.getId(), connection); // register the updated account with the transaction to fire off an event 
trans.registerChangedOsAccount(updatedAccount); return new OsAccountUpdateResult(updateStatusCode, updatedAccount); } catch (SQLException ex) { throw new TskCoreException(String.format("Error updating account with unique id = %s, account id = %d", osAccount.getAddr().orElse("Unknown"), osAccount.getId()), ex); } } /** * Returns a list of hosts where the OsAccount has appeared. * * @param account OsAccount * * @return List of Hosts that reference the given OsAccount. * * @throws TskCoreException */ public List getHosts(OsAccount account) throws TskCoreException { List hostList = new ArrayList<>(); String query = "SELECT tsk_hosts.id AS hostId, name, db_status FROM tsk_hosts " + " JOIN data_source_info ON tsk_hosts.id = data_source_info.host_id" + " JOIN tsk_os_account_instances ON data_source_info.obj_id = tsk_os_account_instances.data_source_obj_id" + " WHERE os_account_obj_id = " + account.getId(); db.acquireSingleUserCaseReadLock(); try (CaseDbConnection connection = db.getConnection(); Statement s = connection.createStatement(); ResultSet rs = connection.executeQuery(s, query)) { while (rs.next()) { hostList.add(new Host(rs.getLong("hostId"), rs.getString("name"), Host.HostDbStatus.fromID(rs.getInt("db_status")))); } } catch (SQLException ex) { throw new TskCoreException(String.format("Failed to get host list for os account %d", account.getId()), ex); } finally { db.releaseSingleUserCaseReadLock(); } return hostList; } /** * Takes in a result with a row from tsk_os_accounts table and creates an * OsAccount. * * @param rs ResultSet. * @param realmId Realm. * * @return OsAccount OS Account. 
* * @throws SQLException */ private OsAccount osAccountFromResultSet(ResultSet rs) throws SQLException { OsAccountType accountType = null; int typeId = rs.getInt("type"); if (!rs.wasNull()) { accountType = OsAccount.OsAccountType.fromID(typeId); } Long creationTime = rs.getLong("created_date"); // getLong returns 0 if value is null if (rs.wasNull()) { creationTime = null; } return new OsAccount(db, rs.getLong("os_account_obj_id"), rs.getLong("realm_id"), rs.getString("login_name"), rs.getString("addr"), rs.getString("signature"), rs.getString("full_name"), creationTime, accountType, OsAccount.OsAccountStatus.fromID(rs.getInt("status")), OsAccount.OsAccountDbStatus.fromID(rs.getInt("db_status"))); } /** * Fires an OsAccountChangeEvent for the given OsAccount. Do not call this * with an open transaction. * * @param account Updated account. */ private void fireChangeEvent(OsAccount account) { db.fireTSKEvent(new OsAccountsUpdatedTskEvent(Collections.singletonList(account))); } /** * Created an account signature for an OS Account. This signature is simply * to prevent duplicate accounts from being created. Signature is set to: * uniqueId: if the account has a uniqueId, otherwise loginName: if the * account has a login name. * * @param uniqueId Unique id. * @param loginName Login name. * * @return Account signature. * * @throws TskCoreException If there is an error creating the account * signature. */ static String getOsAccountSignature(String uniqueId, String loginName) throws TskCoreException { // Create a signature. String signature; if (Strings.isNullOrEmpty(uniqueId) == false) { signature = uniqueId; } else if (Strings.isNullOrEmpty(loginName) == false) { signature = loginName; } else { throw new TskCoreException("OS Account must have either a uniqueID or a login name."); } return signature; } /** * Exception thrown if a given SID is a valid SID but is a group SID, and * not an individual user SID. 
*/ public static class NotUserSIDException extends TskException { private static final long serialVersionUID = 1L; /** * Default constructor when error message is not available */ public NotUserSIDException() { super("No error message available."); } /** * Create exception containing the error message * * @param msg the message */ public NotUserSIDException(String msg) { super(msg); } /** * Create exception containing the error message and cause exception * * @param msg the message * @param ex cause exception */ public NotUserSIDException(String msg, Exception ex) { super(msg, ex); } } /** * Status of an account update. */ public enum OsAccountUpdateStatus { NO_CHANGE, /// no change was made to account. UPDATED, /// account was updated MERGED /// account update triggered a merge } /** * Container that encapsulates the account update status and the updated * account. */ public final static class OsAccountUpdateResult { private final OsAccountUpdateStatus updateStatus; private final OsAccount updatedAccount; OsAccountUpdateResult(OsAccountUpdateStatus updateStatus, OsAccount updatedAccount) { this.updateStatus = updateStatus; this.updatedAccount = updatedAccount; } public OsAccountUpdateStatus getUpdateStatusCode() { return updateStatus; } public Optional getUpdatedAccount() { return Optional.ofNullable(updatedAccount); } } } sleuthkit-4.11.1/bindings/java/src/org/sleuthkit/datamodel/AddDataSourceCallbacks.java000644 000765 000024 00000002112 14137073413 031553 0ustar00carrierstaff000000 000000 /* * SleuthKit Java Bindings * * Copyright 2020 Basis Technology Corp. * Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.sleuthkit.datamodel; import java.util.List; /** * Provides callbacks at key points during the process of adding a data source to a case database. */ public interface AddDataSourceCallbacks { /** * Call to add a set of file object IDs that have been added to the database. * * @param fileObjectIds List of file object IDs. */ void onFilesAdded(List fileObjectIds); } sleuthkit-4.11.1/bindings/java/src/org/sleuthkit/datamodel/TskCoreException.java000644 000765 000024 00000002641 14137073413 030550 0ustar00carrierstaff000000 000000 /* * Sleuth Kit Data Model * * Copyright 2011 Basis Technology Corp. * Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.sleuthkit.datamodel; /** * Core exception that is thrown from Sleuthkit classes. 
Indicates a critical * error within TSK */ public class TskCoreException extends TskException { private static final long serialVersionUID = 123049876L; /** * Default constructor when error message is not available */ public TskCoreException() { super("No error message available."); } /** * Create exception containing the error message * * @param msg the message */ public TskCoreException(String msg) { super(msg); } /** * Create exception containing the error message and cause exception * * @param msg the message * @param ex cause exception */ public TskCoreException(String msg, Exception ex) { super(msg, ex); } } sleuthkit-4.11.1/bindings/java/src/org/sleuthkit/datamodel/StringUtils.java000644 000765 000024 00000003453 14137073413 027610 0ustar00carrierstaff000000 000000 /* * Sleuth Kit Data Model * * Copyright 2017-18 Basis Technology Corp. * Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.sleuthkit.datamodel; import java.util.Collection; /** * Collection of string utility methods. */ final class StringUtils { private StringUtils() { } /** * Utility method to convert a list to an CSV string. * * @param values - collection of objects . * * @return a CSV string. */ static String buildCSVString(Collection values) { return joinAsStrings(values, ","); } /** * Utility method to join a collection into a string using a supplied * separator. 
* * @param The type of the values in the collection to be joined * @param values The collection to be joined * @param separator The separator to insert between each value in the result * string * * @return a string with the elements of values separated by separator */ static String joinAsStrings(Collection values, String separator) { if (values == null || values.isEmpty()) { return ""; } return org.apache.commons.lang3.StringUtils.join(values, separator); } static String deleteWhitespace(String result) { return org.apache.commons.lang3.StringUtils.deleteWhitespace(result); } } sleuthkit-4.11.1/bindings/java/src/org/sleuthkit/datamodel/UnsupportedContent.java000644 000765 000024 00000003204 14137073413 031176 0ustar00carrierstaff000000 000000 /* * Sleuth Kit Data Model * * Copyright 2021 Basis Technology Corp. * Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.sleuthkit.datamodel; /** * This content type is used as a default when the object type from the * tsk_objects table is not present in the TskData.ObjectType enum. This should * only come into play when loading case databases created by a newer version of * Autopsy. */ public class UnsupportedContent extends AbstractContent { /** * Create an UnsupportedContent object. Only store the object id. 
* * @param db case database handle * @param obj_id object id */ protected UnsupportedContent(SleuthkitCase db, long obj_id) { super(db, obj_id, "Unsupported Content"); } @Override public int read(byte[] buf, long offset, long len) throws TskCoreException { return 0; } @Override public void close() { // Do nothing } @Override public long getSize() { return 0; } @Override public T accept(ContentVisitor v) { return v.visit(this); } @Override public T accept(SleuthkitItemVisitor v) { return v.visit(this); } } sleuthkit-4.11.1/bindings/java/src/org/sleuthkit/datamodel/IngestJobInfo.java000755 000765 000024 00000013640 14137073413 030023 0ustar00carrierstaff000000 000000 /* * Sleuth Kit Data Model * * Copyright 2011-2016 Basis Technology Corp. * Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.sleuthkit.datamodel; import java.util.Date; import java.util.List; import java.util.ResourceBundle; /** * Represents information for an ingest job. 
*/ public final class IngestJobInfo { private static final ResourceBundle bundle = ResourceBundle.getBundle("org.sleuthkit.datamodel.Bundle"); public enum IngestJobStatusType { //DO NOT CHANGE ORDER STARTED(bundle.getString("IngestJobInfo.IngestJobStatusType.Started.displayName")), CANCELLED(bundle.getString("IngestJobInfo.IngestJobStatusType.Cancelled.displayName")), COMPLETED(bundle.getString("IngestJobInfo.IngestJobStatusType.Completed.displayName")); private String displayName; private IngestJobStatusType(String displayName) { this.displayName = displayName; } public static IngestJobStatusType fromID(int typeId) { for (IngestJobStatusType statusType : IngestJobStatusType.values()) { if (statusType.ordinal() == typeId) { return statusType; } } return null; } /** * @return the displayName */ public String getDisplayName() { return displayName; } } private final long ingestJobId; private final long objectId; private final String hostName; private final Date startDateTime; private Date endDateTime = new Date(0); private final String settingsDir; private final List ingestModuleInfo; private final SleuthkitCase skCase; private IngestJobStatusType status; /** * Constructs an IngestJobInfo that has not ended * * @param ingestJobId The id of the ingest job * @param objectId The data source the job is being run on * @param hostName The host on which the job was executed * @param startDateTime The date time the job was started * @param settingsDir The directory of the job settings * @param ingestModuleInfo The ingest modules being run for this job * @param skCase A reference to sleuthkit case */ IngestJobInfo(long ingestJobId, long objectId, String hostName, Date startDateTime, String settingsDir, List ingestModuleInfo, SleuthkitCase skCase) { this.ingestJobId = ingestJobId; this.objectId = objectId; this.hostName = hostName; this.startDateTime = startDateTime; this.settingsDir = settingsDir; this.skCase = skCase; this.ingestModuleInfo = ingestModuleInfo; this.status = 
IngestJobStatusType.STARTED; } /** * Constructs an IngestJobInfo that has already ended * * @param ingestJobId The id of the ingest job * @param dataSourceId The data source the job is being run on * @param hostName The host on which the job was executed * @param startDateTime The date time the job was started * @param endDateTime The date time the job was ended (if it ended) * @param status The status of the job * @param settingsDir The directory of the job settings * @param ingestModuleInfo The ingest modules being run for this job * @param skCase A reference to sleuthkit case */ IngestJobInfo(long ingestJobId, long dataSourceId, String hostName, Date startDateTime, Date endDateTime, IngestJobStatusType status, String settingsDir, List ingestModuleInfo, SleuthkitCase skCase) { this.ingestJobId = ingestJobId; this.objectId = dataSourceId; this.hostName = hostName; this.startDateTime = startDateTime; this.endDateTime = endDateTime; this.settingsDir = settingsDir; this.skCase = skCase; this.ingestModuleInfo = ingestModuleInfo; this.status = status; } /** * @return the end date time of the job (equal to the epoch if it has not * been set yet). */ public Date getEndDateTime() { return endDateTime; } /** * Sets the end date for the ingest job info, and updates the database. * * @param endDateTime the endDateTime to set * * @throws org.sleuthkit.datamodel.TskCoreException */ public void setEndDateTime(Date endDateTime) throws TskCoreException { Date oldDate = this.endDateTime; this.endDateTime = endDateTime; try { skCase.setIngestJobEndDateTime(getIngestJobId(), endDateTime.getTime()); } catch (TskCoreException ex) { this.endDateTime = oldDate; throw ex; } } /** * Sets the ingest status for the ingest job info, and updates the database. 
* * @param status The new status * * @throws TskCoreException */ public void setIngestJobStatus(IngestJobStatusType status) throws TskCoreException { IngestJobStatusType oldStatus = this.getStatus(); this.status = status; try { skCase.setIngestJobStatus(getIngestJobId(), status); } catch (TskCoreException ex) { this.status = oldStatus; throw ex; } } /** * @return the ingestJobId */ public long getIngestJobId() { return ingestJobId; } /** * @return the objectId */ public long getObjectId() { return objectId; } /** * @return the hostName */ public String getHostName() { return hostName; } /** * @return the startDateTime */ public Date getStartDateTime() { return startDateTime; } /** * @return the settingsDir */ public String getSettingsDir() { return settingsDir; } /** * @return the ingestModuleInfo */ public List getIngestModuleInfo() { return ingestModuleInfo; } /** * @return the status */ public IngestJobStatusType getStatus() { return status; } } sleuthkit-4.11.1/bindings/java/src/org/sleuthkit/datamodel/FileSystem.java000644 000765 000024 00000013702 14137073413 027403 0ustar00carrierstaff000000 000000 /* * Sleuth Kit Data Model * * Copyright 2011-2017 Basis Technology Corp. * Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
*/ package org.sleuthkit.datamodel; import java.util.List; /** * Represents a file system object stored in tsk_fs_info table FileSystem has a * parent content object (volume or image) and children content objects (files * and directories) and fs-specific attributes. The object also maintains a * handle to internal file-system structures and the handle is reused across * reads. */ public class FileSystem extends AbstractContent { private long imgOffset, blockSize, blockCount, rootInum, firstInum, lastInum; private TskData.TSK_FS_TYPE_ENUM fsType; private Content parent; private volatile long filesystemHandle = 0; /** * Constructor most inputs are from the database * * @param db the case handle * @param obj_id the unique object id * @param name filesystem name * @param img_offset image offset * @param fs_type filesystem type * @param block_size block size in this fs * @param block_count number of blocks in this fs * @param root_inum the root inum * @param first_inum the first inum * @param last_inum the last inum */ protected FileSystem(SleuthkitCase db, long obj_id, String name, long img_offset, TskData.TSK_FS_TYPE_ENUM fs_type, long block_size, long block_count, long root_inum, long first_inum, long last_inum) { super(db, obj_id, name); this.imgOffset = img_offset; this.fsType = fs_type; this.blockSize = block_size; this.blockCount = block_count; this.rootInum = root_inum; this.firstInum = first_inum; this.lastInum = last_inum; } @Override public void close() { //does nothing currently, we are caching the fs handles } @Override public int read(byte[] buf, long offset, long len) throws TskCoreException { return SleuthkitJNI.readFs(getFileSystemHandle(), buf, offset, len); } @Override public long getSize() { return blockSize * blockCount; } /** * Lazily loads the internal file system structure: won't be loaded until * this is called and maintains the handle to it to reuse it * * @return a filesystem pointer from the sleuthkit * * @throws TskCoreException exception 
throw if an internal tsk core error * occurs */ long getFileSystemHandle() throws TskCoreException { if (filesystemHandle == 0) { synchronized (this) { if (filesystemHandle == 0) { Content dataSource = getDataSource(); if ((dataSource == null) || ( !(dataSource instanceof Image))) { throw new TskCoreException("Data Source of File System is not an image"); } Image image = (Image) dataSource; // Check if this file system is in a pool if (isPoolContent()) { Pool pool = getPool(); if (pool == null) { throw new TskCoreException("Error finding pool for file system"); } Volume poolVolume = getPoolVolume(); if (poolVolume == null) { throw new TskCoreException("File system is in a pool but has no volume"); } filesystemHandle = SleuthkitJNI.openFsPool(image.getImageHandle(), imgOffset, pool.getPoolHandle(), poolVolume.getStart(), getSleuthkitCase()); } else { filesystemHandle = SleuthkitJNI.openFs(image.getImageHandle(), imgOffset, getSleuthkitCase()); } } } } return this.filesystemHandle; } public Directory getRootDirectory() throws TskCoreException { List children = getChildren(); if (children.size() != 1) { throw new TskCoreException("FileSystem must have only one child."); } if (!(children.get(0) instanceof Directory)) { throw new TskCoreException("Child of FileSystem must be a Directory."); } return (Directory) children.get(0); } /** * Get the byte offset of this file system in the image * * @return offset */ public long getImageOffset() { return imgOffset; } /** * Get the file system type * * @return enum value of fs type */ public TskData.TSK_FS_TYPE_ENUM getFsType() { return fsType; } /** * Get the block size * * @return block size */ public long getBlock_size() { return blockSize; } /** * Get the number of blocks * * @return block count */ public long getBlock_count() { return blockCount; } /** * Get the inum of the root directory * * @return Root metadata address of the file system */ public long getRoot_inum() { return rootInum; } /** * Get the first inum in this 
file system * * @return first inum */ public long getFirst_inum() { return firstInum; } /** * Get the last inum * * @return last inum */ public long getLastInum() { return lastInum; } @Override public void finalize() throws Throwable { try { if (filesystemHandle != 0) { // SleuthkitJNI.closeFs(filesystemHandle); // closeFs is currently a no-op filesystemHandle = 0; } } finally { super.finalize(); } } @Override public T accept(SleuthkitItemVisitor v) { return v.visit(this); } @Override public T accept(ContentVisitor v) { return v.visit(this); } @Override public String toString(boolean preserveState) { return super.toString(preserveState) + "FileSystem [\t" + " blockCount " + blockCount + "\t" + "blockSize " + blockSize + "\t" + "firstInum " + firstInum + "\t" + "fsType " + fsType + "\t" + "imgOffset " + imgOffset + "\t" + "lastInum " + lastInum + "\t" + "rootInum " + rootInum + "\t" + "]"; //NON-NLS } } sleuthkit-4.11.1/bindings/java/src/org/sleuthkit/datamodel/DataArtifact.java000644 000765 000024 00000005315 14137073413 027647 0ustar00carrierstaff000000 000000 /* * Sleuth Kit Data Model * * Copyright 2020-2021 Basis Technology Corp. * Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.sleuthkit.datamodel; import java.util.Optional; /** * DataArtifact is a category of artifact types that are simply data directly * extracted from a data source. 
*/ public final class DataArtifact extends BlackboardArtifact { // data artifacts may have a OS Account associated with them. private final Long osAccountObjId; /** * Constructs a DataArtifact. * * @param sleuthkitCase The SleuthKit case (case database) that contains * the artifact data. * @param artifactID The unique id for this artifact. * @param sourceObjId The unique id of the content with which this * artifact is associated. * @param artifactObjId The object id of artifact, in tsk_objects. * @param dataSourceObjId Object ID of the data source where the artifact * was found. May be null. * @param artifactTypeID The type id of this artifact. * @param artifactTypeName The type name of this artifact. * @param displayName The display name of this artifact. * @param reviewStatus The review status of this artifact. * @param osAccountObjId OsAccount associated with this artifact, may be * null. * @param isNew The object is newly created. */ DataArtifact(SleuthkitCase sleuthkitCase, long artifactID, long sourceObjId, long artifactObjId, Long dataSourceObjId, int artifactTypeID, String artifactTypeName, String displayName, ReviewStatus reviewStatus, Long osAccountObjId, boolean isNew) { super(sleuthkitCase, artifactID, sourceObjId, artifactObjId, dataSourceObjId, artifactTypeID, artifactTypeName, displayName, reviewStatus, isNew); this.osAccountObjId = osAccountObjId; } /** * Gets the OS Account for this artifact. * * @return Optional with OsAccount, Optional.empty if there is no account. * * @throws TskCoreException If there is an error getting the account. */ public Optional getOsAccountObjectId() throws TskCoreException { return Optional.ofNullable(osAccountObjId); } } sleuthkit-4.11.1/bindings/java/src/org/sleuthkit/datamodel/SleuthkitJNI.java000644 000765 000024 00000221413 14137073413 027634 0ustar00carrierstaff000000 000000 /* * Sleuth Kit Data Model * * Copyright 2011-2018 Basis Technology Corp. 
* Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.sleuthkit.datamodel; import java.io.BufferedReader; import java.io.FileReader; import java.io.IOException; import java.text.DateFormat; import java.text.SimpleDateFormat; import java.util.ArrayList; import java.util.Arrays; import java.util.GregorianCalendar; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; import java.util.TimeZone; import java.util.UUID; import java.util.concurrent.locks.ReadWriteLock; import java.util.concurrent.locks.ReentrantReadWriteLock; import java.util.logging.Level; import java.util.logging.Logger; import org.apache.commons.lang3.StringUtils; import org.sleuthkit.datamodel.TskData.TSK_FS_ATTR_TYPE_ENUM; import org.sleuthkit.datamodel.SleuthkitCase.CaseDbTransaction; /** * A utility class that provides a interface to the SleuthKit via JNI. Supports * case management, add image process, reading data off content objects Setting * up Hash database parameters and updating / reading values * * Caches image and filesystem handles and reuses them for the duration of the * application */ public class SleuthkitJNI { private static final Logger logger = Logger.getLogger(SleuthkitJNI.class.getName()); /** * Lock to protect against the TSK data structures being closed while * another thread is in the C++ code. Do not use this lock after obtaining * HandleCache.cacheLock. 
Additionally, the only code that should acquire * the write lock is CaseDbHandle.free(). */ private static final ReadWriteLock tskLock = new ReentrantReadWriteLock(); /* * Loads the SleuthKit libraries. */ static { LibraryUtils.loadSleuthkitJNI(); } /** * Constructor for the utility class that provides a interface to the * SleuthKit via JNI. */ private SleuthkitJNI() { } /** * Utility class to hold the handles for a single case. */ private static class CaseHandles { /* * A SleuthKit image handle cache implemented as a mappng of * concatenated image file paths to image handles. */ private final Map imageHandleCache = new HashMap<>(); /* * A SleuthKit file system handles cache implemented as a mapping of * image handles to image offset and file system handle pairs. */ private final Map> fsHandleCache = new HashMap<>(); /* * The collection of open file handles. We will only allow requests * through to the C code if the file handle exists in this collection. */ private final Set fileHandleCache = new HashSet<>(); private final Map> fileSystemToFileHandles = new HashMap<>(); private final Map> poolHandleCache = new HashMap<>(); // The poolImgCache is only used to close the images later. private final List poolImgCache = new ArrayList<>(); /* * Currently, our APFS code is not thread-safe and it is the only code * that uses pools. To prevent crashes, we make any reads to a file system * contained in a pool single-threaded. This cache keeps track of which * open file system handles are contained in a pool so we can set the locks * appropriately. */ private final List poolFsList = new ArrayList<>(); private CaseHandles() { // Nothing to do here } } /** * Cache of all handles allocated in the JNI layer. Used for: (a) quick * lookup of frequently used handles (e.g. file system and image) (b) * ensuring all handles passed in by clients of SleuthkitJNI are valid. (c) * consistent cleanup of handles on closure. 
*/ private static class HandleCache { /* * A monitor used to guard access to cached Sleuthkit JNI handles. */ private static final Object cacheLock = new Object(); private static final Map caseHandlesCache = new HashMap<>(); private static final String INVALID_FILE_HANDLE = "Invalid file handle."; //NON-NLS /* * Currently, our APFS code is not thread-safe and it is the only code * that uses pools. To prevent crashes, we make any reads to a file system * contained in a pool single-threaded. This cache keeps track of which * open file handles are contained in a pool so we can set the locks * appropriately. * * Access to this list should be guarded by cacheLock. */ private static final List poolFileHandles = new ArrayList<>(); /** * Create the empty cache for a new case * * @param caseIdentifier Unique identifier for the case. */ private static void createCaseHandleCache(String caseIdentifier) { caseHandlesCache.put(caseIdentifier, new CaseHandles()); } /** * If there is one case open return its unique identifier. * This is to support deprecated methods that don't have a case parameter. * * @return the open case identifier * * @throws TskCoreException If there are no cases open or if multiple cases are open */ private static String getDefaultCaseIdentifier() throws TskCoreException { synchronized (cacheLock) { if (caseHandlesCache.keySet().size() > 1) { throw new TskCoreException("Can not get default case identifier with multiple open cases"); } else if (caseHandlesCache.keySet().isEmpty()) { throw new TskCoreException("Can not get default case identifier with no open case"); } return (caseHandlesCache.keySet().iterator().next()); } } /** * Gets the case handle cache for a given case. * * @param caseIdentifier Unique identifier for the case. * * @return the case handle cache * * @throws TskCoreException If there is no cache for this case. 
*/
	private static CaseHandles getCaseHandles(String caseIdentifier) throws TskCoreException {
		synchronized (cacheLock) {
			if (caseHandlesCache.containsKey(caseIdentifier)) {
				return caseHandlesCache.get(caseIdentifier);
			}
			// If the CaseHandles object isn't in there, it should mean the case has been closed.
			throw new TskCoreException("No entry for case " + caseIdentifier + " in cache. Case may have been closed");
		}
	}

	/**
	 * Removes the case handle cache for a given case, clearing the per-case
	 * collections first so stale handles can not be looked up through any
	 * lingering reference to the CaseHandles object.
	 *
	 * @param caseIdentifier Unique identifier for the case.
	 */
	private static void removeCaseHandlesCache(String caseIdentifier) {
		synchronized (cacheLock) {
			if (caseHandlesCache.containsKey(caseIdentifier)) {
				caseHandlesCache.get(caseIdentifier).fsHandleCache.clear();
				caseHandlesCache.get(caseIdentifier).imageHandleCache.clear();
				caseHandlesCache.get(caseIdentifier).fileHandleCache.clear();
				caseHandlesCache.get(caseIdentifier).fileSystemToFileHandles.clear();
				caseHandlesCache.get(caseIdentifier).poolHandleCache.clear();
				caseHandlesCache.remove(caseIdentifier);
			}
		}
	}

	/**
	 * Searches all the open caches for an image handle.
	 *
	 * @param imgHandle The image handle to look for.
	 *
	 * @return true if the handle is found in any cache, false otherwise
	 */
	private static boolean isImageInAnyCache(long imgHandle) {
		synchronized (cacheLock) {
			for (String caseIdentifier : caseHandlesCache.keySet()) {
				// containsKey() probes the map directly instead of
				// materializing the key set just to scan it
				// (was: fsHandleCache.keySet().contains(imgHandle)).
				if (caseHandlesCache.get(caseIdentifier).fsHandleCache.containsKey(imgHandle)) {
					return true;
				}
			}
			return false;
		}
	}

	/**
	 * Add a new file handle to the cache.
	 *
	 * @param caseIdentifier Unique identifier for the case.
	 * @param fileHandle     The new file handle.
	 * @param fsHandle       The file system handle in which the file lives.
	 */
	private static void addFileHandle(String caseIdentifier, long fileHandle, long fsHandle) {
		try {
			synchronized (cacheLock) {
				// Add to collection of open file handles.
				getCaseHandles(caseIdentifier).fileHandleCache.add(fileHandle);
				// Add to map of file system to file handles.
if (getCaseHandles(caseIdentifier).fileSystemToFileHandles.containsKey(fsHandle)) { getCaseHandles(caseIdentifier).fileSystemToFileHandles.get(fsHandle).add(fileHandle); } else { getCaseHandles(caseIdentifier).fileSystemToFileHandles.put(fsHandle, new ArrayList<>(Arrays.asList(fileHandle))); } } } catch (TskCoreException ex) { logger.log(Level.WARNING, "Error caching file handle for case {0}", caseIdentifier); } } /** * Removes a file handle from the cache for the given case * * @param fileHandle * @param skCase Can be null. If so, the first matching handle will be removed. */ private static void removeFileHandle(long fileHandle, SleuthkitCase skCase) { synchronized (cacheLock) { // Remove from collection of open file handles. if (skCase != null) { try { getCaseHandles(skCase.getCaseHandleIdentifier()).fileHandleCache.remove(fileHandle); } catch (TskCoreException ex) { // If the call to getCaseHandles() failed, we've already cleared the cache. } } else { // If we don't know what case the handle is from, delete the first one we find for (String caseIdentifier:caseHandlesCache.keySet()) { if (caseHandlesCache.get(caseIdentifier).fileHandleCache.contains(fileHandle)) { caseHandlesCache.get(caseIdentifier).fileHandleCache.remove(fileHandle); return; } } } } } /** * Searches all the open caches for a file handle. * * @param fileHandle * * @return true if the handle is found in any cache, false otherwise */ private static boolean isValidFileHandle(long fileHandle) { synchronized (cacheLock) { for (String caseIdentifier:caseHandlesCache.keySet()) { if (caseHandlesCache.get(caseIdentifier).fileHandleCache.contains(fileHandle)) { return true; } } return false; } } private static void closeHandlesAndClearCache(String caseIdentifier) throws TskCoreException { synchronized (cacheLock) { /* * Close any cached file system handles. 
*/ for (Map imageToFsMap : getCaseHandles(caseIdentifier).fsHandleCache.values()) { for (Long fsHandle : imageToFsMap.values()) { // First close all open file handles for the file system. if (getCaseHandles(caseIdentifier).fileSystemToFileHandles.containsKey(fsHandle)) { for (Long fileHandle : getCaseHandles(caseIdentifier).fileSystemToFileHandles.get(fsHandle)) { // Update the cache of file handles contained in pools if (poolFileHandles.contains(fileHandle)) { poolFileHandles.remove(fileHandle); } closeFile(fileHandle); } } // Then close the file system handle. closeFsNat(fsHandle); } } /* * Clear out the list of pool file systems. */ getCaseHandles(caseIdentifier).poolFsList.clear(); /* * Close any cached pools */ for (Long imgHandle : getCaseHandles(caseIdentifier).poolHandleCache.keySet()) { for (Long poolHandle : getCaseHandles(caseIdentifier).poolHandleCache.get(imgHandle).values()) { closePoolNat(poolHandle); } } /* * Close any open pool images */ for (Long imageHandle : getCaseHandles(caseIdentifier).poolImgCache) { closeImgNat(imageHandle); } /* * Close any cached image handles. */ for (Long imageHandle : getCaseHandles(caseIdentifier).imageHandleCache.values()) { closeImgNat(imageHandle); } removeCaseHandlesCache(caseIdentifier); } } } /** * Encapsulates a handle to a SleuthKit case database with support for * adding images to the database. */ public static class CaseDbHandle { /* * A unique indentifier for a case */ private final String caseDbIdentifier; /** * Constructs an object that encapsulates a handle to a single user SleuthKit case * database with support for adding images to the database. * * @param databaseName A path to a case database */ private CaseDbHandle(String databaseName) { this.caseDbIdentifier = "SingleUser:" + databaseName; // NON-NLS HandleCache.createCaseHandleCache(caseDbIdentifier); } /** * Constructs an object that encapsulates a handle to a multi user SleuthKit case * database with support for adding images to the database. 
*
	 * @param databaseName The name of the multi-user database.
	 * @param info         Connection info for the multi-user database.
	 */
	private CaseDbHandle(String databaseName, CaseDbConnectionInfo info) {
		this.caseDbIdentifier = "MultiUser:" + info.getHost() + ":" + databaseName;
		HandleCache.createCaseHandleCache(caseDbIdentifier);
	}

	/**
	 * Gets the unique identifier for this case database. Despite the older
	 * wording, this is not a native TSK pointer; it is the string key used
	 * to look up this case's entry in the HandleCache.
	 *
	 * @return Unique identifier for the case.
	 */
	String getCaseDbIdentifier() {
		return caseDbIdentifier;
	}

	/**
	 * Closes the case database and any open image and file system handles.
	 *
	 * @throws TskCoreException if there is a problem completing the
	 *                          operation.
	 */
	void free() throws TskCoreException {
		// Take the write lock so no other thread can be inside the C++
		// code while the cached native handles are being closed.
		tskLock.writeLock().lock();
		try {
			HandleCache.closeHandlesAndClearCache(caseDbIdentifier);
			//SleuthkitJNI.closeCaseDbNat(caseDbIdentifier);
		} finally {
			tskLock.writeLock().unlock();
		}
	}

	/**
	 * Adds an image to the case database. For finer-grained control of the
	 * process of adding the image, call CaseDbHandle.initAddImageProcess
	 * instead.
	 *
	 * @param deviceObjId    The object id of the device associated with the
	 *                       image.
	 * @param imageFilePaths The image file paths.
	 * @param timeZone       The time zone for the image.
	 * @param host           The host for the image. May be null, in which
	 *                       case a host named after the first image file (or
	 *                       the device object id) is created.
	 * @param skCase         The case database the image is added to.
	 *
	 * @return The object id of the image.
	 *
	 * @throws TskCoreException if there is an error adding the image to
	 *                          case database.
*/ long addImageInfo(long deviceObjId, List imageFilePaths, String timeZone, Host host, SleuthkitCase skCase) throws TskCoreException { try { if (host == null) { String hostName; if (imageFilePaths.size() > 0) { String path = imageFilePaths.get(0); hostName = (new java.io.File(path)).getName() + " Host"; } else { hostName = "Image_" + deviceObjId + " Host"; } host = skCase.getHostManager().newHost(hostName); } TskCaseDbBridge dbHelper = new TskCaseDbBridge(skCase, new DefaultAddDataSourceCallbacks(), host); long tskAutoDbPointer = initializeAddImgNat(dbHelper, timezoneLongToShort(timeZone), false, false, false); runOpenAndAddImgNat(tskAutoDbPointer, UUID.randomUUID().toString(), imageFilePaths.toArray(new String[0]), imageFilePaths.size(), timeZone); long id = finishAddImgNat(tskAutoDbPointer); dbHelper.finish(); skCase.addDataSourceToHasChildrenMap(); return id; } catch (TskDataException ex) { throw new TskCoreException("Error adding image to case database", ex); } } /** * Initializes a multi-step process for adding an image to the case * database. * * @param timeZone The time zone of the image. * @param addUnallocSpace Pass true to create virtual files for * unallocated space. * @param skipFatFsOrphans Pass true to skip processing of orphan files * for FAT file systems. * @param imageCopyPath Path to which a copy of the image should be * written. Use the empty string to disable * image writing. * * @return An object that can be used to exercise fine-grained control * of the process of adding the image to the case database. */ AddImageProcess initAddImageProcess(String timeZone, boolean addUnallocSpace, boolean skipFatFsOrphans, String imageCopyPath, SleuthkitCase skCase) { return new AddImageProcess(timeZone, addUnallocSpace, skipFatFsOrphans, imageCopyPath, skCase); } /** * Encapsulates a multi-step process to add an image to the case * database. 
*/ public class AddImageProcess { private final String timeZone; private final boolean addUnallocSpace; private final boolean skipFatFsOrphans; private final String imageWriterPath; private volatile long tskAutoDbPointer; private long imageId = 0; private boolean isCanceled; private final SleuthkitCase skCase; private TskCaseDbBridge dbHelper; /** * Constructs an object that encapsulates a multi-step process to * add an image to the case database. * * @param timeZone The time zone of the image. * @param addUnallocSpace Pass true to create virtual files for * unallocated space. * @param skipFatFsOrphans Pass true to skip processing of orphan * files for FAT file systems. * @param imageWriterPath Path that a copy of the image should be * written to. Use empty string to disable * image writing */ private AddImageProcess(String timeZone, boolean addUnallocSpace, boolean skipFatFsOrphans, String imageWriterPath, SleuthkitCase skCase) { this.timeZone = timeZone; this.addUnallocSpace = addUnallocSpace; this.skipFatFsOrphans = skipFatFsOrphans; this.imageWriterPath = imageWriterPath; tskAutoDbPointer = 0; this.isCanceled = false; this.skCase = skCase; } /** * Starts the process of adding an image to the case database. * * @param deviceId An ASCII-printable identifier for the * device associated with the image that * should be unique across multiple cases * (e.g., a UUID). * @param imageFilePaths Full path(s) to the image file(s). * @param sectorSize The sector size (use '0' for autodetect). * * @throws TskCoreException if a critical error occurs within the * SleuthKit. 
* @throws TskDataException if a non-critical error occurs within * the SleuthKit (should be OK to continue * the process) */ public void run(String deviceId, String[] imageFilePaths, int sectorSize) throws TskCoreException, TskDataException { Image img = addImageToDatabase(skCase, imageFilePaths, sectorSize, "", "", "", "", deviceId); run(deviceId, img, sectorSize, new DefaultAddDataSourceCallbacks()); } /** * Starts the process of adding an image to the case database. * * @param deviceId An ASCII-printable identifier for the * device associated with the image that * should be unique across multiple cases * (e.g., a UUID). * @param image The image object (has already been added to the database) * @param sectorSize The sector size (no longer used). * @param addDataSourceCallbacks The callbacks to use to send data to ingest (may do nothing). * * @throws TskCoreException if a critical error occurs within the * SleuthKit. * @throws TskDataException if a non-critical error occurs within * the SleuthKit (should be OK to continue * the process) */ public void run(String deviceId, Image image, int sectorSize, AddDataSourceCallbacks addDataSourceCallbacks) throws TskCoreException, TskDataException { dbHelper = new TskCaseDbBridge(skCase, addDataSourceCallbacks, image.getHost()); getTSKReadLock(); try { long imageHandle = 0; synchronized (this) { if (0 != tskAutoDbPointer) { throw new TskCoreException("Add image process already started"); } if (!isCanceled) { //with isCanceled being guarded by this it will have the same value everywhere in this synchronized block imageHandle = image.getImageHandle(); tskAutoDbPointer = initAddImgNat(dbHelper, timezoneLongToShort(timeZone), addUnallocSpace, skipFatFsOrphans); } if (0 == tskAutoDbPointer) { throw new TskCoreException("initAddImgNat returned a NULL TskAutoDb pointer"); } } if (imageHandle != 0) { runAddImgNat(tskAutoDbPointer, deviceId, imageHandle, image.getId(), timeZone, imageWriterPath); } } finally { 
finishAddImageProcess(); releaseTSKReadLock(); } } /** * Stops the process of adding the image to the case database that * was started by calling AddImageProcess.run. * AddImageProcess.revert should be called after calling * AddImageProcess.stop. * * @throws TskCoreException if a critical error occurs within the * SleuthKit. */ public synchronized void stop() throws TskCoreException { getTSKReadLock(); try { isCanceled = true; if (tskAutoDbPointer != 0) { stopAddImgNat(tskAutoDbPointer); } } finally { releaseTSKReadLock(); } } /** * Call at the end of the add image process regardless of the error/canceled state. * * Note that the new image is no longer deleted on error/cancellation * * If the process was not canceled, will add the final batch of files to the database * and submit for any further processing through the callback. * * @throws TskCoreException */ private synchronized void finishAddImageProcess() throws TskCoreException { if (tskAutoDbPointer == 0) { return; } // If the process wasn't cancelled, finish up processing the // remaining files. if (! this.isCanceled && dbHelper != null) { dbHelper.finish(); } // Free the auto DB pointer and get the image ID imageId = finishAddImgNat(tskAutoDbPointer); tskAutoDbPointer = 0; skCase.addDataSourceToHasChildrenMap(); } /** * This no longer needs to be called. * * @throws TskCoreException if a critical error occurs within the * SleuthKit. * * @deprecated No longer necessary */ @Deprecated public synchronized void revert() throws TskCoreException { // No-op } /** * This no longer needs to be called. Will simply return the * object ID of the new image. * * @return The object id of the image that was added. * * @throws TskCoreException if a critical error occurs within the * SleuthKit. * * @deprecated No longer necessary */ @Deprecated public synchronized long commit() throws TskCoreException { return imageId; } /** * Gets the file system directory currently being processed by the * SleuthKit. 
* * @return The directory */ public synchronized String currentDirectory() { return tskAutoDbPointer == 0 ? "" : getCurDirNat(tskAutoDbPointer); //NON-NLS } /** * Starts the process of adding an image to the case database. * Either commit() or revert() MUST be called after calling run(). * * @param imageFilePaths Full path(s) to the image file(s). * * @throws TskCoreException if a critical error occurs within the * SleuthKit. * @throws TskDataException if a non-critical error occurs within * the SleuthKit (should be OK to continue * the process) * * @deprecated Use run(String dataSourceId, String[] imageFilePaths) * instead */ @Deprecated public void run(String[] imageFilePaths) throws TskCoreException, TskDataException { run(null, imageFilePaths, 0); } /** * Starts the process of adding an image to the case database. * * @param deviceId An ASCII-printable identifier for the * device associated with the image that * should be unique across multiple cases * (e.g., a UUID). * @param imageFilePaths Full path(s) to the image file(s). * * @throws TskCoreException if a critical error occurs within the * SleuthKit. * @throws TskDataException if a non-critical error occurs within * the SleuthKit (should be OK to continue * the process) */ public void run(String deviceId, String[] imageFilePaths) throws TskCoreException, TskDataException { run(deviceId, imageFilePaths, 0); } } } /** * Creates a new case database. Must call .free() on CaseDbHandle instance * when done. * * @param path Location to create the database at. * * @return Handle for a new TskCaseDb instance. * * @throws TskCoreException exception thrown if critical error occurs within * TSK */ static CaseDbHandle newCaseDb(String path) throws TskCoreException { return new CaseDbHandle(path); } /** * Creates a new case database. Must call .free() on CaseDbHandle instance * when done. 
* * @param databaseName the name of the database to create * @param info the connection info class for the database to create * * @return Handle for a new TskCaseDb instance. * * @throws TskCoreException exception thrown if critical error occurs within * TSK */ static CaseDbHandle newCaseDb(String databaseName, CaseDbConnectionInfo info) throws TskCoreException { return new CaseDbHandle(databaseName, info); } /** * Opens an existing case database. Must call .free() on CaseDbHandle * instance when done. * * @param path Location of the existing database. * * @return Handle for a new TskCaseDb instance. * * @throws TskCoreException exception thrown if critical error occurs within * TSK */ static CaseDbHandle openCaseDb(String path) throws TskCoreException { return new CaseDbHandle(path); } /** * Opens an existing case database. Must call .free() on CaseDbHandle * instance when done. * * @param databaseName the name of the database to open * @param info the connection info class for the database to open * * @return Handle for a new TskCaseDb instance. * * @throws TskCoreException exception thrown if critical error occurs within * TSK */ static CaseDbHandle openCaseDb(String databaseName, CaseDbConnectionInfo info) throws TskCoreException { return new CaseDbHandle(databaseName, info); } /** * get the Sleuth Kit version string * * @return the version string */ public static String getVersion() { return getVersionNat(); } /** * Enable verbose logging and redirect stderr to the given log file. * * @param logPath the log file path */ public static void startVerboseLogging(String logPath) { startVerboseLoggingNat(logPath); } /** * Open the image and return the image info pointer. 
* * @param imageFiles the paths to the images * @param skCase the case this image belongs to * * @return the image info pointer * * @throws TskCoreException exception thrown if critical error occurs within * TSK */ public static long openImage(String[] imageFiles, SleuthkitCase skCase) throws TskCoreException { if (skCase == null) { throw new TskCoreException("SleuthkitCase can not be null"); } return openImage(imageFiles, 0, true, skCase.getCaseHandleIdentifier()); } /** * Open the image with a specified sector size and return the image info * pointer. * * @param imageFiles the paths to the images * @param sSize the sector size (use '0' for autodetect) * @param skCase the case this image belongs to * * @return the image info pointer * * @throws TskCoreException exception thrown if critical error occurs within * TSK */ public static long openImage(String[] imageFiles, int sSize, SleuthkitCase skCase) throws TskCoreException { if (skCase == null) { throw new TskCoreException("SleuthkitCase can not be null"); } return openImage(imageFiles, sSize, true, skCase.getCaseHandleIdentifier()); } /** * Open the image and return the image info pointer. This is a temporary * measure to allow ingest of multiple local disks on the same drive letter. * We need to clear the cache to make sure cached data from the first drive * is not used. * * @param imageFiles the paths to the images * @param sSize the sector size (use '0' for autodetect) * @param useCache true if the image handle cache should be used, false to * always go to TSK to open a fresh copy * @param caseIdentifer The caseDbIdentifier for this case. Can be null to support deprecated methods. 
* * @return the image info pointer * * @throws TskCoreException exception thrown if critical error occurs within * TSK */ private static long openImage(String[] imageFiles, int sSize, boolean useCache, String caseIdentifer) throws TskCoreException { getTSKReadLock(); try { long imageHandle; StringBuilder keyBuilder = new StringBuilder(); for (int i = 0; i < imageFiles.length; ++i) { keyBuilder.append(imageFiles[i]); } final String imageKey = keyBuilder.toString(); synchronized (HandleCache.cacheLock) { String nonNullCaseIdentifer = caseIdentifer; if (nonNullCaseIdentifer == null) { nonNullCaseIdentifer = HandleCache.getDefaultCaseIdentifier(); } // If we're getting a fresh copy and an image with this path is already // in the cache, move the existing cache reference so it won't be used by // any subsequent calls to openImage but will still be valid if any objects // have it cached. This happens in the case where the user adds the same data // source twice (see JIRA-5868). if (!useCache && HandleCache.getCaseHandles(nonNullCaseIdentifer).imageHandleCache.containsKey(imageKey)) { long tempImageHandle = HandleCache.getCaseHandles(nonNullCaseIdentifer).imageHandleCache.get(imageKey); // Store the old image handle in a fake path. This way it will no longer be found but will // still be valid and the image and its file systems will be closed with the case. 
String newPath = "Image_" + UUID.randomUUID().toString(); HandleCache.getCaseHandles(nonNullCaseIdentifer).imageHandleCache.put(newPath, tempImageHandle); HandleCache.getCaseHandles(nonNullCaseIdentifer).imageHandleCache.remove(imageKey); } if (useCache && HandleCache.getCaseHandles(nonNullCaseIdentifer).imageHandleCache.containsKey(imageKey)) //get from cache { imageHandle = HandleCache.getCaseHandles(nonNullCaseIdentifer).imageHandleCache.get(imageKey); } else { //open new handle and cache it imageHandle = openImgNat(imageFiles, imageFiles.length, sSize); HandleCache.getCaseHandles(nonNullCaseIdentifer).fsHandleCache.put(imageHandle, new HashMap<>()); HandleCache.getCaseHandles(nonNullCaseIdentifer).imageHandleCache.put(imageKey, imageHandle); } } return imageHandle; } finally { releaseTSKReadLock(); } } /** * This is a temporary measure to support opening an image at the beginning * of the add image process. The open image handle is put into the normal image cache so * it won't be opened a second time and it will be closed during case closing. * * This will change when all image opens are done by object ID and not paths. * * @param skCase The case the image belongs to. * @param imagePaths The complete list of paths for the image. * @param imageHandle The open image handle from TSK. 
* * @throws TskCoreException If the new image could not be added to the cache */ private static void cacheImageHandle(SleuthkitCase skCase, List imagePaths, long imageHandle) throws TskCoreException { // Construct the hash key from the image paths StringBuilder keyBuilder = new StringBuilder(); for (int i = 0; i < imagePaths.size(); ++i) { keyBuilder.append(imagePaths.get(i)); } final String imageKey = keyBuilder.toString(); // Get the case identifier String caseIdentifier = skCase.getCaseHandleIdentifier(); synchronized (HandleCache.cacheLock) { HandleCache.getCaseHandles(caseIdentifier).fsHandleCache.put(imageHandle, new HashMap<>()); HandleCache.getCaseHandles(caseIdentifier).imageHandleCache.put(imageKey, imageHandle); } } /** * Add an image to the database and return the open image. * * @param skCase The current case. * @param imagePaths The path(s) to the image (will just be the first for .e01, .001, etc). * @param sectorSize The sector size (0 for auto-detect). * @param timeZone The time zone. * @param md5fromSettings MD5 hash (if known). * @param sha1fromSettings SHA1 hash (if known). * @param sha256fromSettings SHA256 hash (if known). * @param deviceId Device ID. * * @return The Image object. * * @throws TskCoreException */ public static Image addImageToDatabase(SleuthkitCase skCase, String[] imagePaths, int sectorSize, String timeZone, String md5fromSettings, String sha1fromSettings, String sha256fromSettings, String deviceId) throws TskCoreException { return addImageToDatabase(skCase, imagePaths, sectorSize, timeZone, md5fromSettings, sha1fromSettings, sha256fromSettings, deviceId, null); } /** * Add an image to the database and return the open image. * * @param skCase The current case. * @param imagePaths The path(s) to the image (will just be the first for .e01, .001, etc). * @param sectorSize The sector size (0 for auto-detect). * @param timeZone The time zone. * @param md5fromSettings MD5 hash (if known). 
* @param sha1fromSettings SHA1 hash (if known). * @param sha256fromSettings SHA256 hash (if known). * @param deviceId Device ID. * @param host Host. * * @return The Image object. * * @throws TskCoreException */ public static Image addImageToDatabase(SleuthkitCase skCase, String[] imagePaths, int sectorSize, String timeZone, String md5fromSettings, String sha1fromSettings, String sha256fromSettings, String deviceId, Host host) throws TskCoreException { // Open the image long imageHandle = openImgNat(imagePaths, 1, sectorSize); // Get the fields stored in the native code List computedPaths = Arrays.asList(getPathsForImageNat(imageHandle)); long size = getSizeForImageNat(imageHandle); long type = getTypeForImageNat(imageHandle); long computedSectorSize = getSectorSizeForImageNat(imageHandle); String md5 = md5fromSettings; if (StringUtils.isEmpty(md5)) { md5 = getMD5HashForImageNat(imageHandle); } String sha1 = sha1fromSettings; if (StringUtils.isEmpty(sha1)) { sha1 = getSha1HashForImageNat(imageHandle); } // Sleuthkit does not currently generate any SHA256 hashes. Set to empty // string for consistency. 
String sha256 = sha256fromSettings; if (sha256 == null) { sha256 = ""; } String collectionDetails = getCollectionDetailsForImageNat(imageHandle); // Now save to database CaseDbTransaction transaction = skCase.beginTransaction(); try { Image img = skCase.addImage(TskData.TSK_IMG_TYPE_ENUM.valueOf(type), computedSectorSize, size, null, computedPaths, timeZone, md5, sha1, sha256, deviceId, host, transaction); if (!StringUtils.isEmpty(collectionDetails)) { skCase.setAcquisitionDetails(img, collectionDetails); } transaction.commit(); img.setImageHandle(imageHandle); cacheImageHandle(skCase, computedPaths, imageHandle); return img; } catch (TskCoreException ex) { transaction.rollback(); throw(ex); } } /** * Get volume system Handle * * @param imgHandle a handle to previously opened image * @param vsOffset byte offset in the image to the volume system (usually * 0) * * @return pointer to a vsHandle structure in the sleuthkit * * @throws TskCoreException exception thrown if critical error occurs within * TSK */ public static long openVs(long imgHandle, long vsOffset) throws TskCoreException { getTSKReadLock(); try { if(! 
imgHandleIsValid(imgHandle)) { throw new TskCoreException("Image handle " + imgHandle + " is closed"); } return openVsNat(imgHandle, vsOffset); } finally { releaseTSKReadLock(); } } //get pointers /** * Get volume Handle * * @param vsHandle pointer to the volume system structure in the sleuthkit * @param volId id of the volume * * @return pointer to a volHandle structure in the sleuthkit * * @throws TskCoreException exception thrown if critical error occurs within * TSK */ public static long openVsPart(long vsHandle, long volId) throws TskCoreException { getTSKReadLock(); try { //returned long is ptr to vs Handle object in tsk return openVolNat(vsHandle, volId); } finally { releaseTSKReadLock(); } } /** * Get pool Handle * * @param imgHandle pointer to the image structure in the sleuthkit * @param offset offset of the pool * * @return pointer to a pool info structure in the sleuthkit * * @throws TskCoreException exception thrown if critical error occurs within * TSK */ static long openPool(long imgHandle, long offset, SleuthkitCase skCase) throws TskCoreException { getTSKReadLock(); try { if(! imgHandleIsValid(imgHandle)) { throw new TskCoreException("Image handle " + imgHandle + " is closed"); } synchronized (HandleCache.cacheLock) { String caseIdentifier; if (skCase == null) { caseIdentifier = HandleCache.getDefaultCaseIdentifier(); } else { caseIdentifier = skCase.getCaseHandleIdentifier(); } // If a pool handle cache for this image does not exist, make one if (! 
HandleCache.getCaseHandles(caseIdentifier).poolHandleCache.containsKey(imgHandle)) { HandleCache.getCaseHandles(caseIdentifier).poolHandleCache.put(imgHandle, new HashMap<>()); } // Get the pool handle cache for this image Map poolCacheForImage = HandleCache.getCaseHandles(caseIdentifier).poolHandleCache.get(imgHandle); if (poolCacheForImage.containsKey(offset)) { return poolCacheForImage.get(offset); } else { //returned long is ptr to pool Handle object in tsk long poolHandle = openPoolNat(imgHandle, offset); poolCacheForImage.put(offset, poolHandle); return poolHandle; } } } finally { releaseTSKReadLock(); } } /** * Get file system Handle Opened handle is cached (transparently) so it does * not need be reopened next time for the duration of the application * * @param imgHandle pointer to imgHandle in sleuthkit * @param fsOffset byte offset to the file system * @param skCase the case containing the file system * * @return pointer to a fsHandle structure in the sleuthkit * * @throws TskCoreException exception thrown if critical error occurs within * TSK */ public static long openFs(long imgHandle, long fsOffset, SleuthkitCase skCase) throws TskCoreException { getTSKReadLock(); try { long fsHandle; synchronized (HandleCache.cacheLock) { String caseIdentifier; if (skCase == null) { caseIdentifier = HandleCache.getDefaultCaseIdentifier(); } else { caseIdentifier = skCase.getCaseHandleIdentifier(); } final Map imgOffSetToFsHandle = HandleCache.getCaseHandles(caseIdentifier).fsHandleCache.get(imgHandle); if (imgOffSetToFsHandle == null) { throw new TskCoreException("Missing image offset to file system handle cache for image handle " + imgHandle); } if (imgOffSetToFsHandle.containsKey(fsOffset)) { //return cached fsHandle = imgOffSetToFsHandle.get(fsOffset); } else { fsHandle = openFsNat(imgHandle, fsOffset); //cache it imgOffSetToFsHandle.put(fsOffset, fsHandle); } } return fsHandle; } finally { releaseTSKReadLock(); } } /** * Get file system handle for a file system 
contained in a pool. * Opened handle is cached (transparently) so it does * not need be reopened next time for the duration of the application * * @param imgHandle pointer to imgHandle in sleuthkit * @param fsOffset byte offset to the file system * @param poolHandle pointer to the pool info handle * @param poolBlock pool block * @param skCase the case containing the file system * * @return pointer to a fsHandle structure in the sleuthkit * * @throws TskCoreException exception thrown if critical error occurs within * TSK */ static long openFsPool(long imgHandle, long fsOffset, long poolHandle, long poolBlock, SleuthkitCase skCase) throws TskCoreException { /* * Currently, our APFS code is not thread-safe and it is the only code * that uses pools. To prevent crashes, we make any reads to a file system * contained in a pool single-threaded. */ getTSKWriteLock(); try { long fsHandle; synchronized (HandleCache.cacheLock) { String caseIdentifier; if (skCase == null) { caseIdentifier = HandleCache.getDefaultCaseIdentifier(); } else { caseIdentifier = skCase.getCaseHandleIdentifier(); } final Map imgOffSetToFsHandle = HandleCache.getCaseHandles(caseIdentifier).fsHandleCache.get(imgHandle); if (imgOffSetToFsHandle == null) { throw new TskCoreException("Missing image offset to file system handle cache for image handle " + imgHandle); } if (imgOffSetToFsHandle.containsKey(poolBlock)) { //return cached fsHandle = imgOffSetToFsHandle.get(poolBlock); } else { long poolImgHandle = getImgInfoForPoolNat(poolHandle, poolBlock); HandleCache.getCaseHandles(caseIdentifier).poolImgCache.add(poolImgHandle); fsHandle = openFsNat(poolImgHandle, fsOffset); //cache it imgOffSetToFsHandle.put(poolBlock, fsHandle); HandleCache.getCaseHandles(caseIdentifier).poolFsList.add(fsHandle); } } return fsHandle; } finally { releaseTSKWriteLock(); } } /** * Get file Handle * * @param fsHandle fsHandle pointer in the sleuthkit * @param fileId id of the file * @param attrType file attribute type to open * 
@param attrId file attribute id to open * @param skCase the case associated with this file * * @return pointer to a file structure in the sleuthkit * * @throws TskCoreException exception thrown if critical error occurs within * TSK */ public static long openFile(long fsHandle, long fileId, TSK_FS_ATTR_TYPE_ENUM attrType, int attrId, SleuthkitCase skCase) throws TskCoreException { /* * NOTE: previously attrId used to be stored in AbstractFile as (signed) * short even though it is stored as uint16 in TSK. In extremely rare * occurrences attrId can be larger than what a signed short can hold * (2^15). Changes were made to AbstractFile to store attrId as integer. * However, a depricated method still exists in AbstractFile to get * attrId as short. In that method we convert attribute ids that are * larger than 32K to a negative number. Therefore if encountered, we * need to convert negative attribute id to uint16 which is what TSK is * using to store attribute id. */ boolean withinPool = false; synchronized (HandleCache.cacheLock) { String caseIdentifier; if (skCase == null) { caseIdentifier = HandleCache.getDefaultCaseIdentifier(); } else { caseIdentifier = skCase.getCaseHandleIdentifier(); } if (HandleCache.getCaseHandles(caseIdentifier).poolFsList.contains(fsHandle)) { withinPool = true; } } /* * The current APFS code is not thread-safe. To compensate, we make any * reads to the APFS pool single-threaded by obtaining a write * lock instead of a read lock. 
*/ if (withinPool) { getTSKWriteLock(); } else { getTSKReadLock(); } try { long fileHandle = openFileNat(fsHandle, fileId, attrType.getValue(), convertSignedToUnsigned(attrId)); synchronized (HandleCache.cacheLock) { String caseIdentifier; if (skCase == null) { caseIdentifier = HandleCache.getDefaultCaseIdentifier(); } else { caseIdentifier = skCase.getCaseHandleIdentifier(); } HandleCache.addFileHandle(caseIdentifier, fileHandle, fsHandle); // If this file is in a pool file system, record it so the locks // can be set appropriately when reading it. if (withinPool) { HandleCache.poolFileHandles.add(fileHandle); } } return fileHandle; } finally { if (withinPool) { releaseTSKWriteLock(); } else { releaseTSKReadLock(); } } } /** * Converts signed integer to an unsigned integer. * * @param val value to be converter * * @return unsigned integer value */ private static int convertSignedToUnsigned(int val) { if (val >= 0) { return val; } return val & 0xffff; // convert negative value to positive value } /** * Test that the given image handle is valid. * @param imgHandle * @return true if it is valid, false otherwise */ private static boolean imgHandleIsValid(long imgHandle) { synchronized(HandleCache.cacheLock) { return HandleCache.isImageInAnyCache(imgHandle); } } //do reads /** * reads data from an image * * @param imgHandle * @param readBuffer buffer to read to * @param offset byte offset in the image to start at * @param len amount of data to read * * @return the number of characters read, or -1 if the end of the stream has * been reached * * @throws TskCoreException exception thrown if critical error occurs within * TSK */ public static int readImg(long imgHandle, byte[] readBuffer, long offset, long len) throws TskCoreException { getTSKReadLock(); try { if(! 
imgHandleIsValid(imgHandle)) { throw new TskCoreException("Image handle " + imgHandle + " is closed"); } //returned byte[] is the data buffer return readImgNat(imgHandle, readBuffer, offset, len); } finally { releaseTSKReadLock(); } } /** * reads data from an volume system * * @param vsHandle pointer to a volume system structure in the sleuthkit * @param readBuffer buffer to read to * @param offset sector offset in the image to start at * @param len amount of data to read * * @return the number of characters read, or -1 if the end of the stream has * been reached * * @throws TskCoreException exception thrown if critical error occurs within * TSK */ public static int readVs(long vsHandle, byte[] readBuffer, long offset, long len) throws TskCoreException { getTSKReadLock(); try { return readVsNat(vsHandle, readBuffer, offset, len); } finally { releaseTSKReadLock(); } } /** * Reads data from a pool * * @param poolHandle handle to the pool info struct * @param readBuffer buffer to read into * @param offset starting offset * @param len length * * @return number of bytes read * * @throws TskCoreException */ static int readPool(long poolHandle, byte[] readBuffer, long offset, long len) throws TskCoreException { getTSKReadLock(); try { return readPoolNat(poolHandle, readBuffer, offset, len); } finally { releaseTSKReadLock(); } } /** * reads data from an volume * * @param volHandle pointer to a volume structure in the sleuthkit * @param readBuffer buffer to read to * @param offset byte offset in the image to start at * @param len amount of data to read * * @return the number of characters read, or -1 if the end of the stream has * been reached * * @throws TskCoreException exception thrown if critical error occurs within * TSK */ public static int readVsPart(long volHandle, byte[] readBuffer, long offset, long len) throws TskCoreException { getTSKReadLock(); try { //returned byte[] is the data buffer return readVolNat(volHandle, readBuffer, offset, len); } finally { 
releaseTSKReadLock(); } } /** * reads data from an file system * * @param fsHandle pointer to a file system structure in the sleuthkit * @param readBuffer buffer to read to * @param offset byte offset in the image to start at * @param len amount of data to read * * @return the number of characters read, or -1 if the end of the stream has * been reached * * @throws TskCoreException exception thrown if critical error occurs within * TSK */ public static int readFs(long fsHandle, byte[] readBuffer, long offset, long len) throws TskCoreException { getTSKReadLock(); try { //returned byte[] is the data buffer return readFsNat(fsHandle, readBuffer, offset, len); } finally { releaseTSKReadLock(); } } /** * enum used to tell readFileNat whether the offset is from the beginning of * the file or from the beginning of the slack space. */ private enum TSK_FS_FILE_READ_OFFSET_TYPE_ENUM { START_OF_FILE(0), START_OF_SLACK(1); private final int val; TSK_FS_FILE_READ_OFFSET_TYPE_ENUM(int val) { this.val = val; } int getValue() { return val; } } /** * reads data from an file * * @param fileHandle pointer to a file structure in the sleuthkit * @param readBuffer pre-allocated buffer to read to * @param offset byte offset in the image to start at * @param len amount of data to read * * @return the number of characters read, or -1 if the end of the stream has * been reached * * @throws TskCoreException exception thrown if critical error occurs within * TSK */ public static int readFile(long fileHandle, byte[] readBuffer, long offset, long len) throws TskCoreException { boolean withinPool = false; synchronized (HandleCache.cacheLock) { if (HandleCache.poolFileHandles.contains(fileHandle)) { withinPool = true; } } /* * The current APFS code is not thread-safe. To compensate, we make any * reads to the APFS pool single-threaded by obtaining a write * lock instead of a read lock. 
*/ if (withinPool) { getTSKWriteLock(); } else { getTSKReadLock(); } try { if (!HandleCache.isValidFileHandle(fileHandle)) { throw new TskCoreException(HandleCache.INVALID_FILE_HANDLE); } return readFileNat(fileHandle, readBuffer, offset, TSK_FS_FILE_READ_OFFSET_TYPE_ENUM.START_OF_FILE.getValue(), len); } finally { if (withinPool) { releaseTSKWriteLock(); } else { releaseTSKReadLock(); } } } /** * reads data from the slack space of a file * * @param fileHandle pointer to a file structure in the sleuthkit * @param readBuffer pre-allocated buffer to read to * @param offset byte offset in the slack to start at * @param len amount of data to read * * @return the number of characters read, or -1 if the end of the stream has * been reached * * @throws TskCoreException exception thrown if critical error occurs within * TSK */ public static int readFileSlack(long fileHandle, byte[] readBuffer, long offset, long len) throws TskCoreException { getTSKReadLock(); try { if (!HandleCache.isValidFileHandle(fileHandle)) { throw new TskCoreException(HandleCache.INVALID_FILE_HANDLE); } return readFileNat(fileHandle, readBuffer, offset, TSK_FS_FILE_READ_OFFSET_TYPE_ENUM.START_OF_SLACK.getValue(), len); } finally { releaseTSKReadLock(); } } /** * Get human readable (some what) details about a file. 
This is the same as * the 'istat' TSK tool * * @param fileHandle pointer to file structure in the sleuthkit * * @return text * * @throws TskCoreException if errors occurred */ public static List getFileMetaDataText(long fileHandle) throws TskCoreException { getTSKReadLock(); try { if (!HandleCache.isValidFileHandle(fileHandle)) { throw new TskCoreException(HandleCache.INVALID_FILE_HANDLE); } try { java.io.File tmp = java.io.File.createTempFile("tsk", ".txt"); saveFileMetaDataTextNat(fileHandle, tmp.getAbsolutePath()); FileReader fr = new FileReader(tmp.getAbsolutePath()); BufferedReader textReader = new BufferedReader(fr); List lines = new ArrayList(); while (true) { String line = textReader.readLine(); if (line == null) { break; } lines.add(line); } textReader.close(); fr.close(); tmp.delete(); return lines; } catch (IOException ex) { throw new TskCoreException("Error reading istat output: " + ex.getLocalizedMessage()); } } finally { releaseTSKReadLock(); } } /** * frees the fileHandle pointer * * @param fileHandle pointer to file structure in sleuthkit */ public static void closeFile(long fileHandle) { closeFile(fileHandle, null); } /** * frees the fileHandle pointer * * @param fileHandle pointer to file structure in sleuthkit * @param skCase the case containing the file */ public static void closeFile(long fileHandle, SleuthkitCase skCase) { boolean withinPool = false; synchronized (HandleCache.cacheLock) { if (HandleCache.poolFileHandles.contains(fileHandle)) { withinPool = true; } } /* * The current APFS code is not thread-safe. To compensate, we make any * reads to the APFS pool single-threaded by obtaining a write * lock instead of a read lock. */ if (withinPool) { getTSKWriteLock(); } else { getTSKReadLock(); } try { synchronized (HandleCache.cacheLock) { if (!HandleCache.isValidFileHandle(fileHandle)) { // File handle is not open so this is a no-op. 
return; } closeFileNat(fileHandle); HandleCache.removeFileHandle(fileHandle, skCase); if (HandleCache.poolFileHandles.contains(fileHandle)) { HandleCache.poolFileHandles.remove(fileHandle); } } } finally { if (withinPool) { releaseTSKWriteLock(); } else { releaseTSKReadLock(); } } } /** * Create an index for a hash database. * * @param dbHandle A hash database handle. * * @throws TskCoreException if a critical error occurs within TSK core */ public static void createLookupIndexForHashDatabase(int dbHandle) throws TskCoreException { hashDbCreateIndexNat(dbHandle); } /** * Check if an index exists for a hash database. * * @param dbHandle A hash database handle. * * @return true if index exists * * @throws TskCoreException if a critical error occurs within TSK core */ public static boolean hashDatabaseHasLookupIndex(int dbHandle) throws TskCoreException { return hashDbIndexExistsNat(dbHandle); } /** * hashDatabaseCanBeReindexed * * @param dbHandle previously opened hash db handle * * @return Does this database have a source database that is different than * the index? 
* * @throws TskCoreException if a critical error occurs within TSK core */ public static boolean hashDatabaseCanBeReindexed(int dbHandle) throws TskCoreException { return hashDbIsReindexableNat(dbHandle); } /** * getHashDatabasePath * * @param dbHandle previously opened hash db handle * * @return Hash db file path * * @throws TskCoreException if a critical error occurs within TSK core */ public static String getHashDatabasePath(int dbHandle) throws TskCoreException { return hashDbPathNat(dbHandle); } /** * getHashDatabaseIndexPath * * @param dbHandle previously opened hash db handle * * @return Index file path * * @throws TskCoreException if a critical error occurs within TSK core */ public static String getHashDatabaseIndexPath(int dbHandle) throws TskCoreException { return hashDbIndexPathNat(dbHandle); } /** * Open a hash database for lookups * @param path Path to Hash DB or index file * @return Handle open db * @throws TskCoreException if there is an error opening the DB */ public static int openHashDatabase(String path) throws TskCoreException { return hashDbOpenNat(path); } /** * Creates a hash database. Will be of the default TSK hash database type. * * @param path The path to the database * * @return a handle for that database * * @throws TskCoreException if a critical error occurs within TSK core */ public static int createHashDatabase(String path) throws TskCoreException { return hashDbNewNat(path); } /** * Close the currently open lookup databases. Resets the handle counting. * * @throws TskCoreException exception thrown if critical error occurs within * TSK */ public static void closeAllHashDatabases() throws TskCoreException { hashDbCloseAll(); } /** * Close a particular open lookup database. Existing handles are not * affected. * * @param dbHandle Handle of database to close. 
* * @throws TskCoreException exception thrown if critical error occurs within * TSK */ public static void closeHashDatabase(int dbHandle) throws TskCoreException { hashDbClose(dbHandle); } /** * Get the name of the database * * @param dbHandle Previously opened hash db handle. * * @return The display name. * * @throws TskCoreException if a critical error occurs within TSK core */ public static String getHashDatabaseDisplayName(int dbHandle) throws TskCoreException { return hashDbGetDisplayName(dbHandle); } /** * Lookup the given hash value and get basic answer * * @param hash Hash value to search for. * @param dbHandle Handle of database to lookup in. * * @return True if hash was found in database. * * @throws TskCoreException */ public static boolean lookupInHashDatabase(String hash, int dbHandle) throws TskCoreException { return hashDbLookup(hash, dbHandle); } /** * Lookup hash value in DB and return details on results (more time * consuming than basic lookup) * * @param hash Hash value to search for * @param dbHandle Handle of database to lookup in. * * @return Details on hash if it was in DB or null if it was not found. * * @throws TskCoreException */ public static HashHitInfo lookupInHashDatabaseVerbose(String hash, int dbHandle) throws TskCoreException { return hashDbLookupVerbose(hash, dbHandle); } /** * Adds a hash value to a hash database. 
* * @param filename Name of file (can be null) * @param md5 Text of MD5 hash (can be null) * @param sha1 Text of SHA1 hash (can be null) * @param sha256 Text of SHA256 hash (can be null) * @param comment A comment (can be null) * @param dbHandle Handle to DB * * @throws TskCoreException */ public static void addToHashDatabase(String filename, String md5, String sha1, String sha256, String comment, int dbHandle) throws TskCoreException { hashDbAddEntryNat(filename, md5, sha1, sha256, comment, dbHandle); } public static void addToHashDatabase(List hashes, int dbHandle) throws TskCoreException { hashDbBeginTransactionNat(dbHandle); try { for (HashEntry entry : hashes) { hashDbAddEntryNat(entry.getFileName(), entry.getMd5Hash(), entry.getSha1Hash(), entry.getSha256Hash(), entry.getComment(), dbHandle); } hashDbCommitTransactionNat(dbHandle); } catch (TskCoreException ex) { try { hashDbRollbackTransactionNat(dbHandle); } catch (TskCoreException ex2) { ex2.initCause(ex); throw ex2; } throw ex; } } public static boolean isUpdateableHashDatabase(int dbHandle) throws TskCoreException { return hashDbIsUpdateableNat(dbHandle); } public static boolean hashDatabaseIsIndexOnly(int dbHandle) throws TskCoreException { return hashDbIsIdxOnlyNat(dbHandle); } /** * Convert this timezone from long to short form Convert timezoneLongForm * passed in from long to short form * * @param timezoneLongForm the long form (e.g., America/New_York) * * @return the short form (e.g., EST5EDT) string representation, or an empty * string if empty long form was passed in */ private static String timezoneLongToShort(String timezoneLongForm) { if (timezoneLongForm == null || timezoneLongForm.isEmpty()) { return ""; } String timezoneShortForm; TimeZone zone = TimeZone.getTimeZone(timezoneLongForm); int offset = zone.getRawOffset() / 1000; int hour = offset / 3600; int min = (offset % 3600) / 60; DateFormat dfm = new SimpleDateFormat("z"); dfm.setTimeZone(zone); boolean hasDaylight = 
zone.useDaylightTime(); String first = dfm.format(new GregorianCalendar(2010, 1, 1).getTime()).substring(0, 3); // make it only 3 letters code String second = dfm.format(new GregorianCalendar(2011, 6, 6).getTime()).substring(0, 3); // make it only 3 letters code int mid = hour * -1; timezoneShortForm = first + Integer.toString(mid); if (min != 0) { timezoneShortForm = timezoneShortForm + ":" + (min < 10 ? "0" : "") + Integer.toString(min); } if (hasDaylight) { timezoneShortForm += second; } return timezoneShortForm; } /** * Fills in any gaps in the image created by image writer. * * @param imgHandle The image handle. * * @return 0 if no errors occurred; 1 otherwise. * * @throws TskCoreException exception thrown if critical error occurs within * TSK */ public static int finishImageWriter(long imgHandle) throws TskCoreException { getTSKReadLock(); try { if(! imgHandleIsValid(imgHandle)) { throw new TskCoreException("Image handle " + imgHandle + " is closed"); } return finishImageWriterNat(imgHandle); } finally { releaseTSKReadLock(); } } /** * Get the current progress of the finish image process (0-100) * * @param imgHandle * * @return Percentage of blocks completed (0-100) */ public static int getFinishImageProgress(long imgHandle) { getTSKReadLock(); try { if (imgHandleIsValid(imgHandle)) { return getFinishImageProgressNat(imgHandle); } else { return 0; } } finally { releaseTSKReadLock(); } } /** * Cancel the finish image process * * @param imgHandle */ public static void cancelFinishImage(long imgHandle) { getTSKReadLock(); try { if (imgHandleIsValid(imgHandle)) { cancelFinishImageNat(imgHandle); } } finally { releaseTSKReadLock(); } } /** * Get size of a device (physical, logical device, image) pointed to by * devPath * * @param devPath device path pointing to the device * * @return size of the device in bytes * * @throws TskCoreException exception thrown if the device size could not be * queried */ public static long findDeviceSize(String devPath) throws 
TskCoreException { return findDeviceSizeNat(devPath); } public static boolean isImageSupported(String imagePath) { return isImageSupportedNat(imagePath); } /** Get the version of the Sleuthkit code in number form. * Upper byte is A, next is B, and next byte is C in version A.B.C. * Lowest byte is 0xff, except in beta releases, in which case it * increments from 1. Nightly snapshots will have upper byte as * 0xff and next bytes with year, month, and date, respectively. * Note that you will not be able to differentiate between snapshots * from the trunk or branches with this method... * For example, 3.1.2 would be stored as 0x030102FF. * 3.1.2b1 would be 0x03010201. Snapshot from Jan 2, 2003 would be * 0xFF030102. * * @return the current Sleuthkit version */ static long getSleuthkitVersion() { return getSleuthkitVersionNat(); } /** * Get a read lock for the C++ layer. Do not get this lock after obtaining * HandleCache.cacheLock. */ private static void getTSKReadLock() { tskLock.readLock().lock(); } /** * Release the read lock */ private static void releaseTSKReadLock() { tskLock.readLock().unlock(); } /** * Get a write lock for the C++ layer. Do not get this lock after obtaining * HandleCache.cacheLock. * * This is a temporary fix for APFS which is not thread-safe. Should be used * when accessing anything under a pool. */ private static void getTSKWriteLock() { tskLock.writeLock().lock(); } /** * Release the write lock */ private static void releaseTSKWriteLock() { tskLock.writeLock().unlock(); } //free pointers /** * frees the imgHandle pointer currently does not close the image - * imgHandle should only be freed as part of CaseDbHandle.free(). 
* * @param imgHandle to close the image */ @Deprecated public static void closeImg(long imgHandle) { //closeImgNat(imgHandle); } /** * frees the vsHandle pointer - currently does nothing * * @param vsHandle pointer to volume system structure in sleuthkit */ @Deprecated public static void closeVs(long vsHandle) { // closeVsNat(vsHandle); TODO JIRA-3829 } /** * frees the fsHandle pointer Currently does not do anything - fsHandle * should only be freed as part of CaseDbHandle.free(). * * @param fsHandle pointer to file system structure in sleuthkit */ @Deprecated public static void closeFs(long fsHandle) { //closeFsNat(fsHandle); } /** * Open the image and return the image info pointer. * * @param imageFiles the paths to the images * * @return the image info pointer * * @throws TskCoreException exception thrown if critical error occurs within * TSK * @deprecated Use the version with the SleuthkitCase argument */ @Deprecated public static long openImage(String[] imageFiles) throws TskCoreException { return openImage(imageFiles, 0, true, null); } /** * Open the image with a specified sector size and return the image info * pointer. 
* * @param imageFiles the paths to the images * @param sSize the sector size (use '0' for autodetect) * * @return the image info pointer * * @throws TskCoreException exception thrown if critical error occurs within * TSK * @deprecated Use the version with the SleuthkitCase argument */ @Deprecated public static long openImage(String[] imageFiles, int sSize) throws TskCoreException { return openImage(imageFiles, sSize, true, null); } /** * Get file system Handle Opened handle is cached (transparently) so it does * not need be reopened next time for the duration of the application * * @param imgHandle pointer to imgHandle in sleuthkit * @param fsOffset byte offset to the file system * * @return pointer to a fsHandle structure in the sleuthkit * * @throws TskCoreException exception thrown if critical error occurs within * TSK * @deprecated Use the version with the SleuthkitCase argument */ @Deprecated public static long openFs(long imgHandle, long fsOffset) throws TskCoreException { return openFs(imgHandle, fsOffset, null); } /** * Get file Handle * * @param fsHandle fsHandle pointer in the sleuthkit * @param fileId id of the file * @param attrType file attribute type to open * @param attrId file attribute id to open * * @return pointer to a file structure in the sleuthkit * * @throws TskCoreException exception thrown if critical error occurs within * TSK * @deprecated Use the version with the SleuthkitCase argument */ @Deprecated public static long openFile(long fsHandle, long fileId, TSK_FS_ATTR_TYPE_ENUM attrType, int attrId) throws TskCoreException { return openFile(fsHandle, fileId, attrType, attrId, null); } private static native String getVersionNat(); private static native void startVerboseLoggingNat(String logPath); private static native int hashDbOpenNat(String hashDbPath) throws TskCoreException; private static native int hashDbNewNat(String hashDbPath) throws TskCoreException; private static native int hashDbBeginTransactionNat(int dbHandle) throws 
TskCoreException; private static native int hashDbCommitTransactionNat(int dbHandle) throws TskCoreException; private static native int hashDbRollbackTransactionNat(int dbHandle) throws TskCoreException; private static native int hashDbAddEntryNat(String filename, String hashMd5, String hashSha1, String hashSha256, String comment, int dbHandle) throws TskCoreException; private static native boolean hashDbIsUpdateableNat(int dbHandle); private static native boolean hashDbIsReindexableNat(int dbHandle); private static native String hashDbPathNat(int dbHandle); private static native String hashDbIndexPathNat(int dbHandle); private static native String hashDbGetDisplayName(int dbHandle) throws TskCoreException; private static native void hashDbCloseAll() throws TskCoreException; private static native void hashDbClose(int dbHandle) throws TskCoreException; private static native void hashDbCreateIndexNat(int dbHandle) throws TskCoreException; private static native boolean hashDbIndexExistsNat(int dbHandle) throws TskCoreException; private static native boolean hashDbIsIdxOnlyNat(int dbHandle) throws TskCoreException; private static native boolean hashDbLookup(String hash, int dbHandle) throws TskCoreException; private static native HashHitInfo hashDbLookupVerbose(String hash, int dbHandle) throws TskCoreException; private static native long initAddImgNat(TskCaseDbBridge dbHelperObj, String timezone, boolean addUnallocSpace, boolean skipFatFsOrphans) throws TskCoreException; private static native long initializeAddImgNat(TskCaseDbBridge dbHelperObj, String timezone, boolean addFileSystems, boolean addUnallocSpace, boolean skipFatFsOrphans) throws TskCoreException; private static native void runOpenAndAddImgNat(long process, String deviceId, String[] imgPath, int splits, String timezone) throws TskCoreException, TskDataException; private static native void runAddImgNat(long process, String deviceId, long a_img_info, long image_id, String timeZone, String imageWriterPath) 
throws TskCoreException, TskDataException; private static native void stopAddImgNat(long process) throws TskCoreException; private static native long finishAddImgNat(long process) throws TskCoreException; private static native long openImgNat(String[] imgPath, int splits, int sSize) throws TskCoreException; private static native long openVsNat(long imgHandle, long vsOffset) throws TskCoreException; private static native long openVolNat(long vsHandle, long volId) throws TskCoreException; private static native long openPoolNat(long imgHandle, long offset) throws TskCoreException; private static native long getImgInfoForPoolNat(long poolHandle, long poolOffset) throws TskCoreException; private static native long openFsNat(long imgHandle, long fsId) throws TskCoreException; private static native long openFileNat(long fsHandle, long fileId, int attrType, int attrId) throws TskCoreException; private static native int readImgNat(long imgHandle, byte[] readBuffer, long offset, long len) throws TskCoreException; private static native int readVsNat(long vsHandle, byte[] readBuffer, long offset, long len) throws TskCoreException; private static native int readPoolNat(long poolHandle, byte[] readBuffer, long offset, long len) throws TskCoreException; private static native int readVolNat(long volHandle, byte[] readBuffer, long offset, long len) throws TskCoreException; private static native int readFsNat(long fsHandle, byte[] readBuffer, long offset, long len) throws TskCoreException; private static native int readFileNat(long fileHandle, byte[] readBuffer, long offset, int offset_type, long len) throws TskCoreException; private static native int saveFileMetaDataTextNat(long fileHandle, String fileName) throws TskCoreException; private static native String[] getPathsForImageNat(long imgHandle); private static native long getSizeForImageNat(long imgHandle); private static native long getTypeForImageNat(long imgHandle); private static native long getSectorSizeForImageNat(long 
imgHandle); private static native String getMD5HashForImageNat(long imgHandle); private static native String getSha1HashForImageNat(long imgHandle); private static native String getCollectionDetailsForImageNat(long imgHandle); private static native void closeImgNat(long imgHandle); private static native void closePoolNat(long poolHandle); private static native void closeVsNat(long vsHandle); private static native void closeFsNat(long fsHandle); private static native void closeFileNat(long fileHandle); private static native long findDeviceSizeNat(String devicePath) throws TskCoreException; private static native String getCurDirNat(long process); private static native boolean isImageSupportedNat(String imagePath); private static native long getSleuthkitVersionNat(); private static native int finishImageWriterNat(long a_img_info); private static native int getFinishImageProgressNat(long a_img_info); private static native void cancelFinishImageNat(long a_img_info); } sleuthkit-4.11.1/bindings/java/src/org/sleuthkit/datamodel/TimelineEvent.java000644 000765 000024 00000021025 14137073413 030064 0ustar00carrierstaff000000 000000 /* * Sleuth Kit Data Model * * Copyright 2018-2019 Basis Technology Corp. * Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.sleuthkit.datamodel; import java.util.Optional; /** * A representation of an event in the timeline of a case. 
*/
public final class TimelineEvent {

	/**
	 * The unique ID of this event in the case database.
	 */
	private final long eventID;

	/**
	 * The object ID of the content that is either the direct or indirect source
	 * of this event. For events associated with files, this will be the object
	 * ID of the file. For events associated with artifacts, this will be the
	 * object ID of the artifact source: a file, a data source, or another
	 * artifact.
	 */
	private final long contentObjID;

	/**
	 * The artifact ID (not the object ID) of the artifact, if any, that is the
	 * source of this event. Null for events associated directly with files.
	 */
	private final Long artifactID;

	/**
	 * The object ID of the data source for the event source.
	 */
	private final long dataSourceObjID;

	/**
	 * When this event occurred, in seconds from the UNIX epoch.
	 */
	private final long time;

	/**
	 * The type of this event.
	 */
	private final TimelineEventType type;

	/**
	 * The description of this event, provided at three levels of detail: high
	 * (full description), medium (medium description), and low (short
	 * description).
	 */
	private final TimelineEventDescription descriptions;

	/**
	 * True if the file, if any, associated with this event, either directly or
	 * indirectly, is a file for which a hash set hit has been detected.
	 */
	private final boolean eventSourceHashHitDetected;

	/**
	 * True if the direct source (file or artifact) of this event has been
	 * tagged.
	 */
	private final boolean eventSourceTagged;

	/**
	 * Constructs a representation of an event in the timeline of a case.
	 *
	 * @param eventID                    The unique ID of this event in the case
	 *                                   database.
	 * @param dataSourceObjID            The object ID of the data source for
	 *                                   the event source.
	 * @param contentObjID               The object ID of the content that is
	 *                                   either the direct or indirect source of
	 *                                   this event. For events associated with
	 *                                   files, this will be the object ID of
	 *                                   the file. For events associated with
	 *                                   artifacts, this will be the object ID
	 *                                   of the artifact source: a file, a data
	 *                                   source, or another artifact.
	 * @param artifactID                 The artifact ID (not the object ID) of
	 *                                   the artifact, if any, that is the
	 *                                   source of this event. Null for events
	 *                                   associated directly with files.
	 * @param time                       The time this event occurred, in
	 *                                   seconds from the UNIX epoch.
	 * @param type                       The type of this event.
	 * @param fullDescription            The full length description of this
	 *                                   event.
	 * @param medDescription             The medium length description of this
	 *                                   event.
	 * @param shortDescription           The short length description of this
	 *                                   event.
	 * @param eventSourceHashHitDetected True if the file, if any, associated
	 *                                   with this event, either directly or
	 *                                   indirectly, is a file for which a hash
	 *                                   set hit has been detected.
	 * @param eventSourceTagged          True if the direct source (file or
	 *                                   artifact) of this event has been
	 *                                   tagged.
	 */
	TimelineEvent(long eventID,
			long dataSourceObjID,
			long contentObjID,
			Long artifactID,
			long time,
			TimelineEventType type,
			String fullDescription,
			String medDescription,
			String shortDescription,
			boolean eventSourceHashHitDetected,
			boolean eventSourceTagged) {
		this.eventID = eventID;
		this.dataSourceObjID = dataSourceObjID;
		this.contentObjID = contentObjID;
		// Normalize an artifact ID of zero to null so that getArtifactID()
		// returns an empty Optional for events with no source artifact.
		this.artifactID = Long.valueOf(0).equals(artifactID) ? null : artifactID;
		this.time = time;
		this.type = type;
		/*
		 * The cast that follows reflects the fact that we have not decided
		 * whether or not to add the parseDescription method to the
		 * TimelineEventType interface yet. Currently (9/18/19), this method is
		 * part of TimelineEventTypeImpl and all implementations of
		 * TimelineEventType are subclasses of TimelineEventTypeImpl.
		 */
		if (type instanceof TimelineEventTypeImpl) {
			this.descriptions = ((TimelineEventTypeImpl) type).parseDescription(fullDescription, medDescription, shortDescription);
		} else {
			this.descriptions = new TimelineEventDescription(fullDescription, medDescription, shortDescription);
		}
		this.eventSourceHashHitDetected = eventSourceHashHitDetected;
		this.eventSourceTagged = eventSourceTagged;
	}

	/**
	 * Indicates whether or not the direct source (file or artifact) of this
	 * event has been tagged.
	 *
	 * @return True or false.
	 */
	public boolean eventSourceIsTagged() {
		return eventSourceTagged;
	}

	/**
	 * Indicates whether or not the file, if any, associated with this event,
	 * either directly or indirectly, is a file for which a hash set hit has
	 * been detected.
	 *
	 * @return True or false.
	 */
	public boolean eventSourceHasHashHits() {
		return eventSourceHashHitDetected;
	}

	/**
	 * Gets the artifact ID (not object ID) of the artifact, if any, that is the
	 * direct source of this event.
	 *
	 * @return An Optional object containing the artifact ID. May be empty.
	 */
	public Optional<Long> getArtifactID() {
		return Optional.ofNullable(artifactID);
	}

	/**
	 * Gets the unique ID of this event in the case database.
	 *
	 * @return The event ID.
	 */
	public long getEventID() {
		return eventID;
	}

	/**
	 * Gets the object ID of the content that is the direct or indirect source
	 * of this event. For events associated with files, this will be the object
	 * ID of the file that is the direct event source. For events associated
	 * with artifacts, this will be the object ID of the artifact source: a
	 * file, a data source, or another artifact.
	 *
	 * @return The object ID.
	 */
	public long getContentObjID() {
		return contentObjID;
	}

	/**
	 * Gets the time this event occurred.
	 *
	 * @return The time this event occurred, in seconds from UNIX epoch.
	 */
	public long getTime() {
		return time;
	}

	/**
	 * Gets the type of this event.
	 *
	 * @return The event type.
	 */
	public TimelineEventType getEventType() {
		return type;
	}

	/**
	 * Gets the description of this event at a given level of detail.
	 *
	 * @param levelOfDetail The desired level of detail.
	 *
	 * @return The description of this event at the given level of detail.
	 */
	public String getDescription(TimelineLevelOfDetail levelOfDetail) {
		return descriptions.getDescription(levelOfDetail);
	}

	/**
	 * Gets the object ID of the data source for the source content of this
	 * event.
	 *
	 * @return The data source object ID.
	 */
	public long getDataSourceObjID() {
		return dataSourceObjID;
	}

	/**
	 * Gets the time this event occurred, in milliseconds from the UNIX epoch.
	 *
	 * @return The event time in milliseconds from the UNIX epoch.
	 */
	public long getEventTimeInMs() {
		return time * 1000;
	}

	@Override
	public int hashCode() {
		int hash = 7;
		hash = 13 * hash + (int) (this.eventID ^ (this.eventID >>> 32));
		return hash;
	}

	@Override
	public boolean equals(Object obj) {
		// Identity fast path; equality is otherwise defined solely by event ID.
		if (this == obj) {
			return true;
		}
		if (obj == null) {
			return false;
		}
		if (getClass() != obj.getClass()) {
			return false;
		}
		final TimelineEvent other = (TimelineEvent) obj;
		return this.eventID == other.getEventID();
	}
}
sleuthkit-4.11.1/bindings/java/src/org/sleuthkit/datamodel/SleuthkitCase.java000644 000765 000024 00002157450 14137073414 030103 0ustar00carrierstaff000000 000000 /*
 * Sleuth Kit Data Model
 *
 * Copyright 2011-2021 Basis Technology Corp.
 * Contact: carrier sleuthkit org
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *	 http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
*/ package org.sleuthkit.datamodel; import com.google.common.cache.Cache; import com.google.common.cache.CacheBuilder; import com.google.common.collect.ImmutableSet; import com.google.common.eventbus.EventBus; import com.mchange.v2.c3p0.ComboPooledDataSource; import com.mchange.v2.c3p0.DataSources; import com.mchange.v2.c3p0.PooledDataSource; import com.zaxxer.sparsebits.SparseBitSet; import java.beans.PropertyVetoException; import java.io.BufferedInputStream; import java.io.BufferedOutputStream; import java.io.File; import java.io.FileInputStream; import java.io.FileOutputStream; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.io.UnsupportedEncodingException; import java.net.InetAddress; import java.net.URLEncoder; import java.nio.charset.StandardCharsets; import java.nio.file.Paths; import java.sql.Connection; import java.sql.DriverManager; import java.sql.PreparedStatement; import java.sql.ResultSet; import java.sql.SQLException; import java.sql.Statement; import java.text.SimpleDateFormat; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.Date; import java.util.EnumMap; import java.util.HashMap; import java.util.HashSet; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.MissingResourceException; import java.util.Objects; import java.util.Properties; import java.util.ResourceBundle; import java.util.Set; import java.util.UUID; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.TimeUnit; import java.util.concurrent.locks.ReentrantReadWriteLock; import java.util.logging.Level; import java.util.logging.Logger; import java.util.stream.Collectors; import org.apache.commons.lang3.StringUtils; import org.postgresql.util.PSQLState; import org.sleuthkit.datamodel.Blackboard.BlackboardException; import org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE; import 
org.sleuthkit.datamodel.BlackboardArtifact.Category; import org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE; import org.sleuthkit.datamodel.BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE; import org.sleuthkit.datamodel.IngestJobInfo.IngestJobStatusType; import org.sleuthkit.datamodel.IngestModuleInfo.IngestModuleType; import org.sleuthkit.datamodel.SleuthkitJNI.CaseDbHandle.AddImageProcess; import org.sleuthkit.datamodel.TskData.DbType; import org.sleuthkit.datamodel.TskData.FileKnown; import org.sleuthkit.datamodel.TskData.ObjectType; import org.sleuthkit.datamodel.TskData.TSK_DB_FILES_TYPE_ENUM; import org.sleuthkit.datamodel.TskData.TSK_FS_ATTR_TYPE_ENUM; import org.sleuthkit.datamodel.TskData.TSK_FS_META_FLAG_ENUM; import org.sleuthkit.datamodel.TskData.TSK_FS_META_TYPE_ENUM; import org.sleuthkit.datamodel.TskData.TSK_FS_NAME_FLAG_ENUM; import org.sleuthkit.datamodel.TskData.TSK_FS_NAME_TYPE_ENUM; import org.sqlite.SQLiteConfig; import org.sqlite.SQLiteDataSource; import org.sqlite.SQLiteJDBCLoader; /** * Represents the case database with methods that provide abstractions for * database operations. */ public class SleuthkitCase { private static final int MAX_DB_NAME_LEN_BEFORE_TIMESTAMP = 47; /** * This must be the same as TSK_SCHEMA_VER and TSK_SCHEMA_MINOR_VER in * tsk/auto/tsk_db.h. 
*/
	static final CaseDbSchemaVersionNumber CURRENT_DB_SCHEMA_VERSION = new CaseDbSchemaVersionNumber(9, 1);

	private static final long BASE_ARTIFACT_ID = Long.MIN_VALUE; // Artifact ids will start at the lowest negative value
	private static final Logger logger = Logger.getLogger(SleuthkitCase.class.getName());
	private static final ResourceBundle bundle = ResourceBundle.getBundle("org.sleuthkit.datamodel.Bundle");
	private static final int IS_REACHABLE_TIMEOUT_MS = 1000;
	// SQLSTATE class codes (first two characters) used to classify
	// SQLExceptions in tryConnect().
	private static final String SQL_ERROR_CONNECTION_GROUP = "08";
	private static final String SQL_ERROR_AUTHENTICATION_GROUP = "28";
	private static final String SQL_ERROR_PRIVILEGE_GROUP = "42";
	private static final String SQL_ERROR_RESOURCE_GROUP = "53";
	private static final String SQL_ERROR_LIMIT_GROUP = "54";
	private static final String SQL_ERROR_INTERNAL_GROUP = "xx";
	private static final int MIN_USER_DEFINED_TYPE_ID = 10000;

	private static final Set<String> CORE_TABLE_NAMES = ImmutableSet.of(
			"tsk_events",
			"tsk_event_descriptions",
			"tsk_event_types",
			"tsk_db_info",
			"tsk_objects",
			"tsk_image_info",
			"tsk_image_names",
			"tsk_vs_info",
			"tsk_vs_parts",
			"tsk_fs_info",
			"tsk_file_layout",
			"tsk_files",
			"tsk_files_path",
			"tsk_files_derived",
			"tsk_files_derived_method",
			"tag_names",
			"content_tags",
			"blackboard_artifact_tags",
			"blackboard_artifacts",
			"blackboard_attributes",
			"blackboard_artifact_types",
			"blackboard_attribute_types",
			"data_source_info",
			"file_encoding_types",
			"ingest_module_types",
			"ingest_job_status_types",
			"ingest_modules",
			"ingest_jobs",
			"ingest_job_modules",
			"account_types",
			"accounts",
			"account_relationships",
			"review_statuses",
			"reports"); // FIX: was "reports," — the stray comma inside the literal kept the reports table out of the core-table set

	private static final Set<String> CORE_INDEX_NAMES = ImmutableSet.of(
			"parObjId",
			"layout_objID",
			"artifact_objID",
			"artifact_artifact_objID",
			"artifact_typeID",
			"attrsArtifactID",
			"mime_type",
			"file_extension",
			"relationships_account1",
			"relationships_account2",
			"relationships_relationship_source_obj_id",
			"relationships_date_time",
			"relationships_relationship_type",
			"relationships_data_source_obj_id",
			"events_time",
			"events_type",
			"events_data_source_obj_id",
			"events_file_obj_id",
			"events_artifact_id");

	// Keys used to record TSK and schema version information in the case database.
	private static final String TSK_VERSION_KEY = "TSK_VER";
	private static final String SCHEMA_MAJOR_VERSION_KEY = "SCHEMA_MAJOR_VERSION";
	private static final String SCHEMA_MINOR_VERSION_KEY = "SCHEMA_MINOR_VERSION";
	private static final String CREATION_SCHEMA_MAJOR_VERSION_KEY = "CREATION_SCHEMA_MAJOR_VERSION";
	private static final String CREATION_SCHEMA_MINOR_VERSION_KEY = "CREATION_SCHEMA_MINOR_VERSION";

	// NOTE(review): generic type parameters on several collections below appear
	// to have been stripped during extraction; raw types preserved as-is — TODO
	// confirm element types against the upstream source.
	private final ConnectionPool connections;
	private final Object carvedFileDirsLock = new Object();
	private final Map rootIdsToCarvedFileDirs = new HashMap<>();
	private final Map fileSystemIdMap = new HashMap<>(); // Cache for file system files.
	private final List sleuthkitCaseErrorObservers = new ArrayList<>();
	private final String databaseName;
	private final String dbPath;
	private final DbType dbType;
	private final String caseDirPath;
	private SleuthkitJNI.CaseDbHandle caseHandle;
	private final String caseHandleIdentifier; // Used to identify this case in the JNI cache.
	private String dbBackupPath;
	private Map typeIdToArtifactTypeMap;
	private Map typeIdToAttributeTypeMap;
	private Map typeNameToArtifactTypeMap;
	private Map typeNameToAttributeTypeMap;
	private CaseDbSchemaVersionNumber caseDBSchemaCreationVersion;

	// Objects for caching the result of isRootDirectory(). Lock is for visibility only.
	private final Object rootDirectoryMapLock = new Object();
	private final Map rootDirectoryMap = new HashMap<>();
	private final Cache isRootDirectoryCache = CacheBuilder.newBuilder().maximumSize(200000).expireAfterAccess(5, TimeUnit.MINUTES).build();

	/*
	 * First parameter is used to specify the SparseBitSet to use, as object IDs
	 * can be larger than the max size of a SparseBitSet
	 */
	private final Map hasChildrenBitSetMap = new HashMap<>();

	private long nextArtifactId; // Used to ensure artifact ids come from the desired range.
// This read/write lock is used to implement a layer of locking on top of // the locking protocol provided by the underlying SQLite database. The Java // locking protocol improves performance for reasons that are not currently // understood. Note that the lock is contructed to use a fairness policy. private final ReentrantReadWriteLock rwLock = new ReentrantReadWriteLock(true); private CommunicationsManager communicationsMgr; private TimelineManager timelineMgr; private Blackboard blackboard; private CaseDbAccessManager dbAccessManager; private FileManager fileManager; private TaggingManager taggingMgr; private ScoringManager scoringManager; private OsAccountRealmManager osAccountRealmManager; private OsAccountManager osAccountManager; private HostManager hostManager; private PersonManager personManager; private HostAddressManager hostAddressManager; private final Map> deviceIdToDatasourceObjIdMap = new HashMap<>(); private final EventBus eventBus = new EventBus("SleuthkitCase-EventBus"); public void registerForEvents(Object listener) { eventBus.register(listener); } public void unregisterForEvents(Object listener) { eventBus.unregister(listener); } void fireTSKEvent(Object event) { eventBus.post(event); } // Cache of frequently used content objects (e.g. data source, file system). private final Map frequentlyUsedContentMap = new HashMap<>(); private Examiner cachedCurrentExaminer = null; static { Properties p = new Properties(System.getProperties()); p.put("com.mchange.v2.log.MLog", "com.mchange.v2.log.FallbackMLog"); p.put("com.mchange.v2.log.FallbackMLog.DEFAULT_CUTOFF_LEVEL", "SEVERE"); System.setProperties(p); } /** * Attempts to connect to the database with the passed in settings, throws * if the settings are not sufficient to connect to the database type * indicated. Only attempts to connect to remote databases. 
* * When issues occur, it attempts to diagnose them by looking at the * exception messages, returning the appropriate user-facing text for the * exception received. This method expects the Exceptions messages to be in * English and compares against English text. * * @param info The connection information * * @throws org.sleuthkit.datamodel.TskCoreException */ public static void tryConnect(CaseDbConnectionInfo info) throws TskCoreException { // Check if we can talk to the database. if (info.getHost() == null || info.getHost().isEmpty()) { throw new TskCoreException(bundle.getString("DatabaseConnectionCheck.MissingHostname")); //NON-NLS } else if (info.getPort() == null || info.getPort().isEmpty()) { throw new TskCoreException(bundle.getString("DatabaseConnectionCheck.MissingPort")); //NON-NLS } else if (info.getUserName() == null || info.getUserName().isEmpty()) { throw new TskCoreException(bundle.getString("DatabaseConnectionCheck.MissingUsername")); //NON-NLS } else if (info.getPassword() == null || info.getPassword().isEmpty()) { throw new TskCoreException(bundle.getString("DatabaseConnectionCheck.MissingPassword")); //NON-NLS } try { Class.forName("org.postgresql.Driver"); //NON-NLS Connection conn = DriverManager.getConnection("jdbc:postgresql://" + info.getHost() + ":" + info.getPort() + "/postgres", info.getUserName(), info.getPassword()); //NON-NLS if (conn != null) { conn.close(); } } catch (SQLException ex) { String result; String sqlState = ex.getSQLState().toLowerCase(); if (sqlState.startsWith(SQL_ERROR_CONNECTION_GROUP)) { try { if (InetAddress.getByName(info.getHost()).isReachable(IS_REACHABLE_TIMEOUT_MS)) { // if we can reach the host, then it's probably port problem result = bundle.getString("DatabaseConnectionCheck.Port"); //NON-NLS } else { result = bundle.getString("DatabaseConnectionCheck.HostnameOrPort"); //NON-NLS } } catch (IOException | MissingResourceException any) { // it may be anything result = 
bundle.getString("DatabaseConnectionCheck.Everything"); //NON-NLS } } else if (sqlState.startsWith(SQL_ERROR_AUTHENTICATION_GROUP)) { result = bundle.getString("DatabaseConnectionCheck.Authentication"); //NON-NLS } else if (sqlState.startsWith(SQL_ERROR_PRIVILEGE_GROUP)) { result = bundle.getString("DatabaseConnectionCheck.Access"); //NON-NLS } else if (sqlState.startsWith(SQL_ERROR_RESOURCE_GROUP)) { result = bundle.getString("DatabaseConnectionCheck.ServerDiskSpace"); //NON-NLS } else if (sqlState.startsWith(SQL_ERROR_LIMIT_GROUP)) { result = bundle.getString("DatabaseConnectionCheck.ServerRestart"); //NON-NLS } else if (sqlState.startsWith(SQL_ERROR_INTERNAL_GROUP)) { result = bundle.getString("DatabaseConnectionCheck.InternalServerIssue"); //NON-NLS } else { result = bundle.getString("DatabaseConnectionCheck.Connection"); //NON-NLS } throw new TskCoreException(result); } catch (ClassNotFoundException ex) { throw new TskCoreException(bundle.getString("DatabaseConnectionCheck.Installation")); //NON-NLS } } /** * Private constructor, clients must use newCase() or openCase() method to * create an instance of this class. * * @param dbPath The full path to a SQLite case database file. * @param caseHandle A handle to a case database object in the native code * SleuthKit layer. * @param dbType The type of database we're dealing with * * @throws Exception */ private SleuthkitCase(String dbPath, SleuthkitJNI.CaseDbHandle caseHandle, DbType dbType) throws Exception { Class.forName("org.sqlite.JDBC"); this.dbPath = dbPath; this.dbType = dbType; File dbFile = new File(dbPath); this.caseDirPath = dbFile.getParentFile().getAbsolutePath(); this.databaseName = dbFile.getName(); this.connections = new SQLiteConnections(dbPath); this.caseHandle = caseHandle; this.caseHandleIdentifier = caseHandle.getCaseDbIdentifier(); init(); logSQLiteJDBCDriverInfo(); } /** * Private constructor, clients must use newCase() or openCase() method to * create an instance of this class. 
* * @param host The PostgreSQL database server. * @param port The port to use connect to the PostgreSQL database * server. * @param dbName The name of the case database. * @param userName The user name to use to connect to the case database. * @param password The password to use to connect to the case database. * @param caseHandle A handle to a case database object in the native code * @param dbType The type of database we're dealing with SleuthKit * layer. * @param caseDirPath The path to the root case directory. * * @throws Exception */ private SleuthkitCase(String host, int port, String dbName, String userName, String password, SleuthkitJNI.CaseDbHandle caseHandle, String caseDirPath, DbType dbType) throws Exception { this.dbPath = ""; this.databaseName = dbName; this.dbType = dbType; this.caseDirPath = caseDirPath; this.connections = new PostgreSQLConnections(host, port, dbName, userName, password); this.caseHandle = caseHandle; this.caseHandleIdentifier = caseHandle.getCaseDbIdentifier(); init(); } private void init() throws Exception { typeIdToArtifactTypeMap = new ConcurrentHashMap<>(); typeIdToAttributeTypeMap = new ConcurrentHashMap<>(); typeNameToArtifactTypeMap = new ConcurrentHashMap<>(); typeNameToAttributeTypeMap = new ConcurrentHashMap<>(); /* * The database schema must be updated before loading blackboard * artifact/attribute types */ updateDatabaseSchema(null); initBlackboardArtifactTypes(); initBlackboardAttributeTypes(); initNextArtifactId(); try (CaseDbConnection connection = connections.getConnection()) { initIngestModuleTypes(connection); initIngestStatusTypes(connection); initReviewStatuses(connection); initEncodingTypes(connection); populateHasChildrenMap(connection); updateExaminers(connection); initDBSchemaCreationVersion(connection); } blackboard = new Blackboard(this); fileManager = new FileManager(this); communicationsMgr = new CommunicationsManager(this); timelineMgr = new TimelineManager(this); dbAccessManager = new 
CaseDbAccessManager(this); taggingMgr = new TaggingManager(this); scoringManager = new ScoringManager(this); osAccountRealmManager = new OsAccountRealmManager(this); osAccountManager = new OsAccountManager(this); hostManager = new HostManager(this); personManager = new PersonManager(this); hostAddressManager = new HostAddressManager(this); } /** * Returns a set of core table names in the SleuthKit Case database. * * @return set of core table names */ static Set getCoreTableNames() { return CORE_TABLE_NAMES; } /** * Returns a set of core index names in the SleuthKit case database. * * @return set of core index names */ static Set getCoreIndexNames() { return CORE_INDEX_NAMES; } /** * Use the internal map to determine whether the content object has children * (of any type). * * @param content * * @return true if the content has children, false otherwise */ boolean getHasChildren(Content content) { long objId = content.getId(); long mapIndex = objId / Integer.MAX_VALUE; int mapValue = (int) (objId % Integer.MAX_VALUE); synchronized (hasChildrenBitSetMap) { if (hasChildrenBitSetMap.containsKey(mapIndex)) { return hasChildrenBitSetMap.get(mapIndex).get(mapValue); } return false; } } /** * Add this objId to the list of objects that have children (of any type) * * @param objId */ private void setHasChildren(Long objId) { long mapIndex = objId / Integer.MAX_VALUE; int mapValue = (int) (objId % Integer.MAX_VALUE); synchronized (hasChildrenBitSetMap) { if (hasChildrenBitSetMap.containsKey(mapIndex)) { hasChildrenBitSetMap.get(mapIndex).set(mapValue); } else { SparseBitSet bitSet = new SparseBitSet(); bitSet.set(mapValue); hasChildrenBitSetMap.put(mapIndex, bitSet); } } } /** * Gets the communications manager for this case. * * @return The per case CommunicationsManager object. 
	 *
	 * @throws org.sleuthkit.datamodel.TskCoreException
	 */
	public CommunicationsManager getCommunicationsManager() throws TskCoreException {
		return communicationsMgr;
	}

	/**
	 * Gets the artifacts blackboard for this case.
	 *
	 * @return The per case Blackboard object.
	 */
	public Blackboard getBlackboard() {
		return blackboard;
	}

	/**
	 * Gets the file manager for this case.
	 *
	 * @return The per case FileManager object.
	 */
	public FileManager getFileManager() {
		return fileManager;
	}

	/**
	 * Gets the timeline manager for this case. (Javadoc previously said
	 * "communications manager" — copy/paste error; the method returns the
	 * TimelineManager.)
	 *
	 * @return The per case TimelineManager object.
	 *
	 * @throws org.sleuthkit.datamodel.TskCoreException
	 */
	public TimelineManager getTimelineManager() throws TskCoreException {
		return timelineMgr;
	}

	/**
	 * Gets the case database access manager for this case. (Promoted from a
	 * plain block comment to javadoc so it is published with the API docs.)
	 *
	 * @return The per case CaseDbAccessManager object.
	 *
	 * @throws org.sleuthkit.datamodel.TskCoreException
	 */
	public synchronized CaseDbAccessManager getCaseDbAccessManager() throws TskCoreException {
		return dbAccessManager;
	}

	/**
	 * Get the case database TaggingManager object.
	 *
	 * @return The per case TaggingManager object.
	 */
	public synchronized TaggingManager getTaggingManager() {
		return taggingMgr;
	}

	/**
	 * Gets the scoring manager for this case.
	 *
	 * @return The per case ScoringManager object.
	 *
	 * @throws org.sleuthkit.datamodel.TskCoreException
	 */
	public ScoringManager getScoringManager() throws TskCoreException {
		return scoringManager;
	}

	/**
	 * Gets the OS account realm manager for this case.
	 *
	 * @return The per case OsAccountRealmManager object.
	 *
	 * @throws TskCoreException
	 */
	public OsAccountRealmManager getOsAccountRealmManager() throws TskCoreException {
		return osAccountRealmManager;
	}

	/**
	 * Gets the OS account manager for this case.
	 *
	 * @return The per case OsAccountManager object.
	 *
	 * @throws TskCoreException
	 */
	public OsAccountManager getOsAccountManager() throws TskCoreException {
		return osAccountManager;
	}

	/**
	 * Gets the Hosts manager for this case.
* * @return The per case HostManager object. * * @throws TskCoreException */ public HostManager getHostManager() throws TskCoreException { return hostManager; } /** * Gets the Person manager for this case. * * @return The per case PersonManager object. * * @throws TskCoreException */ public PersonManager getPersonManager() throws TskCoreException { return personManager; } /** * Gets the HostAddress manager for this case. * * @return The per case HostAddressManager object. * * @throws TskCoreException */ public HostAddressManager getHostAddressManager() throws TskCoreException { return hostAddressManager; } /** * Make sure the predefined artifact types are in the artifact types table. * * @throws SQLException * @throws TskCoreException */ private void initBlackboardArtifactTypes() throws SQLException, TskCoreException { acquireSingleUserCaseWriteLock(); try (CaseDbConnection connection = connections.getConnection(); Statement statement = connection.createStatement();) { for (ARTIFACT_TYPE type : ARTIFACT_TYPE.values()) { try { statement.execute("INSERT INTO blackboard_artifact_types (artifact_type_id, type_name, display_name, category_type) VALUES (" + type.getTypeID() + " , '" + type.getLabel() + "', '" + type.getDisplayName() + "' , " + type.getCategory().getID() + ")"); //NON-NLS } catch (SQLException ex) { try (ResultSet resultSet = connection.executeQuery(statement, "SELECT COUNT(*) AS count FROM blackboard_artifact_types WHERE artifact_type_id = '" + type.getTypeID() + "'")) { //NON-NLS resultSet.next(); if (resultSet.getLong("count") == 0) { throw ex; } } } this.typeIdToArtifactTypeMap.put(type.getTypeID(), new BlackboardArtifact.Type(type)); this.typeNameToArtifactTypeMap.put(type.getLabel(), new BlackboardArtifact.Type(type)); } if (dbType == DbType.POSTGRESQL) { int newPrimaryKeyIndex = Collections.max(Arrays.asList(ARTIFACT_TYPE.values())).getTypeID() + 1; statement.execute("ALTER SEQUENCE blackboard_artifact_types_artifact_type_id_seq RESTART WITH " + 
newPrimaryKeyIndex); //NON-NLS } } finally { releaseSingleUserCaseWriteLock(); } } /** * Make sure the predefined artifact attribute types are in the artifact * attribute types table. * * @throws SQLException * @throws TskCoreException */ private void initBlackboardAttributeTypes() throws SQLException, TskCoreException { acquireSingleUserCaseWriteLock(); try (CaseDbConnection connection = connections.getConnection(); Statement statement = connection.createStatement();) { for (ATTRIBUTE_TYPE type : ATTRIBUTE_TYPE.values()) { try { statement.execute("INSERT INTO blackboard_attribute_types (attribute_type_id, type_name, display_name, value_type) VALUES (" + type.getTypeID() + ", '" + type.getLabel() + "', '" + type.getDisplayName() + "', '" + type.getValueType().getType() + "')"); //NON-NLS } catch (SQLException ex) { try (ResultSet resultSet = connection.executeQuery(statement, "SELECT COUNT(*) AS count FROM blackboard_attribute_types WHERE attribute_type_id = '" + type.getTypeID() + "'")) { //NON-NLS resultSet.next(); if (resultSet.getLong("count") == 0) { throw ex; } } } this.typeIdToAttributeTypeMap.put(type.getTypeID(), new BlackboardAttribute.Type(type)); this.typeNameToAttributeTypeMap.put(type.getLabel(), new BlackboardAttribute.Type(type)); } if (this.dbType == DbType.POSTGRESQL) { int newPrimaryKeyIndex = Collections.max(Arrays.asList(ATTRIBUTE_TYPE.values())).getTypeID() + 1; statement.execute("ALTER SEQUENCE blackboard_attribute_types_attribute_type_id_seq RESTART WITH " + newPrimaryKeyIndex); //NON-NLS } } finally { releaseSingleUserCaseWriteLock(); } } /** * Initialize the next artifact id. If there are entries in the * blackboard_artifacts table we will use max(artifact_id) + 1 otherwise we * will initialize the value to 0x8000000000000000 (the maximum negative * signed long). 
* * @throws SQLException * @throws TskCoreException */ private void initNextArtifactId() throws SQLException, TskCoreException { CaseDbConnection connection = null; Statement statement = null; ResultSet resultSet = null; acquireSingleUserCaseReadLock(); try { connection = connections.getConnection(); statement = connection.createStatement(); resultSet = connection.executeQuery(statement, "SELECT MAX(artifact_id) AS max_artifact_id FROM blackboard_artifacts"); //NON-NLS resultSet.next(); this.nextArtifactId = resultSet.getLong("max_artifact_id") + 1; if (this.nextArtifactId == 1) { this.nextArtifactId = BASE_ARTIFACT_ID; } } finally { closeResultSet(resultSet); closeStatement(statement); closeConnection(connection); releaseSingleUserCaseReadLock(); } } /** * Initialize ingest module types by adding them into the * ingest_module_types database. * * @throws SQLException * @throws TskCoreException */ private void initIngestModuleTypes(CaseDbConnection connection) throws SQLException, TskCoreException { Statement statement = null; ResultSet resultSet = null; acquireSingleUserCaseWriteLock(); try { statement = connection.createStatement(); for (IngestModuleType type : IngestModuleType.values()) { try { statement.execute("INSERT INTO ingest_module_types (type_id, type_name) VALUES (" + type.ordinal() + ", '" + type.toString() + "');"); //NON-NLS } catch (SQLException ex) { resultSet = connection.executeQuery(statement, "SELECT COUNT(*) as count FROM ingest_module_types WHERE type_id = " + type.ordinal() + ";"); //NON-NLS resultSet.next(); if (resultSet.getLong("count") == 0) { throw ex; } resultSet.close(); resultSet = null; } } } finally { closeResultSet(resultSet); closeStatement(statement); releaseSingleUserCaseWriteLock(); } } /** * Initialize ingest status types by adding them into the * ingest_job_status_types database. 
* * @throws SQLException * @throws TskCoreException */ private void initIngestStatusTypes(CaseDbConnection connection) throws SQLException, TskCoreException { Statement statement = null; ResultSet resultSet = null; acquireSingleUserCaseWriteLock(); try { statement = connection.createStatement(); for (IngestJobStatusType type : IngestJobStatusType.values()) { try { statement.execute("INSERT INTO ingest_job_status_types (type_id, type_name) VALUES (" + type.ordinal() + ", '" + type.toString() + "');"); //NON-NLS } catch (SQLException ex) { resultSet = connection.executeQuery(statement, "SELECT COUNT(*) as count FROM ingest_job_status_types WHERE type_id = " + type.ordinal() + ";"); //NON-NLS resultSet.next(); if (resultSet.getLong("count") == 0) { throw ex; } resultSet.close(); resultSet = null; } } } finally { closeResultSet(resultSet); closeStatement(statement); releaseSingleUserCaseWriteLock(); } } /** * Initialize the review statuses lookup table from the ReviewStatus enum. * * @throws SQLException * @throws TskCoreException if there is an error initializing the table. 
*/ private void initReviewStatuses(CaseDbConnection connection) throws SQLException, TskCoreException { Statement statement = null; ResultSet resultSet = null; acquireSingleUserCaseWriteLock(); try { statement = connection.createStatement(); for (BlackboardArtifact.ReviewStatus status : BlackboardArtifact.ReviewStatus.values()) { try { statement.execute("INSERT INTO review_statuses (review_status_id, review_status_name, display_name) " //NON-NLS + "VALUES (" + status.getID() + ",'" + status.getName() + "','" + status.getDisplayName() + "')"); //NON-NLS } catch (SQLException ex) { resultSet = connection.executeQuery(statement, "SELECT COUNT(*) as count FROM review_statuses WHERE review_status_id = " + status.getID()); //NON-NLS resultSet.next(); if (resultSet.getLong("count") == 0) { throw ex; } resultSet.close(); resultSet = null; } } } finally { closeResultSet(resultSet); closeStatement(statement); releaseSingleUserCaseWriteLock(); } } /** * Put the file encoding types into the table. This must be called after the * database upgrades or the encoding_types table will not exist. 
* * @throws SQLException * @throws TskCoreException */ private void initEncodingTypes(CaseDbConnection connection) throws SQLException, TskCoreException { Statement statement = null; ResultSet resultSet = null; acquireSingleUserCaseWriteLock(); try { statement = connection.createStatement(); for (TskData.EncodingType type : TskData.EncodingType.values()) { try { statement.execute("INSERT INTO file_encoding_types (encoding_type, name) VALUES (" + type.getType() + " , '" + type.name() + "')"); //NON-NLS } catch (SQLException ex) { resultSet = connection.executeQuery(statement, "SELECT COUNT(*) as count FROM file_encoding_types WHERE encoding_type = " + type.getType()); //NON-NLS resultSet.next(); if (resultSet.getLong("count") == 0) { throw ex; } resultSet.close(); resultSet = null; } } } finally { closeResultSet(resultSet); closeStatement(statement); releaseSingleUserCaseWriteLock(); } } /** * Records the current examiner name in the tsk_examiners table * * @param CaseDbConnection * * @throws SQLException * @throws TskCoreException */ private void updateExaminers(CaseDbConnection connection) throws SQLException, TskCoreException { String loginName = System.getProperty("user.name"); if (loginName.isEmpty()) { logger.log(Level.SEVERE, "Cannot determine logged in user name"); return; } acquireSingleUserCaseWriteLock(); try { PreparedStatement statement; switch (getDatabaseType()) { case POSTGRESQL: statement = connection.getPreparedStatement(PREPARED_STATEMENT.INSERT_EXAMINER_POSTGRESQL); break; case SQLITE: statement = connection.getPreparedStatement(PREPARED_STATEMENT.INSERT_EXAMINER_SQLITE); break; default: throw new TskCoreException("Unknown DB Type: " + getDatabaseType().name()); } statement.clearParameters(); statement.setString(1, loginName); connection.executeUpdate(statement); } catch (SQLException ex) { throw new TskCoreException("Error inserting row in tsk_examiners. 
login name: " + loginName, ex); } finally { releaseSingleUserCaseWriteLock(); } } /** * Set up or update the hasChildren map using the tsk_objects table. * * @param connection * * @throws TskCoreException */ private void populateHasChildrenMap(CaseDbConnection connection) throws TskCoreException { long timestamp = System.currentTimeMillis(); Statement statement = null; ResultSet resultSet = null; acquireSingleUserCaseWriteLock(); try { statement = connection.createStatement(); resultSet = statement.executeQuery("select distinct par_obj_id from tsk_objects"); //NON-NLS synchronized (hasChildrenBitSetMap) { while (resultSet.next()) { setHasChildren(resultSet.getLong("par_obj_id")); } } long delay = System.currentTimeMillis() - timestamp; logger.log(Level.INFO, "Time to initialize parent node cache: {0} ms", delay); //NON-NLS } catch (SQLException ex) { throw new TskCoreException("Error populating parent node cache", ex); } finally { closeResultSet(resultSet); closeStatement(statement); releaseSingleUserCaseWriteLock(); } } /** * Add the object IDs for a new data source to the has children map. At * present, we simply reload the entire table. * * @throws TskCoreException */ void addDataSourceToHasChildrenMap() throws TskCoreException { CaseDbConnection connection = connections.getConnection(); try { populateHasChildrenMap(connection); } finally { closeConnection(connection); } } /** * Modify the case database to bring it up-to-date with the current version * of the database schema. * * @param dbPath Path to the db file. If dbPath is null, no backup will be * made. 
* * @throws Exception */ private void updateDatabaseSchema(String dbPath) throws Exception { CaseDbConnection connection = null; ResultSet resultSet = null; Statement statement = null; acquireSingleUserCaseWriteLock(); try { connection = connections.getConnection(); connection.beginTransaction(); boolean hasMinorVersion = false; ResultSet columns = connection.getConnection().getMetaData().getColumns(null, null, "tsk_db_info", "schema%"); while (columns.next()) { if (columns.getString("COLUMN_NAME").equals("schema_minor_ver")) { hasMinorVersion = true; } } // Get the schema version number of the case database from the tsk_db_info table. int dbSchemaMajorVersion; int dbSchemaMinorVersion = 0; //schemas before 7 have no minor version , default it to zero. statement = connection.createStatement(); resultSet = connection.executeQuery(statement, "SELECT schema_ver" + (hasMinorVersion ? ", schema_minor_ver" : "") + " FROM tsk_db_info"); //NON-NLS if (resultSet.next()) { dbSchemaMajorVersion = resultSet.getInt("schema_ver"); //NON-NLS if (hasMinorVersion) { //if there is a minor version column, use it, else default to zero. dbSchemaMinorVersion = resultSet.getInt("schema_minor_ver"); //NON-NLS } } else { throw new TskCoreException(); } CaseDbSchemaVersionNumber dbSchemaVersion = new CaseDbSchemaVersionNumber(dbSchemaMajorVersion, dbSchemaMinorVersion); resultSet.close(); resultSet = null; statement.close(); statement = null; //check schema compatibility if (false == CURRENT_DB_SCHEMA_VERSION.isCompatible(dbSchemaVersion)) { //we cannot open a db with a major schema version higher than the current one. throw new TskUnsupportedSchemaVersionException( "Unsupported DB schema version " + dbSchemaVersion + ", the highest supported schema version is " + CURRENT_DB_SCHEMA_VERSION.getMajor() + ".X"); } else if (dbSchemaVersion.compareTo(CURRENT_DB_SCHEMA_VERSION) < 0) { //The schema version is compatible,possibly after upgrades. 
if (null != dbPath) { // Make a backup copy of the database. Client code can get the path of the backup // using the getBackupDatabasePath() method. String backupFilePath = dbPath + ".schemaVer" + dbSchemaVersion.toString() + ".backup"; //NON-NLS copyCaseDB(backupFilePath); dbBackupPath = backupFilePath; } // ***CALL SCHEMA UPDATE METHODS HERE*** // Each method should examine the schema version passed to it and either: // a. do nothing and return the schema version unchanged, or // b. upgrade the database and return the schema version that the db was upgraded to. dbSchemaVersion = updateFromSchema2toSchema3(dbSchemaVersion, connection); dbSchemaVersion = updateFromSchema3toSchema4(dbSchemaVersion, connection); dbSchemaVersion = updateFromSchema4toSchema5(dbSchemaVersion, connection); dbSchemaVersion = updateFromSchema5toSchema6(dbSchemaVersion, connection); dbSchemaVersion = updateFromSchema6toSchema7(dbSchemaVersion, connection); dbSchemaVersion = updateFromSchema7toSchema7dot1(dbSchemaVersion, connection); dbSchemaVersion = updateFromSchema7dot1toSchema7dot2(dbSchemaVersion, connection); dbSchemaVersion = updateFromSchema7dot2toSchema8dot0(dbSchemaVersion, connection); dbSchemaVersion = updateFromSchema8dot0toSchema8dot1(dbSchemaVersion, connection); dbSchemaVersion = updateFromSchema8dot1toSchema8dot2(dbSchemaVersion, connection); dbSchemaVersion = updateFromSchema8dot2toSchema8dot3(dbSchemaVersion, connection); dbSchemaVersion = updateFromSchema8dot3toSchema8dot4(dbSchemaVersion, connection); dbSchemaVersion = updateFromSchema8dot4toSchema8dot5(dbSchemaVersion, connection); dbSchemaVersion = updateFromSchema8dot5toSchema8dot6(dbSchemaVersion, connection); dbSchemaVersion = updateFromSchema8dot6toSchema9dot0(dbSchemaVersion, connection); dbSchemaVersion = updateFromSchema9dot0toSchema9dot1(dbSchemaVersion, connection); statement = connection.createStatement(); connection.executeUpdate(statement, "UPDATE tsk_db_info SET schema_ver = " + dbSchemaVersion.getMajor() 
+ ", schema_minor_ver = " + dbSchemaVersion.getMinor()); //NON-NLS connection.executeUpdate(statement, "UPDATE tsk_db_info_extended SET value = " + dbSchemaVersion.getMajor() + " WHERE name = '" + SCHEMA_MAJOR_VERSION_KEY + "'"); //NON-NLS connection.executeUpdate(statement, "UPDATE tsk_db_info_extended SET value = " + dbSchemaVersion.getMinor() + " WHERE name = '" + SCHEMA_MINOR_VERSION_KEY + "'"); //NON-NLS statement.close(); statement = null; } connection.commitTransaction(); } catch (Exception ex) { // Cannot do exception multi-catch in Java 6, so use catch-all. rollbackTransaction(connection); throw ex; } finally { closeResultSet(resultSet); closeStatement(statement); closeConnection(connection); releaseSingleUserCaseWriteLock(); } } /** * Get the database schema creation version from database. This must be * called after the database upgrades or the tsk_db_info_extended table may * not exist. * * @throws SQLException */ private void initDBSchemaCreationVersion(CaseDbConnection connection) throws SQLException { Statement statement = null; ResultSet resultSet = null; String createdSchemaMajorVersion = "0"; String createdSchemaMinorVersion = "0"; acquireSingleUserCaseReadLock(); try { statement = connection.createStatement(); resultSet = connection.executeQuery(statement, "SELECT name, value FROM tsk_db_info_extended"); while (resultSet.next()) { String name = resultSet.getString("name"); if (name.equals(CREATION_SCHEMA_MAJOR_VERSION_KEY) || name.equals("CREATED_SCHEMA_MAJOR_VERSION")) { createdSchemaMajorVersion = resultSet.getString("value"); } else if (name.equals(CREATION_SCHEMA_MINOR_VERSION_KEY) || name.equals("CREATED_SCHEMA_MINOR_VERSION")) { createdSchemaMinorVersion = resultSet.getString("value"); } } } finally { closeResultSet(resultSet); closeStatement(statement); releaseSingleUserCaseReadLock(); } caseDBSchemaCreationVersion = new CaseDbSchemaVersionNumber(Integer.parseInt(createdSchemaMajorVersion), Integer.parseInt(createdSchemaMinorVersion)); } 
/** * Make a duplicate / backup copy of the current case database. Makes a new * copy only, and continues to use the current connection. * * @param newDBPath Path to the copy to be created. File will be overwritten * if it exists. * * @throws IOException if copying fails. */ public void copyCaseDB(String newDBPath) throws IOException { if (dbPath.isEmpty()) { throw new IOException("Copying case database files is not supported for this type of case database"); //NON-NLS } InputStream in = null; OutputStream out = null; acquireSingleUserCaseWriteLock(); try { InputStream inFile = new FileInputStream(dbPath); in = new BufferedInputStream(inFile); OutputStream outFile = new FileOutputStream(newDBPath); out = new BufferedOutputStream(outFile); int bytesRead = in.read(); while (bytesRead != -1) { out.write(bytesRead); bytesRead = in.read(); } } finally { try { if (in != null) { in.close(); } if (out != null) { out.flush(); out.close(); } } catch (IOException e) { logger.log(Level.WARNING, "Could not close streams after db copy", e); //NON-NLS } releaseSingleUserCaseWriteLock(); } } /** * Write some SQLite JDBC driver details to the log file. */ private void logSQLiteJDBCDriverInfo() { try { SleuthkitCase.logger.info(String.format("sqlite-jdbc version %s loaded in %s mode", //NON-NLS SQLiteJDBCLoader.getVersion(), SQLiteJDBCLoader.isNativeMode() ? "native" : "pure-java")); //NON-NLS } catch (Exception ex) { SleuthkitCase.logger.log(Level.SEVERE, "Error querying case database mode", ex); } } /** * Updates a schema version 2 database to a schema version 3 database. * * @param schemaVersion The current schema version of the database. * @param connection A connection to the case database. * * @return The new database schema version. * * @throws SQLException If there is an error completing a database * operation. * @throws TskCoreException If there is an error completing a database * operation via another SleuthkitCase method. 
 */
@SuppressWarnings("deprecation")
private CaseDbSchemaVersionNumber updateFromSchema2toSchema3(CaseDbSchemaVersionNumber schemaVersion, CaseDbConnection connection) throws SQLException, TskCoreException {
	if (schemaVersion.getMajor() != 2) {
		return schemaVersion;
	}
	Statement statement = null;
	Statement statement2 = null;
	Statement updateStatement = null;
	ResultSet resultSet = null;
	acquireSingleUserCaseWriteLock();
	try {
		statement = connection.createStatement();
		statement2 = connection.createStatement();

		// Add new tables for tags.
		statement.execute("CREATE TABLE tag_names (tag_name_id INTEGER PRIMARY KEY, display_name TEXT UNIQUE, description TEXT NOT NULL, color TEXT NOT NULL)"); //NON-NLS
		statement.execute("CREATE TABLE content_tags (tag_id INTEGER PRIMARY KEY, obj_id INTEGER NOT NULL, tag_name_id INTEGER NOT NULL, comment TEXT NOT NULL, begin_byte_offset INTEGER NOT NULL, end_byte_offset INTEGER NOT NULL)"); //NON-NLS
		statement.execute("CREATE TABLE blackboard_artifact_tags (tag_id INTEGER PRIMARY KEY, artifact_id INTEGER NOT NULL, tag_name_id INTEGER NOT NULL, comment TEXT NOT NULL)"); //NON-NLS

		// Add a new table for reports.
		statement.execute("CREATE TABLE reports (report_id INTEGER PRIMARY KEY, path TEXT NOT NULL, crtime INTEGER NOT NULL, src_module_name TEXT NOT NULL, report_name TEXT NOT NULL)"); //NON-NLS

		// Add new columns to the image info table.
		statement.execute("ALTER TABLE tsk_image_info ADD COLUMN size INTEGER;"); //NON-NLS
		statement.execute("ALTER TABLE tsk_image_info ADD COLUMN md5 TEXT;"); //NON-NLS
		statement.execute("ALTER TABLE tsk_image_info ADD COLUMN display_name TEXT;"); //NON-NLS

		// Add a new column to the file system info table.
		statement.execute("ALTER TABLE tsk_fs_info ADD COLUMN display_name TEXT;"); //NON-NLS

		// Add a new column to the file table.
		statement.execute("ALTER TABLE tsk_files ADD COLUMN meta_seq INTEGER;"); //NON-NLS

		// Add new columns and indexes to the attributes table and populate the
		// new column. Note that addition of the new column is a denormalization
		// to optimize attribute queries.
		statement.execute("ALTER TABLE blackboard_attributes ADD COLUMN artifact_type_id INTEGER NULL NOT NULL DEFAULT -1;"); //NON-NLS
		statement.execute("CREATE INDEX attribute_artifactTypeId ON blackboard_attributes(artifact_type_id);"); //NON-NLS
		statement.execute("CREATE INDEX attribute_valueText ON blackboard_attributes(value_text);"); //NON-NLS
		statement.execute("CREATE INDEX attribute_valueInt32 ON blackboard_attributes(value_int32);"); //NON-NLS
		statement.execute("CREATE INDEX attribute_valueInt64 ON blackboard_attributes(value_int64);"); //NON-NLS
		statement.execute("CREATE INDEX attribute_valueDouble ON blackboard_attributes(value_double);"); //NON-NLS
		resultSet = statement.executeQuery("SELECT attrs.artifact_id AS artifact_id, " //NON-NLS
				+ "arts.artifact_type_id AS artifact_type_id " //NON-NLS
				+ "FROM blackboard_attributes AS attrs " //NON-NLS
				+ "INNER JOIN blackboard_artifacts AS arts " //NON-NLS
				+ "WHERE attrs.artifact_id = arts.artifact_id;"); //NON-NLS
		updateStatement = connection.createStatement();
		// Copy each attribute's parent artifact type into the new column.
		while (resultSet.next()) {
			long artifactId = resultSet.getLong("artifact_id");
			int artifactTypeId = resultSet.getInt("artifact_type_id");
			updateStatement.executeUpdate(
					"UPDATE blackboard_attributes " //NON-NLS
					+ "SET artifact_type_id = " + artifactTypeId //NON-NLS
					+ " WHERE blackboard_attributes.artifact_id = " + artifactId + ";"); //NON-NLS
		}
		resultSet.close();

		// Convert existing tag artifact and attribute rows to rows in the new tags tables.
		// tagNames maps display name -> assigned tag_name_id (counter mirrors
		// the SQLite rowid assignment starting at 1).
		Map tagNames = new HashMap<>();
		long tagNameCounter = 1;

		// Convert file tags.
		// We need data from the TSK_TAG_NAME and TSK_COMMENT attributes, and need the file size from the tsk_files table.
		resultSet = statement.executeQuery("SELECT * FROM \n"
				+ "(SELECT blackboard_artifacts.obj_id AS objId, blackboard_attributes.artifact_id AS artifactId, blackboard_attributes.value_text AS name\n"
				+ "FROM blackboard_artifacts INNER JOIN blackboard_attributes \n"
				+ "ON blackboard_artifacts.artifact_id = blackboard_attributes.artifact_id \n"
				+ "WHERE blackboard_artifacts.artifact_type_id = "
				+ BlackboardArtifact.ARTIFACT_TYPE.TSK_TAG_FILE.getTypeID()
				+ " AND blackboard_attributes.attribute_type_id = "
				+ BlackboardAttribute.ATTRIBUTE_TYPE.TSK_TAG_NAME.getTypeID()
				+ ") AS tagNames \n"
				+ "INNER JOIN \n"
				+ "(SELECT tsk_files.obj_id as objId2, tsk_files.size AS fileSize \n"
				+ "FROM blackboard_artifacts INNER JOIN tsk_files \n"
				+ "ON blackboard_artifacts.obj_id = tsk_files.obj_id) AS fileData \n"
				+ "ON tagNames.objId = fileData.objId2 \n"
				+ "LEFT JOIN \n"
				+ "(SELECT value_text AS comment, artifact_id AS tagArtifactId FROM blackboard_attributes WHERE attribute_type_id = "
				+ BlackboardAttribute.ATTRIBUTE_TYPE.TSK_COMMENT.getTypeID()
				+ ") AS tagComments \n"
				+ "ON tagNames.artifactId = tagComments.tagArtifactId");
		while (resultSet.next()) {
			long objId = resultSet.getLong("objId");
			long fileSize = resultSet.getLong("fileSize");
			String tagName = resultSet.getString("name");
			String tagComment = resultSet.getString("comment");
			if (tagComment == null) {
				tagComment = "";
			}
			if (tagName != null && !tagName.isEmpty()) {
				// Get the index for the tag name, adding it to the database if needed.
				long tagNameIndex;
				if (tagNames.containsKey(tagName)) {
					tagNameIndex = tagNames.get(tagName);
				} else {
					statement2.execute("INSERT INTO tag_names (display_name, description, color) "
							+ "VALUES(\"" + tagName + "\", \"\", \"None\")");
					tagNames.put(tagName, tagNameCounter);
					tagNameIndex = tagNameCounter;
					tagNameCounter++;
				}
				// A file tag covers the whole file: offsets [0, fileSize].
				statement2.execute("INSERT INTO content_tags (obj_id, tag_name_id, comment, begin_byte_offset, end_byte_offset) "
						+ "VALUES(" + objId + ", " + tagNameIndex + ", \"" + tagComment + "\", 0, " + fileSize + ")");
			}
		}
		resultSet.close();

		// Convert artifact tags.
		// We need data from the TSK_TAG_NAME, TSK_TAGGED_ARTIFACT, and TSK_COMMENT attributes.
		resultSet = statement.executeQuery("SELECT * FROM \n"
				+ "(SELECT blackboard_artifacts.obj_id AS objId, blackboard_attributes.artifact_id AS artifactId, "
				+ "blackboard_attributes.value_text AS name\n"
				+ "FROM blackboard_artifacts INNER JOIN blackboard_attributes \n"
				+ "ON blackboard_artifacts.artifact_id = blackboard_attributes.artifact_id \n"
				+ "WHERE blackboard_artifacts.artifact_type_id = "
				+ BlackboardArtifact.ARTIFACT_TYPE.TSK_TAG_ARTIFACT.getTypeID()
				+ " AND blackboard_attributes.attribute_type_id = "
				+ BlackboardAttribute.ATTRIBUTE_TYPE.TSK_TAG_NAME.getTypeID()
				+ ") AS tagNames \n"
				+ "INNER JOIN \n"
				+ "(SELECT value_int64 AS taggedArtifactId, artifact_id AS associatedArtifactId FROM blackboard_attributes WHERE attribute_type_id = "
				+ BlackboardAttribute.ATTRIBUTE_TYPE.TSK_TAGGED_ARTIFACT.getTypeID()
				+ ") AS tagArtifacts \n"
				+ "ON tagNames.artifactId = tagArtifacts.associatedArtifactId \n"
				+ "LEFT JOIN \n"
				+ "(SELECT value_text AS comment, artifact_id AS commentArtifactId FROM blackboard_attributes WHERE attribute_type_id = "
				+ BlackboardAttribute.ATTRIBUTE_TYPE.TSK_COMMENT.getTypeID()
				+ ") AS tagComments \n"
				+ "ON tagNames.artifactId = tagComments.commentArtifactId");
		while (resultSet.next()) {
			long artifactId = resultSet.getLong("taggedArtifactId");
			String tagName = resultSet.getString("name");
			String tagComment = resultSet.getString("comment");
			if (tagComment == null) {
				tagComment = "";
			}
			if (tagName != null && !tagName.isEmpty()) {
				// Get the index for the tag name, adding it to the database if needed.
				long tagNameIndex;
				if (tagNames.containsKey(tagName)) {
					tagNameIndex = tagNames.get(tagName);
				} else {
					statement2.execute("INSERT INTO tag_names (display_name, description, color) "
							+ "VALUES(\"" + tagName + "\", \"\", \"None\")");
					tagNames.put(tagName, tagNameCounter);
					tagNameIndex = tagNameCounter;
					tagNameCounter++;
				}
				statement2.execute("INSERT INTO blackboard_artifact_tags (artifact_id, tag_name_id, comment) "
						+ "VALUES(" + artifactId + ", " + tagNameIndex + ", \"" + tagComment + "\")");
			}
		}
		resultSet.close();

		// The legacy tag artifacts/attributes have been migrated; delete them.
		statement.execute(
				"DELETE FROM blackboard_attributes WHERE artifact_id IN " //NON-NLS
				+ "(SELECT artifact_id FROM blackboard_artifacts WHERE artifact_type_id = " //NON-NLS
				+ ARTIFACT_TYPE.TSK_TAG_FILE.getTypeID()
				+ " OR artifact_type_id = " + ARTIFACT_TYPE.TSK_TAG_ARTIFACT.getTypeID() + ");"); //NON-NLS
		statement.execute(
				"DELETE FROM blackboard_artifacts WHERE artifact_type_id = " //NON-NLS
				+ ARTIFACT_TYPE.TSK_TAG_FILE.getTypeID()
				+ " OR artifact_type_id = " + ARTIFACT_TYPE.TSK_TAG_ARTIFACT.getTypeID() + ";"); //NON-NLS
		return new CaseDbSchemaVersionNumber(3, 0);
	} finally {
		closeStatement(updateStatement);
		closeResultSet(resultSet);
		closeStatement(statement);
		closeStatement(statement2);
		releaseSingleUserCaseWriteLock();
	}
}

/**
 * Updates a schema version 3 database to a schema version 4 database.
 *
 * @param schemaVersion The current schema version of the database.
 * @param connection    A connection to the case database.
 *
 * @return The new database schema version.
 *
 * @throws SQLException     If there is an error completing a database
 *                          operation.
 * @throws TskCoreException If there is an error completing a database
 *                          operation via another SleuthkitCase method.
 */
private CaseDbSchemaVersionNumber updateFromSchema3toSchema4(CaseDbSchemaVersionNumber schemaVersion, CaseDbConnection connection) throws SQLException, TskCoreException {
	if (schemaVersion.getMajor() != 3) {
		return schemaVersion;
	}
	Statement statement = null;
	ResultSet resultSet = null;
	Statement queryStatement = null;
	ResultSet queryResultSet = null;
	Statement updateStatement = null;
	acquireSingleUserCaseWriteLock();
	try {
		// Add mime_type column to tsk_files table. Populate with general
		// info artifact file signature data.
		statement = connection.createStatement();
		updateStatement = connection.createStatement();
		statement.execute("ALTER TABLE tsk_files ADD COLUMN mime_type TEXT;");
		// NOTE(review): the literals 1 and 62 appear to be the legacy
		// TSK_GEN_INFO artifact type and file-signature attribute type ids —
		// confirm against the type tables before changing.
		resultSet = statement.executeQuery("SELECT files.obj_id AS obj_id, attrs.value_text AS value_text "
				+ "FROM tsk_files AS files, blackboard_attributes AS attrs, blackboard_artifacts AS arts "
				+ "WHERE files.obj_id = arts.obj_id AND "
				+ "arts.artifact_id = attrs.artifact_id AND "
				+ "arts.artifact_type_id = 1 AND "
				+ "attrs.attribute_type_id = 62");
		while (resultSet.next()) {
			updateStatement.executeUpdate(
					"UPDATE tsk_files " //NON-NLS
					+ "SET mime_type = '" + resultSet.getString("value_text") + "' " //NON-NLS
					+ "WHERE tsk_files.obj_id = " + resultSet.getInt("obj_id") + ";"); //NON-NLS
		}
		resultSet.close();

		// Add value_type column to blackboard_attribute_types table.
		statement.execute("ALTER TABLE blackboard_attribute_types ADD COLUMN value_type INTEGER NOT NULL DEFAULT -1;");
		resultSet = statement.executeQuery("SELECT * FROM blackboard_attribute_types AS types"); //NON-NLS
		while (resultSet.next()) {
			int attributeTypeId = resultSet.getInt("attribute_type_id");
			String attributeLabel = resultSet.getString("type_name");
			// Only standard (non-user-defined) types have a known value type.
			if (attributeTypeId < MIN_USER_DEFINED_TYPE_ID) {
				updateStatement.executeUpdate(
						"UPDATE blackboard_attribute_types " //NON-NLS
						+ "SET value_type = " + ATTRIBUTE_TYPE.fromLabel(attributeLabel).getValueType().getType() + " " //NON-NLS
						+ "WHERE blackboard_attribute_types.attribute_type_id = " + attributeTypeId + ";"); //NON-NLS
			}
		}
		resultSet.close();

		// Add a data_sources_info table.
		queryStatement = connection.createStatement();
		statement.execute("CREATE TABLE data_source_info (obj_id INTEGER PRIMARY KEY, device_id TEXT NOT NULL, time_zone TEXT NOT NULL, FOREIGN KEY(obj_id) REFERENCES tsk_objects(obj_id));");
		// Objects with no parent are data sources; give each a fresh device id.
		resultSet = statement.executeQuery("SELECT * FROM tsk_objects WHERE par_obj_id IS NULL");
		while (resultSet.next()) {
			long objectId = resultSet.getLong("obj_id");
			String timeZone = "";
			queryResultSet = queryStatement.executeQuery("SELECT tzone FROM tsk_image_info WHERE obj_id = " + objectId);
			if (queryResultSet.next()) {
				timeZone = queryResultSet.getString("tzone");
			}
			queryResultSet.close();
			updateStatement.executeUpdate("INSERT INTO data_source_info (obj_id, device_id, time_zone) "
					+ "VALUES(" + objectId + ", '" + UUID.randomUUID().toString() + "' , '" + timeZone + "');");
		}
		resultSet.close();

		// Add data_source_obj_id column to the tsk_files table.
		//
		// NOTE: A new case database will have the following FK constraint:
		//
		// REFERENCES data_source_info (obj_id)
		//
		// The constraint is sacrificed here to avoid having to create and
		// populate a new tsk_files table.
		//
		// TODO: Do this right.
		statement.execute("ALTER TABLE tsk_files ADD COLUMN data_source_obj_id BIGINT NOT NULL DEFAULT -1;");
		resultSet = statement.executeQuery("SELECT tsk_files.obj_id AS obj_id, par_obj_id FROM tsk_files, tsk_objects WHERE tsk_files.obj_id = tsk_objects.obj_id");
		while (resultSet.next()) {
			long fileId = resultSet.getLong("obj_id");
			long dataSourceId = getDataSourceObjectId(connection, fileId);
			updateStatement.executeUpdate("UPDATE tsk_files SET data_source_obj_id = " + dataSourceId + " WHERE obj_id = " + fileId + ";");
		}
		resultSet.close();

		// Add ingest job bookkeeping tables (id columns differ per DB type).
		statement.execute("CREATE TABLE ingest_module_types (type_id INTEGER PRIMARY KEY, type_name TEXT NOT NULL)"); //NON-NLS
		statement.execute("CREATE TABLE ingest_job_status_types (type_id INTEGER PRIMARY KEY, type_name TEXT NOT NULL)"); //NON-NLS
		if (this.dbType.equals(DbType.SQLITE)) {
			statement.execute("CREATE TABLE ingest_modules (ingest_module_id INTEGER PRIMARY KEY, display_name TEXT NOT NULL, unique_name TEXT UNIQUE NOT NULL, type_id INTEGER NOT NULL, version TEXT NOT NULL, FOREIGN KEY(type_id) REFERENCES ingest_module_types(type_id));"); //NON-NLS
			statement.execute("CREATE TABLE ingest_jobs (ingest_job_id INTEGER PRIMARY KEY, obj_id BIGINT NOT NULL, host_name TEXT NOT NULL, start_date_time BIGINT NOT NULL, end_date_time BIGINT NOT NULL, status_id INTEGER NOT NULL, settings_dir TEXT, FOREIGN KEY(obj_id) REFERENCES tsk_objects(obj_id), FOREIGN KEY(status_id) REFERENCES ingest_job_status_types(type_id));"); //NON-NLS
		} else {
			statement.execute("CREATE TABLE ingest_modules (ingest_module_id BIGSERIAL PRIMARY KEY, display_name TEXT NOT NULL, unique_name TEXT UNIQUE NOT NULL, type_id INTEGER NOT NULL, version TEXT NOT NULL, FOREIGN KEY(type_id) REFERENCES ingest_module_types(type_id));"); //NON-NLS
			statement.execute("CREATE TABLE ingest_jobs (ingest_job_id BIGSERIAL PRIMARY KEY, obj_id BIGINT NOT NULL, host_name TEXT NOT NULL, start_date_time BIGINT NOT NULL, end_date_time BIGINT NOT NULL, status_id INTEGER NOT NULL, settings_dir TEXT, FOREIGN KEY(obj_id) REFERENCES tsk_objects(obj_id), FOREIGN KEY(status_id) REFERENCES ingest_job_status_types(type_id));"); //NON-NLS
		}
		statement.execute("CREATE TABLE ingest_job_modules (ingest_job_id INTEGER, ingest_module_id INTEGER, pipeline_position INTEGER, PRIMARY KEY(ingest_job_id, ingest_module_id), FOREIGN KEY(ingest_job_id) REFERENCES ingest_jobs(ingest_job_id), FOREIGN KEY(ingest_module_id) REFERENCES ingest_modules(ingest_module_id));"); //NON-NLS
		initIngestModuleTypes(connection);
		initIngestStatusTypes(connection);
		return new CaseDbSchemaVersionNumber(4, 0);
	} finally {
		closeResultSet(queryResultSet);
		closeStatement(queryStatement);
		closeStatement(updateStatement);
		closeResultSet(resultSet);
		closeStatement(statement);
		releaseSingleUserCaseWriteLock();
	}
}

/**
 * Updates a schema version 4 database to a schema version 5 database.
 *
 * @param schemaVersion The current schema version of the database.
 * @param connection    A connection to the case database.
 *
 * @return The new database schema version.
 *
 * @throws SQLException     If there is an error completing a database
 *                          operation.
 * @throws TskCoreException If there is an error completing a database
 *                          operation via another SleuthkitCase method.
 */
private CaseDbSchemaVersionNumber updateFromSchema4toSchema5(CaseDbSchemaVersionNumber schemaVersion, CaseDbConnection connection) throws SQLException, TskCoreException {
	if (schemaVersion.getMajor() != 4) {
		return schemaVersion;
	}
	Statement statement = null;
	acquireSingleUserCaseWriteLock();
	try {
		// Add the review_statuses lookup table.
		statement = connection.createStatement();
		statement.execute("CREATE TABLE review_statuses (review_status_id INTEGER PRIMARY KEY, review_status_name TEXT NOT NULL, display_name TEXT NOT NULL)");
		/*
		 * Add review_status_id column to artifacts table.
		 *
		 * NOTE: For DBs created with schema 5 we define a foreign key
		 * constraint on the review_status_column. We don't bother with this
		 * for DBs updated to schema 5 because of limitations of the SQLite
		 * ALTER TABLE command.
		 */
		statement.execute("ALTER TABLE blackboard_artifacts ADD COLUMN review_status_id INTEGER NOT NULL DEFAULT " + BlackboardArtifact.ReviewStatus.UNDECIDED.getID());

		// Add the encoding table
		statement.execute("CREATE TABLE file_encoding_types (encoding_type INTEGER PRIMARY KEY, name TEXT NOT NULL);");
		initEncodingTypes(connection);

		/*
		 * This needs to be done due to a Autopsy/TSK out of synch problem.
		 * Without this, it is possible to upgrade from version 4 to 5 and
		 * then 5 to 6, but not from 4 to 6.
		 */
		initReviewStatuses(connection);

		// Add encoding type column to tsk_files_path
		// This should really have the FOREIGN KEY constraint but there are problems
		// getting that to work, so we don't add it on this upgrade path.
		statement.execute("ALTER TABLE tsk_files_path ADD COLUMN encoding_type INTEGER NOT NULL DEFAULT 0;");
		return new CaseDbSchemaVersionNumber(5, 0);
	} finally {
		closeStatement(statement);
		releaseSingleUserCaseWriteLock();
	}
}

/**
 * Updates a schema version 5 database to a schema version 6 database.
 *
 * @param schemaVersion The current schema version of the database.
 * @param connection    A connection to the case database.
 *
 * @return The new database schema version.
 *
 * @throws SQLException     If there is an error completing a database
 *                          operation.
 * @throws TskCoreException If there is an error completing a database
 *                          operation via another SleuthkitCase method.
 */
private CaseDbSchemaVersionNumber updateFromSchema5toSchema6(CaseDbSchemaVersionNumber schemaVersion, CaseDbConnection connection) throws SQLException, TskCoreException {
	if (schemaVersion.getMajor() != 5) {
		return schemaVersion;
	}
	/*
	 * This upgrade fixes a bug where some releases had artifact review
	 * status support in the case database and others did not.
	 */
	Statement statement = null;
	ResultSet resultSet = null;
	acquireSingleUserCaseWriteLock();
	try {
		/*
		 * Add the review_statuses lookup table, if missing.
		 */
		statement = connection.createStatement();
		statement.execute("CREATE TABLE IF NOT EXISTS review_statuses (review_status_id INTEGER PRIMARY KEY, review_status_name TEXT NOT NULL, display_name TEXT NOT NULL)");
		// An empty lookup table means the review-status support (including the
		// artifacts column) was never added by the 4-to-5 upgrade.
		resultSet = connection.executeQuery(statement, "SELECT COUNT(*) AS count FROM review_statuses"); //NON-NLS
		resultSet.next();
		if (resultSet.getLong("count") == 0) {
			/*
			 * Add review_status_id column to artifacts table.
			 *
			 * NOTE: For DBs created with schema 5 or 6 we define a foreign
			 * key constraint on the review_status_column. We don't bother
			 * with this for DBs updated to schema 5 or 6 because of
			 * limitations of the SQLite ALTER TABLE command.
			 */
			statement.execute("ALTER TABLE blackboard_artifacts ADD COLUMN review_status_id INTEGER NOT NULL DEFAULT " + BlackboardArtifact.ReviewStatus.UNDECIDED.getID());
		}
		return new CaseDbSchemaVersionNumber(6, 0);
	} finally {
		closeResultSet(resultSet);
		closeStatement(statement);
		releaseSingleUserCaseWriteLock();
	}
}

/**
 * Updates a schema version 6 database to a schema version 7 database.
 *
 * @param schemaVersion The current schema version of the database.
 * @param connection    A connection to the case database.
 *
 * @return The new database schema version.
 *
 * @throws SQLException     If there is an error completing a database
 *                          operation.
 * @throws TskCoreException If there is an error completing a database
 *                          operation via another SleuthkitCase method.
 */
private CaseDbSchemaVersionNumber updateFromSchema6toSchema7(CaseDbSchemaVersionNumber schemaVersion, CaseDbConnection connection) throws SQLException, TskCoreException {
	if (schemaVersion.getMajor() != 6) {
		return schemaVersion;
	}
	/*
	 * This upgrade adds an indexed extension column to the tsk_files table.
	 */
	Statement statement = null;
	Statement updstatement = null;
	ResultSet resultSet = null;
	acquireSingleUserCaseWriteLock();
	try {
		statement = connection.createStatement();
		updstatement = connection.createStatement();
		statement.execute("ALTER TABLE tsk_files ADD COLUMN extension TEXT");
		resultSet = connection.executeQuery(statement, "SELECT obj_id,name FROM tsk_files"); //NON-NLS
		// Derive the extension for every existing file row.
		while (resultSet.next()) {
			long objID = resultSet.getLong("obj_id");
			String name = resultSet.getString("name");
			updstatement.executeUpdate("UPDATE tsk_files SET extension = '" + escapeSingleQuotes(extractExtension(name)) + "' "
					+ "WHERE obj_id = " + objID);
		}
		statement.execute("CREATE INDEX file_extension ON tsk_files ( extension )");
		// Add artifact_obj_id column to blackboard_artifacts table, data conversion for old versions isn't necessary.
		statement.execute("ALTER TABLE blackboard_artifacts ADD COLUMN artifact_obj_id INTEGER NOT NULL DEFAULT -1");
		return new CaseDbSchemaVersionNumber(7, 0);
	} finally {
		closeResultSet(resultSet);
		closeStatement(statement);
		closeStatement(updstatement);
		releaseSingleUserCaseWriteLock();
	}
}

/**
 * Updates a schema version 7 database to a schema version 7.1 database.
 *
 * @param schemaVersion The current schema version of the database.
 * @param connection    A connection to the case database.
 *
 * @return The new database schema version.
 *
 * @throws SQLException     If there is an error completing a database
 *                          operation.
 * @throws TskCoreException If there is an error completing a database
 *                          operation via another SleuthkitCase method.
 */
private CaseDbSchemaVersionNumber updateFromSchema7toSchema7dot1(CaseDbSchemaVersionNumber schemaVersion, CaseDbConnection connection) throws SQLException, TskCoreException {
	if (schemaVersion.getMajor() != 7) {
		return schemaVersion;
	}
	if (schemaVersion.getMinor() != 0) {
		return schemaVersion;
	}
	/*
	 * This upgrade adds a minor version number column.
	 */
	Statement statement = null;
	ResultSet resultSet = null;
	acquireSingleUserCaseWriteLock();
	try {
		statement = connection.createStatement();
		//add the schema minor version number column.
		if (schemaVersion.getMinor() == 0) {
			//add the schema minor version number column.
			statement.execute("ALTER TABLE tsk_db_info ADD COLUMN schema_minor_ver INTEGER DEFAULT 1");
		}
		return new CaseDbSchemaVersionNumber(7, 1);
	} finally {
		closeResultSet(resultSet);
		closeStatement(statement);
		releaseSingleUserCaseWriteLock();
	}
}

/**
 * Updates a schema version 7.1 database to a schema version 7.2 database.
 *
 * @param schemaVersion The current schema version of the database.
 * @param connection    A connection to the case database.
 *
 * @return The new database schema version.
 *
 * @throws SQLException     If there is an error completing a database
 *                          operation.
 * @throws TskCoreException If there is an error completing a database
 *                          operation via another SleuthkitCase method.
 */
private CaseDbSchemaVersionNumber updateFromSchema7dot1toSchema7dot2(CaseDbSchemaVersionNumber schemaVersion, CaseDbConnection connection) throws SQLException, TskCoreException {
	if (schemaVersion.getMajor() != 7) {
		return schemaVersion;
	}
	if (schemaVersion.getMinor() != 1) {
		return schemaVersion;
	}
	Statement statement = null;
	Statement updstatement = null;
	ResultSet resultSet = null;
	acquireSingleUserCaseWriteLock();
	try {
		//add the data_source_obj_id column to blackboard_artifacts.
statement = connection.createStatement();
statement.execute("ALTER TABLE blackboard_artifacts ADD COLUMN data_source_obj_id INTEGER NOT NULL DEFAULT -1");

// populate data_source_obj_id for each artifact
updstatement = connection.createStatement();
resultSet = connection.executeQuery(statement, "SELECT artifact_id, obj_id FROM blackboard_artifacts"); //NON-NLS
while (resultSet.next()) {
	long artifact_id = resultSet.getLong("artifact_id");
	long obj_id = resultSet.getLong("obj_id");
	// Resolve the artifact's source object to its owning data source.
	long data_source_obj_id = getDataSourceObjectId(connection, obj_id);
	updstatement.executeUpdate("UPDATE blackboard_artifacts SET data_source_obj_id = " + data_source_obj_id + " " + "WHERE artifact_id = " + artifact_id);
}
closeResultSet(resultSet);
closeStatement(statement);
closeStatement(updstatement);

/*
 * Add a knownStatus column to the tag_names table.
 */
statement = connection.createStatement();
statement.execute("ALTER TABLE tag_names ADD COLUMN knownStatus INTEGER NOT NULL DEFAULT " + TskData.FileKnown.UNKNOWN.getFileKnownValue());

// Create account_types, accounts, and account_relationships table
if (this.dbType.equals(DbType.SQLITE)) {
	statement.execute("CREATE TABLE account_types (account_type_id INTEGER PRIMARY KEY, type_name TEXT UNIQUE NOT NULL, display_name TEXT NOT NULL)");
	statement.execute("CREATE TABLE accounts (account_id INTEGER PRIMARY KEY, account_type_id INTEGER NOT NULL, account_unique_identifier TEXT NOT NULL, UNIQUE(account_type_id, account_unique_identifier) , FOREIGN KEY(account_type_id) REFERENCES account_types(account_type_id))");
	statement.execute("CREATE TABLE account_relationships (relationship_id INTEGER PRIMARY KEY, account1_id INTEGER NOT NULL, account2_id INTEGER NOT NULL, relationship_source_obj_id INTEGER NOT NULL, date_time INTEGER, relationship_type INTEGER NOT NULL, data_source_obj_id INTEGER NOT NULL, UNIQUE(account1_id, account2_id, relationship_source_obj_id), FOREIGN KEY(account1_id) REFERENCES accounts(account_id), FOREIGN KEY(account2_id) REFERENCES accounts(account_id), FOREIGN KEY(relationship_source_obj_id) REFERENCES tsk_objects(obj_id), FOREIGN KEY(data_source_obj_id) REFERENCES tsk_objects(obj_id))");
} else {
	// PostgreSQL variant: BIGSERIAL primary keys, BIGINT timestamps.
	statement.execute("CREATE TABLE account_types (account_type_id BIGSERIAL PRIMARY KEY, type_name TEXT UNIQUE NOT NULL, display_name TEXT NOT NULL)");
	statement.execute("CREATE TABLE accounts (account_id BIGSERIAL PRIMARY KEY, account_type_id INTEGER NOT NULL, account_unique_identifier TEXT NOT NULL, UNIQUE(account_type_id, account_unique_identifier) , FOREIGN KEY(account_type_id) REFERENCES account_types(account_type_id))");
	statement.execute("CREATE TABLE account_relationships (relationship_id BIGSERIAL PRIMARY KEY, account1_id INTEGER NOT NULL, account2_id INTEGER NOT NULL, relationship_source_obj_id INTEGER NOT NULL, date_time BIGINT, relationship_type INTEGER NOT NULL, data_source_obj_id INTEGER NOT NULL, UNIQUE(account1_id, account2_id, relationship_source_obj_id), FOREIGN KEY(account1_id) REFERENCES accounts(account_id), FOREIGN KEY(account2_id) REFERENCES accounts(account_id), FOREIGN KEY(relationship_source_obj_id) REFERENCES tsk_objects(obj_id), FOREIGN KEY(data_source_obj_id) REFERENCES tsk_objects(obj_id))");
}

// Create indexes
statement.execute("CREATE INDEX artifact_artifact_objID ON blackboard_artifacts(artifact_obj_id)");
statement.execute("CREATE INDEX relationships_account1 ON account_relationships(account1_id)");
statement.execute("CREATE INDEX relationships_account2 ON account_relationships(account2_id)");
statement.execute("CREATE INDEX relationships_relationship_source_obj_id ON account_relationships(relationship_source_obj_id)");
statement.execute("CREATE INDEX relationships_date_time ON account_relationships(date_time)");
statement.execute("CREATE INDEX relationships_relationship_type ON account_relationships(relationship_type)");
statement.execute("CREATE INDEX relationships_data_source_obj_id ON account_relationships(data_source_obj_id)");
return new
CaseDbSchemaVersionNumber(7, 2);
} finally {
	closeResultSet(resultSet);
	closeStatement(statement);
	closeStatement(updstatement);
	releaseSingleUserCaseWriteLock();
}
}

/**
 * Updates a schema version 7.2 database to a schema version 8.0 database.
 *
 * @param schemaVersion The current schema version of the database.
 * @param connection    A connection to the case database.
 *
 * @return The new database schema version.
 *
 * @throws SQLException     If there is an error completing a database
 *                          operation.
 * @throws TskCoreException If there is an error completing a database
 *                          operation via another SleuthkitCase method.
 */
private CaseDbSchemaVersionNumber updateFromSchema7dot2toSchema8dot0(CaseDbSchemaVersionNumber schemaVersion, CaseDbConnection connection) throws SQLException, TskCoreException {
	// No-op unless the database is at exactly version 7.2.
	if (schemaVersion.getMajor() != 7) {
		return schemaVersion;
	}
	if (schemaVersion.getMinor() != 2) {
		return schemaVersion;
	}

	Statement updateSchemaStatement = connection.createStatement();
	Statement getExistingReportsStatement = connection.createStatement();
	ResultSet resultSet = null;
	ResultSet existingReports = null;
	acquireSingleUserCaseWriteLock();
	try {
		// Update the schema to turn report_id into an object id.
		// Unfortunately, SQLite doesn't support adding a constraint
		// to an existing table so we have to rename the old...
		updateSchemaStatement.execute("ALTER TABLE reports RENAME TO old_reports");

		// ...create the new...
		updateSchemaStatement.execute("CREATE TABLE reports (obj_id BIGSERIAL PRIMARY KEY, path TEXT NOT NULL, crtime INTEGER NOT NULL, src_module_name TEXT NOT NULL, report_name TEXT NOT NULL, FOREIGN KEY(obj_id) REFERENCES tsk_objects(obj_id))");

		// ...add the existing report records back...
		existingReports = getExistingReportsStatement.executeQuery("SELECT * FROM old_reports");
		while (existingReports.next()) {
			// Positional reads from old_reports: 2 = path, 3 = crtime, 4 = src_module_name, 5 = report_name.
			String path = existingReports.getString(2);
			// NOTE(review): getInt truncates to 32 bits before widening to long —
			// presumably crtime fits in an int here; consider getLong. TODO confirm.
			long crtime = existingReports.getInt(3);
			String sourceModule = existingReports.getString(4);
			String reportName = existingReports.getString(5);

			// Allocate a tsk_objects row so the report gets a real object id.
			PreparedStatement insertObjectStatement = connection.getPreparedStatement(PREPARED_STATEMENT.INSERT_OBJECT, Statement.RETURN_GENERATED_KEYS);
			insertObjectStatement.clearParameters();
			insertObjectStatement.setNull(1, java.sql.Types.BIGINT);
			insertObjectStatement.setLong(2, TskData.ObjectType.REPORT.getObjectType());
			connection.executeUpdate(insertObjectStatement);
			resultSet = insertObjectStatement.getGeneratedKeys();
			if (!resultSet.next()) {
				throw new TskCoreException(String.format("Failed to INSERT report %s (%s) in tsk_objects table", reportName, path));
			}
			long objectId = resultSet.getLong(1); //last_insert_rowid()

			// INSERT INTO reports (obj_id, path, crtime, src_module_name, display_name) VALUES (?, ?, ?, ?, ?)
			PreparedStatement insertReportStatement = connection.getPreparedStatement(PREPARED_STATEMENT.INSERT_REPORT);
			insertReportStatement.clearParameters();
			insertReportStatement.setLong(1, objectId);
			insertReportStatement.setString(2, path);
			insertReportStatement.setLong(3, crtime);
			insertReportStatement.setString(4, sourceModule);
			insertReportStatement.setString(5, reportName);
			connection.executeUpdate(insertReportStatement);
		}

		// ...and drop the old table.
		updateSchemaStatement.execute("DROP TABLE old_reports");

		return new CaseDbSchemaVersionNumber(8, 0);
	} finally {
		closeResultSet(resultSet);
		closeResultSet(existingReports);
		closeStatement(updateSchemaStatement);
		closeStatement(getExistingReportsStatement);
		releaseSingleUserCaseWriteLock();
	}
}

/**
 * Updates a schema version 8.0 database to a schema version 8.1 database.
 *
 * @param schemaVersion The current schema version of the database.
 * @param connection    A connection to the case database.
*
* @return The new database schema version.
*
* @throws SQLException     If there is an error completing a database
*                          operation.
* @throws TskCoreException If there is an error completing a database
*                          operation via another SleuthkitCase method.
*/
private CaseDbSchemaVersionNumber updateFromSchema8dot0toSchema8dot1(CaseDbSchemaVersionNumber schemaVersion, CaseDbConnection connection) throws SQLException, TskCoreException {
	// No-op unless the database is at exactly version 8.0.
	if (schemaVersion.getMajor() != 8) {
		return schemaVersion;
	}
	if (schemaVersion.getMinor() != 0) {
		return schemaVersion;
	}

	acquireSingleUserCaseWriteLock();
	try (Statement statement = connection.createStatement();) {
		// create examiners table
		if (this.dbType.equals(DbType.SQLITE)) {
			statement.execute("CREATE TABLE tsk_examiners (examiner_id INTEGER PRIMARY KEY, login_name TEXT NOT NULL, display_name TEXT, UNIQUE(login_name) )");
			statement.execute("ALTER TABLE content_tags ADD COLUMN examiner_id INTEGER REFERENCES tsk_examiners(examiner_id) DEFAULT NULL");
			statement.execute("ALTER TABLE blackboard_artifact_tags ADD COLUMN examiner_id INTEGER REFERENCES tsk_examiners(examiner_id) DEFAULT NULL");
		} else {
			// PostgreSQL variant: BIGSERIAL / BIGINT key types.
			statement.execute("CREATE TABLE tsk_examiners (examiner_id BIGSERIAL PRIMARY KEY, login_name TEXT NOT NULL, display_name TEXT, UNIQUE(login_name))");
			statement.execute("ALTER TABLE content_tags ADD COLUMN examiner_id BIGINT REFERENCES tsk_examiners(examiner_id) DEFAULT NULL");
			statement.execute("ALTER TABLE blackboard_artifact_tags ADD COLUMN examiner_id BIGINT REFERENCES tsk_examiners(examiner_id) DEFAULT NULL");
		}
		return new CaseDbSchemaVersionNumber(8, 1);
	} finally {
		releaseSingleUserCaseWriteLock();
	}
}

/**
 * Updates a schema version 8.1 database to a schema version 8.2 database.
 *
 * @param schemaVersion The current schema version of the database.
 * @param connection    A connection to the case database.
 *
 * @return The new database schema version.
 *
 * @throws SQLException If there is an error completing a database
 *                      operation.
* @throws TskCoreException If there is an error completing a database
*                          operation via another SleuthkitCase method.
*/
private CaseDbSchemaVersionNumber updateFromSchema8dot1toSchema8dot2(CaseDbSchemaVersionNumber schemaVersion, CaseDbConnection connection) throws SQLException, TskCoreException {
	// No-op unless the database is at exactly version 8.1.
	if (schemaVersion.getMajor() != 8) {
		return schemaVersion;
	}
	if (schemaVersion.getMinor() != 1) {
		return schemaVersion;
	}

	acquireSingleUserCaseWriteLock();
	try (Statement statement = connection.createStatement();) {
		// New hash columns for images and free-form acquisition details.
		statement.execute("ALTER TABLE tsk_image_info ADD COLUMN sha1 TEXT DEFAULT NULL");
		statement.execute("ALTER TABLE tsk_image_info ADD COLUMN sha256 TEXT DEFAULT NULL");
		statement.execute("ALTER TABLE data_source_info ADD COLUMN acquisition_details TEXT");

		/*
		 * Add new tsk_db_extended_info table with TSK version, creation
		 * time schema and schema version numbers as the initial data. The
		 * creation time schema version is set to 0, 0 to indicate that it
		 * is not known.
		 */
		statement.execute("CREATE TABLE tsk_db_info_extended (name TEXT PRIMARY KEY, value TEXT NOT NULL)");
		ResultSet result = statement.executeQuery("SELECT tsk_ver FROM tsk_db_info");
		result.next();
		statement.execute("INSERT INTO tsk_db_info_extended (name, value) VALUES ('" + TSK_VERSION_KEY + "', '" + result.getLong("tsk_ver") + "')");
		statement.execute("INSERT INTO tsk_db_info_extended (name, value) VALUES ('" + SCHEMA_MAJOR_VERSION_KEY + "', '8')");
		statement.execute("INSERT INTO tsk_db_info_extended (name, value) VALUES ('" + SCHEMA_MINOR_VERSION_KEY + "', '2')");
		statement.execute("INSERT INTO tsk_db_info_extended (name, value) VALUES ('" + CREATION_SCHEMA_MAJOR_VERSION_KEY + "', '0')");
		statement.execute("INSERT INTO tsk_db_info_extended (name, value) VALUES ('" + CREATION_SCHEMA_MINOR_VERSION_KEY + "', '0')");

		// Pick the auto-increment primary key type for the current backend.
		String primaryKeyType;
		switch (getDatabaseType()) {
			case POSTGRESQL:
				primaryKeyType = "BIGSERIAL";
				break;
			case SQLITE:
				primaryKeyType = "INTEGER";
				break;
			default:
				throw new TskCoreException("Unsupported data base type: " + getDatabaseType().toString());
		}

		//create and initialize tsk_event_types tables
		statement.execute("CREATE TABLE tsk_event_types ("
				+ " event_type_id " + primaryKeyType + " PRIMARY KEY, "
				+ " display_name TEXT UNIQUE NOT NULL, "
				+ " super_type_id INTEGER REFERENCES tsk_event_types(event_type_id) )");
		statement.execute("insert into tsk_event_types(event_type_id, display_name, super_type_id)" + " values( 0, 'Event Types', null)");
		statement.execute("insert into tsk_event_types(event_type_id, display_name, super_type_id)" + " values(1, 'File System', 0)");
		statement.execute("insert into tsk_event_types(event_type_id, display_name, super_type_id)" + " values(2, 'Web Activity', 0)");
		statement.execute("insert into tsk_event_types(event_type_id, display_name, super_type_id)" + " values(3, 'Misc Types', 0)");
		statement.execute("insert into tsk_event_types(event_type_id, display_name, super_type_id)" + " values(4, 'Modified', 1)");
		statement.execute("insert into tsk_event_types(event_type_id, display_name, super_type_id)" + " values(5, 'Accessed', 1)");
		statement.execute("insert into tsk_event_types(event_type_id, display_name, super_type_id)" + " values(6, 'Created', 1)");
		statement.execute("insert into tsk_event_types(event_type_id, display_name, super_type_id)" + " values(7, 'Changed', 1)");

		//create tsk_events tables
		statement.execute("CREATE TABLE tsk_event_descriptions ("
				+ " event_description_id " + primaryKeyType + " PRIMARY KEY, "
				+ " full_description TEXT NOT NULL, "
				+ " med_description TEXT, "
				+ " short_description TEXT,"
				+ " data_source_obj_id BIGINT NOT NULL, "
				+ " file_obj_id BIGINT NOT NULL, "
				+ " artifact_id BIGINT, "
				+ " hash_hit INTEGER NOT NULL, " //boolean
				+ " tagged INTEGER NOT NULL, " //boolean
				+ " FOREIGN KEY(data_source_obj_id) REFERENCES data_source_info(obj_id), "
				+ " FOREIGN KEY(file_obj_id) REFERENCES tsk_files(obj_id), "
				+ " FOREIGN KEY(artifact_id) REFERENCES blackboard_artifacts(artifact_id))"
);
statement.execute("CREATE TABLE tsk_events ( "
		+ " event_id " + primaryKeyType + " PRIMARY KEY, "
		+ " event_type_id BIGINT NOT NULL REFERENCES tsk_event_types(event_type_id) ,"
		+ " event_description_id BIGINT NOT NULL REFERENCES tsk_event_descriptions(event_description_id) ,"
		+ " time INTEGER NOT NULL) "
);

//create tsk_events indices
statement.execute("CREATE INDEX events_time ON tsk_events(time)");
statement.execute("CREATE INDEX events_type ON tsk_events(event_type_id)");
statement.execute("CREATE INDEX events_data_source_obj_id ON tsk_event_descriptions(data_source_obj_id) ");
statement.execute("CREATE INDEX events_file_obj_id ON tsk_event_descriptions(file_obj_id) ");
statement.execute("CREATE INDEX events_artifact_id ON tsk_event_descriptions(artifact_id) ");
statement.execute("CREATE INDEX events_sub_type_time ON tsk_events(event_type_id, time) ");
return new CaseDbSchemaVersionNumber(8, 2);
} finally {
	releaseSingleUserCaseWriteLock();
}
}

/**
 * Updates a schema version 8.2 database to a schema version 8.3 database.
 *
 * @param schemaVersion The current schema version of the database.
 * @param connection    A connection to the case database.
 *
 * @return The new database schema version.
 *
 * @throws SQLException     If there is an error completing a database
 *                          operation.
 * @throws TskCoreException If there is an error completing a database
 *                          operation via another SleuthkitCase method.
 */
private CaseDbSchemaVersionNumber updateFromSchema8dot2toSchema8dot3(CaseDbSchemaVersionNumber schemaVersion, CaseDbConnection connection) throws SQLException, TskCoreException {
	// No-op unless the database is at exactly version 8.2.
	if (schemaVersion.getMajor() != 8) {
		return schemaVersion;
	}
	if (schemaVersion.getMinor() != 2) {
		return schemaVersion;
	}

	acquireSingleUserCaseWriteLock();
	ResultSet resultSet = null;
	try (Statement statement = connection.createStatement();) {
		// Add the uniqueness constraint to the tsk_event and tsk_event_description tables.
		// Unfortunately, SQLite doesn't support adding a constraint
		// to an existing table so we have to rename the old...
		String primaryKeyType;
		switch (getDatabaseType()) {
			case POSTGRESQL:
				primaryKeyType = "BIGSERIAL";
				break;
			case SQLITE:
				primaryKeyType = "INTEGER";
				break;
			default:
				throw new TskCoreException("Unsupported data base type: " + getDatabaseType().toString());
		}

		//create and initialize tsk_event_types tables which may or may not exist
		statement.execute("CREATE TABLE IF NOT EXISTS tsk_event_types ("
				+ " event_type_id " + primaryKeyType + " PRIMARY KEY, "
				+ " display_name TEXT UNIQUE NOT NULL, "
				+ " super_type_id INTEGER REFERENCES tsk_event_types(event_type_id) )");

		resultSet = statement.executeQuery("SELECT * from tsk_event_types");
		// If there is something in resultSet then the table must have previously
		// existing therefore there is not need to populate
		if (!resultSet.next()) {
			statement.execute("insert into tsk_event_types(event_type_id, display_name, super_type_id)" + " values( 0, 'Event Types', null)");
			statement.execute("insert into tsk_event_types(event_type_id, display_name, super_type_id)" + " values(1, 'File System', 0)");
			statement.execute("insert into tsk_event_types(event_type_id, display_name, super_type_id)" + " values(2, 'Web Activity', 0)");
			statement.execute("insert into tsk_event_types(event_type_id, display_name, super_type_id)" + " values(3, 'Misc Types', 0)");
			statement.execute("insert into tsk_event_types(event_type_id, display_name, super_type_id)" + " values(4, 'Modified', 1)");
			statement.execute("insert into tsk_event_types(event_type_id, display_name, super_type_id)" + " values(5, 'Accessed', 1)");
			statement.execute("insert into tsk_event_types(event_type_id, display_name, super_type_id)" + " values(6, 'Created', 1)");
			statement.execute("insert into tsk_event_types(event_type_id, display_name, super_type_id)" + " values(7, 'Changed', 1)");
		}

		// Delete the old table that may have been created with the upgrade
		// from 8.1 to 8.2.
		statement.execute("DROP TABLE IF EXISTS tsk_events");

		// Delete the old table that may have been created with the upgrade
		// from 8.1 to 8.2
		statement.execute("DROP TABLE IF EXISTS tsk_event_descriptions");

		//create new tsk_event_description table
		statement.execute("CREATE TABLE tsk_event_descriptions ("
				+ " event_description_id " + primaryKeyType + " PRIMARY KEY, "
				+ " full_description TEXT NOT NULL, "
				+ " med_description TEXT, "
				+ " short_description TEXT,"
				+ " data_source_obj_id BIGINT NOT NULL, "
				+ " file_obj_id BIGINT NOT NULL, "
				+ " artifact_id BIGINT, "
				+ " hash_hit INTEGER NOT NULL, " //boolean
				+ " tagged INTEGER NOT NULL, " //boolean
				+ " UNIQUE(full_description, file_obj_id, artifact_id), "
				+ " FOREIGN KEY(data_source_obj_id) REFERENCES data_source_info(obj_id), "
				+ " FOREIGN KEY(file_obj_id) REFERENCES tsk_files(obj_id), "
				+ " FOREIGN KEY(artifact_id) REFERENCES blackboard_artifacts(artifact_id))"
		);

		// create a new table
		statement.execute("CREATE TABLE tsk_events ( "
				+ " event_id " + primaryKeyType + " PRIMARY KEY, "
				+ " event_type_id BIGINT NOT NULL REFERENCES tsk_event_types(event_type_id) ,"
				+ " event_description_id BIGINT NOT NULL REFERENCES tsk_event_descriptions(event_description_id) ,"
				+ " time INTEGER NOT NULL, "
				+ " UNIQUE (event_type_id, event_description_id, time))"
		);

		// Fix mistakenly set names in tsk_db_info_extended
		statement.execute("UPDATE tsk_db_info_extended SET name = 'CREATION_SCHEMA_MAJOR_VERSION' WHERE name = 'CREATED_SCHEMA_MAJOR_VERSION'");
		statement.execute("UPDATE tsk_db_info_extended SET name = 'CREATION_SCHEMA_MINOR_VERSION' WHERE name = 'CREATED_SCHEMA_MINOR_VERSION'");

		return new CaseDbSchemaVersionNumber(8, 3);
	} finally {
		closeResultSet(resultSet);
		releaseSingleUserCaseWriteLock();
	}
}

/**
 * Updates a schema version 8.3 database to a schema version 8.4 database.
 *
 * This includes a bug fix update for a misnamed column in
 * tsk_event_descriptions in the previous update code.
*
* Note that 8.4 also introduced cascading deletes on many of the database
* tables. We do not need to add these in the upgrade code because data
* sources in cases that were originally created with 8.3 or earlier can not
* be deleted.
*
* @param schemaVersion The current schema version of the database.
* @param connection    A connection to the case database.
*
* @return The new database schema version.
*
* @throws SQLException     If there is an error completing a database
*                          operation.
* @throws TskCoreException If there is an error completing a database
*                          operation via another SleuthkitCase method.
*/
private CaseDbSchemaVersionNumber updateFromSchema8dot3toSchema8dot4(CaseDbSchemaVersionNumber schemaVersion, CaseDbConnection connection) throws SQLException, TskCoreException {
	// No-op unless the database is at exactly version 8.3.
	if (schemaVersion.getMajor() != 8) {
		return schemaVersion;
	}
	if (schemaVersion.getMinor() != 3) {
		return schemaVersion;
	}

	Statement statement = connection.createStatement();
	ResultSet results = null;
	acquireSingleUserCaseWriteLock();
	try {
		// This is a bug fix update for a misnamed column in tsk_event_descriptions in
		// the previous update code.
		if (null == getDatabaseType()) {
			// NOTE(review): if getDatabaseType() really returned null, building this
			// message would NPE before the exception is thrown.
			throw new TskCoreException("Unsupported data base type: " + getDatabaseType().toString());
		}

		switch (getDatabaseType()) {
			case POSTGRESQL:
				// Check if the misnamed column is present
				results = statement.executeQuery("SELECT column_name FROM information_schema.columns "
						+ "WHERE table_name='tsk_event_descriptions' and column_name='file_obj_id'");
				if (results.next()) {
					// In PostgreSQL we can rename the column if it exists
					statement.execute("ALTER TABLE tsk_event_descriptions "
							+ "RENAME COLUMN file_obj_id TO content_obj_id");

					// In 8.2 to 8.3 upgrade, the event_id & time column in tsk_events table was erroneously created as type INTEGER, instead of BIGINT
					// Fix the schema, preserving any data if exists.
					statement.execute("CREATE TABLE temp_tsk_events ( "
							+ " event_id BIGSERIAL PRIMARY KEY, "
							+ " event_type_id BIGINT NOT NULL REFERENCES tsk_event_types(event_type_id) ,"
							+ " event_description_id BIGINT NOT NULL REFERENCES tsk_event_descriptions(event_description_id),"
							+ " time BIGINT NOT NULL, "
							+ " UNIQUE (event_type_id, event_description_id, time))"
					);

					// Copy the data
					statement.execute("INSERT INTO temp_tsk_events(event_id, event_type_id, "
							+ "event_description_id, time) SELECT * FROM tsk_events");

					// Drop the old table
					statement.execute("DROP TABLE tsk_events");

					// Rename the new table
					statement.execute("ALTER TABLE temp_tsk_events RENAME TO tsk_events");

					//create tsk_events indices that were skipped in the 8.2 to 8.3 update code
					statement.execute("CREATE INDEX events_data_source_obj_id ON tsk_event_descriptions(data_source_obj_id) ");
					statement.execute("CREATE INDEX events_content_obj_id ON tsk_event_descriptions(content_obj_id) ");
					statement.execute("CREATE INDEX events_artifact_id ON tsk_event_descriptions(artifact_id) ");
					statement.execute("CREATE INDEX events_sub_type_time ON tsk_events(event_type_id, time) ");
					statement.execute("CREATE INDEX events_time ON tsk_events(time) ");
				}
				break;
			case SQLITE:
				// SQLite (pre-3.25) cannot rename columns; detect the bad column first.
				boolean hasMisnamedColumn = false;
				results = statement.executeQuery("pragma table_info('tsk_event_descriptions')");
				while (results.next()) {
					if (results.getString("name") != null && results.getString("name").equals("file_obj_id")) {
						hasMisnamedColumn = true;
						break;
					}
				}

				if (hasMisnamedColumn) {
					// Since we can't rename the column we'll need to make new tables and copy the data
					statement.execute("CREATE TABLE temp_tsk_event_descriptions ("
							+ " event_description_id INTEGER PRIMARY KEY, "
							+ " full_description TEXT NOT NULL, "
							+ " med_description TEXT, "
							+ " short_description TEXT,"
							+ " data_source_obj_id BIGINT NOT NULL, "
							+ " content_obj_id BIGINT NOT NULL, "
							+ " artifact_id BIGINT, "
							+ " hash_hit INTEGER NOT NULL, " //boolean
							+ " tagged INTEGER NOT NULL, " //boolean
							+ " UNIQUE(full_description, content_obj_id, artifact_id), "
							+ " FOREIGN KEY(data_source_obj_id) REFERENCES data_source_info(obj_id), "
							+ " FOREIGN KEY(content_obj_id) REFERENCES tsk_files(obj_id), "
							+ " FOREIGN KEY(artifact_id) REFERENCES blackboard_artifacts(artifact_id))"
					);
					statement.execute("CREATE TABLE temp_tsk_events ( "
							+ " event_id INTEGER PRIMARY KEY, "
							+ " event_type_id BIGINT NOT NULL REFERENCES tsk_event_types(event_type_id) ,"
							+ " event_description_id BIGINT NOT NULL REFERENCES temp_tsk_event_descriptions(event_description_id),"
							+ " time INTEGER NOT NULL, "
							+ " UNIQUE (event_type_id, event_description_id, time))"
					);

					// Copy the data
					statement.execute("INSERT INTO temp_tsk_event_descriptions(event_description_id, full_description, "
							+ "med_description, short_description, data_source_obj_id, content_obj_id, artifact_id, "
							+ "hash_hit, tagged) SELECT * FROM tsk_event_descriptions");
					statement.execute("INSERT INTO temp_tsk_events(event_id, event_type_id, "
							+ "event_description_id, time) SELECT * FROM tsk_events");

					// Drop the old tables
					statement.execute("DROP TABLE tsk_events");
					statement.execute("DROP TABLE tsk_event_descriptions");

					// Rename the new tables
					statement.execute("ALTER TABLE temp_tsk_event_descriptions RENAME TO tsk_event_descriptions");
					statement.execute("ALTER TABLE temp_tsk_events RENAME TO tsk_events");

					//create tsk_events indices
					statement.execute("CREATE INDEX events_data_source_obj_id ON tsk_event_descriptions(data_source_obj_id) ");
					statement.execute("CREATE INDEX events_content_obj_id ON tsk_event_descriptions(content_obj_id) ");
					statement.execute("CREATE INDEX events_artifact_id ON tsk_event_descriptions(artifact_id) ");
					statement.execute("CREATE INDEX events_sub_type_time ON tsk_events(event_type_id, time) ");
					statement.execute("CREATE INDEX events_time ON tsk_events(time) ");
				}
				break;
			default:
				throw new TskCoreException("Unsupported data base type: " + getDatabaseType().toString());
		}

		// create pool info table
		if
(this.dbType.equals(DbType.SQLITE)) {
	statement.execute("CREATE TABLE tsk_pool_info (obj_id INTEGER PRIMARY KEY, pool_type INTEGER NOT NULL, FOREIGN KEY(obj_id) REFERENCES tsk_objects(obj_id) ON DELETE CASCADE)");
} else {
	statement.execute("CREATE TABLE tsk_pool_info (obj_id BIGSERIAL PRIMARY KEY, pool_type INTEGER NOT NULL, FOREIGN KEY(obj_id) REFERENCES tsk_objects(obj_id) ON DELETE CASCADE)");
}

// Add new account types for newly supported messaging applications, if they dont exists already.
insertAccountTypeIfNotExists(statement, "IMO", "IMO");
insertAccountTypeIfNotExists(statement, "LINE", "LINE");
insertAccountTypeIfNotExists(statement, "SKYPE", "Skype");
insertAccountTypeIfNotExists(statement, "TANGO", "Tango");
insertAccountTypeIfNotExists(statement, "TEXTNOW", "TextNow");
insertAccountTypeIfNotExists(statement, "THREEMA", "ThreeMa");
insertAccountTypeIfNotExists(statement, "VIBER", "Viber");
insertAccountTypeIfNotExists(statement, "XENDER", "Xender");
insertAccountTypeIfNotExists(statement, "ZAPYA", "Zapya");
insertAccountTypeIfNotExists(statement, "SHAREIT", "ShareIt");

return new CaseDbSchemaVersionNumber(8, 4);
} finally {
	closeResultSet(results);
	closeStatement(statement);
	releaseSingleUserCaseWriteLock();
}
}

/**
 * Updates a schema version 8.4 database to a schema version 8.5 database.
 * Adds tag sets, a rank column for tag names, migrates the legacy Project
 * VIC tag names, and adds a data_source_obj_id column to tsk_fs_info.
 *
 * @param schemaVersion The current schema version of the database.
 * @param connection    A connection to the case database.
 *
 * @return The new database schema version.
 *
 * @throws SQLException     If there is an error completing a database
 *                          operation.
 * @throws TskCoreException If there is an error completing a database
 *                          operation via another SleuthkitCase method.
 */
private CaseDbSchemaVersionNumber updateFromSchema8dot4toSchema8dot5(CaseDbSchemaVersionNumber schemaVersion, CaseDbConnection connection) throws SQLException, TskCoreException {
	// No-op unless the database is at exactly version 8.4.
	if (schemaVersion.getMajor() != 8) {
		return schemaVersion;
	}
	if (schemaVersion.getMinor() != 4) {
		return schemaVersion;
	}

	Statement statement = connection.createStatement();
	acquireSingleUserCaseWriteLock();
	try {
		// Create the tag set table and link tag names to tag sets.
		switch (getDatabaseType()) {
			case POSTGRESQL:
				statement.execute("CREATE TABLE tsk_tag_sets (tag_set_id BIGSERIAL PRIMARY KEY, name TEXT UNIQUE)");
				statement.execute("ALTER TABLE tag_names ADD COLUMN tag_set_id BIGINT REFERENCES tsk_tag_sets(tag_set_id)");
				break;
			case SQLITE:
				statement.execute("CREATE TABLE tsk_tag_sets (tag_set_id INTEGER PRIMARY KEY, name TEXT UNIQUE)");
				statement.execute("ALTER TABLE tag_names ADD COLUMN tag_set_id INTEGER REFERENCES tsk_tag_sets(tag_set_id)");
				break;
		}
		statement.execute("ALTER TABLE tag_names ADD COLUMN rank INTEGER");

		/*
		 * Update existing Project Vic tag names (from Image Gallery in
		 * Autopsy) to be part of a Tag Set. NOTE: These names are out of
		 * date and will not work with the Project VIC Report module. New
		 * cases will get the new names from Image Gallery.
		 */
		String insertStmt = "INSERT INTO tsk_tag_sets (name) VALUES ('Project VIC')";
		if (getDatabaseType() == DbType.POSTGRESQL) {
			// PostgreSQL only returns generated keys when explicitly requested.
			statement.execute(insertStmt, Statement.RETURN_GENERATED_KEYS);
		} else {
			statement.execute(insertStmt);
		}
		try (ResultSet resultSet = statement.getGeneratedKeys()) {
			if (resultSet != null && resultSet.next()) {
				int tagSetId = resultSet.getInt(1);

				// Re-title the CAT-1..CAT-5 tags and attach them to the new tag set.
				String updateQuery = "UPDATE tag_names SET tag_set_id = %d, color = '%s', rank = %d, display_name = '%s' WHERE display_name = '%s'";
				statement.executeUpdate(String.format(updateQuery, tagSetId, "Red", 1, "Child Exploitation (Illegal)", "CAT-1: Child Exploitation (Illegal)"));
				statement.executeUpdate(String.format(updateQuery, tagSetId, "Lime", 2, "Child Exploitation (Non-Illegal/Age Difficult)", "CAT-2: Child Exploitation (Non-Illegal/Age Difficult)"));
				statement.executeUpdate(String.format(updateQuery, tagSetId, "Yellow", 3, "CGI/Animation (Child Exploitive)", "CAT-3: CGI/Animation (Child Exploitive)"));
				statement.executeUpdate(String.format(updateQuery, tagSetId, "Purple", 4, "Exemplar/Comparison (Internal Use Only)", "CAT-4: Exemplar/Comparison (Internal Use Only)"));
				statement.executeUpdate(String.format(updateQuery, tagSetId, "Fuchsia", 5, "Non-pertinent", "CAT-5: Non-pertinent"));

				// Remove the obsolete CAT-0 tag and any tags that used it.
				String deleteContentTag = "DELETE FROM content_tags WHERE tag_name_id IN (SELECT tag_name_id from tag_names WHERE display_name LIKE 'CAT-0: Uncategorized')";
				String deleteArtifactTag = "DELETE FROM blackboard_artifact_tags WHERE tag_name_id IN (SELECT tag_name_id from tag_names WHERE display_name LIKE 'CAT-0: Uncategorized')";
				String deleteCat0 = "DELETE FROM tag_names WHERE display_name = 'CAT-0: Uncategorized'";
				statement.executeUpdate(deleteContentTag);
				statement.executeUpdate(deleteArtifactTag);
				statement.executeUpdate(deleteCat0);
			} else {
				throw new TskCoreException("Failed to retrieve the default tag_set_id from DB");
			}
		}

		// Add data_source_obj_id column to the tsk_fs_info table. For newly created cases
		// this column will have a foreign key constraint on the data_source_info table.
		// There does not seem to be a reasonable way to do this in an upgrade,
		// so upgraded cases will be missing the foreign key.
		switch (getDatabaseType()) {
			case POSTGRESQL:
				statement.execute("ALTER TABLE tsk_fs_info ADD COLUMN data_source_obj_id BIGINT NOT NULL DEFAULT -1;");
				break;
			case SQLITE:
				statement.execute("ALTER TABLE tsk_fs_info ADD COLUMN data_source_obj_id INTEGER NOT NULL DEFAULT -1;");
				break;
		}

		// Populate the new column for every existing file system.
		Statement updateStatement = connection.createStatement();
		try (ResultSet resultSet = statement.executeQuery("SELECT obj_id FROM tsk_fs_info")) {
			while (resultSet.next()) {
				long fsId = resultSet.getLong("obj_id");
				long dataSourceId = getDataSourceObjectId(connection, fsId);
				updateStatement.executeUpdate("UPDATE tsk_fs_info SET data_source_obj_id = " + dataSourceId + " WHERE obj_id = " + fsId + ";");
			}
		} finally {
			closeStatement(updateStatement);
		}
		return new CaseDbSchemaVersionNumber(8, 5);
	} finally {
		closeStatement(statement);
		releaseSingleUserCaseWriteLock();
	}
}

/**
 * Updates a schema version 8.5 database to a schema version 8.6 database by
 * adding a sha256 column to the tsk_files table.
 *
 * @param schemaVersion The current schema version of the database.
 * @param connection    A connection to the case database.
 *
 * @return The new database schema version.
 *
 * @throws SQLException     If there is an error completing a database
 *                          operation.
 * @throws TskCoreException If there is an error completing a database
 *                          operation via another SleuthkitCase method.
 */
private CaseDbSchemaVersionNumber updateFromSchema8dot5toSchema8dot6(CaseDbSchemaVersionNumber schemaVersion, CaseDbConnection connection) throws SQLException, TskCoreException {
	// No-op unless the database is at exactly version 8.5.
	if (schemaVersion.getMajor() != 8) {
		return schemaVersion;
	}
	if (schemaVersion.getMinor() != 5) {
		return schemaVersion;
	}

	Statement statement = connection.createStatement();
	acquireSingleUserCaseWriteLock();
	try {
		statement.execute("ALTER TABLE tsk_files ADD COLUMN sha256 TEXT");
		return new
CaseDbSchemaVersionNumber(8, 6); } finally { closeStatement(statement); releaseSingleUserCaseWriteLock(); } } @SuppressWarnings("deprecation") private CaseDbSchemaVersionNumber updateFromSchema8dot6toSchema9dot0(CaseDbSchemaVersionNumber schemaVersion, CaseDbConnection connection) throws SQLException, TskCoreException { if (schemaVersion.getMajor() != 8) { return schemaVersion; } if (schemaVersion.getMinor() != 6) { return schemaVersion; } Statement statement = connection.createStatement(); acquireSingleUserCaseWriteLock(); try { String dateDataType = "BIGINT"; String bigIntDataType = "BIGINT"; String blobDataType = "BYTEA"; String primaryKeyType = "BIGSERIAL"; if (this.dbType.equals(DbType.SQLITE)) { dateDataType = "INTEGER"; bigIntDataType = "INTEGER"; blobDataType = "BLOB"; primaryKeyType = "INTEGER"; } statement.execute("ALTER TABLE data_source_info ADD COLUMN added_date_time " + dateDataType); statement.execute("ALTER TABLE data_source_info ADD COLUMN acquisition_tool_settings TEXT"); statement.execute("ALTER TABLE data_source_info ADD COLUMN acquisition_tool_name TEXT"); statement.execute("ALTER TABLE data_source_info ADD COLUMN acquisition_tool_version TEXT"); // Add category type and initialize the types. We use the list of artifact types that // were categorized as analysis results as of the 8.7 update to ensure consistency in // case the built-in types change in a later release. 
statement.execute("ALTER TABLE blackboard_artifact_types ADD COLUMN category_type INTEGER DEFAULT 0"); String analysisTypeObjIdList = BlackboardArtifact.ARTIFACT_TYPE.TSK_KEYWORD_HIT.getTypeID() + ", " + BlackboardArtifact.ARTIFACT_TYPE.TSK_HASHSET_HIT.getTypeID() + ", " + BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT.getTypeID() + ", " + BlackboardArtifact.ARTIFACT_TYPE.TSK_TAG_FILE.getTypeID() + ", " + BlackboardArtifact.ARTIFACT_TYPE.TSK_TAG_ARTIFACT.getTypeID() + ", " + BlackboardArtifact.ARTIFACT_TYPE.TSK_ENCRYPTION_DETECTED.getTypeID() + ", " + BlackboardArtifact.ARTIFACT_TYPE.TSK_EXT_MISMATCH_DETECTED.getTypeID() + ", " + BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_ARTIFACT_HIT.getTypeID() + ", " + BlackboardArtifact.ARTIFACT_TYPE.TSK_FACE_DETECTED.getTypeID() + ", " + BlackboardArtifact.ARTIFACT_TYPE.TSK_ENCRYPTION_SUSPECTED.getTypeID() + ", " + BlackboardArtifact.ARTIFACT_TYPE.TSK_OBJECT_DETECTED.getTypeID() + ", " + BlackboardArtifact.ARTIFACT_TYPE.TSK_VERIFICATION_FAILED.getTypeID() + ", " + BlackboardArtifact.ARTIFACT_TYPE.TSK_DATA_SOURCE_USAGE.getTypeID() + ", " + BlackboardArtifact.ARTIFACT_TYPE.TSK_USER_CONTENT_SUSPECTED.getTypeID() + ", " + BlackboardArtifact.ARTIFACT_TYPE.TSK_WEB_ACCOUNT_TYPE.getTypeID() + ", " + BlackboardArtifact.ARTIFACT_TYPE.TSK_YARA_HIT.getTypeID() + ", " + BlackboardArtifact.ARTIFACT_TYPE.TSK_WEB_CATEGORIZATION.getTypeID(); statement.execute("UPDATE blackboard_artifact_types SET category_type = " + BlackboardArtifact.Category.ANALYSIS_RESULT.getID() + " WHERE artifact_type_id IN (" + analysisTypeObjIdList + ")"); // Create tsk file attributes table statement.execute("CREATE TABLE tsk_file_attributes (id " + primaryKeyType + " PRIMARY KEY, " + "obj_id " + bigIntDataType + " NOT NULL, " + "attribute_type_id " + bigIntDataType + " NOT NULL, " + "value_type INTEGER NOT NULL, value_byte " + blobDataType + ", " + "value_text TEXT, value_int32 INTEGER, value_int64 " + bigIntDataType + ", value_double NUMERIC(20, 
10), " + "FOREIGN KEY(obj_id) REFERENCES tsk_files(obj_id) ON DELETE CASCADE, " + "FOREIGN KEY(attribute_type_id) REFERENCES blackboard_attribute_types(attribute_type_id))"); // create analysis results tables statement.execute("CREATE TABLE tsk_analysis_results (artifact_obj_id " + bigIntDataType + " PRIMARY KEY, " + "conclusion TEXT, " + "significance INTEGER NOT NULL, " /* * method_category was a column in a little distributed * version of 9.0. It was renamed to priority before public * release. The 9.1 upgrade code will add the priority * column. This is commented out since it was never used. */ // + "method_category INTEGER NOT NULL, " + "configuration TEXT, justification TEXT, " + "ignore_score INTEGER DEFAULT 0 " // boolean + ")"); statement.execute("CREATE TABLE tsk_aggregate_score( obj_id " + bigIntDataType + " PRIMARY KEY, " + "data_source_obj_id " + bigIntDataType + ", " + "significance INTEGER NOT NULL, " // See comment above on why this is commented out // + "method_category INTEGER NOT NULL, " + "UNIQUE (obj_id)," + "FOREIGN KEY(obj_id) REFERENCES tsk_objects(obj_id) ON DELETE CASCADE, " + "FOREIGN KEY(data_source_obj_id) REFERENCES tsk_objects(obj_id) ON DELETE CASCADE " + ")"); // Create person table. statement.execute("CREATE TABLE tsk_persons (id " + primaryKeyType + " PRIMARY KEY, " + "name TEXT NOT NULL, " // person name + "UNIQUE(name)) "); // Create host table. 
statement.execute("CREATE TABLE tsk_hosts (id " + primaryKeyType + " PRIMARY KEY, " + "name TEXT NOT NULL, " // host name + "db_status INTEGER DEFAULT 0, " // active/merged/deleted + "person_id INTEGER, " + "merged_into " + bigIntDataType + ", " + "FOREIGN KEY(person_id) REFERENCES tsk_persons(id) ON DELETE SET NULL, " + "FOREIGN KEY(merged_into) REFERENCES tsk_hosts(id), " + "UNIQUE(name)) "); // Create OS Account and related tables statement.execute("CREATE TABLE tsk_os_account_realms (id " + primaryKeyType + " PRIMARY KEY, " + "realm_name TEXT DEFAULT NULL, " // realm name - for a domain realm, may be null + "realm_addr TEXT DEFAULT NULL, " // a sid/uid or some some other identifier, may be null + "realm_signature TEXT NOT NULL, " // Signature exists only to prevent duplicates. It is made up of realm address/name and scope host + "scope_host_id " + bigIntDataType + " DEFAULT NULL, " // if the realm scope is a single host + "scope_confidence INTEGER, " // indicates whether we know for sure the realm scope or if we are inferring it + "db_status INTEGER DEFAULT 0, " // active/merged/deleted + "merged_into " + bigIntDataType + " DEFAULT NULL, " + "UNIQUE(realm_signature), " + "FOREIGN KEY(scope_host_id) REFERENCES tsk_hosts(id)," + "FOREIGN KEY(merged_into) REFERENCES tsk_os_account_realms(id) )"); // Add host column and create a host for each existing data source. // We will create a host for each device id so that related data sources will // be associated with the same host. 
statement.execute("ALTER TABLE data_source_info ADD COLUMN host_id INTEGER REFERENCES tsk_hosts(id)"); Statement updateStatement = connection.createStatement(); try (ResultSet resultSet = statement.executeQuery("SELECT obj_id, device_id FROM data_source_info")) { Map hostMap = new HashMap<>(); long hostIndex = 1; while (resultSet.next()) { long objId = resultSet.getLong("obj_id"); String deviceId = resultSet.getString("device_id"); if (!hostMap.containsKey(deviceId)) { String hostName = "Host " + hostIndex; updateStatement.execute("INSERT INTO tsk_hosts (name, db_status) VALUES ('" + hostName + "', 0)"); hostMap.put(deviceId, hostIndex); hostIndex++; } updateStatement.execute("UPDATE data_source_info SET host_id = " + hostMap.get(deviceId) + " WHERE obj_id = " + objId); } } finally { closeStatement(updateStatement); } statement.execute("CREATE TABLE tsk_os_accounts (os_account_obj_id " + bigIntDataType + " PRIMARY KEY, " + "login_name TEXT DEFAULT NULL, " // login name, if available, may be null + "full_name TEXT DEFAULT NULL, " // full name, if available, may be null + "realm_id " + bigIntDataType + " NOT NULL, " // realm for the account + "addr TEXT DEFAULT NULL, " // SID/UID, if available + "signature TEXT NOT NULL, " // This exists only to prevent duplicates. It is either the addr or the login_name whichever is not null. 
+ "status INTEGER, " // enabled/disabled/deleted + "type INTEGER, " // service/interactive + "created_date " + bigIntDataType + " DEFAULT NULL, " + "db_status INTEGER DEFAULT 0, " // active/merged/deleted + "merged_into " + bigIntDataType + " DEFAULT NULL, " + "UNIQUE(signature, realm_id), " + "FOREIGN KEY(os_account_obj_id) REFERENCES tsk_objects(obj_id) ON DELETE CASCADE, " + "FOREIGN KEY(realm_id) REFERENCES tsk_os_account_realms(id)," + "FOREIGN KEY(merged_into) REFERENCES tsk_os_accounts(os_account_obj_id) )"); statement.execute("CREATE TABLE tsk_os_account_attributes (id " + primaryKeyType + " PRIMARY KEY, " + "os_account_obj_id " + bigIntDataType + " NOT NULL, " + "host_id " + bigIntDataType + ", " + "source_obj_id " + bigIntDataType + ", " + "attribute_type_id " + bigIntDataType + " NOT NULL, " + "value_type INTEGER NOT NULL, " + "value_byte " + bigIntDataType + ", " + "value_text TEXT, " + "value_int32 INTEGER, value_int64 " + bigIntDataType + ", " + "value_double NUMERIC(20, 10), " + "FOREIGN KEY(os_account_obj_id) REFERENCES tsk_os_accounts(os_account_obj_id), " + "FOREIGN KEY(host_id) REFERENCES tsk_hosts(id), " + "FOREIGN KEY(source_obj_id) REFERENCES tsk_objects(obj_id) ON DELETE SET NULL, " + "FOREIGN KEY(attribute_type_id) REFERENCES blackboard_attribute_types(attribute_type_id))"); statement.execute("CREATE TABLE tsk_os_account_instances (id " + primaryKeyType + " PRIMARY KEY, " + "os_account_obj_id " + bigIntDataType + " NOT NULL, " + "data_source_obj_id " + bigIntDataType + " NOT NULL, " + "instance_type INTEGER NOT NULL, " // PerformedActionOn/ReferencedOn + "UNIQUE(os_account_obj_id, data_source_obj_id), " + "FOREIGN KEY(os_account_obj_id) REFERENCES tsk_os_accounts(os_account_obj_id), " + "FOREIGN KEY(data_source_obj_id) REFERENCES tsk_objects(obj_id) ON DELETE CASCADE )"); statement.execute("CREATE TABLE tsk_data_artifacts ( " + "artifact_obj_id " + bigIntDataType + " PRIMARY KEY, " + "os_account_obj_id " + bigIntDataType + ", " + "FOREIGN 
KEY(os_account_obj_id) REFERENCES tsk_os_accounts(os_account_obj_id)) "); // add owner_uid & os_account_obj_id columns to tsk_files statement.execute("ALTER TABLE tsk_files ADD COLUMN owner_uid TEXT DEFAULT NULL"); statement.execute("ALTER TABLE tsk_files ADD COLUMN os_account_obj_id " + bigIntDataType + " DEFAULT NULL REFERENCES tsk_os_accounts(os_account_obj_id) "); // create host address tables statement.execute("CREATE TABLE tsk_host_addresses (id " + primaryKeyType + " PRIMARY KEY, " + "address_type INTEGER NOT NULL, " + "address TEXT NOT NULL, " + "UNIQUE(address_type, address)) "); statement.execute("CREATE TABLE tsk_host_address_map (id " + primaryKeyType + " PRIMARY KEY, " + "host_id " + bigIntDataType + " NOT NULL, " + "addr_obj_id " + bigIntDataType + " NOT NULL, " + "source_obj_id " + bigIntDataType + ", " // object id of the source where this mapping was found. + "time " + bigIntDataType + ", " // time at which the mapping existed + "UNIQUE(host_id, addr_obj_id, time), " + "FOREIGN KEY(host_id) REFERENCES tsk_hosts(id) ON DELETE CASCADE, " + "FOREIGN KEY(addr_obj_id) REFERENCES tsk_host_addresses(id), " + "FOREIGN KEY(source_obj_id) REFERENCES tsk_objects(obj_id) ON DELETE SET NULL )"); // stores associations between DNS name and IP address statement.execute("CREATE TABLE tsk_host_address_dns_ip_map (id " + primaryKeyType + " PRIMARY KEY, " + "dns_address_id " + bigIntDataType + " NOT NULL, " + "ip_address_id " + bigIntDataType + " NOT NULL, " + "source_obj_id " + bigIntDataType + ", " + "time " + bigIntDataType + ", " // time at which the mapping existed + "UNIQUE(dns_address_id, ip_address_id, time), " + "FOREIGN KEY(dns_address_id) REFERENCES tsk_host_addresses(id) ON DELETE CASCADE, " + "FOREIGN KEY(ip_address_id) REFERENCES tsk_host_addresses(id) ON DELETE CASCADE," + "FOREIGN KEY(source_obj_id) REFERENCES tsk_objects(obj_id) ON DELETE SET NULL )"); // maps an address to an artifact using it statement.execute("CREATE TABLE tsk_host_address_usage 
(id " + primaryKeyType + " PRIMARY KEY, " + "addr_obj_id " + bigIntDataType + " NOT NULL, " + "obj_id " + bigIntDataType + " NOT NULL, " + "data_source_obj_id " + bigIntDataType + " NOT NULL, " // data source where the usage was found + "UNIQUE(addr_obj_id, obj_id), " + "FOREIGN KEY(addr_obj_id) REFERENCES tsk_host_addresses(id) ON DELETE CASCADE, " + "FOREIGN KEY(obj_id) REFERENCES tsk_objects(obj_id) ON DELETE CASCADE )"); return new CaseDbSchemaVersionNumber(9, 0); } finally { closeStatement(statement); releaseSingleUserCaseWriteLock(); } } private CaseDbSchemaVersionNumber updateFromSchema9dot0toSchema9dot1(CaseDbSchemaVersionNumber schemaVersion, CaseDbConnection connection) throws SQLException, TskCoreException { if (schemaVersion.getMajor() != 9) { return schemaVersion; } if (schemaVersion.getMinor() != 0) { return schemaVersion; } Statement statement = connection.createStatement(); ResultSet results = null; acquireSingleUserCaseWriteLock(); try { // The 9.0 schema contained method_category columns that were renamed to priority. switch (getDatabaseType()) { case POSTGRESQL: // Check if the misnamed column is present. We'll assume here that the column will exist // in both tables if present in one. results = statement.executeQuery("SELECT column_name FROM information_schema.columns " + "WHERE table_name='tsk_analysis_results' and column_name='method_category'"); if (results.next()) { // In PostgreSQL we can delete the column statement.execute("ALTER TABLE tsk_analysis_results " + "DROP COLUMN method_category"); statement.execute("ALTER TABLE tsk_aggregate_score " + "DROP COLUMN method_category"); } break; case SQLITE: // Check if the misnamed column is present. We'll assume here that the column will exist // in both tables if present in one. 
boolean hasMisnamedColumn = false; results = statement.executeQuery("pragma table_info('tsk_analysis_results')"); while (results.next()) { if (results.getString("name") != null && results.getString("name").equals("method_category")) { hasMisnamedColumn = true; break; } } if (hasMisnamedColumn) { // Since we can't rename the column we'll need to make a new table and copy the data. // We'll add the priority column later. statement.execute("CREATE TABLE temp_tsk_analysis_results (artifact_obj_id INTEGER PRIMARY KEY, " + "conclusion TEXT, " + "significance INTEGER NOT NULL, " + "configuration TEXT, justification TEXT, " + "ignore_score INTEGER DEFAULT 0 " // boolean + ")"); statement.execute("CREATE TABLE temp_tsk_aggregate_score( obj_id INTEGER PRIMARY KEY, " + "data_source_obj_id INTEGER, " + "significance INTEGER NOT NULL, " + "UNIQUE (obj_id)," + "FOREIGN KEY(obj_id) REFERENCES tsk_objects(obj_id) ON DELETE CASCADE, " + "FOREIGN KEY(data_source_obj_id) REFERENCES tsk_objects(obj_id) ON DELETE CASCADE " + ")"); // Copy the data statement.execute("INSERT INTO temp_tsk_analysis_results(artifact_obj_id, " + "conclusion, justification, significance, configuration, ignore_score) " + "SELECT artifact_obj_id, conclusion, justification, significance, configuration, ignore_score FROM tsk_analysis_results"); statement.execute("INSERT INTO temp_tsk_aggregate_score(obj_id, " + "data_source_obj_id, significance) " + "SELECT obj_id, data_source_obj_id, significance FROM tsk_aggregate_score"); // Drop the old tables statement.execute("DROP TABLE tsk_analysis_results"); statement.execute("DROP TABLE tsk_aggregate_score"); // Rename the new tables statement.execute("ALTER TABLE temp_tsk_analysis_results RENAME TO tsk_analysis_results"); statement.execute("ALTER TABLE temp_tsk_aggregate_score RENAME TO tsk_aggregate_score"); } break; default: throw new TskCoreException("Unsupported database type: " + getDatabaseType().toString()); } // add an index on tsk_file_attributes table. 
statement.execute("CREATE INDEX tsk_file_attributes_obj_id ON tsk_file_attributes(obj_id)"); statement.execute("ALTER TABLE tsk_analysis_results ADD COLUMN priority INTEGER NOT NULL DEFAULT " + Score.Priority.NORMAL.getId()); statement.execute("ALTER TABLE tsk_aggregate_score ADD COLUMN priority INTEGER NOT NULL DEFAULT " + Score.Priority.NORMAL.getId()); statement.execute("UPDATE blackboard_artifact_types SET category_type = 1 WHERE artifact_type_id = 16"); return new CaseDbSchemaVersionNumber(9, 1); } finally { closeResultSet(results); closeStatement(statement); releaseSingleUserCaseWriteLock(); } } /** * Inserts a row for the given account type in account_types table, if one * doesn't exist. * * @param statement Statement to use to execute SQL. * @param type_name Account type name. * @param display_name Account type display name. * * @throws TskCoreException * @throws SQLException */ private void insertAccountTypeIfNotExists(Statement statement, String type_name, String display_name) throws TskCoreException, SQLException { String insertSQL = String.format("INTO account_types(type_name, display_name) VALUES ('%s', '%s')", type_name, display_name); switch (getDatabaseType()) { case POSTGRESQL: insertSQL = "INSERT " + insertSQL + " ON CONFLICT DO NOTHING"; //NON-NLS break; case SQLITE: insertSQL = "INSERT OR IGNORE " + insertSQL; break; default: throw new TskCoreException("Unknown DB Type: " + getDatabaseType().name()); } statement.execute(insertSQL); //NON-NLS } /** * Extract the extension from a file name. * * @param fileName the file name to extract the extension from. * * @return The extension extracted from fileName. Will not be null. 
*/ static String extractExtension(final String fileName) { String ext; int i = fileName.lastIndexOf("."); // > 0 because we assume it's not an extension if period is the first character if ((i > 0) && ((i + 1) < fileName.length())) { ext = fileName.substring(i + 1); } else { return ""; } // we added this at one point to deal with files that had crazy names based on URLs // it's too hard though to clean those up and not mess up basic extensions though. // We need to add '-' to the below if we use it again // String[] findNonAlphanumeric = ext.split("[^a-zA-Z0-9_]"); // if (findNonAlphanumeric.length > 1) { // ext = findNonAlphanumeric[0]; // } return ext.toLowerCase(); } /** * Returns case database schema version number. As of TSK 4.5.0 db schema * versions are two part Major.minor. This method only returns the major * part. Use getDBSchemaVersion() for the complete version. * * @return The schema version number as an integer. * * @deprecated since 4.5.0 Use getDBSchemaVersion() instead for more * complete version info. */ @Deprecated public int getSchemaVersion() { return getDBSchemaVersion().getMajor(); } /** * Gets the database schema version in use. * * @return the database schema version in use. */ public VersionNumber getDBSchemaVersion() { return CURRENT_DB_SCHEMA_VERSION; } /** * Gets the creation version of the database schema. * * @return the creation version for the database schema, the creation * version will be 0.0 for databases created prior to 8.2 */ public CaseDbSchemaVersionNumber getDBSchemaCreationVersion() { return caseDBSchemaCreationVersion; } /** * Returns the type of database in use. * * @return database type */ public DbType getDatabaseType() { return this.dbType; } /** * Returns the path of a backup copy of the database made when a schema * version upgrade has occurred. * * @return The path of the backup file or null if no backup was made. 
*/ public String getBackupDatabasePath() { return dbBackupPath; } /** * Create a new transaction on the case database. The transaction object * that is returned can be passed to methods that take a CaseDbTransaction. * The caller is responsible for calling either commit() or rollback() on * the transaction object. * * Note that this beginning the transaction also acquires the single user * case write lock, which will be automatically released when the * transaction is closed. * * @return A CaseDbTransaction object. * * @throws TskCoreException */ public CaseDbTransaction beginTransaction() throws TskCoreException { return new CaseDbTransaction(this); } /** * Gets the case database name. * * @return The case database name. */ public String getDatabaseName() { return databaseName; } /** * Get the full path to the case directory. For a SQLite case database, this * is the same as the database directory path. * * @return Case directory path. */ public String getDbDirPath() { return caseDirPath; } /** * Acquires a write lock, but only if this is a single-user case. Always * call this method in a try block with a call to the lock release method in * an associated finally block. */ public void acquireSingleUserCaseWriteLock() { if (dbType == DbType.SQLITE) { rwLock.writeLock().lock(); } } /** * Releases a write lock, but only if this is a single-user case. This * method should always be called in the finally block of a try block in * which the lock was acquired. */ public void releaseSingleUserCaseWriteLock() { if (dbType == DbType.SQLITE) { rwLock.writeLock().unlock(); } } /** * Acquires a read lock, but only if this is a single-user case. Call this * method in a try block with a call to the lock release method in an * associated finally block. */ public void acquireSingleUserCaseReadLock() { if (dbType == DbType.SQLITE) { rwLock.readLock().lock(); } } /** * Releases a read lock, but only if this is a single-user case. 
 * This method should always be called in the finally block of a try block
 * in which the lock was acquired.
 */
public void releaseSingleUserCaseReadLock() {
	if (dbType == DbType.SQLITE) {
		rwLock.readLock().unlock();
	}
}

/**
 * Open an existing case database.
 *
 * @param dbPath Path to SQLite case database.
 *
 * @return Case database object.
 *
 * @throws org.sleuthkit.datamodel.TskCoreException
 */
public static SleuthkitCase openCase(String dbPath) throws TskCoreException {
	try {
		final SleuthkitJNI.CaseDbHandle caseHandle = SleuthkitJNI.openCaseDb(dbPath);
		return new SleuthkitCase(dbPath, caseHandle, DbType.SQLITE);
	} catch (TskUnsupportedSchemaVersionException ex) {
		//don't wrap in new TskCoreException
		throw ex;
	} catch (Exception ex) {
		throw new TskCoreException("Failed to open case database at " + dbPath, ex);
	}
}

/**
 * Open an existing multi-user case database.
 *
 * @param databaseName The name of the database.
 * @param info         Connection information for the the database.
 * @param caseDir      The folder where the case metadata fils is stored.
 *
 * @return A case database object.
 *
 * @throws TskCoreException If there is a problem opening the database.
 */
public static SleuthkitCase openCase(String databaseName, CaseDbConnectionInfo info, String caseDir) throws TskCoreException {
	try {
		/*
		 * The flow of this method involves trying to open case and if
		 * successful, return that case. If unsuccessful, an exception is
		 * thrown. We catch any exceptions, and use tryConnect() to attempt
		 * to obtain further information about the error. If tryConnect() is
		 * unable to successfully connect, tryConnect() will throw a
		 * TskCoreException with a message containing user-level error
		 * reporting. If tryConnect() is able to connect, flow continues and
		 * we rethrow the original exception obtained from trying to create
		 * the case. In this way, we obtain more detailed information if we
		 * are able, but do not lose any information if unable.
		 */
		final SleuthkitJNI.CaseDbHandle caseHandle = SleuthkitJNI.openCaseDb(databaseName, info);
		return new SleuthkitCase(info.getHost(), Integer.parseInt(info.getPort()), databaseName, info.getUserName(), info.getPassword(), caseHandle, caseDir, info.getDbType());
	} catch (PropertyVetoException exp) {
		// In this case, the JDBC driver doesn't support PostgreSQL. Use the generic message here.
		throw new TskCoreException(exp.getMessage(), exp);
	} catch (TskUnsupportedSchemaVersionException ex) {
		//don't wrap in new TskCoreException
		throw ex;
	} catch (Exception exp) {
		tryConnect(info); // attempt to connect, throw with user-friendly message if unable
		throw new TskCoreException(exp.getMessage(), exp); // throw with generic message if tryConnect() was successful
	}
}

/**
 * Creates a new SQLite case database.
 *
 * @param dbPath Path to where SQlite case database should be created.
 *
 * @return A case database object.
 *
 * @throws org.sleuthkit.datamodel.TskCoreException
 */
public static SleuthkitCase newCase(String dbPath) throws TskCoreException {
	try {
		// Build the schema first, then open it through the JNI layer.
		CaseDatabaseFactory factory = new CaseDatabaseFactory(dbPath);
		factory.createCaseDatabase();

		SleuthkitJNI.CaseDbHandle caseHandle = SleuthkitJNI.openCaseDb(dbPath);
		return new SleuthkitCase(dbPath, caseHandle, DbType.SQLITE);
	} catch (Exception ex) {
		throw new TskCoreException("Failed to create case database at " + dbPath, ex);
	}
}

/**
 * Creates a new PostgreSQL case database.
 *
 * @param caseName    The name of the case. It will be used to create a case
 *                    database name that can be safely used in SQL commands
 *                    and will not be subject to name collisions on the case
 *                    database server. Use getDatabaseName to get the
 *                    created name.
 * @param info        The information to connect to the database.
 * @param caseDirPath The case directory path.
 *
 * @return A case database object.
 *
 * @throws org.sleuthkit.datamodel.TskCoreException
 */
public static SleuthkitCase newCase(String caseName, CaseDbConnectionInfo info, String caseDirPath) throws TskCoreException {
	String databaseName = createCaseDataBaseName(caseName);
	try {
		/**
		 * The flow of this method involves trying to create a new case and
		 * if successful, return that case. If unsuccessful, an exception is
		 * thrown. We catch any exceptions, and use tryConnect() to attempt
		 * to obtain further information about the error. If tryConnect() is
		 * unable to successfully connect, tryConnect() will throw a
		 * TskCoreException with a message containing user-level error
		 * reporting. If tryConnect() is able to connect, flow continues and
		 * we rethrow the original exception obtained from trying to create
		 * the case. In this way, we obtain more detailed information if we
		 * are able, but do not lose any information if unable.
		 */
		CaseDatabaseFactory factory = new CaseDatabaseFactory(databaseName, info);
		factory.createCaseDatabase();

		final SleuthkitJNI.CaseDbHandle caseHandle = SleuthkitJNI.openCaseDb(databaseName, info);
		return new SleuthkitCase(info.getHost(), Integer.parseInt(info.getPort()),
				databaseName, info.getUserName(), info.getPassword(), caseHandle, caseDirPath, info.getDbType());
	} catch (PropertyVetoException exp) {
		// In this case, the JDBC driver doesn't support PostgreSQL. Use the generic message here.
		throw new TskCoreException(exp.getMessage(), exp);
	} catch (Exception exp) {
		tryConnect(info); // attempt to connect, throw with user-friendly message if unable
		throw new TskCoreException(exp.getMessage(), exp); // throw with generic message if tryConnect() was successful
	}
}

/**
 * Transforms a candidate PostgreSQL case database name into one that can be
 * safely used in SQL commands and will not be subject to name collisions on
 * the case database server.
 *
 * @param candidateDbName A candidate case database name.
 *
 * @return A case database name.
 */
private static String createCaseDataBaseName(String candidateDbName) {
	String dbName;
	if (!candidateDbName.isEmpty()) {
		/*
		 * Replace all non-ASCII characters.
		 */
		dbName = candidateDbName.replaceAll("[^\\p{ASCII}]", "_"); //NON-NLS

		/*
		 * Replace all control characters.
		 */
		dbName = dbName.replaceAll("[\\p{Cntrl}]", "_"); //NON-NLS

		/*
		 * Replace /, \, :, ?, space, ' ".
		 */
		dbName = dbName.replaceAll("[ /?:'\"\\\\]", "_"); //NON-NLS

		/*
		 * Make it all lowercase.
		 */
		dbName = dbName.toLowerCase();

		/*
		 * Must start with letter or underscore. If not, prepend an
		 * underscore.
		 */
		if ((dbName.length() > 0 && !(Character.isLetter(dbName.codePointAt(0))) && !(dbName.codePointAt(0) == '_'))) {
			dbName = "_" + dbName;
		}

		/*
		 * Truncate to 63 - 16 = 47 chars to accommodate a timestamp for
		 * uniqueness.
		 */
		if (dbName.length() > MAX_DB_NAME_LEN_BEFORE_TIMESTAMP) {
			dbName = dbName.substring(0, MAX_DB_NAME_LEN_BEFORE_TIMESTAMP);
		}
	} else {
		/*
		 * Must start with letter or underscore.
		 */
		dbName = "_";
	}
	/*
	 * Add the timestamp so that repeated names remain unique.
	 */
	SimpleDateFormat dateFormat = new SimpleDateFormat("yyyyMMdd_HHmmss");
	Date date = new Date();
	dbName = dbName + "_" + dateFormat.format(date);
	return dbName;
}

/**
 * Returns the Examiner object for currently logged in user
 *
 * @return A Examiner object.
 *
 * @throws org.sleuthkit.datamodel.TskCoreException
 */
public Examiner getCurrentExaminer() throws TskCoreException {

	// return cached value if there's one
	if (cachedCurrentExaminer != null) {
		return cachedCurrentExaminer;
	}
	String loginName = System.getProperty("user.name");
	if (loginName == null || loginName.isEmpty()) {
		throw new TskCoreException("Failed to determine logged in user name.");
	}

	ResultSet resultSet = null;
	CaseDbConnection connection = null;
	acquireSingleUserCaseReadLock();
	try {
		connection = connections.getConnection();
		PreparedStatement statement = connection.getPreparedStatement(PREPARED_STATEMENT.SELECT_EXAMINER_BY_LOGIN_NAME);
		statement.clearParameters();
		statement.setString(1, loginName);
		resultSet = connection.executeQuery(statement);
		if (resultSet.next()) {
			// Cache the row so subsequent calls skip the query entirely.
			// NOTE(review): error message contains the typo "examaminer";
			// left as-is because it is a runtime string.
			cachedCurrentExaminer = new Examiner(resultSet.getLong("examiner_id"), resultSet.getString("login_name"), resultSet.getString("display_name"));
			return cachedCurrentExaminer;
		} else {
			throw new TskCoreException("Error getting examaminer for name = " + loginName);
		}

	} catch (SQLException ex) {
		throw new TskCoreException("Error getting examaminer for name = " + loginName, ex);
	} finally {
		closeResultSet(resultSet);
		closeConnection(connection);
		releaseSingleUserCaseReadLock();
	}
}

/**
 * Returns the Examiner object for given id
 *
 * @param id
 *
 * @return Examiner object
 *
 * @throws TskCoreException
 */
Examiner getExaminerById(long id) throws TskCoreException {

	CaseDbConnection connection = null;
	ResultSet resultSet = null;
	acquireSingleUserCaseReadLock();
	try {
		connection = connections.getConnection();
		PreparedStatement statement = connection.getPreparedStatement(PREPARED_STATEMENT.SELECT_EXAMINER_BY_ID);
		statement.clearParameters();
		statement.setLong(1, id);
		resultSet = connection.executeQuery(statement);
		if (resultSet.next()) {
			return new Examiner(resultSet.getLong("examiner_id"), resultSet.getString("login_name"), resultSet.getString("full_name"));
		} else {
			throw new TskCoreException("Error getting examaminer for id = " + id);
		}
	} catch (SQLException ex) {
		throw new TskCoreException("Error getting examaminer for id = " + id, ex);
	} finally {
		closeResultSet(resultSet);
		closeConnection(connection);
		releaseSingleUserCaseReadLock();
	}
}

/**
 * Starts the multi-step process of adding an image data source to the case
 * by creating an object that can be used to control the process and get
 * progress messages from it.
 *
 * @param timeZone        The time zone of the image.
 * @param addUnallocSpace Set to true to create virtual files for
 *                        unallocated space in the image.
 * @param noFatFsOrphans  Set to true to skip processing orphan files of FAT
 *                        file systems.
 * @param imageCopyPath   Path to which a copy of the image should be
 *                        written. Use the empty string to disable image
 *                        writing.
 *
 * @return An object that encapsulates control of adding an image via the
 *         SleuthKit native code layer.
 */
public AddImageProcess makeAddImageProcess(String timeZone, boolean addUnallocSpace, boolean noFatFsOrphans, String imageCopyPath) {
	return this.caseHandle.initAddImageProcess(timeZone, addUnallocSpace, noFatFsOrphans, imageCopyPath, this);
}

/**
 * Get the list of root objects (data sources) from the case database, e.g.,
 * image files, logical (local) files, virtual directories.
 *
 * @return List of content objects representing root objects.
 *
 * @throws TskCoreException
 */
public List getRootObjects() throws TskCoreException {
	CaseDbConnection connection = null;
	Statement s = null;
	ResultSet rs = null;
	acquireSingleUserCaseReadLock();
	try {
		connection = connections.getConnection();
		s = connection.createStatement();
		// Root objects are the rows of tsk_objects with no parent.
		rs = connection.executeQuery(s, "SELECT obj_id, type FROM tsk_objects " //NON-NLS
				+ "WHERE par_obj_id IS NULL"); //NON-NLS
		Collection infos = new ArrayList();
		while (rs.next()) {
			infos.add(new ObjectInfo(rs.getLong("obj_id"), ObjectType.valueOf(rs.getShort("type")))); //NON-NLS
		}

		List rootObjs = new ArrayList();
		for (ObjectInfo i : infos) {
			if (null != i.type) {
				switch (i.type) {
					case IMG:
						rootObjs.add(getImageById(i.id));
						break;
					case ABSTRACTFILE:
						// Check if virtual dir for local files.
						AbstractFile af = getAbstractFileById(i.id);
						if (af instanceof VirtualDirectory) {
							rootObjs.add(af);
						} else {
							throw new TskCoreException("Parentless object has wrong type to be a root (ABSTRACTFILE, but not VIRTUAL_DIRECTORY: " + i.type);
						}
						break;
					case REPORT:
						// Reports, OS accounts, and host addresses legitimately
						// have no parent but are not roots of the content tree.
						break;
					case OS_ACCOUNT:
						break;
					case HOST_ADDRESS:
						break;
					case UNSUPPORTED:
						break;
					default:
						throw new TskCoreException("Parentless object has wrong type to be a root: " + i.type);
				}
			}
		}
		return rootObjs;
	} catch (SQLException ex) {
		throw new TskCoreException("Error getting root objects", ex);
	} finally {
		closeResultSet(rs);
		closeStatement(s);
		closeConnection(connection);
		releaseSingleUserCaseReadLock();
	}
}

/**
 * Gets the datasource obj ids for the given device_id
 *
 * @param deviceId device_id
 *
 * @return A list of the data source object_id for the given device_id for
 *         the case.
 *
 * @throws TskCoreException if there is a problem getting the data source
 *                          obj ids.
 */
List getDataSourceObjIds(String deviceId) throws TskCoreException {

	// check cached map first
	synchronized (deviceIdToDatasourceObjIdMap) {
		if (deviceIdToDatasourceObjIdMap.containsKey(deviceId)) {
			return new ArrayList(deviceIdToDatasourceObjIdMap.get(deviceId));
		}

		CaseDbConnection connection = null;
		Statement s = null;
		ResultSet rs = null;
		acquireSingleUserCaseReadLock();
		try {
			connection = connections.getConnection();
			s = connection.createStatement();
			// NOTE(review): deviceId is concatenated into the SQL; this is
			// only safe if device ids are internally generated — confirm.
			rs = connection.executeQuery(s, "SELECT obj_id FROM data_source_info WHERE device_id = '" + deviceId + "'"); //NON-NLS
			List dataSourceObjIds = new ArrayList();
			while (rs.next()) {
				dataSourceObjIds.add(rs.getLong("obj_id"));

				// Add to map of deviceID to data_source_obj_id.
				long ds_obj_id = rs.getLong("obj_id");
				if (deviceIdToDatasourceObjIdMap.containsKey(deviceId)) {
					deviceIdToDatasourceObjIdMap.get(deviceId).add(ds_obj_id);
				} else {
					deviceIdToDatasourceObjIdMap.put(deviceId, new HashSet(Arrays.asList(ds_obj_id)));
				}
			}
			return dataSourceObjIds;
		} catch (SQLException ex) {
			throw new TskCoreException("Error getting data sources", ex);
		} finally {
			closeResultSet(rs);
			closeStatement(s);
			closeConnection(connection);
			releaseSingleUserCaseReadLock();
		}
	}
}

/**
 * Gets the data sources for the case. For each data source, if it is an
 * image, an Image will be instantiated. Otherwise, a LocalFilesDataSource
 * will be instantiated.
 *
 * NOTE: The DataSource interface is an emerging feature and at present is
 * only useful for obtaining the object id and the device id, an
 * ASCII-printable identifier for the device associated with the data source
 * that is intended to be unique across multiple cases (e.g., a UUID). In
 * the future, this method will be a replacement for the getRootObjects
 * method.
 *
 * @return A list of the data sources for the case.
 *
 * @throws TskCoreException if there is a problem getting the data sources.
*/
public List getDataSources() throws TskCoreException {
	CaseDbConnection connection = null;
	Statement statement = null;
	ResultSet resultSet = null;
	Statement statement2 = null;
	ResultSet resultSet2 = null;
	acquireSingleUserCaseReadLock();
	try {
		connection = connections.getConnection();
		statement = connection.createStatement();
		statement2 = connection.createStatement();
		// LEFT JOIN: img.* columns are NULL for data sources that have no
		// row in tsk_image_info (i.e., logical file sets).
		resultSet = connection.executeQuery(statement,
				"SELECT ds.obj_id, ds.device_id, ds.time_zone, img.type, img.ssize, img.size, img.md5, img.sha1, img.sha256, img.display_name "
				+ "FROM data_source_info AS ds "
				+ "LEFT JOIN tsk_image_info AS img "
				+ "ON ds.obj_id = img.obj_id"); //NON-NLS

		List dataSourceList = new ArrayList();
		Map> imagePathsMap = getImagePaths();

		while (resultSet.next()) {
			DataSource dataSource;
			Long objectId = resultSet.getLong("obj_id");
			String deviceId = resultSet.getString("device_id");
			String timezone = resultSet.getString("time_zone");
			String type = resultSet.getString("type");

			if (type == null) {
				/*
				 * No data found in 'tsk_image_info', so we build a
				 * LocalFilesDataSource.
				 */
				resultSet2 = connection.executeQuery(statement2, "SELECT name FROM tsk_files WHERE tsk_files.obj_id = " + objectId); //NON-NLS
				String dsName = (resultSet2.next()) ? resultSet2.getString("name") : "";
				resultSet2.close();

				// Synthesize the directory-style metadata of a virtual
				// directory backing the logical file set.
				TSK_FS_NAME_TYPE_ENUM dirType = TSK_FS_NAME_TYPE_ENUM.DIR;
				TSK_FS_META_TYPE_ENUM metaType = TSK_FS_META_TYPE_ENUM.TSK_FS_META_TYPE_DIR;
				TSK_FS_NAME_FLAG_ENUM dirFlag = TSK_FS_NAME_FLAG_ENUM.ALLOC;
				final short metaFlags = (short) (TSK_FS_META_FLAG_ENUM.ALLOC.getValue()
						| TSK_FS_META_FLAG_ENUM.USED.getValue());
				String parentPath = "/"; //NON-NLS
				dataSource = new LocalFilesDataSource(this, objectId, objectId, deviceId, dsName, dirType, metaType, dirFlag, metaFlags, timezone, null, null, FileKnown.UNKNOWN, parentPath);
			} else {
				/*
				 * Data found in 'tsk_image_info', so we build an Image.
				 */
				Long ssize = resultSet.getLong("ssize");
				Long size = resultSet.getLong("size");
				String md5 = resultSet.getString("md5");
				String sha1 = resultSet.getString("sha1");
				String sha256 = resultSet.getString("sha256");
				String name = resultSet.getString("display_name");

				List imagePaths = imagePathsMap.get(objectId);
				if (name == null) {
					// Fall back to the file name of the first image path.
					if (imagePaths.size() > 0) {
						String path = imagePaths.get(0);
						name = (new java.io.File(path)).getName();
					} else {
						name = "";
					}
				}

				dataSource = new Image(this, objectId, Long.valueOf(type), deviceId, ssize, name,
						imagePaths.toArray(new String[imagePaths.size()]), timezone, md5, sha1, sha256, size);
			}

			dataSourceList.add(dataSource);
		}

		return dataSourceList;

	} catch (SQLException ex) {
		throw new TskCoreException("Error getting data sources", ex);
	} finally {
		closeResultSet(resultSet);
		closeStatement(statement);
		closeResultSet(resultSet2);
		closeStatement(statement2);
		closeConnection(connection);
		releaseSingleUserCaseReadLock();
	}
}

/**
 * Gets a specific data source for the case. If it is an image, an Image
 * will be instantiated. Otherwise, a LocalFilesDataSource will be
 * instantiated.
 *
 * NOTE: The DataSource class is an emerging feature and at present is only
 * useful for obtaining the object id and the data source identifier, an
 * ASCII-printable identifier for the data source that is intended to be
 * unique across multiple cases (e.g., a UUID). In the future, this method
 * will be a replacement for the getRootObjects method.
 *
 * @param objectId The object id of the data source.
 *
 * @return The data source.
 *
 * @throws TskDataException If there is no data source for the given object
 *                          id.
 * @throws TskCoreException If there is a problem getting the data source.
*/
public DataSource getDataSource(long objectId) throws TskDataException, TskCoreException {
	DataSource dataSource = null;
	CaseDbConnection connection = null;
	Statement statement = null;
	ResultSet resultSet = null;
	Statement statement2 = null;
	ResultSet resultSet2 = null;
	acquireSingleUserCaseReadLock();
	try {
		connection = connections.getConnection();
		statement = connection.createStatement();
		statement2 = connection.createStatement();
		// LEFT JOIN: img.* columns are NULL when the data source has no
		// row in tsk_image_info (i.e., a logical file set).
		resultSet = connection.executeQuery(statement,
				"SELECT ds.device_id, ds.time_zone, img.type, img.ssize, img.size, img.md5, img.sha1, img.sha256, img.display_name "
				+ "FROM data_source_info AS ds "
				+ "LEFT JOIN tsk_image_info AS img "
				+ "ON ds.obj_id = img.obj_id "
				+ "WHERE ds.obj_id = " + objectId); //NON-NLS
		if (resultSet.next()) {
			String deviceId = resultSet.getString("device_id");
			String timezone = resultSet.getString("time_zone");
			String type = resultSet.getString("type");

			if (type == null) {
				/*
				 * No data found in 'tsk_image_info', so we build a
				 * LocalFilesDataSource.
				 */
				resultSet2 = connection.executeQuery(statement2, "SELECT name FROM tsk_files WHERE tsk_files.obj_id = " + objectId); //NON-NLS
				String dsName = (resultSet2.next()) ? resultSet2.getString("name") : "";

				// Synthesize the directory-style metadata of the virtual
				// directory backing the logical file set.
				TSK_FS_NAME_TYPE_ENUM dirType = TSK_FS_NAME_TYPE_ENUM.DIR;
				TSK_FS_META_TYPE_ENUM metaType = TSK_FS_META_TYPE_ENUM.TSK_FS_META_TYPE_DIR;
				TSK_FS_NAME_FLAG_ENUM dirFlag = TSK_FS_NAME_FLAG_ENUM.ALLOC;
				final short metaFlags = (short) (TSK_FS_META_FLAG_ENUM.ALLOC.getValue()
						| TSK_FS_META_FLAG_ENUM.USED.getValue());
				String parentPath = "/"; //NON-NLS
				dataSource = new LocalFilesDataSource(this, objectId, objectId, deviceId, dsName, dirType, metaType, dirFlag, metaFlags, timezone, null, null, FileKnown.UNKNOWN, parentPath);
			} else {
				/*
				 * Data found in 'tsk_image_info', so we build an Image.
				 */
				Long ssize = resultSet.getLong("ssize");
				Long size = resultSet.getLong("size");
				String md5 = resultSet.getString("md5");
				String sha1 = resultSet.getString("sha1");
				String sha256 = resultSet.getString("sha256");
				String name = resultSet.getString("display_name");

				List imagePaths = getImagePathsById(objectId, connection);
				if (name == null) {
					// Fall back to the file name of the first image path.
					if (imagePaths.size() > 0) {
						String path = imagePaths.get(0);
						name = (new java.io.File(path)).getName();
					} else {
						name = "";
					}
				}

				dataSource = new Image(this, objectId, Long.valueOf(type), deviceId, ssize, name,
						imagePaths.toArray(new String[imagePaths.size()]), timezone, md5, sha1, sha256, size);
			}
		} else {
			throw new TskDataException(String.format("There is no data source with obj_id = %d", objectId));
		}
	} catch (SQLException ex) {
		throw new TskCoreException(String.format("Error getting data source with obj_id = %d", objectId), ex);
	} finally {
		closeResultSet(resultSet);
		closeStatement(statement);
		closeResultSet(resultSet2);
		closeStatement(statement2);
		closeConnection(connection);
		releaseSingleUserCaseReadLock();
	}

	return dataSource;
}

/**
 * Get all blackboard artifacts of a given type. Does not included rejected
 * artifacts.
 *
 * @param artifactTypeID artifact type id (must exist in database)
 *
 * @return list of blackboard artifacts.
 *
 * @throws TskCoreException
 *
 * @deprecated Use Blackboard.getArtifactsByType instead.
 */
@Deprecated
public ArrayList getBlackboardArtifacts(int artifactTypeID) throws TskCoreException {
	// Delegates to the Blackboard API; kept for backward compatibility.
	ArrayList artifacts = new ArrayList<>();
	artifacts.addAll(blackboard.getArtifactsByType(getArtifactType(artifactTypeID)));
	return artifacts;
}

/**
 * Get a count of blackboard artifacts for a given content. Does not include
 * rejected artifacts.
 *
 * @param objId Id of the content.
 *
 * @return The artifacts count for the content.
* * @throws TskCoreException */ public long getBlackboardArtifactsCount(long objId) throws TskCoreException { CaseDbConnection connection = null; ResultSet rs = null; acquireSingleUserCaseReadLock(); try { connection = connections.getConnection(); // SELECT COUNT(*) AS count FROM blackboard_artifacts WHERE obj_id = ? PreparedStatement statement = connection.getPreparedStatement(PREPARED_STATEMENT.COUNT_ARTIFACTS_FROM_SOURCE); statement.clearParameters(); statement.setLong(1, objId); rs = connection.executeQuery(statement); long count = 0; if (rs.next()) { count = rs.getLong("count"); } return count; } catch (SQLException ex) { throw new TskCoreException("Error getting number of blackboard artifacts by content", ex); } finally { closeResultSet(rs); closeConnection(connection); releaseSingleUserCaseReadLock(); } } /** * Get a count of artifacts of a given type. Does not include rejected * artifacts. * * @param artifactTypeID Id of the artifact type. * * @return The artifacts count for the type. * * @throws TskCoreException */ public long getBlackboardArtifactsTypeCount(int artifactTypeID) throws TskCoreException { CaseDbConnection connection = null; ResultSet rs = null; acquireSingleUserCaseReadLock(); try { connection = connections.getConnection(); // SELECT COUNT(*) AS count FROM blackboard_artifacts WHERE artifact_type_id = ? PreparedStatement statement = connection.getPreparedStatement(PREPARED_STATEMENT.COUNT_ARTIFACTS_OF_TYPE); statement.clearParameters(); statement.setInt(1, artifactTypeID); rs = connection.executeQuery(statement); long count = 0; if (rs.next()) { count = rs.getLong("count"); } return count; } catch (SQLException ex) { throw new TskCoreException("Error getting number of blackboard artifacts by type", ex); } finally { closeResultSet(rs); closeConnection(connection); releaseSingleUserCaseReadLock(); } } /** * Get a count of artifacts of a given type for the given data source. Does * not include rejected artifacts. 
*
 * @param artifactTypeID Id of the artifact type.
 * @param dataSourceID
 *
 * @return The artifacts count for the type.
 *
 * @throws TskCoreException
 */
public long getBlackboardArtifactsTypeCount(int artifactTypeID, long dataSourceID) throws TskCoreException {
	CaseDbConnection connection = null;
	ResultSet rs = null;
	acquireSingleUserCaseReadLock();
	try {
		connection = connections.getConnection();
		// SELECT COUNT(*) AS count FROM blackboard_artifacts WHERE artifact_type_id = ?
		PreparedStatement statement = connection.getPreparedStatement(PREPARED_STATEMENT.COUNT_ARTIFACTS_OF_TYPE_BY_DATA_SOURCE);
		statement.clearParameters();
		// NOTE(review): parameter index 1 is bound to the data source id and
		// index 2 to the artifact type id (the calls below are merely written
		// out of index order); presumably this matches the placeholder order
		// in COUNT_ARTIFACTS_OF_TYPE_BY_DATA_SOURCE — confirm against the
		// prepared statement's SQL.
		statement.setInt(2, artifactTypeID);
		statement.setLong(1, dataSourceID);
		rs = connection.executeQuery(statement);
		long count = 0;
		if (rs.next()) {
			count = rs.getLong("count");
		}
		return count;
	} catch (SQLException ex) {
		throw new TskCoreException(String.format("Error getting number of blackboard artifacts by type (%d) and data source (%d)", artifactTypeID, dataSourceID), ex);
	} finally {
		closeResultSet(rs);
		closeConnection(connection);
		releaseSingleUserCaseReadLock();
	}
}

/**
 * Get all blackboard artifacts that have an attribute of the given type and
 * String value. Does not included rejected artifacts.
 *
 * @param attrType attribute of this attribute type to look for in the
 *                 artifacts
 * @param value    value of the attribute of the attrType type to look for
 *
 * @return a list of blackboard artifacts with such an attribute
 *
 * @throws TskCoreException exception thrown if a critical error occurred
 *                          within tsk core and artifacts could not be
 *                          queried
 *
 * @deprecated Do not use.
*/
@Deprecated
public List getBlackboardArtifacts(BlackboardAttribute.ATTRIBUTE_TYPE attrType, String value) throws TskCoreException {
	acquireSingleUserCaseReadLock();
	// NOTE(review): 'value' is concatenated directly into the SQL string;
	// a value containing a quote will break or subvert the query.
	try (CaseDbConnection connection = connections.getConnection();
			Statement statement = connection.createStatement();
			ResultSet resultSet = connection.executeQuery(statement, "SELECT DISTINCT arts.artifact_id AS artifact_id, " //NON-NLS
					+ "arts.obj_id AS obj_id, arts.artifact_obj_id AS artifact_obj_id, arts.data_source_obj_id AS data_source_obj_id, arts.artifact_type_id AS artifact_type_id, "
					+ "types.type_name AS type_name, types.display_name AS display_name, " //NON-NLS
					+ " arts.review_status_id AS review_status_id " //NON-NLS
					+ "FROM blackboard_artifacts AS arts, blackboard_attributes AS attrs, blackboard_artifact_types AS types " //NON-NLS
					+ "WHERE arts.artifact_id = attrs.artifact_id " //NON-NLS
					+ " AND attrs.attribute_type_id = " + attrType.getTypeID() //NON-NLS
					+ " AND attrs.value_text = '" + value + "'"
					+ " AND types.artifact_type_id=arts.artifact_type_id"
					+ " AND arts.review_status_id !=" + BlackboardArtifact.ReviewStatus.REJECTED.getID());) { //NON-NLS
		// Partition matches by artifact category so each half can be
		// materialized with the right artifact class.
		List analysisArtifactObjIds = new ArrayList<>();
		List dataArtifactObjIds = new ArrayList<>();
		while (resultSet.next()) {
			BlackboardArtifact.Type type = this.getArtifactType(resultSet.getInt("artifact_type_id"));
			if (type.getCategory() == BlackboardArtifact.Category.ANALYSIS_RESULT) {
				analysisArtifactObjIds.add(resultSet.getLong("artifact_obj_id"));
			} else {
				dataArtifactObjIds.add(resultSet.getLong("artifact_obj_id"));
			}
		}

		ArrayList artifacts = new ArrayList<>();
		if (!analysisArtifactObjIds.isEmpty()) {
			artifacts.addAll(getArtifactsForValues(BlackboardArtifact.Category.ANALYSIS_RESULT, "artifacts.artifact_obj_id", analysisArtifactObjIds, connection));
		}

		if (!dataArtifactObjIds.isEmpty()) {
			artifacts.addAll(getArtifactsForValues(BlackboardArtifact.Category.DATA_ARTIFACT, "artifacts.artifact_obj_id", dataArtifactObjIds, connection));
		}
		return artifacts;
	} catch (SQLException ex) {
		throw new TskCoreException("Error getting blackboard artifacts by attribute", ex);
	} finally {
		releaseSingleUserCaseReadLock();
	}
}

/**
 * Get all blackboard artifacts that have an attribute of the given type and
 * String value. Does not included rejected artifacts.
 *
 * @param attrType   attribute of this attribute type to look for in the
 *                   artifacts
 * @param subString  value substring of the string attribute of the attrType
 *                   type to look for
 * @param startsWith if true, the artifact attribute string should start
 *                   with the substring, if false, it should just contain it
 *
 * @return a list of blackboard artifacts with such an attribute
 *
 * @throws TskCoreException exception thrown if a critical error occurred
 *                          within tsk core and artifacts could not be
 *                          queried
 * @deprecated Do not use.
 */
@Deprecated
public List getBlackboardArtifacts(BlackboardAttribute.ATTRIBUTE_TYPE attrType, String subString, boolean startsWith) throws TskCoreException {
	// Build the LIKE pattern: leading wildcard always; trailing wildcard
	// only for "contains" matching (startsWith == false).
	String valSubStr = "%" + subString; //NON-NLS
	if (startsWith == false) {
		valSubStr += "%"; //NON-NLS
	}
	acquireSingleUserCaseReadLock();
	// NOTE(review): 'subString' is concatenated directly into the SQL
	// string; quotes and LIKE metacharacters are not escaped.
	try (CaseDbConnection connection = connections.getConnection();
			Statement statement = connection.createStatement();
			ResultSet resultSet = connection.executeQuery(statement, "SELECT DISTINCT arts.artifact_id AS artifact_id, " //NON-NLS
					+ " arts.obj_id AS obj_id, arts.artifact_obj_id AS artifact_obj_id, arts.data_source_obj_id AS data_source_obj_id, arts.artifact_type_id AS artifact_type_id, " //NON-NLS
					+ " types.type_name AS type_name, types.display_name AS display_name, " //NON-NLS
					+ " arts.review_status_id AS review_status_id " //NON-NLS
					+ " FROM blackboard_artifacts AS arts, blackboard_attributes AS attrs, blackboard_artifact_types AS types " //NON-NLS
					+ " WHERE arts.artifact_id = attrs.artifact_id " //NON-NLS
					+ " AND attrs.attribute_type_id = " + attrType.getTypeID() //NON-NLS
					+ " AND LOWER(attrs.value_text) LIKE LOWER('" + valSubStr + "')"
					+ " AND types.artifact_type_id=arts.artifact_type_id "
					+ " AND arts.review_status_id !=" + BlackboardArtifact.ReviewStatus.REJECTED.getID());) {
		// Partition matches by artifact category so each half can be
		// materialized with the right artifact class.
		List analysisArtifactObjIds = new ArrayList<>();
		List dataArtifactObjIds = new ArrayList<>();
		while (resultSet.next()) {
			BlackboardArtifact.Type type = this.getArtifactType(resultSet.getInt("artifact_type_id"));
			if (type.getCategory() == BlackboardArtifact.Category.ANALYSIS_RESULT) {
				analysisArtifactObjIds.add(resultSet.getLong("artifact_obj_id"));
			} else {
				dataArtifactObjIds.add(resultSet.getLong("artifact_obj_id"));
			}
		}

		ArrayList artifacts = new ArrayList<>();
		if (!analysisArtifactObjIds.isEmpty()) {
			artifacts.addAll(getArtifactsForValues(BlackboardArtifact.Category.ANALYSIS_RESULT, "artifacts.artifact_obj_id", analysisArtifactObjIds, connection));
		}

		if (!dataArtifactObjIds.isEmpty()) {
			artifacts.addAll(getArtifactsForValues(BlackboardArtifact.Category.DATA_ARTIFACT, "artifacts.artifact_obj_id", dataArtifactObjIds, connection));
		}
		return artifacts;
	} catch (SQLException ex) {
		throw new TskCoreException("Error getting blackboard artifacts by attribute. " + ex.getMessage(), ex);
	} finally {
		releaseSingleUserCaseReadLock();
	}
}

/**
 * Get all blackboard artifacts that have an attribute of the given type and
 * integer value. Does not included rejected artifacts.
 *
 * @param attrType attribute of this attribute type to look for in the
 *                 artifacts
 * @param value    value of the attribute of the attrType type to look for
 *
 * @return a list of blackboard artifacts with such an attribute
 *
 * @throws TskCoreException exception thrown if a critical error occurred
 *                          within tsk core and artifacts could not be
 *                          queried
 * @deprecated Do not use.
*/
@Deprecated
public List getBlackboardArtifacts(BlackboardAttribute.ATTRIBUTE_TYPE attrType, int value) throws TskCoreException {
	acquireSingleUserCaseReadLock();
	try (CaseDbConnection connection = connections.getConnection();
			Statement statement = connection.createStatement();
			ResultSet resultSet = connection.executeQuery(statement, "SELECT DISTINCT arts.artifact_id AS artifact_id, " //NON-NLS
					+ " arts.obj_id AS obj_id, arts.artifact_obj_id AS artifact_obj_id, arts.data_source_obj_id AS data_source_obj_id, arts.artifact_type_id AS artifact_type_id, "
					+ " types.type_name AS type_name, types.display_name AS display_name, "
					+ " arts.review_status_id AS review_status_id " //NON-NLS
					+ " FROM blackboard_artifacts AS arts, blackboard_attributes AS attrs, blackboard_artifact_types AS types " //NON-NLS
					+ "WHERE arts.artifact_id = attrs.artifact_id " //NON-NLS
					+ " AND attrs.attribute_type_id = " + attrType.getTypeID() //NON-NLS
					+ " AND attrs.value_int32 = " + value //NON-NLS
					+ " AND types.artifact_type_id=arts.artifact_type_id "
					+ " AND arts.review_status_id !=" + BlackboardArtifact.ReviewStatus.REJECTED.getID());) {
		// Partition matches by artifact category so each half can be
		// materialized with the right artifact class.
		List analysisArtifactObjIds = new ArrayList<>();
		List dataArtifactObjIds = new ArrayList<>();
		while (resultSet.next()) {
			BlackboardArtifact.Type type = this.getArtifactType(resultSet.getInt("artifact_type_id"));
			if (type.getCategory() == BlackboardArtifact.Category.ANALYSIS_RESULT) {
				analysisArtifactObjIds.add(resultSet.getLong("artifact_obj_id"));
			} else {
				dataArtifactObjIds.add(resultSet.getLong("artifact_obj_id"));
			}
		}

		ArrayList artifacts = new ArrayList<>();
		if (!analysisArtifactObjIds.isEmpty()) {
			artifacts.addAll(getArtifactsForValues(BlackboardArtifact.Category.ANALYSIS_RESULT, "artifacts.artifact_obj_id", analysisArtifactObjIds, connection));
		}

		if (!dataArtifactObjIds.isEmpty()) {
			artifacts.addAll(getArtifactsForValues(BlackboardArtifact.Category.DATA_ARTIFACT, "artifacts.artifact_obj_id", dataArtifactObjIds, connection));
		}
		return artifacts;
	} catch (SQLException ex) {
		throw new TskCoreException("Error getting blackboard artifacts by attribute", ex);
	} finally {
		releaseSingleUserCaseReadLock();
	}
}

/**
 * Get all blackboard artifacts that have an attribute of the given type and
 * long value. Does not included rejected artifacts.
 *
 * @param attrType attribute of this attribute type to look for in the
 *                 artifacts
 * @param value    value of the attribute of the attrType type to look for
 *
 * @return a list of blackboard artifacts with such an attribute
 *
 * @throws TskCoreException exception thrown if a critical error occurred
 *                          within tsk core and artifacts could not be
 *                          queried
 *
 * @deprecated Do not use.
 */
@Deprecated
public List getBlackboardArtifacts(BlackboardAttribute.ATTRIBUTE_TYPE attrType, long value) throws TskCoreException {
	acquireSingleUserCaseReadLock();
	try (CaseDbConnection connection = connections.getConnection();
			Statement statement = connection.createStatement();
			ResultSet resultSet = connection.executeQuery(statement, "SELECT DISTINCT arts.artifact_id AS artifact_id, " //NON-NLS
					+ " arts.obj_id AS obj_id, arts.artifact_obj_id AS artifact_obj_id, arts.data_source_obj_id AS data_source_obj_id, arts.artifact_type_id AS artifact_type_id, "
					+ " types.type_name AS type_name, types.display_name AS display_name, "
					+ " arts.review_status_id AS review_status_id " //NON-NLS
					+ " FROM blackboard_artifacts AS arts, blackboard_attributes AS attrs, blackboard_artifact_types AS types " //NON-NLS
					+ " WHERE arts.artifact_id = attrs.artifact_id " //NON-NLS
					+ " AND attrs.attribute_type_id = " + attrType.getTypeID() //NON-NLS
					+ " AND attrs.value_int64 = " + value //NON-NLS
					+ " AND types.artifact_type_id=arts.artifact_type_id "
					+ " AND arts.review_status_id !=" + BlackboardArtifact.ReviewStatus.REJECTED.getID());) {
		// Partition matches by artifact category so each half can be
		// materialized with the right artifact class.
		List analysisArtifactObjIds = new ArrayList<>();
		List dataArtifactObjIds = new ArrayList<>();
		while (resultSet.next()) {
			BlackboardArtifact.Type type = this.getArtifactType(resultSet.getInt("artifact_type_id"));
			if (type.getCategory() == BlackboardArtifact.Category.ANALYSIS_RESULT) {
				analysisArtifactObjIds.add(resultSet.getLong("artifact_obj_id"));
			} else {
				dataArtifactObjIds.add(resultSet.getLong("artifact_obj_id"));
			}
		}

		ArrayList artifacts = new ArrayList<>();
		if (!analysisArtifactObjIds.isEmpty()) {
			artifacts.addAll(getArtifactsForValues(BlackboardArtifact.Category.ANALYSIS_RESULT, "artifacts.artifact_obj_id", analysisArtifactObjIds, connection));
		}

		if (!dataArtifactObjIds.isEmpty()) {
			artifacts.addAll(getArtifactsForValues(BlackboardArtifact.Category.DATA_ARTIFACT, "artifacts.artifact_obj_id", dataArtifactObjIds, connection));
		}
		return artifacts;
	} catch (SQLException ex) {
		throw new TskCoreException("Error getting blackboard artifacts by attribute. " + ex.getMessage(), ex);
	} finally {
		releaseSingleUserCaseReadLock();
	}
}

/**
 * Get all blackboard artifacts that have an attribute of the given type and
 * double value. Does not included rejected artifacts.
 *
 * @param attrType attribute of this attribute type to look for in the
 *                 artifacts
 * @param value    value of the attribute of the attrType type to look for
 *
 * @return a list of blackboard artifacts with such an attribute
 *
 * @throws TskCoreException exception thrown if a critical error occurred
 *                          within tsk core and artifacts could not be
 *                          queried
 *
 * @deprecated Do not use.
*/
@Deprecated
public List getBlackboardArtifacts(BlackboardAttribute.ATTRIBUTE_TYPE attrType, double value) throws TskCoreException {
	acquireSingleUserCaseReadLock();
	try (CaseDbConnection connection = connections.getConnection();
			Statement statement = connection.createStatement();
			ResultSet resultSet = connection.executeQuery(statement, "SELECT DISTINCT arts.artifact_id AS artifact_id, " //NON-NLS
					+ " arts.obj_id AS obj_id, arts.artifact_obj_id AS artifact_obj_id, arts.data_source_obj_id AS data_source_obj_id, arts.artifact_type_id AS artifact_type_id, "
					+ " types.type_name AS type_name, types.display_name AS display_name, "
					+ " arts.review_status_id AS review_status_id " //NON-NLS
					+ " FROM blackboard_artifacts AS arts, blackboard_attributes AS attrs, blackboard_artifact_types AS types " //NON-NLS
					+ " WHERE arts.artifact_id = attrs.artifact_id " //NON-NLS
					+ " AND attrs.attribute_type_id = " + attrType.getTypeID() //NON-NLS
					+ " AND attrs.value_double = " + value //NON-NLS
					+ " AND types.artifact_type_id=arts.artifact_type_id "
					+ " AND arts.review_status_id !=" + BlackboardArtifact.ReviewStatus.REJECTED.getID());) {
		// Partition matches by artifact category so each half can be
		// materialized with the right artifact class.
		List analysisArtifactObjIds = new ArrayList<>();
		List dataArtifactObjIds = new ArrayList<>();
		while (resultSet.next()) {
			BlackboardArtifact.Type type = this.getArtifactType(resultSet.getInt("artifact_type_id"));
			if (type.getCategory() == BlackboardArtifact.Category.ANALYSIS_RESULT) {
				analysisArtifactObjIds.add(resultSet.getLong("artifact_obj_id"));
			} else {
				dataArtifactObjIds.add(resultSet.getLong("artifact_obj_id"));
			}
		}

		ArrayList artifacts = new ArrayList<>();
		if (!analysisArtifactObjIds.isEmpty()) {
			artifacts.addAll(getArtifactsForValues(BlackboardArtifact.Category.ANALYSIS_RESULT, "artifacts.artifact_obj_id", analysisArtifactObjIds, connection));
		}

		if (!dataArtifactObjIds.isEmpty()) {
			artifacts.addAll(getArtifactsForValues(BlackboardArtifact.Category.DATA_ARTIFACT, "artifacts.artifact_obj_id", dataArtifactObjIds, connection));
		}
		return artifacts;
	} catch (SQLException ex) {
		throw new TskCoreException("Error getting blackboard artifacts by attribute", ex);
	} finally {
		releaseSingleUserCaseReadLock();
	}
}

/**
 * Get all blackboard artifacts that have an attribute of the given type and
 * byte value. Does not include rejected artifacts.
 *
 * @param attrType attribute of this attribute type to look for in the
 *                 artifacts
 * @param value    value of the attribute of the attrType type to look for
 *
 * @return a list of blackboard artifacts with such an attribute
 *
 * @throws TskCoreException exception thrown if a critical error occurred
 *                          within tsk core and artifacts could not be
 *                          queried
 *
 * @deprecated Do not use.
 */
@Deprecated
public List getBlackboardArtifacts(BlackboardAttribute.ATTRIBUTE_TYPE attrType, byte value) throws TskCoreException {
	acquireSingleUserCaseReadLock();
	try (CaseDbConnection connection = connections.getConnection();
			Statement statement = connection.createStatement();
			ResultSet resultSet = connection.executeQuery(statement, "SELECT DISTINCT arts.artifact_id AS artifact_id, " //NON-NLS
					+ " arts.obj_id AS obj_id, arts.artifact_obj_id AS artifact_obj_id, arts.data_source_obj_id AS data_source_obj_id, arts.artifact_type_id AS artifact_type_id, "
					+ " types.type_name AS type_name, types.display_name AS display_name, "
					+ " arts.review_status_id AS review_status_id " //NON-NLS
					+ " FROM blackboard_artifacts AS arts, blackboard_attributes AS attrs, blackboard_artifact_types AS types " //NON-NLS
					+ " WHERE arts.artifact_id = attrs.artifact_id " //NON-NLS
					+ " AND attrs.attribute_type_id = " + attrType.getTypeID() //NON-NLS
					+ " AND attrs.value_byte = " + value //NON-NLS
					+ " AND types.artifact_type_id=arts.artifact_type_id "
					+ " AND arts.review_status_id !=" + BlackboardArtifact.ReviewStatus.REJECTED.getID());) {
		// Partition matches by artifact category so each half can be
		// materialized with the right artifact class.
		List analysisArtifactObjIds = new ArrayList<>();
		List dataArtifactObjIds = new ArrayList<>();
		while (resultSet.next()) {
			BlackboardArtifact.Type type = this.getArtifactType(resultSet.getInt("artifact_type_id"));
			if (type.getCategory() == BlackboardArtifact.Category.ANALYSIS_RESULT) {
				analysisArtifactObjIds.add(resultSet.getLong("artifact_obj_id"));
			} else {
				dataArtifactObjIds.add(resultSet.getLong("artifact_obj_id"));
			}
		}

		ArrayList artifacts = new ArrayList<>();
		if (!analysisArtifactObjIds.isEmpty()) {
			artifacts.addAll(getArtifactsForValues(BlackboardArtifact.Category.ANALYSIS_RESULT, "artifacts.artifact_obj_id", analysisArtifactObjIds, connection));
		}

		if (!dataArtifactObjIds.isEmpty()) {
			artifacts.addAll(getArtifactsForValues(BlackboardArtifact.Category.DATA_ARTIFACT, "artifacts.artifact_obj_id", dataArtifactObjIds, connection));
		}
		return artifacts;
	} catch (SQLException ex) {
		throw new TskCoreException("Error getting blackboard artifacts by attribute", ex);
	} finally {
		releaseSingleUserCaseReadLock();
	}
}

/**
 * Gets a list of all the artifact types for this case
 *
 * @return a list of artifact types
 *
 * @throws TskCoreException when there is an error getting the types
 */
public Iterable getArtifactTypes() throws TskCoreException {
	CaseDbConnection connection = null;
	Statement s = null;
	ResultSet rs = null;
	acquireSingleUserCaseReadLock();
	try {
		connection = connections.getConnection();
		s = connection.createStatement();
		rs = connection.executeQuery(s, "SELECT artifact_type_id, type_name, display_name, category_type FROM blackboard_artifact_types"); //NON-NLS
		ArrayList artifactTypes = new ArrayList();
		while (rs.next()) {
			artifactTypes.add(new BlackboardArtifact.Type(rs.getInt("artifact_type_id"),
					rs.getString("type_name"), rs.getString("display_name"),
					BlackboardArtifact.Category.fromID(rs.getInt("category_type"))));
		}
		return artifactTypes;
	} catch (SQLException ex) {
		throw new TskCoreException("Error getting artifact types", ex); //NON-NLS
	} finally {
		closeResultSet(rs);
		closeStatement(s);
		closeConnection(connection);
		releaseSingleUserCaseReadLock();
	}
}

/**
 * Get all of the standard blackboard artifact types that
are in use in the * blackboard. * * @return List of standard blackboard artifact types * * @throws TskCoreException */ public ArrayList getBlackboardArtifactTypesInUse() throws TskCoreException { String typeIdList = ""; for (int i = 0; i < BlackboardArtifact.ARTIFACT_TYPE.values().length; ++i) { typeIdList += BlackboardArtifact.ARTIFACT_TYPE.values()[i].getTypeID(); if (i < BlackboardArtifact.ARTIFACT_TYPE.values().length - 1) { typeIdList += ", "; } } String query = "SELECT DISTINCT artifact_type_id FROM blackboard_artifacts " + "WHERE artifact_type_id IN (" + typeIdList + ")"; CaseDbConnection connection = null; Statement s = null; ResultSet rs = null; acquireSingleUserCaseReadLock(); try { connection = connections.getConnection(); s = connection.createStatement(); rs = connection.executeQuery(s, query); ArrayList usedArts = new ArrayList(); while (rs.next()) { usedArts.add(ARTIFACT_TYPE.fromID(rs.getInt("artifact_type_id"))); } return usedArts; } catch (SQLException ex) { throw new TskCoreException("Error getting artifact types in use", ex); } finally { closeResultSet(rs); closeStatement(s); closeConnection(connection); releaseSingleUserCaseReadLock(); } } /** * Gets the list of all unique artifact IDs in use. * * Gets both static and dynamic IDs. 
*
 * @return The list of unique IDs
 *
 * @throws TskCoreException exception thrown if a critical error occurred
 *                          within tsk core
 */
public List getArtifactTypesInUse() throws TskCoreException {
	CaseDbConnection connection = null;
	Statement s = null;
	ResultSet rs = null;
	acquireSingleUserCaseReadLock();
	try {
		connection = connections.getConnection();
		s = connection.createStatement();
		// INNER JOIN restricts the type table to types that actually have
		// at least one artifact row.
		rs = connection.executeQuery(s,
				"SELECT DISTINCT arts.artifact_type_id AS artifact_type_id, "
				+ "types.type_name AS type_name, "
				+ "types.display_name AS display_name, "
				+ "types.category_type AS category_type "
				+ "FROM blackboard_artifact_types AS types "
				+ "INNER JOIN blackboard_artifacts AS arts "
				+ "ON arts.artifact_type_id = types.artifact_type_id"); //NON-NLS
		List uniqueArtifactTypes = new ArrayList();
		while (rs.next()) {
			uniqueArtifactTypes.add(new BlackboardArtifact.Type(rs.getInt("artifact_type_id"),
					rs.getString("type_name"), rs.getString("display_name"),
					BlackboardArtifact.Category.fromID(rs.getInt("category_type"))));
		}
		return uniqueArtifactTypes;
	} catch (SQLException ex) {
		throw new TskCoreException("Error getting attribute types", ex);
	} finally {
		closeResultSet(rs);
		closeStatement(s);
		closeConnection(connection);
		releaseSingleUserCaseReadLock();
	}
}

/**
 * Gets a list of all the attribute types for this case
 *
 * @return a list of attribute types
 *
 * @throws TskCoreException when there is an error getting the types
 */
public List getAttributeTypes() throws TskCoreException {
	CaseDbConnection connection = null;
	Statement s = null;
	ResultSet rs = null;
	acquireSingleUserCaseReadLock();
	try {
		connection = connections.getConnection();
		s = connection.createStatement();
		rs = connection.executeQuery(s, "SELECT attribute_type_id, type_name, display_name, value_type FROM blackboard_attribute_types"); //NON-NLS
		ArrayList attribute_types = new ArrayList();
		while (rs.next()) {
			attribute_types.add(new BlackboardAttribute.Type(rs.getInt("attribute_type_id"), rs.getString("type_name"),
					rs.getString("display_name"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.fromType(rs.getLong("value_type"))));
		}
		return attribute_types;
	} catch (SQLException ex) {
		throw new TskCoreException("Error getting attribute types", ex);
	} finally {
		closeResultSet(rs);
		closeStatement(s);
		closeConnection(connection);
		releaseSingleUserCaseReadLock();
	}
}

/**
 * Get count of blackboard attribute types
 *
 * Counts both static (in enum) and dynamic attributes types (created by
 * modules at runtime)
 *
 * @return count of attribute types
 *
 * @throws TskCoreException exception thrown if a critical error occurs
 *                          within TSK core
 */
public int getBlackboardAttributeTypesCount() throws TskCoreException {
	CaseDbConnection connection = null;
	Statement s = null;
	ResultSet rs = null;
	acquireSingleUserCaseReadLock();
	try {
		connection = connections.getConnection();
		s = connection.createStatement();
		rs = connection.executeQuery(s, "SELECT COUNT(*) AS count FROM blackboard_attribute_types"); //NON-NLS
		int count = 0;
		if (rs.next()) {
			count = rs.getInt("count");
		}
		return count;
	} catch (SQLException ex) {
		// NOTE(review): message says "artifacts by type" but this method
		// counts attribute types — the wording looks copy-pasted.
		throw new TskCoreException("Error getting number of blackboard artifacts by type", ex);
	} finally {
		closeResultSet(rs);
		closeStatement(s);
		closeConnection(connection);
		releaseSingleUserCaseReadLock();
	}
}

/**
 * Helper method to get count of all artifacts matching the type id and
 * object id. Does not included rejected artifacts.
 *
 * @param artifactTypeID artifact type id
 * @param obj_id         associated object id
 *
 * @return count of matching blackboard artifacts
 *
 * @throws TskCoreException exception thrown if a critical error occurs
 *                          within TSK core
 */
private long getArtifactsCountHelper(int artifactTypeID, long obj_id) throws TskCoreException {
	CaseDbConnection connection = null;
	ResultSet rs = null;
	acquireSingleUserCaseReadLock();
	try {
		connection = connections.getConnection();
		// SELECT COUNT(*) AS count FROM blackboard_artifacts WHERE obj_id = ? AND artifact_type_id = ?
		PreparedStatement statement = connection.getPreparedStatement(PREPARED_STATEMENT.COUNT_ARTIFACTS_BY_SOURCE_AND_TYPE);
		statement.clearParameters();
		statement.setLong(1, obj_id);
		statement.setInt(2, artifactTypeID);
		rs = connection.executeQuery(statement);
		long count = 0;
		if (rs.next()) {
			count = rs.getLong("count");
		}
		return count;
	} catch (SQLException ex) {
		throw new TskCoreException("Error getting blackboard artifact count", ex);
	} finally {
		closeResultSet(rs);
		closeConnection(connection);
		releaseSingleUserCaseReadLock();
	}
}

/**
 * Get all blackboard artifacts of a given type for the given source (object
 * id). Does not included rejected artifacts.
 *
 * @param artifactTypeName artifact type name
 * @param obj_id           object id
 *
 * @return list of blackboard artifacts
 *
 * @throws TskCoreException exception thrown if a critical error occurs
 *                          within TSK core
 */
public ArrayList getBlackboardArtifacts(String artifactTypeName, long obj_id) throws TskCoreException {
	// Delegates to the Blackboard API; kept for backward compatibility.
	ArrayList artifacts = new ArrayList<>();
	artifacts.addAll(blackboard.getArtifactsBySourceId(getArtifactType(artifactTypeName), obj_id));
	return artifacts;
}

/**
 * Get all blackboard artifacts of a given type for the given object id.
 * Does not included rejected artifacts.
 *
 * @param artifactTypeID artifact type id (must exist in database)
 * @param obj_id         object id
 *
 * @return list of blackboard artifacts
 *
 * @throws TskCoreException exception thrown if a critical error occurs
 *                          within TSK core
 */
public ArrayList getBlackboardArtifacts(int artifactTypeID, long obj_id) throws TskCoreException {
	// Delegates to the Blackboard API; kept for backward compatibility.
	ArrayList artifacts = new ArrayList<>();
	artifacts.addAll(blackboard.getArtifactsBySourceId(getArtifactType(artifactTypeID), obj_id));
	return artifacts;
}

/**
 * Get all blackboard artifacts of a given type for the given object id.
 * Does not included rejected artifacts.
* * @param artifactType artifact type enum * @param obj_id object id * * @return list of blackboard artifacts * * @throws TskCoreException exception thrown if a critical error occurs * within TSK core */ public ArrayList getBlackboardArtifacts(ARTIFACT_TYPE artifactType, long obj_id) throws TskCoreException { return getBlackboardArtifacts(artifactType.getTypeID(), obj_id); } /** * Get count of all blackboard artifacts of a given type for the given * object id. Does not include rejected artifacts. * * @param artifactTypeName artifact type name * @param obj_id object id * * @return count of blackboard artifacts * * @throws TskCoreException exception thrown if a critical error occurs * within TSK core */ public long getBlackboardArtifactsCount(String artifactTypeName, long obj_id) throws TskCoreException { int artifactTypeID = this.getArtifactType(artifactTypeName).getTypeID(); if (artifactTypeID == -1) { return 0; } return getArtifactsCountHelper(artifactTypeID, obj_id); } /** * Get count of all blackboard artifacts of a given type for the given * object id. Does not include rejected artifacts. * * @param artifactTypeID artifact type id (must exist in database) * @param obj_id object id * * @return count of blackboard artifacts * * @throws TskCoreException exception thrown if a critical error occurs * within TSK core */ public long getBlackboardArtifactsCount(int artifactTypeID, long obj_id) throws TskCoreException { return getArtifactsCountHelper(artifactTypeID, obj_id); } /** * Get count of all blackboard artifacts of a given type for the given * object id. Does not include rejected artifacts. 
* * @param artifactType artifact type enum * @param obj_id object id * * @return count of blackboard artifacts * * @throws TskCoreException exception thrown if a critical error occurs * within TSK core */ public long getBlackboardArtifactsCount(ARTIFACT_TYPE artifactType, long obj_id) throws TskCoreException { return getArtifactsCountHelper(artifactType.getTypeID(), obj_id); } /** * Get all blackboard artifacts of a given type. Does not included rejected * artifacts. * * @param artifactTypeName artifact type name * * @return list of blackboard artifacts * * @throws TskCoreException exception thrown if a critical error occurs * within TSK core */ public ArrayList getBlackboardArtifacts(String artifactTypeName) throws TskCoreException { ArrayList artifacts = new ArrayList<>(); artifacts.addAll(blackboard.getArtifactsByType(getArtifactType(artifactTypeName))); return artifacts; } /** * Get all blackboard artifacts of a given type. Does not included rejected * artifacts. * * @param artifactType artifact type enum * * @return list of blackboard artifacts * * @throws TskCoreException exception thrown if a critical error occurs * within TSK core */ public ArrayList getBlackboardArtifacts(ARTIFACT_TYPE artifactType) throws TskCoreException { ArrayList artifacts = new ArrayList<>(); artifacts.addAll(blackboard.getArtifactsByType(getArtifactType(artifactType.getTypeID()))); return artifacts; } /** * Get all blackboard artifacts of a given type with an attribute of a given * type and String value. Does not included rejected artifacts. * * @param artifactType artifact type enum * @param attrType attribute type enum * @param value String value of attribute * * @return list of blackboard artifacts * * @throws TskCoreException exception thrown if a critical error occurs * within TSK core * * @deprecated Do not use. 
*/ @Deprecated public List getBlackboardArtifacts(ARTIFACT_TYPE artifactType, BlackboardAttribute.ATTRIBUTE_TYPE attrType, String value) throws TskCoreException { String dataArtifactJoin = "tsk_data_artifacts AS datarts ON datarts.artifact_obj_id = arts.artifact_obj_id"; String analysisResultJoin = "tsk_analysis_results AS anresult ON anresult.artifact_obj_id = arts.artifact_obj_id"; String dataArtifactColumns = ", datarts.os_account_obj_id AS os_account_obj_id"; String analysResultColumns = ", anresult.conclusion AS conclusion, anresult.significance AS significance, anresult.priority AS priority, anresult.configuration AS configuration, anresult.justification AS justification "; String formatQuery = "SELECT DISTINCT arts.artifact_id AS artifact_id, " //NON-NLS + "arts.obj_id AS obj_id, arts.artifact_obj_id as artifact_obj_id, arts.data_source_obj_id AS data_source_obj_id, arts.artifact_type_id AS artifact_type_id, " + "types.type_name AS type_name, types.display_name AS display_name," + "arts.review_status_id AS review_status_id %s "//NON-NLS + "FROM blackboard_artifacts AS arts " + "JOIN blackboard_attributes AS attrs ON arts.artifact_id = attrs.artifact_id " + "JOIN blackboard_artifact_types AS types ON types.artifact_type_id = arts.artifact_type_id " //NON-NLS + "LEFT JOIN %s " + "WHERE arts.artifact_id = attrs.artifact_id " //NON-NLS + "AND attrs.attribute_type_id = %d " + " AND arts.artifact_type_id = %d " + " AND attrs.value_text = '%s' " //NON-NLS + " AND types.artifact_type_id=arts.artifact_type_id " + " AND arts.review_status_id != %d"; String query = String.format(formatQuery, (artifactType.getCategory() == BlackboardArtifact.Category.ANALYSIS_RESULT ? analysResultColumns : dataArtifactColumns), (artifactType.getCategory() == BlackboardArtifact.Category.ANALYSIS_RESULT ? 
analysisResultJoin : dataArtifactJoin), attrType.getTypeID(), artifactType.getTypeID(), value, BlackboardArtifact.ReviewStatus.REJECTED.getID()); acquireSingleUserCaseReadLock(); try (CaseDbConnection connection = connections.getConnection(); Statement s = connection.createStatement(); ResultSet rs = connection.executeQuery(s, query)) { ArrayList artifacts = new ArrayList<>(); while (rs.next()) { if (artifactType.getCategory() == BlackboardArtifact.Category.DATA_ARTIFACT) { Long osAccountObjId = rs.getLong("os_account_obj_id"); if (rs.wasNull()) { osAccountObjId = null; } artifacts.add(new DataArtifact(this, rs.getLong("artifact_id"), rs.getLong("obj_id"), rs.getLong("artifact_obj_id"), rs.getObject("data_source_obj_id") != null ? rs.getLong("data_source_obj_id") : null, rs.getInt("artifact_type_id"), rs.getString("type_name"), rs.getString("display_name"), BlackboardArtifact.ReviewStatus.withID(rs.getInt("review_status_id")), osAccountObjId, false)); } else { artifacts.add(new AnalysisResult(this, rs.getLong("artifact_id"), rs.getLong("obj_id"), rs.getLong("artifact_obj_id"), rs.getObject("data_source_obj_id") != null ? rs.getLong("data_source_obj_id") : null, rs.getInt("artifact_type_id"), rs.getString("type_name"), rs.getString("display_name"), BlackboardArtifact.ReviewStatus.withID(rs.getInt("review_status_id")), new Score(Score.Significance.fromID(rs.getInt("significance")), Score.Priority.fromID(rs.getInt("priority"))), rs.getString("conclusion"), rs.getString("configuration"), rs.getString("justification"))); } } return artifacts; } catch (SQLException ex) { throw new TskCoreException("Error getting blackboard artifacts by artifact type and attribute. 
" + ex.getMessage(), ex); } finally { releaseSingleUserCaseReadLock(); } } /** * Get the blackboard artifact with the given artifact id (artifact_id in * blackboard_artifacts) * * @param artifactID artifact ID (artifact_id column) * * @return blackboard artifact * * @throws TskCoreException exception thrown if a critical error occurs * within TSK core */ public BlackboardArtifact getBlackboardArtifact(long artifactID) throws TskCoreException { List dataArtifacts = blackboard.getDataArtifactsWhere("artifacts.artifact_id = " + artifactID); if (!dataArtifacts.isEmpty()) { return dataArtifacts.get(0); } List analysisResults = blackboard.getAnalysisResultsWhere("artifacts.artifact_id = " + artifactID); if (!analysisResults.isEmpty()) { return analysisResults.get(0); } throw new TskCoreException("No blackboard artifact with id " + artifactID); } /** * Add a blackboard attribute. * * @param attr A blackboard attribute. * @param artifactTypeId The type of artifact associated with the attribute. * * @throws TskCoreException thrown if a critical error occurs. */ public void addBlackboardAttribute(BlackboardAttribute attr, int artifactTypeId) throws TskCoreException { acquireSingleUserCaseWriteLock(); try (CaseDbConnection connection = connections.getConnection();) { addBlackBoardAttribute(attr, artifactTypeId, connection); } catch (SQLException ex) { throw new TskCoreException("Error adding blackboard attribute " + attr.toString(), ex); } finally { releaseSingleUserCaseWriteLock(); } } /** * Add a set blackboard attributes. * * @param attributes A set of blackboard attribute. * @param artifactTypeId The type of artifact associated with the * attributes. * * @throws TskCoreException thrown if a critical error occurs. 
*/ public void addBlackboardAttributes(Collection attributes, int artifactTypeId) throws TskCoreException { CaseDbConnection connection = null; acquireSingleUserCaseWriteLock(); try { connection = connections.getConnection(); connection.beginTransaction(); for (final BlackboardAttribute attr : attributes) { addBlackBoardAttribute(attr, artifactTypeId, connection); } connection.commitTransaction(); } catch (SQLException ex) { rollbackTransaction(connection); throw new TskCoreException("Error adding blackboard attributes", ex); } finally { closeConnection(connection); releaseSingleUserCaseWriteLock(); } } void addBlackBoardAttribute(BlackboardAttribute attr, int artifactTypeId, CaseDbConnection connection) throws SQLException, TskCoreException { PreparedStatement statement; switch (attr.getAttributeType().getValueType()) { case STRING: case JSON: statement = connection.getPreparedStatement(PREPARED_STATEMENT.INSERT_STRING_ATTRIBUTE); statement.clearParameters(); statement.setString(7, attr.getValueString()); break; case BYTE: statement = connection.getPreparedStatement(PREPARED_STATEMENT.INSERT_BYTE_ATTRIBUTE); statement.clearParameters(); statement.setBytes(7, attr.getValueBytes()); break; case INTEGER: statement = connection.getPreparedStatement(PREPARED_STATEMENT.INSERT_INT_ATTRIBUTE); statement.clearParameters(); statement.setInt(7, attr.getValueInt()); break; case LONG: statement = connection.getPreparedStatement(PREPARED_STATEMENT.INSERT_LONG_ATTRIBUTE); statement.clearParameters(); statement.setLong(7, attr.getValueLong()); break; case DOUBLE: statement = connection.getPreparedStatement(PREPARED_STATEMENT.INSERT_DOUBLE_ATTRIBUTE); statement.clearParameters(); statement.setDouble(7, attr.getValueDouble()); break; case DATETIME: statement = connection.getPreparedStatement(PREPARED_STATEMENT.INSERT_LONG_ATTRIBUTE); statement.clearParameters(); statement.setLong(7, attr.getValueLong()); break; default: throw new TskCoreException("Unrecognized artifact attribute 
value type"); } statement.setLong(1, attr.getArtifactID()); statement.setInt(2, artifactTypeId); statement.setString(3, attr.getSourcesCSV()); statement.setString(4, ""); statement.setInt(5, attr.getAttributeType().getTypeID()); statement.setLong(6, attr.getAttributeType().getValueType().getType()); connection.executeUpdate(statement); } void addFileAttribute(Attribute attr, CaseDbConnection connection) throws SQLException, TskCoreException { PreparedStatement statement; statement = connection.getPreparedStatement(PREPARED_STATEMENT.INSERT_FILE_ATTRIBUTE, Statement.RETURN_GENERATED_KEYS); statement.clearParameters(); statement.setLong(1, attr.getAttributeParentId()); statement.setInt(2, attr.getAttributeType().getTypeID()); statement.setLong(3, attr.getAttributeType().getValueType().getType()); if (attr.getAttributeType().getValueType() == TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.BYTE) { statement.setBytes(4, attr.getValueBytes()); } else { statement.setBytes(4, null); } if (attr.getAttributeType().getValueType() == TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING || attr.getAttributeType().getValueType() == TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.JSON) { statement.setString(5, attr.getValueString()); } else { statement.setString(5, null); } if (attr.getAttributeType().getValueType() == TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.INTEGER) { statement.setInt(6, attr.getValueInt()); } else { statement.setNull(6, java.sql.Types.INTEGER); } if (attr.getAttributeType().getValueType() == TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DATETIME || attr.getAttributeType().getValueType() == TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.LONG) { statement.setLong(7, attr.getValueLong()); } else { statement.setNull(7, java.sql.Types.BIGINT); } if (attr.getAttributeType().getValueType() == TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DOUBLE) { statement.setDouble(8, attr.getValueDouble()); } else { statement.setNull(8, java.sql.Types.DOUBLE); } connection.executeUpdate(statement); try (ResultSet resultSet = 
statement.getGeneratedKeys()) { if (!resultSet.next()) { throw new TskCoreException(String.format("Failed to insert file attribute " + "with id=%d. The expected key was not generated", attr.getId())); } attr.setId(resultSet.getLong(1)); } } /** * Adds a source name to the source column of one or more rows in the * blackboard attributes table. The source name will be added to a CSV list * in any rows that exactly match the attribute's artifact_id and value. * * @param attr The artifact attribute * @param source The source name. * * @throws TskCoreException */ String addSourceToArtifactAttribute(BlackboardAttribute attr, String source) throws TskCoreException { /* * WARNING: This is a temporary implementation that is not safe and * denormalizes the case datbase. * * TODO (JIRA-2294): Provide a safe and normalized solution to tracking * the sources of artifact attributes. */ if (null == source || source.isEmpty()) { throw new TskCoreException("Attempt to add null or empty source module name to artifact attribute"); } CaseDbConnection connection = null; acquireSingleUserCaseWriteLock(); Statement queryStmt = null; Statement updateStmt = null; ResultSet result = null; String newSources = ""; try { connection = connections.getConnection(); connection.beginTransaction(); String valueClause = ""; BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE valueType = attr.getAttributeType().getValueType(); if (BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.BYTE != valueType) { switch (valueType) { case STRING: case JSON: valueClause = " value_text = '" + escapeSingleQuotes(attr.getValueString()) + "'"; break; case INTEGER: valueClause = " value_int32 = " + attr.getValueInt(); break; case LONG: case DATETIME: valueClause = " value_int64 = " + attr.getValueLong(); break; case DOUBLE: valueClause = " value_double = " + attr.getValueDouble(); break; default: throw new TskCoreException(String.format("Unrecognized value type for attribute %s", attr.getDisplayString())); } 
String query = "SELECT source FROM blackboard_attributes WHERE" + " artifact_id = " + attr.getArtifactID() + " AND attribute_type_id = " + attr.getAttributeType().getTypeID() + " AND value_type = " + attr.getAttributeType().getValueType().getType() + " AND " + valueClause + ";"; queryStmt = connection.createStatement(); updateStmt = connection.createStatement(); result = connection.executeQuery(queryStmt, query); } else { /* * SELECT source FROM blackboard_attributes WHERE artifact_id = * ? AND attribute_type_id = ? AND value_type = 4 AND value_byte * = ? */ PreparedStatement statement = connection.getPreparedStatement(PREPARED_STATEMENT.SELECT_ATTR_BY_VALUE_BYTE); statement.clearParameters(); statement.setLong(1, attr.getArtifactID()); statement.setLong(2, attr.getAttributeType().getTypeID()); statement.setBytes(3, attr.getValueBytes()); result = connection.executeQuery(statement); } while (result.next()) { String oldSources = result.getString("source"); if (null != oldSources && !oldSources.isEmpty()) { Set uniqueSources = new HashSet(Arrays.asList(oldSources.split(","))); if (!uniqueSources.contains(source)) { newSources = oldSources + "," + source; } else { newSources = oldSources; } } else { newSources = source; } if (BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.BYTE != valueType) { String update = "UPDATE blackboard_attributes SET source = '" + newSources + "' WHERE" + " artifact_id = " + attr.getArtifactID() + " AND attribute_type_id = " + attr.getAttributeType().getTypeID() + " AND value_type = " + attr.getAttributeType().getValueType().getType() + " AND " + valueClause + ";"; connection.executeUpdate(updateStmt, update); } else { /* * UPDATE blackboard_attributes SET source = ? WHERE * artifact_id = ? AND attribute_type_id = ? AND value_type * = 4 AND value_byte = ? 
*/ PreparedStatement statement = connection.getPreparedStatement(PREPARED_STATEMENT.UPDATE_ATTR_BY_VALUE_BYTE); statement.clearParameters(); statement.setString(1, newSources); statement.setLong(2, attr.getArtifactID()); statement.setLong(3, attr.getAttributeType().getTypeID()); statement.setBytes(4, attr.getValueBytes()); connection.executeUpdate(statement); } } connection.commitTransaction(); return newSources; } catch (SQLException ex) { rollbackTransaction(connection); throw new TskCoreException(String.format("Error adding source module to attribute %s", attr.getDisplayString()), ex); } finally { closeResultSet(result); closeStatement(updateStmt); closeStatement(queryStmt); closeConnection(connection); releaseSingleUserCaseWriteLock(); } } /** * Add an attribute type with the given name * * @param attrTypeString Name of the new attribute * @param valueType The value type of this new attribute type * @param displayName The (non-unique) display name of the attribute type * * @return the id of the new attribute * * @throws TskCoreException exception thrown if a critical error occurs * within tsk core * @throws TskDataException exception thrown if attribute type was already * in the system */ public BlackboardAttribute.Type addArtifactAttributeType(String attrTypeString, TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE valueType, String displayName) throws TskCoreException, TskDataException { CaseDbConnection connection = null; acquireSingleUserCaseWriteLock(); Statement s = null; ResultSet rs = null; try { connection = connections.getConnection(); connection.beginTransaction(); s = connection.createStatement(); rs = connection.executeQuery(s, "SELECT attribute_type_id FROM blackboard_attribute_types WHERE type_name = '" + attrTypeString + "'"); //NON-NLS if (!rs.next()) { rs.close(); rs = connection.executeQuery(s, "SELECT MAX(attribute_type_id) AS highest_id FROM blackboard_attribute_types"); int maxID = 0; if (rs.next()) { maxID = rs.getInt("highest_id"); if (maxID < 
MIN_USER_DEFINED_TYPE_ID) { maxID = MIN_USER_DEFINED_TYPE_ID; } else { maxID++; } } connection.executeUpdate(s, "INSERT INTO blackboard_attribute_types (attribute_type_id, type_name, display_name, value_type) VALUES ('" + maxID + "', '" + attrTypeString + "', '" + displayName + "', '" + valueType.getType() + "')"); //NON-NLS BlackboardAttribute.Type type = new BlackboardAttribute.Type(maxID, attrTypeString, displayName, valueType); this.typeIdToAttributeTypeMap.put(type.getTypeID(), type); this.typeNameToAttributeTypeMap.put(type.getTypeName(), type); connection.commitTransaction(); return type; } else { throw new TskDataException("The attribute type that was added was already within the system."); } } catch (SQLException ex) { rollbackTransaction(connection); throw new TskCoreException("Error adding attribute type", ex); } finally { closeResultSet(rs); closeStatement(s); closeConnection(connection); releaseSingleUserCaseWriteLock(); } } /** * Get the attribute type associated with an attribute type name. * * @param attrTypeName An attribute type name. * * @return An attribute type or null if the attribute type does not exist. * * @throws TskCoreException If an error occurs accessing the case database. 
* */ public BlackboardAttribute.Type getAttributeType(String attrTypeName) throws TskCoreException { if (this.typeNameToAttributeTypeMap.containsKey(attrTypeName)) { return this.typeNameToAttributeTypeMap.get(attrTypeName); } CaseDbConnection connection = null; Statement s = null; ResultSet rs = null; acquireSingleUserCaseReadLock(); try { connection = connections.getConnection(); s = connection.createStatement(); rs = connection.executeQuery(s, "SELECT attribute_type_id, type_name, display_name, value_type FROM blackboard_attribute_types WHERE type_name = '" + attrTypeName + "'"); //NON-NLS BlackboardAttribute.Type type = null; if (rs.next()) { type = new BlackboardAttribute.Type(rs.getInt("attribute_type_id"), rs.getString("type_name"), rs.getString("display_name"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.fromType(rs.getLong("value_type"))); this.typeIdToAttributeTypeMap.put(type.getTypeID(), type); this.typeNameToAttributeTypeMap.put(attrTypeName, type); } return type; } catch (SQLException ex) { throw new TskCoreException("Error getting attribute type id", ex); } finally { closeResultSet(rs); closeStatement(s); closeConnection(connection); releaseSingleUserCaseReadLock(); } } /** * Get the attribute type associated with an attribute type ID. * * @param typeID An attribute type ID. * * @return An attribute type or null if the attribute type does not exist. * * @throws TskCoreException If an error occurs accessing the case database. 
* */ BlackboardAttribute.Type getAttributeType(int typeID) throws TskCoreException { if (this.typeIdToAttributeTypeMap.containsKey(typeID)) { return this.typeIdToAttributeTypeMap.get(typeID); } CaseDbConnection connection = null; Statement s = null; ResultSet rs = null; acquireSingleUserCaseReadLock(); try { connection = connections.getConnection(); s = connection.createStatement(); rs = connection.executeQuery(s, "SELECT attribute_type_id, type_name, display_name, value_type FROM blackboard_attribute_types WHERE attribute_type_id = " + typeID + ""); //NON-NLS BlackboardAttribute.Type type = null; if (rs.next()) { type = new BlackboardAttribute.Type(rs.getInt("attribute_type_id"), rs.getString("type_name"), rs.getString("display_name"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.fromType(rs.getLong("value_type"))); this.typeIdToAttributeTypeMap.put(typeID, type); this.typeNameToAttributeTypeMap.put(type.getTypeName(), type); } return type; } catch (SQLException ex) { throw new TskCoreException("Error getting attribute type id", ex); } finally { closeResultSet(rs); closeStatement(s); closeConnection(connection); releaseSingleUserCaseReadLock(); } } /** * Get the artifact type associated with an artifact type name. * * @param artTypeName An artifact type name. * * @return An artifact type or null if the artifact type does not exist. * * @throws TskCoreException If an error occurs accessing the case database. 
* */ public BlackboardArtifact.Type getArtifactType(String artTypeName) throws TskCoreException { if (this.typeNameToArtifactTypeMap.containsKey(artTypeName)) { return this.typeNameToArtifactTypeMap.get(artTypeName); } CaseDbConnection connection = null; Statement s = null; ResultSet rs = null; acquireSingleUserCaseReadLock(); try { connection = connections.getConnection(); s = connection.createStatement(); rs = connection.executeQuery(s, "SELECT artifact_type_id, type_name, display_name, category_type FROM blackboard_artifact_types WHERE type_name = '" + artTypeName + "'"); //NON-NLS BlackboardArtifact.Type type = null; if (rs.next()) { type = new BlackboardArtifact.Type(rs.getInt("artifact_type_id"), rs.getString("type_name"), rs.getString("display_name"), BlackboardArtifact.Category.fromID(rs.getInt("category_type"))); this.typeIdToArtifactTypeMap.put(type.getTypeID(), type); this.typeNameToArtifactTypeMap.put(artTypeName, type); } return type; } catch (SQLException ex) { throw new TskCoreException("Error getting artifact type from the database", ex); } finally { closeResultSet(rs); closeStatement(s); closeConnection(connection); releaseSingleUserCaseReadLock(); } } /** * Get the artifact type associated with an artifact type id. * * @param artTypeId An artifact type id. * * @return The artifact type. * * @throws TskCoreException If an error occurs accessing the case database * or no value is found. 
* */ BlackboardArtifact.Type getArtifactType(int artTypeId) throws TskCoreException { if (this.typeIdToArtifactTypeMap.containsKey(artTypeId)) { return typeIdToArtifactTypeMap.get(artTypeId); } CaseDbConnection connection = null; Statement s = null; ResultSet rs = null; acquireSingleUserCaseReadLock(); try { connection = connections.getConnection(); s = connection.createStatement(); rs = connection.executeQuery(s, "SELECT artifact_type_id, type_name, display_name, category_type FROM blackboard_artifact_types WHERE artifact_type_id = " + artTypeId + ""); //NON-NLS BlackboardArtifact.Type type = null; if (rs.next()) { type = new BlackboardArtifact.Type(rs.getInt("artifact_type_id"), rs.getString("type_name"), rs.getString("display_name"), BlackboardArtifact.Category.fromID(rs.getInt("category_type"))); this.typeIdToArtifactTypeMap.put(artTypeId, type); this.typeNameToArtifactTypeMap.put(type.getTypeName(), type); return type; } else { throw new TskCoreException("No artifact type found matching id: " + artTypeId); } } catch (SQLException ex) { throw new TskCoreException("Error getting artifact type from the database", ex); } finally { closeResultSet(rs); closeStatement(s); closeConnection(connection); releaseSingleUserCaseReadLock(); } } /** * Add an artifact type with the given name. Will return an artifact Type. * * This assumes that the artifact type being added has the category * DATA_ARTIFACT. 
* * @param artifactTypeName System (unique) name of artifact * @param displayName Display (non-unique) name of artifact * * @return Type of the artifact added * * @throws TskCoreException exception thrown if a critical error occurs * @throws TskDataException exception thrown if given data is already in db * within tsk core */ public BlackboardArtifact.Type addBlackboardArtifactType(String artifactTypeName, String displayName) throws TskCoreException, TskDataException { return addBlackboardArtifactType(artifactTypeName, displayName, BlackboardArtifact.Category.DATA_ARTIFACT); } /** * Add an artifact type with the given name and category. Will return an * artifact Type. * * @param artifactTypeName System (unique) name of artifact * @param displayName Display (non-unique) name of artifact * @param category Artifact type category. * * * @return Type of the artifact added. * * @throws TskCoreException exception thrown if a critical error occurs * @throws TskDataException exception thrown if given data is already in db * within tsk core */ BlackboardArtifact.Type addBlackboardArtifactType(String artifactTypeName, String displayName, BlackboardArtifact.Category category) throws TskCoreException, TskDataException { CaseDbConnection connection = null; acquireSingleUserCaseWriteLock(); Statement s = null; ResultSet rs = null; try { connection = connections.getConnection(); connection.beginTransaction(); s = connection.createStatement(); rs = connection.executeQuery(s, "SELECT artifact_type_id FROM blackboard_artifact_types WHERE type_name = '" + artifactTypeName + "'"); //NON-NLS if (!rs.next()) { rs.close(); rs = connection.executeQuery(s, "SELECT MAX(artifact_type_id) AS highest_id FROM blackboard_artifact_types"); int maxID = 0; if (rs.next()) { maxID = rs.getInt("highest_id"); if (maxID < MIN_USER_DEFINED_TYPE_ID) { maxID = MIN_USER_DEFINED_TYPE_ID; } else { maxID++; } } connection.executeUpdate(s, "INSERT INTO blackboard_artifact_types (artifact_type_id, type_name, 
display_name, category_type) VALUES ('" + maxID + "', '" + artifactTypeName + "', '" + displayName + "', " + category.getID() + " )"); //NON-NLS BlackboardArtifact.Type type = new BlackboardArtifact.Type(maxID, artifactTypeName, displayName, category); this.typeIdToArtifactTypeMap.put(type.getTypeID(), type); this.typeNameToArtifactTypeMap.put(type.getTypeName(), type); connection.commitTransaction(); return type; } else { throw new TskDataException("The attribute type that was added was already within the system."); } } catch (SQLException ex) { rollbackTransaction(connection); throw new TskCoreException("Error adding artifact type", ex); } finally { closeResultSet(rs); closeStatement(s); closeConnection(connection); releaseSingleUserCaseWriteLock(); } } public ArrayList getBlackboardAttributes(final BlackboardArtifact artifact) throws TskCoreException { CaseDbConnection connection = null; Statement statement = null; ResultSet rs = null; acquireSingleUserCaseReadLock(); try { connection = connections.getConnection(); statement = connection.createStatement(); rs = connection.executeQuery(statement, "SELECT attrs.artifact_id AS artifact_id, " + "attrs.source AS source, attrs.context AS context, attrs.attribute_type_id AS attribute_type_id, " + "attrs.value_type AS value_type, attrs.value_byte AS value_byte, " + "attrs.value_text AS value_text, attrs.value_int32 AS value_int32, " + "attrs.value_int64 AS value_int64, attrs.value_double AS value_double, " + "types.type_name AS type_name, types.display_name AS display_name " + "FROM blackboard_attributes AS attrs, blackboard_attribute_types AS types WHERE attrs.artifact_id = " + artifact.getArtifactID() + " AND attrs.attribute_type_id = types.attribute_type_id"); ArrayList attributes = new ArrayList(); while (rs.next()) { int attributeTypeId = rs.getInt("attribute_type_id"); String attributeTypeName = rs.getString("type_name"); BlackboardAttribute.Type attributeType; if 
(this.typeIdToAttributeTypeMap.containsKey(attributeTypeId)) { attributeType = this.typeIdToAttributeTypeMap.get(attributeTypeId); } else { attributeType = new BlackboardAttribute.Type(attributeTypeId, attributeTypeName, rs.getString("display_name"), BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.fromType(rs.getInt("value_type"))); this.typeIdToAttributeTypeMap.put(attributeTypeId, attributeType); this.typeNameToAttributeTypeMap.put(attributeTypeName, attributeType); } final BlackboardAttribute attr = new BlackboardAttribute( rs.getLong("artifact_id"), attributeType, rs.getString("source"), rs.getString("context"), rs.getInt("value_int32"), rs.getLong("value_int64"), rs.getDouble("value_double"), rs.getString("value_text"), rs.getBytes("value_byte"), this ); attr.setParentDataSourceID(artifact.getDataSourceObjectID()); attributes.add(attr); } return attributes; } catch (SQLException ex) { throw new TskCoreException("Error getting attributes for artifact, artifact id = " + artifact.getArtifactID(), ex); } finally { closeResultSet(rs); closeStatement(statement); closeConnection(connection); releaseSingleUserCaseReadLock(); } } /** * Get the attributes associated with the given file. 
* * @param file * * @return * * @throws TskCoreException */ ArrayList getFileAttributes(final AbstractFile file) throws TskCoreException { CaseDbConnection connection = null; Statement statement = null; ResultSet rs = null; acquireSingleUserCaseReadLock(); try { connection = connections.getConnection(); statement = connection.createStatement(); rs = connection.executeQuery(statement, "SELECT attrs.id as id, attrs.obj_id AS obj_id, " + "attrs.attribute_type_id AS attribute_type_id, " + "attrs.value_type AS value_type, attrs.value_byte AS value_byte, " + "attrs.value_text AS value_text, attrs.value_int32 AS value_int32, " + "attrs.value_int64 AS value_int64, attrs.value_double AS value_double, " + "types.type_name AS type_name, types.display_name AS display_name " + "FROM tsk_file_attributes AS attrs " + " INNER JOIN blackboard_attribute_types AS types " + " ON attrs.attribute_type_id = types.attribute_type_id " + " WHERE attrs.obj_id = " + file.getId()); ArrayList attributes = new ArrayList(); while (rs.next()) { int attributeTypeId = rs.getInt("attribute_type_id"); String attributeTypeName = rs.getString("type_name"); BlackboardAttribute.Type attributeType; if (this.typeIdToAttributeTypeMap.containsKey(attributeTypeId)) { attributeType = this.typeIdToAttributeTypeMap.get(attributeTypeId); } else { attributeType = new BlackboardAttribute.Type(attributeTypeId, attributeTypeName, rs.getString("display_name"), BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.fromType(rs.getInt("value_type"))); this.typeIdToAttributeTypeMap.put(attributeTypeId, attributeType); this.typeNameToAttributeTypeMap.put(attributeTypeName, attributeType); } final Attribute attr = new Attribute( rs.getLong("id"), rs.getLong("obj_id"), attributeType, rs.getInt("value_int32"), rs.getLong("value_int64"), rs.getDouble("value_double"), rs.getString("value_text"), rs.getBytes("value_byte"), this ); attributes.add(attr); } return attributes; } catch (SQLException ex) { throw new 
TskCoreException("Error getting attributes for file, file id = " + file.getId(), ex); } finally { closeResultSet(rs); closeStatement(statement); closeConnection(connection); releaseSingleUserCaseReadLock(); } } /** * Get all attributes that match a where clause. The clause should begin * with "WHERE" or "JOIN". To use this method you must know the database * tables * * @param whereClause a sqlite where clause * * @return a list of matching attributes * * @throws TskCoreException exception thrown if a critical error occurs * within tsk core \ref query_database_page */ public ArrayList getMatchingAttributes(String whereClause) throws TskCoreException { CaseDbConnection connection = null; Statement s = null; ResultSet rs = null; acquireSingleUserCaseReadLock(); try { connection = connections.getConnection(); s = connection.createStatement(); rs = connection.executeQuery(s, "SELECT blackboard_attributes.artifact_id AS artifact_id, " + "blackboard_attributes.source AS source, blackboard_attributes.context AS context, " + "blackboard_attributes.attribute_type_id AS attribute_type_id, " + "blackboard_attributes.value_type AS value_type, blackboard_attributes.value_byte AS value_byte, " + "blackboard_attributes.value_text AS value_text, blackboard_attributes.value_int32 AS value_int32, " + "blackboard_attributes.value_int64 AS value_int64, blackboard_attributes.value_double AS value_double " + "FROM blackboard_attributes " + whereClause); //NON-NLS ArrayList matches = new ArrayList(); while (rs.next()) { BlackboardAttribute.Type type; // attribute type is cached, so this does not necessarily call to the db type = this.getAttributeType(rs.getInt("attribute_type_id")); BlackboardAttribute attr = new BlackboardAttribute( rs.getLong("artifact_id"), type, rs.getString("source"), rs.getString("context"), rs.getInt("value_int32"), rs.getLong("value_int64"), rs.getDouble("value_double"), rs.getString("value_text"), rs.getBytes("value_byte"), this ); matches.add(attr); } return 
matches; } catch (SQLException ex) { throw new TskCoreException("Error getting attributes using this where clause: " + whereClause, ex); } finally { closeResultSet(rs); closeStatement(s); closeConnection(connection); releaseSingleUserCaseReadLock(); } } /** * Get all artifacts that match a where clause. The clause should begin with * "WHERE" or "JOIN". To use this method you must know the database tables * * @param whereClause a sqlite where clause * * @return a list of matching artifacts * * @throws TskCoreException exception thrown if a critical error occurs * within tsk core \ref query_database_page */ public ArrayList getMatchingArtifacts(String whereClause) throws TskCoreException { String query = "SELECT blackboard_artifacts.artifact_id AS artifact_id, " + "blackboard_artifacts.obj_id AS obj_id, blackboard_artifacts.artifact_obj_id AS artifact_obj_id, blackboard_artifacts.data_source_obj_id AS data_source_obj_id, blackboard_artifacts.artifact_type_id AS artifact_type_id, " + "blackboard_artifacts.review_status_id AS review_status_id " + "FROM blackboard_artifacts " + whereClause; acquireSingleUserCaseReadLock(); try (CaseDbConnection connection = connections.getConnection(); Statement statement = connection.createStatement(); ResultSet resultSet = statement.executeQuery(query)) { List analysisArtifactObjIds = new ArrayList<>(); List dataArtifactObjIds = new ArrayList<>(); while (resultSet.next()) { BlackboardArtifact.Type type = this.getArtifactType(resultSet.getInt("artifact_type_id")); if (type.getCategory() == BlackboardArtifact.Category.ANALYSIS_RESULT) { analysisArtifactObjIds.add(resultSet.getLong("artifact_obj_id")); } else { dataArtifactObjIds.add(resultSet.getLong("artifact_obj_id")); } } ArrayList matches = new ArrayList<>(); if (!analysisArtifactObjIds.isEmpty()) { matches.addAll(getArtifactsForValues(BlackboardArtifact.Category.ANALYSIS_RESULT, "artifacts.artifact_obj_id", analysisArtifactObjIds, connection)); } if (!dataArtifactObjIds.isEmpty()) 
{ matches.addAll(getArtifactsForValues(BlackboardArtifact.Category.DATA_ARTIFACT, "artifacts.artifact_obj_id", dataArtifactObjIds, connection)); } return matches; } catch (SQLException ex) { throw new TskCoreException("Error getting attributes using this where clause: " + whereClause, ex); } finally { releaseSingleUserCaseReadLock(); } } /** * Add a new blackboard artifact with the given type. If that artifact type * does not exist an error will be thrown. The artifact type name can be * looked up in the returned blackboard artifact. * * @param artifactTypeID the type the given artifact should have * @param obj_id the content object id associated with this artifact * * @return a new blackboard artifact * * @throws TskCoreException exception thrown if a critical error occurs * within tsk core * @deprecated Please use newDataArtifact or newAnalysisResult. */ @Deprecated public BlackboardArtifact newBlackboardArtifact(int artifactTypeID, long obj_id) throws TskCoreException { BlackboardArtifact.Type type = getArtifactType(artifactTypeID); if (type == null) { throw new TskCoreException("Unknown artifact type for id: " + artifactTypeID); } Category category = type.getCategory(); if (category == null) { throw new TskCoreException(String.format("No category for %s (id: %d)", type.getDisplayName() == null ? "" : type.getDisplayName(), type.getTypeID())); } Content content = getContentById(obj_id); if (content == null) { throw new TskCoreException("No content found for object id: " + obj_id); } switch (category) { case ANALYSIS_RESULT: return content.newAnalysisResult(type, Score.SCORE_UNKNOWN, null, null, null, Collections.emptyList()) .getAnalysisResult(); case DATA_ARTIFACT: return content.newDataArtifact(type, Collections.emptyList()); default: throw new TskCoreException("Unknown category type: " + category.getName()); } } /** * Add a new blackboard artifact with the given type. 
* * @param artifactType the type the given artifact should have * @param obj_id the content object id associated with this artifact * * @return a new blackboard artifact * * @throws TskCoreException exception thrown if a critical error occurs * within tsk core * @deprecated Please use newDataArtifact or newAnalysisResult. */ @Deprecated @SuppressWarnings("deprecation") public BlackboardArtifact newBlackboardArtifact(ARTIFACT_TYPE artifactType, long obj_id) throws TskCoreException { return newBlackboardArtifact(artifactType.getTypeID(), obj_id); } /** * Add a new blackboard artifact with the given type. * * @param artifactType the type the given artifact should have * @param obj_id the content object id associated with this * artifact * @param data_source_obj_id The data source obj id associated with this * artifact * * @return a new blackboard artifact * * @throws TskCoreException exception thrown if a critical error occurs * within tsk core * @deprecated Please use newDataArtifact or newAnalysisResult. 
*/ @Deprecated @SuppressWarnings("deprecation") BlackboardArtifact newBlackboardArtifact(int artifactTypeID, long obj_id, long data_source_obj_id) throws TskCoreException { BlackboardArtifact.Type type = getArtifactType(artifactTypeID); try (CaseDbConnection connection = connections.getConnection()) { return newBlackboardArtifact(artifactTypeID, obj_id, type.getTypeName(), type.getDisplayName(), data_source_obj_id, connection); } } @Deprecated private BlackboardArtifact newBlackboardArtifact(int artifact_type_id, long obj_id, String artifactTypeName, String artifactDisplayName) throws TskCoreException { try (CaseDbConnection connection = connections.getConnection()) { long data_source_obj_id = getDataSourceObjectId(connection, obj_id); return this.newBlackboardArtifact(artifact_type_id, obj_id, artifactTypeName, artifactDisplayName, data_source_obj_id, connection); } } PreparedStatement createInsertArtifactStatement(int artifact_type_id, long obj_id, long artifact_obj_id, long data_source_obj_id, CaseDbConnection connection) throws TskCoreException, SQLException { PreparedStatement statement; if (dbType == DbType.POSTGRESQL) { statement = connection.getPreparedStatement(PREPARED_STATEMENT.POSTGRESQL_INSERT_ARTIFACT, Statement.RETURN_GENERATED_KEYS); statement.clearParameters(); statement.setLong(1, obj_id); statement.setLong(2, artifact_obj_id); statement.setLong(3, data_source_obj_id); statement.setInt(4, artifact_type_id); } else { statement = connection.getPreparedStatement(PREPARED_STATEMENT.INSERT_ARTIFACT, Statement.RETURN_GENERATED_KEYS); statement.clearParameters(); this.nextArtifactId++; statement.setLong(1, this.nextArtifactId); statement.setLong(2, obj_id); statement.setLong(3, artifact_obj_id); statement.setLong(4, data_source_obj_id); statement.setInt(5, artifact_type_id); } return statement; } /** * Add a new blackboard artifact with the given type. * * @param artifact_type_id The type the given artifact should have. 
* @param obj_id The parent content id. * @param artifactTypeName The artifact type name. * @param artifactDisplayName The artifact type display name. * @param data_source_obj_id The id of the artifact data source. * @param connection The CaseDBConnection. * * @return A new blackboard artifact. * * @throws TskCoreException * * @deprecated Use type specific methods in Blackboard. */ @Deprecated private BlackboardArtifact newBlackboardArtifact(int artifact_type_id, long obj_id, String artifactTypeName, String artifactDisplayName, long data_source_obj_id, CaseDbConnection connection) throws TskCoreException { BlackboardArtifact.Type type = getArtifactType(artifact_type_id); try { if (type.getCategory() == BlackboardArtifact.Category.ANALYSIS_RESULT) { return blackboard.newAnalysisResult(type, obj_id, data_source_obj_id, Score.SCORE_UNKNOWN, null, null, null, Collections.emptyList()).getAnalysisResult(); } else { return blackboard.newDataArtifact(type, obj_id, data_source_obj_id, Collections.emptyList(), null); } } catch (BlackboardException ex) { throw new TskCoreException("Error creating a blackboard artifact", ex); } } /** * Creates a new analysis result by inserting a row in the artifacts table * and a corresponding row in the tsk_analysis_results table. * * @param artifactType Analysis result artifact type. * @param objId Object id of parent. * @param dataSourceObjId Data source object id, may be null. * @param score Score. * @param conclusion Conclusion, may be null or an empty string. * @param configuration Configuration used by analysis, may be null or an * empty string. * @param justification Justification, may be null or an empty string. * @param connection Database connection to use. * * @return Analysis result. 
 *
 * @throws TskCoreException
 */
AnalysisResult newAnalysisResult(BlackboardArtifact.Type artifactType, long objId, Long dataSourceObjId, Score score,
		String conclusion, String configuration, String justification, CaseDbConnection connection) throws TskCoreException {
	if (artifactType.getCategory() != BlackboardArtifact.Category.ANALYSIS_RESULT) {
		throw new TskCoreException(String.format("Artifact type (name = %s) is not of the AnalysisResult category. ", artifactType.getTypeName()));
	}
	long artifactID;
	acquireSingleUserCaseWriteLock();
	try {
		// add a row in tsk_objects
		long artifactObjId = addObject(objId, TskData.ObjectType.ARTIFACT.getObjectType(), connection);
		// add a row in blackboard_artifacts table
		PreparedStatement insertArtifactstatement;
		ResultSet resultSet = null;
		try {
			insertArtifactstatement = createInsertArtifactStatement(artifactType.getTypeID(), objId, artifactObjId, dataSourceObjId, connection);
			connection.executeUpdate(insertArtifactstatement);
			resultSet = insertArtifactstatement.getGeneratedKeys();
			resultSet.next();
			artifactID = resultSet.getLong(1); //last_insert_rowid()
			// add a row in tsk_analysis_results if any data for it is set
			if (score.getSignificance() != Score.Significance.UNKNOWN
					|| !StringUtils.isBlank(conclusion)
					|| !StringUtils.isBlank(configuration)
					|| !StringUtils.isBlank(justification)) {
				PreparedStatement analysisResultsStatement;
				analysisResultsStatement = connection.getPreparedStatement(PREPARED_STATEMENT.INSERT_ANALYSIS_RESULT);
				analysisResultsStatement.clearParameters();
				analysisResultsStatement.setLong(1, artifactObjId);
				// Null conclusion/configuration/justification are stored as
				// empty strings, matching the returned object below.
				analysisResultsStatement.setString(2, (conclusion != null) ? conclusion : "");
				analysisResultsStatement.setInt(3, score.getSignificance().getId());
				analysisResultsStatement.setInt(4, score.getPriority().getId());
				analysisResultsStatement.setString(5, (configuration != null) ? configuration : "");
				analysisResultsStatement.setString(6, (justification != null) ? justification : "");
				connection.executeUpdate(analysisResultsStatement);
			}
			return new AnalysisResult(this, artifactID, objId, artifactObjId, dataSourceObjId, artifactType.getTypeID(),
					artifactType.getTypeName(), artifactType.getDisplayName(),
					BlackboardArtifact.ReviewStatus.UNDECIDED, true,
					score, (conclusion != null) ? conclusion : "",
					(configuration != null) ? configuration : "", (justification != null) ? justification : "");
		} finally {
			closeResultSet(resultSet);
		}
	} catch (SQLException ex) {
		throw new TskCoreException("Error creating a analysis result", ex);
	} finally {
		releaseSingleUserCaseWriteLock();
	}
}

/**
 * Checks if the content object has children. Note: this is generally more
 * efficient then preloading all children and checking if the set is empty,
 * and facilities lazy loading.
 *
 * @param content content object to check for children
 *
 * @return true if has children, false otherwise
 *
 * @throws TskCoreException exception thrown if a critical error occurs
 *                          within tsk core
 */
boolean getContentHasChildren(Content content) throws TskCoreException {
	CaseDbConnection connection = null;
	ResultSet rs = null;
	acquireSingleUserCaseReadLock();
	try {
		connection = connections.getConnection();
		// SELECT COUNT(obj_id) AS count FROM tsk_objects WHERE par_obj_id = ?
		PreparedStatement statement = connection.getPreparedStatement(PREPARED_STATEMENT.COUNT_CHILD_OBJECTS_BY_PARENT);
		statement.clearParameters();
		statement.setLong(1, content.getId());
		rs = connection.executeQuery(statement);
		boolean hasChildren = false;
		if (rs.next()) {
			hasChildren = rs.getInt("count") > 0;
		}
		return hasChildren;
	} catch (SQLException e) {
		throw new TskCoreException("Error checking for children of parent " + content, e);
	} finally {
		closeResultSet(rs);
		closeConnection(connection);
		releaseSingleUserCaseReadLock();
	}
}

/**
 * Counts the children of the content object. Note: this is generally more
 * efficient then preloading all children and counting, and facilities lazy
 * loading.
 *
 * @param content content object to check for children count
 *
 * @return children count
 *
 * @throws TskCoreException exception thrown if a critical error occurs
 *                          within tsk core
 */
int getContentChildrenCount(Content content) throws TskCoreException {
	// Cheap short-circuit: skip the COUNT query if the content is already
	// known to have no children.
	if (!this.getHasChildren(content)) {
		return 0;
	}
	CaseDbConnection connection = null;
	ResultSet rs = null;
	acquireSingleUserCaseReadLock();
	try {
		connection = connections.getConnection();
		// SELECT COUNT(obj_id) AS count FROM tsk_objects WHERE par_obj_id = ?
		PreparedStatement statement = connection.getPreparedStatement(PREPARED_STATEMENT.COUNT_CHILD_OBJECTS_BY_PARENT);
		statement.clearParameters();
		statement.setLong(1, content.getId());
		rs = connection.executeQuery(statement);
		int countChildren = -1;
		if (rs.next()) {
			countChildren = rs.getInt("count");
		}
		return countChildren;
	} catch (SQLException e) {
		throw new TskCoreException("Error checking for children of parent " + content, e);
	} finally {
		closeResultSet(rs);
		closeConnection(connection);
		releaseSingleUserCaseReadLock();
	}
}

/**
 * Returns the list of AbstractFile Children of a given type for a given
 * AbstractFileParent
 *
 * @param parent the content parent to get abstract file children for
 * @param type   children type to look for, defined in
 *               TSK_DB_FILES_TYPE_ENUM
 *
 * @throws TskCoreException exception thrown if a critical error occurs
 *                          within tsk core
 */
List getAbstractFileChildren(Content parent, TSK_DB_FILES_TYPE_ENUM type) throws TskCoreException {
	CaseDbConnection connection = null;
	ResultSet rs = null;
	acquireSingleUserCaseReadLock();
	try {
		connection = connections.getConnection();
		PreparedStatement statement = connection.getPreparedStatement(PREPARED_STATEMENT.SELECT_FILES_BY_PARENT_AND_TYPE);
		statement.clearParameters();
		long parentId = parent.getId();
		statement.setLong(1, parentId);
		statement.setShort(2, type.getFileType());
		rs = connection.executeQuery(statement);
		// fileChildren materializes AbstractFile objects from the result set.
		return fileChildren(rs, connection, parentId);
	} catch (SQLException ex) {
		throw new TskCoreException("Error getting AbstractFile children for Content", ex);
	} finally {
		closeResultSet(rs);
		closeConnection(connection);
		releaseSingleUserCaseReadLock();
	}
}

/**
 * Returns the list of all AbstractFile Children for a given
 * AbstractFileParent
 *
 * @param parent the content parent to get abstract file children for
 *
 * @throws TskCoreException exception thrown if a critical error occurs
 *                          within tsk core
 */
List getAbstractFileChildren(Content parent) throws TskCoreException {
	CaseDbConnection connection = null;
	ResultSet rs = null;
	acquireSingleUserCaseReadLock();
	try {
		connection = connections.getConnection();
		PreparedStatement statement = connection.getPreparedStatement(PREPARED_STATEMENT.SELECT_FILES_BY_PARENT);
		statement.clearParameters();
		long parentId = parent.getId();
		statement.setLong(1, parentId);
		rs = connection.executeQuery(statement);
		return fileChildren(rs, connection, parentId);
	} catch (SQLException ex) {
		throw new TskCoreException("Error getting AbstractFile children for Content", ex);
	} finally {
		closeResultSet(rs);
		closeConnection(connection);
		releaseSingleUserCaseReadLock();
	}
}

/**
 * Get list of IDs for abstract files of a given type that are children of a
 * given content.
* * @param parent Object to find children for * @param type Type of children to find IDs for * * @return * * @throws TskCoreException */ List getAbstractFileChildrenIds(Content parent, TSK_DB_FILES_TYPE_ENUM type) throws TskCoreException { CaseDbConnection connection = null; ResultSet rs = null; acquireSingleUserCaseReadLock(); try { connection = connections.getConnection(); PreparedStatement statement = connection.getPreparedStatement(PREPARED_STATEMENT.SELECT_FILE_IDS_BY_PARENT_AND_TYPE); statement.clearParameters(); statement.setLong(1, parent.getId()); statement.setShort(2, type.getFileType()); rs = connection.executeQuery(statement); List children = new ArrayList(); while (rs.next()) { children.add(rs.getLong("obj_id")); } return children; } catch (SQLException ex) { throw new TskCoreException("Error getting AbstractFile children for Content", ex); } finally { closeResultSet(rs); closeConnection(connection); releaseSingleUserCaseReadLock(); } } /** * Get list of IDs for abstract files that are children of a given content. * * @param parent Object to find children for * * @return * * @throws TskCoreException */ List getAbstractFileChildrenIds(Content parent) throws TskCoreException { CaseDbConnection connection = null; ResultSet rs = null; acquireSingleUserCaseReadLock(); try { connection = connections.getConnection(); PreparedStatement statement = connection.getPreparedStatement(PREPARED_STATEMENT.SELECT_FILE_IDS_BY_PARENT); statement.clearParameters(); statement.setLong(1, parent.getId()); rs = connection.executeQuery(statement); List children = new ArrayList(); while (rs.next()) { children.add(rs.getLong("obj_id")); } return children; } catch (SQLException ex) { throw new TskCoreException("Error getting AbstractFile children for Content", ex); } finally { closeResultSet(rs); closeConnection(connection); releaseSingleUserCaseReadLock(); } } /** * Get list of object IDs for artifacts that are children of a given * content. 
* * @param parent Object to find children for * * @return * * @throws TskCoreException */ List getBlackboardArtifactChildrenIds(Content parent) throws TskCoreException { CaseDbConnection connection = null; ResultSet rs = null; acquireSingleUserCaseReadLock(); try { connection = connections.getConnection(); PreparedStatement statement = connection.getPreparedStatement(PREPARED_STATEMENT.SELECT_ARTIFACT_OBJECTIDS_BY_PARENT); statement.clearParameters(); statement.setLong(1, parent.getId()); rs = connection.executeQuery(statement); List children = new ArrayList(); while (rs.next()) { children.add(rs.getLong("obj_id")); } return children; } catch (SQLException ex) { throw new TskCoreException("Error getting children for BlackboardArtifact", ex); } finally { closeResultSet(rs); closeConnection(connection); releaseSingleUserCaseReadLock(); } } /** * Get list of artifacts that are children of a given content. * * @param parent Object to find children for * * @return * * @throws TskCoreException */ List getBlackboardArtifactChildren(Content parent) throws TskCoreException { long parentId = parent.getId(); List lc = new ArrayList<>(); lc.addAll(blackboard.getAnalysisResults(parentId)); lc.addAll(blackboard.getDataArtifactsBySource(parentId)); return lc; } /** * Get info about children of a given Content from the database. 
 *
 * @param c Parent object to run query against
 *
 * @throws TskCoreException exception thrown if a critical error occurs
 *                          within tsk core
 */
Collection getChildrenInfo(Content c) throws TskCoreException {
	CaseDbConnection connection = null;
	Statement s = null;
	ResultSet rs = null;
	acquireSingleUserCaseReadLock();
	try {
		connection = connections.getConnection();
		s = connection.createStatement();
		rs = connection.executeQuery(s, "SELECT tsk_objects.obj_id AS obj_id, tsk_objects.type AS type " //NON-NLS
				+ "FROM tsk_objects LEFT JOIN tsk_files " //NON-NLS
				+ "ON tsk_objects.obj_id = tsk_files.obj_id " //NON-NLS
				+ "WHERE tsk_objects.par_obj_id = " + c.getId()
				+ " ORDER BY tsk_objects.obj_id"); //NON-NLS
		Collection infos = new ArrayList();
		while (rs.next()) {
			infos.add(new ObjectInfo(rs.getLong("obj_id"), ObjectType.valueOf(rs.getShort("type")))); //NON-NLS
		}
		return infos;
	} catch (SQLException ex) {
		throw new TskCoreException("Error getting Children Info for Content", ex);
	} finally {
		closeResultSet(rs);
		closeStatement(s);
		closeConnection(connection);
		releaseSingleUserCaseReadLock();
	}
}

/**
 * Get parent info for the parent of the content object
 *
 * @param c content object to get parent info for
 *
 * @return the parent object info with the parent object type and id
 *
 * @throws TskCoreException exception thrown if a critical error occurs
 *                          within tsk core
 */
ObjectInfo getParentInfo(Content c) throws TskCoreException {
	return getParentInfo(c.getId());
}

/**
 * Get parent info for the parent of the content object id
 *
 * @param contentId content object id to get parent info for
 *
 * @return the parent object info with the parent object type and id, or
 *         null if the object has no parent row
 *
 * @throws TskCoreException exception thrown if a critical error occurs
 *                          within tsk core
 */
ObjectInfo getParentInfo(long contentId) throws TskCoreException {
	acquireSingleUserCaseReadLock();
	CaseDbConnection connection = null;
	Statement s = null;
	ResultSet rs = null;
	try {
		connection = connections.getConnection();
		s = connection.createStatement();
		// Self-join tsk_objects to resolve the parent row of contentId.
		rs = connection.executeQuery(s, "SELECT parent.obj_id AS obj_id, parent.type AS type " //NON-NLS
				+ "FROM tsk_objects AS parent INNER JOIN tsk_objects AS child " //NON-NLS
				+ "ON child.par_obj_id = parent.obj_id " //NON-NLS
				+ "WHERE child.obj_id = " + contentId); //NON-NLS
		if (rs.next()) {
			return new ObjectInfo(rs.getLong("obj_id"), ObjectType.valueOf(rs.getShort("type")));
		} else {
			return null;
		}
	} catch (SQLException ex) {
		throw new TskCoreException("Error getting Parent Info for Content: " + contentId, ex);
	} finally {
		closeResultSet(rs);
		closeStatement(s);
		closeConnection(connection);
		releaseSingleUserCaseReadLock();
	}
}

/**
 * Gets parent directory for FsContent object
 *
 * @param fsc FsContent to get parent dir for
 *
 * @return the parent Directory or null if the Content has no parent
 *
 * @throws TskCoreException thrown if critical error occurred within tsk
 *                          core
 */
Directory getParentDirectory(FsContent fsc) throws TskCoreException {
	if (fsc.isRoot()) {
		// Given FsContent is a root object and can't have parent directory
		return null;
	} else {
		ObjectInfo parentInfo = getParentInfo(fsc);
		if (parentInfo == null) {
			return null;
		}
		Directory parent = null;
		if (parentInfo.type == ObjectType.ABSTRACTFILE) {
			parent = getDirectoryById(parentInfo.id, fsc.getFileSystem());
		} else {
			throw new TskCoreException("Parent of FsContent (id: " + fsc.getId() + ") has wrong type to be directory: " + parentInfo.type);
		}
		return parent;
	}
}

/**
 * Get content object by content id
 *
 * @param id to get content object for
 *
 * @return instance of a Content object (one of its subclasses), or null if
 *         not found.
 *
 * @throws TskCoreException thrown if critical error occurred within tsk
 *                          core
 */
public Content getContentById(long id) throws TskCoreException {
	// First check to see if this exists in our frequently used content cache.
	Content content = frequentlyUsedContentMap.get(id);
	if (null != content) {
		return content;
	}
	long parentId;
	TskData.ObjectType type;
	CaseDbConnection connection = null;
	Statement s = null;
	ResultSet rs = null;
	acquireSingleUserCaseReadLock();
	try {
		connection = connections.getConnection();
		s = connection.createStatement();
		// Look up only the object type and parent here; the object itself is
		// constructed by the type-specific getters below, outside the lock.
		rs = connection.executeQuery(s, "SELECT * FROM tsk_objects WHERE obj_id = " + id + " LIMIT  1"); //NON-NLS
		if (!rs.next()) {
			return null;
		}
		parentId = rs.getLong("par_obj_id"); //NON-NLS
		type = TskData.ObjectType.valueOf(rs.getShort("type")); //NON-NLS
	} catch (SQLException ex) {
		throw new TskCoreException("Error getting Content by ID.", ex);
	} finally {
		closeResultSet(rs);
		closeStatement(s);
		closeConnection(connection);
		releaseSingleUserCaseReadLock();
	}
	// Construct the object
	switch (type) {
		case IMG:
			content = getImageById(id);
			frequentlyUsedContentMap.put(id, content);
			break;
		case VS:
			content = getVolumeSystemById(id, parentId);
			break;
		case VOL:
			content = getVolumeById(id, parentId);
			frequentlyUsedContentMap.put(id, content);
			break;
		case POOL:
			content = getPoolById(id, parentId);
			break;
		case FS:
			content = getFileSystemById(id, parentId);
			frequentlyUsedContentMap.put(id, content);
			break;
		case ABSTRACTFILE:
			content = getAbstractFileById(id);
			// Add virtual and root directories to frequently used map.
			// Calling isRoot() on local directories goes up the entire directory structure
			// and they can only be the root of portable cases, so skip trying to add
			// them to the cache.
			if (((AbstractFile) content).isVirtual()
					|| ((!(content instanceof LocalDirectory)) && ((AbstractFile) content).isRoot())) {
				frequentlyUsedContentMap.put(id, content);
			}
			break;
		case ARTIFACT:
			content = getArtifactById(id);
			break;
		case REPORT:
			content = getReportById(id);
			break;
		case OS_ACCOUNT:
			content = this.osAccountManager.getOsAccountByObjectId(id);
			break;
		case HOST_ADDRESS:
			content = hostAddressManager.getHostAddress(id);
			break;
		default:
			content = new UnsupportedContent(this, id);
	}
	return content;
}

/**
 * Get a path of a file in tsk_files_path table or null if there is none
 *
 * @param id id of the file to get path for
 *
 * @return file path or null
 */
String getFilePath(long id) {
	String filePath = null;
	CaseDbConnection connection = null;
	ResultSet rs = null;
	acquireSingleUserCaseReadLock();
	try {
		connection = connections.getConnection();
		PreparedStatement statement = connection.getPreparedStatement(PREPARED_STATEMENT.SELECT_LOCAL_PATH_FOR_FILE);
		statement.clearParameters();
		statement.setLong(1, id);
		rs = connection.executeQuery(statement);
		if (rs.next()) {
			filePath = rs.getString("path");
		}
	} catch (SQLException | TskCoreException ex) {
		// Best-effort lookup: log and fall through to return null.
		logger.log(Level.SEVERE, "Error getting file path for file " + id, ex); //NON-NLS
	} finally {
		closeResultSet(rs);
		closeConnection(connection);
		releaseSingleUserCaseReadLock();
	}
	return filePath;
}

/**
 * Get the encoding type for a file in tsk_files_path table
 *
 * @param id id of the file to get path for
 *
 * @return Encoding type (NONE if nothing was found)
 */
TskData.EncodingType getEncodingType(long id) {
	TskData.EncodingType type = TskData.EncodingType.NONE;
	CaseDbConnection connection = null;
	ResultSet rs = null;
	acquireSingleUserCaseReadLock();
	try {
		connection = connections.getConnection();
		PreparedStatement statement = connection.getPreparedStatement(PREPARED_STATEMENT.SELECT_ENCODING_FOR_FILE);
		statement.clearParameters();
		statement.setLong(1, id);
		rs = connection.executeQuery(statement);
		if (rs.next()) {
			type = TskData.EncodingType.valueOf(rs.getInt(1));
		}
	} catch (SQLException | TskCoreException ex) {
		// Best-effort lookup: log and fall through to return NONE.
		logger.log(Level.SEVERE, "Error getting encoding type for file " + id, ex); //NON-NLS
	} finally {
		closeResultSet(rs);
		closeConnection(connection);
		releaseSingleUserCaseReadLock();
	}
	return type;
}

/**
 * Gets the parent_path of a file.
 *
 * @param objectId   The object id of the file.
 * @param connection An open database connection.
 *
 * @return The path of the file or null.
 */
String getFileParentPath(long objectId, CaseDbConnection connection) {
	String parentPath = null;
	acquireSingleUserCaseReadLock();
	ResultSet rs = null;
	try {
		PreparedStatement statement = connection.getPreparedStatement(PREPARED_STATEMENT.SELECT_PATH_FOR_FILE);
		statement.clearParameters();
		statement.setLong(1, objectId);
		rs = connection.executeQuery(statement);
		if (rs.next()) {
			parentPath = rs.getString("parent_path");
		}
	} catch (SQLException ex) {
		logger.log(Level.SEVERE, "Error getting file parent_path for file " + objectId, ex); //NON-NLS
	} finally {
		// The caller owns the connection, so only the result set is closed.
		closeResultSet(rs);
		releaseSingleUserCaseReadLock();
	}
	return parentPath;
}

/**
 * Gets the name of a file.
 *
 * @param objectId   The object id of the file.
 * @param connection An open database connection.
 *
 * @return The name of the file or null.
*/ String getFileName(long objectId, CaseDbConnection connection) { String fileName = null; acquireSingleUserCaseReadLock(); ResultSet rs = null; try { PreparedStatement statement = connection.getPreparedStatement(PREPARED_STATEMENT.SELECT_FILE_NAME); statement.clearParameters(); statement.setLong(1, objectId); rs = connection.executeQuery(statement); if (rs.next()) { fileName = rs.getString("name"); } } catch (SQLException ex) { logger.log(Level.SEVERE, "Error getting file parent_path for file " + objectId, ex); //NON-NLS } finally { closeResultSet(rs); releaseSingleUserCaseReadLock(); } return fileName; } /** * Get a derived method for a file, or null if none * * @param id id of the derived file * * @return derived method or null if not present * * @throws TskCoreException exception throws if core error occurred and * method could not be queried */ DerivedFile.DerivedMethod getDerivedMethod(long id) throws TskCoreException { DerivedFile.DerivedMethod method = null; CaseDbConnection connection = null; ResultSet rs1 = null; ResultSet rs2 = null; acquireSingleUserCaseReadLock(); try { connection = connections.getConnection(); PreparedStatement statement = connection.getPreparedStatement(PREPARED_STATEMENT.SELECT_DERIVED_FILE); statement.clearParameters(); statement.setLong(1, id); rs1 = connection.executeQuery(statement); if (rs1.next()) { int method_id = rs1.getInt("derived_id"); String rederive = rs1.getString("rederive"); method = new DerivedFile.DerivedMethod(method_id, rederive); statement = connection.getPreparedStatement(PREPARED_STATEMENT.SELECT_FILE_DERIVATION_METHOD); statement.clearParameters(); statement.setInt(1, method_id); rs2 = connection.executeQuery(statement); if (rs2.next()) { method.setToolName(rs2.getString("tool_name")); method.setToolVersion(rs2.getString("tool_version")); method.setOther(rs2.getString("other")); } } } catch (SQLException e) { logger.log(Level.SEVERE, "Error getting derived method for file: " + id, e); //NON-NLS } finally { 
closeResultSet(rs2); closeResultSet(rs1); closeConnection(connection); releaseSingleUserCaseReadLock(); } return method; } /** * Get abstract file object from tsk_files table by its id * * @param id id of the file object in tsk_files table * * @return AbstractFile object populated, or null if not found. * * @throws TskCoreException thrown if critical error occurred within tsk * core and file could not be queried */ public AbstractFile getAbstractFileById(long id) throws TskCoreException { CaseDbConnection connection = connections.getConnection(); try { return getAbstractFileById(id, connection); } finally { closeConnection(connection); } } /** * Get abstract file object from tsk_files table by its id on an existing * connection. * * @param objectId The id of the file object in tsk_files table. * @param connection An open database connection. * * @return AbstractFile object populated, or null if not found. * * @throws TskCoreException thrown if critical error occurred within tsk * core and file could not be queried */ AbstractFile getAbstractFileById(long objectId, CaseDbConnection connection) throws TskCoreException { acquireSingleUserCaseReadLock(); ResultSet rs = null; try { PreparedStatement statement = connection.getPreparedStatement(PREPARED_STATEMENT.SELECT_FILE_BY_ID); statement.clearParameters(); statement.setLong(1, objectId); rs = connection.executeQuery(statement); List files = resultSetToAbstractFiles(rs, connection); if (files.size() > 0) { return files.get(0); } else { return null; } } catch (SQLException ex) { throw new TskCoreException("Error getting file by id, id = " + objectId, ex); } finally { closeResultSet(rs); releaseSingleUserCaseReadLock(); } } /** * Get artifact from blackboard_artifacts table by its artifact_obj_id * * @param id id of the artifact in blackboard_artifacts table * (artifact_obj_id column) * * @return Artifact object populated, or null if not found. 
* * @throws TskCoreException thrown if critical error occurred within tsk * core and file could not be queried */ public BlackboardArtifact getArtifactById(long id) throws TskCoreException { CaseDbConnection connection = null; ResultSet rs = null; acquireSingleUserCaseReadLock(); try { connection = connections.getConnection(); // get the artifact type. PreparedStatement statement = connection.getPreparedStatement(PREPARED_STATEMENT.SELECT_ARTIFACT_TYPE_BY_ARTIFACT_OBJ_ID); statement.clearParameters(); statement.setLong(1, id); rs = connection.executeQuery(statement); if (!rs.next()) { throw new TskCoreException("Error getting artifacttype for artifact with artifact_obj_id = " + id); } // based on the artifact type category, get the analysis result or the data artifact BlackboardArtifact.Type artifactType = getArtifactType(rs.getInt("artifact_type_id")); switch (artifactType.getCategory()) { case ANALYSIS_RESULT: return blackboard.getAnalysisResultById(id); case DATA_ARTIFACT: return blackboard.getDataArtifactById(id); default: throw new TskCoreException(String.format("Unknown artifact category for artifact with artifact_obj_id = %d, and artifact type = %s", id, artifactType.getTypeName())); } } catch (SQLException ex) { throw new TskCoreException("Error getting artifacts by artifact_obj_id, artifact_obj_id = " + id, ex); } finally { closeResultSet(rs); closeConnection(connection); releaseSingleUserCaseReadLock(); } } /** * Get artifact from blackboard_artifacts table by its artifact_id * * @param id Artifact ID of the artifact in blackboard_artifacts table * * @return Artifact object populated, or null if not found. 
* * @throws TskCoreException thrown if critical error occurred within tsk * core and file could not be queried * * @deprecated Use the type specific methods in Blackboard * getAnalysisResultsById and getDataArtifactById */ @Deprecated public BlackboardArtifact getArtifactByArtifactId(long id) throws TskCoreException { String query = "SELECT artifact_type_id, artifact_obj_id FROM blackboard_artifacts WHERE artifact_id = " + id; acquireSingleUserCaseReadLock(); try (CaseDbConnection connection = connections.getConnection(); Statement statement = connection.createStatement(); ResultSet resultSet = statement.executeQuery(query);) { if (resultSet != null && resultSet.next()) { BlackboardArtifact.Type artifactType = this.getArtifactType(resultSet.getInt("artifact_type_id")); long artifactObjId = resultSet.getLong("artifact_obj_id"); switch (artifactType.getCategory()) { case ANALYSIS_RESULT: return blackboard.getAnalysisResultById(artifactObjId); case DATA_ARTIFACT: return blackboard.getDataArtifactById(artifactObjId); } } return null; } catch (SQLException ex) { throw new TskCoreException("Error getting artifacts by artifact id, artifact id = " + id, ex); } finally { releaseSingleUserCaseReadLock(); } } /** * Get the object ID of the file system that a file is located in. 
* * Note: for FsContent files, this is the real fs for other non-fs * AbstractFile files, this field is used internally for data source id (the * root content obj) * * @param fileId object id of the file to get fs column id for * @param connection the database connection to use * * @return fs_id or -1 if not present */ private long getFileSystemId(long fileId, CaseDbConnection connection) { acquireSingleUserCaseReadLock(); ResultSet rs = null; long ret = -1; try { PreparedStatement statement = connection.getPreparedStatement(PREPARED_STATEMENT.SELECT_FILE_SYSTEM_BY_OBJECT); statement.clearParameters(); statement.setLong(1, fileId); rs = connection.executeQuery(statement); if (rs.next()) { ret = rs.getLong("fs_obj_id"); if (ret == 0) { ret = -1; } } } catch (SQLException e) { logger.log(Level.SEVERE, "Error checking file system id of a file, id = " + fileId, e); //NON-NLS } finally { closeResultSet(rs); releaseSingleUserCaseReadLock(); } return ret; } /** * Checks if the file is a (sub)child of the data source (parentless Content * object such as Image or VirtualDirectory representing filesets) * * @param dataSource dataSource to check * @param fileId id of file to check * * @return true if the file is in the dataSource hierarchy * * @throws TskCoreException thrown if check failed */ public boolean isFileFromSource(Content dataSource, long fileId) throws TskCoreException { String query = String.format("SELECT COUNT(*) AS count FROM tsk_files WHERE obj_id = %d AND data_source_obj_id = %d", fileId, dataSource.getId()); //NON-NLS CaseDbConnection connection = null; Statement statement = null; ResultSet resultSet = null; acquireSingleUserCaseReadLock(); try { connection = connections.getConnection(); statement = connection.createStatement(); resultSet = connection.executeQuery(statement, query); resultSet.next(); return (resultSet.getLong("count") > 0L); } catch (SQLException ex) { throw new TskCoreException(String.format("Error executing query %s", query), ex); } 
finally { closeResultSet(resultSet); closeStatement(statement); closeConnection(connection); releaseSingleUserCaseReadLock(); } } /** * Returns true if the string contains a SQL LIKE statement wild card based * on https://www.postgresql.org/docs/9.5/functions-matching.html and * https://sqlite.org/lang_expr.html#the_like_glob_regexp_and_match_operators. * * @param str The string. * * @return True if it contains a LIKE wild card. */ private static boolean containsLikeWildcard(String str) { if (str == null) { return false; } else { return str.contains("%") || str.contains("_"); } } /** * @param dataSource the dataSource (Image, parent-less VirtualDirectory) to * search for the given file name * @param fileName Pattern of the name of the file or directory to match * (case insensitive, used in LIKE SQL statement). * * @return a list of AbstractFile for files/directories whose name matches * the given fileName * * @throws TskCoreException thrown if check failed */ public List findFiles(Content dataSource, String fileName) throws TskCoreException { String ext = ""; if (!containsLikeWildcard(fileName)) { ext = SleuthkitCase.extractExtension(fileName); } List files = new ArrayList<>(); CaseDbConnection connection = null; ResultSet resultSet = null; acquireSingleUserCaseReadLock(); try { connection = connections.getConnection(); PreparedStatement statement; if (ext.isEmpty()) { statement = connection.getPreparedStatement(PREPARED_STATEMENT.SELECT_FILES_BY_DATA_SOURCE_AND_NAME); statement.clearParameters(); statement.setString(1, fileName.toLowerCase()); statement.setLong(2, dataSource.getId()); } else { statement = connection.getPreparedStatement(PREPARED_STATEMENT.SELECT_FILES_BY_EXTENSION_AND_DATA_SOURCE_AND_NAME); statement.clearParameters(); statement.setString(1, ext); statement.setString(2, fileName.toLowerCase()); statement.setLong(3, dataSource.getId()); } resultSet = connection.executeQuery(statement); files.addAll(resultSetToAbstractFiles(resultSet, connection)); 
} catch (SQLException e) { throw new TskCoreException(bundle.getString("SleuthkitCase.findFiles.exception.msg3.text"), e); } finally { closeResultSet(resultSet); closeConnection(connection); releaseSingleUserCaseReadLock(); } return files; } /** * @param dataSource the dataSource (Image, parent-less VirtualDirectory) * to search for the given file name * @param fileName Pattern of the name of the file or directory to match * (case insensitive, used in LIKE SQL statement). * @param dirSubString Substring that must exist in parent path. Will be * surrounded by % in LIKE query * * @return a list of AbstractFile for files/directories whose name matches * fileName and whose parent directory contains dirName. * * @throws org.sleuthkit.datamodel.TskCoreException */ public List findFiles(Content dataSource, String fileName, String dirSubString) throws TskCoreException { String ext = ""; if (!containsLikeWildcard(fileName)) { ext = SleuthkitCase.extractExtension(fileName); } List files = new ArrayList<>(); CaseDbConnection connection = null; ResultSet resultSet = null; acquireSingleUserCaseReadLock(); try { connection = connections.getConnection(); PreparedStatement statement; if (ext.isEmpty()) { statement = connection.getPreparedStatement(PREPARED_STATEMENT.SELECT_FILES_BY_DATA_SOURCE_AND_PARENT_PATH_AND_NAME); statement.clearParameters(); statement.setString(1, fileName.toLowerCase()); statement.setString(2, "%" + dirSubString.toLowerCase() + "%"); //NON-NLS statement.setLong(3, dataSource.getId()); } else { statement = connection.getPreparedStatement(PREPARED_STATEMENT.SELECT_FILES_BY_EXTENSION_AND_DATA_SOURCE_AND_PARENT_PATH_AND_NAME); statement.clearParameters(); statement.setString(1, ext); statement.setString(2, fileName.toLowerCase()); statement.setString(3, "%" + dirSubString.toLowerCase() + "%"); //NON-NLS statement.setLong(4, dataSource.getId()); } resultSet = connection.executeQuery(statement); files.addAll(resultSetToAbstractFiles(resultSet, connection)); } 
catch (SQLException e) { throw new TskCoreException(bundle.getString("SleuthkitCase.findFiles3.exception.msg3.text"), e); } finally { closeResultSet(resultSet); closeConnection(connection); releaseSingleUserCaseReadLock(); } return files; } /** * Adds a virtual directory to the database and returns a VirtualDirectory * object representing it. * * @param parentId the ID of the parent, or 0 if NULL * @param directoryName the name of the virtual directory to create * * @return * * @throws TskCoreException */ public VirtualDirectory addVirtualDirectory(long parentId, String directoryName) throws TskCoreException { CaseDbTransaction localTrans = beginTransaction(); try { VirtualDirectory newVD = addVirtualDirectory(parentId, directoryName, localTrans); localTrans.commit(); localTrans = null; return newVD; } finally { if (null != localTrans) { try { localTrans.rollback(); } catch (TskCoreException ex2) { logger.log(Level.SEVERE, "Failed to rollback transaction after exception", ex2); } } } } /** * Add an object to the tsk_objects table. Returns the object ID for the new * object. * * @param parentId Parent of the new object * @param objectType Type of the new object * @param connection Case connection * * @return the object ID for the new object * * @throws SQLException */ long addObject(long parentId, int objectType, CaseDbConnection connection) throws SQLException { ResultSet resultSet = null; acquireSingleUserCaseWriteLock(); try { // INSERT INTO tsk_objects (par_obj_id, type) VALUES (?, ?) 
PreparedStatement statement = connection.getPreparedStatement(PREPARED_STATEMENT.INSERT_OBJECT, Statement.RETURN_GENERATED_KEYS); statement.clearParameters(); if (parentId != 0) { statement.setLong(1, parentId); } else { statement.setNull(1, java.sql.Types.BIGINT); } statement.setInt(2, objectType); connection.executeUpdate(statement); resultSet = statement.getGeneratedKeys(); if (resultSet.next()) { if (parentId != 0) { setHasChildren(parentId); } return resultSet.getLong(1); //last_insert_rowid() } else { throw new SQLException("Error inserting object with parent " + parentId + " into tsk_objects"); } } finally { closeResultSet(resultSet); releaseSingleUserCaseWriteLock(); } } /** * Adds a virtual directory to the database and returns a VirtualDirectory * object representing it. * * Make sure the connection in transaction is used for all database * interactions called by this method * * @param parentId the ID of the parent, or 0 if NULL * @param directoryName the name of the virtual directory to create * @param transaction the transaction in the scope of which the operation * is to be performed, managed by the caller * * @return a VirtualDirectory object representing the one added to the * database. * * @throws TskCoreException */ public VirtualDirectory addVirtualDirectory(long parentId, String directoryName, CaseDbTransaction transaction) throws TskCoreException { if (transaction == null) { throw new TskCoreException("Passed null CaseDbTransaction"); } ResultSet resultSet = null; try { // Get the parent path. 
CaseDbConnection connection = transaction.getConnection(); String parentPath; Content parent = this.getAbstractFileById(parentId, connection); if (parent instanceof AbstractFile) { if (isRootDirectory((AbstractFile) parent, transaction)) { parentPath = "/"; } else { parentPath = ((AbstractFile) parent).getParentPath() + parent.getName() + "/"; //NON-NLS } } else { // The parent was either null or not an abstract file parentPath = "/"; } // Insert a row for the virtual directory into the tsk_objects table. long newObjId = addObject(parentId, TskData.ObjectType.ABSTRACTFILE.getObjectType(), connection); // Insert a row for the virtual directory into the tsk_files table. // INSERT INTO tsk_files (obj_id, fs_obj_id, name, type, has_path, dir_type, meta_type, // dir_flags, meta_flags, size, ctime, crtime, atime, mtime, md5, known, mime_type, parent_path, data_source_obj_id,extension,owner_uid, os_account_obj_id) // VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?,?,?,?) PreparedStatement statement = connection.getPreparedStatement(PREPARED_STATEMENT.INSERT_FILE); statement.clearParameters(); statement.setLong(1, newObjId); // If the parent is part of a file system, grab its file system ID if (0 != parentId) { long parentFs = this.getFileSystemId(parentId, connection); if (parentFs != -1) { statement.setLong(2, parentFs); } else { statement.setNull(2, java.sql.Types.BIGINT); } } else { statement.setNull(2, java.sql.Types.BIGINT); } // name statement.setString(3, directoryName); //type statement.setShort(4, TskData.TSK_DB_FILES_TYPE_ENUM.VIRTUAL_DIR.getFileType()); statement.setShort(5, (short) 1); //flags final TSK_FS_NAME_TYPE_ENUM dirType = TSK_FS_NAME_TYPE_ENUM.DIR; statement.setShort(6, dirType.getValue()); final TSK_FS_META_TYPE_ENUM metaType = TSK_FS_META_TYPE_ENUM.TSK_FS_META_TYPE_DIR; statement.setShort(7, metaType.getValue()); //allocated final TSK_FS_NAME_FLAG_ENUM dirFlag = TSK_FS_NAME_FLAG_ENUM.ALLOC; statement.setShort(8, dirFlag.getValue()); 
final short metaFlags = (short) (TSK_FS_META_FLAG_ENUM.ALLOC.getValue() | TSK_FS_META_FLAG_ENUM.USED.getValue()); statement.setShort(9, metaFlags); //size statement.setLong(10, 0); // nulls for params 11-14 statement.setNull(11, java.sql.Types.BIGINT); statement.setNull(12, java.sql.Types.BIGINT); statement.setNull(13, java.sql.Types.BIGINT); statement.setNull(14, java.sql.Types.BIGINT); statement.setNull(15, java.sql.Types.VARCHAR); // MD5 statement.setNull(16, java.sql.Types.VARCHAR); // SHA-256 statement.setByte(17, FileKnown.UNKNOWN.getFileKnownValue()); // Known statement.setNull(18, java.sql.Types.VARCHAR); // MIME type // parent path statement.setString(19, parentPath); // data source object id (same as object id if this is a data source) long dataSourceObjectId; if (0 == parentId) { dataSourceObjectId = newObjId; } else { dataSourceObjectId = getDataSourceObjectId(connection, parentId); } statement.setLong(20, dataSourceObjectId); //extension, since this is not really file we just set it to null statement.setString(21, null); statement.setString(22, OsAccount.NO_OWNER_ID); // ownerUid statement.setNull(23, java.sql.Types.BIGINT); // osAccountObjId connection.executeUpdate(statement); return new VirtualDirectory(this, newObjId, dataSourceObjectId, directoryName, dirType, metaType, dirFlag, metaFlags, null, null, FileKnown.UNKNOWN, parentPath); } catch (SQLException e) { throw new TskCoreException("Error creating virtual directory '" + directoryName + "'", e); } finally { closeResultSet(resultSet); } } /** * Adds a local directory to the database and returns a LocalDirectory * object representing it. * * @param parentId the ID of the parent, or 0 if NULL * @param directoryName the name of the local directory to create * * @return a LocalDirectory object representing the one added to the * database. 
* * @throws TskCoreException */ public LocalDirectory addLocalDirectory(long parentId, String directoryName) throws TskCoreException { CaseDbTransaction localTrans = beginTransaction(); try { LocalDirectory newLD = addLocalDirectory(parentId, directoryName, localTrans); localTrans.commit(); return newLD; } catch (TskCoreException ex) { try { localTrans.rollback(); } catch (TskCoreException ex2) { logger.log(Level.SEVERE, String.format("Failed to rollback transaction after exception: %s", ex.getMessage()), ex2); } throw ex; } } /** * Adds a local directory to the database and returns a LocalDirectory * object representing it. * * Make sure the connection in transaction is used for all database * interactions called by this method * * @param parentId the ID of the parent, or 0 if NULL * @param directoryName the name of the local directory to create * @param transaction the transaction in the scope of which the operation * is to be performed, managed by the caller * * @return a LocalDirectory object representing the one added to the * database. * * @throws TskCoreException */ public LocalDirectory addLocalDirectory(long parentId, String directoryName, CaseDbTransaction transaction) throws TskCoreException { if (transaction == null) { throw new TskCoreException("Passed null CaseDbTransaction"); } ResultSet resultSet = null; try { // Get the parent path. CaseDbConnection connection = transaction.getConnection(); AbstractFile parent = getAbstractFileById(parentId, connection); String parentPath; if ((parent == null) || isRootDirectory(parent, transaction)) { parentPath = "/"; } else { parentPath = parent.getParentPath() + parent.getName() + "/"; //NON-NLS } // Insert a row for the local directory into the tsk_objects table. long newObjId = addObject(parentId, TskData.ObjectType.ABSTRACTFILE.getObjectType(), connection); // Insert a row for the local directory into the tsk_files table. 
// INSERT INTO tsk_files (obj_id, fs_obj_id, name, type, has_path, dir_type, meta_type, // dir_flags, meta_flags, size, ctime, crtime, atime, mtime, md5, sha256, known, mime_type, parent_path, data_source_obj_id, extension, owner_uid, os_account_obj_id) // VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?) PreparedStatement statement = connection.getPreparedStatement(PREPARED_STATEMENT.INSERT_FILE); statement.clearParameters(); statement.setLong(1, newObjId); // The parent of a local directory will never be a file system statement.setNull(2, java.sql.Types.BIGINT); // name statement.setString(3, directoryName); //type statement.setShort(4, TskData.TSK_DB_FILES_TYPE_ENUM.LOCAL_DIR.getFileType()); statement.setShort(5, (short) 1); //flags final TSK_FS_NAME_TYPE_ENUM dirType = TSK_FS_NAME_TYPE_ENUM.DIR; statement.setShort(6, dirType.getValue()); final TSK_FS_META_TYPE_ENUM metaType = TSK_FS_META_TYPE_ENUM.TSK_FS_META_TYPE_DIR; statement.setShort(7, metaType.getValue()); //allocated final TSK_FS_NAME_FLAG_ENUM dirFlag = TSK_FS_NAME_FLAG_ENUM.ALLOC; statement.setShort(8, dirFlag.getValue()); final short metaFlags = (short) (TSK_FS_META_FLAG_ENUM.ALLOC.getValue() | TSK_FS_META_FLAG_ENUM.USED.getValue()); statement.setShort(9, metaFlags); //size statement.setLong(10, 0); // nulls for params 11-14 statement.setNull(11, java.sql.Types.BIGINT); statement.setNull(12, java.sql.Types.BIGINT); statement.setNull(13, java.sql.Types.BIGINT); statement.setNull(14, java.sql.Types.BIGINT); statement.setNull(15, java.sql.Types.VARCHAR); // MD5 statement.setNull(16, java.sql.Types.VARCHAR); // SHA-256 statement.setByte(17, FileKnown.UNKNOWN.getFileKnownValue()); // Known statement.setNull(18, java.sql.Types.VARCHAR); // MIME type // parent path statement.setString(19, parentPath); // data source object id long dataSourceObjectId = getDataSourceObjectId(connection, parentId); statement.setLong(20, dataSourceObjectId); //extension, since this is a directory we just set 
it to null statement.setString(21, null); statement.setString(22, OsAccount.NO_OWNER_ID); // ownerUid statement.setNull(23, java.sql.Types.BIGINT); // osAccountObjId connection.executeUpdate(statement); return new LocalDirectory(this, newObjId, dataSourceObjectId, directoryName, dirType, metaType, dirFlag, metaFlags, null, null, FileKnown.UNKNOWN, parentPath); } catch (SQLException e) { throw new TskCoreException("Error creating local directory '" + directoryName + "'", e); } finally { closeResultSet(resultSet); } } /** * Adds a local/logical files and/or directories data source. * * @param deviceId An ASCII-printable identifier for the device * associated with the data source that is intended * to be unique across multiple cases (e.g., a * UUID). * @param rootDirectoryName The name for the root virtual directory for the * data source. * @param timeZone The time zone used to process the data source, * may be the empty string. * @param transaction A transaction in the scope of which the * operation is to be performed, managed by the * caller. * * @return The new local files data source. * * @throws TskCoreException if there is an error adding the data source. */ public LocalFilesDataSource addLocalFilesDataSource(String deviceId, String rootDirectoryName, String timeZone, CaseDbTransaction transaction) throws TskCoreException { return addLocalFilesDataSource(deviceId, rootDirectoryName, timeZone, null, transaction); } /** * Adds a local/logical files and/or directories data source. * * @param deviceId An ASCII-printable identifier for the device * associated with the data source that is intended * to be unique across multiple cases (e.g., a * UUID). * @param rootDirectoryName The name for the root virtual directory for the * data source. * @param timeZone The time zone used to process the data source, * may be the empty string. 
* @param host The host for the data source (may be null) * @param transaction A transaction in the scope of which the * operation is to be performed, managed by the * caller. * * @return The new local files data source. * * @throws TskCoreException if there is an error adding the data source. */ public LocalFilesDataSource addLocalFilesDataSource(String deviceId, String rootDirectoryName, String timeZone, Host host, CaseDbTransaction transaction) throws TskCoreException { Statement statement = null; try { CaseDbConnection connection = transaction.getConnection(); // Insert a row for the root virtual directory of the data source // into the tsk_objects table. long newObjId = addObject(0, TskData.ObjectType.ABSTRACTFILE.getObjectType(), connection); // If no host was supplied, make one if (host == null) { host = getHostManager().newHost("LogicalFileSet_" + newObjId + " Host", transaction); } // Insert a row for the virtual directory of the data source into // the data_source_info table. statement = connection.createStatement(); statement.executeUpdate("INSERT INTO data_source_info (obj_id, device_id, time_zone, host_id) " + "VALUES(" + newObjId + ", '" + deviceId + "', '" + timeZone + "', " + host.getHostId() + ");"); // Insert a row for the root virtual directory of the data source // into the tsk_files table. Note that its data source object id is // its own object id. // INSERT INTO tsk_files (obj_id, fs_obj_id, name, type, has_path, // dir_type, meta_type, dir_flags, meta_flags, size, ctime, crtime, // atime, mtime, md5, known, mime_type, parent_path, data_source_obj_id, extension, owner_uid, os_account_obj_id) // VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?,?, ?, ?) 
PreparedStatement preparedStatement = connection.getPreparedStatement(PREPARED_STATEMENT.INSERT_FILE); preparedStatement.clearParameters(); preparedStatement.setLong(1, newObjId); preparedStatement.setNull(2, java.sql.Types.BIGINT); preparedStatement.setString(3, rootDirectoryName); preparedStatement.setShort(4, TskData.TSK_DB_FILES_TYPE_ENUM.VIRTUAL_DIR.getFileType()); preparedStatement.setShort(5, (short) 1); TSK_FS_NAME_TYPE_ENUM dirType = TSK_FS_NAME_TYPE_ENUM.DIR; preparedStatement.setShort(6, TSK_FS_NAME_TYPE_ENUM.DIR.getValue()); TSK_FS_META_TYPE_ENUM metaType = TSK_FS_META_TYPE_ENUM.TSK_FS_META_TYPE_DIR; preparedStatement.setShort(7, metaType.getValue()); TSK_FS_NAME_FLAG_ENUM dirFlag = TSK_FS_NAME_FLAG_ENUM.ALLOC; preparedStatement.setShort(8, dirFlag.getValue()); final short metaFlags = (short) (TSK_FS_META_FLAG_ENUM.ALLOC.getValue() | TSK_FS_META_FLAG_ENUM.USED.getValue()); preparedStatement.setShort(9, metaFlags); preparedStatement.setLong(10, 0); preparedStatement.setNull(11, java.sql.Types.BIGINT); preparedStatement.setNull(12, java.sql.Types.BIGINT); preparedStatement.setNull(13, java.sql.Types.BIGINT); preparedStatement.setNull(14, java.sql.Types.BIGINT); preparedStatement.setNull(15, java.sql.Types.VARCHAR); // MD5 preparedStatement.setNull(16, java.sql.Types.VARCHAR); // SHA-256 preparedStatement.setByte(17, FileKnown.UNKNOWN.getFileKnownValue()); // Known preparedStatement.setNull(18, java.sql.Types.VARCHAR); // MIME type String parentPath = "/"; //NON-NLS preparedStatement.setString(19, parentPath); preparedStatement.setLong(20, newObjId); preparedStatement.setString(21, null); //extension, just set it to null preparedStatement.setString(22, OsAccount.NO_OWNER_ID); // ownerUid preparedStatement.setNull(23, java.sql.Types.BIGINT); // osAccountObjId connection.executeUpdate(preparedStatement); return new LocalFilesDataSource(this, newObjId, newObjId, deviceId, rootDirectoryName, dirType, metaType, dirFlag, metaFlags, timeZone, null, null, 
FileKnown.UNKNOWN, parentPath); } catch (SQLException ex) { throw new TskCoreException(String.format("Error creating local files data source with device id %s and directory name %s", deviceId, rootDirectoryName), ex); } finally { closeStatement(statement); } } /** * Add an image to the database. * * @param type Type of image * @param sectorSize Sector size * @param size Image size * @param displayName Display name for the image * @param imagePaths Image path(s) * @param timezone Time zone * @param md5 MD5 hash * @param sha1 SHA1 hash * @param sha256 SHA256 hash * @param deviceId Device ID * @param transaction Case DB transaction * * @return the newly added Image * * @throws TskCoreException */ public Image addImage(TskData.TSK_IMG_TYPE_ENUM type, long sectorSize, long size, String displayName, List imagePaths, String timezone, String md5, String sha1, String sha256, String deviceId, CaseDbTransaction transaction) throws TskCoreException { return addImage(type, sectorSize, size, displayName, imagePaths, timezone, md5, sha1, sha256, deviceId, null, transaction); } /** * Add an image to the database. * * @param type Type of image * @param sectorSize Sector size * @param size Image size * @param displayName Display name for the image * @param imagePaths Image path(s) * @param timezone Time zone * @param md5 MD5 hash * @param sha1 SHA1 hash * @param sha256 SHA256 hash * @param deviceId Device ID * @param host Host * @param transaction Case DB transaction * * @return the newly added Image * * @throws TskCoreException */ public Image addImage(TskData.TSK_IMG_TYPE_ENUM type, long sectorSize, long size, String displayName, List imagePaths, String timezone, String md5, String sha1, String sha256, String deviceId, Host host, CaseDbTransaction transaction) throws TskCoreException { Statement statement = null; try { // Insert a row for the Image into the tsk_objects table. 
CaseDbConnection connection = transaction.getConnection(); long newObjId = addObject(0, TskData.ObjectType.IMG.getObjectType(), connection); // Add a row to tsk_image_info // INSERT INTO tsk_image_info (obj_id, type, ssize, tzone, size, md5, sha1, sha256, display_name) PreparedStatement preparedStatement = connection.getPreparedStatement(PREPARED_STATEMENT.INSERT_IMAGE_INFO); preparedStatement.clearParameters(); preparedStatement.setLong(1, newObjId); preparedStatement.setShort(2, (short) type.getValue()); preparedStatement.setLong(3, sectorSize); preparedStatement.setString(4, timezone); //prevent negative size long savedSize = size < 0 ? 0 : size; preparedStatement.setLong(5, savedSize); preparedStatement.setString(6, md5); preparedStatement.setString(7, sha1); preparedStatement.setString(8, sha256); preparedStatement.setString(9, displayName); connection.executeUpdate(preparedStatement); // If there are paths, add them to tsk_image_names for (int i = 0; i < imagePaths.size(); i++) { preparedStatement = connection.getPreparedStatement(PREPARED_STATEMENT.INSERT_IMAGE_NAME); preparedStatement.clearParameters(); preparedStatement.setLong(1, newObjId); preparedStatement.setString(2, imagePaths.get(i)); preparedStatement.setLong(3, i); connection.executeUpdate(preparedStatement); } // Create the display name String name = displayName; if (name == null || name.isEmpty()) { if (imagePaths.size() > 0) { String path = imagePaths.get(0); name = (new java.io.File(path)).getName(); } else { name = ""; } } // Create a host if needed if (host == null) { if (name.isEmpty()) { host = getHostManager().newHost("Image_" + newObjId + " Host", transaction); } else { host = getHostManager().newHost(name + "_" + newObjId + " Host", transaction); } } // Add a row to data_source_info preparedStatement = connection.getPreparedStatement(PREPARED_STATEMENT.INSERT_DATA_SOURCE_INFO); statement = connection.createStatement(); preparedStatement.setLong(1, newObjId); 
preparedStatement.setString(2, deviceId); preparedStatement.setString(3, timezone); preparedStatement.setLong(4, new Date().getTime()); preparedStatement.setLong(5, host.getHostId()); connection.executeUpdate(preparedStatement); // Create the new Image object return new Image(this, newObjId, type.getValue(), deviceId, sectorSize, name, imagePaths.toArray(new String[imagePaths.size()]), timezone, md5, sha1, sha256, savedSize); } catch (SQLException ex) { if (!imagePaths.isEmpty()) { throw new TskCoreException(String.format("Error adding image with path %s to database", imagePaths.get(0)), ex); } else { throw new TskCoreException(String.format("Error adding image with display name %s to database", displayName), ex); } } finally { closeStatement(statement); } } /** * Add a volume system to the database. * * @param parentObjId Object ID of the volume system's parent * @param type Type of volume system * @param imgOffset Image offset * @param blockSize Block size * @param transaction Case DB transaction * * @return the newly added VolumeSystem * * @throws TskCoreException */ public VolumeSystem addVolumeSystem(long parentObjId, TskData.TSK_VS_TYPE_ENUM type, long imgOffset, long blockSize, CaseDbTransaction transaction) throws TskCoreException { try { // Insert a row for the VolumeSystem into the tsk_objects table. 
			CaseDbConnection connection = transaction.getConnection();
			long newObjId = addObject(parentObjId, TskData.ObjectType.VS.getObjectType(), connection);

			// Add a row to tsk_vs_info
			// INSERT INTO tsk_vs_info (obj_id, vs_type, img_offset, block_size)
			PreparedStatement preparedStatement = connection.getPreparedStatement(PREPARED_STATEMENT.INSERT_VS_INFO);
			preparedStatement.clearParameters();
			preparedStatement.setLong(1, newObjId);
			preparedStatement.setShort(2, (short) type.getVsType());
			preparedStatement.setLong(3, imgOffset);
			preparedStatement.setLong(4, blockSize);
			connection.executeUpdate(preparedStatement);

			// Create the new VolumeSystem object
			return new VolumeSystem(this, newObjId, "", type.getVsType(), imgOffset, blockSize);
		} catch (SQLException ex) {
			throw new TskCoreException(String.format("Error creating volume system with parent ID %d and image offset %d", parentObjId, imgOffset), ex);
		}
	}

	/**
	 * Add a volume to the database
	 *
	 * @param parentObjId Object ID of the volume's parent
	 * @param addr        Address of the volume
	 * @param start       Start of the volume
	 * @param length      Length of the volume
	 * @param desc        Description of the volume
	 * @param flags       Flags
	 * @param transaction Case DB transaction
	 *
	 * @return the newly created Volume
	 *
	 * @throws TskCoreException
	 */
	public Volume addVolume(long parentObjId, long addr, long start, long length, String desc, long flags, CaseDbTransaction transaction) throws TskCoreException {
		try {
			// Insert a row for the Volume into the tsk_objects table.
			CaseDbConnection connection = transaction.getConnection();
			long newObjId = addObject(parentObjId, TskData.ObjectType.VOL.getObjectType(), connection);

			// Add a row to tsk_vs_parts
			// INSERT INTO tsk_vs_parts (obj_id, addr, start, length, desc, flags)
			// The two DB back ends use different prepared statements here
			// (the "desc" column name differs between SQLite and PostgreSQL schemas).
			PreparedStatement preparedStatement;
			if (this.dbType == DbType.POSTGRESQL) {
				preparedStatement = connection.getPreparedStatement(PREPARED_STATEMENT.INSERT_VS_PART_POSTGRESQL);
			} else {
				preparedStatement = connection.getPreparedStatement(PREPARED_STATEMENT.INSERT_VS_PART_SQLITE);
			}
			preparedStatement.clearParameters();
			preparedStatement.setLong(1, newObjId);
			preparedStatement.setLong(2, addr);
			preparedStatement.setLong(3, start);
			preparedStatement.setLong(4, length);
			preparedStatement.setString(5, desc);
			preparedStatement.setShort(6, (short) flags);
			connection.executeUpdate(preparedStatement);

			// Create the new Volume object
			return new Volume(this, newObjId, addr, start, length, flags, desc);
		} catch (SQLException ex) {
			throw new TskCoreException(String.format("Error creating volume with address %d and parent ID %d", addr, parentObjId), ex);
		}
	}

	/**
	 * Add a pool to the database.
	 *
	 * @param parentObjId Object ID of the pool's parent
	 * @param type        Type of pool
	 * @param transaction Case DB transaction
	 *
	 * @return the newly created Pool
	 *
	 * @throws TskCoreException
	 */
	public Pool addPool(long parentObjId, TskData.TSK_POOL_TYPE_ENUM type, CaseDbTransaction transaction) throws TskCoreException {
		try {
			// Insert a row for the Pool into the tsk_objects table.
			CaseDbConnection connection = transaction.getConnection();
			long newObjId = addObject(parentObjId, TskData.ObjectType.POOL.getObjectType(), connection);

			// Add a row to tsk_pool_info
			// INSERT INTO tsk_pool_info (obj_id, pool_type) VALUES (?, ?)
			PreparedStatement preparedStatement = connection.getPreparedStatement(PREPARED_STATEMENT.INSERT_POOL_INFO);
			preparedStatement.clearParameters();
			preparedStatement.setLong(1, newObjId);
			preparedStatement.setShort(2, type.getValue());
			connection.executeUpdate(preparedStatement);

			// Create the new Pool object
			return new Pool(this, newObjId, type.getName(), type.getValue());
		} catch (SQLException ex) {
			throw new TskCoreException(String.format("Error creating pool with type %d and parent ID %d", type.getValue(), parentObjId), ex);
		}
	}

	/**
	 * Add a FileSystem to the database.
	 *
	 * @param parentObjId Object ID of the file system's parent
	 * @param imgOffset   Offset in the image
	 * @param type        Type of file system
	 * @param blockSize   Block size
	 * @param blockCount  Block count
	 * @param rootInum    root inum
	 * @param firstInum   first inum
	 * @param lastInum    last inum
	 * @param displayName display name
	 * @param transaction Case DB transaction
	 *
	 * @return the newly created FileSystem
	 *
	 * @throws TskCoreException
	 */
	public FileSystem addFileSystem(long parentObjId, long imgOffset, TskData.TSK_FS_TYPE_ENUM type, long blockSize, long blockCount,
			long rootInum, long firstInum, long lastInum, String displayName,
			CaseDbTransaction transaction) throws TskCoreException {
		try {
			// Insert a row for the FileSystem into the tsk_objects table.
			CaseDbConnection connection = transaction.getConnection();
			long newObjId = addObject(parentObjId, TskData.ObjectType.FS.getObjectType(), connection);

			// Get the data source object ID
			long dataSourceId = getDataSourceObjectId(connection, newObjId);

			// Add a row to tsk_fs_info
			// INSERT INTO tsk_fs_info (obj_id, data_source_obj_id, img_offset, fs_type, block_size, block_count, root_inum, first_inum, last_inum, display_name)
			PreparedStatement preparedStatement = connection.getPreparedStatement(PREPARED_STATEMENT.INSERT_FS_INFO);
			preparedStatement.clearParameters();
			preparedStatement.setLong(1, newObjId);
			preparedStatement.setLong(2, dataSourceId);
			preparedStatement.setLong(3, imgOffset);
			preparedStatement.setInt(4, type.getValue());
			preparedStatement.setLong(5, blockSize);
			preparedStatement.setLong(6, blockCount);
			preparedStatement.setLong(7, rootInum);
			preparedStatement.setLong(8, firstInum);
			preparedStatement.setLong(9, lastInum);
			preparedStatement.setString(10, displayName);
			connection.executeUpdate(preparedStatement);

			// Create the new FileSystem object
			return new FileSystem(this, newObjId, displayName, imgOffset, type, blockSize, blockCount, rootInum, firstInum, lastInum);
		} catch (SQLException ex) {
			throw new TskCoreException(String.format("Error creating file system with image offset %d and parent ID %d", imgOffset, parentObjId), ex);
		}
	}

	/**
	 * Add a file system file.
	 *
	 * @param dataSourceObjId The object id of the root data source of this
	 *                        file.
	 * @param fsObjId         The file system object id.
	 * @param fileName        The name of the file.
	 * @param metaAddr        The meta address of the file.
	 * @param metaSeq         The meta address sequence of the file.
	 * @param attrType        The attributed type of the file.
	 * @param attrId          The attribute id
	 * @param dirFlag         The allocated status from the name structure
	 * @param metaFlags       The allocated status of the file, usually as
	 *                        reported in the metadata structure of the file
	 *                        system.
	 * @param size            The size of the file in bytes.
	 * @param ctime           The changed time of the file.
	 * @param crtime          The creation time of the file.
	 * @param atime           The accessed time of the file
	 * @param mtime           The modified time of the file.
	 * @param isFile          True, unless the file is a directory.
	 * @param parent          The parent of the file (e.g., a virtual directory)
	 *
	 * @return Newly created file
	 *
	 * @throws TskCoreException
	 */
	public FsContent addFileSystemFile(long dataSourceObjId, long fsObjId,
			String fileName,
			long metaAddr, int metaSeq,
			TSK_FS_ATTR_TYPE_ENUM attrType, int attrId,
			TSK_FS_NAME_FLAG_ENUM dirFlag, short metaFlags, long size,
			long ctime, long crtime, long atime, long mtime,
			boolean isFile, Content parent) throws TskCoreException {
		// Delegate to the full overload inside a locally managed transaction,
		// with no hashes, MIME type, owner, OS account, or attributes.
		CaseDbTransaction transaction = beginTransaction();
		try {
			FsContent fileSystemFile = addFileSystemFile(dataSourceObjId, fsObjId, fileName,
					metaAddr, metaSeq, attrType, attrId, dirFlag, metaFlags, size,
					ctime, crtime, atime, mtime, null, null, null, isFile, parent,
					OsAccount.NO_OWNER_ID, null, Collections.emptyList(), transaction);
			transaction.commit();
			transaction = null; // signal the finally block that commit succeeded
			return fileSystemFile;
		} finally {
			// Roll back if the delegate call or commit threw.
			if (null != transaction) {
				try {
					transaction.rollback();
				} catch (TskCoreException ex2) {
					logger.log(Level.SEVERE, "Failed to rollback transaction after exception", ex2);
				}
			}
		}
	}

	/**
	 * Add a file system file.
	 *
	 * @param dataSourceObjId The object id of the root data source of this
	 *                        file.
	 * @param fsObjId         The file system object id.
	 * @param fileName        The name of the file.
	 * @param metaAddr        The meta address of the file.
	 * @param metaSeq         The meta address sequence of the file.
	 * @param attrType        The attributed type of the file.
	 * @param attrId          The attribute id.
	 * @param dirFlag         The allocated status from the name structure
	 * @param metaFlags       The allocated status of the file, usually as
	 *                        reported in the metadata structure of the file
	 *                        system.
	 * @param size            The size of the file in bytes.
	 * @param ctime           The changed time of the file.
	 * @param crtime          The creation time of the file.
	 * @param atime           The accessed time of the file
	 * @param mtime           The modified time of the file.
	 * @param md5Hash         The MD5 hash of the file
	 * @param sha256Hash      The SHA256 hash of the file
	 * @param mimeType        The MIME type of the file
	 * @param isFile          True, unless the file is a directory.
	 * @param parent          The parent of the file (e.g., a virtual
	 *                        directory).
	 * @param ownerUid        UID of the file owner as found in the file system,
	 *                        can be null.
	 * @param osAccount       OS account of owner, may be null.
	 * @param fileAttributes  A list of file attributes. May be empty.
	 * @param transaction     A caller-managed transaction within which the add
	 *                        file operations are performed.
	 *
	 * @return Newly created file
	 *
	 * @throws TskCoreException
	 */
	public FsContent addFileSystemFile(long dataSourceObjId, long fsObjId,
			String fileName,
			long metaAddr, int metaSeq,
			TSK_FS_ATTR_TYPE_ENUM attrType, int attrId,
			TSK_FS_NAME_FLAG_ENUM dirFlag, short metaFlags, long size,
			long ctime, long crtime, long atime, long mtime,
			String md5Hash, String sha256Hash, String mimeType,
			boolean isFile, Content parent, String ownerUid,
			OsAccount osAccount, List fileAttributes,
			CaseDbTransaction transaction) throws TskCoreException {
		TimelineManager timelineManager = getTimelineManager();
		Statement queryStatement = null;
		String parentPath = "/";
		try {
			CaseDbConnection connection = transaction.getConnection();

			// Insert a row for the local/logical file into the tsk_objects table.
			// INSERT INTO tsk_objects (par_obj_id, type) VALUES (?, ?)
			long objectId = addObject(parent.getId(), TskData.ObjectType.ABSTRACTFILE.getObjectType(), connection);

			// Compute the parent path: "/" for root (or non-file parents),
			// otherwise parent's own path plus its name.
			if (parent instanceof AbstractFile) {
				AbstractFile parentFile = (AbstractFile) parent;
				if (isRootDirectory(parentFile, transaction)) {
					parentPath = "/";
				} else {
					parentPath = parentFile.getParentPath() + parent.getName() + "/"; //NON-NLS
				}
			} else {
				parentPath = "/";
			}

			PreparedStatement statement = connection.getPreparedStatement(PREPARED_STATEMENT.INSERT_FILE_SYSTEM_FILE);
			statement.clearParameters();
			statement.setLong(1, objectId);											// obj_id
			statement.setLong(2, fsObjId);											// fs_obj_id
			statement.setLong(3, dataSourceObjId);									// data_source_obj_id
			statement.setShort(4, (short) attrType.getValue());						// attr_type
			statement.setInt(5, attrId);											// attr_id
			statement.setString(6, fileName);										// name
			statement.setLong(7, metaAddr);											// meta_addr
			statement.setInt(8, metaSeq);											// meta_seq
			statement.setShort(9, TskData.TSK_DB_FILES_TYPE_ENUM.FS.getFileType());	//type
			statement.setShort(10, (short) 1);										// has_path
			TSK_FS_NAME_TYPE_ENUM dirType = isFile ? TSK_FS_NAME_TYPE_ENUM.REG : TSK_FS_NAME_TYPE_ENUM.DIR;
			statement.setShort(11, dirType.getValue());								// dir_type
			TSK_FS_META_TYPE_ENUM metaType = isFile ? TSK_FS_META_TYPE_ENUM.TSK_FS_META_TYPE_REG : TSK_FS_META_TYPE_ENUM.TSK_FS_META_TYPE_DIR;
			statement.setShort(12, metaType.getValue());							// meta_type
			statement.setShort(13, dirFlag.getValue());								// dir_flags
			statement.setShort(14, metaFlags);										// meta_flags
			statement.setLong(15, size < 0 ? 0 : size);								// size (clamped to non-negative)
			statement.setLong(16, ctime);
			statement.setLong(17, crtime);
			statement.setLong(18, atime);
			statement.setLong(19, mtime);
			statement.setString(20, md5Hash);
			statement.setString(21, sha256Hash);
			statement.setString(22, mimeType);
			statement.setString(23, parentPath);
			final String extension = extractExtension(fileName);
			statement.setString(24, extension);
			statement.setString(25, ownerUid);
			if (null != osAccount) {
				statement.setLong(26, osAccount.getId());
			} else {
				statement.setNull(26, java.sql.Types.BIGINT); // osAccountObjId
			}
			connection.executeUpdate(statement);

			Long osAccountId = (osAccount != null) ? osAccount.getId() : null;

			// A transient DerivedFile is built solely so timeline events can be
			// generated for the new file within this transaction.
			DerivedFile derivedFile = new DerivedFile(this, objectId, dataSourceObjId, fileName, dirType, metaType, dirFlag, metaFlags,
					size, ctime, crtime, atime, mtime, md5Hash, sha256Hash, null, parentPath, null, parent.getId(), mimeType, null, extension, ownerUid, osAccountId);
			timelineManager.addEventsForNewFile(derivedFile, connection);

			// Persist any caller-supplied file attributes for the new object.
			for (Attribute fileAttribute : fileAttributes) {
				fileAttribute.setAttributeParentId(objectId);
				fileAttribute.setCaseDatabase(this);
				addFileAttribute(fileAttribute, connection);
			}

			// Record that the owner's OS account was seen on this data source.
			if (osAccount != null) {
				osAccountManager.newOsAccountInstance(osAccount.getId(), dataSourceObjId, OsAccountInstance.OsAccountInstanceType.ACCESSED, connection);
			}

			return new org.sleuthkit.datamodel.File(this, objectId, dataSourceObjId, fsObjId,
					attrType, attrId, fileName, metaAddr, metaSeq,
					dirType, metaType, dirFlag, metaFlags,
					size, ctime, crtime, atime, mtime,
					(short) 0, 0, 0, md5Hash, sha256Hash, null, parentPath, mimeType, extension, ownerUid, osAccountId, fileAttributes);
		} catch (SQLException ex) {
			throw new TskCoreException(String.format("Failed to INSERT file system file %s (%s) with parent id %d in tsk_files table", fileName, parentPath, parent.getId()), ex);
		} finally {
			closeStatement(queryStatement);
		}
	}

	/**
	 * Get IDs of the virtual folder roots (at the same level as image), used
	 * for containers such as for local
files. * * @return IDs of virtual directory root objects. * * @throws org.sleuthkit.datamodel.TskCoreException */ public List getVirtualDirectoryRoots() throws TskCoreException { CaseDbConnection connection = null; Statement s = null; ResultSet rs = null; acquireSingleUserCaseReadLock(); try { connection = connections.getConnection(); s = connection.createStatement(); rs = connection.executeQuery(s, "SELECT * FROM tsk_files WHERE" //NON-NLS + " type = " + TskData.TSK_DB_FILES_TYPE_ENUM.VIRTUAL_DIR.getFileType() + " AND obj_id = data_source_obj_id" + " ORDER BY dir_type, LOWER(name)"); //NON-NLS List virtDirRootIds = new ArrayList(); while (rs.next()) { virtDirRootIds.add(virtualDirectory(rs, connection)); } return virtDirRootIds; } catch (SQLException ex) { throw new TskCoreException("Error getting local files virtual folder id", ex); } finally { closeResultSet(rs); closeStatement(s); closeConnection(connection); releaseSingleUserCaseReadLock(); } } /** * Adds one or more layout files for a parent Content object to the case * database. * * @param parent The parent Content. * @param fileRanges File range objects for the file(s). * * @return A list of LayoutFile objects. * * @throws TskCoreException If there is a problem completing a case database * operation. */ public final List addLayoutFiles(Content parent, List fileRanges) throws TskCoreException { assert (null != fileRanges); if (null == fileRanges) { throw new TskCoreException("TskFileRange object is null"); } assert (null != parent); if (null == parent) { throw new TskCoreException("Conent is null"); } CaseDbTransaction transaction = null; Statement statement = null; ResultSet resultSet = null; try { transaction = beginTransaction(); CaseDbConnection connection = transaction.getConnection(); List fileRangeLayoutFiles = new ArrayList(); for (TskFileRange fileRange : fileRanges) { /* * Insert a row for the Tsk file range into the tsk_objects * table: INSERT INTO tsk_objects (par_obj_id, type) VALUES (?, * ?) 
*/ long fileRangeId = addObject(parent.getId(), TskData.ObjectType.ABSTRACTFILE.getObjectType(), connection); long end_byte_in_parent = fileRange.getByteStart() + fileRange.getByteLen() - 1; /* * Insert a row for the Tsk file range into the tsk_files table: * INSERT INTO tsk_files (obj_id, fs_obj_id, name, type, * has_path, dir_type, meta_type, dir_flags, meta_flags, size, * ctime, crtime, atime, mtime, md5, known, mime_type, * parent_path, data_source_obj_id,extension, owner_uid, * os_account_obj_id) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, * ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?,?, ?, ?) */ PreparedStatement prepStmt = connection.getPreparedStatement(PREPARED_STATEMENT.INSERT_FILE); prepStmt.clearParameters(); prepStmt.setLong(1, fileRangeId); // obj_id from tsk_objects prepStmt.setNull(2, java.sql.Types.BIGINT); // fs_obj_id prepStmt.setString(3, "Unalloc_" + parent.getId() + "_" + fileRange.getByteStart() + "_" + end_byte_in_parent); // name of form Unalloc_[image obj_id]_[start byte in parent]_[end byte in parent] prepStmt.setShort(4, TSK_DB_FILES_TYPE_ENUM.UNALLOC_BLOCKS.getFileType()); // type prepStmt.setNull(5, java.sql.Types.BIGINT); // has_path prepStmt.setShort(6, TSK_FS_NAME_TYPE_ENUM.REG.getValue()); // dir_type prepStmt.setShort(7, TSK_FS_META_TYPE_ENUM.TSK_FS_META_TYPE_REG.getValue()); // meta_type prepStmt.setShort(8, TSK_FS_NAME_FLAG_ENUM.UNALLOC.getValue()); // dir_flags prepStmt.setShort(9, TSK_FS_META_FLAG_ENUM.UNALLOC.getValue()); // nmeta_flags prepStmt.setLong(10, fileRange.getByteLen()); // size prepStmt.setNull(11, java.sql.Types.BIGINT); // ctime prepStmt.setNull(12, java.sql.Types.BIGINT); // crtime prepStmt.setNull(13, java.sql.Types.BIGINT); // atime prepStmt.setNull(14, java.sql.Types.BIGINT); // mtime prepStmt.setNull(15, java.sql.Types.VARCHAR); // MD5 prepStmt.setNull(16, java.sql.Types.VARCHAR); // SHA-256 prepStmt.setByte(17, FileKnown.UNKNOWN.getFileKnownValue()); // Known prepStmt.setNull(18, java.sql.Types.VARCHAR); // MIME type 
prepStmt.setNull(19, java.sql.Types.VARCHAR); // parent path prepStmt.setLong(20, parent.getId()); // data_source_obj_id //extension, since this is not a FS file we just set it to null prepStmt.setString(21, null); prepStmt.setString(22, OsAccount.NO_OWNER_ID); // ownerUid prepStmt.setNull(23, java.sql.Types.BIGINT); // osAccountObjId connection.executeUpdate(prepStmt); /* * Insert a row in the tsk_layout_file table for each chunk of * the carved file. INSERT INTO tsk_file_layout (obj_id, * byte_start, byte_len, sequence) VALUES (?, ?, ?, ?) */ prepStmt = connection.getPreparedStatement(PREPARED_STATEMENT.INSERT_LAYOUT_FILE); prepStmt.clearParameters(); prepStmt.setLong(1, fileRangeId); // obj_id prepStmt.setLong(2, fileRange.getByteStart()); // byte_start prepStmt.setLong(3, fileRange.getByteLen()); // byte_len prepStmt.setLong(4, fileRange.getSequence()); // sequence connection.executeUpdate(prepStmt); /* * Create a layout file representation of the carved file. */ fileRangeLayoutFiles.add(new LayoutFile(this, fileRangeId, parent.getId(), Long.toString(fileRange.getSequence()), TSK_DB_FILES_TYPE_ENUM.UNALLOC_BLOCKS, TSK_FS_NAME_TYPE_ENUM.REG, TSK_FS_META_TYPE_ENUM.TSK_FS_META_TYPE_REG, TSK_FS_NAME_FLAG_ENUM.UNALLOC, TSK_FS_META_FLAG_ENUM.UNALLOC.getValue(), fileRange.getByteLen(), 0L, 0L, 0L, 0L, null, null, FileKnown.UNKNOWN, parent.getUniquePath(), null, OsAccount.NO_OWNER_ID, OsAccount.NO_ACCOUNT)); } transaction.commit(); transaction = null; return fileRangeLayoutFiles; } catch (SQLException ex) { throw new TskCoreException("Failed to add layout files to case database", ex); } finally { closeResultSet(resultSet); closeStatement(statement); if (null != transaction) { try { transaction.rollback(); } catch (TskCoreException ex2) { logger.log(Level.SEVERE, "Failed to rollback transaction after exception", ex2); } } } } /** * Adds a carving result to the case database. 
	 *
	 * @param carvingResult The carving result (a set of carved files and their
	 *                      parent) to be added.
	 *
	 * @return A list of LayoutFile representations of the carved files.
	 *
	 * @throws TskCoreException If there is a problem completing a case database
	 *                          operation.
	 */
	public final List addCarvedFiles(CarvingResult carvingResult) throws TskCoreException {
		assert (null != carvingResult);
		if (null == carvingResult) {
			throw new TskCoreException("Carving is null");
		}
		assert (null != carvingResult.getParent());
		if (null == carvingResult.getParent()) {
			throw new TskCoreException("Carving result has null parent");
		}
		assert (null != carvingResult.getCarvedFiles());
		if (null == carvingResult.getCarvedFiles()) {
			throw new TskCoreException("Carving result has null carved files");
		}
		CaseDbTransaction transaction = null;
		Statement statement = null;
		ResultSet resultSet = null;
		try {
			/*
			 * Carved files are "re-parented" as children of the $CarvedFiles
			 * virtual directory of the root file system, volume, or image
			 * ancestor of the carved files parent, but if no such ancestor is
			 * found, then the parent specified in the carving result is used.
			 */
			Content root = carvingResult.getParent();
			while (null != root) {
				if (root instanceof FileSystem || root instanceof Volume || root instanceof Image) {
					break;
				}
				root = root.getParent();
			}
			if (null == root) {
				root = carvingResult.getParent();
			}

			/*
			 * Get or create the $CarvedFiles virtual directory for the root
			 * ancestor. The lock guards the rootIdsToCarvedFileDirs cache.
			 */
			VirtualDirectory carvedFilesDir;
			synchronized (carvedFileDirsLock) {
				carvedFilesDir = rootIdsToCarvedFileDirs.get(root.getId());
				if (null == carvedFilesDir) {
					// Cache miss: look for an existing $CarvedFiles child first.
					List rootChildren;
					if (root instanceof FileSystem) {
						rootChildren = ((FileSystem) root).getRootDirectory().getChildren();
					} else {
						rootChildren = root.getChildren();
					}
					for (Content child : rootChildren) {
						if (child instanceof VirtualDirectory && child.getName().equals(VirtualDirectory.NAME_CARVED)) {
							carvedFilesDir = (VirtualDirectory) child;
							break;
						}
					}
					if (null == carvedFilesDir) {
						long parId = root.getId();
						// $CarvedFiles should be a child of the root directory, not the file system
						if (root instanceof FileSystem) {
							Content rootDir = ((FileSystem) root).getRootDirectory();
							parId = rootDir.getId();
						}
						carvedFilesDir = addVirtualDirectory(parId, VirtualDirectory.NAME_CARVED);
					}
					rootIdsToCarvedFileDirs.put(root.getId(), carvedFilesDir);
				}
			}

			/*
			 * Add the carved files to the database as children of the
			 * $CarvedFile directory of the root ancestor.
			 */
			transaction = beginTransaction();
			CaseDbConnection connection = transaction.getConnection();
			String parentPath = getFileParentPath(carvedFilesDir.getId(), connection) + carvedFilesDir.getName() + "/";
			List carvedFiles = new ArrayList<>();
			for (CarvingResult.CarvedFile carvedFile : carvingResult.getCarvedFiles()) {
				/*
				 * Insert a row for the carved file into the tsk_objects table:
				 * INSERT INTO tsk_objects (par_obj_id, type) VALUES (?, ?)
				 */
				long carvedFileId = addObject(carvedFilesDir.getId(), TskData.ObjectType.ABSTRACTFILE.getObjectType(), connection);

				/*
				 * Insert a row for the carved file into the tsk_files table:
				 * INSERT INTO tsk_files (obj_id, fs_obj_id, name, type,
				 * has_path, dir_type, meta_type, dir_flags, meta_flags, size,
				 * ctime, crtime, atime, mtime, md5, known, mime_type,
				 * parent_path, data_source_obj_id,extenion, owner_uid,
				 * os_account_obj_id) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?,
				 * ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
				 */
				PreparedStatement prepStmt = connection.getPreparedStatement(PREPARED_STATEMENT.INSERT_FILE);
				prepStmt.clearParameters();
				prepStmt.setLong(1, carvedFileId); // obj_id
				// Only file-system-rooted carved files get a fs_obj_id.
				if (root instanceof FileSystem) {
					prepStmt.setLong(2, root.getId()); // fs_obj_id
				} else {
					prepStmt.setNull(2, java.sql.Types.BIGINT); // fs_obj_id
				}
				prepStmt.setString(3, carvedFile.getName()); // name
				prepStmt.setShort(4, TSK_DB_FILES_TYPE_ENUM.CARVED.getFileType()); // type
				prepStmt.setShort(5, (short) 1); // has_path
				prepStmt.setShort(6, TSK_FS_NAME_TYPE_ENUM.REG.getValue()); // dir_type
				prepStmt.setShort(7, TSK_FS_META_TYPE_ENUM.TSK_FS_META_TYPE_REG.getValue()); // meta_type
				prepStmt.setShort(8, TSK_FS_NAME_FLAG_ENUM.UNALLOC.getValue()); // dir_flags
				prepStmt.setShort(9, TSK_FS_META_FLAG_ENUM.UNALLOC.getValue()); // meta_flags
				prepStmt.setLong(10, carvedFile.getSizeInBytes()); // size
				prepStmt.setNull(11, java.sql.Types.BIGINT); // ctime
				prepStmt.setNull(12, java.sql.Types.BIGINT); // crtime
				prepStmt.setNull(13, java.sql.Types.BIGINT); // atime
				prepStmt.setNull(14, java.sql.Types.BIGINT); // mtime
				prepStmt.setNull(15, java.sql.Types.VARCHAR); // MD5
				prepStmt.setNull(16, java.sql.Types.VARCHAR); // SHA-256
				prepStmt.setByte(17, FileKnown.UNKNOWN.getFileKnownValue()); // Known
				prepStmt.setNull(18, java.sql.Types.VARCHAR); // MIME type
				prepStmt.setString(19, parentPath); // parent path
				prepStmt.setLong(20, carvedFilesDir.getDataSourceObjectId()); // data_source_obj_id
				prepStmt.setString(21, extractExtension(carvedFile.getName())); //extension
				prepStmt.setString(22, OsAccount.NO_OWNER_ID); // ownerUid
				prepStmt.setNull(23, java.sql.Types.BIGINT); // osAccountObjId
				connection.executeUpdate(prepStmt);

				/*
				 * Insert a row in the tsk_layout_file table for each chunk of
				 * the carved file. INSERT INTO tsk_file_layout (obj_id,
				 * byte_start, byte_len, sequence) VALUES (?, ?, ?, ?)
				 */
				prepStmt = connection.getPreparedStatement(PREPARED_STATEMENT.INSERT_LAYOUT_FILE);
				for (TskFileRange tskFileRange : carvedFile.getLayoutInParent()) {
					prepStmt.clearParameters();
					prepStmt.setLong(1, carvedFileId); // obj_id
					prepStmt.setLong(2, tskFileRange.getByteStart()); // byte_start
					prepStmt.setLong(3, tskFileRange.getByteLen()); // byte_len
					prepStmt.setLong(4, tskFileRange.getSequence()); // sequence
					connection.executeUpdate(prepStmt);
				}

				/*
				 * Create a layout file representation of the carved file.
				 */
				carvedFiles.add(new LayoutFile(this,
						carvedFileId,
						carvedFilesDir.getDataSourceObjectId(),
						carvedFile.getName(),
						TSK_DB_FILES_TYPE_ENUM.CARVED,
						TSK_FS_NAME_TYPE_ENUM.REG,
						TSK_FS_META_TYPE_ENUM.TSK_FS_META_TYPE_REG,
						TSK_FS_NAME_FLAG_ENUM.UNALLOC,
						TSK_FS_META_FLAG_ENUM.UNALLOC.getValue(),
						carvedFile.getSizeInBytes(),
						0L, 0L, 0L, 0L,
						null, null,
						FileKnown.UNKNOWN,
						parentPath,
						null,
						OsAccount.NO_OWNER_ID,
						OsAccount.NO_ACCOUNT));
			}

			transaction.commit();
			transaction = null; // signal the finally block that commit succeeded
			return carvedFiles;
		} catch (SQLException ex) {
			throw new TskCoreException("Failed to add carved files to case database", ex);
		} finally {
			closeResultSet(resultSet);
			closeStatement(statement);

			// Roll back if anything above threw before the commit.
			if (null != transaction) {
				try {
					transaction.rollback();
				} catch (TskCoreException ex2) {
					logger.log(Level.SEVERE, "Failed to rollback transaction after exception", ex2);
				}
			}
		}
	}

	/**
	 * Creates a new derived file object, adds it to database and returns it.
	 *
	 * TODO add support for adding derived method
	 *
	 * @param fileName        file name the derived file
	 * @param localPath       local path of the derived file, including the file
	 *                        name. The path is relative to the database path.
	 * @param size            size of the derived file in bytes
	 * @param ctime           The changed time of the file.
	 * @param crtime          The creation time of the file.
	 * @param atime           The accessed time of the file
	 * @param mtime           The modified time of the file.
	 * @param isFile          whether a file or directory, true if a file
	 * @param parentObj       parent content object
	 * @param rederiveDetails details needed to re-derive file (will be specific
	 *                        to the derivation method), currently unused
	 * @param toolName        name of derivation method/tool, currently unused
	 * @param toolVersion     version of derivation method/tool, currently
	 *                        unused
	 * @param otherDetails    details of derivation method/tool, currently
	 *                        unused
	 * @param encodingType    Type of encoding used on the file (or NONE if no
	 *                        encoding)
	 *
	 * @return newly created derived file object
	 *
	 * @throws TskCoreException exception thrown if the object creation failed
	 *                          due to a critical system error
	 */
	public DerivedFile addDerivedFile(String fileName, String localPath,
			long size, long ctime, long crtime, long atime, long mtime,
			boolean isFile, Content parentObj,
			String rederiveDetails, String toolName, String toolVersion,
			String otherDetails, TskData.EncodingType encodingType) throws TskCoreException {
		// Delegate to the transaction-taking overload inside a locally managed transaction.
		CaseDbTransaction transaction = beginTransaction();
		try {
			DerivedFile df = addDerivedFile(fileName, localPath,
					size, ctime, crtime, atime, mtime,
					isFile, parentObj,
					rederiveDetails, toolName, toolVersion,
					otherDetails, encodingType, transaction);
			transaction.commit();
			return df;
		} catch (TskCoreException ex) {
			transaction.rollback();
			throw ex;
		}
	}

	/**
	 * Creates a new derived file object within a caller-managed transaction,
	 * adds it to the database and returns it. Same parameters as the overload
	 * above, plus the transaction; the caller is responsible for commit/rollback.
	 */
	public DerivedFile addDerivedFile(String fileName, String localPath,
			long size, long ctime, long crtime, long atime, long mtime,
			boolean isFile, Content parentObj,
			String rederiveDetails, String toolName, String toolVersion,
			String otherDetails, TskData.EncodingType encodingType, CaseDbTransaction transaction) throws TskCoreException {
		// Strip off any leading slashes from the local path (leading slashes indicate absolute paths)
		localPath = localPath.replaceAll("^[/\\\\]+", "");

		TimelineManager timelineManager = getTimelineManager();

		CaseDbConnection connection = transaction.getConnection();
		try {
			final long parentId = parentObj.getId();

			// Derive the parent path from the parent object's own path.
			String parentPath = "";
			if (parentObj instanceof BlackboardArtifact) {
				parentPath = parentObj.getUniquePath() + '/' + parentObj.getName() + '/';
			} else if (parentObj instanceof AbstractFile) {
				parentPath = ((AbstractFile) parentObj).getParentPath() + parentObj.getName() + '/'; //NON-NLS
			}

			// Insert a row for the derived file into the tsk_objects table.
			// INSERT INTO tsk_objects (par_obj_id, type) VALUES (?, ?)
			long newObjId = addObject(parentId, TskData.ObjectType.ABSTRACTFILE.getObjectType(), connection);

			// Insert a row for the virtual directory into the tsk_files table.
			// INSERT INTO tsk_files (obj_id, fs_obj_id, name, type, has_path, dir_type, meta_type,
			// dir_flags, meta_flags, size, ctime, crtime, atime, mtime, md5, known, mime_type,
			// parent_path, data_source_obj_id, extension)
			// VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?,?)
			PreparedStatement statement = connection.getPreparedStatement(PREPARED_STATEMENT.INSERT_FILE);
			statement.clearParameters();
			statement.setLong(1, newObjId);

			// If the parentFile is part of a file system, use its file system object ID.
			long fsObjId = this.getFileSystemId(parentId, connection);
			if (fsObjId != -1) {
				statement.setLong(2, fsObjId);
			} else {
				statement.setNull(2, java.sql.Types.BIGINT);
			}
			statement.setString(3, fileName);

			//type, has_path
			statement.setShort(4, TskData.TSK_DB_FILES_TYPE_ENUM.DERIVED.getFileType());
			statement.setShort(5, (short) 1);

			//flags
			final TSK_FS_NAME_TYPE_ENUM dirType = isFile ? TSK_FS_NAME_TYPE_ENUM.REG : TSK_FS_NAME_TYPE_ENUM.DIR;
			statement.setShort(6, dirType.getValue());
			final TSK_FS_META_TYPE_ENUM metaType = isFile ? TSK_FS_META_TYPE_ENUM.TSK_FS_META_TYPE_REG : TSK_FS_META_TYPE_ENUM.TSK_FS_META_TYPE_DIR;
			statement.setShort(7, metaType.getValue());

			//note: using alloc under assumption that derived files derive from alloc files
			final TSK_FS_NAME_FLAG_ENUM dirFlag = TSK_FS_NAME_FLAG_ENUM.ALLOC;
			statement.setShort(8, dirFlag.getValue());
			final short metaFlags = (short) (TSK_FS_META_FLAG_ENUM.ALLOC.getValue()
					| TSK_FS_META_FLAG_ENUM.USED.getValue());
			statement.setShort(9, metaFlags);

			//size
			//prevent negative size
			long savedSize = size < 0 ? 0 : size;
			statement.setLong(10, savedSize);

			//mactimes
			//long ctime, long crtime, long atime, long mtime,
			statement.setLong(11, ctime);
			statement.setLong(12, crtime);
			statement.setLong(13, atime);
			statement.setLong(14, mtime);

			statement.setNull(15, java.sql.Types.VARCHAR); // MD5
			statement.setNull(16, java.sql.Types.VARCHAR); // SHA-256
			statement.setByte(17, FileKnown.UNKNOWN.getFileKnownValue()); // Known
			statement.setNull(18, java.sql.Types.VARCHAR); // MIME type

			//parent path
			statement.setString(19, parentPath);

			// root data source object id
			long dataSourceObjId = getDataSourceObjectId(connection, parentObj);
			statement.setLong(20, dataSourceObjId);
			final String extension = extractExtension(fileName);
			//extension
			statement.setString(21, extension);

			statement.setString(22, OsAccount.NO_OWNER_ID); // ownerUid
			statement.setNull(23, java.sql.Types.BIGINT); // osAccountObjId

			connection.executeUpdate(statement);

			//add localPath
			addFilePath(connection, newObjId, localPath, encodingType);

			DerivedFile derivedFile = new DerivedFile(this, newObjId, dataSourceObjId, fileName, dirType, metaType, dirFlag, metaFlags,
					savedSize, ctime, crtime, atime, mtime, null, null, null, parentPath, localPath, parentId, null, encodingType, extension, OsAccount.NO_OWNER_ID, OsAccount.NO_ACCOUNT);

			timelineManager.addEventsForNewFile(derivedFile, connection);

			//TODO add derived method to tsk_files_derived and tsk_files_derived_method
			return derivedFile;
		} catch (SQLException
ex) { throw new TskCoreException("Failed to add derived file to case database", ex); } } /** * Updates an existing derived file in the database and returns a new * derived file object with the updated contents * * @param derivedFile The derived file you wish to update * @param localPath local path of the derived file, including the file * name. The path is relative to the database path. * @param size size of the derived file in bytes * @param ctime The changed time of the file. * @param crtime The creation time of the file. * @param atime The accessed time of the file * @param mtime The modified time of the file. * @param isFile whether a file or directory, true if a file * @param mimeType The MIME type the updated file should have, null * to unset it * @param rederiveDetails details needed to re-derive file (will be specific * to the derivation method), currently unused * @param toolName name of derivation method/tool, currently unused * @param toolVersion version of derivation method/tool, currently * unused * @param otherDetails details of derivation method/tool, currently * unused * @param encodingType Type of encoding used on the file (or NONE if no * encoding) * * @return newly created derived file object which contains the updated data * * @throws TskCoreException exception thrown if the object creation failed * due to a critical system error */ public DerivedFile updateDerivedFile(DerivedFile derivedFile, String localPath, long size, long ctime, long crtime, long atime, long mtime, boolean isFile, String mimeType, String rederiveDetails, String toolName, String toolVersion, String otherDetails, TskData.EncodingType encodingType) throws TskCoreException { CaseDbTransaction trans = null; try { Content parentObj = derivedFile.getParent(); trans = beginTransaction(); DerivedFile updatedFile = updateDerivedFile(derivedFile, localPath, size, ctime, crtime, atime, mtime, isFile, mimeType, rederiveDetails, toolName, toolVersion, otherDetails, encodingType, 
parentObj, trans); trans.commit(); return updatedFile; } catch (TskCoreException ex) { if (trans != null) { trans.rollback(); } throw ex; } } public DerivedFile updateDerivedFile(DerivedFile derivedFile, String localPath, long size, long ctime, long crtime, long atime, long mtime, boolean isFile, String mimeType, String rederiveDetails, String toolName, String toolVersion, String otherDetails, TskData.EncodingType encodingType, Content parentObj, CaseDbTransaction trans) throws TskCoreException { // Strip off any leading slashes from the local path (leading slashes indicate absolute paths) localPath = localPath.replaceAll("^[/\\\\]+", ""); ResultSet rs = null; try { final long parentId = parentObj.getId(); String parentPath = ""; if (parentObj instanceof BlackboardArtifact) { parentPath = parentObj.getUniquePath() + '/' + parentObj.getName() + '/'; } else if (parentObj instanceof AbstractFile) { parentPath = ((AbstractFile) parentObj).getParentPath() + parentObj.getName() + '/'; //NON-NLS } // UPDATE tsk_files SET type = ?, dir_type = ?, meta_type = ?, dir_flags = ?, meta_flags = ?, " // + "size= ?, ctime= ?, crtime= ?, atime= ?, mtime= ?, mime_type = ? WHERE obj_id = ?"), //NON-NLS PreparedStatement statement = trans.getConnection().getPreparedStatement(PREPARED_STATEMENT.UPDATE_DERIVED_FILE); statement.clearParameters(); //type statement.setShort(1, TskData.TSK_DB_FILES_TYPE_ENUM.DERIVED.getFileType()); //flags final TSK_FS_NAME_TYPE_ENUM dirType = isFile ? TSK_FS_NAME_TYPE_ENUM.REG : TSK_FS_NAME_TYPE_ENUM.DIR; statement.setShort(2, dirType.getValue()); final TSK_FS_META_TYPE_ENUM metaType = isFile ? 
TSK_FS_META_TYPE_ENUM.TSK_FS_META_TYPE_REG : TSK_FS_META_TYPE_ENUM.TSK_FS_META_TYPE_DIR; statement.setShort(3, metaType.getValue()); //note: using alloc under assumption that derived files derive from alloc files final TSK_FS_NAME_FLAG_ENUM dirFlag = TSK_FS_NAME_FLAG_ENUM.ALLOC; statement.setShort(4, dirFlag.getValue()); final short metaFlags = (short) (TSK_FS_META_FLAG_ENUM.ALLOC.getValue() | TSK_FS_META_FLAG_ENUM.USED.getValue()); statement.setShort(5, metaFlags); //size //prevent negative size long savedSize = size < 0 ? 0 : size; statement.setLong(6, savedSize); //mactimes //long ctime, long crtime, long atime, long mtime, statement.setLong(7, ctime); statement.setLong(8, crtime); statement.setLong(9, atime); statement.setLong(10, mtime); statement.setString(11, mimeType); statement.setString(12, String.valueOf(derivedFile.getId())); trans.getConnection().executeUpdate(statement); //add localPath updateFilePath(trans.getConnection(), derivedFile.getId(), localPath, encodingType); long dataSourceObjId = getDataSourceObjectId(trans.getConnection(), parentObj); final String extension = extractExtension(derivedFile.getName()); return new DerivedFile(this, derivedFile.getId(), dataSourceObjId, derivedFile.getName(), dirType, metaType, dirFlag, metaFlags, savedSize, ctime, crtime, atime, mtime, null, null, null, parentPath, localPath, parentId, null, encodingType, extension, derivedFile.getOwnerUid().orElse(null), derivedFile.getOsAccountObjectId().orElse(null)); } catch (SQLException ex) { throw new TskCoreException("Failed to add derived file to case database", ex); } finally { closeResultSet(rs); } } /** * Wraps the version of addLocalFile that takes a Transaction in a * transaction local to this method. 
* * @param fileName * @param localPath * @param size * @param ctime * @param crtime * @param atime * @param mtime * @param isFile * @param encodingType * @param parent * * @return * * @throws TskCoreException */ public LocalFile addLocalFile(String fileName, String localPath, long size, long ctime, long crtime, long atime, long mtime, boolean isFile, TskData.EncodingType encodingType, AbstractFile parent) throws TskCoreException { CaseDbTransaction localTrans = beginTransaction(); try { LocalFile created = addLocalFile(fileName, localPath, size, ctime, crtime, atime, mtime, isFile, encodingType, parent, localTrans); localTrans.commit(); localTrans = null; return created; } finally { if (null != localTrans) { try { localTrans.rollback(); } catch (TskCoreException ex2) { logger.log(Level.SEVERE, "Failed to rollback transaction after exception", ex2); } } } } /** * Adds a local/logical file to the case database. The database operations * are done within a caller-managed transaction; the caller is responsible * for committing or rolling back the transaction. * * @param fileName The name of the file. * @param localPath The absolute path (including the file name) of the * local/logical in secondary storage. * @param size The size of the file in bytes. * @param ctime The changed time of the file. * @param crtime The creation time of the file. * @param atime The accessed time of the file * @param mtime The modified time of the file. * @param isFile True, unless the file is a directory. * @param encodingType Type of encoding used on the file * @param parent The parent of the file (e.g., a virtual directory) * @param transaction A caller-managed transaction within which the add * file operations are performed. * * @return An object representing the local/logical file. * * @throws TskCoreException if there is an error completing a case database * operation. 
*/ public LocalFile addLocalFile(String fileName, String localPath, long size, long ctime, long crtime, long atime, long mtime, boolean isFile, TskData.EncodingType encodingType, Content parent, CaseDbTransaction transaction) throws TskCoreException { return addLocalFile(fileName, localPath, size, ctime, crtime, atime, mtime, null, null, null, isFile, encodingType, parent, transaction); } /** * Adds a local/logical file to the case database. The database operations * are done within a caller-managed transaction; the caller is responsible * for committing or rolling back the transaction. * * @param fileName The name of the file. * @param localPath The absolute path (including the file name) of the * local/logical in secondary storage. * @param size The size of the file in bytes. * @param ctime The changed time of the file. * @param crtime The creation time of the file. * @param atime The accessed time of the file * @param mtime The modified time of the file. * @param md5 The MD5 hash of the file * @param sha256 the SHA-256 hash of the file. * @param known The known status of the file (can be null) * @param mimeType The MIME type of the file * @param isFile True, unless the file is a directory. * @param encodingType Type of encoding used on the file * @param parent The parent of the file (e.g., a virtual directory) * @param transaction A caller-managed transaction within which the add * file operations are performed. * * @return An object representing the local/logical file. * * @throws TskCoreException if there is an error completing a case database * operation. 
*/ public LocalFile addLocalFile(String fileName, String localPath, long size, long ctime, long crtime, long atime, long mtime, String md5, String sha256, FileKnown known, String mimeType, boolean isFile, TskData.EncodingType encodingType, Content parent, CaseDbTransaction transaction) throws TskCoreException { return addLocalFile(fileName, localPath, size, ctime, crtime, atime, mtime, md5, sha256, known, mimeType, isFile, encodingType, OsAccount.NO_ACCOUNT, OsAccount.NO_OWNER_ID, parent, transaction); } /** * Adds a local/logical file to the case database. The database operations * are done within a caller-managed transaction; the caller is responsible * for committing or rolling back the transaction. * * @param fileName The name of the file. * @param localPath The absolute path (including the file name) of the * local/logical in secondary storage. * @param size The size of the file in bytes. * @param ctime The changed time of the file. * @param crtime The creation time of the file. * @param atime The accessed time of the file * @param mtime The modified time of the file. * @param md5 The MD5 hash of the file * @param sha256 the SHA-256 hash of the file. * @param known The known status of the file (can be null) * @param mimeType The MIME type of the file * @param isFile True, unless the file is a directory. * @param encodingType Type of encoding used on the file * @param osAccountId OS account id (can be null) * @param ownerAccount Owner account (can be null) * @param parent The parent of the file (e.g., a virtual directory) * @param transaction A caller-managed transaction within which the add * file operations are performed. * * @return An object representing the local/logical file. * * @throws TskCoreException if there is an error completing a case database * operation. 
*/
	public LocalFile addLocalFile(String fileName, String localPath,
			long size, long ctime, long crtime, long atime, long mtime,
			String md5, String sha256, FileKnown known, String mimeType,
			boolean isFile, TskData.EncodingType encodingType,
			Long osAccountId, String ownerAccount,
			Content parent, CaseDbTransaction transaction) throws TskCoreException {
		CaseDbConnection connection = transaction.getConnection();
		// NOTE(review): queryStatement is never assigned; it exists only for
		// the closeStatement() call in the finally block.
		Statement queryStatement = null;
		try {

			// Insert a row for the local/logical file into the tsk_objects table.
			// INSERT INTO tsk_objects (par_obj_id, type) VALUES (?, ?)
			long objectId = addObject(parent.getId(), TskData.ObjectType.ABSTRACTFILE.getObjectType(), connection);

			// Insert a row for the local/logical file into the tsk_files table.
			// INSERT INTO tsk_files (obj_id, fs_obj_id, name, type, has_path, dir_type, meta_type,
			// dir_flags, meta_flags, size, ctime, crtime, atime, mtime, md5, known, mime_type,
			// parent_path, data_source_obj_id,extension, uid_str, os_account_obj_id)
			// VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?,?, ?, ?)
			PreparedStatement statement = connection.getPreparedStatement(PREPARED_STATEMENT.INSERT_FILE);
			statement.clearParameters();
			statement.setLong(1, objectId);
			statement.setNull(2, java.sql.Types.BIGINT); // Not part of a file system
			statement.setString(3, fileName);
			statement.setShort(4, TskData.TSK_DB_FILES_TYPE_ENUM.LOCAL.getFileType());
			statement.setShort(5, (short) 1); // has_path
			TSK_FS_NAME_TYPE_ENUM dirType = isFile ? TSK_FS_NAME_TYPE_ENUM.REG : TSK_FS_NAME_TYPE_ENUM.DIR;
			statement.setShort(6, dirType.getValue());
			TSK_FS_META_TYPE_ENUM metaType = isFile ? TSK_FS_META_TYPE_ENUM.TSK_FS_META_TYPE_REG : TSK_FS_META_TYPE_ENUM.TSK_FS_META_TYPE_DIR;
			statement.setShort(7, metaType.getValue());
			TSK_FS_NAME_FLAG_ENUM dirFlag = TSK_FS_NAME_FLAG_ENUM.ALLOC;
			statement.setShort(8, dirFlag.getValue());
			short metaFlags = (short) (TSK_FS_META_FLAG_ENUM.ALLOC.getValue()
					| TSK_FS_META_FLAG_ENUM.USED.getValue());
			statement.setShort(9, metaFlags);
			//prevent negative size
			long savedSize = size < 0 ? 0 : size;
			statement.setLong(10, savedSize);
			statement.setLong(11, ctime);
			statement.setLong(12, crtime);
			statement.setLong(13, atime);
			statement.setLong(14, mtime);
			statement.setString(15, md5);
			statement.setString(16, sha256);
			// Default the known status when the caller did not supply one.
			if (known != null) {
				statement.setByte(17, known.getFileKnownValue());
			} else {
				statement.setByte(17, FileKnown.UNKNOWN.getFileKnownValue());
			}
			statement.setString(18, mimeType);
			// Compute parent path and data source id; a root-directory parent
			// collapses the parent path to "/".
			String parentPath;
			long dataSourceObjId;
			if (parent instanceof AbstractFile) {
				AbstractFile parentFile = (AbstractFile) parent;
				if (isRootDirectory(parentFile, transaction)) {
					parentPath = "/";
				} else {
					parentPath = parentFile.getParentPath() + parent.getName() + "/"; //NON-NLS
				}
				dataSourceObjId = parentFile.getDataSourceObjectId();
			} else {
				parentPath = "/";
				dataSourceObjId = getDataSourceObjectId(connection, parent);
			}
			statement.setString(19, parentPath);
			statement.setLong(20, dataSourceObjId);
			final String extension = extractExtension(fileName);
			statement.setString(21, extension);
			if (ownerAccount != null) {
				statement.setString(22, ownerAccount); // ownerUid
			} else {
				statement.setNull(22, java.sql.Types.VARCHAR);
			}
			if (osAccountId != null) {
				statement.setLong(23, osAccountId); // osAccountObjId
			} else {
				statement.setNull(23, java.sql.Types.BIGINT);
			}
			connection.executeUpdate(statement);
			addFilePath(connection, objectId, localPath, encodingType);
			LocalFile localFile = new LocalFile(this, objectId, fileName, TSK_DB_FILES_TYPE_ENUM.LOCAL,
					dirType, metaType, dirFlag, metaFlags,
					savedSize, ctime, crtime, atime, mtime,
					mimeType, md5, sha256, known,
					parent.getId(), parentPath,
					dataSourceObjId, localPath, encodingType, extension, ownerAccount, osAccountId);
			getTimelineManager().addEventsForNewFile(localFile, connection);
			return localFile;
		} catch (SQLException ex) {
			throw new TskCoreException(String.format("Failed to INSERT local file %s (%s) with parent id %d in tsk_files table", fileName, localPath, parent.getId()), ex);
		} finally {
			closeStatement(queryStatement);
		}
	}

	/**
	 * Utility class to create keys for the cache used in isRootDirectory(). The
	 * dataSourceId must be set but the fileSystemId can be null (for local
	 * directories, for example).
	 */
	private class RootDirectoryKey {

		// Object id of the data source this key belongs to.
		private long dataSourceId;
		// Object id of the file system; null when there is none.
		private Long fileSystemId;

		RootDirectoryKey(long dataSourceId, Long fileSystemId) {
			this.dataSourceId = dataSourceId;
			this.fileSystemId = fileSystemId;
		}

		@Override
		public int hashCode() {
			int hash = 7;
			hash = 41 * hash + Objects.hashCode(dataSourceId);
			hash = 41 * hash + Objects.hashCode(fileSystemId);
			return hash;
		}

		@Override
		public boolean equals(Object obj) {
			if (this == obj) {
				return true;
			}
			if (obj == null) {
				return false;
			}
			if (getClass() != obj.getClass()) {
				return false;
			}
			RootDirectoryKey otherKey = (RootDirectoryKey) obj;
			if (dataSourceId != otherKey.dataSourceId) {
				return false;
			}
			// Null-safe comparison of the optional file system id.
			if (fileSystemId != null) {
				return fileSystemId.equals(otherKey.fileSystemId);
			}
			return (otherKey.fileSystemId == null);
		}
	}

	/**
	 * Check whether a given AbstractFile is the "root" directory. True if the
	 * AbstractFile either has no parent or its parent is an image, volume,
	 * volume system, or file system.
	 *
	 * @param file        the file to test
	 * @param transaction the current transaction
	 *
	 * @return true if the file is a root directory, false otherwise
	 *
	 * @throws TskCoreException
	 */
	private boolean isRootDirectory(AbstractFile file, CaseDbTransaction transaction) throws TskCoreException {

		// First check if we know the root directory for this data source and optionally
		// file system. There is only one root, so if we know it we can simply compare
		// this file ID to the known root directory.
		Long fsObjId = null;
		if (file instanceof FsContent) {
			fsObjId = ((FsContent) file).getFileSystemId();
		}
		RootDirectoryKey key = new RootDirectoryKey(file.getDataSourceObjectId(), fsObjId);
		synchronized (rootDirectoryMapLock) {
			if (rootDirectoryMap.containsKey(key)) {
				return rootDirectoryMap.get(key).equals(file.getId());
			}
		}

		// Fallback cache. We store the result of each database lookup
		// so it won't be done multiple times in a row. In practice, this will
		// only be used if this method was never called on the root directory.
		Boolean isRoot = isRootDirectoryCache.getIfPresent(file.getId());
		if (isRoot != null) {
			return isRoot;
		}

		CaseDbConnection connection = transaction.getConnection();
		Statement statement = null;
		ResultSet resultSet = null;
		try {
			// Look up the parent row's object type for this file.
			String query = String.format("SELECT ParentRow.type AS parent_type, ParentRow.obj_id AS parent_object_id "
					+ "FROM tsk_objects ParentRow JOIN tsk_objects ChildRow ON ChildRow.par_obj_id = ParentRow.obj_id "
					+ "WHERE ChildRow.obj_id = %s;", file.getId());
			statement = connection.createStatement();
			resultSet = statement.executeQuery(query);
			if (resultSet.next()) {
				long parentId = resultSet.getLong("parent_object_id");
				if (parentId == 0) {
					return true;
				}
				int type = resultSet.getInt("parent_type");
				// Root iff the parent is an image, volume system, volume, or file system.
				boolean result = type == TskData.ObjectType.IMG.getObjectType()
						|| type == TskData.ObjectType.VS.getObjectType()
						|| type == TskData.ObjectType.VOL.getObjectType()
						|| type == TskData.ObjectType.FS.getObjectType();
				if (result == true) {
					synchronized (rootDirectoryMapLock) {
						// This is a root directory so save it
						rootDirectoryMap.put(key, file.getId());
					}
				}
				isRootDirectoryCache.put(file.getId(), result);
				return result;
			} else {
				// This is a root directory so save it
				synchronized (rootDirectoryMapLock) {
					rootDirectoryMap.put(key, file.getId());
				}
				isRootDirectoryCache.put(file.getId(), true);
				return true; // The file has no parent
			}
} catch (SQLException ex) { throw new TskCoreException(String.format("Failed to lookup parent of file (%s) with id %d", file.getName(), file.getId()), ex); } finally { closeResultSet(resultSet); closeStatement(statement); } } /** * Add a new layout file to the database. * * @param fileName The name of the file. * @param size The size of the file in bytes. * @param dirFlag The allocated status from the name structure * @param metaFlag The allocated status from the metadata structure * @param ctime The changed time of the file. * @param crtime The creation time of the file. * @param atime The accessed time of the file * @param mtime The modified time of the file. * @param fileRanges The byte ranges that belong to this file (relative to * start of image) * @param parent The parent of the file * * @return The new LayoutFile * * @throws TskCoreException */ public LayoutFile addLayoutFile(String fileName, long size, TSK_FS_NAME_FLAG_ENUM dirFlag, TSK_FS_META_FLAG_ENUM metaFlag, long ctime, long crtime, long atime, long mtime, List fileRanges, Content parent) throws TskCoreException { if (null == parent) { throw new TskCoreException("Parent can not be null"); } String parentPath; if (parent instanceof AbstractFile) { parentPath = ((AbstractFile) parent).getParentPath() + parent.getName() + '/'; //NON-NLS } else { parentPath = "/"; } CaseDbTransaction transaction = null; Statement statement = null; ResultSet resultSet = null; try { transaction = beginTransaction(); CaseDbConnection connection = transaction.getConnection(); /* * Insert a row for the layout file into the tsk_objects table: * INSERT INTO tsk_objects (par_obj_id, type) VALUES (?, ?) 
*/
			long newFileId = addObject(parent.getId(), TskData.ObjectType.ABSTRACTFILE.getObjectType(), connection);

			/*
			 * Insert a row for the file into the tsk_files table: INSERT INTO
			 * tsk_files (obj_id, fs_obj_id, name, type, has_path, dir_type,
			 * meta_type, dir_flags, meta_flags, size, ctime, crtime, atime,
			 * mtime, md5, known, mime_type, parent_path,
			 * data_source_obj_id,extenion, owner_uid, os_account_obj_id) VALUES
			 * (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
			 */
			PreparedStatement prepStmt = connection.getPreparedStatement(PREPARED_STATEMENT.INSERT_FILE);
			prepStmt.clearParameters();
			prepStmt.setLong(1, newFileId); // obj_id
			// If the parent is part of a file system, grab its file system ID
			if (0 != parent.getId()) {
				long parentFs = this.getFileSystemId(parent.getId(), connection);
				if (parentFs != -1) {
					prepStmt.setLong(2, parentFs);
				} else {
					prepStmt.setNull(2, java.sql.Types.BIGINT);
				}
			} else {
				prepStmt.setNull(2, java.sql.Types.BIGINT);
			}
			prepStmt.setString(3, fileName); // name
			prepStmt.setShort(4, TSK_DB_FILES_TYPE_ENUM.LAYOUT_FILE.getFileType()); // type
			prepStmt.setShort(5, (short) 0); // has_path
			prepStmt.setShort(6, TSK_FS_NAME_TYPE_ENUM.REG.getValue()); // dir_type
			prepStmt.setShort(7, TSK_FS_META_TYPE_ENUM.TSK_FS_META_TYPE_REG.getValue()); // meta_type
			prepStmt.setShort(8, dirFlag.getValue()); // dir_flags
			prepStmt.setShort(9, metaFlag.getValue()); // meta_flags
			//prevent negative size
			long savedSize = size < 0 ? 0 : size;
			prepStmt.setLong(10, savedSize); // size
			prepStmt.setLong(11, ctime); // ctime
			prepStmt.setLong(12, crtime); // crtime
			prepStmt.setLong(13, atime); // atime
			prepStmt.setLong(14, mtime); // mtime
			prepStmt.setNull(15, java.sql.Types.VARCHAR); // MD5
			prepStmt.setNull(16, java.sql.Types.VARCHAR); // SHA-256
			prepStmt.setByte(17, FileKnown.UNKNOWN.getFileKnownValue()); // Known
			prepStmt.setNull(18, java.sql.Types.VARCHAR); // MIME type
			prepStmt.setString(19, parentPath); // parent path
			prepStmt.setLong(20, parent.getDataSource().getId()); // data_source_obj_id
			prepStmt.setString(21, extractExtension(fileName)); //extension
			prepStmt.setString(22, OsAccount.NO_OWNER_ID); // ownerUid
			prepStmt.setNull(23, java.sql.Types.BIGINT); // osAccountObjId
			connection.executeUpdate(prepStmt);

			/*
			 * Insert a row in the tsk_layout_file table for each chunk of the
			 * carved file. INSERT INTO tsk_file_layout (obj_id, byte_start,
			 * byte_len, sequence) VALUES (?, ?, ?, ?)
			 */
			prepStmt = connection.getPreparedStatement(PREPARED_STATEMENT.INSERT_LAYOUT_FILE);
			for (TskFileRange tskFileRange : fileRanges) {
				prepStmt.clearParameters();
				prepStmt.setLong(1, newFileId); // obj_id
				prepStmt.setLong(2, tskFileRange.getByteStart()); // byte_start
				prepStmt.setLong(3, tskFileRange.getByteLen()); // byte_len
				prepStmt.setLong(4, tskFileRange.getSequence()); // sequence
				connection.executeUpdate(prepStmt);
			}

			/*
			 * Create a layout file representation of the carved file.
			 */
			LayoutFile layoutFile = new LayoutFile(this, newFileId, parent.getDataSource().getId(), fileName,
					TSK_DB_FILES_TYPE_ENUM.LAYOUT_FILE,
					TSK_FS_NAME_TYPE_ENUM.REG, TSK_FS_META_TYPE_ENUM.TSK_FS_META_TYPE_REG,
					dirFlag, metaFlag.getValue(),
					savedSize, ctime, crtime, atime, mtime,
					null, null, FileKnown.UNKNOWN, parentPath, null, OsAccount.NO_OWNER_ID, OsAccount.NO_ACCOUNT);

			transaction.commit();
			// Null out the transaction so the finally block knows the commit succeeded.
			transaction = null;
			return layoutFile;
		} catch (SQLException ex) {
			throw new TskCoreException("Failed to add layout file " + fileName + " to case database", ex);
		} finally {
			closeResultSet(resultSet);
			closeStatement(statement);
			if (null != transaction) {
				try {
					transaction.rollback();
				} catch (TskCoreException ex2) {
					logger.log(Level.SEVERE, "Failed to rollback transaction after exception", ex2);
				}
			}
		}
	}

	/**
	 * Given a Content object, return its data source object ID. For
	 * AbstractFiles, this simply returns the data source ID field.
	 *
	 * @param connection A case database connection.
	 * @param content    The content to look up the data source object ID.
	 *
	 * @return A data source object id.
	 */
	private long getDataSourceObjectId(CaseDbConnection connection, Content content) throws TskCoreException {
		if (content == null) {
			throw new TskCoreException("Null Content parameter given");
		}
		if (content instanceof AbstractFile) {
			return ((AbstractFile) content).getDataSourceObjectId();
		} else {
			return getDataSourceObjectId(connection, content.getId());
		}
	}

	/**
	 * Given an object id, works up the tree of ancestors to the data source for
	 * the object and gets the object id of the data source. The trivial case
	 * where the input object id is for a source is handled.
	 *
	 * @param connection A case database connection.
	 * @param objectId   An object id.
	 *
	 * @return A data source object id.
	 *
	 * @throws TskCoreException if there is an error querying the case database.
*/ private long getDataSourceObjectId(CaseDbConnection connection, long objectId) throws TskCoreException { acquireSingleUserCaseReadLock(); Statement statement = null; ResultSet resultSet = null; try { statement = connection.createStatement(); long dataSourceObjId; long ancestorId = objectId; do { dataSourceObjId = ancestorId; String query = String.format("SELECT par_obj_id FROM tsk_objects WHERE obj_id = %s;", ancestorId); resultSet = statement.executeQuery(query); if (resultSet.next()) { ancestorId = resultSet.getLong("par_obj_id"); } else { throw new TskCoreException(String.format("tsk_objects table is corrupt, SQL query returned no result: %s", query)); } resultSet.close(); resultSet = null; } while (0 != ancestorId); // Not NULL return dataSourceObjId; } catch (SQLException ex) { throw new TskCoreException(String.format("Error finding root data source for object (obj_id = %d)", objectId), ex); } finally { closeResultSet(resultSet); closeStatement(statement); releaseSingleUserCaseReadLock(); } } /** * Add a path (such as a local path) for a content object to tsk_file_paths * * @param connection A case database connection. * @param objId The object id of the file for which to add the path. * @param path The path to add. * @param type The TSK encoding type of the file. * * @throws SQLException Thrown if database error occurred and path was not * added. */ private void addFilePath(CaseDbConnection connection, long objId, String path, TskData.EncodingType type) throws SQLException { PreparedStatement statement = connection.getPreparedStatement(PREPARED_STATEMENT.INSERT_LOCAL_PATH); statement.clearParameters(); statement.setLong(1, objId); statement.setString(2, path); statement.setInt(3, type.getType()); connection.executeUpdate(statement); } /** * Update the path for a content object in the tsk_file_paths table * * @param connection A case database connection. * @param objId The object id of the file for which to update the path. * @param path The path to update. 
* @param type       The TSK encoding type of the file.
	 *
	 * @throws SQLException Thrown if database error occurred and path was not
	 *                      updated.
	 */
	private void updateFilePath(CaseDbConnection connection, long objId, String path, TskData.EncodingType type) throws SQLException {
		PreparedStatement statement = connection.getPreparedStatement(PREPARED_STATEMENT.UPDATE_LOCAL_PATH);
		statement.clearParameters();
		// Parameters: new path, new encoding type, target object id.
		statement.setString(1, path);
		statement.setInt(2, type.getType());
		statement.setLong(3, objId);
		connection.executeUpdate(statement);
	}

	/**
	 * Find all files by name and parent
	 *
	 * @param fileName   Pattern of the name of the file or directory to match
	 *                   (case insensitive, used in LIKE SQL statement).
	 * @param parentFile Object for parent file/directory to find children in
	 *
	 * @return a list of AbstractFile for files/directories whose name matches
	 *         fileName and that were inside a directory described by
	 *         parentFile.
	 *
	 * @throws org.sleuthkit.datamodel.TskCoreException
	 */
	public List findFilesInFolder(String fileName, AbstractFile parentFile) throws TskCoreException {
		// When the name has no LIKE wildcard, an exact extension can be
		// extracted and used by the extension-filtered query below.
		String ext = "";
		if (!containsLikeWildcard(fileName)) {
			ext = SleuthkitCase.extractExtension(fileName);
		}
		CaseDbConnection connection = null;
		ResultSet rs = null;
		long parentId = parentFile.getId();
		acquireSingleUserCaseReadLock();
		try {
			connection = connections.getConnection();
			PreparedStatement statement;
			if (ext.isEmpty()) {
				statement = connection.getPreparedStatement(PREPARED_STATEMENT.SELECT_FILES_BY_PARENT_AND_NAME);
				statement.clearParameters();
				statement.setLong(1, parentId);
				statement.setString(2, fileName);
			} else {
				statement = connection.getPreparedStatement(PREPARED_STATEMENT.SELECT_FILES_BY_EXTENSION_AND_PARENT_AND_NAME);
				statement.clearParameters();
				statement.setString(1, ext);
				statement.setLong(2, parentId);
				statement.setString(3, fileName);
			}
			rs = connection.executeQuery(statement);
			return resultSetToAbstractFiles(rs, connection);
		} catch (SQLException ex) {
			throw new TskCoreException("Error getting AbstractFile children with name=" + fileName + " for Content parent with ID=" + parentFile.getId(), ex);
		} finally {
			closeResultSet(rs);
			closeConnection(connection);
			releaseSingleUserCaseReadLock();
		}
	}

	/**
	 * Count files matching the specific Where clause
	 *
	 * @param sqlWhereClause a SQL where clause appropriate for the desired
	 *                       files (do not begin the WHERE clause with the word
	 *                       WHERE!)
	 *
	 * @return count of files each of which satisfy the given WHERE clause
	 *
	 * @throws TskCoreException \ref query_database_page
	 */
	public long countFilesWhere(String sqlWhereClause) throws TskCoreException {
		CaseDbConnection connection = null;
		Statement s = null;
		ResultSet rs = null;
		acquireSingleUserCaseReadLock();
		try {
			connection = connections.getConnection();
			s = connection.createStatement();
			rs = connection.executeQuery(s, "SELECT COUNT(*) AS count FROM tsk_files WHERE " + sqlWhereClause); //NON-NLS
			rs.next();
			return rs.getLong("count");
		} catch (SQLException e) {
			throw new TskCoreException("SQLException thrown when calling 'SleuthkitCase.countFilesWhere().", e);
		} finally {
			// Release resources and the read lock even on error paths.
			closeResultSet(rs);
			closeStatement(s);
			closeConnection(connection);
			releaseSingleUserCaseReadLock();
		}
	}

	/**
	 * Find and return list of all (abstract) files matching the specific Where
	 * clause. You need to know the database schema to use this, which is
	 * outlined on the wiki. You should use enums from
	 * org.sleuthkit.datamodel.TskData to make the queries easier to maintain
	 * and understand.
	 *
	 * @param sqlWhereClause a SQL where clause appropriate for the desired
	 *                       files (do not begin the WHERE clause with the word
	 *                       WHERE!)
* * @return a list of AbstractFile each of which satisfy the given WHERE * clause * * @throws TskCoreException \ref query_database_page */ public List findAllFilesWhere(String sqlWhereClause) throws TskCoreException { CaseDbConnection connection = null; Statement s = null; ResultSet rs = null; acquireSingleUserCaseReadLock(); try { connection = connections.getConnection(); s = connection.createStatement(); rs = connection.executeQuery(s, "SELECT * FROM tsk_files WHERE " + sqlWhereClause); //NON-NLS return resultSetToAbstractFiles(rs, connection); } catch (SQLException e) { throw new TskCoreException("SQLException thrown when calling 'SleuthkitCase.findAllFilesWhere(): " + sqlWhereClause, e); } finally { closeResultSet(rs); closeStatement(s); closeConnection(connection); releaseSingleUserCaseReadLock(); } } /** * Find and return list of all (abstract) files matching the specific Where * clause with the give parentId. You need to know the database schema to * use this, which is outlined on the * wiki. * You should use enums from org.sleuthkit.datamodel.TskData to make the * queries easier to maintain and understand. * * @param parentId The parentId * @param sqlWhereClause a SQL where clause appropriate for the desired * files (do not begin the WHERE clause with the word * WHERE!) 
* * @return a list of AbstractFile each of which satisfy the given WHERE * clause * * @throws TskCoreException \ref query_database_page */ public List findAllFilesInFolderWhere(long parentId, String sqlWhereClause) throws TskCoreException { String queryTemplate = "SELECT tsk_files.* FROM tsk_files JOIN tsk_objects ON tsk_objects.obj_id = tsk_files.obj_id WHERE par_obj_id = %d AND %s"; acquireSingleUserCaseReadLock(); try (CaseDbConnection connection = connections.getConnection()) { String query = String.format(queryTemplate, parentId, sqlWhereClause); try (Statement s = connection.createStatement(); ResultSet rs = connection.executeQuery(s, query)) { return resultSetToAbstractFiles(rs, connection); } catch (SQLException ex) { throw new TskCoreException("SQLException thrown when calling 'SleuthkitCase.findAllFilesInFolderWhere(): " + query, ex); } } finally { releaseSingleUserCaseReadLock(); } } /** * Find and return list of all (abstract) ids of files matching the specific * Where clause * * @param sqlWhereClause a SQL where clause appropriate for the desired * files (do not begin the WHERE clause with the word * WHERE!) 
* * @return a list of file ids each of which satisfy the given WHERE clause * * @throws TskCoreException \ref query_database_page */ public List findAllFileIdsWhere(String sqlWhereClause) throws TskCoreException { CaseDbConnection connection = null; Statement s = null; ResultSet rs = null; acquireSingleUserCaseReadLock(); try { connection = connections.getConnection(); s = connection.createStatement(); rs = connection.executeQuery(s, "SELECT obj_id FROM tsk_files WHERE " + sqlWhereClause); //NON-NLS List ret = new ArrayList<>(); while (rs.next()) { ret.add(rs.getLong("obj_id")); } return ret; } catch (SQLException e) { throw new TskCoreException("SQLException thrown when calling 'SleuthkitCase.findAllFileIdsWhere(): " + sqlWhereClause, e); } finally { closeResultSet(rs); closeStatement(s); closeConnection(connection); releaseSingleUserCaseReadLock(); } } /** * @param dataSource the data source (Image, VirtualDirectory for file-sets, * etc) to search for the given file name * @param filePath The full path to the file(s) of interest. This can * optionally include the image and volume names. Treated * in a case- insensitive manner. * * @return a list of AbstractFile that have the given file path. * * @throws org.sleuthkit.datamodel.TskCoreException */ public List openFiles(Content dataSource, String filePath) throws TskCoreException { // get the non-unique path (strip of image and volume path segments, if // the exist. 
String path = AbstractFile.createNonUniquePath(filePath).toLowerCase(); // split the file name from the parent path int lastSlash = path.lastIndexOf('/'); //NON-NLS // if the last slash is at the end, strip it off if (lastSlash == path.length()) { path = path.substring(0, lastSlash - 1); lastSlash = path.lastIndexOf('/'); //NON-NLS } String parentPath = path.substring(0, lastSlash); String fileName = path.substring(lastSlash); return findFiles(dataSource, fileName, parentPath); } /** * Get file layout ranges from tsk_file_layout, for a file with specified id * * @param id of the file to get file layout ranges for * * @return list of populated file ranges * * @throws TskCoreException thrown if a critical error occurred within tsk * core */ public List getFileRanges(long id) throws TskCoreException { CaseDbConnection connection = null; Statement s = null; ResultSet rs = null; acquireSingleUserCaseReadLock(); try { connection = connections.getConnection(); s = connection.createStatement(); rs = connection.executeQuery(s, "SELECT * FROM tsk_file_layout WHERE obj_id = " + id + " ORDER BY sequence"); List ranges = new ArrayList(); while (rs.next()) { TskFileRange range = new TskFileRange(rs.getLong("byte_start"), //NON-NLS rs.getLong("byte_len"), rs.getLong("sequence")); //NON-NLS ranges.add(range); } return ranges; } catch (SQLException ex) { throw new TskCoreException("Error getting TskFileLayoutRanges by id, id = " + id, ex); } finally { closeResultSet(rs); closeStatement(s); closeConnection(connection); releaseSingleUserCaseReadLock(); } } /** * Get am image by the image object id * * @param id of the image object * * @return Image object populated * * @throws TskCoreException thrown if a critical error occurred within tsk * core */ public Image getImageById(long id) throws TskCoreException { CaseDbConnection connection = null; Statement s = null; ResultSet rs = null; acquireSingleUserCaseReadLock(); try { connection = connections.getConnection(); s = 
connection.createStatement(); rs = connection.executeQuery(s, "SELECT tsk_image_info.type, tsk_image_info.ssize, tsk_image_info.tzone, tsk_image_info.size, tsk_image_info.md5, tsk_image_info.sha1, tsk_image_info.sha256, tsk_image_info.display_name, data_source_info.device_id, tsk_image_names.name " + "FROM tsk_image_info " + "INNER JOIN data_source_info ON tsk_image_info.obj_id = data_source_info.obj_id " + "LEFT JOIN tsk_image_names ON tsk_image_names.obj_id = data_source_info.obj_id " + "WHERE tsk_image_info.obj_id = " + id); //NON-NLS List imagePaths = new ArrayList<>(); long type, ssize, size; String tzone, md5, sha1, sha256, name, device_id, imagePath; if (rs.next()) { imagePath = rs.getString("name"); if (imagePath != null) { imagePaths.add(imagePath); } type = rs.getLong("type"); //NON-NLS ssize = rs.getLong("ssize"); //NON-NLS tzone = rs.getString("tzone"); //NON-NLS size = rs.getLong("size"); //NON-NLS md5 = rs.getString("md5"); //NON-NLS sha1 = rs.getString("sha1"); //NON-NLS sha256 = rs.getString("sha256"); //NON-NLS name = rs.getString("display_name"); if (name == null) { if (imagePaths.size() > 0) { String path = imagePaths.get(0); name = (new java.io.File(path)).getName(); } else { name = ""; } } device_id = rs.getString("device_id"); } else { throw new TskCoreException("No image found for id: " + id); } // image can have multiple paths, therefore there can be multiple rows in the result set while (rs.next()) { imagePath = rs.getString("name"); if (imagePath != null) { imagePaths.add(imagePath); } } return new Image(this, id, type, device_id, ssize, name, imagePaths.toArray(new String[imagePaths.size()]), tzone, md5, sha1, sha256, size); } catch (SQLException ex) { throw new TskCoreException("Error getting Image by id, id = " + id, ex); } finally { closeResultSet(rs); closeStatement(s); closeConnection(connection); releaseSingleUserCaseReadLock(); } } /** * Get a volume system by the volume system object id * * @param id id of the volume system * 
 * @param parent image containing the volume system
 *
 * @return populated VolumeSystem object
 *
 * @throws TskCoreException thrown if a critical error occurred within tsk
 *                          core
 */
VolumeSystem getVolumeSystemById(long id, Content parent) throws TskCoreException {
	CaseDbConnection connection = null;
	Statement s = null;
	ResultSet rs = null;
	acquireSingleUserCaseReadLock();
	try {
		connection = connections.getConnection();
		s = connection.createStatement();
		rs = connection.executeQuery(s, "SELECT * FROM tsk_vs_info " //NON-NLS
				+ "where obj_id = " + id); //NON-NLS
		if (rs.next()) {
			long type = rs.getLong("vs_type"); //NON-NLS
			long imgOffset = rs.getLong("img_offset"); //NON-NLS
			long blockSize = rs.getLong("block_size"); //NON-NLS
			VolumeSystem vs = new VolumeSystem(this, id, "", type, imgOffset, blockSize);
			vs.setParent(parent);
			return vs;
		} else {
			throw new TskCoreException("No volume system found for id:" + id);
		}
	} catch (SQLException ex) {
		throw new TskCoreException("Error getting Volume System by ID.", ex);
	} finally {
		// Close in reverse acquisition order, then release the case lock.
		closeResultSet(rs);
		closeStatement(s);
		closeConnection(connection);
		releaseSingleUserCaseReadLock();
	}
}

/**
 * @param id       ID of the desired VolumeSystem
 * @param parentId ID of the VolumeSystem's parent
 *
 * @return the VolumeSystem with the given ID
 *
 * @throws TskCoreException
 */
VolumeSystem getVolumeSystemById(long id, long parentId) throws TskCoreException {
	// Look up by id with no parent object, then record the parent id only.
	VolumeSystem vs = getVolumeSystemById(id, null);
	vs.setParentId(parentId);
	return vs;
}

/**
 * Get a file system by the object id
 *
 * @param id     of the filesystem
 * @param parent parent Image of the file system
 *
 * @return populated FileSystem object
 *
 * @throws TskCoreException thrown if a critical error occurred within tsk
 *                          core
 */
FileSystem getFileSystemById(long id, Image parent) throws TskCoreException {
	return getFileSystemByIdHelper(id, parent);
}

/**
 * @param id       ID of the desired FileSystem
 * @param parentId ID of the FileSystem's parent
 *
 * @return the desired FileSystem
 *
 * @throws TskCoreException
 */
FileSystem getFileSystemById(long id, long parentId) throws TskCoreException {
	// Null Volume selects the (long, Volume) overload; parent id is set after.
	Volume vol = null;
	FileSystem fs = getFileSystemById(id, vol);
	fs.setParentId(parentId);
	return fs;
}

/**
 * Get a file system by the object id
 *
 * @param id     of the filesystem
 * @param parent parent Volume of the file system
 *
 * @return populated FileSystem object
 *
 * @throws TskCoreException thrown if a critical error occurred within tsk
 *                          core
 */
FileSystem getFileSystemById(long id, Volume parent) throws TskCoreException {
	return getFileSystemByIdHelper(id, parent);
}

/**
 * Get a pool by the object id
 *
 * @param id     of the pool
 * @param parent parent of the pool (image or volume)
 *
 * @return populated Pool object
 *
 * @throws TskCoreException thrown if a critical error occurred within tsk
 *                          core
 */
Pool getPoolById(long id, Content parent) throws TskCoreException {
	return getPoolByIdHelper(id, parent);
}

/**
 * @param id       ID of the desired Volume
 * @param parentId ID of the Volume's parent
 *
 * @return the desired Volume
 *
 * @throws TskCoreException
 */
Pool getPoolById(long id, long parentId) throws TskCoreException {
	Pool pool = getPoolById(id, null);
	pool.setParentId(parentId);
	return pool;
}

/**
 * Get pool by id and Content parent
 *
 * @param id     of the pool to get
 * @param parent a direct parent Content object
 *
 * @return populated FileSystem object
 *
 * @throws TskCoreException thrown if a critical error occurred within tsk
 *                          core
 */
private Pool getPoolByIdHelper(long id, Content parent) throws TskCoreException {

	acquireSingleUserCaseReadLock();
	try (CaseDbConnection connection = connections.getConnection();
			Statement s = connection.createStatement();
			ResultSet rs = connection.executeQuery(s, "SELECT * FROM tsk_pool_info " //NON-NLS
					+ "where obj_id = " + id);) { //NON-NLS
		if (rs.next()) {
			Pool pool = new Pool(this, rs.getLong("obj_id"), TskData.TSK_POOL_TYPE_ENUM.valueOf(rs.getLong("pool_type")).getName(), rs.getLong("pool_type"));
			pool.setParent(parent);

			return pool;
		} else {
			throw new TskCoreException("No pool found for ID:" + id);
		}
	} catch (SQLException ex) {
		throw new TskCoreException("Error getting Pool by ID", ex);
	} finally {
		releaseSingleUserCaseReadLock();
	}
}

/**
 * Get file system by id and Content parent
 *
 * @param id     of the filesystem to get
 * @param parent a direct parent Content object
 *
 * @return populated FileSystem object
 *
 * @throws TskCoreException thrown if a critical error occurred within tsk
 *                          core
 */
private FileSystem getFileSystemByIdHelper(long id, Content parent) throws TskCoreException {
	// see if we already have it
	// @@@ NOTE: this is currently kind of bad in that we are ignoring the parent value,
	// but it should be the same...
	synchronized (fileSystemIdMap) {
		if (fileSystemIdMap.containsKey(id)) {
			return fileSystemIdMap.get(id);
		}
	}
	CaseDbConnection connection = null;
	Statement s = null;
	ResultSet rs = null;
	acquireSingleUserCaseReadLock();
	try {
		connection = connections.getConnection();
		s = connection.createStatement();
		rs = connection.executeQuery(s, "SELECT * FROM tsk_fs_info " //NON-NLS
				+ "where obj_id = " + id); //NON-NLS
		if (rs.next()) {
			TskData.TSK_FS_TYPE_ENUM fsType = TskData.TSK_FS_TYPE_ENUM.valueOf(rs.getInt("fs_type")); //NON-NLS
			FileSystem fs = new FileSystem(this, rs.getLong("obj_id"), "", rs.getLong("img_offset"), //NON-NLS
					fsType, rs.getLong("block_size"), rs.getLong("block_count"), //NON-NLS
					rs.getLong("root_inum"), rs.getLong("first_inum"), rs.getLong("last_inum")); //NON-NLS
			fs.setParent(parent);
			// save it for the next call
			synchronized (fileSystemIdMap) {
				fileSystemIdMap.put(id, fs);
			}
			return fs;
		} else {
			throw new TskCoreException("No file system found for id:" + id);
		}
	} catch (SQLException ex) {
		throw new TskCoreException("Error getting File System by ID", ex);
	} finally {
		closeResultSet(rs);
		closeStatement(s);
		closeConnection(connection);
		releaseSingleUserCaseReadLock();
	}
}

/**
 * Get volume by id
 *
 * @param id
 * @param parent volume system
 *
 * @return populated Volume
 *         object
 *
 * @throws TskCoreException thrown if a critical error occurred within tsk
 *                          core
 */
Volume getVolumeById(long id, VolumeSystem parent) throws TskCoreException {
	CaseDbConnection connection = null;
	Statement s = null;
	ResultSet rs = null;
	acquireSingleUserCaseReadLock();
	try {
		connection = connections.getConnection();
		s = connection.createStatement();
		rs = connection.executeQuery(s, "SELECT * FROM tsk_vs_parts " //NON-NLS
				+ "where obj_id = " + id); //NON-NLS
		if (rs.next()) {
			/**
			 * TODO!! LANDMINE!! This allows the two types of databases to
			 * have slightly different schemas. SQLite uses desc as the
			 * column name in tsk_vs_parts and Postgres uses descr, as desc
			 * is a reserved keyword in Postgres. When we have to make a
			 * schema change, be sure to change this over to just one name.
			 */
			String description;
			try {
				description = rs.getString("desc");
			} catch (Exception ex) {
				// SQLite column missing => fall back to the Postgres name.
				description = rs.getString("descr");
			}
			Volume vol = new Volume(this, rs.getLong("obj_id"), rs.getLong("addr"), //NON-NLS
					rs.getLong("start"), rs.getLong("length"), rs.getLong("flags"), //NON-NLS
					description);
			vol.setParent(parent);
			return vol;
		} else {
			throw new TskCoreException("No volume found for id:" + id);
		}
	} catch (SQLException ex) {
		throw new TskCoreException("Error getting Volume by ID", ex);
	} finally {
		closeResultSet(rs);
		closeStatement(s);
		closeConnection(connection);
		releaseSingleUserCaseReadLock();
	}
}

/**
 * @param id       ID of the desired Volume
 * @param parentId ID of the Volume's parent
 *
 * @return the desired Volume
 *
 * @throws TskCoreException
 */
Volume getVolumeById(long id, long parentId) throws TskCoreException {
	Volume vol = getVolumeById(id, null);
	vol.setParentId(parentId);
	return vol;
}

/**
 * Get a directory by id
 *
 * @param id       of the directory object
 * @param parentFs parent file system
 *
 * @return populated Directory object
 *
 * @throws TskCoreException thrown if a critical error occurred within tsk
 *                          core
 */
Directory getDirectoryById(long id, FileSystem parentFs) throws TskCoreException {
	CaseDbConnection connection = null;
	Statement s = null;
	ResultSet rs = null;
	acquireSingleUserCaseReadLock();
	try {
		connection = connections.getConnection();
		s = connection.createStatement();
		rs = connection.executeQuery(s, "SELECT * FROM tsk_files " //NON-NLS
				+ "WHERE obj_id = " + id);
		Directory temp = null; //NON-NLS
		if (rs.next()) {
			final short type = rs.getShort("type"); //NON-NLS
			if (type == TSK_DB_FILES_TYPE_ENUM.FS.getFileType()) {
				// Only FS rows whose meta type is a (virtual) directory qualify;
				// otherwise temp stays null and null is returned.
				if (rs.getShort("meta_type") == TSK_FS_META_TYPE_ENUM.TSK_FS_META_TYPE_DIR.getValue()
						|| rs.getShort("meta_type") == TSK_FS_META_TYPE_ENUM.TSK_FS_META_TYPE_VIRT_DIR.getValue()) { //NON-NLS
					temp = directory(rs, parentFs);
				}
			} else if (type == TSK_DB_FILES_TYPE_ENUM.VIRTUAL_DIR.getFileType()) {
				throw new TskCoreException("Expecting an FS-type directory, got virtual, id: " + id);
			}
		} else {
			throw new TskCoreException("No Directory found for id:" + id);
		}
		return temp;
	} catch (SQLException ex) {
		throw new TskCoreException("Error getting Directory by ID", ex);
	} finally {
		closeResultSet(rs);
		closeStatement(s);
		closeConnection(connection);
		releaseSingleUserCaseReadLock();
	}
}

/**
 * Helper to return FileSystems in an Image
 *
 * @param image Image to lookup FileSystem for
 *
 * @return Collection of FileSystems in the image
 *
 * @throws TskCoreException
 */
public Collection getImageFileSystems(Image image) throws TskCoreException {
	List fileSystems = new ArrayList<>();
	String queryStr = "SELECT * FROM tsk_fs_info WHERE data_source_obj_id = " + image.getId();

	CaseDbConnection connection = null;
	Statement s = null;
	ResultSet rs = null;
	acquireSingleUserCaseReadLock();
	try {
		connection = connections.getConnection();
		s = connection.createStatement();
		rs = connection.executeQuery(s, queryStr); //NON-NLS
		while (rs.next()) {
			TskData.TSK_FS_TYPE_ENUM fsType = TskData.TSK_FS_TYPE_ENUM.valueOf(rs.getInt("fs_type")); //NON-NLS
			FileSystem fs = new FileSystem(this, rs.getLong("obj_id"), "", rs.getLong("img_offset"), //NON-NLS
					fsType, rs.getLong("block_size"), rs.getLong("block_count"), //NON-NLS
					rs.getLong("root_inum"), rs.getLong("first_inum"), rs.getLong("last_inum")); //NON-NLS
			// Parent is intentionally left unset here (unlike getFileSystemByIdHelper).
			fs.setParent(null);
			fileSystems.add(fs);
		}
	} catch (SQLException ex) {
		throw new TskCoreException("Error looking up files systems. Query: " + queryStr, ex); //NON-NLS
	} finally {
		closeResultSet(rs);
		closeStatement(s);
		closeConnection(connection);
		releaseSingleUserCaseReadLock();
	}
	return fileSystems;
}

/**
 * Returns the list of direct children for a given Image
 *
 * @param img image to get children for
 *
 * @return list of Contents (direct image children)
 *
 * @throws TskCoreException thrown if a critical error occurred within tsk
 *                          core
 */
List getImageChildren(Image img) throws TskCoreException {
	Collection childInfos = getChildrenInfo(img);
	List children = new ArrayList();
	for (ObjectInfo info : childInfos) {
		if (null != info.type) {
			switch (info.type) {
				case VS:
					children.add(getVolumeSystemById(info.id, img));
					break;
				case POOL:
					children.add(getPoolById(info.id, img));
					break;
				case FS:
					children.add(getFileSystemById(info.id, img));
					break;
				case ABSTRACTFILE:
					AbstractFile f = getAbstractFileById(info.id);
					if (f != null) {
						children.add(f);
					}
					break;
				case ARTIFACT:
					BlackboardArtifact art = getArtifactById(info.id);
					if (art != null) {
						children.add(art);
					}
					break;
				case REPORT:
					// Do nothing for now - see JIRA-3673
					break;
				default:
					throw new TskCoreException("Image has child of invalid type: " + info.type);
			}
		}
	}
	return children;
}

/**
 * Returns the list of direct children IDs for a given Image
 *
 * @param img image to get children for
 *
 * @return list of IDs (direct image children)
 *
 * @throws TskCoreException thrown if a critical error occurred within tsk
 *                          core
 */
List getImageChildrenIds(Image img) throws TskCoreException {
	Collection childInfos = getChildrenInfo(img);
	List children = new ArrayList();
	for (ObjectInfo info : childInfos) {
		if (info.type == ObjectType.VS
				|| info.type == ObjectType.POOL
				|| info.type == ObjectType.FS
				|| info.type == ObjectType.ABSTRACTFILE
				|| info.type == ObjectType.ARTIFACT) {
			children.add(info.id);
		} else if (info.type == ObjectType.REPORT) {
			// Do nothing for now - see JIRA-3673
		} else {
			throw new TskCoreException("Image has child of invalid type: " + info.type);
		}
	}
	return children;
}

/**
 * Returns the list of direct children for a given Pool
 *
 * @param pool pool to get children for
 *
 * @return list of pool children objects
 *
 * @throws TskCoreException thrown if a critical error occurred within tsk
 *                          core
 */
List getPoolChildren(Pool pool) throws TskCoreException {
	Collection childInfos = getChildrenInfo(pool);
	List children = new ArrayList();
	for (ObjectInfo info : childInfos) {
		if (null != info.type) {
			switch (info.type) {
				case VS:
					children.add(getVolumeSystemById(info.id, pool));
					break;
				case ABSTRACTFILE:
					AbstractFile f = getAbstractFileById(info.id);
					if (f != null) {
						children.add(f);
					}
					break;
				case ARTIFACT:
					BlackboardArtifact art = getArtifactById(info.id);
					if (art != null) {
						children.add(art);
					}
					break;
				default:
					throw new TskCoreException("Pool has child of invalid type: " + info.type);
			}
		}
	}
	return children;
}

/**
 * Returns the list of direct children IDs for a given Pool
 *
 * @param pool pool to get children for
 *
 * @return list of pool children IDs
 *
 * @throws TskCoreException thrown if a critical error occurred within tsk
 *                          core
 */
List getPoolChildrenIds(Pool pool) throws TskCoreException {
	Collection childInfos = getChildrenInfo(pool);
	List children = new ArrayList();
	for (ObjectInfo info : childInfos) {
		if (info.type == ObjectType.VS || info.type == ObjectType.ABSTRACTFILE || info.type == ObjectType.ARTIFACT) {
			children.add(info.id);
		} else {
			throw new TskCoreException("Pool has child of invalid type: " + info.type);
		}
	}
	return children;
}

/**
 * Returns the list of direct children for a given VolumeSystem
 *
 * @param vs volume system to get children for
 *
 * @return list of volume system children objects
 *
 * @throws TskCoreException
 *                          thrown if a critical error occurred within tsk
 *                          core
 */
List getVolumeSystemChildren(VolumeSystem vs) throws TskCoreException {
	Collection childInfos = getChildrenInfo(vs);
	List children = new ArrayList();
	for (ObjectInfo info : childInfos) {
		if (null != info.type) {
			switch (info.type) {
				case VOL:
					children.add(getVolumeById(info.id, vs));
					break;
				case ABSTRACTFILE:
					AbstractFile f = getAbstractFileById(info.id);
					if (f != null) {
						children.add(f);
					}
					break;
				case ARTIFACT:
					BlackboardArtifact art = getArtifactById(info.id);
					if (art != null) {
						children.add(art);
					}
					break;
				default:
					throw new TskCoreException("VolumeSystem has child of invalid type: " + info.type);
			}
		}
	}
	return children;
}

/**
 * Returns the list of direct children IDs for a given VolumeSystem
 *
 * @param vs volume system to get children for
 *
 * @return list of volume system children IDs
 *
 * @throws TskCoreException thrown if a critical error occurred within tsk
 *                          core
 */
List getVolumeSystemChildrenIds(VolumeSystem vs) throws TskCoreException {
	Collection childInfos = getChildrenInfo(vs);
	List children = new ArrayList();
	for (ObjectInfo info : childInfos) {
		if (info.type == ObjectType.VOL || info.type == ObjectType.ABSTRACTFILE || info.type == ObjectType.ARTIFACT) {
			children.add(info.id);
		} else {
			throw new TskCoreException("VolumeSystem has child of invalid type: " + info.type);
		}
	}
	return children;
}

/**
 * Returns a list of direct children for a given Volume
 *
 * @param vol volume to get children of
 *
 * @return list of Volume children
 *
 * @throws TskCoreException thrown if a critical error occurred within tsk
 *                          core
 */
List getVolumeChildren(Volume vol) throws TskCoreException {
	Collection childInfos = getChildrenInfo(vol);
	List children = new ArrayList();
	for (ObjectInfo info : childInfos) {
		if (null != info.type) {
			switch (info.type) {
				case POOL:
					children.add(getPoolById(info.id, vol));
					break;
				case FS:
					children.add(getFileSystemById(info.id, vol));
					break;
				case ABSTRACTFILE:
					AbstractFile f = getAbstractFileById(info.id);
					if (f != null) {
						children.add(f);
					}
					break;
				case ARTIFACT:
					BlackboardArtifact art = getArtifactById(info.id);
					if (art != null) {
						children.add(art);
					}
					break;
				default:
					throw new TskCoreException("Volume has child of invalid type: " + info.type);
			}
		}
	}
	return children;
}

/**
 * Returns a list of direct children IDs for a given Volume
 *
 * @param vol volume to get children of
 *
 * @return list of Volume children IDs
 *
 * @throws TskCoreException thrown if a critical error occurred within tsk
 *                          core
 */
List getVolumeChildrenIds(Volume vol) throws TskCoreException {
	final Collection childInfos = getChildrenInfo(vol);
	final List children = new ArrayList();
	for (ObjectInfo info : childInfos) {
		// NOTE(review): getVolumeChildren() above accepts POOL children, but this
		// method throws for them — apparent inconsistency; confirm intended.
		if (info.type == ObjectType.FS || info.type == ObjectType.ABSTRACTFILE || info.type == ObjectType.ARTIFACT) {
			children.add(info.id);
		} else {
			throw new TskCoreException("Volume has child of invalid type: " + info.type);
		}
	}
	return children;
}

/**
 * Adds an image to the case database.
 *
 * @param deviceObjId    The object id of the device associated with the
 *                       image.
 * @param imageFilePaths The image file paths.
 * @param timeZone       The time zone for the image.
 *
 * @return An Image object.
 *
 * @throws TskCoreException if there is an error adding the image to case
 *                          database.
 */
public Image addImageInfo(long deviceObjId, List imageFilePaths, String timeZone) throws TskCoreException {
	// Delegate to the four-argument overload with no host.
	return addImageInfo(deviceObjId, imageFilePaths, timeZone, null);
}

/**
 * Adds an image to the case database.
 *
 * @param deviceObjId    The object id of the device associated with the
 *                       image.
 * @param imageFilePaths The image file paths.
 * @param timeZone       The time zone for the image.
 * @param host           The host for this image.
 *
 * @return An Image object.
 *
 * @throws TskCoreException if there is an error adding the image to case
 *                          database.
 */
public Image addImageInfo(long deviceObjId, List imageFilePaths, String timeZone, Host host) throws TskCoreException {
	// The native layer inserts the rows; re-read to build the Image object.
	long imageId = this.caseHandle.addImageInfo(deviceObjId, imageFilePaths, timeZone, host, this);
	return getImageById(imageId);
}

/**
 * Returns a map of image object IDs to a list of fully qualified file paths
 * for that image
 *
 * @return map of image object IDs to file paths
 *
 * @throws TskCoreException thrown if a critical error occurred within tsk
 *                          core
 */
public Map> getImagePaths() throws TskCoreException {
	CaseDbConnection connection = null;
	Statement s1 = null;
	ResultSet rs1 = null;
	acquireSingleUserCaseReadLock();
	try {
		connection = connections.getConnection();
		s1 = connection.createStatement();
		// LEFT JOIN so images with no stored paths still get a (null-name) row.
		rs1 = connection.executeQuery(s1, "SELECT tsk_image_info.obj_id, tsk_image_names.name FROM tsk_image_info "
				+ "LEFT JOIN tsk_image_names ON tsk_image_info.obj_id = tsk_image_names.obj_id"); //NON-NLS
		Map> imgPaths = new LinkedHashMap>();
		while (rs1.next()) {
			long obj_id = rs1.getLong("obj_id"); //NON-NLS
			String name = rs1.getString("name"); //NON-NLS
			List imagePaths = imgPaths.get(obj_id);
			if (imagePaths == null) {
				// First row for this image: create its (possibly empty) path list.
				List paths = new ArrayList();
				if (name != null) {
					paths.add(name);
				}
				imgPaths.put(obj_id, paths);
			} else {
				if (name != null) {
					imagePaths.add(name);
				}
			}
		}
		return imgPaths;
	} catch (SQLException ex) {
		throw new TskCoreException("Error getting image paths.", ex);
	} finally {
		closeResultSet(rs1);
		closeStatement(s1);
		closeConnection(connection);
		releaseSingleUserCaseReadLock();
	}
}

/**
 * Returns a list of fully qualified file paths based on an image object ID.
 *
 * @param objectId   The object id of the data source.
 * @param connection Database connection to use.
 *
 * @return List of file paths.
 *
 * @throws TskCoreException Thrown if a critical error occurred within tsk
 *                          core
 */
private List getImagePathsById(long objectId, CaseDbConnection connection) throws TskCoreException {
	List imagePaths = new ArrayList<>();
	acquireSingleUserCaseReadLock();
	Statement statement = null;
	ResultSet resultSet = null;
	try {
		statement = connection.createStatement();
		resultSet = connection.executeQuery(statement, "SELECT name FROM tsk_image_names WHERE tsk_image_names.obj_id = " + objectId); //NON-NLS
		while (resultSet.next()) {
			imagePaths.add(resultSet.getString("name"));
		}
	} catch (SQLException ex) {
		throw new TskCoreException(String.format("Error getting image names with obj_id = %d", objectId), ex);
	} finally {
		// The caller owns the connection; only close what was created here.
		closeResultSet(resultSet);
		closeStatement(statement);
		releaseSingleUserCaseReadLock();
	}
	return imagePaths;
}

/**
 * @return a collection of Images associated with this instance of
 *         SleuthkitCase
 *
 * @throws TskCoreException
 */
public List getImages() throws TskCoreException {
	CaseDbConnection connection = null;
	Statement s = null;
	ResultSet rs = null;
	acquireSingleUserCaseReadLock();
	try {
		connection = connections.getConnection();
		s = connection.createStatement();
		rs = connection.executeQuery(s, "SELECT obj_id FROM tsk_image_info"); //NON-NLS
		// Collect ids first, then materialize Images one by one.
		Collection imageIDs = new ArrayList();
		while (rs.next()) {
			imageIDs.add(rs.getLong("obj_id")); //NON-NLS
		}
		List images = new ArrayList();
		for (long id : imageIDs) {
			images.add(getImageById(id));
		}
		return images;
	} catch (SQLException ex) {
		throw new TskCoreException("Error retrieving images.", ex);
	} finally {
		closeResultSet(rs);
		closeStatement(s);
		closeConnection(connection);
		releaseSingleUserCaseReadLock();
	}
}

/**
 * Set the file paths for the image given by obj_id
 *
 * @param obj_id the ID of the image to update
 * @param paths  the fully qualified path to the files that make up the
 *               image
 *
 * @throws TskCoreException exception thrown when critical error occurs
 *                          within tsk core and the update fails
 */
public void setImagePaths(long obj_id, List paths) throws TskCoreException {
	CaseDbConnection connection = null;
	acquireSingleUserCaseWriteLock();
	PreparedStatement statement;
	try {
		connection = connections.getConnection();
		// Delete-then-insert inside one transaction so readers never see a
		// partially updated path list.
		connection.beginTransaction();
		statement = connection.getPreparedStatement(PREPARED_STATEMENT.DELETE_IMAGE_NAME);
		statement.clearParameters();
		statement.setLong(1, obj_id);
		connection.executeUpdate(statement);
		for (int i = 0; i < paths.size(); i++) {
			statement = connection.getPreparedStatement(PREPARED_STATEMENT.INSERT_IMAGE_NAME);
			statement.clearParameters();
			statement.setLong(1, obj_id);
			statement.setString(2, paths.get(i));
			statement.setLong(3, i);
			connection.executeUpdate(statement);
		}
		connection.commitTransaction();
	} catch (SQLException ex) {
		rollbackTransaction(connection);
		throw new TskCoreException("Error updating image paths.", ex);
	} finally {
		closeConnection(connection);
		releaseSingleUserCaseWriteLock();
	}
}

/**
 * Deletes a datasource from the open case, the database has foreign keys
 * with a delete cascade so that all the tables that have a datasource
 * object id will have their data deleted. This is private to keep it out of
 * the public API
 *
 * @param dataSourceObjectId the id of the datasource to be deleted
 *
 * @throws TskCoreException exception thrown when critical error occurs
 *                          within tsk core and the update fails
 */
void deleteDataSource(long dataSourceObjectId) throws TskCoreException {

	// Check if this data source is the only one associated with its host. If so,
	// we will delete the host and other associated data.
	// Note that the cascading deletes were only added in schema 9.1, so we
	// would get an error trying to delete a host from older cases.
	Host hostToDelete = null;
	VersionNumber version = getDBSchemaCreationVersion();
	int major = version.getMajor();
	int minor = version.getMinor();
	if (major > 9 || (major == 9 && minor >= 1)) {
		hostToDelete = getHostManager().getHostByDataSource(dataSourceObjectId);
		if (getHostManager().getDataSourcesForHost(hostToDelete).size() != 1) {
			// Host is shared with other data sources; leave it alone.
			hostToDelete = null;
		}
	}

	CaseDbConnection connection = null;
	Statement statement;
	acquireSingleUserCaseWriteLock();
	try {
		connection = connections.getConnection();
		statement = connection.createStatement();
		connection.beginTransaction();
		// The following delete(s) uses a foreign key delete with cascade in the DB so that it will delete
		// all associated rows from tsk_object and its children.  For large data sources this may take some time.
		statement.execute("DELETE FROM tsk_objects WHERE obj_id = " + dataSourceObjectId);
		// The following delete uses a foreign key delete with cascade in the DB so that it will delete all
		// associated rows from accounts table and its children.
		String accountSql = "DELETE FROM accounts WHERE account_id in (SELECT account_id FROM accounts "
				+ "WHERE account_id NOT IN (SELECT account1_id FROM account_relationships) "
				+ "AND account_id NOT IN (SELECT account2_id FROM account_relationships))";
		statement.execute(accountSql);

		// Now delete any host that was only associated with this data source. This will cascade to delete
		// realms, os accounts, and os account attributes that were associated with the host.
		if (hostToDelete != null) {
			statement.execute("DELETE FROM tsk_hosts WHERE id = " + hostToDelete.getHostId());

			// Clean up any stray OS Account objects
			String deleteOsAcctObjectsQuery = "DELETE FROM tsk_objects "
					+ "WHERE type=" + TskData.ObjectType.OS_ACCOUNT.getObjectType() + " "
					+ "AND obj_id NOT IN (SELECT os_account_obj_id FROM tsk_os_accounts WHERE os_account_obj_id IS NOT NULL)";
			statement.execute(deleteOsAcctObjectsQuery);
		}

		connection.commitTransaction();
	} catch (SQLException ex) {
		rollbackTransaction(connection);
		throw new TskCoreException("Error deleting data source.", ex);
	} finally {
		closeConnection(connection);
		releaseSingleUserCaseWriteLock();
	}
}

/**
 * Creates file object from a SQL query result set of rows from the
 * tsk_files table. Assumes that the query was of the form "SELECT * FROM
 * tsk_files WHERE XYZ".
 *
 * @param rs ResultSet to get content from. Caller is responsible for
 *           closing it.
 *
 * @return list of file objects from tsk_files table containing the files
 *
 * @throws SQLException if the query fails
 */
/**
 * Creates AbstractFile objects for the result set of a tsk_files table
 * query of the form "SELECT * FROM tsk_files WHERE XYZ".
 *
 * @param rs         A result set from a query of the tsk_files table of the
 *                   form "SELECT * FROM tsk_files WHERE XYZ".
 * @param connection A case database connection.
 *
 * @return A list of AbstractFile objects.
 *
 * @throws SQLException Thrown if there is a problem iterating through the
 *                      record set.
 */
List resultSetToAbstractFiles(ResultSet rs, CaseDbConnection connection) throws SQLException {
	ArrayList results = new ArrayList();
	try {
		// Dispatch each row to the appropriate factory method based on its
		// tsk_files "type" column (and meta_type for the FS/virtual split).
		while (rs.next()) {
			final short type = rs.getShort("type"); //NON-NLS
			if (type == TSK_DB_FILES_TYPE_ENUM.FS.getFileType()
					&& (rs.getShort("meta_type") != TSK_FS_META_TYPE_ENUM.TSK_FS_META_TYPE_VIRT_DIR.getValue())) {
				FsContent result;
				if (rs.getShort("meta_type") == TSK_FS_META_TYPE_ENUM.TSK_FS_META_TYPE_DIR.getValue()) { //NON-NLS
					result = directory(rs, null);
				} else {
					result = file(rs, null);
				}
				results.add(result);
			} else if (type == TSK_DB_FILES_TYPE_ENUM.VIRTUAL_DIR.getFileType()
					|| (rs.getShort("meta_type") == TSK_FS_META_TYPE_ENUM.TSK_FS_META_TYPE_VIRT_DIR.getValue())) { //NON-NLS
				final VirtualDirectory virtDir = virtualDirectory(rs, connection);
				results.add(virtDir);
			} else if (type == TSK_DB_FILES_TYPE_ENUM.LOCAL_DIR.getFileType()) {
				final LocalDirectory localDir = localDirectory(rs);
				results.add(localDir);
			} else if (type == TSK_DB_FILES_TYPE_ENUM.UNALLOC_BLOCKS.getFileType()
					|| type == TSK_DB_FILES_TYPE_ENUM.UNUSED_BLOCKS.getFileType()
					|| type == TSK_DB_FILES_TYPE_ENUM.CARVED.getFileType()
					|| type == TSK_DB_FILES_TYPE_ENUM.LAYOUT_FILE.getFileType()) {
				TSK_DB_FILES_TYPE_ENUM atype = TSK_DB_FILES_TYPE_ENUM.valueOf(type);
				String parentPath = rs.getString("parent_path"); //NON-NLS
				if (parentPath == null) {
					parentPath = "/"; //NON-NLS
				}
				// SQL NULL must become a Java null, not 0.
				Long osAccountObjId = rs.getLong("os_account_obj_id");
				if (rs.wasNull()) {
					osAccountObjId = null;
				}
				LayoutFile lf = new LayoutFile(this, rs.getLong("obj_id"), //NON-NLS
						rs.getLong("data_source_obj_id"), rs.getString("name"), //NON-NLS
						atype, TSK_FS_NAME_TYPE_ENUM.valueOf(rs.getShort("dir_type")), TSK_FS_META_TYPE_ENUM.valueOf(rs.getShort("meta_type")), //NON-NLS
						TSK_FS_NAME_FLAG_ENUM.valueOf(rs.getShort("dir_flags")), rs.getShort("meta_flags"), //NON-NLS
						rs.getLong("size"), //NON-NLS
						rs.getLong("ctime"), rs.getLong("crtime"), rs.getLong("atime"), rs.getLong("mtime"), //NON-NLS
						rs.getString("md5"), rs.getString("sha256"), FileKnown.valueOf(rs.getByte("known")), parentPath, rs.getString("mime_type"), rs.getString("owner_uid"), osAccountObjId); //NON-NLS
				results.add(lf);
			} else if (type == TSK_DB_FILES_TYPE_ENUM.DERIVED.getFileType()) {
				final DerivedFile df;
				df = derivedFile(rs, connection, AbstractContent.UNKNOWN_ID);
				results.add(df);
			} else if (type == TSK_DB_FILES_TYPE_ENUM.LOCAL.getFileType()) {
				final LocalFile lf;
				lf = localFile(rs, connection, AbstractContent.UNKNOWN_ID);
				results.add(lf);
			} else if (type == TSK_DB_FILES_TYPE_ENUM.SLACK.getFileType()) {
				final SlackFile sf = slackFile(rs, null);
				results.add(sf);
			}
		} //end for each resultSet
	} catch (SQLException e) {
		// NOTE(review): errors are logged and swallowed here, so callers get a
		// possibly-partial list rather than an exception.
		logger.log(Level.SEVERE, "Error getting abstract files from result set", e); //NON-NLS
	}

	return results;
}

// This following methods generate AbstractFile objects from a ResultSet
/**
 * Create a File object from the result set containing query results on
 * tsk_files table
 *
 * @param rs the result set
 * @param fs parent file system
 *
 * @return a newly create File
 *
 * @throws SQLException
 */
org.sleuthkit.datamodel.File file(ResultSet rs, FileSystem fs) throws SQLException {
	// SQL NULL must become a Java null, not 0.
	Long osAccountObjId = rs.getLong("os_account_obj_id");
	if (rs.wasNull()) {
		osAccountObjId = null;
	}

	org.sleuthkit.datamodel.File f = new org.sleuthkit.datamodel.File(this, rs.getLong("obj_id"), //NON-NLS
			rs.getLong("data_source_obj_id"), rs.getLong("fs_obj_id"), //NON-NLS
			TskData.TSK_FS_ATTR_TYPE_ENUM.valueOf(rs.getShort("attr_type")), //NON-NLS
			rs.getInt("attr_id"), rs.getString("name"), rs.getLong("meta_addr"), rs.getInt("meta_seq"), //NON-NLS
			TSK_FS_NAME_TYPE_ENUM.valueOf(rs.getShort("dir_type")), //NON-NLS
			TSK_FS_META_TYPE_ENUM.valueOf(rs.getShort("meta_type")), //NON-NLS
			TSK_FS_NAME_FLAG_ENUM.valueOf(rs.getShort("dir_flags")), //NON-NLS
			rs.getShort("meta_flags"), rs.getLong("size"), //NON-NLS
			rs.getLong("ctime"), rs.getLong("crtime"), rs.getLong("atime"), rs.getLong("mtime"), //NON-NLS
			(short) rs.getInt("mode"), rs.getInt("uid"), rs.getInt("gid"), //NON-NLS
			rs.getString("md5"), rs.getString("sha256"), FileKnown.valueOf(rs.getByte("known")), //NON-NLS
			rs.getString("parent_path"), rs.getString("mime_type"), rs.getString("extension"), rs.getString("owner_uid"), osAccountObjId, Collections.emptyList()); //NON-NLS
	f.setFileSystem(fs);
	return f;
}

/**
 * Create a Directory object from the result set containing query results on
 * tsk_files table
 *
 * @param rs the result set
 * @param fs parent file system
 *
 * @return a newly created Directory object
 *
 * @throws SQLException thrown if SQL error occurred
 */
Directory directory(ResultSet rs, FileSystem fs) throws SQLException {
	// SQL NULL must become a Java null, not 0.
	Long osAccountObjId = rs.getLong("os_account_obj_id");
	if (rs.wasNull()) {
		osAccountObjId = null;
	}

	Directory dir = new Directory(this, rs.getLong("obj_id"), rs.getLong("data_source_obj_id"), rs.getLong("fs_obj_id"), //NON-NLS
			TskData.TSK_FS_ATTR_TYPE_ENUM.valueOf(rs.getShort("attr_type")), //NON-NLS
			rs.getInt("attr_id"), rs.getString("name"), rs.getLong("meta_addr"), rs.getInt("meta_seq"), //NON-NLS
			TSK_FS_NAME_TYPE_ENUM.valueOf(rs.getShort("dir_type")), //NON-NLS
			TSK_FS_META_TYPE_ENUM.valueOf(rs.getShort("meta_type")), //NON-NLS
			TSK_FS_NAME_FLAG_ENUM.valueOf(rs.getShort("dir_flags")), //NON-NLS
			rs.getShort("meta_flags"), rs.getLong("size"), //NON-NLS
			rs.getLong("ctime"), rs.getLong("crtime"), rs.getLong("atime"), rs.getLong("mtime"), //NON-NLS
			rs.getShort("mode"), rs.getInt("uid"), rs.getInt("gid"), //NON-NLS
			rs.getString("md5"), rs.getString("sha256"), FileKnown.valueOf(rs.getByte("known")), //NON-NLS
			rs.getString("parent_path"), rs.getString("owner_uid"), osAccountObjId); //NON-NLS
	dir.setFileSystem(fs);
	return dir;
}

/**
 * Create a virtual directory object from a result set.
 *
 * @param rs         the result set.
 * @param connection The case database connection.
 *
 * @return newly created VirtualDirectory object.
* * @throws SQLException */ VirtualDirectory virtualDirectory(ResultSet rs, CaseDbConnection connection) throws SQLException { String parentPath = rs.getString("parent_path"); //NON-NLS if (parentPath == null) { parentPath = ""; } long objId = rs.getLong("obj_id"); long dsObjId = rs.getLong("data_source_obj_id"); if (objId == dsObjId) { // virtual directory is a data source String deviceId = ""; String timeZone = ""; Statement s = null; ResultSet rsDataSourceInfo = null; acquireSingleUserCaseReadLock(); try { s = connection.createStatement(); rsDataSourceInfo = connection.executeQuery(s, "SELECT device_id, time_zone FROM data_source_info WHERE obj_id = " + objId); if (rsDataSourceInfo.next()) { deviceId = rsDataSourceInfo.getString("device_id"); timeZone = rsDataSourceInfo.getString("time_zone"); } } catch (SQLException ex) { logger.log(Level.SEVERE, "Error data source info for datasource id " + objId, ex); //NON-NLS } finally { closeResultSet(rsDataSourceInfo); closeStatement(s); releaseSingleUserCaseReadLock(); } return new LocalFilesDataSource(this, objId, dsObjId, deviceId, rs.getString("name"), TSK_FS_NAME_TYPE_ENUM.valueOf(rs.getShort("dir_type")), //NON-NLS TSK_FS_META_TYPE_ENUM.valueOf(rs.getShort("meta_type")), //NON-NLS TSK_FS_NAME_FLAG_ENUM.valueOf(rs.getShort("dir_flags")), rs.getShort("meta_flags"), timeZone, rs.getString("md5"), rs.getString("sha256"), FileKnown.valueOf(rs.getByte("known")), parentPath); } else { final VirtualDirectory vd = new VirtualDirectory(this, objId, dsObjId, rs.getString("name"), //NON-NLS TSK_FS_NAME_TYPE_ENUM.valueOf(rs.getShort("dir_type")), //NON-NLS TSK_FS_META_TYPE_ENUM.valueOf(rs.getShort("meta_type")), //NON-NLS TSK_FS_NAME_FLAG_ENUM.valueOf(rs.getShort("dir_flags")), //NON-NLS rs.getShort("meta_flags"), rs.getString("md5"), rs.getString("sha256"), //NON-NLS FileKnown.valueOf(rs.getByte("known")), parentPath); //NON-NLS return vd; } } /** * Create a virtual directory object from a result set * * @param rs the result 
set * * @return newly created VirtualDirectory object * * @throws SQLException */ LocalDirectory localDirectory(ResultSet rs) throws SQLException { String parentPath = rs.getString("parent_path"); //NON-NLS if (parentPath == null) { parentPath = ""; } final LocalDirectory ld = new LocalDirectory(this, rs.getLong("obj_id"), //NON-NLS rs.getLong("data_source_obj_id"), rs.getString("name"), //NON-NLS TSK_FS_NAME_TYPE_ENUM.valueOf(rs.getShort("dir_type")), //NON-NLS TSK_FS_META_TYPE_ENUM.valueOf(rs.getShort("meta_type")), //NON-NLS TSK_FS_NAME_FLAG_ENUM.valueOf(rs.getShort("dir_flags")), //NON-NLS rs.getShort("meta_flags"), rs.getString("md5"), rs.getString("sha256"), //NON-NLS FileKnown.valueOf(rs.getByte("known")), parentPath); //NON-NLS return ld; } /** * Creates a DerivedFile object using the values of a given result set. * * @param rs The result set. * @param connection The case database connection. * @param parentId The parent id for the derived file or * AbstractContent.UNKNOWN_ID. * * @return The DerivedFile object. * * @throws SQLException if there is an error reading from the result set or * doing additional queries. 
	 */
	private DerivedFile derivedFile(ResultSet rs, CaseDbConnection connection, long parentId) throws SQLException {
		boolean hasLocalPath = rs.getBoolean("has_path"); //NON-NLS
		long objId = rs.getLong("obj_id"); //NON-NLS
		String localPath = null;
		TskData.EncodingType encodingType = TskData.EncodingType.NONE;
		// If the file has an on-disk path, look up that path and its encoding
		// with a secondary query against tsk_files_path.
		if (hasLocalPath) {
			ResultSet rsFilePath = null;
			acquireSingleUserCaseReadLock();
			try {
				PreparedStatement statement = connection.getPreparedStatement(PREPARED_STATEMENT.SELECT_LOCAL_PATH_AND_ENCODING_FOR_FILE);
				statement.clearParameters();
				statement.setLong(1, objId);
				rsFilePath = connection.executeQuery(statement);
				if (rsFilePath.next()) {
					localPath = rsFilePath.getString("path");
					encodingType = TskData.EncodingType.valueOf(rsFilePath.getInt("encoding_type"));
				}
			} catch (SQLException ex) {
				// Best-effort lookup: log and continue with localPath == null.
				logger.log(Level.SEVERE, "Error getting encoding type for file " + objId, ex); //NON-NLS
			} finally {
				closeResultSet(rsFilePath);
				releaseSingleUserCaseReadLock();
			}
		}
		String parentPath = rs.getString("parent_path"); //NON-NLS
		if (parentPath == null) {
			parentPath = "";
		}
		// os_account_obj_id is nullable; getLong() returns 0 for NULL, so check wasNull().
		Long osAccountObjId = rs.getLong("os_account_obj_id");
		if (rs.wasNull()) {
			osAccountObjId = null;
		}
		final DerivedFile df = new DerivedFile(this, objId, rs.getLong("data_source_obj_id"),
				rs.getString("name"), //NON-NLS
				TSK_FS_NAME_TYPE_ENUM.valueOf(rs.getShort("dir_type")), //NON-NLS
				TSK_FS_META_TYPE_ENUM.valueOf(rs.getShort("meta_type")), //NON-NLS
				TSK_FS_NAME_FLAG_ENUM.valueOf(rs.getShort("dir_flags")), rs.getShort("meta_flags"), //NON-NLS
				rs.getLong("size"), //NON-NLS
				rs.getLong("ctime"), rs.getLong("crtime"), rs.getLong("atime"), rs.getLong("mtime"), //NON-NLS
				rs.getString("md5"), rs.getString("sha256"), FileKnown.valueOf(rs.getByte("known")), //NON-NLS
				parentPath, localPath, parentId, rs.getString("mime_type"),
				encodingType, rs.getString("extension"),
				rs.getString("owner_uid"), osAccountObjId);
		return df;
	}

	/**
	 * Creates a LocalFile object using the data from a given result set.
	 *
	 * @param rs         The result set.
	 * @param connection The case database connection.
	 * @param parentId   The parent id for the derived file or
	 *                   AbstractContent.UNKNOWN_ID.
	 *
	 * @return The LocalFile object.
	 *
	 * @throws SQLException if there is an error reading from the result set or
	 *                      doing additional queries.
	 */
	private LocalFile localFile(ResultSet rs, CaseDbConnection connection, long parentId) throws SQLException {
		long objId = rs.getLong("obj_id"); //NON-NLS
		String localPath = null;
		TskData.EncodingType encodingType = TskData.EncodingType.NONE;
		// If the file has an on-disk path, look up that path and its encoding
		// with a secondary query against tsk_files_path.
		if (rs.getBoolean("has_path")) {
			ResultSet rsFilePath = null;
			acquireSingleUserCaseReadLock();
			try {
				PreparedStatement statement = connection.getPreparedStatement(PREPARED_STATEMENT.SELECT_LOCAL_PATH_AND_ENCODING_FOR_FILE);
				statement.clearParameters();
				statement.setLong(1, objId);
				rsFilePath = connection.executeQuery(statement);
				if (rsFilePath.next()) {
					localPath = rsFilePath.getString("path");
					encodingType = TskData.EncodingType.valueOf(rsFilePath.getInt("encoding_type"));
				}
			} catch (SQLException ex) {
				// Best-effort lookup: log and continue with localPath == null.
				logger.log(Level.SEVERE, "Error getting encoding type for file " + objId, ex); //NON-NLS
			} finally {
				closeResultSet(rsFilePath);
				releaseSingleUserCaseReadLock();
			}
		}
		String parentPath = rs.getString("parent_path"); //NON-NLS
		if (null == parentPath) {
			parentPath = "";
		}
		// os_account_obj_id is nullable; getLong() returns 0 for NULL, so check wasNull().
		Long osAccountObjId = rs.getLong("os_account_obj_id");
		if (rs.wasNull()) {
			osAccountObjId = null;
		}
		LocalFile file = new LocalFile(this, objId, rs.getString("name"), //NON-NLS
				TSK_DB_FILES_TYPE_ENUM.valueOf(rs.getShort("type")), //NON-NLS
				TSK_FS_NAME_TYPE_ENUM.valueOf(rs.getShort("dir_type")), //NON-NLS
				TSK_FS_META_TYPE_ENUM.valueOf(rs.getShort("meta_type")), //NON-NLS
				TSK_FS_NAME_FLAG_ENUM.valueOf(rs.getShort("dir_flags")), rs.getShort("meta_flags"), //NON-NLS
				rs.getLong("size"), //NON-NLS
				rs.getLong("ctime"), rs.getLong("crtime"), rs.getLong("atime"), rs.getLong("mtime"), //NON-NLS
				rs.getString("mime_type"), rs.getString("md5"), rs.getString("sha256"),
				FileKnown.valueOf(rs.getByte("known")), //NON-NLS
				parentId, parentPath, rs.getLong("data_source_obj_id"),
				localPath,
				encodingType, rs.getString("extension"),
				rs.getString("owner_uid"), osAccountObjId);
		return file;
	}

	/**
	 * Create a Slack File object from the result set containing query results
	 * on tsk_files table
	 *
	 * @param rs the result set
	 * @param fs parent file system; may be null, in which case the caller is
	 *           responsible for setting it later
	 *
	 * @return a newly created Slack File
	 *
	 * @throws SQLException if there is an error reading from the result set
	 */
	org.sleuthkit.datamodel.SlackFile slackFile(ResultSet rs, FileSystem fs) throws SQLException {
		// os_account_obj_id is nullable; getLong() returns 0 for NULL, so check wasNull().
		Long osAccountObjId = rs.getLong("os_account_obj_id");
		if (rs.wasNull()) {
			osAccountObjId = null;
		}
		org.sleuthkit.datamodel.SlackFile f = new org.sleuthkit.datamodel.SlackFile(this, rs.getLong("obj_id"), //NON-NLS
				rs.getLong("data_source_obj_id"), rs.getLong("fs_obj_id"), //NON-NLS
				TskData.TSK_FS_ATTR_TYPE_ENUM.valueOf(rs.getShort("attr_type")), //NON-NLS
				rs.getInt("attr_id"), rs.getString("name"), rs.getLong("meta_addr"), rs.getInt("meta_seq"), //NON-NLS
				TSK_FS_NAME_TYPE_ENUM.valueOf(rs.getShort("dir_type")), //NON-NLS
				TSK_FS_META_TYPE_ENUM.valueOf(rs.getShort("meta_type")), //NON-NLS
				TSK_FS_NAME_FLAG_ENUM.valueOf(rs.getShort("dir_flags")), //NON-NLS
				rs.getShort("meta_flags"), rs.getLong("size"), //NON-NLS
				rs.getLong("ctime"), rs.getLong("crtime"), rs.getLong("atime"), rs.getLong("mtime"), //NON-NLS
				(short) rs.getInt("mode"), rs.getInt("uid"), rs.getInt("gid"), //NON-NLS
				rs.getString("md5"), rs.getString("sha256"), FileKnown.valueOf(rs.getByte("known")), //NON-NLS
				rs.getString("parent_path"), rs.getString("mime_type"), rs.getString("extension"),
				rs.getString("owner_uid"), osAccountObjId); //NON-NLS
		f.setFileSystem(fs);
		return f;
	}

	/**
	 * Returns the list of abstractFile objects from a result of selecting many
	 * files that meet a certain criteria.
* * @param rs * @param parentId * * @return * * @throws SQLException */ List fileChildren(ResultSet rs, CaseDbConnection connection, long parentId) throws SQLException { List children = new ArrayList(); while (rs.next()) { TskData.TSK_DB_FILES_TYPE_ENUM type = TskData.TSK_DB_FILES_TYPE_ENUM.valueOf(rs.getShort("type")); if (null != type) { switch (type) { case FS: if (rs.getShort("meta_type") != TSK_FS_META_TYPE_ENUM.TSK_FS_META_TYPE_VIRT_DIR.getValue()) { FsContent result; if (rs.getShort("meta_type") == TSK_FS_META_TYPE_ENUM.TSK_FS_META_TYPE_DIR.getValue()) { result = directory(rs, null); } else { result = file(rs, null); } children.add(result); } else { VirtualDirectory virtDir = virtualDirectory(rs, connection); children.add(virtDir); } break; case VIRTUAL_DIR: VirtualDirectory virtDir = virtualDirectory(rs, connection); children.add(virtDir); break; case LOCAL_DIR: LocalDirectory localDir = localDirectory(rs); children.add(localDir); break; case UNALLOC_BLOCKS: case UNUSED_BLOCKS: case CARVED: case LAYOUT_FILE: { String parentPath = rs.getString("parent_path"); if (parentPath == null) { parentPath = ""; } Long osAccountObjId = rs.getLong("os_account_obj_id"); if (rs.wasNull()) { osAccountObjId = null; } final LayoutFile lf = new LayoutFile(this, rs.getLong("obj_id"), rs.getLong("data_source_obj_id"), rs.getString("name"), type, TSK_FS_NAME_TYPE_ENUM.valueOf(rs.getShort("dir_type")), TSK_FS_META_TYPE_ENUM.valueOf(rs.getShort("meta_type")), TSK_FS_NAME_FLAG_ENUM.valueOf(rs.getShort("dir_flags")), rs.getShort("meta_flags"), rs.getLong("size"), rs.getLong("ctime"), rs.getLong("crtime"), rs.getLong("atime"), rs.getLong("mtime"), rs.getString("md5"), rs.getString("sha256"), FileKnown.valueOf(rs.getByte("known")), parentPath, rs.getString("mime_type"), rs.getString("owner_uid"), osAccountObjId); children.add(lf); break; } case DERIVED: final DerivedFile df = derivedFile(rs, connection, parentId); children.add(df); break; case LOCAL: { final LocalFile lf = 
localFile(rs, connection, parentId); children.add(lf); break; } case SLACK: { final SlackFile sf = slackFile(rs, null); children.add(sf); break; } default: break; } } } return children; } /** * This method allows developers to run arbitrary SQL "SELECT" queries. The * CaseDbQuery object will take care of acquiring the necessary database * lock and when used in a try-with-resources block will automatically take * care of releasing the lock. If you do not use a try-with-resources block * you must call CaseDbQuery.close() once you are done processing the files * of the query. * * Also note that if you use it within a transaction to insert something * into the database, and then within that same transaction query the * inserted item from the database, you will likely not see your inserted * item, as the method uses new connections for each execution. With this * method, you must close your transaction before successfully querying for * newly-inserted items. * * @param query The query string to execute. * * @return A CaseDbQuery instance. * * @throws TskCoreException */ public CaseDbQuery executeQuery(String query) throws TskCoreException { return new CaseDbQuery(query); } /** * This method allows developers to run arbitrary SQL queries, including * INSERT and UPDATE. The CaseDbQuery object will take care of acquiring the * necessary database lock and when used in a try-with-resources block will * automatically take care of releasing the lock. If you do not use a * try-with-resources block you must call CaseDbQuery.close() once you are * done processing the files of the query. * * Also note that if you use it within a transaction to insert something * into the database, and then within that same transaction query the * inserted item from the database, you will likely not see your inserted * item, as the method uses new connections for each execution. With this * method, you must close your transaction before successfully querying for * newly-inserted items. 
* * @param query The query string to execute. * * @return A CaseDbQuery instance. * * @throws TskCoreException */ public CaseDbQuery executeInsertOrUpdate(String query) throws TskCoreException { return new CaseDbQuery(query, true); } /** * Get a case database connection. * * @return The case database connection. * * @throws TskCoreException */ CaseDbConnection getConnection() throws TskCoreException { return connections.getConnection(); } /** * Gets the string used to identify this case in the JNI cache. * * @return The string for this case * * @throws TskCoreException */ String getCaseHandleIdentifier() { return caseHandleIdentifier; } @Override protected void finalize() throws Throwable { try { close(); } finally { super.finalize(); } } /** * Call to free resources when done with instance. */ public synchronized void close() { acquireSingleUserCaseWriteLock(); try { connections.close(); } catch (TskCoreException ex) { logger.log(Level.SEVERE, "Error closing database connection pool.", ex); //NON-NLS } fileSystemIdMap.clear(); try { if (this.caseHandle != null) { this.caseHandle.free(); this.caseHandle = null; } } catch (TskCoreException ex) { logger.log(Level.SEVERE, "Error freeing case handle.", ex); //NON-NLS } finally { releaseSingleUserCaseWriteLock(); } } /** * Store the known status for the FsContent in the database Note: will not * update status if content is already 'Known Bad' * * @param file The AbstractFile object * @param fileKnown The object's known status * * @return true if the known status was updated, false otherwise * * @throws TskCoreException thrown if a critical error occurred within tsk * core */ public boolean setKnown(AbstractFile file, FileKnown fileKnown) throws TskCoreException { long id = file.getId(); FileKnown currentKnown = file.getKnown(); if (currentKnown.compareTo(fileKnown) > 0) { return false; } acquireSingleUserCaseWriteLock(); try (CaseDbConnection connection = connections.getConnection(); Statement statement = 
connection.createStatement();) { connection.executeUpdate(statement, "UPDATE tsk_files " //NON-NLS + "SET known='" + fileKnown.getFileKnownValue() + "' " //NON-NLS + "WHERE obj_id=" + id); //NON-NLS file.setKnown(fileKnown); } catch (SQLException ex) { throw new TskCoreException("Error setting Known status.", ex); } finally { releaseSingleUserCaseWriteLock(); } return true; } /** * Set the name of an object in the tsk_files table. * * @param name The new name for the object * @param objId The object ID * * @throws TskCoreException If there is an error updating the case database. */ void setFileName(String name, long objId) throws TskCoreException { acquireSingleUserCaseWriteLock(); try (CaseDbConnection connection = connections.getConnection();) { PreparedStatement preparedStatement = connection.getPreparedStatement(SleuthkitCase.PREPARED_STATEMENT.UPDATE_FILE_NAME); preparedStatement.clearParameters(); preparedStatement.setString(1, name); preparedStatement.setLong(2, objId); connection.executeUpdate(preparedStatement); } catch (SQLException ex) { throw new TskCoreException(String.format("Error updating while the name for object ID %d to %s", objId, name), ex); } finally { releaseSingleUserCaseWriteLock(); } } /** * Set the display name of an image in the tsk_image_info table. * * @param name The new name for the image * @param objId The object ID * * @throws TskCoreException If there is an error updating the case database. 
*/ void setImageName(String name, long objId) throws TskCoreException { acquireSingleUserCaseWriteLock(); try (CaseDbConnection connection = connections.getConnection();) { PreparedStatement preparedStatement = connection.getPreparedStatement(SleuthkitCase.PREPARED_STATEMENT.UPDATE_IMAGE_NAME); preparedStatement.clearParameters(); preparedStatement.setString(1, name); preparedStatement.setLong(2, objId); connection.executeUpdate(preparedStatement); } catch (SQLException ex) { throw new TskCoreException(String.format("Error updating while the name for object ID %d to %s", objId, name), ex); } finally { releaseSingleUserCaseWriteLock(); } } /** * Updates the image's total size and sector size.This function may be used * to update the sizes after the image was created. * * Can only update the sizes if they were not set before. Will throw * TskCoreException if the values in the db are not 0 prior to this call. * * @param imgage The image that needs to be updated * @param totalSize The total size * @param sectorSize The sector size * * @throws TskCoreException If there is an error updating the case database. 
* */ void setImageSizes(Image image, long totalSize, long sectorSize) throws TskCoreException { acquireSingleUserCaseWriteLock(); try (CaseDbConnection connection = connections.getConnection();) { PreparedStatement preparedStatement = connection.getPreparedStatement(SleuthkitCase.PREPARED_STATEMENT.UPDATE_IMAGE_SIZES); preparedStatement.clearParameters(); preparedStatement.setLong(1, totalSize); preparedStatement.setLong(2, sectorSize); preparedStatement.setLong(3, image.getId()); connection.executeUpdate(preparedStatement); } catch (SQLException ex) { throw new TskCoreException(String.format("Error updating image sizes to %d and sector size to %d for object ID %d ", totalSize, sectorSize, image.getId()), ex); } finally { releaseSingleUserCaseWriteLock(); } } /** * Stores the MIME type of a file in the case database and updates the MIME * type of the given file object. * * @param file A file. * @param mimeType The MIME type. * * @throws TskCoreException If there is an error updating the case database. */ public void setFileMIMEType(AbstractFile file, String mimeType) throws TskCoreException { acquireSingleUserCaseWriteLock(); try (CaseDbConnection connection = connections.getConnection(); Statement statement = connection.createStatement()) { connection.executeUpdate(statement, String.format("UPDATE tsk_files SET mime_type = '%s' WHERE obj_id = %d", mimeType, file.getId())); file.setMIMEType(mimeType); } catch (SQLException ex) { throw new TskCoreException(String.format("Error setting MIME type for file (obj_id = %s)", file.getId()), ex); } finally { releaseSingleUserCaseWriteLock(); } } /** * Sets the unalloc meta flags for the file in the case database, and * updates the meta flags in given file object. Also updates the dir flag to * unalloc. * * @param file A file. * * * @throws TskCoreException If there is an error updating the case database. 
*/ public void setFileUnalloc(AbstractFile file) throws TskCoreException { // get the flags, reset the ALLOC flag, and set the UNALLOC flag short metaFlag = file.getMetaFlagsAsInt(); Set metaFlagAsSet = TSK_FS_META_FLAG_ENUM.valuesOf(metaFlag); metaFlagAsSet.remove(TSK_FS_META_FLAG_ENUM.ALLOC); metaFlagAsSet.add(TSK_FS_META_FLAG_ENUM.UNALLOC); short newMetaFlgs = TSK_FS_META_FLAG_ENUM.toInt(metaFlagAsSet); short newDirFlags = TSK_FS_NAME_FLAG_ENUM.UNALLOC.getValue(); acquireSingleUserCaseWriteLock(); try (CaseDbConnection connection = connections.getConnection(); Statement statement = connection.createStatement();) { connection.executeUpdate(statement, String.format("UPDATE tsk_files SET meta_flags = '%d', dir_flags = '%d' WHERE obj_id = %d", newMetaFlgs, newDirFlags, file.getId())); file.removeMetaFlag(TSK_FS_META_FLAG_ENUM.ALLOC); file.setMetaFlag(TSK_FS_META_FLAG_ENUM.UNALLOC); file.setDirFlag(TSK_FS_NAME_FLAG_ENUM.UNALLOC); } catch (SQLException ex) { throw new TskCoreException(String.format("Error setting unalloc meta flag for file (obj_id = %s)", file.getId()), ex); } finally { releaseSingleUserCaseWriteLock(); } } /** * Store the md5Hash for the file in the database * * @param file The file object * @param md5Hash The object's md5Hash * * @throws TskCoreException thrown if a critical error occurred within tsk * core */ void setMd5Hash(AbstractFile file, String md5Hash) throws TskCoreException { if (md5Hash == null) { return; } long id = file.getId(); acquireSingleUserCaseWriteLock(); try (CaseDbConnection connection = connections.getConnection();) { PreparedStatement statement = connection.getPreparedStatement(PREPARED_STATEMENT.UPDATE_FILE_MD5); statement.clearParameters(); statement.setString(1, md5Hash.toLowerCase()); statement.setLong(2, id); connection.executeUpdate(statement); file.setMd5Hash(md5Hash.toLowerCase()); } catch (SQLException ex) { throw new TskCoreException("Error setting MD5 hash", ex); } finally { releaseSingleUserCaseWriteLock(); } } 
/** * Store the MD5 hash for the image in the database * * @param img The image object * @param md5Hash The image's MD5 hash * * @throws TskCoreException thrown if a critical error occurred within tsk * core */ void setMd5ImageHash(Image img, String md5Hash) throws TskCoreException { if (md5Hash == null) { return; } long id = img.getId(); acquireSingleUserCaseWriteLock(); try (CaseDbConnection connection = connections.getConnection();) { PreparedStatement statement = connection.getPreparedStatement(PREPARED_STATEMENT.UPDATE_IMAGE_MD5); statement.clearParameters(); statement.setString(1, md5Hash.toLowerCase()); statement.setLong(2, id); connection.executeUpdate(statement); } catch (SQLException ex) { throw new TskCoreException("Error setting MD5 hash", ex); } finally { releaseSingleUserCaseWriteLock(); } } /** * Get the MD5 hash of an image from the case database * * @param The image object * * @return The image's MD5 hash * * @throws TskCoreException thrown if a critical error occurred within tsk * core */ String getMd5ImageHash(Image img) throws TskCoreException { long id = img.getId(); CaseDbConnection connection = null; ResultSet rs = null; String hash = ""; acquireSingleUserCaseReadLock(); try { connection = connections.getConnection(); PreparedStatement statement = connection.getPreparedStatement(PREPARED_STATEMENT.SELECT_IMAGE_MD5); statement.clearParameters(); statement.setLong(1, id); rs = connection.executeQuery(statement); if (rs.next()) { hash = rs.getString("md5"); } return hash; } catch (SQLException ex) { throw new TskCoreException("Error getting MD5 hash", ex); } finally { closeResultSet(rs); closeConnection(connection); releaseSingleUserCaseReadLock(); } } /** * Store the SHA1 hash for the image in the database * * @param img The image object * @param sha1Hash The image's sha1 hash * * @throws TskCoreException thrown if a critical error occurred within tsk * core */ void setSha1ImageHash(Image img, String sha1Hash) throws TskCoreException { if 
(sha1Hash == null) { return; } long id = img.getId(); acquireSingleUserCaseWriteLock(); try (CaseDbConnection connection = connections.getConnection();) { PreparedStatement statement = connection.getPreparedStatement(PREPARED_STATEMENT.UPDATE_IMAGE_SHA1); statement.clearParameters(); statement.setString(1, sha1Hash.toLowerCase()); statement.setLong(2, id); connection.executeUpdate(statement); } catch (SQLException ex) { throw new TskCoreException("Error setting SHA1 hash", ex); } finally { releaseSingleUserCaseWriteLock(); } } /** * Get the SHA1 hash of an image from the case database * * @param The image object * * @return The image's SHA1 hash * * @throws TskCoreException thrown if a critical error occurred within tsk * core */ String getSha1ImageHash(Image img) throws TskCoreException { long id = img.getId(); CaseDbConnection connection = null; ResultSet rs = null; String hash = ""; acquireSingleUserCaseReadLock(); try { connection = connections.getConnection(); PreparedStatement statement = connection.getPreparedStatement(PREPARED_STATEMENT.SELECT_IMAGE_SHA1); statement.clearParameters(); statement.setLong(1, id); rs = connection.executeQuery(statement); if (rs.next()) { hash = rs.getString("sha1"); } return hash; } catch (SQLException ex) { throw new TskCoreException("Error getting SHA1 hash", ex); } finally { closeResultSet(rs); closeConnection(connection); releaseSingleUserCaseReadLock(); } } /** * Store the SHA256 hash for the file in the database * * @param img The image object * @param sha256Hash The object's md5Hash * * @throws TskCoreException thrown if a critical error occurred within tsk * core */ void setSha256ImageHash(Image img, String sha256Hash) throws TskCoreException { if (sha256Hash == null) { return; } long id = img.getId(); acquireSingleUserCaseWriteLock(); try (CaseDbConnection connection = connections.getConnection();) { PreparedStatement statement = connection.getPreparedStatement(PREPARED_STATEMENT.UPDATE_IMAGE_SHA256); 
statement.clearParameters(); statement.setString(1, sha256Hash.toLowerCase()); statement.setLong(2, id); connection.executeUpdate(statement); } catch (SQLException ex) { throw new TskCoreException("Error setting SHA256 hash", ex); } finally { releaseSingleUserCaseWriteLock(); } } /** * Get the SHA256 hash of an image from the case database * * @param The image object * * @return The image's SHA256 hash * * @throws TskCoreException thrown if a critical error occurred within tsk * core */ String getSha256ImageHash(Image img) throws TskCoreException { long id = img.getId(); CaseDbConnection connection = null; ResultSet rs = null; String hash = ""; acquireSingleUserCaseReadLock(); try { connection = connections.getConnection(); PreparedStatement statement = connection.getPreparedStatement(PREPARED_STATEMENT.SELECT_IMAGE_SHA256); statement.clearParameters(); statement.setLong(1, id); rs = connection.executeQuery(statement); if (rs.next()) { hash = rs.getString("sha256"); } return hash; } catch (SQLException ex) { throw new TskCoreException("Error setting SHA256 hash", ex); } finally { closeResultSet(rs); closeConnection(connection); releaseSingleUserCaseReadLock(); } } /** * Set the acquisition details in the data_source_info table * * @param datasource The data source * @param details The acquisition details * * @throws TskCoreException Thrown if the database write fails */ void setAcquisitionDetails(DataSource datasource, String details) throws TskCoreException { long id = datasource.getId(); acquireSingleUserCaseWriteLock(); try (CaseDbConnection connection = connections.getConnection();) { PreparedStatement statement = connection.getPreparedStatement(PREPARED_STATEMENT.UPDATE_ACQUISITION_DETAILS); statement.clearParameters(); statement.setString(1, details); statement.setLong(2, id); connection.executeUpdate(statement); } catch (SQLException ex) { throw new TskCoreException("Error setting acquisition details", ex); } finally { releaseSingleUserCaseWriteLock(); } } 
/** * Sets the acquisition tool details such as its name, version number and * any settings used during the acquisition to acquire data. * * @param datasource The datasource object * @param name The name of the acquisition tool. May be NULL. * @param version The acquisition tool version number. May be NULL. * @param settings The settings used by the acquisition tool. May be NULL. * * @throws TskCoreException Thrown if the database write fails */ void setAcquisitionToolDetails(DataSource datasource, String name, String version, String settings) throws TskCoreException { long id = datasource.getId(); acquireSingleUserCaseWriteLock(); try (CaseDbConnection connection = connections.getConnection();) { PreparedStatement statement = connection.getPreparedStatement(PREPARED_STATEMENT.UPDATE_ACQUISITION_TOOL_SETTINGS); statement.clearParameters(); statement.setString(1, settings); statement.setString(2, name); statement.setString(3, version); statement.setLong(4, id); connection.executeUpdate(statement); } catch (SQLException ex) { throw new TskCoreException("Error setting acquisition details", ex); } finally { releaseSingleUserCaseWriteLock(); } } /** * Set the acquisition details in the data_source_info table. * * @param dataSourceId The data source ID. * @param details The acquisition details. * @param trans The current transaction. 
* * @throws TskCoreException */ void setAcquisitionDetails(long dataSourceId, String details, CaseDbTransaction trans) throws TskCoreException { try { CaseDbConnection connection = trans.getConnection(); PreparedStatement statement = connection.getPreparedStatement(PREPARED_STATEMENT.UPDATE_ACQUISITION_DETAILS); statement.clearParameters(); statement.setString(1, details); statement.setLong(2, dataSourceId); connection.executeUpdate(statement); } catch (SQLException ex) { throw new TskCoreException("Error setting acquisition details", ex); } } /** * Get the acquisition details from the data_source_info table * * @param datasource The data source * * @return The acquisition details * * @throws TskCoreException Thrown if the database read fails */ String getAcquisitionDetails(DataSource datasource) throws TskCoreException { long id = datasource.getId(); CaseDbConnection connection = null; ResultSet rs = null; String hash = ""; acquireSingleUserCaseReadLock(); try { connection = connections.getConnection(); PreparedStatement statement = connection.getPreparedStatement(PREPARED_STATEMENT.SELECT_ACQUISITION_DETAILS); statement.clearParameters(); statement.setLong(1, id); rs = connection.executeQuery(statement); if (rs.next()) { hash = rs.getString("acquisition_details"); } return hash; } catch (SQLException ex) { throw new TskCoreException("Error setting acquisition details", ex); } finally { closeResultSet(rs); closeConnection(connection); releaseSingleUserCaseReadLock(); } } /** * Get String value from the provided column from data_source_info table. 
* * @param datasource The datasource * @param columnName The column from which the data should be returned * * @return String value from the column * * @throws TskCoreException */ String getDataSourceInfoString(DataSource datasource, String columnName) throws TskCoreException { long id = datasource.getId(); CaseDbConnection connection = null; ResultSet rs = null; String returnValue = ""; acquireSingleUserCaseReadLock(); try { connection = connections.getConnection(); PreparedStatement statement = connection.getPreparedStatement(PREPARED_STATEMENT.SELECT_ACQUISITION_TOOL_SETTINGS); statement.clearParameters(); statement.setLong(1, id); rs = connection.executeQuery(statement); if (rs.next()) { returnValue = rs.getString(columnName); } return returnValue; } catch (SQLException ex) { throw new TskCoreException("Error setting acquisition details", ex); } finally { closeResultSet(rs); closeConnection(connection); releaseSingleUserCaseReadLock(); } } /** * Get Long value from the provided column from data_source_info table. 
* * @param datasource The datasource * @param columnName The column from which the data should be returned * * @return Long value from the column * * @throws TskCoreException */ Long getDataSourceInfoLong(DataSource datasource, String columnName) throws TskCoreException { long id = datasource.getId(); CaseDbConnection connection = null; ResultSet rs = null; Long returnValue = null; acquireSingleUserCaseReadLock(); try { connection = connections.getConnection(); PreparedStatement statement = connection.getPreparedStatement(PREPARED_STATEMENT.SELECT_ACQUISITION_TOOL_SETTINGS); statement.clearParameters(); statement.setLong(1, id); rs = connection.executeQuery(statement); if (rs.next()) { returnValue = rs.getLong(columnName); } return returnValue; } catch (SQLException ex) { throw new TskCoreException("Error setting acquisition details", ex); } finally { closeResultSet(rs); closeConnection(connection); releaseSingleUserCaseReadLock(); } } /** * Set the review status of the given artifact to newStatus * * @param artifact The artifact whose review status is being set. * @param newStatus The new review status for the given artifact. Must not * be null. * * @throws TskCoreException thrown if a critical error occurred within tsk * core */ public void setReviewStatus(BlackboardArtifact artifact, BlackboardArtifact.ReviewStatus newStatus) throws TskCoreException { if (newStatus == null) { return; } acquireSingleUserCaseWriteLock(); try (CaseDbConnection connection = connections.getConnection(); Statement statement = connection.createStatement();) { connection.executeUpdate(statement, "UPDATE blackboard_artifacts " + " SET review_status_id=" + newStatus.getID() + " WHERE blackboard_artifacts.artifact_id = " + artifact.getArtifactID()); } catch (SQLException ex) { throw new TskCoreException("Error setting review status", ex); } finally { releaseSingleUserCaseWriteLock(); } } /** * Return the number of objects in the database of a given file type. 
* * @param contentType Type of file to count * * @return Number of objects with that type. * * @throws TskCoreException thrown if a critical error occurred within tsk * core */ public int countFsContentType(TskData.TSK_FS_META_TYPE_ENUM contentType) throws TskCoreException { CaseDbConnection connection = null; Statement s = null; ResultSet rs = null; acquireSingleUserCaseReadLock(); try { connection = connections.getConnection(); s = connection.createStatement(); Short contentShort = contentType.getValue(); rs = connection.executeQuery(s, "SELECT COUNT(*) AS count FROM tsk_files WHERE meta_type = '" + contentShort.toString() + "'"); //NON-NLS int count = 0; if (rs.next()) { count = rs.getInt("count"); } return count; } catch (SQLException ex) { throw new TskCoreException("Error getting number of objects.", ex); } finally { closeResultSet(rs); closeStatement(s); closeConnection(connection); releaseSingleUserCaseReadLock(); } } /** * Escape the single quotes in the given string so they can be added to the * SQL caseDbConnection * * @param text * * @return text the escaped version */ public static String escapeSingleQuotes(String text) { String escapedText = null; if (text != null) { escapedText = text.replaceAll("'", "''"); } return escapedText; } /** * Find all the files with the given MD5 hash. 
* * @param md5Hash hash value to match files with * * @return List of AbstractFile with the given hash */ public List findFilesByMd5(String md5Hash) { if (md5Hash == null) { return Collections.emptyList(); } CaseDbConnection connection = null; Statement s = null; ResultSet rs = null; acquireSingleUserCaseReadLock(); try { connection = connections.getConnection(); s = connection.createStatement(); rs = connection.executeQuery(s, "SELECT * FROM tsk_files WHERE " //NON-NLS + " md5 = '" + md5Hash.toLowerCase() + "' " //NON-NLS + "AND size > 0"); //NON-NLS return resultSetToAbstractFiles(rs, connection); } catch (SQLException | TskCoreException ex) { logger.log(Level.WARNING, "Error querying database.", ex); //NON-NLS } finally { closeResultSet(rs); closeStatement(s); closeConnection(connection); releaseSingleUserCaseReadLock(); } return Collections.emptyList(); } /** * Query all the files to verify if they have an MD5 hash associated with * them. * * @return true if all files have an MD5 hash */ public boolean allFilesMd5Hashed() { boolean allFilesAreHashed = false; CaseDbConnection connection = null; Statement s = null; ResultSet rs = null; acquireSingleUserCaseReadLock(); try { connection = connections.getConnection(); s = connection.createStatement(); rs = connection.executeQuery(s, "SELECT COUNT(*) AS count FROM tsk_files " //NON-NLS + "WHERE dir_type = '" + TskData.TSK_FS_NAME_TYPE_ENUM.REG.getValue() + "' " //NON-NLS + "AND md5 IS NULL " //NON-NLS + "AND size > '0'"); //NON-NLS if (rs.next() && rs.getInt("count") == 0) { allFilesAreHashed = true; } } catch (SQLException | TskCoreException ex) { logger.log(Level.WARNING, "Failed to query whether all files have MD5 hashes", ex); //NON-NLS } finally { closeResultSet(rs); closeStatement(s); closeConnection(connection); releaseSingleUserCaseReadLock(); } return allFilesAreHashed; } /** * Query all the files and counts how many have an MD5 hash. 
* * @return the number of files with an MD5 hash */ public int countFilesMd5Hashed() { int count = 0; acquireSingleUserCaseReadLock(); CaseDbConnection connection = null; Statement s = null; ResultSet rs = null; try { connection = connections.getConnection(); s = connection.createStatement(); rs = connection.executeQuery(s, "SELECT COUNT(*) AS count FROM tsk_files " //NON-NLS + "WHERE md5 IS NOT NULL " //NON-NLS + "AND size > '0'"); //NON-NLS if (rs.next()) { count = rs.getInt("count"); } } catch (SQLException | TskCoreException ex) { logger.log(Level.WARNING, "Failed to query for all the files.", ex); //NON-NLS } finally { closeResultSet(rs); closeStatement(s); closeConnection(connection); releaseSingleUserCaseReadLock(); } return count; } /** * Selects all of the rows from the tag_names table in the case database. * * @return A list, possibly empty, of TagName data transfer objects (DTOs) * for the rows. * * @throws TskCoreException */ public List getAllTagNames() throws TskCoreException { CaseDbConnection connection = null; ResultSet resultSet = null; acquireSingleUserCaseReadLock(); try { connection = connections.getConnection(); // SELECT * FROM tag_names PreparedStatement statement = connection.getPreparedStatement(PREPARED_STATEMENT.SELECT_TAG_NAMES); resultSet = connection.executeQuery(statement); ArrayList tagNames = new ArrayList<>(); while (resultSet.next()) { tagNames.add(new TagName(resultSet.getLong("tag_name_id"), resultSet.getString("display_name"), resultSet.getString("description"), TagName.HTML_COLOR.getColorByName(resultSet.getString("color")), TskData.FileKnown.valueOf(resultSet.getByte("knownStatus")), resultSet.getLong("tag_set_id"), resultSet.getInt("rank"))); //NON-NLS } return tagNames; } catch (SQLException ex) { throw new TskCoreException("Error selecting rows from tag_names table", ex); } finally { closeResultSet(resultSet); closeConnection(connection); releaseSingleUserCaseReadLock(); } } /** * Selects all of the rows from the 
tag_names table in the case database for * which there is at least one matching row in the content_tags or * blackboard_artifact_tags tables. * * @return A list, possibly empty, of TagName data transfer objects (DTOs) * for the rows. * * @throws TskCoreException */ public List getTagNamesInUse() throws TskCoreException { CaseDbConnection connection = null; ResultSet resultSet = null; acquireSingleUserCaseReadLock(); try { connection = connections.getConnection(); // SELECT * FROM tag_names WHERE tag_name_id IN (SELECT tag_name_id from content_tags UNION SELECT tag_name_id FROM blackboard_artifact_tags) PreparedStatement statement = connection.getPreparedStatement(PREPARED_STATEMENT.SELECT_TAG_NAMES_IN_USE); resultSet = connection.executeQuery(statement); ArrayList tagNames = new ArrayList<>(); while (resultSet.next()) { tagNames.add(new TagName(resultSet.getLong("tag_name_id"), resultSet.getString("display_name"), resultSet.getString("description"), TagName.HTML_COLOR.getColorByName(resultSet.getString("color")), TskData.FileKnown.valueOf(resultSet.getByte("knownStatus")), resultSet.getLong("tag_set_id"), resultSet.getInt("rank"))); //NON-NLS } return tagNames; } catch (SQLException ex) { throw new TskCoreException("Error selecting rows from tag_names table", ex); } finally { closeResultSet(resultSet); closeConnection(connection); releaseSingleUserCaseReadLock(); } } /** * Selects all of the rows from the tag_names table in the case database for * which there is at least one matching row in the content_tags or * blackboard_artifact_tags tables, for the given data source object id. * * @param dsObjId data source object id * * @return A list, possibly empty, of TagName data transfer objects (DTOs) * for the rows. 
* * @throws TskCoreException */ public List getTagNamesInUse(long dsObjId) throws TskCoreException { ArrayList tagNames = new ArrayList<>(); // SELECT * FROM tag_names WHERE tag_name_id IN // ( SELECT content_tags.tag_name_id as tag_name_id FROM content_tags as content_tags, tsk_files as tsk_files WHERE content_tags.obj_id = tsk_files.obj_id AND tsk_files.data_source_obj_id = ? " // UNION // SELECT artifact_tags.tag_name_id as tag_name_id FROM blackboard_artifact_tags as artifact_tags, blackboard_artifacts AS arts WHERE artifact_tags.artifact_id = arts.artifact_id AND arts.data_source_obj_id = ? ) // ) CaseDbConnection connection = null; ResultSet resultSet = null; acquireSingleUserCaseReadLock(); try { connection = connections.getConnection(); PreparedStatement statement = connection.getPreparedStatement(PREPARED_STATEMENT.SELECT_TAG_NAMES_IN_USE_BY_DATASOURCE); statement.setLong(1, dsObjId); statement.setLong(2, dsObjId); resultSet = connection.executeQuery(statement); //NON-NLS while (resultSet.next()) { tagNames.add(new TagName(resultSet.getLong("tag_name_id"), resultSet.getString("display_name"), resultSet.getString("description"), TagName.HTML_COLOR.getColorByName(resultSet.getString("color")), TskData.FileKnown.valueOf(resultSet.getByte("knownStatus")), resultSet.getLong("tag_set_id"), resultSet.getInt("rank"))); //NON-NLS } return tagNames; } catch (SQLException ex) { throw new TskCoreException("Failed to get tag names in use for data source objID : " + dsObjId, ex); } finally { closeResultSet(resultSet); closeConnection(connection); releaseSingleUserCaseReadLock(); } } /** * Inserts row into the tags_names table in the case database. * * @param displayName The display name for the new tag name. * @param description The description for the new tag name. * @param color The HTML color to associate with the new tag name. * * @return A TagName data transfer object (DTO) for the new row. 
* * @throws TskCoreException * @deprecated TaggingManager.addOrUpdateTagName should be used instead with * the default knowStatus of TskData.FileKnown.UNKNOWN */ @Deprecated @SuppressWarnings("deprecation") public TagName addTagName(String displayName, String description, TagName.HTML_COLOR color) throws TskCoreException { return addOrUpdateTagName(displayName, description, color, TskData.FileKnown.UNKNOWN); } /** * Inserts row into the tags_names table, or updates the existing row if the * displayName already exists in the tag_names table in the case database. * * @param displayName The display name for the new tag name. * @param description The description for the new tag name. * @param color The HTML color to associate with the new tag name. * @param knownStatus The TskData.FileKnown value to associate with the new * tag name. * * @return A TagName data transfer object (DTO) for the new row. * * @throws TskCoreException * @deprecated This method has been replaced by * TaggingManager.addOrUpdateTagName. */ @Deprecated public TagName addOrUpdateTagName(String displayName, String description, TagName.HTML_COLOR color, TskData.FileKnown knownStatus) throws TskCoreException { return getTaggingManager().addOrUpdateTagName(displayName, description, color, knownStatus); } /** * Inserts a row into the content_tags table in the case database. * * @param content The content to tag. * @param tagName The name to use for the tag. * @param comment A comment to store with the tag. * @param beginByteOffset Designates the beginning of a tagged section. * @param endByteOffset Designates the end of a tagged section. * * @return A ContentTag data transfer object (DTO) for the new row. 
* * @throws TskCoreException * @deprecated Use TaggingManager.addContentTag */ @Deprecated public ContentTag addContentTag(Content content, TagName tagName, String comment, long beginByteOffset, long endByteOffset) throws TskCoreException { return taggingMgr.addContentTag(content, tagName, comment, beginByteOffset, endByteOffset).getAddedTag(); } /* * Deletes a row from the content_tags table in the case database. @param * tag A ContentTag data transfer object (DTO) for the row to delete. * @throws TskCoreException */ public void deleteContentTag(ContentTag tag) throws TskCoreException { CaseDbTransaction trans = beginTransaction(); try { // DELETE FROM content_tags WHERE tag_id = ? PreparedStatement statement = trans.getConnection().getPreparedStatement(PREPARED_STATEMENT.DELETE_CONTENT_TAG); statement.clearParameters(); statement.setLong(1, tag.getId()); trans.getConnection().executeUpdate(statement); // update the aggregate score for the content Long contentId = tag.getContent() != null ? tag.getContent().getId() : null; Long dataSourceId = tag.getContent() != null && tag.getContent().getDataSource() != null ? tag.getContent().getDataSource().getId() : null; this.getScoringManager().updateAggregateScoreAfterDeletion(contentId, dataSourceId, trans); trans.commit(); trans = null; } catch (SQLException ex) { throw new TskCoreException("Error deleting row from content_tags table (id = " + tag.getId() + ")", ex); } finally { if (trans != null) { trans.rollback(); } } } /** * Selects all of the rows from the content_tags table in the case database. * * @return A list, possibly empty, of ContentTag data transfer objects * (DTOs) for the rows. 
* * @throws TskCoreException */ public List getAllContentTags() throws TskCoreException { CaseDbConnection connection = null; ResultSet resultSet = null; acquireSingleUserCaseReadLock(); try { connection = connections.getConnection(); // SELECT content_tags.tag_id, content_tags.obj_id, content_tags.tag_name_id, content_tags.comment, content_tags.begin_byte_offset, content_tags.end_byte_offset, tag_names.display_name, tag_names.description, tag_names.color, tag_names.knownStatus, tsk_examiners.login_name // FROM content_tags // INNER JOIN tag_names ON content_tags.tag_name_id = tag_names.tag_name_id // LEFT OUTER JOIN tsk_examiners ON content_tags.examiner_id = tsk_examiners.examiner_id PreparedStatement statement = connection.getPreparedStatement(PREPARED_STATEMENT.SELECT_CONTENT_TAGS); resultSet = connection.executeQuery(statement); ArrayList tags = new ArrayList(); while (resultSet.next()) { TagName tagName = new TagName(resultSet.getLong("tag_name_id"), resultSet.getString("display_name"), resultSet.getString("description"), TagName.HTML_COLOR.getColorByName(resultSet.getString("color")), TskData.FileKnown.valueOf(resultSet.getByte("knownStatus")), resultSet.getLong("tag_set_id"), resultSet.getInt("rank")); //NON-NLS Content content = getContentById(resultSet.getLong("obj_id")); //NON-NLS tags.add(new ContentTag(resultSet.getLong("tag_id"), content, tagName, resultSet.getString("comment"), resultSet.getLong("begin_byte_offset"), resultSet.getLong("end_byte_offset"), resultSet.getString("login_name"))); //NON-NLS } return tags; } catch (SQLException ex) { throw new TskCoreException("Error selecting rows from content_tags table", ex); } finally { closeResultSet(resultSet); closeConnection(connection); releaseSingleUserCaseReadLock(); } } /** * Gets a count of the rows in the content_tags table in the case database * with a specified foreign key into the tag_names table. * * @param tagName A data transfer object (DTO) for the tag name to match. 
* * @return The count, possibly zero. * * @throws TskCoreException */ public long getContentTagsCountByTagName(TagName tagName) throws TskCoreException { if (tagName.getId() == Tag.ID_NOT_SET) { throw new TskCoreException("TagName object is invalid, id not set"); } CaseDbConnection connection = null; ResultSet resultSet = null; acquireSingleUserCaseReadLock(); try { connection = connections.getConnection(); // SELECT COUNT(*) AS count FROM content_tags WHERE tag_name_id = ? PreparedStatement statement = connection.getPreparedStatement(PREPARED_STATEMENT.COUNT_CONTENT_TAGS_BY_TAG_NAME); statement.clearParameters(); statement.setLong(1, tagName.getId()); resultSet = connection.executeQuery(statement); if (resultSet.next()) { return resultSet.getLong("count"); } else { throw new TskCoreException("Error getting content_tags row count for tag name (tag_name_id = " + tagName.getId() + ")"); } } catch (SQLException ex) { throw new TskCoreException("Error getting content_tags row count for tag name (tag_name_id = " + tagName.getId() + ")", ex); } finally { closeResultSet(resultSet); closeConnection(connection); releaseSingleUserCaseReadLock(); } } /** * Gets content tags count by tag name, for the given data source * * @param tagName The representation of the desired tag type in the case * database, which can be obtained by calling getTagNames * and/or addTagName. * * @param dsObjId data source object id * * @return A count of the content tags with the specified tag name, and for * the given data source * * @throws TskCoreException If there is an error getting the tags count from * the case database. 
*/ public long getContentTagsCountByTagName(TagName tagName, long dsObjId) throws TskCoreException { if (tagName.getId() == Tag.ID_NOT_SET) { throw new TskCoreException("TagName object is invalid, id not set"); } CaseDbConnection connection = null; ResultSet resultSet = null; acquireSingleUserCaseReadLock(); try { connection = connections.getConnection(); // "SELECT COUNT(*) AS count FROM content_tags as content_tags, tsk_files as tsk_files WHERE content_tags.obj_id = tsk_files.obj_id" // + " AND content_tags.tag_name_id = ? " // + " AND tsk_files.data_source_obj_id = ? " PreparedStatement statement = connection.getPreparedStatement(PREPARED_STATEMENT.COUNT_CONTENT_TAGS_BY_TAG_NAME_BY_DATASOURCE); statement.clearParameters(); statement.setLong(1, tagName.getId()); statement.setLong(2, dsObjId); resultSet = connection.executeQuery(statement); if (resultSet.next()) { return resultSet.getLong("count"); } else { throw new TskCoreException("Error getting content_tags row count for tag name (tag_name_id = " + tagName.getId() + ")" + " for dsObjId = " + dsObjId); } } catch (SQLException ex) { throw new TskCoreException("Failed to get content_tags row count for tag_name_id = " + tagName.getId() + "data source objID : " + dsObjId, ex); } finally { closeResultSet(resultSet); closeConnection(connection); releaseSingleUserCaseReadLock(); } } /** * Selects the rows in the content_tags table in the case database with a * specified tag id. * * @param contentTagID the tag id of the ContentTag to retrieve. * * @return The content tag. 
* * @throws TskCoreException */ public ContentTag getContentTagByID(long contentTagID) throws TskCoreException { CaseDbConnection connection = null; ResultSet resultSet = null; ContentTag tag = null; acquireSingleUserCaseReadLock(); try { connection = connections.getConnection(); // SELECT content_tags.tag_id, content_tags.obj_id, content_tags.tag_name_id, content_tags.comment, content_tags.begin_byte_offset, content_tags.end_byte_offset, tag_names.display_name, tag_names.description, tag_names.color, tag_names.knownStatus, tsk_examiners.login_name // FROM content_tags // INNER JOIN tag_names ON content_tags.tag_name_id = tag_names.tag_name_id // UTER LEFT JOIN tsk_examiners ON content_tags.examiner_id = tsk_examiners.examiner_id // WHERE tag_id = ? PreparedStatement statement = connection.getPreparedStatement(PREPARED_STATEMENT.SELECT_CONTENT_TAG_BY_ID); statement.clearParameters(); statement.setLong(1, contentTagID); resultSet = connection.executeQuery(statement); while (resultSet.next()) { TagName tagName = new TagName(resultSet.getLong("tag_name_id"), resultSet.getString("display_name"), resultSet.getString("description"), TagName.HTML_COLOR.getColorByName(resultSet.getString("color")), TskData.FileKnown.valueOf(resultSet.getByte("knownStatus")), resultSet.getLong("tag_set_id"), resultSet.getInt("rank")); tag = new ContentTag(resultSet.getLong("tag_id"), getContentById(resultSet.getLong("obj_id")), tagName, resultSet.getString("comment"), resultSet.getLong("begin_byte_offset"), resultSet.getLong("end_byte_offset"), resultSet.getString("login_name")); } resultSet.close(); } catch (SQLException ex) { throw new TskCoreException("Error getting content tag with id = " + contentTagID, ex); } finally { closeResultSet(resultSet); closeConnection(connection); releaseSingleUserCaseReadLock(); } return tag; } /** * Selects the rows in the content_tags table in the case database with a * specified foreign key into the tag_names table. 
* * @param tagName A data transfer object (DTO) for the tag name to match. * * @return A list, possibly empty, of ContentTag data transfer objects * (DTOs) for the rows. * * @throws TskCoreException */ public List getContentTagsByTagName(TagName tagName) throws TskCoreException { if (tagName.getId() == Tag.ID_NOT_SET) { throw new TskCoreException("TagName object is invalid, id not set"); } CaseDbConnection connection = null; ResultSet resultSet = null; acquireSingleUserCaseReadLock(); try { connection = connections.getConnection(); // SELECT content_tags.tag_id, content_tags.obj_id, content_tags.tag_name_id, content_tags.comment, content_tags.begin_byte_offset, content_tags.end_byte_offset, tsk_examiners.login_name // FROM content_tags // LEFT OUTER JOIN tsk_examiners ON content_tags.examiner_id = tsk_examiners.examiner_id // WHERE tag_name_id = ? PreparedStatement statement = connection.getPreparedStatement(PREPARED_STATEMENT.SELECT_CONTENT_TAGS_BY_TAG_NAME); statement.clearParameters(); statement.setLong(1, tagName.getId()); resultSet = connection.executeQuery(statement); ArrayList tags = new ArrayList(); while (resultSet.next()) { ContentTag tag = new ContentTag(resultSet.getLong("tag_id"), getContentById(resultSet.getLong("obj_id")), tagName, resultSet.getString("comment"), resultSet.getLong("begin_byte_offset"), resultSet.getLong("end_byte_offset"), resultSet.getString("login_name")); //NON-NLS tags.add(tag); } resultSet.close(); return tags; } catch (SQLException ex) { throw new TskCoreException("Error getting content_tags rows (tag_name_id = " + tagName.getId() + ")", ex); } finally { closeResultSet(resultSet); closeConnection(connection); releaseSingleUserCaseReadLock(); } } /** * Gets content tags by tag name, for the given data source. * * @param tagName The tag name of interest. * @param dsObjId data source object id * * @return A list, possibly empty, of the content tags with the specified * tag name, and for the given data source. 
* * @throws TskCoreException If there is an error getting the tags from the * case database. */ public List getContentTagsByTagName(TagName tagName, long dsObjId) throws TskCoreException { CaseDbConnection connection = null; ResultSet resultSet = null; acquireSingleUserCaseReadLock(); try { connection = connections.getConnection(); // SELECT content_tags.tag_id, content_tags.obj_id, content_tags.tag_name_id, content_tags.comment, content_tags.begin_byte_offset, content_tags.end_byte_offset, tag_names.display_name, tag_names.description, tag_names.color, tag_names.knownStatus, tsk_examiners.login_name // FROM content_tags as content_tags, tsk_files as tsk_files // LEFT OUTER JOIN tsk_examiners ON content_tags.examiner_id = tsk_examiners.examiner_id // WHERE content_tags.obj_id = tsk_files.obj_id // AND content_tags.tag_name_id = ? // AND tsk_files.data_source_obj_id = ? PreparedStatement statement = connection.getPreparedStatement(PREPARED_STATEMENT.SELECT_CONTENT_TAGS_BY_TAG_NAME_BY_DATASOURCE); statement.clearParameters(); statement.setLong(1, tagName.getId()); statement.setLong(2, dsObjId); resultSet = connection.executeQuery(statement); ArrayList tags = new ArrayList(); while (resultSet.next()) { ContentTag tag = new ContentTag(resultSet.getLong("tag_id"), getContentById(resultSet.getLong("obj_id")), tagName, resultSet.getString("comment"), resultSet.getLong("begin_byte_offset"), resultSet.getLong("end_byte_offset"), resultSet.getString("login_name")); //NON-NLS tags.add(tag); } resultSet.close(); return tags; } catch (SQLException ex) { throw new TskCoreException("Failed to get content_tags row count for tag_name_id = " + tagName.getId() + " data source objID : " + dsObjId, ex); } finally { closeResultSet(resultSet); closeConnection(connection); releaseSingleUserCaseReadLock(); } } /** * Selects the rows in the content_tags table in the case database with a * specified foreign key into the tsk_objects table. 
* * @param content A data transfer object (DTO) for the content to match. * * @return A list, possibly empty, of ContentTag data transfer objects * (DTOs) for the rows. * * @throws TskCoreException */ public List getContentTagsByContent(Content content) throws TskCoreException { CaseDbConnection connection = null; ResultSet resultSet = null; acquireSingleUserCaseReadLock(); try { connection = connections.getConnection(); // SELECT content_tags.tag_id, content_tags.obj_id, content_tags.tag_name_id, content_tags.comment, content_tags.begin_byte_offset, content_tags.end_byte_offset, tag_names.display_name, tag_names.description, tag_names.color, tag_names.knownStatus, tsk_examiners.login_name // FROM content_tags // INNER JOIN tag_names ON content_tags.tag_name_id = tag_names.tag_name_id // LEFT OUTER JOIN tsk_examiners ON content_tags.examiner_id = tsk_examiners.examiner_id // WHERE content_tags.obj_id = ? PreparedStatement statement = connection.getPreparedStatement(PREPARED_STATEMENT.SELECT_CONTENT_TAGS_BY_CONTENT); statement.clearParameters(); statement.setLong(1, content.getId()); resultSet = connection.executeQuery(statement); ArrayList tags = new ArrayList(); while (resultSet.next()) { TagName tagName = new TagName(resultSet.getLong("tag_name_id"), resultSet.getString("display_name"), resultSet.getString("description"), TagName.HTML_COLOR.getColorByName(resultSet.getString("color")), TskData.FileKnown.valueOf(resultSet.getByte("knownStatus")), resultSet.getLong("tag_set_id"), resultSet.getInt("rank")); //NON-NLS ContentTag tag = new ContentTag(resultSet.getLong("tag_id"), content, tagName, resultSet.getString("comment"), resultSet.getLong("begin_byte_offset"), resultSet.getLong("end_byte_offset"), resultSet.getString("login_name")); //NON-NLS tags.add(tag); } return tags; } catch (SQLException ex) { throw new TskCoreException("Error getting content tags data for content (obj_id = " + content.getId() + ")", ex); } finally { closeResultSet(resultSet); 
closeConnection(connection); releaseSingleUserCaseReadLock(); } } /** * Inserts a row into the blackboard_artifact_tags table in the case * database. * * @param artifact The blackboard artifact to tag. * @param tagName The name to use for the tag. * @param comment A comment to store with the tag. * * @return A BlackboardArtifactTag data transfer object (DTO) for the new * row. * * @throws TskCoreException * @deprecated User TaggingManager.addArtifactTag instead. */ @Deprecated public BlackboardArtifactTag addBlackboardArtifactTag(BlackboardArtifact artifact, TagName tagName, String comment) throws TskCoreException { return taggingMgr.addArtifactTag(artifact, tagName, comment).getAddedTag(); } /* * Deletes a row from the blackboard_artifact_tags table in the case * database. @param tag A BlackboardArtifactTag data transfer object (DTO) * representing the row to delete. @throws TskCoreException */ public void deleteBlackboardArtifactTag(BlackboardArtifactTag tag) throws TskCoreException { CaseDbTransaction trans = beginTransaction(); try { // DELETE FROM blackboard_artifact_tags WHERE tag_id = ? PreparedStatement statement = trans.getConnection().getPreparedStatement(PREPARED_STATEMENT.DELETE_ARTIFACT_TAG); statement.clearParameters(); statement.setLong(1, tag.getId()); trans.getConnection().executeUpdate(statement); // update the aggregate score for the artifact Long artifactObjId = tag.getArtifact().getId(); Long dataSourceId = tag.getContent() != null && tag.getContent().getDataSource() != null ? tag.getContent().getDataSource().getId() : null; this.getScoringManager().updateAggregateScoreAfterDeletion(artifactObjId, dataSourceId, trans); trans.commit(); trans = null; } catch (SQLException ex) { throw new TskCoreException("Error deleting row from blackboard_artifact_tags table (id = " + tag.getId() + ")", ex); } finally { if (trans != null) { trans.rollback(); } } } /** * Selects all of the rows from the blackboard_artifacts_tags table in the * case database. 
* * @return A list, possibly empty, of BlackboardArtifactTag data transfer * objects (DTOs) for the rows. * * @throws TskCoreException */ public List getAllBlackboardArtifactTags() throws TskCoreException { CaseDbConnection connection = null; ResultSet resultSet = null; acquireSingleUserCaseReadLock(); try { connection = connections.getConnection(); // SELECT blackboard_artifact_tags.tag_id, blackboard_artifact_tags.artifact_id, blackboard_artifact_tags.tag_name_id, blackboard_artifact_tags.comment, tag_names.display_name, tag_names.description, tag_names.color, tag_names.knownStatus, tsk_examiners.login_name // FROM blackboard_artifact_tags // INNER JOIN tag_names ON blackboard_artifact_tags.tag_name_id = tag_names.tag_name_id // LEFT OUTER JOIN tsk_examiners ON blackboard_artifact_tags.examiner_id = tsk_examiners.examiner_id PreparedStatement statement = connection.getPreparedStatement(PREPARED_STATEMENT.SELECT_ARTIFACT_TAGS); resultSet = connection.executeQuery(statement); ArrayList tags = new ArrayList<>(); while (resultSet.next()) { TagName tagName = new TagName(resultSet.getLong("tag_name_id"), resultSet.getString("display_name"), resultSet.getString("description"), TagName.HTML_COLOR.getColorByName(resultSet.getString("color")), TskData.FileKnown.valueOf(resultSet.getByte("knownStatus")), resultSet.getLong("tag_set_id"), resultSet.getInt("rank")); //NON-NLS BlackboardArtifact artifact = getBlackboardArtifact(resultSet.getLong("artifact_id")); //NON-NLS Content content = getContentById(artifact.getObjectID()); BlackboardArtifactTag tag = new BlackboardArtifactTag(resultSet.getLong("tag_id"), artifact, content, tagName, resultSet.getString("comment"), resultSet.getString("login_name")); //NON-NLS tags.add(tag); } return tags; } catch (SQLException ex) { throw new TskCoreException("Error selecting rows from blackboard_artifact_tags table", ex); } finally { closeResultSet(resultSet); closeConnection(connection); releaseSingleUserCaseReadLock(); } } /** * Gets a 
count of the rows in the blackboard_artifact_tags table in the * case database with a specified foreign key into the tag_names table. * * @param tagName A data transfer object (DTO) for the tag name to match. * * @return The count, possibly zero. * * @throws TskCoreException */ public long getBlackboardArtifactTagsCountByTagName(TagName tagName) throws TskCoreException { if (tagName.getId() == Tag.ID_NOT_SET) { throw new TskCoreException("TagName object is invalid, id not set"); } CaseDbConnection connection = null; ResultSet resultSet = null; acquireSingleUserCaseReadLock(); try { connection = connections.getConnection(); // SELECT COUNT(*) AS count FROM blackboard_artifact_tags WHERE tag_name_id = ? PreparedStatement statement = connection.getPreparedStatement(PREPARED_STATEMENT.COUNT_ARTIFACTS_BY_TAG_NAME); statement.clearParameters(); statement.setLong(1, tagName.getId()); resultSet = connection.executeQuery(statement); if (resultSet.next()) { return resultSet.getLong("count"); } else { throw new TskCoreException("Error getting blackboard_artifact_tags row count for tag name (tag_name_id = " + tagName.getId() + ")"); } } catch (SQLException ex) { throw new TskCoreException("Error getting blackboard artifact_content_tags row count for tag name (tag_name_id = " + tagName.getId() + ")", ex); } finally { closeResultSet(resultSet); closeConnection(connection); releaseSingleUserCaseReadLock(); } } /** * Gets an artifact tags count by tag name, for the given data source. * * @param tagName The representation of the desired tag type in the case * database, which can be obtained by calling getTagNames * and/or addTagName. * @param dsObjId data source object id * * @return A count of the artifact tags with the specified tag name, for the * given data source. * * @throws TskCoreException If there is an error getting the tags count from * the case database. 
*/ public long getBlackboardArtifactTagsCountByTagName(TagName tagName, long dsObjId) throws TskCoreException { if (tagName.getId() == Tag.ID_NOT_SET) { throw new TskCoreException("TagName object is invalid, id not set"); } CaseDbConnection connection = null; ResultSet resultSet = null; acquireSingleUserCaseReadLock(); try { connection = connections.getConnection(); // "SELECT COUNT(*) AS count FROM blackboard_artifact_tags as artifact_tags, blackboard_artifacts AS arts WHERE artifact_tags.artifact_id = arts.artifact_id" // + " AND artifact_tags.tag_name_id = ?" // + " AND arts.data_source_obj_id = ? " PreparedStatement statement = connection.getPreparedStatement(PREPARED_STATEMENT.COUNT_ARTIFACTS_BY_TAG_NAME_BY_DATASOURCE); statement.clearParameters(); statement.setLong(1, tagName.getId()); statement.setLong(2, dsObjId); resultSet = connection.executeQuery(statement); if (resultSet.next()) { return resultSet.getLong("count"); } else { throw new TskCoreException("Error getting blackboard_artifact_tags row count for tag name (tag_name_id = " + tagName.getId() + ")" + " for dsObjId = " + dsObjId); } } catch (SQLException ex) { throw new TskCoreException("Failed to get blackboard_artifact_tags row count for tag_name_id = " + tagName.getId() + "data source objID : " + dsObjId, ex); } finally { closeResultSet(resultSet); closeConnection(connection); releaseSingleUserCaseReadLock(); } } /** * Selects the rows in the blackboard_artifacts_tags table in the case * database with a specified foreign key into the tag_names table. * * @param tagName A data transfer object (DTO) for the tag name to match. * * @return A list, possibly empty, of BlackboardArtifactTag data transfer * objects (DTOs) for the rows. 
* * @throws TskCoreException */ public List getBlackboardArtifactTagsByTagName(TagName tagName) throws TskCoreException { if (tagName.getId() == Tag.ID_NOT_SET) { throw new TskCoreException("TagName object is invalid, id not set"); } CaseDbConnection connection = null; ResultSet resultSet = null; acquireSingleUserCaseReadLock(); try { connection = connections.getConnection(); // SELECT blackboard_artifact_tags.tag_id, blackboard_artifact_tags.artifact_id, blackboard_artifact_tags.tag_name_id, blackboard_artifact_tags.comment, tsk_examiners.login_name // FROM blackboard_artifact_tags // LEFT OUTER JOIN tsk_examiners ON blackboard_artifact_tags.examiner_id = tsk_examiners.examiner_id // WHERE tag_name_id = ? PreparedStatement statement = connection.getPreparedStatement(PREPARED_STATEMENT.SELECT_ARTIFACT_TAGS_BY_TAG_NAME); statement.clearParameters(); statement.setLong(1, tagName.getId()); resultSet = connection.executeQuery(statement); ArrayList tags = new ArrayList(); while (resultSet.next()) { BlackboardArtifact artifact = getBlackboardArtifact(resultSet.getLong("artifact_id")); //NON-NLS Content content = getContentById(artifact.getObjectID()); BlackboardArtifactTag tag = new BlackboardArtifactTag(resultSet.getLong("tag_id"), artifact, content, tagName, resultSet.getString("comment"), resultSet.getString("login_name")); //NON-NLS tags.add(tag); } return tags; } catch (SQLException ex) { throw new TskCoreException("Error getting blackboard artifact tags data (tag_name_id = " + tagName.getId() + ")", ex); } finally { closeResultSet(resultSet); closeConnection(connection); releaseSingleUserCaseReadLock(); } } /** * Gets artifact tags by tag name, for specified data source. * * @param tagName The representation of the desired tag type in the case * database, which can be obtained by calling getTagNames * and/or addTagName. 
* @param dsObjId data source object id * * @return A list, possibly empty, of the artifact tags with the specified * tag name, for the specified data source. * * @throws TskCoreException If there is an error getting the tags from the * case database. */ public List getBlackboardArtifactTagsByTagName(TagName tagName, long dsObjId) throws TskCoreException { if (tagName.getId() == Tag.ID_NOT_SET) { throw new TskCoreException("TagName object is invalid, id not set"); } CaseDbConnection connection = null; ResultSet resultSet = null; acquireSingleUserCaseReadLock(); try { connection = connections.getConnection(); // SELECT artifact_tags.tag_id, artifact_tags.artifact_id, artifact_tags.tag_name_id, artifact_tags.comment, arts.obj_id, arts.artifact_obj_id, arts.data_source_obj_id, arts.artifact_type_id, arts.review_status_id, tsk_examiners.login_name // FROM blackboard_artifact_tags as artifact_tags, blackboard_artifacts AS arts // LEFT OUTER JOIN tsk_examiners ON artifact_tags.examiner_id = tsk_examiners.examiner_id // WHERE artifact_tags.artifact_id = arts.artifact_id // AND artifact_tags.tag_name_id = ? // AND arts.data_source_obj_id = ? 
PreparedStatement statement = connection.getPreparedStatement(PREPARED_STATEMENT.SELECT_ARTIFACT_TAGS_BY_TAG_NAME_BY_DATASOURCE); statement.clearParameters(); statement.setLong(1, tagName.getId()); statement.setLong(2, dsObjId); resultSet = connection.executeQuery(statement); ArrayList tags = new ArrayList(); while (resultSet.next()) { BlackboardArtifact artifact = getBlackboardArtifact(resultSet.getLong("artifact_id")); //NON-NLS Content content = getContentById(artifact.getObjectID()); BlackboardArtifactTag tag = new BlackboardArtifactTag(resultSet.getLong("tag_id"), artifact, content, tagName, resultSet.getString("comment"), resultSet.getString("login_name")); //NON-NLS tags.add(tag); } return tags; } catch (SQLException ex) { throw new TskCoreException("Failed to get blackboard_artifact_tags row count for tag_name_id = " + tagName.getId() + "data source objID : " + dsObjId, ex); } finally { closeResultSet(resultSet); closeConnection(connection); releaseSingleUserCaseReadLock(); } } /** * Selects the row in the blackboard artifact tags table in the case * database with a specified tag id. * * @param artifactTagID the tag id of the BlackboardArtifactTag to retrieve. 
 *
 * @return the BlackBoardArtifact Tag with the given tag id, or null if no
 *         such tag could be found
 *
 * @throws TskCoreException
 */
public BlackboardArtifactTag getBlackboardArtifactTagByID(long artifactTagID) throws TskCoreException {
	CaseDbConnection connection = null;
	ResultSet resultSet = null;
	BlackboardArtifactTag tag = null;
	acquireSingleUserCaseReadLock();
	try {
		connection = connections.getConnection();
		//SELECT blackboard_artifact_tags.tag_id, blackboard_artifact_tags.artifact_id, blackboard_artifact_tags.tag_name_id, blackboard_artifact_tags.comment, tag_names.display_name, tag_names.description, tag_names.color, tag_names.knownStatus, tsk_examiners.login_name
		// FROM blackboard_artifact_tags
		// INNER JOIN tag_names ON blackboard_artifact_tags.tag_name_id = tag_names.tag_name_id
		// LEFT OUTER JOIN tsk_examiners ON blackboard_artifact_tags.examiner_id = tsk_examiners.examiner_id
		// WHERE blackboard_artifact_tags.tag_id = ?
		PreparedStatement statement = connection.getPreparedStatement(PREPARED_STATEMENT.SELECT_ARTIFACT_TAG_BY_ID);
		statement.clearParameters();
		statement.setLong(1, artifactTagID);
		resultSet = connection.executeQuery(statement);
		// tag stays null when no row matches; presumably tag_id is unique so the
		// loop runs at most once — TODO confirm against the schema.
		while (resultSet.next()) {
			TagName tagName = new TagName(resultSet.getLong("tag_name_id"), resultSet.getString("display_name"),
					resultSet.getString("description"), TagName.HTML_COLOR.getColorByName(resultSet.getString("color")),
					TskData.FileKnown.valueOf(resultSet.getByte("knownStatus")), resultSet.getLong("tag_set_id"), resultSet.getInt("rank"));
			BlackboardArtifact artifact = getBlackboardArtifact(resultSet.getLong("artifact_id")); //NON-NLS
			Content content = getContentById(artifact.getObjectID());
			tag = new BlackboardArtifactTag(resultSet.getLong("tag_id"),
					artifact, content, tagName, resultSet.getString("comment"), resultSet.getString("login_name"));
		}
		resultSet.close();
	} catch (SQLException ex) {
		throw new TskCoreException("Error getting blackboard artifact tag with id = " + artifactTagID, ex);
	} finally {
		closeResultSet(resultSet);
		closeConnection(connection);
		releaseSingleUserCaseReadLock();
	}
	return tag;
}

/**
 * Selects the rows in the blackboard_artifacts_tags table in the case
 * database with a specified foreign key into the blackboard_artifacts
 * table.
 *
 * @param artifact A data transfer object (DTO) for the artifact to match.
 *
 * @return A list, possibly empty, of BlackboardArtifactTag data transfer
 *         objects (DTOs) for the rows.
 *
 * @throws TskCoreException
 */
public List getBlackboardArtifactTagsByArtifact(BlackboardArtifact artifact) throws TskCoreException {
	CaseDbConnection connection = null;
	ResultSet resultSet = null;
	acquireSingleUserCaseReadLock();
	try {
		connection = connections.getConnection();
		// SELECT blackboard_artifact_tags.tag_id, blackboard_artifact_tags.artifact_id, blackboard_artifact_tags.tag_name_id, blackboard_artifact_tags.comment, tag_names.display_name, tag_names.description, tag_names.color, tag_names.knownStatus, tsk_examiners.login_name
		// FROM blackboard_artifact_tags
		// INNER JOIN tag_names ON blackboard_artifact_tags.tag_name_id = tag_names.tag_name_id
		// LEFT OUTER JOIN tsk_examiners ON blackboard_artifact_tags.examiner_id = tsk_examiners.examiner_id
		// WHERE blackboard_artifact_tags.artifact_id = ?
		PreparedStatement statement = connection.getPreparedStatement(PREPARED_STATEMENT.SELECT_ARTIFACT_TAGS_BY_ARTIFACT);
		statement.clearParameters();
		statement.setLong(1, artifact.getArtifactID());
		resultSet = connection.executeQuery(statement);
		ArrayList tags = new ArrayList<>();
		while (resultSet.next()) {
			TagName tagName = new TagName(resultSet.getLong("tag_name_id"), resultSet.getString("display_name"),
					resultSet.getString("description"), TagName.HTML_COLOR.getColorByName(resultSet.getString("color")),
					TskData.FileKnown.valueOf(resultSet.getByte("knownStatus")), resultSet.getLong("tag_set_id"), resultSet.getInt("rank")); //NON-NLS
			// All rows share the caller-supplied artifact, so its content is the same
			// for every iteration.
			Content content = getContentById(artifact.getObjectID());
			BlackboardArtifactTag tag = new BlackboardArtifactTag(resultSet.getLong("tag_id"),
					artifact, content, tagName, resultSet.getString("comment"), resultSet.getString("login_name")); //NON-NLS
			tags.add(tag);
		}
		return tags;
	} catch (SQLException ex) {
		throw new TskCoreException("Error getting blackboard artifact tags data (artifact_id = " + artifact.getArtifactID() + ")", ex);
	} finally {
		closeResultSet(resultSet);
		closeConnection(connection);
		releaseSingleUserCaseReadLock();
	}
}

/**
 * Change the path for an image in the database.
 *
 * @param newPath  New path to the image
 * @param objectId Data source ID of the image
 *
 * @throws TskCoreException
 */
public void updateImagePath(String newPath, long objectId) throws TskCoreException {
	acquireSingleUserCaseWriteLock();
	try (CaseDbConnection connection = connections.getConnection();) {
		// UPDATE tsk_image_names SET name = ? WHERE obj_id = ?
		PreparedStatement statement = connection.getPreparedStatement(PREPARED_STATEMENT.UPDATE_IMAGE_PATH);
		statement.clearParameters();
		statement.setString(1, newPath);
		statement.setLong(2, objectId);
		connection.executeUpdate(statement);
	} catch (SQLException ex) {
		throw new TskCoreException("Error updating image path in database for object " + objectId, ex);
	} finally {
		releaseSingleUserCaseWriteLock();
	}
}

/**
 * Inserts a row into the reports table in the case database.
 *
 * @param localPath        The path of the report file, must be in the
 *                         database directory (case directory in Autopsy) or
 *                         one of its subdirectories.
 * @param sourceModuleName The name of the module that created the report.
 * @param reportName       The report name.
 *
 * @return A Report object for the new row.
 *
 * @throws TskCoreException
 */
public Report addReport(String localPath, String sourceModuleName, String reportName) throws TskCoreException {
	// Convenience overload: delegates with no parent content.
	return addReport(localPath, sourceModuleName, reportName, null);
}

/**
 * Inserts a row into the reports table in the case database.
 *
 * @param localPath        The path of the report file, must be in the
 *                         database directory (case directory in Autopsy) or
 *                         one of its subdirectories.
 * @param sourceModuleName The name of the module that created the report.
 * @param reportName       The report name.
 * @param parent           The Content from which the report was created, if
 *                         available.
 *
 * @return A Report object for the new row.
 *
 * @throws TskCoreException
 */
public Report addReport(String localPath, String sourceModuleName, String reportName, Content parent) throws TskCoreException {
	// Make sure the local path of the report is in the database directory
	// or one of its subdirectories.
	String relativePath = ""; //NON-NLS
	long createTime = 0;
	String localPathLower = localPath.toLowerCase();
	if (localPathLower.startsWith("http")) {
		// URL "reports" are stored verbatim and stamped with the current time.
		relativePath = localPathLower;
		createTime = System.currentTimeMillis() / 1000;
	} else {
		/*
		 * Note: The following call to .relativize() may be dangerous in
		 * case-sensitive operating systems and should be looked at. For
		 * now, we are simply relativizing the paths as all lower case, then
		 * using the length of the result to pull out the appropriate number
		 * of characters from the localPath String.
		 */
		try {
			String casePathLower = getDbDirPath().toLowerCase();
			int length = new File(casePathLower).toURI().relativize(new File(localPathLower).toURI()).getPath().length();
			relativePath = new File(localPath.substring(localPathLower.length() - length)).getPath();
		} catch (IllegalArgumentException ex) {
			String errorMessage = String.format("Local path %s not in the database directory or one of its subdirectories", localPath);
			throw new TskCoreException(errorMessage, ex);
		}
		try {
			// get its file time
			java.io.File tempFile = new java.io.File(localPath);
			// Convert to UNIX epoch (seconds, not milliseconds).
			createTime = tempFile.lastModified() / 1000;
		} catch (Exception ex) {
			throw new TskCoreException("Could not get create time for report at " + localPath, ex);
		}
	}
	// Write the report data to the database.
	acquireSingleUserCaseWriteLock();
	try (CaseDbConnection connection = connections.getConnection();) {
		// Insert a row for the report into the tsk_objects table.
		// INSERT INTO tsk_objects (par_obj_id, type) VALUES (?, ?)
		long parentObjId = 0;
		if (parent != null) {
			parentObjId = parent.getId();
		}
		long objectId = addObject(parentObjId, TskData.ObjectType.REPORT.getObjectType(), connection);
		// INSERT INTO reports (obj_id, path, crtime, src_module_name, display_name) VALUES (?, ?, ?, ?, ?)
		PreparedStatement statement = connection.getPreparedStatement(PREPARED_STATEMENT.INSERT_REPORT);
		statement.clearParameters();
		statement.setLong(1, objectId);
		statement.setString(2, relativePath);
		statement.setLong(3, createTime);
		statement.setString(4, sourceModuleName);
		statement.setString(5, reportName);
		connection.executeUpdate(statement);
		return new Report(this, objectId, localPath, createTime, sourceModuleName, reportName, parent);
	} catch (SQLException ex) {
		throw new TskCoreException("Error adding report " + localPath + " to reports table", ex);
	} finally {
		releaseSingleUserCaseWriteLock();
	}
}

/**
 * Selects all of the rows from the reports table in the case database.
 *
 * @return A list, possibly empty, of Report data transfer objects (DTOs)
 *         for the rows.
 *
 * @throws TskCoreException
 */
public List getAllReports() throws TskCoreException {
	CaseDbConnection connection = null;
	ResultSet resultSet = null;
	ResultSet parentResultSet = null;
	PreparedStatement statement = null;
	Statement parentStatement = null;
	acquireSingleUserCaseReadLock();
	try {
		connection = connections.getConnection();
		// SELECT * FROM reports
		statement = connection.getPreparedStatement(PREPARED_STATEMENT.SELECT_REPORTS);
		parentStatement = connection.createStatement();
		resultSet = connection.executeQuery(statement);
		ArrayList reports = new ArrayList();
		while (resultSet.next()) {
			String localpath = resultSet.getString("path");
			if (localpath.toLowerCase().startsWith("http") == false) {
				// make path absolute
				localpath = Paths.get(getDbDirPath(), localpath).normalize().toString(); //NON-NLS
			}
			// get the report parent
			Content parent = null;
			long reportId = resultSet.getLong("obj_id"); // NON-NLS
			String parentQuery = String.format("SELECT * FROM tsk_objects WHERE obj_id = %s;", reportId);
			parentResultSet = parentStatement.executeQuery(parentQuery);
			if (parentResultSet.next()) {
				long parentId = parentResultSet.getLong("par_obj_id"); // NON-NLS
				parent = this.getContentById(parentId);
			}
			parentResultSet.close();
			reports.add(new Report(this,
					reportId,
					localpath,
					resultSet.getLong("crtime"), //NON-NLS
					resultSet.getString("src_module_name"), //NON-NLS
					resultSet.getString("report_name"),
					parent)); //NON-NLS
		}
		return reports;
	} catch (SQLException ex) {
		throw new TskCoreException("Error querying reports table", ex);
	} finally {
		closeResultSet(resultSet);
		closeResultSet(parentResultSet);
		closeStatement(statement);
		closeStatement(parentStatement);
		closeConnection(connection);
		releaseSingleUserCaseReadLock();
	}
}

/**
 * Get a Report object for the given id.
 *
 * @param id
 *
 * @return A new Report object for the given id.
 *
 * @throws TskCoreException
 */
public Report getReportById(long id) throws TskCoreException {
	CaseDbConnection connection = null;
	PreparedStatement statement = null;
	Statement parentStatement = null;
	ResultSet resultSet = null;
	ResultSet parentResultSet = null;
	Report report = null;
	acquireSingleUserCaseReadLock();
	try {
		connection = connections.getConnection();
		// SELECT * FROM reports WHERE obj_id = ?
		statement = connection.getPreparedStatement(PREPARED_STATEMENT.SELECT_REPORT_BY_ID);
		parentStatement = connection.createStatement();
		statement.clearParameters();
		statement.setLong(1, id);
		resultSet = connection.executeQuery(statement);
		if (resultSet.next()) {
			// get the report parent
			Content parent = null;
			String parentQuery = String.format("SELECT * FROM tsk_objects WHERE obj_id = %s;", id);
			parentResultSet = parentStatement.executeQuery(parentQuery);
			if (parentResultSet.next()) {
				long parentId = parentResultSet.getLong("par_obj_id"); // NON-NLS
				parent = this.getContentById(parentId);
			}
			report = new Report(this, resultSet.getLong("obj_id"), //NON-NLS
					Paths.get(getDbDirPath(), resultSet.getString("path")).normalize().toString(), //NON-NLS
					resultSet.getLong("crtime"), //NON-NLS
					resultSet.getString("src_module_name"), //NON-NLS
					resultSet.getString("report_name"),
					parent); //NON-NLS
		} else {
			throw new TskCoreException("No report found for id: " + id);
		}
	} catch (SQLException ex) {
		throw new TskCoreException("Error querying reports table for id: " + id, ex);
	} finally {
		closeResultSet(resultSet);
		closeResultSet(parentResultSet);
		closeStatement(statement);
		closeStatement(parentStatement);
		closeConnection(connection);
		releaseSingleUserCaseReadLock();
	}
	return report;
}

/**
 * Deletes a row from the reports table in the case database.
 *
 * @param report A Report data transfer object (DTO) for the row to delete.
 *
 * @throws TskCoreException
 */
public void deleteReport(Report report) throws TskCoreException {
	acquireSingleUserCaseWriteLock();
	try (CaseDbConnection connection = connections.getConnection();) {
		// DELETE FROM reports WHERE reports.obj_id = ?
		PreparedStatement statement = connection.getPreparedStatement(PREPARED_STATEMENT.DELETE_REPORT);
		statement.setLong(1, report.getId());
		connection.executeUpdate(statement);
		// DELETE FROM tsk_objects WHERE tsk_objects.obj_id = ?
		statement = connection.getPreparedStatement(PREPARED_STATEMENT.DELETE_REPORT_TSK_OBJECT);
		statement.setLong(1, report.getId());
		statement.setLong(2, TskData.ObjectType.REPORT.getObjectType());
		connection.executeUpdate(statement);
	} catch (SQLException ex) {
		// NOTE(review): message says "querying" but this is a delete — consider
		// correcting the wording.
		throw new TskCoreException("Error querying reports table", ex);
	} finally {
		releaseSingleUserCaseWriteLock();
	}
}

// Quietly closes a result set; logs (never throws) on failure.
static void closeResultSet(ResultSet resultSet) {
	if (resultSet != null) {
		try {
			resultSet.close();
		} catch (SQLException ex) {
			logger.log(Level.SEVERE, "Error closing ResultSet", ex); //NON-NLS
		}
	}
}

// Quietly closes a statement; logs (never throws) on failure.
static void closeStatement(Statement statement) {
	if (statement != null) {
		try {
			statement.close();
		} catch (SQLException ex) {
			logger.log(Level.SEVERE, "Error closing Statement", ex); //NON-NLS
		}
	}
}

// Returns a pooled/case connection; null-safe.
static void closeConnection(CaseDbConnection connection) {
	if (connection != null) {
		connection.close();
	}
}

// Null-safe transaction rollback used by error paths.
private static void rollbackTransaction(CaseDbConnection connection) {
	if (connection != null) {
		connection.rollbackTransaction();
	}
}

/**
 * Sets the end date for the given ingest job
 *
 * @param ingestJobId The ingest job to set the end date for
 * @param endDateTime The end date
 *
 * @throws TskCoreException If inserting into the database fails
 */
void setIngestJobEndDateTime(long ingestJobId, long endDateTime) throws TskCoreException {
	acquireSingleUserCaseWriteLock();
	try (CaseDbConnection connection = connections.getConnection();) {
		// NOTE(review): this Statement is not in the try-with-resources and is
		// never closed explicitly — potential resource leak.
		Statement statement = connection.createStatement();
		statement.executeUpdate("UPDATE ingest_jobs SET end_date_time=" + endDateTime + " WHERE ingest_job_id=" + ingestJobId + ";");
	} catch (SQLException ex) {
		throw new TskCoreException("Error updating the end date (ingest_job_id = " + ingestJobId + ".", ex);
	} finally {
		releaseSingleUserCaseWriteLock();
	}
}

// Updates the status column of the given ingest job row.
void setIngestJobStatus(long ingestJobId, IngestJobStatusType status) throws TskCoreException {
	acquireSingleUserCaseWriteLock();
	try (CaseDbConnection connection = connections.getConnection();
			Statement statement = connection.createStatement();) {
		statement.executeUpdate("UPDATE ingest_jobs SET status_id=" + status.ordinal() + " WHERE ingest_job_id=" + ingestJobId + ";");
	} catch (SQLException ex) {
		throw new TskCoreException("Error ingest job status (ingest_job_id = " + ingestJobId + ".", ex);
	} finally {
		releaseSingleUserCaseWriteLock();
	}
}

/**
 *
 * @param dataSource    The datasource the ingest job is being run on
 * @param hostName      The name of the host
 * @param ingestModules The ingest modules being run during the ingest job.
 *                      Should be in pipeline order.
 * @param jobStart      The time the job started
 * @param jobEnd        The time the job ended
 * @param status        The ingest job status
 * @param settingsDir   The directory of the job's settings
 *
 * @return An information object representing the ingest job added to the
 *         database.
 *
 * @throws TskCoreException If adding the job to the database fails.
 */
public final IngestJobInfo addIngestJob(Content dataSource, String hostName, List ingestModules, Date jobStart, Date jobEnd, IngestJobStatusType status, String settingsDir) throws TskCoreException {
	CaseDbConnection connection = null;
	acquireSingleUserCaseWriteLock();
	ResultSet resultSet = null;
	Statement statement;
	try {
		connection = connections.getConnection();
		// The job row and its module rows are inserted atomically.
		connection.beginTransaction();
		statement = connection.createStatement();
		PreparedStatement insertStatement = connection.getPreparedStatement(PREPARED_STATEMENT.INSERT_INGEST_JOB, Statement.RETURN_GENERATED_KEYS);
		insertStatement.setLong(1, dataSource.getId());
		insertStatement.setString(2, hostName);
		insertStatement.setLong(3, jobStart.getTime());
		insertStatement.setLong(4, jobEnd.getTime());
		insertStatement.setInt(5, status.ordinal());
		insertStatement.setString(6, settingsDir);
		connection.executeUpdate(insertStatement);
		resultSet = insertStatement.getGeneratedKeys();
		resultSet.next();
		long id = resultSet.getLong(1); //last_insert_rowid()
		// Record each module with its pipeline position.
		for (int i = 0; i < ingestModules.size(); i++) {
			IngestModuleInfo ingestModule = ingestModules.get(i);
			statement.executeUpdate("INSERT INTO ingest_job_modules (ingest_job_id, ingest_module_id, pipeline_position) "
					+ "VALUES (" + id + ", " + ingestModule.getIngestModuleId() + ", " + i + ");");
		}
		resultSet.close();
		resultSet = null;
		connection.commitTransaction();
		return new IngestJobInfo(id, dataSource.getId(), hostName, jobStart, "", ingestModules, this);
	} catch (SQLException ex) {
		rollbackTransaction(connection);
		throw new TskCoreException("Error adding the ingest job.", ex);
	} finally {
		closeResultSet(resultSet);
		closeConnection(connection);
		releaseSingleUserCaseWriteLock();
	}
}

/**
 * Adds the given ingest module to the database.
 *
 * @param displayName      The display name of the module
 * @param factoryClassName The factory class name of the module.
 * @param type             The type of the module.
 * @param version          The version of the module.
 *
 * @return An ingest module info object representing the module added to the
 *         db.
 *
 * @throws TskCoreException When the ingest module cannot be added.
*/ public final IngestModuleInfo addIngestModule(String displayName, String factoryClassName, IngestModuleType type, String version) throws TskCoreException { CaseDbConnection connection = null; ResultSet resultSet = null; Statement statement = null; String uniqueName = factoryClassName + "-" + displayName + "-" + version; acquireSingleUserCaseWriteLock(); try { connection = connections.getConnection(); statement = connection.createStatement(); resultSet = statement.executeQuery("SELECT * FROM ingest_modules WHERE unique_name = '" + uniqueName + "'"); if (!resultSet.next()) { resultSet.close(); resultSet = null; PreparedStatement insertStatement = connection.getPreparedStatement(PREPARED_STATEMENT.INSERT_INGEST_MODULE, Statement.RETURN_GENERATED_KEYS); insertStatement.setString(1, displayName); insertStatement.setString(2, uniqueName); insertStatement.setInt(3, type.ordinal()); insertStatement.setString(4, version); connection.executeUpdate(insertStatement); resultSet = statement.getGeneratedKeys(); resultSet.next(); long id = resultSet.getLong(1); //last_insert_rowid() resultSet.close(); resultSet = null; return new IngestModuleInfo(id, displayName, uniqueName, type, version); } else { return new IngestModuleInfo(resultSet.getInt("ingest_module_id"), resultSet.getString("display_name"), resultSet.getString("unique_name"), IngestModuleType.fromID(resultSet.getInt("type_id")), resultSet.getString("version")); } } catch (SQLException ex) { try { closeStatement(statement); if (connection != null) { statement = connection.createStatement(); resultSet = statement.executeQuery("SELECT * FROM ingest_modules WHERE unique_name = '" + uniqueName + "'"); if (resultSet.next()) { return new IngestModuleInfo(resultSet.getInt("ingest_module_id"), resultSet.getString("display_name"), uniqueName, IngestModuleType.fromID(resultSet.getInt("type_id")), resultSet.getString("version")); } } throw new TskCoreException("Couldn't add new module to database.", ex); } catch (SQLException 
ex1) { throw new TskCoreException("Couldn't add new module to database.", ex1); } } finally { closeResultSet(resultSet); closeStatement(statement); closeConnection(connection); releaseSingleUserCaseWriteLock(); } } /** * Gets all of the ingest jobs that have been run. * * @return The information about the ingest jobs that have been run * * @throws TskCoreException If there is a problem getting the ingest jobs */ public final List getIngestJobs() throws TskCoreException { CaseDbConnection connection = null; ResultSet resultSet = null; Statement statement = null; List ingestJobs = new ArrayList<>(); acquireSingleUserCaseReadLock(); try { connection = connections.getConnection(); statement = connection.createStatement(); resultSet = statement.executeQuery("SELECT * FROM ingest_jobs"); while (resultSet.next()) { ingestJobs.add(new IngestJobInfo(resultSet.getInt("ingest_job_id"), resultSet.getLong("obj_id"), resultSet.getString("host_name"), new Date(resultSet.getLong("start_date_time")), new Date(resultSet.getLong("end_date_time")), IngestJobStatusType.fromID(resultSet.getInt("status_id")), resultSet.getString("settings_dir"), this.getIngestModules(resultSet.getInt("ingest_job_id"), connection), this)); } return ingestJobs; } catch (SQLException ex) { throw new TskCoreException("Couldn't get the ingest jobs.", ex); } finally { closeResultSet(resultSet); closeStatement(statement); closeConnection(connection); releaseSingleUserCaseReadLock(); } } /** * Gets the ingest modules associated with the ingest job * * @param ingestJobId The id of the ingest job to get ingest modules for * @param connection The database connection * * @return The ingest modules of the job * * @throws SQLException If it fails to get the modules from the db. 
 */
private List getIngestModules(int ingestJobId, CaseDbConnection connection) throws SQLException {
	ResultSet resultSet = null;
	Statement statement = null;
	List ingestModules = new ArrayList<>();
	acquireSingleUserCaseReadLock();
	try {
		statement = connection.createStatement();
		// Join job-module rows to the module table, ordered by pipeline position.
		resultSet = statement.executeQuery("SELECT ingest_job_modules.ingest_module_id AS ingest_module_id, "
				+ "ingest_job_modules.pipeline_position AS pipeline_position, "
				+ "ingest_modules.display_name AS display_name, ingest_modules.unique_name AS unique_name, "
				+ "ingest_modules.type_id AS type_id, ingest_modules.version AS version "
				+ "FROM ingest_job_modules, ingest_modules "
				+ "WHERE ingest_job_modules.ingest_job_id = " + ingestJobId + " "
				+ "AND ingest_modules.ingest_module_id = ingest_job_modules.ingest_module_id "
				+ "ORDER BY (ingest_job_modules.pipeline_position);");
		while (resultSet.next()) {
			ingestModules.add(new IngestModuleInfo(resultSet.getInt("ingest_module_id"),
					resultSet.getString("display_name"), resultSet.getString("unique_name"),
					IngestModuleType.fromID(resultSet.getInt("type_id")), resultSet.getString("version")));
		}
		return ingestModules;
	} finally {
		closeResultSet(resultSet);
		closeStatement(statement);
		releaseSingleUserCaseReadLock();
	}
}

/**
 * Builds "INSERT or IGNORE ....", or "INSERT .... ON CONFLICT DO NOTHING"
 * insert SQL, based on the database type being used, using the given base
 * SQL.
 *
 * @param sql Base insert SQL - "INTO xyz ...."
 *
 * @return SQL string.
 */
String getInsertOrIgnoreSQL(String sql) {
	switch (getDatabaseType()) {
		case POSTGRESQL:
			return " INSERT " + sql + " ON CONFLICT DO NOTHING "; //NON-NLS
		case SQLITE:
			return " INSERT OR IGNORE " + sql; //NON-NLS
		default:
			throw new UnsupportedOperationException("Unsupported DB type: " + getDatabaseType().name());
	}
}

/**
 * Returns a list of Blackboard artifact whose values in dbColumn match the
 * The method will generate an SQL OR statement that can be
 * used as part of a where clause to retrieve artifacts for a set of values.
 *
 * For example getArtifactsForValues("artifacts.artifact_obj_id",
 * artifactObjIdList) will return a list of artifacts for the artifactObjID
 * values in the given list.
 *
 * When using this method be sure to use the tables as nicknamed in
 * DATA_ARTIFACT_QUERY_STRING and ANALYSIS_RESULT_QUERY_STRING.
 *
 * @param category   The type of artifacts to return.
 * @param dbColumn   The database column.
 * @param values     List of values.
 * @param connection The case database connection to use.
 *
 * @return A list of BlackboardArtifacts
 *
 * @throws TskCoreException
 */
private List getArtifactsForValues(BlackboardArtifact.Category category, String dbColumn, List values, CaseDbConnection connection) throws TskCoreException {
    String where = "";
    // This loop creates the OR statement with the following format:
    // <dbColumn> = <value1> OR <dbColumn> = <value2> OR ...
    // NOTE(review): an empty values list yields an empty WHERE clause;
    // presumably callers always pass at least one value — confirm.
    for (Number value : values) {
        if (!where.isEmpty()) {
            where += " OR ";
        }
        where += dbColumn + " = " + value;
    }
    // Based on the category, pass the OR statement to the appropriate method
    // that will retrieve the artifacts.
    if (category == BlackboardArtifact.Category.DATA_ARTIFACT) {
        return blackboard.getDataArtifactsWhere(where, connection);
    } else {
        return blackboard.getAnalysisResultsWhere(where, connection);
    }
}

/**
 * Stores a pair of object ID and its type.
 */
static class ObjectInfo {

    private long id;                 // tsk_objects.obj_id
    private TskData.ObjectType type; // tsk_objects.type

    ObjectInfo(long id, ObjectType type) {
        this.id = id;
        this.type = type;
    }

    long getId() {
        return id;
    }

    TskData.ObjectType getType() {
        return type;
    }
}

/**
 * A single database operation that a CaseDbConnection can run via
 * executeCommand().
 */
private interface DbCommand {

    void execute() throws SQLException;
}

/**
 * SQL templates for the statements that are lazily prepared and cached per
 * connection (see CaseDbConnection.getPreparedStatement()).
 */
private enum PREPARED_STATEMENT {

    SELECT_ARTIFACTS_BY_TYPE("SELECT artifact_id, obj_id FROM blackboard_artifacts " //NON-NLS
            + "WHERE artifact_type_id = ?"), //NON-NLS
    COUNT_ARTIFACTS_OF_TYPE("SELECT COUNT(*) AS count FROM blackboard_artifacts WHERE artifact_type_id = ? AND review_status_id != " + BlackboardArtifact.ReviewStatus.REJECTED.getID()), //NON-NLS
    COUNT_ARTIFACTS_OF_TYPE_BY_DATA_SOURCE("SELECT COUNT(*) AS count FROM blackboard_artifacts WHERE data_source_obj_id = ? AND artifact_type_id = ? AND review_status_id != " + BlackboardArtifact.ReviewStatus.REJECTED.getID()), //NON-NLS
    COUNT_ARTIFACTS_FROM_SOURCE("SELECT COUNT(*) AS count FROM blackboard_artifacts WHERE obj_id = ? AND review_status_id != " + BlackboardArtifact.ReviewStatus.REJECTED.getID()), //NON-NLS
    COUNT_ARTIFACTS_BY_SOURCE_AND_TYPE("SELECT COUNT(*) AS count FROM blackboard_artifacts WHERE obj_id = ? AND artifact_type_id = ? AND review_status_id != " + BlackboardArtifact.ReviewStatus.REJECTED.getID()), //NON-NLS
    SELECT_FILES_BY_PARENT("SELECT tsk_files.* " //NON-NLS
            + "FROM tsk_objects INNER JOIN tsk_files " //NON-NLS
            + "ON tsk_objects.obj_id=tsk_files.obj_id " //NON-NLS
            + "WHERE (tsk_objects.par_obj_id = ? ) " //NON-NLS
            + "ORDER BY tsk_files.meta_type DESC, LOWER(tsk_files.name)"), //NON-NLS
    SELECT_FILES_BY_PARENT_AND_TYPE("SELECT tsk_files.* " //NON-NLS
            + "FROM tsk_objects INNER JOIN tsk_files " //NON-NLS
            + "ON tsk_objects.obj_id=tsk_files.obj_id " //NON-NLS
            + "WHERE (tsk_objects.par_obj_id = ? AND tsk_files.type = ? ) " //NON-NLS
            + "ORDER BY tsk_files.dir_type, LOWER(tsk_files.name)"), //NON-NLS
    SELECT_FILES_BY_PARENT_AND_NAME("SELECT tsk_files.* " //NON-NLS
            + "FROM tsk_objects INNER JOIN tsk_files " //NON-NLS
            + "ON tsk_objects.obj_id=tsk_files.obj_id " //NON-NLS
            + "WHERE (tsk_objects.par_obj_id = ? AND " //NON-NLS
            + "LOWER(tsk_files.name) LIKE LOWER(?) AND LOWER(tsk_files.name) NOT LIKE LOWER('%journal%')) "//NON-NLS
            + "ORDER BY tsk_files.dir_type, LOWER(tsk_files.name)"), //NON-NLS
    SELECT_FILES_BY_EXTENSION_AND_PARENT_AND_NAME("SELECT tsk_files.* " //NON-NLS
            + "FROM tsk_objects INNER JOIN tsk_files " //NON-NLS
            + "ON tsk_objects.obj_id=tsk_files.obj_id " //NON-NLS
            + "WHERE tsk_files.extension = ? AND "
            + "(tsk_objects.par_obj_id = ? AND " //NON-NLS
            + "LOWER(tsk_files.name) LIKE LOWER(?) AND LOWER(tsk_files.name) NOT LIKE LOWER('%journal%')) "//NON-NLS
            + "ORDER BY tsk_files.dir_type, LOWER(tsk_files.name)"), //NON-NLS
    SELECT_FILE_IDS_BY_PARENT("SELECT tsk_files.obj_id AS obj_id " //NON-NLS
            + "FROM tsk_objects INNER JOIN tsk_files " //NON-NLS
            + "ON tsk_objects.obj_id=tsk_files.obj_id " //NON-NLS
            + "WHERE (tsk_objects.par_obj_id = ?)"), //NON-NLS
    SELECT_FILE_IDS_BY_PARENT_AND_TYPE("SELECT tsk_files.obj_id AS obj_id " //NON-NLS
            + "FROM tsk_objects INNER JOIN tsk_files " //NON-NLS
            + "ON tsk_objects.obj_id=tsk_files.obj_id " //NON-NLS
            + "WHERE (tsk_objects.par_obj_id = ? " //NON-NLS
            + "AND tsk_files.type = ? )"), //NON-NLS
    SELECT_FILE_BY_ID("SELECT * FROM tsk_files WHERE obj_id = ? LIMIT 1"), //NON-NLS
    SELECT_ARTIFACT_BY_ARTIFACT_OBJ_ID("SELECT * FROM blackboard_artifacts WHERE artifact_obj_id = ? LIMIT 1"),
    SELECT_ARTIFACT_TYPE_BY_ARTIFACT_OBJ_ID("SELECT artifact_type_id FROM blackboard_artifacts WHERE artifact_obj_id = ? LIMIT 1"),
    SELECT_ARTIFACT_BY_ARTIFACT_ID("SELECT * FROM blackboard_artifacts WHERE artifact_id = ? LIMIT 1"),
    INSERT_ARTIFACT("INSERT INTO blackboard_artifacts (artifact_id, obj_id, artifact_obj_id, data_source_obj_id, artifact_type_id, review_status_id) " //NON-NLS
            + "VALUES (?, ?, ?, ?, ?," + BlackboardArtifact.ReviewStatus.UNDECIDED.getID() + ")"), //NON-NLS
    POSTGRESQL_INSERT_ARTIFACT("INSERT INTO blackboard_artifacts (artifact_id, obj_id, artifact_obj_id, data_source_obj_id, artifact_type_id, review_status_id) " //NON-NLS
            + "VALUES (DEFAULT, ?, ?, ?, ?," + BlackboardArtifact.ReviewStatus.UNDECIDED.getID() + ")"), //NON-NLS
    INSERT_ANALYSIS_RESULT("INSERT INTO tsk_analysis_results (artifact_obj_id, conclusion, significance, priority, configuration, justification) " //NON-NLS
            + "VALUES (?, ?, ?, ?, ?, ?)"), //NON-NLS
    INSERT_STRING_ATTRIBUTE("INSERT INTO blackboard_attributes (artifact_id, artifact_type_id, source, context, attribute_type_id, value_type, value_text) " //NON-NLS
            + "VALUES (?,?,?,?,?,?,?)"), //NON-NLS
    INSERT_BYTE_ATTRIBUTE("INSERT INTO blackboard_attributes (artifact_id, artifact_type_id, source, context, attribute_type_id, value_type, value_byte) " //NON-NLS
            + "VALUES (?,?,?,?,?,?,?)"), //NON-NLS
    INSERT_INT_ATTRIBUTE("INSERT INTO blackboard_attributes (artifact_id, artifact_type_id, source, context, attribute_type_id, value_type, value_int32) " //NON-NLS
            + "VALUES (?,?,?,?,?,?,?)"), //NON-NLS
    INSERT_LONG_ATTRIBUTE("INSERT INTO blackboard_attributes (artifact_id, artifact_type_id, source, context, attribute_type_id, value_type, value_int64) " //NON-NLS
            + "VALUES (?,?,?,?,?,?,?)"), //NON-NLS
    INSERT_DOUBLE_ATTRIBUTE("INSERT INTO blackboard_attributes (artifact_id, artifact_type_id, source, context, attribute_type_id, value_type, value_double) " //NON-NLS
            + "VALUES (?,?,?,?,?,?,?)"), //NON-NLS
    INSERT_FILE_ATTRIBUTE("INSERT INTO tsk_file_attributes (obj_id, attribute_type_id, value_type, value_byte, value_text, value_int32, value_int64, value_double) " //NON-NLS
            + "VALUES (?,?,?,?,?,?,?,?)"), //NON-NLS
    SELECT_FILES_BY_DATA_SOURCE_AND_NAME("SELECT * FROM tsk_files WHERE LOWER(name) LIKE LOWER(?) AND LOWER(name) NOT LIKE LOWER('%journal%') AND data_source_obj_id = ?"), //NON-NLS
    SELECT_FILES_BY_EXTENSION_AND_DATA_SOURCE_AND_NAME("SELECT * FROM tsk_files WHERE extension = ? AND LOWER(name) LIKE LOWER(?) AND LOWER(name) NOT LIKE LOWER('%journal%') AND data_source_obj_id = ?"), //NON-NLS
    SELECT_FILES_BY_DATA_SOURCE_AND_PARENT_PATH_AND_NAME("SELECT * FROM tsk_files WHERE LOWER(name) LIKE LOWER(?) AND LOWER(name) NOT LIKE LOWER('%journal%') AND LOWER(parent_path) LIKE LOWER(?) AND data_source_obj_id = ?"), //NON-NLS
    SELECT_FILES_BY_EXTENSION_AND_DATA_SOURCE_AND_PARENT_PATH_AND_NAME("SELECT * FROM tsk_files WHERE extension = ? AND LOWER(name) LIKE LOWER(?) AND LOWER(name) NOT LIKE LOWER('%journal%') AND LOWER(parent_path) LIKE LOWER(?) AND data_source_obj_id = ?"), //NON-NLS
    UPDATE_FILE_MD5("UPDATE tsk_files SET md5 = ? WHERE obj_id = ?"), //NON-NLS
    UPDATE_IMAGE_MD5("UPDATE tsk_image_info SET md5 = ? WHERE obj_id = ?"), //NON-NLS
    UPDATE_IMAGE_SHA1("UPDATE tsk_image_info SET sha1 = ? WHERE obj_id = ?"), //NON-NLS
    UPDATE_IMAGE_SHA256("UPDATE tsk_image_info SET sha256 = ? WHERE obj_id = ?"), //NON-NLS
    SELECT_IMAGE_MD5("SELECT md5 FROM tsk_image_info WHERE obj_id = ?"), //NON-NLS
    SELECT_IMAGE_SHA1("SELECT sha1 FROM tsk_image_info WHERE obj_id = ?"), //NON-NLS
    SELECT_IMAGE_SHA256("SELECT sha256 FROM tsk_image_info WHERE obj_id = ?"), //NON-NLS
    UPDATE_ACQUISITION_DETAILS("UPDATE data_source_info SET acquisition_details = ? WHERE obj_id = ?"), //NON-NLS
    UPDATE_ACQUISITION_TOOL_SETTINGS("UPDATE data_source_info SET acquisition_tool_settings = ?, acquisition_tool_name = ?, acquisition_tool_version = ? WHERE obj_id = ?"), //NON-NLS
    SELECT_ACQUISITION_DETAILS("SELECT acquisition_details FROM data_source_info WHERE obj_id = ?"), //NON-NLS
    SELECT_ACQUISITION_TOOL_SETTINGS("SELECT acquisition_tool_settings, acquisition_tool_name, acquisition_tool_version, added_date_time FROM data_source_info WHERE obj_id = ?"), //NON-NLS
    SELECT_LOCAL_PATH_FOR_FILE("SELECT path FROM tsk_files_path WHERE obj_id = ?"), //NON-NLS
    SELECT_ENCODING_FOR_FILE("SELECT encoding_type FROM tsk_files_path WHERE obj_id = ?"), // NON-NLS
    SELECT_LOCAL_PATH_AND_ENCODING_FOR_FILE("SELECT path, encoding_type FROM tsk_files_path WHERE obj_id = ?"), // NON_NLS
    SELECT_PATH_FOR_FILE("SELECT parent_path FROM tsk_files WHERE obj_id = ?"), //NON-NLS
    SELECT_FILE_NAME("SELECT name FROM tsk_files WHERE obj_id = ?"), //NON-NLS
    SELECT_DERIVED_FILE("SELECT derived_id, rederive FROM tsk_files_derived WHERE obj_id = ?"), //NON-NLS
    SELECT_FILE_DERIVATION_METHOD("SELECT tool_name, tool_version, other FROM tsk_files_derived_method WHERE derived_id = ?"), //NON-NLS
    SELECT_MAX_OBJECT_ID("SELECT MAX(obj_id) AS max_obj_id FROM tsk_objects"), //NON-NLS
    INSERT_OBJECT("INSERT INTO tsk_objects (par_obj_id, type) VALUES (?, ?)"), //NON-NLS
    INSERT_FILE("INSERT INTO tsk_files (obj_id, fs_obj_id, name, type, has_path, dir_type, meta_type, dir_flags, meta_flags, size, ctime, crtime, atime, mtime, md5, sha256, known, mime_type, parent_path, data_source_obj_id, extension, owner_uid, os_account_obj_id ) " //NON-NLS
            + "VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)"), //NON-NLS
    INSERT_FILE_SYSTEM_FILE("INSERT INTO tsk_files(obj_id, fs_obj_id, data_source_obj_id, attr_type, attr_id, name, meta_addr, meta_seq, type, has_path, dir_type, meta_type, dir_flags, meta_flags, size, ctime, crtime, atime, mtime, md5, sha256, mime_type, parent_path, extension, owner_uid, os_account_obj_id )"
            + " VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)"), // NON-NLS
    UPDATE_DERIVED_FILE("UPDATE tsk_files SET type = ?, dir_type = ?, meta_type = ?, dir_flags = ?, meta_flags = ?, size= ?, ctime= ?, crtime= ?, atime= ?, mtime= ?, mime_type = ? "
            + "WHERE obj_id = ?"), //NON-NLS
    INSERT_LAYOUT_FILE("INSERT INTO tsk_file_layout (obj_id, byte_start, byte_len, sequence) " //NON-NLS
            + "VALUES (?, ?, ?, ?)"), //NON-NLS
    INSERT_LOCAL_PATH("INSERT INTO tsk_files_path (obj_id, path, encoding_type) VALUES (?, ?, ?)"), //NON-NLS
    UPDATE_LOCAL_PATH("UPDATE tsk_files_path SET path = ?, encoding_type = ? WHERE obj_id = ?"), //NON-NLS
    COUNT_CHILD_OBJECTS_BY_PARENT("SELECT COUNT(obj_id) AS count FROM tsk_objects WHERE par_obj_id = ?"), //NON-NLS
    SELECT_FILE_SYSTEM_BY_OBJECT("SELECT fs_obj_id from tsk_files WHERE obj_id=?"), //NON-NLS
    SELECT_TAG_NAMES("SELECT * FROM tag_names"), //NON-NLS
    SELECT_TAG_NAMES_IN_USE("SELECT * FROM tag_names " //NON-NLS
            + "WHERE tag_name_id IN " //NON-NLS
            + "(SELECT tag_name_id from content_tags UNION SELECT tag_name_id FROM blackboard_artifact_tags)"), //NON-NLS
    SELECT_TAG_NAMES_IN_USE_BY_DATASOURCE("SELECT * FROM tag_names "
            + "WHERE tag_name_id IN "
            + "( SELECT content_tags.tag_name_id as tag_name_id "
            + "FROM content_tags as content_tags, tsk_files as tsk_files"
            + " WHERE content_tags.obj_id = tsk_files.obj_id"
            + " AND tsk_files.data_source_obj_id = ?"
            + " UNION "
            + "SELECT artifact_tags.tag_name_id as tag_name_id "
            + " FROM blackboard_artifact_tags as artifact_tags, blackboard_artifacts AS arts "
            + " WHERE artifact_tags.artifact_id = arts.artifact_id"
            + " AND arts.data_source_obj_id = ?"
            + " )"),
    INSERT_TAG_NAME("INSERT INTO tag_names (display_name, description, color, knownStatus) VALUES (?, ?, ?, ?)"), //NON-NLS
    INSERT_CONTENT_TAG("INSERT INTO content_tags (obj_id, tag_name_id, comment, begin_byte_offset, end_byte_offset, examiner_id) VALUES (?, ?, ?, ?, ?, ?)"), //NON-NLS
    DELETE_CONTENT_TAG("DELETE FROM content_tags WHERE tag_id = ?"), //NON-NLS
    COUNT_CONTENT_TAGS_BY_TAG_NAME("SELECT COUNT(*) AS count FROM content_tags WHERE tag_name_id = ?"), //NON-NLS
    COUNT_CONTENT_TAGS_BY_TAG_NAME_BY_DATASOURCE(
            "SELECT COUNT(*) AS count FROM content_tags as content_tags, tsk_files as tsk_files WHERE content_tags.obj_id = tsk_files.obj_id"
            + " AND content_tags.tag_name_id = ? "
            + " AND tsk_files.data_source_obj_id = ? "
    ),
    SELECT_CONTENT_TAGS("SELECT content_tags.tag_id, content_tags.obj_id, content_tags.tag_name_id, content_tags.comment, content_tags.begin_byte_offset, content_tags.end_byte_offset, tag_names.display_name, tag_names.description, tag_names.color, tag_names.knownStatus, tsk_examiners.login_name, tag_names.tag_set_id, tag_names.rank "
            + "FROM content_tags "
            + "INNER JOIN tag_names ON content_tags.tag_name_id = tag_names.tag_name_id "
            + "LEFT OUTER JOIN tsk_examiners ON content_tags.examiner_id = tsk_examiners.examiner_id"), //NON-NLS
    SELECT_CONTENT_TAGS_BY_TAG_NAME("SELECT content_tags.tag_id, content_tags.obj_id, content_tags.tag_name_id, content_tags.comment, content_tags.begin_byte_offset, content_tags.end_byte_offset, tsk_examiners.login_name "
            + "FROM content_tags "
            + "LEFT OUTER JOIN tsk_examiners ON content_tags.examiner_id = tsk_examiners.examiner_id "
            + "WHERE tag_name_id = ?"), //NON-NLS
    SELECT_CONTENT_TAGS_BY_TAG_NAME_BY_DATASOURCE("SELECT content_tags.tag_id, content_tags.obj_id, content_tags.tag_name_id, content_tags.comment, content_tags.begin_byte_offset, content_tags.end_byte_offset, tag_names.display_name, tag_names.description, tag_names.color, tag_names.knownStatus, tsk_examiners.login_name, tag_names.tag_set_id "
            + "FROM content_tags as content_tags, tsk_files as tsk_files, tag_names as tag_names, tsk_examiners as tsk_examiners "
            + "WHERE content_tags.examiner_id = tsk_examiners.examiner_id"
            + " AND content_tags.obj_id = tsk_files.obj_id"
            + " AND content_tags.tag_name_id = tag_names.tag_name_id"
            + " AND content_tags.tag_name_id = ?"
            + " AND tsk_files.data_source_obj_id = ? "),
    SELECT_CONTENT_TAG_BY_ID("SELECT content_tags.tag_id, content_tags.obj_id, content_tags.tag_name_id, content_tags.comment, content_tags.begin_byte_offset, content_tags.end_byte_offset, tag_names.display_name, tag_names.description, tag_names.color, tag_names.knownStatus, tsk_examiners.login_name, tag_names.tag_set_id, tag_names.rank "
            + "FROM content_tags "
            + "INNER JOIN tag_names ON content_tags.tag_name_id = tag_names.tag_name_id "
            + "LEFT OUTER JOIN tsk_examiners ON content_tags.examiner_id = tsk_examiners.examiner_id "
            + "WHERE tag_id = ?"), //NON-NLS
    SELECT_CONTENT_TAGS_BY_CONTENT("SELECT content_tags.tag_id, content_tags.obj_id, content_tags.tag_name_id, content_tags.comment, content_tags.begin_byte_offset, content_tags.end_byte_offset, tag_names.display_name, tag_names.description, tag_names.color, tag_names.knownStatus, tsk_examiners.login_name, tag_names.tag_set_id, tag_names.rank "
            + "FROM content_tags "
            + "INNER JOIN tag_names ON content_tags.tag_name_id = tag_names.tag_name_id "
            + "LEFT OUTER JOIN tsk_examiners ON content_tags.examiner_id = tsk_examiners.examiner_id "
            + "WHERE content_tags.obj_id = ?"), //NON-NLS
    INSERT_ARTIFACT_TAG("INSERT INTO blackboard_artifact_tags (artifact_id, tag_name_id, comment, examiner_id) "
            + "VALUES (?, ?, ?, ?)"), //NON-NLS
    DELETE_ARTIFACT_TAG("DELETE FROM blackboard_artifact_tags WHERE tag_id = ?"), //NON-NLS
    SELECT_ARTIFACT_TAGS("SELECT blackboard_artifact_tags.tag_id, blackboard_artifact_tags.artifact_id, blackboard_artifact_tags.tag_name_id, blackboard_artifact_tags.comment, tag_names.display_name, tag_names.description, tag_names.color, tag_names.knownStatus, tag_names.tag_set_id, tsk_examiners.login_name, tag_names.rank "
            + "FROM blackboard_artifact_tags "
            + "INNER JOIN tag_names ON blackboard_artifact_tags.tag_name_id = tag_names.tag_name_id "
            + "LEFT OUTER JOIN tsk_examiners ON blackboard_artifact_tags.examiner_id = tsk_examiners.examiner_id"), //NON-NLS
    COUNT_ARTIFACTS_BY_TAG_NAME("SELECT COUNT(*) AS count FROM blackboard_artifact_tags WHERE tag_name_id = ?"), //NON-NLS
    COUNT_ARTIFACTS_BY_TAG_NAME_BY_DATASOURCE("SELECT COUNT(*) AS count FROM blackboard_artifact_tags as artifact_tags, blackboard_artifacts AS arts WHERE artifact_tags.artifact_id = arts.artifact_id"
            + " AND artifact_tags.tag_name_id = ?"
            + " AND arts.data_source_obj_id = ? "),
    SELECT_ARTIFACT_TAGS_BY_TAG_NAME("SELECT blackboard_artifact_tags.tag_id, blackboard_artifact_tags.artifact_id, blackboard_artifact_tags.tag_name_id, blackboard_artifact_tags.comment, tsk_examiners.login_name "
            + "FROM blackboard_artifact_tags "
            + "LEFT OUTER JOIN tsk_examiners ON blackboard_artifact_tags.examiner_id = tsk_examiners.examiner_id "
            + "WHERE tag_name_id = ?"), //NON-NLS
    SELECT_ARTIFACT_TAGS_BY_TAG_NAME_BY_DATASOURCE("SELECT artifact_tags.tag_id, artifact_tags.artifact_id, artifact_tags.tag_name_id, artifact_tags.comment, arts.obj_id, arts.artifact_obj_id, arts.data_source_obj_id, arts.artifact_type_id, arts.review_status_id, tsk_examiners.login_name "
            + "FROM blackboard_artifact_tags as artifact_tags, blackboard_artifacts AS arts, tsk_examiners AS tsk_examiners "
            + "WHERE artifact_tags.examiner_id = tsk_examiners.examiner_id"
            + " AND artifact_tags.artifact_id = arts.artifact_id"
            + " AND artifact_tags.tag_name_id = ? "
            + " AND arts.data_source_obj_id = ? "),
    SELECT_ARTIFACT_TAG_BY_ID("SELECT blackboard_artifact_tags.tag_id, blackboard_artifact_tags.artifact_id, blackboard_artifact_tags.tag_name_id, blackboard_artifact_tags.comment, tag_names.display_name, tag_names.description, tag_names.color, tag_names.knownStatus, tsk_examiners.login_name, tag_names.tag_set_id, tag_names.rank "
            + "FROM blackboard_artifact_tags "
            + "INNER JOIN tag_names ON blackboard_artifact_tags.tag_name_id = tag_names.tag_name_id "
            + "LEFT OUTER JOIN tsk_examiners ON blackboard_artifact_tags.examiner_id = tsk_examiners.examiner_id "
            + "WHERE blackboard_artifact_tags.tag_id = ?"), //NON-NLS
    SELECT_ARTIFACT_TAGS_BY_ARTIFACT("SELECT blackboard_artifact_tags.tag_id, blackboard_artifact_tags.artifact_id, blackboard_artifact_tags.tag_name_id, blackboard_artifact_tags.comment, tag_names.display_name, tag_names.description, tag_names.color, tag_names.knownStatus, tsk_examiners.login_name, tag_names.tag_set_id, tag_names.rank "
            + "FROM blackboard_artifact_tags "
            + "INNER JOIN tag_names ON blackboard_artifact_tags.tag_name_id = tag_names.tag_name_id "
            + "LEFT OUTER JOIN tsk_examiners ON blackboard_artifact_tags.examiner_id = tsk_examiners.examiner_id "
            + "WHERE blackboard_artifact_tags.artifact_id = ?"), //NON-NLS
    SELECT_REPORTS("SELECT * FROM reports"), //NON-NLS
    SELECT_REPORT_BY_ID("SELECT * FROM reports WHERE obj_id = ?"), //NON-NLS
    INSERT_REPORT("INSERT INTO reports (obj_id, path, crtime, src_module_name, report_name) VALUES (?, ?, ?, ?, ?)"), //NON-NLS
    DELETE_REPORT("DELETE FROM reports WHERE reports.obj_id = ?"), //NON-NLS
    DELETE_REPORT_TSK_OBJECT("DELETE FROM tsk_objects where tsk_objects.obj_id = ? and tsk_objects.type = ?"),
    INSERT_INGEST_JOB("INSERT INTO ingest_jobs (obj_id, host_name, start_date_time, end_date_time, status_id, settings_dir) VALUES (?, ?, ?, ?, ?, ?)"), //NON-NLS
    INSERT_INGEST_MODULE("INSERT INTO ingest_modules (display_name, unique_name, type_id, version) VALUES(?, ?, ?, ?)"), //NON-NLS
    SELECT_ATTR_BY_VALUE_BYTE("SELECT source FROM blackboard_attributes WHERE artifact_id = ? AND attribute_type_id = ? AND value_type = 4 AND value_byte = ?"), //NON-NLS
    UPDATE_ATTR_BY_VALUE_BYTE("UPDATE blackboard_attributes SET source = ? WHERE artifact_id = ? AND attribute_type_id = ? AND value_type = 4 AND value_byte = ?"), //NON-NLS
    UPDATE_IMAGE_PATH("UPDATE tsk_image_names SET name = ? WHERE obj_id = ?"), // NON-NLS
    SELECT_ARTIFACT_OBJECTIDS_BY_PARENT("SELECT blackboard_artifacts.artifact_obj_id AS artifact_obj_id " //NON-NLS
            + "FROM tsk_objects INNER JOIN blackboard_artifacts " //NON-NLS
            + "ON tsk_objects.obj_id=blackboard_artifacts.obj_id " //NON-NLS
            + "WHERE (tsk_objects.par_obj_id = ?)"),
    SELECT_EXAMINER_BY_ID("SELECT * FROM tsk_examiners WHERE examiner_id = ?"),
    SELECT_EXAMINER_BY_LOGIN_NAME("SELECT * FROM tsk_examiners WHERE login_name = ?"),
    INSERT_EXAMINER_POSTGRESQL("INSERT INTO tsk_examiners (login_name) VALUES (?) ON CONFLICT DO NOTHING"),
    INSERT_EXAMINER_SQLITE("INSERT OR IGNORE INTO tsk_examiners (login_name) VALUES (?)"),
    UPDATE_FILE_NAME("UPDATE tsk_files SET name = ? WHERE obj_id = ?"),
    UPDATE_IMAGE_NAME("UPDATE tsk_image_info SET display_name = ? WHERE obj_id = ?"),
    UPDATE_IMAGE_SIZES("UPDATE tsk_image_info SET size = ?, ssize = ? WHERE obj_id = ?"),
    DELETE_IMAGE_NAME("DELETE FROM tsk_image_names WHERE obj_id = ?"),
    INSERT_IMAGE_NAME("INSERT INTO tsk_image_names (obj_id, name, sequence) VALUES (?, ?, ?)"),
    INSERT_IMAGE_INFO("INSERT INTO tsk_image_info (obj_id, type, ssize, tzone, size, md5, sha1, sha256, display_name)"
            + " VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)"),
    INSERT_DATA_SOURCE_INFO("INSERT INTO data_source_info (obj_id, device_id, time_zone, added_date_time, host_id) VALUES (?, ?, ?, ?, ?)"),
    INSERT_VS_INFO("INSERT INTO tsk_vs_info (obj_id, vs_type, img_offset, block_size) VALUES (?, ?, ?, ?)"),
    INSERT_VS_PART_SQLITE("INSERT INTO tsk_vs_parts (obj_id, addr, start, length, desc, flags) VALUES (?, ?, ?, ?, ?, ?)"),
    INSERT_VS_PART_POSTGRESQL("INSERT INTO tsk_vs_parts (obj_id, addr, start, length, descr, flags) VALUES (?, ?, ?, ?, ?, ?)"),
    INSERT_POOL_INFO("INSERT INTO tsk_pool_info (obj_id, pool_type) VALUES (?, ?)"),
    INSERT_FS_INFO("INSERT INTO tsk_fs_info (obj_id, data_source_obj_id, img_offset, fs_type, block_size, block_count, root_inum, first_inum, last_inum, display_name)"
            + "VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)"),
    SELECT_TAG_NAME_BY_ID("SELECT * FROM tag_names where tag_name_id = ?");

    // The SQL template for this statement.
    private final String sql;

    private PREPARED_STATEMENT(String sql) {
        this.sql = sql;
    }

    String getSQL() {
        return sql;
    }
}

/**
 * A class for the connection pool.
 * This class will hand out connections of
 * the appropriate type based on the subclass that is calling
 * getPooledConnection();
 */
abstract private class ConnectionPool {

    // Underlying pooled data source; null until a subclass installs one,
    // and again after close().
    private PooledDataSource pooledDataSource;

    public ConnectionPool() {
        pooledDataSource = null;
    }

    /**
     * Gets a connection from the pool, wrapped in the subclass-specific
     * CaseDbConnection type.
     *
     * @throws TskCoreException if the pool is closed or getting a pooled
     *                          connection fails.
     */
    CaseDbConnection getConnection() throws TskCoreException {
        if (pooledDataSource == null) {
            throw new TskCoreException("Error getting case database connection - case is closed");
        }
        try {
            return getPooledConnection();
        } catch (SQLException exp) {
            throw new TskCoreException(exp.getMessage());
        }
    }

    /**
     * Shuts down the pool; subsequent getConnection() calls will throw.
     */
    void close() throws TskCoreException {
        if (pooledDataSource != null) {
            try {
                pooledDataSource.close();
            } catch (SQLException exp) {
                throw new TskCoreException(exp.getMessage());
            } finally {
                pooledDataSource = null; // mark closed even if close() threw
            }
        }
    }

    // Subclasses wrap the raw pooled connection in the right CaseDbConnection.
    abstract CaseDbConnection getPooledConnection() throws SQLException;

    public PooledDataSource getPooledDataSource() {
        return pooledDataSource;
    }

    public void setPooledDataSource(PooledDataSource pooledDataSource) {
        this.pooledDataSource = pooledDataSource;
    }
}

/**
 * Handles the initial setup of SQLite database connections, as well as
 * overriding getPooledConnection()
 */
private final class SQLiteConnections extends ConnectionPool {

    // c3p0 configuration overrides applied when pooling the data source.
    private final Map configurationOverrides = new HashMap();

    SQLiteConnections(String dbPath) throws SQLException {
        configurationOverrides.put("acquireIncrement", "2");
        configurationOverrides.put("initialPoolSize", "5");
        configurationOverrides.put("minPoolSize", "5");
        /*
         * NOTE: max pool size and max statements are related. If you
         * increase max pool size, then also increase statements.
         */
        configurationOverrides.put("maxPoolSize", "20");
        configurationOverrides.put("maxStatements", "200");
        configurationOverrides.put("maxStatementsPerConnection", "20");
        SQLiteConfig config = new SQLiteConfig();
        config.setSynchronous(SQLiteConfig.SynchronousMode.OFF); // Reduce I/O operations, we have no OS crash recovery anyway.
        config.setReadUncommited(true); // note: "Uncommited" is the library's own API spelling
        config.enforceForeignKeys(true); // Enforce foreign key constraints.
        SQLiteDataSource unpooled = new SQLiteDataSource(config);
        unpooled.setUrl("jdbc:sqlite:" + dbPath);
        // Wrap the plain SQLite data source in a c3p0 pool with the overrides above.
        setPooledDataSource((PooledDataSource) DataSources.pooledDataSource(unpooled, configurationOverrides));
    }

    @Override
    public CaseDbConnection getPooledConnection() throws SQLException {
        // If the requesting thread already has an open transaction, the new connection may get SQLITE_BUSY errors.
        if (CaseDbTransaction.hasOpenTransaction(Thread.currentThread().getId())) {
            // Temporarily filter out Image Gallery threads
            if (!Thread.currentThread().getName().contains("ImageGallery")) {
                logger.log(Level.WARNING, String.format("Thread %s (ID = %d) already has an open transaction. New connection may encounter SQLITE_BUSY error. ", Thread.currentThread().getName(), Thread.currentThread().getId()), new Throwable());
            }
        }
        return new SQLiteConnection(getPooledDataSource().getConnection());
    }
}

/**
 * Handles the initial setup of PostgreSQL database connections, as well as
 * overriding getPooledConnection()
 */
private final class PostgreSQLConnections extends ConnectionPool {

    PostgreSQLConnections(String host, int port, String dbName, String userName, String password) throws PropertyVetoException, UnsupportedEncodingException {
        ComboPooledDataSource comboPooledDataSource = new ComboPooledDataSource();
        comboPooledDataSource.setDriverClass("org.postgresql.Driver"); //loads the jdbc driver
        // URL-encode the database name so special characters survive the JDBC URL.
        comboPooledDataSource.setJdbcUrl("jdbc:postgresql://" + host + ":" + port + "/" + URLEncoder.encode(dbName, StandardCharsets.UTF_8.toString()));
        comboPooledDataSource.setUser(userName);
        comboPooledDataSource.setPassword(password);
        comboPooledDataSource.setAcquireIncrement(2);
        comboPooledDataSource.setInitialPoolSize(5);
        comboPooledDataSource.setMinPoolSize(5);
        /*
         * NOTE: max pool size and max statements are related. If you
         * increase max pool size, then also increase statements.
         */
        comboPooledDataSource.setMaxPoolSize(20);
        comboPooledDataSource.setMaxStatements(200);
        comboPooledDataSource.setMaxStatementsPerConnection(20);
        setPooledDataSource(comboPooledDataSource);
    }

    @Override
    public CaseDbConnection getPooledConnection() throws SQLException {
        return new PostgreSQLConnection(getPooledDataSource().getConnection());
    }
}

/**
 * An abstract base class for case database connection objects.
 */
abstract class CaseDbConnection implements AutoCloseable {

    static final int SLEEP_LENGTH_IN_MILLISECONDS = 5000;
    static final int MAX_RETRIES = 20; //MAX_RETRIES * SLEEP_LENGTH_IN_MILLESECONDS = max time to hang attempting connection

    /**
     * Creates a plain JDBC Statement on the wrapped connection; retrieve it
     * with getStatement() after execute().
     */
    private class CreateStatement implements DbCommand {

        private final Connection connection;
        private Statement statement = null;

        CreateStatement(Connection connection) {
            this.connection = connection;
        }

        Statement getStatement() {
            return statement;
        }

        @Override
        public void execute() throws SQLException {
            statement = connection.createStatement();
        }
    }

    /**
     * Sets auto-commit mode on the wrapped connection (mode = false is used
     * to begin a transaction).
     */
    private class SetAutoCommit implements DbCommand {

        private final Connection connection;
        private final boolean mode;

        SetAutoCommit(Connection connection, boolean mode) {
            this.connection = connection;
            this.mode = mode;
        }

        @Override
        public void execute() throws SQLException {
            connection.setAutoCommit(mode);
        }
    }

    /**
     * Commits the current transaction on the wrapped connection.
     */
    private class Commit implements DbCommand {

        private final Connection connection;

        Commit(Connection connection) {
            this.connection = connection;
        }

        @Override
        public void execute() throws SQLException {
            connection.commit();
        }
    }

    /**
     * Obtains a write lock on tsk_aggregate_score table. Only PostgreSQL is
     * supported.
     *
     * NOTE: We run into deadlock risks when we start to lock multiple
     * tables. If that need arrises, consider changing to opportunistic
     * locking and single-step transactions.
*/
/**
 * DbCommand that takes a PostgreSQL table-level write lock on
 * tsk_aggregate_score. SHARE ROW EXCLUSIVE blocks other writers (and other
 * holders of this lock mode) while still allowing plain readers.
 */
private class AggregateScoreTablePostgreSQLWriteLock implements DbCommand {

	private final Connection connection;

	AggregateScoreTablePostgreSQLWriteLock(Connection connection) {
		this.connection = connection;
	}

	@Override
	public void execute() throws SQLException {
		PreparedStatement preparedStatement = connection.prepareStatement("LOCK TABLE ONLY tsk_aggregate_score in SHARE ROW EXCLUSIVE MODE");
		preparedStatement.execute();
	}
}

/**
 * DbCommand that runs a query through a Statement and holds the ResultSet
 * for retrieval via getResultSet() after execute() completes.
 */
private class ExecuteQuery implements DbCommand {

	private final Statement statement;
	private final String query;
	private ResultSet resultSet;

	ExecuteQuery(Statement statement, String query) {
		this.statement = statement;
		this.query = query;
	}

	ResultSet getResultSet() {
		return resultSet;
	}

	@Override
	public void execute() throws SQLException {
		resultSet = statement.executeQuery(query);
	}
}

/**
 * DbCommand that runs an already-prepared query and holds the ResultSet
 * for retrieval via getResultSet() after execute() completes.
 */
private class ExecutePreparedStatementQuery implements DbCommand {

	private final PreparedStatement preparedStatement;
	private ResultSet resultSet;

	ExecutePreparedStatementQuery(PreparedStatement preparedStatement) {
		this.preparedStatement = preparedStatement;
	}

	ResultSet getResultSet() {
		return resultSet;
	}

	@Override
	public void execute() throws SQLException {
		resultSet = preparedStatement.executeQuery();
	}
}

/**
 * DbCommand that runs an update/insert/delete via a PreparedStatement.
 */
private class ExecutePreparedStatementUpdate implements DbCommand {

	private final PreparedStatement preparedStatement;

	ExecutePreparedStatementUpdate(PreparedStatement preparedStatement) {
		this.preparedStatement = preparedStatement;
	}

	@Override
	public void execute() throws SQLException {
		preparedStatement.executeUpdate();
	}
}

/**
 * DbCommand that runs an update/insert/delete SQL string via a Statement.
 */
private class ExecuteStatementUpdate implements DbCommand {

	private final Statement statement;
	private final String updateCommand;

	ExecuteStatementUpdate(Statement statement, String updateCommand) {
		this.statement = statement;
		this.updateCommand = updateCommand;
	}

	@Override
	public void execute() throws SQLException {
		statement.executeUpdate(updateCommand);
	}
}

/**
 * DbCommand that runs an update via a Statement, passing the
 * generated-keys flag (Statement.RETURN_GENERATED_KEYS / NO_GENERATED_KEYS)
 * through to JDBC.
 */
private class ExecuteStatementUpdateGenerateKeys implements DbCommand {

	private final Statement statement;
	private final int generateKeys;
	private final String updateCommand;

	ExecuteStatementUpdateGenerateKeys(Statement statement, String updateCommand, int generateKeys) {
		this.statement = statement;
		this.generateKeys = generateKeys;
		this.updateCommand = updateCommand;
	}

	@Override
	public void execute() throws SQLException {
		statement.executeUpdate(updateCommand, generateKeys);
	}
}

/**
 * DbCommand that prepares a statement from SQL text; the result is
 * available via getPreparedStatement() after execute() completes.
 */
private class PrepareStatement implements DbCommand {

	private final Connection connection;
	private final String input;
	private PreparedStatement preparedStatement = null;

	PrepareStatement(Connection connection, String input) {
		this.connection = connection;
		this.input = input;
	}

	PreparedStatement getPreparedStatement() {
		return preparedStatement;
	}

	@Override
	public void execute() throws SQLException {
		preparedStatement = connection.prepareStatement(input);
	}
}

/**
 * DbCommand that prepares a statement from SQL text with an explicit
 * generated-keys flag.
 */
private class PrepareStatementGenerateKeys implements DbCommand {

	private final Connection connection;
	private final String input;
	private final int generateKeys;
	private PreparedStatement preparedStatement = null;

	PrepareStatementGenerateKeys(Connection connection, String input, int generateKeysInput) {
		this.connection = connection;
		this.input = input;
		this.generateKeys = generateKeysInput;
	}

	PreparedStatement getPreparedStatement() {
		return preparedStatement;
	}

	@Override
	public void execute() throws SQLException {
		preparedStatement = connection.prepareStatement(input, generateKeys);
	}
}

// Subclasses (SQLiteConnection, PostgreSQLConnection) implement the
// DB-specific execute-with-retry policy.
abstract void executeCommand(DbCommand command) throws SQLException;

private final Connection connection;
// Cache of statements prepared from the PREPARED_STATEMENT enum.
private final Map preparedStatements;
// Cache of ad hoc statements, keyed by "SQL:<sql> Key:<generateKeys>".
private final Map adHocPreparedStatements;

CaseDbConnection(Connection connection) {
	this.connection = connection;
	preparedStatements = new EnumMap(PREPARED_STATEMENT.class);
	adHocPreparedStatements = new HashMap<>();
}

boolean isOpen() {
	// NOTE(review): only checks for null, not Connection.isClosed() —
	// a closed-but-non-null connection still reports open.
	return this.connection != null;
}

PreparedStatement getPreparedStatement(PREPARED_STATEMENT statementKey) throws SQLException {
	return getPreparedStatement(statementKey, Statement.NO_GENERATED_KEYS);
}

PreparedStatement getPreparedStatement(PREPARED_STATEMENT statementKey, int generateKeys) throws SQLException {
	// Lazy statement preparation.
	PreparedStatement statement;
	if (this.preparedStatements.containsKey(statementKey)) {
		statement = this.preparedStatements.get(statementKey);
	} else {
		statement = prepareStatement(statementKey.getSQL(), generateKeys);
		this.preparedStatements.put(statementKey, statement);
	}
	return statement;
}

/**
 * Get a prepared statement for the given input. Will cache the prepared
 * statement for this connection.
 *
 * @param sqlStatement The SQL for the prepared statement.
 * @param generateKeys The generate keys enum from Statement.
 *
 * @return The prepared statement
 *
 * @throws SQLException
 */
PreparedStatement getPreparedStatement(String sqlStatement, int generateKeys) throws SQLException {
	PreparedStatement statement;
	String statementKey = "SQL:" + sqlStatement + " Key:" + generateKeys;
	if (adHocPreparedStatements.containsKey(statementKey)) {
		statement = this.adHocPreparedStatements.get(statementKey);
	} else {
		statement = prepareStatement(sqlStatement, generateKeys);
		this.adHocPreparedStatements.put(statementKey, statement);
	}
	return statement;
}

// NOTE(review): this base implementation does not apply generateKeys;
// PostgreSQLConnection overrides it to honor the flag — presumably
// deliberate for SQLite, confirm before relying on generated keys here.
PreparedStatement prepareStatement(String sqlStatement, int generateKeys) throws SQLException {
	PrepareStatement prepareStatement = new PrepareStatement(this.getConnection(), sqlStatement);
	executeCommand(prepareStatement);
	return prepareStatement.getPreparedStatement();
}

Statement createStatement() throws SQLException {
	CreateStatement createStatement = new CreateStatement(this.connection);
	executeCommand(createStatement);
	return createStatement.getStatement();
}

// Begins a transaction by turning off auto-commit on the connection.
void beginTransaction() throws SQLException {
	SetAutoCommit setAutoCommit = new SetAutoCommit(connection, false);
	executeCommand(setAutoCommit);
}

void commitTransaction() throws SQLException {
	Commit commit = new Commit(connection);
	executeCommand(commit);
	// You must turn auto commit back on when done with the transaction.
	SetAutoCommit setAutoCommit = new SetAutoCommit(connection, true);
	executeCommand(setAutoCommit);
}

/**
 * A rollback that logs exceptions and does not throw, intended for
 * "internal" use in SleuthkitCase methods where the exception that
 * motivated the rollback is the exception to report to the client.
 */
void rollbackTransaction() {
	try {
		connection.rollback();
	} catch (SQLException e) {
		logger.log(Level.SEVERE, "Error rolling back transaction", e);
	}
	try {
		connection.setAutoCommit(true);
	} catch (SQLException e) {
		logger.log(Level.SEVERE, "Error restoring auto-commit", e);
	}
}

/**
 * A rollback that throws, intended for use by the CaseDbTransaction
 * class where client code is managing the transaction and the client
 * may wish to know that the rollback failed.
 *
 * @throws SQLException
 */
void rollbackTransactionWithThrow() throws SQLException {
	try {
		connection.rollback();
	} finally {
		// Restore auto-commit even if the rollback itself failed.
		connection.setAutoCommit(true);
	}
}

/**
 * Blocks until a write lock can be obtained on the tsk_aggregate_score
 * table. Used to ensure only one thread/client is updating the score at
 * a time. Can be called multiple times on the same transaction.
 *
 * @throws SQLException
 * @throws TskCoreException
 */
void getAggregateScoreTableWriteLock() throws SQLException, TskCoreException {
	switch (getDatabaseType()) {
		case POSTGRESQL:
			AggregateScoreTablePostgreSQLWriteLock tableWriteLock = new AggregateScoreTablePostgreSQLWriteLock(connection);
			executeCommand(tableWriteLock);
			break;
		case SQLITE:
			// We do nothing here because we assume the entire SQLite DB is already locked from
			// when the analysis results were added/deleted in the same transaction.
			break;
		default:
			throw new TskCoreException("Unknown DB Type: " + getDatabaseType().name());
	}
}

ResultSet executeQuery(Statement statement, String query) throws SQLException {
	ExecuteQuery queryCommand = new ExecuteQuery(statement, query);
	executeCommand(queryCommand);
	return queryCommand.getResultSet();
}

/**
 *
 * @param statement The SQL statement to execute
 *
 * @return returns the ResultSet from the execution of the query
 *
 * @throws SQLException \ref query_database_page \ref
 *                      insert_and_update_database_page
 */
ResultSet executeQuery(PreparedStatement statement) throws SQLException {
	ExecutePreparedStatementQuery executePreparedStatementQuery = new ExecutePreparedStatementQuery(statement);
	executeCommand(executePreparedStatementQuery);
	return executePreparedStatementQuery.getResultSet();
}

void executeUpdate(Statement statement, String update) throws SQLException {
	executeUpdate(statement, update, Statement.NO_GENERATED_KEYS);
}

// NOTE(review): generateKeys is ignored in this base implementation;
// PostgreSQLConnection overrides this to pass it through to JDBC.
void executeUpdate(Statement statement, String update, int generateKeys) throws SQLException {
	ExecuteStatementUpdate executeStatementUpdate = new ExecuteStatementUpdate(statement, update);
	executeCommand(executeStatementUpdate);
}

void executeUpdate(PreparedStatement statement) throws SQLException {
	ExecutePreparedStatementUpdate executePreparedStatementUpdate = new ExecutePreparedStatementUpdate(statement);
	executeCommand(executePreparedStatementUpdate);
}

/**
 * Close the connection to the database. Closes all cached prepared
 * statements (enum-keyed and ad hoc) before closing the underlying
 * JDBC connection; failures are logged, not thrown.
 */
@Override
public void close() {
	try {
		for (PreparedStatement stmt : preparedStatements.values()) {
			closeStatement(stmt);
		}
		for (PreparedStatement stmt : adHocPreparedStatements.values()) {
			closeStatement(stmt);
		}
		connection.close();
	} catch (SQLException ex) {
		logger.log(Level.SEVERE, "Unable to close connection to case database", ex);
	}
}

Connection getConnection() {
	return this.connection;
}
}

/**
 * A connection to an SQLite case database.
*/ private final class SQLiteConnection extends CaseDbConnection { private static final int DATABASE_LOCKED_ERROR = 0; // This should be 6 according to documentation, but it has been observed to be 0. private static final int SQLITE_BUSY_ERROR = 5; SQLiteConnection(Connection conn) { super(conn); } @Override void executeCommand(DbCommand command) throws SQLException { int retryCounter = 0; while (true) { try { command.execute(); // Perform the operation break; } catch (SQLException ex) { if ((ex.getErrorCode() == SQLITE_BUSY_ERROR || ex.getErrorCode() == DATABASE_LOCKED_ERROR) && retryCounter < MAX_RETRIES) { try { // We do not notify of error here, as this is not an // error condition. It is likely a temporary busy or // locked issue and we will retry. retryCounter++; Thread.sleep(SLEEP_LENGTH_IN_MILLISECONDS); } catch (InterruptedException exp) { Logger.getLogger(SleuthkitCase.class.getName()).log(Level.WARNING, "Unexpectedly unable to wait for database.", exp); } } else { throw ex; } } } } } /** * A connection to a PostgreSQL case database. 
*/ private final class PostgreSQLConnection extends CaseDbConnection { private final String COMMUNICATION_ERROR = PSQLState.COMMUNICATION_ERROR.getState(); private final String SYSTEM_ERROR = PSQLState.SYSTEM_ERROR.getState(); private final String UNKNOWN_STATE = PSQLState.UNKNOWN_STATE.getState(); private static final int MAX_RETRIES = 3; PostgreSQLConnection(Connection conn) { super(conn); } @Override void executeUpdate(Statement statement, String update, int generateKeys) throws SQLException { CaseDbConnection.ExecuteStatementUpdateGenerateKeys executeStatementUpdateGenerateKeys = new CaseDbConnection.ExecuteStatementUpdateGenerateKeys(statement, update, generateKeys); executeCommand(executeStatementUpdateGenerateKeys); } @Override PreparedStatement prepareStatement(String sqlStatement, int generateKeys) throws SQLException { CaseDbConnection.PrepareStatementGenerateKeys prepareStatementGenerateKeys = new CaseDbConnection.PrepareStatementGenerateKeys(this.getConnection(), sqlStatement, generateKeys); executeCommand(prepareStatementGenerateKeys); return prepareStatementGenerateKeys.getPreparedStatement(); } @Override void executeCommand(DbCommand command) throws SQLException { SQLException lastException = null; for (int retries = 0; retries < MAX_RETRIES; retries++) { try { command.execute(); lastException = null; // reset since we had a successful execution break; } catch (SQLException ex) { lastException = ex; String sqlState = ex.getSQLState(); if (sqlState == null || sqlState.equals(COMMUNICATION_ERROR) || sqlState.equals(SYSTEM_ERROR) || sqlState.equals(UNKNOWN_STATE)) { try { Thread.sleep(SLEEP_LENGTH_IN_MILLISECONDS); } catch (InterruptedException exp) { Logger.getLogger(SleuthkitCase.class.getName()).log(Level.WARNING, "Unexpectedly unable to wait for database.", exp); } } else { throw ex; } } } // rethrow the exception if we bailed because of too many retries if (lastException != null) { throw lastException; } } } /** * Wraps the transactional 
capabilities of a CaseDbConnection object to
 * support use cases where control of a transaction is given to a
 * SleuthkitCase client. Note that this class does not implement the
 * Transaction interface because that sort of flexibility and its associated
 * complexity is not needed. Also, TskCoreExceptions are thrown to be
 * consistent with the outer SleuthkitCase class.
 *
 * This class will automatically acquire the single user case write lock and
 * release it when the transaction is closed. Otherwise we risk deadlock
 * because this transaction can lock up SQLite and make it "busy" and
 * another thread may get a write lock to the DB, but not be able to do
 * anything because the DB is busy.
 */
public static final class CaseDbTransaction {

	private final CaseDbConnection connection;
	private SleuthkitCase sleuthkitCase;

	// A collection of object score changes that occurred as part of this transaction.
	// When the transaction is committed, events are fired to notify any listeners.
	// Score changes are stored as a map keyed by objId to prevent duplicates.
	private Map scoreChangeMap = new HashMap<>();
	private List hostsAdded = new ArrayList<>();
	private List accountsChanged = new ArrayList<>();
	private List accountsAdded = new ArrayList<>();
	private List deletedOsAccountObjectIds = new ArrayList<>();
	private List deletedResultObjectIds = new ArrayList<>();

	// Thread ids that currently have an open transaction; guarded by the lock below.
	private static Set threadsWithOpenTransaction = new HashSet<>();
	private static final Object threadsWithOpenTransactionLock = new Object();

	// Acquires the single-user case write lock, gets a connection and begins
	// the transaction. On failure the lock is released before throwing.
	private CaseDbTransaction(SleuthkitCase sleuthkitCase) throws TskCoreException {
		this.sleuthkitCase = sleuthkitCase;

		sleuthkitCase.acquireSingleUserCaseWriteLock();
		this.connection = sleuthkitCase.getConnection();
		try {
			synchronized (threadsWithOpenTransactionLock) {
				this.connection.beginTransaction();
				threadsWithOpenTransaction.add(Thread.currentThread().getId());
			}
		} catch (SQLException ex) {
			sleuthkitCase.releaseSingleUserCaseWriteLock();
			throw new TskCoreException("Failed to create transaction on case database", ex);
		}
	}

	/**
	 * The implementations of the public APIs that take a CaseDbTransaction
	 * object need access to the underlying CaseDbConnection.
	 *
	 * @return The CaseDbConnection instance for this instance of
	 *         CaseDbTransaction.
	 */
	CaseDbConnection getConnection() {
		return this.connection;
	}

	/**
	 * Saves a score change done as part of the transaction.
	 *
	 * @param scoreChange Score change.
	 */
	void registerScoreChange(ScoreChange scoreChange) {
		scoreChangeMap.put(scoreChange.getObjectId(), scoreChange);
	}

	/**
	 * Saves a host that has been added as a part of this transaction.
	 *
	 * @param host The host.
	 */
	void registerAddedHost(Host host) {
		if (host != null) {
			this.hostsAdded.add(host);
		}
	}

	/**
	 * Saves an account that has been updated as a part of this transaction.
	 *
	 * @param account The account.
	 */
	void registerChangedOsAccount(OsAccount account) {
		if (account != null) {
			accountsChanged.add(account);
		}
	}

	/**
	 * Saves an account that has been deleted as a part of this transaction.
	 *
	 * @param osAccountObjId The account.
	 */
	void registerDeletedOsAccount(long osAccountObjId) {
		deletedOsAccountObjectIds.add(osAccountObjId);
	}

	/**
	 * Saves an account that has been added as a part of this transaction.
	 *
	 * @param account The account.
	 */
	void registerAddedOsAccount(OsAccount account) {
		if (account != null) {
			accountsAdded.add(account);
		}
	}

	/**
	 * Saves an analysis result that has been deleted as a part of this
	 * transaction.
	 *
	 * @param result Deleted result.
	 */
	void registerDeletedAnalysisResult(long analysisResultObjId) {
		this.deletedResultObjectIds.add(analysisResultObjId);
	}

	/**
	 * Check if the given thread has an open transaction.
	 *
	 * @param threadId Thread id to check for.
	 *
	 * @return True if the given thread has an open transaction, false
	 *         otherwise.
	 */
	private static boolean hasOpenTransaction(long threadId) {
		synchronized (threadsWithOpenTransactionLock) {
			return threadsWithOpenTransaction.contains(threadId);
		}
	}

	/**
	 * Commits the transaction on the case database that was begun when this
	 * object was constructed. After the commit (successful or not), the
	 * transaction is closed and — on the commit path — events are fired for
	 * all changes registered during the transaction.
	 *
	 * @throws TskCoreException
	 */
	public void commit() throws TskCoreException {
		try {
			this.connection.commitTransaction();
		} catch (SQLException ex) {
			throw new TskCoreException("Failed to commit transaction on case database", ex);
		} finally {
			close();

			if (!scoreChangeMap.isEmpty()) {
				// Group score changes by data source and fire one event per source.
				Map> changesByDataSource = scoreChangeMap.values().stream()
						.collect(Collectors.groupingBy(ScoreChange::getDataSourceObjectId));
				for (Map.Entry> entry : changesByDataSource.entrySet()) {
					sleuthkitCase.fireTSKEvent(new TskEvent.AggregateScoresChangedEvent(entry.getKey(), ImmutableSet.copyOf(entry.getValue())));
				}
			}
			if (!hostsAdded.isEmpty()) {
				sleuthkitCase.fireTSKEvent(new TskEvent.HostsAddedTskEvent(hostsAdded));
			}
			if (!accountsAdded.isEmpty()) {
				sleuthkitCase.fireTSKEvent(new TskEvent.OsAccountsAddedTskEvent(accountsAdded));
			}
			if (!accountsChanged.isEmpty()) {
				sleuthkitCase.fireTSKEvent(new TskEvent.OsAccountsUpdatedTskEvent(accountsChanged));
			}
			if (!deletedOsAccountObjectIds.isEmpty()) {
				sleuthkitCase.fireTSKEvent(new TskEvent.OsAccountsDeletedTskEvent(deletedOsAccountObjectIds));
			}
			if (!deletedResultObjectIds.isEmpty()) {
				sleuthkitCase.fireTSKEvent(new TskEvent.AnalysisResultsDeletedTskEvent(deletedResultObjectIds));
			}
		}
	}

	/**
	 * Rolls back the transaction on the case database that was begun when
	 * this object was constructed.
	 *
	 * @throws TskCoreException
	 */
	public void rollback() throws TskCoreException {
		try {
			this.connection.rollbackTransactionWithThrow();
		} catch (SQLException ex) {
			throw new TskCoreException("Case database transaction rollback failed", ex);
		} finally {
			close();
		}
	}

	/**
	 * Close the database connection, release the single-user case write
	 * lock, and deregister this thread from the open-transaction set.
	 */
	void close() {
		this.connection.close();
		sleuthkitCase.releaseSingleUserCaseWriteLock();
		synchronized (threadsWithOpenTransactionLock) {
			threadsWithOpenTransaction.remove(Thread.currentThread().getId());
		}
	}
}

/**
 * The CaseDbQuery supports the use case where developers have a need for
 * data that is not exposed through the SleuthkitCase API.
A CaseDbQuery * instance gets created through the SleuthkitCase executeDbQuery() method. * It wraps the ResultSet and takes care of acquiring and releasing the * appropriate database lock. It implements AutoCloseable so that it can be * used in a try-with -resources block freeing developers from having to * remember to close the result set and releasing the lock. */ public final class CaseDbQuery implements AutoCloseable { private ResultSet resultSet; private CaseDbConnection connection; private CaseDbQuery(String query) throws TskCoreException { this(query, false); } private CaseDbQuery(String query, boolean allowWriteQuery) throws TskCoreException { if (!allowWriteQuery) { if (!query.regionMatches(true, 0, "SELECT", 0, "SELECT".length())) { throw new TskCoreException("Unsupported query: Only SELECT queries are supported."); } } SleuthkitCase.this.acquireSingleUserCaseReadLock(); try { connection = connections.getConnection(); resultSet = connection.executeQuery(connection.createStatement(), query); } catch (SQLException ex) { SleuthkitCase.this.releaseSingleUserCaseReadLock(); throw new TskCoreException("Error executing query: ", ex); } catch (TskCoreException ex) { SleuthkitCase.this.releaseSingleUserCaseReadLock(); throw ex; } } /** * Get the result set for this query. * * @return The result set. */ public ResultSet getResultSet() { return resultSet; } @Override public void close() throws TskCoreException { try { if (resultSet != null) { final Statement statement = resultSet.getStatement(); if (statement != null) { statement.close(); } resultSet.close(); } closeConnection(connection); } catch (SQLException ex) { throw new TskCoreException("Error closing query: ", ex); } finally { SleuthkitCase.this.releaseSingleUserCaseReadLock(); } } } /** * Add an observer for SleuthkitCase errors. * * @param observer The observer to add. * * @deprecated Catch exceptions instead. 
*/ @Deprecated public void addErrorObserver(ErrorObserver observer) { sleuthkitCaseErrorObservers.add(observer); } /** * Remove an observer for SleuthkitCase errors. * * @param observer The observer to remove. * * @deprecated Catch exceptions instead. */ @Deprecated public void removeErrorObserver(ErrorObserver observer) { int i = sleuthkitCaseErrorObservers.indexOf(observer); if (i >= 0) { sleuthkitCaseErrorObservers.remove(i); } } /** * Submit an error to all clients that are listening. * * @param context The context in which the error occurred. * @param errorMessage A description of the error that occurred. * * @deprecated Catch exceptions instead. */ @Deprecated public void submitError(String context, String errorMessage) { for (ErrorObserver observer : sleuthkitCaseErrorObservers) { if (observer != null) { try { observer.receiveError(context, errorMessage); } catch (Exception ex) { logger.log(Level.SEVERE, "Observer client unable to receive message: {0}, {1}", new Object[]{context, errorMessage, ex}); } } } } /** * Notifies observers of errors in the SleuthkitCase. * * @deprecated Catch exceptions instead. */ @Deprecated public interface ErrorObserver { /** * List of arguments for the context string parameters. This does not * preclude the use of arbitrary context strings by client code, but it * does provide a place to define standard context strings to allow * filtering of notifications by implementations of ErrorObserver. */ public enum Context { /** * Error occurred while reading image content. */ IMAGE_READ_ERROR("Image File Read Error"), /** * Error occurred while reading database content. 
*/ DATABASE_READ_ERROR("Database Read Error"); private final String contextString; private Context(String context) { this.contextString = context; } public String getContextString() { return contextString; } }; void receiveError(String context, String errorMessage); } /** * Given an object id, works up the tree of ancestors to the data source for * the object and gets the object id of the data source. The trivial case * where the input object id is for a source is handled. * * @param objectId An object id. * * @return A data source object id. * */ @Deprecated long getDataSourceObjectId(long objectId) { try { CaseDbConnection connection = connections.getConnection(); try { return getDataSourceObjectId(connection, objectId); } finally { closeConnection(connection); } } catch (TskCoreException ex) { logger.log(Level.SEVERE, "Error getting data source object id for a file", ex); return 0; } } /** * Get last (max) object id of content object in tsk_objects. * * @return currently max id * * @throws TskCoreException exception thrown when database error occurs and * last object id could not be queried * @deprecated Do not use, assumes a single-threaded, single-user case. */ @Deprecated public long getLastObjectId() throws TskCoreException { CaseDbConnection connection = null; ResultSet rs = null; acquireSingleUserCaseReadLock(); try { connection = connections.getConnection(); // SELECT MAX(obj_id) AS max_obj_id FROM tsk_objects PreparedStatement statement = connection.getPreparedStatement(PREPARED_STATEMENT.SELECT_MAX_OBJECT_ID); rs = connection.executeQuery(statement); long id = -1; if (rs.next()) { id = rs.getLong("max_obj_id"); } return id; } catch (SQLException e) { throw new TskCoreException("Error getting last object id", e); } finally { closeResultSet(rs); closeConnection(connection); releaseSingleUserCaseReadLock(); } } /** * Find and return list of files matching the specific Where clause. Use * findAllFilesWhere instead. 
It returns a more generic data type * * @param sqlWhereClause a SQL where clause appropriate for the desired * files (do not begin the WHERE clause with the word * WHERE!) * * @return a list of FsContent each of which satisfy the given WHERE clause * * @throws TskCoreException * @deprecated use SleuthkitCase.findAllFilesWhere() instead */ @Deprecated public List findFilesWhere(String sqlWhereClause) throws TskCoreException { CaseDbConnection connection = null; Statement s = null; ResultSet rs = null; acquireSingleUserCaseReadLock(); try { connection = connections.getConnection(); s = connection.createStatement(); rs = connection.executeQuery(s, "SELECT * FROM tsk_files WHERE " + sqlWhereClause); //NON-NLS List results = new ArrayList(); List temp = resultSetToAbstractFiles(rs, connection); for (AbstractFile f : temp) { final TSK_DB_FILES_TYPE_ENUM type = f.getType(); if (type.equals(TskData.TSK_DB_FILES_TYPE_ENUM.FS)) { results.add((FsContent) f); } } return results; } catch (SQLException e) { throw new TskCoreException("SQLException thrown when calling 'SleuthkitCase.findFilesWhere().", e); } finally { closeResultSet(rs); closeStatement(s); closeConnection(connection); releaseSingleUserCaseReadLock(); } } /** * Get the artifact type id associated with an artifact type name. * * @param artifactTypeName An artifact type name. * * @return An artifact id or -1 if the attribute type does not exist. * * @throws TskCoreException If an error occurs accessing the case database. 
* * @deprecated Use getArtifactType instead */ @Deprecated public int getArtifactTypeID(String artifactTypeName) throws TskCoreException { CaseDbConnection connection = null; Statement s = null; ResultSet rs = null; acquireSingleUserCaseReadLock(); try { connection = connections.getConnection(); s = connection.createStatement(); rs = connection.executeQuery(s, "SELECT artifact_type_id FROM blackboard_artifact_types WHERE type_name = '" + artifactTypeName + "'"); //NON-NLS int typeId = -1; if (rs.next()) { typeId = rs.getInt("artifact_type_id"); } return typeId; } catch (SQLException ex) { throw new TskCoreException("Error getting artifact type id", ex); } finally { closeResultSet(rs); closeStatement(s); closeConnection(connection); releaseSingleUserCaseReadLock(); } } /** * Gets a list of the standard blackboard artifact type enum objects. * * @return The members of the BlackboardArtifact.ARTIFACT_TYPE enum. * * @throws TskCoreException Specified, but not thrown. * @deprecated For a list of standard blackboard artifacts type enum * objects, use BlackboardArtifact.ARTIFACT_TYPE.values. */ @Deprecated public ArrayList getBlackboardArtifactTypes() throws TskCoreException { return new ArrayList(Arrays.asList(BlackboardArtifact.ARTIFACT_TYPE.values())); } /** * Adds a custom artifact type. The artifact type name must be unique, but * the display name need not be unique. * * @param artifactTypeName The artifact type name. * @param displayName The artifact type display name. * * @return The artifact type id assigned to the artifact type. * * @throws TskCoreException If there is an error adding the type to the case * database. * @deprecated Use SleuthkitCase.addBlackboardArtifactType instead. 
*/ @Deprecated public int addArtifactType(String artifactTypeName, String displayName) throws TskCoreException { try { return addBlackboardArtifactType(artifactTypeName, displayName).getTypeID(); } catch (TskDataException ex) { throw new TskCoreException("Failed to add artifact type.", ex); } } /** * Adds a custom attribute type with a string value type. The attribute type * name must be unique, but the display name need not be unique. * * @param attrTypeString The attribute type name. * @param displayName The attribute type display name. * * @return The attribute type id. * * @throws TskCoreException If there is an error adding the type to the case * database. * @deprecated Use SleuthkitCase.addArtifactAttributeType instead. */ @Deprecated public int addAttrType(String attrTypeString, String displayName) throws TskCoreException { try { return addArtifactAttributeType(attrTypeString, TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, displayName).getTypeID(); } catch (TskDataException ex) { throw new TskCoreException("Couldn't add new attribute type"); } } /** * Gets the attribute type id associated with an attribute type name. * * @param attrTypeName An attribute type name. * * @return An attribute id or -1 if the attribute type does not exist. * * @throws TskCoreException If an error occurs accessing the case database. * @deprecated Use SleuthkitCase.getAttributeType instead. 
*/ @Deprecated public int getAttrTypeID(String attrTypeName) throws TskCoreException { CaseDbConnection connection = null; Statement s = null; ResultSet rs = null; acquireSingleUserCaseReadLock(); try { connection = connections.getConnection(); s = connection.createStatement(); rs = connection.executeQuery(s, "SELECT attribute_type_id FROM blackboard_attribute_types WHERE type_name = '" + attrTypeName + "'"); //NON-NLS int typeId = -1; if (rs.next()) { typeId = rs.getInt("attribute_type_id"); } return typeId; } catch (SQLException ex) { throw new TskCoreException("Error getting attribute type id", ex); } finally { closeResultSet(rs); closeStatement(s); closeConnection(connection); releaseSingleUserCaseReadLock(); } } /** * Get the string associated with the given id. Will throw an error if that * id does not exist * * @param attrTypeID attribute id * * @return string associated with the given id * * @throws TskCoreException exception thrown if a critical error occurs * within tsk core * @deprecated Use getAttributeType instead */ @Deprecated public String getAttrTypeString(int attrTypeID) throws TskCoreException { CaseDbConnection connection = null; Statement s = null; ResultSet rs = null; acquireSingleUserCaseReadLock(); try { connection = connections.getConnection(); s = connection.createStatement(); rs = connection.executeQuery(s, "SELECT type_name FROM blackboard_attribute_types WHERE attribute_type_id = " + attrTypeID); //NON-NLS if (rs.next()) { return rs.getString("type_name"); } else { throw new TskCoreException("No type with that id"); } } catch (SQLException ex) { throw new TskCoreException("Error getting or creating a attribute type name", ex); } finally { closeResultSet(rs); closeStatement(s); closeConnection(connection); releaseSingleUserCaseReadLock(); } } /** * Get the display name for the attribute with the given id. 
Will throw an * error if that id does not exist * * @param attrTypeID attribute id * * @return string associated with the given id * * @throws TskCoreException exception thrown if a critical error occurs * within tsk core * @deprecated Use getAttributeType instead */ @Deprecated public String getAttrTypeDisplayName(int attrTypeID) throws TskCoreException { CaseDbConnection connection = null; Statement s = null; ResultSet rs = null; acquireSingleUserCaseReadLock(); try { connection = connections.getConnection(); s = connection.createStatement(); rs = connection.executeQuery(s, "SELECT display_name FROM blackboard_attribute_types WHERE attribute_type_id = " + attrTypeID); //NON-NLS if (rs.next()) { return rs.getString("display_name"); } else { throw new TskCoreException("No type with that id"); } } catch (SQLException ex) { throw new TskCoreException("Error getting or creating a attribute type name", ex); } finally { closeResultSet(rs); closeStatement(s); closeConnection(connection); releaseSingleUserCaseReadLock(); } } /** * Gets a list of the standard blackboard attribute type enum objects. * * @return The members of the BlackboardAttribute.ATTRIBUTE_TYPE enum. * * @throws TskCoreException Specified, but not thrown. * @deprecated For a list of standard blackboard attribute types enum * objects, use BlackboardAttribute.ATTRIBUTE_TYP.values. */ @Deprecated public ArrayList getBlackboardAttributeTypes() throws TskCoreException { return new ArrayList(Arrays.asList(BlackboardAttribute.ATTRIBUTE_TYPE.values())); } /** * Process a read-only query on the tsk database, any table Can be used to * e.g. to find files of a given criteria. resultSetToFsContents() will * convert the files to useful objects. MUST CALL closeRunQuery() when done * * @param query the given string query to run * * @return the resultSet from running the query. 
Caller MUST CALL * closeRunQuery(resultSet) as soon as possible, when done with * retrieving data from the resultSet * * @throws SQLException if error occurred during the query * @deprecated Do not use runQuery(), use executeQuery() instead. \ref * query_database_page */ @Deprecated public ResultSet runQuery(String query) throws SQLException { CaseDbConnection connection = null; acquireSingleUserCaseReadLock(); try { connection = connections.getConnection(); return connection.executeQuery(connection.createStatement(), query); } catch (TskCoreException ex) { throw new SQLException("Error getting connection for ad hoc query", ex); } finally { //TODO unlock should be done in closeRunQuery() //but currently not all code calls closeRunQuery - need to fix this closeConnection(connection); releaseSingleUserCaseReadLock(); } } /** * Closes ResultSet and its Statement previously retrieved from runQuery() * * @param resultSet with its Statement to close * * @throws SQLException of closing the query files failed * @deprecated Do not use runQuery() and closeRunQuery(), use executeQuery() * instead. \ref query_database_page */ @Deprecated public void closeRunQuery(ResultSet resultSet) throws SQLException { final Statement statement = resultSet.getStatement(); resultSet.close(); if (statement != null) { statement.close(); } } /** * Adds a carved file to the VirtualDirectory '$CarvedFiles' in the volume * or image given by systemId. Creates $CarvedFiles virtual directory if it * does not exist already. * * @param carvedFileName the name of the carved file to add * @param carvedFileSize the size of the carved file to add * @param containerId the ID of the parent volume, file system, or image * @param data the layout information - a list of offsets that * make up this carved file. * * @return A LayoutFile object representing the carved file. 
* * @throws org.sleuthkit.datamodel.TskCoreException * @deprecated Use addCarvedFile(CarvingResult) instead */ @Deprecated public LayoutFile addCarvedFile(String carvedFileName, long carvedFileSize, long containerId, List data) throws TskCoreException { CarvingResult.CarvedFile carvedFile = new CarvingResult.CarvedFile(carvedFileName, carvedFileSize, data); List files = new ArrayList(); files.add(carvedFile); CarvingResult carvingResult; Content parent = getContentById(containerId); if (parent instanceof FileSystem || parent instanceof Volume || parent instanceof Image) { carvingResult = new CarvingResult(parent, files); } else { throw new TskCoreException(String.format("Parent (id =%d) is not an file system, volume or image", containerId)); } return addCarvedFiles(carvingResult).get(0); } /** * Adds a collection of carved files to the VirtualDirectory '$CarvedFiles' * in the volume or image given by systemId. Creates $CarvedFiles virtual * directory if it does not exist already. * * @param filesToAdd A list of CarvedFileContainer files to add as carved * files. * * @return A list of the files added to the database. 
* * @throws org.sleuthkit.datamodel.TskCoreException * @deprecated Use addCarvedFile(CarvingResult) instead */ @Deprecated public List addCarvedFiles(List filesToAdd) throws TskCoreException { List carvedFiles = new ArrayList(); for (CarvedFileContainer container : filesToAdd) { CarvingResult.CarvedFile carvedFile = new CarvingResult.CarvedFile(container.getName(), container.getSize(), container.getRanges()); carvedFiles.add(carvedFile); } CarvingResult carvingResult; Content parent = getContentById(filesToAdd.get(0).getId()); if (parent instanceof FileSystem || parent instanceof Volume || parent instanceof Image) { carvingResult = new CarvingResult(parent, carvedFiles); } else { throw new TskCoreException(String.format("Parent (id =%d) is not an file system, volume or image", parent.getId())); } return addCarvedFiles(carvingResult); } /** * Creates a new derived file object, adds it to database and returns it. * * TODO add support for adding derived method * * @param fileName file name the derived file * @param localPath local path of the derived file, including the file * name. The path is relative to the database path. * @param size size of the derived file in bytes * @param ctime The changed time of the file. * @param crtime The creation time of the file. * @param atime The accessed time of the file * @param mtime The modified time of the file. 
* @param isFile whether a file or directory, true if a file * @param parentFile parent file object (derived or local file) * @param rederiveDetails details needed to re-derive file (will be specific * to the derivation method), currently unused * @param toolName name of derivation method/tool, currently unused * @param toolVersion version of derivation method/tool, currently * unused * @param otherDetails details of derivation method/tool, currently * unused * * @return newly created derived file object * * @throws TskCoreException exception thrown if the object creation failed * due to a critical system error * @deprecated Use the newer version with explicit encoding type parameter */ @Deprecated public DerivedFile addDerivedFile(String fileName, String localPath, long size, long ctime, long crtime, long atime, long mtime, boolean isFile, AbstractFile parentFile, String rederiveDetails, String toolName, String toolVersion, String otherDetails) throws TskCoreException { return addDerivedFile(fileName, localPath, size, ctime, crtime, atime, mtime, isFile, parentFile, rederiveDetails, toolName, toolVersion, otherDetails, TskData.EncodingType.NONE); } /** * Adds a local/logical file to the case database. The database operations * are done within a caller-managed transaction; the caller is responsible * for committing or rolling back the transaction. * * @param fileName The name of the file. * @param localPath The absolute path (including the file name) of the * local/logical in secondary storage. * @param size The size of the file in bytes. * @param ctime The changed time of the file. * @param crtime The creation time of the file. * @param atime The accessed time of the file * @param mtime The modified time of the file. * @param md5 The MD5 hash of the file * @param known The known status of the file (can be null) * @param mimeType The MIME type of the file * @param isFile True, unless the file is a directory. 
* @param encodingType Type of encoding used on the file * @param parent The parent of the file (e.g., a virtual directory) * @param transaction A caller-managed transaction within which the add * file operations are performed. * * @return An object representing the local/logical file. * * @throws TskCoreException if there is an error completing a case database * operation. * * @deprecated Use the newer version with explicit sha256 parameter */ @Deprecated public LocalFile addLocalFile(String fileName, String localPath, long size, long ctime, long crtime, long atime, long mtime, String md5, FileKnown known, String mimeType, boolean isFile, TskData.EncodingType encodingType, Content parent, CaseDbTransaction transaction) throws TskCoreException { return addLocalFile(fileName, localPath, size, ctime, crtime, atime, mtime, md5, null, known, mimeType, isFile, encodingType, parent, transaction); } /** * Adds a local/logical file to the case database. The database operations * are done within a caller-managed transaction; the caller is responsible * for committing or rolling back the transaction. * * @param fileName The name of the file. * @param localPath The absolute path (including the file name) of the * local/logical in secondary storage. * @param size The size of the file in bytes. * @param ctime The changed time of the file. * @param crtime The creation time of the file. * @param atime The accessed time of the file * @param mtime The modified time of the file. * @param isFile True, unless the file is a directory. * @param parent The parent of the file (e.g., a virtual directory) * @param transaction A caller-managed transaction within which the add file * operations are performed. * * @return An object representing the local/logical file. * * @throws TskCoreException if there is an error completing a case database * operation. 
* @deprecated Use the newer version with explicit encoding type parameter */ @Deprecated public LocalFile addLocalFile(String fileName, String localPath, long size, long ctime, long crtime, long atime, long mtime, boolean isFile, AbstractFile parent, CaseDbTransaction transaction) throws TskCoreException { return addLocalFile(fileName, localPath, size, ctime, crtime, atime, mtime, isFile, TskData.EncodingType.NONE, parent, transaction); } /** * Wraps the version of addLocalFile that takes a Transaction in a * transaction local to this method. * * @param fileName * @param localPath * @param size * @param ctime * @param crtime * @param atime * @param mtime * @param isFile * @param parent * * @return * * @throws TskCoreException * @deprecated Use the newer version with explicit encoding type parameter */ @Deprecated public LocalFile addLocalFile(String fileName, String localPath, long size, long ctime, long crtime, long atime, long mtime, boolean isFile, AbstractFile parent) throws TskCoreException { return addLocalFile(fileName, localPath, size, ctime, crtime, atime, mtime, isFile, TskData.EncodingType.NONE, parent); } /** * Start process of adding a image to the case. Adding an image is a * multi-step process and this returns an object that allows it to happen. * * @param timezone TZ time zone string to use for ingest of image. * @param addUnallocSpace Set to true to create virtual files for * unallocated space in the image. * @param noFatFsOrphans Set to true to skip processing orphan files of FAT * file systems. 
* * @return Object that encapsulates control of adding an image via the * SleuthKit native code layer * * @deprecated Use the newer version with explicit image writer path * parameter */ @Deprecated public AddImageProcess makeAddImageProcess(String timezone, boolean addUnallocSpace, boolean noFatFsOrphans) { return this.caseHandle.initAddImageProcess(timezone, addUnallocSpace, noFatFsOrphans, "", this); } /** * Helper to return FileSystems in an Image * * @param image Image to lookup FileSystem for * * @return Collection of FileSystems in the image * * @deprecated Use getImageFileSystems which throws an exception if an error * occurs. */ @Deprecated public Collection getFileSystems(Image image) { try { return getImageFileSystems(image); } catch (TskCoreException ex) { logger.log(Level.SEVERE, "Error loading all file systems for image with ID {0}", image.getId()); return new ArrayList<>(); } } /** * Find all files in the data source, by name and parent * * @param dataSource the dataSource (Image, parent-less VirtualDirectory) to * search for the given file name * @param fileName Pattern of the name of the file or directory to match * (case insensitive, used in LIKE SQL statement). * @param parentFile Object for parent file/directory to find children in * * @return a list of AbstractFile for files/directories whose name matches * fileName and that were inside a directory described by * parentFile. * * @throws org.sleuthkit.datamodel.TskCoreException * * @deprecated Use findFilesInFolder() */ @Deprecated public List findFiles(Content dataSource, String fileName, AbstractFile parentFile) throws TskCoreException { return findFilesInFolder(fileName, parentFile); } /** * Acquires a write lock, but only if this is a single-user case. Always * call this method in a try block with a call to the lock release method in * an associated finally block. * * @deprecated Use acquireSingleUserCaseWriteLock. 
*/ @Deprecated public void acquireExclusiveLock() { acquireSingleUserCaseWriteLock(); } /** * Releases a write lock, but only if this is a single-user case. This * method should always be called in the finally block of a try block in * which the lock was acquired. * * @deprecated Use releaseSingleUserCaseWriteLock. */ @Deprecated public void releaseExclusiveLock() { releaseSingleUserCaseWriteLock(); } /** * Acquires a read lock, but only if this is a single-user case. Call this * method in a try block with a call to the lock release method in an * associated finally block. * * @deprecated Use acquireSingleUserCaseReadLock. */ @Deprecated public void acquireSharedLock() { acquireSingleUserCaseReadLock(); } /** * Releases a read lock, but only if this is a single-user case. This method * should always be called in the finally block of a try block in which the * lock was acquired. * * @deprecated Use releaseSingleUserCaseReadLock. */ @Deprecated public void releaseSharedLock() { releaseSingleUserCaseReadLock(); } }; sleuthkit-4.11.1/bindings/java/src/org/sleuthkit/datamodel/TimelineManager.java000644 000765 000024 00000151404 14137073413 030362 0ustar00carrierstaff000000 000000 /* * Sleuth Kit Data Model * * Copyright 2018-2020 Basis Technology Corp. * Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
*/ package org.sleuthkit.datamodel; import com.google.common.annotations.Beta; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import java.sql.PreparedStatement; import java.sql.ResultSet; import java.sql.SQLException; import java.sql.Statement; import java.sql.Types; import java.time.Instant; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Objects; import java.util.Optional; import java.util.Set; import java.util.logging.Level; import java.util.logging.Logger; import java.util.stream.Collectors; import java.util.stream.Stream; import org.joda.time.DateTimeZone; import org.joda.time.Interval; import static org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE.TSK_TL_EVENT; import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_TL_EVENT_TYPE; import static org.sleuthkit.datamodel.CollectionUtils.isNotEmpty; import org.sleuthkit.datamodel.SleuthkitCase.CaseDbConnection; import static org.sleuthkit.datamodel.SleuthkitCase.escapeSingleQuotes; import static org.sleuthkit.datamodel.StringUtils.buildCSVString; /** * Provides access to the timeline data in a case database. */ public final class TimelineManager { private static final Logger logger = Logger.getLogger(TimelineManager.class.getName()); /** * Timeline event types added to the case database when it is created. */ private static final ImmutableList ROOT_CATEGORY_AND_FILESYSTEM_TYPES = ImmutableList.of( TimelineEventType.ROOT_EVENT_TYPE, TimelineEventType.WEB_ACTIVITY, TimelineEventType.MISC_TYPES, TimelineEventType.FILE_SYSTEM, TimelineEventType.FILE_ACCESSED, TimelineEventType.FILE_CHANGED, TimelineEventType.FILE_CREATED, TimelineEventType.FILE_MODIFIED); /** * Timeline event types added to the case database by the TimelineManager * constructor. 
Adding these types at runtime permits new child types of the * category types to be defined without modifying the table creation and * population code in the Sleuth Kit. */ private static final ImmutableList PREDEFINED_EVENT_TYPES = new ImmutableList.Builder() .addAll(TimelineEventType.WEB_ACTIVITY.getChildren()) .addAll(TimelineEventType.MISC_TYPES.getChildren()) .build(); // all known artifact type ids (used for determining if an artifact is standard or custom event) private static final Set ARTIFACT_TYPE_IDS = Stream.of(BlackboardArtifact.ARTIFACT_TYPE.values()) .map(artType -> artType.getTypeID()) .collect(Collectors.toSet()); private final SleuthkitCase caseDB; /** * Maximum timestamp to look to in future. Twelve (12) years from current * date. */ private static final Long MAX_TIMESTAMP_TO_ADD = Instant.now().getEpochSecond() + 394200000; /** * Mapping of timeline event type IDs to TimelineEventType objects. */ private final Map eventTypeIDMap = new HashMap<>(); /** * Constructs a timeline manager that provides access to the timeline data * in a case database. * * @param caseDB The case database. * * @throws TskCoreException If there is an error constructing the timeline * manager. 
*/
TimelineManager(SleuthkitCase caseDB) throws TskCoreException {
	this.caseDB = caseDB;

	// Seed the tsk_event_types table with every predefined event type:
	// the root/category/filesystem types plus the predefined child types.
	List fullList = new ArrayList<>();
	fullList.addAll(ROOT_CATEGORY_AND_FILESYSTEM_TYPES);
	fullList.addAll(PREDEFINED_EVENT_TYPES);

	caseDB.acquireSingleUserCaseWriteLock();
	try (final CaseDbConnection con = caseDB.getConnection();
			final PreparedStatement pStatement = con.prepareStatement(
					insertOrIgnore(" INTO tsk_event_types(event_type_id, display_name, super_type_id) VALUES (?, ?, ?)"),
					Statement.NO_GENERATED_KEYS)) {
		for (TimelineEventType type : fullList) {
			pStatement.setLong(1, type.getTypeID());
			// NOTE(review): escaping single quotes looks unnecessary here —
			// the value is bound as a PreparedStatement parameter, so this
			// would double-escape a display name containing an apostrophe.
			// Confirm whether stored display names rely on this behavior.
			pStatement.setString(2, escapeSingleQuotes(type.getDisplayName()));
			// A type that is its own parent is a root type: store NULL
			// for super_type_id instead of a self-reference.
			if (type != type.getParent()) {
				pStatement.setLong(3, type.getParent().getTypeID());
			} else {
				pStatement.setNull(3, java.sql.Types.INTEGER);
			}
			con.executeUpdate(pStatement);
			// Cache the type so lookups by ID do not need the database.
			eventTypeIDMap.put(type.getTypeID(), type);
		}
	} catch (SQLException ex) {
		throw new TskCoreException("Failed to initialize timeline event types", ex); // NON-NLS
	} finally {
		caseDB.releaseSingleUserCaseWriteLock();
	}
}

/**
 * Gets the smallest possible time interval that spans a collection of
 * timeline events.
 *
 * @param eventIDs The event IDs of the events for which to obtain the
 *                 spanning interval.
 *
 * @return The minimal spanning interval, may be null.
 *
 * @throws TskCoreException If there is an error querying the case database.
*/ public Interval getSpanningInterval(Collection eventIDs) throws TskCoreException { if (eventIDs.isEmpty()) { return null; } final String query = "SELECT Min(time) as minTime, Max(time) as maxTime FROM tsk_events WHERE event_id IN (" + buildCSVString(eventIDs) + ")"; //NON-NLS caseDB.acquireSingleUserCaseReadLock(); try (CaseDbConnection con = caseDB.getConnection(); Statement stmt = con.createStatement(); ResultSet results = stmt.executeQuery(query);) { if (results.next()) { return new Interval(results.getLong("minTime") * 1000, (results.getLong("maxTime") + 1) * 1000, DateTimeZone.UTC); // NON-NLS } } catch (SQLException ex) { throw new TskCoreException("Error executing get spanning interval query: " + query, ex); // NON-NLS } finally { caseDB.releaseSingleUserCaseReadLock(); } return null; } /** * Gets the smallest possible time interval that spans a collection of * timeline events. * * @param timeRange A time range that the events must be within. * @param filter A timeline events filter that the events must pass. * @param timeZone The time zone for the returned time interval. * * @return The minimal spanning interval, may be null. * * @throws TskCoreException If there is an error querying the case database. 
*/ public Interval getSpanningInterval(Interval timeRange, TimelineFilter.RootFilter filter, DateTimeZone timeZone) throws TskCoreException { long start = timeRange.getStartMillis() / 1000; long end = timeRange.getEndMillis() / 1000; String sqlWhere = getSQLWhere(filter); String augmentedEventsTablesSQL = getAugmentedEventsTablesSQL(filter); String queryString = " SELECT (SELECT Max(time) FROM " + augmentedEventsTablesSQL + " WHERE time <=" + start + " AND " + sqlWhere + ") AS start," + " (SELECT Min(time) FROM " + augmentedEventsTablesSQL + " WHERE time >= " + end + " AND " + sqlWhere + ") AS end";//NON-NLS caseDB.acquireSingleUserCaseReadLock(); try (CaseDbConnection con = caseDB.getConnection(); Statement stmt = con.createStatement(); //can't use prepared statement because of complex where clause ResultSet results = stmt.executeQuery(queryString);) { if (results.next()) { long start2 = results.getLong("start"); // NON-NLS long end2 = results.getLong("end"); // NON-NLS if (end2 == 0) { end2 = getMaxEventTime(); } return new Interval(start2 * 1000, (end2 + 1) * 1000, timeZone); } } catch (SQLException ex) { throw new TskCoreException("Failed to get MIN time.", ex); // NON-NLS } finally { caseDB.releaseSingleUserCaseReadLock(); } return null; } /** * Gets the timeline event with a given event ID. * * @param eventID An event ID. * * @return The timeline event, may be null. * * @throws TskCoreException If there is an error querying the case database. 
*/ public TimelineEvent getEventById(long eventID) throws TskCoreException { String sql = "SELECT * FROM " + getAugmentedEventsTablesSQL(false) + " WHERE event_id = " + eventID; caseDB.acquireSingleUserCaseReadLock(); try (CaseDbConnection con = caseDB.getConnection(); Statement stmt = con.createStatement();) { try (ResultSet results = stmt.executeQuery(sql);) { if (results.next()) { int typeID = results.getInt("event_type_id"); TimelineEventType type = getEventType(typeID).orElseThrow(() -> newEventTypeMappingException(typeID)); //NON-NLS return new TimelineEvent(eventID, results.getLong("data_source_obj_id"), results.getLong("content_obj_id"), results.getLong("artifact_id"), results.getLong("time"), type, results.getString("full_description"), results.getString("med_description"), results.getString("short_description"), intToBoolean(results.getInt("hash_hit")), intToBoolean(results.getInt("tagged"))); } } } catch (SQLException sqlEx) { throw new TskCoreException("Error while executing query " + sql, sqlEx); // NON-NLS } finally { caseDB.releaseSingleUserCaseReadLock(); } return null; } /** * Gets the event IDs of the timeline events within a given time range that * pass a given timeline events filter. * * @param timeRange The time range that the events must be within. * @param filter The timeline events filter that the events must pass. * * @return A list of event IDs ordered by event time. * * @throws TskCoreException If there is an error querying the case database. 
*/ public List getEventIDs(Interval timeRange, TimelineFilter.RootFilter filter) throws TskCoreException { Long startTime = timeRange.getStartMillis() / 1000; Long endTime = timeRange.getEndMillis() / 1000; if (Objects.equals(startTime, endTime)) { endTime++; //make sure end is at least 1 millisecond after start } ArrayList resultIDs = new ArrayList<>(); String query = "SELECT tsk_events.event_id AS event_id FROM " + getAugmentedEventsTablesSQL(filter) + " WHERE time >= " + startTime + " AND time <" + endTime + " AND " + getSQLWhere(filter) + " ORDER BY time ASC"; // NON-NLS caseDB.acquireSingleUserCaseReadLock(); try (CaseDbConnection con = caseDB.getConnection(); Statement stmt = con.createStatement(); ResultSet results = stmt.executeQuery(query);) { while (results.next()) { resultIDs.add(results.getLong("event_id")); //NON-NLS } } catch (SQLException sqlEx) { throw new TskCoreException("Error while executing query " + query, sqlEx); // NON-NLS } finally { caseDB.releaseSingleUserCaseReadLock(); } return resultIDs; } /** * Gets the maximum timeline event time in the case database. * * @return The maximum timeline event time in seconds since the UNIX epoch, * or -1 if there are no timeline events in the case database. * * @throws TskCoreException If there is an error querying the case database. */ public Long getMaxEventTime() throws TskCoreException { caseDB.acquireSingleUserCaseReadLock(); try (CaseDbConnection con = caseDB.getConnection(); Statement stms = con.createStatement(); ResultSet results = stms.executeQuery(STATEMENTS.GET_MAX_TIME.getSQL());) { if (results.next()) { return results.getLong("max"); // NON-NLS } } catch (SQLException ex) { throw new TskCoreException("Error while executing query " + STATEMENTS.GET_MAX_TIME.getSQL(), ex); // NON-NLS } finally { caseDB.releaseSingleUserCaseReadLock(); } return -1l; } /** * Gets the minimum timeline event time in the case database. 
* * @return The minimum timeline event time in seconds since the UNIX epoch, * or -1 if there are no timeline events in the case database. * * @throws TskCoreException If there is an error querying the case database. */ public Long getMinEventTime() throws TskCoreException { caseDB.acquireSingleUserCaseReadLock(); try (CaseDbConnection con = caseDB.getConnection(); Statement stms = con.createStatement(); ResultSet results = stms.executeQuery(STATEMENTS.GET_MIN_TIME.getSQL());) { if (results.next()) { return results.getLong("min"); // NON-NLS } } catch (SQLException ex) { throw new TskCoreException("Error while executing query " + STATEMENTS.GET_MAX_TIME.getSQL(), ex); // NON-NLS } finally { caseDB.releaseSingleUserCaseReadLock(); } return -1l; } /** * Gets the timeline event type with a given event type ID. * * @param eventTypeID An event type ID. * * @return The timeline event type in an Optional object, may be empty if * the event type is not found. */ public Optional getEventType(long eventTypeID) { // The parent EventType with ID 22 has been deprecated. This ID had two // children which have be reassigned to MISC_TYPES. if(eventTypeID == TimelineEventType.DEPRECATED_OTHER_EVENT_ID) { return Optional.of(TimelineEventType.MISC_TYPES); } return Optional.ofNullable(eventTypeIDMap.get(eventTypeID)); } /** * Gets all of the timeline event types in the case database. * * @return A list of timeline event types. */ public ImmutableList getEventTypes() { return ImmutableList.copyOf(eventTypeIDMap.values()); } private String insertOrIgnore(String query) { switch (caseDB.getDatabaseType()) { case POSTGRESQL: return " INSERT " + query + " ON CONFLICT DO NOTHING "; //NON-NLS case SQLITE: return " INSERT OR IGNORE " + query; //NON-NLS default: throw new UnsupportedOperationException("Unsupported DB type: " + caseDB.getDatabaseType().name()); } } /** * Enum constants for sql statements. TODO: Inline these away? 
*/ private enum STATEMENTS { GET_MAX_TIME("SELECT Max(time) AS max FROM tsk_events"), // NON-NLS GET_MIN_TIME("SELECT Min(time) AS min FROM tsk_events"); // NON-NLS private final String sql; private STATEMENTS(String sql) { this.sql = sql; } String getSQL() { return sql; } } /** * Gets a list of event IDs for the timeline events that have a given * artifact as the event source. * * @param artifact An artifact. * * @return The list of event IDs. * * @throws TskCoreException If there is an error querying the case database. */ public List getEventIDsForArtifact(BlackboardArtifact artifact) throws TskCoreException { ArrayList eventIDs = new ArrayList<>(); String query = "SELECT event_id FROM tsk_events " + " LEFT JOIN tsk_event_descriptions on ( tsk_events.event_description_id = tsk_event_descriptions.event_description_id ) " + " WHERE artifact_id = " + artifact.getArtifactID(); caseDB.acquireSingleUserCaseReadLock(); try (CaseDbConnection con = caseDB.getConnection(); Statement stmt = con.createStatement(); ResultSet results = stmt.executeQuery(query);) { while (results.next()) { eventIDs.add(results.getLong("event_id"));//NON-NLS } } catch (SQLException ex) { throw new TskCoreException("Error executing getEventIDsForArtifact query.", ex); // NON-NLS } finally { caseDB.releaseSingleUserCaseReadLock(); } return eventIDs; } /** * Gets a list of event IDs for the timeline events that have a given * content as the event source. * * @param content The content. * @param includeDerivedArtifacts If true, also get event IDs for events * where the event source is an artifact that * has the given content as its source. * * @return The list of event IDs. * * @throws TskCoreException If there is an error querying the case database. 
*/ public Set getEventIDsForContent(Content content, boolean includeDerivedArtifacts) throws TskCoreException { caseDB.acquireSingleUserCaseWriteLock(); try (CaseDbConnection conn = caseDB.getConnection()) { return getEventAndDescriptionIDs(conn, content.getId(), includeDerivedArtifacts).keySet(); } finally { caseDB.releaseSingleUserCaseWriteLock(); } } /** * Add a row to the tsk_events_description table. * * @param dataSourceObjId * @param fileObjId * @param artifactID * @param fullDescription * @param medDescription * @param shortDescription * @param hasHashHits * @param tagged * @param connection * * @return the event_decription_id of the inserted row. * * @throws TskCoreException * @throws DuplicateException */ private Long addEventDescription(long dataSourceObjId, long fileObjId, Long artifactID, String fullDescription, String medDescription, String shortDescription, boolean hasHashHits, boolean tagged, CaseDbConnection connection) throws TskCoreException, DuplicateException { String tableValuesClause = "tsk_event_descriptions ( " + "data_source_obj_id, content_obj_id, artifact_id, " + " full_description, med_description, short_description, " + " hash_hit, tagged " + " ) VALUES " + "(?, ?, ?, ?, ?, ?, ?, ?)"; String insertDescriptionSql = getSqlIgnoreConflict(tableValuesClause); caseDB.acquireSingleUserCaseWriteLock(); try { PreparedStatement insertDescriptionStmt = connection.getPreparedStatement(insertDescriptionSql, PreparedStatement.RETURN_GENERATED_KEYS); insertDescriptionStmt.clearParameters(); insertDescriptionStmt.setLong(1, dataSourceObjId); insertDescriptionStmt.setLong(2, fileObjId); if (artifactID == null) { insertDescriptionStmt.setNull(3, Types.INTEGER); } else { insertDescriptionStmt.setLong(3, artifactID); } insertDescriptionStmt.setString(4, fullDescription); insertDescriptionStmt.setString(5, medDescription); insertDescriptionStmt.setString(6, shortDescription); insertDescriptionStmt.setInt(7, booleanToInt(hasHashHits)); 
insertDescriptionStmt.setInt(8, booleanToInt(tagged)); int row = insertDescriptionStmt.executeUpdate(); // if no inserted rows, there is a conflict due to a duplicate event // description. If that happens, return null as no id was inserted. if (row < 1) { return null; } try (ResultSet generatedKeys = insertDescriptionStmt.getGeneratedKeys()) { if (generatedKeys.next()) { return generatedKeys.getLong(1); } else { return null; } } } catch (SQLException ex) { throw new TskCoreException("Failed to insert event description.", ex); // NON-NLS } finally { caseDB.releaseSingleUserCaseWriteLock(); } } /** * Returns an event description id for an existing event. * * @param dataSourceObjId Existing data source object id * @param fileObjId Existing content object id * @param artifactID Existing artifact id * @param fullDescription Full event description * @param connection Database connection * * @return The id of an existing description or null if none what found. * * @throws TskCoreException */ private Long getEventDescription(long dataSourceObjId, long fileObjId, Long artifactID, String fullDescription, CaseDbConnection connection) throws TskCoreException { String query = "SELECT event_description_id FROM tsk_event_descriptions " + "WHERE data_source_obj_id = " + dataSourceObjId + " AND content_obj_id = " + fileObjId + " AND artifact_id " + (artifactID != null ? " = " + artifactID : "IS null") + " AND full_description " + (fullDescription != null ? 
"= '" + SleuthkitCase.escapeSingleQuotes(fullDescription) + "'" : "IS null"); caseDB.acquireSingleUserCaseReadLock(); try (ResultSet resultSet = connection.createStatement().executeQuery(query)) { if (resultSet.next()) { long id = resultSet.getLong(1); return id; } } catch (SQLException ex) { throw new TskCoreException(String.format("Failed to get description, dataSource=%d, fileObjId=%d, artifactId=%d", dataSourceObjId, fileObjId, artifactID), ex); } finally { caseDB.releaseSingleUserCaseReadLock(); } return null; } Collection addEventsForNewFile(AbstractFile file, CaseDbConnection connection) throws TskCoreException { Set events = addEventsForNewFileQuiet(file, connection); events.stream() .map(TimelineEventAddedEvent::new) .forEach(caseDB::fireTSKEvent); return events; } /** * Adds timeline events for the new file to the database. Does not fire * TSKEvents for each addition. This method should only be used if an update * event will be sent later. For example, a data source processor may send * out a single event that a data source has been added rather than an event * for each timeline event. * * @param file The new file * @param connection Database connection to use * * @return Set of new events * * @throws TskCoreException */ Set addEventsForNewFileQuiet(AbstractFile file, CaseDbConnection connection) throws TskCoreException { //gather time stamps into map Map timeMap = ImmutableMap.of(TimelineEventType.FILE_CREATED, file.getCrtime(), TimelineEventType.FILE_ACCESSED, file.getAtime(), TimelineEventType.FILE_CHANGED, file.getCtime(), TimelineEventType.FILE_MODIFIED, file.getMtime()); /* * If there are no legitimate ( greater than zero ) time stamps skip the * rest of the event generation. 
*/ if (Collections.max(timeMap.values()) <= 0) { return Collections.emptySet(); } String description = file.getParentPath() + file.getName(); long fileObjId = file.getId(); Set events = new HashSet<>(); caseDB.acquireSingleUserCaseWriteLock(); try { Long descriptionID = addEventDescription(file.getDataSourceObjectId(), fileObjId, null, description, null, null, false, false, connection); if(descriptionID == null) { descriptionID = getEventDescription(file.getDataSourceObjectId(), fileObjId, null, description, connection); } if(descriptionID != null) { for (Map.Entry timeEntry : timeMap.entrySet()) { Long time = timeEntry.getValue(); if (time > 0 && time < MAX_TIMESTAMP_TO_ADD) {// if the time is legitimate ( greater than zero and less then 12 years from current date) insert it TimelineEventType type = timeEntry.getKey(); long eventID = addEventWithExistingDescription(time, type, descriptionID, connection); /* * Last two flags indicating hasTags and hasHashHits are * both set to false with the assumption that this is not * possible for a new file. See JIRA-5407 */ events.add(new TimelineEvent(eventID, descriptionID, fileObjId, null, time, type, description, null, null, false, false)); } else { if (time >= MAX_TIMESTAMP_TO_ADD) { logger.log(Level.WARNING, String.format("Date/Time discarded from Timeline for %s for file %s with Id %d", timeEntry.getKey().getDisplayName(), file.getParentPath() + file.getName(), file.getId())); } } } } else { throw new TskCoreException(String.format("Failed to get event description for file id = %d", fileObjId)); } } catch (DuplicateException dupEx) { logger.log(Level.SEVERE, "Attempt to make file event duplicate.", dupEx); } finally { caseDB.releaseSingleUserCaseWriteLock(); } return events; } /** * Add any events that can be created from the given Artifact. 
If the * artifact is a TSK_EVENT then the TSK_DATETIME, TSK_EVENT_TYPE and * TSK_DESCRIPTION are used to make the event, otherwise each event type is * checked to see if it can automatically create an event from the given * artifact. * * @param artifact The artifact to add events for * * @return A set of added events. * * @throws TskCoreException */ Set addArtifactEvents(BlackboardArtifact artifact) throws TskCoreException { Set newEvents = new HashSet<>(); /* * If the artifact is a TSK_TL_EVENT, use the TSK_TL_EVENT_TYPE * attribute to determine its event type, but give it a generic * description. */ if (artifact.getArtifactTypeID() == TSK_TL_EVENT.getTypeID()) { TimelineEventType eventType;//the type of the event to add. BlackboardAttribute attribute = artifact.getAttribute(new BlackboardAttribute.Type(TSK_TL_EVENT_TYPE)); if (attribute == null) { eventType = TimelineEventType.STANDARD_ARTIFACT_CATCH_ALL; } else { long eventTypeID = attribute.getValueLong(); eventType = eventTypeIDMap.getOrDefault(eventTypeID, TimelineEventType.STANDARD_ARTIFACT_CATCH_ALL); } try { // @@@ This casting is risky if we change class hierarchy, but was expedient. Should move parsing to another class addArtifactEvent(((TimelineEventArtifactTypeImpl) TimelineEventType.STANDARD_ARTIFACT_CATCH_ALL).makeEventDescription(artifact), eventType, artifact) .ifPresent(newEvents::add); } catch (DuplicateException ex) { logger.log(Level.SEVERE, getDuplicateExceptionMessage(artifact, "Attempt to make a timeline event artifact duplicate"), ex); } } else { /* * If there are any event types configured to make descriptions * automatically, use those. 
*/ Set eventTypesForArtifact = eventTypeIDMap.values().stream() .filter(TimelineEventArtifactTypeImpl.class::isInstance) .map(TimelineEventArtifactTypeImpl.class::cast) .filter(eventType -> eventType.getArtifactTypeID() == artifact.getArtifactTypeID()) .collect(Collectors.toSet()); boolean duplicateExists = false; for (TimelineEventArtifactTypeImpl eventType : eventTypesForArtifact) { try { addArtifactEvent(eventType.makeEventDescription(artifact), eventType, artifact) .ifPresent(newEvents::add); } catch (DuplicateException ex) { duplicateExists = true; logger.log(Level.SEVERE, getDuplicateExceptionMessage(artifact, "Attempt to make artifact event duplicate"), ex); } } // if no other timeline events were created directly, then create new 'other' ones. if (!duplicateExists && newEvents.isEmpty()) { try { addOtherEventDesc(artifact).ifPresent(newEvents::add); } catch (DuplicateException ex) { logger.log(Level.SEVERE, getDuplicateExceptionMessage(artifact, "Attempt to make 'other' artifact event duplicate"), ex); } } } newEvents.stream() .map(TimelineEventAddedEvent::new) .forEach(caseDB::fireTSKEvent); return newEvents; } /** * Formats a message to be displayed in response to a duplicate exception. * * @param artifact The artifact that caused the exception. * @param error The error message to be displayed in the core of the * message. * * @return A formatted message (i.e. 
	 *         "[org.sleuthkit.datamodel.TimelineManager]: Attempt to make
	 *         'other' artifact event duplicate (artifactID=12345, Source=Recent
	 *         Activity).")
	 */
	private String getDuplicateExceptionMessage(BlackboardArtifact artifact, String error) {
		String artifactIDStr = null;
		String sourceStr = null;
		if (artifact != null) {
			artifactIDStr = Long.toString(artifact.getId());
			try {
				// Use the source(s) of the first attribute that has a non-empty
				// source list as the representative "Source" for the message.
				sourceStr = artifact.getAttributes().stream()
						.filter(attr -> attr != null && attr.getSources() != null && !attr.getSources().isEmpty())
						.map(attr -> String.join(",", attr.getSources()))
						.findFirst()
						.orElse(null);
			} catch (TskCoreException ex) {
				// Best effort only: a failure to fetch attributes still produces
				// a usable (if less detailed) message below.
				logger.log(Level.WARNING, String.format("Could not fetch artifacts for artifact id: %d.", artifact.getId()), ex);
			}
		}
		// Fall back to empty strings so the formatted message never reads "null".
		artifactIDStr = (artifactIDStr == null) ? "" : artifactIDStr;
		sourceStr = (sourceStr == null) ? "" : sourceStr;
		return String.format("%s (artifactID=%s, Source=%s).", error, artifactIDStr, sourceStr);
	}

	/**
	 * Adds 'other' type events for artifacts that have no corresponding
	 * TimelineEventType.
	 *
	 * @param artifact The artifact for which to add a new timeline event.
	 *
	 * @return An optional of a new timeline event or empty if no time attribute
	 *         can be determined or the artifact is null.
* * @throws TskCoreException */ private Optional addOtherEventDesc(BlackboardArtifact artifact) throws TskCoreException, DuplicateException { if (artifact == null) { return Optional.empty(); } Long timeVal = artifact.getAttributes().stream() .filter((attr) -> attr.getAttributeType().getValueType() == BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DATETIME) .map(attr -> attr.getValueLong()) .findFirst() .orElse(null); if (timeVal == null) { return Optional.empty(); } String description = String.format("%s: %d", artifact.getDisplayName(), artifact.getId()); TimelineEventDescriptionWithTime evtWDesc = new TimelineEventDescriptionWithTime(timeVal, description, description, description); TimelineEventType evtType = (ARTIFACT_TYPE_IDS.contains(artifact.getArtifactTypeID())) ? TimelineEventType.STANDARD_ARTIFACT_CATCH_ALL : TimelineEventType.CUSTOM_ARTIFACT_CATCH_ALL; return addArtifactEvent(evtWDesc, evtType, artifact); } /** * Add an event of the given type from the given artifact to the database. * * @param eventPayload A description for this artifact including the time. * @param eventType The event type to create. * @param artifact The artifact to create the event from. * * @return The created event, wrapped in an Optional, or an empty Optional * if no event was created. 
* * @throws TskCoreException * @throws DuplicateException */ private Optional addArtifactEvent(TimelineEventDescriptionWithTime eventPayload, TimelineEventType eventType, BlackboardArtifact artifact) throws TskCoreException, DuplicateException { if (eventPayload == null) { return Optional.empty(); } long time = eventPayload.getTime(); // if the time is legitimate ( greater than or equal to zero or less than or equal to 12 years from present time) insert it into the db if (time <= 0 || time >= MAX_TIMESTAMP_TO_ADD) { if (time >= MAX_TIMESTAMP_TO_ADD) { logger.log(Level.WARNING, String.format("Date/Time discarded from Timeline for %s for artifact %s with id %d", artifact.getDisplayName(), eventPayload.getDescription(TimelineLevelOfDetail.HIGH), artifact.getId())); } return Optional.empty(); } String fullDescription = eventPayload.getDescription(TimelineLevelOfDetail.HIGH); String medDescription = eventPayload.getDescription(TimelineLevelOfDetail.MEDIUM); String shortDescription = eventPayload.getDescription(TimelineLevelOfDetail.LOW); long artifactID = artifact.getArtifactID(); long fileObjId = artifact.getObjectID(); Long dataSourceObjectID = artifact.getDataSourceObjectID(); if(dataSourceObjectID == null) { logger.log(Level.SEVERE, String.format("Failed to create timeline event for artifact (%d), artifact data source was null"), artifact.getId()); return Optional.empty(); } AbstractFile file = caseDB.getAbstractFileById(fileObjId); boolean hasHashHits = false; // file will be null if source was data source or some non-file if (file != null) { hasHashHits = isNotEmpty(file.getHashSetNames()); } boolean tagged = isNotEmpty(caseDB.getBlackboardArtifactTagsByArtifact(artifact)); TimelineEvent event; caseDB.acquireSingleUserCaseWriteLock(); try (CaseDbConnection connection = caseDB.getConnection();) { Long descriptionID = addEventDescription(dataSourceObjectID, fileObjId, artifactID, fullDescription, medDescription, shortDescription, hasHashHits, tagged, connection); 
if(descriptionID == null) { descriptionID = getEventDescription(dataSourceObjectID, fileObjId, artifactID, fullDescription, connection); } if(descriptionID != null) { long eventID = addEventWithExistingDescription(time, eventType, descriptionID, connection); event = new TimelineEvent(eventID, dataSourceObjectID, fileObjId, artifactID, time, eventType, fullDescription, medDescription, shortDescription, hasHashHits, tagged); } else { throw new TskCoreException(String.format("Failed to get event description for file id = %d, artifactId %d", fileObjId, artifactID)); } } finally { caseDB.releaseSingleUserCaseWriteLock(); } return Optional.of(event); } private long addEventWithExistingDescription(Long time, TimelineEventType type, long descriptionID, CaseDbConnection connection) throws TskCoreException, DuplicateException { String tableValuesClause = "tsk_events ( event_type_id, event_description_id , time) VALUES (?, ?, ?)"; String insertEventSql = getSqlIgnoreConflict(tableValuesClause); caseDB.acquireSingleUserCaseWriteLock(); try { PreparedStatement insertRowStmt = connection.getPreparedStatement(insertEventSql, Statement.RETURN_GENERATED_KEYS); insertRowStmt.clearParameters(); insertRowStmt.setLong(1, type.getTypeID()); insertRowStmt.setLong(2, descriptionID); insertRowStmt.setLong(3, time); int row = insertRowStmt.executeUpdate(); // if no inserted rows, return null. if (row < 1) { throw new DuplicateException(String.format("An event already exists in the event table for this item [time: %s, type: %s, description: %d].", time == null ? "" : Long.toString(time), type == null ? "" : type.toString(), descriptionID)); } try (ResultSet generatedKeys = insertRowStmt.getGeneratedKeys();) { if (generatedKeys.next()) { return generatedKeys.getLong(1); } else { throw new DuplicateException(String.format("An event already exists in the event table for this item [time: %s, type: %s, description: %d].", time == null ? "" : Long.toString(time), type == null ? 
"" : type.toString(), descriptionID)); } } } catch (SQLException ex) { throw new TskCoreException("Failed to insert event for existing description.", ex); // NON-NLS } finally { caseDB.releaseSingleUserCaseWriteLock(); } } private Map getEventAndDescriptionIDs(CaseDbConnection conn, long contentObjID, boolean includeArtifacts) throws TskCoreException { return getEventAndDescriptionIDsHelper(conn, contentObjID, (includeArtifacts ? "" : " AND artifact_id IS NULL")); } private Map getEventAndDescriptionIDs(CaseDbConnection conn, long contentObjID, Long artifactID) throws TskCoreException { return getEventAndDescriptionIDsHelper(conn, contentObjID, " AND artifact_id = " + artifactID); } private Map getEventAndDescriptionIDsHelper(CaseDbConnection con, long fileObjID, String artifactClause) throws TskCoreException { //map from event_id to the event_description_id for that event. Map eventIDToDescriptionIDs = new HashMap<>(); String sql = "SELECT event_id, tsk_events.event_description_id" + " FROM tsk_events " + " LEFT JOIN tsk_event_descriptions ON ( tsk_events.event_description_id = tsk_event_descriptions.event_description_id )" + " WHERE content_obj_id = " + fileObjID + artifactClause; try (Statement selectStmt = con.createStatement(); ResultSet executeQuery = selectStmt.executeQuery(sql);) { while (executeQuery.next()) { eventIDToDescriptionIDs.put(executeQuery.getLong("event_id"), executeQuery.getLong("event_description_id")); //NON-NLS } } catch (SQLException ex) { throw new TskCoreException("Error getting event description ids for object id = " + fileObjID, ex); } return eventIDToDescriptionIDs; } /** * Finds all of the timeline events directly associated with a given content * and marks them as having an event source that is tagged. This does not * include timeline events where the event source is an artifact, even if * the artifact source is the tagged content. * * @param content The content. 
	 *
	 * @return The event IDs of the events that were marked as having a tagged
	 *         event source.
	 *
	 * @throws TskCoreException If there is an error updating the case database.
	 *
	 * WARNING: THIS IS A BETA VERSION OF THIS METHOD, SUBJECT TO CHANGE AT ANY
	 * TIME.
	 */
	@Beta
	public Set updateEventsForContentTagAdded(Content content) throws TskCoreException {
		caseDB.acquireSingleUserCaseWriteLock();
		try (CaseDbConnection conn = caseDB.getConnection()) {
			// Only events directly sourced from this content; artifact-sourced
			// events are excluded (third argument is false).
			Map eventIDs = getEventAndDescriptionIDs(conn, content.getId(), false);
			// 1 == tagged; the flag lives on the shared event-description row.
			updateEventSourceTaggedFlag(conn, eventIDs.values(), 1);
			return eventIDs.keySet();
		} finally {
			caseDB.releaseSingleUserCaseWriteLock();
		}
	}

	/**
	 * Finds all of the timeline events directly associated with a given content
	 * and marks them as not having an event source that is tagged, if and only
	 * if there are no other tags on the content. The inspection of events does
	 * not include events where the event source is an artifact, even if the
	 * artifact source is the content from which the tag was removed.
	 *
	 * @param content The content.
	 *
	 * @return The event IDs of the events that were marked as not having a
	 *         tagged event source.
	 *
	 * @throws TskCoreException If there is an error updating the case database.
	 *
	 * WARNING: THIS IS A BETA VERSION OF THIS METHOD, SUBJECT TO CHANGE AT ANY
	 * TIME.
	 */
	@Beta
	public Set updateEventsForContentTagDeleted(Content content) throws TskCoreException {
		caseDB.acquireSingleUserCaseWriteLock();
		try (CaseDbConnection conn = caseDB.getConnection()) {
			// Only clear the flag once the last tag on the content is gone.
			if (caseDB.getContentTagsByContent(content).isEmpty()) {
				Map eventIDs = getEventAndDescriptionIDs(conn, content.getId(), false);
				updateEventSourceTaggedFlag(conn, eventIDs.values(), 0);
				return eventIDs.keySet();
			} else {
				return Collections.emptySet();
			}
		} finally {
			caseDB.releaseSingleUserCaseWriteLock();
		}
	}

	/**
	 * Finds all of the timeline events directly associated with a given
	 * artifact and marks them as having an event source that is tagged.
	 *
	 * @param artifact The artifact.
* * @return The event IDs of the events that were marked as having a tagged * event source. * * @throws TskCoreException If there is an error updating the case database. */ public Set updateEventsForArtifactTagAdded(BlackboardArtifact artifact) throws TskCoreException { caseDB.acquireSingleUserCaseWriteLock(); try (CaseDbConnection conn = caseDB.getConnection()) { Map eventIDs = getEventAndDescriptionIDs(conn, artifact.getObjectID(), artifact.getArtifactID()); updateEventSourceTaggedFlag(conn, eventIDs.values(), 1); return eventIDs.keySet(); } finally { caseDB.releaseSingleUserCaseWriteLock(); } } /** * Finds all of the timeline events directly associated with a given * artifact and marks them as not having an event source that is tagged, if * and only if there are no other tags on the artifact. * * @param artifact The artifact. * * @return The event IDs of the events that were marked as not having a * tagged event source. * * @throws TskCoreException If there is an error updating the case database. 
*/ public Set updateEventsForArtifactTagDeleted(BlackboardArtifact artifact) throws TskCoreException { caseDB.acquireSingleUserCaseWriteLock(); try (CaseDbConnection conn = caseDB.getConnection()) { if (caseDB.getBlackboardArtifactTagsByArtifact(artifact).isEmpty()) { Map eventIDs = getEventAndDescriptionIDs(conn, artifact.getObjectID(), artifact.getArtifactID()); updateEventSourceTaggedFlag(conn, eventIDs.values(), 0); return eventIDs.keySet(); } else { return Collections.emptySet(); } } finally { caseDB.releaseSingleUserCaseWriteLock(); } } private void updateEventSourceTaggedFlag(CaseDbConnection conn, Collection eventDescriptionIDs, int flagValue) throws TskCoreException { if (eventDescriptionIDs.isEmpty()) { return; } String sql = "UPDATE tsk_event_descriptions SET tagged = " + flagValue + " WHERE event_description_id IN (" + buildCSVString(eventDescriptionIDs) + ")"; //NON-NLS try (Statement updateStatement = conn.createStatement()) { updateStatement.executeUpdate(sql); } catch (SQLException ex) { throw new TskCoreException("Error marking content events tagged: " + sql, ex);//NON-NLS } } /** * Finds all of the timeline events associated directly or indirectly with a * given content and marks them as having an event source that has a hash * set hit. This includes both the events that have the content as their * event source and the events for which the content is the source content * for the source artifact of the event. * * @param content The content. * * @return The event IDs of the events that were marked as having an event * source with a hash set hit. * * @throws TskCoreException If there is an error updating the case database. 
*/ public Set updateEventsForHashSetHit(Content content) throws TskCoreException { caseDB.acquireSingleUserCaseWriteLock(); try (CaseDbConnection con = caseDB.getConnection(); Statement updateStatement = con.createStatement();) { Map eventIDs = getEventAndDescriptionIDs(con, content.getId(), true); if (!eventIDs.isEmpty()) { String sql = "UPDATE tsk_event_descriptions SET hash_hit = 1" + " WHERE event_description_id IN (" + buildCSVString(eventIDs.values()) + ")"; //NON-NLS try { updateStatement.executeUpdate(sql); //NON-NLS return eventIDs.keySet(); } catch (SQLException ex) { throw new TskCoreException("Error setting hash_hit of events.", ex);//NON-NLS } } else { return eventIDs.keySet(); } } catch (SQLException ex) { throw new TskCoreException("Error setting hash_hit of events.", ex);//NON-NLS } finally { caseDB.releaseSingleUserCaseWriteLock(); } } void rollBackTransaction(SleuthkitCase.CaseDbTransaction trans) throws TskCoreException { trans.rollback(); } /** * Counts the timeline events events that satisfy the given conditions. * * @param startTime Events that occurred before this time are not * counted (units: seconds from UNIX epoch) * @param endTime Events that occurred at or after this time are * not counted (seconds from unix epoch) * @param filter Events that fall within the specified time range * are only ocunted if they pass this filter. * @param typeHierachyLevel Events that fall within the specified time range * and pass the specified filter asre only counted * if their types are at the specified level of the * event type hierarchy. * * @return The event counts for each event type at the specified level in * the event types hierarchy. * * @throws TskCoreException If there is an error querying the case database. */ public Map countEventsByType(Long startTime, Long endTime, TimelineFilter.RootFilter filter, TimelineEventType.HierarchyLevel typeHierachyLevel) throws TskCoreException { long adjustedEndTime = Objects.equals(startTime, endTime) ? 
endTime + 1 : endTime; //do we want the base or subtype column of the databse String typeColumn = typeColumnHelper(TimelineEventType.HierarchyLevel.EVENT.equals(typeHierachyLevel)); String queryString = "SELECT count(DISTINCT tsk_events.event_id) AS count, " + typeColumn//NON-NLS + " FROM " + getAugmentedEventsTablesSQL(filter)//NON-NLS + " WHERE time >= " + startTime + " AND time < " + adjustedEndTime + " AND " + getSQLWhere(filter) // NON-NLS + " GROUP BY " + typeColumn; // NON-NLS caseDB.acquireSingleUserCaseReadLock(); try (CaseDbConnection con = caseDB.getConnection(); Statement stmt = con.createStatement(); ResultSet results = stmt.executeQuery(queryString);) { Map typeMap = new HashMap<>(); while (results.next()) { int eventTypeID = results.getInt(typeColumn); TimelineEventType eventType = getEventType(eventTypeID) .orElseThrow(() -> newEventTypeMappingException(eventTypeID));//NON-NLS typeMap.put(eventType, results.getLong("count")); // NON-NLS } return typeMap; } catch (SQLException ex) { throw new TskCoreException("Error getting count of events from db: " + queryString, ex); // NON-NLS } finally { caseDB.releaseSingleUserCaseReadLock(); } } private static TskCoreException newEventTypeMappingException(int eventTypeID) { return new TskCoreException("Error mapping event type id " + eventTypeID + " to EventType.");//NON-NLS } /** * Get an SQL expression that produces an events table augmented with the * columns required by the given filter: The union of the events table * joined to the content and blackboard artifacts tags tables, if necessary, * then joined to a query that selects hash set hits, if necessary. Then * joined to the tsk_files table for mime_types if necessary. * * @param filter The filter that is inspected to determine what * joins/columns are needed.. * * @return An SQL expresion that produces an events table augmented with the * columns required by the filters. 
*/ static private String getAugmentedEventsTablesSQL(TimelineFilter.RootFilter filter) { TimelineFilter.FileTypesFilter fileTypesFitler = filter.getFileTypesFilter(); boolean needsMimeTypes = fileTypesFitler != null && fileTypesFitler.hasSubFilters(); return getAugmentedEventsTablesSQL(needsMimeTypes); } /** * Get an SQL expression that produces an events table augmented with the * columns required by the filters: The union of the events table joined to * the content and blackboard artifacts tags tables, if necessary; then * joined to a query that selects hash set hits, if necessary; then joined * to the tsk_files table for mime_types if necessary. If all flags are * false, just return "events". * * @param needMimeTypes True if the filters require joining to the tsk_files * table for the mime_type. * * @return An SQL expression that produces an events table augmented with * the columns required by the filters. */ static private String getAugmentedEventsTablesSQL(boolean needMimeTypes) { /* * Regarding the timeline event tables schema, note that several columns * in the tsk_event_descriptions table seem, at first glance, to be * attributes of events rather than their descriptions and would appear * to belong in tsk_events table instead. The rationale for putting the * data source object ID, content object ID, artifact ID and the flags * indicating whether or not the event source has a hash set hit or is * tagged were motivated by the fact that these attributes are identical * for each event in a set of file system file MAC time events. The * decision was made to avoid duplication and save space by placing this * data in the tsk_event-descriptions table. */ return "( SELECT event_id, time, tsk_event_descriptions.data_source_obj_id, content_obj_id, artifact_id, " + " full_description, med_description, short_description, tsk_events.event_type_id, super_type_id," + " hash_hit, tagged " + (needMimeTypes ? 
", mime_type" : "") + " FROM tsk_events " + " JOIN tsk_event_descriptions ON ( tsk_event_descriptions.event_description_id = tsk_events.event_description_id)" + " JOIN tsk_event_types ON (tsk_events.event_type_id = tsk_event_types.event_type_id ) " + (needMimeTypes ? " LEFT OUTER JOIN tsk_files " + " ON (tsk_event_descriptions.content_obj_id = tsk_files.obj_id)" : "") + ") AS tsk_events"; } /** * Convert a boolean to int with the mappings true => 1, false =>0 * * @param value the boolean value to convert to an int. * * @return 1 if value is true, 0 if value is false. */ private static int booleanToInt(boolean value) { return value ? 1 : 0; } private static boolean intToBoolean(int value) { return value != 0; } /** * Gets the timeline events that fall within a given time interval and * satisfy a given event filter. * * @param timeRange The time level. * @param filter The event filter. * * @return The list of events that fall within the specified interval and * poass the specified filter. * * @throws TskCoreException If there is an error querying the case database. 
*/ public List getEvents(Interval timeRange, TimelineFilter.RootFilter filter) throws TskCoreException { List events = new ArrayList<>(); Long startTime = timeRange.getStartMillis() / 1000; Long endTime = timeRange.getEndMillis() / 1000; if (Objects.equals(startTime, endTime)) { endTime++; //make sure end is at least 1 millisecond after start } if (filter == null) { return events; } if (endTime < startTime) { return events; } //build dynamic parts of query String querySql = "SELECT time, content_obj_id, data_source_obj_id, artifact_id, " // NON-NLS + " event_id, " //NON-NLS + " hash_hit, " //NON-NLS + " tagged, " //NON-NLS + " event_type_id, super_type_id, " + " full_description, med_description, short_description " // NON-NLS + " FROM " + getAugmentedEventsTablesSQL(filter) // NON-NLS + " WHERE time >= " + startTime + " AND time < " + endTime + " AND " + getSQLWhere(filter) // NON-NLS + " ORDER BY time"; // NON-NLS caseDB.acquireSingleUserCaseReadLock(); try (CaseDbConnection con = caseDB.getConnection(); Statement stmt = con.createStatement(); ResultSet resultSet = stmt.executeQuery(querySql);) { while (resultSet.next()) { int eventTypeID = resultSet.getInt("event_type_id"); TimelineEventType eventType = getEventType(eventTypeID).orElseThrow(() -> new TskCoreException("Error mapping event type id " + eventTypeID + "to EventType."));//NON-NLS TimelineEvent event = new TimelineEvent( resultSet.getLong("event_id"), // NON-NLS resultSet.getLong("data_source_obj_id"), // NON-NLS resultSet.getLong("content_obj_id"), // NON-NLS resultSet.getLong("artifact_id"), // NON-NLS resultSet.getLong("time"), // NON-NLS eventType, resultSet.getString("full_description"), // NON-NLS resultSet.getString("med_description"), // NON-NLS resultSet.getString("short_description"), // NON-NLS resultSet.getInt("hash_hit") != 0, //NON-NLS resultSet.getInt("tagged") != 0); events.add(event); } } catch (SQLException ex) { throw new TskCoreException("Error getting events from db: " + querySql, 
ex); // NON-NLS } finally { caseDB.releaseSingleUserCaseReadLock(); } return events; } /** * Get the column name to use depending on if we want base types or subtypes * * @param useSubTypes True to use sub types, false to use base types. * * @return column name to use depending on if we want base types or subtypes */ private static String typeColumnHelper(final boolean useSubTypes) { return useSubTypes ? "event_type_id" : "super_type_id"; //NON-NLS } /** * Get the SQL where clause corresponding to the given filter * * @param filter A filter to generate the SQL where clause for, * * @return An SQL where clause (without the "where") corresponding to the * filter. */ String getSQLWhere(TimelineFilter.RootFilter filter) { String result; if (filter == null) { return getTrueLiteral(); } else { result = filter.getSQLWhere(this); } return result; } /** * Creates a sql statement that will do nothing due to unique constraint. * * @param insertTableValues the table, columns, and values portion of the * insert statement (i.e. 'table_name(col1, col2) * VALUES (rowVal1, rowVal2)'). * * @return The sql statement. * * @throws TskCoreException */ private String getSqlIgnoreConflict(String insertTableValues) throws TskCoreException { switch (caseDB.getDatabaseType()) { case POSTGRESQL: return "INSERT INTO " + insertTableValues + " ON CONFLICT DO NOTHING"; case SQLITE: return "INSERT OR IGNORE INTO " + insertTableValues; default: throw new TskCoreException("Unknown DB Type: " + caseDB.getDatabaseType().name()); } } private String getTrueLiteral() { switch (caseDB.getDatabaseType()) { case POSTGRESQL: return "TRUE";//NON-NLS case SQLITE: return "1";//NON-NLS default: throw new UnsupportedOperationException("Unsupported DB type: " + caseDB.getDatabaseType().name());//NON-NLS } } /** * Event fired by SleuthkitCase to indicate that a event has been added to * the tsk_events table. 
*/ final static public class TimelineEventAddedEvent { private final TimelineEvent addedEvent; public TimelineEvent getAddedEvent() { return addedEvent; } TimelineEventAddedEvent(TimelineEvent event) { this.addedEvent = event; } } /** * Exception thrown in the event of a duplicate. */ private static class DuplicateException extends Exception { private static final long serialVersionUID = 1L; /** * Main constructor. * * @param message Message for duplicate exception. */ DuplicateException(String message) { super(message); } } } sleuthkit-4.11.1/bindings/java/src/org/sleuthkit/datamodel/BundleProvider.java000644 000765 000024 00000002023 14137073413 030235 0ustar00carrierstaff000000 000000 /* * Sleuth Kit Data Model * * Copyright 2018 Basis Technology Corp. * Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.sleuthkit.datamodel; import java.util.ResourceBundle; /** * Provides easy access to the ResourceBundle in this package. */ final class BundleProvider { private static final ResourceBundle BUNDLE = ResourceBundle.getBundle("org.sleuthkit.datamodel.Bundle"); static ResourceBundle getBundle() { return BUNDLE; } private BundleProvider() { } } sleuthkit-4.11.1/bindings/java/src/org/sleuthkit/datamodel/Image.java000644 000765 000024 00000045064 14137073413 026347 0ustar00carrierstaff000000 000000 /* * Sleuth Kit Data Model * * Copyright 2011-2018 Basis Technology Corp. 
* Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.sleuthkit.datamodel; import java.text.MessageFormat; import java.util.ResourceBundle; import java.util.ArrayList; import java.util.Arrays; import java.util.List; import java.util.logging.Level; import java.util.logging.Logger; import java.io.File; import java.sql.ResultSet; import java.sql.SQLException; import java.sql.Statement; /** * Represents a disk image file, stored in tsk_image_info. Populated based on * data in database. * * Caches internal tsk image handle and reuses it for reads */ public class Image extends AbstractContent implements DataSource { //data about image private final long type, ssize; private long size; private final String[] paths; private volatile long imageHandle = 0; private volatile Host host = null; private final String deviceId, timezone; private String md5, sha1, sha256; private static ResourceBundle bundle = ResourceBundle.getBundle("org.sleuthkit.datamodel.Bundle"); private static final Logger LOGGER = Logger.getLogger(Image.class.getName()); /** * Create a disk image. * * Note: Most inputs originate from the database. * * @param db Case database. * @param obj_id Object ID. * @param type Image type. * @param ssize Sector size. * @param name Display name. * @param paths Image paths. * @param timezone Timezone. * @param md5 MD5 hash. * * @throws TskCoreException * * @deprecated Use the constructor that takes a device ID and size. 
     */
    @Deprecated
    protected Image(SleuthkitCase db, long obj_id, long type, long ssize, String name, String[] paths, String timezone, String md5) throws TskCoreException {
        super(db, obj_id, name);
        this.deviceId = "";
        this.type = type;
        this.ssize = ssize;
        this.paths = paths;
        this.timezone = timezone;
        this.size = 0;
        this.md5 = md5;
        this.sha1 = "";
        this.sha256 = "";
    }

    /**
     * Create a disk image.
     *
     * Note: Most inputs originate from the database.
     *
     * @param db       Case database.
     * @param obj_id   Object ID.
     * @param type     Image type.
     * @param deviceId Device ID.
     * @param ssize    Sector size.
     * @param name     Display name.
     * @param paths    Image paths.
     * @param timezone Timezone.
     * @param md5      MD5 hash.
     * @param sha1     SHA1 hash.
     * @param sha256   SHA256 hash.
     * @param size     Size.
     */
    Image(SleuthkitCase db, long obj_id, long type, String deviceId, long ssize, String name, String[] paths, String timezone, String md5, String sha1, String sha256, long size) throws TskCoreException {
        super(db, obj_id, name);
        this.deviceId = deviceId;
        this.type = type;
        this.ssize = ssize;
        this.paths = paths;
        this.timezone = timezone;
        this.size = size;
        this.md5 = md5;
        this.sha1 = sha1;
        this.sha256 = sha256;
    }

    /**
     * Get the handle to the sleuthkit image info object
     *
     * @return the object pointer
     *
     * @throws TskCoreException
     */
    public synchronized long getImageHandle() throws TskCoreException {
        if (paths.length == 0) {
            throw new TskCoreException("Image has no associated paths");
        }
        if (imageHandle == 0) {
            // Open lazily and cache; later reads reuse the same native handle.
            imageHandle = SleuthkitJNI.openImage(paths, (int)ssize, getSleuthkitCase());
        }
        return imageHandle;
    }

    // Package-private setter for the cached native handle.
    synchronized void setImageHandle(long imageHandle) {
        this.imageHandle = imageHandle;
    }

    @Override
    public Content getDataSource() {
        // An image is its own data source.
        return this;
    }

    @Override
    public void close() {
        //frees nothing, as we are caching image handles
    }

    @Override
    public void finalize() throws Throwable {
        try {
            if (imageHandle != 0) {
                // SleuthkitJNI.closeImg(imageHandle); // closeImg is currently a no-op
                imageHandle = 0;
            }
        } finally {
            super.finalize();
        }
    }

    @Override
    public int read(byte[] buf, long offset, long len) throws TskCoreException {
        // If there are no paths, don't attempt to read the image
        if (paths.length == 0) {
            return 0;
        }
        // read from the image
        return SleuthkitJNI.readImg(getImageHandle(), buf, offset, len);
    }

    @Override
    public long getSize() {
        if (size == 0) {
            // Lazily resolve the size from the first image path via JNI.
            try {
                if (paths.length > 0) {
                    //should always had at least one path
                    size = SleuthkitJNI.findDeviceSize(paths[0]);
                }
            } catch (TskCoreException ex) {
                Logger.getLogger(Image.class.getName()).log(Level.SEVERE, "Could not find image size, image: " + this.getId(), ex); //NON-NLS
            }
        }
        return size;
    }

    //Methods for retrieval of meta-data attributes
    /**
     * Get the image type
     *
     * @return image type
     */
    public TskData.TSK_IMG_TYPE_ENUM getType() {
        return TskData.TSK_IMG_TYPE_ENUM.valueOf(type);
    }

    /**
     * Get the sector size
     *
     * @return sector size
     */
    public long getSsize() {
        return ssize;
    }

    @Override
    public String getUniquePath() throws TskCoreException {
        return "/img_" + getName(); //NON-NLS
    }

    /**
     * Get the image path
     *
     * @return image path
     */
    public String[] getPaths() {
        return paths;
    }

    /**
     * @return a list of VolumeSystem associated with this Image.
     *
     * @throws TskCoreException
     */
    public List getVolumeSystems() throws TskCoreException {
        // Filter the image's children down to VolumeSystem instances.
        List children = getChildren();
        List vs = new ArrayList();
        for (Content child : children) {
            if (child instanceof VolumeSystem) {
                vs.add((VolumeSystem) child);
            }
        }
        return vs;
    }

    /**
     * @return a list of Volume associated with this Image.
     *
     * @throws TskCoreException
     */
    public List getVolumes() throws TskCoreException {
        // Filter the image's children down to Volume instances.
        List children = getChildren();
        List volumes = new ArrayList();
        for (Content child : children) {
            if (child instanceof Volume) {
                volumes.add((Volume) child);
            }
        }
        return volumes;
    }

    /**
     * @return a list of FileSystems in this Image. This includes FileSystems
     * that are both children of this Image as well as children of
     * Volumes in this image.
     *
     * @throws TskCoreException
     */
    public List getFileSystems() throws TskCoreException {
        List fs = new ArrayList<>();
        fs.addAll(getSleuthkitCase().getImageFileSystems(this));
        return fs;
    }

    /**
     * Get the timezone set for the image
     *
     * @return timezone string representation
     */
    @Override
    public String getTimeZone() {
        return timezone;
    }

    @Override
    public T accept(SleuthkitItemVisitor v) {
        return v.visit(this);
    }

    @Override
    public T accept(ContentVisitor v) {
        return v.visit(this);
    }

    @Override
    public List getChildren() throws TskCoreException {
        return getSleuthkitCase().getImageChildren(this);
    }

    @Override
    public List getChildrenIds() throws TskCoreException {
        return getSleuthkitCase().getImageChildrenIds(this);
    }

    @Override
    public String toString(boolean preserveState) {
        return super.toString(preserveState) + "Image [\t" + "\t" + "paths " + Arrays.toString(paths) + "\t" + "size " + size + "\t" + "ssize " + ssize + "\t" + "timezone " + timezone + "\t" + "type " + type + "]\t"; //NON-NLS
    }

    /**
     * Test if the file that created this image exists on disk. Does not work on
     * local disks - will always return false
     *
     * @return True if the file still exists
     */
    public Boolean imageFileExists() {
        if (paths.length > 0) {
            File imageFile = new File(paths[0]);
            return imageFile.exists();
        }
        return false;
    }

    /**
     * Perform some sanity checks on the bounds of the image contents to
     * determine if we could be missing some pieces of the image.
     *
     * @return String of error messages to display to user or empty string if
     * there are no errors
     */
    public String verifyImageSize() {
        Logger logger1 = Logger.getLogger("verifyImageSizes"); //NON-NLS
        String errorString = "";
        try {
            // Check 1: read the last sector of every volume in every volume system.
            List volumeSystems = getVolumeSystems();
            for (VolumeSystem vs : volumeSystems) {
                List volumes = vs.getVolumes();
                for (Volume v : volumes) {
                    byte[] buf = new byte[512];
                    long endOffset = (v.getStart() + v.getLength()) * 512 - 512;
                    try {
                        int readBytes = read(buf, endOffset, 512);
                        if (readBytes < 0) {
                            logger1.log(Level.WARNING, "Possible Incomplete Image: Error reading volume at offset {0}", endOffset); //NON-NLS
                            errorString = MessageFormat.format(bundle.getString("Image.verifyImageSize.errStr1.text"), endOffset);
                        }
                    } catch (TskCoreException ex) {
                        logger1.log(Level.WARNING, "Possible Incomplete Image: Error reading volume at offset {0}: {1}", new Object[]{endOffset, ex.getLocalizedMessage()}); //NON-NLS
                        errorString = MessageFormat.format(bundle.getString("Image.verifyImageSize.errStr2.text"), endOffset);
                    }
                }
            }
            // Check 2: read the last block of every file system in the image.
            List fileSystems = getFileSystems();
            for (FileSystem fs : fileSystems) {
                long block_size = fs.getBlock_size();
                long endOffset = fs.getImageOffset() + fs.getSize() - block_size;
                try {
                    byte[] buf = new byte[(int) block_size];
                    int readBytes = read(buf, endOffset, block_size);
                    if (readBytes < 0) {
                        logger1.log(Level.WARNING, "Possible Incomplete Image: Error reading file system at offset {0}", endOffset); //NON-NLS
                        errorString = MessageFormat.format(bundle.getString("Image.verifyImageSize.errStr3.text"), endOffset);
                    }
                } catch (TskCoreException ex) {
                    logger1.log(Level.WARNING, "Possible Incomplete Image: Error reading file system at offset {0}: {1}", new Object[]{endOffset, ex.getLocalizedMessage()}); //NON-NLS
                    errorString = MessageFormat.format(bundle.getString("Image.verifyImageSize.errStr4.text"), endOffset);
                }
            }
        } catch (TskException ex) {
            // do nothing if we got an exception from trying to get file systems and volume systems
        }
        return errorString;
    }

    /**
     * Gets the md5 hash value
     *
     * @return md5 hash if attained(from database), empty string otherwise
     *
     * @throws TskCoreException
     */
    public String getMd5() throws TskCoreException {
        if (md5 == null || md5.isEmpty()) {
            md5 = getSleuthkitCase().getMd5ImageHash(this);
        }
        return md5;
    }

    /**
     * gets the SHA1 hash value
     *
     * @return SHA1 hash if attained(from database), empty string otherwise
     *
     * @throws TskCoreException on DB error.
     */
    public String getSha1() throws TskCoreException {
        if (sha1 == null || sha1.isEmpty()) {
            sha1 = getSleuthkitCase().getSha1ImageHash(this);
        }
        return sha1;
    }

    /**
     * gets the SHA256 hash value
     *
     * @return SHA256 hash if attained(from database), empty string otherwise
     *
     * @throws TskCoreException
     */
    public String getSha256() throws TskCoreException {
        if (sha256 == null || sha256.isEmpty()) {
            sha256 = getSleuthkitCase().getSha256ImageHash(this);
        }
        return sha256;
    }

    /**
     * Sets the MD5 hash in the case database; a hash may only be set once.
     *
     * @param md5 The MD5 hash value.
     * @throws TskCoreException On DB errors
     * @throws TskDataException If hash has already been set
     */
    public void setMD5(String md5) throws TskCoreException, TskDataException {
        if (getMd5().isEmpty() == false) {
            throw new TskDataException("MD5 value has already been set");
        }
        getSleuthkitCase().setMd5ImageHash(this, md5);
        this.md5 = md5;
    }

    /**
     * Sets the SHA1 hash in the case database; a hash may only be set once.
     *
     * @param sha1 The SHA1 hash value.
     * @throws TskCoreException On DB errors
     * @throws TskDataException If hash has already been set
     */
    public void setSha1(String sha1) throws TskCoreException, TskDataException {
        if (getSha1().isEmpty() == false) {
            throw new TskDataException("SHA1 value has already been set");
        }
        getSleuthkitCase().setSha1ImageHash(this, sha1);
        this.sha1 = sha1;
    }

    /**
     * Sets the SHA256 hash in the case database; a hash may only be set once.
     *
     * @param sha256 The SHA256 hash value.
     * @throws TskCoreException On DB errors
     * @throws TskDataException If hash has already been set
     */
    public void setSha256(String sha256) throws TskCoreException, TskDataException {
        if (getSha256().isEmpty() == false) {
            throw new TskDataException("SHA256 value has already been set");
        }
        getSleuthkitCase().setSha256ImageHash(this, sha256);
        this.sha256 = sha256;
    }

    /**
     * Gets the ASCII-printable identifier for the device associated with the
     * data source. This identifier is intended to be unique across multiple
     * cases (e.g., a UUID).
     *
     * @return The device id.
     */
    @Override
    public String getDeviceId() {
        return deviceId;
    }

    /**
     * Set the name for this data source.
     *
     * @param newName The new name for the data source
     *
     * @throws TskCoreException Thrown if an error occurs while updating the database
     */
    @Override
    public void setDisplayName(String newName) throws TskCoreException {
        this.getSleuthkitCase().setImageName(newName, getId());
    }

    /**
     * Gets the size of the contents of the data source in bytes. This size can
     * change as archive files within the data source are expanded, files are
     * carved, etc., and is different from the size of the data source as
     * returned by Content.getSize, which is the size of the data source as a
     * file.
     *
     * @param sleuthkitCase The sleuthkit case instance from which to make calls
     * to the database.
     *
     * @return The size in bytes.
     *
     * @throws TskCoreException Thrown when there is an issue trying to retrieve
     * data from the database.
     */
    @Override
    public long getContentSize(SleuthkitCase sleuthkitCase) throws TskCoreException {
        SleuthkitCase.CaseDbConnection connection;
        Statement statement = null;
        ResultSet resultSet = null;
        long contentSize = 0;
        connection = sleuthkitCase.getConnection();
        try {
            statement = connection.createStatement();
            resultSet = connection.executeQuery(statement, "SELECT SUM (size) FROM tsk_image_info WHERE tsk_image_info.obj_id = " + getId());
            if (resultSet.next()) {
                contentSize = resultSet.getLong("sum");
            }
        } catch (SQLException ex) {
            throw new TskCoreException(String.format("There was a problem while querying the database for size data for object ID %d.", getId()), ex);
        } finally {
            // Always release JDBC resources and the connection.
            closeResultSet(resultSet);
            closeStatement(statement);
            connection.close();
        }
        return contentSize;
    }

    /**
     * Sets the acquisition details field in the case database.
     *
     * @param details The acquisition details
     *
     * @throws TskCoreException Thrown if the data can not be written
     */
    @Override
    public void setAcquisitionDetails(String details) throws TskCoreException {
        getSleuthkitCase().setAcquisitionDetails(this, details);
    }

    /**
     * Sets the acquisition tool details such as its name, version number and
     * any settings used during the acquisition to acquire data.
     *
     * @param name     The name of the acquisition tool. May be NULL.
     * @param version  The acquisition tool version number. May be NULL.
     * @param settings The settings used by the acquisition tool. May be NULL.
     *
     * @throws TskCoreException Thrown if the data can not be written
     */
    @Override
    public void setAcquisitionToolDetails(String name, String version, String settings) throws TskCoreException {
        getSleuthkitCase().setAcquisitionToolDetails(this, name, version, settings);
    }

    /**
     * Gets the acquisition tool settings field from the case database.
     *
     * @return The acquisition tool settings. May be Null if not set.
     *
     * @throws TskCoreException Thrown if the data can not be read
     */
    public String getAcquisitionToolSettings() throws TskCoreException {
        return getSleuthkitCase().getDataSourceInfoString(this, "acquisition_tool_settings");
    }

    /**
     * Gets the acquisition tool name field from the case database.
     *
     * @return The acquisition tool name. May be Null if not set.
     *
     * @throws TskCoreException Thrown if the data can not be read
     */
    public String getAcquisitionToolName() throws TskCoreException {
        return getSleuthkitCase().getDataSourceInfoString(this, "acquisition_tool_name");
    }

    /**
     * Gets the acquisition tool version field from the case database.
     *
     * @return The acquisition tool version. May be Null if not set.
     *
     * @throws TskCoreException Thrown if the data can not be read
     */
    public String getAcquisitionToolVersion() throws TskCoreException {
        return getSleuthkitCase().getDataSourceInfoString(this, "acquisition_tool_version");
    }

    /**
     * Gets the added date field from the case database.
     *
     * @return The date time when the image was added in epoch seconds.
     *
     * @throws TskCoreException Thrown if the data can not be read
     */
    public Long getDateAdded() throws TskCoreException {
        return getSleuthkitCase().getDataSourceInfoLong(this, "added_date_time");
    }

    /**
     * Gets the acquisition details field from the case database.
     *
     * @return The acquisition details
     *
     * @throws TskCoreException Thrown if the data can not be read
     */
    @Override
    public String getAcquisitionDetails() throws TskCoreException {
        return getSleuthkitCase().getAcquisitionDetails(this);
    }

    /**
     * Gets the host for this data source.
     *
     * @return The host
     *
     * @throws TskCoreException
     */
    @Override
    public Host getHost() throws TskCoreException {
        // This is a check-then-act race condition that may occasionally result
        // in additional processing but is safer than using locks.
        if (host == null) {
            host = getSleuthkitCase().getHostManager().getHostByDataSource(this);
        }
        return host;
    }

    /**
     * Updates the image's total size and sector size. This function may be used
     * to update the sizes after the image was created.
     *
     * Can only update the sizes if they were not set before. Will throw
     * TskCoreException if the values in the db are not 0 prior to this call.
     *
     * @param totalSize  The total size
     * @param sectorSize The sector size
     *
     * @throws TskCoreException If there is an error updating the case database.
     *
     */
    public void setSizes(long totalSize, long sectorSize) throws TskCoreException {
        getSleuthkitCase().setImageSizes(this, totalSize, sectorSize);
    }

    /**
     * Close a ResultSet.
     *
     * @param resultSet The ResultSet to be closed.
     */
    private static void closeResultSet(ResultSet resultSet) {
        if (resultSet != null) {
            try {
                resultSet.close();
            } catch (SQLException ex) {
                LOGGER.log(Level.SEVERE, "Error closing ResultSet", ex); //NON-NLS
            }
        }
    }

    /**
     * Close a Statement.
     *
     * @param statement The Statement to be closed.
     */
    private static void closeStatement(Statement statement) {
        if (statement != null) {
            try {
                statement.close();
            } catch (SQLException ex) {
                LOGGER.log(Level.SEVERE, "Error closing Statement", ex); //NON-NLS
            }
        }
    }
}
sleuthkit-4.11.1/bindings/java/src/org/sleuthkit/datamodel/BlackboardArtifact.java000644 000765 000024 00000244111 14137073414 031022 0ustar00carrierstaff000000 000000 
/*
 * Sleuth Kit Data Model
 *
 * Copyright 2011-2021 Basis Technology Corp.
 * Contact: carrier sleuthkit org
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.sleuthkit.datamodel;

import java.io.Serializable;
import java.io.UnsupportedEncodingException;
import java.sql.SQLException;
import java.text.MessageFormat;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.ResourceBundle;
import java.util.Set;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import org.sleuthkit.datamodel.Blackboard.BlackboardException;
import org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE;
import org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE;
import org.sleuthkit.datamodel.SleuthkitCase.CaseDbTransaction;

/**
 * An artifact that has been posted to the blackboard. Artifacts store analysis
 * results (such as hash set hits) and extracted data (such as a web bookmark).
 * An artifact is a typed collection of name value pairs (attributes) that is
 * associated with its source content (A data source, a file, or another
 * artifact). Both standard artifact types and custom artifact types are
 * supported.
 *
 * IMPORTANT NOTE: No more than one attribute of a given type should be added to
 * an artifact. It is undefined about which will be used.
 */
public abstract class BlackboardArtifact implements Content {

    private static final ResourceBundle bundle = ResourceBundle.getBundle("org.sleuthkit.datamodel.Bundle");
    private final long artifactId;
    private final long sourceObjId; // refers to objID of parent/source object
    private final long artifactObjId; // objId of the artifact in tsk_objects. TBD: replace artifactID with this
    private final Long dataSourceObjId; // objId of the data source in tsk_objects.
    private final int artifactTypeId;
    private final String artifactTypeName;
    private final String displayName;
    private ReviewStatus reviewStatus;
    private final SleuthkitCase sleuthkitCase;
    private final List attrsCache = new ArrayList(); // attribute cache; valid once loadedCacheFromDb is true
    private boolean loadedCacheFromDb = false;
    private volatile Content parent;      // lazily fetched in getParent()
    private volatile String uniquePath;   // lazily computed in getUniquePath()
    private byte[] contentBytes = null;
    private volatile boolean checkedHasChildren;
    private volatile boolean hasChildren;
    private volatile int childrenCount;

    /**
     * Constructs an artifact that has been posted to the blackboard. An
     * artifact is a typed collection of name value pairs (attributes) that is
     * associated with its source content (either a data source, or file within
     * a data source). Both standard artifact types and custom artifact types
     * are supported.
     *
     * @param sleuthkitCase    The SleuthKit case (case database) that contains
     *                         the artifact data.
     * @param artifactID       The unique id for this artifact.
     * @param sourceObjId      The unique id of the content with which this
     *                         artifact is associated.
     * @param artifactObjId    The unique id this artifact, in tsk_objects.
     * @param dataSourceObjId  Object ID of the datasource where the artifact
     *                         was found. May be null.
     * @param artifactTypeID   The type id of this artifact.
     * @param artifactTypeName The type name of this artifact.
     * @param displayName      The display name of this artifact.
     * @param reviewStatus     The review status of this artifact.
     */
    BlackboardArtifact(SleuthkitCase sleuthkitCase, long artifactID, long sourceObjId, long artifactObjId, Long dataSourceObjId, int artifactTypeID, String artifactTypeName, String displayName, ReviewStatus reviewStatus) {
        this.sleuthkitCase = sleuthkitCase;
        this.artifactId = artifactID;
        this.sourceObjId = sourceObjId;
        this.artifactObjId = artifactObjId;
        this.artifactTypeId = artifactTypeID;
        this.dataSourceObjId = dataSourceObjId;
        this.artifactTypeName = artifactTypeName;
        this.displayName = displayName;
        this.reviewStatus = reviewStatus;
        this.checkedHasChildren = false;
        this.hasChildren = false;
        this.childrenCount = -1;
    }

    /**
     * Constructs an artifact that has been posted to the blackboard. An
     * artifact is a typed collection of name value pairs (attributes) that is
     * associated with its source content (either a data source, or file within
     * a data source). Both standard artifact types and custom artifact types
     * are supported.
     *
     * @param sleuthkitCase    The SleuthKit case (case database) that contains
     *                         the artifact data.
     * @param artifactID       The unique id for this artifact.
     * @param sourceObjId      The unique id of the content with which this
     *                         artifact is associated.
     * @param artifactObjID    The unique id this artifact. in tsk_objects
     * @param dataSourceObjID  Unique id of the data source.
     * @param artifactTypeID   The type id of this artifact.
     * @param artifactTypeName The type name of this artifact.
     * @param displayName      The display name of this artifact.
     * @param reviewStatus     The review status of this artifact.
     * @param isNew            If the artifact is newly created.
     */
    BlackboardArtifact(SleuthkitCase sleuthkitCase, long artifactID, long sourceObjId, long artifactObjID, Long dataSourceObjID, int artifactTypeID, String artifactTypeName, String displayName, ReviewStatus reviewStatus, boolean isNew) {
        this(sleuthkitCase, artifactID, sourceObjId, artifactObjID, dataSourceObjID, artifactTypeID, artifactTypeName, displayName, reviewStatus);
        if (isNew) {
            /*
             * If this object represents a newly created artifact, then its
             * collection of attributes has already been populated and there is
             * no need to fetch them from the case database.
             */
            this.loadedCacheFromDb = true;
        }
    }

    /**
     * Gets the SleuthKit case (case database) that contains the data for this
     * artifact.
     *
     * @return The SleuthKit case (case database) object.
     */
    public SleuthkitCase getSleuthkitCase() {
        return sleuthkitCase;
    }

    /**
     * Gets the unique id for this artifact.
     *
     * @return The artifact id.
     */
    public long getArtifactID() {
        return this.artifactId;
    }

    /**
     * Gets the object id of the source content (data source or file within a
     * data source) of this artifact
     *
     * @return The object id.
     */
    public long getObjectID() {
        return this.sourceObjId;
    }

    /**
     * Gets the object id of the data source for this artifact.
     *
     * @return The data source object id, may be null.
     */
    Long getDataSourceObjectID() {
        return this.dataSourceObjId;
    }

    /**
     * Gets the artifact type id for this artifact.
     *
     * @return The artifact type id.
     */
    public int getArtifactTypeID() {
        return this.artifactTypeId;
    }

    /**
     * Gets the artifact type for this artifact.
     *
     * @return The artifact type.
     */
    public BlackboardArtifact.Type getType() throws TskCoreException {
        // Prefer the in-memory standard type table; fall back to a DB lookup
        // for custom artifact types.
        BlackboardArtifact.Type standardTypesValue = BlackboardArtifact.Type.STANDARD_TYPES.get(getArtifactTypeID());
        if (standardTypesValue != null) {
            return standardTypesValue;
        } else {
            return getSleuthkitCase().getArtifactType(getArtifactTypeID());
        }
    }

    /**
     * Gets the artifact type name for this artifact.
     *
     * @return The artifact type name.
     */
    public String getArtifactTypeName() {
        return this.artifactTypeName;
    }

    /**
     * Gets the artifact type display name for this artifact.
     *
     * @return The artifact type display name.
     */
    public String getDisplayName() {
        return this.displayName;
    }

    /**
     * Gets a short description for this artifact.
     *
     * @return The description, may be the empty string.
     *
     * @throws TskCoreException if there is a problem creating the description.
     */
    public String getShortDescription() throws TskCoreException {
        BlackboardAttribute attr = null;
        StringBuilder shortDescription = new StringBuilder("");
        // For standard types, pick the attribute that best identifies the
        // artifact (varies per type); custom types fall through to the
        // display name below.
        if(BlackboardArtifact.Type.STANDARD_TYPES.get(artifactTypeId) != null) {
            switch (ARTIFACT_TYPE.fromID(artifactTypeId)) {
                case TSK_WIFI_NETWORK_ADAPTER:
                    attr = getAttribute(new BlackboardAttribute.Type(ATTRIBUTE_TYPE.TSK_MAC_ADDRESS));
                    break;
                case TSK_WIFI_NETWORK:
                    attr = getAttribute(new BlackboardAttribute.Type(ATTRIBUTE_TYPE.TSK_SSID));
                    break;
                case TSK_REMOTE_DRIVE:
                    attr = getAttribute(new BlackboardAttribute.Type(ATTRIBUTE_TYPE.TSK_REMOTE_PATH));
                    break;
                case TSK_SERVICE_ACCOUNT:
                case TSK_SCREEN_SHOTS:
                case TSK_DELETED_PROG:
                case TSK_METADATA:
                case TSK_OS_INFO:
                case TSK_PROG_NOTIFICATIONS:
                case TSK_PROG_RUN:
                case TSK_RECENT_OBJECT:
                case TSK_USER_DEVICE_EVENT:
                case TSK_WEB_SEARCH_QUERY:
                    attr = getAttribute(new BlackboardAttribute.Type(ATTRIBUTE_TYPE.TSK_PROG_NAME));
                    break;
                case TSK_BLUETOOTH_PAIRING:
                    attr = getAttribute(new BlackboardAttribute.Type(ATTRIBUTE_TYPE.TSK_DEVICE_NAME));
                    break;
                case TSK_ACCOUNT:
                    attr = getAttribute(new BlackboardAttribute.Type(ATTRIBUTE_TYPE.TSK_ID));
                    if(attr == null) {
                        attr = getAttribute(new BlackboardAttribute.Type(ATTRIBUTE_TYPE.TSK_CARD_NUMBER));
                    }
                    break;
                case TSK_WEB_CATEGORIZATION:
                case TSK_BLUETOOTH_ADAPTER:
                case TSK_GPS_AREA:
                case TSK_GPS_BOOKMARK:
                case TSK_GPS_LAST_KNOWN_LOCATION:
                case TSK_GPS_ROUTE:
                case TSK_GPS_SEARCH:
                case TSK_GPS_TRACK:
                case TSK_WEB_FORM_AUTOFILL:
                    attr = getAttribute(new BlackboardAttribute.Type(ATTRIBUTE_TYPE.TSK_NAME));
                    break;
                case TSK_WEB_ACCOUNT_TYPE:
                    attr = getAttribute(new BlackboardAttribute.Type(ATTRIBUTE_TYPE.TSK_TEXT));
                    break;
                case TSK_HASHSET_HIT:
                case TSK_INTERESTING_ARTIFACT_HIT:
                case TSK_INTERESTING_FILE_HIT:
                case TSK_YARA_HIT:
                    attr = getAttribute(new BlackboardAttribute.Type(ATTRIBUTE_TYPE.TSK_SET_NAME));
                    break;
                case TSK_ENCRYPTION_DETECTED:
                case TSK_ENCRYPTION_SUSPECTED:
                case TSK_OBJECT_DETECTED:
                case TSK_USER_CONTENT_SUSPECTED:
                case TSK_VERIFICATION_FAILED:
                    attr = getAttribute(new BlackboardAttribute.Type(ATTRIBUTE_TYPE.TSK_COMMENT));
                    break;
                case TSK_DATA_SOURCE_USAGE:
                case TSK_CALENDAR_ENTRY:
                    attr = getAttribute(new BlackboardAttribute.Type(ATTRIBUTE_TYPE.TSK_DESCRIPTION));
                    break;
                case TSK_WEB_BOOKMARK: //web_bookmark, web_cookie, web_download, and web_history are the same attribute for now
                case TSK_WEB_COOKIE:
                case TSK_WEB_DOWNLOAD:
                case TSK_WEB_HISTORY:
                case TSK_WEB_CACHE:
                    attr = getAttribute(new BlackboardAttribute.Type(ATTRIBUTE_TYPE.TSK_DOMAIN));
                    break;
                case TSK_KEYWORD_HIT:
                    attr = getAttribute(new BlackboardAttribute.Type(ATTRIBUTE_TYPE.TSK_KEYWORD_PREVIEW));
                    break;
                case TSK_DEVICE_ATTACHED:
                    attr = getAttribute(new BlackboardAttribute.Type(ATTRIBUTE_TYPE.TSK_DEVICE_ID));
                    break;
                case TSK_CONTACT: //contact, message, and calllog are the same attributes for now
                case TSK_MESSAGE:
                case TSK_CALLLOG:
                case TSK_SPEED_DIAL_ENTRY:
                case TSK_WEB_FORM_ADDRESS:
                    //get the first of these attributes which exists and is non null
                    final ATTRIBUTE_TYPE[] typesThatCanHaveName = {ATTRIBUTE_TYPE.TSK_NAME, ATTRIBUTE_TYPE.TSK_PHONE_NUMBER, ATTRIBUTE_TYPE.TSK_PHONE_NUMBER_FROM, ATTRIBUTE_TYPE.TSK_PHONE_NUMBER_TO, ATTRIBUTE_TYPE.TSK_PHONE_NUMBER_HOME, ATTRIBUTE_TYPE.TSK_PHONE_NUMBER_MOBILE, ATTRIBUTE_TYPE.TSK_PHONE_NUMBER_OFFICE, ATTRIBUTE_TYPE.TSK_EMAIL, ATTRIBUTE_TYPE.TSK_EMAIL_FROM, ATTRIBUTE_TYPE.TSK_EMAIL_TO, ATTRIBUTE_TYPE.TSK_EMAIL_HOME, ATTRIBUTE_TYPE.TSK_EMAIL_OFFICE, ATTRIBUTE_TYPE.TSK_LOCATION}; //in the order we want to use them
                    for (ATTRIBUTE_TYPE t : typesThatCanHaveName) {
                        attr = getAttribute(new BlackboardAttribute.Type(t));
                        if (attr != null && !attr.getDisplayString().isEmpty()) {
                            break;
                        }
                    }
                    break;
                default:
                    break;
            }
        }
        if (attr != null) {
            shortDescription.append(attr.getAttributeType().getDisplayName()).append(": ").append(attr.getDisplayString());
        } else {
            shortDescription.append(getDisplayName());
        }
        //get the first of these date attributes which exists and is non null
        final ATTRIBUTE_TYPE[] typesThatCanHaveDate = {ATTRIBUTE_TYPE.TSK_DATETIME, ATTRIBUTE_TYPE.TSK_DATETIME_SENT, ATTRIBUTE_TYPE.TSK_DATETIME_RCVD, ATTRIBUTE_TYPE.TSK_DATETIME_CREATED, ATTRIBUTE_TYPE.TSK_DATETIME_MODIFIED, ATTRIBUTE_TYPE.TSK_DATETIME_ACCESSED, ATTRIBUTE_TYPE.TSK_DATETIME_START, ATTRIBUTE_TYPE.TSK_DATETIME_END}; //in the order we want to use them
        BlackboardAttribute date;
        for (ATTRIBUTE_TYPE t : typesThatCanHaveDate) {
            date = getAttribute(new BlackboardAttribute.Type(t));
            if (date != null && !date.getDisplayString().isEmpty()) {
                shortDescription.append(" ");
                shortDescription.append(MessageFormat.format(bundle.getString("BlackboardArtifact.shortDescriptionDate.text"), date.getDisplayString())); //NON-NLS
                break;
            }
        }
        return shortDescription.toString();
    }

    /**
     * Gets the review status of this artifact, i.e., whether it has been
     * approved, rejected, or is still waiting for a decision from the user.
     *
     * @return The review status.
     */
    public ReviewStatus getReviewStatus() {
        return reviewStatus;
    }

    /**
     * Sets the review status of this artifact, i.e., whether it has been
     * approved, rejected, or is still waiting for a decision from the user.
     *
     * @param newStatus new status of the artifact
     *
     * @throws TskCoreException If an error occurs
     */
    public void setReviewStatus(ReviewStatus newStatus) throws TskCoreException {
        getSleuthkitCase().setReviewStatus(this, newStatus);
        reviewStatus = newStatus;
    }

    /**
     * Adds an attribute to this artifact.
     *
     * IMPORTANT NOTE: No more than one attribute of a given type should be
     * added to an artifact.
     *
     * @param attribute The attribute to add
     *
     * @throws TskCoreException If an error occurs and the attribute was not
     *                          added to the artifact.
     */
    public void addAttribute(BlackboardAttribute attribute) throws TskCoreException {
        attribute.setArtifactId(artifactId);
        attribute.setCaseDatabase(getSleuthkitCase());
        getSleuthkitCase().addBlackboardAttribute(attribute, this.artifactTypeId);
        attrsCache.add(attribute);
    }

    /**
     * Gets the attributes of this artifact.
     *
     * @return The attributes.
     *
     * @throws TskCoreException If an error occurs and the attributes cannot be
     *                          fetched.
     */
    public List getAttributes() throws TskCoreException {
        ArrayList attributes;
        if (false == loadedCacheFromDb) {
            // Cache miss: load all attributes from the database and cache them.
            attributes = getSleuthkitCase().getBlackboardAttributes(this);
            attrsCache.clear();
            attrsCache.addAll(attributes);
            loadedCacheFromDb = true;
        } else {
            attributes = new ArrayList(attrsCache);
        }
        return attributes;
    }

    /**
     * Gets the attribute of this artifact that matches a given type.
     *
     * IMPORTANT NOTE: No more than one attribute of a given type should be
     * added to an artifact.
     *
     * @param attributeType The attribute type.
     *
     * @return The first attribute of the given type, or null if there are no
     *         attributes of that type.
     *
     * @throws TskCoreException If an error occurs and the attribute is not
     *                          fetched.
     */
    public BlackboardAttribute getAttribute(BlackboardAttribute.Type attributeType) throws TskCoreException {
        List attributes = this.getAttributes();
        for (BlackboardAttribute attribute : attributes) {
            if (attribute.getAttributeType().equals(attributeType)) {
                return attribute;
            }
        }
        return null;
    }

    /**
     * Adds a collection of attributes to this artifact in a single operation
     * (faster than adding each attribute individually).
     *
     * @param attributes The collection of attributes.
     *
     * @throws TskCoreException If an error occurs and the attributes were not
     *                          added to the artifact.
     */
    public void addAttributes(Collection attributes) throws TskCoreException {
        if (attributes.isEmpty()) {
            return;
        }
        for (BlackboardAttribute attribute : attributes) {
            attribute.setArtifactId(artifactId);
            attribute.setCaseDatabase(getSleuthkitCase());
        }
        getSleuthkitCase().addBlackboardAttributes(attributes, artifactTypeId);
        attrsCache.addAll(attributes);
    }

    /**
     * Adds a collection of attributes to this artifact in a single operation
     * (faster than adding each attribute individually) within a transaction
     * supplied by the caller.
     *
     * @param attributes        The collection of attributes.
     * @param caseDbTransaction The transaction in the scope of which the
     *                          operation is to be performed, managed by the
     *                          caller. Null is not permitted.
     *
     * @throws TskCoreException If an error occurs and the attributes were not
     *                          added to the artifact. If caseDbTransaction is
     *                          null or if attributes is null or empty.
     */
    public void addAttributes(Collection attributes, final SleuthkitCase.CaseDbTransaction caseDbTransaction) throws TskCoreException {
        if (Objects.isNull(attributes) || attributes.isEmpty()) {
            throw new TskCoreException("Illegal argument passed to addAttributes: null or empty attributes passed to addAttributes");
        }
        if (Objects.isNull(caseDbTransaction)) {
            throw new TskCoreException("Illegal argument passed to addAttributes: null caseDbTransaction passed to addAttributes");
        }
        try {
            for (final BlackboardAttribute attribute : attributes) {
                attribute.setArtifactId(artifactId);
                attribute.setCaseDatabase(getSleuthkitCase());
                getSleuthkitCase().addBlackBoardAttribute(attribute, artifactTypeId, caseDbTransaction.getConnection());
            }
            attrsCache.addAll(attributes);
        } catch (SQLException ex) {
            throw new TskCoreException("Error adding blackboard attributes", ex);
        }
    }

    /**
     * This overriding implementation returns the unique path of the parent. It
     * does not include the Artifact name in the unique path.
     *
     * @throws org.sleuthkit.datamodel.TskCoreException
     */
    @Override
    public String getUniquePath() throws TskCoreException {
        // Return the path of the parent file
        // It is possible that multiple threads could be doing this calculation
        // simultaneously, but it's worth the potential extra processing to prevent deadlocks.
        if (uniquePath == null) {
            String tempUniquePath = "";
            Content myParent = getParent();
            if (myParent != null) {
                tempUniquePath = myParent.getUniquePath();
            }
            // Don't update uniquePath until it is complete.
            uniquePath = tempUniquePath;
        }
        return uniquePath;
    }

    @Override
    public Content getParent() throws TskCoreException {
        // Lazily fetch and cache the source content of this artifact.
        if (parent == null) {
            parent = getSleuthkitCase().getContentById(sourceObjId);
        }
        return parent;
    }

    /**
     * Get all artifacts associated with this content
     *
     * @return a list of blackboard artifacts
     *
     * @throws TskCoreException if critical error occurred within tsk core
     */
    @Override
    public ArrayList getAllArtifacts() throws TskCoreException {
        // Currently we don't have any artifacts derived from an artifact.
        return new ArrayList();
    }

    @Override
    public List getAllAnalysisResults() throws TskCoreException {
        return sleuthkitCase.getBlackboard().getAnalysisResults(artifactObjId);
    }

    @Override
    public List getAllDataArtifacts() throws TskCoreException {
        return sleuthkitCase.getBlackboard().getDataArtifactsBySource(artifactObjId);
    }

    @Override
    public Score getAggregateScore() throws TskCoreException {
        return sleuthkitCase.getScoringManager().getAggregateScore(artifactObjId);
    }

    @Override
    public List getAnalysisResults(BlackboardArtifact.Type artifactType) throws TskCoreException {
        return sleuthkitCase.getBlackboard().getAnalysisResults(artifactObjId, artifactType.getTypeID()); //NON-NLS
    }

    /**
     * Get all artifacts associated with this content that have the given type
     * name
     *
     * @param artifactTypeName name of the type to look up
     *
     * @return a list of blackboard artifacts matching the type
     *
     * @throws TskCoreException if critical error occurred within tsk core
     */
    @Override
    public ArrayList getArtifacts(String artifactTypeName) throws TskCoreException {
        // Currently we don't have any artifacts derived from an artifact.
        return new ArrayList();
    }

    /**
     * Get all artifacts associated with this content that have the given type
     * id
     *
     * @param artifactTypeID type id to look up
     *
     * @return a list of blackboard artifacts matching the type
     *
     * @throws TskCoreException if critical error occurred within tsk core
     */
    @Override
    public ArrayList getArtifacts(int artifactTypeID) throws TskCoreException {
        // Currently we don't have any artifacts derived from an artifact.
        return new ArrayList();
    }

    /**
     * Get all artifacts associated with this content that have the given type
     *
     * @param type type to look up
     *
     * @return a list of blackboard artifacts matching the type
     *
     * @throws TskCoreException if critical error occurred within tsk core
     */
    @Override
    public ArrayList getArtifacts(BlackboardArtifact.ARTIFACT_TYPE type) throws TskCoreException {
        // Currently we don't have any artifacts derived from an artifact.
        return new ArrayList();
    }

    /**
     * Get count of all artifacts associated with this content
     *
     * @return count of all blackboard artifacts for this content
     *
     * @throws TskCoreException if critical error occurred within tsk core
     */
    @Override
    public long getAllArtifactsCount() throws TskCoreException {
        // Currently we don't have any artifacts derived from an artifact.
        return 0;
    }

    /**
     * Get count of all artifacts associated with this content that have the
     * given type name
     *
     * @param artifactTypeName name of the type to look up
     *
     * @return count of blackboard artifacts matching the type
     *
     * @throws TskCoreException if critical error occurred within tsk core
     */
    @Override
    public long getArtifactsCount(String artifactTypeName) throws TskCoreException {
        // Currently we don't have any artifacts derived from an artifact.
        return 0;
    }

    /**
     * Get count of all artifacts associated with this content that have the
     * given type id
     *
     * @param artifactTypeID type id to look up
     *
     * @return count of blackboard artifacts matching the type
     *
     * @throws TskCoreException if critical error occurred within tsk core
     */
    @Override
    public long getArtifactsCount(int artifactTypeID) throws TskCoreException {
        // Currently we don't have any artifacts derived from an artifact.
        return 0;
    }

    /**
     * Get count of all artifacts associated with this content that have the
     * given type
     *
     * @param type type to look up
     *
     * @return count of blackboard artifacts matching the type
     *
     * @throws TskCoreException if critical error occurred within tsk core
     */
    @Override
    public long getArtifactsCount(BlackboardArtifact.ARTIFACT_TYPE type) throws TskCoreException {
        // Currently we don't have any artifacts derived from an artifact.
        return 0;
    }

    /**
     * Return the TSK_GEN_INFO artifact for the file so that individual
     * attributes can be added to it. Creates one if it does not already exist.
     *
     * @return Instance of the TSK_GEN_INFO artifact
     *
     * @throws TskCoreException
     */
    @Override
    public BlackboardArtifact getGenInfoArtifact() throws TskCoreException {
        // Currently we don't have any artifacts derived from an artifact.
        return null;
    }

    /**
     * Return the TSK_GEN_INFO artifact for the file so that individual
     * attributes can be added to it. If one does not create, behavior depends
     * on the create argument.
     *
     * @param create If true, an artifact will be created if it does not already
     *               exist.
     *
     * @return Instance of the TSK_GEN_INFO artifact or null if artifact does
     *         not already exist and create was set to false
     *
     * @throws TskCoreException
     */
    @Override
    public BlackboardArtifact getGenInfoArtifact(boolean create) throws TskCoreException {
        // Currently we don't have any artifacts derived from an artifact.
        if (create) {
            throw new TskCoreException("Artifacts of artifacts are not supported.");
        }
        return null;
    }

    /**
     * Return attributes of a given type from TSK_GEN_INFO.
     *
     * @param attr_type Attribute type to find inside of the TSK_GEN_INFO
     *                  artifact.
     *
     * @return Attributes
     *
     * @throws org.sleuthkit.datamodel.TskCoreException
     */
    @Override
    public ArrayList getGenInfoAttributes(BlackboardAttribute.ATTRIBUTE_TYPE attr_type) throws TskCoreException {
        // Currently we don't have any artifacts derived from an artifact.
return new ArrayList<>(); } /** * Get the names of all the hashsets that this content is in. * * @return the names of the hashsets that this content is in * * @throws TskCoreException if critical error occurred within tsk core */ @Override public Set getHashSetNames() throws TskCoreException { // Currently we don't have any artifacts derived from an artifact. return new HashSet(); } /** * Create and add an artifact associated with this content to the blackboard * * @param artifactTypeID id of the artifact type (if the id doesn't already * exist an exception will be thrown) * * @return the blackboard artifact created (the artifact type id can be * looked up from this) * * @throws TskCoreException if critical error occurred within tsk core * @deprecated Use the Blackboard to create Data Artifacts and Analysis Results. */ @Deprecated @Override public BlackboardArtifact newArtifact(int artifactTypeID) throws TskCoreException { throw new TskCoreException("Cannot create artifact of an artifact. 
Not supported."); } @Override public AnalysisResultAdded newAnalysisResult(BlackboardArtifact.Type artifactType, Score score, String conclusion, String configuration, String justification, Collection attributesList) throws TskCoreException { CaseDbTransaction trans = sleuthkitCase.beginTransaction(); try { AnalysisResultAdded resultAdded = sleuthkitCase.getBlackboard().newAnalysisResult(artifactType, this.getId(), this.getDataSource().getId(), score, conclusion, configuration, justification, attributesList, trans); trans.commit(); return resultAdded; } catch (BlackboardException ex) { trans.rollback(); throw new TskCoreException("Error adding analysis result.", ex); } } @Override public AnalysisResultAdded newAnalysisResult(BlackboardArtifact.Type artifactType, Score score, String conclusion, String configuration, String justification, Collection attributesList, long dataSourceId) throws TskCoreException { CaseDbTransaction trans = sleuthkitCase.beginTransaction(); try { AnalysisResultAdded resultAdded = sleuthkitCase.getBlackboard().newAnalysisResult(artifactType, this.getId(), dataSourceId, score, conclusion, configuration, justification, attributesList, trans); trans.commit(); return resultAdded; } catch (BlackboardException ex) { trans.rollback(); throw new TskCoreException("Error adding analysis result.", ex); } } @Override public DataArtifact newDataArtifact(BlackboardArtifact.Type artifactType, Collection attributesList, Long osAccountId) throws TskCoreException { throw new TskCoreException("Cannot create data artifact of an artifact. Not supported."); } @Override public DataArtifact newDataArtifact(BlackboardArtifact.Type artifactType, Collection attributesList, Long osAccountId, long dataSourceId) throws TskCoreException { throw new TskCoreException("Cannot create data artifact of an artifact. 
Not supported."); } @Override public DataArtifact newDataArtifact(BlackboardArtifact.Type artifactType, Collection attributesList) throws TskCoreException { return newDataArtifact(artifactType, attributesList, null); } /** * Create and add an artifact associated with this content to the blackboard * * @param type artifact enum type * * @return the blackboard artifact created (the artifact type id can be * looked up from this) * * @throws TskCoreException if critical error occurred within tsk core * @deprecated Use the Blackboard to create Data Artifacts and Analysis Results. */ @Deprecated @Override public BlackboardArtifact newArtifact(BlackboardArtifact.ARTIFACT_TYPE type) throws TskCoreException { throw new TskCoreException("Cannot create artifact of an artifact. Not supported."); } /** * Accepts a Sleuthkit item visitor (Visitor design pattern). * * @param visitor A SleuthkitItemVisitor supplying an algorithm to run using * this derived file as input. * * @return The output of the algorithm. */ @Override public T accept(ContentVisitor visitor) { return visitor.visit(this); } /** * Tests this artifact for equality with another object. * * @param object The other object. * * @return True or false. */ @Override public boolean equals(Object object) { if (object == null) { return false; } if (getClass() != object.getClass()) { return false; } final BlackboardArtifact other = (BlackboardArtifact) object; return artifactId == other.getArtifactID(); } /** * Gets the hash code for this artifact. * * @return The hash code. */ @Override public int hashCode() { int hash = 7; hash = 41 * hash + (int) (this.artifactId ^ (this.artifactId >>> 32)); return hash; } /** * Gets a string representation of this artifact. * * @return The string. 
*/ @Override public String toString() { return "BlackboardArtifact{" + "artifactID=" + artifactId + ", objID=" + getObjectID() + ", artifactObjID=" + artifactObjId + ", artifactTypeID=" + artifactTypeId + ", artifactTypeName=" + artifactTypeName + ", displayName=" + displayName + ", Case=" + getSleuthkitCase() + '}'; //NON-NLS } /** * Accepts a visitor SleuthkitItemVisitor that will perform an operation on * this artifact type and return some object as the result of the operation. * * @param visitor The visitor, where the type parameter of the visitor is * the type of the object that will be returned as the result * of the visit operation. * * @return An object of type T. */ @Override public T accept(SleuthkitItemVisitor visitor) { return visitor.visit(this); } /** * Get the (reported) size of the content object. Artifact content is a * string dump of all its attributes. * * @return size of the content in bytes */ @Override public long getSize() { if (contentBytes == null) { try { loadArtifactContent(); } catch (TskCoreException ex) { return 0; } } return contentBytes.length; } /** * Close the Content object. */ @Override public void close() { contentBytes = null; } /** * Reads content data for this artifact Artifact content is a string dump of * all its attributes. * * @param buf a character array of data (in bytes) to copy read data to * @param offset byte offset in the content to start reading from * @param len number of bytes to read into buf. 
* * @return num of bytes read, or -1 on error * * @throws TskCoreException if critical error occurred during read in the * tsk core */ @Override public final int read(byte[] buf, long offset, long len) throws TskCoreException { if (contentBytes == null) { loadArtifactContent(); } if (0 == contentBytes.length) { return 0; } // Copy bytes long readLen = Math.min(contentBytes.length - offset, len); System.arraycopy(contentBytes, 0, buf, 0, (int) readLen); return (int) readLen; } @Override public String getName() { return this.displayName + getArtifactID(); } @Override public Content getDataSource() throws TskCoreException { return dataSourceObjId != null ? getSleuthkitCase().getContentById(dataSourceObjId) : null; } /** * Load and save the content for the artifact. Artifact content is a string * dump of all its attributes. * * @throws TskCoreException if critical error occurred during read */ private void loadArtifactContent() throws TskCoreException { StringBuilder artifactContents = new StringBuilder(); Content dataSource = null; try { dataSource = getDataSource(); } catch (TskCoreException ex) { throw new TskCoreException("Unable to get datasource for artifact: " + this.toString(), ex); } if (dataSource == null) { throw new TskCoreException("Datasource was null for artifact: " + this.toString()); } try { for (BlackboardAttribute attribute : getAttributes()) { artifactContents.append(attribute.getAttributeType().getDisplayName()); artifactContents.append(" : "); artifactContents.append(attribute.getDisplayString()); artifactContents.append(System.lineSeparator()); } } catch (TskCoreException ex) { throw new TskCoreException("Unable to get attributes for artifact: " + this.toString(), ex); } try { contentBytes = artifactContents.toString().getBytes("UTF-8"); } catch (UnsupportedEncodingException ex) { throw new TskCoreException("Failed to convert artifact string to bytes for artifact: " + this.toString(), ex); } } /** * An artifact type. 
*/ public static final class Type implements Serializable { private static final long serialVersionUID = 1L; /** * A generic information artifact. */ public static final Type TSK_GEN_INFO = new BlackboardArtifact.Type(1, "TSK_GEN_INFO", bundle.getString("BlackboardArtifact.tskGenInfo.text"), Category.DATA_ARTIFACT); /** * A Web bookmark. Use methods in * org.sleuthkit.datamodel.blackboardutils.WebBrowserArtifactsHelper to * create bookmark artifacts. */ public static final Type TSK_WEB_BOOKMARK = new BlackboardArtifact.Type(2, "TSK_WEB_BOOKMARK", bundle.getString("BlackboardArtifact.tskWebBookmark.text"), Category.DATA_ARTIFACT); /** * A Web cookie. Use methods in * org.sleuthkit.datamodel.blackboardutils.WebBrowserArtifactsHelper to * create cookie artifacts. */ public static final Type TSK_WEB_COOKIE = new BlackboardArtifact.Type(3, "TSK_WEB_COOKIE", bundle.getString("BlackboardArtifact.tskWebCookie.text"), Category.DATA_ARTIFACT); /** * A Web history. Use methods in * org.sleuthkit.datamodel.blackboardutils.WebBrowserArtifactsHelper to * create history artifacts. */ public static final Type TSK_WEB_HISTORY = new BlackboardArtifact.Type(4, "TSK_WEB_HISTORY", bundle.getString("BlackboardArtifact.tskWebHistory.text"), Category.DATA_ARTIFACT); /** * A Web download. Use methods in * org.sleuthkit.datamodel.blackboardutils.WebBrowserArtifactsHelper to * create download artifacts. */ public static final Type TSK_WEB_DOWNLOAD = new BlackboardArtifact.Type(5, "TSK_WEB_DOWNLOAD", bundle.getString("BlackboardArtifact.tskWebDownload.text"), Category.DATA_ARTIFACT); /** * A recent object. */ public static final Type TSK_RECENT_OBJECT = new BlackboardArtifact.Type(6, "TSK_RECENT_OBJ", bundle.getString("BlackboardArtifact.tsk.recentObject.text"), Category.DATA_ARTIFACT); // 7 was used for deprecated TSK_GPS_TRACKPOINT. /** * An installed program. 
*/ public static final Type TSK_INSTALLED_PROG = new BlackboardArtifact.Type(8, "TSK_INSTALLED_PROG", bundle.getString("BlackboardArtifact.tskInstalledProg.text"), Category.DATA_ARTIFACT); /** * A search hit for a keyword. */ public static final Type TSK_KEYWORD_HIT = new BlackboardArtifact.Type(9, "TSK_KEYWORD_HIT", bundle.getString("BlackboardArtifact.tskKeywordHits.text"), Category.ANALYSIS_RESULT); /** * A hit for a hash set (hash database). */ public static final Type TSK_HASHSET_HIT = new BlackboardArtifact.Type(10, "TSK_HASHSET_HIT", bundle.getString("BlackboardArtifact.tskHashsetHit.text"), Category.ANALYSIS_RESULT); /** * An attached device. */ public static final Type TSK_DEVICE_ATTACHED = new BlackboardArtifact.Type(11, "TSK_DEVICE_ATTACHED", bundle.getString("BlackboardArtifact.tskDeviceAttached.text"), Category.DATA_ARTIFACT); /** * An meta-artifact to call attention to a file deemed to be * interesting. */ public static final Type TSK_INTERESTING_FILE_HIT = new BlackboardArtifact.Type(12, "TSK_INTERESTING_FILE_HIT", bundle.getString("BlackboardArtifact.tskInterestingFileHit.text"), Category.ANALYSIS_RESULT); /** * An email message. */ public static final Type TSK_EMAIL_MSG = new BlackboardArtifact.Type(13, "TSK_EMAIL_MSG", bundle.getString("BlackboardArtifact.tskEmailMsg.text"), Category.DATA_ARTIFACT); /** * Text extracted from the source content. */ public static final Type TSK_EXTRACTED_TEXT = new BlackboardArtifact.Type(14, "TSK_EXTRACTED_TEXT", bundle.getString("BlackboardArtifact.tskExtractedText.text"), Category.DATA_ARTIFACT); /** * A Web search engine query extracted from Web history. */ public static final Type TSK_WEB_SEARCH_QUERY = new BlackboardArtifact.Type(15, "TSK_WEB_SEARCH_QUERY", bundle.getString("BlackboardArtifact.tskWebSearchQuery.text"), Category.DATA_ARTIFACT); /** * EXIF metadata. 
*/ public static final Type TSK_METADATA_EXIF = new BlackboardArtifact.Type(16, "TSK_METADATA_EXIF", bundle.getString("BlackboardArtifact.tskMetadataExif.text"), Category.ANALYSIS_RESULT); // 17 was used for deprecated TSK_TAG_FILE. // 18 was used for deprecated TSK_TAG_ARTIFACT. /** * Information pertaining to an operating system. */ public static final Type TSK_OS_INFO = new BlackboardArtifact.Type(19, "TSK_OS_INFO", bundle.getString("BlackboardArtifact.tskOsInfo.text"), Category.DATA_ARTIFACT); // 20 was used for deprecated TSK_OS_ACCOUNT. /** * An application or Web service account. */ public static final Type TSK_SERVICE_ACCOUNT = new BlackboardArtifact.Type(21, "TSK_SERVICE_ACCOUNT", bundle.getString("BlackboardArtifact.tskServiceAccount.text"), Category.DATA_ARTIFACT); // 22 was used for deprecated TSK_TOOL_OUTPUT. /** * A contact extracted from a phone, or from an address * book/email/messaging application. Use methods in * org.sleuthkit.datamodel.blackboardutils.CommunicationArtifactsHelper * to create contact artifacts. */ public static final Type TSK_CONTACT = new BlackboardArtifact.Type(23, "TSK_CONTACT", bundle.getString("BlackboardArtifact.tskContact.text"), Category.DATA_ARTIFACT); /** * An SMS/MMS message extracted from phone, or from another messaging * application, like IM. Use methods in * org.sleuthkit.datamodel.blackboardutils.CommunicationArtifactsHelper * to create message artifacts. */ public static final Type TSK_MESSAGE = new BlackboardArtifact.Type(24, "TSK_MESSAGE", bundle.getString("BlackboardArtifact.tskMessage.text"), Category.DATA_ARTIFACT); /** * A phone call log extracted from a phone or softphone application. Use * methods in * org.sleuthkit.datamodel.blackboardutils.CommunicationArtifactsHelper * to create call log artifacts. 
*/ public static final Type TSK_CALLLOG = new BlackboardArtifact.Type(25, "TSK_CALLLOG", bundle.getString("BlackboardArtifact.tskCalllog.text"), Category.DATA_ARTIFACT); /** * A calendar entry from a phone, PIM, or a calendar application. */ public static final Type TSK_CALENDAR_ENTRY = new BlackboardArtifact.Type(26, "TSK_CALENDAR_ENTRY", bundle.getString("BlackboardArtifact.tskCalendarEntry.text"), Category.DATA_ARTIFACT); /** * A speed dial entry from a phone. */ public static final Type TSK_SPEED_DIAL_ENTRY = new BlackboardArtifact.Type(27, "TSK_SPEED_DIAL_ENTRY", bundle.getString("BlackboardArtifact.tskSpeedDialEntry.text"), Category.DATA_ARTIFACT); /** * A bluetooth pairing entry. */ public static final Type TSK_BLUETOOTH_PAIRING = new BlackboardArtifact.Type(28, "TSK_BLUETOOTH_PAIRING", bundle.getString("BlackboardArtifact.tskBluetoothPairing.text"), Category.DATA_ARTIFACT); /** * A GPS bookmark / way point that the user saved. */ public static final Type TSK_GPS_BOOKMARK = new BlackboardArtifact.Type(29, "TSK_GPS_BOOKMARK", bundle.getString("BlackboardArtifact.tskGpsBookmark.text"), Category.DATA_ARTIFACT); /** * A GPS last known location record. */ public static final Type TSK_GPS_LAST_KNOWN_LOCATION = new BlackboardArtifact.Type(30, "TSK_GPS_LAST_KNOWN_LOCATION", bundle.getString("BlackboardArtifact.tskGpsLastKnownLocation.text"), Category.DATA_ARTIFACT); /** * A GPS search record. */ public static final Type TSK_GPS_SEARCH = new BlackboardArtifact.Type(31, "TSK_GPS_SEARCH", bundle.getString("BlackboardArtifact.tskGpsSearch.text"), Category.DATA_ARTIFACT); /** * Application run information. */ public static final Type TSK_PROG_RUN = new BlackboardArtifact.Type(32, "TSK_PROG_RUN", bundle.getString("BlackboardArtifact.tskProgRun.text"), Category.DATA_ARTIFACT); /** * An encrypted file. 
*/ public static final Type TSK_ENCRYPTION_DETECTED = new BlackboardArtifact.Type(33, "TSK_ENCRYPTION_DETECTED", bundle.getString("BlackboardArtifact.tskEncryptionDetected.text"), Category.ANALYSIS_RESULT); /** * A file with an extension that does not match its MIME type. */ public static final Type TSK_EXT_MISMATCH_DETECTED = new BlackboardArtifact.Type(34, "TSK_EXT_MISMATCH_DETECTED", bundle.getString("BlackboardArtifact.tskExtMismatchDetected.text"), Category.ANALYSIS_RESULT); /** * An meta-artifact to call attention to an artifact deemed to be * interesting. */ public static final Type TSK_INTERESTING_ARTIFACT_HIT = new BlackboardArtifact.Type(35, "TSK_INTERESTING_ARTIFACT_HIT", bundle.getString("BlackboardArtifact.tskInterestingArtifactHit.text"), Category.ANALYSIS_RESULT); /** * A route based on GPS coordinates. Use * org.sleuthkit.datamodel.blackboardutils.GeoArtifactsHelper.addRoute() * to create route artifacts. */ public static final Type TSK_GPS_ROUTE = new BlackboardArtifact.Type(36, "TSK_GPS_ROUTE", bundle.getString("BlackboardArtifact.tskGpsRoute.text"), Category.DATA_ARTIFACT); /** * A remote drive. */ public static final Type TSK_REMOTE_DRIVE = new BlackboardArtifact.Type(37, "TSK_REMOTE_DRIVE", bundle.getString("BlackboardArtifact.tskRemoteDrive.text"), Category.DATA_ARTIFACT); /** * A human face was detected in a media file. */ public static final Type TSK_FACE_DETECTED = new BlackboardArtifact.Type(38, "TSK_FACE_DETECTED", bundle.getString("BlackboardArtifact.tskFaceDetected.text"), Category.ANALYSIS_RESULT); /** * An account. */ public static final Type TSK_ACCOUNT = new BlackboardArtifact.Type(39, "TSK_ACCOUNT", bundle.getString("BlackboardArtifact.tskAccount.text"), Category.DATA_ARTIFACT); /** * An encrypted file. 
*/ public static final Type TSK_ENCRYPTION_SUSPECTED = new BlackboardArtifact.Type(40, "TSK_ENCRYPTION_SUSPECTED", bundle.getString("BlackboardArtifact.tskEncryptionSuspected.text"), Category.ANALYSIS_RESULT); /* * A classifier detected an object in a media file. */ public static final Type TSK_OBJECT_DETECTED = new BlackboardArtifact.Type(41, "TSK_OBJECT_DETECTED", bundle.getString("BlackboardArtifact.tskObjectDetected.text"), Category.ANALYSIS_RESULT); /** * A wireless network. */ public static final Type TSK_WIFI_NETWORK = new BlackboardArtifact.Type(42, "TSK_WIFI_NETWORK", bundle.getString("BlackboardArtifact.tskWIFINetwork.text"), Category.DATA_ARTIFACT); /** * Information related to a device. */ public static final Type TSK_DEVICE_INFO = new BlackboardArtifact.Type(43, "TSK_DEVICE_INFO", bundle.getString("BlackboardArtifact.tskDeviceInfo.text"), Category.DATA_ARTIFACT); /** * A SIM card. */ public static final Type TSK_SIM_ATTACHED = new BlackboardArtifact.Type(44, "TSK_SIM_ATTACHED", bundle.getString("BlackboardArtifact.tskSimAttached.text"), Category.DATA_ARTIFACT); /** * A bluetooth adapter. */ public static final Type TSK_BLUETOOTH_ADAPTER = new BlackboardArtifact.Type(45, "TSK_BLUETOOTH_ADAPTER", bundle.getString("BlackboardArtifact.tskBluetoothAdapter.text"), Category.DATA_ARTIFACT); /** * A wireless network adapter. */ public static final Type TSK_WIFI_NETWORK_ADAPTER = new BlackboardArtifact.Type(46, "TSK_WIFI_NETWORK_ADAPTER", bundle.getString("BlackboardArtifact.tskWIFINetworkAdapter.text"), Category.DATA_ARTIFACT); /** * Indicates a verification failure */ public static final Type TSK_VERIFICATION_FAILED = new BlackboardArtifact.Type(47, "TSK_VERIFICATION_FAILED", bundle.getString("BlackboardArtifact.tskVerificationFailed.text"), Category.ANALYSIS_RESULT); /** * Categorization information for a data source. 
*/ public static final Type TSK_DATA_SOURCE_USAGE = new BlackboardArtifact.Type(48, "TSK_DATA_SOURCE_USAGE", bundle.getString("BlackboardArtifact.tskDataSourceUsage.text"), Category.ANALYSIS_RESULT); /** * Indicates auto fill data from a Web form. Use methods in * org.sleuthkit.datamodel.blackboardutils.WebBrowserArtifactsHelper to * create web form autofill artifacts. */ public static final Type TSK_WEB_FORM_AUTOFILL = new BlackboardArtifact.Type(49, "TSK_WEB_FORM_AUTOFILL", bundle.getString("BlackboardArtifact.tskWebFormAutofill.text"), Category.DATA_ARTIFACT); /** * Indicates an person's address filled in a web form. Use methods in * org.sleuthkit.datamodel.blackboardutils.WebBrowserArtifactsHelper to * create web form address artifacts. */ public static final Type TSK_WEB_FORM_ADDRESS = new BlackboardArtifact.Type(50, "TSK_WEB_FORM_ADDRESSES ", bundle.getString("BlackboardArtifact.tskWebFormAddresses.text"), Category.DATA_ARTIFACT); // 51 was used for deprecated TSK_DOWNLOAD_SOURCE /** * Indicates web cache data */ public static final Type TSK_WEB_CACHE = new BlackboardArtifact.Type(52, "TSK_WEB_CACHE", bundle.getString("BlackboardArtifact.tskWebCache.text"), Category.DATA_ARTIFACT); /** * A generic (timeline) event. */ public static final Type TSK_TL_EVENT = new BlackboardArtifact.Type(53, "TSK_TL_EVENT", bundle.getString("BlackboardArtifact.tskTLEvent.text"), Category.DATA_ARTIFACT); /** * Indicates clipboard content */ public static final Type TSK_CLIPBOARD_CONTENT = new BlackboardArtifact.Type(54, "TSK_CLIPBOARD_CONTENT", bundle.getString("BlackboardArtifact.tskClipboardContent.text"), Category.DATA_ARTIFACT); /** * An associated object. */ public static final Type TSK_ASSOCIATED_OBJECT = new BlackboardArtifact.Type(55, "TSK_ASSOCIATED_OBJECT", bundle.getString("BlackboardArtifact.tskAssociatedObject.text"), Category.DATA_ARTIFACT); /** * Indicates file may have been created by the user. 
*/ public static final Type TSK_USER_CONTENT_SUSPECTED = new BlackboardArtifact.Type(56, "TSK_USER_CONTENT_SUSPECTED", bundle.getString("BlackboardArtifact.tskUserContentSuspected.text"), Category.ANALYSIS_RESULT); /** * Stores metadata about an object. */ public static final Type TSK_METADATA = new BlackboardArtifact.Type(57, "TSK_METADATA", bundle.getString("BlackboardArtifact.tskMetadata.text"), Category.DATA_ARTIFACT); /** * Stores a GPS track log. Use * org.sleuthkit.datamodel.blackboardutils.GeoArtifactsHelper.addTrack() * to create track artifacts. */ public static final Type TSK_GPS_TRACK = new BlackboardArtifact.Type(58, "TSK_GPS_TRACK", bundle.getString("BlackboardArtifact.tskTrack.text"), Category.DATA_ARTIFACT); /** * Stores a role on a given domain. */ public static final Type TSK_WEB_ACCOUNT_TYPE = new BlackboardArtifact.Type(59, "TSK_WEB_ACCOUNT_TYPE", bundle.getString("BlackboardArtifact.tskWebAccountType.text"), Category.ANALYSIS_RESULT); /** * Screen shots from device or Application. */ public static final Type TSK_SCREEN_SHOTS = new BlackboardArtifact.Type(60, "TSK_SCREEN_SHOTS", bundle.getString("BlackboardArtifact.tskScreenShots.text"), Category.DATA_ARTIFACT); /** * Notifications Sent to User. */ public static final Type TSK_PROG_NOTIFICATIONS = new BlackboardArtifact.Type(62, "TSK_PROG_NOTIFICATIONS", bundle.getString("BlackboardArtifact.tskProgNotifications.text"), Category.DATA_ARTIFACT); /** * System/Application/File backup. */ public static final Type TSK_BACKUP_EVENT = new BlackboardArtifact.Type(63, "TSK_BACKUP_EVENT", bundle.getString("BlackboardArtifact.tskBackupEvent.text"), Category.DATA_ARTIFACT); /** * Programs that have been deleted. */ public static final Type TSK_DELETED_PROG = new BlackboardArtifact.Type(64, "TSK_DELETED_PROG", bundle.getString("BlackboardArtifact.tskDeletedProg.text"), Category.DATA_ARTIFACT); /** * Activity on the System/Application. 
*/ public static final Type TSK_USER_DEVICE_EVENT = new BlackboardArtifact.Type(65, "TSK_USER_DEVICE_EVENT", bundle.getString("BlackboardArtifact.tskUserDeviceEvent.text"), Category.DATA_ARTIFACT); /** * Indicates that the file had a yara pattern match hit. */ public static final Type TSK_YARA_HIT = new BlackboardArtifact.Type(66, "TSK_YARA_HIT", bundle.getString("BlackboardArtifact.tskYaraHit.text"), Category.ANALYSIS_RESULT); /** * Stores the outline of an area using GPS coordinates. */ public static final Type TSK_GPS_AREA = new BlackboardArtifact.Type(67, "TSK_GPS_AREA", bundle.getString("BlackboardArtifact.tskGPSArea.text"), Category.DATA_ARTIFACT); /** * Defines a category for a particular domain. */ public static final Type TSK_WEB_CATEGORIZATION = new BlackboardArtifact.Type(68, "TSK_WEB_CATEGORIZATION", bundle.getString("BlackboardArtifact.tskWebCategorization.text"), Category.ANALYSIS_RESULT); /** * Indicates that the file or artifact was previously seen in another Autopsy case. */ public static final Type TSK_PREVIOUSLY_SEEN = new BlackboardArtifact.Type(69, "TSK_PREVIOUSLY_SEEN", bundle.getString("BlackboardArtifact.tskPreviouslySeen.text"), Category.ANALYSIS_RESULT); /** * Indicates that the file or artifact was previously unseen in another Autopsy case. */ public static final Type TSK_PREVIOUSLY_UNSEEN = new BlackboardArtifact.Type(70, "TSK_PREVIOUSLY_UNSEEN", bundle.getString("BlackboardArtifact.tskPreviouslyUnseen.text"), Category.ANALYSIS_RESULT); /** * Indicates that the file or artifact was previously tagged as "Notable" in another Autopsy case. */ public static final Type TSK_PREVIOUSLY_NOTABLE = new BlackboardArtifact.Type(71, "TSK_PREVIOUSLY_NOTABLE", bundle.getString("BlackboardArtifact.tskPreviouslyNotable.text"), Category.ANALYSIS_RESULT); // NOTE: When adding a new standard BlackboardArtifact.Type, add the instance and then add to the STANDARD_TYPES map. /** * All standard artifact types with ids mapped to the type. 
*/ static final Map STANDARD_TYPES = Collections.unmodifiableMap(Stream.of( TSK_GEN_INFO, TSK_WEB_BOOKMARK, TSK_WEB_COOKIE, TSK_WEB_HISTORY, TSK_WEB_DOWNLOAD, TSK_RECENT_OBJECT, TSK_INSTALLED_PROG, TSK_KEYWORD_HIT, TSK_HASHSET_HIT, TSK_DEVICE_ATTACHED, TSK_INTERESTING_FILE_HIT, TSK_EMAIL_MSG, TSK_EXTRACTED_TEXT, TSK_WEB_SEARCH_QUERY, TSK_METADATA_EXIF, TSK_OS_INFO, TSK_SERVICE_ACCOUNT, TSK_CONTACT, TSK_MESSAGE, TSK_CALLLOG, TSK_CALENDAR_ENTRY, TSK_SPEED_DIAL_ENTRY, TSK_BLUETOOTH_PAIRING, TSK_GPS_BOOKMARK, TSK_GPS_LAST_KNOWN_LOCATION, TSK_GPS_SEARCH, TSK_PROG_RUN, TSK_ENCRYPTION_DETECTED, TSK_EXT_MISMATCH_DETECTED, TSK_INTERESTING_ARTIFACT_HIT, TSK_GPS_ROUTE, TSK_REMOTE_DRIVE, TSK_FACE_DETECTED, TSK_ACCOUNT, TSK_ENCRYPTION_SUSPECTED, TSK_OBJECT_DETECTED, TSK_WIFI_NETWORK, TSK_DEVICE_INFO, TSK_SIM_ATTACHED, TSK_BLUETOOTH_ADAPTER, TSK_WIFI_NETWORK_ADAPTER, TSK_VERIFICATION_FAILED, TSK_DATA_SOURCE_USAGE, TSK_WEB_FORM_AUTOFILL, TSK_WEB_FORM_ADDRESS, TSK_WEB_CACHE, TSK_TL_EVENT, TSK_CLIPBOARD_CONTENT, TSK_ASSOCIATED_OBJECT, TSK_USER_CONTENT_SUSPECTED, TSK_METADATA, TSK_GPS_TRACK, TSK_WEB_ACCOUNT_TYPE, TSK_SCREEN_SHOTS, TSK_PROG_NOTIFICATIONS, TSK_BACKUP_EVENT, TSK_DELETED_PROG, TSK_USER_DEVICE_EVENT, TSK_YARA_HIT, TSK_GPS_AREA, TSK_WEB_CATEGORIZATION, TSK_PREVIOUSLY_SEEN, TSK_PREVIOUSLY_UNSEEN, TSK_PREVIOUSLY_NOTABLE ).collect(Collectors.toMap(type -> type.getTypeID(), type -> type))); private final String typeName; private final int typeID; private final String displayName; private final Category category; /** * Constructs a custom artifact type. * * @param typeName The name of the type. * @param typeID The id of the type. * @param displayName The display name of the type. * @param category The artifact type category. */ Type(int typeID, String typeName, String displayName, Category category) { this.typeID = typeID; this.typeName = typeName; this.displayName = displayName; this.category = category; } /** * Constructs a standard artifact type. 
* * @param type An element of the ARTIFACT_TYPE enum. */ public Type(ARTIFACT_TYPE type) { this(type.getTypeID(), type.getLabel(), type.getDisplayName(), type.getCategory()); } /** * Gets the type for this artifact type. * * @return The type name. */ public String getTypeName() { return this.typeName; } /** * Gets the type id for this artifact type. * * @return The type id. */ public int getTypeID() { return this.typeID; } /** * Gets display name of this artifact type. * * @return The display name. */ public String getDisplayName() { return this.displayName; } /** * Gets category of this artifact type. * * @return The artifact type category. */ public Category getCategory() { return category; } /** * Tests this artifact type for equality with another object. * * @param that The other object. * * @return True or false. */ @Override public boolean equals(Object that) { if (this == that) { return true; } else if (!(that instanceof Type)) { return false; } else { return ((Type) that).sameType(this); } } /** * Compares two artifact types to see if they are the same type. * * @param that The other type. * * @return True or false. */ private boolean sameType(Type that) { return this.typeName.equals(that.getTypeName()) && this.displayName.equals(that.getDisplayName()) && this.typeID == that.getTypeID(); } /** * Gets the hash code for this artifact type. * * @return The hash code. */ @Override public int hashCode() { int hash = 11; hash = 83 * hash + Objects.hashCode(this.typeID); hash = 83 * hash + Objects.hashCode(this.displayName); hash = 83 * hash + Objects.hashCode(this.typeName); return hash; } } /** * Enum for the standard artifact types. Refer to * http://sleuthkit.org/sleuthkit/docs/jni-docs/latest/artifact_catalog_page.html * for details on the standard attributes for each artifact type. */ public enum ARTIFACT_TYPE implements SleuthkitVisitableItem { /** * A generic information artifact. 
*/
TSK_GEN_INFO(1, "TSK_GEN_INFO", //NON-NLS
        bundle.getString("BlackboardArtifact.tskGenInfo.text"), Category.DATA_ARTIFACT),
/**
 * A Web bookmark. Use methods in
 * org.sleuthkit.datamodel.blackboardutils.WebBrowserArtifactsHelper to
 * create bookmark artifacts.
 */
TSK_WEB_BOOKMARK(2, "TSK_WEB_BOOKMARK", //NON-NLS
        bundle.getString("BlackboardArtifact.tskWebBookmark.text"), Category.DATA_ARTIFACT),
/**
 * A Web cookie. Use methods in
 * org.sleuthkit.datamodel.blackboardutils.WebBrowserArtifactsHelper to
 * create cookie artifacts.
 */
TSK_WEB_COOKIE(3, "TSK_WEB_COOKIE",
        bundle.getString("BlackboardArtifact.tskWebCookie.text"), Category.DATA_ARTIFACT), //NON-NLS
/**
 * A Web history. Use methods in
 * org.sleuthkit.datamodel.blackboardutils.WebBrowserArtifactsHelper to
 * create history artifacts.
 */
TSK_WEB_HISTORY(4, "TSK_WEB_HISTORY", //NON-NLS
        bundle.getString("BlackboardArtifact.tskWebHistory.text"), Category.DATA_ARTIFACT),
/**
 * A Web download. Use methods in
 * org.sleuthkit.datamodel.blackboardutils.WebBrowserArtifactsHelper to
 * create download artifacts.
 */
TSK_WEB_DOWNLOAD(5, "TSK_WEB_DOWNLOAD", //NON-NLS
        bundle.getString("BlackboardArtifact.tskWebDownload.text"), Category.DATA_ARTIFACT),
/**
 * A recent object.
 */
TSK_RECENT_OBJECT(6, "TSK_RECENT_OBJ", //NON-NLS
        bundle.getString("BlackboardArtifact.tsk.recentObject.text"), Category.DATA_ARTIFACT),
/**
 * A GPS track point (geolocation data).
 *
 * @deprecated Use TSK_GPS_TRACK instead
 */
@Deprecated
TSK_GPS_TRACKPOINT(7, "TSK_GPS_TRACKPOINT", //NON-NLS
        bundle.getString("BlackboardArtifact.tskGpsTrackpoint.text"), Category.DATA_ARTIFACT),
/**
 * An installed program.
 */
TSK_INSTALLED_PROG(8, "TSK_INSTALLED_PROG", //NON-NLS
        bundle.getString("BlackboardArtifact.tskInstalledProg.text"), Category.DATA_ARTIFACT),
/**
 * A search hit for a keyword.
 */
TSK_KEYWORD_HIT(9, "TSK_KEYWORD_HIT",
        bundle.getString("BlackboardArtifact.tskKeywordHits.text"), Category.ANALYSIS_RESULT),
/**
 * A hit for a hash set (hash database).
 */
TSK_HASHSET_HIT(10, "TSK_HASHSET_HIT", //NON-NLS
        bundle.getString("BlackboardArtifact.tskHashsetHit.text"), Category.ANALYSIS_RESULT),
/**
 * An attached device.
 */
TSK_DEVICE_ATTACHED(11, "TSK_DEVICE_ATTACHED", //NON-NLS
        bundle.getString("BlackboardArtifact.tskDeviceAttached.text"), Category.DATA_ARTIFACT),
/**
 * A meta-artifact to call attention to a file deemed to be
 * interesting.
 */
TSK_INTERESTING_FILE_HIT(12, "TSK_INTERESTING_FILE_HIT", //NON-NLS
        bundle.getString("BlackboardArtifact.tskInterestingFileHit.text"), Category.ANALYSIS_RESULT), ///< an interesting/notable file hit
/**
 * An email message.
 */
TSK_EMAIL_MSG(13, "TSK_EMAIL_MSG", //NON-NLS
        bundle.getString("BlackboardArtifact.tskEmailMsg.text"), Category.DATA_ARTIFACT),
/**
 * Text extracted from the source content.
 */
TSK_EXTRACTED_TEXT(14, "TSK_EXTRACTED_TEXT", //NON-NLS
        bundle.getString("BlackboardArtifact.tskExtractedText.text"), Category.DATA_ARTIFACT),
/**
 * A Web search engine query extracted from Web history.
 */
TSK_WEB_SEARCH_QUERY(15, "TSK_WEB_SEARCH_QUERY", //NON-NLS
        bundle.getString("BlackboardArtifact.tskWebSearchQuery.text"), Category.DATA_ARTIFACT),
/**
 * EXIF metadata.
 */
TSK_METADATA_EXIF(16, "TSK_METADATA_EXIF", //NON-NLS
        bundle.getString("BlackboardArtifact.tskMetadataExif.text"), Category.ANALYSIS_RESULT),
/**
 * A tag applied to a file.
 *
 * @deprecated Tags are no longer treated as artifacts.
 */
@Deprecated
TSK_TAG_FILE(17, "TSK_TAG_FILE", //NON-NLS
        bundle.getString("BlackboardArtifact.tagFile.text"), Category.ANALYSIS_RESULT),
/**
 * A tag applied to an artifact.
 *
 * @deprecated Tags are no longer treated as artifacts.
 */
@Deprecated
TSK_TAG_ARTIFACT(18, "TSK_TAG_ARTIFACT", //NON-NLS
        bundle.getString("BlackboardArtifact.tskTagArtifact.text"), Category.ANALYSIS_RESULT),
/**
 * Information pertaining to an operating system.
 */
TSK_OS_INFO(19, "TSK_OS_INFO", //NON-NLS
        bundle.getString("BlackboardArtifact.tskOsInfo.text"), Category.DATA_ARTIFACT),
/**
 * An operating system user account.
 */
@Deprecated
TSK_OS_ACCOUNT(20, "TSK_OS_ACCOUNT", //NON-NLS
        bundle.getString("BlackboardArtifact.tskOsAccount.text"), Category.DATA_ARTIFACT),
/**
 * An application or Web service account.
 */
TSK_SERVICE_ACCOUNT(21, "TSK_SERVICE_ACCOUNT", //NON-NLS
        bundle.getString("BlackboardArtifact.tskServiceAccount.text"), Category.DATA_ARTIFACT),
/**
 * Output from an external tool or module (raw text).
 *
 * @deprecated Tool output should be saved as a report.
 */
@Deprecated
TSK_TOOL_OUTPUT(22, "TSK_TOOL_OUTPUT", //NON-NLS
        bundle.getString("BlackboardArtifact.tskToolOutput.text"), Category.DATA_ARTIFACT),
/**
 * A contact extracted from a phone, or from an address
 * book/email/messaging application. Use methods in
 * org.sleuthkit.datamodel.blackboardutils.CommunicationArtifactsHelper
 * to create contact artifacts.
 */
TSK_CONTACT(23, "TSK_CONTACT", //NON-NLS
        bundle.getString("BlackboardArtifact.tskContact.text"), Category.DATA_ARTIFACT),
/**
 * An SMS/MMS message extracted from phone, or from another messaging
 * application, like IM. Use methods in
 * org.sleuthkit.datamodel.blackboardutils.CommunicationArtifactsHelper
 * to create message artifacts.
 */
TSK_MESSAGE(24, "TSK_MESSAGE", //NON-NLS
        bundle.getString("BlackboardArtifact.tskMessage.text"), Category.DATA_ARTIFACT),
/**
 * A phone call log extracted from a phone or softphone application. Use
 * methods in
 * org.sleuthkit.datamodel.blackboardutils.CommunicationArtifactsHelper
 * to create call log artifacts.
 */
TSK_CALLLOG(25, "TSK_CALLLOG", //NON-NLS
        bundle.getString("BlackboardArtifact.tskCalllog.text"), Category.DATA_ARTIFACT),
/**
 * A calendar entry from a phone, PIM, or a calendar application.
 */
TSK_CALENDAR_ENTRY(26, "TSK_CALENDAR_ENTRY", //NON-NLS
        bundle.getString("BlackboardArtifact.tskCalendarEntry.text"), Category.DATA_ARTIFACT),
/**
 * A speed dial entry from a phone.
 */
TSK_SPEED_DIAL_ENTRY(27, "TSK_SPEED_DIAL_ENTRY", //NON-NLS
        bundle.getString("BlackboardArtifact.tskSpeedDialEntry.text"), Category.DATA_ARTIFACT),
/**
 * A bluetooth pairing entry.
 */
TSK_BLUETOOTH_PAIRING(28, "TSK_BLUETOOTH_PAIRING", //NON-NLS
        bundle.getString("BlackboardArtifact.tskBluetoothPairing.text"), Category.DATA_ARTIFACT),
/**
 * A GPS bookmark / way point that the user saved.
 */
TSK_GPS_BOOKMARK(29, "TSK_GPS_BOOKMARK", //NON-NLS
        bundle.getString("BlackboardArtifact.tskGpsBookmark.text"), Category.DATA_ARTIFACT),
/**
 * A GPS last known location record.
 */
TSK_GPS_LAST_KNOWN_LOCATION(30, "TSK_GPS_LAST_KNOWN_LOCATION", //NON-NLS
        bundle.getString("BlackboardArtifact.tskGpsLastKnownLocation.text"), Category.DATA_ARTIFACT),
/**
 * A GPS search record.
 */
TSK_GPS_SEARCH(31, "TSK_GPS_SEARCH", //NON-NLS
        bundle.getString("BlackboardArtifact.tskGpsSearch.text"), Category.DATA_ARTIFACT),
/**
 * Application run information.
 */
TSK_PROG_RUN(32, "TSK_PROG_RUN", //NON-NLS
        bundle.getString("BlackboardArtifact.tskProgRun.text"), Category.DATA_ARTIFACT),
/**
 * An encrypted file.
 */
TSK_ENCRYPTION_DETECTED(33, "TSK_ENCRYPTION_DETECTED", //NON-NLS
        bundle.getString("BlackboardArtifact.tskEncryptionDetected.text"), Category.ANALYSIS_RESULT),
/**
 * A file with an extension that does not match its MIME type.
 */
TSK_EXT_MISMATCH_DETECTED(34, "TSK_EXT_MISMATCH_DETECTED", //NON-NLS
        bundle.getString("BlackboardArtifact.tskExtMismatchDetected.text"), Category.ANALYSIS_RESULT),
/**
 * A meta-artifact to call attention to an artifact deemed to be
 * interesting.
 */
TSK_INTERESTING_ARTIFACT_HIT(35, "TSK_INTERESTING_ARTIFACT_HIT", //NON-NLS
        bundle.getString("BlackboardArtifact.tskInterestingArtifactHit.text"), Category.ANALYSIS_RESULT),
/**
 * A route based on GPS coordinates. Use
 * org.sleuthkit.datamodel.blackboardutils.GeoArtifactsHelper.addRoute()
 * to create route artifacts.
 */
TSK_GPS_ROUTE(36, "TSK_GPS_ROUTE", //NON-NLS
        bundle.getString("BlackboardArtifact.tskGpsRoute.text"), Category.DATA_ARTIFACT),
/**
 * A remote drive.
 */
TSK_REMOTE_DRIVE(37, "TSK_REMOTE_DRIVE", //NON-NLS
        bundle.getString("BlackboardArtifact.tskRemoteDrive.text"), Category.DATA_ARTIFACT),
/**
 * A human face was detected in a media file.
 */
TSK_FACE_DETECTED(38, "TSK_FACE_DETECTED", //NON-NLS
        bundle.getString("BlackboardArtifact.tskFaceDetected.text"), Category.ANALYSIS_RESULT),
/**
 * An account.
 */
TSK_ACCOUNT(39, "TSK_ACCOUNT", //NON-NLS
        bundle.getString("BlackboardArtifact.tskAccount.text"), Category.DATA_ARTIFACT),
/**
 * An encrypted file.
 */
TSK_ENCRYPTION_SUSPECTED(40, "TSK_ENCRYPTION_SUSPECTED", //NON-NLS
        bundle.getString("BlackboardArtifact.tskEncryptionSuspected.text"), Category.ANALYSIS_RESULT),
/**
 * A classifier detected an object in a media file.
 */
TSK_OBJECT_DETECTED(41, "TSK_OBJECT_DETECTED", //NON-NLS
        bundle.getString("BlackboardArtifact.tskObjectDetected.text"), Category.ANALYSIS_RESULT),
/**
 * A wireless network.
 */
TSK_WIFI_NETWORK(42, "TSK_WIFI_NETWORK", //NON-NLS
        bundle.getString("BlackboardArtifact.tskWIFINetwork.text"), Category.DATA_ARTIFACT),
/**
 * Information related to a device.
 */
TSK_DEVICE_INFO(43, "TSK_DEVICE_INFO", //NON-NLS
        bundle.getString("BlackboardArtifact.tskDeviceInfo.text"), Category.DATA_ARTIFACT),
/**
 * A SIM card.
 */
TSK_SIM_ATTACHED(44, "TSK_SIM_ATTACHED", //NON-NLS
        bundle.getString("BlackboardArtifact.tskSimAttached.text"), Category.DATA_ARTIFACT),
/**
 * A bluetooth adapter.
 */
TSK_BLUETOOTH_ADAPTER(45, "TSK_BLUETOOTH_ADAPTER", //NON-NLS
        bundle.getString("BlackboardArtifact.tskBluetoothAdapter.text"), Category.DATA_ARTIFACT),
/**
 * A wireless network adapter.
 */
TSK_WIFI_NETWORK_ADAPTER(46, "TSK_WIFI_NETWORK_ADAPTER", //NON-NLS
        bundle.getString("BlackboardArtifact.tskWIFINetworkAdapter.text"), Category.DATA_ARTIFACT),
/**
 * Indicates a verification failure
 */
TSK_VERIFICATION_FAILED(47, "TSK_VERIFICATION_FAILED", //NON-NLS
        bundle.getString("BlackboardArtifact.tskVerificationFailed.text"), Category.ANALYSIS_RESULT),
/**
 * Categorization information for a data source.
 */
TSK_DATA_SOURCE_USAGE(48, "TSK_DATA_SOURCE_USAGE", //NON-NLS
        bundle.getString("BlackboardArtifact.tskDataSourceUsage.text"), Category.ANALYSIS_RESULT),
/**
 * Indicates auto fill data from a Web form. Use methods in
 * org.sleuthkit.datamodel.blackboardutils.WebBrowserArtifactsHelper to
 * create web form autofill artifacts.
 */
TSK_WEB_FORM_AUTOFILL(49, "TSK_WEB_FORM_AUTOFILL", //NON-NLS
        bundle.getString("BlackboardArtifact.tskWebFormAutofill.text"), Category.DATA_ARTIFACT),
/**
 * Indicates a person's address filled in a web form. Use methods in
 * org.sleuthkit.datamodel.blackboardutils.WebBrowserArtifactsHelper to
 * create web form address artifacts.
 */
// NOTE(review): the type-name literal below is "TSK_WEB_FORM_ADDRESSES " with a
// trailing space and a plural form that differs from the enum constant name.
// Presumably preserved for database compatibility — do NOT "fix" without a
// schema migration; confirm against existing case databases.
TSK_WEB_FORM_ADDRESS(50, "TSK_WEB_FORM_ADDRESSES ", //NON-NLS
        bundle.getString("BlackboardArtifact.tskWebFormAddresses.text"), Category.DATA_ARTIFACT),
/**
 * Indicates source of a file/object
 *
 * @deprecated TSK_ASSOCIATED_OBJECT should be used instead to associate
 * the file/object with its source artifact/object..
 */
@Deprecated
TSK_DOWNLOAD_SOURCE(51, "TSK_DOWNLOAD_SOURCE", //NON-NLS
        bundle.getString("BlackboardArtifact.tskDownloadSource.text"), Category.DATA_ARTIFACT),
/**
 * Indicates web cache data
 */
TSK_WEB_CACHE(52, "TSK_WEB_CACHE", //NON-NLS
        bundle.getString("BlackboardArtifact.tskWebCache.text"), Category.DATA_ARTIFACT),
/**
 * A generic (timeline) event.
 */
TSK_TL_EVENT(53, "TSK_TL_EVENT", //NON-NLS
        bundle.getString("BlackboardArtifact.tskTLEvent.text"), Category.DATA_ARTIFACT),
/**
 * Indicates clipboard content
 */
TSK_CLIPBOARD_CONTENT(54, "TSK_CLIPBOARD_CONTENT", //NON-NLS
        bundle.getString("BlackboardArtifact.tskClipboardContent.text"), Category.DATA_ARTIFACT),
/**
 * An associated object.
 */
TSK_ASSOCIATED_OBJECT(55, "TSK_ASSOCIATED_OBJECT", //NON-NLS
        bundle.getString("BlackboardArtifact.tskAssociatedObject.text"), Category.DATA_ARTIFACT),
/**
 * Indicates file may have been created by the user.
 */
TSK_USER_CONTENT_SUSPECTED(56, "TSK_USER_CONTENT_SUSPECTED", //NON-NLS
        bundle.getString("BlackboardArtifact.tskUserContentSuspected.text"), Category.ANALYSIS_RESULT),
/**
 * Stores metadata about an object.
 */
TSK_METADATA(57, "TSK_METADATA", //NON-NLS
        bundle.getString("BlackboardArtifact.tskMetadata.text"), Category.DATA_ARTIFACT),
/**
 * Stores a GPS track log. Use
 * org.sleuthkit.datamodel.blackboardutils.GeoArtifactsHelper.addTrack()
 * to create track artifacts.
 */
TSK_GPS_TRACK(58, "TSK_GPS_TRACK",
        bundle.getString("BlackboardArtifact.tskTrack.text"), Category.DATA_ARTIFACT),
/**
 * Stores a role on a given domain.
 */
TSK_WEB_ACCOUNT_TYPE(59, "TSK_WEB_ACCOUNT_TYPE",
        bundle.getString("BlackboardArtifact.tskWebAccountType.text"), Category.ANALYSIS_RESULT),
/**
 * Screen shots from device or Application.
 */
TSK_SCREEN_SHOTS(60, "TSK_SCREEN_SHOTS",
        bundle.getString("BlackboardArtifact.tskScreenShots.text"), Category.DATA_ARTIFACT),
/**
 * Notifications Sent to User.
 */
// NOTE(review): type id 61 is not assigned in this listing — presumably reserved
// or retired; verify against the schema before reusing it for a new type.
TSK_PROG_NOTIFICATIONS(62, "TSK_PROG_NOTIFICATIONS",
        bundle.getString("BlackboardArtifact.tskProgNotifications.text"), Category.DATA_ARTIFACT),
/**
 * System/Application/File backup.
 */
TSK_BACKUP_EVENT(63, "TSK_BACKUP_EVENT",
        bundle.getString("BlackboardArtifact.tskBackupEvent.text"), Category.DATA_ARTIFACT),
/**
 * Programs that have been deleted.
 */
TSK_DELETED_PROG(64, "TSK_DELETED_PROG",
        bundle.getString("BlackboardArtifact.tskDeletedProg.text"), Category.DATA_ARTIFACT),
/**
 * Activity on the System/Application.
 */
TSK_USER_DEVICE_EVENT(65, "TSK_USER_DEVICE_EVENT",
        bundle.getString("BlackboardArtifact.tskUserDeviceEvent.text"), Category.DATA_ARTIFACT),
/**
 * Indicates that the file had a yara pattern match hit.
 */
TSK_YARA_HIT(66, "TSK_YARA_HIT",
        bundle.getString("BlackboardArtifact.tskYaraHit.text"), Category.ANALYSIS_RESULT),
/**
 * Stores the outline of an area using GPS coordinates.
 */
TSK_GPS_AREA(67, "TSK_GPS_AREA",
        bundle.getString("BlackboardArtifact.tskGPSArea.text"), Category.DATA_ARTIFACT),
/**
 * Web categorization result for a domain.
 */
TSK_WEB_CATEGORIZATION(68, "TSK_WEB_CATEGORIZATION",
        bundle.getString("BlackboardArtifact.tskWebCategorization.text"), Category.ANALYSIS_RESULT),
/**
 * Indicates that the file or artifact was previously seen in another Autopsy case.
 */
TSK_PREVIOUSLY_SEEN(69, "TSK_PREVIOUSLY_SEEN",
        bundle.getString("BlackboardArtifact.tskPreviouslySeen.text"), Category.ANALYSIS_RESULT),
/**
 * Indicates that the file or artifact was previously unseen in another Autopsy case.
 */
TSK_PREVIOUSLY_UNSEEN(70, "TSK_PREVIOUSLY_UNSEEN",
        bundle.getString("BlackboardArtifact.tskPreviouslyUnseen.text"), Category.ANALYSIS_RESULT),
/**
 * Indicates that the file or artifact was previously tagged as "Notable" in another Autopsy case.
 */
TSK_PREVIOUSLY_NOTABLE(71, "TSK_PREVIOUSLY_NOTABLE",
        bundle.getString("BlackboardArtifact.tskPreviouslyNotable.text"), Category.ANALYSIS_RESULT);

/*
 * To developers: For each new artifact, ensure that: - The enum value
 * has 1-line JavaDoc description - The artifact catalog
 * (artifact_catalog.dox) is updated to reflect the attributes it uses
 */
private final String label;       // unique type name, e.g. "TSK_YARA_HIT"
private final int typeId;         // unique numeric type id
private final String displayName; // human-readable name for display
private final Category category;  // DATA_ARTIFACT or ANALYSIS_RESULT

/**
 * Constructs a value for the standard artifact types enum.
 *
 * @param typeId The type id.
 * @param label The type name.
* @param displayName The type display name.
 */
private ARTIFACT_TYPE(int typeId, String label, String displayName) {
    // Category defaults to DATA_ARTIFACT when not specified.
    this(typeId, label, displayName, Category.DATA_ARTIFACT);
}

/**
 * Constructs a value for the standard artifact types enum.
 *
 * @param typeId      The type id.
 * @param label       The type name.
 * @param displayName The type display name.
 * @param category    The type category.
 */
private ARTIFACT_TYPE(int typeId, String label, String displayName, Category category) {
    this.typeId = typeId;
    this.label = label;
    this.displayName = displayName;
    this.category = category;
}

/**
 * Gets the type id for this standard artifact type.
 *
 * @return type id
 */
public int getTypeID() {
    return this.typeId;
}

/**
 * Gets the type name (label) for this standard artifact type.
 *
 * @return The type name.
 */
public String getLabel() {
    return this.label;
}

/**
 * Gets the type category for this standard artifact type.
 *
 * @return The type category.
 */
public Category getCategory() {
    return this.category;
}

/**
 * Gets the standard artifact type enum value that corresponds to a
 * given type name (label). Does a linear scan of the enum values.
 *
 * @param label The type name
 *
 * @return The enum element.
 */
static public ARTIFACT_TYPE fromLabel(String label) {
    for (ARTIFACT_TYPE value : ARTIFACT_TYPE.values()) {
        if (value.getLabel().equals(label)) {
            return value;
        }
    }
    // No match: the label does not name a standard type.
    throw new IllegalArgumentException("No ARTIFACT_TYPE matching type: " + label);
}

/**
 * Gets the artifact type enum value that corresponds to a given type
 * id. This method should only be used when the id is known to be one of
 * the built-in types - otherwise use getArtifactType() in
 * SleuthkitCase.
 *
 * @param id The type id.
 *
 * @return the corresponding enum
 */
static public ARTIFACT_TYPE fromID(int id) {
    for (ARTIFACT_TYPE value : ARTIFACT_TYPE.values()) {
        if (value.getTypeID() == id) {
            return value;
        }
    }
    // No match: the id does not belong to a standard type.
    throw new IllegalArgumentException("No ARTIFACT_TYPE matching type: " + id);
}

/**
 * Gets the display name of this standard artifact type.
*
 * @return The display name.
 */
public String getDisplayName() {
    return displayName;
}

/**
 * Accepts a visitor SleuthkitItemVisitor that will perform an operation
 * on this artifact type and return some object as the result of the
 * operation.
 *
 * @param visitor The visitor, where the type parameter of the visitor
 *                is the type of the object that will be returned as the
 *                result of the visit operation.
 *
 * @return An object of type T.
 */
// NOTE(review): the generic parameter declarations (<T>) appear to have been
// lost in extraction; confirm against the upstream source.
@Override
public T accept(SleuthkitItemVisitor visitor) {
    return visitor.visit(this);
}
}

/**
 * Enumeration to encapsulate categories of artifact.
 *
 * Some artifact types represent data directly extracted from a data source,
 * while others may be the result of some analysis done on the extracted
 * data.
 */
public enum Category {

    // NOTE: The schema code defaults to '0', so that code must be updated too if DATA_ARTIFACT changes from being 0
    DATA_ARTIFACT(0, "DATA_ARTIFACT", ResourceBundle.getBundle("org.sleuthkit.datamodel.Bundle").getString("CategoryType.DataArtifact")), // artifact is data that is directly/indirectly extracted from a data source.
    ANALYSIS_RESULT(1, "ANALYSIS_RESULT", ResourceBundle.getBundle("org.sleuthkit.datamodel.Bundle").getString("CategoryType.AnalysisResult")); // artifacts represents outcome of analysis of data.

    private final Integer id;         // category id, also the DB value
    private final String name;        // category name
    private final String displayName; // localized display name

    // Reverse lookup table: category id -> Category, populated once at class init.
    private final static Map idToCategory = new HashMap();

    static {
        for (Category status : values()) {
            idToCategory.put(status.getID(), status);
        }
    }

    /**
     * Constructs a value for the category enum.
     *
     * @param id          The category id.
     * @param name        The category name.
     * @param displayName The category display name.
     */
    private Category(Integer id, String name, String displayName) {
        this.id = id;
        this.name = name;
        this.displayName = displayName;
    }

    /**
     * Gets the category value with the given id, if one exists.
     *
     * @param id A category id.
     *
     * @return The category with the given id, or null if none exists.
*/
public static Category fromID(int id) {
    // Constant-time lookup via the prebuilt id -> Category map.
    return idToCategory.get(id);
}

/**
 * Gets the id of this category.
 *
 * @return The id of this category.
 */
public Integer getID() {
    return id;
}

/**
 * Gets the name of this category.
 *
 * @return The name of this category.
 */
String getName() {
    return name;
}

/**
 * Gets the display name of this category.
 *
 * @return The display name of this category.
 */
public String getDisplayName() {
    return displayName;
}
}

/**
 * Enum to represent the review status of an artifact.
 */
public enum ReviewStatus {

    APPROVED(1, "APPROVED", "ReviewStatus.Approved"), // approved by human user
    REJECTED(2, "REJECTED", "ReviewStatus.Rejected"), // rejected by human user
    UNDECIDED(3, "UNDECIDED", "ReviewStatus.Undecided"); // not yet reviewed by human user

    private final Integer id;         // status id, also the DB value
    private final String name;        // status name
    private final String displayName; // localized display name, resolved from the bundle key

    // Reverse lookup table: status id -> ReviewStatus, populated once at class init.
    private final static Map idToStatus = new HashMap();

    static {
        for (ReviewStatus status : values()) {
            idToStatus.put(status.getID(), status);
        }
    }

    /**
     * Constructs a value for the review status enum.
     *
     * @param id             The status id.
     * @param name           The status name
     * @param displayNameKey The bundle.properties key for the status
     *                       display name.
     */
    private ReviewStatus(Integer id, String name, String displayNameKey) {
        this.id = id;
        this.name = name;
        this.displayName = ResourceBundle.getBundle("org.sleuthkit.datamodel.Bundle").getString(displayNameKey);
    }

    /**
     * Gets the review status value with the given id, if one exists.
     *
     * @param id A review status id.
     *
     * @return The review status with the given id, or null if none exists.
     */
    public static ReviewStatus withID(int id) {
        return idToStatus.get(id);
    }

    /**
     * Gets the id of this review status.
     *
     * @return The id of this review status.
     */
    public Integer getID() {
        return id;
    }

    /**
     * Gets the name of this review status.
     *
     * @return The name of this review status.
     */
    String getName() {
        return name;
    }

    /**
     * Gets the display name of this review status.
*
 * @return The display name of this review status.
 */
public String getDisplayName() {
    return displayName;
}
}

/**
 * Constructs an artifact that has been posted to the blackboard. An
 * artifact is a typed collection of name value pairs (attributes) that is
 * associated with its source content (either a data source, or file within
 * a data source). Both standard artifact types and custom artifact types
 * are supported.
 *
 * @param sleuthkitCase    The SleuthKit case (case database) that contains
 *                         the artifact data.
 * @param artifactID       The unique id for this artifact.
 * @param objID            The unique id of the content with which this
 *                         artifact is associated.
 * @param artifactObjID    The unique id of the artifact, in tsk_objects
 * @param dataSourceObjId  The id of the data source
 * @param artifactTypeID   The type id of this artifact.
 * @param artifactTypeName The type name of this artifact.
 * @param displayName      The display name of this artifact.
 *
 * @deprecated Use new BlackboardArtifact(SleuthkitCase, long, long, int,
 * String, String, ReviewStatus) instead.
 */
@Deprecated
protected BlackboardArtifact(SleuthkitCase sleuthkitCase, long artifactID, long objID, long artifactObjID, long dataSourceObjId, int artifactTypeID, String artifactTypeName, String displayName) {
    // Delegates to the full constructor with a default review status of UNDECIDED.
    this(sleuthkitCase, artifactID, objID, artifactObjID, dataSourceObjId, artifactTypeID, artifactTypeName, displayName, ReviewStatus.UNDECIDED);
}

/**
 * Gets all attributes associated with this artifact that are of the given
 * attribute type.
 *
 * @param attributeType the type of attributes to get
 *
 * @return a list of attributes of the given type
 *
 * @throws TskCoreException if a critical error occurs and the attributes
 *                          are not fetched
 *
 * @deprecated An artifact should not have multiple attributes of the same
 * type. Use getAttribute(BlackboardAttribute.Type) instead.
*/
@Deprecated
public List getAttributes(final BlackboardAttribute.ATTRIBUTE_TYPE attributeType) throws TskCoreException {
    // Lazily populate the attribute cache from the case database on first use.
    if (loadedCacheFromDb == false) {
        List attrs = getSleuthkitCase().getBlackboardAttributes(this);
        attrsCache.clear();
        attrsCache.addAll(attrs);
        loadedCacheFromDb = true;
    }
    // Filter the cached attributes down to the requested type id.
    ArrayList filteredAttributes = new ArrayList();
    for (BlackboardAttribute attr : attrsCache) {
        if (attr.getAttributeType().getTypeID() == attributeType.getTypeID()) {
            filteredAttributes.add(attr);
        }
    }
    return filteredAttributes;
}

@Override
public long getId() {
    // The artifact's object id (in tsk_objects), not the artifact id.
    return this.artifactObjId;
}

/**
 * Gets the object ids of children of this artifact, if any
 *
 * @return A list of the object ids of children.
 *
 * @throws TskCoreException if there was an error querying the case
 *                          database.
 */
@Override
public List getChildrenIds() throws TskCoreException {
    // Children may be either files or other artifacts; combine both queries.
    List childrenIDs = new ArrayList();
    childrenIDs.addAll(getSleuthkitCase().getAbstractFileChildrenIds(this));
    childrenIDs.addAll(getSleuthkitCase().getBlackboardArtifactChildrenIds(this));
    return childrenIDs;
}

@Override
public int getChildrenCount() throws TskCoreException {
    // Return the cached count when it has already been computed (-1 = not yet).
    if (childrenCount != -1) {
        return childrenCount;
    }
    childrenCount = this.getSleuthkitCase().getContentChildrenCount(this);
    hasChildren = childrenCount > 0;
    checkedHasChildren = true;
    return childrenCount;
}

@Override
public boolean hasChildren() throws TskCoreException {
    // Use the cached answer when the count has already been queried.
    if (checkedHasChildren == true) {
        return hasChildren;
    }
    childrenCount = this.getSleuthkitCase().getContentChildrenCount(this);
    hasChildren = childrenCount > 0;
    checkedHasChildren = true;
    return hasChildren;
}

/**
 * Get all children of this artifact, if any.
 *
 * @return A list of the children.
 *
 * @throws TskCoreException if there was an error querying the case
 *                          database.
*/ @Override public List getChildren() throws TskCoreException { List children = new ArrayList<>(); children.addAll(getSleuthkitCase().getAbstractFileChildren(this)); children.addAll(getSleuthkitCase().getBlackboardArtifactChildren(this)); return children; } } sleuthkit-4.11.1/bindings/java/src/org/sleuthkit/datamodel/OsAccountRealm.java000644 000765 000024 00000017543 14137073413 030205 0ustar00carrierstaff000000 000000 /* * Sleuth Kit Data Model * * Copyright 2020 Basis Technology Corp. * Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.sleuthkit.datamodel; import java.util.ArrayList; import java.util.List; import java.util.Objects; import java.util.Optional; import java.util.ResourceBundle; import org.apache.commons.lang3.StringUtils; /** * Realm encapsulates the scope of an OsAccount. An account is unique within a realm. * * A realm may be host scoped, say for a local standalone computer, or * domain scoped. * * Many times, we may learn about the existence of a realm without fully understanding * it. Such as when we find a Windows SID before we've parsed the registry to know if * it is for the local computer or domain. By default, a realm is created with a * host-level scope and a confidence of "inferred". 
*/
public final class OsAccountRealm {

    private static final ResourceBundle bundle = ResourceBundle.getBundle("org.sleuthkit.datamodel.Bundle");

    private final long id; // row id

    // a realm may have multiple names - for example, for a user ABCCorp\\user1 or user1@ABCcorp.com - 'ABCCorp' and 'ABCcorp.com' both refer to the same realm.
    // currently we only support a single name, this could be expanded in future.
    private final String realmName; // realm name

    private final String realmAddr; // realm address
    private String signature; // either realm address or name (if address is not known), plus a scope indicator
    private final Host host; // if the realm consists of a single host. Will be null if the realm is domain scoped.
    private final ScopeConfidence scopeConfidence; // confidence in realm scope.
    private final RealmDbStatus dbStatus; // Status of row in database.

    /**
     * Creates OsAccountRealm.
     *
     * @param id              Row Id.
     * @param realmName       Realm name, may be null.
     * @param realmAddr       Unique numeric address for realm, may be null only
     *                        if realm name is not null.
     * @param signature       Either the address or the name, plus a scope
     *                        indicator.
     * @param host            Host if the realm is host scoped.
     * @param scopeConfidence Scope confidence.
     * @param dbStatus        Status of this realm's row in the database.
     */
    OsAccountRealm(long id, String realmName, String realmAddr, String signature, Host host, ScopeConfidence scopeConfidence, RealmDbStatus dbStatus) {
        this.id = id;
        this.realmName = realmName;
        this.realmAddr = realmAddr;
        this.signature = signature;
        this.host = host;
        this.scopeConfidence = scopeConfidence;
        this.dbStatus = dbStatus;
    }

    /**
     * Get the realm row id.
     *
     * @return Realm id.
     */
    long getRealmId() {
        return id;
    }

    /**
     * Get realm names list.
     *
     * Currently we only support a single name for realm, so this list may have
     * at most a single name. And the list may be empty if there is no name.
     *
     * @return List of realm names, may be empty.
*/
public List getRealmNames() {
    // Wrap the single (possibly null) name in a list to keep the API
    // forward-compatible with multi-name realms.
    List namesList = new ArrayList<>();
    if (!Objects.isNull(realmName)) {
        namesList.add(realmName);
    }
    return namesList;
}

/**
 * Get the realm address, such as part of a Windows SID.
 *
 * @return Optional realm unique address.
 */
public Optional getRealmAddr() {
    return Optional.ofNullable(realmAddr);
}

/**
 * Get the realm signature.
 *
 * @return Realm signature.
 */
String getSignature() {
    return signature;
}

/**
 * Get the realm scope host, if it's a single host realm.
 *
 * @return Optional host. Is empty if the scope of the realm is
 *         domain-scoped.
 */
public Optional getScopeHost() {
    return Optional.ofNullable(host);
}

/**
 * Get realm scope confidence.
 *
 * @return Realm scope confidence.
 */
public ScopeConfidence getScopeConfidence() {
    return scopeConfidence;
}

/**
 * Get the database status of this realm.
 *
 * @return Realm database status.
 */
RealmDbStatus getDbStatus() {
    return dbStatus;
}

/**
 * Get the realm scope.
 *
 * @return Realm scope.
 */
public RealmScope getScope() {
    // A realm bound to a host is LOCAL; otherwise it is treated as DOMAIN.
    return getScopeHost().isPresent() ? RealmScope.LOCAL : RealmScope.DOMAIN;
}

/**
 * Enum to encapsulate a realm scope.
 *
 * Scope of a realm may extend to a single host (local) or to a domain.
 */
public enum RealmScope {

    UNKNOWN(0, bundle.getString("OsAccountRealm.Unknown.text")), // realm scope is unknown.
    LOCAL(1, bundle.getString("OsAccountRealm.Local.text")), // realm scope is a single host.
    DOMAIN(2, bundle.getString("OsAccountRealm.Domain.text")); // realm scope is a domain.

    private final int id;     // scope id
    private final String name; // localized scope name

    RealmScope(int id, String name) {
        this.id = id;
        this.name = name;
    }

    /**
     * Get the id of the realm scope.
     *
     * @return Realm scope id.
     */
    public int getId() {
        return id;
    }

    /**
     * Get the realm scope name.
     *
     * @return Realm scope name.
     */
    public String getName() {
        return name;
    }

    /**
     * Gets a realm scope enum by id.
     *
     * @param typeId Realm scope id.
     *
     * @return The matching RealmScope, or null if there is no match.
*/ public static RealmScope fromID(int typeId) { for (RealmScope scopeType : RealmScope.values()) { if (scopeType.ordinal() == typeId) { return scopeType; } } return null; } } /** * Enum to encapsulate scope confidence. * * We may know for sure that a realm is domain scope or host scope, based * on where it is found. Occasionally, we may have to infer or assume a scope to * initially create a realm. */ public enum ScopeConfidence { KNOWN(0, bundle.getString("OsAccountRealm.Known.text")), // realm scope is known for sure. INFERRED(1, bundle.getString("OsAccountRealm.Inferred.text")); // realm scope is inferred private final int id; private final String name; ScopeConfidence(int id, String name) { this.id = id; this.name = name; } /** * Get the id of the realm scope confidence. * * @return Realm scope confidence id. */ public int getId() { return id; } /** * Get the realm scope confidence name. * * @return Realm scope confidence name. */ public String getName() { return name; } /** * Gets a realm scope confidence enum by id. * * @param typeId Realm scope confidence id. * * @return ScopeConfidence enum. */ public static ScopeConfidence fromID(int typeId) { for (ScopeConfidence statusType : ScopeConfidence.values()) { if (statusType.ordinal() == typeId) { return statusType; } } return null; } } /** * Set the signature for the account realm. * * @param signature Realm signature. * * @return Returns true of the address is set, false if the address was not * changed. */ boolean setSignature(String signature) { if (StringUtils.isNotBlank(signature)) { this.signature = signature; return true; } return false; } /** * Encapsulates status of realm row. 
*/ enum RealmDbStatus { ACTIVE(0, "Active"), MERGED(1, "Merged"), DELETED(2, "Deleted"); private final int id; private final String name; RealmDbStatus(int id, String name) { this.id = id; this.name = name; } int getId() { return id; } String getName() { return name; } static RealmDbStatus fromID(int typeId) { for (RealmDbStatus type : RealmDbStatus.values()) { if (type.ordinal() == typeId) { return type; } } return null; } } } sleuthkit-4.11.1/bindings/java/src/org/sleuthkit/datamodel/AnalysisResult.java000644 000765 000024 00000014460 14137073413 030303 0ustar00carrierstaff000000 000000 /* * Sleuth Kit Data Model * * Copyright 2020-2021 Basis Technology Corp. * Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.sleuthkit.datamodel; /** * An AnalysisResult represents the outcome of some analysis technique that was * applied to some data (i.e. Content) to determine the data's relevance. The * result should have a conclusion and a relevance score. The score of the * AnalysisResult will be used to calculate the aggregate score of the parent * data. Additional metadata can be stored as BlackboardAttributes. */ public class AnalysisResult extends BlackboardArtifact { private final String conclusion; // conclusion of analysis - may be an empty string private final Score score; // relevance score based on the conclusion private final String configuration; // Optional descriptor of configuration of analysis technique (such as a set name). 
Maybe empty string private final String justification; // justification/explanation of the conclusion. Maybe empty string. private boolean ignoreResult = false; // ignore this analysis result when computing score of the parent object. /** * Constructs an analysis result. * * @param sleuthkitCase The SleuthKit case (case database) that contains * the artifact data. * @param artifactID The unique id for this artifact. * @param sourceObjId The unique id of the content with which this * artifact is associated. * @param artifactObjId The unique id this artifact, in tsk_objects. * @param dataSourceObjId Object ID of the datasource where the artifact * was found. May be null. * @param artifactTypeID The type id of this artifact. * @param artifactTypeName The type name of this artifact. * @param displayName The display name of this artifact. * @param reviewStatus The review status of this artifact. * @param score The score assigned by the analysis. * @param conclusion Conclusion arrived at by the analysis. May be * null. * @param configuration Configuration used for analysis. May be null. * @param justification Justification for the analysis. May be null. */ AnalysisResult(SleuthkitCase sleuthkitCase, long artifactID, long sourceObjId, long artifactObjId, Long dataSourceObjId, int artifactTypeID, String artifactTypeName, String displayName, ReviewStatus reviewStatus, Score score, String conclusion, String configuration, String justification) { super(sleuthkitCase, artifactID, sourceObjId, artifactObjId, dataSourceObjId, artifactTypeID, artifactTypeName, displayName, reviewStatus); this.score = score; this.conclusion = (conclusion != null) ? conclusion : ""; this.configuration = (configuration != null) ? configuration : ""; this.justification = (justification != null) ? justification : ""; } /** * Constructs an analysis result. * * @param sleuthkitCase The SleuthKit case (case database) that contains * the artifact data. * @param artifactID The unique id for this artifact. 
* @param sourceObjId The unique id of the content with which this * artifact is associated. * @param artifactObjId The unique id this artifact, in tsk_objects. * @param dataSourceObjId Object ID of the datasource where the artifact * was found. May be null. * @param artifactTypeID The type id of this artifact. * @param artifactTypeName The type name of this artifact. * @param displayName The display name of this artifact. * @param reviewStatus The review status of this artifact. * @param isNew If this analysis result is newly created. * @param score The score assigned by the analysis. * @param conclusion Conclusion arrived at by the analysis. May be * null. * @param configuration Configuration used for analysis. May be null. * @param justification Justification for the analysis. May be null. */ AnalysisResult(SleuthkitCase sleuthkitCase, long artifactID, long sourceObjId, long artifactObjID, Long dataSourceObjID, int artifactTypeID, String artifactTypeName, String displayName, ReviewStatus reviewStatus, boolean isNew, Score score, String conclusion, String configuration, String justification) { super(sleuthkitCase, artifactID, sourceObjId, artifactObjID, dataSourceObjID, artifactTypeID, artifactTypeName, displayName, reviewStatus, isNew); this.score = score; this.conclusion = (conclusion != null) ? conclusion : ""; this.configuration = (configuration != null) ? configuration : ""; this.justification = (justification != null) ? justification : ""; } /** * Returns analysis result conclusion. * * @return Conclusion, returns an empty string if not set. */ public String getConclusion() { return conclusion; } /** * Returns relevance score based on conclusion * * @return Score. */ public Score getScore() { return score; } /** * Returns configuration used in analysis. * * @return Configuration, returns an empty string if not set. 
*/ public String getConfiguration() { return configuration; } /** * Returns justification for conclusion * * @return justification, returns an empty string if not set. */ public String getJustification() { return justification; } /** * Sets if this result is to be ignored when calculating the aggregate score * of the parent object. * * @param ignore if the result should be ignored or not. */ public void setIgnoreResult(boolean ignore) { ignoreResult = ignore; } /** * Checks if this result is to be ignored. * * @return true is the result should should be ignored, false otherwise. */ public boolean ignoreResult() { return ignoreResult; } } sleuthkit-4.11.1/bindings/java/src/org/sleuthkit/datamodel/TskFileRange.java000644 000765 000024 00000003670 14137073413 027640 0ustar00carrierstaff000000 000000 /* * Sleuth Kit Data Model * * Copyright 2011 Basis Technology Corp. * Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.sleuthkit.datamodel; /** * An object representation of an entry in tsk_file_layout table Any file can * have one or more file ranges defined to specify physical file layout. 
This is * especially useful for non-fs "virtual" files created for the purpose of data * analysis */ public class TskFileRange { private long byteStart; private long byteLen; private long sequence; /** * Create file range to map the database object * * @param byteStart byte start with respect to the image * @param byteLen length of the range in bytes * @param sequence sequence order of the range for the file */ public TskFileRange(long byteStart, long byteLen, long sequence) { this.byteStart = byteStart; this.byteLen = byteLen; this.sequence = sequence; } /** * Get start byte of the range, with respect to the image * * @return start bye of the range */ public long getByteStart() { return byteStart; } /** * Get the byte length of the range * * @return length in bytes */ public long getByteLen() { return byteLen; } /** * Get sequence of this range defining ordering of this range with respect * to other ranges for the file * * @return sequence number of this range */ public long getSequence() { return sequence; } } sleuthkit-4.11.1/bindings/java/src/org/sleuthkit/datamodel/SpecialDirectory.java000644 000765 000024 00000005553 14137073413 030571 0ustar00carrierstaff000000 000000 /* * SleuthKit Java Bindings * * Copyright 2011-2017 Basis Technology Corp. * Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
*/ package org.sleuthkit.datamodel; import java.util.Collections; import java.util.List; import org.sleuthkit.datamodel.TskData.FileKnown; import org.sleuthkit.datamodel.TskData.TSK_FS_META_TYPE_ENUM; import org.sleuthkit.datamodel.TskData.TSK_FS_NAME_FLAG_ENUM; import org.sleuthkit.datamodel.TskData.TSK_FS_NAME_TYPE_ENUM; /** * Parent class for special directory types (Local and Virtual) */ public abstract class SpecialDirectory extends AbstractFile { SpecialDirectory(SleuthkitCase db, long objId, long dataSourceObjectId, TskData.TSK_FS_ATTR_TYPE_ENUM attrType, int attrId, String name, TskData.TSK_DB_FILES_TYPE_ENUM fileType, long metaAddr, int metaSeq, TSK_FS_NAME_TYPE_ENUM dirType, TSK_FS_META_TYPE_ENUM metaType, TSK_FS_NAME_FLAG_ENUM dirFlag, short metaFlags, long size, long ctime, long crtime, long atime, long mtime, short modes, int uid, int gid, String md5Hash, String sha256Hash, FileKnown knownState, String parentPath, String mimeType) { super(db, objId, dataSourceObjectId, attrType, attrId, name, fileType, metaAddr, metaSeq, dirType, metaType, dirFlag, metaFlags, size, ctime, crtime, atime, mtime, modes, uid, gid, md5Hash, sha256Hash, knownState, parentPath, mimeType, null, OsAccount.NO_OWNER_ID, OsAccount.NO_ACCOUNT, Collections.emptyList()); } /** * Gets the extents in terms of byte addresses of this directory * within its data source, always an empty list. * * @return An empty list. * * @throws TskCoreException if there was an error querying the case * database. */ @Override public List getRanges() throws TskCoreException { return Collections.emptyList(); } /** * Indicates whether or not this is a data source. * * @return True or false. */ public boolean isDataSource() { return (this.getDataSourceObjectId() == this.getId()); } /** * Does nothing, a special directory cannot be opened, read, or closed. */ @Override public void close() { } /** * Indicates whether or not this directory is the root of a file * system, always returns false. * * @return False. 
*/ @Override public boolean isRoot() { return false; } } sleuthkit-4.11.1/bindings/java/src/org/sleuthkit/datamodel/AccountFileInstance.java000644 000765 000024 00000006450 14137073413 031202 0ustar00carrierstaff000000 000000 /* * Sleuth Kit Data Model * * Copyright 2017-18 Basis Technology Corp. * Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.sleuthkit.datamodel; import java.util.Collection; /** * An instance of an Account in a specific file. An Account may be found in * multiple Content objects (such as different databases) on a single device. * There is a 1:N relationship between Account objects and AccountFileInstance * objects. A TSK_ACCOUNT artifact is created for every account file instance. * * AccountFileInstances can optionally have BlackboardAttributes to store more * details. */ public final class AccountFileInstance { private final BlackboardArtifact artifact; private final Account account; AccountFileInstance(BlackboardArtifact artifact, Account account) throws TskCoreException { this.artifact = artifact; this.account = account; } /** * Gets the first occurrence of an attribute by type. * * @param attrType The attribute type. * * @return The attribute, or null if no attribute of the given type exists. * * @throws TskCoreException if an there is an error getting the attribute. 
*/ public BlackboardAttribute getAttribute(BlackboardAttribute.ATTRIBUTE_TYPE attrType) throws TskCoreException { return this.artifact.getAttribute(new BlackboardAttribute.Type(attrType)); } /** * Adds an attribute. It is faster to add multiple attributes as a * collection using addAttributes(). * * @param bbatr The attribute to add. * * @throws TskCoreException if an there is an error adding the attribute. */ public void addAttribute(BlackboardAttribute bbatr) throws TskCoreException { this.artifact.addAttribute(bbatr); } /** * Adds a collection of attributes * * @param bbatrs The collection of attributes to add. * * @throws TskCoreException if an there is an error adding the attributes. */ public void addAttributes(Collection bbatrs) throws TskCoreException { this.artifact.addAttributes(bbatrs); } /** * Gets the underlying Account for this instance. * * @return The account. * * @throws TskCoreException if an there is an error getting the account. */ public Account getAccount() throws TskCoreException { return this.account; } /** * Gets the source content (data source or file within a * data source) of the underlying Account artifact for this instance. * * @return The source content. * * @throws TskCoreException */ public Content getFile() throws TskCoreException { return artifact.getSleuthkitCase().getContentById(artifact.getObjectID()); } /** * Get the object ID of the artifact this account file instance maps to. * * @return A Data Source Object ID */ Long getDataSourceObjectID() { return artifact.getDataSourceObjectID(); } } sleuthkit-4.11.1/bindings/java/src/org/sleuthkit/datamodel/LocalDirectory.java000644 000765 000024 00000011464 14137073413 030241 0ustar00carrierstaff000000 000000 /* * SleuthKit Java Bindings * * Copyright 2011-2017 Basis Technology Corp. * Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * * See the License for the specific language governing permissions and * limitations under the License. */ package org.sleuthkit.datamodel; import java.util.logging.Level; import java.util.logging.Logger; import org.sleuthkit.datamodel.TskData.FileKnown; import org.sleuthkit.datamodel.TskData.TSK_FS_ATTR_TYPE_ENUM; import org.sleuthkit.datamodel.TskData.TSK_FS_META_TYPE_ENUM; import org.sleuthkit.datamodel.TskData.TSK_FS_NAME_FLAG_ENUM; import org.sleuthkit.datamodel.TskData.TSK_FS_NAME_TYPE_ENUM; /** * A local directory that can be used as a parent for local files. * Not a file system */ public class LocalDirectory extends SpecialDirectory { private static final Logger logger = Logger.getLogger(LocalDirectory.class.getName()); /** * Constructs a local directory that can be used as a parent for * local files. Not a file system directory. * * @param db The case database. * @param objId The object id of the local directory. * @param dataSourceObjectId The object id of the data source for the * local directory * @param name The name of the local directory. * @param dirType The TSK_FS_NAME_TYPE_ENUM for the local * directory. * @param metaType The TSK_FS_META_TYPE_ENUM for the local * directory. * @param dirFlag The TSK_FS_META_TYPE_ENUM for the local * directory. * @param metaFlags The meta flags for the local directory. * @param size The size of the local directory, should be * zero. * @param md5Hash The MD5 hash for the local directory. 
* @param knownState The known state for the local directory * @param parentPath The parent path for the local directory */ LocalDirectory(SleuthkitCase db, long objId, long dataSourceObjectId, String name, TSK_FS_NAME_TYPE_ENUM dirType, TSK_FS_META_TYPE_ENUM metaType, TSK_FS_NAME_FLAG_ENUM dirFlag, short metaFlags, String md5Hash, String sha256Hash, FileKnown knownState, String parentPath) { super(db, objId, dataSourceObjectId, TSK_FS_ATTR_TYPE_ENUM.TSK_FS_ATTR_TYPE_DEFAULT, 0, name, TskData.TSK_DB_FILES_TYPE_ENUM.LOCAL_DIR, 0L, 0, dirType, metaType, dirFlag, metaFlags, 0L, 0L, 0L, 0L, 0L, (short) 0, 0, 0, md5Hash, sha256Hash, knownState, parentPath, null); } /** * Check whether this LocalDirectory is a data source. * Will always be false. * @return false */ public boolean isDataSource() { return false; } /** * Indicates whether or not this directory is the root of a file * system. Local directories should only be the root of a file * system in a portable case. * * @return true if the parent of this directory is a file system */ @Override public boolean isRoot() { try { return (getParent() instanceof FileSystem); } catch (TskCoreException ex) { logger.log(Level.SEVERE, "Error getting parent of LocalDirectory with object ID " + getId(), ex); return false; } } /** * Accepts a content visitor (Visitor design pattern). * * @param visitor A ContentVisitor supplying an algorithm to run using this * local directory as input. * * @return The output of the algorithm. */ @Override public T accept(ContentVisitor visitor) { return visitor.visit(this); } /** * Accepts a Sleuthkit item visitor (Visitor design pattern). * * @param visitor A SleuthkitItemVisitor supplying an algorithm to run using * this local directory as input. * * @return The output of the algorithm. */ @Override public T accept(SleuthkitItemVisitor visitor) { return visitor.visit(this); } /** * Provides a string representation of this local directory. 
* * @param preserveState True if state should be included in the string * representation of this object. * * @return string representation of this local directory * @throws TskCoreException if there was an error querying the case * database. */ @Override public String toString(boolean preserveState) { return super.toString(preserveState) + "LocalDirectory [\t" + "]\t"; //NON-NLS } } sleuthkit-4.11.1/bindings/java/src/org/sleuthkit/datamodel/HashUtility.java000644 000765 000024 00000014274 14137073413 027573 0ustar00carrierstaff000000 000000 /* * Sleuth Kit Data Model * * Copyright 2011-2020 Basis Technology Corp. * Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.sleuthkit.datamodel; import java.io.IOException; import java.security.MessageDigest; import java.security.NoSuchAlgorithmException; import java.util.ArrayList; import java.util.Collection; import java.util.List; import java.util.logging.Level; import java.util.logging.Logger; import java.util.Map; import java.util.HashMap; import java.util.Arrays; /** * Utility to calculate a hash for FsContent and store in TSK database */ public class HashUtility { private final static int BUFFER_SIZE = 16 * 1024; /** * Calculate hashes of the content object. 
* * @param content The content object to hash * @param hashTypes The types of hash to compute * * @return A list of the hash results * * @throws TskCoreException */ static public List calculateHashes(Content content, Collection hashTypes) throws TskCoreException { Map digests = new HashMap<>(); for (HashType type : hashTypes) { try { digests.put(type, MessageDigest.getInstance(type.getName())); } catch (NoSuchAlgorithmException ex) { throw new TskCoreException("No algorithm found matching name " + type.getName(), ex); } } // Read in byte size chunks and update the hash value with the data. byte[] data = new byte[BUFFER_SIZE]; int totalChunks = (int) Math.ceil((double) content.getSize() / (double) BUFFER_SIZE); int read; for (long i = 0; i < totalChunks; i++) { try { read = content.read(data, i * BUFFER_SIZE, BUFFER_SIZE); } catch (TskCoreException ex) { throw new TskCoreException("Error reading data at address " + i * BUFFER_SIZE + " from content with ID: " + content.getId(), ex); } // Check for EOF if (read == -1) { break; } // Only update with the read bytes. if (read == BUFFER_SIZE) { for (HashType type : hashTypes) { digests.get(type).update(data); } } else { byte[] subData = Arrays.copyOfRange(data, 0, read); for (HashType type : hashTypes) { digests.get(type).update(subData); } } } List results = new ArrayList<>(); for (HashType type : hashTypes) { byte hashData[] = digests.get(type).digest(); StringBuilder sb = new StringBuilder(); for (byte b : hashData) { sb.append(String.format("%02x", b)); } results.add(new HashResult(type, sb.toString())); } return results; } /** * Determines whether a string representation of an MD5 hash is valid. * * @param md5Hash The hash. * * @return True or false. */ public static boolean isValidMd5Hash(String md5Hash) { return md5Hash.matches("^[A-Fa-f0-9]{32}$"); } /** * Determines whether a string representation of a SHA-1 hash is valid. * * @param sha1Hash The hash. * * @return True or false. 
*/ public static boolean isValidSha1Hash(String sha1Hash) { return sha1Hash.matches("^[A-Fa-f0-9]{40}$"); } /** * Determines whether a string representation of a SHA-256 hash is valid. * * @param sha256Hash The hash. * * @return True or false. */ public static boolean isValidSha256Hash(String sha256Hash) { return sha256Hash.matches("^[A-Fa-f0-9]{64}$"); } /** * Determine if the passed in Hash value is that for no data (i.e. an empty * file). Looking these values up or correlating on them causes lots of * false positives. * * @param md5 * * @return True if it is the empty hash value */ public static boolean isNoDataMd5(String md5) { return md5.toLowerCase().equals("d41d8cd98f00b204e9800998ecf8427e"); //NON-NLS } /** * Utility class to hold a hash value along with its type. */ public static class HashResult { private final HashType type; private final String value; public HashResult(HashType type, String value) { this.type = type; this.value = value; } public HashType getType() { return type; } public String getValue() { return value; } } /** * Hash types that can be calculated. 
*/ public enum HashType { MD5("MD5"), SHA256("SHA-256"); private final String name; // This should be the string expected by MessageDigest HashType(String name) { this.name = name; } String getName() { return name; } } /** * Calculate the MD5 hash for the given FsContent and store it in the * database * * @param file file object whose md5 hash we want to calculate * * @return md5 of the given FsContent object * * @throws java.io.IOException * * @deprecated Use calculateHashes() instead */ @Deprecated static public String calculateMd5(AbstractFile file) throws IOException { Logger logger = Logger.getLogger(HashUtility.class.getName()); String md5Hash = calculateMd5Hash(file); try { file.getSleuthkitCase().setMd5Hash(file, md5Hash); } catch (TskCoreException ex) { logger.log(Level.WARNING, "Error updating content's md5 in database", ex); //NON-NLS } return md5Hash; } /** * Calculate the MD5 hash for the given FsContent * * @param content content object whose md5 hash we want to calculate * * @return md5 of the given FsContent object * * @throws java.io.IOException * * @deprecated Use calculateHashes() instead */ @Deprecated static public String calculateMd5Hash(Content content) throws IOException { try { List results = calculateHashes(content, Arrays.asList(HashType.MD5)); return results.stream() .filter(result -> result.getType().equals(HashType.MD5)) .findFirst().get().getValue(); } catch (TskCoreException ex) { // Wrap in an IOException to retain the current method signature throw new IOException(ex); } } }sleuthkit-4.11.1/bindings/java/src/org/sleuthkit/datamodel/LocalFilesDataSource.java000755 000765 000024 00000032335 14137073413 031315 0ustar00carrierstaff000000 000000 /* * Sleuth Kit Data Model * * Copyright 2011-2021 Basis Technology Corp. * Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.sleuthkit.datamodel; import java.sql.ResultSet; import java.sql.SQLException; import java.sql.Statement; import java.util.logging.Level; import java.util.logging.Logger; /** * A local/logical files and/or directories data source. * * NOTE: The DataSource interface is an emerging feature and at present is only * useful for obtaining the object id and the device id, an ASCII-printable * identifier for the device associated with the data source that is intended to * be unique across multiple cases (e.g., a UUID). In the future, this interface * will extend the Content interface and the AbstractDataSource will become an * abstract superclass. */ public class LocalFilesDataSource extends VirtualDirectory implements DataSource { private final long objectId; private final String deviceId; private final String timezone; private volatile Host host; private static final Logger LOGGER = Logger.getLogger(LocalFilesDataSource.class.getName()); /** * Constructs a local/logical files and/or directories data source. * * @param db The case database. * @param objId The object id of the virtual directory. * @param dataSourceObjectId The object id of the data source for the * virtual directory; same as objId if the virtual * directory is a data source. * @param name The name of the virtual directory. * @param dirType The TSK_FS_NAME_TYPE_ENUM for the virtual * directory. * @param deviceId The device ID for the data source. * @param metaType The TSK_FS_META_TYPE_ENUM for the virtual * directory. * @param dirFlag The TSK_FS_META_TYPE_ENUM for the virtual * directory. 
* @param metaFlags The meta flags for the virtual directory. * @param timezone The timezone for the data source. * @param md5Hash The MD5 hash for the virtual directory. * @param sha256Hash The SHA-256 hash for the virtual directory. * @param knownState The known state for the virtual directory * @param parentPath The parent path for the virtual directory, * should be "/" if the virtual directory is a * data source. */ public LocalFilesDataSource(SleuthkitCase db, long objId, long dataSourceObjectId, String deviceId, String name, TskData.TSK_FS_NAME_TYPE_ENUM dirType, TskData.TSK_FS_META_TYPE_ENUM metaType, TskData.TSK_FS_NAME_FLAG_ENUM dirFlag, short metaFlags, String timezone, String md5Hash, String sha256Hash, TskData.FileKnown knownState, String parentPath) { super(db, objId, dataSourceObjectId, name, dirType, metaType, dirFlag, metaFlags, md5Hash, sha256Hash, knownState, parentPath); this.objectId = objId; this.deviceId = deviceId; this.timezone = timezone; } /** * Returns the VirtualDirectory instance. /deprecated LocalFilesDataSource * is already a VirtualDirectory. * * @return This object. * * @deprecated LocalFilesDataSource is already a VirtualDirectory. */ @Deprecated public VirtualDirectory getRootDirectory() { return this; } /** * Gets the ASCII-printable identifier for the device associated with the * data source. This identifier is intended to be unique across multiple * cases (e.g., a UUID). * * @return The device id. */ @Override public String getDeviceId() { return deviceId; } /** * Gets the time zone that was used to process the data source. * * @return The time zone. */ @Override public String getTimeZone() { return timezone; } /** * Set the name for this data source. 
* * @param newName The new name for the data source * * @throws TskCoreException Thrown if an error occurs while updating the database */ @Override public void setDisplayName(String newName) throws TskCoreException { this.getSleuthkitCase().setFileName(newName, objectId); } /** * Gets the size of the contents of the data source in bytes. This size can * change as archive files within the data source are expanded, files are * carved, etc., and is different from the size of the data source as * returned by Content.getSize, which is the size of the data source as a * file. * * @param sleuthkitCase The sleuthkit case instance from which to make calls * to the database. * * @return The size in bytes. * * @throws TskCoreException Thrown when there is an issue trying to retrieve * data from the database. */ @Override public long getContentSize(SleuthkitCase sleuthkitCase) throws TskCoreException { return getContentSize(sleuthkitCase, objectId); } /** * Gets the size of the contents of the data source in bytes given a data * source object ID. This size can change as archive files within the data * source are expanded, files are carved, etc., and is different from the * size of the data source as returned by Content.getSize, which is the size * of the data source as a file. * * @param sleuthkitCase The sleuthkit case instance from which to make calls * to the database. * * @return The size in bytes. * * @throws TskCoreException Thrown when there is an issue trying to retrieve * data from the database. 
*/ static long getContentSize(SleuthkitCase sleuthkitCase, long dataSourceObjId) throws TskCoreException { SleuthkitCase.CaseDbConnection connection; Statement statement = null; ResultSet resultSet = null; long contentSize = 0; connection = sleuthkitCase.getConnection(); try { statement = connection.createStatement(); resultSet = connection.executeQuery(statement, "SELECT SUM (size) FROM tsk_files WHERE tsk_files.data_source_obj_id = " + dataSourceObjId); if (resultSet.next()) { contentSize = resultSet.getLong("sum"); } } catch (SQLException ex) { throw new TskCoreException(String.format("There was a problem while querying the database for size data for object ID %d.", dataSourceObjId), ex); } finally { closeResultSet(resultSet); closeStatement(statement); connection.close(); } return contentSize; } /** * Sets the acquisition details field in the case database. * * @param details The acquisition details * * @throws TskCoreException Thrown if the data can not be written */ @Override public void setAcquisitionDetails(String details) throws TskCoreException { getSleuthkitCase().setAcquisitionDetails(this, details); } /** * Sets the acquisition tool details such as its name, version number and * any settings used during the acquisition to acquire data. * * @param name The name of the acquisition tool. May be NULL. * @param version The acquisition tool version number. May be NULL. * @param settings The settings used by the acquisition tool. May be NULL. * * @throws TskCoreException Thrown if the data can not be written */ @Override public void setAcquisitionToolDetails(String name, String version, String settings) throws TskCoreException { getSleuthkitCase().setAcquisitionToolDetails(this, name, version, settings); } /** * Gets the acquisition details field from the case database. 
	 *
	 * @return The acquisition details
	 *
	 * @throws TskCoreException Thrown if the data can not be read
	 */
	@Override
	public String getAcquisitionDetails() throws TskCoreException {
		return getSleuthkitCase().getAcquisitionDetails(this);
	}

	/**
	 * Gets the acquisition tool settings field from the case database.
	 *
	 * @return The acquisition tool settings. May be Null if not set.
	 *
	 * @throws TskCoreException Thrown if the data can not be read
	 */
	@Override
	public String getAcquisitionToolSettings() throws TskCoreException {
		return getSleuthkitCase().getDataSourceInfoString(this, "acquisition_tool_settings");
	}

	/**
	 * Gets the acquisition tool name field from the case database.
	 *
	 * @return The acquisition tool name. May be Null if not set.
	 *
	 * @throws TskCoreException Thrown if the data can not be read
	 */
	public String getAcquisitionToolName() throws TskCoreException {
		return getSleuthkitCase().getDataSourceInfoString(this, "acquisition_tool_name");
	}

	/**
	 * Gets the acquisition tool version field from the case database.
	 *
	 * @return The acquisition tool version. May be Null if not set.
	 *
	 * @throws TskCoreException Thrown if the data can not be read
	 */
	public String getAcquisitionToolVersion() throws TskCoreException {
		return getSleuthkitCase().getDataSourceInfoString(this, "acquisition_tool_version");
	}

	/**
	 * Gets the host for this data source.
	 *
	 * @return The host
	 *
	 * @throws TskCoreException
	 */
	@Override
	public Host getHost() throws TskCoreException {
		// This is a check-then-act race condition that may occasionally result
		// in additional processing but is safer than using locks.
		if (host == null) {
			host = getSleuthkitCase().getHostManager().getHostByDataSource(this);
		}
		return host;
	}

	/**
	 * Gets the added date field from the case database.
	 *
	 * @return The date time when the image was added in epoch seconds.
	 *
	 * @throws TskCoreException Thrown if the data can not be read
	 */
	public Long getDateAdded() throws TskCoreException {
		return getSleuthkitCase().getDataSourceInfoLong(this, "added_date_time");
	}

	/**
	 * Close a ResultSet.
	 *
	 * @param resultSet The ResultSet to be closed.
	 */
	private static void closeResultSet(ResultSet resultSet) {
		if (resultSet != null) {
			try {
				resultSet.close();
			} catch (SQLException ex) {
				// Best effort: log and continue so cleanup of other resources proceeds.
				LOGGER.log(Level.SEVERE, "Error closing ResultSet", ex); //NON-NLS
			}
		}
	}

	/**
	 * Close a Statement.
	 *
	 * @param statement The Statement to be closed.
	 */
	private static void closeStatement(Statement statement) {
		if (statement != null) {
			try {
				statement.close();
			} catch (SQLException ex) {
				// Best effort: log and continue.
				LOGGER.log(Level.SEVERE, "Error closing Statement", ex); //NON-NLS
			}
		}
	}

	/**
	 * Accepts a content visitor (Visitor design pattern).
	 *
	 * @param The type returned by the visitor.
	 * @param visitor A ContentVisitor supplying an algorithm to run using this
	 *                virtual directory as input.
	 *
	 * @return The output of the algorithm.
	 */
	@Override
	public T accept(ContentVisitor visitor) {
		return visitor.visit(this);
	}

	/**
	 * Accepts a Sleuthkit item visitor (Visitor design pattern).
	 *
	 * @param The type returned by the visitor.
	 * @param visitor A SleuthkitItemVisitor supplying an algorithm to run using
	 *                this virtual directory as input.
	 *
	 * @return The output of the algorithm.
	 */
	@Override
	public T accept(SleuthkitItemVisitor visitor) {
		return visitor.visit(this);
	}

	/**
	 * Constructs a local/logical files and/or directories data source.
	 *
	 * @param db                 The case database.
	 * @param objId              The object id of the virtual directory.
	 * @param dataSourceObjectId The object id of the data source for the
	 *                           virtual directory; same as objId if the virtual
	 *                           directory is a data source.
	 * @param name               The name of the virtual directory.
	 * @param dirType            The TSK_FS_NAME_TYPE_ENUM for the virtual
	 *                           directory.
	 * @param deviceId           The device ID for the data source.
	 * @param metaType           The TSK_FS_META_TYPE_ENUM for the virtual
	 *                           directory.
	 * @param dirFlag            The TSK_FS_META_TYPE_ENUM for the virtual
	 *                           directory.
	 * @param metaFlags          The meta flags for the virtual directory.
	 * @param timezone           The timezone for the data source.
	 * @param md5Hash            The MD5 hash for the virtual directory.
	 * @param knownState         The known state for the virtual directory
	 * @param parentPath         The parent path for the virtual directory,
	 *                           should be "/" if the virtual directory is a
	 *                           data source.
	 *
	 * @deprecated Use version with SHA-256 parameter
	 */
	@Deprecated
	public LocalFilesDataSource(SleuthkitCase db, long objId, long dataSourceObjectId, String deviceId, String name, TskData.TSK_FS_NAME_TYPE_ENUM dirType, TskData.TSK_FS_META_TYPE_ENUM metaType, TskData.TSK_FS_NAME_FLAG_ENUM dirFlag, short metaFlags, String timezone, String md5Hash, TskData.FileKnown knownState, String parentPath) {
		// Delegates to the replacement constructor, passing null for the SHA-256 hash.
		this(db, objId, dataSourceObjectId, deviceId, name, dirType, metaType, dirFlag, metaFlags, timezone, md5Hash, null, knownState, parentPath);
	}
}
sleuthkit-4.11.1/bindings/java/src/org/sleuthkit/datamodel/HostAddressManager.java000644 000765 000024 00000072331 14137073413 031040 0ustar00carrierstaff000000 000000 /*
 * Sleuth Kit Data Model
 *
 * Copyright 2021 Basis Technology Corp.
 * Contact: carrier sleuthkit org
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.sleuthkit.datamodel;

import com.google.common.cache.Cache;
import com.google.common.cache.CacheBuilder;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.List;
import java.util.Objects;
import java.util.Optional;
import java.util.logging.Level;
import java.util.logging.Logger;
import java.util.regex.Pattern;
import org.sleuthkit.datamodel.SleuthkitCase.CaseDbConnection;
import org.sleuthkit.datamodel.HostAddress.HostAddressType;

/**
 * Responsible for creating/updating/retrieving host addresses.
 */
// NOTE(review): generic type parameters on Cache/Optional/List appear to have
// been stripped during extraction of this chunk; confirm against upstream.
public class HostAddressManager {

	private static final Logger LOGGER = Logger.getLogger(HostAddressManager.class.getName());

	// The case database this manager operates on.
	private final SleuthkitCase db;

	/**
	 * An HostAddress Object Id entry is maintained in this cache when a
	 * hostaddress and ip mapping is added. This is here to improve the
	 * performance of {@link #hostNameAndIpMappingExists(long) } check.
	 */
	private final Cache recentHostNameAndIpMappingCache = CacheBuilder.newBuilder().maximumSize(200000).build();

	/**
	 * Recently added or accessed Host Address Objects are cached. This is
	 * here to improve performance of the
	 * {@link #hostAddressExists(org.sleuthkit.datamodel.HostAddress.HostAddressType, java.lang.String)}
	 * check as well as the {@link #getHostAddress(org.sleuthkit.datamodel.HostAddress.HostAddressType, java.lang.String) }
	 */
	private final Cache recentHostAddressCache = CacheBuilder.newBuilder().maximumSize(200000).build();

	/**
	 * Recently added host address usage is cached. This is intended to improve
	 * the performance of {@link #addUsage(org.sleuthkit.datamodel.Content, org.sleuthkit.datamodel.HostAddress) }
	 * Key: DatasourceId # Host Id # Content Id. Value has no significance. it will be set to true if there is
	 * a value in cache for the key.
	 */
	private final Cache hostAddressUsageCache = CacheBuilder.newBuilder().maximumSize(200000).build();

	/**
	 * Construct a HostAddressManager for the given SleuthkitCase.
	 *
	 * @param skCase The SleuthkitCase
	 *
	 */
	HostAddressManager(SleuthkitCase skCase) {
		this.db = skCase;
	}

	/**
	 * Gets an address record with given type and address.
	 *
	 * @param type    Address type.
	 * @param address Address.
	 *
	 * @return Matching address.
	 *
	 * @throws TskCoreException
	 */
	public Optional getHostAddress(HostAddress.HostAddressType type, String address) throws TskCoreException {
		db.acquireSingleUserCaseReadLock();
		try (CaseDbConnection connection = this.db.getConnection()) {
			return HostAddressManager.this.getHostAddress(type, address, connection);
		} finally {
			db.releaseSingleUserCaseReadLock();
		}
	}

	/**
	 * Gets an address record with given type and address.
	 *
	 * @param type       Address type.
	 * @param address    Address.
	 * @param connection Connection to use for DB operation.
	 *
	 * @return Matching address.
	 *
	 * @throws TskCoreException
	 */
	private Optional getHostAddress(HostAddress.HostAddressType type, String address, CaseDbConnection connection) throws TskCoreException {
		// Serve from the cache when possible to avoid a database round trip.
		HostAddress hostAddress = recentHostAddressCache.getIfPresent(createRecentHostAddressKey(type, address));
		if (Objects.nonNull(hostAddress)) {
			return Optional.of(hostAddress);
		}

		HostAddress.HostAddressType addressType = type;
		if (type.equals(HostAddress.HostAddressType.DNS_AUTO)) {
			// DNS_AUTO means "detect the concrete type from the address text".
			addressType = getDNSType(address);
		}

		String normalizedAddress = getNormalizedAddress(address);

		String queryString = "SELECT * FROM tsk_host_addresses"
				+ " WHERE address = ? AND address_type = ?";
		try {
			PreparedStatement query = connection.getPreparedStatement(queryString, Statement.NO_GENERATED_KEYS);
			query.clearParameters();
			// Addresses are stored lowercased; compare lowercased too.
			query.setString(1, normalizedAddress.toLowerCase());
			query.setInt(2, addressType.getId());
			try (ResultSet rs = query.executeQuery()) {
				if (!rs.next()) {
					return Optional.empty(); // no match found
				} else {
					HostAddress newHostAddress = new HostAddress(db, rs.getLong("id"), HostAddressType.fromID(rs.getInt("address_type")), rs.getString("address"));
					recentHostAddressCache.put(createRecentHostAddressKey(newHostAddress.getAddressType(), normalizedAddress), newHostAddress);
					return Optional.of(newHostAddress);
				}
			}
		} catch (SQLException ex) {
			throw new TskCoreException(String.format("Error getting host address with type = %s and address = %s", type.getName(), address), ex);
		}
	}

	/**
	 * Create a key string for use as a cache key.
	 *
	 * @param type    Address type.
	 * @param address Address.
	 *
	 * @return Cache key defined as typeId + # + address lowercased.
	 */
	private String createRecentHostAddressKey(HostAddressType type, String address) {
		return createRecentHostAddressKey(type.getId(), address);
	}

	/**
	 * Create a key string for use as a cache key.
	 *
	 * @param typeId  Address type Id.
	 * @param address Address.
	 *
	 * @return Cache key defined as typeId + # + address lowercased.
	 */
	private String createRecentHostAddressKey(int typeId, String address) {
		return typeId + "#" + address.toLowerCase();
	}

	/**
	 * Add a new address with the given type and address. If the address already
	 * exists in the database, the existing entry will be returned.
	 *
	 * @param type    Address type.
	 * @param address Address (case-insensitive).
	 *
	 * @return HostAddress
	 *
	 * @throws TskCoreException
	 */
	public HostAddress newHostAddress(HostAddress.HostAddressType type, String address) throws TskCoreException {
		db.acquireSingleUserCaseWriteLock();
		CaseDbConnection connection = this.db.getConnection();
		try {
			return HostAddressManager.this.newHostAddress(type, address, connection);
		} catch (TskCoreException ex) {
			// The insert may have failed because the HostAddress already exists, so
			// try loading it from the database.
			Optional hostAddress = HostAddressManager.this.getHostAddress(type, address, connection);
			if (hostAddress.isPresent()) {
				return hostAddress.get();
			}
			throw ex;
		} finally {
			connection.close();
			db.releaseSingleUserCaseWriteLock();
		}
	}

	/**
	 * Insert a row in the tsk_host_addresses with the given type and address.
	 *
	 * @param type       Address type.
	 * @param address    Address.
	 * @param connection Database connection to use.
	 *
	 * @return HostAddress.
	 *
	 * @throws TskCoreException
	 */
	private HostAddress newHostAddress(HostAddress.HostAddressType type, String address, CaseDbConnection connection) throws TskCoreException {
		HostAddress.HostAddressType addressType = type;
		if (type.equals(HostAddress.HostAddressType.DNS_AUTO)) {
			// Detect the concrete type (IPv4/IPv6/host name) from the address text.
			addressType = getDNSType(address);
		}

		String normalizedAddress = getNormalizedAddress(address);

		try {
			// TODO: need to get the correct parent obj id.
			long parentObjId = 0;
			int objTypeId = TskData.ObjectType.HOST_ADDRESS.getObjectType();

			long objId = db.addObject(parentObjId, objTypeId, connection);

			String hostAddressInsertSQL = "INSERT INTO tsk_host_addresses(id, address_type, address) VALUES (?, ?, ?)"; // NON-NLS
			PreparedStatement preparedStatement = connection.getPreparedStatement(hostAddressInsertSQL, Statement.RETURN_GENERATED_KEYS);
			preparedStatement.clearParameters();
			preparedStatement.setLong(1, objId);
			preparedStatement.setInt(2, addressType.getId());
			// Stored lowercased so lookups can be case-insensitive.
			preparedStatement.setString(3, normalizedAddress.toLowerCase());

			connection.executeUpdate(preparedStatement);
			HostAddress hostAddress = new HostAddress(db, objId, addressType, normalizedAddress);
			recentHostAddressCache.put(createRecentHostAddressKey(addressType, normalizedAddress), hostAddress);
			return hostAddress;
		} catch (SQLException ex) {
			throw new TskCoreException(String.format("Error adding host address of type = %s, with address = %s", type.getName(), address), ex);
		}
	}

	/**
	 * Add a host to address mapping.
	 *
	 * @param host        Host.
	 * @param hostAddress Address.
	 * @param time        Time at which the mapping was valid.
	 * @param source      Content from where this mapping was derived.
	 *
	 * @throws TskCoreException
	 */
	public void assignHostToAddress(Host host, HostAddress hostAddress, Long time, Content source) throws TskCoreException {

		String insertSQL = db.getInsertOrIgnoreSQL(" INTO tsk_host_address_map(host_id, addr_obj_id, source_obj_id, time) "
				+ " VALUES(?, ?, ?, ?) ");

		db.acquireSingleUserCaseWriteLock();
		try (CaseDbConnection connection = this.db.getConnection()) {

			PreparedStatement preparedStatement = connection.getPreparedStatement(insertSQL, Statement.NO_GENERATED_KEYS);
			preparedStatement.clearParameters();
			preparedStatement.setLong(1, host.getHostId());
			preparedStatement.setLong(2, hostAddress.getId());
			preparedStatement.setLong(3, source.getId());
			if (time != null) {
				preparedStatement.setLong(4, time);
			} else {
				// No timestamp supplied; store SQL NULL rather than a sentinel.
				preparedStatement.setNull(4, java.sql.Types.BIGINT);
			}

			connection.executeUpdate(preparedStatement);
		} catch (SQLException ex) {
			LOGGER.log(Level.SEVERE, null, ex);
			throw new TskCoreException(String.format("Error adding host address mapping for host name = %s, with address = %s", host.getName(), hostAddress.getAddress()), ex);
		} finally {
			db.releaseSingleUserCaseWriteLock();
		}
	}

	/**
	 * Get all the addresses that have been assigned to the given host.
	 *
	 * @param host Host to get addresses for.
	 *
	 * @return List of addresses, may be empty.
	 */
	List getHostAddressesAssignedTo(Host host) throws TskCoreException {

		String queryString = "SELECT addr_obj_id FROM tsk_host_address_map "
				+ " WHERE host_id = " + host.getHostId();

		List addresses = new ArrayList<>();

		db.acquireSingleUserCaseReadLock();
		try (CaseDbConnection connection = this.db.getConnection();
				Statement s = connection.createStatement();
				ResultSet rs = connection.executeQuery(s, queryString)) {
			while (rs.next()) {
				addresses.add(HostAddressManager.this.getHostAddress(rs.getLong("addr_obj_id"), connection));
			}
			return addresses;
		} catch (SQLException ex) {
			throw new TskCoreException(String.format("Error getting host addresses for host " + host.getName()), ex);
		} finally {
			db.releaseSingleUserCaseReadLock();
		}
	}

	/**
	 * Gets an address for the given object id.
	 *
	 * @param id Object id.
	 *
	 * @return The corresponding HostAddress object.
	 *
	 * @throws TskCoreException
	 */
	public HostAddress getHostAddress(long id) throws TskCoreException {
		db.acquireSingleUserCaseReadLock();
		try (CaseDbConnection connection = this.db.getConnection()) {
			return HostAddressManager.this.getHostAddress(id, connection);
		} finally {
			db.releaseSingleUserCaseReadLock();
		}
	}

	/**
	 * Gets an address for the given object id.
	 *
	 * @param id         Id of the host address.
	 * @param connection Current connection
	 *
	 * @return The corresponding HostAddress.
	 *
	 * @throws TskCoreException
	 */
	private HostAddress getHostAddress(long id, CaseDbConnection connection) throws TskCoreException {
		String queryString = "SELECT * FROM tsk_host_addresses"
				+ " WHERE id = " + id;

		try (Statement s = connection.createStatement();
				ResultSet rs = connection.executeQuery(s, queryString)) {
			if (!rs.next()) {
				// Caller asked for a specific id; a missing row is an error, not an empty result.
				throw new TskCoreException(String.format("No address found with id = %d", id));
			} else {
				long objId = rs.getLong("id");
				int type = rs.getInt("address_type");
				String address = rs.getString("address");
				HostAddress hostAddress = new HostAddress(db, objId, HostAddress.HostAddressType.fromID(type), address);
				recentHostAddressCache.put(createRecentHostAddressKey(type, address), hostAddress);
				return hostAddress;
			}
		} catch (SQLException ex) {
			throw new TskCoreException(String.format("Error getting host address with id = %d", id), ex);
		}
	}

	/**
	 * Adds a row to the ipAddress table.
	 *
	 * @param dnsNameAddress The DNS name.
	 * @param ipAddress      An IP address associated with the DNS name.
	 * @param time           Timestamp when this relationship was true.
	 * @param source         The source.
* * @throws TskCoreException */ public void addHostNameAndIpMapping(HostAddress dnsNameAddress, HostAddress ipAddress, Long time, Content source) throws TskCoreException { db.acquireSingleUserCaseWriteLock(); try (CaseDbConnection connection = this.db.getConnection()) { addHostNameAndIpMapping(dnsNameAddress, ipAddress, time, source, connection); } catch (SQLException ex) { throw new TskCoreException(String.format("Error adding host DNS address mapping for DNS name = %s, and IP address = %s", dnsNameAddress.getAddress(), ipAddress.getAddress()), ex); } finally { db.releaseSingleUserCaseWriteLock(); } } /** * Adds a row to the host address dns ip map table. * * @param dnsNameAddress The DNS name. * @param ipAddress An IP address associated with the DNS name. * @param time Timestamp when this relationship was true. * @param source The source. * @param caseDbTransaction The transaction in the scope of which the * operation is to be performed, managed by the * caller. Null is not permitted. * * @throws TskCoreException */ public void addHostNameAndIpMapping(HostAddress dnsNameAddress, HostAddress ipAddress, Long time, Content source, final SleuthkitCase.CaseDbTransaction caseDbTransaction) throws TskCoreException { if (Objects.isNull(caseDbTransaction)) { throw new TskCoreException(String.format("Error adding host DNS address mapping for DNS name = %s, and IP address = %s, null caseDbTransaction passed to addHostNameAndIpMapping", dnsNameAddress.getAddress(), ipAddress.getAddress())); } try { addHostNameAndIpMapping(dnsNameAddress, ipAddress, time, source, caseDbTransaction.getConnection()); } catch (SQLException ex) { throw new TskCoreException(String.format("Error adding host DNS address mapping for DNS name = %s, and IP address = %s", dnsNameAddress.getAddress(), ipAddress.getAddress()), ex); } } /** * Adds a row to the host address dns ip map table. * * @param dnsNameAddress The DNS name. * @param ipAddress An IP address associated with the DNS name. 
* @param time Timestamp when this relationship was true. * @param source The source. * @param connection The db connection. Null is not permitted. * * @throws TskCoreException */ private void addHostNameAndIpMapping(HostAddress dnsNameAddress, HostAddress ipAddress, Long time, Content source, final CaseDbConnection connection) throws SQLException, TskCoreException { if (dnsNameAddress.getAddressType() != HostAddress.HostAddressType.HOSTNAME) { throw new TskCoreException("IllegalArguments passed to addHostNameAndIpMapping: A host name address is expected."); } if ((ipAddress.getAddressType() != HostAddress.HostAddressType.IPV4) && (ipAddress.getAddressType() != HostAddress.HostAddressType.IPV6)) { throw new TskCoreException("IllegalArguments passed to addHostNameAndIpMapping:An IPv4/IPv6 address is expected."); } if (Objects.isNull(connection)) { throw new TskCoreException("IllegalArguments passed to addHostNameAndIpMapping: null connection passed to addHostNameAndIpMapping"); } String insertSQL = db.getInsertOrIgnoreSQL(" INTO tsk_host_address_dns_ip_map(dns_address_id, ip_address_id, source_obj_id, time) " + " VALUES(?, ?, ?, ?) "); PreparedStatement preparedStatement = connection.getPreparedStatement(insertSQL, Statement.NO_GENERATED_KEYS); preparedStatement.clearParameters(); preparedStatement.setLong(1, dnsNameAddress.getId()); preparedStatement.setLong(2, ipAddress.getId()); preparedStatement.setLong(3, source.getId()); if (time != null) { preparedStatement.setLong(4, time); } else { preparedStatement.setNull(4, java.sql.Types.BIGINT); } connection.executeUpdate(preparedStatement); recentHostNameAndIpMappingCache.put(ipAddress.getId(), new Byte((byte) 1)); recentHostNameAndIpMappingCache.put(dnsNameAddress.getId(), new Byte((byte) 1)); } /** * Returns true if addressObjectId is used as either IP or host name *
* Note: This api call uses a database connection. Do not invoke * within a transaction. * * @param addressObjectId * * @return * * @throws TskCoreException */ public boolean hostNameAndIpMappingExists(long addressObjectId) throws TskCoreException { Byte isPresent = recentHostNameAndIpMappingCache.getIfPresent(addressObjectId); if (Objects.nonNull(isPresent)) { return true; } String queryString = "SELECT count(*) as mappingCount FROM tsk_host_address_dns_ip_map WHERE ip_address_id = ? OR dns_address_id = ? "; db.acquireSingleUserCaseReadLock(); try (CaseDbConnection connection = this.db.getConnection(); PreparedStatement ps = connection.getPreparedStatement(queryString, Statement.NO_GENERATED_KEYS);) { ps.clearParameters(); ps.setLong(1, addressObjectId); ps.setLong(2, addressObjectId); try (ResultSet rs = ps.executeQuery()) { if (!rs.next()) { return false; } else { boolean status = rs.getLong("mappingCount") > 0; if (status) { recentHostNameAndIpMappingCache.put(addressObjectId, new Byte((byte) 1)); } return status; } } } catch (SQLException ex) { throw new TskCoreException("Error looking up host address / Ip mapping for address = " + addressObjectId, ex); } finally { db.releaseSingleUserCaseReadLock(); } } /** * Returns ObjectId of HostAddress if it exists. *
	 * Note: This api call uses a database connection. Do not invoke
	 * within a transaction.
	 *
	 * @param type    Address type to look for.
	 * @param address Address to look for.
	 *
	 * @return Object id of the matching address, if one exists.
	 *
	 * @throws TskCoreException
	 */
	public Optional hostAddressExists(HostAddress.HostAddressType type, String address) throws TskCoreException {

		// A cache hit is sufficient: the cached HostAddress carries its object id.
		HostAddress hostAddress = recentHostAddressCache.getIfPresent(createRecentHostAddressKey(type, address));

		if (Objects.nonNull(hostAddress)) {
			return Optional.of(hostAddress.getId());
		}

		HostAddress.HostAddressType addressType = type;
		if (type.equals(HostAddress.HostAddressType.DNS_AUTO)) {
			// DNS_AUTO means "detect the concrete type from the address text".
			addressType = getDNSType(address);
		}
		String normalizedAddress = getNormalizedAddress(address);

		String queryString = "SELECT id, address_type, address FROM tsk_host_addresses"
				+ " WHERE address = ? AND address_type = ?";

		db.acquireSingleUserCaseReadLock();
		try (CaseDbConnection connection = this.db.getConnection();
				PreparedStatement query = connection.getPreparedStatement(queryString, Statement.NO_GENERATED_KEYS);) {
			query.clearParameters();
			query.setString(1, normalizedAddress.toLowerCase());
			query.setInt(2, addressType.getId());
			try (ResultSet rs = query.executeQuery()) {
				if (!rs.next()) {
					return Optional.empty(); // no match found
				} else {
					long objId = rs.getLong("id");
					int addrType = rs.getInt("address_type");
					String addr = rs.getString("address");
					HostAddress hostAddr = new HostAddress(db, objId, HostAddress.HostAddressType.fromID(addrType), addr);
					recentHostAddressCache.put(createRecentHostAddressKey(addrType, normalizedAddress), hostAddr);
					return Optional.of(objId);
				}
			}
		} catch (SQLException ex) {
			throw new TskCoreException(String.format("Error getting host address with type = %s and address = %s", type.getName(), address), ex);
		} finally {
			db.releaseSingleUserCaseReadLock();
		}
	}

	/**
	 * Gets the IP addresses for a given HOSTNAME name.
	 *
	 * @param hostname HOSTNAME name to look for.
	 *
	 * @return List of IP Addresses mapped to this dns name. May be empty.
* * @throws TskCoreException */ public List getIpAddress(String hostname) throws TskCoreException { String queryString = "SELECT ip_address_id FROM tsk_host_address_dns_ip_map as map " + " JOIN tsk_host_addresses as addresses " + " ON map.dns_address_id = addresses.id " + " WHERE addresses.address_type = " + HostAddress.HostAddressType.HOSTNAME.getId() + " AND addresses.address = ?"; db.acquireSingleUserCaseReadLock(); try (CaseDbConnection connection = this.db.getConnection()) { List IpAddresses = new ArrayList<>(); PreparedStatement query = connection.getPreparedStatement(queryString, Statement.NO_GENERATED_KEYS); query.clearParameters(); query.setString(1, hostname.toLowerCase()); try (ResultSet rs = query.executeQuery()) { while (rs.next()) { long ipAddressObjId = rs.getLong("ip_address_id"); IpAddresses.add(HostAddressManager.this.getHostAddress(ipAddressObjId, connection)); recentHostNameAndIpMappingCache.put(ipAddressObjId, new Byte((byte) 1)); } return IpAddresses; } } catch (SQLException ex) { throw new TskCoreException(String.format("Error getting host addresses for host name: " + hostname), ex); } finally { db.releaseSingleUserCaseReadLock(); } } /** * Gets the host names for a given IP address. * * @param ipAddress IP address to look for. * * @return All corresponding host names. 
* * @throws TskCoreException */ List getHostNameByIp(String ipAddress) throws TskCoreException { String queryString = "SELECT dns_address_id FROM tsk_host_address_dns_ip_map as map " + " JOIN tsk_host_addresses as addresses " + " ON map.ip_address_id = addresses.id " + " WHERE ( addresses.address_type = " + HostAddress.HostAddressType.IPV4.getId() + " OR addresses.address_type = " + HostAddress.HostAddressType.IPV6.getId() + ")" + " AND addresses.address = ?"; db.acquireSingleUserCaseReadLock(); try (CaseDbConnection connection = this.db.getConnection()) { List dnsNames = new ArrayList<>(); PreparedStatement query = connection.getPreparedStatement(queryString, Statement.NO_GENERATED_KEYS); query.clearParameters(); query.setString(1, ipAddress.toLowerCase()); try (ResultSet rs = query.executeQuery()) { while (rs.next()) { long dnsAddressId = rs.getLong("dns_address_id"); dnsNames.add(HostAddressManager.this.getHostAddress(dnsAddressId, connection)); recentHostNameAndIpMappingCache.put(dnsAddressId, new Byte((byte) 1)); } return dnsNames; } } catch (SQLException ex) { throw new TskCoreException(String.format("Error getting host addresses for IP address: " + ipAddress), ex); } finally { db.releaseSingleUserCaseReadLock(); } } /** * Associate the given artifact with a HostAddress. * * @param content The content/item using the address. * @param hostAddress The host address. 
	 */
	public void addUsage(Content content, HostAddress hostAddress) throws TskCoreException {
		// Cache key: data source obj id # host address obj id # content obj id.
		String key = content.getDataSource().getId() + "#" + hostAddress.getId() + "#" + content.getId();
		Boolean cachedValue = hostAddressUsageCache.getIfPresent(key);
		if (null != cachedValue) {
			// Usage row was recently inserted; skip the redundant insert.
			return;
		}

		final String insertSQL = db.getInsertOrIgnoreSQL(" INTO tsk_host_address_usage(addr_obj_id, obj_id, data_source_obj_id) "
				+ " VALUES(" + hostAddress.getId() + ", " + content.getId() + ", " + content.getDataSource().getId() + ") ");

		db.acquireSingleUserCaseWriteLock();
		try (CaseDbConnection connection = this.db.getConnection();
				Statement s = connection.createStatement()) {
			connection.executeUpdate(s, insertSQL);
			hostAddressUsageCache.put(key, true);
		} catch (SQLException ex) {
			throw new TskCoreException(String.format("Error associating host address %s with artifact with id %d", hostAddress.getAddress(), content.getId()), ex);
		} finally {
			db.releaseSingleUserCaseWriteLock();
		}
	}

	// Shared SELECT used by the getHostAddressesUsed* methods; callers append
	// their own WHERE clause before executing.
	private final String ADDRESS_USAGE_QUERY = "SELECT addresses.id as id, addresses.address_type as address_type, addresses.address as address "
			+ " FROM tsk_host_address_usage as usage "
			+ " JOIN tsk_host_addresses as addresses "
			+ " ON usage.addr_obj_id = addresses.id ";

	/**
	 * Get all the addresses that have been used by the given content.
	 *
	 * @param content Content to get addresses used for.
	 *
	 * @return List of addresses, may be empty.
	 *
	 * @throws TskCoreException
	 */
	public List getHostAddressesUsedByContent(Content content) throws TskCoreException {
		String queryString = ADDRESS_USAGE_QUERY
				+ " WHERE usage.obj_id = " + content.getId();

		return getHostAddressesUsed(queryString);
	}

	/**
	 * Get all the addresses that have been used by the given data source.
	 *
	 * @param dataSource Data source to get addresses used for.
	 *
	 * @return List of addresses, may be empty.
	 *
	 * @throws TskCoreException
	 */
	public List getHostAddressesUsedOnDataSource(Content dataSource) throws TskCoreException {
		String queryString = ADDRESS_USAGE_QUERY
				+ " WHERE usage.data_source_obj_id = " + dataSource.getId();

		return getHostAddressesUsed(queryString);
	}

	/**
	 * Gets the host addresses used by running the given query.
	 *
	 * @param addressesUsedSQL SQL query to run.
	 *
	 * @return List of addresses, may be empty.
	 *
	 * @throws TskCoreException
	 */
	private List getHostAddressesUsed(String addressesUsedSQL) throws TskCoreException {

		List addressesUsed = new ArrayList<>();

		db.acquireSingleUserCaseReadLock();
		try (CaseDbConnection connection = this.db.getConnection();
				Statement s = connection.createStatement();
				ResultSet rs = connection.executeQuery(s, addressesUsedSQL)) {
			while (rs.next()) {
				addressesUsed.add(new HostAddress(db, rs.getLong("id"), HostAddress.HostAddressType.fromID(rs.getInt("address_type")), rs.getString("address")));
			}
			return addressesUsed;
		} catch (SQLException ex) {
			throw new TskCoreException(String.format("Error getting host addresses used with query string = %s", addressesUsedSQL), ex);
		} finally {
			db.releaseSingleUserCaseReadLock();
		}
	}

	/**
	 * Detects format of address.
	 *
	 * @param address The address.
	 *
	 * @return The detected type.
	 */
	private HostAddress.HostAddressType getDNSType(String address) {
		// Order matters only for clarity; the three tests are mutually exclusive.
		if (isIPv4(address)) {
			return HostAddress.HostAddressType.IPV4;
		} else if (isIPv6(address)) {
			return HostAddress.HostAddressType.IPV6;
		} else {
			// Anything that is not a literal IP is treated as a host name.
			return HostAddress.HostAddressType.HOSTNAME;
		}
	}

	// Each of the four dotted groups must be 0-255 with no leading-zero padding.
	private static final Pattern IPV4_PATTERN =
			Pattern.compile("^(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])(\\.(?!$)|$)){4}$");

	/**
	 * Test if an address is IPv4.
	 *
	 * @param ipAddress The address.
	 *
	 * @return true if it is IPv4 format, false otherwise.
*/ private static boolean isIPv4(String ipAddress) { if (ipAddress != null) { return IPV4_PATTERN.matcher(ipAddress).matches(); } return false; } // IPV6 address examples: // Standard: 684D:1111:222:3333:4444:5555:6:77 // Compressed: 1234:fd2:5621:1:89::4500 // With zone/interface specifier: fe80::1ff:fe23:4567:890a%eth2 // fe80::1ff:fe23:4567:890a%3 private static final Pattern IPV6_STD_PATTERN = Pattern.compile("^(?:[0-9a-fA-F]{1,4}:){7}[0-9a-fA-F]{1,4}(%.+)?$"); private static final Pattern IPV6_HEX_COMPRESSED_PATTERN = Pattern.compile("^((?:[0-9A-Fa-f]{1,4}(?::[0-9A-Fa-f]{1,4})*)?)::((?:[0-9A-Fa-f]{1,4}(?::[0-9A-Fa-f]{1,4})*)?)(%.+)?$"); private static boolean isIPv6StdAddress(final String input) { return IPV6_STD_PATTERN.matcher(input).matches(); } private static boolean isIPv6HexCompressedAddress(final String input) { return IPV6_HEX_COMPRESSED_PATTERN.matcher(input).matches(); } /** * Test if an address is IPv6. * * @param ipAddress The address. * * @return true if it is IPv6 format, false otherwise. */ private static boolean isIPv6(String ipAddress) { if (ipAddress != null) { return isIPv6StdAddress(ipAddress) || isIPv6HexCompressedAddress(ipAddress); } return false; } /** * Normalizes an address. * * It intentionally does NOT convert to lowercase so that the case may be * preserved, and only converted where needed. * * @param address * * @return Normalized address. */ private static String getNormalizedAddress(String address) { String normalizedAddress = address; if (isIPv6(address)) { normalizedAddress = getNormalizedIPV6Address(address); } return normalizedAddress; } /** * Normalize an IPv6 address: * - removing the zone/interface specifier if one exists. * * It intentionally does NOT convert to lowercase so that the case may be * preserved, and only converted where needed. * * @param address Address to normalize. * * @return Normalized IPv6 address. 
	 */
	private static String getNormalizedIPV6Address(String address) {

		String normalizedAddress = address;
		// Strip the zone/interface specifier (e.g. "%eth0" or "%3") if present.
		if ( normalizedAddress.contains("%") ) {
			normalizedAddress = normalizedAddress.substring(0, normalizedAddress.indexOf("%"));
		}

		return normalizedAddress;
	}
}
sleuthkit-4.11.1/bindings/java/src/org/sleuthkit/datamodel/LayoutFile.java000644 000765 000024 00000030007 14137073413 027371 0ustar00carrierstaff000000 000000 /*
 * SleuthKit Java Bindings
 *
 * Copyright 2011-2017 Basis Technology Corp.
 * Contact: carrier sleuthkit org
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.sleuthkit.datamodel;

import java.util.Collections;
import java.util.logging.Level;
import java.util.logging.Logger;
import org.sleuthkit.datamodel.TskData.FileKnown;
import org.sleuthkit.datamodel.TskData.TSK_DB_FILES_TYPE_ENUM;
import org.sleuthkit.datamodel.TskData.TSK_FS_ATTR_TYPE_ENUM;
import org.sleuthkit.datamodel.TskData.TSK_FS_META_TYPE_ENUM;
import org.sleuthkit.datamodel.TskData.TSK_FS_NAME_FLAG_ENUM;
import org.sleuthkit.datamodel.TskData.TSK_FS_NAME_TYPE_ENUM;

/**
 * A representation of a layout file that has been added to a case. Layout files
 * are not file system files, but "virtual" files created from blocks of data
 * (e.g. unallocated) that are treated as files for convenience and uniformity.
 *
 * Because layout files are not real file system files, they only utilize a
 * subset of meta-data attributes.
A layout file normally contains one or more * entry in tsk_file_layout table that define ordered byte block ranges, with * respect to the image. * * The class also supports reads of layout files, reading blocks across ranges * in a sequence. */ public class LayoutFile extends AbstractFile { private long imageHandle = -1; /** * Constructs a representation of a layout file that has been added to a * case. Layout files are not file system files, but "virtual" files created * from blocks of data (e.g. unallocated) that are treated as files for * convenience and uniformity. * * @param db The case database to which the file has been * added. * @param objId The object id of the file in the case database. * @param dataSourceObjectId The object id of the data source for the file. * @param name The name of the file. * @param fileType The type of the file. * @param dirType The type of the file, usually as reported in * the name structure of the file system. May be * set to TSK_FS_NAME_TYPE_ENUM.UNDEF. * @param metaType The type of the file, usually as reported in * the metadata structure of the file system. May * be set to * TSK_FS_META_TYPE_ENUM.TSK_FS_META_TYPE_UNDEF. * @param dirFlag The allocated status of the file, usually as * reported in the name structure of the file * system. * @param metaFlags The allocated status of the file, usually as * reported in the metadata structure of the file * system. * @param size The size of the file. * @param ctime The changed time of the file. * @param crtime The creation time of the file. * @param atime The accessed time of the file * @param mtime The modified time of the file. * @param md5Hash The MD5 hash of the file, null if not yet * calculated. * @param knownState The known state of the file from a hash * database lookup, null if not yet looked up. * @param parentPath The path of the parent of the file. * @param mimeType The MIME type of the file, null if it has not * yet been determined. 
* @param ownerUid UID of the file owner as found in the file * system, can be null. * @param osAccountObjId Obj id of the owner OS account, may be null. */ LayoutFile(SleuthkitCase db, long objId, long dataSourceObjectId, String name, TSK_DB_FILES_TYPE_ENUM fileType, TSK_FS_NAME_TYPE_ENUM dirType, TSK_FS_META_TYPE_ENUM metaType, TSK_FS_NAME_FLAG_ENUM dirFlag, short metaFlags, long size, long ctime, long crtime, long atime, long mtime, String md5Hash, String sha256Hash, FileKnown knownState, String parentPath, String mimeType, String ownerUid, Long osAccountObjId) { super(db, objId, dataSourceObjectId, TSK_FS_ATTR_TYPE_ENUM.TSK_FS_ATTR_TYPE_DEFAULT, 0, name, fileType, 0L, 0, dirType, metaType, dirFlag, metaFlags, size, ctime, crtime, atime, mtime, (short) 0, 0, 0, md5Hash, sha256Hash, knownState, parentPath, mimeType, SleuthkitCase.extractExtension(name), ownerUid, osAccountObjId, Collections.emptyList()); } /** * Gets the number of file layout ranges associated with this layout file. * * @return The number of file layout ranges. */ public int getNumParts() { int numParts = 0; try { numParts = getRanges().size(); } catch (TskCoreException ex) { Logger.getLogger(LayoutFile.class.getName()).log(Level.SEVERE, String.format("Error getting layout ranges for layout file (objId = %d)", getId()), ex); //NON-NLS } return numParts; } /** * Indicates whether or not this layout file is the root of a file system, * always returns false. * * @return False. */ @Override public boolean isRoot() { return false; } /** * Does nothing, a layout file cannot be directly opened, read, or closed. * Use the readInt method to get layout file content. */ @Override public void close() { } /** * Reads bytes from the layout ranges associated with this file. * * @param buf Buffer to read into. * @param offset Start position in the file. * @param len Number of bytes to read. * * @return Number of bytes read. * * @throws TskCoreException if there is a problem reading the file. 
*/ @Override protected int readInt(byte[] buf, long offset, long len) throws TskCoreException { long offsetInThisLayoutContent = 0; // current offset in this LayoutContent int bytesRead = 0; // Bytes read so far // if the caller has requested more data than we have in the file // then make sure we don't go beyond the end of the file long readLen = len; if (offset + readLen > size) readLen = size - offset; if (imageHandle == -1) { Content dataSource = getDataSource(); if ((dataSource != null) && (dataSource instanceof Image)) { Image image = (Image) dataSource; imageHandle = image.getImageHandle(); } else { throw new TskCoreException("Data Source of LayoutFile is not Image"); } } for (TskFileRange range : getRanges()) { if (bytesRead < readLen) { // we haven't read enough yet if (offset < offsetInThisLayoutContent + range.getByteLen()) { // if we are in a range object we want to read from long offsetInRange = 0; // how far into the current range object to start reading if (bytesRead == 0) { // we haven't read anything yet so we want to read from the correct offset in this range object offsetInRange = offset - offsetInThisLayoutContent; // start reading from the correct offset } long offsetInImage = range.getByteStart() + offsetInRange; // how far into the image to start reading long lenToReadInRange = Math.min(range.getByteLen() - offsetInRange, readLen - bytesRead); // how much we can read this time int lenRead = readImgToOffset(imageHandle, buf, bytesRead, offsetInImage, (int) lenToReadInRange); bytesRead += lenRead; if (lenToReadInRange != lenRead) { // If image read failed or was cut short break; } } offsetInThisLayoutContent += range.getByteLen(); } else { // we're done reading break; } } return bytesRead; } /** * Reads bytes from an image into a buffer, starting at given position in * buffer. * * @param imgHandle The image to read from. * @param buf The array to read into. * @param offsetInBuf Where to start in the array. 
* @param offsetInImage Where to start in the image. * @param lenToRead How far to read in the image. * * @return the number of characters read, or -1 if the end of the stream has * been reached * * @throws TskCoreException exception thrown if critical error occurs within * TSK */ private int readImgToOffset(long imgHandle, byte[] buf, int offsetInBuf, long offsetInImage, int lenToRead) throws TskCoreException { byte[] currentBuffer = new byte[lenToRead]; // the buffer for the current range object int lenRead = SleuthkitJNI.readImg(imgHandle, currentBuffer, offsetInImage, lenToRead); System.arraycopy(currentBuffer, 0, buf, offsetInBuf, lenToRead); // copy what we just read into the main buffer return lenRead; } /** * Accepts a content visitor (Visitor design pattern). * * @param visitor A ContentVisitor supplying an algorithm to run using this * file as input. * * @return The output of the algorithm. */ @Override public T accept(ContentVisitor visitor) { return visitor.visit(this); } /** * Accepts a Sleuthkit item visitor (Visitor design pattern). * * @param visitor A SleuthkitItemVisitor supplying an algorithm to run using * this file as input. * * @return The output of the algorithm. */ @Override public T accept(SleuthkitItemVisitor visitor) { return visitor.visit(this); } /** * Provides a string representation of this file. * * @param preserveState True if state should be included in the string * representation of this object. */ @Override public String toString(boolean preserveState) { return super.toString(preserveState) + "LayoutFile [\t" + "]\t"; //NON-NLS } /** * Constructs a representation of a layout file that has been added to a * case. Layout files are not file system files, but "virtual" files created * from blocks of data (e.g. unallocated) that are treated as files for * convenience and uniformity. * * @param db The case database to which the file has been added. * @param objId The object id of the file in the case database. 
* @param name The name of the file. * @param fileType The type of the file. * @param dirType The type of the file, usually as reported in the name * structure of the file system. May be set to * TSK_FS_NAME_TYPE_ENUM.UNDEF. * @param metaType The type of the file, usually as reported in the * metadata structure of the file system. May be set to * TSK_FS_META_TYPE_ENUM.TSK_FS_META_TYPE_UNDEF. * @param dirFlag The allocated status of the file, usually as reported * in the name structure of the file system. * @param metaFlags The allocated status of the file, usually as reported * in the metadata structure of the file system. * @param size The size of the file. * @param md5Hash The MD5 hash of the file, null if not yet calculated. * @param knownState The known state of the file from a hash database * lookup, null if not yet looked up. * @param parentPath The path of the parent of the file. * * @deprecated Do not make subclasses outside of this package. */ @Deprecated @SuppressWarnings("deprecation") protected LayoutFile(SleuthkitCase db, long objId, String name, TSK_DB_FILES_TYPE_ENUM fileType, TSK_FS_NAME_TYPE_ENUM dirType, TSK_FS_META_TYPE_ENUM metaType, TSK_FS_NAME_FLAG_ENUM dirFlag, short metaFlags, long size, String md5Hash, FileKnown knownState, String parentPath) { this(db, objId, db.getDataSourceObjectId(objId), name, fileType, dirType, metaType, dirFlag, metaFlags, size, 0L, 0L, 0L, 0L, md5Hash, null, knownState, parentPath, null, OsAccount.NO_OWNER_ID, OsAccount.NO_ACCOUNT); } } sleuthkit-4.11.1/bindings/java/src/org/sleuthkit/datamodel/Tag.java000755 000765 000024 00000002737 14137073413 026043 0ustar00carrierstaff000000 000000 /* * Sleuth Kit Data Model * * Copyright 2013 Basis Technology Corp. * Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.sleuthkit.datamodel; /** * This class is a base class for data transfer object (DTO) classes that model * tags applied to content and blackboard artifacts by users. */ public abstract class Tag { static long ID_NOT_SET = -1; private long tagID = ID_NOT_SET; private final TagName name; private final String comment; private final String userName; Tag(long tagID, TagName name, String comment, String userName) { this.tagID = tagID; this.name = name; this.comment = comment; this.userName = userName; } /** * Get Tag ID (unique amongst tags) * * @return */ public long getId() { return tagID; } public TagName getName() { return name; } public String getComment() { return comment; } public String getUserName() { return userName == null ? "" : userName; } } sleuthkit-4.11.1/bindings/java/src/org/sleuthkit/datamodel/CommunicationsFilter.java000644 000765 000024 00000021553 14137073413 031460 0ustar00carrierstaff000000 000000 /* * Sleuth Kit Data Model * * Copyright 2017-2018 Basis Technology Corp. * Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
*/ package org.sleuthkit.datamodel; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.HashSet; import java.util.List; import java.util.Set; import java.util.logging.Level; import java.util.logging.Logger; /** * Defines an aggregate of filters to apply to a CommunicationsManager query. */ final public class CommunicationsFilter { /** * For now all filters are anded together */ private final List andFilters; /** * Create a new empty CommunicationsFilter. */ public CommunicationsFilter() { this(Collections.emptyList()); } CommunicationsFilter(List andSubFilters) { this.andFilters = new ArrayList(andSubFilters); } /** * Returns the list of filters that will be ANDed together when applied to a * query. * * NOTE: The returned list is unmodifiable, new filters should be added via * addAndFilter. * * @return An unmodifiable list of the filter. */ public List getAndFilters() { return Collections.unmodifiableList(andFilters); } /** * Adds a filter to list of filters that will be ANDed together when applied * to a query. * * @param subFilter The SubFilter to add. */ public void addAndFilter(SubFilter subFilter) { andFilters.add(subFilter); } /** * Unit level filter. */ public static abstract class SubFilter { /** * Returns a string description of the filter. * * @return A string description of the filter. */ public abstract String getDescription(); /** * Get the SQL string for the filter. * * @param commsManager Communications manager. * * @return SQL String for the filter. */ abstract String getSQL(CommunicationsManager commsManager); } /** * Filters relationships by relationship type. * */ final public static class RelationshipTypeFilter extends SubFilter { private final Set relationshipTypes; /** * Constructs a RelationshipTypeFilter. 
* * @param relationshipTypes set of relationship types */ public RelationshipTypeFilter(Collection relationshipTypes) { this.relationshipTypes = new HashSet(relationshipTypes); } @Override public String getDescription() { return "Filters relationships by relationship type."; } /** * Get the SQL string for the filter. * * @param commsManager Communications manager. * * @return SQL String for the filter. */ @Override public String getSQL(CommunicationsManager commsManager) { if (relationshipTypes.isEmpty()) { return ""; } List relationShipTypeIds = new ArrayList(); for (Relationship.Type relType : relationshipTypes) { relationShipTypeIds.add(relType.getTypeID()); } return " relationships.relationship_type IN ( " + StringUtils.buildCSVString(relationShipTypeIds) + " )"; } } /** * Filters communications by date range */ final public static class DateRangeFilter extends SubFilter { private final long startDate; private final long endDate; private static final long SECS_PER_DAY = 86400; /** * Constructs a DateRangeFilter. * * @param startDate start date in epoch. Use 0 to not specify a date * @param endDate end date in epoch. Use 0 to not specify a date. */ public DateRangeFilter(long startDate, long endDate) { this.startDate = startDate; // Add a day to end date to make it inclusive in the range if (endDate > 0) { this.endDate = endDate + SECS_PER_DAY; } else { this.endDate = endDate; } } /** * Get the start date. * * @return Seconds from java epoch or zero if no value was set */ public long getStartDate() { return startDate; } /** * Get the end date. * @return Seconds from java epoch or zero if no value was set */ public long getEndDate() { return endDate; } @Override public String getDescription() { return "Filters communications by date range."; } /** * Get the SQL string for the filter. * * @param commsManager Communications manager. * * @return SQL String for the filter. 
*/ @Override public String getSQL(CommunicationsManager commsManager) { if ((0 == startDate) && (0 == endDate)) { return ""; } String sql = ""; if (startDate > 0) { sql = "(" + " relationships.date_time IS NULL OR relationships.date_time >= " + startDate + ")"; } if (endDate > 0) { if (!sql.isEmpty()) { sql += " AND "; } sql += "(" + " relationships.date_time IS NULL OR relationships.date_time < " + endDate + ")"; } return sql; } } /** * Filter accounts and relationships by account type. * */ final public static class AccountTypeFilter extends SubFilter { private final Set accountTypes; /** * Constructs a AccountTypeFilter. * * @param accountTypes set of account types to filter on. */ public AccountTypeFilter(Collection accountTypes) { super(); this.accountTypes = new HashSet(accountTypes); } /** * Get the selected Account Types. * * @return A Set of Type values */ public Set getAccountTypes() { return accountTypes; } @Override public String getDescription() { return "Filters accounts and relationships by account type."; } /** * Get the SQL string for the filter. * * @param commsManager Communications manager. * * @return SQL String for the filter. */ @Override public String getSQL(CommunicationsManager commsManager) { if (accountTypes.isEmpty()) { return ""; } List type_ids = new ArrayList(); for (Account.Type accountType : accountTypes) { type_ids.add(commsManager.getAccountTypeId(accountType)); } String account_type_ids_list = StringUtils.buildCSVString(type_ids); return " account_types.account_type_id IN ( " + account_type_ids_list + " )"; } } /** * Filter by device ids. * */ final public static class DeviceFilter extends SubFilter { private final Set deviceIds; /** * Constructs a device filter. * * @param deviceIds set of device Ids to filter on. 
*/ public DeviceFilter(Collection deviceIds) { super(); this.deviceIds = new HashSet(deviceIds); } @Override public String getDescription() { return "Filters accounts and relationships by device id."; } /** * Gets a set of device ids * * @return Collection of device ids */ public Collection getDevices() { return deviceIds; } /** * Get the SQL string for the filter. * * @param commsManager Communications manager. * * @return SQL String for the filter. */ @Override public String getSQL(CommunicationsManager commsManager) { if (deviceIds.isEmpty()) { return ""; } String sql = ""; List ds_ids = new ArrayList(); for (String deviceId : deviceIds) { try { ds_ids.addAll(commsManager.getSleuthkitCase().getDataSourceObjIds(deviceId)); } catch (TskCoreException ex) { Logger.getLogger(DeviceFilter.class.getName()).log(Level.WARNING, "failed to get datasource object ids for deviceId", ex); } } String datasource_obj_ids_list = StringUtils.buildCSVString(ds_ids); if (!datasource_obj_ids_list.isEmpty()) { sql = " relationships.data_source_obj_id IN ( " + datasource_obj_ids_list + " )"; } return sql; } } /** * Filters by the most recent given relationships. */ final public static class MostRecentFilter extends SubFilter { private final int limit; /** * Constructs a MostRecentFilter. * * @param limit An integer limit value or -1 for no limit. * */ public MostRecentFilter(int limit) { super(); this.limit = limit; } /** * Returns the filter limit. 
* * @return Integer filter limit */ public int getLimit() { return limit; } @Override public String getDescription() { return "Filters accounts and relationships by the most recent given relationships."; } @Override String getSQL(CommunicationsManager commsManager) { if(limit > 0) { return "ORDER BY relationships.date_time DESC LIMIT " + limit; } else { return ""; } } } } sleuthkit-4.11.1/bindings/java/src/org/sleuthkit/datamodel/OsAccountInstance.java000755 000765 000024 00000015603 14137073414 030710 0ustar00carrierstaff000000 000000 /* * Sleuth Kit Data Model * * Copyright 2021 Basis Technology Corp. * Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.sleuthkit.datamodel; import java.util.Objects; import java.util.ResourceBundle; /** * An OsAccountInstance represents the appearance of a particular OsAccount on a * particular data source. */ public class OsAccountInstance implements Comparable { private static final ResourceBundle bundle = ResourceBundle.getBundle("org.sleuthkit.datamodel.Bundle"); private final SleuthkitCase skCase; private final long instanceId; private final long accountId; private final long dataSourceId; private final OsAccountInstanceType instanceType; private OsAccount account; private DataSource dataSource; /** * Constructs a representation of an OS account instance. * * * @param skCase The case database. * @param instanceId The instance ID. * @param account The OS account of which this object is an * instance. 
* @param dataSourceObjId The object ID of the data source where the * instance was found. * @param instanceType The instance type. */ OsAccountInstance(SleuthkitCase skCase, long instanceId, OsAccount account, long dataSourceId, OsAccountInstanceType instanceType) { this(skCase, instanceId, account.getId(), dataSourceId, instanceType); this.account = account; } /** * Constructs a representation of an OS account instance. * * @param skCase The case database. * @param instanceId The instance ID. * @param accountObjId The object ID of the OS account of which this * object is an instance. * @param dataSourceObjId The object ID of the data source where the * instance was found. * @param instanceType The instance type. */ OsAccountInstance(SleuthkitCase skCase, long instanceId, long accountObjId, long dataSourceObjId, OsAccountInstanceType instanceType) { this.skCase = skCase; this.instanceId = instanceId; this.accountId = accountObjId; this.dataSourceId = dataSourceObjId; this.instanceType = instanceType; } /** * Gets the instance ID of this OS account instance. * * @return The instance ID. */ public long getInstanceId() { return instanceId; } /** * Returns the OsAccount object for this instance. * * @return The OsAccount object. * * @throws TskCoreException Exception thrown if there is an error querying * the case database. */ public OsAccount getOsAccount() throws TskCoreException { if (account == null) { try { account = skCase.getOsAccountManager().getOsAccountByObjectId(accountId); } catch (TskCoreException ex) { throw new TskCoreException(String.format("Failed to get OsAccount for id %d", accountId), ex); } } return account; } /** * Returns the data source for this account instance. * * @return Return the data source instance. 
* * @throws TskCoreException */ public DataSource getDataSource() throws TskCoreException { if (dataSource == null) { try { dataSource = skCase.getDataSource(dataSourceId); } catch (TskDataException ex) { throw new TskCoreException(String.format("Failed to get DataSource for id %d", dataSourceId), ex); } } return dataSource; } /** * Returns the type for this OsAccount instance. * * @return */ public OsAccountInstanceType getInstanceType() { return instanceType; } /** * Return the dataSourceId value. * * @return Id of the instance data source. */ private long getDataSourceId() { return dataSourceId; } @Override public int compareTo(OsAccountInstance other) { if (equals(other)) { return 0; } if (dataSourceId != other.getDataSourceId()) { return Long.compare(dataSourceId, other.getDataSourceId()); } return Long.compare(accountId, other.accountId); } @Override public boolean equals(Object obj) { if (this == obj) { return true; } if (obj == null) { return false; } if (getClass() != obj.getClass()) { return false; } final OsAccountInstance other = (OsAccountInstance) obj; if (this.accountId != other.accountId) { return false; } return this.dataSourceId == other.getDataSourceId(); } @Override public int hashCode() { int hash = 7; hash = 67 * hash + Objects.hashCode(this.dataSourceId); hash = 67 * hash + Objects.hashCode(this.accountId); hash = 67 * hash + Objects.hashCode(this.instanceType); return hash; } /** * Describes the relationship between an os account instance and the host * where the instance was found. 
* * Whether an os account actually performed any action on the host or if * just a reference to it was found on the host (such as in a log file) */ public enum OsAccountInstanceType { LAUNCHED(0, bundle.getString("OsAccountInstanceType.Launched.text"), bundle.getString("OsAccountInstanceType.Launched.descr.text")), // the user launched a program on the host ACCESSED(1, bundle.getString("OsAccountInstanceType.Accessed.text"), bundle.getString("OsAccountInstanceType.Accessed.descr.text")), // user accesed a resource for read/write REFERENCED(2, bundle.getString("OsAccountInstanceType.Referenced.text"), bundle.getString("OsAccountInstanceType.Referenced.descr.text")); // user was referenced, e.g. in a event log. private final int id; private final String name; private final String description; OsAccountInstanceType(int id, String name, String description) { this.id = id; this.name = name; this.description = description; } /** * Get account instance type id. * * @return Account instance type id. */ public int getId() { return id; } /** * Get account instance type name. * * @return Account instance type name. */ public String getName() { return name; } /** * Get account instance type description. * * @return Account instance type description. */ public String getDescription() { return description; } /** * Gets account instance type enum from id. * * @param typeId Id to look for. * * @return Account instance type enum. */ public static OsAccountInstanceType fromID(int typeId) { for (OsAccountInstanceType statusType : OsAccountInstanceType.values()) { if (statusType.ordinal() == typeId) { return statusType; } } return null; } } } sleuthkit-4.11.1/bindings/java/src/org/sleuthkit/datamodel/AbstractAttribute.java000644 000765 000024 00000026726 14137073413 030760 0ustar00carrierstaff000000 000000 /* * Sleuth Kit Data Model * * Copyright 2021 Basis Technology Corp. 
* Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.sleuthkit.datamodel; import java.util.Arrays; import java.util.Objects; /** * An abstract base class for attributes as name-value pairs with type safety. * The attribute type field indicates which one of the value fields is valid. */ public abstract class AbstractAttribute { private static final char[] HEX_ARRAY = "0123456789ABCDEF".toCharArray(); private final BlackboardAttribute.Type attributeType; private final int valueInt; private final long valueLong; private final double valueDouble; private final String valueString; private final byte[] valueBytes; private SleuthkitCase sleuthkitCase; /** * Constructs an attribute with an integer value. * * @param attributeType The attribute type. * @param valueInt The attribute value. * * @throws IllegalArgumentException If the value type of the specified * attribute type is not * TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.INTEGER. */ public AbstractAttribute(BlackboardAttribute.Type attributeType, int valueInt) { if (attributeType.getValueType() != BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.INTEGER) { throw new IllegalArgumentException("Type mismatched with value type"); } this.attributeType = attributeType; this.valueInt = valueInt; this.valueLong = 0; this.valueDouble = 0; this.valueString = ""; this.valueBytes = new byte[0]; } /** * Constructs an attribute with a long/datetime value. * * @param attributeType The attribute type. 
* @param valueLong The attribute value. * * @throws IllegalArgumentException If the value type of the specified * standard attribute type is not * TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.LONG * or * TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DATETIME. */ public AbstractAttribute(BlackboardAttribute.Type attributeType, long valueLong) { if (attributeType.getValueType() != BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.LONG && attributeType.getValueType() != BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DATETIME) { throw new IllegalArgumentException("Type mismatched with value type"); } this.attributeType = attributeType; this.valueInt = 0; this.valueLong = valueLong; this.valueDouble = 0; this.valueString = ""; this.valueBytes = new byte[0]; } /** * Constructs an attribute with a double value. * * @param attributeType The attribute type. * @param valueDouble The attribute value. * * @throws IllegalArgumentException If the value type of the specified * attribute type is not * TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DOUBLE. */ public AbstractAttribute(BlackboardAttribute.Type attributeType, double valueDouble) { if (attributeType.getValueType() != BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DOUBLE) { throw new IllegalArgumentException("Type mismatched with value type"); } this.attributeType = attributeType; this.valueInt = 0; this.valueLong = 0; this.valueDouble = valueDouble; this.valueString = ""; this.valueBytes = new byte[0]; } /** * Constructs an attribute with a string value. * * @param attributeType The attribute type. * @param valueString The attribute value. * * @throws IllegalArgumentException If the value type of the specified * attribute type is not * TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING. 
*/ public AbstractAttribute(BlackboardAttribute.Type attributeType, String valueString) { if (attributeType.getValueType() != BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING && attributeType.getValueType() != BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.JSON) { throw new IllegalArgumentException("Type mismatched with value type"); } this.attributeType = attributeType; this.valueInt = 0; this.valueLong = 0; this.valueDouble = 0; if (valueString == null) { this.valueString = ""; } else { this.valueString = replaceNulls(valueString).trim(); } this.valueBytes = new byte[0]; } /** * Constructs an attribute with a byte array value. * * @param attributeType The attribute type. * @param valueBytes The attribute value. * * @throws IllegalArgumentException If the value type of the specified * attribute type is not * TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.BYTE. */ public AbstractAttribute(BlackboardAttribute.Type attributeType, byte[] valueBytes) { if (attributeType.getValueType() != BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.BYTE) { throw new IllegalArgumentException("Type mismatched with value type"); } this.attributeType = attributeType; this.valueInt = 0; this.valueLong = 0; this.valueDouble = 0; this.valueString = ""; if (valueBytes == null) { this.valueBytes = new byte[0]; } else { this.valueBytes = valueBytes; } } /** * Constructs an attribute. * * @param attributeTypeID The attribute type id. * @param valueType The attribute value type. * @param valueInt The value from the the value_int32 column. * @param valueLong The value from the the value_int64 column. * @param valueDouble The value from the the value_double column. * @param valueString The value from the the value_text column. * @param valueBytes The value from the the value_byte column. * @param sleuthkitCase A reference to the SleuthkitCase object * representing the case database. 
*/ AbstractAttribute(BlackboardAttribute.Type attributeType, int valueInt, long valueLong, double valueDouble, String valueString, byte[] valueBytes, SleuthkitCase sleuthkitCase) { this.attributeType = attributeType; this.valueInt = valueInt; this.valueLong = valueLong; this.valueDouble = valueDouble; if (valueString == null) { this.valueString = ""; } else { this.valueString = replaceNulls(valueString).trim(); } if (valueBytes == null) { this.valueBytes = new byte[0]; } else { this.valueBytes = valueBytes; } this.sleuthkitCase = sleuthkitCase; } /** * Gets the attribute value as a string, formatted as required. * * @return The value as a string. */ public String getDisplayString() { switch (attributeType.getValueType()) { case STRING: return getValueString(); case INTEGER: if (attributeType.getTypeID() == BlackboardAttribute.ATTRIBUTE_TYPE.TSK_READ_STATUS.getTypeID()) { if (getValueInt() == 0) { return "Unread"; } else { return "Read"; } } return Integer.toString(getValueInt()); case LONG: return Long.toString(getValueLong()); case DOUBLE: return Double.toString(getValueDouble()); case BYTE: return bytesToHexString(getValueBytes()); case DATETIME: // once we have TSK timezone, that should be used here. return TimeUtilities.epochToTime(getValueLong()); case JSON: { return getValueString(); } } return ""; } /** * Gets the type of this attribute. * * @return The attribute type. */ public BlackboardAttribute.Type getAttributeType() { return this.attributeType; } /** * Gets the value type of this attribute. * * @return The value type */ public BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE getValueType() { return attributeType.getValueType(); } /** * Gets the value of this attribute. The value is only valid if the * attribute value type is TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.INTEGER. * * @return The attribute value. */ public int getValueInt() { return valueInt; } /** * Gets the value of this attribute. 
The value is only valid if the * attribute value type is TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.LONG. * * @return The attribute value. */ public long getValueLong() { return valueLong; } /** * Gets the value of this attribute. The value is only valid if the * attribute value type is TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DOUBLE. * * @return The attribute value. */ public double getValueDouble() { return valueDouble; } /** * Gets the value of this attribute. The value is only valid if the * attribute value type is TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING or * TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.JSON. * * @return The attribute value. */ public String getValueString() { return valueString; } /** * Gets the value of this attribute. The value is only valid if the * attribute value type is TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.BYTE. * * @return The attribute value. */ public byte[] getValueBytes() { return Arrays.copyOf(valueBytes, valueBytes.length); } /** * Gets the reference to the SleuthkitCase object that represents the case * database where this attribute is stored. * * @return A reference to a SleuthkitCase object. */ SleuthkitCase getCaseDatabase() { return this.sleuthkitCase; } /** * Sets the reference to the SleuthkitCase object that represents the case * database where this attribute is stored. * * @param sleuthkitCase A reference to a SleuthkitCase object. */ void setCaseDatabase(SleuthkitCase sleuthkitCase) { this.sleuthkitCase = sleuthkitCase; } /** * Converts a byte array to a string. * * @param bytes The byte array. * * @return The string. 
*/ static String bytesToHexString(byte[] bytes) { // from http://stackoverflow.com/questions/9655181/convert-from-byte-array-to-hex-string-in-java char[] hexChars = new char[bytes.length * 2]; for (int j = 0; j < bytes.length; j++) { int v = bytes[j] & 0xFF; hexChars[j * 2] = HEX_ARRAY[v >>> 4]; hexChars[j * 2 + 1] = HEX_ARRAY[v & 0x0F]; } return new String(hexChars); } /** * Replace all NUL characters in the string with the SUB character * * @param text The input string. * * @return The output string. */ static String replaceNulls(String text) { return text.replace((char) 0x00, (char) 0x1A); } /** * Checks whether all of the the value fields of this attribute are equal to * that of another attribute. * * @param that Another attribute. * * @return True or false. */ boolean areValuesEqual(Object that) { if (that instanceof AbstractAttribute) { AbstractAttribute other = (AbstractAttribute) that; Object[] thisObject = new Object[]{this.getAttributeType(), this.getValueInt(), this.getValueLong(), this.getValueDouble(), this.getValueString(), this.getValueBytes()}; Object[] otherObject = new Object[]{other.getAttributeType(), other.getValueInt(), other.getValueLong(), other.getValueDouble(), other.getValueString(), other.getValueBytes()}; return Objects.deepEquals(thisObject, otherObject); } else { return false; } } } sleuthkit-4.11.1/bindings/java/src/org/sleuthkit/datamodel/Directory.java000644 000765 000024 00000027623 14137073413 027272 0ustar00carrierstaff000000 000000 /* * SleuthKit Java Bindings * * Copyright 2011-2017 Basis Technology Corp. * Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.sleuthkit.datamodel; import java.util.Collections; import org.sleuthkit.datamodel.TskData.FileKnown; import org.sleuthkit.datamodel.TskData.TSK_FS_ATTR_TYPE_ENUM; import org.sleuthkit.datamodel.TskData.TSK_FS_META_TYPE_ENUM; import org.sleuthkit.datamodel.TskData.TSK_FS_NAME_FLAG_ENUM; import org.sleuthkit.datamodel.TskData.TSK_FS_NAME_TYPE_ENUM; /** * A representation of a file system directory that has been added to a case. */ public class Directory extends FsContent { /** * Constructs a representation of a file system directory that has been * added to a case. * * @param db The case database to which the file has been * added. * @param objId The object id of the file in the case database. * @param dataSourceObjectId The object id of the data source for the file. * @param fsObjId The object id of the file system to which this * file belongs. * @param attrType The type attribute given to the file by the * file system. * @param attrId The type id given to the file by the file * system. * @param name The name of the file. * @param metaAddr The meta address of the file. * @param metaSeq The meta sequence number of the file. * @param dirType The type of the file, usually as reported in * the name structure of the file system. May be * set to TSK_FS_NAME_TYPE_ENUM.UNDEF. * @param metaType The type of the file, usually as reported in * the metadata structure of the file system. May * be set to * TSK_FS_META_TYPE_ENUM.TSK_FS_META_TYPE_UNDEF. * @param dirFlag The allocated status of the file, usually as * reported in the name structure of the file * system. 
* @param metaFlags The allocated status of the file, usually as * reported in the metadata structure of the file * system. * @param size The size of the file. * @param ctime The changed time of the file. * @param crtime The created time of the file. * @param atime The accessed time of the file. * @param mtime The modified time of the file. * @param modes The modes for the file. * @param uid The UID for the file. * @param gid The GID for the file. * @param md5Hash The MD5 hash of the file, null if not yet * calculated. * @param knownState The known state of the file from a hash * database lookup, null if not yet looked up. * @param parentPath The path of the parent of the file. * @param ownerUid UID of the file owner as found in the file * system, can be null. * @param osAccountObjId Obj id of the owner OS account, may be null. */ Directory(SleuthkitCase db, long objId, long dataSourceObjectId, long fsObjId, TSK_FS_ATTR_TYPE_ENUM attrType, int attrId, String name, long metaAddr, int metaSeq, TSK_FS_NAME_TYPE_ENUM dirType, TSK_FS_META_TYPE_ENUM metaType, TSK_FS_NAME_FLAG_ENUM dirFlag, short metaFlags, long size, long ctime, long crtime, long atime, long mtime, short modes, int uid, int gid, String md5Hash, String sha256Hash, FileKnown knownState, String parentPath, String ownerUid, Long osAccountObjId ) { super(db, objId, dataSourceObjectId, fsObjId, attrType, attrId, name, TskData.TSK_DB_FILES_TYPE_ENUM.FS, metaAddr, metaSeq, dirType, metaType, dirFlag, metaFlags, size, ctime, crtime, atime, mtime, modes, uid, gid, md5Hash, sha256Hash, knownState, parentPath, null, null, ownerUid, osAccountObjId, Collections.emptyList()); } /** * Accepts a content visitor (Visitor design pattern). * * @param visitor A ContentVisitor supplying an algorithm to run using this * directory as input. * * @return The output of the algorithm. */ @Override public T accept(SleuthkitItemVisitor v) { return v.visit(this); } /** * Accepts a Sleuthkit item visitor (Visitor design pattern). 
* * @param visitor A SleuthkitItemVisitor supplying an algorithm to run using * this directory as input. * * @return The output of the algorithm. */ @Override public T accept(ContentVisitor v) { return v.visit(this); } /** * Provides a string representation of this directory. * * @param preserveState True if state should be included in the string * representation of this object. * * @throws TskCoreException if there was an error querying the case * database. */ @Override public String toString(boolean preserveState) { return super.toString(preserveState) + "Directory [\t" + "]\t"; //NON-NLS } /** * Constructs a representation of a file system directory that has been * added to a case. * * @param db The case database to which the file has been added. * @param objId The object id of the file in the case database. * @param fsObjId The object id of the file system to which this file * belongs. * @param attrType The type attribute given to the file by the file * system. * @param attrId The type id given to the file by the file system. * @param name The name of the file. * @param metaAddr The meta address of the file. * @param metaSeq The meta sequence number of the file. * @param dirType The type of the file, usually as reported in the name * structure of the file system. May be set to * TSK_FS_NAME_TYPE_ENUM.UNDEF. * @param metaType The type of the file, usually as reported in the * metadata structure of the file system. May be set to * TSK_FS_META_TYPE_ENUM.TSK_FS_META_TYPE_UNDEF. * @param dirFlag The allocated status of the file, usually as reported * in the name structure of the file system. * @param metaFlags The allocated status of the file, usually as reported * in the metadata structure of the file system. * @param size The size of the file. * @param ctime The changed time of the file. * @param crtime The created time of the file. * @param atime The accessed time of the file. * @param mtime The modified time of the file. * @param modes The modes for the file. 
* @param uid The UID for the file. * @param gid The GID for the file. * @param md5Hash The MD5 hash of the file, null if not yet calculated. * @param knownState The known state of the file from a hash database * lookup, null if not yet looked up. * @param parentPath The path of the parent of the file. * * @deprecated Do not make subclasses outside of this package. */ @Deprecated @SuppressWarnings("deprecation") protected Directory(SleuthkitCase db, long objId, long fsObjId, TSK_FS_ATTR_TYPE_ENUM attrType, short attrId, String name, long metaAddr, int metaSeq, TSK_FS_NAME_TYPE_ENUM dirType, TSK_FS_META_TYPE_ENUM metaType, TSK_FS_NAME_FLAG_ENUM dirFlag, short metaFlags, long size, long ctime, long crtime, long atime, long mtime, short modes, int uid, int gid, String md5Hash, FileKnown knownState, String parentPath) { this(db, objId, db.getDataSourceObjectId(objId), fsObjId, attrType, attrId, name, metaAddr, metaSeq, dirType, metaType, dirFlag, metaFlags, size, ctime, crtime, atime, mtime, modes, uid, gid, md5Hash, knownState, parentPath); } /** * Constructs a representation of a file system directory that has been * added to a case. This deprecated version has attrId filed defined as a * short which has since been changed to an int. * * @param db The case database to which the file has been * added. * @param objId The object id of the file in the case database. * @param dataSourceObjectId The object id of the data source for the file. * @param fsObjId The object id of the file system to which this * file belongs. * @param attrType The type attribute given to the file by the * file system. * @param attrId The type id given to the file by the file * system. * @param name The name of the file. * @param metaAddr The meta address of the file. * @param metaSeq The meta sequence number of the file. * @param dirType The type of the file, usually as reported in * the name structure of the file system. May be * set to TSK_FS_NAME_TYPE_ENUM.UNDEF. 
* @param metaType The type of the file, usually as reported in * the metadata structure of the file system. May * be set to * TSK_FS_META_TYPE_ENUM.TSK_FS_META_TYPE_UNDEF. * @param dirFlag The allocated status of the file, usually as * reported in the name structure of the file * system. * @param metaFlags The allocated status of the file, usually as * reported in the metadata structure of the file * system. * @param size The size of the file. * @param ctime The changed time of the file. * @param crtime The created time of the file. * @param atime The accessed time of the file. * @param mtime The modified time of the file. * @param modes The modes for the file. * @param uid The UID for the file. * @param gid The GID for the file. * @param md5Hash The MD5 hash of the file, null if not yet * calculated. * @param knownState The known state of the file from a hash * database lookup, null if not yet looked up. * @param parentPath The path of the parent of the file. * * @deprecated Do not make subclasses outside of this package. 
*/ @Deprecated @SuppressWarnings("deprecation") Directory(SleuthkitCase db, long objId, long dataSourceObjectId, long fsObjId, TSK_FS_ATTR_TYPE_ENUM attrType, short attrId, String name, long metaAddr, int metaSeq, TSK_FS_NAME_TYPE_ENUM dirType, TSK_FS_META_TYPE_ENUM metaType, TSK_FS_NAME_FLAG_ENUM dirFlag, short metaFlags, long size, long ctime, long crtime, long atime, long mtime, short modes, int uid, int gid, String md5Hash, FileKnown knownState, String parentPath) { this(db, objId, dataSourceObjectId, fsObjId, attrType, (int) attrId, name, metaAddr, metaSeq, dirType, metaType, dirFlag, metaFlags, size, ctime, crtime, atime, mtime, modes, uid, gid, md5Hash, null, knownState, parentPath, OsAccount.NO_OWNER_ID, OsAccount.NO_ACCOUNT); } } sleuthkit-4.11.1/bindings/java/src/org/sleuthkit/datamodel/VersionNumber.java000644 000765 000024 00000004514 14137073413 030116 0ustar00carrierstaff000000 000000 /* * Sleuth Kit Data Model * * Copyright 2011-2017 Basis Technology Corp. * Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.sleuthkit.datamodel; /** * A version number consisting of three parts: Major.Minor.Patch. The compareTo * method implements numerical ordering with decreasing precedence from left to * right, e.g., 1.0.0 < 2.0.0 < 2.1.0 < 2.1.1. 
/**
 * A version number consisting of three parts: Major.Minor.Patch. The compareTo
 * method implements numerical ordering with decreasing precedence from left to
 * right, e.g., 1.0.0 < 2.0.0 < 2.1.0 < 2.1.1.
 */
public class VersionNumber implements Comparable<VersionNumber> {
	// Note: the type parameter on Comparable is required so that
	// compareTo(VersionNumber) actually overrides Comparable.compareTo.

	private final int major;  // most significant part
	private final int minor;
	private final int patch;  // least significant part

	/**
	 * Constructs a version number.
	 *
	 * @param majorVersion The major version part.
	 * @param minorVersion The minor version part.
	 * @param patchVersion The patch version part.
	 */
	public VersionNumber(int majorVersion, int minorVersion, int patchVersion) {
		major = majorVersion;
		minor = minorVersion;
		patch = patchVersion;
	}

	public int getMajor() {
		return major;
	}

	public int getMinor() {
		return minor;
	}

	public int getPatch() {
		return patch;
	}

	/**
	 * @return The version rendered as "major.minor.patch".
	 */
	@Override
	public String toString() {
		return major + "." + minor + "." + patch;
	}

	/**
	 * Compares numerically, major part first, then minor, then patch.
	 *
	 * @param vs The version to compare against.
	 *
	 * @return Negative, zero, or positive per the Comparable contract.
	 */
	@Override
	public int compareTo(VersionNumber vs) {
		int majorComp = Integer.compare(this.getMajor(), vs.getMajor());
		if (majorComp != 0) {
			return majorComp;
		}
		int minorComp = Integer.compare(this.getMinor(), vs.getMinor());
		if (minorComp != 0) {
			return minorComp;
		}
		return Integer.compare(this.getPatch(), vs.getPatch());
	}

	@Override
	public int hashCode() {
		int hash = 3;
		hash = 97 * hash + this.major;
		hash = 97 * hash + this.minor;
		hash = 97 * hash + this.patch;
		return hash;
	}

	/**
	 * Two version numbers are equal iff all three parts match.
	 */
	@Override
	public boolean equals(Object obj) {
		if (this == obj) {
			return true;
		}
		if (obj == null || getClass() != obj.getClass()) {
			return false;
		}
		final VersionNumber other = (VersionNumber) obj;
		return this.major == other.getMajor()
				&& this.minor == other.getMinor()
				&& this.patch == other.getPatch();
	}
}
/*
 * Sleuth Kit Data Model
 *
 * Copyright 2011-2017 Basis Technology Corp.
 * Contact: carrier sleuthkit org
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.sleuthkit.datamodel;

import java.util.List;
import java.util.logging.Level;
import java.util.logging.Logger;
import org.sleuthkit.datamodel.TskData.TSK_POOL_TYPE_ENUM;

/**
 * Represents a pool. Populated based on data in database.
 */
public class Pool extends AbstractContent {

	private static final Logger logger = Logger.getLogger(Pool.class.getName());

	// Native pool handle; 0 until lazily opened, never reset once set.
	private volatile long poolHandle = 0;
	private final long type;

	/**
	 * Constructor; most inputs are from the database.
	 *
	 * @param db     case database handle
	 * @param obj_id the unique content object id for the pool
	 * @param name   name of the pool
	 * @param type   type of the pool
	 */
	protected Pool(SleuthkitCase db, long obj_id, String name, long type) {
		super(db, obj_id, name);
		this.type = type;
	}

	@Override
	public int read(byte[] readBuffer, long offset, long len) throws TskCoreException {
		synchronized (this) {
			if (poolHandle == 0) {
				getPoolHandle();
			}
		}
		return SleuthkitJNI.readPool(poolHandle, readBuffer, offset, len);
	}

	@Override
	public long getSize() {
		try {
			// A pool spans its parent, so report the parent's size.
			return getParent().getSize();
		} catch (TskCoreException ex) {
			logger.log(Level.SEVERE, "Error getting parent of pool with obj ID {0}", getId());
			return 0;
		}
	}

	/**
	 * get the type
	 *
	 * @return type
	 */
	public TSK_POOL_TYPE_ENUM getType() {
		return TskData.TSK_POOL_TYPE_ENUM.valueOf(type);
	}

	/**
	 * Lazily loads the internal pool structure: won't be loaded until this is
	 * called and maintains the handle to it to reuse it.
	 *
	 * @return a pool pointer from the sleuthkit
	 *
	 * @throws TskCoreException exception thrown if an internal tsk core error
	 *                          occurs
	 */
	long getPoolHandle() throws TskCoreException {
		// Double-checked locking is safe here: poolHandle is volatile and,
		// once set, is never changed or reset to zero.
		if (poolHandle == 0) {
			synchronized (this) {
				if (poolHandle == 0) {
					Content dataSource = getDataSource();
					if ((dataSource != null) && (dataSource instanceof Image)) {
						Image image = (Image) dataSource;
						poolHandle = SleuthkitJNI.openPool(image.getImageHandle(), getPoolOffset(image), getSleuthkitCase());
					} else {
						throw new TskCoreException("Data Source of pool is not an image");
					}
				}
			}
		}
		return this.poolHandle;
	}

	/**
	 * Get the offset of the pool from the parent object, in bytes.
	 *
	 * @param image The image the pool belongs to.
	 *
	 * @return the offset to the pool
	 */
	private long getPoolOffset(Image image) throws TskCoreException {
		if (this.getParent() instanceof Image) {
			// If the parent is an image, then the pool starts at offset zero
			return 0;
		} else if (this.getParent() instanceof Volume) {
			// If the parent is a volume, then the pool starts at the volume offset
			Volume parent = (Volume) this.getParent();
			return parent.getStart() * image.getSsize(); // Offset needs to be in bytes
		}
		throw new TskCoreException("Pool with object ID " + this.getId() + " does not have Image or Volume parent");
	}

	@Override
	public void close() {
		// Pools will be closed during case closing by the JNI code.
	}

	@Override
	protected void finalize() throws Throwable {
		try {
			close();
		} finally {
			super.finalize();
		}
	}

	@Override
	public <T> T accept(SleuthkitItemVisitor<T> v) {
		return v.visit(this);
	}

	@Override
	public <T> T accept(ContentVisitor<T> v) {
		return v.visit(this);
	}

	@Override
	public List<Content> getChildren() throws TskCoreException {
		return getSleuthkitCase().getPoolChildren(this);
	}

	@Override
	public List<Long> getChildrenIds() throws TskCoreException {
		return getSleuthkitCase().getPoolChildrenIds(this);
	}

	@Override
	public String toString(boolean preserveState) {
		return super.toString(preserveState) + "Pool [\t" + "type " + type + "]\t"; //NON-NLS
	}
}
sleuthkit-4.11.1/bindings/java/src/org/sleuthkit/datamodel/VirtualDirectory.java000644 000765 000024 00000012414 14137073413 030631 0ustar00carrierstaff000000 000000 /* * SleuthKit Java Bindings * * Copyright 2011-2017 Basis Technology Corp. * Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.sleuthkit.datamodel; import org.sleuthkit.datamodel.TskData.FileKnown; import org.sleuthkit.datamodel.TskData.TSK_FS_ATTR_TYPE_ENUM; import org.sleuthkit.datamodel.TskData.TSK_FS_META_TYPE_ENUM; import org.sleuthkit.datamodel.TskData.TSK_FS_NAME_FLAG_ENUM; import org.sleuthkit.datamodel.TskData.TSK_FS_NAME_TYPE_ENUM; /** * A virtual directory that can be used as a parent for unallocated space files, * carved files, or derived files. A virtual directory can also be a data * source, with local/logical files as its children.
Not a file system * directory. */ public class VirtualDirectory extends SpecialDirectory { /** * The name given to a virtual directory that contains unallocated space * files. */ public static final String NAME_UNALLOC = "$Unalloc"; //NON-NLS /** * The name given to a virtual directory that contains carved files. */ public static final String NAME_CARVED = "$CarvedFiles"; //NON-NLS /** * Constructs a virtual directory that can be used as a parent for * unallocated space files, carved files, or derived files. A virtual * directory can also be a data source, with local/logical files as its * children. Not a file system directory. * * @param db The case database. * @param objId The object id of the virtual directory. * @param dataSourceObjectId The object id of the data source for the * virtual directory; same as objId if the virtual * directory is a data source. * @param name The name of the virtual directory. * @param dirType The TSK_FS_NAME_TYPE_ENUM for the virtual * directory. * @param metaType The TSK_FS_META_TYPE_ENUM for the virtual * directory. * @param dirFlag The TSK_FS_META_TYPE_ENUM for the virtual * directory. * @param metaFlags The meta flags for the virtual directory. * @param md5Hash The MD5 hash for the virtual directory. * @param knownState The known state for the virtual directory * @param parentPath The parent path for the virtual directory, * should be "/" if the virtual directory is a * data source. 
*/ VirtualDirectory(SleuthkitCase db, long objId, long dataSourceObjectId, String name, TSK_FS_NAME_TYPE_ENUM dirType, TSK_FS_META_TYPE_ENUM metaType, TSK_FS_NAME_FLAG_ENUM dirFlag, short metaFlags, String md5Hash, String sha256Hash, FileKnown knownState, String parentPath) { super(db, objId, dataSourceObjectId, TSK_FS_ATTR_TYPE_ENUM.TSK_FS_ATTR_TYPE_DEFAULT, 0, name, TskData.TSK_DB_FILES_TYPE_ENUM.VIRTUAL_DIR, 0L, 0, dirType, metaType, dirFlag, metaFlags, 0L, 0L, 0L, 0L, 0L, (short) 0, 0, 0, md5Hash, sha256Hash, knownState, parentPath, null); } /** * Gets the data source (e.g., image, virtual directory, etc.) for this * directory. If the directory is itself a data source, returns the * directory. * * @return The data source. * * @throws TskCoreException if there was an error querying the case * database. */ @Override public Content getDataSource() throws TskCoreException { if (this.getDataSourceObjectId() == this.getId()) { // This virtual directory is a data source. return this; } else { return super.getDataSource(); } } /** * Accepts a content visitor (Visitor design pattern). * * @param The type returned by the visitor. * @param visitor A ContentVisitor supplying an algorithm to run using this * virtual directory as input. * * @return The output of the algorithm. */ @Override public T accept(ContentVisitor visitor) { return visitor.visit(this); } /** * Accepts a Sleuthkit item visitor (Visitor design pattern). * * @param The type returned by the visitor. * @param visitor A SleuthkitItemVisitor supplying an algorithm to run using * this virtual directory as input. * * @return The output of the algorithm. */ @Override public T accept(SleuthkitItemVisitor visitor) { return visitor.visit(this); } /** * Provides a string representation of this virtual directory. * * @param preserveState True if state should be included in the string * representation of this object. * * @return The string representation of the virtual directory. 
*/ @Override public String toString(boolean preserveState) { return super.toString(preserveState) + "VirtualDirectory [\t" + "]\t"; //NON-NLS } } sleuthkit-4.11.1/bindings/java/src/org/sleuthkit/datamodel/OsAccountRealmManager.java000644 000765 000024 00000112525 14137073414 031475 0ustar00carrierstaff000000 000000 /* * Sleuth Kit Data Model * * Copyright 2020-2021 Basis Technology Corp. * Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.sleuthkit.datamodel; import com.google.common.base.Strings; import org.apache.commons.lang3.StringUtils; import java.sql.PreparedStatement; import java.sql.ResultSet; import java.sql.SQLException; import java.sql.Statement; import java.sql.Types; import java.util.ArrayList; import java.util.List; import java.util.Objects; import java.util.Optional; import java.util.UUID; import java.util.logging.Logger; import org.sleuthkit.datamodel.OsAccountRealm.ScopeConfidence; import org.sleuthkit.datamodel.SleuthkitCase.CaseDbConnection; import org.sleuthkit.datamodel.SleuthkitCase.CaseDbTransaction; /** * Create/Retrieve/Update OS account realms. Realms represent either an individual * host with local accounts or a domain. */ public final class OsAccountRealmManager { private static final Logger LOGGER = Logger.getLogger(OsAccountRealmManager.class.getName()); private final SleuthkitCase db; /** * Construct a OsAccountRealmManager for the given SleuthkitCase. 
* * @param skCase The SleuthkitCase * */ OsAccountRealmManager(SleuthkitCase skCase) { this.db = skCase; } /** * Create realm based on Windows information. The input SID is a user/group * SID.The domain SID is extracted from this incoming SID. * * @param accountSid User/group SID. May be null only if name is not * null. * @param realmName Realm name. May be null only if SID is not null. * @param referringHost Host where realm reference is found. * @param realmScope Scope of realm. Use UNKNOWN if you are not sure and * the method will try to detect the correct scope. * * @return OsAccountRealm. * * @throws TskCoreException If there is an error * creating the realm. * @throws OsAccountManager.NotUserSIDException If the SID is not a user * SID. */ public OsAccountRealm newWindowsRealm(String accountSid, String realmName, Host referringHost, OsAccountRealm.RealmScope realmScope) throws TskCoreException, OsAccountManager.NotUserSIDException { if (realmScope == null) { throw new TskCoreException("RealmScope cannot be null. Use UNKNOWN if scope is not known."); } if (referringHost == null) { throw new TskCoreException("A referring host is required to create a realm."); } if (StringUtils.isBlank(accountSid) && StringUtils.isBlank(realmName)) { throw new TskCoreException("Either an address or a name is required to create a realm."); } Host scopeHost; OsAccountRealm.ScopeConfidence scopeConfidence; switch (realmScope) { case DOMAIN: scopeHost = null; scopeConfidence = OsAccountRealm.ScopeConfidence.KNOWN; break; case LOCAL: scopeHost = referringHost; scopeConfidence = OsAccountRealm.ScopeConfidence.KNOWN; break; case UNKNOWN: default: // check if the referring host already has a realm boolean isHostRealmKnown = isHostRealmKnown(referringHost); if (isHostRealmKnown) { scopeHost = null; // the realm does not scope to the referring host since it already has one. 
scopeConfidence = OsAccountRealm.ScopeConfidence.KNOWN; } else { scopeHost = referringHost; scopeConfidence = OsAccountRealm.ScopeConfidence.INFERRED; } break; } // get windows realm address from sid String realmAddr = null; if (!Strings.isNullOrEmpty(accountSid)) { if (!WindowsAccountUtils.isWindowsUserSid(accountSid)) { throw new OsAccountManager.NotUserSIDException(String.format("SID = %s is not a user SID.", accountSid )); } realmAddr = WindowsAccountUtils.getWindowsRealmAddress(accountSid); // if the account is special windows account, create a local realm for it. if (realmAddr.equals(WindowsAccountUtils.SPECIAL_WINDOWS_REALM_ADDR)) { scopeHost = referringHost; scopeConfidence = OsAccountRealm.ScopeConfidence.KNOWN; } } String signature = makeRealmSignature(realmAddr, realmName, scopeHost); // create a realm return newRealm(realmName, realmAddr, signature, scopeHost, scopeConfidence); } /** * Get a windows realm by the account SID, or the domain name. The input SID * is an user/group account SID. The domain SID is extracted from this * incoming SID. * * @param accountSid Account SID, may be null. * @param realmName Realm name, may be null only if accountSid is not * null. * @param referringHost Referring Host. * * @return Optional with OsAccountRealm, Optional.empty if no matching realm * is found. * * @throws TskCoreException * @throws OsAccountManager.NotUserSIDException If the SID is not a user * SID. 
*/ public Optional getWindowsRealm(String accountSid, String realmName, Host referringHost) throws TskCoreException, OsAccountManager.NotUserSIDException { if (referringHost == null) { throw new TskCoreException("A referring host is required get a realm."); } // need at least one of the two, the addr or name to look up if (Strings.isNullOrEmpty(accountSid) && Strings.isNullOrEmpty(realmName)) { throw new TskCoreException("Realm address or name is required get a realm."); } try (CaseDbConnection connection = this.db.getConnection()) { return getWindowsRealm(accountSid, realmName, referringHost, connection); } } /** * Get a windows realm by the account SID, or the domain name. * The input SID is an user/group account SID. The domain SID is extracted from this incoming SID. * * @param accountSid Account SID, may be null. * @param realmName Realm name, may be null only if accountSid is not * null. * @param referringHost Referring Host. * @param connection Database connection to use. * * @return Optional with OsAccountRealm, Optional.empty if no matching realm is found. * * @throws TskCoreException */ Optional getWindowsRealm(String accountSid, String realmName, Host referringHost, CaseDbConnection connection) throws TskCoreException, OsAccountManager.NotUserSIDException { if (referringHost == null) { throw new TskCoreException("A referring host is required get a realm."); } // need at least one of the two, the addr or name to look up if (StringUtils.isBlank(accountSid) && StringUtils.isBlank(realmName)) { throw new TskCoreException("Realm address or name is required get a realm."); } // If an accountSID is provided search for realm by addr. if (!Strings.isNullOrEmpty(accountSid)) { if (!WindowsAccountUtils.isWindowsUserSid(accountSid)) { throw new OsAccountManager.NotUserSIDException(String.format("SID = %s is not a user SID.", accountSid )); } // get realm addr from the account SID. 
String realmAddr = WindowsAccountUtils.getWindowsRealmAddress(accountSid); Optional realm = getRealmByAddr(realmAddr, referringHost, connection); if (realm.isPresent()) { return realm; } } // No realm addr so search by name. Optional realm = getRealmByName(realmName, referringHost, connection); if (realm.isPresent() && !Strings.isNullOrEmpty(accountSid)) { // If we were given an accountSID, make sure there isn't one set on the matching realm. // We know it won't match because the previous search by SID failed. if (realm.get().getRealmAddr().isPresent()) { return Optional.empty(); } } return realm; } /** * Get a windows realm by the account SID, or the domain name. The input SID * is an user/group account SID. The domain SID is extracted from this * incoming SID. * * If a realm is found but is missing either the SID or the realmName, then * the realm is updated. * * @param accountSid Account SID, may be null. * @param realmName Realm name, may be null only if accountSid is not * null. * @param referringHost Referring Host. * @param connection Database connection to use. * * @return Optional with OsAccountRealm, Optional.empty if no matching realm * is found. * * @throws TskCoreException */ Optional getAndUpdateWindowsRealm(String accountSid, String realmName, Host referringHost, CaseDbConnection connection) throws TskCoreException, OsAccountManager.NotUserSIDException { // get realm Optional realmOptional = getWindowsRealm(accountSid, realmName, referringHost, connection ); // if found, update it if needed if (realmOptional.isPresent()) { String realmAddr = StringUtils.isNotBlank(accountSid) ? 
WindowsAccountUtils.getWindowsRealmAddress(accountSid) : null; OsRealmUpdateResult realmUpdateResult = updateRealm(realmOptional.get(), realmAddr, realmName, connection); // if realm was updated, return the updated realm if (realmUpdateResult.getUpdateStatus() == OsRealmUpdateStatus.UPDATED) { return realmUpdateResult.getUpdatedRealm(); } } return realmOptional; // return the found realm as is, if any } /** * Updates the realm address and/or name, if a non blank address/name is * specified and the current address/name is blank. * * NOTE: This will not merge two realms if the updated information exists * for another realm (i.e. such as adding an address to a realm that has * only a name and there is already a realm with that address). * * * @param realm Realm to update. * @param realmAddr Realm address, may be null if the address doesn't need * to be updated. * @param realmName Realm name, may be null if the name doesn't need to be * updated. * * @return OsRealmUpdateResult Update status and updated realm. * * @throws TskCoreException If there is a database error or if a realm * already exists with that information. */ public OsRealmUpdateResult updateRealm(OsAccountRealm realm, String realmAddr, String realmName) throws TskCoreException { try (CaseDbConnection connection = db.getConnection()) { return updateRealm(realm, realmAddr, realmName, connection); } } /** * Updates the realm address and/or name, if a non blank address/name is * specified and the current address/name is blank. * * @param realm Realm to update. * @param realmAddr Realm address, may be null if the address doesn't need * to be updated. * @param realmName Realm name, may be null if the name doesn't need to be * updated. * @param connection Current database connection. * * @return OsRealmUpdateResult Update status and updated realm. * * @throws TskCoreException If there is a database error or if a realm * already exists with that information. 
*/ private OsRealmUpdateResult updateRealm(OsAccountRealm realm, String realmAddr, String realmName, CaseDbConnection connection) throws TskCoreException { // need at least one of the two if (StringUtils.isBlank(realmAddr) && StringUtils.isBlank(realmName)) { throw new TskCoreException("Realm address or name is required to update realm."); } OsRealmUpdateStatus updateStatusCode = OsRealmUpdateStatus.NO_CHANGE; OsAccountRealm updatedRealm = null; db.acquireSingleUserCaseWriteLock(); try { List realmNames = realm.getRealmNames(); String currRealmName = realmNames.isEmpty() ? null : realmNames.get(0); // currently there is only one name. String currRealmAddr = realm.getRealmAddr().orElse(null); // set name and address to new values only if the current value is blank and the new value isn't. if ((StringUtils.isBlank(currRealmAddr) && StringUtils.isNotBlank(realmAddr))) { updateRealmColumn(realm.getRealmId(), "realm_addr", realmAddr, connection); updateStatusCode = OsRealmUpdateStatus.UPDATED; } if (StringUtils.isBlank(currRealmName) && StringUtils.isNotBlank(realmName)) { updateRealmColumn(realm.getRealmId(), "realm_name", realmName, connection); updateStatusCode = OsRealmUpdateStatus.UPDATED; } // if nothing is to be changed, return if (updateStatusCode == OsRealmUpdateStatus.NO_CHANGE) { return new OsRealmUpdateResult(updateStatusCode, realm); } // update realm signature - based on the most current address and name OsAccountRealm currRealm = getRealmByRealmId(realm.getRealmId(), connection); String newRealmAddr = currRealm.getRealmAddr().orElse(null); String newRealmName = (currRealm.getRealmNames().isEmpty() == false) ? currRealm.getRealmNames().get(0) : null; // make new signature String newSignature = makeRealmSignature(newRealmAddr, newRealmName, realm.getScopeHost().orElse(null)); // Use a random string as the signature if the realm is not active. 
String updateSQL = "UPDATE tsk_os_account_realms SET " + " realm_signature = " + " CASE WHEN db_status = " + OsAccountRealm.RealmDbStatus.ACTIVE.getId() + " THEN ? ELSE realm_signature END " + " WHERE id = ?"; PreparedStatement preparedStatement = connection.getPreparedStatement(updateSQL, Statement.NO_GENERATED_KEYS); preparedStatement.clearParameters(); preparedStatement.setString(1, newSignature); // Is only set for active accounts preparedStatement.setLong(2, realm.getRealmId()); connection.executeUpdate(preparedStatement); // read the updated realm updatedRealm = this.getRealmByRealmId(realm.getRealmId(), connection); return new OsRealmUpdateResult(updateStatusCode, updatedRealm); } catch (SQLException ex) { throw new TskCoreException(String.format("Error updating realm with id = %d, name = %s, addr = %s", realm.getRealmId(), realmName != null ? realmName : "Null", realm.getRealmAddr().orElse("Null")), ex); } finally { db.releaseSingleUserCaseWriteLock(); } } /** * Updates specified column in the tsk_os_account_realms table to the specified value. * * @param Type of value - must be a String, Long or an Integer. * @param realmId Id of the realm to be updated. * @param colName Name of column o be updated. * @param colValue New column value. * @param connection Database connection to use. * * @throws SQLException If there is an error updating the database. * @throws TskCoreException If the value type is not handled. */ private void updateRealmColumn(long realmId, String colName, T colValue, CaseDbConnection connection) throws SQLException, TskCoreException { String updateSQL = "UPDATE tsk_os_account_realms " + " SET " + colName + " = ? 
" + " WHERE id = ?"; db.acquireSingleUserCaseWriteLock(); try { PreparedStatement preparedStatement = connection.getPreparedStatement(updateSQL, Statement.NO_GENERATED_KEYS); preparedStatement.clearParameters(); if (Objects.isNull(colValue)) { preparedStatement.setNull(1, Types.NULL); // handle null value } else { if (colValue instanceof String) { preparedStatement.setString(1, (String) colValue); } else if (colValue instanceof Long) { preparedStatement.setLong(1, (Long) colValue); } else if (colValue instanceof Integer) { preparedStatement.setInt(1, (Integer) colValue); } else { throw new TskCoreException(String.format("Unhandled column data type received while updating the realm (id = %d) ", realmId)); } } preparedStatement.setLong(2, realmId); connection.executeUpdate(preparedStatement); } finally { db.releaseSingleUserCaseWriteLock(); } } private final static String REALM_QUERY_STRING = "SELECT realms.id as realm_id, realms.realm_name as realm_name," + " realms.realm_addr as realm_addr, realms.realm_signature as realm_signature, realms.scope_host_id, realms.scope_confidence, realms.db_status," + " hosts.id, hosts.name as host_name " + " FROM tsk_os_account_realms as realms" + " LEFT JOIN tsk_hosts as hosts" + " ON realms.scope_host_id = hosts.id"; /** * Get the realm from the given row id. * * @param id Realm row id. * * @return Realm. * @throws TskCoreException on error */ public OsAccountRealm getRealmByRealmId(long id) throws TskCoreException { try (CaseDbConnection connection = this.db.getConnection()) { return getRealmByRealmId(id, connection); } } /** * Get the realm from the given row id. * * @param id Realm row id. * @param connection Database connection to use. * * @return Realm. 
* @throws TskCoreException */ OsAccountRealm getRealmByRealmId(long id, CaseDbConnection connection) throws TskCoreException { String queryString = REALM_QUERY_STRING + " WHERE realms.id = " + id; db.acquireSingleUserCaseReadLock(); try ( Statement s = connection.createStatement(); ResultSet rs = connection.executeQuery(s, queryString)) { OsAccountRealm accountRealm = null; if (rs.next()) { accountRealm = resultSetToAccountRealm(rs); } else { throw new TskCoreException(String.format("No realm found with id = %d", id)); } return accountRealm; } catch (SQLException ex) { throw new TskCoreException(String.format("Error running the realms query = %s", queryString), ex); } finally { db.releaseSingleUserCaseReadLock(); } } /** * Get the realm with the given realm address. * * @param realmAddr Realm address. * @param host Host for realm, may be null. * @param connection Database connection to use. * * @return Optional with OsAccountRealm, Optional.empty if no realm found with matching real address. * * @throws TskCoreException. */ Optional getRealmByAddr(String realmAddr, Host host, CaseDbConnection connection) throws TskCoreException { // If a host is specified, we want to match the realm with matching addr and specified host, or a realm with matching addr and no host. // If no host is specified, then we return the first realm with matching addr. String whereHostClause = (host == null) ? 
" 1 = 1 " : " ( realms.scope_host_id = " + host.getHostId() + " OR realms.scope_host_id IS NULL) "; String queryString = REALM_QUERY_STRING + " WHERE LOWER(realms.realm_addr) = LOWER('"+ realmAddr + "') " + " AND " + whereHostClause + " AND realms.db_status = " + OsAccountRealm.RealmDbStatus.ACTIVE.getId() + " ORDER BY realms.scope_host_id IS NOT NULL, realms.scope_host_id"; // ensure that non null host_id is at the front db.acquireSingleUserCaseReadLock(); try ( Statement s = connection.createStatement(); ResultSet rs = connection.executeQuery(s, queryString)) { OsAccountRealm accountRealm = null; if (rs.next()) { Host realmHost = null; long hostId = rs.getLong("scope_host_id"); if (!rs.wasNull()) { if (host != null ) { realmHost = host; // exact match on given host } else { realmHost = new Host(hostId, rs.getString("host_name")); } } accountRealm = new OsAccountRealm(rs.getLong("realm_id"), rs.getString("realm_name"), rs.getString("realm_addr"), rs.getString("realm_signature"), realmHost, ScopeConfidence.fromID(rs.getInt("scope_confidence")), OsAccountRealm.RealmDbStatus.fromID(rs.getInt("db_status"))); } return Optional.ofNullable(accountRealm); } catch (SQLException ex) { throw new TskCoreException(String.format("Error running the realms query = %s with realmaddr = %s and host name = %s", queryString, realmAddr, (host != null ? host.getName() : "Null")), ex); } finally { db.releaseSingleUserCaseReadLock(); } } /** * Get the realm with the given name and specified host. * * @param realmName Realm name. * @param host Host for realm, may be null. * @param connection Database connection to use. * * @return Optional with OsAccountRealm, Optional.empty if no matching realm is found. * @throws TskCoreException. */ Optional getRealmByName(String realmName, Host host, CaseDbConnection connection) throws TskCoreException { // If a host is specified, we want to match the realm with matching name and specified host, or a realm with matching name and no host. 
// If no host is specified, then we return the first realm with matching name. String whereHostClause = (host == null) ? " 1 = 1 " : " ( realms.scope_host_id = " + host.getHostId() + " OR realms.scope_host_id IS NULL ) "; String queryString = REALM_QUERY_STRING + " WHERE LOWER(realms.realm_name) = LOWER('" + realmName + "')" + " AND " + whereHostClause + " AND realms.db_status = " + OsAccountRealm.RealmDbStatus.ACTIVE.getId() + " ORDER BY realms.scope_host_id IS NOT NULL, realms.scope_host_id"; // ensure that non null host_id are at the front db.acquireSingleUserCaseReadLock(); try (Statement s = connection.createStatement(); ResultSet rs = connection.executeQuery(s, queryString)) { OsAccountRealm accountRealm = null; if (rs.next()) { Host realmHost = null; long hostId = rs.getLong("scope_host_id"); if (!rs.wasNull()) { if (host != null ) { realmHost = host; } else { realmHost = new Host(hostId, rs.getString("host_name")); } } accountRealm = new OsAccountRealm(rs.getLong("realm_id"), rs.getString("realm_name"), rs.getString("realm_addr"), rs.getString("realm_signature"), realmHost, ScopeConfidence.fromID(rs.getInt("scope_confidence")), OsAccountRealm.RealmDbStatus.fromID(rs.getInt("db_status"))); } return Optional.ofNullable(accountRealm); } catch (SQLException ex) { throw new TskCoreException(String.format("Error getting account realm for with name = %s", realmName), ex); } finally { db.releaseSingleUserCaseReadLock(); } } /** * Check is there is any realm with a host-scope and KNOWN confidence for the given host. * If we can assume that a host will have only a single host-scoped realm, then you can * assume a new realm is domain-scoped when this method returns true. I.e. once we know * the host-scoped realm, then everything else is domain-scoped. * * @param host Host for which to look for a realm. * * @return True if there exists a a realm with the host scope matching the host. 
False otherwise */ private boolean isHostRealmKnown(Host host) throws TskCoreException { // check if this host has a local known realm aleady, other than the special windows realm. String queryString = REALM_QUERY_STRING + " WHERE realms.scope_host_id = " + host.getHostId() + " AND realms.scope_confidence = " + OsAccountRealm.ScopeConfidence.KNOWN.getId() + " AND realms.db_status = " + OsAccountRealm.RealmDbStatus.ACTIVE.getId() + " AND LOWER(realms.realm_addr) <> LOWER('"+ WindowsAccountUtils.SPECIAL_WINDOWS_REALM_ADDR + "') "; db.acquireSingleUserCaseReadLock(); try (CaseDbConnection connection = this.db.getConnection(); Statement s = connection.createStatement(); ResultSet rs = connection.executeQuery(s, queryString)) { // return true if there is any match. return rs.next(); } catch (SQLException ex) { throw new TskCoreException(String.format("Error getting account realm for with host = %s", host.getName()), ex); } finally { db.releaseSingleUserCaseReadLock(); } } /** * Creates a OsAccountRealm from the resultset of a REALM_QUERY_STRING query. * * @param rs ResultSet * @return * @throws SQLException */ private OsAccountRealm resultSetToAccountRealm(ResultSet rs) throws SQLException { long hostId = rs.getLong("scope_host_id"); Host realmHost = null; if (!rs.wasNull()) { realmHost = new Host(hostId, rs.getString("host_name")); } return new OsAccountRealm(rs.getLong("realm_id"), rs.getString("realm_name"), rs.getString("realm_addr"), rs.getString("realm_signature"), realmHost, ScopeConfidence.fromID(rs.getInt("scope_confidence")), OsAccountRealm.RealmDbStatus.fromID(rs.getInt("db_status"))); } // /** // * Get all realms. 
// * // * @return Collection of OsAccountRealm // */ // Collection getRealms() throws TskCoreException { // String queryString = "SELECT realms.id as realm_id, realms.realm_name as realm_name, realms.realm_addr as realm_addr, realms.scope_host_id, realms.scope_confidence, " // + " hosts.id, hosts.name as host_name " // + " FROM tsk_os_account_realms as realms" // + " LEFT JOIN tsk_hosts as hosts" // + " ON realms.scope_host_id = hosts.id"; // // db.acquireSingleUserCaseReadLock(); // try (CaseDbConnection connection = this.db.getConnection(); // Statement s = connection.createStatement(); // ResultSet rs = connection.executeQuery(s, queryString)) { // // ArrayList accountRealms = new ArrayList<>(); // while (rs.next()) { // long hostId = rs.getLong("scope_host_id"); // Host host = null; // if (!rs.wasNull()) { // host = new Host(hostId, rs.getString("host_name")); // } // // accountRealms.add(new OsAccountRealm(rs.getLong("realm_id"), rs.getString("realm_name"), // ScopeConfidence.fromID(rs.getInt("scope_confidence")), // rs.getString("realm_addr"), host)); // } // // return accountRealms; // } catch (SQLException ex) { // throw new TskCoreException(String.format("Error running the realms query = %s", queryString), ex); // } // finally { // db.releaseSingleUserCaseReadLock(); // } // } /** * Adds a row to the realms table. * * If the add fails, it tries to get the realm, in case the realm already exists. * * @param realmName Realm name, may be null. * @param realmAddr SID or some other identifier. May be null if name * is not null. * @param signature Signature, either the address or the name. * @param host Host, if the realm is host scoped. Can be null * realm is domain scoped. * @param scopeConfidence Confidence in realm scope. * * @return OsAccountRealm Realm just created. * * @throws TskCoreException If there is an internal error. 
*/ private OsAccountRealm newRealm(String realmName, String realmAddr, String signature, Host host, OsAccountRealm.ScopeConfidence scopeConfidence) throws TskCoreException { db.acquireSingleUserCaseWriteLock(); try (CaseDbConnection connection = this.db.getConnection()) { String realmInsertSQL = "INSERT INTO tsk_os_account_realms(realm_name, realm_addr, realm_signature, scope_host_id, scope_confidence)" + " VALUES (?, ?, ?, ?, ?)"; // NON-NLS PreparedStatement preparedStatement = connection.getPreparedStatement(realmInsertSQL, Statement.RETURN_GENERATED_KEYS); preparedStatement.clearParameters(); preparedStatement.setString(1, realmName); preparedStatement.setString(2, realmAddr); preparedStatement.setString(3, signature); if (host != null) { preparedStatement.setLong(4, host.getHostId()); } else { preparedStatement.setNull(4, java.sql.Types.BIGINT); } preparedStatement.setInt(5, scopeConfidence.getId()); connection.executeUpdate(preparedStatement); // Read back the row id try (ResultSet resultSet = preparedStatement.getGeneratedKeys();) { long rowId = resultSet.getLong(1); // last_insert_rowid() return new OsAccountRealm(rowId, realmName, realmAddr, signature, host, scopeConfidence, OsAccountRealm.RealmDbStatus.ACTIVE); } } catch (SQLException ex) { // Create may have failed if the realm already exists. Try and get the matching realm try (CaseDbConnection connection = this.db.getConnection()) { if (!Strings.isNullOrEmpty(realmAddr)) { Optional accountRealm = this.getRealmByAddr(realmAddr, host, connection); if (accountRealm.isPresent()) { return accountRealm.get(); } } else if (!Strings.isNullOrEmpty(realmName)) { Optional accountRealm = this.getRealmByName(realmName, host, connection); if (accountRealm.isPresent()) { return accountRealm.get(); } } // some other failure - throw an exception throw new TskCoreException(String.format("Error creating realm with address = %s and name = %s, with host = %s", realmAddr != null ? realmAddr : "", realmName != null ? 
realmName : "", host != null ? host.getName() : ""), ex); } } finally { db.releaseSingleUserCaseWriteLock(); } } /** * Makes a realm signature based on given realm address, name scope host. * * The signature is primarily to provide uniqueness in the database. * * Signature is built as: * (addr|name)_(hostId|"DOMAIN") * * @param realmAddr Realm address, may be null. * @param realmName Realm name, may be null only if address is not null. * @param scopeHost Realm scope host. May be null. * * @return Realm Signature. * * @throws TskCoreException If there is an error making the signature. */ static String makeRealmSignature(String realmAddr, String realmName, Host scopeHost) throws TskCoreException { // need at least one of the two, the addr or name to look up if (Strings.isNullOrEmpty(realmAddr) && Strings.isNullOrEmpty(realmName)) { throw new TskCoreException("Realm address and name can't both be null."); } String signature = String.format("%s_%s", !Strings.isNullOrEmpty(realmAddr) ? realmAddr : realmName, scopeHost != null ? scopeHost.getHostId() : "DOMAIN"); return signature; } /** * Create a random signature for realms that have been merged. * * @return The random signature. */ private String makeMergedRealmSignature() { return "MERGED " + UUID.randomUUID().toString(); } /** * Move source realm into the destination host or merge with an existing realm. 
* * @param sourceRealm * @param destHost * @param trans * @throws TskCoreException */ void moveOrMergeRealm(OsAccountRealm sourceRealm, Host destHost, CaseDbTransaction trans) throws TskCoreException { // Look for a matching realm by address Optional optDestRealmAddr = Optional.empty(); if (sourceRealm.getRealmAddr().isPresent()) { optDestRealmAddr = db.getOsAccountRealmManager().getRealmByAddr(sourceRealm.getRealmAddr().get(), destHost, trans.getConnection()); } // Look for a matching realm by name Optional optDestRealmName = Optional.empty(); if (!sourceRealm.getRealmNames().isEmpty()) { optDestRealmName = db.getOsAccountRealmManager().getRealmByName(sourceRealm.getRealmNames().get(0), destHost, trans.getConnection()); } // Decide how to proceed: // - If we only got one match: // -- If the address matched, set destRealm to the matching address realm // -- If the name matched but the original and the matching realm have different addresses, leave destRealm null (it'll be a move) // -- If the name matched and at least one of the address fields was null, set destRealm to the matching name realm // - If we got no matches, leave destRealm null (we'll do a move not a merge) // - If we got two of the same matches, set destRealm to that realm // - If we got two different matches: // -- If the name match has no address set, merge the matching name realm into the matching address realm, then // set destRealm to the matching address realm // -- Otherwise we're in the case where the addresses are different. We will consider the address the // stronger match and set destRealm to the matching address realm and leave the matching name realm as-is. 
OsAccountRealm destRealm = null; if (optDestRealmAddr.isPresent() && optDestRealmName.isPresent()) { if (optDestRealmAddr.get().getRealmId() == optDestRealmName.get().getRealmId()) { // The two matches are the same destRealm = optDestRealmAddr.get(); } else { if (optDestRealmName.get().getRealmAddr().isPresent()) { // The addresses are different, so use the one with the matching address destRealm = optDestRealmAddr.get(); } else { // Merge the realm with the matching name into the realm with the matching address. // Reload from database afterward to make sure everything is up-to-date. mergeRealms(optDestRealmName.get(), optDestRealmAddr.get(), trans); destRealm = getRealmByRealmId(optDestRealmAddr.get().getRealmId(), trans.getConnection()); } } } else if (optDestRealmAddr.isPresent()) { // Only address matched - use it destRealm = optDestRealmAddr.get(); } else if (optDestRealmName.isPresent()) { // Only name matched - check whether both have addresses set. // Due to earlier checks we know the address fields can't be the same, so // don't do anything if both have addresses - we consider the address to be a stronger identifier than the name if (! (optDestRealmName.get().getRealmAddr().isPresent() && sourceRealm.getRealmAddr().isPresent())) { destRealm = optDestRealmName.get(); } } // Move or merge the source realm if (destRealm == null) { moveRealm(sourceRealm, destHost, trans); } else { mergeRealms(sourceRealm, destRealm, trans); } } /** * Move a realm to a different host. * A check should be done to make sure there are no matching realms in * the destination host before calling this method. * * @param sourceRealm The source realm. * @param destHost The destination host. * @param trans The open transaction. 
* * @throws TskCoreException */ private void moveRealm(OsAccountRealm sourceRealm, Host destHost, CaseDbTransaction trans) throws TskCoreException { try(Statement s = trans.getConnection().createStatement()) { String query = "UPDATE tsk_os_account_realms SET scope_host_id = " + destHost.getHostId() + " WHERE id = " + sourceRealm.getRealmId(); s.executeUpdate(query); } catch (SQLException ex) { throw new TskCoreException("Error moving realm with id: " + sourceRealm.getRealmId() + " to host with id: " + destHost.getHostId(), ex); } } /** * Merge one realm into another, moving or combining all associated OsAccounts. * * @param sourceRealm The sourceRealm realm. * @param destRealm The destination realm. * @param trans The open transaction. * * @throws TskCoreException */ void mergeRealms(OsAccountRealm sourceRealm, OsAccountRealm destRealm, CaseDbTransaction trans) throws TskCoreException { // Update accounts db.getOsAccountManager().mergeOsAccountsForRealms(sourceRealm, destRealm, trans); // Update the sourceRealm realm CaseDbConnection connection = trans.getConnection(); try (Statement statement = connection.createStatement()) { String updateStr = "UPDATE tsk_os_account_realms SET db_status = " + OsAccountRealm.RealmDbStatus.MERGED.getId() + ", merged_into = " + destRealm.getRealmId() + ", realm_signature = '" + makeMergedRealmSignature() + "' " + " WHERE id = " + sourceRealm.getRealmId(); connection.executeUpdate(statement, updateStr); } catch (SQLException ex) { throw new TskCoreException ("Error updating status of realm with id: " + sourceRealm.getRealmId(), ex); } // Update the destination realm if it doesn't have the name or addr set and the source realm does if (!destRealm.getRealmAddr().isPresent() && sourceRealm.getRealmAddr().isPresent()) { updateRealm(destRealm, sourceRealm.getRealmAddr().get(), null, trans.getConnection()); } else if (destRealm.getRealmNames().isEmpty() && !sourceRealm.getRealmNames().isEmpty()) { updateRealm(destRealm, null, 
sourceRealm.getRealmNames().get(0), trans.getConnection()); } } /** * Get all realms associated with the given host. * * @param host The host. * @param connection The current database connection. * * @return List of realms for the given host. * * @throws TskCoreException */ List getRealmsByHost(Host host, CaseDbConnection connection) throws TskCoreException { List results = new ArrayList<>(); String queryString = REALM_QUERY_STRING + " WHERE realms.scope_host_id = " + host.getHostId(); db.acquireSingleUserCaseReadLock(); try ( Statement s = connection.createStatement(); ResultSet rs = connection.executeQuery(s, queryString)) { while (rs.next()) { results.add(resultSetToAccountRealm(rs)); } return results; } catch (SQLException ex) { throw new TskCoreException(String.format("Error gettings realms for host with id = " + host.getHostId()), ex); } finally { db.releaseSingleUserCaseReadLock(); } } /** * Status of a realm update. */ public enum OsRealmUpdateStatus { NO_CHANGE, /// no change was made to account. UPDATED, /// account was updated MERGED /// account update triggered a merge } /** * Container to encapsulate the status returned by the realm update api, and * the updated realm. */ public final static class OsRealmUpdateResult { private final OsRealmUpdateStatus updateStatus; private final OsAccountRealm updatedRealm; OsRealmUpdateResult(OsRealmUpdateStatus updateStatus, OsAccountRealm updatedRealm) { this.updateStatus = updateStatus; this.updatedRealm = updatedRealm; } public OsRealmUpdateStatus getUpdateStatus() { return updateStatus; } public Optional getUpdatedRealm() { return Optional.ofNullable(updatedRealm); } } } sleuthkit-4.11.1/bindings/java/src/org/sleuthkit/datamodel/CollectionUtils.java000644 000765 000024 00000002307 14137073413 030432 0ustar00carrierstaff000000 000000 /* * Sleuth Kit Data Model * * Copyright 2017 Basis Technology Corp. 
* Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.sleuthkit.datamodel; import java.util.Arrays; import java.util.Collection; import java.util.HashSet; /** * Static utilities for dealing with Collections. At some point this could be * replaced with apache commons or guava... */ final class CollectionUtils { @SuppressWarnings("unchecked") static HashSet hashSetOf(T... values) { return new HashSet<>(Arrays.asList(values)); } static boolean isNotEmpty(Collection collection) { return collection.isEmpty() == false; } private CollectionUtils() { } } sleuthkit-4.11.1/bindings/java/src/org/sleuthkit/datamodel/CarvedFileContainer.java000755 000765 000024 00000002714 14137073413 031172 0ustar00carrierstaff000000 000000 /* * Sleuth Kit Data Model * * Copyright 2011-2016 Basis Technology Corp. * Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
*/ package org.sleuthkit.datamodel; import java.util.List; /** * @deprecated Use CarvingResult instead. */ @Deprecated public final class CarvedFileContainer { private final String mCarvedFileName; private final long mCarvedFileSize; private final long mContainerId; private final List mRangeData; public CarvedFileContainer(String carvedFileName, long carvedFileSize, long containerId, List rangeData) { mCarvedFileName = carvedFileName; mCarvedFileSize = carvedFileSize; mContainerId = containerId; mRangeData = rangeData; } public String getName() { return mCarvedFileName; } public long getSize() { return mCarvedFileSize; } public long getId() { return mContainerId; } public List getRanges() { return mRangeData; } } sleuthkit-4.11.1/bindings/java/src/org/sleuthkit/datamodel/Examiner.java000644 000765 000024 00000002734 14137073413 027072 0ustar00carrierstaff000000 000000 /* * Sleuth Kit Data Model * * Copyright 2018-2020 Basis Technology Corp. * Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.sleuthkit.datamodel; /** * Encapsulates the concept of an examiner associated with a case. 
*/ final public class Examiner { private final long id; private final String loginName; private final String displayName; Examiner(long id, String loginName, String displayName) { this.id = id; this.loginName = loginName; this.displayName = displayName; } /** * Returns the id * * @return id */ public long getId() { return id; } /** * Returns the login name of examiner * * @return login name */ public String getLoginName() { return this.loginName; } /** * Returns the display name of examiner * * @return display name, may be a blank string */ public String getDisplayName() { if (displayName == null) { return ""; } return this.displayName; } } sleuthkit-4.11.1/bindings/java/src/org/sleuthkit/datamodel/TimelineEventType.java000644 000765 000024 00000104336 14137073414 030736 0ustar00carrierstaff000000 000000 /* * Sleuth Kit Data Model * * Copyright 2018-2021 Basis Technology Corp. * Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
*/
package org.sleuthkit.datamodel;

import com.google.common.annotations.Beta;
import com.google.common.base.MoreObjects;
import com.google.common.collect.ImmutableSortedSet;
import java.util.Arrays;
import java.util.Comparator;
import java.util.List;
import java.util.Optional;
import java.util.SortedSet;
import static org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE.*;
import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.*;
import org.sleuthkit.datamodel.BlackboardAttribute.Type;
import static org.sleuthkit.datamodel.BundleProvider.getBundle;
import org.sleuthkit.datamodel.TimelineEventTypes.EmptyExtractor;
import org.sleuthkit.datamodel.TimelineEventTypes.FilePathArtifactEventType;
import org.sleuthkit.datamodel.TimelineEventTypes.FilePathEventType;
import org.sleuthkit.datamodel.TimelineEventTypes.URLArtifactEventType;
import org.sleuthkit.datamodel.TimelineEventTypes.GPSTrackArtifactEventType;
import org.sleuthkit.datamodel.TimelineEventArtifactTypeImpl.AttributeExtractor;
import static org.sleuthkit.datamodel.TimelineEventArtifactTypeImpl.getAttributeSafe;

/**
 * An interface implemented by timeline event types. Timeline event types are
 * organized into a type hierarchy. This type hierarchy has three levels: the
 * root level, the category level (e.g, file system events, web activity
 * events), and the actual event level (e.g., file modified events, web download
 * events).
 *
 * Currently (9/20/19), all supported timeline event types are defined as
 * members of this interface.
 *
 * NOTE(review): the numeric type IDs passed to the constructors below are
 * persisted; the comment at id 22 indicates that reusing an id risks backward
 * compatibility problems, so do not renumber existing types.
 *
 * WARNING: THIS INTERFACE IS A "BETA" INTERFACE AND IS SUBJECT TO CHANGE AT ANY
 * TIME.
 */
@Beta
public interface TimelineEventType extends Comparable {

	/**
	 * Gets the display name of this event type.
	 *
	 * @return The event type display name.
	 */
	String getDisplayName();

	/**
	 * Gets the unique ID of this event type in the case database.
	 *
	 * @return The event type ID.
	 */
	long getTypeID();

	/**
	 * Gets the type hierarchy level of this event type.
	 *
	 * @return The type hierarchy level.
	 */
	TimelineEventType.HierarchyLevel getTypeHierarchyLevel();

	/**
	 * Gets the child event types of this event type in the type hierarchy.
	 *
	 * @return A sorted set of the child event types.
	 */
	SortedSet getChildren();

	/**
	 * Gets a specific child event type of this event type in the type
	 * hierarchy.
	 *
	 * @param displayName The display name of the desired child event type.
	 *
	 * @return The child event type in an Optional object, may be empty.
	 */
	Optional getChild(String displayName);

	/**
	 * Gets the parent event type of this event type in the type hierarchy.
	 *
	 * @return The parent event type.
	 */
	TimelineEventType getParent();

	/**
	 * Gets the category level event type for this event type in the type
	 * hierarchy. Walks up the parent chain until the child of the root is
	 * reached.
	 *
	 * @return The category event type.
	 */
	default TimelineEventType getCategory() {
		TimelineEventType parentType = getParent();
		return parentType.equals(ROOT_EVENT_TYPE)
				? this
				: parentType.getCategory();
	}

	/**
	 * Gets the sibling event types of this event type in the type hierarchy.
	 *
	 * @return The sibling event types.
	 */
	default SortedSet getSiblings() {
		return this.equals(ROOT_EVENT_TYPE)
				? ImmutableSortedSet.of(ROOT_EVENT_TYPE)
				: this.getParent().getChildren();
	}

	// Natural ordering of event types is by display name (not by type ID).
	@Override
	default int compareTo(TimelineEventType otherType) {
		return Comparator.comparing(TimelineEventType::getDisplayName).compare(this, otherType);
	}

	/**
	 * An enumeration of the levels in the event type hierarchy.
	 */
	public enum HierarchyLevel {

		/**
		 * The root level of the event types hierarchy.
		 */
		ROOT(getBundle().getString("EventTypeHierarchyLevel.root")),
		/**
		 * The category level of the event types hierarchy. Event types at this
		 * level represent event categories such as file system events and web
		 * activity events.
		 */
		CATEGORY(getBundle().getString("EventTypeHierarchyLevel.category")),
		/**
		 * The actual events level of the event types hierarchy. Event types at
		 * this level represent actual events such as file modified time events
		 * and web download events.
		 */
		EVENT(getBundle().getString("EventTypeHierarchyLevel.event"));

		private final String displayName; // localized display name of this level

		/**
		 * Gets the display name of this element of the enumeration of the
		 * levels in the event type hierarchy.
		 *
		 * @return The display name.
		 */
		public String getDisplayName() {
			return displayName;
		}

		/**
		 * Constructs an element of the enumeration of the levels in the event
		 * type hierarchy.
		 *
		 * @param displayName The display name of this hierarchy level.
		 */
		private HierarchyLevel(String displayName) {
			this.displayName = displayName;
		}
	}

	/**
	 * The root type of all event types. No event should actually have this
	 * type.
	 */
	TimelineEventType ROOT_EVENT_TYPE = new TimelineEventTypeImpl(0,
			getBundle().getString("RootEventType.eventTypes.name"), // NON-NLS
			HierarchyLevel.ROOT, null) {
		@Override
		public SortedSet< TimelineEventType> getChildren() {
			// Categories are ordered by their type ID, not by display name.
			ImmutableSortedSet.Builder builder = ImmutableSortedSet.orderedBy(new Comparator() {
				@Override
				public int compare(TimelineEventType o1, TimelineEventType o2) {
					return ((Long) o1.getTypeID()).compareTo(o2.getTypeID());
				}
			});
			builder.add(FILE_SYSTEM, WEB_ACTIVITY, MISC_TYPES);
			return builder.build();
		}
	};

	// Category-level type: file system events (children are IDs 4-7).
	TimelineEventType FILE_SYSTEM = new TimelineEventTypeImpl(1,
			getBundle().getString("BaseTypes.fileSystem.name"),// NON-NLS
			HierarchyLevel.CATEGORY, ROOT_EVENT_TYPE) {
		@Override
		public SortedSet< TimelineEventType> getChildren() {
			return ImmutableSortedSet.of(FILE_MODIFIED, FILE_ACCESSED,
					FILE_CREATED, FILE_CHANGED);
		}
	};

	// Category-level type: web activity events.
	TimelineEventType WEB_ACTIVITY = new TimelineEventTypeImpl(2,
			getBundle().getString("BaseTypes.webActivity.name"), // NON-NLS
			HierarchyLevel.CATEGORY, ROOT_EVENT_TYPE) {
		@Override
		public SortedSet< TimelineEventType> getChildren() {
			return ImmutableSortedSet.of(WEB_DOWNLOADS, WEB_COOKIE, WEB_COOKIE_ACCESSED,
					WEB_COOKIE_END, WEB_BOOKMARK, WEB_HISTORY, WEB_SEARCH, WEB_FORM_AUTOFILL,
					WEB_FORM_ADDRESSES, WEB_FORM_ADDRESSES_MODIFIED, WEB_FORM_AUTOFILL_ACCESSED,
					WEB_CACHE, WEB_HISTORY_CREATED);
		}
	};

	// The MISC_TYPE events are sorted alphabetically by their display name instead of their
	// "natural order" which is by their event ID.
	TimelineEventType MISC_TYPES = new TimelineEventTypeImpl(3,
			getBundle().getString("BaseTypes.miscTypes.name"), // NON-NLS
			HierarchyLevel.CATEGORY, ROOT_EVENT_TYPE) {
		@Override
		public SortedSet getChildren() {
			return ImmutableSortedSet.of(CALL_LOG, CALL_LOG_END, DEVICES_ATTACHED, EMAIL,
					EMAIL_RCVD, EXIF, GPS_BOOKMARK, GPS_LAST_KNOWN_LOCATION, GPS_TRACKPOINT,
					GPS_ROUTE, GPS_SEARCH, GPS_TRACK, INSTALLED_PROGRAM, LOG_ENTRY, MESSAGE,
					METADATA_LAST_PRINTED, METADATA_LAST_SAVED, METADATA_CREATED,
					PROGRAM_EXECUTION, RECENT_DOCUMENTS, REGISTRY, BACKUP_EVENT_START,
					BACKUP_EVENT_END, BLUETOOTH_PAIRING, CALENDAR_ENTRY_START,
					CALENDAR_ENTRY_END, PROGRAM_DELETED, OS_INFO, WIFI_NETWORK,
					USER_DEVICE_EVENT_START, USER_DEVICE_EVENT_END, SERVICE_ACCOUNT,
					SCREEN_SHOT, PROGRAM_NOTIFICATION, BLUETOOTH_PAIRING_ACCESSED,
					BLUETOOTH_ADAPTER, CUSTOM_ARTIFACT_CATCH_ALL, STANDARD_ARTIFACT_CATCH_ALL,
					USER_CREATED);
		}
	};

	// Event-level types for the FILE_SYSTEM category (IDs 4-7), one per
	// file-system timestamp.
	TimelineEventType FILE_MODIFIED = new FilePathEventType(4,
			getBundle().getString("FileSystemTypes.fileModified.name"), // NON-NLS
			HierarchyLevel.EVENT, FILE_SYSTEM);

	TimelineEventType FILE_ACCESSED = new FilePathEventType(5,
			getBundle().getString("FileSystemTypes.fileAccessed.name"), // NON-NLS
			HierarchyLevel.EVENT, FILE_SYSTEM);

	TimelineEventType FILE_CREATED = new FilePathEventType(6,
			getBundle().getString("FileSystemTypes.fileCreated.name"), // NON-NLS
			HierarchyLevel.EVENT, FILE_SYSTEM);

	TimelineEventType FILE_CHANGED = new FilePathEventType(7,
			getBundle().getString("FileSystemTypes.fileChanged.name"), // NON-NLS
			HierarchyLevel.EVENT, FILE_SYSTEM);

	// Artifact-based event types: each pairs an artifact type with the
	// attribute supplying the event time, plus description extractors.
	TimelineEventType WEB_DOWNLOADS = new URLArtifactEventType(8,
			getBundle().getString("WebTypes.webDownloads.name"), // NON-NLS
			WEB_ACTIVITY,
			new BlackboardArtifact.Type(TSK_WEB_DOWNLOAD),
			new Type(TSK_DATETIME_ACCESSED),
			new Type(TSK_URL));

	TimelineEventType WEB_COOKIE = new URLArtifactEventType(9,
			getBundle().getString("WebTypes.webCookies.name"),// NON-NLS
			WEB_ACTIVITY,
			new BlackboardArtifact.Type(TSK_WEB_COOKIE),
			new Type(TSK_DATETIME_CREATED),
			new Type(TSK_URL));

	TimelineEventType WEB_BOOKMARK = new URLArtifactEventType(10,
			getBundle().getString("WebTypes.webBookmarks.name"), // NON-NLS
			WEB_ACTIVITY,
			new BlackboardArtifact.Type(TSK_WEB_BOOKMARK),
			new Type(TSK_DATETIME_CREATED),
			new Type(TSK_URL));

	TimelineEventType WEB_HISTORY = new URLArtifactEventType(11,
			getBundle().getString("WebTypes.webHistory.name"), // NON-NLS
			WEB_ACTIVITY,
			new BlackboardArtifact.Type(TSK_WEB_HISTORY),
			new Type(TSK_DATETIME_ACCESSED),
			new Type(TSK_URL));

	TimelineEventType WEB_SEARCH = new URLArtifactEventType(12,
			getBundle().getString("WebTypes.webSearch.name"), // NON-NLS
			WEB_ACTIVITY,
			new BlackboardArtifact.Type(TSK_WEB_SEARCH_QUERY),
			new Type(TSK_DATETIME_ACCESSED),
			new Type(TSK_DOMAIN));

	TimelineEventType MESSAGE = new TimelineEventArtifactTypeImpl(13,
			getBundle().getString("MiscTypes.message.name"),// NON-NLS
			MISC_TYPES,
			new BlackboardArtifact.Type(TSK_MESSAGE),
			new Type(TSK_DATETIME),
			new TimelineEventArtifactTypeImpl.AttributeExtractor(new Type(TSK_MESSAGE_TYPE)),
			artf -> {
				final BlackboardAttribute dir = getAttributeSafe(artf, new Type(TSK_DIRECTION));
				final BlackboardAttribute readStatus = getAttributeSafe(artf, new Type(TSK_READ_STATUS));
				final BlackboardAttribute name = getAttributeSafe(artf, new Type(TSK_NAME));
				final BlackboardAttribute subject = getAttributeSafe(artf, new Type(TSK_SUBJECT));
				BlackboardAttribute phoneNumber = getAttributeSafe(artf, new Type(TSK_PHONE_NUMBER));
				// Make our best effort to find a valid phoneNumber for the description
				if (phoneNumber == null) {
					phoneNumber = getAttributeSafe(artf, new Type(TSK_PHONE_NUMBER_TO));
				}
				if (phoneNumber == null) {
					phoneNumber = getAttributeSafe(artf, new Type(TSK_PHONE_NUMBER_FROM));
				}
				List asList = Arrays.asList(
						stringValueOf(dir),
						stringValueOf(readStatus),
						name == null && phoneNumber == null ? "" : toFrom(dir),
						name != null || phoneNumber != null ? stringValueOf(MoreObjects.firstNonNull(name, phoneNumber)) : "",
						stringValueOf(subject)
				);
				return String.join(" ", asList);
			},
			new AttributeExtractor(new Type(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_TEXT)));

	TimelineEventType GPS_ROUTE = new TimelineEventArtifactTypeImpl(14,
			getBundle().getString("MiscTypes.GPSRoutes.name"), // NON-NLS
			MISC_TYPES,
			new BlackboardArtifact.Type(TSK_GPS_ROUTE),
			new Type(TSK_DATETIME),
			new AttributeExtractor(new Type(TSK_PROG_NAME)),
			new AttributeExtractor(new Type(TSK_LOCATION)),
			artf -> {
				final BlackboardAttribute latStart = getAttributeSafe(artf, new Type(TSK_GEO_LATITUDE_START));
				final BlackboardAttribute longStart = getAttributeSafe(artf, new Type(TSK_GEO_LONGITUDE_START));
				final BlackboardAttribute latEnd = getAttributeSafe(artf, new Type(TSK_GEO_LATITUDE_END));
				final BlackboardAttribute longEnd = getAttributeSafe(artf, new Type(TSK_GEO_LONGITUDE_END));
				return String.format("From latitude: %1$s longitude: %2$s To latitude: %3$s longitude: %4$s",
						stringValueOf(latStart), stringValueOf(longStart),
						stringValueOf(latEnd), stringValueOf(longEnd)); // NON-NLS
			});

	// Uses the deprecated TSK_GPS_TRACKPOINT artifact type, hence the
	// suppression.
	@SuppressWarnings("deprecation")
	TimelineEventType GPS_TRACKPOINT = new TimelineEventArtifactTypeImpl(15,
			getBundle().getString("MiscTypes.GPSTrackpoint.name"), // NON-NLS
			MISC_TYPES,
			new BlackboardArtifact.Type(TSK_GPS_TRACKPOINT),
			new Type(TSK_DATETIME),
			new AttributeExtractor(new Type(TSK_PROG_NAME)),
			artf -> {
				final BlackboardAttribute longitude = getAttributeSafe(artf, new Type(TSK_GEO_LONGITUDE));
				final BlackboardAttribute latitude = getAttributeSafe(artf, new Type(TSK_GEO_LATITUDE));
				return "Latitude: " + stringValueOf(latitude) + " Longitude: " + stringValueOf(longitude); // NON-NLS
			},
			new EmptyExtractor());

	TimelineEventType CALL_LOG = new TimelineEventArtifactTypeImpl(16,
			getBundle().getString("MiscTypes.Calls.name"), // NON-NLS
			MISC_TYPES,
			new BlackboardArtifact.Type(TSK_CALLLOG),
			new Type(TSK_DATETIME_START),
			new AttributeExtractor(new Type(TSK_NAME)),
			artf -> {
				// Fall back through TO/FROM attributes to find a phone number.
				BlackboardAttribute phoneNumber = getAttributeSafe(artf, new Type(TSK_PHONE_NUMBER));
				if (phoneNumber == null) {
					phoneNumber = getAttributeSafe(artf, new Type(TSK_PHONE_NUMBER_TO));
				}
				if (phoneNumber == null) {
					phoneNumber = getAttributeSafe(artf, new Type(TSK_PHONE_NUMBER_FROM));
				}
				return "Phone Number: " + stringValueOf(phoneNumber);
			},
			new AttributeExtractor(new Type(TSK_DIRECTION)));

	TimelineEventType EMAIL = new TimelineEventArtifactTypeImpl(17,
			getBundle().getString("MiscTypes.Email.name"), // NON-NLS
			MISC_TYPES,
			new BlackboardArtifact.Type(TSK_EMAIL_MSG),
			new Type(TSK_DATETIME_SENT),
			artf -> {
				// Truncate the to/from addresses to the configured maximum.
				String emailFrom = stringValueOf(getAttributeSafe(artf, new Type(TSK_EMAIL_FROM)));
				if (emailFrom.length() > TimelineEventArtifactTypeImpl.EMAIL_TO_FROM_LENGTH_MAX) {
					emailFrom = emailFrom.substring(0, TimelineEventArtifactTypeImpl.EMAIL_TO_FROM_LENGTH_MAX);
				}
				String emailTo = stringValueOf(getAttributeSafe(artf, new Type(TSK_EMAIL_TO)));
				if (emailTo.length() > TimelineEventArtifactTypeImpl.EMAIL_TO_FROM_LENGTH_MAX) {
					emailTo = emailTo.substring(0, TimelineEventArtifactTypeImpl.EMAIL_TO_FROM_LENGTH_MAX);
				}
				return "Sent from: " + emailFrom + "Sent to: " + emailTo; // NON-NLS
			},
			new AttributeExtractor(new Type(TSK_SUBJECT)),
			artf -> {
				final BlackboardAttribute msgAttribute = getAttributeSafe(artf, new Type(TSK_EMAIL_CONTENT_PLAIN));
				String msg = stringValueOf(msgAttribute);
				if (msg.length() > TimelineEventArtifactTypeImpl.EMAIL_FULL_DESCRIPTION_LENGTH_MAX) {
					msg = msg.substring(0, TimelineEventArtifactTypeImpl.EMAIL_FULL_DESCRIPTION_LENGTH_MAX);
				}
				return msg;
			});

	TimelineEventType RECENT_DOCUMENTS = new FilePathArtifactEventType(18,
			getBundle().getString("MiscTypes.recentDocuments.name"), // NON-NLS
			MISC_TYPES,
			new BlackboardArtifact.Type(TSK_RECENT_OBJECT),
			new Type(TSK_DATETIME_ACCESSED),
			new Type(TSK_PATH));

	TimelineEventType INSTALLED_PROGRAM = new TimelineEventArtifactTypeImpl(19,
			getBundle().getString("MiscTypes.installedPrograms.name"), // NON-NLS
			MISC_TYPES,
			new BlackboardArtifact.Type(TSK_INSTALLED_PROG),
			new Type(TSK_DATETIME),
			new AttributeExtractor(new Type(TSK_PROG_NAME)),
			new EmptyExtractor(),
			new EmptyExtractor());

	TimelineEventType EXIF = new TimelineEventArtifactTypeImpl(20,
			getBundle().getString("MiscTypes.exif.name"), // NON-NLS
			MISC_TYPES,
			new BlackboardArtifact.Type(TSK_METADATA_EXIF),
			new Type(TSK_DATETIME_CREATED),
			new AttributeExtractor(new Type(TSK_DEVICE_MAKE)),
			new AttributeExtractor(new Type(TSK_DEVICE_MODEL)),
			artf -> artf.getSleuthkitCase().getAbstractFileById(artf.getObjectID()).getName()
	);

	TimelineEventType DEVICES_ATTACHED = new TimelineEventArtifactTypeImpl(21,
			getBundle().getString("MiscTypes.devicesAttached.name"), // NON-NLS
			MISC_TYPES,
			new BlackboardArtifact.Type(TSK_DEVICE_ATTACHED),
			new Type(TSK_DATETIME),
			new AttributeExtractor(new Type(TSK_DEVICE_MAKE)),
			new AttributeExtractor(new Type(TSK_DEVICE_MODEL)),
			new AttributeExtractor(new Type(TSK_DEVICE_ID)));

	// TimelineEventType with id 22 has been deprecated. Trying to reuse 22
	// may cause backwards compatibility issues and is not recommended. If 22
	// is reused create upgrade code to reassign event 22 to MISC_TYPE id = 3.
	int DEPRECATED_OTHER_EVENT_ID = 22;

	// Event for any artifact event with an artifact type for which we don't have
	// a hard-coded event type. In other words, we recognize the artifact type
	// as a standard artifact type, but we have not updated the Timeline code
	// to have a corresponding inner TimelineEventType
	TimelineEventType STANDARD_ARTIFACT_CATCH_ALL = new TimelineEventArtifactTypeSingleDescription(23,
			getBundle().getString("CustomTypes.other.name"), //NON-NLS
			MISC_TYPES,
			new BlackboardArtifact.Type(TSK_TL_EVENT),
			new BlackboardAttribute.Type(TSK_DATETIME),
			new BlackboardAttribute.Type(TSK_DESCRIPTION));

	//new misc types
	TimelineEventType LOG_ENTRY = new TimelineEventArtifactTypeSingleDescription(24,
			getBundle().getString("MiscTypes.LogEntry.name"), //NON-NLS
			MISC_TYPES,
			new BlackboardArtifact.Type(TSK_TL_EVENT),
			new BlackboardAttribute.Type(TSK_DATETIME),
			new BlackboardAttribute.Type(TSK_DESCRIPTION));

	TimelineEventType REGISTRY = new TimelineEventArtifactTypeSingleDescription(25,
			getBundle().getString("MiscTypes.Registry.name"), //NON-NLS
			MISC_TYPES,
			new BlackboardArtifact.Type(TSK_TL_EVENT),
			new BlackboardAttribute.Type(TSK_DATETIME),
			new BlackboardAttribute.Type(TSK_DESCRIPTION));

	// Event for any artifact event with a custom artifact type (e.g. shell bag
	// artifact)
	TimelineEventType CUSTOM_ARTIFACT_CATCH_ALL = new TimelineEventArtifactTypeSingleDescription(26,
			getBundle().getString("CustomTypes.customArtifact.name"),//NON-NLS
			MISC_TYPES,
			new BlackboardArtifact.Type(TSK_TL_EVENT),
			new BlackboardAttribute.Type(TSK_DATETIME),
			new BlackboardAttribute.Type(TSK_DESCRIPTION));

	TimelineEventType WEB_FORM_AUTOFILL = new TimelineEventArtifactTypeImpl(27,
			getBundle().getString("WebTypes.webFormAutoFill.name"),//NON-NLS
			WEB_ACTIVITY,
			new BlackboardArtifact.Type(TSK_WEB_FORM_AUTOFILL),
			new Type(TSK_DATETIME_CREATED),
			artf -> {
				final BlackboardAttribute name = getAttributeSafe(artf, new Type(TSK_NAME));
				final BlackboardAttribute value = getAttributeSafe(artf, new Type(TSK_VALUE));
				// NOTE(review): 'count' is fetched but not used in the returned
				// description (unlike WEB_FORM_AUTOFILL_ACCESSED below).
				final BlackboardAttribute count = getAttributeSafe(artf, new Type(TSK_COUNT));
				return stringValueOf(name) + ":" + stringValueOf(value); // NON-NLS
			}, new EmptyExtractor(), new EmptyExtractor());

	TimelineEventType WEB_FORM_ADDRESSES = new URLArtifactEventType(28,
			getBundle().getString("WebTypes.webFormAddress.name"),//NON-NLS
			WEB_ACTIVITY,
			new BlackboardArtifact.Type(TSK_WEB_FORM_ADDRESS),
			new Type(TSK_DATETIME_ACCESSED),
			new Type(TSK_EMAIL));

	TimelineEventType GPS_BOOKMARK = new TimelineEventArtifactTypeImpl(29,
			getBundle().getString("MiscTypes.GPSBookmark.name"), // NON-NLS
			MISC_TYPES,
			new BlackboardArtifact.Type(TSK_GPS_BOOKMARK),
			new Type(TSK_DATETIME),
			new AttributeExtractor(new Type(TSK_NAME)),
			artf -> {
				final BlackboardAttribute longitude = getAttributeSafe(artf, new Type(TSK_GEO_LONGITUDE));
				final BlackboardAttribute latitude = getAttributeSafe(artf, new Type(TSK_GEO_LATITUDE));
				return "Latitude: " + stringValueOf(latitude) + " Longitude: " + stringValueOf(longitude); // NON-NLS
			},
			new EmptyExtractor());

	TimelineEventType GPS_LAST_KNOWN_LOCATION = new TimelineEventArtifactTypeImpl(30,
			getBundle().getString("MiscTypes.GPSLastknown.name"), // NON-NLS
			MISC_TYPES,
			new BlackboardArtifact.Type(TSK_GPS_LAST_KNOWN_LOCATION),
			new Type(TSK_DATETIME),
			new AttributeExtractor(new Type(TSK_NAME)),
			artf -> {
				final BlackboardAttribute longitude = getAttributeSafe(artf, new Type(TSK_GEO_LONGITUDE));
				final BlackboardAttribute latitude = getAttributeSafe(artf, new Type(TSK_GEO_LATITUDE));
				return "Latitude: " + stringValueOf(latitude) + " Longitude: " + stringValueOf(longitude); // NON-NLS
			},
			new EmptyExtractor());

	TimelineEventType GPS_SEARCH = new TimelineEventArtifactTypeImpl(31,
			getBundle().getString("MiscTypes.GPSearch.name"), // NON-NLS
			MISC_TYPES,
			new BlackboardArtifact.Type(TSK_GPS_SEARCH),
			new Type(TSK_DATETIME),
			new AttributeExtractor(new Type(TSK_NAME)),
			artf -> {
				final BlackboardAttribute longitude = getAttributeSafe(artf, new Type(TSK_GEO_LONGITUDE));
				final BlackboardAttribute latitude = getAttributeSafe(artf, new Type(TSK_GEO_LATITUDE));
				return "Latitude: " + stringValueOf(latitude) + " Longitude: " + stringValueOf(longitude); // NON-NLS
			},
			new EmptyExtractor());

	TimelineEventType GPS_TRACK = new GPSTrackArtifactEventType(32,
			getBundle().getString("MiscTypes.GPSTrack.name"), // NON-NLS
			MISC_TYPES,
			new BlackboardArtifact.Type(TSK_GPS_TRACK),
			new Type(TSK_NAME));

	TimelineEventType METADATA_LAST_PRINTED = new TimelineEventArtifactTypeImpl(33,
			getBundle().getString("MiscTypes.metadataLastPrinted.name"),// NON-NLS
			MISC_TYPES,
			new BlackboardArtifact.Type(TSK_METADATA),
			new BlackboardAttribute.Type(TSK_LAST_PRINTED_DATETIME),
			artf -> {
				// Fixed description: the event time alone carries the data.
				return getBundle().getString("MiscTypes.metadataLastPrinted.name");
			},
			new EmptyExtractor(),
			new EmptyExtractor());

	TimelineEventType METADATA_LAST_SAVED = new TimelineEventArtifactTypeImpl(34,
			getBundle().getString("MiscTypes.metadataLastSaved.name"),// NON-NLS
			MISC_TYPES,
			new BlackboardArtifact.Type(TSK_METADATA),
			new BlackboardAttribute.Type(TSK_DATETIME_MODIFIED),
			artf -> {
				return getBundle().getString("MiscTypes.metadataLastSaved.name");
			},
			new EmptyExtractor(),
			new EmptyExtractor());

	TimelineEventType METADATA_CREATED = new TimelineEventArtifactTypeImpl(35,
			getBundle().getString("MiscTypes.metadataCreated.name"),// NON-NLS
			MISC_TYPES,
			new BlackboardArtifact.Type(TSK_METADATA),
			new BlackboardAttribute.Type(TSK_DATETIME_CREATED),
			artf -> {
				return getBundle().getString("MiscTypes.metadataCreated.name");
			},
			new EmptyExtractor(),
			new EmptyExtractor());

	TimelineEventType PROGRAM_EXECUTION = new TimelineEventArtifactTypeImpl(36,
			getBundle().getString("MiscTypes.programexecuted.name"),// NON-NLS
			MISC_TYPES,
			new BlackboardArtifact.Type(TSK_PROG_RUN),
			new Type(TSK_DATETIME),
			new AttributeExtractor(new Type(TSK_PROG_NAME)),
			artf -> {
				// NOTE(review): stringValueOf never returns null (it maps null
				// attributes to ""), so the null check below appears redundant.
				String userName = stringValueOf(getAttributeSafe(artf, new Type(TSK_USER_NAME)));
				if (userName != null) {
					return userName;
				}
				return "";
			},
			new AttributeExtractor(new Type(TSK_COMMENT)));

	TimelineEventType WEB_FORM_AUTOFILL_ACCESSED = new TimelineEventArtifactTypeImpl(37,
			getBundle().getString("WebTypes.webFormAutofillAccessed.name"),
			WEB_ACTIVITY,
			new BlackboardArtifact.Type(TSK_WEB_FORM_AUTOFILL),
			new Type(TSK_DATETIME_ACCESSED),
			artf -> {
				final BlackboardAttribute name = getAttributeSafe(artf, new Type(TSK_NAME));
				final BlackboardAttribute value = getAttributeSafe(artf, new Type(TSK_VALUE));
				final BlackboardAttribute count = getAttributeSafe(artf, new Type(TSK_COUNT));
				return stringValueOf(name) + ":" + stringValueOf(value) + " Access count: " + stringValueOf(count); // NON-NLS
			},
			new EmptyExtractor(),
			new EmptyExtractor());

	TimelineEventType CALL_LOG_END = new TimelineEventArtifactTypeImpl(38,
			getBundle().getString("MiscTypes.CallsEnd.name"), // NON-NLS
			MISC_TYPES,
			new BlackboardArtifact.Type(TSK_CALLLOG),
			new Type(TSK_DATETIME_END),
			new AttributeExtractor(new Type(TSK_NAME)),
			artf -> {
				BlackboardAttribute phoneNumber = getAttributeSafe(artf, new Type(TSK_PHONE_NUMBER));
				if (phoneNumber == null) {
					phoneNumber = getAttributeSafe(artf, new Type(TSK_PHONE_NUMBER_TO));
				}
				if (phoneNumber == null) {
					phoneNumber = getAttributeSafe(artf, new Type(TSK_PHONE_NUMBER_FROM));
				}
				return "Phone number: " + stringValueOf(phoneNumber);
			},
			new AttributeExtractor(new Type(TSK_DIRECTION)));

	TimelineEventType EMAIL_RCVD = new TimelineEventArtifactTypeImpl(39,
			getBundle().getString("MiscTypes.EmailRcvd.name"), // NON-NLS
			MISC_TYPES,
			new BlackboardArtifact.Type(TSK_EMAIL_MSG),
			new Type(TSK_DATETIME_RCVD),
			artf -> {
				String emailFrom = stringValueOf(getAttributeSafe(artf, new Type(TSK_EMAIL_FROM)));
				if (emailFrom.length() > TimelineEventArtifactTypeImpl.EMAIL_TO_FROM_LENGTH_MAX) {
					emailFrom = emailFrom.substring(0, TimelineEventArtifactTypeImpl.EMAIL_TO_FROM_LENGTH_MAX);
				}
				String emailTo = stringValueOf(getAttributeSafe(artf, new Type(TSK_EMAIL_TO)));
				if (emailTo.length() > TimelineEventArtifactTypeImpl.EMAIL_TO_FROM_LENGTH_MAX) {
					emailTo = emailTo.substring(0, TimelineEventArtifactTypeImpl.EMAIL_TO_FROM_LENGTH_MAX);
				}
				return "Message from: " + emailFrom + " To: " + emailTo; // NON-NLS
			},
			new AttributeExtractor(new Type(TSK_SUBJECT)),
			artf -> {
				final BlackboardAttribute msgAttribute = getAttributeSafe(artf, new Type(TSK_EMAIL_CONTENT_PLAIN));
				String msg = stringValueOf(msgAttribute);
				if (msg.length() > TimelineEventArtifactTypeImpl.EMAIL_FULL_DESCRIPTION_LENGTH_MAX) {
					msg = msg.substring(0, TimelineEventArtifactTypeImpl.EMAIL_FULL_DESCRIPTION_LENGTH_MAX);
				}
				return msg;
			});

	TimelineEventType WEB_FORM_ADDRESSES_MODIFIED = new URLArtifactEventType(40,
			getBundle().getString("WebTypes.webFormAddressModified.name"),//NON-NLS
			WEB_ACTIVITY,
			new BlackboardArtifact.Type(TSK_WEB_FORM_ADDRESS),
			new Type(TSK_DATETIME_MODIFIED),
			new Type(TSK_EMAIL));

	TimelineEventType WEB_COOKIE_ACCESSED = new URLArtifactEventType(41,
			getBundle().getString("WebTypes.webCookiesAccessed.name"),// NON-NLS
			WEB_ACTIVITY,
			new BlackboardArtifact.Type(TSK_WEB_COOKIE),
			new Type(TSK_DATETIME_ACCESSED),
			new Type(TSK_URL));

	TimelineEventType WEB_COOKIE_END = new URLArtifactEventType(42,
			getBundle().getString("WebTypes.webCookiesEnd.name"),// NON-NLS
			WEB_ACTIVITY,
			new BlackboardArtifact.Type(TSK_WEB_COOKIE),
			new Type(TSK_DATETIME_END),
			new Type(TSK_URL));

	TimelineEventType BACKUP_EVENT_START = new TimelineEventArtifactTypeImpl(43,
			getBundle().getString("TimelineEventType.BackupEventStart.txt"),// NON-NLS
			MISC_TYPES,
			new BlackboardArtifact.Type(TSK_BACKUP_EVENT),
			new BlackboardAttribute.Type(TSK_DATETIME_START),
			artf -> {
				return getBundle().getString("TimelineEventType.BackupEvent.description.start");
			},
			new EmptyExtractor(),
			new EmptyExtractor());

	TimelineEventType BACKUP_EVENT_END = new TimelineEventArtifactTypeImpl(44,
			getBundle().getString("TimelineEventType.BackupEventEnd.txt"),// NON-NLS
			MISC_TYPES,
			new BlackboardArtifact.Type(TSK_BACKUP_EVENT),
			new BlackboardAttribute.Type(TSK_DATETIME_END),
			artf -> {
				return getBundle().getString("TimelineEventType.BackupEvent.description.end");
			},
			new EmptyExtractor(),
			new EmptyExtractor());

	TimelineEventType BLUETOOTH_PAIRING = new TimelineEventArtifactTypeSingleDescription(45,
			getBundle().getString("TimelineEventType.BluetoothPairing.txt"),//NON-NLS
			MISC_TYPES,
			new BlackboardArtifact.Type(TSK_BLUETOOTH_PAIRING),
			new BlackboardAttribute.Type(TSK_DATETIME),
			new BlackboardAttribute.Type(TSK_DEVICE_NAME));

	TimelineEventType CALENDAR_ENTRY_START = new TimelineEventArtifactTypeSingleDescription(46,
			getBundle().getString("TimelineEventType.CalendarEntryStart.txt"),//NON-NLS
			MISC_TYPES,
			new BlackboardArtifact.Type(TSK_CALENDAR_ENTRY),
			new BlackboardAttribute.Type(TSK_DATETIME_START),
			new BlackboardAttribute.Type(TSK_DESCRIPTION));

	TimelineEventType CALENDAR_ENTRY_END = new TimelineEventArtifactTypeSingleDescription(47,
			getBundle().getString("TimelineEventType.CalendarEntryEnd.txt"),//NON-NLS
			MISC_TYPES,
			new BlackboardArtifact.Type(TSK_CALENDAR_ENTRY),
			new BlackboardAttribute.Type(TSK_DATETIME_END),
			new BlackboardAttribute.Type(TSK_DESCRIPTION));

	TimelineEventType PROGRAM_DELETED = new TimelineEventArtifactTypeSingleDescription(48,
			getBundle().getString("TimelineEventType.DeletedProgram.txt"),//NON-NLS
			MISC_TYPES,
			new BlackboardArtifact.Type(TSK_DELETED_PROG),
			new BlackboardAttribute.Type(TSK_DATETIME),
			new BlackboardAttribute.Type(TSK_PROG_NAME));

	TimelineEventType OS_INFO = new TimelineEventArtifactTypeSingleDescription(49,
			getBundle().getString("TimelineEventType.OSInfo.txt"),//NON-NLS
			MISC_TYPES,
			new BlackboardArtifact.Type(TSK_OS_INFO),
			new BlackboardAttribute.Type(TSK_DATETIME),
			new BlackboardAttribute.Type(TSK_PROG_NAME));

	TimelineEventType PROGRAM_NOTIFICATION = new TimelineEventArtifactTypeSingleDescription(50,
			getBundle().getString("TimelineEventType.ProgramNotification.txt"),//NON-NLS
			MISC_TYPES,
			new BlackboardArtifact.Type(TSK_PROG_NOTIFICATIONS),
			new BlackboardAttribute.Type(TSK_DATETIME),
			new BlackboardAttribute.Type(TSK_PROG_NAME));

	TimelineEventType SCREEN_SHOT = new TimelineEventArtifactTypeSingleDescription(51,
			getBundle().getString("TimelineEventType.ScreenShot.txt"),//NON-NLS
			MISC_TYPES,
			new BlackboardArtifact.Type(TSK_SCREEN_SHOTS),
			new BlackboardAttribute.Type(TSK_DATETIME),
			new BlackboardAttribute.Type(TSK_PROG_NAME));

	TimelineEventType SERVICE_ACCOUNT = new TimelineEventArtifactTypeImpl(52,
			getBundle().getString("TimelineEventType.ServiceAccount.txt"),// NON-NLS
			MISC_TYPES,
			new BlackboardArtifact.Type(TSK_SERVICE_ACCOUNT),
			new BlackboardAttribute.Type(TSK_DATETIME_CREATED),
			artf -> {
				String progName = stringValueOf(getAttributeSafe(artf, new Type(TSK_PROG_NAME)));
				String userId = stringValueOf(getAttributeSafe(artf, new Type(TSK_USER_ID)));
				return String.format("Program Name: %s User ID: %s", progName, userId);
			},
			new EmptyExtractor(),
			new EmptyExtractor());

	TimelineEventType USER_DEVICE_EVENT_START = new TimelineEventArtifactTypeImpl(53,
			getBundle().getString("TimelineEventType.UserDeviceEventStart.txt"),// NON-NLS
			MISC_TYPES,
			new BlackboardArtifact.Type(TSK_USER_DEVICE_EVENT),
			new BlackboardAttribute.Type(TSK_DATETIME_START),
			artf -> {
				String progName = stringValueOf(getAttributeSafe(artf, new Type(TSK_PROG_NAME)));
				String activityType = stringValueOf(getAttributeSafe(artf, new Type(TSK_ACTIVITY_TYPE)));
				String connectionType = stringValueOf(getAttributeSafe(artf, new Type(TSK_VALUE)));
				return String.format("Program Name: %s Activity Type: %s Connection Type: %s", progName, activityType, connectionType);
			},
			new EmptyExtractor(),
			new EmptyExtractor());

	TimelineEventType USER_DEVICE_EVENT_END = new TimelineEventArtifactTypeImpl(54,
			getBundle().getString("TimelineEventType.UserDeviceEventEnd.txt"),// NON-NLS
			MISC_TYPES,
			new BlackboardArtifact.Type(TSK_USER_DEVICE_EVENT),
			new BlackboardAttribute.Type(TSK_DATETIME_END),
			artf -> {
				String progName = stringValueOf(getAttributeSafe(artf, new Type(TSK_PROG_NAME)));
				String activityType = stringValueOf(getAttributeSafe(artf, new Type(TSK_ACTIVITY_TYPE)));
				String connectionType = stringValueOf(getAttributeSafe(artf, new Type(TSK_VALUE)));
				return String.format("Program Name: %s Activity Type: %s Connection Type: %s", progName, activityType, connectionType);
			},
			new EmptyExtractor(),
			new EmptyExtractor());

	TimelineEventType WEB_CACHE = new URLArtifactEventType(55,
			getBundle().getString("TimelineEventType.WebCache.text"),// NON-NLS
			WEB_ACTIVITY,
			new BlackboardArtifact.Type(TSK_WEB_CACHE),
			new Type(TSK_DATETIME_CREATED),
			new Type(TSK_URL));

	TimelineEventType WIFI_NETWORK = new TimelineEventArtifactTypeSingleDescription(56,
			getBundle().getString("TimelineEventType.WIFINetwork.txt"),//NON-NLS
			MISC_TYPES,
			new BlackboardArtifact.Type(TSK_WIFI_NETWORK),
			new BlackboardAttribute.Type(TSK_DATETIME),
			new BlackboardAttribute.Type(TSK_SSID));

	TimelineEventType WEB_HISTORY_CREATED = new URLArtifactEventType(57,
			getBundle().getString("WebTypes.webHistoryCreated.name"),// NON-NLS
			WEB_ACTIVITY,
			new BlackboardArtifact.Type(TSK_WEB_HISTORY),
			new Type(TSK_DATETIME_CREATED),
			new Type(TSK_URL));

	TimelineEventType BLUETOOTH_ADAPTER = new TimelineEventArtifactTypeSingleDescription(58,
			getBundle().getString("TimelineEventType.BluetoothAdapter.txt"),//NON-NLS
			MISC_TYPES,
			new BlackboardArtifact.Type(TSK_BLUETOOTH_ADAPTER),
			new BlackboardAttribute.Type(TSK_DATETIME),
			new BlackboardAttribute.Type(TSK_NAME));

	TimelineEventType BLUETOOTH_PAIRING_ACCESSED = new TimelineEventArtifactTypeSingleDescription(59,
			getBundle().getString("TimelineEventType.BluetoothPairingLastConnection.txt"),//NON-NLS
			MISC_TYPES,
			new BlackboardArtifact.Type(TSK_BLUETOOTH_PAIRING),
			new BlackboardAttribute.Type(TSK_DATETIME_ACCESSED),
			new BlackboardAttribute.Type(TSK_DEVICE_NAME));

	//User manually created events, created with the "Add Event" button in the
	// timeline UI.
	TimelineEventType USER_CREATED = new TimelineEventArtifactTypeSingleDescription(60,
			getBundle().getString("CustomTypes.userCreated.name"),//NON-NLS
			MISC_TYPES,
			new BlackboardArtifact.Type(TSK_TL_EVENT),
			new BlackboardAttribute.Type(TSK_DATETIME),
			new BlackboardAttribute.Type(TSK_DESCRIPTION));

	/**
	 * Gets the category-level event types (children of the root type).
	 *
	 * @return A sorted set of the category types.
	 */
	static SortedSet getCategoryTypes() {
		return ROOT_EVENT_TYPE.getChildren();
	}

	/**
	 * Gets the event-level types of the file system category.
	 *
	 * @return A sorted set of the file system event types.
	 */
	static SortedSet getFileSystemTypes() {
		return FILE_SYSTEM.getChildren();
	}

	/**
	 * Gets the event-level types of the web activity category.
	 *
	 * @return A sorted set of the web activity event types.
	 */
	static SortedSet getWebActivityTypes() {
		return WEB_ACTIVITY.getChildren();
	}

	/**
	 * Gets the event-level types of the miscellaneous category.
	 *
	 * @return A sorted set of the miscellaneous event types.
	 */
	static SortedSet getMiscTypes() {
		return MISC_TYPES.getChildren();
	}

	/**
	 * Null-safe rendering of an attribute: the attribute's display string, or
	 * "" when the attribute is null.
	 *
	 * @param attr The attribute, may be null.
	 *
	 * @return The display string, never null.
	 */
	static String stringValueOf(BlackboardAttribute attr) {
		return Optional.ofNullable(attr)
				.map(BlackboardAttribute::getDisplayString)
				.orElse("");
	}

	/**
	 * Maps a TSK_DIRECTION attribute to the preposition used in message
	 * descriptions ("from" for incoming, "to" for outgoing).
	 *
	 * @param dir The direction attribute, may be null.
	 *
	 * @return "from", "to", " " for unknown directions, or "" when dir is
	 *         null.
	 */
	static String toFrom(BlackboardAttribute dir) {
		if (dir == null) {
			return "";
		} else {
			switch (dir.getDisplayString()) {
				case "Incoming": // NON-NLS
					return "from"; // NON-NLS
				case "Outgoing": // NON-NLS
					return "to"; // NON-NLS
				default:
					return " "; // NON-NLS
			}
		}
	}
}
sleuthkit-4.11.1/bindings/java/src/org/sleuthkit/datamodel/OSUtility.java000644 000765 000024 00000012036 14137073413 027223 0ustar00carrierstaff000000 000000 /*
 * Sleuth Kit Data Model
 *
 * Copyright 2013 Basis Technology Corp.
 * Contact: carrier sleuthkit org
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
* You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.sleuthkit.datamodel; import java.util.List; import java.util.ArrayList; import org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE; /** * Utility class to combine information from various OS info artifacts into * fewer objects. */ public class OSUtility { private OSUtility() { } /** * Get all non-backup OSInfo data * * @param skCase - Have to pass this in because we don't have access to the * normal method * * @return List of OSInfo objects * * @throws TskCoreException */ public static List getOSInfo(SleuthkitCase skCase) throws TskCoreException { return getOSInfoInternal(skCase, false, false, 0); } /** * Get OSInfo from the same file system as the given object. Will not * include backups. * * @param skCase - Have to pass this in because we don't have access to the * normal method * @param fsc - FsContent from the same file system we want the OS * information from * * @return - List of OSInfo objects * * @throws TskCoreException */ public static List getOSInfo(SleuthkitCase skCase, FsContent fsc) throws TskCoreException { return getOSInfoInternal(skCase, false, true, fsc.getFileSystemId()); } /** * Creates a list of all OS Info data on any file system, including the * backups * * @param skCase - Have to pass this in because we don't have access to the * normal method * * @return - List of OSInfo objects * * @throws TskCoreException */ public static List getAllOSInfo(SleuthkitCase skCase) throws TskCoreException { return getOSInfoInternal(skCase, true, false, 0); } /** * Internal method to find and combine the requested OS Info data. 
* * @param skCase - Have to pass this in because we don't have access * to the normal method * @param includeBackups - true if we should include registry data found in * "RegBack" * @param restrictFs - true if an file system id is being provided to * match against * @param fsId - the file system ID that the registry hives must * be on (if restrictFs is set) * * @return - List of OSInfo objects * * @throws TskCoreException */ private static List getOSInfoInternal(SleuthkitCase skCase, boolean includeBackups, boolean restrictFs, long fsId) throws TskCoreException { List infoList = new ArrayList(); // Get all OS_INFO artifacts for this case ArrayList results = skCase.getBlackboardArtifacts(ARTIFACT_TYPE.TSK_OS_INFO); for (BlackboardArtifact art : results) { AbstractFile file = skCase.getAbstractFileById(art.getObjectID()); if (file == null) { continue; } // Check if we're in a backup directory. If so and we're not including backups, // skip this artifact. boolean isBackup = file.getParentPath().contains("RegBack"); if (isBackup && (!includeBackups)) { continue; } // FsContent allows us to get the file system ID. 
if (file instanceof FsContent) { FsContent fsc = (FsContent) file; // If we're restricting the file system, skip any that don't match if (restrictFs && (fsId != fsc.getFileSystemId())) { continue; } // Make a new OSInfo object OSInfo newInfo = new OSInfo(art, isBackup, fsc.getFileSystemId(), file.getParent()); // Attempt to merge it with an existing object boolean mergedInfo = false; for (OSInfo info : infoList) { if (info.matches(newInfo)) { info.combine(newInfo); mergedInfo = true; break; } } // If nothing matched, add the new object to the list if (!mergedInfo) { infoList.add(newInfo); } } else if (!restrictFs) { // Make a new OSInfo object (no file system ID in this case) OSInfo newInfo = new OSInfo(art, isBackup, file.getParent()); // Attempt to merge it with an existing object boolean mergedInfo = false; for (OSInfo info : infoList) { if (info.matches(newInfo)) { info.combine(newInfo); mergedInfo = true; break; } } // If nothing matched, add the new object to the list if (!mergedInfo) { infoList.add(newInfo); } } else { // If we're limiting the search to one FS, don't include any // data we can't find the FS for } } return infoList; } } sleuthkit-4.11.1/bindings/java/src/org/sleuthkit/datamodel/ScoringManager.java000644 000765 000024 00000035635 14137073413 030227 0ustar00carrierstaff000000 000000 /* * Sleuth Kit Data Model * * Copyright 2020 Basis Technology Corp. * Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
*/ package org.sleuthkit.datamodel; import java.sql.PreparedStatement; import java.sql.ResultSet; import java.sql.SQLException; import java.sql.Statement; import java.util.ArrayList; import java.util.Collections; import java.util.List; import java.util.Optional; import java.util.Map; import java.util.logging.Logger; import java.util.stream.Collectors; import org.sleuthkit.datamodel.Score.Priority; import org.sleuthkit.datamodel.Score.Significance; import org.sleuthkit.datamodel.SleuthkitCase.CaseDbConnection; import org.sleuthkit.datamodel.SleuthkitCase.CaseDbTransaction; /** * The scoring manager is responsible for updating and querying the score of * objects. * */ public class ScoringManager { private static final Logger LOGGER = Logger.getLogger(ScoringManager.class.getName()); private final SleuthkitCase db; /** * Construct a ScoringManager for the given SleuthkitCase. * * @param skCase The SleuthkitCase * */ ScoringManager(SleuthkitCase skCase) { this.db = skCase; } /** * Get the aggregate score for the given object. * * @param objId Object id. * * @return Score, if it is found, unknown otherwise. * * @throws TskCoreException */ public Score getAggregateScore(long objId) throws TskCoreException { db.acquireSingleUserCaseReadLock(); try (CaseDbConnection connection = db.getConnection()) { return getAggregateScore(objId, connection); } finally { db.releaseSingleUserCaseReadLock(); } } /** * Get the aggregate scores for the given list of object ids. * * @param objIds Object id list. * * @return Map Each input object id will be mapped. If a score * is not found for an object Unknown score will be mapped. 
* * @throws TskCoreException */ public Map getAggregateScores(List objIds) throws TskCoreException { if (objIds.isEmpty()) { return Collections.emptyMap(); } String queryString = "SELECT obj_id, significance, priority FROM tsk_aggregate_score WHERE obj_id in " + objIds.stream().map(l -> l.toString()).collect(Collectors.joining(",", "(", ")")); Map results = objIds.stream().collect(Collectors.toMap( key -> key, key -> Score.SCORE_UNKNOWN)); db.acquireSingleUserCaseReadLock(); try (CaseDbConnection connection = db.getConnection()) { try (Statement s = connection.createStatement(); ResultSet rs = connection.executeQuery(s, queryString)) { while (rs.next()) { Long objId = rs.getLong("obj_id"); Score score = new Score(Significance.fromID(rs.getInt("significance")), Priority.fromID(rs.getInt("priority"))); results.put(objId, score); } } catch (SQLException ex) { throw new TskCoreException("SQLException thrown while running query: " + queryString, ex); } } finally { db.releaseSingleUserCaseReadLock(); } return results; } /** * Get the aggregate score for the given object. Uses the connection from the * given transaction. * * @param objId Object id. * @param transaction Transaction that provides the connection to use. * * @return Score, if it is found, unknown otherwise. * * @throws TskCoreException */ private Score getAggregateScore(long objId, CaseDbTransaction transaction) throws TskCoreException { CaseDbConnection connection = transaction.getConnection(); return getAggregateScore(objId, connection); } /** * Get the aggregate score for the given object. * * @param objId Object id. * @param connection Connection to use for the query. * * @return Score, if it is found, SCORE_UNKNOWN otherwise. 
* * @throws TskCoreException */ private Score getAggregateScore(long objId, CaseDbConnection connection) throws TskCoreException { String queryString = "SELECT significance, priority FROM tsk_aggregate_score WHERE obj_id = " + objId; try (Statement s = connection.createStatement(); ResultSet rs = connection.executeQuery(s, queryString)) { if (rs.next()) { return new Score(Significance.fromID(rs.getInt("significance")), Priority.fromID(rs.getInt("priority"))); } else { return Score.SCORE_UNKNOWN; } } catch (SQLException ex) { throw new TskCoreException("SQLException thrown while running query: " + queryString, ex); } } /** * Inserts or updates the score for the given object. * * @param objId Object id of the object. * @param dataSourceObjectId Data source object id, may be null. * @param score Score to be inserted/updated. * @param transaction Transaction to use for the update. * * @throws TskCoreException */ private void setAggregateScore(long objId, Long dataSourceObjectId, Score score, CaseDbTransaction transaction) throws TskCoreException { String insertSQLString = "INSERT INTO tsk_aggregate_score (obj_id, data_source_obj_id, significance , priority) VALUES (?, ?, ?, ?)" + " ON CONFLICT (obj_id) DO UPDATE SET significance = ?, priority = ?"; CaseDbConnection connection = transaction.getConnection(); try { PreparedStatement preparedStatement = connection.getPreparedStatement(insertSQLString, Statement.NO_GENERATED_KEYS); preparedStatement.clearParameters(); preparedStatement.setLong(1, objId); if (dataSourceObjectId != null) { preparedStatement.setLong(2, dataSourceObjectId); } else { preparedStatement.setNull(2, java.sql.Types.NULL); } preparedStatement.setInt(3, score.getSignificance().getId()); preparedStatement.setInt(4, score.getPriority().getId()); preparedStatement.setInt(5, score.getSignificance().getId()); preparedStatement.setInt(6, score.getPriority().getId()); connection.executeUpdate(preparedStatement); } catch (SQLException ex) { throw new 
TskCoreException(String.format("Error updating aggregate score, query: %s for objId = %d", insertSQLString, objId), ex);//NON-NLS } } /** * Updates the score for the specified object after a result has been * added. Is optimized to do nothing if the new score is less than the * current aggregate score. * * @param objId Object id. * @param dataSourceObjectId Object id of the data source, may be null. * @param newResultScore Score for a newly added analysis result. * @param transaction Transaction to use for the update. * * @return Aggregate score for the object. * * @throws TskCoreException */ Score updateAggregateScoreAfterAddition(long objId, Long dataSourceObjectId, Score newResultScore, CaseDbTransaction transaction) throws TskCoreException { /* get an exclusive write lock on the DB before we read anything so that we know we are * the only one reading existing scores and updating. The risk is that two computers * could update the score and the aggregate score ends up being incorrect. * * NOTE: The alternative design is to add a 'version' column for opportunistic locking * and calculate these outside of a transaction. We opted for table locking for performance * reasons so that we can still add the analysis results in a batch. That remains an option * if we get into deadlocks with the current design. 
*/ try { CaseDbConnection connection = transaction.getConnection(); connection.getAggregateScoreTableWriteLock(); } catch (SQLException ex) { throw new TskCoreException("Error getting exclusive write lock on aggregate score table", ex);//NON-NLS } // Get the current score Score currentAggregateScore = ScoringManager.this.getAggregateScore(objId, transaction); // If current score is Unknown And newscore is not Unknown - allow None (good) to be recorded // or if the new score is higher than the current score if ( (currentAggregateScore.compareTo(Score.SCORE_UNKNOWN) == 0 && newResultScore.compareTo(Score.SCORE_UNKNOWN) != 0) || (Score.getScoreComparator().compare(newResultScore, currentAggregateScore) > 0)) { setAggregateScore(objId, dataSourceObjectId, newResultScore, transaction); // register score change in the transaction. transaction.registerScoreChange(new ScoreChange(objId, dataSourceObjectId, currentAggregateScore, newResultScore)); return newResultScore; } else { // return the current score return currentAggregateScore; } } /** * Recalculate the aggregate score after an analysis result was * deleted. * * @param objId Content that had result deleted from * @param dataSourceObjectId Data source content is in * @param transaction * @return New Score * @throws TskCoreException */ Score updateAggregateScoreAfterDeletion(long objId, Long dataSourceObjectId, CaseDbTransaction transaction) throws TskCoreException { CaseDbConnection connection = transaction.getConnection(); /* get an exclusive write lock on the DB before we read anything so that we know we are * the only one reading existing scores and updating. The risk is that two computers * could update the score and the aggregate score ends up being incorrect. * * NOTE: The alternative design is to add a 'version' column for opportunistic locking * and calculate these outside of a transaction. We opted for table locking for performance * reasons so that we can still add the analysis results in a batch. 
That remains an option * if we get into deadlocks with the current design. */ try { connection.getAggregateScoreTableWriteLock(); } catch (SQLException ex) { throw new TskCoreException("Error getting exclusive write lock on aggregate score table", ex);//NON-NLS } // Get the current score Score currentScore = ScoringManager.this.getAggregateScore(objId, transaction); // Calculate the score from scratch by getting all of them and getting the highest List analysisResults = db.getBlackboard().getAnalysisResults(objId, connection); Score newScore = Score.SCORE_UNKNOWN; for (AnalysisResult iter : analysisResults) { Score iterScore = iter.getScore(); if (Score.getScoreComparator().compare(iterScore, newScore) > 0) { newScore = iterScore; } } // get the maximum score of the calculated aggregate score of analysis results // or the score derived from the maximum known status of a content tag on this content. Optional tagScore = db.getTaggingManager().getMaxTagKnownStatus(objId, transaction) .map(knownStatus -> TaggingManager.getTagScore(knownStatus)); if (tagScore.isPresent() && Score.getScoreComparator().compare(tagScore.get(), newScore) > 0) { newScore = tagScore.get(); } // only change the DB if we got a new score. if (newScore.compareTo(currentScore) != 0) { setAggregateScore(objId, dataSourceObjectId, newScore, transaction); // register the score change with the transaction so an event can be fired for it. transaction.registerScoreChange(new ScoreChange(objId, dataSourceObjectId, currentScore, newScore)); } return newScore; } /** * Get the count of contents within the specified data source * with the specified significance. * * @param dataSourceObjectId Data source object id. * @param significance Significance to look for. * * @return Number of contents with given score. * @throws TskCoreException if there is an error getting the count. 
*/ public long getContentCount(long dataSourceObjectId, Score.Significance significance) throws TskCoreException { db.acquireSingleUserCaseReadLock(); try (CaseDbConnection connection = db.getConnection()) { return getContentCount(dataSourceObjectId, significance, connection); } finally { db.releaseSingleUserCaseReadLock(); } } /** * Get the count of contents with the specified significance. Uses the * specified database connection. * * @param dataSourceObjectId Data source object id. * @param significance Significance to look for. * @param connection Database connection to use.. * * @return Number of contents with given score. * * @throws TskCoreException if there is an error getting the count. */ private long getContentCount(long dataSourceObjectId, Score.Significance significance, CaseDbConnection connection) throws TskCoreException { String queryString = "SELECT COUNT(obj_id) AS count FROM tsk_aggregate_score" + " WHERE data_source_obj_id = " + dataSourceObjectId + " AND significance = " + significance.getId(); try (Statement statement = connection.createStatement(); ResultSet resultSet = connection.executeQuery(statement, queryString);) { long count = 0; if (resultSet.next()) { count = resultSet.getLong("count"); } return count; } catch (SQLException ex) { throw new TskCoreException("Error getting count of items with significance = " + significance.toString(), ex); } } /** * Get the contents with the specified score. * * @param dataSourceObjectId Data source object id. * @param significance Significance to look for. * * @return Collection of contents with given score. * * @throws TskCoreException if there is an error getting the contents. 
*/ public List getContent(long dataSourceObjectId, Score.Significance significance) throws TskCoreException { db.acquireSingleUserCaseReadLock(); try (CaseDbConnection connection = db.getConnection()) { return getContent(dataSourceObjectId, significance, connection); } finally { db.releaseSingleUserCaseReadLock(); } } /** * Gets the contents with the specified score. Uses the specified * database connection. * * @param dataSourceObjectId Data source object id. * @param significance Significance to look for. * @param connection Connection to use for the query. * * @return List of contents with given score. * * @throws TskCoreException */ private List getContent(long dataSourceObjectId, Score.Significance significance, CaseDbConnection connection) throws TskCoreException { String queryString = "SELECT obj_id FROM tsk_aggregate_score" + " WHERE data_source_obj_id = " + dataSourceObjectId + " AND significance = " + significance.getId(); try (Statement statement = connection.createStatement(); ResultSet resultSet = connection.executeQuery(statement, queryString);) { List items = new ArrayList<>(); while (resultSet.next()) { long objId = resultSet.getLong("obj_id"); items.add(db.getContentById(objId)); } return items; } catch (SQLException ex) { throw new TskCoreException("Error getting list of items with significance = " + significance.toString(), ex); } } } sleuthkit-4.11.1/bindings/java/src/org/sleuthkit/datamodel/HashEntry.java000755 000765 000024 00000002773 14137073413 027235 0ustar00carrierstaff000000 000000 /* * Sleuth Kit Data Model * * Copyright 2014 Basis Technology Corp. * Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.sleuthkit.datamodel; /** * Used to pass info about a hash so that it can be added into the TSK-db from * Autopsy. HashHitInfo is for the reverse direction. */ public class HashEntry { private String fileName; private String md5Hash; private String sha1Hash; private String sha256Hash; private String comment; public HashEntry(String fileName, String md5Hash, String sha1Hash, String sha256Hash, String comment) { this.fileName = fileName; this.md5Hash = md5Hash; this.sha1Hash = sha1Hash; this.sha256Hash = sha256Hash; this.comment = comment; } public String getFileName() { return fileName; } public String getMd5Hash() { return md5Hash; } public String getSha1Hash() { return sha1Hash; } public String getSha256Hash() { return sha256Hash; } public String getComment() { return comment; } } sleuthkit-4.11.1/bindings/java/src/org/sleuthkit/datamodel/BlackboardAttribute.java000755 000765 000024 00000305060 14137073414 031234 0ustar00carrierstaff000000 000000 /* * Sleuth Kit Data Model * * Copyright 2011-2021 Basis Technology Corp. * Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
* See the License for the specific language governing permissions and * limitations under the License. */ package org.sleuthkit.datamodel; import java.io.Serializable; import java.util.Arrays; import java.util.Collections; import java.util.List; import java.util.Objects; import java.util.ResourceBundle; import java.util.TimeZone; import java.util.logging.Level; import java.util.logging.Logger; /** * Represents an attribute of an artifact posted to the blackboard. Instances * should be constructed and then added to an instance of the BlackboardArtifact * class. * * Attributes are a name-value pairs. The name is the type of the attribute, as * represented by the BlackboardAttribute.Type class. Standard attribute types * are specified by the ATTRIBUTE_TYPE enumeration. Custom attribute types may * be created by constructing a BlackboardAttribute.Type object and calling the * SleuthkitCase.addArtifactAttributeType method. The BlackboardAttribute.Type * object that is returned can then be used to create instances of the custom * attribute by calling the appropriate BlackboardAttribute constructor. It can * also be used to do blackboard queries involving the custom type. */ public class BlackboardAttribute extends AbstractAttribute { private static final Logger LOGGER = Logger.getLogger(BlackboardAttribute.class.getName()); private static final ResourceBundle bundle = ResourceBundle.getBundle("org.sleuthkit.datamodel.Bundle"); private String context; private String sources; private long artifactID; // Cached parent artifact. This field is populated lazily upon the first // call to getParentArtifact(). private BlackboardArtifact parentArtifact; // The parent data source is defined as being // the data source of the parent artifact. private Long parentDataSourceID; /** * Constructs a standard attribute with an integer value. The attribute * should be added to an appropriate artifact. * * @param attributeType The standard attribute type. 
* @param source The source of this attribute. * @param valueInt The attribute value. * * @throws IllegalArgumentException If the value type of the specified * standard attribute type is not * TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.INTEGER. */ public BlackboardAttribute(ATTRIBUTE_TYPE attributeType, String source, int valueInt) throws IllegalArgumentException { super(new BlackboardAttribute.Type(attributeType), valueInt); this.sources = replaceNulls(source); this.context = ""; } /** * Constructs an attribute with an integer value. The attribute should be * added to an appropriate artifact. * * @param attributeType The attribute type. * @param source The source of this attribute. * @param valueInt The attribute value. * * @throws IllegalArgumentException If the value type of the specified * attribute type is not * TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.INTEGER. */ public BlackboardAttribute(Type attributeType, String source, int valueInt) throws IllegalArgumentException { super(attributeType, valueInt); this.sources = replaceNulls(source); this.context = ""; } /** * Constructs a standard attribute with a long/datetime value. If the value * is a datetime, it should be seconds from January 1, 1970. The attribute * should be added to an appropriate artifact. * * @param attributeType The standard attribute type. * @param source The source of this attribute. * @param valueLong The attribute value. * * @throws IllegalArgumentException If the value type of the specified * standard attribute type is not * TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.LONG * or * TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DATETIME. */ public BlackboardAttribute(ATTRIBUTE_TYPE attributeType, String source, long valueLong) throws IllegalArgumentException { super(new BlackboardAttribute.Type(attributeType), valueLong); this.sources = replaceNulls(source); this.context = ""; } /** * Constructs an attribute with a long/datetime value. The attribute should * be added to an appropriate artifact. 
* * @param attributeType The attribute type. * @param source The source of this attribute. * @param valueLong The attribute value. * * @throws IllegalArgumentException If the value type of the specified * standard attribute type is not * TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.LONG * or * TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DATETIME. */ public BlackboardAttribute(Type attributeType, String source, long valueLong) throws IllegalArgumentException { super(attributeType, valueLong); this.sources = replaceNulls(source); this.context = ""; } /** * Constructs a standard attribute with a double value. The attribute should * be added to an appropriate artifact. * * @param attributeType The standard attribute type. * @param source The source of this attribute. * @param valueDouble The attribute value. * * @throws IllegalArgumentException If the value type of the specified * standard attribute type is not * TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DOUBLE. */ public BlackboardAttribute(ATTRIBUTE_TYPE attributeType, String source, double valueDouble) throws IllegalArgumentException { super(new BlackboardAttribute.Type(attributeType), valueDouble); this.sources = replaceNulls(source); this.context = ""; } /** * Constructs an attribute with a double value. The attribute should be * added to an appropriate artifact. * * @param attributeType The attribute type. * @param source The source of this attribute. * @param valueDouble The attribute value. * * @throws IllegalArgumentException If the value type of the specified * attribute type is not * TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DOUBLE. */ public BlackboardAttribute(Type attributeType, String source, double valueDouble) throws IllegalArgumentException { super(attributeType, valueDouble); this.sources = replaceNulls(source); this.context = ""; } /** * Constructs a standard attribute with an string value. The attribute * should be added to an appropriate artifact. * * @param attributeType The standard attribute type. 
* @param source The source of this attribute. * @param valueString The attribute value. * * @throws IllegalArgumentException If the value type of the specified * standard attribute type is not * TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING * or * TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.JSON */ public BlackboardAttribute(ATTRIBUTE_TYPE attributeType, String source, String valueString) throws IllegalArgumentException { super(new BlackboardAttribute.Type(attributeType), valueString); this.sources = replaceNulls(source); this.context = ""; } /** * Constructs an attribute with a string value. The attribute should be * added to an appropriate artifact. * * @param attributeType The attribute type. * @param source The source of this attribute. * @param valueString The attribute value. * * @throws IllegalArgumentException If the value type of the specified * attribute type is not * TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING. */ public BlackboardAttribute(Type attributeType, String source, String valueString) throws IllegalArgumentException { super(attributeType, valueString); this.sources = replaceNulls(source); this.context = ""; } /** * Constructs a standard attribute with a byte array value. The attribute * should be added to an appropriate artifact. * * @param attributeType The standard attribute type. * @param source The source of this attribute. * @param valueBytes The attribute value. * * @throws IllegalArgumentException If the value type of the specified * standard attribute type is not * TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.BYTE. */ public BlackboardAttribute(ATTRIBUTE_TYPE attributeType, String source, byte[] valueBytes) throws IllegalArgumentException { super(new BlackboardAttribute.Type(attributeType), valueBytes); this.sources = replaceNulls(source); this.context = ""; } /** * Constructs an attribute with a byte array value. The attribute should be * added to an appropriate artifact. * * @param attributeType The attribute type. 
* @param source The source of this attribute. * @param valueBytes The attribute value. * * @throws IllegalArgumentException If the value type of the specified * attribute type is not * TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.BYTE. */ public BlackboardAttribute(Type attributeType, String source, byte[] valueBytes) throws IllegalArgumentException { super(attributeType, valueBytes); this.sources = replaceNulls(source); this.context = ""; } /** * Gets the id of the artifact associated with this attribute, if the * attribute was added to an artifact. Attributes should always be added to * artifacts after they are constructed. * * @return The artifact id or zero if the artifact id has not been set. */ public long getArtifactID() { return artifactID; } /** * Sets the artifact id. * * @param artifactID The artifact id. */ void setArtifactId(long artifactID) { this.artifactID = artifactID; } /** * Gets the sources of this attribute. * * @return A list of sources, may be empty. */ public List getSources() { if (null != sources && !this.sources.isEmpty()) { List modules = Arrays.asList(sources.split(",")); return modules; } else { return Collections.emptyList(); } } /** * Adds a source to the sources of this attribute. * * @param source The source name. * * @throws org.sleuthkit.datamodel.TskCoreException */ public void addSource(String source) throws TskCoreException { this.sources = getCaseDatabase().addSourceToArtifactAttribute(this, source); } /** * Gets the artifact associated with this attribute. The artifact can be * used to get the source content for the artifact as well as any other * attributes associated with the artifact. * * @return The artifact. * * @throws TskCoreException If there is no artifact associated with this * attribute or there is an error reading from the * case database. 
*/ public BlackboardArtifact getParentArtifact() throws TskCoreException { if (parentArtifact == null) { parentArtifact = getCaseDatabase().getBlackboardArtifact(getArtifactID()); } return parentArtifact; } @Override public int hashCode() { return Objects.hash( this.getAttributeType(), this.getValueInt(), this.getValueLong(), this.getValueDouble(), this.getValueString(), this.getValueBytes(), this.getSources(), getContext()); } @Override public boolean equals(Object that) { if (this == that) { return true; } else if (that instanceof BlackboardAttribute) { BlackboardAttribute other = (BlackboardAttribute) that; Object[] thisObject = new Object[]{this.getSources(), this.getContext()}; Object[] otherObject = new Object[]{other.getSources(), other.getContext()}; return areValuesEqual(that) && Objects.deepEquals(thisObject, otherObject); } else { return false; } } @Override public String toString() { return "BlackboardAttribute{" + "artifactID=" + getArtifactID() + ", attributeType=" + getAttributeType().toString() + ", moduleName=" + getSources() + ", context=" + context + ", valueInt=" + getValueInt() + ", valueLong=" + getValueLong() + ", valueDouble=" + getValueDouble() + ", valueString=" + getValueString() + ", valueBytes=" + Arrays.toString(getValueBytes()) + ", Case=" + getCaseDatabase() + '}'; //NON-NLS } /** * Gets the attribute value as a string, formatted as required. * * @return The value as a string. */ @Override public String getDisplayString() { switch (getAttributeType().getValueType()) { case DATETIME: { try { if (parentDataSourceID == null) { BlackboardArtifact parent = getParentArtifact(); parentDataSourceID = parent.getDataSourceObjectID(); } final Content dataSource = parentDataSourceID != null ? 
getCaseDatabase().getContentById(parentDataSourceID) : null; if ((dataSource != null) && (dataSource instanceof Image)) { // return the date/time string in the timezone associated with the datasource, Image image = (Image) dataSource; TimeZone tzone = TimeZone.getTimeZone(image.getTimeZone()); return TimeUtilities.epochToTime(getValueLong(), tzone); } } catch (TskException ex) { LOGGER.log(Level.WARNING, "Could not get timezone for image", ex); //NON-NLS } // return time string in default timezone return TimeUtilities.epochToTime(getValueLong()); } default: { return super.getDisplayString(); } } } /** * Constructs an artifact attribute. To be used when creating an attribute * based on a query of the blackboard _attributes table in the case * database. * * @param artifactID The artifact id for this attribute * @param attributeTypeID The attribute type id. * @param source The source of this attribute. * @param context Contextual information about this attribute. * @param valueType The attribute value type. * @param valueInt The value from the the value_int32 column. * @param valueLong The value from the the value_int64 column. * @param valueDouble The value from the the value_double column. * @param valueString The value from the the value_text column. * @param valueBytes The value from the the value_byte column. * @param sleuthkitCase A reference to the SleuthkitCase object * representing the case database. */ BlackboardAttribute(long artifactID, BlackboardAttribute.Type attributeType, String source, String context, int valueInt, long valueLong, double valueDouble, String valueString, byte[] valueBytes, SleuthkitCase sleuthkitCase) { super(attributeType, valueInt, valueLong, valueDouble, valueString, valueBytes, sleuthkitCase); this.artifactID = artifactID; this.sources = replaceNulls(source); this.context = replaceNulls(context); } /** * Sets the parent data source id. The parent data source is defined as * being the data source of the parent artifact. 
*
	 * @param parentDataSourceID The parent data source id.
	 */
	void setParentDataSourceID(Long parentDataSourceID) {
		this.parentDataSourceID = parentDataSourceID;
	}

	/**
	 * Gets the sources of this attribute.
	 *
	 * @return A comma-separated-values list of sources, may be empty. The CSV
	 *         is due to a deliberate denormalization of the source field in the
	 *         case database and this method is a helper method for the
	 *         SleuthkitCase class.
	 */
	String getSourcesCSV() {
		return sources;
	}

	/**
	 * Represents the type of an attribute.
	 *
	 * Each standard type constant below pairs a stable numeric type id and
	 * type name with a localized display name (looked up from the resource
	 * bundle) and a value type. Gaps in the id sequence correspond to
	 * retired types; the deprecation comments note replacements where known.
	 */
	public static final class Type implements Serializable {

		public static final Type TSK_URL = new Type(1, "TSK_URL",
				bundle.getString("BlackboardAttribute.tskUrl.text"),
				TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING);

		public static final Type TSK_DATETIME = new Type(2, "TSK_DATETIME",
				bundle.getString("BlackboardAttribute.tskDatetime.text"),
				TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DATETIME);

		public static final Type TSK_NAME = new Type(3, "TSK_NAME",
				bundle.getString("BlackboardAttribute.tskName.text"),
				TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING);

		public static final Type TSK_PROG_NAME = new Type(4, "TSK_PROG_NAME",
				bundle.getString("BlackboardAttribute.tskProgName.text"),
				TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING);

		public static final Type TSK_VALUE = new Type(6, "TSK_VALUE",
				bundle.getString("BlackboardAttribute.tskValue.text"),
				TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING);

		public static final Type TSK_FLAG = new Type(7, "TSK_FLAG",
				bundle.getString("BlackboardAttribute.tskFlag.text"),
				TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING);

		public static final Type TSK_PATH = new Type(8, "TSK_PATH",
				bundle.getString("BlackboardAttribute.tskPath.text"),
				TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING);

		public static final Type TSK_KEYWORD = new Type(10, "TSK_KEYWORD",
				bundle.getString("BlackboardAttribute.tskKeyword.text"),
				TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING);

		public static final Type TSK_KEYWORD_REGEXP = new Type(11, "TSK_KEYWORD_REGEXP",
bundle.getString("BlackboardAttribute.tskKeywordRegexp.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING); public static final Type TSK_KEYWORD_PREVIEW = new Type(12, "TSK_KEYWORD_PREVIEW", bundle.getString("BlackboardAttribute.tskKeywordPreview.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING); // TSK_KEYWORD_SET (id: 13) has been deprecated. Please use TSK_SET_NAME instead. public static final Type TSK_USER_NAME = new Type(14, "TSK_USER_NAME", bundle.getString("BlackboardAttribute.tskUserName.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING); public static final Type TSK_DOMAIN = new Type(15, "TSK_DOMAIN", bundle.getString("BlackboardAttribute.tskDomain.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING); public static final Type TSK_PASSWORD = new Type(16, "TSK_PASSWORD", bundle.getString("BlackboardAttribute.tskPassword.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING); public static final Type TSK_NAME_PERSON = new Type(17, "TSK_NAME_PERSON", bundle.getString("BlackboardAttribute.tskNamePerson.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING); public static final Type TSK_DEVICE_MODEL = new Type(18, "TSK_DEVICE_MODEL", bundle.getString("BlackboardAttribute.tskDeviceModel.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING); public static final Type TSK_DEVICE_MAKE = new Type(19, "TSK_DEVICE_MAKE", bundle.getString("BlackboardAttribute.tskDeviceMake.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING); public static final Type TSK_DEVICE_ID = new Type(20, "TSK_DEVICE_ID", bundle.getString("BlackboardAttribute.tskDeviceId.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING); public static final Type TSK_EMAIL = new Type(21, "TSK_EMAIL", bundle.getString("BlackboardAttribute.tskEmail.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING); public static final Type TSK_HASH_MD5 = new Type(22, "TSK_HASH_MD5", bundle.getString("BlackboardAttribute.tskHashMd5.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING); public static final Type TSK_HASH_SHA1 = new Type(23, 
"TSK_HASH_SHA1", bundle.getString("BlackboardAttribute.tskHashSha1.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING); public static final Type TSK_HASH_SHA2_256 = new Type(24, "TSK_HASH_SHA2_256", bundle.getString("BlackboardAttribute.tskHashSha225.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING); public static final Type TSK_HASH_SHA2_512 = new Type(25, "TSK_HASH_SHA2_512", bundle.getString("BlackboardAttribute.tskHashSha2512.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING); public static final Type TSK_TEXT = new Type(26, "TSK_TEXT", bundle.getString("BlackboardAttribute.tskText.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING); public static final Type TSK_TEXT_FILE = new Type(27, "TSK_TEXT_FILE", bundle.getString("BlackboardAttribute.tskTextFile.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING); public static final Type TSK_TEXT_LANGUAGE = new Type(28, "TSK_TEXT_LANGUAGE", bundle.getString("BlackboardAttribute.tskTextLanguage.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING); public static final Type TSK_ENTROPY = new Type(29, "TSK_ENTROPY", bundle.getString("BlackboardAttribute.tskEntropy.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DOUBLE); // TSK_HASHSET_NAME (id: 30) has been deprecated. Please use TSK_SET_NAME instead. // TSK_INTERESTING_FILE (id: 31) has been deprecated. Please use TSK_INTERESTING_FILE_HIT instead. 
public static final Type TSK_REFERRER = new Type(32, "TSK_REFERRER", bundle.getString("BlackboardAttribute.tskReferrer.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING); public static final Type TSK_DATETIME_ACCESSED = new Type(33, "TSK_DATETIME_ACCESSED", bundle.getString("BlackboardAttribute.tskDateTimeAccessed.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DATETIME); public static final Type TSK_IP_ADDRESS = new Type(34, "TSK_IP_ADDRESS", bundle.getString("BlackboardAttribute.tskIpAddress.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING); public static final Type TSK_PHONE_NUMBER = new Type(35, "TSK_PHONE_NUMBER", bundle.getString("BlackboardAttribute.tskPhoneNumber.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING); public static final Type TSK_PATH_ID = new Type(36, "TSK_PATH_ID", bundle.getString("BlackboardAttribute.tskPathId.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.LONG); public static final Type TSK_SET_NAME = new Type(37, "TSK_SET_NAME", bundle.getString("BlackboardAttribute.tskSetName.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING); // TSK_ENCRYPTION_DETECTED (id: 38) has been deprecated. Please use TSK_ENCRYPTION_DETECTED as an artifact. 
public static final Type TSK_MALWARE_DETECTED = new Type(39, "TSK_MALWARE_DETECTED", bundle.getString("BlackboardAttribute.tskMalwareDetected.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.INTEGER); public static final Type TSK_STEG_DETECTED = new Type(40, "TSK_STEG_DETECTED", bundle.getString("BlackboardAttribute.tskStegDetected.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.INTEGER); public static final Type TSK_EMAIL_TO = new Type(41, "TSK_EMAIL_TO", bundle.getString("BlackboardAttribute.tskEmailTo.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING); public static final Type TSK_EMAIL_CC = new Type(42, "TSK_EMAIL_CC", bundle.getString("BlackboardAttribute.tskEmailCc.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING); public static final Type TSK_EMAIL_BCC = new Type(43, "TSK_EMAIL_BCC", bundle.getString("BlackboardAttribute.tskEmailBcc.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING); public static final Type TSK_EMAIL_FROM = new Type(44, "TSK_EMAIL_FROM", bundle.getString("BlackboardAttribute.tskEmailFrom.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING); public static final Type TSK_EMAIL_CONTENT_PLAIN = new Type(45, "TSK_EMAIL_CONTENT_PLAIN", bundle.getString("BlackboardAttribute.tskEmailContentPlain.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING); public static final Type TSK_EMAIL_CONTENT_HTML = new Type(46, "TSK_EMAIL_CONTENT_HTML", bundle.getString("BlackboardAttribute.tskEmailContentHtml.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING); public static final Type TSK_EMAIL_CONTENT_RTF = new Type(47, "TSK_EMAIL_CONTENT_RTF", bundle.getString("BlackboardAttribute.tskEmailContentRtf.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING); public static final Type TSK_MSG_ID = new Type(48, "TSK_MSG_ID", bundle.getString("BlackboardAttribute.tskMsgId.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING); public static final Type TSK_MSG_REPLY_ID = new Type(49, "TSK_MSG_REPLY_ID", bundle.getString("BlackboardAttribute.tskMsgReplyId.text"), 
TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING); public static final Type TSK_DATETIME_RCVD = new Type(50, "TSK_DATETIME_RCVD", bundle.getString("BlackboardAttribute.tskDateTimeRcvd.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DATETIME); public static final Type TSK_DATETIME_SENT = new Type(51, "TSK_DATETIME_SENT", bundle.getString("BlackboardAttribute.tskDateTimeSent.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DATETIME); public static final Type TSK_SUBJECT = new Type(52, "TSK_SUBJECT", bundle.getString("BlackboardAttribute.tskSubject.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING); public static final Type TSK_TITLE = new Type(53, "TSK_TITLE", bundle.getString("BlackboardAttribute.tskTitle.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING); public static final Type TSK_GEO_LATITUDE = new Type(54, "TSK_GEO_LATITUDE", bundle.getString("BlackboardAttribute.tskGeoLatitude.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DOUBLE); public static final Type TSK_GEO_LONGITUDE = new Type(55, "TSK_GEO_LONGITUDE", bundle.getString("BlackboardAttribute.tskGeoLongitude.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DOUBLE); public static final Type TSK_GEO_VELOCITY = new Type(56, "TSK_GEO_VELOCITY", bundle.getString("BlackboardAttribute.tskGeoVelocity.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DOUBLE); public static final Type TSK_GEO_ALTITUDE = new Type(57, "TSK_GEO_ALTITUDE", bundle.getString("BlackboardAttribute.tskGeoAltitude.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DOUBLE); public static final Type TSK_GEO_BEARING = new Type(58, "TSK_GEO_BEARING", bundle.getString("BlackboardAttribute.tskGeoBearing.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING); public static final Type TSK_GEO_HPRECISION = new Type(59, "TSK_GEO_HPRECISION", bundle.getString("BlackboardAttribute.tskGeoHPrecision.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DOUBLE); public static final Type TSK_GEO_VPRECISION = new Type(60, "TSK_GEO_VPRECISION", 
bundle.getString("BlackboardAttribute.tskGeoVPrecision.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DOUBLE); public static final Type TSK_GEO_MAPDATUM = new Type(61, "TSK_GEO_MAPDATUM", bundle.getString("BlackboardAttribute.tskGeoMapDatum.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING); // TSK_FILE_TYPE_SIG (id: 62) has been deprecated. Please use the mime type field of the AbstractFile object instead. public static final Type TSK_FILE_TYPE_EXT = new Type(63, "TSK_FILE_TYPE_EXT", bundle.getString("BlackboardAttribute.tskFileTypeExt.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING); // TSK_TAGGED_ARTIFACT (id: 64) has been deprected. Please create a tag as an artifact. // TSK_TAG_NAME (id: 65) has been deprecated. Please create a tag as an artifact. public static final Type TSK_COMMENT = new Type(66, "TSK_COMMENT", bundle.getString("BlackboardAttribute.tskComment.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING); public static final Type TSK_URL_DECODED = new Type(67, "TSK_URL_DECODED", bundle.getString("BlackboardAttribute.tskUrlDecoded.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING); public static final Type TSK_DATETIME_CREATED = new Type(68, "TSK_DATETIME_CREATED", bundle.getString("BlackboardAttribute.tskDateTimeCreated.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DATETIME); public static final Type TSK_DATETIME_MODIFIED = new Type(69, "TSK_DATETIME_MODIFIED", bundle.getString("BlackboardAttribute.tskDateTimeModified.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DATETIME); public static final Type TSK_PROCESSOR_ARCHITECTURE = new Type(70, "TSK_PROCESSOR_ARCHITECTURE", bundle.getString("BlackboardAttribute.tskProcessorArchitecture.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING); public static final Type TSK_VERSION = new Type(71, "TSK_VERSION", bundle.getString("BlackboardAttribute.tskVersion.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING); public static final Type TSK_USER_ID = new Type(72, "TSK_USER_ID", 
bundle.getString("BlackboardAttribute.tskUserId.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING); public static final Type TSK_DESCRIPTION = new Type(73, "TSK_DESCRIPTION", bundle.getString("BlackboardAttribute.tskDescription.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING); public static final Type TSK_MESSAGE_TYPE = new Type(74, "TSK_MESSAGE_TYPE", bundle.getString("BlackboardAttribute.tskMessageType.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING); // SMS or MMS or IM ... public static final Type TSK_PHONE_NUMBER_HOME = new Type(75, "TSK_PHONE_NUMBER_HOME", bundle.getString("BlackboardAttribute.tskPhoneNumberHome.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING); public static final Type TSK_PHONE_NUMBER_OFFICE = new Type(76, "TSK_PHONE_NUMBER_OFFICE", bundle.getString("BlackboardAttribute.tskPhoneNumberOffice.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING); public static final Type TSK_PHONE_NUMBER_MOBILE = new Type(77, "TSK_PHONE_NUMBER_MOBILE", bundle.getString("BlackboardAttribute.tskPhoneNumberMobile.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING); public static final Type TSK_PHONE_NUMBER_FROM = new Type(78, "TSK_PHONE_NUMBER_FROM", bundle.getString("BlackboardAttribute.tskPhoneNumberFrom.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING); public static final Type TSK_PHONE_NUMBER_TO = new Type(79, "TSK_PHONE_NUMBER_TO", bundle.getString("BlackboardAttribute.tskPhoneNumberTo.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING); public static final Type TSK_DIRECTION = new Type(80, "TSK_DIRECTION", bundle.getString("BlackboardAttribute.tskDirection.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING); // Msg/Call direction: incoming, outgoing public static final Type TSK_EMAIL_HOME = new Type(81, "TSK_EMAIL_HOME", bundle.getString("BlackboardAttribute.tskEmailHome.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING); public static final Type TSK_EMAIL_OFFICE = new Type(82, "TSK_EMAIL_OFFICE", 
bundle.getString("BlackboardAttribute.tskEmailOffice.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING); public static final Type TSK_DATETIME_START = new Type(83, "TSK_DATETIME_START", bundle.getString("BlackboardAttribute.tskDateTimeStart.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DATETIME); // start time of an event - call log, Calendar entry public static final Type TSK_DATETIME_END = new Type(84, "TSK_DATETIME_END", bundle.getString("BlackboardAttribute.tskDateTimeEnd.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DATETIME); // end time of an event - call log, Calendar entry public static final Type TSK_CALENDAR_ENTRY_TYPE = new Type(85, "TSK_CALENDAR_ENTRY_TYPE", bundle.getString("BlackboardAttribute.tskCalendarEntryType.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING); // meeting, task, public static final Type TSK_LOCATION = new Type(86, "TSK_LOCATION", bundle.getString("BlackboardAttribute.tskLocation.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING); // Location string associated with an event - Conf Room Name, Address .... public static final Type TSK_SHORTCUT = new Type(87, "TSK_SHORTCUT", bundle.getString("BlackboardAttribute.tskShortcut.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING); // Short Cut string - short code or dial string for Speed dial, a URL short cut - e.g. bitly string, Windows Desktop Short cut name etc. 
public static final Type TSK_DEVICE_NAME = new Type(88, "TSK_DEVICE_NAME", bundle.getString("BlackboardAttribute.tskDeviceName.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING); // device name - a user assigned (usually) device name - such as "Joe's computer", "bob_win8", "BT Headset" public static final Type TSK_CATEGORY = new Type(89, "TSK_CATEGORY", bundle.getString("BlackboardAttribute.tskCategory.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING); // category/type, possible value set varies by the artifact public static final Type TSK_EMAIL_REPLYTO = new Type(90, "TSK_EMAIL_REPLYTO", bundle.getString("BlackboardAttribute.tskEmailReplyTo.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING); // ReplyTo address public static final Type TSK_SERVER_NAME = new Type(91, "TSK_SERVER_NAME", bundle.getString("BlackboardAttribute.tskServerName.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING); // server name, e.g. a mail server name - "smtp.google.com", a DNS server name... public static final Type TSK_COUNT = new Type(92, "TSK_COUNT", bundle.getString("BlackboardAttribute.tskCount.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.INTEGER); // Count related to the artifact public static final Type TSK_MIN_COUNT = new Type(93, "TSK_MIN_COUNT", bundle.getString("BlackboardAttribute.tskMinCount.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.INTEGER); // Minimum number/count public static final Type TSK_PATH_SOURCE = new Type(94, "TSK_PATH_SOURCE", bundle.getString("BlackboardAttribute.tskPathSource.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING); // Path to a source file related to the artifact public static final Type TSK_PERMISSIONS = new Type(95, "TSK_PERMISSIONS", bundle.getString("BlackboardAttribute.tskPermissions.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING); // Permissions public static final Type TSK_ASSOCIATED_ARTIFACT = new Type(96, "TSK_ASSOCIATED_ARTIFACT", bundle.getString("BlackboardAttribute.tskAssociatedArtifact.text"), 
TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.LONG); // Artifact ID of a related artifact public static final Type TSK_ISDELETED = new Type(97, "TSK_ISDELETED", bundle.getString("BlackboardAttribute.tskIsDeleted.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING); // boolean to indicate that the artifact is recovered fom deleted content public static final Type TSK_GEO_LATITUDE_START = new Type(98, "TSK_GEO_LATITUDE_START", bundle.getString("BlackboardAttribute.tskGeoLatitudeStart.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DOUBLE); // Starting location lattitude public static final Type TSK_GEO_LATITUDE_END = new Type(99, "TSK_GEO_LATITUDE_END", bundle.getString("BlackboardAttribute.tskGeoLatitudeEnd.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DOUBLE); // Ending location lattitude public static final Type TSK_GEO_LONGITUDE_START = new Type(100, "TSK_GEO_LONGITUDE_START", bundle.getString("BlackboardAttribute.tskGeoLongitudeStart.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DOUBLE); // Starting location longitude public static final Type TSK_GEO_LONGITUDE_END = new Type(101, "TSK_GEO_LONGITUDE_END", bundle.getString("BlackboardAttribute.tskGeoLongitudeEnd.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DOUBLE); //Ending Location longitude public static final Type TSK_READ_STATUS = new Type(102, "TSK_READ_STATUS", bundle.getString("BlackboardAttribute.tskReadStatus.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.INTEGER); // Message read status: 1 if read, 0 if unread public static final Type TSK_LOCAL_PATH = new Type(103, "TSK_LOCAL_PATH", bundle.getString("BlackboardAttribute.tskLocalPath.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING); // Local path to a network drive public static final Type TSK_REMOTE_PATH = new Type(104, "TSK_REMOTE_PATH", bundle.getString("BlackboardAttribute.tskRemotePath.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING); // Remote path of a network drive public static final Type TSK_TEMP_DIR = new Type(105, "TSK_TEMP_DIR", 
bundle.getString("BlackboardAttribute.tskTempDir.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING); // Default temporary files directory public static final Type TSK_PRODUCT_ID = new Type(106, "TSK_PRODUCT_ID", bundle.getString("BlackboardAttribute.tskProductId.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING); // Product ID public static final Type TSK_OWNER = new Type(107, "TSK_OWNER", bundle.getString("BlackboardAttribute.tskOwner.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING); // Registered owner of a piece of software public static final Type TSK_ORGANIZATION = new Type(108, "TSK_ORGANIZATION", bundle.getString("BlackboardAttribute.tskOrganization.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING); // Registered Organization for a piece of software public static final Type TSK_CARD_NUMBER = new Type(109, "TSK_CARD_NUMBER", bundle.getString("BlackboardAttribute.tskCardNumber.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING); public static final Type TSK_CARD_EXPIRATION = new Type(110, "TSK_CARD_EXPIRATION", bundle.getString("BlackboardAttribute.tskCardExpiration.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING); public static final Type TSK_CARD_SERVICE_CODE = new Type(111, "TSK_CARD_SERVICE_CODE", bundle.getString("BlackboardAttribute.tskCardServiceCode.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING); public static final Type TSK_CARD_DISCRETIONARY = new Type(112, "TSK_CARD_DISCRETIONARY", bundle.getString("BlackboardAttribute.tskCardDiscretionary.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING); public static final Type TSK_CARD_LRC = new Type(113, "TSK_CARD_LRC", bundle.getString("BlackboardAttribute.tskCardLRC.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING); public static final Type TSK_KEYWORD_SEARCH_DOCUMENT_ID = new Type(114, "TSK_KEYWORD_SEARCH_DOCUMENT_ID", bundle.getString("BlackboardAttribute.tskKeywordSearchDocumentID.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING); public static final Type TSK_CARD_SCHEME = new 
Type(115, "TSK_CARD_SCHEME", bundle.getString("BlackboardAttribute.tskCardScheme.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING); public static final Type TSK_CARD_TYPE = new Type(116, "TSK_CARD_TYPE", bundle.getString("BlackboardAttribute.tskCardType.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING); public static final Type TSK_BRAND_NAME = new Type(117, "TSK_BRAND_NAME", bundle.getString("BlackboardAttribute.tskBrandName.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING); public static final Type TSK_BANK_NAME = new Type(118, "TSK_BANK_NAME", bundle.getString("BlackboardAttribute.tskBankName.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING); public static final Type TSK_COUNTRY = new Type(119, "TSK_COUNTRY", bundle.getString("BlackboardAttribute.tskCountry.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING); public static final Type TSK_CITY = new Type(120, "TSK_CITY", bundle.getString("BlackboardAttribute.tskCity.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING); public static final Type TSK_ACCOUNT_TYPE = new Type(121, "TSK_ACCOUNT_TYPE", bundle.getString("BlackboardAttribute.tskAccountType.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING); /** * Keyword search type: exact match, sub-string, or regex. 
*/ public static final Type TSK_KEYWORD_SEARCH_TYPE = new Type(122, "TSK_KEYWORD_SEARCH_TYPE", bundle.getString("BlackboardAttribute.tskKeywordSearchType.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.INTEGER); public static final Type TSK_HEADERS = new Type(123, "TSK_HEADERS", bundle.getString("BlackboardAttribute.tskHeaders.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING); public static final Type TSK_ID = new Type(124, "TSK_ID", bundle.getString("BlackboardAttribute.tskId.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING); public static final Type TSK_SSID = new Type(125, "TSK_SSID", bundle.getString("BlackboardAttribute.tskSsid.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING); public static final Type TSK_BSSID = new Type(126, "TSK_BSSID", bundle.getString("BlackboardAttribute.tskBssid.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING); public static final Type TSK_MAC_ADDRESS = new Type(127, "TSK_MAC_ADDRESS", bundle.getString("BlackboardAttribute.tskMacAddress.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING); public static final Type TSK_IMEI = new Type(128, "TSK_IMEI", bundle.getString("BlackboardAttribute.tskImei.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING); public static final Type TSK_IMSI = new Type(129, "TSK_IMSI", bundle.getString("BlackboardAttribute.tskImsi.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING); public static final Type TSK_ICCID = new Type(130, "TSK_ICCID", bundle.getString("BlackboardAttribute.tskIccid.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING); public static final Type TSK_THREAD_ID = new Type(131, "TSK_THREAD_ID", bundle.getString("BlackboardAttribute.tskthreadid.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING); /** * The event type of a TSK_TL_EVENT artifact. The value should be the id * of the EventType in the tsk_event_types table. 
	 */
	public static final Type TSK_TL_EVENT_TYPE = new Type(132, "TSK_TL_EVENT_TYPE",
			bundle.getString("BlackboardAttribute.tskTLEventType.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.LONG);

	public static final Type TSK_DATETIME_DELETED = new Type(133, "TSK_DATETIME_DELETED",
			bundle.getString("BlackboardAttribute.tskdatetimedeleted.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DATETIME);

	public static final Type TSK_DATETIME_PASSWORD_RESET = new Type(134, "TSK_DATETIME_PASSWORD_RESET",
			bundle.getString("BlackboardAttribute.tskdatetimepwdreset.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DATETIME);

	// NOTE(review): the field name and the persisted type name intentionally
	// differ here ("TSK_DATETIME_PWD_FAIL"); do not "fix" the string — it is
	// the name stored in case databases.
	public static final Type TSK_DATETIME_PASSWORD_FAIL = new Type(135, "TSK_DATETIME_PWD_FAIL",
			bundle.getString("BlackboardAttribute.tskdatetimepwdfail.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DATETIME);

	public static final Type TSK_DISPLAY_NAME = new Type(136, "TSK_DISPLAY_NAME",
			bundle.getString("BlackboardAttribute.tskdisplayname.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING);

	public static final Type TSK_PASSWORD_SETTINGS = new Type(137, "TSK_PASSWORD_SETTINGS",
			bundle.getString("BlackboardAttribute.tskpasswordsettings.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING);

	public static final Type TSK_ACCOUNT_SETTINGS = new Type(138, "TSK_ACCOUNT_SETTINGS",
			bundle.getString("BlackboardAttribute.tskaccountsettings.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING);

	public static final Type TSK_PASSWORD_HINT = new Type(139, "TSK_PASSWORD_HINT",
			bundle.getString("BlackboardAttribute.tskpasswordhint.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING);

	public static final Type TSK_GROUPS = new Type(140, "TSK_GROUPS",
			bundle.getString("BlackboardAttribute.tskgroups.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING);

	/*
	 * Use
	 * org.sleuthkit.datamodel.blackboardutils.attributes.MessageAttachments
	 * to create and process TSK_ATTACHMENTS attributes.
	 */
	public static final Type TSK_ATTACHMENTS = new Type(141, "TSK_ATTACHMENTS",
			bundle.getString("BlackboardAttribute.tskattachments.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.JSON);

	/*
	 * Use org.sleuthkit.datamodel.blackboardutils.attributes.GeoTrackPoints
	 * to create and process TSK_GEO_TRACKPOINTS attributes.
	 */
	public static final Type TSK_GEO_TRACKPOINTS = new Type(142, "TSK_GEO_TRACKPOINTS",
			bundle.getString("BlackboardAttribute.tskgeopath.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.JSON);

	/*
	 * Use org.sleuthkit.datamodel.blackboardutils.attributes.GeoWaypoints
	 * to create and process TSK_GEO_WAYPOINTS attributes.
	 */
	public static final Type TSK_GEO_WAYPOINTS = new Type(143, "TSK_GEO_WAYPOINTS",
			bundle.getString("BlackboardAttribute.tskgeowaypoints.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.JSON);

	public static final Type TSK_DISTANCE_TRAVELED = new Type(144, "TSK_DISTANCE_TRAVELED",
			bundle.getString("BlackboardAttribute.tskdistancetraveled.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DOUBLE);

	public static final Type TSK_DISTANCE_FROM_HOMEPOINT = new Type(145, "TSK_DISTANCE_FROM_HOMEPOINT",
			bundle.getString("BlackboardAttribute.tskdistancefromhome.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DOUBLE);

	public static final Type TSK_HASH_PHOTODNA = new Type(146, "TSK_HASH_PHOTODNA",
			bundle.getString("BlackboardAttribute.tskhashphotodna.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING);

	public static final Type TSK_BYTES_SENT = new Type(147, "TSK_BYTES_SENT",
			bundle.getString("BlackboardAttribute.tskbytessent.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.LONG);

	public static final Type TSK_BYTES_RECEIVED = new Type(148, "TSK_BYTES_RECEIVED",
			bundle.getString("BlackboardAttribute.tskbytesreceived.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.LONG);

	public static final Type TSK_LAST_PRINTED_DATETIME = new Type(149, "TSK_LAST_PRINTED_DATETIME",
			bundle.getString("BlackboardAttribute.tsklastprinteddatetime.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DATETIME);

	public static final Type TSK_RULE = new Type(150, "TSK_RULE",
			bundle.getString("BlackboardAttribute.tskrule.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING);

	public static final Type TSK_ACTIVITY_TYPE = new Type(151, "TSK_ACTIVITY_TYPE",
			bundle.getString("BlackboardAttribute.tskActivityType.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING);

	/*
	 * Use org.sleuthkit.datamodel.blackboardutils.attributes.GeoAreaPoints
	 * to create and process TSK_GEO_AREAPOINTS attributes.
	 */
	public static final Type TSK_GEO_AREAPOINTS = new Type(152, "TSK_GEO_AREAPOINTS",
			bundle.getString("BlackboardAttribute.tskgeoareapoints.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.JSON);

	public static final Type TSK_REALM = new Type(153, "TSK_REALM",
			bundle.getString("BlackboardAttribute.tskRealm.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING);

	public static final Type TSK_HOST = new Type(154, "TSK_HOST",
			bundle.getString("BlackboardAttribute.tskHost.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING);

	public static final Type TSK_HOME_DIR = new Type(155, "TSK_HOME_DIR",
			bundle.getString("BlackboardAttribute.tskHomeDir.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING);

	public static final Type TSK_IS_ADMIN = new Type(156, "TSK_IS_ADMIN",
			bundle.getString("BlackboardAttribute.tskIsAdmin.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.INTEGER);

	public static final Type TSK_CORRELATION_TYPE = new Type(157, "TSK_CORRELATION_TYPE",
			bundle.getString("BlackboardAttribute.tskCorrelationType.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING);

	public static final Type TSK_CORRELATION_VALUE = new Type(158, "TSK_CORRELATION_VALUE",
			bundle.getString("BlackboardAttribute.tskCorrelationValue.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING);

	public static final Type TSK_OTHER_CASES = new Type(159, "TSK_OTHER_CASES",
			bundle.getString("BlackboardAttribute.tskOtherCases.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING);

	// NOTE: When adding a new standard BlackboardAttribute.Type, add the
	// instance above and then add it to the STANDARD_TYPES list below.

	/**
	 * A list of all the standard attribute types.
	 */
	static final List STANDARD_TYPES = Collections.unmodifiableList(Arrays.asList(
			TSK_URL, TSK_DATETIME, TSK_NAME, TSK_PROG_NAME, TSK_VALUE, TSK_FLAG,
			TSK_PATH, TSK_KEYWORD, TSK_KEYWORD_REGEXP, TSK_KEYWORD_PREVIEW,
			TSK_USER_NAME, TSK_DOMAIN, TSK_PASSWORD, TSK_NAME_PERSON,
			TSK_DEVICE_MODEL, TSK_DEVICE_MAKE, TSK_DEVICE_ID, TSK_EMAIL,
			TSK_HASH_MD5, TSK_HASH_SHA1, TSK_HASH_SHA2_256, TSK_HASH_SHA2_512,
			TSK_TEXT, TSK_TEXT_FILE, TSK_TEXT_LANGUAGE, TSK_ENTROPY,
			TSK_REFERRER, TSK_DATETIME_ACCESSED, TSK_IP_ADDRESS, TSK_PHONE_NUMBER,
			TSK_PATH_ID, TSK_SET_NAME, TSK_MALWARE_DETECTED, TSK_STEG_DETECTED,
			TSK_EMAIL_TO, TSK_EMAIL_CC, TSK_EMAIL_BCC, TSK_EMAIL_FROM,
			TSK_EMAIL_CONTENT_PLAIN, TSK_EMAIL_CONTENT_HTML, TSK_EMAIL_CONTENT_RTF,
			TSK_MSG_ID, TSK_MSG_REPLY_ID, TSK_DATETIME_RCVD, TSK_DATETIME_SENT,
			TSK_SUBJECT, TSK_TITLE, TSK_GEO_LATITUDE, TSK_GEO_LONGITUDE,
			TSK_GEO_VELOCITY, TSK_GEO_ALTITUDE, TSK_GEO_BEARING, TSK_GEO_HPRECISION,
			TSK_GEO_VPRECISION, TSK_GEO_MAPDATUM, TSK_FILE_TYPE_EXT, TSK_COMMENT,
			TSK_URL_DECODED, TSK_DATETIME_CREATED, TSK_DATETIME_MODIFIED,
			TSK_PROCESSOR_ARCHITECTURE, TSK_VERSION, TSK_USER_ID, TSK_DESCRIPTION,
			TSK_MESSAGE_TYPE, TSK_PHONE_NUMBER_HOME, TSK_PHONE_NUMBER_OFFICE,
			TSK_PHONE_NUMBER_MOBILE, TSK_PHONE_NUMBER_FROM, TSK_PHONE_NUMBER_TO,
			TSK_DIRECTION, TSK_EMAIL_HOME, TSK_EMAIL_OFFICE, TSK_DATETIME_START,
			TSK_DATETIME_END, TSK_CALENDAR_ENTRY_TYPE, TSK_LOCATION, TSK_SHORTCUT,
			TSK_DEVICE_NAME, TSK_CATEGORY, TSK_EMAIL_REPLYTO, TSK_SERVER_NAME,
			TSK_COUNT, TSK_MIN_COUNT, TSK_PATH_SOURCE, TSK_PERMISSIONS,
			TSK_ASSOCIATED_ARTIFACT, TSK_ISDELETED, TSK_GEO_LATITUDE_START,
			TSK_GEO_LATITUDE_END, TSK_GEO_LONGITUDE_START, TSK_GEO_LONGITUDE_END,
			TSK_READ_STATUS, TSK_LOCAL_PATH, TSK_REMOTE_PATH, TSK_TEMP_DIR,
			TSK_PRODUCT_ID, TSK_OWNER, TSK_ORGANIZATION, TSK_CARD_NUMBER,
			TSK_CARD_EXPIRATION, TSK_CARD_SERVICE_CODE, TSK_CARD_DISCRETIONARY,
			TSK_CARD_LRC, TSK_KEYWORD_SEARCH_DOCUMENT_ID, TSK_CARD_SCHEME,
			TSK_CARD_TYPE, TSK_BRAND_NAME, TSK_BANK_NAME, TSK_COUNTRY, TSK_CITY,
			TSK_ACCOUNT_TYPE, TSK_KEYWORD_SEARCH_TYPE, TSK_HEADERS, TSK_ID,
			TSK_SSID, TSK_BSSID, TSK_MAC_ADDRESS, TSK_IMEI, TSK_IMSI, TSK_ICCID,
			TSK_THREAD_ID, TSK_TL_EVENT_TYPE, TSK_DATETIME_DELETED,
			TSK_DATETIME_PASSWORD_RESET, TSK_DATETIME_PASSWORD_FAIL,
			TSK_DISPLAY_NAME, TSK_PASSWORD_SETTINGS, TSK_ACCOUNT_SETTINGS,
			TSK_PASSWORD_HINT, TSK_GROUPS, TSK_ATTACHMENTS, TSK_GEO_TRACKPOINTS,
			TSK_GEO_WAYPOINTS, TSK_DISTANCE_TRAVELED, TSK_DISTANCE_FROM_HOMEPOINT,
			TSK_HASH_PHOTODNA, TSK_BYTES_SENT, TSK_BYTES_RECEIVED,
			TSK_LAST_PRINTED_DATETIME, TSK_RULE, TSK_ACTIVITY_TYPE,
			TSK_GEO_AREAPOINTS, TSK_REALM, TSK_HOST, TSK_HOME_DIR, TSK_IS_ADMIN,
			TSK_CORRELATION_TYPE, TSK_CORRELATION_VALUE, TSK_OTHER_CASES
	));

	private static final long serialVersionUID = 1L;
	// Unique machine-readable name of the type, e.g. "TSK_URL".
	private final String typeName;
	// Numeric id of the type; standard types use the ids assigned above.
	private final int typeID;
	// Localized, human-readable name of the type (from the resource bundle).
	private final String displayName;
	// Kind of value (string, long, datetime, ...) attributes of this type carry.
	private final TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE valueType;

	/**
	 * Constructs an attribute type.
	 *
	 * @param typeID      The type id.
	 * @param typeName    The type name.
	 * @param displayName The display name for the type.
	 * @param valueType   The type of the value.
	 */
	public Type(int typeID, String typeName, String displayName, TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE valueType) {
		this.typeID = typeID;
		this.typeName = typeName;
		this.displayName = displayName;
		this.valueType = valueType;
	}

	/**
	 * Constructs a standard attribute type.
	 *
	 * @param type The specification of the type provided by the
	 *             BlackboardAttribute.ATTRIBUTE_TYPE enumeration.
	 */
	public Type(BlackboardAttribute.ATTRIBUTE_TYPE type) {
		this.typeID = type.getTypeID();
		this.typeName = type.getLabel();
		this.displayName = type.getDisplayName();
		this.valueType = type.getValueType();
	}

	/**
	 * Gets the value type of this attribute type.
	 *
	 * @return The value type.
	 */
	public TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE getValueType() {
		return this.valueType;
	}

	/**
	 * Gets the type name of this attribute type.
*
	 * @return The type name.
	 */
	public String getTypeName() {
		return this.typeName;
	}

	/**
	 * Gets the type id of this attribute type.
	 *
	 * @return The type id.
	 */
	public int getTypeID() {
		return this.typeID;
	}

	/**
	 * Gets the display name of this attribute type.
	 *
	 * @return The display name.
	 */
	public String getDisplayName() {
		return this.displayName;
	}

	@Override
	public boolean equals(Object that) {
		if (this == that) {
			return true;
		} else if (!(that instanceof BlackboardAttribute.Type)) {
			return false;
		} else {
			// Delegate so the field-by-field comparison lives in one place.
			return ((BlackboardAttribute.Type) that).sameType(this);
		}
	}

	/**
	 * Determines if this attribute type object is equivalent to another
	 * attribute type object. All four fields (name, display name, id, and
	 * value type) must match.
	 *
	 * @param that the other type
	 *
	 * @return true if it is the same type
	 */
	private boolean sameType(BlackboardAttribute.Type that) {
		return this.typeName.equals(that.getTypeName())
				&& this.displayName.equals(that.getDisplayName())
				&& this.typeID == that.getTypeID()
				&& this.valueType == that.getValueType();
	}

	@Override
	public int hashCode() {
		// Mixes the same four fields that sameType() compares, keeping
		// equals()/hashCode() consistent.
		int hash = 7;
		hash = 63 * hash + Objects.hashCode(this.typeID);
		hash = 63 * hash + Objects.hashCode(this.displayName);
		hash = 63 * hash + Objects.hashCode(this.typeName);
		hash = 63 * hash + Objects.hashCode(this.valueType);
		return hash;
	}

	@Override
	public String toString() {
		return "(typeID= " + this.typeID
				+ ", displayName=" + this.displayName
				+ ", typeName=" + this.typeName
				+ ", valueType=" + this.valueType + ")";
	}
}

/**
 * Specifies the type ids and display names of the supported attribute value
 * types.
 */
public enum TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE {

	/**
	 * The value type of the attribute is a string.
	 */
	STRING(0, "String"), //NON-NLS
	/**
	 * The value type of the attribute is an int.
	 */
	INTEGER(1, "Integer"), //NON-NLS
	/**
	 * The value type of the attribute is a long.
	 */
	LONG(2, "Long"), //NON-NLS
	/**
	 * The value type of the attribute is a double.
	 */
	DOUBLE(3, "Double"), //NON-NLS
	/**
	 * The value type of the attribute is a byte array.
	 */
	BYTE(4, "Byte"), //NON-NLS
	/**
	 * The value type of the attribute is a long representing seconds from
	 * January 1, 1970.
	 */
	DATETIME(5, "DateTime"),
	/**
	 * The value type of the attribute is a JSON string.
	 */
	JSON(6, "Json");

	// Numeric id of the value type, as persisted in the case database.
	private final long typeId;
	// Name of the value type, e.g. "String".
	private final String typeName;

	/*
	 * TODO (AUT-2070): Add a localized displayName field and a
	 * getDisplayName method for API consistency.
	 */
	/**
	 * Constructs an attribute value type object.
	 *
	 * @param type     The type id of the value type.
	 * @param typeName The type name of the value type.
	 */
	private TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE(long type, String typeName) {
		this.typeId = type;
		this.typeName = typeName;
	}

	/**
	 * Gets the type id for this attribute value type.
	 *
	 * TODO (AUT-2070): Deprecate and provide a getTypeId method instead for
	 * API consistency.
	 *
	 * @return attribute value type id
	 */
	public long getType() {
		return typeId;
	}

	/**
	 * Gets the type name for this attribute value type.
	 *
	 * TODO (AUT-2070): Deprecate and provide a getTypeName method instead
	 * for API consistency.
	 *
	 * @return attribute value type name
	 */
	public String getLabel() {
		return this.typeName;
	}

	/**
	 * Gets the attribute value type for a given value type id.
	 *
	 * @param typeId A value type id.
	 *
	 * @return A BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE
	 *         object.
	 *
	 * @throws IllegalArgumentException If the given type id does not map to
	 *                                  a supported value type.
	 *
	 * TODO (AUT-2070): Deprecate and provide a fromTypeId method instead
	 * for API consistency.
	 */
	static public TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE fromType(long typeId) {
		for (TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE valueType : TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.values()) {
			if (valueType.getType() == typeId) {
				return valueType;
			}
		}
		throw new IllegalArgumentException("No TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE matching type: " + typeId);
	}

	/**
	 * Gets the attribute value type for a given value type name.
	 *
	 * @param typeName A type name.
	 *
	 * @return A BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE
	 *         object.
	 *
	 * @throws IllegalArgumentException If the given type name does not map
	 *                                  to a supported value type.
	 *
	 * TODO (AUT-2070): Deprecate and provide a fromTypeName method instead
	 * for API consistency.
	 */
	static public TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE fromLabel(String typeName) {
		for (TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE valueType : TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.values()) {
			if (valueType.getLabel().equals(typeName)) {
				return valueType;
			}
		}
		throw new IllegalArgumentException("No TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE matching type: " + typeName);
	}
}

/**
 * Specifies the type ids, type names, display names, and value types of the
 * standard attribute types. See
 * http://wiki.sleuthkit.org/index.php?title=Artifact_Examples for more
 * information.
 */
public enum ATTRIBUTE_TYPE {

	// NOTE(review): ids 5 and 9 are skipped here — presumably retired
	// early types; confirm against the schema history before reusing them.
	TSK_URL(1, "TSK_URL", //NON-NLS
			bundle.getString("BlackboardAttribute.tskUrl.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING),
	TSK_DATETIME(2, "TSK_DATETIME", //NON-NLS
			bundle.getString("BlackboardAttribute.tskDatetime.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DATETIME),
	TSK_NAME(3, "TSK_NAME", //NON-NLS
			bundle.getString("BlackboardAttribute.tskName.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING),
	TSK_PROG_NAME(4, "TSK_PROG_NAME", //NON-NLS
			bundle.getString("BlackboardAttribute.tskProgName.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING),
	TSK_VALUE(6, "TSK_VALUE", //NON-NLS
			bundle.getString("BlackboardAttribute.tskValue.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING),
	TSK_FLAG(7, "TSK_FLAG", //NON-NLS
			bundle.getString("BlackboardAttribute.tskFlag.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING),
	TSK_PATH(8, "TSK_PATH", //NON-NLS
			bundle.getString("BlackboardAttribute.tskPath.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING),
	TSK_KEYWORD(10, "TSK_KEYWORD", //NON-NLS
			bundle.getString("BlackboardAttribute.tskKeyword.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING),
	TSK_KEYWORD_REGEXP(11,
"TSK_KEYWORD_REGEXP", //NON-NLS bundle.getString("BlackboardAttribute.tskKeywordRegexp.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING), TSK_KEYWORD_PREVIEW(12, "TSK_KEYWORD_PREVIEW", //NON-NLS bundle.getString("BlackboardAttribute.tskKeywordPreview.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING), /** * @deprecated Use a TSK_SET_NAME attribute instead. */ @Deprecated TSK_KEYWORD_SET(13, "TSK_KEYWORD_SET", //NON-NLS bundle.getString("BlackboardAttribute.tskKeywordSet.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING), TSK_USER_NAME(14, "TSK_USER_NAME", //NON-NLS bundle.getString("BlackboardAttribute.tskUserName.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING), TSK_DOMAIN(15, "TSK_DOMAIN", //NON-NLS bundle.getString("BlackboardAttribute.tskDomain.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING), TSK_PASSWORD(16, "TSK_PASSWORD", //NON-NLS bundle.getString("BlackboardAttribute.tskPassword.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING), TSK_NAME_PERSON(17, "TSK_NAME_PERSON", //NON-NLS bundle.getString("BlackboardAttribute.tskNamePerson.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING), TSK_DEVICE_MODEL(18, "TSK_DEVICE_MODEL", //NON-NLS bundle.getString("BlackboardAttribute.tskDeviceModel.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING), TSK_DEVICE_MAKE(19, "TSK_DEVICE_MAKE", //NON-NLS bundle.getString("BlackboardAttribute.tskDeviceMake.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING), TSK_DEVICE_ID(20, "TSK_DEVICE_ID", //NON-NLS bundle.getString("BlackboardAttribute.tskDeviceId.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING), TSK_EMAIL(21, "TSK_EMAIL", //NON-NLS bundle.getString("BlackboardAttribute.tskEmail.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING), TSK_HASH_MD5(22, "TSK_HASH_MD5", //NON-NLS bundle.getString("BlackboardAttribute.tskHashMd5.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING), TSK_HASH_SHA1(23, "TSK_HASH_SHA1", //NON-NLS bundle.getString("BlackboardAttribute.tskHashSha1.text"), 
TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING), TSK_HASH_SHA2_256(24, "TSK_HASH_SHA2_256", //NON-NLS bundle.getString("BlackboardAttribute.tskHashSha225.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING), TSK_HASH_SHA2_512(25, "TSK_HASH_SHA2_512", //NON-NLS bundle.getString("BlackboardAttribute.tskHashSha2512.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING), TSK_TEXT(26, "TSK_TEXT", //NON-NLS bundle.getString("BlackboardAttribute.tskText.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING), TSK_TEXT_FILE(27, "TSK_TEXT_FILE", //NON-NLS bundle.getString("BlackboardAttribute.tskTextFile.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING), TSK_TEXT_LANGUAGE(28, "TSK_TEXT_LANGUAGE", //NON-NLS bundle.getString("BlackboardAttribute.tskTextLanguage.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING), TSK_ENTROPY(29, "TSK_ENTROPY", //NON-NLS bundle.getString("BlackboardAttribute.tskEntropy.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DOUBLE), /** * @deprecated Use a TSK_SET_NAME attribute instead. */ @Deprecated TSK_HASHSET_NAME(30, "TSK_HASHSET_NAME", //NON-NLS bundle.getString("BlackboardAttribute.tskHashsetName.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING), /** * @deprecated Use a TSK_INTERESTING_FILE_HIT artifact instead. 
*/ @Deprecated TSK_INTERESTING_FILE(31, "TSK_INTERESTING_FILE", //NON-NLS bundle.getString("BlackboardAttribute.tskInterestingFile.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.LONG), TSK_REFERRER(32, "TSK_REFERRER", //NON-NLS bundle.getString("BlackboardAttribute.tskReferrer.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING), TSK_DATETIME_ACCESSED(33, "TSK_DATETIME_ACCESSED", //NON-NLS bundle.getString("BlackboardAttribute.tskDateTimeAccessed.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DATETIME), TSK_IP_ADDRESS(34, "TSK_IP_ADDRESS", //NON-NLS bundle.getString("BlackboardAttribute.tskIpAddress.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING), TSK_PHONE_NUMBER(35, "TSK_PHONE_NUMBER", //NON-NLS bundle.getString("BlackboardAttribute.tskPhoneNumber.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING), TSK_PATH_ID(36, "TSK_PATH_ID", //NON-NLS bundle.getString("BlackboardAttribute.tskPathId.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.LONG), TSK_SET_NAME(37, "TSK_SET_NAME", //NON-NLS bundle.getString("BlackboardAttribute.tskSetName.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING), /** * @deprecated Use a TSK_ENCRYPTION_DETECTED artifact instead. 
*/ @Deprecated TSK_ENCRYPTION_DETECTED(38, "TSK_ENCRYPTION_DETECTED", //NON-NLS bundle.getString("BlackboardAttribute.tskEncryptionDetected.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.INTEGER), TSK_MALWARE_DETECTED(39, "TSK_MALWARE_DETECTED", //NON-NLS bundle.getString("BlackboardAttribute.tskMalwareDetected.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.INTEGER), TSK_STEG_DETECTED(40, "TSK_STEG_DETECTED", //NON-NLS bundle.getString("BlackboardAttribute.tskStegDetected.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.INTEGER), TSK_EMAIL_TO(41, "TSK_EMAIL_TO", //NON-NLS bundle.getString("BlackboardAttribute.tskEmailTo.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING), TSK_EMAIL_CC(42, "TSK_EMAIL_CC", //NON-NLS bundle.getString("BlackboardAttribute.tskEmailCc.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING), TSK_EMAIL_BCC(43, "TSK_EMAIL_BCC", //NON-NLS bundle.getString("BlackboardAttribute.tskEmailBcc.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING), TSK_EMAIL_FROM(44, "TSK_EMAIL_FROM", //NON-NLS bundle.getString("BlackboardAttribute.tskEmailFrom.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING), TSK_EMAIL_CONTENT_PLAIN(45, "TSK_EMAIL_CONTENT_PLAIN", //NON-NLS bundle.getString("BlackboardAttribute.tskEmailContentPlain.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING), TSK_EMAIL_CONTENT_HTML(46, "TSK_EMAIL_CONTENT_HTML", //NON-NLS bundle.getString("BlackboardAttribute.tskEmailContentHtml.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING), TSK_EMAIL_CONTENT_RTF(47, "TSK_EMAIL_CONTENT_RTF", //NON-NLS bundle.getString("BlackboardAttribute.tskEmailContentRtf.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING), TSK_MSG_ID(48, "TSK_MSG_ID", //NON-NLS bundle.getString("BlackboardAttribute.tskMsgId.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING), TSK_MSG_REPLY_ID(49, "TSK_MSG_REPLY_ID", //NON-NLS bundle.getString("BlackboardAttribute.tskMsgReplyId.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING), TSK_DATETIME_RCVD(50, "TSK_DATETIME_RCVD", //NON-NLS 
bundle.getString("BlackboardAttribute.tskDateTimeRcvd.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DATETIME), TSK_DATETIME_SENT(51, "TSK_DATETIME_SENT", //NON-NLS bundle.getString("BlackboardAttribute.tskDateTimeSent.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DATETIME), TSK_SUBJECT(52, "TSK_SUBJECT", //NON-NLS bundle.getString("BlackboardAttribute.tskSubject.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING), TSK_TITLE(53, "TSK_TITLE", //NON-NLS bundle.getString("BlackboardAttribute.tskTitle.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING), TSK_GEO_LATITUDE(54, "TSK_GEO_LATITUDE", //NON-NLS bundle.getString("BlackboardAttribute.tskGeoLatitude.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DOUBLE), TSK_GEO_LONGITUDE(55, "TSK_GEO_LONGITUDE", //NON-NLS bundle.getString("BlackboardAttribute.tskGeoLongitude.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DOUBLE), TSK_GEO_VELOCITY(56, "TSK_GEO_VELOCITY", //NON-NLS bundle.getString("BlackboardAttribute.tskGeoVelocity.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DOUBLE), TSK_GEO_ALTITUDE(57, "TSK_GEO_ALTITUDE", //NON-NLS bundle.getString("BlackboardAttribute.tskGeoAltitude.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DOUBLE), TSK_GEO_BEARING(58, "TSK_GEO_BEARING", //NON-NLS bundle.getString("BlackboardAttribute.tskGeoBearing.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING), TSK_GEO_HPRECISION(59, "TSK_GEO_HPRECISION", //NON-NLS bundle.getString("BlackboardAttribute.tskGeoHPrecision.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DOUBLE), TSK_GEO_VPRECISION(60, "TSK_GEO_VPRECISION", //NON-NLS bundle.getString("BlackboardAttribute.tskGeoVPrecision.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DOUBLE), TSK_GEO_MAPDATUM(61, "TSK_GEO_MAPDATUM", //NON-NLS bundle.getString("BlackboardAttribute.tskGeoMapDatum.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING), /** * @deprecated Use the mime type field of the AbstractFile object * instead. 
*/ @Deprecated TSK_FILE_TYPE_SIG(62, "TSK_FILE_TYPE_SIG", //NON-NLS bundle.getString("BlackboardAttribute.tskFileTypeSig.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING), TSK_FILE_TYPE_EXT(63, "TSK_FILE_TYPE_EXT", //NON-NLS bundle.getString("BlackboardAttribute.tskFileTypeExt.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING), /** * @deprecated Do not use. Tags are no longer implemented as artifact * attributes. */ @Deprecated TSK_TAGGED_ARTIFACT(64, "TSK_TAGGED_ARTIFACT", //NON-NLS bundle.getString("BlackboardAttribute.tskTaggedArtifact.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.LONG), /** * @deprecated Do not use. Tags are no longer implemented as artifact * attributes. */ @Deprecated TSK_TAG_NAME(65, "TSK_TAG_NAME", //NON-NLS bundle.getString("BlackboardAttribute.tskTagName.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING), TSK_COMMENT(66, "TSK_COMMENT", //NON-NLS bundle.getString("BlackboardAttribute.tskComment.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING), TSK_URL_DECODED(67, "TSK_URL_DECODED", //NON-NLS bundle.getString("BlackboardAttribute.tskUrlDecoded.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING), TSK_DATETIME_CREATED(68, "TSK_DATETIME_CREATED", //NON-NLS bundle.getString("BlackboardAttribute.tskDateTimeCreated.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DATETIME), TSK_DATETIME_MODIFIED(69, "TSK_DATETIME_MODIFIED", //NON-NLS bundle.getString("BlackboardAttribute.tskDateTimeModified.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DATETIME), TSK_PROCESSOR_ARCHITECTURE(70, "TSK_PROCESSOR_ARCHITECTURE", //NON-NLS bundle.getString("BlackboardAttribute.tskProcessorArchitecture.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING), TSK_VERSION(71, "TSK_VERSION", //NON-NLS bundle.getString("BlackboardAttribute.tskVersion.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING), TSK_USER_ID(72, "TSK_USER_ID", //NON-NLS bundle.getString("BlackboardAttribute.tskUserId.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING), TSK_DESCRIPTION(73, "TSK_DESCRIPTION", 
//NON-NLS bundle.getString("BlackboardAttribute.tskDescription.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING), TSK_MESSAGE_TYPE(74, "TSK_MESSAGE_TYPE", //NON-NLS bundle.getString("BlackboardAttribute.tskMessageType.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING), // SMS or MMS or IM ... TSK_PHONE_NUMBER_HOME(75, "TSK_PHONE_NUMBER_HOME", //NON-NLS bundle.getString("BlackboardAttribute.tskPhoneNumberHome.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING), TSK_PHONE_NUMBER_OFFICE(76, "TSK_PHONE_NUMBER_OFFICE", //NON-NLS bundle.getString("BlackboardAttribute.tskPhoneNumberOffice.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING), TSK_PHONE_NUMBER_MOBILE(77, "TSK_PHONE_NUMBER_MOBILE", //NON-NLS bundle.getString("BlackboardAttribute.tskPhoneNumberMobile.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING), TSK_PHONE_NUMBER_FROM(78, "TSK_PHONE_NUMBER_FROM", //NON-NLS bundle.getString("BlackboardAttribute.tskPhoneNumberFrom.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING), TSK_PHONE_NUMBER_TO(79, "TSK_PHONE_NUMBER_TO", //NON-NLS bundle.getString("BlackboardAttribute.tskPhoneNumberTo.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING), TSK_DIRECTION(80, "TSK_DIRECTION", //NON-NLS bundle.getString("BlackboardAttribute.tskDirection.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING), // Msg/Call direction: incoming, outgoing TSK_EMAIL_HOME(81, "TSK_EMAIL_HOME", //NON-NLS bundle.getString("BlackboardAttribute.tskEmailHome.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING), TSK_EMAIL_OFFICE(82, "TSK_EMAIL_OFFICE", //NON-NLS bundle.getString("BlackboardAttribute.tskEmailOffice.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING), TSK_DATETIME_START(83, "TSK_DATETIME_START", //NON-NLS bundle.getString("BlackboardAttribute.tskDateTimeStart.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DATETIME), // start time of an event - call log, Calendar entry TSK_DATETIME_END(84, "TSK_DATETIME_END", //NON-NLS bundle.getString("BlackboardAttribute.tskDateTimeEnd.text"), 
TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DATETIME), // end time of an event - call log, Calendar entry TSK_CALENDAR_ENTRY_TYPE(85, "TSK_CALENDAR_ENTRY_TYPE", //NON-NLS bundle.getString("BlackboardAttribute.tskCalendarEntryType.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING), // meeting, task, TSK_LOCATION(86, "TSK_LOCATION", //NON-NLS bundle.getString("BlackboardAttribute.tskLocation.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING), // Location string associated with an event - Conf Room Name, Address .... TSK_SHORTCUT(87, "TSK_SHORTCUT", //NON-NLS bundle.getString("BlackboardAttribute.tskShortcut.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING), // Short Cut string - short code or dial string for Speed dial, a URL short cut - e.g. bitly string, Windows Desktop Short cut name etc. TSK_DEVICE_NAME(88, "TSK_DEVICE_NAME", //NON-NLS bundle.getString("BlackboardAttribute.tskDeviceName.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING), // device name - a user assigned (usually) device name - such as "Joe's computer", "bob_win8", "BT Headset" TSK_CATEGORY(89, "TSK_CATEGORY", //NON-NLS bundle.getString("BlackboardAttribute.tskCategory.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING), // category/type, possible value set varies by the artifact TSK_EMAIL_REPLYTO(90, "TSK_EMAIL_REPLYTO", //NON-NLS bundle.getString("BlackboardAttribute.tskEmailReplyTo.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING), // ReplyTo address TSK_SERVER_NAME(91, "TSK_SERVER_NAME", //NON-NLS bundle.getString("BlackboardAttribute.tskServerName.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING), // server name, e.g. a mail server name - "smtp.google.com", a DNS server name... 
TSK_COUNT(92, "TSK_COUNT", //NON-NLS bundle.getString("BlackboardAttribute.tskCount.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.INTEGER), // Count related to the artifact TSK_MIN_COUNT(93, "TSK_MIN_COUNT", //NON-NLS bundle.getString("BlackboardAttribute.tskMinCount.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.INTEGER), // Minimum number/count TSK_PATH_SOURCE(94, "TSK_PATH_SOURCE", //NON-NLS bundle.getString("BlackboardAttribute.tskPathSource.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING), // Path to a source file related to the artifact TSK_PERMISSIONS(95, "TSK_PERMISSIONS", //NON-NLS bundle.getString("BlackboardAttribute.tskPermissions.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING), // Permissions TSK_ASSOCIATED_ARTIFACT(96, "TSK_ASSOCIATED_ARTIFACT", //NON-NLS bundle.getString("BlackboardAttribute.tskAssociatedArtifact.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.LONG), // Artifact ID of a related artifact TSK_ISDELETED(97, "TSK_ISDELETED", //NON-NLS bundle.getString("BlackboardAttribute.tskIsDeleted.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING), // boolean to indicate that the artifact is recovered fom deleted content TSK_GEO_LATITUDE_START(98, "TSK_GEO_LATITUDE_START", //NON-NLS bundle.getString("BlackboardAttribute.tskGeoLatitudeStart.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DOUBLE), // Starting location lattitude TSK_GEO_LATITUDE_END(99, "TSK_GEO_LATITUDE_END", //NON-NLS bundle.getString("BlackboardAttribute.tskGeoLatitudeEnd.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DOUBLE), // Ending location lattitude TSK_GEO_LONGITUDE_START(100, "TSK_GEO_LONGITUDE_START", //NON-NLS bundle.getString("BlackboardAttribute.tskGeoLongitudeStart.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DOUBLE), // Starting location longitude TSK_GEO_LONGITUDE_END(101, "TSK_GEO_LONGITUDE_END", //NON-NLS bundle.getString("BlackboardAttribute.tskGeoLongitudeEnd.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DOUBLE), //Ending Location longitude TSK_READ_STATUS(102, 
"TSK_READ_STATUS", //NON-NLS bundle.getString("BlackboardAttribute.tskReadStatus.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.INTEGER), // Message read status: 1 if read, 0 if unread TSK_LOCAL_PATH(103, "TSK_LOCAL_PATH", //NON-NLS bundle.getString("BlackboardAttribute.tskLocalPath.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING), // Local path to a network drive TSK_REMOTE_PATH(104, "TSK_REMOTE_PATH", //NON-NLS bundle.getString("BlackboardAttribute.tskRemotePath.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING), // Remote path of a network drive TSK_TEMP_DIR(105, "TSK_TEMP_DIR", //NON-NLS bundle.getString("BlackboardAttribute.tskTempDir.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING), // Default temporary files directory TSK_PRODUCT_ID(106, "TSK_PRODUCT_ID", //NON-NLS bundle.getString("BlackboardAttribute.tskProductId.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING), // Product ID TSK_OWNER(107, "TSK_OWNER", //NON-NLS bundle.getString("BlackboardAttribute.tskOwner.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING), // Registered owner of a piece of software TSK_ORGANIZATION(108, "TSK_ORGANIZATION", //NON-NLS bundle.getString("BlackboardAttribute.tskOrganization.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING), // Registered Organization for a piece of software TSK_CARD_NUMBER(109, "TSK_CARD_NUMBER", //NON-NLS bundle.getString("BlackboardAttribute.tskCardNumber.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING), TSK_CARD_EXPIRATION(110, "TSK_CARD_EXPIRATION", //for card as 4 digits MMYY //NON-NLS bundle.getString("BlackboardAttribute.tskCardExpiration.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING), TSK_CARD_SERVICE_CODE(111, "TSK_CARD_SERVICE_CODE", // 3 digits //NON-NLS bundle.getString("BlackboardAttribute.tskCardServiceCode.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING), TSK_CARD_DISCRETIONARY(112, "TSK_CARD_DISCRETIONARY", //data used at the discretion of the issuer //NON-NLS 
bundle.getString("BlackboardAttribute.tskCardDiscretionary.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING), TSK_CARD_LRC(113, "TSK_CARD_LRC", //NON-NLS //Longitudunal Redundancy Check character //NON-NLS bundle.getString("BlackboardAttribute.tskCardLRC.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING), TSK_KEYWORD_SEARCH_DOCUMENT_ID(114, "TSK_KEYWORD_SEARCH_DOCUMENT_ID", //NON-NLS bundle.getString("BlackboardAttribute.tskKeywordSearchDocumentID.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING), TSK_CARD_SCHEME(115, "TSK_CARD_SCHEME", //amex, visa, mastercard, discover, etc //NON-NLS bundle.getString("BlackboardAttribute.tskCardScheme.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING), TSK_CARD_TYPE(116, "TSK_CARD_TYPE", // debit vs credit //NON-NLS bundle.getString("BlackboardAttribute.tskCardType.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING), TSK_BRAND_NAME(117, "TSK_BRAND_NAME", bundle.getString("BlackboardAttribute.tskBrandName.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING), TSK_BANK_NAME(118, "TSK_BANK_NAME", bundle.getString("BlackboardAttribute.tskBankName.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING), TSK_COUNTRY(119, "TSK_COUNTRY", bundle.getString("BlackboardAttribute.tskCountry.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING), TSK_CITY(120, "TSK_CITY", bundle.getString("BlackboardAttribute.tskCity.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING), TSK_ACCOUNT_TYPE(121, "TSK_ACCOUNT_TYPE", bundle.getString("BlackboardAttribute.tskAccountType.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING), /** * Keyword search type: exact match, sub-string, or regex. 
*/ TSK_KEYWORD_SEARCH_TYPE(122, "TSK_KEYWORD_SEARCH_TYPE", //NON-NLS bundle.getString("BlackboardAttribute.tskKeywordSearchType.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.INTEGER), TSK_HEADERS(123, "TSK_HEADERS", //NON-NLS bundle.getString("BlackboardAttribute.tskHeaders.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING), TSK_ID(124, "TSK_ID", //NON-NLS bundle.getString("BlackboardAttribute.tskId.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING), TSK_SSID(125, "TSK_SSID", //NON-NLS bundle.getString("BlackboardAttribute.tskSsid.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING), TSK_BSSID(126, "TSK_BSSID", //NON-NLS bundle.getString("BlackboardAttribute.tskBssid.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING), TSK_MAC_ADDRESS(127, "TSK_MAC_ADDRESS", //NON-NLS bundle.getString("BlackboardAttribute.tskMacAddress.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING), TSK_IMEI(128, "TSK_IMEI", //NON-NLS bundle.getString("BlackboardAttribute.tskImei.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING), TSK_IMSI(129, "TSK_IMSI", //NON-NLS bundle.getString("BlackboardAttribute.tskImsi.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING), TSK_ICCID(130, "TSK_ICCID", //NON-NLS bundle.getString("BlackboardAttribute.tskIccid.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING), TSK_THREAD_ID(131, "TSK_THREAD_ID", bundle.getString("BlackboardAttribute.tskthreadid.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING), /** * The event type of a TSK_TL_EVENT artifact. The value should be the id * of the EventType in the tsk_event_types table. 
*/ TSK_TL_EVENT_TYPE(132, "TSK_TL_EVENT_TYPE", //NON-NLS bundle.getString("BlackboardAttribute.tskTLEventType.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.LONG), TSK_DATETIME_DELETED(133, "TSK_DATETIME_DELETED", //NON-NLS bundle.getString("BlackboardAttribute.tskdatetimedeleted.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DATETIME), TSK_DATETIME_PASSWORD_RESET(134, "TSK_DATETIME_PASSWORD_RESET", bundle.getString("BlackboardAttribute.tskdatetimepwdreset.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DATETIME), TSK_DATETIME_PASSWORD_FAIL(135, "TSK_DATETIME_PWD_FAIL", bundle.getString("BlackboardAttribute.tskdatetimepwdfail.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DATETIME), TSK_DISPLAY_NAME(136, "TSK_DISPLAY_NAME", bundle.getString("BlackboardAttribute.tskdisplayname.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING), TSK_PASSWORD_SETTINGS(137, "TSK_PASSWORD_SETTINGS", bundle.getString("BlackboardAttribute.tskpasswordsettings.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING), TSK_ACCOUNT_SETTINGS(138, "TSK_ACCOUNT_SETTINGS", bundle.getString("BlackboardAttribute.tskaccountsettings.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING), TSK_PASSWORD_HINT(139, "TSK_PASSWORD_HINT", bundle.getString("BlackboardAttribute.tskpasswordhint.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING), TSK_GROUPS(140, "TSK_GROUPS", bundle.getString("BlackboardAttribute.tskgroups.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING), /* * Use * org.sleuthkit.datamodel.blackboardutils.attributes.MessageAttachments * to create and process TSK_ATTACHMENTS attributes. */ TSK_ATTACHMENTS(141, "TSK_ATTACHMENTS", bundle.getString("BlackboardAttribute.tskattachments.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.JSON), /* * Use org.sleuthkit.datamodel.blackboardutils.attributes.GeoTrackPoints * to create and process TSK_GEO_TRACKPOINTS attributes. 
*/ TSK_GEO_TRACKPOINTS(142, "TSK_GEO_TRACKPOINTS", bundle.getString("BlackboardAttribute.tskgeopath.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.JSON), /* * Use org.sleuthkit.datamodel.blackboardutils.attributes.GeoWaypoints * to create and process TSK_GEO_WAYPOINTS attributes. */ TSK_GEO_WAYPOINTS(143, "TSK_GEO_WAYPOINTS", bundle.getString("BlackboardAttribute.tskgeowaypoints.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.JSON), TSK_DISTANCE_TRAVELED(144, "TSK_DISTANCE_TRAVELED", bundle.getString("BlackboardAttribute.tskdistancetraveled.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DOUBLE), TSK_DISTANCE_FROM_HOMEPOINT(145, "TSK_DISTANCE_FROM_HOMEPOINT", bundle.getString("BlackboardAttribute.tskdistancefromhome.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DOUBLE), TSK_HASH_PHOTODNA(146, "TSK_HASH_PHOTODNA", bundle.getString("BlackboardAttribute.tskhashphotodna.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING), TSK_BYTES_SENT(147, "TSK_BYTES_SENT", bundle.getString("BlackboardAttribute.tskbytessent.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.LONG), TSK_BYTES_RECEIVED(148, "TSK_BYTES_RECEIVED", bundle.getString("BlackboardAttribute.tskbytesreceived.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.LONG), TSK_LAST_PRINTED_DATETIME(149, "TSK_LAST_PRINTED_DATETIME", bundle.getString("BlackboardAttribute.tsklastprinteddatetime.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DATETIME), TSK_RULE(150, "TSK_RULE", bundle.getString("BlackboardAttribute.tskrule.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING), TSK_ACTIVITY_TYPE(151, "TSK_ACTIVITY_TYPE", bundle.getString("BlackboardAttribute.tskActivityType.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING), /* * Use org.sleuthkit.datamodel.blackboardutils.attributes.GeoAreaPoints * to create and process TSK_GEO_AREAPOINTS attributes. 
*/ TSK_GEO_AREAPOINTS(152, "TSK_GEO_AREAPOINTS", bundle.getString("BlackboardAttribute.tskgeoareapoints.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.JSON), TSK_REALM(153, "TSK_REALM", bundle.getString("BlackboardAttribute.tskRealm.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING), TSK_HOST(154, "TSK_HOST", bundle.getString("BlackboardAttribute.tskHost.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING), TSK_HOME_DIR(155, "TSK_HOME_DIR", bundle.getString("BlackboardAttribute.tskHomeDir.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING), TSK_IS_ADMIN(156, "TSK_IS_ADMIN", bundle.getString("BlackboardAttribute.tskIsAdmin.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.INTEGER), TSK_CORRELATION_TYPE(157, "TSK_CORRELATION_TYPE", bundle.getString("BlackboardAttribute.tskCorrelationType.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING), TSK_CORRELATION_VALUE(158, "TSK_CORRELATION_VALUE", bundle.getString("BlackboardAttribute.tskCorrelationValue.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING), TSK_OTHER_CASES(159, "TSK_OTHER_CASES", bundle.getString("BlackboardAttribute.tskOtherCases.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING),; private final int typeID; private final String typeName; private final String displayName; private final TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE valueType; /** * Constructs a standard attribute type. * * @param typeID The id of the type. * @param typeName The name of the type. * @param displayName The display name of the type * @param valueType The value type of the type. */ private ATTRIBUTE_TYPE(int typeID, String typeName, String displayName, TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE valueType) { this.typeID = typeID; this.typeName = typeName; this.displayName = displayName; this.valueType = valueType; } /** * Gets the type id of this standard attribute type. * * @return The type id. */ public int getTypeID() { return this.typeID; } /** * Gets the type name of this standard attribute type. * * @return The type name. 
* * TODO (AUT-2070): Deprecate and provide a getTypeName method instead * for API consistency. */ public String getLabel() { return this.typeName; } /** * Gets the display name of this standard attribute type. * * @return The display name. */ public String getDisplayName() { return this.displayName; } /** * Gets the value type of this standard attribute type. * * @return the value type */ public TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE getValueType() { return this.valueType; } /** * Gets the standard attribute type for a given type id. * * @param typeID A standard attribute type id. * * @return A BlackboardAttribute.ATTRIBUTE_TYPE object. * * @throws IllegalArgumentException If the given type id does not map to * a standard attribute type. * * TODO (AUT-2070): Deprecate and provide a fromTypeId method instead * for API consistency. */ static public ATTRIBUTE_TYPE fromID(int typeID) { for (ATTRIBUTE_TYPE attrType : ATTRIBUTE_TYPE.values()) { if (attrType.getTypeID() == typeID) { return attrType; } } throw new IllegalArgumentException("No ATTRIBUTE_TYPE matching type: " + typeID); } /** * Gets the standard attribute type for a given type name. * * @param typeName A standard attribute type name. * * @return A BlackboardAttribute.ATTRIBUTE_TYPE object. * * @throws IllegalArgumentException If the given type name does not map * to a standard attribute type. * * TODO (AUT-2070): Deprecate and provide a fromTypeName method instead * for API consistency. */ static public ATTRIBUTE_TYPE fromLabel(String typeName) { for (ATTRIBUTE_TYPE attrType : ATTRIBUTE_TYPE.values()) { if (attrType.getLabel().equals(typeName)) { return attrType; } } throw new IllegalArgumentException("No ATTRIBUTE_TYPE matching type: " + typeName); } } /** * Creates a standard attribute with an integer value. The attribute should * be added to an appropriate artifact. * * @param attributeTypeID The standard attribute type id. * @param moduleName The display name of the module creating this * attribute. 
* @param valueInt        The attribute value.
 *
 * @throws IllegalArgumentException If the value type of the specified
 *                                  standard attribute type is not
 *                                  TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.INTEGER
 *                                  or the type id is not for a standard type.
 * @deprecated
 */
@Deprecated
public BlackboardAttribute(int attributeTypeID, String moduleName, int valueInt) throws IllegalArgumentException {
	// ATTRIBUTE_TYPE.fromID() resolves the numeric id and throws
	// IllegalArgumentException if it is not a standard type id.
	this(ATTRIBUTE_TYPE.fromID(attributeTypeID), moduleName, valueInt);
}

/**
 * Creates a standard attribute with an integer value and extra context.
 * The attribute should be added to an appropriate artifact.
 *
 * @param attributeTypeID The standard attribute type id.
 * @param moduleName      The display name of the module creating this
 *                        attribute.
 * @param context         Extra information about the attribute.
 * @param valueInt        The attribute value.
 *
 * @throws IllegalArgumentException If the value type of the specified
 *                                  standard attribute type is not
 *                                  TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.INTEGER
 *                                  or the type id is not for a standard type.
 * @deprecated
 */
@Deprecated
public BlackboardAttribute(int attributeTypeID, String moduleName, String context, int valueInt) {
	this(attributeTypeID, moduleName, valueInt);
	// replaceNulls() sanitizes the context string; defined elsewhere in this class.
	this.context = replaceNulls(context);
}

/**
 * Creates a standard attribute with a long/datetime value. The attribute
 * should be added to an appropriate artifact.
 *
 * @param attributeTypeID The standard attribute type id.
 * @param moduleName      The display name of the module creating this
 *                        attribute.
 * @param valueLong       The attribute value.
 *
 * @throws IllegalArgumentException If the value type of the specified
 *                                  standard attribute type is not
 *                                  TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.LONG or
 *                                  TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DATETIME
 *                                  or the type id is not for a standard type.
 * @deprecated
 */
@Deprecated
public BlackboardAttribute(int attributeTypeID, String moduleName, long valueLong) throws IllegalArgumentException {
	this(ATTRIBUTE_TYPE.fromID(attributeTypeID), moduleName, valueLong);
}

/**
 * Creates a standard attribute with a long/datetime value and extra
 * context. The attribute should be added to an appropriate artifact.
 *
 * @param attributeTypeID The standard attribute type id.
 * @param moduleName      The display name of the module creating this
 *                        attribute.
 * @param context         Extra information about the attribute.
 * @param valueLong       The attribute value.
 *
 * @throws IllegalArgumentException If the value type of the specified
 *                                  standard attribute type is not
 *                                  TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.LONG or
 *                                  TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DATETIME
 *                                  or the type id is not for a standard type.
 * @deprecated
 */
@Deprecated
public BlackboardAttribute(int attributeTypeID, String moduleName, String context, long valueLong) {
	this(attributeTypeID, moduleName, valueLong);
	// replaceNulls() sanitizes the context string; defined elsewhere in this class.
	this.context = replaceNulls(context);
}

/**
 * Creates a standard attribute with a double value. The attribute should be
 * added to an appropriate artifact.
 *
 * @param attributeTypeID The standard attribute type id.
 * @param moduleName      The display name of the module creating this
 *                        attribute.
 * @param valueDouble     The attribute value.
 *
 * @throws IllegalArgumentException If the value type of the specified
 *                                  standard attribute type is not
 *                                  TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DOUBLE
 *                                  or the type id is not for a standard type.
 * @deprecated
 */
@Deprecated
public BlackboardAttribute(int attributeTypeID, String moduleName, double valueDouble) throws IllegalArgumentException {
	this(ATTRIBUTE_TYPE.fromID(attributeTypeID), moduleName, valueDouble);
}

/**
 * Creates a standard attribute with a double value and extra context. The
 * attribute should be added to an appropriate artifact.
 *
 * @param attributeTypeID The standard attribute type id.
 * @param moduleName      The display name of the module creating this
 *                        attribute.
 * @param context         Extra information about the attribute.
 * @param valueDouble     The attribute value.
 *
 * @throws IllegalArgumentException If the value type of the specified
 *                                  standard attribute type is not
 *                                  TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DOUBLE
 *                                  or the type id is not for a standard type.
 * @deprecated
 */
@Deprecated
public BlackboardAttribute(int attributeTypeID, String moduleName, String context, double valueDouble) {
	this(attributeTypeID, moduleName, valueDouble);
	// replaceNulls() sanitizes the context string; defined elsewhere in this class.
	this.context = replaceNulls(context);
}

/**
 * Creates a standard attribute with a string value. The attribute should be
 * added to an appropriate artifact.
 *
 * @param attributeTypeID The standard attribute type id.
 * @param moduleName      The display name of the module creating this
 *                        attribute.
 * @param valueString     The attribute value.
 *
 * @throws IllegalArgumentException If the value type of the specified
 *                                  standard attribute type is not
 *                                  TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING
 *                                  or the type id is not for a standard type.
 * @deprecated
 */
@Deprecated
public BlackboardAttribute(int attributeTypeID, String moduleName, String valueString) throws IllegalArgumentException {
	this(ATTRIBUTE_TYPE.fromID(attributeTypeID), moduleName, valueString);
}

/**
 * Creates a standard attribute with a string value and extra context. The
 * attribute should be added to an appropriate artifact.
 *
 * @param attributeTypeID The standard attribute type id.
 * @param moduleName      The display name of the module creating this
 *                        attribute.
 * @param context         Extra information about the attribute.
 * @param valueString     The attribute value.
 *
 * @throws IllegalArgumentException If the value type of the specified
 *                                  standard attribute type is not
 *                                  TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING
 *                                  or the type id is not for a standard type.
 * @deprecated
 */
@Deprecated
public BlackboardAttribute(int attributeTypeID, String moduleName, String context, String valueString) {
	this(attributeTypeID, moduleName, valueString);
	// replaceNulls() sanitizes the context string; defined elsewhere in this class.
	this.context = replaceNulls(context);
}

/**
 * Creates a standard attribute with a byte array value. The attribute
 * should be added to an appropriate artifact.
 *
 * @param attributeTypeID The standard attribute type id.
 * @param moduleName      The display name of the module creating this
 *                        attribute.
 * @param valueBytes      The attribute value.
 *
 * @throws IllegalArgumentException If the value type of the specified
 *                                  standard attribute type is not
 *                                  TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.BYTE
 *                                  or the type id is not for a standard type.
 * @deprecated
 */
@Deprecated
public BlackboardAttribute(int attributeTypeID, String moduleName, byte[] valueBytes) throws IllegalArgumentException {
	this(ATTRIBUTE_TYPE.fromID(attributeTypeID), moduleName, valueBytes);
}

/**
 * Creates a standard attribute with a byte array value and extra context.
 * The attribute should be added to an appropriate artifact.
 *
 * @param attributeTypeID The standard attribute type id.
 * @param moduleName      The display name of the module creating this
 *                        attribute.
 * @param context         Extra information about the attribute.
 * @param valueBytes      The attribute value.
 *
 * @throws IllegalArgumentException If the value type of the specified
 *                                  standard attribute type is not
 *                                  TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.BYTE
 *                                  or the type id is not for a standard type.
 * @deprecated
 */
@Deprecated
public BlackboardAttribute(int attributeTypeID, String moduleName, String context, byte[] valueBytes) {
	this(attributeTypeID, moduleName, valueBytes);
	// replaceNulls() sanitizes the context string; defined elsewhere in this class.
	this.context = replaceNulls(context);
}

/**
 * Sets the artifact id.
 *
 * @param artifactID The artifact id.
 *
 * @deprecated The preferred method for doing this is to add the attribute
 * to a BlackboardArtifact object by calling BlackboardArtifact.addAttribute
 * or BlackboardArtifact.addAttributes, both of which post the attributes to
 * the blackboard.
 */
@Deprecated
protected void setArtifactID(long artifactID) {
	// Delegates to the non-deprecated setter (defined elsewhere in this class).
	setArtifactId(artifactID);
}

/**
 * Sets the reference to the SleuthkitCase object that represents the case
 * database.
 *
 * @param sleuthkitCase A reference to a SleuthkitCase object.
 *
 * @deprecated The preferred method for doing this is to add the attribute
 * to a BlackboardArtifact object by calling BlackboardArtifact.addAttribute
 * or BlackboardArtifact.addAttributes, both of which post the attributes to
 * the blackboard.
 */
@Deprecated
protected void setCase(SleuthkitCase sleuthkitCase) {
	// Delegates to the non-deprecated setter (defined elsewhere in this class).
	setCaseDatabase(sleuthkitCase);
}

/**
 * Gets the context of this attribute.
 *
 * @return The context, may be the empty string.
 *
 * @deprecated Setting context for an attribute is deprecated.
 */
@Deprecated
public String getContext() {
	return context;
}

/**
 * Gets the context of this attribute (package-private variant).
 *
 * @return The context, may be the empty string.
 *
 * @deprecated Setting context for an attribute is deprecated.
 */
@Deprecated
String getContextString() {
	return context;
}

/**
 * Gets the attribute type id.
 *
 * @return The type id.
 *
 * @deprecated Use BlackboardAttribute.getAttributeType.getTypeID instead.
 */
@Deprecated
public int getAttributeTypeID() {
	return getAttributeType().getTypeID();
}

/**
 * Gets the attribute type name.
 *
 * @return The type name.
 *
 * @throws org.sleuthkit.datamodel.TskCoreException
 *
 * @deprecated Use BlackboardAttribute.getAttributeType.getTypeName instead.
 */
@Deprecated
public String getAttributeTypeName() throws TskCoreException {
	return getAttributeType().getTypeName();
}

/**
 * Gets the attribute type display name.
 *
 * @return The display name of the type.
 *
 * @throws org.sleuthkit.datamodel.TskCoreException
 *
 * @deprecated Use BlackboardAttribute.getAttributeType.getDisplayName
 * instead.
 */
@Deprecated
public String getAttributeTypeDisplayName() throws TskCoreException {
	return getAttributeType().getDisplayName();
}

/**
 * Gets the name of the first module identified as a source of this
 * attribute.
 *
 * @return A comma-separated-values list of module names, may be empty.
 *
 * @deprecated Use getSources instead.
 */
@Deprecated
public String getModuleName() {
	return getSourcesCSV();
}
}
sleuthkit-4.11.1/bindings/java/src/org/sleuthkit/datamodel/package.html000644 000765 000024 00000005016 14137073413 026734 0ustar00carrierstaff000000 000000

This package represents the data that can be obtained from The Sleuth Kit (TSK).

Object Hierarchy

The data found in a disk image has a hierarchy to it. There is an image. It may have a volume system with one or more volumes. Each volume may have a file system in it. Each file system has one or more files. Because of this ordering, the objects are organized in a tree with parents and children: the Case is at the top of the tree, images are the next layer down, and so on.

The Content class is the interface for the Image, VolumeSystem, etc. classes. The getChildren() method of each object returns its children as Content objects. You can use ContentVisitor to apply the visitor pattern and process each child according to its actual type.

In general, the type of a child is known in advance, but there are a few exceptions. One obvious example is that the child of an Image can be either a VolumeSystem or a FileSystem. The former occurs when the image has partitions and the latter occurs if the image is just of a file system.

Basic Usage

Case

A case represents one or more hard drive images. The SleuthkitCase class is used to represent a case and is directly tied to a SQLite database. You can access the forensic data from those images through the SleuthkitCase class. This is the first object you should create.

You get access to a SleuthkitCase class using either SleuthkitCase.newCase() or SleuthkitCase.openCase(). With the case object, you can add an image, get its children, or run queries against the database.

Data Mapping

Use TskData to map integer and enum values back to their meaning. For example, the file system type will be returned as an integer and TskData maps it to NTFS or FAT.

JNI

The bulk of the analysis occurs in the C/C++ code, and JNI is used to run that code from Java. The JNI methods are all located in the SleuthkitJNI Java class as static methods. They return handles that refer to data structures on the C/C++ side. You should never have to call these static Java methods directly; the class and its methods are used by the other Java data model classes.

Error Handling

The C/C++ code uses return codes and similar mechanisms for error handling. The C/C++ JNI code converts any errors into a TskException that is thrown into the Java code. The datamodel Java code does not catch these exceptions; instead, they are passed up to the Java code that called the datamodel code.

sleuthkit-4.11.1/bindings/java/src/org/sleuthkit/datamodel/SlackFile.java000644 000765 000024 00000015000 14137073413 027155 0ustar00carrierstaff000000 000000 /*
 * SleuthKit Java Bindings
 *
 * Copyright 2011-2017 Basis Technology Corp.
 * Contact: carrier sleuthkit org
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *	http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.sleuthkit.datamodel;

import java.util.Collections;
import org.sleuthkit.datamodel.TskData.FileKnown;
import org.sleuthkit.datamodel.TskData.TSK_FS_ATTR_TYPE_ENUM;
import org.sleuthkit.datamodel.TskData.TSK_FS_META_TYPE_ENUM;
import org.sleuthkit.datamodel.TskData.TSK_FS_NAME_FLAG_ENUM;
import org.sleuthkit.datamodel.TskData.TSK_FS_NAME_TYPE_ENUM;

/**
 * A representation of a slack file (the unused space at the end of an
 * allocated file's last block/cluster) that has been added to a case.
 *
 * NOTE(review): generic type parameters (e.g. &lt;T&gt; on the accept()
 * methods) appear to have been stripped by text extraction of this archive —
 * confirm against the upstream source before compiling.
 */
public class SlackFile extends FsContent {

	/**
	 * Constructs a representation of the slack space from a file system file
	 * that has been added to the case.
	 *
	 * @param db                 The case database to which the file has been
	 *                           added.
	 * @param objId              The object id of the file in the case database.
	 * @param dataSourceObjectId The object id of the data source for the file.
	 * @param fsObjId            The object id of the file system to which this
	 *                           file belongs.
	 * @param attrType           The type attribute given to the file by the
	 *                           file system.
	 * @param attrId             The type id given to the file by the file
	 *                           system.
	 * @param name               The name of the file.
	 * @param metaAddr           The meta address of the file.
	 * @param metaSeq            The meta sequence number of the file.
	 * @param dirType            The type of the base file, usually as reported
	 *                           in the name structure of the file system. May
	 *                           be set to TSK_FS_NAME_TYPE_ENUM.UNDEF.
	 * @param metaType           The type of the base file, usually as reported
	 *                           in the metadata structure of the file system.
	 *                           May be set to
	 *                           TSK_FS_META_TYPE_ENUM.TSK_FS_META_TYPE_UNDEF.
	 * @param dirFlag            The allocated status of the base file, usually
	 *                           as reported in the name structure of the file
	 *                           system.
	 * @param metaFlags          The allocated status of the base file, usually
	 *                           as reported in the metadata structure of the
	 *                           file system.
	 * @param size               The size of the file.
	 * @param ctime              The changed time of the file.
	 * @param crtime             The created time of the file.
	 * @param atime              The accessed time of the file.
	 * @param mtime              The modified time of the file.
	 * @param modes              The modes for the file.
	 * @param uid                The UID for the file.
	 * @param gid                The GID for the file.
	 * @param md5Hash            The MD5 hash of the file, null if not yet
	 *                           calculated.
	 * @param sha256Hash         The SHA-256 hash of the file, null if not yet
	 *                           calculated.
	 * @param knownState         The known state of the file from a hash
	 *                           database lookup, null if not yet looked up.
	 * @param parentPath         The path of the parent of the file.
	 * @param mimeType           The MIME type of the file, null if it has not
	 *                           yet been determined.
	 * @param extension          The extension part of the file name (not
	 *                           including the '.'), can be null.
	 * @param ownerUid           UID of the file owner as found in the file
	 *                           system, can be null.
	 * @param osAccountObjId     Obj id of the owner OS account, may be null.
	 */
	SlackFile(SleuthkitCase db,
			long objId,
			long dataSourceObjectId,
			long fsObjId,
			TSK_FS_ATTR_TYPE_ENUM attrType, int attrId,
			String name,
			long metaAddr, int metaSeq,
			TSK_FS_NAME_TYPE_ENUM dirType, TSK_FS_META_TYPE_ENUM metaType,
			TSK_FS_NAME_FLAG_ENUM dirFlag, short metaFlags,
			long size,
			long ctime, long crtime, long atime, long mtime,
			short modes, int uid, int gid,
			String md5Hash, String sha256Hash, FileKnown knownState,
			String parentPath, String mimeType,
			String extension, String ownerUid,
			Long osAccountObjId) {
		// The only behavior added over FsContent: this object is tagged with the
		// SLACK file type and carries no file attributes (empty list).
		super(db, objId, dataSourceObjectId, fsObjId, attrType, attrId, name, TskData.TSK_DB_FILES_TYPE_ENUM.SLACK, metaAddr, metaSeq, dirType, metaType, dirFlag, metaFlags, size, ctime, crtime, atime, mtime, modes, uid, gid, md5Hash, sha256Hash, knownState, parentPath, mimeType, extension, ownerUid, osAccountObjId, Collections.emptyList());
	}

	/**
	 * Reads bytes from the slack space.
	 *
	 * @param buf    Buffer to read into.
	 * @param offset Start position in the slack space.
	 * @param len    Number of bytes to read.
	 *
	 * @return Number of bytes read.
	 *
	 * @throws TskCoreException if there is a problem reading the file.
	 */
	@Override
	@SuppressWarnings("deprecation")
	protected int readInt(byte[] buf, long offset, long len) throws TskCoreException {
		if (offset == 0 && size == 0) {
			//special case for 0-size file: nothing to read, avoid opening a handle
			return 0;
		}
		// loadFileHandle() lazily opens the native file handle (defined in FsContent).
		loadFileHandle();
		return SleuthkitJNI.readFileSlack(fileHandle, buf, offset, len);
	}

	/**
	 * Accepts a Sleuthkit item visitor (Visitor design pattern).
	 *
	 * @param v A SleuthkitItemVisitor supplying an algorithm to run using this
	 *          file as input.
	 *
	 * @return The output of the algorithm.
	 */
	// NOTE(review): generics stripped by extraction; upstream presumably declares <T> here.
	@Override
	public T accept(SleuthkitItemVisitor v) {
		return v.visit(this);
	}

	/**
	 * Accepts a content visitor (Visitor design pattern).
	 *
	 * @param v A ContentVisitor supplying an algorithm to run using this file
	 *          as input.
	 *
	 * @return The output of the algorithm.
	 */
	// NOTE(review): generics stripped by extraction; upstream presumably declares <T> here.
	@Override
	public T accept(ContentVisitor v) {
		return v.visit(this);
	}

	/**
	 * Provides a string representation of this file.
	 *
	 * @param preserveState True if state should be included in the string
	 *                      representation of this object.
	 */
	@Override
	public String toString(boolean preserveState) {
		return super.toString(preserveState) + "SlackFile [\t" + "]\t"; //NON-NLS
	}
}
sleuthkit-4.11.1/bindings/java/src/org/sleuthkit/datamodel/TskUnsupportedSchemaVersionException.java000644 000765 000024 00000003447 14137073413 034704 0ustar00carrierstaff000000 000000 /*
 * SleuthKit Java Bindings
 *
 * Copyright 2011-2017 Basis Technology Corp.
 * Contact: carrier sleuthkit org
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *	http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.sleuthkit.datamodel;

/**
 * Subtype of TskCoreException that is thrown when there is an attempt to open
 * or use a resource with an unsupported schema version.
 *
 * For example, as of Sleuthkit 4.5.0 database schema versions are two part:
 * Major.Minor. This versioning schema is based on semantic versioning, but
 * without using the patch number (in practice it is always the default value
 * of zero). The major part is incremented for incompatible changes such as
 * removing a table or column, i.e., it will not be usable by an older version
 * of the software. A TskUnsupportedSchemaVersionException should be thrown
 * from an attempt to open a db with an incompatible db schema.
 *
 * @see CaseDBSchemaVersionNumber for more details on db schema compatibility.
*/
public class TskUnsupportedSchemaVersionException extends TskCoreException {

	// Required for Serializable subtypes; bump only on incompatible changes.
	private static final long serialVersionUID = 1L;

	/**
	 * Constructs the exception with no message.
	 */
	public TskUnsupportedSchemaVersionException() {
	}

	/**
	 * Constructs the exception with an error message.
	 *
	 * @param msg The error message.
	 */
	public TskUnsupportedSchemaVersionException(String msg) {
		super(msg);
	}

	/**
	 * Constructs the exception with an error message and a cause.
	 *
	 * @param msg The error message.
	 * @param ex  The underlying exception that caused this one.
	 */
	public TskUnsupportedSchemaVersionException(String msg, Exception ex) {
		super(msg, ex);
	}
}
bindings/java/src/org/sleuthkit/datamodel/TimelineEventArtifactTypeSingleDescription.java000644 000765 000024 00000005157 14137073413 035703 0ustar00carrierstaff000000 000000 sleuthkit-4.11.1/*
 * Sleuth Kit Data Model
 *
 * Copyright 2018-2019 Basis Technology Corp.
 * Contact: carrier sleuthkit org
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *	http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.sleuthkit.datamodel;

import java.util.logging.Level;
import java.util.logging.Logger;

/**
 * Package level extension of TimelineEventArtifactTypeImpl for event types
 * that have only one description (because we don't know how to break it up
 * further).
*/
class TimelineEventArtifactTypeSingleDescription extends TimelineEventArtifactTypeImpl {

	// Logger kept for parity with sibling classes; no logging visible in this chunk.
	private static final Logger logger = Logger.getLogger(TimelineEventArtifactTypeSingleDescription.class.getName());

	/**
	 * Builds the timeline event description (plus its timestamp) for the given
	 * artifact. Returns null when the artifact has no date/time attribute, in
	 * which case no event is created for it.
	 *
	 * @param artifact The artifact to extract the description/time from.
	 *
	 * @return The description-with-time, or null if the artifact has no
	 *         date/time attribute.
	 *
	 * @throws TskCoreException If the attribute lookup fails.
	 */
	@Override
	public TimelineEventDescriptionWithTime makeEventDescription(BlackboardArtifact artifact) throws TskCoreException {
		String description = extractFullDescription(artifact);
		// Truncate overly long descriptions so they fit the database column.
		if (description.length() > MAX_FULL_DESCRIPTION_LENGTH) {
			description = description.substring(0, MAX_FULL_DESCRIPTION_LENGTH);
		}
		BlackboardAttribute timeAttribute = artifact.getAttribute(getDateTimeAttributeType());
		if (timeAttribute == null) {
			// No timestamp -> no event can be placed on the timeline.
			return null;
		}
		long time = timeAttribute.getValueLong();
		return new TimelineEventDescriptionWithTime(time, timeAttribute.getDisplayString(), null, description);
	}

	/**
	 * Constructs the event type. Only the full-description extractor is real;
	 * the short and medium extractors are NullExtractors because this type has
	 * a single, indivisible description.
	 *
	 * @param typeID               The event type id.
	 * @param displayName          The display name of the event type.
	 * @param superType            The parent event type.
	 * @param artifactType         The artifact type events of this type are
	 *                             derived from.
	 * @param timeAttribute        The attribute holding the event time.
	 * @param descriptionAttribute The attribute holding the full description.
	 */
	TimelineEventArtifactTypeSingleDescription(int typeID, String displayName,
			TimelineEventType superType, BlackboardArtifact.Type artifactType,
			BlackboardAttribute.Type timeAttribute, BlackboardAttribute.Type descriptionAttribute) {
		super(typeID, displayName, superType, artifactType, timeAttribute,
				new NullExtractor(), new NullExtractor(), new AttributeExtractor(descriptionAttribute));
	}

	/**
	 * Parses a stored description; only the full description is meaningful for
	 * this single-description type, so the medium/short inputs are ignored.
	 */
	TimelineEventDescription parseDescription(String fullDescription, String medDescription, String shortDescription) {
		return new TimelineEventDescription(fullDescription);
	}

	/**
	 * Function that always returns null no matter what it is applied to.
	 * (Used as a placeholder extractor for the unused short/medium
	 * description slots.)
	 */
	final static class NullExtractor implements TSKCoreCheckedFunction {

		@Override
		public String apply(BlackboardArtifact ignored) throws TskCoreException {
			return null;
		}
	}
}
sleuthkit-4.11.1/bindings/java/src/org/sleuthkit/datamodel/SleuthkitItemVisitor.java000644 000765 000024 00000015367 14137073413 031473 0ustar00carrierstaff000000 000000 /*
 * Sleuth Kit Data Model
 *
 * Copyright 2011-2021 Basis Technology Corp.
* Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.sleuthkit.datamodel; /** * Interface for implementing a visitor pattern on all displayable items: * Content implementations and blackboard artifacts. * * Visitor implements an algorithm on the content and blackboard artifacts * objects. The algorithm is completely decoupled from the data object. The * visitor pattern emulates double dispatch mechanism. It allows to act * differently depending on the instance type, without need to test what the * actual type is. E.g. it allows for processing an object hierarchy without * using instanceof statements. Generic type parameter T is a return type from * the visit methods. 
*
 * @param <T> return type of visit methods
 */
public interface SleuthkitItemVisitor<T> {

    /** Act on (visit) a Directory content object. @param d the directory to visit / act on @return result of the visit */
    T visit(Directory d);

    /** Act on (visit) a File content object. @param f the file to visit / act on @return result of the visit */
    T visit(File f);

    /** Act on (visit) a FileSystem content object. @param fs the filesystem to visit / act on @return result of the visit */
    T visit(FileSystem fs);

    /** Act on (visit) an Image content object. @param i the image to visit / act on @return result of the visit */
    T visit(Image i);

    /** Act on (visit) a Volume content object. @param v the volume to visit / act on @return result of the visit */
    T visit(Volume v);

    /** Act on (visit) a VolumeSystem content object. @param vs the volume system to visit / act on @return result of the visit */
    T visit(VolumeSystem vs);

    /** Act on (visit) a Pool content object. @param pool the pool to visit / act on @return result of the visit */
    T visit(Pool pool);

    /** Act on (visit) a blackboard artifact object. @param ba blackboard artifact object to visit / act on @return result of the visit */
    T visit(BlackboardArtifact ba);

    /** Act on (visit) a blackboard artifact type. @param tw blackboard artifact type to visit / act on @return result of the visit */
    T visit(BlackboardArtifact.ARTIFACT_TYPE tw);

    /** Act on (visit) a layout file content object. @param lf layout file to visit / act on @return result of the visit */
    T visit(LayoutFile lf);

    /** Act on (visit) a VirtualDirectory content object. @param ld virtual dir to visit / act on @return result of the visit */
    T visit(VirtualDirectory ld);

    /** Act on (visit) a LocalDirectory content object. @param ld local dir to visit / act on @return result of the visit */
    T visit(LocalDirectory ld);

    /** Act on (visit) a DerivedFile content object. @param df derived file to visit / act on @return result of the visit */
    T visit(DerivedFile df);

    /** Act on (visit) a LocalFile content object. @param lf local file to visit / act on @return result of the visit */
    T visit(LocalFile lf);

    /** Act on (visit) a SlackFile content object. @param sf slack file to visit / act on @return result of the visit */
    T visit(SlackFile sf);

    /** Act on (visit) a Report content object. @param report report to visit / act on @return result of the visit */
    T visit(Report report);

    /** Act on (visit) an OsAccount content object. @param account os account to visit / act on @return result of the visit */
    T visit(OsAccount account);

    /** Act on (visit) an UnsupportedContent object. @param unsupportedContent content to visit / act on @return result of the visit */
    T visit(UnsupportedContent unsupportedContent);

    /** Act on (visit) a LocalFilesDataSource content object. @param localFilesDataSource data source to visit / act on @return result of the visit */
    T visit(LocalFilesDataSource localFilesDataSource);

    /**
     * The default visitor - quickest method for implementing a custom visitor.
     * Every visit method delegates to the defaultVisit method, the only
     * required method to be implemented. Then, implement the specific visit
     * methods for the objects on which the algorithm needs to act differently.
     *
     * @param <T> generic type, signifies the object type to be returned from
     *            visit()
     */
    static abstract public class Default<T> implements SleuthkitItemVisitor<T> {

        // Single hook every visit() funnels into; subclasses override only this.
        protected abstract T defaultVisit(SleuthkitVisitableItem s);

        @Override
        public T visit(Directory d) { return defaultVisit(d); }

        @Override
        public T visit(File f) { return defaultVisit(f); }

        @Override
        public T visit(FileSystem fs) { return defaultVisit(fs); }

        @Override
        public T visit(Image i) { return defaultVisit(i); }

        @Override
        public T visit(Volume v) { return defaultVisit(v); }

        @Override
        public T visit(VolumeSystem vs) { return defaultVisit(vs); }

        @Override
        public T visit(Pool p) { return defaultVisit(p); }

        @Override
        public T visit(BlackboardArtifact ba) { return defaultVisit(ba); }

        @Override
        public T visit(BlackboardArtifact.ARTIFACT_TYPE tw) { return defaultVisit(tw); }

        @Override
        public T visit(LayoutFile lf) { return defaultVisit(lf); }

        @Override
        public T visit(VirtualDirectory vd) { return defaultVisit(vd); }

        @Override
        public T visit(LocalDirectory ld) { return defaultVisit(ld); }

        @Override
        public T visit(DerivedFile df) { return defaultVisit(df); }

        @Override
        public T visit(LocalFile lf) { return defaultVisit(lf); }

        @Override
        public T visit(SlackFile sf) { return defaultVisit(sf); }

        @Override
        public T visit(Report report) { return defaultVisit(report); }

        @Override
        public T visit(OsAccount account) { return defaultVisit(account); }

        @Override
        public T visit(UnsupportedContent unsupportedContent) { return defaultVisit(unsupportedContent); }

        @Override
        public T visit(LocalFilesDataSource localFilesDataSource) { return defaultVisit(localFilesDataSource); }
    }
}
sleuthkit-4.11.1/bindings/java/src/org/sleuthkit/datamodel/Account.java000644 000765 000024 00000014336 14137073413 026717 0ustar00carrierstaff000000 000000 /*
 * Sleuth Kit Data Model
 *
 * Copyright 2016-18 Basis Technology Corp.
* Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.sleuthkit.datamodel; import java.util.Arrays; import java.util.List; /** * An entity that has a type and a unique (within that type) identifier . * Example types include a Bank Account, Credit Card, Email address, Phone * number, phone, Application, Web-site login, etc. Accounts are unique to the * case. */ public final class Account { /** * primary key in the Accounts table, unique at the case-level */ private final long account_id; private final Account.Type accountType; /** * id of the account, specific to the accounts type. For example: email * address, phone number, or website user name. */ private final String typeSpecificID; public static final class Type { //JIRA-900:Should the display names of predefined types be internationalized? 
public static final Account.Type CREDIT_CARD = new Type("CREDIT_CARD", "Credit Card"); public static final Account.Type DEVICE = new Type("DEVICE", "Device"); public static final Account.Type PHONE = new Type("PHONE", "Phone"); public static final Account.Type EMAIL = new Type("EMAIL", "Email"); public static final Account.Type FACEBOOK = new Type("FACEBOOK", "Facebook"); public static final Account.Type TWITTER = new Type("TWITTER", "Twitter"); public static final Account.Type INSTAGRAM = new Type("INSTAGRAM", "Instagram"); public static final Account.Type WHATSAPP = new Type("WHATSAPP", "WhatsApp"); public static final Account.Type MESSAGING_APP = new Type("MESSAGING_APP", "MessagingApp"); public static final Account.Type WEBSITE = new Type("WEBSITE", "Website"); public static final Account.Type IMO = new Type("IMO", "IMO"); public static final Account.Type LINE = new Type("LINE", "LINE"); public static final Account.Type SKYPE = new Type("SKYPE", "Skype"); public static final Account.Type TANGO = new Type("TANGO", "Tango"); public static final Account.Type TEXTNOW = new Type("TEXTNOW", "TextNow"); public static final Account.Type THREEMA = new Type("THREEMA", "ThreeMa"); public static final Account.Type VIBER = new Type("VIBER", "Viber"); public static final Account.Type XENDER = new Type("XENDER", "Xender"); public static final Account.Type ZAPYA = new Type("ZAPYA", "Zapya"); public static final Account.Type SHAREIT = new Type("SHAREIT", "ShareIt"); public static final List PREDEFINED_ACCOUNT_TYPES = Arrays.asList( CREDIT_CARD, DEVICE, PHONE, EMAIL, FACEBOOK, TWITTER, INSTAGRAM, WHATSAPP, MESSAGING_APP, WEBSITE, IMO, LINE, SKYPE, TANGO, TEXTNOW, THREEMA, VIBER, XENDER, ZAPYA, SHAREIT ); private final String typeName; private final String displayName; /** * Constructs an Account type. * * @param typeName The type name. * @param displayName The display name for the type. 
*/ public Type(String typeName, String displayName) { this.typeName = typeName; this.displayName = displayName; } /** * Gets the type name * * @return The type name. */ public String getTypeName() { return this.typeName; } /** * Gets the display name * * @return The display name. */ public String getDisplayName() { return displayName; } @Override public boolean equals(Object that) { if (this == that) { return true; } else if (!(that instanceof Account.Type)) { return false; } Account.Type thatType = (Account.Type) that; // DB table enforces uniqueness for type name return this.typeName.equals(thatType.getTypeName()); } @Override public int hashCode() { int hash = 11; hash = 83 * hash + (this.typeName != null ? this.typeName.hashCode() : 0); hash = 83 * hash + (this.displayName != null ? this.displayName.hashCode() : 0); return hash; } @Override public String toString() { return " displayName=" + this.displayName + ", typeName=" + this.typeName + ")"; } } Account(long account_id, Account.Type accountType, String typeSpecificId) throws TskCoreException { this.account_id = account_id; this.accountType = accountType; this.typeSpecificID = typeSpecificId; } /** * Gets unique identifier (assigned by a provider) for the account. Example * includes an email address, a phone number, or a website username. * * @return type specific account id. */ public String getTypeSpecificID() { return this.typeSpecificID; } /** * Gets the account type * * @return account type */ public Account.Type getAccountType() { return this.accountType; } /** * Gets a case-specific unique identifier for this account (from the * database) * * @return unique row id. */ public long getAccountID() { return this.account_id; } @Override public int hashCode() { int hash = 5; hash = 43 * hash + (int) (this.account_id ^ (this.account_id >>> 32)); hash = 43 * hash + (this.accountType != null ? this.accountType.hashCode() : 0); hash = 43 * hash + (this.typeSpecificID != null ? 
this.typeSpecificID.hashCode() : 0); return hash; } @Override public boolean equals(Object obj) { if (this == obj) { return true; } if (obj == null) { return false; } if (getClass() != obj.getClass()) { return false; } final Account other = (Account) obj; if (this.account_id != other.account_id) { return false; } if ((this.typeSpecificID == null) ? (other.typeSpecificID != null) : !this.typeSpecificID.equals(other.typeSpecificID)) { return false; } if (this.accountType != other.accountType && (this.accountType == null || !this.accountType.equals(other.accountType))) { return false; } return true; } } sleuthkit-4.11.1/bindings/java/src/org/sleuthkit/datamodel/DerivedFile.java000644 000765 000024 00000026742 14137073413 027511 0ustar00carrierstaff000000 000000 /* * SleuthKit Java Bindings * * Copyright 2011-2017 Basis Technology Corp. * Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
*/ package org.sleuthkit.datamodel; import java.text.MessageFormat; import java.util.Collections; import java.util.ResourceBundle; import java.util.logging.Level; import java.util.logging.Logger; import org.sleuthkit.datamodel.TskData.FileKnown; import org.sleuthkit.datamodel.TskData.TSK_DB_FILES_TYPE_ENUM; import org.sleuthkit.datamodel.TskData.TSK_FS_META_TYPE_ENUM; import org.sleuthkit.datamodel.TskData.TSK_FS_NAME_FLAG_ENUM; import org.sleuthkit.datamodel.TskData.TSK_FS_NAME_TYPE_ENUM; /** * A representation of a file or directory that has been derived from another * file and is stored outside of the data source (e.g., on a user's machine). A * typical example of a derived file is a file extracted from an archive file. */ public class DerivedFile extends AbstractFile { private volatile DerivedMethod derivedMethod; private static final Logger logger = Logger.getLogger(DerivedFile.class.getName()); private static ResourceBundle bundle = ResourceBundle.getBundle("org.sleuthkit.datamodel.Bundle"); private boolean hasDerivedMethod = true; /** * Constructs a representation of a file or directory that has been derived * from another file and is stored outside of the data source (e.g., on a * user's machine). A typical example of a derived file is a file extracted * from an archive file. * * @param db The case database to which the file has been * added. * @param objId The object id of the file in the case database. * @param dataSourceObjectId The object id of the data source for the file. * @param name The name of the file. * @param dirType The type of the file, usually as reported in * the name structure of the file system. May be * set to TSK_FS_NAME_TYPE_ENUM.UNDEF. * @param metaType The type of the file, usually as reported in * the metadata structure of the file system. May * be set to * TSK_FS_META_TYPE_ENUM.TSK_FS_META_TYPE_UNDEF. * @param dirFlag The allocated status of the file, usually as * reported in the name structure of the file * system. 
* @param metaFlags The allocated status of the file, usually as * reported in the metadata structure of the file * system. * @param size The size of the file. * @param ctime The changed time of the file. * @param crtime The created time of the file. * @param atime The accessed time of the file. * @param mtime The modified time of the file. * @param md5Hash The MD5 hash of the file, null if not yet * calculated. * @param knownState The known state of the file from a hash * database lookup, null if not yet looked up. * @param parentPath The path of the parent of the file. * @param localPath The absolute path of the file in secondary * storage. * @param parentId The object id of parent of the file. * @param mimeType The MIME type of the file, null if it has not * yet been determined. * @param encodingType The encoding type of the file. * @param extension The extension part of the file name (not * including the '.'), can be null. * @param ownerUid UID of the file owner as found in the file * system, can be null. * @param osAccountObjId Obj id of the owner OS account, may be null. */ DerivedFile(SleuthkitCase db, long objId, long dataSourceObjectId, String name, TSK_FS_NAME_TYPE_ENUM dirType, TSK_FS_META_TYPE_ENUM metaType, TSK_FS_NAME_FLAG_ENUM dirFlag, short metaFlags, long size, long ctime, long crtime, long atime, long mtime, String md5Hash, String sha256Hash, FileKnown knownState, String parentPath, String localPath, long parentId, String mimeType, TskData.EncodingType encodingType, String extension, String ownerUid, Long osAccountObjId) { // TODO (AUT-1904): The parent id should be passed to AbstractContent // through the class hierarchy contructors. 
super(db, objId, dataSourceObjectId, TskData.TSK_FS_ATTR_TYPE_ENUM.TSK_FS_ATTR_TYPE_DEFAULT, 0, name, TSK_DB_FILES_TYPE_ENUM.DERIVED, 0L, 0, dirType, metaType, dirFlag, metaFlags, size, ctime, crtime, atime, mtime, (short) 0, 0, 0, md5Hash, sha256Hash, knownState, parentPath, mimeType, extension, ownerUid, osAccountObjId, Collections.emptyList()); setLocalFilePath(localPath); setEncodingType(encodingType); } /** * Indicates whether or not this derived file is the root of a file system, * always returns false. * * @return False. */ @Override public boolean isRoot() { return false; } /** * Gets the method used to derive this file, if it has been recorded. * * @return Derived method or null. * * @throws TskCoreException if there was an error querying the case * database. */ public synchronized DerivedMethod getDerivedMethod() throws TskCoreException { if (derivedMethod == null && hasDerivedMethod == true) { try { derivedMethod = getSleuthkitCase().getDerivedMethod(getId()); if (derivedMethod == null) { hasDerivedMethod = false; //do not attempt to lazy load } } catch (TskCoreException e) { String msg = MessageFormat.format(bundle.getString("DerviedFile.derivedMethod.exception.msg1.text"), getId()); logger.log(Level.WARNING, msg, e); throw new TskCoreException(msg, e); } } return derivedMethod; } /** * Accepts a content visitor (Visitor design pattern). * * @param visitor A ContentVisitor supplying an algorithm to run using this * derived file as input. * * @return The output of the algorithm. */ @Override public T accept(SleuthkitItemVisitor v) { return v.visit(this); } /** * Accepts a Sleuthkit item visitor (Visitor design pattern). * * @param visitor A SleuthkitItemVisitor supplying an algorithm to run using * this derived file as input. * * @return The output of the algorithm. */ @Override public T accept(ContentVisitor v) { return v.visit(this); } /** * Closes this derived file, if it was open. 
* * @throws Throwable */ @Override protected void finalize() throws Throwable { try { close(); } finally { super.finalize(); } } /** * Provides a string representation of this derived file. * * @param preserveState True if state should be included in the string * representation of this object. * */ @Override public String toString(boolean preserveState) { return super.toString(preserveState) + "DerivedFile{" //NON-NLS + "derivedMethod=" + derivedMethod //NON-NLS + ", hasDerivedMethod=" + hasDerivedMethod //NON-NLS + '}'; } /** * A description of the method used to derive a file. */ public static class DerivedMethod { private final int derivedId; private String toolName; private String toolVersion; private String other; private String rederiveDetails; public DerivedMethod(int derivedId, String rederiveDetails) { this.derivedId = derivedId; this.rederiveDetails = rederiveDetails; if (this.rederiveDetails == null) { this.rederiveDetails = ""; } this.toolName = ""; this.toolVersion = ""; this.other = ""; } void setToolName(String toolName) { this.toolName = toolName; } void setToolVersion(String toolVersion) { this.toolVersion = toolVersion; } void setOther(String other) { this.other = other; } public int getDerivedId() { return derivedId; } public String getToolName() { return toolName; } public String getToolVersion() { return toolVersion; } public String getOther() { return other; } public String getRederiveDetails() { return rederiveDetails; } @Override public String toString() { return "DerivedMethod{" + "derived_id=" + derivedId + ", toolName=" + toolName + ", toolVersion=" + toolVersion + ", other=" + other + ", rederiveDetails=" + rederiveDetails + '}'; //NON-NLS } } /** * Constructs a representation of a file or directory that has been derived * from another file and is stored outside of the data source (e.g., on a * user's machine). A typical example of a derived file is a file extracted * from an archive file. 
* * @param db The case database to which the file has been added. * @param objId The object id of the file in the case database. * @param name The name of the file. * @param dirType The type of the file, usually as reported in the name * structure of the file system. May be set to * TSK_FS_NAME_TYPE_ENUM.UNDEF. * @param metaType The type of the file, usually as reported in the * metadata structure of the file system. May be set to * TSK_FS_META_TYPE_ENUM.TSK_FS_META_TYPE_UNDEF. * @param dirFlag The allocated status of the file, usually as reported * in the name structure of the file system. * @param metaFlags The allocated status of the file, usually as reported * in the metadata structure of the file system. * @param size The size of the file. * @param ctime The changed time of the file. * @param crtime The created time of the file. * @param atime The accessed time of the file. * @param mtime The modified time of the file. * @param md5Hash The MD5 hash of the file, null if not yet calculated. * @param knownState The known state of the file from a hash database * lookup, null if not yet looked up. * @param parentPath The path of the parent of the file. * @param localPath The absolute path of the file in secondary storage. * @param parentId The object id of parent of the file. * * @deprecated Do not make subclasses outside of this package. 
*/ @Deprecated @SuppressWarnings("deprecation") protected DerivedFile(SleuthkitCase db, long objId, String name, TSK_FS_NAME_TYPE_ENUM dirType, TSK_FS_META_TYPE_ENUM metaType, TSK_FS_NAME_FLAG_ENUM dirFlag, short metaFlags, long size, long ctime, long crtime, long atime, long mtime, String md5Hash, FileKnown knownState, String parentPath, String localPath, long parentId) { this(db, objId, db.getDataSourceObjectId(objId), name, dirType, metaType, dirFlag, metaFlags, size, ctime, crtime, atime, mtime, md5Hash, null, knownState, parentPath, localPath, parentId, null, TskData.EncodingType.NONE, null, OsAccount.NO_OWNER_ID, OsAccount.NO_ACCOUNT); } } sleuthkit-4.11.1/bindings/java/src/org/sleuthkit/datamodel/TimelineLevelOfDetail.java000755 000765 000024 00000004457 14137073413 031477 0ustar00carrierstaff000000 000000 /* * Sleuth Kit Data Model * * Copyright 2019 Basis Technology Corp. * Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.sleuthkit.datamodel; import java.util.ResourceBundle; /** * An enumeration of the levels of detail of various aspects of timeline data. 
*/ public enum TimelineLevelOfDetail { LOW(ResourceBundle.getBundle("org.sleuthkit.datamodel.Bundle").getString("TimelineLevelOfDetail.low")), MEDIUM(ResourceBundle.getBundle("org.sleuthkit.datamodel.Bundle").getString("TimelineLevelOfDetail.medium")), HIGH(ResourceBundle.getBundle("org.sleuthkit.datamodel.Bundle").getString("TimelineLevelOfDetail.high")); private final String displayName; /** * Gets the display name of this level of detail. * * @return The display name. */ public String getDisplayName() { return displayName; } /** * Constructs an element of the enumeration of the levels of detail of * various aspects of timeline data such as event descriptions and the * timeline event types hierarchy. * * @param displayName The display name of the level of detail. */ private TimelineLevelOfDetail(String displayName) { this.displayName = displayName; } /** * Gets the next higher level of detail relative to this level of detail. * * @return The next higher level of detail, may be null. */ public TimelineLevelOfDetail moreDetailed() { try { return values()[ordinal() + 1]; } catch (ArrayIndexOutOfBoundsException e) { return null; } } /** * Gets the next lower level of detail relative to this level of detail. * * @return The next lower level of detail, may be null. */ public TimelineLevelOfDetail lessDetailed() { try { return values()[ordinal() - 1]; } catch (ArrayIndexOutOfBoundsException e) { return null; } } } sleuthkit-4.11.1/bindings/java/src/org/sleuthkit/datamodel/TaggingManager.java000755 000765 000024 00000066753 14137073413 030213 0ustar00carrierstaff000000 000000 /* * Sleuth Kit Data Model * * Copyright 2020-2021 Basis Technology Corp. * Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.sleuthkit.datamodel; import java.sql.PreparedStatement; import java.sql.ResultSet; import java.sql.SQLException; import java.sql.Statement; import java.util.ArrayList; import java.util.Collections; import java.util.List; import java.util.Optional; import org.sleuthkit.datamodel.SleuthkitCase.CaseDbConnection; import org.sleuthkit.datamodel.SleuthkitCase.CaseDbTransaction; import static org.sleuthkit.datamodel.TskData.DbType.POSTGRESQL; import org.sleuthkit.datamodel.TskEvent.TagNamesAddedTskEvent; import org.sleuthkit.datamodel.TskEvent.TagNamesDeletedTskEvent; import org.sleuthkit.datamodel.TskEvent.TagNamesUpdatedTskEvent; import org.sleuthkit.datamodel.TskEvent.TagSetsAddedTskEvent; import org.sleuthkit.datamodel.TskEvent.TagSetsDeletedTskEvent; /** * Provides an API to manage Tags. */ public class TaggingManager { private final SleuthkitCase skCase; /** * Construct a TaggingManager for the given SleuthkitCase. * * @param skCase The SleuthkitCase. */ TaggingManager(SleuthkitCase skCase) { this.skCase = skCase; } /** * Returns a list of all the TagSets that exist in the case. * * @return A List of TagSet objects or an empty list if none were found. 
* * @throws TskCoreException */ public List getTagSets() throws TskCoreException { List tagSetList = new ArrayList<>(); skCase.acquireSingleUserCaseReadLock(); String getAllTagSetsQuery = "SELECT * FROM tsk_tag_sets"; try (CaseDbConnection connection = skCase.getConnection(); Statement stmt = connection.createStatement(); ResultSet resultSet = stmt.executeQuery(getAllTagSetsQuery);) { while (resultSet.next()) { int setID = resultSet.getInt("tag_set_id"); String setName = resultSet.getString("name"); TagSet set = new TagSet(setID, setName, getTagNamesByTagSetID(setID)); tagSetList.add(set); } } catch (SQLException ex) { throw new TskCoreException("Error occurred getting TagSet list.", ex); } finally { skCase.releaseSingleUserCaseReadLock(); } return tagSetList; } /** * Inserts a row into the tag_sets table in the case database. * * @param name The tag set name. * @param tagNames * * @return A TagSet object for the new row. * * @throws TskCoreException */ public TagSet addTagSet(String name, List tagNames) throws TskCoreException { if (name == null || name.isEmpty()) { throw new IllegalArgumentException("Error adding TagSet, TagSet name must be non-empty string."); } TagSet tagSet = null; CaseDbTransaction trans = skCase.beginTransaction(); try (Statement stmt = trans.getConnection().createStatement()) { String query = String.format("INSERT INTO tsk_tag_sets (name) VALUES('%s')", name); if (skCase.getDatabaseType() == POSTGRESQL) { stmt.execute(query, Statement.RETURN_GENERATED_KEYS); } else { stmt.execute(query); } try (ResultSet resultSet = stmt.getGeneratedKeys()) { resultSet.next(); int setID = resultSet.getInt(1); List updatedTags = new ArrayList<>(); if (tagNames != null) { // Get all of the TagName ids they can be updated in one // SQL call. 
for (int index = 0; index < tagNames.size(); index++) { TagName tagName = tagNames.get(index); stmt.executeUpdate(String.format("UPDATE tag_names SET tag_set_id = %d, rank = %d WHERE tag_name_id = %d", setID, index, tagName.getId())); updatedTags.add(new TagName(tagName.getId(), tagName.getDisplayName(), tagName.getDescription(), tagName.getColor(), tagName.getKnownStatus(), setID, index)); } } tagSet = new TagSet(setID, name, updatedTags); skCase.fireTSKEvent(new TagSetsAddedTskEvent(Collections.singletonList(tagSet))); skCase.fireTSKEvent(new TagNamesUpdatedTskEvent(updatedTags)); } trans.commit(); } catch (SQLException ex) { trans.rollback(); throw new TskCoreException(String.format("Error adding tag set %s", name), ex); } return tagSet; } /** * Remove a row from the tag set table. If the given TagSet has a valid list * of TagNames the TagNames will be removed from the tag_name table if there * are not references to the TagNames in the content_tag or * blackboard_artifact_tag table. * * @param tagSet TagSet to be deleted. * * @throws TskCoreException */ public void deleteTagSet(TagSet tagSet) throws TskCoreException { if (tagSet == null) { throw new IllegalArgumentException("Error adding deleting TagSet, TagSet object was null"); } if (isTagSetInUse(tagSet)) { throw new TskCoreException("Unable to delete TagSet (%d). 
TagSet TagName list contains TagNames that are currently in use."); } CaseDbTransaction trans = skCase.beginTransaction(); try (Statement stmt = trans.getConnection().createStatement()) { String queryTemplate = "DELETE FROM tag_names WHERE tag_name_id IN (SELECT tag_name_id FROM tag_names WHERE tag_set_id = %d)"; stmt.execute(String.format(queryTemplate, tagSet.getId())); queryTemplate = "DELETE FROM tsk_tag_sets WHERE tag_set_id = '%d'"; stmt.execute(String.format(queryTemplate, tagSet.getId())); trans.commit(); List tagNameIds = new ArrayList<>(); for (TagName tagName : tagSet.getTagNames()) { tagNameIds.add(tagName.getId()); } skCase.fireTSKEvent(new TagSetsDeletedTskEvent(Collections.singletonList(tagSet.getId()))); skCase.fireTSKEvent(new TagNamesDeletedTskEvent(tagNameIds)); } catch (SQLException ex) { trans.rollback(); throw new TskCoreException(String.format("Error deleting tag set where id = %d.", tagSet.getId()), ex); } } /** * Gets the tag set a tag name (tag definition) belongs to, if any. * * @param tagName The tag name. * * @return A TagSet object or null. * * @throws TskCoreException If there is an error querying the case database. 
*/ public TagSet getTagSet(TagName tagName) throws TskCoreException { if (tagName == null) { throw new IllegalArgumentException("Null tagName argument"); } if (tagName.getTagSetId() <= 0) { return null; } skCase.acquireSingleUserCaseReadLock(); TagSet tagSet = null; String sqlQuery = String.format("SELECT * FROM tsk_tag_sets WHERE tag_set_id = %d", tagName.getTagSetId()); try (CaseDbConnection connection = skCase.getConnection(); Statement stmt = connection.createStatement(); ResultSet resultSet = stmt.executeQuery(sqlQuery);) { if (resultSet.next()) { int setID = resultSet.getInt("tag_set_id"); String setName = resultSet.getString("name"); tagSet = new TagSet(setID, setName, getTagNamesByTagSetID(setID)); } return tagSet; } catch (SQLException ex) { throw new TskCoreException(String.format("Error occurred getting TagSet for TagName '%s' (ID=%d)", tagName.getDisplayName(), tagName.getId()), ex); } finally { skCase.releaseSingleUserCaseReadLock(); } } /** * Return a TagSet object for the given id. * * @param id TagSet id. * * @return The TagSet represented by the given it, or null if one was not * found. 
*
	 * @throws TskCoreException
	 */
	public TagSet getTagSet(long id) throws TskCoreException {
		TagSet tagSet = null;
		String preparedQuery = "Select * FROM tsk_tag_sets WHERE tag_set_id = ?";
		skCase.acquireSingleUserCaseReadLock();
		try (CaseDbConnection connection = skCase.getConnection(); PreparedStatement statement = connection.getPreparedStatement(preparedQuery, Statement.NO_GENERATED_KEYS)) {
			statement.setLong(1, id);
			try (ResultSet resultSet = statement.executeQuery()) {
				if (resultSet.next()) {
					int setID = resultSet.getInt("tag_set_id");
					String setName = resultSet.getString("name");
					// Load the member TagNames while still under the read lock.
					tagSet = new TagSet(setID, setName, getTagNamesByTagSetID(setID));
				}
			}
		} catch (SQLException ex) {
			throw new TskCoreException(String.format("Error occurred getting TagSet (ID=%d)", id), ex);
		} finally {
			skCase.releaseSingleUserCaseReadLock();
		}
		// Returns null if no tag set row matched the given id.
		return tagSet;
	}

	/**
	 * Inserts a row into the blackboard_artifact_tags table in the case
	 * database.
	 *
	 * @param artifact The blackboard artifact to tag.
	 * @param tagName  The name to use for the tag.
	 * @param comment  A comment to store with the tag.
	 *
	 * @return A BlackboardArtifactTag data transfer object (DTO) for the new
	 *         row.
	 *
	 * @throws TskCoreException
	 */
	public BlackboardArtifactTagChange addArtifactTag(BlackboardArtifact artifact, TagName tagName, String comment) throws TskCoreException {
		if (artifact == null || tagName == null) {
			throw new IllegalArgumentException("NULL argument passed to addArtifactTag");
		}

		List removedTags = new ArrayList<>();
		List removedTagIds = new ArrayList<>();
		CaseDbTransaction trans = null;
		try {
			// If a TagName is part of a TagSet remove any existing tags from the
			// set that are currently on the artifact
			long tagSetId = tagName.getTagSetId();
			if (tagSetId > 0) {
				// Get the list of all of the blackboardArtifactTags that use
				// TagName for the given artifact.
				String selectQuery = String.format("SELECT * from blackboard_artifact_tags JOIN tag_names ON tag_names.tag_name_id = blackboard_artifact_tags.tag_name_id JOIN tsk_examiners on tsk_examiners.examiner_id = blackboard_artifact_tags.examiner_id WHERE artifact_id = %d AND tag_names.tag_set_id = %d", artifact.getArtifactID(), tagSetId);
				TagName removedTag;
				// NOTE(review): skCase.getConnection() is obtained here but only the
				// Statement is in the try-with-resources — confirm whether the
				// CaseDbConnection needs to be closed/released explicitly.
				try (Statement stmt = skCase.getConnection().createStatement(); ResultSet resultSet = stmt.executeQuery(selectQuery)) {
					while (resultSet.next()) {
						removedTag = new TagName(
								resultSet.getLong("tag_name_id"),
								resultSet.getString("display_name"),
								resultSet.getString("description"),
								TagName.HTML_COLOR.getColorByName(resultSet.getString("color")),
								TskData.FileKnown.valueOf(resultSet.getByte("knownStatus")),
								tagSetId,
								resultSet.getInt("rank")
						);

						BlackboardArtifactTag bat
								= new BlackboardArtifactTag(resultSet.getLong("tag_id"),
										artifact,
										skCase.getContentById(artifact.getObjectID()),
										removedTag,
										resultSet.getString("comment"),
										resultSet.getString("login_name"));

						removedTags.add(bat);
						removedTagIds.add(Long.toString(bat.getId()));
					}
				}
			}

			Content content = skCase.getContentById(artifact.getObjectID());
			Examiner currentExaminer = skCase.getCurrentExaminer();
			trans = skCase.beginTransaction();
			CaseDbConnection connection = trans.getConnection();

			if (!removedTags.isEmpty()) {
				// Remove the tags.
				String removeQuery = String.format("DELETE FROM blackboard_artifact_tags WHERE tag_id IN (%s)", String.join(",", removedTagIds));
				try (Statement stmt = connection.createStatement()) {
					stmt.executeUpdate(removeQuery);
				}
			}

			// Add the new Tag.
			BlackboardArtifactTag artifactTag;
			try (Statement stmt = connection.createStatement()) {
				// NOTE(review): the user-supplied comment is interpolated directly
				// into the INSERT ('%s') — a comment containing a single quote will
				// break the statement; siblings such as addOrUpdateTagName use a
				// PreparedStatement. Consider parameterizing this query.
				String query = String.format(
						"INSERT INTO blackboard_artifact_tags (artifact_id, tag_name_id, comment, examiner_id) VALUES (%d, %d, '%s', %d)",
						artifact.getArtifactID(),
						tagName.getId(),
						comment,
						currentExaminer.getId());
				if (skCase.getDatabaseType() == POSTGRESQL) {
					stmt.execute(query, Statement.RETURN_GENERATED_KEYS);
				} else {
					stmt.execute(query);
				}

				try (ResultSet resultSet = stmt.getGeneratedKeys()) {
					resultSet.next();
					artifactTag = new BlackboardArtifactTag(resultSet.getLong(1), //last_insert_rowid()
							artifact, content, tagName, comment, currentExaminer.getLoginName());
				}
			}

			// Re-score the artifact now that it carries a (possibly notable) tag.
			skCase.getScoringManager().updateAggregateScoreAfterAddition(
					artifact.getId(), artifact.getDataSourceObjectID(), getTagScore(tagName.getKnownStatus()), trans);

			trans.commit();

			return new BlackboardArtifactTagChange(artifactTag, removedTags);
		} catch (SQLException ex) {
			if (trans != null) {
				trans.rollback();
			}
			throw new TskCoreException("Error adding row to blackboard_artifact_tags table (obj_id = " + artifact.getArtifactID() + ", tag_name_id = " + tagName.getId() + ")", ex);
		}
	}

	/**
	 * Translates the known status of a tag definition into an item score. This
	 * supports scoring of tagged items.
	 *
	 * @param knownStatus The known status of a tag definition.
	 *
	 * @return The corresponding item score.
	 */
	static Score getTagScore(TskData.FileKnown knownStatus) {
		switch (knownStatus) {
			case BAD:
				/*
				 * The "bad" known status is used to define tags that are
				 * "notable." An item tagged with a "notable" tag is scored as
				 * notable.
				 */
				return Score.SCORE_NOTABLE;
			case UNKNOWN:
			case KNOWN:
			default: // N/A
				/*
				 * All other known status values have no special significance in
				 * a tag definition. However, if an item has been tagged at all
				 * by a user, the item is scored as likely notable.
				 */
				return Score.SCORE_LIKELY_NOTABLE;
		}
	}

	/**
	 * Retrieves the maximum FileKnown status of any tag associated with the
	 * object id.
* * @param objectId The object id of the item. * @param transaction The case db transaction to perform this query. * * @return The maximum FileKnown status for this object or empty. * * @throws TskCoreException */ Optional getMaxTagKnownStatus(long objectId, CaseDbTransaction transaction) throws TskCoreException { // query content tags and blackboard artifact tags for highest // known status associated with a tag associated with this object id String queryString = "SELECT tag_names.knownStatus AS knownStatus\n" + " FROM (\n" + " SELECT ctags.tag_name_id AS tag_name_id FROM content_tags ctags WHERE ctags.obj_id = " + objectId + "\n" + " UNION\n" + " SELECT btags.tag_name_id AS tag_name_id FROM blackboard_artifact_tags btags \n" + " INNER JOIN blackboard_artifacts ba ON btags.artifact_id = ba.artifact_id\n" + " WHERE ba.artifact_obj_id = " + objectId + "\n" + " ) tag_name_ids\n" + " INNER JOIN tag_names ON tag_name_ids.tag_name_id = tag_names.tag_name_id\n" + " ORDER BY tag_names.knownStatus DESC\n" + " LIMIT 1"; try (Statement statement = transaction.getConnection().createStatement(); ResultSet resultSet = transaction.getConnection().executeQuery(statement, queryString);) { if (resultSet.next()) { return Optional.ofNullable(TskData.FileKnown.valueOf(resultSet.getByte("knownStatus"))); } else { return Optional.empty(); } } catch (SQLException ex) { throw new TskCoreException("Error getting content tag FileKnown status for content with id: " + objectId); } } /** * Inserts a row into the content_tags table in the case database. * * @param content The content to tag. * @param tagName The name to use for the tag. * @param comment A comment to store with the tag. * @param beginByteOffset Designates the beginning of a tagged section. * @param endByteOffset Designates the end of a tagged section. * * @return A ContentTag data transfer object (DTO) for the new row. 
*
	 * @throws TskCoreException
	 */
	public ContentTagChange addContentTag(Content content, TagName tagName, String comment, long beginByteOffset, long endByteOffset) throws TskCoreException {
		List removedTags = new ArrayList<>();
		List removedTagIds = new ArrayList<>();
		Examiner currentExaminer = skCase.getCurrentExaminer();
		CaseDbTransaction trans = skCase.beginTransaction();
		CaseDbConnection connection = trans.getConnection();
		try {
			// If the new tag belongs to a tag set, any existing tags from the
			// same set must first be removed from this content item.
			long tagSetId = tagName.getTagSetId();

			if (tagSetId > 0) {
				String selectQuery = String.format("SELECT * from content_tags JOIN tag_names ON tag_names.tag_name_id = content_tags.tag_name_id JOIN tsk_examiners on tsk_examiners.examiner_id = content_tags.examiner_id WHERE obj_id = %d AND tag_names.tag_set_id = %d", content.getId(), tagSetId);

				try (Statement stmt = connection.createStatement(); ResultSet resultSet = stmt.executeQuery(selectQuery)) {
					while (resultSet.next()) {
						TagName removedTag = new TagName(
								resultSet.getLong("tag_name_id"),
								resultSet.getString("display_name"),
								resultSet.getString("description"),
								TagName.HTML_COLOR.getColorByName(resultSet.getString("color")),
								TskData.FileKnown.valueOf(resultSet.getByte("knownStatus")),
								tagSetId,
								resultSet.getInt("rank")
						);

						ContentTag bat
								= new ContentTag(resultSet.getLong("tag_id"),
										content,
										removedTag,
										resultSet.getString("comment"),
										resultSet.getLong("begin_byte_offset"),
										resultSet.getLong("end_byte_offset"),
										resultSet.getString("login_name"));

						removedTagIds.add(Long.toString(bat.getId()));
						removedTags.add(bat);
					}
				}

				if (!removedTags.isEmpty()) {
					String removeQuery = String.format("DELETE FROM content_tags WHERE tag_id IN (%s)", String.join(",", removedTagIds));
					try (Statement stmt = connection.createStatement()) {
						stmt.executeUpdate(removeQuery);
					}
				}
			}

			// NOTE(review): the user-supplied comment is interpolated directly into
			// the INSERT ('%s'); a comment containing a quote will break the SQL.
			// Consider a PreparedStatement as used in addOrUpdateTagName.
			String queryTemplate = "INSERT INTO content_tags (obj_id, tag_name_id, comment, begin_byte_offset, end_byte_offset, examiner_id) VALUES (%d, %d, '%s', %d, %d, %d)";
			ContentTag contentTag = null;
			try (Statement stmt = connection.createStatement()) {
				String query = String.format(queryTemplate,
						content.getId(),
						tagName.getId(),
						comment,
						beginByteOffset,
						endByteOffset,
						currentExaminer.getId());
				if (skCase.getDatabaseType() == POSTGRESQL) {
					stmt.executeUpdate(query, Statement.RETURN_GENERATED_KEYS);
				} else {
					stmt.executeUpdate(query);
				}

				try (ResultSet resultSet = stmt.getGeneratedKeys()) {
					resultSet.next();
					contentTag = new ContentTag(resultSet.getLong(1), //last_insert_rowid()
							content, tagName, comment, beginByteOffset, endByteOffset, currentExaminer.getLoginName());
				}
			}

			// Re-score the content now that it carries a (possibly notable) tag.
			Long dataSourceId = content.getDataSource() != null ? content.getDataSource().getId() : null;
			skCase.getScoringManager().updateAggregateScoreAfterAddition(
					content.getId(), dataSourceId, getTagScore(tagName.getKnownStatus()), trans);

			trans.commit();
			return new ContentTagChange(contentTag, removedTags);
		} catch (SQLException ex) {
			trans.rollback();
			throw new TskCoreException("Error adding row to content_tags table (obj_id = " + content.getId() + ", tag_name_id = " + tagName.getId() + ")", ex);
		}
	}

	/**
	 * Inserts row into the tags_names table, or updates the existing row if the
	 * displayName already exists in the tag_names table in the case database.
	 *
	 * @param displayName The display name for the new tag name.
	 * @param description The description for the new tag name.
	 * @param color       The HTML color to associate with the new tag name.
	 * @param knownStatus The TskData.FileKnown value to associate with the new
	 *                    tag name.
	 *
	 * @return A TagName data transfer object (DTO) for the new row.
	 *
	 * @throws TskCoreException
	 */
	public TagName addOrUpdateTagName(String displayName, String description, TagName.HTML_COLOR color, TskData.FileKnown knownStatus) throws TskCoreException {
		// Upsert keyed on display_name; requires PostgreSQL or SQLite >= 3.24.
		String insertQuery = "INSERT INTO tag_names (display_name, description, color, knownStatus) VALUES (?, ?, ?, ?) ON CONFLICT (display_name) DO UPDATE SET description = ?, color = ?, knownStatus = ?";
		boolean isUpdated = false;
		skCase.acquireSingleUserCaseWriteLock();
		try (CaseDbConnection connection = skCase.getConnection()) {
			// First check whether the row already exists so the correct
			// added-vs-updated event can be fired afterwards.
			try (PreparedStatement statement = connection.getPreparedStatement("SELECT * FROM tag_names WHERE display_name = ?", Statement.NO_GENERATED_KEYS)) {
				statement.setString(1, displayName);
				try (ResultSet resultSet = statement.executeQuery()) {
					isUpdated = resultSet.next();
				}
			}

			// Parameters 1-4 feed the INSERT, 5-7 feed the DO UPDATE clause.
			try (PreparedStatement statement = connection.getPreparedStatement(insertQuery, Statement.RETURN_GENERATED_KEYS);) {
				statement.clearParameters();
				statement.setString(5, description);
				statement.setString(6, color.getName());
				statement.setByte(7, knownStatus.getFileKnownValue());
				statement.setString(1, displayName);
				statement.setString(2, description);
				statement.setString(3, color.getName());
				statement.setByte(4, knownStatus.getFileKnownValue());
				statement.executeUpdate();
			}

			// Read the row back to pick up the generated id, tag_set_id and rank.
			try (PreparedStatement statement = connection.getPreparedStatement("SELECT * FROM tag_names where display_name = ?", Statement.NO_GENERATED_KEYS)) {
				statement.setString(1, displayName);
				try (ResultSet resultSet = connection.executeQuery(statement)) {
					resultSet.next();
					TagName newTag = new TagName(resultSet.getLong("tag_name_id"), displayName, description, color, knownStatus, resultSet.getLong("tag_set_id"), resultSet.getInt("rank"));

					if (!isUpdated) {
						skCase.fireTSKEvent(new TagNamesAddedTskEvent(Collections.singletonList(newTag)));
					} else {
						skCase.fireTSKEvent(new TagNamesUpdatedTskEvent(Collections.singletonList(newTag)));
					}
					return newTag;
				}
			}
		} catch (SQLException ex) {
			throw new TskCoreException("Error adding row for " + displayName + " tag name to tag_names table", ex);
		} finally {
			skCase.releaseSingleUserCaseWriteLock();
		}
	}

	/**
	 * Return the TagName object for the given id.
	 *
	 * @param id The TagName id.
	 *
	 * @return The TagName object for the given id.
* * @throws TskCoreException */ public TagName getTagName(long id) throws TskCoreException { String preparedQuery = "SELECT * FROM tag_names where tag_name_id = ?"; skCase.acquireSingleUserCaseReadLock(); try (CaseDbConnection connection = skCase.getConnection()) { try (PreparedStatement statement = connection.getPreparedStatement(preparedQuery, Statement.NO_GENERATED_KEYS)) { statement.clearParameters(); statement.setLong(1, id); try (ResultSet resultSet = statement.executeQuery()) { if (resultSet.next()) { return new TagName(resultSet.getLong("tag_name_id"), resultSet.getString("display_name"), resultSet.getString("description"), TagName.HTML_COLOR.getColorByName(resultSet.getString("color")), TskData.FileKnown.valueOf(resultSet.getByte("knowStatus")), resultSet.getLong("tag_set_id"), resultSet.getInt("rank")); } } } } catch (SQLException ex) { throw new TskCoreException("", ex); } finally { skCase.releaseSingleUserCaseWriteLock(); } return null; } /** * Determine if the given TagSet contains TagNames that are currently in * use, ie there is an existing ContentTag or ArtifactTag that uses TagName. * * @param tagSet The Tagset to check. * * @return Return true if the TagSet is in use. 
* * @throws TskCoreException */ private boolean isTagSetInUse(TagSet tagSet) throws TskCoreException { skCase.acquireSingleUserCaseReadLock(); try (CaseDbConnection connection = skCase.getConnection()) { List tagNameList = tagSet.getTagNames(); if (tagNameList != null && !tagNameList.isEmpty()) { String statement = String.format("SELECT tag_id FROM content_tags WHERE tag_name_id IN (SELECT tag_name_id FROM tag_names WHERE tag_set_id = %d)", tagSet.getId()); try (Statement stmt = connection.createStatement(); ResultSet resultSet = stmt.executeQuery(statement)) { if (resultSet.next()) { return true; } } catch (SQLException ex) { throw new TskCoreException(String.format("Failed to determine if TagSet is in use (%s)", tagSet.getId()), ex); } statement = String.format("SELECT tag_id FROM blackboard_artifact_tags WHERE tag_name_id IN (SELECT tag_name_id FROM tag_names WHERE tag_set_id = %d)", tagSet.getId()); try (Statement stmt = connection.createStatement(); ResultSet resultSet = stmt.executeQuery(statement)) { if (resultSet.next()) { return true; } } catch (SQLException ex) { throw new TskCoreException(String.format("Failed to determine if TagSet is in use (%s)", tagSet.getId()), ex); } } } finally { skCase.releaseSingleUserCaseReadLock(); } return false; } /** * Returns a list of all of the TagNames that are apart of the given TagSet. * * @param tagSetId ID of a TagSet. * * @return List of TagNames for the TagSet or empty list if none were found. 
* * @throws TskCoreException */ private List getTagNamesByTagSetID(int tagSetId) throws TskCoreException { if (tagSetId <= 0) { throw new IllegalArgumentException("Invalid tagSetID passed to getTagNameByTagSetID"); } List tagNameList = new ArrayList<>(); skCase.acquireSingleUserCaseReadLock(); String query = String.format("SELECT * FROM tag_names WHERE tag_set_id = %d", tagSetId); try (CaseDbConnection connection = skCase.getConnection(); Statement stmt = connection.createStatement(); ResultSet resultSet = stmt.executeQuery(query)) { while (resultSet.next()) { tagNameList.add(new TagName(resultSet.getLong("tag_name_id"), resultSet.getString("display_name"), resultSet.getString("description"), TagName.HTML_COLOR.getColorByName(resultSet.getString("color")), TskData.FileKnown.valueOf(resultSet.getByte("knownStatus")), tagSetId, resultSet.getInt("rank"))); } } catch (SQLException ex) { throw new TskCoreException(String.format("Error getting tag names for tag set (%d)", tagSetId), ex); } finally { skCase.releaseSingleUserCaseReadLock(); } return tagNameList; } /** * Object to store the tag change from a call to addArtifactTag. */ public static class BlackboardArtifactTagChange { private final BlackboardArtifactTag addedTag; private final List removedTagList; /** * Construct a new artifact tag change object. * * @param added Newly created artifact tag. * @param removed List of removed tags. */ BlackboardArtifactTagChange(BlackboardArtifactTag added, List removed) { this.addedTag = added; this.removedTagList = removed; } /** * Returns the newly created tag. * * @return Add artifact tag. */ public BlackboardArtifactTag getAddedTag() { return addedTag; } /** * Returns a list of the artifacts tags that were removed. * * @return */ public List getRemovedTags() { return Collections.unmodifiableList(removedTagList); } } /** * Object to store the tag change from a call to addContentTag. 
*/ public static class ContentTagChange { private final ContentTag addedTag; private final List removedTagList; /** * Construct a new content tag change object. * * @param added Newly created artifact tag. * @param removed List of removed tags. */ ContentTagChange(ContentTag added, List removed) { this.addedTag = added; this.removedTagList = removed; } /** * Returns the newly created tag. * * @return Add artifact tag. */ public ContentTag getAddedTag() { return addedTag; } /** * Returns a list of the artifacts tags that were removed. * * @return */ public List getRemovedTags() { return Collections.unmodifiableList(removedTagList); } } } sleuthkit-4.11.1/bindings/java/src/org/sleuthkit/datamodel/SleuthkitCaseAdminUtil.java000644 000765 000024 00000002470 14137073413 031676 0ustar00carrierstaff000000 000000 /* * Sleuth Kit Data Model * * Copyright 2019 Basis Technology Corp. * Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.sleuthkit.datamodel; /** * Utility methods for administering a case database. */ public final class SleuthkitCaseAdminUtil { /** * Deletes a data source from a case database. * * @param caseDB The case database. * @param dataSourceObjID The object ID of the data source to be deleted. * * @throws TskCoreException If there is an error deleting the data source. 
*/ public static void deleteDataSource(SleuthkitCase caseDB, long dataSourceObjID) throws TskCoreException { caseDB.deleteDataSource(dataSourceObjID); } /** * Prevent instantiation of this utility class. */ private SleuthkitCaseAdminUtil() { } } sleuthkit-4.11.1/bindings/java/src/org/sleuthkit/datamodel/IngestModuleInfo.java000755 000765 000024 00000006340 14137073413 030535 0ustar00carrierstaff000000 000000 /* * Sleuth Kit Data Model * * Copyright 2014-2021 Basis Technology Corp. * Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.sleuthkit.datamodel; import java.util.ResourceBundle; /** * Represents information about an ingest module factory, used in ingest job * info to show which ingest modules were run. 
*/ public final class IngestModuleInfo { private static final ResourceBundle bundle = ResourceBundle.getBundle("org.sleuthkit.datamodel.Bundle"); /** * Used to keep track of the module types */ public static enum IngestModuleType { /* * IMPORTANT: DO NOT CHANGE ORDER, THE ORDINAL VALUES OF THE ENUM ARE * STORED IN THE CASE DATABASE */ DATA_SOURCE_LEVEL(bundle.getString("IngestModuleInfo.IngestModuleType.DataSourceLevel.displayName")), FILE_LEVEL(bundle.getString("IngestModuleInfo.IngestModuleType.FileLevel.displayName")), DATA_ARTIFACT(bundle.getString("IngestModuleInfo.IngestModuleType.DataArtifact.displayName")), MULTIPLE("IngestModuleInfo.IngestModuleType.Multiple.displayName"); private final String displayName; private IngestModuleType(String displayName) { this.displayName = displayName; } public static IngestModuleType fromID(int typeId) { for (IngestModuleType moduleType : IngestModuleType.values()) { if (moduleType.ordinal() == typeId) { return moduleType; } } return null; } /** * @return the displayName */ public String getDisplayName() { return displayName; } } private final long ingestModuleId; private final String displayName; private final String uniqueName; private final IngestModuleType type; private final String version; /** * * @param ingestModuleId The id of the ingest module * @param displayName The display name of the ingest module * @param uniqueName The unique name of the ingest module. * @param type The ingest module type of the module. * @param version The version number of the module. 
*/ IngestModuleInfo(long ingestModuleId, String displayName, String uniqueName, IngestModuleType type, String version) { this.ingestModuleId = ingestModuleId; this.displayName = displayName; this.uniqueName = uniqueName; this.type = type; this.version = version; } /** * @return the ingestModuleId */ public long getIngestModuleId() { return ingestModuleId; } /** * @return the displayName */ public String getDisplayName() { return displayName; } /** * @return the uniqueName */ public String getUniqueName() { return uniqueName; } /** * @return the typeID */ public IngestModuleType getType() { return type; } /** * @return the version */ public String getVersion() { return version; } } sleuthkit-4.11.1/bindings/java/src/org/sleuthkit/datamodel/Blackboard.java000755 000765 000024 00000153116 14137073414 027353 0ustar00carrierstaff000000 000000 /* * Sleuth Kit Data Model * * Copyright 2018-2021 Basis Technology Corp. * Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
*/
package org.sleuthkit.datamodel;

import com.google.common.collect.ImmutableSet;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Objects;
import java.util.Set;
import java.util.logging.Level;
import java.util.logging.Logger;
import java.util.stream.Collectors;
import org.sleuthkit.datamodel.SleuthkitCase.CaseDbConnection;
import org.sleuthkit.datamodel.SleuthkitCase.CaseDbTransaction;

/**
 * A representation of the blackboard, a place where artifacts and their
 * attributes are posted.
 */
public final class Blackboard {

	private static final Logger LOGGER = Logger.getLogger(Blackboard.class.getName());

	// The case database this blackboard reads from and writes to.
	private final SleuthkitCase caseDb;

	/**
	 * Constructs a representation of the blackboard, a place where artifacts
	 * and their attributes are posted.
	 *
	 * @param casedb The case database.
	 */
	Blackboard(SleuthkitCase casedb) {
		this.caseDb = Objects.requireNonNull(casedb, "Cannot create Blackboard for null SleuthkitCase");
	}

	/**
	 * Posts the artifact. The artifact should be complete (all attributes have
	 * been added) before being posted. Posting the artifact includes making any
	 * timeline events that may be derived from it, and broadcasting a
	 * notification that the artifact is ready for further analysis.
	 *
	 * @param artifact   The artifact to be posted.
	 * @param moduleName The name of the module that is posting the artifacts.
	 *
	 * @throws BlackboardException If there is a problem posting the artifact.
	 */
	public void postArtifact(BlackboardArtifact artifact, String moduleName) throws BlackboardException {
		// Delegates to the collection form with a single-element set.
		postArtifacts(Collections.singleton(artifact), moduleName);
	}

	/**
	 * Posts a Collection of artifacts. The artifacts should be complete (all
	 * attributes have been added) before being posted. Posting the artifacts
	 * includes making any events that may be derived from them, and
	 * broadcasting notifications that the artifacts are ready for further
	 * analysis.
	 *
	 *
	 * @param artifacts  The artifacts to be posted .
	 * @param moduleName The name of the module that is posting the artifacts.
	 *
	 *
	 * @throws BlackboardException If there is a problem posting the artifacts.
	 *
	 */
	public void postArtifacts(Collection artifacts, String moduleName) throws BlackboardException {
		/*
		 * For now this just processes them one by one, but in the future it
		 * could be smarter and use transactions, etc.
		 */
		for (BlackboardArtifact artifact : artifacts) {
			try {
				caseDb.getTimelineManager().addArtifactEvents(artifact);
			} catch (TskCoreException ex) {
				throw new BlackboardException("Failed to add events for artifact: " + artifact, ex);
			}
		}

		// Single event fired for the whole batch, after all timeline events exist.
		caseDb.fireTSKEvent(new ArtifactsPostedEvent(artifacts, moduleName));
	}

	/**
	 * Gets an artifact type, creating it if it does not already exist. Use this
	 * method to define custom artifact types.
	 *
	 * This assumes that the artifact type is of category DATA_ARTIFACT.
	 *
	 * @param typeName    The type name of the artifact type.
	 * @param displayName The display name of the artifact type.
	 *
	 * @return A type object representing the artifact type.
	 *
	 * @throws BlackboardException If there is a problem getting or adding the
	 *                             artifact type.
	 */
	public BlackboardArtifact.Type getOrAddArtifactType(String typeName, String displayName) throws BlackboardException {
		return getOrAddArtifactType(typeName, displayName, BlackboardArtifact.Category.DATA_ARTIFACT);
	}

	/**
	 * Gets an artifact type, creating it if it does not already exist. Use this
	 * method to define custom artifact types.
	 *
	 * @param typeName    The type name of the artifact type.
	 * @param displayName The display name of the artifact type.
	 * @param category    The artifact type category.
	 *
	 * @return A type object representing the artifact type.
*
	 * @throws BlackboardException If there is a problem getting or adding the
	 *                             artifact type.
	 */
	public BlackboardArtifact.Type getOrAddArtifactType(String typeName, String displayName, BlackboardArtifact.Category category) throws BlackboardException {
		if (category == null) {
			throw new BlackboardException("Category provided must be non-null");
		}

		try {
			return caseDb.addBlackboardArtifactType(typeName, displayName, category);
		} catch (TskDataException typeExistsEx) {
			// Type already exists — fetch and return the existing definition.
			try {
				return caseDb.getArtifactType(typeName);
			} catch (TskCoreException ex) {
				throw new BlackboardException("Failed to get or add artifact type", ex);
			}
		} catch (TskCoreException ex) {
			throw new BlackboardException("Failed to get or add artifact type", ex);
		}
	}

	/**
	 * Adds new analysis result artifact.
	 *
	 * @param artifactType    Type of analysis result artifact to create.
	 * @param objId           Object id of parent.
	 * @param dataSourceObjId Data source object id, may be null.
	 * @param score           Score associated with this analysis result.
	 * @param conclusion      Conclusion of the analysis, may be null or an
	 *                        empty string.
	 * @param configuration   Configuration associated with this analysis, may
	 *                        be null or an empty string.
	 * @param justification   Justification, may be null or an empty string.
	 * @param attributesList  Attributes to be attached to this analysis result
	 *                        artifact.
	 *
	 * @return AnalysisResultAdded The analysis return added and the current
	 *         aggregate score of content.
	 *
	 * @throws TskCoreException
	 * @throws BlackboardException exception thrown if a critical error occurs
	 *                             within TSK core
	 */
	public AnalysisResultAdded newAnalysisResult(BlackboardArtifact.Type artifactType, long objId, Long dataSourceObjId, Score score, String conclusion, String configuration, String justification, Collection attributesList) throws BlackboardException, TskCoreException {
		if (artifactType.getCategory() != BlackboardArtifact.Category.ANALYSIS_RESULT) {
			throw new BlackboardException(String.format("Artifact type (name = %s) is not of Analysis Result category. ", artifactType.getTypeName()));
		}

		// This overload owns the transaction; the overload below does the work.
		CaseDbTransaction transaction = caseDb.beginTransaction();
		try {
			AnalysisResultAdded analysisResult = newAnalysisResult(artifactType, objId, dataSourceObjId, score, conclusion, configuration, justification, attributesList, transaction);
			transaction.commit();
			return analysisResult;
		} catch (TskCoreException | BlackboardException ex) {
			try {
				transaction.rollback();
			} catch (TskCoreException ex2) {
				// Original exception is more important; just log the rollback failure.
				LOGGER.log(Level.SEVERE, "Failed to rollback transaction after exception. "
						+ "Error invoking newAnalysisResult with dataSourceObjId: "
						+ (dataSourceObjId == null ? "" : dataSourceObjId) + ", sourceObjId: " + objId, ex2);
			}
			throw ex;
		}
	}

	/**
	 * Adds new analysis result artifact.
	 *
	 * @param artifactType    Type of analysis result artifact to create.
	 * @param objId           Object id of parent.
	 * @param dataSourceObjId Data source object id, may be null.
	 * @param score           Score associated with this analysis result.
	 * @param conclusion      Conclusion of the analysis, may be null or an
	 *                        empty string.
	 * @param configuration   Configuration associated with this analysis, may
	 *                        be null or an empty string.
	 * @param justification   Justification, may be null or an empty string.
	 * @param attributesList  Attributes to be attached to this analysis result
	 *                        artifact.
	 * @param transaction     DB transaction to use.
	 *
	 * @return AnalysisResultAdded The analysis return added and the current
	 *         aggregate score of content.
	 *
	 * @throws BlackboardException exception thrown if a critical error occurs
	 *                             within TSK core
	 */
	public AnalysisResultAdded newAnalysisResult(BlackboardArtifact.Type artifactType, long objId, Long dataSourceObjId, Score score, String conclusion, String configuration, String justification, Collection attributesList, CaseDbTransaction transaction) throws BlackboardException {
		if (artifactType.getCategory() != BlackboardArtifact.Category.ANALYSIS_RESULT) {
			throw new BlackboardException(String.format("Artifact type (name = %s) is not of Analysis Result category. ", artifactType.getTypeName()));
		}

		try {
			// add analysis result
			AnalysisResult analysisResult = caseDb.newAnalysisResult(artifactType, objId, dataSourceObjId, score, conclusion, configuration, justification, transaction.getConnection());

			// add the given attributes
			if (attributesList != null && !attributesList.isEmpty()) {
				analysisResult.addAttributes(attributesList, transaction);
			}

			// update the final score for the object
			Score aggregateScore = caseDb.getScoringManager().updateAggregateScoreAfterAddition(objId, dataSourceObjId, analysisResult.getScore(), transaction);

			// return the analysis result and the current aggregate score.
			return new AnalysisResultAdded(analysisResult, aggregateScore);

		} catch (TskCoreException ex) {
			throw new BlackboardException("Failed to add analysis result.", ex);
		}
	}

	/**
	 * Delete the specified analysis result.
	 *
	 * Deletes the result from blackboard_artifacts and tsk_analysis_results,
	 * and recalculates and updates the aggregate score of the content. Fires an
	 * event to indicate that the analysis result has been deleted and that the
	 * score of the item has changed.
	 *
	 * @param analysisResult AnalysisResult to delete.
	 *
	 * @return New score of the content.
*
	 * @throws TskCoreException
	 */
	public Score deleteAnalysisResult(AnalysisResult analysisResult) throws TskCoreException {

		CaseDbTransaction transaction = this.caseDb.beginTransaction();
		try {
			Score score = deleteAnalysisResult(analysisResult, transaction);
			transaction.commit();
			// Null out so the finally block does not roll back a committed txn.
			transaction = null;

			return score;
		} finally {
			if (transaction != null) {
				transaction.rollback();
			}
		}
	}

	/**
	 * Delete the specified analysis result.
	 *
	 * Deletes the result from blackboard_artifacts and tsk_analysis_results,
	 * and recalculates and updates the aggregate score of the content.
	 *
	 * @param artifactObjId Artifact Obj Id to be deleted
	 * @param transaction
	 *
	 * @return
	 *
	 * @throws TskCoreException
	 */
	public Score deleteAnalysisResult(long artifactObjId, CaseDbTransaction transaction) throws TskCoreException {

		List analysisResults = getAnalysisResultsWhere(" artifacts.artifact_obj_id = " + artifactObjId, transaction.getConnection());

		if (analysisResults.isEmpty()) {
			throw new TskCoreException(String.format("Analysis Result not found for artifact obj id %d", artifactObjId));
		}

		return deleteAnalysisResult(analysisResults.get(0), transaction);
	}

	/**
	 * Delete the specified analysis result.
	 *
	 * Deletes the result from blackboard_artifacts and tsk_analysis_results,
	 * and recalculates and updates the aggregate score of the content.
	 *
	 * @param analysisResult AnalysisResult to delete.
	 * @param transaction    Transaction to use for database operations.
	 *
	 * @return New score of the content.
	 *
	 * @throws TskCoreException
	 */
	private Score deleteAnalysisResult(AnalysisResult analysisResult, CaseDbTransaction transaction) throws TskCoreException {

		try {
			CaseDbConnection connection = transaction.getConnection();

			// delete the blackboard artifacts row. This will also delete the tsk_analysis_result row
			String deleteSQL = "DELETE FROM blackboard_artifacts WHERE artifact_obj_id = ?";

			// NOTE(review): RETURN_GENERATED_KEYS on a DELETE looks unnecessary,
			// and analysisResult.getId() is bound to the artifact_obj_id column —
			// confirm getId() returns the artifact OBJECT id, not the artifact id.
			PreparedStatement deleteStatement = connection.getPreparedStatement(deleteSQL, Statement.RETURN_GENERATED_KEYS);
			deleteStatement.clearParameters();
			deleteStatement.setLong(1, analysisResult.getId());

			deleteStatement.executeUpdate();

			// register the deleted result with the transaction so an event can be fired for it.
			transaction.registerDeletedAnalysisResult(analysisResult.getObjectID());

			return caseDb.getScoringManager().updateAggregateScoreAfterDeletion(analysisResult.getObjectID(), analysisResult.getDataSourceObjectID(), transaction);

		} catch (SQLException ex) {
			throw new TskCoreException(String.format("Error deleting analysis result with artifact obj id %d", analysisResult.getId()), ex);
		}
	}

	// Shared SELECT for analysis-result queries; callers append WHERE conditions
	// via the getAnalysisResultsWhere helpers.
	private final static String ANALYSIS_RESULT_QUERY_STRING = "SELECT DISTINCT artifacts.artifact_id AS artifact_id, " //NON-NLS
			+ " artifacts.obj_id AS obj_id, artifacts.artifact_obj_id AS artifact_obj_id, artifacts.data_source_obj_id AS data_source_obj_id, artifacts.artifact_type_id AS artifact_type_id, "
			+ " types.type_name AS type_name, types.display_name AS display_name, types.category_type as category_type,"//NON-NLS
			+ " artifacts.review_status_id AS review_status_id, " //NON-NLS
			+ " results.conclusion AS conclusion, results.significance AS significance, results.priority AS priority, "
			+ " results.configuration AS configuration, results.justification AS justification "
			+ " FROM blackboard_artifacts AS artifacts "
			+ " JOIN blackboard_artifact_types AS types " //NON-NLS
			+ " ON artifacts.artifact_type_id = types.artifact_type_id" //NON-NLS
			+ " LEFT JOIN tsk_analysis_results AS results "
			+ " ON artifacts.artifact_obj_id = results.artifact_obj_id " //NON-NLS
			+ " WHERE artifacts.review_status_id != " + BlackboardArtifact.ReviewStatus.REJECTED.getID() //NON-NLS
			+ " AND types.category_type = " + BlackboardArtifact.Category.ANALYSIS_RESULT.getID(); // NON-NLS

	/**
	 * Get all analysis results of given artifact type.
	 *
	 * @param artifactTypeId The artifact type id for which to search.
	 *
	 * @return The list of analysis results.
	 *
	 * @throws TskCoreException Exception thrown if a critical error occurs
	 *                          within TSK core.
	 */
	public List getAnalysisResultsByType(int artifactTypeId) throws TskCoreException {
		return getAnalysisResultsWhere(" artifacts.artifact_type_id = " + artifactTypeId);
	}

	/**
	 * Get all analysis results of given artifact type.
	 *
	 * @param artifactTypeId  The artifact type id for which to search.
	 * @param dataSourceObjId Object Id of the data source to look under.
	 *
	 * @return The list of analysis results.
	 *
	 * @throws TskCoreException Exception thrown if a critical error occurs
	 *                          within TSK core.
	 */
	public List getAnalysisResultsByType(int artifactTypeId, long dataSourceObjId) throws TskCoreException {
		return getAnalysisResultsWhere(" artifacts.artifact_type_id = " + artifactTypeId + " AND artifacts.data_source_obj_id = " + dataSourceObjId);
	}

	/**
	 * Get all analysis results for a given object.
	 *
	 * @param sourceObjId Object id.
	 *
	 * @return list of analysis results.
	 *
	 * @throws TskCoreException exception thrown if a critical error occurs
	 *                          within TSK core.
	 */
	public List getAnalysisResults(long sourceObjId) throws TskCoreException {
		return getAnalysisResultsWhere(" artifacts.obj_id = " + sourceObjId);
	}

	/**
	 * Get all data artifacts for a given object.
	 *
	 * @param sourceObjId Object id.
	 *
	 * @return List of data artifacts.
	 *
	 * @throws TskCoreException exception thrown if a critical error occurs
	 *                          within TSK core.
	 */
	List getDataArtifactsBySource(long sourceObjId) throws TskCoreException {
		caseDb.acquireSingleUserCaseReadLock();
		try (CaseDbConnection connection = caseDb.getConnection()) {
			return getDataArtifactsWhere(String.format(" artifacts.obj_id = %d", sourceObjId), connection);
		} finally {
			caseDb.releaseSingleUserCaseReadLock();
		}
	}

	/**
	 * Returns true if there are data artifacts belonging to the sourceObjId.
* * @param sourceObjId The source content object id. * * @return True if there are data artifacts belonging to this source obj id. * * @throws TskCoreException */ public boolean hasDataArtifacts(long sourceObjId) throws TskCoreException { return hasArtifactsOfCategory(BlackboardArtifact.Category.DATA_ARTIFACT, sourceObjId); } /** * Returns true if there are analysis results belonging to the sourceObjId. * * @param sourceObjId The source content object id. * * @return True if there are analysis results belonging to this source obj * id. * * @throws TskCoreException */ public boolean hasAnalysisResults(long sourceObjId) throws TskCoreException { return hasArtifactsOfCategory(BlackboardArtifact.Category.ANALYSIS_RESULT, sourceObjId); } /** * Returns true if there are artifacts of the given category belonging to * the sourceObjId. * * @param category The category of the artifacts. * @param sourceObjId The source content object id. * * @return True if there are artifacts of the given category belonging to * this source obj id. * * @throws TskCoreException */ private boolean hasArtifactsOfCategory(BlackboardArtifact.Category category, long sourceObjId) throws TskCoreException { String queryString = "SELECT COUNT(*) AS count " //NON-NLS + " FROM blackboard_artifacts AS arts " + " JOIN blackboard_artifact_types AS types " //NON-NLS + " ON arts.artifact_type_id = types.artifact_type_id" //NON-NLS + " WHERE types.category_type = " + category.getID() + " AND arts.obj_id = " + sourceObjId; caseDb.acquireSingleUserCaseReadLock(); try (SleuthkitCase.CaseDbConnection connection = caseDb.getConnection(); Statement statement = connection.createStatement(); ResultSet resultSet = connection.executeQuery(statement, queryString);) { if (resultSet.next()) { return resultSet.getLong("count") > 0; } return false; } catch (SQLException ex) { throw new TskCoreException("Error getting artifact types is use for data source." 
+ ex.getMessage(), ex); } finally { caseDb.releaseSingleUserCaseReadLock(); } } /** * Get all analysis results for a given object. * * @param sourceObjId Object id. * @param connection Database connection to use. * * * @return list of analysis results. * * @throws TskCoreException exception thrown if a critical error occurs * within TSK core. */ List getAnalysisResults(long sourceObjId, CaseDbConnection connection) throws TskCoreException { return getAnalysisResultsWhere(" artifacts.obj_id = " + sourceObjId, connection); } /** * Get analysis results of the given type, for the given object. * * @param sourceObjId Object id. * @param artifactTypeId Result type to get. * * @return list of analysis results. * * @throws TskCoreException exception thrown if a critical error occurs * within TSK core. */ public List getAnalysisResults(long sourceObjId, int artifactTypeId) throws TskCoreException { // Get the artifact type to check that it in the analysis result category. BlackboardArtifact.Type artifactType = caseDb.getArtifactType(artifactTypeId); if (artifactType.getCategory() != BlackboardArtifact.Category.ANALYSIS_RESULT) { throw new TskCoreException(String.format("Artifact type id %d is not in analysis result catgeory.", artifactTypeId)); } String whereClause = " types.artifact_type_id = " + artifactTypeId + " AND artifacts.obj_id = " + sourceObjId; return getAnalysisResultsWhere(whereClause); } /** * Get all analysis results matching the given where sub-clause. * * * @param whereClause Where sub clause, specifies conditions to match. * * @return list of analysis results. * * @throws TskCoreException exception thrown if a critical error occurs * within TSK core. 
*/ public List getAnalysisResultsWhere(String whereClause) throws TskCoreException { caseDb.acquireSingleUserCaseReadLock(); try (CaseDbConnection connection = caseDb.getConnection()) { return getAnalysisResultsWhere(whereClause, connection); } finally { caseDb.releaseSingleUserCaseReadLock(); } } /** * Get all analysis results matching the given where sub-clause. Uses the * given database connection to execute the query. * * @param whereClause Where sub clause, specifies conditions to match. * @param connection Database connection to use. * * @return list of analysis results. * * @throws TskCoreException exception thrown if a critical error occurs * within TSK core */ List getAnalysisResultsWhere(String whereClause, CaseDbConnection connection) throws TskCoreException { final String queryString = ANALYSIS_RESULT_QUERY_STRING + " AND " + whereClause; try (Statement statement = connection.createStatement(); ResultSet resultSet = connection.executeQuery(statement, queryString);) { List analysisResults = resultSetToAnalysisResults(resultSet); return analysisResults; } catch (SQLException ex) { throw new TskCoreException(String.format("Error getting analysis results for WHERE clause = '%s'", whereClause), ex); } } /** * Get the analysis results by its artifact_obj_id. * * @param artifactObjId Artifact object id of the analysis result. * * @return AnalysisResult. * * @throws TskCoreException If a critical error occurred within TSK core. */ public AnalysisResult getAnalysisResultById(long artifactObjId) throws TskCoreException { String whereClause = " artifacts.artifact_obj_id = " + artifactObjId; List results = getAnalysisResultsWhere(whereClause); if (results.isEmpty()) { // throw an error if no analysis result found by id. 
throw new TskCoreException(String.format("Error getting analysis result with id = '%d'", artifactObjId)); } if (results.size() > 1) { // should not happen - throw an error throw new TskCoreException(String.format("Multiple analysis results found with id = '%d'", artifactObjId)); } return results.get(0); } /** * Creates AnalysisResult objects for the result set of a table query of the * form "SELECT * FROM blackboard_artifacts JOIN WHERE XYZ". * * @param rs A result set from a query of the blackboard_artifacts table of * the form "SELECT * FROM blackboard_artifacts, * tsk_analysis_results WHERE ...". * * @return A list of BlackboardArtifact objects. * * @throws SQLException Thrown if there is a problem iterating through * the result set. * @throws TskCoreException Thrown if there is an error looking up the * artifact type id. */ private List resultSetToAnalysisResults(ResultSet resultSet) throws SQLException, TskCoreException { ArrayList analysisResults = new ArrayList<>(); while (resultSet.next()) { analysisResults.add(new AnalysisResult(caseDb, resultSet.getLong("artifact_id"), resultSet.getLong("obj_id"), resultSet.getLong("artifact_obj_id"), resultSet.getObject("data_source_obj_id") != null ? 
resultSet.getLong("data_source_obj_id") : null, resultSet.getInt("artifact_type_id"), resultSet.getString("type_name"), resultSet.getString("display_name"), BlackboardArtifact.ReviewStatus.withID(resultSet.getInt("review_status_id")), new Score(Score.Significance.fromID(resultSet.getInt("significance")), Score.Priority.fromID(resultSet.getInt("priority"))), resultSet.getString("conclusion"), resultSet.getString("configuration"), resultSet.getString("justification"))); } //end for each resultSet return analysisResults; } private final static String DATA_ARTIFACT_QUERY_STRING = "SELECT DISTINCT artifacts.artifact_id AS artifact_id, " //NON-NLS + "artifacts.obj_id AS obj_id, artifacts.artifact_obj_id AS artifact_obj_id, artifacts.data_source_obj_id AS data_source_obj_id, artifacts.artifact_type_id AS artifact_type_id, " //NON-NLS + " types.type_name AS type_name, types.display_name AS display_name, types.category_type as category_type,"//NON-NLS + " artifacts.review_status_id AS review_status_id, " //NON-NLS + " data_artifacts.os_account_obj_id as os_account_obj_id " //NON-NLS + " FROM blackboard_artifacts AS artifacts " + " JOIN blackboard_artifact_types AS types " //NON-NLS + " ON artifacts.artifact_type_id = types.artifact_type_id" //NON-NLS + " LEFT JOIN tsk_data_artifacts AS data_artifacts " + " ON artifacts.artifact_obj_id = data_artifacts.artifact_obj_id " //NON-NLS + " WHERE artifacts.review_status_id != " + BlackboardArtifact.ReviewStatus.REJECTED.getID() //NON-NLS + " AND types.category_type = " + BlackboardArtifact.Category.DATA_ARTIFACT.getID(); // NON-NLS /** * Gets all data artifacts of a given type for a given data source. To get * all the data artifacts for the data source, pass null for the type ID. * * @param dataSourceObjId The object ID of the data source. * @param artifactTypeID The type ID of the desired artifacts or null. * * @return A list of the data artifacts, possibly empty. 
* * @throws TskCoreException This exception is thrown if there is an error * querying the case database. */ public List getDataArtifacts(long dataSourceObjId, Integer artifactTypeID) throws TskCoreException { caseDb.acquireSingleUserCaseReadLock(); try (CaseDbConnection connection = caseDb.getConnection()) { String whereClause = " artifacts.data_source_obj_id = " + dataSourceObjId; if (artifactTypeID != null) { whereClause += " AND artifacts.artifact_type_id = " + artifactTypeID; } return getDataArtifactsWhere(whereClause, connection); } finally { caseDb.releaseSingleUserCaseReadLock(); } } /** * Get all data artifacts of a given type for a given data source. * * @param artifactTypeID Artifact type to get. * @param dataSourceObjId Data source to look under. * * @return List of data artifacts. May be an empty list. * * @throws TskCoreException exception thrown if a critical error occurs * within TSK core. */ public List getDataArtifacts(int artifactTypeID, long dataSourceObjId) throws TskCoreException { // Get the artifact type to check that it in the data artifact category. BlackboardArtifact.Type artifactType = caseDb.getArtifactType(artifactTypeID); if (artifactType.getCategory() != BlackboardArtifact.Category.DATA_ARTIFACT) { throw new TskCoreException(String.format("Artifact type id %d is not in data artifact catgeory.", artifactTypeID)); } caseDb.acquireSingleUserCaseReadLock(); try (CaseDbConnection connection = caseDb.getConnection()) { String whereClause = "artifacts.data_source_obj_id = " + dataSourceObjId + " AND artifacts.artifact_type_id = " + artifactTypeID; return getDataArtifactsWhere(whereClause, connection); } finally { caseDb.releaseSingleUserCaseReadLock(); } } /** * Get all data artifacts of a given type. * * @param artifactTypeID Artifact type to get. * * @return List of data artifacts. May be an empty list. * * @throws TskCoreException exception thrown if a critical error occurs * within TSK core. 
*/ public List getDataArtifacts(int artifactTypeID) throws TskCoreException { // Get the artifact type to check that it in the data artifact category. BlackboardArtifact.Type artifactType = caseDb.getArtifactType(artifactTypeID); if (artifactType.getCategory() != BlackboardArtifact.Category.DATA_ARTIFACT) { throw new TskCoreException(String.format("Artifact type id %d is not in data artifact catgeory.", artifactTypeID)); } caseDb.acquireSingleUserCaseReadLock(); try (CaseDbConnection connection = caseDb.getConnection()) { String whereClause = " artifacts.artifact_type_id = " + artifactTypeID; return getDataArtifactsWhere(whereClause, connection); } finally { caseDb.releaseSingleUserCaseReadLock(); } } /** * Get the data artifact with the given artifact obj id. * * @param artifactObjId Object id of the data artifact to get. * * @return Data artifact with given artifact object id. * * @throws TskCoreException exception thrown if a critical error occurs * within TSK core. */ public DataArtifact getDataArtifactById(long artifactObjId) throws TskCoreException { caseDb.acquireSingleUserCaseReadLock(); try (CaseDbConnection connection = caseDb.getConnection()) { String whereClause = " artifacts.artifact_obj_id = " + artifactObjId; List artifacts = getDataArtifactsWhere(whereClause, connection); if (artifacts.isEmpty()) { // throw an error if no analysis result found by id. throw new TskCoreException(String.format("Error getting data artifact with id = '%d'", artifactObjId)); } if (artifacts.size() > 1) { // should not happen - throw an error throw new TskCoreException(String.format("Multiple data artifacts found with id = '%d'", artifactObjId)); } return artifacts.get(0); } finally { caseDb.releaseSingleUserCaseReadLock(); } } /** * Get all data artifacts matching the given where sub-clause. * * @param whereClause SQL Where sub-clause, specifies conditions to match. * * @return List of data artifacts. May be an empty list. 
* * @throws TskCoreException exception thrown if a critical error occurs * within TSK core. */ List getDataArtifactsWhere(String whereClause) throws TskCoreException { caseDb.acquireSingleUserCaseReadLock(); try (CaseDbConnection connection = caseDb.getConnection()) { return getDataArtifactsWhere(whereClause, connection); } finally { caseDb.releaseSingleUserCaseReadLock(); } } /** * Get all data artifacts matching the given where sub-clause. Uses the * given database connection to execute the query. * * @param whereClause SQL Where sub-clause, specifies conditions to match. * @param connection Database connection to use. * * @return List of data artifacts. May be an empty list. * * @throws TskCoreException exception thrown if a critical error occurs * within TSK core. */ List getDataArtifactsWhere(String whereClause, CaseDbConnection connection) throws TskCoreException { final String queryString = DATA_ARTIFACT_QUERY_STRING + " AND ( " + whereClause + " )"; try (Statement statement = connection.createStatement(); ResultSet resultSet = connection.executeQuery(statement, queryString);) { List dataArtifacts = resultSetToDataArtifacts(resultSet, connection); return dataArtifacts; } catch (SQLException ex) { throw new TskCoreException(String.format("Error getting data artifacts with queryString = %s", queryString), ex); } } /** * Creates DataArtifacts objects for the resultset of a table query of the * form "SELECT * FROM blackboard_artifacts JOIN data_artifacts WHERE ...". * * @param resultSet A result set from a query of the blackboard_artifacts * table of the form "SELECT * FROM blackboard_artifacts, * tsk_data_artifacts WHERE ...". * @param connection Database connection. * * @return A list of DataArtifact objects. * * @throws SQLException Thrown if there is a problem iterating through * the result set. * @throws TskCoreException Thrown if there is an error looking up the * artifact type id. 
*/ private List resultSetToDataArtifacts(ResultSet resultSet, CaseDbConnection connection) throws SQLException, TskCoreException { ArrayList dataArtifacts = new ArrayList<>(); while (resultSet.next()) { Long osAccountObjId = resultSet.getLong("os_account_obj_id"); if (resultSet.wasNull()) { osAccountObjId = null; } dataArtifacts.add(new DataArtifact(caseDb, resultSet.getLong("artifact_id"), resultSet.getLong("obj_id"), resultSet.getLong("artifact_obj_id"), resultSet.getObject("data_source_obj_id") != null ? resultSet.getLong("data_source_obj_id") : null, resultSet.getInt("artifact_type_id"), resultSet.getString("type_name"), resultSet.getString("display_name"), BlackboardArtifact.ReviewStatus.withID(resultSet.getInt("review_status_id")), osAccountObjId, false)); } //end for each resultSet return dataArtifacts; } /** * Get the artifact type associated with an artifact type id. * * @param artTypeId An artifact type id. * * @return The artifact type. * * @throws TskCoreException If an error occurs accessing the case database * or no value is found. * */ public BlackboardArtifact.Type getArtifactType(int artTypeId) throws TskCoreException { return caseDb.getArtifactType(artTypeId); } /** * Gets an attribute type, creating it if it does not already exist. Use * this method to define custom attribute types. * * @param typeName The type name of the attribute type. * @param valueType The value type of the attribute type. * @param displayName The display name of the attribute type. * * @return A type object representing the attribute type. * * @throws BlackboardException If there is a problem getting or adding the * attribute type. 
*/ public BlackboardAttribute.Type getOrAddAttributeType(String typeName, BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE valueType, String displayName) throws BlackboardException { try { return caseDb.addArtifactAttributeType(typeName, valueType, displayName); } catch (TskDataException typeExistsEx) { try { return caseDb.getAttributeType(typeName); } catch (TskCoreException ex) { throw new BlackboardException("Failed to get or add attribute type", ex); } } catch (TskCoreException ex) { throw new BlackboardException("Failed to get or add attribute type", ex); } } /** * Gets the list of all artifact types in use for the given data source. * Gets both standard and custom types. * * @param dataSourceObjId data source object id * * @return The list of artifact types * * @throws TskCoreException exception thrown if a critical error occurred * within tsk core */ public List getArtifactTypesInUse(long dataSourceObjId) throws TskCoreException { final String queryString = "SELECT DISTINCT arts.artifact_type_id AS artifact_type_id, " + "types.type_name AS type_name, " + "types.display_name AS display_name, " + "types.category_type AS category_type " + "FROM blackboard_artifact_types AS types " + "INNER JOIN blackboard_artifacts AS arts " + "ON arts.artifact_type_id = types.artifact_type_id " + "WHERE arts.data_source_obj_id = " + dataSourceObjId; caseDb.acquireSingleUserCaseReadLock(); try (SleuthkitCase.CaseDbConnection connection = caseDb.getConnection(); Statement statement = connection.createStatement(); ResultSet resultSet = connection.executeQuery(statement, queryString);) { List uniqueArtifactTypes = new ArrayList<>(); while (resultSet.next()) { uniqueArtifactTypes.add(new BlackboardArtifact.Type(resultSet.getInt("artifact_type_id"), resultSet.getString("type_name"), resultSet.getString("display_name"), BlackboardArtifact.Category.fromID(resultSet.getInt("category_type")))); } return uniqueArtifactTypes; } catch (SQLException ex) { throw new 
TskCoreException("Error getting artifact types is use for data source." + ex.getMessage(), ex); } finally { caseDb.releaseSingleUserCaseReadLock(); } } /** * Get count of all blackboard artifacts of a given type for the given data * source. Does not include rejected artifacts. * * @param artifactTypeID artifact type id (must exist in database) * @param dataSourceObjId data source object id * * @return count of blackboard artifacts * * @throws TskCoreException exception thrown if a critical error occurs * within TSK core */ public long getArtifactsCount(int artifactTypeID, long dataSourceObjId) throws TskCoreException { return getArtifactsCountHelper(artifactTypeID, "blackboard_artifacts.data_source_obj_id = '" + dataSourceObjId + "';"); } /** * Get all blackboard artifacts of a given type. Does not included rejected * artifacts. * * @param artifactTypeID artifact type to get * @param dataSourceObjId data source to look under * * @return list of blackboard artifacts * * @throws TskCoreException exception thrown if a critical error occurs * within TSK core */ public List getArtifacts(int artifactTypeID, long dataSourceObjId) throws TskCoreException { String whereClause = String.format("artifacts.data_source_obj_id = %d", dataSourceObjId); return getArtifactsWhere(caseDb.getArtifactType(artifactTypeID), whereClause); } /** * Get all blackboard artifacts of the given type(s) for the given data * source(s). Does not included rejected artifacts. 
* * @param artifactTypes list of artifact types to get * @param dataSourceObjIds data sources to look under * * @return list of blackboard artifacts * * @throws TskCoreException exception thrown if a critical error occurs * within TSK core */ public List getArtifacts(Collection artifactTypes, Collection dataSourceObjIds) throws TskCoreException { if (artifactTypes.isEmpty() || dataSourceObjIds.isEmpty()) { return new ArrayList<>(); } String analysisResultQuery = ""; String dataArtifactQuery = ""; for (BlackboardArtifact.Type type : artifactTypes) { if (type.getCategory() == BlackboardArtifact.Category.ANALYSIS_RESULT) { if (!analysisResultQuery.isEmpty()) { analysisResultQuery += " OR "; } analysisResultQuery += "types.artifact_type_id = " + type.getTypeID(); } else { if (!dataArtifactQuery.isEmpty()) { dataArtifactQuery += " OR "; } dataArtifactQuery += "types.artifact_type_id = " + type.getTypeID(); } } String dsQuery = ""; for (long dsId : dataSourceObjIds) { if (!dsQuery.isEmpty()) { dsQuery += " OR "; } dsQuery += "artifacts.data_source_obj_id = " + dsId; } List artifacts = new ArrayList<>(); if (!analysisResultQuery.isEmpty()) { String fullQuery = "( " + analysisResultQuery + " ) AND (" + dsQuery + ") "; artifacts.addAll(this.getAnalysisResultsWhere(fullQuery)); } if (!dataArtifactQuery.isEmpty()) { String fullQuery = "( " + dataArtifactQuery + " ) AND (" + dsQuery + ") "; artifacts.addAll(this.getDataArtifactsWhere(fullQuery)); } return artifacts; } /** * Gets count of blackboard artifacts of given type that match a given WHERE * clause. Uses a SELECT COUNT(*) FROM blackboard_artifacts statement * * @param artifactTypeID artifact type to count * @param whereClause The WHERE clause to append to the SELECT statement. * * @return A count of matching BlackboardArtifact . * * @throws TskCoreException If there is a problem querying the case * database. 
*/ private long getArtifactsCountHelper(int artifactTypeID, String whereClause) throws TskCoreException { String queryString = "SELECT COUNT(*) AS count FROM blackboard_artifacts " + "WHERE blackboard_artifacts.artifact_type_id = " + artifactTypeID + " AND blackboard_artifacts.review_status_id !=" + BlackboardArtifact.ReviewStatus.REJECTED.getID() + " AND " + whereClause; caseDb.acquireSingleUserCaseReadLock(); try (SleuthkitCase.CaseDbConnection connection = caseDb.getConnection(); Statement statement = connection.createStatement(); ResultSet resultSet = connection.executeQuery(statement, queryString);) { //NON-NLS long count = 0; if (resultSet.next()) { count = resultSet.getLong("count"); } return count; } catch (SQLException ex) { throw new TskCoreException("Error getting artifact types is use for data source." + ex.getMessage(), ex); } finally { caseDb.releaseSingleUserCaseReadLock(); } } /* * Determine if an artifact of a given type exists for given content with a * specific list of attributes. * * @param content The content whose artifacts need to be looked at. @param * artifactType The type of artifact to look for. @param attributesList The * list of attributes to look for. * * @return True if the specific artifact exists; otherwise false. * * @throws TskCoreException If there is a problem getting artifacts or * attributes. */ public boolean artifactExists(Content content, BlackboardArtifact.ARTIFACT_TYPE artifactType, Collection attributesList) throws TskCoreException { ArrayList artifactsList; /* * Get the content's artifacts. */ artifactsList = content.getArtifacts(artifactType); if (artifactsList.isEmpty()) { return false; } /* * Get each artifact's attributes and analyze them for matches. */ for (BlackboardArtifact artifact : artifactsList) { if (attributesMatch(artifact.getAttributes(), attributesList)) { /* * The exact artifact exists, so we don't need to look any * further. 
*/ return true; } } /* * None of the artifacts have the exact set of attribute type/value * combinations. The provided content does not have the artifact being * sought. */ return false; } /** * Determine if the expected attributes can all be found in the supplied * file attributes list. * * @param fileAttributesList The list of attributes to analyze. * @param expectedAttributesList The list of attribute to check for. * * @return True if all attributes are found; otherwise false. */ private boolean attributesMatch(Collection fileAttributesList, Collection expectedAttributesList) { for (BlackboardAttribute expectedAttribute : expectedAttributesList) { boolean match = false; for (BlackboardAttribute fileAttribute : fileAttributesList) { BlackboardAttribute.Type attributeType = fileAttribute.getAttributeType(); if (attributeType.getTypeID() != expectedAttribute.getAttributeType().getTypeID()) { continue; } Object fileAttributeValue; Object expectedAttributeValue; switch (attributeType.getValueType()) { case BYTE: fileAttributeValue = fileAttribute.getValueBytes(); expectedAttributeValue = expectedAttribute.getValueBytes(); break; case DOUBLE: fileAttributeValue = fileAttribute.getValueDouble(); expectedAttributeValue = expectedAttribute.getValueDouble(); break; case INTEGER: fileAttributeValue = fileAttribute.getValueInt(); expectedAttributeValue = expectedAttribute.getValueInt(); break; case LONG: // Fall-thru case DATETIME: fileAttributeValue = fileAttribute.getValueLong(); expectedAttributeValue = expectedAttribute.getValueLong(); break; case STRING: // Fall-thru case JSON: fileAttributeValue = fileAttribute.getValueString(); expectedAttributeValue = expectedAttribute.getValueString(); break; default: fileAttributeValue = fileAttribute.getDisplayString(); expectedAttributeValue = expectedAttribute.getDisplayString(); break; } /* * If the exact attribute was found, mark it as a match to * continue looping through the expected attributes list. 
*/ if (fileAttributeValue instanceof byte[]) { if (Arrays.equals((byte[]) fileAttributeValue, (byte[]) expectedAttributeValue)) { match = true; break; } } else if (fileAttributeValue.equals(expectedAttributeValue)) { match = true; break; } } if (!match) { /* * The exact attribute type/value combination was not found. */ return false; } } /* * All attribute type/value combinations were found in the provided * attributes list. */ return true; } /** * A Blackboard exception. */ public static final class BlackboardException extends Exception { private static final long serialVersionUID = 1L; /** * Constructs a blackboard exception with the specified message. * * @param message The message. */ BlackboardException(String message) { super(message); } /** * Constructs a blackboard exception with the specified message and * cause. * * @param message The message. * @param cause The cause. */ BlackboardException(String message, Throwable cause) { super(message, cause); } } /** * Add a new data artifact with the given type. * * @param artifactType The type of the data artifact. * @param sourceObjId The content that is the source of this artifact. * @param dataSourceObjId The data source the artifact source content * belongs to, may be the same as the sourceObjId. * May be null. * @param attributes The attributes. May be empty or null. * @param osAccountId The OS account id associated with the artifact. * May be null. * * @return DataArtifact A new data artifact. * * @throws TskCoreException If a critical error occurs within tsk core. */ public DataArtifact newDataArtifact(BlackboardArtifact.Type artifactType, long sourceObjId, Long dataSourceObjId, Collection attributes, Long osAccountId) throws TskCoreException { if (artifactType.getCategory() != BlackboardArtifact.Category.DATA_ARTIFACT) { throw new TskCoreException(String.format("Artifact type (name = %s) is not of Data Artifact category. 
", artifactType.getTypeName())); } CaseDbTransaction transaction = caseDb.beginTransaction(); try { DataArtifact dataArtifact = newDataArtifact(artifactType, sourceObjId, dataSourceObjId, attributes, osAccountId, transaction); transaction.commit(); return dataArtifact; } catch (TskCoreException ex) { try { transaction.rollback(); } catch (TskCoreException ex2) { LOGGER.log(Level.SEVERE, "Failed to rollback transaction after exception. " + "Error invoking newDataArtifact with dataSourceObjId: " + dataSourceObjId + ", sourceObjId: " + sourceObjId, ex2); } throw ex; } } /** * Add a new data artifact with the given type. * * This api executes in the context of the given transaction. * * @param artifactType The type of the data artifact. * @param sourceObjId The content that is the source of this artifact. * @param dataSourceObjId The data source the artifact source content * belongs to, may be the same as the sourceObjId. * May be null. * @param attributes The attributes. May be empty or null. * @param osAccountObjId The OS account associated with the artifact. May * be null. * @param transaction The transaction in the scope of which the * operation is to be performed. * * @return DataArtifact New blackboard artifact * * @throws TskCoreException If a critical error occurs within tsk core. */ public DataArtifact newDataArtifact(BlackboardArtifact.Type artifactType, long sourceObjId, Long dataSourceObjId, Collection attributes, Long osAccountObjId, final CaseDbTransaction transaction) throws TskCoreException { if (artifactType.getCategory() != BlackboardArtifact.Category.DATA_ARTIFACT) { throw new TskCoreException(String.format("Artifact type (name = %s) is not of Data Artifact category. 
", artifactType.getTypeName())); } try { CaseDbConnection connection = transaction.getConnection(); long artifact_obj_id = caseDb.addObject(sourceObjId, TskData.ObjectType.ARTIFACT.getObjectType(), connection); PreparedStatement statement = caseDb.createInsertArtifactStatement(artifactType.getTypeID(), sourceObjId, artifact_obj_id, dataSourceObjId, connection); connection.executeUpdate(statement); try (ResultSet resultSet = statement.getGeneratedKeys()) { resultSet.next(); DataArtifact dataArtifact = new DataArtifact(caseDb, resultSet.getLong(1), //last_insert_rowid() sourceObjId, artifact_obj_id, dataSourceObjId, artifactType.getTypeID(), artifactType.getTypeName(), artifactType.getDisplayName(), BlackboardArtifact.ReviewStatus.UNDECIDED, osAccountObjId, true); // Add a row in tsk_data_artifact if the os account is present if (osAccountObjId != null) { String insertDataArtifactSQL = "INSERT INTO tsk_data_artifacts (artifact_obj_id, os_account_obj_id) VALUES (?, ?)"; statement = connection.getPreparedStatement(insertDataArtifactSQL, Statement.NO_GENERATED_KEYS); statement.clearParameters(); statement.setLong(1, artifact_obj_id); statement.setLong(2, osAccountObjId); connection.executeUpdate(statement); } // if attributes are provided, add them to the artifact. if (Objects.nonNull(attributes) && !attributes.isEmpty()) { dataArtifact.addAttributes(attributes, transaction); } return dataArtifact; } } catch (SQLException ex) { throw new TskCoreException(String.format("Error creating a data artifact with type id = %d, objId = %d, and data source oj id = %d ", artifactType.getTypeID(), sourceObjId, dataSourceObjId), ex); } } /** * Returns a list of BlackboardArtifacts of the given artifact type and * source object id. * * @param artifactType The artifact type. * @param sourceObjId The artifact parent source id (obj_id) * * @return A list of BlackboardArtifacts for the given parameters. 
* * @throws TskCoreException */ List getArtifactsBySourceId(BlackboardArtifact.Type artifactType, long sourceObjId) throws TskCoreException { String whereClause = String.format("artifacts.obj_id = %d", sourceObjId); return getArtifactsWhere(artifactType, whereClause); } /** * Returns a list of artifacts of the given type. * * @param artifactType The type of artifacts to retrieve. * * @return A list of artifacts of the given type. * * @throws TskCoreException */ List getArtifactsByType(BlackboardArtifact.Type artifactType) throws TskCoreException { List artifacts = new ArrayList<>(); if (artifactType.getCategory() == BlackboardArtifact.Category.ANALYSIS_RESULT) { artifacts.addAll(getAnalysisResultsByType(artifactType.getTypeID())); } else { artifacts.addAll(getDataArtifacts(artifactType.getTypeID())); } return artifacts; } /** * Returns a list of artifacts for the given artifact type with the given * where clause. * * The Where clause will be added to the basic query for retrieving * DataArtifacts or AnalysisResults from the DB. The where clause should not * include the artifact type. This method will add the artifact type to the * where clause. * * @param artifactType The artifact type. * @param whereClause Additional where clause. * * @return A list of BlackboardArtifacts of the given type with the given * conditional. * * @throws TskCoreException */ private List getArtifactsWhere(BlackboardArtifact.Type artifactType, String whereClause) throws TskCoreException { List artifacts = new ArrayList<>(); String whereWithType = whereClause + " AND artifacts.artifact_type_id = " + artifactType.getTypeID(); if (artifactType.getCategory() == BlackboardArtifact.Category.ANALYSIS_RESULT) { artifacts.addAll(getAnalysisResultsWhere(whereWithType)); } else { artifacts.addAll(getDataArtifactsWhere(whereWithType)); } return artifacts; } /** * Event published by SleuthkitCase when one or more artifacts are posted. 
A * posted artifact is complete (all attributes have been added) and ready * for further processing. */ final public class ArtifactsPostedEvent { private final String moduleName; private final ImmutableSet artifactTypes; private final ImmutableSet artifacts; private ArtifactsPostedEvent(Collection artifacts, String moduleName) throws BlackboardException { Set typeIDS = artifacts.stream() .map(BlackboardArtifact::getArtifactTypeID) .collect(Collectors.toSet()); Set types = new HashSet<>(); for (Integer typeID : typeIDS) { try { types.add(caseDb.getArtifactType(typeID)); } catch (TskCoreException tskCoreException) { throw new BlackboardException("Error getting artifact type by id.", tskCoreException); } } artifactTypes = ImmutableSet.copyOf(types); this.artifacts = ImmutableSet.copyOf(artifacts); this.moduleName = moduleName; } public Collection getArtifacts() { return ImmutableSet.copyOf(artifacts); } public Collection getArtifacts(BlackboardArtifact.Type artifactType) { Set tempSet = artifacts.stream() .filter(artifact -> artifact.getArtifactTypeID() == artifactType.getTypeID()) .collect(Collectors.toSet()); return ImmutableSet.copyOf(tempSet); } public String getModuleName() { return moduleName; } public Collection getArtifactTypes() { return ImmutableSet.copyOf(artifactTypes); } } } sleuthkit-4.11.1/bindings/java/src/org/sleuthkit/datamodel/BlackboardArtifactTag.java000755 000765 000024 00000003111 14137073413 031451 0ustar00carrierstaff000000 000000 /* * Sleuth Kit Data Model * * Copyright 2013-2018 Basis Technology Corp. * Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.sleuthkit.datamodel; /** * Instances of this class are data transfer objects (DTOs) that represent tags * a user can apply to blackboard artifacts. */ public class BlackboardArtifactTag extends Tag { private final BlackboardArtifact artifact; private final Content content; // Clients of the org.sleuthkit.datamodel package should not directly create these objects. BlackboardArtifactTag(long id, BlackboardArtifact artifact, Content content, TagName name, String comment, String userName) { super(id, name, comment, userName); this.artifact = artifact; this.content = content; } /** * Returns the tagged artifact * * @return tagged artifact */ public BlackboardArtifact getArtifact() { return artifact; } /** * Returns source content of the tagged artifact * * @return source content of the tagged artifact */ public Content getContent() { return content; } } sleuthkit-4.11.1/bindings/java/src/org/sleuthkit/datamodel/TimelineEventDescriptionWithTime.java000755 000765 000024 00000002337 14137073413 033753 0ustar00carrierstaff000000 000000 /* * Sleuth Kit Data Model * * Copyright 2018-2019 Basis Technology Corp. * Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.sleuthkit.datamodel; /** * Bundles a description of an event along with the timestamp for the event. * Used as an intermediate object when parsing data before it is entered into * the DB. */ final class TimelineEventDescriptionWithTime extends TimelineEventDescription { final private long time; long getTime() { return time; } TimelineEventDescriptionWithTime(long time, String shortDescription, String medDescription, String fullDescription) { super(fullDescription, medDescription, shortDescription); this.time = time; } } sleuthkit-4.11.1/bindings/java/src/org/sleuthkit/datamodel/AbstractFile.java000644 000765 000024 00000135040 14137073414 027663 0ustar00carrierstaff000000 000000 /* * SleuthKit Java Bindings * * Copyright 2011-2021 Basis Technology Corp. * Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
*/ package org.sleuthkit.datamodel; import java.io.FileNotFoundException; import java.io.IOException; import java.io.RandomAccessFile; import java.sql.SQLException; import java.sql.Statement; import java.text.MessageFormat; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.List; import java.util.Objects; import java.util.Optional; import java.util.ResourceBundle; import java.util.Set; import java.util.SortedSet; import java.util.TimeZone; import java.util.logging.Level; import java.util.logging.Logger; import org.sleuthkit.datamodel.SleuthkitCase.CaseDbTransaction; import org.sleuthkit.datamodel.TskData.FileKnown; import org.sleuthkit.datamodel.TskData.TSK_FS_META_FLAG_ENUM; import org.sleuthkit.datamodel.TskData.TSK_FS_META_TYPE_ENUM; import org.sleuthkit.datamodel.TskData.TSK_FS_NAME_FLAG_ENUM; import org.sleuthkit.datamodel.TskData.TSK_FS_NAME_TYPE_ENUM; /** * An abstract base class for classes that represent files that have been added * to the case. 
*/
public abstract class AbstractFile extends AbstractContent {

	// Type/name/metadata columns mirrored from the file's tsk_files table row.
	protected final TskData.TSK_DB_FILES_TYPE_ENUM fileType;
	protected final TSK_FS_NAME_TYPE_ENUM dirType;
	protected final TSK_FS_META_TYPE_ENUM metaType;
	protected TSK_FS_NAME_FLAG_ENUM dirFlag;
	protected Set metaFlags;
	protected long size;
	protected final long metaAddr, ctime, crtime, atime, mtime;
	protected final int metaSeq;
	protected final int uid, gid;
	protected final int attrId;
	protected final TskData.TSK_FS_ATTR_TYPE_ENUM attrType;
	protected final Set modes;

	//local file support
	private boolean localPathSet = false; ///< if set by setLocalPath(), reads are done on local file
	private String localPath; ///< local path as stored in db tsk_files_path, is relative to the db,
	private String localAbsPath; ///< absolute path representation of the local path
	private volatile RandomAccessFile localFileHandle;
	private volatile java.io.File localFile;
	private TskData.EncodingType encodingType;

	//range support
	private List ranges;

	/*
	 * path of parent directory
	 */
	protected final String parentPath;

	/**
	 * knownState status in database
	 */
	protected TskData.FileKnown knownState;
	private boolean knownStateDirty = false;

	/*
	 * md5 hash
	 */
	protected String md5Hash;
	private boolean md5HashDirty = false;

	/*
	 * SHA-256 hash
	 */
	protected String sha256Hash;
	private boolean sha256HashDirty = false;

	private String mimeType;
	private boolean mimeTypeDirty = false;

	private static final Logger LOGGER = Logger.getLogger(AbstractFile.class.getName());
	private static final ResourceBundle BUNDLE = ResourceBundle.getBundle("org.sleuthkit.datamodel.Bundle");

	private long dataSourceObjectId;

	private final String extension;

	// Lazily-populated cache of file attributes; see getAttributes().
	private final List fileAttributesCache = new ArrayList();
	private boolean loadedAttributesCacheFromDb = false;

	private final String ownerUid; // string owner uid, for example a Windows SID.
	// different from the numeric uid which is more commonly found
	// on Unix based file systems.
private final Long osAccountObjId; // obj id of the owner's OS account, may be null

	private volatile String uniquePath;

	/**
	 * Initializes common fields used by AbstactFile implementations (objects in
	 * tsk_files table)
	 *
	 * @param db                 case / db handle where this file belongs to
	 * @param objId              object id in tsk_objects table
	 * @param dataSourceObjectId The object id of the root data source of this
	 *                           file.
	 * @param attrType
	 * @param attrId
	 * @param name               name field of the file
	 * @param fileType           type of the file
	 * @param metaAddr
	 * @param metaSeq
	 * @param dirType
	 * @param metaType
	 * @param dirFlag
	 * @param metaFlags
	 * @param size
	 * @param ctime
	 * @param crtime
	 * @param atime
	 * @param mtime
	 * @param modes
	 * @param uid
	 * @param gid
	 * @param md5Hash            md5sum of the file, or null or "NULL" if not
	 *                           present
	 * @param sha256Hash         sha256 hash of the file, or null or "NULL" if
	 *                           not present
	 * @param knownState         knownState status of the file, or null if
	 *                           unknown (default)
	 * @param parentPath
	 * @param mimeType           The MIME type of the file, can be null.
	 * @param extension          The extension part of the file name (not
	 *                           including the '.'), can be null.
	 * @param ownerUid           Owner uid/SID, can be null if not available.
	 * @param osAccountObjectId  Object Id of the owner OsAccount, may be null.
	 *
	 */
	AbstractFile(SleuthkitCase db,
			long objId,
			long dataSourceObjectId,
			TskData.TSK_FS_ATTR_TYPE_ENUM attrType, int attrId,
			String name,
			TskData.TSK_DB_FILES_TYPE_ENUM fileType,
			long metaAddr, int metaSeq,
			TSK_FS_NAME_TYPE_ENUM dirType, TSK_FS_META_TYPE_ENUM metaType,
			TSK_FS_NAME_FLAG_ENUM dirFlag, short metaFlags,
			long size,
			long ctime, long crtime, long atime, long mtime,
			short modes,
			int uid, int gid,
			String md5Hash, String sha256Hash, FileKnown knownState,
			String parentPath,
			String mimeType,
			String extension,
			String ownerUid,
			Long osAccountObjectId,
			List fileAttributes) {
		super(db, objId, name);
		this.dataSourceObjectId = dataSourceObjectId;
		this.attrType = attrType;
		this.attrId = attrId;
		this.fileType = fileType;
		this.metaAddr = metaAddr;
		this.metaSeq = metaSeq;
		this.dirType = dirType;
		this.metaType = metaType;
		this.dirFlag = dirFlag;
		// metaFlags/modes are stored as bit masks in the db; expand to enum sets.
		this.metaFlags = TSK_FS_META_FLAG_ENUM.valuesOf(metaFlags);
		this.size = size;
		this.ctime = ctime;
		this.crtime = crtime;
		this.atime = atime;
		this.mtime = mtime;
		this.uid = uid;
		this.gid = gid;
		this.modes = TskData.TSK_FS_META_MODE_ENUM.valuesOf(modes);
		this.md5Hash = md5Hash;
		this.sha256Hash = sha256Hash;
		if (knownState == null) {
			// default when the db has no known status for the file
			this.knownState = FileKnown.UNKNOWN;
		} else {
			this.knownState = knownState;
		}
		this.parentPath = parentPath;
		this.mimeType = mimeType;
		this.extension = extension == null ? "" : extension;
		this.encodingType = TskData.EncodingType.NONE;
		this.ownerUid = ownerUid;
		this.osAccountObjId = osAccountObjectId;
		// Pre-seed the attribute cache when attributes are supplied up front.
		if (Objects.nonNull(fileAttributes) && !fileAttributes.isEmpty()) {
			this.fileAttributesCache.addAll(fileAttributes);
			loadedAttributesCacheFromDb = true;
		}
	}

	/**
	 * Gets type of the abstract file as defined in TSK_DB_FILES_TYPE_ENUM
	 *
	 * @return the type of the abstract file
	 */
	public TskData.TSK_DB_FILES_TYPE_ENUM getType() {
		return fileType;
	}

	/**
	 * Get the attribute type
	 *
	 * @return attribute type
	 */
	public TskData.TSK_FS_ATTR_TYPE_ENUM getAttrType() {
		return attrType;
	}

	/**
	 * Get the attribute id
	 *
	 * @return attribute id
	 */
	public int getAttributeId() {
		return attrId;
	}

	/**
	 * Get the change time
	 *
	 * @return change time
	 */
	public long getCtime() {
		return ctime;
	}

	/**
	 * Get the change time as Date (in local timezone)
	 *
	 * @return change time as Date
	 */
	public String getCtimeAsDate() {
		return epochToTime(ctime);
	}

	/**
	 * Get the creation time
	 *
	 * @return creation time
	 */
	public long getCrtime() {
		return crtime;
	}

	/**
	 * Get the creation time as Date (in local timezone)
	 *
	 * @return creation time as Date
	 */
	public String getCrtimeAsDate() {
		return epochToTime(crtime);
	}

	/**
	 * Get the access time
	 *
	 * @return access time
	 */
	public long getAtime() {
		return atime;
	}

	/**
	 * Get the access time as Date (in local timezone)
	 *
	 * @return access time as Date
	 */
	public String getAtimeAsDate() {
		return epochToTime(atime);
	}

	/**
	 * Get the modified time
	 *
	 * @return modified time
	 */
	public long getMtime() {
		return mtime;
	}

	/**
	 * Get the modified time as Date (in local timezone)
	 *
	 * @return modified time as Date
	 */
	public String getMtimeAsDate() {
		return epochToTime(mtime);
	}

	/**
	 * Get the user id
	 *
	 * @return user id
	 */
	public int getUid() {
		return uid;
	}

	/**
	 * Get the group id
	 *
	 * @return group id
	 */
	public int getGid() {
		return gid;
	}

	/**
	 * Get the file meta address
	 *
	 * @return Address of the meta data structure
	 */
	public long getMetaAddr() {
		return metaAddr;
} /** * Get the file meta address sequence. Only useful with NTFS. Incremented * each time a structure is re-allocated. * * @return Address of the meta data structure sequence. */ public long getMetaSeq() { return metaSeq; } /** * Get the file's mode as a user-displayable string * * @return formatted user-displayable string for mode */ public String getModesAsString() { int mode = TskData.TSK_FS_META_MODE_ENUM.toInt(modes); String result = ""; short isuid = TskData.TSK_FS_META_MODE_ENUM.TSK_FS_META_MODE_ISUID.getMode(); short isgid = TskData.TSK_FS_META_MODE_ENUM.TSK_FS_META_MODE_ISGID.getMode(); short isvtx = TskData.TSK_FS_META_MODE_ENUM.TSK_FS_META_MODE_ISVTX.getMode(); short irusr = TskData.TSK_FS_META_MODE_ENUM.TSK_FS_META_MODE_IRUSR.getMode(); short iwusr = TskData.TSK_FS_META_MODE_ENUM.TSK_FS_META_MODE_IWUSR.getMode(); short ixusr = TskData.TSK_FS_META_MODE_ENUM.TSK_FS_META_MODE_IXUSR.getMode(); short irgrp = TskData.TSK_FS_META_MODE_ENUM.TSK_FS_META_MODE_IRGRP.getMode(); short iwgrp = TskData.TSK_FS_META_MODE_ENUM.TSK_FS_META_MODE_IWGRP.getMode(); short ixgrp = TskData.TSK_FS_META_MODE_ENUM.TSK_FS_META_MODE_IXGRP.getMode(); short iroth = TskData.TSK_FS_META_MODE_ENUM.TSK_FS_META_MODE_IROTH.getMode(); short iwoth = TskData.TSK_FS_META_MODE_ENUM.TSK_FS_META_MODE_IWOTH.getMode(); short ixoth = TskData.TSK_FS_META_MODE_ENUM.TSK_FS_META_MODE_IXOTH.getMode(); // first character = the Meta Type result += metaType.toString(); // second and third characters = user permissions if ((mode & irusr) == irusr) { result += "r"; //NON-NLS } else { result += "-"; //NON-NLS } if ((mode & iwusr) == iwusr) { result += "w"; //NON-NLS } else { result += "-"; //NON-NLS } // fourth character = set uid if ((mode & isuid) == isuid) { if ((mode & ixusr) == ixusr) { result += "s"; //NON-NLS } else { result += "S"; //NON-NLS } } else { if ((mode & ixusr) == ixusr) { result += "x"; //NON-NLS } else { result += "-"; //NON-NLS } } // fifth and sixth characters = group permissions if ((mode 
& irgrp) == irgrp) { result += "r"; //NON-NLS } else { result += "-"; //NON-NLS } if ((mode & iwgrp) == iwgrp) { result += "w"; //NON-NLS } else { result += "-"; //NON-NLS } // seventh character = set gid if ((mode & isgid) == isgid) { if ((mode & ixgrp) == ixgrp) { result += "s"; //NON-NLS } else { result += "S"; //NON-NLS } } else { if ((mode & ixgrp) == ixgrp) { result += "x"; //NON-NLS } else { result += "-"; //NON-NLS } } // eighth and ninth character = other permissions if ((mode & iroth) == iroth) { result += "r"; //NON-NLS } else { result += "-"; //NON-NLS } if ((mode & iwoth) == iwoth) { result += "w"; //NON-NLS } else { result += "-"; //NON-NLS } // tenth character = sticky bit if ((mode & isvtx) == isvtx) { if ((mode & ixoth) == ixoth) { result += "t"; //NON-NLS } else { result += "T"; //NON-NLS } } else { if ((mode & ixoth) == ixoth) { result += "x"; //NON-NLS } else { result += "-"; //NON-NLS } } // check the result if (result.length() != 10) { // throw error here result = "ERROR"; //NON-NLS } return result; } /** * Gets the MIME type of this file. * * @return The MIME type name or null if the MIME type has not been set. */ public String getMIMEType() { return mimeType; } /** * Sets the MIME type for this file. * * IMPORTANT: The MIME type is set for this AbstractFile object, but it is * not saved to the case database until AbstractFile.save is called. * * @param mimeType The MIME type of this file. */ public void setMIMEType(String mimeType) { this.mimeType = mimeType; this.mimeTypeDirty = true; } public boolean isModeSet(TskData.TSK_FS_META_MODE_ENUM mode) { return modes.contains(mode); } /** * Sets the MD5 hash for this file. * * IMPORTANT: The MD5 hash is set for this AbstractFile object, but it is * not saved to the case database until AbstractFile.save is called. * * @param md5Hash The MD5 hash of the file. 
*/
	public void setMd5Hash(String md5Hash) {
		this.md5Hash = md5Hash;
		this.md5HashDirty = true; // persisted when AbstractFile.save is called
	}

	/**
	 * Get the md5 hash value as calculated, if present
	 *
	 * @return md5 hash string, if it is present or null if it is not
	 */
	public String getMd5Hash() {
		return this.md5Hash;
	}

	/**
	 * Sets the SHA-256 hash for this file.
	 *
	 * IMPORTANT: The SHA-256 hash is set for this AbstractFile object, but it
	 * is not saved to the case database until AbstractFile.save is called.
	 *
	 * @param sha256Hash The SHA-256 hash of the file.
	 */
	public void setSha256Hash(String sha256Hash) {
		this.sha256Hash = sha256Hash;
		this.sha256HashDirty = true; // persisted when AbstractFile.save is called
	}

	/**
	 * Get the SHA-256 hash value as calculated, if present
	 *
	 * @return SHA-256 hash string, if it is present or null if it is not
	 */
	public String getSha256Hash() {
		return this.sha256Hash;
	}

	/**
	 * Gets the attributes of this File
	 *
	 * @return
	 *
	 * @throws TskCoreException
	 */
	public List getAttributes() throws TskCoreException {
		synchronized (this) {
			// Lazily populate the cache from the db on first access.
			if (!loadedAttributesCacheFromDb) {
				ArrayList attributes = getSleuthkitCase().getFileAttributes(this);
				fileAttributesCache.clear();
				fileAttributesCache.addAll(attributes);
				loadedAttributesCacheFromDb = true;
			}
			return Collections.unmodifiableList(fileAttributesCache);
		}
	}

	/**
	 * Adds a collection of attributes to this file in a single operation within
	 * a transaction supplied by the caller.
	 *
	 * @param attributes        The collection of attributes.
	 * @param caseDbTransaction The transaction in the scope of which the
	 *                          operation is to be performed, managed by the
	 *                          caller. if Null is passed in a local transaction
	 *                          will be created and used.
	 *
	 * @throws TskCoreException If an error occurs and the attributes were not
	 *                          added to the artifact.
	 */
	public void addAttributes(Collection attributes, final SleuthkitCase.CaseDbTransaction caseDbTransaction) throws TskCoreException {

		if (Objects.isNull(attributes) || attributes.isEmpty()) {
			throw new TskCoreException("Illegal Argument passed to addAttributes: null or empty attributes passed to addAttributes");
		}
		// When no transaction is supplied, run in a private one that is
		// committed (or rolled back) before returning.
		boolean isLocalTransaction = Objects.isNull(caseDbTransaction);
		SleuthkitCase.CaseDbTransaction localTransaction = isLocalTransaction ? getSleuthkitCase().beginTransaction() : null;
		SleuthkitCase.CaseDbConnection connection = isLocalTransaction ? localTransaction.getConnection() : caseDbTransaction.getConnection();

		try {
			for (final Attribute attribute : attributes) {
				attribute.setAttributeParentId(getId());
				attribute.setCaseDatabase(getSleuthkitCase());
				getSleuthkitCase().addFileAttribute(attribute, connection);
			}

			if (isLocalTransaction) {
				localTransaction.commit();
				localTransaction = null; // commit succeeded; nothing left to roll back
			}

			// append the new attributes if cache is already loaded.
			synchronized (this) {
				if (loadedAttributesCacheFromDb) {
					fileAttributesCache.addAll(attributes);
				}
			}
		} catch (SQLException ex) {
			if (isLocalTransaction && null != localTransaction) {
				try {
					localTransaction.rollback();
				} catch (TskCoreException ex2) {
					LOGGER.log(Level.SEVERE, "Failed to rollback transaction after exception", ex2);
				}
			}
			throw new TskCoreException("Error adding file attributes", ex);
		}
	}

	/**
	 * Sets the known state for this file. Passed in value will be ignored if it
	 * is "less" than the current state. A NOTABLE file cannot be downgraded to
	 * KNOWN.
	 *
	 * IMPORTANT: The known state is set for this AbstractFile object, but it is
	 * not saved to the case database until AbstractFile.save is called.
	 *
	 * @param knownState The known state of the file.
*/
	public void setKnown(TskData.FileKnown knownState) {
		// don't allow them to downgrade the known state
		if (this.knownState.compareTo(knownState) > 0) {
			// ideally we'd return some kind of error, but
			// the API doesn't allow it
			return;
		}
		this.knownState = knownState;
		this.knownStateDirty = true;
	}

	/**
	 * Get "knownState" file status - after running a HashDB ingest on it As
	 * marked by a knownState file database, such as NSRL
	 *
	 * @return file knownState status enum value
	 */
	public TskData.FileKnown getKnown() {
		return knownState;
	}

	/**
	 * Get the extension part of the filename, if there is one. We assume that
	 * extensions only have ASCII alphanumeric chars
	 *
	 * @return The filename extension in lowercase (not including the period) or
	 *         empty string if there is no extension
	 */
	public String getNameExtension() {
		return extension;
	}

	/**
	 * Get size of the file
	 *
	 * @return file size in bytes
	 */
	@Override
	public long getSize() {
		return size;
	}

	/**
	 * Get path of the parent of this file
	 *
	 * @return path string of the parent
	 */
	public String getParentPath() {
		return parentPath;
	}

	/**
	 * Gets the data source for this file.
	 *
	 * @return The data source.
	 *
	 * @throws TskCoreException if there was an error querying the case
	 *                          database.
	 *
	 * To obtain the data source as a DataSource object, use:
	 * getSleuthkitCase().getDataSource(getDataSourceObjectId());
	 */
	@Override
	public Content getDataSource() throws TskCoreException {
		return getSleuthkitCase().getContentById(this.dataSourceObjectId);
	}

	/**
	 * Gets the object id of the data source for this file.
	 *
	 * @return The object id of the data source.
	 */
	public long getDataSourceObjectId() {
		return dataSourceObjectId;
	}

	/**
	 * Gets file ranges associated with the file. File ranges are objects in
	 * tsk_file_layout table Any file type (especially unallocated) may have 1
	 * or more block ranges associated with it
	 *
	 * @return list of file layout ranges
	 *
	 * @throws TskCoreException exception thrown if critical error occurred
	 *                          within tsk core
	 */
	public List getRanges() throws TskCoreException {
		// Lazily fetched and cached for the lifetime of this object.
		if (ranges == null) {
			ranges = getSleuthkitCase().getFileRanges(this.getId());
		}
		return ranges;
	}

	/**
	 * Convert an internal offset to an image offset
	 *
	 * @param fileOffset the byte offset in this layout file to map
	 *
	 * @return the corresponding byte offset in the image where the file offset
	 *         is located, or -1 if the file has no range layout information or
	 *         if the fileOffset is larger than file size
	 *
	 * @throws TskCoreException exception thrown if critical error occurred
	 *                          within tsk core and offset could not be
	 *                          converted
	 */
	public long convertToImgOffset(long fileOffset) throws TskCoreException {
		long imgOffset = -1;
		for (TskFileRange byteRange : getRanges()) {

			// if fileOffset is within the current byteRange, calculate the image
			// offset and break
			long rangeLength = byteRange.getByteLen();
			if (fileOffset < rangeLength) {
				imgOffset = byteRange.getByteStart() + fileOffset;
				break;
			}

			// otherwise, decrement fileOffset by the length of the current
			// byte range and continue
			fileOffset -= rangeLength;
		}
		return imgOffset;
	}

	/**
	 * Converts a file offset and length into a series of TskFileRange objects
	 * whose offsets are relative to the image. This method will only work on
	 * files with layout ranges.
	 *
	 * @param fileOffset The byte offset in this file to map.
	 * @param length     The length of bytes starting at fileOffset requested.
	 *
	 * @return The TskFileRange objects whose offsets are relative to the image.
	 *         The sum total of lengths in these ranges will equal the length
	 *         requested or will run until the end of this file.
	 *
	 * @throws TskCoreException
	 */
	public List convertToImgRanges(long fileOffset, long length) throws TskCoreException {
		if (fileOffset < 0 || length < 0) {
			throw new TskCoreException("fileOffset and length must be non-negative");
		}

		List thisRanges = getRanges();
		List toRet = new ArrayList<>();

		long requestedEnd = fileOffset + length;

		// the number of bytes counted from the beginning of this file
		long bytesCounted = 0;

		for (int curRangeIdx = 0; curRangeIdx < thisRanges.size(); curRangeIdx++) {
			// if we exceeded length of requested, then we are done
			if (bytesCounted >= requestedEnd) {
				break;
			}

			TskFileRange curRange = thisRanges.get(curRangeIdx);
			long curRangeLen = curRange.getByteLen();
			// the bytes counted when we reach the end of this range
			long curRangeEnd = bytesCounted + curRangeLen;

			// if fileOffset is less than current range's end and we have not
			// gone past the end we requested, then grab at least part of this
			// range.
			if (fileOffset < curRangeEnd) {
				// offset into range to be returned to user (0 if fileOffset <= bytesCounted)
				long rangeOffset = Math.max(0, fileOffset - bytesCounted);

				// calculate the new TskFileRange start by adding on the offset into the current range
				long newRangeStart = curRange.getByteStart() + rangeOffset;

				// how much this current range exceeds the length requested (or 0 if within the length requested)
				long rangeOvershoot = Math.max(0, curRangeEnd - requestedEnd);

				long newRangeLen = curRangeLen - rangeOffset - rangeOvershoot;
				toRet.add(new TskFileRange(newRangeStart, newRangeLen, toRet.size()));
			}

			bytesCounted = curRangeEnd;
		}

		return toRet;
	}

	/**
	 * is this a virtual file or directory that was created by The Sleuth Kit or
	 * Autopsy for general structure and organization.
* * @return true if it's virtual, false otherwise */ public boolean isVirtual() { return fileType.equals(TskData.TSK_DB_FILES_TYPE_ENUM.VIRTUAL_DIR) || dirType.equals(TskData.TSK_FS_NAME_TYPE_ENUM.VIRT) || metaType.equals(TskData.TSK_FS_META_TYPE_ENUM.TSK_FS_META_TYPE_VIRT); } /** * Is this object a file. Should return true for all types of files, * including file system, logical, derived, layout, and slack space for * files. * * @return true if a file, false otherwise */ public boolean isFile() { return metaType.equals(TSK_FS_META_TYPE_ENUM.TSK_FS_META_TYPE_REG) || (metaType.equals(TSK_FS_META_TYPE_ENUM.TSK_FS_META_TYPE_UNDEF) && dirType.equals(TSK_FS_NAME_TYPE_ENUM.REG)); } /** * Is this object a directory. Should return true for file system folders * and virtual folders. * * @return true if directory, false otherwise */ public boolean isDir() { return (metaType.equals(TSK_FS_META_TYPE_ENUM.TSK_FS_META_TYPE_DIR) || metaType.equals(TSK_FS_META_TYPE_ENUM.TSK_FS_META_TYPE_VIRT_DIR)); } /** * Is this a root of a file system * * @return true if root of a file system, false otherwise */ public abstract boolean isRoot(); /** * @param uniquePath the unique path to an AbstractFile (or subclass) * usually obtained by a call to * AbstractFile.getUniquePath. * * @return the path to to an AbstractFile (or subclass) with the image and * volume path segments removed. */ public static String createNonUniquePath(String uniquePath) { // split the path into parts String[] pathSegments = uniquePath.split("/"); // see if uniquePath had an image and/or volume name int index = 0; if (pathSegments[0].startsWith("img_")) { //NON-NLS ++index; } if (pathSegments[1].startsWith("vol_")) { //NON-NLS ++index; } // Assemble the non-unique path (skipping over the image and volume // name, if they exist). 
StringBuilder strbuf = new StringBuilder(); for (; index < pathSegments.length; ++index) { if (!pathSegments[index].isEmpty()) { strbuf.append("/").append(pathSegments[index]); } } return strbuf.toString(); } /** * @return a list of AbstractFiles that are the children of this Directory. * Only returns children of type TskData.TSK_DB_FILES_TYPE_ENUM.FS. * * @throws org.sleuthkit.datamodel.TskCoreException */ public List listFiles() throws TskCoreException { // first, get all children List children = getChildren(); // only keep those that are of type AbstractFile List files = new ArrayList(); for (Content child : children) { if (child instanceof AbstractFile) { AbstractFile afChild = (AbstractFile) child; files.add(afChild); } } return files; } /** * Get the meta data type * * @return meta data type */ public TSK_FS_META_TYPE_ENUM getMetaType() { return metaType; } public String getMetaTypeAsString() { return metaType.toString(); } /** * Get the directory type id * * @return directory type id */ public TSK_FS_NAME_TYPE_ENUM getDirType() { return dirType; } public String getDirTypeAsString() { return dirType.toString(); } /** * @param flag the TSK_FS_NAME_FLAG_ENUM to check * * @return true if the given flag is set in this FsContent object. */ public boolean isDirNameFlagSet(TSK_FS_NAME_FLAG_ENUM flag) { return dirFlag == flag; } /** * @return a string representation of the directory name flag (type * TSK_FS_NAME_FLAG_ENUM) */ public String getDirFlagAsString() { return dirFlag.toString(); } /** * Set the directory name flag. * * @param flag Flag to set to. 
*/ void setDirFlag(TSK_FS_NAME_FLAG_ENUM flag) { dirFlag = flag; } /** * @return a string representation of the meta flags */ public String getMetaFlagsAsString() { String str = ""; if (metaFlags.contains(TSK_FS_META_FLAG_ENUM.ALLOC)) { str = TSK_FS_META_FLAG_ENUM.ALLOC.toString(); } else if (metaFlags.contains(TSK_FS_META_FLAG_ENUM.UNALLOC)) { str = TSK_FS_META_FLAG_ENUM.UNALLOC.toString(); } return str; } /** * @param metaFlag the TSK_FS_META_FLAG_ENUM to check * * @return true if the given meta flag is set in this FsContent object. */ public boolean isMetaFlagSet(TSK_FS_META_FLAG_ENUM metaFlag) { return metaFlags.contains(metaFlag); } /** * Set the specified meta flag. * * @param metaFlag Meta flag to set */ void setMetaFlag(TSK_FS_META_FLAG_ENUM metaFlag) { metaFlags.add(metaFlag); } /** * Remove the specified meta flag. * * @param metaFlag Meta flag to remove. */ void removeMetaFlag(TSK_FS_META_FLAG_ENUM metaFlag) { metaFlags.remove(metaFlag); } /** * Get meta flags as an integer. * * @return Integer representation of the meta flags. 
*/
    short getMetaFlagsAsInt() {
        return TSK_FS_META_FLAG_ENUM.toInt(metaFlags);
    }

    @Override
    public final int read(byte[] buf, long offset, long len) throws TskCoreException {
        //template method
        //if localPath is set, use local, otherwise, use readCustom() supplied by derived class
        if (localPathSet) {
            return readLocal(buf, offset, len);
        } else {
            return readInt(buf, offset, len);
        }
    }

    /**
     * Internal custom read (non-local) method that child classes can implement
     *
     * @param buf buffer to read into
     * @param offset start reading position in the file
     * @param len number of bytes to read
     *
     * @return number of bytes read
     *
     * @throws TskCoreException exception thrown when file could not be read
     */
    protected int readInt(byte[] buf, long offset, long len) throws TskCoreException {
        // Default implementation reads nothing; subclasses override.
        return 0;
    }

    /**
     * Local file path read support
     *
     * @param buf buffer to read into
     * @param offset start reading position in the file
     * @param len number of bytes to read
     *
     * @return number of bytes read
     *
     * @throws TskCoreException exception thrown when file could not be read
     */
    protected final int readLocal(byte[] buf, long offset, long len) throws TskCoreException {
        if (!localPathSet) {
            throw new TskCoreException(
                    BUNDLE.getString("AbstractFile.readLocal.exception.msg1.text"));
        }

        // Directories have no byte content to read.
        if (isDir()) {
            return 0;
        }

        // If the file is empty, just return that zero bytes were read.
        if (getSize() == 0) {
            return 0;
        }

        loadLocalFile();

        int bytesRead = 0;

        // Lazily open the read-only file handle.
        // NOTE(review): double-checked locking; assumed localFileHandle is
        // declared volatile (declaration is outside this view) -- confirm.
        if (localFileHandle == null) {
            synchronized (this) {
                if (localFileHandle == null) {
                    try {
                        localFileHandle = new RandomAccessFile(localFile, "r");
                    } catch (FileNotFoundException ex) {
                        final String msg = MessageFormat.format(BUNDLE.getString(
                                "AbstractFile.readLocal.exception.msg4.text"), localAbsPath);
                        LOGGER.log(Level.SEVERE, msg, ex);
                        //file could have been deleted or moved
                        throw new TskCoreException(msg, ex);
                    }
                }
            }
        }

        try {
            if (!encodingType.equals(TskData.EncodingType.NONE)) {
                // The file is encoded, so we need to alter the offset to read (since there's
                // a header on the encoded file) and then decode each byte
                long encodedOffset = offset + EncodedFileUtil.getHeaderLength();

                //move to the user request offset in the stream
                long curOffset = localFileHandle.getFilePointer();
                if (curOffset != encodedOffset) {
                    localFileHandle.seek(encodedOffset);
                }
                bytesRead = localFileHandle.read(buf, 0, (int) len);
                // Decode in place, one byte at a time.
                for (int i = 0; i < bytesRead; i++) {
                    buf[i] = EncodedFileUtil.decodeByte(buf[i], encodingType);
                }
                return bytesRead;
            } else {
                //move to the user request offset in the stream
                long curOffset = localFileHandle.getFilePointer();
                if (curOffset != offset) {
                    localFileHandle.seek(offset);
                }
                //note, we are always writing at 0 offset of user buffer
                return localFileHandle.read(buf, 0, (int) len);
            }
        } catch (IOException ex) {
            final String msg = MessageFormat.format(BUNDLE.getString("AbstractFile.readLocal.exception.msg5.text"), localAbsPath);
            LOGGER.log(Level.SEVERE, msg, ex);
            //local file could have been deleted / moved
            throw new TskCoreException(msg, ex);
        }
    }

    /**
     * Set local path for the file, as stored in db tsk_files_path, relative to
     * the case db path or an absolute path. When set, subsequent invocations of
     * read() will read the file in the local path.
*
     * @param localPath local path to be set
     */
    void setLocalFilePath(String localPath) {

        if (localPath == null || localPath.equals("")) {
            // Clearing the local path disables local reads.
            this.localPath = "";
            localAbsPath = null;
            localPathSet = false;
        } else {
            // It should always be the case that absolute paths start with slashes or a windows drive letter
            // and relative paths do not, but some older versions of modules created derived file paths
            // starting with slashes. So we first check if this file is a DerivedFile before looking at the path.
            this.localPath = localPath;
            if (this instanceof DerivedFile) {
                // DerivedFiles always have relative paths
                this.localAbsPath = getSleuthkitCase().getDbDirPath() + java.io.File.separator + localPath;
            } else {
                // If a path starts with a slash or with a Windows drive letter, then it is
                // absolute. Otherwise it is relative.
                if (localPath.startsWith("/") || localPath.startsWith("\\")
                        || localPath.matches("[A-Za-z]:[/\\\\].*")) {
                    this.localAbsPath = localPath;
                } else {
                    this.localAbsPath = getSleuthkitCase().getDbDirPath() + java.io.File.separator + localPath;
                }
            }
            this.localPathSet = true;
        }
    }

    /**
     * Get local relative to case db path of the file
     *
     * @return local file path if set
     */
    public String getLocalPath() {
        return localPath;
    }

    /**
     * Get local absolute path of the file, if localPath has been set
     *
     * @return local absolute file path if local path has been set, or null
     */
    public String getLocalAbsPath() {
        return localAbsPath;
    }

    /**
     * Set the type of encoding used on the file (for local/derived files only)
     *
     * @param encodingType the encoding scheme applied to the on-disk bytes
     */
    final void setEncodingType(TskData.EncodingType encodingType) {
        this.encodingType = encodingType;
    }

    /**
     * Check if the file exists.
If non-local always true, if local, checks if
     * actual local path exists
     *
     * @return true if the file exists, false otherwise
     */
    public boolean exists() {
        if (!localPathSet) {
            // Non-local content (e.g. inside an image) is always "present".
            return true;
        } else {
            try {
                loadLocalFile();
                return localFile.exists();
            } catch (TskCoreException ex) {
                LOGGER.log(Level.SEVERE, ex.getMessage());
                return false;
            }
        }
    }

    /**
     * Check if the file exists and is readable. If non-local (e.g. within an
     * image), always true, if local, checks if actual local path exists and is
     * readable
     *
     * @return true if the file is readable
     */
    public boolean canRead() {
        if (!localPathSet) {
            return true;
        } else {
            try {
                loadLocalFile();
                return localFile.canRead();
            } catch (TskCoreException ex) {
                LOGGER.log(Level.SEVERE, ex.getMessage());
                return false;
            }
        }
    }

    /**
     * Lazy load local file handle
     *
     * @throws org.sleuthkit.datamodel.TskCoreException If the local path is not
     * set.
     */
    private void loadLocalFile() throws TskCoreException {
        if (!localPathSet) {
            throw new TskCoreException(
                    BUNDLE.getString("AbstractFile.readLocal.exception.msg1.text"));
        }

        // already been set
        if (localFile != null) {
            return;
        }

        synchronized (this) {
            if (localFile == null) {
                localFile = new java.io.File(localAbsPath);
            }
        }
    }

    @Override
    public void close() {

        //close local file handle if set
        if (localFileHandle != null) {
            synchronized (this) {
                if (localFileHandle != null) {
                    try {
                        localFileHandle.close();
                    } catch (IOException ex) {
                        LOGGER.log(Level.SEVERE, "Could not close file handle for file: " + getParentPath() + getName(), ex); //NON-NLS
                    }
                    localFileHandle = null;
                }
            }
        }
    }

    @Override
    protected void finalize() throws Throwable {
        // Safety net: release the native/file handle if close() was never called.
        try {
            close();
        } finally {
            super.finalize();
        }
    }

    @Override
    public String toString(boolean preserveState) {
        return super.toString(preserveState) + "AbstractFile [\t" //NON-NLS
                + "\t" + "fileType " + fileType //NON-NLS
                + "\tctime " + ctime //NON-NLS
                + "\tcrtime " + crtime //NON-NLS
                + "\t" + "mtime " + mtime + "\t" + "atime " + atime //NON-NLS
                + "\t" + "attrId " + attrId //NON-NLS
                + "\t" + "attrType " + attrType //NON-NLS
                + "\t" + "dirFlag " + dirFlag + "\t" + "dirType " + dirType //NON-NLS
                + "\t" + "uid " + uid //NON-NLS
                + "\t" + "gid " + gid //NON-NLS
                + "\t" + "metaAddr " + metaAddr + "\t" + "metaSeq " + metaSeq + "\t" + "metaFlags " + metaFlags //NON-NLS
                + "\t" + "metaType " + metaType + "\t" + "modes " + modes //NON-NLS
                + "\t" + "parentPath " + parentPath + "\t" + "size " + size //NON-NLS
                + "\t" + "knownState " + knownState + "\t" + "md5Hash " + md5Hash + "\t" + "sha256Hash " + sha256Hash //NON-NLS
                + "\t" + "localPathSet " + localPathSet + "\t" + "localPath " + localPath //NON-NLS
                + "\t" + "localAbsPath " + localAbsPath + "\t" + "localFile " + localFile //NON-NLS
                + "]\t";
    }

    /**
     * Possible return values for comparing a file to a list of mime types
     */
    public enum MimeMatchEnum {

        UNDEFINED, /// file does not have a defined mime type in blackboard
        TRUE, /// file has a defined mime type and it is one of the given ones
        FALSE /// file has a defined mime type and it is not one of the given ones.
    }

    /**
     * Determines if this file's type is one of the ones passed in. Uses the
     * blackboard attribute for file type.
     *
     * @param mimeTypes Set of file types to compare against
     *
     * @return UNDEFINED if no MIME type is set, TRUE/FALSE otherwise
     */
    public MimeMatchEnum isMimeType(SortedSet mimeTypes) {
        if (this.mimeType == null) {
            return MimeMatchEnum.UNDEFINED;
        }
        if (mimeTypes.contains(this.mimeType)) {
            return MimeMatchEnum.TRUE;
        }
        return MimeMatchEnum.FALSE;
    }

    /**
     * Saves the editable properties of this file to the case database, e.g.,
     * the MIME type, MD5 hash, and known state.
     *
     * @throws TskCoreException if there is an error saving the editable file
     * properties to the case database.
*/
    public void save() throws TskCoreException {

        // No transaction supplied: create one, save, commit, and roll back on
        // any failure before rethrowing.
        CaseDbTransaction transaction = null;
        try {
            transaction = getSleuthkitCase().beginTransaction();
            save(transaction);
            transaction.commit();
        } catch (TskCoreException ex) {
            if (transaction != null) {
                transaction.rollback();
            }
            throw ex;
        }
    }

    /**
     * Saves the editable properties of this file to the case database, e.g.,
     * the MIME type, MD5 hash, and known state, in the context of a given case
     * database transaction.
     *
     * @param transaction The transaction.
     *
     * @throws TskCoreException if there is an error saving the editable file
     * properties to the case database.
     */
    public void save(CaseDbTransaction transaction) throws TskCoreException {

        // No-op unless a setter flagged at least one property as dirty.
        if (!(md5HashDirty || sha256HashDirty || mimeTypeDirty || knownStateDirty)) {
            return;
        }

        // Build the SET clause from only the dirty properties.
        // NOTE(review): values are embedded via string concatenation; this
        // relies on MIME type and hash strings never containing a single
        // quote. A PreparedStatement would be safer -- confirm inputs are
        // always internally generated.
        String updateSql = "";
        if (mimeTypeDirty) {
            updateSql = "mime_type = '" + this.getMIMEType() + "'";
        }
        if (md5HashDirty) {
            if (!updateSql.isEmpty()) {
                updateSql += ", ";
            }
            updateSql += "md5 = '" + this.getMd5Hash() + "'";
        }
        if (sha256HashDirty) {
            if (!updateSql.isEmpty()) {
                updateSql += ", ";
            }
            updateSql += "sha256 = '" + this.getSha256Hash() + "'";
        }
        if (knownStateDirty) {
            if (!updateSql.isEmpty()) {
                updateSql += ", ";
            }
            updateSql += "known = '" + this.getKnown().getFileKnownValue() + "'";
        }
        updateSql = "UPDATE tsk_files SET " + updateSql + " WHERE obj_id = " + this.getId();

        SleuthkitCase.CaseDbConnection connection = transaction.getConnection();
        try (Statement statement = connection.createStatement()) {
            connection.executeUpdate(statement, updateSql);
            // Clear the dirty flags only after a successful update.
            md5HashDirty = false;
            sha256HashDirty = false;
            mimeTypeDirty = false;
            knownStateDirty = false;
        } catch (SQLException ex) {
            throw new TskCoreException(String.format("Error updating properties of file %s (obj_id = %s)", getName(), getId()), ex);
        }
    }

    /**
     * Get the owner uid.
     *
     * Note this is a string uid, typically a Windows SID. This is different
     * from the numeric uid commonly found on Unix based file systems.
     *
     * @return Optional with owner uid.
*/
    public Optional getOwnerUid() {
        // NOTE(review): raw Optional -- generic type arguments appear to have
        // been stripped from this copy of the source (presumably
        // Optional<String>); kept as found.
        return Optional.ofNullable(ownerUid);
    }

    /**
     * Get the Object Id of the owner account.
     *
     * @return Optional with Object Id of the OsAccount, or Optional.empty.
     */
    public Optional getOsAccountObjectId() {
        return Optional.ofNullable(osAccountObjId);
    }

    @Override
    public String getUniquePath() throws TskCoreException {
        // Computed once and cached in uniquePath.
        if (uniquePath == null) {
            Content dataSource = getDataSource();
            if (dataSource instanceof LocalFilesDataSource) {
                if (dataSource != this) {
                    uniquePath = dataSource.getUniquePath() + parentPath + getName();
                } else {
                    // This file IS the local-files data source root.
                    uniquePath = "/" + getName();
                }
            } else {
                uniquePath = super.getUniquePath();
            }
        }
        return uniquePath;
    }

    @Deprecated
    @SuppressWarnings("deprecation")
    @Override
    public BlackboardArtifact newArtifact(int artifactTypeID) throws TskCoreException {
        // Deprecated pass-through kept for backward compatibility.
        return super.newArtifact(artifactTypeID);
    }

    /**
     * Create and add a data artifact associated with this abstract file. This
     * method creates the data artifact with the os account id associated with
     * this abstract file if one exists.
     *
     * @param artifactType Type of data artifact to create.
     * @param attributesList Additional attributes to attach to this data
     * artifact.
     *
     * @return DataArtifact New data artifact.
     *
     * @throws TskCoreException If a critical error occurred within tsk core.
*/
    @Override
    public DataArtifact newDataArtifact(BlackboardArtifact.Type artifactType, Collection attributesList) throws TskCoreException {
        // Attach this file's os account (if any) to the new artifact.
        return super.newDataArtifact(artifactType, attributesList, getOsAccountObjectId().orElse(null));
    }

    /**
     * Initializes common fields used by AbstractFile implementations (objects in
     * tsk_files table)
     *
     * @param db case / db handle where this file belongs to
     * @param objId object id in tsk_objects table
     * @param attrType
     * @param attrId
     * @param name name field of the file
     * @param fileType type of the file
     * @param metaAddr
     * @param metaSeq
     * @param dirType
     * @param metaType
     * @param dirFlag
     * @param metaFlags
     * @param size
     * @param ctime
     * @param crtime
     * @param atime
     * @param mtime
     * @param modes
     * @param uid
     * @param gid
     * @param md5Hash md5sum of the file, or null or "NULL" if not present
     * @param knownState knownState status of the file, or null if unknown
     * (default)
     * @param parentPath
     *
     * @deprecated Do not make subclasses outside of this package.
     */
    @Deprecated
    @SuppressWarnings("deprecation")
    protected AbstractFile(SleuthkitCase db, long objId, TskData.TSK_FS_ATTR_TYPE_ENUM attrType, short attrId,
            String name, TskData.TSK_DB_FILES_TYPE_ENUM fileType, long metaAddr, int metaSeq,
            TSK_FS_NAME_TYPE_ENUM dirType, TSK_FS_META_TYPE_ENUM metaType, TSK_FS_NAME_FLAG_ENUM dirFlag,
            short metaFlags, long size, long ctime, long crtime, long atime, long mtime, short modes,
            int uid, int gid, String md5Hash, FileKnown knownState, String parentPath) {
        // Delegates with the data source object id looked up from the db, the
        // short attrId widened to int, and no owner/os-account information.
        this(db, objId, db.getDataSourceObjectId(objId), attrType, (int) attrId, name, fileType,
                metaAddr, metaSeq, dirType, metaType, dirFlag, metaFlags, size, ctime, crtime, atime, mtime,
                modes, uid, gid, md5Hash, null, knownState, parentPath, null, null,
                OsAccount.NO_OWNER_ID, OsAccount.NO_ACCOUNT, Collections.emptyList());
    }

    /**
     * Initializes common fields used by AbstractFile implementations (objects in
     * tsk_files table). This deprecated version has attrId filed defined as a
     * short which has since been changed to an int.
     *
     * @param db case / db handle where this file belongs to
     * @param objId object id in tsk_objects table
     * @param dataSourceObjectId The object id of the root data source of this
     * file.
     * @param attrType
     * @param attrId
     * @param name name field of the file
     * @param fileType type of the file
     * @param metaAddr
     * @param metaSeq
     * @param dirType
     * @param metaType
     * @param dirFlag
     * @param metaFlags
     * @param size
     * @param ctime
     * @param crtime
     * @param atime
     * @param mtime
     * @param modes
     * @param uid
     * @param gid
     * @param md5Hash md5sum of the file, or null or "NULL" if not
     * present
     * @param knownState knownState status of the file, or null if
     * unknown (default)
     * @param parentPath
     * @param mimeType The MIME type of the file, can be null
     *
     * @deprecated Do not make subclasses outside of this package.
     */
    @Deprecated
    @SuppressWarnings("deprecation")
    AbstractFile(SleuthkitCase db, long objId, long dataSourceObjectId, TskData.TSK_FS_ATTR_TYPE_ENUM attrType, short attrId,
            String name, TskData.TSK_DB_FILES_TYPE_ENUM fileType, long metaAddr, int metaSeq,
            TSK_FS_NAME_TYPE_ENUM dirType, TSK_FS_META_TYPE_ENUM metaType, TSK_FS_NAME_FLAG_ENUM dirFlag,
            short metaFlags, long size, long ctime, long crtime, long atime, long mtime, short modes,
            int uid, int gid, String md5Hash, FileKnown knownState, String parentPath, String mimeType) {
        // NOTE(review): the mimeType parameter is not forwarded -- the delegate
        // receives null in that position. Presumably intentional for this
        // deprecated overload, but confirm against the primary constructor.
        this(db, objId, dataSourceObjectId, attrType, (int) attrId, name, fileType,
                metaAddr, metaSeq, dirType, metaType, dirFlag, metaFlags, size, ctime, crtime, atime, mtime,
                modes, uid, gid, md5Hash, null, knownState, parentPath, null, null,
                OsAccount.NO_OWNER_ID, OsAccount.NO_ACCOUNT, Collections.emptyList());
    }

    /**
     * Get the attribute id
     *
     * @return attribute id
     *
     * @deprecated Use getAttributeId() method instead as it returns integer
     * instead of short.
*/
    @Deprecated
    @SuppressWarnings("deprecation")
    public short getAttrId() {
        /*
         * NOTE: previously attrId used to be stored in AbstractFile as (signed)
         * short even though it is stored as uint16 in TSK. In extremely rare
         * occurrences attrId can be larger than what a signed short can hold
         * (2^15). Changes were made to AbstractFile to store attrId as integer.
         * Therefore this method has been deprecated. For backwards
         * compatibility, attribute ids that are larger than 32K are converted
         * to a negative number.
         */
        return (short) attrId; // casting to signed short converts values over 32K to negative values
    }

    /**
     * Set local path for the file, as stored in db tsk_files_path, relative to
     * the case db path or an absolute path. When set, subsequent invocations of
     * read() will read the file in the local path.
     *
     * @param localPath local path to be set
     * @param isAbsolute true if the path is absolute, false if relative to the
     * case db
     *
     * @deprecated Do not make subclasses outside of this package.
*/ @Deprecated protected void setLocalPath(String localPath, boolean isAbsolute) { setLocalFilePath(localPath); } /* * ------------------------------------------------------------------------- * Util methods to convert / map the data * ------------------------------------------------------------------------- */ /** * Return the epoch into string in ISO 8601 dateTime format * * @param epoch time in seconds * * @return formatted date time string as "yyyy-MM-dd HH:mm:ss" * * @deprecated */ @Deprecated public static String epochToTime(long epoch) { return TimeUtilities.epochToTime(epoch); } /** * Return the epoch into string in ISO 8601 dateTime format, in the given * timezone * * @param epoch time in seconds * @param tzone time zone * * @return formatted date time string as "yyyy-MM-dd HH:mm:ss" * * @deprecated */ @Deprecated public static String epochToTime(long epoch, TimeZone tzone) { return TimeUtilities.epochToTime(epoch, tzone); } /** * Convert from ISO 8601 formatted date time string to epoch time in seconds * * @param time formatted date time string as "yyyy-MM-dd HH:mm:ss" * * @return epoch time in seconds */ @Deprecated public static long timeToEpoch(String time) { return TimeUtilities.timeToEpoch(time); } } sleuthkit-4.11.1/bindings/java/src/org/sleuthkit/datamodel/WindowsAccountUtils.java000644 000765 000024 00000020355 14137073414 031312 0ustar00carrierstaff000000 000000 /* * Sleuth Kit Data Model * * Copyright 2021 Basis Technology Corp. * Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
* See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.sleuthkit.datamodel;

import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import java.util.HashMap;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Optional;
import java.util.Set;

/**
 * A utility class for handling Windows specific accounts and SIDs.
 *
 * Implementation notes:
 * - SIDs for standard "Service Accounts" are added to a host-scoped special realm.
 * - SIDs for standard groups are not added as OS Accounts
 *
 */
final class WindowsAccountUtils {

    // Special Windows Accounts with short SIDS are given a special realm "address".
    final static String SPECIAL_WINDOWS_REALM_ADDR = "SPECIAL_WINDOWS_ACCOUNTS";

    // Backed-up SIDs carry this postfix (e.g. "S-1-5-21-...-1001.bak").
    final static String SPECIAL_WINDOWS_BACK_UP_POSTFIX = ".bak";

    // Windows uses SIDs for groups as well as users.
    // We dont want to create "User" account for group SIDs.
    // The lists here help us identify and weed out group SIDs when creating accounts.
    // NOTE(review): generic type arguments (e.g. Set<String>) appear to have
    // been stripped from this copy of the source; raw types kept as found.
    private static final Set GROUP_SIDS = ImmutableSet.of(
            "S-1-0-0", // Null SID
            "S-1-1-0", // Everyone
            "S-1-2-0", // Local - anyone who has logged on locally
            "S-1-2-1", // Console Logon
            "S-1-3-1", // Creator
            "S-1-3-4", // Owner rights
            "S-1-5-1", // Dialup
            "S-1-5-2", // Network
            "S-1-5-3", // Batch
            "S-1-5-4", // Interactive
            "S-1-5-6", // Service
            "S-1-5-7", // Anonymous
            "S-1-5-9", // Enterprise Domain Controllers
            "S-1-5-11", // Authenticated Users
            "S-1-5-12", // Restricted Code - not a group but not a user SID either
            "S-1-5-13", // Terminal Server Users
            "S-1-5-14", // Remote Interactive Logon
            "S-1-5-15", // This Organization
            "S-1-5-80-0", // All Services
            "S-1-5-83-0", // NT Virtual Machine\Virtual Machines
            "S-1-5-90-0" // Windows Manager\Windows Manager Group
    );

    // Any SIDs with the following prefixes are group SID and should be excluded.
    private static final Set GROUP_SID_PREFIX = ImmutableSet.of(
            "S-1-5-32", // Builtin
            "S-1-5-87" // Task ID prefix
    );

    // SIDs that begin with the domain SID prefix and end with one of these
    // suffixes are well-known domain group SIDs and should be excluded.
    private static final String DOMAIN_SID_PREFIX = "S-1-5";
    private static final Set DOMAIN_GROUP_SID_SUFFIX = ImmutableSet.of(
            "-512", // Domain Admins
            "-513", // Domain Users
            "-514", // Domain Guests
            "-515", // Domain Computers
            "-516", // Domain Controllers
            "-517", // Cert Publishers
            "-518", // Schema Admins
            "-519", // Enterprise Admins
            "-520", // Group Policy Creator Owners
            "-526", // Key Admins
            "-527", // Enterprise Key Admins
            "-533", // RAS and IAS Servers
            // Windows 2008 and later
            "-498", // Enterprise Read-only Domain Controllers
            "-521", // Read-only Domain Controllers
            "-571", // Allowed RODC Password Replication Group
            "-572", // Denied RODC Password Replication Group
            // Windows 2012 and later
            "-522" // Cloneable Domain Controllers
    );

    // Some windows SID indicate special account.
    // These should be handled differently from regular user accounts.
    private static final Map SPECIAL_SIDS_MAP = ImmutableMap.builder()
            .put("S-1-5-18", "Local System Account")
            .put("S-1-5-19", "Local Service Account")
            .put("S-1-5-20", "Network Service Account")
            .build();

    private static final Map SPECIAL_SID_PREFIXES_MAP = ImmutableMap.builder()
            .put("S-1-5-80", "Service Virtual Account")
            .put("S-1-5-82", "IIS AppPool Virtual Account")
            .put("S-1-5-83", "Virtual Machine Virtual Account")
            .put("S-1-5-90", "Window Manager Virtual Account")
            .put("S-1-5-94", "WinRM Virtual accountt")
            .put("S-1-5-96", "Font Driver Host Virtual Account")
            .build();

    /**
     * Checks if the given SID is a special Windows SID.
     *
     * @param sid SID to check.
* * @return True if the SID is a Windows special SID, false otherwise */ static boolean isWindowsSpecialSid(String sid) { String tempSID = stripWindowsBackupPostfix(sid); if (SPECIAL_SIDS_MAP.containsKey(tempSID)) { return true; } for (String specialPrefix: SPECIAL_SID_PREFIXES_MAP.keySet()) { if (tempSID.startsWith(specialPrefix)) { return true; } } // All the prefixes in the range S-1-5-80 to S-1-5-111 are special tempSID = tempSID.replaceFirst(DOMAIN_SID_PREFIX + "-", ""); String subAuthStr = tempSID.substring(0, tempSID.indexOf('-')); Integer subAuth = Optional.ofNullable(subAuthStr).map(Integer::valueOf).orElse(0); if (subAuth >= 80 && subAuth <= 111) { return true; } return false; } /** * Get the name for the given special Windows SID. * * @param sid SID to check. * * @return Name for Windows special SID, an empty string if the SID is not a known special SID. */ static String getWindowsSpecialSidName(String sid) { String tempSID = stripWindowsBackupPostfix(sid); if (SPECIAL_SIDS_MAP.containsKey(tempSID)) { return SPECIAL_SIDS_MAP.get(tempSID); } for (Entry specialPrefixEntry: SPECIAL_SID_PREFIXES_MAP.entrySet()) { if (tempSID.startsWith(specialPrefixEntry.getKey())) { return specialPrefixEntry.getValue(); } } return ""; } /** * Checks if the given SID is a user SID. * * If the given SID is not found among the known group SIDs, is considered a user SID. * * @param sid SID to check. 
*
     * @return True if the SID is a user SID, false otherwise
     */
    static boolean isWindowsUserSid(String sid) {

        String tempSID = stripWindowsBackupPostfix(sid);

        if (GROUP_SIDS.contains(tempSID)) {
            return false;
        }

        for (String prefix : GROUP_SID_PREFIX) {
            if (tempSID.startsWith(prefix)) {
                return false;
            }
        }

        // check for domain groups - they have a domains specific identifier but have a fixed prefix and suffix
        if (tempSID.startsWith(DOMAIN_SID_PREFIX)) {
            for (String suffix : DOMAIN_GROUP_SID_SUFFIX) {
                if (tempSID.endsWith(suffix)) {
                    return false;
                }
            }
        }

        // Not a known group SID, so treat it as a user SID.
        return true;
    }

    /**
     * Get the windows realm address from the given SID.
     *
     * For all regular account SIDs, the realm address is the sub-authority SID.
     * For special Windows account the realm address is a special address,
     * SPECIAL_WINDOWS_REALM_ADDR { @link WindowsAccountUtils.SPECIAL_WINDOWS_REALM_ADDR}
     *
     * @param sid SID
     *
     * @return Realm address for the SID.
     *
     * @throws TskCoreException If the given SID is not a valid host/domain SID.
     */
    public static String getWindowsRealmAddress(String sid) throws TskCoreException {

        String realmAddr;
        String tempSID = stripWindowsBackupPostfix(sid);

        // When copying realms into portable cases, the SID may already be set to the special windows string.
        if (isWindowsSpecialSid(tempSID) || tempSID.equals(SPECIAL_WINDOWS_REALM_ADDR)) {
            realmAddr = SPECIAL_WINDOWS_REALM_ADDR;
        } else {
            // regular SIDs should have at least 5 components: S-1-x-y-z
            if (org.apache.commons.lang3.StringUtils.countMatches(tempSID, "-") < 4) {
                throw new TskCoreException(String.format("Invalid SID %s for a host/domain", tempSID));
            }
            // get the sub authority SID
            // NOTE(review): this indexes into the original sid using an offset
            // computed from tempSID; equivalent only because the stripped
            // ".bak" postfix is strictly trailing -- consider using tempSID
            // here for clarity.
            realmAddr = sid.substring(0, tempSID.lastIndexOf('-'));
        }

        return realmAddr;
    }

    /**
     * Backup windows sid will include the postfix .bak on the end of the sid.
     * Remove the postfix for easier processing.
     *
     * @param sid
     *
     * @return The sid with the postfix removed.
*/ private static String stripWindowsBackupPostfix(String sid) { String tempSID = sid; if(tempSID.endsWith(SPECIAL_WINDOWS_BACK_UP_POSTFIX)) { tempSID = tempSID.replace(SPECIAL_WINDOWS_BACK_UP_POSTFIX, ""); } return tempSID; } } sleuthkit-4.11.1/bindings/java/src/org/sleuthkit/datamodel/VolumeSystem.java000644 000765 000024 00000010531 14137073413 027770 0ustar00carrierstaff000000 000000 /* * Sleuth Kit Data Model * * Copyright 2011-2017 Basis Technology Corp. * Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.sleuthkit.datamodel; import java.util.ArrayList; import java.util.List; import org.sleuthkit.datamodel.TskData.TSK_VS_TYPE_ENUM; /** * Represents a volume system. Populated based on data in database. 
*/
public class VolumeSystem extends AbstractContent {

    // Handle to the native volume system object; 0 until opened.
    // volatile: read outside and written inside synchronized blocks.
    private volatile long volumeSystemHandle = 0;
    private long type, imgOffset, blockSize;

    /**
     * Constructor most inputs are from the database
     *
     * @param db case database handle
     * @param obj_id the unique content object id for the volume system
     * @param name name of the volume system
     * @param type type of the volume system
     * @param imgOffset offset of the volume system with respect to image
     * @param blockSize block size of this volume system
     */
    protected VolumeSystem(SleuthkitCase db, long obj_id, String name, long type, long imgOffset, long blockSize) {
        super(db, obj_id, name);
        this.type = type;
        this.imgOffset = imgOffset;
        this.blockSize = blockSize;
    }

    @Override
    public int read(byte[] readBuffer, long offset, long len) throws TskCoreException {
        // Lazily open the native handle before delegating to the JNI layer.
        synchronized (this) {
            if (volumeSystemHandle == 0) {
                getVolumeSystemHandle();
            }
        }
        return SleuthkitJNI.readVs(volumeSystemHandle, readBuffer, offset, len);
    }

    @Override
    public long getSize() {
        // A volume system itself reports no size.
        return 0;
    }

    /**
     * get the type
     *
     * @return type
     */
    public TSK_VS_TYPE_ENUM getType() {
        return TskData.TSK_VS_TYPE_ENUM.valueOf(type);
    }

    /**
     * get the byte offset
     *
     * @return byte offset
     */
    public long getOffset() {
        return imgOffset;
    }

    /**
     * get the block size
     *
     * @return block size
     */
    public long getBlockSize() {
        return blockSize;
    }

    /**
     * get the volume system Handle pointer Open a new handle if needed,
     * otherwise reuse the existing handle.
* * @return volume system Handle pointer * * @throws TskException */ protected synchronized long getVolumeSystemHandle() throws TskCoreException { if (volumeSystemHandle == 0) { Content dataSource = getDataSource(); if ((dataSource != null) && (dataSource instanceof Image)) { Image image = (Image) dataSource; volumeSystemHandle = SleuthkitJNI.openVs(image.getImageHandle(), imgOffset); } else { throw new TskCoreException("Volume System data source is not an image"); } } return volumeSystemHandle; } @Override public void close() { if (volumeSystemHandle != 0) { synchronized (this) { if (volumeSystemHandle != 0) { // SleuthkitJNI.closeVs(volumeSystemHandle); // closeVs is currently a no-op volumeSystemHandle = 0; } } } } @Override public void finalize() throws Throwable { try { close(); } finally { super.finalize(); } } @Override public T accept(SleuthkitItemVisitor v) { return v.visit(this); } @Override public T accept(ContentVisitor v) { return v.visit(this); } @Override public List getChildren() throws TskCoreException { return getSleuthkitCase().getVolumeSystemChildren(this); } @Override public List getChildrenIds() throws TskCoreException { return getSleuthkitCase().getVolumeSystemChildrenIds(this); } /** * @return a list of Volumes that are direct children of this VolumeSystem * * @throws TskCoreException */ public List getVolumes() throws TskCoreException { List volumes = new ArrayList(); for (Content child : getChildren()) { if (child instanceof Volume) { volumes.add((Volume) child); } } return volumes; } @Override public String toString(boolean preserveState) { return super.toString(preserveState) + "VolumeSystem [\t" + "blockSize " + blockSize + "\t" + "imgOffset " + imgOffset + "\t" + "type " + type + "]\t"; //NON-NLS } } sleuthkit-4.11.1/bindings/java/src/org/sleuthkit/datamodel/HostAddress.java000644 000765 000024 00000007674 14137073413 027555 0ustar00carrierstaff000000 000000 /* * Sleuth Kit Data Model * * Copyright 2021 Basis Technology Corp. 
* Contact: carrier sleuthkit org
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.sleuthkit.datamodel;

import java.util.Objects;

/**
 * Abstracts an address associated with a host. A host may have multiple
 * addresses of different types associated with it at a given time.
 */
public final class HostAddress extends AbstractContent {

    private final SleuthkitCase sleuthkitCase;
    private final long id;                      // row id of this address in the database
    private final HostAddressType addressType;  // kind of address (host name, IPv4, MAC, ...)
    private final String address;               // the address value itself

    /**
     * Create a HostAddress object.
     *
     * @param skCase Case the host address belongs to.
     * @param id Id of the host address in the database.
     * @param type Type of host address.
     * @param address The host address value.
*/ HostAddress(SleuthkitCase skCase, long id, HostAddressType type, String address) { super(skCase, id, address + "(" + type.getName() + ")"); this.sleuthkitCase = skCase; this.id = id; this.addressType = type; this.address = address; } @Override public long getId() { return id; } public HostAddressType getAddressType() { return addressType; } public String getAddress() { return address; } @Override public int hashCode() { int hash = 7; hash = 53 * hash + (int) (this.id ^ (this.id >>> 32)); hash = 53 * hash + Objects.hashCode(this.addressType); hash = 53 * hash + Objects.hashCode(this.address); return hash; } @Override public boolean equals(Object obj) { if (this == obj) { return true; } if (obj == null) { return false; } if (getClass() != obj.getClass()) { return false; } final HostAddress other = (HostAddress) obj; if (this.id != other.id) { return false; } if (this.addressType != other.addressType) { return false; } if ((this.address == null) ? (other.address != null) : !this.address.equals(other.address)) { return false; } return true; } /** * Gets the SleuthKit case database for this account. * * @return The SleuthKit case object. */ @Override public SleuthkitCase getSleuthkitCase() { return sleuthkitCase; } @Override public int read(byte[] buf, long offset, long len) throws TskCoreException { // No data to read. return 0; } @Override public void close() { // Nothing to close } @Override public long getSize() { return 0; } @Override public T accept(ContentVisitor v) { // TODO throw new UnsupportedOperationException("Not supported yet."); } @Override public T accept(SleuthkitItemVisitor v) { // TODO throw new UnsupportedOperationException("Not supported yet."); } /** * A host may have different types of addresses at a given point in time. 
*/ public enum HostAddressType { DNS_AUTO(0, "DNS Auto Detection"), // Used to auto-select the DNS type from HOSTNAME, IPV4, and IPV6 when creating HostAddresses HOSTNAME(1, "Host Name"), IPV4(2, "IPv4"), IPV6(3, "IPv6"), ETHERNET_MAC(4, "Ethernet MAC"), WIFI_MAC(5, "WiFi MAC"), BLUETOOTH_MAC(6, "BlueTooth MAC"); private final int id; private final String name; HostAddressType(int id, String name) { this.id = id; this.name = name; } public int getId() { return id; } String getName() { return name; } public static HostAddressType fromID(int typeId) { for (HostAddressType type : HostAddressType.values()) { if (type.ordinal() == typeId) { return type; } } return null; } } } sleuthkit-4.11.1/bindings/java/src/org/sleuthkit/datamodel/TskCaseDbBridge.java000644 000765 000024 00000127573 14137073413 030253 0ustar00carrierstaff000000 000000 /* * Autopsy Forensic Browser * * Copyright 2020 Basis Technology Corp. * Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
*/ package org.sleuthkit.datamodel; import com.google.common.base.Strings; import java.sql.PreparedStatement; import java.sql.SQLException; import java.sql.Statement; import org.apache.commons.lang3.StringUtils; import java.util.List; import java.util.Arrays; import java.util.ArrayList; import java.util.HashMap; import java.util.Iterator; import java.util.LinkedList; import java.util.Map; import java.util.Objects; import java.util.Optional; import java.util.Queue; import java.util.logging.Level; import java.util.logging.Logger; import org.sleuthkit.datamodel.OsAccountManager.NotUserSIDException; import org.sleuthkit.datamodel.SleuthkitCase.CaseDbTransaction; /** * This is a utility class to allow the native C code to write to the * case database. All callbacks from the native code should come through this class. * Any changes to the method signatures in this class will require changes to the * native code. * * Note that this code should only be used for the add image process, and not * to add additional files afterward. 
*/
class TskCaseDbBridge {

	private static final Logger logger = Logger.getLogger(TskCaseDbBridge.class.getName());

	private final SleuthkitCase caseDb;
	private CaseDbTransaction trans = null;
	private final AddDataSourceCallbacks addDataSourceCallbacks;
	private final Host imageHost;

	// Generic type parameters restored (they had been stripped, leaving raw types).
	// Element types follow the put/get sites elsewhere in this class.
	// Maps file system object id -> object id of its root directory.
	private final Map<Long, Long> fsIdToRootDir = new HashMap<>();
	// Maps file system object id -> its type, recorded in addFileSystem().
	private final Map<Long, TskData.TSK_FS_TYPE_ENUM> fsIdToFsType = new HashMap<>();
	// Caches directory object ids so batched files can find their parents.
	private final Map<ParentCacheKey, Long> parentDirCache = new HashMap<>();
	// Maps owner uid (e.g. a Windows SID) -> its OS account; a null value
	// marks a SID already determined not to be a user SID.
	private final Map<String, OsAccount> ownerIdToAccountMap = new HashMap<>();

	private static final long BATCH_FILE_THRESHOLD = 500;
	private final Queue<FileInfo> batchedFiles = new LinkedList<>();
	private final Queue<LayoutRangeInfo> batchedLayoutRanges = new LinkedList<>();
	private final List<Long> layoutFileIds = new ArrayList<>();

	TskCaseDbBridge(SleuthkitCase caseDb, AddDataSourceCallbacks addDataSourceCallbacks, Host host) {
		this.caseDb = caseDb;
		this.addDataSourceCallbacks = addDataSourceCallbacks;
		imageHost = host;
		trans = null;
	}

	/**
	 * Start a transaction
	 *
	 * @throws TskCoreException
	 */
	private void beginTransaction() throws TskCoreException {
		trans = caseDb.beginTransaction();
	}

	/**
	 * Commit the current transaction
	 *
	 * @throws TskCoreException
	 */
	private void commitTransaction() throws TskCoreException {
		trans.commit();
		trans = null;
	}

	/**
	 * Revert the current transaction
	 */
	private void revertTransaction() {
		try {
			if (trans != null) {
				trans.rollback();
				trans = null;
			}
		} catch (TskCoreException ex) {
			logger.log(Level.SEVERE, "Error rolling back transaction", ex);
		}
	}

	/**
	 * Add any remaining files to the database.
	 */
	void finish() {
		// Flush both batch queues, then report layout files (their ranges must
		// all be in the database before the callback fires).
		addBatchedFilesToDb();
		addBatchedLayoutRangesToDb();
		processLayoutFiles();
	}

	/**
	 * Add a new image to the database.
	 * Intended to be called from the native code during the add image process.
	 * Will not be called if the image was added to the database prior to starting
	 * the add image process.
	 *
	 * @param type     Type of image.
	 * @param ssize    Sector size.
	 * @param timezone Time zone.
	 * @param size     Image size.
	 * @param md5      MD5 hash.
	 * @param sha1     SHA1 hash.
* @param sha256            SHA256 hash.
	 * @param deviceId          Device ID.
	 * @param collectionDetails The collection details.
	 * @param paths             Data source path(s)
	 *
	 * @return The object ID of the new image or -1 if an error occurred
	 */
	long addImageInfo(int type, long ssize, String timezone, long size, String md5, String sha1, String sha256,
			String deviceId, String collectionDetails, String[] paths) {
		try {
			beginTransaction();
			// Insert the image row, then one tsk_image_names row per path; the
			// array index doubles as the name's sequence number.
			long objId = addImageToDb(TskData.TSK_IMG_TYPE_ENUM.valueOf(type), ssize, size, timezone, md5, sha1, sha256,
					deviceId, collectionDetails, trans);
			for (int i = 0; i < paths.length; i++) {
				addImageNameToDb(objId, paths[i], i, trans);
			}
			commitTransaction();
			return objId;
		} catch (TskCoreException ex) {
			logger.log(Level.SEVERE, "Error adding image to the database", ex);
			revertTransaction();
			return -1;
		}
	}

	/**
	 * Add the acquisition details to the image object.
	 *
	 * @param imgId   ID of the image
	 * @param details The details
	 */
	void addAcquisitionDetails(long imgId, String details) {
		try {
			beginTransaction();
			caseDb.setAcquisitionDetails(imgId, details, trans);
			commitTransaction();
		} catch (TskCoreException ex) {
			logger.log(Level.SEVERE, "Error adding image details \"" + details + "\" to image with ID " + imgId, ex);
			revertTransaction();
		}
	}

	/**
	 * Add a volume system to the database.
	 * Intended to be called from the native code during the add image process.
*
	 * @param parentObjId
	 * @param vsType
	 * @param imgOffset
	 * @param blockSize
	 *
	 * @return The object ID of the new volume system or -1 if an error occurred
	 */
	long addVsInfo(long parentObjId, int vsType, long imgOffset, long blockSize) {
		try {
			beginTransaction();
			VolumeSystem vs = caseDb.addVolumeSystem(parentObjId, TskData.TSK_VS_TYPE_ENUM.valueOf(vsType), imgOffset, blockSize, trans);
			commitTransaction();
			return vs.getId();
		} catch (TskCoreException ex) {
			logger.log(Level.SEVERE, "Error adding volume system to the database - parent obj ID: " + parentObjId + ", image offset: " + imgOffset, ex);
			revertTransaction();
			return -1;
		}
	}

	/**
	 * Add a volume to the database.
	 * Intended to be called from the native code during the add image process.
	 *
	 * @param parentObjId
	 * @param addr
	 * @param start
	 * @param length
	 * @param desc
	 * @param flags
	 *
	 * @return The object ID of the new volume or -1 if an error occurred
	 */
	long addVolume(long parentObjId, long addr, long start, long length, String desc, long flags) {
		try {
			beginTransaction();
			Volume vol = caseDb.addVolume(parentObjId, addr, start, length, desc, flags, trans);
			commitTransaction();
			return vol.getId();
		} catch (TskCoreException ex) {
			logger.log(Level.SEVERE, "Error adding volume to the database - parent object ID: " + parentObjId + ", addr: " + addr, ex);
			revertTransaction();
			return -1;
		}
	}

	/**
	 * Add a pool to the database.
	 * Intended to be called from the native code during the add image process.
*
	 * @param parentObjId
	 * @param poolType
	 *
	 * @return The object ID of the new pool or -1 if an error occurred
	 */
	long addPool(long parentObjId, int poolType) {
		try {
			beginTransaction();
			Pool pool = caseDb.addPool(parentObjId, TskData.TSK_POOL_TYPE_ENUM.valueOf(poolType), trans);
			commitTransaction();
			return pool.getId();
		} catch (TskCoreException ex) {
			logger.log(Level.SEVERE, "Error adding pool to the database - parent object ID: " + parentObjId, ex);
			revertTransaction();
			return -1;
		}
	}

	/**
	 * Add a file system to the database.
	 * Intended to be called from the native code during the add image process.
	 *
	 * @param parentObjId
	 * @param imgOffset
	 * @param fsType
	 * @param blockSize
	 * @param blockCount
	 * @param rootInum
	 * @param firstInum
	 * @param lastInum
	 *
	 * @return The object ID of the new file system or -1 if an error occurred
	 */
	long addFileSystem(long parentObjId, long imgOffset, int fsType, long blockSize, long blockCount,
			long rootInum, long firstInum, long lastInum) {
		try {
			beginTransaction();
			FileSystem fs = caseDb.addFileSystem(parentObjId, imgOffset, TskData.TSK_FS_TYPE_ENUM.valueOf(fsType), blockSize, blockCount,
					rootInum, firstInum, lastInum, null, trans);
			commitTransaction();
			// Cache the file system type; the parent-directory cache key uses it
			// to decide whether the parent sequence number matters (NTFS only).
			fsIdToFsType.put(fs.getId(), TskData.TSK_FS_TYPE_ENUM.valueOf(fsType));
			return fs.getId();
		} catch (TskCoreException ex) {
			logger.log(Level.SEVERE, "Error adding file system to the database - parent object ID: " + parentObjId + ", offset: " + imgOffset, ex);
			revertTransaction();
			return -1;
		}
	}

	/**
	 * Add a file to the database.
	 * File inserts are batched so the file may not be added immediately.
	 * Intended to be called from the native code during the add image process.
	 *
	 * @param parentObjId     The parent of the file if known or 0 if unknown.
	 * @param fsObjId         The object ID of the file system.
	 * @param dataSourceObjId The data source object ID.
	 * @param fsType          The type.
	 * @param attrType        The type attribute given to the file by the file system.
* @param attrId The type id given to the file by the file system. * @param name The name of the file. * @param metaAddr The meta address of the file. * @param metaSeq The meta sequence number of the file from fs_file->name->meta_seq. * @param dirType The type of the file, usually as reported in * the name structure of the file system. * @param metaType The type of the file, usually as reported in * the metadata structure of the file system. * @param dirFlags The allocated status of the file, usually as * reported in the name structure of the file system. * @param metaFlags The allocated status of the file, usually as * reported in the metadata structure of the file system. * @param size The file size. * @param crtime The created time. * @param ctime The last changed time * @param atime The last accessed time. * @param mtime The last modified time. * @param meta_mode The modes for the file. * @param gid The group identifier. * @param uid The user identifier. * @param md5 The MD5 hash. * @param known The file known status. * @param escaped_path The escaped path to the file. * @param extension The file extension. * @param seq The sequence number from fs_file->meta->seq. * @param parMetaAddr The metadata address of the parent * @param parSeq The parent sequence number if NTFS, -1 otherwise. * @param ownerUid String uid of the file owner. May be an empty string. 
* * @return 0 if successful, -1 if not */ long addFile(long parentObjId, long fsObjId, long dataSourceObjId, int fsType, int attrType, int attrId, String name, long metaAddr, long metaSeq, int dirType, int metaType, int dirFlags, int metaFlags, long size, long crtime, long ctime, long atime, long mtime, int meta_mode, int gid, int uid, String escaped_path, String extension, long seq, long parMetaAddr, long parSeq, String ownerUid) { // Add the new file to the list batchedFiles.add(new FileInfo(parentObjId, fsObjId, dataSourceObjId, fsType, attrType, attrId, name, metaAddr, metaSeq, dirType, metaType, dirFlags, metaFlags, size, crtime, ctime, atime, mtime, meta_mode, gid, uid, escaped_path, extension, seq, parMetaAddr, parSeq, ownerUid)); // Add the current files to the database if we've exceeded the threshold or if we // have the root folder. if ((fsObjId == parentObjId) || (batchedFiles.size() > BATCH_FILE_THRESHOLD)) { return addBatchedFilesToDb(); } return 0; } /** * Add the current set of files to the database. * * @return 0 if successful, -1 if not */ private long addBatchedFilesToDb() { List newObjIds = new ArrayList<>(); try { // loop through the batch, and make sure owner accounts exist for all the files in the batch. // If not, create accounts. 
Iterator it = batchedFiles.iterator(); while (it.hasNext()) { FileInfo fileInfo = it.next(); String ownerUid = fileInfo.ownerUid; if (Strings.isNullOrEmpty(fileInfo.ownerUid) == false) { // first check the owner id is in the map, if found, then continue if (this.ownerIdToAccountMap.containsKey(ownerUid)) { continue; } // query the DB to get the owner account try { Optional ownerAccount = caseDb.getOsAccountManager().getWindowsOsAccount(ownerUid, null, null, imageHost); if (ownerAccount.isPresent()) { // found account - add to map ownerIdToAccountMap.put(ownerUid, ownerAccount.get()); } else { // account not found in the database, create the account and add to map // Currently we expect only NTFS systems to provide a windows style SID as owner id. OsAccountManager accountMgr = caseDb.getOsAccountManager(); OsAccount newAccount = accountMgr.newWindowsOsAccount(ownerUid, null, null, imageHost, OsAccountRealm.RealmScope.UNKNOWN); Content ds = caseDb.getContentById(fileInfo.dataSourceObjId); // Data sources are cached so this will only access the database once if (ds instanceof DataSource) { accountMgr.newOsAccountInstance(newAccount, (DataSource)ds, OsAccountInstance.OsAccountInstanceType.ACCESSED); } ownerIdToAccountMap.put(ownerUid, newAccount); } } catch (NotUserSIDException ex) { // if the owner SID is not a user SID, set the owner account to null ownerIdToAccountMap.put(ownerUid, null); } } } beginTransaction(); FileInfo fileInfo; while ((fileInfo = batchedFiles.poll()) != null) { long computedParentObjId = fileInfo.parentObjId; try { // If we weren't given the parent object ID, look it up if (fileInfo.parentObjId == 0) { computedParentObjId = getParentObjId(fileInfo); } Long ownerAccountObjId = OsAccount.NO_ACCOUNT; if (Strings.isNullOrEmpty(fileInfo.ownerUid) == false) { if (ownerIdToAccountMap.containsKey(fileInfo.ownerUid)) { // for any non user SIDs, the map will have a null for account if (Objects.nonNull(ownerIdToAccountMap.get(fileInfo.ownerUid))) { 
ownerAccountObjId = ownerIdToAccountMap.get(fileInfo.ownerUid).getId(); } } else { // Error - the map should have an account or a null at this point for the owner SID. throw new TskCoreException(String.format("Failed to add file. Owner account not found for file with parent object ID: %d, name: %s, owner id: %s", fileInfo.parentObjId, fileInfo.name, fileInfo.ownerUid)); } } // We've seen a case where the root folder comes in with an undefined meta type. // We've also seen a case where it comes in as a regular file. The root folder should always be // a directory so it will be cached properly and will not cause errors later for // being an unexpected type. if ((fileInfo.parentObjId == fileInfo.fsObjId) && (fileInfo.metaType != TskData.TSK_FS_META_TYPE_ENUM.TSK_FS_META_TYPE_DIR.getValue())) { fileInfo.metaType = TskData.TSK_FS_META_TYPE_ENUM.TSK_FS_META_TYPE_DIR.getValue(); } long objId = addFileToDb(computedParentObjId, fileInfo.fsObjId, fileInfo.dataSourceObjId, fileInfo.fsType, fileInfo.attrType, fileInfo.attrId, fileInfo.name, fileInfo.metaAddr, fileInfo.metaSeq, fileInfo.dirType, fileInfo.metaType, fileInfo.dirFlags, fileInfo.metaFlags, fileInfo.size, fileInfo.crtime, fileInfo.ctime, fileInfo.atime, fileInfo.mtime, fileInfo.meta_mode, fileInfo.gid, fileInfo.uid, null, TskData.FileKnown.UNKNOWN, fileInfo.escaped_path, fileInfo.extension, fileInfo.ownerUid, ownerAccountObjId, false, trans); if (fileInfo.fsObjId != fileInfo.parentObjId) { // Add new file ID to the list to send to ingest unless it is the root folder newObjIds.add(objId); } // If we're adding the root directory for the file system, cache it if (fileInfo.parentObjId == fileInfo.fsObjId) { fsIdToRootDir.put(fileInfo.fsObjId, objId); } // If the file is a directory, cache the object ID. if ((fileInfo.metaType == TskData.TSK_FS_META_TYPE_ENUM.TSK_FS_META_TYPE_DIR.getValue() || (fileInfo.metaType == TskData.TSK_FS_META_TYPE_ENUM.TSK_FS_META_TYPE_VIRT_DIR.getValue())) && (fileInfo.name != null) && ! 
fileInfo.name.equals(".") && ! fileInfo.name.equals("..")) { String dirName = fileInfo.escaped_path + fileInfo.name; ParentCacheKey key = new ParentCacheKey(fileInfo.fsObjId, fileInfo.metaAddr, fileInfo.seq, dirName); parentDirCache.put(key, objId); } } catch (TskCoreException ex) { if (computedParentObjId > 0) { // Most likely a database error occurred logger.log(Level.SEVERE, "Error adding file to the database - parent object ID: " + computedParentObjId + ", file system object ID: " + fileInfo.fsObjId + ", name: " + fileInfo.name, ex); } else { // The parent lookup failed logger.log(Level.SEVERE, "Error adding file to the database", ex); } } } commitTransaction(); try { addDataSourceCallbacks.onFilesAdded(newObjIds); } catch (Exception ex) { // Exception firewall to prevent unexpected return to the native code logger.log(Level.SEVERE, "Unexpected error from files added callback", ex); } } catch (TskCoreException ex) { logger.log(Level.SEVERE, "Error adding batched files to database", ex); revertTransaction(); return -1; } return 0; } /** * Look up the parent object ID for a file using the cache or the database. * * @param fileInfo The file to find the parent of * * @return Parent object ID * * @throws TskCoreException */ private long getParentObjId(FileInfo fileInfo) throws TskCoreException { // Remove the final slash from the path unless we're in the root folder String parentPath = fileInfo.escaped_path; if(parentPath.endsWith("/") && ! parentPath.equals("/")) { parentPath = parentPath.substring(0, parentPath.lastIndexOf('/')); } // Look up the parent ParentCacheKey key = new ParentCacheKey(fileInfo.fsObjId, fileInfo.parMetaAddr, fileInfo.parSeq, parentPath); if (parentDirCache.containsKey(key)) { return parentDirCache.get(key); } else { // There's no reason to do a database query since every folder added is being // stored in the cache. 
throw new TskCoreException("Could not find parent (fsObjId: " +fileInfo.fsObjId + ", parMetaAddr: " + fileInfo.parMetaAddr + ", parSeq: " + fileInfo.parSeq + ", parentPath: " + parentPath + ")"); } } /** * Add a layout file to the database. * Intended to be called from the native code during the add image process. * * @param parentObjId The parent object ID of the layout file. * @param fsObjId The file system object ID. * @param dataSourceObjId The data source object ID. * @param fileType The file type. * @param name The file name. * @param size The file size. * * @return The object ID of the new file or -1 if an error occurred */ long addLayoutFile(long parentObjId, long fsObjId, long dataSourceObjId, int fileType, String name, long size) { try { // The file system may be null for layout files Long fsObjIdForDb = fsObjId; if (fsObjId == 0) { fsObjIdForDb = null; } beginTransaction(); long objId = addFileToDb(parentObjId, fsObjIdForDb, dataSourceObjId, fileType, null, null, name, null, null, TskData.TSK_FS_NAME_TYPE_ENUM.REG.getValue(), TskData.TSK_FS_META_TYPE_ENUM.TSK_FS_META_TYPE_REG.getValue(), TskData.TSK_FS_NAME_FLAG_ENUM.UNALLOC.getValue(), TskData.TSK_FS_META_FLAG_ENUM.UNALLOC.getValue(), size, null, null, null, null, null, null, null, null, TskData.FileKnown.UNKNOWN, null, null, null, OsAccount.NO_ACCOUNT, true, trans); commitTransaction(); // Store the layout file ID for later processing layoutFileIds.add(objId); return objId; } catch (TskCoreException ex) { logger.log(Level.SEVERE, "Error adding layout file to the database - parent object ID: " + parentObjId + ", file system object ID: " + fsObjId + ", name: " + name, ex); revertTransaction(); return -1; } } /** * Add a layout file range to the database. * Intended to be called from the native code during the add image process. * * @param objId Object ID of the layout file. * @param byteStart Start byte. * @param byteLen Length in bytes. * @param seq Sequence number of this range. 
* * @return 0 if successful, -1 if not */ long addLayoutFileRange(long objId, long byteStart, long byteLen, long seq) { batchedLayoutRanges.add(new LayoutRangeInfo(objId, byteStart, byteLen, seq)); if (batchedLayoutRanges.size() > BATCH_FILE_THRESHOLD) { return addBatchedLayoutRangesToDb(); } return 0; } /** * Add the current set of layout ranges to the database. * * @return 0 if successful, -1 if not */ private long addBatchedLayoutRangesToDb() { try { beginTransaction(); LayoutRangeInfo range; while ((range = batchedLayoutRanges.poll()) != null) { try { addLayoutFileRangeToDb(range.objId, range.byteStart, range.byteLen, range.seq, trans); } catch (TskCoreException ex) { logger.log(Level.SEVERE, "Error adding layout file range to the database - layout file ID: " + range.objId + ", byte start: " + range.byteStart + ", length: " + range.byteLen + ", seq: " + range.seq, ex); } } commitTransaction(); return 0; } catch (TskCoreException ex) { logger.log(Level.SEVERE, "Error adding batched files to database", ex); revertTransaction(); return -1; } } /** * Send completed layout files on for further processing. * Note that this must wait until we know all the ranges for each * file have been added to the database. */ void processLayoutFiles() { addDataSourceCallbacks.onFilesAdded(layoutFileIds); layoutFileIds.clear(); } /** * Add a virtual directory to hold unallocated file system blocks. * Intended to be called from the native code during the add image process. * * @param fsObjId * @param name * * @return The object ID of the new virtual directory or -1 if an error occurred */ long addUnallocFsBlockFilesParent(long fsObjId, String name) { try { if (! 
fsIdToRootDir.containsKey(fsObjId)) { logger.log(Level.SEVERE, "Error - root directory for file system ID {0} not found", fsObjId); return -1; } beginTransaction(); VirtualDirectory dir = caseDb.addVirtualDirectory(fsIdToRootDir.get(fsObjId), name, trans); commitTransaction(); addDataSourceCallbacks.onFilesAdded(Arrays.asList(dir.getId())); return dir.getId(); } catch (TskCoreException ex) { logger.log(Level.SEVERE, "Error creating virtual directory " + name + " under file system ID " + fsObjId, ex); revertTransaction(); return -1; } } /** * Class to use as a key into the parent object ID map */ private class ParentCacheKey { long fsObjId; long metaAddr; long seqNum; String path; /** * Create the key into the parent dir cache. * Only NTFS uses the seqNum of the parent. For all file systems set to zero. * * @param fsObjId The file system object ID. * @param metaAddr The metadata address of the directory. * @param seqNum The sequence number of the directory. Unused unless file system is NTFS. * @param path The path to the directory (should not include final slash unless root dir). */ ParentCacheKey(long fsObjId, long metaAddr, long seqNum, String path) { this.fsObjId = fsObjId; this.metaAddr = metaAddr; if (ownerIdToAccountMap.containsKey(fsObjId) && (ownerIdToAccountMap.get(fsObjId).equals(TskData.TSK_FS_TYPE_ENUM.TSK_FS_TYPE_NTFS) || ownerIdToAccountMap.get(fsObjId).equals(TskData.TSK_FS_TYPE_ENUM.TSK_FS_TYPE_NTFS_DETECT))) { this.seqNum = seqNum; } else { this.seqNum = 0; } this.path = path; } @Override public boolean equals(Object obj) { if (! 
(obj instanceof ParentCacheKey)) { return false; } ParentCacheKey otherKey = (ParentCacheKey) obj; if (this.fsObjId != otherKey.fsObjId || this.metaAddr != otherKey.metaAddr || this.seqNum != otherKey.seqNum) { return false; } return StringUtils.equals(this.path, otherKey.path); } @Override public int hashCode() { int hash = 3; hash = 31 * hash + (int) (this.fsObjId ^ (this.fsObjId >>> 32)); hash = 31 * hash + (int) (this.metaAddr ^ (this.metaAddr >>> 32)); hash = 31 * hash + (int) (this.seqNum ^ (this.seqNum >>> 32)); hash = 31 * hash + Objects.hashCode(this.path); return hash; } } /** * Utility class to hold data for layout ranges waiting * to be added to the database. */ private class LayoutRangeInfo { long objId; long byteStart; long byteLen; long seq; LayoutRangeInfo(long objId, long byteStart, long byteLen, long seq) { this.objId = objId; this.byteStart = byteStart; this.byteLen = byteLen; this.seq = seq; } } /** * Utility class to hold data for files waiting to be * added to the database. 
*/
	private class FileInfo {

		// Raw file metadata captured from the native code, held until the
		// batched insert runs.
		long parentObjId;
		long fsObjId;
		long dataSourceObjId;
		int fsType;
		int attrType;
		int attrId;
		String name;
		long metaAddr;
		long metaSeq;
		int dirType;
		int metaType;
		int dirFlags;
		int metaFlags;
		long size;
		long crtime;
		long ctime;
		long atime;
		long mtime;
		int meta_mode;
		int gid;
		int uid;
		String escaped_path;
		String extension;
		long seq;
		long parMetaAddr;
		long parSeq;
		String ownerUid;

		FileInfo(long parentObjId, long fsObjId, long dataSourceObjId, int fsType,
				int attrType, int attrId, String name, long metaAddr, long metaSeq,
				int dirType, int metaType, int dirFlags, int metaFlags, long size,
				long crtime, long ctime, long atime, long mtime, int meta_mode,
				int gid, int uid, String escaped_path, String extension, long seq,
				long parMetaAddr, long parSeq, String ownerUid) {
			this.parentObjId = parentObjId;
			this.fsObjId = fsObjId;
			this.dataSourceObjId = dataSourceObjId;
			this.fsType = fsType;
			this.attrType = attrType;
			this.attrId = attrId;
			this.name = name;
			this.metaAddr = metaAddr;
			this.metaSeq = metaSeq;
			this.dirType = dirType;
			this.metaType = metaType;
			this.dirFlags = dirFlags;
			this.metaFlags = metaFlags;
			this.size = size;
			this.crtime = crtime;
			this.ctime = ctime;
			this.atime = atime;
			this.mtime = mtime;
			this.meta_mode = meta_mode;
			this.gid = gid;
			this.uid = uid;
			this.escaped_path = escaped_path;
			this.extension = extension;
			this.seq = seq;
			this.parMetaAddr = parMetaAddr;
			this.parSeq = parSeq;
			this.ownerUid = ownerUid;
		}
	}

	/**
	 * Add a file system file to the database.
	 *
	 * @param parentObjId     The parent of the file.
	 * @param fsObjId         The object ID of the file system.
	 * @param dataSourceObjId The data source object ID.
	 * @param fsType          The type.
	 * @param attrType        The type attribute given to the file by the file
	 *                        system.
	 * @param attrId          The type id given to the file by the file system.
	 * @param name            The name of the file.
	 * @param metaAddr        The meta address of the file.
	 * @param metaSeq         The meta sequence number of the file.
* @param dirType The type of the file, usually as reported in the * name structure of the file system. * @param metaType The type of the file, usually as reported in the * metadata structure of the file system. * @param dirFlags The allocated status of the file, usually as * reported in the name structure of the file system. * @param metaFlags The allocated status of the file, usually as * reported in the metadata structure of the file * system. * @param size The file size. * @param crtime The created time. * @param ctime The last changed time * @param atime The last accessed time. * @param mtime The last modified time. * @param meta_mode The modes for the file. * @param gid The group identifier. * @param uid The user identifier. * @param md5 The MD5 hash. * @param known The file known status. * @param escaped_path The escaped path to the file. * @param extension The file extension. * @param ownerUid Unique id of the file owner. * @param ownerAcctObjId Object id of the owner account. * @param hasLayout True if this is a layout file, false otherwise. * @param transaction The open transaction. * * @return The object ID of the new file system * * @throws TskCoreException */ private long addFileToDb(long parentObjId, Long fsObjId, long dataSourceObjId, int fsType, Integer attrType, Integer attrId, String name, Long metaAddr, Long metaSeq, int dirType, int metaType, int dirFlags, int metaFlags, long size, Long crtime, Long ctime, Long atime, Long mtime, Integer meta_mode, Integer gid, Integer uid, String md5, TskData.FileKnown known, String escaped_path, String extension, String ownerUid, Long ownerAcctObjId, boolean hasLayout, CaseDbTransaction transaction) throws TskCoreException { try { SleuthkitCase.CaseDbConnection connection = transaction.getConnection(); // Insert a row for the local/logical file into the tsk_objects table. // INSERT INTO tsk_objects (par_obj_id, type) VALUES (?, ?) 
long objectId = caseDb.addObject(parentObjId, TskData.ObjectType.ABSTRACTFILE.getObjectType(), connection); String fileInsert = "INSERT INTO tsk_files (fs_obj_id, obj_id, data_source_obj_id, type, attr_type, attr_id, name, meta_addr, meta_seq, dir_type, meta_type, dir_flags, meta_flags, size, crtime, ctime, atime, mtime, mode, gid, uid, md5, known, parent_path, extension, has_layout, owner_uid, os_account_obj_id)" + " VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)"; // NON-NLS PreparedStatement preparedStatement = connection.getPreparedStatement(fileInsert, Statement.NO_GENERATED_KEYS); preparedStatement.clearParameters(); if (fsObjId != null) { preparedStatement.setLong(1, fsObjId); // fs_obj_id } else { preparedStatement.setNull(1, java.sql.Types.BIGINT); } preparedStatement.setLong(2, objectId); // obj_id preparedStatement.setLong(3, dataSourceObjId); // data_source_obj_id preparedStatement.setShort(4, (short) fsType); // type if (attrType != null) { preparedStatement.setShort(5, attrType.shortValue()); // attr_type } else { preparedStatement.setNull(5, java.sql.Types.SMALLINT); } if (attrId != null) { preparedStatement.setInt(6, attrId); // attr_id } else { preparedStatement.setNull(6, java.sql.Types.INTEGER); } preparedStatement.setString(7, name); // name if (metaAddr != null) { preparedStatement.setLong(8, metaAddr); // meta_addr } else { preparedStatement.setNull(8, java.sql.Types.BIGINT); } if (metaSeq != null) { preparedStatement.setInt(9, metaSeq.intValue()); // meta_seq } else { preparedStatement.setNull(9, java.sql.Types.INTEGER); } preparedStatement.setShort(10, (short) dirType); // dir_type preparedStatement.setShort(11, (short) metaType); // meta_type preparedStatement.setShort(12, (short) dirFlags); // dir_flags preparedStatement.setShort(13, (short) metaFlags); // meta_flags preparedStatement.setLong(14, size < 0 ? 
0 : size); // size if (crtime != null) { preparedStatement.setLong(15, crtime); // crtime } else { preparedStatement.setNull(15, java.sql.Types.BIGINT); } if (ctime != null) { preparedStatement.setLong(16, ctime); // ctime } else { preparedStatement.setNull(16, java.sql.Types.BIGINT); } if (atime != null) { preparedStatement.setLong(17, atime); // atime } else { preparedStatement.setNull(17, java.sql.Types.BIGINT); } if (mtime != null) { preparedStatement.setLong(18, mtime); // mtime } else { preparedStatement.setNull(18, java.sql.Types.BIGINT); } if (meta_mode != null) { preparedStatement.setLong(19, meta_mode); // mode } else { preparedStatement.setNull(19, java.sql.Types.BIGINT); } if (gid != null) { preparedStatement.setLong(20, gid); // gid } else { preparedStatement.setNull(20, java.sql.Types.BIGINT); } if (uid != null) { preparedStatement.setLong(21, uid); // uid } else { preparedStatement.setNull(21, java.sql.Types.BIGINT); } preparedStatement.setString(22, md5); // md5 preparedStatement.setInt(23, known.getFileKnownValue());// known preparedStatement.setString(24, escaped_path); // parent_path preparedStatement.setString(25, extension); // extension if (hasLayout) { preparedStatement.setInt(26, 1); // has_layout } else { preparedStatement.setNull(26, java.sql.Types.INTEGER); } preparedStatement.setString(27, ownerUid); // ownerUid if (ownerAcctObjId != OsAccount.NO_ACCOUNT) { preparedStatement.setLong(28, ownerAcctObjId); // } else { preparedStatement.setNull(28, java.sql.Types.BIGINT); } connection.executeUpdate(preparedStatement); // If this is not a slack file create the timeline events if (!hasLayout && TskData.TSK_DB_FILES_TYPE_ENUM.SLACK.getFileType() != fsType && (!name.equals(".")) && (!name.equals(".."))) { TimelineManager timelineManager = caseDb.getTimelineManager(); DerivedFile derivedFile = new DerivedFile(caseDb, objectId, dataSourceObjId, name, TskData.TSK_FS_NAME_TYPE_ENUM.valueOf((short) dirType), 
TskData.TSK_FS_META_TYPE_ENUM.valueOf((short) metaType), TskData.TSK_FS_NAME_FLAG_ENUM.valueOf(dirFlags), (short) metaFlags, size, ctime, crtime, atime, mtime, null, null, null, escaped_path, null, parentObjId, null, null, extension, ownerUid, ownerAcctObjId); timelineManager.addEventsForNewFileQuiet(derivedFile, connection); } return objectId; } catch (SQLException ex) { throw new TskCoreException("Failed to add file system file", ex); } } /** * Add an image to the database. * * @param type Type of image. * @param sectorSize Sector size. * @param size Image size. * @param timezone Time zone. * @param md5 MD5 hash. * @param sha1 SHA1 hash. * @param sha256 SHA256 hash. * @param deviceId Device ID. * @param collectionDetails Collection details. * @param hostId The ID of a host already in the database. * @param transaction Case DB transaction. * * @return The newly added Image object ID. * * @throws TskCoreException */ private long addImageToDb(TskData.TSK_IMG_TYPE_ENUM type, long sectorSize, long size, String timezone, String md5, String sha1, String sha256, String deviceId, String collectionDetails, CaseDbTransaction transaction) throws TskCoreException { try { // Insert a row for the Image into the tsk_objects table. 
SleuthkitCase.CaseDbConnection connection = transaction.getConnection(); long newObjId = caseDb.addObject(0, TskData.ObjectType.IMG.getObjectType(), connection); // Add a row to tsk_image_info // INSERT INTO tsk_image_info (obj_id, type, ssize, tzone, size, md5, sha1, sha256, display_name) String imageInfoSql = "INSERT INTO tsk_image_info (obj_id, type, ssize, tzone, size, md5, sha1, sha256, display_name)" + " VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)"; // NON-NLS PreparedStatement preparedStatement = connection.getPreparedStatement(imageInfoSql, Statement.NO_GENERATED_KEYS); preparedStatement.clearParameters(); preparedStatement.setLong(1, newObjId); preparedStatement.setShort(2, (short) type.getValue()); preparedStatement.setLong(3, sectorSize); preparedStatement.setString(4, timezone); //prevent negative size long savedSize = size < 0 ? 0 : size; preparedStatement.setLong(5, savedSize); preparedStatement.setString(6, md5); preparedStatement.setString(7, sha1); preparedStatement.setString(8, sha256); preparedStatement.setString(9, null); connection.executeUpdate(preparedStatement); // Add a row to data_source_info String dataSourceInfoSql = "INSERT INTO data_source_info (obj_id, device_id, time_zone, acquisition_details, host_id) VALUES (?, ?, ?, ?, ?)"; // NON-NLS preparedStatement = connection.getPreparedStatement(dataSourceInfoSql, Statement.NO_GENERATED_KEYS); preparedStatement.clearParameters(); preparedStatement.setLong(1, newObjId); preparedStatement.setString(2, deviceId); preparedStatement.setString(3, timezone); preparedStatement.setString(4, collectionDetails); preparedStatement.setLong(5, imageHost.getHostId()); connection.executeUpdate(preparedStatement); return newObjId; } catch (SQLException ex) { throw new TskCoreException(String.format("Error adding image to database"), ex); } } /** * Add an image name to the database. * * @param objId The object id of the image. 
* @param name The file name for the image * @param sequence The sequence number of this file. * @param transaction The open transaction. * * @throws TskCoreException */ private void addImageNameToDb(long objId, String name, long sequence, CaseDbTransaction transaction) throws TskCoreException { try { SleuthkitCase.CaseDbConnection connection = transaction.getConnection(); String imageNameSql = "INSERT INTO tsk_image_names (obj_id, name, sequence) VALUES (?, ?, ?)"; // NON-NLS PreparedStatement preparedStatement = connection.getPreparedStatement(imageNameSql, Statement.NO_GENERATED_KEYS); preparedStatement.clearParameters(); preparedStatement.setLong(1, objId); preparedStatement.setString(2, name); preparedStatement.setLong(3, sequence); connection.executeUpdate(preparedStatement); } catch (SQLException ex) { throw new TskCoreException(String.format("Error adding image name %s to image with object ID %d", name, objId), ex); } } /** * Add a layout file range to the database. * * @param objId Object ID of the layout file. * @param byteStart Start byte. * @param byteLen Length in bytes. * @param seq Sequence number of this range. * @param transaction The open transaction. 
* * @throws TskCoreException */ void addLayoutFileRangeToDb(long objId, long byteStart, long byteLen, long seq, CaseDbTransaction transaction) throws TskCoreException { try { SleuthkitCase.CaseDbConnection connection = transaction.getConnection(); String insertRangeSql = "INSERT INTO tsk_file_layout (obj_id, byte_start, byte_len, sequence) " //NON-NLS + "VALUES (?, ?, ?, ?)"; PreparedStatement preparedStatement = connection.getPreparedStatement(insertRangeSql, Statement.NO_GENERATED_KEYS); preparedStatement.clearParameters(); preparedStatement.setLong(1, objId); preparedStatement.setLong(2, byteStart); preparedStatement.setLong(3, byteLen); preparedStatement.setLong(4, seq); connection.executeUpdate(preparedStatement); } catch (SQLException ex) { throw new TskCoreException("Error adding layout range to file with obj ID " + objId, ex); } } } sleuthkit-4.11.1/bindings/java/src/org/sleuthkit/datamodel/AnalysisResultAdded.java000644 000765 000024 00000002321 14137073413 031216 0ustar00carrierstaff000000 000000 /* * Sleuth Kit Data Model * * Copyright 2020-2021 Basis Technology Corp. * Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.sleuthkit.datamodel; /** * This class encapsulates an analysis result added to Content, and the * content's aggregate score upon adding the analysis result. 
*/ public class AnalysisResultAdded { private final AnalysisResult analysisResult; private final Score score; AnalysisResultAdded(AnalysisResult analysisResult, Score score) { this.analysisResult = analysisResult; this.score = score; } public AnalysisResult getAnalysisResult() { return analysisResult; } public Score getAggregateScore() { return score; } } sleuthkit-4.11.1/bindings/java/src/org/sleuthkit/datamodel/TimelineEventTypes.java000644 000765 000024 00000015574 14137073413 031125 0ustar00carrierstaff000000 000000 /* * Sleuth Kit Data Model * * Copyright 2018-2019 Basis Technology Corp. * Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.sleuthkit.datamodel; import com.google.common.net.InternetDomainName; import java.net.URI; import java.net.URISyntaxException; import java.util.stream.Collectors; import java.util.stream.Stream; import org.apache.commons.lang3.StringUtils; import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_GEO_TRACKPOINTS; import org.sleuthkit.datamodel.blackboardutils.attributes.BlackboardJsonAttrUtil; import org.sleuthkit.datamodel.blackboardutils.attributes.GeoTrackPoints; /** * Container class for various types of timeline events * */ class TimelineEventTypes { private TimelineEventTypes() { } /** * Function that always returns the empty string no matter what it is * applied to. 
* */ final static class EmptyExtractor implements TimelineEventArtifactTypeImpl.TSKCoreCheckedFunction { @Override public String apply(BlackboardArtifact ignored) throws TskCoreException { return ""; } } static class URLArtifactEventType extends TimelineEventArtifactTypeSingleDescription { URLArtifactEventType(int typeID, String displayName, TimelineEventType superType, BlackboardArtifact.Type artifactType, BlackboardAttribute.Type timeAttribute, BlackboardAttribute.Type descriptionAttribute) { super(typeID, displayName, superType, artifactType, timeAttribute, descriptionAttribute); } @Override TimelineEventDescription parseDescription(String fullDescriptionRaw, String medDescriptionRaw, String shortDescriptionRaw) { /** * Parses the full description from db, which is the full URL, to a * EventDescription object with three levels of detail. Just ignores * the passed in medium and short descriptions which should be * empty/null anyways. * */ String fullDescription = fullDescriptionRaw; try { URI uri = new URI(fullDescription); String host = uri.getHost(); if (host == null) { host = StringUtils.strip(fullDescription, "./"); } String shortDescription; if (InternetDomainName.isValid(host)) { InternetDomainName domain = InternetDomainName.from(host); shortDescription = (domain.isUnderPublicSuffix()) ? domain.topPrivateDomain().toString() : domain.toString(); } else { shortDescription = host; } String mediumDescription = new URI(uri.getScheme(), uri.getUserInfo(), host, uri.getPort(), uri.getPath(), null, null).toString(); return new TimelineEventDescription(fullDescription, mediumDescription, shortDescription); } catch (URISyntaxException ex) { //There was an error parsing the description as a URL, just ignore the description levels. 
return new TimelineEventDescription(fullDescription); } } } static class FilePathEventType extends TimelineEventTypeImpl { FilePathEventType(long typeID, String displayName, TimelineEventType.HierarchyLevel eventTypeZoomLevel, TimelineEventType superType) { super(typeID, displayName, eventTypeZoomLevel, superType); } @Override TimelineEventDescription parseDescription(String fullDescription, String medDescription, String shortDescription) { return parseFilePathDescription(fullDescription); } } static class FilePathArtifactEventType extends TimelineEventArtifactTypeSingleDescription { FilePathArtifactEventType(int typeID, String displayName, TimelineEventType superType, BlackboardArtifact.Type artifactType, BlackboardAttribute.Type timeAttribute, BlackboardAttribute.Type descriptionAttribute) { super(typeID, displayName, superType, artifactType, timeAttribute, descriptionAttribute); } @Override TimelineEventDescription parseDescription(String fullDescriptionRaw, String medDescriptionRaw, String shortDescriptionRaw) { return parseFilePathDescription(fullDescriptionRaw); } } /** * Handle GPS_TRACK artifacts special. * GPS_TRACK artifacts do not have a time attribute, by they do have a * JSON list of waypoints from which a start time can be extracted. */ static class GPSTrackArtifactEventType extends TimelineEventArtifactTypeSingleDescription { GPSTrackArtifactEventType(int typeID, String displayName, TimelineEventType superType, BlackboardArtifact.Type artifactType, BlackboardAttribute.Type descriptionAttribute) { // Passing TSK_GEO_TRACKPOINTS as the "time attribute" as more of a place filler, to avoid any null issues super(typeID, displayName, superType, artifactType, new BlackboardAttribute.Type(TSK_GEO_TRACKPOINTS), descriptionAttribute); } @Override public TimelineEventDescriptionWithTime makeEventDescription(BlackboardArtifact artifact) throws TskCoreException { //If there is not a list if track points do not create an event. 
BlackboardAttribute attribute = artifact.getAttribute(new BlackboardAttribute.Type(TSK_GEO_TRACKPOINTS)); if (attribute == null) { return null; } // Get the waypoint list "start time" GeoTrackPoints pointsList; try { pointsList = BlackboardJsonAttrUtil.fromAttribute(attribute, GeoTrackPoints.class); } catch (BlackboardJsonAttrUtil.InvalidJsonException ex) { throw new TskCoreException("Unable to parse track points in TSK_GEO_TRACKPOINTS attribute", ex); } Long startTime = pointsList.getStartTime(); // If we didn't find a startime do not create an event. if (startTime == null) { return null; } return new TimelineEventDescriptionWithTime(startTime, null, null, extractFullDescription(artifact)); } } /** * Parse the full description from the DB, which is just the file path, into * three levels. * * @param fullDescription * * @return An TimelineEventDescription with three levels of detail. */ static TimelineEventDescription parseFilePathDescription(String fullDescription) { String[] split = fullDescription.split("/"); String mediumDescription = Stream.of(split) .filter(StringUtils::isNotBlank) .limit(Math.max(1, split.length - 2)) .collect(Collectors.joining("/", "/", "")) .replaceAll("//", "/"); String shortDescription = Stream.of(split) .filter(StringUtils::isNotBlank) .limit(1) .collect(Collectors.joining("/", "/", "")) .replaceAll("//", "/"); return new TimelineEventDescription(fullDescription, mediumDescription, shortDescription); } } sleuthkit-4.11.1/bindings/java/src/org/sleuthkit/datamodel/Person.java000644 000765 000024 00000003576 14137073413 026575 0ustar00carrierstaff000000 000000 /* * Sleuth Kit Data Model * * Copyright 2021 Basis Technology Corp. * Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.sleuthkit.datamodel; import java.util.Objects; /** * Encapsulates a person. */ public final class Person { private final long id; private String name; Person(long id, String name) { this.id = id; this.name = name; } /** * Gets the row id for the person. * * @return Row id. */ public long getPersonId() { return id; } /** * Gets the name for the person. * * @return Person name. */ public String getName() { return name; } /** * Sets the name for the person. Does not update the database. * * @param newName The new name. */ public void setName(String newName) { this.name = newName; } @Override public int hashCode() { int hash = 5; hash = 67 * hash + (int) (this.id ^ (this.id >>> 32)); hash = 67 * hash + Objects.hashCode(this.name); return hash; } @Override public boolean equals(Object obj) { if (this == obj) { return true; } if (obj == null) { return false; } if (getClass() != obj.getClass()) { return false; } final Person other = (Person) obj; if (this.id != other.id) { return false; } if ((this.name == null) ? (other.name != null) : !this.name.equals(other.name)) { return false; } return true; } } sleuthkit-4.11.1/bindings/java/src/org/sleuthkit/datamodel/TskException.java000644 000765 000024 00000002355 14137073413 027741 0ustar00carrierstaff000000 000000 /* * Sleuth Kit Data Model * * Copyright 2011 Basis Technology Corp. * Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.sleuthkit.datamodel; /** * General parent exception that is thrown from Sleuthkit classes. */ public abstract class TskException extends Exception { private static final long serialVersionUID = 123049873L; /** * Create exception containing the error message * * @param msg the message */ public TskException(String msg) { super(msg); } /** * Create exception containing the error message and cause exception * * @param msg the message * @param ex cause exception */ public TskException(String msg, Exception ex) { super(msg, ex); } } sleuthkit-4.11.1/bindings/java/src/org/sleuthkit/datamodel/DefaultAddDataSourceCallbacks.java000644 000765 000024 00000001720 14137073413 033064 0ustar00carrierstaff000000 000000 /* * SleuthKit Java Bindings * * Copyright 2020 Basis Technology Corp. * Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
*/ package org.sleuthkit.datamodel; import java.util.List; /** * Do-nothing version of AddDataSourceCallbacks */ public class DefaultAddDataSourceCallbacks implements AddDataSourceCallbacks { @Override public void onFilesAdded(List fileObjectIds) { // Do nothing } } sleuthkit-4.11.1/bindings/java/src/org/sleuthkit/datamodel/SleuthkitVisitableItem.java000644 000765 000024 00000001747 14137073413 031763 0ustar00carrierstaff000000 000000 /* * Sleuth Kit Data Model * * Copyright 2011 Basis Technology Corp. * Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.sleuthkit.datamodel; /** * Interface for all visitable datatypes that can be found in the tsk database */ public interface SleuthkitVisitableItem { /** * visitor pattern support * * @param v visitor * * @return visitor return value */ public T accept(SleuthkitItemVisitor v); } sleuthkit-4.11.1/bindings/java/src/org/sleuthkit/datamodel/InvalidAccountIDException.java000644 000765 000024 00000002641 14137073413 032316 0ustar00carrierstaff000000 000000 /* * Sleuth Kit Data Model * * Copyright 2020 Basis Technology Corp. * Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.sleuthkit.datamodel; /** * Exception thrown when an account identifier is not valid. * */ public class InvalidAccountIDException extends TskCoreException { private static final long serialVersionUID = 1L; /** * Default constructor when error message is not available */ public InvalidAccountIDException() { super("No error message available."); } /** * Create exception containing the error message. * * @param msg Message. */ public InvalidAccountIDException(String msg) { super(msg); } /** * Create exception containing the error message and cause exception. * * @param msg Message. * @param ex Underlying exception. */ public InvalidAccountIDException(String msg, Exception ex) { super(msg, ex); } } sleuthkit-4.11.1/bindings/java/src/org/sleuthkit/datamodel/TimelineEventTypeImpl.java000644 000765 000024 00000006252 14137073413 031555 0ustar00carrierstaff000000 000000 /* * Sleuth Kit Data Model * * Copyright 2018-2019 Basis Technology Corp. * Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
*/ package org.sleuthkit.datamodel; import com.google.common.collect.ImmutableSortedSet; import java.util.Optional; import java.util.SortedSet; import org.apache.commons.lang3.ObjectUtils; /** * Implementation of TimelineEventType for the standard predefined event types. */ class TimelineEventTypeImpl implements TimelineEventType { private final long typeID; private final String displayName; private final TimelineEventType superType; private final TimelineEventType.HierarchyLevel eventTypeZoomLevel; /** * * @param typeID ID (from the Database) * @param displayName * @param eventTypeZoomLevel Where it is in the type hierarchy * @param superType */ TimelineEventTypeImpl(long typeID, String displayName, TimelineEventType.HierarchyLevel eventTypeZoomLevel, TimelineEventType superType) { this.superType = superType; this.typeID = typeID; this.displayName = displayName; this.eventTypeZoomLevel = eventTypeZoomLevel; } TimelineEventDescription parseDescription(String fullDescriptionRaw, String medDescriptionRaw, String shortDescriptionRaw) { // The standard/default implementation: Just bundle the three description levels into one object. 
return new TimelineEventDescription(fullDescriptionRaw, medDescriptionRaw, shortDescriptionRaw); } @Override public SortedSet getChildren() { return ImmutableSortedSet.of(); } @Override public Optional getChild(String string) { return getChildren().stream() .filter(type -> type.getDisplayName().equalsIgnoreCase(displayName)) .findFirst(); } @Override public String getDisplayName() { return displayName; } @Override public TimelineEventType getParent() { return ObjectUtils.defaultIfNull(superType, ROOT_EVENT_TYPE); } @Override public TimelineEventType.HierarchyLevel getTypeHierarchyLevel() { return eventTypeZoomLevel; } @Override public long getTypeID() { return typeID; } @Override public int hashCode() { int hash = 5; hash = 17 * hash + (int) (this.typeID ^ (this.typeID >>> 32)); return hash; } @Override public boolean equals(Object obj) { if (this == obj) { return true; } if (obj == null) { return false; } if (getClass() != obj.getClass()) { return false; } final TimelineEventType other = (TimelineEventType) obj; return this.getTypeID() == other.getTypeID(); } @Override public String toString() { return "StandardEventType{" + "id=" + getTypeID() + ", displayName=" + getDisplayName() + '}'; } } sleuthkit-4.11.1/bindings/java/src/org/sleuthkit/datamodel/AccountPair.java000644 000765 000024 00000004142 14137073413 027525 0ustar00carrierstaff000000 000000 /* * SleuthKit Java Bindings * * Copyright 2018 Basis Technology Corp. * Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
* See the License for the specific language governing permissions and * limitations under the License. */ package org.sleuthkit.datamodel; /** * Class representing an unordered pair of account device instances. is * same as . First and second are used to distinguish the two accounts, but * do not imply an order. */ public final class AccountPair { private final AccountDeviceInstance account1; private final AccountDeviceInstance account2; /** * Get the first AccountDeviceInstance. First doesn't imply order and is * simply used to distinguish the two accounts. * * @return The first AccountDeviceInstance. */ public AccountDeviceInstance getFirst() { return account1; } /** * Get the second AccountDeviceInstance. Second doesn't imply order and is * simply used to distinguish the two accounts. * * @return The second AccountDeviceInstance. */ public AccountDeviceInstance getSecond() { return account2; } AccountPair(AccountDeviceInstance account1, AccountDeviceInstance account2) { this.account1 = account1; this.account2 = account2; } @Override public int hashCode() { return account1.hashCode() + account2.hashCode(); } @Override public boolean equals(Object other) { if (other == this) { return true; } if (!(other instanceof AccountPair)) { return false; } AccountPair otherPair = (AccountPair) other; return (account1.equals(otherPair.account1) && account2.equals(otherPair.account2)) || (account1.equals(otherPair.account2) && account2.equals(otherPair.account1)); } } sleuthkit-4.11.1/bindings/java/src/org/sleuthkit/datamodel/CaseDbAccessManager.java000644 000765 000024 00000050640 14137073413 031057 0ustar00carrierstaff000000 000000 /* * Sleuth Kit Data Model * * Copyright 2018-2019 Basis Technology Corp. * Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.sleuthkit.datamodel; import java.sql.PreparedStatement; import java.sql.ResultSet; import java.sql.SQLException; import java.sql.Statement; import java.util.logging.Level; import java.util.logging.Logger; import org.sleuthkit.datamodel.SleuthkitCase.CaseDbConnection; import org.sleuthkit.datamodel.SleuthkitCase.CaseDbTransaction; import static org.sleuthkit.datamodel.SleuthkitCase.closeStatement; import org.sleuthkit.datamodel.TskData.DbType; /** * This class provides modules with access to the case database * to create custom tables/indexes and to query them. * */ public final class CaseDbAccessManager { /** * Callback interface to process the result of DB query run through DBAccessManager */ public interface CaseDbAccessQueryCallback { /** * Processes the ResultSet from CaseDbAccessManager query. * * This is called synchronously by CaseDbAccessManager, * and should avoid any long running operations. * * @param resultSet ResultSet from query. */ void process(ResultSet resultSet); } private static final Logger logger = Logger.getLogger(CaseDbAccessManager.class.getName()); private final SleuthkitCase tskDB; /** * Constructor * * @param skCase The SleuthkitCase * */ CaseDbAccessManager(SleuthkitCase skCase) { this.tskDB = skCase; } /** * Checks if a column exists in a table. 
* * @param tableName name of the table * @param columnName column name to check * * @return true if the column already exists, false otherwise * @throws TskCoreException */ public boolean columnExists(String tableName, String columnName) throws TskCoreException { boolean doesColumnExists = false; CaseDbTransaction localTrans = tskDB.beginTransaction(); try { doesColumnExists = columnExists(tableName, columnName, localTrans); localTrans.commit(); localTrans = null; } finally { if (null != localTrans) { try { localTrans.rollback(); } catch (TskCoreException ex) { logger.log(Level.SEVERE, "Failed to rollback transaction after exception", ex); } } } return doesColumnExists; } /** * Checks if a column exists in a table. * * @param tableName name of the table * @param columnName column name to check * @param transaction transaction * * @return true if the column already exists, false otherwise * @throws TskCoreException */ public boolean columnExists(String tableName, String columnName, CaseDbTransaction transaction) throws TskCoreException { boolean columnExists = false; Statement statement = null; ResultSet resultSet = null; try { CaseDbConnection connection = transaction.getConnection(); statement = connection.createStatement(); if (DbType.SQLITE == tskDB.getDatabaseType()) { String tableInfoQuery = "PRAGMA table_info(%s)"; //NON-NLS resultSet = statement.executeQuery(String.format(tableInfoQuery, tableName)); while (resultSet.next()) { if (resultSet.getString("name").equalsIgnoreCase(columnName)) { columnExists = true; break; } } } else { String tableInfoQueryTemplate = "SELECT EXISTS (SELECT 1 FROM information_schema.columns WHERE table_name='%s' AND column_name='%s')"; //NON-NLS resultSet = statement.executeQuery(String.format(tableInfoQueryTemplate, tableName.toLowerCase(), columnName.toLowerCase())); if (resultSet.next()) { columnExists = resultSet.getBoolean(1); } } } catch (SQLException ex) { throw new TskCoreException("Error checking if column " + columnName + 
"exists ", ex); } finally { if (resultSet != null) { try { resultSet.close(); } catch (SQLException ex2) { logger.log(Level.WARNING, "Failed to to close resultset after checking column", ex2); } } closeStatement(statement); } return columnExists; } /** * Checks if a table exists in the case database. * * @param tableName name of the table to check * * @return true if the table already exists, false otherwise * @throws TskCoreException */ public boolean tableExists(String tableName) throws TskCoreException { boolean doesTableExist = false; CaseDbTransaction localTrans = tskDB.beginTransaction(); try { doesTableExist = tableExists(tableName, localTrans); localTrans.commit(); localTrans = null; } finally { if (null != localTrans) { try { localTrans.rollback(); } catch (TskCoreException ex) { logger.log(Level.SEVERE, "Failed to rollback transaction after exception", ex); //NON-NLS } } } return doesTableExist; } /** * Checks if a table exists in the case database. * * @param tableName name of the table to check * @param transaction transaction * * @return true if the table already exists, false otherwise * @throws TskCoreException */ public boolean tableExists(String tableName, CaseDbTransaction transaction) throws TskCoreException { boolean tableExists = false; Statement statement = null; ResultSet resultSet = null; try { CaseDbConnection connection = transaction.getConnection(); statement = connection.createStatement(); if (DbType.SQLITE == tskDB.getDatabaseType()) { resultSet = statement.executeQuery("SELECT name FROM sqlite_master WHERE type='table'"); //NON-NLS while (resultSet.next()) { if (resultSet.getString("name").equalsIgnoreCase(tableName)) { //NON-NLS tableExists = true; break; } } } else { String tableInfoQueryTemplate = "SELECT EXISTS (SELECT 1 FROM information_schema.tables WHERE table_name='%s')"; //NON-NLS resultSet = statement.executeQuery(String.format(tableInfoQueryTemplate, tableName.toLowerCase())); if (resultSet.next()) { tableExists = 
resultSet.getBoolean(1); } } } catch (SQLException ex) { throw new TskCoreException("Error checking if table " + tableName + "exists ", ex); } finally { if (resultSet != null) { try { resultSet.close(); } catch (SQLException ex2) { logger.log(Level.WARNING, "Failed to to close resultset after checking table", ex2); } } closeStatement(statement); } return tableExists; } /** * Creates a table with the specified name and schema. * * If the table already exists, it does nothing, and no error is generated * * It is recommended that clients of the API use module specific prefixes * to prevent name collisions. * * @param tableName name of the table to create * @param tableSchema table schema * * @throws TskCoreException */ public void createTable(final String tableName, final String tableSchema) throws TskCoreException { validateTableName(tableName); validateSQL(tableSchema); tskDB.acquireSingleUserCaseWriteLock(); String createSQL = "CREATE TABLE IF NOT EXISTS " + tableName + " " + tableSchema; try (CaseDbConnection connection = tskDB.getConnection(); Statement statement = connection.createStatement();) { statement.execute(createSQL); } catch (SQLException ex) { throw new TskCoreException("Error creating table " + tableName, ex); } finally { tskDB.releaseSingleUserCaseWriteLock(); } } /** * Alters a table with the specified name. * * @param tableName name of the table to alter * @param alterSQL SQL to alter the table * * @throws TskCoreException */ public void alterTable(final String tableName, final String alterSQL) throws TskCoreException { CaseDbTransaction localTrans = tskDB.beginTransaction(); try { alterTable(tableName, alterSQL, localTrans); localTrans.commit(); localTrans = null; } finally { if (null != localTrans) { try { localTrans.rollback(); } catch (TskCoreException ex) { logger.log(Level.SEVERE, "Failed to rollback transaction after exception", ex); } } } } /** * Alters a table with the specified name. 
* * @param tableName name of the table to alter * @param alterSQL SQL to alter the table * @param transaction transaction * * @throws TskCoreException */ public void alterTable(final String tableName, final String alterSQL, final CaseDbTransaction transaction) throws TskCoreException { validateTableName(tableName); validateSQL(alterSQL); CaseDbConnection connection = transaction.getConnection(); Statement statement = null; String sql = "ALTER TABLE " + tableName + " " + alterSQL; try { statement = connection.createStatement(); statement.execute(sql); } catch (SQLException ex) { // SQLite occasionally returns false for columnExists() if a table was just created with that column // leading to "duplicate column name" exception. // We ignore this exception if (DbType.SQLITE == tskDB.getDatabaseType() && alterSQL.toLowerCase().contains("add column") && ex.getMessage().toLowerCase().contains("duplicate column name")) { logger.log(Level.WARNING, String.format("Column being added by SQL = %s already exists in table %s", alterSQL, tableName)); return; } throw new TskCoreException(String.format("Error altering table %s with SQL = %s", tableName, sql), ex); } finally { closeStatement(statement); } } /** * Creates an index on the specified table, on specified column(s). * * If the index already exists, it does nothing, and no error is generated. * * It is recommended that clients of the API use module specific prefixes * to prevent name collisions. 
* * @param indexName name of index to create * @param tableName name of table to create the index on * @param colsSQL - columns on which to index * * @throws TskCoreException */ public void createIndex(final String indexName, final String tableName, final String colsSQL) throws TskCoreException { validateTableName(tableName); validateIndexName(indexName); validateSQL(colsSQL); tskDB.acquireSingleUserCaseWriteLock(); String indexSQL = "CREATE INDEX IF NOT EXISTS " + indexName + " ON " + tableName + " " + colsSQL; // NON-NLS try (CaseDbConnection connection = tskDB.getConnection(); Statement statement = connection.createStatement(); ) { statement.execute(indexSQL); } catch (SQLException ex) { throw new TskCoreException("Error creating index " + tableName, ex); } finally { tskDB.releaseSingleUserCaseWriteLock(); } } /** * Inserts a row in the specified table. * * @param tableName - table to insert into. * @param sql - SQL string specifying column values. * * @return - rowID of the row * * @throws TskCoreException */ public long insert(final String tableName, final String sql) throws TskCoreException { CaseDbTransaction localTrans = tskDB.beginTransaction(); try { long rowId = insert(tableName, sql, localTrans); localTrans.commit(); localTrans = null; return rowId; } finally { if (null != localTrans) { try { localTrans.rollback(); } catch (TskCoreException ex) { logger.log(Level.SEVERE, "Failed to rollback transaction after exception", ex); } } } } /** * Inserts a row in the specified table, as part of the specified transaction. * If the primary key is duplicate, it does nothing. * * Note: For PostGreSQL, the caller must include the ON CONFLICT DO NOTHING clause * * Caller is responsible for committing the transaction. * * @param tableName - table to insert into. * @param sql - SQL string specifying column values. 
* @param transaction transaction in which the insert/update is done * * @return - rowID of the row inserted * * @throws TskCoreException */ public long insert(final String tableName, final String sql, final CaseDbTransaction transaction) throws TskCoreException { long rowId = 0; validateTableName(tableName); validateSQL(sql); CaseDbConnection connection = transaction.getConnection(); PreparedStatement statement = null; ResultSet resultSet; String insertSQL = "INSERT"; if (DbType.SQLITE == tskDB.getDatabaseType()) { insertSQL += " OR IGNORE"; } insertSQL = insertSQL+ " INTO " + tableName + " " + sql; // NON-NLS try { statement = connection.prepareStatement(insertSQL, Statement.RETURN_GENERATED_KEYS); connection.executeUpdate(statement); resultSet = statement.getGeneratedKeys(); if (resultSet.next()) { rowId = resultSet.getLong(1); //last_insert_rowid() } } catch (SQLException ex) { throw new TskCoreException("Error inserting row in table " + tableName + " with sql = "+ insertSQL, ex); } finally { closeStatement(statement); } return rowId; } /** * Inserts a row in the specified table. * If the primary key is duplicate, the existing row is updated. * * Note: For PostGreSQL, the caller must include the ON CONFLICT UPDATE clause to handle * duplicates * * @param tableName - table to insert into. * @param sql - SQL string specifying column values. * * @return - rowID of the row inserted/updated * * @throws TskCoreException */ public long insertOrUpdate(final String tableName, final String sql) throws TskCoreException { CaseDbTransaction localTrans = tskDB.beginTransaction(); try { long rowId = insertOrUpdate(tableName, sql, localTrans); localTrans.commit(); localTrans = null; return rowId; } finally { if (null != localTrans) { try { localTrans.rollback(); } catch (TskCoreException ex) { logger.log(Level.SEVERE, "Failed to rollback transaction after exception", ex); } } } } /** * Inserts a row in the specified table, as part of the specified transaction. 
* If the primary key is duplicate, the existing row is updated. * Caller is responsible for committing the transaction. * * Note: For PostGreSQL, the caller must include the ON CONFLICT UPDATE clause to handle * duplicates * * @param tableName - table to insert into. * @param sql - SQL string specifying column values. * @param transaction transaction in which the insert/update is done * * @return - rowID of the row inserted/updated * * @throws TskCoreException */ public long insertOrUpdate(final String tableName, final String sql, final CaseDbTransaction transaction) throws TskCoreException { long rowId = 0; validateTableName(tableName); validateSQL(sql); CaseDbConnection connection = transaction.getConnection(); PreparedStatement statement = null; ResultSet resultSet; String insertSQL = "INSERT"; if (DbType.SQLITE == tskDB.getDatabaseType()) { insertSQL += " OR REPLACE"; } insertSQL += " INTO " + tableName + " " + sql; // NON-NLS try { statement = connection.prepareStatement(insertSQL, Statement.RETURN_GENERATED_KEYS); connection.executeUpdate(statement); resultSet = statement.getGeneratedKeys(); resultSet.next(); rowId = resultSet.getLong(1); //last_insert_rowid() } catch (SQLException ex) { throw new TskCoreException("Error inserting row in table " + tableName + " with sql = "+ insertSQL, ex); } finally { closeStatement(statement); } return rowId; } /** * Updates row(s) in the specified table. * * @param tableName - table to insert into. * @param sql - SQL string specifying column values and conditions. 
* * @throws TskCoreException */ public void update(final String tableName, final String sql) throws TskCoreException { CaseDbTransaction localTrans = tskDB.beginTransaction(); try { update(tableName, sql, localTrans); localTrans.commit(); localTrans = null; } finally { if (null != localTrans) { try { localTrans.rollback(); } catch (TskCoreException ex) { logger.log(Level.SEVERE, "Failed to rollback transaction after exception", ex); } } } } /** * Updates row(s) in the specified table, as part of the specified transaction. * Caller is responsible for committing the transaction. * * @param tableName - table to insert into. * @param sql - SQL string specifying column values and conditions. * @param transaction - transaction under which the update is performed. * * @throws TskCoreException */ public void update(final String tableName, final String sql, CaseDbTransaction transaction ) throws TskCoreException { validateTableName(tableName); validateSQL(sql); CaseDbConnection connection = transaction.getConnection(); Statement statement = null; String updateSQL = "UPDATE " + tableName + " " + sql; // NON-NLS try { statement = connection.createStatement(); statement.executeUpdate(updateSQL); } catch (SQLException ex) { throw new TskCoreException("Error Updating table " + tableName, ex); } finally { closeStatement(statement); } } /** * Runs the specified SELECT query and then calls the specified callback with the result. * * @param sql SQL string specifying the columns to select, tables to select from and the WHERE clause. * @param queryCallback Callback object to process the result. 
* * @throws TskCoreException */ public void select(final String sql, final CaseDbAccessQueryCallback queryCallback) throws TskCoreException { if (queryCallback == null) { throw new TskCoreException("Callback is null"); } validateSQL(sql); tskDB.acquireSingleUserCaseReadLock(); String selectSQL = "SELECT " + sql; // NON-NLS try (CaseDbConnection connection = tskDB.getConnection(); Statement statement = connection.createStatement(); ResultSet resultSet = statement.executeQuery(selectSQL)) { queryCallback.process(resultSet); } catch (SQLException ex) { throw new TskCoreException("Error running SELECT query.", ex); } finally { tskDB.releaseSingleUserCaseReadLock(); } } /** * Deletes a row in the specified table. * * @param tableName table from which to delete the row * @param sql - SQL string specifying the condition to identify the row to delete * * @throws TskCoreException */ public void delete(final String tableName, final String sql ) throws TskCoreException { validateTableName(tableName); validateSQL(sql); tskDB.acquireSingleUserCaseWriteLock(); String deleteSQL = "DELETE FROM " + tableName + " " + sql; // NON-NLS try (CaseDbConnection connection = tskDB.getConnection(); Statement statement = connection.createStatement();) { statement.executeUpdate(deleteSQL); } catch (SQLException ex) { throw new TskCoreException("Error deleting row from table " + tableName, ex); } finally { tskDB.releaseSingleUserCaseWriteLock(); } } /** * Validates table name. * Specifically, it ensures the table doesn't begin with 'tsk_' * to avoid modifications to core TSK tables * * @param tableName * @throws TskCoreException, if the table name is invalid. 
*/ private void validateTableName(String tableName) throws TskCoreException { if (SleuthkitCase.getCoreTableNames().contains(tableName.toLowerCase())) { throw new TskCoreException("Attempt to modify a core TSK table " + tableName); } if (tableName.toLowerCase().startsWith("tsk_")) { throw new TskCoreException("Modifying tables with tsk_ prefix is not allowed. "); } } /** * Validates index name. * Specifically, it ensures the index name doesn't collide with any of our core indexes * in CaseDB * * @param indexName * @throws TskCoreException, if the index name is invalid. */ private void validateIndexName(String indexName) throws TskCoreException { if (indexName.isEmpty()) { throw new TskCoreException("Invalid index name " + indexName); } if (SleuthkitCase.getCoreIndexNames().contains(indexName.toLowerCase())) { throw new TskCoreException("Attempt to modify a core TSK index " + indexName); } } /** * Validates given SQL string. * * @param sql The SQL to validate. * * @throws TskCoreException Thrown if the SQL is not valid. */ private void validateSQL(String sql) throws TskCoreException { /* * TODO (JIRA-5950): Need SQL injection defense in CaseDbAccessManager */ } } sleuthkit-4.11.1/bindings/java/src/org/sleuthkit/datamodel/LocalFile.java000644 000765 000024 00000033267 14137073413 027161 0ustar00carrierstaff000000 000000 /* * SleuthKit Java Bindings * * Copyright 2011-2017 Basis Technology Corp. * Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
*/ package org.sleuthkit.datamodel; import java.util.Collections; import java.util.List; import org.sleuthkit.datamodel.TskData.FileKnown; import org.sleuthkit.datamodel.TskData.TSK_DB_FILES_TYPE_ENUM; import org.sleuthkit.datamodel.TskData.TSK_FS_ATTR_TYPE_ENUM; import org.sleuthkit.datamodel.TskData.TSK_FS_META_TYPE_ENUM; import org.sleuthkit.datamodel.TskData.TSK_FS_NAME_FLAG_ENUM; import org.sleuthkit.datamodel.TskData.TSK_FS_NAME_TYPE_ENUM; /** * A representation of a local/logical file (e.g., on a user's machine) that has * been added to a case. */ public class LocalFile extends AbstractFile { /** * Constructs a representation of a local/logical file (e.g., on a user's * machine) that has been added to the case database. * * @param db The case database to which the file has been * added. * @param objId The object id of the file in the case database. * @param name The name of the file. * @param fileType The type of the file. * @param dirType The type of the file, usually as reported in * the name structure of the file system. May be * set to TSK_FS_NAME_TYPE_ENUM.UNDEF. * @param metaType The type of the file, usually as reported in * the metadata structure of the file system. May * be set to * TSK_FS_META_TYPE_ENUM.TSK_FS_META_TYPE_UNDEF. * @param dirFlag The allocated status of the file, usually as * reported in the name structure of the file * system. * @param metaFlags The allocated status of the file, usually as * reported in the metadata structure of the file * system. * @param size The size of the file. * @param ctime The changed time of the file. * @param crtime The created time of the file. * @param atime The accessed time of the file. * @param mtime The modified time of the file. * @param mimeType The MIME type of the file, null if it has not * yet been determined. * @param md5Hash The MD5 hash of the file, null if not yet * calculated. * @param knownState The known state of the file from a hash * database lookup, null if not yet looked up. 
* @param parentId The object id of parent of the file. * @param parentPath The path of the parent of the file. * @param dataSourceObjectId The object id of the data source for the file. * @param localPath The absolute path of the file in secondary * storage. * @param encodingType The encoding type of the file. * @param extension The extension part of the file name (not * including the '.'), can be null. * @param ownerUid String UID of the user as found in in the file * system, can be null. * @param osAccountObjId Obj id of the owner OS account, may be null. */ LocalFile(SleuthkitCase db, long objId, String name, TSK_DB_FILES_TYPE_ENUM fileType, TSK_FS_NAME_TYPE_ENUM dirType, TSK_FS_META_TYPE_ENUM metaType, TSK_FS_NAME_FLAG_ENUM dirFlag, short metaFlags, long size, long ctime, long crtime, long atime, long mtime, String mimeType, String md5Hash, String sha256Hash, FileKnown knownState, long parentId, String parentPath, long dataSourceObjectId, String localPath, TskData.EncodingType encodingType, String extension, String ownerUid, Long osAccountObjId) { super(db, objId, dataSourceObjectId, TSK_FS_ATTR_TYPE_ENUM.TSK_FS_ATTR_TYPE_DEFAULT, 0, name, fileType, 0L, 0, dirType, metaType, dirFlag, metaFlags, size, ctime, crtime, atime, mtime, (short) 0, 0, 0, md5Hash, sha256Hash, knownState, parentPath, mimeType, extension, ownerUid, osAccountObjId, Collections.emptyList()); // TODO (AUT-1904): The parent id should be passed to AbstractContent // through the class hierarchy contructors, using // AbstractContent.UNKNOWN_ID as needed. if (parentId > 0) { setParentId(parentId); } super.setLocalFilePath(localPath); setEncodingType(encodingType); } /** * Gets the extents in terms of byte addresses of this local file within its * data source, an empty list. * * @return An empty list of extents (TskFileRange objects) * * @throws TskCoreException if there was an error querying the case * database. 
*/ @Override public List getRanges() throws TskCoreException { return Collections.emptyList(); } /** * Indicates whether or not this local file is the root of a file system, * always returns false. * * @return False. */ @Override public boolean isRoot() { return false; } /** * Accepts a content visitor (Visitor design pattern). * * @param The type returned by the visitor. * @param visitor A ContentVisitor supplying an algorithm to run using this * local file as input. * * @return The output of the algorithm. */ @Override public T accept(ContentVisitor visitor) { return visitor.visit(this); } /** * Accepts a Sleuthkit item visitor (Visitor design pattern). * * @param The type returned by the visitor. * @param visitor A SleuthkitItemVisitor supplying an algorithm to run using * this local file as input. * * @return The output of the algorithm. */ @Override public T accept(SleuthkitItemVisitor visitor) { return visitor.visit(this); } /** * Provides a string representation of this local file. * * @param preserveState True if state should be included in the string * representation of this object. */ @Override public String toString(boolean preserveState) { return super.toString(preserveState) + "LocalFile [\t" + "]\t"; //NON-NLS } /** * Constructs a representation of a local/logical file (e.g., on a user's * machine) that has been added to the case database. * * @param db The case database to which the file has been added. * @param objId The object id of the file in the case database. * @param name The name of the file. * @param fileType The type of the file. * @param dirType The type of the file, usually as reported in the name * structure of the file system. May be set to * TSK_FS_NAME_TYPE_ENUM.UNDEF. * @param metaType The type of the file, usually as reported in the * metadata structure of the file system. May be set to * TSK_FS_META_TYPE_ENUM.TSK_FS_META_TYPE_UNDEF. 
* @param dirFlag The allocated status of the file, usually as reported * in the name structure of the file system. * @param metaFlags The allocated status of the file, usually as reported * in the metadata structure of the file system. * @param size The size of the file. * @param ctime The changed time of the file. * @param crtime The created time of the file. * @param atime The accessed time of the file. * @param mtime The modified time of the file. * @param md5Hash The MD5 hash of the file, null if not yet calculated. * @param knownState The known state of the file from a hash database * lookup, null if not yet looked up. * @param parentPath The path of the parent of the file. * @param localPath The absolute path of the file in secondary storage. * * @deprecated Do not make subclasses outside of this package. */ @Deprecated @SuppressWarnings("deprecation") protected LocalFile(SleuthkitCase db, long objId, String name, TSK_DB_FILES_TYPE_ENUM fileType, TSK_FS_NAME_TYPE_ENUM dirType, TSK_FS_META_TYPE_ENUM metaType, TSK_FS_NAME_FLAG_ENUM dirFlag, short metaFlags, long size, long ctime, long crtime, long atime, long mtime, String md5Hash, FileKnown knownState, String parentPath, String localPath) { this(db, objId, name, fileType, dirType, metaType, dirFlag, metaFlags, size, ctime, crtime, atime, mtime, null, md5Hash, null, knownState, AbstractContent.UNKNOWN_ID, parentPath, db.getDataSourceObjectId(objId), localPath, TskData.EncodingType.NONE, null, OsAccount.NO_OWNER_ID, OsAccount.NO_ACCOUNT); } /** * Constructs a representation of a local/logical file (e.g., on a user's * machine) that has been added to the case database. * * @param db The case database to which the file has been added. * @param objId The object id of the file in the case database. * @param name The name of the file. * @param fileType The type of the file. * @param dirType The type of the file, usually as reported in the name * structure of the file system. 
May be set to * TSK_FS_NAME_TYPE_ENUM.UNDEF. * @param metaType The type of the file, usually as reported in the * metadata structure of the file system. May be set to * TSK_FS_META_TYPE_ENUM.TSK_FS_META_TYPE_UNDEF. * @param dirFlag The allocated status of the file, usually as reported * in the name structure of the file system. * @param metaFlags The allocated status of the file, usually as reported * in the metadata structure of the file system. * @param size The size of the file. * @param ctime The changed time of the file. * @param crtime The created time of the file. * @param atime The accessed time of the file. * @param mtime The modified time of the file. * @param md5Hash The MD5 hash of the file, null if not yet calculated. * @param knownState The known state of the file from a hash database * lookup, null if not yet looked up. * @param parentPath The path of the parent of the file. * @param localPath The absolute path of the file in secondary storage. * @param parentId The object id of parent of the file. * * @deprecated Do not make subclasses outside of this package. */ @Deprecated protected LocalFile(SleuthkitCase db, long objId, String name, TSK_DB_FILES_TYPE_ENUM fileType, TSK_FS_NAME_TYPE_ENUM dirType, TSK_FS_META_TYPE_ENUM metaType, TSK_FS_NAME_FLAG_ENUM dirFlag, short metaFlags, long size, long ctime, long crtime, long atime, long mtime, String md5Hash, FileKnown knownState, String parentPath, String localPath, long parentId) { this(db, objId, name, fileType, dirType, metaType, dirFlag, metaFlags, size, ctime, crtime, atime, mtime, md5Hash, knownState, parentPath, localPath); } /** * Constructs a representation of a local/logical file (e.g., on a user's * machine) that has been added to the case. * * @param db The case database to which the file has been added. * @param objId The object id of the file in the case database. * @param name The name of the file. 
* @param dirType The type of the file, usually as reported in the name * structure of the file system. May be set to * TSK_FS_NAME_TYPE_ENUM.UNDEF. * @param metaType The type of the file, usually as reported in the * metadata structure of the file system. May be set to * TSK_FS_META_TYPE_ENUM.TSK_FS_META_TYPE_UNDEF. * @param dirFlag The allocated status of the file, usually as reported * in the name structure of the file system. * @param metaFlags The allocated status of the file, usually as reported * in the metadata structure of the file system. * @param size The size of the file. * @param ctime The changed time of the file. * @param crtime The created time of the file. * @param atime The accessed time of the file. * @param mtime The modified time of the file. * @param md5Hash The MD5 hash of the file, null if not yet calculated. * @param knownState The known state of the file from a hash database * lookup, null if not yet looked up. * @param parentPath The path of the parent of the file. * @param localPath The absolute path of the file in secondary storage. * @param parentId The object id of parent of the file. * * @deprecated Do not make subclasses outside of this package. */ @Deprecated protected LocalFile(SleuthkitCase db, long objId, String name, TSK_FS_NAME_TYPE_ENUM dirType, TSK_FS_META_TYPE_ENUM metaType, TSK_FS_NAME_FLAG_ENUM dirFlag, short metaFlags, long size, long ctime, long crtime, long atime, long mtime, String md5Hash, FileKnown knownState, String parentPath, String localPath, long parentId) { this(db, objId, name, TSK_DB_FILES_TYPE_ENUM.LOCAL, dirType, metaType, dirFlag, metaFlags, size, ctime, crtime, atime, mtime, md5Hash, knownState, parentPath, localPath); } } sleuthkit-4.11.1/bindings/java/src/org/sleuthkit/datamodel/TskDataException.java000644 000765 000024 00000002570 14137073413 030532 0ustar00carrierstaff000000 000000 /* * Sleuth Kit Data Model * * Copyright 2011 Basis Technology Corp. 
* Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.sleuthkit.datamodel; /** * Data exception that is thrown from Sleuthkit classes. */ public class TskDataException extends TskException { private static final long serialVersionUID = 123049876L; /** * Default constructor when error message is not available */ public TskDataException() { super("No error message available."); } /** * Create exception containing the error message * * @param msg the message */ public TskDataException(String msg) { super(msg); } /** * Create exception containing the error message and cause exception * * @param msg the message * @param ex cause exception */ public TskDataException(String msg, Exception ex) { super(msg, ex); } } sleuthkit-4.11.1/bindings/java/src/org/sleuthkit/datamodel/OSInfo.java000644 000765 000024 00000013010 14137073413 026444 0ustar00carrierstaff000000 000000 /* * Sleuth Kit Data Model * * Copyright 2013 Basis Technology Corp. * Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
* See the License for the specific language governing permissions and * limitations under the License. */ package org.sleuthkit.datamodel; import org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE; import java.util.Map; import java.util.HashMap; import java.util.ArrayList; import java.util.List; /** * Utility class to hold information from OS Info artifacts */ public class OSInfo { private final List artifacts; private final Map attributeMap; private final boolean isBackup; private final boolean haveFsContent; private final long fileSystemId; private final boolean haveParentId; private final long parentObjId; public OSInfo() { artifacts = new ArrayList(); attributeMap = new HashMap(); isBackup = false; fileSystemId = 0; haveFsContent = false; parentObjId = 0; haveParentId = false; } /** * Initialize an OSInfo object * * @param a_art - OSInfo artifact associated with one registry hive * @param a_isBackup - True if the registry hive was found in a * "RegBack" directory * @param a_fileSystemId - File system ID for FS containing the registry * hive * @param a_parent - Parent directory containing the registry hive. 
* Can be null * * @throws TskCoreException */ public OSInfo(BlackboardArtifact a_art, boolean a_isBackup, long a_fileSystemId, Content a_parent) throws TskCoreException { artifacts = new ArrayList(); artifacts.add(a_art); isBackup = a_isBackup; fileSystemId = a_fileSystemId; haveFsContent = true; attributeMap = new HashMap(); for (BlackboardAttribute attr : a_art.getAttributes()) { attributeMap.put(attr.getAttributeType().getTypeID(), attr.getValueString()); } if (a_parent != null) { parentObjId = a_parent.getId(); haveParentId = true; } else { parentObjId = 0; haveParentId = false; } } /** * Initialize an OSInfo object (without file system information) * * @param a_art - OSInfo artifact associated with one registry hive * @param a_isBackup - True if the registry hive was found in a "RegBack" * directory * @param a_parent - Parent directory containing the registry hive. Can be * null * * @throws TskCoreException */ public OSInfo(BlackboardArtifact a_art, boolean a_isBackup, Content a_parent) throws TskCoreException { artifacts = new ArrayList(); artifacts.add(a_art); isBackup = a_isBackup; fileSystemId = 0; haveFsContent = false; if (a_parent != null) { parentObjId = a_parent.getId(); haveParentId = true; } else { parentObjId = 0; haveParentId = false; } attributeMap = new HashMap(); for (BlackboardAttribute attr : a_art.getAttributes()) { attributeMap.put(attr.getAttributeType().getTypeID(), attr.getValueString()); } } /** * Determine whether two OSInfo objects should be combined. * * @param a_osInfo - the OSInfo object to compare against * * @return */ public boolean matches(OSInfo a_osInfo) { // Check if the two are in the same directory. // OSInfo is only dependant on SYSTEM and SOFTWARE, which should always be in the same directory // on the file system. 
if (haveParentId && a_osInfo.haveParentId) { return (parentObjId == a_osInfo.parentObjId); } // If we don't have a parent directory, just see if they're on the same file system, // and both have the same backup status. if (haveFsContent && a_osInfo.haveFsContent) { return ((a_osInfo.isBackup == isBackup) && (a_osInfo.fileSystemId == fileSystemId)); } return false; } /** * Combine the attribute map for two OSInfo objects. * * @param a_osInfo - The OSInfo object to combine with */ public void combine(OSInfo a_osInfo) { artifacts.addAll(a_osInfo.artifacts); attributeMap.putAll(a_osInfo.attributeMap); } public List getArtifacts() { return artifacts; } public boolean haveFileSystem() { return haveFsContent; } public long getFileSystemId() { return fileSystemId; } public boolean getIsBackup() { return isBackup; } /** * Generic method to get an OSInfo attribute value by ATTRIBUTE_TYPE. * * @param attrType - the attribute to get * * @return */ public String getAttributeValue(ATTRIBUTE_TYPE attrType) { if (attributeMap.containsKey(attrType.getTypeID())) { return attributeMap.get(attrType.getTypeID()); } return ""; } /* * Dedicated getters for the most common attributes. */ public String getCompName() { return getAttributeValue(ATTRIBUTE_TYPE.TSK_NAME); } public String getProcessorArchitecture() { return getAttributeValue(ATTRIBUTE_TYPE.TSK_PROCESSOR_ARCHITECTURE); } public String getDomain() { return getAttributeValue(ATTRIBUTE_TYPE.TSK_DOMAIN); } public String getOSName() { return getAttributeValue(ATTRIBUTE_TYPE.TSK_PROG_NAME); } } sleuthkit-4.11.1/bindings/java/src/org/sleuthkit/datamodel/File.java000644 000765 000024 00000030563 14137073413 026202 0ustar00carrierstaff000000 000000 /* * SleuthKit Java Bindings * * Copyright 2011-2017 Basis Technology Corp. * Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.sleuthkit.datamodel; import java.util.Collections; import java.util.List; import org.sleuthkit.datamodel.TskData.FileKnown; import org.sleuthkit.datamodel.TskData.TSK_FS_ATTR_TYPE_ENUM; import org.sleuthkit.datamodel.TskData.TSK_FS_META_TYPE_ENUM; import org.sleuthkit.datamodel.TskData.TSK_FS_NAME_FLAG_ENUM; import org.sleuthkit.datamodel.TskData.TSK_FS_NAME_TYPE_ENUM; /** * A representation of a file system file that has been added to a case. */ public class File extends FsContent { /** * Constructs a representation of a file system file that has been added to * the case. * * @param db The case database to which the file has been * added. * @param objId The object id of the file in the case database. * @param dataSourceObjectId The object id of the data source for the file. * @param fsObjId The object id of the file system to which this * file belongs. * @param attrType The type attribute given to the file by the * file system. * @param attrId The type id given to the file by the file * system. * @param name The name of the file. * @param metaAddr The meta address of the file. * @param metaSeq The meta sequence number of the file. * @param dirType The type of the file, usually as reported in * the name structure of the file system. May be * set to TSK_FS_NAME_TYPE_ENUM.UNDEF. * @param metaType The type of the file, usually as reported in * the metadata structure of the file system. May * be set to * TSK_FS_META_TYPE_ENUM.TSK_FS_META_TYPE_UNDEF. 
* @param dirFlag The allocated status of the file, usually as * reported in the name structure of the file * system. * @param metaFlags The allocated status of the file, usually as * reported in the metadata structure of the file * system. * @param size The size of the file. * @param ctime The changed time of the file. * @param crtime The created time of the file. * @param atime The accessed time of the file. * @param mtime The modified time of the file. * @param modes The modes for the file. * @param uid The UID for the file. * @param gid The GID for the file. * @param md5Hash The MD5 hash of the file, null if not yet * calculated. * @param knownState The known state of the file from a hash * database lookup, null if not yet looked up. * @param parentPath The path of the parent of the file. * @param mimeType The MIME type of the file, null if it has not * yet been determined. * @param extension The extension part of the file name (not * including the '.'), can be null. * @param ownerUid UID of the file owner as found in the file * system, can be null. * @param osAccountObjId Obj id of the owner OS account, may be null. 
*/ File(SleuthkitCase db, long objId, long dataSourceObjectId, long fsObjId, TSK_FS_ATTR_TYPE_ENUM attrType, int attrId, String name, long metaAddr, int metaSeq, TSK_FS_NAME_TYPE_ENUM dirType, TSK_FS_META_TYPE_ENUM metaType, TSK_FS_NAME_FLAG_ENUM dirFlag, short metaFlags, long size, long ctime, long crtime, long atime, long mtime, short modes, int uid, int gid, String md5Hash, String sha256Hash, FileKnown knownState, String parentPath, String mimeType, String extension, String ownerUid, Long osAccountObjId, List fileAttributes) { super(db, objId, dataSourceObjectId, fsObjId, attrType, attrId, name, TskData.TSK_DB_FILES_TYPE_ENUM.FS, metaAddr, metaSeq, dirType, metaType, dirFlag, metaFlags, size, ctime, crtime, atime, mtime, modes, uid, gid, md5Hash, sha256Hash, knownState, parentPath, mimeType, extension, ownerUid, osAccountObjId, fileAttributes); } /** * Accepts a content visitor (Visitor design pattern). * * @param visitor A ContentVisitor supplying an algorithm to run using this * file as input. * * @return The output of the algorithm. */ @Override public T accept(SleuthkitItemVisitor visitor) { return visitor.visit(this); } /** * Accepts a Sleuthkit item visitor (Visitor design pattern). * * @param visitor A SleuthkitItemVisitor supplying an algorithm to run using * this file as input. * * @return The output of the algorithm. */ @Override public T accept(ContentVisitor v) { return v.visit(this); } /** * Provides a string representation of this file. * * @param preserveState True if state should be included in the string * representation of this object. * * @throws TskCoreException if there was an error querying the case * database. */ @Override public String toString(boolean preserveState) { return super.toString(preserveState) + "File [\t" + "]\t"; //NON-NLS } /** * Constructs a representation of a file system file that has been added to * the case. * * @param db The case database to which the file has been added. 
* @param objId The object id of the file in the case database. * @param fsObjId The object id of the file system to which this file * belongs. * @param attrType The type attribute given to the file by the file * system. * @param attrId The type id given to the file by the file system. * @param name The name of the file. * @param metaAddr The meta address of the file. * @param metaSeq The meta sequence number of the file. * @param dirType The type of the file, usually as reported in the name * structure of the file system. May be set to * TSK_FS_NAME_TYPE_ENUM.UNDEF. * @param metaType The type of the file, usually as reported in the * metadata structure of the file system. May be set to * TSK_FS_META_TYPE_ENUM.TSK_FS_META_TYPE_UNDEF. * @param dirFlag The allocated status of the file, usually as reported * in the name structure of the file system. * @param metaFlags The allocated status of the file, usually as reported * in the metadata structure of the file system. * @param size The size of the file. * @param ctime The changed time of the file. * @param crtime The created time of the file. * @param atime The accessed time of the file. * @param mtime The modified time of the file. * @param modes The modes for the file. * @param uid The UID for the file. * @param gid The GID for the file. * @param md5Hash The MD5 hash of the file, null if not yet calculated. * @param knownState The known state of the file from a hash database * lookup, null if not yet looked up. * @param parentPath The path of the parent of the file. * * @deprecated Do not make subclasses outside of this package. 
*/ @Deprecated @SuppressWarnings("deprecation") protected File(SleuthkitCase db, long objId, long fsObjId, TSK_FS_ATTR_TYPE_ENUM attrType, short attrId, String name, long metaAddr, int metaSeq, TSK_FS_NAME_TYPE_ENUM dirType, TSK_FS_META_TYPE_ENUM metaType, TSK_FS_NAME_FLAG_ENUM dirFlag, short metaFlags, long size, long ctime, long crtime, long atime, long mtime, short modes, int uid, int gid, String md5Hash, FileKnown knownState, String parentPath) { this(db, objId, db.getDataSourceObjectId(objId), fsObjId, attrType, attrId, name, metaAddr, metaSeq, dirType, metaType, dirFlag, metaFlags, size, ctime, crtime, atime, mtime, modes, uid, gid, md5Hash, knownState, parentPath, null); } /** * Constructs a representation of a file system file that has been added to * the case. This deprecated version has attrId field defined as a short * which has since been changed to an int. * * @param db The case database to which the file has been * added. * @param objId The object id of the file in the case database. * @param dataSourceObjectId The object id of the data source for the file. * @param fsObjId The object id of the file system to which this * file belongs. * @param attrType The type attribute given to the file by the * file system. * @param attrId The type id given to the file by the file * system. * @param name The name of the file. * @param metaAddr The meta address of the file. * @param metaSeq The meta sequence number of the file. * @param dirType The type of the file, usually as reported in * the name structure of the file system. May be * set to TSK_FS_NAME_TYPE_ENUM.UNDEF. * @param metaType The type of the file, usually as reported in * the metadata structure of the file system. May * be set to * TSK_FS_META_TYPE_ENUM.TSK_FS_META_TYPE_UNDEF. * @param dirFlag The allocated status of the file, usually as * reported in the name structure of the file * system. 
* @param metaFlags The allocated status of the file, usually as * reported in the metadata structure of the file * system. * @param size The size of the file. * @param ctime The changed time of the file. * @param crtime The created time of the file. * @param atime The accessed time of the file. * @param mtime The modified time of the file. * @param modes The modes for the file. * @param uid The UID for the file. * @param gid The GID for the file. * @param md5Hash The MD5 hash of the file, null if not yet * calculated. * @param knownState The known state of the file from a hash * database lookup, null if not yet looked up. * @param parentPath The path of the parent of the file. * @param mimeType The MIME type of the file, null if it has not * yet been determined. * * @deprecated Do not make subclasses outside of this package. */ @Deprecated @SuppressWarnings("deprecation") File(SleuthkitCase db, long objId, long dataSourceObjectId, long fsObjId, TSK_FS_ATTR_TYPE_ENUM attrType, short attrId, String name, long metaAddr, int metaSeq, TSK_FS_NAME_TYPE_ENUM dirType, TSK_FS_META_TYPE_ENUM metaType, TSK_FS_NAME_FLAG_ENUM dirFlag, short metaFlags, long size, long ctime, long crtime, long atime, long mtime, short modes, int uid, int gid, String md5Hash, FileKnown knownState, String parentPath, String mimeType) { this(db, objId, dataSourceObjectId, fsObjId, attrType, (int) attrId, name, metaAddr, metaSeq, dirType, metaType, dirFlag, metaFlags, size, ctime, crtime, atime, mtime, modes, uid, gid, md5Hash, null, knownState, parentPath, mimeType, null, OsAccount.NO_OWNER_ID, OsAccount.NO_ACCOUNT, Collections.emptyList()); } } sleuthkit-4.11.1/bindings/java/src/org/sleuthkit/datamodel/Examples/000755 000765 000024 00000000000 14137073560 026232 5ustar00carrierstaff000000 000000 sleuthkit-4.11.1/bindings/java/src/org/sleuthkit/datamodel/TimelineEventDescription.java000755 000765 000024 00000005031 14137073413 032272 0ustar00carrierstaff000000 000000 /* * Sleuth Kit Data Model * * 
Copyright 2018-2019 Basis Technology Corp. * Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.sleuthkit.datamodel; /** * A container for a timeline event description with potentially varying levels * of detail. */ class TimelineEventDescription { private final String shortDesc; private final String mediumDesc; private final String fullDesc; /** * Constructs a container for a timeline event description that varies with * each of three levels of detail. * * @param fullDescription The full length description of an event for use * at a high level of detail. * @param medDescription The medium length description of an event for use * at a medium level of detail. * @param shortDescription The short length description of an event for use * at a low level of detail. */ TimelineEventDescription(String fullDescription, String medDescription, String shortDescription) { this.shortDesc = shortDescription; this.mediumDesc = medDescription; this.fullDesc = fullDescription; } /** * Constructs a container for a timeline event description for the high * level of detail. The descriptions for the low and medium levels of detail * will be the empty string. * * @param fullDescription The full length description of an event for use at * a high level of detail. */ TimelineEventDescription(String fullDescription) { this.shortDesc = ""; this.mediumDesc = ""; this.fullDesc = fullDescription; } /** * Gets the description of this event at the given level of detail. 
* * @param levelOfDetail The level of detail. * * @return The event description at the given level of detail. */ String getDescription(TimelineLevelOfDetail levelOfDetail) { switch (levelOfDetail) { case HIGH: default: return this.fullDesc; case MEDIUM: return this.mediumDesc; case LOW: return this.shortDesc; } } } sleuthkit-4.11.1/bindings/java/src/org/sleuthkit/datamodel/EncodedFileOutputStream.java000644 000765 000024 00000005730 14137073413 032057 0ustar00carrierstaff000000 000000 /* * SleuthKit Java Bindings * * Copyright 2011-2016 Basis Technology Corp. * Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.sleuthkit.datamodel; import java.io.BufferedOutputStream; import java.io.IOException; import java.io.OutputStream; /** * Output stream wrapper for encoding files being written to disk. The idea is * to prevent malicious files from getting extracted onto the user's hard drive * in their original form. The encoding type used here should match the one used * to create the derived file database entry for this file. */ public class EncodedFileOutputStream extends BufferedOutputStream { private final TskData.EncodingType type; private long encodedDataLength; /** * Create an encoded output stream using the specified encoding. 
* * @param out * @param type * * @throws IOException */ public EncodedFileOutputStream(OutputStream out, TskData.EncodingType type) throws IOException { super(out); this.type = type; encodedDataLength = 0; writeHeader(); } /** * Create an encoded output stream using the specified encoding and buffer * size. * * @param out * @param size * @param type * * @throws IOException */ public EncodedFileOutputStream(OutputStream out, int size, TskData.EncodingType type) throws IOException { super(out, size); this.type = type; writeHeader(); } private void writeHeader() throws IOException { // We get the encoded header here so it will be in plaintext after encoding write(EncodedFileUtil.getEncodedHeader(type), 0, EncodedFileUtil.getHeaderLength()); encodedDataLength -= EncodedFileUtil.getHeaderLength(); } @Override public void write(int b) throws IOException { super.write((int) EncodedFileUtil.encodeByte((byte) b, type)); encodedDataLength++; } @Override public void write(byte[] b, int off, int len) throws IOException { byte[] encodedData = new byte[b.length]; for (int i = 0; i < b.length; i++) { encodedData[i] = EncodedFileUtil.encodeByte(b[i], type); } super.write(encodedData, off, len); encodedDataLength += len; } /** * Get the number of bytes written to the file, excluding header bytes. * This is needed for storing the original length of the file in the * tsk_files table in cases where we don't know the size in advance. * * @return the number of bytes written to the stream, excluding the header. */ public long getBytesWritten() { return encodedDataLength; } }sleuthkit-4.11.1/bindings/java/src/org/sleuthkit/datamodel/Score.java000644 000765 000024 00000017656 14137073413 026406 0ustar00carrierstaff000000 000000 /* * Sleuth Kit Data Model * * Copyright 2020-2021 Basis Technology Corp. * Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.sleuthkit.datamodel; import java.util.Arrays; import java.util.Comparator; import java.util.ResourceBundle; /** * Encapsulates either an analysis result score or the aggregate score of * Content. A score measures how likely the Content object is to be relevant to * an investigation. Relevance is determined by a series of analysis techniques, * each of which has a score. The aggregate score for an item is then determined * based on its analysis results. * * A score has two primary fields: Significance and Priority. * * There are two priorities : Normal and Override. Nearly everything should have * a "Normal" priority. "Override" is used when a user wants to change the score * because of a false positive. An "Override" score will take priority over * the combined "Normal" scores. An item should have only one "Override" score * at a time, but that is not currently enforced. * * The significance is a range of how Notable (i.e. "Bad") the item is. The * range is from NONE (i.e. "Good") to NOTABLE with values in the middle, such * as LIKELY_NOTABLE for suspicious items. The LIKELY_ values are used when * there is less confidence in the result. The significance has to do with the * false positive rate at actually detecting notable or benign things. * * * For an example, if a file is found in a MD5 hashset of notable files, then a * module would use a significance of NOTABLE. This is because the MD5 is exact * match and the hash set is all notable files. 
* * For a keyword hit, the significance would be LIKELY_NOTABLE because keywords * often can be used in both good and bad ways. A user will need to review the * file to determine if it is a true or false positive. * * If a file is found to be on a good list (via MD5), then it could have a * significance of NONE and then other modules could ignore it. * * An aggregate score is the combination of the specific analysis results. * USER_RESULTS will overrule NORMAL. NOTABLE overrules NONE. Both of those * overrule the LIKELY_* results. * * NOTABLE > NONE > LIKELY_NOTABLE > LIKELY_NONE > UNKNOWN */ public class Score implements Comparable { private static final ResourceBundle bundle = ResourceBundle.getBundle("org.sleuthkit.datamodel.Bundle"); /** * Indicates the relevance of an item based on the analysis result's conclusion. * * For comparing significance, the following ordering applies * * Bad > Good > Likely Bad > Likely Good > Unknown * */ public enum Significance { // Enum name must not have any spaces. /* Notes on the ordinal numbers: We defined these so that we could easily * compare values while also have some concept of grouping. * The 1x values are a higher confidence than the 0x files. * NOTABLE (x9) has priority over NOT NOTABLE (x8). * If we need to make this more complicated in the future, we can add * other groupings, such as 14 and 15. */ /// no significance assigned yet. UNKNOWN(0, "Unknown", "Significance.Unknown.displayName.text"), /// likely good LIKELY_NONE(8, "LikelyNone", "Significance.LikelyNone.displayName.text"), /// likely bad, suspicious LIKELY_NOTABLE(9, "LikelyNotable", "Significance.LikelyNotable.displayName.text"), /// good NONE(18, "None", "Significance.None.displayName.text"), /// bad NOTABLE(19, "Notable", "Significance.Notable.displayName.text"); private final int id; private final String name; // name must not have spaces private final String displayNameKey; // display name is loaded from resource bundle using this key. 
private Significance(int id, String name, String displayNameKey) { this.id = id; this.name = name; this.displayNameKey = displayNameKey; } public static Significance fromString(String name) { return Arrays.stream(values()) .filter(val -> val.getName().equals(name)) .findFirst().orElse(NONE); } static public Significance fromID(int id) { return Arrays.stream(values()) .filter(val -> val.getId() == id) .findFirst().orElse(NONE); } /** * Get enum ordinal. * * @return Ordinal. */ public int getId() { return id; } /** * Gets name that has no spaces in it. * Does not get translated. * * @return Name. */ public String getName() { return name; } /** * Gets display name that may have spaces and can be used in the UI. * May return a translated version. * * @return Display name. */ public String getDisplayName() { return bundle.getString(displayNameKey); } @Override public String toString() { return name; } } /** * Represents the priority of the score to allow overrides by a user or module */ public enum Priority { // Name must not have any spaces. NORMAL(0, "Normal", "Score.Priority.Normal.displayName.text"), OVERRIDE(10, "Override", "Score.Priority.Override.displayName.text"); private final int id; private final String name; private final String displayNameKey; // display name is loaded from resource bundle using this key. 
private Priority(int id, String name, String displayNameKey) { this.id = id; this.name = name; this.displayNameKey = displayNameKey; } public static Priority fromString(String name) { return Arrays.stream(values()) .filter(val -> val.getName().equals(name)) .findFirst().orElse(NORMAL); } static public Priority fromID(int id) { return Arrays.stream(values()) .filter(val -> val.getId() == id) .findFirst().orElse(NORMAL); } public int getId() { return id; } public String getName() { return name; } public String getDisplayName() { return bundle.getString(displayNameKey); } @Override public String toString() { return name; } } public static final Score SCORE_NOTABLE = new Score(Significance.NOTABLE, Priority.NORMAL); public static final Score SCORE_LIKELY_NOTABLE = new Score(Significance.LIKELY_NOTABLE, Priority.NORMAL); public static final Score SCORE_LIKELY_NONE = new Score(Significance.LIKELY_NONE, Priority.NORMAL); public static final Score SCORE_NONE= new Score(Significance.NONE, Priority.NORMAL); public static final Score SCORE_UNKNOWN = new Score(Significance.UNKNOWN, Priority.NORMAL); // Score is a combination of significance and priority. private final Significance significance; private final Priority priority; public Score(Significance significance, Priority priority) { this.significance = significance; this.priority = priority; } public Significance getSignificance() { return significance; } public Priority getPriority() { return priority; } @Override public int compareTo(Score other) { // A score is a combination of significance & priority. // Priority Override overrides Normal. // If two results have same priority, then the higher significance wins. 
if (this.getPriority() != other.getPriority()) { return this.getPriority().ordinal() - other.getPriority().ordinal(); } else { return this.getSignificance().ordinal() - other.getSignificance().ordinal(); } } public static final Comparator getScoreComparator() { return (Score score1, Score score2) -> { return score1.compareTo(score2); }; } } sleuthkit-4.11.1/bindings/java/src/org/sleuthkit/datamodel/Bundle.properties000644 000765 000024 00000061142 14137073414 030005 0ustar00carrierstaff000000 000000 BlackboardArtifact.tskGenInfo.text=General Info BlackboardArtifact.tskWebBookmark.text=Web Bookmarks BlackboardArtifact.tskWebCookie.text=Web Cookies BlackboardArtifact.tskWebHistory.text=Web History BlackboardArtifact.tskWebDownload.text=Web Downloads BlackboardArtifact.tsk.recentObject.text=Recent Documents BlackboardArtifact.tskGpsTrackpoint.text=GPS Trackpoints BlackboardArtifact.tskInstalledProg.text=Installed Programs BlackboardArtifact.tskKeywordHits.text=Keyword Hits BlackboardArtifact.tskHashsetHit.text=Hashset Hits BlackboardArtifact.tskDeviceAttached.text=USB Device Attached BlackboardArtifact.tskInterestingFileHit.text=Interesting Files BlackboardArtifact.tskEmailMsg.text=E-Mail Messages BlackboardArtifact.tskExtractedText.text=Extracted Text BlackboardArtifact.tskWebSearchQuery.text=Web Search BlackboardArtifact.tskMetadataExif.text=EXIF Metadata BlackboardArtifact.tagFile.text=Tagged Files BlackboardArtifact.tskTagArtifact.text=Tagged Results BlackboardArtifact.tskOsInfo.text=Operating System Information BlackboardArtifact.tskOsAccount.text=Operating System User Account BlackboardArtifact.tskServiceAccount.text=Web Accounts BlackboardArtifact.tskToolOutput.text=Raw Tool Output BlackboardArtifact.tskContact.text=Contacts BlackboardArtifact.tskMessage.text=Messages BlackboardArtifact.tskCalllog.text=Call Logs BlackboardArtifact.tskCalendarEntry.text=Calendar Entries BlackboardArtifact.tskSpeedDialEntry.text=Speed Dial Entries 
BlackboardArtifact.tskBluetoothPairing.text=BlueTooth Pairings BlackboardArtifact.tskGpsBookmark.text=GPS Bookmarks BlackboardArtifact.tskGpsLastKnownLocation.text=GPS Last Known Location BlackboardArtifact.tskGpsSearch.text=GPS Searches BlackboardArtifact.tskProgRun.text=Run Programs BlackboardArtifact.tskEncryptionDetected.text=Encryption Detected BlackboardArtifact.tskEncryptionSuspected.text=Encryption Suspected BlackboardArtifact.tskExtMismatchDetected.text=Extension Mismatch Detected BlackboardArtifact.tskInterestingArtifactHit.text=Interesting Results BlackboardArtifact.tskRemoteDrive.text=Remote Drive BlackboardArtifact.tskFaceDetected.text=Face Detected BlackboardArtifact.tskAccount.text=Accounts BlackboardArtifact.tskTLEvent.text=TL Events BlackboardArtifact.tskObjectDetected.text=Object Detected BlackboardArtifact.tskWIFINetwork.text=Wireless Networks BlackboardArtifact.tskDeviceInfo.text=Device Info BlackboardArtifact.tskSimAttached.text=SIM Attached BlackboardArtifact.tskBluetoothAdapter.text=Bluetooth Adapter BlackboardArtifact.tskWIFINetworkAdapter.text=Wireless Network Adapters BlackboardArtifact.tskVerificationFailed.text=Verification Failure BlackboardArtifact.tskDataSourceUsage.text=Data Source Usage BlackboardArtifact.tskWebFormAutofill.text=Web Form Autofill BlackboardArtifact.tskWebFormAddresses.text=Web Form Addresses BlackboardArtifact.tskDownloadSource.text=Download Source BlackboardArtifact.tskWebCache.text=Web Cache BlackboardArtifact.tskClipboardContent.text=Clipboard Content BlackboardArtifact.tskUserContentSuspected.text=User Content Suspected BlackboardArtifact.tskMetadata.text=Metadata BlackboardArtifact.tskTrack.text=GPS Track BlackboardArtifact.tskWebAccountType.text=Web Account Type BlackboardArtifact.tskScreenShots.text=Screenshots BlackboardArtifact.tskDhcpInfo.text=DHCP Information BlackboardArtifact.tskProgNotifications.text=Program Notifications BlackboardArtifact.tskBackupEvent.text=Backup Events 
BlackboardArtifact.tskDeletedProg.text=Deleted Programs BlackboardArtifact.tskUserDeviceEvent.text=User Device Events BlackboardArtifact.shortDescriptionDate.text=at {0} BlackboardArtifact.tskAssociatedObject.text=Associated Object BlackboardArtifact.tskWebCategorization.text=Web Categories BlackboardArtifact.tskPreviouslySeen.text=Previously Seen BlackboardArtifact.tskPreviouslyUnseen.text=Previously Unseen BlackboardArtifact.tskPreviouslyNotable.text=Previously Notable BlackboardArtifact.tskYaraHit.text=YARA Hit BlackboardArtifact.tskGPSArea.text=GPS Area BlackboardAttribute.tskAccountType.text=Account Type BlackboardAttribute.tskUrl.text=URL BlackboardAttribute.tskDatetime.text=Date/Time BlackboardAttribute.tskName.text=Name BlackboardAttribute.tskProgName.text=Program Name BlackboardAttribute.tskValue.text=Value BlackboardAttribute.tskFlag.text=Flag BlackboardAttribute.tskPath.text=Path BlackboardAttribute.tskKeyword.text=Keyword BlackboardAttribute.tskKeywordRegexp.text=Keyword Regular Expression BlackboardAttribute.tskKeywordPreview.text=Keyword Preview BlackboardAttribute.tskKeywordSet.text=Keyword Set BlackboardAttribute.tskUserName.text=Username BlackboardAttribute.tskDomain.text=Domain BlackboardAttribute.tskPassword.text=Password BlackboardAttribute.tskNamePerson.text=Person Name BlackboardAttribute.tskDeviceModel.text=Device Model BlackboardAttribute.tskDeviceMake.text=Device Make BlackboardAttribute.tskDeviceId.text=Device ID BlackboardAttribute.tskEmail.text=Email BlackboardAttribute.tskHashMd5.text=MD5 Hash BlackboardAttribute.tskHashSha1.text=SHA1 Hash BlackboardAttribute.tskHashSha225.text=SHA2-256 Hash BlackboardAttribute.tskHashSha2512.text=SHA2-512 Hash BlackboardAttribute.tskText.text=Text BlackboardAttribute.tskTextFile.text=Text File BlackboardAttribute.tskTextLanguage.text=Text Language BlackboardAttribute.tskEntropy.text=Entropy BlackboardAttribute.tskHashsetName.text=Hashset Name BlackboardAttribute.tskInterestingFile.text=Interesting File 
BlackboardAttribute.tskReferrer.text=Referrer URL BlackboardAttribute.tskDateTimeAccessed.text=Date Accessed BlackboardAttribute.tskIpAddress.text=IP Address BlackboardAttribute.tskPhoneNumber.text=Phone Number BlackboardAttribute.tskPathId.text=Path ID BlackboardAttribute.tskSetName.text=Set Name BlackboardAttribute.tskEncryptionDetected.text=Encryption Detected BlackboardAttribute.tskMalwareDetected.text=Malware Detected BlackboardAttribute.tskStegDetected.text=Steganography Detected BlackboardAttribute.tskEmailTo.text=E-Mail To BlackboardAttribute.tskEmailCc.text=E-Mail CC BlackboardAttribute.tskEmailBcc.text=E-Mail BCC BlackboardAttribute.tskEmailFrom.text=E-Mail From BlackboardAttribute.tskEmailContentPlain.text=Message (Plaintext) BlackboardAttribute.tskEmailContentHtml.text=Message (HTML) BlackboardAttribute.tskEmailContentRtf.text=Message (RTF) BlackboardAttribute.tskMsgId.text=Message ID BlackboardAttribute.tskMsgReplyId.text=Message Reply ID BlackboardAttribute.tskDateTimeRcvd.text=Date Received BlackboardAttribute.tskDateTimeSent.text=Date Sent BlackboardAttribute.tskSubject.text=Subject BlackboardAttribute.tskTitle.text=Title BlackboardAttribute.tskGeoLatitude.text=Latitude BlackboardAttribute.tskGeoLongitude.text=Longitude BlackboardAttribute.tskGeoVelocity.text=Velocity BlackboardAttribute.tskGeoAltitude.text=Altitude BlackboardAttribute.tskGeoBearing.text=Bearing BlackboardAttribute.tskGeoHPrecision.text=Horizontal Precision BlackboardAttribute.tskGeoVPrecision.text=Vertical Precision BlackboardAttribute.tskGeoMapDatum.text=Map Datum BlackboardAttribute.tskFileTypeSig.text=File Type (signature) BlackboardAttribute.tskFileTypeExt.text=File Type (extension) BlackboardAttribute.tskTaggedArtifact.text=Tagged Result BlackboardAttribute.tskTagName.text=Tag Name BlackboardAttribute.tskComment.text=Comment BlackboardAttribute.tskUrlDecoded.text=Decoded URL BlackboardAttribute.tskDateTimeCreated.text=Date Created 
BlackboardAttribute.tskDateTimeModified.text=Date Modified BlackboardAttribute.tskProcessorArchitecture.text=Processor Architecture BlackboardAttribute.tskVersion.text=Version BlackboardAttribute.tskUserId.text=User ID BlackboardAttribute.tskDescription.text=Description BlackboardAttribute.tskMessageType.text=Message Type BlackboardAttribute.tskPhoneNumberHome.text=Phone Number (Home) BlackboardAttribute.tskPhoneNumberOffice.text=Phone Number (Office) BlackboardAttribute.tskPhoneNumberMobile.text=Phone Number (Mobile) BlackboardAttribute.tskPhoneNumberFrom.text=From Phone Number BlackboardAttribute.tskPhoneNumberTo.text=To Phone Number BlackboardAttribute.tskDirection.text=Direction BlackboardAttribute.tskEmailHome.text=Email (Home) BlackboardAttribute.tskEmailOffice.text=Email (Office) BlackboardAttribute.tskDateTimeStart.text=Start Date/Time BlackboardAttribute.tskDateTimeEnd.text=End Date/Time BlackboardAttribute.tskCalendarEntryType.text=Calendar Entry Type BlackboardAttribute.tskLocation.text=Location BlackboardAttribute.tskShortcut.text=Short Cut BlackboardAttribute.tskDeviceName.text=Device Name BlackboardAttribute.tskCategory.text=Category BlackboardAttribute.tskEmailReplyTo.text=ReplyTo Address BlackboardAttribute.tskServerName.text=Server Name BlackboardAttribute.tskCount.text=Count BlackboardAttribute.tskMinCount.text=Minimum Count BlackboardAttribute.tskPathSource.text=Path Source BlackboardAttribute.tskPermissions.text=Permissions BlackboardAttribute.tskAssociatedArtifact.text=Associated Artifact BlackboardAttribute.tskIsDeleted.text=Is Deleted BlackboardAttribute.tskLocalPath.text=Local Path BlackboardAttribute.tskRemotePath.text=Remote Path BlackboardAttribute.tskProcessorName.text=Processor Name BlackboardAttribute.tskTempDir.text=Temporary Files Directory BlackboardAttribute.tskProductId.text=Product ID BlackboardAttribute.tskOwner.text=Owner BlackboardAttribute.tskOrganization.text=Organization BlackboardAttribute.tskCardNumber.text=Card Number 
BlackboardAttribute.tskCardExpiration.text=Card Expiration (YYMM) BlackboardAttribute.tskCardServiceCode.text=Card Service Code BlackboardAttribute.tskCardDiscretionary.text=Card Discretionary Data BlackboardAttribute.tskCardLRC.text=Card Longitudinal Redundancy Check BlackboardAttribute.tskKeywordSearchDocumentID.text=Keyword Search Document ID BlackboardAttribute.tskCardScheme.text=Card Scheme BlackboardAttribute.tskCardType.text=Card Type BlackboardAttribute.tskBrandName.text=Brand Name BlackboardAttribute.tskBankName.text=Bank Name BlackboardAttribute.tskCountry.text=Country BlackboardAttribute.tskCity.text=City BlackboardAttribute.tskKeywordSearchType.text=Keyword Search Type BlackboardAttribute.tskHeaders.text=Headers BlackboardAttribute.tskId.text=ID BlackboardAttribute.tskTLEventType.text=Event Type BlackboardAttribute.tskSsid.text=SSID BlackboardAttribute.tskBssid.text=BSSID BlackboardAttribute.tskMacAddress.text=MAC Address BlackboardAttribute.tskImei.text=IMEI BlackboardAttribute.tskImsi.text=IMSI BlackboardAttribute.tskIccid.text=ICCID BlackboardAttribute.tskthreadid.text=Thread ID BlackboardAttribute.tskdatetimedeleted.text=Time Deleted BlackboardAttribute.tskdatetimepwdreset.text=Password Reset Date BlackboardAttribute.tskdatetimepwdfail.text=Password Fail Date BlackboardAttribute.tskdisplayname.text=Display Name BlackboardAttribute.tskpasswordsettings.text=Password Settings BlackboardAttribute.tskaccountsettings.text=Account Settings BlackboardAttribute.tskpasswordhint.text=Password Hint BlackboardAttribute.tskgroups.text=Groups BlackboardAttribute.tskattachments.text=Message Attachments BlackboardAttribute.tskgeopath.text=List of Track Points BlackboardAttribute.tskgeowaypoints.text=List of Waypoints BlackboardAttribute.tskdistancetraveled.text=Distance Traveled BlackboardAttribute.tskdistancefromhome.text=Distance from Homepoint BlackboardAttribute.tskhashphotodna.text=PhotoDNA Hash BlackboardAttribute.tskbytessent.text=Bytes Sent 
BlackboardAttribute.tskbytesreceived.text=Bytes Received BlackboardAttribute.tsklastprinteddatetime.text=Last Printed Date BlackboardAttribute.tskgeoareapoints.text=List of points making up the outline of an area BlackboardAttribute.tskrule.text = Rule BlackboardAttribute.tskActivityType.text=Activity Type BlackboardAttribute.tskRealm.text=Realm BlackboardAttribute.tskHost.text=Host BlackboardAttribute.tskHomeDir.text=Home Directory BlackboardAttribute.tskIsAdmin.text=Is Administrator BlackboardAttribute.tskCorrelationType.text=Correlation Type BlackboardAttribute.tskCorrelationValue.text=Correlation Value BlackboardAttribute.tskOtherCases.text=Other Cases AbstractFile.readLocal.exception.msg4.text=Error reading local file\: {0} AbstractFile.readLocal.exception.msg1.text=Error reading local file, local path is not set AbstractFile.readLocal.exception.msg2.text=Error reading local file, it does not exist at local path\: {0} AbstractFile.readLocal.exception.msg3.text=Error reading local file, file not readable at local path\: {0} AbstractFile.readLocal.exception.msg5.text=Cannot read local file\: {0} DerviedFile.derivedMethod.exception.msg1.text=Error getting derived method for file id\: {0} FsContent.readInt.err.msg.text=Image file does not exist or is inaccessible. Image.verifyImageSize.errStr1.text=\nPossible Incomplete Image\: Error reading volume at offset {0} Image.verifyImageSize.errStr2.text=\nPossible Incomplete Image\: Error reading volume at offset {0} Image.verifyImageSize.errStr3.text=\nPossible Incomplete Image\: Error reading file system at offset {0} Image.verifyImageSize.errStr4.text=\nPossible Incomplete Image\: Error reading file system at offset {0} SlackFile.readInt.err.msg.text=Image file does not exist or is inaccessible. 
SleuthkitCase.isFileFromSource.exception.msg.text=Error, data source should be parent-less (images, file-sets), got\: {0} SleuthkitCase.isFileFromSource.exception.msg2.text=Error, data source should be Image or VirtualDirectory, got\: {0} SleuthkitCase.SchemaVersionMismatch=Schema version does not match SleuthkitCase.findFiles.exception.msg1.text=Error, data source should be parent-less (images, file-sets), got\: {0} SleuthkitCase.findFiles.exception.msg2.text=Error, data source should be Image or VirtualDirectory, got\: {0} SleuthkitCase.findFiles.exception.msg3.text=Error finding files in the data source by name, SleuthkitCase.findFiles3.exception.msg1.text=Error, data source should be parent-less (images, file-sets), got\: {0} SleuthkitCase.findFiles3.exception.msg2.text=Error, data source should be Image or VirtualDirectory, got\: {0} SleuthkitCase.findFiles3.exception.msg3.text=Error finding files in the data source by name, SleuthkitCase.addDerivedFile.exception.msg1.text=Error creating a derived file, cannot get new id of the object, file name\: {0} SleuthkitCase.addDerivedFile.exception.msg2.text=Error creating a derived file, file name\: {0} SleuthkitCase.addLocalFile.exception.msg1.text=Error adding local file\: {0}, parent to add to is null SleuthkitCase.addLocalFile.exception.msg2.text=Error creating a local file, cannot get new id of the object, file name\: {0} SleuthkitCase.addLocalFile.exception.msg3.text=Error creating a derived file, file name\: {0} SleuthkitCase.getLastObjectId.exception.msg.text=Error closing result set after getting last object id. 
TskData.tskFsNameFlagEnum.allocated=Allocated TskData.tskFsNameFlagEnum.unallocated=Unallocated TskData.tskFsMetaFlagEnum.allocated=Allocated TskData.tskFsMetaFlagEnum.unallocated=Unallocated TskData.tskFsMetaFlagEnum.used=Used TskData.tskFsMetaFlagEnum.unused=Unused TskData.tskFsMetaFlagEnum.compressed=Compressed TskData.tskFsMetaFlagEnum.orphan=Orphan TskData.tskFsTypeEnum.autoDetect=Auto Detect TskData.tskFsTypeEnum.NTFSautoDetect=NTFS (Auto Detection) TskData.tskFsTypeEnum.FATautoDetect=FAT (Auto Detection) TskData.tskFsTypeEnum.ExtXautoDetect=ExtX (Auto Detection) TskData.tskFsTypeEnum.SWAPautoDetect=SWAP (Auto Detection) TskData.tskFsTypeEnum.RAWautoDetect=RAW (Auto Detection) TskData.tskFsTypeEnum.ISO9660autoDetect=ISO9660 (Auto Detection) TskData.tskFsTypeEnum.HFSautoDetect=HFS (Auto Detection) TskData.tskFsTypeEnum.YAFFS2autoDetect=YAFFS2 (Auto Detection) TskData.tskFsTypeEnum.APFSautoDetect=APFS (Auto Detection) TskData.tskFsTypeEnum.unsupported=Unsupported File System TskData.tskImgTypeEnum.autoDetect=Auto Detect TskData.tskImgTypeEnum.rawSingle=Raw Single TskData.tskImgTypeEnum.rawSplit=Raw Split TskData.tskImgTypeEnum.unknown=Unknown TskData.tskVSTypeEnum.autoDetect=Auto Detect TskData.tskVSTypeEnum.fake=Fake TskData.tskVSTypeEnum.unsupported=Unsupported TskData.tskVSTypeEnum.exception.msg1.text=No TSK_VS_TYPE_ENUM of value\: {0} TskData.fileKnown.unknown=unknown TskData.fileKnown.known=known TskData.fileKnown.knownBad=notable TskData.fileKnown.exception.msg1.text=No FileKnown of value\: {0} TskData.encodingType.exception.msg1.text=No EncodingType of value\: {0} TskData.tskDbFilesTypeEnum.exception.msg1.text=No TSK_FILE_TYPE_ENUM of value\: {0} TskData.objectTypeEnum.exception.msg1.text=No ObjectType of value\: {0} TskData.tskImgTypeEnum.exception.msg1.text=No TSK_IMG_TYPE_ENUM of value\: {0} TskData.tskFsTypeEnum.exception.msg1.text=No TSK_FS_TYPE_ENUM of value\: {0} TskData.tskFsAttrTypeEnum.exception.msg1.text=No TSK_FS_ATTR_TYPE_ENUM of value\: {0} 
TskData.tskFsNameFlagEnum.exception.msg1.text=No TSK_FS_NAME_FLAG_ENUM of value\: {0} TskData.tskFsMetaTypeEnum.exception.msg1.text=No TSK_FS_META_TYPE_ENUM of value\: {0} TskData.tskFsNameTypeEnum.exception.msg1.text=No TSK_FS_NAME_TYPE_ENUM matching type\: {0} Volume.desc.text=Unknown Volume.read.exception.msg1.text=This volume's parent should be a VolumeSystem, but it's not. Volume.vsFlagToString.allocated=Allocated Volume.vsFlagToString.unallocated=Unallocated BlackboardArtifact.tskGpsRoute.text=GPS Route BlackboardAttribute.tskGeoLatitudeStart.text=Starting Latitude BlackboardAttribute.tskGeoLatitudeEnd.text=Ending Latitude BlackboardAttribute.tskGeoLongitudeStart.text=Starting Longitude BlackboardAttribute.tskGeoLongitudeEnd.text=Ending Longitude BlackboardAttribute.tskReadStatus.text=Read DatabaseConnectionCheck.Everything=Invalid hostname, port number, username, and/or password. DatabaseConnectionCheck.Hostname=Invalid hostname. DatabaseConnectionCheck.Port=Invalid port number. DatabaseConnectionCheck.HostnameOrPort=Invalid hostname and/or port number. DatabaseConnectionCheck.Authentication=Invalid username and/or password. DatabaseConnectionCheck.Access=Invalid username and/or password. DatabaseConnectionCheck.ServerDiskSpace=PostgreSQL server issue. Check disk space and memory availabilty on the PostgreSQL server. DatabaseConnectionCheck.ServerRestart="PostgreSQL server issue. PostgreSQL server may need to be restarted. DatabaseConnectionCheck.InternalServerIssue=Internal PostgreSQL issue. Database may be corrupted. DatabaseConnectionCheck.Connection=Invalid hostname, port, username, and/or password. DatabaseConnectionCheck.Installation=Issue with installation. JDBC driver not found. DatabaseConnectionCheck.MissingHostname=Missing hostname. DatabaseConnectionCheck.MissingPort=Missing port number. DatabaseConnectionCheck.MissingUsername=Missing username. DatabaseConnectionCheck.MissingPassword=Missing password. 
IngestJobInfo.IngestJobStatusType.Started.displayName=Started IngestJobInfo.IngestJobStatusType.Cancelled.displayName=Cancelled IngestJobInfo.IngestJobStatusType.Completed.displayName=Completed IngestModuleInfo.IngestModuleType.FileLevel.displayName=File Level IngestModuleInfo.IngestModuleType.DataArtifact.displayName=Data Artifact IngestModuleInfo.IngestModuleType.DataSourceLevel.displayName=Data Source Level IngestModuleInfo.IngestModuleType.Multiple.displayName=Multiple ReviewStatus.Approved=Approved ReviewStatus.Rejected=Rejected ReviewStatus.Undecided=Undecided CategoryType.DataArtifact=Data Artifact CategoryType.AnalysisResult=Analysis Result TimelineLevelOfDetail.low=Low TimelineLevelOfDetail.medium=Medium TimelineLevelOfDetail.high=High BaseTypes.fileSystem.name=File System BaseTypes.webActivity.name=Web Activity BaseTypes.miscTypes.name=Other FileSystemTypes.fileModified.name=File Modified FileSystemTypes.fileAccessed.name=File Accessed FileSystemTypes.fileCreated.name=File Created FileSystemTypes.fileChanged.name=File Changed MiscTypes.message.name=Messages MiscTypes.GPSRoutes.name=GPS Routes MiscTypes.GPSTrackpoint.name=GPS Trackpoint MiscTypes.Calls.name=Call Begin MiscTypes.CallsEnd.name=Call End MiscTypes.Email.name=Email Sent MiscTypes.EmailRcvd.name=Email Received MiscTypes.recentDocuments.name=Recent Documents MiscTypes.installedPrograms.name=Program Installed MiscTypes.exif.name=Exif MiscTypes.devicesAttached.name=Devices Attached MiscTypes.LogEntry.name=Log Entry MiscTypes.Registry.name=Registry MiscTypes.GPSBookmark.name=GPS Bookmark MiscTypes.GPSLastknown.name=GPS Last Known Location MiscTypes.GPSearch.name=GPS Search MiscTypes.GPSTrack.name=GPS Track MiscTypes.metadataLastPrinted.name=Document Last Printed MiscTypes.metadataLastSaved.name=Document Last Saved MiscTypes.metadataCreated.name=Document Created MiscTypes.programexecuted.name=Program Run RootEventType.eventTypes.name=Event Types WebTypes.webDownloads.name=Web Downloads 
WebTypes.webCookies.name=Web Cookies Create WebTypes.webCookiesAccessed.name=Web Cookies Accessed WebTypes.webCookiesStart.name=Web Cookies Start WebTypes.webCookiesEnd.name=Web Cookies End WebTypes.webBookmarks.name=Web Bookmarks WebTypes.webHistory.name=Web History Accessed WebTypes.webHistoryCreated.name=Web History Created WebTypes.webSearch.name=Web Searches WebTypes.webFormAutoFill.name=Web Form Autofill Created WebTypes.webFormAddress.name=Web Form Address Created WebTypes.webFormAddressModified.name=Web Form Address Modified WebTypes.webFormAutofillAccessed.name=Web Form Autofill Accessed CustomTypes.other.name=Standard Artifact Event CustomTypes.userCreated.name=Manually Created Event CustomTypes.customArtifact.name=Custom Artifact Event EventTypeHierarchyLevel.root=Root EventTypeHierarchyLevel.category=Category EventTypeHierarchyLevel.event=Event DataSourcesFilter.displayName.text=Limit data sources to DescriptionFilter.mode.exclude=Exclude DescriptionFilter.mode.include=Include hashHitsFilter.displayName.text=Must have hash hit hideKnownFilter.displayName.text=Hide Known Files IntersectionFilter.displayName.text=Intersection tagsFilter.displayName.text=Must be tagged TextFilter.displayName.text=Must include text: TypeFilter.displayName.text=Limit event types to FileTypesFilter.displayName.text=Limit file types to OsAccountStatus.Unknown.text=Unknown OsAccountStatus.Active.text=Active OsAccountStatus.Disabled.text=Disabled OsAccountStatus.Deleted.text=Deleted OsAccountType.Unknown.text=Unknown OsAccountType.Service.text=Service OsAccountType.Interactive.text=Interactive OsAccountInstanceType.Launched.text=Launched OsAccountInstanceType.Accessed.text=Accessed OsAccountInstanceType.Referenced.text=Referenced OsAccountInstanceType.Launched.descr.text=Account owner launched a program action on the host. OsAccountInstanceType.Accessed.descr.text=Account owner accessed resources on the host for read/write via some service. 
OsAccountInstanceType.Referenced.descr.text=Account owner was referenced in a log file on the host. OsAccountRealm.Known.text=Known OsAccountRealm.Inferred.text=Inferred OsAccountRealm.Unknown.text=Unknown OsAccountRealm.Local.text=Local OsAccountRealm.Domain.text=Domain Score.Priority.Normal.displayName.text=Normal Score.Priority.Override.displayName.text=Override Significance.Unknown.displayName.text=Unknown Significance.LikelyNone.displayName.text=Likely Not Notable Significance.LikelyNotable.displayName.text=Likely Notable Significance.None.displayName.text=Not Notable Significance.Notable.displayName.text=Notable TimelineEventType.BackupEventStart.txt=Backup Begin TimelineEventType.BackupEventEnd.txt=Backup End TimelineEventType.BackupEvent.description.start=Backup Begin TimelineEventType.BackupEvent.description.end=Backup End TimelineEventType.BluetoothPairingLastConnection.txt=Bluetooth Pairing Last Connection TimelineEventType.BluetoothPairing.txt=Bluetooth Pairing TimelineEventType.CalendarEntryStart.txt=Calendar Entry Begin TimelineEventType.CalendarEntryEnd.txt=Calendar Entry End TimelineEventType.DeletedProgram.txt=Program Deleted TimelineEventType.DeletedProgramDeleted.txt=Application Deleted TimelineEventType.OSAccountAccessed.txt=Operating System Account Accessed TimelineEventType.OSAccountCreated.txt=Operating System Account Created TimelineEventType.OSAccountPwdFail.txt=Operating System Account Password Fail TimelineEventType.OSAccountPwdReset.txt=Operating System Account Password Reset TimelineEventType.OSInfo.txt=Operating System Information TimelineEventType.ProgramNotification.txt=Program Notification TimelineEventType.ScreenShot.txt=Screen Shot TimelineEventType.UserDeviceEventStart.txt=User Activity Begin TimelineEventType.UserDeviceEventEnd.txt=User Activity End TimelineEventType.ServiceAccount.txt=Service Account TimelineEventType.WIFINetwork.txt=Wifi Network TimelineEventType.WebCache.text=Web Cache 
TimelineEventType.BluetoothAdapter.txt=Bluetooth Adapter BaseTypes.geolocation.name=Geolocation BaseTypes.communication.name=Communication TskData.ObjectType.IMG.name=Disk Image TskData.ObjectType.VS.name=Volume System TskData.ObjectType.VOL.name=Volume TskData.ObjectType.FS.name=File System TskData.ObjectType.AbstractFile.name=File TskData.ObjectType.Artifact.name=Artifact TskData.ObjectType.Report.name=Report TskData.ObjectType.Pool.name=Pool TskData.ObjectType.OsAccount.name=OS Account TskData.ObjectType.HostAddress.name=Host Address TskData.ObjectType.Unsupported.name=Unsupported sleuthkit-4.11.1/bindings/java/src/org/sleuthkit/datamodel/ContentVisitor.java000755 000765 000024 00000013660 14137073413 030317 0ustar00carrierstaff000000 000000 /* * Sleuth Kit Data Model * * Copyright 2011-2018 Basis Technology Corp. * Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.sleuthkit.datamodel; /** * Interface for implementing a visitor pattern on all Content implementations. * Visitor implements an algorithm on the content object. The algorithm is * completely decoupled from the content object. The visitor pattern emulates * double dispatch mechanism. It allows to act differently depending on the * instance type, without need to test what the actual type is. E.g. it allows * for processing a Content object hierarchy without using instanceof * statements. Generic type parameter T is a return type from the visit methods. 
* * @param return type of visit methods */ public interface ContentVisitor { /** * Act on (visit) a Directory content object * * @param d the directory to visit / act on * * @return result of the visit */ T visit(Directory d); /** * Act on (visit) a File content object * * @param f File to visit / act on * * @return result of the visit */ T visit(File f); /** * Act on (visit) a FileSystem content object * * @param fs file system to visit / act on * * @return result of the visit */ T visit(FileSystem fs); /** * Act on (visit) an Image content object * * @param i image to visit / act on * * @return result of the visit */ T visit(Image i); /** * Act on (visit) a Pool content object * * @param p pool to visit / act on * * @return result of the visit */ T visit(Pool p); /** * Act on (visit) a Volume content object * * @param v volume to visit / act on * * @return result of the visit */ T visit(Volume v); /** * Act on (visit) a VolumeSystem content object * * @param vs volume system to visit / act on * * @return result of the visit */ T visit(VolumeSystem vs); /** * Act on (visit) a LayoutFile content object * * @param lf layout file to visit / act on * * @return result of the visit */ T visit(LayoutFile lf); /** * Act on (visit) a VirtualDirectory content object * * @param vd virtual dir to visit / act on * * @return result of the visit */ T visit(VirtualDirectory vd); /** * Act on (visit) a LocalDirectory content object * * @param ld local dir to visit / act on * * @return result of the visit */ T visit(LocalDirectory ld); /** * Act on (visit) a DerivedFile content object * * @param lf local file to visit / act on * * @return result of the visit */ T visit(DerivedFile lf); /** * Act on (visit) a LocalFile content object * * @param df derived file to visit / act on * * @return result of the visit */ T visit(LocalFile df); /** * Act on (visit) a SlackFile content object * * @param sf slack file to visit / act on * * @return result of the visit */ T visit(SlackFile sf); 
/** * Act on (visit) a blackboard artifact object * * @param ba blackboard artifact object to visit / act on * * @return result of the visit */ T visit(BlackboardArtifact ba); /** * Act on (visit) a Report object * * @param r report object to visit / act on * * @return result of the visit */ T visit(Report r); /** * Act on (visit) a OsAccount object * * @param act OsAccount object to visit / act on * * @return result of the visit */ T visit(OsAccount act); /** * Act on (visit) an UnsupportedContent object * * @param uc UnsupportedContent object to visit / act on * * @return result of the visit */ T visit(UnsupportedContent uc); /** * The default content visitor - quickest method for implementing a custom * visitor. Every visit method delegates to the defaultVisit method, the * only required method to be implemented. Then, implement the specific * visit methods for the objects on which the algorithm needs to act * differently. * * @param generic type, signifies the object type to be returned from * visit() */ static abstract public class Default implements ContentVisitor { protected abstract T defaultVisit(Content c); @Override public T visit(Directory d) { return defaultVisit(d); } @Override public T visit(File f) { return defaultVisit(f); } @Override public T visit(FileSystem fs) { return defaultVisit(fs); } @Override public T visit(Image i) { return defaultVisit(i); } @Override public T visit(Volume v) { return defaultVisit(v); } @Override public T visit(Pool p) { return defaultVisit(p); } @Override public T visit(VolumeSystem vs) { return defaultVisit(vs); } @Override public T visit(LayoutFile lf) { return defaultVisit(lf); } @Override public T visit(VirtualDirectory ld) { return defaultVisit(ld); } @Override public T visit(LocalDirectory ld) { return defaultVisit(ld); } @Override public T visit(DerivedFile df) { return defaultVisit(df); } @Override public T visit(LocalFile lf) { return defaultVisit(lf); } @Override public T visit(SlackFile sf) { return 
defaultVisit(sf); } @Override public T visit(BlackboardArtifact ba) { return defaultVisit(ba); } @Override public T visit(Report r) { return defaultVisit(r); } @Override public T visit(OsAccount act) { return defaultVisit(act); } @Override public T visit(UnsupportedContent uc) { return defaultVisit(uc); } } } sleuthkit-4.11.1/bindings/java/src/org/sleuthkit/datamodel/Attribute.java000644 000765 000024 00000015402 14137073413 027261 0ustar00carrierstaff000000 000000 /* * Sleuth Kit Data Model * * Copyright 2021 Basis Technology Corp. * Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.sleuthkit.datamodel; import com.google.common.base.MoreObjects; import java.util.Arrays; import java.util.Objects; /** * This is a concrete implementation of a simple Attribute Type. */ public class Attribute extends AbstractAttribute{ /** * The `parent` object of this Attribute. */ private long attributeParentId; /** * Primary key in the respective attribute table. */ private long id; /** * Constructs an attribute with an integer value. The attribute should be * added to an appropriate artifact. * * @param attributeType The attribute type. * @param valueInt The attribute value. * * @throws IllegalArgumentException If the value type of the specified * attribute type is not * TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.INTEGER. 
*/ public Attribute(BlackboardAttribute.Type attributeType, int valueInt) throws IllegalArgumentException { super(attributeType, valueInt); } /** * Constructs an attribute with a long/datetime value. The attribute should * be added to an appropriate artifact. * * @param attributeType The attribute type. * @param valueLong The attribute value. * * @throws IllegalArgumentException If the value type of the specified * standard attribute type is not * TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.LONG * or * TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DATETIME. */ public Attribute(BlackboardAttribute.Type attributeType, long valueLong) throws IllegalArgumentException { super(attributeType, valueLong); } /** * Constructs an attribute with a double value. The attribute should be * added to an appropriate artifact. * * @param attributeType The attribute type. * @param valueDouble The attribute value. * * @throws IllegalArgumentException If the value type of the specified * attribute type is not * TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DOUBLE. */ public Attribute(BlackboardAttribute.Type attributeType, double valueDouble) throws IllegalArgumentException { super(attributeType, valueDouble); } /** * Constructs an attribute with a string value. The attribute should be * added to an appropriate artifact. * * @param attributeType The attribute type. * @param valueString The attribute value. * * @throws IllegalArgumentException If the value type of the specified * attribute type is not * TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING. */ public Attribute(BlackboardAttribute.Type attributeType, String valueString) throws IllegalArgumentException { super(attributeType, valueString); } /** * Constructs an attribute with a byte array value. The attribute should be * added to an appropriate artifact. * * @param attributeType The attribute type. * @param valueBytes The attribute value. 
* * @throws IllegalArgumentException If the value type of the specified * attribute type is not * TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.BYTE. */ public Attribute(BlackboardAttribute.Type attributeType, byte[] valueBytes) throws IllegalArgumentException { super(attributeType, valueBytes); } /** * Constructs an artifact attribute. To be used when creating an attribute * based on a query of the blackboard _attributes table in the case * database. * * @param attributeOwnerId The owner id for this attribute. * @param attributeTypeID The attribute type id. * @param valueType The attribute value type. * @param valueInt The value from the the value_int32 column. * @param valueLong The value from the the value_int64 column. * @param valueDouble The value from the the value_double column. * @param valueString The value from the the value_text column. * @param valueBytes The value from the the value_byte column. * @param sleuthkitCase A reference to the SleuthkitCase object * representing the case database. */ Attribute(long id, long attributeOwnerId, BlackboardAttribute.Type attributeType, int valueInt, long valueLong, double valueDouble, String valueString, byte[] valueBytes, SleuthkitCase sleuthkitCase) { super(attributeType, valueInt, valueLong, valueDouble, valueString, valueBytes, sleuthkitCase); this.id = id; } /** * Gets the parent Id of this attribute. A parent is defined as the Object * to which this attribute is associated with. Eg: For a file Attribute the * attribute parent id would be the file object id. * * @return */ final public long getAttributeParentId() { return this.attributeParentId; } /** * Set the parent id for this attribute. Parent is defined as the Object * to which this attribute is associated with. * @param attributeParentId */ final void setAttributeParentId(long attributeParentId) { this.attributeParentId = attributeParentId; } /** * Returns the Id of the Attribute. 
* @return */ public long getId() { return id; } /** * Set the id of the attribute * @param id */ void setId(long id) { this.id = id; } @Override public int hashCode() { return Objects.hash( this.getAttributeType(), this.getValueInt(), this.getValueLong(), this.getValueDouble(), this.getValueString(), this.getValueBytes()); } @Override public boolean equals(Object that) { if (this == that) { return true; } else if (that instanceof Attribute) { return areValuesEqual(that); } else { return false; } } @Override public String toString() { return MoreObjects.toStringHelper(this) .add("attributeType", getAttributeType().toString()) .add("valueInt", getValueInt()) .add("valueLong", getValueLong()) .add("valueDouble", getValueDouble()) .add("valueString", getValueString()) .add("valueBytes", Arrays.toString(getValueBytes()) ) .add("Case", getCaseDatabase()) .toString(); } }sleuthkit-4.11.1/bindings/java/src/org/sleuthkit/datamodel/HostManager.java000755 000765 000024 00000046257 14137073413 027545 0ustar00carrierstaff000000 000000 /* * Sleuth Kit Data Model * * Copyright 2020-2021 Basis Technology Corp. * Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
*/ package org.sleuthkit.datamodel; import com.google.common.base.Strings; import java.sql.PreparedStatement; import java.sql.ResultSet; import java.sql.Savepoint; import java.sql.SQLException; import java.sql.Statement; import java.util.ArrayList; import java.util.Collections; import java.util.List; import java.util.Optional; import java.util.UUID; import org.sleuthkit.datamodel.Host.HostDbStatus; import org.sleuthkit.datamodel.SleuthkitCase.CaseDbConnection; import org.sleuthkit.datamodel.SleuthkitCase.CaseDbTransaction; import org.sleuthkit.datamodel.TskEvent.HostsUpdatedTskEvent; import org.sleuthkit.datamodel.TskEvent.HostsDeletedTskEvent; /** * Responsible for creating/updating/retrieving Hosts. */ public final class HostManager { private final SleuthkitCase db; /** * Construct a HostManager for the given SleuthkitCase. * * @param skCase The SleuthkitCase * */ HostManager(SleuthkitCase skCase) { this.db = skCase; } /** * Create a host with specified name. If a host already exists with the * given name, it returns the existing host. * * @param name Host name. * * @return Host with the specified name. * * @throws TskCoreException */ public Host newHost(String name) throws TskCoreException { CaseDbTransaction transaction = db.beginTransaction(); try { Host host = newHost(name, transaction); transaction.commit(); transaction = null; return host; } finally { if (transaction != null) { transaction.rollback(); } } } /** * Create a host with given name. If the host already exists, the existing * host will be returned. * * NOTE: Whenever possible, create hosts as part of a single step * transaction so that it can quickly determine a host of the same name * already exists. If you call this as part of a multi-step * CaseDbTransaction, then this method may think it can insert the host * name, but then when it comes time to call CaseDbTransaction.commit(), * there could be a uniqueness constraint violation and other inserts in the * same transaction could have problems. 
* * This method should never be made public and exists only because we need * to support APIs that do not take in a host and we must make one. Ensure * that if you call this method that the host name you give will be unique. * * @param name Host name that must be unique if this is called as part of a * multi-step transaction * @param trans Database transaction to use. * * @return Newly created host. * * @throws TskCoreException */ Host newHost(String name, CaseDbTransaction trans) throws TskCoreException { // must have a name if (Strings.isNullOrEmpty(name)) { throw new TskCoreException("Illegal argument passed to createHost: Host name is required."); } CaseDbConnection connection = trans.getConnection(); Savepoint savepoint = null; try { savepoint = connection.getConnection().setSavepoint(); String hostInsertSQL = "INSERT INTO tsk_hosts(name) VALUES (?)"; // NON-NLS PreparedStatement preparedStatement = connection.getPreparedStatement(hostInsertSQL, Statement.RETURN_GENERATED_KEYS); preparedStatement.clearParameters(); preparedStatement.setString(1, name); connection.executeUpdate(preparedStatement); // Read back the row id Host host = null; try (ResultSet resultSet = preparedStatement.getGeneratedKeys();) { if (resultSet.next()) { host = new Host(resultSet.getLong(1), name); //last_insert_rowid() } else { throw new SQLException("Error executing " + hostInsertSQL); } } if (host != null) { trans.registerAddedHost(host); } return host; } catch (SQLException ex) { if (savepoint != null) { try { connection.getConnection().rollback(savepoint); } catch (SQLException ex2) { throw new TskCoreException(String.format("Error adding host with name = %s and unable to rollback", name), ex); } } // It may be the case that the host already exists, so try to get it. 
Optional optHost = getHostByName(name, connection); if (optHost.isPresent()) { return optHost.get(); } throw new TskCoreException(String.format("Error adding host with name = %s", name), ex); } } /** * Updates the name of the provided host. * * @param host The host to be updated. * @param newName The new name of the host. * * @return The updated host. * * @throws TskCoreException */ public Host updateHostName(Host host, String newName) throws TskCoreException { if (host == null) { throw new TskCoreException("Illegal argument passed to updateHost: No host argument provided."); } else if (newName == null) { throw new TskCoreException(String.format("Illegal argument passed to updateHost: Host with id %d has no name", host.getHostId())); } long hostId = host.getHostId(); Host updatedHost = null; db.acquireSingleUserCaseWriteLock(); try (CaseDbConnection connection = db.getConnection()) { // Don't update the name for non-active hosts String hostInsertSQL = "UPDATE tsk_hosts " + "SET name = " + " CASE WHEN db_status = " + Host.HostDbStatus.ACTIVE.getId() + " THEN ? ELSE name END " + "WHERE id = ?"; PreparedStatement preparedStatement = connection.getPreparedStatement(hostInsertSQL, Statement.RETURN_GENERATED_KEYS); preparedStatement.clearParameters(); preparedStatement.setString(1, newName); preparedStatement.setLong(2, hostId); connection.executeUpdate(preparedStatement); updatedHost = getHostById(hostId, connection).orElseThrow(() -> new TskCoreException((String.format("Error while fetching newly updated host with id: %d, ")))); } catch (SQLException ex) { throw new TskCoreException(String.format("Error updating host with name = %s", newName), ex); } finally { db.releaseSingleUserCaseWriteLock(); } if (updatedHost != null) { fireChangeEvent(updatedHost); } return updatedHost; } /** * Delete a host. Name comparison is case-insensitive. * * @param name Name of the host to delete. * * @return The id of the deleted host or null if no host was deleted. 
	/**
	 * Delete a host. Name comparison is case-insensitive.
	 *
	 * @param name Name of the host to delete.
	 *
	 * @return The id of the deleted host or null if no host was deleted.
	 *
	 * @throws TskCoreException if the host has dependent records or a database
	 *                          error occurs.
	 */
	public Long deleteHost(String name) throws TskCoreException {
		if (name == null) {
			throw new TskCoreException("Illegal argument passed to deleteHost: Name provided must be non-null");
		}

		// query to check if there are any dependencies on this host.  If so,
		// don't delete. The UNION gathers every table that can reference a
		// host id and joins back to tsk_hosts by (case-insensitive) name.
		String queryString = "SELECT COUNT(*) AS count FROM\n"
				+ "(SELECT obj_id AS id, host_id FROM data_source_info\n"
				+ "UNION\n"
				+ "SELECT id, scope_host_id AS host_id FROM tsk_os_account_realms\n"
				+ "UNION\n"
				+ "SELECT id, host_id FROM tsk_os_account_attributes\n"
				+ "UNION\n"
				+ "SELECT id, host_id FROM tsk_host_address_map) children\n"
				+ "INNER JOIN tsk_hosts h ON children.host_id = h.id WHERE LOWER(h.name)=LOWER(?)";

		String deleteString = "DELETE FROM tsk_hosts WHERE LOWER(name) = LOWER(?)";

		CaseDbTransaction trans = this.db.beginTransaction();
		try {
			// check if host has any child data sources.  if so, don't delete and throw exception.
			PreparedStatement query = trans.getConnection().getPreparedStatement(queryString, Statement.NO_GENERATED_KEYS);
			query.clearParameters();
			query.setString(1, name);
			try (ResultSet queryResults = query.executeQuery()) {
				if (queryResults.next() && queryResults.getLong("count") > 0) {
					throw new TskCoreException(String.format("Host with name '%s' has child data and cannot be deleted.", name));
				}
			}

			// otherwise, delete the host
			PreparedStatement update = trans.getConnection().getPreparedStatement(deleteString, Statement.RETURN_GENERATED_KEYS);
			update.clearParameters();
			update.setString(1, name);
			int numUpdated = update.executeUpdate();

			// get ids for deleted.
			// NOTE(review): generated keys after a DELETE are driver-dependent;
			// presumably this relies on SQLite/PostgreSQL returning the row id —
			// confirm before changing drivers.
			Long hostId = null;

			if (numUpdated > 0) {
				try (ResultSet updateResult = update.getGeneratedKeys()) {
					if (updateResult.next()) {
						hostId = updateResult.getLong(1);
					}
				}
			}

			// Commit before firing the event; trans is nulled so the finally
			// block does not roll back a committed transaction.
			trans.commit();
			trans = null;

			fireDeletedEvent(new Host(hostId, name));
			return hostId;
		} catch (SQLException ex) {
			throw new TskCoreException(String.format("Error deleting host with name %s", name), ex);
		} finally {
			if (trans != null) {
				trans.rollback();
			}
		}
	}

	/**
	 * Get all data sources associated with a given host.
	 *
	 * @param host The host.
	 *
	 * @return The list of data sources corresponding to the host.
	 *
	 * @throws TskCoreException
	 */
	public List getDataSourcesForHost(Host host) throws TskCoreException {
		String queryString = "SELECT * FROM data_source_info WHERE host_id = " + host.getHostId();
		List dataSources = new ArrayList<>();
		db.acquireSingleUserCaseReadLock();
		try (CaseDbConnection connection = this.db.getConnection();
				Statement s = connection.createStatement();
				ResultSet rs = connection.executeQuery(s, queryString)) {

			while (rs.next()) {
				dataSources.add(db.getDataSource(rs.getLong("obj_id")));
			}

			return dataSources;
		} catch (SQLException | TskDataException ex) {
			throw new TskCoreException(String.format("Error getting data sources for host " + host.getName()), ex);
		} finally {
			db.releaseSingleUserCaseReadLock();
		}
	}

	/**
	 * Get active host with given name.
	 *
	 * @param name Host name to look for.
	 *
	 * @return Optional with host. Optional.empty if no matching host is found.
	 *
	 * @throws TskCoreException
	 */
	public Optional getHostByName(String name) throws TskCoreException {
		// Delegates to the connection-taking overload with a fresh connection.
		try (CaseDbConnection connection = db.getConnection()) {
			return getHostByName(name, connection);
		}
	}
* * @throws TskCoreException */ private Optional getHostByName(String name, CaseDbConnection connection) throws TskCoreException { String queryString = "SELECT * FROM tsk_hosts" + " WHERE LOWER(name) = LOWER(?)" + " AND db_status = " + Host.HostDbStatus.ACTIVE.getId(); db.acquireSingleUserCaseReadLock(); try { PreparedStatement s = connection.getPreparedStatement(queryString, Statement.RETURN_GENERATED_KEYS); s.clearParameters(); s.setString(1, name); try (ResultSet rs = s.executeQuery()) { if (!rs.next()) { return Optional.empty(); // no match found } else { return Optional.of(new Host(rs.getLong("id"), rs.getString("name"), Host.HostDbStatus.fromID(rs.getInt("db_status")))); } } } catch (SQLException ex) { throw new TskCoreException(String.format("Error getting host with name = %s", name), ex); } finally { db.releaseSingleUserCaseReadLock(); } } /** * Get host with the given id. * * @param id The id of the host. * * @return Optional with host. Optional.empty if no matching host is found. * * @throws TskCoreException */ public Optional getHostById(long id) throws TskCoreException { try (CaseDbConnection connection = db.getConnection()) { return getHostById(id, connection); } } /** * Get host with given id. * * @param id The id of the host. * @param connection Database connection to use. * * @return Optional with host. Optional.empty if no matching host is found. 
* * @throws TskCoreException */ private Optional getHostById(long id, CaseDbConnection connection) throws TskCoreException { String queryString = "SELECT * FROM tsk_hosts WHERE id = " + id; db.acquireSingleUserCaseReadLock(); try (Statement s = connection.createStatement(); ResultSet rs = connection.executeQuery(s, queryString)) { if (rs.next()) { return Optional.of(new Host(rs.getLong("id"), rs.getString("name"), Host.HostDbStatus.fromID(rs.getInt("db_status")))); } else { return Optional.empty(); } } catch (SQLException ex) { throw new TskCoreException(String.format("Error getting host with id: " + id), ex); } finally { db.releaseSingleUserCaseReadLock(); } } /** * Get all hosts that have a status of ACTIVE. * * @return Collection of hosts that have ACTIVE status. * * @throws TskCoreException */ public List getAllHosts() throws TskCoreException { String queryString = "SELECT * FROM tsk_hosts WHERE db_status = " + HostDbStatus.ACTIVE.getId(); List hosts = new ArrayList<>(); db.acquireSingleUserCaseReadLock(); try (CaseDbConnection connection = this.db.getConnection(); Statement s = connection.createStatement(); ResultSet rs = connection.executeQuery(s, queryString)) { while (rs.next()) { hosts.add(new Host(rs.getLong("id"), rs.getString("name"), Host.HostDbStatus.fromID(rs.getInt("db_status")))); } return hosts; } catch (SQLException ex) { throw new TskCoreException(String.format("Error getting hosts"), ex); } finally { db.releaseSingleUserCaseReadLock(); } } /** * Get host for the given data source. * * @param dataSource The data source to look up the host for. * * @return The host for this data source (will not be null). * * @throws TskCoreException if no host is found or an error occurs. */ public Host getHostByDataSource(DataSource dataSource) throws TskCoreException { return getHostByDataSource(dataSource.getId()); } /** * Get host for the given data source ID. * * @param dataSourceId The data source ID to look up the host for. 
* * @return The host for this data source (will not be null). * * @throws TskCoreException if no host is found or an error occurs. */ Host getHostByDataSource(long dataSourceId) throws TskCoreException { String queryString = "SELECT tsk_hosts.id AS hostId, tsk_hosts.name AS name, tsk_hosts.db_status AS db_status FROM \n" + "tsk_hosts INNER JOIN data_source_info \n" + "ON tsk_hosts.id = data_source_info.host_id \n" + "WHERE data_source_info.obj_id = " + dataSourceId; db.acquireSingleUserCaseReadLock(); try (CaseDbConnection connection = this.db.getConnection(); Statement s = connection.createStatement(); ResultSet rs = connection.executeQuery(s, queryString)) { if (!rs.next()) { throw new TskCoreException(String.format("Host not found for data source with ID = %d", dataSourceId)); } else { return new Host(rs.getLong("hostId"), rs.getString("name"), Host.HostDbStatus.fromID(rs.getInt("db_status"))); } } catch (SQLException ex) { throw new TskCoreException(String.format("Error getting host for data source with ID = %d", dataSourceId), ex); } finally { db.releaseSingleUserCaseReadLock(); } } /** * Merge source host into destination host. When complete: - All realms will * have been moved into the destination host or merged with existing realms * in the destination host. - All references to the source host will be * updated to reference the destination host. - The source host will be * updated so that it will no longer be returned by any methods apart from * get by host id. * * @param sourceHost The source host. * @param destHost The destination host. 
* * @throws TskCoreException */ public void mergeHosts(Host sourceHost, Host destHost) throws TskCoreException { String query = ""; CaseDbTransaction trans = null; try { trans = db.beginTransaction(); // Merge or move any realms associated with the source host List realms = db.getOsAccountRealmManager().getRealmsByHost(sourceHost, trans.getConnection()); for (OsAccountRealm realm : realms) { db.getOsAccountRealmManager().moveOrMergeRealm(realm, destHost, trans); } try (Statement s = trans.getConnection().createStatement()) { // Update references to the source host // tsk_host_address_map has a unique constraint on host_id, addr_obj_id, time, // so delete any rows that would be duplicates. query = "DELETE FROM tsk_host_address_map " + "WHERE id IN ( " + "SELECT " + " sourceMapRow.id " + "FROM " + " tsk_host_address_map destMapRow " + "INNER JOIN tsk_host_address_map sourceMapRow ON destMapRow.addr_obj_id = sourceMapRow.addr_obj_id AND destMapRow.time = sourceMapRow.time " + "WHERE destMapRow.host_id = " + destHost.getHostId() + " AND sourceMapRow.host_id = " + sourceHost.getHostId() + " )"; s.executeUpdate(query); query = makeOsAccountUpdateQuery("tsk_host_address_map", "host_id", sourceHost, destHost); s.executeUpdate(query); query = makeOsAccountUpdateQuery("tsk_os_account_attributes", "host_id", sourceHost, destHost); s.executeUpdate(query); query = makeOsAccountUpdateQuery("data_source_info", "host_id", sourceHost, destHost); s.executeUpdate(query); // Mark the source host as merged and change the name to a random string. 
String mergedName = makeMergedHostName(); query = "UPDATE tsk_hosts SET merged_into = " + destHost.getHostId() + ", db_status = " + Host.HostDbStatus.MERGED.getId() + ", name = '" + mergedName + "' " + " WHERE id = " + sourceHost.getHostId(); s.executeUpdate(query); } trans.commit(); trans = null; // Fire events for updated and deleted hosts fireChangeEvent(sourceHost); fireDeletedEvent(destHost); } catch (SQLException ex) { throw new TskCoreException("Error executing query: " + query, ex); } finally { if (trans != null) { trans.rollback(); } } } /** * Create the query to update the host id column to the merged host. * * @param tableName Name of table to update. * @param columnName Name of the column containing the host id. * @param sourceHost The source host. * @param destHost The destination host. * * @return The query. */ private String makeOsAccountUpdateQuery(String tableName, String columnName, Host sourceHost, Host destHost) { return "UPDATE " + tableName + " SET " + columnName + " = " + destHost.getHostId() + " WHERE " + columnName + " = " + sourceHost.getHostId(); } /** * Create a random name for hosts that have been merged. * * @return The random signature. */ private String makeMergedHostName() { return "MERGED " + UUID.randomUUID().toString(); } /** * Fires an event that a host has changed. Do not call this with an open * transaction. * * @param newValue The new value for the host. */ private void fireChangeEvent(Host newValue) { db.fireTSKEvent(new HostsUpdatedTskEvent(Collections.singletonList(newValue))); } /** * Fires an event that a host has been deleted. Do not call this with an * open transaction. * * @param deleted The deleted host. 
	/**
	 * Fires an event that a host has been deleted. Do not call this with an
	 * open transaction.
	 *
	 * @param deleted The deleted host.
	 */
	private void fireDeletedEvent(Host deleted) {
		// Listeners receive only the deleted host's id, not the Host object.
		db.fireTSKEvent(new HostsDeletedTskEvent(Collections.singletonList(deleted.getHostId())));
	}
}
For example, adding an index should be backwards compatible: * an older version of the software will still be able to open and use the case database, but * query performance may or may not be affected. Also, adding a column to a * table should be backwards compatible as older versions of the software should * simply ignore it. */ public final class CaseDbSchemaVersionNumber extends VersionNumber { /** * Constructor for CaseDBSchemaVersionNumber. The patch version is unused * and immutably 0. * * @param majorVersion The major version part. * @param minorVersion The minor version part. */ public CaseDbSchemaVersionNumber(int majorVersion, int minorVersion) { super(majorVersion, minorVersion, 0); } /** * Is a database with the given schema version openable by this version * number? * * @param dbSchemaVersion The schema version of the db want to check for * compatibility. * * @return true if the db schema version is compatible with this version. */ public boolean isCompatible(CaseDbSchemaVersionNumber dbSchemaVersion) { /* * Since we provide upgrade paths for schema versions greater than 1, this * amounts to checking if the major version part is greater than 1 and less * than this version's major number. */ final int dbMajor = dbSchemaVersion.getMajor(); return 1 < dbMajor && dbMajor <= getMajor(); } @Override public String toString() { return getMajor() + "." + getMinor(); } } sleuthkit-4.11.1/bindings/java/src/org/sleuthkit/datamodel/FileManager.java000644 000765 000024 00000011632 14137073413 027471 0ustar00carrierstaff000000 000000 /* * SleuthKit Java Bindings * * Copyright 2021 Basis Technology Corp. * Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
package org.sleuthkit.datamodel;

import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.List;
import java.util.Objects;

/**
 * Utility class for file-based database queries.
 */
public class FileManager {

	private final SleuthkitCase skCase;

	/**
	 * Constructs a FileManager.
	 *
	 * @param skCase The case database; must be non-null.
	 */
	FileManager(SleuthkitCase skCase) {
		// BUG FIX: the null-check message previously said "Blackboard"
		// (copy-paste from the Blackboard class); it now names this class.
		this.skCase = Objects.requireNonNull(skCase, "Cannot create FileManager for null SleuthkitCase");
	}

	/**
	 * Find all files with the exact given name and parentId.
	 *
	 * @param parentId Id of the parent folder to search.
	 * @param name     Exact file name to match.
	 *
	 * @return A list of matching files.
	 *
	 * @throws TskCoreException
	 */
	public List findFilesExactName(long parentId, String name) throws TskCoreException {
		String ext = SleuthkitCase.extractExtension(name);

		String query = "SELECT tsk_files.* FROM tsk_files JOIN tsk_objects ON tsk_objects.obj_id = tsk_files.obj_id "
				+ " WHERE tsk_objects.par_obj_id = ? AND tsk_files.name = ? ";

		// The extension column is indexed, so constraining on it speeds the query up.
		if (!ext.isEmpty()) {
			query += " AND tsk_files.extension = ? ";
		}

		skCase.acquireSingleUserCaseReadLock();
		try (SleuthkitCase.CaseDbConnection connection = skCase.getConnection()) {
			PreparedStatement statement = connection.getPreparedStatement(query, Statement.RETURN_GENERATED_KEYS);
			statement.clearParameters();
			statement.setLong(1, parentId);
			statement.setString(2, name);
			if (!ext.isEmpty()) {
				statement.setString(3, ext);
			}

			try (ResultSet rs = connection.executeQuery(statement)) {
				return skCase.resultSetToAbstractFiles(rs, connection);
			}
		} catch (SQLException ex) {
			throw new TskCoreException("SQLException thrown when calling query: " + query + " for parentID = " + parentId + " and name " + name, ex);
		} finally {
			skCase.releaseSingleUserCaseReadLock();
		}
	}

	/**
	 * Find all files with the exact given name and exact parent path.
	 *
	 * @param dataSource The data source to search within.
	 * @param name       Exact file name to match.
	 * @param path       Exact parent path.
	 *
	 * @return A list of matching files.
	 *
	 * @throws TskCoreException
	 */
	public List findFilesExactNameExactPath(Content dataSource, String name, String path) throws TskCoreException {

		// Database paths will always start and end with a forward slash, so add those if not present
		String normalizedPath = path;
		if (!normalizedPath.startsWith("/")) {
			normalizedPath = "/" + normalizedPath;
		}
		if (!normalizedPath.endsWith("/")) {
			normalizedPath = normalizedPath + "/";
		}

		String ext = SleuthkitCase.extractExtension(name);

		String query = "";
		skCase.acquireSingleUserCaseReadLock();
		try (SleuthkitCase.CaseDbConnection connection = skCase.getConnection()) {
			PreparedStatement statement;
			if (ext.isEmpty()) {
				query = "SELECT tsk_files.* FROM tsk_files JOIN tsk_objects ON tsk_objects.obj_id = tsk_files.obj_id WHERE parent_path = ? AND name = ? AND data_source_obj_id = ?";
				statement = connection.getPreparedStatement(query, Statement.RETURN_GENERATED_KEYS);
				statement.clearParameters();
				statement.setString(1, normalizedPath);
				statement.setString(2, name);
				statement.setLong(3, dataSource.getId());
			} else {
				// This is done as an optimization since the extension column in tsk_files is indexed
				query = "SELECT tsk_files.* FROM tsk_files JOIN tsk_objects ON tsk_objects.obj_id = tsk_files.obj_id WHERE extension = ? AND parent_path = ? AND name = ? AND data_source_obj_id = ?";
				statement = connection.getPreparedStatement(query, Statement.RETURN_GENERATED_KEYS);
				statement.clearParameters();
				statement.setString(1, ext);
				statement.setString(2, normalizedPath);
				statement.setString(3, name);
				statement.setLong(4, dataSource.getId());
			}

			try (ResultSet rs = connection.executeQuery(statement)) {
				return skCase.resultSetToAbstractFiles(rs, connection);
			}
		} catch (SQLException ex) {
			throw new TskCoreException("SQLException thrown when calling query: " + query + " for parent path = " + path + " and name " + name, ex);
		} finally {
			skCase.releaseSingleUserCaseReadLock();
		}
	}
}
package org.sleuthkit.datamodel;

/**
 * Abstraction over the SQL dialect differences between the two case-database
 * back ends (SQLite and PostgreSQL). Each method returns the dialect-specific
 * SQL fragment for one concept.
 */
interface SQLHelper {

	/** SQL type used for auto-incrementing primary key columns. */
	String getPrimaryKey();

	/** SQL type used for 64-bit integer columns. */
	String getBigIntType();

	/** SQL type used for binary blob columns. */
	String getBlobType();

	/**
	 * Name of the description column in the tsk_vs_parts table; it differs
	 * between back ends because "desc" is reserved in PostgreSQL.
	 */
	String getVSDescColName();

	/**
	 * PostgreSQL dialect.
	 */
	class PostgreSQLHelper implements SQLHelper {

		@Override
		public String getPrimaryKey() {
			return "BIGSERIAL";
		}

		@Override
		public String getBigIntType() {
			return "BIGINT";
		}

		@Override
		public String getBlobType() {
			return "BYTEA";
		}

		@Override
		public String getVSDescColName() {
			return "descr";
		}
	}

	/**
	 * SQLite dialect.
	 */
	class SQLiteHelper implements SQLHelper {

		@Override
		public String getPrimaryKey() {
			return "INTEGER";
		}

		@Override
		public String getBigIntType() {
			return "INTEGER";
		}

		@Override
		public String getBlobType() {
			return "BLOB";
		}

		@Override
		public String getVSDescColName() {
			return "desc";
		}
	}
}
package org.sleuthkit.datamodel;

import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.logging.Level;
import java.util.logging.Logger;
import org.sleuthkit.datamodel.Blackboard.BlackboardException;
import org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE;
import org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE;
import org.sleuthkit.datamodel.SleuthkitCase.CaseDbConnection;
import org.sleuthkit.datamodel.SleuthkitCase.CaseDbTransaction;
import org.sleuthkit.datamodel.SleuthkitCase.ObjectInfo;

/**
 * Implements some general methods from the Content interface common across many
 * content sub types
 */
public abstract class AbstractContent implements Content {

	// Shared artifact type for the single TSK_GEN_INFO artifact per content.
	private final static BlackboardArtifact.Type GEN_INFO_TYPE = new BlackboardArtifact.Type(ARTIFACT_TYPE.TSK_GEN_INFO);
	// Sentinel for "parent id not known".
	public final static long UNKNOWN_ID = -1;
	private final SleuthkitCase db;
	private final long objId;
	private final String name;
	// Lazily loaded; volatile so racing readers see a fully published value.
	private volatile Content parent;
	private volatile String uniquePath;
	protected long parentId;
	// Cached child info; checkedHasChildren guards the hasChildren cache.
	private volatile boolean hasChildren;
	private volatile boolean checkedHasChildren;
	private volatile int childrenCount;
	// Lazily loaded TSK_GEN_INFO artifact; null until fetched/created.
	private BlackboardArtifact genInfoArtifact = null;

	protected AbstractContent(SleuthkitCase db, long obj_id, String name) {
		this.db = db;
		this.objId = obj_id;
		this.name = name;
		this.parentId = UNKNOWN_ID;

		checkedHasChildren = false;
		hasChildren = false;
		childrenCount = -1;
	}

	@Override
	public String getName() {
		return this.name;
	}

	/*
	 * This base implementation simply walks the hierarchy appending its own
	 * name to the result of calling its parent's getUniquePath() method (with
	 * interleaving forward slashes).
	 */
	@Override
	public String getUniquePath() throws TskCoreException {
		// It is possible that multiple threads could be doing this calculation
		// simultaneously, but it's worth the potential extra processing to prevent deadlocks.
		if (uniquePath == null) {
			String tempUniquePath = "";
			if (!name.isEmpty()) {
				tempUniquePath = "/" + getName();
			}

			Content myParent = getParent();
			if (myParent != null) {
				tempUniquePath = myParent.getUniquePath() + tempUniquePath;
			}

			// Don't update uniquePath until it is complete.
			uniquePath = tempUniquePath;
		}
		return uniquePath;
	}

	@Override
	public boolean hasChildren() throws TskCoreException {
		// Return the cached answer if we have already asked the database.
		if (checkedHasChildren == true) {
			return hasChildren;
		}

		hasChildren = this.getSleuthkitCase().getHasChildren(this);
		checkedHasChildren = true;

		return hasChildren;
	}

	@Override
	public int getChildrenCount() throws TskCoreException {
		// -1 means "not yet computed"; any other value is the cached count.
		if (childrenCount != -1) {
			return childrenCount;
		}

		childrenCount = this.getSleuthkitCase().getContentChildrenCount(this);

		hasChildren = childrenCount > 0;
		checkedHasChildren = true;

		return childrenCount;
	}

	@Override
	public Content getParent() throws TskCoreException {
		// It is possible that multiple threads could be doing this calculation
		// simultaneously, but it's worth the potential extra processing to prevent deadlocks.
		if (parent == null) {
			ObjectInfo parentInfo;
			parentInfo = db.getParentInfo(this);
			if (parentInfo == null) {
				parent = null;
			} else {
				parent = db.getContentById(parentInfo.getId());
			}
		}
		return parent;
	}

	void setParent(Content parent) {
		this.parent = parent;
	}

	/**
	 * Set the ID of the this AbstractContent's parent
	 *
	 * @param parentId the ID of the parent. Note: use
	 *                 AbstractContent.UNKNOWN_ID if the parent's ID is not
	 *                 known.
	 */
	void setParentId(long parentId) {
		this.parentId = parentId;
	}

	@Override
	public long getId() {
		return this.objId;
	}
	/**
	 * Gets all children of this abstract content, if any.
	 *
	 * @return A list of the children.
	 *
	 * @throws TskCoreException if there was an error querying the case
	 *                          database.
	 */
	@Override
	public List getChildren() throws TskCoreException {
		List children = new ArrayList();

		// Children can be both files and blackboard artifacts.
		children.addAll(getSleuthkitCase().getAbstractFileChildren(this));
		children.addAll(getSleuthkitCase().getBlackboardArtifactChildren(this));

		return children;
	}

	/**
	 * Gets the object ids of objects, if any, that are children of this
	 * abstract content.
	 *
	 * @return A list of the object ids.
	 *
	 * @throws TskCoreException if there was an error querying the case
	 *                          database.
	 */
	@Override
	public List getChildrenIds() throws TskCoreException {
		List childrenIDs = new ArrayList();

		childrenIDs.addAll(getSleuthkitCase().getAbstractFileChildrenIds(this));
		childrenIDs.addAll(getSleuthkitCase().getBlackboardArtifactChildrenIds(this));

		return childrenIDs;
	}

	// classes should override this if they can be a data source
	@Override
	public Content getDataSource() throws TskCoreException {
		// Walk up the parent chain until a subclass override answers.
		Content myParent = getParent();
		if (myParent == null) {
			return null;
		}
		return myParent.getDataSource();
	}

	/**
	 * Return whether this content has a Pool above it
	 *
	 * @return true if there is a Pool object in the parent structure
	 *
	 * @throws TskCoreException
	 */
	boolean isPoolContent() throws TskCoreException {
		return getPool() != null;
	}

	/**
	 * Get the pool volume
	 *
	 * @return the volume above this content and below a Pool object or null if
	 *         not found
	 *
	 * @throws TskCoreException
	 */
	Volume getPoolVolume() throws TskCoreException {
		Content myParent = getParent();
		if (myParent == null) {
			return null;
		}

		if (!(myParent instanceof AbstractContent)) {
			return null;
		}

		if (myParent instanceof Volume) {
			// This is potentially it, but need to check that this is a volume under a pool
			if (((Volume) myParent).isPoolContent()) {
				return (Volume) myParent;
			} else {
				// There are no pools in the hierarchy, so we're done
				return null;
			}
		}

		// Try one level higher
		return ((AbstractContent) myParent).getPoolVolume();
	}

	/**
	 * Get the pool
	 *
	 * @return the pool above this content or null if not found
	 *
	 * @throws TskCoreException
	 */
	Pool getPool() throws TskCoreException {
		Content myParent = getParent();
		if (myParent == null) {
			return null;
		}

		if (!(myParent instanceof AbstractContent)) {
			return null;
		}

		if (myParent instanceof Pool) {
			return (Pool) myParent;
		}

		// Try one level higher
		return ((AbstractContent) myParent).getPool();
	}

	/**
	 * Gets handle of SleuthkitCase to which this content belongs
	 *
	 * @return the case handle
	 */
	public SleuthkitCase getSleuthkitCase() {
		return db;
	}

	@Override
	public boolean equals(Object obj) {
		if (obj == null) {
			return false;
		}
		if (getClass() != obj.getClass()) {
			return false;
		}
		final AbstractContent other = (AbstractContent) obj;
		if (this.objId != other.objId) {
			return false;
		}
		try {
			// New children may have been added to an existing content
			// object in which case they are not equal.
			// NOTE(review): equals() depending on mutable child counts is
			// unusual and makes equality time-dependent - confirm callers
			// rely on this before changing.
			if (this.getChildrenCount() != other.getChildrenCount()) {
				return false;
			}
		} catch (TskCoreException ex) {
			Logger.getLogger(AbstractContent.class.getName()).log(Level.SEVERE, null, ex);
		}
		return true;
	}

	@Override
	public int hashCode() {
		// Folds both 32-bit halves of the object id into the hash.
		int hash = 7 + (int) (this.objId ^ (this.objId >>> 32));
		try {
			hash = 41 * hash + this.getChildrenCount();
		} catch (TskCoreException ex) {
			Logger.getLogger(AbstractContent.class.getName()).log(Level.SEVERE, null, ex);
		}
		return hash;
	}

	@Deprecated
	@Override
	public BlackboardArtifact newArtifact(int artifactTypeID) throws TskCoreException {
		// don't let them make more than 1 GEN_INFO
		if (artifactTypeID == ARTIFACT_TYPE.TSK_GEN_INFO.getTypeID()) {
			return getGenInfoArtifact(true);
		}

		BlackboardArtifact.Type artifactType = db.getArtifactType(artifactTypeID);
		switch (artifactType.getCategory()) {
			case DATA_ARTIFACT:
				return this.newDataArtifact(artifactType, Collections.emptyList());
			case ANALYSIS_RESULT: {
				AnalysisResultAdded addedResult = this.newAnalysisResult(artifactType, Score.SCORE_UNKNOWN, null, null, null, Collections.emptyList());
				return addedResult.getAnalysisResult();
			}
			default:
				throw new TskCoreException(String.format("Unknown category: %s for artifact type id: %d", artifactType.getCategory().getName(), artifactTypeID));
		}
	}

	@Override
	public AnalysisResultAdded newAnalysisResult(BlackboardArtifact.Type artifactType, Score score, String conclusion, String configuration, String justification, Collection attributesList) throws TskCoreException {
		long dataSourceObjectId = this.getDataSource().getId();
		CaseDbTransaction trans = db.beginTransaction();
		try {
			AnalysisResultAdded resultAdded = db.getBlackboard().newAnalysisResult(artifactType, objId, dataSourceObjectId, score, conclusion, configuration, justification, attributesList, trans);

			trans.commit();
			return resultAdded;
		} catch (BlackboardException ex) {
			// NOTE(review): a TskCoreException thrown inside the try would
			// propagate without an explicit rollback here - confirm that
			// CaseDbTransaction cleans up in that path.
			trans.rollback();
			throw new TskCoreException(String.format("Error adding analysis result to content with objId = %d.", objId), ex);
		}
	}

	@Override
	public AnalysisResultAdded newAnalysisResult(BlackboardArtifact.Type artifactType, Score score, String conclusion, String configuration, String justification, Collection attributesList, long dataSourceId) throws TskCoreException {
		long dataSourceObjectId = dataSourceId;
		CaseDbTransaction trans = db.beginTransaction();
		try {
			AnalysisResultAdded resultAdded = db.getBlackboard().newAnalysisResult(artifactType, objId, dataSourceObjectId, score, conclusion, configuration, justification, attributesList, trans);

			trans.commit();
			return resultAdded;
		} catch (BlackboardException ex) {
			trans.rollback();
			throw new TskCoreException(String.format("Error adding analysis result to content with objId = %d.", objId), ex);
		}
	}

	@Override
	public DataArtifact newDataArtifact(BlackboardArtifact.Type artifactType, Collection attributesList, Long osAccountId) throws TskCoreException {
		DataArtifact artifact = db.getBlackboard().newDataArtifact(artifactType, objId, this.getDataSource().getId(), attributesList, osAccountId);
		if (osAccountId != null) {
			// Record that the OS account was seen on this data source.
			try (CaseDbConnection connection = db.getConnection()) {
				db.getOsAccountManager().newOsAccountInstance(osAccountId, getDataSource().getId(), OsAccountInstance.OsAccountInstanceType.ACCESSED, connection);
			}
		}
		return artifact;
	}

	@Override
	public DataArtifact newDataArtifact(BlackboardArtifact.Type artifactType, Collection attributesList, Long osAccountId, long dataSourceId) throws TskCoreException {
		DataArtifact artifact = db.getBlackboard().newDataArtifact(artifactType, objId, dataSourceId, attributesList, osAccountId);
		if (osAccountId != null) {
			try (CaseDbConnection connection = db.getConnection()) {
				db.getOsAccountManager().newOsAccountInstance(osAccountId, dataSourceId, OsAccountInstance.OsAccountInstanceType.ACCESSED, connection);
			}
		}
		return artifact;
	}

	@Override
	public DataArtifact newDataArtifact(BlackboardArtifact.Type artifactType, Collection attributesList) throws TskCoreException {
		return newDataArtifact(artifactType, attributesList, null);
	}

	@Deprecated
	@SuppressWarnings("deprecation")
	@Override
	public BlackboardArtifact newArtifact(BlackboardArtifact.ARTIFACT_TYPE type) throws TskCoreException {
		return newArtifact(type.getTypeID());
	}

	@Override
	public ArrayList getArtifacts(String artifactTypeName) throws TskCoreException {
		return getArtifacts(db.getArtifactType(artifactTypeName).getTypeID());
	}

	@Override
	public ArrayList getArtifacts(int artifactTypeID) throws TskCoreException {
		if (artifactTypeID == ARTIFACT_TYPE.TSK_GEN_INFO.getTypeID()) {
			if (genInfoArtifact == null) // don't make one if it doesn't already exist
			{
				getGenInfoArtifact(false);
			}

			ArrayList list = new ArrayList();
			// genInfoArtifact could still be null if there isn't an artifact
			if (genInfoArtifact != null) {
				list.add(genInfoArtifact);
			}
			return list;
		}
		return db.getBlackboardArtifacts(artifactTypeID, objId);
	}

	@Override
	public ArrayList getArtifacts(BlackboardArtifact.ARTIFACT_TYPE type) throws TskCoreException {
		return getArtifacts(type.getTypeID());
	}

	@Override
	public BlackboardArtifact getGenInfoArtifact() throws TskCoreException {
		return getGenInfoArtifact(true);
	}

	@Override
	public BlackboardArtifact getGenInfoArtifact(boolean create) throws TskCoreException {
		// Return the cached artifact if we already have it.
		if (genInfoArtifact != null) {
			return genInfoArtifact;
		}

		// go to db directly to avoid infinite loop
		ArrayList arts = db.getBlackboardArtifacts(BlackboardArtifact.ARTIFACT_TYPE.TSK_GEN_INFO, objId);
		BlackboardArtifact retArt;
		if (arts.isEmpty()) {
			if (create) {
				retArt = this.newDataArtifact(GEN_INFO_TYPE, Collections.emptyList());
			} else {
				return null;
			}
		} else {
			retArt = arts.get(0);
		}
		genInfoArtifact = retArt;
		return retArt;
	}

	@Override
	public ArrayList getGenInfoAttributes(ATTRIBUTE_TYPE attr_type) throws TskCoreException {
		ArrayList returnList = new ArrayList();

		if (genInfoArtifact == null) {
			getGenInfoArtifact(false);
			if (genInfoArtifact == null) {
				return returnList;
			}
		}

		// Filter the GEN_INFO attributes down to the requested type.
		for (BlackboardAttribute attribute : genInfoArtifact.getAttributes()) {
			if (attribute.getAttributeType().getTypeID() == attr_type.getTypeID()) {
				returnList.add(attribute);
			}
		}

		return returnList;
	}

	@Override
	public ArrayList getAllArtifacts() throws TskCoreException {
		return db.getMatchingArtifacts("WHERE obj_id = " + objId); //NON-NLS
	}

	@Override
	public List getAllAnalysisResults() throws TskCoreException {
		return db.getBlackboard().getAnalysisResults(objId);
	}

	@Override
	public List getAllDataArtifacts() throws TskCoreException {
		return db.getBlackboard().getDataArtifactsBySource(objId);
	}

	@Override
	public Score getAggregateScore() throws TskCoreException {
		return db.getScoringManager().getAggregateScore(objId);
	}

	@Override
	public List getAnalysisResults(BlackboardArtifact.Type artifactType) throws TskCoreException {
		return db.getBlackboard().getAnalysisResults(objId, artifactType.getTypeID()); //NON-NLS
	}

	@Override
	public long getArtifactsCount(String artifactTypeName) throws TskCoreException {
		return db.getBlackboardArtifactsCount(artifactTypeName, objId);
	}

	// NOTE: this method continues beyond the end of this chunk.
	@Override
	public long getArtifactsCount(int artifactTypeID) throws TskCoreException {
		return
db.getBlackboardArtifactsCount(artifactTypeID, objId); } @Override public long getArtifactsCount(ARTIFACT_TYPE type) throws TskCoreException { return db.getBlackboardArtifactsCount(type, objId); } @Override public long getAllArtifactsCount() throws TskCoreException { return db.getBlackboardArtifactsCount(objId); } @Override public Set getHashSetNames() throws TskCoreException { Set hashNames = new HashSet(); ArrayList artifacts = getArtifacts(BlackboardArtifact.ARTIFACT_TYPE.TSK_HASHSET_HIT); for (BlackboardArtifact a : artifacts) { BlackboardAttribute attribute = a.getAttribute(new BlackboardAttribute.Type(ATTRIBUTE_TYPE.TSK_SET_NAME)); if (attribute != null) { hashNames.add(attribute.getValueString()); } } return Collections.unmodifiableSet(hashNames); } @Override public String toString() { return toString(true); } public String toString(boolean preserveState) { if (preserveState) { return "AbstractContent [\t" + "objId " + String.format("%010d", objId) + "\t" //NON-NLS + "name " + name + "\t" + "parentId " + parentId + "\t" //NON-NLS + "\t" + "checkedHasChildren " + checkedHasChildren //NON-NLS + "\t" + "hasChildren " + hasChildren //NON-NLS + "\t" + "childrenCount " + childrenCount //NON-NLS + "uniquePath " + uniquePath + "]\t"; //NON-NLS } else { try { if (getParent() != null) { return "AbstractContent [\t" + "objId " + String.format("%010d", objId) //NON-NLS + "\t" + "name " + name //NON-NLS + "\t" + "checkedHasChildren " + checkedHasChildren //NON-NLS + "\t" + "hasChildren " + hasChildren //NON-NLS + "\t" + "childrenCount " + childrenCount //NON-NLS + "\t" + "getUniquePath " + getUniquePath() //NON-NLS + "\t" + "getParent " + getParent().getId() + "]\t"; //NON-NLS } else { return "AbstractContent [\t" + "objId " //NON-NLS + String.format("%010d", objId) + "\t" + "name " + name //NON-NLS + "\t" + "checkedHasChildren " + checkedHasChildren //NON-NLS + "\t" + "hasChildren " + hasChildren //NON-NLS + "\t" + "childrenCount " + childrenCount //NON-NLS + "\t" + 
"uniquePath " + getUniquePath() //NON-NLS + "\t" + "parentId " + parentId + "]\t"; //NON-NLS } } catch (TskCoreException ex) { Logger.getLogger(AbstractContent.class.getName()).log(Level.SEVERE, "Could not find Parent", ex); //NON-NLS return "AbstractContent [\t" + "objId " + String.format("%010d", objId) + "\t" //NON-NLS + "name " + name + "\t" + "parentId " + parentId + "\t" //NON-NLS + "\t" + "checkedHasChildren " + checkedHasChildren //NON-NLS + "\t" + "hasChildren " + hasChildren //NON-NLS + "\t" + "childrenCount " + childrenCount //NON-NLS + "uniquePath " + uniquePath + "]\t"; //NON-NLS } } } } sleuthkit-4.11.1/bindings/java/src/org/sleuthkit/datamodel/MessageFolder.java000644 000765 000024 00000002755 14137073413 030045 0ustar00carrierstaff000000 000000 /* * Sleuth Kit Data Model * * Copyright 2011-2017 Basis Technology Corp. * Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
*/ package org.sleuthkit.datamodel; /** * This class is used to abstract a message folder in an email container, * like an mbox or pst file */ class MessageFolder { private final long srcObjID; private final String pathName; private boolean hasSubfolders; public MessageFolder(String pathName, long objID) { this(pathName, objID, false); } public MessageFolder(String pathName, long objID, boolean hasSubfolders) { this.pathName = pathName; this.srcObjID = objID; this.hasSubfolders = hasSubfolders; } public String getName() { return this.pathName; } public long getSrcOBjID() { return this.srcObjID; } public synchronized boolean hasSubfolders() { return this.hasSubfolders; } public synchronized void setHasSubfolders(boolean hasSubFolders) { this.hasSubfolders = hasSubFolders; } } sleuthkit-4.11.1/bindings/java/src/org/sleuthkit/datamodel/CommunicationsManager.java000644 000765 000024 00000160240 14137073414 031603 0ustar00carrierstaff000000 000000 /* * Sleuth Kit Data Model * * Copyright 2017-2021 Basis Technology Corp. * Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
 */
package org.sleuthkit.datamodel;

import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.logging.Level;
import java.util.logging.Logger;
import org.sleuthkit.datamodel.Blackboard.BlackboardException;
import org.sleuthkit.datamodel.SleuthkitCase.CaseDbConnection;
import org.sleuthkit.datamodel.SleuthkitCase.CaseDbTransaction;
import static org.sleuthkit.datamodel.SleuthkitCase.closeConnection;
import static org.sleuthkit.datamodel.SleuthkitCase.closeResultSet;
import static org.sleuthkit.datamodel.SleuthkitCase.closeStatement;

/**
 * Provides an API to create Accounts and communications/relationships between
 * accounts.
 */
public final class CommunicationsManager {

	private static final Logger LOGGER = Logger.getLogger(CommunicationsManager.class.getName());

	// Artifact type used for every account-instance artifact created by this manager.
	private static final BlackboardArtifact.Type ACCOUNT_TYPE = new BlackboardArtifact.Type(BlackboardArtifact.ARTIFACT_TYPE.TSK_ACCOUNT);

	// The case database this manager operates on.
	private final SleuthkitCase db;

	// Caches of account types already read from / written to the account_types
	// table, keyed both by type and by type name.
	// NOTE(review): the generic type parameters of these maps appear to have
	// been stripped during extraction (raw Map assigned from
	// ConcurrentHashMap<>); verify against the upstream source.
	private final Map accountTypeToTypeIdMap = new ConcurrentHashMap<>();
	private final Map typeNameToAccountTypeMap = new ConcurrentHashMap<>();

	// Artifact types that can represent a relationship between accounts.
	private static final Set RELATIONSHIP_ARTIFACT_TYPE_IDS
			= new HashSet(Arrays.asList(
					BlackboardArtifact.ARTIFACT_TYPE.TSK_MESSAGE.getTypeID(),
					BlackboardArtifact.ARTIFACT_TYPE.TSK_EMAIL_MSG.getTypeID(),
					BlackboardArtifact.ARTIFACT_TYPE.TSK_CONTACT.getTypeID(),
					BlackboardArtifact.ARTIFACT_TYPE.TSK_CALLLOG.getTypeID()
			));

	// Comma-separated form of the ids above, for direct use in SQL IN clauses.
	private static final String RELATIONSHIP_ARTIFACT_TYPE_IDS_CSV_STR = StringUtils.buildCSVString(RELATIONSHIP_ARTIFACT_TYPE_IDS);

	/**
	 * Construct a CommunicationsManager for the given SleuthkitCase.
* * @param skCase The SleuthkitCase * * @throws TskCoreException if there is in error initializing the account * types. */ CommunicationsManager(SleuthkitCase skCase) throws TskCoreException { this.db = skCase; initAccountTypes(); } /** * Make sure the predefined account types are in the account types table. * * @throws TskCoreException if there is an error reading the pre-existing * account types from the db. */ private void initAccountTypes() throws TskCoreException { db.acquireSingleUserCaseWriteLock(); try (CaseDbConnection connection = db.getConnection(); Statement statement = connection.createStatement();) { // Read the table int count = readAccountTypes(); if (0 == count) { // Table is empty, populate it with predefined types for (Account.Type type : Account.Type.PREDEFINED_ACCOUNT_TYPES) { try { statement.execute("INSERT INTO account_types (type_name, display_name) VALUES ( '" + type.getTypeName() + "', '" + type.getDisplayName() + "')"); //NON-NLS } catch (SQLException ex) { try (ResultSet resultSet = connection.executeQuery(statement, "SELECT COUNT(*) AS count FROM account_types WHERE type_name = '" + type.getTypeName() + "'")) { //NON-NLS resultSet.next(); if (resultSet.getLong("count") == 0) { throw ex; } } } try (ResultSet rs2 = connection.executeQuery(statement, "SELECT account_type_id FROM account_types WHERE type_name = '" + type.getTypeName() + "'")) { //NON-NLS rs2.next(); int typeID = rs2.getInt("account_type_id"); Account.Type accountType = new Account.Type(type.getTypeName(), type.getDisplayName()); this.accountTypeToTypeIdMap.put(accountType, typeID); this.typeNameToAccountTypeMap.put(type.getTypeName(), accountType); } } } } catch (SQLException ex) { LOGGER.log(Level.SEVERE, "Failed to add row to account_types", ex); } finally { db.releaseSingleUserCaseWriteLock(); } } /** * Reads in in the account types table and returns the number of account * types read in. * * @return The number of account types read. 
 *
 * @throws TskCoreException if there is a problem reading the account types.
 */
private int readAccountTypes() throws TskCoreException {
	CaseDbConnection connection = null;
	Statement statement = null;
	ResultSet resultSet = null;
	int count = 0;

	db.acquireSingleUserCaseReadLock();
	try {
		connection = db.getConnection();
		statement = connection.createStatement();

		// If the account_types table is already populated, say when opening a case, then load it
		resultSet = connection.executeQuery(statement, "SELECT COUNT(*) AS count FROM account_types"); //NON-NLS
		resultSet.next();
		if (resultSet.getLong("count") > 0) {
			resultSet.close();
			resultSet = connection.executeQuery(statement, "SELECT * FROM account_types");
			while (resultSet.next()) {
				// Cache each persisted type in both lookup maps.
				Account.Type accountType = new Account.Type(resultSet.getString("type_name"), resultSet.getString("display_name"));
				this.accountTypeToTypeIdMap.put(accountType, resultSet.getInt("account_type_id"));
				this.typeNameToAccountTypeMap.put(accountType.getTypeName(), accountType);
			}
			count = this.typeNameToAccountTypeMap.size();
		}
	} catch (SQLException ex) {
		throw new TskCoreException("Failed to read account_types", ex);
	} finally {
		// Static close helpers from SleuthkitCase; presumably null-safe
		// when an early step failed — TODO confirm.
		closeResultSet(resultSet);
		closeStatement(statement);
		closeConnection(connection);
		db.releaseSingleUserCaseReadLock();
	}

	return count;
}

/**
 * Gets the SleuthKit case.
 *
 * @return The SleuthKit case (case database) object.
 */
SleuthkitCase getSleuthkitCase() {
	return this.db;
}

/**
 * Add a custom account type that is not already defined in Account.Type.
 * Will not allow duplicates and will return existing type if the name is
 * already defined.
 *
 * @param accountTypeName account type that must be unique
 * @param displayName     account type display name
 *
 * @return Account.Type
 *
 * @throws TskCoreException if a critical error occurs within TSK core
 */
// NOTE: Full name given for Type for doxygen linking
public org.sleuthkit.datamodel.Account.Type addAccountType(String accountTypeName, String displayName) throws TskCoreException {
	Account.Type accountType = new Account.Type(accountTypeName, displayName);

	// check if already in map
	if (this.accountTypeToTypeIdMap.containsKey(accountType)) {
		return accountType;
	}

	// NOTE(review): accountTypeName/displayName are concatenated directly
	// into SQL below; if callers can pass untrusted input, a prepared
	// statement should be used instead — verify callers.
	CaseDbTransaction trans = db.beginTransaction();
	Statement s = null;
	ResultSet rs = null;
	try {
		s = trans.getConnection().createStatement();
		rs = trans.getConnection().executeQuery(s, "SELECT * FROM account_types WHERE type_name = '" + accountTypeName + "'"); //NON-NLS
		if (!rs.next()) {
			rs.close();

			s.execute("INSERT INTO account_types (type_name, display_name) VALUES ( '" + accountTypeName + "', '" + displayName + "')"); //NON-NLS

			// Read back the typeID
			rs = trans.getConnection().executeQuery(s, "SELECT * FROM account_types WHERE type_name = '" + accountTypeName + "'"); //NON-NLS
			rs.next();
			int typeID = rs.getInt("account_type_id");
			accountType = new Account.Type(rs.getString("type_name"), rs.getString("display_name"));
			this.accountTypeToTypeIdMap.put(accountType, typeID);
			this.typeNameToAccountTypeMap.put(accountTypeName, accountType);
			trans.commit();
			return accountType;
		} else {
			// Type already present in the table: cache and return it.
			// NOTE(review): this path returns without committing or rolling
			// back the transaction opened above — confirm whether a commit
			// is missing here or the transaction is released elsewhere.
			int typeID = rs.getInt("account_type_id");
			accountType = new Account.Type(rs.getString("type_name"), rs.getString("display_name"));
			this.accountTypeToTypeIdMap.put(accountType, typeID);
			return accountType;
		}
	} catch (SQLException ex) {
		trans.rollback();
		throw new TskCoreException("Error adding account type", ex);
	} finally {
		closeResultSet(rs);
		closeStatement(s);
	}
}

/**
 * Records that an account was used in a specific file.
Behind the scenes,
 * it will create a case-specific Account object if it does not already
 * exist and create the needed database entries (which currently include
 * making a BlackboardArtifact).
 *
 * @param accountType     account type
 * @param accountUniqueID unique account identifier (such as email address)
 * @param moduleName      module creating the account
 * @param sourceFile      source file the account was found in (for the
 *                        blackboard)
 *
 * @return AccountFileInstance
 *
 * @throws TskCoreException          If a critical error occurs within TSK
 *                                   core
 * @throws InvalidAccountIDException If the account identifier is not valid.
 */
// NOTE: Full name given for Type for doxygen linking
public AccountFileInstance createAccountFileInstance(org.sleuthkit.datamodel.Account.Type accountType, String accountUniqueID, String moduleName, Content sourceFile) throws TskCoreException, InvalidAccountIDException {

	// make or get the Account (unique at the case-level)
	Account account = getOrCreateAccount(accountType, normalizeAccountID(accountType, accountUniqueID));

	/*
	 * make or get the artifact. Will not create one if it already exists
	 * for the sourceFile. Such as an email PST that has the same email
	 * address multiple times. Only one artifact is created for each email
	 * message in that PST.
	 */
	BlackboardArtifact accountArtifact = getOrCreateAccountFileInstanceArtifact(accountType, normalizeAccountID(accountType, accountUniqueID), moduleName, sourceFile);

	// The account instance map was unused so we have removed it from the database,
	// but we expect we may need it so I am preserving this method comment and usage here.
	// add a row to Accounts to Instances mapping table
	// @@@ BC: Seems like we should only do this if we had to create the artifact.
	// But, it will probably fail to create a new one based on unique constraints.
	// addAccountFileInstanceMapping(account.getAccountID(), accountArtifact.getArtifactID());
	return new AccountFileInstance(accountArtifact, account);
}

/**
 * Get the Account with the given account type and account ID.
 *
 * @param accountType     account type
 * @param accountUniqueID unique account identifier (such as an email
 *                        address)
 *
 * @return Account, returns NULL is no matching account found
 *
 * @throws TskCoreException          If a critical error occurs within TSK
 *                                   core.
 * @throws InvalidAccountIDException If the account identifier is not valid.
 */
// NOTE: Full name given for Type for doxygen linking
public Account getAccount(org.sleuthkit.datamodel.Account.Type accountType, String accountUniqueID) throws TskCoreException, InvalidAccountIDException {
	Account account = null;
	db.acquireSingleUserCaseReadLock();
	try (CaseDbConnection connection = db.getConnection();
			Statement s = connection.createStatement();
			ResultSet rs = connection.executeQuery(s, "SELECT * FROM accounts WHERE account_type_id = " + getAccountTypeId(accountType)
					+ " AND account_unique_identifier = '" + normalizeAccountID(accountType, accountUniqueID) + "'");) { //NON-NLS
		if (rs.next()) {
			account = new Account(rs.getInt("account_id"), accountType, rs.getString("account_unique_identifier"));
		}
	} catch (SQLException ex) {
		// NOTE(review): the message says "account type id" but this query
		// fetches an account — looks copy-pasted; string left unchanged in
		// this documentation-only pass.
		throw new TskCoreException("Error getting account type id", ex);
	} finally {
		db.releaseSingleUserCaseReadLock();
	}

	return account;
}

/**
 * Adds relationships between the sender and each of the recipient account
 * instances and between all recipient account instances. All account
 * instances must be from the same data source.
 *
 * @param sender           Sender account, may be null.
 * @param recipients       List of recipients, may be empty.
 * @param sourceArtifact   Artifact that relationships were derived from.
 * @param relationshipType The type of relationships to be created.
 * @param dateTime         Date of communications/relationship, as epoch
 *                         seconds.
* * * @throws org.sleuthkit.datamodel.TskCoreException * @throws org.sleuthkit.datamodel.TskDataException If the all the accounts * and the relationship are * not from the same data * source, or if the * sourceArtifact and * relationshipType are not * compatible. */ // NOTE: Full name given for Type for doxygen linking public void addRelationships(AccountFileInstance sender, List recipients, BlackboardArtifact sourceArtifact, org.sleuthkit.datamodel.Relationship.Type relationshipType, long dateTime) throws TskCoreException, TskDataException { if (sourceArtifact.getDataSourceObjectID() == null) { throw new TskDataException("Source Artifact does not have a valid data source."); } if (relationshipType.isCreatableFrom(sourceArtifact) == false) { throw new TskDataException("Can not make a " + relationshipType.getDisplayName() + " relationship from a" + sourceArtifact.getDisplayName()); } /* * Enforce that all accounts and the relationship between them are from * the same 'source'. This is required for the queries to work * correctly. 
*/ // Currently we do not save the direction of communication List accountIDs = new ArrayList<>(); if (null != sender) { accountIDs.add(sender.getAccount().getAccountID()); if (!sender.getDataSourceObjectID().equals(sourceArtifact.getDataSourceObjectID())) { throw new TskDataException("Sender and relationship are from different data sources :" + "Sender source ID" + sender.getDataSourceObjectID() + " != relationship source ID" + sourceArtifact.getDataSourceObjectID()); } } for (AccountFileInstance recipient : recipients) { accountIDs.add(recipient.getAccount().getAccountID()); if (!recipient.getDataSourceObjectID().equals(sourceArtifact.getDataSourceObjectID())) { throw new TskDataException("Recipient and relationship are from different data sources :" + "Recipient source ID" + recipient.getDataSourceObjectID() + " != relationship source ID" + sourceArtifact.getDataSourceObjectID()); } } // Set up the query for the prepared statement String query = "INTO account_relationships (account1_id, account2_id, relationship_source_obj_id, date_time, relationship_type, data_source_obj_id ) " + "VALUES (?,?,?,?,?,?)"; switch (db.getDatabaseType()) { case POSTGRESQL: query = "INSERT " + query + " ON CONFLICT DO NOTHING"; break; case SQLITE: query = "INSERT OR IGNORE " + query; break; default: throw new TskCoreException("Unknown DB Type: " + db.getDatabaseType().name()); } CaseDbTransaction trans = db.beginTransaction(); try { SleuthkitCase.CaseDbConnection connection = trans.getConnection(); PreparedStatement preparedStatement = connection.getPreparedStatement(query, Statement.NO_GENERATED_KEYS); for (int i = 0; i < accountIDs.size(); i++) { for (int j = i + 1; j < accountIDs.size(); j++) { long account1_id = accountIDs.get(i); long account2_id = accountIDs.get(j); preparedStatement.clearParameters(); preparedStatement.setLong(1, account1_id); preparedStatement.setLong(2, account2_id); preparedStatement.setLong(3, sourceArtifact.getId()); if (dateTime > 0) { 
preparedStatement.setLong(4, dateTime); } else { preparedStatement.setNull(4, java.sql.Types.BIGINT); } preparedStatement.setInt(5, relationshipType.getTypeID()); preparedStatement.setLong(6, sourceArtifact.getDataSourceObjectID()); connection.executeUpdate(preparedStatement); } } trans.commit(); } catch (SQLException ex) { trans.rollback(); throw new TskCoreException("Error adding accounts relationship", ex); } } /** * Get the Account for the given account type and account ID. Create an a * new account if one doesn't exist * * @param accountType account type * @param accountUniqueID unique account identifier * * @return A matching account, either existing or newly created. * * @throws TskCoreException exception thrown if a critical error * occurs within TSK core * @throws InvalidAccountIDException If the account identifier is not valid. * */ private Account getOrCreateAccount(Account.Type accountType, String accountUniqueID) throws TskCoreException, InvalidAccountIDException { Account account = getAccount(accountType, accountUniqueID); if (null == account) { String query = " INTO accounts (account_type_id, account_unique_identifier) " + "VALUES ( " + getAccountTypeId(accountType) + ", '" + normalizeAccountID(accountType, accountUniqueID) + "'" + ")"; switch (db.getDatabaseType()) { case POSTGRESQL: query = "INSERT " + query + " ON CONFLICT DO NOTHING"; //NON-NLS break; case SQLITE: query = "INSERT OR IGNORE " + query; break; default: throw new TskCoreException("Unknown DB Type: " + db.getDatabaseType().name()); } CaseDbTransaction trans = db.beginTransaction(); Statement s = null; ResultSet rs = null; try { s = trans.getConnection().createStatement(); s.execute(query); trans.commit(); account = getAccount(accountType, accountUniqueID); } catch (SQLException ex) { trans.rollback(); throw new TskCoreException("Error adding an account", ex); } finally { closeResultSet(rs); closeStatement(s); } } return account; } /** * Gets or creates an account artifact for an 
instance of an account found
 * in a file.
 *
 * @param accountType     The account type of the account instance.
 * @param accountUniqueID The account ID of the account instance, should be
 *                        unique for the account type (e.g., an email
 *                        address for an email account).
 * @param moduleName      The name of the module that found the account
 *                        instance.
 * @param sourceFile      The file in which the account instance was found.
 *
 * @return The account artifact.
 *
 * @throws TskCoreException If there is an error querying or updating the
 *                          case database.
 */
private BlackboardArtifact getOrCreateAccountFileInstanceArtifact(Account.Type accountType, String accountUniqueID, String moduleName, Content sourceFile) throws TskCoreException {
	if (sourceFile == null) {
		throw new TskCoreException("Source file not provided.");
	}

	BlackboardArtifact accountArtifact = getAccountFileInstanceArtifact(accountType, accountUniqueID, sourceFile);
	if (accountArtifact == null) {
		// No existing artifact for this account/file pair: create one
		// carrying the account type and id as attributes.
		List attributes = Arrays.asList(
				new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_ACCOUNT_TYPE, moduleName, accountType.getTypeName()),
				new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_ID, moduleName, accountUniqueID));
		accountArtifact = sourceFile.newDataArtifact(ACCOUNT_TYPE, attributes);
		try {
			db.getBlackboard().postArtifact(accountArtifact, moduleName);
		} catch (BlackboardException ex) {
			// Posting is best-effort: the artifact itself is already created.
			LOGGER.log(Level.SEVERE, String.format("Error posting new account artifact to the blackboard (object ID = %d)", accountArtifact.getId()), ex);
		}
	}
	return accountArtifact;
}

/**
 * Get the blackboard artifact for the given account type, account ID, and
 * source file
 *
 * @param accountType     account type
 * @param accountUniqueID Unique account ID (such as email address)
 * @param sourceFile      Source file (for the artifact)
 *
 * @return blackboard artifact, returns NULL is no matching account found
 *
 * @throws TskCoreException exception thrown if a critical error occurs
 *                          within TSK core
 */
private BlackboardArtifact getAccountFileInstanceArtifact(Account.Type accountType, String accountUniqueID, Content sourceFile) throws TskCoreException {
	BlackboardArtifact accountArtifact = null;

	// Finds the TSK_ACCOUNT artifact on sourceFile whose TSK_ACCOUNT_TYPE
	// and TSK_ID attributes match, left-joined against tsk_data_artifacts
	// for any linked OS account row.
	String queryStr = "SELECT artifacts.artifact_id AS artifact_id,"
			+ " artifacts.obj_id AS obj_id,"
			+ " artifacts.artifact_obj_id AS artifact_obj_id,"
			+ " artifacts.data_source_obj_id AS data_source_obj_id,"
			+ " artifacts.artifact_type_id AS artifact_type_id,"
			+ " artifacts.review_status_id AS review_status_id,"
			+ " tsk_data_artifacts.os_account_obj_id AS os_account_obj_id"
			+ " FROM blackboard_artifacts AS artifacts"
			+ " JOIN blackboard_attributes AS attr_account_type"
			+ " ON artifacts.artifact_id = attr_account_type.artifact_id"
			+ " JOIN blackboard_attributes AS attr_account_id"
			+ " ON artifacts.artifact_id = attr_account_id.artifact_id"
			+ " AND attr_account_id.attribute_type_id = " + BlackboardAttribute.ATTRIBUTE_TYPE.TSK_ID.getTypeID()
			+ " AND attr_account_id.value_text = '" + accountUniqueID + "'"
			+ " LEFT JOIN tsk_data_artifacts ON tsk_data_artifacts.artifact_obj_id = artifacts.artifact_obj_id"
			+ " WHERE artifacts.artifact_type_id = " + BlackboardArtifact.ARTIFACT_TYPE.TSK_ACCOUNT.getTypeID()
			+ " AND attr_account_type.attribute_type_id = " + BlackboardAttribute.ATTRIBUTE_TYPE.TSK_ACCOUNT_TYPE.getTypeID()
			+ " AND attr_account_type.value_text = '" + accountType.getTypeName() + "'"
			+ " AND artifacts.obj_id = " + sourceFile.getId(); //NON-NLS

	db.acquireSingleUserCaseReadLock();
	try (CaseDbConnection connection = db.getConnection();
			Statement s = connection.createStatement();
			ResultSet rs = connection.executeQuery(s, queryStr);) { //NON-NLS
		if (rs.next()) {
			BlackboardArtifact.Type bbartType = db.getArtifactType(rs.getInt("artifact_type_id"));
			accountArtifact = new DataArtifact(db, rs.getLong("artifact_id"), rs.getLong("obj_id"), rs.getLong("artifact_obj_id"),
					rs.getObject("data_source_obj_id") != null ? rs.getLong("data_source_obj_id") : null,
					bbartType.getTypeID(), bbartType.getTypeName(), bbartType.getDisplayName(),
					BlackboardArtifact.ReviewStatus.withID(rs.getInt("review_status_id")), rs.getLong("os_account_obj_id"), false);
		}
	} catch (SQLException ex) {
		throw new TskCoreException("Error getting account", ex);
	} finally {
		db.releaseSingleUserCaseReadLock();
	}

	return accountArtifact;
}

/**
 * Get the Account.Type for the given type name.
 *
 * @param accountTypeName An account type name.
 *
 * @return An Account.Type or null if the account type does not exist.
 *
 * @throws TskCoreException If an error occurs accessing the case database.
 */
// NOTE: Full name given for Type for doxygen linking
public org.sleuthkit.datamodel.Account.Type getAccountType(String accountTypeName) throws TskCoreException {
	// Serve from the cache when possible.
	if (this.typeNameToAccountTypeMap.containsKey(accountTypeName)) {
		return this.typeNameToAccountTypeMap.get(accountTypeName);
	}

	db.acquireSingleUserCaseReadLock();
	try (CaseDbConnection connection = db.getConnection();
			Statement s = connection.createStatement();
			ResultSet rs = connection.executeQuery(s, "SELECT account_type_id, type_name, display_name FROM account_types WHERE type_name = '" + accountTypeName + "'");) { //NON-NLS
		Account.Type accountType = null;
		if (rs.next()) {
			accountType = new Account.Type(accountTypeName, rs.getString("display_name"));
			this.accountTypeToTypeIdMap.put(accountType, rs.getInt("account_type_id"));
			this.typeNameToAccountTypeMap.put(accountTypeName, accountType);
		}
		return accountType;
	} catch (SQLException ex) {
		throw new TskCoreException("Error getting account type id", ex);
	} finally {
		db.releaseSingleUserCaseReadLock();
	}
}

/**
 * Returns a list of AccountDeviceInstances that have at least one
 * relationship that meets the criteria listed in the filters.
 *
 * Applicable filters: DeviceFilter, AccountTypeFilter, DateRangeFilter,
 * RelationshipTypeFilter, MostRecentFilter
 *
 * @param filter filters to apply
 *
 * @return list of AccountDeviceInstances
 *
 * @throws TskCoreException exception thrown if a critical error occurs
 *                          within TSK core
 */
public List getAccountDeviceInstancesWithRelationships(CommunicationsFilter filter) throws TskCoreException {

	//set up applicable filters
	Set applicableInnerQueryFilters = new HashSet(Arrays.asList(
			CommunicationsFilter.DateRangeFilter.class.getName(),
			CommunicationsFilter.DeviceFilter.class.getName(),
			CommunicationsFilter.RelationshipTypeFilter.class.getName()
	));
	String relationshipFilterSQL = getCommunicationsFilterSQL(filter, applicableInnerQueryFilters);
	String relationshipLimitSQL = getMostRecentFilterLimitSQL(filter);

	// Inner query: every relationship row that passes the filters/limit.
	String relTblfilterQuery
			= "SELECT * "
			+ "FROM account_relationships as relationships"
			+ (relationshipFilterSQL.isEmpty() ? "" : " WHERE " + relationshipFilterSQL)
			+ (relationshipLimitSQL.isEmpty() ? "" : relationshipLimitSQL);

	// NOTE(review): "%1$1s" below looks like it was meant to be "%1$s"; it
	// still formats the first argument (width 1), so output is unchanged.
	String uniqueAccountQueryTemplate
			= " SELECT %1$1s as account_id,"
			+ " data_source_obj_id"
			+ " FROM ( " + relTblfilterQuery + ")AS %2$s";
	String relationshipTableFilterQuery1 = String.format(uniqueAccountQueryTemplate, "account1_id", "union_query_1");
	String relationshipTableFilterQuery2 = String.format(uniqueAccountQueryTemplate, "account2_id", "union_query_2");

	//this query groups by account_id and data_source_obj_id across both innerQueries
	String uniqueAccountQuery
			= "SELECT DISTINCT account_id, data_source_obj_id"
			+ " FROM ( " + relationshipTableFilterQuery1 + " UNION " + relationshipTableFilterQuery2 + " ) AS inner_union"
			+ " GROUP BY account_id, data_source_obj_id";

	// set up applicable filters
	Set applicableFilters = new HashSet(Arrays.asList(
			CommunicationsFilter.AccountTypeFilter.class.getName()
	));
	String accountTypeFilterSQL = getCommunicationsFilterSQL(filter, applicableFilters);

	String queryStr
			= //account info
			" accounts.account_id AS account_id,"
			+ " accounts.account_unique_identifier AS account_unique_identifier,"
			//account type info
			+ " account_types.type_name AS type_name,"
			//Account device instance info
			+ " data_source_info.device_id AS device_id"
			+ " FROM ( " + uniqueAccountQuery + " ) AS account_device_instances"
			+ " JOIN accounts AS accounts"
			+ " ON accounts.account_id = account_device_instances.account_id"
			+ " JOIN account_types AS account_types"
			+ " ON accounts.account_type_id = account_types.account_type_id"
			+ " JOIN data_source_info AS data_source_info"
			+ " ON account_device_instances.data_source_obj_id = data_source_info.obj_id"
			+ (accountTypeFilterSQL.isEmpty() ? "" : " WHERE " + accountTypeFilterSQL);

	// The SELECT prefix differs per backend to get one row per
	// (account, device) pair.
	switch (db.getDatabaseType()) {
		case POSTGRESQL:
			queryStr = "SELECT DISTINCT ON ( accounts.account_id, data_source_info.device_id) " + queryStr;
			break;
		case SQLITE:
			queryStr = "SELECT " + queryStr + " GROUP BY accounts.account_id, data_source_info.device_id";
			break;
		default:
			throw new TskCoreException("Unknown DB Type: " + db.getDatabaseType().name());
	}

	db.acquireSingleUserCaseReadLock();
	try (CaseDbConnection connection = db.getConnection();
			Statement s = connection.createStatement();
			ResultSet rs = connection.executeQuery(s, queryStr);) { //NON-NLS
		ArrayList accountDeviceInstances = new ArrayList();
		while (rs.next()) {
			long account_id = rs.getLong("account_id");
			String deviceID = rs.getString("device_id");
			final String type_name = rs.getString("type_name");
			final String account_unique_identifier = rs.getString("account_unique_identifier");

			// Account types found here are looked up in the cache;
			// presumably they were loaded by initAccountTypes — TODO
			// confirm a cache miss (null type) cannot occur here.
			Account.Type accountType = typeNameToAccountTypeMap.get(type_name);
			Account account = new Account(account_id, accountType, account_unique_identifier);
			accountDeviceInstances.add(new AccountDeviceInstance(account, deviceID));
		}
		return accountDeviceInstances;
	} catch (SQLException ex) {
		throw new TskCoreException("Error getting account device instances. " + ex.getMessage(), ex);
	} finally {
		db.releaseSingleUserCaseReadLock();
	}
}

/**
 * Get the number of relationships between all pairs of accounts in the
 * given set. For each unordered pair of accounts, find the number
 * of relationships between those two accounts that pass the given filter.
 *
 * Applicable filters: DeviceFilter, DateRangeFilter, RelationshipTypeFilter
 *
 * @param accounts The set of accounts to count the relationships (pairwise)
 *                 between.
 * @param filter   The filter that relationships must pass to be included in
 *                 the count.
*
 * @return The number of relationships (that pass the filter) between each
 *         pair of accounts, organized in a map where the key is an
 *         unordered pair of account ids, and the value is the number of
 *         relationships.
 *
 * @throws TskCoreException if there is a problem querying the DB.
 */
public Map getRelationshipCountsPairwise(Set accounts, CommunicationsFilter filter) throws TskCoreException {

    Set accountIDs = new HashSet();
    Set accountDeviceIDs = new HashSet();
    for (AccountDeviceInstance adi : accounts) {
        accountIDs.add(adi.getAccount().getAccountID());
        // device ids are strings, so quote them for embedding in the SQL below
        // NOTE(review): assumes device ids contain no single quotes -- TODO confirm
        accountDeviceIDs.add("'" + adi.getDeviceId() + "'");
    }
    //set up applicable filters
    Set applicableFilters = new HashSet(Arrays.asList(
            CommunicationsFilter.DateRangeFilter.class.getName(),
            CommunicationsFilter.DeviceFilter.class.getName(),
            CommunicationsFilter.RelationshipTypeFilter.class.getName()
    ));
    String accountIDsCSL = StringUtils.buildCSVString(accountIDs);
    String accountDeviceIDsCSL = StringUtils.buildCSVString(accountDeviceIDs);
    String filterSQL = getCommunicationsFilterSQL(filter, applicableFilters);

    // Count distinct relationship sources per (device, account1, account2) triple;
    // both directions of a pair are merged into one AccountPair key below.
    final String queryString
            = " SELECT count(DISTINCT relationships.relationship_source_obj_id) AS count," //relationship count
            + " data_source_info.device_id AS device_id,"
            //account 1 info
            + " accounts1.account_id AS account1_id,"
            + " accounts1.account_unique_identifier AS account1_unique_identifier,"
            + " account_types1.type_name AS type_name1,"
            + " account_types1.display_name AS display_name1,"
            //account 2 info
            + " accounts2.account_id AS account2_id,"
            + " accounts2.account_unique_identifier AS account2_unique_identifier,"
            + " account_types2.type_name AS type_name2,"
            + " account_types2.display_name AS display_name2"
            + " FROM account_relationships AS relationships"
            + " JOIN data_source_info AS data_source_info"
            + " ON relationships.data_source_obj_id = data_source_info.obj_id "
            //account1 aliases
            + " JOIN accounts AS accounts1 "
            + " ON accounts1.account_id = relationships.account1_id"
            + " JOIN account_types AS account_types1"
            + " ON accounts1.account_type_id = account_types1.account_type_id"
            //account2 aliases
            + " JOIN accounts AS accounts2 "
            + " ON accounts2.account_id = relationships.account2_id"
            + " JOIN account_types AS account_types2"
            + " ON accounts2.account_type_id = account_types2.account_type_id"
            + " WHERE (( relationships.account1_id IN (" + accountIDsCSL + ")) "
            + " AND ( relationships.account2_id IN ( " + accountIDsCSL + " ))"
            + " AND ( data_source_info.device_id IN (" + accountDeviceIDsCSL + "))) "
            + (filterSQL.isEmpty() ? "" : " AND " + filterSQL)
            + " GROUP BY data_source_info.device_id, "
            + " accounts1.account_id, "
            + " account_types1.type_name, "
            + " account_types1.display_name, "
            + " accounts2.account_id, "
            + " account_types2.type_name, "
            + " account_types2.display_name";
    Map results = new HashMap();

    db.acquireSingleUserCaseReadLock();
    try (CaseDbConnection connection = db.getConnection();
            Statement s = connection.createStatement();
            ResultSet rs = connection.executeQuery(s, queryString);) { //NON-NLS

        while (rs.next()) {
            //make account 1
            Account.Type type1 = new Account.Type(rs.getString("type_name1"), rs.getString("display_name1"));
            AccountDeviceInstance adi1 = new AccountDeviceInstance(new Account(rs.getLong("account1_id"), type1,
                    rs.getString("account1_unique_identifier")),
                    rs.getString("device_id"));

            //make account 2
            Account.Type type2 = new Account.Type(rs.getString("type_name2"), rs.getString("display_name2"));
            AccountDeviceInstance adi2 = new AccountDeviceInstance(new Account(rs.getLong("account2_id"), type2,
                    rs.getString("account2_unique_identifier")),
                    rs.getString("device_id"));

            // AccountPair is an unordered key, so (A,B) and (B,A) rows land on the same entry
            AccountPair relationshipKey = new AccountPair(adi1, adi2);
            long count = rs.getLong("count");

            //merge counts for relationships that have the accounts flipped.
            Long oldCount = results.get(relationshipKey);
            if (oldCount != null) {
                count += oldCount;
            }
            results.put(relationshipKey, count);
        }
        return results;
    } catch (SQLException ex) {
        throw new TskCoreException("Error getting relationships between accounts. " + ex.getMessage(), ex);
    } finally {
        db.releaseSingleUserCaseReadLock();
    }
}

/**
 * Get the number of unique relationship sources (such as EMAIL artifacts)
 * associated with an account on a given device (AccountDeviceInstance) that
 * meet the filter criteria.
 *
 * Applicable filters: RelationshipTypeFilter, DateRangeFilter
 *
 * @param accountDeviceInstance Account of interest
 * @param filter                Filters to apply.
 *
 * @return number of account relationships found for this account.
 *
 * @throws org.sleuthkit.datamodel.TskCoreException
 *
 */
public long getRelationshipSourcesCount(AccountDeviceInstance accountDeviceInstance, CommunicationsFilter filter) throws TskCoreException {

    long account_id = accountDeviceInstance.getAccount().getAccountID();

    // Get the list of Data source objects IDs corresponding to this DeviceID.
    String datasourceObjIdsCSV = StringUtils.buildCSVString(
            db.getDataSourceObjIds(accountDeviceInstance.getDeviceId()));

    // set up applicable filters
    Set applicableFilters = new HashSet(Arrays.asList(
            CommunicationsFilter.RelationshipTypeFilter.class.getName(),
            CommunicationsFilter.DateRangeFilter.class.getName()
    ));
    String filterSQL = getCommunicationsFilterSQL(filter, applicableFilters);

    // when a MostRecentFilter is present, restrict the relationships scanned
    // to the most recent N before counting
    String innerQuery = " account_relationships AS relationships";
    String limitStr = getMostRecentFilterLimitSQL(filter);

    if (!limitStr.isEmpty()) {
        innerQuery = "(SELECT * FROM account_relationships as relationships " + limitStr + ") as relationships";
    }

    String queryStr
            = "SELECT count(DISTINCT relationships.relationship_source_obj_id) as count "
            + " FROM" + innerQuery
            + " WHERE relationships.data_source_obj_id IN ( " + datasourceObjIdsCSV + " )"
            + " AND ( relationships.account1_id = " + account_id
            + " OR relationships.account2_id = " + account_id + " )"
            + (filterSQL.isEmpty() ? "" : " AND " + filterSQL);

    db.acquireSingleUserCaseReadLock();
    try (CaseDbConnection connection = db.getConnection();
            Statement s = connection.createStatement();
            ResultSet rs = connection.executeQuery(s, queryStr);) { //NON-NLS
        rs.next();
        return (rs.getLong("count"));
    } catch (SQLException ex) {
        throw new TskCoreException("Error getting relationships count for account device instance. " + ex.getMessage(), ex);
    } finally {
        db.releaseSingleUserCaseReadLock();
    }
}

/**
 * Get the unique relationship sources (such as EMAIL artifacts) associated
 * with accounts on specific devices (AccountDeviceInstance) that meet the
 * filter criteria.
 *
 * Applicable filters: RelationshipTypeFilter, DateRangeFilter,
 * MostRecentFilter
 *
 * @param accountDeviceInstanceList set of account device instances for
 *                                  which to get the relationship sources.
 * @param filter                    Filters to apply.
 *
 * @return relationship sources found for given account(s).
*
 * @throws org.sleuthkit.datamodel.TskCoreException
 */
public Set getRelationshipSources(Set accountDeviceInstanceList, CommunicationsFilter filter) throws TskCoreException {

    if (accountDeviceInstanceList.isEmpty()) {
        //log this?
        return Collections.emptySet();
    }

    // Collect, per account id, the union of data-source object ids for all of
    // that account's device instances (the same account can appear on several devices).
    Map> accountIdToDatasourceObjIdMap = new HashMap<>();
    for (AccountDeviceInstance accountDeviceInstance : accountDeviceInstanceList) {
        long accountID = accountDeviceInstance.getAccount().getAccountID();
        List dataSourceObjIds = db.getDataSourceObjIds(accountDeviceInstance.getDeviceId());

        if (accountIdToDatasourceObjIdMap.containsKey(accountID)) {
            accountIdToDatasourceObjIdMap.get(accountID).addAll(dataSourceObjIds);
        } else {
            accountIdToDatasourceObjIdMap.put(accountID, new HashSet<>(dataSourceObjIds));
        }
    }

    // Build one OR-able clause per account: relationship touches the account
    // (either side) AND lives in one of that account's data sources.
    List adiSQLClauses = new ArrayList<>();
    for (Map.Entry> entry : accountIdToDatasourceObjIdMap.entrySet()) {
        final Long accountID = entry.getKey();
        String datasourceObjIdsCSV = StringUtils.buildCSVString(entry.getValue());

        adiSQLClauses.add(
                "( ( relationships.data_source_obj_id IN ( " + datasourceObjIdsCSV + " ) )"
                + " AND ( relationships.account1_id = " + accountID
                + " OR relationships.account2_id = " + accountID + " ) )"
        );
    }
    String adiSQLClause = StringUtils.joinAsStrings(adiSQLClauses, " OR ");

    // set up applicable filters
    Set applicableFilters = new HashSet(Arrays.asList(
            CommunicationsFilter.RelationshipTypeFilter.class
                    .getName(),
            CommunicationsFilter.DateRangeFilter.class
                    .getName()
    ));
    String filterSQL = getCommunicationsFilterSQL(filter, applicableFilters);

    // when a MostRecentFilter is present, restrict the relationships joined
    // against to the most recent N
    String limitQuery = " account_relationships AS relationships";
    String limitStr = getMostRecentFilterLimitSQL(filter);
    if (!limitStr.isEmpty()) {
        limitQuery = "(SELECT * FROM account_relationships as relationships " + limitStr + ") as relationships";
    }

    String queryStr
            = "SELECT DISTINCT artifacts.artifact_id AS artifact_id,"
            + " artifacts.obj_id AS obj_id,"
            + " artifacts.artifact_obj_id AS artifact_obj_id,"
            + " artifacts.data_source_obj_id AS data_source_obj_id, "
            + " artifacts.artifact_type_id AS artifact_type_id, "
            + " artifacts.review_status_id AS review_status_id,"
            + " tsk_data_artifacts.os_account_obj_id as os_account_obj_id"
            + " FROM blackboard_artifacts as artifacts"
            + " JOIN " + limitQuery
            + " ON artifacts.artifact_obj_id = relationships.relationship_source_obj_id"
            + " LEFT JOIN tsk_data_artifacts ON artifacts.artifact_obj_id = tsk_data_artifacts.artifact_obj_id"
            // append sql to restrict search to specified account device instances
            + " WHERE (" + adiSQLClause + " )"
            // plus other filters
            + (filterSQL.isEmpty() ? "" : " AND (" + filterSQL + " )");

    db.acquireSingleUserCaseReadLock();
    try (CaseDbConnection connection = db.getConnection();
            Statement s = connection.createStatement();
            ResultSet rs = connection.executeQuery(s, queryStr);) { //NON-NLS
        Set relationshipSources = new HashSet<>();
        relationshipSources.addAll(getDataArtifactsFromResult(rs));
        return relationshipSources;
    } catch (SQLException ex) {
        throw new TskCoreException("Error getting relationships for account. " + ex.getMessage(), ex);
    } finally {
        db.releaseSingleUserCaseReadLock();
    }
}

/**
 * Get a set of AccountDeviceInstances that have relationships with the
 * given AccountDeviceInstance and meet the criteria of the given filter.
 *
 * Applicable filters: DeviceFilter, DateRangeFilter, RelationshipTypeFilter
 *
 * @param accountDeviceInstance The account device instance.
 * @param filter                The filters to apply.
 *
 * @return A list of AccountDeviceInstances that have relationships with the
 *         given AccountDeviceInstance and meet the criteria of the given
 *         filter.
 *
 * @throws TskCoreException if there is a serious error executing the query.
*/
public List getRelatedAccountDeviceInstances(AccountDeviceInstance accountDeviceInstance, CommunicationsFilter filter) throws TskCoreException {
    final List dataSourceObjIds
            = getSleuthkitCase().getDataSourceObjIds(accountDeviceInstance.getDeviceId());

    //set up applicable filters
    Set applicableInnerQueryFilters = new HashSet(Arrays.asList(
            CommunicationsFilter.DateRangeFilter.class.getName(),
            CommunicationsFilter.DeviceFilter.class.getName(),
            CommunicationsFilter.RelationshipTypeFilter.class.getName()
    ));

    String innerQueryfilterSQL = getCommunicationsFilterSQL(filter, applicableInnerQueryFilters);

    // Template instantiated twice: once selecting the "other side" account1_id
    // where this account is account2_id, and once the reverse.
    String innerQueryTemplate
            = " SELECT %1$1s as account_id,"
            + " data_source_obj_id"
            + " FROM account_relationships as relationships"
            + " WHERE %2$1s = " + accountDeviceInstance.getAccount().getAccountID() + ""
            + " AND data_source_obj_id IN (" + StringUtils.buildCSVString(dataSourceObjIds) + ")"
            + (innerQueryfilterSQL.isEmpty() ? "" : " AND " + innerQueryfilterSQL);

    String innerQuery1 = String.format(innerQueryTemplate, "account1_id", "account2_id");
    String innerQuery2 = String.format(innerQueryTemplate, "account2_id", "account1_id");

    //this query groups by account_id and data_source_obj_id across both innerQueries
    String combinedInnerQuery
            = "SELECT account_id, data_source_obj_id "
            + " FROM ( " + innerQuery1
            + " UNION " + innerQuery2
            + " ) AS inner_union"
            + " GROUP BY account_id, data_source_obj_id";

    // set up applicable filters
    Set applicableFilters = new HashSet(Arrays.asList(
            CommunicationsFilter.AccountTypeFilter.class.getName()
    ));

    String filterSQL = getCommunicationsFilterSQL(filter, applicableFilters);

    // SELECT-list and joins only; the SELECT keyword / dedup strategy is
    // prepended per-DB below (DISTINCT ON for PostgreSQL, GROUP BY for SQLite).
    String queryStr
            = //account info
            " accounts.account_id AS account_id,"
            + " accounts.account_unique_identifier AS account_unique_identifier,"
            //account type info
            + " account_types.type_name AS type_name,"
            //Account device instance info
            + " data_source_info.device_id AS device_id"
            + " FROM ( " + combinedInnerQuery + " ) AS account_device_instances"
            + " JOIN accounts AS accounts"
            + " ON accounts.account_id = account_device_instances.account_id"
            + " JOIN account_types AS account_types"
            + " ON accounts.account_type_id = account_types.account_type_id"
            + " JOIN data_source_info AS data_source_info"
            + " ON account_device_instances.data_source_obj_id = data_source_info.obj_id"
            + (filterSQL.isEmpty() ? "" : " WHERE " + filterSQL);

    switch (db.getDatabaseType()) {
        case POSTGRESQL:
            queryStr = "SELECT DISTINCT ON ( accounts.account_id, data_source_info.device_id) " + queryStr;
            break;
        case SQLITE:
            queryStr = "SELECT " + queryStr + " GROUP BY accounts.account_id, data_source_info.device_id";
            break;
        default:
            throw new TskCoreException("Unknown DB Type: " + db.getDatabaseType().name());
    }

    db.acquireSingleUserCaseReadLock();
    try (CaseDbConnection connection = db.getConnection();
            Statement s = connection.createStatement();
            ResultSet rs = connection.executeQuery(s, queryStr);) {
        ArrayList accountDeviceInstances = new ArrayList();
        while (rs.next()) {
            long account_id = rs.getLong("account_id");
            String deviceID = rs.getString("device_id");
            final String type_name = rs.getString("type_name");
            final String account_unique_identifier = rs.getString("account_unique_identifier");

            // NOTE(review): accountType may be null if the type name is not in
            // typeNameToAccountTypeMap -- TODO confirm callers tolerate that
            Account.Type accountType = typeNameToAccountTypeMap.get(type_name);
            Account account = new Account(account_id, accountType, account_unique_identifier);
            accountDeviceInstances.add(new AccountDeviceInstance(account, deviceID));
        }
        return accountDeviceInstances;
    } catch (SQLException ex) {
        throw new TskCoreException("Error getting account device instances. " + ex.getMessage(), ex);
    } finally {
        db.releaseSingleUserCaseReadLock();
    }
}

/**
 * Get the sources (artifacts, content) of relationships between the given
 * account device instances.
 *
 * Applicable filters: DeviceFilter, DateRangeFilter,
 * RelationshipTypeFilter, MostRecentFilter
 *
 * @param account1 First AccountDeviceInstance
 * @param account2 Second AccountDeviceInstance
 * @param filter   Filters to apply.
*
 * @return relationship sources for relationships between account1 and
 *         account2.
 *
 * @throws org.sleuthkit.datamodel.TskCoreException
 */
public List getRelationshipSources(AccountDeviceInstance account1, AccountDeviceInstance account2, CommunicationsFilter filter) throws TskCoreException {

    //set up applicable filters
    Set applicableFilters = new HashSet<>(Arrays.asList(
            CommunicationsFilter.DateRangeFilter.class.getName(),
            CommunicationsFilter.DeviceFilter.class.getName(),
            CommunicationsFilter.RelationshipTypeFilter.class.getName()
    ));

    // when a MostRecentFilter is present, restrict the relationships joined
    // against to the most recent N
    String limitQuery = " account_relationships AS relationships";
    String limitStr = getMostRecentFilterLimitSQL(filter);
    if (!limitStr.isEmpty()) {
        limitQuery = "(SELECT * FROM account_relationships as relationships " + limitStr + ") as relationships";
    }

    String filterSQL = getCommunicationsFilterSQL(filter, applicableFilters);
    // match the pair in either direction: (a1,a2) or (a2,a1)
    final String queryString = "SELECT artifacts.artifact_id AS artifact_id,"
            + " artifacts.obj_id AS obj_id,"
            + " artifacts.artifact_obj_id AS artifact_obj_id,"
            + " artifacts.data_source_obj_id AS data_source_obj_id,"
            + " artifacts.artifact_type_id AS artifact_type_id,"
            + " artifacts.review_status_id AS review_status_id,"
            + " tsk_data_artifacts.os_account_obj_id AS os_account_obj_id"
            + " FROM blackboard_artifacts AS artifacts"
            + " JOIN " + limitQuery
            + " ON artifacts.artifact_obj_id = relationships.relationship_source_obj_id"
            + " LEFT JOIN tsk_data_artifacts ON artifacts.artifact_obj_id = tsk_data_artifacts.artifact_obj_id"
            + " WHERE (( relationships.account1_id = " + account1.getAccount().getAccountID()
            + " AND relationships.account2_id = " + account2.getAccount().getAccountID()
            + " ) OR ( relationships.account2_id = " + account1.getAccount().getAccountID()
            + " AND relationships.account1_id =" + account2.getAccount().getAccountID() + " ))"
            + (filterSQL.isEmpty() ? "" : " AND " + filterSQL);
    db.acquireSingleUserCaseReadLock();
    try (CaseDbConnection connection = db.getConnection();
            Statement s = connection.createStatement();
            ResultSet rs = connection.executeQuery(s, queryString);) {
        ArrayList artifacts = new ArrayList<>();
        artifacts.addAll(getDataArtifactsFromResult(rs));
        return artifacts;
    } catch (SQLException ex) {
        throw new TskCoreException("Error getting relationships between accounts. " + ex.getMessage(), ex);
    } finally {
        db.releaseSingleUserCaseReadLock();
    }
}

/**
 * Get a list of AccountFileInstance for the given account.
 *
 * @param account The account to look up file instances for.
 *
 * @return A list of AccountFileInstances for the given account or null if
 *         none are found.
 *
 * @throws org.sleuthkit.datamodel.TskCoreException
 */
public List getAccountFileInstances(Account account) throws TskCoreException {
    List accountFileInstanceList = new ArrayList<>();

    @SuppressWarnings("deprecation")
    List artifactList = getSleuthkitCase().getBlackboardArtifacts(BlackboardArtifact.ARTIFACT_TYPE.TSK_ACCOUNT, BlackboardAttribute.ATTRIBUTE_TYPE.TSK_ID, account.getTypeSpecificID());

    if (artifactList != null && !artifactList.isEmpty()) {
        for (BlackboardArtifact artifact : artifactList) {
            accountFileInstanceList.add(new AccountFileInstance(artifact, account));
        }
    }

    // NOTE(review): returns null (not an empty list) when nothing was found;
    // callers appear to depend on that contract, so it is preserved
    if (!accountFileInstanceList.isEmpty()) {
        return accountFileInstanceList;
    } else {
        return null;
    }
}

/**
 * Gets a list of the distinct account types that can currently be found in
 * the case db.
 *
 * @return A list of distinct account types or an empty list.
* * @throws TskCoreException */ public List getAccountTypesInUse() throws TskCoreException { String query = "SELECT DISTINCT accounts.account_type_id, type_name, display_name FROM accounts JOIN account_types ON accounts.account_type_id = account_types.account_type_id"; List inUseAccounts = new ArrayList<>(); db.acquireSingleUserCaseReadLock(); try (CaseDbConnection connection = db.getConnection(); Statement s = connection.createStatement(); ResultSet rs = connection.executeQuery(s, query);) { Account.Type accountType; while (rs.next()) { String accountTypeName = rs.getString("type_name"); accountType = this.typeNameToAccountTypeMap.get(accountTypeName); if (accountType == null) { accountType = new Account.Type(accountTypeName, rs.getString("display_name")); this.accountTypeToTypeIdMap.put(accountType, rs.getInt("account_type_id")); } inUseAccounts.add(accountType); } return inUseAccounts; } catch (SQLException ex) { throw new TskCoreException("Error getting account type id", ex); } finally { db.releaseSingleUserCaseReadLock(); } } /** * Gets a list of accounts that are related to the given artifact. * * @param artifact * * @return A list of distinct accounts or an empty list if none where found. * * @throws TskCoreException */ public List getAccountsRelatedToArtifact(BlackboardArtifact artifact) throws TskCoreException { if (artifact == null) { throw new IllegalArgumentException("null arugment passed to getAccountsRelatedToArtifact"); } List accountList = new ArrayList<>(); db.acquireSingleUserCaseReadLock(); try (CaseDbConnection connection = db.getConnection()) { try { // In order to get a list of all the unique accounts in a relationship with the given aritfact // we must first union a list of the unique account1_id in the relationship with artifact // then the unique account2_id (inner select with union). The outter select assures the list // of the inner select only contains unique accounts. 
String query = String.format("SELECT DISTINCT (account_id), account_type_id, account_unique_identifier" + " FROM (" + " SELECT DISTINCT (account_id), account_type_id, account_unique_identifier" + " FROM accounts" + " JOIN account_relationships ON account1_id = account_id" + " WHERE relationship_source_obj_id = %d" + " UNION " + " SELECT DISTINCT (account_id), account_type_id, account_unique_identifier" + " FROM accounts" + " JOIN account_relationships ON account2_id = account_id" + " WHERE relationship_source_obj_id = %d) AS unionOfRelationships", artifact.getId(), artifact.getId()); try (Statement stmt = connection.createStatement(); ResultSet rs = stmt.executeQuery(query)) { while (rs.next()) { Account.Type accountType = null; int accountTypeId = rs.getInt("account_type_id"); for (Map.Entry entry : accountTypeToTypeIdMap.entrySet()) { if (entry.getValue() == accountTypeId) { accountType = entry.getKey(); break; } } accountList.add(new Account(rs.getInt("account_id"), accountType, rs.getString("account_unique_identifier"))); } } catch (SQLException ex) { throw new TskCoreException("Unable to get account list for give artifact " + artifact.getId(), ex); } } finally { db.releaseSingleUserCaseReadLock(); } } return accountList; } /** * Get account_type_id for the given account type. * * @param accountType account type to lookup. * * @return account_type_id for the given account type. 0 if not known. */ int getAccountTypeId(Account.Type accountType) { if (accountTypeToTypeIdMap.containsKey(accountType)) { return accountTypeToTypeIdMap.get(accountType); } return 0; } /** * Normalize the given account ID according to the rules of the given * Account.Type. * * @param accountType The type of account to normalize for * @param accountUniqueID The account id to normalize * * @return The normalized account id. * * @throws InvalidAccountIDException If the account identifier is not valid. 
*/ private String normalizeAccountID(Account.Type accountType, String accountUniqueID) throws InvalidAccountIDException { if (accountUniqueID == null || accountUniqueID.isEmpty()) { throw new InvalidAccountIDException("Account id is null or empty."); } String normalizedAccountID; if (accountType.equals(Account.Type.PHONE)) { normalizedAccountID = CommunicationsUtils.normalizePhoneNum(accountUniqueID); } else if (accountType.equals(Account.Type.EMAIL)) { normalizedAccountID = CommunicationsUtils.normalizeEmailAddress(accountUniqueID); } else { normalizedAccountID = accountUniqueID.toLowerCase().trim(); } return normalizedAccountID; } /** * Builds the SQL for the given CommunicationsFilter. * * Gets the SQL for each subfilter and combines using AND. * * @param commFilter The CommunicationsFilter to get the SQL for. * @param applicableFilters A Set of names of classes of subfilters that are * applicable. SubFilters not in this list will be * ignored. * * @return return SQL suitible for use IN a where clause. */ private String getCommunicationsFilterSQL(CommunicationsFilter commFilter, Set applicableFilters) { if (null == commFilter || commFilter.getAndFilters().isEmpty()) { return ""; } String sqlStr = ""; StringBuilder sqlSB = new StringBuilder(); boolean first = true; for (CommunicationsFilter.SubFilter subFilter : commFilter.getAndFilters()) { // If the filter is applicable if (applicableFilters.contains(subFilter.getClass().getName())) { String subfilterSQL = subFilter.getSQL(this); if (!subfilterSQL.isEmpty()) { if (first) { first = false; } else { sqlSB.append(" AND "); } sqlSB.append("( "); sqlSB.append(subfilterSQL); sqlSB.append(" )"); } } } if (!sqlSB.toString().isEmpty()) { sqlStr = "( " + sqlSB.toString() + " )"; } return sqlStr; } /** * Builds the SQL for the MostRecentFilter. * * @param filter The CommunicationsFilter to get the SQL for. * * @return Order BY and LIMIT clause or empty string if no filter is * available. 
*/ private String getMostRecentFilterLimitSQL(CommunicationsFilter filter) { String limitStr = ""; if (filter != null && !filter.getAndFilters().isEmpty()) { for (CommunicationsFilter.SubFilter subFilter : filter.getAndFilters()) { if (subFilter.getClass().getName().equals(CommunicationsFilter.MostRecentFilter.class.getName())) { limitStr = subFilter.getSQL(this); break; } } } return limitStr; } /** * A helper method that will return a set of BlackboardArtifact objects for * the given ResultSet. * * @param resultSet The results of executing a query. * * @return A list of BlackboardArtifact objects. * * @throws SQLException * @throws TskCoreException */ private List getDataArtifactsFromResult(ResultSet resultSet) throws SQLException, TskCoreException { List artifacts = new ArrayList<>(); while (resultSet.next()) { BlackboardArtifact.Type bbartType = db.getArtifactType(resultSet.getInt("artifact_type_id")); artifacts.add(new DataArtifact(db, resultSet.getLong("artifact_id"), resultSet.getLong("obj_id"), resultSet.getLong("artifact_obj_id"), resultSet.getObject("data_source_obj_id") != null ? resultSet.getLong("data_source_obj_id") : null, bbartType.getTypeID(), bbartType.getTypeName(), bbartType.getDisplayName(), BlackboardArtifact.ReviewStatus.withID(resultSet.getInt("review_status_id")), resultSet.getLong("os_account_obj_id"), false)); } return artifacts; } } sleuthkit-4.11.1/bindings/java/src/org/sleuthkit/datamodel/FsContent.java000644 000765 000024 00000043637 14137073413 027234 0ustar00carrierstaff000000 000000 /* * SleuthKit Java Bindings * * Copyright 2011-2017 Basis Technology Corp. * Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at
 *
 *	     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.sleuthkit.datamodel;

import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.logging.Level;
import java.util.logging.Logger;
import org.sleuthkit.datamodel.TskData.FileKnown;
import org.sleuthkit.datamodel.TskData.TSK_DB_FILES_TYPE_ENUM;
import org.sleuthkit.datamodel.TskData.TSK_FS_ATTR_TYPE_ENUM;
import org.sleuthkit.datamodel.TskData.TSK_FS_META_TYPE_ENUM;
import org.sleuthkit.datamodel.TskData.TSK_FS_NAME_FLAG_ENUM;
import org.sleuthkit.datamodel.TskData.TSK_FS_NAME_TYPE_ENUM;

/**
 * An abstract base class for representations of a file system files or
 * directories that have been added to a case.
 *
 * TODO move common getters to AbstractFile class
 */
public abstract class FsContent extends AbstractFile {

	private static final Logger logger = Logger.getLogger(FsContent.class.getName());
	// lazily-computed unique path; volatile so the cached value is visible across threads
	private volatile String uniquePath;
	// lazily-loaded istat-style metadata text; guarded by synchronized getMetaDataText()
	private List metaDataText = null;
	// lazily-resolved parent file system; volatile for double-checked locking in getFileSystem()
	private volatile FileSystem parentFileSystem;

	/**
	 * @deprecated Use getFileSystemId instead.
	 */
	// TODO: Make private.
	@Deprecated
	protected final long fsObjId;

	/**
	 * The JNI file handle; 0 until loadFileHandle() opens it.
	 *
	 * @deprecated Use getFileHandle instead.
	 */
	// TODO: Make private.
	@Deprecated
	protected volatile long fileHandle = 0;

	/**
	 * Constructs an abstract base class for representations of a file system
	 * files or directories that have been added to a case.
	 *
	 * @param db                 The case database to which the file has been
	 *                           added.
	 * @param objId              The object id of the file in the case database.
	 * @param dataSourceObjectId The object id of the data source for the file.
	 * @param fsObjId            The object id of the file system to which this
	 *                           file belongs.
	 * @param attrType           The type attribute given to the file by the
	 *                           file system.
	 * @param attrId             The type id given to the file by the file
	 *                           system.
	 * @param name               The name of the file.
	 * @param fileType           The type of file
	 * @param metaAddr           The meta address of the file.
	 * @param metaSeq            The meta sequence number of the file.
	 * @param dirType            The type of the file, usually as reported in
	 *                           the name structure of the file system. May be
	 *                           set to TSK_FS_NAME_TYPE_ENUM.UNDEF.
	 * @param metaType           The type of the file, usually as reported in
	 *                           the metadata structure of the file system. May
	 *                           be set to
	 *                           TSK_FS_META_TYPE_ENUM.TSK_FS_META_TYPE_UNDEF.
	 * @param dirFlag            The allocated status of the file, usually as
	 *                           reported in the name structure of the file
	 *                           system.
	 * @param metaFlags          The allocated status of the file, usually as
	 *                           reported in the metadata structure of the file
	 *                           system.
	 * @param size               The size of the file.
	 * @param ctime              The changed time of the file.
	 * @param crtime             The created time of the file.
	 * @param atime              The accessed time of the file.
	 * @param mtime              The modified time of the file.
	 * @param modes              The modes for the file.
	 * @param uid                The UID for the file.
	 * @param gid                The GID for the file.
	 * @param md5Hash            The MD5 hash of the file, null if not yet
	 *                           calculated.
	 * @param sha256Hash         The SHA-256 hash of the file, null if not yet
	 *                           calculated.
	 * @param knownState         The known state of the file from a hash
	 *                           database lookup, null if not yet looked up.
	 * @param parentPath         The path of the parent of the file.
	 * @param mimeType           The MIME type of the file, null if it has not
	 *                           yet been determined.
	 * @param extension          The extension part of the file name (not
	 *                           including the '.'), can be null.
	 * @param ownerUid           UID of the file owner as found in the file
	 *                           system, can be null.
	 * @param osAccountObjId     Obj id of the owner OS account, may be null.
	 * @param fileAttributes     Additional attributes of the file.
	 */
	@SuppressWarnings("deprecation")
	FsContent(SleuthkitCase db,
			long objId,
			long dataSourceObjectId,
			long fsObjId,
			TSK_FS_ATTR_TYPE_ENUM attrType, int attrId,
			String name,
			TSK_DB_FILES_TYPE_ENUM fileType,
			long metaAddr, int metaSeq,
			TSK_FS_NAME_TYPE_ENUM dirType, TSK_FS_META_TYPE_ENUM metaType,
			TSK_FS_NAME_FLAG_ENUM dirFlag, short metaFlags,
			long size,
			long ctime, long crtime, long atime, long mtime,
			short modes, int uid, int gid,
			String md5Hash, String sha256Hash, FileKnown knownState,
			String parentPath,
			String mimeType,
			String extension,
			String ownerUid,
			Long osAccountObjId,
			List fileAttributes) {
		super(db, objId, dataSourceObjectId, attrType, attrId, name, fileType, metaAddr, metaSeq, dirType, metaType, dirFlag, metaFlags, size, ctime, crtime, atime, mtime, modes, uid, gid, md5Hash, sha256Hash, knownState, parentPath, mimeType, extension, ownerUid, osAccountObjId, fileAttributes);
		this.fsObjId = fsObjId;
	}

	/**
	 * Get the object id of the parent file system of this file or directory.
	 *
	 * @return the parent file system id
	 */
	@SuppressWarnings("deprecation")
	public long getFileSystemId() {
		return fsObjId;
	}

	/**
	 * Sets the parent file system of this file or directory.
	 *
	 * @param parent The parent file system object.
	 */
	void setFileSystem(FileSystem parent) {
		parentFileSystem = parent;
	}

	/**
	 * Gets the parent file system of this file or directory, loading it from
	 * the case database on first access (double-checked locking on the
	 * volatile parentFileSystem field).
	 *
	 * @return the file system object of the parent
	 *
	 * @throws org.sleuthkit.datamodel.TskCoreException
	 */
	@SuppressWarnings("deprecation")
	public FileSystem getFileSystem() throws TskCoreException {
		if (parentFileSystem == null) {
			synchronized (this) {
				if (parentFileSystem == null) {
					parentFileSystem = getSleuthkitCase().getFileSystemById(fsObjId, AbstractContent.UNKNOWN_ID);
				}
			}
		}
		return parentFileSystem;
	}

	/**
	 * Opens a JNI file handle for this file or directory.
	 *
	 * @throws TskCoreException if there is a problem opening the handle.
*/ @SuppressWarnings("deprecation") void loadFileHandle() throws TskCoreException { if (fileHandle == 0) { synchronized (this) { if (fileHandle == 0) { fileHandle = SleuthkitJNI.openFile(getFileSystem().getFileSystemHandle(), metaAddr, attrType, attrId, getSleuthkitCase()); } } } } /** * Gets the JNI file handle for this file or directory, zero if the file has * not been opened by calling loadHandle. * * @return The JNI file handle. */ @SuppressWarnings("deprecation") long getFileHandle() { return fileHandle; } /** * Reads bytes from this file or directory. * * @param buf Buffer to read into. * @param offset Start position in the file. * @param len Number of bytes to read. * * @return Number of bytes read. * * @throws TskCoreException if there is a problem reading the file. */ @Override @SuppressWarnings("deprecation") protected synchronized int readInt(byte[] buf, long offset, long len) throws TskCoreException { if (offset == 0 && size == 0) { //special case for 0-size file return 0; } loadFileHandle(); return SleuthkitJNI.readFile(fileHandle, buf, offset, len); } @Override public boolean isRoot() { try { FileSystem fs = getFileSystem(); return fs.getRoot_inum() == this.getMetaAddr(); } catch (TskCoreException ex) { logger.log(Level.SEVERE, "Exception while calling 'getFileSystem' on " + this, ex); //NON-NLS return false; } } /** * Gets the parent directory of this file or directory. * * @return The parent directory or null if there isn't one * * @throws TskCoreException if there was an error querying the case * database. */ public AbstractFile getParentDirectory() throws TskCoreException { return getSleuthkitCase().getParentDirectory(this); } /** * Gets the data source (image) for this file or directory directory. * * @return The data source. * * @throws TskCoreException if there is an error querying the case database. 
*/ @Override public Content getDataSource() throws TskCoreException { return getFileSystem().getDataSource(); } /** * Get the full path to this file or directory, starting with a "/" and the * image name and then all the other segments in the path. * * @return A unique path for this object. * * @throws TskCoreException if there is an error querying the case database. */ @Override public String getUniquePath() throws TskCoreException { // It is possible that multiple threads could be doing this calculation // simultaneously, but it's worth the potential extra processing to prevent deadlocks. if (uniquePath == null) { StringBuilder sb = new StringBuilder(); sb.append(getFileSystem().getUniquePath()); sb.append(getParentPath()); sb.append(getName()); uniquePath = sb.toString(); } return uniquePath; } /** * Gets a text-based description of the file's metadata. This is the same * content as the TSK istat tool produces and is different information for * each type of file system. * * @return List of text, one element per line. * * @throws TskCoreException */ public synchronized List getMetaDataText() throws TskCoreException { if (metaDataText != null) { return metaDataText; } // if there is no metadata for this file, return empty string if (metaAddr == 0) { metaDataText = new ArrayList(); metaDataText.add(""); return metaDataText; } loadFileHandle(); metaDataText = SleuthkitJNI.getFileMetaDataText(fileHandle); return metaDataText; } /** * Closes the JNI file handle for this file or directory. */ @Override @SuppressWarnings("deprecation") public synchronized void close() { if (fileHandle != 0) { SleuthkitJNI.closeFile(fileHandle); fileHandle = 0; } } /** * Closes the JNI file handle for this file or directory when the FsContent * object is garbage-collected. */ @Override public void finalize() throws Throwable { try { close(); } finally { super.finalize(); } } /** * Provides a string representation of this file or directory. 
* * @param preserveState True if state should be included in the string * representation of this object. */ @Override @SuppressWarnings("deprecation") public String toString(boolean preserveState) { return super.toString(preserveState) + "FsContent [\t" //NON-NLS + "fsObjId " + fsObjId //NON-NLS + "\t" + "uniquePath " + uniquePath //NON-NLS + "\t" + "fileHandle " + fileHandle //NON-NLS + "]\t"; } /** * Constructs an abstract base class for representations of a file system * files or directories that have been added to a case. * * @param db The case database to which the file has been added. * @param objId The object id of the file in the case database. * @param fsObjId The object id of the file system to which this file * belongs. * @param attrType The type attribute given to the file by the file * system. * @param attrId The type id given to the file by the file system. * @param name The name of the file. * @param metaAddr The meta address of the file. * @param metaSeq The meta sequence number of the file. * @param dirType The type of the file, usually as reported in the name * structure of the file system. May be set to * TSK_FS_NAME_TYPE_ENUM.UNDEF. * @param metaType The type of the file, usually as reported in the * metadata structure of the file system. May be set to * TSK_FS_META_TYPE_ENUM.TSK_FS_META_TYPE_UNDEF. * @param dirFlag The allocated status of the file, usually as reported * in the name structure of the file system. * @param metaFlags The allocated status of the file, usually as reported * in the metadata structure of the file system. * @param size The size of the file. * @param ctime The changed time of the file. * @param crtime The created time of the file. * @param atime The accessed time of the file. * @param mtime The modified time of the file. * @param modes The modes for the file. * @param uid The UID for the file. * @param gid The GID for the file. * @param md5Hash The MD5 hash of the file, null if not yet calculated. 
* @param knownState The known state of the file from a hash database * lookup, null if not yet looked up. * @param parentPath The path of the parent of the file. * * @deprecated Do not make subclasses outside of this package. */ @Deprecated @SuppressWarnings("deprecation") FsContent(SleuthkitCase db, long objId, long fsObjId, TSK_FS_ATTR_TYPE_ENUM attrType, short attrId, String name, long metaAddr, int metaSeq, TSK_FS_NAME_TYPE_ENUM dirType, TSK_FS_META_TYPE_ENUM metaType, TSK_FS_NAME_FLAG_ENUM dirFlag, short metaFlags, long size, long ctime, long crtime, long atime, long mtime, short modes, int uid, int gid, String md5Hash, FileKnown knownState, String parentPath) { this(db, objId, db.getDataSourceObjectId(objId), fsObjId, attrType, (int) attrId, name, TSK_DB_FILES_TYPE_ENUM.FS, metaAddr, metaSeq, dirType, metaType, dirFlag, metaFlags, size, ctime, crtime, atime, mtime, modes, uid, gid, md5Hash, null, knownState, parentPath, null, null, OsAccount.NO_OWNER_ID, OsAccount.NO_ACCOUNT, Collections.emptyList() ); } /** * Constructs an abstract base class for representations of a file system * files or directories that have been added to a case. This deprecated * version has attrId filed defined as a short which has since been changed * to an int. * * @param db The case database to which the file has been * added. * @param objId The object id of the file in the case database. * @param dataSourceObjectId The object id of the data source for the file. * @param fsObjId The object id of the file system to which this * file belongs. * @param attrType The type attribute given to the file by the * file system. * @param attrId The type id given to the file by the file * system. * @param name The name of the file. * @param metaAddr The meta address of the file. * @param metaSeq The meta sequence number of the file. * @param dirType The type of the file, usually as reported in * the name structure of the file system. May be * set to TSK_FS_NAME_TYPE_ENUM.UNDEF. 
* @param metaType The type of the file, usually as reported in * the metadata structure of the file system. May * be set to * TSK_FS_META_TYPE_ENUM.TSK_FS_META_TYPE_UNDEF. * @param dirFlag The allocated status of the file, usually as * reported in the name structure of the file * system. * @param metaFlags The allocated status of the file, usually as * reported in the metadata structure of the file * system. * @param size The size of the file. * @param ctime The changed time of the file. * @param crtime The created time of the file. * @param atime The accessed time of the file. * @param mtime The modified time of the file. * @param modes The modes for the file. * @param uid The UID for the file. * @param gid The GID for the file. * @param md5Hash The MD5 hash of the file, null if not yet * calculated. * @param knownState The known state of the file from a hash * database lookup, null if not yet looked up. * @param parentPath The path of the parent of the file. * @param mimeType The MIME type of the file, null if it has not * yet been determined. * * @deprecated Do not make subclasses outside of this package. 
*/ @Deprecated @SuppressWarnings("deprecation") FsContent(SleuthkitCase db, long objId, long dataSourceObjectId, long fsObjId, TSK_FS_ATTR_TYPE_ENUM attrType, short attrId, String name, long metaAddr, int metaSeq, TSK_FS_NAME_TYPE_ENUM dirType, TSK_FS_META_TYPE_ENUM metaType, TSK_FS_NAME_FLAG_ENUM dirFlag, short metaFlags, long size, long ctime, long crtime, long atime, long mtime, short modes, int uid, int gid, String md5Hash, FileKnown knownState, String parentPath, String mimeType) { this(db, objId, dataSourceObjectId, fsObjId, attrType, (int) attrId, name, TSK_DB_FILES_TYPE_ENUM.FS, metaAddr, metaSeq, dirType, metaType, dirFlag, metaFlags, size, ctime, crtime, atime, mtime, modes, uid, gid, md5Hash, null, knownState, parentPath, mimeType, null, OsAccount.NO_OWNER_ID, OsAccount.NO_ACCOUNT, Collections.emptyList()); } } sleuthkit-4.11.1/bindings/java/src/org/sleuthkit/datamodel/TskData.java000644 000765 000024 00000067735 14137073413 026671 0ustar00carrierstaff000000 000000 /* * Sleuth Kit Data Model * * Copyright 2011 Basis Technology Corp. * Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.sleuthkit.datamodel; import java.text.MessageFormat; import java.util.ResourceBundle; import java.util.EnumSet; import java.util.Set; /** * Contains enums for the integer values stored in the database and returned by * the various data model objects. 
*/ public class TskData { private final static ResourceBundle bundle = ResourceBundle.getBundle("org.sleuthkit.datamodel.Bundle"); /** * The type of the file system file, as reported in the name structure of * the file system. This is the dir_type column in the tsk_files table. */ public enum TSK_FS_NAME_TYPE_ENUM { UNDEF(0, "-"), ///< Unknown type FIFO(1, "p"), ///< Named pipe NON-NLS CHR(2, "c"), ///< Character device NON-NLS DIR(3, "d"), ///< Directory NON-NLS BLK(4, "b"), ///< Block device NON-NLS REG(5, "r"), ///< Regular file NON-NLS LNK(6, "l"), ///< Symbolic link NON-NLS SOCK(7, "s"), ///< Socket NON-NLS SHAD(8, "h"), ///< Shadow inode (solaris) NON-NLS WHT(9, "w"), ///< Whiteout (openbsd) NON-NLS VIRT(10, "v"), ///< Special (TSK added "Virtual" files) NON-NLS VIRT_DIR(11, "V"); ///< Special (TSK added "Virtual" directories) NON-NLS private short dirType; String label; private TSK_FS_NAME_TYPE_ENUM(int type, String label) { this.dirType = (short) type; this.label = label; } /** * Get dir type * * @return the dir type long value */ public short getValue() { return dirType; } /** * Get the label string * * @return the label string value */ public String getLabel() { return this.label; } /** * Convert to the enum type from the short value * * @param dir_type enum type value to convert * * @return converted long value */ static public TSK_FS_NAME_TYPE_ENUM valueOf(short dir_type) { for (TSK_FS_NAME_TYPE_ENUM v : TSK_FS_NAME_TYPE_ENUM.values()) { if (v.dirType == dir_type) { return v; } } throw new IllegalArgumentException( MessageFormat.format(bundle.getString("TskData.tskFsNameTypeEnum.exception.msg1.text"), dir_type)); } } /** * The type of the file system file, as reported in the metadata structure * of the file system. This is the meta_type column in the tsk_files table. 
*/ public enum TSK_FS_META_TYPE_ENUM { TSK_FS_META_TYPE_UNDEF(0, "-"), TSK_FS_META_TYPE_REG(1, "r"), ///< Regular file NON-NLS TSK_FS_META_TYPE_DIR(2, "d"), ///< Directory file NON-NLS TSK_FS_META_TYPE_FIFO(3, "p"), ///< Named pipe (fifo) NON-NLS TSK_FS_META_TYPE_CHR(4, "c"), ///< Character device NON-NLS TSK_FS_META_TYPE_BLK(5, "b"), ///< Block device NON-NLS TSK_FS_META_TYPE_LNK(6, "l"), ///< Symbolic link NON-NLS TSK_FS_META_TYPE_SHAD(7, "s"), ///< SOLARIS ONLY NON-NLS TSK_FS_META_TYPE_SOCK(8, "h"), ///< UNIX domain socket NON-NLS TSK_FS_META_TYPE_WHT(9, "w"), ///< Whiteout NON-NLS TSK_FS_META_TYPE_VIRT(10, "v"), ///< "Virtual File" created by TSK for file system areas NON-NLS TSK_FS_META_TYPE_VIRT_DIR(11, "v"); ///< "Virtual Directory" created by TSK for Orphan Files NON-NLS private short metaType; private String metaTypeStr; private TSK_FS_META_TYPE_ENUM(int type, String metaTypeStr) { this.metaType = (short) type; this.metaTypeStr = metaTypeStr; } /** * Get meta type short value * * @return the meta type long value */ public short getValue() { return metaType; } @Override public String toString() { return metaTypeStr; } public static TSK_FS_META_TYPE_ENUM valueOf(short metaType) { for (TSK_FS_META_TYPE_ENUM type : TSK_FS_META_TYPE_ENUM.values()) { if (type.getValue() == metaType) { return type; } } throw new IllegalArgumentException( MessageFormat.format(bundle.getString("TskData.tskFsMetaTypeEnum.exception.msg1.text"), metaType)); } } /** * The allocated status of a file system file, as reported in the name * structure of the file system. This is the dir_flags column in the * tsk_files table. 
*/ public enum TSK_FS_NAME_FLAG_ENUM { ALLOC(1, bundle.getString("TskData.tskFsNameFlagEnum.allocated")), ///< Name is in an allocated state UNALLOC(2, bundle.getString("TskData.tskFsNameFlagEnum.unallocated")); ///< Name is in an unallocated state private short dirFlag; private String dirFlagStr; private TSK_FS_NAME_FLAG_ENUM(int flag, String dirFlagStr) { this.dirFlag = (short) flag; this.dirFlagStr = dirFlagStr; } /** * Get short value of the flag * * @return the long flag value */ public short getValue() { return dirFlag; } @Override public String toString() { return dirFlagStr; } /** * Convert dirFlag int value to the enum type * * @param dirFlag int value to convert * * @return the enum type corresponding to dirFlag */ public static TSK_FS_NAME_FLAG_ENUM valueOf(int dirFlag) { for (TSK_FS_NAME_FLAG_ENUM flag : TSK_FS_NAME_FLAG_ENUM.values()) { if (flag.dirFlag == dirFlag) { return flag; } } throw new IllegalArgumentException( MessageFormat.format(bundle.getString("TskData.tskFsNameFlagEnum.exception.msg1.text"), dirFlag)); } } /** * The allocated status of the file system file, as reported in the metadata * structure of the file system. This is the meta_flags column in the * tsk_files table. */ public enum TSK_FS_META_FLAG_ENUM { ALLOC(1, bundle.getString("TskData.tskFsMetaFlagEnum.allocated")), ///< Metadata structure is currently in an allocated state UNALLOC(2, bundle.getString("TskData.tskFsMetaFlagEnum.unallocated")), ///< Metadata structure is currently in an unallocated state USED(4, bundle.getString("TskData.tskFsMetaFlagEnum.used")), ///< Metadata structure has been allocated at least once UNUSED(8, bundle.getString("TskData.tskFsMetaFlagEnum.unused")), ///< Metadata structure has never been allocated. COMP(16, bundle.getString("TskData.tskFsMetaFlagEnum.compressed")), ///< The file contents are compressed. 
ORPHAN(32, bundle.getString("TskData.tskFsMetaFlagEnum.orphan")); ///< Return only metadata structures that have no file name pointing to the (inode_walk flag only) private short meta_flag; private String label; private TSK_FS_META_FLAG_ENUM(int flag, String label) { this.meta_flag = (short) flag; this.label = label; } /** * Get meta flags short value * * @return the long value of meta flags */ public short getValue() { return meta_flag; } /** * Get string label of the metal flags * * @return string meta flags label */ @Override public String toString() { return label; } /** * Returns all the enum elements that match the flags in metaFlag * * @param metaFlags Flags to convert to Enums. * * @return matching TSK_FS_META_FLAG_ENUM elements */ public static Set valuesOf(short metaFlags) { Set matchedFlags = EnumSet.noneOf(TSK_FS_META_FLAG_ENUM.class); for (TSK_FS_META_FLAG_ENUM v : TSK_FS_META_FLAG_ENUM.values()) { long flag = v.getValue(); if ((metaFlags & flag) == flag) { matchedFlags.add(v); } } return matchedFlags; } public static short toInt(Set metaFlags) { short val = 0; for (TSK_FS_META_FLAG_ENUM flag : metaFlags) { val |= flag.getValue(); } return val; } } /** * Type of data that is stored in the attribute for a file system file. This * is the attr_type column in the tsk_files table. 
*/ public enum TSK_FS_ATTR_TYPE_ENUM { TSK_FS_ATTR_TYPE_NOT_FOUND(0x00), // 0 TSK_FS_ATTR_TYPE_DEFAULT(0x01), // 1 TSK_FS_ATTR_TYPE_NTFS_SI(0x10), // 16 TSK_FS_ATTR_TYPE_NTFS_ATTRLIST(0x20), // 32 TSK_FS_ATTR_TYPE_NTFS_FNAME(0x30), // 48 TSK_FS_ATTR_TYPE_NTFS_VVER(0x40), // 64 (NT) TSK_FS_ATTR_TYPE_NTFS_OBJID(0x40), // 64 (2K) TSK_FS_ATTR_TYPE_NTFS_SEC(0x50), // 80 TSK_FS_ATTR_TYPE_NTFS_VNAME(0x60), // 96 TSK_FS_ATTR_TYPE_NTFS_VINFO(0x70), // 112 TSK_FS_ATTR_TYPE_NTFS_DATA(0x80), // 128 TSK_FS_ATTR_TYPE_NTFS_IDXROOT(0x90), // 144 TSK_FS_ATTR_TYPE_NTFS_IDXALLOC(0xA0), // 160 TSK_FS_ATTR_TYPE_NTFS_BITMAP(0xB0), // 176 TSK_FS_ATTR_TYPE_NTFS_SYMLNK(0xC0), // 192 (NT) TSK_FS_ATTR_TYPE_NTFS_REPARSE(0xC0), // 192 (2K) TSK_FS_ATTR_TYPE_NTFS_EAINFO(0xD0), // 208 TSK_FS_ATTR_TYPE_NTFS_EA(0xE0), // 224 TSK_FS_ATTR_TYPE_NTFS_PROP(0xF0), // (NT) TSK_FS_ATTR_TYPE_NTFS_LOG(0x100), // (2K) TSK_FS_ATTR_TYPE_UNIX_INDIR(0x1001), // Indirect blocks for UFS and ExtX file systems // Types for HFS+ File Attributes TSK_FS_ATTR_TYPE_HFS_DEFAULT(0x01), // 1 Data fork of fs special files and misc TSK_FS_ATTR_TYPE_HFS_DATA(0x1100), // 4352 Data fork of regular files TSK_FS_ATTR_TYPE_HFS_RSRC(0x1101), // 4353 Resource fork of regular files TSK_FS_ATTR_TYPE_HFS_EXT_ATTR(0x1102), // 4354 Extended Attributes) except compression records TSK_FS_ATTR_TYPE_HFS_COMP_REC(0x1103); // 4355 Compression records private int val; private TSK_FS_ATTR_TYPE_ENUM(int val) { this.val = val; } public int getValue() { return val; } public static TSK_FS_ATTR_TYPE_ENUM valueOf(int val) { for (TSK_FS_ATTR_TYPE_ENUM type : TSK_FS_ATTR_TYPE_ENUM.values()) { if (type.val == val) { return type; } } throw new IllegalArgumentException( MessageFormat.format(bundle.getString("TskData.tskFsAttrTypeEnum.exception.msg1.text"), val)); } }; /** * Flags for a partition in the disk image. This is the flags column in the * tsk_vs_parts table. 
*/ public enum TSK_VS_PART_FLAG_ENUM { TSK_VS_PART_FLAG_ALLOC(1), ///< Sectors are allocated to a volume in the volume system TSK_VS_PART_FLAG_UNALLOC(2), ///< Sectors are not allocated to a volume TSK_VS_PART_FLAG_META(4), ///< Sectors contain volume system metadata and could also be ALLOC or UNALLOC TSK_VS_PART_FLAG_ALL(7); ///< Show all sectors in the walk. private long vs_flag; private TSK_VS_PART_FLAG_ENUM(long flag) { vs_flag = flag; } /** * Get long value of the vs flag * * @return the long value of the flag */ public long getVsFlag() { return vs_flag; } } /** * The permissions of a file system file. This is the mode column in the * tsk_files table. */ public enum TSK_FS_META_MODE_ENUM { /* * The following describe the file permissions */ TSK_FS_META_MODE_ISUID(0004000), ///< set user id on execution TSK_FS_META_MODE_ISGID(0002000), ///< set group id on execution TSK_FS_META_MODE_ISVTX(0001000), ///< sticky bit TSK_FS_META_MODE_IRUSR(0000400), ///< R for owner TSK_FS_META_MODE_IWUSR(0000200), ///< W for owner TSK_FS_META_MODE_IXUSR(0000100), ///< X for owner TSK_FS_META_MODE_IRGRP(0000040), ///< R for group TSK_FS_META_MODE_IWGRP(0000020), ///< W for group TSK_FS_META_MODE_IXGRP(0000010), ///< X for group TSK_FS_META_MODE_IROTH(0000004), ///< R for other TSK_FS_META_MODE_IWOTH(0000002), ///< W for other TSK_FS_META_MODE_IXOTH(0000001); ///< X for other private short mode; private TSK_FS_META_MODE_ENUM(int mode) { this.mode = (short) mode; } /** * Get short value of the meta mode * * @return the long value of the meta mode */ public short getMode() { return mode; } /** * Returns all the TSK_FS_META_MODE_ENUM enum elements that match the * given modes * * @param modes * * @return matching TSK_FS_META_MODE_ENUM elements */ public static Set valuesOf(short modes) { Set matchedFlags = EnumSet.noneOf(TSK_FS_META_MODE_ENUM.class); for (TSK_FS_META_MODE_ENUM v : TSK_FS_META_MODE_ENUM.values()) { long flag = v.getMode(); if ((modes & flag) == flag) { 
matchedFlags.add(v); } } return matchedFlags; } /** * @param modes the set of modes to convert * * @return the short int representing the given set of modes */ public static short toInt(Set modes) { short modesInt = 0; for (TSK_FS_META_MODE_ENUM mode : modes) { modesInt |= mode.getMode(); } return modesInt; } }; /** * The type of the file system. This is the fs_type column in the * tsk_fs_info table. */ public enum TSK_FS_TYPE_ENUM { TSK_FS_TYPE_DETECT(0x00000000, bundle.getString("TskData.tskFsTypeEnum.autoDetect")), ///< Use autodetection methods TSK_FS_TYPE_NTFS(0x00000001, "NTFS"), ///< NTFS file system TSK_FS_TYPE_NTFS_DETECT(0x00000001, bundle.getString("TskData.tskFsTypeEnum.NTFSautoDetect")), ///< NTFS auto detection TSK_FS_TYPE_FAT12(0x00000002, "FAT12"), ///< FAT12 file system TSK_FS_TYPE_FAT16(0x00000004, "FAT16"), ///< FAT16 file system TSK_FS_TYPE_FAT32(0x00000008, "FAT32"), ///< FAT32 file system TSK_FS_TYPE_EXFAT(0x0000000A, "ExFAT"), ///< ExFAT file system TSK_FS_TYPE_FAT_DETECT(0x0000000e, bundle.getString("TskData.tskFsTypeEnum.FATautoDetect")), ///< FAT auto detection TSK_FS_TYPE_FFS1(0x00000010, "UFS1"), ///< UFS1 (FreeBSD, OpenBSD, BSDI ...) 
TSK_FS_TYPE_FFS1B(0x00000020, "UFS1b"), ///< UFS1b (Solaris - has no type) TSK_FS_TYPE_FFS2(0x00000040, "UFS2"), ///< UFS2 - FreeBSD, NetBSD TSK_FS_TYPE_FFS_DETECT(0x00000070, "UFS"), ///< UFS auto detection TSK_FS_TYPE_EXT2(0x00000080, "Ext2"), ///< Ext2 file system TSK_FS_TYPE_EXT3(0x00000100, "Ext3"), ///< Ext3 file system TSK_FS_TYPE_EXT_DETECT(0x00000180, bundle.getString("TskData.tskFsTypeEnum.ExtXautoDetect")), ///< ExtX auto detection TSK_FS_TYPE_SWAP(0x00000200, "SWAP"), ///< SWAP file system TSK_FS_TYPE_SWAP_DETECT(0x00000200, bundle.getString("TskData.tskFsTypeEnum.SWAPautoDetect")), ///< SWAP auto detection TSK_FS_TYPE_RAW(0x00000400, "RAW"), ///< RAW file system TSK_FS_TYPE_RAW_DETECT(0x00000400, bundle.getString("TskData.tskFsTypeEnum.RAWautoDetect")), ///< RAW auto detection TSK_FS_TYPE_ISO9660(0x00000800, "ISO9660"), ///< ISO9660 file system TSK_FS_TYPE_ISO9660_DETECT(0x00000800, bundle.getString("TskData.tskFsTypeEnum.ISO9660autoDetect")), ///< ISO9660 auto detection TSK_FS_TYPE_HFS(0x00001000, "HFS"), ///< HFS file system TSK_FS_TYPE_HFS_DETECT(0x00001000, bundle.getString("TskData.tskFsTypeEnum.HFSautoDetect")), ///< HFS auto detection TSK_FS_TYPE_EXT4(0x00002000, "Ext4"), ///< Ext4 file system TSK_FS_TYPE_YAFFS2(0x00004000, "YAFFS2"), ///< YAFFS2 file system TSK_FS_TYPE_YAFFS2_DETECT(0x00004000, bundle.getString("TskData.tskFsTypeEnum.YAFFS2autoDetect")), ///< YAFFS2 auto detection TSK_FS_TYPE_APFS(0x00010000, "APFS"), ///< APFS file system TSK_FS_TYPE_APFS_DETECT(0x00010000, bundle.getString("TskData.tskFsTypeEnum.APFSautoDetect")), ///< APFS auto detection TSK_FS_TYPE_UNSUPP(0xffffffff, bundle.getString("TskData.tskFsTypeEnum.unsupported")); ///< Unsupported file system private int value; private String displayName; private TSK_FS_TYPE_ENUM(int value, String displayName) { this.value = value; this.displayName = displayName; } /** * get the value for the enum type * * @return int value for the enum type */ public int getValue() { return value; 
} /** * Get display name of the enum * * @return the displayName */ public String getDisplayName() { return displayName; } /** * Convert fs type int value to the enum type - get the first matching * enum type * * @param fsTypeValue int value to convert * * @return the enum type - first enum type matching the fsTypeValue */ public static TSK_FS_TYPE_ENUM valueOf(int fsTypeValue) { for (TSK_FS_TYPE_ENUM type : TSK_FS_TYPE_ENUM.values()) { if (type.value == fsTypeValue) { return type; } } throw new IllegalArgumentException( MessageFormat.format(bundle.getString("TskData.tskFsTypeEnum.exception.msg1.text"), fsTypeValue)); } }; /** * The type of the disk image. This is the types column in the * tsk_images_info table. */ public enum TSK_IMG_TYPE_ENUM { TSK_IMG_TYPE_DETECT(0, bundle.getString("TskData.tskImgTypeEnum.autoDetect")), // Auto Detection TSK_IMG_TYPE_RAW_SING(1, bundle.getString("TskData.tskImgTypeEnum.rawSingle")), // Single raw file (dd) TSK_IMG_TYPE_RAW_SPLIT(2, bundle.getString("TskData.tskImgTypeEnum.rawSplit")), // Split raw files TSK_IMG_TYPE_AFF_AFF(4, "AFF"), // Advanced Forensic Format NON-NLS TSK_IMG_TYPE_AFF_AFD(8, "AFD"), // AFF Multiple File NON-NLS TSK_IMG_TYPE_AFF_AFM(16, "AFM"), // AFF with external metadata NON-NLS TSK_IMG_TYPE_AFF_ANY(32, "AFF"), // All AFFLIB image formats (including beta ones) NON-NLS TSK_IMG_TYPE_EWF_EWF(64, "E01"), // Expert Witness format (encase) NON-NLS TSK_IMG_TYPE_VMDK_VMDK(128, "VMDK"), // VMware Virtual Disk (VMDK) NON-NLS TSK_IMG_TYPE_VHD_VHD(256, "VHD"), // Virtual Hard Disk (VHD) image format NON-NLS TSK_IMG_TYPE_POOL_POOL(16384, "POOL"), // Pool (internal use) NON-NLS TSK_IMG_TYPE_UNSUPP(65535, bundle.getString("TskData.tskImgTypeEnum.unknown")); // Unsupported Image Type private long imgType; private String name; private TSK_IMG_TYPE_ENUM(long type, String name) { this.imgType = type; this.name = name; } public static TSK_IMG_TYPE_ENUM valueOf(long imgType) { for (TSK_IMG_TYPE_ENUM type : 
TSK_IMG_TYPE_ENUM.values()) { if (type.getValue() == imgType) { return type; } } throw new IllegalArgumentException( MessageFormat.format(bundle.getString("TskData.tskImgTypeEnum.exception.msg1.text"), imgType)); } /** * Get long value of the image type * * @return the long value of the image type */ public long getValue() { return imgType; } /** * Get the name of the image type * * @return */ public String getName() { return name; } }; /** * The type of the partition in the partition table. This is the flags * column in the tsk_vs_parts table. */ public enum TSK_VS_TYPE_ENUM { TSK_VS_TYPE_DETECT(0x0000, bundle.getString("TskData.tskVSTypeEnum.autoDetect")), ///< Use autodetection methods TSK_VS_TYPE_DOS(0x0001, "DOS"), ///< DOS Partition table NON-NLS TSK_VS_TYPE_BSD(0x0002, "BSD"), ///< BSD Partition table NON-NLS TSK_VS_TYPE_SUN(0x0004, "SUN VTOC"), ///< Sun VTOC NON-NLS TSK_VS_TYPE_MAC(0x0008, "Mac"), ///< Mac partition table NON-NLS TSK_VS_TYPE_GPT(0x0010, "GPT"), ///< GPT partition table NON-NLS TSK_VS_TYPE_APFS(0x0020, "APFS"), ///< APFS pool NON-NLS TSK_VS_TYPE_DBFILLER(0x00F0, bundle.getString("TskData.tskVSTypeEnum.fake")), ///< fake partition table type for loaddb (for images that do not have a volume system) TSK_VS_TYPE_UNSUPP(0xFFFF, bundle.getString("TskData.tskVSTypeEnum.unsupported")); ///< Unsupported private long vsType; private String name; private TSK_VS_TYPE_ENUM(long type, String name) { this.vsType = type; this.name = name; } public static TSK_VS_TYPE_ENUM valueOf(long vsType) { for (TSK_VS_TYPE_ENUM type : TSK_VS_TYPE_ENUM.values()) { if (type.getVsType() == vsType) { return type; } } throw new IllegalArgumentException( MessageFormat.format(bundle.getString("TskData.tskVSTypeEnum.exception.msg1.text"), vsType)); } /** * Get long value of the vs type * * @return the long value of the vs type */ public long getVsType() { return vsType; } /** * Get the name of the volume system type. 
* * @return */ public String getName() { return name; } }; /** * High-level type of an object from the database. This is the type column * in the tsk_objects table. */ public enum ObjectType { IMG(0, bundle.getString("TskData.ObjectType.IMG.name")), ///< Disk Image - see tsk_image_info for more details VS(1, bundle.getString("TskData.ObjectType.VS.name")), ///< Volume System - see tsk_vs_info for more details VOL(2, bundle.getString("TskData.ObjectType.VOL.name")), ///< Volume - see tsk_vs_parts for more details FS(3, bundle.getString("TskData.ObjectType.FS.name")), ///< File System - see tsk_fs_info for more details ABSTRACTFILE(4, bundle.getString("TskData.ObjectType.AbstractFile.name")), ///< File - see tsk_files for more details ARTIFACT(5, bundle.getString("TskData.ObjectType.Artifact.name")), /// Artifact - see blackboard_artifacts for more details REPORT(6, bundle.getString("TskData.ObjectType.Report.name")), ///< Report - see reports for more details POOL(7, bundle.getString("TskData.ObjectType.Pool.name")), ///< Pool OS_ACCOUNT(8, bundle.getString("TskData.ObjectType.OsAccount.name")), ///< OS Account - see tsk_os_accounts for more details HOST_ADDRESS(9, bundle.getString("TskData.ObjectType.HostAddress.name")), ///< Host Address - see tsk_host_addresses for more details UNSUPPORTED(-1, bundle.getString("TskData.ObjectType.Unsupported.name")) ///< Unsupported type ; private final short objectType; private final String displayName; private ObjectType(int objectType, String displayName) { this.objectType = (short) objectType; this.displayName = displayName; } /** * Get short value of the object type * * @return the long value of the object type */ public short getObjectType() { return objectType; } @Override public String toString() { return displayName; } /** * Convert object type short value to the enum type * * @param objectType long value to convert * * @return the enum type */ public static ObjectType valueOf(short objectType) { for (ObjectType v : 
ObjectType.values()) { if (v.objectType == objectType) { return v; } } return UNSUPPORTED; } } /** * The type of file in a database, such as file system versus local file. * This is the type field in the tsk_files table. */ public enum TSK_DB_FILES_TYPE_ENUM { FS(0, "File System"), ///< File that can be found in file system tree. CARVED(1, "Carved"), ///< Set of blocks for a file found from carving. Could be on top of a TSK_DB_FILES_TYPE_UNALLOC_BLOCKS range. DERIVED(2, "Derived"), ///< File derived from a parent file (i.e. from ZIP) LOCAL(3, "Local"), ///< Local file that was added (not from a disk image) UNALLOC_BLOCKS(4, "Unallocated Blocks"), ///< Set of blocks not allocated by file system. Parent should be image, volume, or file system. Many columns in tsk_files will be NULL. Set layout in tsk_file_layout. UNUSED_BLOCKS(5, "Unused Blocks"), ///< Set of blocks that are unallocated AND not used by a carved or other file type. Parent should be UNALLOC_BLOCKS, many columns in tsk_files will be NULL, set layout in tsk_file_layout. VIRTUAL_DIR(6, "Virtual Directory"), ///< Virtual directory (not on fs) with no meta-data entry that can be used to group files of types other than TSK_DB_FILES_TYPE_FS. Its parent is either another TSK_DB_FILES_TYPE_FS or a root directory or type TSK_DB_FILES_TYPE_FS. 
SLACK(7, "Slack"), ///< Slack space for a single file LOCAL_DIR(8, "Local Directory"), ///< Local directory that was added (not from a disk image) LAYOUT_FILE(9, "Layout File"), ///< Set of blocks from an image that have been designated as a file ; private final short fileType; private final String name; private TSK_DB_FILES_TYPE_ENUM(int fileType, String name) { this.fileType = (short) fileType; this.name = name; } /** * Convert db files type short value to the enum type * * @param fileType long value to convert * * @return the enum type */ public static TSK_DB_FILES_TYPE_ENUM valueOf(short fileType) { for (TSK_DB_FILES_TYPE_ENUM type : TSK_DB_FILES_TYPE_ENUM.values()) { if (type.fileType == fileType) { return type; } } throw new IllegalArgumentException( MessageFormat.format(bundle.getString("TskData.tskDbFilesTypeEnum.exception.msg1.text"), fileType)); } /** * Get short value of the file type * * @return the long value of the file type */ public short getFileType() { return fileType; } public String getName() { return name; } } /** * The type of pool in a database. * This is the pool_type field in the tsk_pool_info table. 
*/ public enum TSK_POOL_TYPE_ENUM { TSK_POOL_TYPE_DETECT(0, "Auto detect"), ///< Use autodetection methods TSK_POOL_TYPE_APFS(1, "APFS Pool"), ///< APFS Pooled Volumes TSK_POOL_TYPE_UNSUPP(0xffff, "Unsupported") ///< Unsupported pool container type ; private final short poolType; private final String name; TSK_POOL_TYPE_ENUM(int poolType, String name) { this.poolType = (short) poolType; this.name = name; } /** * Convert db pool type short value to the enum type * * @param poolType long value to convert * * @return the enum type */ public static TSK_POOL_TYPE_ENUM valueOf(long poolType) { for (TSK_POOL_TYPE_ENUM type : TSK_POOL_TYPE_ENUM.values()) { if (type.poolType == poolType) { return type; } } throw new IllegalArgumentException( MessageFormat.format(bundle.getString("TskData.tskDbFilesTypeEnum.exception.msg1.text"), poolType)); // TODO } /** * Get short value of the file type * * @return the long value of the file type */ public short getValue() { return poolType; } public String getName() { return name; } } /** * Identifies if a file was in a hash database or not. This is the known * column in the tsk_files table. 
*/
public enum FileKnown {

	UNKNOWN(0, bundle.getString("TskData.fileKnown.unknown")), ///< File marked as unknown by hash db
	KNOWN(1, bundle.getString("TskData.fileKnown.known")), ///< File marked as a known by hash db
	BAD(2, bundle.getString("TskData.fileKnown.knownBad")); ///< File marked as known and bad/notable/interesting by hash db

	private byte known;  // value persisted in the known column
	private String name; // localized display name

	private FileKnown(int known, String name) {
		this.known = (byte) known;
		this.name = name;
	}

	/**
	 * Convert file known type byte value to the enum type.
	 *
	 * @param known byte value to convert
	 *
	 * @return the enum type
	 *
	 * @throws IllegalArgumentException if no known-status matches the value
	 */
	public static FileKnown valueOf(byte known) {
		for (FileKnown v : FileKnown.values()) {
			if (v.known == known) {
				return v;
			}
		}
		throw new IllegalArgumentException(
				MessageFormat.format(bundle.getString("TskData.fileKnown.exception.msg1.text"), known));
	}

	/**
	 * @return The localized display name of this known-status.
	 */
	public String getName() {
		return this.name;
	}

	/**
	 * Get byte value of the file known status.
	 *
	 * @return the byte value of the file known status
	 */
	public byte getFileKnownValue() {
		return this.known;
	}
}

/**
 * DbType is the enum covering database type. It tells you what underlying
 * database you can use in Autopsy and TSK.
 */
public enum DbType {
	// Add any additional remote database types here, and keep it in sync
	// with the Sleuthkit version of this enum located at:
	// sleuthkit/tsk/auto/db_connection_info.h
	// Be sure to add to settingsValid() if you add a type here.
	SQLITE(0),
	POSTGRESQL(1);

	private int value; // value persisted in the database / config

	DbType(int val) {
		this.value = val;
	}

	/**
	 * @return The integer value identifying this database type.
	 */
	public int getValue() {
		return this.value;
	}
}

/**
 * Encoding type records whether locally stored files have been encoded
 * or not, and the method used to do so. This is the encoding_type column
 * in the tsk_files_path table.
 * Files are encoded using EncodedFileOutputStream and are saved to the
 * database as derived files with the appropriate encoding type argument.
*/
public enum EncodingType{
	// Update EncodedFileUtil.java to handle any new types
	NONE(0),
	XOR1(1);

	private final int type; // value persisted in the encoding_type column

	private EncodingType(int type){
		this.type = type;
	}

	/**
	 * @return The integer value stored in the database for this encoding.
	 */
	public int getType(){
		return type;
	}

	/**
	 * Convert an encoding_type column value to the enum type.
	 *
	 * @param type value to convert
	 *
	 * @return the enum type
	 *
	 * @throws IllegalArgumentException if no encoding type matches the value
	 */
	public static EncodingType valueOf(int type) {
		for (EncodingType v : EncodingType.values()) {
			if (v.type == type) {
				return v;
			}
		}
		throw new IllegalArgumentException(
				MessageFormat.format(bundle.getString("TskData.encodingType.exception.msg1.text"), type));
	}
}
}
sleuthkit-4.11.1/bindings/java/src/org/sleuthkit/datamodel/TimeUtilities.java000644 000765 000024 00000005725 14137073413 030117 0ustar00carrierstaff000000 000000 /*
 * Sleuth Kit Data Model
 *
 * Copyright 2017 Basis Technology Corp.
 * Contact: carrier sleuthkit org
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *	 http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
*/
package org.sleuthkit.datamodel;

import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.TimeZone;
import java.util.logging.Level;
import java.util.logging.Logger;

/**
 * Time related utility methods
 *
 */
public class TimeUtilities {

	private static final Logger LOGGER = Logger.getLogger(TimeUtilities.class.getName());
	// Shared formatter; SimpleDateFormat is not thread-safe, so all use is
	// synchronized on this instance (see epochToTime(long, TimeZone)).
	private static final SimpleDateFormat DATE_FORMATTER = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss z");

	// Utility class; not instantiable.
	private TimeUtilities(){
	}

	/**
	 * Return the epoch into string in yyyy-MM-dd HH:mm:ss format
	 *
	 * @param epoch time in seconds
	 *
	 * @return formatted date time string as "yyyy-MM-dd HH:mm:ss", or the
	 *         sentinel "0000-00-00 00:00:00" when epoch is 0
	 */
	public static String epochToTime(long epoch) {
		String time = "0000-00-00 00:00:00";
		if (epoch != 0) {
			// Fresh formatter per call avoids locking; uses default time zone.
			time = new java.text.SimpleDateFormat("yyyy-MM-dd HH:mm:ss z").format(new java.util.Date(epoch * 1000));
		}
		return time;
	}

	/**
	 * Return the epoch into string in yyyy-MM-dd HH:mm:ss format,
	 * in the given timezone
	 *
	 * @param epoch time in seconds
	 * @param tzone time zone
	 *
	 * @return formatted date time string as "yyyy-MM-dd HH:mm:ss", or the
	 *         sentinel "0000-00-00 00:00:00" when epoch is 0
	 */
	public static String epochToTime(long epoch, TimeZone tzone) {
		String time = "0000-00-00 00:00:00";
		if (epoch != 0) {
			// Synchronized: SimpleDateFormat is not thread-safe and the
			// time zone is mutated before each format call.
			synchronized (DATE_FORMATTER) {
				DATE_FORMATTER.setTimeZone(tzone);
				time = DATE_FORMATTER.format(new java.util.Date(epoch * 1000));
			}
		}
		return time;
	}

	/**
	 * Return the epoch into string in ISO8601 format, in the given timezone.
	 *
	 * @param epoch time in seconds
	 * @param tzone time zone
	 *
	 * @return formatted date time string as "yyyy-MM-dd'T'HH:mm:ss'Z'"
	 */
	public static String epochToTimeISO8601(long epoch, TimeZone tzone) {
		SimpleDateFormat formatter = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss'Z'");
		formatter.setTimeZone(tzone);
		// NOTE(review): epoch is documented as seconds but is passed directly
		// to Date, whose constructor takes milliseconds — unlike the other
		// methods here, there is no "* 1000". Callers apparently pass
		// milliseconds; confirm before changing.
		return formatter.format(new Date(epoch));
	}

	/**
	 * Convert from ISO 8601 formatted date time string to epoch time in seconds
	 *
	 * @param time formatted date time string as "yyyy-MM-dd HH:mm:ss"
	 *
	 * @return epoch time in seconds, or 0 if the string cannot be parsed
	 */
	public static long timeToEpoch(String time) {
		long epoch = 0;
		try {
			epoch = new java.text.SimpleDateFormat("yyyy-MM-dd HH:mm:ss").parse(time).getTime() / 1000;
		} catch (Exception e) {
			// Best-effort: parse failures are logged and 0 is returned.
			LOGGER.log(Level.WARNING, "Failed to parse time string", e); //NON-NLS
		}
		return epoch;
	}
}
sleuthkit-4.11.1/bindings/java/src/org/sleuthkit/datamodel/Report.java000644 000765 000024 00000030467 14137073414 026602 0ustar00carrierstaff000000 000000 /*
 * Sleuth Kit Data Model
 *
 * Copyright 2014-2018 Basis Technology Corp.
 * Contact: carrier sleuthkit org
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *	 http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
*/
package org.sleuthkit.datamodel;

import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.channels.FileChannel;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import static java.nio.file.StandardOpenOption.READ;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.Set;
import java.util.logging.Level;
import java.util.logging.Logger;
import org.sleuthkit.datamodel.Blackboard.BlackboardException;
import org.sleuthkit.datamodel.SleuthkitCase.CaseDbTransaction;

/**
 * This is a class that models reports.
 */
public class Report implements Content {

	// Reports may only carry keyword-hit artifacts; this is the shared type.
	private static final BlackboardArtifact.Type KEYWORD_HIT_TYPE = new BlackboardArtifact.Type(BlackboardArtifact.ARTIFACT_TYPE.TSK_KEYWORD_HIT);
	static long ID_NOT_SET = -1;
	private long objectId = ID_NOT_SET;
	private final String pathAsString;
	private final Path pathAsPath; // NULL if path is for a URL
	private final long createdTime; // UNIX epoch seconds
	private final String sourceModuleName;
	private final String reportName;
	private Content parent; // The object from which the report was generated.
	private final SleuthkitCase db; // A reference to the database instance.
	private FileChannel fileChannel = null; // Used to read report content; opened lazily by read().
	private static final Logger LOGGER = Logger.getLogger(Report.class.getName());

	/**
	 * Create a Report instance.
	 *
	 * @param id          Primary key from associated row in the case database.
	 * @param path        Absolute path to report.
	 * @param createdTime Created time of report (in UNIX epoch time).
	 * @param reportName  May be empty
	 * @param parent      The parent/source of the Report.
*/
Report(SleuthkitCase db, long id, String path, long createdTime, String sourceModuleName, String reportName, Content parent) {
	this.db = db;
	this.objectId = id;
	this.pathAsString = path;
	// URL reports are kept as strings only; local reports also get a Path.
	if (path.startsWith("http")) {
		this.pathAsPath = null;
	} else {
		this.pathAsPath = Paths.get(path);
	}
	this.createdTime = createdTime;
	this.sourceModuleName = sourceModuleName;
	this.reportName = reportName;
	this.parent = parent;
}

@Override
public long getId() {
	return objectId;
}

/**
 * Get the absolute local path to the report.
 *
 * @return The path string (URL reports are returned verbatim).
 */
public String getPath() {
	return (pathAsPath != null ? pathAsPath.toString() : pathAsString);
}

/**
 * Get the creation date of the report.
 *
 * @return Number of seconds since Jan 1, 1970.
 */
public long getCreatedTime() {
	return createdTime;
}

/**
 * Get the name of the module (e.g., ingest module, reporting module) that
 * generated the report.
 *
 * @return The module name.
 */
public String getSourceModuleName() {
	return this.sourceModuleName;
}

/**
 * Get the report name, if any.
 *
 * @return The name of the report, possibly empty.
 */
public String getReportName() {
	return reportName;
}

@Override
public int read(byte[] buf, long offset, long len) throws TskCoreException {
	// URL reports and directories have no readable byte content.
	if (pathAsPath == null || Files.isDirectory(pathAsPath)) {
		return 0;
	}
	int totalBytesRead = 0;
	ByteBuffer data = ByteBuffer.wrap(buf);
	try {
		// Channel is opened lazily on first read and cached; released by close().
		if (fileChannel == null) {
			fileChannel = FileChannel.open(pathAsPath, READ);
		}
		fileChannel.position(offset);
		int bytesRead = 0;
		do {
			bytesRead = fileChannel.read(data);
			if (bytesRead != -1) {
				totalBytesRead += bytesRead;
			}
		} while (bytesRead != -1 && data.hasRemaining());
	} catch (IOException ex) {
		// Best-effort: log and return whatever was read before the failure.
		LOGGER.log(Level.SEVERE, "Failed to read report file.", ex);
	}
	return totalBytesRead;
}

@Override
public void close() {
	try {
		if (fileChannel != null) {
			fileChannel.close();
		}
	} catch (IOException ex) {
		LOGGER.log(Level.WARNING, "Failed to close report file.", ex);
	}
}

@Override
public long getSize() {
	try {
		return (pathAsPath != null ? Files.size(pathAsPath) : 0);
	} catch (IOException ex) {
		LOGGER.log(Level.SEVERE, "Failed to get size of report.", ex);
		// If we cannot determine the size of the report, return zero
		// to prevent attempts to read content.
		return 0;
	}
}

@Override
public T accept(ContentVisitor v) {
	return v.visit(this);
}

@Override
public String getName() {
	return reportName;
}

@Override
public String getUniquePath() throws TskCoreException {
	// @@@ This is wrong... we need to use the same logic is in AbstractContent.getUniquePath().
	return getPath();
}

@Override
public Content getDataSource() throws TskCoreException {
	// Delegates to the parent chain; a detached report has no data source.
	if (null == parent) {
		return null;
	} else {
		return parent.getDataSource();
	}
}

@Override
public List getChildren() throws TskCoreException {
	// Reports never have children.
	return Collections.emptyList();
}

@Override
public boolean hasChildren() throws TskCoreException {
	return false;
}

@Override
public int getChildrenCount() throws TskCoreException {
	return 0;
}

@Override
public Content getParent() throws TskCoreException {
	// Lazily resolved from the database and cached.
	if (parent == null) {
		SleuthkitCase.ObjectInfo parentInfo;
		parentInfo = db.getParentInfo(this);
		if (parentInfo == null) {
			parent = null;
		} else {
			parent = db.getContentById(parentInfo.getId());
		}
	}
	return parent;
}

@Override
public List getChildrenIds() throws TskCoreException {
	return Collections.emptyList();
}

@Deprecated
@Override
public BlackboardArtifact newArtifact(int artifactTypeID) throws TskCoreException {
	// Reports only support keyword-hit artifacts.
	if (artifactTypeID != BlackboardArtifact.ARTIFACT_TYPE.TSK_KEYWORD_HIT.getTypeID()) {
		throw new TskCoreException("Reports can only have keyword hit artifacts.");
	}
	long fileObjId = getId();
	long dsObjId = getDataSource() == null ? null : getDataSource().getId();
	try {
		return db.getBlackboard().newAnalysisResult(
				KEYWORD_HIT_TYPE, fileObjId, dsObjId, Score.SCORE_UNKNOWN,
				null, null, null, Collections.emptyList())
				.getAnalysisResult();
	} catch (BlackboardException ex) {
		throw new TskCoreException("Unable to get analysis result for keword hit.", ex);
	}
}

@Override
public AnalysisResultAdded newAnalysisResult(BlackboardArtifact.Type artifactType, Score score, String conclusion, String configuration, String justification, Collection attributesList) throws TskCoreException {
	// Wraps the insert in a transaction; rolled back on blackboard failure.
	CaseDbTransaction trans = db.beginTransaction();
	try {
		AnalysisResultAdded resultAdded = db.getBlackboard().newAnalysisResult(artifactType, objectId, this.getDataSource().getId(), score, conclusion, configuration, justification, attributesList, trans);
		trans.commit();
		return resultAdded;
	} catch (BlackboardException ex) {
		trans.rollback();
		throw new TskCoreException("Error adding analysis result.", ex);
	}
}

@Override
public AnalysisResultAdded newAnalysisResult(BlackboardArtifact.Type artifactType, Score score, String conclusion, String configuration, String justification, Collection attributesList, long dataSourceId) throws TskCoreException {
	// Same as above, but with an explicitly supplied data source id.
	CaseDbTransaction trans = db.beginTransaction();
	try {
		AnalysisResultAdded resultAdded = db.getBlackboard().newAnalysisResult(artifactType, objectId, dataSourceId, score, conclusion, configuration, justification, attributesList, trans);
		trans.commit();
		return resultAdded;
	} catch (BlackboardException ex) {
		trans.rollback();
		throw new TskCoreException("Error adding analysis result.", ex);
	}
}

@Override
public DataArtifact newDataArtifact(BlackboardArtifact.Type artifactType, Collection attributesList, Long osAccountId) throws TskCoreException {
	if (artifactType.getTypeID() != BlackboardArtifact.ARTIFACT_TYPE.TSK_KEYWORD_HIT.getTypeID()) {
		throw new TskCoreException("Reports can only have keyword hit artifacts.");
	}
	return db.getBlackboard().newDataArtifact(artifactType, objectId, this.getDataSource().getId(), attributesList, osAccountId);
}

@Override
public DataArtifact newDataArtifact(BlackboardArtifact.Type artifactType, Collection attributesList, Long osAccountId, long dataSourceId) throws TskCoreException {
	if (artifactType.getTypeID() != BlackboardArtifact.ARTIFACT_TYPE.TSK_KEYWORD_HIT.getTypeID()) {
		throw new TskCoreException("Reports can only have keyword hit artifacts.");
	}
	return db.getBlackboard().newDataArtifact(artifactType, objectId, dataSourceId, attributesList, osAccountId);
}

@Override
public DataArtifact newDataArtifact(BlackboardArtifact.Type artifactType, Collection attributesList) throws TskCoreException {
	return newDataArtifact(artifactType, attributesList, null);
}

@Deprecated
@SuppressWarnings("deprecation")
@Override
public BlackboardArtifact newArtifact(BlackboardArtifact.ARTIFACT_TYPE type) throws TskCoreException {
	return newArtifact(type.getTypeID());
}

@Override
public ArrayList getArtifacts(String artifactTypeName) throws TskCoreException {
	return getArtifacts(db.getArtifactType(artifactTypeName).getTypeID());
}

@Override
public BlackboardArtifact getGenInfoArtifact() throws TskCoreException {
	// TSK_GEN_INFO artifact is obsolete.
	return null;
}

@Override
public BlackboardArtifact getGenInfoArtifact(boolean create) throws TskCoreException {
	// TSK_GEN_INFO artifact is obsolete.
	return null;
}

@Override
public ArrayList getGenInfoAttributes(BlackboardAttribute.ATTRIBUTE_TYPE attr_type) throws TskCoreException {
	// TSK_GEN_INFO artifact is obsolete.
	return null;
}

@Override
public ArrayList getArtifacts(int artifactTypeID) throws TskCoreException {
	if (artifactTypeID != BlackboardArtifact.ARTIFACT_TYPE.TSK_KEYWORD_HIT.getTypeID()) {
		throw new TskCoreException("Reports can only have keyword hit artifacts.");
	}
	return db.getBlackboardArtifacts(artifactTypeID, objectId);
}

@Override
public ArrayList getArtifacts(BlackboardArtifact.ARTIFACT_TYPE type) throws TskCoreException {
	return getArtifacts(type.getTypeID());
}

@Override
public ArrayList getAllArtifacts() throws TskCoreException {
	return db.getMatchingArtifacts("WHERE obj_id = " + objectId); //NON-NLS
}

@Override
public List getAllAnalysisResults() throws TskCoreException {
	return db.getBlackboard().getAnalysisResults(objectId);
}

@Override
public List getAllDataArtifacts() throws TskCoreException {
	return db.getBlackboard().getDataArtifactsBySource(objectId);
}

@Override
public List getAnalysisResults(BlackboardArtifact.Type artifactType) throws TskCoreException {
	return db.getBlackboard().getAnalysisResults(objectId, artifactType.getTypeID());
}

@Override
public Score getAggregateScore() throws TskCoreException {
	return db.getScoringManager().getAggregateScore(objectId);
}

@Override
public Set getHashSetNames() throws TskCoreException {
	// Reports are never hashed against hash sets.
	return Collections.emptySet();
}

@Override
public long getArtifactsCount(String artifactTypeName) throws TskCoreException {
	return getArtifactsCount(db.getArtifactType(artifactTypeName).getTypeID());
}

@Override
public long getArtifactsCount(int artifactTypeID) throws TskCoreException {
	if (artifactTypeID != BlackboardArtifact.ARTIFACT_TYPE.TSK_KEYWORD_HIT.getTypeID()) {
		throw new TskCoreException("Reports can only have keyword hit artifacts.");
	}
	return db.getBlackboardArtifactsCount(artifactTypeID, objectId);
}

@Override
public long getArtifactsCount(BlackboardArtifact.ARTIFACT_TYPE type) throws TskCoreException {
	return getArtifactsCount(type.getTypeID());
}

@Override
public long getAllArtifactsCount() throws TskCoreException {
	return db.getBlackboardArtifactsCount(objectId);
}

@Override
public T accept(SleuthkitItemVisitor v) {
	return v.visit(this);
}
}
sleuthkit-4.11.1/bindings/java/src/org/sleuthkit/datamodel/TagSet.java000755 000765 000024 00000005556 14137073413 026511 0ustar00carrierstaff000000 000000 /*
 * Sleuth Kit Data Model
 *
 * Copyright 2020 Basis Technology Corp.
 * Contact: carrier sleuthkit org
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *	 http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.sleuthkit.datamodel;

import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
import java.util.Objects;

/**
 * A TagSet is a named group of TagNames.
 */
public class TagSet {

	private final String setName;
	private final long id;
	private final List tagNameList; // sorted by rank, then display name

	/**
	 * Construct a TagSet.
	 *
	 * @param id      Tag set id value.
	 * @param setName Name of tag set.
	 */
	TagSet(long id, String setName, List tagNameList) {
		if (setName == null || setName.isEmpty()) {
			throw new IllegalArgumentException("TagSet name must be a non-empty string");
		}
		// Defensive copy; kept sorted so getTagNames() has a stable order.
		this.tagNameList = new ArrayList<>(tagNameList);
		this.tagNameList.sort(new TagNameComparator());
		this.id = id;
		this.setName = setName;
	}

	/**
	 * Returns the name of the tag set.
	 *
	 * @return Tag set name.
	 */
	public String getName() {
		return setName;
	}

	/**
	 * Returns a list of the TagName objects that belong to the tag set.
	 *
	 * @return An unmodifiable list of TagName objects.
*/
public List getTagNames() {
	return Collections.unmodifiableList(tagNameList);
}

/**
 * Return the TagSet id.
 *
 * @return TagSet id value.
 */
public long getId() {
	return id;
}

@Override
public boolean equals(Object obj) {
	// NOTE(review): no `this == obj` fast path; correct but slightly slower
	// for self-comparison than the conventional pattern.
	if (obj == null) {
		return false;
	}
	if (getClass() != obj.getClass()) {
		return false;
	}
	final TagSet other = (TagSet) obj;
	return (this.id == other.getId()
			&& setName.equals(other.getName())
			&& tagNameList.equals(other.tagNameList));
}

@Override
public int hashCode() {
	int hash = 5;
	// Fold both halves of the long id into the hash.
	hash = 89 * hash + (int) (this.id ^ (this.id >>> 32));
	hash = 89 * hash + Objects.hashCode(this.setName);
	hash = 89 * hash + Objects.hashCode(this.tagNameList);
	return hash;
}

/**
 * Comparator for TagNames. TagNames will sort by rank, then TagName.getName().
 */
private class TagNameComparator implements Comparator {

	@Override
	public int compare(TagName tagName1, TagName tagName2) {
		int result = ((Integer)tagName1.getRank()).compareTo(tagName2.getRank());
		if(result == 0) {
			// Rank tie: fall back to alphabetical display name.
			result = tagName1.getDisplayName().compareTo(tagName2.getDisplayName());
		}
		return result;
	}
}
}
sleuthkit-4.11.1/bindings/java/src/org/sleuthkit/datamodel/CommunicationsUtils.java000644 000765 000024 00000015061 14137073413 031330 0ustar00carrierstaff000000 000000 /*
 * Sleuth Kit Data Model
 *
 * Copyright 2020 Basis Technology Corp.
 * Contact: carrier sleuthkit org
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *	 http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
*/
package org.sleuthkit.datamodel;

import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;
import org.apache.commons.lang3.StringUtils;

/**
 * Provides general utility methods related to communications artifacts.
 *
 */
public final class CommunicationsUtils {

	// These symbols are allowed in dialed or written forms of phone numbers.
	// A '+' is allowed only as a leading digit and hence not included here.
	private static final Set TELEPHONY_CHARS = new HashSet<>(Arrays.asList(
			"-", "(", ")", "#", "*", ","
	));

	private static final int MIN_PHONENUMBER_LEN = 3;

	/**
	 * Empty private constructor.
	 */
	private CommunicationsUtils() {
	}

	/**
	 * Normalize the given phone number by removing all non numeric characters,
	 * except: a leading + # or * or ,
	 *
	 * Note: this method intentionally performs a rather lenient validation of
	 * the phone number in order to not drop any collected data.
	 *
	 * @param phoneNumber The string to normalize.
	 *
	 * @return The normalized phone number.
	 *
	 * @throws InvalidAccountIDException If the given string is not a valid
	 *                                   phone number.
	 *
	 */
	public static String normalizePhoneNum(String phoneNumber) throws InvalidAccountIDException {
		if (StringUtils.isEmpty(phoneNumber)) {
			throw new InvalidAccountIDException(String.format("Input phone number is empty or null."));
		}

		if (isValidPhoneNumber(phoneNumber) == false) {
			throw new InvalidAccountIDException(String.format("Input string is not a valid phone number: %s", phoneNumber));
		}

		String normalizedNumber = phoneNumber.trim();
		normalizedNumber = normalizedNumber.replaceAll("\\s+", ""); // remove spaces.
		normalizedNumber = normalizedNumber.replaceAll("[\\-()]", ""); // remove parens & dashes.

		// ensure a min length
		if (normalizedNumber.length() < MIN_PHONENUMBER_LEN) {
			throw new InvalidAccountIDException("Invalid phone number string " + phoneNumber);
		}
		return normalizedNumber;
	}

	/**
	 * Normalizes the given email address.
	 *
	 * @param emailAddress The email address string to be normalized.
	 *
	 * @return The normalized email address.
	 *
	 * @throws InvalidAccountIDException If the given string is not a valid
	 *                                   email address.
	 */
	public static String normalizeEmailAddress(String emailAddress) throws InvalidAccountIDException {

		if (StringUtils.isEmpty(emailAddress)) {
			throw new InvalidAccountIDException(String.format("Input email address is empty or null."));
		}

		if (isValidEmailAddress(emailAddress) == false) {
			throw new InvalidAccountIDException(String.format("Input string is not a valid email address: %s", emailAddress));
		}

		// Lower-case, strip semicolons, and trim for a canonical form.
		return emailAddress.toLowerCase().replace(";", "").trim();
	}

	/**
	 * Checks if the given accountId is a valid id for the specified account
	 * type.
	 *
	 * @param accountType     Account type.
	 * @param accountUniqueID Id to check.
	 *
	 * @return True, if the id is a valid id for the given account type, False
	 *         otherwise.
	 */
	public static boolean isValidAccountId(Account.Type accountType, String accountUniqueID) {
		if (accountType == Account.Type.PHONE) {
			return isValidPhoneNumber(accountUniqueID);
		}
		if (accountType == Account.Type.EMAIL) {
			return isValidEmailAddress(accountUniqueID);
		}

		// All other account types only require a non-empty id.
		return !StringUtils.isEmpty(accountUniqueID);
	}

	/**
	 * Checks if the given string is a valid phone number.
	 *
	 * NOTE: this method intentionally performs a rather lenient validation of
	 * the phone number in order to not drop any collected data.
	 *
	 * @param phoneNum Phone number string to check.
	 *
	 * @return True if the given string is a valid phone number, false
	 *         otherwise.
	 */
	public static boolean isValidPhoneNumber(String phoneNum) {
		if (StringUtils.isEmpty(phoneNum)) {
			return false;
		}

		String trimmedPhoneNum = phoneNum.trim();

		// A phone number may have a leading '+', special telephony chars, or digits.
		// Anything else implies an invalid phone number.
		for (int i = 0; i < trimmedPhoneNum.length(); i++) {
			if (!((trimmedPhoneNum.charAt(i) == '+' && i == 0) // a '+' is allowed only at the beginning
					|| isValidPhoneChar(trimmedPhoneNum.charAt(i)))) {
				return false;
			}
		}
		return true;
	}

	/**
	 * Checks if the given character is a valid character for a phone number.
	 *
	 * @param ch Character to check.
	 *
	 * @return True, if its a valid phone number character, false, otherwise.
	 */
	private static boolean isValidPhoneChar(char ch) {
		return Character.isSpaceChar(ch)
				|| Character.isDigit(ch)
				|| TELEPHONY_CHARS.contains(String.valueOf(ch));
	}

	/**
	 * Checks if the given string is a valid email address.
	 *
	 * Note: this method intentionally performs a rather lenient validation in
	 * order to not drop any collected data.
	 *
	 * Note: We are requiring that an email address have a "." on the right-hand
	 * side to allow us to differentiate between app-specific identifiers and
	 * email addresses. We realize that some emails can be sent within
	 * enterprises without a ".', but that this is less common than encountering
	 * app-specific identifiers of the form a(at)b.
	 *
	 * @param emailAddress String to check.
	 *
	 * @return True if the given string is a valid email address, false
	 *         otherwise.
	 */
	public static boolean isValidEmailAddress(String emailAddress) {
		if (StringUtils.isEmpty(emailAddress)) {
			return false;
		}

		if (emailAddress.contains("@") == false
				|| emailAddress.contains(".") == false ) {
			return false;
		}

		// ensure there's a username and domain
		String[] tokens = emailAddress.split("@");
		if (tokens.length < 2
				|| StringUtils.isEmpty(tokens[0])
				|| StringUtils.isEmpty(tokens[1])) {
			return false;
		}

		// ensure domain has name and suffix
		String[] tokens2 = tokens[1].split("\\.");
		return !(tokens2.length < 2
				|| StringUtils.isEmpty(tokens2[0])
				|| StringUtils.isEmpty(tokens2[1]));
	}
}
sleuthkit-4.11.1/bindings/java/src/org/sleuthkit/datamodel/Host.java000644 000765 000024 00000004763 14137073413 026243 0ustar00carrierstaff000000 000000 /*
 * Sleuth Kit Data Model
 *
 * Copyright 2021-2021 Basis Technology Corp.
 * Contact: carrier sleuthkit org
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *	 http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.sleuthkit.datamodel;

import java.util.Objects;

/**
 * Encapsulates a host.
 */
public final class Host {

	private final long id;   // row id in the hosts table
	private final String name;
	private final HostDbStatus status;

	Host(long id, String name) {
		this(id, name, HostDbStatus.ACTIVE);
	}

	Host(long id, String name, HostDbStatus status) {
		this.id = id;
		this.name = name;
		this.status = status;
	}

	/**
	 * Gets the row id for the host.
	 *
	 * @return Row id.
	 */
	public long getHostId() {
		return id;
	}

	/**
	 * Gets the name for the host.
	 *
	 * @return Host name.
*/
public String getName() {
	return name;
}

/**
 * Gets the status for the host.
 *
 * @return Host status.
 */
HostDbStatus getStatus() {
	return status;
}

@Override
public int hashCode() {
	int hash = 5;
	// Fold both halves of the long id into the hash.
	hash = 67 * hash + (int) (this.id ^ (this.id >>> 32));
	hash = 67 * hash + Objects.hashCode(this.name);
	return hash;
}

@Override
public boolean equals(Object obj) {
	if (this == obj) {
		return true;
	}
	if (obj == null) {
		return false;
	}
	if (getClass() != obj.getClass()) {
		return false;
	}
	final Host other = (Host) obj;
	if (this.id != other.id) {
		return false;
	}
	if ((this.name == null) ? (other.name != null) : !this.name.equals(other.name)) {
		return false;
	}
	return true;
}

/**
 * Encapsulates status of host row.
 */
enum HostDbStatus {
	ACTIVE(0, "Active"),
	MERGED(1, "Merged"),
	DELETED(2, "Deleted");

	private final int id;   // value persisted in the db_status column
	private final String name;

	HostDbStatus(int id, String name) {
		this.id = id;
		this.name = name;
	}

	int getId() {
		return id;
	}

	String getName() {
		return name;
	}

	/**
	 * Look up a status by its persisted id value.
	 *
	 * @param typeId db_status column value to resolve.
	 *
	 * @return Matching status, or null if no status has that id.
	 */
	static HostDbStatus fromID(int typeId) {
		for (HostDbStatus type : HostDbStatus.values()) {
			// Fix: compare the declared id rather than ordinal(). The two
			// happen to coincide today (0, 1, 2), but ordinal() silently
			// breaks if constants are reordered or ids change.
			if (type.getId() == typeId) {
				return type;
			}
		}
		return null;
	}
}
}
sleuthkit-4.11.1/bindings/java/src/org/sleuthkit/datamodel/blackboardutils/000755 000765 000024 00000000000 14137073560 027621 5ustar00carrierstaff000000 000000 sleuthkit-4.11.1/bindings/java/src/org/sleuthkit/datamodel/LibraryUtils.java000644 000765 000024 00000014114 14137073413 027742 0ustar00carrierstaff000000 000000 /*
 * Sleuth Kit Data Model
 *
 * Copyright 2013 Basis Technology Corp.
 * Contact: carrier sleuthkit org
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *	 http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.sleuthkit.datamodel;

import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.net.URL;

/**
 * Collection of methods to load libraries embedded in the TSK Datamodel Jar
 * file.
 *
 * @author jwallace
 */
public class LibraryUtils {

	// Native-library file extensions probed, in order, when locating a
	// bundled library inside the jar.
	public static final String[] EXTS = new String[]{".so", ".dylib", ".dll", ".jnilib"}; //NON-NLS

	/**
	 * The libraries the TSK Datamodel needs.
	 */
	public enum Lib {

		MSVCP("msvcp100", ""), //NON-NLS
		MSVCR("msvcr100", ""), //NON-NLS
		ZLIB("zlib", "z"), //NON-NLS
		LIBEWF("libewf", "ewf"), //NON-NLS
		LIBVMDK("libvmdk", "vmdk"), //NON-NLS
		LIBVHDI("libvhdi", "vhd"), //NON-NLS
		TSK_JNI("libtsk_jni", "tsk_jni"); //NON-NLS

		private final String name;     // Windows-style base name
		private final String unixName; // Unix-style base name (may be empty)

		Lib(String name, String unixName) {
			this.name = name;
			this.unixName = unixName;
		}

		public String getLibName() {
			return this.name;
		}

		public String getUnixName() {
			return this.unixName;
		}
	}

	/**
	 * Load the Sleuthkit JNI.
	 *
	 * @return true if library was found and loaded
	 */
	public static boolean loadSleuthkitJNI() {
		boolean loaded = LibraryUtils.loadNativeLibFromTskJar(Lib.TSK_JNI);
		if (!loaded) {
			System.out.println("SleuthkitJNI: failed to load " + Lib.TSK_JNI.getLibName()); //NON-NLS
		} else {
			System.out.println("SleuthkitJNI: loaded " + Lib.TSK_JNI.getLibName()); //NON-NLS
		}
		return loaded;
	}

	/**
	 * Get the name of the current platform.
	 *
	 * @return a platform identifier, formatted as "OS_ARCH/OS_NAME"
	 */
	private static String getPlatform() {
		String os = System.getProperty("os.name").toLowerCase();
		if (LibraryUtils.isWindows()) {
			os = "win"; //NON-NLS
		} else if (LibraryUtils.isMac()) {
			os = "mac"; //NON-NLS
		} else if (LibraryUtils.isLinux()) {
			os = "linux"; //NON-NLS
		}
		// os.arch represents the architecture of the JVM, not the os
		String arch = System.getProperty("os.arch");
		return arch.toLowerCase() + "/" + os.toLowerCase();
	}

	/**
	 * Is the platform Windows?
	 *
	 * @return true if os.name contains "windows"
	 */
	private static boolean isWindows() {
		return System.getProperty("os.name").toLowerCase().contains("windows"); //NON-NLS
	}

	/**
	 * Is the platform Mac?
	 *
	 * @return true if os.name contains "mac"
	 */
	private static boolean isMac() {
		return System.getProperty("os.name").toLowerCase().contains("mac"); //NON-NLS
	}

	/**
	 * Is the platform Linux?
	 *
	 * @return true if os.name is exactly "Linux"
	 */
	private static boolean isLinux() {
		return System.getProperty("os.name").equals("Linux"); //NON-NLS
	}

	/**
	 * Attempt to extract and load the specified native library.
	 *
	 * Copies the library out of the jar into the system temp directory
	 * (suffixed with the user name to avoid collisions between users),
	 * after cleaning up any libraries left by previous installations,
	 * then System.load()s it.
	 *
	 * @param library the bundled library to extract and load
	 *
	 * @return true if the library was extracted and loaded, false otherwise
	 */
	private static boolean loadNativeLibFromTskJar(Lib library) {
		String libName = library.getLibName();
		String userName = System.getProperty("user.name");
		// find the library in the jar file
		StringBuilder pathInJarBase = new StringBuilder();
		pathInJarBase.append("/NATIVELIBS/"); //NON-NLS
		pathInJarBase.append(getPlatform());
		pathInJarBase.append("/");
		pathInJarBase.append(libName);
		URL urlInJar = null;
		String libExt = null;
		for (String ext : EXTS) {
			urlInJar = SleuthkitJNI.class.getResource(pathInJarBase.toString() + ext);
			if (urlInJar != null) {
				libExt = ext;
				break;
			}
		}
		if (urlInJar == null) {
			System.out.println("Library not found in jar (" + libName + ")"); //NON-NLS
			return false;
		}
		StringBuilder pathToTempFile = new StringBuilder();
		pathToTempFile.append(System.getProperty("java.io.tmpdir"));
		pathToTempFile.append(java.io.File.separator);
		pathToTempFile.append(libName);
		pathToTempFile.append("_");
		pathToTempFile.append(userName);
		pathToTempFile.append(libExt);
		// copy library to temp folder and load it
		try {
			java.io.File tempLibFile = new java.io.File(pathToTempFile.toString()); //NON-NLS
			System.out.println("Temp Folder for Libraries: " + tempLibFile.getParent()); //NON-NLS
			// cycle through the libraries and delete them.
			// we used to copy dlls into here.
			// delete any than may still exist from previous installations.
			// Dec 2013
			for (Lib l : Lib.values()) {
				String ext = getExtByPlatform();
				// try the windows version
				java.io.File f = new java.io.File(l.getLibName() + ext);
				//System.out.println(f.getName());
				if (f.exists()) {
					f.delete();
				} else {
					// try the unix version
					java.io.File fUnix = new java.io.File(l.getUnixName() + ext);
					//System.out.println(fUnix.getName());
					if (fUnix.exists()) {
						fUnix.delete();
					}
				}
			}
			// Delete old file
			if (tempLibFile.exists()) {
				if (tempLibFile.delete() == false) {
					System.out.println("Error deleting old native library.  Is the app already running? (" + tempLibFile.toString() + ")"); //NON-NLS
					return false;
				}
			}
			// copy it
			// NOTE(review): in/out are not closed in a finally block or
			// try-with-resources; an IOException mid-copy leaks both streams.
			InputStream in = urlInJar.openStream();
			OutputStream out = new FileOutputStream(tempLibFile);
			byte[] buffer = new byte[1024];
			int length;
			while ((length = in.read(buffer)) > 0) {
				out.write(buffer, 0, length);
			}
			in.close();
			out.close();
			// load it
			System.load(tempLibFile.getAbsolutePath());
		} catch (IOException e) {
			// Loading failed.
			System.out.println("Error loading library: " + e.getMessage()); //NON-NLS
			return false;
		}
		return true;
	}

	/**
	 * @return The native-library extension for the current platform.
	 */
	private static String getExtByPlatform() {
		if (isWindows()) {
			return ".dll"; //NON-NLS
		} else if (isMac()) {
			return ".dylib"; //NON-NLS
		} else {
			return ".so"; //NON-NLS
		}
	}
}
sleuthkit-4.11.1/bindings/java/src/org/sleuthkit/datamodel/PersonManager.java000755 000765 000024 00000035575 14137073413 030077 0ustar00carrierstaff000000 000000 /*
 * Sleuth Kit Data Model
 *
 * Copyright 2021 Basis Technology Corp.
 * Contact: carrier sleuthkit org
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *	 http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
*/ package org.sleuthkit.datamodel; import com.google.common.base.Strings; import java.sql.PreparedStatement; import java.sql.ResultSet; import java.sql.SQLException; import java.sql.Statement; import java.util.ArrayList; import java.util.Collections; import java.util.List; import java.util.Optional; import java.util.stream.Collectors; import org.sleuthkit.datamodel.SleuthkitCase.CaseDbConnection; import org.sleuthkit.datamodel.TskEvent.PersonsAddedTskEvent; /** * Responsible for creating/updating/retrieving Persons. */ public final class PersonManager { private final SleuthkitCase db; /** * Construct a PersonManager for the given SleuthkitCase. * * @param skCase The SleuthkitCase * */ PersonManager(SleuthkitCase skCase) { this.db = skCase; } /** * Get all persons in the database. * * @return List of persons * * @throws TskCoreException */ public List getPersons() throws TskCoreException { String queryString = "SELECT * FROM tsk_persons"; List persons = new ArrayList<>(); db.acquireSingleUserCaseReadLock(); try (CaseDbConnection connection = this.db.getConnection(); Statement s = connection.createStatement(); ResultSet rs = connection.executeQuery(s, queryString)) { while (rs.next()) { persons.add(new Person(rs.getLong("id"), rs.getString("name"))); } return persons; } catch (SQLException ex) { throw new TskCoreException(String.format("Error getting persons"), ex); } finally { db.releaseSingleUserCaseReadLock(); } } /** * Update the database to match the given Person. * * @param person The person to update. * * @return person The person that was updated. * * @throws TskCoreException */ public Person updatePerson(Person person) throws TskCoreException { // Must have a non-empty name if (Strings.isNullOrEmpty(person.getName())) { throw new TskCoreException("Illegal argument passed to updatePerson: Name field for person with ID " + person.getPersonId() + " is null/empty. Will not update database."); } String queryString = "UPDATE tsk_persons" + " SET name = ? 
WHERE id = " + person.getPersonId(); db.acquireSingleUserCaseWriteLock(); try (CaseDbConnection connection = db.getConnection()) { PreparedStatement s = connection.getPreparedStatement(queryString, Statement.NO_GENERATED_KEYS); s.clearParameters(); s.setString(1, person.getName()); s.executeUpdate(); } catch (SQLException ex) { throw new TskCoreException(String.format("Error updating person with id = %d", person.getPersonId()), ex); } finally { db.releaseSingleUserCaseWriteLock(); } db.fireTSKEvent(new TskEvent.PersonsUpdatedTskEvent(Collections.singletonList(person))); return person; } /** * Delete a person. Name comparison is case-insensitive. * * @param name Name of the person to delete * * @throws TskCoreException */ public void deletePerson(String name) throws TskCoreException { String queryString = "DELETE FROM tsk_persons" + " WHERE LOWER(name) = LOWER(?)"; Person deletedPerson = null; db.acquireSingleUserCaseWriteLock(); try (CaseDbConnection connection = db.getConnection()) { PreparedStatement s = connection.getPreparedStatement(queryString, Statement.RETURN_GENERATED_KEYS); s.clearParameters(); s.setString(1, name); s.executeUpdate(); try (ResultSet resultSet = s.getGeneratedKeys()) { if (resultSet.next()) { deletedPerson = new Person(resultSet.getLong(1), name); } } } catch (SQLException ex) { throw new TskCoreException(String.format("Error deleting person with name %s", name), ex); } finally { db.releaseSingleUserCaseWriteLock(); } if (deletedPerson != null) { db.fireTSKEvent(new TskEvent.PersonsDeletedTskEvent(Collections.singletonList(deletedPerson.getPersonId()))); } } /** * Get person with given name. Name comparison is case-insensitive. * * @param name Person name to look for. * * @return Optional with person. Optional.empty if no matching person is * found. 
* * @throws TskCoreException */ public Optional getPerson(String name) throws TskCoreException { db.acquireSingleUserCaseReadLock(); try (CaseDbConnection connection = this.db.getConnection()) { return getPerson(name, connection); } finally { db.releaseSingleUserCaseReadLock(); } } /** * Get person with given id. * * @param id Id of the person to look for. * * @return Optional with person. Optional.empty if no matching person is * found. * * @throws TskCoreException */ public Optional getPerson(long id) throws TskCoreException { String queryString = "SELECT * FROM tsk_persons WHERE id = " + id; db.acquireSingleUserCaseReadLock(); try (CaseDbConnection connection = this.db.getConnection(); Statement s = connection.createStatement(); ResultSet rs = connection.executeQuery(s, queryString)) { if (rs.next()) { return Optional.of(new Person(rs.getLong("id"), rs.getString("name"))); } else { return Optional.empty(); } } catch (SQLException ex) { throw new TskCoreException(String.format("Error getting persons"), ex); } finally { db.releaseSingleUserCaseReadLock(); } } /** * Create a person with specified name. If a person already exists with the * given name, it returns the existing person. Name comparison is * case-insensitive. * * @param name Person name. * * @return Person with the specified name. * * @throws TskCoreException */ public Person newPerson(String name) throws TskCoreException { // Must have a name if (Strings.isNullOrEmpty(name)) { throw new TskCoreException("Illegal argument passed to createPerson: Non-empty name is required."); } Person toReturn = null; CaseDbConnection connection = null; db.acquireSingleUserCaseWriteLock(); try { connection = db.getConnection(); // First try to load it from the database. This is a case-insensitive look-up // to attempt to prevent having two entries with the same lower-case name. Optional person = getPerson(name, connection); if (person.isPresent()) { return person.get(); } // Attempt to insert the new Person. 
String personInsertSQL = "INSERT INTO tsk_persons(name) VALUES (?)"; // NON-NLS PreparedStatement preparedStatement = connection.getPreparedStatement(personInsertSQL, Statement.RETURN_GENERATED_KEYS); preparedStatement.clearParameters(); preparedStatement.setString(1, name); connection.executeUpdate(preparedStatement); // Read back the row id. try (ResultSet resultSet = preparedStatement.getGeneratedKeys();) { if (resultSet.next()) { toReturn = new Person(resultSet.getLong(1), name); //last_insert_rowid() } else { throw new SQLException("Error executing SQL: " + personInsertSQL); } } } catch (SQLException ex) { if (connection != null) { // The insert may have failed because this person was just added on another thread, so try getting the person again. // (Note: the SingleUserCaseWriteLock is a no-op for multi-user cases so acquiring it does not prevent this situation) Optional person = getPerson(name, connection); if (person.isPresent()) { return person.get(); } } throw new TskCoreException(String.format("Error adding person with name = %s", name), ex); } finally { db.releaseSingleUserCaseWriteLock(); } if (toReturn != null) { db.fireTSKEvent(new PersonsAddedTskEvent(Collections.singletonList(toReturn))); } return toReturn; } /** * Get all hosts associated with the given person. * * @param person The person. * * @return The list of hosts corresponding to the person. * * @throws TskCoreException Thrown if there is an issue querying the case * database. */ public List getHostsForPerson(Person person) throws TskCoreException { return executeHostsQuery("SELECT * FROM tsk_hosts WHERE person_id = " + person.getPersonId()); } /** * Gets all hosts not associated with any person. * * @return The hosts. * * @throws TskCoreException Thrown if there is an issue querying the case * database. 
*/ public List getHostsWithoutPersons() throws TskCoreException { return executeHostsQuery("SELECT * FROM tsk_hosts WHERE person_id IS NULL"); } /** * Executes a query of the tsk_hosts table in the case database. * * @param hostsQuery The SQL query to execute. * * @throws TskCoreException Thrown if there is an issue querying the case * database. * * @throws TskCoreException */ private List executeHostsQuery(String hostsQuery) throws TskCoreException { String sql = hostsQuery + " AND db_status = " + Host.HostDbStatus.ACTIVE.getId(); List hosts = new ArrayList<>(); db.acquireSingleUserCaseReadLock(); try (CaseDbConnection connection = this.db.getConnection(); Statement s = connection.createStatement(); ResultSet rs = connection.executeQuery(s, sql)) { while (rs.next()) { hosts.add(new Host(rs.getLong("id"), rs.getString("name"), Host.HostDbStatus.fromID(rs.getInt("db_status")))); } return hosts; } catch (SQLException ex) { throw new TskCoreException(String.format("Error executing '" + sql + "'"), ex); } finally { db.releaseSingleUserCaseReadLock(); } } /** * Get person with given name. Name comparison is case-insensitive. * * @param name Person name to look for. * @param connection Database connection to use. * * @return Optional with person. Optional.empty if no matching person is * found. 
* * @throws TskCoreException */ private Optional getPerson(String name, CaseDbConnection connection) throws TskCoreException { String queryString = "SELECT * FROM tsk_persons" + " WHERE LOWER(name) = LOWER(?)"; try { PreparedStatement s = connection.getPreparedStatement(queryString, Statement.RETURN_GENERATED_KEYS); s.clearParameters(); s.setString(1, name); try (ResultSet rs = s.executeQuery()) { if (!rs.next()) { return Optional.empty(); // no match found } else { return Optional.of(new Person(rs.getLong("id"), rs.getString("name"))); } } } catch (SQLException ex) { throw new TskCoreException(String.format("Error getting person with name = %s", name), ex); } } /** * Get person for the given host or empty if no associated person. * * @param host The host. * * @return The parent person or empty if no parent person. * * @throws TskCoreException if error occurs. */ public Optional getPerson(Host host) throws TskCoreException { String queryString = "SELECT p.id AS personId, p.name AS name FROM \n" + "tsk_persons p INNER JOIN tsk_hosts h\n" + "ON p.id = h.person_id \n" + "WHERE h.id = " + host.getHostId(); db.acquireSingleUserCaseReadLock(); try (CaseDbConnection connection = this.db.getConnection(); Statement s = connection.createStatement(); ResultSet rs = connection.executeQuery(s, queryString)) { if (rs.next()) { return Optional.of(new Person(rs.getLong("personId"), rs.getString("name"))); } else { return Optional.empty(); } } catch (SQLException ex) { throw new TskCoreException(String.format("Error getting person for host with ID = %d", host.getHostId()), ex); } finally { db.releaseSingleUserCaseReadLock(); } } /** * Adds one or more hosts to a person. * * @param person The person. * @param hosts The hosts. * * @throws TskCoreException Thrown if the operation cannot be completed. 
*/ public void addHostsToPerson(Person person, List hosts) throws TskCoreException { if (person == null) { throw new TskCoreException("Illegal argument: person must be non-null"); } if (hosts == null || hosts.isEmpty()) { throw new TskCoreException("Illegal argument: hosts must be non-null and non-empty"); } executeHostsUpdate(person, getHostIds(hosts), new TskEvent.HostsAddedToPersonTskEvent(person, hosts)); } /** * Removes one or more hosts from a person. * * @param person The person. * @param hosts The hosts. * * @throws TskCoreException Thrown if the operation cannot be completed. */ public void removeHostsFromPerson(Person person, List hosts) throws TskCoreException { if (person == null) { throw new TskCoreException("Illegal argument: person must be non-null"); } if (hosts == null || hosts.isEmpty()) { throw new TskCoreException("Illegal argument: hosts must be non-null and non-empty"); } List hostIds = getHostIds(hosts); executeHostsUpdate(null, hostIds, new TskEvent.HostsRemovedFromPersonTskEvent(person, hostIds)); } /** * Executes an update of the person_id column for one or more hosts in the * tsk_hosts table in the case database. * * @param person The person to get the person ID from or null if the person * ID of the hosts should be set to NULL. * @param hostIds The host IDs of the hosts. * @param event A TSK event to be published if the update succeeds. * * @throws TskCoreException Thrown if the update fails. */ private void executeHostsUpdate(Person person, List hostIds, TskEvent event) throws TskCoreException { String updateSql = null; db.acquireSingleUserCaseWriteLock(); try (CaseDbConnection connection = this.db.getConnection(); Statement statement = connection.createStatement()) { updateSql = (person == null) ? 
String.format("UPDATE tsk_hosts SET person_id = NULL") : String.format("UPDATE tsk_hosts SET person_id = %d", person.getPersonId()); String hostIdsCsvList = hostIds.stream() .map(hostId -> hostId.toString()) .collect(Collectors.joining(",")); updateSql += " WHERE id IN (" + hostIdsCsvList + ")"; statement.executeUpdate(updateSql); db.fireTSKEvent(event); } catch (SQLException ex) { throw new TskCoreException(String.format(updateSql == null ? "Error connecting to case database" : "Error executing '" + updateSql + "'"), ex); } finally { db.releaseSingleUserCaseWriteLock(); } } /** * Gets a list of host IDs from a list of hosts. * * @param hosts The hosts. * * @return The host IDs. */ private List getHostIds(List hosts) { List hostIds = new ArrayList<>(); hostIds.addAll(hosts.stream() .map(host -> host.getHostId()) .collect(Collectors.toList())); return hostIds; } } sleuthkit-4.11.1/bindings/java/src/org/sleuthkit/datamodel/OsAccount.java000644 000765 000024 00000040557 14137073413 027225 0ustar00carrierstaff000000 000000 /* * Sleuth Kit Data Model * * Copyright 2020-2021 Basis Technology Corp. * Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.sleuthkit.datamodel; import java.util.Collections; import java.util.List; import java.util.Optional; import java.util.ResourceBundle; /** * Abstracts an OS user account. OS Accounts have a scope, which is defined by * their parent OsAccountRealm. 
* * An OS user account may own files and (some) artifacts. * * OsAcounts can be created with minimal data and updated as more is learned. * Caller must call update() to save any new data. */ public final class OsAccount extends AbstractContent { private static final ResourceBundle bundle = ResourceBundle.getBundle("org.sleuthkit.datamodel.Bundle"); final static Long NO_ACCOUNT = null; final static String NO_OWNER_ID = null; private final SleuthkitCase sleuthkitCase; private final long osAccountObjId; // Object ID within the database private final long realmId; // realm where the account exists in (could be local or domain scoped) private final String loginName; // user login name - may be null private final String addr; // a unique user sid/uid, may be null private final String signature; // This exists only to prevent duplicates. // Together realm_id & signature must be unique for each account. // It is either addr if addr is defined, // or the login_name if login_name is defined. private final String fullName; // full name, may be null private final OsAccountType osAccountType; private final OsAccountStatus osAccountStatus; private final OsAccountDbStatus osAccountDbStatus; // Status of row in the database private final Long creationTime; private List osAccountAttributes = null; /** * Encapsulates status of an account - whether is it active or disabled or * deleted. */ public enum OsAccountStatus { UNKNOWN(0, bundle.getString("OsAccountStatus.Unknown.text")), ACTIVE(1, bundle.getString("OsAccountStatus.Active.text")), DISABLED(2, bundle.getString("OsAccountStatus.Disabled.text")), DELETED(3, bundle.getString("OsAccountStatus.Deleted.text")); private final int id; private final String name; OsAccountStatus(int id, String name) { this.id = id; this.name = name; } /** * Get account status id. * * @return Account status id. */ public int getId() { return id; } /** * Get the account status enum name. 
* * @return */ public String getName() { return name; } /** * Gets account status enum from id. * * @param statusId Id to look for. * * @return Account status enum. */ public static OsAccountStatus fromID(int statusId) { for (OsAccountStatus statusType : OsAccountStatus.values()) { if (statusType.ordinal() == statusId) { return statusType; } } return null; } } /** * Encapsulates status of OsAccount row. OsAccounts that are not "Active" * are generally invisible - they will not be returned by any queries on the * string fields. */ enum OsAccountDbStatus { ACTIVE(0, "Active"), MERGED(1, "Merged"), DELETED(2, "Deleted"); private final int id; private final String name; OsAccountDbStatus(int id, String name) { this.id = id; this.name = name; } int getId() { return id; } String getName() { return name; } static OsAccountDbStatus fromID(int typeId) { for (OsAccountDbStatus type : OsAccountDbStatus.values()) { if (type.ordinal() == typeId) { return type; } } return null; } } /** * Encapsulates an account type - whether it's an interactive login account * or a service account. */ public enum OsAccountType { UNKNOWN(0, bundle.getString("OsAccountType.Unknown.text")), SERVICE(1, bundle.getString("OsAccountType.Service.text")), INTERACTIVE(2, bundle.getString("OsAccountType.Interactive.text")); private final int id; private final String name; OsAccountType(int id, String name) { this.id = id; this.name = name; } /** * Get account type id. * * @return Account type id. */ public int getId() { return id; } /** * Get account type name. * * @return Account type name. */ public String getName() { return name; } /** * Gets account type enum from id. * * @param typeId Id to look for. * * @return Account type enum. */ public static OsAccountType fromID(int typeId) { for (OsAccountType accountType : OsAccountType.values()) { if (accountType.ordinal() == typeId) { return accountType; } } return null; } } /** * Constructs an OsAccount with a realm/username and unique id, and * signature. 
* * @param sleuthkitCase The SleuthKit case (case database) that contains * the artifact data. * @param osAccountobjId Obj id of the account in tsk_objects table. * @param realmId Realm - defines the scope of this account. * @param loginName Login name for the account. May be null. * @param uniqueId An id unique within the realm - a SID or uid. May * be null, only if login name is not null. * @param signature A unique signature constructed from realm id and * loginName or uniqueId. * @param fullName Full name. * @param creationTime Account creation time. * @param accountType Account type. * @param accountStatus Account status. * @param dbStatus Status of row in database. */ OsAccount(SleuthkitCase sleuthkitCase, long osAccountobjId, long realmId, String loginName, String uniqueId, String signature, String fullName, Long creationTime, OsAccountType accountType, OsAccountStatus accountStatus, OsAccountDbStatus accountDbStatus) { super(sleuthkitCase, osAccountobjId, signature); this.sleuthkitCase = sleuthkitCase; this.osAccountObjId = osAccountobjId; this.realmId = realmId; this.loginName = loginName; this.addr = uniqueId; this.signature = signature; this.fullName = fullName; this.creationTime = creationTime; this.osAccountType = accountType; this.osAccountStatus = accountStatus; this.osAccountDbStatus = accountDbStatus; } /** * This function is used by OsAccountManger to update the list of OsAccount * attributes. * * @param osAccountAttributes The osAccount attributes that are to be added. */ synchronized void setAttributesInternal(List osAccountAttributes) { this.osAccountAttributes = osAccountAttributes; } /** * Get the account Object Id that is unique within the scope of the case. * * @return Account * id. */ public long getId() { return osAccountObjId; } /** * Get the unique identifier for the account, such as UID or SID. The id is * unique within the account realm. * * @return Optional unique identifier. 
*/ public Optional getAddr() { return Optional.ofNullable(addr); } /** * Get the ID for the account realm. Get the Realm via * OsAccountRealmManager.getRealmByRealmId() NOTE: The realm may get updated * as more data is parsed, so listen for events to update as needed. * * @return */ public long getRealmId() { return realmId; } /** * Get account login name, such as "jdoe" * * @return Optional login name. */ public Optional getLoginName() { return Optional.ofNullable(loginName); } /** * Get the account signature. * * @return Account signature. */ String getSignature() { return signature; } /** * Get account user full name, such as "John Doe" * * @return Optional with full name. */ public Optional getFullName() { return Optional.ofNullable(fullName); } /** * Get account creation time. * * @return Optional with account creation time. */ public Optional getCreationTime() { return Optional.ofNullable(creationTime); } /** * Get account type. * * @return Optional with account type. */ public Optional getOsAccountType() { return Optional.ofNullable(osAccountType); } /** * Get account status. * * @return Optional with account status. */ public Optional getOsAccountStatus() { return Optional.ofNullable(osAccountStatus); } /** * Get account status in the database. * * @return Database account status. */ public OsAccountDbStatus getOsAccountDbStatus() { return osAccountDbStatus; } /** * Get additional account attributes. * * @return List of additional account attributes. May return an empty list. * * @throws TskCoreException */ public synchronized List getExtendedOsAccountAttributes() throws TskCoreException { if (osAccountAttributes == null) { osAccountAttributes = sleuthkitCase.getOsAccountManager().getOsAccountAttributes(this); } return Collections.unmodifiableList(osAccountAttributes); } /** * Return the os account instances. * * @return List of all the OsAccountInstances. May return an empty list. 
* * @throws TskCoreException */ public synchronized List getOsAccountInstances() throws TskCoreException { return sleuthkitCase.getOsAccountManager().getOsAccountInstances(this); } /** * Gets the SleuthKit case database for this account. * * @return The SleuthKit case object. */ @Override public SleuthkitCase getSleuthkitCase() { return sleuthkitCase; } @Override public int read(byte[] buf, long offset, long len) throws TskCoreException { // No data to read. return 0; } @Override public void close() { // nothing to close } @Override public long getSize() { // No data. return 0; } @Override public T accept(ContentVisitor v) { throw new UnsupportedOperationException("Not supported yet."); } @Override public T accept(SleuthkitItemVisitor v) { return v.visit(this); } /** * Abstracts attributes of an OS account. An attribute may be specific to a * host, or applicable across all hosts. * * As an example, last login time is host specific, whereas last password * reset date is independent of a host. * */ public final class OsAccountAttribute extends AbstractAttribute { private final long osAccountObjId; // OS account to which this attribute belongs. private final Long hostId; // Host to which this attribute applies, may be null private final Long sourceObjId; // Object id of the source where the attribute was discovered. /** * Creates an os account attribute with int value. * * @param attributeType Attribute type. * @param valueInt Int value. * @param osAccount Account which the attribute pertains to. * @param host Host on which the attribute applies to. Pass * Null if the attribute applies to all the hosts * in the realm. * @param sourceObj Source where the attribute was found, may be * null. */ public OsAccountAttribute(BlackboardAttribute.Type attributeType, int valueInt, OsAccount osAccount, Host host, Content sourceObj) { super(attributeType, valueInt); this.osAccountObjId = osAccount.getId(); this.hostId = (host != null ? 
host.getHostId() : null); this.sourceObjId = (sourceObj != null ? sourceObj.getId() : null); } /** * Creates an os account attribute with long value. * * @param attributeType Attribute type. * @param valueLong Long value. * @param osAccount Account which the attribute pertains to. * @param host Host on which the attribute applies to. Pass * Null if it applies across hosts. * @param sourceObj Source where the attribute was found. */ public OsAccountAttribute(BlackboardAttribute.Type attributeType, long valueLong, OsAccount osAccount, Host host, Content sourceObj) { super(attributeType, valueLong); this.osAccountObjId = osAccount.getId(); this.hostId = (host != null ? host.getHostId() : null); this.sourceObjId = (sourceObj != null ? sourceObj.getId() : null); } /** * Creates an os account attribute with double value. * * @param attributeType Attribute type. * @param valueDouble Double value. * @param osAccount Account which the attribute pertains to. * @param host Host on which the attribute applies to. Pass * Null if it applies across hosts. * @param sourceObj Source where the attribute was found. */ public OsAccountAttribute(BlackboardAttribute.Type attributeType, double valueDouble, OsAccount osAccount, Host host, Content sourceObj) { super(attributeType, valueDouble); this.osAccountObjId = osAccount.getId(); this.hostId = (host != null ? host.getHostId() : null); this.sourceObjId = (sourceObj != null ? sourceObj.getId() : null); } /** * Creates an os account attribute with string value. * * @param attributeType Attribute type. * @param valueString String value. * @param osAccount Account which the attribute pertains to. * @param host Host on which the attribute applies to. Pass * Null if applies across hosts. * @param sourceObj Source where the attribute was found. 
*/ public OsAccountAttribute(BlackboardAttribute.Type attributeType, String valueString, OsAccount osAccount, Host host, Content sourceObj) { super(attributeType, valueString); this.osAccountObjId = osAccount.getId(); this.hostId = (host != null ? host.getHostId() : null); this.sourceObjId = (sourceObj != null ? sourceObj.getId() : null); } /** * Creates an os account attribute with byte-array value. * * @param attributeType Attribute type. * @param valueBytes Bytes value. * @param osAccount Account which the attribute pertains to. * @param host Host on which the attribute applies to. Pass * Null if it applies across hosts. * @param sourceObj Source where the attribute was found. */ public OsAccountAttribute(BlackboardAttribute.Type attributeType, byte[] valueBytes, OsAccount osAccount, Host host, Content sourceObj) { super(attributeType, valueBytes); this.osAccountObjId = osAccount.getId(); this.hostId = (host != null ? host.getHostId() : null); this.sourceObjId = (sourceObj != null ? sourceObj.getId() : null); } /** * Constructor to be used when creating an attribute after reading the * data from the table. * * @param attributeType Attribute type. * @param valueInt Int value. * @param valueLong Long value. * @param valueDouble Double value. * @param valueString String value. * @param valueBytes Bytes value. * @param sleuthkitCase Sleuthkit case. * @param osAccount Account which the attribute pertains to. * @param host Host on which the attribute applies to. Pass * Null if it applies across hosts. * @param sourceObj Source where the attribute was found. */ OsAccountAttribute(BlackboardAttribute.Type attributeType, int valueInt, long valueLong, double valueDouble, String valueString, byte[] valueBytes, SleuthkitCase sleuthkitCase, OsAccount osAccount, Host host, Content sourceObj) { super(attributeType, valueInt, valueLong, valueDouble, valueString, valueBytes, sleuthkitCase); this.osAccountObjId = osAccount.getId(); this.hostId = (host != null ? 
host.getHostId() : null); this.sourceObjId = (sourceObj != null ? sourceObj.getId() : null); } /** * Get the host id for the account attribute. * * @return Optional with Host id. */ public Optional getHostId() { return Optional.ofNullable(hostId); } /** * Get the object id of account to which this attribute applies. * * @return Account row id. */ public long getOsAccountObjectId() { return osAccountObjId; } /** * Get the object id of the source where the attribute was found. * * @return Object id of source. */ public Optional getSourceObjectId() { return Optional.ofNullable(sourceObjId); } } } sleuthkit-4.11.1/bindings/java/src/org/sleuthkit/datamodel/Volume.java000644 000765 000024 00000017344 14137073413 026574 0ustar00carrierstaff000000 000000 /* * Sleuth Kit Data Model * * Copyright 2011-2020 Basis Technology Corp. * Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.sleuthkit.datamodel; import java.util.ResourceBundle; import java.util.ArrayList; import java.util.List; /** * Represents a volume in a VolumeSystem, object stored in tsk_vs_parts table. * Populated based on data in database. 
*/ public class Volume extends AbstractContent { private long addr; private long startSector; //in sectors, relative to volume system start private long lengthInSectors; //in sectors private long flags; private String desc; private volatile long volumeHandle = 0; private volatile String uniquePath; private static ResourceBundle bundle = ResourceBundle.getBundle("org.sleuthkit.datamodel.Bundle"); /** * Constructor to create the data object mapped from tsk_vs_parts entry * * @param db database object * @param obj_id * @param addr * @param startSector starting sector, relative to start of VS * @param lengthInSectors * @param flags * @param desc */ protected Volume(SleuthkitCase db, long obj_id, long addr, long startSector, long lengthInSectors, long flags, String desc) { super(db, obj_id, "vol" + Long.toString(addr)); //NON-NLS this.addr = addr; this.startSector = startSector; this.lengthInSectors = lengthInSectors; this.uniquePath = null; this.flags = flags; if (!desc.equals("")) { this.desc = desc; } else { this.desc = bundle.getString("Volume.desc.text"); } } @Override public int read(byte[] buf, long offset, long len) throws TskCoreException { synchronized (this) { Content myParent = getParent(); if (!(myParent instanceof VolumeSystem)) { throw new TskCoreException(bundle.getString("Volume.read.exception.msg1.text")); } VolumeSystem parentVs = (VolumeSystem) myParent; // Reading from APFS volumes/volume systems is not yet supported if (parentVs.getType().equals(TskData.TSK_VS_TYPE_ENUM.TSK_VS_TYPE_APFS)) { throw new TskCoreException("Reading APFS pool volumes not yet supported"); } // read from the volume if (volumeHandle == 0) { volumeHandle = SleuthkitJNI.openVsPart(parentVs.getVolumeSystemHandle(), addr); } } return SleuthkitJNI.readVsPart(volumeHandle, buf, offset, len); } @Override public void close() { // there is nothing to free. The VolumeSystem structure // in C++ contains this structure and will free it. 
volumeHandle = 0; } @Override public void finalize() throws Throwable { try { close(); } finally { super.finalize(); } } @Override public long getSize() { return lengthInSectors * 512; } @Override public String getUniquePath() throws TskCoreException { // It is possible that multiple threads could be doing this calculation // simultaneously, but it's worth the potential extra processing to prevent deadlocks. if(uniquePath == null) { String tempUniquePath = ""; String name = getName(); if (!name.isEmpty()) { tempUniquePath = "/vol_" + name; //NON-NLS } Content myParent = getParent(); if (myParent != null) { tempUniquePath = myParent.getUniquePath() + tempUniquePath; } // Don't update uniquePath until it is complete. uniquePath = tempUniquePath; } return uniquePath; } //methods get exact data from database. could be manipulated to get more //meaningful data. /** * get the unique partition address within this volume system (assigned by * The Sleuth Kit) * * @return partition address in volume system */ public long getAddr() { return addr; } /** * get the starting sector address of this volume relative to start of the * volume system * * @return starting address */ public long getStart() { return startSector; } /** * get the length of the volume in sectors * * @return length */ public long getLength() { return lengthInSectors; } /** * get the flags * * @return flags */ public long getFlags() { return flags; } /** * get the flags as String * * @return flags as String */ public String getFlagsAsString() { return Volume.vsFlagToString(flags); } /** * get the description. This is set by the volume system and doesn't exist * for all volumes. 
* * @return description */ public String getDescription() { return desc; } // ----- Here all the methods for vs flags conversion / mapping ----- /** * Convert volume type flag to string * * @param vsFlag long flag to convert * * @return string representation */ public static String vsFlagToValue(long vsFlag) { String result = ""; for (TskData.TSK_VS_PART_FLAG_ENUM flag : TskData.TSK_VS_PART_FLAG_ENUM.values()) { if (flag.getVsFlag() == vsFlag) { result = flag.toString(); } } return result; } /** * Convert volume flag string to long * * @param vsFlag string representation of the flag * * @return long representation of the flag */ public static long valueToVsFlag(String vsFlag) { long result = 0; for (TskData.TSK_VS_PART_FLAG_ENUM flag : TskData.TSK_VS_PART_FLAG_ENUM.values()) { if (flag.toString().equals(vsFlag)) { result = flag.getVsFlag(); } } return result; } /** * Convert long representation of the flag to user readable format * * @param vsFlag long repr. of the flag * * @return user readable string representation */ public static String vsFlagToString(long vsFlag) { String result = ""; long allocFlag = TskData.TSK_VS_PART_FLAG_ENUM.TSK_VS_PART_FLAG_ALLOC.getVsFlag(); long unallocFlag = TskData.TSK_VS_PART_FLAG_ENUM.TSK_VS_PART_FLAG_UNALLOC.getVsFlag(); // some variables that might be needed in the future long metaFlag = TskData.TSK_VS_PART_FLAG_ENUM.TSK_VS_PART_FLAG_META.getVsFlag(); long allFlag = TskData.TSK_VS_PART_FLAG_ENUM.TSK_VS_PART_FLAG_ALL.getVsFlag(); if ((vsFlag & allocFlag) == allocFlag) { result = bundle.getString("Volume.vsFlagToString.allocated"); } if ((vsFlag & unallocFlag) == unallocFlag) { result = bundle.getString("Volume.vsFlagToString.unallocated"); } // ... 
add more code here if needed return result; } @Override public T accept(SleuthkitItemVisitor v) { return v.visit(this); } @Override public T accept(ContentVisitor v) { return v.visit(this); } @Override public List getChildren() throws TskCoreException { return getSleuthkitCase().getVolumeChildren(this); } @Override public List getChildrenIds() throws TskCoreException { return getSleuthkitCase().getVolumeChildrenIds(this); } /** * @return a list of FileSystem that are direct descendents of this Image. * * @throws TskCoreException */ public List getFileSystems() throws TskCoreException { List children = getChildren(); List fileSystems = new ArrayList(); for (Content child : children) { if (child instanceof FileSystem) { fileSystems.add((FileSystem) child); } } return fileSystems; } @Override public String toString(boolean preserveState) { return super.toString(preserveState) + "Volume [\t" + "addr " + addr + "\t" + "desc " + desc + "\t" + "flags " + flags + "\t" + "length " + lengthInSectors + "\t" + "start " + startSector + "]\t"; //NON-NLS } } sleuthkit-4.11.1/bindings/java/src/org/sleuthkit/datamodel/CaseDbConnectionInfo.java000644 000765 000024 00000005462 14137073413 031300 0ustar00carrierstaff000000 000000 /* * Sleuth Kit Data Model * * Copyright 2011-2015 Basis Technology Corp. * Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
*/ package org.sleuthkit.datamodel; import org.sleuthkit.datamodel.TskData.DbType; /** * The intent of this class is to hold any information needed to connect to a * remote database server, except for the actual database name. This does not * hold information to connect to a local database such as SQLite. * * It can be used generically to hold remote database connection information. */ public class CaseDbConnectionInfo { private String hostNameOrIP; private String portNumber; private String userName; private String password; private DbType dbType; /** * The intent of this class is to hold any information needed to connect to * a remote database server, except for the actual database name. This does * not hold information to connect to a local database such as SQLite. * * It can be used generically to hold remote database connection * information. * * @param hostNameOrIP the host name * @param portNumber the port number * @param userName the user name * @param password the password * @param dbType the database type */ public CaseDbConnectionInfo(String hostNameOrIP, String portNumber, String userName, String password, DbType dbType) { this.hostNameOrIP = hostNameOrIP; this.portNumber = portNumber; this.userName = userName; this.password = password; if (dbType == DbType.SQLITE) { throw new IllegalArgumentException("SQLite database type invalid for CaseDbConnectionInfo. 
CaseDbConnectionInfo should be used only for remote database types."); } this.dbType = dbType; } public DbType getDbType() { return this.dbType; } public String getHost() { return this.hostNameOrIP; } public String getPort() { return this.portNumber; } public String getUserName() { return this.userName; } public String getPassword() { return this.password; } public void setDbType(DbType db) { this.dbType = db; } public void setHost(String host) { this.hostNameOrIP = host; } public void setPort(String port) { this.portNumber = port; } public void setUserName(String user) { this.userName = user; } public void setPassword(String pass) { this.password = pass; } } sleuthkit-4.11.1/bindings/java/src/org/sleuthkit/datamodel/AccountDeviceInstance.java000644 000765 000024 00000004125 14137073413 031517 0ustar00carrierstaff000000 000000 /* * Sleuth Kit Data Model * * Copyright 2017-18 Basis Technology Corp. * Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.sleuthkit.datamodel; /** * Encapsulates an Account existing on a specific device. 
* * There is a 1:M:N relationship between * Account, AccountDeviceInstance & AccountFileInstance */ public final class AccountDeviceInstance { private final Account account; private final String deviceID; AccountDeviceInstance(Account account, String deviceId) { this.account = account; this.deviceID = deviceId; } /** * Returns the underlying Account * * @return account */ public Account getAccount(){ return this.account; } /** * Returns the device Id the Account existed on * * @return device id */ public String getDeviceId(){ return this.deviceID; } @Override public int hashCode() { int hash = 5; hash = 11 * hash + (this.account != null ? this.account.hashCode() : 0); hash = 11 * hash + (this.deviceID != null ? this.deviceID.hashCode() : 0); return hash; } @Override public boolean equals(Object obj) { if (this == obj) { return true; } if (obj == null) { return false; } if (getClass() != obj.getClass()) { return false; } final AccountDeviceInstance other = (AccountDeviceInstance) obj; if ((this.deviceID == null) ? (other.deviceID != null) : !this.deviceID.equals(other.deviceID)) { return false; } if (this.account != other.account && (this.account == null || !this.account.equals(other.account))) { return false; } return true; } } sleuthkit-4.11.1/bindings/java/src/org/sleuthkit/datamodel/Relationship.java000644 000765 000024 00000011033 14137073413 027753 0ustar00carrierstaff000000 000000 /* * SleuthKit Java Bindings * * Copyright 2017-18 Basis Technology Corp. * Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
* See the License for the specific language governing permissions and * limitations under the License. */ package org.sleuthkit.datamodel; import java.util.Arrays; import java.util.Collections; import static java.util.Collections.singleton; import java.util.HashMap; import java.util.HashSet; import java.util.Set; import static org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE.TSK_CALLLOG; import static org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE.TSK_CONTACT; import static org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE.TSK_EMAIL_MSG; import static org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE.TSK_MESSAGE; import static org.sleuthkit.datamodel.CollectionUtils.hashSetOf; /** * A relationship between Accounts, such as a communication ( email, sms, phone * call (call log) ) or presence in a contact book. */ public final class Relationship { public static final class Type { private final String displayName; private final String typeName; private final int typeID; public static final Relationship.Type MESSAGE = new Type("MESSAGE", "Message", 1); public static final Relationship.Type CALL_LOG = new Type("CALL_LOG", "Call Log", 2); public static final Relationship.Type CONTACT = new Type("CONTACT", "Contact", 3); private final static HashMap> typesToArtifactTypeIDs = new HashMap>(); static { typesToArtifactTypeIDs.put(MESSAGE, hashSetOf( TSK_EMAIL_MSG.getTypeID(), TSK_MESSAGE.getTypeID())); typesToArtifactTypeIDs.put(CALL_LOG, singleton( TSK_CALLLOG.getTypeID())); typesToArtifactTypeIDs.put(CONTACT, singleton( TSK_CONTACT.getTypeID())); } private static final Set PREDEFINED_COMMUNICATION_TYPES = Collections.unmodifiableSet(new HashSet(Arrays.asList( MESSAGE, CALL_LOG))); /** * Subset of predefined types that represent communications. * * @return A subset of predefined types that represent communications. 
* */ static Set getPredefinedCommunicationTypes() { return PREDEFINED_COMMUNICATION_TYPES; } private Type(String name, String displayName, int id) { this.typeName = name; this.displayName = displayName; this.typeID = id; } /** * Get the display name. * * @return The display name. */ public String getDisplayName() { return displayName; } /** * Get the unique type name * * @return The unique type name. */ public String getTypeName() { return typeName; } /** * Get the id of this type. * * @return The type ID. */ public int getTypeID() { return typeID; } @Override public int hashCode() { int hash = 7; hash = 37 * hash + (this.typeName != null ? this.typeName.hashCode() : 0); hash = 37 * hash + this.typeID; return hash; } @Override public boolean equals(Object obj) { if (this == obj) { return true; } if (obj == null) { return false; } if (getClass() != obj.getClass()) { return false; } final Type other = (Type) obj; if (this.typeID != other.typeID) { return false; } if ((this.typeName == null) ? (other.typeName != null) : !this.typeName.equals(other.typeName)) { return false; } return true; } @Override public String toString() { return "{" + this.getClass().getName() + ": typeID=" + typeName + ", displayName=" + this.displayName + ", typeName=" + this.typeName + "}"; } /** * Is this type creatable from the given artifact. Specifically do they * have compatible types. * * @param relationshipArtifact the relationshipArtifact to test * creatability from * * @return if a relationship of this type can be created from the given * artifact. 
*/ boolean isCreatableFrom(BlackboardArtifact relationshipArtifact) { Set get = typesToArtifactTypeIDs.get(this); return get != null && get.contains(relationshipArtifact.getArtifactTypeID()); } } } sleuthkit-4.11.1/bindings/java/src/org/sleuthkit/datamodel/Bundle_ja.properties000644 000765 000024 00000123041 14137073413 030453 0ustar00carrierstaff000000 000000 #Thu Sep 30 10:23:46 UTC 2021 AbstractFile.readLocal.exception.msg1.text=\u30ed\u30fc\u30ab\u30eb\u30d5\u30a1\u30a4\u30eb\u306e\u8aad\u307f\u53d6\u308a\u4e2d\u306b\u30a8\u30e9\u30fc\u304c\u767a\u751f\u3057\u307e\u3057\u305f\u3002\u30ed\u30fc\u30ab\u30eb\u30d1\u30b9\u304c\u30bb\u30c3\u30c8\u3055\u308c\u3066\u3044\u307e\u305b\u3093\u3002 AbstractFile.readLocal.exception.msg2.text=\u30ed\u30fc\u30ab\u30eb\u30d5\u30a1\u30a4\u30eb\u306e\u8aad\u307f\u53d6\u308a\u4e2d\u306b\u30a8\u30e9\u30fc\u304c\u767a\u751f\u3057\u307e\u3057\u305f\u3002\u4e0b\u8a18\u306e\u30ed\u30fc\u30ab\u30eb\u30d1\u30b9\u306b\u306f\u5b58\u5728\u3057\u307e\u305b\u3093\uff1a{0} AbstractFile.readLocal.exception.msg3.text=\u30ed\u30fc\u30ab\u30eb\u30d5\u30a1\u30a4\u30eb\u306e\u8aad\u307f\u53d6\u308a\u4e2d\u306b\u30a8\u30e9\u30fc\u304c\u767a\u751f\u3057\u307e\u3057\u305f\u3002\u4e0b\u8a18\u306e\u30ed\u30fc\u30ab\u30eb\u30d1\u30b9\u3067\u306f\u8aad\u307f\u53d6\u308a\u3067\u304d\u307e\u305b\u3093\uff1a{0} AbstractFile.readLocal.exception.msg4.text=\u30d5\u30a1\u30a4\u30eb{0}\u306e\u8aad\u307f\u53d6\u308a\u4e2d\u306b\u30a8\u30e9\u30fc\u304c\u767a\u751f\u3057\u307e\u3057\u305f AbstractFile.readLocal.exception.msg5.text=\u30ed\u30fc\u30ab\u30eb\u30d5\u30a1\u30a4\u30eb{0}\u3092\u8aad\u307f\u53d6\u308c\u307e\u305b\u3093 BaseTypes.communication.name=\u30b3\u30df\u30e5\u30cb\u30b1\u30fc\u30b7\u30e7\u30f3 BaseTypes.fileSystem.name=\u30d5\u30a1\u30a4\u30eb\u30b7\u30b9\u30c6\u30e0 BaseTypes.geolocation.name=\u30b8\u30aa\u30ed\u30b1\u30fc\u30b7\u30e7\u30f3 BaseTypes.miscTypes.name=\u305d\u306e\u4ed6 
BaseTypes.webActivity.name=Web\u30a2\u30af\u30c6\u30a3\u30d3\u30c6\u30a3 BlackboardArtifact.shortDescriptionDate.text=\u3067{0} BlackboardArtifact.tagFile.text=\u30bf\u30b0\u4ed8\u3051\u3055\u308c\u305f\u30d5\u30a1\u30a4\u30eb BlackboardArtifact.tsk.recentObject.text=\u6700\u8fd1\u958b\u3044\u305f\u30c9\u30ad\u30e5\u30e1\u30f3\u30c8 BlackboardArtifact.tskAccount.text=\u30a2\u30ab\u30a6\u30f3\u30c8 BlackboardArtifact.tskAssociatedObject.text=\u95a2\u9023\u30aa\u30d6\u30b8\u30a7\u30af\u30c8 BlackboardArtifact.tskBackupEvent.text=\u30d0\u30c3\u30af\u30a2\u30c3\u30d7\u30a4\u30d9\u30f3\u30c8 BlackboardArtifact.tskBluetoothAdapter.text=Bluetooth\u30a2\u30c0\u30d7\u30bf\u30fc BlackboardArtifact.tskBluetoothPairing.text=Bluetooth\u30da\u30a2\u30ea\u30f3\u30b0 BlackboardArtifact.tskCalendarEntry.text=\u30ab\u30ec\u30f3\u30c0\u30fc\u30a8\u30f3\u30c8\u30ea\u30fc BlackboardArtifact.tskCalllog.text=\u30b3\u30fc\u30eb\u30ed\u30b0 BlackboardArtifact.tskClipboardContent.text=\u30af\u30ea\u30c3\u30d7\u30dc\u30fc\u30c9\u306e\u5185\u5bb9 BlackboardArtifact.tskContact.text=\u30b3\u30f3\u30bf\u30af\u30c8 BlackboardArtifact.tskDataSourceUsage.text=\u30c7\u30fc\u30bf\u30bd\u30fc\u30b9\u306e\u4f7f\u7528\u6cd5 BlackboardArtifact.tskDeletedProg.text=\u524a\u9664\u3055\u308c\u305f\u30d7\u30ed\u30b0\u30e9\u30e0 BlackboardArtifact.tskDeviceAttached.text=USB\u30c7\u30d0\u30a4\u30b9\u304c\u63a5\u7d9a\u3055\u308c\u3066\u3044\u307e\u3059 BlackboardArtifact.tskDeviceInfo.text=\u30c7\u30d0\u30a4\u30b9\u60c5\u5831 BlackboardArtifact.tskDhcpInfo.text=DHCP\u60c5\u5831 BlackboardArtifact.tskDownloadSource.text=\u30bd\u30fc\u30b9\u3092\u30c0\u30a6\u30f3\u30ed\u30fc\u30c9 BlackboardArtifact.tskEmailMsg.text=E\u30e1\u30fc\u30eb\u30e1\u30c3\u30bb\u30fc\u30b8 BlackboardArtifact.tskEncryptionDetected.text=\u6697\u53f7\u5316\u691c\u51fa\u6e08 BlackboardArtifact.tskEncryptionSuspected.text=\u6697\u53f7\u5316\u306e\u53ef\u80fd\u6027 
BlackboardArtifact.tskExtMismatchDetected.text=\u62e1\u5f35\u5b50\u4e0d\u4e00\u81f4\u691c\u51fa\u6e08 BlackboardArtifact.tskExtractedText.text=\u62bd\u51fa\u3055\u308c\u305f\u30c6\u30ad\u30b9\u30c8 BlackboardArtifact.tskFaceDetected.text=\u9854\u8a8d\u8b58 BlackboardArtifact.tskGPSArea.text=GPS\u30a8\u30ea\u30a2 BlackboardArtifact.tskGenInfo.text=\u4e00\u822c\u60c5\u5831 BlackboardArtifact.tskGpsBookmark.text=GPS\u30d6\u30c3\u30af\u30de\u30fc\u30af BlackboardArtifact.tskGpsLastKnownLocation.text=\u6700\u5f8c\u306b\u53d6\u5f97\u3057\u305fGPS\u4f4d\u7f6e\u60c5\u5831 BlackboardArtifact.tskGpsRoute.text=GPS\u30eb\u30fc\u30c8 BlackboardArtifact.tskGpsSearch.text=GPS\u691c\u7d22 BlackboardArtifact.tskGpsTrackpoint.text=GPS\u30c8\u30e9\u30c3\u30af\u30dd\u30a4\u30f3\u30c8 BlackboardArtifact.tskHashsetHit.text=\u30cf\u30c3\u30b7\u30e5\u30bb\u30c3\u30c8\u30d2\u30c3\u30c8 BlackboardArtifact.tskInstalledProg.text=\u30a4\u30f3\u30b9\u30c8\u30fc\u30eb\u6e08\u307f\u30d7\u30ed\u30b0\u30e9\u30e0 BlackboardArtifact.tskInterestingArtifactHit.text=\u7591\u308f\u3057\u3044\u7d50\u679c BlackboardArtifact.tskInterestingFileHit.text=\u7591\u308f\u3057\u3044\u30d5\u30a1\u30a4\u30eb BlackboardArtifact.tskKeywordHits.text=\u30ad\u30fc\u30ef\u30fc\u30c9\u30d2\u30c3\u30c8 BlackboardArtifact.tskMessage.text=\u30e1\u30c3\u30bb\u30fc\u30b8 BlackboardArtifact.tskMetadata.text=\u30e1\u30bf\u30c7\u30fc\u30bf BlackboardArtifact.tskMetadataExif.text=EXIF\u30e1\u30bf\u30c7\u30fc\u30bf BlackboardArtifact.tskObjectDetected.text=\u30aa\u30d6\u30b8\u30a7\u30af\u30c8\u304c\u691c\u51fa\u3055\u308c\u307e\u3057\u305f BlackboardArtifact.tskOsAccount.text=\u30aa\u30da\u30ec\u30fc\u30c6\u30a3\u30f3\u30b0\u30b7\u30b9\u30c6\u30e0\u30e6\u30fc\u30b6\u30a2\u30ab\u30a6\u30f3\u30c8 BlackboardArtifact.tskOsInfo.text=\u30aa\u30da\u30ec\u30fc\u30c6\u30a3\u30f3\u30b0\u30b7\u30b9\u30c6\u30e0\u60c5\u5831 BlackboardArtifact.tskPreviouslyNotable.text=\u4ee5\u524d\u306b\u6ce8\u76ee\u306b\u5024\u3059\u308b 
BlackboardArtifact.tskPreviouslySeen.text=\u4ee5\u524d\u306b\u8a8d\u8b58 BlackboardArtifact.tskPreviouslyUnseen.text=\u4ee5\u524d\u306b\u306f\u672a\u8a8d\u8b58 BlackboardArtifact.tskProgNotifications.text=\u30d7\u30ed\u30b0\u30e9\u30e0\u901a\u77e5 BlackboardArtifact.tskProgRun.text=\u5b9f\u884c\u30d7\u30ed\u30b0\u30e9\u30e0 BlackboardArtifact.tskRemoteDrive.text=\u30ea\u30e2\u30fc\u30c8\u30c9\u30e9\u30a4\u30d6 BlackboardArtifact.tskScreenShots.text=\u30b9\u30af\u30ea\u30fc\u30f3\u30b7\u30e7\u30c3\u30c8 BlackboardArtifact.tskServiceAccount.text=Web\u30a2\u30ab\u30a6\u30f3\u30c8 BlackboardArtifact.tskSimAttached.text=SIM\u63a5\u7d9a BlackboardArtifact.tskSpeedDialEntry.text=\u30b9\u30d4\u30fc\u30c9\u30c0\u30a4\u30eb\u30a8\u30f3\u30c8\u30ea\u30fc BlackboardArtifact.tskTLEvent.text=TL\u30a4\u30d9\u30f3\u30c8 BlackboardArtifact.tskTagArtifact.text=\u30bf\u30b0\u4ed8\u3051\u3055\u308c\u305f\u7d50\u679c BlackboardArtifact.tskToolOutput.text=\u30ed\u30fc\u30c4\u30fc\u30eb\u30a2\u30a6\u30c8\u30d7\u30c3\u30c8 BlackboardArtifact.tskTrack.text=GPS\u30c8\u30e9\u30c3\u30af BlackboardArtifact.tskUserContentSuspected.text=\u7591\u308f\u3057\u3044\u30e6\u30fc\u30b6\u30fc\u30b3\u30f3\u30c6\u30f3\u30c4 BlackboardArtifact.tskUserDeviceEvent.text=\u30e6\u30fc\u30b6\u30fc\u30c7\u30d0\u30a4\u30b9\u30a4\u30d9\u30f3\u30c8 BlackboardArtifact.tskVerificationFailed.text=\u691c\u8a3c\u306e\u5931\u6557 BlackboardArtifact.tskWIFINetwork.text=\u30ef\u30a4\u30e4\u30ec\u30b9\u30cd\u30c3\u30c8\u30ef\u30fc\u30af BlackboardArtifact.tskWIFINetworkAdapter.text=\u30ef\u30a4\u30e4\u30ec\u30b9\u30cd\u30c3\u30c8\u30ef\u30fc\u30af\u30a2\u30c0\u30d7\u30bf BlackboardArtifact.tskWebAccountType.text=Web\u30a2\u30ab\u30a6\u30f3\u30c8\u30bf\u30a4\u30d7 BlackboardArtifact.tskWebBookmark.text=\u30a6\u30a7\u30d6\u30b5\u30a4\u30c8\u30d6\u30c3\u30af\u30de\u30fc\u30af BlackboardArtifact.tskWebCache.text=Web\u30ad\u30e3\u30c3\u30b7\u30e5 BlackboardArtifact.tskWebCategorization.text=Web\u30ab\u30c6\u30b4\u30ea 
BlackboardArtifact.tskWebCookie.text=\u30a6\u30a7\u30d6cookie BlackboardArtifact.tskWebDownload.text=\u30a6\u30a7\u30d6\u30c0\u30a6\u30f3\u30ed\u30fc\u30c9 BlackboardArtifact.tskWebFormAddresses.text=Web\u30d5\u30a9\u30fc\u30e0\u306e\u30a2\u30c9\u30ec\u30b9 BlackboardArtifact.tskWebFormAutofill.text=Web\u30d5\u30a9\u30fc\u30e0\u306e\u81ea\u52d5\u5165\u529b BlackboardArtifact.tskWebHistory.text=\u30a6\u30a7\u30d6\u5c65\u6b74 BlackboardArtifact.tskWebSearchQuery.text=\u30a6\u30a7\u30d6\u691c\u7d22 BlackboardArtifact.tskYaraHit.text=YARA\u30d2\u30c3\u30c8 BlackboardAttribute.tskAccountType.text=\u30a2\u30ab\u30f3\u30c8\u30bf\u30a4\u30d7 BlackboardAttribute.tskActivityType.text=\u30a2\u30af\u30c6\u30a3\u30d3\u30c6\u30a3\u30bf\u30a4\u30d7 BlackboardAttribute.tskAssociatedArtifact.text=\u95a2\u9023\u3059\u308b\u30a2\u30fc\u30c6\u30a3\u30d5\u30a1\u30af\u30c8 BlackboardAttribute.tskBankName.text=\u30d0\u30f3\u30af\u540d BlackboardAttribute.tskBrandName.text=\u30d6\u30e9\u30f3\u30c9\u540d BlackboardAttribute.tskBssid.text=BSSID BlackboardAttribute.tskCalendarEntryType.text=\u30ab\u30ec\u30f3\u30c0\u30fc\u30a8\u30f3\u30c8\u30ea\u30fc\u30bf\u30a4\u30d7 BlackboardAttribute.tskCardDiscretionary.text=\u30ab\u30fc\u30c9\u88c1\u91cf\u30c7\u30fc\u30bf BlackboardAttribute.tskCardExpiration.text=\u30ab\u30fc\u30c9\u6709\u52b9\u671f\u9650\uff08YYMM\uff09 BlackboardAttribute.tskCardLRC.text=\u30ab\u30fc\u30c9\u7e26\u65b9\u5411\u5197\u9577\u6027\u30c1\u30a7\u30c3\u30af BlackboardAttribute.tskCardNumber.text=\u30ab\u30fc\u30c9\u756a\u53f7 BlackboardAttribute.tskCardScheme.text=\u30ab\u30fc\u30c9\u30b9\u30ad\u30fc\u30e0 BlackboardAttribute.tskCardServiceCode.text=\u30ab\u30fc\u30c9\u30b5\u30fc\u30d3\u30b9\u30b3\u30fc\u30c9 BlackboardAttribute.tskCardType.text=\u30ab\u30fc\u30c9\u306e\u6709\u52b9\u671f\u9650\uff08YYMM\uff09 BlackboardAttribute.tskCategory.text=\u30ab\u30c6\u30b4\u30ea\u30fc BlackboardAttribute.tskCity.text=\u5e02 BlackboardAttribute.tskComment.text=\u30b3\u30e1\u30f3\u30c8 
BlackboardAttribute.tskCorrelationType.text=\u76f8\u95a2\u30bf\u30a4\u30d7 BlackboardAttribute.tskCorrelationValue.text=\u76f8\u95a2\u5024 BlackboardAttribute.tskCount.text=\u30ab\u30a6\u30f3\u30c8 BlackboardAttribute.tskCountry.text=\u56fd BlackboardAttribute.tskDateTimeAccessed.text=\u30a2\u30af\u30bb\u30b9\u65e5\u4ed8 BlackboardAttribute.tskDateTimeCreated.text=\u4f5c\u6210\u65e5 BlackboardAttribute.tskDateTimeEnd.text=\u7d42\u4e86\u65e5\u4ed8\uff0f\u6642\u523b BlackboardAttribute.tskDateTimeModified.text=\u4fee\u6b63\u65e5 BlackboardAttribute.tskDateTimeRcvd.text=\u53d7\u4fe1\u65e5 BlackboardAttribute.tskDateTimeSent.text=\u9001\u4fe1\u65e5 BlackboardAttribute.tskDateTimeStart.text=\u958b\u59cb\u65e5\u4ed8\uff0f\u6642\u523b BlackboardAttribute.tskDatetime.text=\u65e5\u4ed8\uff0f\u6642\u523b BlackboardAttribute.tskDescription.text=\u8aac\u660e BlackboardAttribute.tskDeviceId.text=\u6a5f\u5668ID BlackboardAttribute.tskDeviceMake.text=\u6a5f\u5668\u30e1\u30fc\u30ab\u30fc BlackboardAttribute.tskDeviceModel.text=\u6a5f\u5668\u30e2\u30c7\u30eb BlackboardAttribute.tskDeviceName.text=\u6a5f\u5668\u540d BlackboardAttribute.tskDirection.text=\u65b9\u5411 BlackboardAttribute.tskDomain.text=\u30c9\u30e1\u30a4\u30f3 BlackboardAttribute.tskEmail.text=E\u30e1\u30fc\u30eb BlackboardAttribute.tskEmailBcc.text=E-Mail BCC BlackboardAttribute.tskEmailCc.text=E-Mail CC BlackboardAttribute.tskEmailContentHtml.text=\u30e1\u30c3\u30bb\u30fc\u30b8\uff08HTML\uff09 BlackboardAttribute.tskEmailContentPlain.text=\u30e1\u30c3\u30bb\u30fc\u30b8\uff08\u30d7\u30ec\u30fc\u30f3\u30c6\u30ad\u30b9\u30c8\uff09 BlackboardAttribute.tskEmailContentRtf.text=\u30e1\u30c3\u30bb\u30fc\u30b8\uff08RTF\uff09 BlackboardAttribute.tskEmailFrom.text=\u9001\u4fe1\u5143E\u30e1\u30fc\u30eb BlackboardAttribute.tskEmailHome.text=E\u30e1\u30fc\u30eb\uff08\u81ea\u5b85\uff09 BlackboardAttribute.tskEmailOffice.text=E\u30e1\u30fc\u30eb\uff08\u30aa\u30d5\u30a3\u30b9\uff09 
BlackboardAttribute.tskEmailReplyTo.text=\u8fd4\u4fe1\u30a2\u30c9\u30ec\u30b9 BlackboardAttribute.tskEmailTo.text=E\u30e1\u30fc\u30eb\u5b9b\u5148 BlackboardAttribute.tskEncryptionDetected.text=\u6697\u53f7\u5316\u691c\u51fa\u6e08 BlackboardAttribute.tskEntropy.text=\u30a8\u30f3\u30c8\u30ed\u30d4\u30fc BlackboardAttribute.tskFileTypeExt.text=\u30d5\u30a1\u30a4\u30eb\u30bf\u30a4\u30d7\uff08\u62e1\u5f35\u5b50\uff09 BlackboardAttribute.tskFileTypeSig.text=\u30d5\u30a1\u30a4\u30eb\u30bf\u30a4\u30d7\uff08\u30b7\u30b0\u30cd\u30c1\u30e3\uff09 BlackboardAttribute.tskFlag.text=\u30d5\u30e9\u30b0 BlackboardAttribute.tskGeoAltitude.text=\u6a19\u9ad8 BlackboardAttribute.tskGeoBearing.text=\u65b9\u5411 BlackboardAttribute.tskGeoHPrecision.text=\u6c34\u5e73\u7cbe\u5ea6 BlackboardAttribute.tskGeoLatitude.text=\u7def\u5ea6 BlackboardAttribute.tskGeoLatitudeEnd.text=\u7d42\u4e86\u7def\u5ea6 BlackboardAttribute.tskGeoLatitudeStart.text=\u30b9\u30bf\u30fc\u30c8\u7def\u5ea6 BlackboardAttribute.tskGeoLongitude.text=\u7d4c\u5ea6 BlackboardAttribute.tskGeoLongitudeEnd.text=\u7d42\u4e86\u7d4c\u5ea6 BlackboardAttribute.tskGeoLongitudeStart.text=\u30b9\u30bf\u30fc\u30c8\u7d4c\u5ea6 BlackboardAttribute.tskGeoMapDatum.text=\u6e2c\u5730\u7cfb BlackboardAttribute.tskGeoVPrecision.text=\u5782\u76f4\u7cbe\u5ea6 BlackboardAttribute.tskGeoVelocity.text=\u901f\u5ea6 BlackboardAttribute.tskHashMd5.text=MD5\u30cf\u30c3\u30b7\u30e5 BlackboardAttribute.tskHashSha1.text=SHA1\u30cf\u30c3\u30b7\u30e5 BlackboardAttribute.tskHashSha225.text=SHA2-256\u30cf\u30c3\u30b7\u30e5 BlackboardAttribute.tskHashSha2512.text=SHA2-512\u30cf\u30c3\u30b7\u30e5 BlackboardAttribute.tskHashsetName.text=\u30cf\u30c3\u30b7\u30e5\u30bb\u30c3\u30c8\u540d BlackboardAttribute.tskHeaders.text=\u30d8\u30c3\u30c0\u30fc BlackboardAttribute.tskHomeDir.text=\u30db\u30fc\u30e0\u30c7\u30a3\u30ec\u30af\u30c8\u30ea BlackboardAttribute.tskHost.text=\u30db\u30b9\u30c8 BlackboardAttribute.tskIccid.text=ICCID BlackboardAttribute.tskId.text=ID 
BlackboardAttribute.tskImei.text=IMEI BlackboardAttribute.tskImsi.text=IMSI BlackboardAttribute.tskInterestingFile.text=\u7591\u308f\u3057\u3044\u30d5\u30a1\u30a4\u30eb BlackboardAttribute.tskIpAddress.text=IP\u30a2\u30c9\u30ec\u30b9 BlackboardAttribute.tskIsAdmin.text=\u7ba1\u7406\u8005\u3067\u3059 BlackboardAttribute.tskIsDeleted.text=\u306f\u524a\u9664\u3055\u308c\u307e\u3057\u305f BlackboardAttribute.tskKeyword.text=\u30ad\u30fc\u30ef\u30fc\u30c9 BlackboardAttribute.tskKeywordPreview.text=\u30ad\u30fc\u30ef\u30fc\u30c9\u30d7\u30ec\u30d3\u30e5\u30fc BlackboardAttribute.tskKeywordRegexp.text=\u6b63\u898f\u8868\u73fe\u30ad\u30fc\u30ef\u30fc\u30c9 BlackboardAttribute.tskKeywordSearchDocumentID.text=\u30ad\u30fc\u30ef\u30fc\u30c9\u691c\u7d22\u30c9\u30ad\u30e5\u30e1\u30f3\u30c8ID BlackboardAttribute.tskKeywordSearchType.text=\u30ad\u30fc\u30ef\u30fc\u30c9\u691c\u7d22\u30bf\u30a4\u30d7 BlackboardAttribute.tskKeywordSet.text=\u30ad\u30fc\u30ef\u30fc\u30c9\u30bb\u30c3\u30c8 BlackboardAttribute.tskLocalPath.text=\u30ed\u30fc\u30ab\u30eb\u30d1\u30b9 BlackboardAttribute.tskLocation.text=\u30ed\u30b1\u30fc\u30b7\u30e7\u30f3 BlackboardAttribute.tskMacAddress.text=Mac\u30a2\u30c9\u30ec\u30b9 BlackboardAttribute.tskMalwareDetected.text=\u30de\u30eb\u30a6\u30a7\u30a2\u691c\u51fa\u6e08 BlackboardAttribute.tskMessageType.text=\u30e1\u30c3\u30bb\u30fc\u30b8\u30bf\u30a4\u30d7 BlackboardAttribute.tskMinCount.text=\u6700\u5c0f\u30ab\u30a6\u30f3\u30c8 BlackboardAttribute.tskMsgId.text=\u30e1\u30c3\u30bb\u30fc\u30b8ID BlackboardAttribute.tskMsgReplyId.text=\u30e1\u30c3\u30bb\u30fc\u30b8\u30ea\u30d7\u30e9\u30a4ID BlackboardAttribute.tskName.text=\u540d\u524d BlackboardAttribute.tskNamePerson.text=\u4eba\u540d BlackboardAttribute.tskOrganization.text=\u7d44\u7e54 BlackboardAttribute.tskOtherCases.text=\u305d\u306e\u4ed6\u306e\u30b1\u30fc\u30b9 BlackboardAttribute.tskOwner.text=\u4fdd\u6709\u8005 BlackboardAttribute.tskPassword.text=\u30d1\u30b9\u30ef\u30fc\u30c9 
BlackboardAttribute.tskPath.text=\u30d1\u30b9 BlackboardAttribute.tskPathId.text=\u30d1\u30b9ID BlackboardAttribute.tskPathSource.text=\u30d1\u30b9\u30bd\u30fc\u30b9 BlackboardAttribute.tskPermissions.text=\u30d1\u30fc\u30df\u30c3\u30b7\u30e7\u30f3 BlackboardAttribute.tskPhoneNumber.text=\u96fb\u8a71\u756a\u53f7 BlackboardAttribute.tskPhoneNumberFrom.text=\u767a\u4fe1\u8005\u96fb\u8a71\u756a\u53f7 BlackboardAttribute.tskPhoneNumberHome.text=\u96fb\u8a71\u756a\u53f7\uff08\u81ea\u5b85\uff09 BlackboardAttribute.tskPhoneNumberMobile.text=\u96fb\u8a71\u756a\u53f7\uff08\u643a\u5e2f\uff09 BlackboardAttribute.tskPhoneNumberOffice.text=\u96fb\u8a71\u756a\u53f7\uff08\u4f1a\u793e\uff09 BlackboardAttribute.tskPhoneNumberTo.text=\u7740\u4fe1\u8005\u96fb\u8a71\u756a\u53f7 BlackboardAttribute.tskProcessorArchitecture.text=\u30d7\u30ed\u30bb\u30c3\u30b5\u30a2\u30fc\u30ad\u30c6\u30af\u30c1\u30e3 BlackboardAttribute.tskProcessorName.text=\u30d7\u30ed\u30bb\u30c3\u30b5\u30fc\u540d BlackboardAttribute.tskProductId.text=\u88fd\u54c1\u756a\u53f7 BlackboardAttribute.tskProgName.text=\u30d7\u30ed\u30b0\u30e9\u30e0\u540d BlackboardAttribute.tskReadStatus.text=\u8aad\u3080 BlackboardAttribute.tskRealm.text=\u5206\u91ce BlackboardAttribute.tskReferrer.text=\u30ea\u30d5\u30a1\u30e9 BlackboardAttribute.tskRemotePath.text=\u30ea\u30e2\u30fc\u30c8\u30d1\u30b9 BlackboardAttribute.tskServerName.text=\u30b5\u30fc\u30d0\u540d BlackboardAttribute.tskSetName.text=\u30bb\u30c3\u30c8\u540d BlackboardAttribute.tskShortcut.text=\u30b7\u30e7\u30fc\u30c8\u30ab\u30c3\u30c8 BlackboardAttribute.tskSsid.text=SSID BlackboardAttribute.tskStegDetected.text=\u30b9\u30c6\u30ac\u30ce\u30b0\u30e9\u30d5\u30a3\u30fc\u691c\u51fa\u6e08 BlackboardAttribute.tskSubject.text=\u30b5\u30d6\u30b8\u30a7\u30af\u30c8 BlackboardAttribute.tskTLEventType.text=\u30a4\u30d9\u30f3\u30c8\u30bf\u30a4\u30d7 BlackboardAttribute.tskTagName.text=\u30bf\u30b0\u540d 
BlackboardAttribute.tskTaggedArtifact.text=\u30bf\u30b0\u4ed8\u3051\u3055\u308c\u305f\u7d50\u679c BlackboardAttribute.tskTempDir.text=\u4e00\u6642\u30d5\u30a1\u30a4\u30eb\u30d5\u30a9\u30eb\u30c0\u30fc BlackboardAttribute.tskText.text=\u30c6\u30ad\u30b9\u30c8 BlackboardAttribute.tskTextFile.text=\u30c6\u30ad\u30b9\u30c8\u30d5\u30a1\u30a4\u30eb BlackboardAttribute.tskTextLanguage.text=\u30c6\u30ad\u30b9\u30c8\u8a00\u8a9e BlackboardAttribute.tskTitle.text=\u30bf\u30a4\u30c8\u30eb BlackboardAttribute.tskUrl.text=URL BlackboardAttribute.tskUrlDecoded.text=\u5fa9\u53f7\u5316\u3055\u308c\u305fURL BlackboardAttribute.tskUserId.text=\u30e6\u30fc\u30b6ID BlackboardAttribute.tskUserName.text=\u30e6\u30fc\u30b6\u540d BlackboardAttribute.tskValue.text=\u30d0\u30ea\u30e5\u30fc BlackboardAttribute.tskVersion.text=\u30d0\u30fc\u30b8\u30e7\u30f3 BlackboardAttribute.tskaccountsettings.text=\u30a2\u30ab\u30a6\u30f3\u30c8\u8a2d\u5b9a BlackboardAttribute.tskattachments.text=\u30e1\u30c3\u30bb\u30fc\u30b8\u306e\u6dfb\u4ed8\u30d5\u30a1\u30a4\u30eb BlackboardAttribute.tskbytesreceived.text=\u53d7\u4fe1\u3057\u305f\u30d0\u30a4\u30c8\u6570 BlackboardAttribute.tskbytessent.text=\u9001\u4fe1\u30d0\u30a4\u30c8\u6570 BlackboardAttribute.tskdatetimedeleted.text=\u524a\u9664\u6642\u9593 BlackboardAttribute.tskdatetimepwdfail.text=\u30d1\u30b9\u30ef\u30fc\u30c9\u5931\u6557\u65e5 BlackboardAttribute.tskdatetimepwdreset.text=\u30d1\u30b9\u30ef\u30fc\u30c9\u30ea\u30bb\u30c3\u30c8\u65e5 BlackboardAttribute.tskdisplayname.text=\u8868\u793a\u540d BlackboardAttribute.tskdistancefromhome.text=\u30db\u30fc\u30e0\u30dd\u30a4\u30f3\u30c8\u304b\u3089\u306e\u8ddd\u96e2 BlackboardAttribute.tskdistancetraveled.text=\u79fb\u52d5\u8ddd\u96e2 BlackboardAttribute.tskgeoareapoints.text=\u30a8\u30ea\u30a2\u306e\u8f2a\u90ed\u3092\u69cb\u6210\u3059\u308b\u30dd\u30a4\u30f3\u30c8\u30ea\u30b9\u30c8 BlackboardAttribute.tskgeopath.text=\u30c8\u30e9\u30c3\u30af\u30dd\u30a4\u30f3\u30c8\u306e\u30ea\u30b9\u30c8 
BlackboardAttribute.tskgeowaypoints.text=\u30a6\u30a7\u30a4\u30dd\u30a4\u30f3\u30c8\u306e\u30ea\u30b9\u30c8 BlackboardAttribute.tskgroups.text=\u30b0\u30eb\u30fc\u30d7 BlackboardAttribute.tskhashphotodna.text=PhotoDNA\u30cf\u30c3\u30b7\u30e5 BlackboardAttribute.tsklastprinteddatetime.text=\u6700\u7d42\u5370\u5237\u65e5 BlackboardAttribute.tskpasswordhint.text=\u30d1\u30b9\u30ef\u30fc\u30c9\u306e\u30d2\u30f3\u30c8 BlackboardAttribute.tskpasswordsettings.text=\u30d1\u30b9\u30ef\u30fc\u30c9\u8a2d\u5b9a BlackboardAttribute.tskrule.text=\u30eb\u30fc\u30eb BlackboardAttribute.tskthreadid.text=\u30b9\u30ec\u30c3\u30c9ID CategoryType.AnalysisResult=\u5206\u6790\u7d50\u679c CategoryType.DataArtifact=\u30c7\u30fc\u30bf\u30a2\u30fc\u30c6\u30a3\u30d5\u30a1\u30af\u30c8 CustomTypes.customArtifact.name=\u30ab\u30b9\u30bf\u30e0\u30fb\u30a2\u30fc\u30c6\u30a3\u30d5\u30a1\u30af\u30c8\u30fb\u30a4\u30d9\u30f3\u30c8 CustomTypes.other.name=\u6a19\u6e96\u30a2\u30fc\u30c6\u30a3\u30d5\u30a1\u30af\u30c8\u30fb\u30a4\u30d9\u30f3\u30c8 CustomTypes.userCreated.name=\u624b\u52d5\u3067\u4f5c\u6210\u3055\u308c\u305f\u30a4\u30d9\u30f3\u30c8 DataSourcesFilter.displayName.text=\u30c7\u30fc\u30bf\u30bd\u30fc\u30b9\u3092\u306b\u5236\u9650\u3059\u308b DatabaseConnectionCheck.Access=\u30e6\u30fc\u30b6\u30fc\u540d\u304b\u30d1\u30b9\u30ef\u30fc\u30c9\u304c\u7121\u52b9\u3067\u3059\u3002 DatabaseConnectionCheck.Authentication=\u30e6\u30fc\u30b6\u30fc\u540d\u304b\u30d1\u30b9\u30ef\u30fc\u30c9\u304c\u7121\u52b9\u3067\u3059\u3002 DatabaseConnectionCheck.Connection=\u30db\u30b9\u30c8\u540d\u3001\u30dd\u30fc\u30c8\u3001\u30e6\u30fc\u30b6\u30fc\u540d\u3001\u307e\u305f\u306f\u30d1\u30b9\u30ef\u30fc\u30c9\u304c\u7121\u52b9\u3067\u3059\u3002 DatabaseConnectionCheck.Everything=\u30db\u30b9\u30c8\u540d\u3001\u30dd\u30fc\u30c8\u756a\u53f7\u3001\u30e6\u30fc\u30b6\u30fc\u540d\u3001\u307e\u305f\u306f\u30d1\u30b9\u30ef\u30fc\u30c9\u304c\u7121\u52b9\u3067\u3059\u3002 
DatabaseConnectionCheck.Hostname=\u7121\u52b9\u306a\u30db\u30b9\u30c8\u540d\u3002 DatabaseConnectionCheck.HostnameOrPort=\u30db\u30b9\u30c8\u540d\u3084\u30dd\u30fc\u30c8\u756a\u53f7\u304c\u7121\u52b9\u3067\u3059\u3002 DatabaseConnectionCheck.Installation=\u30a4\u30f3\u30b9\u30c8\u30fc\u30eb\u306b\u95a2\u3059\u308b\u554f\u984c\u3002 JDBC\u30c9\u30e9\u30a4\u30d0\u30fc\u304c\u898b\u3064\u304b\u308a\u307e\u305b\u3093\u3002 DatabaseConnectionCheck.InternalServerIssue=PostgreSQL\u306e\u5185\u90e8\u554f\u984c\u3002 \u30c7\u30fc\u30bf\u30d9\u30fc\u30b9\u304c\u7834\u640d\u3057\u3066\u3044\u308b\u53ef\u80fd\u6027\u304c\u3042\u308a\u307e\u3059\u3002 DatabaseConnectionCheck.MissingHostname=\u30db\u30b9\u30c8\u540d\u304c\u3042\u308a\u307e\u305b\u3093\u3002 DatabaseConnectionCheck.MissingPassword=\u30d1\u30b9\u30ef\u30fc\u30c9\u304c\u3042\u308a\u307e\u305b\u3093\u3002 DatabaseConnectionCheck.MissingPort=\u30dd\u30fc\u30c8\u756a\u53f7\u304c\u3042\u308a\u307e\u305b\u3093\u3002 DatabaseConnectionCheck.MissingUsername=\u30e6\u30fc\u30b6\u30fc\u540d\u304c\u3042\u308a\u307e\u305b\u3093\u3002 DatabaseConnectionCheck.Port=\u30dd\u30fc\u30c8\u756a\u53f7\u304c\u7121\u52b9\u3067\u3059\u3002 DatabaseConnectionCheck.ServerDiskSpace=PostgreSQL\u30b5\u30fc\u30d0\u30fc\u306e\u554f\u984c\u3002 PostgreSQL\u30b5\u30fc\u30d0\u30fc\u306e\u30c7\u30a3\u30b9\u30af\u3068\u30e1\u30e2\u30ea\u5bb9\u91cf\u3092\u78ba\u8a8d\u3057\u3066\u304f\u3060\u3055\u3044\u3002 DatabaseConnectionCheck.ServerRestart=PostgreSQL\u30b5\u30fc\u30d0\u30fc\u306e\u554f\u984c\u3002PostgreSQL\u30b5\u30fc\u30d0\u30fc\u306e\u518d\u8d77\u52d5\u304c\u5fc5\u8981\u306a\u5834\u5408\u304c\u3042\u308a\u307e\u3059\u3002 DerviedFile.derivedMethod.exception.msg1.text=\u30d5\u30a1\u30a4\u30ebID\uff1a{0}\u306e\u6d3e\u751f\u65b9\u6cd5\u3092\u53d6\u5f97\u4e2d\u306b\u30a8\u30e9\u30fc\u304c\u767a\u751f\u3057\u307e\u3057\u305f DescriptionFilter.mode.exclude=\u9664\u5916\u3059\u308b DescriptionFilter.mode.include=\u542b\u3080 
DescriptionLOD.full=\u8a73\u7d30 DescriptionLOD.medium=\u6982\u8981 DescriptionLOD.short=\u7c21\u6f54 EventTypeHierarchyLevel.category=\u30ab\u30c6\u30b4\u30ea\u30fc EventTypeHierarchyLevel.event=\u30a4\u30d9\u30f3\u30c8 EventTypeHierarchyLevel.root=\u30eb\u30fc\u30c8 EventTypeZoomLevel.baseType=\u30d9\u30fc\u30b9\u30bf\u30a4\u30d7 EventTypeZoomLevel.rootType=\u30eb\u30fc\u30c8\u30bf\u30a4\u30d7 EventTypeZoomLevel.subType=\u30b5\u30d6\u30bf\u30a4\u30d7 FileSystemTypes.fileAccessed.name=\u30a2\u30af\u30bb\u30b9\u3055\u308c\u305f\u30d5\u30a1\u30a4\u30eb FileSystemTypes.fileChanged.name=\u5909\u66f4\u3055\u308c\u305f\u30d5\u30a1\u30a4\u30eb FileSystemTypes.fileCreated.name=\u4f5c\u6210\u3055\u308c\u305f\u30d5\u30a1\u30a4\u30eb FileSystemTypes.fileModified.name=\u4fee\u6b63\u3055\u308c\u305f\u30d5\u30a1\u30a4\u30eb FileTypesFilter.displayName.text=\u30d5\u30a1\u30a4\u30eb\u30bf\u30a4\u30d7\u3092\u5236\u9650 FsContent.readInt.err.msg.text=\u753b\u50cf\u30d5\u30a1\u30a4\u30eb\u304c\u5b58\u5728\u3057\u306a\u3044\u304b\u3001\u30a2\u30af\u30bb\u30b9\u3067\u304d\u307e\u305b\u3093\u3002 Image.verifyImageSize.errStr1.text=\u4e0d\u5b8c\u5168\u306a\u753b\u50cf\u306e\u53ef\u80fd\u6027\uff1a\u30aa\u30d5\u30bb\u30c3\u30c8{0}\u3067\u30dc\u30ea\u30e5\u30fc\u30e0\u306e\u8aad\u53d6\u308a\u30a8\u30e9\u30fc\u304c\u767a\u751f\u3057\u307e\u3057\u305f Image.verifyImageSize.errStr2.text=\u4e0d\u5b8c\u5168\u306a\u753b\u50cf\u306e\u53ef\u80fd\u6027\uff1a\u30aa\u30d5\u30bb\u30c3\u30c8{0}\u3067\u30dc\u30ea\u30e5\u30fc\u30e0\u306e\u8aad\u53d6\u308a\u30a8\u30e9\u30fc\u304c\u767a\u751f\u3057\u307e\u3057\u305f Image.verifyImageSize.errStr3.text=\u4e0d\u5b8c\u5168\u306a\u753b\u50cf\u306e\u53ef\u80fd\u6027\uff1a\u30aa\u30d5\u30bb\u30c3\u30c8{0}\u3067\u30d5\u30a1\u30a4\u30eb\u30b7\u30b9\u30c6\u30e0\u306e\u8aad\u53d6\u308a\u30a8\u30e9\u30fc\u304c\u767a\u751f\u3057\u307e\u3057\u305f 
Image.verifyImageSize.errStr4.text=\u4e0d\u5b8c\u5168\u306a\u753b\u50cf\u306e\u53ef\u80fd\u6027\uff1a\u30aa\u30d5\u30bb\u30c3\u30c8{0}\u3067\u30d5\u30a1\u30a4\u30eb\u30b7\u30b9\u30c6\u30e0\u306e\u8aad\u53d6\u308a\u30a8\u30e9\u30fc\u304c\u767a\u751f\u3057\u307e\u3057\u305f IngestJobInfo.IngestJobStatusType.Cancelled.displayName=\u30ad\u30e3\u30f3\u30bb\u30eb IngestJobInfo.IngestJobStatusType.Completed.displayName=\u5b8c\u4e86 IngestJobInfo.IngestJobStatusType.Started.displayName=\u958b\u59cb IngestModuleInfo.IngestModuleType.DataArtifact.displayName=\u30c7\u30fc\u30bf\u30a2\u30fc\u30c6\u30a3\u30d5\u30a1\u30af\u30c8 IngestModuleInfo.IngestModuleType.DataSourceLevel.displayName=\u30c7\u30fc\u30bf\u30bd\u30fc\u30b9\u30ec\u30d9\u30eb IngestModuleInfo.IngestModuleType.FileLevel.displayName=\u30d5\u30a1\u30a4\u30eb\u30ec\u30d9\u30eb IngestModuleInfo.IngestModuleType.Multiple.displayName=\u591a\u6570 IntersectionFilter.displayName.text=\u4ea4\u5dee\u70b9 MiscTypes.Calls.name=\u901a\u8a71\u958b\u59cb MiscTypes.CallsEnd.name=\u901a\u8a71\u7d42\u4e86 MiscTypes.Email.name=\u9001\u4fe1\u3055\u308c\u305f\u96fb\u5b50\u30e1\u30fc\u30eb MiscTypes.EmailRcvd.name=\u30e1\u30fc\u30eb\u306e\u53d7\u4fe1 MiscTypes.GPSBookmark.name=GPS\u30d6\u30c3\u30af\u30de\u30fc\u30af MiscTypes.GPSLastknown.name=GPS\u8a18\u9332\u306e\u6700\u5f8c\u5834\u6240 MiscTypes.GPSRoutes.name=GPS\u30eb\u30fc\u30c8 MiscTypes.GPSTrack.name=GPS\u30c8\u30e9\u30c3\u30af MiscTypes.GPSTrackpoint.name=GPS\u30c8\u30e9\u30c3\u30af\u30dd\u30a4\u30f3\u30c8 MiscTypes.GPSearch.name=GPS\u691c\u7d22 MiscTypes.LogEntry.name=\u30ed\u30b0\u767b\u9332 MiscTypes.Registry.name=\u30ec\u30b8\u30b9\u30c8\u30ea MiscTypes.devicesAttached.name=\u63a5\u7d9a\u3055\u308c\u3066\u3044\u308b\u30c7\u30d0\u30a4\u30b9 MiscTypes.exif.name=Exif MiscTypes.installedPrograms.name=\u30a4\u30f3\u30b9\u30c8\u30fc\u30eb\u6e08\u307f\u306e\u30d7\u30ed\u30b0\u30e9\u30e0 MiscTypes.message.name=\u30e1\u30c3\u30bb\u30fc\u30b8 
MiscTypes.metadataCreated.name=\u30c9\u30ad\u30e5\u30e1\u30f3\u30c8\u4f5c\u6210 MiscTypes.metadataLastPrinted.name=\u6700\u7d42\u5370\u5237\u30c9\u30ad\u30e5\u30e1\u30f3\u30c8 MiscTypes.metadataLastSaved.name=\u6700\u5f8c\u306b\u4fdd\u5b58\u3055\u308c\u305f\u30c9\u30ad\u30e5\u30e1\u30f3\u30c8 MiscTypes.programexecuted.name=\u30d7\u30ed\u30b0\u30e9\u30e0\u306e\u5b9f\u884c MiscTypes.recentDocuments.name=\u6700\u8fd1\u306e\u30c9\u30ad\u30e5\u30e1\u30f3\u30c8 OsAccountInstanceType.Accessed.descr.text=\u30a2\u30ab\u30a6\u30f3\u30c8\u6240\u6709\u8005\u306f\u3001\u4f55\u3089\u304b\u306e\u30b5\u30fc\u30d3\u30b9\u3092\u4ecb\u3057\u3066\u8aad\u307f/\u66f8\u304d\u306b\u30db\u30b9\u30c8\u4e0a\u306e\u30ea\u30bd\u30fc\u30b9\u306b\u30a2\u30af\u30bb\u30b9\u3057\u307e\u3057\u305f\u3002 OsAccountInstanceType.Accessed.text=\u30a2\u30af\u30bb\u30b9\u6e08\u307f OsAccountInstanceType.Launched.descr.text=\u30a2\u30ab\u30a6\u30f3\u30c8\u6240\u6709\u8005\u306f\u3001\u30db\u30b9\u30c8\u3067\u30d7\u30ed\u30b0\u30e9\u30e0\u306e\u30a2\u30af\u30b7\u30e7\u30f3\u3092\u958b\u59cb\u3057\u307e\u3057\u305f\u3002 OsAccountInstanceType.Launched.text=\u4f5c\u52d5 OsAccountInstanceType.Referenced.descr.text=\u30a2\u30ab\u30a6\u30f3\u30c8\u6240\u6709\u8005\u306f\u3001\u30db\u30b9\u30c8\u4e0a\u306e\u30ed\u30b0\u30d5\u30a1\u30a4\u30eb\u3067\u53c2\u7167\u3055\u308c\u307e\u3057\u305f\u3002 OsAccountInstanceType.Referenced.text=\u53c2\u7167 OsAccountRealm.Domain.text=\u30c9\u30e1\u30a4\u30f3 OsAccountRealm.Inferred.text=\u63a8\u6e2c OsAccountRealm.Known.text=\u65e2\u77e5 OsAccountRealm.Local.text=\u30ed\u30fc\u30ab\u30eb OsAccountRealm.Unknown.text=\u4e0d\u660e OsAccountStatus.Active.text=\u30a2\u30af\u30c6\u30a3\u30d6 OsAccountStatus.Deleted.text=\u524a\u9664\u6e08\u307f OsAccountStatus.Disabled.text=\u7121\u52b9 OsAccountStatus.Unknown.text=\u4e0d\u660e OsAccountType.Interactive.text=\u30a4\u30f3\u30bf\u30e9\u30af\u30c6\u30a3\u30d6 OsAccountType.Service.text=\u30b5\u30fc\u30d3\u30b9 
OsAccountType.Unknown.text=\u4e0d\u660e ReviewStatus.Approved=\u627f\u8a8d\u6e08\u307f ReviewStatus.Rejected=\u62d2\u5426\u3055\u308c\u307e\u3057\u305f ReviewStatus.Undecided=\u672a\u5b9a RootEventType.eventTypes.name=\u30a4\u30d9\u30f3\u30c8\u30bf\u30a4\u30d7 Score.Priority.Normal.displayName.text=\u6b63\u5e38 Score.Priority.Override.displayName.text=\u30aa\u30fc\u30d0\u30fc\u30e9\u30a4\u30c9 Significance.LikelyNone.displayName.text=\u304a\u305d\u3089\u304f\u6ce8\u76ee\u306b\u5024\u3057\u306a\u3044 Significance.LikelyNotable.displayName.text=\u304a\u305d\u3089\u304f\u6ce8\u76ee\u306b\u5024\u3059\u308b Significance.None.displayName.text=\u6ce8\u76ee\u306b\u5024\u3057\u306a\u3044 Significance.Notable.displayName.text=\u6ce8\u76ee\u3059\u3079\u304d Significance.Unknown.displayName.text=\u4e0d\u660e SlackFile.readInt.err.msg.text=\u753b\u50cf\u30d5\u30a1\u30a4\u30eb\u304c\u5b58\u5728\u3057\u306a\u3044\u304b\u3001\u30a2\u30af\u30bb\u30b9\u3067\u304d\u307e\u305b\u3093\u3002 SleuthkitCase.SchemaVersionMismatch=\u30b9\u30ad\u30fc\u30de\u306e\u30d0\u30fc\u30b8\u30e7\u30f3\u304c\u4e00\u81f4\u3057\u307e\u305b\u3093 SleuthkitCase.addDerivedFile.exception.msg1.text=\u6d3e\u751f\u30d5\u30a1\u30a4\u30eb\u306e\u4f5c\u6210\u4e2d\u306b\u30a8\u30e9\u30fc\u304c\u767a\u751f\u3057\u307e\u3057\u305f\u3002\u30aa\u30d6\u30b8\u30a7\u30af\u30c8\u306e\u65b0\u898fID\u3092\u53d6\u5f97\u3067\u304d\u307e\u305b\u3093\u3002\u30d5\u30a1\u30a4\u30eb\u540d\uff1a{0} SleuthkitCase.addDerivedFile.exception.msg2.text=\u6d3e\u751f\u30d5\u30a1\u30a4\u30eb\u306e\u4f5c\u6210\u4e2d\u306b\u30a8\u30e9\u30fc\u304c\u767a\u751f\u3057\u307e\u3057\u305f\u3002\u30d5\u30a1\u30a4\u30eb\u540d\uff1a{0} SleuthkitCase.addLocalFile.exception.msg1.text=\u30ed\u30fc\u30ab\u30eb\u30d5\u30a1\u30a4\u30eb{0}\u306e\u8ffd\u52a0\u4e2d\u306b\u30a8\u30e9\u30fc\u304c\u767a\u751f\u3057\u307e\u3057\u305f\u3002\u8ffd\u52a0\u5148\u306e\u30da\u30a2\u30ec\u30f3\u30c8\u304c\u30cc\u30eb\u3067\u3059\u3002 
SleuthkitCase.addLocalFile.exception.msg2.text=\u30ed\u30fc\u30ab\u30eb\u30d5\u30a1\u30a4\u30eb\u306e\u4f5c\u6210\u4e2d\u306b\u30a8\u30e9\u30fc\u304c\u767a\u751f\u3057\u307e\u3057\u305f\u3002\u30aa\u30d6\u30b8\u30a7\u30af\u30c8\u306e\u65b0\u898fID\u3092\u53d6\u5f97\u3067\u304d\u307e\u305b\u3093\u3067\u3057\u305f\u3002\u30d5\u30a1\u30a4\u30eb\u540d\uff1a{0} SleuthkitCase.addLocalFile.exception.msg3.text=\u6d3e\u751f\u30d5\u30a1\u30a4\u30eb\u306e\u4f5c\u6210\u4e2d\u306b\u30a8\u30e9\u30fc\u304c\u767a\u751f\u3057\u307e\u3057\u305f\u3002\u30d5\u30a1\u30a4\u30eb\u540d\uff1a{0} SleuthkitCase.findFiles.exception.msg1.text=\u30a8\u30e9\u30fc\uff1a\u30c7\u30fc\u30bf\u30bd\u30fc\u30b9\u306f\u30da\u30a2\u30ec\u30f3\u30c8\u304c\u7121\u3044\uff08\u30a4\u30e1\u30fc\u30b8\u3001\u30d5\u30a1\u30a4\u30eb\u30bb\u30c3\u30c8\uff09\u306f\u305a\u3067\u3059\u304c\u3001\u4e0b\u8a18\u304c\u5b58\u5728\u3057\u307e\u3059\uff1a{0} SleuthkitCase.findFiles.exception.msg2.text=\u30a8\u30e9\u30fc\uff1a\u30c7\u30fc\u30bf\u30bd\u30fc\u30b9\u306f\u30a4\u30e1\u30fc\u30b8\u307e\u305f\u306fVirtualDirectory\u3067\u3042\u308b\u3079\u304d\u3067\u3059\u304c\u3001\u4e0b\u8a18\u304c\u5b58\u5728\u3057\u307e\u3059\uff1a{0} SleuthkitCase.findFiles.exception.msg3.text=\u30d5\u30a1\u30a4\u30eb\u540d\u306b\u57fa\u3065\u3044\u305f\u30c7\u30fc\u30bf\u30bd\u30fc\u30b9\u306e\u691c\u7d22\u4e2d\u306b\u30a8\u30e9\u30fc\u304c\u767a\u751f\u3057\u307e\u3057\u305f\u3002 SleuthkitCase.findFiles3.exception.msg1.text=\u30a8\u30e9\u30fc\uff1a\u30c7\u30fc\u30bf\u30bd\u30fc\u30b9\u306f\u30da\u30a2\u30ec\u30f3\u30c8\u304c\u7121\u3044\uff08\u30a4\u30e1\u30fc\u30b8\u3001\u30d5\u30a1\u30a4\u30eb\u30bb\u30c3\u30c8\uff09\u306f\u305a\u3067\u3059\u304c\u3001\u4e0b\u8a18\u304c\u5b58\u5728\u3057\u307e\u3059\uff1a{0} 
SleuthkitCase.findFiles3.exception.msg2.text=\u30a8\u30e9\u30fc\uff1a\u30c7\u30fc\u30bf\u30bd\u30fc\u30b9\u306f\u30a4\u30e1\u30fc\u30b8\u307e\u305f\u306fVirtualDirectory\u3067\u3042\u308b\u3079\u304d\u3067\u3059\u304c\u3001\u4e0b\u8a18\u304c\u5b58\u5728\u3057\u307e\u3059\uff1a{0} SleuthkitCase.findFiles3.exception.msg3.text=\u30d5\u30a1\u30a4\u30eb\u540d\u306b\u57fa\u3065\u3044\u305f\u30c7\u30fc\u30bf\u30bd\u30fc\u30b9\u306e\u691c\u7d22\u4e2d\u306b\u30a8\u30e9\u30fc\u304c\u767a\u751f\u3057\u307e\u3057\u305f\u3002 SleuthkitCase.getLastObjectId.exception.msg.text=\u6700\u5f8c\u306e\u30aa\u30d6\u30b8\u30a7\u30af\u30c8ID\u3092\u53d6\u5f97\u3057\u305f\u5f8c\u3001\u7d50\u679c\u30bb\u30c3\u30c8\u3092\u9589\u3058\u308b\u969b\u306b\u30a8\u30e9\u30fc\u304c\u767a\u751f\u3057\u307e\u3057\u305f\u3002 SleuthkitCase.isFileFromSource.exception.msg.text=\u30a8\u30e9\u30fc\uff1a\u30c7\u30fc\u30bf\u30bd\u30fc\u30b9\u306f\u30da\u30a2\u30ec\u30f3\u30c8\u304c\u7121\u3044\uff08\u30a4\u30e1\u30fc\u30b8\u3001\u30d5\u30a1\u30a4\u30eb\u30bb\u30c3\u30c8\uff09\u306f\u305a\u3067\u3059\u304c\u3001\u4e0b\u8a18\u304c\u5b58\u5728\u3057\u307e\u3059\uff1a{0} SleuthkitCase.isFileFromSource.exception.msg2.text=\u30a8\u30e9\u30fc\uff1a\u30c7\u30fc\u30bf\u30bd\u30fc\u30b9\u306f\u30a4\u30e1\u30fc\u30b8\u307e\u305f\u306fVirtualDirectory\u3067\u3042\u308b\u3079\u304d\u3067\u3059\u304c\u3001\u4e0b\u8a18\u304c\u5b58\u5728\u3057\u307e\u3059\uff1a{0} TextFilter.displayName.text=\u30c6\u30ad\u30b9\u30c8\u3092\u542b\u3081\u308b\u5fc5\u8981\u304c\u3042\u308a\u307e\u3059\uff1a TimelineEventType.BackupEvent.description.end=\u30d0\u30c3\u30af\u30a2\u30c3\u30d7\u7d42\u4e86 TimelineEventType.BackupEvent.description.start=\u30d0\u30c3\u30af\u30a2\u30c3\u30d7\u958b\u59cb TimelineEventType.BackupEventEnd.txt=\u30d0\u30c3\u30af\u30a2\u30c3\u30d7\u7d42\u4e86 TimelineEventType.BackupEventStart.txt=\u30d0\u30c3\u30af\u30a2\u30c3\u30d7\u958b\u59cb TimelineEventType.BluetoothAdapter.txt=Bluetooth\u30a2\u30c0\u30d7\u30bf\u30fc 
TimelineEventType.BluetoothPairing.txt=Bluetooth\u30da\u30a2\u30ea\u30f3\u30b0 TimelineEventType.BluetoothPairingLastConnection.txt=\u6700\u5f8c\u306e\u63a5\u7d9a\u3092Bluetooth\u3067\u30da\u30a2\u30ea\u30f3\u30b0 TimelineEventType.CalendarEntryEnd.txt=\u30ab\u30ec\u30f3\u30c0\u30fc\u5165\u529b\u7d42\u4e86 TimelineEventType.CalendarEntryStart.txt=\u30ab\u30ec\u30f3\u30c0\u30fc\u5165\u529b\u958b\u59cb TimelineEventType.DeletedProgram.txt=\u30d7\u30ed\u30b0\u30e9\u30e0\u304c\u524a\u9664\u3055\u308c\u307e\u3057\u305f TimelineEventType.DeletedProgramDeleted.txt=\u30a2\u30d7\u30ea\u30b1\u30fc\u30b7\u30e7\u30f3\u304c\u524a\u9664\u3055\u308c\u307e\u3057\u305f TimelineEventType.OSAccountAccessed.txt=\u30a2\u30af\u30bb\u30b9\u3055\u308c\u305fOS\u30a2\u30ab\u30a6\u30f3\u30c8 TimelineEventType.OSAccountCreated.txt=OS\u30a2\u30ab\u30a6\u30f3\u30c8\u304c\u4f5c\u6210\u3055\u308c\u307e\u3057\u305f TimelineEventType.OSAccountPwdFail.txt=OS\u30a2\u30ab\u30a6\u30f3\u30c8\u306e\u30d1\u30b9\u30ef\u30fc\u30c9\u30fb\u30a8\u30e9\u30fc TimelineEventType.OSAccountPwdReset.txt=OS\u30a2\u30ab\u30a6\u30f3\u30c8\u306e\u30d1\u30b9\u30ef\u30fc\u30c9\u306e\u30ea\u30bb\u30c3\u30c8 TimelineEventType.OSInfo.txt=\u30aa\u30da\u30ec\u30fc\u30c6\u30a3\u30f3\u30b0\u30b7\u30b9\u30c6\u30e0\u60c5\u5831 TimelineEventType.ProgramNotification.txt=\u30d7\u30ed\u30b0\u30e9\u30e0\u901a\u77e5 TimelineEventType.ScreenShot.txt=\u30b9\u30af\u30ea\u30fc\u30f3\u30b7\u30e7\u30c3\u30c8 TimelineEventType.ServiceAccount.txt=\u30b5\u30fc\u30d3\u30b9\u30fb\u30a2\u30ab\u30a6\u30f3\u30c8 TimelineEventType.UserDeviceEventEnd.txt=\u30e6\u30fc\u30b6\u30fc\u6d3b\u52d5\u7d42\u4e86 TimelineEventType.UserDeviceEventStart.txt=\u30e6\u30fc\u30b6\u30fc\u6d3b\u52d5\u958b\u59cb TimelineEventType.WIFINetwork.txt=Wifi\u30cd\u30c3\u30c8\u30ef\u30fc\u30af TimelineEventType.WebCache.text=Web\u30ad\u30e3\u30c3\u30b7\u30e5 
TimelineLevelOfDetail.high=\u9ad8 TimelineLevelOfDetail.low=\u4f4e TimelineLevelOfDetail.medium=\u4e2d TskData.encodingType.exception.msg1.text=\u5024\uff1a{0}\u3000\u306eEncodingType\u304c\u3042\u308a\u307e\u305b\u3093 TskData.fileKnown.exception.msg1.text=\u30d0\u30ea\u30e5\u30fc\uff1a{0}\u306fFileKnown\u306b\u8a72\u5f53\u3057\u307e\u305b\u3093 TskData.fileKnown.known=\u65e2\u77e5 TskData.fileKnown.knownBad=\u6ce8\u76ee\u3059\u3079\u304d TskData.fileKnown.unknown=\u4e0d\u660e TskData.objectTypeEnum.exception.msg1.text=\u30d0\u30ea\u30e5\u30fc\uff1a{0}\u306f\u30aa\u30d6\u30b8\u30a7\u30af\u30c8\u30bf\u30a4\u30d7\u306b\u8a72\u5f53\u3057\u307e\u305b\u3093 TskData.tskDbFilesTypeEnum.exception.msg1.text=\u30d0\u30ea\u30e5\u30fc\uff1a{0}\u306fTSK_FILE_TYPE_ENUM\u306b\u8a72\u5f53\u3057\u307e\u305b\u3093 TskData.tskFsAttrTypeEnum.exception.msg1.text=\u30d0\u30ea\u30e5\u30fc\uff1a{0}\u306fTSK_FS_TYPE_ENUM\u306b\u8a72\u5f53\u3057\u307e\u305b\u3093 TskData.tskFsMetaFlagEnum.allocated=\u5272\u308a\u5f53\u3066\u6e08\u307f TskData.tskFsMetaFlagEnum.compressed=\u5727\u7e2e\u6e08\u307f TskData.tskFsMetaFlagEnum.orphan=\u30aa\u30fc\u30d5\u30a1\u30f3 TskData.tskFsMetaFlagEnum.unallocated=\u672a\u5272\u308a\u5f53\u3066 TskData.tskFsMetaFlagEnum.unused=\u672a\u4f7f\u7528 TskData.tskFsMetaFlagEnum.used=\u4f7f\u7528\u6e08\u307f TskData.tskFsMetaTypeEnum.exception.msg1.text=\u30d0\u30ea\u30e5\u30fc\uff1a{0}\u306fTSK_FS_META_TYPE_ENUM\u306b\u8a72\u5f53\u3057\u307e\u305b\u3093 TskData.tskFsNameFlagEnum.allocated=\u5272\u308a\u5f53\u3066\u6e08\u307f TskData.tskFsNameFlagEnum.exception.msg1.text=\u30d0\u30ea\u30e5\u30fc\uff1a{0}\u306fTSK_FS_NAME_FLAG_ENUM\u306b\u8a72\u5f53\u3057\u307e\u305b\u3093 TskData.tskFsNameFlagEnum.unallocated=\u672a\u5272\u308a\u5f53\u3066 TskData.tskFsNameTypeEnum.exception.msg1.text=\u30d0\u30ea\u30e5\u30fc\uff1a{0}\u306fTSK_FS_NAME_TYPE_ENUM\u306b\u8a72\u5f53\u3057\u307e\u305b\u3093 TskData.tskFsTypeEnum.APFSautoDetect=APFS\uff08\u81ea\u52d5\u691c\u51fa\uff09 
TskData.tskFsTypeEnum.ExtXautoDetect=ExtX\uff08\u81ea\u52d5\u691c\u51fa\uff09 TskData.tskFsTypeEnum.FATautoDetect=FAT\uff08\u81ea\u52d5\u691c\u51fa\uff09 TskData.tskFsTypeEnum.HFSautoDetect=HFS\uff08\u81ea\u52d5\u691c\u51fa\uff09 TskData.tskFsTypeEnum.ISO9660autoDetect=ISO9660\uff08\u81ea\u52d5\u691c\u51fa\uff09 TskData.tskFsTypeEnum.NTFSautoDetect=NTFS\uff08\u81ea\u52d5\u691c\u51fa\uff09 TskData.tskFsTypeEnum.RAWautoDetect=RAW\uff08\u81ea\u52d5\u691c\u51fa\uff09 TskData.tskFsTypeEnum.SWAPautoDetect=SWAP\uff08\u81ea\u52d5\u691c\u51fa\uff09 TskData.tskFsTypeEnum.YAFFS2autoDetect=YAFFS2\uff08\u81ea\u52d5\u691c\u51fa\uff09 TskData.tskFsTypeEnum.autoDetect=\u81ea\u52d5\u691c\u51fa TskData.tskFsTypeEnum.exception.msg1.text=\u30d0\u30ea\u30e5\u30fc\uff1a{0}\u306fTSK_FS_TYPE_ENUM\u306b\u8a72\u5f53\u3057\u307e\u305b\u3093 TskData.tskFsTypeEnum.unsupported=\u30b5\u30dd\u30fc\u30c8\u3055\u308c\u3066\u3044\u306a\u3044\u30d5\u30a1\u30a4\u30eb\u30b7\u30b9\u30c6\u30e0 TskData.tskImgTypeEnum.autoDetect=\u81ea\u52d5\u691c\u51fa TskData.tskImgTypeEnum.exception.msg1.text=\u30d0\u30ea\u30e5\u30fc\uff1a{0}\u306fTSK_IMG_TYPE_ENUM\u306b\u8a72\u5f53\u3057\u307e\u305b\u3093 TskData.tskImgTypeEnum.rawSingle=\u30ed\u30fc\u30b7\u30f3\u30b0\u30eb TskData.tskImgTypeEnum.rawSplit=\u30ed\u30fc\u30b9\u30d7\u30ea\u30c3\u30c8 TskData.tskImgTypeEnum.unknown=\u4e0d\u660e TskData.tskVSTypeEnum.autoDetect=\u81ea\u52d5\u691c\u51fa TskData.tskVSTypeEnum.exception.msg1.text=\u30d0\u30ea\u30e5\u30fc\uff1a{0}\u306fTSK_VS_TYPE_ENUM\u306b\u8a72\u5f53\u3057\u307e\u305b\u3093 TskData.tskVSTypeEnum.fake=\u507d\u7269 TskData.tskVSTypeEnum.unsupported=\u975e\u30b5\u30dd\u30fc\u30c8 TypeFilter.displayName.text=\u30a4\u30d9\u30f3\u30c8\u30bf\u30a4\u30d7\u3092\u5236\u9650 Volume.desc.text=\u4e0d\u660e Volume.read.exception.msg1.text=\u3053\u306e\u30dc\u30ea\u30e5\u30fc\u30e0\u306e\u30da\u30a2\u30ec\u30f3\u30c8\u306fVolmueSystem\u3067\u3042\u308b\u3079\u304d\u3067\u3059\u304c\u3001\u9055\u3044\u307e\u3059\u3002 
Volume.vsFlagToString.allocated=\u5272\u308a\u5f53\u3066\u6e08\u307f Volume.vsFlagToString.unallocated=\u672a\u5272\u308a\u5f53\u3066 WebTypes.webBookmarks.name=Web\u30d6\u30c3\u30af\u30de\u30fc\u30af WebTypes.webCookies.name=WebCookie\u3092\u4f5c\u6210 WebTypes.webCookiesAccessed.name=\u30a2\u30af\u30bb\u30b9\u3055\u308c\u305fWebCookie WebTypes.webCookiesEnd.name=WebCookie\u304c\u7d42\u4e86 WebTypes.webCookiesStart.name=WebCookie\u306e\u958b\u59cb WebTypes.webDownloads.name=Web\u30c0\u30a6\u30f3\u30ed\u30fc\u30c9 WebTypes.webFormAddress.name=Web\u30d5\u30a9\u30fc\u30e0\u30a2\u30c9\u30ec\u30b9\u304c\u4f5c\u6210\u3055\u308c\u307e\u3057\u305f WebTypes.webFormAddressModified.name=Web\u30d5\u30a9\u30fc\u30e0\u30a2\u30c9\u30ec\u30b9\u304c\u5909\u66f4\u3055\u308c\u307e\u3057\u305f WebTypes.webFormAutoFill.name=Web\u30d5\u30a9\u30fc\u30e0\u306e\u81ea\u52d5\u5165\u529b\u304c\u4f5c\u6210\u3055\u308c\u307e\u3057\u305f WebTypes.webFormAutofillAccessed.name=Web\u30d5\u30a9\u30fc\u30e0\u306e\u81ea\u52d5\u5165\u529b\u306b\u30a2\u30af\u30bb\u30b9 WebTypes.webHistory.name=\u30a2\u30af\u30bb\u30b9\u3055\u308c\u305fWeb\u5c65\u6b74 WebTypes.webHistoryCreated.name=Web\u5c65\u6b74\u304c\u4f5c\u6210\u3055\u308c\u307e\u3057\u305f WebTypes.webSearch.name=Web\u691c\u7d22 ZoomSettingsPane.descrLODLabel.text=\u8a73\u7d30\u8aac\u660e\uff1a ZoomSettingsPane.historyLabel.text=\u5c65\u6b74\uff1a ZoomSettingsPane.timeUnitLabel.text=\u6642\u9593\u5358\u4f4d\uff1a ZoomSettingsPane.typeZoomLabel.text=\u30a4\u30d9\u30f3\u30c8\u30bf\u30a4\u30d7\uff1a hashHitsFilter.displayName.text=\u30cf\u30c3\u30b7\u30e5\u30d2\u30c3\u30c8\u304c\u5fc5\u8981 hideKnownFilter.displayName.text=\u65e2\u77e5\u30d5\u30a1\u30a4\u30eb\u3092\u975e\u8868\u793a tagsFilter.displayName.text=\u30bf\u30b0\u4ed8\u3051\u304c\u5fc5\u8981\u304c\u3067\u3059 sleuthkit-4.11.1/bindings/java/src/org/sleuthkit/datamodel/TimelineFilter.java000644 000765 000024 00000077756 14137073413 030256 0ustar00carrierstaff000000 000000 /* * Sleuth Kit 
Data Model * * Copyright 2018-2019 Basis Technology Corp. * Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.sleuthkit.datamodel; import com.google.common.annotations.VisibleForTesting; import com.google.common.collect.Lists; import com.google.common.net.MediaType; import java.util.ArrayList; import java.util.Arrays; import static java.util.Arrays.asList; import java.util.Collection; import java.util.HashSet; import java.util.List; import java.util.Objects; import java.util.Set; import java.util.stream.Collectors; import static java.util.stream.Collectors.joining; import java.util.stream.Stream; import static org.apache.commons.lang3.ObjectUtils.notEqual; import org.apache.commons.lang3.StringUtils; import static org.sleuthkit.datamodel.SleuthkitCase.escapeSingleQuotes; /** * An interface for timeline events filters used to selectively query the * timeline tables in the case database for timeline events via the APIs of the * timeline manager. */ public abstract class TimelineFilter { /** * Gets the display name for this filter. * * @return The display name. */ public abstract String getDisplayName(); /** * Get the SQL where clause corresponding to this filter. * * @param manager The TimelineManager to use for DB spevific parts of the * query. * * @return an SQL where clause (without the "where") corresponding to this * filter */ abstract String getSQLWhere(TimelineManager manager); /** * Makes a copy of this filter. 
* * @return A copy of this filter. */ public abstract TimelineFilter copyOf(); @SuppressWarnings("unchecked") static > T copySubFilters(T from, T to) { from.getSubFilters().forEach(subFilter -> to.addSubFilter((S) subFilter.copyOf())); return to; } /** * A timeline events filter that ANDs together a collection of timeline * event filters. * * @param The type of the filters to be AND'ed together. */ public static class IntersectionFilter extends CompoundFilter { /** * Constructs timeline events filter that ANDs together a collection of * timeline events filters. * * @param subFilters The collection of filters to be AND'ed together. */ @VisibleForTesting public IntersectionFilter(List subFilters) { super(subFilters); } @Override public IntersectionFilter copyOf() { @SuppressWarnings("unchecked") List subfilters = Lists.transform(getSubFilters(), f -> (SubFilterType) f.copyOf()); //make copies of all the subfilters. return new IntersectionFilter<>(subfilters); } @Override public String getDisplayName() { String subFilterDisplayNames = getSubFilters().stream() .map(TimelineFilter::getDisplayName) .collect(joining(",")); return BundleProvider.getBundle().getString("IntersectionFilter.displayName.text") + "[" + subFilterDisplayNames + "]"; } @Override String getSQLWhere(TimelineManager manager) { String trueLiteral = manager.getSQLWhere(null); String join = this.getSubFilters().stream() .filter(Objects::nonNull) .map(filter -> filter.getSQLWhere(manager)) .filter(sqlString -> notEqual(sqlString, trueLiteral)) .collect(Collectors.joining(" AND ")); return join.isEmpty() ? trueLiteral : "(" + join + ")"; } } /** * A timeline events filter used to query for a subset of the event types in * the event types hierarchy. The filter is built via a recursive descent * from any given type in the hierarchy, effectively creating a filter that * accepts the events in a branch of the event types hierarchy. 
*/ public static final class EventTypeFilter extends UnionFilter { private final TimelineEventType rootEventType; /** * Constucts a timeline events filter used to query for a subset of the * event types in the event types hierarchy. The filter is optionally * built via a recursive descent from any given type in the hierarchy, * effectively creating a filter that accepts the events in a branch of * the event types hierarchy. Thsi constructor exists solely for the use * of this filter's implementation of the copyOf API. * * @param rootEventType The "root" of the event hierarchy for the * purposes of this filter. * @param recursive Whether or not to do a recursive descent of the * event types hierarchy from the root event type. */ private EventTypeFilter(TimelineEventType rootEventType, boolean recursive) { super(new ArrayList<>()); this.rootEventType = rootEventType; if (recursive) { // add subfilters for each subtype for (TimelineEventType subType : rootEventType.getChildren()) { addSubFilter(new EventTypeFilter(subType)); } } } /** * Constructs a timeline events filter used to query for a subset of the * event types in the event types hierarchy. The subset of event types * that pass the filter is determined by a recursive descent from any * given type in the hierarchy, effectively creating a filter that * accepts the events in a branch of the event types hierarchy. * * @param rootEventType The "root" of the event hierarchy for the * purposes of this filter. */ public EventTypeFilter(TimelineEventType rootEventType) { this(rootEventType, true); } /** * Gets the "root" of the branch of the event types hierarchy accepted * by this filter. * * @return The "root" event type. */ public TimelineEventType getRootEventType() { return rootEventType; } @Override public String getDisplayName() { return (TimelineEventType.ROOT_EVENT_TYPE.equals(rootEventType)) ? 
BundleProvider.getBundle().getString("TypeFilter.displayName.text") : rootEventType.getDisplayName(); } @Override public EventTypeFilter copyOf() { //make a nonrecursive copy of this filter, and then copy subfilters // RC (10/1/19): Why? return copySubFilters(this, new EventTypeFilter(rootEventType, false)); } @Override public int hashCode() { int hash = 7; hash = 17 * hash + Objects.hashCode(this.rootEventType); return hash; } @Override public boolean equals(Object obj) { if (this == obj) { return true; } if (obj == null) { return false; } if (getClass() != obj.getClass()) { return false; } final EventTypeFilter other = (EventTypeFilter) obj; if (notEqual(this.rootEventType, other.getRootEventType())) { return false; } return Objects.equals(this.getSubFilters(), other.getSubFilters()); } @Override String getSQLWhere(TimelineManager manager) { return "(tsk_events.event_type_id IN (" + getSubTypeIDs().collect(Collectors.joining(",")) + "))"; //NON-NLS } private Stream getSubTypeIDs() { if (this.getSubFilters().isEmpty()) { return Stream.of(String.valueOf(getRootEventType().getTypeID())); } else { return this.getSubFilters().stream().flatMap(EventTypeFilter::getSubTypeIDs); } } @Override public String toString() { return "EventTypeFilter{" + "rootEventType=" + rootEventType + ", subfilters=" + getSubFilters() + '}'; } } /** * A timeline events filter used to query for events where the direct source * (file or artifact) of the events has either been tagged or not tagged. */ public static final class TagsFilter extends TimelineFilter { private boolean eventSourcesAreTagged; /** * Constructs a timeline events filter used to query for a events where * the direct source (file or artifact) of the events has not been * tagged. */ public TagsFilter() { } /** * Constructs a timeline events filter used to query for events where * the direct source (file or artifact) of the events has either been * tagged or not tagged. 
* * @param eventSourcesAreTagged Whether the direct sources of the events * need to be tagged or not tagged to be * accepted by this filter. */ public TagsFilter(boolean eventSourcesAreTagged) { this.eventSourcesAreTagged = eventSourcesAreTagged; } /** * Sets whether the direct sources of the events have to be tagged or * not tagged to be accepted by this filter. * * @param eventSourcesAreTagged Whether the direct sources of the events * have to be tagged or not tagged to be * accepted by this filter. */ public synchronized void setEventSourcesAreTagged(boolean eventSourcesAreTagged) { this.eventSourcesAreTagged = eventSourcesAreTagged; } /** * Indicates whether the direct sources of the events have to be tagged * or not tagged. * * @return True or false. */ public synchronized boolean getEventSourceAreTagged() { return eventSourcesAreTagged; } @Override public String getDisplayName() { return BundleProvider.getBundle().getString("tagsFilter.displayName.text"); } @Override public TagsFilter copyOf() { return new TagsFilter(eventSourcesAreTagged); } @Override public boolean equals(Object obj) { if (obj == null || !(obj instanceof TagsFilter)) { return false; } return ((TagsFilter) obj).getEventSourceAreTagged() == getEventSourceAreTagged(); } @Override public int hashCode() { int hash = 7; hash = 67 * hash + Objects.hashCode(this.eventSourcesAreTagged); return hash; } @Override String getSQLWhere(TimelineManager manager) { String whereStr; if (eventSourcesAreTagged) { whereStr = "tagged = 1"; } else { whereStr = "tagged = 0"; } return whereStr; } } /** * A timeline events filter that ORs together a collection of timeline * events filters. * * @param The type of the filters to be OR'ed together. 
*/ public static abstract class UnionFilter extends TimelineFilter.CompoundFilter { UnionFilter(List subFilters) { super(subFilters); } UnionFilter() { super(new ArrayList()); } @Override public void addSubFilter(SubFilterType subfilter) { super.addSubFilter(subfilter); } @Override String getSQLWhere(TimelineManager manager) { String join = getSubFilters().stream() .map(subFilter -> subFilter.getSQLWhere(manager)) .collect(Collectors.joining(" OR ")); return join.isEmpty() ? manager.getSQLWhere(null) : "(" + join + ")"; } } /** * A timeline events filter used to query for events that have a particular * substring in their short, medium, or full descriptions. */ public static final class TextFilter extends TimelineFilter { private String descriptionSubstring; /** * Constructs a timeline events filter used to query for events that * have the empty string as a substring in their short, medium, or full * descriptions. */ public TextFilter() { this(""); } /** * Constructs a timeline events filter used to query for events that * have a given substring in their short, medium, or full descriptions. * * @param descriptionSubstring The substring that must be present in one * or more of the descriptions of each event * that passes the filter. */ public TextFilter(String descriptionSubstring) { super(); this.descriptionSubstring = descriptionSubstring.trim(); } /** * Sets the substring that must be present in one or more of the * descriptions of each event that passes the filter. * * @param descriptionSubstring The substring. */ public synchronized void setDescriptionSubstring(String descriptionSubstring) { this.descriptionSubstring = descriptionSubstring.trim(); } @Override public String getDisplayName() { return BundleProvider.getBundle().getString("TextFilter.displayName.text"); } /** * Gets the substring that must be present in one or more of the * descriptions of each event that passes the filter. * * @return The required substring. 
*/ public synchronized String getDescriptionSubstring() { return descriptionSubstring; } @Override public synchronized TextFilter copyOf() { return new TextFilter(getDescriptionSubstring()); } @Override public boolean equals(Object obj) { if (obj == null) { return false; } if (getClass() != obj.getClass()) { return false; } final TextFilter other = (TextFilter) obj; return Objects.equals(getDescriptionSubstring(), other.getDescriptionSubstring()); } @Override public int hashCode() { int hash = 5; hash = 29 * hash + Objects.hashCode(this.descriptionSubstring); return hash; } @Override String getSQLWhere(TimelineManager manager) { if (StringUtils.isNotBlank(this.getDescriptionSubstring())) { return "((med_description like '%" + escapeSingleQuotes(this.getDescriptionSubstring()) + "%')" //NON-NLS + " or (full_description like '%" + escapeSingleQuotes(this.getDescriptionSubstring()) + "%')" //NON-NLS + " or (short_description like '%" + escapeSingleQuotes(this.getDescriptionSubstring()) + "%'))"; //NON-NLS } else { return manager.getSQLWhere(null); } } @Override public String toString() { return "TextFilter{" + "textProperty=" + descriptionSubstring + '}'; } } /** * A timeline events filter that ANDs together instances of a variety of * event filter types to create what is in effect a "tree" of filters. */ public static final class RootFilter extends IntersectionFilter { private final HideKnownFilter knownFilesFilter; private final TagsFilter tagsFilter; private final HashHitsFilter hashSetHitsFilter; private final TextFilter descriptionSubstringFilter; private final EventTypeFilter eventTypesFilter; private final DataSourcesFilter dataSourcesFilter; private final FileTypesFilter fileTypesFilter; private final Set additionalFilters = new HashSet<>(); /** * Get the data sources filter of this filter. * * @return The filter. */ public DataSourcesFilter getDataSourcesFilter() { return dataSourcesFilter; } /** * Gets the tagged events sources filter of this filter. 
* * @return The filter. */ public TagsFilter getTagsFilter() { return tagsFilter; } /** * Gets the source file hash set hits filter of this filter. * * @return The filter. */ public HashHitsFilter getHashHitsFilter() { return hashSetHitsFilter; } /** * Gets the event types filter of this filter. * * @return The filter. */ public EventTypeFilter getEventTypeFilter() { return eventTypesFilter; } /** * Gets the exclude known source files filter of this filter. * * @return The filter. */ public HideKnownFilter getKnownFilter() { return knownFilesFilter; } /** * Gets the description substring filter of this filter. * * @return The filter. */ public TextFilter getTextFilter() { return descriptionSubstringFilter; } /** * Gets the source file types filter of this filter. * * @return The filter. */ public FileTypesFilter getFileTypesFilter() { return fileTypesFilter; } /** * Constructs a timeline events filter that ANDs together instances of a * variety of event filter types to create what is in effect a "tree" of * filters. * * @param knownFilesFilter A filter that excludes events with * knwon file event sources. * @param tagsFilter A filter that exludes or includes * events with tagged event sources. * @param hashSetHitsFilter A filter that excludes or includes * events with event sources that have * hash set hits. * @param descriptionSubstringFilter A filter that requires a substring * to be present in the event * description. * @param eventTypesFilter A filter that accepts events of * specified events types. * @param dataSourcesFilter A filter that accepts events * associated with a specified subset * of data sources. * @param fileTypesFilter A filter that includes or excludes * events with source files of * particular media types. * @param additionalFilters Additional filters. 
*/ public RootFilter( HideKnownFilter knownFilesFilter, TagsFilter tagsFilter, HashHitsFilter hashSetHitsFilter, TextFilter descriptionSubstringFilter, EventTypeFilter eventTypesFilter, DataSourcesFilter dataSourcesFilter, FileTypesFilter fileTypesFilter, Collection additionalFilters) { super(Arrays.asList(descriptionSubstringFilter, knownFilesFilter, tagsFilter, dataSourcesFilter, hashSetHitsFilter, fileTypesFilter, eventTypesFilter)); getSubFilters().removeIf(Objects::isNull); this.knownFilesFilter = knownFilesFilter; this.tagsFilter = tagsFilter; this.hashSetHitsFilter = hashSetHitsFilter; this.descriptionSubstringFilter = descriptionSubstringFilter; this.eventTypesFilter = eventTypesFilter; this.dataSourcesFilter = dataSourcesFilter; this.fileTypesFilter = fileTypesFilter; this.additionalFilters.addAll(asList(descriptionSubstringFilter, knownFilesFilter, tagsFilter, dataSourcesFilter, hashSetHitsFilter, fileTypesFilter, eventTypesFilter)); this.additionalFilters.removeIf(Objects::isNull); additionalFilters.stream(). filter(Objects::nonNull). filter(this::hasAdditionalFilter). map(TimelineFilter::copyOf). 
forEach(anonymousFilter -> getSubFilters().add(anonymousFilter)); } @Override public RootFilter copyOf() { Set subFilters = getSubFilters().stream() .filter(this::hasAdditionalFilter) .map(TimelineFilter::copyOf) .collect(Collectors.toSet()); return new RootFilter(knownFilesFilter.copyOf(), tagsFilter.copyOf(), hashSetHitsFilter.copyOf(), descriptionSubstringFilter.copyOf(), eventTypesFilter.copyOf(), dataSourcesFilter.copyOf(), fileTypesFilter.copyOf(), subFilters); } private boolean hasAdditionalFilter(TimelineFilter subFilter) { return !(additionalFilters.contains(subFilter)); } @Override public String toString() { return "RootFilter{" + "knownFilter=" + knownFilesFilter + ", tagsFilter=" + tagsFilter + ", hashFilter=" + hashSetHitsFilter + ", textFilter=" + descriptionSubstringFilter + ", typeFilter=" + eventTypesFilter + ", dataSourcesFilter=" + dataSourcesFilter + ", fileTypesFilter=" + fileTypesFilter + ", namedSubFilters=" + additionalFilters + '}'; } @Override public int hashCode() { int hash = 7; hash = 17 * hash + Objects.hashCode(this.knownFilesFilter); hash = 17 * hash + Objects.hashCode(this.tagsFilter); hash = 17 * hash + Objects.hashCode(this.hashSetHitsFilter); hash = 17 * hash + Objects.hashCode(this.descriptionSubstringFilter); hash = 17 * hash + Objects.hashCode(this.eventTypesFilter); hash = 17 * hash + Objects.hashCode(this.dataSourcesFilter); hash = 17 * hash + Objects.hashCode(this.fileTypesFilter); hash = 17 * hash + Objects.hashCode(this.additionalFilters); return hash; } @Override public boolean equals(Object obj) { if (this == obj) { return true; } if (obj == null) { return false; } if (getClass() != obj.getClass()) { return false; } final RootFilter other = (RootFilter) obj; if (notEqual(this.knownFilesFilter, other.getKnownFilter())) { return false; } if (notEqual(this.tagsFilter, other.getTagsFilter())) { return false; } if (notEqual(this.hashSetHitsFilter, other.getHashHitsFilter())) { return false; } if 
(notEqual(this.descriptionSubstringFilter, other.getTextFilter())) { return false; } if (notEqual(this.eventTypesFilter, other.getEventTypeFilter())) { return false; } if (notEqual(this.dataSourcesFilter, other.getDataSourcesFilter())) { return false; } if (notEqual(this.fileTypesFilter, other.getFileTypesFilter())) { return false; } return Objects.equals(this.additionalFilters, new HashSet<>(other.getSubFilters())); } } /** * A timeline events filter used to filter out events that have a direct or * indirect event source that is a known file. */ public static final class HideKnownFilter extends TimelineFilter { @Override public String getDisplayName() { return BundleProvider.getBundle().getString("hideKnownFilter.displayName.text"); } @Override public HideKnownFilter copyOf() { return new HideKnownFilter(); } @Override public int hashCode() { return 7; } @Override public boolean equals(Object obj) { if (obj == null) { return false; } return getClass() == obj.getClass(); } @Override String getSQLWhere(TimelineManager manager) { return "(known_state != " + TskData.FileKnown.KNOWN.getFileKnownValue() + ")"; // NON-NLS } @Override public String toString() { return "HideKnownFilter{" + '}'; } } /** * A timeline events filter composed of a collection of event filters. * Concrete implementations can decide how to combine the filters in the * collection. * * @param The type of the subfilters. */ public static abstract class CompoundFilter extends TimelineFilter { protected void addSubFilter(SubFilterType subfilter) { if (getSubFilters().contains(subfilter) == false) { getSubFilters().add(subfilter); } } private final List subFilters = new ArrayList<>(); /** * Gets the collection of filters that make up this filter. * * @return The filters. */ public final List getSubFilters() { return subFilters; } /** * Indicates whether or not this filter has subfilters. * * @return True or false. 
*/ public boolean hasSubFilters() { return getSubFilters().isEmpty() == false; } /** * Constructs a timeline events filter composed of a collection of event * filters. * * @param subFilters The collection of filters. */ protected CompoundFilter(List subFilters) { super(); this.subFilters.addAll(subFilters); } @Override public abstract CompoundFilter copyOf(); @Override public int hashCode() { int hash = 3; hash = 23 * hash + Objects.hashCode(this.subFilters); return hash; } @Override public boolean equals(Object obj) { if (this == obj) { return true; } if (obj == null) { return false; } if (getClass() != obj.getClass()) { return false; } final CompoundFilter other = (CompoundFilter) obj; return Objects.equals(this.getSubFilters(), other.getSubFilters()); } @Override public String toString() { return this.getClass().getSimpleName() + "{" + "subFilters=" + subFilters + '}'; } } /** * A timeline events filter used to query for events associated with a given * data source. */ public static final class DataSourceFilter extends TimelineFilter { private final String dataSourceName; private final long dataSourceID; /** * Gets the object ID of the specified data source. * * @return The data source object ID. */ public long getDataSourceID() { return dataSourceID; } /** * Gets the display name of the specified data source. * * @return The data source display name. */ public String getDataSourceName() { return dataSourceName; } /** * Constructs a timeline events filter used to query for events * associated with a given data source. * * @param dataSourceName The data source display name. * @param dataSourceID The data source object ID. 
*/ public DataSourceFilter(String dataSourceName, long dataSourceID) { super(); this.dataSourceName = dataSourceName; this.dataSourceID = dataSourceID; } @Override public synchronized DataSourceFilter copyOf() { return new DataSourceFilter(getDataSourceName(), getDataSourceID()); } @Override public String getDisplayName() { return getDataSourceName() + " (ID: " + getDataSourceID() + ")"; } @Override public int hashCode() { int hash = 3; hash = 47 * hash + Objects.hashCode(this.dataSourceName); hash = 47 * hash + (int) (this.dataSourceID ^ (this.dataSourceID >>> 32)); return hash; } @Override public boolean equals(Object obj) { if (this == obj) { return true; } if (obj == null) { return false; } if (getClass() != obj.getClass()) { return false; } final DataSourceFilter other = (DataSourceFilter) obj; if (this.dataSourceID != other.dataSourceID) { return false; } return Objects.equals(this.dataSourceName, other.dataSourceName); } @Override String getSQLWhere(TimelineManager manager) { return "(data_source_obj_id = '" + this.getDataSourceID() + "')"; //NON-NLS } } /** * A timeline events filter used to query for events where the files that * are the direct or indirect sources of the events either have or do not * have hash set hits. * */ public static final class HashHitsFilter extends TimelineFilter { private boolean eventSourcesHaveHashSetHits; /** * Constructs a timeline events filter used to query for events where * the files that are the direct or indirect sources of the events * either do not have hash set hits. */ public HashHitsFilter() { } /** * Constructs a timeline events filter used to query for events where * the files that are the direct or indirect sources of the events * either have or do not have hash set hits. * * @param eventSourcesHaveHashSetHits Whether or not the files * associated with the events have or * do not have hash set hits. 
*/ public HashHitsFilter(boolean eventSourcesHaveHashSetHits) { this.eventSourcesHaveHashSetHits = eventSourcesHaveHashSetHits; } /** * Sets whether or not the files associated with the events have or do * not have hash set hits * * @param eventSourcesHaveHashSetHits True or false. */ public synchronized void setEventSourcesHaveHashSetHits(boolean eventSourcesHaveHashSetHits) { this.eventSourcesHaveHashSetHits = eventSourcesHaveHashSetHits; } /** * Indicates whether or not the files associated with the events have or * do not have hash set hits * * @return True or false. */ public synchronized boolean getEventSourcesHaveHashSetHits() { return eventSourcesHaveHashSetHits; } @Override public String getDisplayName() { return BundleProvider.getBundle().getString("hashHitsFilter.displayName.text"); } @Override public HashHitsFilter copyOf() { return new HashHitsFilter(eventSourcesHaveHashSetHits); } @Override public boolean equals(Object obj) { if (obj == null || !(obj instanceof HashHitsFilter)) { return false; } return ((HashHitsFilter) obj).getEventSourcesHaveHashSetHits() == getEventSourcesHaveHashSetHits(); } @Override public int hashCode() { int hash = 7; hash = 67 * hash + Objects.hashCode(this.eventSourcesHaveHashSetHits); return hash; } @Override String getSQLWhere(TimelineManager manager) { String whereStr = ""; if (eventSourcesHaveHashSetHits) { whereStr = "hash_hit = 1"; } else { whereStr = "hash_hit = 0"; } return whereStr; } } /** * A timeline events filter used to query for events associated with a given * subset of data sources. The filter is a union of one or more single data * source filters. 
*/ static public final class DataSourcesFilter extends UnionFilter { @Override public DataSourcesFilter copyOf() { return copySubFilters(this, new DataSourcesFilter()); } @Override public String getDisplayName() { return BundleProvider.getBundle().getString("DataSourcesFilter.displayName.text"); } } /** * A timeline events filter used to query for events with direct or indirect * event sources that are files with a given set of media types. The filter * is a union of one or more file source filters. */ static public final class FileTypesFilter extends UnionFilter { @Override public FileTypesFilter copyOf() { return copySubFilters(this, new FileTypesFilter()); } @Override public String getDisplayName() { return BundleProvider.getBundle().getString("FileTypesFilter.displayName.text"); } } /** * A timeline events filter used to query for events with direct or indirect * event sources that are files that do not have a given set of media types. */ static public class InverseFileTypeFilter extends FileTypeFilter { public InverseFileTypeFilter(String displayName, Collection mediaTypes) { super(displayName, mediaTypes); } @Override public InverseFileTypeFilter copyOf() { return new InverseFileTypeFilter(getDisplayName(), super.mediaTypes); } @Override String getSQLWhere(TimelineManager manager) { return " NOT " + super.getSQLWhere(manager); } } /** * A timeline events filter used to query for events with direct or indirect * event sources that are files with a given set of media types. */ public static class FileTypeFilter extends TimelineFilter { private final String displayName; private final String sqlWhere; Collection mediaTypes = new HashSet<>(); private FileTypeFilter(String displayName, String sql) { this.displayName = displayName; this.sqlWhere = sql; } /** * Constructs a timeline events filter used to query for events with * direct or indirect event sources that are files with a given set of * media types. * * @param displayName The display name for the filter. 
* @param mediaTypes The event source file media types that pass the * filter. */ public FileTypeFilter(String displayName, Collection mediaTypes) { this(displayName, mediaTypes.stream() .map(MediaType::parse) .map(FileTypeFilter::mediaTypeToSQL) .collect(Collectors.joining(" OR ", "(", ")"))); this.mediaTypes = mediaTypes; } private static String mediaTypeToSQL(MediaType mediaType) { return mediaType.hasWildcard() ? " (tsk_events.mime_type LIKE '" + escapeSingleQuotes(mediaType.type()) + "/_%' ) " : " (tsk_events.mime_type = '" + escapeSingleQuotes(mediaType.toString()) + "' ) "; } @Override public String getDisplayName() { return displayName; } @Override public FileTypeFilter copyOf() { return new FileTypeFilter(displayName, sqlWhere); } @Override public int hashCode() { int hash = 7; hash = 17 * hash + Objects.hashCode(this.displayName); hash = 17 * hash + Objects.hashCode(this.sqlWhere); return hash; } @Override public boolean equals(Object obj) { if (this == obj) { return true; } if (obj == null) { return false; } if (getClass() != obj.getClass()) { return false; } final FileTypeFilter other = (FileTypeFilter) obj; if (notEqual(this.displayName, other.displayName)) { return false; } return Objects.equals(this.sqlWhere, other.sqlWhere); } @Override String getSQLWhere(TimelineManager manager) { return sqlWhere; } @Override public String toString() { return "FileTypeFilter{" + "displayName=" + displayName + ", sqlWhere=" + sqlWhere + '}'; } } } sleuthkit-4.11.1/bindings/java/src/org/sleuthkit/datamodel/ReadContentInputStream.java000644 000765 000024 00000011453 14137073413 031722 0ustar00carrierstaff000000 000000 /* * Sleuth Kit Data Model * * Copyright 2011-2018 Basis Technology Corp. * Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.sleuthkit.datamodel; import java.io.IOException; import java.io.InputStream; /** * InputStream to read bytes from a Content object's data */ public final class ReadContentInputStream extends InputStream { private long currentOffset; private final long contentSize; private final Content content; public ReadContentInputStream(Content content) { this.content = content; this.currentOffset = 0; this.contentSize = content.getSize(); } @Override public int read() throws ReadContentInputStreamException { byte[] buff = new byte[1]; return (read(buff) != -1) ? buff[0] : -1; } @Override public int read(byte[] b) throws ReadContentInputStreamException { return read(b, 0, b.length); } @Override public int read(byte[] b, int off, int len) throws ReadContentInputStreamException { final int buffLen = b.length; //must return 0 for zero-length arrays if (buffLen == 0 || len == 0) { return 0; } //would get an error from TSK if we try to read an empty file if (contentSize == 0) { return -1; } // check off. Must be in bounds of buffer if (off < 0 || off >= buffLen) { return -1; } //eof, no data remains to be read if (currentOffset >= contentSize) { return -1; } // Is the file big enough for the full request? int lenToRead = (int) Math.min(contentSize - currentOffset, len); // is the buffer big enough? 
lenToRead = Math.min(lenToRead, buffLen - off); byte[] retBuf; if (off == 0) { //write directly to user buffer retBuf = b; } else { //write to a temp buffer, then copy to user buffer retBuf = new byte[lenToRead]; } try { final int lenRead = content.read(retBuf, currentOffset, lenToRead); if (lenRead == 0 || lenRead == -1) { //error or no more bytes to read, report EOF return -1; } else { currentOffset += lenRead; //if read into user-specified offset, copy back from temp buffer to user if (off != 0) { System.arraycopy(retBuf, 0, b, off, lenRead); } return lenRead; } } catch (TskCoreException ex) { throw new ReadContentInputStreamException(String.format("Error reading file '%s' (id=%d) at offset %d.", content.getName(), content.getId(), currentOffset), ex); } } @Override public int available() throws IOException { long len = contentSize - currentOffset; if (len < 0) { return 0; } return (int) len; } @Override public long skip(long n) throws IOException { //more efficient skip() implementation than superclass //as it does not involve reads long toSkip = Math.min(n, contentSize - currentOffset); //allow to skip to EOF currentOffset += toSkip; return toSkip; //0 1 2 3 4 5 len: 6 } @Override public void close() throws IOException { super.close(); //nothing to be done currently, file handles are closed when content is gc'ed } @Override public boolean markSupported() { return false; } /// additional methods to facilitate stream seeking /** * Get total length of the stream * * @return number of bytes that can be read from this stream */ public long getLength() { return contentSize; } /** * Get current position in the stream * * @return current offset in bytes */ public long getCurPosition() { return currentOffset; } /** * Set new current position in the stream, up to and including EOF * * @param newPosition new position in the stream to be set * * @return the actual position set, which can be less than position passed * in if EOF has been reached */ public long seek(long 
newPosition) { if (newPosition < 0) { throw new IllegalArgumentException("Illegal negative new position in the stream"); } currentOffset = Math.min(newPosition, contentSize); return currentOffset; } /** * Exception thrown when there's an error reading from the * ReadContentInputStream. */ public final static class ReadContentInputStreamException extends IOException { private static final long serialVersionUID = 1L; public ReadContentInputStreamException(String message) { super(message); } public ReadContentInputStreamException(String message, Throwable cause) { super(message, cause); } } } sleuthkit-4.11.1/bindings/java/src/org/sleuthkit/datamodel/ScoreChange.java000644 000765 000024 00000002573 14137073413 027504 0ustar00carrierstaff000000 000000 /* * Sleuth Kit Data Model * * Copyright 2020-2021 Basis Technology Corp. * Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.sleuthkit.datamodel; import java.util.Optional; /** * This class encapsulates a score change. 
*/ final public class ScoreChange { private final long objId; private final Long dataSourceObjectId; private final Score oldScore; private final Score newScore; ScoreChange(long objId, Long dataSourceObjectId, Score oldScore, Score newScore) { this.objId = objId; this.dataSourceObjectId = dataSourceObjectId; this.oldScore = oldScore; this.newScore = newScore; } public Long getDataSourceObjectId() { return dataSourceObjectId; } public long getObjectId() { return objId; } public Score getOldScore() { return oldScore; } public Score getNewScore() { return newScore; } } sleuthkit-4.11.1/bindings/java/src/org/sleuthkit/datamodel/ContentTag.java000755 000765 000024 00000003761 14137073413 027374 0ustar00carrierstaff000000 000000 /* * Sleuth Kit Data Model * * Copyright 2013-2018 Basis Technology Corp. * Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.sleuthkit.datamodel; /** * Instances of this class are data transfer objects (DTOs) that represent tags * a user can apply to content. */ public class ContentTag extends Tag { private final Content content; private final long beginByteOffset; private final long endByteOffset; // Clients of the org.sleuthkit.datamodel package should not directly create these objects. 
ContentTag(long tagID, Content content, TagName name, String comment, long beginByteOffset, long endByteOffset, String userName) { super(tagID, name, comment, userName); this.content = content; this.beginByteOffset = beginByteOffset; this.endByteOffset = endByteOffset; } /** * Return the tagged content * * @return tagged content */ public Content getContent() { return content; } /** * Returns whether the tag has a byte range * * @return true if the tag has a byte range, false otherwise */ public boolean hasByteExtent() { return (beginByteOffset > 0) && (endByteOffset > 0) && (endByteOffset > beginByteOffset); } /** * Returns starting offset of the byte range * * @return start offset */ public long getBeginByteOffset() { return beginByteOffset; } /** * Returns end offset of the byte range * * @return end offset */ public long getEndByteOffset() { return endByteOffset; } } sleuthkit-4.11.1/bindings/java/src/org/sleuthkit/datamodel/HashHitInfo.java000644 000765 000024 00000003655 14137073413 027471 0ustar00carrierstaff000000 000000 /* * Sleuth Kit Data Model * * Copyright 2013 Basis Technology Corp. * Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.sleuthkit.datamodel; import java.util.ArrayList; /** * Used to transmit hashDb information about a particular hash hit from the TSK * lookup code to Autopsy. HashEntry is for the reverse direction (adding hashes * to DB). 
*/ public class HashHitInfo { private String hashMd5; private String hashSha1; private String hashSha2_256; private ArrayList names = new ArrayList(); private ArrayList comments = new ArrayList(); /** * Default constructor when error message is not available */ public HashHitInfo(String hashMd5, String hashSha1, String hashSha2_256) { this.hashMd5 = hashMd5; this.hashSha1 = hashSha1; this.hashSha2_256 = hashSha2_256; } /** * Add file name associated with this hash * * @param name */ public void addName(String name) { names.add(name); } /** * Add comment associated with this hash * * @param comment */ public void addComment(String comment) { comments.add(comment); } public String getHashMd5() { return hashMd5; } public String getHashSha1() { return hashSha1; } public String getHashSha256() { return hashSha2_256; } public ArrayList getNames() { return names; } public ArrayList getComments() { return comments; } } sleuthkit-4.11.1/bindings/java/src/org/sleuthkit/datamodel/EncodedFileUtil.java000644 000765 000024 00000010157 14137073413 030317 0ustar00carrierstaff000000 000000 /* * SleuthKit Java Bindings * * Copyright 2011-2016 Basis Technology Corp. * Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.sleuthkit.datamodel; import java.io.IOException; import java.io.RandomAccessFile; import java.util.Arrays; /** * Utility methods to support encoding/decoding files written to disk. 
*/ class EncodedFileUtil { final static private int HEADER_LENGTH = 32; // All headers must be this long final static private String XOR1_HEADER = "TSK_CONTAINER_XOR1_xxxxxxxxxxxxx"; /** * Get the header for the given encoding type. * @param type * @return * @throws IOException */ static String getHeader(TskData.EncodingType type) throws IOException{ switch (type){ case XOR1: return XOR1_HEADER; default: throw new IOException("Can not get header for " + type.toString()); } } /** * Get the encoded version of the given type's header. * Used by EncodedFileStream so that after the header is fed through the encoding * scheme, the original plaintext header will appear at the beginning of the file. * This should not be used for testing which encoding scheme was used on a file. * @param type * @return * @throws IOException */ static byte [] getEncodedHeader(TskData.EncodingType type) throws IOException{ if(type.equals(TskData.EncodingType.NONE)){ throw new IOException("Can not get encoded header for " + type.toString()); } byte [] encHeader = new byte[HEADER_LENGTH]; byte [] plainHeader = getHeader(type).getBytes(); for(int i = 0;i < HEADER_LENGTH;i++){ encHeader[i] = encodeByte(plainHeader[i], type); } return encHeader; } /** * Returns the length of the encoded header. * This is a fixed length to allow easier detection. * @return */ static int getHeaderLength(){ return HEADER_LENGTH; } /** * Encode a byte using the given encoding scheme. * @param b * @param type * @return * @throws IOException */ static byte encodeByte(byte b, TskData.EncodingType type) throws IOException{ switch (type){ case XOR1: return ((byte)(b ^ 0xca)); default: throw new IOException("Can not encode byte with encoding type " + type.toString()); } } /** * Decode a byte using the given encoding scheme. 
* @param b * @param type * @return * @throws IOException */ static byte decodeByte(byte b, TskData.EncodingType type) throws IOException{ switch (type){ case XOR1: return ((byte)(b ^ 0xca)); default: throw new IOException("Can not decode byte with encoding type " + type.toString()); } } /** * Determine whether a file was encoded and which type of encoding was used. * @param fileHandle * @return * @throws IOException */ static TskData.EncodingType getEncoding(RandomAccessFile fileHandle){ try{ long curOffset = fileHandle.getFilePointer(); if (curOffset != 0) { fileHandle.seek(0); } byte[] header = new byte[HEADER_LENGTH]; int bytesRead = fileHandle.read(header, 0, HEADER_LENGTH); if(bytesRead != HEADER_LENGTH){ return TskData.EncodingType.NONE; } return(getTypeFromHeader(header)); } catch (IOException ex){ return TskData.EncodingType.NONE; } } /** * Compare the buffer containing the potential header against the encoding headers. * @param header * @return */ static private TskData.EncodingType getTypeFromHeader(byte[] header){ if(header.length != HEADER_LENGTH){ return TskData.EncodingType.NONE; } if(Arrays.equals(header, XOR1_HEADER.getBytes())){ return TskData.EncodingType.XOR1; } else { return TskData.EncodingType.NONE; } } } sleuthkit-4.11.1/bindings/java/src/org/sleuthkit/datamodel/TskEvent.java000755 000765 000024 00000033525 14137073413 027072 0ustar00carrierstaff000000 000000 /* * Sleuth Kit Data Model * * Copyright 2020-2021 Basis Technology Corp. * Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
* See the License for the specific language governing permissions and * limitations under the License. */ package org.sleuthkit.datamodel; import com.google.common.collect.ImmutableSet; import java.util.ArrayList; import java.util.Collections; import java.util.List; import java.util.Optional; /** * Data model events. */ public interface TskEvent { /** * Gets the data source guaranteed to be associated with the event, if * applicable. * * @return The object ID of the data source associated with the event, if * specified. */ default Optional getDataSourceId() { return Optional.ofNullable(null); } /** * An abstract super class for data model events for one or more data module * objects. * * @param The type of data model object that is the subject of the * event. */ abstract static class TskObjectsEvent implements TskEvent { private final List dataModelObjects; /** * Constructs the super class part for data model events for one or more * data module objects. * * @param dataModelObjects The data model objects that are the subjects * of the event. */ TskObjectsEvent(List dataModelObjects) { this.dataModelObjects = new ArrayList<>(); this.dataModelObjects.addAll(dataModelObjects); } /** * Gets the data model objects that are the subjects of the event. * * @return The data model objects. */ List getDataModelObjects() { return Collections.unmodifiableList(dataModelObjects); } } /** * An event published when the aggregate scores of one or more data model * objects change. */ public final static class AggregateScoresChangedEvent extends TskObjectsEvent { private final Long dataSourceObjectId; /** * Constructs an event published when the aggregate scores of one or * more data model objects change. * * @param scoreChanges The score changes, must not be empty. 
*/ AggregateScoresChangedEvent(Long dataSourceObjectId, ImmutableSet scoreChanges) { super(scoreChanges.asList()); this.dataSourceObjectId = dataSourceObjectId; scoreChanges.stream().forEach(chg -> { if (!chg.getDataSourceObjectId().equals(dataSourceObjectId)) { throw new IllegalArgumentException("All data source object IDs in List must match dataSourceObjectId"); } }); } @Override public Optional getDataSourceId() { return Optional.ofNullable(dataSourceObjectId); } /** * Gets the score changes. * * @return The score changes. */ public List getScoreChanges() { return getDataModelObjects(); } } /** * An event published when one or more analysis results are deleted. */ public final static class AnalysisResultsDeletedTskEvent extends TskObjectsEvent { /** * Constructs an event published when one or more analysis results are * deleted. * * @param deletedResultObjIds The TSK object IDs of the deleted analysis * results. */ AnalysisResultsDeletedTskEvent(List deletedResultObjIds) { super(deletedResultObjIds); } /** * Gets the TSK object IDs of the deleted analysis results. * * @return The TSK object IDs. */ public List getAnalysisResultObjectIds() { return getDataModelObjects(); } } /** * An abstract super class for host events. */ abstract static class HostsTskEvent extends TskObjectsEvent { /** * Constructs the super class part for a host event. * * @param hosts The hosts that are the subjects of the event. */ HostsTskEvent(List hosts) { super(hosts); } /** * Gets the hosts. * * @return The hosts. */ public List getHosts() { return getDataModelObjects(); } } /** * An event published when one or more hosts are added. */ public final static class HostsAddedTskEvent extends HostsTskEvent { /** * Constructs an event published when one or more hosts are added. * * @param hosts The hosts. */ HostsAddedTskEvent(List hosts) { super(hosts); } } /** * An event published when one or more hosts are updated. 
*/ public final static class HostsUpdatedTskEvent extends HostsTskEvent { /** * Constructs an event published when one or more hosts are updated. * * @param hosts The hosts. */ HostsUpdatedTskEvent(List hosts) { super(hosts); } } /** * An event published when one or more hosts are deleted. */ public final static class HostsDeletedTskEvent extends TskObjectsEvent { /** * Constructs an event published when one or more hosts are deleted. * * @param hostIds The host IDs of the deleted hosts. */ HostsDeletedTskEvent(List hostIds) { super(hostIds); } /** * Gets the host IDs of the deleted hosts. * * @return The host IDs. */ public List getHostIds() { return getDataModelObjects(); } } /** * An abstract super class for OS account events. */ abstract static class OsAccountsTskEvent extends TskObjectsEvent { /** * Constructs the super class part of an OS account event. * * @param hosts The OS accounts that are the subjects of the event. */ OsAccountsTskEvent(List osAccounts) { super(osAccounts); } /** * Gets the OS accounts. * * @return The OS accounts. */ public List getOsAcounts() { return getDataModelObjects(); } } /** * An event published when one or more OS accounts are added. */ public final static class OsAccountsAddedTskEvent extends OsAccountsTskEvent { /** * Constructs an event published when one or more OS accounts are added. * * @param osAccounts The OS accounts. */ OsAccountsAddedTskEvent(List osAccounts) { super(osAccounts); } } /** * An event published when one or more OS accounts are updated. */ public final static class OsAccountsUpdatedTskEvent extends OsAccountsTskEvent { /** * Constructs an event published when OS accounts are updated. * * @param osAccounts The OS accounts. */ OsAccountsUpdatedTskEvent(List osAccounts) { super(osAccounts); } } /** * An event published when one or more OS accounts are deleted. 
*/ public final static class OsAccountsDeletedTskEvent extends TskObjectsEvent { /** * Constructs an event published when one or more OS accounts are * deleted. * * @param accountList The object IDs of the deleted OS accounts. */ OsAccountsDeletedTskEvent(List accountObjectIds) { super(accountObjectIds); } /** * Gets the TSK object IDs of the deleted OS accounts. * * @return The TSK object IDs. */ public List getOsAccountObjectIds() { return getDataModelObjects(); } } /** * An event published when one or more OS account instances are added. */ public final static class OsAcctInstancesAddedTskEvent extends TskObjectsEvent { /** * Constructs an event published when one or more OS account instances * are added. * * @param hosts The OS account instances that are the subjects of the * event. */ OsAcctInstancesAddedTskEvent(List osAcctInstances) { super(osAcctInstances); } /** * Gets the OS account instances. * * @return The OS account instances. */ public List getOsAccountInstances() { return getDataModelObjects(); } } /** * An abstract super class for person events. */ static abstract class PersonsTskEvent extends TskObjectsEvent { /** * Constructs the super class part of a person event. * * @param persons The persons that are the subjects of the event. */ PersonsTskEvent(List persons) { super(persons); } /** * Gets the persons. * * @return The persons. */ public List getPersons() { return getDataModelObjects(); } } /** * An event published when one or more persons are added. */ public final static class PersonsAddedTskEvent extends PersonsTskEvent { /** * Constructs an event published when one or more persons are added. * * @param persons The persons. */ PersonsAddedTskEvent(List persons) { super(persons); } } /** * An event published when one or more persons are updated. */ public final static class PersonsUpdatedTskEvent extends PersonsTskEvent { /** * Constructs an event published when one or more persons are updated. * * @param persons The persons. 
*/ PersonsUpdatedTskEvent(List persons) { super(persons); } } /** * An event published when one or more persons are deleted. */ public final static class PersonsDeletedTskEvent extends TskObjectsEvent { /** * Constructs an event published when one or more persons are deleted. * * @param persons The persons. */ PersonsDeletedTskEvent(List personObjectIDs) { super(personObjectIDs); } /** * Gets the person IDs of the deleted persons. * * @return The person IDs. */ public List getPersonIds() { return getDataModelObjects(); } } /** * An event published when one or more hosts are added to a person. */ public final static class HostsAddedToPersonTskEvent extends TskObjectsEvent { private final Person person; /** * Constructs the super class part of a person and host association * change event. * * @param person The person that is the subject of the event. * @param hosts The hosts that are the subjects of the event. */ HostsAddedToPersonTskEvent(Person person, List hosts) { super(hosts); this.person = person; } /** * Gets the person. * * @return The person. */ public Person getPerson() { return person; } /** * Gets the hosts. * * @return The hosts. */ public List getHosts() { return getDataModelObjects(); } } /** * An event published when one or more hosts are removed from a person. */ public final static class HostsRemovedFromPersonTskEvent extends TskObjectsEvent { private final Person person; /** * Constructs an event published when one or more hosts are removed from * a person. * * @param person The person. * @param hostIds The host IDs of the hosts. */ HostsRemovedFromPersonTskEvent(Person person, List hostIds) { super(hostIds); this.person = person; } /** * Gets the person. * * @return The person. */ public Person getPerson() { return person; } /** * Gets the host IDs of the deleted hosts. * * @return The host IDs. 
*/ public List getHostIds() { return getDataModelObjects(); } } static abstract class TagNamesTskEvent extends TskObjectsEvent { public TagNamesTskEvent(List tagNames) { super(tagNames); } /** * Returns the list of added or updated TagName objects. * * @return The TagName list. */ public List getTagNames() { return getDataModelObjects(); } } /** * An event published when one or more TagName are added. */ public final static class TagNamesAddedTskEvent extends TagNamesTskEvent { /** * Construct an event when one or more TagName are created or updated. * * @param tagNames List of added or modified TagName. */ public TagNamesAddedTskEvent(List tagNames) { super(tagNames); } } /** * An event published when one or more TagName are updated. */ public final static class TagNamesUpdatedTskEvent extends TagNamesTskEvent { /** * Construct an event when one or more TagName are updated. * * @param tagNames List of added or modified TagName. */ public TagNamesUpdatedTskEvent(List tagNames) { super(tagNames); } } /** * An event published when one or more TagName are deleted. */ public final static class TagNamesDeletedTskEvent extends TskObjectsEvent { /** * Constructs a new event with the given list of TagName ids. * * @param tagNameIds Deleted TagName id list. */ public TagNamesDeletedTskEvent(List tagNameIds) { super(tagNameIds); } /** * List of the deleted TagName ids. * * @return The list of deleted TagName Ids. */ public List getTagNameIds() { return getDataModelObjects(); } } /** * An event published when one or more TagSets have been added. */ public final static class TagSetsAddedTskEvent extends TskObjectsEvent { /** * Constructs an added event for one or more TagSets. * * @param tagSets The added TagSet. */ public TagSetsAddedTskEvent(List tagSets) { super(tagSets); } /** * Return the TagSets list. * * @return The TagSet list. */ public List getTagSets() { return getDataModelObjects(); } } /** * An event published when one or more TagSets have been deleted. 
*/ public final static class TagSetsDeletedTskEvent extends TskObjectsEvent { /** * Constructs a deleted event for one or more TagSets. * * @param tagSetIds The ids of the deleted TagSets. */ public TagSetsDeletedTskEvent(List tagSetIds) { super(tagSetIds); } /** * Returns the list of deleted TagSet ids. * * @return The list of deleted TagSet ids. */ public List getTagSetIds() { return getDataModelObjects(); } } } sleuthkit-4.11.1/bindings/java/src/org/sleuthkit/datamodel/Content.java000644 000765 000024 00000036070 14137073413 026734 0ustar00carrierstaff000000 000000 /* * Sleuth Kit Data Model * * Copyright 2011-2016 Basis Technology Corp. * Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.sleuthkit.datamodel; import java.util.ArrayList; import java.util.Collection; import java.util.List; import java.util.Set; /** * Interface for all datatypes that can be found in the database. Content * objects make up a tree and each object can have a parent and children. For * example, the child of an Image object is a Volume or File System. This * interface defines the basic methods for reading the content associated with * this object, the parent and children, and adding artifacts. */ public interface Content extends SleuthkitVisitableItem { /** * Reads data that this content object is associated with (file contents, * volume contents, etc.). 
* * @param buf a character array of data (in bytes) to copy read data to * @param offset byte offset in the content to start reading from * @param len number of bytes to read into buf. * * @return num of bytes read, or -1 on error * * @throws TskCoreException if critical error occurred during read in the * tsk core */ public int read(byte[] buf, long offset, long len) throws TskCoreException; /** * Free native resources after read is done on the Content object. After * closing, read can be called again on the same Content object, which * should result in re-opening of new native resources. */ public void close(); /** * Get the (reported) size of the content object and, in theory, how much * you should be able to read from it. In some cases, data corruption may * mean that you cannot read this much data. * * @return size of the content in bytes */ public long getSize(); /** * Visitor pattern support * * @param v visitor supplying an algorithm to run on the content object * * @return visitor return value resulting from running the algorithm */ public T accept(ContentVisitor v); /** * Get the name of this content object (does not include parent path) * * @return the name */ public String getName(); /** * @return returns the full path to this Content object starting with a "/" * followed by the Image name and similarly for all other segments * in the hierarchy. */ public String getUniquePath() throws TskCoreException; /** * Returns the unique object ID that was assigned to it in the database. * This is a Sleuth Kit database-assigned number. * * @return object id */ public long getId(); /** * Gets the root data source (image, virtual directory, etc.) of this * content. * * @return Content associated with data source or null if one can't be found * * @throws TskCoreException if critical error occurred within tsk core */ public Content getDataSource() throws TskCoreException; /** * Gets the child content objects of this content. 
* * @return List of children * * @throws TskCoreException if critical error occurred within tsk core */ public List getChildren() throws TskCoreException; /** * Returns true if the content object has children objects. Note, this * should be more efficient than getting children and checking it empty. * * @return true if has children, false otherwise. * * @throws TskCoreException if critical error occurred within tsk core */ public boolean hasChildren() throws TskCoreException; /** * Returns count of children objects. Note, this should be more efficient * than getting children and counting them. * * @return children count * * @throws TskCoreException if critical error occurred within tsk core */ public int getChildrenCount() throws TskCoreException; /** * @return returns the parent of this Content object or null if there isn't * one as is the case for Image. * * @throws TskCoreException */ public Content getParent() throws TskCoreException; /** * Gets the child content ids of this content. * * @return List of children ids * * @throws TskCoreException if critical error occurred within tsk core */ public List getChildrenIds() throws TskCoreException; /** * Create and add an artifact associated with this content to the blackboard * * @param artifactTypeID id of the artifact type (if the id doesn't already * exist an exception will be thrown) * * @return the blackboard artifact created (the artifact type id can be * looked up from this) * * @throws TskCoreException if critical error occurred within tsk core * @deprecated Please use newDataArtifact or newAnalysisResult. 
*/ @Deprecated public BlackboardArtifact newArtifact(int artifactTypeID) throws TskCoreException; /** * Create and add an artifact associated with this content to the blackboard * * @param type artifact enum tyoe * * @return the blackboard artifact created (the artifact type id can be * looked up from this) * * @throws TskCoreException if critical error occurred within tsk core * @deprecated Please use newDataArtifact or newAnalysisResult. */ @Deprecated public BlackboardArtifact newArtifact(BlackboardArtifact.ARTIFACT_TYPE type) throws TskCoreException; /** * Create and add an analysis result associated with this content. * * * @param artifactType Type of analysis result artifact to create. * @param score Score associated with this analysis. * @param conclusion Conclusion from the analysis, may be empty. * @param configuration Configuration element associated with this * analysis, may be empty. * @param justification Justification * @param attributesList Additional attributes to attach to this analysis * result artifact. * * @return AnalysisResultAdded The analysis return added and the current * aggregate score of content. * * @throws TskCoreException if critical error occurred within tsk core. */ public AnalysisResultAdded newAnalysisResult(BlackboardArtifact.Type artifactType, Score score, String conclusion, String configuration, String justification, Collection attributesList) throws TskCoreException; /** * Create and add an analysis result associated with this content. * * * @param artifactType Type of analysis result artifact to create. * @param score Score associated with this analysis. * @param conclusion Conclusion from the analysis, may be empty. * @param configuration Configuration element associated with this * analysis, may be empty. * @param justification Justification * @param attributesList Additional attributes to attach to this analysis * result artifact. 
* @param dataSourceId The data source for the analysis result * * @return AnalysisResultAdded The analysis return added and the current * aggregate score of content. * * @throws TskCoreException if critical error occurred within tsk core. */ public AnalysisResultAdded newAnalysisResult(BlackboardArtifact.Type artifactType, Score score, String conclusion, String configuration, String justification, Collection attributesList, long dataSourceId) throws TskCoreException; /** * Create and add a data artifact associated with this abstract file. This * method creates the data artifact with the os account id associated with * this abstract file if one exists. * * @param artifactType Type of data artifact to create. * @param attributesList Additional attributes to attach to this data * artifact. * * @return DataArtifact New data artifact. * * @throws TskCoreException If a critical error occurred within tsk core. */ public DataArtifact newDataArtifact(BlackboardArtifact.Type artifactType, Collection attributesList) throws TskCoreException; /** * Create and add a data artifact associated with this content. * * @param artifactType Type of analysis result artifact to create. * @param attributesList Additional attributes to attach to this data * artifact. * @param osAccountId The OS account id associated with the artifact. May * be null. * * @return DataArtifact New data artifact. * * @throws TskCoreException If a critical error occurred within tsk core. */ public DataArtifact newDataArtifact(BlackboardArtifact.Type artifactType, Collection attributesList, Long osAccountId) throws TskCoreException; /** * Create and add a data artifact associated with this content. * * @param artifactType Type of analysis result artifact to create. * @param attributesList Additional attributes to attach to this data * artifact. * @param osAccountId The OS account id associated with the artifact. May * be null. 
* @param dataSourceId The data source id of the artifact * * @return DataArtifact New data artifact. * * @throws TskCoreException If a critical error occurred within tsk core. */ public DataArtifact newDataArtifact(BlackboardArtifact.Type artifactType, Collection attributesList, Long osAccountId, long dataSourceId) throws TskCoreException; /** * Returns the final score for the content object. * * @return Score. * * @throws TskCoreException if critical error occurred within tsk core. */ public Score getAggregateScore() throws TskCoreException; /** * Get all artifacts associated with this content that have the given type * name * * @param artifactTypeName name of the type to look up * * @return a list of blackboard artifacts matching the type * * @throws TskCoreException if critical error occurred within tsk core */ public ArrayList getArtifacts(String artifactTypeName) throws TskCoreException; /** * Get all analysis results associated with this content, that have the * given type. * * @param artifactType Type to look up. * * @return A list of analysis result artifacts matching the type. * * @throws TskCoreException If critical error occurred within tsk core. */ public List getAnalysisResults(BlackboardArtifact.Type artifactType) throws TskCoreException; /** * Return the TSK_GEN_INFO artifact for the file so that individual * attributes can be added to it. Creates one if it does not already exist. * * @return Instance of the TSK_GEN_INFO artifact * * @throws TskCoreException */ public BlackboardArtifact getGenInfoArtifact() throws TskCoreException; /** * Return the TSK_GEN_INFO artifact for the file so that individual * attributes can be added to it. If one does not create, behavior depends * on the create argument. * * @param create If true, an artifact will be created if it does not already * exist. 
* * @return Instance of the TSK_GEN_INFO artifact or null if artifact does * not already exist and create was set to false * * @throws TskCoreException */ public BlackboardArtifact getGenInfoArtifact(boolean create) throws TskCoreException; /** * Return attributes of a given type from TSK_GEN_INFO. * * @param attr_type Attribute type to find inside of the TSK_GEN_INFO * artifact. * * @return Attributes */ public ArrayList getGenInfoAttributes(BlackboardAttribute.ATTRIBUTE_TYPE attr_type) throws TskCoreException; /** * Get all artifacts associated with this content that have the given type * id * * @param artifactTypeID type id to look up * * @return a list of blackboard artifacts matching the type * * @throws TskCoreException if critical error occurred within tsk core */ public ArrayList getArtifacts(int artifactTypeID) throws TskCoreException; /** * Get all artifacts associated with this content that have the given type * * @param type type to look up * * @return a list of blackboard artifacts matching the type * * @throws TskCoreException if critical error occurred within tsk core */ public ArrayList getArtifacts(BlackboardArtifact.ARTIFACT_TYPE type) throws TskCoreException; /** * Get all artifacts associated with this content * * @return a list of blackboard artifacts * * @throws TskCoreException if critical error occurred within tsk core */ public ArrayList getAllArtifacts() throws TskCoreException; /** * Get all data artifacts associated with this content. * * @return A list of data artifacts. * * @throws TskCoreException If critical error occurred within tsk core. */ public List getAllDataArtifacts() throws TskCoreException; /** * Get all analysis results associated with this content. * * @return A list of analysis results. * * @throws TskCoreException If critical error occurred within tsk core. */ public List getAllAnalysisResults() throws TskCoreException; /** * Get the names of all the hashsets that this content is in. 
* * @return the names of the hashsets that this content is in * * @throws TskCoreException if critical error occurred within tsk core */ public Set getHashSetNames() throws TskCoreException; /** * Get count of all artifacts associated with this content that have the * given type name * * @param artifactTypeName name of the type to look up * * @return count of blackboard artifacts matching the type * * @throws TskCoreException if critical error occurred within tsk core */ public long getArtifactsCount(String artifactTypeName) throws TskCoreException; /** * Get count of all artifacts associated with this content that have the * given type id * * @param artifactTypeID type id to look up * * @return count of blackboard artifacts matching the type * * @throws TskCoreException if critical error occurred within tsk core */ public long getArtifactsCount(int artifactTypeID) throws TskCoreException; /** * Get count of all artifacts associated with this content that have the * given type * * @param type type to look up * * @return count of blackboard artifacts matching the type * * @throws TskCoreException if critical error occurred within tsk core */ public long getArtifactsCount(BlackboardArtifact.ARTIFACT_TYPE type) throws TskCoreException; /** * Get count of all artifacts associated with this content * * @return count of all blackboard artifacts for this content * * @throws TskCoreException if critical error occurred within tsk core */ public long getAllArtifactsCount() throws TskCoreException; } sleuthkit-4.11.1/bindings/java/src/org/sleuthkit/datamodel/TimelineEventArtifactTypeImpl.java000644 000765 000024 00000020206 14137073413 033226 0ustar00carrierstaff000000 000000 /* * Sleuth Kit Data Model * * Copyright 2018 Basis Technology Corp. * Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.sleuthkit.datamodel; import com.google.common.net.InternetDomainName; import java.text.MessageFormat; import java.util.Optional; import java.util.logging.Level; import java.util.logging.Logger; import org.apache.commons.lang3.StringUtils; import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DOMAIN; /** * Version of TimelineEventType for events based on artifacts */ class TimelineEventArtifactTypeImpl extends TimelineEventTypeImpl { private static final Logger logger = Logger.getLogger(TimelineEventArtifactTypeImpl.class.getName()); static final int EMAIL_FULL_DESCRIPTION_LENGTH_MAX = 150; static final int EMAIL_TO_FROM_LENGTH_MAX = 75; private final BlackboardArtifact.Type artifactType; private final BlackboardAttribute.Type dateTimeAttributeType; private final TSKCoreCheckedFunction fullExtractor; private final TSKCoreCheckedFunction medExtractor; private final TSKCoreCheckedFunction shortExtractor; private final TSKCoreCheckedFunction artifactParsingFunction; protected static final int MAX_SHORT_DESCRIPTION_LENGTH = 500; protected static final int MAX_MED_DESCRIPTION_LENGTH = 500; protected static final int MAX_FULL_DESCRIPTION_LENGTH = 1024; TimelineEventArtifactTypeImpl(int typeID, String displayName, TimelineEventType superType, BlackboardArtifact.Type artifactType, BlackboardAttribute.Type dateTimeAttributeType, TSKCoreCheckedFunction shortExtractor, TSKCoreCheckedFunction medExtractor, TSKCoreCheckedFunction fullExtractor) { this(typeID, displayName, superType, artifactType, dateTimeAttributeType, shortExtractor, 
medExtractor, fullExtractor, null); } TimelineEventArtifactTypeImpl(int typeID, String displayName, TimelineEventType superType, BlackboardArtifact.Type artifactType, BlackboardAttribute.Type dateTimeAttributeType, TSKCoreCheckedFunction shortExtractor, TSKCoreCheckedFunction medExtractor, TSKCoreCheckedFunction fullExtractor, TSKCoreCheckedFunction eventPayloadFunction) { super(typeID, displayName, TimelineEventType.HierarchyLevel.EVENT, superType); this.artifactType = artifactType; this.dateTimeAttributeType = dateTimeAttributeType; this.shortExtractor = shortExtractor; this.medExtractor = medExtractor; this.fullExtractor = fullExtractor; this.artifactParsingFunction = eventPayloadFunction; } int getArtifactTypeID() { return getArtifactType().getTypeID(); } /** * The attribute type this event type is associated with. * * @return The attribute type this event type is derived from. */ BlackboardAttribute.Type getDateTimeAttributeType() { return dateTimeAttributeType; } String extractFullDescription(BlackboardArtifact artf) throws TskCoreException { return fullExtractor.apply(artf); } String extractMedDescription(BlackboardArtifact artf) throws TskCoreException { return medExtractor.apply(artf); } String extractShortDescription(BlackboardArtifact artf) throws TskCoreException { return shortExtractor.apply(artf); } /** * Get the artifact type this event type is derived from. * * @return The artifact type this event type is derived from. */ BlackboardArtifact.Type getArtifactType() { return artifactType; } /** * Parses the artifact to create a triple description with a time. * * @param artifact * * @return * * @throws TskCoreException */ TimelineEventDescriptionWithTime makeEventDescription(BlackboardArtifact artifact) throws TskCoreException { //if we got passed an artifact that doesn't correspond to this event type, //something went very wrong. throw an exception. 
if (this.getArtifactTypeID() != artifact.getArtifactTypeID()) { throw new IllegalArgumentException(); } BlackboardAttribute timeAttribute = artifact.getAttribute(getDateTimeAttributeType()); if (timeAttribute == null) { /* * This has the side effect of making sure that a TimelineEvent * object is not created for this artifact. */ return null; } /* * Use the type-specific method */ if (this.artifactParsingFunction != null) { //use the hook provided by this subtype implementation to build the descriptions. return this.artifactParsingFunction.apply(artifact); } //combine descriptions in standard way String shortDescription = extractShortDescription(artifact); if (shortDescription.length() > MAX_SHORT_DESCRIPTION_LENGTH) { shortDescription = shortDescription.substring(0, MAX_SHORT_DESCRIPTION_LENGTH); } String medDescription = shortDescription + " : " + extractMedDescription(artifact); if (medDescription.length() > MAX_MED_DESCRIPTION_LENGTH) { medDescription = medDescription.substring(0, MAX_MED_DESCRIPTION_LENGTH); } String fullDescription = medDescription + " : " + extractFullDescription(artifact); if (fullDescription.length() > MAX_FULL_DESCRIPTION_LENGTH) { fullDescription = fullDescription.substring(0, MAX_FULL_DESCRIPTION_LENGTH); } return new TimelineEventDescriptionWithTime(timeAttribute.getValueLong(), shortDescription, medDescription, fullDescription); } static BlackboardAttribute getAttributeSafe(BlackboardArtifact artf, BlackboardAttribute.Type attrType) { try { return artf.getAttribute(attrType); } catch (TskCoreException ex) { logger.log(Level.SEVERE, MessageFormat.format("Error getting attribute from artifact {0}.", artf.getArtifactID()), ex); // NON-NLS return null; } } /** * Function that extracts a string representation of the given attribute * from the artifact it is applied to. 
*/ static class AttributeExtractor implements TSKCoreCheckedFunction { private final BlackboardAttribute.Type attributeType; AttributeExtractor(BlackboardAttribute.Type attribute) { this.attributeType = attribute; } @Override public String apply(BlackboardArtifact artf) throws TskCoreException { return Optional.ofNullable(getAttributeSafe(artf, attributeType)) .map(BlackboardAttribute::getDisplayString) .orElse(""); } } /** * Specialization of AttributeExtractor that extract the domain attribute * and then further processes it to obtain the top private domain using * InternetDomainName. */ final static class TopPrivateDomainExtractor extends AttributeExtractor { final private static TopPrivateDomainExtractor instance = new TopPrivateDomainExtractor(); static TopPrivateDomainExtractor getInstance() { return instance; } TopPrivateDomainExtractor() { super(new BlackboardAttribute.Type(TSK_DOMAIN)); } @Override public String apply(BlackboardArtifact artf) throws TskCoreException { String domainString = StringUtils.substringBefore(super.apply(artf), "/"); if (InternetDomainName.isValid(domainString)) { InternetDomainName domain = InternetDomainName.from(domainString); return (domain.isUnderPublicSuffix()) ? domain.topPrivateDomain().toString() : domain.toString(); } else { return domainString; } } } /** * Functinal interface for a function from I to O that throws * TskCoreException. * * @param Input type. * @param Output type. */ @FunctionalInterface interface TSKCoreCheckedFunction { O apply(I input) throws TskCoreException; } } sleuthkit-4.11.1/bindings/java/src/org/sleuthkit/datamodel/Transaction.java000755 000765 000024 00000002722 14137073413 027607 0ustar00carrierstaff000000 000000 /* * Sleuth Kit Data Model * * Copyright 2013 Basis Technology Corp. * Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.sleuthkit.datamodel; /** * interface to encapsulate database transactions * * * */ public interface Transaction { /** * rollback whatever changes this transaction represents */ public void rollback(); /** * check whether this transaction has already been committed * * @return whether this transaction has already been committed */ public Boolean isCommitted(); /** * commit this transaction to the database */ public void commit(); /** * * close this Transaction so it cannot be committed or rolledback. A closed * Transaction no longer has a reference to a db Connection and methods * invoked on a closed Transaction have no effect. */ public void close(); /** * * @return true if this transaction is closed */ public Boolean isClosed(); } sleuthkit-4.11.1/bindings/java/src/org/sleuthkit/datamodel/DataSource.java000755 000765 000024 00000010562 14137073413 027355 0ustar00carrierstaff000000 000000 /* * Sleuth Kit Data Model * * Copyright 2011-2018 Basis Technology Corp. * Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
*/ package org.sleuthkit.datamodel; /** * A data source (e.g., an image, a local disk, a virtual directory of logical * files, etc.). */ public interface DataSource extends Content { /** * Gets the ASCII-printable identifier for the device associated with the * data source. This identifier is intended to be unique across multiple * cases (e.g., a UUID). * * @return The device id. */ String getDeviceId(); /** * Gets the time zone that was used to process the data source. * * @return The time zone. */ String getTimeZone(); /** * Set the name for this data source. * * @param newName The new name for the data source * * @throws TskCoreException Thrown if an error occurs while updating the database */ void setDisplayName(String newName) throws TskCoreException; /** * Gets the size of the contents of the data source in bytes. This size can * change as archive files within the data source are expanded, files are * carved, etc., and is different from the size of the data source as * returned by Content.getSize, which is the size of the data source as a * file. * * @param sleuthkitCase The sleuthkit case instance from which to make calls * to the database. * * @return The size in bytes. * * @throws TskCoreException Thrown when there is an issue trying to retrieve * data from the database. */ long getContentSize(SleuthkitCase sleuthkitCase) throws TskCoreException; /** * Sets the acquisition details field in the case database. * * @param details The acquisition details * * @throws TskCoreException Thrown if the data can not be written */ void setAcquisitionDetails(String details) throws TskCoreException; /** * Sets the acquisition tool details such as its name, version number and * any settings used during the acquisition to acquire data. * * @param name The name of the acquisition tool. May be NULL. * @param version The acquisition tool version number. May be NULL. * @param settings The settings used by the acquisition tool. May be NULL. 
* * @throws TskCoreException Thrown if the data can not be written */ void setAcquisitionToolDetails(String name, String version, String settings) throws TskCoreException; /** * Gets the acquisition details field from the case database. * * @return The acquisition details * * @throws TskCoreException Thrown if the data can not be read */ String getAcquisitionDetails() throws TskCoreException; /** * Gets the acquisition tool settings field from the case database. * * @return The acquisition tool settings. May be Null if not set. * * @throws TskCoreException Thrown if the data can not be read */ String getAcquisitionToolSettings() throws TskCoreException; /** * Gets the acquisition tool name field from the case database. * * @return The acquisition tool name. May be Null if not set. * * @throws TskCoreException Thrown if the data can not be read */ String getAcquisitionToolName() throws TskCoreException; /** * Gets the acquisition tool version field from the case database. * * @return The acquisition tool version. May be Null if not set. * * @throws TskCoreException Thrown if the data can not be read */ String getAcquisitionToolVersion() throws TskCoreException; /** * Gets the added date field from the case database. * * @return The date time when the image was added in epoch seconds. * * @throws TskCoreException Thrown if the data can not be read */ Long getDateAdded() throws TskCoreException; /** * Gets the host for this data source. * * @return The host * * @throws TskCoreException */ Host getHost() throws TskCoreException; } sleuthkit-4.11.1/bindings/java/src/org/sleuthkit/datamodel/blackboardutils/GeoArtifactsHelper.java000755 000765 000024 00000024357 14137073414 034213 0ustar00carrierstaff000000 000000 /* * Sleuth Kit Data Model * * Copyright 2020 Basis Technology Corp. * Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.sleuthkit.datamodel.blackboardutils; import java.util.ArrayList; import java.util.List; import org.sleuthkit.datamodel.AbstractFile; import org.sleuthkit.datamodel.Blackboard.BlackboardException; import org.sleuthkit.datamodel.BlackboardArtifact; import org.sleuthkit.datamodel.BlackboardAttribute; import org.sleuthkit.datamodel.Content; import org.sleuthkit.datamodel.SleuthkitCase; import org.sleuthkit.datamodel.TskCoreException; import org.sleuthkit.datamodel.blackboardutils.attributes.BlackboardJsonAttrUtil; import org.sleuthkit.datamodel.blackboardutils.attributes.GeoWaypoints; import org.sleuthkit.datamodel.blackboardutils.attributes.GeoTrackPoints; import org.sleuthkit.datamodel.blackboardutils.attributes.GeoAreaPoints; /** * An artifact creation helper that adds geolocation artifacts to the case * database. 
*/ public final class GeoArtifactsHelper extends ArtifactHelperBase { private static final BlackboardAttribute.Type WAYPOINTS_ATTR_TYPE = new BlackboardAttribute.Type(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_GEO_WAYPOINTS); private static final BlackboardAttribute.Type TRACKPOINTS_ATTR_TYPE = new BlackboardAttribute.Type(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_GEO_TRACKPOINTS); private static final BlackboardAttribute.Type AREAPOINTS_ATTR_TYPE = new BlackboardAttribute.Type(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_GEO_AREAPOINTS); private static final BlackboardArtifact.Type GPS_TRACK_TYPE = new BlackboardArtifact.Type(BlackboardArtifact.ARTIFACT_TYPE.TSK_GPS_TRACK); private static final BlackboardArtifact.Type GPS_ROUTE_TYPE = new BlackboardArtifact.Type(BlackboardArtifact.ARTIFACT_TYPE.TSK_GPS_ROUTE); private static final BlackboardArtifact.Type GPS_AREA_TYPE = new BlackboardArtifact.Type(BlackboardArtifact.ARTIFACT_TYPE.TSK_GPS_AREA); private final String programName; /** * Constructs an artifact creation helper that adds geolocation artifacts to * the case database. * * @param caseDb The case database. * @param moduleName The name of the module creating the artifacts. * @param programName The name of the user application associated with the * geolocation data to be recorded as artifacts, may be * null. If a program name is supplied, it will be added * to each artifact that is created as a TSK_PROG_NAME * attribute. * @param srcContent The source content for the artifacts, i.e., either a * file within a data source or a data source. */ public GeoArtifactsHelper(SleuthkitCase caseDb, String moduleName, String programName, Content srcContent) { super(caseDb, moduleName, srcContent); this.programName = programName; } /** * Adds a TSK_GPS_TRACK artifact to the case database. A Global Positioning * System (GPS) track artifact records the track, or path, of a GPS-enabled * device as a connected series of track points. 
A track point is a location * in a geographic coordinate system with latitude, longitude and altitude * (elevation) axes. * * @param trackName The name of the GPS track, may be null. * @param trackPoints The track points that make up the track. This list * should be non-null and non-empty. * @param moreAttributes Additional attributes for the TSK_GPS_TRACK * artifact, may be null. * * @return The TSK_GPS_TRACK artifact that was added to the case database. * * @throws TskCoreException If there is an error creating the * artifact. * @throws BlackboardException If there is a error posting the artifact * to the blackboard. * @throws IllegalArgumentException If the trackpoints provided are null or * empty. */ public BlackboardArtifact addTrack(String trackName, GeoTrackPoints trackPoints, List moreAttributes) throws TskCoreException, BlackboardException { if (trackPoints == null || trackPoints.isEmpty()) { throw new IllegalArgumentException(String.format("addTrack was passed a null or empty list of track points")); } List attributes = new ArrayList<>(); if (trackName != null) { attributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_NAME, getModuleName(), trackName)); } // acquire necessary attribute. If 'toAttribute' call throws an exception, an artifact will not be created for this instance. attributes.add(BlackboardJsonAttrUtil.toAttribute(TRACKPOINTS_ATTR_TYPE, getModuleName(), trackPoints)); if (programName != null) { attributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_PROG_NAME, getModuleName(), programName)); } if (moreAttributes != null) { attributes.addAll(moreAttributes); } Content content = getContent(); BlackboardArtifact artifact = content.newDataArtifact(GPS_TRACK_TYPE, attributes); getSleuthkitCase().getBlackboard().postArtifact(artifact, getModuleName()); return artifact; } /** * Adds a TSK_GPS_ROUTE artifact to the case database. 
A Global Positioning * System (GPS) route artifact records one or more waypoints entered into a * GPS-enabled device as a route to be navigated from waypoint to waypoint. * A waypoint is a location in a geographic coordinate system with latitude, * longitude and altitude (elevation) axes. * * @param routeName The name of the GPS route, may be null. * @param creationTime The time at which the route was created as * milliseconds from the Java epoch of * 1970-01-01T00:00:00Z, may be null. * @param wayPoints The waypoints that make up the route. This list * should be non-null and non-empty. * @param moreAttributes Additional attributes for the TSK_GPS_ROUTE * artifact, may be null. * * @return The TSK_GPS_ROUTE artifact that was added to the case database. * * @throws TskCoreException If there is an error creating the * artifact. * @throws BlackboardException If there is a error posting the artifact * to the blackboard. * @throws IllegalArgumentException If the waypoints provided are null or * empty. 
*/ public BlackboardArtifact addRoute(String routeName, Long creationTime, GeoWaypoints wayPoints, List moreAttributes) throws TskCoreException, BlackboardException { if (wayPoints == null || wayPoints.isEmpty()) { throw new IllegalArgumentException(String.format("addRoute was passed a null or empty list of waypoints")); } List attributes = new ArrayList<>(); attributes.add(BlackboardJsonAttrUtil.toAttribute(WAYPOINTS_ATTR_TYPE, getModuleName(), wayPoints)); if (routeName != null) { attributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_NAME, getModuleName(), routeName)); } if (creationTime != null) { attributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DATETIME, getModuleName(), creationTime)); } if (programName != null) { attributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_PROG_NAME, getModuleName(), programName)); } if (moreAttributes != null) { attributes.addAll(moreAttributes); } Content content = getContent(); BlackboardArtifact artifact = content.newDataArtifact(GPS_ROUTE_TYPE, attributes); getSleuthkitCase().getBlackboard().postArtifact(artifact, getModuleName()); return artifact; } /** * Adds a TSK_GPS_AREA artifact to the case database. A Global Positioning * System (GPS) area artifact records an area on the map outlines by an * ordered set of GPS coordinates. * * @param areaName The name of the GPS area, may be null. * @param areaPoints The points that make up the outline of the area. * This list should be non-null and non-empty. * @param moreAttributes Additional attributes for the TSK_GPS_AREA * artifact, may be null. * * @return The TSK_GPS_AREA artifact that was added to the case database. * * @throws TskCoreException If there is an error creating the * artifact. * @throws BlackboardException If there is a error posting the artifact * to the blackboard. * @throws IllegalArgumentException If the area points provided are null or * empty. 
*/ public BlackboardArtifact addArea(String areaName, GeoAreaPoints areaPoints, List moreAttributes) throws TskCoreException, BlackboardException { if (areaPoints == null || areaPoints.isEmpty()) { throw new IllegalArgumentException(String.format("addArea was passed a null or empty list of points")); } List attributes = new ArrayList<>(); attributes.add(BlackboardJsonAttrUtil.toAttribute(AREAPOINTS_ATTR_TYPE, getModuleName(), areaPoints)); if (areaName != null) { attributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_NAME, getModuleName(), areaName)); } if (programName != null) { attributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_PROG_NAME, getModuleName(), programName)); } if (moreAttributes != null) { attributes.addAll(moreAttributes); } Content content = getContent(); BlackboardArtifact artifact = content.newDataArtifact(GPS_AREA_TYPE, attributes); getSleuthkitCase().getBlackboard().postArtifact(artifact, getModuleName()); return artifact; } } sleuthkit-4.11.1/bindings/java/src/org/sleuthkit/datamodel/blackboardutils/ArtifactsHelper.java000644 000765 000024 00000007716 14137073414 033555 0ustar00carrierstaff000000 000000 /* * Sleuth Kit Data Model * * Copyright 2019 Basis Technology Corp. * Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
*/ package org.sleuthkit.datamodel.blackboardutils; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.Optional; import org.sleuthkit.datamodel.AbstractFile; import org.sleuthkit.datamodel.Blackboard.BlackboardException; import org.sleuthkit.datamodel.BlackboardArtifact; import org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE; import org.sleuthkit.datamodel.BlackboardAttribute; import org.sleuthkit.datamodel.Content; import org.sleuthkit.datamodel.OsAccount; import org.sleuthkit.datamodel.SleuthkitCase; import org.sleuthkit.datamodel.TskCoreException; /** * This class helps ingest modules create miscellaneous artifacts. * */ public final class ArtifactsHelper extends ArtifactHelperBase { private static final BlackboardArtifact.Type INSTALLED_PROG_TYPE = new BlackboardArtifact.Type(ARTIFACT_TYPE.TSK_INSTALLED_PROG); /** * Creates an artifact helper for modules to create artifacts. * * @param caseDb Sleuthkit case database. * @param moduleName Name of module using the helper. * @param srcContent Source content for the artifacts. * */ public ArtifactsHelper(SleuthkitCase caseDb, String moduleName, Content srcContent) { super(caseDb, moduleName, srcContent); } /** * Adds a TSK_INSTALLED_PROGRAM artifact. * * @param programName Name of program, required. * @param dateInstalled Date/time of install, can be 0 if not available. * * @return Installed program artifact added. * * @throws TskCoreException If there is an error creating the artifact. * @throws BlackboardException If there is a problem posting the artifact. */ public BlackboardArtifact addInstalledProgram(String programName, long dateInstalled) throws TskCoreException, BlackboardException { return addInstalledProgram(programName, dateInstalled, Collections.emptyList()); } /** * Adds a TSK_INSTALLED_PROGRAM artifact. * * @param programName Name of program, required. * @param dateInstalled Date/time of install, can be 0 if not * available. 
* @param otherAttributesList Additional attributes, can be an empty list if * no additional attributes. * * @return Installed program artifact added. * * @throws TskCoreException If there is an error creating the artifact. * @throws BlackboardException If there is a problem posting the artifact. */ public BlackboardArtifact addInstalledProgram(String programName, long dateInstalled, Collection otherAttributesList) throws TskCoreException, BlackboardException { Collection attributes = new ArrayList<>(); // construct attributes attributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_PROG_NAME, getModuleName(), programName)); addAttributeIfNotZero(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DATETIME, dateInstalled, attributes); // add the attributes attributes.addAll(otherAttributesList); // create artifact Content content = getContent(); BlackboardArtifact installedProgramArtifact = content.newDataArtifact(INSTALLED_PROG_TYPE, attributes); // post artifact getSleuthkitCase().getBlackboard().postArtifact(installedProgramArtifact, getModuleName()); // return the artifact return installedProgramArtifact; } } sleuthkit-4.11.1/bindings/java/src/org/sleuthkit/datamodel/blackboardutils/attributes/000755 000765 000024 00000000000 14137073560 032007 5ustar00carrierstaff000000 000000 bindings/java/src/org/sleuthkit/datamodel/blackboardutils/CommunicationArtifactsHelper.java000644 000765 000024 00000116276 14137073413 036225 0ustar00carrierstaff000000 000000 sleuthkit-4.11.1/* * Sleuth Kit Data Model * * Copyright 2019-2020 Basis Technology Corp. * Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.sleuthkit.datamodel.blackboardutils; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.List; import java.util.Optional; import java.util.logging.Level; import java.util.logging.Logger; import org.apache.commons.lang3.StringUtils; import org.sleuthkit.datamodel.AbstractFile; import org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE; import org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE; import org.sleuthkit.datamodel.Account; import org.sleuthkit.datamodel.AccountFileInstance; import org.sleuthkit.datamodel.Blackboard.BlackboardException; import org.sleuthkit.datamodel.BlackboardArtifact; import org.sleuthkit.datamodel.BlackboardAttribute; import org.sleuthkit.datamodel.Content; import org.sleuthkit.datamodel.DataSource; import org.sleuthkit.datamodel.InvalidAccountIDException; import org.sleuthkit.datamodel.OsAccount; import org.sleuthkit.datamodel.Relationship; import org.sleuthkit.datamodel.SleuthkitCase; import org.sleuthkit.datamodel.TskCoreException; import org.sleuthkit.datamodel.TskDataException; import org.sleuthkit.datamodel.blackboardutils.attributes.BlackboardJsonAttrUtil; import org.sleuthkit.datamodel.blackboardutils.attributes.MessageAttachments; import org.sleuthkit.datamodel.blackboardutils.attributes.MessageAttachments.FileAttachment; /** * Class to help ingest modules create communication artifacts. Communication * artifacts includes contacts, messages, call logs. 
* * It creates a 'self' account {@link Account} - an account for the owner/user * of the application being processed by the module. As an example, for a module * analyzing Facebook application, this would be account associated with the * unique Facebook user id of the device owner. * * In the absence of a 'self' account, a 'device' account may be used in it's * place. A 'device' account is an account meant to represent the owner of the * device and uses the unique device id as the unique account identifier. * * It also creates accounts for contacts, and sender/receivers of the messages, * and calls. * * And it also creates relationships between the self account - and the contacts * and sender/receiver accounts. * */ public final class CommunicationArtifactsHelper extends ArtifactHelperBase { private static final Logger LOGGER = Logger.getLogger(CommunicationArtifactsHelper.class.getName()); private static final BlackboardArtifact.Type CONTACT_TYPE = new BlackboardArtifact.Type(ARTIFACT_TYPE.TSK_CONTACT); private static final BlackboardArtifact.Type MESSAGE_TYPE = new BlackboardArtifact.Type(ARTIFACT_TYPE.TSK_MESSAGE); private static final BlackboardArtifact.Type CALLOG_TYPE = new BlackboardArtifact.Type(ARTIFACT_TYPE.TSK_CALLLOG); private static final BlackboardArtifact.Type ASSOCIATED_OBJ_TYPE = new BlackboardArtifact.Type(ARTIFACT_TYPE.TSK_ASSOCIATED_OBJECT); /** * Enum for message read status */ public enum MessageReadStatus { UNKNOWN("Unknown"), /// read status is unknown UNREAD("Unread"), /// message has not been read READ("Read"); /// message has been read private final String msgReadStr; MessageReadStatus(String readStatus) { this.msgReadStr = readStatus; } public String getDisplayName() { return msgReadStr; } } /** * Enum for call/message direction. 
*/ public enum CommunicationDirection { UNKNOWN("Unknown"), INCOMING("Incoming"), OUTGOING("Outgoing"); private final String dirStr; CommunicationDirection(String dir) { this.dirStr = dir; } public String getDisplayName() { return dirStr; } } /** * Enum for call media type. */ public enum CallMediaType { UNKNOWN("Unknown"), AUDIO("Audio"), // Audio only call VIDEO("Video"); // Video/multimedia call private final String typeStr; CallMediaType(String type) { this.typeStr = type; } public String getDisplayName() { return typeStr; } } private static final BlackboardAttribute.Type ATTACHMENTS_ATTR_TYPE = new BlackboardAttribute.Type(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_ATTACHMENTS); // 'self' account for the application being processed by the module. private final Account.Type selfAccountType; private final String selfAccountId; private AccountFileInstance selfAccountInstance = null; // Type of accounts to be created for the module using this helper. private final Account.Type moduleAccountsType; /** * Constructs a communications artifacts helper for the given source file. * * This is a constructor for modules that do not have a 'self' account, and * will use a 'Device' account in lieu. * * It creates a DeviceAccount instance to use as a self account. * * @param caseDb Sleuthkit case db. * @param moduleName Name of module using the helper. * @param srcContent Source content being processed by the module. * @param accountsType Account type {@link Account.Type} created by this * module. * * @throws TskCoreException If there is an error creating the device * account. 
*/ public CommunicationArtifactsHelper(SleuthkitCase caseDb, String moduleName, Content srcContent, Account.Type accountsType) throws TskCoreException { super(caseDb, moduleName, srcContent); this.moduleAccountsType = accountsType; this.selfAccountType = Account.Type.DEVICE; this.selfAccountId = ((DataSource) getContent().getDataSource()).getDeviceId(); } /** * Constructs a communications artifacts helper for the given source file. * * This constructor is for modules that have the application specific * account information for the device owner to create a 'self' account. * * It creates an account instance with specified type & id, and uses it as * the self account. * * @param caseDb Sleuthkit case db. * @param moduleName Name of module using the helper. * @param srcContent Source content being processed by the module. * @param accountsType Account type {@link Account.Type} created by this * module. * @param selfAccountType Self account type to be created for this module. * @param selfAccountId Account unique id for the self account. * * @throws TskCoreException If there is an error creating the self account */ public CommunicationArtifactsHelper(SleuthkitCase caseDb, String moduleName, Content srcContent, Account.Type accountsType, Account.Type selfAccountType, String selfAccountId) throws TskCoreException { super(caseDb, moduleName, srcContent); this.moduleAccountsType = accountsType; this.selfAccountType = selfAccountType; this.selfAccountId = selfAccountId; } /** * Creates and adds a TSK_CONTACT artifact to the case, with specified * attributes. Also creates an account instance of specified type for the * contact with the specified ID. * * @param contactName Contact name, required. * @param phoneNumber Primary phone number for contact, may be empty * or null. * @param homePhoneNumber Home phone number, may be empty or null. * @param mobilePhoneNumber Mobile phone number, may be empty or null. 
* @param emailAddr Email address for the contact, may be empty or * null. * * At least one phone number or email address is required. * * @return Contact artifact created. * * @throws TskCoreException If there is an error creating the artifact. * @throws BlackboardException If there is a problem posting the artifact. * */ public BlackboardArtifact addContact(String contactName, String phoneNumber, String homePhoneNumber, String mobilePhoneNumber, String emailAddr) throws TskCoreException, BlackboardException { return addContact(contactName, phoneNumber, homePhoneNumber, mobilePhoneNumber, emailAddr, Collections.emptyList()); } /** * Creates and adds a TSK_CONTACT artifact to the case, with specified * attributes. Also creates an account instance for the contact with the * specified ID. * * @param contactName Contact name, may be empty or null. * @param phoneNumber Primary phone number for contact, may be * empty or null. * @param homePhoneNumber Home phone number, may be empty or null. * @param mobilePhoneNumber Mobile phone number, may be empty or null. * @param emailAddr Email address for the contact, may be empty * or null. * * At least one phone number or email address or an Id is required. An Id * may be passed in as a TSK_ID attribute in additionalAttributes. * * @param additionalAttributes Additional attributes for contact, may be an * empty list. * * @return contact artifact created. * * @throws TskCoreException If there is an error creating the artifact. * @throws BlackboardException If there is a problem posting the artifact. 
* */ public BlackboardArtifact addContact(String contactName, String phoneNumber, String homePhoneNumber, String mobilePhoneNumber, String emailAddr, Collection additionalAttributes) throws TskCoreException, BlackboardException { // check if the caller has included any phone/email/id in addtional attributes boolean hasAnyIdAttribute = false; if (additionalAttributes != null) { for (BlackboardAttribute attr : additionalAttributes) { if ((attr.getAttributeType().getTypeName().startsWith("TSK_PHONE")) || (attr.getAttributeType().getTypeName().startsWith("TSK_EMAIL")) || (attr.getAttributeType().getTypeName().startsWith("TSK_ID"))) { hasAnyIdAttribute = true; break; } } } // At least one phone number or email address // or an optional attribute with phone/email/id must be provided if (StringUtils.isEmpty(phoneNumber) && StringUtils.isEmpty(homePhoneNumber) && StringUtils.isEmpty(mobilePhoneNumber) && StringUtils.isEmpty(emailAddr) && (!hasAnyIdAttribute)) { throw new IllegalArgumentException("At least one phone number or email address or an id must be provided."); } BlackboardArtifact contactArtifact; Collection attributes = new ArrayList<>(); // create TSK_CONTACT artifact and construct attributes addAttributeIfNotNull(ATTRIBUTE_TYPE.TSK_NAME, contactName, attributes); addAttributeIfNotNull(ATTRIBUTE_TYPE.TSK_PHONE_NUMBER, phoneNumber, attributes); addAttributeIfNotNull(ATTRIBUTE_TYPE.TSK_PHONE_NUMBER_HOME, homePhoneNumber, attributes); addAttributeIfNotNull(ATTRIBUTE_TYPE.TSK_PHONE_NUMBER_MOBILE, mobilePhoneNumber, attributes); addAttributeIfNotNull(ATTRIBUTE_TYPE.TSK_EMAIL, emailAddr, attributes); // add attributes attributes.addAll(additionalAttributes); Content content = getContent(); contactArtifact = content.newDataArtifact(CONTACT_TYPE, attributes); // create an account for each specified contact method, and a relationship with self account createContactMethodAccountAndRelationship(Account.Type.PHONE, phoneNumber, contactArtifact, 0); 
createContactMethodAccountAndRelationship(Account.Type.PHONE, homePhoneNumber, contactArtifact, 0); createContactMethodAccountAndRelationship(Account.Type.PHONE, mobilePhoneNumber, contactArtifact, 0); createContactMethodAccountAndRelationship(Account.Type.EMAIL, emailAddr, contactArtifact, 0); // if the additional attribute list has any phone/email/id attributes, create accounts & relationships for those. if ((additionalAttributes != null) && hasAnyIdAttribute) { for (BlackboardAttribute bba : additionalAttributes) { if (bba.getAttributeType().getTypeName().startsWith("TSK_PHONE")) { createContactMethodAccountAndRelationship(Account.Type.PHONE, bba.getValueString(), contactArtifact, 0); } else if (bba.getAttributeType().getTypeName().startsWith("TSK_EMAIL")) { createContactMethodAccountAndRelationship(Account.Type.EMAIL, bba.getValueString(), contactArtifact, 0); } else if (bba.getAttributeType().getTypeName().startsWith("TSK_ID")) { createContactMethodAccountAndRelationship(this.moduleAccountsType, bba.getValueString(), contactArtifact, 0); } } } // post artifact getSleuthkitCase().getBlackboard().postArtifact(contactArtifact, getModuleName()); return contactArtifact; } /** * Creates a contact's account instance of specified account type, if the * account id is not null/empty and is a valid account id for the account * type. * * Also creates a CONTACT relationship between the self account and the new * contact account. 
*/ private void createContactMethodAccountAndRelationship(Account.Type accountType, String accountUniqueID, BlackboardArtifact sourceArtifact, long dateTime) throws TskCoreException { // Find/Create an account instance for each of the contact method // Create a relationship between selfAccount and contactAccount if (StringUtils.isNotBlank(accountUniqueID)) { try { AccountFileInstance contactAccountInstance = createAccountInstance(accountType, accountUniqueID); // Create a relationship between self account and the contact account try { getSleuthkitCase().getCommunicationsManager().addRelationships(getSelfAccountInstance(), Collections.singletonList(contactAccountInstance), sourceArtifact, Relationship.Type.CONTACT, dateTime); } catch (TskDataException ex) { throw new TskCoreException(String.format("Failed to create relationship between account = %s and account = %s.", getSelfAccountInstance().getAccount(), contactAccountInstance.getAccount()), ex); } } catch (InvalidAccountIDException ex) { LOGGER.log(Level.WARNING, String.format("Failed to create account with id %s", accountUniqueID)); } } } /** * Creates an account file instance{@link AccountFileInstance} associated * with the DB file. * * @param accountType Type of account to create. * @param accountUniqueID Unique id for the account. * * @return Account instance created. * * @throws TskCoreException If there is an error creating the account * instance. */ private AccountFileInstance createAccountInstance(Account.Type accountType, String accountUniqueID) throws TskCoreException, InvalidAccountIDException { return getSleuthkitCase().getCommunicationsManager().createAccountFileInstance(accountType, accountUniqueID, getModuleName(), getContent()); } /** * Adds a TSK_MESSAGE artifact. * * Also creates an account instance for the sender/receiver, and creates a * relationship between the self account and the sender/receiver account. * * @param messageType Message type, required. 
* @param direction Message direction, UNKNOWN if not available. * @param senderId Sender address id, may be null. * @param recipientId Recipient id, may be null. * @param dateTime Date/time of message, 0 if not available. * @param readStatus Message read status, UNKNOWN if not available. * @param subject Message subject, may be empty or null. * @param messageText Message body, may be empty or null. * @param threadId Message thread id, may be empty or null. * * @return Message artifact. * * @throws TskCoreException If there is an error creating the artifact. * @throws BlackboardException If there is a problem posting the artifact. */ public BlackboardArtifact addMessage( String messageType, CommunicationDirection direction, String senderId, String recipientId, long dateTime, MessageReadStatus readStatus, String subject, String messageText, String threadId) throws TskCoreException, BlackboardException { return addMessage(messageType, direction, senderId, recipientId, dateTime, readStatus, subject, messageText, threadId, Collections.emptyList()); } /** * Adds a TSK_MESSAGE artifact. * * Also creates an account instance for the sender/receiver, and creates a * relationship between the self account and the sender/receiver account. * * @param messageType Message type, required. * @param direction Message direction, UNKNOWN if not available. * @param senderId Sender id, may be null. * @param recipientId Recipient id, may be null. * @param dateTime Date/time of message, 0 if not available. * @param readStatus Message read status, UNKNOWN if not available. * @param subject Message subject, may be empty or null. * @param messageText Message body, may be empty or null. * @param threadId Message thread id, may be empty or null. * @param otherAttributesList Additional attributes, may be an empty list. * * @return Message artifact. * * @throws TskCoreException If there is an error creating the artifact. * @throws BlackboardException If there is a problem posting the artifact. 
*/ public BlackboardArtifact addMessage(String messageType, CommunicationDirection direction, String senderId, String recipientId, long dateTime, MessageReadStatus readStatus, String subject, String messageText, String threadId, Collection otherAttributesList) throws TskCoreException, BlackboardException { return addMessage(messageType, direction, senderId, Arrays.asList(recipientId), dateTime, readStatus, subject, messageText, threadId, otherAttributesList); } /** * Adds a TSK_MESSAGE artifact. * * Also creates an account instance for the sender/receiver, and creates a * relationship between the self account and the sender/receiver accounts. * * * @param messageType Message type, required. * @param direction Message direction, UNKNOWN if not available. * @param senderId Sender id, may be null. * @param recipientIdsList Recipient ids list, may be null or empty list. * @param dateTime Date/time of message, 0 if not available. * @param readStatus Message read status, UNKNOWN if not available. * @param subject Message subject, may be empty or null. * @param messageText Message body, may be empty or null. * @param threadId Message thread id, may be empty or null. * * @return Message artifact. * * @throws TskCoreException If there is an error creating the artifact. * @throws BlackboardException If there is a problem posting the artifact. */ public BlackboardArtifact addMessage(String messageType, CommunicationDirection direction, String senderId, List recipientIdsList, long dateTime, MessageReadStatus readStatus, String subject, String messageText, String threadId) throws TskCoreException, BlackboardException { return addMessage(messageType, direction, senderId, recipientIdsList, dateTime, readStatus, subject, messageText, threadId, Collections.emptyList()); } /** * Adds a TSK_MESSAGE artifact. * * Also creates accounts for the sender/receivers, and creates relationships * between the sender/receivers account. * * @param messageType Message type, required. 
* @param direction Message direction, UNKNOWN if not available. * @param senderId Sender id, may be null. * @param recipientIdsList Recipient list, may be null or empty an list. * @param dateTime Date/time of message, 0 if not available. * @param readStatus Message read status, UNKNOWN if not available. * @param subject Message subject, may be empty or null. * @param messageText Message body, may be empty or null. * @param threadId Message thread id, may be empty or null. * @param otherAttributesList Other attributes, may be an empty list. * * @return Message artifact. * * @throws TskCoreException If there is an error creating the artifact. * @throws BlackboardException If there is a problem posting the artifact. */ public BlackboardArtifact addMessage(String messageType, CommunicationDirection direction, String senderId, List recipientIdsList, long dateTime, MessageReadStatus readStatus, String subject, String messageText, String threadId, Collection otherAttributesList) throws TskCoreException, BlackboardException { // Created message artifact. 
Collection attributes = new ArrayList<>(); // construct attributes attributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_MESSAGE_TYPE, getModuleName(), messageType)); addAttributeIfNotZero(ATTRIBUTE_TYPE.TSK_DATETIME, dateTime, attributes); addMessageReadStatusIfKnown(readStatus, attributes); addCommDirectionIfKnown(direction, attributes); // Get the self account instance AccountFileInstance selfAccountInstanceLocal = null; try { selfAccountInstanceLocal = getSelfAccountInstance(); } catch (InvalidAccountIDException ex) { LOGGER.log(Level.WARNING, String.format("Failed to get/create self account with id %s", selfAccountId), ex); } // set sender attribute and create sender account AccountFileInstance senderAccountInstance = null; if (StringUtils.isNotBlank(senderId)) { try { senderAccountInstance = createAccountInstance(moduleAccountsType, senderId); } catch (InvalidAccountIDException ex) { LOGGER.log(Level.WARNING, String.format("Invalid account identifier %s", senderId)); } } // set recipient attribute and create recipient accounts List recipientAccountsList = new ArrayList<>(); String recipientsStr = ""; if (!isEffectivelyEmpty(recipientIdsList)) { for (String recipient : recipientIdsList) { if (StringUtils.isNotBlank(recipient)) { try { recipientAccountsList.add(createAccountInstance(moduleAccountsType, recipient)); } catch (InvalidAccountIDException ex) { LOGGER.log(Level.WARNING, String.format("Invalid account identifier %s", recipient)); } } } // Create a comma separated string of recipients recipientsStr = addressListToString(recipientIdsList); } switch (direction) { case OUTGOING: // if no sender, selfAccount substitutes caller. 
if (StringUtils.isEmpty(senderId) && selfAccountInstanceLocal != null) { senderAccountInstance = selfAccountInstanceLocal; } // sender becomes PHONE_FROM if (senderAccountInstance != null) { addAttributeIfNotNull(ATTRIBUTE_TYPE.TSK_PHONE_NUMBER_FROM, senderAccountInstance.getAccount().getTypeSpecificID(), attributes); } // recipient becomes PHONE_TO addAttributeIfNotNull(ATTRIBUTE_TYPE.TSK_PHONE_NUMBER_TO, recipientsStr, attributes); break; case INCOMING: // if no recipeint specified, selfAccount substitutes recipient if (isEffectivelyEmpty(recipientIdsList) && selfAccountInstanceLocal != null) { recipientsStr = selfAccountInstanceLocal.getAccount().getTypeSpecificID(); recipientAccountsList.add(selfAccountInstanceLocal); } // caller becomes PHONE_FROM if (senderAccountInstance != null) { addAttributeIfNotNull(ATTRIBUTE_TYPE.TSK_PHONE_NUMBER_FROM, senderAccountInstance.getAccount().getTypeSpecificID(), attributes); } // callee becomes PHONE_TO addAttributeIfNotNull(ATTRIBUTE_TYPE.TSK_PHONE_NUMBER_TO, recipientsStr, attributes); break; default: // direction UNKNOWN if (StringUtils.isEmpty(senderId) && selfAccountInstanceLocal != null) { // if no sender, selfAccount substitutes caller. 
senderAccountInstance = selfAccountInstanceLocal; } else if (isEffectivelyEmpty(recipientIdsList) && selfAccountInstanceLocal != null) { // else if no recipient specified, selfAccount substitutes recipient recipientsStr = selfAccountInstanceLocal.getAccount().getTypeSpecificID(); recipientAccountsList.add(selfAccountInstanceLocal); } // save phone numbers in direction agnostic attributes if (senderAccountInstance != null) { addAttributeIfNotNull(ATTRIBUTE_TYPE.TSK_PHONE_NUMBER, senderAccountInstance.getAccount().getTypeSpecificID(), attributes); } // callee becomes PHONE_TO addAttributeIfNotNull(ATTRIBUTE_TYPE.TSK_PHONE_NUMBER, recipientsStr, attributes); break; } addAttributeIfNotNull(ATTRIBUTE_TYPE.TSK_SUBJECT, subject, attributes); addAttributeIfNotNull(ATTRIBUTE_TYPE.TSK_TEXT, messageText, attributes); addAttributeIfNotNull(ATTRIBUTE_TYPE.TSK_THREAD_ID, threadId, attributes); // add attributes to artifact attributes.addAll(otherAttributesList); // create TSK_MESSAGE artifact Content content = getContent(); BlackboardArtifact msgArtifact = content.newDataArtifact(MESSAGE_TYPE, attributes); // create sender/recipient relationships try { getSleuthkitCase().getCommunicationsManager().addRelationships(senderAccountInstance, recipientAccountsList, msgArtifact, Relationship.Type.MESSAGE, dateTime); } catch (TskDataException ex) { throw new TskCoreException(String.format("Failed to create Message relationships between sender account = %s and recipients = %s.", (senderAccountInstance != null) ? senderAccountInstance.getAccount().getTypeSpecificID() : "Unknown", recipientsStr), ex); } // post artifact getSleuthkitCase().getBlackboard().postArtifact(msgArtifact, getModuleName()); // return the artifact return msgArtifact; } /** * Adds a TSK_CALLLOG artifact. * * Also creates an account instance for the caller/callee, and creates a * relationship between the self account and the caller account as well * between the self account and the callee account. 
* * @param direction Call direction, UNKNOWN if not available. * @param callerId Caller id, may be null. * @param calleeId Callee id, may be null. * * At least one of the two must be provided - the caller Id, or a callee id. * * @param startDateTime Start date/time, 0 if not available. * @param endDateTime End date/time, 0 if not available. * @param mediaType Media type. * * @return Call log artifact. * * @throws TskCoreException If there is an error creating the artifact. * @throws BlackboardException If there is a problem posting the artifact. */ public BlackboardArtifact addCalllog(CommunicationDirection direction, String callerId, String calleeId, long startDateTime, long endDateTime, CallMediaType mediaType) throws TskCoreException, BlackboardException { return addCalllog(direction, callerId, calleeId, startDateTime, endDateTime, mediaType, Collections.emptyList()); } /** * Adds a TSK_CALLLOG artifact. * * Also creates an account instance for the caller/callee, and creates a * relationship between the self account and the caller account as well * between the self account and the callee account. * * @param direction Call direction, UNKNOWN if not available. * @param callerId Caller id, may be null. * @param calleeId Callee id, may be null. * * At least one of the two must be provided - the caller Id, or a callee id. * * @param startDateTime Start date/time, 0 if not available. * @param endDateTime End date/time, 0 if not available. * @param mediaType Media type. * @param otherAttributesList Other attributes. * * @return Call log artifact. * * @throws TskCoreException If there is an error creating the artifact. * @throws BlackboardException If there is a problem posting the artifact. 
*/ public BlackboardArtifact addCalllog(CommunicationDirection direction, String callerId, String calleeId, long startDateTime, long endDateTime, CallMediaType mediaType, Collection otherAttributesList) throws TskCoreException, BlackboardException { return addCalllog(direction, callerId, Arrays.asList(calleeId), startDateTime, endDateTime, mediaType, otherAttributesList); } /** * Adds a TSK_CALLLOG artifact. * * Also creates an account instance for the caller/callees, and creates a * relationship between the self account and the caller account as well * between the self account and each callee account. * * @param direction Call direction, UNKNOWN if not available. * @param callerId Caller id, may be null. * @param calleeIdsList Callee list, may be an empty list. * * At least one of the two must be provided - the caller Id, or a callee id. * * @param startDateTime Start date/time, 0 if not available. * @param endDateTime End date/time, 0 if not available. * @param mediaType Call media type, UNKNOWN if not available. * * @return Call log artifact. * * @throws TskCoreException If there is an error creating the artifact. * @throws BlackboardException If there is a problem posting the artifact. */ public BlackboardArtifact addCalllog(CommunicationDirection direction, String callerId, Collection calleeIdsList, long startDateTime, long endDateTime, CallMediaType mediaType) throws TskCoreException, BlackboardException { return addCalllog(direction, callerId, calleeIdsList, startDateTime, endDateTime, mediaType, Collections.emptyList()); } /** * Adds a TSK_CALLLOG artifact. * * Also creates an account instance for the caller and each of the callees, * and creates relationships between caller and callees. * * @param direction Call direction, UNKNOWN if not available. * @param callerId Caller id, required for incoming call. * @param calleeIdsList Callee ids list, required for an outgoing * call. * * At least one of the two must be provided - the caller Id, or a callee id. 
* * @param startDateTime Start date/time, 0 if not available. * @param endDateTime End date/time, 0 if not available. * @param mediaType Call media type, UNKNOWN if not available. * @param otherAttributesList other attributes, can be an empty list * * @return Call log artifact. * * @throws TskCoreException If there is an error creating the artifact. * @throws BlackboardException If there is a problem posting the artifact. */ public BlackboardArtifact addCalllog(CommunicationDirection direction, String callerId, Collection calleeIdsList, long startDateTime, long endDateTime, CallMediaType mediaType, Collection otherAttributesList) throws TskCoreException, BlackboardException { // Either caller id or a callee id must be provided. if (StringUtils.isEmpty(callerId) && (isEffectivelyEmpty(calleeIdsList))) { throw new IllegalArgumentException("Either a caller id, or at least one callee id must be provided for a call log."); } AccountFileInstance selfAccountInstanceLocal = null; try { selfAccountInstanceLocal = getSelfAccountInstance(); } catch (InvalidAccountIDException ex) { LOGGER.log(Level.WARNING, String.format("Failed to get/create self account with id %s", selfAccountId), ex); } Collection attributes = new ArrayList<>(); // Add basic attributes addAttributeIfNotZero(ATTRIBUTE_TYPE.TSK_DATETIME_START, startDateTime, attributes); addAttributeIfNotZero(ATTRIBUTE_TYPE.TSK_DATETIME_END, endDateTime, attributes); addCommDirectionIfKnown(direction, attributes); AccountFileInstance callerAccountInstance = null; if (StringUtils.isNotBlank(callerId)) { try { callerAccountInstance = createAccountInstance(moduleAccountsType, callerId); } catch (InvalidAccountIDException ex) { LOGGER.log(Level.WARNING, String.format("Failed to create account with id %s", callerId)); } } // Create a comma separated string of callee List recipientAccountsList = new ArrayList<>(); String calleesStr = ""; if (!isEffectivelyEmpty(calleeIdsList)) { calleesStr = addressListToString(calleeIdsList); for 
(String callee : calleeIdsList) { if (StringUtils.isNotBlank(callee)) { try { recipientAccountsList.add(createAccountInstance(moduleAccountsType, callee)); } catch (InvalidAccountIDException ex) { LOGGER.log(Level.WARNING, String.format("Failed to create account with id %s", callee)); } } } } switch (direction) { case OUTGOING: // if no callee throw IllegalArg if (isEffectivelyEmpty(calleeIdsList)) { throw new IllegalArgumentException("Callee not provided for an outgoing call."); } // if no caller, selfAccount substitutes caller. if (StringUtils.isEmpty(callerId) && selfAccountInstanceLocal != null) { callerAccountInstance = selfAccountInstanceLocal; } // caller becomes PHONE_FROM if (callerAccountInstance != null) { addAttributeIfNotNull(ATTRIBUTE_TYPE.TSK_PHONE_NUMBER_FROM, callerAccountInstance.getAccount().getTypeSpecificID(), attributes); } // callee becomes PHONE_TO addAttributeIfNotNull(ATTRIBUTE_TYPE.TSK_PHONE_NUMBER_TO, calleesStr, attributes); break; case INCOMING: // if no caller throw IllegalArg if (StringUtils.isEmpty(callerId)) { throw new IllegalArgumentException("Caller Id not provided for incoming call."); } // if no callee specified, selfAccount substitutes callee if (isEffectivelyEmpty(calleeIdsList) && selfAccountInstanceLocal != null) { calleesStr = selfAccountInstanceLocal.getAccount().getTypeSpecificID(); recipientAccountsList.add(selfAccountInstanceLocal); } // caller becomes PHONE_FROM if (callerAccountInstance != null) { addAttributeIfNotNull(ATTRIBUTE_TYPE.TSK_PHONE_NUMBER_FROM, callerAccountInstance.getAccount().getTypeSpecificID(), attributes); } // callee becomes PHONE_TO addAttributeIfNotNull(ATTRIBUTE_TYPE.TSK_PHONE_NUMBER_TO, calleesStr, attributes); break; default: // direction UNKNOWN // save phone numbers in direction agnostic attributes if (callerAccountInstance != null) { addAttributeIfNotNull(ATTRIBUTE_TYPE.TSK_PHONE_NUMBER, callerAccountInstance.getAccount().getTypeSpecificID(), attributes); } // callee becomes PHONE_TO 
addAttributeIfNotNull(ATTRIBUTE_TYPE.TSK_PHONE_NUMBER, calleesStr, attributes); break; } // add attributes to artifact attributes.addAll(otherAttributesList); // Create TSK_CALLLOG artifact Content content = getContent(); BlackboardArtifact callLogArtifact = content.newDataArtifact(CALLOG_TYPE, attributes); callLogArtifact.addAttributes(attributes); // create relationships between caller/callees try { getSleuthkitCase().getCommunicationsManager().addRelationships(callerAccountInstance, recipientAccountsList, callLogArtifact, Relationship.Type.CALL_LOG, startDateTime); } catch (TskDataException ex) { throw new TskCoreException(String.format("Failed to create Call log relationships between caller account = %s and callees = %s.", (callerAccountInstance != null) ? callerAccountInstance.getAccount() : "", calleesStr), ex); } // post artifact getSleuthkitCase().getBlackboard().postArtifact(callLogArtifact, getModuleName()); // return the artifact return callLogArtifact; } /** * Adds attachments to a message. * * @param message Message artifact. * @param attachments Attachments to add to the message. * * @throws TskCoreException If there is an error in adding attachments */ public void addAttachments(BlackboardArtifact message, MessageAttachments attachments) throws TskCoreException { // Create attribute BlackboardAttribute blackboardAttribute = BlackboardJsonAttrUtil.toAttribute(ATTACHMENTS_ATTR_TYPE, getModuleName(), attachments); message.addAttribute(blackboardAttribute); // Associate each attachment file with the message. Collection fileAttachments = attachments.getFileAttachments(); for (FileAttachment fileAttachment : fileAttachments) { long attachedFileObjId = fileAttachment.getObjectId(); if (attachedFileObjId >= 0) { AbstractFile attachedFile = message.getSleuthkitCase().getAbstractFileById(attachedFileObjId); associateAttachmentWithMessage(message, attachedFile); } } } /** * Creates a TSK_ASSOCIATED_OBJECT artifact between the attachment file and * the message. 
* * @param message Message artifact. * @param attachments Attachment file. * * @return TSK_ASSOCIATED_OBJECT artifact. * * @throws TskCoreException If there is an error creating the * TSK_ASSOCIATED_OBJECT artifact. */ private BlackboardArtifact associateAttachmentWithMessage(BlackboardArtifact message, AbstractFile attachedFile) throws TskCoreException { Collection attributes = new ArrayList<>(); attributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_ASSOCIATED_ARTIFACT, this.getModuleName(), message.getArtifactID())); return attachedFile.newDataArtifact(ASSOCIATED_OBJ_TYPE, attributes); } /** * Converts a list of ids into a single comma separated string. */ private String addressListToString(Collection addressList) { String toAddresses = ""; if (addressList != null && (!addressList.isEmpty())) { StringBuilder toAddressesSb = new StringBuilder(); for (String address : addressList) { if (!StringUtils.isEmpty(address)) { toAddressesSb = toAddressesSb.length() > 0 ? toAddressesSb.append(", ").append(address) : toAddressesSb.append(address); } } toAddresses = toAddressesSb.toString(); } return toAddresses; } /** * Checks if the given list of ids has at least one non-null non-blank id. * * @param addressList List of string ids. * * @return false if the list has at least one non-null non-blank id, * otherwise true. * */ private boolean isEffectivelyEmpty(Collection idList) { if (idList == null || idList.isEmpty()) { return true; } for (String id : idList) { if (!StringUtils.isEmpty(id)) { return false; } } return true; } /** * Adds communication direction attribute to the list, if it is not unknown. */ private void addCommDirectionIfKnown(CommunicationDirection direction, Collection attributes) { if (direction != CommunicationDirection.UNKNOWN) { attributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DIRECTION, getModuleName(), direction.getDisplayName())); } } /** * Adds message read status attribute to the list, if it is not unknown. 
*/ private void addMessageReadStatusIfKnown(MessageReadStatus readStatus, Collection attributes) { if (readStatus != MessageReadStatus.UNKNOWN) { attributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_READ_STATUS, getModuleName(), (readStatus == MessageReadStatus.READ) ? 1 : 0)); } } /** * Returns self account instance. Lazily creates it if one doesn't exist * yet. * * @return Self account instance. * * @throws TskCoreException */ private synchronized AccountFileInstance getSelfAccountInstance() throws TskCoreException, InvalidAccountIDException { if (selfAccountInstance == null) { selfAccountInstance = getSleuthkitCase().getCommunicationsManager().createAccountFileInstance(selfAccountType, selfAccountId, this.getModuleName(), getContent()); } return selfAccountInstance; } } sleuthkit-4.11.1/bindings/java/src/org/sleuthkit/datamodel/blackboardutils/ArtifactHelperBase.java000644 000765 000024 00000007005 14137073414 034154 0ustar00carrierstaff000000 000000 /* * Sleuth Kit Data Model * * Copyright 2019-2020 Basis Technology Corp. * Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.sleuthkit.datamodel.blackboardutils; import java.util.Collection; import org.apache.commons.lang3.StringUtils; import org.sleuthkit.datamodel.BlackboardAttribute; import org.sleuthkit.datamodel.Content; import org.sleuthkit.datamodel.SleuthkitCase; /** * A base class for classes that help ingest modules create artifacts. 
* */ class ArtifactHelperBase { private final SleuthkitCase caseDb; private final Content srcContent; // artifact source private final String moduleName; // module creating the artifacts /** * Creates an artifact helper. * * @param caseDb Sleuthkit case db * @param moduleName name module using the helper * @param srcContent source content */ ArtifactHelperBase(SleuthkitCase caseDb, String moduleName, Content srcContent) { this.moduleName = moduleName; this.srcContent = srcContent; this.caseDb = caseDb; } /** * Returns the source content. * * @return Source content. */ Content getContent() { return this.srcContent; } /** * Returns the sleuthkit case. * * @return Sleuthkit case database. */ SleuthkitCase getSleuthkitCase() { return this.caseDb; } /** * Returns module name. * * @return Module name. */ String getModuleName() { return this.moduleName; } /** * Creates and adds a string attribute of specified type to the given list, if the * attribute value is not empty or null. * * @param attributeType Attribute type. * @param attrValue String attribute value. * @param attributes List of attributes to add to. * */ void addAttributeIfNotNull(BlackboardAttribute.ATTRIBUTE_TYPE attributeType, String attrValue, Collection attributes) { if (!StringUtils.isEmpty(attrValue)) { attributes.add(new BlackboardAttribute(attributeType, getModuleName(), attrValue)); } } /** * Creates and adds a long attribute of specified type to the given list, if the * attribute value is not 0. * * @param attributeType Attribute type. * @param attrValue Long attribute value. * @param attributes List of attributes to add to. */ void addAttributeIfNotZero(BlackboardAttribute.ATTRIBUTE_TYPE attributeType, long attrValue, Collection attributes) { if (attrValue > 0) { attributes.add(new BlackboardAttribute(attributeType, getModuleName(), attrValue)); } } /** * Creates and adds an integer attribute of specified type to the given list, if the * attribute value is not 0. 
* * @param attributeType Attribute type. * @param attrValue Integer attribute value. * @param attributes List of attributes to add to. */ void addAttributeIfNotZero(BlackboardAttribute.ATTRIBUTE_TYPE attributeType, int attrValue, Collection attributes) { if (attrValue > 0) { attributes.add(new BlackboardAttribute(attributeType, getModuleName(), attrValue)); } } } bindings/java/src/org/sleuthkit/datamodel/blackboardutils/WebBrowserArtifactsHelper.java000644 000765 000024 00000055202 14137073414 035471 0ustar00carrierstaff000000 000000 sleuthkit-4.11.1/* * Sleuth Kit Data Model * * Copyright 2019-2020 Basis Technology Corp. * Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
*/ package org.sleuthkit.datamodel.blackboardutils; import java.net.MalformedURLException; import java.net.URL; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.StringTokenizer; import java.util.logging.Level; import java.util.logging.Logger; import org.apache.commons.lang3.StringUtils; import org.sleuthkit.datamodel.AbstractFile; import org.sleuthkit.datamodel.Account; import org.sleuthkit.datamodel.Blackboard.BlackboardException; import org.sleuthkit.datamodel.BlackboardArtifact; import org.sleuthkit.datamodel.BlackboardAttribute; import org.sleuthkit.datamodel.CommunicationsManager; import org.sleuthkit.datamodel.Content; import org.sleuthkit.datamodel.InvalidAccountIDException; import org.sleuthkit.datamodel.SleuthkitCase; import org.sleuthkit.datamodel.TskCoreException; /** * Class to help ingest modules create Web Browser artifacts. * * These include bookmarks, cookies, downloads, history, and web form * autofill data. * */ public final class WebBrowserArtifactsHelper extends ArtifactHelperBase { private static final Logger LOGGER = Logger.getLogger(WebBrowserArtifactsHelper.class.getName()); private static final BlackboardArtifact.Type WEB_BOOKMARK_TYPE = new BlackboardArtifact.Type(BlackboardArtifact.ARTIFACT_TYPE.TSK_WEB_BOOKMARK); private static final BlackboardArtifact.Type WEB_COOKIE_TYPE = new BlackboardArtifact.Type(BlackboardArtifact.ARTIFACT_TYPE.TSK_WEB_COOKIE); private static final BlackboardArtifact.Type WEB_DOWNLOAD_TYPE = new BlackboardArtifact.Type(BlackboardArtifact.ARTIFACT_TYPE.TSK_WEB_DOWNLOAD); private static final BlackboardArtifact.Type WEB_FORM_ADDRESS_TYPE = new BlackboardArtifact.Type(BlackboardArtifact.ARTIFACT_TYPE.TSK_WEB_FORM_ADDRESS); private static final BlackboardArtifact.Type WEB_FORM_AUTOFILL_TYPE = new BlackboardArtifact.Type(BlackboardArtifact.ARTIFACT_TYPE.TSK_WEB_FORM_AUTOFILL); private static final BlackboardArtifact.Type WEB_HISTORY_TYPE = new 
BlackboardArtifact.Type(BlackboardArtifact.ARTIFACT_TYPE.TSK_WEB_HISTORY); /** * Creates a WebBrowserArtifactsHelper. * * @param caseDb Sleuthkit case db. * @param moduleName Name of module using the helper. * @param srcContent Source content being processed by the module. * */ public WebBrowserArtifactsHelper(SleuthkitCase caseDb, String moduleName, Content srcContent) { super(caseDb, moduleName, srcContent); } /** * Adds a TSK_WEB_BOOKMARK artifact. * * @param url Bookmark URL, required. * @param title Bookmark title, may be empty/null. * @param creationTime Date/time created, may be 0 if not available. * @param progName Application/program that created bookmark, may be * empty/null. * * @return Bookmark artifact. * * @throws TskCoreException If there is an error creating the artifact. * @throws BlackboardException If there is a problem posting the artifact. */ public BlackboardArtifact addWebBookmark(String url, String title, long creationTime, String progName) throws TskCoreException, BlackboardException { return addWebBookmark(url, title, creationTime, progName, Collections.emptyList()); } /** * Adds a TSK_WEB_BOOKMARK artifact. * * @param url Bookmark URL, required. * @param title Bookmark title, may be empty/null. * @param creationTime Date/time created, may be 0 if not available. * @param progName Application/program that created bookmark, may * be empty/null. * @param otherAttributesList Other attributes, may be an empty list. * * @return Bookmark artifact. * * @throws TskCoreException If there is an error creating the artifact. * @throws BlackboardException If there is a problem posting the artifact. 
*/ public BlackboardArtifact addWebBookmark(String url, String title, long creationTime, String progName, Collection otherAttributesList) throws TskCoreException, BlackboardException { Collection attributes = new ArrayList<>(); // construct attributes attributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_URL, getModuleName(), url)); addAttributeIfNotZero(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DATETIME_CREATED, creationTime, attributes); addAttributeIfNotNull(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_TITLE, title, attributes); addAttributeIfNotNull(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DOMAIN, extractDomain(url), attributes); addAttributeIfNotNull(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_PROG_NAME, progName, attributes); // add attributes to artifact attributes.addAll(otherAttributesList); Content content = getContent(); BlackboardArtifact bookMarkArtifact = content.newDataArtifact(WEB_BOOKMARK_TYPE, attributes); // post artifact getSleuthkitCase().getBlackboard().postArtifact(bookMarkArtifact, getModuleName()); // return the artifact return bookMarkArtifact; } /** * Adds a TSK_WEB_COOKIE artifact. * * @param url Url of the site that created the cookie, required. * @param creationTime Create time of cookie, may be 0 if not available. * @param name Cookie name, may be empty or null. * @param value Cookie value, may be empty or null. * @param programName Name of the application/program that created the * cookie, may be empty or null. * * @return WebCookie artifact * * @throws TskCoreException If there is an error creating the artifact. * @throws BlackboardException If there is a problem posting the artifact. */ public BlackboardArtifact addWebCookie(String url, long creationTime, String name, String value, String programName) throws TskCoreException, BlackboardException { return addWebCookie(url, creationTime, name, value, programName, Collections.emptyList()); } /** * Adds a TSK_WEB_COOKIE artifact. 
* * @param url Url of the site that created the cookie, * required. * @param creationTime Create time of cookie, may be 0 if not * available. * @param name Cookie name, may be empty or null. * @param value Cookie value, may be empty or null. * @param programName Name of the application/program that created * the cookie, may be empty or null. * * @param otherAttributesList Other attributes, may be an empty list. * * @return WebCookie artifact * * @throws TskCoreException If there is an error creating the artifact. * @throws BlackboardException If there is a problem posting the artifact. */ public BlackboardArtifact addWebCookie(String url, long creationTime, String name, String value, String programName, Collection otherAttributesList) throws TskCoreException, BlackboardException { Collection attributes = new ArrayList<>(); // construct attributes attributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_URL, getModuleName(), url)); addAttributeIfNotZero(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DATETIME, creationTime, attributes); addAttributeIfNotNull(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_NAME, name, attributes); addAttributeIfNotNull(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_VALUE, value, attributes); addAttributeIfNotNull(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DOMAIN, extractDomain(url), attributes); addAttributeIfNotNull(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_PROG_NAME, programName, attributes); // add attributes to artifact attributes.addAll(otherAttributesList); Content content = getContent(); BlackboardArtifact cookieArtifact = content.newDataArtifact(WEB_COOKIE_TYPE, attributes); // post artifact getSleuthkitCase().getBlackboard().postArtifact(cookieArtifact, getModuleName()); // return the artifact return cookieArtifact; } /** * Adds a TSK_WEB_DOWNNLOAD artifact. * * @param url URL downloaded from, required. * @param startTime Date/time downloaded, 0 if not available. * @param path Path of downloaded file, required. 
* @param programName Program that initiated the download, may be empty or * null. * * @return Web download artifact created. * * @throws TskCoreException If there is an error creating the artifact. * @throws BlackboardException If there is a problem posting the artifact. */ public BlackboardArtifact addWebDownload(String url, long startTime, String path, String programName) throws TskCoreException, BlackboardException { return addWebDownload(path, startTime, url, programName, Collections.emptyList()); } /** * Adds a TSK_WEB_DOWNNLOAD artifact. * * @param url URL downloaded from, required. * @param startTime Date/time downloaded, 0 if not available. * @param path Path of downloaded file, required. * @param programName Program that initiated the download, may be * empty or null. * @param otherAttributesList Other attributes, may be an empty list. * * @return Web download artifact created. * * @throws TskCoreException If there is an error creating the artifact. * @throws BlackboardException If there is a problem posting the artifact. 
*/ public BlackboardArtifact addWebDownload(String url, long startTime, String path, String programName, Collection otherAttributesList) throws TskCoreException, BlackboardException { Collection attributes = new ArrayList<>(); // construct attributes attributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_PATH, getModuleName(), path)); attributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_URL, getModuleName(), url)); addAttributeIfNotZero(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DATETIME_ACCESSED, startTime, attributes); addAttributeIfNotNull(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_PROG_NAME, programName, attributes); addAttributeIfNotNull(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DOMAIN, extractDomain(url), attributes); // add attributes to artifact attributes.addAll(otherAttributesList); Content content = getContent(); BlackboardArtifact webDownloadArtifact = content.newDataArtifact(WEB_DOWNLOAD_TYPE, attributes); // post artifact getSleuthkitCase().getBlackboard().postArtifact(webDownloadArtifact, getModuleName()); // return the artifact return webDownloadArtifact; } /** * Adds a TSK_WEB_FORM_AUTOFILL artifact. * * @param personName Person name, required. * @param email Email address, may be empty or null. * @param phoneNumber Phone number, may be empty or null. * @param mailingAddress Mailing address, may be empty or null. * @param creationTime Creation time, may be 0 if not available. * @param accessTime Last access time, may be 0 if not available. * @param count Use count, may be 0 if not available. * * @return Web form address artifact created. * * @throws TskCoreException If there is an error creating the artifact. * @throws BlackboardException If there is a problem posting the artifact. 
*/ public BlackboardArtifact addWebFormAddress(String personName, String email, String phoneNumber, String mailingAddress, long creationTime, long accessTime, int count) throws TskCoreException, BlackboardException { return addWebFormAddress(personName, email, phoneNumber, mailingAddress, creationTime, accessTime, count, Collections.emptyList()); } /** * Adds a TSK_WEB_FORM_ADDRESS artifact. * * @param personName Person name, required. * @param email Email address, may be empty or null. * @param phoneNumber Phone number, may be empty or null. * @param mailingAddress Mailing address, may be empty or null. * @param creationTime Creation time, may be 0 if not available. * @param accessTime Last access time, may be 0 if not available. * @param count Use count, may be 0 if not available. * @param otherAttributesList Other attributes, may be an empty list. * * @return Web form address artifact created. * * @throws TskCoreException If there is an error creating the artifact. * @throws BlackboardException If there is a problem posting the artifact. 
*/ public BlackboardArtifact addWebFormAddress(String personName, String email, String phoneNumber, String mailingAddress, long creationTime, long accessTime, int count, Collection otherAttributesList) throws TskCoreException, BlackboardException { Collection attributes = new ArrayList<>(); CommunicationsManager commManager = this.getSleuthkitCase().getCommunicationsManager(); if (StringUtils.isNotEmpty(email)) { try { commManager.createAccountFileInstance(Account.Type.EMAIL, email, this.getModuleName(), this.getContent()); } catch (InvalidAccountIDException ex) { LOGGER.log(Level.WARNING, String.format("Invalid account identifier %s", email), ex); } } if(StringUtils.isNotEmpty(phoneNumber)) { try { commManager.createAccountFileInstance(Account.Type.PHONE, phoneNumber, this.getModuleName(), this.getContent()); } catch (InvalidAccountIDException ex) { LOGGER.log(Level.WARNING, String.format("Invalid account identifier %s", phoneNumber), ex); } } // construct attributes attributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_NAME, getModuleName(), personName)); addAttributeIfNotNull(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_EMAIL, email, attributes); addAttributeIfNotNull(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_PHONE_NUMBER, phoneNumber, attributes); addAttributeIfNotNull(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_LOCATION, mailingAddress, attributes); addAttributeIfNotZero(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DATETIME_CREATED, creationTime, attributes); addAttributeIfNotZero(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DATETIME_ACCESSED, accessTime, attributes); addAttributeIfNotZero(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_COUNT, count, attributes); // add artifact Content content = getContent(); BlackboardArtifact webFormAddressArtifact = content.newDataArtifact(WEB_FORM_ADDRESS_TYPE, attributes); // post artifact getSleuthkitCase().getBlackboard().postArtifact(webFormAddressArtifact, getModuleName()); // return the artifact return webFormAddressArtifact; 
} /** * Adds a TSK_WEB_FORM_AUTOFILL artifact. * * @param name Name of autofill field, required. * @param value Value of autofill field, required. * @param creationTime Create date/time, may be 0 if not available. * @param accessTime Last access date/time, may be 0 if not available. * @param count Count of times used, may be 0 if not available. * * @return Web form autofill artifact created. * * @throws TskCoreException If there is an error creating the artifact. * @throws BlackboardException If there is a problem posting the artifact. */ public BlackboardArtifact addWebFormAutofill(String name, String value, long creationTime, long accessTime, int count) throws TskCoreException, BlackboardException { return addWebFormAutofill(name, value, creationTime, accessTime, count, Collections.emptyList()); } /** * Adds a TSK_WEB_FORM_AUTOFILL artifact. * * @param name Name of autofill field, required. * @param value Value of autofill field, required. * @param creationTime Create date/time, may be 0 if not available. * @param accessTime Last access date/time, may be 0 if not * available. * @param count Count of times used, may be 0 if not * available. * @param otherAttributesList Other attributes, may be an empty list. * * @return Web form autofill artifact created. * * @throws TskCoreException If there is an error creating the artifact. * @throws BlackboardException If there is a problem posting the artifact. 
*/ public BlackboardArtifact addWebFormAutofill(String name, String value, long creationTime, long accessTime, int count, Collection otherAttributesList) throws TskCoreException, BlackboardException { Collection attributes = new ArrayList<>(); // construct attributes attributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_NAME, getModuleName(), name)); attributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_VALUE, getModuleName(), value)); addAttributeIfNotZero(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DATETIME_CREATED, creationTime, attributes); addAttributeIfNotZero(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DATETIME_ACCESSED, accessTime, attributes); addAttributeIfNotZero(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_COUNT, count, attributes); // add attributes to artifact attributes.addAll(otherAttributesList); Content content = getContent(); BlackboardArtifact webFormAutofillArtifact = content.newDataArtifact(WEB_FORM_AUTOFILL_TYPE, attributes); // post artifact getSleuthkitCase().getBlackboard().postArtifact(webFormAutofillArtifact, getModuleName()); // return the artifact return webFormAutofillArtifact; } /** * Adds a Web History artifact. * * @param url Url visited, required. * @param accessTime Last access time, may be 0 if not available. * @param referrer Referrer, may be empty or null. * @param title Website title, may be empty or null. * @param programName Application/program recording the history, may be * empty or null. * * @return Web history artifact created. * * @throws TskCoreException If there is an error creating the artifact. * @throws BlackboardException If there is a problem posting the artifact. */ public BlackboardArtifact addWebHistory(String url, long accessTime, String referrer, String title, String programName) throws TskCoreException, BlackboardException { return addWebHistory(url, accessTime, referrer, title, programName, Collections.emptyList()); } /** * Adds a Web History artifact. 
* * @param url Url visited, required. * @param accessTime Last access time, may be 0 if not available. * @param referrer Referrer, may be empty or null. * @param title Website title, may be empty or null. * @param programName Application/program recording the history, may * be empty or null. * @param otherAttributesList Other attributes, may be an empty list. * * @return Web history artifact created. * * @throws TskCoreException If there is an error creating the artifact. * @throws BlackboardException If there is a problem posting the artifact. */ public BlackboardArtifact addWebHistory(String url, long accessTime, String referrer, String title, String programName, Collection otherAttributesList) throws TskCoreException, BlackboardException { Collection attributes = new ArrayList<>(); // construct attributes attributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_URL, getModuleName(), url)); addAttributeIfNotZero(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DATETIME_ACCESSED, accessTime, attributes); addAttributeIfNotNull(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_TITLE, title, attributes); addAttributeIfNotNull(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_REFERRER, referrer, attributes); addAttributeIfNotNull(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_PROG_NAME, programName, attributes); addAttributeIfNotNull(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DOMAIN, extractDomain(url), attributes); // add attributes to artifact attributes.addAll(otherAttributesList); Content content = getContent(); BlackboardArtifact webHistoryArtifact = content.newDataArtifact(WEB_HISTORY_TYPE, attributes); // post artifact getSleuthkitCase().getBlackboard().postArtifact(webHistoryArtifact, getModuleName()); // return the artifact return webHistoryArtifact; } // TBD: this is duplicated in Autopsy. // We should move this to new Util class in TSK, and have Autopsy delegate to it. /** * Attempt to extract the domain from a URL. 
Will start by using the * built-in URL class, and if that fails will try to extract it manually. * * @param urlString The URL to extract the domain from * * @return empty string if no domain name was found */ private static String extractDomain(String urlString) { if (urlString == null) { return ""; } String result; try { URL url = new URL(urlString); result = url.getHost(); } catch (MalformedURLException ex) { // not a valid URL - we will try to extract it ourselves result = null; } //was not a valid URL, try a less picky method if (result == null || StringUtils.isBlank(result)) { return getBaseDomain(urlString); } return result; } /** * Attempt to manually extract the domain from a URL. * * @param url * * @return empty string if no domain could be found */ private static String getBaseDomain(String url) { String host; //strip protocol String cleanUrl = url.replaceFirst(".*:\\/\\/", ""); //strip after slashes String dirToks[] = cleanUrl.split("\\/"); if (dirToks.length > 0) { host = dirToks[0]; } else { host = cleanUrl; } //get the domain part from host (last 2) StringTokenizer tok = new StringTokenizer(host, "."); StringBuilder hostB = new StringBuilder(); int toks = tok.countTokens(); for (int count = 0; count < toks; ++count) { String part = tok.nextToken(); int diff = toks - count; if (diff < 3) { hostB.append(part); } if (diff == 2) { hostB.append('.'); } } String base = hostB.toString(); // verify there are no special characters in there if (base.matches(".*[~`!@#$%^&\\*\\(\\)\\+={}\\[\\];:\\?<>,/ ].*")) { return ""; } //verify that the base domain actually has a '.', details JIRA-4609 if (!base.contains(".")) { return ""; } return base; } } bindings/java/src/org/sleuthkit/datamodel/blackboardutils/attributes/BlackboardJsonAttrUtil.java000755 000765 000024 00000010512 14137073413 037141 0ustar00carrierstaff000000 000000 sleuthkit-4.11.1/* * Sleuth Kit Data Model * * Copyright 2020 Basis Technology Corp. 
* Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.sleuthkit.datamodel.blackboardutils.attributes; import com.google.gson.Gson; import com.google.gson.JsonSyntaxException; import org.sleuthkit.datamodel.BlackboardAttribute; /** * A utility for converting between JSON and artifact attributes of value type * TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.JSON. */ public final class BlackboardJsonAttrUtil { /** * Creates an attribute of a given type with the string value set to an * object of type T serialized to JSON. * * T The type of the attribute value object to be * serialized. * * @param attrType The type of attribute to create. * @param moduleName The name of the module creating the attribute. * @param attrValue The attribute value object. * * @return The BlackboardAttribute object. */ public static BlackboardAttribute toAttribute(BlackboardAttribute.Type attrType, String moduleName, T attrValue) { if (attrType.getValueType() != BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.JSON) { throw new IllegalArgumentException(String.format("Attribute type %s does not have value type BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.JSON", attrType.getTypeName())); } return new BlackboardAttribute(attrType, moduleName, (new Gson()).toJson(attrValue)); } /** * Creates an object of type T from the JSON in the string value of a * BlackboardAttribute with a value type of * TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.JSON. 
* * T The type of the object to be created from the JSON. * * @param attr The attribute. * @param clazz The class object for class T. * * @return The T object from the attribute. * * @throws InvalidJsonException Thrown the JSON in an artifact attribute * cannot be deserialized to an object of the * specified type. */ public static T fromAttribute(BlackboardAttribute attr, Class clazz) throws InvalidJsonException { BlackboardAttribute.Type attrType = attr.getAttributeType(); if (attrType.getValueType() != BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.JSON) { throw new IllegalArgumentException(String.format("Attribute type %s does not have value type BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.JSON", attrType.getTypeName())); } String json = attr.getValueString(); if (json == null || json.isEmpty()) { throw new InvalidJsonException("The string value (JSON) of the attribute is null or empty"); } try { return (new Gson()).fromJson(json, clazz); } catch (JsonSyntaxException ex) { throw new InvalidJsonException(String.format("The string value (JSON) could not be deserialized as a %s", clazz.getName()), ex); } } /** * Constructs an exception to be thrown when the JSON in an artifact * attribute cannot be deserialized to an object of the specified type. */ public static class InvalidJsonException extends Exception { private static final long serialVersionUID = 1L; /** * Constructs an exception thrown when JSON in an artifact attribute * cannot be deserialized to an object of the specified type. * * @param message An error message. */ public InvalidJsonException(String message) { super(message); } /** * Constructs an exception thrown when JSON in an artifact attribute * cannot be deserialized to an object of the specified type. * * @param message An error message. * @param cause An excception that caused this exception to be thrown. 
*/ public InvalidJsonException(String message, Throwable cause) { super(message, cause); } } /** * Prevents instantiation of this utility class. */ private BlackboardJsonAttrUtil() { } } bindings/java/src/org/sleuthkit/datamodel/blackboardutils/attributes/GeoTrackPoints.java000755 000765 000024 00000016116 14137073413 035474 0ustar00carrierstaff000000 000000 sleuthkit-4.11.1/* * Sleuth Kit Data Model * * Copyright 2020 Basis Technology Corp. * Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.sleuthkit.datamodel.blackboardutils.attributes; import com.google.gson.annotations.SerializedName; import java.util.ArrayList; import java.util.Iterator; import java.util.List; import java.util.stream.Collectors; /** * A GeoTrackPoints object is a collection of TrackPoint objects. A TrackPoint * represents a track point, which is a location in a geographic coordinate * system with latitude, longitude and altitude (elevation) axes. * * GeoTrackPoints objects are designed to be used as the string value of the * TSK_GEO_TRACKPOINTS attribute of a TSK_GPS_TRACK artifact. TSK_GPS_TRACK * artifacts are used to record a track, or path, of a GPS-enabled device as a * connected series of track points. * */ public class GeoTrackPoints implements Iterable { private final List pointList; /** * Constructs an empty GeoTrackPoints object. */ public GeoTrackPoints() { pointList = new ArrayList<>(); } /** * Adds a track point to this list of track points. 
* * @param trackPoint A track point. */ public void addPoint(TrackPoint trackPoint) { if (trackPoint == null) { throw new IllegalArgumentException("addPoint was passed a null track point"); } pointList.add(trackPoint); } @Override public Iterator iterator() { return pointList.iterator(); } /** * Returns whether or not this list of track points is empty. * * @return True or false. */ public boolean isEmpty() { return pointList.isEmpty(); } /** * Gets the earliest track point timestamp in this list of track points, if * timestamps are present. * * @return The timestamp in milliseconds from the Java epoch of * 1970-01-01T00:00:00Z, may be null or zero. */ public Long getStartTime() { List orderedPoints = getTimeOrderedPoints(); if (orderedPoints != null) { for (TrackPoint point : orderedPoints) { if (point.getTimeStamp() != null) { return point.getTimeStamp(); } } } return null; } /** * Gets the latest track point timestamp in this list of track points, if * timestamps are present. * * @return The timestamp in milliseconds from the Java epoch of * 1970-01-01T00:00:00Z, may be null or zero. */ public Long getEndTime() { List orderedPoints = getTimeOrderedPoints(); if (orderedPoints != null) { for (int index = orderedPoints.size() - 1; index >= 0; index--) { TrackPoint point = orderedPoints.get(index); if (point.getTimeStamp() != null) { return point.getTimeStamp(); } } } return null; } /** * Gets this list of track points as a list ordered by track point * timestamp. * * @return The ordered list of track points. */ private List getTimeOrderedPoints() { return pointList.stream().sorted().collect(Collectors.toCollection(ArrayList::new)); } /** * A representation of a track point, which is a location in a geographic * coordinate system with latitude, longitude and altitude (elevation) axes. 
*/ public final static class TrackPoint extends GeoWaypoints.Waypoint implements Comparable { @SerializedName("TSK_GEO_VELOCITY") private final Double velocity; @SerializedName("TSK_DISTANCE_FROM_HOMEPOINT") private final Double distanceFromHomePoint; @SerializedName("TSK_DISTANCE_TRAVELED") private final Double distanceTraveled; @SerializedName("TSK_DATETIME") private final Long timestamp; /** * Constructs a representation of a track point, which is a location in * a geographic coordinate system with latitude, longitude and altitude * (elevation) axes. * * @param latitude The latitude of the track point. * @param longitude The longitude of the track point. * @param altitude The altitude of the track point, may be * null. * @param name The name of the track point, may be * null. * @param velocity The velocity of the device at the track * point in meters per second, may be null. * @param distanceFromHomePoint The distance of the track point in * meters from an established home point, * may be null. * @param distanceTraveled The distance the device has traveled in * meters at the time this track point was * created, may be null. * @param timestamp The timestamp of the track point as * milliseconds from the Java epoch of * 1970-01-01T00:00:00Z, may be null. */ public TrackPoint(Double latitude, Double longitude, Double altitude, String name, Double velocity, Double distanceFromHomePoint, Double distanceTraveled, Long timestamp) { super(latitude, longitude, altitude, name); this.velocity = velocity; this.distanceFromHomePoint = distanceFromHomePoint; this.distanceTraveled = distanceTraveled; this.timestamp = timestamp; } /** * Gets the velocity of the device at this track point in meters per * second, if known. * * @return The velocity in meters/sec, may be null or zero. */ public Double getVelocity() { return velocity; } /** * Gets the distance of this track point from an established home point, * if known. * * @return The distance in meters, may be null or zero. 
*/ public Double getDistanceFromHomePoint() { return distanceFromHomePoint; } /** * Gets the distance the device has traveled in meters at the time this * track point was created, if known. * * @return The distance traveled in meters, may be null or zero. */ public Double getDistanceTraveled() { return distanceTraveled; } /** * Gets the timestamp of this track point as milliseconds from the Java * epoch of 1970-01-01T00:00:00Z, if known. * * @return The timestamp, may be null or zero. */ public Long getTimeStamp() { return timestamp; } @Override public int compareTo(TrackPoint otherTP) { Long otherTimeStamp = otherTP.getTimeStamp(); if (timestamp == null) { if (otherTimeStamp != null) { return -1; } else { return 0; } } else if (timestamp != null && otherTimeStamp == null) { return 1; } else { return timestamp.compareTo(otherTP.getTimeStamp()); } } } } bindings/java/src/org/sleuthkit/datamodel/blackboardutils/attributes/MessageAttachments.java000644 000765 000024 00000027413 14137073413 036357 0ustar00carrierstaff000000 000000 sleuthkit-4.11.1/* * Sleuth Kit Data Model * * Copyright 2019-2020 Basis Technology Corp. * Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
*/ package org.sleuthkit.datamodel.blackboardutils.attributes; import com.google.common.collect.ImmutableList; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.List; import org.sleuthkit.datamodel.AbstractFile; import org.sleuthkit.datamodel.Content; import org.sleuthkit.datamodel.DerivedFile; import org.sleuthkit.datamodel.SleuthkitCase; import org.sleuthkit.datamodel.TskCoreException; import org.sleuthkit.datamodel.TskData; /** * Class to represent attachments to a message. * * Attachments can be URL attachments or file attachments. * */ public final class MessageAttachments { /** * * An interface implemented by message attachments. */ public interface Attachment { /** * Returns location of an attachment - a path or a URL. * * @return String representing location of attachment. */ String getLocation(); /* * Returns object id of the attachment file. * * @return Object id of attachment, may be null if not available or not * applicable. */ Long getObjId(); } /** * Represents a message attachment where a URL of the attachment is * available. * */ public static class URLAttachment implements Attachment { private final String url; /** * Creates URL attachment. * * @param url URL of attachment. */ public URLAttachment(String url) { this.url = url; } /** * Returns attachment URL. * * @return attachment URL. */ public String getURL() { return url; } @Override public String getLocation() { return this.url; } @Override public Long getObjId() { // no real object available. return null; } } /** * File attachment to a message. * * The file may or may not have been downloaded, and hence may or may not be * part of the data source. * * A file attachment may also be created for a blob that is added as a * derived file. 
* */ public static final class FileAttachment implements Attachment { private final String path; private final long objectID; // Mobile phones often create mount points to refer to SD Cards or other // fixed/removable storage media. // // Applications use these mount points when referring files. But they may // not exist physically in the data source. // // Common, wellknown mount points are stripped from the file paths to // accurately search for the file in the image. transient private static final List KNOWN_MOUNTPOINTS = ImmutableList.of( "/data/", // NON-NLS "/storage/emulated/"); //NON-NLS /** * Creates a file attachment from a file path. * * Searches the specified data source for the give file name and path, * and if found, saves the object Id of the file. If no match is found, * then just the pathName is remembered. * * @param caseDb Case database. * @param dataSource Data source to search in. * @param pathName Full path name of the attachment file. * * @throws TskCoreException If there is an error in finding the attached * file. */ public FileAttachment(SleuthkitCase caseDb, Content dataSource, String pathName) throws TskCoreException { //normalize the slashes. this.path = normalizePath(pathName); String fileName = path.substring(path.lastIndexOf('/') + 1); if (fileName.isEmpty()) { throw new TskCoreException(String.format("No file name specified for attachment file: %s, on data source = %d", path, dataSource.getId())); } String parentPathSubString = (path.lastIndexOf('/') < 0) ? "" : path.substring(0, path.lastIndexOf('/')); // find the attachment file objectID = findAttachmentFile(caseDb, fileName, parentPathSubString, dataSource); } /** * Creates a file attachment from a derived file. * * Occasionally the contents of an attachment may be stored as a blob in * an application database. In that case, the ingest module must write * out the contents to a local file in the case, and create a * corresponding DerivedFile object. 
* * @param derivedFile Derived file for the attachment. * * @throws TskCoreException */ public FileAttachment(DerivedFile derivedFile) throws TskCoreException { objectID = derivedFile.getId(); path = derivedFile.getUniquePath(); } /** * Creates a file attachment from a file. * * @param abstractFile Abstract file for attachment.. */ public FileAttachment(AbstractFile abstractFile) { objectID = abstractFile.getId(); path = abstractFile.getParentPath() + "/" + abstractFile.getName(); } /** * Returns the full path name of the file. * * @return full path name. */ public String getPathName() { return path; } /** * Returns the objectID of the attachment file, if the file was found in * the data source. * * @return object id of the file. -1 if no matching file is found. */ public long getObjectId() { return objectID; } /** * Normalizes the input path - convert all slashes to TSK convention, * and checks for any well know mount point prefixes that need stripped. * * @param path path to normalize * * @return normalized path. */ private String normalizePath(String path) { //normalize the slashes, replace encoded space String adjustedPath = path.replace("\\", "/").replace("%20", " "); // Strip common known mountpoints. for (String mountPoint : KNOWN_MOUNTPOINTS) { if (adjustedPath.toLowerCase().startsWith(mountPoint)) { adjustedPath = ("/").concat(adjustedPath.substring(mountPoint.length())); break; } } return adjustedPath; } /** * Finds the attachment file, given the name and path, and returns the * object id of the matched file. * * @param caseDb Case database. * @param fileName Name of attachment file. * @param parentPathSubstring Partial parent path of the attachment * file. * @param dataSource Data source the message was found in. * * @throws TskCoreException If there is an error in finding the attached * file. * @return Object id of the matching file. -1 if no suitable match is * found. 
*/ private long findAttachmentFile(SleuthkitCase caseDb, String fileName, String parentPathSubstring, Content dataSource) throws TskCoreException { // Find all files with matching name and parent path substring String whereClause = String.format("LOWER(name) = LOWER('%s') AND LOWER(parent_path) LIKE LOWER('%%%s%%')", fileName, parentPathSubstring); List matchedFiles = caseDb.findAllFilesWhere(whereClause); // separate the matching files into allocated files on same datsource, // allocated files on other data sources, and unallocated files. List allocFileMatchesOnSameDatasource = new ArrayList<>(); List allocFileMatchesOnOtherDatasources = new ArrayList<>(); List unallocFileMatches = new ArrayList<>(); for (AbstractFile file : matchedFiles) { if (file.isMetaFlagSet(TskData.TSK_FS_META_FLAG_ENUM.ALLOC)) { if (dataSource.getId() == file.getDataSource().getId()) { allocFileMatchesOnSameDatasource.add(file.getId()); } else { allocFileMatchesOnOtherDatasources.add(file.getId()); } } else { // unallocated file unallocFileMatches.add(file.getId()); } } // pick the best match from the 3 lists. return pickBestMatchFile(allocFileMatchesOnSameDatasource, allocFileMatchesOnOtherDatasources, unallocFileMatches); } /** * Returns best match file from the specified lists, based on the * following algorithm: * * - If there is exactly one allocated file on the same data source as * the message, that file is returned. - If there is exactly one * allocated match on one of the other data sources, that file is * returned. - If there is exactly one unallocated file matched among * all data sources, that file is returned. - If no match is found or * there are more than one equally suitable matches, then -1 is * returned. * * @param allocFileMatchesOnSameDatasource List of matching allocated * file object ids, found on * the same data source as the * message. 
* @param allocFileMatchesOnOtherDatasources List of matching allocated * file object ids, found on * data sources other than the * one where the the message * is found. * @param unallocFileMatches List of matching * unallocated file object * ids, * * @return Object id of the best match file, -1 if there's no definitive * best match. */ private long pickBestMatchFile(List allocFileMatchesOnSameDatasource, List allocFileMatchesOnOtherDatasources, List unallocFileMatches) { // check if there's an allocated file match on the same data source if (!allocFileMatchesOnSameDatasource.isEmpty() && allocFileMatchesOnSameDatasource.size() == 1) { return allocFileMatchesOnSameDatasource.get(0); } // if no match found yet,check if there's an allocated file match on other data sources. if (!allocFileMatchesOnOtherDatasources.isEmpty() && allocFileMatchesOnOtherDatasources.size() == 1) { return allocFileMatchesOnOtherDatasources.get(0); } // if no match found yet, check if there is an unallocated file that matches. if (!unallocFileMatches.isEmpty() && unallocFileMatches.size() == 1) { return unallocFileMatches.get(0); } // no single suitable match found return -1; } @Override public String getLocation() { return this.path; } @Override public Long getObjId() { return this.objectID; } } private final Collection fileAttachments; private final Collection urlAttachments; /** * Builds Message attachments from the given file attachments and URL * attachments. * * @param fileAttachments Collection of file attachments. * @param urlAttachments Collection of URL attachments. */ public MessageAttachments(Collection fileAttachments, Collection urlAttachments) { this.fileAttachments = fileAttachments; this.urlAttachments = urlAttachments; } /** * Returns collection of file attachments. * * @return Collection of File attachments. */ public Collection getFileAttachments() { return Collections.unmodifiableCollection(fileAttachments); } /** * Returns collection of URL attachments. 
* * @return Collection of URL attachments. */ public Collection getUrlAttachments() { return Collections.unmodifiableCollection(urlAttachments); } /** * Returns total count of attachments. * * @return Count of attachments. */ public int getAttachmentsCount() { return (fileAttachments.size() + urlAttachments.size()); } } bindings/java/src/org/sleuthkit/datamodel/blackboardutils/attributes/GeoWaypoints.java000755 000765 000024 00000010127 14137073413 035224 0ustar00carrierstaff000000 000000 sleuthkit-4.11.1/* * Sleuth Kit Data Model * * Copyright 2020 Basis Technology Corp. * Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.sleuthkit.datamodel.blackboardutils.attributes; import com.google.gson.annotations.SerializedName; import java.util.ArrayList; import java.util.Iterator; import java.util.List; /** * A GeoWaypoints object is a collection of Waypoint objects. A Waypoint object * represents a waypoint for a GPS-enabled device with a navigation capability. * Every waypoint is a location, possibly named, in a geographic coordinate * system with latitude, longitude and altitude (elevation) axes. * * GeoWaypoints objects are designed to be used as the string value of the * TSK_GEO_WAYPOINTS attribute of a TSK_GPS_ROUTE artifact. TSK_GPS_ROUTE * artifacts are used to record one or more waypoints linked together as a route * to be navigated from waypoint to waypoint. 
*/ public class GeoWaypoints implements Iterable { private final List points; /** * Constructs an empty GeoWaypoints object. */ public GeoWaypoints() { points = new ArrayList<>(); } /** * Adds a waypoint to this list of waypoints. * * @param wayPoint A waypoint. */ public void addPoint(Waypoint wayPoint) { if (wayPoint == null) { throw new IllegalArgumentException("addPoint was passed a null waypoint"); } points.add(wayPoint); } /** * Returns whether or not this list of waypoints is empty. * * @return True or false. */ public boolean isEmpty() { return points.isEmpty(); } @Override public Iterator iterator() { return points.iterator(); } /** * A representation of a waypoint, which is a a location, possibly named, in * a geographic coordinate system with latitude, longitude and altitude * (elevation) axes. */ public static class Waypoint { @SerializedName("TSK_GEO_LATITUDE") private final Double latitude; @SerializedName("TSK_GEO_LONGITUDE") private final Double longitude; @SerializedName("TSK_GEO_ALTITUDE") private final Double altitude; @SerializedName("TSK_NAME") private final String name; /** * Constructs a representation of a waypoint, which is a a location, * possibly named, in a geographic coordinate system with latitude, * longitude and altitude (elevation) axes. * * @param latitude The latitude of the waypoint. * @param longitude The longitude of the waypoint. * @param altitude The altitude of the waypoint, may be null. * @param name The name of the waypoint, may be null. */ public Waypoint(Double latitude, Double longitude, Double altitude, String name) { if (latitude == null) { throw new IllegalArgumentException("Constructor was passed null latitude"); } if (longitude == null) { throw new IllegalArgumentException("Constructor was passed null longitude"); } this.latitude = latitude; this.longitude = longitude; this.altitude = altitude; this.name = name; } /** * Gets the latitude of this waypoint. * * @return The latitude. 
*/ public Double getLatitude() { return latitude; } /** * Gets the longitude of this waypoint. * * @return The longitude. */ public Double getLongitude() { return longitude; } /** * Gets the altitude of this waypoint, if available. * * @return The altitude, may be null or zero. */ public Double getAltitude() { return altitude; } /** * Gets the name of this waypoint, if available. * * @return The name, may be null or empty. */ public String getName() { return name; } } } bindings/java/src/org/sleuthkit/datamodel/blackboardutils/attributes/GeoAreaPoints.java000644 000765 000024 00000006306 14137073413 035275 0ustar00carrierstaff000000 000000 sleuthkit-4.11.1/* * Sleuth Kit Data Model * * Copyright 2020 Basis Technology Corp. * Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.sleuthkit.datamodel.blackboardutils.attributes; import com.google.gson.annotations.SerializedName; import java.util.ArrayList; import java.util.Iterator; import java.util.List; /** * A GeoAreaPoints object is a collection of AreaPoint objects. * Every AreaPoint is a location in a geographic coordinate * system with latitude and longitude axes. * * GeoWaypoints objects are designed to be used as the string value of the * TSK_GEO_AREAPOINTS attribute of a TSK_GPS_AREA artifact. TSK_GPS_AREA * artifacts are used to record a series of locations used to outline an * area on the map. 
*/ public class GeoAreaPoints implements Iterable { private final List points; /** * Constructs an empty GeoAreaPoints object. */ public GeoAreaPoints() { points = new ArrayList<>(); } /** * Adds an area point to this list of points outlining the area. * * @param areaPoint A point. */ public void addPoint(AreaPoint areaPoint) { if (areaPoint == null) { throw new IllegalArgumentException("addPoint was passed a null waypoint"); } points.add(areaPoint); } /** * Returns whether or not this list of area points is empty. * * @return True or false. */ public boolean isEmpty() { return points.isEmpty(); } @Override public Iterator iterator() { return points.iterator(); } /** * A representation of an area point, which is a a location in * a geographic coordinate system with latitude and longitude axes. * Area points are used to mark the outline of an area on the map. */ public static class AreaPoint { @SerializedName("TSK_GEO_LATITUDE") private final Double latitude; @SerializedName("TSK_GEO_LONGITUDE") private final Double longitude; /** * Constructs a representation of an area point. * * @param latitude The latitude of the area point. * @param longitude The longitude of the area point. */ public AreaPoint(Double latitude, Double longitude) { if (latitude == null) { throw new IllegalArgumentException("Constructor was passed null latitude"); } if (longitude == null) { throw new IllegalArgumentException("Constructor was passed null longitude"); } this.latitude = latitude; this.longitude = longitude; } /** * Gets the latitude of this area point. * * @return The latitude. */ public Double getLatitude() { return latitude; } /** * Gets the longitude of this area point. * * @return The longitude. 
*/ public Double getLongitude() { return longitude; } } } sleuthkit-4.11.1/bindings/java/src/org/sleuthkit/datamodel/Examples/Sample.java000755 000765 000024 00000006043 14137073413 030321 0ustar00carrierstaff000000 000000 /* * Sleuth Kit Data Model * * Copyright 2012-2018 Basis Technology Corp. * Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.sleuthkit.datamodel.Examples; import java.util.ArrayList; import java.util.List; import java.util.UUID; import java.util.logging.Level; import java.util.logging.Logger; import org.sleuthkit.datamodel.AbstractFile; import org.sleuthkit.datamodel.Content; import org.sleuthkit.datamodel.Image; import org.sleuthkit.datamodel.SleuthkitCase; import org.sleuthkit.datamodel.SleuthkitJNI.CaseDbHandle.AddImageProcess; import org.sleuthkit.datamodel.TskCoreException; import org.sleuthkit.datamodel.TskDataException; /** * */ public class Sample { public static void run(String imagePath) { try { SleuthkitCase sk = SleuthkitCase.newCase(imagePath + ".db"); // initialize the case with an image String timezone = ""; AddImageProcess process = sk.makeAddImageProcess(timezone, true, false, ""); ArrayList paths = new ArrayList(); paths.add(imagePath); try { process.run(UUID.randomUUID().toString(), paths.toArray(new String[paths.size()]), 0); } catch (TskDataException ex) { Logger.getLogger(Sample.class.getName()).log(Level.SEVERE, null, ex); } // print out all the images found, and their children List images = 
sk.getImages(); for (Image image : images) { System.out.println("Found image: " + image.getName()); System.out.println("There are " + image.getChildren().size() + " children."); for (Content content : image.getChildren()) { System.out.println('"' + content.getName() + '"' + " is a child of " + image.getName()); } } // print out all .txt files found List files = sk.findAllFilesWhere("LOWER(name) LIKE LOWER('%.txt')"); for (AbstractFile file : files) { System.out.println("Found text file: " + file.getName()); } } catch (TskCoreException e) { System.out.println("Exception caught: " + e.getMessage()); Sample.usage(e.getMessage()); } } public static void usage(String error) { System.out.println("Usage: ant -Dimage:{image string} run-sample"); if (error.contains("deleted first")) { System.out.println("A database for the image already exists. Delete it to run this sample again."); } else if (error.contains("unable to open database")) { System.out.println("Image must be encapsulated by double quotes. Ex: ant -Dimage=\"C:\\Users\\You\\image.E01\" run-sample"); } } public static void main(String[] args) { Sample.run(args[0]); } } sleuthkit-4.11.1/bindings/java/jni/dataModel_SleuthkitJNI.cpp000644 000765 000024 00000222260 14137073413 024704 0ustar00carrierstaff000000 000000 /* ** dataModel_SleuthkitJNI ** The Sleuth Kit ** ** Brian Carrier [carrier sleuthkit [dot] org] ** Copyright (c) 2010-2018 Brian Carrier. 
All Rights reserved ** ** This software is distributed under the Common Public License 1.0 ** */ #include "tsk/tsk_tools_i.h" #include "tsk/auto/tsk_case_db.h" #include "tsk/hashdb/tsk_hash_info.h" #include "tsk/auto/tsk_is_image_supported.h" #include "tsk/img/img_writer.h" #include "tsk/img/raw.h" #include "auto_db_java.h" #if HAVE_LIBEWF #include "tsk/img/ewf.h" #include "tsk/img/tsk_img_i.h" #endif #include "jni.h" #include "dataModel_SleuthkitJNI.h" #include #include #include #include #include #include #include using std::string; using std::vector; using std::map; using std::stringstream; static std::vector hashDbs; /* * JNI file handle structure encapsulates both * TSK_FS_FILE file handle and TSK_FS_ATTR attribute * to support multiple attributes for the same file. * TSK_FS_FILE still needs be maintained for opening and closing. */ typedef struct { uint32_t tag; TSK_FS_FILE *fs_file; TSK_FS_ATTR *fs_attr; } TSK_JNI_FILEHANDLE; #define TSK_JNI_FILEHANDLE_TAG 0x10101214 //stack-allocated buffer size for read method #define FIXED_BUF_SIZE (16 * 1024) /** * Sets flag to throw an TskCoreException back up to the Java code with a specific message. * Note: exception is thrown to Java code after the native function returns * not when setThrowTskCoreError() is invoked - this must be addressed in the code following the exception * @param the java environment to send the exception to * @param msg message string */ static void setThrowTskCoreError(JNIEnv * env, const char *msg) { jclass exception; exception = env->FindClass("org/sleuthkit/datamodel/TskCoreException"); env->ThrowNew(exception, msg); } /** * Sets flag to throw an TskCoreException back up to the Java code with the currently set error message. 
* Note: exception is thrown to Java code after the native function returns * not when setThrowTskCoreError() is invoked - this must be addressed in the code following the exception * @param the java environment to send the exception to */ static void setThrowTskCoreError(JNIEnv * env) { const char *msg = tsk_error_get(); setThrowTskCoreError(env, msg); } /** * Sets flag to throw an TskDataException back up to the Java code with a specific message. * Note: exception is thrown to Java code after the native function returns * not when setThrowTskDataError() is invoked - this must be addressed in the code following the exception * @param the java environment to send the exception to * @param msg message string */ static void setThrowTskDataError(JNIEnv * env, const char *msg) { jclass exception; exception = env->FindClass("org/sleuthkit/datamodel/TskDataException"); env->ThrowNew(exception, msg); } #if 0 /** * Sets flag to throw an TskDataException back up to the Java code with the currently set error message. * Note: exception is thrown to Java code after the native function returns * not when setThrowTskDataError() is invoked - this must be addressed in the code following the exception * @param the java environment to send the exception to */ static void setThrowTskDataError(JNIEnv * env) { const char *msg = tsk_error_get(); setThrowTskDataError(env, msg); } #endif /***** Methods to cast from jlong to data type and check tags They all throw an exception if the incorrect type is passed in. 
*****/ static TSK_IMG_INFO * castImgInfo(JNIEnv * env, jlong ptr) { TSK_IMG_INFO *lcl = (TSK_IMG_INFO *) ptr; if (!lcl || lcl->tag != TSK_IMG_INFO_TAG) { setThrowTskCoreError(env, "Invalid IMG_INFO object"); return 0; } return lcl; } static TSK_VS_INFO * castVsInfo(JNIEnv * env, jlong ptr) { TSK_VS_INFO *lcl = (TSK_VS_INFO *) ptr; if (!lcl || lcl->tag != TSK_VS_INFO_TAG) { setThrowTskCoreError(env, "Invalid VS_INFO object"); return 0; } // verify that image handle is still open if (!castImgInfo(env, (jlong) lcl->img_info)) { return 0; } return lcl; } static TSK_VS_PART_INFO * castVsPartInfo(JNIEnv * env, jlong ptr) { TSK_VS_PART_INFO *lcl = (TSK_VS_PART_INFO *) ptr; if (!lcl || lcl->tag != TSK_VS_PART_INFO_TAG) { setThrowTskCoreError(env, "Invalid VS_PART_INFO object"); return 0; } // verify that all handles are still open if (!castVsInfo(env, (jlong) lcl->vs)) { return 0; } return lcl; } static TSK_POOL_INFO * castPoolInfo(JNIEnv * env, jlong ptr) { TSK_POOL_INFO *lcl = (TSK_POOL_INFO *)ptr; if (!lcl || lcl->tag != TSK_POOL_INFO_TAG) { setThrowTskCoreError(env, "Invalid TSK_POOL_INFO object"); return 0; } return lcl; } static TSK_FS_INFO * castFsInfo(JNIEnv * env, jlong ptr) { TSK_FS_INFO *lcl = (TSK_FS_INFO *) ptr; if (!lcl || lcl->tag != TSK_FS_INFO_TAG) { setThrowTskCoreError(env, "Invalid FS_INFO object"); return 0; } // verify that image handle is still open if (!castImgInfo(env, (jlong) lcl->img_info)) { return 0; } return lcl; } static TSK_FS_FILE * castFsFile(JNIEnv * env, jlong ptr) { TSK_FS_FILE *lcl = (TSK_FS_FILE *)ptr; if (lcl == NULL || lcl->tag != TSK_FS_FILE_TAG) { setThrowTskCoreError(env, "Invalid FS_FILE object"); return 0; } // verify that file system handle is still open if (!castFsInfo(env, (jlong)lcl->fs_info)) { return 0; } return lcl; } static TSK_JNI_FILEHANDLE * castJniFileHandle(JNIEnv * env, jlong ptr) { TSK_JNI_FILEHANDLE *lcl = (TSK_JNI_FILEHANDLE *) ptr; if (!lcl || lcl->tag != TSK_JNI_FILEHANDLE_TAG) { setThrowTskCoreError(env, 
"Invalid TSK_JNI_FILEHANDLE object"); return 0; } // verify that all handles are still open if (!castFsFile(env, (jlong) lcl->fs_file)) { return 0; } return lcl; } static TskCaseDb * castCaseDb(JNIEnv * env, jlong ptr) { TskCaseDb *lcl = ((TskCaseDb *) ptr); if (lcl == NULL || lcl->m_tag != TSK_CASE_DB_TAG) { setThrowTskCoreError(env, "Invalid TskCaseDb object"); return 0; } return lcl; } /** * Convert a jstring (UTF-8) to a TCHAR to pass into TSK methods. * @param buffer Buffer to store resulting string into * @param size Length of buffer * @param strJ string to convert * @returns 1 on error */ static int toTCHAR(JNIEnv * env, TSK_TCHAR * buffer, size_t size, jstring strJ) { jboolean isCopy; char *str8 = (char *) env->GetStringUTFChars(strJ, &isCopy); #ifdef TSK_WIN32 // Windows TCHAR is UTF16 in Windows, so convert UTF16 *utf16 = (UTF16 *) buffer; UTF8 *utf8 = (UTF8 *) str8;; TSKConversionResult retval; size_t lengthOfUtf8 = strlen(str8); retval = tsk_UTF8toUTF16((const UTF8 **) &utf8, &utf8[lengthOfUtf8], &utf16, &utf16[size], TSKlenientConversion); if (retval != TSKconversionOK) { tsk_error_set_errno(TSK_ERR_IMG_CONVERT); tsk_error_set_errstr ("toTCHAR: Error converting UTF8 %s to UTF16, error %d", utf8, retval); env->ReleaseStringUTFChars(strJ, str8); return 1; } // "utf16" now points to last char. Need to NULL terminate the string. *utf16 = '\0'; #else // nothing to convert. Keep it as UTF8 strncpy((char *)&buffer[0], str8, size); #endif env->ReleaseStringUTFChars(strJ, str8); return 0; } /** * Opens an existing hash database. * @param env Pointer to Java environment from which this method was called. * @param obj The Java object from which this method was called. * @param pathJ The path to the hash database. * @return A handle for the hash database. 
*/ JNIEXPORT jint JNICALL Java_org_sleuthkit_datamodel_SleuthkitJNI_hashDbOpenNat(JNIEnv * env, jclass obj, jstring pathJ) { TSK_TCHAR pathT[1024]; toTCHAR(env, pathT, 1024, pathJ); TSK_HDB_INFO *db = tsk_hdb_open(pathT, TSK_HDB_OPEN_NONE); if (!db) { setThrowTskCoreError(env, tsk_error_get_errstr()); return -1; } // The index of the pointer in the vector is used as a handle for the // database. hashDbs.push_back(db); return (jint)hashDbs.size(); } /** * Creates a new hash database. * @param env Pointer to Java environment from which this method was called. * @param obj The Java object from which this method was called. * @param pathJ The path to the hash database. * @return A handle for the hash database. */ JNIEXPORT jint JNICALL Java_org_sleuthkit_datamodel_SleuthkitJNI_hashDbNewNat(JNIEnv * env, jclass obj, jstring pathJ) { TSK_TCHAR pathT[1024]; toTCHAR(env, pathT, 1024, pathJ); if (tsk_hdb_create(pathT)) { setThrowTskCoreError(env, tsk_error_get_errstr()); return -1; } TSK_HDB_INFO *db = tsk_hdb_open(pathT, TSK_HDB_OPEN_NONE); if (!db) { setThrowTskCoreError(env, tsk_error_get_errstr()); return -1; } // The index of the pointer in the vector is used as a handle for the // database. hashDbs.push_back(db); return (jint)hashDbs.size(); } /** * Begins a hash database transaction. * @param env Pointer to Java environment from which this method was called. * @param obj The Java object from which this method was called. * @param dbHandle A handle for the hash database. * @return 1 on error and 0 on success. */ JNIEXPORT jint JNICALL Java_org_sleuthkit_datamodel_SleuthkitJNI_hashDbBeginTransactionNat( JNIEnv *env, jclass obj, jint dbHandle) { if((size_t)dbHandle > hashDbs.size()) { setThrowTskCoreError(env, "Invalid database handle"); return 1; } TSK_HDB_INFO *db = hashDbs.at(dbHandle - 1); if (!db) { setThrowTskCoreError(env, "Invalid database handle"); return 1; } return tsk_hdb_begin_transaction(db); } /** * Commits a hash database transaction. 
* @param env Pointer to Java environment from which this method was called. * @param obj The Java object from which this method was called. * @param dbHandle A handle for the hash database. * @return 1 on error and 0 on success. */ JNIEXPORT jint JNICALL Java_org_sleuthkit_datamodel_SleuthkitJNI_hashDbCommitTransactionNat( JNIEnv *env, jclass obj, jint dbHandle) { if((size_t)dbHandle > hashDbs.size()) { setThrowTskCoreError(env, "Invalid database handle"); return 1; } TSK_HDB_INFO *db = hashDbs.at(dbHandle - 1); if (!db) { setThrowTskCoreError(env, "Invalid database handle"); return 1; } return tsk_hdb_commit_transaction(db); } /** * Rolls back a hash database transaction. * @param env Pointer to Java environment from which this method was called. * @param obj The Java object from which this method was called. * @param dbHandle A handle for the hash database. * @return 1 on error and 0 on success. */ JNIEXPORT jint JNICALL Java_org_sleuthkit_datamodel_SleuthkitJNI_hashDbRollbackTransactionNat( JNIEnv *env, jclass obj, jint dbHandle) { if((size_t)dbHandle > hashDbs.size()) { setThrowTskCoreError(env, "Invalid database handle"); return 1; } TSK_HDB_INFO *db = hashDbs.at(dbHandle-1); if (!db) { setThrowTskCoreError(env, "Invalid database handle"); return 1; } return tsk_hdb_rollback_transaction(db); } /** * Adds data to a hash database. * @param env Pointer to Java environment from which this method was called. * @param obj The Java object from which this method was called. * @param filenameJ Name of the file that was hashed (can be null). * @param hashMd5J MD5 hash of file contents (can be null). * @param hashSha1J SHA-1 hash of file contents (can be null). * @param hashSha256J Text of SHA256 hash (can be null). * @param dbHandle A handle for the hash database. * @return 1 on error and 0 on success. 
*/ JNIEXPORT jint JNICALL Java_org_sleuthkit_datamodel_SleuthkitJNI_hashDbAddEntryNat(JNIEnv * env, jclass obj, jstring filenameJ, jstring hashMd5J, jstring hashSha1J, jstring hashSha256J, jstring commentJ, jint dbHandle) { if((size_t) dbHandle > hashDbs.size()) { setThrowTskCoreError(env, "Invalid database handle"); return 1; } TSK_HDB_INFO * db = hashDbs.at(dbHandle-1); if(!db) { setThrowTskCoreError(env, "Invalid database handle"); return 1; } if(!db->accepts_updates()) { setThrowTskCoreError(env, "Database does not accept updates"); return 1; } jboolean isCopy; const char * name = filenameJ ? (const char *) env->GetStringUTFChars(filenameJ, &isCopy) : NULL; const char * md5 = hashMd5J ? (const char *) env->GetStringUTFChars(hashMd5J, &isCopy) : NULL; const char * sha1 = hashSha1J ? (const char *) env->GetStringUTFChars(hashSha1J, &isCopy) : NULL; const char * sha256 = hashSha256J ? (const char *) env->GetStringUTFChars(hashSha256J, &isCopy) : NULL; const char * comment = commentJ ? (const char *) env->GetStringUTFChars(commentJ, &isCopy) : NULL; if (tsk_hdb_add_entry(db, name, md5, sha1, sha256, comment)) { setThrowTskCoreError(env, tsk_error_get_errstr()); } if (filenameJ) { env->ReleaseStringUTFChars(filenameJ, (const char *) name); } if (hashMd5J) { env->ReleaseStringUTFChars(hashMd5J, (const char *) md5); } if (hashSha1J) { env->ReleaseStringUTFChars(hashSha1J, (const char *) sha1); } if (hashSha256J) { env->ReleaseStringUTFChars(hashSha256J, (const char *) sha256); } if (commentJ) { env->ReleaseStringUTFChars(commentJ, (const char *) comment); } return 0; } /** * Queries whether or not a hash database accepts updates. * @param env Pointer to Java environment from which this method was called. * @param obj The Java object from which this method was called. * @param dbHandle A handle for the hash database. * @return True if hash database can be updated. 
*/ JNIEXPORT jboolean JNICALL Java_org_sleuthkit_datamodel_SleuthkitJNI_hashDbIsUpdateableNat(JNIEnv * env, jclass obj, jint dbHandle) { if((size_t)dbHandle > hashDbs.size()) { setThrowTskCoreError(env, "Invalid database handle"); return (jboolean)false; } TSK_HDB_INFO *db = hashDbs.at(dbHandle-1); if (db == NULL) { setThrowTskCoreError(env, "Invalid database handle"); return (jboolean)false; } return (jboolean)(tsk_hdb_accepts_updates(db) == static_cast(1)); } /** * Queries whether or not a hash database can be re-indexed. Only text-format * databases with external indexes can be re-indexed. * @param env Pointer to Java environment from which this method was called. * @param obj The Java object from which this method was called. * @param dbHandle A handle for the hash database. * @return True if hash database can be indexed. */ JNIEXPORT jboolean JNICALL Java_org_sleuthkit_datamodel_SleuthkitJNI_hashDbIsReindexableNat(JNIEnv * env, jclass obj, jint dbHandle) { if((size_t)dbHandle > hashDbs.size()) { setThrowTskCoreError(env, "Invalid database handle"); return (jboolean)false; } TSK_HDB_INFO *db = hashDbs.at(dbHandle-1); if (db == NULL) { setThrowTskCoreError(env, "Invalid database handle"); return (jboolean)false; } return (jboolean)((tsk_hdb_uses_external_indexes(db) == 1) && (tsk_hdb_is_idx_only(db) == 0)); } /** * Gets the path of a hash database. * @param env Pointer to Java environment from which this method was called. * @param obj The Java object from which this method was called. * @param dbHandle A handle for the hash database. * @return Path to the hash database or "None" if no path is available. 
*/ JNIEXPORT jstring JNICALL Java_org_sleuthkit_datamodel_SleuthkitJNI_hashDbPathNat(JNIEnv * env, jclass obj, jint dbHandle) { if((size_t)dbHandle > hashDbs.size()) { setThrowTskCoreError(env, "Invalid database handle"); return NULL; } TSK_HDB_INFO *db = hashDbs.at(dbHandle-1); if (db == NULL) { setThrowTskCoreError(env, "Invalid database handle"); return NULL; } jstring jPath = NULL; const TSK_TCHAR *dbPath = tsk_hdb_get_db_path(db); if (NULL != dbPath) { const size_t pathLength = TSTRLEN(dbPath); char *cPath = (char*)tsk_malloc((pathLength + 1) * sizeof(char)); snprintf(cPath, pathLength + 1, "%" PRIttocTSK, dbPath); jPath = env->NewStringUTF(cPath); free(cPath); } else { jPath = env->NewStringUTF("None"); } return jPath; } /* * Gets the path of the external MD5 hash index for a text-format database. * @param env Pointer to Java environment from which this method was called. * @param obj The Java object from which this method was called. * @param dbHandle A handle for the hash database. * @return Path to the requested index or "None" if no path is available. */ JNIEXPORT jstring JNICALL Java_org_sleuthkit_datamodel_SleuthkitJNI_hashDbIndexPathNat(JNIEnv * env, jclass obj, jint dbHandle) { if((size_t)dbHandle > hashDbs.size()) { setThrowTskCoreError(env, "Invalid database handle"); return NULL; } TSK_HDB_INFO *db = hashDbs.at(dbHandle-1); if (db == NULL) { setThrowTskCoreError(env, "Invalid database handle"); return NULL; } // Currently only supporting md5 indexes through Java binding. 
jstring jPath = NULL; const TSK_TCHAR *indexPath = tsk_hdb_get_idx_path(db, TSK_HDB_HTYPE_MD5_ID); if (NULL != indexPath) { const size_t pathLength = TSTRLEN(indexPath); char *cPath = (char*)tsk_malloc((pathLength + 1) * sizeof(char)); snprintf(cPath, pathLength + 1, "%" PRIttocTSK, indexPath); jPath = env->NewStringUTF(cPath); free(cPath); } else { jPath = env->NewStringUTF("None"); } return jPath; } /** * Queries whether the hash database is actually an external index for a * text-format database that is being used for simple yes/no look ups in * place of the roginal hash database. * @param env Pointer to Java environment from which this method was called. * @param obj The Java object from which this method was called. * @param dbHandle A handle for the hash database. * @return True if the hash database is an external index serving as a * database. */ JNIEXPORT jboolean JNICALL Java_org_sleuthkit_datamodel_SleuthkitJNI_hashDbIsIdxOnlyNat(JNIEnv * env, jclass obj, jint dbHandle) { if((size_t)dbHandle > hashDbs.size()) { setThrowTskCoreError(env, "Invalid database handle"); return (jboolean)false; } TSK_HDB_INFO *db = hashDbs.at(dbHandle-1); if (db == NULL) { setThrowTskCoreError(env, "Invalid database handle"); return (jboolean)false; } return (jboolean)(tsk_hdb_is_idx_only(db) == static_cast(1)); } /** * Gets the display name of a hash database. * @param env Pointer to Java environment from which this method was called. * @param obj The Java object from which this method was called. * @param dbHandle A handle for the hash database. * @return The display name. 
*/ JNIEXPORT jstring JNICALL Java_org_sleuthkit_datamodel_SleuthkitJNI_hashDbGetDisplayName (JNIEnv * env, jclass obj, jint dbHandle) { if((size_t)dbHandle > hashDbs.size()) { setThrowTskCoreError(env, "Invalid database handle"); return NULL; } TSK_HDB_INFO *db = hashDbs.at(dbHandle-1); if (db == NULL) { setThrowTskCoreError(env, "Invalid database handle"); return NULL; } jstring j_name = NULL; const char *db_name = tsk_hdb_get_display_name(db); if (NULL != db_name) { j_name = env->NewStringUTF(db_name); } return j_name; } /** * Closes all open hash databases. * @param env Pointer to Java environment from which this method was called. * @param obj The Java object from which this method was called. * @param dbHandle A handle for the hash database. */ JNIEXPORT void JNICALL Java_org_sleuthkit_datamodel_SleuthkitJNI_hashDbCloseAll(JNIEnv * env, jclass obj) { for (std::vector::iterator it = hashDbs.begin(); it != hashDbs.end(); ++it) { if (NULL != *it) { tsk_hdb_close(*it); } } hashDbs.clear(); } /** * Closes a hash database. * @param env Pointer to Java environment from which this method was called. * @param obj The Java object from which this method was called. * @param dbHandle A handle for the hash database. */ JNIEXPORT void JNICALL Java_org_sleuthkit_datamodel_SleuthkitJNI_hashDbClose(JNIEnv * env, jclass obj, jint dbHandle) { if((size_t)dbHandle > hashDbs.size()) { setThrowTskCoreError(env, "Invalid database handle"); return; } TSK_HDB_INFO *db = hashDbs.at(dbHandle-1); if (db == NULL) { setThrowTskCoreError(env, "Invalid database handle"); return; } tsk_hdb_close(db); // Do NOT erase the element because that would shift the indices, // messing up the existing handles. hashDbs.at(dbHandle-1) = NULL; } /** * Looks up a hash in a hash database. * @param env Pointer to Java environment from which this method was called. * @param obj The Java object from which this method was called. * @param dbHandle A handle for the hash database. 
* @return True if the hash is found in the hash database, false otherwise. */ JNIEXPORT jboolean JNICALL Java_org_sleuthkit_datamodel_SleuthkitJNI_hashDbLookup (JNIEnv * env, jclass obj, jstring hash, jint dbHandle) { if ((size_t)dbHandle > hashDbs.size()) { setThrowTskCoreError(env, "Invalid database handle"); return (jboolean)false; } TSK_HDB_INFO *db = hashDbs.at(dbHandle-1); if (db == NULL) { setThrowTskCoreError(env, "Invalid database handle"); return (jboolean)false; } jboolean isCopy; const char *cHashStr = (const char *) env->GetStringUTFChars(hash, &isCopy); jboolean file_known = false; int8_t retval = tsk_hdb_lookup_str(db, cHashStr, TSK_HDB_FLAG_QUICK, NULL, NULL); if (retval == -1) { setThrowTskCoreError(env, tsk_error_get_errstr()); } else if (retval) { file_known = true; } env->ReleaseStringUTFChars(hash, (const char *) cHashStr); return file_known; } /** * Looks up a hash in a hash database. * @param env Pointer to Java environment from which this method was called. * @param obj The Java object from which this method was called. * @param dbHandle A handle for the hash database. * @return A HashInfo object if the hash is found, NULL otherwise. 
*/ JNIEXPORT jobject JNICALL Java_org_sleuthkit_datamodel_SleuthkitJNI_hashDbLookupVerbose (JNIEnv * env, jclass obj, jstring hash, jint dbHandle) { if ((size_t)dbHandle > hashDbs.size()) { setThrowTskCoreError(env, "Invalid database handle"); return NULL; } TSK_HDB_INFO *db = hashDbs.at(dbHandle-1); if (db == NULL) { setThrowTskCoreError(env, "Invalid database handle"); return NULL; } jboolean isCopy; const char *inputHash = (const char *) env->GetStringUTFChars(hash, &isCopy); TskHashInfo result; int8_t returnCode = tsk_hdb_lookup_verbose_str(db, inputHash, (void*)&result); env->ReleaseStringUTFChars(hash, (const char *) inputHash); if (returnCode == -1) { setThrowTskCoreError(env, tsk_error_get_errstr()); return NULL; } else if (returnCode == 0) { return NULL; } // Convert the hashes from the hash database so they can be written into // the Java version of a HashInfo object. const char *md5 = result.hashMd5.c_str(); jstring md5j = env->NewStringUTF(md5); const char *sha1 = result.hashSha1.c_str(); jstring sha1j = env->NewStringUTF(sha1); const char *sha256 = result.hashSha2_256.c_str(); jstring sha256j = env->NewStringUTF(sha256); // Create and return a Java HashInfo object. 
jclass clazz; clazz = env->FindClass("org/sleuthkit/datamodel/HashHitInfo"); jmethodID ctor = env->GetMethodID(clazz, "", "(Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;)V"); jmethodID addName = env->GetMethodID(clazz, "addName", "(Ljava/lang/String;)V"); jmethodID addComment = env->GetMethodID(clazz, "addComment", "(Ljava/lang/String;)V"); jobject hashInfo = env->NewObject(clazz, ctor, md5j, sha1j, sha256j); for (std::vector::iterator it = result.fileNames.begin(); it != result.fileNames.end(); ++it) { jstring namej = env->NewStringUTF((*it).c_str()); env->CallVoidMethod(hashInfo, addName, namej); } for (std::vector::iterator it = result.comments.begin(); it != result.comments.end(); ++it) { jstring commentj = env->NewStringUTF((*it).c_str()); env->CallVoidMethod(hashInfo, addComment, commentj); } return hashInfo; } /* * Initialize a process for adding an image to a case database. * * @param env Pointer to java environment. * @param obj Pointer the Java class object. * @param timeZone The time zone for the image. * @param addUnallocSpace Pass true to create virtual files for unallocated space. Ignored if addFileSystems is false. * @param skipFatFsOrphans Pass true to skip processing of orphan files for FAT file systems. Ignored if addFileSystems is false. * @param hostId Id of the host (already in the database). * * @return A pointer to the process (TskAutoDbJava object) or NULL on error. */ JNIEXPORT jlong JNICALL Java_org_sleuthkit_datamodel_SleuthkitJNI_initAddImgNat(JNIEnv * env, jclass obj, jobject callbackObj, jstring timeZone, jboolean addUnallocSpace, jboolean skipFatFsOrphans) { return Java_org_sleuthkit_datamodel_SleuthkitJNI_initializeAddImgNat(env, obj, callbackObj, timeZone, true, addUnallocSpace, skipFatFsOrphans); } /* * Initialize a process for adding an image to a case database. * * @param env Pointer to java environment. * @param obj Pointer the Java class object. * @param timeZone The time zone for the image. 
* @param addFileSystems Pass true to attempt to add file systems within the image to the case database. * @param addUnallocSpace Pass true to create virtual files for unallocated space. Ignored if addFileSystems is false. * @param skipFatFsOrphans Pass true to skip processing of orphan files for FAT file systems. Ignored if addFileSystems is false. * @param hostId The ID of the host (already in database). * * @return A pointer to the process (TskAutoDbJava object) or NULL on error. */ JNIEXPORT jlong JNICALL Java_org_sleuthkit_datamodel_SleuthkitJNI_initializeAddImgNat(JNIEnv * env, jclass obj, jobject callbackObj, jstring timeZone, jboolean addFileSystems, jboolean addUnallocSpace, jboolean skipFatFsOrphans) { jboolean isCopy; if (env->GetStringUTFLength(timeZone) > 0) { const char *tzstr = env->GetStringUTFChars(timeZone, &isCopy); if (strlen(tzstr) > 64) { env->ReleaseStringUTFChars(timeZone, tzstr); stringstream ss; ss << "Timezone is too long"; setThrowTskCoreError(env, ss.str().c_str()); return 0; } char envstr[70]; snprintf(envstr, 70, "TZ=%s", tzstr); env->ReleaseStringUTFChars(timeZone, tzstr); if (0 != putenv(envstr)) { stringstream ss; ss << "Error setting timezone environment, using: "; ss << envstr; setThrowTskCoreError(env, ss.str().c_str()); return 0; } /* we should be checking this somehow */ TZSET(); } TskAutoDbJava *tskAutoJava = new TskAutoDbJava(); if (tskAutoJava == NULL) { setThrowTskCoreError(env, "Error creating TskAutoDbJava"); return 0; } // set the options flags tskAutoJava->setAddFileSystems(addFileSystems?true:false); if (addFileSystems) { if (addUnallocSpace) { // Minimum size of unalloc files: 500 MB, maximum size: 1 GB tskAutoJava->setAddUnallocSpace((int64_t)500 * 1024 * 1024, (int64_t)1024 * 1024 * 1024); } else { tskAutoJava->setAddUnallocSpace(false); } tskAutoJava->setNoFatFsOrphans(skipFatFsOrphans?true:false); } else { tskAutoJava->setAddUnallocSpace(false); tskAutoJava->setNoFatFsOrphans(true); } // Set up the callbacks if 
(TSK_ERR == tskAutoJava->initializeJni(env, callbackObj)) { setThrowTskCoreError(env, "Error initializing JNI callbacks"); return 0; } return (jlong)tskAutoJava; } /* * Add an image to a database using a pre-created process, which can be cancelled. * MUST call commitAddImg or revertAddImg afterwards once runAddImg returns. If there is an * error, you do not need to call revert or commit and the 'process' handle will be deleted. * * @param env pointer to java environment this was called from * @param obj the java object this was called from * @param process the add-image process created by initAddImgNat * @param deviceId An ASCII-printable identifier for the device associated with the data source that is intended to be unique across multiple cases (e.g., a UUID) * @param paths array of strings from java, the paths to the image parts * @param numImgs number of image parts * @param timeZone the timezone the image is from */ JNIEXPORT void JNICALL Java_org_sleuthkit_datamodel_SleuthkitJNI_runOpenAndAddImgNat(JNIEnv * env, jclass obj, jlong process, jstring deviceId, jobjectArray paths, jint numImgs, jstring timeZone) { TskAutoDbJava *tskAuto = ((TskAutoDbJava *) process); if (!tskAuto || tskAuto->m_tag != TSK_AUTO_TAG) { setThrowTskCoreError(env, "runOpenAndAddImgNat: Invalid TskAutoDbJava object passed in"); return; } jboolean isCopy; const char *device_id = NULL; if (NULL != deviceId) { device_id = (const char *) env->GetStringUTFChars(deviceId, &isCopy); if (NULL == device_id) { setThrowTskCoreError(env, "runOpenAndAddImgNat: Can't convert data source id string"); return; } } // Get pointers to each of the image file names. 
char **imagepaths8 = (char **) tsk_malloc(numImgs * sizeof(char *)); if (imagepaths8 == NULL) { setThrowTskCoreError(env); return; } for (int i = 0; i < numImgs; i++) { jstring jsPath = (jstring) env->GetObjectArrayElement(paths, i); imagepaths8[i] = (char *) env-> GetStringUTFChars(jsPath, &isCopy); if (imagepaths8[i] == NULL) { setThrowTskCoreError(env, "runOpenAndAddImgNat: Can't convert path strings."); // @@@ should cleanup here paths that have been converted in imagepaths8[i] return; } } // Set the time zone. if (env->GetStringLength(timeZone) > 0) { const char *time_zone = env->GetStringUTFChars(timeZone, &isCopy); tskAuto->setTz(string(time_zone)); env->ReleaseStringUTFChars(timeZone, time_zone); } // Add the data source. uint8_t ret = 0; if ( (ret = tskAuto->startAddImage((int) numImgs, imagepaths8, TSK_IMG_TYPE_DETECT, 0, device_id)) != 0) { stringstream msgss; msgss << "Errors occurred while ingesting image " << std::endl; vector errors = tskAuto->getErrorList(); for (size_t i = 0; i < errors.size(); i++) { msgss << (i+1) << ". "; msgss << (TskAuto::errorRecordToString(errors[i])); msgss << " " << std::endl; } if (ret == 1) { // Fatal error setThrowTskCoreError(env, msgss.str().c_str()); } else if (ret == 2) { // Non-fatal error setThrowTskDataError(env, msgss.str().c_str()); } } // @@@ SHOULD WE CLOSE HERE before we commit / revert etc. //close image first before freeing the image paths tskAuto->closeImage(); // Cleanup for (int i = 0; i < numImgs; i++) { jstring jsPath = (jstring) env->GetObjectArrayElement(paths, i); env-> ReleaseStringUTFChars(jsPath, imagepaths8[i]); env->DeleteLocalRef(jsPath); } free(imagepaths8); env->ReleaseStringUTFChars(deviceId, (const char *) device_id); // // Must call finishAddImgNat to free the TskAutoDb } /* * Add an image to a database using a pre-created process, which can be cancelled. * MUST call commitAddImg or revertAddImg afterwards once runAddImg returns. 
If there is an * error, you do not need to call revert or commit and the 'process' handle will be deleted. * * @param env pointer to java environment this was called from * @param obj the java object this was called from * @param process the add-image process created by initAddImgNat * @param deviceId An ASCII-printable identifier for the device associated with the data source that is intended to be unique across multiple cases (e.g., a UUID) * @param a_img_info image info object * @param img_id The object ID of the image in the database * @param timeZone the timezone the image is from */ JNIEXPORT void JNICALL Java_org_sleuthkit_datamodel_SleuthkitJNI_runAddImgNat(JNIEnv * env, jclass obj, jlong process, jstring deviceId, jlong a_img_info, jlong img_id, jstring timeZone, jstring imageWriterPathJ) { TskAutoDbJava *tskAuto = ((TskAutoDbJava *)process); if (!tskAuto || tskAuto->m_tag != TSK_AUTO_TAG) { setThrowTskCoreError(env, "runAddImgNat: Invalid TskAutoDbJava object passed in"); return; } jboolean isCopy; const char *device_id = NULL; if (NULL != deviceId) { device_id = (const char *)env->GetStringUTFChars(deviceId, &isCopy); if (NULL == device_id) { setThrowTskCoreError(env, "runAddImgNat: Can't convert data source id string"); return; } } // Set the data source object ID tskAuto->setDatasourceObjId(img_id); // Set the time zone. 
if (env->GetStringLength(timeZone) > 0) { const char *time_zone = env->GetStringUTFChars(timeZone, &isCopy); tskAuto->setTz(string(time_zone)); env->ReleaseStringUTFChars(timeZone, time_zone); } // Set up the TSK_IMG_INFO object TSK_IMG_INFO *img_info = castImgInfo(env, a_img_info); // Set up image writer, if the output path is present if (env->GetStringLength(imageWriterPathJ) > 0) { const char *imageWriterPath = env->GetStringUTFChars(imageWriterPathJ, &isCopy); if (TSK_OK != tskAuto->enableImageWriter(imageWriterPath)) { env->ReleaseStringUTFChars(imageWriterPathJ, imageWriterPath); setThrowTskCoreError(env, "runAddImgNat: error enabling image writer."); return; } env->ReleaseStringUTFChars(imageWriterPathJ, imageWriterPath); } else { tskAuto->disableImageWriter(); } // Add the data source. uint8_t ret = 0; if ((ret = tskAuto->startAddImage(img_info, device_id)) != 0) { stringstream msgss; msgss << "Errors occurred while ingesting image " << std::endl; vector errors = tskAuto->getErrorList(); for (size_t i = 0; i < errors.size(); i++) { msgss << (i + 1) << ". "; msgss << (TskAuto::errorRecordToString(errors[i])); msgss << " " << std::endl; } if (ret == 1) { // Fatal error setThrowTskCoreError(env, msgss.str().c_str()); } else if (ret == 2) { // Non-fatal error setThrowTskDataError(env, msgss.str().c_str()); } } // @@@ SHOULD WE CLOSE HERE before we commit / revert etc. //close image first before freeing the image paths tskAuto->closeImage(); // Cleanup env->ReleaseStringUTFChars(deviceId, (const char *)device_id); // Must call finishAddImgNat to free the TskAutoDb } /* * Cancel the given add-image process. 
* @param env pointer to java environment this was called from * @param obj the java object this was called from * @param process the add-image process created by initAddImgNat */ JNIEXPORT void JNICALL Java_org_sleuthkit_datamodel_SleuthkitJNI_stopAddImgNat(JNIEnv * env, jclass obj, jlong process) { TskAutoDbJava *tskAuto = ((TskAutoDbJava *) process); if (!tskAuto || tskAuto->m_tag != TSK_AUTO_TAG) { setThrowTskCoreError(env, "stopAddImgNat: Invalid TskAutoDbJava object passed in"); return; } tskAuto->stopAddImage(); } /* * Completes the given add-image process. Deletes the 'process' handle and * returns the ID of the added image. * @param env pointer to java environment this was called from * @param obj the java object this was called from * @param process the add-image process created by initAddImgNat */ JNIEXPORT jlong JNICALL Java_org_sleuthkit_datamodel_SleuthkitJNI_finishAddImgNat(JNIEnv * env, jclass obj, jlong process) { TskAutoDbJava *tskAuto = ((TskAutoDbJava *)process); if (!tskAuto || tskAuto->m_tag != TSK_AUTO_TAG) { setThrowTskCoreError(env, "commitAddImgNat: Invalid TskAutoDb object passed in"); return -1; } int64_t imgId = tskAuto->getImageID(); tskAuto->close(); delete tskAuto; tskAuto = 0; if (imgId == -1) { setThrowTskCoreError(env); return -1; } return imgId; } /* * Open an image pointer for the given image. 
* @return the created TSK_IMG_INFO pointer * @param env pointer to java environment this was called from * @param obj the java object this was called from * @param paths the paths to the image parts * @param num_imgs number of image parts * @param sector_size the sector size (use '0' for autodetect) */ JNIEXPORT jlong JNICALL Java_org_sleuthkit_datamodel_SleuthkitJNI_openImgNat(JNIEnv * env, jclass obj, jobjectArray paths, jint num_imgs, jint sector_size) { TSK_IMG_INFO *img_info; jboolean isCopy; // get pointers to each of the file names char **imagepaths8 = (char **)tsk_malloc(num_imgs * sizeof(char *)); if (imagepaths8 == NULL) { setThrowTskCoreError(env); return 0; } for (int i = 0; i < num_imgs; i++) { imagepaths8[i] = (char *)env-> GetStringUTFChars((jstring)env->GetObjectArrayElement(paths, i), &isCopy); // @@@ Error check } // open the image img_info = tsk_img_open_utf8((int)num_imgs, imagepaths8, TSK_IMG_TYPE_DETECT, sector_size); if (img_info == NULL) { setThrowTskCoreError(env, tsk_error_get()); } // cleanup for (int i = 0; i < num_imgs; i++) { env-> ReleaseStringUTFChars((jstring) env->GetObjectArrayElement(paths, i), imagepaths8[i]); } free(imagepaths8); return (jlong) img_info; } /* * Get the full list of paths associated with an image. 
*/ JNIEXPORT jobjectArray JNICALL Java_org_sleuthkit_datamodel_SleuthkitJNI_getPathsForImageNat(JNIEnv * env, jclass obj, jlong a_img_info) { TSK_IMG_INFO *img_info = castImgInfo(env, a_img_info); if (img_info == 0) { //exception already set return 0; } char **img_ptrs; #ifdef TSK_WIN32 // convert image paths to UTF-8 img_ptrs = (char **)tsk_malloc(img_info->num_img * sizeof(char *)); if (img_ptrs == NULL) { return (jobjectArray)env->NewObjectArray(0, env->FindClass("java/lang/String"), env->NewStringUTF("")); } for (int i = 0; i < img_info->num_img; i++) { char * img2 = (char*)tsk_malloc(1024 * sizeof(char)); UTF8 *ptr8; UTF16 *ptr16; ptr8 = (UTF8 *)img2; ptr16 = (UTF16 *)img_info->images[i]; uint8_t retval = tsk_UTF16toUTF8_lclorder((const UTF16 **)&ptr16, (UTF16 *) & ptr16[TSTRLEN(img_info->images[i]) + 1], &ptr8, (UTF8 *)((uintptr_t)ptr8 + 1024), TSKlenientConversion); if (retval != TSKconversionOK) { tsk_error_reset(); tsk_error_set_errno(TSK_ERR_AUTO_UNICODE); tsk_error_set_errstr("Error converting image to UTF-8\n"); return (jobjectArray)env->NewObjectArray(0, env->FindClass("java/lang/String"), env->NewStringUTF("")); } img_ptrs[i] = img2; } #else img_ptrs = img_info->images; #endif jobjectArray path_list = (jobjectArray)env->NewObjectArray(img_info->num_img, env->FindClass("java/lang/String"), env->NewStringUTF("")); for (int i = 0; i < img_info->num_img; i++) { env->SetObjectArrayElement(path_list, i, env->NewStringUTF(img_ptrs[i])); } return path_list; } /* * Get the size of an image. */ JNIEXPORT jlong JNICALL Java_org_sleuthkit_datamodel_SleuthkitJNI_getSizeForImageNat(JNIEnv * env, jclass obj, jlong a_img_info) { TSK_IMG_INFO *img_info = castImgInfo(env, a_img_info); if (img_info == 0) { //exception already set return 0; } return img_info->size; } /* * Get the type of an image. 
*/ JNIEXPORT jlong JNICALL Java_org_sleuthkit_datamodel_SleuthkitJNI_getTypeForImageNat(JNIEnv * env, jclass obj, jlong a_img_info) { TSK_IMG_INFO *img_info = castImgInfo(env, a_img_info); if (img_info == 0) { //exception already set return 0; } return img_info->itype; } /* * Get the computed sector size of an image. */ JNIEXPORT jlong JNICALL Java_org_sleuthkit_datamodel_SleuthkitJNI_getSectorSizeForImageNat(JNIEnv * env, jclass obj, jlong a_img_info) { TSK_IMG_INFO *img_info = castImgInfo(env, a_img_info); if (img_info == 0) { //exception already set return 0; } return img_info->sector_size; } /* * Get the md5 hash of an image. */ JNIEXPORT jstring JNICALL Java_org_sleuthkit_datamodel_SleuthkitJNI_getMD5HashForImageNat(JNIEnv * env, jclass obj, jlong a_img_info) { TSK_IMG_INFO *img_info = castImgInfo(env, a_img_info); if (img_info == 0) { //exception already set return 0; } // env->NewStringUTF(img_ptrs[i]) #if HAVE_LIBEWF if (img_info->itype == TSK_IMG_TYPE_EWF_EWF) { IMG_EWF_INFO *ewf_info = (IMG_EWF_INFO *)img_info; if (ewf_info->md5hash_isset) { return env->NewStringUTF(ewf_info->md5hash); } } #endif return env->NewStringUTF(""); } /* * Get the sha1 hash of an image. */ JNIEXPORT jstring JNICALL Java_org_sleuthkit_datamodel_SleuthkitJNI_getSha1HashForImageNat(JNIEnv * env, jclass obj, jlong a_img_info) { TSK_IMG_INFO *img_info = castImgInfo(env, a_img_info); if (img_info == 0) { //exception already set return 0; } // env->NewStringUTF(img_ptrs[i]) #if HAVE_LIBEWF if (img_info->itype == TSK_IMG_TYPE_EWF_EWF) { IMG_EWF_INFO *ewf_info = (IMG_EWF_INFO *)img_info; if (ewf_info->sha1hash_isset) { return env->NewStringUTF(ewf_info->sha1hash); } } #endif return env->NewStringUTF(""); } /* * Get the collection details of an image. 
*/ JNIEXPORT jstring JNICALL Java_org_sleuthkit_datamodel_SleuthkitJNI_getCollectionDetailsForImageNat(JNIEnv * env, jclass obj, jlong a_img_info) { TSK_IMG_INFO *img_info = castImgInfo(env, a_img_info); if (img_info == 0) { //exception already set return 0; } // env->NewStringUTF(img_ptrs[i]) #if HAVE_LIBEWF if (img_info->itype == TSK_IMG_TYPE_EWF_EWF) { IMG_EWF_INFO *ewf_info = (IMG_EWF_INFO *)img_info; ewf_get_details(ewf_info); } #endif return env->NewStringUTF(""); } /* * Open the volume system at the given offset * @return the created TSK_VS_INFO pointer * @param env pointer to java environment this was called from * @param obj the java object this was called from * @param a_img_info the pointer to the parent img object * @param vsOffset the offset of the volume system in bytes */ JNIEXPORT jlong JNICALL Java_org_sleuthkit_datamodel_SleuthkitJNI_openVsNat (JNIEnv * env, jclass obj, jlong a_img_info, jlong vsOffset) { TSK_IMG_INFO *img_info = castImgInfo(env, a_img_info); if (img_info == 0) { //exception already set return 0; } TSK_VS_INFO *vs_info; vs_info = tsk_vs_open(img_info, vsOffset, TSK_VS_TYPE_DETECT); if (vs_info == NULL) { setThrowTskCoreError(env, tsk_error_get()); } return (jlong) vs_info; } /* * Open volume with the given id from the given volume system * @return the created TSK_VS_PART_INFO pointer * @param env pointer to java environment this was called from * @param obj the java object this was called from * @param a_vs_info the pointer to the parent vs object * @param vol_id the id of the volume to get */ JNIEXPORT jlong JNICALL Java_org_sleuthkit_datamodel_SleuthkitJNI_openVolNat(JNIEnv * env, jclass obj, jlong a_vs_info, jlong vol_id) { TSK_VS_INFO *vs_info = castVsInfo(env, a_vs_info); if (vs_info == 0) { //exception already set return 0; } const TSK_VS_PART_INFO *vol_part_info; vol_part_info = tsk_vs_part_get(vs_info, (TSK_PNUM_T) vol_id); if (vol_part_info == NULL) { setThrowTskCoreError(env, tsk_error_get()); } return (jlong) 
vol_part_info; } /* * Open pool with the given offset * @return the created TSK_POOL_INFO pointer * @param env pointer to java environment this was called from * @param obj the java object this was called from * @param a_img_info the pointer to the parent img object * @param offset the offset in bytes to the pool */ JNIEXPORT jlong JNICALL Java_org_sleuthkit_datamodel_SleuthkitJNI_openPoolNat(JNIEnv * env, jclass obj, jlong a_img_info, jlong offset) { TSK_IMG_INFO *img_info = castImgInfo(env, a_img_info); if (img_info == 0) { //exception already set return 0; } const TSK_POOL_INFO *pool = tsk_pool_open_img_sing(img_info, offset, TSK_POOL_TYPE_DETECT); if (pool == NULL) { tsk_error_print(stderr); if (tsk_error_get_errno() == TSK_ERR_POOL_UNSUPTYPE) tsk_pool_type_print(stderr); setThrowTskCoreError(env, tsk_error_get()); } return (jlong) pool; } /* * Create new image info to use with a specific pool volume * @return the created TSK_IMG_INFO pointer * @param env pointer to java environment this was called from * @param obj the java object this was called from * @param a_pool_info the pointer to the pool object * @param pool_block the block number of the pool volume */ JNIEXPORT jlong JNICALL Java_org_sleuthkit_datamodel_SleuthkitJNI_getImgInfoForPoolNat (JNIEnv * env, jclass obj, jlong a_pool_info, jlong pool_block) { TSK_POOL_INFO *pool_info = castPoolInfo(env, a_pool_info); if (pool_info == 0) { //exception already set return 0; } TSK_IMG_INFO *img_info = pool_info->get_img_info(pool_info, pool_block); return (jlong)img_info; } /* * Open file system with the given offset * @return the created TSK_FS_INFO pointer * @param env pointer to java environment this was called from * @param obj the java object this was called from * @param a_img_info the pointer to the parent img object * @param fs_offset the offset in bytes to the file system */ JNIEXPORT jlong JNICALL Java_org_sleuthkit_datamodel_SleuthkitJNI_openFsNat (JNIEnv * env, jclass obj, jlong a_img_info, jlong 
fs_offset) { TSK_IMG_INFO *img_info = castImgInfo(env, a_img_info); if (img_info == 0) { //exception already set return 0; } TSK_FS_INFO *fs_info; fs_info = tsk_fs_open_img(img_info, (TSK_OFF_T) fs_offset, TSK_FS_TYPE_DETECT); if (fs_info == NULL) { setThrowTskCoreError(env, tsk_error_get()); } return (jlong) fs_info; } /* * Open the file with the given id in the given file system * @return the created TSK_JNI_FILEHANDLE pointer, set throw exception on error * @param env pointer to java environment this was called from * @param obj the java object this was called from * @param a_fs_info the pointer to the parent file system object * @param file_id id of the file to open * @param attr_type type of the file attribute to open * @param attr_id id of the file attribute to open */ JNIEXPORT jlong JNICALL Java_org_sleuthkit_datamodel_SleuthkitJNI_openFileNat(JNIEnv * env, jclass obj, jlong a_fs_info, jlong file_id, jint attr_type, jint attr_id) { TSK_FS_INFO *fs_info = castFsInfo(env, a_fs_info); if (fs_info == 0) { //exception already set return 0; } TSK_FS_FILE *file_info; //open file file_info = tsk_fs_file_open_meta(fs_info, NULL, (TSK_INUM_T) file_id); if (file_info == NULL) { setThrowTskCoreError(env, tsk_error_get()); return 0; } //open attribute const TSK_FS_ATTR * tsk_fs_attr = tsk_fs_file_attr_get_type(file_info, (TSK_FS_ATTR_TYPE_ENUM)attr_type, (uint16_t)attr_id, 1); if (tsk_fs_attr == NULL) { tsk_fs_file_close(file_info); setThrowTskCoreError(env, tsk_error_get()); return 0; } //allocate file handle structure to encapsulate file and attribute TSK_JNI_FILEHANDLE * fileHandle = (TSK_JNI_FILEHANDLE *) tsk_malloc(sizeof(TSK_JNI_FILEHANDLE)); if (fileHandle == NULL) { tsk_fs_file_close(file_info); setThrowTskCoreError(env, "Could not allocate memory for TSK_JNI_FILEHANDLE"); return 0; } fileHandle->tag = TSK_JNI_FILEHANDLE_TAG; fileHandle->fs_file = file_info; fileHandle->fs_attr = const_cast(tsk_fs_attr); return (jlong)fileHandle; } /** move a local buffer into a 
new Java array.
 * @param env JNI env
 * @param buf Buffer to copy from
 * @param len Length of bytes in buf
 * @returns Pointer to newly created java byte array or NULL if there is an error
 */
#if 0
// Disabled variant: allocates a brand-new Java byte[] per call instead of
// filling a caller-supplied array. Kept for reference only.
static jbyteArray
copyBufToByteArray(JNIEnv * env, const char *buf, ssize_t len)
{
    jbyteArray return_array = env->NewByteArray(len);
    if (return_array == NULL) {
        setThrowTskCoreError(env, "NewByteArray returned error while getting an array to copy buffer into.");
        return 0;
    }
    env->SetByteArrayRegion(return_array, 0, len, (jbyte*)buf);
    return return_array;
}
#endif

/** move a local buffer into an existing Java array.
 * @param env JNI env
 * @param jbuf Buffer to copy to
 * @param buf Buffer to copy from
 * @param len Length of bytes in buf
 * @returns number of bytes copied or -1 on error
 */
inline static ssize_t
copyBufToByteArray(JNIEnv * env, jbyteArray jbuf, const char *buf, ssize_t len)
{
    // Callers clamp len to the Java array length first; SetByteArrayRegion
    // would otherwise raise ArrayIndexOutOfBoundsException in the JVM.
    env->SetByteArrayRegion(jbuf, 0, (jsize)len, (jbyte*)buf);
    return len;
}

/*
 * Read bytes from the given image
 * @return number of bytes read from the image, -1 on error
 * @param env pointer to java environment this was called from
 * @param obj the java object this was called from
 * @param a_img_info the pointer to the image object
 * @param offset the offset in bytes to start at
 * @param len number of bytes to read
 */
JNIEXPORT jint JNICALL
Java_org_sleuthkit_datamodel_SleuthkitJNI_readImgNat(JNIEnv * env,
    jclass obj, jlong a_img_info, jbyteArray jbuf, jlong offset, jlong len)
{
    //use fixed size stack-allocated buffer if possible
    char fixed_buf [FIXED_BUF_SIZE];
    char * buf = fixed_buf;
    bool dynBuf = false;
    if (len > FIXED_BUF_SIZE) {
        // NOTE(review): a negative len would bypass this and be passed to
        // tsk_malloc/tsk_img_read as a huge size_t — presumably callers only
        // pass non-negative lengths; confirm on the Java side.
        dynBuf = true;
        buf = (char *) tsk_malloc((size_t) len);
        if (buf == NULL) {
            setThrowTskCoreError(env);
            return -1;
        }
    }

    TSK_IMG_INFO *img_info = castImgInfo(env, a_img_info);
    if (img_info == 0) {
        if (dynBuf) {
            free(buf);
        }
        //exception already set
        return -1;
    }

    ssize_t bytesread =
        tsk_img_read(img_info, (TSK_OFF_T) offset, buf, (size_t) len);
    if (bytesread == -1) {
        if (dynBuf) {
            free(buf);
        }
        setThrowTskCoreError(env, tsk_error_get());
        return -1;
    }

    // package it up for return
    // adjust number bytes to copy so we never overrun the Java array
    ssize_t copybytes = bytesread;
    jsize jbuflen = env->GetArrayLength(jbuf);
    if (jbuflen < copybytes)
        copybytes = jbuflen;

    ssize_t copiedbytes = copyBufToByteArray(env, jbuf, buf, copybytes);
    if (dynBuf) {
        free(buf);
    }
    if (copiedbytes == -1) {
        setThrowTskCoreError(env, tsk_error_get());
    }
    return (jint)copiedbytes;
}

/*
 * Read bytes from the given pool
 * @return number of bytes read from the pool, -1 on error
 * @param env pointer to java environment this was called from
 * @param obj the java object this was called from
 * @param a_pool_info the pointer to the pool object
 * @param jbuf the buffer to write to
 * @param offset the offset in bytes to start at
 * @param len number of bytes to read
 */
JNIEXPORT jint JNICALL
Java_org_sleuthkit_datamodel_SleuthkitJNI_readPoolNat(JNIEnv * env,
    jclass obj, jlong a_pool_info, jbyteArray jbuf, jlong offset, jlong len)
{
    //use fixed size stack-allocated buffer if possible
    char fixed_buf[FIXED_BUF_SIZE];
    char * buf = fixed_buf;
    bool dynBuf = false;
    if (len > FIXED_BUF_SIZE) {
        dynBuf = true;
        buf = (char *)tsk_malloc((size_t)len);
        if (buf == NULL) {
            setThrowTskCoreError(env);
            return -1;
        }
    }

    TSK_POOL_INFO *pool_info = castPoolInfo(env, a_pool_info);
    if (pool_info == 0) {
        //exception already set
        if (dynBuf) {
            free(buf);
        }
        return -1;
    }

    // pool reads are addressed by block (TSK_DADDR_T), not byte offset
    ssize_t bytesread = tsk_pool_read(pool_info, (TSK_DADDR_T)offset, buf,
        (size_t)len);
    if (bytesread == -1) {
        setThrowTskCoreError(env, tsk_error_get());
        if (dynBuf) {
            free(buf);
        }
        return -1;
    }

    // package it up for return
    // adjust number bytes to copy
    ssize_t copybytes = bytesread;
    jsize jbuflen = env->GetArrayLength(jbuf);
    if (jbuflen < copybytes)
        copybytes = jbuflen;

    ssize_t copiedbytes = copyBufToByteArray(env, jbuf, buf, copybytes);
    if (dynBuf) {
        free(buf);
    }
    if (copiedbytes == -1) {
        setThrowTskCoreError(env, tsk_error_get());
    }
    return (jint)copiedbytes;
}

/*
 * Read
bytes from the given volume system
 * @return number of bytes read from the volume system, -1 on error
 * @param env pointer to java environment this was called from
 * @param obj the java object this was called from
 * @param a_vs_info the pointer to the volume system object
 * @param offset the offset in bytes to start at
 * @param len number of bytes to read
 */
JNIEXPORT jint JNICALL
Java_org_sleuthkit_datamodel_SleuthkitJNI_readVsNat(JNIEnv * env,
    jclass obj, jlong a_vs_info, jbyteArray jbuf, jlong offset, jlong len)
{
    //use fixed size stack-allocated buffer if possible
    char fixed_buf [FIXED_BUF_SIZE];
    char * buf = fixed_buf;
    bool dynBuf = false;
    if (len > FIXED_BUF_SIZE) {
        dynBuf = true;
        buf = (char *) tsk_malloc((size_t) len);
        if (buf == NULL) {
            setThrowTskCoreError(env);
            return -1;
        }
    }

    TSK_VS_INFO *vs_info = castVsInfo(env, a_vs_info);
    if (vs_info == 0) {
        //exception already set
        if (dynBuf) {
            free(buf);
        }
        return -1;
    }

    // volume-system reads are block addressed (TSK_DADDR_T)
    ssize_t bytesread =
        tsk_vs_read_block(vs_info, (TSK_DADDR_T) offset, buf, (size_t) len);
    if (bytesread == -1) {
        setThrowTskCoreError(env, tsk_error_get());
        if (dynBuf) {
            free(buf);
        }
        return -1;
    }

    // package it up for return
    // adjust number bytes to copy so the Java array is never overrun
    ssize_t copybytes = bytesread;
    jsize jbuflen = env->GetArrayLength(jbuf);
    if (jbuflen < copybytes)
        copybytes = jbuflen;

    ssize_t copiedbytes = copyBufToByteArray(env, jbuf, buf, copybytes);
    if (dynBuf) {
        free(buf);
    }
    if (copiedbytes == -1) {
        setThrowTskCoreError(env, tsk_error_get());
    }
    return (jint)copiedbytes;
}

/*
 * Read bytes from the given volume
 * @return number of bytes read from the volume or -1 on error
 * @param env pointer to java environment this was called from
 * @param obj the java object this was called from
 * @param a_vol_info the pointer to the volume object
 * @param offset the offset in bytes to start at
 * @param len number of bytes to read
 */
JNIEXPORT jint JNICALL
Java_org_sleuthkit_datamodel_SleuthkitJNI_readVolNat(JNIEnv * env,
    jclass obj, jlong a_vol_info, jbyteArray jbuf, jlong offset, jlong len)
{
    //use fixed size stack-allocated buffer if possible
    char fixed_buf [FIXED_BUF_SIZE];
    char * buf = fixed_buf;
    bool dynBuf = false;
    if (len > FIXED_BUF_SIZE) {
        dynBuf = true;
        buf = (char *) tsk_malloc((size_t) len);
        if (buf == NULL) {
            setThrowTskCoreError(env);
            return -1;
        }
    }

    TSK_VS_PART_INFO *vol_part_info = castVsPartInfo(env, a_vol_info);
    if (vol_part_info == 0) {
        if (dynBuf) {
            free(buf);
        }
        //exception already set
        return -1;
    }

    ssize_t bytesread =
        tsk_vs_part_read(vol_part_info, (TSK_OFF_T) offset, buf,
        (size_t) len);
    if (bytesread == -1) {
        setThrowTskCoreError(env, tsk_error_get());
        if (dynBuf) {
            free(buf);
        }
        return -1;
    }

    // package it up for return
    // adjust number bytes to copy
    ssize_t copybytes = bytesread;
    jsize jbuflen = env->GetArrayLength(jbuf);
    if (jbuflen < copybytes)
        copybytes = jbuflen;

    ssize_t copiedbytes = copyBufToByteArray(env, jbuf, buf, copybytes);
    if (dynBuf) {
        free(buf);
    }
    if (copiedbytes == -1) {
        setThrowTskCoreError(env, tsk_error_get());
    }
    return (jint)copiedbytes;
}

/*
 * Read bytes from the given file system
 * @return number of bytes read from the file system, -1 on error
 * @param env pointer to java environment this was called from
 * @param obj the java object this was called from
 * @param a_fs_info the pointer to the file system object
 * @param offset the offset in bytes to start at
 * @param len number of bytes to read
 */
JNIEXPORT jint JNICALL
Java_org_sleuthkit_datamodel_SleuthkitJNI_readFsNat(JNIEnv * env,
    jclass obj, jlong a_fs_info, jbyteArray jbuf, jlong offset, jlong len)
{
    //use fixed size stack-allocated buffer if possible
    char fixed_buf [FIXED_BUF_SIZE];
    char * buf = fixed_buf;
    bool dynBuf = false;
    if (len > FIXED_BUF_SIZE) {
        dynBuf = true;
        buf = (char *) tsk_malloc((size_t) len);
        if (buf == NULL) {
            setThrowTskCoreError(env);
            return -1;
        }
    }

    TSK_FS_INFO *fs_info = castFsInfo(env, a_fs_info);
    if (fs_info == 0) {
        if (dynBuf) {
            free(buf);
        }
        //exception already set
        return -1;
    }

    ssize_t bytesread =
        tsk_fs_read(fs_info, (TSK_OFF_T) offset, buf, (size_t) len);
    if (bytesread == -1) {
        if (dynBuf) {
            free(buf);
        }
        setThrowTskCoreError(env, tsk_error_get());
        return -1;
    }

    // package it up for return
    // adjust number bytes to copy
    ssize_t copybytes = bytesread;
    jsize jbuflen = env->GetArrayLength(jbuf);
    if (jbuflen < copybytes)
        copybytes = jbuflen;

    ssize_t copiedbytes = copyBufToByteArray(env, jbuf, buf, copybytes);
    if (dynBuf) {
        free(buf);
    }
    if (copiedbytes == -1) {
        setThrowTskCoreError(env, tsk_error_get());
    }
    return (jint)copiedbytes;
}

/**
 * Flag used by readFileNat to specify if the offset is relative to the start of the file
 * or the start of the slack space
 */
typedef enum {
    TSK_FS_FILE_READ_OFFSET_TYPE_START_OF_FILE = 0x00,
    TSK_FS_FILE_READ_OFFSET_TYPE_START_OF_SLACK = 0x01,
} TSK_FS_FILE_READ_OFFSET_TYPE_ENUM;

/*
 * Read bytes from the given file
 * @return number of bytes read, or -1 on error
 * @param env pointer to java environment this was called from
 * @param obj the java object this was called from
 * @param a_file_handle the pointer to the TSK_JNI_FILEHANDLE object
 * @param jbuf jvm allocated buffer to read to
 * @param offset the offset in bytes to start at
 * @param offset_type TSK_FS_FILE_READ_OFFSET_TYPE_ENUM value selecting
 *        whether offset is from the start of the file or of its slack space
 * @param len number of bytes to read
 */
JNIEXPORT jint JNICALL
Java_org_sleuthkit_datamodel_SleuthkitJNI_readFileNat(JNIEnv * env,
    jclass obj, jlong a_file_handle, jbyteArray jbuf, jlong offset, jint offset_type, jlong len)
{
    //use fixed size stack-allocated buffer if possible
    char fixed_buf [FIXED_BUF_SIZE];
    char * buf = fixed_buf;
    bool dynBuf = false;
    if (len > FIXED_BUF_SIZE) {
        dynBuf = true;
        buf = (char *) tsk_malloc((size_t) len);
        if (buf == NULL) {
            setThrowTskCoreError(env);
            return -1;
        }
    }

    const TSK_JNI_FILEHANDLE *file_handle =
        castJniFileHandle(env, a_file_handle);
    if (file_handle == 0) {
        if (dynBuf) {
            free(buf);
        }
        //exception already set
        return -1;
    }

    TSK_FS_ATTR * tsk_fs_attr = file_handle->fs_attr;

    TSK_FS_FILE_READ_FLAG_ENUM readFlag = TSK_FS_FILE_READ_FLAG_NONE;
    TSK_OFF_T readOffset = (TSK_OFF_T) offset;
    if(offset_type == TSK_FS_FILE_READ_OFFSET_TYPE_START_OF_SLACK){
        // slack space begins after the initialized data of the attribute,
        // so rebase the caller's offset past nrd.initsize
        readFlag = TSK_FS_FILE_READ_FLAG_SLACK;
        readOffset += tsk_fs_attr->nrd.initsize;
    }

    //read attribute
    ssize_t bytesread = tsk_fs_attr_read(tsk_fs_attr,  readOffset, buf, (size_t) len,
        readFlag);
    if (bytesread == -1) {
        if (dynBuf) {
            free(buf);
        }
        setThrowTskCoreError(env, tsk_error_get());
        return -1;
    }

    // package it up for return
    // adjust number bytes to copy
    ssize_t copybytes = bytesread;
    jsize jbuflen = env->GetArrayLength(jbuf);
    if (jbuflen < copybytes)
        copybytes = jbuflen;

    ssize_t copiedbytes = copyBufToByteArray(env, jbuf, buf, copybytes);
    if (dynBuf) {
        free(buf);
    }
    if (copiedbytes == -1) {
        setThrowTskCoreError(env, tsk_error_get());
    }
    return (jint)copiedbytes;
}

/**
 * Runs istat on a given file and saves the output to a temp file.
 *
 * @returns -1 on error (and throws exception)
 */
JNIEXPORT jint JNICALL
    Java_org_sleuthkit_datamodel_SleuthkitJNI_saveFileMetaDataTextNat
    (JNIEnv *env, jclass obj, jlong a_file_handle, jstring a_tmp_path)
{
    const TSK_JNI_FILEHANDLE *file_handle =
        castJniFileHandle(env, a_file_handle);
    if (file_handle == 0) {
        //exception already set
        return -1;
    }

    // check the pointers
    if (file_handle->fs_file == NULL || file_handle->fs_file->fs_info == NULL || file_handle->fs_file->meta == NULL) {
        setThrowTskCoreError(env, "NULL pointers for istat file.");
        return -1;
    }
    TSK_FS_INFO *fs_info = file_handle->fs_file->fs_info;

    // open a file to write the details to
    jboolean isCopy;
    char *str8 = (char *) env->GetStringUTFChars(a_tmp_path, &isCopy);
    FILE *hFile = fopen(str8, "w");
    if (hFile == NULL) {
        env->ReleaseStringUTFChars(a_tmp_path, str8);
        setThrowTskCoreError(env, "Couldn't open istat temp file for writing.");
        return -1;
    }
    env->ReleaseStringUTFChars(a_tmp_path, str8);

    if (fs_info->istat(fs_info, TSK_FS_ISTAT_RUNLIST, hFile, file_handle->fs_file->meta->addr,
            0, 0) != 0) {
        fclose(hFile);
        setThrowTskCoreError(env);
        return -1;
    }

    fclose(hFile);
    return 0;
}

/*
 * Close the
given image
 * @param env pointer to java environment this was called from
 * @param obj the java object this was called from
 * @param a_img_info the pointer to the image object
 */
JNIEXPORT void JNICALL
Java_org_sleuthkit_datamodel_SleuthkitJNI_closeImgNat(JNIEnv * env,
    jclass obj, jlong a_img_info)
{
    TSK_IMG_INFO *img_info = castImgInfo(env, a_img_info);
    if (img_info == 0) {
        //exception already set
        return;
    }
    tsk_img_close(img_info);
}

/*
 * Close the given volume system
 * @param env pointer to java environment this was called from
 * @param obj the java object this was called from
 * @param a_vs_info the pointer to the volume system object
 */
JNIEXPORT void JNICALL
    Java_org_sleuthkit_datamodel_SleuthkitJNI_closeVsNat
    (JNIEnv * env, jclass obj, jlong a_vs_info)
{
    TSK_VS_INFO *vs_info = castVsInfo(env, a_vs_info);
    if (vs_info == 0) {
        //exception already set
        return;
    }
    tsk_vs_close(vs_info);
}

/*
 * Close the given file system
 * @param env pointer to java environment this was called from
 * @param obj the java object this was called from
 * @param a_fs_info the pointer to the file system object
 */
JNIEXPORT void JNICALL
    Java_org_sleuthkit_datamodel_SleuthkitJNI_closeFsNat
    (JNIEnv * env, jclass obj, jlong a_fs_info)
{
    TSK_FS_INFO *fs_info = castFsInfo(env, a_fs_info);
    if (fs_info == 0) {
        //exception already set
        return;
    }
    tsk_fs_close(fs_info);
}

/*
 * Close the given pool
 * @param env pointer to java environment this was called from
 * @param obj the java object this was called from
 * @param a_pool_info the pointer to the pool object
 */
JNIEXPORT void JNICALL
    Java_org_sleuthkit_datamodel_SleuthkitJNI_closePoolNat
    (JNIEnv * env, jclass obj, jlong a_pool_info)
{
    TSK_POOL_INFO *pool_info = castPoolInfo(env, a_pool_info);
    if (pool_info == 0) {
        //exception already set
        return;
    }
    tsk_pool_close(pool_info);
}

/*
 * Close the given file handle created by openFileNat
 * @param env pointer to java environment this was called from
 * @param obj the java object this was called from
 * @param a_file_info the pointer to the file object
 */
JNIEXPORT void JNICALL
Java_org_sleuthkit_datamodel_SleuthkitJNI_closeFileNat(JNIEnv * env,
    jclass obj, jlong a_file_info)
{
    TSK_JNI_FILEHANDLE *file_handle =
        castJniFileHandle(env, a_file_info);
    if (file_handle == 0) {
        //exception already set
        return;
    }

    TSK_FS_FILE * file_info = file_handle->fs_file;
    tsk_fs_file_close(file_info); //also closes the attribute

    // scrub the handle before freeing so a stale pointer fails the tag check
    file_handle->fs_file = NULL;
    file_handle->fs_attr = NULL;
    file_handle->tag = 0;
    free (file_handle);
}

/*
 * Get the current Sleuthkit version number
 * @return the version string
 * @param env pointer to java environment this was called from
 * @param obj the java object this was called from
 */
JNIEXPORT jstring JNICALL
Java_org_sleuthkit_datamodel_SleuthkitJNI_getVersionNat(JNIEnv * env,
    jclass obj)
{
    const char *cversion = tsk_version_get_str();
    jstring jversion = (*env).NewStringUTF(cversion);
    return jversion;
}

/*
 * Get the current directory being analyzed during AddImage
 * @return the path of the current directory
 *
 */
JNIEXPORT jstring JNICALL
    Java_org_sleuthkit_datamodel_SleuthkitJNI_getCurDirNat
    (JNIEnv * env,jclass obj, jlong dbHandle)
{
    // NOTE(review): dbHandle is trusted to be a valid TskAutoDbJava pointer;
    // no tag/NULL validation is performed here (unlike the cast* helpers).
    TskAutoDbJava *tskAuto = ((TskAutoDbJava *) dbHandle);
    const std::string curDir = tskAuto->getCurDir();
    jstring jdir = (*env).NewStringUTF(curDir.c_str());
    return jdir;
}

/*
 * Enable verbose logging and redirect stderr to the given log file.
 * @param env pointer to java environment this was called from
 * @param obj the java object this was called from
 * @param logPath The log file to append to.
*/ JNIEXPORT void JNICALL Java_org_sleuthkit_datamodel_SleuthkitJNI_startVerboseLoggingNat (JNIEnv * env, jclass obj, jstring logPath) { jboolean isCopy; char *str8 = (char *) env->GetStringUTFChars(logPath, &isCopy); if (freopen(str8, "a", stderr) == NULL) { env->ReleaseStringUTFChars(logPath, str8); setThrowTskCoreError(env, "Couldn't open verbose log file for appending."); return; } env->ReleaseStringUTFChars(logPath, str8); tsk_verbose++; } /* * Creates an MD5 index for a hash database. * @param env Pointer to Java environment from which this method was called. * @param obj The Java object from which this method was called. * @param dbHandle A handle for the hash database. */ JNIEXPORT void JNICALL Java_org_sleuthkit_datamodel_SleuthkitJNI_hashDbCreateIndexNat (JNIEnv * env, jclass obj, jint dbHandle) { if((size_t)dbHandle > hashDbs.size()) { setThrowTskCoreError(env, "Invalid database handle"); return; } TSK_HDB_INFO *db = hashDbs.at(dbHandle-1); if (db == NULL) { setThrowTskCoreError(env, "Invalid database handle"); return; } TSK_TCHAR idx_type[1024]; if(db->db_type == TSK_HDB_DBTYPE_MD5SUM_ID) { TSNPRINTF(idx_type, 1024, _TSK_T("%") PRIcTSK, TSK_HDB_DBTYPE_MD5SUM_STR); } else if(db->db_type == TSK_HDB_DBTYPE_HK_ID) { TSNPRINTF(idx_type, 1024, _TSK_T("%") PRIcTSK, TSK_HDB_DBTYPE_HK_STR); } else if(db->db_type == TSK_HDB_DBTYPE_ENCASE_ID) { TSNPRINTF(idx_type, 1024, _TSK_T("%") PRIcTSK, TSK_HDB_DBTYPE_ENCASE_STR); } else { // The Java bindings only support the generation of md5 indexes for // an NSRL hash database. TSNPRINTF(idx_type, 1024, _TSK_T("%") PRIcTSK, TSK_HDB_DBTYPE_NSRL_MD5_STR); } if (tsk_hdb_make_index(db, idx_type) != 0) { setThrowTskCoreError(env, tsk_error_get_errstr()); } } /* * Queries whether or not an index for MD5 look ups exists for a hash database. * @param env Pointer to Java environment from which this method was called. * @param obj The Java object from which this method was called. * @param dbHandle A handle for the hash database. 
* @return True if the index exists. */ JNIEXPORT jboolean JNICALL Java_org_sleuthkit_datamodel_SleuthkitJNI_hashDbIndexExistsNat (JNIEnv * env, jclass obj, jint dbHandle) { if((size_t)dbHandle > hashDbs.size()) { setThrowTskCoreError(env, "Invalid database handle"); return (jboolean)false; } TSK_HDB_INFO *db = hashDbs.at(dbHandle-1); if (db == NULL) { setThrowTskCoreError(env, "Invalid database handle"); return (jboolean)false; } return (jboolean)(db->has_index(db, TSK_HDB_HTYPE_MD5_ID) == 1); } /* * Query and get size of the device (such as physical disk, or image) pointed by the path * Might require elevated priviletes to work (otherwise will error) * @param env pointer to java environment this was called from * @param obj the java object this was called from * @param devPathJ the device path * @return size of device, set throw jni exception on error */ JNIEXPORT jlong JNICALL Java_org_sleuthkit_datamodel_SleuthkitJNI_findDeviceSizeNat (JNIEnv * env, jclass obj, jstring devPathJ) { jlong devSize = 0; const char* devPath = env->GetStringUTFChars(devPathJ, 0); // open the image to get the size TSK_IMG_INFO * img_info = tsk_img_open_utf8_sing(devPath, TSK_IMG_TYPE_DETECT, 0); if (img_info == NULL) { setThrowTskCoreError(env, tsk_error_get()); env->ReleaseStringUTFChars(devPathJ , devPath); return -1; } TSK_OFF_T imgSize = img_info->size; devSize = imgSize; //cleanup tsk_img_close(img_info); env->ReleaseStringUTFChars(devPathJ , devPath); return devSize; } /* * Test whether an image is supported * @param env pointer to java environment this was called from * @param obj the java object this was called from * @param imagePathJ the image path * @return true if the image can be processed, false otherwise */ JNIEXPORT jboolean JNICALL Java_org_sleuthkit_datamodel_SleuthkitJNI_isImageSupportedNat (JNIEnv * env, jclass obj, jstring imagePathJ) { TskIsImageSupported tskIsImage; TSK_TCHAR imagePathT[1024]; toTCHAR(env, imagePathT, 1024, imagePathJ); // It seems like passing 
&imagePathT should work instead of making this new array, // but it generated an EXCEPTION_ACCESS_VIOLATION during testing. TSK_TCHAR ** imagePaths = (TSK_TCHAR**)tsk_malloc((1) * sizeof(TSK_TCHAR*)); bool result; imagePaths[0] = imagePathT; if (tskIsImage.openImage(1, imagePaths, TSK_IMG_TYPE_DETECT, 0)) { result = false; } else { if (tskIsImage.findFilesInImg()) { result = false; } else { if (tskIsImage.isImageSupported()) { result = true; } else { result = false; } } } // Cleanup free(imagePaths); return (jboolean) result; } /* * Returns the current Sleuthkit version as a long * @return the current version */ JNIEXPORT jlong JNICALL Java_org_sleuthkit_datamodel_SleuthkitJNI_getSleuthkitVersionNat (JNIEnv * env, jclass obj) { return (jlong)TSK_VERSION_NUM; } /* * Finish the image being created by image writer. * @param env pointer to java environment this was called from * @param obj the java object this was called from * @param a_img_info the image info pointer */ JNIEXPORT jint JNICALL Java_org_sleuthkit_datamodel_SleuthkitJNI_finishImageWriterNat (JNIEnv * env, jclass obj, jlong a_img_info) { // Set up the TSK_IMG_INFO object TSK_IMG_INFO *img_info = castImgInfo(env, a_img_info); return tsk_img_writer_finish(img_info); } /* * Get the progess of the finishImage process as an integer from 0 to 100 */ JNIEXPORT jint JNICALL Java_org_sleuthkit_datamodel_SleuthkitJNI_getFinishImageProgressNat (JNIEnv * env, jclass obj, jlong a_img_info) { // Set up the TSK_IMG_INFO object TSK_IMG_INFO *img_info = castImgInfo(env, a_img_info); IMG_RAW_INFO *raw_info = (IMG_RAW_INFO*)img_info; if (raw_info->img_writer != NULL) { return (raw_info->img_writer->finishProgress); } return 0; } /* * Cancel the finishImage process */ JNIEXPORT void JNICALL Java_org_sleuthkit_datamodel_SleuthkitJNI_cancelFinishImageNat (JNIEnv * env, jclass obj, jlong a_img_info) { // Set up the TSK_IMG_INFO object TSK_IMG_INFO *img_info = castImgInfo(env, a_img_info); IMG_RAW_INFO *raw_info = 
(IMG_RAW_INFO*)img_info;
    if (raw_info->img_writer != NULL) {
        raw_info->img_writer->cancelFinish = 1;
    }
    return ;
}
sleuthkit-4.11.1/bindings/java/jni/auto_db_java.cpp000644 000765 000024 00000201270 14137073414 023032 0ustar00carrierstaff000000 000000 /*
** The Sleuth Kit
**
** Brian Carrier [carrier sleuthkit [dot] org]
** Copyright (c) 2020 Brian Carrier.  All Rights reserved
**
** This software is distributed under the Common Public License 1.0
**
*/

/**
 * \file auto_db_java.cpp
 * Contains code to populate database with volume and file system information from a specific image.
 */

#include "auto_db_java.h"
#include "jni.h"
#include "tsk/img/img_writer.h"
#if HAVE_LIBEWF
#include "tsk/img/ewf.h"
#include "tsk/img/tsk_img_i.h"
#endif
// NOTE(review): the three include targets below were lost when angle-bracket
// text was stripped from this copy — likely <string.h>, <sstream> and
// <algorithm> given the using-declarations that follow; confirm against
// the upstream sleuthkit source before building.
#include
#include
#include

using std::stringstream;
using std::for_each;

/**
 * Default constructor: zero/clear all bookkeeping state and create the lock
 * that protects m_curDirPath (read from another thread via getCurDir()).
 */
TskAutoDbJava::TskAutoDbJava()
{
    m_curImgId = 0;
    m_curVsId = 0;
    m_curVolId = 0;
    m_curFsId = 0;
    m_curFileId = 0;
    m_curUnallocDirId = 0;
    m_curDirAddr = 0;
    m_curDirPath = "";
    m_vsFound = false;
    m_volFound = false;
    m_poolFound = false;
    m_stopped = false;
    m_foundStructure = false;
    m_attributeAdded = false;
    m_addFileSystems = true;
    m_noFatFsOrphans = false;
    m_addUnallocSpace = false;
    m_minChunkSize = -1;
    m_maxChunkSize = -1;

    m_jniEnv = NULL;

    tsk_init_lock(&m_curDirPathLock);
}

// Destructor: close any open image and release the path lock.
TskAutoDbJava::~TskAutoDbJava()
{
    closeImage();
    tsk_deinit_lock(&m_curDirPathLock);
}

/**
* Look up all callback method IDs
* @param jniEnv pointer to java environment this was called from
* @param jobj the TskCaseDbBridge object this was called from
*/
TSK_RETVAL_ENUM
TskAutoDbJava::initializeJni(JNIEnv * jniEnv, jobject jobj) {
    m_jniEnv = jniEnv;
    // Global refs keep the bridge object and class alive across native calls
    m_javaDbObj = m_jniEnv->NewGlobalRef(jobj);

    jclass localCallbackClass = m_jniEnv->FindClass("org/sleuthkit/datamodel/TskCaseDbBridge");
    if (localCallbackClass == NULL) {
        return TSK_ERR;
    }
    m_callbackClass = (jclass)m_jniEnv->NewGlobalRef(localCallbackClass);

    m_addImageMethodID = m_jniEnv->GetMethodID(m_callbackClass, "addImageInfo", "(IJLjava/lang/String;JLjava/lang/String;Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;[Ljava/lang/String;)J");
    if (m_addImageMethodID == NULL) {
        return TSK_ERR;
    }

    m_addAcquisitionDetailsMethodID = m_jniEnv->GetMethodID(m_callbackClass, "addAcquisitionDetails", "(JLjava/lang/String;)V");
    if (m_addAcquisitionDetailsMethodID == NULL) {
        return TSK_ERR;
    }

    m_addVolumeSystemMethodID = m_jniEnv->GetMethodID(m_callbackClass, "addVsInfo", "(JIJJ)J");
    if (m_addVolumeSystemMethodID == NULL) {
        return TSK_ERR;
    }

    m_addVolumeMethodID = m_jniEnv->GetMethodID(m_callbackClass, "addVolume", "(JJJJLjava/lang/String;J)J");
    if (m_addVolumeMethodID == NULL) {
        return TSK_ERR;
    }

    m_addPoolMethodID = m_jniEnv->GetMethodID(m_callbackClass, "addPool", "(JI)J");
    if (m_addPoolMethodID == NULL) {
        return TSK_ERR;
    }

    m_addFileSystemMethodID = m_jniEnv->GetMethodID(m_callbackClass, "addFileSystem", "(JJIJJJJJ)J");
    if (m_addFileSystemMethodID == NULL) {
        return TSK_ERR;
    }

    m_addFileMethodID = m_jniEnv->GetMethodID(m_callbackClass, "addFile", "(JJJIIILjava/lang/String;JJIIIIJJJJJIIILjava/lang/String;Ljava/lang/String;JJJLjava/lang/String;)J");
    if (m_addFileMethodID == NULL) {
        return TSK_ERR;
    }

    m_addUnallocParentMethodID = m_jniEnv->GetMethodID(m_callbackClass, "addUnallocFsBlockFilesParent", "(JLjava/lang/String;)J");
    if (m_addUnallocParentMethodID == NULL) {
        return TSK_ERR;
    }

    m_addLayoutFileMethodID = m_jniEnv->GetMethodID(m_callbackClass, "addLayoutFile", "(JJJILjava/lang/String;J)J");
    if (m_addLayoutFileMethodID == NULL) {
        return TSK_ERR;
    }

    m_addLayoutFileRangeMethodID = m_jniEnv->GetMethodID(m_callbackClass, "addLayoutFileRange", "(JJJJ)J");
    if (m_addLayoutFileRangeMethodID == NULL) {
        return TSK_ERR;
    }

    return TSK_OK;
}

/**
 * Cache a database object for later use. Should be called on image, volume system, volume,
 * pool, and file system.
* @param objId The object ID of the new object * @param parObjId The object ID of the new object's parent * @param type The type of object */ void TskAutoDbJava::saveObjectInfo(int64_t objId, int64_t parObjId, TSK_DB_OBJECT_TYPE_ENUM type) { TSK_DB_OBJECT objectInfo; objectInfo.objId = objId; objectInfo.parObjId = parObjId; objectInfo.type = type; m_savedObjects.push_back(objectInfo); } /** * Get a previously cached database object. * @param objId The object ID of the object being loaded */ TSK_RETVAL_ENUM TskAutoDbJava::getObjectInfo(int64_t objId, TSK_DB_OBJECT** obj_info) { for (vector::iterator itObjs = m_savedObjects.begin(); itObjs != m_savedObjects.end(); ++itObjs) { TSK_DB_OBJECT* tskDbObj = &(*itObjs); if (tskDbObj->objId == objId) { *obj_info = tskDbObj; return TSK_OK; } } // Object not found return TSK_ERR; } /** * Adds image details to the existing database tables. Object ID for new image stored in objId. * * @param type Image type * @param ssize Size of device sector in bytes (or 0 for default) * @param objId The object id assigned to the image (out param) * @param timeZone The timezone the image is from * @param size The size of the image in bytes. * @param md5 MD5 hash of the image * @param sha1 SHA1 hash of the image * @param sha256 SHA256 hash of the image * @param deviceId An ASCII-printable identifier for the device associated with the data source that is intended to be unique across multiple cases (e.g., a UUID). 
* @param collectionDetails collection details
* @returns TSK_ERR on error, TSK_OK on success
*/
TSK_RETVAL_ENUM
TskAutoDbJava::addImageInfo(int type, TSK_OFF_T ssize, int64_t & objId, const string & timezone,
    TSK_OFF_T size, const string &md5, const string& sha1, const string& sha256,
    const string& deviceId, const string& collectionDetails,
    char** img_ptrs, int num_imgs) {

    // Marshal every string argument into a jstring before invoking the
    // Java bridge callback.
    // NOTE(review): the local jstrings created here are never explicitly
    // released; presumably acceptable because this runs once per image and
    // local refs are dropped when control returns to Java — confirm.
    const char *tz_cstr = timezone.c_str();
    jstring tzj = m_jniEnv->NewStringUTF(tz_cstr);

    const char *md5_cstr = md5.c_str();
    jstring md5j = m_jniEnv->NewStringUTF(md5_cstr);

    const char *sha1_cstr = sha1.c_str();
    jstring sha1j = m_jniEnv->NewStringUTF(sha1_cstr);

    const char *sha256_cstr = sha256.c_str();
    jstring sha256j = m_jniEnv->NewStringUTF(sha256_cstr);

    const char *devId_cstr = deviceId.c_str();
    jstring devIdj = m_jniEnv->NewStringUTF(devId_cstr);

    const char *coll_cstr = collectionDetails.c_str();
    jstring collj = m_jniEnv->NewStringUTF(coll_cstr);

    // Build a String[] of the image path(s)
    jobjectArray imgNamesj = (jobjectArray)m_jniEnv->NewObjectArray(
        num_imgs,
        m_jniEnv->FindClass("java/lang/String"),
        m_jniEnv->NewStringUTF(""));

    for (int i = 0; i < num_imgs; i++) {
        m_jniEnv->SetObjectArrayElement(
            imgNamesj, i, m_jniEnv->NewStringUTF(img_ptrs[i]));
    }

    jlong objIdj = m_jniEnv->CallLongMethod(m_javaDbObj, m_addImageMethodID,
        type, ssize, tzj, size, md5j, sha1j, sha256j, devIdj, collj, imgNamesj);
    objId = (int64_t)objIdj;

    if (objId < 0) {
        return TSK_ERR;
    }

    // The image is a root object (parent id 0)
    saveObjectInfo(objId, 0, TSK_DB_OBJECT_TYPE_IMG);
    return TSK_OK;
}

/**
 * Records the acquisition details (e.g., tool output) for an image via the
 * Java bridge. Fire-and-forget: no status is returned.
 */
void
TskAutoDbJava::addAcquisitionDetails(int64_t imgId, const string& collectionDetails) {

    const char *coll_cstr = collectionDetails.c_str();
    jstring collj = m_jniEnv->NewStringUTF(coll_cstr);

    m_jniEnv->CallLongMethod(m_javaDbObj, m_addAcquisitionDetailsMethodID,
        imgId, collj);
}

/**
* Adds volume system to database. Object ID for new vs stored in objId.
* * @param vs_info Struct containing info for this volume system * @param parObjId Parent object ID for the volume system * @param objId Object ID of new volume system * @returns TSK_ERR on error, TSK_OK on success */ TSK_RETVAL_ENUM TskAutoDbJava::addVsInfo(const TSK_VS_INFO* vs_info, int64_t parObjId, int64_t& objId) { jlong objIdj = m_jniEnv->CallLongMethod(m_javaDbObj, m_addVolumeSystemMethodID, parObjId, vs_info->vstype, vs_info->offset, (uint64_t)vs_info->block_size); objId = (int64_t)objIdj; if (objId < 0) { return TSK_ERR; } // Save the vs info to use for unallocated blocks later TSK_DB_VS_INFO vs_db; vs_db.objId = objId; vs_db.offset = vs_info->offset; vs_db.vstype = vs_info->vstype; vs_db.block_size = vs_info->block_size; m_savedVsInfo.push_back(vs_db); saveObjectInfo(objId, parObjId, TSK_DB_OBJECT_TYPE_VS); return TSK_OK; } /** * Adds pool and pool volume system to database. Object ID for new pool vs stored in objId. * * @param pool_info Struct containing info for this pool * @param parObjId Parent object ID for the pool * @param objId Object ID of new pool volume system * @returns TSK_ERR on error, TSK_OK on success */ TSK_RETVAL_ENUM TskAutoDbJava::addPoolInfoAndVS(const TSK_POOL_INFO *pool_info, int64_t parObjId, int64_t& objId) { // Add the pool jlong poolObjIdj = m_jniEnv->CallLongMethod(m_javaDbObj, m_addPoolMethodID, parObjId, pool_info->ctype); int64_t poolObjId = (int64_t)poolObjIdj; if (poolObjId < 0) { return TSK_ERR; } saveObjectInfo(poolObjId, parObjId, TSK_DB_OBJECT_TYPE_POOL); // Add the pool volume jlong objIdj = m_jniEnv->CallLongMethod(m_javaDbObj, m_addVolumeSystemMethodID, poolObjIdj, TSK_VS_TYPE_APFS, pool_info->img_offset, (uint64_t)pool_info->block_size); objId = (int64_t)objIdj; saveObjectInfo(objId, poolObjId, TSK_DB_OBJECT_TYPE_VS); return TSK_OK; } /** * Adds a pool volume to database. Object ID for new pool volume stored in objId. 
* * @param pool_vol Struct containing info for this pool volume * @param parObjId Parent object ID * @param objId Object ID of new pool volume * @returns TSK_ERR on error, TSK_OK on success */ TSK_RETVAL_ENUM TskAutoDbJava::addPoolVolumeInfo(const TSK_POOL_VOLUME_INFO* pool_vol, int64_t parObjId, int64_t& objId) { jstring descj = m_jniEnv->NewStringUTF(pool_vol->desc); jlong objIdj = m_jniEnv->CallLongMethod(m_javaDbObj, m_addVolumeMethodID, parObjId, (int64_t)pool_vol->index, pool_vol->block, pool_vol->num_blocks, descj, pool_vol->flags); objId = (int64_t)objIdj; if (objId < 0) { return TSK_ERR; } saveObjectInfo(objId, parObjId, TSK_DB_OBJECT_TYPE_VOL); return TSK_OK; } /** * Adds a volume to database. Object ID for new volume stored in objId. * * @param vs_part Struct containing info for this volume * @param parObjId Parent object ID * @param objId Object ID of new volume * @returns TSK_ERR on error, TSK_OK on success */ TSK_RETVAL_ENUM TskAutoDbJava::addVolumeInfo(const TSK_VS_PART_INFO* vs_part, int64_t parObjId, int64_t& objId) { jstring descj = m_jniEnv->NewStringUTF(vs_part->desc); jlong objIdj = m_jniEnv->CallLongMethod(m_javaDbObj, m_addVolumeMethodID, parObjId, (uint64_t)vs_part->addr, vs_part->start, vs_part->len, descj, vs_part->flags); objId = (int64_t)objIdj; if (objId < 0) { return TSK_ERR; } // Save the volume info for creating unallocated blocks later TSK_DB_VS_PART_INFO vs_part_db; vs_part_db.objId = objId; vs_part_db.addr = vs_part->addr; vs_part_db.start = vs_part->start; vs_part_db.len = vs_part->len; strncpy(vs_part_db.desc, vs_part->desc, TSK_MAX_DB_VS_PART_INFO_DESC_LEN - 1); vs_part_db.flags = vs_part->flags; m_savedVsPartInfo.push_back(vs_part_db); saveObjectInfo(objId, parObjId, TSK_DB_OBJECT_TYPE_VOL); return TSK_OK; } /** * Adds a file system to database. Object ID for new file system stored in objId. 
* * @param fs_info Struct containing info for this file system * @param parObjId Parent object ID * @param objId Object ID of new file system * @returns TSK_ERR on error, TSK_OK on success */ TSK_RETVAL_ENUM TskAutoDbJava::addFsInfo(const TSK_FS_INFO* fs_info, int64_t parObjId, int64_t& objId) { jlong objIdj = m_jniEnv->CallLongMethod(m_javaDbObj, m_addFileSystemMethodID, parObjId, fs_info->offset, (int)fs_info->ftype, (uint64_t)fs_info->block_size, fs_info->block_count, fs_info->root_inum, fs_info->first_inum, fs_info->last_inum); objId = (int64_t)objIdj; if (objId < 0) { return TSK_ERR; } // Save the file system info for creating unallocated blocks later TSK_DB_FS_INFO fs_info_db; fs_info_db.objId = objId; fs_info_db.imgOffset = fs_info->offset; fs_info_db.fType = fs_info->ftype; fs_info_db.block_size = fs_info->block_size; fs_info_db.block_count = fs_info->block_count; fs_info_db.root_inum = fs_info->root_inum; fs_info_db.first_inum = fs_info->first_inum; fs_info_db.last_inum = fs_info->last_inum; m_savedFsInfo.push_back(fs_info_db); saveObjectInfo(objId, parObjId, TSK_DB_OBJECT_TYPE_FS); return TSK_OK; } /** * Adds a file to database. Object ID for new file stored in objId. * * @param fs_file * @param fs_attr * @param path File path * @param parObjId Parent object ID * @param fsObjId Object ID of the file system * @param objId Object ID of new file * @param dataSourceObjId Object ID of the data source * @returns TSK_ERR on error, TSK_OK on success */ TSK_RETVAL_ENUM TskAutoDbJava::addFsFile(TSK_FS_FILE* fs_file, const TSK_FS_ATTR* fs_attr, const char* path, int64_t fsObjId, int64_t& objId, int64_t dataSourceObjId) { if (fs_file->name == NULL) return TSK_ERR; // The object id for the parent folder. Will stay as zero if not the root folder int64_t parObjId = 0; // Root directory's parent should be the file system object. // Make sure it doesn't have a name, so that we don't pick up ".." 
entries if ((fs_file->fs_info->root_inum == fs_file->name->meta_addr) && ((fs_file->name->name == NULL) || (strlen(fs_file->name->name) == 0))) { // File is in the root directory parObjId = fsObjId; } // Add the file to the database return addFile(fs_file, fs_attr, path, fsObjId, parObjId, dataSourceObjId); } /** * Extract the extension from the given file name and store it in the supplied string. * @param name A file name * @param extension The file name extension will be extracted to extension. */ void extractExtension(char *name, char *extension) { char *ext = strrchr(name, '.'); //if ext is not null and is not the entire filename... if (ext && (name != ext)) { size_t extLen = strlen(ext); //... and doesn't only contain the '.' and isn't too long to be a real extension. if ((1 < extLen) && (extLen < 15)) { strncpy(extension, ext + 1, extLen - 1); //normalize to lower case, only works for ascii for (int i = 0; extension[i]; i++) { extension[i] = tolower(extension[i]); } } } } /** * Convert a sequence of characters to a jstring object. * We first convert the character sequence to UTF16 and then * use the JNI NewString() method to create the jstring. * We do it this way because we encountered data that contained * 4 byte (or more) UTF8 encoded characters and the JNI NewStringUTF() * method does not handle 4 byte UTF8 encoding. * * @param input The sequence of characters to be turned into a jstring. * @param newJString The new jstring object created from the input. 
 * @returns TSK_ERR on error, TSK_OK on success
 */
TSK_RETVAL_ENUM
TskAutoDbJava::createJString(const char * input, jstring & newJString) {
    // Worst case: every UTF-8 byte becomes one UTF-16 code unit; +1 for the
    // terminating NUL that is converted along with the text.
    size_t input_len = strlen(input) + 1;
    UTF16 * utf16_input;

    if ((utf16_input = (UTF16 *)tsk_malloc(input_len * sizeof(UTF16))) == NULL) {
        return TSK_ERR;
    }

    UTF8 * source = (UTF8 *)input;
    UTF16 * target = utf16_input;

    // Lenient conversion: invalid sequences are replaced rather than rejected.
    if (tsk_UTF8toUTF16((const UTF8 **)&source, (const UTF8 *)&source[input_len],
            &target, &target[input_len], TSKlenientConversion) != TSKconversionOK) {
        free(utf16_input);
        return TSK_ERR;
    }

    /*
     * To determine the length of the new string we subtract the address
     * of the start of the UTF16 buffer from the address at the end of the
     * UTF16 buffer (target is advanced in the call to the conversion routine
     * above). The -1 excludes the converted NUL terminator, since NewString()
     * takes an explicit length.
     */
    newJString = m_jniEnv->NewString(utf16_input, (target - utf16_input) - 1);

    free(utf16_input);
    return TSK_OK;
}

/**
* Adds a file and its associated slack file to database.
* Does not learn object ID for new files, and files may
* not be added to the database immediately.
* * @param fs_file * @param fs_attr * @param path File path * @param fsObjId Object ID of the file system * @param parObjId Parent object ID if known, 0 otherwise * @param dataSourceObjId Object ID of the data source * @returns TSK_ERR on error, TSK_OK on success */ TSK_RETVAL_ENUM TskAutoDbJava::addFile(TSK_FS_FILE* fs_file, const TSK_FS_ATTR* fs_attr, const char* path, int64_t fsObjId, int64_t parObjId, int64_t dataSourceObjId) { time_t mtime = 0; time_t crtime = 0; time_t ctime = 0; time_t atime = 0; TSK_OFF_T size = 0; int meta_type = 0; int meta_flags = 0; int meta_mode = 0; int meta_seq = 0; int gid = 0; int uid = 0; int type = TSK_FS_ATTR_TYPE_NOT_FOUND; int idx = 0; if (fs_file->name == NULL) return TSK_OK; if (fs_file->meta) { mtime = fs_file->meta->mtime; atime = fs_file->meta->atime; ctime = fs_file->meta->ctime; crtime = fs_file->meta->crtime; meta_type = fs_file->meta->type; meta_flags = fs_file->meta->flags; meta_mode = fs_file->meta->mode; gid = fs_file->meta->gid; uid = fs_file->meta->uid; meta_seq = fs_file->meta->seq; } size_t attr_nlen = 0; if (fs_attr) { type = fs_attr->type; idx = fs_attr->id; size = fs_attr->size; if (fs_attr->name) { if ((fs_attr->type != TSK_FS_ATTR_TYPE_NTFS_IDXROOT) || (strcmp(fs_attr->name, "$I30") != 0)) { attr_nlen = strlen(fs_attr->name); } } } // sanity check if (size < 0) { size = 0; } // combine name and attribute name size_t len = strlen(fs_file->name->name); char * name; size_t nlen = len + attr_nlen + 11; // Extra space for possible colon and '-slack' if ((name = (char *)tsk_malloc(nlen)) == NULL) { return TSK_ERR; } strncpy(name, fs_file->name->name, nlen); char extension[24] = ""; extractExtension(name, extension); // Add the attribute name if (attr_nlen > 0) { strncat(name, ":", nlen - strlen(name)); if (fs_attr != NULL) { strncat(name, fs_attr->name, nlen - strlen(name)); } } jstring namej; if (createJString(name, namej) != TSK_OK) { free(name); return TSK_ERR; } // clean up path // +2 = space for leading 
slash and terminating null size_t path_len = strlen(path) + 2; char* escaped_path; if ((escaped_path = (char *)tsk_malloc(path_len)) == NULL) { free(name); return TSK_ERR; } strncpy(escaped_path, "/", path_len); strncat(escaped_path, path, path_len - strlen(escaped_path)); jstring pathj; if (createJString(escaped_path, pathj) != TSK_OK) { free(name); free(escaped_path); return TSK_ERR; } // Escaped path is not needed beyond this point so free it. free(escaped_path); jstring extj; if (createJString(extension, extj) != TSK_OK) { free(name); return TSK_ERR; } /* NTFS uses sequence, otherwise we hash the path. We do this to map to the * correct parent folder if there are two from the root dir that eventually point to * the same folder (one deleted and one allocated) or two hard links. */ jlong par_seqj; if (TSK_FS_TYPE_ISNTFS(fs_file->fs_info->ftype)) { par_seqj = fs_file->name->par_seq; } else { par_seqj = -1; } TSK_INUM_T par_meta_addr = fs_file->name->par_addr; char *sid_str = NULL; jstring sidj = NULL; // return null across JNI if sid is not available if (tsk_fs_file_get_owner_sid(fs_file, &sid_str) == 0) { if (createJString(sid_str, sidj) != TSK_OK) { free(sid_str); return TSK_ERR; } free(sid_str); } // Add the file to the database jlong ret_val = m_jniEnv->CallLongMethod(m_javaDbObj, m_addFileMethodID, parObjId, fsObjId, dataSourceObjId, TSK_DB_FILES_TYPE_FS, type, idx, namej, fs_file->name->meta_addr, (uint64_t)fs_file->name->meta_seq, fs_file->name->type, meta_type, fs_file->name->flags, meta_flags, size, (unsigned long long)crtime, (unsigned long long)ctime, (unsigned long long) atime, (unsigned long long) mtime, meta_mode, gid, uid, pathj, extj, (uint64_t)meta_seq, par_meta_addr, par_seqj, sidj); if (ret_val < 0) { free(name); return TSK_ERR; } // Add entry for the slack space. // Current conditions for creating a slack file: // - File name is not empty, "." or ".." 
// - Data is non-resident // - The allocated size is greater than the initialized file size // See github issue #756 on why initsize and not size. // - The data is not compressed if ((fs_attr != NULL) && ((strlen(name) > 0) && (!TSK_FS_ISDOT(name))) && (!(fs_file->meta->flags & TSK_FS_META_FLAG_COMP)) && (fs_attr->flags & TSK_FS_ATTR_NONRES) && (fs_attr->nrd.allocsize > fs_attr->nrd.initsize)) { strncat(name, "-slack", 6); if (strlen(extension) > 0) { strncat(extension, "-slack", 6); } jstring slackNamej; if (createJString(name, slackNamej) != TSK_OK) { free(name); return TSK_ERR; } jstring slackExtj; if (createJString(extension, slackExtj) != TSK_OK) { free(name); return TSK_ERR; } TSK_OFF_T slackSize = fs_attr->nrd.allocsize - fs_attr->nrd.initsize; // Add slack file to database jlong ret_val = m_jniEnv->CallLongMethod(m_javaDbObj, m_addFileMethodID, parObjId, fsObjId, dataSourceObjId, TSK_DB_FILES_TYPE_SLACK, type, idx, slackNamej, fs_file->name->meta_addr, (uint64_t)fs_file->name->meta_seq, TSK_FS_NAME_TYPE_REG, TSK_FS_META_TYPE_REG, fs_file->name->flags, meta_flags, slackSize, (unsigned long long)crtime, (unsigned long long)ctime, (unsigned long long) atime, (unsigned long long) mtime, meta_mode, gid, uid, // md5TextPtr, known, pathj, slackExtj, (uint64_t)meta_seq, par_meta_addr, par_seqj, sidj); if (ret_val < 0) { free(name); return TSK_ERR; } } free(name); return TSK_OK; } // Internal function object to check for range overlap typedef struct _checkFileLayoutRangeOverlap { const vector & ranges; bool hasOverlap; explicit _checkFileLayoutRangeOverlap(const vector & ranges) : ranges(ranges), hasOverlap(false) {} bool getHasOverlap() const { return hasOverlap; } void operator() (const TSK_DB_FILE_LAYOUT_RANGE & range) { if (hasOverlap) return; //no need to check other uint64_t start = range.byteStart; uint64_t end = start + range.byteLen; vector::const_iterator it; for (it = ranges.begin(); it != ranges.end(); ++it) { const TSK_DB_FILE_LAYOUT_RANGE * otherRange 
= &(*it); if (&range == otherRange) continue; //skip, it's the same range uint64_t otherStart = otherRange->byteStart; uint64_t otherEnd = otherStart + otherRange->byteLen; if (start <= otherEnd && end >= otherStart) { hasOverlap = true; break; } } } } checkFileLayoutRangeOverlap; /** * Internal helper method to add unalloc, unused and carved files with layout ranges to db * Generates file_name and populates tsk_files, tsk_objects and tsk_file_layout tables * Adds a single entry to tsk_files table with an auto-generated file name, tsk_objects table, and one or more entries to tsk_file_layout table * @param dbFileType Type of file * @param parentObjId Id of the parent object in the database (fs, volume, or image) * @param fsObjId parent fs, or NULL if the file is not associated with fs * @param size Number of bytes in file * @param ranges vector containing one or more TSK_DB_FILE_LAYOUT_RANGE layout ranges (in) * @param objId object id of the file object created (output) * @param dataSourceObjId The object ID for the data source * @returns TSK_OK on success or TSK_ERR on error. 
*/ TSK_RETVAL_ENUM TskAutoDbJava::addFileWithLayoutRange(const TSK_DB_FILES_TYPE_ENUM dbFileType, const int64_t parentObjId, const int64_t fsObjId, const uint64_t size, vector& ranges, int64_t& objId, int64_t dataSourceObjId) { const size_t numRanges = ranges.size(); if (numRanges < 1) { tsk_error_reset(); tsk_error_set_errno(TSK_ERR_AUTO_DB); tsk_error_set_errstr("Error addFileWithLayoutRange() - no ranges present"); return TSK_ERR; } stringstream fileNameSs; switch (dbFileType) { case TSK_DB_FILES_TYPE_UNALLOC_BLOCKS: fileNameSs << "Unalloc"; break; case TSK_DB_FILES_TYPE_UNUSED_BLOCKS: fileNameSs << "Unused"; break; case TSK_DB_FILES_TYPE_CARVED: fileNameSs << "Carved"; break; default: stringstream sserr; tsk_error_reset(); tsk_error_set_errno(TSK_ERR_AUTO_DB); sserr << "Error addFileWithLayoutRange() - unsupported file type for file layout range: "; sserr << (int)dbFileType; tsk_error_set_errstr("%s", sserr.str().c_str()); return TSK_ERR; } //ensure layout ranges are sorted (to generate file name and to be inserted in sequence order) sort(ranges.begin(), ranges.end()); //dome some checking //ensure there is no overlap and each range has unique byte range const checkFileLayoutRangeOverlap & overlapRes = for_each(ranges.begin(), ranges.end(), checkFileLayoutRangeOverlap(ranges)); if (overlapRes.getHasOverlap()) { tsk_error_reset(); tsk_error_set_errno(TSK_ERR_AUTO_DB); tsk_error_set_errstr("Error addFileWithLayoutRange() - overlap detected between ranges"); return TSK_ERR; } //construct filename with parent obj id, start byte of first range, end byte of last range fileNameSs << "_" << parentObjId << "_" << ranges[0].byteStart; fileNameSs << "_" << (ranges[numRanges - 1].byteStart + ranges[numRanges - 1].byteLen); jstring namej = m_jniEnv->NewStringUTF(fileNameSs.str().c_str()); // Insert into tsk files and tsk objects jlong objIdj = m_jniEnv->CallLongMethod(m_javaDbObj, m_addLayoutFileMethodID, parentObjId, fsObjId, dataSourceObjId, dbFileType, namej, size); 
objId = (int64_t)objIdj; if (objId < 0) { return TSK_ERR; } // Fill in fileObjId and insert ranges for (vector::iterator it = ranges.begin(); it != ranges.end(); ++it) { TSK_DB_FILE_LAYOUT_RANGE & range = *it; range.fileObjId = objId; if (-1 == m_jniEnv->CallLongMethod(m_javaDbObj, m_addLayoutFileRangeMethodID, objId, range.byteStart, range.byteLen, (uint64_t)range.sequence)) { return TSK_ERR; } } return TSK_OK; } /** * Adds information about a unallocated file with layout ranges into the database. * Adds a single entry to tsk_files table with an auto-generated file name, tsk_objects table, and one or more entries to tsk_file_layout table * @param parentObjId Id of the parent object in the database (fs, volume, or image) * @param fsObjId parent fs, or NULL if the file is not associated with fs * @param size Number of bytes in file * @param ranges vector containing one or more TSK_DB_FILE_LAYOUT_RANGE layout ranges (in) * @param objId object id of the file object created (output) * @param dataSourceObjId The object ID for the data source * @returns TSK_OK on success or TSK_ERR on error. */ TSK_RETVAL_ENUM TskAutoDbJava::addUnallocBlockFile(const int64_t parentObjId, const int64_t fsObjId, const uint64_t size, vector& ranges, int64_t& objId, int64_t dataSourceObjId) { return addFileWithLayoutRange(TSK_DB_FILES_TYPE_UNALLOC_BLOCKS, parentObjId, fsObjId, size, ranges, objId, dataSourceObjId); } /** * Adds information about a unused file with layout ranges into the database. 
* Adds a single entry to tsk_files table with an auto-generated file name, tsk_objects table, and one or more entries to tsk_file_layout table * @param parentObjId Id of the parent object in the database (fs, volume, or image) * @param fsObjId parent fs, or NULL if the file is not associated with fs * @param size Number of bytes in file * @param ranges vector containing one or more TSK_DB_FILE_LAYOUT_RANGE layout ranges (in) * @param objId object id of the file object created (output) * @param dataSourceObjId The object ID for the data source * @returns TSK_OK on success or TSK_ERR on error. */ TSK_RETVAL_ENUM TskAutoDbJava::addUnusedBlockFile(const int64_t parentObjId, const int64_t fsObjId, const uint64_t size, vector& ranges, int64_t& objId, int64_t dataSourceObjId) { return addFileWithLayoutRange(TSK_DB_FILES_TYPE_UNUSED_BLOCKS, parentObjId, fsObjId, size, ranges, objId, dataSourceObjId); } /** * Add a virtual dir to hold unallocated block files for this file system. * @param fsObjId Object ID of the file system * @param objId Object ID of the created virtual dir * @param dataSourceObjId Object ID of the data source */ TSK_RETVAL_ENUM TskAutoDbJava::addUnallocFsBlockFilesParent(const int64_t fsObjId, int64_t& objId, int64_t dataSourceObjId) { const char * const unallocDirName = "$Unalloc"; jstring namej = m_jniEnv->NewStringUTF(unallocDirName); jlong objIdj = m_jniEnv->CallLongMethod(m_javaDbObj, m_addUnallocParentMethodID, fsObjId, namej); objId = (int64_t)objIdj; if (objId < 0) { return TSK_ERR; } return TSK_OK; } /** * Adds a new volume that will hold the unallocated blocks for the pool. 
* * @param vol_index The index for the new volume (should be one higher than the number of pool volumes) * @param parObjId The object ID of the parent volume system * @param objId Will be set to the object ID of the new volume * * @returns TSK_ERR on error, TSK_OK on success */ TSK_RETVAL_ENUM TskAutoDbJava::addUnallocatedPoolVolume(int vol_index, int64_t parObjId, int64_t& objId) { const char *desc = "Unallocated Blocks"; jstring descj = m_jniEnv->NewStringUTF(desc); jlong objIdj = m_jniEnv->CallLongMethod(m_javaDbObj, m_addVolumeMethodID, parObjId, vol_index, 0, 0, descj, 0); objId = (int64_t)objIdj; if (objId < 0) { return TSK_ERR; } return TSK_OK; } void TskAutoDbJava::close() { if (m_jniEnv == NULL) { return; } if (m_javaDbObj != NULL) { m_jniEnv->DeleteGlobalRef(m_javaDbObj); } if (m_callbackClass != NULL) { m_jniEnv->DeleteGlobalRef(m_callbackClass); } } int64_t TskAutoDbJava::getImageID() { return m_curImgId; } void TskAutoDbJava::closeImage() { TskAuto::closeImage(); } void TskAutoDbJava::setAddFileSystems(bool addFileSystems) { m_addFileSystems = addFileSystems; } void TskAutoDbJava::setNoFatFsOrphans(bool noFatFsOrphans) { m_noFatFsOrphans = noFatFsOrphans; } void TskAutoDbJava::setAddUnallocSpace(bool addUnallocSpace) { setAddUnallocSpace(addUnallocSpace, -1); } void TskAutoDbJava::setAddUnallocSpace(bool addUnallocSpace, int64_t minChunkSize) { m_addUnallocSpace = addUnallocSpace; m_minChunkSize = minChunkSize; m_maxChunkSize = -1; } void TskAutoDbJava::setAddUnallocSpace(int64_t minChunkSize, int64_t maxChunkSize) { m_addUnallocSpace = true; m_minChunkSize = minChunkSize; m_maxChunkSize = maxChunkSize; } /** * Adds an image to the database. 
* * @param a_num Number of image parts * @param a_images Array of paths to the image parts * @param a_type Image type * @param a_ssize Size of device sector in bytes (or 0 for default) * @param a_deviceId An ASCII-printable identifier for the device associated with the data source that is intended to be unique across multiple cases (e.g., a UUID). * @return 0 for success, 1 for failure */ uint8_t TskAutoDbJava::openImageUtf8(int a_num, const char *const a_images[], TSK_IMG_TYPE_ENUM a_type, unsigned int a_ssize, const char* a_deviceId) { uint8_t retval = TskAuto::openImageUtf8(a_num, a_images, a_type, a_ssize); if (retval != 0) { return retval; } if (addImageDetails(a_deviceId)) { return 1; } return 0; } /** * Adds an image to the database. * * @param a_num Number of image parts * @param a_images Array of paths to the image parts * @param a_type Image type * @param a_ssize Size of device sector in bytes (or 0 for default) * @param a_deviceId An ASCII-printable identifier for the device associated with the data source that is intended to be unique across multiple cases (e.g., a UUID). * @return 0 for success, 1 for failure */ uint8_t TskAutoDbJava::openImage(int a_num, const TSK_TCHAR * const a_images[], TSK_IMG_TYPE_ENUM a_type, unsigned int a_ssize, const char* a_deviceId) { #ifdef TSK_WIN32 uint8_t retval = TskAuto::openImage(a_num, a_images, a_type, a_ssize); if (retval != 0) { return retval; } return (addImageDetails(a_deviceId)); #else return openImageUtf8(a_num, a_images, a_type, a_ssize, a_deviceId); #endif } /** * Adds an image to the database. Requires that m_img_info is already initialized * * @param a_deviceId An ASCII-printable identifier for the device associated with the data source that is intended to be unique across multiple cases (e.g., a UUID). 
* @return 0 for success, 1 for failure */ uint8_t TskAutoDbJava::openImage(const char* a_deviceId) { if (m_img_info == NULL) { return 1; } return(addImageDetails(a_deviceId)); } /** * Adds image details to the existing database tables. * * @param deviceId An ASCII-printable identifier for the device associated with the data source that is intended to be unique across multiple cases (e.g., a UUID). * @return Returns 0 for success, 1 for failure */ uint8_t TskAutoDbJava::addImageDetails(const char* deviceId) { string md5 = ""; string sha1 = ""; string collectionDetails = ""; #if HAVE_LIBEWF if (m_img_info->itype == TSK_IMG_TYPE_EWF_EWF) { // @@@ This should really probably be inside of a tsk_img_ method IMG_EWF_INFO *ewf_info = (IMG_EWF_INFO *)m_img_info; if (ewf_info->md5hash_isset) { md5 = ewf_info->md5hash; } if (ewf_info->sha1hash_isset) { sha1 = ewf_info->sha1hash; } collectionDetails = ewf_get_details(ewf_info); } #endif // If the image has already been added to the database, update the acquisition details and return. 
if (m_curImgId > 0) { addAcquisitionDetails(m_curImgId, collectionDetails); return 0; } string devId; if (NULL != deviceId) { devId = deviceId; } else { devId = ""; } char **img_ptrs; #ifdef TSK_WIN32 // convert image paths to UTF-8 img_ptrs = (char **)tsk_malloc(m_img_info->num_img * sizeof(char *)); if (img_ptrs == NULL) { return 1; } for (int i = 0; i < m_img_info->num_img; i++) { char * img2 = (char*)tsk_malloc(1024 * sizeof(char)); UTF8 *ptr8; UTF16 *ptr16; ptr8 = (UTF8 *)img2; ptr16 = (UTF16 *)m_img_info->images[i]; uint8_t retval = tsk_UTF16toUTF8_lclorder((const UTF16 **)&ptr16, (UTF16 *) & ptr16[TSTRLEN(m_img_info->images[i]) + 1], &ptr8, (UTF8 *)((uintptr_t)ptr8 + 1024), TSKlenientConversion); if (retval != TSKconversionOK) { tsk_error_reset(); tsk_error_set_errno(TSK_ERR_AUTO_UNICODE); tsk_error_set_errstr("Error converting image to UTF-8\n"); return 1; } img_ptrs[i] = img2; } #else img_ptrs = m_img_info->images; #endif if (TSK_OK != addImageInfo(m_img_info->itype, m_img_info->sector_size, m_curImgId, m_curImgTZone, m_img_info->size, md5, sha1, "", devId, collectionDetails, img_ptrs, m_img_info->num_img)) { registerError(); return 1; } #ifdef TSK_WIN32 //cleanup for (int i = 0; i < m_img_info->num_img; ++i) { free(img_ptrs[i]); } free(img_ptrs); #endif return 0; } TSK_FILTER_ENUM TskAutoDbJava::filterVs(const TSK_VS_INFO * vs_info) { m_vsFound = true; if (TSK_OK != addVsInfo(vs_info, m_curImgId, m_curVsId)) { registerError(); return TSK_FILTER_STOP; } return TSK_FILTER_CONT; } TSK_FILTER_ENUM TskAutoDbJava::filterPool(const TSK_POOL_INFO * pool_info) { m_poolFound = true; if (m_volFound && m_vsFound) { // there's a volume system and volume if (TSK_OK != addPoolInfoAndVS(pool_info, m_curVolId, m_curPoolVs)) { registerError(); return TSK_FILTER_STOP; } // Save the parent obj ID for the pool m_poolOffsetToParentId[pool_info->img_offset] = m_curVolId; } else { // pool doesn't live in a volume, use image as parent if (TSK_OK != addPoolInfoAndVS(pool_info, 
m_curImgId, m_curPoolVs)) { registerError(); return TSK_FILTER_STOP; } // Save the parent obj ID for the pool m_poolOffsetToParentId[pool_info->img_offset] = m_curImgId; } // Store the volume system object ID for later use m_poolOffsetToVsId[pool_info->img_offset] = m_curPoolVs; return TSK_FILTER_CONT; } /** * Adds unallocated pool blocks to a new volume. * * @param numPool Will be updated with the number of pools processed * * @return Returns 0 for success, 1 for failure */ TSK_RETVAL_ENUM TskAutoDbJava::addUnallocatedPoolBlocksToDb(size_t & numPool) { for (size_t i = 0; i < m_poolInfos.size(); i++) { const TSK_POOL_INFO * pool_info = m_poolInfos[i]; if (m_poolOffsetToVsId.find(pool_info->img_offset) == m_poolOffsetToVsId.end()) { tsk_error_reset(); tsk_error_set_errno(TSK_ERR_AUTO_DB); tsk_error_set_errstr("Error addUnallocatedPoolBlocksToDb() - could not find volume system object ID for pool at offset %lld", pool_info->img_offset); return TSK_ERR; } int64_t curPoolVs = m_poolOffsetToVsId[pool_info->img_offset]; /* Make sure the pool_info is still allocated */ if (pool_info->tag != TSK_POOL_INFO_TAG) { tsk_error_reset(); tsk_error_set_errno(TSK_ERR_AUTO_DB); tsk_error_set_errstr("Error addUnallocatedPoolBlocksToDb() - pool_info is not allocated"); return TSK_ERR; } /* Only APFS pools are currently supported */ if (pool_info->ctype != TSK_POOL_TYPE_APFS) { continue; } /* Increment the count of pools found */ numPool++; /* Create the volume */ int64_t unallocVolObjId; if (TSK_ERR == addUnallocatedPoolVolume(pool_info->num_vols, curPoolVs, unallocVolObjId)) { tsk_error_reset(); tsk_error_set_errno(TSK_ERR_AUTO_DB); tsk_error_set_errstr("Error addUnallocatedPoolBlocksToDb() - error createing unallocated space pool volume"); return TSK_ERR; } /* Create the unallocated space files */ TSK_FS_ATTR_RUN * unalloc_runs = tsk_pool_unallocated_runs(pool_info); TSK_FS_ATTR_RUN * current_run = unalloc_runs; vector ranges; while (current_run != NULL) { TSK_DB_FILE_LAYOUT_RANGE 
tempRange(current_run->addr * pool_info->block_size, current_run->len * pool_info->block_size, 0); ranges.push_back(tempRange); int64_t fileObjId = 0; if (TSK_ERR == addUnallocBlockFile(unallocVolObjId, 0, current_run->len * pool_info->block_size, ranges, fileObjId, m_curImgId)) { registerError(); tsk_fs_attr_run_free(unalloc_runs); return TSK_ERR; } current_run = current_run->next; ranges.clear(); } tsk_fs_attr_run_free(unalloc_runs); } return TSK_OK; } TSK_FILTER_ENUM TskAutoDbJava::filterPoolVol(const TSK_POOL_VOLUME_INFO * pool_vol) { if (TSK_OK != addPoolVolumeInfo(pool_vol, m_curPoolVs, m_curPoolVol)) { registerError(); return TSK_FILTER_STOP; } return TSK_FILTER_CONT; } TSK_FILTER_ENUM TskAutoDbJava::filterVol(const TSK_VS_PART_INFO * vs_part) { m_volFound = true; m_foundStructure = true; m_poolFound = false; if (TSK_OK != addVolumeInfo(vs_part, m_curVsId, m_curVolId)) { registerError(); return TSK_FILTER_STOP; } return TSK_FILTER_CONT; } TSK_FILTER_ENUM TskAutoDbJava::filterFs(TSK_FS_INFO * fs_info) { TSK_FS_FILE *file_root; m_foundStructure = true; if (m_poolFound) { // there's a pool if (TSK_OK != addFsInfo(fs_info, m_curPoolVol, m_curFsId)) { registerError(); return TSK_FILTER_STOP; } } else if (m_volFound && m_vsFound) { // there's a volume system and volume if (TSK_OK != addFsInfo(fs_info, m_curVolId, m_curFsId)) { registerError(); return TSK_FILTER_STOP; } } else { // file system doesn't live in a volume, use image as parent if (TSK_OK != addFsInfo(fs_info, m_curImgId, m_curFsId)) { registerError(); return TSK_FILTER_STOP; } } // We won't hit the root directory on the walk, so open it now if ((file_root = tsk_fs_file_open(fs_info, NULL, "/")) != NULL) { processFile(file_root, ""); tsk_fs_file_close(file_root); file_root = NULL; } // make sure that flags are set to get all files -- we need this to // find parent directory TSK_FS_DIR_WALK_FLAG_ENUM filterFlags = (TSK_FS_DIR_WALK_FLAG_ENUM) (TSK_FS_DIR_WALK_FLAG_ALLOC | TSK_FS_DIR_WALK_FLAG_UNALLOC); 
//check if to skip processing of FAT orphans if (m_noFatFsOrphans && TSK_FS_TYPE_ISFAT(fs_info->ftype) ) { filterFlags = (TSK_FS_DIR_WALK_FLAG_ENUM) (filterFlags | TSK_FS_DIR_WALK_FLAG_NOORPHAN); } setFileFilterFlags(filterFlags); return TSK_FILTER_CONT; } /* Insert the file data into the file table. * @param fs_file * @param fs_attr * @param path * Returns TSK_ERR on error. */ TSK_RETVAL_ENUM TskAutoDbJava::insertFileData(TSK_FS_FILE * fs_file, const TSK_FS_ATTR * fs_attr, const char *path) { if (TSK_ERR == addFsFile(fs_file, fs_attr, path, m_curFsId, m_curFileId, m_curImgId)) { registerError(); return TSK_ERR; } return TSK_OK; } /** * Analyzes the open image and adds image info to a database. * Does not deal with transactions and such. Refer to startAddImage() * for more control. * @returns 1 if a critical error occurred (DB doesn't exist, no file system, etc.), 2 if errors occurred at some point adding files to the DB (corrupt file, etc.), and 0 otherwise. Errors will have been registered. */ uint8_t TskAutoDbJava::addFilesInImgToDb() { // @@@ This seems bad because we are overriding what the user may // have set. We should remove the public API if we are going to // override it -- presumably this was added so that we always have // unallocated volume space... setVolFilterFlags((TSK_VS_PART_FLAG_ENUM) (TSK_VS_PART_FLAG_ALLOC | TSK_VS_PART_FLAG_UNALLOC)); uint8_t retVal = 0; if (findFilesInImg()) { // map the boolean return value from findFiles to the three-state return value we use // @@@ findFiles should probably return this three-state enum too if (m_foundStructure == false) { retVal = 1; } else { retVal = 2; } } TSK_RETVAL_ENUM addUnallocRetval = TSK_OK; if (m_addUnallocSpace) addUnallocRetval = addUnallocSpaceToDb(); // findFiles return value trumps unalloc since it can return either 2 or 1. 
if (retVal) { return retVal; } else if (addUnallocRetval == TSK_ERR) { return 2; } else { return 0; } } /** * Start the process to add image/file metadata to database inside of a transaction. * User must call either commitAddImage() to commit the changes, * or revertAddImage() to revert them. * * @param numImg Number of image parts * @param imagePaths Array of paths to the image parts * @param imgType Image type * @param sSize Size of device sector in bytes (or 0 for default) * @param deviceId An ASCII-printable identifier for the device associated with the data source that is intended to be unique across multiple cases (e.g., a UUID) * @return 0 for success, 1 for failure */ uint8_t TskAutoDbJava::startAddImage(int numImg, const TSK_TCHAR * const imagePaths[], TSK_IMG_TYPE_ENUM imgType, unsigned int sSize, const char* deviceId) { if (tsk_verbose) tsk_fprintf(stderr, "TskAutoDbJava::startAddImage: Starting add image process\n"); if (openImage(numImg, imagePaths, imgType, sSize, deviceId)) { tsk_error_set_errstr2("TskAutoDbJava::startAddImage"); registerError(); return 1; } if (m_imageWriterEnabled) { tsk_img_writer_create(m_img_info, m_imageWriterPath); } if (m_addFileSystems) { return addFilesInImgToDb(); } else { return 0; } } /** * Start the process to add image/file metadata to database inside of a transaction. * User must call either commitAddImage() to commit the changes, * or revertAddImage() to revert them. 
 * @param img_info Previously initialized TSK_IMG_INFO object
 * @param deviceId An ASCII-printable identifier for the device associated with the data source that is intended to be unique across multiple cases (e.g., a UUID)
 * @return 0 for success, 1 for failure
 */
uint8_t
TskAutoDbJava::startAddImage(TSK_IMG_INFO * img_info, const char* deviceId)
{
    // Attach the already-open image handle; sets m_img_info on success
    openImageHandle(img_info);

    if (m_img_info == NULL) {
        return 1;
    }

    if (tsk_verbose)
        tsk_fprintf(stderr, "TskAutoDbJava::startAddImage: Starting add image process\n");

    if (openImage(deviceId)) {
        tsk_error_set_errstr2("TskAutoDbJava::startAddImage");
        registerError();
        return 1;
    }

    // Unlike the path-based overloads, a failed image writer creation is fatal here
    if (m_imageWriterEnabled) {
        if (tsk_img_writer_create(m_img_info, m_imageWriterPath)) {
            registerError();
            return 1;
        }
    }

    if (m_addFileSystems) {
        return addFilesInImgToDb();
    } else {
        return 0;
    }
}

#ifdef WIN32
/**
* Start the process to add image/file metadata to database inside of a transaction.
* Same functionality as addFilesInImgToDb().  Reverts
* all changes on error. User must call either commitAddImage() to commit the changes,
* or revertAddImage() to revert them.
* * @param numImg Number of image parts * @param imagePaths Array of paths to the image parts * @param imgType Image type * @param sSize Size of device sector in bytes (or 0 for default) * @param deviceId An ASCII-printable identifier for the device associated with the data source that is intended to be unique across multiple cases (e.g., a UUID) * @return 0 for success 1, for failure */ uint8_t TskAutoDbJava::startAddImage(int numImg, const char *const imagePaths[], TSK_IMG_TYPE_ENUM imgType, unsigned int sSize, const char* deviceId) { if (tsk_verbose) tsk_fprintf(stderr, "TskAutoDbJava::startAddImage_utf8: Starting add image process\n"); if (openImageUtf8(numImg, imagePaths, imgType, sSize, deviceId)) { tsk_error_set_errstr2("TskAutoDbJava::startAddImage"); registerError(); return 1; } if (m_imageWriterEnabled) { tsk_img_writer_create(m_img_info, m_imageWriterPath); } if (m_addFileSystems) { return addFilesInImgToDb(); } else { return 0; } } #endif /** * Cancel the running process. Will not be handled immediately. */ void TskAutoDbJava::stopAddImage() { if (tsk_verbose) tsk_fprintf(stderr, "TskAutoDbJava::stopAddImage: Stop request received\n"); m_stopped = true; setStopProcessing(); // flag is checked every time processFile() is called } /** * Set the current image's timezone */ void TskAutoDbJava::setTz(string tzone) { m_curImgTZone = tzone; } /** * Set the object ID for the data source */ void TskAutoDbJava::setDatasourceObjId(int64_t img_id) { m_curImgId = img_id; } TSK_RETVAL_ENUM TskAutoDbJava::processFile(TSK_FS_FILE * fs_file, const char *path) { // Check if the process has been canceled if (m_stopped) { if (tsk_verbose) tsk_fprintf(stderr, "TskAutoDbJava::processFile: Stop request detected\n"); return TSK_STOP; } /* Update the current directory, which can be used to show * progress. If we get a directory, then use its name. We * do this so that when we are searching for orphan files, then * we at least show $OrphanFiles as status. 
The secondary check * is to grab the parent folder from files once we return back * into a folder when we are doing our depth-first recursion. */ if (isDir(fs_file)) { m_curDirAddr = fs_file->name->meta_addr; tsk_take_lock(&m_curDirPathLock); m_curDirPath = string(path) + fs_file->name->name; tsk_release_lock(&m_curDirPathLock); } else if (m_curDirAddr != fs_file->name->par_addr) { m_curDirAddr = fs_file->name->par_addr; tsk_take_lock(&m_curDirPathLock); m_curDirPath = path; tsk_release_lock(&m_curDirPathLock); } /* process the attributes. The case of having 0 attributes can occur * with virtual / sparse files and HFS directories. * At some point, this can probably be cleaned * up if TSK is more consistent about if there should always be an * attribute or not. Sometimes, none of the attributes are added * because of their type and we always want to add a reference to * every file. */ TSK_RETVAL_ENUM retval = TSK_OK; m_attributeAdded = false; if (tsk_fs_file_attr_getsize(fs_file) > 0) { retval = processAttributes(fs_file, path); } // insert a general row if we didn't add a specific attribute one if ((retval == TSK_OK) && (m_attributeAdded == false)) { retval = insertFileData(fs_file, NULL, path); } // reset the file id m_curFileId = 0; if (retval == TSK_STOP) return TSK_STOP; else return TSK_OK; } // we return only OK or STOP -- errors are registered only and OK is returned. 
TSK_RETVAL_ENUM TskAutoDbJava::processAttribute(TSK_FS_FILE * fs_file, const TSK_FS_ATTR * fs_attr, const char *path) { // add the file metadata for the default attribute type if (isDefaultType(fs_file, fs_attr)) { if (insertFileData(fs_attr->fs_file, fs_attr, path) == TSK_ERR) { registerError(); return TSK_OK; } else { m_attributeAdded = true; } } return TSK_OK; } /** * Callback invoked per every unallocated block in the filesystem * Creates file ranges and file entries * A single file entry per consecutive range of blocks * @param a_block block being walked * @param a_ptr a pointer to an UNALLOC_BLOCK_WLK_TRACK struct * @returns TSK_WALK_CONT if continue, otherwise TSK_WALK_STOP if stop processing requested */ TSK_WALK_RET_ENUM TskAutoDbJava::fsWalkUnallocBlocksCb(const TSK_FS_BLOCK *a_block, void *a_ptr) { UNALLOC_BLOCK_WLK_TRACK * unallocBlockWlkTrack = (UNALLOC_BLOCK_WLK_TRACK *) a_ptr; if (unallocBlockWlkTrack->tskAutoDbJava.m_stopAllProcessing) return TSK_WALK_STOP; // initialize if this is the first block if (unallocBlockWlkTrack->isStart) { unallocBlockWlkTrack->isStart = false; unallocBlockWlkTrack->curRangeStart = a_block->addr; unallocBlockWlkTrack->prevBlock = a_block->addr; unallocBlockWlkTrack->size = unallocBlockWlkTrack->fsInfo.block_size; unallocBlockWlkTrack->nextSequenceNo = 0; return TSK_WALK_CONT; } // We want to keep consecutive blocks in the same run, so simply update prevBlock and the size // if this one is consecutive with the last call. But, if we have hit the max chunk // size, then break up this set of consecutive blocks. 
if ((a_block->addr == unallocBlockWlkTrack->prevBlock + 1) && ((unallocBlockWlkTrack->maxChunkSize <= 0) || (unallocBlockWlkTrack->size < unallocBlockWlkTrack->maxChunkSize))) { unallocBlockWlkTrack->prevBlock = a_block->addr; unallocBlockWlkTrack->size += unallocBlockWlkTrack->fsInfo.block_size; return TSK_WALK_CONT; } // this block is not contiguous with the previous one or we've hit the maximum size; create and add a range object const uint64_t rangeStartOffset = unallocBlockWlkTrack->curRangeStart * unallocBlockWlkTrack->fsInfo.block_size + unallocBlockWlkTrack->fsInfo.offset; const uint64_t rangeSizeBytes = (1 + unallocBlockWlkTrack->prevBlock - unallocBlockWlkTrack->curRangeStart) * unallocBlockWlkTrack->fsInfo.block_size; unallocBlockWlkTrack->ranges.push_back(TSK_DB_FILE_LAYOUT_RANGE(rangeStartOffset, rangeSizeBytes, unallocBlockWlkTrack->nextSequenceNo++)); // Return (instead of adding this run) if we are going to: // a) Make one big file with all unallocated space (minChunkSize == 0) // or // b) Only make an unallocated file once we have at least chunkSize bytes // of data in our current run (minChunkSize > 0) // In either case, reset the range pointers and add this block to the size if ((unallocBlockWlkTrack->minChunkSize == 0) || ((unallocBlockWlkTrack->minChunkSize > 0) && (unallocBlockWlkTrack->size < unallocBlockWlkTrack->minChunkSize))) { unallocBlockWlkTrack->size += unallocBlockWlkTrack->fsInfo.block_size; unallocBlockWlkTrack->curRangeStart = a_block->addr; unallocBlockWlkTrack->prevBlock = a_block->addr; return TSK_WALK_CONT; } // at this point we are either chunking and have reached the chunk limit // or we're not chunking. 
Either way we now add what we've got to the DB int64_t fileObjId = 0; TskAutoDbJava & tskAutoDbJava = unallocBlockWlkTrack->tskAutoDbJava; if (tskAutoDbJava.addUnallocBlockFile(tskAutoDbJava.m_curUnallocDirId, unallocBlockWlkTrack->fsObjId, unallocBlockWlkTrack->size, unallocBlockWlkTrack->ranges, fileObjId, tskAutoDbJava.m_curImgId) == TSK_ERR) { // @@@ Handle error -> Don't have access to registerError() though... } // reset unallocBlockWlkTrack->curRangeStart = a_block->addr; unallocBlockWlkTrack->prevBlock = a_block->addr; unallocBlockWlkTrack->size = unallocBlockWlkTrack->fsInfo.block_size; // The current block is part of the new range unallocBlockWlkTrack->ranges.clear(); unallocBlockWlkTrack->nextSequenceNo = 0; //we don't know what the last unalloc block is in advance //and will handle the last range in addFsInfoUnalloc() return TSK_WALK_CONT; } /** * Add unallocated space for the given file system to the database. * Create files for consecutive unalloc block ranges. * @param dbFsInfo fs to process * @returns TSK_OK on success, TSK_ERR on error */ TSK_RETVAL_ENUM TskAutoDbJava::addFsInfoUnalloc(const TSK_DB_FS_INFO & dbFsInfo) { // Unalloc space is handled separately for APFS if (dbFsInfo.fType == TSK_FS_TYPE_APFS) { return TSK_OK; } //open the fs we have from database TSK_FS_INFO * fsInfo = tsk_fs_open_img(m_img_info, dbFsInfo.imgOffset, dbFsInfo.fType); if (fsInfo == NULL) { tsk_error_set_errstr2("TskAutoDbJava::addFsInfoUnalloc: error opening fs at offset %" PRIdOFF, dbFsInfo.imgOffset); registerError(); return TSK_ERR; } //create a "fake" dir to hold the unalloc files for the fs if (addUnallocFsBlockFilesParent(dbFsInfo.objId, m_curUnallocDirId, m_curImgId) == TSK_ERR) { tsk_error_set_errstr2("addFsInfoUnalloc: error creating dir for unallocated space"); registerError(); return TSK_ERR; } //walk unalloc blocks on the fs and process them //initialize the unalloc block walk tracking UNALLOC_BLOCK_WLK_TRACK unallocBlockWlkTrack(*this, *fsInfo, 
dbFsInfo.objId, m_minChunkSize, m_maxChunkSize); uint8_t block_walk_ret = tsk_fs_block_walk(fsInfo, fsInfo->first_block, fsInfo->last_block, (TSK_FS_BLOCK_WALK_FLAG_ENUM)(TSK_FS_BLOCK_WALK_FLAG_UNALLOC | TSK_FS_BLOCK_WALK_FLAG_AONLY), fsWalkUnallocBlocksCb, &unallocBlockWlkTrack); if (block_walk_ret == 1) { stringstream errss; tsk_fs_close(fsInfo); errss << "TskAutoDbJava::addFsInfoUnalloc: error walking fs unalloc blocks, fs id: "; errss << unallocBlockWlkTrack.fsObjId; tsk_error_set_errstr2("%s", errss.str().c_str()); registerError(); return TSK_ERR; } if(m_stopAllProcessing) { tsk_fs_close(fsInfo); return TSK_OK; } // handle creation of the last range // make range inclusive from curBlockStart to prevBlock const uint64_t byteStart = unallocBlockWlkTrack.curRangeStart * fsInfo->block_size + fsInfo->offset; const uint64_t byteLen = (1 + unallocBlockWlkTrack.prevBlock - unallocBlockWlkTrack.curRangeStart) * fsInfo->block_size; unallocBlockWlkTrack.ranges.push_back(TSK_DB_FILE_LAYOUT_RANGE(byteStart, byteLen, unallocBlockWlkTrack.nextSequenceNo++)); int64_t fileObjId = 0; if (addUnallocBlockFile(m_curUnallocDirId, dbFsInfo.objId, unallocBlockWlkTrack.size, unallocBlockWlkTrack.ranges, fileObjId, m_curImgId) == TSK_ERR) { registerError(); tsk_fs_close(fsInfo); return TSK_ERR; } //cleanup tsk_fs_close(fsInfo); return TSK_OK; } /** * Process all unallocated space for this disk image and create "virtual" files with layouts * @returns TSK_OK on success, TSK_ERR on error */ TSK_RETVAL_ENUM TskAutoDbJava::addUnallocSpaceToDb() { if (m_stopAllProcessing) { return TSK_OK; } size_t numVsP = 0; size_t numFs = 0; size_t numPool = 0; TSK_RETVAL_ENUM retFsSpace = addUnallocFsSpaceToDb(numFs); TSK_RETVAL_ENUM retVsSpace = addUnallocVsSpaceToDb(numVsP); TSK_RETVAL_ENUM retPoolSpace = addUnallocatedPoolBlocksToDb(numPool); //handle case when no fs and no vs partitions and no pools TSK_RETVAL_ENUM retImgFile = TSK_OK; if (numVsP == 0 && numFs == 0 && numPool == 0) { retImgFile = 
addUnallocImageSpaceToDb(); } if (retFsSpace == TSK_ERR || retVsSpace == TSK_ERR || retPoolSpace == TSK_ERR || retImgFile == TSK_ERR) return TSK_ERR; else return TSK_OK; } /** * Process each file system in the database and add its unallocated sectors to virtual files. * @param numFs (out) number of filesystems found * @returns TSK_OK on success, TSK_ERR on error (if some or all fs could not be processed) */ TSK_RETVAL_ENUM TskAutoDbJava::addUnallocFsSpaceToDb(size_t & numFs) { if(m_stopAllProcessing) { return TSK_OK; } numFs = m_savedFsInfo.size(); TSK_RETVAL_ENUM allFsProcessRet = TSK_OK; for (vector::iterator it = m_savedFsInfo.begin(); it!= m_savedFsInfo.end(); ++it) { if (m_stopAllProcessing) { break; } if (addFsInfoUnalloc(*it) == TSK_ERR) allFsProcessRet = TSK_ERR; } //TODO set parent_path for newly created virt dir/file hierarchy for consistency return allFsProcessRet; } /** * Process each volume in the database and add its unallocated sectors to virtual files. * @param numVsP (out) number of vs partitions found * @returns TSK_OK on success, TSK_ERR on error */ TSK_RETVAL_ENUM TskAutoDbJava::addUnallocVsSpaceToDb(size_t & numVsP) { numVsP = m_savedVsPartInfo.size(); //get fs infos to see if this vspart has fs for (vector::const_iterator it = m_savedVsPartInfo.begin(); it != m_savedVsPartInfo.end(); ++it) { if (m_stopAllProcessing) { break; } const TSK_DB_VS_PART_INFO &vsPart = *it; //interested in unalloc, meta, or alloc and no fs if ( (vsPart.flags & (TSK_VS_PART_FLAG_UNALLOC | TSK_VS_PART_FLAG_META)) == 0 ) { //check if vspart has no fs bool hasFs = false; for (vector::const_iterator itFs = m_savedFsInfo.begin(); itFs != m_savedFsInfo.end(); ++itFs) { const TSK_DB_FS_INFO & fsInfo = *itFs; TSK_DB_OBJECT* fsObjInfo = NULL; if (getObjectInfo(fsInfo.objId, &fsObjInfo) == TSK_ERR ) { stringstream errss; errss << "addUnallocVsSpaceToDb: error getting object info for fs from db, objId: " << fsInfo.objId; tsk_error_set_errstr2("%s", errss.str().c_str()); 
registerError(); return TSK_ERR; } if (fsObjInfo->parObjId == vsPart.objId) { hasFs = true; break; } } if (hasFs == true) { //skip processing this vspart continue; } // Check if the volume contains a pool bool hasPool = false; for (std::map::iterator iter = m_poolOffsetToParentId.begin(); iter != m_poolOffsetToParentId.end(); ++iter) { if (iter->second == vsPart.objId) { hasPool = true; } } if (hasPool) { // Skip processing this vspart continue; } } // Get sector size and image offset from parent vs info // Get parent id of this vs part TSK_DB_OBJECT* vsPartObj = NULL; if (getObjectInfo(vsPart.objId, &vsPartObj) == TSK_ERR) { stringstream errss; errss << "addUnallocVsSpaceToDb: error getting object info for vs part from db, objId: " << vsPart.objId; tsk_error_set_errstr2("%s", errss.str().c_str()); registerError(); return TSK_ERR; } if (vsPartObj == NULL) { return TSK_ERR; } TSK_DB_VS_INFO* vsInfo = NULL; for (vector::iterator itVs = m_savedVsInfo.begin(); itVs != m_savedVsInfo.end(); ++itVs) { TSK_DB_VS_INFO* temp_vs_info = &(*itVs); if (temp_vs_info->objId == vsPartObj->parObjId) { vsInfo = temp_vs_info; } } if (vsInfo == NULL ) { stringstream errss; errss << "addUnallocVsSpaceToDb: error getting volume system info from db, objId: " << vsPartObj->parObjId; tsk_error_set_errstr2("%s", errss.str().c_str()); registerError(); return TSK_ERR; } // Create an unalloc file with unalloc part, with vs part as parent vector ranges; const uint64_t byteStart = vsInfo->offset + vsInfo->block_size * vsPart.start; const uint64_t byteLen = vsInfo->block_size * vsPart.len; TSK_DB_FILE_LAYOUT_RANGE tempRange(byteStart, byteLen, 0); ranges.push_back(tempRange); int64_t fileObjId = 0; if (addUnallocBlockFile(vsPart.objId, 0, tempRange.byteLen, ranges, fileObjId, m_curImgId) == TSK_ERR) { registerError(); return TSK_ERR; } } return TSK_OK; } /** * Adds unalloc space for the image if there is no volumes and no file systems. 
* * @returns TSK_OK on success, TSK_ERR on error */ TSK_RETVAL_ENUM TskAutoDbJava::addUnallocImageSpaceToDb() { const TSK_OFF_T imgSize = getImageSize(); if (imgSize == -1) { tsk_error_set_errstr("addUnallocImageSpaceToDb: error getting current image size, can't create unalloc block file for the image."); registerError(); return TSK_ERR; } else { TSK_DB_FILE_LAYOUT_RANGE tempRange(0, imgSize, 0); //add unalloc block file for the entire image vector ranges; ranges.push_back(tempRange); int64_t fileObjId = 0; if (TSK_ERR == addUnallocBlockFile(m_curImgId, 0, imgSize, ranges, fileObjId, m_curImgId)) { return TSK_ERR; } } return TSK_OK; } /** * Returns the directory currently being analyzed by processFile(). * Safe to use from another thread than processFile(). * * @returns curDirPath string representing currently analyzed directory */ const std::string TskAutoDbJava::getCurDir() { string curDirPath; tsk_take_lock(&m_curDirPathLock); curDirPath = m_curDirPath; tsk_release_lock(&m_curDirPathLock); return curDirPath; } sleuthkit-4.11.1/bindings/java/jni/dataModel_SleuthkitJNI.h000644 000765 000024 00000037277 14137073413 024365 0ustar00carrierstaff000000 000000 /* DO NOT EDIT THIS FILE - it is machine generated */ #include /* Header for class org_sleuthkit_datamodel_SleuthkitJNI */ #ifndef _Included_org_sleuthkit_datamodel_SleuthkitJNI #define _Included_org_sleuthkit_datamodel_SleuthkitJNI #ifdef __cplusplus extern "C" { #endif /* * Class: org_sleuthkit_datamodel_SleuthkitJNI * Method: getVersionNat * Signature: ()Ljava/lang/String; */ JNIEXPORT jstring JNICALL Java_org_sleuthkit_datamodel_SleuthkitJNI_getVersionNat (JNIEnv *, jclass); /* * Class: org_sleuthkit_datamodel_SleuthkitJNI * Method: startVerboseLoggingNat * Signature: (Ljava/lang/String;)V */ JNIEXPORT void JNICALL Java_org_sleuthkit_datamodel_SleuthkitJNI_startVerboseLoggingNat (JNIEnv *, jclass, jstring); /* * Class: org_sleuthkit_datamodel_SleuthkitJNI * Method: hashDbOpenNat * Signature: 
(Ljava/lang/String;)I */ JNIEXPORT jint JNICALL Java_org_sleuthkit_datamodel_SleuthkitJNI_hashDbOpenNat (JNIEnv *, jclass, jstring); /* * Class: org_sleuthkit_datamodel_SleuthkitJNI * Method: hashDbNewNat * Signature: (Ljava/lang/String;)I */ JNIEXPORT jint JNICALL Java_org_sleuthkit_datamodel_SleuthkitJNI_hashDbNewNat (JNIEnv *, jclass, jstring); /* * Class: org_sleuthkit_datamodel_SleuthkitJNI * Method: hashDbBeginTransactionNat * Signature: (I)I */ JNIEXPORT jint JNICALL Java_org_sleuthkit_datamodel_SleuthkitJNI_hashDbBeginTransactionNat (JNIEnv *, jclass, jint); /* * Class: org_sleuthkit_datamodel_SleuthkitJNI * Method: hashDbCommitTransactionNat * Signature: (I)I */ JNIEXPORT jint JNICALL Java_org_sleuthkit_datamodel_SleuthkitJNI_hashDbCommitTransactionNat (JNIEnv *, jclass, jint); /* * Class: org_sleuthkit_datamodel_SleuthkitJNI * Method: hashDbRollbackTransactionNat * Signature: (I)I */ JNIEXPORT jint JNICALL Java_org_sleuthkit_datamodel_SleuthkitJNI_hashDbRollbackTransactionNat (JNIEnv *, jclass, jint); /* * Class: org_sleuthkit_datamodel_SleuthkitJNI * Method: hashDbAddEntryNat * Signature: (Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;I)I */ JNIEXPORT jint JNICALL Java_org_sleuthkit_datamodel_SleuthkitJNI_hashDbAddEntryNat (JNIEnv *, jclass, jstring, jstring, jstring, jstring, jstring, jint); /* * Class: org_sleuthkit_datamodel_SleuthkitJNI * Method: hashDbIsUpdateableNat * Signature: (I)Z */ JNIEXPORT jboolean JNICALL Java_org_sleuthkit_datamodel_SleuthkitJNI_hashDbIsUpdateableNat (JNIEnv *, jclass, jint); /* * Class: org_sleuthkit_datamodel_SleuthkitJNI * Method: hashDbIsReindexableNat * Signature: (I)Z */ JNIEXPORT jboolean JNICALL Java_org_sleuthkit_datamodel_SleuthkitJNI_hashDbIsReindexableNat (JNIEnv *, jclass, jint); /* * Class: org_sleuthkit_datamodel_SleuthkitJNI * Method: hashDbPathNat * Signature: (I)Ljava/lang/String; */ JNIEXPORT jstring JNICALL 
Java_org_sleuthkit_datamodel_SleuthkitJNI_hashDbPathNat (JNIEnv *, jclass, jint); /* * Class: org_sleuthkit_datamodel_SleuthkitJNI * Method: hashDbIndexPathNat * Signature: (I)Ljava/lang/String; */ JNIEXPORT jstring JNICALL Java_org_sleuthkit_datamodel_SleuthkitJNI_hashDbIndexPathNat (JNIEnv *, jclass, jint); /* * Class: org_sleuthkit_datamodel_SleuthkitJNI * Method: hashDbGetDisplayName * Signature: (I)Ljava/lang/String; */ JNIEXPORT jstring JNICALL Java_org_sleuthkit_datamodel_SleuthkitJNI_hashDbGetDisplayName (JNIEnv *, jclass, jint); /* * Class: org_sleuthkit_datamodel_SleuthkitJNI * Method: hashDbCloseAll * Signature: ()V */ JNIEXPORT void JNICALL Java_org_sleuthkit_datamodel_SleuthkitJNI_hashDbCloseAll (JNIEnv *, jclass); /* * Class: org_sleuthkit_datamodel_SleuthkitJNI * Method: hashDbClose * Signature: (I)V */ JNIEXPORT void JNICALL Java_org_sleuthkit_datamodel_SleuthkitJNI_hashDbClose (JNIEnv *, jclass, jint); /* * Class: org_sleuthkit_datamodel_SleuthkitJNI * Method: hashDbCreateIndexNat * Signature: (I)V */ JNIEXPORT void JNICALL Java_org_sleuthkit_datamodel_SleuthkitJNI_hashDbCreateIndexNat (JNIEnv *, jclass, jint); /* * Class: org_sleuthkit_datamodel_SleuthkitJNI * Method: hashDbIndexExistsNat * Signature: (I)Z */ JNIEXPORT jboolean JNICALL Java_org_sleuthkit_datamodel_SleuthkitJNI_hashDbIndexExistsNat (JNIEnv *, jclass, jint); /* * Class: org_sleuthkit_datamodel_SleuthkitJNI * Method: hashDbIsIdxOnlyNat * Signature: (I)Z */ JNIEXPORT jboolean JNICALL Java_org_sleuthkit_datamodel_SleuthkitJNI_hashDbIsIdxOnlyNat (JNIEnv *, jclass, jint); /* * Class: org_sleuthkit_datamodel_SleuthkitJNI * Method: hashDbLookup * Signature: (Ljava/lang/String;I)Z */ JNIEXPORT jboolean JNICALL Java_org_sleuthkit_datamodel_SleuthkitJNI_hashDbLookup (JNIEnv *, jclass, jstring, jint); /* * Class: org_sleuthkit_datamodel_SleuthkitJNI * Method: hashDbLookupVerbose * Signature: (Ljava/lang/String;I)Lorg/sleuthkit/datamodel/HashHitInfo; */ JNIEXPORT jobject JNICALL 
Java_org_sleuthkit_datamodel_SleuthkitJNI_hashDbLookupVerbose (JNIEnv *, jclass, jstring, jint); /* * Class: org_sleuthkit_datamodel_SleuthkitJNI * Method: initAddImgNat * Signature: (Lorg/sleuthkit/datamodel/TskCaseDbBridge;Ljava/lang/String;ZZ)J */ JNIEXPORT jlong JNICALL Java_org_sleuthkit_datamodel_SleuthkitJNI_initAddImgNat (JNIEnv *, jclass, jobject, jstring, jboolean, jboolean); /* * Class: org_sleuthkit_datamodel_SleuthkitJNI * Method: initializeAddImgNat * Signature: (Lorg/sleuthkit/datamodel/TskCaseDbBridge;Ljava/lang/String;ZZZ)J */ JNIEXPORT jlong JNICALL Java_org_sleuthkit_datamodel_SleuthkitJNI_initializeAddImgNat (JNIEnv *, jclass, jobject, jstring, jboolean, jboolean, jboolean); /* * Class: org_sleuthkit_datamodel_SleuthkitJNI * Method: runOpenAndAddImgNat * Signature: (JLjava/lang/String;[Ljava/lang/String;ILjava/lang/String;)V */ JNIEXPORT void JNICALL Java_org_sleuthkit_datamodel_SleuthkitJNI_runOpenAndAddImgNat (JNIEnv *, jclass, jlong, jstring, jobjectArray, jint, jstring); /* * Class: org_sleuthkit_datamodel_SleuthkitJNI * Method: runAddImgNat * Signature: (JLjava/lang/String;JJLjava/lang/String;Ljava/lang/String;)V */ JNIEXPORT void JNICALL Java_org_sleuthkit_datamodel_SleuthkitJNI_runAddImgNat (JNIEnv *, jclass, jlong, jstring, jlong, jlong, jstring, jstring); /* * Class: org_sleuthkit_datamodel_SleuthkitJNI * Method: stopAddImgNat * Signature: (J)V */ JNIEXPORT void JNICALL Java_org_sleuthkit_datamodel_SleuthkitJNI_stopAddImgNat (JNIEnv *, jclass, jlong); /* * Class: org_sleuthkit_datamodel_SleuthkitJNI * Method: finishAddImgNat * Signature: (J)J */ JNIEXPORT jlong JNICALL Java_org_sleuthkit_datamodel_SleuthkitJNI_finishAddImgNat (JNIEnv *, jclass, jlong); /* * Class: org_sleuthkit_datamodel_SleuthkitJNI * Method: openImgNat * Signature: ([Ljava/lang/String;II)J */ JNIEXPORT jlong JNICALL Java_org_sleuthkit_datamodel_SleuthkitJNI_openImgNat (JNIEnv *, jclass, jobjectArray, jint, jint); /* * Class: org_sleuthkit_datamodel_SleuthkitJNI * 
Method: openVsNat * Signature: (JJ)J */ JNIEXPORT jlong JNICALL Java_org_sleuthkit_datamodel_SleuthkitJNI_openVsNat (JNIEnv *, jclass, jlong, jlong); /* * Class: org_sleuthkit_datamodel_SleuthkitJNI * Method: openVolNat * Signature: (JJ)J */ JNIEXPORT jlong JNICALL Java_org_sleuthkit_datamodel_SleuthkitJNI_openVolNat (JNIEnv *, jclass, jlong, jlong); /* * Class: org_sleuthkit_datamodel_SleuthkitJNI * Method: openPoolNat * Signature: (JJ)J */ JNIEXPORT jlong JNICALL Java_org_sleuthkit_datamodel_SleuthkitJNI_openPoolNat (JNIEnv *, jclass, jlong, jlong); /* * Class: org_sleuthkit_datamodel_SleuthkitJNI * Method: getImgInfoForPoolNat * Signature: (JJ)J */ JNIEXPORT jlong JNICALL Java_org_sleuthkit_datamodel_SleuthkitJNI_getImgInfoForPoolNat (JNIEnv *, jclass, jlong, jlong); /* * Class: org_sleuthkit_datamodel_SleuthkitJNI * Method: openFsNat * Signature: (JJ)J */ JNIEXPORT jlong JNICALL Java_org_sleuthkit_datamodel_SleuthkitJNI_openFsNat (JNIEnv *, jclass, jlong, jlong); /* * Class: org_sleuthkit_datamodel_SleuthkitJNI * Method: openFileNat * Signature: (JJII)J */ JNIEXPORT jlong JNICALL Java_org_sleuthkit_datamodel_SleuthkitJNI_openFileNat (JNIEnv *, jclass, jlong, jlong, jint, jint); /* * Class: org_sleuthkit_datamodel_SleuthkitJNI * Method: readImgNat * Signature: (J[BJJ)I */ JNIEXPORT jint JNICALL Java_org_sleuthkit_datamodel_SleuthkitJNI_readImgNat (JNIEnv *, jclass, jlong, jbyteArray, jlong, jlong); /* * Class: org_sleuthkit_datamodel_SleuthkitJNI * Method: readVsNat * Signature: (J[BJJ)I */ JNIEXPORT jint JNICALL Java_org_sleuthkit_datamodel_SleuthkitJNI_readVsNat (JNIEnv *, jclass, jlong, jbyteArray, jlong, jlong); /* * Class: org_sleuthkit_datamodel_SleuthkitJNI * Method: readPoolNat * Signature: (J[BJJ)I */ JNIEXPORT jint JNICALL Java_org_sleuthkit_datamodel_SleuthkitJNI_readPoolNat (JNIEnv *, jclass, jlong, jbyteArray, jlong, jlong); /* * Class: org_sleuthkit_datamodel_SleuthkitJNI * Method: readVolNat * Signature: (J[BJJ)I */ JNIEXPORT jint JNICALL 
Java_org_sleuthkit_datamodel_SleuthkitJNI_readVolNat (JNIEnv *, jclass, jlong, jbyteArray, jlong, jlong); /* * Class: org_sleuthkit_datamodel_SleuthkitJNI * Method: readFsNat * Signature: (J[BJJ)I */ JNIEXPORT jint JNICALL Java_org_sleuthkit_datamodel_SleuthkitJNI_readFsNat (JNIEnv *, jclass, jlong, jbyteArray, jlong, jlong); /* * Class: org_sleuthkit_datamodel_SleuthkitJNI * Method: readFileNat * Signature: (J[BJIJ)I */ JNIEXPORT jint JNICALL Java_org_sleuthkit_datamodel_SleuthkitJNI_readFileNat (JNIEnv *, jclass, jlong, jbyteArray, jlong, jint, jlong); /* * Class: org_sleuthkit_datamodel_SleuthkitJNI * Method: saveFileMetaDataTextNat * Signature: (JLjava/lang/String;)I */ JNIEXPORT jint JNICALL Java_org_sleuthkit_datamodel_SleuthkitJNI_saveFileMetaDataTextNat (JNIEnv *, jclass, jlong, jstring); /* * Class: org_sleuthkit_datamodel_SleuthkitJNI * Method: getPathsForImageNat * Signature: (J)[Ljava/lang/String; */ JNIEXPORT jobjectArray JNICALL Java_org_sleuthkit_datamodel_SleuthkitJNI_getPathsForImageNat (JNIEnv *, jclass, jlong); /* * Class: org_sleuthkit_datamodel_SleuthkitJNI * Method: getSizeForImageNat * Signature: (J)J */ JNIEXPORT jlong JNICALL Java_org_sleuthkit_datamodel_SleuthkitJNI_getSizeForImageNat (JNIEnv *, jclass, jlong); /* * Class: org_sleuthkit_datamodel_SleuthkitJNI * Method: getTypeForImageNat * Signature: (J)J */ JNIEXPORT jlong JNICALL Java_org_sleuthkit_datamodel_SleuthkitJNI_getTypeForImageNat (JNIEnv *, jclass, jlong); /* * Class: org_sleuthkit_datamodel_SleuthkitJNI * Method: getSectorSizeForImageNat * Signature: (J)J */ JNIEXPORT jlong JNICALL Java_org_sleuthkit_datamodel_SleuthkitJNI_getSectorSizeForImageNat (JNIEnv *, jclass, jlong); /* * Class: org_sleuthkit_datamodel_SleuthkitJNI * Method: getMD5HashForImageNat * Signature: (J)Ljava/lang/String; */ JNIEXPORT jstring JNICALL Java_org_sleuthkit_datamodel_SleuthkitJNI_getMD5HashForImageNat (JNIEnv *, jclass, jlong); /* * Class: org_sleuthkit_datamodel_SleuthkitJNI * Method: 
getSha1HashForImageNat * Signature: (J)Ljava/lang/String; */ JNIEXPORT jstring JNICALL Java_org_sleuthkit_datamodel_SleuthkitJNI_getSha1HashForImageNat (JNIEnv *, jclass, jlong); /* * Class: org_sleuthkit_datamodel_SleuthkitJNI * Method: getCollectionDetailsForImageNat * Signature: (J)Ljava/lang/String; */ JNIEXPORT jstring JNICALL Java_org_sleuthkit_datamodel_SleuthkitJNI_getCollectionDetailsForImageNat (JNIEnv *, jclass, jlong); /* * Class: org_sleuthkit_datamodel_SleuthkitJNI * Method: closeImgNat * Signature: (J)V */ JNIEXPORT void JNICALL Java_org_sleuthkit_datamodel_SleuthkitJNI_closeImgNat (JNIEnv *, jclass, jlong); /* * Class: org_sleuthkit_datamodel_SleuthkitJNI * Method: closePoolNat * Signature: (J)V */ JNIEXPORT void JNICALL Java_org_sleuthkit_datamodel_SleuthkitJNI_closePoolNat (JNIEnv *, jclass, jlong); /* * Class: org_sleuthkit_datamodel_SleuthkitJNI * Method: closeVsNat * Signature: (J)V */ JNIEXPORT void JNICALL Java_org_sleuthkit_datamodel_SleuthkitJNI_closeVsNat (JNIEnv *, jclass, jlong); /* * Class: org_sleuthkit_datamodel_SleuthkitJNI * Method: closeFsNat * Signature: (J)V */ JNIEXPORT void JNICALL Java_org_sleuthkit_datamodel_SleuthkitJNI_closeFsNat (JNIEnv *, jclass, jlong); /* * Class: org_sleuthkit_datamodel_SleuthkitJNI * Method: closeFileNat * Signature: (J)V */ JNIEXPORT void JNICALL Java_org_sleuthkit_datamodel_SleuthkitJNI_closeFileNat (JNIEnv *, jclass, jlong); /* * Class: org_sleuthkit_datamodel_SleuthkitJNI * Method: findDeviceSizeNat * Signature: (Ljava/lang/String;)J */ JNIEXPORT jlong JNICALL Java_org_sleuthkit_datamodel_SleuthkitJNI_findDeviceSizeNat (JNIEnv *, jclass, jstring); /* * Class: org_sleuthkit_datamodel_SleuthkitJNI * Method: getCurDirNat * Signature: (J)Ljava/lang/String; */ JNIEXPORT jstring JNICALL Java_org_sleuthkit_datamodel_SleuthkitJNI_getCurDirNat (JNIEnv *, jclass, jlong); /* * Class: org_sleuthkit_datamodel_SleuthkitJNI * Method: isImageSupportedNat * Signature: (Ljava/lang/String;)Z */ JNIEXPORT jboolean 
JNICALL Java_org_sleuthkit_datamodel_SleuthkitJNI_isImageSupportedNat (JNIEnv *, jclass, jstring); /* * Class: org_sleuthkit_datamodel_SleuthkitJNI * Method: getSleuthkitVersionNat * Signature: ()J */ JNIEXPORT jlong JNICALL Java_org_sleuthkit_datamodel_SleuthkitJNI_getSleuthkitVersionNat (JNIEnv *, jclass); /* * Class: org_sleuthkit_datamodel_SleuthkitJNI * Method: finishImageWriterNat * Signature: (J)I */ JNIEXPORT jint JNICALL Java_org_sleuthkit_datamodel_SleuthkitJNI_finishImageWriterNat (JNIEnv *, jclass, jlong); /* * Class: org_sleuthkit_datamodel_SleuthkitJNI * Method: getFinishImageProgressNat * Signature: (J)I */ JNIEXPORT jint JNICALL Java_org_sleuthkit_datamodel_SleuthkitJNI_getFinishImageProgressNat (JNIEnv *, jclass, jlong); /* * Class: org_sleuthkit_datamodel_SleuthkitJNI * Method: cancelFinishImageNat * Signature: (J)V */ JNIEXPORT void JNICALL Java_org_sleuthkit_datamodel_SleuthkitJNI_cancelFinishImageNat (JNIEnv *, jclass, jlong); #ifdef __cplusplus } #endif #endif /* Header for class org_sleuthkit_datamodel_SleuthkitJNI_TSK_FS_FILE_READ_OFFSET_TYPE_ENUM */ #ifndef _Included_org_sleuthkit_datamodel_SleuthkitJNI_TSK_FS_FILE_READ_OFFSET_TYPE_ENUM #define _Included_org_sleuthkit_datamodel_SleuthkitJNI_TSK_FS_FILE_READ_OFFSET_TYPE_ENUM #ifdef __cplusplus extern "C" { #endif #ifdef __cplusplus } #endif #endif /* Header for class org_sleuthkit_datamodel_SleuthkitJNI_CaseDbHandle */ #ifndef _Included_org_sleuthkit_datamodel_SleuthkitJNI_CaseDbHandle #define _Included_org_sleuthkit_datamodel_SleuthkitJNI_CaseDbHandle #ifdef __cplusplus extern "C" { #endif #ifdef __cplusplus } #endif #endif /* Header for class org_sleuthkit_datamodel_SleuthkitJNI_CaseDbHandle_AddImageProcess */ #ifndef _Included_org_sleuthkit_datamodel_SleuthkitJNI_CaseDbHandle_AddImageProcess #define _Included_org_sleuthkit_datamodel_SleuthkitJNI_CaseDbHandle_AddImageProcess #ifdef __cplusplus extern "C" { #endif #ifdef __cplusplus } #endif #endif /* Header for class 
org_sleuthkit_datamodel_SleuthkitJNI_HandleCache */ #ifndef _Included_org_sleuthkit_datamodel_SleuthkitJNI_HandleCache #define _Included_org_sleuthkit_datamodel_SleuthkitJNI_HandleCache #ifdef __cplusplus extern "C" { #endif #ifdef __cplusplus } #endif #endif /* Header for class org_sleuthkit_datamodel_SleuthkitJNI_CaseHandles */ #ifndef _Included_org_sleuthkit_datamodel_SleuthkitJNI_CaseHandles #define _Included_org_sleuthkit_datamodel_SleuthkitJNI_CaseHandles #ifdef __cplusplus extern "C" { #endif #ifdef __cplusplus } #endif #endif sleuthkit-4.11.1/bindings/java/jni/Makefile.am000644 000765 000024 00000000631 14137073413 021741 0ustar00carrierstaff000000 000000 AM_CPPFLAGS = -I../../.. -I$(srcdir)/../../.. $(JNI_CPPFLAGS) AM_CXXFLAGS += -Wno-unused-command-line-argument -Wno-overloaded-virtual EXTRA_DIST = .indent.pro lib_LTLIBRARIES = libtsk_jni.la libtsk_jni_la_SOURCES = dataModel_SleuthkitJNI.cpp dataModel_SleuthkitJNI.h auto_db_java.h auto_db_java.cpp libtsk_jni_la_LIBADD = ../../../tsk/libtsk.la indent: indent *.cpp *.h clean-local: -rm -f *.c~ *.h~ sleuthkit-4.11.1/bindings/java/jni/.indent.pro000644 000765 000024 00000000036 14137073413 021765 0ustar00carrierstaff000000 000000 -kr -psl -nce -ip2 -nlp -nut sleuthkit-4.11.1/bindings/java/jni/Makefile.in000644 000765 000024 00000054243 14137073437 021770 0ustar00carrierstaff000000 000000 # Makefile.in generated by automake 1.15.1 from Makefile.am. # @configure_input@ # Copyright (C) 1994-2017 Free Software Foundation, Inc. # This Makefile.in is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY, to the extent permitted by law; without # even the implied warranty of MERCHANTABILITY or FITNESS FOR A # PARTICULAR PURPOSE. 
@SET_MAKE@ VPATH = @srcdir@ am__is_gnu_make = { \ if test -z '$(MAKELEVEL)'; then \ false; \ elif test -n '$(MAKE_HOST)'; then \ true; \ elif test -n '$(MAKE_VERSION)' && test -n '$(CURDIR)'; then \ true; \ else \ false; \ fi; \ } am__make_running_with_option = \ case $${target_option-} in \ ?) ;; \ *) echo "am__make_running_with_option: internal error: invalid" \ "target option '$${target_option-}' specified" >&2; \ exit 1;; \ esac; \ has_opt=no; \ sane_makeflags=$$MAKEFLAGS; \ if $(am__is_gnu_make); then \ sane_makeflags=$$MFLAGS; \ else \ case $$MAKEFLAGS in \ *\\[\ \ ]*) \ bs=\\; \ sane_makeflags=`printf '%s\n' "$$MAKEFLAGS" \ | sed "s/$$bs$$bs[$$bs $$bs ]*//g"`;; \ esac; \ fi; \ skip_next=no; \ strip_trailopt () \ { \ flg=`printf '%s\n' "$$flg" | sed "s/$$1.*$$//"`; \ }; \ for flg in $$sane_makeflags; do \ test $$skip_next = yes && { skip_next=no; continue; }; \ case $$flg in \ *=*|--*) continue;; \ -*I) strip_trailopt 'I'; skip_next=yes;; \ -*I?*) strip_trailopt 'I';; \ -*O) strip_trailopt 'O'; skip_next=yes;; \ -*O?*) strip_trailopt 'O';; \ -*l) strip_trailopt 'l'; skip_next=yes;; \ -*l?*) strip_trailopt 'l';; \ -[dEDm]) skip_next=yes;; \ -[JT]) skip_next=yes;; \ esac; \ case $$flg in \ *$$target_option*) has_opt=yes; break;; \ esac; \ done; \ test $$has_opt = yes am__make_dryrun = (target_option=n; $(am__make_running_with_option)) am__make_keepgoing = (target_option=k; $(am__make_running_with_option)) pkgdatadir = $(datadir)/@PACKAGE@ pkgincludedir = $(includedir)/@PACKAGE@ pkglibdir = $(libdir)/@PACKAGE@ pkglibexecdir = $(libexecdir)/@PACKAGE@ am__cd = CDPATH="$${ZSH_VERSION+.}$(PATH_SEPARATOR)" && cd install_sh_DATA = $(install_sh) -c -m 644 install_sh_PROGRAM = $(install_sh) -c install_sh_SCRIPT = $(install_sh) -c INSTALL_HEADER = $(INSTALL_DATA) transform = $(program_transform_name) NORMAL_INSTALL = : PRE_INSTALL = : POST_INSTALL = : NORMAL_UNINSTALL = : PRE_UNINSTALL = : POST_UNINSTALL = : build_triplet = @build@ host_triplet = @host@ subdir = 
bindings/java/jni ACLOCAL_M4 = $(top_srcdir)/aclocal.m4 am__aclocal_m4_deps = $(top_srcdir)/m4/ax_pkg_check_modules.m4 \ $(top_srcdir)/m4/tsk_opt_dep_check.m4 \ $(top_srcdir)/m4/ax_pthread.m4 $(top_srcdir)/m4/cppunit.m4 \ $(top_srcdir)/m4/ax_jni_include_dir.m4 \ $(top_srcdir)/m4/ac_prog_javac_works.m4 \ $(top_srcdir)/m4/ac_prog_javac.m4 \ $(top_srcdir)/m4/ac_prog_java_works.m4 \ $(top_srcdir)/m4/ac_prog_java.m4 \ $(top_srcdir)/m4/ax_cxx_compile_stdcxx.m4 \ $(top_srcdir)/configure.ac am__configure_deps = $(am__aclocal_m4_deps) $(CONFIGURE_DEPENDENCIES) \ $(ACLOCAL_M4) DIST_COMMON = $(srcdir)/Makefile.am $(am__DIST_COMMON) mkinstalldirs = $(install_sh) -d CONFIG_HEADER = $(top_builddir)/tsk/tsk_config.h CONFIG_CLEAN_FILES = CONFIG_CLEAN_VPATH_FILES = am__vpath_adj_setup = srcdirstrip=`echo "$(srcdir)" | sed 's|.|.|g'`; am__vpath_adj = case $$p in \ $(srcdir)/*) f=`echo "$$p" | sed "s|^$$srcdirstrip/||"`;; \ *) f=$$p;; \ esac; am__strip_dir = f=`echo $$p | sed -e 's|^.*/||'`; am__install_max = 40 am__nobase_strip_setup = \ srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*|]/\\\\&/g'` am__nobase_strip = \ for p in $$list; do echo "$$p"; done | sed -e "s|$$srcdirstrip/||" am__nobase_list = $(am__nobase_strip_setup); \ for p in $$list; do echo "$$p $$p"; done | \ sed "s| $$srcdirstrip/| |;"' / .*\//!s/ .*/ ./; s,\( .*\)/[^/]*$$,\1,' | \ $(AWK) 'BEGIN { files["."] = "" } { files[$$2] = files[$$2] " " $$1; \ if (++n[$$2] == $(am__install_max)) \ { print $$2, files[$$2]; n[$$2] = 0; files[$$2] = "" } } \ END { for (dir in files) print dir, files[dir] }' am__base_list = \ sed '$$!N;$$!N;$$!N;$$!N;$$!N;$$!N;$$!N;s/\n/ /g' | \ sed '$$!N;$$!N;$$!N;$$!N;s/\n/ /g' am__uninstall_files_from_dir = { \ test -z "$$files" \ || { test ! -d "$$dir" && test ! -f "$$dir" && test ! 
-r "$$dir"; } \ || { echo " ( cd '$$dir' && rm -f" $$files ")"; \ $(am__cd) "$$dir" && rm -f $$files; }; \ } am__installdirs = "$(DESTDIR)$(libdir)" LTLIBRARIES = $(lib_LTLIBRARIES) libtsk_jni_la_DEPENDENCIES = ../../../tsk/libtsk.la am_libtsk_jni_la_OBJECTS = dataModel_SleuthkitJNI.lo auto_db_java.lo libtsk_jni_la_OBJECTS = $(am_libtsk_jni_la_OBJECTS) AM_V_lt = $(am__v_lt_@AM_V@) am__v_lt_ = $(am__v_lt_@AM_DEFAULT_V@) am__v_lt_0 = --silent am__v_lt_1 = AM_V_P = $(am__v_P_@AM_V@) am__v_P_ = $(am__v_P_@AM_DEFAULT_V@) am__v_P_0 = false am__v_P_1 = : AM_V_GEN = $(am__v_GEN_@AM_V@) am__v_GEN_ = $(am__v_GEN_@AM_DEFAULT_V@) am__v_GEN_0 = @echo " GEN " $@; am__v_GEN_1 = AM_V_at = $(am__v_at_@AM_V@) am__v_at_ = $(am__v_at_@AM_DEFAULT_V@) am__v_at_0 = @ am__v_at_1 = DEFAULT_INCLUDES = -I.@am__isrc@ -I$(top_builddir)/tsk depcomp = $(SHELL) $(top_srcdir)/config/depcomp am__depfiles_maybe = depfiles am__mv = mv -f CXXCOMPILE = $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) \ $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CXXFLAGS) $(CXXFLAGS) LTCXXCOMPILE = $(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) \ $(LIBTOOLFLAGS) --mode=compile $(CXX) $(DEFS) \ $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) \ $(AM_CXXFLAGS) $(CXXFLAGS) AM_V_CXX = $(am__v_CXX_@AM_V@) am__v_CXX_ = $(am__v_CXX_@AM_DEFAULT_V@) am__v_CXX_0 = @echo " CXX " $@; am__v_CXX_1 = CXXLD = $(CXX) CXXLINK = $(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) \ $(LIBTOOLFLAGS) --mode=link $(CXXLD) $(AM_CXXFLAGS) \ $(CXXFLAGS) $(AM_LDFLAGS) $(LDFLAGS) -o $@ AM_V_CXXLD = $(am__v_CXXLD_@AM_V@) am__v_CXXLD_ = $(am__v_CXXLD_@AM_DEFAULT_V@) am__v_CXXLD_0 = @echo " CXXLD " $@; am__v_CXXLD_1 = COMPILE = $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) \ $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) LTCOMPILE = $(LIBTOOL) $(AM_V_lt) --tag=CC $(AM_LIBTOOLFLAGS) \ $(LIBTOOLFLAGS) --mode=compile $(CC) $(DEFS) \ $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) \ $(AM_CFLAGS) $(CFLAGS) AM_V_CC = $(am__v_CC_@AM_V@) 
am__v_CC_ = $(am__v_CC_@AM_DEFAULT_V@) am__v_CC_0 = @echo " CC " $@; am__v_CC_1 = CCLD = $(CC) LINK = $(LIBTOOL) $(AM_V_lt) --tag=CC $(AM_LIBTOOLFLAGS) \ $(LIBTOOLFLAGS) --mode=link $(CCLD) $(AM_CFLAGS) $(CFLAGS) \ $(AM_LDFLAGS) $(LDFLAGS) -o $@ AM_V_CCLD = $(am__v_CCLD_@AM_V@) am__v_CCLD_ = $(am__v_CCLD_@AM_DEFAULT_V@) am__v_CCLD_0 = @echo " CCLD " $@; am__v_CCLD_1 = SOURCES = $(libtsk_jni_la_SOURCES) DIST_SOURCES = $(libtsk_jni_la_SOURCES) am__can_run_installinfo = \ case $$AM_UPDATE_INFO_DIR in \ n|no|NO) false;; \ *) (install-info --version) >/dev/null 2>&1;; \ esac am__tagged_files = $(HEADERS) $(SOURCES) $(TAGS_FILES) $(LISP) # Read a list of newline-separated strings from the standard input, # and print each of them once, without duplicates. Input order is # *not* preserved. am__uniquify_input = $(AWK) '\ BEGIN { nonempty = 0; } \ { items[$$0] = 1; nonempty = 1; } \ END { if (nonempty) { for (i in items) print i; }; } \ ' # Make sure the list of sources is unique. This is necessary because, # e.g., the same source file might be shared among _SOURCES variables # for different programs/libraries. 
am__define_uniq_tagged_files = \ list='$(am__tagged_files)'; \ unique=`for i in $$list; do \ if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \ done | $(am__uniquify_input)` ETAGS = etags CTAGS = ctags am__DIST_COMMON = $(srcdir)/Makefile.in $(top_srcdir)/config/depcomp DISTFILES = $(DIST_COMMON) $(DIST_SOURCES) $(TEXINFOS) $(EXTRA_DIST) ACLOCAL = @ACLOCAL@ ALLOCA = @ALLOCA@ AMTAR = @AMTAR@ AM_CFLAGS = @AM_CFLAGS@ AM_CXXFLAGS = @AM_CXXFLAGS@ -Wno-unused-command-line-argument \ -Wno-overloaded-virtual AM_DEFAULT_VERBOSITY = @AM_DEFAULT_VERBOSITY@ ANT_FOUND = @ANT_FOUND@ AR = @AR@ AUTOCONF = @AUTOCONF@ AUTOHEADER = @AUTOHEADER@ AUTOMAKE = @AUTOMAKE@ AWK = @AWK@ AX_PACKAGE_REQUIRES = @AX_PACKAGE_REQUIRES@ AX_PACKAGE_REQUIRES_PRIVATE = @AX_PACKAGE_REQUIRES_PRIVATE@ CC = @CC@ CCDEPMODE = @CCDEPMODE@ CFLAGS = @CFLAGS@ CPP = @CPP@ CPPFLAGS = @CPPFLAGS@ CPPUNIT_CFLAGS = @CPPUNIT_CFLAGS@ CPPUNIT_CONFIG = @CPPUNIT_CONFIG@ CPPUNIT_LIBS = @CPPUNIT_LIBS@ CXX = @CXX@ CXXCPP = @CXXCPP@ CXXDEPMODE = @CXXDEPMODE@ CXXFLAGS = @CXXFLAGS@ CYGPATH_W = @CYGPATH_W@ DEFS = @DEFS@ DEPDIR = @DEPDIR@ DLLTOOL = @DLLTOOL@ DSYMUTIL = @DSYMUTIL@ DUMPBIN = @DUMPBIN@ ECHO_C = @ECHO_C@ ECHO_N = @ECHO_N@ ECHO_T = @ECHO_T@ EGREP = @EGREP@ EWF_CFLAGS = @EWF_CFLAGS@ EWF_LIBS = @EWF_LIBS@ EXEEXT = @EXEEXT@ FGREP = @FGREP@ GREP = @GREP@ HAVE_CXX14 = @HAVE_CXX14@ IGNORE = @IGNORE@ INSTALL = @INSTALL@ INSTALL_DATA = @INSTALL_DATA@ INSTALL_PROGRAM = @INSTALL_PROGRAM@ INSTALL_SCRIPT = @INSTALL_SCRIPT@ INSTALL_STRIP_PROGRAM = @INSTALL_STRIP_PROGRAM@ JAVA = @JAVA@ JAVAC = @JAVAC@ JNI_CPPFLAGS = @JNI_CPPFLAGS@ LD = @LD@ LDFLAGS = @LDFLAGS@ LIBOBJS = @LIBOBJS@ LIBS = @LIBS@ LIBTOOL = @LIBTOOL@ LIBTSK_LDFLAGS = @LIBTSK_LDFLAGS@ LIPO = @LIPO@ LN_S = @LN_S@ LTLIBOBJS = @LTLIBOBJS@ LT_SYS_LIBRARY_PATH = @LT_SYS_LIBRARY_PATH@ MAINT = @MAINT@ MAKEINFO = @MAKEINFO@ MANIFEST_TOOL = @MANIFEST_TOOL@ MKDIR_P = @MKDIR_P@ NM = @NM@ NMEDIT = @NMEDIT@ OBJDUMP = @OBJDUMP@ OBJEXT = @OBJEXT@ OTOOL = @OTOOL@ OTOOL64 = 
@OTOOL64@ PACKAGE = @PACKAGE@ PACKAGE_BUGREPORT = @PACKAGE_BUGREPORT@ PACKAGE_LIBS_PRIVATE = @PACKAGE_LIBS_PRIVATE@ PACKAGE_NAME = @PACKAGE_NAME@ PACKAGE_STRING = @PACKAGE_STRING@ PACKAGE_TARNAME = @PACKAGE_TARNAME@ PACKAGE_URL = @PACKAGE_URL@ PACKAGE_VERSION = @PACKAGE_VERSION@ PATH_SEPARATOR = @PATH_SEPARATOR@ PERL = @PERL@ PKGCONFIG = @PKGCONFIG@ PKG_CONFIG = @PKG_CONFIG@ PKG_CONFIG_LIBDIR = @PKG_CONFIG_LIBDIR@ PKG_CONFIG_PATH = @PKG_CONFIG_PATH@ PTHREAD_CC = @PTHREAD_CC@ PTHREAD_CFLAGS = @PTHREAD_CFLAGS@ PTHREAD_LIBS = @PTHREAD_LIBS@ RANLIB = @RANLIB@ SED = @SED@ SET_MAKE = @SET_MAKE@ SHELL = @SHELL@ SQLITE3_CFLAGS = @SQLITE3_CFLAGS@ SQLITE3_LIBS = @SQLITE3_LIBS@ STRIP = @STRIP@ VERSION = @VERSION@ VHDI_CFLAGS = @VHDI_CFLAGS@ VHDI_LIBS = @VHDI_LIBS@ VMDK_CFLAGS = @VMDK_CFLAGS@ VMDK_LIBS = @VMDK_LIBS@ ZLIB_CFLAGS = @ZLIB_CFLAGS@ ZLIB_LIBS = @ZLIB_LIBS@ _ACJNI_JAVAC = @_ACJNI_JAVAC@ abs_builddir = @abs_builddir@ abs_srcdir = @abs_srcdir@ abs_top_builddir = @abs_top_builddir@ abs_top_srcdir = @abs_top_srcdir@ ac_ct_AR = @ac_ct_AR@ ac_ct_CC = @ac_ct_CC@ ac_ct_CXX = @ac_ct_CXX@ ac_ct_DUMPBIN = @ac_ct_DUMPBIN@ am__include = @am__include@ am__leading_dot = @am__leading_dot@ am__quote = @am__quote@ am__tar = @am__tar@ am__untar = @am__untar@ ax_pthread_config = @ax_pthread_config@ bindir = @bindir@ build = @build@ build_alias = @build_alias@ build_cpu = @build_cpu@ build_os = @build_os@ build_vendor = @build_vendor@ builddir = @builddir@ datadir = @datadir@ datarootdir = @datarootdir@ docdir = @docdir@ dvidir = @dvidir@ exec_prefix = @exec_prefix@ host = @host@ host_alias = @host_alias@ host_cpu = @host_cpu@ host_os = @host_os@ host_vendor = @host_vendor@ htmldir = @htmldir@ includedir = @includedir@ infodir = @infodir@ install_sh = @install_sh@ libdir = @libdir@ libexecdir = @libexecdir@ localedir = @localedir@ localstatedir = @localstatedir@ mandir = @mandir@ mkdir_p = @mkdir_p@ oldincludedir = @oldincludedir@ pdfdir = @pdfdir@ prefix = @prefix@ 
program_transform_name = @program_transform_name@ psdir = @psdir@ sbindir = @sbindir@ sharedstatedir = @sharedstatedir@ srcdir = @srcdir@ sysconfdir = @sysconfdir@ target_alias = @target_alias@ top_build_prefix = @top_build_prefix@ top_builddir = @top_builddir@ top_srcdir = @top_srcdir@ uudecode = @uudecode@ AM_CPPFLAGS = -I../../.. -I$(srcdir)/../../.. $(JNI_CPPFLAGS) EXTRA_DIST = .indent.pro lib_LTLIBRARIES = libtsk_jni.la libtsk_jni_la_SOURCES = dataModel_SleuthkitJNI.cpp dataModel_SleuthkitJNI.h auto_db_java.h auto_db_java.cpp libtsk_jni_la_LIBADD = ../../../tsk/libtsk.la all: all-am .SUFFIXES: .SUFFIXES: .cpp .lo .o .obj $(srcdir)/Makefile.in: @MAINTAINER_MODE_TRUE@ $(srcdir)/Makefile.am $(am__configure_deps) @for dep in $?; do \ case '$(am__configure_deps)' in \ *$$dep*) \ ( cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh ) \ && { if test -f $@; then exit 0; else break; fi; }; \ exit 1;; \ esac; \ done; \ echo ' cd $(top_srcdir) && $(AUTOMAKE) --foreign bindings/java/jni/Makefile'; \ $(am__cd) $(top_srcdir) && \ $(AUTOMAKE) --foreign bindings/java/jni/Makefile Makefile: $(srcdir)/Makefile.in $(top_builddir)/config.status @case '$?' 
in \ *config.status*) \ cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh;; \ *) \ echo ' cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe)'; \ cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe);; \ esac; $(top_builddir)/config.status: $(top_srcdir)/configure $(CONFIG_STATUS_DEPENDENCIES) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(top_srcdir)/configure: @MAINTAINER_MODE_TRUE@ $(am__configure_deps) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(ACLOCAL_M4): @MAINTAINER_MODE_TRUE@ $(am__aclocal_m4_deps) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(am__aclocal_m4_deps): install-libLTLIBRARIES: $(lib_LTLIBRARIES) @$(NORMAL_INSTALL) @list='$(lib_LTLIBRARIES)'; test -n "$(libdir)" || list=; \ list2=; for p in $$list; do \ if test -f $$p; then \ list2="$$list2 $$p"; \ else :; fi; \ done; \ test -z "$$list2" || { \ echo " $(MKDIR_P) '$(DESTDIR)$(libdir)'"; \ $(MKDIR_P) "$(DESTDIR)$(libdir)" || exit 1; \ echo " $(LIBTOOL) $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=install $(INSTALL) $(INSTALL_STRIP_FLAG) $$list2 '$(DESTDIR)$(libdir)'"; \ $(LIBTOOL) $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=install $(INSTALL) $(INSTALL_STRIP_FLAG) $$list2 "$(DESTDIR)$(libdir)"; \ } uninstall-libLTLIBRARIES: @$(NORMAL_UNINSTALL) @list='$(lib_LTLIBRARIES)'; test -n "$(libdir)" || list=; \ for p in $$list; do \ $(am__strip_dir) \ echo " $(LIBTOOL) $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=uninstall rm -f '$(DESTDIR)$(libdir)/$$f'"; \ $(LIBTOOL) $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=uninstall rm -f "$(DESTDIR)$(libdir)/$$f"; \ done clean-libLTLIBRARIES: -test -z "$(lib_LTLIBRARIES)" || rm -f $(lib_LTLIBRARIES) @list='$(lib_LTLIBRARIES)'; \ locs=`for p in $$list; do echo $$p; done | \ sed 's|^[^/]*$$|.|; s|/[^/]*$$||; s|$$|/so_locations|' | \ sort -u`; \ test -z "$$locs" || { \ echo rm -f $${locs}; \ rm -f $${locs}; \ } libtsk_jni.la: $(libtsk_jni_la_OBJECTS) 
$(libtsk_jni_la_DEPENDENCIES) $(EXTRA_libtsk_jni_la_DEPENDENCIES) $(AM_V_CXXLD)$(CXXLINK) -rpath $(libdir) $(libtsk_jni_la_OBJECTS) $(libtsk_jni_la_LIBADD) $(LIBS) mostlyclean-compile: -rm -f *.$(OBJEXT) distclean-compile: -rm -f *.tab.c @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/auto_db_java.Plo@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/dataModel_SleuthkitJNI.Plo@am__quote@ .cpp.o: @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(CXXCOMPILE) -MT $@ -MD -MP -MF $(DEPDIR)/$*.Tpo -c -o $@ $< @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/$*.Tpo $(DEPDIR)/$*.Po @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='$<' object='$@' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(CXXCOMPILE) -c -o $@ $< .cpp.obj: @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(CXXCOMPILE) -MT $@ -MD -MP -MF $(DEPDIR)/$*.Tpo -c -o $@ `$(CYGPATH_W) '$<'` @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/$*.Tpo $(DEPDIR)/$*.Po @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='$<' object='$@' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(CXXCOMPILE) -c -o $@ `$(CYGPATH_W) '$<'` .cpp.lo: @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(LTCXXCOMPILE) -MT $@ -MD -MP -MF $(DEPDIR)/$*.Tpo -c -o $@ $< @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/$*.Tpo $(DEPDIR)/$*.Plo @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='$<' object='$@' libtool=yes @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(LTCXXCOMPILE) -c -o $@ $< mostlyclean-libtool: -rm -f *.lo clean-libtool: -rm -rf .libs _libs ID: $(am__tagged_files) $(am__define_uniq_tagged_files); mkid -fID $$unique tags: tags-am TAGS: tags tags-am: $(TAGS_DEPENDENCIES) $(am__tagged_files) set x; \ 
here=`pwd`; \ $(am__define_uniq_tagged_files); \ shift; \ if test -z "$(ETAGS_ARGS)$$*$$unique"; then :; else \ test -n "$$unique" || unique=$$empty_fix; \ if test $$# -gt 0; then \ $(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \ "$$@" $$unique; \ else \ $(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \ $$unique; \ fi; \ fi ctags: ctags-am CTAGS: ctags ctags-am: $(TAGS_DEPENDENCIES) $(am__tagged_files) $(am__define_uniq_tagged_files); \ test -z "$(CTAGS_ARGS)$$unique" \ || $(CTAGS) $(CTAGSFLAGS) $(AM_CTAGSFLAGS) $(CTAGS_ARGS) \ $$unique GTAGS: here=`$(am__cd) $(top_builddir) && pwd` \ && $(am__cd) $(top_srcdir) \ && gtags -i $(GTAGS_ARGS) "$$here" cscopelist: cscopelist-am cscopelist-am: $(am__tagged_files) list='$(am__tagged_files)'; \ case "$(srcdir)" in \ [\\/]* | ?:[\\/]*) sdir="$(srcdir)" ;; \ *) sdir=$(subdir)/$(srcdir) ;; \ esac; \ for i in $$list; do \ if test -f "$$i"; then \ echo "$(subdir)/$$i"; \ else \ echo "$$sdir/$$i"; \ fi; \ done >> $(top_builddir)/cscope.files distclean-tags: -rm -f TAGS ID GTAGS GRTAGS GSYMS GPATH tags distdir: $(DISTFILES) @srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \ topsrcdirstrip=`echo "$(top_srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \ list='$(DISTFILES)'; \ dist_files=`for file in $$list; do echo $$file; done | \ sed -e "s|^$$srcdirstrip/||;t" \ -e "s|^$$topsrcdirstrip/|$(top_builddir)/|;t"`; \ case $$dist_files in \ */*) $(MKDIR_P) `echo "$$dist_files" | \ sed '/\//!d;s|^|$(distdir)/|;s,/[^/]*$$,,' | \ sort -u` ;; \ esac; \ for file in $$dist_files; do \ if test -f $$file || test -d $$file; then d=.; else d=$(srcdir); fi; \ if test -d $$d/$$file; then \ dir=`echo "/$$file" | sed -e 's,/[^/]*$$,,'`; \ if test -d "$(distdir)/$$file"; then \ find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \ fi; \ if test -d $(srcdir)/$$file && test $$d != $(srcdir); then \ cp -fpR $(srcdir)/$$file "$(distdir)$$dir" || exit 1; \ find "$(distdir)/$$file" -type d ! 
-perm -700 -exec chmod u+rwx {} \;; \ fi; \ cp -fpR $$d/$$file "$(distdir)$$dir" || exit 1; \ else \ test -f "$(distdir)/$$file" \ || cp -p $$d/$$file "$(distdir)/$$file" \ || exit 1; \ fi; \ done check-am: all-am check: check-am all-am: Makefile $(LTLIBRARIES) installdirs: for dir in "$(DESTDIR)$(libdir)"; do \ test -z "$$dir" || $(MKDIR_P) "$$dir"; \ done install: install-am install-exec: install-exec-am install-data: install-data-am uninstall: uninstall-am install-am: all-am @$(MAKE) $(AM_MAKEFLAGS) install-exec-am install-data-am installcheck: installcheck-am install-strip: if test -z '$(STRIP)'; then \ $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ install; \ else \ $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ "INSTALL_PROGRAM_ENV=STRIPPROG='$(STRIP)'" install; \ fi mostlyclean-generic: clean-generic: distclean-generic: -test -z "$(CONFIG_CLEAN_FILES)" || rm -f $(CONFIG_CLEAN_FILES) -test . = "$(srcdir)" || test -z "$(CONFIG_CLEAN_VPATH_FILES)" || rm -f $(CONFIG_CLEAN_VPATH_FILES) maintainer-clean-generic: @echo "This command is intended for maintainers to use" @echo "it deletes files that may require special tools to rebuild." 
clean: clean-am clean-am: clean-generic clean-libLTLIBRARIES clean-libtool clean-local \ mostlyclean-am distclean: distclean-am -rm -rf ./$(DEPDIR) -rm -f Makefile distclean-am: clean-am distclean-compile distclean-generic \ distclean-tags dvi: dvi-am dvi-am: html: html-am html-am: info: info-am info-am: install-data-am: install-dvi: install-dvi-am install-dvi-am: install-exec-am: install-libLTLIBRARIES install-html: install-html-am install-html-am: install-info: install-info-am install-info-am: install-man: install-pdf: install-pdf-am install-pdf-am: install-ps: install-ps-am install-ps-am: installcheck-am: maintainer-clean: maintainer-clean-am -rm -rf ./$(DEPDIR) -rm -f Makefile maintainer-clean-am: distclean-am maintainer-clean-generic mostlyclean: mostlyclean-am mostlyclean-am: mostlyclean-compile mostlyclean-generic \ mostlyclean-libtool pdf: pdf-am pdf-am: ps: ps-am ps-am: uninstall-am: uninstall-libLTLIBRARIES .MAKE: install-am install-strip .PHONY: CTAGS GTAGS TAGS all all-am check check-am clean clean-generic \ clean-libLTLIBRARIES clean-libtool clean-local cscopelist-am \ ctags ctags-am distclean distclean-compile distclean-generic \ distclean-libtool distclean-tags distdir dvi dvi-am html \ html-am info info-am install install-am install-data \ install-data-am install-dvi install-dvi-am install-exec \ install-exec-am install-html install-html-am install-info \ install-info-am install-libLTLIBRARIES install-man install-pdf \ install-pdf-am install-ps install-ps-am install-strip \ installcheck installcheck-am installdirs maintainer-clean \ maintainer-clean-generic mostlyclean mostlyclean-compile \ mostlyclean-generic mostlyclean-libtool pdf pdf-am ps ps-am \ tags tags-am uninstall uninstall-am uninstall-libLTLIBRARIES .PRECIOUS: Makefile indent: indent *.cpp *.h clean-local: -rm -f *.c~ *.h~ # Tell versions [3.59,3.63) of GNU make to not export all variables. # Otherwise a system limit (for SysV at least) may be exceeded. 
.NOEXPORT: sleuthkit-4.11.1/bindings/java/jni/auto_db_java.h000644 000765 000024 00000026057 14137073414 022507 0ustar00carrierstaff000000 000000 /* ** The Sleuth Kit ** ** Brian Carrier [carrier sleuthkit [dot] org] ** Copyright (c) 2020 Brian Carrier. All Rights reserved ** ** This software is distributed under the Common Public License 1.0 ** */ /** * \file auto_db_java.h * Contains the class that creates a case-level database of file system * data from the JNI code. */ #ifndef _AUTO_DB_JAVA_H #define _AUTO_DB_JAVA_H #include using std::map; #include using std::string; #include "tsk/auto/tsk_auto_i.h" #include "tsk/auto/tsk_db.h" #include "jni.h" /** \internal * C++ class that implements TskAuto to load file metadata into a database. * This is used by the TskCaseDb class. */ class TskAutoDbJava :public TskAuto { public: TskAutoDbJava(); virtual ~TskAutoDbJava(); virtual uint8_t openImage(int, const TSK_TCHAR * const images[], TSK_IMG_TYPE_ENUM, unsigned int a_ssize, const char* deviceId = NULL); virtual uint8_t openImage(const char* a_deviceId = NULL); virtual uint8_t openImageUtf8(int, const char *const images[], TSK_IMG_TYPE_ENUM, unsigned int a_ssize, const char* deviceId = NULL); virtual void closeImage(); void close(); virtual void setTz(string tzone); virtual void setDatasourceObjId(int64_t img_id); virtual TSK_FILTER_ENUM filterVs(const TSK_VS_INFO * vs_info); virtual TSK_FILTER_ENUM filterVol(const TSK_VS_PART_INFO * vs_part); virtual TSK_FILTER_ENUM filterPool(const TSK_POOL_INFO * pool_info); virtual TSK_FILTER_ENUM filterPoolVol(const TSK_POOL_VOLUME_INFO * pool_vol); virtual TSK_FILTER_ENUM filterFs(TSK_FS_INFO * fs_info); virtual TSK_RETVAL_ENUM processFile(TSK_FS_FILE * fs_file, const char *path); const std::string getCurDir(); /** * Sets whether or not the file systems for an image should be added when * the image is added to the case database. The default value is true. 
*/ void setAddFileSystems(bool addFileSystems); /** * Skip processing of orphans on FAT filesystems. * This will make the loading of the database much faster * but you will not have all deleted files. Default value is false. * @param noFatFsOrphans flag set to true if to skip processing orphans on FAT fs */ virtual void setNoFatFsOrphans(bool noFatFsOrphans); /** * When enabled, records for unallocated file system space will be added to the database. Default value is false. * @param addUnallocSpace If true, create records for contiguous unallocated file system sectors. */ virtual void setAddUnallocSpace(bool addUnallocSpace); /** * When enabled, records for unallocated file system space will be added to the database. Default value is false. * @param addUnallocSpace If true, create records for contiguous unallocated file system sectors. * @param minChunkSize the number of bytes to group unallocated data into. A value of 0 will create * one large chunk and group only on volume boundaries. A value of -1 will group each consecutive * chunk. */ virtual void setAddUnallocSpace(bool addUnallocSpace, int64_t minChunkSize); /** * When enabled, records for unallocated file system space will be added to the database with the given parameters. * Automatically sets the flag to create records for contiguous unallocated file system sectors. * @param minChunkSize the number of bytes to group unallocated data into. A value of 0 will create * one large chunk and group only on volume boundaries. A value of -1 will group each consecutive * chunk. * @param maxChunkSize the maximum number of bytes in one record of unallocated data. 
A value of -1 will not * split the records based on size */ virtual void setAddUnallocSpace(int64_t minChunkSize, int64_t maxChunkSize); uint8_t addFilesInImgToDb(); /** * */ uint8_t startAddImage(int numImg, const TSK_TCHAR * const imagePaths[], TSK_IMG_TYPE_ENUM imgType, unsigned int sSize, const char* deviceId = NULL); uint8_t startAddImage(TSK_IMG_INFO * img_info, const char* deviceId = NULL); #ifdef WIN32 uint8_t startAddImage(int numImg, const char *const imagePaths[], TSK_IMG_TYPE_ENUM imgType, unsigned int sSize, const char* deviceId = NULL); #endif void stopAddImage(); int64_t getImageID(); TSK_RETVAL_ENUM initializeJni(JNIEnv *, jobject); private: int64_t m_curImgId; ///< Object ID of image currently being processed int64_t m_curVsId; ///< Object ID of volume system currently being processed int64_t m_curVolId; ///< Object ID of volume currently being processed int64_t m_curPoolVol; ///< Object ID of the pool volume currently being processed int64_t m_curPoolVs; ///< Object ID of the pool volume system currently being processed int64_t m_curFsId; ///< Object ID of file system currently being processed int64_t m_curFileId; ///< Object ID of file currently being processed TSK_INUM_T m_curDirAddr; ///< Meta address the directory currently being processed int64_t m_curUnallocDirId; string m_curDirPath; //< Path of the current directory being processed tsk_lock_t m_curDirPathLock; //< protects concurrent access to m_curDirPath string m_curImgTZone; bool m_vsFound; bool m_volFound; bool m_poolFound; bool m_stopped; bool m_addFileSystems; bool m_noFatFsOrphans; bool m_addUnallocSpace; int64_t m_minChunkSize; ///< -1 for no minimum, 0 for no chunking at all, greater than 0 to wait for that number of chunks before writing to the database int64_t m_maxChunkSize; ///< Max number of unalloc bytes to process before writing to the database, even if there is no natural break. 
-1 for no chunking bool m_foundStructure; ///< Set to true when we find either a volume or file system bool m_attributeAdded; ///< Set to true when an attribute was added by processAttributes // These are used to write unallocated blocks for pools at the end of the add image // process. We can't load the pool_info objects directly from the database so we will // store info about them here. std::map m_poolOffsetToParentId; std::map m_poolOffsetToVsId; // JNI data JNIEnv * m_jniEnv = NULL; jclass m_callbackClass = NULL; jobject m_javaDbObj = NULL; jmethodID m_addImageMethodID = NULL; jmethodID m_addImageNameMethodID = NULL; jmethodID m_addAcquisitionDetailsMethodID = NULL; jmethodID m_addVolumeSystemMethodID = NULL; jmethodID m_addVolumeMethodID = NULL; jmethodID m_addPoolMethodID = NULL; jmethodID m_addFileSystemMethodID = NULL; jmethodID m_addFileMethodID = NULL; jmethodID m_addUnallocParentMethodID = NULL; jmethodID m_addLayoutFileMethodID = NULL; jmethodID m_addLayoutFileRangeMethodID = NULL; // Cached objects vector m_savedFsInfo; vector m_savedVsInfo; vector m_savedVsPartInfo; vector m_savedObjects; void saveObjectInfo(int64_t objId, int64_t parObjId, TSK_DB_OBJECT_TYPE_ENUM type); TSK_RETVAL_ENUM getObjectInfo(int64_t objId, TSK_DB_OBJECT** obj_info); TSK_RETVAL_ENUM createJString(const char * inputString, jstring & newJString); // prevent copying until we add proper logic to handle it TskAutoDbJava(const TskAutoDbJava&); TskAutoDbJava & operator=(const TskAutoDbJava&); //internal structure to keep track of temp. 
unalloc block range typedef struct _UNALLOC_BLOCK_WLK_TRACK { _UNALLOC_BLOCK_WLK_TRACK(TskAutoDbJava & tskAutoDbJava, const TSK_FS_INFO & fsInfo, const int64_t fsObjId, int64_t minChunkSize, int64_t maxChunkSize) : tskAutoDbJava(tskAutoDbJava),fsInfo(fsInfo),fsObjId(fsObjId),curRangeStart(0), minChunkSize(minChunkSize), maxChunkSize(maxChunkSize), prevBlock(0), isStart(true), nextSequenceNo(0) {} TskAutoDbJava & tskAutoDbJava; const TSK_FS_INFO & fsInfo; const int64_t fsObjId; vector ranges; TSK_DADDR_T curRangeStart; int64_t size; const int64_t minChunkSize; const int64_t maxChunkSize; TSK_DADDR_T prevBlock; bool isStart; uint32_t nextSequenceNo; } UNALLOC_BLOCK_WLK_TRACK; uint8_t addImageDetails(const char *); TSK_RETVAL_ENUM insertFileData(TSK_FS_FILE * fs_file, const TSK_FS_ATTR *, const char *path); virtual TSK_RETVAL_ENUM processAttribute(TSK_FS_FILE *, const TSK_FS_ATTR * fs_attr, const char *path); TSK_RETVAL_ENUM addUnallocatedPoolBlocksToDb(size_t & numPool); static TSK_WALK_RET_ENUM fsWalkUnallocBlocksCb(const TSK_FS_BLOCK *a_block, void *a_ptr); TSK_RETVAL_ENUM addFsInfoUnalloc(const TSK_DB_FS_INFO & dbFsInfo); TSK_RETVAL_ENUM addUnallocFsSpaceToDb(size_t & numFs); TSK_RETVAL_ENUM addUnallocVsSpaceToDb(size_t & numVsP); TSK_RETVAL_ENUM addUnallocImageSpaceToDb(); TSK_RETVAL_ENUM addUnallocSpaceToDb(); // JNI methods TSK_RETVAL_ENUM addImageInfo(int type, TSK_OFF_T ssize, int64_t & objId, const string & timezone, TSK_OFF_T size, const string &md5, const string& sha1, const string& sha256, const string& deviceId, const string& collectionDetails, char** img_ptrs, int num_imgs); void addAcquisitionDetails(int64_t imgId, const string& collectionDetails); TSK_RETVAL_ENUM addVsInfo(const TSK_VS_INFO* vs_info, int64_t parObjId, int64_t& objId); TSK_RETVAL_ENUM addPoolInfoAndVS(const TSK_POOL_INFO *pool_info, int64_t parObjId, int64_t& objId); TSK_RETVAL_ENUM addPoolVolumeInfo(const TSK_POOL_VOLUME_INFO* pool_vol, int64_t parObjId, int64_t& objId); 
TSK_RETVAL_ENUM addVolumeInfo(const TSK_VS_PART_INFO* vs_part, int64_t parObjId, int64_t& objId); TSK_RETVAL_ENUM addFsInfo(const TSK_FS_INFO* fs_info, int64_t parObjId, int64_t& objId); TSK_RETVAL_ENUM addFsFile(TSK_FS_FILE* fs_file, const TSK_FS_ATTR* fs_attr, const char* path, int64_t fsObjId, int64_t& objId, int64_t dataSourceObjId); TSK_RETVAL_ENUM addFile(TSK_FS_FILE* fs_file, const TSK_FS_ATTR* fs_attr, const char* path, int64_t fsObjId, int64_t parObjId, int64_t dataSourceObjId); TSK_RETVAL_ENUM addFileWithLayoutRange(const TSK_DB_FILES_TYPE_ENUM dbFileType, const int64_t parentObjId, const int64_t fsObjId, const uint64_t size, vector& ranges, int64_t& objId, int64_t dataSourceObjId); TSK_RETVAL_ENUM addUnallocBlockFile(const int64_t parentObjId, const int64_t fsObjId, const uint64_t size, vector& ranges, int64_t& objId, int64_t dataSourceObjId); TSK_RETVAL_ENUM addUnusedBlockFile(const int64_t parentObjId, const int64_t fsObjId, const uint64_t size, vector& ranges, int64_t& objId, int64_t dataSourceObjId); TSK_RETVAL_ENUM addUnallocFsBlockFilesParent(const int64_t fsObjId, int64_t& objId, int64_t dataSourceObjId); TSK_RETVAL_ENUM addUnallocatedPoolVolume(int vol_index, int64_t parObjId, int64_t& objId); }; #endif sleuthkit-4.11.1/bindings/java/doxygen/blackboard.dox000644 000765 000024 00000053757 14137073413 023423 0ustar00carrierstaff000000 000000 /*! \page mod_bbpage The Blackboard \section jni_bb_overview Overview The blackboard allows modules (in Autopsy or other frameworks) to communicate and store results. A module can post data to the blackboard so that subsequent modules can see its results. It can also query the blackboard to see what previous modules have posted. \subsection jni_bb_concepts Concepts The blackboard is a collection of artifacts. Each artifact is a either a data artifact or an analysis result. 
In general, data artifacts record data found in the image (ex: a call log entry) while analysis results are more subjective (ex: a file matching a user-created interesting file set rule). Each artifact has a type, such as web browser history, EXIF, or GPS route. The Sleuth Kit has many artifact types already defined (see org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE and the \ref artifact_catalog_page "artifact catalog") and you can also \ref jni_bb_artifact2 "create your own". Each artifact has a set of name-value pairs called attributes. Attributes also have types, such as URL, created date, or device make. The Sleuth Kit has many attribute types already defined (see org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE) and you can also \ref jni_bb_artifact2 "create your own". See the \ref artifact_catalog_page "artifact catalog" for a list of artifacts and the attributes that should be associated with each. \subsection jni_bb_specialart Special Artifact Types There are two special types of artifacts that are used a bit differently than the rest. The first is the org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE.TSK_GEN_INFO artifact. A Content object should have only one artifact of this type and it is used to store a independent attributes that will not be displayed in the UI. Autopsy used to store the MD5 hash and MIME type in TSK_GEN_INFO, but they are now in the files table of the database. There are special methods to access this artifact to ensure that only a single TSK_GEN_INFO artifact is created per Content object and that you get a cached version of the artifact. These methods will be given in the relevant sections below. The second special type of artifact is the TSK_ASSOCIATED_OBJECT. All artifacts are created as the child of a file or artifact. This TSK_ASSOCIATED_OBJECT is used to make additional relationships with files and artifacts apart from this parent-child relationship. 
See the \ref jni_bb_associated_object section below. \section jni_bb_access Accessing the Blackboard Modules can access the blackboard from either org.sleuthkit.datamodel.SleuthkitCase, org.sleuthkit.datamodel.Blackboard, or a org.sleuthkit.datamodel.Content object. The methods associated with org.sleuthkit.datamodel.Content all limit the Blackboard to a specific file. \subsection jni_bb_access_post Posting to the Blackboard First you need to decide what type of artifact you are making and what category it is. Artifact types fall into two categories:
  • Analysis Result: Result from an analysis technique on a given object with a given configuration. Includes Conclusion, Relevance Score, and Confidence.
  • Data Artifact: Data that was originally embedded by an application/OS in a file or other data container.
Consult the \ref artifact_catalog_page "artifact catalog" for a list of built-in types and what categories they belong to. If you are creating a data artifact, you can optionally add an OS account to it. If you are creating an analysis result, you can optionally add a score and other notes about the result. Note that you must use the category defined in the artifact catalog for each type or you will get an error. For example, you can't create a web bookmark analysis result. There are many ways to create artifacts, but we will focus on creating them through the Blackboard class or directly through a Content object. Regardless of how they are created, all artifacts must be associated with a Content object.
  • org.sleuthkit.datamodel.AbstractContent.newDataArtifact(BlackboardArtifact.Type artifactType, Collection attributesList, Long osAccountId)
  • org.sleuthkit.datamodel.AbstractContent.newAnalysisResult(BlackboardArtifact.Type artifactType, Score score, String conclusion, String configuration, String justification, Collection attributesList)
  • org.sleuthkit.datamodel.Blackboard.newDataArtifact(BlackboardArtifact.Type artifactType, long sourceObjId, Long dataSourceObjId, Collection attributes, Long osAccountId)
  • org.sleuthkit.datamodel.Blackboard.newAnalysisResult(BlackboardArtifact.Type artifactType, long objId, Long dataSourceObjId, Score score, String conclusion, String configuration, String justification, Collection attributesList, CaseDbTransaction transaction)
Attributes are created by making a new instance of org.sleuthkit.datamodel.BlackboardAttribute using one of the various constructors. Attributes can either be added when creating the artifact using the methods in the above list or at a later time using org.sleuthkit.datamodel.BlackboardArtifact.addAttribute() (or org.sleuthkit.datamodel.BlackboardArtifact.addAttributes() if you have several to add - it’s faster). Note that you should not manually add attributes of type JSON for standard attribute types such as TSK_ATTACHMENTS or TSK_GEO_TRACKPOINTS. Instead, you should use the helper classes in org.sleuthkit.datamodel.blackboardutils.attributes or org.sleuthkit.datamodel.blackboardutils to create your artifacts. If you want to create an attribute in the TSK_GEN_INFO artifact, use org.sleuthkit.datamodel.Content.getGenInfoArtifact() to ensure that you do not create a second TSK_GEN_INFO artifact for the file and to ensure that you used the cached version (which will be faster for you). \subsubsection jni_bb_artifact2 Creating Multiple Artifacts or Multiple Attributes In some cases, it may not be clear if you should post multiple single-attribute artifacts for a file or post a single multiple-attribute artifact. Here are some guidelines: - If a single file is associated with multiple items of the same type (e.g., log entries in a log file, bookmarks in a bookmark file, cookies in a cookie database), then each instance should be posted as a separate artifact so that you can differentiate them and keep all related attributes clearly grouped (e.g., it is clear which date goes with which log entry). - All attributes in artifacts other than in org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE.TSK_GEN_INFO artifacts should be closely related to each other. \subsubsection jni_bb_artifact_helpers Artifact Helpers Artifact helpers are a set of classes that make it easier for module developers to create artifacts. 
These classes provide methods that abstract the details of artifacts and attributes, and provide a simpler and more readable API. The following helpers are available:
  • org.sleuthkit.datamodel.blackboardutils.ArtifactsHelper - provides methods for creating general artifacts
    • addInstalledPrograms(): creates TSK_INSTALLED_PROG artifact
  • org.sleuthkit.datamodel.blackboardutils.WebBrowserArtifactsHelper - provides methods for creating web browser related artifacts
    • addWebBookmark(): creates TSK_WEB_BOOKMARK artifact for browser bookmarks
    • addWebCookie(): creates TSK_WEB_COOKIE artifact for browser cookies
    • addWebDownload(): creates TSK_WEB_DOWNLOAD artifact for web downloads.
    • addWebFormAddress(): creates TSK_WEB_FORM_ADDRESS artifact for form address data
    • addWebFormAutofill(): creates TSK_WEB_FORM_AUTOFILL artifact for autofill data
    • addWebHistory(): creates TSK_WEB_HISTORY artifact for web history.
  • org.sleuthkit.datamodel.blackboardutils.CommunicationArtifactsHelper - provides methods for communication related artifacts: contacts, call logs, messages.
    • addCalllog(): creates TSK_CALLLOG artifact for call logs.
    • addContact() creates TSK_CONTACT artifact for contacts.
    • addMessage() creates a TSK_MESSAGE artifact for messages.
    • addAttachments() adds attachments to a message.
  • org.sleuthkit.datamodel.blackboardutils.GeoArtifactsHelper - provides methods for GPS related artifacts
    • addRoute(): creates TSK_GPS_ROUTE artifact for GPS routes.
    • addTrack(): creates TSK_GPS_TRACK artifact for GPS tracks.
\subsubsection jni_bb_associated_object Associated Objects Artifacts should be created as children of the file that they were derived or parsed from. For example, a TSK_WEB_DOWNLOAD artifact would be a child of the browser's SQLite database that was parsed. This creates a relationship between the source file and the artifact. But, sometimes you also want to make a relationship between the artifact and another file (or artifact). This is where the TSK_ASSOCIATED_OBJECT artifact comes in. For example, suppose you have a module that parses a SQLite database that has a log of downloaded files. Each entry might contain the URL the file was downloaded from, timestamp information, and the location the file was saved to on disk. This data would be saved in a TSK_WEB_DOWNLOAD artifact that would be a child of the SQLite database. But suppose the downloaded file also exists in our image. It would be helpful to link that file to our TSK_WEB_DOWNLOAD artifact to show when and where it was downloaded from. We achieve this relationship by creating a TSK_ASSOCIATED_OBJECT artifact on the downloaded file. This artifact stores the ID of the TSK_WEB_DOWNLOAD artifact in a TSK_ASSOCIATED_ARTIFACT attribute so we have a direct link from the file to the artifact that shows where it came from. \image html associated_object.png \subsection jni_bb_query Querying the Blackboard You can find artifacts by querying the blackboard in a variety of ways. It is preferable to use the methods that specifically return either data artifacts or analysis results since these will contain the complete information for the artifact. You can use the more general "Artifact" or "BlackboardArtifact" methods to get both, however these results will only contain the blackboard attributes and not any associated OS account or score/justification. You can find artifacts using a variety of ways: - org.sleuthkit.datamodel.Content.getAllDataArtifacts() to get all data artifacts for a specific Content object. 
- org.sleuthkit.datamodel.Content.getAnalysisResults() to get analysis results of a given type for a specific Content object. - org.sleuthkit.datamodel.Content.getArtifacts() in its various forms to get a specific type of artifact for a specific Content object. - org.sleuthkit.datamodel.Content.getGenInfoArtifact() to get the TSK_GEN_INFO artifact for a specific content object. - org.sleuthkit.datamodel.SleuthkitCase.getBlackboardArtifacts() in its various forms to get artifacts based on some combination of artifact type, attribute type and value, and content object. \section jni_bb_custom_types Custom Artifacts and Attributes This section outlines how to create artifact and attribute types because the standard ones do not meet your needs. These custom artifacts will be displayed in the Autopsy UI alongside the built in artifacts and will also appear in the reports. \subsection jni_bb_custom_make Making Custom Artifacts and Attributes org.sleuthkit.datamodel.SleuthkitCase.addBlackboardArtifactType() is used to create a custom artifact. Give it the display name, unique name and category (data artifact or analysis result) and it will return a org.sleuthkit.datamodel.BlackboardArtifact.Type object with a unique ID. You will need to call this once for each case to create the artifact ID. You can then use this ID to make an artifact of the given type. To check if the artifact type has already been added to the blackboard or to get the ID after it was created, use org.sleuthkit.datamodel.SleuthkitCase.getArtifactType(). To create custom attributes, use org.sleuthkit.datamodel.SleuthkitCase.addArtifactAttributeType() to create the artifact type and get its ID. Like artifacts, you must create the attribute type for each new case. To get a type after it has been created in the case, use org.sleuthkit.datamodel.SleuthkitCase.getAttributeType(). Your attribute will be a name-value pair where the value is of the type you specified when creating it. 
The current types are: String, Integer, Long, Double, Byte, Datetime, and JSON. If you believe you need to create an attribute with type JSON, please read the \ref jni_bb_json_attr_overview "overview" and \ref jni_bb_json_attr "tutorial" sections below. Note that "TSK" is an abbreviation of "The Sleuth Kit." Artifact and attribute type names with a "TSK_" prefix indicate the names of standard or "built in" types. User-defined artifact and attribute types should not be given names with "TSK_" prefixes. \subsection jni_bb_json_attr_overview JSON Attribute Overview This section will give a quick overview of how to use JSON attributes. If this is your first time using JSON attributes please read the \ref jni_bb_json_attr below as well. \subsubsection jni_bb_json_attr_overview_usage JSON Attribute Usage Attributes with values of type JSON should be used only when the data can't be stored as an unordered set of attributes. To date, the most common need for this has been where an artifact needs to store multiple ordered instances of the same type of data in a single artifact. For example, one of the standard JSON attributes is TSK_GEO_TRACKPOINTS which stores an ordered list of track points, each containing coordinates, a timestamp, and other data. \subsubsection jni_bb_json_attr_overview_format JSON Attribute Format The underlying data in a JSON attribute will be either an array of individual attributes or an array of maps of attributes. For example, an artifact containing two track points could look similar to this (some attributes have been removed for brevity): \verbatim {"pointList": [ {"TSK_DATETIME":1255822646, "TSK_GEO_LATITUDE":47.644548, "TSK_GEO_LONGITUDE":-122.326897}, {"TSK_DATETIME":1255822651, "TSK_GEO_LATITUDE":47.644548, "TSK_GEO_LONGITUDE":-122.326897} ] } \endverbatim In practice you will not be required to deal with the raw JSON, but it is important to note that in the name/value pairs, the name should always be the name of a blackboard artifact type. 
This allows Autopsy to better process each attribute, for example by displaying timestamps in human-readable format. \subsubsection jni_bb_json_attr_overview_create Saving JSON Attributes To start, follow the instructions in the \ref jni_bb_custom_make section above to create your custom attribute with value type JSON. Next you'll need to put your data into the new attribute. There are two general methods:
  1. Manually create the JSON string. This is not recommended as the code will be hard to read and prone to errors.
  2. Create a helper plain old Java object (POJO) to hold the data you want to serialize.
Assuming you go the POJO route (highly recommended), there are two options for creating your class. As discussed above, each field name should match an attribute name (either built-in or custom). You could create a class like this: \verbatim class WebLogEntry { long TSK_DATETIME; String TSK_URL; \endverbatim The downside here is that your code will likely be a bit less readable like this. The other option is to use annotations specifying which attribute type goes with each of your fields, like this: \verbatim class WebLogEntry { @SerializedName("TSK_DATETIME") long accessDate; @SerializedName("TSK_URL") String urlVisited; \endverbatim You may need to make multiple POJOs to hold the data you need to serialize. This would most commonly happen if you want to store a list of values. In our example above, we would likely need to create a WebLog class to hold our list of WebLogEntry objects. Now we need to convert our object into a JSON attribute. The easiest way to do this using the method org.sleuthkit.datamodel.blackboardutils.attributes.BlackboardJsonAttrUtil.toAttribute(). This method will return a BlackboardAttribute serialized from your object. You can then add this new attribute to your BlackboardArtifact. \subsubsection jni_bb_json_attr_overview_load Loading JSON Attributes If you need to process JSON attributes you created and you created your own POJO as discussed in the previous section, you can use the method org.sleuthkit.datamodel.blackboardutils.attributes.BlackboardJsonAttrUtil.fromAttribute(). It will return an instance of your class containing the data from a given BlackboardAttribute. \subsection jni_bb_json_attr JSON Attribute Tutorial The following describes an example of when you might need a JSON-valued attribute and the different methods for creating one. It also shows generally how to create custom artifacts and attributes so may be useful even if you do not need a JSON-type attribute. 
Suppose we had a module that could record the last few times an app was accessed and which user opened it. The data we'd like to store for one app could have the form: \verbatim App name: Sample App Logins: user1, 2020-03-31 10:06:37 EDT user2, 2020-03-30 06:19:57 EDT user1, 2020-03-26 18:59:57 EDT \endverbatim We could make a separate artifact for each of those logins (each with the app name, user name, and timestamp) but it might be nicer to have them all under one and keep them in order. This is where the JSON-type attribute comes into play. We can store all the login data in a single blackboard attribute. To start, we'll need to create our new artifact and attribute types. We'll need a new artifact type to hold our login data and a new attribute type to hold the logins themselves (this will be our JSON attribute). We'll use a standard attribute later for the app name. This part should only be done once, possibly in the startUp() method of your ingest module. \verbatim SleuthkitCase skCase = Case.getCurrentCaseThrows().getSleuthkitCase(); // Add the new artifact type to the case if it does not already exist String artifactName = "APP_LOG"; String artifactDisplayName = "Application Logins"; BlackboardArtifact.Type artifactType = skCase.getArtifactType(artifactName); if (artifactType == null) { artifactType = skCase.addBlackboardArtifactType(artifactName, artifactDisplayName); } // Add the new attribute type to the case if it does not already exist String attributeName = "LOGIN_DATA"; String attributeDisplayName = "Login Data"; BlackboardAttribute.Type loginDataAttributeType = skCase.getAttributeType(attributeName); if (loginDataAttributeType == null) { loginDataAttributeType = skCase.addArtifactAttributeType(attributeName, BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.JSON, attributeDisplayName); } \endverbatim You'll want to save the new artifact and attribute type objects to use later. Now our ingest module can create artifacts for the data it extracts. 
In the code below, we create our new "APP_LOG" artifact, add a standard attribute for the user name, and then create and store a JSON-formatted string which will contain each entry from the "loginData" list. Note that manually creating the JSON as shown below is not recommended and is just for illustrative purposes - an easier method will be given afterward. \verbatim BlackboardArtifact art = content.newArtifact(artifactType.getTypeID()); List attributes = new ArrayList<>(); attributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_PROG_NAME, moduleName, appName)); String jsonLoginStr = "{ LoginData : [ "; String dataStr = ""; for(LoginData data : loginData) { if (!dataStr.isEmpty()) { dataStr += ", "; } dataStr += "{\"TSK_USER_NAME\" : \"" + data.getUserName() + "\", " + "\"TSK_DATETIME\" : \"" + data.getTimestamp() + "\"} "; } jsonLoginStr += dataStr + " ] }"; attributes.add(new BlackboardAttribute(loginDataAttributeType, moduleName, jsonLoginStr)); art.addAttributes(attributes); \endverbatim It is important that each of the name-value pairs starts with an existing blackboard attribute name. This will allow Autopsy to use the corresponding value, for example, to extract out a timestamp to show this artifact in the Timeline viewer. Here's what our newly-created artifact will look like in Autopsy: \image html json_attribute.png The above method for storing the data works but formatting the JSON attribute manually is prone to errors. Luckily, in most cases instead of writing the JSON ourselves we can serialize a Java object. If the data that will go into the JSON attribute is contained in plain old Java objects (POJOs), then we can add annotations to that class to produce the JSON automatically. 
Here they've been added to the LoginData class: \verbatim // Requires package com.google.gson.annotations.SerializedName; private class LoginData { @SerializedName("TSK_USER_NAME") String userName; @SerializedName("TSK_DATETIME") long timestamp; LoginData(String userName, long timestamp) { this.userName = userName; this.timestamp = timestamp; } } \endverbatim We want our JSON attribute to store a list of these LoginData objects, so we'll create another POJO for that: \verbatim private class LoginDataLog { List dataLog; LoginDataLog() { dataLog = new ArrayList<>(); } void addData(LoginData data) { dataLog.add(data); } } \endverbatim Now we use org.sleuthkit.datamodel.blackboardutils.attributes.BlackboardJsonAttrUtil.toAttribute() to convert our LoginDataLog object into a BlackboardAttribute, greatly simplifying the code. Here, "dataLog" is an instance of a LoginDataLog object that contains all of the login data. \verbatim BlackboardArtifact art = content.newArtifact(artifactType.getTypeID()); List attributes = new ArrayList<>(); attributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_PROG_NAME, moduleName, appName)); attributes.add(BlackboardJsonAttrUtil.toAttribute(loginDataAttributeType, moduleName, dataLog)); art.addAttributes(attributes); \endverbatim */ sleuthkit-4.11.1/bindings/java/doxygen/artifact_catalog.dox000644 000765 000024 00000073203 14137073413 024612 0ustar00carrierstaff000000 000000 /*! \page artifact_catalog_page Standard Artifacts Catalog # Introduction This document reflects current standard usage of artifact and attribute types for posting analysis results to the case blackboard in Autopsy. Refer to \ref mod_bbpage for more background on the blackboard and how to make artifacts. The catalog section below has one entry for each standard artifact type divided by categories. Each entry lists the required and optional attributes of artifacts of the type. 
The category types are: - \ref art_catalog_analysis "Analysis Result": Result from an analysis technique on a given object with a given configuration. Includes Conclusion, Relevance Score, and Confidence. - \ref art_catalog_data "Data Artifact": Data that was originally embedded by an application/OS in a file or other data container. NOTE: - While we have listed some attributes as "Required", nothing will enforce that they exist. Modules that use artifacts from the blackboard should assume that some of the attributes may not actually exist. - You are not limited to the attributes listed below for each artifact. Attributes are listed below as "Optional" if at least one, but not all, Autopsy modules create them. If you want to store data that is not listed below, use an existing attribute type or make your own. For the full list of types, refer to: - org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE - org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE \section art_catalog_analysis Analysis Result Types --- ## TSK_DATA_SOURCE_USAGE Describes how a data source was used, e.g., as a SIM card or an OS drive (such as for Windows or Android). ### REQUIRED ATTRIBUTES - TSK_DESCRIPTION (Description of the usage, e.g., "OS Drive (Windows Vista)"). --- ## TSK_ENCRYPTION_DETECTED An indication that the content is encrypted. ### REQUIRED ATTRIBUTES - TSK_COMMENT (A comment on the encryption, e.g., encryption type or password) --- ## TSK_ENCRYPTION_SUSPECTED An indication that the content is likely encrypted. ### REQUIRED ATTRIBUTES - TSK_COMMENT (Reason for suspecting encryption) --- ## TSK_EXT_MISMATCH_DETECTED An indication that the registered extensions for a file's mime type do not match the file's extension. ### REQUIRED ATTRIBUTES None --- ## TSK_FACE_DETECTED An indication that a human face was detected in some content. 
### REQUIRED ATTRIBUTES None --- ## TSK_HASHSET_HIT Indicates that the MD5 hash of a file matches a set of known MD5s (possibly user defined). ### REQUIRED ATTRIBUTES - TSK_SET_NAME (Name of hashset containing the file's MD5) ### OPTIONAL ATTRIBUTES - TSK_COMMENT (Additional comments about the hit) --- ## TSK_INTERESTING_ARTIFACT_HIT Indicates that the source artifact matches some set of criteria which deem it interesting. Artifacts with this meta artifact will be brought to the attention of the user. ### REQUIRED ATTRIBUTES - TSK_ASSOCIATED_ARTIFACT (The source artifact) - TSK_SET_NAME (The name of the set of criteria which deemed this artifact interesting) ### OPTIONAL ATTRIBUTES - TSK_COMMENT (Comment on the reason that the source artifact is interesting) - TSK_CATEGORY (The set membership rule that was satisfied) --- ## TSK_INTERESTING_FILE_HIT Indication that the source file matches some set of criteria (possibly user defined) which deem it interesting. Files with this artifact will be brought to the attention of the user. ### REQUIRED ATTRIBUTES - TSK_SET_NAME (The name of the set of criteria which deemed this file interesting) ### OPTIONAL ATTRIBUTES - TSK_COMMENT (Comment on the reason that the source artifact is interesting) - TSK_CATEGORY (The set membership rule that was satisfied. I.e. a particular mime) --- ## TSK_KEYWORD_HIT Indication that the source artifact or file contains a keyword. Keywords are grouped into named sets. 
### REQUIRED ATTRIBUTES - TSK_KEYWORD (Keyword that was found in the artifact or file) - TSK_KEYWORD_SEARCH_TYPE (Specifies the type of match, e.g., an exact match, a substring match, or a regex match) - TSK_SET_NAME (The set name that the keyword was contained in) - TSK_KEYWORD_REGEXP (The regular expression that matched, only required for regex matches) - TSK_ASSOCIATED_ARTIFACT (Only required if the keyword hit source is an artifact) ### OPTIONAL ATTRIBUTES - TSK_KEYWORD_PREVIEW (Snippet of text around keyword) --- ## TSK_OBJECT_DETECTED Indicates that an object was detected in a media file. Typically used by computer vision software to classify images. ### REQUIRED ATTRIBUTES - TSK_COMMENT (What was detected) ### OPTIONAL ATTRIBUTES - TSK_DESCRIPTION (Additional comments about the object or observer, e.g., what detected the object) --- ## TSK_PREVIOUSLY_NOTABLE Indicates that the file or artifact was previously tagged as "Notable" in another Autopsy case. ### REQUIRED ATTRIBUTES - TSK_CORRELATION_TYPE (The correlation type that was previously tagged as notable) - TSK_CORRELATION_VALUE (The correlation value that was previously tagged as notable) - TSK_OTHER_CASES (The list of cases containing this file or artifact at the time the artifact is created) --- ## TSK_PREVIOUSLY_SEEN Indicates that the file or artifact was previously seen in another Autopsy case. ### REQUIRED ATTRIBUTES - TSK_CORRELATION_TYPE (The correlation type that was previously seen) - TSK_CORRELATION_VALUE (The correlation value that was previously seen) - TSK_OTHER_CASES (The list of cases containing this file or artifact at the time the artifact is created) --- ## TSK_PREVIOUSLY_UNSEEN Indicates that the file or artifact was previously unseen in another Autopsy case. 
### REQUIRED ATTRIBUTES - TSK_CORRELATION_TYPE (The correlation type that was previously seen) - TSK_CORRELATION_VALUE (The correlation value that was previously seen) --- ## TSK_USER_CONTENT_SUSPECTED An indication that some media file content was generated by the user. ### REQUIRED ATTRIBUTES - TSK_COMMENT (The reason why user-generated content is suspected) --- ## TSK_VERIFICATION_FAILED An indication that some data did not pass verification. One example would be verifying a SHA-1 hash. ### REQUIRED ATTRIBUTES - TSK_COMMENT (Reason for failure, what failed) --- ## TSK_WEB_ACCOUNT_TYPE A web account type entry. ### REQUIRED ATTRIBUTES - TSK_DOMAIN (Domain of the URL) - TSK_TEXT (Indicates type of account (admin/moderator/user) and possible platform) - TSK_URL (URL indicating the user has an account on this domain) --- ## TSK_WEB_CATEGORIZATION The categorization of a web host using a specific usage type, e.g. mail.google.com would correspond to Web Email. ### REQUIRED ATTRIBUTES - TSK_NAME (The usage category identifier, e.g. Web Email) - TSK_DOMAIN (The domain of the host, e.g. google.com) - TSK_HOST (The full host, e.g. mail.google.com) --- ## TSK_YARA_HIT Indicates that the some content of the file was a hit for a YARA rule match. ### REQUIRED ATTRIBUTES - TSK_RULE (The rule that was a hit for this file) - TSK_SET_NAME (Name of the rule set containing the matching rule YARA rule) --- ## TSK_METADATA_EXIF EXIF metadata found in an image or audio file. ### REQUIRED ATTRIBUTES - At least one of: - TSK_DATETIME_CREATED (Creation date of the file, in seconds since 1970-01-01T00:00:00Z) - TSK_DEVICE_MAKE (Device make, generally the manufacturer, e.g., Apple) - TSK_DEVICE_MODEL (Device model, generally the product, e.g., iPhone) - TSK_GEO_ALTITUDE (The camera's altitude when the image/audio was taken) - TSK_GEO_LATITUDE (The camera's latitude when the image/audio was taken) - TSK_GEO_LONGITUDE (The camera's longitude when the image/audio was taken)

\section art_catalog_data Data Artifact Types --- ## TSK_ACCOUNT Details about a credit card or communications account. ### REQUIRED ATTRIBUTES - TSK_ACCOUNT_TYPE (Type of the account, e.g., Skype) - TSK_ID (Unique identifier of the account) or TSK_CARD_NUMBER (Credit card number) ### OPTIONAL ATTRIBUTES - TSK_KEYWORD_SEARCH_DOCUMENT_ID (Document ID of the Solr document that contains the TSK_CARD_NUMBER when the account is a credit card discovered by the Autopsy regular expression search for credit cards) - TSK_SET_NAME (The keyword list name, i.e., "Credit Card Numbers", when the account is a credit card discovered by the Autopsy regular expression search for credit cards) --- ## TSK_ASSOCIATED_OBJECT Provides a backwards link to an artifact that references the parent file of this artifact. Example usage is that a downloaded file will have this artifact and it will point back to the TSK_WEB_DOWNLOAD artifact that is associated with a browser's SQLite database. See \ref jni_bb_associated_object. ### REQUIRED ATTRIBUTES - TSK_ASSOCIATED_ARTIFACT (Artifact ID of associated artifact) --- ## TSK_BACKUP_EVENT Details about System/aplication/file backups. ### REQUIRED ATTRIBUTES - TSK_DATETIME_START (Date/Time the backup happened) ### OPTIONAL ATTRIBUTES - TSK_DATETIME_END (Date/Time the backup ended) --- ## TSK_BLUETOOTH_ADAPTER Details about a Bluetooth adapter. ### REQUIRED ATTRIBUTES - TSK_MAC_ADDRESS (MAC address of the Bluetooth adapter) - TSK_NAME (Name of the device) - TSK_DATETIME (Time device was last seen) - TSK_DEVICE_ID (UUID of the device) --- ## TSK_BLUETOOTH_PAIRING Details about a Bluetooth pairing event. 
### REQUIRED ATTRIBUTES - TSK_DEVICE_NAME (Name of the Bluetooth device) ### OPTIONAL ATTRIBUTES - TSK_DATETIME (When the pairing occurred, in seconds since 1970-01-01T00:00:00Z) - TSK_MAC_ADDRESS (MAC address of the Bluetooth device) - TSK_DEVICE_ID (UUID of the device) - TSK_DATETIME_ACCESSED (Last Connection Time) --- ## TSK_CALENDAR_ENTRY A calendar entry in an application file or database. ### REQUIRED ATTRIBUTES - TSK_CALENDAR_ENTRY_TYPE (E.g., Reminder, Event, Birthday, etc.) - TSK_DATETIME_START (Start of the entry, in seconds since 1970-01-01T00:00:00Z) ### OPTIONAL ATTRIBUTES - TSK_DESCRIPTION (Description of the entry, such as a note) - TSK_LOCATION (Location of the entry, such as an address) - TSK_DATETIME_END (End of the entry, in seconds since 1970-01-01T00:00:00Z) --- ## TSK_CALLLOG A call log record in an application file or database. ### REQUIRED ATTRIBUTES - At least one of: - TSK_PHONE_NUMBER (A phone number involved in this call record) - TSK_PHONE_NUMBER_FROM (The phone number that initiated the call) - TSK_PHONE_NUMBER_TO (The phone number that receives the call) ### OPTIONAL ATTRIBUTES - TSK_DATETIME_END (When the call ended, in seconds since 1970-01-01T00:00:00Z) - TSK_DATETIME_START (When the call started, in seconds since 1970-01-01T00:00:00Z) - TSK_DIRECTION (The communication direction, i.e., Incoming or Outgoing) - TSK_NAME (The name of the caller or callee) --- ## TSK_CLIPBOARD_CONTENT Data found on the operating system's clipboard. ### REQUIRED ATTRIBUTES - TSK_TEXT (Text on the clipboard) --- ## TSK_CONTACT A contact book entry in an application file or database. 
### REQUIRED ATTRIBUTES - At least one of: - TSK_EMAIL (An email address associated with the contact) - TSK_EMAIL_HOME (An email address that is known to be the personal email of the contact) - TSK_EMAIL_OFFICE (An email address that is known to be the work email of the contact) - TSK_PHONE_NUMBER (A phone number associated with the contact) - TSK_PHONE_NUMBER_HOME (A phone number that is known to be the home phone number of the contact) - TSK_PHONE_NUMBER_MOBILE (A phone number that is known to be the mobile phone number of the contact) - TSK_PHONE_NUMBER_OFFICE (A phone number that is known to be the work phone number of the contact) - TSK_NAME (Contact name) ### OPTIONAL ATTRIBUTES - TSK_ORGANIZATION (An organization that the contact belongs to, e.g., Stanford University, Google) - TSK_URL (e.g., the URL of an image if the contact is a vCard) --- ## TSK_DELETED_PROG Programs that have been deleted from the system. ### REQUIRED ATTRIBUTES - TSK_DATETIME (Date/Time the program was deleted) - TSK_PROG_NAME (Program that was deleted) ### OPTIONAL Attributes - TSK_PATH (Location where the program resided before being deleted) --- ## TSK_DEVICE_ATTACHED Details about a device that was physically attached to a data source. ### REQUIRED ATTRIBUTES - TSK_DEVICE_ID (String that uniquely identifies the attached device) ### OPTIONAL ATTRIBUTES - TSK_DATETIME (When the device was attached, in seconds since 1970-01-01T00:00:00Z) - TSK_DEVICE_MAKE (Make of the attached device, e.g., Apple) - TSK_DEVICE_MODEL (Model of the attached device, e.g., iPhone 6s) - TSK_MAC_ADDRESS (Mac address of the attached device) --- ## TSK_DEVICE_INFO Details about a device data source. ### REQUIRED ATTRIBUTES - At least one of: - TSK_IMEI (IMEI number of the device) - TSK_ICCID (ICCID number of the SIM) - TSK_IMSI (IMSI number of the device) --- ## TSK_EMAIL_MSG An email message found in an application file or database. 
### OPTIONAL ATTRIBUTES - At least one of: - TSK_EMAIL_CONTENT_HTML (Representation of email as HTML) - TSK_EMAIL_CONTENT_PLAIN (Representation of email as plain text) - TSK_EMAIL_CONTENT_RTF (Representation of email as RTF) - TSK_DATETIME_RCVD (When email message was received, in seconds since 1970-01-01T00:00:00Z) - TSK_DATETIME_SENT (When email message was sent, in seconds since 1970-01-01T00:00:00Z) - TSK_EMAIL_BCC (BCC'd recipient, multiple recipients should be in a comma separated string) - TSK_EMAIL_CC (CC'd recipient, multiple recipients should be in a comma separated string) - TSK_EMAIL_FROM (Email address that sent the message) - TSK_EMAIL_TO (Email addresses the email message was sent to, multiple emails should be in a comma separated string) - TSK_HEADERS (Transport message headers) - TSK_MSG_ID (Message ID supplied by the email application) - TSK_PATH (Path in the data source to the file containing the email message) - TSK_SUBJECT (Subject of the email message) - TSK_THREAD_ID (ID specified by the analysis module to group emails into threads for display purposes) --- ## TSK_EXTRACTED_TEXT Text extracted from some content. ### REQUIRED ATTRIBUTES - TSK_TEXT (The extracted text) --- ## TSK_GEN_INFO A generic information artifact. Each content object will have at most one TSK_GEN_INFO artifact, which is easily accessed through org.sleuthkit.datamodel.AbstractContent.getGenInfoArtifact() and related methods. The TSK_GEN_INFO object is useful for storing values related to the content object without making a new artifact type. ### REQUIRED ATTRIBUTES None ### OPTIONAL ATTRIBUTES - TSK_PHOTODNA_HASH (The PhotoDNA hash of an image) --- ## TSK_GPS_AREA An outline of an area. ### REQUIRED ATTRIBUTES - TSK_GEO_WAYPOINTS (JSON list of waypoints. 
Use org.sleuthkit.datamodel.blackboardutils.attributes.GeoWaypoints class to create/process) ### OPTIONAL ATTRIBUTES - TSK_LOCATION (Location of the route, e.g., a state or city) - TSK_NAME (Name of the area, e.g., Minute Man Trail) - TSK_PROG_NAME (Name of the application that was the source of the GPS route) --- ## TSK_GPS_BOOKMARK A bookmarked GPS location or saved waypoint. ### REQUIRED ATTRIBUTES - TSK_GEO_LATITUDE (The latitude value of the bookmark) - TSK_GEO_LONGITUDE (The longitude value of the bookmark) ### OPTIONAL ATTRIBUTES - TSK_DATETIME (Timestamp of the GPS bookmark, in seconds since 1970-01-01T00:00:00Z) - TSK_GEO_ALTITUDE (The altitude of the specified latitude and longitude) - TSK_LOCATION (The address of the bookmark. Ex: 123 Main St.) - TSK_NAME (The name of the bookmark. Ex: Boston) - TSK_PROG_NAME (Name of the application that was the source of the GPS bookmark) --- ## TSK_GPS_LAST_KNOWN_LOCATION The last known location of a GPS connected device. This may be from a perspective other than the device. ### REQUIRED ATTRIBUTES - TSK_GEO_LATITUDE (Last known latitude value) - TSK_GEO_LONGITUDE (Last known longitude value) ### OPTIONAL ATTRIBUTES - TSK_DATETIME (Timestamp of the last known location, in seconds since 1970-01-01T00:00:00Z) - TSK_GEO_ALTITUDE (Altitude of the last known latitude and longitude) - TSK_LOCATION (The address of the last known location. Ex: 123 Main St.) - TSK_NAME (The name of the last known location. Ex: Boston) --- ## TSK_GPS_ROUTE A GPS route. ### REQUIRED ATTRIBUTES - TSK_GEO_WAYPOINTS (JSON list of waypoints. 
Use org.sleuthkit.datamodel.blackboardutils.attributes.GeoWaypoints class to create/process) ### OPTIONAL ATTRIBUTES - TSK_DATETIME (Timestamp of the GPS route, in seconds since 1970-01-01T00:00:00Z) - TSK_LOCATION (Location of the route, e.g., a state or city) - TSK_NAME (Name of the route, e.g., Minute Man Trail) - TSK_PROG_NAME (Name of the application that was the source of the GPS route) --- ## TSK_GPS_SEARCH A GPS location that was known to have been searched by the device or user. ### REQUIRED ATTRIBUTES - TSK_GEO_LATITUDE (The GPS latitude value that was searched) - TSK_GEO_LONGITUDE (The GPS longitude value that was searched) ### OPTIONAL ATTRIBUTES - TSK_DATETIME (Timestamp of the GPS search, in seconds since 1970-01-01T00:00:00Z) - TSK_GEO_ALTITUDE (Altitude of the searched GPS coordinates) - TSK_LOCATION (The address of the target location, e.g., 123 Main St.) - TSK_NAME (The name of the target location, e.g., Boston) --- ## TSK_GPS_TRACK A Global Positioning System (GPS) track artifact records the track, or path, of a GPS-enabled device as a connected series of track points. A track point is a location in a geographic coordinate system with latitude, longitude and altitude (elevation) axes. ### REQUIRED ATTRIBUTES - TSK_GEO_TRACKPOINTS (JSON list of trackpoints. Use org.sleuthkit.datamodel.blackboardutils.attributes.GeoTrackPoints class to create/process) ### OPTIONAL ATTRIBUTES - TSK_NAME (The name of the trackpoint set. Ex: Boston) - TSK_PROG_NAME (Name of application containing the GPS trackpoint set) --- ## TSK_INSTALLED_PROG Details about an installed program. 
### REQUIRED ATTRIBUTES - TSK_PROG_NAME (Name of the installed program) ### OPTIONAL ATTRIBUTES - TSK_DATETIME (When the program was installed, in seconds since 1970-01-01T00:00:00Z) - TSK_PATH (Path to the installed program in the data source) - TSK_PATH_SOURCE (Path to an Android Package Kit (APK) file for an Android program) - TSK_PERMISSIONS (Permissions of the installed program) - TSK_VERSION (Version number of the program) --- ## TSK_MESSAGE A message that is found in some content. ### REQUIRED ATTRIBUTES - TSK_TEXT (The text of the message) - TSK_MESSAGE_TYPE (E.g., WhatsApp Message, Skype Message, etc.) ### OPTIONAL ATTRIBUTES - TSK_ATTACHMENTS (Attachments - use the org.sleuthkit.datamodel.blackboardutils.CommunicationArtifactsHelper class to add an attachment) - TSK_DATETIME (Timestamp the message was sent or received, in seconds since 1970-01-01T00:00:00Z) - TSK_DIRECTION (Direction of the message, e.g., incoming or outgoing) - TSK_EMAIL_FROM (Email address of the sender) - TSK_EMAIL_TO (Email address of the recipient) - TSK_PHONE_NUMBER (A phone number associated with the message) - TSK_PHONE_NUMBER_FROM (The phone number of the sender) - TSK_PHONE_NUMBER_TO (The phone number of the recipient) - TSK_READ_STATUS (Status of the message, e.g., read or unread) - TSK_SUBJECT (Subject of the message) - TSK_THREAD_ID (ID for keeping threaded messages together) --- ## TSK_METADATA General metadata for some content. 
### REQUIRED ATTRIBUTES None ### OPTIONAL ATTRIBUTES - TSK_DATETIME_CREATED (Timestamp the document was created) - TSK_DATETIME_MODIFIED (Timestamp the document was modified) - TSK_DESCRIPTION (Title of the document) - TSK_LAST_PRINTED_DATETIME (Timestamp when document was last printed) - TSK_ORGANIZATION (Organization/Company who owns the document) - TSK_OWNER (Author of the document) - TSK_PROG_NAME (Program used to create the document) - TSK_USER_ID (Last author of the document) - TSK_VERSION (Version number of the program used to create the document) --- ## TSK_OS_INFO Details about an operating system recovered from the data source. ### REQUIRED ATTRIBUTES - TSK_PROG_NAME (Name of the OS) ### OPTIONAL ATTRIBUTES - TSK_DATETIME (Datetime of the OS installation, in seconds since 1970-01-01T00:00:00Z) - TSK_DOMAIN (Windows domain for a Windows OS) - TSK_ORGANIZATION (Registered organization for the OS installation) - TSK_OWNER (Registered owner of the OS installation) - TSK_PATH (System root for the OS installation) - TSK_PROCESSOR_ARCHITECTURE (Details about the processor architecture as captured by the OS) - TSK_NAME (Name of computer that the OS was installed on) - TSK_PRODUCT_ID (Product ID for the OS installation) - TSK_TEMP_DIR (Temp directory for the OS) - TSK_VERSION (Version of the OS) --- ## TSK_PROG_NOTIFICATIONS Notifications to the user. ### REQUIRED ATTRIBUTES - TSK_DATETIME (When the notification was sent/received) - TSK_PROG_NAME (Program to send/receive notification) ### OPTIONAL ATTRIBUTES - TSK_TITLE (Title of the notification) - TSK_VALUE (Message being sent or received) --- ## TSK_PROG_RUN The number of times a program/application was run. 
### REQUIRED ATTRIBUTES - TSK_PROG_NAME (Name of the application) ### OPTIONAL ATTRIBUTES - TSK_COUNT (Number of times program was run, should be at least 1) - TSK_DATETIME (Timestamp that application was run last, in seconds since 1970-01-01T00:00:00Z) - TSK_BYTES_SENT (Number of bytes sent) - TSK_BYTES_RECEIVED (Number of bytes received) - TSK_USER_NAME (User who executed the program) - TSK_COMMENT (Source of the attribute) - TSK_PATH (Path of the executable program) --- ## TSK_RECENT_OBJECT Indicates recently accessed content. Examples: Recent Documents or Recent Downloads menu items on Windows. ### REQUIRED ATTRIBUTES - TSK_PATH (Path to the recent object content in the data source) ### OPTIONAL ATTRIBUTES - TSK_DATETIME_ACCESSED (Timestamp that the content was last accessed at, in seconds since 1970-01-01T00:00:00Z) - TSK_PATH_ID (ID of the file instance in the data source) - TSK_PROG_NAME (Application or application extractor that stored this object as recent) - TSK_NAME (If found in the registry, the name of the attribute) - TSK_VALUE (If found in the registry, the value of the attribute) - TSK_COMMENT (What the source of the attribute may be) --- ## TSK_REMOTE_DRIVE Details about a remote drive found in the data source. ### REQUIRED ATTRIBUTES - TSK_REMOTE_PATH (Fully qualified UNC path to the remote drive) ### OPTIONAL ATTRIBUTES - TSK_LOCAL_PATH (The local path of this remote drive. This path may be mapped, e.g., 'D:/' or 'F:/') --- ## TSK_SCREEN_SHOTS Screenshots from a device or application. ### REQUIRED ATTRIBUTES - TSK_DATETIME (When the screenshot was taken) - TSK_PROG_NAME (Program that took the screenshot) ### OPTIONAL ATTRIBUTES - TSK_PATH (Location of screenshot) --- ## TSK_SERVICE_ACCOUNT An application or web user account. 
### REQUIRED ATTRIBUTES - TSK_PROG_NAME (The name of the service, e.g., Netflix) - TSK_USER_ID (User ID of the service account) ### OPTIONAL ATTRIBUTES - TSK_CATEGORY (Type of service, e.g., Web, TV, Messaging) - TSK_DATETIME_CREATED (When this service account was created, in seconds since 1970-01-01T00:00:00Z) - TSK_DESCRIPTION (Name of the mailbox, if this is an email account) - TSK_DOMAIN (The sign on realm) - TSK_EMAIL_REPLYTO (Email reply to address, if this is an email account) - TSK_NAME (Display name of the user account) - TSK_PASSWORD (Password of the service account) - TSK_PATH (Path to the application installation, if it is local) - TSK_SERVER_NAME (Name of the mail server, if this is an email account) - TSK_URL (URL of the service, if the service is a Web service) - TSK_URL_DECODED (Decoded URL of the service, if the service is a Web service) - TSK_USER_NAME (User name of the service account) --- ## TSK_SIM_ATTACHED Details about a SIM card that was physically attached to the device. ### REQUIRED ATTRIBUTES - At least one of: - TSK_ICCID (ICCID number of this SIM card) - TSK_IMSI (IMSI number of this SIM card) --- ## TSK_SPEED_DIAL_ENTRY A speed dial entry. ### REQUIRED ATTRIBUTES - TSK_PHONE_NUMBER (Phone number of the speed dial entry) ### OPTIONAL ATTRIBUTES - TSK_NAME_PERSON (Contact name of the speed dial entry) - TSK_SHORTCUT (Keyboard shortcut) --- ## TSK_TL_EVENT An event in the timeline of a case. ### REQUIRED ATTRIBUTES - TSK_TL_EVENT_TYPE (The type of the event, e.g., aTimelineEventType) - TSK_DATETIME (When the event occurred, in seconds since 1970-01-01T00:00:00Z) - TSK_DESCRIPTION (A description of the event) --- ## TSK_USER_DEVICE_EVENT Activity on the system or from an application. Example usage is a mobile device being locked and unlocked. 
### REQUIRED ATTRIBUTES - TSK_DATETIME_START (When activity started) ### OPTIONAL ATTRIBUTES - TSK_ACTIVITY_TYPE (Activity type i.e.: On or Off) - TSK_DATETIME_END (When activity ended) - TSK_PROG_NAME (Name of the program doing the activity) - TSK_VALUE (Connection type) --- ## TSK_WEB_BOOKMARK A web bookmark entry. ### REQUIRED ATTRIBUTES - TSK_URL (Bookmarked URL) ### OPTIONAL ATTRIBUTES - TSK_DATETIME_CREATED (Timestamp that this web bookmark was created, in seconds since 1970-01-01T00:00:00Z) - TSK_DOMAIN (Domain of the bookmarked URL) - TSK_PROG_NAME (Name of application or application extractor that stored this web bookmark entry) - TSK_NAME (Name of the bookmark entry) - TSK_TITLE (Title of the web page that was bookmarked) --- ## TSK_WEB_CACHE A web cache entry. The resource that was cached may or may not be present in the data source. ### REQUIRED ATTRIBUTES - TSK_PATH (Path to the cached file. This could point to a container file that has smaller cached data in it.) - TSK_URL (URL of the resource cached in this entry) ### OPTIONAL ATTRIBUTES - TSK_DATETIME_CREATED (Creation date of the cache entry, in seconds since 1970-01-01T00:00:00Z) - TSK_HEADERS (HTTP headers on cache entry) - TSK_PATH_ID (Object ID of the source cache file) - TSK_DOMAIN (Domain of the URL) --- ## TSK_WEB_COOKIE A Web cookie found. 
### REQUIRED ATTRIBUTES - TSK_URL (Source URL of the web cookie) - TSK_NAME (The Web cookie name attribute, e.g., sessionToken) - TSK_VALUE (The Web cookie value attribute) ### OPTIONAL ATTRIBUTES - TSK_DATETIME_ACCESSED (Datetime the Web Cookie was last accessed, in seconds since 1970-01-01T00:00:00Z) - TSK_DATETIME_CREATED (Datetime the Web cookie was created, in seconds since 1970-01-01T00:00:00Z) - TSK_DATETIME_END (Expiration datetime of the Web cookie, in seconds since 1970-01-01T00:00:00Z) - TSK_DOMAIN (The domain the Web cookie serves) - TSK_PROG_NAME (Name of the application or application extractor that stored the Web cookie) --- ## TSK_WEB_DOWNLOAD A Web download. The downloaded resource may or may not be present in the data source. ### REQUIRED ATTRIBUTES - TSK_URL (URL that hosts this downloaded resource) ### OPTIONAL ATTRIBUTES - TSK_DATETIME_ACCESSED (Last accessed timestamp, in seconds since 1970-01-01T00:00:00Z) - TSK_DOMAIN (Domain that hosted the downloaded resource) - TSK_PATH_ID (Object ID of the file instance in the data source) - TSK_PATH (Path to the downloaded resource in the datasource) - TSK_PROG_NAME (Name of the application or application extractor that downloaded this resource) --- ## TSK_WEB_FORM_ADDRESS Contains autofill data for a person's address. Form data is usually saved by a Web browser. ### REQUIRED ATTRIBUTES - TSK_LOCATION (The address of the person, e.g., 123 Main St.) 
### OPTIONAL ATTRIBUTES - TSK_COMMENT (Comment if the autofill data is encrypted) - TSK_COUNT (Number of times the Web form data was used) - TSK_DATETIME_ACCESSED (Last accessed timestamp of the Web form data, in seconds since 1970-01-01T00:00:00Z) - TSK_DATETIME_MODIFIED (Last modified timestamp of the Web form data, in seconds since 1970-01-01T00:00:00Z) - TSK_EMAIL (Email address from the form data) - TSK_NAME_PERSON (Name of a person from the form data) - TSK_PHONE_NUMBER (Phone number from the form data) --- ## TSK_WEB_FORM_AUTOFILL Contains autofill data for a Web form. Form data is usually saved by a Web browser. Each field value pair in the form should be stored in separate artifacts. ### REQUIRED ATTRIBUTES - One pair of: - TSK_NAME (Name of the autofill field) - TSK_VALUE (Value of the autofill field) ### OPTIONAL ATTRIBUTES - TSK_COMMENT (Comment if the form autofill data is encrypted) - TSK_COUNT (Number of times this Web form data has been used) - TSK_DATETIME_CREATED (Datetime this Web form autofill data was created, in seconds since 1970-01-01T00:00:00Z) - TSK_DATETIME_ACCESSED (Datetime this Web form data was last accessed, in seconds since 1970-01-01T00:00:00Z) - TSK_PROG_NAME (The application that stored this form information) --- ## TSK_WEB_HISTORY A Web history entry. ### REQUIRED ATTRIBUTES - TSK_URL (The URL) ### OPTIONAL ATTRIBUTES - TSK_DATETIME_ACCESSED (The datetime the URL was accessed, in seconds since 1970-01-01T00:00:00Z) - TSK_DOMAIN (The domain name of the URL) - TSK_PROG_NAME (The application or application extractor that stored this Web history entry) - TSK_REFERRER (The URL of a Web page that linked to the page) - TSK_TITLE (Title of the Web page that was visited) - TSK_URL_DECODED (The decoded URL) - TSK_USER_NAME (Name of the user that viewed the Web page) - TSK_DATETIME_CREATED (The datetime the page was created, ie: offline pages) --- ## TSK_WEB_SEARCH_QUERY Details about a Web search query. 
### REQUIRED ATTRIBUTES - TSK_TEXT (Web search query text) ### OPTIONAL ATTRIBUTES - TSK_DATETIME_ACCESSED (When the Web search query was last used, in seconds since 1970-01-01T00:00:00Z) - TSK_DOMAIN (Domain of the search engine used to execute the query) - TSK_PROG_NAME (Application or application extractor that stored the Web search query) --- ## TSK_WIFI_NETWORK Details about a WiFi network. ### REQUIRED ATTRIBUTES - TSK_SSID (The name of the WiFi network) ### OPTIONAL ATTRIBUTES - TSK_DATETIME (Timestamp, in seconds since 1970-01-01T00:00:00Z. This timestamp could be last connected time or creation time) - TSK_DEVICE_ID (String that uniquely identifies the WiFi network) - TSK_MAC_ADDRESS (Mac address of the adapter) - TSK_DEVICE_MODEL (Model of the device) --- ## TSK_WIFI_NETWORK_ADAPTER Details about a WiFi adapter. ### REQUIRED ATTRIBUTES - TSK_MAC_ADDRESS (Mac address of the adapter) */ sleuthkit-4.11.1/bindings/java/doxygen/query_database.dox000644 000765 000024 00000020222 14137073413 024305 0ustar00carrierstaff000000 000000 /*! \page query_database_page Query the Database \section types_of_databases Database Queries This page is for people who are developing their own Autopsy plugin modules that require SQL queries. If you are not developing a module requiring SQL queries, you can skip this page. Autopsy currently allows either SQLite or PostgreSQL as the back-end database system for a case. Any module you write could be used with either as the backend database, at the user's discretion. If you are writing code actually for Autopsy, not just an Autopsy module, you may need to be able to INSERT and UPDATE into the database as well. Please see \subpage insert_and_update_database_page.
\subsection which_db Which Database is my Module Accessing? In an Autopsy Module, you can check the database type currently in use with the following code snippet: \code{.java} Case currentCase = Case.getCurrentCase(); if (currentCase.getCaseType() == Case.CaseType.MULTI_USER_CASE) { // PostgreSQL in use } else { // SQLite in use } \endcode
\section db_user_calls TSK methods to Query the Database With User-Supplied SQL The following SleuthkitCase methods are available for the user to supply all of, or a portion of, a SQL query. \code{.java} ArrayList getMatchingAttributes(String whereClause) ArrayList getMatchingArtifacts(String whereClause) long countFilesWhere(String sqlWhereClause) List findAllFilesWhere(String sqlWhereClause) List findAllFileIdsWhere(String sqlWhereClause) CaseDbQuery executeQuery(String query) List findFilesWhere(String sqlWhereClause) [deprecated] ResultSet runQuery(String query) [deprecated] void closeRunQuery(ResultSet resultSet) [deprecated] \endcode The majority of them only allow the user to specify a WHERE clause, determining which records to SELECT.

The following example finds all the .txt files in the case: \code{.java} List files = sk.findAllFilesWhere("LOWER(name) LIKE '%.txt'"); \endcode
\section db_pitfalls_to_avoid How to Avoid Pitfalls When Using the Query Methods Because there are multiple backend databases, care must be taken to use strict SQL. When things must be different between database types, use \ref which_db to determine which database type is currently in use and create the proper SQL statements. Be sure to test your module with both types of databases. They behave differently and will give you different resulting output order.

\subsection general_items WHERE Clause Syntax - Do not use backticks. PostgreSQL does not use them like SQLite does.

- Use only single quotes to quote values. Do not use double quotes for this. Quoting values is not required.
\code{.java} SELECT * FROM tsk_files WHERE has_path = "1" // Bad example SELECT * FROM tsk_files WHERE has_path = '1' // Good example SELECT * FROM tsk_files WHERE has_path = 1 // Good example \endcode
- Use only double quotes to quote column names. Do not use single quotes for this. Quoting column names is not required.
\code{.java} SELECT 'obj_id' FROM tsk_files WHERE has_path = 1 // Bad example SELECT "obj_id" FROM tsk_files WHERE has_path = 1 // Good example SELECT obj_id FROM tsk_files WHERE has_path = 1 // Good example \endcode
- Do not use || and && to connect logical clauses. This does not exist in PostgreSQL. Use OR and AND instead. \code{.java} SELECT COUNT(*) FROM tsk_files WHERE dir_type = '5' && md5 IS NULL || size > '0' // Bad Example SELECT COUNT(*) FROM tsk_files WHERE dir_type = '5' AND md5 IS NULL OR size > '0' // Good Example \endcode
- PostgreSQL compares are case-sensitive. Always specify what type of compare you want. UPPER() and LOWER() can help with that. \code{.java} SELECT * from people WHERE first_name LIKE '%somename%' // Will be case sensitive in PostgreSQL, not in SQLite SELECT * from people WHERE first_name ILIKE '%somename%' // Works in PostgreSQL, does not exist in SQLite SELECT * from people WHERE LOWER(first_name) LIKE LOWER('%somename%') // Not case sensitive in either database \endcode
- When generating WHERE queries via code, some folks include an AND(1) or OR(0) clause in the query as a placeholder that does not affect the outcome of the query but simplifies the query-generation logic. PostgreSQL does not allow true or false comparisons with integers. The PostgreSQL syntax is AND(true) or OR(false). SQLite does not allow the PostgreSQL syntax and PostgreSQL does not allow the SQLite syntax. Do not use this trick to generate queries. Instead, have your code handle the edge cases where there are no entries for the AND or OR portion of a clause. \code{.java} WHERE id=12 AND(1) // SQLite example, will not work in PostgreSQL WHERE id=12 AND(true) // PostgreSQL example, will not work in SQLite WHERE id=12 // Will work in both, just a bit harder to handle all the cases in query-generation code \endcode
- SQLite allows non-standard usage of the IS keyword. Standard usage of IS checks if something IS NULL or IS NOT NULL. It does not compare against specific values. Remember when comparing values to use = instead of the IS keyword. If you want to check for NULL, then IS NULL is the right tool. Example: \code{.java} WHERE value IS '4' // Bad example. Works in SQLite, does not work in PostgreSQL WHERE value = '4' // Good example. Works in both SQLite and PostgreSQL WHERE value != '4' // Good example. Works in both SQLite and PostgreSQL WHERE value IS NULL // Good example. Works in both SQLite and PostgreSQL WHERE value IS NOT NULL // Good example. Works in both SQLite and PostgreSQL \endcode

\subsection order_by How to ORDER BY Consistently - SQLite and PostgreSQL have different default sort orders for returned records, so you want to fully specify ORDER BY clauses for both database types. Example: \code{.java} Case currentCase = Case.getCurrentCase(); String orderByClause; if (currentCase.getCaseType() == Case.CaseType.MULTI_USER_CASE) { orderByClause = "ORDER BY att.value_text ASC NULLS FIRST"; //PostgreSQL } else { orderByClause = "ORDER BY att.value_text ASC"; //SQLite } \endcode
- Do not use COLLATE NOCASE to order output. This does not exist in PostgreSQL. Use LOWER() or UPPER() instead. \code{.java} ORDER BY tsk_files.dir_type, tsk_files.name COLLATE NOCASE // Bad Example ORDER BY tsk_files.dir_type, LOWER(tsk_files.name) // Good Example \endcode
- In ORDER BY clauses, PostgreSQL ignores leading dashes. Given the following data, you will see the following two sort orders for the different databases.
| Data | PostgreSQL sort order | SQLite sort order| |:--------:|:------------------------:|:------------------:| |Alpha | Alpha | -Bravo | |-Bravo | -Bravo | Alpha | |Charlie | Charlie | Charlie |
To force PostgreSQL to not ignore leading dashes, convert strings to SQL_ASCII before sorting by them. This is done with convert_to(), but it only exists in PostgreSQL.
\code{.java} ORDER BY some_value // Bad example ORDER BY convert_to(some_value, 'SQL_ASCII') // Good example \endcode
With the code above, using SQL_ASCII encoding, the following results are seen:
| Data | PostgreSQL sort order | SQLite sort order| |:--------:|:------------------------:|:------------------:| |Alpha | -Bravo | -Bravo | |-Bravo | Alpha | Alpha | |Charlie | Charlie | Charlie |
- PostgreSQL sorts NULLs last for ASC and first for DESC. SQLite does the opposite. PostgreSQL allows you to control the NULL sort order with NULLS FIRST or NULLS LAST \code{.java} ORDER BY att.value_text ASC // SQLite example, will give different ordering in PostgreSQL ORDER BY convert_to(att.value_text, 'SQL_ASCII') ASC NULLS FIRST // PostgreSQL example. The command NULLS FIRST does not exist in SQLite, but SQLite will sort nulls first by default. \endcode
*/ sleuthkit-4.11.1/bindings/java/doxygen/communications.dox000644 000765 000024 00000021002 14137073413 024341 0ustar00carrierstaff000000 000000 /*! \page mod_compage Communications NOTE: This is a work in progress \section jni_com_overview Overview The Java code and database in Sleuth Kit contain special classes and tables to deal with communications between two parties. This page outlines what a developer should do when they are parsing communications data so that it can be properly displayed and used by other code (such as the Autopsy Communications UI). \section jni_com_types Terminology First, let's cover the terminology that we use. \subsection jni_com_types_account Accounts An Account is an entity with a type and an identifier that is unique to the type. Common examples of types include: - Credit Card (and the unique identifier is the credit card number) - Email (and the unique identifier is the email address) - Phone (and the unique identifier is the phone number) - Twitter (with a unique identifier of the login) - ... Accounts are found in a digital investigation when parsing structured data (such as email messages) or keyword searching. \subsection jni_com_types_relationships Relationships Two accounts have a relationship if they are believed to have communicated in some way. Examples of interactions that cause a relationship are: - Being part of the same email message - Being in a call log - Being in an address book When there are multiple people involved with an email message, a relationship is made between each of them. For example, if A sends a message to B and CC:s C, then there will be relationships between A <-> B, A <-> C, and B <-> C. Relationships in The Sleuth Kit are not directional. A relationship source is where we learned about the relationship. This typically comes from Blackboard Artifacts, but may come from generic files in the future. 
\subsection jni_com_types_devaccount Device Accounts In some situations, we may not know a specific account that a relationship exists with. For example, when we find a contact book on a thumb drive, we want to make a relationship between the accounts in the contact book and the accounts associated with the owner of that thumb drive. But, we may not know which accounts are for that owner. The contacts could be just a bunch of vCards and not tied to a specific email or phone number. In this situation, we make a device account that is associated with the data source or device being analyzed. You should make an account of type Account.Type.DEVICE (instead of something like EMAIL) and the identifier is the device id of the data source where the other accounts were located. \section jni_com_add Adding Communication Information to Database Now let's cover what you should do when you are parsing some communications data and want to store it in the TSK database. Let's assume we are parsing a smart phone app that has messages. \subsection jni_com_add_acct Adding Account Instances When you encounter a message, the first thing to do is store information about the accounts. TSK wants to know about each file that had a reference of the account. You should call org.sleuthkit.datamodel.CommunicationsManager.createAccountFileInstance() for each file in which you encounter a given account. To make a device account, you'd have logic similar to: \code AccountFileInstance deviceAccountInstance = tskCase.getCommunicationsManager().createAccountFileInstance(Account.Type.DEVICE, abstractFile.getDataSource().getDeviceId(), "Module Name", abstractFile); \endcode Behind the scenes, createAccountFileInstance will make an entry in the accounts table for each unique account on a given device and will make an org.sleuthkit.datamodel.BlackboardArtifact for each unique account in a given file. 
If you want to create a custom account type, call org.sleuthkit.datamodel.CommunicationsManager.addAccountType(). \subsection jni_com_add_msg Adding The Message (Relationship Source) You also need to make sure that you store the org.sleuthkit.datamodel.BlackboardArtifact that used the accounts and had the relationship. You can do this before or after calling createAccountFileInstance(). The order does not matter. For a messaging app, you would make org.sleuthkit.datamodel.BlackboardArtifact objects with a type of org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE.TSK_MESSAGE. That artifact would store various name and value pairs using org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE values. There is nothing communication-specific about this step. It is the same Blackboard artifacts and attributes that are used in many other places. \subsection jni_com_add_relationship Adding the Relationship The final step is to store the relationships between the accounts. You can do this via org.sleuthkit.datamodel.CommunicationsManager.addRelationships(). This method will require you to pass in the org.sleuthkit.datamodel.AccountInstance objects that you created and the org.sleuthkit.datamodel.BlackboardArtifact that you created for the message or other source. The source of the relationship can be a device account (for things like call logs and contacts) if you are unsure about the specific account (such as phone number) associated with the device. As an example, you can refer to some code in Autopsy, such as: - [Email Module addArtifact()] (https://github.com/sleuthkit/autopsy/blob/develop/thunderbirdparser/src/org/sleuthkit/autopsy/thunderbirdparser/ThunderbirdMboxFileIngestModule.java) \section jni_com_comm_artifacts_helper Communication Artifacts Helper An alternative to individually creating artifacts, accounts and relationships is to use the org.sleuthkit.datamodel.blackboardutils.CommunicationArtifactsHelper. 
CommunicationArtifactsHelper provides APIs that create the artifact, create accounts, and create relationships between the accounts, all with a single API call. \subsection jni_com_comm_artifacts_helper_create_helper Creating a Communications Artifacts Helper To use the communication artifacts helper, you must first create a new instance of the helper for each source file from which you are extracting communications artifacts. To create a helper, use the constructor org.sleuthkit.datamodel.blackboardutils.CommunicationArtifactsHelper.CommunicationArtifactsHelper(). When creating the helper, you must specify the account type for the accounts that will be created by this instance of the helper. Additionally, you may specify the "self" account identifier - i.e. the application specific account identifier for the owner of the device, if it is known. If the self account is not known, you may omit it, in which case the helper uses the Device account as a proxy for the self account. \subsection jni_com_comm_artifacts_helper_add_contact Adding Contacts Use the org.sleuthkit.datamodel.blackboardutils.CommunicationArtifactsHelper.addContact() method to add contacts. The helper creates a TSK_CONTACT artifact. It also creates contact accounts for each of the specified contact methods, and finally creates relationships between the contact accounts and the self account. \subsection jni_com_comm_artifacts_helper_add_calllog Adding Call logs Use the org.sleuthkit.datamodel.blackboardutils.CommunicationArtifactsHelper.addCalllog() method to add a call log. The helper creates a TSK_CALLLOG artifact. It also creates accounts for the caller and each of the callees, if specified. Finally it creates a relationship between the caller and each of the callees. \subsection jni_com_comm_artifacts_helper_add_message Adding Messages Use the org.sleuthkit.datamodel.blackboardutils.CommunicationArtifactsHelper.addMessage() method to add a message. The helper creates a TSK_MESSAGE artifact. 
It also creates accounts for the sender and each of the recipients, if specified. Finally it creates a relationship between the sender and each of the recipients. \subsection jni_com_comm_artifacts_helper_add_attachments Adding Attachments to message Use the org.sleuthkit.datamodel.blackboardutils.CommunicationArtifactsHelper.addAttachments() method to add org.sleuthkit.datamodel.blackboardutils.attributes.MessageAttachments to a message. As an example, you can refer to some code in Autopsy, such as: - [Android Text Messages] (https://github.com/sleuthkit/autopsy/blob/develop/InternalPythonModules/android/textmessage.py) - [Facebook messenger Messages] (https://github.com/sleuthkit/autopsy/blob/develop/InternalPythonModules/android/fbmessenger.py) \section jni_com_schema Database Schema For details of how this is stored in the database, refer to the wiki. */ sleuthkit-4.11.1/bindings/java/doxygen/os_accounts.dox000644 000765 000024 00000017163 14137073413 023646 0ustar00carrierstaff000000 000000 /*! \page mod_os_accounts_page OS Accounts and Realms \section os_acct_overview Overview This page outlines some of the core concepts around OS accounts and realms and how they are stored. OS accounts are unique data types in the TSK datamodel and have more complexity than other types because we often will not fully understand the details when creating the OS accounts early in the processing and will need to update them at various points as analysis continues. \section os_acct_basics Basic Terminology - An OS account allows a person to do some action or access some resource on a device. - A realm is the scope in which the OS account is defined. A realm can be scoped to a single host (i.e., for accounts that exist only on a single host) or to a network domain (such as Windows domain accounts). 
\section os_acct_challenges OS Account Challenges A key challenge with OS accounts is that we do not know the account information until we have started to parse files, and the more detailed information will only come from OS configuration files. It is also possible that we may never know the details if we have only a media card. As a user adds a disk image to the case, we may learn about addresses from the files. But, we won't yet know the account name or if it is domain-scoped or local-scoped. So, the basic properties of the realm and account may change as more data is ingested and analyzed. This could even result in needing to merge realms and accounts. Another difference from other data types in the TSK data model is that OS accounts may span multiple data sources if they are domain accounts. Therefore, they are not "children" of a data source and exist outside of the usual tree model in TSK. \section os_acct_realm OS Account Realms An org.sleuthkit.datamodel.OsAccountRealm represents the scope of a set of OS accounts. A realm's scope is defined by org.sleuthkit.datamodel.OsAccountRealm.RealmScope. By default, the scope is set to host-level and the org.sleuthkit.datamodel.OsAccountRealm.ScopeConfidence is set to inferred. As more is learned, the confidence and scope can be made more specific. A realm has two core fields: - Address that the OS uses internally, such as part of a Windows SID - Name that is what users more often see When searching for realms, the address has priority over the name. Often times with Windows systems, we may have a realm address from SIDs but not a specific realm name. Realms are managed by org.sleuthkit.datamodel.OsAccountRealmManager. \section os_acct_acct OS Accounts An org.sleuthkit.datamodel.OsAccount represents an account that was configured in an operating system. It must be defined within the scope of an OsAccountRealm. 
An OS account has two core fields: - Login name that the user enters (such as jdoe) - Address that the operating system uses internally (such as a UID of 0 or a Windows SID) OS accounts also have other properties, such as full name, creation date, etc., that can be set after the account is created. OS accounts are managed by org.sleuthkit.datamodel.OsAccountManager. \subsection os_acct_acct_os Supported Operating Systems At this point, APIs exist for only Windows accounts, such as: - org.sleuthkit.datamodel.OsAccountManager.newWindowsOsAccount() - org.sleuthkit.datamodel.OsAccountManager.getWindowsOsAccount() The underlying database schema supports other operating systems, but the utility APIs do not exist to populate them other than with Windows SIDs. These methods may be added in the future. \section os_account_storing Storing Original Account Data We recommend that the OS account addresses or names that were parsed from the data source be saved alongside any references to OsAccount objects. For example, the case database stores the UID or SID that was stored in a file system for a file in addition to the reference to the OsAccount object that is associated with that address. This helps to ensure the original data is preserved in case an Os account can't be created, gets deleted, or is incorrectly merged. \section os_acct_example Example Creation & Update Code There are three unique elements to creating and updating OS accounts when adding data to the case database:
  1. When creating and updating OS accounts in the case database, you need to avoid some pitfalls involving doing a lot of work in a transaction. Why? For single-user cases, if you have created a org.sleuthkit.datamodel.SleuthkitCase.CaseDbTransaction, you should never call another database access method unless it allows you to pass in the CaseDbTransaction you are using. Otherwise, the method that you call will attempt to create its own transaction and because you already have the underlying SQLite case database locked, the called method will block forever waiting for a lock it cannot obtain. For a multi-user case, you will run the risk of attempting to create OS accounts in the case database that would duplicate accounts created by another user on another machine. In this scenario, uniqueness constraints will cause your entire transaction to fail and everything you have done up to that point will be rolled back and will have to be redone. This means that if you want to use a CaseDbTransation to add a lot of files or artifacts associated with OS accounts, you'll need to:
    1. Pre-process the data to identify what OS accounts you need to create or look up
    2. Look up or create the OS accounts in individual transactions
    3. Start a new transaction and add the files or artifacts with the references to the OS accounts
  2. You need to check if you have more information than what is already stored (e.g., maybe the realm name was unknown).
  3. You need to record that an OS account was referenced on a given data source because OS accounts are stored in parallel to data sources and are not children of them.
Here are some examples. \subsection os_acct_ex_get Adding a File or Data Artifact If you pass in an OsAccount to the various methods to add files and data artifacts, then the database will make the association and record the occurrence. All you need to do is get the account. You can do that with org.sleuthkit.datamodel.OsAccountManager.getWindowsOsAccount(). Note that sometimes that call will fail if the SID associated with the file is for a group, for example, if the OS account has admin rights. If you get an OsAccount, you can try to update it if you think you may have new information. Here is example pseudo-code: \code OsAccount osAcct = null; try { Optional osAcctOpt = getWindowsOsAccount("S-....", "jdoe", "ACME", host); if (osAcctOpt.isPresent()) { osAcct = osAcctOpt.get(); updateWindowsOsAccount(osAcct, "S-.....", "jdoe", "ACME", host); } else { osAcct = newWindowsOsAccount("S-....", "jdoe", "ACME", host) } } catch (NotUserSIDException ex) { // Ignore this SID } // Pass in osAcct when making artifacts and files \endcode \subsection os_acct_ex_update Parsing OS Configuration Data When parsing the Windows registry or other OS Configuration file, you may find updated information about OS accounts. You can call various org.sleuthkit.datamodel.OsAccountManager methods to get and update the accounts. When adding extended attributes, you can choose to limit the scope of the attribute to the single host being parsed or to the domain-level. You should make sure to call org.sleuthkit.datamodel.OsAccountManager.newOsAccountInstance() to ensure it is recorded that there was at least some reference to the account on that data source. Otherwise, it will not be associated with the data source unless there were also files or artifacts that were mapped to the OS account. */ sleuthkit-4.11.1/bindings/java/doxygen/insert_and_update_database.dox000644 000765 000024 00000002472 14137073413 026637 0ustar00carrierstaff000000 000000 /*! 
\page insert_and_update_database_page Inserting Data \section types_of_databases_available Inserting Into the Database This page is for people who are developing Sleuth Kit code and need to place items into the database with SQL statements. If you are simply writing modules that read from the database (such as Autopsy ingest modules), there is nothing for you here. The Sleuth Kit currently allows either SQLite or PostgreSQL as the back-end database system for a case. Any code you write could be used with either as the backend database, at the user's discretion. Be sure to test your work with both platforms. - For SQLite compatibility, use SQL statements supported by SQLite 3 - For PostgreSQL compatibility, use SQL statements supported by PostgreSQL 9.4 \section insert_pitfalls_to_avoid How to Avoid Pitfalls When INSERTing into the Database - Do not use INSERT OR REPLACE INTO. It does not exist in PostgreSQL. - Do not use INSERT OR IGNORE INTO. It does not exist in PostgreSQL. - Do not insert [NUL characters](http://en.wikipedia.org/wiki/Null_character) into the database as UTF-8 (NUL characters are not NULL fields). Translate NUL characters to the [SUB character](http://en.wikipedia.org/wiki/Substitute_character) with the following instead: \code{.java} private String replaceNulls(String text); \endcode */ sleuthkit-4.11.1/bindings/java/doxygen/footer.html000644 000765 000024 00000000451 14137073413 022766 0ustar00carrierstaff000000 000000

Copyright © 2011-2021 Brian Carrier. (carrier -at- sleuthkit -dot- org)
This work is licensed under a Creative Commons Attribution-Share Alike 3.0 United States License.

sleuthkit-4.11.1/bindings/java/doxygen/Doxyfile000644 000765 000024 00000314244 14137073430 022317 0ustar00carrierstaff000000 000000 # Doxyfile 1.8.9.1 # This file describes the settings to be used by the documentation system # doxygen (www.doxygen.org) for a project. # # All text after a double hash (##) is considered a comment and is placed in # front of the TAG it is preceding. # # All text after a single hash (#) is considered a comment and will be ignored. # The format is: # TAG = value [value, ...] # For lists, items can also be appended using: # TAG += value [value, ...] # Values that contain spaces should be placed between quotes (\" \"). #--------------------------------------------------------------------------- # Project related configuration options #--------------------------------------------------------------------------- # This tag specifies the encoding used for all characters in the config file # that follow. The default is UTF-8 which is also the encoding used for all text # before the first occurrence of this tag. Doxygen uses libiconv (or the iconv # built into libc) for the transcoding. See http://www.gnu.org/software/libiconv # for the list of possible encodings. # The default value is: UTF-8. DOXYFILE_ENCODING = UTF-8 # The PROJECT_NAME tag is a single word (or a sequence of words surrounded by # double-quotes, unless you are using Doxywizard) that should identify the # project for which the documentation is generated. This name is used in the # title of most generated pages and in a few other places. # The default value is: My Project. PROJECT_NAME = "Sleuth Kit Java Bindings (JNI)" # The PROJECT_NUMBER tag can be used to enter a project or revision number. This # could be handy for archiving the generated documentation or if some version # control system is used. 
# NOTE: This is updated by the release-unix.pl script PROJECT_NUMBER = 4.11.1 # Using the PROJECT_BRIEF tag one can provide an optional one line description # for a project that appears at the top of each page and should give viewer a # quick idea about the purpose of the project. Keep the description short. PROJECT_BRIEF = "Java bindings for using The Sleuth Kit" # With the PROJECT_LOGO tag one can specify a logo or an icon that is included # in the documentation. The maximum height of the logo should not exceed 55 # pixels and the maximum width should not exceed 200 pixels. Doxygen will copy # the logo to the output directory. PROJECT_LOGO = # The OUTPUT_DIRECTORY tag is used to specify the (relative or absolute) path # into which the generated documentation will be written. If a relative path is # entered, it will be relative to the location where doxygen was started. If # left blank the current directory will be used. OUTPUT_DIRECTORY = docs # If the CREATE_SUBDIRS tag is set to YES then doxygen will create 4096 sub- # directories (in 2 levels) under the output directory of each output format and # will distribute the generated files over these directories. Enabling this # option can be useful when feeding doxygen a huge amount of source files, where # putting all generated files in the same directory would otherwise causes # performance problems for the file system. # The default value is: NO. CREATE_SUBDIRS = NO # If the ALLOW_UNICODE_NAMES tag is set to YES, doxygen will allow non-ASCII # characters to appear in the names of generated files. If set to NO, non-ASCII # characters will be escaped, for example _xE3_x81_x84 will be used for Unicode # U+3044. # The default value is: NO. ALLOW_UNICODE_NAMES = NO # The OUTPUT_LANGUAGE tag is used to specify the language in which all # documentation generated by doxygen is written. Doxygen will use this # information to generate all constant output in the proper language. 
# Possible values are: Afrikaans, Arabic, Armenian, Brazilian, Catalan, Chinese, # Chinese-Traditional, Croatian, Czech, Danish, Dutch, English (United States), # Esperanto, Farsi (Persian), Finnish, French, German, Greek, Hungarian, # Indonesian, Italian, Japanese, Japanese-en (Japanese with English messages), # Korean, Korean-en (Korean with English messages), Latvian, Lithuanian, # Macedonian, Norwegian, Persian (Farsi), Polish, Portuguese, Romanian, Russian, # Serbian, Serbian-Cyrillic, Slovak, Slovene, Spanish, Swedish, Turkish, # Ukrainian and Vietnamese. # The default value is: English. OUTPUT_LANGUAGE = English # If the BRIEF_MEMBER_DESC tag is set to YES, doxygen will include brief member # descriptions after the members that are listed in the file and class # documentation (similar to Javadoc). Set to NO to disable this. # The default value is: YES. BRIEF_MEMBER_DESC = YES # If the REPEAT_BRIEF tag is set to YES, doxygen will prepend the brief # description of a member or function before the detailed description # # Note: If both HIDE_UNDOC_MEMBERS and BRIEF_MEMBER_DESC are set to NO, the # brief descriptions will be completely suppressed. # The default value is: YES. REPEAT_BRIEF = YES # This tag implements a quasi-intelligent brief description abbreviator that is # used to form the text in various listings. Each string in this list, if found # as the leading text of the brief description, will be stripped from the text # and the result, after processing the whole list, is used as the annotated # text. Otherwise, the brief description is used as-is. If left blank, the # following values are used ($name is automatically replaced with the name of # the entity):The $name class, The $name widget, The $name file, is, provides, # specifies, contains, represents, a, an and the. ABBREVIATE_BRIEF = # If the ALWAYS_DETAILED_SEC and REPEAT_BRIEF tags are both set to YES then # doxygen will generate a detailed section even if there is only a brief # description. 
# The default value is: NO. ALWAYS_DETAILED_SEC = NO # If the INLINE_INHERITED_MEMB tag is set to YES, doxygen will show all # inherited members of a class in the documentation of that class as if those # members were ordinary class members. Constructors, destructors and assignment # operators of the base classes will not be shown. # The default value is: NO. INLINE_INHERITED_MEMB = NO # If the FULL_PATH_NAMES tag is set to YES, doxygen will prepend the full path # before files name in the file list and in the header files. If set to NO the # shortest path that makes the file name unique will be used # The default value is: YES. FULL_PATH_NAMES = YES # The STRIP_FROM_PATH tag can be used to strip a user-defined part of the path. # Stripping is only done if one of the specified strings matches the left-hand # part of the path. The tag can be used to show relative paths in the file list. # If left blank the directory from which doxygen is run is used as the path to # strip. # # Note that you can specify absolute paths here, but also relative paths, which # will be relative from the directory where doxygen is started. # This tag requires that the tag FULL_PATH_NAMES is set to YES. STRIP_FROM_PATH = # The STRIP_FROM_INC_PATH tag can be used to strip a user-defined part of the # path mentioned in the documentation of a class, which tells the reader which # header file to include in order to use a class. If left blank only the name of # the header file containing the class definition is used. Otherwise one should # specify the list of include paths that are normally passed to the compiler # using the -I flag. STRIP_FROM_INC_PATH = # If the SHORT_NAMES tag is set to YES, doxygen will generate much shorter (but # less readable) file names. This can be useful is your file systems doesn't # support long names like on DOS, Mac, or CD-ROM. # The default value is: NO. 
SHORT_NAMES = NO # If the JAVADOC_AUTOBRIEF tag is set to YES then doxygen will interpret the # first line (until the first dot) of a Javadoc-style comment as the brief # description. If set to NO, the Javadoc-style will behave just like regular Qt- # style comments (thus requiring an explicit @brief command for a brief # description.) # The default value is: NO. JAVADOC_AUTOBRIEF = NO # If the QT_AUTOBRIEF tag is set to YES then doxygen will interpret the first # line (until the first dot) of a Qt-style comment as the brief description. If # set to NO, the Qt-style will behave just like regular Qt-style comments (thus # requiring an explicit \brief command for a brief description.) # The default value is: NO. QT_AUTOBRIEF = NO # The MULTILINE_CPP_IS_BRIEF tag can be set to YES to make doxygen treat a # multi-line C++ special comment block (i.e. a block of //! or /// comments) as # a brief description. This used to be the default behavior. The new default is # to treat a multi-line C++ comment block as a detailed description. Set this # tag to YES if you prefer the old behavior instead. # # Note that setting this tag to YES also means that rational rose comments are # not recognized any more. # The default value is: NO. MULTILINE_CPP_IS_BRIEF = NO # If the INHERIT_DOCS tag is set to YES then an undocumented member inherits the # documentation from any documented member that it re-implements. # The default value is: YES. INHERIT_DOCS = YES # If the SEPARATE_MEMBER_PAGES tag is set to YES then doxygen will produce a new # page for each member. If set to NO, the documentation of a member will be part # of the file/class/namespace that contains it. # The default value is: NO. SEPARATE_MEMBER_PAGES = NO # The TAB_SIZE tag can be used to set the number of spaces in a tab. Doxygen # uses this value to replace tabs by spaces in code fragments. # Minimum value: 1, maximum value: 16, default value: 4. 
TAB_SIZE = 8 # This tag can be used to specify a number of aliases that act as commands in # the documentation. An alias has the form: # name=value # For example adding # "sideeffect=@par Side Effects:\n" # will allow you to put the command \sideeffect (or @sideeffect) in the # documentation, which will result in a user-defined paragraph with heading # "Side Effects:". You can put \n's in the value part of an alias to insert # newlines. ALIASES = # This tag can be used to specify a number of word-keyword mappings (TCL only). # A mapping has the form "name=value". For example adding "class=itcl::class" # will allow you to use the command class in the itcl::class meaning. TCL_SUBST = # Set the OPTIMIZE_OUTPUT_FOR_C tag to YES if your project consists of C sources # only. Doxygen will then generate output that is more tailored for C. For # instance, some of the names that are used will be different. The list of all # members will be omitted, etc. # The default value is: NO. OPTIMIZE_OUTPUT_FOR_C = NO # Set the OPTIMIZE_OUTPUT_JAVA tag to YES if your project consists of Java or # Python sources only. Doxygen will then generate output that is more tailored # for that language. For instance, namespaces will be presented as packages, # qualified scopes will look different, etc. # The default value is: NO. OPTIMIZE_OUTPUT_JAVA = YES # Set the OPTIMIZE_FOR_FORTRAN tag to YES if your project consists of Fortran # sources. Doxygen will then generate output that is tailored for Fortran. # The default value is: NO. OPTIMIZE_FOR_FORTRAN = NO # Set the OPTIMIZE_OUTPUT_VHDL tag to YES if your project consists of VHDL # sources. Doxygen will then generate output that is tailored for VHDL. # The default value is: NO. OPTIMIZE_OUTPUT_VHDL = NO # Doxygen selects the parser to use depending on the extension of the files it # parses. With this tag you can assign which parser to use for a given # extension. 
Doxygen has a built-in mapping, but you can override or extend it # using this tag. The format is ext=language, where ext is a file extension, and # language is one of the parsers supported by doxygen: IDL, Java, Javascript, # C#, C, C++, D, PHP, Objective-C, Python, Fortran (fixed format Fortran: # FortranFixed, free formatted Fortran: FortranFree, unknown formatted Fortran: # Fortran. In the later case the parser tries to guess whether the code is fixed # or free formatted code, this is the default for Fortran type files), VHDL. For # instance to make doxygen treat .inc files as Fortran files (default is PHP), # and .f files as C (default is Fortran), use: inc=Fortran f=C. # # Note: For files without extension you can use no_extension as a placeholder. # # Note that for custom extensions you also need to set FILE_PATTERNS otherwise # the files are not read by doxygen. EXTENSION_MAPPING = # If the MARKDOWN_SUPPORT tag is enabled then doxygen pre-processes all comments # according to the Markdown format, which allows for more readable # documentation. See http://daringfireball.net/projects/markdown/ for details. # The output of markdown processing is further processed by doxygen, so you can # mix doxygen, HTML, and XML commands with Markdown formatting. Disable only in # case of backward compatibilities issues. # The default value is: YES. MARKDOWN_SUPPORT = YES # When enabled doxygen tries to link words that correspond to documented # classes, or namespaces to their corresponding documentation. Such a link can # be prevented in individual cases by putting a % sign in front of the word or # globally by setting AUTOLINK_SUPPORT to NO. # The default value is: YES. AUTOLINK_SUPPORT = YES # If you use STL classes (i.e. std::string, std::vector, etc.) but do not want # to include (a tag file for) the STL sources as input, then you should set this # tag to YES in order to let doxygen match functions declarations and # definitions whose arguments contain STL classes (e.g. 
func(std::string); # versus func(std::string) {}). This also make the inheritance and collaboration # diagrams that involve STL classes more complete and accurate. # The default value is: NO. BUILTIN_STL_SUPPORT = NO # If you use Microsoft's C++/CLI language, you should set this option to YES to # enable parsing support. # The default value is: NO. CPP_CLI_SUPPORT = NO # Set the SIP_SUPPORT tag to YES if your project consists of sip (see: # http://www.riverbankcomputing.co.uk/software/sip/intro) sources only. Doxygen # will parse them like normal C++ but will assume all classes use public instead # of private inheritance when no explicit protection keyword is present. # The default value is: NO. SIP_SUPPORT = NO # For Microsoft's IDL there are propget and propput attributes to indicate # getter and setter methods for a property. Setting this option to YES will make # doxygen to replace the get and set methods by a property in the documentation. # This will only work if the methods are indeed getting or setting a simple # type. If this is not the case, or you want to show the methods anyway, you # should set this option to NO. # The default value is: YES. IDL_PROPERTY_SUPPORT = YES # If member grouping is used in the documentation and the DISTRIBUTE_GROUP_DOC # tag is set to YES then doxygen will reuse the documentation of the first # member in the group (if any) for the other members of the group. By default # all members of a group must be documented explicitly. # The default value is: NO. DISTRIBUTE_GROUP_DOC = NO # Set the SUBGROUPING tag to YES to allow class member groups of the same type # (for instance a group of public functions) to be put as a subgroup of that # type (e.g. under the Public Functions section). Set it to NO to prevent # subgrouping. Alternatively, this can be done per class using the # \nosubgrouping command. # The default value is: YES. 
SUBGROUPING = YES # When the INLINE_GROUPED_CLASSES tag is set to YES, classes, structs and unions # are shown inside the group in which they are included (e.g. using \ingroup) # instead of on a separate page (for HTML and Man pages) or section (for LaTeX # and RTF). # # Note that this feature does not work in combination with # SEPARATE_MEMBER_PAGES. # The default value is: NO. INLINE_GROUPED_CLASSES = NO # When the INLINE_SIMPLE_STRUCTS tag is set to YES, structs, classes, and unions # with only public data fields or simple typedef fields will be shown inline in # the documentation of the scope in which they are defined (i.e. file, # namespace, or group documentation), provided this scope is documented. If set # to NO, structs, classes, and unions are shown on a separate page (for HTML and # Man pages) or section (for LaTeX and RTF). # The default value is: NO. INLINE_SIMPLE_STRUCTS = NO # When TYPEDEF_HIDES_STRUCT tag is enabled, a typedef of a struct, union, or # enum is documented as struct, union, or enum with the name of the typedef. So # typedef struct TypeS {} TypeT, will appear in the documentation as a struct # with name TypeT. When disabled the typedef will appear as a member of a file, # namespace, or class. And the struct will be named TypeS. This can typically be # useful for C code in case the coding convention dictates that all compound # types are typedef'ed and only the typedef is referenced, never the tag name. # The default value is: NO. TYPEDEF_HIDES_STRUCT = NO # The size of the symbol lookup cache can be set using LOOKUP_CACHE_SIZE. This # cache is used to resolve symbols given their name and scope. Since this can be # an expensive process and often the same symbol appears multiple times in the # code, doxygen keeps a cache of pre-resolved symbols. If the cache is too small # doxygen will become slower. If the cache is too large, memory is wasted. The # cache size is given by this formula: 2^(16+LOOKUP_CACHE_SIZE). 
The valid range # is 0..9, the default is 0, corresponding to a cache size of 2^16=65536 # symbols. At the end of a run doxygen will report the cache usage and suggest # the optimal cache size from a speed point of view. # Minimum value: 0, maximum value: 9, default value: 0. LOOKUP_CACHE_SIZE = 0 #--------------------------------------------------------------------------- # Build related configuration options #--------------------------------------------------------------------------- # If the EXTRACT_ALL tag is set to YES, doxygen will assume all entities in # documentation are documented, even if no documentation was available. Private # class members and static file members will be hidden unless the # EXTRACT_PRIVATE respectively EXTRACT_STATIC tags are set to YES. # Note: This will also disable the warnings about undocumented members that are # normally produced when WARNINGS is set to YES. # The default value is: NO. EXTRACT_ALL = YES # If the EXTRACT_PRIVATE tag is set to YES, all private members of a class will # be included in the documentation. # The default value is: NO. EXTRACT_PRIVATE = NO # If the EXTRACT_PACKAGE tag is set to YES, all members with package or internal # scope will be included in the documentation. # The default value is: NO. EXTRACT_PACKAGE = NO # If the EXTRACT_STATIC tag is set to YES, all static members of a file will be # included in the documentation. # The default value is: NO. EXTRACT_STATIC = YES # If the EXTRACT_LOCAL_CLASSES tag is set to YES, classes (and structs) defined # locally in source files will be included in the documentation. If set to NO, # only classes defined in header files are included. Does not have any effect # for Java sources. # The default value is: YES. EXTRACT_LOCAL_CLASSES = YES # This flag is only useful for Objective-C code. If set to YES, local methods, # which are defined in the implementation section but not in the interface are # included in the documentation. 
If set to NO, only methods in the interface are # included. # The default value is: NO. EXTRACT_LOCAL_METHODS = NO # If this flag is set to YES, the members of anonymous namespaces will be # extracted and appear in the documentation as a namespace called # 'anonymous_namespace{file}', where file will be replaced with the base name of # the file that contains the anonymous namespace. By default anonymous namespace # are hidden. # The default value is: NO. EXTRACT_ANON_NSPACES = NO # If the HIDE_UNDOC_MEMBERS tag is set to YES, doxygen will hide all # undocumented members inside documented classes or files. If set to NO these # members will be included in the various overviews, but no documentation # section is generated. This option has no effect if EXTRACT_ALL is enabled. # The default value is: NO. HIDE_UNDOC_MEMBERS = NO # If the HIDE_UNDOC_CLASSES tag is set to YES, doxygen will hide all # undocumented classes that are normally visible in the class hierarchy. If set # to NO, these classes will be included in the various overviews. This option # has no effect if EXTRACT_ALL is enabled. # The default value is: NO. HIDE_UNDOC_CLASSES = NO # If the HIDE_FRIEND_COMPOUNDS tag is set to YES, doxygen will hide all friend # (class|struct|union) declarations. If set to NO, these declarations will be # included in the documentation. # The default value is: NO. HIDE_FRIEND_COMPOUNDS = NO # If the HIDE_IN_BODY_DOCS tag is set to YES, doxygen will hide any # documentation blocks found inside the body of a function. If set to NO, these # blocks will be appended to the function's detailed documentation block. # The default value is: NO. HIDE_IN_BODY_DOCS = NO # The INTERNAL_DOCS tag determines if documentation that is typed after a # \internal command is included. If the tag is set to NO then the documentation # will be excluded. Set it to YES to include the internal documentation. # The default value is: NO. 
INTERNAL_DOCS = NO # If the CASE_SENSE_NAMES tag is set to NO then doxygen will only generate file # names in lower-case letters. If set to YES, upper-case letters are also # allowed. This is useful if you have classes or files whose names only differ # in case and if your file system supports case sensitive file names. Windows # and Mac users are advised to set this option to NO. # The default value is: system dependent. CASE_SENSE_NAMES = NO # If the HIDE_SCOPE_NAMES tag is set to NO then doxygen will show members with # their full class and namespace scopes in the documentation. If set to YES, the # scope will be hidden. # The default value is: NO. HIDE_SCOPE_NAMES = NO # If the HIDE_COMPOUND_REFERENCE tag is set to NO (default) then doxygen will # append additional text to a page's title, such as Class Reference. If set to # YES the compound reference will be hidden. # The default value is: NO. HIDE_COMPOUND_REFERENCE= NO # If the SHOW_INCLUDE_FILES tag is set to YES then doxygen will put a list of # the files that are included by a file in the documentation of that file. # The default value is: YES. SHOW_INCLUDE_FILES = YES # If the SHOW_GROUPED_MEMB_INC tag is set to YES then Doxygen will add for each # grouped member an include statement to the documentation, telling the reader # which file to include in order to use the member. # The default value is: NO. SHOW_GROUPED_MEMB_INC = NO # If the FORCE_LOCAL_INCLUDES tag is set to YES then doxygen will list include # files with double quotes in the documentation rather than with sharp brackets. # The default value is: NO. FORCE_LOCAL_INCLUDES = NO # If the INLINE_INFO tag is set to YES then a tag [inline] is inserted in the # documentation for inline members. # The default value is: YES. INLINE_INFO = YES # If the SORT_MEMBER_DOCS tag is set to YES then doxygen will sort the # (detailed) documentation of file and class members alphabetically by member # name. 
If set to NO, the members will appear in declaration order. # The default value is: YES. SORT_MEMBER_DOCS = YES # If the SORT_BRIEF_DOCS tag is set to YES then doxygen will sort the brief # descriptions of file, namespace and class members alphabetically by member # name. If set to NO, the members will appear in declaration order. Note that # this will also influence the order of the classes in the class list. # The default value is: NO. SORT_BRIEF_DOCS = YES # If the SORT_MEMBERS_CTORS_1ST tag is set to YES then doxygen will sort the # (brief and detailed) documentation of class members so that constructors and # destructors are listed first. If set to NO the constructors will appear in the # respective orders defined by SORT_BRIEF_DOCS and SORT_MEMBER_DOCS. # Note: If SORT_BRIEF_DOCS is set to NO this option is ignored for sorting brief # member documentation. # Note: If SORT_MEMBER_DOCS is set to NO this option is ignored for sorting # detailed member documentation. # The default value is: NO. SORT_MEMBERS_CTORS_1ST = YES # If the SORT_GROUP_NAMES tag is set to YES then doxygen will sort the hierarchy # of group names into alphabetical order. If set to NO the group names will # appear in their defined order. # The default value is: NO. SORT_GROUP_NAMES = NO # If the SORT_BY_SCOPE_NAME tag is set to YES, the class list will be sorted by # fully-qualified names, including namespaces. If set to NO, the class list will # be sorted only by class name, not including the namespace part. # Note: This option is not very useful if HIDE_SCOPE_NAMES is set to YES. # Note: This option applies only to the class list, not to the alphabetical # list. # The default value is: NO. 
SORT_BY_SCOPE_NAME = YES # If the STRICT_PROTO_MATCHING option is enabled and doxygen fails to do proper # type resolution of all parameters of a function it will reject a match between # the prototype and the implementation of a member function even if there is # only one candidate or it is obvious which candidate to choose by doing a # simple string match. By disabling STRICT_PROTO_MATCHING doxygen will still # accept a match between prototype and implementation in such cases. # The default value is: NO. STRICT_PROTO_MATCHING = NO # The GENERATE_TODOLIST tag can be used to enable (YES) or disable (NO) the todo # list. This list is created by putting \todo commands in the documentation. # The default value is: YES. GENERATE_TODOLIST = YES # The GENERATE_TESTLIST tag can be used to enable (YES) or disable (NO) the test # list. This list is created by putting \test commands in the documentation. # The default value is: YES. GENERATE_TESTLIST = YES # The GENERATE_BUGLIST tag can be used to enable (YES) or disable (NO) the bug # list. This list is created by putting \bug commands in the documentation. # The default value is: YES. GENERATE_BUGLIST = YES # The GENERATE_DEPRECATEDLIST tag can be used to enable (YES) or disable (NO) # the deprecated list. This list is created by putting \deprecated commands in # the documentation. # The default value is: YES. GENERATE_DEPRECATEDLIST= YES # The ENABLED_SECTIONS tag can be used to enable conditional documentation # sections, marked by \if ... \endif and \cond # ... \endcond blocks. ENABLED_SECTIONS = # The MAX_INITIALIZER_LINES tag determines the maximum number of lines that the # initial value of a variable or macro / define can have for it to appear in the # documentation. If the initializer consists of more lines than specified here # it will be hidden. Use a value of 0 to hide initializers completely. 
The # appearance of the value of individual variables and macros / defines can be # controlled using \showinitializer or \hideinitializer command in the # documentation regardless of this setting. # Minimum value: 0, maximum value: 10000, default value: 30. MAX_INITIALIZER_LINES = 30 # Set the SHOW_USED_FILES tag to NO to disable the list of files generated at # the bottom of the documentation of classes and structs. If set to YES, the # list will mention the files that were used to generate the documentation. # The default value is: YES. SHOW_USED_FILES = YES # Set the SHOW_FILES tag to NO to disable the generation of the Files page. This # will remove the Files entry from the Quick Index and from the Folder Tree View # (if specified). # The default value is: YES. SHOW_FILES = YES # Set the SHOW_NAMESPACES tag to NO to disable the generation of the Namespaces # page. This will remove the Namespaces entry from the Quick Index and from the # Folder Tree View (if specified). # The default value is: YES. SHOW_NAMESPACES = YES # The FILE_VERSION_FILTER tag can be used to specify a program or script that # doxygen should invoke to get the current version for each file (typically from # the version control system). Doxygen will invoke the program by executing (via # popen()) the command command input-file, where command is the value of the # FILE_VERSION_FILTER tag, and input-file is the name of an input file provided # by doxygen. Whatever the program writes to standard output is used as the file # version. For an example see the documentation. FILE_VERSION_FILTER = # The LAYOUT_FILE tag can be used to specify a layout file which will be parsed # by doxygen. The layout file controls the global structure of the generated # output files in an output format independent way. To create the layout file # that represents doxygen's defaults, run doxygen with the -l option. 
You can # optionally specify a file name after the option, if omitted DoxygenLayout.xml # will be used as the name of the layout file. # # Note that if you run doxygen from a directory containing a file called # DoxygenLayout.xml, doxygen will parse it automatically even if the LAYOUT_FILE # tag is left empty. LAYOUT_FILE = # The CITE_BIB_FILES tag can be used to specify one or more bib files containing # the reference definitions. This must be a list of .bib files. The .bib # extension is automatically appended if omitted. This requires the bibtex tool # to be installed. See also http://en.wikipedia.org/wiki/BibTeX for more info. # For LaTeX the style of the bibliography can be controlled using # LATEX_BIB_STYLE. To use this feature you need bibtex and perl available in the # search path. See also \cite for info how to create references. CITE_BIB_FILES = #--------------------------------------------------------------------------- # Configuration options related to warning and progress messages #--------------------------------------------------------------------------- # The QUIET tag can be used to turn on/off the messages that are generated to # standard output by doxygen. If QUIET is set to YES this implies that the # messages are off. # The default value is: NO. QUIET = NO # The WARNINGS tag can be used to turn on/off the warning messages that are # generated to standard error (stderr) by doxygen. If WARNINGS is set to YES # this implies that the warnings are on. # # Tip: Turn warnings on while writing the documentation. # The default value is: YES. WARNINGS = YES # If the WARN_IF_UNDOCUMENTED tag is set to YES then doxygen will generate # warnings for undocumented members. If EXTRACT_ALL is set to YES then this flag # will automatically be disabled. # The default value is: YES. 
WARN_IF_UNDOCUMENTED = YES # If the WARN_IF_DOC_ERROR tag is set to YES, doxygen will generate warnings for # potential errors in the documentation, such as not documenting some parameters # in a documented function, or documenting parameters that don't exist or using # markup commands wrongly. # The default value is: YES. WARN_IF_DOC_ERROR = YES # This WARN_NO_PARAMDOC option can be enabled to get warnings for functions that # are documented, but have no documentation for their parameters or return # value. If set to NO, doxygen will only warn about wrong or incomplete # parameter documentation, but not about the absence of documentation. # The default value is: NO. WARN_NO_PARAMDOC = NO # The WARN_FORMAT tag determines the format of the warning messages that doxygen # can produce. The string should contain the $file, $line, and $text tags, which # will be replaced by the file and line number from which the warning originated # and the warning text. Optionally the format may contain $version, which will # be replaced by the version of the file (if it could be obtained via # FILE_VERSION_FILTER) # The default value is: $file:$line: $text. WARN_FORMAT = "$file:$line: $text " # The WARN_LOGFILE tag can be used to specify a file to which warning and error # messages should be written. If left blank the output is written to standard # error (stderr). WARN_LOGFILE = #--------------------------------------------------------------------------- # Configuration options related to the input files #--------------------------------------------------------------------------- # The INPUT tag is used to specify the files and/or directories that contain # documented source files. You may enter file names like myfile.cpp or # directories like /usr/src/myproject. Separate the files or directories with # spaces. # Note: If this tag is empty the current directory is searched. 
INPUT = main.dox \ query_database.dox \ blackboard.dox \ artifact_catalog.dox \ insert_and_update_database.dox \ communications.dox \ datasources.dox \ os_accounts.dox \ schema/schema_list.dox \ schema/db_schema_8_6.dox \ schema/db_schema_9_0.dox \ schema/db_schema_9_1.dox \ ../src # This tag can be used to specify the character encoding of the source files # that doxygen parses. Internally doxygen uses the UTF-8 encoding. Doxygen uses # libiconv (or the iconv built into libc) for the transcoding. See the libiconv # documentation (see: http://www.gnu.org/software/libiconv) for the list of # possible encodings. # The default value is: UTF-8. INPUT_ENCODING = UTF-8 # If the value of the INPUT tag contains directories, you can use the # FILE_PATTERNS tag to specify one or more wildcard patterns (like *.cpp and # *.h) to filter out the source-files in the directories. If left blank the # following patterns are tested:*.c, *.cc, *.cxx, *.cpp, *.c++, *.java, *.ii, # *.ixx, *.ipp, *.i++, *.inl, *.idl, *.ddl, *.odl, *.h, *.hh, *.hxx, *.hpp, # *.h++, *.cs, *.d, *.php, *.php4, *.php5, *.phtml, *.inc, *.m, *.markdown, # *.md, *.mm, *.dox, *.py, *.f90, *.f, *.for, *.tcl, *.vhd, *.vhdl, *.ucf, # *.qsf, *.as and *.js. FILE_PATTERNS = *.java # The RECURSIVE tag can be used to specify whether or not subdirectories should # be searched for input files as well. # The default value is: NO. RECURSIVE = YES # The EXCLUDE tag can be used to specify files and/or directories that should be # excluded from the INPUT source files. This way you can easily exclude a # subdirectory from a directory tree whose root is specified with the INPUT tag. # # Note that relative paths are relative to the directory from which doxygen is # run. EXCLUDE = # The EXCLUDE_SYMLINKS tag can be used to select whether or not files or # directories that are symbolic links (a Unix file system feature) are excluded # from the input. # The default value is: NO. 
EXCLUDE_SYMLINKS = NO # If the value of the INPUT tag contains directories, you can use the # EXCLUDE_PATTERNS tag to specify one or more wildcard patterns to exclude # certain files from those directories. # # Note that the wildcards are matched against the file with absolute path, so to # exclude all test directories for example use the pattern */test/* EXCLUDE_PATTERNS = # The EXCLUDE_SYMBOLS tag can be used to specify one or more symbol names # (namespaces, classes, functions, etc.) that should be excluded from the # output. The symbol name can be a fully qualified name, a word, or if the # wildcard * is used, a substring. Examples: ANamespace, AClass, # AClass::ANamespace, ANamespace::*Test # # Note that the wildcards are matched against the file with absolute path, so to # exclude all test directories use the pattern */test/* EXCLUDE_SYMBOLS = # The EXAMPLE_PATH tag can be used to specify one or more files or directories # that contain example code fragments that are included (see the \include # command). EXAMPLE_PATH = # If the value of the EXAMPLE_PATH tag contains directories, you can use the # EXAMPLE_PATTERNS tag to specify one or more wildcard pattern (like *.cpp and # *.h) to filter out the source-files in the directories. If left blank all # files are included. EXAMPLE_PATTERNS = # If the EXAMPLE_RECURSIVE tag is set to YES then subdirectories will be # searched for input files to be used with the \include or \dontinclude commands # irrespective of the value of the RECURSIVE tag. # The default value is: NO. EXAMPLE_RECURSIVE = NO # The IMAGE_PATH tag can be used to specify one or more files or directories # that contain images that are to be included in the documentation (see the # \image command). IMAGE_PATH = images/ # The INPUT_FILTER tag can be used to specify a program that doxygen should # invoke to filter for each input file. 
Doxygen will invoke the filter program # by executing (via popen()) the command: # # <filter> <input-file> # # where <filter> is the value of the INPUT_FILTER tag, and <input-file> is the # name of an input file. Doxygen will then use the output that the filter # program writes to standard output. If FILTER_PATTERNS is specified, this tag # will be ignored. # # Note that the filter must not add or remove lines; it is applied before the # code is scanned, but not when the output code is generated. If lines are added # or removed, the anchors will not be placed correctly. INPUT_FILTER = # The FILTER_PATTERNS tag can be used to specify filters on a per file pattern # basis. Doxygen will compare the file name with each pattern and apply the # filter if there is a match. The filters are a list of the form: pattern=filter # (like *.cpp=my_cpp_filter). See INPUT_FILTER for further information on how # filters are used. If the FILTER_PATTERNS tag is empty or if none of the # patterns match the file name, INPUT_FILTER is applied. FILTER_PATTERNS = # If the FILTER_SOURCE_FILES tag is set to YES, the input filter (if set using # INPUT_FILTER) will also be used to filter the input files that are used for # producing the source files to browse (i.e. when SOURCE_BROWSER is set to YES). # The default value is: NO. FILTER_SOURCE_FILES = NO # The FILTER_SOURCE_PATTERNS tag can be used to specify source filters per file # pattern. A pattern will override the setting for FILTER_PATTERN (if any) and # it is also possible to disable source filtering for a specific pattern using # *.ext= (so without naming a filter). # This tag requires that the tag FILTER_SOURCE_FILES is set to YES. FILTER_SOURCE_PATTERNS = # If the USE_MDFILE_AS_MAINPAGE tag refers to the name of a markdown file that # is part of the input, its contents will be placed on the main page # (index.html). This can be useful if you have a project on for instance GitHub # and want to reuse the introduction page also for the doxygen output. 
USE_MDFILE_AS_MAINPAGE = #--------------------------------------------------------------------------- # Configuration options related to source browsing #--------------------------------------------------------------------------- # If the SOURCE_BROWSER tag is set to YES then a list of source files will be # generated. Documented entities will be cross-referenced with these sources. # # Note: To get rid of all source code in the generated output, make sure that # also VERBATIM_HEADERS is set to NO. # The default value is: NO. SOURCE_BROWSER = YES # Setting the INLINE_SOURCES tag to YES will include the body of functions, # classes and enums directly into the documentation. # The default value is: NO. INLINE_SOURCES = NO # Setting the STRIP_CODE_COMMENTS tag to YES will instruct doxygen to hide any # special comment blocks from generated source code fragments. Normal C, C++ and # Fortran comments will always remain visible. # The default value is: YES. STRIP_CODE_COMMENTS = YES # If the REFERENCED_BY_RELATION tag is set to YES then for each documented # function all documented functions referencing it will be listed. # The default value is: NO. REFERENCED_BY_RELATION = YES # If the REFERENCES_RELATION tag is set to YES then for each documented function # all documented entities called/used by that function will be listed. # The default value is: NO. REFERENCES_RELATION = YES # If the REFERENCES_LINK_SOURCE tag is set to YES and SOURCE_BROWSER tag is set # to YES then the hyperlinks from functions in REFERENCES_RELATION and # REFERENCED_BY_RELATION lists will link to the source code. Otherwise they will # link to the documentation. # The default value is: YES. REFERENCES_LINK_SOURCE = YES # If SOURCE_TOOLTIPS is enabled (the default) then hovering a hyperlink in the # source code will show a tooltip with additional information such as prototype, # brief description and links to the definition and documentation. 
Since this # will make the HTML file larger and loading of large files a bit slower, you # can opt to disable this feature. # The default value is: YES. # This tag requires that the tag SOURCE_BROWSER is set to YES. SOURCE_TOOLTIPS = YES # If the USE_HTAGS tag is set to YES then the references to source code will # point to the HTML generated by the htags(1) tool instead of doxygen built-in # source browser. The htags tool is part of GNU's global source tagging system # (see http://www.gnu.org/software/global/global.html). You will need version # 4.8.6 or higher. # # To use it do the following: # - Install the latest version of global # - Enable SOURCE_BROWSER and USE_HTAGS in the config file # - Make sure the INPUT points to the root of the source tree # - Run doxygen as normal # # Doxygen will invoke htags (and that will in turn invoke gtags), so these # tools must be available from the command line (i.e. in the search path). # # The result: instead of the source browser generated by doxygen, the links to # source code will now point to the output of htags. # The default value is: NO. # This tag requires that the tag SOURCE_BROWSER is set to YES. USE_HTAGS = NO # If the VERBATIM_HEADERS tag is set the YES then doxygen will generate a # verbatim copy of the header file for each class for which an include is # specified. Set to NO to disable this. # See also: Section \class. # The default value is: YES. VERBATIM_HEADERS = YES # If the CLANG_ASSISTED_PARSING tag is set to YES then doxygen will use the # clang parser (see: http://clang.llvm.org/) for more accurate parsing at the # cost of reduced performance. This can be particularly helpful with template # rich C++ code for which doxygen's built-in parser lacks the necessary type # information. # Note: The availability of this option depends on whether or not doxygen was # compiled with the --with-libclang option. # The default value is: NO. 
CLANG_ASSISTED_PARSING = NO # If clang assisted parsing is enabled you can provide the compiler with command # line options that you would normally use when invoking the compiler. Note that # the include paths will already be set by doxygen for the files and directories # specified with INPUT and INCLUDE_PATH. # This tag requires that the tag CLANG_ASSISTED_PARSING is set to YES. CLANG_OPTIONS = #--------------------------------------------------------------------------- # Configuration options related to the alphabetical class index #--------------------------------------------------------------------------- # If the ALPHABETICAL_INDEX tag is set to YES, an alphabetical index of all # compounds will be generated. Enable this if the project contains a lot of # classes, structs, unions or interfaces. # The default value is: YES. ALPHABETICAL_INDEX = YES # The COLS_IN_ALPHA_INDEX tag can be used to specify the number of columns in # which the alphabetical index list will be split. # Minimum value: 1, maximum value: 20, default value: 5. # This tag requires that the tag ALPHABETICAL_INDEX is set to YES. COLS_IN_ALPHA_INDEX = 5 # In case all classes in a project start with a common prefix, all classes will # be put under the same header in the alphabetical index. The IGNORE_PREFIX tag # can be used to specify a prefix (or a list of prefixes) that should be ignored # while generating the index headers. # This tag requires that the tag ALPHABETICAL_INDEX is set to YES. IGNORE_PREFIX = #--------------------------------------------------------------------------- # Configuration options related to the HTML output #--------------------------------------------------------------------------- # If the GENERATE_HTML tag is set to YES, doxygen will generate HTML output # The default value is: YES. GENERATE_HTML = YES # The HTML_OUTPUT tag is used to specify where the HTML docs will be put. If a # relative path is entered the value of OUTPUT_DIRECTORY will be put in front of # it. 
# The default directory is: html. # This tag requires that the tag GENERATE_HTML is set to YES. # NOTE: This is updated by the release-unix.pl script HTML_OUTPUT = jni-docs/4.11.1/ # The HTML_FILE_EXTENSION tag can be used to specify the file extension for each # generated HTML page (for example: .htm, .php, .asp). # The default value is: .html. # This tag requires that the tag GENERATE_HTML is set to YES. HTML_FILE_EXTENSION = .html # The HTML_HEADER tag can be used to specify a user-defined HTML header file for # each generated HTML page. If the tag is left blank doxygen will generate a # standard header. # # To get valid HTML the header file that includes any scripts and style sheets # that doxygen needs, which is dependent on the configuration options used (e.g. # the setting GENERATE_TREEVIEW). It is highly recommended to start with a # default header using # doxygen -w html new_header.html new_footer.html new_stylesheet.css # YourConfigFile # and then modify the file new_header.html. See also section "Doxygen usage" # for information on how to generate the default header that doxygen normally # uses. # Note: The header is subject to change so you typically have to regenerate the # default header when upgrading to a newer version of doxygen. For a description # of the possible markers and block names see the documentation. # This tag requires that the tag GENERATE_HTML is set to YES. HTML_HEADER = # The HTML_FOOTER tag can be used to specify a user-defined HTML footer for each # generated HTML page. If the tag is left blank doxygen will generate a standard # footer. See HTML_HEADER for more information on how to generate a default # footer and what special commands can be used inside the footer. See also # section "Doxygen usage" for information on how to generate the default footer # that doxygen normally uses. # This tag requires that the tag GENERATE_HTML is set to YES. 
HTML_FOOTER = footer.html # The HTML_STYLESHEET tag can be used to specify a user-defined cascading style # sheet that is used by each HTML page. It can be used to fine-tune the look of # the HTML output. If left blank doxygen will generate a default style sheet. # See also section "Doxygen usage" for information on how to generate the style # sheet that doxygen normally uses. # Note: It is recommended to use HTML_EXTRA_STYLESHEET instead of this tag, as # it is more robust and this tag (HTML_STYLESHEET) will in the future become # obsolete. # This tag requires that the tag GENERATE_HTML is set to YES. HTML_STYLESHEET = # The HTML_EXTRA_STYLESHEET tag can be used to specify additional user-defined # cascading style sheets that are included after the standard style sheets # created by doxygen. Using this option one can overrule certain style aspects. # This is preferred over using HTML_STYLESHEET since it does not replace the # standard style sheet and is therefore more robust against future updates. # Doxygen will copy the style sheet files to the output directory. # Note: The order of the extra style sheet files is of importance (e.g. the last # style sheet in the list overrules the setting of the previous ones in the # list). For an example see the documentation. # This tag requires that the tag GENERATE_HTML is set to YES. HTML_EXTRA_STYLESHEET = # The HTML_EXTRA_FILES tag can be used to specify one or more extra images or # other source files which should be copied to the HTML output directory. Note # that these files will be copied to the base HTML output directory. Use the # $relpath^ marker in the HTML_HEADER and/or HTML_FOOTER files to load these # files. In the HTML_STYLESHEET file, use the file name only. Also note that the # files will be copied as-is; there are no commands or markers available. # This tag requires that the tag GENERATE_HTML is set to YES. HTML_EXTRA_FILES = # The HTML_COLORSTYLE_HUE tag controls the color of the HTML output. 
Doxygen # will adjust the colors in the style sheet and background images according to # this color. Hue is specified as an angle on a colorwheel, see # http://en.wikipedia.org/wiki/Hue for more information. For instance the value # 0 represents red, 60 is yellow, 120 is green, 180 is cyan, 240 is blue, 300 # purple, and 360 is red again. # Minimum value: 0, maximum value: 359, default value: 220. # This tag requires that the tag GENERATE_HTML is set to YES. HTML_COLORSTYLE_HUE = 220 # The HTML_COLORSTYLE_SAT tag controls the purity (or saturation) of the colors # in the HTML output. For a value of 0 the output will use grayscales only. A # value of 255 will produce the most vivid colors. # Minimum value: 0, maximum value: 255, default value: 100. # This tag requires that the tag GENERATE_HTML is set to YES. HTML_COLORSTYLE_SAT = 100 # The HTML_COLORSTYLE_GAMMA tag controls the gamma correction applied to the # luminance component of the colors in the HTML output. Values below 100 # gradually make the output lighter, whereas values above 100 make the output # darker. The value divided by 100 is the actual gamma applied, so 80 represents # a gamma of 0.8, The value 220 represents a gamma of 2.2, and 100 does not # change the gamma. # Minimum value: 40, maximum value: 240, default value: 80. # This tag requires that the tag GENERATE_HTML is set to YES. HTML_COLORSTYLE_GAMMA = 80 # If the HTML_TIMESTAMP tag is set to YES then the footer of each generated HTML # page will contain the date and time when the page was generated. Setting this # to NO can help when comparing the output of multiple runs. # The default value is: YES. # This tag requires that the tag GENERATE_HTML is set to YES. HTML_TIMESTAMP = YES # If the HTML_DYNAMIC_SECTIONS tag is set to YES then the generated HTML # documentation will contain sections that can be hidden and shown after the # page has loaded. # The default value is: NO. # This tag requires that the tag GENERATE_HTML is set to YES. 
HTML_DYNAMIC_SECTIONS = YES # With HTML_INDEX_NUM_ENTRIES one can control the preferred number of entries # shown in the various tree structured indices initially; the user can expand # and collapse entries dynamically later on. Doxygen will expand the tree to # such a level that at most the specified number of entries are visible (unless # a fully collapsed tree already exceeds this amount). So setting the number of # entries 1 will produce a full collapsed tree by default. 0 is a special value # representing an infinite number of entries and will result in a full expanded # tree by default. # Minimum value: 0, maximum value: 9999, default value: 100. # This tag requires that the tag GENERATE_HTML is set to YES. HTML_INDEX_NUM_ENTRIES = 100 # If the GENERATE_DOCSET tag is set to YES, additional index files will be # generated that can be used as input for Apple's Xcode 3 integrated development # environment (see: http://developer.apple.com/tools/xcode/), introduced with # OSX 10.5 (Leopard). To create a documentation set, doxygen will generate a # Makefile in the HTML output directory. Running make will produce the docset in # that directory and running make install will install the docset in # ~/Library/Developer/Shared/Documentation/DocSets so that Xcode will find it at # startup. See http://developer.apple.com/tools/creatingdocsetswithdoxygen.html # for more information. # The default value is: NO. # This tag requires that the tag GENERATE_HTML is set to YES. GENERATE_DOCSET = YES # This tag determines the name of the docset feed. A documentation feed provides # an umbrella under which multiple documentation sets from a single provider # (such as a company or product suite) can be grouped. # The default value is: Doxygen generated docs. # This tag requires that the tag GENERATE_DOCSET is set to YES. DOCSET_FEEDNAME = "Doxygen docs" # This tag specifies a string that should uniquely identify the documentation # set bundle. 
This should be a reverse domain-name style string, e.g. # com.mycompany.MyDocSet. Doxygen will append .docset to the name. # The default value is: org.doxygen.Project. # This tag requires that the tag GENERATE_DOCSET is set to YES. DOCSET_BUNDLE_ID = org.doxygen.Doxygen # The DOCSET_PUBLISHER_ID tag specifies a string that should uniquely identify # the documentation publisher. This should be a reverse domain-name style # string, e.g. com.mycompany.MyDocSet.documentation. # The default value is: org.doxygen.Publisher. # This tag requires that the tag GENERATE_DOCSET is set to YES. DOCSET_PUBLISHER_ID = org.doxygen.Publisher # The DOCSET_PUBLISHER_NAME tag identifies the documentation publisher. # The default value is: Publisher. # This tag requires that the tag GENERATE_DOCSET is set to YES. DOCSET_PUBLISHER_NAME = Publisher # If the GENERATE_HTMLHELP tag is set to YES then doxygen generates three # additional HTML index files: index.hhp, index.hhc, and index.hhk. The # index.hhp is a project file that can be read by Microsoft's HTML Help Workshop # (see: http://www.microsoft.com/en-us/download/details.aspx?id=21138) on # Windows. # # The HTML Help Workshop contains a compiler that can convert all HTML output # generated by doxygen into a single compiled HTML file (.chm). Compiled HTML # files are now used as the Windows 98 help format, and will replace the old # Windows help format (.hlp) on all Windows platforms in the future. Compressed # HTML files also contain an index, a table of contents, and you can search for # words in the documentation. The HTML workshop also contains a viewer for # compressed HTML files. # The default value is: NO. # This tag requires that the tag GENERATE_HTML is set to YES. GENERATE_HTMLHELP = NO # The CHM_FILE tag can be used to specify the file name of the resulting .chm # file. You can add a path in front of the file if the result should not be # written to the html output directory. 
# This tag requires that the tag GENERATE_HTMLHELP is set to YES. CHM_FILE = # The HHC_LOCATION tag can be used to specify the location (absolute path # including file name) of the HTML help compiler (hhc.exe). If non-empty, # doxygen will try to run the HTML help compiler on the generated index.hhp. # The file has to be specified with full path. # This tag requires that the tag GENERATE_HTMLHELP is set to YES. HHC_LOCATION = # The GENERATE_CHI flag controls if a separate .chi index file is generated # (YES) or that it should be included in the master .chm file (NO). # The default value is: NO. # This tag requires that the tag GENERATE_HTMLHELP is set to YES. GENERATE_CHI = NO # The CHM_INDEX_ENCODING is used to encode HtmlHelp index (hhk), content (hhc) # and project file content. # This tag requires that the tag GENERATE_HTMLHELP is set to YES. CHM_INDEX_ENCODING = # The BINARY_TOC flag controls whether a binary table of contents is generated # (YES) or a normal table of contents (NO) in the .chm file. Furthermore it # enables the Previous and Next buttons. # The default value is: NO. # This tag requires that the tag GENERATE_HTMLHELP is set to YES. BINARY_TOC = NO # The TOC_EXPAND flag can be set to YES to add extra items for group members to # the table of contents of the HTML help documentation and to the tree view. # The default value is: NO. # This tag requires that the tag GENERATE_HTMLHELP is set to YES. TOC_EXPAND = NO # If the GENERATE_QHP tag is set to YES and both QHP_NAMESPACE and # QHP_VIRTUAL_FOLDER are set, an additional index file will be generated that # can be used as input for Qt's qhelpgenerator to generate a Qt Compressed Help # (.qch) of the generated HTML documentation. # The default value is: NO. # This tag requires that the tag GENERATE_HTML is set to YES. GENERATE_QHP = NO # If the QHG_LOCATION tag is specified, the QCH_FILE tag can be used to specify # the file name of the resulting .qch file. 
The path specified is relative to # the HTML output folder. # This tag requires that the tag GENERATE_QHP is set to YES. QCH_FILE = # The QHP_NAMESPACE tag specifies the namespace to use when generating Qt Help # Project output. For more information please see Qt Help Project / Namespace # (see: http://qt-project.org/doc/qt-4.8/qthelpproject.html#namespace). # The default value is: org.doxygen.Project. # This tag requires that the tag GENERATE_QHP is set to YES. QHP_NAMESPACE = org.doxygen.Project # The QHP_VIRTUAL_FOLDER tag specifies the namespace to use when generating Qt # Help Project output. For more information please see Qt Help Project / Virtual # Folders (see: http://qt-project.org/doc/qt-4.8/qthelpproject.html#virtual- # folders). # The default value is: doc. # This tag requires that the tag GENERATE_QHP is set to YES. QHP_VIRTUAL_FOLDER = doc # If the QHP_CUST_FILTER_NAME tag is set, it specifies the name of a custom # filter to add. For more information please see Qt Help Project / Custom # Filters (see: http://qt-project.org/doc/qt-4.8/qthelpproject.html#custom- # filters). # This tag requires that the tag GENERATE_QHP is set to YES. QHP_CUST_FILTER_NAME = # The QHP_CUST_FILTER_ATTRS tag specifies the list of the attributes of the # custom filter to add. For more information please see Qt Help Project / Custom # Filters (see: http://qt-project.org/doc/qt-4.8/qthelpproject.html#custom- # filters). # This tag requires that the tag GENERATE_QHP is set to YES. QHP_CUST_FILTER_ATTRS = # The QHP_SECT_FILTER_ATTRS tag specifies the list of the attributes this # project's filter section matches. Qt Help Project / Filter Attributes (see: # http://qt-project.org/doc/qt-4.8/qthelpproject.html#filter-attributes). # This tag requires that the tag GENERATE_QHP is set to YES. QHP_SECT_FILTER_ATTRS = # The QHG_LOCATION tag can be used to specify the location of Qt's # qhelpgenerator. If non-empty doxygen will try to run qhelpgenerator on the # generated .qhp file. 
# This tag requires that the tag GENERATE_QHP is set to YES. QHG_LOCATION = # If the GENERATE_ECLIPSEHELP tag is set to YES, additional index files will be # generated, together with the HTML files, they form an Eclipse help plugin. To # install this plugin and make it available under the help contents menu in # Eclipse, the contents of the directory containing the HTML and XML files needs # to be copied into the plugins directory of eclipse. The name of the directory # within the plugins directory should be the same as the ECLIPSE_DOC_ID value. # After copying Eclipse needs to be restarted before the help appears. # The default value is: NO. # This tag requires that the tag GENERATE_HTML is set to YES. GENERATE_ECLIPSEHELP = NO # A unique identifier for the Eclipse help plugin. When installing the plugin # the directory name containing the HTML and XML files should also have this # name. Each documentation set should have its own identifier. # The default value is: org.doxygen.Project. # This tag requires that the tag GENERATE_ECLIPSEHELP is set to YES. ECLIPSE_DOC_ID = org.doxygen.Project # If you want full control over the layout of the generated HTML pages it might # be necessary to disable the index and replace it with your own. The # DISABLE_INDEX tag can be used to turn on/off the condensed index (tabs) at top # of each HTML page. A value of NO enables the index and the value YES disables # it. Since the tabs in the index contain the same information as the navigation # tree, you can set this option to YES if you also set GENERATE_TREEVIEW to YES. # The default value is: NO. # This tag requires that the tag GENERATE_HTML is set to YES. DISABLE_INDEX = NO # The GENERATE_TREEVIEW tag is used to specify whether a tree-like index # structure should be generated to display hierarchical information. If the tag # value is set to YES, a side panel will be generated containing a tree-like # index structure (just like the one that is generated for HTML Help). 
For this # to work a browser that supports JavaScript, DHTML, CSS and frames is required # (i.e. any modern browser). Windows users are probably better off using the # HTML help feature. Via custom style sheets (see HTML_EXTRA_STYLESHEET) one can # further fine-tune the look of the index. As an example, the default style # sheet generated by doxygen has an example that shows how to put an image at # the root of the tree instead of the PROJECT_NAME. Since the tree basically has # the same information as the tab index, you could consider setting # DISABLE_INDEX to YES when enabling this option. # The default value is: NO. # This tag requires that the tag GENERATE_HTML is set to YES. GENERATE_TREEVIEW = YES # The ENUM_VALUES_PER_LINE tag can be used to set the number of enum values that # doxygen will group on one line in the generated HTML documentation. # # Note that a value of 0 will completely suppress the enum values from appearing # in the overview section. # Minimum value: 0, maximum value: 20, default value: 4. # This tag requires that the tag GENERATE_HTML is set to YES. ENUM_VALUES_PER_LINE = 4 # If the treeview is enabled (see GENERATE_TREEVIEW) then this tag can be used # to set the initial width (in pixels) of the frame in which the tree is shown. # Minimum value: 0, maximum value: 1500, default value: 250. # This tag requires that the tag GENERATE_HTML is set to YES. TREEVIEW_WIDTH = 250 # If the EXT_LINKS_IN_WINDOW option is set to YES, doxygen will open links to # external symbols imported via tag files in a separate window. # The default value is: NO. # This tag requires that the tag GENERATE_HTML is set to YES. EXT_LINKS_IN_WINDOW = YES # Use this tag to change the font size of LaTeX formulas included as images in # the HTML documentation. When you change the font size after a successful # doxygen run you need to manually remove any form_*.png images from the HTML # output directory to force them to be regenerated. 
# Minimum value: 8, maximum value: 50, default value: 10. # This tag requires that the tag GENERATE_HTML is set to YES. FORMULA_FONTSIZE = 10 # Use the FORMULA_TRANSPARENT tag to determine whether or not the images # generated for formulas are transparent PNGs. Transparent PNGs are not # supported properly for IE 6.0, but are supported on all modern browsers. # # Note that when changing this option you need to delete any form_*.png files in # the HTML output directory before the changes have effect. # The default value is: YES. # This tag requires that the tag GENERATE_HTML is set to YES. FORMULA_TRANSPARENT = YES # Enable the USE_MATHJAX option to render LaTeX formulas using MathJax (see # http://www.mathjax.org) which uses client side Javascript for the rendering # instead of using pre-rendered bitmaps. Use this if you do not have LaTeX # installed or if you want the formulas to look prettier in the HTML output. When # enabled you may also need to install MathJax separately and configure the path # to it using the MATHJAX_RELPATH option. # The default value is: NO. # This tag requires that the tag GENERATE_HTML is set to YES. USE_MATHJAX = NO # When MathJax is enabled you can set the default output format to be used for # the MathJax output. See the MathJax site (see: # http://docs.mathjax.org/en/latest/output.html) for more details. # Possible values are: HTML-CSS (which is slower, but has the best # compatibility), NativeMML (i.e. MathML) and SVG. # The default value is: HTML-CSS. # This tag requires that the tag USE_MATHJAX is set to YES. MATHJAX_FORMAT = HTML-CSS # When MathJax is enabled you need to specify the location relative to the HTML # output directory using the MATHJAX_RELPATH option. The destination directory # should contain the MathJax.js script. For instance, if the mathjax directory # is located at the same level as the HTML output directory, then # MATHJAX_RELPATH should be ../mathjax. 
The default value points to the MathJax # Content Delivery Network so you can quickly see the result without installing # MathJax. However, it is strongly recommended to install a local copy of # MathJax from http://www.mathjax.org before deployment. # The default value is: http://cdn.mathjax.org/mathjax/latest. # This tag requires that the tag USE_MATHJAX is set to YES. MATHJAX_RELPATH = http://cdn.mathjax.org/mathjax/latest # The MATHJAX_EXTENSIONS tag can be used to specify one or more MathJax # extension names that should be enabled during MathJax rendering. For example # MATHJAX_EXTENSIONS = TeX/AMSmath TeX/AMSsymbols # This tag requires that the tag USE_MATHJAX is set to YES. MATHJAX_EXTENSIONS = # The MATHJAX_CODEFILE tag can be used to specify a file with javascript pieces # of code that will be used on startup of the MathJax code. See the MathJax site # (see: http://docs.mathjax.org/en/latest/output.html) for more details. For an # example see the documentation. # This tag requires that the tag USE_MATHJAX is set to YES. MATHJAX_CODEFILE = # When the SEARCHENGINE tag is enabled doxygen will generate a search box for # the HTML output. The underlying search engine uses javascript and DHTML and # should work on any modern browser. Note that when using HTML help # (GENERATE_HTMLHELP), Qt help (GENERATE_QHP), or docsets (GENERATE_DOCSET) # there is already a search function so this one should typically be disabled. # For large projects the javascript based search engine can be slow, then # enabling SERVER_BASED_SEARCH may provide a better solution. It is possible to # search using the keyboard; to jump to the search box use <access key> + S # (what the <access key> is depends on the OS and browser, but it is typically # , /