polygraph-4.3.2/0000755000175000017500000000000011546445454013137 5ustar testertesterpolygraph-4.3.2/tools/0000755000175000017500000000000011546445454014277 5ustar testertesterpolygraph-4.3.2/tools/pmix2-ips.man0000644000175000017500000000113711336340427016615 0ustar testertester.\" DO NOT MODIFY THIS FILE! It was generated by help2man 1.36. .TH POLYGRAPH-PMIX2-IPS "1" "February 2010" "polygraph-pmix2-ips - Web Polygraph" "User Commands" .SH NAME polygraph-pmix2-ips \- PolyMix\-2 address calculator .SH SYNOPSIS .B pmix2-ips.pl \fI \fR[\fIcltId|srvId\fR] .SH DESCRIPTION Prints IP addresses for robots and servers given request rate and bench id. .PP .SH COPYRIGHT Copyright \(co 2003-2006 The Measurement Factory, Inc. .SH "SEE ALSO" .BR polygraph (7) \- general information and a list of programs .B \%http://www.web-polygraph.org/ \- project web site polygraph-4.3.2/tools/webaxe4-ips.h2m0000644000175000017500000000013211335553726017031 0ustar testertester[DESCRIPTION] Prints IP addresses for robots and servers given request rate and bench id. polygraph-4.3.2/tools/polygraph.supp0000644000175000017500000000026411326435545017213 0ustar testertester# Valgrind supression file for polyclt and polysrv. # # Usage: valgrind --suppressions=polygraph.supp polyclt ... { OpenSSL Memcheck:Addr8 fun:AES_cbc_encrypt obj:* } polygraph-4.3.2/tools/msl_test_linux.c0000644000175000017500000001113011546440450017477 0ustar testertester /* Web Polygraph http://www.web-polygraph.org/ * Copyright 2003-2011 The Measurement Factory * Licensed under the Apache License, Version 2.0 */ /* this file should only be compiled as a part of msl_test.c */ #ifndef COMPILING_MSL_TEST #error make msl_test instead #else /* * Linux-dependent stuff for msl_test */ #include #include #include #define TCPSYNFLAG(tcphdr) (tcphdr->syn) #define TCPFINFLAG(tcphdr) (tcphdr->fin) #define TCPPUSHFLAG(tcphdr) (tcphdr->psh) #define TCPACKFLAG(tcphdr) (tcphdr->ack) #define TCPRSTFLAG(tcphdr) (tcphdr->rst) #define TCPSOURCEPORT(tcphdr) (tcphdr->source) #define TCPDESTPORT(tcphdr) (tcphdr->dest) static int raw_open(char *interface); static void raw_close(void); static void msl_probe(void *, int); static int deleteRoute(struct sockaddr_in); static int addRoute(struct sockaddr_in); static void *find_our_packet(int *len); const int RECV_BUF_SIZE = 8192; static int raw_sock = -1; static struct sockaddr_ll From; int addRoute(struct sockaddr_in dst) { int route; struct rtentry rt; struct sockaddr *rt_dst = NULL; if ((route = socket(AF_INET, SOCK_DGRAM, 0)) < 0) { fprintf(stderr, "socket failed with: %s\n", strerror(errno)); return (-1); } rt_dst = (struct sockaddr *) &dst; rt.rt_dst = *rt_dst; rt.rt_dev = (char *) malloc(sizeof(char) * 3); strcpy(rt.rt_dev, "lo"); rt.rt_metric = 0; rt.rt_flags = RTF_UP | RTF_STATIC | RTF_HOST; if (ioctl(route, SIOCADDRT, &rt) < 0) { fprintf(stderr, "ioctl failed with: %s\n", strerror(errno)); close(route); return (-1); } close(route); return (1); } int deleteRoute(struct sockaddr_in dst) { int route; struct rtentry rt; struct sockaddr *rt_dst = NULL; if ((route = socket(AF_INET, SOCK_DGRAM, 0)) < 0) { fprintf(stderr, "socket failed with: %s\n", strerror(errno)); return (-1); } rt_dst = (struct sockaddr *) &dst; rt.rt_dst = *rt_dst; rt.rt_dev = (char *) malloc(sizeof(char) * 3); strcpy(rt.rt_dev, "lo"); rt.rt_metric = 0; rt.rt_flags = RTF_UP | RTF_STATIC | RTF_HOST; if (ioctl(route, SIOCDELRT, &rt) < 0) { close(route); return (-1); } close(route); return (1); } void msl_probe(void *frame, int framelen) { char 
buf[RECV_BUF_SIZE]; struct timeval timeout; struct timeval start; struct timeval last; struct timeval now; int nfds = raw_sock + 1; gettimeofday(&start, NULL); last = start; fprintf(stderr, "Probing"); for (;;) { fd_set readfds; gettimeofday(&now, NULL); if (now.tv_sec - last.tv_sec > 0) { FD_ZERO(&readfds); FD_SET(raw_sock, &readfds); fprintf(stderr, "."); if (sendto(raw_sock, frame, framelen, 0, (struct sockaddr *) &From, sizeof(From)) < 0) { perror("sendto: raw_sock"); return; } last = now; } timeout.tv_sec = 1; timeout.tv_usec = 0; if (select(nfds, &readfds, NULL, NULL, &timeout)) { if (recv(raw_sock, buf, RECV_BUF_SIZE, 0) < 0) { perror("recv: raw_sock"); return; } if (0 == packetIsEirSynAck(buf)) break; } } fprintf(stderr, "\n"); printf("TCP TIME_WAIT of %d seconds\n", (int) (now.tv_sec - start.tv_sec)); } static int raw_open(char *ifname) { int s = socket(PF_PACKET, SOCK_RAW, htons(ETH_P_ALL)); struct sockaddr_ll sa; if (s < 0) { perror("SOCK_RAW"); return s; } memset(&sa, '\0', sizeof(sa)); sa.sll_family = AF_PACKET; strncpy((char *) sa.sll_addr, ifname, sizeof(sa.sll_addr)); if (bind(s, (struct sockaddr *) &sa, sizeof(sa)) < 0) { perror(ifname); close(s); return -1; } return (raw_sock = s); } static void raw_close(void) { assert(raw_sock > -1); close(raw_sock); } static void * find_our_packet(int *len) { static char buf[RECV_BUF_SIZE]; static void *synbuf = NULL; int state = 0; int l; int count = 20; socklen_t fl; fprintf(stderr, "Reading our packets"); while (count-- && 2 != state) { memset(buf, '\0', RECV_BUF_SIZE); memset(&From, '\0', fl = sizeof(From)); if ((l = recvfrom(raw_sock, buf, RECV_BUF_SIZE, 0, (struct sockaddr *) &From, &fl)) < 0) { perror("find_our_packet: read"); return NULL; } fprintf(stderr, "."); switch (state) { case 0: /* looking for a SYN */ if (0 == packetIsOurSyn(buf)) { state++; synbuf = malloc(l); memcpy(synbuf, buf, l); *len = l; } break; case 1: /* looking for a FIN+ACK */ if (0 == packetIsOurFinAck(buf)) state++; break; default: assert(0); break; } } fprintf(stderr, "\n"); return synbuf; } #endif /* COMPILING_MSL_TEST */ polygraph-4.3.2/tools/pmix3-ips.man0000644000175000017500000000113711336340427016616 0ustar testertester.\" DO NOT MODIFY THIS FILE! It was generated by help2man 1.36. .TH POLYGRAPH-PMIX3-IPS "1" "February 2010" "polygraph-pmix3-ips - Web Polygraph" "User Commands" .SH NAME polygraph-pmix3-ips \- PolyMix\-3 address calculator .SH SYNOPSIS .B pmix3-ips.pl \fI \fR[\fIcltId|srvId\fR] .SH DESCRIPTION Prints IP addresses for robots and servers given request rate and bench id. .PP .SH COPYRIGHT Copyright \(co 2003-2006 The Measurement Factory, Inc. .SH "SEE ALSO" .BR polygraph (7) \- general information and a list of programs .B \%http://www.web-polygraph.org/ \- project web site polygraph-4.3.2/tools/beepmon.h2m0000644000175000017500000000022211335553726016326 0ustar testertester[DESCRIPTION] Connects to specified BEEP servers, starts channel #1, receives BEEP messages, and forwards message content to an external program. polygraph-4.3.2/tools/pmix2-ips.h2m0000644000175000017500000000013211335553726016531 0ustar testertester[DESCRIPTION] Prints IP addresses for robots and servers given request rate and bench id. polygraph-4.3.2/tools/Makefile.in0000644000175000017500000004353411546445454016355 0ustar testertester# Makefile.in generated by automake 1.11.1 from Makefile.am. # @configure_input@ # Copyright (C) 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001, 2002, # 2003, 2004, 2005, 2006, 2007, 2008, 2009 Free Software Foundation, # Inc. 
# This Makefile.in is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY, to the extent permitted by law; without # even the implied warranty of MERCHANTABILITY or FITNESS FOR A # PARTICULAR PURPOSE. @SET_MAKE@ # settings common to all Makefile.ams VPATH = @srcdir@ pkgdatadir = $(datadir)/@PACKAGE@ pkgincludedir = $(includedir)/@PACKAGE@ pkglibdir = $(libdir)/@PACKAGE@ pkglibexecdir = $(libexecdir)/@PACKAGE@ am__cd = CDPATH="$${ZSH_VERSION+.}$(PATH_SEPARATOR)" && cd install_sh_DATA = $(install_sh) -c -m 644 install_sh_PROGRAM = $(install_sh) -c install_sh_SCRIPT = $(install_sh) -c INSTALL_HEADER = $(INSTALL_DATA) transform = $(program_transform_name) NORMAL_INSTALL = : PRE_INSTALL = : POST_INSTALL = : NORMAL_UNINSTALL = : PRE_UNINSTALL = : POST_UNINSTALL = : build_triplet = @build@ host_triplet = @host@ DIST_COMMON = $(dist_bin_SCRIPTS) $(dist_man1_MANS) \ $(srcdir)/Makefile.am $(srcdir)/Makefile.in \ $(top_srcdir)/common.am subdir = tools ACLOCAL_M4 = $(top_srcdir)/aclocal.m4 am__aclocal_m4_deps = $(top_srcdir)/acinclude.m4 \ $(top_srcdir)/cfgaux/ax_create_stdint_h.m4 \ $(top_srcdir)/cfgaux/check_zlib.m4 $(top_srcdir)/configure.in am__configure_deps = $(am__aclocal_m4_deps) $(CONFIGURE_DEPENDENCIES) \ $(ACLOCAL_M4) mkinstalldirs = $(install_sh) -d CONFIG_HEADER = $(top_builddir)/config.h CONFIG_CLEAN_FILES = CONFIG_CLEAN_VPATH_FILES = am__vpath_adj_setup = srcdirstrip=`echo "$(srcdir)" | sed 's|.|.|g'`; am__vpath_adj = case $$p in \ $(srcdir)/*) f=`echo "$$p" | sed "s|^$$srcdirstrip/||"`;; \ *) f=$$p;; \ esac; am__strip_dir = f=`echo $$p | sed -e 's|^.*/||'`; am__install_max = 40 am__nobase_strip_setup = \ srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*|]/\\\\&/g'` am__nobase_strip = \ for p in $$list; do echo "$$p"; done | sed -e "s|$$srcdirstrip/||" am__nobase_list = $(am__nobase_strip_setup); \ for p in $$list; do echo "$$p $$p"; done | \ sed "s| $$srcdirstrip/| |;"' / .*\//!s/ .*/ ./; s,\( .*\)/[^/]*$$,\1,' | \ $(AWK) 'BEGIN { files["."] = "" } { files[$$2] = files[$$2] " " $$1; \ if (++n[$$2] == $(am__install_max)) \ { print $$2, files[$$2]; n[$$2] = 0; files[$$2] = "" } } \ END { for (dir in files) print dir, files[dir] }' am__base_list = \ sed '$$!N;$$!N;$$!N;$$!N;$$!N;$$!N;$$!N;s/\n/ /g' | \ sed '$$!N;$$!N;$$!N;$$!N;s/\n/ /g' am__installdirs = "$(DESTDIR)$(bindir)" "$(DESTDIR)$(man1dir)" SCRIPTS = $(dist_bin_SCRIPTS) SOURCES = DIST_SOURCES = man1dir = $(mandir)/man1 NROFF = nroff MANS = $(dist_man1_MANS) DISTFILES = $(DIST_COMMON) $(DIST_SOURCES) $(TEXINFOS) $(EXTRA_DIST) ACLOCAL = @ACLOCAL@ AMTAR = @AMTAR@ AR = @AR@ AR_R = @AR_R@ AUTOCONF = @AUTOCONF@ AUTOHEADER = @AUTOHEADER@ AUTOMAKE = @AUTOMAKE@ AWK = @AWK@ CC = @CC@ CCDEPMODE = @CCDEPMODE@ CFLAGS = @CFLAGS@ CPP = @CPP@ CPPFLAGS = @CPPFLAGS@ CXX = @CXX@ CXXCPP = @CXXCPP@ CXXDEPMODE = @CXXDEPMODE@ CXXFLAGS = @CXXFLAGS@ CYGPATH_W = @CYGPATH_W@ DEFS = @DEFS@ DEPDIR = @DEPDIR@ DSYMUTIL = @DSYMUTIL@ DUMPBIN = @DUMPBIN@ ECHO_C = @ECHO_C@ ECHO_N = @ECHO_N@ ECHO_T = @ECHO_T@ EGREP = @EGREP@ EXEEXT = @EXEEXT@ FGREP = @FGREP@ GREP = @GREP@ HELP2MAN = @HELP2MAN@ INSTALL = @INSTALL@ INSTALL_DATA = @INSTALL_DATA@ INSTALL_PROGRAM = @INSTALL_PROGRAM@ INSTALL_SCRIPT = @INSTALL_SCRIPT@ INSTALL_STRIP_PROGRAM = @INSTALL_STRIP_PROGRAM@ LD = @LD@ LDFLAGS = @LDFLAGS@ LDFLAG_RDYNAMIC = @LDFLAG_RDYNAMIC@ LIBOBJS = @LIBOBJS@ LIBS = @LIBS@ 
LIBTOOL = @LIBTOOL@ LIB_CURSES = @LIB_CURSES@ LIB_DL = @LIB_DL@ LIPO = @LIPO@ LN_S = @LN_S@ LTLIBOBJS = @LTLIBOBJS@ MAINT = @MAINT@ MAKEINFO = @MAKEINFO@ MKDIR_P = @MKDIR_P@ NM = @NM@ NMEDIT = @NMEDIT@ OBJDUMP = @OBJDUMP@ OBJEXT = @OBJEXT@ OTOOL = @OTOOL@ OTOOL64 = @OTOOL64@ PACKAGE = @PACKAGE@ PACKAGE_BUGREPORT = @PACKAGE_BUGREPORT@ PACKAGE_NAME = @PACKAGE_NAME@ PACKAGE_STRING = @PACKAGE_STRING@ PACKAGE_TARNAME = @PACKAGE_TARNAME@ PACKAGE_URL = @PACKAGE_URL@ PACKAGE_VERSION = @PACKAGE_VERSION@ PATH_SEPARATOR = @PATH_SEPARATOR@ RANLIB = @RANLIB@ SED = @SED@ SET_MAKE = @SET_MAKE@ SHELL = @SHELL@ STRIP = @STRIP@ VERSION = @VERSION@ abs_builddir = @abs_builddir@ abs_srcdir = @abs_srcdir@ abs_top_builddir = @abs_top_builddir@ abs_top_srcdir = @abs_top_srcdir@ ac_ct_CC = @ac_ct_CC@ ac_ct_CXX = @ac_ct_CXX@ ac_ct_DUMPBIN = @ac_ct_DUMPBIN@ am__include = @am__include@ am__leading_dot = @am__leading_dot@ am__quote = @am__quote@ am__tar = @am__tar@ am__untar = @am__untar@ bindir = @bindir@ build = @build@ build_alias = @build_alias@ build_cpu = @build_cpu@ build_os = @build_os@ build_vendor = @build_vendor@ builddir = @builddir@ datadir = @datadir@ datarootdir = @datarootdir@ docdir = @docdir@ dvidir = @dvidir@ exec_prefix = @exec_prefix@ host = @host@ host_alias = @host_alias@ host_cpu = @host_cpu@ host_os = @host_os@ host_vendor = @host_vendor@ htmldir = @htmldir@ includedir = @includedir@ infodir = @infodir@ install_sh = @install_sh@ libdir = @libdir@ libexecdir = @libexecdir@ localedir = @localedir@ localstatedir = @localstatedir@ lt_ECHO = @lt_ECHO@ mandir = @mandir@ mkdir_p = @mkdir_p@ oldincludedir = @oldincludedir@ pdfdir = @pdfdir@ prefix = @prefix@ program_transform_name = @program_transform_name@ psdir = @psdir@ sbindir = @sbindir@ sharedstatedir = @sharedstatedir@ srcdir = @srcdir@ std_include = @std_include@ sysconfdir = @sysconfdir@ target_alias = @target_alias@ top_build_prefix = @top_build_prefix@ top_builddir = @top_builddir@ top_srcdir = @top_srcdir@ dist_bin_SCRIPTS = \ beepmon.pl \ pmix2-ips.pl \ pmix3-ips.pl \ webaxe4-ips.pl \ cmp-lx.pl dist_man1_MANS = \ beepmon.man \ pmix2-ips.man \ pmix3-ips.man \ webaxe4-ips.man \ cmp-lx.man EXTRA_DIST = \ beepmon.h2m \ pmix2-ips.h2m \ pmix3-ips.h2m \ webaxe4-ips.h2m \ cmp-lx.h2m \ msl_test.c \ msl_test_bsd.c \ msl_test_linux.c # top_builddir/ is needed for generated config.h # top_builddir/src/ is needed for generated src/xstd/h/stdint.h # top_srcdir/ is needed for post-config.h # TODO: move post-config.h and generated config.h to src? AM_CPPFLAGS = -I$(top_builddir) -I$(top_builddir)/src -I$(top_srcdir) -I$(top_srcdir)/src all: all-am .SUFFIXES: $(srcdir)/Makefile.in: @MAINTAINER_MODE_TRUE@ $(srcdir)/Makefile.am $(top_srcdir)/common.am $(am__configure_deps) @for dep in $?; do \ case '$(am__configure_deps)' in \ *$$dep*) \ ( cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh ) \ && { if test -f $@; then exit 0; else break; fi; }; \ exit 1;; \ esac; \ done; \ echo ' cd $(top_srcdir) && $(AUTOMAKE) --foreign tools/Makefile'; \ $(am__cd) $(top_srcdir) && \ $(AUTOMAKE) --foreign tools/Makefile .PRECIOUS: Makefile Makefile: $(srcdir)/Makefile.in $(top_builddir)/config.status @case '$?' 
in \ *config.status*) \ cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh;; \ *) \ echo ' cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe)'; \ cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe);; \ esac; $(top_builddir)/config.status: $(top_srcdir)/configure $(CONFIG_STATUS_DEPENDENCIES) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(top_srcdir)/configure: @MAINTAINER_MODE_TRUE@ $(am__configure_deps) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(ACLOCAL_M4): @MAINTAINER_MODE_TRUE@ $(am__aclocal_m4_deps) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(am__aclocal_m4_deps): install-dist_binSCRIPTS: $(dist_bin_SCRIPTS) @$(NORMAL_INSTALL) test -z "$(bindir)" || $(MKDIR_P) "$(DESTDIR)$(bindir)" @list='$(dist_bin_SCRIPTS)'; test -n "$(bindir)" || list=; \ for p in $$list; do \ if test -f "$$p"; then d=; else d="$(srcdir)/"; fi; \ if test -f "$$d$$p"; then echo "$$d$$p"; echo "$$p"; else :; fi; \ done | \ sed -e 'p;s,.*/,,;n' \ -e 'h;s|.*|.|' \ -e 'p;x;s,.*/,,;$(transform)' | sed 'N;N;N;s,\n, ,g' | \ $(AWK) 'BEGIN { files["."] = ""; dirs["."] = 1; } \ { d=$$3; if (dirs[d] != 1) { print "d", d; dirs[d] = 1 } \ if ($$2 == $$4) { files[d] = files[d] " " $$1; \ if (++n[d] == $(am__install_max)) { \ print "f", d, files[d]; n[d] = 0; files[d] = "" } } \ else { print "f", d "/" $$4, $$1 } } \ END { for (d in files) print "f", d, files[d] }' | \ while read type dir files; do \ if test "$$dir" = .; then dir=; else dir=/$$dir; fi; \ test -z "$$files" || { \ echo " $(INSTALL_SCRIPT) $$files '$(DESTDIR)$(bindir)$$dir'"; \ $(INSTALL_SCRIPT) $$files "$(DESTDIR)$(bindir)$$dir" || exit $$?; \ } \ ; done uninstall-dist_binSCRIPTS: @$(NORMAL_UNINSTALL) @list='$(dist_bin_SCRIPTS)'; test -n "$(bindir)" || exit 0; \ files=`for p in $$list; do echo "$$p"; done | \ sed -e 's,.*/,,;$(transform)'`; \ test -n "$$list" || exit 0; \ echo " ( cd '$(DESTDIR)$(bindir)' && rm -f" $$files ")"; \ cd "$(DESTDIR)$(bindir)" && rm -f $$files mostlyclean-libtool: -rm -f *.lo clean-libtool: -rm -rf .libs _libs install-man1: $(dist_man1_MANS) @$(NORMAL_INSTALL) test -z "$(man1dir)" || $(MKDIR_P) "$(DESTDIR)$(man1dir)" @list='$(dist_man1_MANS)'; test -n "$(man1dir)" || exit 0; \ { for i in $$list; do echo "$$i"; done; \ } | while read p; do \ if test -f $$p; then d=; else d="$(srcdir)/"; fi; \ echo "$$d$$p"; echo "$$p"; \ done | \ sed -e 'n;s,.*/,,;p;h;s,.*\.,,;s,^[^1][0-9a-z]*$$,1,;x' \ -e 's,\.[0-9a-z]*$$,,;$(transform);G;s,\n,.,' | \ sed 'N;N;s,\n, ,g' | { \ list=; while read file base inst; do \ if test "$$base" = "$$inst"; then list="$$list $$file"; else \ echo " $(INSTALL_DATA) '$$file' '$(DESTDIR)$(man1dir)/$$inst'"; \ $(INSTALL_DATA) "$$file" "$(DESTDIR)$(man1dir)/$$inst" || exit $$?; \ fi; \ done; \ for i in $$list; do echo "$$i"; done | $(am__base_list) | \ while read files; do \ test -z "$$files" || { \ echo " $(INSTALL_DATA) $$files '$(DESTDIR)$(man1dir)'"; \ $(INSTALL_DATA) $$files "$(DESTDIR)$(man1dir)" || exit $$?; }; \ done; } uninstall-man1: @$(NORMAL_UNINSTALL) @list='$(dist_man1_MANS)'; test -n "$(man1dir)" || exit 0; \ files=`{ for i in $$list; do echo "$$i"; done; \ } | sed -e 's,.*/,,;h;s,.*\.,,;s,^[^1][0-9a-z]*$$,1,;x' \ -e 's,\.[0-9a-z]*$$,,;$(transform);G;s,\n,.,'`; \ test -z "$$files" || { \ echo " ( cd '$(DESTDIR)$(man1dir)' && rm -f" $$files ")"; \ cd "$(DESTDIR)$(man1dir)" && rm -f $$files; } tags: TAGS TAGS: ctags: CTAGS CTAGS: distdir: $(DISTFILES) @list='$(MANS)'; if test -n "$$list"; then \ 
list=`for p in $$list; do \ if test -f $$p; then d=; else d="$(srcdir)/"; fi; \ if test -f "$$d$$p"; then echo "$$d$$p"; else :; fi; done`; \ if test -n "$$list" && \ grep 'ab help2man is required to generate this page' $$list >/dev/null; then \ echo "error: found man pages containing the \`missing help2man' replacement text:" >&2; \ grep -l 'ab help2man is required to generate this page' $$list | sed 's/^/ /' >&2; \ echo " to fix them, install help2man, remove and regenerate the man pages;" >&2; \ echo " typically \`make maintainer-clean' will remove them" >&2; \ exit 1; \ else :; fi; \ else :; fi @srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \ topsrcdirstrip=`echo "$(top_srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \ list='$(DISTFILES)'; \ dist_files=`for file in $$list; do echo $$file; done | \ sed -e "s|^$$srcdirstrip/||;t" \ -e "s|^$$topsrcdirstrip/|$(top_builddir)/|;t"`; \ case $$dist_files in \ */*) $(MKDIR_P) `echo "$$dist_files" | \ sed '/\//!d;s|^|$(distdir)/|;s,/[^/]*$$,,' | \ sort -u` ;; \ esac; \ for file in $$dist_files; do \ if test -f $$file || test -d $$file; then d=.; else d=$(srcdir); fi; \ if test -d $$d/$$file; then \ dir=`echo "/$$file" | sed -e 's,/[^/]*$$,,'`; \ if test -d "$(distdir)/$$file"; then \ find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \ fi; \ if test -d $(srcdir)/$$file && test $$d != $(srcdir); then \ cp -fpR $(srcdir)/$$file "$(distdir)$$dir" || exit 1; \ find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \ fi; \ cp -fpR $$d/$$file "$(distdir)$$dir" || exit 1; \ else \ test -f "$(distdir)/$$file" \ || cp -p $$d/$$file "$(distdir)/$$file" \ || exit 1; \ fi; \ done check-am: all-am check: check-am all-am: Makefile $(SCRIPTS) $(MANS) installdirs: for dir in "$(DESTDIR)$(bindir)" "$(DESTDIR)$(man1dir)"; do \ test -z "$$dir" || $(MKDIR_P) "$$dir"; \ done install: install-am install-exec: install-exec-am install-data: install-data-am uninstall: uninstall-am install-am: all-am @$(MAKE) $(AM_MAKEFLAGS) install-exec-am install-data-am installcheck: installcheck-am install-strip: $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ `test -z '$(STRIP)' || \ echo "INSTALL_PROGRAM_ENV=STRIPPROG='$(STRIP)'"` install mostlyclean-generic: clean-generic: distclean-generic: -test -z "$(CONFIG_CLEAN_FILES)" || rm -f $(CONFIG_CLEAN_FILES) -test . = "$(srcdir)" || test -z "$(CONFIG_CLEAN_VPATH_FILES)" || rm -f $(CONFIG_CLEAN_VPATH_FILES) maintainer-clean-generic: @echo "This command is intended for maintainers to use" @echo "it deletes files that may require special tools to rebuild." 
clean: clean-am clean-am: clean-generic clean-libtool mostlyclean-am distclean: distclean-am -rm -f Makefile distclean-am: clean-am distclean-generic dvi: dvi-am dvi-am: html: html-am html-am: info: info-am info-am: install-data-am: install-man install-dvi: install-dvi-am install-dvi-am: install-exec-am: install-dist_binSCRIPTS install-html: install-html-am install-html-am: install-info: install-info-am install-info-am: install-man: install-man1 install-pdf: install-pdf-am install-pdf-am: install-ps: install-ps-am install-ps-am: installcheck-am: maintainer-clean: maintainer-clean-am -rm -f Makefile maintainer-clean-am: distclean-am maintainer-clean-generic mostlyclean: mostlyclean-am mostlyclean-am: mostlyclean-generic mostlyclean-libtool pdf: pdf-am pdf-am: ps: ps-am ps-am: uninstall-am: uninstall-dist_binSCRIPTS uninstall-man uninstall-man: uninstall-man1 .MAKE: install-am install-strip .PHONY: all all-am check check-am clean clean-generic clean-libtool \ distclean distclean-generic distclean-libtool distdir dvi \ dvi-am html html-am info info-am install install-am \ install-data install-data-am install-dist_binSCRIPTS \ install-dvi install-dvi-am install-exec install-exec-am \ install-html install-html-am install-info install-info-am \ install-man install-man1 install-pdf install-pdf-am install-ps \ install-ps-am install-strip installcheck installcheck-am \ installdirs maintainer-clean maintainer-clean-generic \ mostlyclean mostlyclean-generic mostlyclean-libtool pdf pdf-am \ ps ps-am uninstall uninstall-am uninstall-dist_binSCRIPTS \ uninstall-man uninstall-man1 #AM_LDFLAGS = #imported_libs = @ENABLE_MANPAGES_GEN_TRUE@manpages-am: @top_srcdir@/common.h2m @ENABLE_MANPAGES_GEN_TRUE@ @for binary in $(bin_PROGRAMS) $(dist_bin_SCRIPTS); do \ @ENABLE_MANPAGES_GEN_TRUE@ echo "Generating manpage for $$binary"; \ @ENABLE_MANPAGES_GEN_TRUE@ manpage=`echo -n "$$binary" | sed -e 's/\..*//'`; \ @ENABLE_MANPAGES_GEN_TRUE@ name=`(grep \ @ENABLE_MANPAGES_GEN_TRUE@ --after-context=1 \ @ENABLE_MANPAGES_GEN_TRUE@ ".B \\\\\%polygraph-$$manpage" \ @ENABLE_MANPAGES_GEN_TRUE@ '@top_srcdir@/polygraph.man.in' || \ @ENABLE_MANPAGES_GEN_TRUE@ echo -n ' a part of Web Polygraph performance benchmark') | \ @ENABLE_MANPAGES_GEN_TRUE@ tail -1 | cut -c4-`; \ @ENABLE_MANPAGES_GEN_TRUE@ $(HELP2MAN) \ @ENABLE_MANPAGES_GEN_TRUE@ --no-info \ @ENABLE_MANPAGES_GEN_TRUE@ --name="$$name" \ @ENABLE_MANPAGES_GEN_TRUE@ --version-string="polygraph-$$manpage - $(PACKAGE_NAME)" \ @ENABLE_MANPAGES_GEN_TRUE@ --include='@top_srcdir@/common.h2m' \ @ENABLE_MANPAGES_GEN_TRUE@ --opt-include="$$manpage.h2m" \ @ENABLE_MANPAGES_GEN_TRUE@ --output="$$manpage.man" \ @ENABLE_MANPAGES_GEN_TRUE@ "./$$binary";\ @ENABLE_MANPAGES_GEN_TRUE@ done @ENABLE_MANPAGES_GEN_TRUE@ @if test 'x$(RECURSIVE_TARGETS)' != 'xmanpages-recursive' ; then \ @ENABLE_MANPAGES_GEN_TRUE@ $(MAKE) \ @ENABLE_MANPAGES_GEN_TRUE@ $(AM_MAKEFLAGS) \ @ENABLE_MANPAGES_GEN_TRUE@ RECURSIVE_TARGETS=manpages-recursive \ @ENABLE_MANPAGES_GEN_TRUE@ manpages-recursive; \ @ENABLE_MANPAGES_GEN_TRUE@ fi @ENABLE_MANPAGES_GEN_TRUE@manpages-recursive: @ENABLE_MANPAGES_GEN_TRUE@manpages: Makefile $(LIBRARIES) $(PROGRAMS) manpages-am manpages-recursive @ENABLE_MANPAGES_GEN_TRUE@manpagesclean-am: @ENABLE_MANPAGES_GEN_TRUE@ @rm -f $(dist_man1_MANS) @ENABLE_MANPAGES_GEN_TRUE@ @if test 'x$(RECURSIVE_TARGETS)' != 'xmanpagesclean-recursive' ; then \ @ENABLE_MANPAGES_GEN_TRUE@ $(MAKE) \ @ENABLE_MANPAGES_GEN_TRUE@ $(AM_MAKEFLAGS) \ @ENABLE_MANPAGES_GEN_TRUE@ RECURSIVE_TARGETS=manpagesclean-recursive \ 
@ENABLE_MANPAGES_GEN_TRUE@ manpagesclean-recursive; \ @ENABLE_MANPAGES_GEN_TRUE@ fi @ENABLE_MANPAGES_GEN_TRUE@manpagesclean-recursive: @ENABLE_MANPAGES_GEN_TRUE@manpagesclean: manpagesclean-am manpagesclean-recursive @ENABLE_MANPAGES_GEN_TRUE@.PHONY: manpages-am manpages-recursive manpages \ @ENABLE_MANPAGES_GEN_TRUE@ manpagesclean-am manpagesclean-recursive manpagesclean @ENABLE_MANPAGES_GEN_FALSE@manpages: @ENABLE_MANPAGES_GEN_FALSE@ @echo "Can not generate man pages. Please install help2man and reconfigure." @ENABLE_MANPAGES_GEN_FALSE@manpagesclean: @ENABLE_MANPAGES_GEN_FALSE@ @echo "Can not generate man pages. Please install help2man and reconfigure." @ENABLE_MANPAGES_GEN_FALSE@.PHONY: manpages manpagesclean # Tell versions [3.59,3.63) of GNU make to not export all variables. # Otherwise a system limit (for SysV at least) may be exceeded. .NOEXPORT: polygraph-4.3.2/tools/webaxe4-ips.pl0000755000175000017500000001725711546440450016772 0ustar testertester#!/usr/bin/perl -w # Web Polygraph http://www.web-polygraph.org/ # Copyright 2003-2011 The Measurement Factory # Licensed under the Apache License, Version 2.0 require 5.003; use strict; if (@ARGV == 1 && $ARGV[0] eq '--help') { print usage(); exit; } use POSIX; &web2term(); # default workload configuration my $CltSide = { max_host_load => 500, # maximum host load max_agent_load => 0.4, y_octet_offset => 0, }; my $SrvSide = { max_host_load => $CltSide->{max_host_load}, max_agent_load => $CltSide->{max_host_load}, y_octet_offset => 128, }; my $OptRoute = 0; &getOpts(); die(&usage()) unless 2 <= @ARGV && @ARGV <= 3; $SIG{__WARN__} = sub { print(STDERR &usage()); die $_[0] }; my $Quiet = @ARGV == 3; my ($Bench, $RR, $Id) = @ARGV; exit(&main()); sub main { &compute($CltSide); &compute($SrvSide); #die("$CltSide->{theHostCount} != $SrvSide->{theHostCount}") if # $CltSide->{theHostCount} != $SrvSide->{theHostCount}; #my $Pairs = $CltSide->{theHostCount}; if (!$Quiet) { my $Robots = scalar @{$CltSide->{theIps}}; my $Servers = scalar @{$SrvSide->{theIps}}; xprintf("bench: %6d\n", $Bench); xprintf("req.rate: %6d/sec (actual: %8.2f/sec)\n", $RR, $Robots*$CltSide->{max_agent_load}); xprintf("PCs: %6d clients + %d servers (%d total)\n", $CltSide->{theHostCount}, $SrvSide->{theHostCount}, $CltSide->{theHostCount} + $SrvSide->{theHostCount}); # xprintf("robots: %6d (%4d/machine)\n", $Robots, $Robots/$CltSide->{theHostCount}); # xprintf("servers: %6d (%4d/machine)\n", $Servers, $Servers/$SrvSide->{theHostCount}); xprintf("\n"); printSide('clt', $CltSide); xprintf("\n"); printSide('srv', $SrvSide); xprintf("\n"); } return 0 unless defined $Id; if ($Id =~ /^clts$/) { print(&getIps($CltSide)); } elsif ($Id =~ /^srvs$/) { print(&getIps($SrvSide)); } elsif ($Id =~ /^clt(\d+)$/) { my $id = $1; die("there are only $CltSide->{theHostCount} clients, cannot have $Id client\n") if $id > $CltSide->{theHostCount}; print(&getIps($CltSide, $id-1)); } elsif ($Id =~ /^srv(\d+)$/) { my $id = $1; die("there are only $ SrvSide->{theHostCount} servers, cannot have $Id server\n") if $id > $SrvSide->{theHostCount}; print(&getIps($SrvSide, $id-1)); } else { die("$0: cannot parse `$Id'; expected something like clt1 or srv4\n"); } print("\n"); return 0; } sub printSide { my ($label, $side) = @_; printf("\t%s.%-20s %10s\n", $label, 'max_host_load:', $side->{max_host_load}); printf("\t%s.%-20s %10s\n", $label, 'max_agent_load:', $side->{max_agent_load}); printf("\t%s.%-20s %10s\n", $label, 'subnet:', $side->{theSubnet}); printf("\t%s.%-20s %10s\n", $label, 'max_addr_per_subnet:', 
$side->{theMaxAddrPerSnet}); printf("\t%s.%-20s %10s\n", $label, 'host_count:', $side->{theHostCount}); printf("\t%s.%-20s %10s\n", $label, 'agent_per_host:', $side->{theAgentPerHost}); for (my $h = 0; $h <= $#{$side->{thePerHostIps}}; ++$h) { printf("\t\t%s.%-20s %s\n", $label, sprintf('%s_%02d.ips:', 'host', $h+1), getIps($side, $h)); } # @{$side->{theIps}} = @ips; } sub compute { my $side = shift; my $reqRate = $RR or die(); my $hostLoad = $side->{max_host_load} or die(); my $agentLoad = $side->{max_agent_load} or die(); # find min subnet that can fit maxAddrPerHost addresses my $maxAddrPerHost = int($hostLoad/$agentLoad); my $subnet = 25; my $maxAddrPerSnet = -1; for (my $i = 1; $maxAddrPerSnet < $maxAddrPerHost && $i <= 128; $i *= 2) { $maxAddrPerSnet = $i * 250; --$subnet; } die() if $maxAddrPerSnet < $maxAddrPerHost; die() if $subnet == 25; # find the number of agents (i.e. the number of a.b.x.y addresses) my $hostCnt = int(xceil($reqRate, $hostLoad)); my $agentCnt = &doubleDiv($hostCnt, $reqRate, $agentLoad); my $agentPerHost = $agentCnt / $hostCnt; die() unless $agentCnt <= $maxAddrPerHost * $hostCnt; # distribute agentCnt agents among subnets (upto maxAddrPerSnet each) my @ips = (); $side->{thePerHostIps} = []; for (my $s = 0; @ips < $agentCnt; ++$s) { # one subnet, one host my $host = {}; my $x = $s * int($maxAddrPerSnet/250); my $y = 1; for (my $a = 0; $a < $agentPerHost; ++$a) { die("request rate is too high; ". "ran out of IP addresses while accomodating $agentCnt agents") if $x >= 128; my $actualX = $x + $side->{y_octet_offset}; push @ips, "10.$Bench.$actualX.$y"; $side->{theFirstX} = $actualX unless defined $side->{theFirstX}; $side->{theFirstY} = $y unless defined $side->{theFirstY}; $host->{theFirstX} = $actualX unless defined $host->{theFirstX}; $host->{theFirstY} = $y unless defined $host->{theFirstY}; $side->{theLastX} = $host->{theLastX} = $actualX; $side->{theLastY} = $host->{theLastY} = $y; if (++$y == 251) { ++$x; $y = 1; } } push @{$side->{thePerHostIps}}, $host; } die() unless $agentCnt == @ips; @{$side->{theIps}} = @ips; $side->{theSubnet} = $subnet; $side->{theMaxAddrPerSnet} = $maxAddrPerSnet; $side->{theMaxAddrPerHost} = $maxAddrPerHost; $side->{theHostCount} = $hostCnt; $side->{theAgentPerHost} = $agentCnt / $hostCnt; return undef(); } sub getIps { my ($side, $id) = @_; return &formatIps($side, $side->{thePerHostIps}->[$id]) if defined $id; return &formatIps($side, $side); } sub formatIps { my ($side, $descr) = @_; if ($OptRoute || $descr->{theFirstX} == $descr->{theLastX} || $descr->{theLastY} == 250) { return &ipRange2Str($side, $descr->{theFirstX}, $descr->{theLastX}, $descr->{theLastY}); } else { return sprintf('%s;%s', &ipRange2Str($side, $descr->{theFirstX}, $descr->{theLastX}-1, 250), &ipRange2Str($side, $descr->{theLastX}, $descr->{theLastX}, $descr->{theLastY})); } } sub ipRange2Str { my ($side, $minX, $maxX, $maxY) = @_ or die(); my $minY = 1; if ($OptRoute) { my $subnet = $side->{theSubnet}; return "10.$Bench.$minX.0/$subnet"; } else { my $x = ($minX == $maxX) ? $minX : "$minX-$maxX"; my $y = ($minY == $maxY) ? $minY : "$minY-$maxY"; return "10.$Bench.$x.$y"; # /$subnet"; } } sub doubleDiv { my ($factor, $n, $d) = @_; my $apx = xceil($n, $d); return int($factor * xceil($apx, $factor)); } # try "ceil(700/0.7)" to see why xceil is needed sub xceil { my ($large, $small) = @_; my $c = ceil($large/$small); return ($c-1)*$small >= $large ? 
$c-1 : $c; } sub xprintf { my $fmt = shift; #printf(STDERR "$0: $fmt", @_); printf("\t$fmt", @_); } sub web2term { my $query = $ENV{QUERY_STRING}; return unless defined $query; open(STDERR, ">&STDOUT"); print("Content-type: text/plain\r\n\r\n"); @ARGV = ($query =~ /=([^&]+)/g); printf("./pmix2-ips.pl %s\n\n", join(' ', @ARGV)); } sub getOpts { my @newOpts = (); my $sides = { 'client' => $CltSide, 'server' => $SrvSide, }; for (my $i = 0; $i <= $#ARGV; ++$i) { my $opt = $ARGV[$i]; if ($opt !~ /^--/) { push @newOpts, $opt; next; } if ($opt eq '--route') { $OptRoute = 1; next; } if ($opt =~ /^--(client|server)_side\.(max_(?:host|agent)_load)/) { die(&usage()) unless defined $sides->{$1} && defined $sides->{$1}->{$2}; $sides->{$1}->{$2} = $ARGV[++$i] or die(&usage()); next; } die("$0: unknown option: $opt\n"); } @ARGV = @newOpts; } sub usage { return "Usage: $0 [--option] ... [cltId|srvId]\n\n". "Options:\n". " --route show addresses in route-friendly format\n". " --client_side.max_host_load load per polyclt process\n". " --server_side.max_host_load load per polysrv process\n". " --client_side.max_agent_load load per robot agent\n". " --server_side.max_agent_load load per server agent\n"; } sub suffix { my ($ord) = @_; return 'st' if $ord == 1; return 'nd' if $ord == 2; return 'rd' if $ord == 3; return 'th'; } polygraph-4.3.2/tools/Makefile.am0000644000175000017500000000057211335553726016335 0ustar testertester dist_bin_SCRIPTS = \ beepmon.pl \ pmix2-ips.pl \ pmix3-ips.pl \ webaxe4-ips.pl \ cmp-lx.pl dist_man1_MANS = \ beepmon.man \ pmix2-ips.man \ pmix3-ips.man \ webaxe4-ips.man \ cmp-lx.man EXTRA_DIST = \ beepmon.h2m \ pmix2-ips.h2m \ pmix3-ips.h2m \ webaxe4-ips.h2m \ cmp-lx.h2m \ msl_test.c \ msl_test_bsd.c \ msl_test_linux.c include $(top_srcdir)/common.am polygraph-4.3.2/tools/pmix2-ips.pl0000755000175000017500000000603411546440450016461 0ustar testertester#!/usr/bin/perl -w # Web Polygraph http://www.web-polygraph.org/ # Copyright 2003-2011 The Measurement Factory # Licensed under the Apache License, Version 2.0 require 5.003; use strict; use POSIX; if (@ARGV == 1 && $ARGV[0] eq '--help') { print usage(); exit; } &web2term(); die(&usage()) unless 2 <= @ARGV && @ARGV <= 3; $SIG{__WARN__} = sub { print(STDERR &usage()); die $_[0] }; my $Quiet = @ARGV == 3; my ($Bench, $RR, $Id) = @ARGV; my $Pairs; exit(&main()); sub main { $Pairs = xceil($RR, 400); my $Robots = xceil($RR, 0.4); $Robots = $Pairs*xceil($Robots, $Pairs); my $Servers = ceil($Robots*0.1 + 500); $Servers = $Pairs*xceil($Servers, $Pairs); my ($rbtX, $rbtY) = &countToB($Robots); my ($srvX, $srvY) = &countToB($Servers); # must recalc (adjust) again! 
$Robots = $rbtX * $rbtY; $Servers = $srvX * $srvY; my ($r, $s) = ($Robots/$Pairs, $Servers/$Pairs); if (!$Quiet) { xprintf("bench: %6d\n", $Bench); xprintf("req.rate: %6d/sec (actual: %8.2f/sec)\n", $RR, $Robots*0.4); xprintf("PCs: %6dpairs\n", $Pairs); xprintf("robots: %6d (%4d/machine)\n", $Robots, $r); xprintf("servers: %6d (%4d/machine)\n", $Servers, $s); xprintf("\n"); xprintf("rbt_ips: %20s\n", ipRange2Str(1, $rbtX, $rbtY)); xprintf("srv_ips: %20s\n", ipRange2Str(128+1, 128+$srvX, $srvY)); } die("$0: math went wrong for robots\n") if $r != int($Robots/$Pairs); die("$0: math went wrong for servers\n") if $s != int($Servers/$Pairs); return 0 unless defined $Id; if ($Id =~ /^clts$/) { print(ipRange2Str(1, $rbtX, $rbtY)); } elsif ($Id =~ /^srvs$/) { print(ipRange2Str(129, 128+$srvX, $srvY)); } elsif ($Id =~ /^clt(\d+)$/) { my $id = $1; die("there are only $Pairs clients, cannot have $Id client\n") if $id > $Pairs; my $step = $rbtX/$Pairs; print(ipRange2Str(($id-1)*$step+1, $id*$step, $rbtY)); } elsif ($Id =~ /^srv(\d+)$/) { my $id = $1; die("there are only $Pairs servers, cannot have $Id server\n") if $id > $Pairs; my $step = $srvX/$Pairs; print(ipRange2Str(128+($id-1)*$step+1, 128+$id*$step, $srvY)); } else { die("$0: cannot parse `$Id'; expected something like clt1 or srv4\n"); } print("\n"); # if -t STDOUT; return 0; } sub countToB { my ($countTot) = @_; my $rangeX = xceil($countTot, 250); $rangeX = $Pairs*xceil($rangeX, $Pairs); my $rangeY = xceil($countTot, $rangeX); return ($rangeX, $rangeY); } sub ipRange2Str { my ($minX, $maxX, $maxY) = @_; return "10.$Bench.$minX-$maxX.1-$maxY"; } sub xprintf { my $fmt = shift; #printf(STDERR "$0: $fmt", @_); printf("\t$fmt", @_); } # try "ceil(700/0.7)" to see why xceil is needed sub xceil { my ($large, $small) = @_; my $c = ceil($large/$small); return ($c-1)*$small >= $large ? $c-1 : $c; } sub web2term { my $query = $ENV{QUERY_STRING}; return unless defined $query; open(STDERR, ">&STDOUT"); print("Content-type: text/plain\r\n\r\n"); @ARGV = ($query =~ /=([^&]+)/g); printf("./pmix2-ips.pl %s\n\n", join(' ', @ARGV)); } sub usage { return "Usage: $0 [cltId|srvId]\n"; } polygraph-4.3.2/tools/pmix3-ips.h2m0000644000175000017500000000013211335553726016532 0ustar testertester[DESCRIPTION] Prints IP addresses for robots and servers given request rate and bench id. polygraph-4.3.2/tools/pmix3-ips.pl0000755000175000017500000000701111546440450016456 0ustar testertester#!/usr/bin/perl -w # Web Polygraph http://www.web-polygraph.org/ # Copyright 2003-2011 The Measurement Factory # Licensed under the Apache License, Version 2.0 require 5.003; use strict; use POSIX; if (@ARGV == 1 && $ARGV[0] eq '--help') { print usage(); exit; } &web2term(); die(&usage()) unless 2 <= @ARGV && @ARGV <= 3; $SIG{__WARN__} = sub { print(STDERR &usage()); die $_[0] }; my $Quiet = @ARGV == 3; my ($Bench, $RR, $Id) = @ARGV; my $Pairs; exit(&main()); sub main { $Pairs = xceil($RR, 400); my $Robots = xceil($RR, 0.4); $Robots = $Pairs*xceil($Robots, $Pairs); my $Servers = ceil($Robots*0.1 + 500); $Servers = $Pairs*xceil($Servers, $Pairs); my ($rbtX, $rbtY) = &countToB($Robots); my ($srvX, $srvY) = &countToB($Servers); # must recalc (adjust) again! 
$Robots = $rbtX * $rbtY; $Servers = $srvX * $srvY; my ($r, $s) = ($Robots/$Pairs, $Servers/$Pairs); if (!$Quiet) { xprintf("bench: %6d\n", $Bench); xprintf("req.rate: %6d/sec (actual: %8.2f/sec)\n", $RR, $Robots*0.4); xprintf("PCs: %6dpairs\n", $Pairs); xprintf("robots: %6d (%4d/machine)\n", $Robots, $r); xprintf("servers: %6d (%4d/machine)\n", $Servers, $s); xprintf("\n"); xprintf("rbt_ips: %20s\n", ipRange2Str(1, $rbtX, $rbtY)); for (my $id=1; $id<=$Pairs; $id++) { my $step = $rbtX/$Pairs; xprintf("\t machine_%d_ips:\t %20s\n", $id, ipRange2Str(($id-1)*$step+1, $id*$step, $rbtY)); } xprintf("srv_ips: %20s\n", ipRange2Str(128+1, 128+$srvX, $srvY)); for (my $id=1; $id<=$Pairs; $id++) { my $step = $srvX/$Pairs; xprintf("\t machine_%d_ips:\t %20s\n", $id, ipRange2Str(128+($id-1)*$step+1, 128+$id*$step, $srvY)); } } die("$0: math went wrong for robots\n") if $r != int($Robots/$Pairs); die("$0: math went wrong for servers\n") if $s != int($Servers/$Pairs); return 0 unless defined $Id; if ($Id =~ /^clts$/) { print(ipRange2Str(1, $rbtX, $rbtY)); } elsif ($Id =~ /^srvs$/) { print(ipRange2Str(129, 128+$srvX, $srvY)); } elsif ($Id =~ /^clt(\d+)$/) { my $id = $1; die("there are only $Pairs clients, cannot have $Id client\n") if $id > $Pairs; my $step = $rbtX/$Pairs; print(ipRange2Str(($id-1)*$step+1, $id*$step, $rbtY)); } elsif ($Id =~ /^srv(\d+)$/) { my $id = $1; die("there are only $Pairs servers, cannot have $Id server\n") if $id > $Pairs; my $step = $srvX/$Pairs; print(ipRange2Str(128+($id-1)*$step+1, 128+$id*$step, $srvY)); } else { die("$0: cannot parse `$Id'; expected something like clt1 or srv4\n"); } print("\n"); # if -t STDOUT; return 0; } sub countToB { my ($countTot) = @_; my $rangeX = xceil($countTot, 250); $rangeX = $Pairs*xceil($rangeX, $Pairs); my $rangeY = xceil($countTot, $rangeX); return ($rangeX, $rangeY); } sub ipRange2Str { my ($minX, $maxX, $maxY) = @_; return "10.$Bench.$minX-$maxX.1-$maxY"; } sub xprintf { my $fmt = shift; #printf(STDERR "$0: $fmt", @_); printf("\t$fmt", @_); } # try "ceil(700/0.7)" to see why xceil is needed sub xceil { my ($large, $small) = @_; my $c = ceil($large/$small); return ($c-1)*$small >= $large ? 
$c-1 : $c; } sub web2term { my $query = $ENV{QUERY_STRING}; return unless defined $query; open(STDERR, ">&STDOUT"); print("Content-type: text/plain\r\n\r\n"); @ARGV = ($query =~ /=([^&]+)/g); printf("./pmix2-ips.pl %s\n\n", join(' ', @ARGV)); } sub usage { return "Usage: $0 [cltId|srvId]\n"; } sub suffix { my ($ord) = @_; return 'st' if $ord == 1; return 'nd' if $ord == 2; return 'rd' if $ord == 3; return 'th'; } polygraph-4.3.2/tools/cmp-lx.pl0000755000175000017500000001364411546440450016036 0ustar testertester#!/usr/bin/perl -w # Web Polygraph http://www.web-polygraph.org/ # Copyright 2003-2011 The Measurement Factory # Licensed under the Apache License, Version 2.0 use strict; use warnings; use Getopt::Long; sub isImportant($); my $scope = 'important'; my $precision = '10'; die(&usage()) unless GetOptions( 'precision=f' => \$precision, 'scope=s' => \$scope, 'help' => sub { print usage(); exit; } ) && ($scope eq 'important' || $scope eq 'all') && ($precision >= 0); # compute diff percentage reporting precision based .00s used by the user # XXX: does not work for scientific (1e6) precision format my $accuracy = length($precision) - length(int($precision)); $accuracy -= 1 if $accuracy > 0; my $pctWidth = 5 + $accuracy; # used for percents and labels my $pctFormat = sprintf('%%%d.%df', $pctWidth-1, $accuracy); my ($Fname1, $Fname2) = @ARGV or die(&usage()); my @keys = (); my %hists = (); my $Meas1 = &load($Fname1); my $Meas2 = &load($Fname2); my %seen = (); my $diffCount = 0; foreach my $key (@keys) { next if exists $seen{$key}; $seen{$key} = undef(); next unless ($scope eq 'all') || isImportant($key); my $m1 = $Meas1->{$key}; my $m2 = $Meas2->{$key}; my $res = undef(); if (!defined $m1) { next if $m2 !~ /^\d/ || $m2 <= 0; $res = "uniq $m2 in $Fname2"; # only report used meas } elsif (!defined $m2) { next if $m1 !~ /^\d/ || $m1 <= 0; $res = "uniq $m1 in $Fname1"; # only report used meas } else { if ($m1 =~ /^-?\d+(?:.\d*)?$/ && $m2 =~ /^-?\d+(?:.\d*)?$/) { # numbers my $diff = $m1 - $m2; my $min = $m1 < $m2 ? $m1 : $m2; my $max = $m1 < $m2 ? $m2 : $m1; next if $min <= 0 && $max <= 0; # undefined or zero #warn("$m1 $m2"); if ($min <= 0 && $max > 0) { $res = sprintf("%${pctWidth}s %3.2f vs %3.2f", 'inf', $m1, $m2) } else { my $rdiff = abs($diff/$min); # relative difference next if $rdiff < $precision/100; # ignore "minor" differences my $diffDir; if ($rdiff < 1e-6) { $diffDir = '='; } elsif ($m1 > $m2) { $diffDir = '>'; ++$diffCount; } else { $diffDir = '<'; ++$diffCount; } $res = sprintf("${pctFormat}%% %3.2f %s %3.2f", 100*$rdiff, $m1, $diffDir, $m2); } } else { # strings my $diffDir; if ($m1 eq $m2) { # identical strings next if $precision > 1e-6; $diffDir = '=='; } else { ++$diffCount; $diffDir = 'vs'; } $res = sprintf("%${pctWidth}s %6s %2s %6s", 'str', $m1, $diffDir, $m2); } } printf("%-40s %s\n", "$key:", $res); } exit $diffCount ? 
1 : 0; sub load($) { my $fname = shift; my $meas = {}; open(OF, "<$fname") or die("cannot read $fname: $!, stopped"); while () { if (/(^\S+).hist:$/) { loadHistogram($meas, $1, $fname, \*OF); } elsif (/:$/) { while () { last if /^\s*$/; } # skip tables } elsif (/(^\S+):\s+(\S+)$/) { addMeasurement($meas, $1, $2); } else { warn("$0: skipping unknown line format in $fname:\n$_\n"); } } close(OF); return $meas; } sub loadHistogram($) { my $meas = shift; my $name = shift; my $fname = shift; my $OF = shift; my $aggr_min; my $aggr_max; my $aggr_sum = 0; my $aggr_count = 0; my %bins; while (<$OF>) { if (/^\s*$/) { last; } elsif (/^#/) { next; } elsif (/^\s+(\S+)\s+(\S+)\s+(\S+)\s+(\S+)\s+(\S+)\s+(\S+)$/) { my $bin = $1; my $min = $2; my $max = $3; my $count = $4; my $contr = $5; my $acc = $6; my $sum = $bin * $count; # remove first and last bin(s) that contributed less than 1% next if $acc < 1 || $acc > 99; $aggr_min = $min if !defined($aggr_min) || $min < $aggr_min; $aggr_max = $max if !defined($aggr_max) || $max > $aggr_max; $aggr_sum += $sum; $aggr_count += $count; $bins{$bin} = 0 unless exists $bins{$bin}; $bins{$bin} += $count; } else { warn("$0: skipping unknown line format in $fname:\n$_\n"); } } if ($aggr_count > 0) { $hists{$name} = undef(); addMeasurement($meas, "$name.hist.min", $aggr_min); addMeasurement($meas, "$name.hist.max", $aggr_max); addMeasurement($meas, "$name.hist.mean", $aggr_sum / $aggr_count); addMeasurement($meas, "$name.hist.count", $aggr_count); my $i = 25; my $count = 0; foreach my $bin (sort keys %bins) { $count += $bins{$bin}; for (; ($count * 100/ $aggr_count >= $i) && ($i < 100); $i += 25) { addMeasurement($meas, "$name.hist.p$i", $bin); } last if $i >= 100; } } } sub addMeasurement($) { my $meas = shift; my $key = shift; my $value = shift; $meas->{$key} = $value; push(@keys, $key); } sub isImportant($) { my $key = shift; # errors are always important return 1 if $key =~ /err/; # .last measurements are not important return 0 if $key =~ /\.last$/; # not a histogram unless ($key =~ /\.hist\./) { # .min and .max measurements are not important return 0 if ($key =~ /\.min$/) || ($key =~ /\.max$/); # .mean blacklisted by .hist.mean if ($key =~ /^(.+)\.mean$/) { return 0 if exists $hists{$1}; } } return 0 if isRareEvent($key); return 1; } sub isRareEvent($) { my $key = shift; my @parts = split(/\./, $key); my ($name) = @parts; my $main_key; my @events = qw(basic offered hit miss cachable uncachable fill ims reload range head post put abort page ssl ftp 100_continue proxy_validations auth tunneled proxy_validation rep req); if (grep {$_ eq $name} @events) { @parts[@parts - 1] = 'count'; my $count_key = join('.', @parts); if ((exists $Meas1->{$count_key}) && (exists $Meas2->{$count_key}) && (exists $Meas1->{'xact.started'}) && (exists $Meas2->{'xact.started'})) { my $rate1 = $Meas1->{$count_key} / $Meas1->{'xact.started'}; my $rate2 = $Meas2->{$count_key} / $Meas2->{'xact.started'}; return 1 if ($rate1 < 0.1) && ($rate2 < 0.1); } } return 0; } sub usage { return "Usage: $0 [--precision=percent] [--scope=important|all] \n". "Options:\n". " --precision=percent difference threshold in percent, default: 10\n". " --scope=important|all which stats to compare, default: important\n"; } polygraph-4.3.2/tools/webaxe4-ips.man0000644000175000017500000000174611336340427017123 0ustar testertester.\" DO NOT MODIFY THIS FILE! It was generated by help2man 1.36. 
.TH POLYGRAPH-WEBAXE4-IPS "1" "February 2010" "polygraph-webaxe4-ips - Web Polygraph" "User Commands" .SH NAME polygraph-webaxe4-ips \- WebAxe\-4 address calculator .SH SYNOPSIS .B webaxe4-ips.pl [\fI--option\fR] ... \fI \fR[\fIcltId|srvId\fR] .SH DESCRIPTION Prints IP addresses for robots and servers given request rate and bench id. .SH OPTIONS .TP \fB\-\-route\fR show addresses in route\-friendly format .TP \fB\-\-client_side\fR.max_host_load load per polyclt process .TP \fB\-\-server_side\fR.max_host_load load per polysrv process .TP \fB\-\-client_side\fR.max_agent_load load per robot agent .TP \fB\-\-server_side\fR.max_agent_load load per server agent .SH COPYRIGHT Copyright \(co 2003-2006 The Measurement Factory, Inc. .SH "SEE ALSO" .BR polygraph (7) \- general information and a list of programs .B \%http://www.web-polygraph.org/ \- project web site polygraph-4.3.2/tools/beepmon.pl0000755000175000017500000001735311546440450016264 0ustar testertester#!/usr/bin/perl -w # Web Polygraph http://www.web-polygraph.org/ # Copyright 2003-2011 The Measurement Factory # Licensed under the Apache License, Version 2.0 require 5.003; use strict; my $TheSelect; my $TheHandler; package Client; use strict; use Fcntl; use IO::Socket; my @ConnectingClients = (); sub new { my ($proto, $addr, $sock) = @_; die() unless @_ == 2 || @_ == 3; my $type = ref($proto) || $proto; my $this = {}; bless ($this, $type); $this->{theAddr} = $addr; $this->{theSock} = $sock; $this->{theBufIn} = ''; $this->{doReconnect} = ! defined($sock); if ($this->{theSock}) { $this->noteConnected(); } else { $this->connectLater(); } return $this; } sub action { my $this = shift; return unless defined $this->{theAction}; my $action = $this->{theAction}; $this->{theAction} = undef(); &{$action}($this); } sub connect { my $this = shift; $this->log("connecting...", 3); my $s = $this->{theSock} = new IO::Socket(); $this->mydie($!) unless defined $s; my ($remote) = ($this->{theAddr} =~ /^(.*):/); my ($port) = ($this->{theAddr} =~ /:(\d+)$/); $this->mydie("malformed server address") unless $remote && $port; my $iaddr = inet_aton($remote) || $this->mydie("no host: $remote"); my $paddr = sockaddr_in($port, $iaddr); my $proto = getprotobyname('tcp'); $s->socket(PF_INET, SOCK_STREAM, $proto) || $this->mydie("socket: $!"); if (!$s->connect($paddr)) { $this->mydie() unless $this->{doReconnect}; $this->log("connect failed, will retry"); $this->connectLater(); } else { $this->log("connected", 3); $this->noteConnected(); } } sub noteConnected { my $this = shift; my $s = $this->{theSock} || die(); $s->print("RPY 0 0 . 0 11\r\n"); $s->print(''); $s->print("END\r\n"); my $start = "\n". " \n". "\n"; $s->printf("MSG 0 1 . 
11 %d\r\n", length($start)); $s->print($start); $s->print("END\r\n"); fcntl($s, F_SETFL, O_NONBLOCK) || $this->mydie("fcntl: $!"); $TheSelect->add([$s, $this]); } sub noteReadReady { my $this = shift; my $s = $this->{theSock}; if (my @newText = $s->getlines()) { $this->{theBufIn} .= join('', @newText); $this->noteContent(); } else { $TheSelect->remove($s); $s->close(); $this->{theSock} = undef(); if ($this->{doReconnect}) { $this->log("disconnected, will try to reconnect"); $this->connectLater(); } else { $this->log("disconnected"); } } } sub noteContent { my $this= shift; while ($this->{theBufIn} =~ s|]*)/>||m) { my $cnt = $&; $this->{theBufIn} = $'; $this->log("received: $cnt", 2); $this->handle($cnt); chomp($cnt); print(STDOUT "$cnt\n"); } } sub handle { my ($this, $msg) = @_; return unless defined $TheHandler; my %savedENV = %ENV; while (length($msg)) { $msg =~ s|^[^<]+||; while ($msg =~ s~^.*?<(\w+)\s+(\w+)=(?:\'([^\']*)\'|\"([^\"]*)\")\s*~<$1 ~m) { $ENV{"$1__$2"} = $3; # print("\$ENV{\"$1__$2\"} = $3\n"); } while ($msg =~ s|^.*?<(\w+)\s*/>||m) { $ENV{"$1__"} = $1; # print("\$ENV{\"$1__\"} = $1;\n"); } } system($TheHandler) == 0 or $this->mydie("$TheHandler failed: $!"); %ENV = %savedENV; } sub connectLater { my $this = shift; $this->{theAction} = \&connect; push @Client::ConnectingClients, $this; } sub log { my ($this, $msg, $lvl) = @_; &Log($this->{theAddr}. ": $msg", $lvl); } sub mydie { my $this = shift; $this->log('fatal error: '. join(' ', @_), -1); die("\n"); } 1; use IO::Select; my $VerbLevel = 1; my @Clients = (); my @TheDoormanAddrs = (); my $TheListenAddr; &getOpts(); die(&usage()) unless @TheDoormanAddrs || $TheListenAddr; $SIG{__WARN__} = sub { print(STDERR &usage()); die $_[0] }; exit(&main()); sub main { $TheSelect = new IO::Select; foreach my $addr (map { (&range2arr($_)) } @TheDoormanAddrs) { push @Clients, new Client($addr); } die() unless @Clients == @Client::ConnectingClients; &startServer($TheListenAddr) if $TheListenAddr; while (1) { &checkIo(); &checkClients(); select(undef, undef, undef, 1) unless $TheSelect->count(); } } sub startServer { my $addr = shift; my $s = new IO::Socket::INET(LocalAddr => $addr, Listen => 1024, Reuse => 1, Proto => "tcp"); die("failed to listen at $addr: $!\n") unless defined $s; fcntl($s, F_SETFL, O_NONBLOCK) || die("fcntl: $!"); $TheSelect->add([$s, undef()]); &Log("listening at $addr", 1); } sub checkIo { return unless $TheSelect->count(); &Log("waiting for activity...", 3); my $timeout = @Client::ConnectingClients ? 1 : undef(); foreach my $h ($TheSelect->can_read($timeout)) { my ($sock, $clt) = @{$h}; $clt ? $clt->noteReadReady($sock) : &accept($sock); } } sub checkClients { my @cltsToCheck = @Client::ConnectingClients; @Client::ConnectingClients = (); foreach my $clt (@cltsToCheck) { $clt->action(); } } sub accept { my $listSock = shift; my $sock= $listSock->accept(); die("failed to accept a connection at $TheListenAddr: $!\n") unless $sock; my $them = sprintf('%s:%d', $sock->peerhost(), $sock->peerport()); &Log("accepted a connection from $them", 2); my $clt = new Client($them, $sock); push @Clients, $clt; } sub Log { my ($msg, $level) = @_; $msg .= "\n" unless $msg =~ /\n$/; $msg = "$0: $msg" unless $msg =~ /:\s/; print(STDERR $msg) if !defined($level) || ($level <= $VerbLevel); } sub range2arr { my $range = shift; my @bins = (); while ($range =~ s/([.:])?([^.:]+)//) { my $sep = $1 || ''; my $spec = $2; my ($min, $max) = $spec =~ /-/ ? 
($spec =~ /^(\d+)-(\d+)$/) : ($spec =~ /^(\d+)$/); return undef unless defined $min; $max = $min if !defined($max); push @bins, { min=>$min, max=>$max, pos=>$min, sep=>$sep }; } my @res = (); while (1) { push @res, &curAddr(\@bins); } continue { last unless nextIter(\@bins); } return @res; } sub nextIter { my ($bins, $level) = @_; $level = $#{$bins} if !defined $level; return undef if $level < 0; my $b = $bins->[$level]; if ($b->{pos} >= $b->{max}) { $b->{pos} = $b->{min}; return &nextIter($bins, $level-1); } $b->{pos}++; return 1; } sub curAddr { my $bins = shift; my $addr = ''; for (my $i = 0; $i <= $#{$bins}; ++$i) { my $b = $bins->[$i]; $addr .= $b->{sep}; $addr .= sprintf("%d", $b->{pos}); } return $addr; } sub getOpts { my @newOpts = (); for (my $i = 0; $i <= $#ARGV; ++$i) { my $opt = $ARGV[$i]; if ($opt !~ /^--/) { push @newOpts, $opt; next; } if ($opt =~ /^--recv_from?$/) { my $addr = $ARGV[++$i]; die("$0: --recv_from requires an argument\n") if !defined($addr); push (@TheDoormanAddrs, $addr); next; } if ($opt eq '--listen_at') { my $addr = $ARGV[++$i]; die("$0: --listen_at requires an argument\n") if !defined($addr); die("$0: only one --listen option is allowed\n") if defined($TheListenAddr); $TheListenAddr = $addr; next; } if ($opt eq '--handler') { $TheHandler = $ARGV[++$i]; die("$0: --handler requires an argument\n") if !defined($TheHandler); next; } if ($opt eq '--verb_lvl') { $VerbLevel = $ARGV[++$i]; die("$0: --verb_lvl requires an integer argument\n") if !defined($VerbLevel) || ($VerbLevel !~ /^\-?\d+/); next; } if ($opt eq '--help') { print usage(); exit; } die("$0: unknown option: $opt\n"); } push (@TheDoormanAddrs, @newOpts); } sub usage { return "Usage: $0 [--option] ... [recv_from_addr ...]\n\n". "Options:\n". " --recv_from where to connect to receieve beep messages\n". " --listen_at where to listen for beep messages\n". " --handler execute cmd for every message received\n". " --verb_lvl verbosity level for stderr log\n\n". "At least one `recv_from' or `listen_at' address must be given\n"; } polygraph-4.3.2/tools/msl_test.c0000644000175000017500000001643711546440450016277 0ustar testertester /* Web Polygraph http://www.web-polygraph.org/ * Copyright 2003-2011 The Measurement Factory * Licensed under the Apache License, Version 2.0 */ #include #include #include #include #include #include #include #include #include #include #include #include #include #include #include #include #include #include static int createTCPsession(struct sockaddr_in); static void printPacket(void *); static void parseOptions(int argc, char *argv[]); static void usage(const char *prg_name); static void breakupPacket(void *, struct ether_header **, struct ip **, struct tcphdr **); int packetIsOurSyn(void *buf); int packetIsOurFinAck(void *buf); int packetIsEirSynAck(void *buf); #define READ_SIZE 1024 #define HTTP_REQUEST "GET / HTTP/1.0\n\n" static char *arg_interface = NULL; static struct sockaddr_in Dest; static struct sockaddr_in Source; /* * XXX Ugliness Warning: The .c files included below have #define macros * that are used in this file. TCPSYNFLAG, etc. 
*/ #define COMPILING_MSL_TEST #ifdef __linux__ #include "msl_test_linux.c" #else #include "msl_test_bsd.c" #endif static void usage(const char *prg_name) { fprintf(stderr, "Usage: %s -i interface-name -s src-host -d dst-host -p dst-port\n" " -i interface-name The name of the interface which to listen.\n" " -s src-host The IP address of the local host\n" " -d dst-host The IP address of the destination.\n" " -p dst-port Port which to connect to.\n" "You must be root to run this program.\n", prg_name); exit(1); } static void parseOptions(int argc, char *argv[]) { extern char *optarg; int c; memset(&Dest, '\0', sizeof(Dest)); memset(&Source, '\0', sizeof(Source)); Dest.sin_family = AF_INET; Source.sin_family = AF_INET; while ((c = getopt(argc, argv, "i:s:d:p:?")) != -1) { switch (c) { case 'i': arg_interface = strdup(optarg); break; case 's': Source.sin_addr.s_addr = inet_addr(optarg); break; case 'd': Dest.sin_addr.s_addr = inet_addr(optarg); break; case 'p': Dest.sin_port = htons(atoi(optarg)); break; case '?': default: usage(argv[0]); break; } } } int main(int argc, char **argv) { void *pkt = NULL; int etherlen = 0; setbuf(stdout, NULL); setbuf(stderr, NULL); if (argc == 9) { parseOptions(argc, argv); } else { usage(argv[0]); } if (getuid()) { printf("You must be root to run this program!\n"); return (-1); } (void) deleteRoute(Dest); if (raw_open(arg_interface) < 0) return 1; if (createTCPsession(Dest)) return 1; /* * now we want to find our SYN packet so we can * replay it again. */ if (NULL == (pkt = find_our_packet(ðerlen))) { fprintf(stderr, "Failed to capture our SYN packet\n"); return 1; } if (addRoute(Dest) < 0) { printf("Unable to create blackhole route\n"); return 1; } printf("Sending a %d byte packet: ", etherlen); printPacket(pkt); msl_probe(pkt, etherlen); if (deleteRoute(Dest) < 0) printf("Unable to remove blackhole route\n"); raw_close(); return 0; } static void breakupPacket(void *buf, struct ether_header **e, struct ip **i, struct tcphdr **t) { int ip_hl; *e = (struct ether_header *) buf; *i = NULL; *t = NULL; if (ETHERTYPE_IP != ntohs((*e)->ether_type)) return; *i = (struct ip *) (*e + 1); if (IPPROTO_TCP != (*i)->ip_p) return; ip_hl = (*i)->ip_hl << 2; *t = (struct tcphdr *) ((char *) *i + ip_hl); } void printPacket(void *buf) { struct ether_header *e; struct ip *i; struct tcphdr *t; breakupPacket(buf, &e, &i, &t); if (NULL == e) { printf("Not an Ethernet Packet\n"); return; } if (NULL == i) { printf("Not an IP Packet\n"); return; } if (NULL == t) { printf("Not a TCP Packet\n"); return; } if (80 != ntohs(TCPSOURCEPORT(t)) && 80 != ntohs(TCPDESTPORT(t))) return; printf("%s.%d > ", inet_ntoa(i->ip_src), ntohs(TCPSOURCEPORT(t))); printf("%s.%d\t", inet_ntoa(i->ip_dst), ntohs(TCPDESTPORT(t))); if (TCPSYNFLAG(t)) printf("SYN "); if (TCPFINFLAG(t)) printf("FIN "); if (TCPPUSHFLAG(t)) printf("PUSH "); if (TCPACKFLAG(t)) printf("ACK "); if (TCPRSTFLAG(t)) printf("RST "); printf("\n"); } int createTCPsession(struct sockaddr_in S) { int sock; socklen_t nl; char buf[READ_SIZE]; if ((sock = socket(AF_INET, SOCK_STREAM, 0)) < 0) return (1); fprintf(stderr, "Connecting to %s:%d\n", inet_ntoa(S.sin_addr), ntohs(S.sin_port)); if (connect(sock, (struct sockaddr *) &S, sizeof(S))) { perror("connect"); return (1); } nl = sizeof(Source); getsockname(sock, (struct sockaddr *) &Source, &nl); fprintf(stderr, "Local port is %d\n", ntohs(Source.sin_port)); fprintf(stderr, "Writing HTTP request\n"); write(sock, HTTP_REQUEST, strlen(HTTP_REQUEST)); fprintf(stderr, "Reading HTTP response\n"); while 
(read(sock, buf, READ_SIZE)); fprintf(stderr, "Closing HTTP socket\n"); close(sock); return (0); } int packetIsOurSyn(void *buf) { struct ether_header *e; struct ip *i; struct tcphdr *t; breakupPacket(buf, &e, &i, &t); if (NULL == e) return 2; if (NULL == i) return 3; if (i->ip_src.s_addr != Source.sin_addr.s_addr) return 3; if (i->ip_dst.s_addr != Dest.sin_addr.s_addr) return 3; if (NULL == t) return 4; if (TCPFINFLAG(t)) return 4; if (TCPPUSHFLAG(t)) return 4; if (TCPACKFLAG(t)) return 4; if (TCPRSTFLAG(t)) return 4; if (!TCPSYNFLAG(t)) return 4; if (TCPSOURCEPORT(t) != Source.sin_port) return 9; if (TCPDESTPORT(t) != Dest.sin_port) return 10; return 0; } /* * This function returns ZERO when the packet in 'buf' is our * TCP FIN+ACK from the connection created by createTCPsession() */ int packetIsOurFinAck(void *buf) { struct ether_header *e; struct ip *i; struct tcphdr *t; breakupPacket(buf, &e, &i, &t); if (NULL == e) return 2; if (NULL == i) return 3; if (i->ip_src.s_addr != Source.sin_addr.s_addr) return 3; if (i->ip_dst.s_addr != Dest.sin_addr.s_addr) return 3; if (NULL == t) return 4; if (TCPSYNFLAG(t)) return 4; if (TCPPUSHFLAG(t)) return 4; if (TCPRSTFLAG(t)) return 4; if (!TCPACKFLAG(t)) return 4; if (!TCPFINFLAG(t)) return 4; if (TCPSOURCEPORT(t) != Source.sin_port) return 4; if (TCPDESTPORT(t) != Dest.sin_port) return 4; return 0; } int packetIsEirSynAck(void *buf) { struct ether_header *e; struct ip *i; struct tcphdr *t; breakupPacket(buf, &e, &i, &t); if (NULL == e) return 2; if (NULL == i) return 3; if (i->ip_src.s_addr != Dest.sin_addr.s_addr) return 3; if (i->ip_dst.s_addr != Source.sin_addr.s_addr) return 3; if (NULL == t) return 4; if (TCPFINFLAG(t)) return 4; if (TCPPUSHFLAG(t)) return 4; if (TCPRSTFLAG(t)) return 4; if (!TCPACKFLAG(t)) return 4; if (!TCPSYNFLAG(t)) return 4; if (TCPSOURCEPORT(t) != Dest.sin_port) return 4; if (TCPDESTPORT(t) != Source.sin_port) return 4; return 0; } polygraph-4.3.2/tools/msl_test_bsd.c0000644000175000017500000001642211546440450017121 0ustar testertester /* Web Polygraph http://www.web-polygraph.org/ * Copyright 2003-2011 The Measurement Factory * Licensed under the Apache License, Version 2.0 */ /* this file should only be compiled as a part of msl_test.c */ #ifndef COMPILING_MSL_TEST #error make msl_test instead #else #include #include #include #define TCPSYNFLAG(tcphdr) (tcphdr->th_flags & TH_SYN) #define TCPFINFLAG(tcphdr) (tcphdr->th_flags & TH_FIN) #define TCPPUSHFLAG(tcphdr) (tcphdr->th_flags & TH_PUSH) #define TCPACKFLAG(tcphdr) (tcphdr->th_flags & TH_ACK) #define TCPRSTFLAG(tcphdr) (tcphdr->th_flags & TH_RST) #define TCPSOURCEPORT(tcphdr) (tcphdr->th_sport) #define TCPDESTPORT(tcphdr) (tcphdr->th_dport) static int raw_open(char *interface); static void msl_probe(void *, int); static int deleteRoute(struct sockaddr_in); static int addRoute(struct sockaddr_in); static int chooseBPFint(void); static void *find_our_packet(int *len); #define SEQ_ADD_ROUTE 100 #define SEQ_DELETE_ROUTE 200 #define SIZEOF_BPF_HDR (sizeof(struct bpf_hdr) <= 20 ? 
18 : \ sizeof(struct bpf_hdr)) #define MAX_BPF_BUFLEN 8192 static int bpf_sock = -1; static unsigned int bpf_buflen = MAX_BPF_BUFLEN; static void *bpfWalkStart = NULL; static void *bpfWalkCur = NULL; void * bpfWalkInit(void *start, int *len) { struct bpf_hdr *b; char *pkt; assert(NULL == bpfWalkCur); bpfWalkStart = bpfWalkCur = b = start; pkt = (void *) b + b->bh_hdrlen; *len = b->bh_caplen; return pkt; } void bpfWalkEnd(void) { bpfWalkStart = bpfWalkCur = NULL; } /* sorry, this function is really ugly */ void * bpfWalkNext(int *len) { struct bpf_hdr *b = bpfWalkCur; char *pkt; assert(NULL != bpfWalkCur); if (b->bh_hdrlen == 0) { bpfWalkStart = bpfWalkCur = NULL; return NULL; } bpfWalkCur += BPF_WORDALIGN(b->bh_hdrlen + b->bh_caplen); if (bpfWalkCur >= (bpfWalkStart + bpf_buflen)) { bpfWalkStart = bpfWalkCur = NULL; return NULL; } b = bpfWalkCur; if (b->bh_hdrlen == 0) { bpfWalkStart = bpfWalkCur = NULL; return NULL; } pkt = (void *) b + b->bh_hdrlen; *len = b->bh_caplen; return pkt; } int addRoute(struct sockaddr_in dst) { struct { struct rt_msghdr rtm; struct sockaddr_in host; struct sockaddr_in localhost; } rtmsg; int route; int seq = SEQ_ADD_ROUTE; int msglen; if ((route = socket(PF_ROUTE, SOCK_RAW, 0)) < 0) { perror("socket: PF_ROUTE"); return -1; } msglen = sizeof(rtmsg); memset(&rtmsg, 0, msglen); rtmsg.host.sin_family = AF_INET; rtmsg.host.sin_addr.s_addr = dst.sin_addr.s_addr; rtmsg.host.sin_len = sizeof(struct sockaddr_in);; rtmsg.localhost.sin_family = AF_INET; rtmsg.localhost.sin_addr.s_addr = inet_addr("127.0.0.1"); rtmsg.localhost.sin_len = sizeof(struct sockaddr_in); rtmsg.rtm.rtm_msglen = msglen; rtmsg.rtm.rtm_version = RTM_VERSION; rtmsg.rtm.rtm_type = RTM_ADD; rtmsg.rtm.rtm_flags = RTF_UP | RTF_STATIC | RTA_GATEWAY | RTF_HOST | RTF_BLACKHOLE; rtmsg.rtm.rtm_addrs = RTA_DST | RTA_GATEWAY; rtmsg.rtm.rtm_pid = getpid(); rtmsg.rtm.rtm_seq = seq; if (write(route, &rtmsg, msglen) < 0) { perror("write: route"); return -1; } close(route); return 0; } int deleteRoute(struct sockaddr_in dst) { int route; struct { struct rt_msghdr rtm; struct sockaddr_in host; } rtmsg; int seq = SEQ_DELETE_ROUTE; int msglen; if ((route = socket(PF_ROUTE, SOCK_RAW, 0)) < 0) { perror("socket: PF_ROUTE"); return -1; } msglen = sizeof(rtmsg); memset(&rtmsg, 0, msglen); rtmsg.host.sin_family = AF_INET; rtmsg.host.sin_addr.s_addr = dst.sin_addr.s_addr; rtmsg.host.sin_len = sizeof(struct sockaddr_in);; rtmsg.rtm.rtm_msglen = msglen; rtmsg.rtm.rtm_version = RTM_VERSION; rtmsg.rtm.rtm_type = RTM_DELETE; rtmsg.rtm.rtm_addrs = RTA_DST; rtmsg.rtm.rtm_pid = getpid(); rtmsg.rtm.rtm_seq = seq; if (write(route, (char *) &rtmsg, msglen) < 0) { perror("write: route"); return -1; } close(route); return 0; } void msl_probe(void *frame, int framelen) { char buf[MAX_BPF_BUFLEN]; struct timeval start; struct timeval now; struct timeval last; int nfds = bpf_sock + 1; int state = 0; gettimeofday(&start, NULL); last = start; fprintf(stderr, "Probing"); while (0 == state) { struct timeval timeout; fd_set readfds; gettimeofday(&now, NULL); if (now.tv_sec - last.tv_sec > 0) { int x; fprintf(stderr, "."); x = write(bpf_sock, frame, framelen); if (x < 0) { perror("write: bpf"); return; } last = now; } FD_ZERO(&readfds); FD_SET(bpf_sock, &readfds); timeout.tv_sec = 1; timeout.tv_usec = 0; if (select(nfds, &readfds, NULL, NULL, &timeout)) { char *p; int l; memset(buf, '\0', bpf_buflen); if (read(bpf_sock, buf, bpf_buflen) < 0) { perror("read: bpf"); return; } for (p = bpfWalkInit(buf, &l); p; p = bpfWalkNext(&l)) { if (0 == 
packetIsEirSynAck(p)) { state++; break; } } } bpfWalkEnd(); } fprintf(stderr, "\n"); gettimeofday(&now, NULL); printf("TCP TIME_WAIT of %d\n", (int) (now.tv_sec - start.tv_sec)); } int raw_open(char *interface) { int bpf; struct ifreq *ifreq = NULL; struct timeval rto; unsigned char yes = 1; bpf = chooseBPFint(); ifreq = (struct ifreq *) malloc(sizeof(struct ifreq)); strcpy(ifreq->ifr_name, interface); if (ioctl(bpf, BIOCSETIF, ifreq)) { perror("BIOCSETIF"); return -1; } free(ifreq); if (ioctl(bpf, BIOCGBLEN, &bpf_buflen)) { perror("BIOCGBLEN"); return -1; } assert(bpf_buflen <= MAX_BPF_BUFLEN); fprintf(stderr, "BPF buffer size is %d\n", bpf_buflen); rto.tv_sec = 1; rto.tv_usec = 0; #if 0 if (ioctl(bpf, BIOCSRTIMEOUT, &rto)) { perror("BIOCSRTIMEOUT"); return -1; } #endif if (ioctl(bpf, BIOCIMMEDIATE, &yes)) { perror("BIOCIMMEDIATE"); return -1; } if (ioctl(bpf, BIOCFLUSH, NULL)) { perror("BIOCFLUSH"); return -1; } return (bpf_sock = bpf); } int chooseBPFint(void) { int bpf, i; char p[32]; for (i = 0; i < 16; i++) { snprintf(p, 32, "/dev/bpf%d", i); bpf = open(p, O_RDWR); if (bpf >= 0) { fprintf(stderr, "Using %s\n", p); return bpf; } perror(p); } return -1; } static void raw_close(void) { assert(bpf_sock > -1); close(bpf_sock); } static void * find_our_packet(int *len) { char buf[MAX_BPF_BUFLEN]; static void *synbuf = NULL; int state = 0; int l; int count = 20; fprintf(stderr, "Reading our packets"); while (count-- && 2 != state) { void *p = buf; memset(buf, '\0', MAX_BPF_BUFLEN); l = read(bpf_sock, buf, bpf_buflen); if (l < 0) { perror("find_our_packet: read"); return NULL; } for (p = bpfWalkInit(buf, &l); p; p = bpfWalkNext(&l)) { fprintf(stderr, "."); switch (state) { case 0: /* looking for a SYN */ if (0 == packetIsOurSyn(p)) { state++; synbuf = malloc(l); memcpy(synbuf, p, l); *len = l; } break; case 1: /* looking for a FIN+ACK */ if (0 == packetIsOurFinAck(p)) state++; break; default: assert(0); break; } if (2 == state) break; } bpfWalkEnd(); } fprintf(stderr, "\n"); return synbuf; } #endif /* COMPILING_MSL_TEST */ polygraph-4.3.2/tools/beepmon.man0000644000175000017500000000176111336340427016415 0ustar testertester.\" DO NOT MODIFY THIS FILE! It was generated by help2man 1.36. .TH POLYGRAPH-BEEPMON "1" "February 2010" "polygraph-beepmon - Web Polygraph" "User Commands" .SH NAME polygraph-beepmon \- forwards BEEP messages to an external program .SH SYNOPSIS .B beepmon.pl [\fI--option\fR] ... [\fIrecv_from_addr \fR...] .SH DESCRIPTION Connects to specified BEEP servers, starts channel #1, receives BEEP messages, and forwards message content to an external program. .SH OPTIONS .TP \fB\-\-recv_from\fR where to connect to receieve beep messages .TP \fB\-\-listen_at\fR where to listen for beep messages .TP \fB\-\-handler\fR execute cmd for every message received .TP \fB\-\-verb_lvl\fR verbosity level for stderr log .PP At least one `recv_from' or `listen_at' address must be given .SH COPYRIGHT Copyright \(co 2003-2006 The Measurement Factory, Inc. .SH "SEE ALSO" .BR polygraph (7) \- general information and a list of programs .B \%http://www.web-polygraph.org/ \- project web site polygraph-4.3.2/tools/cmp-lx.h2m0000644000175000017500000000137711335553726016115 0ustar testertester[DESCRIPTION] Cmp-lx compares results from two tests and prints stats that differ significantly. With important scope some stats are ignored. 
The following events are not important: *.last *.min and *.max except for *.hist.min and *.hist.max *.mean if there is corresponding *.hist.mean rare events Histogram and errors are important. Rare events are events that happened in less than 0.1% of the cases where they could have occurred. Measurements related to such events are not important. Event rate is calculated using xact.started measurement as total count. List of events: basic offered hit miss cachable uncachable fill ims reload range head post put abort page ssl ftp 100_continue proxy_validations auth tunneled proxy_validation rep req polygraph-4.3.2/tools/cmp-lx.man0000644000175000017500000000266711336340427016176 0ustar testertester.\" DO NOT MODIFY THIS FILE! It was generated by help2man 1.36. .TH POLYGRAPH-CMP-LX "1" "February 2010" "polygraph-cmp-lx - Web Polygraph" "User Commands" .SH NAME polygraph-cmp-lx \- log comparison tool .SH SYNOPSIS .B cmp-lx.pl [\fI--precision=percent\fR] [\fI--scope=important|all\fR] \fI \fR .SH DESCRIPTION Cmp-lx compares results from two tests and prints stats that differ significantly. With important scope some stats are ignored. The following events are not important: *.last *.min and *.max except for *.hist.min and *.hist.max *.mean if there is corresponding *.hist.mean rare events Histogram and errors are important. Rare events are events that happened in less than 0.1% of the cases where they could have occurred. Measurements related to such events are not important. Event rate is calculated using xact.started measurement as total count. List of events: basic offered hit miss cachable uncachable fill ims reload range head post put abort page ssl ftp 100_continue proxy_validations auth tunneled proxy_validation rep req .SH OPTIONS .TP \fB\-\-precision\fR=\fIpercent\fR difference threshold in percent, default: 10 .TP \fB\-\-scope\fR=\fIimportant\fR|all which stats to compare, default: important .SH COPYRIGHT Copyright \(co 2003-2006 The Measurement Factory, Inc. 
.SH "SEE ALSO" .BR polygraph (7) \- general information and a list of programs .B \%http://www.web-polygraph.org/ \- project web site polygraph-4.3.2/configure.in0000644000175000017500000005622011546442314015445 0ustar testertesterdnl dnl @configure_input@ dnl dnl Configuration input file for Web Polygraph dnl AC_INIT([Web Polygraph],[4.3.2],[],[polygraph]) AC_CONFIG_SRCDIR([src/base/polygraph.h]) AC_CONFIG_AUX_DIR(cfgaux) AM_INIT_AUTOMAKE([1.5 nostdinc]) AM_MAINTAINER_MODE AC_CONFIG_HEADERS([config.h]) AC_PREFIX_DEFAULT(/usr/local) AH_TOP( [#ifndef POLYGRAPH__CONFIG_H #define POLYGRAPH__CONFIG_H /* here are some typedefs that configure script might be missing */ #undef rlim_t]) AH_BOTTOM( [#include "post-config.h" #endif /* POLYGRAPH__CONFIG_H */]) dnl general stuff used by other checks AH_VERBATIM(CONFIG_HOST_TYPE, [/* host type from configure */ #define CONFIG_HOST_TYPE "-unknown-"]) AC_CANONICAL_HOST AC_DEFINE_UNQUOTED(CONFIG_HOST_TYPE, "$host") dnl add polygraph- prefix and strip .pl extension during installation program_transform_name="s/^/polygraph-/; s/\\.pl\$$//; $program_transform_name" AC_LANG([C++]) dnl save pre-set values (if any) for variables that are dnl created by autoconf PRESET_CXXFLAGS="$CXXFLAGS" PRESET_LDFLAGS="$LDFLAGS" dnl AC_CONFIG_SUBDIRS(src/Hapy) dnl check for programs AC_PROG_CXX AC_PROG_CXXCPP AC_PROG_INSTALL dnl to be removed once all Makefiles are libtoolized AC_PATH_PROG(AR, ar, ar) AR_R="$AR r" AC_SUBST(AR_R) dnl user can overwrite this default with --enable-shared AC_DISABLE_SHARED AC_PROG_LIBTOOL AC_PATH_PROG(HELP2MAN, help2man) AM_CONDITIONAL(ENABLE_MANPAGES_GEN, [test -n "$HELP2MAN"]) # change default extention for source files ac_ext=cc dnl check if alleged C++ compiler understands C++ AC_MSG_CHECKING(whether the C++ compiler ($CXX) is a C++ compiler) AC_TRY_COMPILE([ #define Concat(a,b) a ## b struct T { virtual ~T(); virtual int m() const = 0; mutable bool a; }; template struct C: public P { C(int): c(0) {} int m() const; int c; }; template int C

::m() const { Concat(re,turn) c; } inline int test_inline() { return 0; } ],[ // this is main()'s body static C ct(1); return 0; ],[ AC_MSG_RESULT(yes) ],[ AC_MSG_RESULT(no) AC_MSG_ERROR(the compiler ($CXX) failed to pass a simple C++ test; check config.log for details) ]) dnl check for optional features AC_SUBST(std_include) AC_ARG_WITH(std-include, AC_HELP_STRING([--with-std-include=DIR], [where to find standard C++ headers]), [ std_include=`echo -I$withval | sed 's/:/ -I/g'` CPPFLAGS="$CPPFLAGS $std_include" ], [ std_include='' ] ) dnl adjust program options OLD_CXXFLAGS=$CXXFLAGS if test -z "$PRESET_CXXFLAGS" then if test "x$GXX" = xyes then [ # reset to preferred options # replace -O? with -O3 CXXFLAGS=`echo $CXXFLAGS | sed 's/-O[0-9]*/-O3/'`; # enable useful warnings CXXFLAGS="$CXXFLAGS -Wall -Wwrite-strings -Woverloaded-virtual" # custom host-dependent tuning case "$host" in alpha-*) # we get random coredumps with g++ -O on alpha-dec-osf4 # g++ -O3 warns about being buggy on alpha-*-freebsd4.* echo deleting -On on $host CXXFLAGS=`echo $CXXFLAGS | sed -e 's/-O[0-9]* *//'` ;; *-linux-*) # -O2,3 seems to produce coredumps on RH and MDK linux echo enforcing -O1 on $host CXXFLAGS=`echo $CXXFLAGS | sed -e 's/-O[0-9]* */-O1 /'` ;; esac ] fi fi if test "x$PRESET_CXXFLAGS" != "x$CXXFLAGS" then AC_MSG_CHECKING(whether custom $CXX flags work) AC_TRY_COMPILE( [ ],[ return 0; ],[ AC_MSG_RESULT(probably) echo "changing $CXX flags to $CXXFLAGS" ],[ AC_MSG_RESULT(no) CXXFLAGS=$OLD_CXXFLAGS echo "leaving $CXX flags at $CXXFLAGS" ] ) fi dnl check whether compiler is ok with passing -rdynamic to linker dnl that flag is required with GCC for linking executables that call dlopen if test "x$GXX" = "xyes" then AC_MSG_CHECKING(whether -rdynamic $CXX flag works) AC_TRY_LINK( [ ],[ return 0; ],[ AC_MSG_RESULT(probably) LDFLAG_RDYNAMIC=-rdynamic ],[ AC_MSG_RESULT(no) LDFLAG_RDYNAMIC="" ] ) fi dnl check for libraries AC_SEARCH_LIBS(gethostbyname, nsl) AC_CHECK_LIB(socket, main) AC_CHECK_LIB(m, main) AC_MSG_CHECKING(whether to link statically) AC_ARG_WITH(static-linking, [ --with-static-linking link binaries statically], [ case "$withval" in no) AC_MSG_RESULT(no) ;; *) AC_MSG_RESULT(yes) LDFLAGS="-static $LDFLAGS" esac ],[ AC_MSG_RESULT(no) ] ) dnl check if compiler can find C++ headers AC_CHECK_HEADER(iostream, [], AC_MSG_WARN([ Failure to find 'iostream' header file indicates a compiler installation problem; You may want to use --enable-std-include option to help your compiler to find directories with standard C++ include files.]) ) AC_CACHE_CHECK(whether C++ std:: namespace works, ac_cv_std_namespace_works, [ AC_TRY_COMPILE([ #include #include ],[ std::cout << std::endl; return 0; ],[ ac_cv_std_namespace_works=yes ],[ ac_cv_std_namespace_worls=no ]) ] ) dnl Check for the presence of SSL libraries and headers dnl This code was inspired by curl's configure.in dnl Default to compiler/linker defaults for SSL files and libraries. OPT_SSL=off AC_ARG_WITH(ssl,dnl AC_HELP_STRING([--with-ssl=PATH], [where to look for OpenSSL libs and headers]) AC_HELP_STRING([--without-ssl], [disable SSL support]), [OPT_SSL=$withval]) if test X"$OPT_SSL" = Xno then AC_MSG_WARN([ Support for Polygraph SSL features is explicitly prohibited by the user.]) sleep 1; else dnl default to disable; the checks below may enable it OPENSSL_ENABLED=0 dnl save the pre-ssl check flags for a while CLEANLDFLAGS="$LDFLAGS" CLEANCPPFLAGS="$CPPFLAGS" dnl Check for and handle argument to --with-ssl. 
case "$OPT_SSL" in yes) EXTRA_SSL=/usr/local/ssl ;; off) EXTRA_SSL= ;; *) dnl user-specified PATH EXTRA_SSL=$OPT_SSL LDFLAGS="$LDFLAGS -L$EXTRA_SSL/lib" CPPFLAGS="$CPPFLAGS -I$EXTRA_SSL/include" ;; esac AC_CHECK_LIB(crypto, CRYPTO_lock,[ HAVECRYPTO="yes" ],[ OLDLDFLAGS="$LDFLAGS" OLDCPPFLAGS="$CPPFLAGS" LDFLAGS="$CLEANLDFLAGS -L$EXTRA_SSL/lib" CPPFLAGS="$CLEANCPPFLAGS -I$EXTRA_SSL/include" AC_CHECK_LIB(crypto, CRYPTO_add_lock,[ HAVECRYPTO="yes" ], [ LDFLAGS="$OLDLDFLAGS" CPPFLAGS="$OLDCPPFLAGS" ]) ]) if test "x$HAVECRYPTO" = "xyes" then dnl check for SSL libs dnl this is only reasonable to do if crypto actually is there dnl it is important to do this _after_ the crypto lib AC_CHECK_LIB(crypto, CRYPTO_add_lock) AC_CHECK_LIB(ssl, SSL_connect) if test "$ac_cv_lib_ssl_SSL_connect" != yes then dnl we didn't find the SSL lib, try the RSAglue/rsaref stuff AC_MSG_CHECKING(for ssl with RSAglue/rsaref libs in use) OLIBS=$LIBS LIBS="$LIBS -lRSAglue -lrsaref" AC_CHECK_LIB(ssl, SSL_connect) if test "$ac_cv_lib_ssl_SSL_connect" != yes then dnl still no SSL_connect AC_MSG_RESULT(no) LIBS=$OLIBS else AC_MSG_RESULT(yes) fi fi dnl Check for SSLeay headers AC_CHECK_HEADERS(openssl/ssl.h openssl/err.h openssl/rand.h, OPENSSL_ENABLED=1) dnl If the ENGINE library seems to be around, dnl check for the OpenSSL engine header dnl it is kind of "separated" from the main SSL check dnl AC_CHECK_FUNC(ENGINE_init, dnl AC_CHECK_HEADERS(openssl/engine.h)) fi if test X"$OPT_SSL" != Xoff -a "$OPENSSL_ENABLED" != "1" then AC_MSG_ERROR([OpenSSL libraries and/or directories were not found in $EXTRA_SSL]) fi dnl these can only exist if openssl exists AC_CHECK_FUNCS( RAND_status \ RAND_screen \ RAND_egd ) AC_DEFINE_UNQUOTED(OPENSSL_ENABLED, $OPENSSL_ENABLED, [Define if OpenSSL support is enabled ]) fi CHECK_ZLIB if test "$zlib_cv_libz" = "yes" then AC_CACHE_CHECK(whether zlib supports HTTP gzip encoding natively, ac_cv_zlib_gzip, [ AC_TRY_RUN([ #include int main() { z_stream z; z.zalloc = Z_NULL; z.zfree = Z_NULL; z.opaque = 0; // the magic constants below are taken from zlib.h to force // gzip header and footer for the deflated stream int res = deflateInit2(&z, Z_DEFAULT_COMPRESSION, Z_DEFLATED, 15 + 16, 8, Z_DEFAULT_STRATEGY); return (res == Z_OK) ? 0 : -1; } ],[ ac_cv_zlib_gzip="yes"; ],[ AC_MSG_ERROR([ Your zlib library does not seem to support HTTP gzip encoding natively. Upgrade to zlib version 1.2.0.4 (or later) OR disable zlib usage with --without-zlib]) ],[ dnl TODO: this warning disrupts output and dnl should not be conditional; dnl move it after the AC_CACHE_CHECK call. 
AC_MSG_WARN([ Skipping checks for native HTTP gzip encoding support in zlib due to cross compiling.]) ]) ] ) else AC_MSG_WARN([ Polygraph HTTP compression features are explicitly disabled by the user.]) sleep 1; fi dnl check whether loganalizers/comparator should be built AC_MSG_CHECKING(whether to build comparator) AC_ARG_ENABLE(comparator, AC_HELP_STRING([--enable-comparator],[build comparator [[default=no]]]), [case "${enableval}" in yes) result=yes ;; no) result=no ;; *) AC_MSG_ERROR([bad value ${enableval} for --enable-comparator ]) ;; esac], [result="no (by default)"] ) AC_MSG_RESULT($result) AM_CONDITIONAL(ENABLE_COMPARATOR, test "x$result" = xyes) dnl library checks below do not use default actions to dnl avoid adding found libraries to all executables via LIBS dnl dlopen may be in libc or in libdl HAVE_DLOPEN_SOMEWHERE=no AC_CHECK_LIB(c, dlopen, [ HAVE_DLOPEN_SOMEWHERE=yes LIB_DL="" ],[ AC_CHECK_LIB(dl, dlopen, [ HAVE_DLOPEN_SOMEWHERE=yes LIB_DL="-ldl" ],[ LIB_DL="" ]) ] ) if test $HAVE_DLOPEN_SOMEWHERE = yes then AC_DEFINE(HAVE_DLOPEN, 1, [Define if you have the dlopen/sum/error/close.]) fi AC_CHECK_LIB(ncurses, main, [ AC_DEFINE(HAVE_LIBNCURSES, 1, [Define if you have the ncurses library (-lncurses).]) AC_DEFINE(NCURSES_OPAQUE, 0, [Mac OS X 10.6 (at least) defines NCURSES_OPAQUE to 1 by default. Some structs we use (e.g. WINDOW) are defined only if NCURSES_OPAQUE is 0.]) LIB_CURSES="-lncurses" ],[ LIB_CURSES="" ] ) dnl checks for header files AC_CHECK_HEADERS(\ arpa/inet.h \ dlfcn.h \ fcntl.h \ ifaddrs.h \ iomanip \ iomanip.h \ iosfwd \ iosfwd.h \ iostream \ iostream.h \ math.h \ ncurses.h \ netdb.h \ netinet/in.h \ netinet/in_var.h \ netinet/ip_dummynet.h \ netinet/ip_fw.h \ netinet/tcp.h \ net/if.h \ net/if_var.h \ process.h \ signal.h \ string.h \ strings.h \ sstream \ strstrea.h \ strstream \ strstream.h \ sys/ioctl.h \ sys/param.h \ sys/resource.h \ sys/select.h \ sys/socket.h \ sys/sockio.h \ sys/sysinfo.h \ sys/time.h \ sys/types.h \ time.h \ unistd.h \ regex.h \ winbase.h \ winsock2.h,,,-) dnl check for function parameters AC_CACHE_CHECK(for signal handler type, ac_cv_signal_handler_type, [ AC_TRY_COMPILE([ #include extern void my_sig_handler(int signo); ],[ signal(SIGINT, my_sig_handler); return 0; ],[ ac_cv_signal_handler_type="void SignalHandler(int)"; ],[ # best we can do without checking further ac_cv_signal_handler_type="void SignalHandler(...)"; ]) ] ) AC_DEFINE_UNQUOTED(SIGNAL_HANDLER_TYPE, $ac_cv_signal_handler_type, [sighandler prototype (e.g. "void SignalHandler(...)" on IRIX]) dnl check for types dnl check for rlim_t type in sys/socket.h AH_TEMPLATE(HAVE_TYPE_RLIM_T) AC_CACHE_CHECK(for rlim_t, ac_cv_type_rlim_t, [ AC_EGREP_CPP( [rlim_t[^a-zA-Z_0-9]], [ #include #include #if STDC_HEADERS #include #include #endif ], ac_cv_type_rlim_t=yes, ac_cv_type_rlim_t=no ) ]) if test "x$ac_cv_type_rlim_t" = xyes; then AC_DEFINE(HAVE_TYPE_RLIM_T, 1) fi AC_CACHE_CHECK(for socklen_t, ac_cv_type_socklen_t, [ AC_EGREP_CPP( [socklen_t[^a-zA-Z_0-9]], [ #include #include #if STDC_HEADERS #include #include #endif ], ac_cv_type_socklen_t=yes, ac_cv_type_socklen_t=no ) ]) if test "x$ac_cv_type_socklen_t" = xyes; then AC_DEFINE(HAVE_TYPE_SOCKLEN_T, 1, [Some systems use socklen_t typedef for some socket operations. 
Socklen_t may conflict with "int" that is also used.]) fi AH_TEMPLATE(HAVE_TYPE_RUSAGE) AC_CACHE_CHECK(for rusage, ac_cv_have_type_rusage, [ AC_TRY_COMPILE([ #include #include ],[ struct rusage R; return sizeof(R) == 0; ],[ ac_cv_have_type_rusage="yes" ],[ ac_cv_have_type_rusage="no" ]) ]) if test "x$ac_cv_have_type_rusage" = xyes ; then AC_DEFINE(HAVE_TYPE_RUSAGE) fi dnl tm.tm_gmtoff AH_TEMPLATE(HAVE_TYPE_TIMEVAL) AC_CACHE_CHECK(for timeval, ac_cv_have_type_timeval, [ AC_TRY_COMPILE([ #include #include ],[ struct timeval t; t.tv_sec = 0; t.tv_usec = 0; return 0; ],[ ac_cv_have_type_timeval="yes"; ],[ ac_cv_have_type_timeval="no"; ]) ]) if test "x$ac_cv_have_type_timeval" = xyes; then AC_DEFINE(HAVE_TYPE_TIMEVAL, 1) fi AH_TEMPLATE(HAVE_TYPE_IFREQ) AC_CACHE_CHECK(for ifreq, ac_cv_have_type_ifreq, [ AC_TRY_COMPILE([ #include #include #ifdef HAVE_NET_IF_H #include #endif ],[ struct ifreq r; return sizeof(r) == 0; ],[ ac_cv_have_type_ifreq="yes" ],[ ac_cv_have_type_ifreq="no" ]) ]) if test "x$ac_cv_have_type_ifreq" = xyes ; then AC_DEFINE(HAVE_TYPE_IFREQ) fi AH_TEMPLATE(HAVE_TYPE_IFALIASREQ) AC_CACHE_CHECK(for ifaliasreq, ac_cv_have_type_ifaliasreq, [ AC_TRY_COMPILE([ #include #include #ifdef HAVE_NET_IF_H #include #endif ],[ struct ifaliasreq r; return sizeof(r) == 0; ],[ ac_cv_have_type_ifaliasreq="yes" ],[ ac_cv_have_type_ifaliasreq="no" ]) ]) if test "x$ac_cv_have_type_ifaliasreq" = xyes ; then AC_DEFINE(HAVE_TYPE_IFALIASREQ) fi AH_TEMPLATE(HAVE_TYPE_IN6_ALIASREQ) AC_CACHE_CHECK(for in6_aliasreq, ac_cv_have_type_in6_aliasreq, [ AC_TRY_COMPILE([ #include #include #ifdef HAVE_NET_IF_H #include #endif #ifdef HAVE_NET_IF_VAR_H #include #endif #ifdef HAVE_NETINET_IN_H #include #endif #ifdef HAVE_NETINET_IN_VAR_H #include #endif ],[ struct in6_aliasreq r; return sizeof(r) == 0; ],[ ac_cv_have_type_in6_aliasreq="yes" ],[ ac_cv_have_type_in6_aliasreq="no" ]) ]) if test "x$ac_cv_have_type_in6_aliasreq" = xyes ; then AC_DEFINE(HAVE_TYPE_IN6_ALIASREQ) fi AH_TEMPLATE(HAVE_TYPE_IN6_IFREQ) AC_CACHE_CHECK(for in6_ifreq, ac_cv_have_type_in6_ifreq, [ AC_TRY_COMPILE([ #include #include #ifdef HAVE_NET_IF_H #include #endif #ifdef HAVE_NET_IF_VAR_H #include #endif #ifdef HAVE_NETINET_IN_H #include #endif #ifdef HAVE_NETINET_IN_VAR_H #include #endif ],[ struct in6_ifreq r; return sizeof(r) == 0; ],[ ac_cv_have_type_in6_ifreq="yes" ],[ ac_cv_have_type_in6_ifreq="no" ]) ]) if test "x$ac_cv_have_type_in6_ifreq" = xyes ; then AC_DEFINE(HAVE_TYPE_IN6_IFREQ) fi AH_TEMPLATE(HAVE_TYPE_STREAMPOS) AC_CACHE_CHECK(for streampos, ac_cv_have_streampos, [ AC_TRY_COMPILE([ #include ],[ const streampos pos = 0; return sizeof(pos) == 0; ],[ ac_cv_have_type_streampos="yes" ],[ ac_cv_have_type_streampos="no" ]) ]) if test "x$ac_cv_have_type_streampos" = xyes ; then AC_DEFINE(HAVE_TYPE_STREAMPOS) fi AH_TEMPLATE(HAVE_TYPE_IOS_BASE_FMTFLAGS) AC_CACHE_CHECK(for ios_base::fmtflags, ac_cv_have_type_ios_base_fmtflags, [ AC_TRY_COMPILE([ #include ],[ std::ios_base::fmtflags flags; return sizeof(flags) == 0; ],[ ac_cv_have_type_ios_base_fmtflags="yes" ],[ ac_cv_have_type_ios_base_fmtflags="no" ]) ]) if test "x$ac_cv_have_type_ios_base_fmtflags" = xyes ; then AC_DEFINE(HAVE_TYPE_IOS_BASE_FMTFLAGS) fi AH_TEMPLATE(HAVE_TYPE_IOS_FMTFLAGS) AC_CACHE_CHECK(for ios::fmtflags, ac_cv_have_type_ios_fmtflags, [ AC_TRY_COMPILE([ #include ],[ ios::fmtflags flags; return sizeof(flags) == 0; ],[ ac_cv_have_type_ios_fmtflags="yes" ],[ ac_cv_have_type_ios_fmtflags="no" ]) ]) if test "x$ac_cv_have_type_ios_fmtflags" = xyes ; then 
AC_DEFINE(HAVE_TYPE_IOS_FMTFLAGS) fi dnl sockaddr.sa_len AC_CACHE_CHECK(whether sockaddr has sa_len, ac_cv_sockaddr_has_sa_len, [ AC_TRY_COMPILE([ #include #include ],[ // this is main()'s body struct sockaddr addr; addr.sa_len = 0; return 0; ],[ ac_cv_sockaddr_has_sa_len="yes"; ],[ ac_cv_sockaddr_has_sa_len="no"; ]) ] ) if test "x$ac_cv_sockaddr_has_sa_len" = xyes; then AC_DEFINE(HAVE_SA_LEN, 1, [sockaddr structure has sa_len member]) fi dnl tm.tm_gmtoff AC_CACHE_CHECK(whether tm has tm_gmtoff, ac_cv_tm_has_tm_gmtoff, [ AC_TRY_COMPILE([ #include #include ],[ struct tm t; t.tm_gmtoff = 0; return 0; ],[ ac_cv_tm_has_tm_gmtoff="yes"; ],[ ac_cv_tm_has_tm_gmtoff="no"; ]) ] ) if test "x$ac_cv_tm_has_tm_gmtoff" = xyes; then AC_DEFINE(HAVE_TM_GMTOFF, 1, [tm structure has tm_gmtoff member]) fi dnl check for global variables dnl timezone(s) AC_DEFUN([AC_TMP], [AC_TRY_COMPILE([ #include #include ],[ return (int)_timezone; ],[ ac_cv_var_timezone="_timezone"; ],[ AC_TRY_COMPILE([ #include #include extern time_t timezone; ],[ return (int)timezone; ],[ ac_cv_var_timezone="extern"; ],[ ac_cv_var_timezone="none"; ]) ])]) AC_CACHE_CHECK(for global timezone variable, ac_cv_var_timezone, [ AC_TRY_RUN([ #include #include int main() { /* function name or a variable name? */ return (((void*)&timezone) == ((void*)timezone)) ? -1 : 0; } ],[ ac_cv_var_timezone="timezone"; ],[ AC_TMP ],[ AC_TMP ]) ] ) AH_TEMPLATE(HAVE_TIMEZONE, [how time zone global variable looks like (timezone, _timezone, etc.)]) if test "x$ac_cv_var_timezone" = xnone; then :; else if test "x$ac_cv_var_timezone" = xextern; then AC_DEFINE(HAVE_EXTERN_TIMEZONE, 1, [must explicitly declare timezone global as extern]) AC_DEFINE(HAVE_TIMEZONE, timezone) else AC_DEFINE_UNQUOTED(HAVE_TIMEZONE, $ac_cv_var_timezone) fi fi AH_TEMPLATE(HAVE_ALTZONE, [altzone global is supported (Solaris only?)]) AC_CACHE_CHECK(for altzone, ac_cv_have_altzone, [ AC_TRY_COMPILE([ #include #include ],[ return (int)altzone; ],[ ac_cv_have_altzone="yes"; ],[ ac_cv_have_altzone="no"; ]) ] ) if test "x$ac_cv_have_altzone" = xyes; then AC_DEFINE(HAVE_ALTZONE, 1) fi dnl check for compiler characteristics dnl check for functions and methods AC_CHECK_FUNCS(\ rint \ ceilf \ getifaddrs \ gettimeofday \ getpagesize \ getrlimit \ getrusage \ ioctl \ poll \ epoll_create \ signal \ unlink \ sleep \ fork \ strcasecmp \ strncasecmp \ timegm \ pclose \ popen \ inet_makeaddr \ inet_lnaof \ inet_netof \ \ _ftime \ _pclose \ _popen \ ioctlsocket \ stricmp \ strnicmp \ GetLastError \ SetLastError \ WSAStartup \ WSACleanup \ WSAIoctl \ closesocket ) AH_TEMPLATE(HAVE_SET_NEW_HANDLER) AC_CACHE_CHECK(for set_new_handler, ac_cv_have_set_new_handler, [ AC_TRY_RUN([ #include using namespace std; static void myHandler() {} int main() { set_new_handler(&myHandler); return 0; } ], ac_cv_have_set_new_handler="yes", ac_cv_have_set_new_handler="no", ac_cv_have_set_new_handler="no", ) ]) if test "x$ac_cv_have_set_new_handler" = xyes ; then AC_DEFINE(HAVE_SET_NEW_HANDLER, 1, [have set_new_handler or std::set_new_handler]) fi dnl check for system services AH_VERBATIM(DEFAULT_FD_SETSIZE, [/* negative or zero if we failed to detect it */ #define DEFAULT_FD_SETSIZE -1]) AC_MSG_CHECKING(Default FD_SETSIZE value) AC_TRY_RUN([ #include #include #include #include int main() { fprintf(fopen("conftestval", "w"), "%d\n", FD_SETSIZE); return 0; } ], DEFAULT_FD_SETSIZE=`cat conftestval`, DEFAULT_FD_SETSIZE=-1, DEFAULT_FD_SETSIZE=-1, ) AC_MSG_RESULT($DEFAULT_FD_SETSIZE) AC_DEFINE_UNQUOTED(DEFAULT_FD_SETSIZE, 
$DEFAULT_FD_SETSIZE) AH_VERBATIM(PROBED_MAXFD, [/* negative or zero if no probing has been done or probing failed */ #define PROBED_MAXFD -1]) AC_MSG_CHECKING(Maximum number of filedescriptors we can open) AC_TRY_RUN( [ /* this ingenuous check is derived from uncopyrighted Squid/configure.in */ #include #include #include /* needed on FreeBSD */ #include #include // see SSI_FD_NEWMAX below #ifdef HAVE_SYS_SYSINFO_H #include #endif int main() { #ifdef SSI_FD_NEWMAX if (setsysinfo(SSI_FD_NEWMAX, 0, 0, 0, 1) != 0) perror("setsysinfo(SSI_FD_NEWMAX)"); #endif #if defined(RLIMIT_NOFILE) || defined(RLIMIT_OFILE) #if !defined(RLIMIT_NOFILE) #define RLIMIT_NOFILE RLIMIT_OFILE #endif struct rlimit rl; if (getrlimit(RLIMIT_NOFILE, &rl) < 0) { perror("getrlimit: RLIMIT_NOFILE"); } else { rl.rlim_cur = rl.rlim_max; /* set it to the max */ if (setrlimit(RLIMIT_NOFILE, &rl) < 0) perror("setrlimit: RLIMIT_NOFILE"); } #endif /* RLIMIT_NOFILE || RLIMIT_OFILE */ /* by starting at 2^15, we will never exceed 2^16 */ int i,j; i = j = 1<<15; while (j) { j >>= 1; if (dup2(0, i) < 0) { i -= j; } else { close(i); i += j; } } i++; FILE *fp = fopen("conftestval", "w"); fprintf (fp, "%d\n", i); return 0; } ], PROBED_MAXFD=`cat conftestval`, PROBED_MAXFD=-1, PROBED_MAXFD=-2 ) AC_MSG_RESULT($PROBED_MAXFD) AC_DEFINE_UNQUOTED(PROBED_MAXFD, $PROBED_MAXFD) dnl various aftershock warnings if test "x$ac_cv_header_regex_h" = xno then AC_MSG_WARN([ No Regular Expression library header file found (regex.h); RE-dependent features such as ACL checking will not work.]) fi if test "x$ac_cv_header_netinet_ip_dummynet_h" = xyes then AC_CHECK_TYPES(dn_pipe, [AC_DEFINE(USE_DUMMYNET, 1, [Defined if supported DummyNet version was found])], [AC_MSG_WARN([unsupported DummyNet version])], [ #include #include #include #include #include #include ]) else AC_MSG_WARN([ No DummyNet header files found; piper tool will not be available.]) fi if test "x$ac_cv_header_dlfcn_h" = xno -o $HAVE_DLOPEN_SOMEWHERE != yes then AC_MSG_WARN([ No dl library or dlfcn.h header file found; dynamically loadable modules will not be supported.]) fi if test "x$ac_cv_header_ncurses_h" = xno -o "x$ac_cv_lib_ncurses_main" = xno then AC_MSG_WARN([ No ncurses library or header files found; polymon tool will not be available.]) fi AX_CREATE_STDINT_H(src/xstd/h/stdint.h) dnl these variables will be substituted in Makefiles AC_SUBST(LIB_DL) AC_SUBST(LIB_CURSES) AC_SUBST(LDFLAG_RDYNAMIC) AC_PROG_MAKE_SET AC_OUTPUT( \ ./Makefile src/Makefile \ src/xstd/Makefile \ src/base/Makefile \ src/xml/Makefile \ src/runtime/Makefile \ src/xparser/Makefile \ src/pgl/Makefile \ src/csm/Makefile \ src/dns/Makefile \ src/icp/Makefile \ src/beep/Makefile \ src/cache/Makefile \ src/app/Makefile \ src/client/Makefile \ src/server/Makefile \ src/logextractors/Makefile \ src/loganalyzers/Makefile \ src/tools/Makefile \ src/probe/Makefile \ src/monitor/Makefile \ tools/Makefile \ workloads/Makefile \ workloads/include/Makefile \ polygraph.man ) polygraph-4.3.2/change.log0000644000175000017500000042225511546441013015064 0ustar testertesterversion 4.3.2 2011/04/05 - Fixed assertion when a container PGL Content type uses an embedded PGL Content that was not configured yet. - Fix build with OpenSSL libraries with disabled SSLv2 protocol. In particular, this fixes build on Debian Weezy. Workloads using SSLv2 will be rejected if Polygraph was built without SSLv2 support. - Fixed layout of tables printed on the console (e.g. "Server content distributions" and "Phases"). 
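	Illustration for the SSLv2 build note in version 4.3.2 above: OpenSSL
	builds that drop SSLv2 define OPENSSL_NO_SSL2, so code that must still
	compile against such libraries typically guards the SSLv2-specific calls
	and reports the protocol as unavailable at run time. A minimal, generic
	sketch, not the actual Polygraph code:

	#include <stdio.h>
	#include <openssl/ssl.h>

	/* Returns an SSLv2 client context, or NULL when the OpenSSL build
	 * has SSLv2 disabled; callers can then reject SSLv2 workloads. */
	static SSL_CTX *SslV2ClientCtx(void) {
	#ifndef OPENSSL_NO_SSL2
		return SSL_CTX_new(SSLv2_client_method());
	#else
		fprintf(stderr, "this OpenSSL build has no SSLv2 support\n");
		return NULL;
	#endif
	}

	int main(void) {
		SSL_library_init();
		return SslV2ClientCtx() ? 0 : 1;
	}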
- Fix persistent working set ID reporting when loading the stored set. Before the change, a new ID was generated each time the set was loaded. - Cleanup copyright preamble to make Debian folks happier: add copyright where missing, remove copyright from generated config.h file, update copyright years, replace "(C)" with "Copyright". - Remove files specific to Windows build which has not been supported for a while. version 4.3.1 2011/03/03 - Better support for trace replay: Ignore Robot.origins field for PGL Robots with foreign-only interest. Before the change, non-empty Robot.origins field was required, even if it was unused because the Robot has no interest other than replaying the foreign trace. - Better support for trace replay: Do not "warm up" Robots with foreign-only interest. Before the change, many workloads containing trace-replaying Robots could get stuck in the first phase due to an infinite warmup. Now, foreign-only Robots do not affect the warmup, and the first phase ends as expected. - Print the corresponding workload file line and number for many PGL-related warnings and errors. - Fixed client crash on exit when Robot.accept_content_encodings PGL field is set. - Fixed generation of lifecycle-related entity parameters when the same ObjLifeCycle object is used multiple times in the workload. The bug resulted in unexpected entity-header (e.g., Last-Modified-Time and Expires) changes when the same URL was revisited. - Fix statistics recording for aborted transactions. Before the change, non-basic (e.g. POST) aborted transactions were recorded in the wrong category (e.g. post). Now, all aborted transactions are recorded in the aborted category. - OpenSSL 1.0 support (LP:690599). - Fixed epoll(2)-related bug which could result in "Socket not connected" and similar errors. - Fixed select(2) support in SSL-related workloads. - Code cleanup. version 4.3.0 2010/12/10 - Content-driven Robots support: The user may configure Robot behavior based on Content. Real HTTP client behavior often depends on web site and its content. Content-driven Robots help simulate those dependencies. All for-Robot settings in PGL Content objects are grouped under the new Content::client_behavior field. A new PGL ClientBehavior type is added. Assigning PGL Robot to ClientBehavior and vice versa is supported. Once Server and Content are selected, Content::client_behavior settings, if any, overwrite the corresponding Robot fields. For now, the following three Robot behavior properties can be specified on per-Content basis: req_types, req_methods, and ranges. - Considerably improved the randomness of distribution values tied to URLs, such as response sizes. The old distributions could have too many similar values, with too few extreme values, especially when non-unique worlds (--unique_world no) were used with multiple PGL Servers and table-based distributions. - Linked object blobs from traffic stream tables in report. - Fixed poll(2)-related bug which could result in "Socket not connected" or similar errors on platforms without epoll(2) support such as Mac OS X. - Multiple world support for distr_test tool: The world_count option is intended to simulate URL-based value generation in workloads with multiple PGL Servers, each Server being a "oid world" for the Robot. By default, world_count is 1, and all oids are generated from a single world. If world_count is greater than 1, distr_test generates oids from multiple worlds. The number of oids generated from each world is (trials count / worlds count). 
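	A worked example of the per-world split described in the distr_test note
	above (the counts are made up for illustration and are not defaults):

	#include <stdio.h>

	int main(void) {
		const int trials = 10000;   /* hypothetical total trial count */
		const int world_count = 4;  /* hypothetical world_count setting */
		printf("%d oids per world\n", trials / world_count);  /* prints 2500 */
		return 0;
	}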
- Minor fixes and code cleanup. version 4.2.2 2010/11/05 - Added custom stats: The user may configure which transactions are added to custom stats based on HTTP status code. Configuration is done with Agent.custom_stats_scope PGL field: custom_stats_scope = ["all", "-2xx", "+30x", "+206"]; If a transaction finishes before getting a reply, or there is no HTTP status code for another reason, the transaction is not included in the custom stats. The following custom stats are collected: custom.rep, custom.live.rep, custom.projected.rep. Custom stats are accessible from PGL phase scripts. - Fixed PGL TblDistr to produce the same results for the same seed. For example, if a table distribution is used for response body sizes, the response size would stay the same when the same URL is visited multiple times. - Fixed several build and startup problems on Mac OS X. - Fixed a DummyNet-related build problem on FreeBSD 8.1. - Fixed IPv6 address parsing in command line arguments. - Fixed segfault when parsing a PGL array or list with a malformed item. - Allow IPv6 addresses in PGL to start with '[:'. version 4.2.1 2010/10/14 - Transaction lifetime support: Allow the user to limit the lifetime of client and server transactions via the new PGL Agent::xact_lifetime field. A transaction aborts when its configured lifetime expires. Aborted transactions are treated as errors on client and server sides of the test, for now. Client transaction lifetime includes both connection slot waiting time (if any; see Robot::open_conn_lmt) and regular execution time. Server transaction lifetime includes both think time (if any; see Agent::xact_think) and regular execution time. version 4.2.0 2010/10/08 - Allow PGL scripts to monitor in-flight or "live" transactions so that they can adjust load even when many transactions get stuck or slow down and do not immediately contribute to regular postmortem stats. Measure how long live transactions have been running (live.rep.rptm) and how many response bytes they have read (live.rep.size). Make these new stats available to PGL scripts via the regular stats interfaces (currentSample().live.* and currentPhase().live.*). - Added projected.rep.* statistics that combines live and finished transaction stats. Available to PGL scripts only. - Treat an HTTP CONNECT response as the end of the transaction. Collect and report stats dedicated to these new CONNECT-only transactions. These CONNECT transactions are often rather different from the in-tunnel transactions that follow. Isolating them also prevents CONNECT overheads from polluting the first in-tunnel transaction stats. version 4.1.2 2010/11/06 - Fixed PGL TblDistr to produce the same results for the same seed. For example, if a table distribution is used for response body sizes, the response size would stay the same when the same URL is visited multiple times. - Fixed several build and startup problems on Mac OS X. - Fixed a DummyNet-related build problem on FreeBSD 8.1. - Fixed IPv6 address parsing in command line arguments. - Fixed segfault when parsing a PGL array or list with a malformed item. - Allow IPv6 addresses in PGL to start with '[:'. version 4.1.1 2010/10/07 - Add dump command to the cdb tool, which stores all verbatim and link-only objects into individual files. Cdb dump accepts an optional --as "path/prefix${name}.suffix " parameter. Default --as value is "./cdb${name}". ${name} macros are substituted with 1,2,3,4... integers. - Cdb interface has changed. 
Commands (add, dump, show) are passed as options with database argument, i.e. cdb --show test.cdb. - Limit PGL load and population factors value to [0%, 100%] range. Print error and exit if begin or end factor value is greater than 100%. If a script changes factor to a negative or greater than 100% value, print warning and set the factor to 0%/100% instead. - Set Recursion Desired (RD) flag for outgoing DNS queries because some modern resolvers do not accept queries without that flag and because we do not recursively resolve ourselves. - Skip already configured network interface aliases when creating new ones if delete_old_addrs option is false. - Do not send Content-MD5 header with HTTP 206 (Partial Content) responses. This fixes segfault and is in line with HTTPbis. See http://trac.tools.ietf.org/wg/httpbis/trac/ticket/178 - Fix assertion during network interface alias creation. - Fix errPersistButNoCLen error on client-side when multi-ranges and persistent connections are enabled. Content-Length is not required if message uses the media type "multipart/byteranges". - Fix LP:644813: SSL issues with DNS and embedded objects. - Fix address space size check for SpreadAs. - Fix multiple alias range support in the aka tool. - Fix compilation of the msl_test tool on Linux. - Make out-of-memory handler more robust. - Minor fixes and code cleanup. version 4.1.0 2010/05/13 - Support SOCKS5 proxies for HTTP, HTTPS, and passive FTP traffic (socks_proxies and socks_prob PGL Robot fields). No support for Active FTP data connections yet. "None" and basic "username/password" SOCKS authentication methods are supported. Chaining SOCKS and HTTP proxies is supported (the socks_chaining_prob PGL Robot field). - Extend the ipsToNames() PGL function to support a domain name generation template with ${dashed_ip} and ${port} macros. This change allows more flexibility in naming a large number of server domains. - Add a dynamize() PGL function that converts an array of static domain names to dynamic names (using the supplied renewal percentage), similar to what the dynamicName() function does to one static name. This change allows creation of a large number of servers, each with a unique dynamic domain name. - Do not send "100 Continue" control response if we got the entire request body already. This fix prevents POST requests sent without waiting for 100 Continue response from getting stuck because the client sent everything and cannot continue. - Fix server freeze when "Expect: 100-continue" request headers and body (partial or complete) is received in a single read(2) call. - Fix assertion when an HTTP client aborts transaction. - Fix abort position calculation on the client side. The abort size was set bigger than the request size, and client aborted after the request was sent, during response reading. version 4.0.11 2010/11/06 - Limit PGL load and population factors value to [0%, 100%] range. Print error and exit if begin or end factor value is greater than 100%. If a script changes factor to a negative or greater than 100% value, print warning and set the factor to 0%/100% instead. - Fixed PGL TblDistr to produce the same results for the same seed. For example, if a table distribution is used for response body sizes, the response size would stay the same when the same URL is visited multiple times. - Fixed several build and startup problems on Mac OS X. - Fixed a DummyNet-related build problem on FreeBSD 8.1. 
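	For the SOCKS5 note in version 4.1.0 above: the two supported
	authentication methods, "none" and "username/password", correspond to
	method codes 0x00 and 0x02 in the client's version/method-selection
	message (RFC 1928 and RFC 1929). A generic sketch of that greeting, not
	the Polygraph implementation:

	#include <stdio.h>
	#include <stddef.h>

	/* Build the SOCKS5 greeting: VER, NMETHODS, METHODS... (RFC 1928). */
	static size_t socks5_greeting(unsigned char *buf) {
		buf[0] = 0x05; /* SOCKS protocol version 5 */
		buf[1] = 0x02; /* number of methods offered */
		buf[2] = 0x00; /* "no authentication required" */
		buf[3] = 0x02; /* "username/password" (RFC 1929) */
		return 4;
	}

	int main(void) {
		unsigned char msg[4];
		const size_t n = socks5_greeting(msg);
		for (size_t i = 0; i < n; ++i)
			printf("%02x ", msg[i]);
		printf("\n"); /* prints: 05 02 00 02 */
		return 0;
	}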
- Skip already configured network interface aliases when creating new ones if delete_old_addrs option is false. - Do not send Content-MD5 header with HTTP 206 (Partial Content) responses. This fixes segfault and is in line with HTTPbis. See http://trac.tools.ietf.org/wg/httpbis/trac/ticket/178 - Fix assertion during network interface alias creation. - Fix errPersistButNoCLen error on client-side when multi-ranges and persistent connections are enabled. Content-Length is not required if message uses the media type "multipart/byteranges". - Fix assertion when an HTTP client aborts transaction. - Fix abort position calculation on the client side. The abort size was set bigger than the request size, and client aborted after the request was sent, during response reading. - Fix LP:644813: SSL issues with DNS and embedded objects. - Fix address space size check for SpreadAs. - Fixed IPv6 address parsing in command line arguments. - Fixed segfault when parsing a PGL array or list with a malformed item. - Allow IPv6 addresses in PGL to start with '[:'. - Fix multiple alias range support in the aka tool. - Fix compilation of the msl_test tool on Linux. - Make out-of-memory handler more robust. - Minor fixes. version 4.0.10 2010/04/13 - Ignore ssl_wraps for FTP servers because we do not really support FTPS yet. - Servers with ssl_wraps no longer verify SSL peer certificates because verification causes an optional peer certificate request, which some proxies do not ignore but abandon their man-in-the-middle decryption instead. - In PGL arrays using well-known strings (e.g., protocol names), reject unknown names that share a common prefix with the known ones. - Fixed spreading of array probabilities that do not add up to 100%. Only the first such array in the workload had its probabilities adjusted. - Raised verbosity level of FYI and warning messages from 0 to 1 to minimize zero-level output. - Fixed segfault in ltrace that rendered the tool unusable. - Fixed logging of very long messages. - Fixed memory leak in pgl2ips. - Minor man pages and spelling fixes. version 4.0.9 2010/02/14 - Used newer autotools: automake v1.10.2 to v1.11 and autoconf v2.63 to 2.64. - Added polygraph(7) and individual man pages for each binary. - Added cmp-lx.pl --precision=percent option to ignore relative measurement differences below the specified percentage. The default is 10 percent. Zero percent shows all, including identical, measurements. Other fixes for cmp-lx.pl. - Added header_size/body_size field at the end of "--dump sum" log line. This change may be reverted in future releases. - Added rng_seed option for distr-test tool. - Allow apostrophe and a few other non-alphanumerical symbols in PGL comments. - Detect the lack of SSL support and quit with an error for SSL workloads. - Fixed HTTP authentication for SSL connections. - Do not assert if an agent address lacks a netmask. - Delete old IP aliases by name, not by index, to avoid having lo:1 without lo:0. May need more work. - Warn about duplicate network interface addresses instead of asserting that there are none. - Fixed a segfault when retrieving IPv6 addresses from a network interface. - Removed tcp2xmld as unused. version 4.0.8 2009/10/01 - New addressing scheme called Spread. It is possibly the simplest addressing scheme that distributes the load evenly across all hosts, unlike the PolyMix-4 addressing scheme that does not add a host until existing hosts are 100% utilized. 
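	To illustrate the contrast drawn in the Spread note above: an even spread
	places the i-th agent address on host i modulo the host count, whereas a
	fill-first scheme keeps adding addresses to one host until that host
	reaches its capacity. A toy sketch with made-up counts; the real
	allocation logic is more involved:

	#include <stdio.h>

	int main(void) {
		const int addresses = 10; /* hypothetical number of agent addresses */
		const int hosts = 3;      /* hypothetical number of bench hosts */
		const int capacity = 4;   /* hypothetical per-host address capacity */

		for (int i = 0; i < addresses; ++i)
			printf("address %2d: spread -> host %d, fill-first -> host %d\n",
				i, i % hosts, i / capacity);
		return 0;
	}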
- Allow the user to specify alias addresses explicitly, overwriting the "concatenate PGL agent addresses" default. This allows defining several Server kinds (e.g. plain and SSL) and then spreading their aliases across all hosts. - Add size and response time phase stats for all HTTP response status codes received by polyclt and sent by polysrv. The measurements are collected for all transactions, including failed and non-basic ones. See the new "HTTP reply status..." tables in the report. - Add tools/cmp-lx.pl script to compare two tests and print important differences. Test logs must be preprocessed with lx first. - Print warning if Content-MD5 header is present but malformed. - Polished generated reports: Sort a few tables, number and link report generation notes, stretched histogram plots, make one-page summary fit. - Comply with Filesystem Hierarchy Standard (FHS) and Debian Policy requirements: * set default installation prefix to /usr/local * install workloads to share/polygraph/workloads * rename polyclt/polysrv to client/server * replace '_' with '-' in binaries name (pop_test to pop-test) * add polygraph- prefix to all binaries during installation * strip .pl prefix from Perl scripts during installation - Fix "content type mismatch" regression error with reporter log processing. - Fix negative open connection level and ttl stats. - Fix reporter crash when it is called with the --phases option and the phase does not exist. Warn if a phase does not exist. - Fixed parsing of ":sup" ranges in table distributions. - Fix "inconsistent computation of unseen_objects" report error. - Fix "reference to an undefined blob 'object.all_proxy_validations...'" reporter error. - Quit nicely when the reporter runs out of memory. - Fix (potential) double delete errors. - Made more modern compilers happier. - Remove gnuplot build dependency. - Do not link with libnsl on platforms where libnsl is not used. version 4.0.7 2009/07/15 - Support origin server authentication for Robots. Foreign origin servers are supported as well. Unlike real browsers, Robots do not support or remember nested paths. For example, if the /foo/bar/ path got authenticated, the same Robot would go through the entire authentication for /foo/bar/baz/ even though a real browser would probably try to reuse the first (higher-level directory) authentication information instead. - Support cookie-based Robot authentication, including that with foreign URLs. Robots now have their own private cookie storage and queue. Cookie Max-Age parameter is parsed and honored. Stale cookies are not sent. Cookie path and domain parameters are still ignored as if they were not sent. - Support active FTP mode (i.e., FTP PORT command). Configured using Robot's passive_ftp field. The default is 100% passive. - Added pgl2ips tool that takes a PGL workload and prints agent IP addresses that a test would use. - Added modulo operator support for PGL integers. Useful for flipping coins in PGL Phase scripts. - Users no longer need to patch system headers to get more file descriptors on Linux: Polygraph now ignores FD_SETSIZE limit when using poll(2) or epoll(2), which is the default on most platforms. - Exit gracefully on SIGTERM (SIGINT as supported earlier). Killall sends SIGTERM by default and we were getting truncated console logs because of that. - Better HTTP cookies support: honor expires/discard options and support cookie updates. Do not send cookies that do not fit. - Better handling of and error reporting in HTTP 401, 403, and 407 corner cases. 
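	For the active FTP note in version 4.0.7 above: a PORT command encodes
	the client's IPv4 data address and port as six comma-separated byte
	values, with the port split into high and low octets (RFC 959). A
	generic sketch, not the Polygraph code:

	#include <stdio.h>

	/* Format "PORT h1,h2,h3,h4,p1,p2" for IPv4 address a.b.c.d and a TCP port. */
	static void format_port_cmd(char *out, size_t size,
			unsigned a, unsigned b, unsigned c, unsigned d, unsigned port) {
		snprintf(out, size, "PORT %u,%u,%u,%u,%u,%u\r\n",
			a, b, c, d, port / 256, port % 256);
	}

	int main(void) {
		char cmd[64];
		format_port_cmd(cmd, sizeof(cmd), 10, 0, 0, 1, 20123); /* made-up values */
		fputs(cmd, stdout); /* prints: PORT 10,0,0,1,78,155 */
		return 0;
	}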
- Polished FTP error reporting. - Limit SSL error reporting frequency to minimize noise during high-performance tests with many SSL errors. Polished reporting. - Better reporting of tests with different client- and server-side PGL workloads. - Better reporting of foreign trace configuration errors. - Removed some expensive mallocs. - Removed unseen object blobs and unknown (zero and N/A) rows in stream and object tables from the HTML report. - Allow tabs where were spaces allowed in PGL. - Fixed memory leaks. - Fix alias creation bug when workload contains both generated and manually configured aliases. Some aliases were created on many hosts while others were not created on any hosts. - Fixed misleading 'unknown log file format' error when the named binary log file is not found by lx, ltrace, or reporter. - Fixed error handling code that was ignoring certain fatal errors. - Made more modern compilers happier. - Ignore zero-valued Content-Length in GET instead of terminating the server transaction with the "unexpected Content-Length header" error. Some proxies add that header to some GET requests. - Fixed select(2) support (unused on most platforms by default). - Report that content types differ and exit instead of asserting when a report is generated using logs with mismatched content types. - Do not send body in HTTP HEAD response. This fixes a bug that resulted in "malformed HTTP request or response line" errors on the client side when Range requests were used. - Work around crashes in OpenSSL versions older than 0.9.8c: http://objectmix.com/apache/674160-apache-start-segmentation- fault-emlink-too-many-links.html version 4.0.6 2009/04/20 - Added support for FTP STOR to robots and servers, triggered by selecting the new "Upload" request type in PGL Robot::req_types. STOR transactions use PGL content configuration, including cdb support, just like PUT transactions already do. Robot::put_contents PGL field is replaced with Robot::upload_contents. The old put_contents field is still supported, with a deprecated warning. Request method selection code adjusted to select PUT/STOR method for HTTP/FTP transactions if "Upload" request type is chosen. - When parsing and generating URLs and related addresses, handle default port with respect to the protocol scheme (80 for HTTP, 443 for HTTPS, 21 for FTP). - Make the test quit when phase error goal is reached. One of the goals here is to prevent automated tests from getting stuck forever. - Added a PGL Phase.primary boolean field. If any of the phases have that field set, those phases and only those phases should be used for the executive summary and the baseline report. The new field is logged when phase stats are logged. This change resulted in the log version increase to v17. - Skip phases without stats when trying to guess which phases should be included in the baseline report and the executive summary. - Added initial set of 13 workloads for regression testing to workloads/regression/. These workloads need more work. - Report when phase positive/negative goal is reached for the first time. - When reporting stat phase progress say "waiting for WSS to freeze" only when the phase itself is actually locked, waiting for WSS to freeze. We used to report when polyclt was waiting, which was confusing in a phase progress report context. - Report more information about locked phases that may continue to run after reaching their explicit goal. This should help diagnose remaining problems with phases getting stuck when a test generates many errors. 
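	The default-port rule from the URL note in version 4.0.6 above, restated
	as a tiny illustrative lookup (not the actual Polygraph code):

	#include <stdio.h>
	#include <string.h>

	/* Default ports used when a URL omits one: http 80, https 443, ftp 21. */
	static int default_port(const char *scheme) {
		if (strcmp(scheme, "http") == 0)
			return 80;
		if (strcmp(scheme, "https") == 0)
			return 443;
		if (strcmp(scheme, "ftp") == 0)
			return 21;
		return -1; /* unknown scheme */
	}

	int main(void) {
		printf("%d %d %d\n",
			default_port("http"), default_port("https"), default_port("ftp"));
		return 0;
	}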
- Report IP address counts in addition to dumping (potentially huge) address arrays when local and PGL addresses do not match. Helps to identify addressing problems quicker. - Log details about server warmup plan start and completion. - Fixed nil pointer derefence bug when dealing with rare SSL tunnel establishment failures. User-visible bug effects are unknown but could include polyclt crashes. - Fixed epoll support in the presence of some connection errors. The bug resulted in some errors not reported to the user and an infinite loop. - Fixed agent alias allocation bug that resulted in identical IP aliases created on different hosts. The bug was exposed by workloads that randomized generated address order using PGL select() or slicing assignment. - Fixed stats collection for authenticated HTTP transactions. User-visible effects could include runtime assertions but the exact effects are unknown. - Deleted a questionable assertion being hit by users (during an error-rich test) when SSL tunneled transactions are in use. The exact cause of the assertion is still unknown and could be already removed by other changes. version 4.0.5 2009/02/05 - Warn about "unusable" objects as arguments to use(). Skip such objects. Skipping unusable objects simplifies workloads where some agents or address maps become unusable if some workload parameter leaves them without any addresses. A common use case is a workload where agent addresses are selected using slicing assignments or select() function calls, with "ratio of such-and-such agents" as a configurable parameter. - Fixed SSL connection level accounting. The level was not decremented properly, resulting in artificially high number of open SSL connections being reported. - Fixed oid management to avoid an assertion when dumping foreign request URLs and to dump more oid details. - Do not blab about configured SSL proxies and/or servers if there are none. version 4.0.4 2009/02/04 - Added Robot.http_proxies and Robot.ftp_proxies PGL fields to support HTTP proxies dedicated to ftp:// transactions. Specifying an empty ftp_proxies array, disables proxying of those transactions. The Robot.proxies field is now deprecated. The overall logic is similar to how browsers can be configured to use different proxies for different origin server protocols. Polygraph still supports HTTP proxies only. - Added setAddrPort(addresses, newPort) PGL function call, returning original addresses but with each port is set to newPort. The function is useful for assigning a protocol-specific port to server addresses, when those addresses were selected or sliced from a larger address array used for many protocols. - Support https:// URLs in traces. - Support ${url_number} macro in URL traces. The substitution value is a "URL position in the trace" number that increases as the trace wraps. Using this macro, it is possible to create a virtually endless sequence of unique URLs using very short traces, which is useful for testing using specific URL patterns. - Significantly decreased startup times for workloads with thousands of agent addresses. - Fixed "FtpCltXact.cc:195: assertion failed: 'false'" on data connect errors. - Better handle various FTP RETR-related command sequences and server races. - Do not refuse to create agent aliases if some but not all agent addresses lack network interface names. Instead, ignore/skip nameless addresses in the alias:host mapping algorithm. 
This change helps when a workload mixes agents for which Polygraph should create aliases dynamically with agents for which the addresses are created before the test. - Complain about the lack of the root certificate once. When the number of SslWraps is large, complaining for each takes too much time and space, while there is little new information with each new complaint. - Do not warn if using non-ALL cipher for SslWrap. The default may be ALL:HIGH now and warning for each cipher takes too much time and space. version 4.0.3 2009/01/21 - Support ${target_ip} macro in PGL-configured robot credentials. If found, the macro is replaced with the IP address of the target (i.e., true origin) server. This is especially useful for reverse FTP proxies that distinguish the true origin server by FTP user credentials. - Support empty robot passwords via "username:" credentials pattern. version 4.0.2 2009/01/19 - Added initial support for PWD command. The directory we return always ends with a slash ("/") for now. - Revised FTP RETR logic to make 1xx responses required and to distinguish FTP 150 "Opening data connection" from the newly added "FTP 125 "Data connection already open; transfer starting" response. - Added initial support for user-configurable FTP origin server authentication. If no credentials are configured, the FTP transaction will use anonymous:anonymous@ credentials. For now, credentials are always supplied (i.e., the FTP transaction always tries to login to the server). - Use "227 Entering Passive Mode (,,,,)" format for PASV responses. Some FTP clients expect the parens because that is what FTP RFC uses, probably informally. - Be flexible when parsing FTP PASV response: search for the first digit as the start of a,b,c,d,p1,p2 sequence instead of assuming a fixed response format. - Fixed address map support: Agent protocol is determined by the target origin server and not by the visible server. Visible server may have no Agent configuration at all (e.g., if an address map is used in reverse proxy configurations). - Avoid polysrv assertions when a foreign URL or its size is requested without a subsequent request for a Polygraph URL. - Polished stats maintenance for FTP data connection. Needs more work. - Made libtool 2.2.4 happier. version 4.0.1 2009/01/14 - Pause FTP client transactions before writing a RETR command if the data connection has not succeeded yet. - Pause HTTP 100-Continue client writing when we are done writing HTTP request headers and are waiting for a 100 Continue response from the server. Resume writing, if needed, when we get a response. Old code would waste CPU cycles and occasionally assert when checking whether it is possible to write the request body while we were receiving a response. May need more work. - FTP clients may send repeated PASV commands, especially if they try unsupported commands. If a PASV command is received when we are already listening or even accepted a data connection (but have not sent any data!), we simply close the data channel and open another one, assuming the client considers the old data channel unusable. - Treat 150 "Data connection opened" response as optional. We may not receive it if the server sent all data by the time it could generate a RETR response. Or, at least, that is what happens with Polygraph FTP server. - Wait for the data connection if the FTP server got RETR before it accepted the data connection: The client sending RETR cannot know whether its data connection was already accepted by the server. 
TODO: We need a timeout for this wait. - Finish with errFtpNoDataXfer ("FTP exchange terminated w/o data transfer") error when the FTP client did not RETR anything. This prevents feeding an unclassified (no oid even!) but successful transaction to stats collection. - Fixed logging of request body content type meta-information for content types not used as server response content types. Partial meta-information led to core dumps during log extraction and analysis (also fixed). - Be more forgiving when encountering an unclassified transaction and when encountering a compound transaction without an authentication scheme. Both are bugs. We have not observed the former, but the latter does happen in some NTLM tests (and still needs to be fixed). - Report FTP port binding errors. - Report synchronous connect(2) errors for the data channel. May need more work handling the errors. - Check for asynchronous connect(2) errors for the data channel and terminate the transaction when they happen. - Made GCC v3.4.6 and v4.3.2 happier. version 4.0.0 2008/11/25 - Added initial FTP client and server support. Robots use FTP for talking directly to an FTP server. When a transaction ends at an FTP server via an HTTP proxy, robots use HTTP and ftp:// Request-URLs, as most real web clients would. Only passive FTP downloads are supported. Added a "protocol" field to the PGL Server type to specify the distribution of protocols. "HTTP" and "FTP" strings are accepted. Multiple protocols make sense if Server specifies multiple simulated servers using the addresses field. Many FTP aspects require more work, such as support for more FTP commands and responses, FTP uploads, configurable FTP user authentication, better correlation with various HTTP-centric workload parameters, and a clear definition of an FTP transaction boundary for statistics collection. The code changes to support FTP were significant and widespread. version 3.6.1 2008/11/25 - Removed unnecessary delays when processing log comments. The entire process (or even host) buffers and inodes were flushed with every console log message. The bug increased the time to process a single log comment significantly. The sync(2) delays become a noticeable problem when PGL watchdogs print a lot of custom statistics while Polygraph is working at the bench performance limit. The same probably happens when many errors are reported. - Fixed (enabled) statistics collection for socket write(2) calls. - Fixed assert failure when repeating foreign URLs from a trace. version 3.6.0 2008/09/22 - Fixed URL selection algorithm: A change in URL selection code accidentally exposed an ancient bug that led to incorrect offered hit ratios. The bug remained unnoticed because the effect was negligible in most standard workloads. However, a significant difference in offered hit ratios can be observed in some custom workloads that used large portion of private URLs. Removed support for private URL worlds. Adding dynamic DNS feature (Polygraph v3.4.0) broke this support, offering false hits among private URLs. Hopefully, removing private URL worlds has no significant effect on standard workloads as this feature should only affect URL paths, which most DUTs treat as opaque. We may resurrect support for private URL worlds. The problem here is how to make private URLs use the same set of dynamic domain names as public URLs use. 
Since new names in public and private slices usually appear at a different speed, one category of URLs may use domain names that the other category stopped using long time ago. - Added authentication scheme stats. Response time and size stats are collected for each auth scheme separately. HTML report now includes "all auth-ing" (all authenticating) and "all auth-ed" (all authenticated) categories for individual HTTP transactions. There is also "all auth" stats combining all categories. The following log objects introduced: auth.none auth.ing.basic auth.ing.ntlm auth.ing.negotiate auth.ed.basic auth.ed.ntlm auth.ed.negotiate auth.none. - Added recurrence ratio stats. Put unique URL count and recurrence ratio to the executive summary table in the reporter. Added url.unique.count and url.recurrence.ratio log objects. - Report various stats for compound transactions such as authentication transactions that may require several individual transactions before the final response is received. - Added SslWrap.ssl_config_file PGL field to set config file path for SSL certificate generation. The old 'myssl.conf' default is honored but deprecated. - Added SSL stats reporting. New SSL Load blob, similar to the old [HTTP] Load blob. Added SSL hits and misses to stream and object tables. Made SSL stats available in scripts. The following objects added: ssl.xact.* - successful SSL transactions HR and levels ssl.conn.* - open SSL connections level stats ssl.err_xact.count - failed SSL transactions count - Added --hushed_error_tout option to polysrv and polyclt to configure error reporting frequency. This reduced log sizes for long tests designed to produce many errors. - Added stats for HTTP transactions exchanged using a CONNECT tunnel. - Polished generated HTML reports. - Fixed CONNECT regression bug introduced by NTLM authentication changes. - Fixed reporter initialization bug causing occasional coredumps and bogus results. - Fixed occasional parsing errors of chunked encoded message bodies. - Fixed assertion failure when handling an errors response to an "Expect: 100-Continue" request. - Updated running-out-of-RAM handling code that has not been tested for a while. This should remove a PolyApp.cc warning when Polygraph process starts. version 3.5.0 2008/06/22 - Recognized authentication errors as transaction errors. It is now possible to write workloads that stop the test or decrease the load when there are too many authentication errors (using PGL phase goals or watchdogs). - Added epoll(2) level-triggered interface support. Epoll is now default on systems that support it (e.g., Linux). It should be faster than poll(2), the old default. Use the --file_scan command-line option to force poll if epoll gives you troubles. - Fix gcc 4.3 warnings and errors. - Removed deprecated acconfig.h that was causing ./configure warnings. Removed autotools-generated files from VCS control. version 3.4.2 2008/05/27 - Fixed NTLM authentication of CONNECT requests. version 3.4.1 2008/05/22 - Fixed 407 Authentication Required response handling: A v3.3.0 regression bug resulted in "misdirected" errors for authenticated requests. version 3.4.0 2008/05/16 - Support dynamic domain name generation using masks like '*.example.com:80'. The speed of new name generation is configurable and proportional to the miss rate. The number of names is frozen if the working set size is. Details at http://www.web-polygraph.org/test/docs/userman/dns.html - Added proxy authentication support for CONNECT requests. 
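A hedged sketch of the SslWrap.ssl_config_file field from the v3.6.0 notes above (the sharing_group field comes from the v3.1.5 notes further down); the remaining field names, paths, and addresses are assumptions rather than values taken from this file:

    SslWrap wrap1 = {
        ssl_config_file = "/usr/local/polygraph/ssl/poly.conf"; // replaces the old 'myssl.conf' default; path is made up
        root_certificate = "/usr/local/polygraph/ssl/root.pem"; // assumed field name and path
        sharing_group = "benchCerts";                           // share certificates with identical generation commands
    };

    Server S = {
        ssl_wraps = [ wrap1 ];                                  // assumed field linking an agent to its SslWrap
        addresses = [ '10.0.1.1:443' ];
    };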
- Fixed CONNECT requests support (v3.3 regression bugs). version 3.3.1 2008/04/29 - Report gzip-specific headers correctly for range responses. - Fix assert failures when using gzip encoding or range requests with bodies from a content database. Mixing range requests, gzip-compressed content, and cdbs now works correctly. - Fixed v3.1 regression bug: Uninitialized theBodyParser member led to occasional polyclt segfaults. version 3.3.0 2008/04/17 - Support configurable POST/PUT content. Request body content configuration is very similar to how response content is configured. Content databases (cdbs) are supported. Actual request content type properties are measured runtime and reported by the reporter. Standard workloads have been updated. Custom workloads must be updated if you are using POST or PUT request types. Use cntSimpleRequest content defined in contents.pg to mimic old behavior. Details and configuration examples at http://www.web-polygraph.org/docs/userman/req_bodies.html - Detect proxy validation requests (i.e., transactions where the proxy converts a regular GET request into an If-Modified-Since request, most likely because it has the content cached but considers it stale). Collect stats for useful and useless proxy validation requests. Useless validation requests are those resulting in a 304 Not Modified response (i.e., the cached content is still fresh). Added "Validation effectiveness" table to generated reports. This helps to analyze the effectiveness of proxy validations. Added "Cheap proxy validation hit ratio" table to the generated reports. The reported hit ratios are calculated as if useless proxy validation requests are so cheap that they should not be counted as server accesses for the purpose of hit ratio calculation. - Added basic statistics for messages without bodies and messages with unknown body content types. Requests with body have an "unknown" content type on server side. Messages with a non 2xx status and a body also have an "unknown" content type. - Fixed report generation bug leading to occasional garbage in the report text. version 3.2.1 2008/03/25 - Polished stat object names related to Range request generation. - Fix segfault when server replies to HEAD request and attempts to pour body. - Fixed Proxy-Authenticate scheme parsing bug that was causing random segfaults during authentication tests (Qiao Yang at IronPort) version 3.2.0 2008/03/19 - Added support for generation of Range requests and responses, including configurable single- and multi-range Range requests, single Content-Range responses, and multipart/byterange responses. - Fixed "undefined assert()" compilation error on Ubuntu Gutsy. version 3.1.5 2008/03/13 - Initial and mostly untested support for NTLM/GSSAPI proxy authentication. We need to find a proxy that supports NTLM/GSSAPI to test this feature. - Added support for recycling or sharing SSL certificates that have identical generation parameters. The support is enabled by setting PGL SslWrap::sharing_group to a non-empty string. The certificates within the same group will be shared if their openssl generation commands are the same. Sharing provides significant speedup in Polygraph start times when hundreds of servers require certificate generation. - Some Linux kernels have gettimeofday bugs that cause time jumps of approximately 72 minutes, especially on SMP systems. We saw it on an 8-CPU box running 2.6.18-8.el5. 
For 2002 discussion, start at http://www.kernel-traffic.org/kernel-traffic/kt20020708_174.html#1 We now try to ignore individual jumps exceeding 60 minutes. If the time did change, the change will be honored during the second gettimeofday() call. - Make NTLM code compile when SSL is disabled. Polygraph will assert if NTLM is used without SSL support because it needs SSL code for NTLM. - Removed extra terminating CRLF after CONNECT headers. version 3.1.4 2007/11/12 - Use the first (top) supported Proxy-Authenticate method instead of the last one. - Use the last '@' in NTLM credentials to separate the host name from the user name because the user name itself may contain '@'. - Robots were not parsing some CONNECT responses correctly. - Send full Request URL only if we are talking directly to a proxy. Sending an HTTP request inside the CONNECT transaction is not talking directly to a proxy. - Do not try to parse content as markup if we are not going to request embedded objects due to non-positive embed_recur. version 3.1.3 2007/06/13 - Tolerate binary log "level" statistics with negative mean level data, which may be caused by level sum overflow. - Fixed --log and --sample_log command-line option descriptions. (Mikhail Fedotov). - Removed no longer used or maintained nmake-specific Makefiles. version 3.1.1 2007/03/07 - Fixed the 'theInOff <= theCapacity' assertion. version 3.1.0 2007/03/05 - Support client-side NTLM authentication with proxies. - Fixed a bug resulting in a stuck client transaction when the HTTP request did not fit into a single I/O. - Make GCC4 on Ubuntu6 happier (Mikhail Fedotov). version 3.0.5 2006/09/25 - Changed license to Apache v2.0. - Support gzip Content-Encoding: Build fails if gzip content encoding is requested but an appropriate zlib library is not found. Added encoding PGL field to PGL Content type to specify supported encodings for a given content type. Added accept_content_encodings field to PGL Robot type to control Accept-Encoding request header generation. When parsing Content-Encoding values, only gzip encoding is recognized for now, and q=0 parameter (if any) is ignored. Generate 406 "Not Acceptable" responses and record transaction errors when content coding negotiation fails. Send Vary: Accept-Encoding header when multiple codings are possible, depending on client acceptance rules. - Use autotools to generate Makefiles instead of relying on custom generators that are too costly to maintain. - Removed licensing terms acknowledgment from ./configure - Do not simulate cached DNS responses for embedded objects if embedded URL points to a foreign server. - Set pipelining depth for each newly opened connection; the default is the depth of 1 (i.e., no pipelining) - Added initial support for HTTP cookies. Generate cookies based on probability, count, and size params supplied by the user via PGL (cookie_set_prob, cookie_set_count). Added Agent::cookie_sender PGL field to specify sticky probability that an agent supports (generates or returns) cookies. Added Server::cookie_size PGL field to specify the size distribution for generated cookies. Added Robot::cookie_keep_lmt PGL field to control how many cookies (per server) are kept on the client side. - Added hostedUrls() PGL function to convert a URL trace to an array of extracted host addresses. - Added pipeline_depth PGL field to control the maximum number of concurrent (pipelined) HTTP requests per connection. 
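Illustrative PGL fragments for the cookie and pipelining fields listed in the v3.0.5 notes above; the distributions and values are guesses, and other required Robot and Server fields are omitted:

    Server S = {
        cookie_set_prob = 30%;         // probability that a response sets cookies
        cookie_set_count = unif(1, 4); // cookies per response; distribution choice is an assumption
        cookie_size = exp(0.1KB);      // Server::cookie_size
    };

    Robot R = {
        cookie_sender = 80%;           // Agent::cookie_sender: sticky probability of supporting cookies
        cookie_keep_lmt = 8;           // keep at most 8 cookies per server
        pipeline_depth = const(2);     // assumed to be a distribution; a depth of 1 means no pipelining
    };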
- Allow for more names than there are IP addresses in the PGL address map and use round-robin mapping to assign IPs. This is needed to support URL traces that may have a lot more real host names than the IP addresses the tester is willing to create. - Support "." or root zone when configuring PGL address maps for BIND configuration generation. - Disabled recursion in named config file, just in case. - When generating a BIND configuration file, comment out records for names with underscores: BIND will not use the entire zone if it has invalid names. - Speedup warmup phase: If we sent two requests to the server, the server is considered warmed-up even if there were no first-hand responses. - Raised log version to 11. - Complain if response to a reload request was generated before the reload request. - Allow content database (cdb) to co-exist with may_contain PGL content option and use container information to configure the database. - Allow the number of embedded objects in a page to exceed 255 with a slight chance an embedded object ID colliding with another object ID. Warn if the number of embedded objects increases beyond 500. - Added "linkonly" input format to cdb. With this format, cdb manipulates embedded URIs (links) as if markup format was used but preserves the entire file as a single object as if verbatim format was used. - Added tagname-independent catch-all rules for common attributes such as background and src so that we can find more embedded URLs. - Presence of the Xact-Id header field is now sufficient for the response to be considered as generated by Polygraph. - Do not cancel a server-side transaction if the request lacks Polygraph headers because it may be a prefetch. Count such requests as errors, for now. - When generating IP aliases, avoid interfaces that have no inet family information at all (e.g., teql0 on a Linux laptop). Thanks to Adrian Chadd. - Enabled recovering partial phase statistics from interval stats. - Deal with duplicate phase names by appending unique suffixes instead of crashing. - Added page-related stats such as page response time and page size. Page download accounting starts with the first "container" transaction and ends with the last transaction among those that fetch the container and embedded objects. Client-side only. - Added a "mean response time versus load" scattered plot figure to the report. - Added traces of offered and measured hit ratios, based on client-side measurements. - Report pipelining statistics. - Switched to Gnuplot 4.x command conventions. PNG terminal in Gnuplot 4.x does not have a 'color' option. All "set *style" commands are now "set style *" - Be more robust when reading strings from a corrupted test log. - Fixed --unique_world handling when persistent working set feature is enabled. - Fixed seed generation when computing random request bodies. Old generated request content sequences were probably not random enough because they were probably always started at the beginning of the precomputed random-content source instead of starting at random offsets. - Fixed handling of timed out DNS queries. The queries were probably not retried unless the timeout was reported on the console. - Content checksums were not generated for some URLs. - Newer FreeBSD versions require (/32, /128) netmasks for aliases on public interfaces. Warn, but do not quit if the explicit mask is too small. - The "real." and "cachable." prefixes of PGL stat objects were not recognized. Thanks to Wu S.N. (a.k.a. 
Searock) - Various GCC3 and GCC4 portability fixes, including the use of the newer stringstream interface instead of strstream, to avoid "deprecated header" warnings in GCC3 and compilation errors in GCC4. The code will no longer work with GCC2 though because GCC2 does not support stringstreams. - Applied FreeBSD 5.2-CURRENT / gcc-3.3.3 compatibility patches from Sergey A. Osokin - Give priority to new headers but still load old headers if new headers do not exist. Loading old headers is unlikely to help anyway because many interfaces have changed. - Fixed network socket handling code on Solaris. version 2.8.1 2004/10/11 - GCC 3.4 compatibility fixes - GCC 3.3 compatibility fixes backported from Polygraph 3.0 - guard SIOCGIFINDEX-dependent code (Linux compatibility) - String does not have a reset() method - include as a last resort when compiling string.h wrapper; ideally, it too should be guarded by ./configure - make sure configure fails if SSL support is not disabled and SSL files are not found - fixed ReportGen installation, although new reporter should be used instead; ReportGen is deprecated and will not be available in 3.x version 2.8.0 2003/11/17 - SSL/TLS (https) support, including CONNECT requests - HTTP Authentication support, including support for very large user databases (e.g., via LDAP directory) and loadable modules for custom authentication schemes (e.g., RADIUS) - IPv6 support - Automatic adjustment of offered load based on real-time measurements, such as response time or throughput (aka peakfinder or watchdog) - Persistent working set and ability to continue/resume tests - Support for checking decisions of URL- and content-based filtering proxies to allow, deny, or modify content during a performance test - Better integrated, more detailed, and robust result reporter - GCC 3.2 support 2.7.4 -> 2.7.5 20011231 - removed "you can modify" clause from the default end-user license to comply with University of California license - changed default PopModel.bhr_discrimination from 100% to zero to remain backward compatible with earlier versions and workloads that did not use BHR discrimination algorithm - removed "under construction" warnings from -4 workload files - log statistics about transactions involving foreign URLs - make all accept(2) errors on the server side non-fatal; continue to report previously fatal errors - make errors when creating a UDP socket to send a DNS query non-fatal - report current load factor level when rptmstat is in use - better handle very large console messages (some errors may generate output exceeding internal buffer capacity) - fixed phase synchronization bug; reordered requests from robots could sometimes confuse phase synchronization algorithm and lead to incorrect phase schedule - BSDI portability fix - fixed lx segmentation fault bug that was especially likely in the presence of a large number of different errors logged during the test - do not print millions of "fyi: populus reached max level" messages when an (often incorrectly configured) test attempts to increase the number of robots when all robots are already in use - fixed other minor bugs that probably remained invisible in 2.7 branch but were exposed by ongoing code changes 2.7.3 -> 2.7.4 20010922 - synchronized packet delay/loss parameters with on-line workloads documentation (PolyMix-4 and WebAxe-4): 40msec delay and 0.05% packet loss in both directions, on one side - changed plateau phase duration back to 4 hours until more evidence is collected to support the theory that a shorter measurement phase is OK.
(WebAxe-4) - set final populus factor to zero and added "synchronize = false" flag to last phase (dec2) in PolyMix-4 and WebAxe-4 workloads to ensure smooth test termination - do not perform DNS lookups for embedded objects - added "synchronize" flag to Phase type (PGL); by default, phases try to synchronize their finishes with remote schedules; if set to false, the phase will quit when its local schedule tells it so; the latter is handy for the very last phase of the test when request rate may be almost zero, leaving few chances for synchronization; this per-phase flag replaces the global --sync_phases option - emit progress reports on the console every 5 minutes; handy when a phase seem to be stuck for no apparent reason; the report frequency is currently hard-coded - do not report "premature end of message body" errors when transaction is aborted by the other side due to PGL abort_prob settings - ignore stale objects that became stale while in-transit; report more info about stale objects - update clock more often at startup - replaced several assertions with warnings and workarounds to be robust in the presence of yet unknown internal and external errors - fixed waiting transaction accounting bug: when robots were made idle to meet (smaller) populus goal or for other reasons, waiting transaction queue was silently flushed instead of "killing" each individual queued xaction and updating appropriate counters/tables; the bug led to wrong wait.level statistics and to memory leaks - robot transactions were not resetting calculated Object Lifecycle timestamps leading to false "stale object" errors - different embedded object IDs were sometimes generated for the same container, screwing MD5 checksums and probably some supersmart intermediaries because generated content for the same oid may not be the same - optimized address map searching; linear search was too slow for large scale configuration, leading to worse response times for servers closer to the end of the robots' origins array - make sure that no more than scheduled abort size gets written/read on a connection; old code would read/write as much as possible, and many scheduled aborts would not happen until the object is fully written/read, when it is too late for the other side to notice/experience an abort - a CRLF sequence was getting appended to a host name in the Host: header if that header happened to be the last HTTP header in the request/response; Polygraph servers would report "foreign host name" errors because of that extra CRLF in the host name; other headers in the same [last] position were getting extra CRLF as well, but, apparently, their parser would ignore those extra characters (or, at least, no side-effects were noticed) - initialize OS-dependent libraries in aka to prevent aka from quitting due to WSAStartup() not being called on MS Windows; alias handling code still needs more work on Windows 2.7.2 -> 2.7.3 20010830 - submit requests to foreign URLs if needed; the corresponding responses are read but not counted/analyzed as hits/misses/etc; the only current valid source of foreign URLs are redirect (HTTP 302 Found) responses that have foreign URLs in the Location: field - use relative URLs in container objects because most embedded URLs that point to the same Web site are relative and because it solves problems with group of "identical" servers implementing one Web site (each server should not put its own address into a URL, but should put its visible name, but one server may, in theory have multiple visible names) 
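A minimal PGL sketch of the per-phase "synchronize" flag described above; the phase name follows the dec2 example, while the goal, the factor values, and the factor field names are assumptions:

    Phase phDec2 = {
        name = "dec2";
        goal.duration = 5min;       // illustrative
        populus_factor_beg = 0.1;   // assumed field names for the "populus factor"
        populus_factor_end = 0.0;   // final populus factor of zero, as described above
        synchronize = false;        // finish on the local schedule; do not wait for remote phases
    };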
- attempt to handle more errors gracefully, without terminating a connection and aborting the transaction; still report errors as they are detected (polyclt) - handle [large] string ranges efficiently (PGL) - all open square brackets ('[') in strings must now be escaped with a backslash ("\[") for the string to be interpreted as a string rather than a range (PGL) - compute MD5 checksum for selected objects - WSS of 1GB is now required for cache-off entries (WebAxe-4) - default port for HTTP surrogates should be 80 (WebAxe-4) - client and server-side .hosts values where swapped (WebAxe-4) - minimized the number of differences in parameters and layout between PolyMix-4 and WebAxe-4 - added proxy IP:port address to webaxe-4.pg and address map to webaxe-4-guts.pg; robots should send requests directly to the proxy address as if it were an origin server - added port numbers for WebAxe-4 servers (via server-side.hosts array) - replaced "foreign HTTP request/reply" errors with "foreign HTTP request or response" (response without Polygraph-specific headers while those headers were expected) and "foreign URL" (a URL that does not belong to Polygraph URL space) errors - added "foreign URL without host name" error to complain about relative foreign URLs when they are not expected - warn if popularity model is not set and rep_types are used because some reply types require popularity model - added X-* extension headers to HTTP 302 responses - use WSAEMFILE on W2K instead of EMFILE when indicating out-of-sockets condition to make that error reporting on W2K work - webaxe-4-guts.pg file was not installed on "make install" - fixed order-dependency bug in polymix-4-guts.pg and synced webaxe-4-guts.pg - polished polymix-4-guts file to eliminate non-differences with webaxe-4 guts file - lx should not read standard input by default - fixed yet another bug affecting reported "level" stats; old reports could have much higher per-phase level stats than what could be inferred from ltrace plots; per-phase level stats were incorrect - set GET method for redirected requests; old code would assert on redirects 2.7.1 -> 2.7.2 20010813 - calculate values for If-Modified-Since header fields using Object Life Cycle configuration of the corresponding object content type instead of using cached last-modified values; robots could not cache enough objects to generate reasonable number of IMS requests with "sane" IMS headers; the new method solves the problem and eliminates requirement for maintaining a private cache on each robot (saving lots of RAM for large scale configurations); now we can specify the number of IMS-200s and IMS-304s and should be able to meet the specs; this is a yet another change that makes it crucial to have identical client- and server- side PGL configurations - select object birthday as a random point within the object life cycle starting at "time zero" instead of allowing relative or absolute birthdays; birthday point should not matter for any current practical purpose, and having relative birthdays made it impossible(?) 
to re-generate OLC timestamps in non-authoritative processes (e.g., clients); the latter was needed for robots to generate sane IMS requests; PGL object birthday settings are now deprecated and ignored - removed private_cache_cap settings (i.e., disabled robots' private caches) from all standard workloads; we no longer need a private cache to generate IMS requests and, hopefully, the latter was the only reason to have it; private caches consumed a lot of memory on large scale configurations - added "bhr_discrimination" field to the PopModel type to control the aggressiveness of the byte hit ratio (BHR) discrimination algorithm; 100% makes algorithm the most aggressive (default); 0% disables the algorithm; internally, bhr_discrimination is actually a probability of applying the discrimination algorithm when oid selection is made - added bhr_discrimination to generation-4 workloads - make request body size of PUT and POST requests random rather than 8192 bytes; 8192 bytes is now an upper limit given 16KB I/O buffers - moved WebAxe-4 details into include/webaxe-4-guts.pg and synchronized WebAxe-4 specs with recent PolyMix-4 changes to keep the two workloads similar (needs more work) - removed "recurrence" field from content types in contents.pg since Polygraph does not use it anymore; warn if "recurrence" is set - increased public_interest from 50% to 75% because we now have robots that are not active until after the [private] working set size is frozen and, thus, some robots will not be able to request objects from their private working set, violating the old goal of 50% of request submitted to private subsets; while no drastic changes are expected due to this change, we need more measurements to understand its effect - removed birthday settings from standard OLC configs; OLC now ignores birthday settings - decreased the ratio of IMS requests in PolyMix-4 from 20% to 15% - configured PolyMix-4 robots and servers to abort 0.1% of transactions - added 1.5% of POST and 0.1% of HEAD request methods to PolyMix-4 and WebAxe-4 workloads - added zone name back to AddrMap in PolyMix-4; dns_cfg should work with polymix-4.pg now - increased supported and required log version to 10 due to transaction logging changes; older logs cannot be read with newer readers and vice-versa - IpsToNames() now preserves port numbers (PGL) - supply more information about DNS timeout errors - DNS answers were allocated but not freed, leaking a memory on every DNS request - try other DNS servers (if any) when DNS query times out - cloned addresses are now iterated depth first when agent aliases are created, because alias creation code tries to allocate n-th chunk of addresses rather than every k addresses; the old code would create twice as many aliases for addressing schemes with more than one agent per address; the new code creates the same alias twice; needs more work to avoid extra creation - "--names" option was treated as "--addresses" option (dns_cfg) - improved old IP alias deletion on OSes with numbered virtual interfaces (e.f., Linux and Solaris); this needs more work; for example, one cannot have both polyclt and polysrv creating aliases on the same Linux box - fixed netmasks and broadcast address settings on systems with "numbered" virtual interfaces and without ifaliasreq structure (e.g., Linux and Solaris); old code did not set netmasks and broadcast addresses at all (effectively), and defaults were probably assigned by OS - when creating aliases, we were opening a new socket instead of closing an old one, 
creating thousands of "extra" sockets when aliases were created on a box; this led to "too many open files" errors on some systems, at least 2.7.0 -> 2.7.1 20010803 - added "recurrence" field to PGL's Content type; recurrence value affects the probability of the content type being selected when Polygraph decides which object should be revisited among a set of candidates; recurrence is a "weight" rather than "probability" because several objects of the same type may be present in the selection group and the sum of all weights can exceed 1.0; in other words, the selection group is formed first, recurrence weights are applied second; recurrence weight defaults to 1.00 - added "zone" field to AddrMap type (PGL); the zone name is not used by Polygraph run-time, but can be used by external programs such as dns_cfg to build zone files based on PGL configuration - added ipsToNames() PGL function to convert an array of IP addresses into host names using a simple 1:1 mapping - added "--config" and "--cfg_dirs" options to dns_cfg to extract required information from a PGL file; old command-line options interface is still supported and is mutually exclusive with the new PGL interface - added DNS resolver address as a parameter for PolyMix-4 workload; set DNS resolver timeout to 5 seconds - use ipsToNames() to generate DNS names for PolyMix-4 servers PolyMix-4 workloads are now using DNS names; this can be disabled (in violation of PolyMix-4 rules) by setting AddrMap "names" to server "addresses" - polished PolyMix-4 workload files to explicitly list address space and real hosts settings; /22 subnet must be specified for address space or PGL will compute the smallest subnet given the bench configuration (smaller subnet will lead to a different agent addresses compared to the required /22 subnet, screwing up routing) - added recurrence weights to content types used in PolyMix-4; this should improve byte hit ratio (BHR) simulation, but it looks like more work is needed to actually make that happen - polished dns_cfg output to better match BIND files format - renamed "--domain" to "--zone" (dns_cfg) - renamed Session's heartbit_notif_rate to heartbeat_notif_rate (PGL) - reduce the number of "labeled logs have no object named X" messages when generating reports - account for the number of agents per address when computing minimum (smallest) subnet in version-4 addressing schemes; old code incorrectly complained about /22 being too small for PolyMix-4 workloads because it did not account for the fact that two PolyMix-4 robots may share an IP address - plot_traces was using unportable regular expression feature that led to "Character class syntax [: :] is reserved for future extensions" Perl errors when generating reports; use a more portable expression instead - delete old IP aliases before creating new ones; old code was not calling the delete method 2.6.5 -> 2.7.0 20010729 - added "req_methods" field to Robot (PGL) to specify distribution of HTTP request methods for a robot to use; supported methods are GET, HEAD, POST, and PUT; methods that allow request bodies (POST and PUT) use constant body size of 8192Bytes, for now. 
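Illustrative fragment for the new Robot "req_methods" field; the probabilities follow the PolyMix-4/WebAxe-4 mix mentioned above (1.5% POST and 0.1% HEAD), with the remainder going to GET:

    Robot R = {
        req_methods = [ "POST": 1.5%, "HEAD": 0.1%, "GET" ];  // PUT is also supported
    };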
- added "abort_prob" field to robot and server PGL configurations to abort transactions based on on a given abort probability; robots may abort while receiving response body, and no aborts while sending headers, waiting for a response, or reading response headers are supported; servers may abort while sending response body, and no aborts while sending headers, waiting for a request, or reading request headers are supported - added "addr_space" field that deprecates "addr_mask" field; version-4 addressing schemes use "addr_space" addresses to generate robot and server addresses; older schemes used only two first octets of, now deprecated, addr_mask, making agent address configuration too rigid; "addr_space" can list any IP addresses as long as they comply with the addressing schemes in use; this is a big change so there may be bugs in implementations of addressing schemes that now use addr_space - rewrote PGL arrays to allow array members to be arrays; the nesting is almost transparent to the end-user as far as member iteration is concerned (i.e., nested arrays are flattened automagically), but does affect probabilities assignment because each container can have its own [sub-]probabilities in addition to a member probability specified in the array; doing something like "[ a: 5%, [b: 10%, c]: 50%, d]" is now possible and equivalent to "[a: 5%, b: 5%, c: 45%, d]" - added a binary "+" operator for arrays so that two arrays can be concatenated without nesting - added "--delete_old_addrs " option to polyclt/srv to specify whether old IP addresses should be deleted when creating new IP addresses; defaults to "yes"; may allow to run polyclt on the same box as polysrv _and_ create aliases run-time (bit more work would be needed for OSes that number their alias interfaces for system call purposes) - do not send Last-Modified and Expires headers for uncachable objects; some proxies (e.g., Apache's mod_proxy 1.3) completely ignore "Cache-control no-=cache,private" and "Pragma: no-cache" response headers and cache an object if a response has, say, a valid Last-Modified header - use interface names and subnets directly from agent.addresses arrays instead of addr_mask when creating new aliases; now, in theory, one can have agents bound to several interfaces and still be able to create aliases automatically - added PolyMix-4 workload (first draft) and synchronized them with PGL changes - use 2 robots per IP for WebAxe-4 and PolyMix-4; this allows to support higher request rates with reasonable number of IP aliases - bumped log version to 9 due to new stats (HEAD/POST/PUT and aborts) being logged - reduced memory footprint of large address ranges and large "**" clones (PGL) - renamed option "--addrs" to "--addresses" (dns_cfg) - "something/0" expressions were interpreted as rate because zero is convertible to time (e.g., 0 seconds); forced PGL to just complain about division by zero instead 2.6.4 -> 2.6.5 20010729 - use "binary" mode for log files; required on MS Windows for the logging to work; previous versions produced corrupted log files on MS Windows or any platform that treats binary files specially - changed Unix makefiles to stop after the first error 2.6.3 -> 2.6.4 20010709 - added "proxies" field to robot PGL configuration; proxies must not be used together with the --proxy command line option as they are mutually exclusive; a robot selects a random proxy at the configuration time and uses that proxy for the entire duration of the test (sticky proxy assignment); proxy addresses are distributed 
evenly (if possible) among all robots in the test; individual groups of robots (e.g., all robots on one host) may not get an even distribution. - support "make install" in workload files; "make install" will overwrite existing files in the destination directory (/usr/local/polygraph/workloads by default); users are not supposed to modify original workload files and should create copies when modifications are needed - made aka robust when no primary IP is present on the interface - lognormal mean and standard deviation were not reported correctly (Johnson Lee ) - synced comments in polymix-3.pg with Bench type changes - clientHostCount in PolyMix-3 on 2.6 branch was not computed correctly, leading to ridiculously small working set sizes and shorter fill phases 2.6.2 -> 2.6.3 20010627 - emit (errUnreachContType) "unreachable content type" when an embedded content type cannot be found on the [visible] server that hosts the container object; Polygraph can only handle embedded objects that reside on the same visible server as the container - use the same content distribution for all L7 servers (except for the URL extension) so that all servers will get similar load - when server's direct_access probabilities are specified manually, use them and do not compute our own - added "byte.rptm.mean", [milliseconds / kilobyte] object to Report Generator. The object value is visible in make_report as "byte" table entry in the Response Time table and as the byte_rptms plot. - replaced --rng_seed option with two new options: --global_rng_seed and --local_rnd_seed. Both default to 1. All processes within a test must have the same --global_rng_seed value to correctly generate "global" information such as a URL extension for the same object ID. All processes within a test should have different --local_rnd_seed settings to avoid lock-step behavior of things that should not be synchronized. 
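Illustrative fragment for the "proxies" Robot field from the 2.6.4 notes above (addresses are made up); this field and the --proxy command-line option are mutually exclusive:

    Robot R = {
        proxies = [ '10.0.0.101:3128', '10.0.0.102:3128' ];  // each robot picks one proxy at configuration time
    };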
- do not print "Potential problems" header if no problems are detected by make_report - when generating a public miss, use local public world slice only; using remote slices creates race condition for generating a miss with the corresponding remote agent; these race conditions lead to buggy offered hit ratio - W2K select(2) fails if no FDs are passed to it (i.e., all FDs sets are empty) - fixed fixed a very old bug (again) that, under certain conditions, left some FDs ignored by select(2); the earlier fix was ineffective - fixed ceil() workaround once again: the earlier fix was ineffective 2.6.1 -> 2.6.2 20010613 - polished SrvLB workloads; added SrvLb4As addressing scheme; same as WebAxe4 but uses server_side.hosts for server addresses instead of building them using a server_side.addr_mask - try to select appropriate target if content type is known so that embedded objects will be directed to the right targets; this feature is needed for L7 SLB tests; needs more work to randomize target selection when more than one target server has the required content type - if no command is given to forsome, just print addresses in the specified host group - check that the content type in the request URI belongs to the server that the request was sent to; generate a misdirected request error if it does not; previously, a server would happily serve any content type that belonged to at least one server in a PGL configuration - open DNS/UDP socket for a robot only when the first DNS query needs to be sent out; this will prevent polyclt from creating one socket for every robot even when no DNS queries are needed; also do not select(2) DNS/UDP socket for reading when there are no pending transactions to reduce CPU overhead when some DNS queries are present - public URL worlds were erroneously updated with stale data when hot set position of the stale snapshot differed from the current one; the bug led to incorrect (higher than what should have been offered) offered hit ratios - fixed a very old bug that, under certain conditions, left some FDs ignored by select(2)/poll(2); most likely, the bug was never or infrequently triggered by Polygraph code before; noticed the bug when adding BEEP support that has somewhat different FD registration/scanning pattern from the rest of the code. 
- libc's ceil(700/0.7) returns 1001; the old workaround was not good enough, and led to creation of 1001 agents where only 1000 agents were needed; added a better workaround - made interface aliases / primary address detection code more robust on Linux - changed recur_factor to recur_factor_{beg,end} in webaxe-1.pg 2.6.0 -> 2.6.1 20010605 - fixed "version three" addressing scheme bug that led to incorrect addresses being auto-generated for PolyMix-3 and other -3 workloads that used addressing scheme PGL feature - a server now checks for requested URL to belong to that server; if request is misdirected, the connection is closed with an xaction error message dumped to the console; no 404 HTTP error is generated at this time - added --load_balancer option to make_report to generate slightly different reports for load balancing tests 2.5.5 -> 2.6.0 20010522 - changed licensing terms and added not-for-publication clause - updated README - added DNS/FQDN client-side support; robots can now send [non-blocking asynchronous] DNS queries to specified DNS servers to resolve destination addresses; DNS responses are currently not cached - added pxylb-4.pg and srvlb-l4-4.pg workload drafts to test proxy and origin server load balancing, respectfully - added webaxe-3.pg configuration file for WebAxe workload; this file was distributed with Polygraph version 2.5.4 under the name webaxe-2.5.pg - create IP aliases upon startup, based on the PGL bench configuration; warn if aliases are not being created and explain why - log PGL configuration upon startup - added support for URL prefixes; arbitrary string(s) can be specified to be included as a URL "prefix", right after the first '/' following the host name or host address; use Mime.prefixes for now (just like Mime.extensions) - added "special_msg_factor_beg" and "special_msg_factor_end" fields to Phase specs to control the presence of special messages (IMS, Reload, 302 Found, etc.) in the generated stream (PGL); useful to speedup fill phase and to test the influence of those special requests on the cache - handle ``302 Found'' replies: treat them as special valid kind of an HTTP response and follow URIs in the Location: field; 302s may be foreign replies, but 302 takes priority when it comes to redirection handling and statistics logging - added client-side support for HTTP 300, 303, and 307 responses; a Robot would request the Uri found in the Location: field of the response after processing the original transaction - use new client-centric PubWorld interface instead of old server-centric object ID (oid) exchange; use/maintain related global indexes; broadcast/update PubWorld info - added support to flip-flop debugging (--dump hdrs) status on USR1 signal - try to flush logs when process is running out of memory; pre-allocate a 16KB buffer to be freed when we are running out of memory, in hope to get enough memory to flush the logs; this trick may help to save some of the logs when Polygraph quits with "virtual memory exhausted in new"; may need more work - added phase name reporting to ltrace; when multiple logs with different phases are merged, the printed phase name can be the name of any then-current phase. 
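Illustrative fragment for the URL prefix support described above, using the Mime.prefixes field; the MIME type, prefix string, and size are made up:

    Content cntImage = {
        mime = {
            type = "image/gif";
            prefixes = [ "img-" ];   // inserted right after the first '/' of generated URLs
            extensions = [ ".gif" ];
        };
        size = exp(4.5KB);
    };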
- added polyprobe -- a traffic-volume-oriented network test tool that will, eventually, replace netperf for many-to-many raw network bandwidth tests used before PolyMix tests - added polyrrd tool to supply run-time Polygraph stats to the rrd tool; rrd tool is a "better MRTG" software for displaying run-time stats in graphical form: http://ee-staff.ethz.ch/~oetiker/webtools/rrdtool/ - added rrd-cgi.sh as a sample file to use with polyrrd and rrd tool - pgl_test now accepts include directories as parameters (after the PGL file to test) - assume URLs starting with "/health" are health checks and reply as if --ignore_urls was set - added "Rptmstat" PGL type to store response-time-stat (as in thermo-stat) configuration; an rptmstat attempts to keep response time within the specified bounds by adjusting offered request rate (just as a thermostat attempts to keep temperature within the specified bounds by adjusting the furnace or A/C operation) - added "rptmstat" field to Phase type; rptmstats are activated on a per-phase basis - added "rep_types" field to Server agent type (PGL); "Basic" and "302 Found" reply types are supported - added "client.hosts" and "server.hosts" fields to Bench object to specify real machine addresses (as opposed to "virtual" aliases for agents) (PGL); - added DnsResolver PGL type to configure DNS resolver for robots - skip insane/corrupted entries in the trace and warn about them; trace corruption may happen when Polygraph process or the host OS is killed without a chance to flush the binary logs - added dns_resolver field to Robot type - support port ranges in PGL addresses - added use(Bench) PGL call to specify the bench configuration for the test - added minimize_new_conn field to Robot to specify probability of an attempt to use idle connections to address' substitutes when no idle connections to the address are available; added note_substitutes() procedure call to register known substitute groups (PGL); substitutes are useful to approximate connection reuse pattern of a single-destination workload in a no-proxy or no-balancer setup - added dns_cfg tool to generate configuration files for named, a DNS server; needs more work - use a new tool to generate Makefiles for Unix (gmake) and MS Windows (nmake). Temporarily lost the ability to build out of the source directory.
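A hedged sketch of the DnsResolver type and the Robot dns_resolver field added above; the "servers" and "timeout" field names are assumptions consistent with the resolver address and the 5-second timeout mentioned for PolyMix-4 in the 2.7.1 notes above:

    DnsResolver dr = {
        servers = [ '10.0.0.53:53' ]; // assumed field name; resolver address is made up
        timeout = 5sec;               // matches the 5-second PolyMix-4 resolver timeout
    };

    Robot R = {
        dns_resolver = dr;
    };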
- renamed --*log_size to --*log_buf_size so better reflect the true meaning of the option - removed the --new_oids* options as unused - renamed popularity distributions pmUnif and pmZipf to popUnif and popZipf to avoid conflict with popularity models (pm*) - polished simple.pg and moved simple server to port 9090 to avoid strange port 8080 hijacking problems on some Windows installations - synced standard workloads with PGL changes - changed HotSet algorithm to avoid storing theObjLmt: change hot position when it is no longer in the working set; when WSS is unknown, keep hot position in the middle of the URL set (constantly sliding position); the rationale is to reduce memory overhead and, maybe, to simplify remote synchronization of hot positions - changed Bench definition to use BenchSides instead of client_ and server_ prefixes - added new "Addressing Scheme" PGL types to calculate agent addresses for various workloads - PGL agents should always bind(2) to specified addresses - removed "Rptm vs Size" and "Conn Life Time vs Use Count" statistics; (they were huge and rarely used; similar data can probably be derived from stats samples) - changed binary log format and version - moved pop_model PGL field from Robot to Agent because Server agent needs it to generate ``302 Found'' responses - replaced "agent.hosts" fields with "agent.addresses" to be more consistent; "hosts" in PGL usually means "real, permanent address" while "addresses" are often used for IP aliases that are created run-time - use address ranges when printing arrays of addresses, if possible - when checking for "clocks out of sync" errors, ignore replies that took a long time; slow replies is usually an indication of problems other/bigger than clock synchronization - embedded objects should inherit request type property from the container (not ideal, but better than using default req type for all embedded objects) - removed old place() PGL function - increased client-group-count-dependent hash capacities from 11 to 37 old limits did not allow to use more than 11 clients or server machines in a test - changed default value of polysrv's --idle_tout to 5 minutes - removed piper tool -- it was not used and caused compilation problems on FreeBSD 4.x due to new DummyNet interface - tried to make alias manipulation with aka work again on Linux - added port of msl_test to linux, based, in part, on code donated by Andrew Schultz from Mission Critical Linux, Inc. More work is required to make msl_test more portable - provide traces of conn.open.rate and conn.estb.rate in the report - improved report generation tools - AIX portability fixes - HP-UX (aCC) portability fixes - Linux portability fixes - MS W2K (VC++) portability fixes - Solaris portability fixes - added config.bat script to compile sources with nmake on Windows - reduce default optimization level on Linux to 1 (-O1) to avoid coredumps - lots of polypxy fixes to make it more robust; needs more work - a yet another attempt to fix code that merges level stats - adjusted alias creation code to extract and honor individual subnets of the alias addresses - when merging (not concatenating) stats, the duration should be set to the maximum duration of the two intervals being merged so that merging 10 req/sec for 1 sec and 1 req/sec for 1 hour does not result in 11 req/sec mean; will this screw anything that used to work? 
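Illustrative fragment combining the Server "rep_types" field with the pop_model field that, as noted above, now lives in Agent so that servers can generate ``302 Found'' responses; the probabilities are made up:

    Server S = {
        rep_types = [ "302 Found": 5%, "Basic" ];  // reply type distribution
        pop_model = { pop_distr = popUnif(); };    // popZipf() is the other renamed popularity distribution
    };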
- when merging or concatenating phase stats, do not change the phase name if phases with the same are joined - fixed ./CdbBodyIter.cc:43: assertion failed: '!theInjector'; a common problem with realistic content simulation module - fixed an old ltrace bug: sampling windows were sometimes getting huge, leading to excessive memory consumption and possibly somewhat incorrect trace plots - "ceil(700/0.7)" returns 1001(!!); added a workaround; old IP calculation tools were generating wrong values when 700 req/sec/host and 0.7req/sec/agent combination was used - aka was not setting subnets right 2.5.4 -> 2.5.5 20010501 - updated copyright/license statements - added licensing terms acknowledgment to ./configure script - resurrected support for user-defined or "table-based" distributions; e.g., ``SimpleContent.size = table("/tmp/t.distr", "size");'' where /tmp/t.distr contains the distribution of type size_distr - added webaxe-3.pg configuration file for WebAxe workload; this file was distributed with Polygraph version 2.5.4 under the name webaxe-2.5.pg - Red Hat 7.0 compatibility fixes - AIX compatibility fixes - fixes for platforms with unsigned chars (e.g. AIX); PGL parser would quit on those platforms with error location pointing to the end of a file being parsed - fixed an old ltrace bug: sampling windows were sometimes getting huge, leading to excessive memory consumption and possibly somewhat incorrect trace plots - fixed local time detection bug on Solaris; the bug caused Polygraph to complain about client/server clocks being out of sync even if both processes were running on the same host - replaced "MB/sec" column heading produced by make_report with less confusing Mbit/sec 2.5.3 -> 2.5.4 20000724 - added "special_req_factor_beg" and "special_req_factor_end" fields to Phase specs to control the presence of special requests (IMS, Reload, etc.) 
in the generated stream (PGL) - identify cases when HTTP race conditions lead to connectivity errors on used-to-idle persistent connections; retry transactions when such errors occur; log the number of retried transactions (that otherwise do not affect statistics) - eliminated most of the special (non-Basic) requests during the fill to shorten the fill phase (PolyMix-3) - when a phase meets its goal, wait until all remote phases are ready before proceeding - added the "--sync_phases " option to disable auto phase synchronization if needed - log statistics about oid generation algorithm - distinguish "hot" requests in stats samples - polished stats samples and made them smaller - report WSS in terms of bytes (in addition to object count); be careful when interpreting the values -- adjust for embedded objects and other hosts in the bench - complain if server agent port is not specified - upgraded current and required log versions from 2 to 3; log versions were not upgraded for a while despite format changes, unfortunately - warn about log extractor <--> log producer versions mismatch when reading a log - fixed typo in X-Srv-Sets header parsing code that led to growing WSS for public (shared) objects, resulting in lower-than-expected HR - fixed ./CdbBodyIter.cc:43: assertion failed: '!theInjector' - fixed coredumps when all configured phases are ended and Polygraph adds a "cold" phase for the last transactions to finish and coredumpts with segmentation fault - made reportgen.cfg dependent on ./configure parameters 2.5.2 -> 2.5.3 20000719 - ProxyCacheSize in polymix-3.pg should now be set to the *total* proxy cache size, without adjusting for the number of polyclt processes; all adjustments are now done in polymix-3-guts.pg, without user participation (the patch with the above functionality was posted shortly after 2.5.2 was released) - added the clientHostCount() function to compute the number of client hosts based on the PolyMix-3 rules (PGL) - added client_host_count to benchPolyMix3; if not defined by the user, its value will be computed on-the-fly in polymix-3-guts.pg: TheBench.client_host_count will be set to clientHostCount(TheBench) - added the "fexit" phase to PolyMix-3; fexit increases hit ratio to peak level while decreasing request rate; this phase may be required for proxies that get overwhelmed by high loads with low hit ratio; the latter were possible during "inc1" when hit ratio was increased along with request rate, but not "fast enough". - adjust Robot's recurrence ratio for Reload requests (we are still unsure what to do with IMSes) - collect small transaction sample during the fill phase (PolyMix-3) - polypxy can now handle misses (not yet robust in error-rich environments) - a Robot that did not request any private objects from a server before WSS was frozen should try not to request new private oids from that server; doing otherwise populates the working set with oids that cannot be requested again in the future (because the corresponding WSS is zero) but may consume cache space, etc. Generating a new private oid is always the last resort, but it should not be the second best choice. 
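The client_host_count computation described above, written out as a PGL fragment (this mirrors what polymix-3-guts.pg does when the user leaves the field unset; the assignment-statement form is an assumption):

    Bench TheBench = benchPolyMix3;                          // the standard PolyMix-3 bench object
    TheBench.client_host_count = clientHostCount(TheBench);  // computed on the fly when not set by the user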
- hot set objects may have been sometimes chosen outside of the working set, leading to false false hits and lower hit ratio; other objects may have been subjected to this as well - request embedded objects during the initial server scan (not requesting them makes Polygraph think that an embedded object was requested before when it was not, decreasing HR) - made polymon report Network Bandwidth usage again (untested) - moved foreign request error reporting so that we still have request headers to dump - marking a freshly accepted socket as a non-blocking or non-Nagling socket may fail when poly* is overloaded; do not exit the program but rather continue after reporting the error - fixed two coredump-inducing bugs in the polypxy module - aka was exiting silently when no "anonymous" options were specified - support the "log_dir" option to label_results 2.5.1 -> 2.5.2 20000713 - accept more than one alias range (aka); useful for running robots and servers on the same machine (but do not do that for production tests!) - do not attempt to extract the broadcast address of the interface; we do not need it and some interfaces (e.g., lo0 on FreeBSD) may not have one (aka) - added tools/pmix3-ips.pl script which is identical to pmix2-ips.pl except that it also prints IP ranges for each host in the bench (in default, verbose mode) - added IMS/reload statistics collection and reporting; IMS and reload transactions are now treated separately from "basic" transactions and do not affect hit ratios, fill rates and other statistics that are hard to define for IMS and reloads (e.g., what IMS response can be counted as a hit?); the "Contribution" table in generated reports needs more work as it is not 100% in sync with the above change - added working_set_cap() function to freeze WSS based on the number of transactions rather than run time (PGL) - added min() and max() functions to PGL - split polymix-3.pg into [modifiable] polymix-3.pg and [read-only] include/polymix-3-guts.pg - specify WSS in terms of fill responses rather than time (the calculation is still time-based, just like in PolyMix-2); WSS is meant to be equal to 4 hours of fill traffic during a "top" phase, approximately (polymix-3.pg) - specify ramp durations as a fraction of plateau durations to ease scaling of a test (polymix-3.pg) - added "wait_wss_freeze" boolean field to PGL's Phase; if set, the phase will finish only if the working set size is frozen (and all other goals are satisfied, of course) - make sure that fill phase stops only if working set is frozen - do not request embedded objects while in the initial server-scan mode - shortened launch window because we no longer request embedded objects - decreased warmup phase goal to a single launch window - detect malformed HTTP request or response lines and complain - check that embedded object type makes sense and complain if it does not (instead of dumping core) - Polygraph was erroneously reporting "hit on reload requests" for some embedded objects - removed "http."
prefix from some lx object names - report the number of local private objects in the working set size - obey RFC 2616: do not send any extension-headers with 304s - Zip files should have ".zip" extension, not "zip" (contents.pg) - yet another attempt to make oid exchange algorithm more robust (needs more work) - increased new_oids_history default to from 1024 to 2*1024 - increased new_oids_prefetch default to from 128 to 256 2.5.0 -> 2.5.1 20000626 - added support for realistic HTML simulation - added aka support for IP alias manipulation on systems that use virtual interfaces such as eth0:1, including Solaris (untested) and Linux (tested) - polished the code that searches for local interface addresses in hope to make it work on more OSes - added robotAddrs() and serverAddrs() functions: PGL now knows how to compute PolyMix-2,3 IP addresses, given the peak request rate (and other parameters); make use of that feature so that people do not have to spell out IP addresses in polymix-3.pg; explicit addresses are still supported, of course; IP aliases still need to be created before the test - added "Bench" type to describe parameters for the IP allocation schemes and include/benches.pg to describe standard IP allocation schemes - support variable recurrence ratio: replaced "recur_factor" in Robot with recur_factor_beg / recur_factor_end pair for smooth change in recurrence ratio (PGL) - polished definitions of fill-related phases to make use of variable recurrence in PolyMix-3 - polished launch window calculations in PolyMix-3 (needs more work) - added the "--ign_bad_cont_tags" option to ignore content tags that a Robot cannot parse; useful with realistic HTML simulation - added the "--ign_urls" option to generate content regardless of the URL being requested; useful when running polysrv in a "demo" mode - fixed logging of ICP statistics - added reporting of ICP measurements to the Report Generator; the above reporting is enabled only when ICP stats are present in the logs - make sure IMS timestamp is always at least 00:00:00 UTC, January 1, 1970 - fixed type unification compile-time bugs - Solaris portability fixes (IP aliasing improvements are untested due to the lack of root access to a Solaris box) - "everything expires when modified" should use "nmt" not "lmt" in olcStatic in contents.pg - replaced pop_model settings to make old workloads "compile" with new PopModel type (PGL) - support number/rate operation (returns time) (PGL) 2.2.8 -> 2.2.9 20000113 - added downtime-2.pg workload specs for the "power-off" test - when transaction fails, it may take a while for polysrv to stop waiting on the corresponding object id; the latter may lead to a situation when hits are generated only using oids that are smaller than the lost oid, preventing WSS algorithm from "sliding"; we now attempt to report "lost" oids back to the server so it does not wait for them; the latter may improve hit ratio for "small" caches in the presence of errors - freeze WSS based on last object id sent to the server rather than last object id confirmed by the server (to prevent funny tiny working sets in the presence of errors when oids may get lost) - groups with working set size of zero were not handled correctly; a group has zero size if it was never used prior to the working set freezing time - on the client side, distinguish closed connection with no headers sent from a closed connection with some headers sent; that is, distinguish "connection closed before sending headers" from "premature end of msg headers"; the 
former may by due to HTTP race condition, the latter is definitely a bug. - added "Errors Total Count x 1000" screen to polymon - assume BB/*.pm files are in /usr/local/polygraph/BB or in the current dir - polished bb.pl output - added tools/pmix2-ips.pl and tool/BB/forsome scripts that help in configuring and running [large scale] experiments - removed most defaults values for options of Report Generator tools; reportgen.cfg should be used instead; needs more work - allow up to 0.1% difference between fill count measured and computed based on hit and cachability ratios (Report Generator) - do not complain about too many waiting requests because it is not clear what "too many" is (Report Generator) - plot cachability ratio along with hit ratios (Report Generator) - ported Report Generator to Solaris (the patch was available for 2.2.8) - do not dump core if a requested phase is not found in the log (lx) - do not quit on logs with [very] different absolute times (ltrace) - made log merging algorithm less susceptible to minor local time differences between machines where the logs where generated (lx, ltrace); "level" traces (e.g., those produced by Report Generator) no longer have weird slight downward slopes - statistics about "waiting" (queued for connections) transactions was not collected appropriately resulting in zero readings - fixed "request rate for robot X is not positive" configuration time bug (the patch was available for 2.2.8) - compile IpSocket.cc even when IPFW is not supported (the patch was available for 2.2.8) - prevent "invalid type X for default argument to Y" compilation errors 2.2.7 -> 2.2.8 19991227 - added "req_inter_arrival" field to Robot to specify other than Poisson inter-arrival time distributions; for example, downtime test will use constant distribution (PGL) - redesigned lx to concentrate on phase-related statistics and support multiple log files; phases with equal names are merged, and phases with different names are concatenated to produce correct "totals" - added `ltrace', a tool to generate traces based on binary logs; this functionality was moved from `lx' that no longer supports trace extraction - changed naming scheme for log objects to be more consistent and scalable, added a few new objects - log statistics about transactions waiting for resources to become available (e.g., requests waiting for a robot connection pool to allocate a connection); this changed log format, unfortunately - dump more information on "foreign content " and "malformed content " errors - added a cool "summary" screen to polymon - do not quit when all phases met their goals when there is a positive idle timeout; wait for that timeout to expire (so that servers do not quit while clients are running) - do not log ascii representation of addresses when storing current state - "level" statistics was not maintained properly between phases - "fill" stream was measured as "miss" stream instead of "cachable miss"; polyfill-2.pg was not affected because all polyfill objects were cachable - gettimeofday(2) system call is broken on FreeBSD; make sure internal Polygraph clocks never go backwards and show sane values - avoid infinite recursion coredumps when we fail to write a log entry; report and try to salvage logging errors instead - do not dump core if a phase has no name, use "" name instead 2.2.6 -> 2.2.7 19991215 - added "--time_scale" option to lx; positive values make lx to print relative [to the run start] time for traces, scaled by the specified amount - added "Traces" macro to 
represent all "traceable" objects in lx - added rptm_mean:hit and rptm_mean:miss to lx's All micro - only redirect standard output when --out option is used (lx); used to redirect error streams as well - try to skip corrupted entries in [otherwise valid] binary logs - implementation of lognormal distribution used incorrect standard deviation leading to occasional huge objects and smaller-than-configured means - changed content sizes for cntOther and cntDownload after fixing the lognormal distribution bug to get the right mean object size - removed percents from Server.direct_access specs; those values are ignored by Polygraph because servers calculate them from scratch to match "global" content distribution - the sum of squared values was not computed correctly leading to incorrect readings for standard and relative deviations for samples with sampled values larger than 46341 2.2.5 -> 2.2.6 19991207 - PolyMix-2 workload now uses 0.4/sec per-robot request rate; use cloning or IP aliases to get more robots (and hence higher request rates) - added "rep_sz_mean" object to lx - added polyfill-2.pg to workloads/ - added pmix2 blob to tools/BB/bb.pl and polished bb.pl's comments - when using --dump, dump transaction meta-info even if there is no message data to dump - recognize KB scale for command line size options - two meant-to-be-independent random number generators used for simulating object sizes got "in-sync", producing wrong size distributions - phases with decreasing request rate were dropping request rate too fast - public_interest for PolyMix-1 and DataComm-1 workloads should be set to 0%, not 50%. - added now-required hit ratio and popularity model specs to datacomm-1.pg - avoid division by zero on corrupted log files 2.2.4 -> 2.2.5 19991129 - added "idle_pconn_tout" field to Agent to specify the maximum time an idle (no pending transactions) persistent connection should remain open (PGL) - support phase-dependent recurrence ratio; useful for populating the cache (fast, with only a few hits) and then doing measurement phase with appropriate hit ratio, all in one run - added "recur_factor" field to Phase (PGL) - added support for priority scheduling: Polygraph runs (N-1) file scans for file descriptors (connections) that require urgent action and then one scan for all file descriptors; N is configurable using the --priority_sched option - added "--priority_sched" option to control priority level for somewhat urgent socket I/O operations; giving higher priority to some operations may improve performance in environments with lots of probably idle connections as Polygraph will spend less time processing those connections; "over-optimized" setting are likely to cause noticeable increase in response time; the default priority is 5; to disable the optimization, set to 1 - added support for assigning the same value to several variables with one assignment operator (PGL): [ a1, a2, a3 ] = 100%; - added support for randomly distributing values of an array among several arrays, each with an optional "portion" quantifier; for example: [ arr1 : 60%, arr2 : 20%, arr3 ] = '10.0.1.1-255'; this feature is very convenient when distributing network addresses among several objects (e.g., servers or pipes) (PGL) - grow storage area for new oids when clients start to run out of them (to make prefetching more aggressive) - optimized initial server scan (when robots are "touching" all servers to make sure robots can later generate hits on every server); the scan is now much faster and has more random access 
pattern - added reporting of server scan progress - "lock" first phase until all robots are ready to hit all servers - when an advertised oid has not been requested for a while, see if we should grow the bitmap to store more oids (and hence keep an oid for a longer time) instead of reclaiming that stuck oid - changed default for "--new_oids_prefetch" from 256 to 128 because Polygraph is not capable of dynamically adjusting prefetch amount - if load factor reaches zero, stop submitting requests - check for late alarms and other timers and report if we are getting behind the schedule - server now accepts new connections for at most 10msec at a time - do not quit when running out of file descriptors in accept() call - destroy PGL objects after use to reduce memory footprint for large scale configurations - use idle_pconn_tout of 15 seconds for PolyMix-2 servers - changed Robot.open_conn_lmt to 4 in polymix-2.pg - polished phase definitions in polymix-2.pg - check for error status when deleting rules and pipes (piper) - re-ordered screens in polymon - resurrected err_cnt_tbl in lx - the "--phase" option was not supported for non-traceable objects (lx) - fixed introduced in 2.2.4 typo that made servers "blocking" and hence unusable (they would get stuck in the accept(2) call); client-side sockets were also affected but that was not visible to an end-user because blocking robots may still work; the patch was posted to the Polygraph list - portability star, ncurses, #defines timeout causing namespace havoc - bad guys #defined `select' in Solaris system files causing namespace havoc - I/O buffer pool was not reporting memory growth correctly (err.. at all) - prevent coredumps when printing a histogram with max value less than 1 (lx) - True64 requires a special setsysinfo(2) call to raise FD limit beyond the default hard limit of 4K 2.2.3 -> 2.2.4 19991109 - added "piper", a program to configure DummyNet network pipes based on PGL - added 'bwidth' -- a basic PGL type to represent [network] bandwidth small set of operations are supported for now - added NetPipe PGL type to describe network pipes; a pipe specifies maximum network bandwidth, packet delay and loss, and other low -level network parameters/conditions - added pipes.pg with definitions of commonly used network pipes, including modems and various kinds of origin servers - added configure-time check for DummyNet and IPFW interfaces - reduced start-up time in configurations with large number of robots and servers; reduced memory footprint for large configurations (more work may be needed) - print aggregate statistics (min/mean/max/etc) for histograms produced by lx and distr_test - report agent creation progress every 5 seconds rather than every 50 agents - report system resource usage before and after run - adjusted the way stat intervals (i- lines on the console) are printed in a blocking or overloaded environment - increase the maximum number of FD ./configure script can detect to 2^16 (65K) - fixed configuration bug in environments with no rlim_t type - include limits.h file to avoid compilation errors on Solaris - do not print "array probabilities (above) add up to .. 
less than 100%" if they add up to more than 99% - cntDownload in contents.pg was named cntDowload; some found it funny - lx could not handle logs with combination of duration-based and non-duration based phases - some environments do not know better than #define getc and putc macros causing namespace havoc - rptm_hist was printed twice by lx --objects All - a typo caused size type to do multiplication instead of division (PGL) 2.2.2 -> 2.2.3 19991026 - added "--sample_log" and "--sample_log_size" options to configure sample log; sample log defaults to the general purpose log; if separate log is specified, it is not flushed to disk on periodic basis as general log (to avoid potential run-time overhead) - added "StatsSample" object to PGL to describe the time and size of a statistical sample; Samples must be scheduled(), just like Phases; more work needed to make scheduling more convenient (phase related?) - added "kind" field to label Content object (PGL) - added "cntDowload" content to represent relatively large downloads; use cntDowload in PolyMix-2 - disabled sharing of embedded objects among containers; with no sharing, we have to rely on user specified content configuration with no tools to "tune" the model internally -- resulting (actual) global content distribution may not match the planned one; a likely global distribution is reported so a user can check their specs without running an experiment - disabled client side cache in PolyMix-2 - set embedded recurrence to 100% in PolyMix-2 - added initial support to save experiment "state"; eventually we will use state information to resume experiments from a given point and also for stats analysis; needs more work - added support for collecting low-level statistics in the form of samples; sample collection is controlled using PGL "StatsSample" type and "schedule()" call - for large configurations, inform about the progress in creating agents at startup - prevent int overflows when generating very large files - several bug fixes related to size constants (PGL) - try to handle "unlimited" resources reported by getrlimit(2) correctly 2.2.1 -> 2.2.2 19991020 - added "cache_cap" field to Robot (PGL); the cache is currently used for embedded objects only; it prevents robots from requesting the same embedded objects many times, emulating browser behavior - added "conn_lvl_mean" lx object to report average number of opened concurrent connections; because of Polygraph does not update time on every open() and close(), this statistics may not be accurate - robots now scan all servers (in a semi-random manner) before proceeding with truly random accesses; the initial scan ensures that all connectivity problems are detected early and also helps reach stable HR earlier - inform user when all robots are ready to generate hits on all servers - no "wait_xact_lmt" by default, a robot can queue as many transactions as needed; this is potentially dangerous because a user does not see those queued transactions (yet) and may think that everything is OK; more work is needed - generate new oids only when we know that the xaction will not be queued; this rule prevents robots from thinking an oid has already been seen by a proxy while it was not (because it got queued); the latter used to decreases offered hit ratio - agents should share random number generators to avoid a hard-to-detect artificial condition when all agents are "doing the same" all the time (e.g., all robots send requests to the same subset of servers for a while) - if no new alias is specified, 
just delete old ones (aka) - make sure that we always #include sys/types.h before inet.h because old BSD systems may not include sys/types.h in inet.h, and they should - removed --out option from aka - fixed a bug that led to an occasional assertion if the FD limit is reached on server side - fixed bugs leading to occasional segv and assertions when poll(2) system call is used - fixed parsing of the --dump option; now all tag combinations are parsed correctly - request rate field of a Robot was misinterpreted unless N/sec format was used (PGL) - due to ioctl(2) interface differences, aka does not work on Linux and probably some other OSes; more work is needed - fixed IP range parsing bug; some valid ranges were declared malformed - logging modifications; binary logs are not compatible with previous versions (again) - updated polymix-2.pg and contents.pg; changed content weights to ensure that an "average" object has 80% chance of being cachable 2.2.0 -> 2.2.1 19991012 - added support for IP ranges using a-b.c-d.e-f.g-h notation, for example '10.0.1-2.50-99' constant specifies an array of 100 addresses; the old range operator (A .. B) is still supported but is less flexible; subnet and/or port specification is also allowed - added "aka", an ingeniously named tool to create [large] number of IP aliases for a given interface; aka accepts IP specs in the PGL format (a-b.c-d.e-f.g-h/subnet) which allows for specifying hundreds of aliases in a compact form; aka will _delete_ all aliases for the interface before setting new ones; aka requires superuser access, of course - added "new_oids_per_msg_max", "new_oids_prefetch", and "new_oids_history" options to control synchronization between robots and servers; may be useful if you get many oid-related errors in large scale experiments and/or when a proxy gets too slow or drops requests - added "open_conn_lmt" and "wait_xact_lmt" fields to Robot (PGL) to limit the number of concurrent connections a single Robot may have at a time, emulating browser behavior (the code was written for version 2.2.0, but lacked PGL hook-ups and was not tested) - removed place() function call and replaced 'host' field in Agent with 'hosts' field to reduce memory footprint when configuring lots of agents on a single machine; place() call used to clone lots of agent specs that would differ only in 'host' field setting; we now specify all the hosts directly (in 'hosts' field) bypassing the cloning step (PGL) - added "conn_close_act_use_hist" and "conn_close_act_ttl_hist" lx objects to filter stats for "actively" closed connections only (i.e., all connections except those that were closed in an "idle" state) - agents where not binding to their addresses when they were supposed to - robots used to maintain one private object space as if there was only one origin server; the bug led to lower than expected hit ratios when robot's public_interest was less than 100% - robots were not setting server world id (wid) for embedded requests - statistics for idle persistent connections closed by Polygraph was not maintained - synchronized workload specs 2.1.0 -> 2.2.0 19990928 - added support for embedded objects: a server populates content with special tags that point to embedded objects (based on PGL Content specs); a robot parses server responses for those tags; a robot requests embedded objects as it detects them; we try to keep average request rate at the specified level (if any) despite these "extra" requests requests for embedded objects do cause bursts in traffic, just like in 
the real world - added may_contain and embedded_obj_cnt fields to Content type (PGL) - added "embed_recur" ratio to Robot; the ratio specifies the portion of embedded objects in a container that are re-requested, on average - added "choice_space" field to Content type (PGL); choice_space specifies the number of oids considered when selecting the next embedded oid for a container oid: embedded_oid = uniform(k*container_oid, k*container_oid + choice_space) - added "direct_access" field to Server (PGL); direct_access selector specifies (a) objects that can be accessed not as an embedded part of another object and (b) relative frequency of such accesses (e.g. 1% of all direct accesses are images and 95% of all direct accesses are HTML pages) - added tools/msl_test tool that attempts to estimate the maximum time a proxy keeps a TCP connection in a TIME_WAIT state. (Glenn Chisholm ) - added rng_test to test the quality of various random number generators that Polygraph is using - added the "--dump" option to control what type of messages and what message parts must be dumped to console. The option accepts a list of flags; the old --prn_* interface was not flexible enough or required too many options; new interface needs more work - renamed "--prn_dump_size" to "--dump_size" - removed "--prn_reqs", "--prn_reps", and "--prn_errs" options - robots must bind(2) to the IP addresses they are running on except when all robots are running on the same address - report approximate time a server waited for an advertised oid to be requested - always randomize IO buffers before use - use agent id along with object id when seeding r.n.g; this prevents skew in pseudo-random numbers when many agents are dealing with the same small subset of oids - use one port manager for all addresses to reduce memory footprint for big scale runs - install SIGINT (^C) handler _after_ polyclt/srv has configured itself; makes it possible to kill a program if configuration takes forever - workload files were missing server side delays (Becky Larsen ) - polymix-2.pg file comments were saying that it is a PolyMix-1 file - fixed typo in [client] "closed conn w/o sending data" error message - polymon.cc::TheSelHostCnt was not defined properly, causing compilation warnings (Ron Jones ) - udp2tcpd.cc was using `int' instead of `socklen_t' causing compilation errors on Linux and other OSes that require socklen_t (Ron Jones ) - a kludge to prevent a mysterious (compiler?) bug causing segmentation faults in RecSym::getDistr on some platforms 2.0.0 -> 2.1.0 19990917 - added pop_test program that simulates an LRU-based cache and can be used to estimate [memory] hit ratios depending on popularity model and other related workload parameters - added "pop_model" field to Robot; two popularity models are supported: pmUnif() and pmZipf(skew_factor).
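As a purely hypothetical illustration of the pop_model field just mentioned (the exact attachment syntax and the surrounding Robot fields are guesses and may differ between Polygraph versions):
	Robot R = {
		req_rate = 0.4/sec;        // per-robot request rate, as used by PolyMix-2
		pop_model = pmZipf(0.6);   // Zipf-like popularity; 0.6 is a made-up skew factor
		// pop_model = pmUnif();   // alternative: uniform popularity across the URL space
	};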
- added "public_interest", "recurrence", and "unique_urls" fields to Robot - added working_set_length() procedure call to PGL; use it to limit working set size and hence prevent hit ratio decline during long tests - added a "--prn_dump_size" option to limit the size of HTTP message dumps - added a "--prn_errs" option to print HTTP messages that caused xaction errors - recognize 'All' macro in --phase option (lx) - removed "--unique_urls" option (polyclt) -- it is now Robot's field - removed unused `world' options (polyclt): world_type, world_id, world_urls, unique_urls, order, recur, pop_model, tmp_loc, tmp_loc_delta, tmp_loc_depth, exp_oids, prefilled_cnt; some of these option were already implemented via PGL, and some wait till be implemented/ported/tuned - removed the "dhr" field from Robot specs (the field was ignored anyway) - share URL space among all robots and servers; robots may have a "private" URL subset while "public" subset is shared by all; servers send robots information about the current global state; current limitation is that all polyclt processes should emit request streams with similar rate (per robot differences are OK), otherwise the servers start complaining that some clients are not doing their share in creating the load; solving the latter problem would require more heavy negotiations among clients and servers - make IO buffer content "more random" than it used to be; servers now produce Web objects filled with more random content. Note that servers still may produce different content for the same URL, even if the headers imply that the object has not changed. The latter is probably OK under "normal testing conditions" though. - do NOT make relative URIs artificially long by prepending them with an '/http:/ip-address' prefix; transparent proxies usually receive relative URIs so they do have and advantage of handling shorter request-lines, and we probably should simulate that - HTTP/1.0 connections are non-persistent by default, no need to send *Connection: close header. 
- write all 'unique ids' (e.g., world id) using hex notation and fixed length fields - write oids in URLs using hex notation and fixed length fields - console now displays minutes since the start of an experiment rather than absolute time; absolute time is logged and is available to lx - when building executables, use CXXFLAGS instead of CFLAGS in Makefiles - "conn_ttl_vs_use" was erroneously named "conn_ttl_vs_sz" - fixed parsing bug that caused assertions on a "''" expression - offered (ideal) hit ratio was not recorded properly 1.3.2 -> 2.0.0 19990907 - new copyright file header and polished licensing terms - added support for PGL configuration language; most experiment parameters are now set via a configuration file - "--pop_model" option defaults to "unif"orm now (used to be "zipf") - added the "--idle_tout" option to the client side; "idle" in "--idle_tout" now means "no network I/O activity" -- Polygraph may still be trying to do something - added the "--stats_cycle" option to specify the duration of a cycle that collects and logs "cheap" stats (defaults to 5 seconds) - added "fill size" experiment goal - removed accept_lmt, rep_sz, obj_bday, obj_life_cycle, obj_life_cycle_var, obj_with_lmt, obj_expire, cool_phase, goal, launch_win, rep_cachable, maximize_req_rate, and perhaps other options; similar functionality is usually provided in PGL - renamed "--dhr" to "--recur" (recurrence ratio or how often a URL is revisited by a robot); in Polygraph 2.0.0 the actual DHR is recurrence/cachability ratio; DHR may be affected by popularity model and other factors; we had to rename "--dhr" because Polygraph can no longer simulate any given DHR independent from other config parameters -- the models are becoming too complex; this is still work in progress and changes are likely - removed `ephemeral' string as a valid port range in --ports - the "--file_scan" option now defaults to poll if poll(2) is supported; Polygraph used to call select(2) by default - added conn_close_frgn_use_hist,conn_close_frgn_ttl_hist, conn_close_lcl_use_hist,conn_close_lcl_ttl_hist, soread_sz_hist, sowrite_sz_hist, and phase_name objects to lx - made 'All' macro a default "object" to be extracted by lx - honor --phase option in trace mode of lx - changed binary log format; new format is not backwards compatible - log errno strings along with errnos; makes error logging portable across OSes - modified logging procedure to flush binary log data periodically - removed "X-ETag" header field - send Proxy-Connection: header when talking to a proxy (used to send Connection: which was a terrible idea and was not honored by some proxies) - added support for Content-Type header field in responses - added support for arbitrary number of user-defined simulation "phases" - cachability is now determined on the server side (again!) - added ./configure script to preconfigure the distribution before `make' - added INSTALL file with generic package installation instructions - added root-level Makefile - added distclean goal to Makefiles to remove most auto-generated files - changed Perl interpreter name in scripts to '/usr/bin/perl'; ideally we need to determine the location at ./configure time - explicitly require Perl version 5.003 or higher in Perl scripts - added DataComm-1 specs to bb.pl - added workloads directory with standard workloads specs written in PGL. 
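To give a flavor of what such a PGL workload spec looks like, here is a minimal hypothetical sketch; it is not a copy of any shipped workload, and the field names (contents, origins, addresses), the use() call, and all values are assumptions based on later Polygraph versions:
	Content cntSimple = {
		size = exp(13KB);          // illustrative reply size distribution
		cachable = 80%;            // cachability ratio
	};

	Server S = {
		contents = [ cntSimple ];
		addresses = [ '127.0.0.1:8080' ];   // made-up listening address
	};

	Robot R = {
		req_rate = 0.4/sec;                 // per-robot request rate
		origins = S.addresses;              // request objects from our server
		addresses = [ '127.0.0.1' ];
	};

	use(S, R);                              // assumed call that activates the agents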
- added polymix-1.pg, polymix-2.pg, and datacomm-1.pg workload specs - added simple.pg, a minimalistic workload to use as a "Hello World" example - complain about "uncachable hits" - ignore "false hits" by default - removed hard coded limits on the reply content size; the change may have performance effect for workloads with zipf popularity model - removed 'loop' tool until it gets fixed - applied some HPUX compatibility patches (submitted by James Murphy ) - 'max' values printed by distr_test were greater than the real max by one - lots of other additions, modifications, and bug fixes 1.3.1 -> 1.3.2 19990830 - this minor release incorporates all previously released patches for 1.3.1; the patches fix a few logging bugs and, to the best of our knowledge, do not affect Polygraph run-time performance - replaced the "--prn_trace" lx switch with the "--trace" option; now trace stats can be averaged at a specified time interval; small window sizes emulate old behavior - added "conn_use_mean" object to lx - added datacomm-1 workload specs to bb.pl script - increment error count on each phase/log - fixed generation of class-based object names - fixed lx coredump when extracting class-based stats like class_rptm_mean:hit 1.3.0 -> 1.3.1 19990616 - added the "--notify" option that enables notification messages to be sent to a monitoring program; messages (currently less than 100 bytes) are sent via UDP every 20 seconds. - added "--label" option to specify a [short] run description; the label gets included into notification messages if any. - added udp2tcpd, a daemon that runs on port 18256 (default), listens to notification messages (UDP), and forwards them to polymon(s) via TCP; the intention is to support multiple monitoring programs for the same notification stream without using IP multicast or broadcast - added polymon, an ncurses-based program that monitors run-time status of Polygraph experiments using udp2tcpd daemon - added the err_cnt_tbl option to lx to print error table; note that Unix error codes are not standardized so you may get different descriptions of error messages if you move binary logs around (this is a bug) - fixed "invalid port" assertion in ExpPortMgr - fixed logging of error table (it probably was not logged before) - fixed compile-time warnings reported on Linux - fixed compile-time poll(2) errors on Linux - fixed a few compile-time problems on Solaris - maybe improved timegm() port to True64 and other environments with tm_gmtoff - removed custom polyVersion dependency from Makefile; it caused too many re-compilations and we never used the $CompileTime$ feature anyway - polished comments in Makefiles 1.2.2 -> 1.3.0 19990608 - added poll(2) support; users must add -DLACK_POLL to DEFINES in src/xstd/Makefile in the environments without poll(2); - added the "--file_scan" option to select between poll(2) and select(2); the default is still select, but that is likely to change - added the "--maximize_req_rate" option that enables "maximum request rate" searching mode; must be used together with the "--req_rate" option which specifies the initial rate; search is done by incrementing or decrementing actual request rate depending on the reply rate; support is rudimentary, more work needed - added the "--prn_rep_cnt" option to lx; similar to --prn_time; prepends trace lines with number of replies received since log start - added response time statistics for major request classes: hits, misses, cachable, uncachable, etc.
- added several lx objects to extract new stats: conn_use_hist, conn_ttl_vs_use, resp_tm_vs_class, class_rptm_mean - added stats for persistent connections (xactions per connection) - "rep_rate" and "req_rate" objects are now traceable via lx - log errors "histogram" - scan the port range and mark used ports at start-up if port manager is enabled via the "--ports" option; the scan adds about 2.5 seconds to polyclt start but avoids some of the run-time port binding errors - improved random number generation; do not use object ids as r.n.g seeds directly; generate a "good" "uncorrelated" random value and use that instead; prevents "loops" in reply size generation and other oid-dependent models; the loops were visible only on some micro-level workloads though; the fix adds a few seconds to polyclt/srv start time and about 4MB to memory footprint - adjusted histogram dumps so that max column in the output corresponds to max rather than max+1 - avoid floating point exceptions when reading bogus log data - improved handling of incomplete or old binary logs in lx - fixed FP exception error due to wrong double->int cast (a patch was available for 1.2.2) - use "linux" #define to prevent "FD_SETSIZE already defined" warnings on Linux (one can also add -DLINUX in Makefiles) 1.2.1 -> 1.2.2 19990521 - added distr_test tool that can print a histogram for any distribution recognized by Polygraph - added the "--prn_trace" lx option to print trace stats rather than totals; not all object values can be computed in the middle of a log; those that can, will be printed; others will be silently ignored; log files are not joined when trace mode is on - added the "--prn_time" lx option to show log time as a first column of the output (useful together with --prn_trace) - added support for accumulating large sizes (over 2GB); yet untested; old counters used to overflow leading to bogus byte hit ratios; current counters can hold up to about 4 exabytes which should be enough for a while - timegm is missing on Solaris; Solaris users should add -DLACK_TIMEGM to DEFINES in xstd/Makefile to enable a workaround - several bug fixes in Histogram module; trace stats (collected at 90 second intervals) that were based on histograms (e.g., response time percentiles) where not recorded correctly; aggregate stats (e.g., means) and stats averaged over entire phase were not affected - Zipf distribution was not generating the value of '1' when used for persistent connection limits; other zipf fixes - fixed "ambiguous overload for `bool ? 
Size : int'" compilation errors - zipf distribution now reports "world size" parameter only to be consistent with the way zipf is specified on the command line (used to print alpha=1 as a first parameter) - other minor bug fixes 1.2.0 -> 1.2.1 19990513 - added the "--obj_bday" option to specify object creation times; used to be hard-coded to a randomly (per object) chosen day in the 70s; negative birthdays are relative to program start time; non-negative birthdays are relative to 0, UTC; - accept "zipf" distribution on command line - time parsing routines now accept "year" scale (e.g., const:1.5year) - report Polygraph version in the configuration dump - internal lmt and expires time must be rounded using second resolution for HTTP date comparisons to work correctly - polished last-modified-time calculations to better handle lmts that are close to current time - renamed XSTD_INCLUDE to LIB_INCLUDE in xstd/Makefile 1.1.0 -> 1.2.0 19990501 - added Object Life Cycle model to simulate object modification and expiration; the model is controlled by the following server side options: --obj_life_cycle, --obj_life_cycle_var, --obj_with_lmt, and --obj_expire - support If-Modified-Since requests on server side - Poly-server now adds the Date: header field to all replies - removed "max-age" option from cachable replies because it had priority over the more common Expires: header. As a side effect not all cachable replies have expiration information - Last-Modified header field is now added to replies only if --obj_life_cycle option is enabled; some proxies will not cache replies without a Last-Modified header field -- watch out! - Expires header fields are not added to cachable replies by default; see --obj_expire option to control Expires fields - reordered HTTP reply header fields to follow HTTP recommendations - always use the minimum of FD_SETSIZE and getrlimit(RLIMIT_NOFILE); warn if FD_SETSIZE is smaller; we got tired of coredumps when a tester would forget to increase FD_SETSIZE after increasing kernel limits - date manipulation functions now use "timegm()" call which may not be portable; portability problems will be fixed on-demand - distinguish between "premature end of msg body" and "premature end of msg headers"; useful to diagnose proxy connection resets - detect premature end of headers on client side (we probably would wait forever prior to this fix) - reorganized and optimized HTTP header parsing; parsing speed is increasingly important as we have to recognize more header fields; current improvement (on 2.5 fields) is negligible (about 3%). 
- fixed a bug with HTTP header parsing (a patch was available for 1.1.0) 1.0p7 -> 1.1p0 19990425 - added a "--version" option to print package version and (c) info - ident-like routines may extract version info from Poly binaries now - added a "--pconn_use_lmt" option to control the number of transactions executed over a single [persistent] connection; Poly generates (and understands) Connection: close/keep-alive headers, and will honor Proxy-Connection: header as well; mutli-token Connection: headers are not supported; the option makes sense on both client and server sides; TCP may need "--nagle off" to utilize the advantage of pconns - added support for user-defined (tabulated) distributions; "tab:filename" or "tab(filename)" can be used wherever a distribution is expected on the command line - changed the default of the "--origin" option to "127.0.0.1:8080" - the "--proxy" option has no default now and implies proxied connection if present; with no "--proxy" option, a direct connection is assumed - removed the "--abs_urls" option; absolute (relative) urls are now generated only for proxied (direct) traffic - added libs target to Makefile to ease re-build of libraries - make will now clean src/xstd directory when making clean in src - the "--rng_seed" option is now honored by all rnd number generators - complain if units (KB, sec, etc.) are specified where none expected - added reporting of source code location in error messages - check for connect errors during first IO on a socket; Poly 1.0 would do an I/O and only then check for an error leading to imprecise error diagnostics - set socket options for accept(2)-ed sockets (server side), do not rely on inheritance; BSD OSes silently suppress inheritance of some flags like TCP_NODELAY (Nagle algorithm) - response time now includes connection setup time - fixed member initialization bug in StatPhase.cc that caused coredumps on Linux 1.0p6 -> 1.0p7 19990405 - added rep_sz_hist, rep_sz_mean, resp_tm_vs_sz, req_bhr, rep_bhr objects to 'lx' (byte HRs are broken for long experiments) - added --track_rports and --rport_wait_min options to track TCP port usage on the proxy side; unfortunately tracking is complicated by known TCP violations of most BSD implementations - fixed 'lx' bug with aggregating reply rate from logs with different phase durations - applied bake-off mods to bb.pl 1.0p5 -> 1.0p6 19990310 - added "lx", a routine to eXtract objects from binary logs; also mergers logs to support multiple client/server experiments - added "exprep.pl" script to generate reports from binary logs of several experiments (needs more work) - replaced all ETag: headers with X-ETag (requests and replies); ETag is not a request header and gets filtered out on some proxies - added "Last-Modified: Wed, 18 Feb 1998 17:53:58 GMT" reply header to cachable replies; some proxies may not cache replies otherwise (a magic constant value should be eventually eliminated) - changed the default for the "--log_size" option to 10KB with no logging and 10MB with logging enabled - various fixes to handle log buffer overflows; we now ignore all new entries if the buffer is full (needs more work) - fixed a lot of bugs in interpreting logged data (recording was OK?) no program was interpreting logs so bugs stayed undetected - mandatory phases should not stop even when too many errors; we used to abort if that happens (needs more work to detect error flood early as we used to) - increased required verbosity level for "got N xactions..." 
message to 2; a more detailed but less readable progress/goal message is printed with a verbose level of 0. 1.0p3 -> 1.0p5 19990223 - added "--prefilled_cnt" option - added Uniform Popularity Model - minor polish and bug fixes 1.0p3 -> 1.0p4 19990222 - added "--ign_false_hits" option; by default false hits are now treated as xaction errors - added "--prn_false_misses" option; handy to see what hits a proxy has "missed" - added the "--tmp_loc_depth" option that specifies a soft limit on inter-request distances in tmp locality model - added "--tmp_loc_delta" option to specify fuzziness of temporal locality choices - added "--cool_phase" option to specify minimal cool-off phase; handy when you run more than one polyclt and do not want one of them to stop when others are still in their measurement phase - added "--fd_limit" option to lower FD limit supplied by OS (as a safeguard for OSes that crash when we run out of FDs) - added "--phase" option to log reader to show more stats about a particular phase (needs more work) - added support for error limit (given in the --goal option) - added ETag headers to pass object specific info from the client to the server - better facilities for logging and reporting various errors - record ideal (i.e, offered) Hit Ratio - record actual cachability ratio - added lognormal distribution to the list of distributions available on the command line (logn:mean,sdev) - print number of hits, misses, errors, etc. in lr - enforce a safeguard FD limit of (0.97*rcur - 10), where "rcur" is the FD limit enforced by OS according to getrlimit(2) the "--fd_limit" option can be used to lower this value - shutdown nicely on exit or fatal external error - count and report the number of open sockets; the last column of the console output now shows number of open FDs, not just pending xactions - revised traffic model operation and simplified the interface now traffic characteristics are controlled individually rather than via an ugly --world_oids option - constant hit ratio now works correctly regardless of cachability ratio and other traffic parameters - moved --rep_cachable option from the server side to the client side - changed tmp locality model to use future "plan" instead of past "history"; old model was emitting too many too close requests; current model follows the --tmp_loc distribution much better - complain if cachability status of a reply has changed - print various FD limits on start - r.n.g. seed was not properly changed when calculating reply sizes, resulting in same object ids having different content length if requested more than once by a proxy 1.0p2 -> 1.0p3 19990215 - added initial support for Constant Hit Ratio; we have several models (Object Ids distributions) to choose from, see --world_oids - added --world_oids option to specify Object Ids "distribution"; currently supported "distributions" meaningful for --world_oids are: . sequential ("seq"), . MemoryLess Zipf ("MLZipf"), . 
Hot/Cold ("HoCo"); names will change if we find better ones - changed default behavior when no --world_oids is specified: we used to generate Zipf-like stream of Object Ids by default, now the default is Sequential stream (hence, no hits by default) - removed --world_cap option; we no longer need/use World Capacity - added "Expires: Wed, 17 Feb 2000 03:35:25 GMT" header to cachable responses; this constant should be good enough in most cases and will be changed to a dynamic value in next releases - added "Pragma: no-cache" to uncachable responses - better explanation for errors with distribution specs on the command line - fixed zero Content-Length bug - fixed time accounting bug (visible at least on FreeBSD 3.0) - define _BSD_SIGNALS to make IRIX happy (needs more work) 1.0p1 -> 1.0p2 19990212 - added support for specifying distributions on the command line; currently available distributions: unif, exp, norm, and const - added support for specifying ratios and such on the command line; - "--rep-size" is now of type Size Distribution (default: exp(13KB)) - "--xact_think" is now of type Time Distribution - "--rep_cachable" is now of type Ratio (e.g. 80%) - renamed "--users" to "--robots" - value "0" does not require scale any more ("0" == "0sec" == "0min") - report number of outstanding xactions in i-stats (new last column) - increased verbosity level required for i-stats to 2 - fixed parsing of floating point values on the command line (used to be truncated to int) - #warn, not #err if FD_SETSIZE is #defined suspiciously early 1.0p0 -> 1.0p1 19990207 - added time support in goal option - enabled --launch_win support - enabled --xact_think support (per xaction think times on both sides) - added constant req submission rate mode (--req_rate) - replaced --req_ccl with --users (just a name change) - added support for absolute/relative urls (--abs_urls option) - added Host: header (all requests) - many binary logs fixes and improvements - many stats fixes - disable SO_LINGER if linger_tout == 0 - drastically improved memory management; now Poly runs out of filedescriptors before running out of memory on high loads - account for BSD select(2)'s magic limit on timeout value - internal alarms now use Heap instead of LinkedList polygraph-4.3.2/NOTICE0000644000175000017500000000313611471034123014026 0ustar testertesterPortions of NTLM authentication code (src/client/NtlmAuth.(h|cc) files) are derived from libcurl of cURL project, circa v7.16.0 (http://curl.haxx.se/): Copyright (c) 1996 - 2006, Daniel Stenberg, . All rights reserved. Permission to use, copy, modify, and distribute this software for any purpose with or without fee is hereby granted, provided that the above copyright notice and this permission notice appear in all copies. --------------- MD5 checksum code (src/xstd/Checksum.cc) is derived from RFC 1321, Appendix 3. Copyright (C) 1991-2, RSA Data Security, Inc. Created 1991. All rights reserved. License to copy and use this software is granted provided that it is identified as the "RSA Data Security, Inc. MD5 Message-Digest Algorithm" in all material mentioning or referencing this software or this function. License is also granted to make and use derivative works provided that such works are identified as "derived from the RSA Data Security, Inc. MD5 Message-Digest Algorithm" in all material mentioning or referencing the derived work. RSA Data Security, Inc. 
makes no representations concerning either the merchantability of this software or the suitability of this software for any particular purpose. It is provided "as is" without express or implied warranty of any kind. These notices must be retained in any copies of any part of this documentation and/or software. --------------- Fowler–Noll–Vo hash function implementation (src/xstd/gadgets.h file) is based on pseudo code from http://www.isthe.com/chongo/tech/comp/fnv/. FNV hash algorithms and source code are in public domain. polygraph-4.3.2/config.h.in0000644000175000017500000002465011546445452015167 0ustar testertester/* config.h.in. Generated from configure.in by autoheader. */ #ifndef POLYGRAPH__CONFIG_H #define POLYGRAPH__CONFIG_H /* here are some typedefs that configure script might be missing */ #undef rlim_t /* host type from configure */ #define CONFIG_HOST_TYPE "-unknown-" /* negative or zero if we failed to detect it */ #define DEFAULT_FD_SETSIZE -1 /* altzone global is supported (Solaris only?) */ #undef HAVE_ALTZONE /* Define to 1 if you have the header file. */ #undef HAVE_ARPA_INET_H /* Define to 1 if you have the `ceilf' function. */ #undef HAVE_CEILF /* Define to 1 if you have the `closesocket' function. */ #undef HAVE_CLOSESOCKET /* Define to 1 if you have the header file. */ #undef HAVE_DLFCN_H /* Define if you have the dlopen/sum/error/close. */ #undef HAVE_DLOPEN /* Define to 1 if the system has the type `dn_pipe'. */ #undef HAVE_DN_PIPE /* Define to 1 if you have the `epoll_create' function. */ #undef HAVE_EPOLL_CREATE /* must explicitly declare timezone global as extern */ #undef HAVE_EXTERN_TIMEZONE /* Define to 1 if you have the header file. */ #undef HAVE_FCNTL_H /* Define to 1 if you have the `fork' function. */ #undef HAVE_FORK /* Define to 1 if you have the `getifaddrs' function. */ #undef HAVE_GETIFADDRS /* Define to 1 if you have the `GetLastError' function. */ #undef HAVE_GETLASTERROR /* Define to 1 if you have the `getpagesize' function. */ #undef HAVE_GETPAGESIZE /* Define to 1 if you have the `getrlimit' function. */ #undef HAVE_GETRLIMIT /* Define to 1 if you have the `getrusage' function. */ #undef HAVE_GETRUSAGE /* Define to 1 if you have the `gettimeofday' function. */ #undef HAVE_GETTIMEOFDAY /* Define to 1 if you have the header file. */ #undef HAVE_IFADDRS_H /* Define to 1 if you have the `inet_lnaof' function. */ #undef HAVE_INET_LNAOF /* Define to 1 if you have the `inet_makeaddr' function. */ #undef HAVE_INET_MAKEADDR /* Define to 1 if you have the `inet_netof' function. */ #undef HAVE_INET_NETOF /* Define to 1 if you have the header file. */ #undef HAVE_INTTYPES_H /* Define to 1 if you have the `ioctl' function. */ #undef HAVE_IOCTL /* Define to 1 if you have the `ioctlsocket' function. */ #undef HAVE_IOCTLSOCKET /* Define to 1 if you have the header file. */ #undef HAVE_IOMANIP /* Define to 1 if you have the header file. */ #undef HAVE_IOMANIP_H /* Define to 1 if you have the header file. */ #undef HAVE_IOSFWD /* Define to 1 if you have the header file. */ #undef HAVE_IOSFWD_H /* Define to 1 if you have the header file. */ #undef HAVE_IOSTREAM /* Define to 1 if you have the header file. */ #undef HAVE_IOSTREAM_H /* Define to 1 if you have the `crypto' library (-lcrypto). */ #undef HAVE_LIBCRYPTO /* Define to 1 if you have the `m' library (-lm). */ #undef HAVE_LIBM /* Define if you have the ncurses library (-lncurses). */ #undef HAVE_LIBNCURSES /* Define to 1 if you have the `socket' library (-lsocket). 
*/ #undef HAVE_LIBSOCKET /* Define to 1 if you have the `ssl' library (-lssl). */ #undef HAVE_LIBSSL /* Define to 1 if you have the `z' library (-lz). */ #undef HAVE_LIBZ /* Define to 1 if you have the header file. */ #undef HAVE_MATH_H /* Define to 1 if you have the header file. */ #undef HAVE_MEMORY_H /* Define to 1 if you have the header file. */ #undef HAVE_NCURSES_H /* Define to 1 if you have the header file. */ #undef HAVE_NETDB_H /* Define to 1 if you have the header file. */ #undef HAVE_NETINET_IN_H /* Define to 1 if you have the header file. */ #undef HAVE_NETINET_IN_VAR_H /* Define to 1 if you have the header file. */ #undef HAVE_NETINET_IP_DUMMYNET_H /* Define to 1 if you have the header file. */ #undef HAVE_NETINET_IP_FW_H /* Define to 1 if you have the header file. */ #undef HAVE_NETINET_TCP_H /* Define to 1 if you have the header file. */ #undef HAVE_NET_IF_H /* Define to 1 if you have the header file. */ #undef HAVE_NET_IF_VAR_H /* Define to 1 if you have the header file. */ #undef HAVE_OPENSSL_ERR_H /* Define to 1 if you have the header file. */ #undef HAVE_OPENSSL_RAND_H /* Define to 1 if you have the header file. */ #undef HAVE_OPENSSL_SSL_H /* Define to 1 if you have the `pclose' function. */ #undef HAVE_PCLOSE /* Define to 1 if you have the `poll' function. */ #undef HAVE_POLL /* Define to 1 if you have the `popen' function. */ #undef HAVE_POPEN /* Define to 1 if you have the header file. */ #undef HAVE_PROCESS_H /* Define to 1 if you have the `RAND_egd' function. */ #undef HAVE_RAND_EGD /* Define to 1 if you have the `RAND_screen' function. */ #undef HAVE_RAND_SCREEN /* Define to 1 if you have the `RAND_status' function. */ #undef HAVE_RAND_STATUS /* Define to 1 if you have the header file. */ #undef HAVE_REGEX_H /* Define to 1 if you have the `rint' function. */ #undef HAVE_RINT /* sockaddr structure has sa_len member */ #undef HAVE_SA_LEN /* Define to 1 if you have the `SetLastError' function. */ #undef HAVE_SETLASTERROR /* have set_new_handler or std::set_new_handler */ #undef HAVE_SET_NEW_HANDLER /* Define to 1 if you have the `signal' function. */ #undef HAVE_SIGNAL /* Define to 1 if you have the header file. */ #undef HAVE_SIGNAL_H /* Define to 1 if you have the `sleep' function. */ #undef HAVE_SLEEP /* Define to 1 if you have the header file. */ #undef HAVE_SSTREAM /* Define to 1 if you have the header file. */ #undef HAVE_STDINT_H /* Define to 1 if you have the header file. */ #undef HAVE_STDLIB_H /* Define to 1 if you have the `strcasecmp' function. */ #undef HAVE_STRCASECMP /* Define to 1 if you have the `stricmp' function. */ #undef HAVE_STRICMP /* Define to 1 if you have the header file. */ #undef HAVE_STRINGS_H /* Define to 1 if you have the header file. */ #undef HAVE_STRING_H /* Define to 1 if you have the `strncasecmp' function. */ #undef HAVE_STRNCASECMP /* Define to 1 if you have the `strnicmp' function. */ #undef HAVE_STRNICMP /* Define to 1 if you have the header file. */ #undef HAVE_STRSTREAM /* Define to 1 if you have the header file. */ #undef HAVE_STRSTREAM_H /* Define to 1 if you have the header file. */ #undef HAVE_STRSTREA_H /* Define to 1 if you have the header file. */ #undef HAVE_SYS_IOCTL_H /* Define to 1 if you have the header file. */ #undef HAVE_SYS_PARAM_H /* Define to 1 if you have the header file. */ #undef HAVE_SYS_RESOURCE_H /* Define to 1 if you have the header file. */ #undef HAVE_SYS_SELECT_H /* Define to 1 if you have the header file. */ #undef HAVE_SYS_SOCKET_H /* Define to 1 if you have the header file. 
*/ #undef HAVE_SYS_SOCKIO_H /* Define to 1 if you have the header file. */ #undef HAVE_SYS_STAT_H /* Define to 1 if you have the header file. */ #undef HAVE_SYS_SYSINFO_H /* Define to 1 if you have the header file. */ #undef HAVE_SYS_TIME_H /* Define to 1 if you have the header file. */ #undef HAVE_SYS_TYPES_H /* Define to 1 if you have the `timegm' function. */ #undef HAVE_TIMEGM /* how time zone global variable looks like (timezone, _timezone, etc.) */ #undef HAVE_TIMEZONE /* Define to 1 if you have the header file. */ #undef HAVE_TIME_H /* tm structure has tm_gmtoff member */ #undef HAVE_TM_GMTOFF /* */ #undef HAVE_TYPE_IFALIASREQ /* */ #undef HAVE_TYPE_IFREQ /* */ #undef HAVE_TYPE_IN6_ALIASREQ /* */ #undef HAVE_TYPE_IN6_IFREQ /* */ #undef HAVE_TYPE_IOS_BASE_FMTFLAGS /* */ #undef HAVE_TYPE_IOS_FMTFLAGS /* */ #undef HAVE_TYPE_RLIM_T /* */ #undef HAVE_TYPE_RUSAGE /* Some systems use socklen_t typedef for some socket operations. Socklen_t may conflict with "int" that is also used. */ #undef HAVE_TYPE_SOCKLEN_T /* */ #undef HAVE_TYPE_STREAMPOS /* */ #undef HAVE_TYPE_TIMEVAL /* Define to 1 if you have the header file. */ #undef HAVE_UNISTD_H /* Define to 1 if you have the `unlink' function. */ #undef HAVE_UNLINK /* Define to 1 if you have the header file. */ #undef HAVE_WINBASE_H /* Define to 1 if you have the header file. */ #undef HAVE_WINSOCK2_H /* Define to 1 if you have the `WSACleanup' function. */ #undef HAVE_WSACLEANUP /* Define to 1 if you have the `WSAIoctl' function. */ #undef HAVE_WSAIOCTL /* Define to 1 if you have the `WSAStartup' function. */ #undef HAVE_WSASTARTUP /* Define to 1 if you have the `_ftime' function. */ #undef HAVE__FTIME /* Define to 1 if you have the `_pclose' function. */ #undef HAVE__PCLOSE /* Define to 1 if you have the `_popen' function. */ #undef HAVE__POPEN /* Define to the sub-directory in which libtool stores uninstalled libraries. */ #undef LT_OBJDIR /* Mac OS X 10.6 (at least) defines NCURSES_OPAQUE to 1 by default. Some structs we use (e.g. WINDOW) are defined only if NCURSES_OPAQUE is 0. */ #undef NCURSES_OPAQUE /* Define if OpenSSL support is enabled */ #undef OPENSSL_ENABLED /* Name of package */ #undef PACKAGE /* Define to the address where bug reports for this package should be sent. */ #undef PACKAGE_BUGREPORT /* Define to the full name of this package. */ #undef PACKAGE_NAME /* Define to the full name and version of this package. */ #undef PACKAGE_STRING /* Define to the one symbol short name of this package. */ #undef PACKAGE_TARNAME /* Define to the home page for this package. */ #undef PACKAGE_URL /* Define to the version of this package. */ #undef PACKAGE_VERSION /* negative or zero if no probing has been done or probing failed */ #define PROBED_MAXFD -1 /* sighandler prototype (e.g. "void SignalHandler(...)" on IRIX */ #undef SIGNAL_HANDLER_TYPE /* The size of `char', as computed by sizeof. */ #undef SIZEOF_CHAR /* The size of `int', as computed by sizeof. */ #undef SIZEOF_INT /* The size of `long', as computed by sizeof. */ #undef SIZEOF_LONG /* The size of `short', as computed by sizeof. */ #undef SIZEOF_SHORT /* The size of `void*', as computed by sizeof. */ #undef SIZEOF_VOIDP /* Define to 1 if you have the ANSI C header files. 
*/ #undef STDC_HEADERS /* Defined if supported DummyNet version was found */ #undef USE_DUMMYNET /* Version number of package */ #undef VERSION #include "post-config.h" #endif /* POLYGRAPH__CONFIG_H */ polygraph-4.3.2/exe_test.sh0000755000175000017500000000143211533775753015322 0ustar testertester#!/bin/sh config=$1 if test -z "$config" then config='workloads/simple.pg'; else shift fi set -x #strace -s 80 -o srv.strace \ ./src/server/server \ --config $config \ --cfg_dirs workloads/include \ --verb_lvl 10 \ --log /tmp/ts.log \ --console /tmp/ts.con $* & sleep 3 #strace -s 80 -o clt.strace \ ./src/client/client \ --config $config \ --cfg_dirs workloads/include \ --verb_lvl 10 \ --log /tmp/tc.log \ --console /tmp/tc.con $* & sleep 3 set +x a=''; while tail /tmp/ts.con /tmp/tc.con && test -z "$a" do echo -n " to continue, to stop: " read a done set -x killall -INT client server sleep 10; tail /tmp/t[sc].con ./src/logextractors/lx /tmp/ts.log | fgrep rate | paste - - ./src/logextractors/lx /tmp/tc.log | fgrep rate | paste - - set +x polygraph-4.3.2/common.am0000644000175000017500000000365611505177464014756 0ustar testertester# settings common to all Makefile.ams # top_builddir/ is needed for generated config.h # top_builddir/src/ is needed for generated src/xstd/h/stdint.h # top_srcdir/ is needed for post-config.h # TODO: move post-config.h and generated config.h to src? AM_CPPFLAGS = -I$(top_builddir) -I$(top_builddir)/src -I$(top_srcdir) -I$(top_srcdir)/src #AM_LDFLAGS = #imported_libs = if ENABLE_MANPAGES_GEN manpages-am: @top_srcdir@/common.h2m @for binary in $(bin_PROGRAMS) $(dist_bin_SCRIPTS); do \ echo "Generating manpage for $$binary"; \ manpage=`echo -n "$$binary" | sed -e 's/\..*//'`; \ name=`(grep \ --after-context=1 \ ".B \\\\\%polygraph-$$manpage" \ '@top_srcdir@/polygraph.man.in' || \ echo -n ' a part of Web Polygraph performance benchmark') | \ tail -1 | cut -c4-`; \ $(HELP2MAN) \ --no-info \ --name="$$name" \ --version-string="polygraph-$$manpage - $(PACKAGE_NAME)" \ --include='@top_srcdir@/common.h2m' \ --opt-include="$$manpage.h2m" \ --output="$$manpage.man" \ "./$$binary";\ done @if test 'x$(RECURSIVE_TARGETS)' != 'xmanpages-recursive' ; then \ $(MAKE) \ $(AM_MAKEFLAGS) \ RECURSIVE_TARGETS=manpages-recursive \ manpages-recursive; \ fi manpages-recursive: manpages: Makefile $(LIBRARIES) $(PROGRAMS) manpages-am manpages-recursive manpagesclean-am: @rm -f $(dist_man1_MANS) @if test 'x$(RECURSIVE_TARGETS)' != 'xmanpagesclean-recursive' ; then \ $(MAKE) \ $(AM_MAKEFLAGS) \ RECURSIVE_TARGETS=manpagesclean-recursive \ manpagesclean-recursive; \ fi manpagesclean-recursive: manpagesclean: manpagesclean-am manpagesclean-recursive .PHONY: manpages-am manpages-recursive manpages \ manpagesclean-am manpagesclean-recursive manpagesclean else manpages: @echo "Can not generate man pages. Please install help2man and reconfigure." manpagesclean: @echo "Can not generate man pages. Please install help2man and reconfigure." .PHONY: manpages manpagesclean endif polygraph-4.3.2/Makefile.in0000644000175000017500000007161011546445454015211 0ustar testertester# Makefile.in generated by automake 1.11.1 from Makefile.am. # @configure_input@ # Copyright (C) 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001, 2002, # 2003, 2004, 2005, 2006, 2007, 2008, 2009 Free Software Foundation, # Inc. # This Makefile.in is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. 
# This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY, to the extent permitted by law; without # even the implied warranty of MERCHANTABILITY or FITNESS FOR A # PARTICULAR PURPOSE. @SET_MAKE@ # settings common to all Makefile.ams VPATH = @srcdir@ pkgdatadir = $(datadir)/@PACKAGE@ pkgincludedir = $(includedir)/@PACKAGE@ pkglibdir = $(libdir)/@PACKAGE@ pkglibexecdir = $(libexecdir)/@PACKAGE@ am__cd = CDPATH="$${ZSH_VERSION+.}$(PATH_SEPARATOR)" && cd install_sh_DATA = $(install_sh) -c -m 644 install_sh_PROGRAM = $(install_sh) -c install_sh_SCRIPT = $(install_sh) -c INSTALL_HEADER = $(INSTALL_DATA) transform = $(program_transform_name) NORMAL_INSTALL = : PRE_INSTALL = : POST_INSTALL = : NORMAL_UNINSTALL = : PRE_UNINSTALL = : POST_UNINSTALL = : build_triplet = @build@ host_triplet = @host@ DIST_COMMON = README $(am__configure_deps) $(noinst_HEADERS) \ $(notrans_dist_man7_MANS) $(srcdir)/Makefile.am \ $(srcdir)/Makefile.in $(srcdir)/config.h.in \ $(srcdir)/polygraph.man.in $(top_srcdir)/common.am \ $(top_srcdir)/configure INSTALL TODO cfgaux/config.guess \ cfgaux/config.sub cfgaux/depcomp cfgaux/install-sh \ cfgaux/ltmain.sh cfgaux/missing subdir = . ACLOCAL_M4 = $(top_srcdir)/aclocal.m4 am__aclocal_m4_deps = $(top_srcdir)/acinclude.m4 \ $(top_srcdir)/cfgaux/ax_create_stdint_h.m4 \ $(top_srcdir)/cfgaux/check_zlib.m4 $(top_srcdir)/configure.in am__configure_deps = $(am__aclocal_m4_deps) $(CONFIGURE_DEPENDENCIES) \ $(ACLOCAL_M4) am__CONFIG_DISTCLEAN_FILES = config.status config.cache config.log \ configure.lineno config.status.lineno mkinstalldirs = $(install_sh) -d CONFIG_HEADER = config.h CONFIG_CLEAN_FILES = polygraph.man CONFIG_CLEAN_VPATH_FILES = SOURCES = DIST_SOURCES = RECURSIVE_TARGETS = all-recursive check-recursive dvi-recursive \ html-recursive info-recursive install-data-recursive \ install-dvi-recursive install-exec-recursive \ install-html-recursive install-info-recursive \ install-pdf-recursive install-ps-recursive install-recursive \ installcheck-recursive installdirs-recursive pdf-recursive \ ps-recursive uninstall-recursive am__vpath_adj_setup = srcdirstrip=`echo "$(srcdir)" | sed 's|.|.|g'`; am__vpath_adj = case $$p in \ $(srcdir)/*) f=`echo "$$p" | sed "s|^$$srcdirstrip/||"`;; \ *) f=$$p;; \ esac; am__strip_dir = f=`echo $$p | sed -e 's|^.*/||'`; am__install_max = 40 am__nobase_strip_setup = \ srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*|]/\\\\&/g'` am__nobase_strip = \ for p in $$list; do echo "$$p"; done | sed -e "s|$$srcdirstrip/||" am__nobase_list = $(am__nobase_strip_setup); \ for p in $$list; do echo "$$p $$p"; done | \ sed "s| $$srcdirstrip/| |;"' / .*\//!s/ .*/ ./; s,\( .*\)/[^/]*$$,\1,' | \ $(AWK) 'BEGIN { files["."] = "" } { files[$$2] = files[$$2] " " $$1; \ if (++n[$$2] == $(am__install_max)) \ { print $$2, files[$$2]; n[$$2] = 0; files[$$2] = "" } } \ END { for (dir in files) print dir, files[dir] }' am__base_list = \ sed '$$!N;$$!N;$$!N;$$!N;$$!N;$$!N;$$!N;s/\n/ /g' | \ sed '$$!N;$$!N;$$!N;$$!N;s/\n/ /g' man7dir = $(mandir)/man7 am__installdirs = "$(DESTDIR)$(man7dir)" NROFF = nroff MANS = $(notrans_dist_man7_MANS) HEADERS = $(noinst_HEADERS) RECURSIVE_CLEAN_TARGETS = mostlyclean-recursive clean-recursive \ distclean-recursive maintainer-clean-recursive AM_RECURSIVE_TARGETS = $(RECURSIVE_TARGETS:-recursive=) \ $(RECURSIVE_CLEAN_TARGETS:-recursive=) tags TAGS ctags CTAGS \ distdir dist dist-all distcheck ETAGS = etags CTAGS = ctags DIST_SUBDIRS = $(SUBDIRS) DISTFILES = $(DIST_COMMON) $(DIST_SOURCES) $(TEXINFOS) 
$(EXTRA_DIST) distdir = $(PACKAGE)-$(VERSION) top_distdir = $(distdir) am__remove_distdir = \ { test ! -d "$(distdir)" \ || { find "$(distdir)" -type d ! -perm -200 -exec chmod u+w {} ';' \ && rm -fr "$(distdir)"; }; } am__relativize = \ dir0=`pwd`; \ sed_first='s,^\([^/]*\)/.*$$,\1,'; \ sed_rest='s,^[^/]*/*,,'; \ sed_last='s,^.*/\([^/]*\)$$,\1,'; \ sed_butlast='s,/*[^/]*$$,,'; \ while test -n "$$dir1"; do \ first=`echo "$$dir1" | sed -e "$$sed_first"`; \ if test "$$first" != "."; then \ if test "$$first" = ".."; then \ dir2=`echo "$$dir0" | sed -e "$$sed_last"`/"$$dir2"; \ dir0=`echo "$$dir0" | sed -e "$$sed_butlast"`; \ else \ first2=`echo "$$dir2" | sed -e "$$sed_first"`; \ if test "$$first2" = "$$first"; then \ dir2=`echo "$$dir2" | sed -e "$$sed_rest"`; \ else \ dir2="../$$dir2"; \ fi; \ dir0="$$dir0"/"$$first"; \ fi; \ fi; \ dir1=`echo "$$dir1" | sed -e "$$sed_rest"`; \ done; \ reldir="$$dir2" DIST_ARCHIVES = $(distdir).tar.gz GZIP_ENV = --best distuninstallcheck_listfiles = find . -type f -print distcleancheck_listfiles = find . -type f -print ACLOCAL = @ACLOCAL@ AMTAR = @AMTAR@ AR = @AR@ AR_R = @AR_R@ AUTOCONF = @AUTOCONF@ AUTOHEADER = @AUTOHEADER@ AUTOMAKE = @AUTOMAKE@ AWK = @AWK@ CC = @CC@ CCDEPMODE = @CCDEPMODE@ CFLAGS = @CFLAGS@ CPP = @CPP@ CPPFLAGS = @CPPFLAGS@ CXX = @CXX@ CXXCPP = @CXXCPP@ CXXDEPMODE = @CXXDEPMODE@ CXXFLAGS = @CXXFLAGS@ CYGPATH_W = @CYGPATH_W@ DEFS = @DEFS@ DEPDIR = @DEPDIR@ DSYMUTIL = @DSYMUTIL@ DUMPBIN = @DUMPBIN@ ECHO_C = @ECHO_C@ ECHO_N = @ECHO_N@ ECHO_T = @ECHO_T@ EGREP = @EGREP@ EXEEXT = @EXEEXT@ FGREP = @FGREP@ GREP = @GREP@ HELP2MAN = @HELP2MAN@ INSTALL = @INSTALL@ INSTALL_DATA = @INSTALL_DATA@ INSTALL_PROGRAM = @INSTALL_PROGRAM@ INSTALL_SCRIPT = @INSTALL_SCRIPT@ INSTALL_STRIP_PROGRAM = @INSTALL_STRIP_PROGRAM@ LD = @LD@ LDFLAGS = @LDFLAGS@ LDFLAG_RDYNAMIC = @LDFLAG_RDYNAMIC@ LIBOBJS = @LIBOBJS@ LIBS = @LIBS@ LIBTOOL = @LIBTOOL@ LIB_CURSES = @LIB_CURSES@ LIB_DL = @LIB_DL@ LIPO = @LIPO@ LN_S = @LN_S@ LTLIBOBJS = @LTLIBOBJS@ MAINT = @MAINT@ MAKEINFO = @MAKEINFO@ MKDIR_P = @MKDIR_P@ NM = @NM@ NMEDIT = @NMEDIT@ OBJDUMP = @OBJDUMP@ OBJEXT = @OBJEXT@ OTOOL = @OTOOL@ OTOOL64 = @OTOOL64@ PACKAGE = @PACKAGE@ PACKAGE_BUGREPORT = @PACKAGE_BUGREPORT@ PACKAGE_NAME = @PACKAGE_NAME@ PACKAGE_STRING = @PACKAGE_STRING@ PACKAGE_TARNAME = @PACKAGE_TARNAME@ PACKAGE_URL = @PACKAGE_URL@ PACKAGE_VERSION = @PACKAGE_VERSION@ PATH_SEPARATOR = @PATH_SEPARATOR@ RANLIB = @RANLIB@ SED = @SED@ SET_MAKE = @SET_MAKE@ SHELL = @SHELL@ STRIP = @STRIP@ VERSION = @VERSION@ abs_builddir = @abs_builddir@ abs_srcdir = @abs_srcdir@ abs_top_builddir = @abs_top_builddir@ abs_top_srcdir = @abs_top_srcdir@ ac_ct_CC = @ac_ct_CC@ ac_ct_CXX = @ac_ct_CXX@ ac_ct_DUMPBIN = @ac_ct_DUMPBIN@ am__include = @am__include@ am__leading_dot = @am__leading_dot@ am__quote = @am__quote@ am__tar = @am__tar@ am__untar = @am__untar@ bindir = @bindir@ build = @build@ build_alias = @build_alias@ build_cpu = @build_cpu@ build_os = @build_os@ build_vendor = @build_vendor@ builddir = @builddir@ datadir = @datadir@ datarootdir = @datarootdir@ docdir = @docdir@ dvidir = @dvidir@ exec_prefix = @exec_prefix@ host = @host@ host_alias = @host_alias@ host_cpu = @host_cpu@ host_os = @host_os@ host_vendor = @host_vendor@ htmldir = @htmldir@ includedir = @includedir@ infodir = @infodir@ install_sh = @install_sh@ libdir = @libdir@ libexecdir = @libexecdir@ localedir = @localedir@ localstatedir = @localstatedir@ lt_ECHO = @lt_ECHO@ mandir = @mandir@ mkdir_p = @mkdir_p@ oldincludedir = @oldincludedir@ pdfdir = @pdfdir@ prefix = @prefix@ 
program_transform_name = @program_transform_name@ psdir = @psdir@ sbindir = @sbindir@ sharedstatedir = @sharedstatedir@ srcdir = @srcdir@ std_include = @std_include@ sysconfdir = @sysconfdir@ target_alias = @target_alias@ top_build_prefix = @top_build_prefix@ top_builddir = @top_builddir@ top_srcdir = @top_srcdir@ SUBDIRS = . src workloads tools noinst_HEADERS = post-config.h EXTRA_DIST = \ common.h2m \ change.log \ INSTALL \ README \ LICENSE \ NOTICE \ bootstrap.sh notrans_dist_man7_MANS = polygraph.man # top_builddir/ is needed for generated config.h # top_builddir/src/ is needed for generated src/xstd/h/stdint.h # top_srcdir/ is needed for post-config.h # TODO: move post-config.h and generated config.h to src? AM_CPPFLAGS = -I$(top_builddir) -I$(top_builddir)/src -I$(top_srcdir) -I$(top_srcdir)/src all: config.h $(MAKE) $(AM_MAKEFLAGS) all-recursive .SUFFIXES: am--refresh: @: $(srcdir)/Makefile.in: @MAINTAINER_MODE_TRUE@ $(srcdir)/Makefile.am $(top_srcdir)/common.am $(am__configure_deps) @for dep in $?; do \ case '$(am__configure_deps)' in \ *$$dep*) \ echo ' cd $(srcdir) && $(AUTOMAKE) --foreign'; \ $(am__cd) $(srcdir) && $(AUTOMAKE) --foreign \ && exit 0; \ exit 1;; \ esac; \ done; \ echo ' cd $(top_srcdir) && $(AUTOMAKE) --foreign ./Makefile'; \ $(am__cd) $(top_srcdir) && \ $(AUTOMAKE) --foreign ./Makefile .PRECIOUS: Makefile Makefile: $(srcdir)/Makefile.in $(top_builddir)/config.status @case '$?' in \ *config.status*) \ echo ' $(SHELL) ./config.status'; \ $(SHELL) ./config.status;; \ *) \ echo ' cd $(top_builddir) && $(SHELL) ./config.status $@ $(am__depfiles_maybe)'; \ cd $(top_builddir) && $(SHELL) ./config.status $@ $(am__depfiles_maybe);; \ esac; $(top_builddir)/config.status: $(top_srcdir)/configure $(CONFIG_STATUS_DEPENDENCIES) $(SHELL) ./config.status --recheck $(top_srcdir)/configure: @MAINTAINER_MODE_TRUE@ $(am__configure_deps) $(am__cd) $(srcdir) && $(AUTOCONF) $(ACLOCAL_M4): @MAINTAINER_MODE_TRUE@ $(am__aclocal_m4_deps) $(am__cd) $(srcdir) && $(ACLOCAL) $(ACLOCAL_AMFLAGS) $(am__aclocal_m4_deps): config.h: stamp-h1 @if test ! 
-f $@; then \ rm -f stamp-h1; \ $(MAKE) $(AM_MAKEFLAGS) stamp-h1; \ else :; fi stamp-h1: $(srcdir)/config.h.in $(top_builddir)/config.status @rm -f stamp-h1 cd $(top_builddir) && $(SHELL) ./config.status config.h $(srcdir)/config.h.in: @MAINTAINER_MODE_TRUE@ $(am__configure_deps) ($(am__cd) $(top_srcdir) && $(AUTOHEADER)) rm -f stamp-h1 touch $@ distclean-hdr: -rm -f config.h stamp-h1 polygraph.man: $(top_builddir)/config.status $(srcdir)/polygraph.man.in cd $(top_builddir) && $(SHELL) ./config.status $@ mostlyclean-libtool: -rm -f *.lo clean-libtool: -rm -rf .libs _libs distclean-libtool: -rm -f libtool config.lt install-man7: $(notrans_dist_man7_MANS) @$(NORMAL_INSTALL) test -z "$(man7dir)" || $(MKDIR_P) "$(DESTDIR)$(man7dir)" @list='$(notrans_dist_man7_MANS)'; test -n "$(man7dir)" || exit 0; \ { for i in $$list; do echo "$$i"; done; \ } | while read p; do \ if test -f $$p; then d=; else d="$(srcdir)/"; fi; \ echo "$$d$$p"; echo "$$p"; \ done | \ sed 'n;s,.*/,,;p;s,\.[^7][0-9a-z]*$$,.7,' | \ sed 'N;N;s,\n, ,g' | { \ list=; while read file base inst; do \ if test "$$base" = "$$inst"; then list="$$list $$file"; else \ echo " $(INSTALL_DATA) '$$file' '$(DESTDIR)$(man7dir)/$$inst'"; \ $(INSTALL_DATA) "$$file" "$(DESTDIR)$(man7dir)/$$inst" || exit $$?; \ fi; \ done; \ for i in $$list; do echo "$$i"; done | $(am__base_list) | \ while read files; do \ test -z "$$files" || { \ echo " $(INSTALL_DATA) $$files '$(DESTDIR)$(man7dir)'"; \ $(INSTALL_DATA) $$files "$(DESTDIR)$(man7dir)" || exit $$?; }; \ done; } uninstall-man7: @$(NORMAL_UNINSTALL) @list='$(notrans_dist_man7_MANS)'; test -n "$(man7dir)" || exit 0; \ files=`{ for i in $$list; do echo "$$i"; done; \ } | sed 's,.*/,,;s,\.[^7][0-9a-z]*$$,.7,'`; \ test -z "$$files" || { \ echo " ( cd '$(DESTDIR)$(man7dir)' && rm -f" $$files ")"; \ cd "$(DESTDIR)$(man7dir)" && rm -f $$files; } # This directory's subdirectories are mostly independent; you can cd # into them and run `make' without going through this Makefile. # To change the values of `make' variables: instead of editing Makefiles, # (1) if the variable is set in `config.status', edit `config.status' # (which will cause the Makefiles to be regenerated when you run `make'); # (2) otherwise, pass the desired values on the `make' command line. 
$(RECURSIVE_TARGETS): @fail= failcom='exit 1'; \ for f in x $$MAKEFLAGS; do \ case $$f in \ *=* | --[!k]*);; \ *k*) failcom='fail=yes';; \ esac; \ done; \ dot_seen=no; \ target=`echo $@ | sed s/-recursive//`; \ list='$(SUBDIRS)'; for subdir in $$list; do \ echo "Making $$target in $$subdir"; \ if test "$$subdir" = "."; then \ dot_seen=yes; \ local_target="$$target-am"; \ else \ local_target="$$target"; \ fi; \ ($(am__cd) $$subdir && $(MAKE) $(AM_MAKEFLAGS) $$local_target) \ || eval $$failcom; \ done; \ if test "$$dot_seen" = "no"; then \ $(MAKE) $(AM_MAKEFLAGS) "$$target-am" || exit 1; \ fi; test -z "$$fail" $(RECURSIVE_CLEAN_TARGETS): @fail= failcom='exit 1'; \ for f in x $$MAKEFLAGS; do \ case $$f in \ *=* | --[!k]*);; \ *k*) failcom='fail=yes';; \ esac; \ done; \ dot_seen=no; \ case "$@" in \ distclean-* | maintainer-clean-*) list='$(DIST_SUBDIRS)' ;; \ *) list='$(SUBDIRS)' ;; \ esac; \ rev=''; for subdir in $$list; do \ if test "$$subdir" = "."; then :; else \ rev="$$subdir $$rev"; \ fi; \ done; \ rev="$$rev ."; \ target=`echo $@ | sed s/-recursive//`; \ for subdir in $$rev; do \ echo "Making $$target in $$subdir"; \ if test "$$subdir" = "."; then \ local_target="$$target-am"; \ else \ local_target="$$target"; \ fi; \ ($(am__cd) $$subdir && $(MAKE) $(AM_MAKEFLAGS) $$local_target) \ || eval $$failcom; \ done && test -z "$$fail" tags-recursive: list='$(SUBDIRS)'; for subdir in $$list; do \ test "$$subdir" = . || ($(am__cd) $$subdir && $(MAKE) $(AM_MAKEFLAGS) tags); \ done ctags-recursive: list='$(SUBDIRS)'; for subdir in $$list; do \ test "$$subdir" = . || ($(am__cd) $$subdir && $(MAKE) $(AM_MAKEFLAGS) ctags); \ done ID: $(HEADERS) $(SOURCES) $(LISP) $(TAGS_FILES) list='$(SOURCES) $(HEADERS) $(LISP) $(TAGS_FILES)'; \ unique=`for i in $$list; do \ if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \ done | \ $(AWK) '{ files[$$0] = 1; nonempty = 1; } \ END { if (nonempty) { for (i in files) print i; }; }'`; \ mkid -fID $$unique tags: TAGS TAGS: tags-recursive $(HEADERS) $(SOURCES) config.h.in $(TAGS_DEPENDENCIES) \ $(TAGS_FILES) $(LISP) set x; \ here=`pwd`; \ if ($(ETAGS) --etags-include --version) >/dev/null 2>&1; then \ include_option=--etags-include; \ empty_fix=.; \ else \ include_option=--include; \ empty_fix=; \ fi; \ list='$(SUBDIRS)'; for subdir in $$list; do \ if test "$$subdir" = .; then :; else \ test ! 
-f $$subdir/TAGS || \ set "$$@" "$$include_option=$$here/$$subdir/TAGS"; \ fi; \ done; \ list='$(SOURCES) $(HEADERS) config.h.in $(LISP) $(TAGS_FILES)'; \ unique=`for i in $$list; do \ if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \ done | \ $(AWK) '{ files[$$0] = 1; nonempty = 1; } \ END { if (nonempty) { for (i in files) print i; }; }'`; \ shift; \ if test -z "$(ETAGS_ARGS)$$*$$unique"; then :; else \ test -n "$$unique" || unique=$$empty_fix; \ if test $$# -gt 0; then \ $(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \ "$$@" $$unique; \ else \ $(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \ $$unique; \ fi; \ fi ctags: CTAGS CTAGS: ctags-recursive $(HEADERS) $(SOURCES) config.h.in $(TAGS_DEPENDENCIES) \ $(TAGS_FILES) $(LISP) list='$(SOURCES) $(HEADERS) config.h.in $(LISP) $(TAGS_FILES)'; \ unique=`for i in $$list; do \ if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \ done | \ $(AWK) '{ files[$$0] = 1; nonempty = 1; } \ END { if (nonempty) { for (i in files) print i; }; }'`; \ test -z "$(CTAGS_ARGS)$$unique" \ || $(CTAGS) $(CTAGSFLAGS) $(AM_CTAGSFLAGS) $(CTAGS_ARGS) \ $$unique GTAGS: here=`$(am__cd) $(top_builddir) && pwd` \ && $(am__cd) $(top_srcdir) \ && gtags -i $(GTAGS_ARGS) "$$here" distclean-tags: -rm -f TAGS ID GTAGS GRTAGS GSYMS GPATH tags distdir: $(DISTFILES) @list='$(MANS)'; if test -n "$$list"; then \ list=`for p in $$list; do \ if test -f $$p; then d=; else d="$(srcdir)/"; fi; \ if test -f "$$d$$p"; then echo "$$d$$p"; else :; fi; done`; \ if test -n "$$list" && \ grep 'ab help2man is required to generate this page' $$list >/dev/null; then \ echo "error: found man pages containing the \`missing help2man' replacement text:" >&2; \ grep -l 'ab help2man is required to generate this page' $$list | sed 's/^/ /' >&2; \ echo " to fix them, install help2man, remove and regenerate the man pages;" >&2; \ echo " typically \`make maintainer-clean' will remove them" >&2; \ exit 1; \ else :; fi; \ else :; fi $(am__remove_distdir) test -d "$(distdir)" || mkdir "$(distdir)" @srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \ topsrcdirstrip=`echo "$(top_srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \ list='$(DISTFILES)'; \ dist_files=`for file in $$list; do echo $$file; done | \ sed -e "s|^$$srcdirstrip/||;t" \ -e "s|^$$topsrcdirstrip/|$(top_builddir)/|;t"`; \ case $$dist_files in \ */*) $(MKDIR_P) `echo "$$dist_files" | \ sed '/\//!d;s|^|$(distdir)/|;s,/[^/]*$$,,' | \ sort -u` ;; \ esac; \ for file in $$dist_files; do \ if test -f $$file || test -d $$file; then d=.; else d=$(srcdir); fi; \ if test -d $$d/$$file; then \ dir=`echo "/$$file" | sed -e 's,/[^/]*$$,,'`; \ if test -d "$(distdir)/$$file"; then \ find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \ fi; \ if test -d $(srcdir)/$$file && test $$d != $(srcdir); then \ cp -fpR $(srcdir)/$$file "$(distdir)$$dir" || exit 1; \ find "$(distdir)/$$file" -type d ! 
-perm -700 -exec chmod u+rwx {} \;; \ fi; \ cp -fpR $$d/$$file "$(distdir)$$dir" || exit 1; \ else \ test -f "$(distdir)/$$file" \ || cp -p $$d/$$file "$(distdir)/$$file" \ || exit 1; \ fi; \ done @list='$(DIST_SUBDIRS)'; for subdir in $$list; do \ if test "$$subdir" = .; then :; else \ test -d "$(distdir)/$$subdir" \ || $(MKDIR_P) "$(distdir)/$$subdir" \ || exit 1; \ fi; \ done @list='$(DIST_SUBDIRS)'; for subdir in $$list; do \ if test "$$subdir" = .; then :; else \ dir1=$$subdir; dir2="$(distdir)/$$subdir"; \ $(am__relativize); \ new_distdir=$$reldir; \ dir1=$$subdir; dir2="$(top_distdir)"; \ $(am__relativize); \ new_top_distdir=$$reldir; \ echo " (cd $$subdir && $(MAKE) $(AM_MAKEFLAGS) top_distdir="$$new_top_distdir" distdir="$$new_distdir" \\"; \ echo " am__remove_distdir=: am__skip_length_check=: am__skip_mode_fix=: distdir)"; \ ($(am__cd) $$subdir && \ $(MAKE) $(AM_MAKEFLAGS) \ top_distdir="$$new_top_distdir" \ distdir="$$new_distdir" \ am__remove_distdir=: \ am__skip_length_check=: \ am__skip_mode_fix=: \ distdir) \ || exit 1; \ fi; \ done -test -n "$(am__skip_mode_fix)" \ || find "$(distdir)" -type d ! -perm -755 \ -exec chmod u+rwx,go+rx {} \; -o \ ! -type d ! -perm -444 -links 1 -exec chmod a+r {} \; -o \ ! -type d ! -perm -400 -exec chmod a+r {} \; -o \ ! -type d ! -perm -444 -exec $(install_sh) -c -m a+r {} {} \; \ || chmod -R a+r "$(distdir)" dist-gzip: distdir tardir=$(distdir) && $(am__tar) | GZIP=$(GZIP_ENV) gzip -c >$(distdir).tar.gz $(am__remove_distdir) dist-bzip2: distdir tardir=$(distdir) && $(am__tar) | bzip2 -9 -c >$(distdir).tar.bz2 $(am__remove_distdir) dist-lzma: distdir tardir=$(distdir) && $(am__tar) | lzma -9 -c >$(distdir).tar.lzma $(am__remove_distdir) dist-xz: distdir tardir=$(distdir) && $(am__tar) | xz -c >$(distdir).tar.xz $(am__remove_distdir) dist-tarZ: distdir tardir=$(distdir) && $(am__tar) | compress -c >$(distdir).tar.Z $(am__remove_distdir) dist-shar: distdir shar $(distdir) | GZIP=$(GZIP_ENV) gzip -c >$(distdir).shar.gz $(am__remove_distdir) dist-zip: distdir -rm -f $(distdir).zip zip -rq $(distdir).zip $(distdir) $(am__remove_distdir) dist dist-all: distdir tardir=$(distdir) && $(am__tar) | GZIP=$(GZIP_ENV) gzip -c >$(distdir).tar.gz $(am__remove_distdir) # This target untars the dist file and tries a VPATH configuration. Then # it guarantees that the distribution is self-contained by making another # tarfile. distcheck: dist case '$(DIST_ARCHIVES)' in \ *.tar.gz*) \ GZIP=$(GZIP_ENV) gzip -dc $(distdir).tar.gz | $(am__untar) ;;\ *.tar.bz2*) \ bzip2 -dc $(distdir).tar.bz2 | $(am__untar) ;;\ *.tar.lzma*) \ lzma -dc $(distdir).tar.lzma | $(am__untar) ;;\ *.tar.xz*) \ xz -dc $(distdir).tar.xz | $(am__untar) ;;\ *.tar.Z*) \ uncompress -c $(distdir).tar.Z | $(am__untar) ;;\ *.shar.gz*) \ GZIP=$(GZIP_ENV) gzip -dc $(distdir).shar.gz | unshar ;;\ *.zip*) \ unzip $(distdir).zip ;;\ esac chmod -R a-w $(distdir); chmod a+w $(distdir) mkdir $(distdir)/_build mkdir $(distdir)/_inst chmod a-w $(distdir) test -d $(distdir)/_build || exit 0; \ dc_install_base=`$(am__cd) $(distdir)/_inst && pwd | sed -e 's,^[^:\\/]:[\\/],/,'` \ && dc_destdir="$${TMPDIR-/tmp}/am-dc-$$$$/" \ && am__cwd=`pwd` \ && $(am__cd) $(distdir)/_build \ && ../configure --srcdir=.. 
--prefix="$$dc_install_base" \ $(DISTCHECK_CONFIGURE_FLAGS) \ && $(MAKE) $(AM_MAKEFLAGS) \ && $(MAKE) $(AM_MAKEFLAGS) dvi \ && $(MAKE) $(AM_MAKEFLAGS) check \ && $(MAKE) $(AM_MAKEFLAGS) install \ && $(MAKE) $(AM_MAKEFLAGS) installcheck \ && $(MAKE) $(AM_MAKEFLAGS) uninstall \ && $(MAKE) $(AM_MAKEFLAGS) distuninstallcheck_dir="$$dc_install_base" \ distuninstallcheck \ && chmod -R a-w "$$dc_install_base" \ && ({ \ (cd ../.. && umask 077 && mkdir "$$dc_destdir") \ && $(MAKE) $(AM_MAKEFLAGS) DESTDIR="$$dc_destdir" install \ && $(MAKE) $(AM_MAKEFLAGS) DESTDIR="$$dc_destdir" uninstall \ && $(MAKE) $(AM_MAKEFLAGS) DESTDIR="$$dc_destdir" \ distuninstallcheck_dir="$$dc_destdir" distuninstallcheck; \ } || { rm -rf "$$dc_destdir"; exit 1; }) \ && rm -rf "$$dc_destdir" \ && $(MAKE) $(AM_MAKEFLAGS) dist \ && rm -rf $(DIST_ARCHIVES) \ && $(MAKE) $(AM_MAKEFLAGS) distcleancheck \ && cd "$$am__cwd" \ || exit 1 $(am__remove_distdir) @(echo "$(distdir) archives ready for distribution: "; \ list='$(DIST_ARCHIVES)'; for i in $$list; do echo $$i; done) | \ sed -e 1h -e 1s/./=/g -e 1p -e 1x -e '$$p' -e '$$x' distuninstallcheck: @$(am__cd) '$(distuninstallcheck_dir)' \ && test `$(distuninstallcheck_listfiles) | wc -l` -le 1 \ || { echo "ERROR: files left after uninstall:" ; \ if test -n "$(DESTDIR)"; then \ echo " (check DESTDIR support)"; \ fi ; \ $(distuninstallcheck_listfiles) ; \ exit 1; } >&2 distcleancheck: distclean @if test '$(srcdir)' = . ; then \ echo "ERROR: distcleancheck can only run from a VPATH build" ; \ exit 1 ; \ fi @test `$(distcleancheck_listfiles) | wc -l` -eq 0 \ || { echo "ERROR: files left in build directory after distclean:" ; \ $(distcleancheck_listfiles) ; \ exit 1; } >&2 check-am: all-am check: check-recursive all-am: Makefile $(MANS) $(HEADERS) config.h installdirs: installdirs-recursive installdirs-am: for dir in "$(DESTDIR)$(man7dir)"; do \ test -z "$$dir" || $(MKDIR_P) "$$dir"; \ done install: install-recursive install-exec: install-exec-recursive install-data: install-data-recursive uninstall: uninstall-recursive install-am: all-am @$(MAKE) $(AM_MAKEFLAGS) install-exec-am install-data-am installcheck: installcheck-recursive install-strip: $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ `test -z '$(STRIP)' || \ echo "INSTALL_PROGRAM_ENV=STRIPPROG='$(STRIP)'"` install mostlyclean-generic: clean-generic: distclean-generic: -test -z "$(CONFIG_CLEAN_FILES)" || rm -f $(CONFIG_CLEAN_FILES) -test . = "$(srcdir)" || test -z "$(CONFIG_CLEAN_VPATH_FILES)" || rm -f $(CONFIG_CLEAN_VPATH_FILES) maintainer-clean-generic: @echo "This command is intended for maintainers to use" @echo "it deletes files that may require special tools to rebuild." 
clean: clean-recursive clean-am: clean-generic clean-libtool mostlyclean-am distclean: distclean-recursive -rm -f $(am__CONFIG_DISTCLEAN_FILES) -rm -f Makefile distclean-am: clean-am distclean-generic distclean-hdr \ distclean-libtool distclean-tags dvi: dvi-recursive dvi-am: html: html-recursive html-am: info: info-recursive info-am: install-data-am: install-man install-dvi: install-dvi-recursive install-dvi-am: install-exec-am: install-html: install-html-recursive install-html-am: install-info: install-info-recursive install-info-am: install-man: install-man7 install-pdf: install-pdf-recursive install-pdf-am: install-ps: install-ps-recursive install-ps-am: installcheck-am: maintainer-clean: maintainer-clean-recursive -rm -f $(am__CONFIG_DISTCLEAN_FILES) -rm -rf $(top_srcdir)/autom4te.cache -rm -f Makefile maintainer-clean-am: distclean-am maintainer-clean-generic mostlyclean: mostlyclean-recursive mostlyclean-am: mostlyclean-generic mostlyclean-libtool pdf: pdf-recursive pdf-am: ps: ps-recursive ps-am: uninstall-am: uninstall-man uninstall-man: uninstall-man7 .MAKE: $(RECURSIVE_CLEAN_TARGETS) $(RECURSIVE_TARGETS) all \ ctags-recursive install-am install-strip tags-recursive .PHONY: $(RECURSIVE_CLEAN_TARGETS) $(RECURSIVE_TARGETS) CTAGS GTAGS \ all all-am am--refresh check check-am clean clean-generic \ clean-libtool ctags ctags-recursive dist dist-all dist-bzip2 \ dist-gzip dist-lzma dist-shar dist-tarZ dist-xz dist-zip \ distcheck distclean distclean-generic distclean-hdr \ distclean-libtool distclean-tags distcleancheck distdir \ distuninstallcheck dvi dvi-am html html-am info info-am \ install install-am install-data install-data-am install-dvi \ install-dvi-am install-exec install-exec-am install-html \ install-html-am install-info install-info-am install-man \ install-man7 install-pdf install-pdf-am install-ps \ install-ps-am install-strip installcheck installcheck-am \ installdirs installdirs-am maintainer-clean \ maintainer-clean-generic mostlyclean mostlyclean-generic \ mostlyclean-libtool pdf pdf-am ps ps-am tags tags-recursive \ uninstall uninstall-am uninstall-man uninstall-man7 #AM_LDFLAGS = #imported_libs = @ENABLE_MANPAGES_GEN_TRUE@manpages-am: @top_srcdir@/common.h2m @ENABLE_MANPAGES_GEN_TRUE@ @for binary in $(bin_PROGRAMS) $(dist_bin_SCRIPTS); do \ @ENABLE_MANPAGES_GEN_TRUE@ echo "Generating manpage for $$binary"; \ @ENABLE_MANPAGES_GEN_TRUE@ manpage=`echo -n "$$binary" | sed -e 's/\..*//'`; \ @ENABLE_MANPAGES_GEN_TRUE@ name=`(grep \ @ENABLE_MANPAGES_GEN_TRUE@ --after-context=1 \ @ENABLE_MANPAGES_GEN_TRUE@ ".B \\\\\%polygraph-$$manpage" \ @ENABLE_MANPAGES_GEN_TRUE@ '@top_srcdir@/polygraph.man.in' || \ @ENABLE_MANPAGES_GEN_TRUE@ echo -n ' a part of Web Polygraph performance benchmark') | \ @ENABLE_MANPAGES_GEN_TRUE@ tail -1 | cut -c4-`; \ @ENABLE_MANPAGES_GEN_TRUE@ $(HELP2MAN) \ @ENABLE_MANPAGES_GEN_TRUE@ --no-info \ @ENABLE_MANPAGES_GEN_TRUE@ --name="$$name" \ @ENABLE_MANPAGES_GEN_TRUE@ --version-string="polygraph-$$manpage - $(PACKAGE_NAME)" \ @ENABLE_MANPAGES_GEN_TRUE@ --include='@top_srcdir@/common.h2m' \ @ENABLE_MANPAGES_GEN_TRUE@ --opt-include="$$manpage.h2m" \ @ENABLE_MANPAGES_GEN_TRUE@ --output="$$manpage.man" \ @ENABLE_MANPAGES_GEN_TRUE@ "./$$binary";\ @ENABLE_MANPAGES_GEN_TRUE@ done @ENABLE_MANPAGES_GEN_TRUE@ @if test 'x$(RECURSIVE_TARGETS)' != 'xmanpages-recursive' ; then \ @ENABLE_MANPAGES_GEN_TRUE@ $(MAKE) \ @ENABLE_MANPAGES_GEN_TRUE@ $(AM_MAKEFLAGS) \ @ENABLE_MANPAGES_GEN_TRUE@ RECURSIVE_TARGETS=manpages-recursive \ @ENABLE_MANPAGES_GEN_TRUE@ 
manpages-recursive; \ @ENABLE_MANPAGES_GEN_TRUE@ fi @ENABLE_MANPAGES_GEN_TRUE@manpages-recursive: @ENABLE_MANPAGES_GEN_TRUE@manpages: Makefile $(LIBRARIES) $(PROGRAMS) manpages-am manpages-recursive @ENABLE_MANPAGES_GEN_TRUE@manpagesclean-am: @ENABLE_MANPAGES_GEN_TRUE@ @rm -f $(dist_man1_MANS) @ENABLE_MANPAGES_GEN_TRUE@ @if test 'x$(RECURSIVE_TARGETS)' != 'xmanpagesclean-recursive' ; then \ @ENABLE_MANPAGES_GEN_TRUE@ $(MAKE) \ @ENABLE_MANPAGES_GEN_TRUE@ $(AM_MAKEFLAGS) \ @ENABLE_MANPAGES_GEN_TRUE@ RECURSIVE_TARGETS=manpagesclean-recursive \ @ENABLE_MANPAGES_GEN_TRUE@ manpagesclean-recursive; \ @ENABLE_MANPAGES_GEN_TRUE@ fi @ENABLE_MANPAGES_GEN_TRUE@manpagesclean-recursive: @ENABLE_MANPAGES_GEN_TRUE@manpagesclean: manpagesclean-am manpagesclean-recursive @ENABLE_MANPAGES_GEN_TRUE@.PHONY: manpages-am manpages-recursive manpages \ @ENABLE_MANPAGES_GEN_TRUE@ manpagesclean-am manpagesclean-recursive manpagesclean @ENABLE_MANPAGES_GEN_FALSE@manpages: @ENABLE_MANPAGES_GEN_FALSE@ @echo "Can not generate man pages. Please install help2man and reconfigure." @ENABLE_MANPAGES_GEN_FALSE@manpagesclean: @ENABLE_MANPAGES_GEN_FALSE@ @echo "Can not generate man pages. Please install help2man and reconfigure." @ENABLE_MANPAGES_GEN_FALSE@.PHONY: manpages manpagesclean # Tell versions [3.59,3.63) of GNU make to not export all variables. # Otherwise a system limit (for SysV at least) may be exceeded. .NOEXPORT: polygraph-4.3.2/LICENSE0000644000175000017500000002367610621177424014152 0ustar testertester Apache License Version 2.0, January 2004 http://www.apache.org/licenses/ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 1. Definitions. "License" shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document. "Licensor" shall mean the copyright owner or entity authorized by the copyright owner that is granting the License. "Legal Entity" shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity. "You" (or "Your") shall mean an individual or Legal Entity exercising permissions granted by this License. "Source" form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files. "Object" form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types. "Work" shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below). "Derivative Works" shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. 
For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof. "Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as "Not a Contribution." "Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work. 2. Grant of Copyright License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form. 3. Grant of Patent License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed. 4. Redistribution. 
You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions: (a) You must give any other recipients of the Work or Derivative Works a copy of this License; and (b) You must cause any modified files to carry prominent notices stating that You changed the files; and (c) You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and (d) If the Work includes a "NOTICE" text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License. You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License. 5. Submission of Contributions. Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions. 6. Trademarks. This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file. 7. Disclaimer of Warranty. Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License. 8. Limitation of Liability. 
In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages. 9. Accepting Warranty or Additional Liability. While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability. END OF TERMS AND CONDITIONS polygraph-4.3.2/Makefile.am0000644000175000017500000000045111463777710015174 0ustar testertester## Process this file with automake to produce Makefile.in SUBDIRS = . src workloads tools noinst_HEADERS = post-config.h EXTRA_DIST = \ common.h2m \ change.log \ INSTALL \ README \ LICENSE \ NOTICE \ bootstrap.sh notrans_dist_man7_MANS = polygraph.man include $(top_srcdir)/common.am polygraph-4.3.2/common.h2m0000644000175000017500000000030611336340427015025 0ustar testertester[COPYRIGHT] Copyright \(co 2003-2006 The Measurement Factory, Inc. [SEE ALSO] .BR polygraph (7) \- general information and a list of programs .B \%http://www.web-polygraph.org/ \- project web site polygraph-4.3.2/INSTALL0000644000175000017500000001724707457103231014171 0ustar testertesterBasic Installation ================== These are generic installation instructions. Some steps may not be applicable to Web Polygraph. Please start with reading http://www.web-polygraph.org/docs/userman/ The `configure' shell script attempts to guess correct values for various system-dependent variables used during compilation. It uses those values to create a `Makefile' in each directory of the package. It may also create one or more `.h' files containing system-dependent definitions. Finally, it creates a shell script `config.status' that you can run in the future to recreate the current configuration, a file `config.cache' that saves the results of its tests to speed up reconfiguring, and a file `config.log' containing compiler output (useful mainly for debugging `configure'). If you need to do unusual things to compile the package, please try to figure out how `configure' could check whether to do them, and mail diffs or instructions to the address given in the `README' so they can be considered for the next release. If at some point `config.cache' contains results you don't want to keep, you may remove or edit it. The file `configure.in' is used to create `configure' by a program called `autoconf'. You only need `configure.in' if you want to change it or regenerate `configure' using a newer version of `autoconf'. The simplest way to compile this package is: 1. 
`cd' to the directory containing the package's source code and type `./configure' to configure the package for your system. If you're using `csh' on an old version of System V, you might need to type `sh ./configure' instead to prevent `csh' from trying to execute `configure' itself. Running `configure' takes awhile. While running, it prints some messages telling which features it is checking for. 2. Type `make' to compile the package. 3. Optionally, type `make check' to run any self-tests that come with the package. 4. Type `make install' to install the programs and any data files and documentation. 5. You can remove the program binaries and object files from the source code directory by typing `make clean'. To also remove the files that `configure' created (so you can compile the package for a different kind of computer), type `make distclean'. There is also a `make maintainer-clean' target, but that is intended mainly for the package's developers. If you use it, you may have to get all sorts of other programs in order to regenerate files that came with the distribution. Compilers and Options ===================== Some systems require unusual options for compilation or linking that the `configure' script does not know about. You can give `configure' initial values for variables by setting them in the environment. Using a Bourne-compatible shell, you can do that on the command line like this: CXX=c++89 CFLAGS=-O2 LIBS=-lposix ./configure Or on systems that have the `env' program, you can do it like this: env CPPFLAGS=-I/usr/local/include LDFLAGS=-s ./configure Compiling For Multiple Architectures ==================================== You can compile the package for more than one kind of computer at the same time, by placing the object files for each architecture in their own directory. To do this, you must use a version of `make' that supports the `VPATH' variable, such as GNU `make'. `cd' to the directory where you want the object files and executables to go and run the `configure' script. `configure' automatically checks for the source code in the directory that `configure' is in and in `..'. If you have to use a `make' that does not supports the `VPATH' variable, you have to compile the package for one architecture at a time in the source code directory. After you have installed the package for one architecture, use `make distclean' before reconfiguring for another architecture. Installation Names ================== By default, `make install' will install the package's files in `/usr/local/bin', `/usr/local/man', etc. You can specify an installation prefix other than `/usr/local' by giving `configure' the option `--prefix=PATH'. You can specify separate installation prefixes for architecture-specific files and architecture-independent files. If you give `configure' the option `--exec-prefix=PATH', the package will use PATH as the prefix for installing programs and libraries. Documentation and other data files will still use the regular prefix. In addition, if you use an unusual directory layout you can give options like `--bindir=PATH' to specify different values for particular kinds of files. Run `configure --help' for a list of the directories you can set and what kinds of files go in them. If the package supports it, you can cause programs to be installed with an extra prefix or suffix on their names by giving `configure' the option `--program-prefix=PREFIX' or `--program-suffix=SUFFIX'. 
Optional Features ================= Some packages pay attention to `--enable-FEATURE' options to `configure', where FEATURE indicates an optional part of the package. They may also pay attention to `--with-PACKAGE' options, where PACKAGE is something like `gnu-as' or `x' (for the X Window System). The `README' should mention any `--enable-' and `--with-' options that the package recognizes. For packages that use the X Window System, `configure' can usually find the X include and library files automatically, but if it doesn't, you can use the `configure' options `--x-includes=DIR' and `--x-libraries=DIR' to specify their locations. Specifying the System Type ========================== There may be some features `configure' can not figure out automatically, but needs to determine by the type of host the package will run on. Usually `configure' can figure that out, but if it prints a message saying it can not guess the host type, give it the `--host=TYPE' option. TYPE can either be a short name for the system type, such as `sun4', or a canonical name with three fields: CPU-COMPANY-SYSTEM See the file `config.sub' for the possible values of each field. If `config.sub' isn't included in this package, then this package doesn't need to know the host type. If you are building compiler tools for cross-compiling, you can also use the `--target=TYPE' option to select the type of system they will produce code for and the `--build=TYPE' option to select the type of system on which you are compiling the package. Sharing Defaults ================ If you want to set default values for `configure' scripts to share, you can create a site shell script called `config.site' that gives default values for variables like `CXX', `cache_file', and `prefix'. `configure' looks for `PREFIX/share/config.site' if it exists, then `PREFIX/etc/config.site' if it exists. Or, you can set the `CONFIG_SITE' environment variable to the location of the site script. A warning: not all `configure' scripts look for a site script. Operation Controls ================== `configure' recognizes the following options to control how it operates. `--cache-file=FILE' Use and save the results of the tests in FILE instead of `./config.cache'. Set FILE to `/dev/null' to disable caching, for debugging `configure'. `--help' Print a summary of the options to `configure', and exit. `--quiet' `--silent' `-q' Do not print messages saying which checks are being made. `--srcdir=DIR' Look for the package's source code in directory DIR. Usually `configure' can determine that directory automatically. `--version' Print the version of Autoconf used to generate the `configure' script, and exit. `configure' also accepts some other, not widely useful, options. 
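   As a concrete illustration of the generic steps above, here is one
possible build-and-install sequence for this package using a separate
build directory (a `VPATH' build).  The build directory name and the
installation prefix below are arbitrary examples chosen for this sketch,
not requirements of the package:

     # run from the directory that contains the unpacked polygraph-4.3.2/ tree
     mkdir build && cd build
     ../polygraph-4.3.2/configure --prefix=/usr/local/polygraph   # example prefix
     make
     make install   # installs under the --prefix given above

   A `VPATH' build like the one sketched above requires a `make' that
supports the `VPATH' variable, such as GNU `make'; with other `make'
programs, run `./configure' and `make' in the source code directory
itself, as described earlier in this file.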
polygraph-4.3.2/src/0000755000175000017500000000000011546445453013725 5ustar testertesterpolygraph-4.3.2/src/tools/0000755000175000017500000000000011546445454015066 5ustar testertesterpolygraph-4.3.2/src/tools/pop_test.cc0000644000175000017500000002241711546440450017227 0ustar testertester /* Web Polygraph http://www.web-polygraph.org/ * Copyright 2003-2011 The Measurement Factory * Licensed under the Apache License, Version 2.0 */ #include "base/polygraph.h" #include #include "xstd/h/iostream.h" #include #include "xstd/h/iomanip.h" #include "xstd/Rnd.h" #include "base/RndPermut.h" #include "xstd/Ring.h" #include "tools/IntIntHash.h" #include "base/CmdLine.h" #include "base/opts.h" #include "base/polyOpts.h" #include "xstd/gadgets.h" class MyOpts: public OptGrp { public: MyOpts(): theHelpOpt(this, "help", "list of options"), theVersOpt(this, "version", "package version info"), theOutFileName(this, "out ", "redirect console output", "-"), theCachabRatio(this, "cachable <%>", "portion of cachable replies", 0.80), thePublicRatio(this, "public_interest <%>", "portion of URLs shared among all robots", 0.50), theRecurrRatio(this, "recurrence <%>", "probability of a re-visit to a URL", 0.55/0.80), theWorkSetSize(this, "work_set_size ","working set size"), theCacheSize(this, "cache_size ","cache size", BigSize::MB(100)), theObjSize(this, "obj_size ", "average object size", Size::KB(13)), theRobCount(this, "robots ", "total number of robots to simulate", 1), thePopModel(this, "pop_model ", "popularity model", "unif"), theSimLength(this, "sim_length ", "total number of request to simulate", 50000000) {} virtual bool validate() const; public: HelpOpt theHelpOpt; VersionOpt theVersOpt; StrOpt theOutFileName; DblOpt theCachabRatio; DblOpt thePublicRatio; DblOpt theRecurrRatio; BigSizeOpt theWorkSetSize; BigSizeOpt theCacheSize; SizeOpt theObjSize; IntOpt theRobCount; StrOpt thePopModel; IntOpt theSimLength; } TheOpts; class Robot { public: Robot(int anOidOff); void step(); protected: int genOid(); protected: int theOidOff; int thePrivOidCnt; }; class Cache { public: Cache(int aCapacity); double dhp() const { return Percent(theHitCount, theReqCount); } double intervalDhp(); double utilp() const { return Percent(theSize, theCapacity); } bool full() const { return theSize >= theCapacity; } int capacity() const { return theCapacity; } int fill() const { return theFill; } int reqs() const { return theReqCount; } const IntIntHash &hash() const { return theIndex; } void noteObject(int oid); protected: void purge(); protected: int theCapacity; int theSize; int theFill; Ring theRepPolicy; IntIntHash theIndex; int theHitCount; int theReqCount; int theIntvlHitCount; int theIntvlReqCount; }; static struct Server { int theLastOid; } TheServer; typedef int (*PopModelPtr)(RndGen &rng, int lastOid); static int PopModel(RndGen &rng, int lastOid, int wsCap); static Array TheRobots; static Cache *TheCache = 0; static int TheOidLmt = -1; // limit for private and shared oids static int TheTotlWorkSetCap = -1; static int ThePrivWorkSubsetCap = -1; static int TheShrdWorkSubsetCap = -1; static PopModelPtr ThePopModel = 0; static double ThePopModelParam = 0; bool MyOpts::validate() const { if (theWorkSetSize <= 0) cerr << "working set size must be specified" << endl; else return true; return false; } /* Robot */ Robot::Robot(int anOidOff): theOidOff(anOidOff), thePrivOidCnt(0) { } // mimics Client::genOid int Robot::genOid() { static RndGen rng; const bool publicOid = rng() < TheOpts.thePublicRatio; const bool repeatOid = 
rng() < TheOpts.theRecurrRatio; if (publicOid) { if (repeatOid && TheServer.theLastOid > 0) { return PopModel(rng, TheServer.theLastOid, TheShrdWorkSubsetCap); } Assert(TheServer.theLastOid < TheOidLmt); return ++TheServer.theLastOid; } if (repeatOid && thePrivOidCnt > 0) return theOidOff + PopModel(rng, thePrivOidCnt, ThePrivWorkSubsetCap); Assert(thePrivOidCnt < TheOidLmt); return theOidOff + (++thePrivOidCnt); } void Robot::step() { const int oid = genOid(); TheCache->noteObject(oid); } /* Cache */ Cache::Cache(int aCapacity): theCapacity(aCapacity), theSize(0), theFill(0), theRepPolicy(4*aCapacity), theIndex(aCapacity), theHitCount(0), theReqCount(0), theIntvlHitCount(0), theIntvlReqCount(0) { } void Cache::noteObject(int oid) { Assert(oid > 0); theReqCount++; theIntvlReqCount++; RndGen rng(LclPermut(oid)); if (rng() > TheOpts.theCachabRatio) return; // uncachable object IntIntHash::Loc loc; if (theIndex.find(oid, loc)) { theHitCount++; theIntvlHitCount++; theIndex[loc]++; } else { const bool wasFull = full(); theIndex.addAt(loc, oid, 1); theSize++; theFill++; if (wasFull) purge(); } Assert(!theRepPolicy.full()); theRepPolicy.enqueue(oid); } void Cache::purge() { Assert(theSize > 0); while (1) { Assert(!theRepPolicy.empty()); const int oid = theRepPolicy.dequeue(); IntIntHash::Loc loc; Assert(theIndex.find(oid, loc)); int &ttl = theIndex[loc]; Assert(ttl > 0); if (--ttl == 0) { theIndex.delAt(loc); break; } } theSize--; } double Cache::intervalDhp() { const double res = Percent(theIntvlHitCount, theIntvlReqCount); theIntvlHitCount = theIntvlReqCount = 0; return res; } static void configureLogs(int prec) { if (TheOpts.theOutFileName && TheOpts.theOutFileName != "-") redirectOutput(TheOpts.theOutFileName.cstr()); configureStream(cout, prec); configureStream(cerr, prec); configureStream(clog, prec); } /* Pop Models */ static int UnifPopModel(RndGen &rng, int lastOid) { return 1 + rng(0, lastOid); } int Zipf(double alpha, RndGen &rng, int lastOid) { const double rn = rng(); return (int)pow(lastOid+1, pow(rn,alpha)); } static int ZipfPopModel(RndGen &rng, int lastOid) { return 1 + lastOid - Zipf(ThePopModelParam, rng, lastOid); } inline double logd(double x) { return log(x); } static int ZipdPopModel(RndGen &rng, int lastOid) { if (lastOid == 1 || ThePopModelParam >= 1) return lastOid; const double alpha = logd(logd(2)/logd(lastOid+1)) / logd(ThePopModelParam); return 1 + lastOid - Zipf(alpha, rng, lastOid); } static int PopModel(RndGen &rng, int lastOid, int wsCap) { const int offset = lastOid > wsCap ? 
lastOid-wsCap : 0; return offset + ThePopModel(rng, lastOid-offset); } // set some general stuff and // propogate cmd line options to corresponding objects static void configure() { configureLogs(2); // this is total work set size TheTotlWorkSetCap = 1 + (int) (TheOpts.theWorkSetSize / BigSize(TheOpts.theObjSize)); // compute private work subsets for robots and "shared" subset ThePrivWorkSubsetCap = 1 + (int)((1-TheOpts.thePublicRatio)*TheTotlWorkSetCap/(int)TheOpts.theRobCount); TheShrdWorkSubsetCap = 1 + (int)(TheOpts.thePublicRatio*TheTotlWorkSetCap); // note: we do not adjust subsets for uncachable objects because // robots have no idea and do not care what is cachable String pmName = TheOpts.thePopModel; if (const char *p = pmName.chr(':')) { isNum(p+1, ThePopModelParam); pmName = pmName(0, p-pmName.cstr()); } if (TheOpts.thePopModel == "unif") { ThePopModel = &UnifPopModel; } else if (pmName == "zipf") { ThePopModel = &ZipfPopModel; if (ThePopModelParam <= 0) ThePopModelParam = 1; } else if (pmName == "zipd") { ThePopModel = &ZipdPopModel; if (ThePopModelParam <= 0) ThePopModelParam = 0.5/100; } else { cerr << "unknown popularity model `" << TheOpts.thePopModel << "'" << endl; exit(-1); } const int objInCache = 1 + (int) (TheOpts.theCacheSize / BigSize(TheOpts.theObjSize)); TheCache = new Cache(objInCache); // oid limits TheOidLmt = INT_MAX / (1 + (int)TheOpts.theRobCount); for (int i = 0; i < TheOpts.theRobCount; ++i) TheRobots.append(new Robot((i+1)*TheOidLmt)); } static void report() { static int repCnt = 0; if (!repCnt++) { cout << '#' << ' ' << setw(8) << "reqs" << ' ' << setw(8) << "fill#" << ' ' << setw(8) << "fill%" << ' ' << setw(6) << "DHRi" << ' ' << setw(6) << "DHR" << endl; } cout << 'i' << ' ' << setw(8) << TheCache->reqs() << ' ' << setw(8) << TheCache->fill() << ' ' << setw(8) << (int)Percent(TheCache->fill(), TheCache->capacity()) << ' ' << setw(6) << TheCache->intervalDhp() << ' ' << setw(6) << TheCache->dhp() << endl; } static void run() { static RndGen rng; bool full = TheCache->full(); if (!full) cout << "# filling the cache..." 
<< endl; const int repCycle = Max(1, TheOpts.theSimLength/1000, TheCache->capacity() / 20); for (int i = TheOpts.theSimLength; i; --i) { // select a random robot Robot *robot = TheRobots[rng(0, TheRobots.count())]; robot->step(); if ((full && abs(i) % repCycle == 1) || (!full && TheCache->full())) report(); full = TheCache->full(); } } int main(int argc, char *argv[]) { CmdLine cmd; cmd.configure(Array() << &TheOpts); if (!cmd.parse(argc, argv) || !TheOpts.validate()) return -1; configure(); cmd.report(cout); cout << "# " << TheOpts.theCacheSize << " cache fits " << TheCache->capacity() << " objects" << endl; cout << "# " << "working set is " << TheOpts.theWorkSetSize << " or " << TheTotlWorkSetCap << " objects" << endl; cout << "# " << "working set split: " << TheRobots.count() << " * " << ThePrivWorkSubsetCap << " + " << TheShrdWorkSubsetCap << " objects" << endl; cout << "# " << "server and robot world limit is " << TheOidLmt << " oids each" << endl; run(); return 0; } polygraph-4.3.2/src/tools/aka.cc0000644000175000017500000000745511546440450016133 0ustar testertester /* Web Polygraph http://www.web-polygraph.org/ * Copyright 2003-2011 The Measurement Factory * Licensed under the Apache License, Version 2.0 */ #include "base/polygraph.h" #include #include "xstd/h/net/if.h" #include "xstd/h/iostream.h" #include "xstd/h/iomanip.h" #include "xstd/NetIface.h" #include "xstd/gadgets.h" #include "base/CmdLine.h" #include "base/opts.h" #include "base/polyOpts.h" #include "pgl/PglNetAddrRange.h" class MyOpts: public OptGrp { public: MyOpts(): theHelpOpt(this, "help", "list of options"), theVersOpt(this, "version", "package version info"), theIfName(this, "if ", "interface name"), theAliases(this, "aliases ","ip(s) or ip range(s) to set as aliases") {} virtual bool validate() const; virtual ostream &printAnonym(ostream &os) const; virtual bool parseAnonym(const Array &opts); virtual bool canParseAnonym() const { return true; } public: HelpOpt theHelpOpt; VersionOpt theVersOpt; StrOpt theIfName; StrArrOpt theAliases; }; static MyOpts TheOpts; static NetIface *TheIface = 0; /* MyOpt */ ostream &MyOpts::printAnonym(ostream &os) const { return os << "[interface_name] [alias] ..."; } bool MyOpts::parseAnonym(const Array &opts) { if (!opts.count()) return true; int aliasStart = 0; if (!isdigit(*opts[0])) { theIfName.val(opts[0]); aliasStart = 1; } for (int i = aliasStart; i < opts.count(); ++i) theAliases.addItem(opts[i]); return true; } bool MyOpts::validate() const { if (!theIfName) cerr << "interface name is not specified" << endl; else return true; return false; } static void configureLogs(int prec) { configureStream(cout, prec); configureStream(cerr, prec); configureStream(clog, prec); } static void configure() { Socket::Configure(); configureLogs(2); TheIface = new NetIface; TheIface->name(TheOpts.theIfName); } static void getPrimaryAddress() { NetIface::Primaries addrs; if (!TheIface->primaries(addrs)) { clog << TheIface->name() << ": cannot get primary address(es) of " << "the network interface" << endl; return; } if (addrs.vFour) { clog << TheIface->name() << ": primary IPv4 address is " << addrs.vFour << endl; } if (addrs.vSix) { clog << TheIface->name() << ": primary IPv6 address is " << addrs.vSix << endl; } } static void delOldAliases() { const int delCount = TheIface->delAliases(); Must(delCount >= 0); clog << TheIface->name() << ": deleted " << delCount << " old aliases" << endl; } static void addNewAliases(const Array &addrs, const InAddress &netmask) { Array aliases(addrs.count()); 
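	// The parsed aliases arrive as NetAddr objects, but NetIface::addAliases()
	// takes raw InAddress values plus a single netmask, so the loop below
	// copies just the address portion of each entry before installing them.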
for (int i = 0; i < addrs.count(); ++i) aliases.append(addrs[i]->addrN()); Must(TheIface->addAliases(aliases, netmask)); static int newCount = 0; newCount += aliases.count(); clog << TheIface->name() << ": created " << aliases.count() << " new aliases " << "( " << newCount << " total )" << endl; } int main(int argc, char *argv[]) { CmdLine cmd; cmd.configure(Array() << &TheOpts); if (!cmd.parse(argc, argv) || !TheOpts.validate()) return -1; configure(); clog << TheOpts.theIfName << ": "; if (TheOpts.theAliases) TheOpts.theAliases.report(clog << "reseting aliases to "); else clog << "no new alias specified; will just delete old ones."; clog << endl; getPrimaryAddress(); delOldAliases(); for (int i = 0; i < TheOpts.theAliases.val().count(); ++i) { const String &alias = *TheOpts.theAliases.val()[i]; PglNetAddrRange aliasParser; if (!aliasParser.parse(alias)) { cerr << TheOpts.theIfName << ": malformed alias: `" << alias << "' alias" << endl; return -2; } Array aliases; aliasParser.toAddrs(aliases); InAddress netmask; aliasParser.netmask(netmask); addNewAliases(aliases, netmask); } return 0; } polygraph-4.3.2/src/tools/distr_test.cc0000644000175000017500000001142111546440450017547 0ustar testertester /* Web Polygraph http://www.web-polygraph.org/ * Copyright 2003-2011 The Measurement Factory * Licensed under the Apache License, Version 2.0 */ #include "base/polygraph.h" #include #include "xstd/h/iostream.h" #include #include "xstd/h/iomanip.h" #include "base/CmdLine.h" #include "base/ObjId.h" #include "base/RndPermut.h" #include "base/opts.h" #include "base/polyOpts.h" #include "base/histograms.h" #include "xstd/gadgets.h" class MyOpts: public OptGrp { public: MyOpts(): theHelpOpt(this, "help", "list of options"), theVersOpt(this, "version", "package version info"), theOutFileName(this, "out ", "redirect console output", "-"), theDistr(this, "distr ", "distribution to test"), theStep(this, "hist_step <%>", "bin `size' in histogram output", 0.05/100), theCount(this, "count ", "number of trials", 100000), useOidSeeding(this, "oid_seeding ", "oid-based r.n.g. seeding", false), theRngSeed(this, "rng_seed ", "r.n.g. seed", 1), theLclRngSeed(this, "local_rng_seed ", "per-process r.n.g. seed", 1), theGlbRngSeed(this, "global_rng_seed ", "per-test r.n.g. 
seed", 1), theWorldCount(this, "world_count ", "number of simulated worlds", 1), useUniqueWorld(this, "unique_world ", "use URL set that is unique across runs", true) { theDistr.argType("num"); } virtual bool validate() const; public: HelpOpt theHelpOpt; VersionOpt theVersOpt; StrOpt theOutFileName; DistrOpt theDistr; DblOpt theStep; IntOpt theCount; BoolOpt useOidSeeding; IntOpt theRngSeed; IntOpt theLclRngSeed; IntOpt theGlbRngSeed; IntOpt theWorldCount; BoolOpt useUniqueWorld; } TheOpts; bool MyOpts::validate() const { if (!theDistr) cerr << "must specify the distribution to test" << endl; else return true; return false; } static void configureLogs(int prec) { if (TheOpts.theOutFileName && TheOpts.theOutFileName != "-") redirectOutput(TheOpts.theOutFileName.cstr()); configureStream(cout, prec); configureStream(cerr, prec); configureStream(clog, prec); } // set some general stuff and // propogate cmd line options to corresponding objects static void configure() { configureLogs(2); if (TheOpts.useOidSeeding) { if (TheOpts.theRngSeed.wasSet()) { cerr << "error: rng_seed option can not be used with " "oid-based seeding" << endl << xexit; } // set random seeds GlbPermut().reseed(TheOpts.theGlbRngSeed); LclPermut().reseed(TheOpts.theLclRngSeed); // use the seed as uid "space" index for non-unique worlds if (!TheOpts.useUniqueWorld) UniqId::Space(TheOpts.theLclRngSeed); } else { if (TheOpts.theLclRngSeed.wasSet() || TheOpts.theGlbRngSeed.wasSet() || TheOpts.theWorldCount.wasSet() || TheOpts.useUniqueWorld.wasSet()) { cerr << "error: local_rng_seed, global_rng_seed, " "world_count, and unique_world options can " "only be used with oid-based seeding" << endl << xexit; } TheOpts.theDistr.condDistr()->rndGen()->seed(TheOpts.theRngSeed); } if (TheOpts.theCount % TheOpts.theWorldCount) { cerr << "warning: trials count is not divisible by worlds count" << endl; } } static void reportOption(const Opt& opt) { static const String prefix = "config."; const int plen = opt.name().len() + 1; cout << prefix << opt.name() << ':' << setw(20-plen) << ""; opt.report(cout); cout << endl; } static void reportConfiguration() { reportOption(TheOpts.theOutFileName); reportOption(TheOpts.theDistr); reportOption(TheOpts.theStep); reportOption(TheOpts.theCount); reportOption(TheOpts.useOidSeeding); if (TheOpts.useOidSeeding) { reportOption(TheOpts.theLclRngSeed); reportOption(TheOpts.theGlbRngSeed); reportOption(TheOpts.theWorldCount); reportOption(TheOpts.useUniqueWorld); } else reportOption(TheOpts.theRngSeed); cout << endl; } static void testOneWorld(RndDistr *const distr, Histogram *const hist) { const int count = TheOpts.theCount / TheOpts.theWorldCount; ObjId oid; oid.world(UniqId::Create()); for (int i = 0; i < count; ++i) { if (TheOpts.useOidSeeding) { oid.name(i + 1); const int seed = GlbPermut(oid.hash(), rndRepSize); distr->rndGen()->seed(seed); } hist->record((int)Min(distr->trial(), (double)INT_MAX)); } } static void test() { RndDistr *const distr = TheOpts.theDistr.condDistr(); Histogram *const hist = new Log2Hist(); for (int i = 0; i < TheOpts.theWorldCount; ++i) testOneWorld(distr, hist); hist->report(TheOpts.theStep, cout); cout << endl << hist->stats(); delete hist; } int main(int argc, char *argv[]) { CmdLine cmd; cmd.configure(Array() << &TheOpts); if (!cmd.parse(argc, argv) || !TheOpts.validate() || !TheOpts.validate()) return -1; configure(); reportConfiguration(); test(); return 0; } polygraph-4.3.2/src/tools/IntIntHash.cc0000644000175000017500000000250211546440450017374 0ustar testertester /* Web 
Polygraph http://www.web-polygraph.org/ * Copyright 2003-2011 The Measurement Factory * Licensed under the Apache License, Version 2.0 */ #include "base/polygraph.h" #include "xstd/h/string.h" #include "tools/IntIntHash.h" #include "xstd/gadgets.h" IntIntHash::IntIntHash(int aCapacity): theHashCap(0) { theHashCap = (aCapacity + aCapacity/3 + 7) | 1; theIndex = new (IntIntHashItem*[theHashCap]); memset(theIndex, 0, sizeof(IntIntHashItem*)*theHashCap); } IntIntHash::~IntIntHash() { delete[] theIndex; } double IntIntHash::utilp() const { return Percent(theHashCnt, theHashCap); } bool IntIntHash::find(int key, Loc &loc) const { Assert(key); loc = theIndex + hashIdx(key); // search the chain while (*loc && (*loc)->key < key) loc = &(*loc)->next; return *loc && (*loc)->key == key; } void IntIntHash::addAt(Loc loc, int key, int val) { Assert(key); IntIntHashItem *i = getNewItem(); i->next = *loc; i->key = key; i->val = val; *loc = i; if (!i->next && theIndex <= loc && loc < theIndex+theHashCap) { theHashCnt++; Assert(theHashCnt <= theHashCap); } } void IntIntHash::delAt(Loc loc) { Assert(*loc && (*loc)->key); IntIntHashItem *i = *loc; *loc = i->next; putOldItem(i); if (!*loc && theIndex <= loc && loc < theIndex+theHashCap) { theHashCnt--; Assert(theHashCnt >= 0); } } polygraph-4.3.2/src/tools/dns-cfg.man0000644000175000017500000000242011336340427017071 0ustar testertester.\" DO NOT MODIFY THIS FILE! It was generated by help2man 1.36. .TH POLYGRAPH-DNS-CFG "1" "February 2010" "polygraph-dns-cfg - Web Polygraph" "User Commands" .SH NAME polygraph-dns-cfg \- DNS server configuration tool .SH SYNOPSIS .B dns-cfg [\fI--option \fR...] .SH DESCRIPTION Given a zone name, DNS names, and IP addresses, dns-cfg builds configuration suitable for use with BIND and, perhaps, other DNS servers. Dns-cfg output consists of three parts. The first part is the text to cut-and-paste into BIND's named.conf file. This part is sent to the standard output. The outer two parts are direct and reverse zone files. Those may be quite large and are dumped on disk into the appropriately named files. Dns-cfg can use PGL configuration as the source of information. .SH OPTIONS .TP \fB\-\-help\fR list of options .TP \fB\-\-version\fR package version info .TP \fB\-\-config\fR PGL configuration .TP \fB\-\-cfg_dirs\fR directories for PGL #includes .TP \fB\-\-zone\fR zone name .TP \fB\-\-addresses\fR IP addresses .TP \fB\-\-names\fR domain names .SH COPYRIGHT Copyright \(co 2003-2006 The Measurement Factory, Inc. .SH "SEE ALSO" .BR polygraph (7) \- general information and a list of programs .B \%http://www.web-polygraph.org/ \- project web site polygraph-4.3.2/src/tools/distr-test.man0000644000175000017500000000213111466527563017665 0ustar testertester.\" DO NOT MODIFY THIS FILE! It was generated by help2man 1.37.1. .TH POLYGRAPH-DISTR-TEST "1" "November 2010" "polygraph-distr-test - Web Polygraph" "User Commands" .SH NAME polygraph-distr-test \- distribution test .SH SYNOPSIS .B distr-test [\fI--option \fR...] .SH OPTIONS .TP \fB\-\-help\fR list of options .TP \fB\-\-version\fR package version info .TP \fB\-\-out\fR redirect console output .TP \fB\-\-distr\fR distribution to test .TP \fB\-\-hist_step\fR <%> bin `size' in histogram output .TP \fB\-\-count\fR number of trials .TP \fB\-\-oid_seeding\fR oid\-based r.n.g. seeding .TP \fB\-\-rng_seed\fR r.n.g. seed .TP \fB\-\-local_rng_seed\fR per\-process r.n.g. seed .TP \fB\-\-global_rng_seed\fR per\-test r.n.g. 
seed .TP \fB\-\-world_count\fR number of simulated worlds .TP \fB\-\-unique_world\fR use URL set that is unique across runs .SH COPYRIGHT Copyright \(co 2003-2006 The Measurement Factory, Inc. .SH "SEE ALSO" .BR polygraph (7) \- general information and a list of programs .B \%http://www.web-polygraph.org/ \- project web site polygraph-4.3.2/src/tools/aka.man0000644000175000017500000000275311336340427016315 0ustar testertester.\" DO NOT MODIFY THIS FILE! It was generated by help2man 1.36. .TH POLYGRAPH-AKA "1" "February 2010" "polygraph-aka - Web Polygraph" "User Commands" .SH NAME polygraph-aka \- IP aliases manipulation tool .SH SYNOPSIS .B aka [\fI--option \fR...] [\fIinterface_name\fR] [\fIalias\fR] ... .SH DESCRIPTION Aka is used to setup large number of aliases during Polygraph experiments with many robots and servers. The aliases option specifies what alias or aliases you want to have on the given interface. Aka recognizes IP addresses in PGL address format, including dotted IP range. Aka will try to guess the subnet or you can use an explicit subnet specification. The number of aliases you can set depends on your OS. Moreover, some OSes may support large number of aliases (more than 1000) but with a significant performance penalty. Note that you can just put alias specs after all other options and the interface name (see aka's usage line). Aka will delete all old aliases before setting new ones. If you do not specify the new aliases, the old ones will still be deleted (handy for cleaning up after yourself). .SH OPTIONS .TP \fB\-\-help\fR list of options .TP \fB\-\-version\fR package version info .TP \fB\-\-if\fR interface name .TP \fB\-\-aliases\fR ip(s) or ip range(s) to set as aliases .SH COPYRIGHT Copyright \(co 2003-2006 The Measurement Factory, Inc. 
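.\" A minimal example invocation; the interface name and the PGL-style
.\" dotted IP range below are placeholders, adjust them to your bench:
.\"   aka --if eth0 --aliases 10.11.1.1-250
.\" Omitting --aliases deletes the existing aliases without adding new ones.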
.SH "SEE ALSO" .BR polygraph (7) \- general information and a list of programs .B \%http://www.web-polygraph.org/ \- project web site polygraph-4.3.2/src/tools/dns_cfg.cc0000644000175000017500000002067711546440450017003 0ustar testertester /* Web Polygraph http://www.web-polygraph.org/ * Copyright 2003-2011 The Measurement Factory * Licensed under the Apache License, Version 2.0 */ #include "base/polygraph.h" #include "xstd/h/iostream.h" #include "xstd/h/sstream.h" #include #include "xstd/h/iomanip.h" #include "xstd/gadgets.h" #include "base/CmdLine.h" #include "base/opts.h" #include "base/polyOpts.h" #include "pgl/PglArraySym.h" #include "pgl/PglNetAddrSym.h" #include "pgl/PglNetAddrRange.h" #include "pgl/PglNetAddrRangeSym.h" #include "pgl/AddrMapSym.h" #include "pgl/PglPp.h" #include "pgl/PglStaticSemx.h" class MyOpts: public OptGrp { public: MyOpts(): theHelpOpt(this, "help", "list of options"), theVersOpt(this, "version", "package version info"), theCfgName(this, "config ", "PGL configuration"), theCfgDirs(this, "cfg_dirs ", "directories for PGL #includes"), theZoneName(this, "zone ", "zone name"), theAddrStr(this, "addresses ","IP addresses"), theNameStr(this, "names ", "domain names") {} virtual bool validate() const; //virtual ostream &printAnonym(ostream &os) const; //virtual bool parseAnonym(const Array &opts); //virtual bool canParseAnonym() const { return true; } public: HelpOpt theHelpOpt; VersionOpt theVersOpt; StrOpt theCfgName; StrArrOpt theCfgDirs; StrOpt theZoneName; StrOpt theAddrStr; StrOpt theNameStr; }; static String TheZoneName; static String TheZoneOrRoot; static String TheNameServerName; static String TheNameServerIp; static ArraySym TheAddrs("addr"); static ArraySym TheNames("addr"); static MyOpts TheOpts; /* MyOpt */ bool MyOpts::validate() const { if (theCfgName) { if (theZoneName) cerr << "--config is mutually exclusive with --zone" << endl; else if (theAddrStr || theNameStr) cerr << "--config is mutually exclusive with --addresses and --names" << endl; else return true; } else { if (!theZoneName) cerr << "must specify the zone name" << endl; else if (!theAddrStr || !theNameStr) cerr << "must specify addresses and domain names" << endl; else return true; } return false; } static void configureLogs(int prec) { configureStream(cout, prec); configureStream(cerr, prec); configureStream(clog, prec); } static void configureViaPgl() { TheOpts.theCfgDirs.copy(PglPp::TheDirs); PglStaticSemx::Interpret(TheOpts.theCfgName); if (PglStaticSemx::TheAddrMapsToUse.count() != 1) { cerr << TheOpts.theCfgName << ": must use() exactly one address map" << ", but " << PglStaticSemx::TheAddrMapsToUse.count() << " maps " << "were used" << endl << xexit; } const AddrMapSym *ams = PglStaticSemx::TheAddrMapsToUse.last(); TheZoneName = ams->zone(); TheAddrs.append(*ams->addressesSym()); TheNames.append(*ams->namesSym()); if (!TheZoneName) { cerr << ams->loc() << "address map must specify zone name" << endl << xexit; } if (!TheAddrs.count()) { cerr << ams->loc() << "address map must specify at least one IP addresses" << endl << xexit; } if (!TheNames.count()) { cerr << ams->loc() << "address map must specify at least one domain name" << endl << xexit; } } static void configureViaCmdLine() { PglNetAddrRange addrs; PglNetAddrRange names; if (!addrs.parse(TheOpts.theAddrStr)) cerr << here << "malformed address range: `" << TheOpts.theAddrStr << "'" << endl << xexit; if (!names.parse(TheOpts.theNameStr)) cerr << here << "malformed name range: `" << TheOpts.theNameStr << "'" << endl << xexit; 
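	// Wrap the two parsed ranges in PGL symbols so that this command-line
	// path fills the same TheAddrs/TheNames arrays as the --config path above.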
NetAddrRangeSym as; as.range(new PglNetAddrRange(addrs)); TheAddrs.add(as); NetAddrRangeSym ns; ns.range(new PglNetAddrRange(names)); TheNames.add(ns); TheZoneName = TheOpts.theZoneName; if (!TheZoneName) cerr << "missing zone name (use --zone option)" << endl << xexit; } static void configure() { configureLogs(2); if (TheOpts.theCfgName) configureViaPgl(); else configureViaCmdLine(); TheZoneOrRoot = TheZoneName == "." ? "root" : TheZoneName; TheNameServerName = "ns." + TheZoneOrRoot + "."; // XXX: extract addresses from Robot.dns_resolver if possible TheNameServerIp = "172.16.101.100"; if (TheAddrs.count() > TheNames.count()) { cerr << here << "warning: the number of IP addresses (" << TheAddrs.count() << ") exceeds the number of domain names (" << TheNames.count() << "); ignoring extra IPs" << endl; } } static void printSoa(ostream &os) { const String serialNo = "200412021"; // XXX; os << "$TTL 3600" << endl << endl; os << "@ IN SOA " << TheNameServerName << " dnsmaster." << TheZoneOrRoot << '.' // person responsible << " (" << endl << "\t " << serialNo << "; Serial" << endl << "\t 10800 ; Refresh" << endl << "\t 1801 ; Retry" << endl << "\t 3600000 ; Expire" << endl << "\t 259200 ; Minimum" << endl << ")" << endl << endl; // NS record os << "\tIN\tNS\t" << TheNameServerName << endl; os << endl; } static String reverseAddr(const NetAddr &addr, int octets) { ostringstream os; for (int i = 0; i= 0 && name[end] == '.') { return (addr.isDynamicName() ? "*" : "") + addr.addrA()(0, end); } } cerr << here << "warning: " << name << " does not end with zone suffix (" << TheZoneName << "), but probably should" << endl; return name; } static String addressType(const InAddress &ina) { static const String A = "A"; static const String AAAA = "AAAA"; if (AF_INET6 == ina.family()) return AAAA; return A; } static String reverseZone(const InAddress &ina) { static const String INADDRARPA = "IN-ADDR.ARPA"; static const String IP6INT = "IP6.INT"; if (AF_INET6 == ina.family()) return IP6INT; return INADDRARPA; } int main(int argc, char *argv[]) { CmdLine cmd; cmd.configure(Array() << &TheOpts); if (!cmd.parse(argc, argv) || !TheOpts.validate()) return -1; configure(); const String dirFname(TheZoneOrRoot); const String revFname(TheZoneOrRoot + ".rev"); ofstream direct(dirFname.cstr()); ofstream reverse(revFname.cstr()); NetAddr commonAddr; const String commonName = TheZoneName; // SOA records printSoa(direct); printSoa(reverse); // nameserver IP (only goes in to direct zone) direct << TheNameServerName << "\tIN\tA\t" << TheNameServerIp << endl; direct << endl; // A and PTR records for (int n = 0, a = 0; n < TheNames.count(); ++n, ++a) { if (a == TheAddrs.count()) a = 0; const NetAddrSym &as = (NetAddrSym&)TheAddrs.item(a)->cast("addr"); const NetAddrSym &ns = (NetAddrSym&)TheNames.item(n)->cast("addr"); const NetAddr addr(as.val().addrN(), -1); // remove port number const NetAddr name(ns.val().addrA(), -1); // remove port number if (!n) commonAddr = addr; const char *pfx = ""; if (name.addrA().chr('_')) { pfx = ";"; // comment cerr << here << "error: skipping invalid name '" << name << "' " << "because underscores are not allowed in DNS" << endl; } direct << pfx << forwardName(name) << "\t\tIN\t" << addressType(as.val().addrN()) << "\t" << as.val().addrN().rawImage() << endl; reverse << pfx << reverseAddr(addr) << "." << reverseZone(addr.addrN()) << "." << "\t\tIN\tPTR\t" << reverseName(name) << '.' 
<< endl; } cout << "# BIND configuration for named.conf is below " << endl << "# zone files are " << revFname << " and " << dirFname << endl << endl; cout << "options {" << endl << "\tdirectory \"/etc/namedb\";" << endl << "\trecursion no;" << endl << "};" << endl << endl; cout << "zone \"" // Just put the reverse zone name in the config file, without // and leading octets. // << reverseAddr(commonAddr, 2) << "." << reverseZone(commonAddr.addrN()) << "\" {" << endl << "\ttype master;" << endl << "\tfile \"" << revFname << "\";" << endl << "};" << endl << endl; cout << "zone \"" << commonName << "\" {" << endl << "\ttype master;" << endl << "\tfile \"" << dirFname << "\";" << endl << "};" << endl << endl; return 0; } polygraph-4.3.2/src/tools/Makefile.in0000644000175000017500000006272711546445454017151 0ustar testertester# Makefile.in generated by automake 1.11.1 from Makefile.am. # @configure_input@ # Copyright (C) 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001, 2002, # 2003, 2004, 2005, 2006, 2007, 2008, 2009 Free Software Foundation, # Inc. # This Makefile.in is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY, to the extent permitted by law; without # even the implied warranty of MERCHANTABILITY or FITNESS FOR A # PARTICULAR PURPOSE. @SET_MAKE@ # # Various utilities # # settings common to all Makefile.ams VPATH = @srcdir@ pkgdatadir = $(datadir)/@PACKAGE@ pkgincludedir = $(includedir)/@PACKAGE@ pkglibdir = $(libdir)/@PACKAGE@ pkglibexecdir = $(libexecdir)/@PACKAGE@ am__cd = CDPATH="$${ZSH_VERSION+.}$(PATH_SEPARATOR)" && cd install_sh_DATA = $(install_sh) -c -m 644 install_sh_PROGRAM = $(install_sh) -c install_sh_SCRIPT = $(install_sh) -c INSTALL_HEADER = $(INSTALL_DATA) transform = $(program_transform_name) NORMAL_INSTALL = : PRE_INSTALL = : POST_INSTALL = : NORMAL_UNINSTALL = : PRE_UNINSTALL = : POST_UNINSTALL = : build_triplet = @build@ host_triplet = @host@ bin_PROGRAMS = aka$(EXEEXT) pop-test$(EXEEXT) distr-test$(EXEEXT) \ rng-test$(EXEEXT) dns-cfg$(EXEEXT) pgl2ips$(EXEEXT) DIST_COMMON = $(dist_man1_MANS) $(srcdir)/Makefile.am \ $(srcdir)/Makefile.in $(top_srcdir)/common.am subdir = src/tools ACLOCAL_M4 = $(top_srcdir)/aclocal.m4 am__aclocal_m4_deps = $(top_srcdir)/acinclude.m4 \ $(top_srcdir)/cfgaux/ax_create_stdint_h.m4 \ $(top_srcdir)/cfgaux/check_zlib.m4 $(top_srcdir)/configure.in am__configure_deps = $(am__aclocal_m4_deps) $(CONFIGURE_DEPENDENCIES) \ $(ACLOCAL_M4) mkinstalldirs = $(install_sh) -d CONFIG_HEADER = $(top_builddir)/config.h CONFIG_CLEAN_FILES = CONFIG_CLEAN_VPATH_FILES = am__installdirs = "$(DESTDIR)$(bindir)" "$(DESTDIR)$(man1dir)" PROGRAMS = $(bin_PROGRAMS) am_aka_OBJECTS = aka.$(OBJEXT) aka_OBJECTS = $(am_aka_OBJECTS) aka_DEPENDENCIES = ../pgl/libpgl.a ../xparser/libxparser.a $(LDADD) am_distr_test_OBJECTS = distr_test.$(OBJEXT) distr_test_OBJECTS = $(am_distr_test_OBJECTS) distr_test_LDADD = $(LDADD) distr_test_DEPENDENCIES = ../base/libbase.a ../xstd/libxstd.a am_dns_cfg_OBJECTS = dns_cfg.$(OBJEXT) dns_cfg_OBJECTS = $(am_dns_cfg_OBJECTS) dns_cfg_DEPENDENCIES = ../pgl/libpgl.a ../xparser/libxparser.a \ $(LDADD) am_pgl2ips_OBJECTS = pgl2ips.$(OBJEXT) pgl2ips_OBJECTS = $(am_pgl2ips_OBJECTS) pgl2ips_DEPENDENCIES = ../pgl/libpgl.a ../xparser/libxparser.a \ ../runtime/libruntime.a $(LDADD) am_pop_test_OBJECTS = pop_test.$(OBJEXT) IntIntHash.$(OBJEXT) 
pop_test_OBJECTS = $(am_pop_test_OBJECTS) pop_test_LDADD = $(LDADD) pop_test_DEPENDENCIES = ../base/libbase.a ../xstd/libxstd.a am_rng_test_OBJECTS = rng_test.$(OBJEXT) rng_test_OBJECTS = $(am_rng_test_OBJECTS) rng_test_LDADD = $(LDADD) rng_test_DEPENDENCIES = ../base/libbase.a ../xstd/libxstd.a DEFAULT_INCLUDES = depcomp = $(SHELL) $(top_srcdir)/cfgaux/depcomp am__depfiles_maybe = depfiles am__mv = mv -f CXXCOMPILE = $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) \ $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CXXFLAGS) $(CXXFLAGS) LTCXXCOMPILE = $(LIBTOOL) --tag=CXX $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) \ --mode=compile $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) \ $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CXXFLAGS) $(CXXFLAGS) CXXLD = $(CXX) CXXLINK = $(LIBTOOL) --tag=CXX $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) \ --mode=link $(CXXLD) $(AM_CXXFLAGS) $(CXXFLAGS) $(AM_LDFLAGS) \ $(LDFLAGS) -o $@ COMPILE = $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) \ $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) LTCOMPILE = $(LIBTOOL) --tag=CC $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) \ --mode=compile $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) \ $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) CCLD = $(CC) LINK = $(LIBTOOL) --tag=CC $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) \ --mode=link $(CCLD) $(AM_CFLAGS) $(CFLAGS) $(AM_LDFLAGS) \ $(LDFLAGS) -o $@ SOURCES = $(aka_SOURCES) $(distr_test_SOURCES) $(dns_cfg_SOURCES) \ $(pgl2ips_SOURCES) $(pop_test_SOURCES) $(rng_test_SOURCES) DIST_SOURCES = $(aka_SOURCES) $(distr_test_SOURCES) $(dns_cfg_SOURCES) \ $(pgl2ips_SOURCES) $(pop_test_SOURCES) $(rng_test_SOURCES) am__vpath_adj_setup = srcdirstrip=`echo "$(srcdir)" | sed 's|.|.|g'`; am__vpath_adj = case $$p in \ $(srcdir)/*) f=`echo "$$p" | sed "s|^$$srcdirstrip/||"`;; \ *) f=$$p;; \ esac; am__strip_dir = f=`echo $$p | sed -e 's|^.*/||'`; am__install_max = 40 am__nobase_strip_setup = \ srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*|]/\\\\&/g'` am__nobase_strip = \ for p in $$list; do echo "$$p"; done | sed -e "s|$$srcdirstrip/||" am__nobase_list = $(am__nobase_strip_setup); \ for p in $$list; do echo "$$p $$p"; done | \ sed "s| $$srcdirstrip/| |;"' / .*\//!s/ .*/ ./; s,\( .*\)/[^/]*$$,\1,' | \ $(AWK) 'BEGIN { files["."] = "" } { files[$$2] = files[$$2] " " $$1; \ if (++n[$$2] == $(am__install_max)) \ { print $$2, files[$$2]; n[$$2] = 0; files[$$2] = "" } } \ END { for (dir in files) print dir, files[dir] }' am__base_list = \ sed '$$!N;$$!N;$$!N;$$!N;$$!N;$$!N;$$!N;s/\n/ /g' | \ sed '$$!N;$$!N;$$!N;$$!N;s/\n/ /g' man1dir = $(mandir)/man1 NROFF = nroff MANS = $(dist_man1_MANS) ETAGS = etags CTAGS = ctags DISTFILES = $(DIST_COMMON) $(DIST_SOURCES) $(TEXINFOS) $(EXTRA_DIST) ACLOCAL = @ACLOCAL@ AMTAR = @AMTAR@ AR = @AR@ AR_R = @AR_R@ AUTOCONF = @AUTOCONF@ AUTOHEADER = @AUTOHEADER@ AUTOMAKE = @AUTOMAKE@ AWK = @AWK@ CC = @CC@ CCDEPMODE = @CCDEPMODE@ CFLAGS = @CFLAGS@ CPP = @CPP@ CPPFLAGS = @CPPFLAGS@ CXX = @CXX@ CXXCPP = @CXXCPP@ CXXDEPMODE = @CXXDEPMODE@ CXXFLAGS = @CXXFLAGS@ CYGPATH_W = @CYGPATH_W@ DEFS = @DEFS@ DEPDIR = @DEPDIR@ DSYMUTIL = @DSYMUTIL@ DUMPBIN = @DUMPBIN@ ECHO_C = @ECHO_C@ ECHO_N = @ECHO_N@ ECHO_T = @ECHO_T@ EGREP = @EGREP@ EXEEXT = @EXEEXT@ FGREP = @FGREP@ GREP = @GREP@ HELP2MAN = @HELP2MAN@ INSTALL = @INSTALL@ INSTALL_DATA = @INSTALL_DATA@ INSTALL_PROGRAM = @INSTALL_PROGRAM@ INSTALL_SCRIPT = @INSTALL_SCRIPT@ INSTALL_STRIP_PROGRAM = @INSTALL_STRIP_PROGRAM@ LD = @LD@ LDFLAGS = @LDFLAGS@ LDFLAG_RDYNAMIC = @LDFLAG_RDYNAMIC@ LIBOBJS = @LIBOBJS@ LIBS = @LIBS@ LIBTOOL = @LIBTOOL@ LIB_CURSES = @LIB_CURSES@ LIB_DL = @LIB_DL@ LIPO = @LIPO@ 
LN_S = @LN_S@ LTLIBOBJS = @LTLIBOBJS@ MAINT = @MAINT@ MAKEINFO = @MAKEINFO@ MKDIR_P = @MKDIR_P@ NM = @NM@ NMEDIT = @NMEDIT@ OBJDUMP = @OBJDUMP@ OBJEXT = @OBJEXT@ OTOOL = @OTOOL@ OTOOL64 = @OTOOL64@ PACKAGE = @PACKAGE@ PACKAGE_BUGREPORT = @PACKAGE_BUGREPORT@ PACKAGE_NAME = @PACKAGE_NAME@ PACKAGE_STRING = @PACKAGE_STRING@ PACKAGE_TARNAME = @PACKAGE_TARNAME@ PACKAGE_URL = @PACKAGE_URL@ PACKAGE_VERSION = @PACKAGE_VERSION@ PATH_SEPARATOR = @PATH_SEPARATOR@ RANLIB = @RANLIB@ SED = @SED@ SET_MAKE = @SET_MAKE@ SHELL = @SHELL@ STRIP = @STRIP@ VERSION = @VERSION@ abs_builddir = @abs_builddir@ abs_srcdir = @abs_srcdir@ abs_top_builddir = @abs_top_builddir@ abs_top_srcdir = @abs_top_srcdir@ ac_ct_CC = @ac_ct_CC@ ac_ct_CXX = @ac_ct_CXX@ ac_ct_DUMPBIN = @ac_ct_DUMPBIN@ am__include = @am__include@ am__leading_dot = @am__leading_dot@ am__quote = @am__quote@ am__tar = @am__tar@ am__untar = @am__untar@ bindir = @bindir@ build = @build@ build_alias = @build_alias@ build_cpu = @build_cpu@ build_os = @build_os@ build_vendor = @build_vendor@ builddir = @builddir@ datadir = @datadir@ datarootdir = @datarootdir@ docdir = @docdir@ dvidir = @dvidir@ exec_prefix = @exec_prefix@ host = @host@ host_alias = @host_alias@ host_cpu = @host_cpu@ host_os = @host_os@ host_vendor = @host_vendor@ htmldir = @htmldir@ includedir = @includedir@ infodir = @infodir@ install_sh = @install_sh@ libdir = @libdir@ libexecdir = @libexecdir@ localedir = @localedir@ localstatedir = @localstatedir@ lt_ECHO = @lt_ECHO@ mandir = @mandir@ mkdir_p = @mkdir_p@ oldincludedir = @oldincludedir@ pdfdir = @pdfdir@ prefix = @prefix@ program_transform_name = @program_transform_name@ psdir = @psdir@ sbindir = @sbindir@ sharedstatedir = @sharedstatedir@ srcdir = @srcdir@ std_include = @std_include@ sysconfdir = @sysconfdir@ target_alias = @target_alias@ top_build_prefix = @top_build_prefix@ top_builddir = @top_builddir@ top_srcdir = @top_srcdir@ dist_man1_MANS = \ aka.man \ pop-test.man \ distr-test.man \ rng-test.man \ dns-cfg.man \ pgl2ips.man EXTRA_DIST = \ aka.h2m \ rng-test.h2m \ dns-cfg.h2m \ pgl2ips.h2m pop_test_SOURCES = \ pop_test.cc \ IntIntHash.h \ IntIntHash.cc distr_test_SOURCES = \ distr_test.cc rng_test_SOURCES = \ rng_test.cc LDADD = \ ../base/libbase.a \ ../xstd/libxstd.a aka_SOURCES = \ aka.cc aka_LDADD = \ ../pgl/libpgl.a \ ../xparser/libxparser.a \ \ $(LDADD) dns_cfg_SOURCES = \ dns_cfg.cc dns_cfg_LDADD = \ ../pgl/libpgl.a \ ../xparser/libxparser.a \ \ $(LDADD) pgl2ips_SOURCES = \ pgl2ips.cc pgl2ips_LDADD = \ ../pgl/libpgl.a \ ../xparser/libxparser.a \ ../runtime/libruntime.a \ \ $(LDADD) # top_builddir/ is needed for generated config.h # top_builddir/src/ is needed for generated src/xstd/h/stdint.h # top_srcdir/ is needed for post-config.h # TODO: move post-config.h and generated config.h to src? AM_CPPFLAGS = -I$(top_builddir) -I$(top_builddir)/src -I$(top_srcdir) -I$(top_srcdir)/src all: all-am .SUFFIXES: .SUFFIXES: .cc .lo .o .obj $(srcdir)/Makefile.in: @MAINTAINER_MODE_TRUE@ $(srcdir)/Makefile.am $(top_srcdir)/common.am $(am__configure_deps) @for dep in $?; do \ case '$(am__configure_deps)' in \ *$$dep*) \ ( cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh ) \ && { if test -f $@; then exit 0; else break; fi; }; \ exit 1;; \ esac; \ done; \ echo ' cd $(top_srcdir) && $(AUTOMAKE) --foreign src/tools/Makefile'; \ $(am__cd) $(top_srcdir) && \ $(AUTOMAKE) --foreign src/tools/Makefile .PRECIOUS: Makefile Makefile: $(srcdir)/Makefile.in $(top_builddir)/config.status @case '$?' 
in \ *config.status*) \ cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh;; \ *) \ echo ' cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe)'; \ cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe);; \ esac; $(top_builddir)/config.status: $(top_srcdir)/configure $(CONFIG_STATUS_DEPENDENCIES) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(top_srcdir)/configure: @MAINTAINER_MODE_TRUE@ $(am__configure_deps) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(ACLOCAL_M4): @MAINTAINER_MODE_TRUE@ $(am__aclocal_m4_deps) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(am__aclocal_m4_deps): install-binPROGRAMS: $(bin_PROGRAMS) @$(NORMAL_INSTALL) test -z "$(bindir)" || $(MKDIR_P) "$(DESTDIR)$(bindir)" @list='$(bin_PROGRAMS)'; test -n "$(bindir)" || list=; \ for p in $$list; do echo "$$p $$p"; done | \ sed 's/$(EXEEXT)$$//' | \ while read p p1; do if test -f $$p || test -f $$p1; \ then echo "$$p"; echo "$$p"; else :; fi; \ done | \ sed -e 'p;s,.*/,,;n;h' -e 's|.*|.|' \ -e 'p;x;s,.*/,,;s/$(EXEEXT)$$//;$(transform);s/$$/$(EXEEXT)/' | \ sed 'N;N;N;s,\n, ,g' | \ $(AWK) 'BEGIN { files["."] = ""; dirs["."] = 1 } \ { d=$$3; if (dirs[d] != 1) { print "d", d; dirs[d] = 1 } \ if ($$2 == $$4) files[d] = files[d] " " $$1; \ else { print "f", $$3 "/" $$4, $$1; } } \ END { for (d in files) print "f", d, files[d] }' | \ while read type dir files; do \ if test "$$dir" = .; then dir=; else dir=/$$dir; fi; \ test -z "$$files" || { \ echo " $(INSTALL_PROGRAM_ENV) $(LIBTOOL) $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=install $(INSTALL_PROGRAM) $$files '$(DESTDIR)$(bindir)$$dir'"; \ $(INSTALL_PROGRAM_ENV) $(LIBTOOL) $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=install $(INSTALL_PROGRAM) $$files "$(DESTDIR)$(bindir)$$dir" || exit $$?; \ } \ ; done uninstall-binPROGRAMS: @$(NORMAL_UNINSTALL) @list='$(bin_PROGRAMS)'; test -n "$(bindir)" || list=; \ files=`for p in $$list; do echo "$$p"; done | \ sed -e 'h;s,^.*/,,;s/$(EXEEXT)$$//;$(transform)' \ -e 's/$$/$(EXEEXT)/' `; \ test -n "$$list" || exit 0; \ echo " ( cd '$(DESTDIR)$(bindir)' && rm -f" $$files ")"; \ cd "$(DESTDIR)$(bindir)" && rm -f $$files clean-binPROGRAMS: @list='$(bin_PROGRAMS)'; test -n "$$list" || exit 0; \ echo " rm -f" $$list; \ rm -f $$list || exit $$?; \ test -n "$(EXEEXT)" || exit 0; \ list=`for p in $$list; do echo "$$p"; done | sed 's/$(EXEEXT)$$//'`; \ echo " rm -f" $$list; \ rm -f $$list aka$(EXEEXT): $(aka_OBJECTS) $(aka_DEPENDENCIES) @rm -f aka$(EXEEXT) $(CXXLINK) $(aka_OBJECTS) $(aka_LDADD) $(LIBS) distr-test$(EXEEXT): $(distr_test_OBJECTS) $(distr_test_DEPENDENCIES) @rm -f distr-test$(EXEEXT) $(CXXLINK) $(distr_test_OBJECTS) $(distr_test_LDADD) $(LIBS) dns-cfg$(EXEEXT): $(dns_cfg_OBJECTS) $(dns_cfg_DEPENDENCIES) @rm -f dns-cfg$(EXEEXT) $(CXXLINK) $(dns_cfg_OBJECTS) $(dns_cfg_LDADD) $(LIBS) pgl2ips$(EXEEXT): $(pgl2ips_OBJECTS) $(pgl2ips_DEPENDENCIES) @rm -f pgl2ips$(EXEEXT) $(CXXLINK) $(pgl2ips_OBJECTS) $(pgl2ips_LDADD) $(LIBS) pop-test$(EXEEXT): $(pop_test_OBJECTS) $(pop_test_DEPENDENCIES) @rm -f pop-test$(EXEEXT) $(CXXLINK) $(pop_test_OBJECTS) $(pop_test_LDADD) $(LIBS) rng-test$(EXEEXT): $(rng_test_OBJECTS) $(rng_test_DEPENDENCIES) @rm -f rng-test$(EXEEXT) $(CXXLINK) $(rng_test_OBJECTS) $(rng_test_LDADD) $(LIBS) mostlyclean-compile: -rm -f *.$(OBJEXT) distclean-compile: -rm -f *.tab.c @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/IntIntHash.Po@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/aka.Po@am__quote@ @AMDEP_TRUE@@am__include@ 
@am__quote@./$(DEPDIR)/distr_test.Po@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/dns_cfg.Po@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/pgl2ips.Po@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/pop_test.Po@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/rng_test.Po@am__quote@ .cc.o: @am__fastdepCXX_TRUE@ $(CXXCOMPILE) -MT $@ -MD -MP -MF $(DEPDIR)/$*.Tpo -c -o $@ $< @am__fastdepCXX_TRUE@ $(am__mv) $(DEPDIR)/$*.Tpo $(DEPDIR)/$*.Po @AMDEP_TRUE@@am__fastdepCXX_FALSE@ source='$<' object='$@' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(CXXCOMPILE) -c -o $@ $< .cc.obj: @am__fastdepCXX_TRUE@ $(CXXCOMPILE) -MT $@ -MD -MP -MF $(DEPDIR)/$*.Tpo -c -o $@ `$(CYGPATH_W) '$<'` @am__fastdepCXX_TRUE@ $(am__mv) $(DEPDIR)/$*.Tpo $(DEPDIR)/$*.Po @AMDEP_TRUE@@am__fastdepCXX_FALSE@ source='$<' object='$@' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(CXXCOMPILE) -c -o $@ `$(CYGPATH_W) '$<'` .cc.lo: @am__fastdepCXX_TRUE@ $(LTCXXCOMPILE) -MT $@ -MD -MP -MF $(DEPDIR)/$*.Tpo -c -o $@ $< @am__fastdepCXX_TRUE@ $(am__mv) $(DEPDIR)/$*.Tpo $(DEPDIR)/$*.Plo @AMDEP_TRUE@@am__fastdepCXX_FALSE@ source='$<' object='$@' libtool=yes @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(LTCXXCOMPILE) -c -o $@ $< mostlyclean-libtool: -rm -f *.lo clean-libtool: -rm -rf .libs _libs install-man1: $(dist_man1_MANS) @$(NORMAL_INSTALL) test -z "$(man1dir)" || $(MKDIR_P) "$(DESTDIR)$(man1dir)" @list='$(dist_man1_MANS)'; test -n "$(man1dir)" || exit 0; \ { for i in $$list; do echo "$$i"; done; \ } | while read p; do \ if test -f $$p; then d=; else d="$(srcdir)/"; fi; \ echo "$$d$$p"; echo "$$p"; \ done | \ sed -e 'n;s,.*/,,;p;h;s,.*\.,,;s,^[^1][0-9a-z]*$$,1,;x' \ -e 's,\.[0-9a-z]*$$,,;$(transform);G;s,\n,.,' | \ sed 'N;N;s,\n, ,g' | { \ list=; while read file base inst; do \ if test "$$base" = "$$inst"; then list="$$list $$file"; else \ echo " $(INSTALL_DATA) '$$file' '$(DESTDIR)$(man1dir)/$$inst'"; \ $(INSTALL_DATA) "$$file" "$(DESTDIR)$(man1dir)/$$inst" || exit $$?; \ fi; \ done; \ for i in $$list; do echo "$$i"; done | $(am__base_list) | \ while read files; do \ test -z "$$files" || { \ echo " $(INSTALL_DATA) $$files '$(DESTDIR)$(man1dir)'"; \ $(INSTALL_DATA) $$files "$(DESTDIR)$(man1dir)" || exit $$?; }; \ done; } uninstall-man1: @$(NORMAL_UNINSTALL) @list='$(dist_man1_MANS)'; test -n "$(man1dir)" || exit 0; \ files=`{ for i in $$list; do echo "$$i"; done; \ } | sed -e 's,.*/,,;h;s,.*\.,,;s,^[^1][0-9a-z]*$$,1,;x' \ -e 's,\.[0-9a-z]*$$,,;$(transform);G;s,\n,.,'`; \ test -z "$$files" || { \ echo " ( cd '$(DESTDIR)$(man1dir)' && rm -f" $$files ")"; \ cd "$(DESTDIR)$(man1dir)" && rm -f $$files; } ID: $(HEADERS) $(SOURCES) $(LISP) $(TAGS_FILES) list='$(SOURCES) $(HEADERS) $(LISP) $(TAGS_FILES)'; \ unique=`for i in $$list; do \ if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \ done | \ $(AWK) '{ files[$$0] = 1; nonempty = 1; } \ END { if (nonempty) { for (i in files) print i; }; }'`; \ mkid -fID $$unique tags: TAGS TAGS: $(HEADERS) $(SOURCES) $(TAGS_DEPENDENCIES) \ $(TAGS_FILES) $(LISP) set x; \ here=`pwd`; \ list='$(SOURCES) $(HEADERS) $(LISP) $(TAGS_FILES)'; \ unique=`for i in $$list; do \ if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \ done | \ $(AWK) '{ files[$$0] = 1; 
nonempty = 1; } \ END { if (nonempty) { for (i in files) print i; }; }'`; \ shift; \ if test -z "$(ETAGS_ARGS)$$*$$unique"; then :; else \ test -n "$$unique" || unique=$$empty_fix; \ if test $$# -gt 0; then \ $(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \ "$$@" $$unique; \ else \ $(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \ $$unique; \ fi; \ fi ctags: CTAGS CTAGS: $(HEADERS) $(SOURCES) $(TAGS_DEPENDENCIES) \ $(TAGS_FILES) $(LISP) list='$(SOURCES) $(HEADERS) $(LISP) $(TAGS_FILES)'; \ unique=`for i in $$list; do \ if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \ done | \ $(AWK) '{ files[$$0] = 1; nonempty = 1; } \ END { if (nonempty) { for (i in files) print i; }; }'`; \ test -z "$(CTAGS_ARGS)$$unique" \ || $(CTAGS) $(CTAGSFLAGS) $(AM_CTAGSFLAGS) $(CTAGS_ARGS) \ $$unique GTAGS: here=`$(am__cd) $(top_builddir) && pwd` \ && $(am__cd) $(top_srcdir) \ && gtags -i $(GTAGS_ARGS) "$$here" distclean-tags: -rm -f TAGS ID GTAGS GRTAGS GSYMS GPATH tags distdir: $(DISTFILES) @list='$(MANS)'; if test -n "$$list"; then \ list=`for p in $$list; do \ if test -f $$p; then d=; else d="$(srcdir)/"; fi; \ if test -f "$$d$$p"; then echo "$$d$$p"; else :; fi; done`; \ if test -n "$$list" && \ grep 'ab help2man is required to generate this page' $$list >/dev/null; then \ echo "error: found man pages containing the \`missing help2man' replacement text:" >&2; \ grep -l 'ab help2man is required to generate this page' $$list | sed 's/^/ /' >&2; \ echo " to fix them, install help2man, remove and regenerate the man pages;" >&2; \ echo " typically \`make maintainer-clean' will remove them" >&2; \ exit 1; \ else :; fi; \ else :; fi @srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \ topsrcdirstrip=`echo "$(top_srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \ list='$(DISTFILES)'; \ dist_files=`for file in $$list; do echo $$file; done | \ sed -e "s|^$$srcdirstrip/||;t" \ -e "s|^$$topsrcdirstrip/|$(top_builddir)/|;t"`; \ case $$dist_files in \ */*) $(MKDIR_P) `echo "$$dist_files" | \ sed '/\//!d;s|^|$(distdir)/|;s,/[^/]*$$,,' | \ sort -u` ;; \ esac; \ for file in $$dist_files; do \ if test -f $$file || test -d $$file; then d=.; else d=$(srcdir); fi; \ if test -d $$d/$$file; then \ dir=`echo "/$$file" | sed -e 's,/[^/]*$$,,'`; \ if test -d "$(distdir)/$$file"; then \ find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \ fi; \ if test -d $(srcdir)/$$file && test $$d != $(srcdir); then \ cp -fpR $(srcdir)/$$file "$(distdir)$$dir" || exit 1; \ find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \ fi; \ cp -fpR $$d/$$file "$(distdir)$$dir" || exit 1; \ else \ test -f "$(distdir)/$$file" \ || cp -p $$d/$$file "$(distdir)/$$file" \ || exit 1; \ fi; \ done check-am: all-am check: check-am all-am: Makefile $(PROGRAMS) $(MANS) installdirs: for dir in "$(DESTDIR)$(bindir)" "$(DESTDIR)$(man1dir)"; do \ test -z "$$dir" || $(MKDIR_P) "$$dir"; \ done install: install-am install-exec: install-exec-am install-data: install-data-am uninstall: uninstall-am install-am: all-am @$(MAKE) $(AM_MAKEFLAGS) install-exec-am install-data-am installcheck: installcheck-am install-strip: $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ `test -z '$(STRIP)' || \ echo "INSTALL_PROGRAM_ENV=STRIPPROG='$(STRIP)'"` install mostlyclean-generic: clean-generic: distclean-generic: -test -z "$(CONFIG_CLEAN_FILES)" || rm -f $(CONFIG_CLEAN_FILES) -test . 
= "$(srcdir)" || test -z "$(CONFIG_CLEAN_VPATH_FILES)" || rm -f $(CONFIG_CLEAN_VPATH_FILES) maintainer-clean-generic: @echo "This command is intended for maintainers to use" @echo "it deletes files that may require special tools to rebuild." clean: clean-am clean-am: clean-binPROGRAMS clean-generic clean-libtool mostlyclean-am distclean: distclean-am -rm -rf ./$(DEPDIR) -rm -f Makefile distclean-am: clean-am distclean-compile distclean-generic \ distclean-tags dvi: dvi-am dvi-am: html: html-am html-am: info: info-am info-am: install-data-am: install-man install-dvi: install-dvi-am install-dvi-am: install-exec-am: install-binPROGRAMS install-html: install-html-am install-html-am: install-info: install-info-am install-info-am: install-man: install-man1 install-pdf: install-pdf-am install-pdf-am: install-ps: install-ps-am install-ps-am: installcheck-am: maintainer-clean: maintainer-clean-am -rm -rf ./$(DEPDIR) -rm -f Makefile maintainer-clean-am: distclean-am maintainer-clean-generic mostlyclean: mostlyclean-am mostlyclean-am: mostlyclean-compile mostlyclean-generic \ mostlyclean-libtool pdf: pdf-am pdf-am: ps: ps-am ps-am: uninstall-am: uninstall-binPROGRAMS uninstall-man uninstall-man: uninstall-man1 .MAKE: install-am install-strip .PHONY: CTAGS GTAGS all all-am check check-am clean clean-binPROGRAMS \ clean-generic clean-libtool ctags distclean distclean-compile \ distclean-generic distclean-libtool distclean-tags distdir dvi \ dvi-am html html-am info info-am install install-am \ install-binPROGRAMS install-data install-data-am install-dvi \ install-dvi-am install-exec install-exec-am install-html \ install-html-am install-info install-info-am install-man \ install-man1 install-pdf install-pdf-am install-ps \ install-ps-am install-strip installcheck installcheck-am \ installdirs maintainer-clean maintainer-clean-generic \ mostlyclean mostlyclean-compile mostlyclean-generic \ mostlyclean-libtool pdf pdf-am ps ps-am tags uninstall \ uninstall-am uninstall-binPROGRAMS uninstall-man \ uninstall-man1 #AM_LDFLAGS = #imported_libs = @ENABLE_MANPAGES_GEN_TRUE@manpages-am: @top_srcdir@/common.h2m @ENABLE_MANPAGES_GEN_TRUE@ @for binary in $(bin_PROGRAMS) $(dist_bin_SCRIPTS); do \ @ENABLE_MANPAGES_GEN_TRUE@ echo "Generating manpage for $$binary"; \ @ENABLE_MANPAGES_GEN_TRUE@ manpage=`echo -n "$$binary" | sed -e 's/\..*//'`; \ @ENABLE_MANPAGES_GEN_TRUE@ name=`(grep \ @ENABLE_MANPAGES_GEN_TRUE@ --after-context=1 \ @ENABLE_MANPAGES_GEN_TRUE@ ".B \\\\\%polygraph-$$manpage" \ @ENABLE_MANPAGES_GEN_TRUE@ '@top_srcdir@/polygraph.man.in' || \ @ENABLE_MANPAGES_GEN_TRUE@ echo -n ' a part of Web Polygraph performance benchmark') | \ @ENABLE_MANPAGES_GEN_TRUE@ tail -1 | cut -c4-`; \ @ENABLE_MANPAGES_GEN_TRUE@ $(HELP2MAN) \ @ENABLE_MANPAGES_GEN_TRUE@ --no-info \ @ENABLE_MANPAGES_GEN_TRUE@ --name="$$name" \ @ENABLE_MANPAGES_GEN_TRUE@ --version-string="polygraph-$$manpage - $(PACKAGE_NAME)" \ @ENABLE_MANPAGES_GEN_TRUE@ --include='@top_srcdir@/common.h2m' \ @ENABLE_MANPAGES_GEN_TRUE@ --opt-include="$$manpage.h2m" \ @ENABLE_MANPAGES_GEN_TRUE@ --output="$$manpage.man" \ @ENABLE_MANPAGES_GEN_TRUE@ "./$$binary";\ @ENABLE_MANPAGES_GEN_TRUE@ done @ENABLE_MANPAGES_GEN_TRUE@ @if test 'x$(RECURSIVE_TARGETS)' != 'xmanpages-recursive' ; then \ @ENABLE_MANPAGES_GEN_TRUE@ $(MAKE) \ @ENABLE_MANPAGES_GEN_TRUE@ $(AM_MAKEFLAGS) \ @ENABLE_MANPAGES_GEN_TRUE@ RECURSIVE_TARGETS=manpages-recursive \ @ENABLE_MANPAGES_GEN_TRUE@ manpages-recursive; \ @ENABLE_MANPAGES_GEN_TRUE@ fi @ENABLE_MANPAGES_GEN_TRUE@manpages-recursive: 
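# The manpages* rules below come from common.am: when help2man is available
# (ENABLE_MANPAGES_GEN), they regenerate each *.man file from the binary's
# --help output plus the matching .h2m include file.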
@ENABLE_MANPAGES_GEN_TRUE@manpages: Makefile $(LIBRARIES) $(PROGRAMS) manpages-am manpages-recursive @ENABLE_MANPAGES_GEN_TRUE@manpagesclean-am: @ENABLE_MANPAGES_GEN_TRUE@ @rm -f $(dist_man1_MANS) @ENABLE_MANPAGES_GEN_TRUE@ @if test 'x$(RECURSIVE_TARGETS)' != 'xmanpagesclean-recursive' ; then \ @ENABLE_MANPAGES_GEN_TRUE@ $(MAKE) \ @ENABLE_MANPAGES_GEN_TRUE@ $(AM_MAKEFLAGS) \ @ENABLE_MANPAGES_GEN_TRUE@ RECURSIVE_TARGETS=manpagesclean-recursive \ @ENABLE_MANPAGES_GEN_TRUE@ manpagesclean-recursive; \ @ENABLE_MANPAGES_GEN_TRUE@ fi @ENABLE_MANPAGES_GEN_TRUE@manpagesclean-recursive: @ENABLE_MANPAGES_GEN_TRUE@manpagesclean: manpagesclean-am manpagesclean-recursive @ENABLE_MANPAGES_GEN_TRUE@.PHONY: manpages-am manpages-recursive manpages \ @ENABLE_MANPAGES_GEN_TRUE@ manpagesclean-am manpagesclean-recursive manpagesclean @ENABLE_MANPAGES_GEN_FALSE@manpages: @ENABLE_MANPAGES_GEN_FALSE@ @echo "Can not generate man pages. Please install help2man and reconfigure." @ENABLE_MANPAGES_GEN_FALSE@manpagesclean: @ENABLE_MANPAGES_GEN_FALSE@ @echo "Can not generate man pages. Please install help2man and reconfigure." @ENABLE_MANPAGES_GEN_FALSE@.PHONY: manpages manpagesclean # Tell versions [3.59,3.63) of GNU make to not export all variables. # Otherwise a system limit (for SysV at least) may be exceeded. .NOEXPORT: polygraph-4.3.2/src/tools/Makefile.am0000644000175000017500000000154311335553726017123 0ustar testertester# # Various utilities # bin_PROGRAMS = \ aka \ pop-test \ distr-test \ rng-test \ dns-cfg \ pgl2ips dist_man1_MANS = \ aka.man \ pop-test.man \ distr-test.man \ rng-test.man \ dns-cfg.man \ pgl2ips.man EXTRA_DIST = \ aka.h2m \ rng-test.h2m \ dns-cfg.h2m \ pgl2ips.h2m pop_test_SOURCES = \ pop_test.cc \ IntIntHash.h \ IntIntHash.cc distr_test_SOURCES = \ distr_test.cc rng_test_SOURCES = \ rng_test.cc LDADD = \ ../base/libbase.a \ ../xstd/libxstd.a aka_SOURCES = \ aka.cc aka_LDADD = \ ../pgl/libpgl.a \ ../xparser/libxparser.a \ \ $(LDADD) dns_cfg_SOURCES = \ dns_cfg.cc dns_cfg_LDADD = \ ../pgl/libpgl.a \ ../xparser/libxparser.a \ \ $(LDADD) pgl2ips_SOURCES = \ pgl2ips.cc pgl2ips_LDADD = \ ../pgl/libpgl.a \ ../xparser/libxparser.a \ ../runtime/libruntime.a \ \ $(LDADD) include $(top_srcdir)/common.am polygraph-4.3.2/src/tools/IntIntHash.h0000644000175000017500000000305411546440450017241 0ustar testertester /* Web Polygraph http://www.web-polygraph.org/ * Copyright 2003-2011 The Measurement Factory * Licensed under the Apache License, Version 2.0 */ #ifndef POLYGRAPH__TOOLS_INTINTHASH_H #define POLYGRAPH__TOOLS_INTINTHASH_H #include // a simple and efficient hash indexed on non-zero 'int' // the hash stores an 'int' value for each entry // we will make it more "generic" iff needed // hash item struct IntIntHashItem { IntIntHashItem *next; // next item in chain int key; // position is empty if key is zero int val; // initialized to zero IntIntHashItem(): next(0), key(0), val(0) {} }; class IntIntHash { public: typedef IntIntHashItem **Loc; // an address returned by find() and used in [] public: IntIntHash(int aCapacity); // may be adjusted a bit ~IntIntHash(); double utilp() const; bool find(int key, Loc &loc) const; void addAt(Loc idx, int key, int val); void delAt(Loc idx); int operator ()(Loc loc) const { return (*loc)->key; } int &operator [](Loc loc) { return (*loc)->val; } int operator [](Loc loc) const { return (*loc)->val; } protected: inline int hashIdx(int key) const; IntIntHashItem *getNewItem() { return new IntIntHashItem; } void putOldItem(IntIntHashItem *i) { delete i; } protected: Loc 
theIndex; // hash (stores pointers to real items) int theHashCap; // hash capacity int theHashCnt; // active pointers in the hash }; /* inlined methods */ inline int IntIntHash::hashIdx(int key) const { if (key < 0) key += INT_MAX; return key % theHashCap; } #endif polygraph-4.3.2/src/tools/pgl2ips.man0000644000175000017500000000165111336340427017135 0ustar testertester.\" DO NOT MODIFY THIS FILE! It was generated by help2man 1.36. .TH POLYGRAPH-PGL2IPS "1" "February 2010" "polygraph-pgl2ips - Web Polygraph" "User Commands" .SH NAME polygraph-pgl2ips \- PGL IP addresses calculator .SH SYNOPSIS .B pgl2ips [\fI--option \fR...] .SH DESCRIPTION Calculates and prints IP addresses used in PGL workload. .SH OPTIONS .TP \fB\-\-help\fR list of options .TP \fB\-\-version\fR package version info .TP \fB\-\-config\fR PGL configuration .TP \fB\-\-cfg_dirs\fR directories for PGL #includes .TP \fB\-\-agent\fR print addresses for the given agent only .TP \fB\-\-host\fR print addresses for the given host only .TP \fB\-\-var\fR print value of the specified variable .SH COPYRIGHT Copyright \(co 2003-2006 The Measurement Factory, Inc. .SH "SEE ALSO" .BR polygraph (7) \- general information and a list of programs .B \%http://www.web-polygraph.org/ \- project web site polygraph-4.3.2/src/tools/dns-cfg.h2m0000644000175000017500000000077511335553726017026 0ustar testertester[DESCRIPTION] Given a zone name, DNS names, and IP addresses, dns-cfg builds configuration suitable for use with BIND and, perhaps, other DNS servers. Dns-cfg output consists of three parts. The first part is the text to cut-and-paste into BIND's named.conf file. This part is sent to the standard output. The outer two parts are direct and reverse zone files. Those may be quite large and are dumped on disk into the appropriately named files. Dns-cfg can use PGL configuration as the source of information. polygraph-4.3.2/src/tools/rng-test.man0000644000175000017500000000105611336340427017317 0ustar testertester.\" DO NOT MODIFY THIS FILE! It was generated by help2man 1.36. .TH POLYGRAPH-RNG-TEST "1" "February 2010" "polygraph-rng-test - Web Polygraph" "User Commands" .SH NAME polygraph-rng-test \- random number generator test .SH SYNOPSIS .B rng-test \fI\fR .SH DESCRIPTION Generates random numbers using algorithms used in Polygraph. .PP .SH COPYRIGHT Copyright \(co 2003-2006 The Measurement Factory, Inc. 
.SH "SEE ALSO" .BR polygraph (7) \- general information and a list of programs .B \%http://www.web-polygraph.org/ \- project web site polygraph-4.3.2/src/tools/pgl2ips.cc0000644000175000017500000002706511546440450016756 0ustar testertester /* Web Polygraph http://www.web-polygraph.org/ * Copyright 2003-2011 The Measurement Factory * Licensed under the Apache License, Version 2.0 */ #include "base/polygraph.h" #include "xstd/gadgets.h" #include "xstd/h/iomanip.h" #include "base/polyOpts.h" #include "base/CmdLine.h" #include "base/IpRange.h" #include "pgl/AgentAddrIter.h" #include "pgl/AgentSymIter.h" #include "pgl/BenchSideSym.h" #include "pgl/BenchSym.h" #include "pgl/PglArraySym.h" #include "pgl/PglClonerSym.h" #include "pgl/PglNetAddrParts.h" #include "pgl/PglNetAddrRange.h" #include "pgl/PglNetAddrRangeSym.h" #include "pgl/PglNetAddrSym.h" #include "pgl/PglPp.h" #include "pgl/PglStaticSemx.h" #include "pgl/ProxySym.h" #include "pgl/RobotSym.h" #include "pgl/ServerSym.h" #include "runtime/HostMap.h" class MyOpts: public OptGrp { public: MyOpts(): theHelpOpt(this, "help", "list of options"), theVersOpt(this, "version", "package version info"), theCfgName(this, "config ", "PGL configuration"), theCfgDirs(this, "cfg_dirs ", "directories for PGL #includes"), theAgent (this, "agent ", "print addresses for the given agent only"), theHost (this, "host ", "print addresses for the given host only"), theVar (this, "var ", "print value of the specified variable") {} virtual bool validate() const; public: HelpOpt theHelpOpt; VersionOpt theVersOpt; StrOpt theCfgName; StrArrOpt theCfgDirs; StrOpt theAgent; StrOpt theHost; StrOpt theVar; }; // Bench.*.host related information class HostInfo { public: HostInfo() {} HostInfo(const String &aName, const NetAddr &aHost, const Array &addrs, const int start, const int count, const int anAgentCount); bool includes(const NetAddr &addr) const; ostream &print(ostream &os) const; private: ostream &printIps(ostream &os) const; ostream &printVar(ostream &os, const String &var) const; String theName; NetAddr theHost; Array theAddrs; int theAgentCount; }; // Agent (robot or server) related information class AgentInfo { public: AgentInfo() {} AgentInfo(const String &aName, const String &aKind, const ArraySym *addrs); const Array &addrs() const { return theAddrs; } int totalAgentCount() const { return theAddrs.count() * theAgentsPerAddr; } void calcRanges(); ostream &print(ostream &os) const; private: ostream &printIps(ostream &os) const; ostream &printVar(ostream &os, const String &var) const; int agentCount() const; String theName; String theKind; Array theAddrs; Array theRanges; int theAgentsPerAddr; }; static MyOpts TheOpts; static Array TheCltHosts; static Array TheSrvHosts; static Array TheRobotAgents; static Array TheServerAgents; static const HostInfo *TheHostScope(0); /* MyOpt */ bool MyOpts::validate() const { if (!theCfgName) cerr << "must specify PGL configuration file (--config)" << endl; else return true; return false; } /* HostInfo */ HostInfo::HostInfo(const String &aName, const NetAddr &aHost, const Array &addrs, const int start, const int count, const int anAgentCount): theName(aName), theHost(aHost), theAddrs(count), theAgentCount(anAgentCount) { Assert(0 <= start); Assert(0 <= count); Assert(start + count <= addrs.count()); Array hostAddrs(count); for (int i = start; i < start + count; ++i) hostAddrs.append(addrs[i]); CompactAllAddrs(hostAddrs, theAddrs); } bool HostInfo::includes(const NetAddr &addr) const { for (int i = 0; i < theAddrs.count(); 
++i) if (theAddrs[i].includes(addr)) return true; return false; } ostream &HostInfo::print(ostream &os) const { if (TheOpts.theVar) { if (TheOpts.theVar.startsWith(theName + '.')) { const String var(TheOpts.theVar(theName.len() + 1, TheOpts.theVar.len())); printVar(os, var); } return os; } os << theName << ": host=" << theHost << " ips="; printIps(os) << " agents=" << theAgentCount; return os << endl; } ostream &HostInfo::printIps(ostream &os) const { if (theAddrs.empty()) os << "none"; else for (int i = 0; i < theAddrs.count(); ++i) { if (i) os << ','; theAddrs[i].print(os); } return os; } ostream &HostInfo::printVar(ostream &os, const String &var) const { static const String host("host"); static const String ips("ips"); static const String agents("agents"); if (var == host) os << theHost << endl; else if (var == ips) printIps(os) << endl; else if (var == agents) os << theAgentCount << endl; else cerr << "Unknown variable '" << var << "'." << endl; return os; } /* AgentInfo */ AgentInfo::AgentInfo(const String &aName, const String &aKind, const ArraySym *addrs): theName(aName), theKind(aKind) { if (addrs) { HostMap seen(addrs->count()); // lookup table to weed out duplicates for (int i = 0; i < addrs->count(); ++i) { const NetAddrSym &addr_sym = (const NetAddrSym&)addrs->item(i)->cast(NetAddrSym::TheType); const NetAddr addr(addr_sym.val().addrN(), -1); // we only care about the IP int idx = -1; if (!seen.find(addr, idx)) { theAddrs.append(addr); (void)seen.addAt(idx, addr); } } theAgentsPerAddr = addrs->count() / theAddrs.count(); if (theAgentsPerAddr * theAddrs.count() != addrs->count()) cerr << "warning: " << theName << " total address count " << "is not divisible by the unique address count, " << "agents per host counters will be inaccurate" << endl; } else theAgentsPerAddr = 0; } void AgentInfo::calcRanges() { Assert(theRanges.empty()); Array addrs; if (TheHostScope) { addrs.stretch(theAddrs.count()); for (int i = 0; i < theAddrs.count(); ++i) if (TheHostScope->includes(theAddrs[i])) addrs.append(theAddrs[i]); } CompactAllAddrs(TheHostScope ? addrs : theAddrs, theRanges); } ostream &AgentInfo::print(ostream &os) const { if (theRanges.empty()) return os; if (TheOpts.theVar) { if (TheOpts.theVar.startsWith(theName + '.')) { const String var(TheOpts.theVar(theName.len() + 1, TheOpts.theVar.len())); printVar(os, var); } return os; } os << theName << ": kind=" << theKind << " ips="; printIps(os) << " agents=" << agentCount(); return os << endl; } ostream &AgentInfo::printIps(ostream &os) const { if (theRanges.empty()) os << "none"; else for (int i = 0; i < theRanges.count(); ++i) { if (i) os << ','; theRanges[i].print(os); } return os; } ostream &AgentInfo::printVar(ostream &os, const String &var) const { static const String kind("kind"); static const String ips("ips"); static const String agents("agents"); if (var == kind) os << theKind << endl; else if (var == ips) printIps(os) << endl; else if (var == agents) os << agentCount() << endl; else cerr << "Unknown variable '" << var << "'." 
<< endl; return os; } int AgentInfo::agentCount() const { int count(0); for (int i = 0; i < theRanges.count(); ++i) count += theRanges[i].count() * theAgentsPerAddr; return count; } // get unique addresses from AgentInfo array static void getAgentAddrs(const Array &agentInfos, Array &addrs, int &totalAgentCount) { // count all addresses to size the lookup table totalAgentCount = 0; for (int i = 0; i < agentInfos.count(); ++i) totalAgentCount += agentInfos[i].totalAgentCount(); HostMap seen(totalAgentCount); // lookup table to weed out duplicates // append unique IP addresses only for (int i = 0; i < agentInfos.count(); ++i) { const AgentInfo &agentInfo(agentInfos[i]); for (int j = 0; j < agentInfo.addrs().count(); ++j) { const NetAddr addr(agentInfo.addrs()[j]); int idx = -1; if (!seen.find(addr, idx)) { addrs.append(addr); (void)seen.addAt(idx, addr); } } } } // make host/agent name by appending a number to given string static String makeName(const String &base, const int n) { static char buf[256]; ofixedstream ofs(buf, sizeof(buf)/sizeof(*buf)); ofs << base << n << ends; buf[sizeof(buf)/sizeof(*buf) - 1] = '\0'; return String(buf); } // collect host info static void configureHosts(const String &nameBase, const String &agentType, const Array &agentInfos, const BenchSideSym &benchSide, Array &hostInfos) { if (TheHostScope) return; PtrArray hosts; benchSide.hosts(hosts); if (hosts.empty()) clog << "no real host addresses for " << agentType << " side specified" << endl << xexit; Array addrs; int totalAgentCount; getAgentAddrs(agentInfos, addrs, totalAgentCount); if (!addrs.count()) return; const int agentsPerHost = totalAgentCount / hosts.count(); const int addrsPerHost = addrs.count() / hosts.count(); if (addrsPerHost * hosts.count() != addrs.count()) clog << "the number of agent addresses (" << addrs.count() << ") is not divisible by the number of real " << "host addresess (" << hosts.count() << ')' << xexit; for (int i = 0; i < hosts.count(); ++i) { const String name(makeName(nameBase, i+1)); if (!TheOpts.theHost || name == TheOpts.theHost || hosts[i]->addrA() == TheOpts.theHost) { const HostInfo info(name, *hosts[i], addrs, i*addrsPerHost, addrsPerHost, agentsPerHost); hostInfos.append(info); if (TheOpts.theHost) { TheHostScope = &hostInfos.last(); break; } } } } // collect agent info static void configureAgents(const String &nameBase, const String &agentType, Array &agentInfos) { static AgentSymIter::Agents &agents = PglStaticSemx::TheAgentsToUse; int n(1); for (AgentSymIter i(agents, agentType, false); i; ++i) { const String name(makeName(nameBase, n++)); const AgentSym &agent(*i.agent()); if (!TheOpts.theAgent || name == TheOpts.theAgent || agent.kind() == TheOpts.theAgent) { AgentInfo info(name, agent.kind(), agent.addresses()); agentInfos.append(info); } } } // print current scope static ostream &printScope(ostream &os) { os << "scope: host="; if (TheOpts.theHost) TheOpts.theHost.report(os); else os << '*'; os << " agent="; if (TheOpts.theAgent) TheOpts.theAgent.report(os); else os << '*'; return os << endl; } int main(int argc, char **argv) { CmdLine cmd; cmd.configure(Array() << &TheOpts); if (!cmd.parse(argc, argv) || !TheOpts.validate()) return -1; configureStream(cout, 2); configureStream(clog, 3); TheOpts.theCfgDirs.copy(PglPp::TheDirs); PglStaticSemx::Interpret(TheOpts.theCfgName); if (!PglStaticSemx::TheBench) clog << "no bench selected with use()" << endl << xexit; if (!TheOpts.theVar) printScope(cout) << endl; configureAgents("robot", RobotSym::TheType, TheRobotAgents); 
configureAgents("server", ServerSym::TheType, TheServerAgents); if (TheRobotAgents.empty() && TheServerAgents.empty()) cerr << "No" << (TheOpts.theAgent ? "matching " : " ") << "agents found" << endl << xexit; configureHosts("polyclt", RobotSym::TheType, TheRobotAgents, *PglStaticSemx::TheBench->clientSide(), TheCltHosts); configureHosts("polysrv", ServerSym::TheType, TheServerAgents, *PglStaticSemx::TheBench->serverSide(), TheSrvHosts); if (TheCltHosts.empty() && TheSrvHosts.empty()) cerr << "No" << (TheOpts.theHost ? "matching " : " ") << "hosts found" << endl << xexit; ostream::pos_type p(cout.tellp()); for (int i = 0; i < TheCltHosts.count(); ++i) TheCltHosts[i].print(cout); for (int i = 0; i < TheSrvHosts.count(); ++i) TheSrvHosts[i].print(cout); if (cout.tellp() != p) cout << endl; for (int i = 0; i < TheRobotAgents.count(); ++i) { TheRobotAgents[i].calcRanges(); TheRobotAgents[i].print(cout); } for (int i = 0; i < TheServerAgents.count(); ++i) { TheServerAgents[i].calcRanges(); TheServerAgents[i].print(cout); } return 0; } polygraph-4.3.2/src/tools/access2poly/0000755000175000017500000000000011546445446017316 5ustar testertesterpolygraph-4.3.2/src/tools/access2poly/access2cdb0000755000175000017500000000700010621204214021210 0ustar testertester#!/usr/bin/perl -w # # This script reads Squid access log and downloads referenced objects, # stuffing them into Polygraph Content Database (.cdb) files, based on # reported or guessed content type. The user specifies the directory # where the files should be created or updated. # use strict; # content group entries are checked in the order they are listed here # last group always matches my @ContentGroups = ( { name => 'images', ctypes => [ qr|image/|i ], extensions => [ qr(jpeg|jpg|gif|png)i ], format => 'verbatim', }, { name => 'htmls', ctypes => [ qr|text/\w*html|i ], extensions => [ qr(html?)i ], format => 'linkonly', }, { name => 'downloads', ctypes => [ qr|application/(?!\w*java)/|i ], extensions => [ qr(zip|tar|tgz|gz|exe)i ], }, { name => 'other', ctypes => [ qr|.|i ], extensions => [ qr|.|i ], }, ); my ($opt, $Dir) = @ARGV; die("usage: $0 --cdbs \n") unless defined $Dir && -d $Dir && $opt eq '--cdbs'; $Dir =~ s|/*$||g; shift @ARGV; shift @ARGV; # init groups foreach my $g (@ContentGroups) { $g->{hits} = 0; $g->{ctypes_stats} = {}; $g->{extensions_stats} = {}; $g->{format} = 'verbatim' unless exists $g->{format}; } $| = 1; my $cntEntry = 0; while (<>) { chomp; ++$cntEntry; &reportProgress() if $cntEntry % 1000 == 0; my @fields = (split); next unless @fields >= 10; my $url = $fields[6]; my $type = $fields[9]; # find matching content group my $match; foreach my $g (@ContentGroups) { last if $match = &groupMatch($g, $url, $type); } # last resort $match = $ContentGroups[$#ContentGroups] unless $match; &get($match, $url); } &reportProgress(); map { &reportGroup($_) } sort { $b->{hits} <=> $a->{hits} } @ContentGroups; exit(0); sub groupMatch { my ($g, $url, $type) = @_; my $match; if (defined $type && $type ne '-') { $match = &listMatch($g, 'ctypes', $type); } if (!$match && defined $url) { my ($ext) = ($url =~ m|/.*\w+\.([^\.]+)|); $match = &listMatch($g, 'extensions', $ext) if defined $ext; } return $match; } sub listMatch { my ($g, $name, $text) = @_; my $list = $g->{$name}; foreach my $e (@{$list}) { if ($text =~ m/$e/) { my $stats = $g->{"${name}_stats"}; if (exists $stats->{$e}) { ++$stats->{$e}; } else { $stats->{$e} = 1; } return $g; } } return undef(); } sub get { my ($g, $url) = @_; my $tmp = sprintf('%s/wget-%d.out', $Dir, $$); my 
$wget = sprintf("wget --output-document=%s --server-response '%s'", $tmp, $url); if (system($wget)) { warn("failed to fetch '$url'\n"); return; } my $db = sprintf('%s/%s.cdb', $Dir, $g->{name}); my $cdb = sprintf("cdb %s add --format %s %s", $db, $g->{format}, $tmp); if (system($cdb)) { die("failed to add '$url' to $db: $!\n"); } unlink $tmp; ++$g->{hits}; } sub reportGroup { my ($g) = @_; printf("Group: %s\n", $g->{name}); printf("\thits: %10d\n", $g->{hits}); &reportList($g, 'ctypes'); &reportList($g, 'extensions'); printf("\n"); } sub reportList { my ($g, $name) = @_; my $stats = $g->{"${name}_stats"}; my $total = 0; map { $total += $_ } values %{$stats}; printf("\t%-10s: %10d\n", $name, $total); while (my ($key, $value) = each %{$stats}) { printf("\t\t%-20s %5d %10.3f\n", $key, $value, &percent($value, $total)); } } sub reportProgress { printf(STDERR "#lines: %03dK\n", $cntEntry/1000); } sub percent { my ($part, $whole) = @_; die() unless defined $whole; return -1 unless $whole > 0 && defined($part); no integer; return 100. * $part/$whole; } polygraph-4.3.2/src/tools/access2poly/access-order0000755000175000017500000000020510621204214021566 0ustar testertester#!/usr/bin/perl -w while (<>) { my @fields = split; $fields[0] -= $fields[1]/1000; print(map { "$_ " } @fields); print("\n"); } polygraph-4.3.2/src/tools/access2poly/access2pgl0000755000175000017500000002062210621204214021247 0ustar testertester#!/usr/bin/perl -w # # This script reads Squid access log and computes test configuration # values (suitable for use in Polygraph PGL configuration files) to # mimic logged traffic # # NOTE: request interarrival time distribution calculation assumes that # the first field in access log is request arrival time (not response departure # time recorded in standard Squid logs), and that the log is sorted by that # first field; use the following command or equivalent to convert standard # access log: # # % access2order access.log | sort -t' ' -n +0 # use strict; my $SessionIdleTout = 1*60*1000.; # when a busy session ends my %Ds = ( InterArrival => &newTimeDistr('my_req_inter_arrival', 'Request interarrival times during busy periods'), SessionBusyDur => &newTimeDistr('my_session_busy_period', 'Duration of a busy session period'), SessionBusyCount => &newNumDistr('my_session_busy_count', 'Number of requests per busy session period'), SessionIdleDur => &newTimeDistr('my_session_idle_period', 'Duration of an idle session period'), Rptm => &newTimeDistr('my_think_time', 'Response times'), RequestHeaderSize => &newSizeDistr('my_req_header_size', 'Request header sizes'), RequestBodySize => &newSizeDistr('my_req_content_size', 'Request body sizes'), ResponseSize => &newSizeDistr('my_resp_size', 'Response sizes'), StatusCodes => &newEventsDistr('my_resp_codes', 'Response status codes'), # RequestTypes => &newEventsDistr('my_req_types', 'Request types'), RequestMethods => &newEventsDistr('my_req_methods', 'Request methods'), ); my %Ips = (); my ($cntEntry, $cntIp) = (0) x 2; $| = 1; while (<>) { chomp; ++$cntEntry; &reportProgress() if $cntEntry % 1000 == 0; my @fields = (split); my $rptm = $fields[1]; my $time = $fields[0]; my $ip = $fields[2]; my ($result, $scode) = split(m|/|, $fields[3]); if (exists $Ips{$ip}) { my $last = $Ips{$ip}->{last}; die("access log not sorted by request time, stopped") if $time < $last; &updateDistr($Ds{Rptm}, $rptm) if $scode == 200 || $scode == 304; &updateDistr($Ds{RequestHeaderSize}, $fields[10]); &updateDistr($Ds{RequestBodySize}, $fields[11]); 
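	# Squid access-log field layout assumed by the $fields[] indices used in
	# this loop (inferred from how the values are consumed below; the log is
	# expected to be re-ordered with access-order and sorted, per the NOTE above):
	#   $fields[0]  request arrival time (Unix sec)   $fields[1]  response time (msec)
	#   $fields[2]  client IP                         $fields[3]  result_code/status
	#   $fields[4]  response size (bytes)             $fields[5]  request method
	#   $fields[10] request header size               $fields[11] request body size
	# The last two are not part of the standard Squid format and may be missing;
	# updateDistr() silently skips undefined or '-' values.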
&updateDistr($Ds{ResponseSize}, $fields[4]) if $scode == 200; &updateDistr($Ds{StatusCodes}, $scode); &updateDistr($Ds{RequestMethods}, $fields[5]); my $gap = 1000.*($time - $last); if (!defined $SessionIdleTout || $gap < $SessionIdleTout) { &updateDistr($Ds{InterArrival}, $gap); } else { &updateDistr($Ds{SessionBusyCount}, $Ips{$ip}->{busy_count}); &updateDistr($Ds{SessionBusyDur}, 1000.*($last - $Ips{$ip}->{busy_start})); &updateDistr($Ds{SessionIdleDur}, 1000.*($time - $last)); $Ips{$ip}->{busy_start} = $time; $Ips{$ip}->{busy_count} = 0; } $Ips{$ip}->{last} = $time; $Ips{$ip}->{busy_count}++; } else { ++$cntIp; $Ips{$ip} = { last => $time, busy_start => $time, busy_count => 1, } } } &reportProgress(); map { &reportDistr($_) } sort { $a->{id} cmp $b->{id} } values %Ds; exit(0); sub newEventsDistr { my ($id, $name) = @_; return &newDistr($id, $name, [ &newArea('all', undef(), undef()), ]); } sub newTimeDistr { my ($id, $name) = @_; my $distr = &newDistr($id, $name, [ &newArea('frequent', 1000, 1), &newArea('medium', 10*1000, 10), &newArea('occasional', 100*1000, 100), ]); $distr->{pgl_type} = 'time_distr'; $distr->{report_factor} = 1000.0; # convert to seconds $distr->{report_unit} = 'seconds'; return $distr; } sub newSizeDistr { my ($id, $name) = @_; my $distr = &newDistr($id, $name, [ &newArea('tiny', 1024, 1), &newArea('small', 10*1024, 10), &newArea('medium', 100*1024, 100), &newArea('large', 1000*1024, 1000), &newArea('huge', 10000*1024, 10000), ]); $distr->{pgl_type} = 'size_distr'; $distr->{report_unit} = 'bytes'; return $distr; } sub newNumDistr { my $distr = &newSizeDistr(@_); $distr->{pgl_type} = 'num_distr'; $distr->{report_unit} = 'number'; return $distr; } sub newDistr { my ($id, $name, $areas) = @_; my $d = { id => $id, name => $name, pgl_type => undef(), report_factor => undef(), report_unit => undef(), areas => $areas, count => 0, sum => 0, sqSum => 0, }; # assign minimums my $lastMax; foreach my $area (@{$d->{areas}}) { $area->{min} = $lastMax if defined $lastMax; $lastMax = $area->{max}; } return $d; } sub newArea { my ($name, $max, $factor) = @_; return { name => $name, min => undef(), max => $max, factor => $factor, values => {}, }; } sub updateDistr { my ($distr, $value) = @_; return unless defined $value && $value ne '-'; # find matching area my $area; foreach $a (@{$distr->{areas}}) { if (defined $area) { $area = $a if defined $a->{min} && $value >= $a->{min}; } else { $area = $a; } } die("no matching area for $value in ". $distr->{name}. 
" distro, stopped") unless $area; if (defined $area->{factor}) { $distr->{sum} += $value; $distr->{sqSum} += $value * $value; $value = int($value / $area->{factor}); } $distr->{count}++; if (defined $area->{values}->{$value}) { $area->{values}->{$value}++; } else { $area->{values}->{$value} = 1; } } sub reportDistr { my ($distr) = @_; printf("# %s\n", $distr->{name}); printf("#\tcount: %10d\n", $distr->{count}); if (defined $distr->{areas}->[0]->{factor}) { &reportNumDistr($distr); } else { &reportEventDistr($distr); } printf("\n"); } sub reportNumDistr { my ($distr) = @_; if ($distr->{count}) { my $mean = $distr->{sum}/$distr->{count}; my $dev; if ($distr->{count} > 1) { my $diff = $distr->{sqSum} - $distr->{sum}*$distr->{sum}/$distr->{count}; $dev = sqrt($diff / ($distr->{count}-1)); } my $median = &distrPercentile($distr, 50.0); printf("#\tmedian: %s\n", &distrValue($distr, $median)); printf("#\tmean: %s\n", &distrValue($distr, $mean)); printf("#\tstd_dev: %s\n", &distrValue($distr, $dev)) if defined $dev; printf("#\trel_dev: %14.3f%%\n", &percent($dev, $mean)) if $mean > 0; } printf("#\tunit: %10s\n", $distr->{report_unit}); printf("%s %s = {\n", $distr->{pgl_type}, $distr->{id}); my $sum = 0; foreach my $a (@{$distr->{areas}}) { &reportNumArea($distr, $a, \$sum); } printf("}\n"); } sub reportNumArea { my ($distr, $area, $sumPtr) = @_; my @keys = sort { $a <=> $b } keys %{$area->{values}}; my $bin = { min => undef(), max => undef(), count => 0 }; foreach my $v (@keys) { my $c = $area->{values}->{$v}; my $value = int($v * $area->{factor}); &nextBin($distr, $bin, ${$sumPtr}) if ($bin->{count} + $c) >= ($distr->{count}/100.); $bin->{count} += $c; $bin->{min} = $value unless defined $bin->{min}; $bin->{max} = $value; ${$sumPtr} += $c; } &nextBin($distr, $bin, ${$sumPtr}) if $bin->{count}; } sub nextBin { my ($distr, $bin, $sum) = @_; return unless $bin->{count}; my ($min, $max) = map { &distrValue($distr, $_) } ($bin->{min}, $bin->{max}); printf("\t%s : %s %10.3f # %10.3f\n", $min, $max, &percent($bin->{count}, $distr->{count}), &percent($sum, $distr->{count})); $bin->{count} = 0; $bin->{min} = $bin->{max} = undef(); } sub distrPercentile { my ($distr, $level) = @_; my $sum = 0; my $last; foreach my $area (@{$distr->{areas}}) { my @keys = sort { $a <=> $b } keys %{$area->{values}}; foreach my $v (@keys) { $sum += $area->{values}->{$v}; my $value = int($v * $area->{factor}); return $value if &percent($sum, $distr->{count}) >= $level; $last = $value; } } return $last; } sub distrValue { my ($distr, $v) = @_; my $value = $distr->{report_factor} ? $v/$distr->{report_factor} : $v; my $f = $distr->{report_factor} ? '%14.3f' : '%10d'; return sprintf($f, $value); } sub reportEventDistr { my ($distr) = @_; printf("%s = [\n", $distr->{id}); my $area = $distr->{areas}->[0]; my @keys = sort { $area->{values}->{$b} <=> $area->{values}->{$a} } keys %{$area->{values}}; my $count = 0; foreach my $v (@keys) { if (my $c = $area->{values}->{$v}) { my $value = sprintf('"%s"', $v); if ($count == 0) { printf("\t%-10s", $value); # let most frequent entry absorb cal mistakes } else { printf(",\n") if $count; printf("\t%-10s : %.3f%%", $value, &percent($c, $distr->{count})); } ++$count; } } printf("\n];\n", $distr->{id}); } sub reportProgress { printf(STDERR "#%03dK IPs: %3d\n", $cntEntry/1000, $cntIp); } sub percent { my ($part, $whole) = @_; die() unless defined $whole; return -1 unless $whole > 0 && defined($part); no integer; return 100. 
* $part/$whole; } polygraph-4.3.2/src/tools/access2poly/access-filter0000755000175000017500000001461710621204214021754 0ustar testertester#!/usr/bin/perl -w # # This script reads access log in Squid format and prints "good" entries # Good entries are defined based on the --profile option. # # "country" profile (for building request interarrival distributions): # - US-based client IP addresses # # "server" profile (for building most server-side parameters) # - HTTP protocol # - 2xx and 3xx status codes # - GET, POST, and HEAD request methods # # "content" profile (for building content databases): # - HTTP protocol # - 200 status code # - GET request methods # - no query terms in request-URI # # The script also dumps statistics related to the above filtering choices # # "country" profile prerequisite, an IP::Country::Fast Perl module, can be # found at http://search.cpan.org/~nwetters/IP-Country-2.17/ # use strict; use integer; # grok profile my ($option, $Profile) = @ARGV or die("usage: $0 --profile \n"); die("unsupported option '$option'\n") unless $option eq '--profile'; die("unsupported profile '$Profile'\n") unless ($Profile eq 'country' || $Profile eq 'server' || $Profile eq 'content'); shift @ARGV; shift @ARGV; my %Ips = (); my %Bads = (); my %Countries = (); my %Statuses = (); my %Protos = (); my %Methods = (); my ($cntEntry, $cntGoodEntry, $cntBad, $cntIp, $cntGoodIp, $cntStatus, $cntGoodStatus, $cntUri, $cntGoodUri, $cntCountry, $cntGoodCountry, $cntMethod, $cntGoodMethod, $cntProto, $cntGoodProto) = (0) x 15; my %GoodCountries = map { ($_ => 1) } qw(US); my %GoodMethods = map { ($_ => 1) } qw(GET HEAD POST); my $Registry; select(STDERR); while (<>) { chomp; ++$cntEntry; &reportProgress() if $cntEntry % 1000 == 0; my @fields = (split); my @bad = (); push @bad, 'FC' if @fields < 10; # check response status code ++$cntStatus; my ($sc) = ($fields[3] =~ m|\w+/(\d+)|); $sc = '??' unless defined $sc; if (defined $Statuses{$sc}) { ++$Statuses{$sc}; } else { $Statuses{$sc} = 1; } my $goodStatus = $Profile eq 'country'; if ($Profile eq 'server') { $goodStatus = $sc ne '??' && ($sc/100 == 2 || $sc/100 == 3); } elsif ($Profile eq 'content') { $goodStatus = $sc eq '200'; } if ($goodStatus) { ++$cntGoodStatus; } else { push @bad, 'SC'; } # check protocol ++$cntProto; my $uri = $fields[6]; my ($proto) = ($uri =~ m|(\w+)://|); $proto = '??' unless defined $proto; if (defined $Protos{$proto}) { ++$Protos{$proto}; } else { $Protos{$proto} = 1; } my $goodProto = $Profile eq 'country' || $proto eq 'http'; if ($goodProto) { ++$cntGoodProto; } else { push @bad, 'PRT'; } # check URI for query terms ++$cntUri; if ($Profile ne 'content' || $uri !~ /[\?\&]/) { ++$cntGoodUri; } else { push @bad, 'URI'; } # check request method ++$cntMethod; my $method = $fields[5]; $method = '??' unless defined $method; if (defined $Methods{$method}) { ++$Methods{$method}; } else { $Methods{$method} = 1; } my $goodMethod = $Profile eq 'country'; if ($Profile eq 'server') { $goodMethod = exists $GoodMethods{$method}; } elsif ($Profile eq 'content') { $goodMethod = $method eq 'GET' } if ($goodMethod) { ++$cntGoodMethod; } else { push @bad, 'MT'; } # check client country code ++$cntCountry; my ($ip, $cc) = ($fields[2] =~ m|([\-\.\d]+)/?(\w+)?|); if (!defined $cc && $Profile eq 'country') { # init IP registry if needed require IP::Country::Fast; $Registry = IP::Country::Fast->new() unless $Registry; $cc = $Registry ? $Registry->inet_atocc($ip) : '??'; } $cc = '??' 
unless defined $cc; if (defined $Countries{$cc}) { ++$Countries{$cc}; } else { $Countries{$cc} = 1; } my $goodCC = $Profile ne 'country'; if ($Profile eq 'country') { $goodCC = !defined(%GoodCountries) || $GoodCountries{$cc}; } if ($goodCC) { ++$cntGoodCountry; } else { push @bad, 'CC'; } # maintain an IP:quality map if (exists $Ips{$ip}) { if ($Ips{$ip}) { if (@bad) { $Ips{$ip} = 0; } else { $Ips{$ip} = $fields[0]; } } # enable to support good IPs # else { # push @bad, 'IP'; # } } else { $Ips{$ip} = @bad ? 0 : $fields[0]; ++$cntIp; } if (@bad) { &recordBads(\@bad); } else { ++$cntGoodEntry; } # skip bad entries next if @bad; # skip bad IPs # next unless $Ips{$ip}; print(STDOUT $_, "\n"); } &reportProgress(); foreach my $sc (sort { $Statuses{$b} <=> $Statuses{$a} } keys %Statuses) { printf("SC: %-3s %6d %6.2f\n", $sc, $Statuses{$sc}, &percent($Statuses{$sc}, $cntStatus)); } print("\n"); foreach my $proto (sort { $Protos{$b} <=> $Protos{$a} } keys %Protos) { printf("PRT: %-15s %6d %6.2f\n", $proto, $Protos{$proto}, &percent($Protos{$proto}, $cntProto)); } print("\n"); printf("URI: %-5s %6d %6.2f\n", 'good', $cntGoodUri, &percent($cntGoodUri, $cntUri)); printf("URI: %-5s %6d %6.2f\n", 'bad', $cntUri-$cntGoodUri, &percent($cntUri-$cntGoodUri, $cntUri)); print("\n"); foreach my $method (sort { $Methods{$b} <=> $Methods{$a} } keys %Methods) { printf("MT: %-10s %6d %6.2f\n", $method, $Methods{$method}, &percent($Methods{$method}, $cntMethod)); } print("\n"); foreach my $cc (sort { $Countries{$b} <=> $Countries{$a} } keys %Countries) { printf("CC: %2s %6d %6.2f\n", $cc, $Countries{$cc}, &percent($Countries{$cc}, $cntCountry)); } print("\n"); printf("entry: %-5s %6d %6.2f\n", 'good', $cntGoodEntry, &percent($cntGoodEntry, $cntEntry)); printf("entry: %-5s %6d %6.2f\n", 'bad', $cntEntry-$cntGoodEntry, &percent($cntEntry-$cntGoodEntry, $cntEntry)); print("\n"); $cntGoodIp = scalar grep { $_ } values %Ips; printf("IPs: %-5s %6d %6.2f\n", 'good', $cntGoodIp, &percent($cntGoodIp, $cntIp)); printf("IPs: %-5s %6d %6.2f\n", 'bad', $cntIp-$cntGoodIp, &percent($cntIp-$cntGoodIp, $cntIp)); print("\n"); foreach my $bas (sort { $Bads{$b} <=> $Bads{$a} } keys %Bads) { printf("Bads: %-3s %6d %6.2f\n", $bas, $Bads{$bas}, &percent($Bads{$bas}, $cntBad)); } exit(0); sub recordBads { my $bads = shift; foreach my $b (@{$bads}) { $Bads{$b} = 0 unless defined $Bads{$b}; ++$Bads{$b}; ++$cntBad; } } sub reportProgress { printf("#Klines: %03d IPs: %3d SC: %6.2f PRT: %6.2f URI: %6.2f MT: %6.2f CC: %6.2f\n", $cntEntry/1000, $cntIp, &percent($cntGoodStatus, $cntStatus), &percent($cntGoodProto, $cntProto), &percent($cntGoodUri, $cntUri), &percent($cntGoodMethod, $cntMethod), &percent($cntGoodCountry, $cntCountry)); } sub percent { my ($part, $whole) = @_; $whole = $cntEntry unless defined $whole; return -1 unless $whole && defined($part); no integer; return 100. * $part/$whole; } polygraph-4.3.2/src/tools/pgl2ips.h2m0000644000175000017500000000010711335553726017052 0ustar testertester[DESCRIPTION] Calculates and prints IP addresses used in PGL workload. polygraph-4.3.2/src/tools/aka.h2m0000644000175000017500000000147211335553726016234 0ustar testertester[DESCRIPTION] Aka is used to setup large number of aliases during Polygraph experiments with many robots and servers. The aliases option specifies what alias or aliases you want to have on the given interface. Aka recognizes IP addresses in PGL address format, including dotted IP range. Aka will try to guess the subnet or you can use an explicit subnet specification. 
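For illustration only, an invocation may look like "aka eth0 10.11.1.1-250": options (if any) come first, then the interface name, then the alias specs given as PGL addresses such as the dotted IP range shown here. The interface name and addresses in this example are made up; see aka's usage line for the exact syntax.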
The number of aliases you can set depends on your OS. Moreover, some OSes may support large number of aliases (more than 1000) but with a significant performance penalty. Note that you can just put alias specs after all other options and the interface name (see aka's usage line). Aka will delete all old aliases before setting new ones. If you do not specify the new aliases, the old ones will still be deleted (handy for cleaning up after yourself). polygraph-4.3.2/src/tools/rng_test.cc0000644000175000017500000000240611546440450017213 0ustar testertester /* Web Polygraph http://www.web-polygraph.org/ * Copyright 2003-2011 The Measurement Factory * Licensed under the Apache License, Version 2.0 */ #include "base/polygraph.h" #include #include #include "xstd/Rnd.h" #include "xstd/String.h" #include "xstd/gadgets.h" #include "base/RndPermut.h" #include "base/polyVersion.h" static String ThePrgName; static int uncorr(int n) { //int exp; //const double x = INT_MAX * (2*frexp(::sin((double)n), &exp) - 1); const double x = INT_MAX * ::sin(n*(double)n); const double y = fabs(x); return (int)Min(y, (double)INT_MAX); } static int usage(std::ostream &os) { (void)PolyVersion(); os << "Usage: " << ThePrgName << " " << endl; return 0; } int main(int argc, char *argv[]) { ThePrgName = argv[0]; if (argc == 2 && String("--help") == argv[1]) return usage(cout); int sampleCount = -1; if (argc != 2 || !isInt(argv[1], sampleCount)) { usage(cerr); return -1; } RndGen rngCont; RndGen rngSeeded; RndGen rngPermut; for (int i = 1; i <= sampleCount; ++i) { rngSeeded.seed(i); rngPermut.seed(LclPermut(i)); cout << ' ' << rngCont.ltrial() << ' ' << rngSeeded.ltrial() << ' ' << rngPermut.ltrial() << ' ' << uncorr(i) << endl; } return 0; } polygraph-4.3.2/src/tools/rng-test.h2m0000644000175000017500000000011311335553726017232 0ustar testertester[DESCRIPTION] Generates random numbers using algorithms used in Polygraph. polygraph-4.3.2/src/tools/pop-test.man0000644000175000017500000000216711336340427017333 0ustar testertester.\" DO NOT MODIFY THIS FILE! It was generated by help2man 1.36. .TH POLYGRAPH-POP-TEST "1" "February 2010" "polygraph-pop-test - Web Polygraph" "User Commands" .SH NAME polygraph-pop-test \- predicts cache hit ratios for a given workload .SH SYNOPSIS .B pop-test [\fI--option \fR...] .SH OPTIONS .TP \fB\-\-help\fR list of options .TP \fB\-\-version\fR package version info .TP \fB\-\-out\fR redirect console output .TP \fB\-\-cachable\fR <%> portion of cachable replies .TP \fB\-\-public_interest\fR <%> portion of URLs shared among all robots .TP \fB\-\-recurrence\fR <%> probability of a re\-visit to a URL .TP \fB\-\-work_set_size\fR working set size .TP \fB\-\-cache_size\fR cache size .TP \fB\-\-obj_size\fR average object size .TP \fB\-\-robots\fR total number of robots to simulate .TP \fB\-\-pop_model\fR popularity model .TP \fB\-\-sim_length\fR total number of request to simulate .SH COPYRIGHT Copyright \(co 2003-2006 The Measurement Factory, Inc. 
.SH "SEE ALSO" .BR polygraph (7) \- general information and a list of programs .B \%http://www.web-polygraph.org/ \- project web site polygraph-4.3.2/src/loganalyzers/0000755000175000017500000000000011546445453016437 5ustar testertesterpolygraph-4.3.2/src/loganalyzers/ProcInfo.cc0000644000175000017500000001607611546440450020467 0ustar testertester /* Web Polygraph http://www.web-polygraph.org/ * Copyright 2003-2011 The Measurement Factory * Licensed under the Apache License, Version 2.0 */ #include "base/polygraph.h" #include "xstd/h/iomanip.h" #include "base/polyLogCats.h" #include "loganalyzers/InfoScope.h" #include "loganalyzers/SideInfo.h" #include "loganalyzers/PhaseInfo.h" #include "loganalyzers/ProcInfo.h" ProcInfo::ProcInfo(const String &aName): theName(aName), theSide(0), theLogCat(lgcEnd) { } ProcInfo::~ProcInfo() { while (thePhases.count()) delete thePhases.pop(); } const String &ProcInfo::name() const { if (theName) return theName; static String defName = "unnamed"; return defName; } int ProcInfo::logCat() const { return theLogCat; } const String &ProcInfo::benchmarkVersion() const { return theBenchmarkVersion; } const String &ProcInfo::pglCfg() const { return thePglCfg; } Time ProcInfo::startTime() const { return theStartTime; } void ProcInfo::logCat(int aLogCat) { Assert(theLogCat == lgcEnd && 0 <= aLogCat && aLogCat < lgcEnd); theLogCat = aLogCat; } void ProcInfo::side(SideInfo *aSide) { Assert(!theSide ^ !aSide); theSide = aSide; } void ProcInfo::benchmarkVersion(const String &aVersion) { Assert(!theBenchmarkVersion); theBenchmarkVersion = aVersion; } void ProcInfo::pglCfg(const String &aPglCfg) { Assert(!thePglCfg); thePglCfg = aPglCfg; } void ProcInfo::startTime(Time aStartTime) { Assert(theStartTime < 0); theStartTime = aStartTime; } const PhaseInfo &ProcInfo::execScopePhase() const { return theExecScopePhase; } const PhaseInfo &ProcInfo::allPhasesPhase() const { return theAllPhasesPhase; } int ProcInfo::repCount(const Scope &scope) const { int count = 0; for (int i = 0; i < thePhases.count(); ++i) { if (scope.hasPhase(thePhases[i]->name())) count += thePhases[i]->stats().theXactCnt; // all successfull } return count; } int ProcInfo::hitCount(const Scope &scope) const { int count = 0; for (int i = 0; i < thePhases.count(); ++i) { if (scope.hasPhase(thePhases[i]->name())) count += thePhases[i]->stats().theBasicXacts.hits().size().stats().count(); } return count; } int ProcInfo::offeredHitCount(const Scope &scope) const { int count = 0; for (int i = 0; i < thePhases.count(); ++i) { if (scope.hasPhase(thePhases[i]->name())) count += thePhases[i]->stats().theIdealHR.hits().count(); } return count; } int ProcInfo::uselessProxyValidationCount(const Scope &scope) const { int count = 0; for (int i = 0; i < thePhases.count(); ++i) { if (scope.hasPhase(thePhases[i]->name())) count += thePhases[i]->stats().theProxyValidationR.misses().aggr().count(); } return count; } BigSize ProcInfo::repVolume(const Scope &scope) const { BigSize volume = 0; for (int i = 0; i < thePhases.count(); ++i) { if (scope.hasPhase(thePhases[i]->name())) { const TmSzStat repAll = thePhases[i]->stats().reps(); volume += BigSize::Byted(repAll.size().sum()); } } return volume; } BigSize ProcInfo::hitVolume(const Scope &scope) const { BigSize volume = 0; for (int i = 0; i < thePhases.count(); ++i) { if (scope.hasPhase(thePhases[i]->name())) volume += BigSize::Byted(thePhases[i]->stats().theBasicXacts.hits().size().stats().sum()); } return volume; } BigSize ProcInfo::offeredHitVolume(const Scope &scope) const { 
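	// Like offeredHitCount() above, but accumulates the offered ("ideal") hit
	// volume: response bytes rather than response counts, summed over the
	// phases that belong to the given scope.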
BigSize volume = 0; for (int i = 0; i < thePhases.count(); ++i) { if (scope.hasPhase(thePhases[i]->name())) volume += BigSize::Byted(thePhases[i]->stats().theIdealHR.hits().size().sum()); } return volume; } BigSize ProcInfo::uselessProxyValidationVolume(const Scope &scope) const { BigSize volume = 0; for (int i = 0; i < thePhases.count(); ++i) { if (scope.hasPhase(thePhases[i]->name())) volume += BigSize::Byted(thePhases[i]->stats().theProxyValidationR.misses().aggr().size().sum()); } return volume; } AggrStat ProcInfo::lastReqByteWritten(const Scope &scope) const { AggrStat stat; for (int i = 0; i < thePhases.count(); ++i) if (scope.hasPhase(thePhases[i]->name())) stat += thePhases[i]->stats().theLastReqByteWritten.stats(); return stat; } AggrStat ProcInfo::lastReqByteRead(const Scope &scope) const { AggrStat stat; for (int i = 0; i < thePhases.count(); ++i) if (scope.hasPhase(thePhases[i]->name())) stat += thePhases[i]->stats().theLastReqByteRead.stats(); return stat; } AggrStat ProcInfo::firstRespByteWritten(const Scope &scope) const { AggrStat stat; for (int i = 0; i < thePhases.count(); ++i) if (scope.hasPhase(thePhases[i]->name())) stat += thePhases[i]->stats().theFirstRespByteWritten.stats(); return stat; } AggrStat ProcInfo::firstRespByteRead(const Scope &scope) const { AggrStat stat; for (int i = 0; i < thePhases.count(); ++i) if (scope.hasPhase(thePhases[i]->name())) stat += thePhases[i]->stats().theFirstRespByteRead.stats(); return stat; } void ProcInfo::noteIntvl(const StatIntvlRec &r, const String &phaseName) { if (!thePhases.count() || thePhases.last()->name() != phaseName) thePhases.append(new PhaseInfo()); thePhases.last()->noteIntvl(r, phaseName); } void ProcInfo::addPhase(const StatPhaseRec &r) { int foundCount = 0; for (int i = 0; i < thePhases.count(); ++i) { PhaseInfo &pi = *thePhases[i]; if (pi.name() == r.name()) { ++foundCount; if (!pi.hasStats()) { pi.notePhase(r); break; } } } if (foundCount == 0) { clog << name() << ": strange, phase '" << r.name() << "' has " << "phase statistics but no interval stats" << endl; thePhases.append(new PhaseInfo()); thePhases.last()->noteIntvl(r, r.name()); thePhases.last()->notePhase(r); } else if (foundCount > 1) { clog << name() << ": error: found " << foundCount << " phases named '" << r.name() << "', even after trying " << "to make all phase names unique; the reporter may fail or " << "mislead" << endl; } } void ProcInfo::noteEndOfLog() { for (int i = 0; i < thePhases.count(); ++i) thePhases[i]->noteEndOfLog(); } const PhaseInfo &ProcInfo::phase(int idx) const { Assert(0 <= idx && idx < thePhases.count()); return *thePhases[idx]; } const PhaseInfo *ProcInfo::hasPhase(const String &name) const { for (int i = 0; i < phaseCount(); ++i) { if (thePhases[i]->name() == name) return thePhases[i]; } return 0; } const PhaseInfo &ProcInfo::phase(const String &name) const { const PhaseInfo *p = hasPhase(name); Assert(p); return *p; } int ProcInfo::phaseCount() const { return thePhases.count(); } PhaseTrace *ProcInfo::tracePhase(const String &name) { for (int i = 0; i < phaseCount(); ++i) { if (thePhases[i]->name() == name) return thePhases[i]->startTrace(); } return 0; } void ProcInfo::checkConsistency() { if (!theBenchmarkVersion) cerr << name() << ": strange, no benchmark version found" << endl; if (!thePglCfg) cerr << name() << ": strange, no PGL configuration found" << endl; if (theStartTime < 0) cerr << name() << ": strange, no startup time could be determined" << endl; } void ProcInfo::compileStats(BlobDb &) { const Scope &scope = 
theSide->execScope(); for (int i = 0; i < phaseCount(); ++i) { if (scope.hasPhase(thePhases[i]->name())) theExecScopePhase.concat(*thePhases[i]); theAllPhasesPhase.concat(*thePhases[i]); } } polygraph-4.3.2/src/loganalyzers/LoadTraceFig.cc0000644000175000017500000000433111546440450021223 0ustar testertester /* Web Polygraph http://www.web-polygraph.org/ * Copyright 2003-2011 The Measurement Factory * Licensed under the Apache License, Version 2.0 */ #include "base/polygraph.h" #include #include "xml/XmlAttr.h" #include "loganalyzers/LoadStex.h" #include "loganalyzers/ReportBlob.h" #include "loganalyzers/BlobDb.h" #include "loganalyzers/RepOpts.h" #include "loganalyzers/PhaseInfo.h" #include "loganalyzers/PhaseTrace.h" #include "loganalyzers/LoadTraceFig.h" LoadTraceFig::LoadTraceFig(): theStex(0), thePhase(0), theTrace(0) { } void LoadTraceFig::stats(const Stex *aStex, const PhaseInfo *aPhase) { theStex = aStex; Assert(aStex); thePhase = aPhase; theTrace = &thePhase->trace(); Assert(theTrace); } void LoadTraceFig::compareWith(const Stex *stex) { Assert(thePhase); Assert(stex); theComparison.append(stex); } void LoadTraceFig::setCtrlOptions() { theLabelY1 = "rate, #/sec"; theLabelY2 = "bandwidth, Mbits/sec"; ReportTraceFigure::setCtrlOptions(); } int LoadTraceFig::createCtrlFile() { if (ReportTraceFigure::createCtrlFile() < 0) return -1; // make sure that the most interesting line is on top theComparison.append(theStex); // create plot command for (int i = 0; i < theComparison.count(); ++i) { addPlotLine(theComparison[i]->name() + " rate", theLabelY1); addPlotLine(theComparison[i]->name() + " bwidth", theLabelY2); } addedAllPlotLines(); // dump data to plot int pointCount = 0; for (int s = 0; s < theComparison.count(); ++s) { if (s) *theCtrlFile << 'e' << endl; dumpDataLines(theComparison[s], lnRate); *theCtrlFile << 'e' << endl; const int c = dumpDataLines(theComparison[s], lnBwidth); if (theStex == theComparison[s]) pointCount = c; } return pointCount; } int LoadTraceFig::dumpDataLines(const LoadStex *stex, const lineType lt) { int pointCount = 0; for (int i = 0; i < theTrace->count(); ++i) pointCount += dumpDataLine(stex, theTrace->winPos(i), theTrace->winStats(i), lt); return pointCount; } int LoadTraceFig::dumpDataLine(const LoadStex *stex, Time stamp, const StatIntvlRec &r, const lineType lt) { const double val = lt == lnRate ? stex->rate(r) : stex->bwidth(r)*8/1024/1024; dumpTime(stamp); *theCtrlFile << ' ' << val << endl; return 1; } polygraph-4.3.2/src/loganalyzers/PointTraceFig.cc0000644000175000017500000000410311546440450021432 0ustar testertester /* Web Polygraph http://www.web-polygraph.org/ * Copyright 2003-2011 The Measurement Factory * Licensed under the Apache License, Version 2.0 */ #include "base/polygraph.h" #include #include "xml/XmlAttr.h" #include "loganalyzers/PointStex.h" #include "loganalyzers/ReportBlob.h" #include "loganalyzers/BlobDb.h" #include "loganalyzers/RepOpts.h" #include "loganalyzers/PhaseInfo.h" #include "loganalyzers/PhaseTrace.h" #include "loganalyzers/PointTraceFig.h" PointTraceFig::PointTraceFig(): theStex1(0), theStex2(0), thePhase(0), theTrace(0) { } void PointTraceFig::stats(const Stex *aStex1, const Stex *aStex2, const PhaseInfo *aPhase) { thePhase = aPhase; theTrace = &thePhase->trace(); theStex1 = aStex1; theStex2 = (aStex2 && hasDataLines(aStex2)) ? 
aStex2 : 0; Assert(theStex1 && theTrace); } void PointTraceFig::setCtrlOptions() { theLabelY1 = theStex1->unit(); if (theStex2) theLabelY2 = theStex2->unit(); ReportTraceFigure::setCtrlOptions(); } int PointTraceFig::createCtrlFile() { if (ReportTraceFigure::createCtrlFile() < 0) return -1; addPlotLine(theStex1->name(), theStex1->unit()); if (theStex2) addPlotLine(theStex2->name(), theStex2->unit()); addedAllPlotLines(); int pointCount = 0; pointCount += dumpDataLines(theStex1); *theCtrlFile << 'e' << endl; if (theStex2) pointCount += dumpDataLines(theStex2); return pointCount; } bool PointTraceFig::hasDataLines(const PointStex *stex) const { for (int i = 0; i < theTrace->count(); ++i) { const StatIntvlRec &r = theTrace->winStats(i); if (stex->valueKnown(r)) return true; } return false; } int PointTraceFig::dumpDataLines(const PointStex *stex) { int pointCount = 0; for (int i = 0; i < theTrace->count(); ++i) pointCount += dumpDataLine(stex, theTrace->winPos(i), theTrace->winStats(i)); return pointCount; } int PointTraceFig::dumpDataLine(const PointStex *stex, Time stamp, const StatIntvlRec &r) { dumpTime(stamp); if (stex->valueKnown(r)) { *theCtrlFile << ' ' << stex->value(r) << endl; return 1; } else { *theCtrlFile << ' ' << '?' << endl; return 0; } } polygraph-4.3.2/src/loganalyzers/TestInfo.cc0000644000175000017500000011002311546440450020466 0ustar testertester /* Web Polygraph http://www.web-polygraph.org/ * Copyright 2003-2011 The Measurement Factory * Licensed under the Apache License, Version 2.0 */ #include "base/polygraph.h" #include "xstd/h/iomanip.h" #include "xstd/gadgets.h" #include "base/polyLogCats.h" #include "base/polyVersion.h" #include "base/AnyToString.h" #include "runtime/HttpDate.h" #include "xml/XmlAttr.h" #include "xml/XmlNodes.h" #include "xml/XmlTable.h" #include "xml/XmlSection.h" #include "xml/XmlParagraph.h" #include "xml/XmlText.h" #include "loganalyzers/ReportBlob.h" #include "loganalyzers/BlobDb.h" #include "loganalyzers/HistStex.h" #include "loganalyzers/HistogramFigure.h" #include "loganalyzers/InfoScopes.h" #include "loganalyzers/PhaseInfo.h" #include "loganalyzers/SideInfo.h" #include "loganalyzers/TestInfo.h" static XmlAttr algnLeft("align", "left"); static XmlAttr algnRight("align", "right"); TestInfo::TestInfo(const String &aLabel): theLabel(aLabel), theSides(lgcEnd) { theSides.count(lgcEnd); theSides[lgcCltSide] = new SideInfo(lgcCltSide); theSides[lgcCltSide]->test(this); theSides[lgcSrvSide] = new SideInfo(lgcSrvSide); theSides[lgcSrvSide]->test(this); theExecScope.addSide("client"); theExecScope.addSide("server"); theExecScope.name("baseline"); } TestInfo::~TestInfo() { while (theSides.count()) { if (theSides.last()) theSides.last()->test(0); delete theSides.pop(); } while (theScopes.count()) delete theScopes.pop(); } void TestInfo::execScope(const Scope &aScope) { theExecScope = aScope; } const TestInfo::Scope &TestInfo::guessExecScope() { Assert(!theExecScope); const SideInfo &side = aSide(); // find last phase with peak (highest) request rate String bestName; double peakRate = -1; String allBestName; double allPeakRate = -1; for (int i = 0; i < side.phaseCount(); ++i) { const PhaseInfo &phase = side.phase(i); const double rate = phase.availStats().reqRate(); // allow for 1% rate diff among phases with the same configured rate if (phase.hasStats()) if (!bestName || peakRate <= 1.01*rate) { peakRate = rate; bestName = phase.name(); } if (!bestName) if (!allBestName || allPeakRate <= 1.01*rate) { allPeakRate = rate; allBestName = phase.name(); } 
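		// Phases explicitly marked primary in the workload always join the
		// executive scope; the peak-rate phase tracked above is only a
		// fallback, used after this loop when no primary phase was found.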
if (phase.stats().primary) theExecScope.addPhase(phase.name()); } if (!bestName) bestName = allBestName; if (!theExecScope && Should(bestName)) { clog << "no primary or 'executive summary' phases specified, using '" << bestName << "' phase" << endl; theExecScope.addPhase(bestName); } return theExecScope; } const String &TestInfo::label() const { return theLabel; } const String &TestInfo::pglCfg() const { return thePglCfg; } Time TestInfo::startTime() const { return theStartTime; } const InfoScope &TestInfo::execScope() const { return theExecScope; } const SideInfo *TestInfo::cltSideExists() const { return side(lgcCltSide).procCount() ? theSides[lgcCltSide] : 0; } const SideInfo *TestInfo::srvSideExists() const { return side(lgcSrvSide).procCount() ? theSides[lgcSrvSide] : 0; } SideInfo &TestInfo::cltSide() { return side(lgcCltSide); } SideInfo &TestInfo::srvSide() { return side(lgcSrvSide); } SideInfo &TestInfo::side(int logCat) { Assert(logCat == lgcCltSide || logCat == lgcSrvSide); Assert(theSides[logCat]); return *theSides[logCat]; } const SideInfo &TestInfo::aSide() const { return cltSideExists() ? cltSide() : srvSide(); } const SideInfo &TestInfo::cltSide() const { return side(lgcCltSide); } const SideInfo &TestInfo::srvSide() const { return side(lgcSrvSide); } const SideInfo &TestInfo::side(int logCat) const { Assert(logCat == lgcCltSide || logCat == lgcSrvSide); Assert(theSides[logCat]); return *theSides[logCat]; } int TestInfo::scopes(InfoScopes &res) const { if (!twoSided()) return aSide().scopes(res); for (int i = 0; i < theScopes.count(); ++i) res.add(*theScopes[i]); return res.count(); } void TestInfo::checkCommonPglCfg() { if (!cltSideExists() && srvSideExists()) thePglCfg = srvSide().pglCfg(); else if (cltSideExists() && !srvSideExists()) thePglCfg = cltSide().pglCfg(); else if (cltSide().pglCfg() == srvSide().pglCfg()) thePglCfg = cltSide().pglCfg(); else cerr << label() << ": warning: client- and server-side PGL configurations differ" << endl; } void TestInfo::checkCommonBenchmarkVersion() { if (!cltSideExists() && srvSideExists()) theBenchmarkVersion = srvSide().benchmarkVersion(); else if (cltSideExists() && !srvSideExists()) theBenchmarkVersion = cltSide().benchmarkVersion(); else if (cltSide().benchmarkVersion() == srvSide().benchmarkVersion()) { theBenchmarkVersion = cltSide().benchmarkVersion(); } else { cerr << label() << ": warning: client- and server-side" << " benchmark versions differ" << endl; theBenchmarkVersion = String(); } } void TestInfo::checkCommonStartTime() { if (!cltSideExists() && srvSideExists()) theStartTime = srvSide().startTime(); else if (cltSideExists() && !srvSideExists()) theStartTime = cltSide().startTime(); else if (cltSide().startTime() >= 0 && srvSide().startTime() >= 0) { const Time diff = Max(cltSide().startTime(), srvSide().startTime()) - Min(cltSide().startTime(), srvSide().startTime()); if (diff > Time::Sec(5*60)) cerr << label() << ": warning: client- and server-side process " << "start times differ by " << diff << endl; // should we not set a start time in this case (like with thePglCfg) theStartTime = Min(cltSide().startTime(), srvSide().startTime()); } } void TestInfo::checkConsistency() { if (!cltSideExists() && !srvSideExists()) { cerr << "no client- or server-side information found in the logs, exiting" << endl << xexit; } if (!cltSideExists() || !srvSideExists()) { const String &sname = cltSideExists() ? 
cltSide().name() : srvSide().name(); theOneSideWarn = String("Only ") + sname + "-side information found in the logs."; cerr << "warning: " << theOneSideWarn << " The report will be incomplete and less accurate" << endl; } if (cltSideExists()) cltSide().checkConsistency(); if (srvSideExists()) srvSide().checkConsistency(); checkCommonBenchmarkVersion(); checkCommonPglCfg(); checkCommonStartTime(); //checkCommonPhases(); } int TestInfo::repCount(const Scope &scope) const { return cltSideExists() ? cltSide().repCount(scope) : -1; } int TestInfo::hitCount(const Scope &scope) const { return twoSided() ? cltSide().repCount(scope) - srvSide().repCount(scope) : -1; } int TestInfo::uselessProxyValidationCount(const Scope &scope) const { return srvSideExists() ? srvSide().uselessProxyValidationCount(scope) : -1; } BigSize TestInfo::repVolume(const Scope &scope) const { return cltSideExists() ? cltSide().repVolume(scope) : BigSize(); } BigSize TestInfo::hitVolume(const Scope &scope) const { return twoSided() ? cltSide().repVolume(scope) - srvSide().repVolume(scope) : BigSize(); } BigSize TestInfo::uselessProxyValidationVolume(const Scope &scope) const { return srvSideExists() ? srvSide().uselessProxyValidationVolume(scope) : BigSize(); } AggrStat TestInfo::lastReqByteWritten(const Scope &scope) const { return cltSideExists() ? cltSide().lastReqByteWritten(scope) : AggrStat(); } AggrStat TestInfo::lastReqByteRead(const Scope &scope) const { return srvSideExists() ? srvSide().lastReqByteRead(scope) : AggrStat(); } AggrStat TestInfo::firstRespByteWritten(const Scope &scope) const { return srvSideExists() ? srvSide().firstRespByteWritten(scope) : AggrStat(); } AggrStat TestInfo::firstRespByteRead(const Scope &scope) const { return cltSideExists() ? cltSide().firstRespByteRead(scope) : AggrStat(); } void TestInfo::cmplExecSumVars(BlobDb &db) { static const String tlLabel = "test label"; addMeasBlob(db, "label", theLabel, "string", tlLabel); { ostringstream buf; HttpDatePrint(buf, startTime()); buf << ends; static const String tlStartTime = "test start time"; addMeasBlob(db, "start.time", buf.str().c_str(), "string", tlStartTime); streamFreeze(buf, false); } { static const String tlTitle = "benchmark software version"; ReportBlob blob("benchmark.version" + theExecScope, tlTitle); if (theBenchmarkVersion) { blob << XmlText(theBenchmarkVersion); } else { XmlParagraph p; p << XmlText("cannot show a single benchmark version because "); p << db.ptr(BlobDb::Key("benchmark.version", execScope().oneSide("client")), XmlText("client-")); p << XmlText(" and "); p << db.ptr(BlobDb::Key("benchmark.version", execScope().oneSide("server")), XmlText("server-side")); p << XmlText(" versions differ"); blob << p; } db << blob; } { static const String tlTitle = "reporter software version"; ReportBlob blob("reporter.version" + theExecScope, tlTitle); blob << XmlText(PolyVersion()); db << blob; } } void TestInfo::cmplExecSum(BlobDb &db) { const Scope &cltScope = theExecScope.oneSide("client"); cmplExecSumTable(db, cltScope); cmplExecSumPhases(db, cltScope); } void TestInfo::cmplExecSumTable(BlobDb &db, const Scope &cltScope) { static const String tlTitle = "executive summary table"; ReportBlob blob("summary.exec.table" + theExecScope, tlTitle); blob << XmlAttr("vprimitive", "Test summary table"); XmlTable table; table << XmlAttr::Int("border", 0); { XmlTableRec tr; tr << algnLeft << XmlTableHeading("label:"); XmlTableCell cell; cell << db.include("label"); tr << cell; table << tr; } { XmlTableRec tr; tr << algnLeft << 
XmlTableHeading("throughput:"); XmlTableCell cell; cell << db.quote("rep.rate" + cltScope); cell << XmlText(" or "); cell << db.quote("rep.bwidth" + cltScope); tr << cell; table << tr; } { XmlTableRec tr; tr << algnLeft << XmlTableHeading("response time:"); XmlTableCell cell; //cell << db.quote("object.hits.rptm.mean" + cltScope); //cell << XmlText(" hit, "); cell << db.quote("rep.rptm.mean" + cltScope); cell << XmlText(" mean"); //cell << db.quote("object.misses.rptm.mean" + cltScope); //cell << XmlText(" miss"); tr << cell; table << tr; } { XmlTableRec tr; tr << algnLeft << XmlTableHeading("hit ratios:"); XmlTableCell cell; cell << db.quote("hit.ratio.obj" + theExecScope); cell << XmlText(" DHR and "); cell << db.quote("hit.ratio.byte" + theExecScope); cell << XmlText(" BHR"); tr << cell; table << tr; } { XmlTableRec tr; tr << algnLeft << XmlTableHeading("unique URLs:"); XmlTableCell cell; cell << db.quote("url.unique.count" + cltScope); cell << XmlText(" ("); cell << db.quote("url.recurrence.ratio" + cltScope); cell << XmlText(" recurrence)"); tr << cell; table << tr; } { XmlTableRec tr; tr << algnLeft << XmlTableHeading("errors:"); XmlTableCell cell; cell << db.quote("xact.error.ratio" + cltScope); cell << XmlText(" ("); cell << db.quote("xact.error.count" + cltScope); cell << XmlText(" out of "); cell << db.quote("xact.count" + cltScope); cell << XmlText(")"); tr << cell; table << tr; } { XmlTableRec tr; tr << algnLeft << XmlTableHeading("duration:"); XmlTableCell cell; cell << db.include("duration" + cltScope); tr << cell; table << tr; } { XmlTableRec tr; tr << algnLeft << XmlTableHeading("start time:"); XmlTableCell cell; cell << db.include("start.time"); tr << cell; table << tr; } { XmlTableRec tr; tr << algnLeft << XmlTableHeading("workload:"); XmlTableCell cell; cell << db.ptr("workload" + theExecScope, XmlText("available")); tr << cell; table << tr; } { XmlTableRec tr; tr << algnLeft << XmlTableHeading("Polygraph version:"); XmlTableCell cell; cell << db.include("benchmark.version" + theExecScope); tr << cell; table << tr; } { XmlTableRec tr; tr << algnLeft << XmlTableHeading("reporter version:"); XmlTableCell cell; cell << db.include("reporter.version" + theExecScope); tr << cell; table << tr; } blob << table; db << blob; } void TestInfo::cmplExecSumPhases(BlobDb &db, const Scope &cltScope) { static const String tlTitle = "executive summary phases"; ReportBlob blob("summary.exec.phases" + theExecScope, tlTitle); blob << XmlAttr("vprimitive", "Test summary phases"); XmlParagraph p; XmlText text; text.buf() << "This executive summary and baseline report statistics" << " are based on the following " << theExecScope.phases().count() << " test phase(s): "; {for (int i = 0; i < theExecScope.phases().count(); ++i) { if (i) text.buf() << ", "; text.buf() << *theExecScope.phases().item(i); }} text.buf() << ". 
The test has the following " << aSide().phaseCount() << " phase(s): "; {for (int i = 0; i < aSide().phaseCount(); ++i) { if (i) text.buf() << ", "; text.buf() << aSide().phase(i).name(); }} text.buf() << '.'; p << text; blob << p; db << blob; }
void TestInfo::cmplWorkload(BlobDb &db) { static const String tlTitle = "test workload"; ReportBlob blob(BlobDb::Key("workload", theExecScope), tlTitle); if (!thePglCfg) { XmlParagraph p; p << XmlText("Cannot show a single test workload because "); p << db.ptr(BlobDb::Key("workload.code", execScope().oneSide("client")), XmlText("client-")); p << XmlText(" and "); p << db.ptr(BlobDb::Key("workload.code", execScope().oneSide("server")), XmlText("server-side")); p << XmlText(" PGL configurations differ."); p << db.reportNote("workload", db.ptr("page.workload", XmlText("client- and server-side PGL configurations differ"))); blob << p; } { XmlSection sect("English interpretation"); sect << XmlTextTag("TBD."); blob << sect; } if (thePglCfg) cmplWorkloadBlob(blob, "", thePglCfg); else { cmplWorkloadBlob(blob, "client", cltSide().pglCfg()); cmplWorkloadBlob(blob, "server", srvSide().pglCfg()); } db << blob; }
void TestInfo::cmplWorkloadBlob(ReportBlob &blob, const String &side, const String &pglCfg) { { const String tlPglTitle((side ? side + "-side " : String()) + "PGL code"); XmlSection sect(tlPglTitle); ReportBlob code(BlobDb::Key("workload.code", side ? execScope().oneSide(side) : theExecScope), tlPglTitle); XmlTag codesample("codesample"); codesample << XmlText(pglCfg); code << codesample; sect << code; blob << sect; } }
void TestInfo::cmplHitRatioVars(BlobDb &db, const Scope &scope) { if (twoSided()) { const String sfx = BlobDb::KeySuffix(scope); const double dhr = Percent(hitCount(scope), repCount(scope)); static const String tlDhr = "document hit ratio"; addMeasBlob(db, "hit.ratio.obj" + sfx, dhr, "%", tlDhr); const double bhr = Percent(hitVolume(scope).byted(), repVolume(scope).byted()); static const String tlBhr = "byte hit ratio"; addMeasBlob(db, "hit.ratio.byte" + sfx, bhr, "%", tlBhr); } else { // XXX: put err pointer to the theOneSideWarn-based description
Should(false); } }
void TestInfo::cmplHitRatio(BlobDb &db, const Scope &scope) { static const String tlTitle = "hit ratios"; ReportBlob blob(BlobDb::Key("hit.ratio", scope), tlTitle); blob << XmlAttr("vprimitive", "Hit Ratios"); if (twoSided()) { cmplHitRatioTable(db, blob, scope); { XmlTag descr("description"); XmlTextTag p1; p1.buf() << "The hit ratios table shows measured hit " << "ratios. Hits are calculated based on client- and " << "server-side traffic comparison. Offered hits are " << "counted for 'basic' transactions only (simple HTTP GET " << "requests with '200 OK' responses). Measured hit stats " << "are based on all transactions. Thus, the 'offered' hit ratio " << "is not the same as the 'ideal' hit ratio in this context. "; descr << p1; XmlTextTag p2; p2.buf() << "Measured hit count or volume is the difference " << "between client- and server-side traffic counts or " << "volumes. " << "DHR, Document Hit Ratio, is the ratio of the total " << "number of hits to the number of all transactions. " << "BHR, Byte Hit Ratio, is the ratio of " << "the total volume (a sum of response sizes) of hits to the " << "total volume of all transactions. " << "Negative measured hit ratios are possible if server-side " << "traffic of a cache exceeds client-side traffic (e.g., " << "due to optimistic prefetching or extra freshness checks) " << "and if side measurements are out-of-sync. " << "Negative measured BHR can also be due to " << "aborted-by-robots transactions."; descr << p2; XmlParagraph p3; p3 << XmlText("A less accurate way to measure hit ratio is to " "detect hits on the client-side using custom HTTP headers. " "A hit ratio table based on client-side tricks is available "); p3 << db.ptr("hit.ratio" + scope.oneSide("client"), XmlText("elsewhere")); p3 << XmlText("."); descr << p3; blob << descr; } } else { XmlParagraph para; para << XmlText(theOneSideWarn); if (cltSideExists()) { para << XmlText(" See "); para << db.ptr("summary" + theExecScope.oneSide("client"), XmlText("client-side")); para << XmlText(" information for hit ratio estimations (if any)"); } else { para << XmlText(" No hit ratio measurements"); para << XmlText(" can be derived from server-side logs."); } blob << para; } db << blob; }
void TestInfo::cmplHitRatioTable(BlobDb &db, XmlTag &parent, const Scope &scope) { Assert(twoSided()); static const String tlTitle = "hit ratio table"; ReportBlob blob(BlobDb::Key("hit.ratio.table", scope), tlTitle); XmlTable table; table << XmlAttr::Int("border", 1) << XmlAttr::Int("cellspacing", 1); { XmlTableRec tr; tr << XmlTableHeading("Hit Ratios"); XmlTableHeading dhr("DHR"); dhr << XmlTag("br") << XmlText("(%)"); tr << dhr; XmlTableHeading bhr("BHR"); bhr << XmlTag("br") << XmlText("(%)"); tr << bhr; table << tr; } { XmlTableRec tr; tr << algnLeft << XmlTableHeading("measured"); XmlTableCell dhr; dhr << algnRight << db.quote("hit.ratio.obj" + scope); tr << dhr; XmlTableCell bhr; bhr << algnRight << db.quote("hit.ratio.byte" + scope); tr << bhr; table << tr; } blob << table; db << blob; parent << blob; }
void TestInfo::cmplCheapProxyValidationVars(BlobDb &db, const Scope &scope) { if (twoSided()) { const String sfx = BlobDb::KeySuffix(scope); const double dhr = Percent(hitCount(scope) + uselessProxyValidationCount(scope), repCount(scope)); static const String tlDhr = "cheap proxy validation document hit ratio"; addMeasBlob(db, "cheap_proxy_validation.ratio.obj" + sfx, dhr, "%", tlDhr); const double bhr = Percent(hitVolume(scope).byted() + uselessProxyValidationVolume(scope).byted(), repVolume(scope).byted()); static const String tlBhr = "cheap proxy validation byte hit ratio"; addMeasBlob(db, "cheap_proxy_validation.ratio.byte" + sfx, bhr, "%", tlBhr); } else { // XXX: put err pointer to the theOneSideWarn-based description
Should(false); } }
void TestInfo::cmplCheapProxyValidation(BlobDb &db, const Scope &scope) { static const String tlTitle = "cheap proxy validation"; ReportBlob blob(BlobDb::Key("cheap_proxy_validation.ratio", scope), tlTitle); blob << XmlAttr("vprimitive", "Cheap proxy validation"); if (twoSided()) { cmplCheapProxyValidationTable(db, blob, scope); { XmlTag descr("description"); XmlTextTag p1; p1.buf() << "The 'Cheap proxy validation' table is similar to the " "'Hit Ratios' table. But it reflects hit ratios " "that would have been observed if successful " "(i.e., useless or in vain) validations initiated " "by the proxy were so 'cheap' that they could have " "been ignored rather than decrease the measured hit ratio. " "The following formula is used to calculate " "cheap proxy validation hit ratio:"; descr << p1; XmlTextTag p2; p2.buf() << "cheap_proxy_validation_hits = " "all_client_side_responses " "- all_server_side_responses " "+ useless_server_side_proxy_validation_responses"; descr << p2; XmlTextTag p3; p3.buf() << "cheap_proxy_validation_HR = " "cheap_proxy_validation_hits " "/ all_client_side_responses"; descr << p3; const String all_proxy_validations_key = "object.all_proxy_validations" + scope.oneSide("server"); if (db.has(all_proxy_validations_key)) { XmlParagraph p4; p4 << XmlText("More details about proxy validation " "effectiveness are available ") << db.ptr(all_proxy_validations_key, XmlText("elsewhere")) << XmlText("."); descr << p4; } blob << descr; } } else { XmlParagraph para; para << XmlText(theOneSideWarn) << XmlText("No cheap proxy validation measurements" " can be derived from one side logs."); blob << para; } db << blob; }
void TestInfo::cmplCheapProxyValidationTable(BlobDb &db, XmlTag &parent, const Scope &scope) { Assert(twoSided()); static const String tlTitle = "cheap proxy validation table"; ReportBlob blob(BlobDb::Key("cheap_proxy_validation.ratio.table", scope), tlTitle); XmlTable table; table << XmlAttr::Int("border", 1) << XmlAttr::Int("cellspacing", 1); { XmlTableRec tr; tr << XmlTableHeading("Cheap proxy validation"); XmlTableHeading dhr("DHR"); dhr << XmlTag("br") << XmlText("(%)"); tr << dhr; XmlTableHeading bhr("BHR"); bhr << XmlTag("br") << XmlText("(%)"); tr << bhr; table << tr; } { XmlTableRec tr; tr << algnLeft << XmlTableHeading("measured"); XmlTableCell dhr; dhr << algnRight << db.quote("cheap_proxy_validation.ratio.obj" + scope); tr << dhr; XmlTableCell bhr; bhr << algnRight << db.quote("cheap_proxy_validation.ratio.byte" + scope); tr << bhr; table << tr; } blob << table; db << blob; parent << blob; }
void TestInfo::cmplByteLatencyVars(BlobDb &db, const Scope &scope) { if (twoSided()) { const String sfx = BlobDb::KeySuffix(scope); const AggrStat last_req_byte_written(lastReqByteWritten(scope)); const AggrStat last_req_byte_read(lastReqByteRead(scope)); const AggrStat first_resp_byte_written(firstRespByteWritten(scope)); const AggrStat first_resp_byte_read(firstRespByteRead(scope)); addMeasBlob(db, "latency.request.last_byte_written.mean" + sfx, last_req_byte_written.mean(), "msec", "mean last request byte written latency"); addMeasBlob(db, "latency.request.last_byte_written.min" + sfx, last_req_byte_written.min(), "msec", "min last request byte written latency"); addMeasBlob(db, "latency.request.last_byte_written.max" + sfx, last_req_byte_written.max(), "msec", "max last request byte written latency"); addMeasBlob(db, "latency.request.last_byte_read.mean" + sfx, last_req_byte_read.mean(), "msec", "mean last request byte read latency"); addMeasBlob(db, "latency.request.last_byte_read.min" + sfx, last_req_byte_read.min(), "msec", "min last request byte read latency"); addMeasBlob(db, "latency.request.last_byte_read.max" + sfx, last_req_byte_read.max(), "msec", "max last request byte read latency"); addMeasBlob(db, "latency.response.first_byte_written.mean" + sfx, first_resp_byte_written.mean(), "msec", "mean first response byte written latency"); addMeasBlob(db, "latency.response.first_byte_written.min" + sfx, first_resp_byte_written.min(), "msec", "min first response byte written latency"); addMeasBlob(db, "latency.response.first_byte_written.max" + sfx, first_resp_byte_written.max(), "msec", "max first response byte written latency"); addMeasBlob(db, "latency.response.first_byte_read.mean" + sfx, first_resp_byte_read.mean(), "msec", "mean first response byte read latency"); addMeasBlob(db, "latency.response.first_byte_read.min" + sfx, first_resp_byte_read.min(), "msec", "min first response byte read latency"); addMeasBlob(db, "latency.response.first_byte_read.max" + sfx, first_resp_byte_read.max(), "msec", "max first response byte read latency"); } else { // XXX: put err pointer to the theOneSideWarn-based description
Should(false); } }
void TestInfo::cmplByteLatency(BlobDb &db, const Scope &scope) { static const String tlTitle = "latency"; ReportBlob blob(BlobDb::Key("latency", scope), tlTitle); blob << XmlAttr("vprimitive", "Byte Latency"); if (twoSided()) { cmplByteLatencyTable(db, blob, scope); cmplByteLatencyHist(db, blob, scope); { XmlTag descr("description"); XmlTextTag p1; p1.buf() << "The 'first response byte' latency is the " "time it took Polygraph to read (or write) the " "first response byte. The timer starts when the " "transaction starts. The timer stops when the " "server writes the first response byte to the " "TCP socket or the client reads the first " "response byte from the socket."; descr << p1; XmlTextTag p2; p2.buf() << "Similarly, the 'last request byte' latency " "is the time it took Polygraph to read (or " "write) the last request byte. The timer starts " "when the transaction starts. The timer stops " "when the client writes the last request byte " "or the server reads the last request byte."; descr << p2; XmlTextTag p3; p3.buf() << "Usually, more than one byte is read or " "written in one I/O operation, but a " "single-byte I/O is sufficient to stop these " "latency timers. Only HTTP-level bytes can stop " "the timers. Low-level content exchanged during " "TCP or SSL handshakes and negotiations has no " "effect. 
These stats are collected for basic " "transactions only."; descr << p3; blob << descr; } } else { XmlParagraph para; para << XmlText(theOneSideWarn) << XmlText("No latency measurements" " can be derived from one side logs."); blob << para; } db << blob; } void TestInfo::cmplByteLatencyTable(BlobDb &db, XmlTag &parent, const Scope &scope) { Assert(twoSided()); static const String tlTitle = "byte latency table"; ReportBlob blob(BlobDb::Key("latency.table", scope), tlTitle); XmlTable table; table << XmlAttr::Int("border", 1) << XmlAttr::Int("cellspacing", 1); { XmlTableRec tr; tr << XmlTableHeading("Byte Latency", 1, 2); tr << XmlTableHeading("Written (msec)", 3, 1); tr << XmlTableHeading("Read (msec)", 3, 1); table << tr; } { XmlTableRec tr; XmlNodes nodes; nodes << XmlTableHeading("Min"); nodes << XmlTableHeading("Mean"); nodes << XmlTableHeading("Max"); tr << nodes; tr << nodes; table << tr; } { XmlTableRec tr; tr << algnLeft << XmlTableHeading("Last request byte"); XmlTableCell wmin; wmin << algnRight << db.quote("latency.request.last_byte_written.min" + scope); tr << wmin; XmlTableCell wmean; wmean << algnRight << db.quote("latency.request.last_byte_written.mean" + scope); tr << wmean; XmlTableCell wmax; wmax << algnRight << db.quote("latency.request.last_byte_written.max" + scope); tr << wmax; XmlTableCell rmin; rmin << algnRight << db.quote("latency.request.last_byte_read.min" + scope); tr << rmin; XmlTableCell rmean; rmean << algnRight << db.quote("latency.request.last_byte_read.mean" + scope); tr << rmean; XmlTableCell rmax; rmax << algnRight << db.quote("latency.request.last_byte_read.max" + scope); tr << rmax; table << tr; } { XmlTableRec tr; tr << algnLeft << XmlTableHeading("First response byte"); XmlTableCell wmin; wmin << algnRight << db.quote("latency.response.first_byte_written.min" + scope); tr << wmin; XmlTableCell wmean; wmean << algnRight << db.quote("latency.response.first_byte_written.mean" + scope); tr << wmean; XmlTableCell wmax; wmax << algnRight << db.quote("latency.response.first_byte_written.max" + scope); tr << wmax; XmlTableCell rmin; rmin << algnRight << db.quote("latency.response.first_byte_read.min" + scope); tr << rmin; XmlTableCell rmean; rmean << algnRight << db.quote("latency.response.first_byte_read.mean" + scope); tr << rmean; XmlTableCell rmax; rmax << algnRight << db.quote("latency.response.first_byte_read.max" + scope); tr << rmax; table << tr; } blob << table; db << blob; parent << blob; } void TestInfo::cmplByteLatencyHist(BlobDb &db, XmlTag &parent, const Scope &scope) { Assert(twoSided()); static const String tlTitle = "byte latency histogram"; ReportBlob blob(BlobDb::Key("latency.histogram", scope), tlTitle); { TimeHistStex stex1("latency.request.last_byte_written.stex" + scope, "last byte written", &StatPhaseRec::theLastReqByteWritten); TimeHistStex stex2("latency.response.first_byte_read.stex" + scope, "first byte read", &StatPhaseRec::theFirstRespByteRead); HistogramFigure fig; fig.configure("latency.histogram.client_figure" + scope, "Client Byte Latency Histogram"); fig.stats(&stex1, &cltSide().phase(scope)); fig.compareWith(&stex2); const String &figKey = fig.plot(db).key(); blob << db.include(figKey); } { TimeHistStex stex1("latency.request.last_byte_read.stex" + scope, "last byte read", &StatPhaseRec::theLastReqByteRead); TimeHistStex stex2("latency.response.first_byte_written.stex" + scope, "first byte written", &StatPhaseRec::theFirstRespByteWritten); HistogramFigure fig; fig.configure("latency.histogram.server_figure" + scope, "Server 
Byte Latency Histogram"); fig.stats(&stex1, &srvSide().phase(scope)); fig.compareWith(&stex2); const String &figKey = fig.plot(db).key(); blob << db.include(figKey); } parent << blob; } void TestInfo::cmplBaseStats(BlobDb &db, const Scope &scope) { static const String tlTitle = "baseline stats"; ReportBlob blob(BlobDb::Key("baseline", scope), tlTitle); blob << db.quote(BlobDb::Key("load", scope)); blob << db.quote(BlobDb::Key("hit.ratio", scope)); db << blob; } void TestInfo::cmplTraffic(BlobDb &db, const Scope &scope) { static const String tlTitle = "test traffic stats"; ReportBlob blob("traffic" + scope, tlTitle); XmlTag title("title"); title << XmlText("Traffic rates, counts, and volumes"); blob << title; blob << XmlTextTag("This information is based on the client-side measurements."); blob << db.quote(BlobDb::Key("load", scope.oneSide("client"))); blob << db.quote(BlobDb::Key("ssl.load", scope.oneSide("client"))); blob << db.quote(BlobDb::Key("ftp.load", scope.oneSide("client"))); blob << db.quote(BlobDb::Key("reply_stream.table", scope.oneSide("client"))); db << blob; } void TestInfo::cmplRptm(BlobDb &db, const Scope &scope) { static const String tlTitle = "test response time stats"; ReportBlob blob("rptm" + scope, tlTitle); XmlTag title("title"); title << XmlText("Response times"); blob << title; blob << XmlTextTag("This information is based on the client-side measurements."); blob << db.quote(BlobDb::Key("rptm.trace", scope.oneSide("client"))); blob << db.quote("reply_object.table" + scope.oneSide("client")); blob << db.quote(BlobDb::Key("latency", scope)); db << blob; } void TestInfo::cmplSavings(BlobDb &db, const Scope &scope) { static const String tlTitle = "cache effectiveness"; ReportBlob blob("savings" + scope, tlTitle); XmlTag title("title"); title << XmlText("Savings"); blob << title; blob << db.quote("hit.ratio" + scope); blob << db.quote("hit.ratio" + scope.oneSide("client")); blob << db.quote("cheap_proxy_validation.ratio" + scope); db << blob; } void TestInfo::cmplLevels(BlobDb &db, const Scope &scope) { static const String tlTitle = "test transaction concurrency and population levels"; ReportBlob blob("levels" + scope, tlTitle); XmlTag title("title"); title << XmlText("Concurrency levels and robot population"); blob << title; blob << XmlTextTag("This information is based on the client-side measurements."); const InfoScope cltScope = scope.oneSide("client"); { XmlSection s("concurrent HTTP/TCP connections"); //s << db.quote("conn.level.fig" + cltScope); s << db.quote("conn.level.table" + cltScope); blob << s; } { XmlSection s("population level"); //s << db.quote("populus.level.fig" + cltScope); s << db.quote("populus.level.table" + cltScope); blob << s; } { XmlSection s("concurrent HTTP transactions"); //s << db.quote("xact.level.fig" + cltScope); s << db.quote("xact.level.table" + cltScope); blob << s; } db << blob; } void TestInfo::cmplAuthentication(BlobDb &db, const Scope &scope) { static const String tlTitle = "test authentication stats"; ReportBlob blob("authentication" + scope, tlTitle); XmlTag title("title"); title << XmlText("Authentication"); blob << title; blob << XmlTextTag("This information is based on the client-side measurements."); const InfoScope cltScope = scope.oneSide("client"); { XmlTag compoundReplyStreamTitle("h3"); compoundReplyStreamTitle << XmlText("Compound reply traffic stream table"); blob << compoundReplyStreamTitle; blob << db.quote(BlobDb::Key("compound.reply_stream.table", cltScope)); } { XmlTag compoundRequestStreamTitle("h3"); 
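// This and each following subsection of the authentication blob append an "h3"
// heading (built from XmlTag/XmlText) and then quote the matching client-side
// table blob from the report database, e.g.
// db.quote(BlobDb::Key("compound.request_stream.table", cltScope)).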
compoundRequestStreamTitle << XmlText("Compound request traffic stream table"); blob << compoundRequestStreamTitle; blob << db.quote(BlobDb::Key("compound.request_stream.table", cltScope)); } { XmlTag compoundReplyObjectTitle("h3"); compoundReplyObjectTitle << XmlText("Compound reply object kind table"); blob << compoundReplyObjectTitle; blob << db.quote(BlobDb::Key("compound.reply_object.table", cltScope)); } { XmlTag compoundRequestObjectTitle("h3"); compoundRequestObjectTitle << XmlText("Compound request object kind table"); blob << compoundRequestObjectTitle; blob << db.quote(BlobDb::Key("compound.request_object.table", cltScope)); } { XmlTag authStreamTitle("h3"); authStreamTitle << XmlText("Authentication traffic stream table"); blob << authStreamTitle; blob << db.quote(BlobDb::Key("auth.stream.table", cltScope)); } { XmlTag authObjectTitle("h3"); authObjectTitle << XmlText("Authentication object kind table"); blob << authObjectTitle; blob << db.quote(BlobDb::Key("auth.object.table", cltScope)); } db << blob; } void TestInfo::cmplErrors(BlobDb &db, const Scope &scope) { static const String tlTitle = "test errors"; ReportBlob blob("errors" + scope, tlTitle); XmlTag title("title"); title << XmlText("Errors"); blob << title; { XmlSection s("client-side errors"); s << db.include("errors.table" + scope.oneSide("client")); blob << s; } { XmlSection s("server-side errors"); s << db.include("errors.table" + scope.oneSide("server")); blob << s; } db << blob; } void TestInfo::cmplNotes(BlobDb &db) { static const String tlTitle = "report notes"; ReportBlob blob("report_notes", tlTitle); XmlSearchRes res; if (db.blobs().selByAttrName("report_note", res)) { XmlTag list("ol"); for (int i = 0; i < res.count(); ++i) { Assert(res[i]->attrs()->value("report_note_number") == AnyToString(i + 1)); list << db.include(res[i]->attrs()->value("key")); } blob << list; } db << blob; } void TestInfo::cmplSynonyms(BlobDb &db, const Scope &scope) { addLink(db, BlobDb::Key("load", scope), BlobDb::Key("load", scope.oneSide("client"))); addLink(db, BlobDb::Key("load.table", scope), BlobDb::Key("load.table", scope.oneSide("client"))); addLink(db, BlobDb::Key("reply_stream.table", scope), BlobDb::Key("reply_stream.table", scope.oneSide("client"))); } void TestInfo::compileStats(BlobDb &db) { if (!theExecScope) guessExecScope(); if (cltSideExists()) cltSide().compileStats(db); else SideInfo::CompileEmptyStats(db, execScope().oneSide("client")); if (srvSideExists()) srvSide().compileStats(db); else SideInfo::CompileEmptyStats(db, execScope().oneSide("server")); cmplExecSumVars(db); cmplExecSum(db); cmplWorkload(db); // build theScopes array theScopes.append(new Scope(execScope())); if (twoSided()) { Scope *allScope = new Scope; allScope->name("all phases"); theScopes.append(allScope); for (int i = 0; i < cltSide().phaseCount(); ++i) { const String &pname = cltSide().phase(i).name(); // include common phases only if (!srvSide().scope().hasPhase(pname)) continue; theScopes.append(new Scope(theExecScope.onePhase(pname))); theScopes.last()->name(pname); allScope->add(*theScopes.last()); } } for (int s = 0; s < theScopes.count(); ++s) { const Scope &scope = *theScopes[s]; cmplSynonyms(db, scope); cmplHitRatioVars(db, scope); cmplHitRatio(db, scope); cmplCheapProxyValidationVars(db, scope); cmplCheapProxyValidation(db, scope); cmplByteLatencyVars(db, scope); cmplByteLatency(db, scope); cmplBaseStats(db, scope); cmplTraffic(db, scope); cmplRptm(db, scope); cmplSavings(db, scope); cmplLevels(db, scope); cmplAuthentication(db, 
scope); cmplErrors(db, scope); } cmplNotes(db); } polygraph-4.3.2/src/loganalyzers/CompOpts.cc0000644000175000017500000000201111546440450020474 0ustar testertester /* Web Polygraph http://www.web-polygraph.org/ * Copyright 2003-2011 The Measurement Factory * Licensed under the Apache License, Version 2.0 */ #include "base/polygraph.h" #include "loganalyzers/CompOpts.h" CompOpts TheCompOpts; CompOpts::CompOpts(): theHelpOpt(this, "help", "list of options"), theVersOpt(this, "version", "package version info"), theDelta(this, "delta ", "maximum value difference to ignore", 0.0), thePhases(this, "phases ", "names of phases for executive summary"), theCompDir(this, "report_dir ", "report's root directory"), theTmpDir(this, "tmp_dir ", "temporary dir", "/tmp") { } bool CompOpts::validate() const { return OptGrp::validate(); } ostream &CompOpts::printAnonym(ostream &os) const { return os << " ..."; } bool CompOpts::parseAnonym(const Array &opts) { for (int i = 0 ; i < opts.count(); ++i) theReports.append(new String(opts[i])); return theReports.count() > 0; } polygraph-4.3.2/src/loganalyzers/ReportBlob.cc0000644000175000017500000000102511546440450021006 0ustar testertester /* Web Polygraph http://www.web-polygraph.org/ * Copyright 2003-2011 The Measurement Factory * Licensed under the Apache License, Version 2.0 */ #include "base/polygraph.h" #include "xml/XmlAttr.h" #include "loganalyzers/ReportBlob.h" const String ReportBlob::NilTitle = "_nil"; ReportBlob::ReportBlob(const Key &aKey, const String &aTitle): XmlTag("report_blob"), theKey(aKey) { append(XmlAttr("key", aKey)); append(XmlAttr("title", aTitle)); } XmlNode *ReportBlob::clone() const { return new ReportBlob(*this); } polygraph-4.3.2/src/loganalyzers/SizeHistFig.h0000644000175000017500000000101211546440450020762 0ustar testertester /* Web Polygraph http://www.web-polygraph.org/ * Copyright 2003-2011 The Measurement Factory * Licensed under the Apache License, Version 2.0 */ #ifndef POLYGRAPH__LOGANALYZERS_SIZEHISTFIG_H #define POLYGRAPH__LOGANALYZERS_SIZEHISTFIG_H #include "loganalyzers/TmSzHistFig.h" // creates siae distribution figure based on phase stats class SizeHistFig: public TmSzHistFig { public: SizeHistFig(); protected: virtual const Histogram *extractHist(const Stex *stex, const PhaseInfo &info) const; }; #endif polygraph-4.3.2/src/loganalyzers/PointTraceFig.h0000644000175000017500000000202611546440450021276 0ustar testertester /* Web Polygraph http://www.web-polygraph.org/ * Copyright 2003-2011 The Measurement Factory * Licensed under the Apache License, Version 2.0 */ #ifndef POLYGRAPH__LOGANALYZERS_POINTTRACEFIG_H #define POLYGRAPH__LOGANALYZERS_POINTTRACEFIG_H #include "xstd/String.h" #include "loganalyzers/ReportTraceFigure.h" class PointStex; class PhaseInfo; class PhaseTrace; class StatIntvlRec; // creates a trace figure based on values extracted by supplied PointStexes class PointTraceFig: public ReportTraceFigure { public: typedef PointStex Stex; public: PointTraceFig(); void stats(const Stex *aStex1, const Stex *aStex2, const PhaseInfo *phase); protected: virtual int createCtrlFile(); virtual void setCtrlOptions(); bool hasDataLines(const PointStex *stex) const; int dumpDataLines(const PointStex *stex); int dumpDataLine(const PointStex *stex, Time stamp, const StatIntvlRec &r); protected: const Stex *theStex1; const Stex *theStex2; const PhaseInfo *thePhase; const PhaseTrace *theTrace; }; #endif polygraph-4.3.2/src/loganalyzers/LoadStex.cc0000644000175000017500000000101211546440450020453 0ustar testertester /* Web 
Polygraph http://www.web-polygraph.org/ * Copyright 2003-2011 The Measurement Factory * Licensed under the Apache License, Version 2.0 */ #include "base/polygraph.h" #include "xstd/gadgets.h" #include "base/StatIntvlRec.h" #include "loganalyzers/LoadStex.h" LoadStex::LoadStex(const String &aKey, const String &aName): theKey(aKey), theName(aName) { } double LoadStex::perDuration(double meas, const StatIntvlRec &rec) const { return rec.theDuration > 0 ? Ratio(meas, rec.theDuration.secd()) : -1; } polygraph-4.3.2/src/loganalyzers/ReportTraceFigure.h0000644000175000017500000000106711546440450022200 0ustar testertester /* Web Polygraph http://www.web-polygraph.org/ * Copyright 2003-2011 The Measurement Factory * Licensed under the Apache License, Version 2.0 */ #ifndef POLYGRAPH__LOGANALYZERS_REPORTTRACEFIGURE_H #define POLYGRAPH__LOGANALYZERS_REPORTTRACEFIGURE_H #include "xstd/String.h" #include "loganalyzers/ReportFigure.h" // base class for all trace figures class ReportTraceFigure: public ReportFigure { public: void globalStart(Time aStart); void setCtrlOptions(); protected: void dumpTime(Time stamp); protected: Time theGlobalStart; }; #endif polygraph-4.3.2/src/loganalyzers/InfoScopes.h0000644000175000017500000000146111546440450020652 0ustar testertester /* Web Polygraph http://www.web-polygraph.org/ * Copyright 2003-2011 The Measurement Factory * Licensed under the Apache License, Version 2.0 */ #ifndef POLYGRAPH__LOGANALYZERS_INFOSCOPES_H #define POLYGRAPH__LOGANALYZERS_INFOSCOPES_H #include "xstd/String.h" #include "xstd/Map.h" class InfoScope; // manages a collection of scopes indexed by their image class InfoScopes { public: InfoScopes(); ~InfoScopes(); int count() const { return theScopes.count(); } const InfoScope *scope(int idx) const { return theScopes[idx]; } const InfoScope *operator [](int idx) const { return scope(idx); } const InfoScope *find(const String &image) const; void add(const InfoScope &scope); void absorb(InfoScope *&scope); protected: Array theScopes; Map theIndex; }; #endif polygraph-4.3.2/src/loganalyzers/TmSzHistFig.cc0000644000175000017500000000246011546440450021113 0ustar testertester /* Web Polygraph http://www.web-polygraph.org/ * Copyright 2003-2011 The Measurement Factory * Licensed under the Apache License, Version 2.0 */ #include "base/polygraph.h" #include "base/Histogram.h" #include "loganalyzers/Stex.h" #include "loganalyzers/PhaseInfo.h" #include "loganalyzers/HistStex.h" #include "loganalyzers/TmSzHistFig.h" // converts Stex to HistStex class TmSzHistStex: public HistStex { public: TmSzHistStex(const Stex *stex, const String aUnit, const Histogram *aHist): HistStex(stex->key(), stex->name(), aUnit), theHist(aHist) {} virtual const Histogram *value(const PhaseInfo &) const { return theHist; } protected: const Histogram *theHist; // authoritative source of information }; TmSzHistFig::TmSzHistFig(const String &aUnit): theUnit(aUnit) { theLabelX1 = theUnit; } TmSzHistFig::~TmSzHistFig() { while (theComparison.count() > 0) delete theComparison.pop(); theStex = 0; } void TmSzHistFig::stats(const Stex *aStex, const PhaseInfo *aPhase) { HistStex *core = new TmSzHistStex(aStex, theUnit, extractHist(aStex, *aPhase)); HistogramFigure::stats(core, aPhase); // collect comparison stexes to plot for (const Stex *stex = aStex->parent(); stex; stex = stex->parent()) { compareWith(new TmSzHistStex(stex, theUnit, extractHist(stex, *aPhase))); } } polygraph-4.3.2/src/loganalyzers/Sample.cc0000644000175000017500000001070211546440450020157 0ustar testertester /* Web 
Polygraph http://www.web-polygraph.org/ * Copyright 2003-2011 The Measurement Factory * Licensed under the Apache License, Version 2.0 */ #include "base/polygraph.h" #include "xstd/gadgets.h" #include "loganalyzers/Sample.h" #include "loganalyzers/Panorama.h" #include "loganalyzers/Formatter.h" String CompositeSample::TheId = "C"; String NumberSample::TheId = "N"; String TextSample::TheId = "T"; double NumberSample::TheDelta = 0;
void Sample::propagateLocation(const String &aLocation) { location(aLocation); } bool Sample::similar(const Sample &s) const { return typeId() == s.typeId() && key() == s.key() ? selfSimilar(s) : false; } void Sample::copy(const Sample &s) { key(s.key()); title(s.title()); location(s.location()); } void Sample::reportDifferences(const Sample &, Formatter &form) const { form.addNothing(); }
CompositeSample::~CompositeSample() { for (int i = 0; i < theKids.count(); ++i) delete theKids.pop(); } void CompositeSample::propagateLocation(const String &aLocation) { Sample::propagateLocation(aLocation); for (int i = 0; i < theKids.count(); ++i) theKids[i]->propagateLocation(aLocation); } Sample *CompositeSample::clone() const { CompositeSample *c = new CompositeSample; c->copy(*this); return c; } void CompositeSample::copy(const CompositeSample &c) { Sample::copy(c); theKids.reset(); for (int i = 0; i < c.theKids.count(); ++i) add(c.theKids[i]->clone()); } void CompositeSample::print(ostream &os) const { os << this << " {title: " << title() << endl; for (int i = 0; i < theKids.count(); ++i) theKids[i]->print(os); os << this << " }title: " << title() << endl; } void CompositeSample::add(Sample *kid) { theKids.append(kid); } Panorama *CompositeSample::makePanoramaSkeleton() const { Panorama *p = new Panorama(); p->key(key()); p->title(title()); for (int i = 0; i < theKids.count(); ++i) { p->add(theKids[i]->makePanoramaSkeleton()); } return p; } void CompositeSample::fillPanorama(Panorama *p) const { for (int i = 0; i < theKids.count(); ++i) { Sample *kid = theKids[i]; Panorama *slice = p->findSlice(kid->key(), i); if (!slice) { slice = kid->makePanoramaSkeleton(); p->add(slice); } theKids[i]->fillPanorama(slice); } } bool CompositeSample::selfSimilar(const Sample &s) const { const CompositeSample &c = static_cast<const CompositeSample&>(s); if (theKids.count() != c.theKids.count()) return false; for (int i = 0; i < theKids.count(); ++i) { if (!theKids[i]->similar(*c.theKids[i])) return false; } return true; }
void AtomSample::print(ostream &os) const { os << theImage; } void AtomSample::setImage(const String &image) { theImage = image; } Panorama *AtomSample::makePanoramaSkeleton() const { Panorama *p = new PanAtom(); p->key(key()); p->title(title()); return p; } bool AtomSample::selfSimilar(const Sample &s) const { const AtomSample &a = static_cast<const AtomSample&>(s); return theImage == a.theImage; // no fuzziness allowed
}
void TextSample::propagateLocation(const String &aLocation) { AtomSample::propagateLocation(aLocation); if (key() == "label") Panorama::LabelLocation(theLocation, image()); } void TextSample::fillPanorama(Panorama *p) const { p->add(clone()); } Sample *TextSample::clone() const { return new TextSample(*this); }
NumberSample::NumberSample(): theValue(-1) { } void NumberSample::setImage(const String &image) { AtomSample::setImage(image); if (!Should(isNum(image.cstr(), theValue))) cerr << "warning: expected a number, found: '" << image << "'" << endl; } void NumberSample::fillPanorama(Panorama *p) const { p->add(clone()); } bool NumberSample::selfSimilar(const Sample &s) const { const 
NumberSample &n = static_cast(s); if (TheDelta < 0) return true; // different no matter what if (image() == n.image()) return true; // identical if (TheDelta == 0) return false; // not identical const Value diff = Abs(theValue - n.theValue); return diff <= TheDelta*Min(Abs(theValue), Abs(n.theValue)); } Sample *NumberSample::clone() const { return new NumberSample(*this); } void NumberSample::reportDifferences(const Sample &s, Formatter &form) const { if (s.typeId() != TheId) { Sample::reportDifferences(s, form); return; } const NumberSample &n = (const NumberSample&)s; const Value ref = Abs(n.theValue); if (ref < 1e-6) { form.addText("infinity"); return; } const Value diff = theValue - n.theValue; form.addInteger((int)(100.*diff/ref + 0.5), "%", true); } polygraph-4.3.2/src/loganalyzers/PhaseInfo.cc0000644000175000017500000000464511546440450020623 0ustar testertester /* Web Polygraph http://www.web-polygraph.org/ * Copyright 2003-2011 The Measurement Factory * Licensed under the Apache License, Version 2.0 */ #include "base/polygraph.h" #include "loganalyzers/PhaseTrace.h" #include "loganalyzers/PhaseInfo.h" PhaseInfo::PhaseInfo(): theIntvlCount(0), theTrace(0), gotPhaseStats(false) { } PhaseInfo::~PhaseInfo() { delete theTrace; } const String PhaseInfo::name() const { return thePhase.theName; } const StatPhaseRec *PhaseInfo::hasStats() const { return gotPhaseStats ? &thePhase : 0; } const StatIntvlRec &PhaseInfo::availStats() const { Assert(theIntvl.theDuration >= 0); return hasStats() ? (const StatIntvlRec &)stats() : theIntvl; } void PhaseInfo::merge(const PhaseInfo &phase) { if (!thePhase.theName) thePhase.theName = phase.thePhase.theName; if (phase.hasStats()) { thePhase.merge(phase.stats()); gotPhaseStats = true; } theIntvl.merge(phase.theIntvl); theIntvlCount = Max(theIntvlCount, phase.theIntvlCount); Assert(phase.theTrace); if (!theTrace) { theTrace = new PhaseTrace; theTrace->concat(phase.trace()); } else { theTrace->merge(phase.trace()); } } void PhaseInfo::concat(const PhaseInfo &phase) { if (!thePhase.theName) thePhase.theName = phase.thePhase.theName; if (phase.hasStats()) { thePhase.concat(phase.stats()); gotPhaseStats = true; } theIntvl.concat(phase.theIntvl); theIntvlCount = theIntvlCount + phase.theIntvlCount; Assert(phase.theTrace); if (!theTrace) theTrace = new PhaseTrace; theTrace->concat(phase.trace()); } PhaseTrace *PhaseInfo::startTrace() { theTrace = new PhaseTrace; theTrace->configure(theIntvl); return theTrace; } void PhaseInfo::noteIntvl(const StatIntvlRec &r, const String &phaseName) { Assert(!theTrace); if (thePhase.theName) Should(thePhase.theName == phaseName); else thePhase.theName = phaseName; theIntvl.concat(r); theIntvlCount++; } void PhaseInfo::noteEndOfLog() { Assert(!theTrace); if (thePhase.theDuration < 0) ((StatIntvlRec&)thePhase).merge(theIntvl); // restore what we can } void PhaseInfo::notePhase(const StatPhaseRec &r) { Assert(!theTrace); Assert(!thePhase.name() || thePhase.name() == r.name()); Assert(thePhase.theDuration < 0); thePhase.concat(r); gotPhaseStats = true; } void PhaseInfo::checkConsistency() { if (!theIntvlCount) cerr << thePhase.theName << ": strange, no stat intervals within a phase" << endl; } void PhaseInfo::compileStats(BlobDb &) { } polygraph-4.3.2/src/loganalyzers/InfoScope.cc0000644000175000017500000000501511546440450020624 0ustar testertester /* Web Polygraph http://www.web-polygraph.org/ * Copyright 2003-2011 The Measurement Factory * Licensed under the Apache License, Version 2.0 */ #include "base/polygraph.h" #include 
"loganalyzers/InfoScopeDim.h" #include "loganalyzers/InfoScope.h" InfoScope::InfoScope() { theSides = new InfoScopeDim("sides"); thePhases = new InfoScopeDim("phases"); } InfoScope::InfoScope(const InfoScope &s) { theSides = new InfoScopeDim("sides"); thePhases = new InfoScopeDim("phases"); copy(s); } InfoScope::~InfoScope() { reset(); delete theSides; delete thePhases; } bool InfoScope::operator ==(const InfoScope &s) const { return image() == s.image(); } InfoScope &InfoScope::operator =(const InfoScope &s) { reset(); copy(s); return *this; } InfoScope::operator void*() const { return theSides->count() && thePhases->count() ? (void*)-1 : 0; } void InfoScope::name(const String &aName) { Assert(!theName); // paranoid theName = aName; } String InfoScope::name() const { return theName ? theName : image(); } String InfoScope::image() const { String buf; buf += theSides->image(); buf += "__"; buf += thePhases->image(); return buf; } const Array &InfoScope::sides() const { return theSides->names(); } const Array &InfoScope::phases() const { return thePhases->names(); } InfoScope InfoScope::oneSide(const String &name) const { //Assert(hasSide(name)); InfoScope s; s.theSides->add(name); s.thePhases->copy(*thePhases); return s; } InfoScope InfoScope::onePhase(const String &name) const { //Assert(hasPhase(name)); InfoScope s; s.theSides->copy(*theSides); s.thePhases->add(name); return s; } void InfoScope::addSide(const String &name) { theSides->add(name); } void InfoScope::addPhase(const String &name) { thePhases->add(name); } void InfoScope::add(const InfoScope &scope) { for (int s = 0; s < scope.sides().count(); ++s) { if (!hasSide(*scope.sides().item(s))) addSide(*scope.sides().item(s)); } for (int p = 0; p < scope.phases().count(); ++p) { if (!hasPhase(*scope.phases().item(p))) addPhase(*scope.phases().item(p)); } } bool InfoScope::hasSide(const String &name) const { return theSides->has(name); } bool InfoScope::hasPhase(const String &name) const { return thePhases->has(name); } void InfoScope::copy(const InfoScope &s) { if (s.theSides != theSides && s.thePhases != thePhases) { Assert(!*this); theName = s.theName; theSides->copy(*s.theSides); thePhases->copy(*s.thePhases); } } void InfoScope::reset() { theSides->reset(); thePhases->reset(); } polygraph-4.3.2/src/loganalyzers/LoadStex.h0000644000175000017500000000161111546440450020322 0ustar testertester /* Web Polygraph http://www.web-polygraph.org/ * Copyright 2003-2011 The Measurement Factory * Licensed under the Apache License, Version 2.0 */ #ifndef POLYGRAPH__LOGANALYZERS_LOADSTEX_H #define POLYGRAPH__LOGANALYZERS_LOADSTEX_H #include "xstd/String.h" #include "base/StatIntvlRec.h" // an interface of extracting a particular load statistics out of // interval stats record class LoadStex { public: LoadStex(const String &aKey, const String &aName); virtual ~LoadStex() {} const String &key() const { return theKey; } // precise, for machine use const String &name() const { return theName; } // imprecise, human-readable virtual double rate(const StatIntvlRec &rec) const = 0; virtual double bwidth(const StatIntvlRec &rec) const = 0; protected: double perDuration(double meas, const StatIntvlRec &rec) const; protected: String theKey; String theName; }; #endif polygraph-4.3.2/src/loganalyzers/comparator.css0000644000175000017500000000102610621177424021310 0ustar testertester The Measurement Factory polygraph-4.3.2/src/loganalyzers/Stex.h0000644000175000017500000002722211546440450017530 0ustar testertester /* Web Polygraph http://www.web-polygraph.org/ 
* Copyright 2003-2011 The Measurement Factory * Licensed under the Apache License, Version 2.0 */ #ifndef POLYGRAPH__LOGANALYZERS_STEX_H #define POLYGRAPH__LOGANALYZERS_STEX_H #include "xstd/String.h" #include "base/StatPhaseRec.h" class XmlNodes; class PhaseInfo; // an algorithm of extracting a particular named statistics out of // cycle or phase stats record and describing/categorizing that statistics class Stex { public: typedef AggrStat StatPhaseRec::*AggrPtr; typedef TmSzHistStat StatPhaseRec::*HistPtr; typedef ContTypeStat StatPhaseRec::*ContTypePtr; typedef HRHistStat StatPhaseRec::*HRHistPtr; typedef CompoundXactStat StatPhaseRec::*CompoundPtr; typedef TmSzStat StatIntvlRec::*TracePtr; typedef StatusCodeStat StatPhaseRec::*StatusCodePtr; public: Stex(const String &aKey, const String &aName); virtual ~Stex(); void parent(const Stex *aParent); const String &key() const { return theKey; } // precise, for machine use const String &name() const { return theName; } // imprecise, human-readable const Stex *parent() const { return theParent; } bool ignoreUnseen() const { return doIgnoreUnseen; } double totalCount(const PhaseInfo &phase) const; double meanPartsCount(const PhaseInfo &phase) const; int cmpByCountContrib(const PhaseInfo &phase, const Stex &stex) const; virtual const TmSzStat *aggr(const PhaseInfo &phase) const; virtual const TmSzHistStat *hist(const PhaseInfo &) const { return 0; } virtual const TmSzStat *trace(const StatIntvlRec &) const { return 0; } virtual const Histogram *partsHist(const PhaseInfo &) const { return 0; } virtual const AggrStat *partsStat(const PhaseInfo &phase) const; virtual void describe(XmlNodes &nodes) const = 0; protected: void describeParent(XmlNodes &nodes) const; protected: const String theKey; const String theName; const Stex *theParent; // if we are a part of a 'larger' stats group bool doIgnoreUnseen; // if stex should be added to the unseen stat list }; class HitsStex: public Stex { public: HitsStex(const String &aKey, const String &aName); virtual const TmSzHistStat *hist(const PhaseInfo &phase) const; virtual const TmSzStat *trace(const StatIntvlRec &rec) const; virtual void describe(XmlNodes &nodes) const; }; class MissesStex: public Stex { public: MissesStex(const String &aKey, const String &aName); virtual const TmSzHistStat *hist(const PhaseInfo &phase) const; virtual const TmSzStat *trace(const StatIntvlRec &rec) const; virtual void describe(XmlNodes &nodes) const; }; class HitMissesStex: public Stex { public: HitMissesStex(const String &aKey, const String &aName); virtual const TmSzHistStat *hist(const PhaseInfo &phase) const; virtual const TmSzStat *trace(const StatIntvlRec &rec) const; protected: mutable TmSzHistStat theXactHist; mutable TmSzStat theXactAggr; virtual void describe(XmlNodes &nodes) const; }; class ValidationHitStex: public Stex { public: ValidationHitStex(const String &aKey, const String &aName, const HRHistPtr aHRHist); virtual const TmSzStat *aggr(const PhaseInfo &phase) const; virtual const TmSzHistStat *hist(const PhaseInfo &phase) const; virtual void describe(XmlNodes &nodes) const; protected: mutable TmSzStat theXactAggr; const HRHistPtr theHRHist; }; class ValidationMissStex: public Stex { public: ValidationMissStex(const String &aKey, const String &aName, const HRHistPtr aHRHist); virtual const TmSzStat *aggr(const PhaseInfo &phase) const; virtual const TmSzHistStat *hist(const PhaseInfo &phase) const; virtual void describe(XmlNodes &nodes) const; protected: mutable TmSzStat theXactAggr; const HRHistPtr 
theHRHist; }; class ValidationHitMissStex: public Stex { public: ValidationHitMissStex(const String &aKey, const String &aName, const HRHistPtr aHRHist, const TracePtr aTrace); virtual const TmSzStat *aggr(const PhaseInfo &phase) const; virtual const TmSzHistStat *hist(const PhaseInfo &phase) const; virtual const TmSzStat *trace(const StatIntvlRec &rec) const; virtual void describe(XmlNodes &nodes) const; protected: mutable TmSzHistStat theXactHist; mutable TmSzStat theXactAggr; const HRHistPtr theHRHist; const TracePtr theTrace; }; class CachableStex: public Stex { public: CachableStex(const String &aKey, const String &aName); virtual const TmSzStat *trace(const StatIntvlRec &rec) const; virtual void describe(XmlNodes &nodes) const; }; class UnCachableStex: public Stex { public: UnCachableStex(const String &aKey, const String &aName); virtual const TmSzStat *trace(const StatIntvlRec &rec) const; virtual void describe(XmlNodes &nodes) const; }; class AllCachableStex: public Stex { public: AllCachableStex(const String &aKey, const String &aName); virtual const TmSzStat *trace(const StatIntvlRec &rec) const; virtual void describe(XmlNodes &nodes) const; protected: mutable TmSzStat theXactAggr; }; class FillStex: public Stex { public: FillStex(const String &aKey, const String &aName); virtual const TmSzStat *trace(const StatIntvlRec &rec) const; virtual void describe(XmlNodes &nodes) const; }; class SimpleStex: public Stex { public: SimpleStex(const String &aKey, const String &aName, HistPtr aHist, TracePtr aTrace); virtual const TmSzHistStat *hist(const PhaseInfo &phase) const; virtual const TmSzStat *trace(const StatIntvlRec &rec) const; virtual void describe(XmlNodes &nodes) const; protected: HistPtr theHist; TracePtr theTrace; }; class AllMethodsStex: public Stex { public: AllMethodsStex(const String &aKey, const String &aName); virtual const TmSzHistStat *hist(const PhaseInfo &phase) const; virtual const TmSzStat *trace(const StatIntvlRec &rec) const; virtual void describe(XmlNodes &nodes) const; protected: mutable TmSzHistStat theXactHist; mutable TmSzStat theXactAggr; }; class AllRepsStex: public Stex { public: AllRepsStex(const String &aKey, const String &aName); virtual const TmSzHistStat *hist(const PhaseInfo &phase) const; virtual const TmSzStat *trace(const StatIntvlRec &rec) const; virtual void describe(XmlNodes &nodes) const; protected: mutable TmSzHistStat theXactHist; mutable TmSzStat theXactAggr; }; class ContTypeStex: public Stex { public: ContTypeStex(const String &aKey, const String &aName, int idx, ContTypePtr aContType); virtual const TmSzStat *aggr(const PhaseInfo &phase) const; virtual void describe(XmlNodes &nodes) const; protected: int theIdx; ContTypePtr theContType; mutable TmSzStat theXactAggr; }; class AllContTypesStex: public Stex { public: AllContTypesStex(const String &aKey, const String &aName, ContTypePtr aContType); virtual const TmSzStat *aggr(const PhaseInfo &phase) const; virtual void describe(XmlNodes &nodes) const; protected: ContTypePtr theContType; mutable TmSzStat theXactAggr; }; class CompoundReplyStex: public Stex { public: CompoundReplyStex(const String &aKey, const String &aName, const CompoundPtr aCompoundPtr); virtual const TmSzStat *aggr(const PhaseInfo &phase) const; virtual const Histogram *partsHist(const PhaseInfo &phase) const; virtual void describe(XmlNodes &nodes) const; protected: const CompoundPtr theCompoundPtr; mutable TmSzStat theStat; }; class CompoundRequestStex: public Stex { public: CompoundRequestStex(const String &aKey, 
const String &aName, const CompoundPtr aCompoundPtr); virtual const TmSzStat *aggr(const PhaseInfo &phase) const; virtual const Histogram *partsHist(const PhaseInfo &phase) const; virtual void describe(XmlNodes &nodes) const; protected: const CompoundPtr theCompoundPtr; mutable TmSzStat theStat; }; class AllCompoundRepsStex: public Stex { public: AllCompoundRepsStex(const String &aKey, const String &aName); virtual const TmSzStat *aggr(const PhaseInfo &phase) const; virtual const Histogram *partsHist(const PhaseInfo &phase) const; virtual void describe(XmlNodes &nodes) const; protected: mutable CompoundXactStat theCompound; mutable TmSzStat theStat; }; class AllCompoundReqsStex: public Stex { public: AllCompoundReqsStex(const String &aKey, const String &aName); virtual const TmSzStat *aggr(const PhaseInfo &phase) const; virtual const Histogram *partsHist(const PhaseInfo &phase) const; virtual void describe(XmlNodes &nodes) const; protected: mutable CompoundXactStat theCompound; mutable TmSzStat theStat; }; class AuthIngStex: public Stex { public: AuthIngStex(const String &aKey, const String &aName, const AuthPhaseStat::Scheme aScheme); virtual const TmSzStat *trace(const StatIntvlRec &rec) const; virtual void describe(XmlNodes &nodes) const; protected: const AuthPhaseStat::Scheme theScheme; }; class AuthEdStex: public Stex { public: AuthEdStex(const String &aKey, const String &aName, const AuthPhaseStat::Scheme aScheme); virtual const TmSzStat *trace(const StatIntvlRec &rec) const; virtual void describe(XmlNodes &nodes) const; protected: const AuthPhaseStat::Scheme theScheme; }; class AllAuthIngStex: public Stex { public: AllAuthIngStex(const String &aKey, const String &aName); virtual const TmSzStat *trace(const StatIntvlRec &rec) const; virtual void describe(XmlNodes &nodes) const; protected: mutable TmSzStat theStat; }; class AllAuthEdStex: public Stex { public: AllAuthEdStex(const String &aKey, const String &aName); virtual const TmSzStat *trace(const StatIntvlRec &rec) const; virtual void describe(XmlNodes &nodes) const; protected: mutable TmSzStat theStat; }; class AllAuthStex: public Stex { public: AllAuthStex(const String &aKey, const String &aName); virtual const TmSzStat *trace(const StatIntvlRec &rec) const; virtual void describe(XmlNodes &nodes) const; protected: mutable TmSzStat theStat; }; class ProtoIntvlStex: public Stex { public: typedef ProtoIntvlStat StatIntvlRec::*ProtoPtr; ProtoIntvlStex(ProtoPtr theProto, const String &aKey, const String &aName); protected: ProtoPtr theProto; }; class ProtoHitsStex: public ProtoIntvlStex { public: ProtoHitsStex(ProtoPtr theProto, const String &aKey, const String &aName); virtual const TmSzStat *trace(const StatIntvlRec &rec) const; virtual void describe(XmlNodes &nodes) const; }; class ProtoMissesStex: public ProtoIntvlStex { public: ProtoMissesStex(ProtoPtr theProto, const String &aKey, const String &aName); virtual const TmSzStat *trace(const StatIntvlRec &rec) const; virtual void describe(XmlNodes &nodes) const; }; class ProtoHitMissesStex: public ProtoIntvlStex { public: ProtoHitMissesStex(ProtoPtr theProto, const String &aKey, const String &aName); virtual const TmSzStat *trace(const StatIntvlRec &rec) const; protected: mutable TmSzStat theAggr; virtual void describe(XmlNodes &nodes) const; }; class CookiesStex: public Stex { public: CookiesStex(const String &aKey, const String &aName, const AggrPtr anAggrPtr); virtual const AggrStat *partsStat(const PhaseInfo &phase) const; protected: const AggrPtr theAggrPtr; virtual void 
describe(XmlNodes &nodes) const; }; class AllStatusCodeStex: public Stex { public: AllStatusCodeStex(const String &aKey, const String &aName, const StatusCodePtr aPtr); virtual const TmSzStat *aggr(const PhaseInfo &phase) const; virtual void describe(XmlNodes &nodes) const; protected: const StatusCodePtr thePtr; mutable TmSzStat theAggr; }; class StatusCodeStex: public Stex { public: StatusCodeStex(const String &aKey, const String &aName, const StatusCodePtr aPtr, const int aStatus); virtual const TmSzStat *aggr(const PhaseInfo &phase) const; virtual void describe(XmlNodes &nodes) const; protected: const StatusCodePtr thePtr; const int theStatus; }; #endif polygraph-4.3.2/src/loganalyzers/RptmHistFig.h0000644000175000017500000000102311546440450020774 0ustar testertester /* Web Polygraph http://www.web-polygraph.org/ * Copyright 2003-2011 The Measurement Factory * Licensed under the Apache License, Version 2.0 */ #ifndef POLYGRAPH__LOGANALYZERS_RPTMHISTFIG_H #define POLYGRAPH__LOGANALYZERS_RPTMHISTFIG_H #include "loganalyzers/TmSzHistFig.h" // creates response time distribution figure based on phase stats class RptmHistFig: public TmSzHistFig { public: RptmHistFig(); protected: virtual const Histogram *extractHist(const Stex *stex, const PhaseInfo &info) const; }; #endif polygraph-4.3.2/src/loganalyzers/Formatter.h0000644000175000017500000000433311546440450020546 0ustar testertester /* Web Polygraph http://www.web-polygraph.org/ * Copyright 2003-2011 The Measurement Factory * Licensed under the Apache License, Version 2.0 */ #ifndef POLYGRAPH__LOGANALYZERS_FORMATTER_H #define POLYGRAPH__LOGANALYZERS_FORMATTER_H #include "xstd/String.h" #include "xstd/h/iosfwd.h" // interface to format data for different mediums such as HTML page(s) or PDF class Formatter { public: virtual ~Formatter() {} virtual void openSection(const String &id, const String &title) = 0; virtual void closeSection() = 0; virtual void openTable(const String &id, const String &title) = 0; virtual void openTableAnonym() = 0; // borderless, anonymous virtual void closeTable() = 0; virtual void openTableHeader(const String &id, const String &title) = 0; virtual void closeTableHeader() = 0; virtual void openTableRecord() = 0; virtual void closeTableRecord() = 0; virtual void openTableCell(const String &classId) = 0; virtual void closeTableCell() = 0; virtual void addTableCell(const String &cell) = 0; virtual void addLink(const String &addr, const String &text) = 0; virtual void addText(const String &text) = 0; virtual void addInteger(int v, const String &unit, bool addSign = false) = 0; virtual void addNothing() = 0; // make formatted contents virtual void make() = 0; }; class WebPageFormatter: public Formatter { public: WebPageFormatter(ostream *aPage); virtual void openSection(const String &id, const String &title); virtual void closeSection(); virtual void openTable(const String &id, const String &title); virtual void openTableAnonym(); // borderless, anonymous virtual void closeTable(); virtual void openTableHeader(const String &id, const String &title); virtual void closeTableHeader(); virtual void openTableRecord(); virtual void closeTableRecord(); virtual void openTableCell(const String &classId); virtual void closeTableCell(); virtual void addTableCell(const String &cell); virtual void addLink(const String &addr, const String &text); virtual void addText(const String &text); virtual void addInteger(int v, const String &unit, bool addSign = false); virtual void addNothing(); virtual void make(); private: ostream *thePage; 
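// Illustrative Formatter usage sketch (hypothetical code, not from the original
// sources; the exact call-order requirements are an assumption). It mirrors how
// Sample::reportDifferences() feeds a Formatter via addInteger():
//
//   std::ofstream page("comparison.html"); // hypothetical output file
//   WebPageFormatter form(&page);
//   form.openSection("summary", "Comparison summary");
//   form.openTableAnonym();
//   form.openTableRecord();
//   form.addTableCell("throughput change");
//   form.addInteger(+5, "%", true); // addSign=true requests a leading sign
//   form.closeTableRecord();
//   form.closeTable();
//   form.closeSection();
//   form.make(); // make the formatted contents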
}; #endif polygraph-4.3.2/src/loganalyzers/LevelTraceFig.cc0000644000175000017500000000367211546440450021422 0ustar testertester /* Web Polygraph http://www.web-polygraph.org/ * Copyright 2003-2011 The Measurement Factory * Licensed under the Apache License, Version 2.0 */ #include "base/polygraph.h" #include #include "xml/XmlAttr.h" #include "loganalyzers/LevelStex.h" #include "loganalyzers/ReportBlob.h" #include "loganalyzers/BlobDb.h" #include "loganalyzers/RepOpts.h" #include "loganalyzers/PhaseInfo.h" #include "loganalyzers/PhaseTrace.h" #include "loganalyzers/LevelTraceFig.h" LevelTraceFig::LevelTraceFig(): theStex(0), thePhase(0), theTrace(0) { } void LevelTraceFig::stats(const LevelStex *const aStex, const PhaseInfo *const aPhase) { theStex = aStex; thePhase = aPhase; theTrace = &thePhase->trace(); Assert(theTrace); } void LevelTraceFig::setCtrlOptions() { theLabelY1 = "level, #"; ReportTraceFigure::setCtrlOptions(); } void LevelTraceFig::compareWith(const LevelStex *const stex) { theComparison.append(stex); } int LevelTraceFig::createCtrlFile() { if (ReportTraceFigure::createCtrlFile() < 0) return -1; // make sure that the most interesting line is on top theComparison.append(theStex); // create plot command for (int i = 0; i < theComparison.count(); ++i) addPlotLine(theComparison[i]->name(), theLabelY1); addedAllPlotLines(); int pointCount = 0; for (int s = 0; s < theComparison.count(); ++s) { if (s) *theCtrlFile << 'e' << endl; // note: two empty lines do not work pointCount += dumpDataLines(theComparison[s]); } return pointCount; } int LevelTraceFig::dumpDataLines(const LevelStex *stex) { int pointCount = 0; for (int i = 0; i < theTrace->count(); ++i) pointCount += dumpDataLine(stex, theTrace->winPos(i), theTrace->winStats(i)); return pointCount; } int LevelTraceFig::dumpDataLine(const LevelStex *stex, Time stamp, const StatIntvlRec &r) { const LevelStat &stat = stex->level(r); if (stat.known()) { dumpTime(stamp); *theCtrlFile << ' ' << stat.level() << endl; return 1; } return 0; } polygraph-4.3.2/src/loganalyzers/BlobIdx.cc0000644000175000017500000000372711546440450020272 0ustar testertester /* Web Polygraph http://www.web-polygraph.org/ * Copyright 2003-2011 The Measurement Factory * Licensed under the Apache License, Version 2.0 */ #include "base/polygraph.h" #include "xstd/String.h" #include "xstd/Checksum.h" #include "xstd/gadgets.h" #include "loganalyzers/ReportBlob.h" #include "loganalyzers/BlobIdx.h" static const String strKey = "key"; BlobIdx::BlobIdx(int aCapacity): theHash(aCapacity), theCount(0) { theHash.count(theHash.capacity()); } void BlobIdx::add(const ReportBlob *blob) { Assert(blob); while (2*theHash.capacity() < 3*theCount) grow(); int idx; Assert(!find(blob->key(), idx)); theHash.put(blob, idx); theCount++; } const ReportBlob *BlobIdx::find(const Key &key) const { int idx; return find(key, idx); } const ReportBlob *BlobIdx::find(const Key &key, int &idx) const { if (!theHash.capacity()) { idx = 0; return 0; } xstd::ChecksumAlg alg; alg.update(key.data(), key.len()); alg.final(); static unsigned int hashVals[4]; memcpy(hashVals, alg.sum().image(), Min(SizeOf(hashVals), alg.sum().size())); const ReportBlob *blob = 0; for (int h = 0; h < 4; ++h) { idx = (int)(hashVals[h] % (unsigned)theHash.capacity()); if (stopAt(key, blob, idx)) return blob; } // collision; use linear search for (int i = 0; i < theHash.capacity(); ++i, ++idx) { idx %= theHash.capacity(); if (stopAt(key, blob, idx)) return blob; } Assert(false); // no empty space in the hash return 0; } bool 
BlobIdx::stopAt(const Key &key, const ReportBlob *&blob, int idx) const { Assert(0 <= idx && idx <= theHash.count()); if ((blob = theHash[idx])) { if (blob->key() == key) return true; blob = 0; return false; // collision } return true; } void BlobIdx::grow() { Array oldHash; oldHash = theHash; theHash.memset(0); theHash.stretch(2*theHash.capacity() + 1); theHash.count(theHash.capacity()); for (int i = 0; i < oldHash.count(); ++i) { if (oldHash[i]) add(oldHash[i]); } } polygraph-4.3.2/src/loganalyzers/SideInfo.h0000644000175000017500000002054411546440450020305 0ustar testertester /* Web Polygraph http://www.web-polygraph.org/ * Copyright 2003-2011 The Measurement Factory * Licensed under the Apache License, Version 2.0 */ #ifndef POLYGRAPH__LOGANALYZERS_SIDEINFO_H #define POLYGRAPH__LOGANALYZERS_SIDEINFO_H #include "xstd/Time.h" #include "xstd/String.h" #include "xstd/BigSize.h" #include "xstd/Array.h" #include "base/ProtoStatPtr.h" #include "loganalyzers/PhaseInfo.h" #include "loganalyzers/SomeInfo.h" #include class InfoScopes; class TestInfo; class ProcInfo; class PhaseInfo; class TmsSzStat; class StatPhaseRec; class ErrorRec; class ErrorStat; class StatTable; class XmlTable; class BlobDb; class Stex; // aggregate stats and other logged information about a test side // (e.g., client- or server-side); // manages info about all processes that belong to one side class SideInfo: public SomeInfo { public: static void CompileEmptyStats(BlobDb &db, const Scope &scope); static void Configure(); public: SideInfo(int aLogCat); ~SideInfo(); int logCat() const; const String &name() const; // "client" or "server" const String &otherName() const; // "server" or "client" const String &benchmarkVersion() const; const String &pglCfg() const; Time startTime() const; int repCount(const Scope &scope) const; int hitCount(const Scope &scope) const; int offeredHitCount(const Scope &scope) const; int uselessProxyValidationCount(const Scope &scope) const; BigSize repVolume(const Scope &scope) const; BigSize hitVolume(const Scope &scope) const; BigSize offeredHitVolume(const Scope &scope) const; BigSize uselessProxyValidationVolume(const Scope &scope) const; AggrStat lastReqByteWritten(const Scope &scope) const; AggrStat lastReqByteRead(const Scope &scope) const; AggrStat firstRespByteWritten(const Scope &scope) const; AggrStat firstRespByteRead(const Scope &scope) const; void test(TestInfo *t); TestInfo *test(); void add(ProcInfo *info); // absorbs ProcInfo &proc(int idx); int procCount() const; const PhaseInfo &phase(const Scope &scope) const; const PhaseInfo &phase(const String &name) const; const PhaseInfo &phase(int idx) const; int phaseCount() const; bool hasPhase(const String &name) const { return findPhase(name); } const Scope &scope() const { return theScope; } // all phases int scopes(InfoScopes &res) const; const Scope &execScope() const; const StatPhaseRec &execScopeStats() const; void checkConsistency(); void compileStats(BlobDb &db); protected: static void AddProtoStexes(ProtoIntvlPtr protoPtr); static void AddStex(Array &stexes, Stex *stex, const Stex *parent); void addPhase(const PhaseInfo &phase); const PhaseInfo *findPhase(const String &name) const; PhaseInfo *findPhase(const String &name); void checkCommonBenchmarkVersion(); void checkCommonPglCfg(); void checkCommonStartTime(); void checkCommonPhases(); void compileStats(BlobDb &db, const PhaseInfo &phase, const Scope &scope); void cmplLoadBlob(BlobDb &db, const Scope &scope); void cmplLoadTable(BlobDb &db, ReportBlob &parent, const Scope 
&scope); void cmplLoadFigure(BlobDb &db, ReportBlob &blob, const Scope &scope); void cmpProtoStats(BlobDb &db, const PhaseInfo &phase, ProtoIntvlPtr protoPtr, const Scope &scope); void cmplProtoLoadBlob(BlobDb &db, const PhaseInfo &phase, ProtoIntvlPtr protoPtr, const Scope &scope); void cmplProtoLoadTable(BlobDb &db, ReportBlob &parent, const PhaseInfo &phase, ProtoIntvlPtr protoPtr, const Scope &scope); void cmplProtoLoadFigure(BlobDb &db, ReportBlob &blob, const PhaseInfo &phase, ProtoIntvlPtr protoPtr, const Scope &scope); void cmplRptmFigure(BlobDb &db, const Scope &scope); void cmplRptmVsLoadFigure(BlobDb &db, const PhaseInfo &phase, const Scope &scope); void cmplHitRatioTable(BlobDb &db, const Scope &scope); void cmplHrTraces(BlobDb &db, ReportBlob &blob, const Scope &scope); void cmplBhrTrace(BlobDb &db, ReportBlob &blob, const Scope &scope); void cmplDhrTrace(BlobDb &db, ReportBlob &blob, const Scope &scope); void cmplXactLevelTable(BlobDb &db, const PhaseInfo &phase, const Scope &scope); void cmplReplyStreamTable(BlobDb &db, const PhaseInfo &phase, const Scope &scope); void cmplRequestStreamTable(BlobDb &db, const PhaseInfo &phase, const Scope &scope); void cmplCompoundReplyStreamTable(BlobDb &db, const PhaseInfo &phase, const Scope &scope); void cmplCompoundRequestStreamTable(BlobDb &db, const PhaseInfo &phase, const Scope &scope); void cmplAuthStreamTable(BlobDb &db, const PhaseInfo &phase, const Scope &scope); void cmplReplyStatusStreamTable(BlobDb &db, const PhaseInfo &phase, const Scope &scope); void cmplXactLevelFigure(BlobDb &db, ReportBlob &blob, const Scope &scope); void cmplConnLevelTable(BlobDb &db, const PhaseInfo &phase, const Scope &scope); void cmplConnLevelFigure(BlobDb &db, ReportBlob &blob, const Scope &scope); void cmplConnPipelineBlob(BlobDb &db, const Scope &scope); void cmplConnPipelineTable(BlobDb &db, ReportBlob &parent, const Scope &scope); void cmplConnPipelineTrace(BlobDb &db, ReportBlob &blob, const Scope &scope); void cmplConnPipelineHist(BlobDb &db, ReportBlob &blob, const Scope &scope); void cmplPopulLevelTable(BlobDb &db, const PhaseInfo &phase, const Scope &scope); void cmplPopulLevelFigure(BlobDb &db, ReportBlob &blob, const Scope &scope); void cmplReplyObjectTable(BlobDb &db, const PhaseInfo &s, const Scope &scope); void cmplRequestObjectTable(BlobDb &db, const PhaseInfo &s, const Scope &scope); void cmplCompoundReplyObjectTable(BlobDb &db, const PhaseInfo &s, const Scope &scope); void cmplCompoundRequestObjectTable(BlobDb &db, const PhaseInfo &s, const Scope &scope); void cmplAuthObjectTable(BlobDb &db, const PhaseInfo &s, const Scope &scope); void cmplReplyStatusObjectTable(BlobDb &db, const PhaseInfo &s, const Scope &scope); void cmplValidationTable(BlobDb &db, const PhaseInfo &s, const Scope &scope); void cmplErrorTable(BlobDb &db, const PhaseInfo &phase, const Scope &scope); void cmplCookieTable(BlobDb &db, const PhaseInfo &phase, const Scope &scope); void cmplObjectBlobs(BlobDb &db, const PhaseInfo &phase, const Scope &scope, const Array &stexes); void cmplUnseenObjectsBlob(BlobDb &db, const Scope &scope); void cmplSideSum(BlobDb &db); void cmplLevelTableRec(BlobDb &db, const String &pfx, const String &state, const LevelStat &stats, const Scope &scope, XmlTable &table); void cmplObjectTableRec(BlobDb &db, StatTable &table, const Stex &stex, const PhaseInfo &phase, const Scope &scope); void cmplStreamTableRec(BlobDb &db, StatTable &table, const Stex &stex, const PhaseInfo &phase, const Scope &scope, const Stex *const topStex); void 
cmplValidationTableRec(BlobDb &db, XmlTable &table, const Stex &stex, const PhaseInfo &phase, const Scope &scope, const String &pfx, const String &name); void cmplErrorTableRec(BlobDb &db, XmlTable &table, const ErrorStat &errors, const ErrorRec &error, const Scope &scope); void cmplCookieTableRec(BlobDb &db, XmlTable &table, const Stex &stex, const Stex &allStex, const PhaseInfo &phase, const Scope &scope, const String &pfx, const String &name); void cmplObjectBlob(BlobDb &db, const Stex &stex, const PhaseInfo &phase, const Scope &scope); static XmlTable makeStreamTableHdr(const bool hasParts = false); static XmlTable makeObjectTableHdr(const bool hasParts = false); static void SortStexes(const PhaseInfo &phase, const Array &in, Array &out); protected: static Stex *TheAllReps; // points to the top most reply stex static Stex *TheAllReqs; // points to the top most request stex static Stex *TheUsefulProxyValidation; // points to useful proxy validation stex static Array TheReplyStex; // array of reply related stexes static Array TheRequestStex; // array of request related stexes static Array TheCompoundReplyStex; // array of compound reply stexes static Array TheCompoundRequestStex; // array of compound request stexes static Array TheAuthStex; // array of auth related stexes static Array TheReplyStatusStex; // array of reply status code stexes protected: Scope theScope; mutable Scope theExecScope; int theLogCat; String theBenchmarkVersion; String thePglCfg; Time theStartTime; TestInfo *theTest; Array theProcs; Array thePhases; PhaseInfo theExecScopePhase; PhaseInfo theAllPhasesPhase; std::list theUnseenObjects; }; #endif polygraph-4.3.2/src/loganalyzers/Panorama.h0000644000175000017500000000242411546440450020340 0ustar testertester /* Web Polygraph http://www.web-polygraph.org/ * Copyright 2003-2011 The Measurement Factory * Licensed under the Apache License, Version 2.0 */ #ifndef POLYGRAPH__LOGANALYZERS_PANORAMA_H #define POLYGRAPH__LOGANALYZERS_PANORAMA_H #include "xstd/Map.h" #include "loganalyzers/Sample.h" class Formatter; // horizontal merge of composite samples with similar structure // all kids should have the same ID class Panorama: public CompositeSample { public: static String TheId; static void LabelLocation(const String &location, const String &label); static String LocationLabel(const String &location); public: Panorama *findSlice(const String &key, int idxHint); virtual Panorama *genDiff() const; virtual void report(Formatter &form) const; protected: virtual const String &typeId() const { return TheId; } const Panorama *panKid(int idx) const; Panorama *panKid(int idx); private: typedef Map LocationLabels; static LocationLabels TheLocationLabels; }; // a leaf panorama node // all kids should be AtomSamples class PanAtom: public Panorama { public: static String TheId; public: virtual Panorama *genDiff() const; virtual void report(Formatter &form) const; protected: virtual const String &typeId() const { return TheId; } }; #endif polygraph-4.3.2/src/loganalyzers/StexBase.h0000644000175000017500000000167711546440450020331 0ustar testertester /* Web Polygraph http://www.web-polygraph.org/ * Copyright 2003-2011 The Measurement Factory * Licensed under the Apache License, Version 2.0 */ #ifndef POLYGRAPH__LOGANALYZERS_STEXBASE_H #define POLYGRAPH__LOGANALYZERS_STEXBASE_H #include "xstd/String.h" // an algorithm of extracting a value statistics out of a stats record template class StexBase { public: StexBase(const String &aKey, const String &aName, const String &aUnit): theKey(aKey), 
theName(aName), theUnit(aUnit) {} virtual ~StexBase() {} const String &key() const { return theKey; } // precise, for machine use const String &name() const { return theName; } // imprecise, human-readable const String &unit() const { return theUnit; } // measurement unit virtual bool valueKnown(const Stats &rec) const = 0; virtual Value value(const Stats &rec) const = 0; protected: String theKey; String theName; String theUnit; }; #endif polygraph-4.3.2/src/loganalyzers/LoadStexes.cc0000644000175000017500000000314611546440450021015 0ustar testertester /* Web Polygraph http://www.web-polygraph.org/ * Copyright 2003-2011 The Measurement Factory * Licensed under the Apache License, Version 2.0 */ #include "base/polygraph.h" //#include "xml/XmlText.h" //#include "xml/XmlParagraph.h" #include "loganalyzers/Stex.h" #include "loganalyzers/ReportBlob.h" #include "loganalyzers/PhaseInfo.h" #include "loganalyzers/LoadStexes.h" /* SideLoadStex */ SideLoadStex::SideLoadStex(const String &aKey, const String &aName, StatPtr aRate, StatPtr aBwidth): LoadStex(aKey, aName), theRateStats(aRate), theBwidthStats(aBwidth) { } double SideLoadStex::rate(const StatIntvlRec &rec) const { return (rec.*theRateStats)(); } double SideLoadStex::bwidth(const StatIntvlRec &rec) const { return (rec.*theBwidthStats)(); } /* TmSzLoadStex */ TmSzLoadStex::TmSzLoadStex(const Stex *aStex): LoadStex(aStex->key(), aStex->name()), theStex(aStex) { } double TmSzLoadStex::rate(const StatIntvlRec &rec) const { return perDuration(theStex->trace(rec)->count(), rec); } double TmSzLoadStex::bwidth(const StatIntvlRec &rec) const { return perDuration(theStex->trace(rec)->size().sum(), rec); } /* ProtoSideLoadStex */ ProtoSideLoadStex::ProtoSideLoadStex(const String &aKey, const String &aName, ProtoPtr aProto, StatPtr aRate, StatPtr aBwidth): LoadStex(aKey, aName), theProto(aProto), theRateStats(aRate), theBwidthStats(aBwidth) { } double ProtoSideLoadStex::rate(const StatIntvlRec &rec) const { return (rec.*theProto.*theRateStats)(rec.theDuration); } double ProtoSideLoadStex::bwidth(const StatIntvlRec &rec) const { return (rec.*theProto.*theBwidthStats)(rec.theDuration); } polygraph-4.3.2/src/loganalyzers/CompOpts.h0000644000175000017500000000144211546440450020345 0ustar testertester /* Web Polygraph http://www.web-polygraph.org/ * Copyright 2003-2011 The Measurement Factory * Licensed under the Apache License, Version 2.0 */ #ifndef POLYGRAPH__LOGANALYZERS_COMPOPTS_H #define POLYGRAPH__LOGANALYZERS_COMPOPTS_H #include "base/opts.h" #include "base/polyOpts.h" // options for the reporter class CompOpts: public OptGrp { public: CompOpts(); // defaults virtual ostream &printAnonym(ostream &os) const; virtual bool parseAnonym(const Array &opts); virtual bool canParseAnonym() const { return true; } virtual bool validate() const; public: HelpOpt theHelpOpt; VersionOpt theVersOpt; DblOpt theDelta; StrArrOpt thePhases; StrOpt theCompDir; StrOpt theTmpDir; Array theReports; }; extern CompOpts TheCompOpts; #endif polygraph-4.3.2/src/loganalyzers/ScatteredFig.h0000644000175000017500000000163411546440450021150 0ustar testertester /* Web Polygraph http://www.web-polygraph.org/ * Copyright 2003-2011 The Measurement Factory * Licensed under the Apache License, Version 2.0 */ #ifndef POLYGRAPH__LOGANALYZERS_SCATTEREDFIG_H #define POLYGRAPH__LOGANALYZERS_SCATTEREDFIG_H #include "xstd/String.h" #include "loganalyzers/ReportFigure.h" class PointStex; class PhaseInfo; class PhaseTrace; class StatIntvlRec; // scattered plot based on trace info class 
ScatteredFig: public ReportFigure { public: typedef PointStex Stex; public: ScatteredFig(); void stats(const Stex *aStex1, const Stex *aStex2, const PhaseInfo *phase); protected: virtual int createCtrlFile(); virtual void setCtrlOptions(); int dumpDataLine(const StatIntvlRec &r); bool dumpAxis(const Stex *stex, const StatIntvlRec &r); protected: const Stex *theStex1; const Stex *theStex2; const PhaseInfo *thePhase; const PhaseTrace *theTrace; }; #endif polygraph-4.3.2/src/loganalyzers/InfoScopeDim.h0000644000175000017500000000160711546440450021123 0ustar testertester /* Web Polygraph http://www.web-polygraph.org/ * Copyright 2003-2011 The Measurement Factory * Licensed under the Apache License, Version 2.0 */ #ifndef POLYGRAPH__LOGANALYZERS_INFOSCOPEDIM_H #define POLYGRAPH__LOGANALYZERS_INFOSCOPEDIM_H #include "xstd/String.h" #include "xstd/Array.h" class String; class InfoScope; // manages one dimension of InfoScope (e.g., phase dimention or side dimension) class InfoScopeDim { friend class InfoScope; public: InfoScopeDim(const String &aLabel); InfoScopeDim(const InfoScopeDim &s); ~InfoScopeDim(); int count() const { return theNames.count(); } const Array &names() const { return theNames; } String image() const; void add(const String &name); bool has(const String &name) const; protected: void reset(); void copy(const InfoScopeDim &s); protected: String theLabel; Array theNames; }; #endif polygraph-4.3.2/src/loganalyzers/Makefile.in0000644000175000017500000007154311546445453020516 0ustar testertester# Makefile.in generated by automake 1.11.1 from Makefile.am. # @configure_input@ # Copyright (C) 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001, 2002, # 2003, 2004, 2005, 2006, 2007, 2008, 2009 Free Software Foundation, # Inc. # This Makefile.in is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY, to the extent permitted by law; without # even the implied warranty of MERCHANTABILITY or FITNESS FOR A # PARTICULAR PURPOSE. 
@SET_MAKE@ # settings common to all Makefile.ams VPATH = @srcdir@ pkgdatadir = $(datadir)/@PACKAGE@ pkgincludedir = $(includedir)/@PACKAGE@ pkglibdir = $(libdir)/@PACKAGE@ pkglibexecdir = $(libexecdir)/@PACKAGE@ am__cd = CDPATH="$${ZSH_VERSION+.}$(PATH_SEPARATOR)" && cd install_sh_DATA = $(install_sh) -c -m 644 install_sh_PROGRAM = $(install_sh) -c install_sh_SCRIPT = $(install_sh) -c INSTALL_HEADER = $(INSTALL_DATA) transform = $(program_transform_name) NORMAL_INSTALL = : PRE_INSTALL = : POST_INSTALL = : NORMAL_UNINSTALL = : PRE_UNINSTALL = : POST_UNINSTALL = : build_triplet = @build@ host_triplet = @host@ bin_PROGRAMS = reporter$(EXEEXT) EXTRA_PROGRAMS = comparator$(EXEEXT) DIST_COMMON = $(dist_man1_MANS) $(noinst_HEADERS) \ $(srcdir)/Makefile.am $(srcdir)/Makefile.in \ $(top_srcdir)/common.am subdir = src/loganalyzers ACLOCAL_M4 = $(top_srcdir)/aclocal.m4 am__aclocal_m4_deps = $(top_srcdir)/acinclude.m4 \ $(top_srcdir)/cfgaux/ax_create_stdint_h.m4 \ $(top_srcdir)/cfgaux/check_zlib.m4 $(top_srcdir)/configure.in am__configure_deps = $(am__aclocal_m4_deps) $(CONFIGURE_DEPENDENCIES) \ $(ACLOCAL_M4) mkinstalldirs = $(install_sh) -d CONFIG_HEADER = $(top_builddir)/config.h CONFIG_CLEAN_FILES = CONFIG_CLEAN_VPATH_FILES = LTLIBRARIES = $(noinst_LTLIBRARIES) libloganalyzers_la_LIBADD = am_libloganalyzers_la_OBJECTS = InfoScope.lo InfoScopeDim.lo \ InfoScopes.lo Stex.lo LoadStex.lo LoadStexes.lo SomeInfo.lo \ TestInfo.lo SideInfo.lo ProcInfo.lo PhaseInfo.lo PhaseTrace.lo \ ReportFigure.lo TmSzHistFig.lo RptmHistFig.lo SizeHistFig.lo \ ReportTraceFigure.lo PointTraceFig.lo RptmTraceFig.lo \ LevelTraceFig.lo LoadTraceFig.lo HistogramFigure.lo \ ScatteredFig.lo ReportBlob.lo BlobIdx.lo BlobDb.lo \ SectionState.lo RepToHtmlFile.lo RepOpts.lo Sample.lo \ Panorama.lo Formatter.lo StatTable.lo libloganalyzers_la_OBJECTS = $(am_libloganalyzers_la_OBJECTS) am__installdirs = "$(DESTDIR)$(bindir)" "$(DESTDIR)$(man1dir)" PROGRAMS = $(bin_PROGRAMS) am__comparator_SOURCES_DIST = comparator.cc CompOpts.cc @ENABLE_COMPARATOR_TRUE@am_comparator_OBJECTS = comparator.$(OBJEXT) \ @ENABLE_COMPARATOR_TRUE@ CompOpts.$(OBJEXT) comparator_OBJECTS = $(am_comparator_OBJECTS) @ENABLE_COMPARATOR_TRUE@comparator_DEPENDENCIES = libloganalyzers.la \ @ENABLE_COMPARATOR_TRUE@ ../xml/libxml.a ../Hapy/src/libHapy.la \ @ENABLE_COMPARATOR_TRUE@ ../base/libbase.a ../xstd/libxstd.a \ @ENABLE_COMPARATOR_TRUE@ @LIBOBJS@ $(am__empty) am_reporter_OBJECTS = reporter.$(OBJEXT) reporter_OBJECTS = $(am_reporter_OBJECTS) reporter_DEPENDENCIES = libloganalyzers.la \ ../logextractors/liblogextractors.a ../runtime/libruntime.a \ ../xml/libxml.a ../base/libbase.a ../xstd/libxstd.a @LIBOBJS@ DEFAULT_INCLUDES = depcomp = $(SHELL) $(top_srcdir)/cfgaux/depcomp am__depfiles_maybe = depfiles am__mv = mv -f CXXCOMPILE = $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) \ $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CXXFLAGS) $(CXXFLAGS) LTCXXCOMPILE = $(LIBTOOL) --tag=CXX $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) \ --mode=compile $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) \ $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CXXFLAGS) $(CXXFLAGS) CXXLD = $(CXX) CXXLINK = $(LIBTOOL) --tag=CXX $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) \ --mode=link $(CXXLD) $(AM_CXXFLAGS) $(CXXFLAGS) $(AM_LDFLAGS) \ $(LDFLAGS) -o $@ SOURCES = $(libloganalyzers_la_SOURCES) $(comparator_SOURCES) \ $(reporter_SOURCES) DIST_SOURCES = $(libloganalyzers_la_SOURCES) \ $(am__comparator_SOURCES_DIST) $(reporter_SOURCES) am__vpath_adj_setup = srcdirstrip=`echo "$(srcdir)" | sed 's|.|.|g'`; am__vpath_adj = case $$p in \ 
$(srcdir)/*) f=`echo "$$p" | sed "s|^$$srcdirstrip/||"`;; \ *) f=$$p;; \ esac; am__strip_dir = f=`echo $$p | sed -e 's|^.*/||'`; am__install_max = 40 am__nobase_strip_setup = \ srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*|]/\\\\&/g'` am__nobase_strip = \ for p in $$list; do echo "$$p"; done | sed -e "s|$$srcdirstrip/||" am__nobase_list = $(am__nobase_strip_setup); \ for p in $$list; do echo "$$p $$p"; done | \ sed "s| $$srcdirstrip/| |;"' / .*\//!s/ .*/ ./; s,\( .*\)/[^/]*$$,\1,' | \ $(AWK) 'BEGIN { files["."] = "" } { files[$$2] = files[$$2] " " $$1; \ if (++n[$$2] == $(am__install_max)) \ { print $$2, files[$$2]; n[$$2] = 0; files[$$2] = "" } } \ END { for (dir in files) print dir, files[dir] }' am__base_list = \ sed '$$!N;$$!N;$$!N;$$!N;$$!N;$$!N;$$!N;s/\n/ /g' | \ sed '$$!N;$$!N;$$!N;$$!N;s/\n/ /g' man1dir = $(mandir)/man1 NROFF = nroff MANS = $(dist_man1_MANS) HEADERS = $(noinst_HEADERS) ETAGS = etags CTAGS = ctags DISTFILES = $(DIST_COMMON) $(DIST_SOURCES) $(TEXINFOS) $(EXTRA_DIST) ACLOCAL = @ACLOCAL@ AMTAR = @AMTAR@ AR = @AR@ AR_R = @AR_R@ AUTOCONF = @AUTOCONF@ AUTOHEADER = @AUTOHEADER@ AUTOMAKE = @AUTOMAKE@ AWK = @AWK@ CC = @CC@ CCDEPMODE = @CCDEPMODE@ CFLAGS = @CFLAGS@ CPP = @CPP@ CPPFLAGS = @CPPFLAGS@ CXX = @CXX@ CXXCPP = @CXXCPP@ CXXDEPMODE = @CXXDEPMODE@ CXXFLAGS = @CXXFLAGS@ CYGPATH_W = @CYGPATH_W@ DEFS = @DEFS@ DEPDIR = @DEPDIR@ DSYMUTIL = @DSYMUTIL@ DUMPBIN = @DUMPBIN@ ECHO_C = @ECHO_C@ ECHO_N = @ECHO_N@ ECHO_T = @ECHO_T@ EGREP = @EGREP@ EXEEXT = @EXEEXT@ FGREP = @FGREP@ GREP = @GREP@ HELP2MAN = @HELP2MAN@ INSTALL = @INSTALL@ INSTALL_DATA = @INSTALL_DATA@ INSTALL_PROGRAM = @INSTALL_PROGRAM@ INSTALL_SCRIPT = @INSTALL_SCRIPT@ INSTALL_STRIP_PROGRAM = @INSTALL_STRIP_PROGRAM@ LD = @LD@ LDFLAGS = @LDFLAGS@ LDFLAG_RDYNAMIC = @LDFLAG_RDYNAMIC@ LIBOBJS = @LIBOBJS@ LIBS = @LIBS@ LIBTOOL = @LIBTOOL@ LIB_CURSES = @LIB_CURSES@ LIB_DL = @LIB_DL@ LIPO = @LIPO@ LN_S = @LN_S@ LTLIBOBJS = @LTLIBOBJS@ MAINT = @MAINT@ MAKEINFO = @MAKEINFO@ MKDIR_P = @MKDIR_P@ NM = @NM@ NMEDIT = @NMEDIT@ OBJDUMP = @OBJDUMP@ OBJEXT = @OBJEXT@ OTOOL = @OTOOL@ OTOOL64 = @OTOOL64@ PACKAGE = @PACKAGE@ PACKAGE_BUGREPORT = @PACKAGE_BUGREPORT@ PACKAGE_NAME = @PACKAGE_NAME@ PACKAGE_STRING = @PACKAGE_STRING@ PACKAGE_TARNAME = @PACKAGE_TARNAME@ PACKAGE_URL = @PACKAGE_URL@ PACKAGE_VERSION = @PACKAGE_VERSION@ PATH_SEPARATOR = @PATH_SEPARATOR@ RANLIB = @RANLIB@ SED = @SED@ SET_MAKE = @SET_MAKE@ SHELL = @SHELL@ STRIP = @STRIP@ VERSION = @VERSION@ abs_builddir = @abs_builddir@ abs_srcdir = @abs_srcdir@ abs_top_builddir = @abs_top_builddir@ abs_top_srcdir = @abs_top_srcdir@ ac_ct_CC = @ac_ct_CC@ ac_ct_CXX = @ac_ct_CXX@ ac_ct_DUMPBIN = @ac_ct_DUMPBIN@ am__include = @am__include@ am__leading_dot = @am__leading_dot@ am__quote = @am__quote@ am__tar = @am__tar@ am__untar = @am__untar@ bindir = @bindir@ build = @build@ build_alias = @build_alias@ build_cpu = @build_cpu@ build_os = @build_os@ build_vendor = @build_vendor@ builddir = @builddir@ datadir = @datadir@ datarootdir = @datarootdir@ docdir = @docdir@ dvidir = @dvidir@ exec_prefix = @exec_prefix@ host = @host@ host_alias = @host_alias@ host_cpu = @host_cpu@ host_os = @host_os@ host_vendor = @host_vendor@ htmldir = @htmldir@ includedir = @includedir@ infodir = @infodir@ install_sh = @install_sh@ libdir = @libdir@ libexecdir = @libexecdir@ localedir = @localedir@ localstatedir = @localstatedir@ lt_ECHO = @lt_ECHO@ mandir = @mandir@ mkdir_p = @mkdir_p@ oldincludedir = @oldincludedir@ pdfdir = @pdfdir@ prefix = @prefix@ program_transform_name = @program_transform_name@ psdir = 
@psdir@ sbindir = @sbindir@ sharedstatedir = @sharedstatedir@ srcdir = @srcdir@ std_include = @std_include@ sysconfdir = @sysconfdir@ target_alias = @target_alias@ top_build_prefix = @top_build_prefix@ top_builddir = @top_builddir@ top_srcdir = @top_srcdir@ dist_man1_MANS = \ reporter.man @ENABLE_COMPARATOR_TRUE@comparator_SOURCES = \ @ENABLE_COMPARATOR_TRUE@ comparator.cc \ @ENABLE_COMPARATOR_TRUE@ CompOpts.cc @ENABLE_COMPARATOR_TRUE@comparator_LDADD = \ @ENABLE_COMPARATOR_TRUE@ libloganalyzers.la \ @ENABLE_COMPARATOR_TRUE@ ../xml/libxml.a \ @ENABLE_COMPARATOR_TRUE@ ../Hapy/src/libHapy.la \ @ENABLE_COMPARATOR_TRUE@ ../base/libbase.a \ @ENABLE_COMPARATOR_TRUE@ ../xstd/libxstd.a \ @ENABLE_COMPARATOR_TRUE@ @LIBOBJS@ noinst_LTLIBRARIES = \ libloganalyzers.la libloganalyzers_la_SOURCES = \ InfoScope.cc \ InfoScopeDim.cc \ InfoScopes.cc \ Stex.cc \ LoadStex.cc \ LoadStexes.cc \ \ SomeInfo.cc \ TestInfo.cc \ SideInfo.cc \ ProcInfo.cc \ PhaseInfo.cc \ PhaseTrace.cc \ \ ReportFigure.cc \ TmSzHistFig.cc \ RptmHistFig.cc \ SizeHistFig.cc \ ReportTraceFigure.cc \ PointTraceFig.cc \ RptmTraceFig.cc \ LevelTraceFig.cc \ LoadTraceFig.cc \ HistogramFigure.cc \ ScatteredFig.cc \ \ ReportBlob.cc \ BlobIdx.cc \ BlobDb.cc \ \ SectionState.cc \ RepToHtmlFile.cc \ \ RepOpts.cc \ \ Sample.cc \ Panorama.cc \ Formatter.cc \ \ StatTable.cc reporter_SOURCES = \ reporter.cc noinst_HEADERS = \ BlobDb.h \ BlobIdx.h \ CompOpts.h \ Formatter.h \ InfoScope.h \ InfoScopeDim.h \ InfoScopes.h \ StexBase.h \ PointStex.h \ HistStex.h \ LevelStex.h \ LevelTraceFig.h \ LoadStex.h \ LoadStexes.h \ LoadTraceFig.h \ Panorama.h \ PhaseInfo.h \ PhaseTrace.h \ ProcInfo.h \ RepOpts.h \ RepToHtmlFile.h \ ReportBlob.h \ ReportFigure.h \ ReportTraceFigure.h \ PointTraceFig.h \ HistogramFigure.h \ ScatteredFig.h \ RptmHistFig.h \ RptmTraceFig.h \ Sample.h \ SectionState.h \ SideInfo.h \ SizeHistFig.h \ SomeInfo.h \ Stex.h \ TestInfo.h \ TmSzHistFig.h \ StatTable.h \ \ $(top_builddir)/config.h reporter_LDADD = \ libloganalyzers.la \ ../logextractors/liblogextractors.a \ ../runtime/libruntime.a \ ../xml/libxml.a \ ../base/libbase.a \ ../xstd/libxstd.a \ @LIBOBJS@ INCLUDES = \ -I$(top_srcdir)/src \ -I$(top_builddir) \ -I$(top_builddir)/src \ -I$(top_srcdir)/src/Hapy/src/include # top_builddir/ is needed for generated config.h # top_builddir/src/ is needed for generated src/xstd/h/stdint.h # top_srcdir/ is needed for post-config.h # TODO: move post-config.h and generated config.h to src? AM_CPPFLAGS = -I$(top_builddir) -I$(top_builddir)/src -I$(top_srcdir) -I$(top_srcdir)/src all: all-am .SUFFIXES: .SUFFIXES: .cc .lo .o .obj $(srcdir)/Makefile.in: @MAINTAINER_MODE_TRUE@ $(srcdir)/Makefile.am $(top_srcdir)/common.am $(am__configure_deps) @for dep in $?; do \ case '$(am__configure_deps)' in \ *$$dep*) \ ( cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh ) \ && { if test -f $@; then exit 0; else break; fi; }; \ exit 1;; \ esac; \ done; \ echo ' cd $(top_srcdir) && $(AUTOMAKE) --foreign src/loganalyzers/Makefile'; \ $(am__cd) $(top_srcdir) && \ $(AUTOMAKE) --foreign src/loganalyzers/Makefile .PRECIOUS: Makefile Makefile: $(srcdir)/Makefile.in $(top_builddir)/config.status @case '$?' 
in \ *config.status*) \ cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh;; \ *) \ echo ' cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe)'; \ cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe);; \ esac; $(top_builddir)/config.status: $(top_srcdir)/configure $(CONFIG_STATUS_DEPENDENCIES) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(top_srcdir)/configure: @MAINTAINER_MODE_TRUE@ $(am__configure_deps) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(ACLOCAL_M4): @MAINTAINER_MODE_TRUE@ $(am__aclocal_m4_deps) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(am__aclocal_m4_deps): clean-noinstLTLIBRARIES: -test -z "$(noinst_LTLIBRARIES)" || rm -f $(noinst_LTLIBRARIES) @list='$(noinst_LTLIBRARIES)'; for p in $$list; do \ dir="`echo $$p | sed -e 's|/[^/]*$$||'`"; \ test "$$dir" != "$$p" || dir=.; \ echo "rm -f \"$${dir}/so_locations\""; \ rm -f "$${dir}/so_locations"; \ done libloganalyzers.la: $(libloganalyzers_la_OBJECTS) $(libloganalyzers_la_DEPENDENCIES) $(CXXLINK) $(libloganalyzers_la_OBJECTS) $(libloganalyzers_la_LIBADD) $(LIBS) install-binPROGRAMS: $(bin_PROGRAMS) @$(NORMAL_INSTALL) test -z "$(bindir)" || $(MKDIR_P) "$(DESTDIR)$(bindir)" @list='$(bin_PROGRAMS)'; test -n "$(bindir)" || list=; \ for p in $$list; do echo "$$p $$p"; done | \ sed 's/$(EXEEXT)$$//' | \ while read p p1; do if test -f $$p || test -f $$p1; \ then echo "$$p"; echo "$$p"; else :; fi; \ done | \ sed -e 'p;s,.*/,,;n;h' -e 's|.*|.|' \ -e 'p;x;s,.*/,,;s/$(EXEEXT)$$//;$(transform);s/$$/$(EXEEXT)/' | \ sed 'N;N;N;s,\n, ,g' | \ $(AWK) 'BEGIN { files["."] = ""; dirs["."] = 1 } \ { d=$$3; if (dirs[d] != 1) { print "d", d; dirs[d] = 1 } \ if ($$2 == $$4) files[d] = files[d] " " $$1; \ else { print "f", $$3 "/" $$4, $$1; } } \ END { for (d in files) print "f", d, files[d] }' | \ while read type dir files; do \ if test "$$dir" = .; then dir=; else dir=/$$dir; fi; \ test -z "$$files" || { \ echo " $(INSTALL_PROGRAM_ENV) $(LIBTOOL) $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=install $(INSTALL_PROGRAM) $$files '$(DESTDIR)$(bindir)$$dir'"; \ $(INSTALL_PROGRAM_ENV) $(LIBTOOL) $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=install $(INSTALL_PROGRAM) $$files "$(DESTDIR)$(bindir)$$dir" || exit $$?; \ } \ ; done uninstall-binPROGRAMS: @$(NORMAL_UNINSTALL) @list='$(bin_PROGRAMS)'; test -n "$(bindir)" || list=; \ files=`for p in $$list; do echo "$$p"; done | \ sed -e 'h;s,^.*/,,;s/$(EXEEXT)$$//;$(transform)' \ -e 's/$$/$(EXEEXT)/' `; \ test -n "$$list" || exit 0; \ echo " ( cd '$(DESTDIR)$(bindir)' && rm -f" $$files ")"; \ cd "$(DESTDIR)$(bindir)" && rm -f $$files clean-binPROGRAMS: @list='$(bin_PROGRAMS)'; test -n "$$list" || exit 0; \ echo " rm -f" $$list; \ rm -f $$list || exit $$?; \ test -n "$(EXEEXT)" || exit 0; \ list=`for p in $$list; do echo "$$p"; done | sed 's/$(EXEEXT)$$//'`; \ echo " rm -f" $$list; \ rm -f $$list comparator$(EXEEXT): $(comparator_OBJECTS) $(comparator_DEPENDENCIES) @rm -f comparator$(EXEEXT) $(CXXLINK) $(comparator_OBJECTS) $(comparator_LDADD) $(LIBS) reporter$(EXEEXT): $(reporter_OBJECTS) $(reporter_DEPENDENCIES) @rm -f reporter$(EXEEXT) $(CXXLINK) $(reporter_OBJECTS) $(reporter_LDADD) $(LIBS) mostlyclean-compile: -rm -f *.$(OBJEXT) distclean-compile: -rm -f *.tab.c @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/BlobDb.Plo@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/BlobIdx.Plo@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/CompOpts.Po@am__quote@ @AMDEP_TRUE@@am__include@ 
@am__quote@./$(DEPDIR)/Formatter.Plo@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/HistogramFigure.Plo@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/InfoScope.Plo@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/InfoScopeDim.Plo@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/InfoScopes.Plo@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/LevelTraceFig.Plo@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/LoadStex.Plo@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/LoadStexes.Plo@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/LoadTraceFig.Plo@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/Panorama.Plo@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/PhaseInfo.Plo@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/PhaseTrace.Plo@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/PointTraceFig.Plo@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/ProcInfo.Plo@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/RepOpts.Plo@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/RepToHtmlFile.Plo@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/ReportBlob.Plo@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/ReportFigure.Plo@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/ReportTraceFigure.Plo@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/RptmHistFig.Plo@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/RptmTraceFig.Plo@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/Sample.Plo@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/ScatteredFig.Plo@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/SectionState.Plo@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/SideInfo.Plo@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/SizeHistFig.Plo@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/SomeInfo.Plo@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/StatTable.Plo@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/Stex.Plo@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/TestInfo.Plo@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/TmSzHistFig.Plo@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/comparator.Po@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/reporter.Po@am__quote@ .cc.o: @am__fastdepCXX_TRUE@ $(CXXCOMPILE) -MT $@ -MD -MP -MF $(DEPDIR)/$*.Tpo -c -o $@ $< @am__fastdepCXX_TRUE@ $(am__mv) $(DEPDIR)/$*.Tpo $(DEPDIR)/$*.Po @AMDEP_TRUE@@am__fastdepCXX_FALSE@ source='$<' object='$@' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(CXXCOMPILE) -c -o $@ $< .cc.obj: @am__fastdepCXX_TRUE@ $(CXXCOMPILE) -MT $@ -MD -MP -MF $(DEPDIR)/$*.Tpo -c -o $@ `$(CYGPATH_W) '$<'` @am__fastdepCXX_TRUE@ $(am__mv) $(DEPDIR)/$*.Tpo $(DEPDIR)/$*.Po @AMDEP_TRUE@@am__fastdepCXX_FALSE@ source='$<' object='$@' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(CXXCOMPILE) -c -o $@ `$(CYGPATH_W) '$<'` .cc.lo: @am__fastdepCXX_TRUE@ $(LTCXXCOMPILE) -MT $@ -MD -MP -MF $(DEPDIR)/$*.Tpo -c -o $@ $< @am__fastdepCXX_TRUE@ $(am__mv) $(DEPDIR)/$*.Tpo $(DEPDIR)/$*.Plo @AMDEP_TRUE@@am__fastdepCXX_FALSE@ source='$<' object='$@' libtool=yes @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ 
DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(LTCXXCOMPILE) -c -o $@ $< mostlyclean-libtool: -rm -f *.lo clean-libtool: -rm -rf .libs _libs install-man1: $(dist_man1_MANS) @$(NORMAL_INSTALL) test -z "$(man1dir)" || $(MKDIR_P) "$(DESTDIR)$(man1dir)" @list='$(dist_man1_MANS)'; test -n "$(man1dir)" || exit 0; \ { for i in $$list; do echo "$$i"; done; \ } | while read p; do \ if test -f $$p; then d=; else d="$(srcdir)/"; fi; \ echo "$$d$$p"; echo "$$p"; \ done | \ sed -e 'n;s,.*/,,;p;h;s,.*\.,,;s,^[^1][0-9a-z]*$$,1,;x' \ -e 's,\.[0-9a-z]*$$,,;$(transform);G;s,\n,.,' | \ sed 'N;N;s,\n, ,g' | { \ list=; while read file base inst; do \ if test "$$base" = "$$inst"; then list="$$list $$file"; else \ echo " $(INSTALL_DATA) '$$file' '$(DESTDIR)$(man1dir)/$$inst'"; \ $(INSTALL_DATA) "$$file" "$(DESTDIR)$(man1dir)/$$inst" || exit $$?; \ fi; \ done; \ for i in $$list; do echo "$$i"; done | $(am__base_list) | \ while read files; do \ test -z "$$files" || { \ echo " $(INSTALL_DATA) $$files '$(DESTDIR)$(man1dir)'"; \ $(INSTALL_DATA) $$files "$(DESTDIR)$(man1dir)" || exit $$?; }; \ done; } uninstall-man1: @$(NORMAL_UNINSTALL) @list='$(dist_man1_MANS)'; test -n "$(man1dir)" || exit 0; \ files=`{ for i in $$list; do echo "$$i"; done; \ } | sed -e 's,.*/,,;h;s,.*\.,,;s,^[^1][0-9a-z]*$$,1,;x' \ -e 's,\.[0-9a-z]*$$,,;$(transform);G;s,\n,.,'`; \ test -z "$$files" || { \ echo " ( cd '$(DESTDIR)$(man1dir)' && rm -f" $$files ")"; \ cd "$(DESTDIR)$(man1dir)" && rm -f $$files; } ID: $(HEADERS) $(SOURCES) $(LISP) $(TAGS_FILES) list='$(SOURCES) $(HEADERS) $(LISP) $(TAGS_FILES)'; \ unique=`for i in $$list; do \ if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \ done | \ $(AWK) '{ files[$$0] = 1; nonempty = 1; } \ END { if (nonempty) { for (i in files) print i; }; }'`; \ mkid -fID $$unique tags: TAGS TAGS: $(HEADERS) $(SOURCES) $(TAGS_DEPENDENCIES) \ $(TAGS_FILES) $(LISP) set x; \ here=`pwd`; \ list='$(SOURCES) $(HEADERS) $(LISP) $(TAGS_FILES)'; \ unique=`for i in $$list; do \ if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \ done | \ $(AWK) '{ files[$$0] = 1; nonempty = 1; } \ END { if (nonempty) { for (i in files) print i; }; }'`; \ shift; \ if test -z "$(ETAGS_ARGS)$$*$$unique"; then :; else \ test -n "$$unique" || unique=$$empty_fix; \ if test $$# -gt 0; then \ $(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \ "$$@" $$unique; \ else \ $(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \ $$unique; \ fi; \ fi ctags: CTAGS CTAGS: $(HEADERS) $(SOURCES) $(TAGS_DEPENDENCIES) \ $(TAGS_FILES) $(LISP) list='$(SOURCES) $(HEADERS) $(LISP) $(TAGS_FILES)'; \ unique=`for i in $$list; do \ if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \ done | \ $(AWK) '{ files[$$0] = 1; nonempty = 1; } \ END { if (nonempty) { for (i in files) print i; }; }'`; \ test -z "$(CTAGS_ARGS)$$unique" \ || $(CTAGS) $(CTAGSFLAGS) $(AM_CTAGSFLAGS) $(CTAGS_ARGS) \ $$unique GTAGS: here=`$(am__cd) $(top_builddir) && pwd` \ && $(am__cd) $(top_srcdir) \ && gtags -i $(GTAGS_ARGS) "$$here" distclean-tags: -rm -f TAGS ID GTAGS GRTAGS GSYMS GPATH tags distdir: $(DISTFILES) @list='$(MANS)'; if test -n "$$list"; then \ list=`for p in $$list; do \ if test -f $$p; then d=; else d="$(srcdir)/"; fi; \ if test -f "$$d$$p"; then echo "$$d$$p"; else :; fi; done`; \ if test -n "$$list" && \ grep 'ab help2man is required to generate this page' $$list >/dev/null; then \ echo "error: found man pages containing the \`missing help2man' replacement text:" >&2; \ grep -l 'ab help2man is required 
to generate this page' $$list | sed 's/^/ /' >&2; \ echo " to fix them, install help2man, remove and regenerate the man pages;" >&2; \ echo " typically \`make maintainer-clean' will remove them" >&2; \ exit 1; \ else :; fi; \ else :; fi @srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \ topsrcdirstrip=`echo "$(top_srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \ list='$(DISTFILES)'; \ dist_files=`for file in $$list; do echo $$file; done | \ sed -e "s|^$$srcdirstrip/||;t" \ -e "s|^$$topsrcdirstrip/|$(top_builddir)/|;t"`; \ case $$dist_files in \ */*) $(MKDIR_P) `echo "$$dist_files" | \ sed '/\//!d;s|^|$(distdir)/|;s,/[^/]*$$,,' | \ sort -u` ;; \ esac; \ for file in $$dist_files; do \ if test -f $$file || test -d $$file; then d=.; else d=$(srcdir); fi; \ if test -d $$d/$$file; then \ dir=`echo "/$$file" | sed -e 's,/[^/]*$$,,'`; \ if test -d "$(distdir)/$$file"; then \ find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \ fi; \ if test -d $(srcdir)/$$file && test $$d != $(srcdir); then \ cp -fpR $(srcdir)/$$file "$(distdir)$$dir" || exit 1; \ find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \ fi; \ cp -fpR $$d/$$file "$(distdir)$$dir" || exit 1; \ else \ test -f "$(distdir)/$$file" \ || cp -p $$d/$$file "$(distdir)/$$file" \ || exit 1; \ fi; \ done check-am: all-am check: check-am all-am: Makefile $(LTLIBRARIES) $(PROGRAMS) $(MANS) $(HEADERS) installdirs: for dir in "$(DESTDIR)$(bindir)" "$(DESTDIR)$(man1dir)"; do \ test -z "$$dir" || $(MKDIR_P) "$$dir"; \ done install: install-am install-exec: install-exec-am install-data: install-data-am uninstall: uninstall-am install-am: all-am @$(MAKE) $(AM_MAKEFLAGS) install-exec-am install-data-am installcheck: installcheck-am install-strip: $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ `test -z '$(STRIP)' || \ echo "INSTALL_PROGRAM_ENV=STRIPPROG='$(STRIP)'"` install mostlyclean-generic: clean-generic: distclean-generic: -test -z "$(CONFIG_CLEAN_FILES)" || rm -f $(CONFIG_CLEAN_FILES) -test . = "$(srcdir)" || test -z "$(CONFIG_CLEAN_VPATH_FILES)" || rm -f $(CONFIG_CLEAN_VPATH_FILES) maintainer-clean-generic: @echo "This command is intended for maintainers to use" @echo "it deletes files that may require special tools to rebuild." 
clean: clean-am clean-am: clean-binPROGRAMS clean-generic clean-libtool \ clean-noinstLTLIBRARIES mostlyclean-am distclean: distclean-am -rm -rf ./$(DEPDIR) -rm -f Makefile distclean-am: clean-am distclean-compile distclean-generic \ distclean-tags dvi: dvi-am dvi-am: html: html-am html-am: info: info-am info-am: install-data-am: install-man install-dvi: install-dvi-am install-dvi-am: install-exec-am: install-binPROGRAMS install-html: install-html-am install-html-am: install-info: install-info-am install-info-am: install-man: install-man1 install-pdf: install-pdf-am install-pdf-am: install-ps: install-ps-am install-ps-am: installcheck-am: maintainer-clean: maintainer-clean-am -rm -rf ./$(DEPDIR) -rm -f Makefile maintainer-clean-am: distclean-am maintainer-clean-generic mostlyclean: mostlyclean-am mostlyclean-am: mostlyclean-compile mostlyclean-generic \ mostlyclean-libtool pdf: pdf-am pdf-am: ps: ps-am ps-am: uninstall-am: uninstall-binPROGRAMS uninstall-man uninstall-man: uninstall-man1 .MAKE: install-am install-strip .PHONY: CTAGS GTAGS all all-am check check-am clean clean-binPROGRAMS \ clean-generic clean-libtool clean-noinstLTLIBRARIES ctags \ distclean distclean-compile distclean-generic \ distclean-libtool distclean-tags distdir dvi dvi-am html \ html-am info info-am install install-am install-binPROGRAMS \ install-data install-data-am install-dvi install-dvi-am \ install-exec install-exec-am install-html install-html-am \ install-info install-info-am install-man install-man1 \ install-pdf install-pdf-am install-ps install-ps-am \ install-strip installcheck installcheck-am installdirs \ maintainer-clean maintainer-clean-generic mostlyclean \ mostlyclean-compile mostlyclean-generic mostlyclean-libtool \ pdf pdf-am ps ps-am tags uninstall uninstall-am \ uninstall-binPROGRAMS uninstall-man uninstall-man1 @ENABLE_COMPARATOR_TRUE@ bin_PROGRAMS += comparator #AM_LDFLAGS = #imported_libs = @ENABLE_MANPAGES_GEN_TRUE@manpages-am: @top_srcdir@/common.h2m @ENABLE_MANPAGES_GEN_TRUE@ @for binary in $(bin_PROGRAMS) $(dist_bin_SCRIPTS); do \ @ENABLE_MANPAGES_GEN_TRUE@ echo "Generating manpage for $$binary"; \ @ENABLE_MANPAGES_GEN_TRUE@ manpage=`echo -n "$$binary" | sed -e 's/\..*//'`; \ @ENABLE_MANPAGES_GEN_TRUE@ name=`(grep \ @ENABLE_MANPAGES_GEN_TRUE@ --after-context=1 \ @ENABLE_MANPAGES_GEN_TRUE@ ".B \\\\\%polygraph-$$manpage" \ @ENABLE_MANPAGES_GEN_TRUE@ '@top_srcdir@/polygraph.man.in' || \ @ENABLE_MANPAGES_GEN_TRUE@ echo -n ' a part of Web Polygraph performance benchmark') | \ @ENABLE_MANPAGES_GEN_TRUE@ tail -1 | cut -c4-`; \ @ENABLE_MANPAGES_GEN_TRUE@ $(HELP2MAN) \ @ENABLE_MANPAGES_GEN_TRUE@ --no-info \ @ENABLE_MANPAGES_GEN_TRUE@ --name="$$name" \ @ENABLE_MANPAGES_GEN_TRUE@ --version-string="polygraph-$$manpage - $(PACKAGE_NAME)" \ @ENABLE_MANPAGES_GEN_TRUE@ --include='@top_srcdir@/common.h2m' \ @ENABLE_MANPAGES_GEN_TRUE@ --opt-include="$$manpage.h2m" \ @ENABLE_MANPAGES_GEN_TRUE@ --output="$$manpage.man" \ @ENABLE_MANPAGES_GEN_TRUE@ "./$$binary";\ @ENABLE_MANPAGES_GEN_TRUE@ done @ENABLE_MANPAGES_GEN_TRUE@ @if test 'x$(RECURSIVE_TARGETS)' != 'xmanpages-recursive' ; then \ @ENABLE_MANPAGES_GEN_TRUE@ $(MAKE) \ @ENABLE_MANPAGES_GEN_TRUE@ $(AM_MAKEFLAGS) \ @ENABLE_MANPAGES_GEN_TRUE@ RECURSIVE_TARGETS=manpages-recursive \ @ENABLE_MANPAGES_GEN_TRUE@ manpages-recursive; \ @ENABLE_MANPAGES_GEN_TRUE@ fi @ENABLE_MANPAGES_GEN_TRUE@manpages-recursive: @ENABLE_MANPAGES_GEN_TRUE@manpages: Makefile $(LIBRARIES) $(PROGRAMS) manpages-am manpages-recursive @ENABLE_MANPAGES_GEN_TRUE@manpagesclean-am: 
@ENABLE_MANPAGES_GEN_TRUE@ @rm -f $(dist_man1_MANS) @ENABLE_MANPAGES_GEN_TRUE@ @if test 'x$(RECURSIVE_TARGETS)' != 'xmanpagesclean-recursive' ; then \ @ENABLE_MANPAGES_GEN_TRUE@ $(MAKE) \ @ENABLE_MANPAGES_GEN_TRUE@ $(AM_MAKEFLAGS) \ @ENABLE_MANPAGES_GEN_TRUE@ RECURSIVE_TARGETS=manpagesclean-recursive \ @ENABLE_MANPAGES_GEN_TRUE@ manpagesclean-recursive; \ @ENABLE_MANPAGES_GEN_TRUE@ fi @ENABLE_MANPAGES_GEN_TRUE@manpagesclean-recursive: @ENABLE_MANPAGES_GEN_TRUE@manpagesclean: manpagesclean-am manpagesclean-recursive @ENABLE_MANPAGES_GEN_TRUE@.PHONY: manpages-am manpages-recursive manpages \ @ENABLE_MANPAGES_GEN_TRUE@ manpagesclean-am manpagesclean-recursive manpagesclean @ENABLE_MANPAGES_GEN_FALSE@manpages: @ENABLE_MANPAGES_GEN_FALSE@ @echo "Can not generate man pages. Please install help2man and reconfigure." @ENABLE_MANPAGES_GEN_FALSE@manpagesclean: @ENABLE_MANPAGES_GEN_FALSE@ @echo "Can not generate man pages. Please install help2man and reconfigure." @ENABLE_MANPAGES_GEN_FALSE@.PHONY: manpages manpagesclean # Tell versions [3.59,3.63) of GNU make to not export all variables. # Otherwise a system limit (for SysV at least) may be exceeded. .NOEXPORT: polygraph-4.3.2/src/loganalyzers/RepToHtmlFile.cc0000644000175000017500000002572211546440450021424 0ustar testertester /* Web Polygraph http://www.web-polygraph.org/ * Copyright 2003-2011 The Measurement Factory * Licensed under the Apache License, Version 2.0 */ #include "base/polygraph.h" #include #include "xml/XmlDoc.h" #include "xml/XmlTag.h" #include "xml/XmlAttr.h" #include "loganalyzers/ReportBlob.h" #include "loganalyzers/BlobDb.h" #include "loganalyzers/Sample.h" #include "loganalyzers/RepToHtmlFile.h" Map RepToHtmlFile::TheLocations; void RepToHtmlFile::Location(BlobDb &db, const ReportBlob &blob, const String &fname) { TheLocations.add(blob.key(), new String(fname)); CollectLocations(db, blob, fname); } void RepToHtmlFile::CollectLocations(BlobDb &db, const XmlNode &node, const String &fname) { if (node.attrs()) { if (node.name() == "report_blob" && node.attrs()->has("key")) { const String &key = node.attrs()->value("key"); if (!Location(key)) TheLocations.add(key, new String(fname + "#_" + key)); } else if (node.name() == "include" && node.attrs()->has("src") && node.attrs()->has("auth")) { const String &key = node.attrs()->value("src"); CollectLocations(db, db.get(key), fname); } } if (node.kids()) { for (int i = 0; i < node.kids()->count(); ++i) CollectLocations(db, *node.kids()->item(i), fname); } } String RepToHtmlFile::Location(const String &key) { String *name = 0; if (TheLocations.find(key, name)) return *name; else return 0; } RepToHtmlFile::RepToHtmlFile(BlobDb &db, ostream *aStream, const String &aLocation): theDb(db), theStream(aStream), theLocation(aLocation), theQuoteLevel(0) { } RepToHtmlFile::~RepToHtmlFile() { } void RepToHtmlFile::render(const XmlDoc &doc) { if (doc.root()) { doc.root()->render(*this); } } void RepToHtmlFile::renderReportBlob(const ReportBlob &blob) { static const String onePageSummaryStyles = "[id=\"summary.1page\"] h1, [id=\"summary.1page\"] h1 * { font-size: 16px }"\ "[id=\"summary.1page\"] * { font-size: 12px }"; *theStream << ""; renderBlob(blob); *theStream << "" << endl; } static void RepToHtmlFile_EscapeChar(char c, ostream &os) { if (c == '"') os << """; else if (c == '<') os << "<"; else if (c == '>') os << ">"; else if (c == '&') os << "&"; else os << c; } void RepToHtmlFile::renderText(const char *buf, Size sz) { for (int i = 0; i < sz; ++i, ++buf) RepToHtmlFile_EscapeChar(*buf, 
*theStream); } void RepToHtmlFile::renderTag(const XmlTag &tag) { if (tag.name() == "document") renderDocument(tag); else if (tag.name() == "section") renderSection(tag); else if (tag.name() == "chapter") renderChapter(tag); else if (tag.name() == "report_blob") renderBlob(tag); else if (tag.name() == "include") renderBlobInclude(tag); else if (tag.name() == "blob_ptr") renderBlobPtr(tag); else if (tag.name() == "measurement") renderMeasurement(tag); else if (tag.name() == "internal_error") *theStream << " err "; // XXX red, and add an ptr to an explanation else if (tag.name() == "codesample") { *theStream << "
";
		foreach(tag.kids());
		*theStream << "
"; } else if (tag.name() == "title") { // handled inside and
} else if (tag.name() == "description") { // handled inside } else if (tag.name() == "ul" || tag.name() == "ol") { renderList(tag); } else if (tag.name() == "th" || tag.name() == "td") { renderTableCell(tag); } else if (tag.name() == "img") { renderImage(tag); } else if (tag.name() == "br") { tag.printOpen(*theStream, ""); *theStream << ">"; } else { tag.printOpen(*theStream, ""); *theStream << ">"; foreach(tag.kids()); *theStream << ""; } } void RepToHtmlFile::renderDocument(const XmlTag &tag) { XmlSearchRes res; if (tag.kids()->selByTagName("title", res)) { *theStream << "

"; foreach(res.last()->kids()); *theStream << "

"; } foreach(tag.kids()); } void RepToHtmlFile::renderChapter(const XmlTag &tag) { theSectionState.reset(); if (tag.attrs()->has("name")) *theStream << "value("name") << "\">"; // title XmlSearchRes res; if (Should(tag.kids()->selByTagName("title", res))) { *theStream << "

"; foreach(res.last()->kids()); *theStream << "

" << endl; } foreach(tag.kids()); *theStream << endl << endl; } void RepToHtmlFile::renderSection(const XmlTag &tag) { const int sectLvl = theSectionState.level(); String trueNum; const String usrNum = theSectionState.begSection(tag, trueNum); *theStream << ""; // title const int hLvl = sectLvl + 1; XmlSearchRes res; if (Should(tag.kids()->selByTagName("title", res))) { *theStream << "' << usrNum << ' '; foreach(res.last()->kids()); *theStream << "' << endl; } if (sectLvl == 1) *theStream << "
"; foreach(tag.kids()); if (sectLvl == 1) *theStream << "
"; theSectionState.endSection(); *theStream << "
" << endl; } void RepToHtmlFile::renderBlobInclude(const XmlTag &tag) { const String &key = tag.attrs()->value("src"); const bool auth = tag.attrs()->has("auth"); theParents.append(&tag); if (!auth) ++theQuoteLevel; renderNode(theDb.get(key)); if (!auth) --theQuoteLevel; theParents.pop(); } void RepToHtmlFile::renderBlobPtr(const XmlTag &tag) { const String &key = tag.attrs()->value("key"); if (const String loc = location(key)) { *theStream << ""; foreach(tag.kids()); *theStream << ""; return; } *theStream << ""; if (!tag.attrs()->has("maybe_null")) { // XXX: we should output a link to the no-link error explanation if (theDb.has(key)) cerr << "internal_error: no location for blob '" << key << "'" << endl; else cerr << "internal_error: reference to an undefined blob '" << key << "'" << endl; } foreach(tag.kids()); *theStream << ""; } void RepToHtmlFile::renderBlob(const XmlTag &tag) { const String &key = tag.attrs()->value("key"); *theStream << ""; const bool div = !tag.attrs()->has("dtype", "span"); if (div) renderSampleStart(tag, "div", CompositeSample::TheId); XmlSearchRes res; tag.kids()->selByTagName("description", res); if (res.count()) { //*theStream << "
"; foreach(tag.kids()); //*theStream << "
"; *theStream << "
" << endl; //*theStream << "
"; for (int i = 0; i < res.count(); ++i) { foreach(res[i]->kids()); *theStream << "
" << endl; } //*theStream << "
"; *theStream << "
" << endl; } else { foreach(tag.kids()); } if (div) *theStream << ""; } void RepToHtmlFile::renderMeasurementVal(const XmlTag &tag, const String &val, bool renderUnit, const String &unit) { XmlSearchRes images; if (renderUnit && tag.kids()->selByTagName("image", images)) { foreach(images.last()->kids()); } else { String typeId = unit.len() > 0 ? NumberSample::TheId : TextSample::TheId; if (const XmlAttr *a = tag.attrs()->has("typeId")) typeId = a->value(); renderSampleStart(*tag.parent(), "span", typeId); *theStream << val; *theStream << ""; if (renderUnit) *theStream << unit; } } void RepToHtmlFile::renderMeasurement(const XmlTag &tag) { if (tag.attrs()->has("value")) { String val = tag.attrs()->value("value"); String unit = tag.attrs()->value("unit"); if (unit == "xact" || unit == "conn") val = val(0, val.chr('.') - val.cstr()); else if (unit == "string") unit = ""; bool renderUnit = true; const XmlNode *p = tag.parent(); int parentDepth = 0; while (renderUnit) { if (!p) { if (parentDepth < theParents.count()) p = theParents.last(parentDepth++); } if (!p) break; if (p->name() == "table") { renderUnit = p->attrs()->has("border", "0"); break; } renderUnit = !p->attrs() || !p->attrs()->has("align", "right"); p = p->parent(); } if (!renderUnit) *theStream << ""; renderMeasurementVal(tag, val, renderUnit, unit); if (!renderUnit) *theStream << ""; } else { foreach(tag.kids()); } } void RepToHtmlFile::renderList(const XmlTag &tag) { tag.printOpen(*theStream, ""); *theStream << ">" << endl; for (int i = 0; i < tag.kids()->count(); ++i) { *theStream << "\t
  • "; renderNode(*tag.kids()->item(i)); *theStream << "
  • " << endl; } *theStream << ""; } void RepToHtmlFile::renderTableCell(const XmlTag &tag) { tag.printOpen(*theStream, ""); if (tag.attrs()->has("emphasized")) *theStream << " bgcolor='#FFFFFF'"; *theStream << ">"; if (tag.kids()->count()) { if (tag.attrs()->has("disabled")) *theStream << ""; foreach(tag.kids()); if (tag.attrs()->has("disabled")) *theStream << ""; } else { *theStream << " "; } *theStream << "" << endl; } void RepToHtmlFile::renderImage(const XmlTag &tag) { *theStream << "count(); ++a) { const String &name = tag.attrs()->item(a)->name(); String value = tag.attrs()->item(a)->value(); if (name == "src") value = relativeUrl(theLocation, value); *theStream << ' ' << name << "='" << value << "'"; } *theStream << ">"; } void RepToHtmlFile::renderSampleStart(const XmlNode &n, const String &element, const String &typeId) { *theStream << "<" << element; if (const XmlAttrs *attrs = n.attrs()) { *theStream << " class=\"" << typeId << "\""; if (theQuoteLevel == 0) // non-authoritative includes are not IDed *theStream << " id=\"" << attrs->value("key") << "\""; const XmlAttr *title = attrs->has("title"); if (title && title->value().len() > 0 && title->value() != ReportBlob::NilTitle) title->print(*theStream << " ", String()); } *theStream << ">"; } String RepToHtmlFile::relativeUrl(const String &from, const String &to) const { String cur = from; // cut ancor off if (const char *ancor = cur.rchr('#')) cur = cur(0, ancor - cur.cstr()); // cut file name off if (const char *fname = cur.rchr('/')) cur = cur(0, fname+1 - cur.cstr()); else cur = ""; // get to the common root by replacing last dir with '..' String back = ""; while (cur && !cur.casePrefixOf(to.cstr(), to.len())) { const char *rdir = cur.rchr('/'); while (rdir > cur.cstr() && rdir[-1] == '/') --rdir; back += "../"; if (cur.cstr() < rdir) cur = cur(0, rdir-cur.cstr()); else cur = ""; } const String forth = to(cur.len(), to.len()); return back + forth; } String RepToHtmlFile::location(const String &key) const { if (const String loc = Location(key)) return relativeUrl(theLocation, loc); else return 0; } polygraph-4.3.2/src/loganalyzers/RptmTraceFig.h0000644000175000017500000000163611546440450021135 0ustar testertester /* Web Polygraph http://www.web-polygraph.org/ * Copyright 2003-2011 The Measurement Factory * Licensed under the Apache License, Version 2.0 */ #ifndef POLYGRAPH__LOGANALYZERS_RPTMTRACEFIG_H #define POLYGRAPH__LOGANALYZERS_RPTMTRACEFIG_H #include "xstd/String.h" #include "loganalyzers/ReportTraceFigure.h" class Stex; class PhaseInfo; class PhaseTrace; class StatIntvlRec; // creates response time trace figure based on interval stats class RptmTraceFig: public ReportTraceFigure { public: RptmTraceFig(); void stats(const Stex *aStex, const PhaseInfo *phase); void moreStats(const Stex *aStex); protected: virtual int createCtrlFile(); virtual void setCtrlOptions(); int dumpDataLines(const Stex *stex); int dumpDataLine(const Stex *stex, Time stamp, const StatIntvlRec &r); protected: Array theStexes; const PhaseInfo *thePhase; const PhaseTrace *theTrace; }; #endif polygraph-4.3.2/src/loganalyzers/BlobDb.h0000644000175000017500000000316611546440450017732 0ustar testertester /* Web Polygraph http://www.web-polygraph.org/ * Copyright 2003-2011 The Measurement Factory * Licensed under the Apache License, Version 2.0 */ #ifndef POLYGRAPH__LOGANALYZERS_BLOBDB_H #define POLYGRAPH__LOGANALYZERS_BLOBDB_H #include "xml/XmlNodes.h" #include "loganalyzers/InfoScope.h" #include "loganalyzers/BlobIdx.h" class ReportBlob; // a collection 
of ReportBlobs class BlobDb { public: typedef InfoScope Scope; public: static String Key(const String &name, const Scope &scope); static String KeySuffix(const Scope &scope); public: BlobDb(); const ReportBlob *has(const String &key); const ReportBlob *add(const ReportBlob &b); BlobDb &operator <<(const ReportBlob &b) { add(b); return *this; } const XmlNodes &blobs() const { return theBlobs; } // never fail void link(const String &oldString, const String &newString); const ReportBlob &get(const String &key); const XmlNode &include(const String &key); // at most once const XmlNode "e(const String &key); // many times XmlNode &ptr(const String &key, const XmlNodes &context); XmlTag reportNote(const String &id, const XmlNode ¬e); ostream &print(ostream &os, const String &pfx) const; protected: const ReportBlob *find(const String &key) const; protected: XmlNodes theBlobs; BlobIdx theBlobIdx; XmlNodes theIncludes; XmlNodes thePtrs; XmlNodes theErrors; int theNotesCount; }; inline String operator +(const String &name, const InfoScope &scope) { return BlobDb::Key(name, scope); } inline String operator +(const char *name, const InfoScope &scope) { return String(name) + scope; } #endif polygraph-4.3.2/src/loganalyzers/Makefile.am0000644000175000017500000000403711335553726020476 0ustar testertester## Process this file with automake to produce Makefile.in bin_PROGRAMS = \ reporter dist_man1_MANS = \ reporter.man EXTRA_PROGRAMS = comparator if ENABLE_COMPARATOR bin_PROGRAMS += comparator comparator_SOURCES = \ comparator.cc \ CompOpts.cc comparator_LDADD = \ libloganalyzers.la \ ../xml/libxml.a \ ../Hapy/src/libHapy.la \ ../base/libbase.a \ ../xstd/libxstd.a \ @LIBOBJS@ endif noinst_LTLIBRARIES = \ libloganalyzers.la libloganalyzers_la_SOURCES = \ InfoScope.cc \ InfoScopeDim.cc \ InfoScopes.cc \ Stex.cc \ LoadStex.cc \ LoadStexes.cc \ \ SomeInfo.cc \ TestInfo.cc \ SideInfo.cc \ ProcInfo.cc \ PhaseInfo.cc \ PhaseTrace.cc \ \ ReportFigure.cc \ TmSzHistFig.cc \ RptmHistFig.cc \ SizeHistFig.cc \ ReportTraceFigure.cc \ PointTraceFig.cc \ RptmTraceFig.cc \ LevelTraceFig.cc \ LoadTraceFig.cc \ HistogramFigure.cc \ ScatteredFig.cc \ \ ReportBlob.cc \ BlobIdx.cc \ BlobDb.cc \ \ SectionState.cc \ RepToHtmlFile.cc \ \ RepOpts.cc \ \ Sample.cc \ Panorama.cc \ Formatter.cc \ \ StatTable.cc reporter_SOURCES = \ reporter.cc noinst_HEADERS = \ BlobDb.h \ BlobIdx.h \ CompOpts.h \ Formatter.h \ InfoScope.h \ InfoScopeDim.h \ InfoScopes.h \ StexBase.h \ PointStex.h \ HistStex.h \ LevelStex.h \ LevelTraceFig.h \ LoadStex.h \ LoadStexes.h \ LoadTraceFig.h \ Panorama.h \ PhaseInfo.h \ PhaseTrace.h \ ProcInfo.h \ RepOpts.h \ RepToHtmlFile.h \ ReportBlob.h \ ReportFigure.h \ ReportTraceFigure.h \ PointTraceFig.h \ HistogramFigure.h \ ScatteredFig.h \ RptmHistFig.h \ RptmTraceFig.h \ Sample.h \ SectionState.h \ SideInfo.h \ SizeHistFig.h \ SomeInfo.h \ Stex.h \ TestInfo.h \ TmSzHistFig.h \ StatTable.h \ \ $(top_builddir)/config.h reporter_LDADD = \ libloganalyzers.la \ ../logextractors/liblogextractors.a \ ../runtime/libruntime.a \ ../xml/libxml.a \ ../base/libbase.a \ ../xstd/libxstd.a \ @LIBOBJS@ INCLUDES = \ -I$(top_srcdir)/src \ -I$(top_builddir) \ -I$(top_builddir)/src \ -I$(top_srcdir)/src/Hapy/src/include include $(top_srcdir)/common.am polygraph-4.3.2/src/loganalyzers/HistogramFigure.h0000644000175000017500000000156211546440450021703 0ustar testertester /* Web Polygraph http://www.web-polygraph.org/ * Copyright 2003-2011 The Measurement Factory * Licensed under the Apache License, Version 2.0 */ #ifndef 
POLYGRAPH__LOGANALYZERS_HISTOGRAMFIGURE_H #define POLYGRAPH__LOGANALYZERS_HISTOGRAMFIGURE_H #include "loganalyzers/ReportFigure.h" class HistStex; class Histogram; class HistogramBin; class PhaseInfo; // creates a distribution figure based on phase stats class HistogramFigure: public ReportFigure { public: HistogramFigure(); void stats(const HistStex *aStex, const PhaseInfo *phase); void compareWith(const HistStex *stex); protected: virtual int createCtrlFile(); virtual void setCtrlOptions(); int dumpDataLine(const HistogramBin &bin, int totCount); protected: const PhaseInfo *thePhase; const HistStex *theStex; Array theComparison; // theStex and others }; #endif polygraph-4.3.2/src/loganalyzers/RepOpts.h0000644000175000017500000000145611546440450020202 0ustar testertester /* Web Polygraph http://www.web-polygraph.org/ * Copyright 2003-2011 The Measurement Factory * Licensed under the Apache License, Version 2.0 */ #ifndef POLYGRAPH__LOGANALYZERS_REPOPTS_H #define POLYGRAPH__LOGANALYZERS_REPOPTS_H #include "base/opts.h" #include "base/polyOpts.h" // options for the reporter class RepOpts: public OptGrp { public: RepOpts(); // defaults virtual ostream &printAnonym(ostream &os) const; virtual bool parseAnonym(const Array &opts); virtual bool canParseAnonym() const { return true; } virtual bool validate() const; public: HelpOpt theHelpOpt; VersionOpt theVersOpt; StrOpt theLabel; StrArrOpt thePhases; StrOpt theRepDir; StrOpt theTmpDir; StrOpt thePlotter; Array theFiles; }; extern RepOpts TheRepOpts; #endif polygraph-4.3.2/src/loganalyzers/StatTable.cc0000644000175000017500000000137611546440450020630 0ustar testertester /* Web Polygraph http://www.web-polygraph.org/ * Copyright 2003-2011 The Measurement Factory * Licensed under the Apache License, Version 2.0 */ #include "loganalyzers/StatTable.h" #include "xml/XmlAttr.h" #include "xml/XmlParagraph.h" #include "xml/XmlText.h" XmlTag &operator <<(XmlTag &tag, const StatTable &statTable) { const XmlTable &table(statTable); tag << table; const std::list &unknowns(statTable.unknowns()); if (!unknowns.empty()) { XmlTextTag p; p.buf() << "No events observed for the following statistics:"; for (std::list::const_iterator i = unknowns.begin(); i != unknowns.end(); ++i) p.buf() << (i == unknowns.begin() ? 
" " : ", ") << *i; p.buf() << '.'; tag << p; } return tag; } polygraph-4.3.2/src/loganalyzers/Panorama.cc0000644000175000017500000000655311546440450020505 0ustar testertester /* Web Polygraph http://www.web-polygraph.org/ * Copyright 2003-2011 The Measurement Factory * Licensed under the Apache License, Version 2.0 */ #include "base/polygraph.h" #include "base/AnyToString.h" #include "loganalyzers/Panorama.h" #include "loganalyzers/Formatter.h" String Panorama::TheId = "Panorama"; String PanAtom::TheId = "PanAtom"; Panorama::LocationLabels Panorama::TheLocationLabels; void Panorama::LabelLocation(const String &location, const String &label) { String res; if (!TheLocationLabels.find(location, res)) { TheLocationLabels.add(location, label); } else { cerr << "warning: found several test results with identical location:" << endl << "\t" << location << endl; } } String Panorama::LocationLabel(const String &location) { String res = "unlabeled"; (void)TheLocationLabels.find(location, res); return res; } Panorama *Panorama::findSlice(const String &skey, int idxHint) { Assert(idxHint >= 0); // try hinted location first if (0 <= idxHint && idxHint < theKids.count()) { Panorama *kid = panKid(idxHint); if (kid->key() == skey) return kid; } // search for the key for (int i = 0; i < theKids.count(); ++i) { Panorama *kid = panKid(i); if (kid->key() == skey) return kid; } // not found; caller may add a slice return 0; } Panorama *Panorama::panKid(int idx) { Sample *s = theKids[idx]; Assert(s->typeId() == Panorama::TheId || s->typeId() == PanAtom::TheId); return (Panorama*)s; } const Panorama *Panorama::panKid(int idx) const { Assert(0 <= idx && idx <= theKids.count()); const Sample *s = theKids[idx]; Assert(s->typeId() == Panorama::TheId || s->typeId() == PanAtom::TheId); return (const Panorama*)s; } Panorama *Panorama::genDiff() const { Panorama *diff = 0; for (int i = 0; i < theKids.count(); ++i) { if (Panorama *kidDiff = panKid(i)->genDiff()) { if (!diff) { diff = new Panorama(); diff->key(key()); diff->title(title()); diff->location(location()); } diff->add(kidDiff); } } return diff; } void Panorama::report(Formatter &form) const { form.openSection(key(), title()); for (int i = 0; i < theKids.count(); ++i) panKid(i)->report(form); form.closeSection(); } Panorama *PanAtom::genDiff() const { bool same = true; for (int k = 0; same && k < theKids.count(); ++k) { Sample &kid = *theKids[k]; for (int t = k+1; same && t < theKids.count(); ++t) same = theKids[t]->similar(kid); } if (same) return 0; PanAtom *diff = new PanAtom; diff->key(key()); diff->title(title()); diff->location(location()); for (int i = 0; i < theKids.count(); ++i) diff->add(theKids[i]->clone()); return diff; } void PanAtom::report(Formatter &form) const { if (theKids.count() == 0) return; form.openSection(key(), title()); form.openTableAnonym(); for (int i = 0; i < theKids.count(); ++i) { const Sample &kid = *theKids[i]; form.openTableRecord(); form.openTableCell("rep-link"); form.addLink(kid.location(), LocationLabel(kid.location())); form.addText(":"); form.closeTableCell(); form.openTableCell(kid.typeId()); form.addLink(kid.location() + '#' + kid.key(), PrintToString(kid)); form.closeTableCell(); form.openTableCell("diff"); const Sample &otherKid = i > 0 ? 
*theKids[i-1] : *theKids.last(); kid.reportDifferences(otherKid, form); form.closeTableCell(); form.closeTableRecord(); } form.closeTable(); form.closeSection(); } polygraph-4.3.2/src/loganalyzers/InfoScopeDim.cc0000644000175000017500000000222211546440450021253 0ustar testertester /* Web Polygraph http://www.web-polygraph.org/ * Copyright 2003-2011 The Measurement Factory * Licensed under the Apache License, Version 2.0 */ #include "base/polygraph.h" #include "xstd/String.h" #include "loganalyzers/InfoScopeDim.h" InfoScopeDim::InfoScopeDim(const String &aLabel): theLabel(aLabel) { } InfoScopeDim::InfoScopeDim(const InfoScopeDim &s) { copy(s); } InfoScopeDim::~InfoScopeDim() { reset(); } String InfoScopeDim::image() const { String buf = theLabel; buf += '='; for (int i = 0; i < theNames.count(); ++i) { if (i) buf += ","; buf += *theNames[i]; } return buf; } void InfoScopeDim::add(const String &name) { Assert(!has(name)); theNames.append(new String(name)); } bool InfoScopeDim::has(const String &name) const { for (int i = 0; i < theNames.count(); ++i) { if (*theNames[i] == name) return true; } return false; } void InfoScopeDim::copy(const InfoScopeDim &d) { Assert(theLabel == d.theLabel); Assert(!theNames.count()); theNames.stretch(d.count()); for (int i = 0; i < d.count(); ++i) add(*d.theNames[i]); } void InfoScopeDim::reset() { while (theNames.count()) delete theNames.pop(); } polygraph-4.3.2/src/loganalyzers/reporter.man0000644000175000017500000000147611336340427020776 0ustar testertester.\" DO NOT MODIFY THIS FILE! It was generated by help2man 1.36. .TH POLYGRAPH-REPORTER "1" "February 2010" "polygraph-reporter - Web Polygraph" "User Commands" .SH NAME polygraph-reporter \- HTML report generator .SH SYNOPSIS .B reporter [\fI--option \fR...] \fI \fR... .SH OPTIONS .TP \fB\-\-help\fR list of options .TP \fB\-\-version\fR package version info .TP \fB\-\-label\fR test label .TP \fB\-\-phases\fR names of phases for executive summary .TP \fB\-\-report_dir\fR report's root directory .TP \fB\-\-tmp_dir\fR temporary dir .TP \fB\-\-plotter\fR gnuplot location .SH COPYRIGHT Copyright \(co 2003-2006 The Measurement Factory, Inc. 
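.SH EXAMPLES
Generate a report from the client\- and server\-side binary logs of a test
labeled \fItest1\fR (the log file names and report directory below are
examples only):
.PP
.nf
reporter \-\-label test1 \-\-report_dir /tmp/polyrep/test1 tc1.log ts1.log
.fi
.PP
The reporter writes an HTML report tree (\fIindex.html\fR and related pages)
under the directory given with \fB\-\-report_dir\fR.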
.SH "SEE ALSO" .BR polygraph (7) \- general information and a list of programs .B \%http://www.web-polygraph.org/ \- project web site polygraph-4.3.2/src/loganalyzers/SectionState.h0000644000175000017500000000116211546440450021205 0ustar testertester /* Web Polygraph http://www.web-polygraph.org/ * Copyright 2003-2011 The Measurement Factory * Licensed under the Apache License, Version 2.0 */ #ifndef POLYGRAPH__LOGANALYZERS_SECTIONSTATE_H #define POLYGRAPH__LOGANALYZERS_SECTIONSTATE_H #include "xstd/String.h" #include "xstd/Array.h" class XmlTag; class SectionState { public: SectionState(); void reset(); int level() const; String begSection(const XmlTag &s, String &trueNum); // suggests section "number" void endSection(); protected: String curNum(const XmlTag &s, String &trueNum) const; protected: Array theLevels; }; #endif polygraph-4.3.2/src/loganalyzers/TestInfo.h0000644000175000017500000000706511546440450020343 0ustar testertester /* Web Polygraph http://www.web-polygraph.org/ * Copyright 2003-2011 The Measurement Factory * Licensed under the Apache License, Version 2.0 */ #ifndef POLYGRAPH__LOGANALYZERS_TESTINFO_H #define POLYGRAPH__LOGANALYZERS_TESTINFO_H #include "xstd/Array.h" #include "xstd/Time.h" #include "xstd/String.h" #include "xstd/BigSize.h" #include "loganalyzers/SomeInfo.h" class InfoScopes; class SideInfo; class BlobDb; // aggregate stats and other logged information about a test // manages info about all sides of the test class TestInfo: public SomeInfo { public: TestInfo(const String &aLabel); ~TestInfo(); // get/set scope for executive summary const Scope &execScope() const; void execScope(const Scope &aScope); const Scope &guessExecScope(); const String &label() const; const String &pglCfg() const; Time startTime() const; bool twoSided() const { return cltSideExists() && srvSideExists(); } const SideInfo *cltSideExists() const; const SideInfo *srvSideExists() const; const SideInfo &aSide() const; const SideInfo &cltSide() const; const SideInfo &srvSide() const; SideInfo &cltSide(); SideInfo &srvSide(); const SideInfo &side(int logCat) const; SideInfo &side(int logCat); int scopes(InfoScopes &res) const; int repCount(const Scope &scope) const; int hitCount(const Scope &scope) const; int uselessProxyValidationCount(const Scope &scope) const; BigSize repVolume(const Scope &scope) const; BigSize hitVolume(const Scope &scope) const; BigSize uselessProxyValidationVolume(const Scope &scope) const; AggrStat lastReqByteWritten(const Scope &scope) const; AggrStat lastReqByteRead(const Scope &scope) const; AggrStat firstRespByteWritten(const Scope &scope) const; AggrStat firstRespByteRead(const Scope &scope) const; void checkConsistency(); void compileStats(BlobDb &db); protected: void checkCommonPglCfg(); void checkCommonBenchmarkVersion(); void checkCommonStartTime(); void cmplExecSumVars(BlobDb &db); void cmplExecSum(BlobDb &db); void cmplExecSumTable(BlobDb &db, const Scope &cltScope); void cmplExecSumPhases(BlobDb &db, const Scope &cltScope); void cmplWorkload(BlobDb &db); void cmplWorkloadBlob(ReportBlob &blob, const String &side, const String &pglCfg); void cmplSynonyms(BlobDb &db, const Scope &scope); void cmplHitRatioVars(BlobDb &db, const Scope &scope); void cmplHitRatio(BlobDb &db, const Scope &scope); void cmplHitRatioTable(BlobDb &db, XmlTag &parent, const Scope &scope) ; void cmplCheapProxyValidationVars(BlobDb &db, const Scope &scope); void cmplCheapProxyValidation(BlobDb &db, const Scope &scope); void cmplCheapProxyValidationTable(BlobDb &db, XmlTag &parent, 
const Scope &scope) ; void cmplByteLatencyVars(BlobDb &db, const Scope &scope); void cmplByteLatency(BlobDb &db, const Scope &scope); void cmplByteLatencyTable(BlobDb &db, XmlTag &parent, const Scope &scope); void cmplByteLatencyHist(BlobDb &db, XmlTag &parent, const Scope &scope); void cmplBaseStats(BlobDb &db, const Scope &scope); void cmplTraffic(BlobDb &db, const Scope &scope); void cmplRptm(BlobDb &db, const Scope &scope); void cmplSavings(BlobDb &db, const Scope &scope); void cmplLevels(BlobDb &db, const Scope &scope); void cmplAuthentication(BlobDb &db, const Scope &scope); void cmplErrors(BlobDb &db, const Scope &scope); void cmplNotes(BlobDb &db); protected: String theLabel; String theBenchmarkVersion; String thePglCfg; Time theStartTime; Array theSides; Array theScopes; String theOneSideWarn; Scope theExecScope; }; #endif polygraph-4.3.2/src/loganalyzers/RepOpts.cc0000644000175000017500000000220511546440450020331 0ustar testertester /* Web Polygraph http://www.web-polygraph.org/ * Copyright 2003-2011 The Measurement Factory * Licensed under the Apache License, Version 2.0 */ #include "base/polygraph.h" #include "loganalyzers/RepOpts.h" RepOpts TheRepOpts; RepOpts::RepOpts(): theHelpOpt(this, "help", "list of options"), theVersOpt(this, "version", "package version info"), theLabel(this, "label ", "test label"), thePhases(this, "phases ", "names of phases for executive summary"), theRepDir(this, "report_dir ", "report's root directory"), theTmpDir(this, "tmp_dir ", "temporary dir", "/tmp"), thePlotter(this, "plotter ", "gnuplot location", "gnuplot") { } bool RepOpts::validate() const { if (!thePlotter) cerr << "plotter location must be specified" << endl; else return OptGrp::validate(); return false; } ostream &RepOpts::printAnonym(ostream &os) const { return os << " ..."; } bool RepOpts::parseAnonym(const Array &opts) { for (int i = 0 ; i < opts.count(); ++i) theFiles.append(new String(opts[i])); return theFiles.count() > 0; } polygraph-4.3.2/src/loganalyzers/LoadTraceFig.h0000644000175000017500000000211111546440450021057 0ustar testertester /* Web Polygraph http://www.web-polygraph.org/ * Copyright 2003-2011 The Measurement Factory * Licensed under the Apache License, Version 2.0 */ #ifndef POLYGRAPH__LOGANALYZERS_LOADTRACEFIG_H #define POLYGRAPH__LOGANALYZERS_LOADTRACEFIG_H #include "xstd/String.h" #include "loganalyzers/ReportTraceFigure.h" class LoadStex; class PhaseInfo; class PhaseTrace; class StatIntvlRec; // creates load trace figure based on interval load stats class LoadTraceFig: public ReportTraceFigure { public: typedef LoadStex Stex; public: LoadTraceFig(); void stats(const Stex *aStex, const PhaseInfo *phase); void compareWith(const Stex *stex); protected: virtual int createCtrlFile(); virtual void setCtrlOptions(); enum lineType { lnRate, lnBwidth }; int dumpDataLines(const LoadStex *stex, const lineType lt); int dumpDataLine(const LoadStex *stex, Time stamp, const StatIntvlRec &r, const lineType lt); protected: const Stex *theStex; Array theComparison; // theStex and others const PhaseInfo *thePhase; const PhaseTrace *theTrace; }; #endif polygraph-4.3.2/src/loganalyzers/InfoScopes.cc0000644000175000017500000000157611546440450021017 0ustar testertester /* Web Polygraph http://www.web-polygraph.org/ * Copyright 2003-2011 The Measurement Factory * Licensed under the Apache License, Version 2.0 */ #include "base/polygraph.h" #include "loganalyzers/InfoScope.h" #include "loganalyzers/InfoScopes.h" InfoScopes::InfoScopes() { } InfoScopes::~InfoScopes() { while 
(theScopes.count()) delete theScopes.pop(); } const InfoScope *InfoScopes::find(const String &image) const { int idx = -1; if (theIndex.find(image, idx)) return scope(idx); else return 0; } void InfoScopes::add(const InfoScope &scope) { InfoScope *clone = new InfoScope(scope); absorb(clone); } void InfoScopes::absorb(InfoScope *&scope) { const String image = scope->image(); int idx = -1; if (theIndex.find(image, idx)) theIndex.valAt(idx) = count(); // store last index else theIndex.add(image, count()); theScopes.append(scope); scope = 0; } polygraph-4.3.2/src/loganalyzers/PhaseInfo.h0000644000175000017500000000227311546440450020460 0ustar testertester /* Web Polygraph http://www.web-polygraph.org/ * Copyright 2003-2011 The Measurement Factory * Licensed under the Apache License, Version 2.0 */ #ifndef POLYGRAPH__LOGANALYZERS_PHASEINFO_H #define POLYGRAPH__LOGANALYZERS_PHASEINFO_H #include "base/StatPhaseRec.h" #include "loganalyzers/SomeInfo.h" class PhaseTrace; // information about a stats phase class PhaseInfo: public SomeInfo { public: PhaseInfo(); ~PhaseInfo(); const String name() const; const StatPhaseRec *hasStats() const; const StatPhaseRec &stats() const { return thePhase; } const StatIntvlRec &availStats() const; const PhaseTrace &trace() const { return *theTrace; } void concat(const PhaseInfo &phase); void merge(const PhaseInfo &phase); PhaseTrace *startTrace(); void noteIntvl(const StatIntvlRec &r, const String &phaseName); void notePhase(const StatPhaseRec &r); void noteEndOfLog(); void checkConsistency(); void compileStats(BlobDb &db); protected: StatPhaseRec thePhase; StatIntvlRec theIntvl; // all intervals together int theIntvlCount; // number of intervals seen PhaseTrace *theTrace; bool gotPhaseStats; // and not just recovered interval-based ones }; #endif polygraph-4.3.2/src/loganalyzers/SomeInfo.h0000644000175000017500000000176311546440450020326 0ustar testertester /* Web Polygraph http://www.web-polygraph.org/ * Copyright 2003-2011 The Measurement Factory * Licensed under the Apache License, Version 2.0 */ #ifndef POLYGRAPH__LOGANALYZERS_SOMEINFO_H #define POLYGRAPH__LOGANALYZERS_SOMEINFO_H #include "loganalyzers/InfoScope.h" class String; class BlobDb; class ReportBlob; class XmlAttr; // common base for all report *Info classes class SomeInfo { public: typedef InfoScope Scope; public: virtual ~SomeInfo() {} protected: const ReportBlob &addLink(BlobDb &db, const String &newKey, const String &oldKey); const ReportBlob &addMeasBlob(BlobDb &db, const String &name, double val, const String &unit, const String &title); const ReportBlob &addMeasBlob(BlobDb &db, const String &name, Time val, const String &title); const ReportBlob &addMeasBlob(BlobDb &db, const String &name, const String &val, const String &unit, const String &title); const ReportBlob &addNaMeasBlob(BlobDb &db, const String &name, const String &title); }; #endif polygraph-4.3.2/src/loganalyzers/InfoScope.h0000644000175000017500000000244511546440450020472 0ustar testertester /* Web Polygraph http://www.web-polygraph.org/ * Copyright 2003-2011 The Measurement Factory * Licensed under the Apache License, Version 2.0 */ #ifndef POLYGRAPH__LOGANALYZERS_INFOSCOPE_H #define POLYGRAPH__LOGANALYZERS_INFOSCOPE_H #include "xstd/String.h" #include "xstd/Array.h" class InfoScopeDim; // manages scope rules/info // scope is defined in terms of phase and side names class InfoScope { public: InfoScope(); InfoScope(const InfoScope &s); ~InfoScope(); void name(const String &aName); operator void*() const; String name() const; // 
imprecise, human-readable String image() const; // precise, for machine use const Array &sides() const; const Array &phases() const; // narrow or set InfoScope oneSide(const String &name) const; InfoScope onePhase(const String &name) const; // expand void addSide(const String &name); void addPhase(const String &name); void add(const InfoScope &scope); // test bool hasSide(const String &name) const; bool hasPhase(const String &name) const; bool operator ==(const InfoScope &s) const; InfoScope &operator =(const InfoScope &s); protected: void reset(); void copy(const InfoScope &s); protected: String theName; InfoScopeDim *theSides; InfoScopeDim *thePhases; }; #endif polygraph-4.3.2/src/loganalyzers/ReportFigure.h0000644000175000017500000000235411546440450021221 0ustar testertester /* Web Polygraph http://www.web-polygraph.org/ * Copyright 2003-2011 The Measurement Factory * Licensed under the Apache License, Version 2.0 */ #ifndef POLYGRAPH__LOGANALYZERS_REPORTFIGURE_H #define POLYGRAPH__LOGANALYZERS_REPORTFIGURE_H #include "xstd/String.h" class String; class PhaseInfo; class BlobDb; class ReportBlob; // a figure based on interval or phase stats class ReportFigure { public: static String TheBaseDir; // where to put all figures public: ReportFigure(); virtual ~ReportFigure(); const String &title() const { return theTitle; } virtual void configure(const String &key, const String &title); virtual const ReportBlob &plot(BlobDb &db); public: // set before calling setCtrlOptions String theDataStyle; String theLabelX1; String theLabelY1; String theLabelY2; protected: virtual int createCtrlFile() = 0; virtual void setCtrlOptions() = 0; bool plotCtrlFile(); bool destroyCtrlFile(); void addPlotLine(const String &title, const String &unit); void addedAllPlotLines(); protected: const PhaseInfo *thePhase; String theKey; String theTitle; String theBaseName; String thePlotFname; String theCtrlFname; ostream *theCtrlFile; int thePlotLineCount; }; #endif polygraph-4.3.2/src/loganalyzers/BlobIdx.h0000644000175000017500000000135111546440450020123 0ustar testertester /* Web Polygraph http://www.web-polygraph.org/ * Copyright 2003-2011 The Measurement Factory * Licensed under the Apache License, Version 2.0 */ #ifndef POLYGRAPH__LOGANALYZERS_BLOBIDX_H #define POLYGRAPH__LOGANALYZERS_BLOBIDX_H class ReportBlob; class String; // key->blob index with fast search method class BlobIdx { public: typedef String Key; public: BlobIdx(int aCapacity = 0); void add(const ReportBlob *blob); const ReportBlob *find(const Key &key) const; protected: const ReportBlob *find(const Key &key, int &idx) const; bool stopAt(const Key &key, const ReportBlob *&blob, int idx) const; void grow(); protected: Array theHash; int theCount; // hash slots occupied }; #endif polygraph-4.3.2/src/loganalyzers/SectionState.cc0000644000175000017500000000234311546440450021345 0ustar testertester /* Web Polygraph http://www.web-polygraph.org/ * Copyright 2003-2011 The Measurement Factory * Licensed under the Apache License, Version 2.0 */ #include "base/polygraph.h" #include "xstd/h/sstream.h" #include "xml/XmlTag.h" #include "xml/XmlAttr.h" #include "loganalyzers/SectionState.h" SectionState::SectionState(): theLevels(4) { theLevels.append(0); } void SectionState::reset() { theLevels.reset(); theLevels.append(0); } int SectionState::level() const { return theLevels.count(); } // suggests section "number" // no support for "level" attribute yet String SectionState::begSection(const XmlTag &s, String &trueNum) { theLevels.last()++; const String numStr = 
curNum(s, trueNum); theLevels.append(0); return numStr; } void SectionState::endSection() { if (theLevels.count() > 1) // be robust theLevels.pop(); } String SectionState::curNum(const XmlTag &s, String &trueNum) const { ostringstream buf; for (int i = 0; i < theLevels.count(); ++i) { if (i) buf << '.'; buf << theLevels[i]; } if (theLevels.count() == 1) buf << '.'; buf << ends; trueNum = buf.str().c_str(); streamFreeze(buf, false); if (const XmlAttr *a = s.attrs()->has("number")) return a->value(); else return trueNum; } polygraph-4.3.2/src/loganalyzers/TmSzHistFig.h0000644000175000017500000000135311546440450020755 0ustar testertester /* Web Polygraph http://www.web-polygraph.org/ * Copyright 2003-2011 The Measurement Factory * Licensed under the Apache License, Version 2.0 */ #ifndef POLYGRAPH__LOGANALYZERS_TMSZHISTFIG_H #define POLYGRAPH__LOGANALYZERS_TMSZHISTFIG_H #include "loganalyzers/HistogramFigure.h" class TmSzHistStat; class Stex; // creates response time or response size distribution figure // based on phase stats class TmSzHistFig: public HistogramFigure { public: TmSzHistFig(const String &aUnit); virtual ~TmSzHistFig(); void stats(const Stex *aStex, const PhaseInfo *phase); protected: virtual const Histogram *extractHist(const Stex *stex, const PhaseInfo &phase) const = 0; protected: String theUnit; Stex *theAuth; }; #endif polygraph-4.3.2/src/loganalyzers/Stex.cc0000644000175000017500000004144311546440450017667 0ustar testertester /* Web Polygraph http://www.web-polygraph.org/ * Copyright 2003-2011 The Measurement Factory * Licensed under the Apache License, Version 2.0 */ #include "base/polygraph.h" #include "xml/XmlText.h" #include "xml/XmlParagraph.h" #include "loganalyzers/ReportBlob.h" #include "loganalyzers/PhaseInfo.h" #include "loganalyzers/Stex.h" Stex::Stex(const String &aKey, const String &aName): theKey(aKey), theName(aName), theParent(0), doIgnoreUnseen(false) { } Stex::~Stex() { } void Stex::parent(const Stex *aParent) { Assert(!theParent || !aParent); theParent = aParent; } double Stex::totalCount(const PhaseInfo &phase) const { if (const TmSzStat *const recStats = aggr(phase)) return recStats->size().count(); return 0; } double Stex::meanPartsCount(const PhaseInfo &phase) const { const AggrStat *const stats = partsStat(phase); return stats && stats->known() ? stats->mean() : 1; } // compare using "contribution by count" and other factors int Stex::cmpByCountContrib(const PhaseInfo &phase, const Stex &stex) const { static const double epsilon = 1e-3; const double x = totalCount(phase) * meanPartsCount(phase); const double y = stex.totalCount(phase) * stex.meanPartsCount(phase); const double diff = x - y; if (diff < -epsilon) return -1; if (diff > epsilon) return 1; // check if one stex is parent of another // parent stex contribute more than child if (parent() == &stex) return -1; if (stex.parent() == this) return 1; // compare names if nothing else return name().cmp(stex.name()); } const TmSzStat *Stex::aggr(const PhaseInfo &phase) const { return trace(phase.availStats()); } const AggrStat *Stex::partsStat(const PhaseInfo &phase) const { const Histogram *const h = partsHist(phase); return h ? 
&h->stats() : 0; } void Stex::describe(XmlNodes &nodes) const { nodes << XmlTextTag("No description is available for " "this object class."); describeParent(nodes); } void Stex::describeParent(XmlNodes &nodes) const { if (parent()) { XmlTextTag text; text.buf() << "This object class belongs to the '" << parent()->name() << "' class."; nodes << text; } } /* HitsStex */ HitsStex::HitsStex(const String &aKey, const String &aName): Stex(aKey, aName) { } const TmSzHistStat *HitsStex::hist(const PhaseInfo &phase) const { return phase.hasStats() ? &phase.stats().theBasicXacts.hits() : 0; } const TmSzStat *HitsStex::trace(const StatIntvlRec &rec) const { return &rec.theRealHR.hits(); } void HitsStex::describe(XmlNodes &nodes) const { Stex::describe(nodes); } /* MissesStex */ MissesStex::MissesStex(const String &aKey, const String &aName): Stex(aKey, aName) { } const TmSzHistStat *MissesStex::hist(const PhaseInfo &phase) const { return phase.hasStats() ? &phase.stats().theBasicXacts.misses() : 0; } const TmSzStat *MissesStex::trace(const StatIntvlRec &rec) const { return &rec.theRealHR.misses(); } void MissesStex::describe(XmlNodes &nodes) const { Stex::describe(nodes); } /* HitMissesStex */ HitMissesStex::HitMissesStex(const String &aKey, const String &aName): Stex(aKey, aName) { } const TmSzHistStat *HitMissesStex::hist(const PhaseInfo &phase) const { if (phase.hasStats()) { theXactHist.reset(); theXactHist = phase.stats().theBasicXacts.hits(); theXactHist += phase.stats().theBasicXacts.misses(); return &theXactHist; } else { return 0; } } const TmSzStat *HitMissesStex::trace(const StatIntvlRec &rec) const { theXactAggr = rec.theRealHR.xacts(); return &theXactAggr; } void HitMissesStex::describe(XmlNodes &nodes) const { Stex::describe(nodes); } /* ValidationHitStex */ ValidationHitStex::ValidationHitStex(const String &aKey, const String &aName, const HRHistPtr aHRHist): Stex(aKey, aName), theHRHist(aHRHist) { } const TmSzStat *ValidationHitStex::aggr(const PhaseInfo &phase) const { if (phase.hasStats()) { theXactAggr = (phase.stats().*theHRHist).hits().aggr(); return &theXactAggr; } else { return 0; } } const TmSzHistStat *ValidationHitStex::hist(const PhaseInfo &phase) const { return phase.hasStats() ? &(phase.stats().*theHRHist).hits() : 0; } void ValidationHitStex::describe(XmlNodes &nodes) const { Stex::describe(nodes); } /* ValidationMissStex */ ValidationMissStex::ValidationMissStex(const String &aKey, const String &aName, const HRHistPtr aHRHist): Stex(aKey, aName), theHRHist(aHRHist) { } const TmSzStat *ValidationMissStex::aggr(const PhaseInfo &phase) const { if (phase.hasStats()) { theXactAggr = (phase.stats().*theHRHist).misses().aggr(); return &theXactAggr; } else { return 0; } } const TmSzHistStat *ValidationMissStex::hist(const PhaseInfo &phase) const { return phase.hasStats() ? 
&(phase.stats().*theHRHist).misses() : 0; } void ValidationMissStex::describe(XmlNodes &nodes) const { Stex::describe(nodes); } /* ImsStex */ ValidationHitMissStex::ValidationHitMissStex(const String &aKey, const String &aName, const HRHistPtr aHRHist, const TracePtr aTrace): Stex(aKey, aName), theHRHist(aHRHist), theTrace(aTrace) { } const TmSzStat *ValidationHitMissStex::aggr(const PhaseInfo &phase) const { const TmSzStat *stat; if (theTrace) stat = Stex::aggr(phase); else if (phase.hasStats()) { theXactAggr.reset(); theXactAggr += (phase.stats().*theHRHist).hits().aggr(); theXactAggr += (phase.stats().*theHRHist).misses().aggr(); stat = &theXactAggr; } else stat = 0; return stat; } const TmSzHistStat *ValidationHitMissStex::hist(const PhaseInfo &phase) const { if (phase.hasStats()) { theXactHist.reset(); theXactHist = (phase.stats().*theHRHist).hits(); theXactHist += (phase.stats().*theHRHist).misses(); return &theXactHist; } else { return 0; } } const TmSzStat *ValidationHitMissStex::trace(const StatIntvlRec &rec) const { return theTrace ? &(rec.*theTrace) : 0; } void ValidationHitMissStex::describe(XmlNodes &nodes) const { Stex::describe(nodes); } /* CachableStex */ CachableStex::CachableStex(const String &aKey, const String &aName): Stex(aKey, aName) { } const TmSzStat *CachableStex::trace(const StatIntvlRec &rec) const { return &rec.theChbR.hits(); } void CachableStex::describe(XmlNodes &nodes) const { Stex::describe(nodes); } /* UnCachableStex */ UnCachableStex::UnCachableStex(const String &aKey, const String &aName): Stex(aKey, aName) { } const TmSzStat *UnCachableStex::trace(const StatIntvlRec &rec) const { return &rec.theChbR.misses(); } void UnCachableStex::describe(XmlNodes &nodes) const { Stex::describe(nodes); } /* AllCachableStex */ AllCachableStex::AllCachableStex(const String &aKey, const String &aName): Stex(aKey, aName) { } const TmSzStat *AllCachableStex::trace(const StatIntvlRec &rec) const { theXactAggr = rec.theChbR.xacts(); return &theXactAggr; } void AllCachableStex::describe(XmlNodes &nodes) const { Stex::describe(nodes); } /* FillStex */ FillStex::FillStex(const String &aKey, const String &aName): Stex(aKey, aName) { } const TmSzStat *FillStex::trace(const StatIntvlRec &rec) const { return &rec.theFill; } void FillStex::describe(XmlNodes &nodes) const { Stex::describe(nodes); } /* SimpleStex */ SimpleStex::SimpleStex(const String &aKey, const String &aName, HistPtr aHist, TracePtr aTrace): Stex(aKey, aName), theHist(aHist), theTrace(aTrace) { } const TmSzHistStat *SimpleStex::hist(const PhaseInfo &phase) const { return theHist && phase.hasStats() ? &(phase.stats().*theHist) : 0; } const TmSzStat *SimpleStex::trace(const StatIntvlRec &rec) const { return theTrace ? 
&(rec.*theTrace) : 0; } void SimpleStex::describe(XmlNodes &nodes) const { Stex::describe(nodes); } /* AllMethodsStex */ AllMethodsStex::AllMethodsStex(const String &aKey, const String &aName): Stex(aKey, aName) { } const TmSzHistStat *AllMethodsStex::hist(const PhaseInfo &phase) const { if (phase.hasStats()) { theXactHist.reset(); theXactHist += phase.stats().theHeadXacts; theXactHist += phase.stats().thePostXacts; theXactHist += phase.stats().thePutXacts; theXactHist += phase.stats().theConnectXacts; return &theXactHist; } else { return 0; } } const TmSzStat *AllMethodsStex::trace(const StatIntvlRec &rec) const { theXactAggr = rec.theHead + rec.thePost + rec.thePut + rec.theConnect; return &theXactAggr; } void AllMethodsStex::describe(XmlNodes &nodes) const { Stex::describe(nodes); } /* AllRepsStex */ AllRepsStex::AllRepsStex(const String &aKey, const String &aName): Stex(aKey, aName) { } const TmSzHistStat *AllRepsStex::hist(const PhaseInfo &phase) const { if (phase.hasStats()) { theXactHist.reset(); phase.stats().repAll(theXactHist); return &theXactHist; } else { return 0; } } const TmSzStat *AllRepsStex::trace(const StatIntvlRec &rec) const { theXactAggr = rec.reps(); return &theXactAggr; } void AllRepsStex::describe(XmlNodes &nodes) const { Stex::describe(nodes); } /* ContTypeStex */ ContTypeStex::ContTypeStex(const String &aKey, const String &aName, int anIdx, ContTypePtr aContType): Stex(aKey, aName), theIdx(anIdx), theContType(aContType) { } const TmSzStat *ContTypeStex::aggr(const PhaseInfo &phase) const { if (phase.hasStats() && (phase.stats().*theContType).hasStats(theIdx)) { theXactAggr = TmSzStat(AggrStat(), (phase.stats().*theContType).stats(theIdx)); return &theXactAggr; } else { return 0; } } void ContTypeStex::describe(XmlNodes &nodes) const { XmlText text; text.buf() << "This object class represents one of the " << "content types specified in the PGL workload file and labeled " << "there as " << name() << " content type." 
<< endl; nodes << text; } /* AllContTypesStex */ AllContTypesStex::AllContTypesStex(const String &aKey, const String &aName, ContTypePtr aContType): Stex(aKey, aName), theContType(aContType) { } const TmSzStat *AllContTypesStex::aggr(const PhaseInfo &phase) const { if (phase.hasStats()) { AggrStat szStat; for (int i = 0; i < ContTypeStat::Kinds().count(); ++i) { if ((phase.stats().*theContType).hasStats(i)) szStat += (phase.stats().*theContType).stats(i); } theXactAggr = TmSzStat(AggrStat(), szStat); return &theXactAggr; } else { return 0; } } void AllContTypesStex::describe(XmlNodes &nodes) const { Stex::describe(nodes); } /* CompoundReplyStex */ CompoundReplyStex::CompoundReplyStex(const String &aKey, const String &aName, const CompoundPtr aCompoundPtr): Stex(aKey, aName), theCompoundPtr(aCompoundPtr) { } const TmSzStat *CompoundReplyStex::aggr(const PhaseInfo &phase) const { const CompoundXactStat &compound = phase.stats().*theCompoundPtr; theStat = TmSzStat(compound.duration.stats(), compound.repSize.stats()); return &theStat; } const Histogram *CompoundReplyStex::partsHist(const PhaseInfo &phase) const { const CompoundXactStat &compound = phase.stats().*theCompoundPtr; return &compound.exchanges; } void CompoundReplyStex::describe(XmlNodes &nodes) const { Stex::describe(nodes); } /* CompoundRequestStex */ CompoundRequestStex::CompoundRequestStex(const String &aKey, const String &aName, const CompoundPtr aCompoundPtr): Stex(aKey, aName), theCompoundPtr(aCompoundPtr) { } const TmSzStat *CompoundRequestStex::aggr(const PhaseInfo &phase) const { const CompoundXactStat &compound = phase.stats().*theCompoundPtr; theStat = TmSzStat(compound.duration.stats(), compound.reqSize.stats()); return &theStat; } const Histogram *CompoundRequestStex::partsHist(const PhaseInfo &phase) const { const CompoundXactStat &compound = phase.stats().*theCompoundPtr; return &compound.exchanges; } void CompoundRequestStex::describe(XmlNodes &nodes) const { Stex::describe(nodes); } /* AllCompoundRepsStex */ AllCompoundRepsStex::AllCompoundRepsStex(const String &aKey, const String &aName): Stex(aKey, aName) { } const TmSzStat *AllCompoundRepsStex::aggr(const PhaseInfo &phase) const { theCompound.reset(); phase.stats().compoundAll(theCompound); theStat = TmSzStat(theCompound.duration.stats(), theCompound.repSize.stats()); return &theStat; } const Histogram *AllCompoundRepsStex::partsHist(const PhaseInfo &phase) const { theCompound.reset(); phase.stats().compoundAll(theCompound); return &theCompound.exchanges; } void AllCompoundRepsStex::describe(XmlNodes &nodes) const { Stex::describe(nodes); } /* AllCompoundReqsStex */ AllCompoundReqsStex::AllCompoundReqsStex(const String &aKey, const String &aName): Stex(aKey, aName) { } const TmSzStat *AllCompoundReqsStex::aggr(const PhaseInfo &phase) const { theCompound.reset(); phase.stats().compoundAll(theCompound); theStat = TmSzStat(theCompound.duration.stats(), theCompound.reqSize.stats()); return &theStat; } const Histogram *AllCompoundReqsStex::partsHist(const PhaseInfo &phase) const { theCompound.reset(); phase.stats().compoundAll(theCompound); return &theCompound.exchanges; } void AllCompoundReqsStex::describe(XmlNodes &nodes) const { Stex::describe(nodes); } /* AuthIngStex */ AuthIngStex::AuthIngStex(const String &aKey, const String &aName, const AuthPhaseStat::Scheme aScheme): Stex(aKey, aName), theScheme(aScheme) { } const TmSzStat *AuthIngStex::trace(const StatIntvlRec &rec) const { return &rec.theAuth.getAuthIng(theScheme); } void AuthIngStex::describe(XmlNodes 
&nodes) const { Stex::describe(nodes); } /* AuthEdStex */ AuthEdStex::AuthEdStex(const String &aKey, const String &aName, const AuthPhaseStat::Scheme aScheme): Stex(aKey, aName), theScheme(aScheme) { } const TmSzStat *AuthEdStex::trace(const StatIntvlRec &rec) const { return &rec.theAuth.getAuthEd(theScheme); } void AuthEdStex::describe(XmlNodes &nodes) const { Stex::describe(nodes); } /* AllAuthIngStex */ AllAuthIngStex::AllAuthIngStex(const String &aKey, const String &aName): Stex(aKey, aName) { } const TmSzStat *AllAuthIngStex::trace(const StatIntvlRec &rec) const { theStat.reset(); rec.theAuth.authIngAll(theStat); return &theStat; } void AllAuthIngStex::describe(XmlNodes &nodes) const { Stex::describe(nodes); } /* AllAuthEdStex */ AllAuthEdStex::AllAuthEdStex(const String &aKey, const String &aName): Stex(aKey, aName) { } const TmSzStat *AllAuthEdStex::trace(const StatIntvlRec &rec) const { theStat.reset(); rec.theAuth.authEdAll(theStat); return &theStat; } void AllAuthEdStex::describe(XmlNodes &nodes) const { Stex::describe(nodes); } /* AllAuthStex */ AllAuthStex::AllAuthStex(const String &aKey, const String &aName): Stex(aKey, aName) { } const TmSzStat *AllAuthStex::trace(const StatIntvlRec &rec) const { theStat.reset(); rec.theAuth.authIngAll(theStat); rec.theAuth.authEdAll(theStat); return &theStat; } void AllAuthStex::describe(XmlNodes &nodes) const { Stex::describe(nodes); } /* ProtoIntvlStex */ ProtoIntvlStex::ProtoIntvlStex(ProtoPtr aProto, const String &aKey, const String &aName): Stex(aKey, aName), theProto(aProto) { } /* ProtoHitsStex */ ProtoHitsStex::ProtoHitsStex(ProtoPtr aProto, const String &aKey, const String &aName): ProtoIntvlStex(aProto, aKey, aName) { } const TmSzStat *ProtoHitsStex::trace(const StatIntvlRec &rec) const { return &(rec.*theProto).doneXacts().hits(); } void ProtoHitsStex::describe(XmlNodes &nodes) const { Stex::describe(nodes); } /* ProtoMissesStex */ ProtoMissesStex::ProtoMissesStex(ProtoPtr aProto, const String &aKey, const String &aName): ProtoIntvlStex(aProto, aKey, aName) { } const TmSzStat *ProtoMissesStex::trace(const StatIntvlRec &rec) const { return &(rec.*theProto).doneXacts().misses(); } void ProtoMissesStex::describe(XmlNodes &nodes) const { Stex::describe(nodes); } /* ProtoHitMissesStex */ ProtoHitMissesStex::ProtoHitMissesStex(ProtoPtr aProto, const String &aKey, const String &aName): ProtoIntvlStex(aProto, aKey, aName) { } const TmSzStat *ProtoHitMissesStex::trace(const StatIntvlRec &rec) const { theAggr = (rec.*theProto).doneXacts().xacts(); return &theAggr; } void ProtoHitMissesStex::describe(XmlNodes &nodes) const { Stex::describe(nodes); } /* CookiesStex */ CookiesStex::CookiesStex(const String &aKey, const String &aName, const AggrPtr anAggrPtr): Stex(aKey, aName), theAggrPtr(anAggrPtr) { } const AggrStat *CookiesStex::partsStat(const PhaseInfo &phase) const { return &(phase.stats().*theAggrPtr); } void CookiesStex::describe(XmlNodes &nodes) const { Stex::describe(nodes); } /* AllStatusCodeStex */ AllStatusCodeStex::AllStatusCodeStex(const String &aKey, const String &aName, const StatusCodePtr aPtr): Stex(aKey, aName), thePtr(aPtr) { Assert(thePtr); doIgnoreUnseen = true; } const TmSzStat *AllStatusCodeStex::aggr(const PhaseInfo &phase) const { theAggr = (phase.stats().*thePtr).allStats(); return &theAggr; } void AllStatusCodeStex::describe(XmlNodes &nodes) const { Stex::describe(nodes); } /* StatusCodeStex */ StatusCodeStex::StatusCodeStex(const String &aKey, const String &aName, const StatusCodePtr aPtr, const int aStatus): 
Stex(aKey, aName), thePtr(aPtr), theStatus(aStatus) { Assert(thePtr); doIgnoreUnseen = true; } const TmSzStat *StatusCodeStex::aggr(const PhaseInfo &phase) const { return (phase.stats().*thePtr).stats(theStatus); } void StatusCodeStex::describe(XmlNodes &nodes) const { Stex::describe(nodes); } polygraph-4.3.2/src/loganalyzers/LevelTraceFig.h0000644000175000017500000000205511546440450021256 0ustar testertester /* Web Polygraph http://www.web-polygraph.org/ * Copyright 2003-2011 The Measurement Factory * Licensed under the Apache License, Version 2.0 */ #ifndef POLYGRAPH__LOGANALYZERS_LEVELTRACEFIG_H #define POLYGRAPH__LOGANALYZERS_LEVELTRACEFIG_H #include "xstd/String.h" #include "loganalyzers/ReportTraceFigure.h" class LevelStex; class PhaseInfo; class PhaseTrace; class StatIntvlRec; // creates level trace figure based on interval level stats class LevelTraceFig: public ReportTraceFigure { public: typedef LevelStex Stex; public: LevelTraceFig(); void stats(const LevelStex *const aStex, const PhaseInfo *const phase); void compareWith(const LevelStex *const stex); protected: virtual int createCtrlFile(); virtual void setCtrlOptions(); int dumpDataLines(const LevelStex *stex); int dumpDataLine(const LevelStex *stex, Time stamp, const StatIntvlRec &r); protected: const LevelStex *theStex; Array theComparison; // theStex and others const PhaseInfo *thePhase; const PhaseTrace *theTrace; }; #endif polygraph-4.3.2/src/loganalyzers/comparator.sh0000755000175000017500000000055610621177424021144 0ustar testertester delta=$1 if test -z "$delta" then delta=0 fi set -e -x #rm /tmp/polyrep/test[12]/*html #./reporter --label test1 /tmp/t[cs]1.log #./reporter --label test2 /tmp/t[cs]2.log cat /tmp/polyrep/test1/*html > /tmp/t1.html cat /tmp/polyrep/test2/*html > /tmp/t2.html cp comparator.css /tmp/t.html ./comparator -delta $delta /tmp/t{1,2,2,1}.html >> /tmp/t.html set +x polygraph-4.3.2/src/loganalyzers/LevelStex.h0000644000175000017500000000165511546440450020522 0ustar testertester /* Web Polygraph http://www.web-polygraph.org/ * Copyright 2003-2011 The Measurement Factory * Licensed under the Apache License, Version 2.0 */ #ifndef POLYGRAPH__LOGANALYZERS_LEVELSTEX_H #define POLYGRAPH__LOGANALYZERS_LEVELSTEX_H #include "xstd/String.h" #include "base/StatIntvlRec.h" #include "base/LevelStat.h" // an algorithm of extracting a particular level statistics out of // interval stats record class LevelStex { public: typedef LevelStat StatIntvlRec::*StatPtr; public: LevelStex(const String &aKey, const String &aName, StatPtr aStats): theKey(aKey), theName(aName), theStats(aStats) {} const String &key() const { return theKey; } // precise, for machine use const String &name() const { return theName; } // imprecise, human-readable const LevelStat &level(const StatIntvlRec &rec) const { return rec.*theStats; } protected: String theKey; String theName; StatPtr theStats; }; #endif polygraph-4.3.2/src/loganalyzers/StatTable.h0000644000175000017500000000136311546440450020466 0ustar testertester /* Web Polygraph http://www.web-polygraph.org/ * Copyright 2003-2011 The Measurement Factory * Licensed under the Apache License, Version 2.0 */ #ifndef POLYGRAPH__LOGANALYZERS_STATTABLE_H #define POLYGRAPH__LOGANALYZERS_STATTABLE_H #include "base/polygraph.h" #include "loganalyzers/Stex.h" #include "xml/XmlTable.h" #include class StatTable: public XmlTable { public: StatTable() {} explicit StatTable(const XmlTable &table): XmlTable(table) {} void addUnknown(const Stex &s) { if (!s.ignoreUnseen()) theUnknowns.push_back(s.name()); 
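		// names collected here are reported by operator <<(XmlTag &, const StatTable &)
		// in StatTable.cc as a "No events observed for the following statistics" note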
	}

	const std::list<String> &unknowns() const { return theUnknowns; }

	protected:
		std::list<String> theUnknowns;
};

XmlTag &operator <<(XmlTag &tag, const StatTable &statTable);

#endif

polygraph-4.3.2/src/loganalyzers/ReportTraceFigure.cc0000644000175000017500000000126211546440450022333 0ustar testertester
/* Web Polygraph       http://www.web-polygraph.org/
 * Copyright 2003-2011 The Measurement Factory
 * Licensed under the Apache License, Version 2.0 */

#include "base/polygraph.h"

#include
#include "xstd/gadgets.h"
#include "loganalyzers/ReportTraceFigure.h"

void ReportTraceFigure::setCtrlOptions() {
	theLabelX1 = "minutes";
	ReportFigure::setCtrlOptions();
}

void ReportTraceFigure::globalStart(Time aStart) {
	theGlobalStart = aStart;
}

// use gnuplot date axis?
void ReportTraceFigure::dumpTime(Time stamp) {
	static bool complained = false;
	complained = complained || !Should(theGlobalStart >= 0);
	*theCtrlFile << Max(0.0, (stamp - theGlobalStart).secd()/60);
}
polygraph-4.3.2/src/loganalyzers/Sample.h0000644000175000017500000000630311546440450020023 0ustar testertester
/* Web Polygraph       http://www.web-polygraph.org/
 * Copyright 2003-2011 The Measurement Factory
 * Licensed under the Apache License, Version 2.0 */

#ifndef POLYGRAPH__LOGANALYZERS_SAMPLE_H
#define POLYGRAPH__LOGANALYZERS_SAMPLE_H

#include "xstd/String.h"
#include "xstd/Array.h"
#include "xstd/h/iosfwd.h"

class Panorama;
class Formatter;

// a data point, measurement, or specimen extracted from the logs
// and tagged with a unique key and descriptive title
class Sample {
	public:
		typedef String Key;

	public:
		virtual ~Sample() {}

		const Key &key() const { return theKey; }
		void key(const Key &aKey) { theKey = aKey; }

		const String &title() const { return theTitle; }
		void title(const String &aTitle) { theTitle = aTitle; }

		const String &location() const { return theLocation; }
		void location(const String &aLocation) { theLocation = aLocation; }
		virtual void propagateLocation(const String &aLocation);

		bool similar(const Sample &s) const;

		virtual Panorama *makePanoramaSkeleton() const = 0;
		virtual void fillPanorama(Panorama *p) const = 0;

		virtual void print(ostream &os) const = 0;
		virtual void reportDifferences(const Sample &, Formatter &form) const;

		virtual const String &typeId() const = 0;
		virtual Sample *clone() const = 0;

	protected:
		virtual bool selfSimilar(const Sample &s) const = 0; // type matches
		void copy(const Sample &s);

	protected:
		Key theKey;
		String theLocation;
		String theTitle;
};

class CompositeSample: public Sample {
	public:
		static String TheId;

	public:
		virtual ~CompositeSample();

		int kidCount() const { return theKids.count(); }
		void add(Sample *aKid);

		virtual Panorama *makePanoramaSkeleton() const;
		virtual void fillPanorama(Panorama *p) const;
		virtual void propagateLocation(const String &aLocation);

		virtual void print(ostream &os) const;

		virtual const String &typeId() const { return TheId; }
		virtual Sample *clone() const;

	protected:
		virtual bool selfSimilar(const Sample &s) const;
		void copy(const CompositeSample &c);

	protected:
		Array<Sample*> theKids;
};

class AtomSample: public Sample {
	public:
		virtual void print(ostream &os) const;

		virtual void setImage(const String &image);
		const String &image() const { return theImage; }

		virtual Panorama *makePanoramaSkeleton() const;

	protected:
		virtual bool selfSimilar(const Sample &s) const;

	private:
		String theImage;
};

class NumberSample: public AtomSample {
	public:
		static String TheId;
		static double TheDelta; // 0 = exact comparison, 1 = 100% difference OK

	public:
		NumberSample();

		virtual void setImage(const String
&image); virtual void fillPanorama(Panorama *p) const; virtual const String &typeId() const { return TheId; } virtual Sample *clone() const; virtual void reportDifferences(const Sample &, Formatter &form) const; protected: virtual bool selfSimilar(const Sample &s) const; private: typedef double Value; double theValue; }; class TextSample: public AtomSample { public: static String TheId; virtual void propagateLocation(const String &aLocation); virtual void fillPanorama(Panorama *p) const; virtual const String &typeId() const { return TheId; } virtual Sample *clone() const; }; #endif polygraph-4.3.2/src/loganalyzers/reporter.cc0000644000175000017500000005117611546440450020612 0ustar testertester /* Web Polygraph http://www.web-polygraph.org/ * Copyright 2003-2011 The Measurement Factory * Licensed under the Apache License, Version 2.0 */ #include "base/polygraph.h" #include #include "xstd/h/iostream.h" #include #include "xstd/h/sstream.h" #include "xstd/h/iomanip.h" #include "xstd/h/new.h" #include "xstd/Map.h" #include "base/StatPhaseRec.h" #include "base/polyLogCats.h" #include "base/polyLogTags.h" #include "base/CmdLine.h" #include "base/opts.h" #include "base/AnyToString.h" #include "xml/XmlAttr.h" #include "xml/XmlNodes.h" #include "xml/XmlDoc.h" #include "xml/XmlText.h" #include "xml/XmlParagraph.h" #include "xml/XmlTable.h" #include "xml/XmlSection.h" #include "logextractors/LogIter.h" #include "loganalyzers/InfoScopes.h" #include "loganalyzers/PhaseTrace.h" #include "loganalyzers/PhaseInfo.h" #include "loganalyzers/ProcInfo.h" #include "loganalyzers/SideInfo.h" #include "loganalyzers/TestInfo.h" #include "loganalyzers/ReportBlob.h" #include "loganalyzers/ReportFigure.h" #include "loganalyzers/RepToHtmlFile.h" #include "loganalyzers/BlobDb.h" #include "loganalyzers/RepOpts.h" static TestInfo *TheTest = 0; typedef Map PhaseNames; // raw name -> count static PhaseNames ThePhaseNames; /* local routines */ String uniquePhaseName(const String &rawName) { if (int *count = ThePhaseNames.valp(rawName)) { Should(*count > 0); ++*count; return uniquePhaseName(rawName + "-n" + AnyToString(*count)); } ThePhaseNames.add(rawName, 1); return rawName; } void checkPhaseNames(const String &fname) { bool errors = false; for (int i = 0; i < ThePhaseNames.count(); ++i) { const int count = ThePhaseNames.valAt(i); if (count > 1) { clog << fname << ": warning: found " << count << " phases named '" << ThePhaseNames.keyAt(i) << "'" << endl; errors = true; } } if (errors) clog << fname << ": warning: appending unique suffixes to " << "phase names to avoid name clashes" << endl; } void resetPhaseNames() { ThePhaseNames.reset(); } static ProcInfo *scanLog1(LogIter &li) { ProcInfo *proc = new ProcInfo(li.log().fileName()); proc->startTime(li ? 
li.log().progress().time() : Time()); String phaseName; resetPhaseNames(); bool needComments = true; while (li) { switch (li->theTag) { case lgComment: { // XXX: add extraction of shutdown reason if (needComments) { String comment; li.log() >> comment; if (!comment.str("Configuration:")) break; const String kword = "version:"; if (const char *beg = comment.str(kword.cstr())) { beg += kword.len(); while (*beg && isspace(*beg)) ++beg; const char *end = beg; while (*end && !isspace(*end)) ++end; if (Should(end > beg)) { proc->benchmarkVersion( comment(beg-comment.cstr(), end-comment.cstr())); } } needComments = false; } break; } case lgContTypeKinds: { // should be called only once per log ContTypeStat::Load(li.log()); break; } case lgPglCfg: { String cfg; li.log() >> cfg; proc->pglCfg(cfg); break; } case lgStatPhaseBeg: { String rawName; li.log() >> rawName; phaseName = uniquePhaseName(rawName); break; } case lgStatPhaseEnd: { phaseName = 0; break; } case lgStatCycleRec: { const int cat = li->theCat; if (!Should(cat == lgcCltSide || cat == lgcSrvSide)) break; if (proc->logCat() == lgcEnd) proc->logCat(cat); if (proc->logCat() == cat) { StatIntvlRec r; r.load(li.log()); if (Should(r.sane())) proc->noteIntvl(r, phaseName); } break; } case lgStatPhaseRec: { const int cat = li->theCat; if (proc->logCat() == cat) { StatPhaseRec r; r.load(li.log()); if (Should(r.sane())) proc->addPhase(r); } break; } } ++li; } if (proc->logCat() == lgcEnd) { cerr << li.log().fileName() << ": error: cannot determine log 'side', skipping" << endl; delete proc; return 0; } proc->noteEndOfLog(); checkPhaseNames(li.log().fileName()); resetPhaseNames(); return proc; } static void scanLog2(LogIter &li, ProcInfo *proc) { PhaseTrace *trace = 0; while (li) { switch (li->theTag) { case lgStatPhaseBeg: { String rawName, phaseName; li.log() >> rawName; phaseName = uniquePhaseName(rawName); if (proc->hasPhase(phaseName)) trace = proc->tracePhase(phaseName); break; } case lgStatPhaseEnd: { trace = 0; break; } case lgStatCycleRec: { const int cat = li->theCat; if (Should(trace) && proc->logCat() == cat) { StatIntvlRec r; r.load(li.log()); if (Should(r.sane())) trace->addIntvl(li.log().progress().time(), r); } break; } } ++li; } } static void scanAll() { for (int i = 0; i < TheRepOpts.theFiles.count(); ++i) { const String &fname = *TheRepOpts.theFiles[i]; ILog log; if (fname == "-") log.stream("stdin", &cin); else log.stream(fname, (istream*)new ifstream(fname.cstr(), ios::binary|ios::in)); clog << "scanning " << fname << endl; LogIter li(&log); if (ProcInfo *proc = scanLog1(li)) { ILog log2; log.stream()->clear(); log.stream()->seekg(0, ios::beg); log2.stream(log.fileName(), log.stream()); LogIter li2(&log2); scanLog2(li2, proc); TheTest->side(proc->logCat()).add(proc); } } } static void checkConsistency() { clog << "checking consistency" << endl; TheTest->checkConsistency(); } static void compileStats(BlobDb &db) { clog << "compiling statistics" << endl; SideInfo::Configure(); TheTest->compileStats(db); } static String htmlFileName(const String &baseName) { return TheRepOpts.theRepDir + '/' + baseName + ".html"; } static XmlTag &addTitle(BlobDb &db, XmlTag &ctx, const String &text) { const XmlNode &prefix = db.ptr("summary.front" + TheTest->execScope(), XmlText(TheTest->label())); XmlText suffix; suffix.buf() << ": " << text; XmlTag *title = new XmlTag("title"); *title << prefix << suffix; ctx.addChild(title); return *title; } #if 0 static void alignRight(XmlTag &parent, const XmlNodes &subject) { XmlTable table; table << 
XmlAttr::Int("border", 0) << XmlAttr("align", "right") << XmlAttr::Int("cellspacing", 0) << XmlAttr::Int("cellpadding", 0); XmlTableRec tr; XmlTableCell cell; cell << subject; tr << cell; table << tr; parent << table; } #endif static void twoColumn(XmlTag &parent, const XmlNodes &lhs, const XmlNodes &rhs) { XmlTable table; table << XmlAttr::Int("border", 0) << XmlAttr::Int("cellspacing", 5) << XmlAttr::Int("cellpadding", 0) << XmlAttr("width", "100%"); XmlTableRec tr; XmlTableCell cellLeft; cellLeft << XmlAttr("valign", "top") << lhs; XmlTableCell cellRight; cellRight << XmlAttr("valign", "top") << rhs; tr << cellLeft << cellRight; table << tr; parent << table; } static const ReportBlob *buildFrontPage(BlobDb &db) { const InfoScope &scope = TheTest->execScope(); ReportBlob blob("summary.front" + scope, ReportBlob::NilTitle); XmlTag doc("document"); XmlTag chapter("chapter"); XmlTag title("title"); title << XmlText(String("Web Polygraph report: ") + TheTest->label()); chapter << title; chapter << db.quote("summary.exec.table" + scope); chapter << XmlTextTag("The following information is available."); XmlTag list("ul"); list << db.ptr("summary.1page", XmlText("One-page summary")); //list << db.ptr("summary.2page" + scope, XmlText("Two-page summary")); list << db.ptr("page.traffic" + scope, XmlText("Traffic rates, counts, and volumes")); list << db.ptr("page.rptm" + scope, XmlText("Response times")); list << db.ptr("page.savings" + scope, XmlText("Hit ratios")); list << db.ptr("page.levels" + scope, XmlText("Concurrency levels and robot population")); list << db.ptr("page.auth" + scope, XmlText("Authentication")); list << db.ptr("page.errors" + scope, XmlText("Errors")); list << db.ptr("page.workload", XmlText("Workload")); list << db.ptr("page.everything", XmlText("Details")); list << db.ptr("page.notes", XmlText("Report generation notes")); chapter << list; doc << chapter; blob << doc; RepToHtmlFile::Location(db, blob, htmlFileName("index")); return db.add(blob); } static const ReportBlob *buildOnePage(BlobDb &db) { ReportBlob blob("summary.1page", ReportBlob::NilTitle); XmlTag doc("document"); XmlTag chapter("chapter"); addTitle(db, chapter, "one-page summary"); const InfoScope &execScope = TheTest->execScope(); XmlNodes lhs; lhs << db.quote("summary.exec.table" + execScope); XmlNodes rhs; rhs << db.quote("load.table" + execScope); //rhs << db.quote("hit.ratio.table" + execScope); twoColumn(chapter, lhs, rhs); chapter << db.quote("summary.exec.phases" + execScope); if (TheTest->cltSideExists()) { const InfoScope &scope = TheTest->cltSide().scope(); chapter << db.quote("load.trace" + scope); chapter << db.quote("rptm.trace" + scope); } doc << chapter; blob << doc; RepToHtmlFile::Location(db, blob, htmlFileName("one-page")); return db.add(blob); } static const ReportBlob *buildTrafficPage(BlobDb &db, const InfoScope &scope) { ReportBlob blob("page.traffic" + scope, ReportBlob::NilTitle); XmlTag doc("document"); XmlTag chapter("chapter"); addTitle(db, chapter, "traffic rates, counts, and volumes"); chapter << db.quote("traffic" + scope); doc << chapter; blob << doc; RepToHtmlFile::Location(db, blob, htmlFileName("traffic")); return db.add(blob); } static const ReportBlob *buildRptmPage(BlobDb &db, const InfoScope &scope) { ReportBlob blob("page.rptm" + scope, ReportBlob::NilTitle); XmlTag doc("document"); XmlTag chapter("chapter"); addTitle(db, chapter, "response times"); chapter << db.quote("rptm" + scope); doc << chapter; blob << doc; RepToHtmlFile::Location(db, blob, htmlFileName("rptm")); 
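	// same pattern as the other build*Page() helpers: Location() registers
	// the output file that renderToFile() will later create for this blob,
	// and db.add() stores the finished page so buildReport() can render it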
return db.add(blob); } static const ReportBlob *buildSavingsPage(BlobDb &db, const InfoScope &scope) { ReportBlob blob("page.savings" + scope, ReportBlob::NilTitle); XmlTag doc("document"); XmlTag chapter("chapter"); addTitle(db, chapter, "hit ratios"); chapter << db.quote("savings" + scope); doc << chapter; blob << doc; RepToHtmlFile::Location(db, blob, htmlFileName("savings")); return db.add(blob); } static const ReportBlob *buildLevelsPage(BlobDb &db, const InfoScope &scope) { ReportBlob blob("page.levels" + scope, ReportBlob::NilTitle); XmlTag doc("document"); XmlTag chapter("chapter"); addTitle(db, chapter, "concurrency levels and robot population"); chapter << db.quote("levels" + scope); doc << chapter; blob << doc; RepToHtmlFile::Location(db, blob, htmlFileName("levels")); return db.add(blob); } static const ReportBlob *buildAuthPage(BlobDb &db, const InfoScope &scope) { ReportBlob blob("page.auth" + scope, ReportBlob::NilTitle); XmlTag doc("document"); XmlTag chapter("chapter"); addTitle(db, chapter, "Authentication"); chapter << db.quote("authentication" + scope); doc << chapter; blob << doc; RepToHtmlFile::Location(db, blob, htmlFileName("authentication")); return db.add(blob); } static const ReportBlob *buildErrorsPage(BlobDb &db, const InfoScope &scope) { ReportBlob blob("page.errors" + scope, ReportBlob::NilTitle); XmlTag doc("document"); XmlTag chapter("chapter"); addTitle(db, chapter, "errors"); chapter << db.quote("errors" + scope); doc << chapter; blob << doc; RepToHtmlFile::Location(db, blob, htmlFileName("errors")); return db.add(blob); } static const ReportBlob *buildWorkloadPage(BlobDb &db, const InfoScope &scope) { ReportBlob blob("page.workload", ReportBlob::NilTitle); XmlTag doc("document"); XmlTag chapter("chapter"); addTitle(db, chapter, "workload"); chapter << db.include("workload" + scope); doc << chapter; blob << doc; RepToHtmlFile::Location(db, blob, htmlFileName("workload")); return db.add(blob); } static const ReportBlob *buildNotesPage(BlobDb &db) { ReportBlob blob("page.notes", ReportBlob::NilTitle); XmlTag doc("document"); XmlTag chapter("chapter"); addTitle(db, chapter, "report generation notes"); chapter << db.include("report_notes"); doc << chapter; blob << doc; RepToHtmlFile::Location(db, blob, htmlFileName("notes")); return db.add(blob); } static void addScopeRecord(BlobDb &db, const String &name, const String &label, const InfoScopes &scopes, const String &ctx, XmlTable &table) { XmlTableRec tr; tr << XmlTableHeading(label); for (int i = 0; i < scopes.count(); ++i) { const InfoScope &scope = *scopes[i]; XmlTableCell cell; cell << XmlAttr("align", "center"); XmlText text(scope.name()); if (scope.image() == ctx) { cell << XmlAttr::Int("emphasized", true); cell << text; } else { XmlNode &ptr = db.ptr(name + scope, text); *ptr.attrs() << XmlAttr::Int("maybe_null", true); cell << ptr; } tr << cell; } table << tr; } static void addScopeTable(BlobDb &db, const String &name, const InfoScopes &cltScopes, const InfoScopes &srvScopes, const InfoScopes &tstScopes, const String &ctx, XmlTag &tag) { XmlTable table; table << XmlAttr::Int("border", 1) << XmlAttr::Int("cellspacing", 1); addScopeRecord(db, name, "client side", cltScopes, ctx, table); addScopeRecord(db, name, "server side", srvScopes, ctx, table); addScopeRecord(db, name, "all sides", tstScopes, ctx, table); tag << table; } static void buildEverything(BlobDb &db, Array &res) { const XmlNodes &blobs = db.blobs(); InfoScopes cltScopes; InfoScopes srvScopes; InfoScopes tstScopes; 
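	// collect the scopes known on the client side, the server side, and for
	// the whole test; addScopeTable() below uses them to cross-link the
	// per-scope "everything" pages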
TheTest->scopes(tstScopes); if (TheTest->cltSideExists()) TheTest->cltSide().scopes(cltScopes); if (TheTest->srvSideExists()) TheTest->srvSide().scopes(srvScopes); Map< Array *> scope2names; Map< Array *> name2scopes; XmlSearchRes vprimitives; blobs.selByAttrName("vprimitive", vprimitives); // segregate vprimitive blobs based on their scope for (int p = 0; p < vprimitives.count(); ++p) { const String &key = vprimitives[p]->attrs()->value("key"); if (const char *scopeImage = key.str(".scope=")) { const String name = key(0, scopeImage-key.cstr()); { Array *names = 0; if (!scope2names.find(scopeImage, names)) { names = new Array(); scope2names.add(scopeImage, names); } names->append(new String(name)); } { Array *scopes = 0; if (!name2scopes.find(name, scopes)) { scopes = new Array(); name2scopes.add(name, scopes); } scopes->append(scopeImage); } } } const String pfx = "page.everything"; // for each vprimitive name, list scopes it belongs to { ReportBlob blob(pfx, ReportBlob::NilTitle); XmlTag doc("document"); XmlTag chapter("chapter"); addTitle(db, chapter, "everything (index)"); for (int i = 0; i < name2scopes.count(); ++i) { const String &name = name2scopes.keyAt(i); const Array *scopes = name2scopes.valAt(i); // get a title in hope that all titles are the same const ReportBlob &b = db.get(name + scopes->last()); ReportBlob sblob(pfx + "." + name, ReportBlob::NilTitle); XmlSection s(b.attrs()->value("vprimitive")); addScopeTable(db, String("page.everything.") + name, cltScopes, srvScopes, tstScopes, 0, s); sblob << s; chapter << *db.add(sblob); delete scopes; } doc << chapter; blob << doc; RepToHtmlFile::Location(db, blob, htmlFileName("everything")); res.append(db.add(blob)); } // for each scope, create a page with corresponding vprimitives for (int s = 0; s < scope2names.count(); ++s) { const String &scopeImage = scope2names.keyAt(s); const Array *names = scope2names.valAt(s); ReportBlob blob(pfx + scopeImage, ReportBlob::NilTitle); XmlTag doc("document"); XmlTag chapter("chapter"); XmlTag &title = addTitle(db, chapter, "everything (scoped)"); const char *ctx = scopeImage.cstr() + strlen(".scope="); addScopeTable(db, pfx, cltScopes, srvScopes, tstScopes, ctx, title); XmlTable table; XmlTableRec tr1, tr2; tr1 << XmlTableHeading("highlighted cell(s) above show current scope"); tr2 << XmlTableHeading("links point to other scopes"); table << tr1 << tr2; title << table; chapter << title; for (int i = 0; i < names->count(); ++i) { const String key = *names->item(i) + scopeImage; const ReportBlob &b = db.get(key); ReportBlob sblob(pfx + "." + key, ReportBlob::NilTitle); XmlSection s; s << XmlAttr("src", key); XmlTag stitle("title"); stitle << db.ptr(pfx + "." 
+ *names->item(i), XmlText(b.attrs()->value("vprimitive"))); s << stitle; s << db.include(key); sblob << s; chapter << *db.add(sblob); delete names->item(i); } doc << chapter; blob << doc; RepToHtmlFile::Location(db, blob, htmlFileName(String("everything") + scopeImage)); res.append(db.add(blob)); delete names; } } static void renderToFile(BlobDb &db, const String &key) { if (const String fname = RepToHtmlFile::Location(key)) { clog << "creating: " << fname << endl; ofstream f(fname.cstr()); if (Should(f)) { RepToHtmlFile r(db, &f, fname); r.renderReportBlob(db.get(key)); } } else { clog << "internal_error: no location for " << key << endl; } } static void buildReport(BlobDb &db) { clog << "building report" << endl; /* build first, then render so that all links are defined */ Array blobs; buildEverything(db, blobs); blobs.append(buildFrontPage(db)); blobs.append(buildOnePage(db)); blobs.append(buildTrafficPage(db, TheTest->execScope())); blobs.append(buildRptmPage(db, TheTest->execScope())); blobs.append(buildSavingsPage(db, TheTest->execScope())); blobs.append(buildLevelsPage(db, TheTest->execScope())); blobs.append(buildAuthPage(db, TheTest->execScope())); blobs.append(buildErrorsPage(db, TheTest->execScope())); blobs.append(buildWorkloadPage(db, TheTest->execScope())); blobs.append(buildNotesPage(db)); for (int i = 0; i < blobs.count(); ++i) renderToFile(db, blobs[i]->key()); //doc.print(clog, "DOC: "); //db.print(clog, "DB: "); } static String guessLabel() { Array parts; Array ignoredParts; for (int i = 0; i < TheRepOpts.theFiles.count(); ++i) { String fname = *TheRepOpts.theFiles[i]; int partIdx = 0; int skip = 0; while (const int pos = strcspn(fname.cstr()+skip, "-.:")) { const String part = fname(0, pos+1); if (i == 0) { parts.append(new String(part)); ignoredParts.append(false); } else if (partIdx < parts.count() && part != *parts[partIdx]) { ignoredParts[partIdx] = true; } partIdx++; if (pos >= fname.len()) break; fname = fname(pos+1, fname.len()); skip = strspn(fname.cstr(), "-.:"); } } String label; // cut the last part off because it is probably an extension const int lastIdx = parts.count() - 2; for (int p = 0; p <= lastIdx ; ++p) { if (!ignoredParts[p]) { const int len = p == lastIdx ? strcspn(parts[p]->cstr(), "-.:") : parts[p]->len(); label += (*parts[p])(0, len); } } if (!label) label = "unlabeled"; clog << "no test label specified, using '" << label << "'" << endl; while (parts.count()) delete parts.pop(); return label; } static void shutdownAtNew() { cerr << "error: ran out of RAM" << endl << xexit; } static void configure() { const String label = TheRepOpts.theLabel ? 
(String)TheRepOpts.theLabel : guessLabel(); if (!TheRepOpts.theRepDir) TheRepOpts.theRepDir.val(String("/tmp/polyrep/") + label); TheTest = new TestInfo(label); ReportFigure::TheBaseDir = TheRepOpts.theRepDir + "/figures"; Should(::system((String("mkdir -p ") + TheRepOpts.theRepDir).cstr()) == 0); Should(::system((String("mkdir -p ") + ReportFigure::TheBaseDir).cstr()) == 0); Should(xset_new_handler(&shutdownAtNew)); } static void configureExecScope() { if (TheRepOpts.thePhases) { InfoScope scope; scope.name("baseline"); scope.addSide("client"); scope.addSide("server"); for (int i = 0; i < TheRepOpts.thePhases.val().count(); ++i) { const String &name = *TheRepOpts.thePhases.val()[i]; if (const SideInfo *side = TheTest->cltSideExists()) { if (!side->hasPhase(name)) { cerr << "warning: ignoring '--phases " << name << "' because client-side " << "logs lack the named phase" << endl; continue; } } if (const SideInfo *side = TheTest->srvSideExists()) { if (!side->hasPhase(name)) { cerr << "warning: ignoring '--phases " << name << "' because server-side " << "logs lack the named phase" << endl; continue; } } scope.addPhase(*TheRepOpts.thePhases.val()[i]); } if (!scope.phases().empty()) TheTest->execScope(scope); } } int main(int argc, char *argv[]) { CmdLine cmd; cmd.configure(Array() << &TheRepOpts); if (!cmd.parse(argc, argv) || !TheRepOpts.validate()) return -1; configure(); scanAll(); configureExecScope(); checkConsistency(); BlobDb db; compileStats(db); buildReport(db); return 0; } polygraph-4.3.2/src/loganalyzers/ScatteredFig.cc0000644000175000017500000000330611546440450021304 0ustar testertester /* Web Polygraph http://www.web-polygraph.org/ * Copyright 2003-2011 The Measurement Factory * Licensed under the Apache License, Version 2.0 */ #include "base/polygraph.h" #include #include "xstd/gadgets.h" #include "loganalyzers/PointStex.h" //#include "loganalyzers/ReportBlob.h" //#include "loganalyzers/BlobDb.h" //#include "loganalyzers/RepOpts.h" #include "loganalyzers/PhaseInfo.h" #include "loganalyzers/PhaseTrace.h" #include "loganalyzers/ScatteredFig.h" ScatteredFig::ScatteredFig(): theStex1(0), theStex2(0), thePhase(0), theTrace(0) { } void ScatteredFig::stats(const Stex *aStex1, const Stex *aStex2, const PhaseInfo *aPhase) { theStex1 = aStex1; theStex2 = aStex2; thePhase = aPhase; theTrace = &thePhase->trace(); Assert(theStex1 && theStex2 && theTrace); } void ScatteredFig::setCtrlOptions() { theLabelX1 = theStex1->name() + ", " + theStex1->unit(); theLabelY1 = theStex2->name() + ", " + theStex2->unit(); theDataStyle = "points"; ReportFigure::setCtrlOptions(); } int ScatteredFig::createCtrlFile() { if (ReportFigure::createCtrlFile() < 0) return -1; addPlotLine("", theLabelY1); addedAllPlotLines(); int pointCount = 0; for (int i = 0; i < theTrace->count(); ++i) pointCount += dumpDataLine(theTrace->winStats(i)); return pointCount; } int ScatteredFig::dumpDataLine(const StatIntvlRec &r) { bool bothKnown = dumpAxis(theStex1, r); *theCtrlFile << ' '; bothKnown = dumpAxis(theStex2, r) && bothKnown; *theCtrlFile << endl; return bothKnown ? 
1 : 0; } bool ScatteredFig::dumpAxis(const Stex *stex, const StatIntvlRec &r) { if (stex->valueKnown(r)) { *theCtrlFile << stex->value(r); return true; } else { *theCtrlFile << '?'; return false; } } polygraph-4.3.2/src/loganalyzers/HistStex.h0000644000175000017500000000375211546440450020362 0ustar testertester /* Web Polygraph http://www.web-polygraph.org/ * Copyright 2003-2011 The Measurement Factory * Licensed under the Apache License, Version 2.0 */ #ifndef POLYGRAPH__LOGANALYZERS_HISTSTEX_H #define POLYGRAPH__LOGANALYZERS_HISTSTEX_H #include "xstd/String.h" #include "xstd/gadgets.h" #include "base/StatPhaseRec.h" #include "loganalyzers/PhaseInfo.h" #include "loganalyzers/StexBase.h" // an algorithm of extracting a single value statistics out of // interval stats record class HistStex: public StexBase { public: typedef TimeHist StatPhaseRec::*TimeHistPtr; typedef CompoundXactStat StatPhaseRec::*CompoundXactPtr; HistStex(const String &aKey, const String &aName, const String &aUnit): StexBase(aKey, aName, aUnit) {} virtual bool valueKnown(const PhaseInfo &phase) const { return value(phase)->known(); } virtual const Histogram *value(const PhaseInfo &phase) const = 0; }; class PipelineDepthHistStex: public HistStex { public: PipelineDepthHistStex(): HistStex("pdepths", "pipeline depth", "xact") {} virtual const Histogram *value(const PhaseInfo &phase) const { return &phase.stats().theConnPipelineDepths; } }; class TimeHistStex: public HistStex { public: TimeHistStex(const String &aKey, const String &aName, const TimeHistPtr aTimeHist): HistStex(aKey, aName, "msec"), theTimeHist(aTimeHist) {} virtual const Histogram *value(const PhaseInfo &phase) const { return &(phase.stats().*theTimeHist); } protected: const TimeHistPtr theTimeHist; }; template < class T, T CompoundXactStat::*histPtr > class CompoundXactHistStex: public HistStex { public: CompoundXactHistStex(const String &aName, const String &aUnit, const CompoundXactPtr aCXactPtr): HistStex("cxact", aName, aUnit), theCXactPtr(aCXactPtr) {} virtual const Histogram *value(const PhaseInfo &phase) const { return &(phase.stats().*theCXactPtr.*histPtr); } protected: const CompoundXactPtr theCXactPtr; }; #endif polygraph-4.3.2/src/loganalyzers/PhaseTrace.cc0000644000175000017500000000505111546440450020756 0ustar testertester /* Web Polygraph http://www.web-polygraph.org/ * Copyright 2003-2011 The Measurement Factory * Licensed under the Apache License, Version 2.0 */ #include "base/polygraph.h" #include "xstd/gadgets.h" #include "loganalyzers/PhaseTrace.h" Time PhaseTrace::TheWinLen = Time::Sec(60); bool PhaseTraceWin::contains(Time tm) const { return start <= tm && tm < start + PhaseTrace::TheWinLen; } PhaseTrace::PhaseTrace() { } PhaseTrace::~PhaseTrace() { while (theWins.count()) delete theWins.pop().stats; } void PhaseTrace::configure(const StatIntvlRec &stats) { theAggr.merge(stats); theWins.stretch((int)(stats.theDuration/TheWinLen)); } Time PhaseTrace::start() const { return theWins.count() ? 
theWins[0].start : Time(); } Time PhaseTrace::winPos(int idx) const { Assert(0 <= idx && idx < theWins.count()); return theWins[idx].start + TheWinLen/2; } const StatIntvlRec &PhaseTrace::winStats(int idx) const { Assert(0 <= idx && idx < theWins.count()); Assert(theWins[idx].stats); return *theWins[idx].stats; } void PhaseTrace::addIntvl(Time tm, const StatIntvlRec &r) { PhaseTraceWin &w = allocWin(tm); w.stats->concat(r); } void PhaseTrace::mergeWin(const PhaseTraceWin &win) { PhaseTraceWin &w = allocWin(win.start); w.stats->merge(*win.stats); } void PhaseTrace::concatWin(const PhaseTraceWin &win) { PhaseTraceWin &w = allocWin(win.start); w.stats->concat(*win.stats); } PhaseTraceWin &PhaseTrace::allocWin(Time tm) { int idx = -1; if (!findWin(tm, idx)) { PhaseTraceWin win; win.stats = new StatIntvlRec; win.start = TheWinLen * (int)(tm/TheWinLen); theWins.insert(win, idx); } Assert(0 <= idx && idx < count()); Assert(theWins[idx].stats); return theWins[idx]; } // if tm belongs to a window, return that window's index // otherwise, return index where a new window should be insterted to // maintain win.start order bool PhaseTrace::findWin(Time tm, int &idx) const { idx = 0; for (int left = 0, right = count() - 1; left <= right;) { idx = (left + right)/2; if (theWins[idx].contains(tm)) return true; else if (theWins[idx].start < tm) left = ++idx; else right = idx-1; } return false; } void PhaseTrace::merge(const PhaseTrace &trace) { theAggr.merge(trace.theAggr); theWins.stretch(trace.theWins.count()); for (int i = 0; i < trace.theWins.count(); ++i) mergeWin(trace.theWins[i]); } void PhaseTrace::concat(const PhaseTrace &trace) { theAggr.concat(trace.theAggr); theWins.stretch(theWins.count() + trace.theWins.count()); for (int i = 0; i < trace.theWins.count(); ++i) concatWin(trace.theWins[i]); } polygraph-4.3.2/src/loganalyzers/SideInfo.cc0000644000175000017500000026637711546440450020463 0ustar testertester /* Web Polygraph http://www.web-polygraph.org/ * Copyright 2003-2011 The Measurement Factory * Licensed under the Apache License, Version 2.0 */ #include "base/polygraph.h" #include "xstd/h/iomanip.h" #include "base/AnyToString.h" #include "base/ErrorRec.h" #include "base/ErrorStat.h" #include "base/ProtoIntvlStat.h" #include "base/polyLogCats.h" #include "runtime/httpHdrs.h" #include "runtime/HttpDate.h" #include "xml/XmlAttr.h" #include "xml/XmlParagraph.h" #include "xml/XmlText.h" #include "loganalyzers/ReportBlob.h" #include "loganalyzers/BlobDb.h" #include "loganalyzers/HistogramFigure.h" #include "loganalyzers/RptmHistFig.h" #include "loganalyzers/SizeHistFig.h" #include "loganalyzers/PointTraceFig.h" #include "loganalyzers/RptmTraceFig.h" #include "loganalyzers/LevelTraceFig.h" #include "loganalyzers/LoadTraceFig.h" #include "loganalyzers/ScatteredFig.h" #include "loganalyzers/InfoScopes.h" #include "loganalyzers/Stex.h" #include "loganalyzers/PointStex.h" #include "loganalyzers/HistStex.h" #include "loganalyzers/LevelStex.h" #include "loganalyzers/LoadStexes.h" #include "loganalyzers/PhaseTrace.h" #include "loganalyzers/TestInfo.h" #include "loganalyzers/ProcInfo.h" #include "loganalyzers/SideInfo.h" #include "loganalyzers/StatTable.h" Stex *SideInfo::TheAllReps = 0; Stex *SideInfo::TheAllReqs = 0; Stex *SideInfo::TheUsefulProxyValidation = 0; Array SideInfo::TheReplyStex; Array SideInfo::TheRequestStex; Array SideInfo::TheCompoundReplyStex; Array SideInfo::TheCompoundRequestStex; Array SideInfo::TheAuthStex; Array SideInfo::TheReplyStatusStex; static XmlAttr algnLeft("align", "left"); 
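/*
 * The static Stex arrays above hold the statistics extractors that drive the
 * per-stream report tables compiled in this file. As a minimal usage sketch,
 * each table compiler walks one of the arrays and emits a row per extractor;
 * for example, cmplReplyStreamTable() below does exactly this:
 *
 *	for (int s = 0; s < TheReplyStex.count(); ++s)
 *		cmplStreamTableRec(db, table, *TheReplyStex[s], phase, scope, TheAllReps);
 *
 * The algnLeft/algnRight objects are shared XML alignment attributes reused
 * by the table-building code throughout this file.
 */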
static XmlAttr algnRight("align", "right"); SideInfo::SideInfo(int aLogCat): theLogCat(aLogCat), theTest(0) { Assert(theLogCat == lgcCltSide || theLogCat == lgcSrvSide); theScope.name("all phases"); theScope.addSide(name()); } SideInfo::~SideInfo() { while (theProcs.count()) { theProcs.last()->side(0); delete theProcs.pop(); } while (thePhases.count()) delete thePhases.pop(); } void SideInfo::test(TestInfo *t) { Assert(!theTest ^ !t); theTest = t; } int SideInfo::logCat() const { return theLogCat; } const String &SideInfo::name() const { static String cltName = "client"; static String srvName = "server"; return theLogCat == lgcCltSide ? cltName : srvName; } const String &SideInfo::otherName() const { static String cltName = "client"; static String srvName = "server"; return theLogCat == lgcCltSide ? srvName : cltName; } const String &SideInfo::benchmarkVersion() const { return theBenchmarkVersion; } const String &SideInfo::pglCfg() const { return thePglCfg; } Time SideInfo::startTime() const { return theStartTime; } int SideInfo::scopes(InfoScopes &res) const { res.add(execScope()); res.add(theScope); for (int p = 0; p < thePhases.count(); ++p) { const String &pname = thePhases[p]->name(); Scope *scope = new Scope(theScope.onePhase(pname)); scope->name(pname); res.absorb(scope); } return res.count(); } const InfoScope &SideInfo::execScope() const { Assert(theTest); if (!theExecScope) { theExecScope = theTest->execScope().oneSide(name()); theExecScope.name("baseline"); } return theExecScope; } const StatPhaseRec &SideInfo::execScopeStats() const { return theExecScopePhase.stats(); } int SideInfo::repCount(const Scope &scope) const { int count = 0; for (int i = 0; i < theProcs.count(); ++i) { count += theProcs[i]->repCount(scope); } return count; } int SideInfo::hitCount(const Scope &scope) const { int count = 0; for (int i = 0; i < theProcs.count(); ++i) { count += theProcs[i]->hitCount(scope); } return count; } int SideInfo::offeredHitCount(const Scope &scope) const { int count = 0; for (int i = 0; i < theProcs.count(); ++i) { count += theProcs[i]->offeredHitCount(scope); } return count; } int SideInfo::uselessProxyValidationCount(const Scope &scope) const { int count = 0; for (int i = 0; i < theProcs.count(); ++i) { count += theProcs[i]->uselessProxyValidationCount(scope); } return count; } BigSize SideInfo::repVolume(const Scope &scope) const { BigSize volume = 0; for (int i = 0; i < theProcs.count(); ++i) { volume += theProcs[i]->repVolume(scope); } return volume; } BigSize SideInfo::hitVolume(const Scope &scope) const { BigSize volume = 0; for (int i = 0; i < theProcs.count(); ++i) { volume += theProcs[i]->hitVolume(scope); } return volume; } BigSize SideInfo::offeredHitVolume(const Scope &scope) const { BigSize volume = 0; for (int i = 0; i < theProcs.count(); ++i) { volume += theProcs[i]->offeredHitVolume(scope); } return volume; } BigSize SideInfo::uselessProxyValidationVolume(const Scope &scope) const { BigSize volume = 0; for (int i = 0; i < theProcs.count(); ++i) { volume += theProcs[i]->uselessProxyValidationVolume(scope); } return volume; } AggrStat SideInfo::lastReqByteWritten(const Scope &scope) const { AggrStat stat; for (int i = 0; i < theProcs.count(); ++i) { stat += theProcs[i]->lastReqByteWritten(scope); } return stat; } AggrStat SideInfo::lastReqByteRead(const Scope &scope) const { AggrStat stat; for (int i = 0; i < theProcs.count(); ++i) { stat += theProcs[i]->lastReqByteRead(scope); } return stat; } AggrStat SideInfo::firstRespByteWritten(const Scope &scope) const { 
AggrStat stat; for (int i = 0; i < theProcs.count(); ++i) { stat += theProcs[i]->firstRespByteWritten(scope); } return stat; } AggrStat SideInfo::firstRespByteRead(const Scope &scope) const { AggrStat stat; for (int i = 0; i < theProcs.count(); ++i) { stat += theProcs[i]->firstRespByteRead(scope); } return stat; } void SideInfo::add(ProcInfo *p) { Assert(p); p->side(this); theProcs.append(p); // sync phases thePhases.stretch(p->phaseCount()); for (int i = 0; i < p->phaseCount(); ++i) addPhase(p->phase(i)); } void SideInfo::addPhase(const PhaseInfo &procPhase) { const String &name = procPhase.name(); PhaseInfo *accPhase = findPhase(name); if (!accPhase) { accPhase = new PhaseInfo(); thePhases.append(accPhase); theScope.addPhase(name); } accPhase->merge(procPhase); } ProcInfo &SideInfo::proc(int idx) { Assert(0 <= idx && idx < theProcs.count()); return *theProcs[idx]; } int SideInfo::procCount() const { return theProcs.count(); } const PhaseInfo &SideInfo::phase(const Scope &scope) const { if (scope.phases().count() == 1) return phase(*scope.phases().last()); if (scope.phases().count() == thePhases.count()) return theAllPhasesPhase; return theExecScopePhase; // what else can it be? } const PhaseInfo &SideInfo::phase(const String &name) const { const PhaseInfo *p = findPhase(name); Assert(p); return *p; } const PhaseInfo *SideInfo::findPhase(const String &name) const { for (int i = 0; i < thePhases.count(); ++i) { if (thePhases[i]->name() == name) return thePhases[i]; } return 0; } PhaseInfo *SideInfo::findPhase(const String &name) { for (int i = 0; i < thePhases.count(); ++i) { if (thePhases[i]->name() == name) return thePhases[i]; } return 0; } const PhaseInfo &SideInfo::phase(int idx) const { Assert(0 <= idx && idx < thePhases.count()); return *thePhases[idx]; } int SideInfo::phaseCount() const { return thePhases.count(); } void SideInfo::checkCommonPglCfg() { Assert(!thePglCfg); if (procCount()) { const ProcInfo &p = proc(0); bool mismatch = false; for (int i = 1; i < procCount(); ++i) { if (p.pglCfg() != proc(i).pglCfg()) { mismatch = true; cerr << "PGL configuration in " << p.name() << " differs from the one in " << proc(i).name() << endl; } } if (!mismatch) thePglCfg = p.pglCfg(); } } void SideInfo::checkCommonBenchmarkVersion() { Assert(!theBenchmarkVersion); if (procCount()) { const ProcInfo &p = proc(0); bool mismatch = false; for (int i = 1; i < procCount(); ++i) { if (p.benchmarkVersion() != proc(i).benchmarkVersion()) { mismatch = true; cerr << "benchmark version in " << p.name() << " differs from the one in " << proc(i).name() << endl; } } if (!mismatch) theBenchmarkVersion = p.benchmarkVersion(); } } void SideInfo::checkCommonStartTime() { Time firstTime, lastTime; String firstName, lastName; for (int i = 0; i < procCount(); ++i) { const Time t = proc(i).startTime(); if (t < 0) continue; if (firstTime < 0 || t < firstTime) { firstTime = t; firstName = proc(i).name(); } if (lastTime < 0 || lastTime < t) { lastTime = t; lastName = proc(i).name(); } } const Time diff = lastTime - firstTime; if (diff > Time::Sec(5*60)) { cerr << "warning: " << name() << "-side processes were started" " with a " << diff << " gap" << endl; cerr << "\tfirst process to start: " << firstName; HttpDatePrint(cerr << " at ", firstTime); cerr << "\tlast process to start: " << lastName; HttpDatePrint(cerr << " at ", lastTime); } theStartTime = firstTime; // regardless of the diff? 
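	// the earliest per-process start time becomes the side start time, even when the start-gap warning above fires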
} void SideInfo::checkCommonPhases() { if (procCount()) { const ProcInfo &p = proc(0); bool mismatch = false; for (int i = 1; i < procCount(); ++i) { if (p.phaseCount() != proc(i).phaseCount()) { mismatch = true; cerr << p.name() << " has " << p.phaseCount() << " phases" << " while " << proc(i).name() << " has " << proc(i).phaseCount() << endl; } const int pCount = Min(p.phaseCount(), proc(i).phaseCount()); for (int n = 0; n < pCount; ++n) { if (p.phase(n).name() != proc(i).phase(n).name()) { mismatch = true; cerr << "phase " << n << " in " << p.name() << " is named " << p.phase(n).name() << " while" << " phase " << n << " in " << proc(i).name() << " is named " << proc(i).phase(n).name() << endl; } } } if (mismatch) { cerr << "phase mismatch detected; any report information based" << " on phase aggregation is likely to be wrong" << endl; } } } void SideInfo::checkConsistency() { for (int i = 0; i < procCount(); ++i) proc(i).checkConsistency(); checkCommonBenchmarkVersion(); checkCommonPglCfg(); checkCommonStartTime(); checkCommonPhases(); } void SideInfo::CompileEmptyStats(BlobDb &db, const Scope &scope) { static const String tlTitle = "side stats"; ReportBlob blob(BlobDb::Key("summary", scope), tlTitle); XmlParagraph para; XmlText text; text.buf() << "no side information was extracted from the logs"; para << text; blob << para; db << blob; } void SideInfo::compileStats(BlobDb &db) { clog << "compiling statistics for the " << name() << " side" << endl; for (int i = 0; i < theProcs.count(); ++i) { theProcs[i]->compileStats(db); theExecScopePhase.merge(theProcs[i]->execScopePhase()); theAllPhasesPhase.merge(theProcs[i]->allPhasesPhase()); } bool gotExecScope = false; for (int i = 0; i < phaseCount(); ++i) { PhaseInfo &phase = *thePhases[i]; Scope phScope = scope().onePhase(phase.name()); phScope.name(phase.name()); gotExecScope = gotExecScope || phScope.image() == execScope().image(); compileStats(db, phase, phScope); } if (!gotExecScope) compileStats(db, theExecScopePhase, execScope()); // else should copy existing phase(i) stats? 
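	// finally, compile stats for the all-phases scope and then the overall side summary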
compileStats(db, theAllPhasesPhase, theScope); cmplSideSum(db); } void SideInfo::compileStats(BlobDb &db, const PhaseInfo &phase, const Scope &scope) { theUnseenObjects.clear(); const String sfx = BlobDb::KeySuffix(scope); const StatIntvlRec &stats = phase.availStats(); clog << "\t scope: " << '"' << scope.name() << '"' << endl; if (!phase.hasStats()) clog << "\t\twarning: no phase statistics stored in this scope" << endl; addMeasBlob(db, "xact.count" + sfx, stats.xactCnt(), "xact", "transaction count"); addMeasBlob(db, "xact.error.count" + sfx, stats.theXactErrCnt, "xact", "erroneous xaction count"); addMeasBlob(db, "xact.error.ratio" + sfx, stats.errPercent(), "%", "portion of erroneous transactions"); addMeasBlob(db, "duration" + sfx, stats.theDuration, "test duration"); addMeasBlob(db, "offered.hit.ratio.obj" + sfx, stats.theIdealHR.dhp(), "%", "offered document hit ratio"); addMeasBlob(db, "offered.hit.ratio.byte" + sfx, stats.theIdealHR.bhp(), "%", "offered byte hit ratio"); addMeasBlob(db, "hit.ratio.obj" + sfx, stats.theRealHR.dhp(), "%", "measured document hit ratio"); addMeasBlob(db, "hit.ratio.byte" + sfx, stats.theRealHR.bhp(), "%", "measured byte hit ratio"); addMeasBlob(db, "req.rate" + sfx, stats.reqRate(), "xact/sec", "offered request rate"); addMeasBlob(db, "rep.rate" + sfx, stats.repRate(), "xact/sec", "measured response rate"); addMeasBlob(db, "req.bwidth" + sfx, stats.reqBwidth()/(1024*1024/8), "Mbits/sec", "request bandwidth"); addMeasBlob(db, "rep.bwidth" + sfx, stats.repBwidth()/(1024*1024/8), "Mbits/sec", "response bandwidth"); addMeasBlob(db, "rep.rptm.mean" + sfx, Time::Secd(stats.repTime().mean()/1000.), "mean response time"); cmpProtoStats(db, phase, &StatIntvlRec::theSocksStat, scope); cmpProtoStats(db, phase, &StatIntvlRec::theSslStat, scope); cmpProtoStats(db, phase, &StatIntvlRec::theFtpStat, scope); addMeasBlob(db, "conn.count" + sfx, stats.theConnUseCnt.count(), "conn", "connection count"), addMeasBlob(db, "conn.pipeline.count" + sfx, stats.theConnPipelineDepth.count(), "conn", "pipelined connection count"); addMeasBlob(db, "conn.pipeline.ratio" + sfx, Percent(stats.theConnPipelineDepth.count(), stats.theConnUseCnt.count()), "%", "portion of pipelined connections"); addMeasBlob(db, "conn.pipeline.depth.min" + sfx, stats.theConnPipelineDepth.min(), "xact/pipe", "minimum transactions in pipeline"); addMeasBlob(db, "conn.pipeline.depth.max" + sfx, stats.theConnPipelineDepth.max(), "xact/pipe", "maximum transactions in pipeline"); addMeasBlob(db, "conn.pipeline.depth.mean" + sfx, stats.theConnPipelineDepth.mean(), "xact/pipe", "mean transactions in pipeline"); addMeasBlob(db, "url.unique.count" + sfx, stats.theUniqUrlCnt, "xact", "unique URL count"); addMeasBlob(db, "url.recurrence.ratio" + sfx, stats.recurrenceRatio(), "%", "recurrence ratio"); cmplLoadBlob(db, scope); cmplProtoLoadBlob(db, phase, &StatIntvlRec::theSocksStat, scope); cmplProtoLoadBlob(db, phase, &StatIntvlRec::theSslStat, scope); cmplProtoLoadBlob(db, phase, &StatIntvlRec::theFtpStat, scope); cmplRptmFigure(db, scope); cmplRptmVsLoadFigure(db, phase, scope); cmplHitRatioTable(db, scope); cmplXactLevelTable(db, phase, scope); cmplConnLevelTable(db, phase, scope); cmplConnPipelineBlob(db, scope); cmplPopulLevelTable(db, phase, scope); cmplReplyStreamTable(db, phase, scope); cmplRequestStreamTable(db, phase, scope); cmplReplyObjectTable(db, phase, scope); cmplRequestObjectTable(db, phase, scope); cmplValidationTable(db, phase, scope); cmplErrorTable(db, phase, scope); 
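	// compound-transaction, authentication, status-code, and cookie breakdowns below reuse the same stream/object table pattern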
cmplCompoundReplyStreamTable(db, phase, scope); cmplCompoundRequestStreamTable(db, phase, scope); cmplCompoundReplyObjectTable(db, phase, scope); cmplCompoundRequestObjectTable(db, phase, scope); cmplAuthStreamTable(db, phase, scope); cmplAuthObjectTable(db, phase, scope); cmplReplyStatusStreamTable(db, phase, scope); cmplReplyStatusObjectTable(db, phase, scope); cmplCookieTable(db, phase, scope); cmplObjectBlobs(db, phase, scope, TheReplyStex); cmplObjectBlobs(db, phase, scope, TheRequestStex); cmplObjectBlobs(db, phase, scope, TheCompoundReplyStex); cmplObjectBlobs(db, phase, scope, TheCompoundRequestStex); cmplObjectBlobs(db, phase, scope, TheAuthStex); cmplObjectBlobs(db, phase, scope, TheReplyStatusStex); cmplUnseenObjectsBlob(db, scope); } void SideInfo::cmplLoadBlob(BlobDb &db, const Scope &scope) { ReportBlob blob(BlobDb::Key("load", scope), ReportBlob::NilTitle); blob << XmlAttr("vprimitive", "Load"); cmplLoadTable(db, blob, scope); cmplLoadFigure(db, blob, scope); { XmlTag descr("description"); XmlTextTag p1; p1.buf() << "The load table shows offered and measured load from " << name() << " side point of view. Offered load statistics " << "are based on the request stream. Measured load statistics " << "are based on reply messages. The 'count' column depicts the " << "number of requests or responses. "; descr << p1; XmlTextTag p2; p2.buf() << "The 'volume' column is a little bit more tricky to " << "interpret. Offered volume is " << "reply bandwidth that would have been required to support " << "offered load. This volume is computed as request rate " << "multiplied by measured mean response size. " << "Measured volume is the actual or measured reply bandwidth."; descr << p2; blob << descr; } db << blob; } void SideInfo::cmplLoadTable(BlobDb &db, ReportBlob &parent, const Scope &scope) { ReportBlob blob(BlobDb::Key("load.table", scope), name() + " load table"); XmlTable table; table << XmlAttr::Int("border", 1) << XmlAttr::Int("cellspacing", 1); { XmlTableRec tr; tr << XmlTableHeading("Load"); XmlTableHeading dhr("Count"); dhr << XmlTag("br") << XmlText("(xact/sec)"); tr << dhr; XmlTableHeading bhr("Volume"); bhr << XmlTag("br") << XmlText("(Mbits/sec)"); tr << bhr; table << tr; } { XmlTableRec tr; tr << algnLeft << XmlTableHeading("offered"); XmlTableCell cnt; cnt << algnRight << db.quote("req.rate" + scope); tr << cnt; XmlTableCell vol; vol << algnRight << db.quote("req.bwidth" + scope); tr << vol; table << tr; } { XmlTableRec tr; tr << algnLeft << XmlTableHeading("measured"); XmlTableCell cnt; cnt << algnRight << db.quote("rep.rate" + scope); tr << cnt; XmlTableCell vol; vol << algnRight << db.quote("rep.bwidth" + scope); tr << vol; table << tr; } blob << table; db << blob; parent << blob; } void SideInfo::cmplLoadFigure(BlobDb &db, ReportBlob &blob, const Scope &scope) { SideLoadStex stex1("req", "offered", &StatIntvlRec::reqRate, &StatIntvlRec::reqBwidth); SideLoadStex stex2("rep", "measured", &StatIntvlRec::repRate, &StatIntvlRec::repBwidth); LoadTraceFig fig; fig.configure("load.trace" + scope, "load trace"); fig.stats(&stex1, &phase(scope)); fig.compareWith(&stex2); fig.globalStart(theTest->startTime()); const String &figKey = fig.plot(db).key(); blob << db.include(figKey); } void SideInfo::cmpProtoStats(BlobDb &db, const PhaseInfo &phase, ProtoIntvlPtr protoPtr, const Scope &scope) { const String sfx = BlobDb::KeySuffix(scope); const StatIntvlRec &stats = phase.availStats(); const ProtoIntvlStat &protoStat = stats.*protoPtr; const double reqRate = 
protoStat.reqRate(stats.theDuration); const double repRate = protoStat.repRate(stats.theDuration); const double reqBwidth = protoStat.reqBwidth(stats.theDuration)/(1024*1024/8); const double repBwidth = protoStat.repBwidth(stats.theDuration)/(1024*1024/8); const String pfx = protoStat.id(); const String protoName = protoStat.name(); addMeasBlob(db, pfx + ".req.rate" + sfx, reqRate, "xact/sec", "offered " + protoName + " request rate"); addMeasBlob(db, pfx + ".rep.rate" + sfx, repRate, "xact/sec", "measured " + protoName + " response rate"); addMeasBlob(db, pfx + ".req.bwidth" + sfx, reqBwidth, "Mbits/sec", protoName + " request bandwidth"); addMeasBlob(db, pfx + ".rep.bwidth" + sfx, repBwidth, "Mbits/sec", protoName + " response bandwidth"); } void SideInfo::cmplProtoLoadBlob(BlobDb &db, const PhaseInfo &phase, ProtoIntvlPtr protoPtr, const Scope &scope) { const ProtoIntvlStat &protoStat = phase.availStats().*protoPtr; const String pfx = protoStat.id(); const String protoName = protoStat.name(); ReportBlob blob(BlobDb::Key(pfx + ".load", scope), ReportBlob::NilTitle); blob << XmlAttr("vprimitive", protoName + " load"); cmplProtoLoadTable(db, blob, phase, protoPtr, scope); cmplProtoLoadFigure(db, blob, phase, protoPtr, scope); { XmlTag descr("description"); XmlTextTag p1; p1.buf() << "The " << protoName << " load table " << "shows offered and measured load from " << name() << " side point of view. Offered load statistics " << "are based on the request stream. Measured load statistics " << "are based on reply messages. The 'count' column depicts the " << "number of requests or responses. "; descr << p1; XmlTextTag p2; p2.buf() << "The 'volume' column is a little bit more tricky to " << "interpret. Offered volume is " << "reply bandwidth that would have been required to support " << "offered load. This volume is computed as request rate " << "multiplied by measured mean response size. 
" << "Measured volume is the actual or measured reply bandwidth."; descr << p2; blob << descr; } db << blob; } void SideInfo::cmplProtoLoadTable(BlobDb &db, ReportBlob &parent, const PhaseInfo &phase, ProtoIntvlPtr protoPtr, const Scope &scope) { const ProtoIntvlStat &protoStat = phase.availStats().*protoPtr; const String pfx = protoStat.id(); const String protoName = protoStat.name(); ReportBlob blob(BlobDb::Key(pfx + ".load.table", scope), name() + protoName + " load table"); XmlTable table; table << XmlAttr::Int("border", 1) << XmlAttr::Int("cellspacing", 1); { XmlTableRec tr; tr << XmlTableHeading(protoName + " load"); XmlTableHeading dhr("Count"); dhr << XmlTag("br") << XmlText("(xact/sec)"); tr << dhr; XmlTableHeading bhr("Volume"); bhr << XmlTag("br") << XmlText("(Mbits/sec)"); tr << bhr; table << tr; } { XmlTableRec tr; tr << algnLeft << XmlTableHeading("offered"); XmlTableCell cnt; cnt << algnRight << db.quote(pfx + ".req.rate" + scope); tr << cnt; XmlTableCell vol; vol << algnRight << db.quote(pfx + ".req.bwidth" + scope); tr << vol; table << tr; } { XmlTableRec tr; tr << algnLeft << XmlTableHeading("measured"); XmlTableCell cnt; cnt << algnRight << db.quote(pfx + ".rep.rate" + scope); tr << cnt; XmlTableCell vol; vol << algnRight << db.quote(pfx + ".rep.bwidth" + scope); tr << vol; table << tr; } blob << table; db << blob; parent << blob; } void SideInfo::cmplProtoLoadFigure(BlobDb &db, ReportBlob &blob, const PhaseInfo &phase, ProtoIntvlPtr protoPtr, const Scope &scope) { const ProtoIntvlStat &protoStat = phase.availStats().*protoPtr; const String pfx = protoStat.id(); const String protoName = protoStat.name(); ProtoSideLoadStex stex1(pfx + ".req", protoName + " offered", protoPtr, &ProtoIntvlStat::reqRate, &ProtoIntvlStat::reqBwidth); ProtoSideLoadStex stex2(pfx + ".rep", protoName + " measured", protoPtr, &ProtoIntvlStat::repRate, &ProtoIntvlStat::repBwidth); LoadTraceFig fig; fig.configure(pfx + ".load.trace" + scope, protoName + " load trace"); fig.stats(&stex1, &phase); fig.compareWith(&stex2); fig.globalStart(theTest->startTime()); const String &figKey = fig.plot(db).key(); blob << db.include(figKey); } void SideInfo::cmplRptmFigure(BlobDb &db, const Scope &scope) { MissesStex misses("misses", "misses"); HitsStex hits("hits", "hits"); RptmTraceFig fig; fig.configure("rptm.trace" + scope, "Response times trace"); fig.stats(&misses, &phase(scope)); fig.moreStats(TheAllReps); fig.moreStats(&hits); fig.globalStart(theTest->startTime()); //const String &figKey = fig.plot(db).key(); //blob << db.include(figKey); } void SideInfo::cmplRptmVsLoadFigure(BlobDb &db, const PhaseInfo &phase, const Scope &scope) { ReportBlob blob("rptm-load" + scope, "mean response time versus response rate"); blob << XmlAttr("vprimitive", "Mean response time versus response rate"); LoadPointStex load("rep", "response rate", "xact/sec", &StatIntvlRec::repRate); MeanRptmPointStex rptm; ScatteredFig fig; fig.configure("rptm-load.scatt" + scope, "Mean response time versus response rate"); fig.stats(&load, &rptm, &phase); const String &figKey = fig.plot(db).key(); blob << db.include(figKey); db << blob; } void SideInfo::cmplHitRatioTable(BlobDb &db, const Scope &scope) { ReportBlob blob("hit.ratio" + scope, "hit ratios"); blob << XmlAttr("vprimitive", "Client Side Hit Ratios"); XmlTable table; table << XmlAttr::Int("border", 1) << XmlAttr::Int("cellspacing", 1); { XmlTableRec tr; tr << XmlTableHeading("Client Side Hit Ratios"); XmlTableHeading dhr("DHR"); dhr << XmlTag("br") << XmlText("(%)"); tr << dhr; 
XmlTableHeading bhr("BHR"); bhr << XmlTag("br") << XmlText("(%)"); tr << bhr; table << tr; } { XmlTableRec tr; tr << algnLeft << XmlTableHeading("offered"); XmlTableCell dhr; dhr << algnRight << db.include("offered.hit.ratio.obj" + scope); tr << dhr; XmlTableCell bhr; bhr << algnRight << db.include("offered.hit.ratio.byte" + scope); tr << bhr; table << tr; } { XmlTableRec tr; tr << algnLeft << XmlTableHeading("measured"); XmlTableCell dhr; dhr << algnRight << db.include("hit.ratio.obj" + scope); tr << dhr; XmlTableCell bhr; bhr << algnRight << db.include("hit.ratio.byte" + scope); tr << bhr; table << tr; } blob << table; { XmlTag descr("description"); if (name() == "client") { XmlTextTag p1; p1.buf() << "The hit ratios table shows offered and measured hit " << "ratios from " << name() << " side point of view. " << "Polygraph counts every repeated request to a cachable " << "object as an offered hit. Measured (cache) hits are " << "detected using Polygraph-specific headers. All hits are " << "counted for 'basic' transactions only (simple HTTP GET " << "requests with '200 OK' responses)."; descr << p1; XmlTextTag p2; p2.buf() << "DHR, Document Hit Ratio, is the ratio of the total " << "number of hits to the number of all basic transactions " << "(hits and misses). BHR, Byte Hit Ratio, is the ratio of " << "the total volume (a sum of response sizes) of hits to the " << "total volume of all basic transactions."; descr << p2; } else { XmlTextTag p1; p1.buf() << "The server-side hit ratios should always be zero. " << "If a request reaches a server, it is, by definition, " << "a miss."; descr << p1; } XmlParagraph p; Scope testScope = scope.oneSide("client"); testScope.addSide("server"); p << XmlText("A better way to measure hit ratio is to compare " "client- and server-side traffic. 
A hit ratio table " "based on such a comparison is available "); p << db.ptr("hit.ratio" + testScope, XmlText("elsewhere")); p << XmlText("."); descr << p; blob << descr; } cmplHrTraces(db, blob, scope); db << blob; } void SideInfo::cmplHrTraces(BlobDb &db, ReportBlob &blob, const Scope &scope) { cmplDhrTrace(db, blob, scope); cmplBhrTrace(db, blob, scope); } void SideInfo::cmplDhrTrace(BlobDb &db, ReportBlob &blob, const Scope &scope) { DhpPointStex stex1("dhp.ideal", "offered DHR", &StatIntvlRec::theIdealHR); DhpPointStex stex2("dhp.real", "measured DHR", &StatIntvlRec::theRealHR); PointTraceFig fig; fig.configure("dhr.trace" + scope, "Document hit ratio trace"); fig.stats(&stex1, &stex2, &phase(scope)); fig.globalStart(theTest->startTime()); const String &figKey = fig.plot(db).key(); blob << db.include(figKey); } void SideInfo::cmplBhrTrace(BlobDb &db, ReportBlob &blob, const Scope &scope) { BhpPointStex stex1("bhp.ideal", "offered BHR", &StatIntvlRec::theIdealHR); BhpPointStex stex2("bhp.real", "measured BHR", &StatIntvlRec::theRealHR); PointTraceFig fig; fig.configure("bhr.trace" + scope, "Byte hit ratio trace"); fig.stats(&stex1, &stex2, &phase(scope)); fig.globalStart(theTest->startTime()); const String &figKey = fig.plot(db).key(); blob << db.include(figKey); } void SideInfo::cmplConnLevelTable(BlobDb &db, const PhaseInfo &phase, const Scope &scope) { ReportBlob blob("conn.level.table" + scope, "concurrent connection level"); blob << XmlAttr("vprimitive", "Concurrent HTTP/TCP connection level table"); const StatIntvlRec &stats = phase.availStats(); XmlTable table; table << XmlAttr::Int("border", 1) << XmlAttr::Int("cellspacing", 1); { XmlTableRec tr; tr << XmlTableHeading("Connection state", 1, 2); tr << XmlTableHeading("Number of times", 2, 1); tr << XmlTableHeading("Mean concurrency level", 1, 2); table << tr; } { XmlTableRec tr; tr << XmlTableHeading("entered"); tr << XmlTableHeading("left"); table << tr; } cmplLevelTableRec(db, "conn.open.", "open", stats.theOpenLvl, scope, table); cmplLevelTableRec(db, "conn.estb.", "established", stats.theEstbLvl, scope, table); cmplLevelTableRec(db, "conn.idle.", "idle", stats.theIdleLvl, scope, table); blob << table; { XmlTag descr("description"); XmlTextTag p1; p1.buf() << "TBD."; descr << p1; blob << descr; } // XXX: move cmplConnLevelFigure(db, blob, scope); db << blob; } void SideInfo::cmplConnLevelFigure(BlobDb &db, ReportBlob &blob, const Scope &scope) { LevelStex stex1("open", "open", &StatIntvlRec::theOpenLvl); LevelStex stex2("estb", "established", &StatIntvlRec::theEstbLvl); LevelStex stex3("idle", "idle", &StatIntvlRec::theIdleLvl); LevelTraceFig fig; fig.configure("conn.level.trace" + scope, "concurrent HTTP/TCP connection level trace"); fig.stats(&stex1, &phase(scope)); fig.compareWith(&stex2); fig.compareWith(&stex3); fig.globalStart(theTest->startTime()); const String &figKey = fig.plot(db).key(); blob << db.include(figKey); } void SideInfo::cmplConnPipelineBlob(BlobDb &db, const Scope &scope) { ReportBlob blob(BlobDb::Key("pipeline", scope), "Pipelined HTTP connections"); blob << XmlAttr("vprimitive", "Pipelined HTTP connections"); cmplConnPipelineTable(db, blob, scope); cmplConnPipelineHist(db, blob, scope); cmplConnPipelineTrace(db, blob, scope); { XmlTag descr("description"); XmlTextTag p1; p1.buf() << "Connection pipelining stats are based on measurements " << "collected for pipelined HTTP connections. 
To calculate " << "pipelining probability, a connection is counted as pipelined " << "if it had pipelined (concurrent) requests " << "pending at any given moment of its lifetime."; descr << p1; XmlTextTag p2; p2.buf() << "The pipeline " << "depth varies as new requests are added to the connection and " << "old requests are satisfied by responses. The depth reported " << "her is based on the maximum pipelining depth achieved during " << "a pipelined connection lifetime. That is, the depth stats are " << "collected everytime a pipelined connection is closed, not " << "when a new request is added to or removed from the pipe."; descr << p2; blob << descr; } db << blob; } void SideInfo::cmplConnPipelineTable(BlobDb &db, ReportBlob &parent, const Scope &scope) { ReportBlob blob("conn.pipeline.table" + scope, "HTTP pipelining summary table"); XmlTable table; table << XmlAttr::Int("border", 0) << XmlAttr::Int("cellspacing", 1); { XmlTableRec tr; tr << algnLeft << XmlTableHeading("probability:"); XmlTableCell cell; cell << db.include("conn.pipeline.ratio" + scope); cell << XmlText(" or "); cell << db.include("conn.pipeline.count" + scope); cell << XmlText(" pipelined out of total "); cell << db.include("conn.count" + scope); tr << cell; table << tr; } if (phase(scope).availStats().theConnPipelineDepth.known()) { XmlTableRec tr; tr << algnLeft << XmlTableHeading("depth:"); XmlTableCell cell; cell << db.include("conn.pipeline.depth.min" + scope); cell << XmlText(" min, "); cell << db.include("conn.pipeline.depth.mean" + scope); cell << XmlText(" mean, and "); cell << db.include("conn.pipeline.depth.max" + scope); cell << XmlText(" max"); tr << cell; table << tr; } blob << table; db << blob; parent << blob; } void SideInfo::cmplConnPipelineTrace(BlobDb &db, ReportBlob &blob, const Scope &scope) { PipelineProbPointStex stex1; MeanAggrPointStex stex2("depth", "pipeline depth", "connections", &StatIntvlRec::theConnPipelineDepth); PointTraceFig fig; fig.configure("conn.pipeline.trace" + scope, "HTTP pipelining trace"); fig.stats(&stex1, &stex2, &phase(scope)); fig.globalStart(theTest->startTime()); const String &figKey = fig.plot(db).key(); blob << db.include(figKey); } void SideInfo::cmplConnPipelineHist(BlobDb &db, ReportBlob &blob, const Scope &scope) { PipelineDepthHistStex stex1; HistogramFigure fig; fig.configure("conn.pipeline.depth.histogram" + scope, "HTTP pipelining depth histogram"); fig.stats(&stex1, &phase(scope)); const String &figKey = fig.plot(db).key(); blob << db.include(figKey); } void SideInfo::cmplPopulLevelTable(BlobDb &db, const PhaseInfo &phase, const Scope &scope) { ReportBlob blob("populus.level.table" + scope, "population level"); blob << XmlAttr("vprimitive", "Population level table"); const StatIntvlRec &stats = phase.availStats(); XmlTable table; table << XmlAttr::Int("border", 1) << XmlAttr::Int("cellspacing", 1); { XmlTableRec tr; tr << XmlTableHeading("Number of agents", 2, 1); tr << XmlTableHeading("Mean population level", 1, 2); table << tr; } { XmlTableRec tr; tr << XmlTableHeading("created"); tr << XmlTableHeading("destroyed"); table << tr; } cmplLevelTableRec(db, "agent.", 0, stats.thePopulusLvl, scope, table); blob << table; { XmlTag descr("description"); XmlTextTag p1; p1.buf() << "Populus is a set of all live robot or server agents. 
" << "While alive, an agent may participate in HTTP transactions " << "or remain idle."; descr << p1; blob << descr; } // XXX: move cmplPopulLevelFigure(db, blob, scope); db << blob; } void SideInfo::cmplPopulLevelFigure(BlobDb &db, ReportBlob &blob, const Scope &scope) { LevelStex stex1("populus", "agents", &StatIntvlRec::thePopulusLvl); LevelTraceFig fig; fig.configure("populus.level.trace" + scope, "population level trace"); fig.stats(&stex1, &phase(scope)); fig.globalStart(theTest->startTime()); const String &figKey = fig.plot(db).key(); blob << db.include(figKey); } void SideInfo::cmplXactLevelTable(BlobDb &db, const PhaseInfo &phase, const Scope &scope) { ReportBlob blob("xact.level.table" + scope, "concurrent transaction level"); blob << XmlAttr("vprimitive", "Concurrent HTTP transaction level table"); const StatIntvlRec &stats = phase.availStats(); XmlTable table; table << XmlAttr::Int("border", 1) << XmlAttr::Int("cellspacing", 1); { XmlTableRec tr; tr << XmlTableHeading("Transaction state", 1, 2); tr << XmlTableHeading("Number of times", 2, 1); tr << XmlTableHeading("Mean concurrency level", 1, 2); table << tr; } { XmlTableRec tr; tr << XmlTableHeading("entered"); tr << XmlTableHeading("left"); table << tr; } cmplLevelTableRec(db, "xact.", "active", stats.theXactLvl, scope, table); cmplLevelTableRec(db, "wait.", "waiting", stats.theWaitLvl, scope, table); blob << table; { XmlTag descr("description"); XmlTextTag p1; p1.buf() << "TBD."; descr << p1; blob << descr; } // XXX: move cmplXactLevelFigure(db, blob, scope); db << blob; } void SideInfo::cmplXactLevelFigure(BlobDb &db, ReportBlob &blob, const Scope &scope) { LevelStex stex1("xact", "active", &StatIntvlRec::theXactLvl); LevelStex stex2("wait", "waiting", &StatIntvlRec::theWaitLvl); LevelTraceFig fig; fig.configure("xact.level.trace" + scope, "concurrent HTTP transaction level trace"); fig.stats(&stex1, &phase(scope)); fig.compareWith(&stex2); fig.globalStart(theTest->startTime()); const String &figKey = fig.plot(db).key(); blob << db.include(figKey); } void SideInfo::cmplLevelTableRec(BlobDb &db, const String &pfx, const String &state, const LevelStat &stats, const Scope &scope, XmlTable &table) { XmlTableRec tr; if (state) tr << algnLeft << XmlTableHeading(state); const String startedName = pfx + "started" + scope; addMeasBlob(db, startedName, stats.incCnt(), "", "started"); XmlTableCell started; started << algnRight << db.include(startedName); tr << started; const String finishedName = pfx + "finished" + scope; addMeasBlob(db, finishedName, stats.decCnt(), "", "finished"); XmlTableCell finished; finished << algnRight << db.include(finishedName); tr << finished; const String levelName = pfx + "level.mean" + scope; addMeasBlob(db, levelName, stats.mean(), "", "average level"); XmlTableCell level; level << algnRight << db.include(levelName); tr << level; table << tr; } XmlTable SideInfo::makeStreamTableHdr(const bool hasParts) { XmlTable table; table << XmlAttr::Int("border", 1) << XmlAttr::Int("cellspacing", 1); { XmlTableRec tr; tr << XmlTableHeading("Stream", 1, 2); tr << XmlTableHeading("Contribution", 2, 1); tr << XmlTableHeading("Rates", 2, 1); tr << XmlTableHeading("Totals", 2, 1); if (hasParts) tr << XmlTableHeading("Parts", 1, 1); table << tr; } { XmlTableRec tr; XmlTableHeading cCnt("Count"); cCnt << XmlTag("br") << XmlText("(%)"); tr << cCnt; XmlTableHeading cVol("Volume"); cVol << XmlTag("br") << XmlText("(%)"); tr << cVol; XmlTableHeading rCnt("Count"); rCnt << XmlTag("br") << XmlText("(xact/sec)"); tr << rCnt; 
XmlTableHeading rVol("Volume"); rVol << XmlTag("br") << XmlText("(Mbits/sec)"); tr << rVol; XmlTableHeading tCnt("Count"); tCnt << XmlTag("br") << XmlText("(xact,M)"); tr << tCnt; XmlTableHeading tVol("Volume"); tVol << XmlTag("br") << XmlText("(Gbyte)"); tr << tVol; if (hasParts) { XmlTableHeading pCnt("Count"); pCnt << XmlTag("br") << XmlText("(xact/sec)"); tr << pCnt; } table << tr; } return table; } void SideInfo::cmplReplyStreamTable(BlobDb &db, const PhaseInfo &phase, const Scope &scope) { ReportBlob blob("reply_stream.table" + scope, "reply traffic stream"); blob << XmlAttr("vprimitive", "Reply traffic stream table"); StatTable table(makeStreamTableHdr()); for (int s = 0; s < TheReplyStex.count(); ++s) cmplStreamTableRec(db, table, *TheReplyStex[s], phase, scope, TheAllReps); blob << table; { XmlTag descr("description"); XmlTextTag p1; p1.buf() << "The 'Reply stream' table provides count and volume " << "statistics for many classes of transactions and for " << "so-called pages. The " << "'Contribution' columns show count- and volume-based " << "portions of all transactions. The 'Rates' columns show " << "throughput and bandwidth measurements. The 'Totals' " << "columns contain the total number of transactions " << "and the total volume (a sum of individual response " << "sizes) for each stream."; descr << p1; XmlTextTag p2; p2.buf() << "Note that some streams are a combination of other " << "streams. For example, the 'all ims' stream contains " << "transactions with If-Modified-Since requests that resulted in " << "either '200 OK' (the 'ims/304' stream) or " << "'304 Not Modified' (the 'ims/304' stream) responses. "; descr << p2; XmlTextTag p3; p3.buf() << "Many combination streams, such as 'all content types' " << "or 'hits and misses' stream, contribute less than 100% " << "because properties like content type or hit status are " << "distinguished for 'basic' transactions only. A basic " << "transactions is a simple HTTP GET request resulted in " << "a '200 OK' response. Various special transactions such " << "as IMS or aborts do not belong to the 'basic' category."; descr << p3; XmlParagraph p4; p4 << XmlText("The "); p4 << db.ptr("reply_object.table" + scope, XmlText("'Reply object' table")); p4 << XmlText(" contains corresponding response time and size " "statistics for streams."); descr << p4; XmlParagraph p5; p5 << XmlText("A similar table covering request messages is available "); p5 << db.ptr("request_stream.table" + scope, XmlText("elsewhere")); p5 << XmlText("."); descr << p5; blob << descr; } db << blob; } void SideInfo::cmplRequestStreamTable(BlobDb &db, const PhaseInfo &phase, const Scope &scope) { ReportBlob blob("request_stream.table" + scope, "request traffic stream"); blob << XmlAttr("vprimitive", "Request traffic stream table"); StatTable table(makeStreamTableHdr()); Array a; SortStexes(phase, TheRequestStex, a); for (int s = 0; s < a.count(); ++s) cmplStreamTableRec(db, table, *a[s], phase, scope, TheAllReqs); blob << table; { XmlTag descr("description"); XmlTextTag p1; p1.buf() << "The 'Request stream' table provides count and volume " << "statistics for requests. The " << "'Contribution' columns show count- and volume-based " << "portions of all transactions. The 'Rates' columns show " << "throughput and bandwidth measurements. 
The 'Totals' " << "columns contain the total number of transactions " << "and the total volume (a sum of individual request " << "sizes) for each stream."; descr << p1; XmlTextTag p2; p2.buf() << "Note that some streams are a combination of other " << "streams. For example, the 'all request content types' stream contains " << "requests with different content types."; descr << p2; XmlTextTag p3; p3.buf() << "Note that only request messages containing " "bodies contribute to these stats at the moment."; descr << p3; XmlParagraph p4; p4 << XmlText("The "); p4 << db.ptr("request_object.table" + scope, XmlText("'Request object' table")); p4 << XmlText(" contains corresponding response time and size " "statistics for streams."); descr << p4; XmlParagraph p5; p5 << XmlText("A similar table covering reply messages is available "); p5 << db.ptr("reply_stream.table" + scope, XmlText("elsewhere")); p5 << XmlText("."); descr << p5; blob << descr; } db << blob; } void SideInfo::cmplCompoundReplyStreamTable(BlobDb &db, const PhaseInfo &phase, const Scope &scope) { ReportBlob blob("compound.reply_stream.table" + scope, "compound reply traffic stream"); blob << XmlAttr("vprimitive", "Compound reply traffic stream table"); StatTable table(makeStreamTableHdr(true)); for (int s = 0; s < TheCompoundReplyStex.count(); ++s) cmplStreamTableRec(db, table, *TheCompoundReplyStex[s], phase, scope, TheAllReps); blob << table; { XmlTag descr("description"); XmlTextTag p1; p1.buf() << "A compound transaction consists of related " "transactions, working on a single goal such as " "authenticating a transfer. While individual " "transactions consist of a single request/response " "pair, compound transactions usually have several " "such pairs. Isolated transactions are individual " "transactions that do not belong to any compound " "transaction."; descr << p1; XmlTextTag p2; p2.buf() << "Stats in the 'Compound reply stream' table " "provides count and volume statistics for responses. " "The 'Contribution' columns show count- and volume-based " "portions of all compound transactions (isolated " "transaction not included). The 'Rates' columns show " "throughput and bandwidth measurements. The 'Totals' " "columns contain the total number of transactions and the " "total volume (a sum of individual request sizes) for " "each stream. 
The 'Parts' column shows individual " "transactions rate in a compound transaction."; descr << p2; XmlParagraph p3; p3 << XmlText("The "); p3 << db.ptr("compound.reply_object.table" + scope, XmlText("'Compound reply object' table")); p3 << XmlText(" contains corresponding response time and size " "statistics for streams."); descr << p3; XmlParagraph p4; p4 << XmlText("A similar table covering request messages is available "); p4 << db.ptr("compound.request_stream.table" + scope, XmlText("elsewhere")); p4 << XmlText("."); descr << p4; blob << descr; } db << blob; } void SideInfo::cmplCompoundRequestStreamTable(BlobDb &db, const PhaseInfo &phase, const Scope &scope) { ReportBlob blob("compound.request_stream.table" + scope, "compound request traffic stream"); blob << XmlAttr("vprimitive", "Compound request traffic stream table"); StatTable table(makeStreamTableHdr(true)); for (int s = 0; s < TheCompoundRequestStex.count(); ++s) cmplStreamTableRec(db, table, *TheCompoundRequestStex[s], phase, scope, TheAllReqs); blob << table; { XmlTag descr("description"); XmlTextTag p1; p1.buf() << "A compound transaction consists of related " "transactions, working on a single goal such as " "authenticating a transfer. While individual " "transactions consist of a single request/response " "pair, compound transactions usually have several " "such pairs. Isolated transactions are individual " "transactions that do not belong to any compound " "transaction."; descr << p1; XmlTextTag p2; p2.buf() << "Stats in the 'Compound request stream' table " "provides count and volume statistics for requests. " "The 'Contribution' columns show count- and volume-based " "portions of all compound transactions (isolated " "transaction not included). The 'Rates' columns show " "throughput and bandwidth measurements. The 'Totals' " "columns contain the total number of transactions and the " "total volume (a sum of individual request sizes) for " "each stream. The 'Parts' column shows individual " "transactions rate in a compound transaction."; descr << p2; XmlParagraph p3; p3 << XmlText("The "); p3 << db.ptr("compound.request_object.table" + scope, XmlText("'Compound request object' table")); p3 << XmlText(" contains corresponding request time and size " "statistics for streams."); descr << p3; XmlParagraph p4; p4 << XmlText("A similar table covering reply messages is available "); p4 << db.ptr("compound.reply_stream.table" + scope, XmlText("elsewhere")); p4 << XmlText("."); descr << p4; blob << descr; } db << blob; } void SideInfo::cmplAuthStreamTable(BlobDb &db, const PhaseInfo &phase, const Scope &scope) { ReportBlob blob("auth.stream.table" + scope, "authentication traffic stream"); blob << XmlAttr("vprimitive", "Authentication traffic stream table"); StatTable table(makeStreamTableHdr()); for (int s = 0; s < TheAuthStex.count(); ++s) cmplStreamTableRec(db, table, *TheAuthStex[s], phase, scope, TheAllReps); blob << table; { XmlTag descr("description"); XmlTextTag p1; p1.buf() << "The 'Authentication stream' table provides count " "and volume statistics for authentication-related " "transactions. The 'Contribution' columns show count- " "and volume-based portions of all transactions. The " "'Rates' columns show throughput and bandwidth " "measurements. The 'Totals' columns contain the total " "number of transactions and the total volume (a sum of " "individual response sizes) for each stream."; descr << p1; XmlTextTag p2; p2.buf() << "Note that some streams are a combination of other " "streams. 
For example, the 'all auth-ing' stream contains " "auth-ing transactions with any authentication scheme."; descr << p2; XmlParagraph p3; p3 << XmlText("The "); p3 << db.ptr("auth.object.table" + scope, XmlText("'Authentication object' table")); p3 << XmlText(" contains corresponding response time and size " "statistics for streams."); descr << p3; blob << descr; } db << blob; } void SideInfo::cmplReplyStatusStreamTable(BlobDb &db, const PhaseInfo &phase, const Scope &scope) { ReportBlob blob("reply_status.stream.table" + scope, "reply status traffic stream"); blob << XmlAttr("vprimitive", "HTTP reply status traffic stream table"); StatTable table(makeStreamTableHdr()); Array a; SortStexes(phase, TheReplyStatusStex, a); for (int s = 0; s < a.count(); ++s) cmplStreamTableRec(db, table, *a[s], phase, scope, TheReplyStatusStex.last()); blob << table; { XmlTag descr("description"); XmlTextTag p1; p1.buf() << "The 'HTTP reply status stream' table provides " "count and volume statistics for HTTP responses with " "different status codes. The 'Contribution' columns " "show count- and volume-based portions of all replies. " "The 'Rates' columns show throughput and bandwidth " "measurements. The 'Totals' columns contain the total " "number of transactions and the total volume (a sum of " "individual response sizes) for each stream. 'Other " "HTTP reply status code' stream contains transactions " "with errors, e.g. parsing or connection failure, " "where response status code is unknown."; descr << p1; XmlTextTag p2; p2.buf() << "The 'All HTTP reply status codes' stream contains " "statistics for responses with all status codes."; descr << p2; XmlParagraph p3; p3 << XmlText("The "); p3 << db.ptr("reply_status.object.table" + scope, XmlText("'HTTP reply status codes' table")); p3 << XmlText(" contains corresponding response time and size statistics."); descr << p3; blob << descr; } db << blob; } // addMeasBlob() calls should be moved out if we want to support partial reports void SideInfo::cmplStreamTableRec(BlobDb &db, StatTable &table, const Stex &stex, const PhaseInfo &phase, const Scope &scope, const Stex *const topStex) { const String pfx = "stream." 
+ stex.key(); const String ratioCountName = BlobDb::Key(pfx + ".ratio.obj", scope); const String ratioVolumeName = BlobDb::Key(pfx + ".ratio.byte", scope); const String rateCountName = BlobDb::Key(pfx + ".rate", scope); const String rateVolumeName = BlobDb::Key(pfx + ".bwidth", scope); const String totalCountName = BlobDb::Key(pfx + ".size.count", scope); const String totalVolumeName = BlobDb::Key(pfx + ".size.sum", scope); const String partsRateCountName = BlobDb::Key(pfx + ".parts.rate", scope); const String ratioCountTitle = "contribution by count"; const String ratioVolumeTitle = "contribution by volume"; const String rateCountTitle = "transaction rate"; const String rateVolumeTitle = "transaction bandwidth"; const String totalCountTitle = "total transaction count"; const String totalVolumeTitle = "total transaction volume"; const Time duration = phase.availStats().theDuration; const AggrStat *const partsStat = stex.partsStat(phase); bool known(false); if (const TmSzStat *const recStats = stex.aggr(phase)) { const AggrStat &cstats = recStats->size(); known = cstats.known(); const double rateCountVal = Ratio(cstats.count(), duration.secd()); const double rateVolumeVal = Ratio(cstats.sum()/1024/1024*8, duration.secd()); const double totalCountVal = cstats.count(); const double totalVolumeVal = cstats.sum(); if ((stex.parent() || &stex == topStex) && topStex->aggr(phase)) { // compute contribution towards "all responses" const AggrStat all = topStex->aggr(phase)->size(); const double partsCount = stex.meanPartsCount(phase); addMeasBlob(db, ratioCountName, Percent(totalCountVal * partsCount, all.count()), "%", ratioCountTitle); addMeasBlob(db, ratioVolumeName, Percent(totalVolumeVal, all.sum()), "%", ratioVolumeTitle); } else { addNaMeasBlob(db, ratioCountName, ratioCountTitle); addNaMeasBlob(db, ratioVolumeName, ratioVolumeTitle); } addMeasBlob(db, rateCountName, rateCountVal, "/sec", rateCountTitle); addMeasBlob(db, rateVolumeName, rateVolumeVal, "Mbits/sec", rateVolumeTitle); addMeasBlob(db, totalCountName, totalCountVal/1e6, "M", totalCountTitle); addMeasBlob(db, totalVolumeName, totalVolumeVal/(1024*1024*1024), "GByte", totalVolumeTitle); } else { addNaMeasBlob(db, ratioCountName, ratioCountTitle); addNaMeasBlob(db, ratioVolumeName, ratioVolumeTitle); addNaMeasBlob(db, rateCountName, rateCountTitle); addNaMeasBlob(db, rateVolumeName, rateVolumeTitle); addNaMeasBlob(db, totalCountName, totalCountTitle); addNaMeasBlob(db, totalVolumeName, totalVolumeTitle); } if (partsStat) { known = known || partsStat->known(); const String partsRateCountTitle = "parts rate"; const double partsRateCountVal = Ratio(partsStat->sum(), duration.secd()); addMeasBlob(db, partsRateCountName, partsRateCountVal, "/sec", partsRateCountTitle); } if (!known) { table.addUnknown(stex); return; } XmlTableRec tr; XmlTableHeading th; th << db.ptr("object." 
+ stex.key() + scope, XmlText(stex.name())); th << algnLeft; tr << th; XmlTableCell ratioCountCell; ratioCountCell << algnRight << db.quote(ratioCountName); tr << ratioCountCell; XmlTableCell ratioVolumeCell; ratioVolumeCell << algnRight << db.quote(ratioVolumeName); tr << ratioVolumeCell; XmlTableCell rateCountCell; rateCountCell << algnRight << db.quote(rateCountName); tr << rateCountCell; XmlTableCell rateVolumeCell; rateVolumeCell << algnRight << db.quote(rateVolumeName); tr << rateVolumeCell; XmlTableCell totalCountCell; totalCountCell << algnRight << db.quote(totalCountName); tr << totalCountCell; XmlTableCell totalVolumeCell; totalVolumeCell << algnRight << db.quote(totalVolumeName); tr << totalVolumeCell; if (partsStat) { XmlTableCell partsRateCountCell; partsRateCountCell << algnRight << db.quote(partsRateCountName); tr << partsRateCountCell; } table << tr; } XmlTable SideInfo::makeObjectTableHdr(const bool hasParts) { XmlTable table; table << XmlAttr::Int("border", 1) << XmlAttr::Int("cellspacing", 1); { XmlTableRec tr; tr << XmlTableHeading("Object", 1, 2); tr << XmlTableHeading("Response time (msec)", 3, 1); tr << XmlTableHeading("Size (KBytes)", 3, 1); if (hasParts) tr << XmlTableHeading("Parts", 3, 1); table << tr; } { XmlTableRec tr; XmlNodes nodes; nodes << XmlTableHeading("Min"); nodes << XmlTableHeading("Mean"); nodes << XmlTableHeading("Max"); tr << nodes; tr << nodes; if (hasParts) tr << nodes; table << tr; } return table; } void SideInfo::cmplReplyObjectTable(BlobDb &db, const PhaseInfo &phase, const Scope &scope) { { ReportBlob blob(BlobDb::Key("reply_object.table", scope), "response kind stats"); blob << XmlAttr("vprimitive", "Reply object kind table"); StatTable table(makeObjectTableHdr()); for (int s = 0; s < TheReplyStex.count(); ++s) cmplObjectTableRec(db, table, *TheReplyStex[s], phase, scope); blob << table; { XmlTag descr("description"); XmlTextTag p1; p1.buf() << "The 'Reply object' table provides response time and response " << "size statistics for many classes of transactions and " << "for so-called pages."; descr << p1; XmlTextTag p2; p2.buf() << "Note that some classes are a combination of other " << "classes. For example, the 'all ims' class contains " << "transactions with If-Modified-Since requests that resulted in " << "either '200 OK' (the 'ims/200' class) or " << "'304 Not Modified' (the 'ims/304' class) responses. "; descr << p2; XmlParagraph p3; p3 << XmlText("Some statistics may not be available because either " "no objects of the corresponding class were seen during the " "test or no facilities to collect the stats exist for " "the class. 
The former can be verified using a "); p3 << db.ptr("reply_stream.table" + scope, XmlText("'Reply stream' table")); p3 << XmlText("."); descr << p3; XmlParagraph p4; p4 << XmlText("A similar table covering request messages is available "); p4 << db.ptr("request_object.table" + scope, XmlText("elsewhere")); p4 << XmlText("."); descr << p4; blob << descr; } db << blob; } } void SideInfo::cmplRequestObjectTable(BlobDb &db, const PhaseInfo &phase, const Scope &scope) { { ReportBlob blob(BlobDb::Key("request_object.table", scope), "request kind stats"); blob << XmlAttr("vprimitive", "Request object kind table"); StatTable table(makeObjectTableHdr()); for (int s = 0; s < TheRequestStex.count(); ++s) cmplObjectTableRec(db, table, *TheRequestStex[s], phase, scope); blob << table; { XmlTag descr("description"); XmlTextTag p1; p1.buf() << "The 'Request object' table provides time and " << "size statistics for requests."; descr << p1; XmlTextTag p2; p2.buf() << "Note that some classes are a combination of other " << "classes. For example, the 'all request content types' class contains " << "requests with different content type."; descr << p2; XmlTextTag p3; p3.buf() << "Note that only request messages containing " "bodies contribute to these stats at the moment."; descr << p3; XmlParagraph p4; p4 << XmlText("Some statistics may not be available because either " "no objects of the corresponding class were seen during the " "test or no facilities to collect the stats exist for " "the class. The former can be verified using a "); p4 << db.ptr("request_stream.table" + scope, XmlText("'Request stream' table")); p4 << XmlText("."); descr << p4; XmlParagraph p5; p5 << XmlText("A similar table covering reply messages is available "); p5 << db.ptr("reply_object.table" + scope, XmlText("elsewhere")); p5 << XmlText("."); descr << p5; blob << descr; } db << blob; } } void SideInfo::cmplCompoundReplyObjectTable(BlobDb &db, const PhaseInfo &phase, const Scope &scope) { ReportBlob blob(BlobDb::Key("compound.reply_object.table", scope), "compound response kind stats"); blob << XmlAttr("vprimitive", "Compound reply object kind table"); StatTable table(makeObjectTableHdr(true)); for (int s = 0; s < TheCompoundReplyStex.count(); ++s) cmplObjectTableRec(db, table, *TheCompoundReplyStex[s], phase, scope); blob << table; { XmlTag descr("description"); XmlTextTag p1; p1.buf() << "A compound transaction consists of related " "transactions, working on a single goal such as " "authenticating a transfer. While individual " "transactions consist of a single request/response " "pair, compound transactions usually have several " "such pairs. Isolated transactions are individual " "transactions that do not belong to any compound " "transaction."; descr << p1; XmlTextTag p2; p2.buf() << "Stats in the 'Compound reply object' table " "provides time and size statistics for responses. " "Compound transaction response time is time from " "the start of the first transaction until the end of " "the last one. Request (response) size is the total " "size of all individual requests (responses) in a " "compound transaction. 
The 'Parts' column shows " "the number of individual transactions in a compound " "transaction."; descr << p2; XmlParagraph p3; p3 << XmlText("The "); p3 << db.ptr("compound.reply_stream.table" + scope, XmlText("'Compound reply stream' table")); p3 << XmlText(" contains corresponding stream statistics."); descr << p3; XmlParagraph p4; p4 << XmlText("A similar table covering request messages is available "); p4 << db.ptr("compound.request_object.table" + scope, XmlText("elsewhere")); p4 << XmlText("."); descr << p4; blob << descr; } db << blob; } void SideInfo::cmplCompoundRequestObjectTable(BlobDb &db, const PhaseInfo &phase, const Scope &scope) { ReportBlob blob(BlobDb::Key("compound.request_object.table", scope), "compound request kind stats"); blob << XmlAttr("vprimitive", "Compound request object kind table"); StatTable table(makeObjectTableHdr(true)); for (int s = 0; s < TheCompoundRequestStex.count(); ++s) cmplObjectTableRec(db, table, *TheCompoundRequestStex[s], phase, scope); blob << table; { XmlTag descr("description"); XmlTextTag p1; p1.buf() << "A compound transaction consists of related " "transactions, working on a single goal such as " "authenticating a transfer. While individual " "transactions consist of a single request/response " "pair, compound transactions usually have several " "such pairs. Isolated transactions are individual " "transactions that do not belong to any compound " "transaction."; descr << p1; XmlTextTag p2; p2.buf() << "Stats in the 'Compound request object' table " "provides time and size statistics for requests. " "Compound transaction response time is time from " "the start of the first transaction until the end of " "the last one. Request (response) size is the total " "size of all individual requests (responses) in a " "compound transaction. The 'Parts' column shows " "the number of individual transactions in a compound " "transaction."; descr << p2; XmlParagraph p3; p3 << XmlText("The "); p3 << db.ptr("compound.request_stream.table" + scope, XmlText("'Compound request stream' table")); p3 << XmlText(" contains corresponding stream statistics."); descr << p3; XmlParagraph p4; p4 << XmlText("A similar table covering response messages is available "); p4 << db.ptr("compound.reply_object.table" + scope, XmlText("elsewhere")); p4 << XmlText("."); descr << p4; blob << descr; } db << blob; } void SideInfo::cmplAuthObjectTable(BlobDb &db, const PhaseInfo &phase, const Scope &scope) { ReportBlob blob(BlobDb::Key("auth.object.table", scope), "authentication kind stats"); blob << XmlAttr("vprimitive", "Authentication object kind table"); StatTable table(makeObjectTableHdr()); for (int s = 0; s < TheAuthStex.count(); ++s) cmplObjectTableRec(db, table, *TheAuthStex[s], phase, scope); blob << table; { XmlTag descr("description"); XmlTextTag p1; p1.buf() << "The 'Authentication object' table provides response " "time and response size statistics authentication-related " "tansactions."; descr << p1; XmlTextTag p2; p2.buf() << "Note that some streams are a combination of other " "streams. For example, the 'all auth-ing' stream contains " "auth-ing transactions with any authentication scheme."; descr << p2; XmlParagraph p3; p3 << XmlText("Some statistics may not be available because either " "no objects of the corresponding class were seen during the " "test or no facilities to collect the stats exist for " "the class. 
The former can be verified using a "); p3 << db.ptr("auth.stream.table" + scope, XmlText("'Authentication stream' table")); p3 << XmlText("."); descr << p3; blob << descr; } db << blob; } void SideInfo::cmplReplyStatusObjectTable(BlobDb &db, const PhaseInfo &phase, const Scope &scope) { ReportBlob blob(BlobDb::Key("reply_status.object.table", scope), "reply status code stats"); blob << XmlAttr("vprimitive", "HTTP reply status codes table"); StatTable table(makeObjectTableHdr()); for (int s = 0; s < TheReplyStatusStex.count(); ++s) cmplObjectTableRec(db, table, *TheReplyStatusStex[s], phase, scope); blob << table; { XmlTag descr("description"); XmlTextTag p1; p1.buf() << "The 'HTTP reply status codes' table provides " "response time and response size statistics for " "HTTP transactions with different status codes. " "'Other HTTP reply status code' stream contains " "transactions with errors, e.g. parsing or connection " "failure, where response status code is unknown."; descr << p1; XmlTextTag p2; p2.buf() << "The 'All HTTP reply status codes' stream contains " "statistics for responses with all status codes."; descr << p2; XmlParagraph p3; p3 << XmlText("See also: "); p3 << db.ptr("reply_status.stream.table" + scope, XmlText("'HTTP reply status stream' table")); p3 << XmlText("."); descr << p3; blob << descr; } db << blob; } void SideInfo::cmplObjectTableRec(BlobDb &db, StatTable &table, const Stex &stex, const PhaseInfo &phase, const Scope &scope) { const String pfx = "object." + stex.key(); const String rptmMinName = BlobDb::Key(pfx + ".rptm.min", scope); const String rptmMeanName = BlobDb::Key(pfx + ".rptm.mean", scope); const String rptmMaxName = BlobDb::Key(pfx + ".rptm.max", scope); const String sizeMinName = BlobDb::Key(pfx + ".size.min", scope); const String sizeMeanName = BlobDb::Key(pfx + ".size.mean", scope); const String sizeMaxName = BlobDb::Key(pfx + ".size.max", scope); const String partsMinName = BlobDb::Key(pfx + ".parts.min", scope); const String partsMeanName = BlobDb::Key(pfx + ".parts.mean", scope); const String partsMaxName = BlobDb::Key(pfx + ".parts.max", scope); const TmSzStat *cstats = stex.aggr(phase); const AggrStat *const partsStat = stex.partsStat(phase); bool known(false); { const String rptmMinTitle = "minimum response time"; const String rptmMeanTitle = "mean response time"; const String rptmMaxTitle = "maximum response time"; if (cstats && cstats->time().known()) { known = true; const AggrStat &time = cstats->time(); addMeasBlob(db, rptmMinName, time.min(), "msec", rptmMinTitle); addMeasBlob(db, rptmMeanName, time.mean(), "msec", rptmMeanTitle); addMeasBlob(db, rptmMaxName, time.max(), "msec", rptmMaxTitle); } else { addNaMeasBlob(db, rptmMinName, rptmMinTitle); addNaMeasBlob(db, rptmMeanName, rptmMeanTitle); addNaMeasBlob(db, rptmMaxName, rptmMaxTitle); } } { const String sizeMinTitle = "minimum size"; const String sizeMeanTitle = "mean size"; const String sizeMaxTitle = "maximum size"; if (cstats && cstats->size().known()) { known = true; const AggrStat &size = cstats->size(); addMeasBlob(db, sizeMinName, size.min()/1024., "KBytes", sizeMinTitle); addMeasBlob(db, sizeMeanName, size.mean()/1024., "KBytes", sizeMeanTitle); addMeasBlob(db, sizeMaxName, size.max()/1024., "KBytes", sizeMaxTitle); } else { addNaMeasBlob(db, sizeMinName, sizeMinTitle); addNaMeasBlob(db, sizeMeanName, sizeMeanTitle); addNaMeasBlob(db, sizeMaxName, sizeMaxTitle); } if (partsStat) { const String partsMinTitle = "minimum count"; const String partsMeanTitle = "mean count"; const 
String partsMaxTitle = "maximum count"; if (partsStat->known()) { known = true; addMeasBlob(db, partsMinName, partsStat->min(), "xacts", partsMinTitle); addMeasBlob(db, partsMeanName, partsStat->mean(), "xacts", partsMeanTitle); addMeasBlob(db, partsMaxName, partsStat->max(), "xacts", partsMaxTitle); } else { addNaMeasBlob(db, partsMinName, partsMinTitle); addNaMeasBlob(db, partsMeanName, partsMeanTitle); addNaMeasBlob(db, partsMaxName, partsMaxTitle); } } } if (!known) { table.addUnknown(stex); return; } XmlTableRec tr; XmlTableHeading th; th << db.ptr(pfx + scope, XmlText(stex.name())); th << algnLeft; tr << th; XmlTableCell rptmMinCell; rptmMinCell << algnRight << db.quote(rptmMinName); tr << rptmMinCell; XmlTableCell rptmMeanCell; rptmMeanCell << algnRight << db.quote(rptmMeanName); tr << rptmMeanCell; XmlTableCell rptmMaxCell; rptmMaxCell << algnRight << db.quote(rptmMaxName); tr << rptmMaxCell; XmlTableCell sizeMinCell; sizeMinCell << algnRight << db.quote(sizeMinName); tr << sizeMinCell; XmlTableCell sizeMeanCell; sizeMeanCell << algnRight << db.quote(sizeMeanName); tr << sizeMeanCell; XmlTableCell sizeMaxCell; sizeMaxCell << algnRight << db.quote(sizeMaxName); tr << sizeMaxCell; if (partsStat) { XmlTableCell partsMinCell; partsMinCell << algnRight << db.quote(partsMinName); tr << partsMinCell; XmlTableCell partsMeanCell; partsMeanCell << algnRight << db.quote(partsMeanName); tr << partsMeanCell; XmlTableCell partsMaxCell; partsMaxCell << algnRight << db.quote(partsMaxName); tr << partsMaxCell; } table << tr; } void SideInfo::cmplValidationTable(BlobDb &db, const PhaseInfo &phase, const Scope &scope) { const String pfx = BlobDb::Key("validation.table", scope); ReportBlob blob(pfx, "validation effectiveness"); blob << XmlAttr("vprimitive", "Validation effectiveness table"); XmlTable table; table << XmlAttr::Int("border", 1) << XmlAttr::Int("cellspacing", 1); { XmlTableRec tr; tr << XmlTableHeading("Validation method"); XmlTableHeading cnt("Useful count"); cnt << XmlTag("br") << XmlText("(%)"); tr << cnt; XmlTableHeading vol("Useful volume"); vol << XmlTag("br") << XmlText("(%)"); tr << vol; table << tr; } cmplValidationTableRec(db, table, *TheUsefulProxyValidation, phase, scope, pfx + ".proxy_validation", "Proxy validation"); blob << table; { XmlTag descr("description"); XmlTextTag p1; p1.buf() << "The 'Validation effectiveness' table shows useful " << "request ratios for various validation methods. " << "Polygraph counts every validation request that " << "results in bodyless response as useless. 
Validation " << "requests that result in response with body are " << "useful."; descr << p1; blob << descr; } db << blob; } void SideInfo::cmplValidationTableRec(BlobDb &db, XmlTable &table, const Stex &stex, const PhaseInfo &phase, const Scope &scope, const String &pfx, const String &name) { Assert(stex.parent()); const String ratioCountName = BlobDb::Key(pfx + ".ratio.obj", scope); const String ratioVolumeName = BlobDb::Key(pfx + ".ratio.byte", scope); const String ratioCountTitle = "contribution by count"; const String ratioVolumeTitle = "contribution by volume"; if (stex.aggr(phase) && stex.parent()->aggr(phase)) { const AggrStat &cstats = stex.aggr(phase)->size(); const double totalCountVal = cstats.count(); const double totalVolumeVal = cstats.sum(); const AggrStat all = stex.parent()->aggr(phase)->size(); addMeasBlob(db, ratioCountName, Percent(totalCountVal, all.count()), "%", ratioCountTitle); addMeasBlob(db, ratioVolumeName, Percent(totalVolumeVal, all.sum()), "%", ratioVolumeTitle); } else { addNaMeasBlob(db, ratioCountName, ratioCountTitle); addNaMeasBlob(db, ratioVolumeName, ratioVolumeTitle); } XmlTableRec tr; tr << algnLeft << XmlTableHeading(name); XmlTableCell ratioCountCell; ratioCountCell << algnRight << db.quote(ratioCountName); tr << ratioCountCell; XmlTableCell ratioVolumeCell; ratioVolumeCell << algnRight << db.quote(ratioVolumeName); tr << ratioVolumeCell; table << tr; } void SideInfo::cmplErrorTable(BlobDb &db, const PhaseInfo &phase, const Scope &scope) { ReportBlob blob(BlobDb::Key("errors.table", scope), "error stats"); blob << XmlAttr("vprimitive", "Errors"); ErrorStat::Index idx; if (phase.hasStats() && phase.stats().theErrors.index(idx)) { XmlParagraph p; XmlText text; text.buf() << "The total of " << phase.stats().theErrors.count() << " errors detected. Out of those errors, "; p << text << db.include("xact.error.count" + scope); p << XmlText(" or ") << db.include("xact.error.ratio" + scope); p << XmlText(" of all transactions were classified as transaction errors."); blob << p; XmlTable table; table << XmlAttr::Int("border", 1) << XmlAttr::Int("cellspacing", 1); { XmlTableRec tr; tr << XmlTableHeading("Error"); tr << XmlTableHeading("Count"); tr << XmlTableHeading("Contribution (%)"); table << tr; } for (int i = 0; i < idx.count(); ++i) cmplErrorTableRec(db, table, phase.stats().theErrors, *idx[i], scope); blob << table; { XmlTag descr("description"); XmlTextTag p1; p1.buf() << "The 'Errors' table shows detected errors. 
For each " << "error type, the number of errors and their contribution towards " << "total error count are shown."; descr << p1; blob << descr; } } else if (phase.hasStats()) { blob << XmlTextTag("No errors detected in the given scope."); } else { XmlParagraph p; p << XmlText("The total of ") << db.include("xact.error.count" + scope) << XmlText(" or ") << db.include("xact.error.ratio" + scope); p << XmlText(" transaction errors detected."); blob << p; } db << blob; } void SideInfo::cmplErrorTableRec(BlobDb &, XmlTable &table, const ErrorStat &errors, const ErrorRec &error, const Scope &) { XmlTableRec tr; XmlTableHeading th; XmlText tht; error.print(tht.buf()); th << algnLeft; th << tht; tr << th; XmlTableCell countCell; XmlText countText; countText.buf() << error.count(); countCell << algnRight << countText; tr << countCell; XmlTableCell contribCell; XmlText contribText; contribText.buf() << Percent(error.count(), errors.count()); contribCell << algnRight << contribText; tr << contribCell; table << tr; } void SideInfo::cmplCookieTable(BlobDb &db, const PhaseInfo &phase, const Scope &scope) { const String pfx = BlobDb::Key("cookie.table", scope); ReportBlob blob(pfx, "cookie stats"); blob << XmlAttr("vprimitive", "HTTP Cookies table"); { XmlTable table; table << XmlAttr::Int("border", 1) << XmlAttr::Int("cellspacing", 1); { XmlTableRec tr; tr << XmlTableHeading("Stream", 1, 2); XmlTableHeading cnt("#Messages with cookies", 2, 1); tr << cnt; XmlTableHeading cookies("#Cookies in messages", 3, 1); tr << cookies; XmlTableHeading total("Total #cookies", 1, 2); tr << total; table << tr; } { XmlTableRec tr; XmlTableHeading absolute("Absolute"); tr << absolute; XmlTableHeading relative("Relative"); tr << relative; XmlTableHeading min("Min"); tr << min; XmlTableHeading mean("Mean"); tr << mean; XmlTableHeading max("Max"); tr << max; table << tr; } { CookiesStex stex("cookie.sent", "Sent cookies", &StatPhaseRec::theCookiesSent); cmplCookieTableRec(db, table, stex, *TheAllReqs, phase, scope, pfx + ".sent", "Sent"); } { CookiesStex stex("cookie.recv", "Received cookies", &StatPhaseRec::theCookiesRecv); cmplCookieTableRec(db, table, stex, *TheAllReps, phase, scope, pfx + ".recv", "Received"); } blob << table; } if (scope.hasSide("client")) { XmlTable table; table << XmlAttr::Int("border", 0); { const String name(BlobDb::Key(pfx + ".purged.fresh", scope)); const String title("fresh cookies evicted:"); XmlTableRec tr; tr << algnLeft << XmlTableHeading(title); addMeasBlob(db, name, phase.stats().theCookiesPurgedFresh, "cookie", title); XmlTableCell cell; cell << db.quote(name); tr << cell; table << tr; } { const String name(BlobDb::Key(pfx + ".purged.stale", scope)); const String title("stale cookies evicted:"); XmlTableRec tr; tr << algnLeft << XmlTableHeading(title); addMeasBlob(db, name, phase.stats().theCookiesPurgedStale, "cookie", title); XmlTableCell cell; cell << db.quote(name); tr << cell; table << tr; } { const String name(BlobDb::Key(pfx + ".updated", scope)); const String title("cookies updated:"); XmlTableRec tr; tr << algnLeft << XmlTableHeading(title); addMeasBlob(db, name, phase.stats().theCookiesUpdated, "cookie", title); XmlTableCell cell; cell << db.quote(name); tr << cell; table << tr; } blob << table; } { XmlTag descr("description"); XmlTextTag p1; p1.buf() << "The 'HTTP Cookies' table provides statistics " "for HTTP cookies sent and received. The 'Messages " "with cookies' column shows the absolute and relative " "number of HTTP messages containing cookies. 
The " "'Cookies per message' column shows mininum, maximum, " "and mean number of cookies in HTTP messages containing " "cookies. The 'Total #cookies' column shows the total " "number of cookies sent and received."; descr << p1; blob << descr; } db << blob; } void SideInfo::cmplCookieTableRec(BlobDb &db, XmlTable &table, const Stex &stex, const Stex &allStex, const PhaseInfo &phase, const Scope &scope, const String &pfx, const String &name) { const String msgAbsoluteCountName = BlobDb::Key(pfx + ".messages.absolute", scope); const String msgRelativeCountName = BlobDb::Key(pfx + ".messages.relative", scope); const String meanName = BlobDb::Key(pfx + ".mean", scope); const String minName = BlobDb::Key(pfx + ".min", scope); const String maxName = BlobDb::Key(pfx + ".max", scope); const String totalCountName = BlobDb::Key(pfx + ".total", scope); const String msgAbsoluteCountTitle = "absolute message count"; const String msgRelativeCountTitle = "relative message count"; const String meanTitle = "mean"; const String minTitle = "min"; const String maxTitle = "max"; const String totalCountTitle = "total count"; if (const AggrStat *const stats = stex.partsStat(phase)) { addMeasBlob(db, msgAbsoluteCountName, stats->count(), "msg", msgAbsoluteCountTitle); if (const TmSzStat *const allStats = allStex.aggr(phase)) addMeasBlob(db, msgRelativeCountName, Percent(stats->count(), allStats->count()), "%", msgRelativeCountTitle); else addNaMeasBlob(db, msgRelativeCountName, msgRelativeCountTitle); addMeasBlob(db, minName, stats->min(), "cookie", minTitle); addMeasBlob(db, meanName, stats->mean(), "cookie", meanTitle); addMeasBlob(db, maxName, stats->max(), "cookie", maxTitle); addMeasBlob(db, totalCountName, stats->sum(), "cookie", totalCountTitle); } else { addNaMeasBlob(db, msgAbsoluteCountName, msgAbsoluteCountTitle); addNaMeasBlob(db, msgRelativeCountName, msgRelativeCountTitle); addNaMeasBlob(db, minName, minTitle); addNaMeasBlob(db, meanName, meanTitle); addNaMeasBlob(db, maxName, maxTitle); addNaMeasBlob(db, totalCountName, totalCountTitle); } XmlTableRec tr; tr << algnLeft << XmlTableHeading(name); { XmlTableCell cell; cell << algnRight << db.quote(msgAbsoluteCountName); tr << cell; } { XmlTableCell cell; cell << algnRight << db.quote(msgRelativeCountName); tr << cell; } { XmlTableCell cell; cell << algnRight << db.quote(minName); tr << cell; } { XmlTableCell cell; cell << algnRight << db.quote(meanName); tr << cell; } { XmlTableCell cell; cell << algnRight << db.quote(maxName); tr << cell; } { XmlTableCell cell; cell << algnRight << db.quote(totalCountName); tr << cell; } table << tr; } void SideInfo::cmplObjectBlobs(BlobDb &db, const PhaseInfo &phase, const Scope &scope, const Array &stexes) { for (int s = 0; s < stexes.count(); ++s) cmplObjectBlob(db, *stexes[s], phase, scope); } void SideInfo::cmplObjectBlob(BlobDb &db, const Stex &stex, const PhaseInfo &phase, const Scope &scope) { const TmSzStat *aggr = stex.aggr(phase); if (aggr && aggr->count()) { const String pfx = "object." + stex.key(); const String tlTitle = stex.name() + " stats"; ReportBlob blob(pfx + scope, tlTitle); blob << XmlAttr("vprimitive", String("Object '") + stex.name() + "'"); XmlTable table; table << XmlAttr::Int("border", 0); if (stex.parent() || &stex == TheAllReps || &stex == TheAllReqs) { XmlTableRec tr; tr << algnLeft << XmlTableHeading("contribution:"); XmlTableCell cell; cell << db.include("stream." + stex.key() + ".ratio.obj" + scope); cell << XmlText(" by count and "); cell << db.include("stream." 
+ stex.key() + ".ratio.byte" + scope); cell << XmlText(" by volume"); tr << cell; table << tr; } { XmlTableRec tr; tr << algnLeft << XmlTableHeading("rates:"); XmlTableCell cell; cell << db.include("stream." + stex.key() + ".rate" + scope); cell << XmlText(" or "); cell << db.include("stream." + stex.key() + ".bwidth" + scope); tr << cell; table << tr; } { XmlTableRec tr; tr << algnLeft << XmlTableHeading("totals:"); XmlTableCell cell; cell << db.include("stream." + stex.key() + ".size.count" + scope); cell << XmlText(" and "); cell << db.include("stream." + stex.key() + ".size.sum" + scope); tr << cell; table << tr; } { XmlTableRec tr; tr << algnLeft << XmlTableHeading("response time:"); XmlTableCell cell; cell << db.include("object." + stex.key() + ".rptm.min" + scope); cell << XmlText(" min, "); cell << db.include("object." + stex.key() + ".rptm.mean" + scope); cell << XmlText(" mean, and "); cell << db.include("object." + stex.key() + ".rptm.max" + scope); cell << XmlText(" max"); tr << cell; table << tr; } { XmlTableRec tr; tr << algnLeft << XmlTableHeading("response size:"); XmlTableCell cell; cell << db.include("object." + stex.key() + ".size.min" + scope); cell << XmlText(" min, "); cell << db.include("object." + stex.key() + ".size.mean" + scope); cell << XmlText(" mean, and "); cell << db.include("object." + stex.key() + ".size.max" + scope); cell << XmlText(" max"); tr << cell; table << tr; } blob << table; if (stex.hist(phase)) { { RptmHistFig fig; fig.configure(pfx + ".rptm.fig" + scope, "response time distribution"); fig.stats(&stex, &phase); blob << db.include(fig.plot(db).key()); } { SizeHistFig fig; fig.configure(pfx + ".size.fig" + scope, "object size distribution"); fig.stats(&stex, &phase); blob << db.include(fig.plot(db).key()); } } else { blob << XmlTextTag("No response time and size " "histograms were collected or stored for this object class."); } if (stex.trace(phase.availStats())) { LoadTraceFig figLoad; TmSzLoadStex loadStex(&stex); figLoad.configure(pfx + ".load.trace" + scope, "load trace"); figLoad.stats(&loadStex, &this->phase(scope)); figLoad.globalStart(theTest->startTime()); const String &figLoadKey = figLoad.plot(db).key(); blob << db.include(figLoadKey); RptmTraceFig figRptm; figRptm.configure(pfx + ".rptm.trace" + scope, "response time trace"); figRptm.stats(&stex, &this->phase(scope)); figRptm.globalStart(theTest->startTime()); const String &figRptmKey = figRptm.plot(db).key(); blob << db.include(figRptmKey); } else { blob << XmlTextTag("No response time and size " "traces are collected for this object class."); } { XmlTag descr("description"); XmlNodes nodes; stex.describe(nodes); descr << nodes; blob << descr; } db.add(blob); } else if (!stex.ignoreUnseen()) theUnseenObjects.push_back(stex.name()); } void SideInfo::cmplUnseenObjectsBlob(BlobDb &db, const Scope &scope) { if (theUnseenObjects.empty()) return; ReportBlob blob(BlobDb::Key("unseen_objects", scope), "Unseen objects"); blob << XmlAttr("vprimitive", String("Unseen objects")); { XmlTextTag p; p.buf() << "No instances of these objects were observed or " << "recorded in the given scope:"; for (std::list::const_iterator i = theUnseenObjects.begin(); i != theUnseenObjects.end(); ++i) p.buf() << (i == theUnseenObjects.begin() ? 
" " : ", ") << *i; p.buf() << '.'; blob << p; } { XmlTextTag p; p.buf() << "An object unseen in one scope may be present in " << "another scope."; blob << p; } db << blob; } void SideInfo::cmplSideSum(BlobDb &db) { ReportBlob blob(BlobDb::Key("summary", execScope()), "test side summary"); blob << db.quote("load" + execScope()); blob << db.quote("hit.ratio" + execScope()); blob << db.quote("reply_stream.table" + execScope()); blob << db.quote("reply_object.table" + execScope()); db << blob; } void SideInfo::AddStex(Array &stexes, Stex *stex, const Stex *parent) { Assert(stex); if (parent && Should(stex != parent)) stex->parent(parent); stexes.append(stex); } void SideInfo::Configure() { TheAllReps = new AllRepsStex("rep", "all replies"); Stex *allContTypes = new AllContTypesStex("cont_type_all", "all response content types", &StatPhaseRec::theRepContType); for (int i = 0; i < ContTypeStat::Kinds().count(); ++i) { char buf[128]; ofixedstream s(buf, sizeof(buf)); s << "rep_cont_type_" << i << ends; const String key = buf; const String &cname = *ContTypeStat::Kinds()[i]; const String name = String("\"") + cname + "\" response"; if (cname[0] != '_') AddStex(TheReplyStex, new ContTypeStex(key, name, i, &StatPhaseRec::theRepContType), allContTypes); } AddStex(TheReplyStex, allContTypes, TheAllReps); Stex *hitsAndMisses = new HitMissesStex("hits_and_misses", "hits and misses"); AddStex(TheReplyStex, new HitsStex("hits", "hits"), hitsAndMisses); AddStex(TheReplyStex, new MissesStex("misses", "misses"), hitsAndMisses); AddStex(TheReplyStex, hitsAndMisses, TheAllReps); Stex *allIms = new ValidationHitMissStex("ims_scAll", "all ims", &StatPhaseRec::theImsXacts, &StatIntvlRec::theIms); AddStex(TheReplyStex, new ValidationHitStex("ims_sc200", "ims/200", &StatPhaseRec::theImsXacts), allIms); AddStex(TheReplyStex, new ValidationMissStex("ims_sc304", "ims/304", &StatPhaseRec::theImsXacts), allIms); AddStex(TheReplyStex, allIms, TheAllReps); Stex *allCachable = new AllCachableStex("all_cachable", "cachable and not"); AddStex(TheReplyStex, new CachableStex("cachable", "cachable"), allCachable); AddStex(TheReplyStex, new UnCachableStex("uncachable", "not cachable"), allCachable); AddStex(TheReplyStex, allCachable, TheAllReps); Stex *allFtp = new ValidationHitMissStex("ftp_all_modes", "FTP all modes", &StatPhaseRec::theFtpXacts, 0); AddStex(TheReplyStex, new ValidationHitStex("ftp_active", "FTP active", &StatPhaseRec::theFtpXacts), allFtp); AddStex(TheReplyStex, new ValidationMissStex("ftp_passive", "FTP passive", &StatPhaseRec::theFtpXacts), allFtp); AddStex(TheReplyStex, allFtp, TheAllReps); AddStex(TheReplyStex, new FillStex("fill", "fill"), TheAllReps); AddStex(TheReplyStex, new SimpleStex("reload", "reload", &StatPhaseRec::theReloadXacts, &StatIntvlRec::theReload), TheAllReps); AddStex(TheReplyStex, new SimpleStex("range", "range", &StatPhaseRec::theRangeXacts, &StatIntvlRec::theRange), TheAllReps); AddStex(TheReplyStex, new SimpleStex("abort", "abort", 0, &StatIntvlRec::theAbort), TheAllReps); AddStex(TheReplyStex, new SimpleStex("redir_req", "redirected request", &StatPhaseRec::theRediredReqXacts, &StatIntvlRec::theRediredReq), TheAllReps); AddStex(TheReplyStex, new SimpleStex("rep_to_redir", "reply to redirect", &StatPhaseRec::theRepToRedirXacts, &StatIntvlRec::theRepToRedir), TheAllReps); Stex *allMethods = new AllMethodsStex("method_all", "all non-gets"); AddStex(TheReplyStex, new SimpleStex("method_head", "HEAD", &StatPhaseRec::theHeadXacts, &StatIntvlRec::theHead), allMethods); 
AddStex(TheReplyStex, new SimpleStex("method_post", "POST", &StatPhaseRec::thePostXacts, &StatIntvlRec::thePost), allMethods); AddStex(TheReplyStex, new SimpleStex("method_put", "PUT", &StatPhaseRec::thePutXacts, &StatIntvlRec::thePut), allMethods); AddStex(TheReplyStex, new SimpleStex("method_connect", "CONNECT", &StatPhaseRec::theConnectXacts, &StatIntvlRec::theConnect), allMethods); AddStex(TheReplyStex, allMethods, TheAllReps); AddStex(TheReplyStex, TheAllReps, 0); AddStex(TheReplyStex, new SimpleStex("page", "page", &StatPhaseRec::thePageHist, &StatIntvlRec::thePage), 0); TheAllReqs = new AllContTypesStex("req_cont_type_all", "all request content types", &StatPhaseRec::theReqContType); for (int i = 0; i < ContTypeStat::Kinds().count(); ++i) { char buf[128]; ofixedstream s(buf, sizeof(buf)); s << "req_cont_type_" << i << ends; const String key = buf; const String &cname = *ContTypeStat::Kinds()[i]; const String name = String("\"") + cname + "\" request"; if (cname[0] != '_') AddStex(TheRequestStex, new ContTypeStex(key, name, i, &StatPhaseRec::theReqContType), TheAllReqs); } AddStex(TheRequestStex, TheAllReqs, 0); Stex *proxyValidation = new ValidationHitMissStex("all_proxy_validations", "all proxy validations", &StatPhaseRec::theProxyValidationR, &StatIntvlRec::theProxyValidations); TheUsefulProxyValidation = new ValidationHitStex("useful_proxy_validations", "useful proxy validations", &StatPhaseRec::theProxyValidationR); AddStex(TheReplyStex, TheUsefulProxyValidation, proxyValidation); AddStex(TheReplyStex, new ValidationMissStex("useless_proxy_validations", "useless proxy validations", &StatPhaseRec::theProxyValidationR), proxyValidation); AddStex(TheReplyStex, proxyValidation, TheAllReps); AddProtoStexes(&StatIntvlRec::theSocksStat); AddProtoStexes(&StatIntvlRec::theSslStat); AddProtoStexes(&StatIntvlRec::theFtpStat); /* auth related stexes */ { Stex *const authNone = new SimpleStex("auth.none", "no auth", 0, &StatIntvlRec::theAuthNone); AddStex(TheAuthStex, authNone, TheAllReps); AddStex(TheReplyStex, authNone, 0); } { Stex *const authIngBasic = new AuthIngStex("auth.ing.basic", "Basic auth-ing", AuthPhaseStat::sBasic); AddStex(TheAuthStex, authIngBasic, TheAllReps); } { Stex *const authIngNtlm = new AuthIngStex("auth.ing.ntlm", "NTLM auth-ing", AuthPhaseStat::sNtlm); AddStex(TheAuthStex, authIngNtlm, TheAllReps); } { Stex *const authIngNegotiate = new AuthIngStex("auth.ing.negotiate", "Negotiate auth-ing", AuthPhaseStat::sNegotiate); AddStex(TheAuthStex, authIngNegotiate, TheAllReps); } { Stex *const authIngAny = new AllAuthIngStex("auth.ing.any", "all auth-ing"); AddStex(TheAuthStex, authIngAny, TheAllReps); } { Stex *const authEdBasic = new AuthEdStex("auth.ed.basic", "Basic auth-ed", AuthPhaseStat::sBasic); AddStex(TheAuthStex, authEdBasic, TheAllReps); } { Stex *const authEdNtlm = new AuthEdStex("auth.ed.ntlm", "NTLM auth-ed", AuthPhaseStat::sNtlm); AddStex(TheAuthStex, authEdNtlm, TheAllReps); } { Stex *const authEdNegotiate = new AuthEdStex("auth.ed.negotiate", "Negotiate auth-ed", AuthPhaseStat::sNegotiate); AddStex(TheAuthStex, authEdNegotiate, TheAllReps); } { Stex *const authFtp = new AuthIngStex("auth.ftp", "FTP auth", AuthPhaseStat::sFtp); AddStex(TheAuthStex, authFtp, TheAllReps); } { Stex *const authEdAny = new AllAuthEdStex("auth.ed.any", "all auth-ed"); AddStex(TheAuthStex, authEdAny, TheAllReps); } { Stex *const authAny = new AllAuthStex("auth.any", "all auth"); AddStex(TheAuthStex, authAny, TheAllReps); AddStex(TheReplyStex, authAny, 0); } { Stex *const tunneled 
= new SimpleStex("tunneled", "tunneled", 0, &StatIntvlRec::theTunneled); AddStex(TheAuthStex, tunneled, TheAllReps); AddStex(TheReplyStex, tunneled, 0); } AddStex(TheAuthStex, TheAllReps, 0); AddStex(TheCompoundReplyStex, new CompoundReplyStex("compound.auth.basic.rep", "Basic auth replies", &StatPhaseRec::theAuthBasic), TheAllReps); AddStex(TheCompoundReplyStex, new CompoundReplyStex("compound.auth.ntlm.rep", "NTLM auth replies", &StatPhaseRec::theAuthNtlm), TheAllReps); AddStex(TheCompoundReplyStex, new CompoundReplyStex("compound.auth.negotiate.rep", "Negotiate auth replies", &StatPhaseRec::theAuthNegotiate), TheAllReps); AddStex(TheCompoundReplyStex, new AllCompoundRepsStex("compound.any.reps", "all compound replies"), TheAllReps); AddStex(TheCompoundReplyStex, new CompoundReplyStex("compound.auth.not.rep", "isolated replies", &StatPhaseRec::theIsolated), TheAllReps); AddStex(TheCompoundRequestStex, new CompoundRequestStex("compound.auth.basic.req", "Basic auth requests", &StatPhaseRec::theAuthBasic), TheAllReqs); AddStex(TheCompoundRequestStex, new CompoundRequestStex("compound.auth.ntlm.req", "NTLM auth requests", &StatPhaseRec::theAuthNtlm), TheAllReqs); AddStex(TheCompoundRequestStex, new CompoundRequestStex("compound.auth.negotiate.req", "Negotiate auth requests", &StatPhaseRec::theAuthNegotiate), TheAllReqs); AddStex(TheCompoundRequestStex, new AllCompoundReqsStex("compound.any.reqs", "all compound requests"), TheAllReqs); AddStex(TheCompoundRequestStex, new CompoundRequestStex("compound.auth.not.req", "isolated requests", &StatPhaseRec::theIsolated), TheAllReqs); AddStex(TheReplyStex, new SimpleStex("custom_stats", "custom", &StatPhaseRec::theCustomXacts, &StatIntvlRec::theCustom), 0); Stex *allReplyStatus = new AllStatusCodeStex("reply_status.all", "All HTTP reply status codes", &StatPhaseRec::theStatusCode); for (int i = 0; i <= StatusCodeStat::scsMaxValue; ++i) { const String label(AnyToString(i)); const String name("reply_status." 
+ label); const String title("HTTP reply status code " + label); AddStex(TheReplyStatusStex, new StatusCodeStex(name, title, &StatPhaseRec::theStatusCode, i), allReplyStatus); } AddStex(TheReplyStatusStex, new StatusCodeStex("reply_status.other", "Other HTTP reply status code", &StatPhaseRec::theStatusCode, RepHdr::scUnknown), allReplyStatus); AddStex(TheReplyStatusStex, allReplyStatus, 0); } void SideInfo::AddProtoStexes(ProtoIntvlPtr protoPtr) { StatIntvlRec dummy; const ProtoIntvlStat protoStat = dummy.*protoPtr; const String pfx = protoStat.id(); const String protoName = protoStat.name(); Stex *hitsAndMisses = new ProtoHitMissesStex(protoPtr, pfx + "_hits_and_misses", protoName + " hits and misses"); AddStex(TheReplyStex, new ProtoHitsStex(protoPtr, pfx + "_hits", protoName + " hits"), hitsAndMisses); AddStex(TheReplyStex, new ProtoMissesStex(protoPtr, pfx + "_misses", protoName + " misses"), hitsAndMisses); AddStex(TheReplyStex, hitsAndMisses, TheAllReps); } // sort stexes using cmpByCountContrib void SideInfo::SortStexes(const PhaseInfo &phase, const Array &in, Array &out) { out.stretch(in.count()); // insertion sort keeps original array constant and needs no cmp wrapper for (int i = 0; i < in.count(); ++i) { Stex *const stex = in[i]; bool inserted = false; for (int j = 0; j < out.count(); ++j) { if (stex->cmpByCountContrib(phase, *out[j]) < 0) { out.insert(stex, j); inserted = true; break; } } if (!inserted) out.append(stex); } } polygraph-4.3.2/src/loganalyzers/PointStex.h0000644000175000017500000000725211546440450020543 0ustar testertester /* Web Polygraph http://www.web-polygraph.org/ * Copyright 2003-2011 The Measurement Factory * Licensed under the Apache License, Version 2.0 */ #ifndef POLYGRAPH__LOGANALYZERS_POINTSTEX_H #define POLYGRAPH__LOGANALYZERS_POINTSTEX_H #include "xstd/String.h" #include "xstd/gadgets.h" #include "base/StatIntvlRec.h" #include "loganalyzers/StexBase.h" // an algorithm of extracting a single value statistics out of // interval stats record class PointStex: public StexBase { public: PointStex(const String &aKey, const String &aName, const String &aUnit): StexBase(aKey, aName, aUnit) {} virtual bool valueKnown(const StatIntvlRec &rec) const = 0; virtual double value(const StatIntvlRec &rec) const = 0; }; class AggrPointStex: public PointStex { public: typedef AggrStat StatIntvlRec::*AggrPtr; public: AggrPointStex(const String &aKey, const String &aName, const String &aUnit, AggrPtr anAggrPtr): PointStex(aKey, aName, aUnit), theAggrPtr(anAggrPtr) {} const AggrStat &aggr(const StatIntvlRec &rec) const { return rec.*theAggrPtr; } virtual bool valueKnown(const StatIntvlRec &rec) const { return (rec.*theAggrPtr).known(); } protected: AggrPtr theAggrPtr; }; class HRPointStex: public PointStex { public: typedef HRStat StatIntvlRec::*HRPtr; public: HRPointStex(const String &aKey, const String &aName, const String &aUnit, HRPtr anHRPtr): PointStex(aKey, aName, aUnit), theHRPtr(anHRPtr) {} const HRStat &stats(const StatIntvlRec &rec) const { return rec.*theHRPtr; } virtual bool valueKnown(const StatIntvlRec &rec) const { return stats(rec).active(); } protected: HRPtr theHRPtr; }; class MeanAggrPointStex: public AggrPointStex { public: MeanAggrPointStex(const String &aKey, const String &aName, const String &aUnit, AggrPtr anAggrPtr): AggrPointStex(aKey, aName, aUnit, anAggrPtr){} virtual double value(const StatIntvlRec &rec) const { return aggr(rec).mean(); } }; class DhpPointStex: public HRPointStex { public: DhpPointStex(const String &aKey, const String &aName, 
HRPtr anHRPtr): HRPointStex(aKey, aName, "%", anHRPtr){} virtual double value(const StatIntvlRec &rec) const { return stats(rec).dhp(); } }; class BhpPointStex: public HRPointStex { public: BhpPointStex(const String &aKey, const String &aName, HRPtr anHRPtr): HRPointStex(aKey, aName, "%", anHRPtr){} virtual double value(const StatIntvlRec &rec) const { return stats(rec).bhp(); } }; class LoadPointStex: public PointStex { public: typedef double (StatIntvlRec::*StatPtr)() const; public: LoadPointStex(const String &aKey, const String &aName, const String &aUnit, StatPtr aStatPtr): PointStex(aKey, aName, aUnit), theStatPtr(aStatPtr) {} virtual bool valueKnown(const StatIntvlRec &rec) const { return rec.theDuration > 0; } virtual double value(const StatIntvlRec &rec) const { return (rec.*theStatPtr)(); } protected: StatPtr theStatPtr; }; class PipelineProbPointStex: public PointStex { public: PipelineProbPointStex(): PointStex("pprob", "portion of pipelined connections", "%") {} virtual bool valueKnown(const StatIntvlRec &rec) const { return rec.theConnUseCnt.known(); } virtual double value(const StatIntvlRec &rec) const { return Percent(rec.theConnPipelineDepth.count(), rec.theConnUseCnt.count()); } }; class MeanRptmPointStex: public PointStex { public: MeanRptmPointStex(): PointStex("mean-rptm", "mean response time", "msec") {} virtual bool valueKnown(const StatIntvlRec &rec) const { return rec.repTime().known(); } virtual double value(const StatIntvlRec &rec) const { return rec.repTime().mean(); } }; #endif polygraph-4.3.2/src/loganalyzers/RepToHtmlFile.h0000644000175000017500000000373011546440450021261 0ustar testertester /* Web Polygraph http://www.web-polygraph.org/ * Copyright 2003-2011 The Measurement Factory * Licensed under the Apache License, Version 2.0 */ #ifndef POLYGRAPH__LOGANALYZERS_REPTOHTMLFILE_H #define POLYGRAPH__LOGANALYZERS_REPTOHTMLFILE_H #include "xstd/Map.h" #include "xstd/String.h" #include "xml/XmlRenderer.h" #include "loganalyzers/SectionState.h" class BlobDb; class RepToHtmlFile: public XmlRenderer { public: static void Location(BlobDb &db, const ReportBlob &blob, const String &fname); static void CollectLocations(BlobDb &db, const XmlNode &node, const String &fname); static String Location(const String &key); public: RepToHtmlFile(BlobDb &db, ostream *aStream, const String &aLocation); virtual ~RepToHtmlFile(); virtual void render(const XmlDoc &doc); virtual void renderReportBlob(const ReportBlob &blob); virtual void renderText(const char *buf, Size sz); virtual void renderTag(const XmlTag &tag); protected: void renderDocument(const XmlTag &tag); void renderChapter(const XmlTag &tag); void renderSection(const XmlTag &tag); void renderBlobInclude(const XmlTag &tag); void renderBlobPtr(const XmlTag &tag); void renderBlob(const XmlTag &tag); void renderMeasurement(const XmlTag &tag); void renderMeasurementVal(const XmlTag &tag, const String &val, bool renderUnit, const String &unit); void renderList(const XmlTag &tag); void renderTableCell(const XmlTag &tag); void renderImage(const XmlTag &tag); void renderSampleStart(const XmlNode &n, const String &element, const String &typeId); String relativeUrl(const String &from, const String &to) const; String location(const String &key) const; protected: static Map TheLocations; // global file names mutable Map theLocations; // local names BlobDb &theDb; ostream *theStream; String theLocation; Array theParents; // used to link out-of-tree blobs int theQuoteLevel; SectionState theSectionState; }; #endif 
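/*
 * Illustrative usage sketch (not part of the original source): based on the
 * constructor and render() declared above, rendering a compiled report to an
 * HTML file is assumed to look roughly like the following. The variables
 * `db` (a populated BlobDb), `doc` (an XmlDoc holding the report tree), and
 * `htmlPath` (a String with the output file name) are hypothetical and would
 * be prepared by the reporter driver.
 *
 *   ofstream out(htmlPath.cstr());
 *   RepToHtmlFile renderer(db, &out, htmlPath);
 *   renderer.render(doc); // walks the XML tree and writes HTML to `out`
 */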
polygraph-4.3.2/src/loganalyzers/ReportFigure.cc0000644000175000017500000001017011546440450021352 0ustar testertester /* Web Polygraph http://www.web-polygraph.org/ * Copyright 2003-2011 The Measurement Factory * Licensed under the Apache License, Version 2.0 */ #include "base/polygraph.h" #include #include #include "xml/XmlAttr.h" #include "xml/XmlText.h" #include "xml/XmlParagraph.h" #include "xml/XmlTable.h" #include "loganalyzers/ReportBlob.h" #include "loganalyzers/BlobDb.h" #include "loganalyzers/RepOpts.h" #include "loganalyzers/PhaseInfo.h" #include "loganalyzers/ReportFigure.h" String ReportFigure::TheBaseDir = "."; ReportFigure::ReportFigure(): thePhase(0), theCtrlFile(0), thePlotLineCount(0) { theDataStyle = "lines"; } ReportFigure::~ReportFigure() { delete theCtrlFile; } void ReportFigure::configure(const String &aKey, const String &aTitle) { theKey = aKey; theTitle = aTitle; theBaseName = TheBaseDir + '/' + theKey; thePlotFname = theBaseName + ".png"; theCtrlFname = theBaseName + ".gp"; } const ReportBlob &ReportFigure::plot(BlobDb &db) { const int pointCount = createCtrlFile(); const bool success = pointCount > 0 && plotCtrlFile(); destroyCtrlFile(); ReportBlob blob(theKey, theTitle); if (success) { XmlTable table; table << XmlAttr::Int("border", 1); XmlTableRec r1; r1 << XmlTableHeading(title()); table << r1; XmlTableRec r2; XmlTableCell cell; XmlTag img("img"); img << XmlAttr("src", thePlotFname) << XmlAttr("alt", blob.key()); cell << img; r2 << cell; table << r2; blob << table; return *db.add(blob); } else if (pointCount == 0) { XmlTextTag warning; warning.buf() << "Figure '" << title() << "' not available: " << "no datapoints to plot."; blob << warning; } else { XmlTextTag error; error.buf() << "Figure '" << title() << "' not available: " << "something went wrong while generating the graph."; blob << error; } return *db.add(blob); } int ReportFigure::createCtrlFile() { theCtrlFile = new ofstream(theCtrlFname.cstr()); if (!theCtrlFile || !*theCtrlFile) { clog << "error: cannot create " << theCtrlFname << ": " << Error::Last() << endl; return false; } setCtrlOptions(); return *theCtrlFile ? 
0 : -1; } bool ReportFigure::plotCtrlFile() { const String cmd = TheRepOpts.thePlotter + ' ' + theCtrlFname + " > " + thePlotFname; if (::system(cmd.cstr()) != 0) { clog << "error: cannot plot " << theCtrlFname << "; consult error messages above, if any" << endl; clog << "plot file was:" << endl; // TODO: report system(3) failure if (system((String("cat ") + theCtrlFname).cstr()) != 0) {} return false; } return true; } bool ReportFigure::destroyCtrlFile() { delete theCtrlFile; theCtrlFile = 0; return unlink(theCtrlFname.cstr()) == 0; } void ReportFigure::setCtrlOptions() { *theCtrlFile << "set term png small" << endl; //*theCtrlFile << "set term png color" << endl; *theCtrlFile << "set output '" << thePlotFname << "'" << endl; *theCtrlFile << "set title ''" << endl; *theCtrlFile << "set grid" << endl; *theCtrlFile << "set style line 1 lt 3" << endl; *theCtrlFile << "set style line 2 lt 1" << endl; *theCtrlFile << "set style line 3 lt 2" << endl; *theCtrlFile << "set style line 4 lt 4" << endl; *theCtrlFile << "set style data " << theDataStyle << endl; *theCtrlFile << "set xlabel '" << theLabelX1 << "'" << endl; *theCtrlFile << "set ylabel '" << theLabelY1 << "'" << endl; *theCtrlFile << "set y2label '" << theLabelY2 << "'" << endl; if (theLabelY2) { *theCtrlFile << "set ytics nomirror" << endl; *theCtrlFile << "set y2tics nomirror" << endl; } *theCtrlFile << "set size 1.0,0.5" << endl; } void ReportFigure::addPlotLine(const String &title, const String &unit) { ++thePlotLineCount; if (thePlotLineCount == 1) *theCtrlFile << "plot \\" << endl; else *theCtrlFile << ", \\" << endl; bool useY2 = unit != theLabelY1; *theCtrlFile << "\t'-'"; if (useY2) *theCtrlFile << " axes x1y2"; *theCtrlFile << " title '" << title; if (useY2) *theCtrlFile << " (right Y axis)"; *theCtrlFile << "'"; *theCtrlFile << " with " << theDataStyle << " ls " << thePlotLineCount; } void ReportFigure::addedAllPlotLines() { *theCtrlFile << endl << endl; } polygraph-4.3.2/src/loganalyzers/BlobDb.cc0000644000175000017500000000750111546440450020065 0ustar testertester /* Web Polygraph http://www.web-polygraph.org/ * Copyright 2003-2011 The Measurement Factory * Licensed under the Apache License, Version 2.0 */ #include "base/polygraph.h" #include "xstd/h/sstream.h" #include "base/AnyToString.h" #include "xml/XmlAttr.h" #include "xml/XmlText.h" #include "loganalyzers/ReportBlob.h" #include "loganalyzers/BlobDb.h" static const String strKey = "key"; String BlobDb::Key(const String &name, const Scope &scope) { return name + KeySuffix(scope); } String BlobDb::KeySuffix(const Scope &scope) { return scope ? 
String(".scope=") + scope.image() : String(""); } BlobDb::BlobDb(): theBlobIdx(15991), theNotesCount(0) { } const ReportBlob *BlobDb::has(const String &key) { return find(key); } bool CheckParent(const XmlNode &n) { if (n.kids()) { for (int i = 0; i < n.kids()->count(); ++i) { //clog << here << n.kids()->item(i)->name() << endl; Assert(n.kids()->item(i)->parent()); if (!CheckParent(*n.kids()->item(i))) { Assert(false); return false; } } } return true; } const ReportBlob *BlobDb::add(const ReportBlob &b) { //clog << here << "created: " << b.key() << endl; //CheckParent(b); if (const ReportBlob *copy = find(b.key())) { //cerr << here << "duplicate: " << b.key() << endl; ostringstream buf1, buf2; b.print(buf1, "") << ends; copy->print(buf2, "") << ends; if (buf1.str() != buf2.str()) cerr << "internal_error: inconsistent computation of " << b.key() << endl; streamFreeze(buf1, false); streamFreeze(buf2, false); return copy; } else { theBlobs << b; theBlobIdx.add((const ReportBlob*)theBlobs.last()); return (const ReportBlob*)theBlobs.last(); } } const ReportBlob &BlobDb::get(const String &key) { //cerr << here << "looking for blob: " << key << endl; const char *descr = 0; if (const ReportBlob *blob = find(key)) return *blob; XmlSearchRes errs; theErrors.selByAttrVal(strKey, key, errs); if (errs.count()) return *(const ReportBlob*)errs.last(); cerr << here << "error: cannot find blob: " << key << endl; // create an error blob so that we can return something ReportBlob blob(key, "internal reporter error"); XmlTag err("internal_error"); if (descr) err << XmlAttr("description", "blob not found"); err << XmlAttr("context", key); blob << err; theErrors << blob; return *(const ReportBlob*)theErrors.last(); } // XXX: check that include is used at most once per key const XmlNode &BlobDb::include(const String &key) { XmlTag t("include"); t << XmlAttr("src", key); t << XmlAttr("auth", "1"); // authoritative theIncludes << t; return *theIncludes.last(); } const XmlNode &BlobDb::quote(const String &key) { XmlTag t("include"); t << XmlAttr("src", key); theIncludes << t; return *theIncludes.last(); } XmlNode &BlobDb::ptr(const String &key, const XmlNodes &context) { XmlTag t("blob_ptr"); t << XmlAttr(strKey, key); t << context; thePtrs << t; return *thePtrs.last(); } XmlTag BlobDb::reportNote(const String &id, const XmlNode ¬e) { static const String key("notes."); XmlNode *node = theBlobs.findByAttrVal("report_note", id); String number; if (node) { number = node->attrs()->value("report_note_number"); if (!node->attrs()->has("many_occurrences")) { node->addAttr(new XmlAttr("many_occurrences", "")); node->addChild(new XmlText(" (many occurrences)")); } } else { static const String title("report note "); number = AnyToString(++theNotesCount); ReportBlob blob(key + number, title + number); blob << XmlAttr("report_note", id); blob << XmlAttr("report_note_number", number); blob << note; add(blob); } XmlTag t("sup"); t << ptr(key + number, XmlText("[" + number + "]")); return t; } const ReportBlob *BlobDb::find(const String &key) const { return theBlobIdx.find(key); } ostream &BlobDb::print(ostream &os, const String &pfx) const { return theBlobs.print(os, pfx); } polygraph-4.3.2/src/loganalyzers/comparator.cc0000644000175000017500000002161011546440450021105 0ustar testertester /* Web Polygraph http://www.web-polygraph.org/ * Copyright 2003-2011 The Measurement Factory * Licensed under the Apache License, Version 2.0 */ #include "base/polygraph.h" #include #include "xstd/h/iostream.h" #include #include 
"xstd/h/iomanip.h" #include "xstd/gadgets.h" #include "base/AnyToString.h" #include #include #include "base/CmdLine.h" #include "loganalyzers/BlobDb.h" #include "loganalyzers/CompOpts.h" #include "loganalyzers/Sample.h" #include "loganalyzers/Panorama.h" #include "loganalyzers/Formatter.h" #include "Hapy/Rule.h" #include "Hapy/Rules.h" #include "Hapy/Parser.h" typedef list Samples; Hapy::Rule rGrammar("grammar", 0); Hapy::Rule rNode("node", 0); Hapy::Rule rPi("pi", 0); Hapy::Rule rTag("tag", 0); Hapy::Rule rOpenTag("open-tag", 0); Hapy::Rule rCloseTag("close-tag", 0); Hapy::Rule rClosedElement("closed-element", 0); Hapy::Rule rText("text", 0); Hapy::Rule rAttr("attr", 0); Hapy::Rule rName("name", 0); Hapy::Rule rValue("value", 0); inline bool operator ==(const String &s1, const std::string &s2) { return s1 == String(s2.c_str()); } inline bool operator ==(const std::string &s2, const String &s1) { return s1 == String(s2.c_str()); } static void buildGrammar() { using namespace Hapy; rGrammar = *rNode; rNode = rPi | rTag | rText; rTag = rOpenTag | rCloseTag | rClosedElement; rPi = "> rName >> *(anychar_r - "?>") >> "?>"; rOpenTag = "<" >> rName >> *rAttr >> ">"; rCloseTag = "> rName >> ">"; rClosedElement = "<" >> rName >> *rAttr >> "/>"; rText = +(anychar_r - '<'); rAttr = rName >> '=' >> rValue; rName = alpha_r >> *(alnum_r | '_' | ':'); rValue = quoted_r(anychar_r, '"') | quoted_r(anychar_r, "'"); // trimming rules rGrammar.trim(*space_r); rText.verbatim(true); rName.verbatim(true); rValue.verbatim(true); // parse tree shaping rules rText.leaf(true); rName.leaf(true); rValue.leaf(true); // parsing optimization rules rGrammar.committed(true); rText.committed(true); rName.committed(true); rValue.committed(true); rNode.committed(true); } static void parseFile(ifstream &is, Hapy::Parser &parser) { is.unsetf(std::ios::skipws); string content; char c; while (is.get(c)) content += c; parser.grammar(rGrammar); if (!parser.parse(content)) { cerr << parser.result().location() << ": syntax error" << endl; exit(2); } } static bool findAttr(const Hapy::Pree &tag, const String &name, String *value) { // "<" >> rName >> *rAttr >> ">"; const Hapy::Pree &attrs = tag[2]; for (Hapy::Pree::const_iterator i = attrs.begin(); i < attrs.end(); ++i) { // rAttr = rName >> '=' >> rValue; const Hapy::Pree &attr = *i; if (attr[0].image() == name) { if (value) { const Hapy::Pree &v = attr[2]; if (Should(v.image().size() >= 2)) *value = String(v.image().substr( 1,v.image().size()-2).c_str()); } return true; } } return false; } typedef Hapy::Pree::const_iterator PreeIter; static Sample *skipSample(PreeIter &begin, const PreeIter &end); static bool findCompositeSampleEnd(const PreeIter &begin, PreeIter &end, const Hapy::Pree &opener, CompositeSample *c) { const Hapy::string tagName = opener[1].image(); for (PreeIter i = begin; i != end;) { const Hapy::Pree &node = *i; if (node.rid() == rNode.id()) { // rNode = rPi | rTag | rText; const Hapy::Pree &pree = node[0]; if (pree.rid() == rTag.id()) { // rTag = rOpenTag | rCloseTag | rClosedElement; const Hapy::Pree &p = pree[0]; if (p.rid() == rCloseTag.id() && p[1].image() == tagName) { end = i+1; return true; } } } if (Sample *kid = skipSample(i, end)) c->add(kid); else ++i; } cerr << "warning: skipping open sample: " << c->key() << endl; return false; } static bool findAtomSampleEnd(const PreeIter &begin, PreeIter &end, const Hapy::Pree &opener, String &image) { const Hapy::string tagName = opener[1].image(); for (PreeIter i = begin; i != end; ++i) { const Hapy::Pree &node = 
*i; if (node.rid() == rNode.id()) { // rNode = rPi | rTag | rText; const Hapy::Pree &pree = node[0]; if (pree.rid() == rText.id()) { image += String(pree.image().c_str()); continue; } if (pree.rid() == rTag.id()) { // rTag = rOpenTag | rCloseTag | rClosedElement; const Hapy::Pree &p = pree[0]; if (p.rid() == rCloseTag.id() && p[1].image() == tagName) { end = i+1; return true; } } if (pree.rid() == rPi.id()) continue; } cerr << "warning: ignoring non-text component of a text sample " << "near " << node.image() << endl; } cerr << "warning: skipping open sample near " << begin->image() << endl; return false; } static Sample *skipSample(PreeIter &begin, const PreeIter &end) { const Hapy::Pree &node = *begin; if (node.rid() != rNode.id()) return 0; const Hapy::Pree &pree = node[0]; if (pree.rid() != rTag.id()) return 0; // rTag = rOpenTag | rCloseTag | rClosedElement; const Hapy::Pree &tag = pree[0]; const bool kids = tag.rid() == rOpenTag.id(); // is it a Sample element? String attrId; String attrClass; String attrTitle; if (!findAttr(tag, "id", &attrId) || !findAttr(tag, "class", &attrClass) || !findAttr(tag, "title", &attrTitle)) return 0; // possibly a Sample element, check class Sample *s = 0; if (attrClass == CompositeSample::TheId) { CompositeSample *c = new CompositeSample; if (kids) { PreeIter b = begin + 1; PreeIter e = end; if (findCompositeSampleEnd(b, e, tag, c)) { begin = e; } else { delete c; c = 0; } } s = c; } else if (attrClass == NumberSample::TheId) { NumberSample *n = new NumberSample; if (Should(kids)) { PreeIter b = begin + 1; PreeIter e = end; String buf; if (findAtomSampleEnd(b, e, tag, buf)) { begin = e; n->setImage(buf); } else { delete n; n = 0; } } s = n; } else if (attrClass == TextSample::TheId) { TextSample *t = new TextSample; if (Should(kids)) { PreeIter b = begin + 1; PreeIter e = end; String buf; if (findAtomSampleEnd(b, e, tag, buf)) { begin = e; t->setImage(buf); } else { delete t; t = 0; } } s = t; } if (s) { s->key(attrId); s->title(attrTitle); } return s; } static void scanAll(Samples &samples) { buildGrammar(); for (int i = 0; i < TheCompOpts.theReports.count(); ++i) { const String &fname = *TheCompOpts.theReports[i]; clog << "scanning " << fname << endl; Hapy::Parser parser; ifstream f(fname.cstr(), ios::in); parseFile(f, parser); const Hapy::Pree &pree = parser.result().pree; CompositeSample *s = new CompositeSample; s->key(""); s->title("TBD"); for (PreeIter p = pree.begin(); p < pree.end();) { if (Sample *kid = skipSample(p, pree.end())) s->add(kid); else ++p; } s->propagateLocation(fname); s->title(Panorama::LocationLabel(fname)); if (s->kidCount()) { samples.push_back(s); } else { clog << "warning: no samples detected in " << fname << ", skipping" << endl; delete s; } } } static void buildReport(const Samples &samples) { Assert(samples.size() >= 2); // XXX: enforce in options const Sample *cur = *samples.begin(); /*clog << here << "built sample tree" << endl; for (Samples::const_iterator i = samples.begin(); i != samples.end(); ++i) (*i)->print(clog);*/ // make panorama Panorama *pan = cur->makePanoramaSkeleton(); for (Samples::const_iterator i = samples.begin(); i != samples.end(); ++i) (*i)->fillPanorama(pan); /*clog << here << "built panorama" << endl; pan->print(clog);*/ Panorama *diff = pan->genDiff(); WebPageFormatter formatter(&cout); //formatter.openPage(); if (TheCompOpts.theDelta < 0) { formatter.addText("Side-by-side comparion, all values are reported."); } else { formatter.addText( "Side-by-side comparion with values different by at 
least " + AnyToString(TheCompOpts.theDelta*100.) + "%."); } if (diff) diff->report(formatter); else formatter.addText("No values found or no differences detected."); //formatter.closePage(); formatter.make(); } static void configure() { configureStream(cout, 3); double &delta = NumberSample::TheDelta; delta = TheCompOpts.theDelta; if (delta < 0) { clog << "fyi: negative delta specified; " << "even identical values will be reported" << endl; } else if (delta < 1e-10) { clog << "fyi: zero delta specified or implied; " << "all different values will be reported" << endl; } else { clog << "fyi: " << delta << " delta specified; " << "differences of more than " << 100*delta << "% will be reported" << endl; } } int main(int argc, char *argv[]) { CmdLine cmd; cmd.configure(Array() << &TheCompOpts); if (!cmd.parse(argc, argv) || !TheCompOpts.validate()) return -1; configure(); Samples samples; scanAll(samples); if (samples.size() >= 2) { clog << "comparing..." << endl; buildReport(samples); return 0; } else { cerr << "error: no stat samples detected in input file" << endl; return 255; } } polygraph-4.3.2/src/loganalyzers/PhaseTrace.h0000644000175000017500000000221511546440450020617 0ustar testertester /* Web Polygraph http://www.web-polygraph.org/ * Copyright 2003-2011 The Measurement Factory * Licensed under the Apache License, Version 2.0 */ #ifndef POLYGRAPH__LOGANALYZERS_PHASETRACE_H #define POLYGRAPH__LOGANALYZERS_PHASETRACE_H #include "base/StatIntvlRec.h" class PhaseTraceWin { public: StatIntvlRec *stats; Time start; bool contains(Time tm) const; }; // maintains trace data class PhaseTrace { public: static Time TheWinLen; public: PhaseTrace(); ~PhaseTrace(); void configure(const StatIntvlRec &stats); const StatIntvlRec &aggr() const { return theAggr; } int count() const { return theWins.count(); } const StatIntvlRec &winStats(int idx) const; Time winPos(int idx) const; Time start() const; void addIntvl(Time tm, const StatIntvlRec &r); void merge(const PhaseTrace &trace); void concat(const PhaseTrace &trace); protected: void mergeWin(const PhaseTraceWin &win); void concatWin(const PhaseTraceWin &win); PhaseTraceWin &allocWin(Time tm); bool findWin(Time tm, int &idx) const; protected: StatIntvlRec theAggr; // all intervals together Array theWins; }; #endif polygraph-4.3.2/src/loganalyzers/SomeInfo.cc0000644000175000017500000000525211546440450020461 0ustar testertester /* Web Polygraph http://www.web-polygraph.org/ * Copyright 2003-2011 The Measurement Factory * Licensed under the Apache License, Version 2.0 */ #include "base/polygraph.h" #include "xml/XmlTag.h" #include "xml/XmlText.h" #include "xml/XmlAttr.h" #include "loganalyzers/ReportBlob.h" #include "loganalyzers/BlobDb.h" #include "loganalyzers/Sample.h" #include "loganalyzers/SomeInfo.h" class MeasBlob: public ReportBlob { public: MeasBlob(const String &key, const String &title); void value(const XmlAttr &val); void unit(const String &unit); void typeId(const String &typeId); public: XmlNode *measurement; }; MeasBlob::MeasBlob(const String &key, const String &title): ReportBlob(key, title), measurement(0) { static const XmlAttr dtype("dtype", "span"); *this << dtype; static const String tagName = "measurement"; measurement = append(XmlTag(tagName)); } void MeasBlob::value(const XmlAttr &val) { *measurement->attrs() << val; } void MeasBlob::unit(const String &unit) { static String attrName = "unit"; *measurement->attrs() << XmlAttr(attrName, unit); } void MeasBlob::typeId(const String &typeId) { static String attrName = "typeId"; 
*measurement->attrs() << XmlAttr(attrName, typeId); } /* SomeInfo */ const ReportBlob &SomeInfo::addLink(BlobDb &db, const String &newKey, const String &oldKey) { ReportBlob blob(newKey, ReportBlob::NilTitle); blob << db.include(oldKey); return *db.add(blob); } const ReportBlob &SomeInfo::addMeasBlob(BlobDb &db, const String &name, double val, const String &unit, const String &title) { MeasBlob blob(name, title); blob.value(XmlAttr::Double("value", val)); blob.unit(unit); blob.typeId(NumberSample::TheId); const ReportBlob &res = *db.add(blob); return res; } const ReportBlob &SomeInfo::addMeasBlob(BlobDb &db, const String &name, Time val, const String &title) { MeasBlob blob(name, title); blob.value(XmlAttr::Double("value", val.secd())); blob.unit("sec"); blob.typeId(NumberSample::TheId); XmlTag image("image"); XmlText text; text.buf() << val; image << text; blob.measurement->addChild(image.clone()); return *db.add(blob); } const ReportBlob &SomeInfo::addMeasBlob(BlobDb &db, const String &name, const String &val, const String &unit, const String &title) { MeasBlob blob(name, title); blob.value(XmlAttr("value", val)); blob.unit(unit); blob.typeId(TextSample::TheId); return *db.add(blob); } const ReportBlob &SomeInfo::addNaMeasBlob(BlobDb &db, const String &name, const String &title) { static const XmlText textNote("no measurement was collected or stored at run-time"); MeasBlob measBlob(name, title); measBlob << XmlText("n/a") << db.reportNote("n/a", textNote); return *db.add(measBlob); } polygraph-4.3.2/src/loganalyzers/Formatter.cc0000644000175000017500000000472411546440450020710 0ustar testertester /* Web Polygraph http://www.web-polygraph.org/ * Copyright 2003-2011 The Measurement Factory * Licensed under the Apache License, Version 2.0 */ #include "base/polygraph.h" #include "xstd/h/iostream.h" #include "xml/XmlAttr.h" #include "loganalyzers/Formatter.h" inline const String &attr(const String &value) { return value; } inline const String &text(const String &value) { return value; } WebPageFormatter::WebPageFormatter(ostream *aPage): thePage(aPage) { } void WebPageFormatter::openSection(const String &id, const String &title) { *thePage << "
    " << endl; *thePage << "

    " << text(title) << "

    " << endl; } void WebPageFormatter::closeSection() { *thePage << "
    " << endl; } void WebPageFormatter::openTable(const String &id, const String &title) { *thePage << "" << endl; *thePage << "" << endl; } void WebPageFormatter::openTableAnonym() { *thePage << "
    " << text(title) << "
    " << endl; } void WebPageFormatter::closeTable() { *thePage << "
    " << endl; } void WebPageFormatter::openTableHeader(const String &id, const String &title) { *thePage << "" << endl; } void WebPageFormatter::closeTableHeader() { *thePage << "" << endl; } void WebPageFormatter::openTableRecord() { *thePage << "" << endl; } void WebPageFormatter::closeTableRecord() { *thePage << "" << endl; } void WebPageFormatter::openTableCell(const String &classId) { *thePage << " 0) XmlAttr("class", classId).print(*thePage, " "); *thePage << ">"; } void WebPageFormatter::closeTableCell() { *thePage << ""; } void WebPageFormatter::addTableCell(const String &cell) { openTableCell(""); addText(cell); closeTableCell(); } void WebPageFormatter::addLink(const String &addr, const String &text) { *thePage << ""; addText(text); *thePage << ""; } void WebPageFormatter::addText(const String &text) { *thePage << text; } void WebPageFormatter::addInteger(int v, const String &unit, bool addSign) { if (addSign && v >= 0) addText(v > 0 ? "+" : " "); *thePage << v << unit; } void WebPageFormatter::addNothing() { *thePage << " "; } void WebPageFormatter::make() { Assert(thePage); } polygraph-4.3.2/src/loganalyzers/ReportBlob.h0000644000175000017500000000122711546440450020654 0ustar testertester /* Web Polygraph http://www.web-polygraph.org/ * Copyright 2003-2011 The Measurement Factory * Licensed under the Apache License, Version 2.0 */ #ifndef POLYGRAPH__LOGANALYZERS_REPORTBLOB_H #define POLYGRAPH__LOGANALYZERS_REPORTBLOB_H #include "xstd/String.h" #include "xml/XmlTag.h" // an XML container with a key or name // a labeled "box" so others to find its contents class ReportBlob: public XmlTag { public: typedef String Key; static const String NilTitle; public: ReportBlob(const Key &aKey, const String &aTitle); virtual XmlNode *clone() const; const Key &key() const { return theKey; } protected: Key theKey; }; #endif polygraph-4.3.2/src/loganalyzers/RptmTraceFig.cc0000644000175000017500000000356411546440450021275 0ustar testertester /* Web Polygraph http://www.web-polygraph.org/ * Copyright 2003-2011 The Measurement Factory * Licensed under the Apache License, Version 2.0 */ #include "base/polygraph.h" #include #include "xml/XmlAttr.h" #include "loganalyzers/Stex.h" #include "loganalyzers/ReportBlob.h" #include "loganalyzers/BlobDb.h" #include "loganalyzers/RepOpts.h" #include "loganalyzers/PhaseInfo.h" #include "loganalyzers/PhaseTrace.h" #include "loganalyzers/RptmTraceFig.h" RptmTraceFig::RptmTraceFig(): thePhase(0), theTrace(0) { } void RptmTraceFig::stats(const Stex *aStex, const PhaseInfo *aPhase) { thePhase = aPhase; theTrace = &thePhase->trace(); Assert(theTrace); moreStats(aStex); } void RptmTraceFig::moreStats(const Stex *stex) { if (const TmSzStat *aggr = stex->aggr(*thePhase)) { if (aggr->count()) theStexes.append(stex); } } void RptmTraceFig::setCtrlOptions() { theLabelY1 = "msec"; ReportTraceFigure::setCtrlOptions(); } int RptmTraceFig::createCtrlFile() { if (ReportTraceFigure::createCtrlFile() < 0) return -1; // create plot command for (int i = 0; i < theStexes.count(); ++i) addPlotLine(theStexes[i]->name(), theLabelY1); addedAllPlotLines(); // dump data to plot int pointCount = 0; {for (int s = 0; s < theStexes.count(); ++s) { if (s) *theCtrlFile << 'e' << endl; // note: two empty lines do not work pointCount += dumpDataLines(theStexes[s]); }} return pointCount; } int RptmTraceFig::dumpDataLines(const Stex *stex) { int pointCount = 0; for (int i = 0; i < theTrace->count(); ++i) pointCount += dumpDataLine(stex, theTrace->winPos(i), theTrace->winStats(i)); return 
pointCount; } int RptmTraceFig::dumpDataLine(const Stex *stex, Time stamp, const StatIntvlRec &r) { const AggrStat &stat = stex->trace(r)->time(); if (stat.count()) { dumpTime(stamp); *theCtrlFile << ' ' << stat.mean() << endl; return 1; } return 0; } polygraph-4.3.2/src/loganalyzers/HistogramFigure.cc0000644000175000017500000000413211546440450022035 0ustar testertester /* Web Polygraph http://www.web-polygraph.org/ * Copyright 2003-2011 The Measurement Factory * Licensed under the Apache License, Version 2.0 */ #include "base/polygraph.h" #include "xstd/h/iostream.h" #include "xstd/gadgets.h" #include "base/Histogram.h" #include "loganalyzers/HistStex.h" #include "loganalyzers/PhaseInfo.h" #include "loganalyzers/HistogramFigure.h" HistogramFigure::HistogramFigure(): thePhase(0), theStex(0) { } void HistogramFigure::stats(const HistStex *aStex, const PhaseInfo *aPhase) { theStex = aStex; thePhase = aPhase; } void HistogramFigure::compareWith(const HistStex *stex) { Assert(thePhase); if (stex->value(*thePhase)) theComparison.append(stex); } void HistogramFigure::setCtrlOptions() { theLabelX1 = theStex->unit(); theLabelY1 = "cumulative %"; ReportFigure::setCtrlOptions(); *theCtrlFile << "set key right bottom" << endl; } int HistogramFigure::createCtrlFile() { if (ReportFigure::createCtrlFile() < 0) return -1; // make sure that the most interesting line is on top if (theStex->value(*thePhase)) theComparison.append(theStex); // create plot command for (int i = 0; i < theComparison.count(); ++i) addPlotLine(theComparison[i]->name(), theLabelY1); addedAllPlotLines(); // dump data to plot int pointCount = 0; for (int s = 0; s < theComparison.count(); ++s) { if (s) *theCtrlFile << 'e' << endl; // note: two empty lines do not work const Histogram *hist = theComparison[s]->value(*thePhase); const int count = hist->stats().count(); int c = 0; for (HistogramConstIter i(*hist); count && i; ++i) c += dumpDataLine(*i, count); if (theStex == theComparison[s]) pointCount = c; } return pointCount; } int HistogramFigure::dumpDataLine(const HistogramBin &bin, int totCount) { if (bin.count) { const double p = Percent(bin.accCount, totCount); if (p < 5) return 0; // ignore small values unless they contribute a lot if (Percent(bin.accCount-bin.count, totCount) > 95) return 0; // ingore large values unless they contribute a lot *theCtrlFile << bin.sup << ' ' << p << endl; return 1; } return 0; } polygraph-4.3.2/src/loganalyzers/ProcInfo.h0000644000175000017500000000457311546440450020330 0ustar testertester /* Web Polygraph http://www.web-polygraph.org/ * Copyright 2003-2011 The Measurement Factory * Licensed under the Apache License, Version 2.0 */ #ifndef POLYGRAPH__LOGANALYZERS_PROCINFO_H #define POLYGRAPH__LOGANALYZERS_PROCINFO_H #include "xstd/Time.h" #include "xstd/String.h" #include "xstd/BigSize.h" #include "xstd/Array.h" #include "loganalyzers/PhaseInfo.h" #include "loganalyzers/SomeInfo.h" class SideInfo; class PhaseInfo; class PhaseTrace; class BlobDb; // aggregate stats and other logged information about a Polygraph process class ProcInfo: public SomeInfo { public: ProcInfo(const String &name); ~ProcInfo(); const String &name() const; int logCat() const; const SideInfo *side() const; const String &benchmarkVersion() const; const String &pglCfg() const; Time startTime() const; const PhaseInfo &allPhasesPhase() const; const PhaseInfo &execScopePhase() const; // delete these, use allPhaseStats() ? 
int repCount(const Scope &scope) const; int hitCount(const Scope &scope) const; int offeredHitCount(const Scope &scope) const; int uselessProxyValidationCount(const Scope &scope) const; BigSize repVolume(const Scope &scope) const; BigSize hitVolume(const Scope &scope) const; BigSize offeredHitVolume(const Scope &scope) const; BigSize uselessProxyValidationVolume(const Scope &scope) const; AggrStat lastReqByteWritten(const Scope &scope) const; AggrStat lastReqByteRead(const Scope &scope) const; AggrStat firstRespByteWritten(const Scope &scope) const; AggrStat firstRespByteRead(const Scope &scope) const; void logCat(int aLogCat); void side(SideInfo *aSide); void benchmarkVersion(const String &aVersion); void pglCfg(const String &aPglCfg); void startTime(Time aStartTime); void noteIntvl(const StatIntvlRec &r, const String &phaseName); void addPhase(const StatPhaseRec &r); void noteEndOfLog(); int phaseCount() const; const PhaseInfo &phase(int idx) const; const PhaseInfo &phase(const String &name) const; const PhaseInfo *hasPhase(const String &name) const; PhaseTrace *tracePhase(const String &name); void checkConsistency(); void compileStats(BlobDb &db); protected: String theName; SideInfo *theSide; int theLogCat; String theBenchmarkVersion; String thePglCfg; Time theStartTime; Array thePhases; PhaseInfo theExecScopePhase; PhaseInfo theAllPhasesPhase; }; #endif polygraph-4.3.2/src/loganalyzers/RptmHistFig.cc0000644000175000017500000000102311546440450021132 0ustar testertester /* Web Polygraph http://www.web-polygraph.org/ * Copyright 2003-2011 The Measurement Factory * Licensed under the Apache License, Version 2.0 */ #include "base/polygraph.h" #include "base/Histogram.h" #include "base/TmSzHistStat.h" #include "loganalyzers/Stex.h" #include "loganalyzers/RptmHistFig.h" RptmHistFig::RptmHistFig(): TmSzHistFig("msec") { } const Histogram *RptmHistFig::extractHist(const Stex *stex, const PhaseInfo &info) const { const TmSzHistStat *h = stex->hist(info); return h ? 
&h->time() : 0; } polygraph-4.3.2/src/loganalyzers/LoadStexes.h0000644000175000017500000000443611546440450020662 0ustar testertester /* Web Polygraph http://www.web-polygraph.org/ * Copyright 2003-2011 The Measurement Factory * Licensed under the Apache License, Version 2.0 */ #ifndef POLYGRAPH__LOGANALYZERS_LOADSTEXES_H #define POLYGRAPH__LOGANALYZERS_LOADSTEXES_H #include "loganalyzers/LoadStex.h" // total side load (all requests or all responses) class SideLoadStex: public LoadStex { public: typedef double (StatIntvlRec::*StatPtr)() const; public: SideLoadStex(const String &aKey, const String &aName, StatPtr aRate, StatPtr aBwidth); virtual double rate(const StatIntvlRec &rec) const; virtual double bwidth(const StatIntvlRec &rec) const; protected: StatPtr theRateStats; StatPtr theBwidthStats; }; // load based on TmSzStex class TmSzLoadStex: public LoadStex { public: TmSzLoadStex(const Stex *aStex); virtual double rate(const StatIntvlRec &rec) const; virtual double bwidth(const StatIntvlRec &rec) const; protected: const Stex *theStex; }; #if FUTURE_CODE // load based on TmSzStat data class TmSzLoadStex: public LoadStex { public: typedef TmSzStat (StatIntvlRec::*StatPtr); public: TmSzLoadStex(const String &aKey, const String &aName, StatPtr aStats): LoadStex(aKey, aName), theStats(aStats) {} virtual double rate(const StatIntvlRec &rec) const; virtual double bwidth(const StatIntvlRec &rec) const; protected: StatPtr theStats; }; // load based on HRStat data class HrLoadStex: public LoadStex { public: typedef HRStat (StatIntvlRec::*StatPtr); public: HrLoadStex(const String &aKey, const String &aName, StatPtr aStats): LoadStex(aKey, aName), theStats(aStats) {} virtual double rate(const StatIntvlRec &rec) const; virtual double bwidth(const StatIntvlRec &rec) const; protected: StatPtr theStats; }; #endif // total protocol-specific side load (all protocol messages) class ProtoSideLoadStex: public LoadStex { public: typedef double (ProtoIntvlStat::*StatPtr)(Time) const; typedef ProtoIntvlStat StatIntvlRec::*ProtoPtr; public: ProtoSideLoadStex(const String &aKey, const String &aName, ProtoPtr aProto, StatPtr aRate, StatPtr aBwidth); virtual double rate(const StatIntvlRec &rec) const; virtual double bwidth(const StatIntvlRec &rec) const; protected: ProtoPtr theProto; StatPtr theRateStats; StatPtr theBwidthStats; }; #endif polygraph-4.3.2/src/loganalyzers/SizeHistFig.cc0000644000175000017500000000102411546440450021123 0ustar testertester /* Web Polygraph http://www.web-polygraph.org/ * Copyright 2003-2011 The Measurement Factory * Licensed under the Apache License, Version 2.0 */ #include "base/polygraph.h" #include "base/Histogram.h" #include "base/TmSzHistStat.h" #include "loganalyzers/Stex.h" #include "loganalyzers/SizeHistFig.h" SizeHistFig::SizeHistFig(): TmSzHistFig("bytes") { } const Histogram *SizeHistFig::extractHist(const Stex *stex, const PhaseInfo &info) const { const TmSzHistStat *h = stex->hist(info); return h ? 
&h->size() : 0; } polygraph-4.3.2/src/dns/0000755000175000017500000000000011546445453014511 5ustar testertesterpolygraph-4.3.2/src/dns/DnsResp.h0000644000175000017500000000157411546440450016237 0ustar testertester /* Web Polygraph http://www.web-polygraph.org/ * Copyright 2003-2011 The Measurement Factory * Licensed under the Apache License, Version 2.0 */ #ifndef POLYGRAPH__DNS_DNSRESP_H #define POLYGRAPH__DNS_DNSRESP_H #include "xstd/Array.h" #include "dns/DnsMsg.h" class Socket; class IBStream; // common type for all DNS messages class DnsResp: public DnsMsg { public: typedef Array Answers; public: DnsResp(); void reset(); bool recv(Socket &s); const Answers &answers() const { return theAnswers; } const Error &error() const { return theError; } protected: bool parseResp(); bool parseName(IBStream &is, String &name); bool parseName(Size &off, String &name); void interpretRCode(Field rCode); protected: Answers theAnswers; Error theError; private: /* temporary info for parsing */ const char *theBuf; int theBufSize; }; #endif polygraph-4.3.2/src/dns/DnsQuery.cc0000644000175000017500000000265311546440450016570 0ustar testertester /* Web Polygraph http://www.web-polygraph.org/ * Copyright 2003-2011 The Measurement Factory * Licensed under the Apache License, Version 2.0 */ #include "base/polygraph.h" #include "xstd/h/sstream.h" #include "xstd/Socket.h" #include "base/BStream.h" #include "runtime/ErrorMgr.h" #include "runtime/polyErrors.h" #include "dns/DnsQuery.h" static String DnsQueryStrName = "dnsquerystr"; bool DnsQuery::sendTo(const NetAddr &ns, Socket &s) { char buf[512]; ofixedstream ostr(buf, sizeof(buf)); OBStream os; os.configure(&ostr, DnsQueryStrName); Assert(dumpAQuery(os)); Assert(os.offset() <= SizeOf(buf)); return s.sendTo(buf, os.offset(), ns) == os.offset(); } bool DnsQuery::dumpAQuery(OBStream &os) const { /* header section */ os << id(); // XXX: check that we dump as short // XXX: check that we dump as short const Field flags = flagQuery | flagAQuery | flagRecursionDesired; os << flags; os << (Field)1; // QDCOUNT os << (Field)0; // ANCOUNT os << (Field)0; // NSCOUNT os << (Field)0; // ARCOUNT /* question section */ dumpName(os, theQueryAddr); os << (Field)theType; os << (Field)classIn; return os.good(); } bool DnsQuery::dumpName(OBStream &os, const NetAddr &addr) const { //