pax_global_header00006660000000000000000000000064125336724210014517gustar00rootroot0000000000000052 comment=e2806aedcc776d5a5a0dbd2992c303e36761ef97
kafkacat-1.2.0/000077500000000000000000000000001253367242100132645ustar00rootroot00000000000000
kafkacat-1.2.0/.dir-locals.el000066400000000000000000000000521253367242100157120ustar00rootroot00000000000000(
 (c-mode . ((c-file-style . "linux")))
 )
kafkacat-1.2.0/.gitignore000066400000000000000000000001401253367242100152470ustar00rootroot00000000000000\#*
*~
*.o
*.d
kafkacat
config.cache
config.log*
config.h
Makefile.config
tmp-bootstrap
*.offset
kafkacat-1.2.0/.travis.yml000066400000000000000000000001301253367242100153700ustar00rootroot00000000000000language: c
compiler:
 - gcc
 - clang
os:
 - linux
 - osx
script: ./bootstrap.sh
kafkacat-1.2.0/LICENSE000066400000000000000000000025131253367242100142720ustar00rootroot00000000000000librdkafka - Apache Kafka C driver library

Copyright (c) 2012, Magnus Edenhill
All rights reserved.

Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:

1. Redistributions of source code must retain the above copyright notice,
   this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright notice,
   this list of conditions and the following disclaimer in the documentation
   and/or other materials provided with the distribution.

THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE.
kafkacat-1.2.0/Makefile000077500000000000000000000007741253367242100147360ustar00rootroot00000000000000include Makefile.config

BIN=	kafkacat

SRCS_y=	kafkacat.c format.c
SRCS_$(ENABLE_JSON) += json.c

OBJS=	$(SRCS_y:.c=.o)

.PHONY:

all: $(BIN)

include mklove/Makefile.base

# librdkafka must be compiled with -gstrict-dwarf, but kafkacat must not,
# due to some clang bug on OSX 10.9
CPPFLAGS := $(subst strict-dwarf,,$(CPPFLAGS))

install: bin-install install-man

install-man:
	$(INSTALL) -d $$DESTDIR$(man1dir) && \
	$(INSTALL) kafkacat.1 $$DESTDIR$(man1dir)

clean: bin-clean

-include $(DEPS)
kafkacat-1.2.0/README.md000066400000000000000000000064061253367242100145510ustar00rootroot00000000000000kafkacat
========
Copyright (c) 2014-2015 Magnus Edenhill

[https://github.com/edenhill/kafkacat](https://github.com/edenhill/kafkacat)

**kafkacat** is a generic non-JVM producer and consumer for Apache Kafka 0.8;
think of it as a netcat for Kafka.

In **producer** mode kafkacat reads messages from stdin, delimited with a
configurable delimiter (-D, defaults to newline), and produces them to the
provided Kafka cluster (-b), topic (-t) and partition (-p).

In **consumer** mode kafkacat reads messages from a topic and partition and
prints them to stdout using the configured message delimiter.

kafkacat also features a Metadata list (-L) mode to display the current
state of the Kafka cluster and its topics and partitions.

kafkacat is fast and lightweight; statically linked it is no more than 150Kb.

# Install

On recent enough Debian systems:

````
apt-get install kafkacat
````

And on Mac OS X with homebrew installed:

````
brew install kafkacat
````

Otherwise follow directions below.

# Requirements

 * librdkafka - https://github.com/edenhill/librdkafka
 * libyajl (for JSON support, optional)

On Ubuntu or Debian: `sudo apt-get install librdkafka-dev libyajl-dev`

# Build

    ./configure
    make
    sudo make install

# Quick build

The bootstrap.sh build script will download and build the required
dependencies, providing a quick and easy means of building kafkacat.
Internet connectivity and wget or curl are required by this script.
The resulting kafkacat binary will be linked statically to avoid
runtime dependencies.

    ./bootstrap.sh

# Examples

Read messages from stdin, produce to 'syslog' topic with snappy compression

    $ tail -f /var/log/syslog | kafkacat -b mybroker -t syslog -z snappy

Read messages from Kafka 'syslog' topic, print to stdout

    $ kafkacat -b mybroker -t syslog

Produce messages from file (one file is one message)

    $ kafkacat -P -b mybroker -t filedrop -p 0 myfile1.bin /etc/motd thirdfile.tgz

Read the last 2000 messages from 'syslog' topic, then exit

    $ kafkacat -C -b mybroker -t syslog -p 0 -o -2000 -e

Consume from all partitions from 'syslog' topic

    $ kafkacat -C -b mybroker -t syslog

Output consumed messages in JSON envelope:

    $ kafkacat -b mybroker -t syslog -J

Output consumed messages according to format string:

    $ kafkacat -b mybroker -t syslog -f 'Topic %t[%p], offset: %o, key: %k, payload: %S bytes: %s\n'

Metadata listing

````
$ kafkacat -L -b mybroker
Metadata for all topics (from broker 1: mybroker:9092/1):
 3 brokers:
  broker 1 at mybroker:9092
  broker 2 at mybrokertoo:9092
  broker 3 at thirdbroker:9092
 16 topics:
  topic "syslog" with 3 partitions:
    partition 0, leader 3, replicas: 1,2,3, isrs: 1,2,3
    partition 1, leader 1, replicas: 1,2,3, isrs: 1,2,3
    partition 2, leader 1, replicas: 1,2, isrs: 1,2
  topic "rdkafkatest1_auto_49f744a4327b1b1e" with 2 partitions:
    partition 0, leader 3, replicas: 3, isrs: 3
    partition 1, leader 1, replicas: 1, isrs: 1
  topic "rdkafkatest1_auto_e02f58f2c581cba" with 2 partitions:
    partition 0, leader 3, replicas: 3, isrs: 3
    partition 1, leader 1, replicas: 1, isrs: 1
  ....
````

JSON metadata listing

    $ kafkacat -b mybroker -L -J

Pretty-printed JSON metadata listing

    $ kafkacat -b mybroker -L -J | jq .

kafkacat-1.2.0/bootstrap.sh000077500000000000000000000040511253367242100156400ustar00rootroot00000000000000#!/bin/bash
#
# This script provides a quick build alternative:
#  * Dependencies are downloaded and built automatically.
#  * kafkacat is built automatically.
#  * kafkacat is linked statically to avoid runtime dependencies.
#
# While this might not be the preferred method of building kafkacat, it
# is the easiest and quickest way.
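#
# Usage (assumes internet connectivity, wget or curl, and the usual C
# build tools, as described in the README):
#   ./bootstrap.sh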
# set -o errexit -o nounset -o pipefail function github_download { repo=$1 version=$2 dir=$3 url=https://github.com/${repo}/archive/${version}.tar.gz if [[ -d $dir ]]; then echo "Directory $dir already exists, not downloading $url" return 0 fi echo "Downloading $url to $dir" if which wget 2>&1 > /dev/null; then DL='wget -q -O-' else DL='curl -s -L' fi mkdir -p "$dir" pushd "$dir" > /dev/null ($DL "$url" | tar -xzf - --strip-components 1) || exit 1 popd > /dev/null } function build { dir=$1 cmds=$2 echo "Building $dir" pushd $dir > /dev/null set +o errexit eval $cmds ret=$? set -o errexit popd > /dev/null if [[ $ret == 0 ]]; then echo "Build of $dir SUCCEEDED!" else echo "Build of $dir FAILED!" fi return $ret } mkdir -p tmp-bootstrap pushd tmp-bootstrap > /dev/null github_download "edenhill/librdkafka" "master" "librdkafka" github_download "lloyd/yajl" "master" "libyajl" build librdkafka "./configure && make && make DESTDIR=\"${PWD}/\" install" || (echo "Failed to build librdkafka: bootstrap failed" ; false) build libyajl "./configure && make && make DESTDIR=\"${PWD}/\" install" || (echo "Failed to build libyajl: JSON support will probably be disabled" ; true) popd > /dev/null echo "Building kafkacat" export CPPFLAGS="${CPPFLAGS:-} -Itmp-bootstrap/usr/local/include" export LDFLAGS="${LDFLAGS:-} -Ltmp-bootstrap/usr/local/lib" export STATIC_LIB_rdkafka="tmp-bootstrap/usr/local/lib/librdkafka.a" export STATIC_LIB_yajl="tmp-bootstrap/usr/local/lib/libyajl_s.a" ./configure --enable-static --enable-json make echo "" echo "Success! kafkacat is now built" echo "" kafkacat-1.2.0/configure000077500000000000000000000107221253367242100151750ustar00rootroot00000000000000#!/bin/bash # MKL_CONFIGURE_ARGS="$0 $*" # Load base module source mklove/modules/configure.base # Read some special command line options right away that must be known prior to # sourcing modules. mkl_in_list "$*" "--no-download" && MKL_NO_DOWNLOAD=1 # Disable downloads when --help is used to avoid blocking calls. mkl_in_list "$*" "--help" && MKL_NO_DOWNLOAD=1 mkl_in_list "$*" "--debug" && MKL_DEBUG=1 # This is the earliest possible time to check for color support in # terminal because mkl_check_terminal_color_support uses mkl_dbg which # needs to know if MKL_DEBUG is set mkl_check_terminal_color_support # Delete temporary Makefile and header files on exit. trap "{ rm -f $MKL_OUTMK $MKL_OUTH; }" EXIT ## ## Load builtin modules ## # Builtin options, etc. mkl_require builtin # Host/target support mkl_require host # Compiler detection mkl_require cc # Load application provided modules (in current directory), if any. for fname in configure.* ; do if [[ $fname = 'configure.*' ]]; then continue fi # Skip temporary files if [[ $fname = *~ ]]; then continue fi mkl_require $fname done ## ## Argument parsing (options) ## ## # Parse arguments while [[ ! -z $@ ]]; do if [[ $1 != --* ]]; then mkl_err "Unknown non-option argument: $1" mkl_usage exit 1 fi opt=${1#--} shift if [[ $opt = *=* ]]; then name="${opt%=*}" arg="${opt#*=}" eqarg=1 else name="$opt" arg="" eqarg=0 fi safeopt="$(mkl_env_esc $name)" if ! mkl_func_exists opt_$safeopt ; then mkl_err "Unknown option $opt" mkl_usage exit 1 fi # Check if this option needs an argument. reqarg=$(mkl_meta_get "MKL_OPT_ARGS" "$(mkl_env_esc $name)") if [[ ! -z $reqarg ]]; then if [[ $eqarg == 0 && -z $arg ]]; then arg=$1 shift if [[ -z $arg ]]; then mkl_err "Missing argument to option --$name $reqarg" exit 1 fi fi else if [[ ! 
-z $arg ]]; then
            mkl_err "Option --$name expects no argument"
            exit 1
        fi
        arg=y
    fi

    case $name in
        re|reconfigure)
            oldcmd=$(grep '^# configure exec: ' config.log | \
                sed -e 's/^\# configure exec: //')
            if [[ -z $oldcmd ]]; then
                mkl_err "No previous execution found in config.log"
                exit 1
            fi
            echo "Reconfiguring: $oldcmd"
            exec $oldcmd
            ;;

        list-modules)
            echo "Modules loaded:"
            for mod in $MKL_MODULES ; do
                echo " $mod"
            done
            exit 0
            ;;

        list-checks)
            echo "Check functions in calling order:"
            for mf in $MKL_CHECKS ; do
                mod=${mf%:*}
                func=${mf#*:}
                echo -e "${MKL_GREEN}From module $mod:$MKL_CLR_RESET"
                declare -f $func
                echo ""
            done
            exit 0
            ;;

        update-modules)
            fails=0
            echo "Updating modules"
            for mod in $MKL_MODULES ; do
                echo -n "Updating $mod..."
                if mkl_module_download "$mod" > /dev/null ; then
                    echo -e "${MKL_GREEN}ok${MKL_CLR_RESET}"
                else
                    echo -e "${MKL_RED}failed${MKL_CLR_RESET}"
                    fails=$(expr $fails + 1)
                fi
            done
            exit $fails
            ;;

        help)
            mkl_usage
            exit 0
            ;;

        *)
            opt_$safeopt $arg || exit 1
            mkl_var_append MKL_OPTS_SET "$safeopt"
            ;;
    esac
done

if [[ ! -z $MKL_CLEAN ]]; then
    mkl_clean
    exit 0
fi

# Move away previous log file
[[ -f $MKL_OUTDBG ]] && mv $MKL_OUTDBG ${MKL_OUTDBG}.old

# Create output files
echo "# configure exec: $0 $*" >> $MKL_OUTDBG
echo "# On $(date)" >> $MKL_OUTDBG

rm -f $MKL_OUTMK $MKL_OUTH

mkl_write_mk "# Automatically generated by $0 $*"
mkl_write_h "// Automatically generated by $0 $*"
mkl_write_h "#pragma once"

# Load cache file
mkl_cache_read

# Run checks
mkl_checks_run

# Check accumulated failures, will not return on failure.
mkl_check_fails

# Generate outputs
mkl_generate

# Summarize what happened
mkl_summary

# Write cache file
mkl_cache_write

echo ""
echo "Now type 'make' to build"
trap - EXIT
exit 0
kafkacat-1.2.0/configure.kafkacat000066400000000000000000000025611253367242100167400ustar00rootroot00000000000000#!/bin/bash
#

mkl_require good_cflags
mkl_require gitversion as KAFKACAT_VERSION

function checks {

    # Check that librdkafka is available, and allow to link it statically.
    mkl_meta_set "librdkafka" "desc" "librdkafka is available at http://github.com/edenhill/librdkafka. To quickly download all dependencies and build kafkacat try ./bootstrap.sh"
    mkl_meta_set "librdkafka" "deb" "librdkafka-dev"
    mkl_lib_check --static=-lrdkafka "librdkafka" "" fail CC "-lrdkafka -lpthread -lz" \
        "#include <librdkafka/rdkafka.h>"

    # Make sure rdkafka is new enough.
    mkl_meta_set "librdkafkaver" "name" "librdkafka metadata API"
    mkl_meta_set "librdkafkaver" "desc" "librdkafka 0.8.4 or later is required for the Metadata API"
    mkl_compile_check "librdkafkaver" "" fail CC "" \
        "#include <librdkafka/rdkafka.h>
struct rd_kafka_metadata foo;"

    # -lrt required on linux
    mkl_lib_check "librt" "" cont CC "-lrt"

    mkl_meta_set "yajl" "deb" "libyajl-dev"
    # Check for JSON library (yajl)
    if [[ $WITH_JSON == y ]] && \
        mkl_lib_check --static=-lyajl "yajl" HAVE_YAJL disable CC "-lyajl" \
        "#include <yajl/yajl_version.h>
#if YAJL_MAJOR >= 2
#else
#error \"Requires libyajl2\"
#endif
"
    then
        mkl_allvar_set "json" ENABLE_JSON y
    fi
}

mkl_toggle_option "kafkacat" WITH_JSON --enable-json "JSON support (requires libyajl2)" y
kafkacat-1.2.0/format.c000066400000000000000000000213461253367242100147260ustar00rootroot00000000000000/*
 * kafkacat - Apache Kafka consumer and producer
 *
 * Copyright (c) 2015, Magnus Edenhill
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are met:
 *
 * 1.
Redistributions of source code must retain the above copyright notice, * this list of conditions and the following disclaimer. * 2. Redistributions in binary form must reproduce the above copyright notice, * this list of conditions and the following disclaimer in the documentation * and/or other materials provided with the distribution. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE * POSSIBILITY OF SUCH DAMAGE. */ #include "kafkacat.h" static void fmt_add (fmt_type_t type, const char *str, int len) { if (conf.fmt_cnt == KC_FMT_MAX_SIZE) FATAL("Too many formatters & strings (KC_FMT_MAX_SIZE=%i)", KC_FMT_MAX_SIZE); conf.fmt[conf.fmt_cnt].type = type; /* For STR types */ if (len) { const char *s; char *d; conf.fmt[conf.fmt_cnt].str = d = malloc(len+1); memcpy(d, str, len); d[len] = '\0'; s = d; /* Convert \.. sequences */ while (*s) { if (*s == '\\' && *(s+1)) { int base = 0; const char *next; s++; switch (*s) { case 't': *d = '\t'; break; case 'n': *d = '\n'; break; case 'r': *d = '\r'; break; case 'x': s++; base = 16; /* FALLTHRU */ case '0'...'9': *d = (char)strtoul(s, (char **)&next, base); if (next > s) s = next-1; break; default: *d = *s; break; } } else { *d = *s; } s++; d++; } *d = '\0'; conf.fmt[conf.fmt_cnt].str_len = strlen(conf.fmt[conf.fmt_cnt].str); } conf.fmt_cnt++; } /** * Parse a format string to create a formatter list. 
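 *
 * Recognized tokens (the same set documented for the -f option):
 * %o (offset), %k (key), %K (key length), %s (payload), %S (payload
 * length), %t (topic), %p (partition) and %% (a literal '%'); any other
 * character following '%' is a fatal error.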
 */
void fmt_parse (const char *fmt) {
        const char *s = fmt, *t;

        while (*s) {
                if ((t = strchr(s, '%'))) {
                        if (t > s)
                                fmt_add(KC_FMT_STR, s, (int)(t-s));

                        s = t+1;
                        switch (*s)
                        {
                        case 'o':
                                fmt_add(KC_FMT_OFFSET, NULL, 0);
                                break;
                        case 'k':
                                fmt_add(KC_FMT_KEY, NULL, 0);
                                break;
                        case 'K':
                                fmt_add(KC_FMT_KEY_LEN, NULL, 0);
                                break;
                        case 's':
                                fmt_add(KC_FMT_PAYLOAD, NULL, 0);
                                break;
                        case 'S':
                                fmt_add(KC_FMT_PAYLOAD_LEN, NULL, 0);
                                break;
                        case 't':
                                fmt_add(KC_FMT_TOPIC, NULL, 0);
                                break;
                        case 'p':
                                fmt_add(KC_FMT_PARTITION, NULL, 0);
                                break;
                        case '%':
                                fmt_add(KC_FMT_STR, s, 1);
                                break;
                        case '\0':
                                FATAL("Empty formatter");
                                break;
                        default:
                                FATAL("Unsupported formatter: %%%c", *s);
                                break;
                        }
                        s++;
                } else {
                        fmt_add(KC_FMT_STR, s, strlen(s));
                        break;
                }
        }
}

void fmt_init (void) {
#ifdef ENABLE_JSON
        if (conf.flags & CONF_F_FMT_JSON)
                fmt_init_json();
#endif
}

void fmt_term (void) {
#ifdef ENABLE_JSON
        if (conf.flags & CONF_F_FMT_JSON)
                fmt_term_json();
#endif
}

/**
 * Delimited output
 */
static void fmt_msg_output_str (FILE *fp,
                                const rd_kafka_message_t *rkmessage) {
        int i;

        for (i = 0 ; i < conf.fmt_cnt ; i++) {
                int r = 1;

                switch (conf.fmt[i].type)
                {
                case KC_FMT_OFFSET:
                        r = fprintf(fp, "%"PRId64, rkmessage->offset);
                        break;

                case KC_FMT_KEY:
                        if (rkmessage->key_len)
                                r = fwrite(rkmessage->key,
                                           rkmessage->key_len, 1, fp);
                        else if (conf.flags & CONF_F_NULL)
                                r = fwrite(conf.null_str,
                                           conf.null_str_len, 1, fp);
                        break;

                case KC_FMT_KEY_LEN:
                        r = fprintf(fp, "%zd",
                                    /* Use -1 to indicate NULL keys */
                                    rkmessage->key ? rkmessage->key_len : -1);
                        break;

                case KC_FMT_PAYLOAD:
                        if (rkmessage->len)
                                r = fwrite(rkmessage->payload,
                                           rkmessage->len, 1, fp);
                        else if (conf.flags & CONF_F_NULL)
                                r = fwrite(conf.null_str,
                                           conf.null_str_len, 1, fp);
                        break;

                case KC_FMT_PAYLOAD_LEN:
                        r = fprintf(fp, "%zd",
                                    /* Use -1 to indicate NULL messages */
                                    rkmessage->payload ? rkmessage->len : -1);
                        break;

                case KC_FMT_STR:
                        r = fwrite(conf.fmt[i].str, conf.fmt[i].str_len, 1, fp);
                        break;

                case KC_FMT_TOPIC:
                        r = fprintf(fp, "%s",
                                    rd_kafka_topic_name(rkmessage->rkt));
                        break;

                case KC_FMT_PARTITION:
                        r = fprintf(fp, "%"PRId32, rkmessage->partition);
                        break;
                }

                if (r < 1)
                        FATAL("Write error for message "
                              "of %zd bytes at offset %"PRId64": %s",
                              rkmessage->len, rkmessage->offset,
                              strerror(errno));
        }
}

/**
 * Format and output a received message.
 */
void fmt_msg_output (FILE *fp, const rd_kafka_message_t *rkmessage) {
#ifdef ENABLE_JSON
        if (conf.flags & CONF_F_FMT_JSON)
                fmt_msg_output_json(fp, rkmessage);
        else
#endif
                fmt_msg_output_str(fp, rkmessage);
}
kafkacat-1.2.0/json.c000066400000000000000000000156341253367242100144100ustar00rootroot00000000000000/*
 * kafkacat - Apache Kafka consumer and producer
 *
 * Copyright (c) 2015, Magnus Edenhill
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are met:
 *
 * 1. Redistributions of source code must retain the above copyright notice,
 *    this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright notice,
 *    this list of conditions and the following disclaimer in the documentation
 *    and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
 * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED.
 * IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
 * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 * POSSIBILITY OF SUCH DAMAGE.
 */

#include "kafkacat.h"

#include <yajl/yajl_gen.h>


#define JS_STR(G, STR) do {                                             \
        const char *_s = (STR);                                         \
        yajl_gen_string(G, (const unsigned char *)_s, strlen(_s));      \
        } while (0)


void fmt_msg_output_json (FILE *fp, const rd_kafka_message_t *rkmessage) {
        yajl_gen g;
        const char *topic = rd_kafka_topic_name(rkmessage->rkt);
        const unsigned char *buf;
        size_t len;

        g = yajl_gen_alloc(NULL);

        yajl_gen_map_open(g);

        JS_STR(g, "topic");
        JS_STR(g, topic);

        JS_STR(g, "partition");
        yajl_gen_integer(g, (int)rkmessage->partition);

        JS_STR(g, "offset");
        yajl_gen_integer(g, (long long int)rkmessage->offset);

        JS_STR(g, "key");
        yajl_gen_string(g, (const unsigned char *)rkmessage->key,
                        rkmessage->key_len);

        JS_STR(g, "payload");
        yajl_gen_string(g, (const unsigned char *)rkmessage->payload,
                        rkmessage->len);

        yajl_gen_map_close(g);

        yajl_gen_get_buf(g, &buf, &len);

        if (fwrite(buf, len, 1, fp) != 1 ||
            (conf.fmt[0].str_len > 0 &&
             fwrite(conf.fmt[0].str, conf.fmt[0].str_len, 1, fp) != 1))
                FATAL("Output write error: %s", strerror(errno));

        yajl_gen_free(g);
}


/**
 * Print metadata information
 */
void metadata_print_json (const struct rd_kafka_metadata *metadata) {
        yajl_gen g;
        int i, j, k;
        const unsigned char *buf;
        size_t len;

        g = yajl_gen_alloc(NULL);

        yajl_gen_map_open(g);

        JS_STR(g, "originating_broker");
        yajl_gen_map_open(g);
        JS_STR(g, "id");
        yajl_gen_integer(g, (long long int)metadata->orig_broker_id);
        JS_STR(g, "name");
        JS_STR(g, metadata->orig_broker_name);
        yajl_gen_map_close(g);

        JS_STR(g, "query");
        yajl_gen_map_open(g);
        JS_STR(g, "topic");
        JS_STR(g, conf.topic ?
: "*"); yajl_gen_map_close(g); /* Iterate brokers */ JS_STR(g, "brokers"); yajl_gen_array_open(g); for (i = 0 ; i < metadata->broker_cnt ; i++) { int blen = strlen(metadata->brokers[i].host); char *host = alloca(blen+1+5+1); sprintf(host, "%s:%i", metadata->brokers[i].host, metadata->brokers[i].port); yajl_gen_map_open(g); JS_STR(g, "id"); yajl_gen_integer(g, (long long int)metadata->brokers[i].id); JS_STR(g, "name"); JS_STR(g, host); yajl_gen_map_close(g); } yajl_gen_array_close(g); /* Iterate topics */ JS_STR(g, "topics"); yajl_gen_array_open(g); for (i = 0 ; i < metadata->topic_cnt ; i++) { const struct rd_kafka_metadata_topic *t = &metadata->topics[i]; yajl_gen_map_open(g); JS_STR(g, "topic"); JS_STR(g, t->topic); if (t->err) { JS_STR(g, "error"); JS_STR(g, rd_kafka_err2str(t->err)); } JS_STR(g, "partitions"); yajl_gen_array_open(g); /* Iterate topic's partitions */ for (j = 0 ; j < t->partition_cnt ; j++) { const struct rd_kafka_metadata_partition *p; p = &t->partitions[j]; yajl_gen_map_open(g); JS_STR(g, "partition"); yajl_gen_integer(g, (long long int)p->id); if (p->err) { JS_STR(g, "error"); JS_STR(g, rd_kafka_err2str(p->err)); } JS_STR(g, "leader"); yajl_gen_integer(g, (long long int)p->leader); /* Iterate partition's replicas */ JS_STR(g, "replicas"); yajl_gen_array_open(g); for (k = 0 ; k < p->replica_cnt ; k++) { yajl_gen_map_open(g); JS_STR(g, "id"); yajl_gen_integer(g, (long long int)p->replicas[k]); yajl_gen_map_close(g); } yajl_gen_array_close(g); /* Iterate partition's ISRs */ JS_STR(g, "isrs"); yajl_gen_array_open(g); for (k = 0 ; k < p->isr_cnt ; k++) { yajl_gen_map_open(g); JS_STR(g, "id"); yajl_gen_integer(g, (long long int)p->isrs[k]); yajl_gen_map_close(g); } yajl_gen_array_close(g); yajl_gen_map_close(g); } yajl_gen_array_close(g); yajl_gen_map_close(g); } yajl_gen_array_close(g); yajl_gen_map_close(g); yajl_gen_get_buf(g, &buf, &len); if (fwrite(buf, len, 1, stdout) != 1) FATAL("Output write error: %s", strerror(errno)); yajl_gen_free(g); } void fmt_init_json (void) { } void fmt_term_json (void) { } kafkacat-1.2.0/kafkacat.1000066400000000000000000000026311253367242100151150ustar00rootroot00000000000000.Dd $Mdocdate: December 09 2014 $ .Dt KAFKACAT 1 .Os .Sh NAME .Nm kafkacat .Nd generic producer and consumer for Apache Kafka .Sh SYNOPSIS .Nm .Fl C | P | L .Fl t Ar topic .Op Fl p Ar partition .Fl b Ar brokers Op , Ar ... .Op Fl D Ar delim .Op Fl K Ar delim .Op Fl c Ar cnt .Op Fl X Ar list .Op Fl X Ar prop=val .Op Fl X Ar dump .Op Fl d Ar dbg Op , Ar ... .Op Fl q .Op Fl v .Op Fl Z .Op specific options .Nm .Fl C .Op generic options .Op Fl o Ar offset .Op Fl e .Op Fl O .Op Fl u .Op Fl J .Op Fl f Ar fmtstr .Nm .Fl P .Op generic options .Op Fl z Ar snappy | gzip .Op Fl p Li -1 .Op Ar file Op ... .Nm .Fl L .Op generic options .Op Fl t Ar topic .Sh DESCRIPTION .Nm is a generic non-JVM producer and consumer for Apache Kafka 0.8, think of it as a netcat for Kafka. .Pp In producer mode ( .Fl P ), .Nm reads messages from stdin, delimited with a configurable delimeter and produces them to the provided Kafka cluster, topic and partition. In consumer mode ( .Fl C ), .Nm reads messages from a topic and partition and prints them to stdout using the configured message delimiter. .Pp If neither .Fl P or .Fl C are specified .Nm attempts to figure out the mode automatically based on stdin/stdout tty types. .Pp .Nm also features a metadata list mode ( .Fl L ), to display the current state of the Kafka cluster and its topics and partitions. 
.Sh SEE ALSO
For more extensive help and some simple examples, run
.Nm
with the
.Fl h
flag.
kafkacat-1.2.0/kafkacat.c000066400000000000000000001160711253367242100152030ustar00rootroot00000000000000/*
 * kafkacat - Apache Kafka consumer and producer
 *
 * Copyright (c) 2014, Magnus Edenhill
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are met:
 *
 * 1. Redistributions of source code must retain the above copyright notice,
 *    this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright notice,
 *    this list of conditions and the following disclaimer in the documentation
 *    and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
 * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
 * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 * POSSIBILITY OF SUCH DAMAGE.
 */

#include <unistd.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <signal.h>
#include <stdarg.h>
#include <syslog.h>
#include <fcntl.h>
#include <sys/stat.h>
#include <sys/mman.h>

#include "kafkacat.h"


struct conf conf = {
        .run = 1,
        .verbosity = 1,
        .partition = RD_KAFKA_PARTITION_UA,
        .msg_size = 1024*1024,
        .null_str = "NULL",
};

static struct stats {
        uint64_t tx;
        uint64_t tx_err_q;
        uint64_t tx_err_dr;
        uint64_t tx_delivered;

        uint64_t rx;
} stats;

/* Per-partition at-EOF state array */
int *part_eof = NULL;
/* Number of partitions that have reached EOF */
int part_eof_cnt = 0;
/* Threshold level (partitions at EOF) before exiting */
int part_eof_thres = 0;


/**
 * Fatal error: print error and exit
 */
void __attribute__((noreturn)) fatal0 (const char *func, int line,
                                       const char *fmt, ...) {
        va_list ap;
        char buf[1024];

        va_start(ap, fmt);
        vsnprintf(buf, sizeof(buf), fmt, ap);
        va_end(ap);

        INFO(2, "Fatal error at %s:%i:\n", func, line);
        fprintf(stderr, "%% ERROR: %s\n", buf);
        exit(1);
}


/**
 * The delivery report callback is called once per message to
 * report delivery success or failure.
 */
static void dr_msg_cb (rd_kafka_t *rk, const rd_kafka_message_t *rkmessage,
                       void *opaque) {
        static int say_once = 1;

        if (rkmessage->err) {
                INFO(1, "Delivery failed for message: %s\n",
                     rd_kafka_err2str(rkmessage->err));
                stats.tx_err_dr++;
                return;
        }

        INFO(3,
             "Message delivered to partition %"PRId32" (offset %"PRId64")\n",
             rkmessage->partition, rkmessage->offset);

        if (rkmessage->offset == 0 && say_once) {
                INFO(3, "Enable message offset reporting "
                     "with '-X topic.produce.offset.report=true'\n");
                say_once = 0;
        }
        stats.tx_delivered++;
}


/**
 * Produces a single message, retries on queue congestion, and
 * exits hard on error.
 */
static void produce (void *buf, size_t len,
                     const void *key, size_t key_len, int msgflags) {

        /* Produce message: keep trying until it succeeds.
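         * The only error treated as transient here is
         * RD_KAFKA_RESP_ERR__QUEUE_FULL (local queue congestion), in which
         * case we poll to serve delivery reports and then retry;
         * any other produce error is fatal.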
*/ do { rd_kafka_resp_err_t err; if (!conf.run) FATAL("Program terminated while " "producing message of %zd bytes", len); if (rd_kafka_produce(conf.rkt, conf.partition, msgflags, buf, len, key, key_len, NULL) != -1) { stats.tx++; break; } err = rd_kafka_errno2err(errno); if (err != RD_KAFKA_RESP_ERR__QUEUE_FULL) FATAL("Failed to produce message (%zd bytes): %s", len, rd_kafka_err2str(err)); stats.tx_err_q++; /* Internal queue full, sleep to allow * messages to be produced/time out * before trying again. */ rd_kafka_poll(conf.rk, 5); } while (1); /* Poll for delivery reports, errors, etc. */ rd_kafka_poll(conf.rk, 0); } /** * Produce contents of file as a single message. * Returns the file length on success, else -1. */ static ssize_t produce_file (const char *path) { int fd; void *ptr; struct stat st; if ((fd = open(path, O_RDONLY)) == -1) { INFO(1, "Failed to open %s: %s\n", path, strerror(errno)); return -1; } if (fstat(fd, &st) == -1) { INFO(1, "Failed to stat %s: %s\n", path, strerror(errno)); close(fd); return -1; } if (st.st_size == 0) { INFO(3, "Skipping empty file %s\n", path); close(fd); return 0; } ptr = mmap(NULL, st.st_size, PROT_READ, MAP_PRIVATE, fd, 0); if (ptr == MAP_FAILED) { INFO(1, "Failed to mmap %s: %s\n", path, strerror(errno)); close(fd); return -1; } INFO(4, "Producing file %s (%"PRIdMAX" bytes)\n", path, (intmax_t)st.st_size); produce(ptr, st.st_size, NULL, 0, RD_KAFKA_MSG_F_COPY); munmap(ptr, st.st_size); return st.st_size; } /** * Run producer, reading messages from 'fp' and producing to kafka. * Or if 'pathcnt' is > 0, read messages from files in 'paths' instead. */ static void producer_run (FILE *fp, char **paths, int pathcnt) { char *sbuf = NULL; size_t size = 0; ssize_t len; char errstr[512]; /* Assign per-message delivery report callback. */ rd_kafka_conf_set_dr_msg_cb(conf.rk_conf, dr_msg_cb); /* Create producer */ if (!(conf.rk = rd_kafka_new(RD_KAFKA_PRODUCER, conf.rk_conf, errstr, sizeof(errstr)))) FATAL("Failed to create producer: %s", errstr); if (conf.debug) rd_kafka_set_log_level(conf.rk, LOG_DEBUG); else if (conf.verbosity == 0) rd_kafka_set_log_level(conf.rk, 0); /* Create topic */ if (!(conf.rkt = rd_kafka_topic_new(conf.rk, conf.topic, conf.rkt_conf))) FATAL("Failed to create topic %s: %s", conf.topic, rd_kafka_err2str(rd_kafka_errno2err(errno))); conf.rk_conf = NULL; conf.rkt_conf = NULL; if (pathcnt > 0) { int i; int good = 0; /* Read messages from files, each file is its own message. */ for (i = 0 ; i < pathcnt ; i++) if (produce_file(paths[i]) != -1) good++; if (!good) conf.exitcode = 1; else if (good < pathcnt) INFO(1, "Failed to produce from %i/%i files\n", pathcnt - good, pathcnt); } else { /* Read messages from stdin, delimited by conf.delim */ while (conf.run && (len = getdelim(&sbuf, &size, conf.delim, fp)) != -1) { int msgflags = 0; char *buf = sbuf; char *key = NULL; size_t key_len = 0; size_t orig_len = len; if (len == 0) continue; /* Shave off delimiter */ if ((int)buf[len-1] == conf.delim) len--; if (len == 0) continue; /* Extract key, if desired and found. */ if (conf.flags & CONF_F_KEY_DELIM) { char *t; if ((t = memchr(buf, conf.key_delim, len))) { key_len = (size_t)(t-sbuf); key = buf; buf += key_len+1; len -= key_len+1; if (conf.flags & CONF_F_NULL) { if (len == 0) buf = NULL; if (key_len == 0) key = NULL; } } } if (len > 1024 && !(conf.flags & CONF_F_TEE)) { /* If message is larger than this arbitrary * threshold it will be more effective to * not copy the data but let rdkafka own it * instead. 
 *
                                 * Note that CONF_F_TEE must be checked,
                                 * otherwise a possible race might occur.
                                 */
                                msgflags |= RD_KAFKA_MSG_F_FREE;
                        } else {
                                /* For smaller messages a copy is
                                 * more efficient. */
                                msgflags |= RD_KAFKA_MSG_F_COPY;
                        }

                        /* Produce message */
                        produce(buf, len, key, key_len, msgflags);

                        if (conf.flags & CONF_F_TEE &&
                            fwrite(sbuf, orig_len, 1, stdout) != 1)
                                FATAL("Tee write error for message "
                                      "of %zd bytes: %s",
                                      orig_len, strerror(errno));

                        if (msgflags & RD_KAFKA_MSG_F_FREE) {
                                /* rdkafka owns the allocated buffer
                                 * memory now. */
                                sbuf = NULL;
                                size = 0;
                        }

                        /* Enforce -c */
                        if (stats.tx == conf.msg_cnt)
                                conf.run = 0;
                }

                if (conf.run) {
                        if (!feof(fp))
                                FATAL("Unable to read message: %s",
                                      strerror(errno));
                }
        }

        /* Wait for all messages to be transmitted */
        conf.run = 1;
        while (conf.run && rd_kafka_outq_len(conf.rk))
                rd_kafka_poll(conf.rk, 50);

        rd_kafka_topic_destroy(conf.rkt);
        rd_kafka_destroy(conf.rk);

        if (sbuf)
                free(sbuf);

        if (stats.tx_err_q || stats.tx_err_dr)
                conf.exitcode = 1;
}


/**
 * Consume callback, called for each message consumed.
 */
static void consume_cb (rd_kafka_message_t *rkmessage, void *opaque) {
        FILE *fp = opaque;

        if (!conf.run)
                return;

        if (rkmessage->err) {
                if (rkmessage->err == RD_KAFKA_RESP_ERR__PARTITION_EOF) {
                        /* Store EOF offset.
                         * If partition is empty and at offset 0,
                         * store future first message (0). */
                        rd_kafka_offset_store(rkmessage->rkt,
                                              rkmessage->partition,
                                              rkmessage->offset == 0 ?
                                              0 : rkmessage->offset-1);
                        if (conf.exit_eof) {
                                if (!part_eof[rkmessage->partition]) {
                                        /* Stop consuming this partition */
                                        rd_kafka_consume_stop(
                                                rkmessage->rkt,
                                                rkmessage->partition);
                                        part_eof[rkmessage->partition] = 1;
                                        part_eof_cnt++;
                                        if (part_eof_cnt >= part_eof_thres)
                                                conf.run = 0;
                                }

                                INFO(1, "Reached end of topic %s [%"PRId32"] "
                                     "at offset %"PRId64"%s\n",
                                     rd_kafka_topic_name(rkmessage->rkt),
                                     rkmessage->partition,
                                     rkmessage->offset,
                                     !conf.run ? ": exiting" : "");
                        }
                        return;
                }

                FATAL("Topic %s [%"PRId32"] error: %s",
                      rd_kafka_topic_name(rkmessage->rkt),
                      rkmessage->partition,
                      rd_kafka_message_errstr(rkmessage));
        }

        /* Print message */
        fmt_msg_output(fp, rkmessage);

        rd_kafka_offset_store(rkmessage->rkt,
                              rkmessage->partition,
                              rkmessage->offset);

        if (++stats.rx == conf.msg_cnt)
                conf.run = 0;
}


/**
 * Run consumer, consuming messages from Kafka and writing to 'fp'.
 */
static void consumer_run (FILE *fp) {
        char errstr[512];
        rd_kafka_resp_err_t err;
        const rd_kafka_metadata_t *metadata;
        int i;
        rd_kafka_queue_t *rkqu;

        /* Create consumer */
        if (!(conf.rk = rd_kafka_new(RD_KAFKA_CONSUMER, conf.rk_conf,
                                     errstr, sizeof(errstr))))
                FATAL("Failed to create consumer: %s", errstr);

        if (conf.debug)
                rd_kafka_set_log_level(conf.rk, LOG_DEBUG);
        else if (conf.verbosity == 0)
                rd_kafka_set_log_level(conf.rk, 0);

        /* The callback-based consumer API's offset store granularity is
         * not good enough for us, disable automatic offset store
         * and do it explicitly per-message in the consume callback instead. */
        if (rd_kafka_topic_conf_set(conf.rkt_conf, "auto.commit.enable",
                                    "false", errstr, sizeof(errstr)) !=
            RD_KAFKA_CONF_OK)
                FATAL("%s", errstr);

        /* Create topic */
        if (!(conf.rkt = rd_kafka_topic_new(conf.rk, conf.topic,
                                            conf.rkt_conf)))
                FATAL("Failed to create topic %s: %s", conf.topic,
                      rd_kafka_err2str(rd_kafka_errno2err(errno)));

        conf.rk_conf  = NULL;
        conf.rkt_conf = NULL;


        /* Query broker for topic + partition information.
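         * The metadata partition count is needed both to size the
         * part_eof[] tracking array and to know which partitions to
         * start consumers for when no -p partition was given.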
         */
        if ((err = rd_kafka_metadata(conf.rk, 0, conf.rkt, &metadata, 5000)))
                FATAL("Failed to query metadata for topic %s: %s",
                      rd_kafka_topic_name(conf.rkt), rd_kafka_err2str(err));

        /* Error handling */
        if (metadata->topic_cnt == 0)
                FATAL("No such topic in cluster: %s",
                      rd_kafka_topic_name(conf.rkt));

        if ((err = metadata->topics[0].err))
                FATAL("Topic %s error: %s",
                      rd_kafka_topic_name(conf.rkt), rd_kafka_err2str(err));

        if (metadata->topics[0].partition_cnt == 0)
                FATAL("Topic %s has no partitions",
                      rd_kafka_topic_name(conf.rkt));

        /* If Exit-at-EOF is enabled, set up array to track EOF
         * state for each partition. */
        if (conf.exit_eof) {
                part_eof = calloc(sizeof(*part_eof),
                                  metadata->topics[0].partition_cnt);

                if (conf.partition != RD_KAFKA_PARTITION_UA)
                        part_eof_thres = 1;
                else
                        part_eof_thres = metadata->topics[0].partition_cnt;
        }

        /* Create a shared queue that combines messages from
         * all wanted partitions. */
        rkqu = rd_kafka_queue_new(conf.rk);

        /* Start consuming from all wanted partitions. */
        for (i = 0 ; i < metadata->topics[0].partition_cnt ; i++) {
                int32_t partition = metadata->topics[0].partitions[i].id;

                /* If -p was specified: skip unwanted partitions */
                if (conf.partition != RD_KAFKA_PARTITION_UA &&
                    conf.partition != partition)
                        continue;

                /* Start consumer for this partition */
                if (rd_kafka_consume_start_queue(conf.rkt, partition,
                                                 conf.offset, rkqu) == -1)
                        FATAL("Failed to start consuming "
                              "topic %s [%"PRId32"]: %s",
                              conf.topic, partition,
                              rd_kafka_err2str(rd_kafka_errno2err(errno)));

                if (conf.partition != RD_KAFKA_PARTITION_UA)
                        break;
        }

        if (conf.partition != RD_KAFKA_PARTITION_UA &&
            i == metadata->topics[0].partition_cnt)
                FATAL("Topic %s (with partitions 0..%i): "
                      "partition %i does not exist",
                      rd_kafka_topic_name(conf.rkt),
                      metadata->topics[0].partition_cnt-1,
                      conf.partition);


        /* Read messages from Kafka, write to 'fp'. */
        while (conf.run) {
                rd_kafka_consume_callback_queue(rkqu, 100,
                                                consume_cb, fp);
        }

        /* Stop consuming */
        for (i = 0 ; i < metadata->topics[0].partition_cnt ; i++) {
                int32_t partition = metadata->topics[0].partitions[i].id;

                /* If -p was specified: skip unwanted partitions */
                if (conf.partition != RD_KAFKA_PARTITION_UA &&
                    conf.partition != partition)
                        continue;

                /* Don't stop already stopped partitions */
                if (!part_eof || !part_eof[partition])
                        rd_kafka_consume_stop(conf.rkt, partition);
        }

        /* Destroy shared queue */
        rd_kafka_queue_destroy(rkqu);

        /* Wait for outstanding requests to finish. */
        conf.run = 1;
        while (conf.run && rd_kafka_outq_len(conf.rk) > 0)
                rd_kafka_poll(conf.rk, 50);

        rd_kafka_topic_destroy(conf.rkt);
        rd_kafka_destroy(conf.rk);
}


/**
 * Print metadata information
 */
static void metadata_print (const rd_kafka_metadata_t *metadata) {
        int i, j, k;

        printf("Metadata for %s (from broker %"PRId32": %s):\n",
               conf.topic ?
: "all topics", metadata->orig_broker_id, metadata->orig_broker_name); /* Iterate brokers */ printf(" %i brokers:\n", metadata->broker_cnt); for (i = 0 ; i < metadata->broker_cnt ; i++) printf(" broker %"PRId32" at %s:%i\n", metadata->brokers[i].id, metadata->brokers[i].host, metadata->brokers[i].port); /* Iterate topics */ printf(" %i topics:\n", metadata->topic_cnt); for (i = 0 ; i < metadata->topic_cnt ; i++) { const rd_kafka_metadata_topic_t *t = &metadata->topics[i]; printf(" topic \"%s\" with %i partitions:", t->topic, t->partition_cnt); if (t->err) { printf(" %s", rd_kafka_err2str(t->err)); if (t->err == RD_KAFKA_RESP_ERR_LEADER_NOT_AVAILABLE) printf(" (try again)"); } printf("\n"); /* Iterate topic's partitions */ for (j = 0 ; j < t->partition_cnt ; j++) { const rd_kafka_metadata_partition_t *p; p = &t->partitions[j]; printf(" partition %"PRId32", " "leader %"PRId32", replicas: ", p->id, p->leader); /* Iterate partition's replicas */ for (k = 0 ; k < p->replica_cnt ; k++) printf("%s%"PRId32, k > 0 ? ",":"", p->replicas[k]); /* Iterate partition's ISRs */ printf(", isrs: "); for (k = 0 ; k < p->isr_cnt ; k++) printf("%s%"PRId32, k > 0 ? ",":"", p->isrs[k]); if (p->err) printf(", %s\n", rd_kafka_err2str(p->err)); else printf("\n"); } } } /** * Lists metadata */ static void metadata_list (void) { char errstr[512]; rd_kafka_resp_err_t err; const rd_kafka_metadata_t *metadata; /* Create handle */ if (!(conf.rk = rd_kafka_new(RD_KAFKA_PRODUCER, conf.rk_conf, errstr, sizeof(errstr)))) FATAL("Failed to create producer: %s", errstr); rd_kafka_set_logger(conf.rk, rd_kafka_log_print); if (conf.debug) rd_kafka_set_log_level(conf.rk, LOG_DEBUG); else if (conf.verbosity == 0) rd_kafka_set_log_level(conf.rk, 0); /* Create topic, if specified */ if (conf.topic && !(conf.rkt = rd_kafka_topic_new(conf.rk, conf.topic, conf.rkt_conf))) FATAL("Failed to create topic %s: %s", conf.topic, rd_kafka_err2str(rd_kafka_errno2err(errno))); conf.rk_conf = NULL; conf.rkt_conf = NULL; /* Fetch metadata */ err = rd_kafka_metadata(conf.rk, conf.rkt ? 0 : 1, conf.rkt, &metadata, 5000); if (err != RD_KAFKA_RESP_ERR_NO_ERROR) FATAL("Failed to acquire metadata: %s", rd_kafka_err2str(err)); /* Print metadata */ #if ENABLE_JSON if (conf.flags & CONF_F_FMT_JSON) metadata_print_json(metadata); else #endif metadata_print(metadata); rd_kafka_metadata_destroy(metadata); if (conf.rkt) rd_kafka_topic_destroy(conf.rkt); rd_kafka_destroy(conf.rk); } /** * Print usage and exit. */ static void __attribute__((noreturn)) usage (const char *argv0, int exitcode, const char *reason) { if (reason) printf("Error: %s\n\n", reason); printf("Usage: %s [file1 file2 ..]\n" "kafkacat - Apache Kafka producer and consumer tool\n" "https://github.com/edenhill/kafkacat\n" "Copyright (c) 2014-2015, Magnus Edenhill\n" "Version %s%s (librdkafka %s)\n" "\n" "\n" "General options:\n" " -C | -P | -L Mode: Consume, Produce or metadata List\n" " -t Topic to consume from, produce to, " "or list\n" " -p Partition\n" " -b Bootstrap broker(s) (host[:port])\n" " -D Message delimiter character:\n" " a-z.. 
| \\r | \\n | \\t | \\xNN\n" " Default: \\n\n" " -K Key delimiter (same format as -D)\n" " -c Limit message count\n" " -X list List available librdkafka configuration " "properties\n" " -X prop=val Set librdkafka configuration property.\n" " Properties prefixed with \"topic.\" are\n" " applied as topic properties.\n" " -X dump Dump configuration and exit.\n" " -d Enable librdkafka debugging:\n" " " RD_KAFKA_DEBUG_CONTEXTS "\n" " -q Be quiet (verbosity set to 0)\n" " -v Increase verbosity\n" "\n" "Producer options:\n" " -z snappy|gzip Message compression. Default: none\n" " -p -1 Use random partitioner\n" " -D Delimiter to split input into messages\n" " -K Delimiter to split input key and message\n" " -T Output sent messages to stdout, acting like tee.\n" " -c Exit after producing this number " "of messages\n" " -Z Send empty messages as NULL messages\n" " file1 file2.. Read messages from files.\n" " The entire file contents will be sent as\n" " one single message.\n" "\n" "Consumer options:\n" " -o Offset to start consuming from:\n" " beginning | end | stored |\n" " (absolute offset) |\n" " - (relative offset from end)\n" " -e Exit successfully when last message " "received\n" " -f Output formatting string, see below.\n" " Takes precedence over -D and -K.\n" #if ENABLE_JSON " -J Output with JSON envelope\n" #endif " -D Delimiter to separate messages on output\n" " -K Print message keys prefixing the message\n" " with specified delimiter.\n" " -O Print message offset using -K delimiter\n" " -c Exit after consuming this number " "of messages\n" " -Z Print NULL messages and keys as \"%s\"" "(instead of empty)\n" " -u Unbuffered output\n" "\n" "Metadata options:\n" " -t Topic to query (optional)\n" "\n" "\n" "Format string tokens:\n" " %%s Message payload\n" " %%S Message payload length (or -1 for NULL)\n" " %%k Message key\n" " %%K Message key length (or -1 for NULL)\n" " %%t Topic\n" " %%p Partition\n" " %%o Message offset\n" " \\n \\r \\t Newlines, tab\n" " \\xXX \\xNNN Any ASCII character\n" " Example:\n" " -f 'Topic %%t [%%p] at offset %%o: key %%k: %%s\\n'\n" "\n" "\n" "Consumer mode (writes messages to stdout):\n" " kafkacat -b -t -p \n" " or:\n" " kafkacat -C -b ...\n" "\n" "Producer mode (reads messages from stdin):\n" " ... | kafkacat -b -t -p \n" " or:\n" " kafkacat -P -b ...\n" "\n" "Metadata listing:\n" " kafkacat -L -b [-t ]\n" "\n", argv0, KAFKACAT_VERSION, #if ENABLE_JSON " (JSON)", #else "", #endif rd_kafka_version_str(), conf.null_str ); exit(exitcode); } /** * Terminate by putting out the run flag. */ static void term (int sig) { conf.run = 0; } /** * Parse delimiter string from command line arguments. 
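 *
 * Accepts "\n", "\t", a hex byte written as "\xNN", or a literal
 * string of which only the first character is used.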
*/ static int parse_delim (const char *str) { int delim; if (!strncmp(str, "\\x", strlen("\\x"))) delim = strtoul(str+strlen("\\x"), NULL, 16) & 0xff; else if (!strcmp(str, "\\n")) delim = (int)'\n'; else if (!strcmp(str, "\\t")) delim = (int)'\t'; else delim = (int)*str & 0xff; return delim; } /** * Parse command line arguments */ static void argparse (int argc, char **argv) { char errstr[512]; int opt; const char *fmt = NULL; const char *delim = "\n"; const char *key_delim = NULL; char tmp_fmt[64]; while ((opt = getopt(argc, argv, "PCLt:p:b:z:o:eD:K:Od:qvX:c:Tuf:Z" #if ENABLE_JSON "J" #endif )) != -1) { switch (opt) { case 'P': case 'C': case 'L': conf.mode = opt; break; case 't': conf.topic = optarg; break; case 'p': conf.partition = atoi(optarg); break; case 'b': conf.brokers = optarg; break; case 'z': if (rd_kafka_conf_set(conf.rk_conf, "compression.codec", optarg, errstr, sizeof(errstr)) != RD_KAFKA_CONF_OK) FATAL("%s", errstr); break; case 'o': if (!strcmp(optarg, "end")) conf.offset = RD_KAFKA_OFFSET_END; else if (!strcmp(optarg, "beginning")) conf.offset = RD_KAFKA_OFFSET_BEGINNING; else if (!strcmp(optarg, "stored")) conf.offset = RD_KAFKA_OFFSET_STORED; else { conf.offset = strtoll(optarg, NULL, 10); if (conf.offset < 0) conf.offset = RD_KAFKA_OFFSET_TAIL(-conf.offset); } break; case 'e': conf.exit_eof = 1; break; case 'f': fmt = optarg; break; #if ENABLE_JSON case 'J': conf.flags |= CONF_F_FMT_JSON; break; #endif case 'D': delim = optarg; break; case 'K': key_delim = optarg; conf.flags |= CONF_F_KEY_DELIM; break; case 'O': conf.flags |= CONF_F_OFFSET; break; case 'c': conf.msg_cnt = strtoll(optarg, NULL, 10); break; case 'Z': conf.flags |= CONF_F_NULL; conf.null_str_len = strlen(conf.null_str); break; case 'd': conf.debug = optarg; if (rd_kafka_conf_set(conf.rk_conf, "debug", conf.debug, errstr, sizeof(errstr)) != RD_KAFKA_CONF_OK) FATAL("%s", errstr); break; case 'q': conf.verbosity = 0; break; case 'v': conf.verbosity++; break; case 'T': conf.flags |= CONF_F_TEE; break; case 'u': setbuf(stdout, NULL); break; case 'X': { char *name, *val; rd_kafka_conf_res_t res; if (!strcmp(optarg, "list") || !strcmp(optarg, "help")) { rd_kafka_conf_properties_show(stdout); exit(0); } if (!strcmp(optarg, "dump")) { conf.conf_dump = 1; continue; } name = optarg; if (!(val = strchr(name, '='))) { fprintf(stderr, "%% Expected " "-X property=value, not %s, " "use -X list to display available " "properties\n", name); exit(1); } *val = '\0'; val++; res = RD_KAFKA_CONF_UNKNOWN; /* Try "topic." prefixed properties on topic * conf first, and then fall through to global if * it didnt match a topic configuration property. */ if (!strncmp(name, "topic.", strlen("topic."))) res = rd_kafka_topic_conf_set(conf.rkt_conf, name+ strlen("topic."), val, errstr, sizeof(errstr)); if (res == RD_KAFKA_CONF_UNKNOWN) res = rd_kafka_conf_set(conf.rk_conf, name, val, errstr, sizeof(errstr)); if (res != RD_KAFKA_CONF_OK) FATAL("%s", errstr); } break; default: usage(argv[0], 1, "unknown argument"); break; } } if (!conf.brokers) usage(argv[0], 1, "-b missing"); /* Decide mode if not specified */ if (!conf.mode) { if (isatty(STDIN_FILENO)) conf.mode = 'C'; else conf.mode = 'P'; INFO(1, "Auto-selecting %s mode (use -P or -C to override)\n", conf.mode == 'C' ? 
"Consumer":"Producer"); } if (conf.mode != 'L' && !conf.topic) usage(argv[0], 1, "-t missing"); if (rd_kafka_conf_set(conf.rk_conf, "metadata.broker.list", conf.brokers, errstr, sizeof(errstr)) != RD_KAFKA_CONF_OK) usage(argv[0], 1, errstr); fmt_init(); if (conf.mode == 'C') { if (!fmt) { if ((conf.flags & CONF_F_FMT_JSON)) { /* For JSON the format string is simply the * output object delimiter (e.g., newline). */ fmt = delim; } else { if (key_delim) snprintf(tmp_fmt, sizeof(tmp_fmt), "%%k%s%%s%s", key_delim, delim); else snprintf(tmp_fmt, sizeof(tmp_fmt), "%%s%s", delim); fmt = tmp_fmt; } } fmt_parse(fmt); } else if (conf.mode == 'P') { conf.delim = parse_delim(delim); if (conf.flags & CONF_F_KEY_DELIM) conf.key_delim = parse_delim(key_delim); } } /** * Dump current rdkafka configuration to stdout. */ static void conf_dump (void) { const char **arr; size_t cnt; int pass; for (pass = 0 ; pass < 2 ; pass++) { int i; if (pass == 0) { arr = rd_kafka_conf_dump(conf.rk_conf, &cnt); printf("# Global config\n"); } else { printf("# Topic config\n"); arr = rd_kafka_topic_conf_dump(conf.rkt_conf, &cnt); } for (i = 0 ; i < cnt ; i += 2) printf("%s = %s\n", arr[i], arr[i+1]); printf("\n"); rd_kafka_conf_dump_free(arr, cnt); } } int main (int argc, char **argv) { char tmp[16]; signal(SIGINT, term); signal(SIGTERM, term); signal(SIGPIPE, term); /* Create config containers */ conf.rk_conf = rd_kafka_conf_new(); conf.rkt_conf = rd_kafka_topic_conf_new(); /* * Default config */ /* Enable quick termination of librdkafka */ snprintf(tmp, sizeof(tmp), "%i", SIGIO); rd_kafka_conf_set(conf.rk_conf, "internal.termination.signal", tmp, NULL, 0); /* Parse command line arguments */ argparse(argc, argv); /* Dump configuration and exit, if so desired. */ if (conf.conf_dump) { conf_dump(); exit(0); } if (optind < argc && conf.mode != 'P') usage(argv[0], 1, "file list only allowed in produce mode"); /* Run according to mode */ switch (conf.mode) { case 'C': consumer_run(stdout); break; case 'P': producer_run(stdin, &argv[optind], argc-optind); break; case 'L': metadata_list(); break; default: usage(argv[0], 0, NULL); break; } rd_kafka_wait_destroyed(5000); fmt_term(); exit(conf.exitcode); } kafkacat-1.2.0/kafkacat.h000066400000000000000000000072521253367242100152100ustar00rootroot00000000000000/* * kafkacat - Apache Kafka consumer and producer * * Copyright (c) 2015, Magnus Edenhill * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * 1. Redistributions of source code must retain the above copyright notice, * this list of conditions and the following disclaimer. * 2. Redistributions in binary form must reproduce the above copyright notice, * this list of conditions and the following disclaimer in the documentation * and/or other materials provided with the distribution. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE * ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
 * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 * POSSIBILITY OF SUCH DAMAGE.
 */

#pragma once

#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <errno.h>
#include <librdkafka/rdkafka.h>

#include "config.h"


typedef enum {
        KC_FMT_STR,
        KC_FMT_OFFSET,
        KC_FMT_KEY,
        KC_FMT_KEY_LEN,
        KC_FMT_PAYLOAD,
        KC_FMT_PAYLOAD_LEN,
        KC_FMT_TOPIC,
        KC_FMT_PARTITION,
} fmt_type_t;

#define KC_FMT_MAX_SIZE  128

struct conf {
        int     run;
        int     verbosity;
        int     exitcode;
        char    mode;
        int     flags;
#define CONF_F_FMT_JSON   0x1  /* JSON formatting */
#define CONF_F_KEY_DELIM  0x2  /* Producer: use key delimiter */
#define CONF_F_OFFSET     0x4  /* Print offsets */
#define CONF_F_TEE        0x8  /* Tee output when producing */
#define CONF_F_NULL       0x10 /* Send empty messages as NULL */
        int     delim;
        int     key_delim;

        struct {
                fmt_type_t type;
                const char *str;
                int str_len;
        } fmt[KC_FMT_MAX_SIZE];
        int     fmt_cnt;

        int     msg_size;
        char   *brokers;
        char   *topic;
        int32_t partition;
        int64_t offset;
        int     exit_eof;
        int64_t msg_cnt;
        char   *null_str;
        int     null_str_len;

        rd_kafka_conf_t       *rk_conf;
        rd_kafka_topic_conf_t *rkt_conf;

        rd_kafka_t       *rk;
        rd_kafka_topic_t *rkt;

        char   *debug;
        int     conf_dump;
};

extern struct conf conf;


void __attribute__((noreturn)) fatal0 (const char *func, int line,
                                       const char *fmt, ...);

#define FATAL(fmt...)  fatal0(__FUNCTION__, __LINE__, fmt)

/* Info printout */
#define INFO(VERBLVL,FMT...) do {                    \
                if (conf.verbosity >= (VERBLVL))     \
                        fprintf(stderr, "%% " FMT);  \
        } while (0)


/*
 * format.c
 */
void fmt_msg_output (FILE *fp, const rd_kafka_message_t *rkmessage);

void fmt_parse (const char *fmt);

void fmt_init (void);
void fmt_term (void);


#if ENABLE_JSON
/*
 * json.c
 */
void fmt_msg_output_json (FILE *fp, const rd_kafka_message_t *rkmessage);
void metadata_print_json (const struct rd_kafka_metadata *metadata);
void fmt_init_json (void);
void fmt_term_json (void);
#endif
kafkacat-1.2.0/mklove/000077500000000000000000000000001253367242100145615ustar00rootroot00000000000000kafkacat-1.2.0/mklove/Makefile.base000077500000000000000000000076431253367242100171440ustar00rootroot00000000000000# Base Makefile providing various standard targets
# Part of mklove suite but may be used independently.

MKL_RED?=	\033[031m
MKL_GREEN?=	\033[032m
MKL_YELLOW?=	\033[033m
MKL_BLUE?=	\033[034m
MKL_CLR_RESET?=	\033[0m

DEPS=		$(OBJS:%.o=%.d)

# TOPDIR is "TOPDIR/mklove/../", i.e., the top project directory.
# We do it with two dir calls instead of /.. to support mklove being symlinked.
MKLOVE_DIR := $(dir $(lastword $(MAKEFILE_LIST)))
TOPDIR = $(MKLOVE_DIR:mklove/=.)
# Silence lousy default ARFLAGS (rv) ARFLAGS= -include $(TOPDIR)/Makefile.config _UNAME_S := $(shell uname -s) ifeq ($(_UNAME_S),Darwin) LIBFILENAME=$(LIBNAME).$(LIBVER).dylib LIBFILENAMELINK=$(LIBNAME).dylib else LIBFILENAME=$(LIBNAME).so.$(LIBVER) LIBFILENAMELINK=$(LIBNAME).so endif INSTALL?= install INSTALL_PROGRAM?= $(INSTALL) INSTALL_DATA?= $(INSTALL) -m 644 prefix?= /usr/local exec_prefix?= $(prefix) bindir?= $(exec_prefix)/bin sbindir?= $(exec_prefix)/sbin libexecdir?= $(exec_prefix)/libexec/ # append PKGNAME on install datarootdir?= $(prefix)/share datadir?= $(datarootdir) # append PKGNAME on install sysconfdir?= $(prefix)/etc sharedstatedir?=$(prefix)/com localestatedir?=$(prefix)/var runstatedir?= $(localestatedir)/run includedir?= $(prefix)/include docdir?= $(datarootdir)/doc/$(PKGNAME) infodir?= $(datarootdir)/info libdir?= $(prefix)/lib localedir?= $(datarootdir)/locale mandir?= $(datarootdir)/man man1dir?= $(mandir)/man1 man2dir?= $(mandir)/man2 man3dir?= $(mandir)/man3 man4dir?= $(mandir)/man4 man5dir?= $(mandir)/man5 man6dir?= $(mandir)/man6 man7dir?= $(mandir)/man7 man8dir?= $(mandir)/man8 # Checks that mklove is set up and ready for building mklove-check: @if [ ! -f "$(TOPDIR)/Makefile.config" ]; then \ printf "$(MKL_RED)$(TOPDIR)/Makefile.config missing: please run ./configure$(MKL_CLR_RESET)\n" ; \ exit 1 ; \ fi %.o: %.c $(CC) -MD -MP $(CPPFLAGS) $(CFLAGS) -c $< -o $@ %.o: %.cpp $(CXX) -MD -MP $(CPPFLAGS) $(CXXFLAGS) -c $< -o $@ lib: $(LIBFILENAME) $(LIBNAME).a $(LIBFILENAME): $(OBJS) $(LIBNAME).lds @printf "$(MKL_YELLOW)Creating shared library $@$(MKL_CLR_RESET)\n" $(CC) $(LDFLAGS) $(LIB_LDFLAGS) $(OBJS) -o $@ $(LIBS) $(LIBNAME).a: $(OBJS) @printf "$(MKL_YELLOW)Creating static library $@$(MKL_CLR_RESET)\n" $(AR) rcs$(ARFLAGS) $@ $(OBJS) $(BIN): $(OBJS) @printf "$(MKL_YELLOW)Creating program $@$(MKL_CLR_RESET)\n" $(CC) $(CPPFLAGS) $(LDFLAGS) $(OBJS) -o $@ $(LIBS) file-check: @printf "$(MKL_YELLOW)Checking $(LIBNAME) integrity$(MKL_CLR_RESET)\n" @RET=true ; \ for f in $(CHECK_FILES) ; do \ printf "%-30s " $$f ; \ if [ -f "$$f" ]; then \ printf "$(MKL_GREEN)OK$(MKL_CLR_RESET)\n" ; \ else \ printf "$(MKL_RED)MISSING$(MKL_CLR_RESET)\n" ; \ RET=false ; \ fi ; \ done ; \ $$($$RET) lib-install: @printf "$(MKL_YELLOW)Install $(LIBNAME) to $$DESTDIR$(prefix)$(MKL_CLR_RESET)\n" $(INSTALL) -d $$DESTDIR$(includedir)/$(PKGNAME) ; \ $(INSTALL) -d $$DESTDIR$(libdir) ; \ $(INSTALL) $(HDRS) $$DESTDIR$(includedir)/$(PKGNAME) ; \ $(INSTALL) $(LIBNAME).a $$DESTDIR$(libdir) ; \ $(INSTALL) $(LIBFILENAME) $$DESTDIR$(libdir) ; \ (cd $$DESTDIR$(libdir) && ln -sf $(LIBFILENAME) $(LIBFILENAMELINK)) lib-uninstall: @printf "$(MKL_YELLOW)Uninstall $(LIBNAME) from $$DESTDIR$(prefix)$(MKL_CLR_RESET)\n" for hdr in $(HDRS) ; do \ rm -f $$DESTDIR$(includedir)/$(PKGNAME)/$$hdr ; done rm -f $$DESTDIR$(libdir)/$(LIBNAME).a rm -f $$DESTDIR$(libdir)/$(LIBFILENAME) rm -f $$DESTDIR$(libdir)/$(LIBFILENAMELINK) rmdir $$DESTDIR$(includedir)/$(PKGNAME) || true bin-install: @printf "$(MKL_YELLOW)Install $(BIN) to $$DESTDIR$(prefix)$(MKL_CLR_RESET)\n" $(INSTALL) -d $$DESTDIR$(bindir) && \ $(INSTALL) $(BIN) $$DESTDIR$(bindir) bin-uninstall: @printf "$(MKL_YELLOW)Uninstall $(BIN) from $$DESTDIR$(prefix)$(MKL_CLR_RESET)\n" rm -f $$DESTDIR$(bindir)/$(BIN) generic-clean: rm -f $(OBJS) $(DEPS) lib-clean: generic-clean rm -f $(LIBNAME)*.a $(LIBFILENAME) $(LIBFILENAMELINK) \ $(LIBNAME).lds bin-clean: generic-clean rm -f $(BIN) 
kafkacat-1.2.0/mklove/modules/000077500000000000000000000000001253367242100162315ustar00rootroot00000000000000kafkacat-1.2.0/mklove/modules/configure.base000066400000000000000000001156721253367242100210620ustar00rootroot00000000000000#!/bin/bash # # # mklove base configure module, implements the mklove configure framework # MKL_MODULES="base" MKL_CACHEVARS="" MKL_MKVARS="" MKL_CHECKS="" MKL_LOAD_STACK="" MKL_IDNEXT=1 MKL_OUTMK=_mklout.mk MKL_OUTH=_mklout.h MKL_OUTDBG=config.log MKL_GENERATORS="base:mkl_generate_late_vars" MKL_CLEANERS="" MKL_FAILS="" MKL_LATE_VARS="" MKL_OPTS_SET="" MKL_RED="" MKL_GREEN="" MKL_YELLOW="" MKL_BLUE="" MKL_CLR_RESET="" MKL_NO_DOWNLOAD=0 if [[ -z "$MKL_REPO_URL" ]]; then MKL_REPO_URL="http://github.com/edenhill/mklove/raw/master" fi # Default mklove directory to PWD/mklove [[ -z "$MKLOVE_DIR" ]] && MKLOVE_DIR=mklove ########################################################################### # # Variable types: # env - Standard environment variables. # var - mklove runtime variable, cached or not. # mkvar - Makefile variables, also sets runvar # define - config.h variables/defines # ########################################################################### # Low level variable assignment # Arguments: # variable name # variable value function mkl_var0_set { export "$1"="$2" } # Sets a runtime variable (only used during configure) # If cache=1 these variables are cached to config.cache. # Arguments: # variable name # variable value # [ "cache" ] function mkl_var_set { mkl_var0_set "$1" "$2" if [[ $3 == "cache" ]]; then if ! mkl_in_list "$MKL_CACHEVARS" "$1" ; then MKL_CACHEVARS="$MKL_CACHEVARS $1" fi fi } # Unsets a mkl variable # Arguments: # variable name function mkl_var_unset { unset $1 } # Appends to a mkl variable (space delimited) # Arguments: # variable name # variable value function mkl_var_append { if [[ -z ${!1} ]]; then mkl_var_set "$1" "$2" else mkl_var0_set "$1" "${!1} $2" fi } # Prepends to a mkl variable (space delimited) # Arguments: # variable name # variable value function mkl_var_prepend { if [[ -z ${!1} ]]; then mkl_var_set "$1" "$2" else mkl_var0_set "$1" "$2 ${!1}" fi } # Shift the first word off a variable. # Arguments: # variable name function mkl_var_shift { local n="${!1}" mkl_var0_set "$1" "${n#* }" return 0 } # Returns the contents of mkl variable # Arguments: # variable name function mkl_var_get { echo "${!1}" } # Set environment variable (runtime) # These variables are not cached nor written to any of the output files, # its just simply a helper wrapper for standard envs. # Arguments: # varname # varvalue function mkl_env_set { mkl_var0_set "$1" "$2" } # Append to environment variable # Arguments: # varname # varvalue # [ separator (" ") ] function mkl_env_append { local sep=" " if [[ -z ${!1} ]]; then mkl_env_set "$1" "$2" else [ ! -z ${3} ] && sep="$3" mkl_var0_set "$1" "${!1}${sep}$2" fi } # Set a make variable (Makefile.config) # Arguments: # config name # variable name # value function mkl_mkvar_set { if [[ ! -z $2 ]]; then mkl_env_set "$2" "$3" mkl_in_list "$MKL_MKVARS" "$2"|| mkl_env_append MKL_MKVARS $2 fi } # Appends to a make variable (Makefile.config) # Arguments: # config name # variable name # value function mkl_mkvar_append { if [[ ! 
-z $2 ]]; then mkl_env_append "$2" "$3" mkl_in_list "$MKL_MKVARS" "$2"|| mkl_env_append MKL_MKVARS $2 fi } # Return mkvar variable value # Arguments: # variable name function mkl_mkvar_get { [[ -z ${!1} ]] && return 1 echo ${!1} return 0 } # Defines a config header define (config.h) # Arguments: # config name # define name # define value (optional, default: 1) # if value starts with code: then no "" are added function mkl_define_set { if [[ -z $2 ]]; then return 0 fi [[ ! -z $1 ]] && mkl_write_h "// $1" local val="$3" if [[ -z "$val" ]]; then val="$(mkl_def $2 1)" fi # Define as code, string or integer? if [[ $val == code:* ]]; then # Code block, copy verbatim without quotes, strip code: prefix val=${val#code:} elif [[ ! ( "$val" =~ ^[0-9]+([lL]?[lL][dDuU]?)?$ || \ "$val" =~ ^0x[0-9a-fA-F]+([lL]?[lL][dDuU]?)?$ ) ]]; then # String: quote val="\"$val\"" fi # else: unquoted integer/hex mkl_write_h "#define $2 $val" } # Sets "all" configuration variables, that is: # for name set: Makefile variable, config.h define # Will convert value "y"|"n" to 1|0 for config.h # Arguments: # config name # variable name # value function mkl_allvar_set { mkl_mkvar_set "$1" "$2" "$3" local val=$3 if [[ $3 = "y" ]]; then val=1 elif [[ $3 = "n" ]]; then val=0 fi mkl_define_set "$1" "$2" "$val" } ########################################################################### # # # Check failure functionality # # ########################################################################### # Summarize all fatal failures and then exits. function mkl_fail_summary { echo " " local pkg_cmd="" local install_pkgs="" mkl_err "###########################################################" mkl_err "### Configure failed ###" mkl_err "###########################################################" mkl_err "### Accumulated failures: ###" mkl_err "###########################################################" local n for n in $MKL_FAILS ; do local conf=$(mkl_var_get MKL_FAIL__${n}__conf) mkl_err " $conf ($(mkl_var_get MKL_FAIL__${n}__define)) $(mkl_meta_get $conf name)" if mkl_meta_exists $conf desc; then mkl_err0 " desc: $MKL_YELLOW$(mkl_meta_get $conf desc)$MKL_CLR_RESET" fi mkl_err0 " module: $(mkl_var_get MKL_FAIL__${n}__module)" mkl_err0 " action: $(mkl_var_get MKL_FAIL__${n}__action)" mkl_err0 " reason: $(mkl_var_get MKL_FAIL__${n}__reason) " # Dig up some metadata to assist the user case $MKL_DISTRO in Debian|Ubuntu|*) local debs=$(mkl_meta_get $conf "deb") pkg_cmd="sudo apt-get install" if [[ ${#debs} > 0 ]]; then install_pkgs="$install_pkgs $debs" fi ;; esac done if [[ ! -z $install_pkgs ]]; then mkl_err "###########################################################" mkl_err "### Installing the following packages might help: ###" mkl_err "###########################################################" mkl_err0 "$pkg_cmd $install_pkgs" mkl_err0 "" fi exit 1 } # Checks if there were failures. # Returns 0 if there were no failures, else calls failure summary and exits. function mkl_check_fails { if [[ ${#MKL_FAILS} = 0 ]]; then return 0 fi mkl_fail_summary } # A check has failed but we want to carry on (and we should!). # We fail it all later. 
# Arguments: # config name # define name # action # reason function mkl_fail { local n="$(mkl_env_esc "$1")" mkl_var_set "MKL_FAIL__${n}__conf" "$1" mkl_var_set "MKL_FAIL__${n}__module" $MKL_MODULE mkl_var_set "MKL_FAIL__${n}__define" $2 mkl_var_set "MKL_FAIL__${n}__action" "$3" if [[ -z $(mkl_var_get "MKL_FAIL__${n}__reason") ]]; then mkl_var_set "MKL_FAIL__${n}__reason" "$4" else mkl_var_append "MKL_FAIL__${n}__reason" " And also: $4" fi mkl_in_list "$MKL_FAILS" "$n" || mkl_var_append MKL_FAILS "$n" } # A check failed, handle it # Arguments: # config name # define name # action (fail|disable|ignore|cont) # reason function mkl_check_failed { # Override action based on require directives, unless the action is # set to cont (for fallthrough to subsequent tests). local action="$3" if [[ $3 != "cont" ]]; then action=$(mkl_meta_get "MOD__$MKL_MODULE" "override_action" $3) fi # --fail-fatal option [[ $MKL_FAILFATAL ]] && action="fail" mkl_check_done "$1" "$2" "$action" "failed" mkl_dbg "Check $1 ($2, action $action (originally $3)) failed: $4" case $action in fail) # Check failed fatally, fail everything eventually mkl_fail "$1" "$2" "$3" "$4" return 1 ;; disable) # Check failed, disable [[ ! -z $2 ]] && mkl_mkvar_set "$1" "$2" "n" return 1 ;; ignore) # Check failed but we ignore the results and set it anyway. [[ ! -z $2 ]] && mkl_define_set "$1" "$2" "1" [[ ! -z $2 ]] && mkl_mkvar_set "$1" "$2" "y" return 1 ;; cont) # Check failed but we ignore the results and do nothing. return 0 ;; esac } ########################################################################### # # # Output generators # # ########################################################################### # Generate late variables. # Late variables are those referenced in command line option defaults # but then never set by --option. function mkl_generate_late_vars { local n for n in $MKL_LATE_VARS ; do local func=${n%:*} local safeopt=${func#opt_} local val=${n#*:} if mkl_in_list "$MKL_OPTS_SET" "$safeopt" ; then # Skip options set explicitly with --option continue fi # Expand variable references "\$foo" by calling eval # and pass it opt_... function. $func "$(eval echo $val)" done } # Generate output files. # Must be called following a successful configure run. function mkl_generate { local mf= for mf in $MKL_GENERATORS ; do MKL_MODULE=${mf%:*} local func=${mf#*:} $func || exit 1 done mkl_write_mk "# Config variables" mkl_write_mk "#" mkl_write_mk "# Generated by:" mkl_write_mk "# $MKL_CONFIGURE_ARGS" mkl_write_mk "" # Export colors to Makefile.config mkl_write_mk "MKL_RED=\t${MKL_RED}" mkl_write_mk "MKL_GREEN=\t${MKL_GREEN}" mkl_write_mk "MKL_YELLOW=\t${MKL_YELLOW}" mkl_write_mk "MKL_BLUE=\t${MKL_BLUE}" mkl_write_mk "MKL_CLR_RESET=\t${MKL_CLR_RESET}" local n= for n in $MKL_MKVARS ; do # Some special variables should be prefixable by the caller, so # define them in the makefile as appends. 
local op="=" case $n in CFLAGS|CPPFLAGS|CXXFLAGS|LDFLAGS|LIBS) op="+=" ;; esac mkl_write_mk "$n$op\t${!n}" done mkl_write_mk "# End of config variables" MKL_OUTMK_FINAL=Makefile.config mv $MKL_OUTMK $MKL_OUTMK_FINAL echo "Generated $MKL_OUTMK_FINAL" MKL_OUTH_FINAL=config.h mv $MKL_OUTH $MKL_OUTH_FINAL echo "Generated $MKL_OUTH_FINAL" } # Remove file noisily, if it exists function mkl_rm { if [[ -f $fname ]]; then echo "Removing $fname" rm -f "$fname" fi } # Remove files generated by configure function mkl_clean { for fname in Makefile.config config.h config.cache config.log ; do mkl_rm "$fname" done local mf= for mf in $MKL_CLEANERS ; do MKL_MODULE=${mf%:*} local func=${mf#*:} $func || exit 1 done } # Print summary of succesful configure run function mkl_summary { echo " Configuration summary:" local n= for n in $MKL_MKVARS ; do # Skip the boring booleans if [[ $n == WITH_* || $n == WITHOUT_* || $n == HAVE_* ]]; then continue fi printf " %-24s %s\n" "$n" "${!n}" done } # Write to mk file # Argument: # string .. function mkl_write_mk { echo -e "$*" >> $MKL_OUTMK } # Write to header file # Argument: # string .. function mkl_write_h { echo -e "$*" >> $MKL_OUTH } ########################################################################### # # # Logging and debugging # # ########################################################################### # Debug print # Only visible on terminal if MKL_DEBUG is set. # Always written to config.log # Argument: # string .. function mkl_dbg { if [[ ! -z $MKL_DEBUG ]]; then echo -e "${MKL_BLUE}DBG:$$: $*${MKL_CLR_RESET}" 1>&2 fi echo "DBG: $*" >> $MKL_OUTDBG } # Error print (with color) # Always printed to terminal and config.log # Argument: # string .. function mkl_err { echo -e "${MKL_RED}$*${MKL_CLR_RESET}" 1>&2 echo "$*" >> $MKL_OUTDBG } # Same as mkl_err but without coloring # Argument: # string .. function mkl_err0 { echo -e "$*" 1>&2 echo "$*" >> $MKL_OUTDBG } # Standard print # Always printed to terminal and config.log # Argument: # string .. function mkl_info { echo -e "$*" 1>&2 echo -e "$*" >> $MKL_OUTDBG } ########################################################################### # # # Misc helpers # # ########################################################################### # Returns the absolute path (but not necesarily canonical) of the first argument function mkl_abspath { echo $1 | sed -e "s|^\([^/]\)|$PWD/\1|" } # Returns true (0) if function $1 exists, else false (1) function mkl_func_exists { declare -f "$1" > /dev/null return $? } # Rename function. # Returns 0 on success or 1 if old function (origname) was not defined. # Arguments: # origname # newname function mkl_func_rename { if ! mkl_func_exists $1 ; then return 1 fi local orig=$(declare -f $1) local new="$2${orig#$1}" eval "$new" unset -f "$1" return 0 } # Push module function for later call by mklove. # The function is renamed to an internal name. # Arguments: # list variable name # module name # function name function mkl_func_push { local newfunc="__mkl__f_${2}_$(( MKL_IDNEXT++ ))" if mkl_func_rename "$3" "$newfunc" ; then mkl_var_append "$1" "$2:$newfunc" fi } # Returns value, or the default string if value is empty. # Arguments: # value # default function mkl_def { if [[ ! -z $1 ]]; then echo $1 else echo $2 fi } # Render a string (e.g., evaluate its $varrefs) # Arguments: # string function mkl_render { if [[ $* == *\$* ]]; then eval "echo $*" else echo "$*" fi } # Escape a string so that it becomes suitable for being an env variable. 
# This is a destructive operation and the original string cannot be restored. function mkl_env_esc { echo $* | LC_ALL=C sed -e 's/[^a-zA-Z0-9_]/_/g' } # Convert arguments to upper case function mkl_upper { echo "$*" | tr '[:lower:]' '[:upper:]' } # Convert arguments to lower case function mkl_lower { echo "$*" | tr '[:upper:]' '[:lower:]' } # Checks if element is in list # Arguments: # list # element function mkl_in_list { local n for n in $1 ; do [[ $n == $2 ]] && return 0 done return 1 } ########################################################################### # # # Cache functionality # # ########################################################################### # Write cache file function mkl_cache_write { [[ ! -z "$MKL_NOCACHE" ]] && return 0 echo "# mklove configure cache file generated at $(date)" > config.cache for n in $MKL_CACHEVARS ; do echo "$n=${!n}" >> config.cache done echo "Generated config.cache" } # Read cache file function mkl_cache_read { [[ ! -z "$MKL_NOCACHE" ]] && return 0 [ -f config.cache ] || return 1 echo "using cache file config.cache" local ORIG_IFS=$IFS IFS="$IFS=" while read -r n v ; do [[ -z $n || $n = \#* || -z $v ]] && continue mkl_var_set $n $v cache done < config.cache IFS=$ORIG_IFS } ########################################################################### # # # Config name meta data # # ########################################################################### # Set metadata for config name # This metadata is used by mkl in various situations # Arguments: # config name # metadata key # metadata value (appended) function mkl_meta_set { local metaname="mkl__$1__$2" eval "$metaname=\"\$$metaname $3\"" } # Returns metadata for config name # Arguments: # config name # metadata key # default (optional) function mkl_meta_get { local metaname="mkl__$1__$2" if [[ ! -z ${!metaname} ]]; then echo ${!metaname} else echo "$3" fi } # Checks if metadata exists # Arguments: # config name # metadata key function mkl_meta_exists { local metaname="mkl__$1__$2" if [[ ! -z ${!metaname} ]]; then return 0 else return 1 fi } ########################################################################### # # # Check framework # # ########################################################################### # Print that a check is beginning to run # Returns 0 if a cached result was used (do not continue with your tests), # else 1. # # If the check should not be cachable then specify argument 3 as "no-cache", # this is useful when a check not only checks but actually sets config # variables itself (which is not recommended, but desired sometimes). # # Arguments: # [ --verb "verb.." ] (replace "checking for") # config name # define name # action (fail,cont,disable or no-cache) # [ display name ] function mkl_check_begin { local verb="checking for" if [[ $1 == "--verb" ]]; then verb="$2" shift shift fi local name=$(mkl_meta_get $1 name "$4") [[ -z $name ]] && name="x:$1" echo -n "$verb $name..." if [[ $3 != "no-cache" ]]; then local status=$(mkl_var_get "MKL_STATUS_$1") # Check cache (from previous run or this one). # Only used cached value if the cached check succeeded: # it is more likely that a failed check has been fixed than the other # way around. if [[ ! 
-z $status && ( $status = "ok" ) ]]; then mkl_check_done "$1" "$2" "$3" $status "cached" return 0 fi fi return 1 } # Print that a check is done # Arguments: # config name # define name # action # status (ok|failed) # extra-info (optional) function mkl_check_done { mkl_var_set "MKL_STATUS_$1" "$4" cache local extra="" if [[ $4 = "failed" ]]; then local clr=$MKL_YELLOW extra=" ($3)" case "$3" in fail) clr=$MKL_RED ;; cont) extra="" ;; esac echo -e " $clr$4$MKL_CLR_RESET${extra}" else [[ ! -z $2 ]] && mkl_define_set "$1" "$2" "1" [[ ! -z $2 ]] && mkl_mkvar_set "$1" "$2" "y" [ ! -z "$5" ] && extra=" ($5)" echo -e " $MKL_GREEN$4${MKL_CLR_RESET}$extra" fi } # Perform configure check by compiling source snippet # Arguments: # [--ldflags="..." ] (appended after "compiler arguments" below) # config name # define name # action (fail|disable) # compiler (CC|CXX) # compiler arguments (optional "", example: "-lzookeeper") # source snippet function mkl_compile_check { local ldf= if [[ $1 == --ldflags=* ]]; then ldf=${1#*=} shift fi mkl_check_begin "$1" "$2" "$3" "$1 (by compile)" && return $? local cflags= if [[ $4 = "CXX" ]]; then local ext=cpp cflags="$(mkl_mkvar_get CXXFLAGS)" else local ext=c cflags="$(mkl_mkvar_get CFLAGS)" fi local srcfile=$(mktemp _mkltmpXXXXX) mv "$srcfile" "${srcfile}.$ext" srcfile="$srcfile.$ext" echo "$6" > $srcfile echo " int main () { return 0; } " >> $srcfile local cmd="${!4} $cflags $(mkl_mkvar_get CPPFLAGS) -Wall -Werror $5 $srcfile -o ${srcfile}.o $ldf $(mkl_mkvar_get LDFLAGS)"; mkl_dbg "Compile check $1 ($2): $cmd" local output output=$($cmd 2>&1) if [[ $? != 0 ]] ; then mkl_dbg "compile check for $1 ($2) failed: $cmd: $output" mkl_check_failed "$1" "$2" "$3" "compile check failed: CC: $4 flags: $5 $cmd: $output source: $6" local ret=1 else mkl_check_done "$1" "$2" "$3" "ok" local ret=0 fi # OSX Xcode toolchain creates dSYM directories when -g is set, # delete them specifically. rm -rf "$srcfile" "${srcfile}.o" ${srcfile}*dSYM return $ret } # Try to link with a library. # Arguments: # config name # define name # action (fail|disable) # linker flags (e.g. "-lpthreads") function mkl_link_check { mkl_check_begin "$1" "$2" "$3" "$1 (by linking)" && return $? local srcfile=$(mktemp _mktmpXXXXX) echo "int main () { return 0; }" > $srcfile local cmd="${CC} $(mkl_mkvar_get LDFLAGS) $srcfile -o ${srcfile}.o $4"; mkl_dbg "Link check $1 ($2): $cmd" local output output=$($cmd 2>&1) if [[ $? != 0 ]] ; then mkl_dbg "link check for $1 ($2) failed: $output" mkl_check_failed "$1" "$2" "$3" "link check failed: $output" local ret=1 else mkl_check_done "$1" "$2" "$3" "ok" "$4" local ret=0 fi rm -f $srcfile* return $ret } # Tries to figure out if we can use a static library or not. # Arguments: # library name (e.g. -lrdkafka) # compiler flags (optional "", e.g: "-lyajl") # Returns/outputs: # New list of compiler flags function mkl_lib_check_static { local libname=$1 local libs=$2 local arfile_var=STATIC_LIB_${libname#-l} # If STATIC_LIB_<libname> specifies an existing .a file we # use that instead. if [[ -f ${!arfile_var} ]]; then libs=$(echo $libs | sed -e "s|$libname|${!arfile_var}|g") else libs=$(echo $libs | sed -e "s|$libname|${LDFLAGS_STATIC} $libname ${LDFLAGS_DYNAMIC}|g") fi echo $libs } # Checks that the specified lib is available through a number of methods. # compiler flags are automatically appended to "LIBS" mkvar on success. # # If STATIC_LIB_<libname> is set to the path of an .a file # it will be used instead of -l<libname>.
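# Example (illustrative; the archive path below is hypothetical):
#   STATIC_LIB_rdkafka=/usr/local/lib/librdkafka.a ./configure --enable-static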
# # Arguments: # [--static=<lib>] (allows static linking (--enable-static) for the # library provided, e.g.: --static=-lrdkafka "librdkafka"..) # config name (library name (for pkg-config)) # define name # action (fail|disable|cont) # compiler (CC|CXX) # compiler flags (optional "", e.g: "-lyajl") # source snippet function mkl_lib_check { local staticopt= if [[ $1 == --static* ]]; then staticopt=$1 shift fi if [[ $WITH_PKGCONFIG == "y" ]]; then if mkl_pkg_config_check $staticopt "$1" "$2" cont; then return 0 fi fi local libs="$5" if [[ $WITH_STATIC_LINKING == y && ! -z $staticopt ]]; then libs=$(mkl_lib_check_static "${staticopt#*=}" "$libs") fi if ! mkl_compile_check "$1" "$2" "$3" "$4" "$libs" "$6"; then return 1 fi mkl_mkvar_append "$1" LIBS "$libs" return 0 } # Check for library with pkg-config # Automatically sets CFLAGS and LIBS from pkg-config information. # Arguments: # [--static=<lib>] (allows static linking (--enable-static) for the # library provided, e.g.: --static=-lrdkafka "librdkafka"..) # config name # define name # action (fail|disable|ignore) function mkl_pkg_config_check { local staticopt= if [[ $1 == --static* ]]; then staticopt=$1 shift fi mkl_check_begin "$1" "$2" "no-cache" "$1 (by pkg-config)" && return $? local cflags= local cmd="${PKG_CONFIG} --short-errors --cflags $1" mkl_dbg "pkg-config check $1 ($2): $cmd" cflags=$($cmd 2>&1) if [[ $? != 0 ]]; then mkl_dbg "'$cmd' failed: $cflags" mkl_check_failed "$1" "$2" "$3" "'$cmd' failed: $cflags" return 1 fi local libs= libs=$(${PKG_CONFIG} --short-errors --libs $1 2>&1) if [[ $? != 0 ]]; then mkl_dbg "${PKG_CONFIG} --libs $1 failed: $libs" mkl_check_failed "$1" "$2" "$3" "pkg-config --libs failed" return 1 fi mkl_mkvar_append $1 "CFLAGS" "$cflags" if [[ $WITH_STATIC_LINKING == y && ! -z $staticopt ]]; then libs=$(mkl_lib_check_static "${staticopt#*=}" "$libs") fi mkl_mkvar_append "$1" LIBS "$libs" mkl_check_done "$1" "$2" "$3" "ok" return 0 } # Check that a command runs and exits successfully. # Arguments: # config name # define name (optional, can be empty) # action # command function mkl_command_check { mkl_check_begin "$1" "$2" "$3" "$1 (by command)" && return $? local out= out=$($4 2>&1) if [[ $? != 0 ]]; then mkl_dbg "$1: $2: $4 failed: $out" mkl_check_failed "$1" "$2" "$3" "command '$4' failed: $out" return 1 fi mkl_check_done "$1" "$2" "$3" "ok" return 0 } # Check that a program is executable, but will not execute it. # Arguments: # config name # define name (optional, can be empty) # action # program name (e.g., objdump) function mkl_prog_check { mkl_check_begin --verb "checking executable" "$1" "$2" "$3" "$1" && return $? local out= out=$(command -v "$4" 2>&1) if [[ $? != 0 ]]; then mkl_dbg "$1: $2: $4 is not executable: $out" mkl_check_failed "$1" "$2" "$3" "$4 is not executable" return 1 fi mkl_check_done "$1" "$2" "$3" "ok" return 0 } # Checks that the check for the given config name passed. # This does not behave like the other checks: if the given config name passed # its test then nothing is printed. Else the configure will fail. # Arguments: # checked config name function mkl_config_check { local status=$(mkl_var_get "MKL_STATUS_$1") [[ $status = "ok" ]] && return 0 mkl_fail $1 "" "fail" "$MKL_MODULE requires $1" return 1 } # Checks that all provided config names are set. # Arguments: # config name # define name # action # check_config_name1 # check_config_name2.. 
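# Example (illustrative, with made-up config names): fail the "myfeature"
# check unless the libcurl and zlib checks passed:
#   mkl_config_check_all myfeature WITH_MYFEATURE fail libcurl zlib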
function mkl_config_check_all { local cname= local res="ok" for cname in ${@:4}; do local st=$(mkl_var_get "MKL_STATUS_$cname") [[ $st = "ok" ]] && continue mkl_fail $1 $2 $3 "depends on $cname" res="failed" done mkl_check_done "$1" "$2" "$3" $res } # Check environment variable # Arguments: # config name # define name # action # environment variable function mkl_env_check { mkl_check_begin "$1" "$2" "$3" "$1 (by env $4)" && return $? if [[ -z ${!4} ]]; then mkl_check_failed "$1" "$2" "$3" "environment variable $4 not set" return 1 fi mkl_check_done "$1" "$2" "$3" "ok" "${!4}" return 0 } # Run all checks function mkl_checks_run { # Call checks functions in dependency order local mf for mf in $MKL_CHECKS ; do MKL_MODULE=${mf%:*} local func=${mf#*:} if mkl_func_exists $func ; then $func else mkl_err "Check function $func from $MKL_MODULE disappeared ($mf)" fi unset MKL_MODULE done } # Check for color support in terminal. # If the terminal supports colors, the function will alter # MKL_RED # MKL_GREEN # MKL_YELLOW # MKL_BLUE # MKL_CLR_RESET function mkl_check_terminal_color_support { local use_color=false local has_tput=false if [[ -z ${TERM} ]]; then # tput and dircolors require $TERM mkl_dbg "\$TERM is not set! Cannot check for color support in terminal." return 1 elif hash tput 2>/dev/null; then has_tput=true [[ $(tput colors 2>/dev/null) -ge 8 ]] && use_color=true mkl_dbg "tput reports color support: ${use_color}" elif hash dircolors 2>/dev/null; then # Enable color support only on colorful terminals. # dircolors --print-database uses its own built-in database # instead of using /etc/DIR_COLORS. Try to use the external file # first to take advantage of user additions. local safe_term=${TERM//[^[:alnum:]]/?} local match_lhs="" [[ -f ~/.dir_colors ]] && match_lhs="${match_lhs}$(<~/.dir_colors)" [[ -f /etc/DIR_COLORS ]] && match_lhs="${match_lhs}$(</etc/DIR_COLORS)" [[ -z ${match_lhs} ]] && match_lhs=$(dircolors --print-database) [[ $'\n'${match_lhs} == *$'\n'"TERM "${safe_term}* ]] && use_color=true mkl_dbg "dircolors reports color support: ${use_color}" fi if ${use_color}; then MKL_RED="\033[031m" MKL_GREEN="\033[032m" MKL_YELLOW="\033[033m" MKL_BLUE="\033[034m" MKL_CLR_RESET="\033[0m" fi } # Download module from the mklove module repository ($MKL_REPO_URL). # Arguments: # module name # Returns/outputs: # module file name function mkl_module_download { local modname="$1" local url="$MKL_REPO_URL/modules/configure.$modname" local fname="${MKLOVE_DIR}/modules/configure.$modname" local tmpfile=$(mktemp _mkltmpXXXXXX) local out= out=$(wget -nv -O "$tmpfile" "$url" 2>&1) if [[ $? -ne 0 ]]; then rm -f "$tmpfile" mkl_err "Failed to download $modname:" mkl_err0 $out return 1 fi # Move downloaded file into place replacing the old file. mv "$tmpfile" "$fname" || return 1 # "Return" filename echo "$fname" return 0 } # Load module by name or filename # Arguments: # "require"|"try" # filename # [ module arguments ] function mkl_module_load { local try=$1 shift local fname=$1 shift local modname=${fname#*configure.} local bypath=1 # Check if already loaded if mkl_in_list "$MKL_MODULES" "$modname"; then return 0 fi if [[ $fname = $modname ]]; then # Module specified by name, find the file. bypath=0 for fname in configure.$modname \ ${MKLOVE_DIR}/modules/configure.$modname ; do [[ -s $fname ]] && break done fi # Calling module local cmod=$MKL_MODULE [[ -z $cmod ]] && cmod="base" if [[ ! -s $fname ]]; then # Attempt to download module, if permitted if [[ $MKL_NO_DOWNLOAD != 0 || $bypath == 1 ]]; then mkl_err "Module $modname not found at $fname (required by $cmod) and downloads disabled" if [[ $try = "require" ]]; then mkl_fail "$modname" "none" "fail" \ "Module $modname not found (required by $cmod) and downloads disabled" fi return 1 fi fname=$(mkl_module_download "$modname") if [[ $? -ne 0 ]]; then mkl_err "Module $modname not found (required by $cmod)" if [[ $try = "require" ]]; then mkl_fail "$modname" "none" "fail" \ "Module $modname not found (required by $cmod)" return 1 fi fi # Now downloaded, try loading the module again. mkl_module_load $try "$fname" "$@" return $? 
fi # Set current module local save_MKL_MODULE=$MKL_MODULE MKL_MODULE=$modname mkl_dbg "Loading module $modname (required by $cmod) from $fname" # Source module file (positional arguments are available to module) source $fname # Restore current module (might be recursive) MKL_MODULE=$save_MKL_MODULE # Add module to list of modules mkl_var_append MKL_MODULES $modname # Rename module's special functions so we can call them separately later. mkl_func_rename "options" "${modname}_options" mkl_func_push MKL_CHECKS "$modname" "checks" mkl_func_push MKL_GENERATORS "$modname" "generate" mkl_func_push MKL_CLEANERS "$modname" "clean" } # Require and load module # Must only be called from module file outside any function. # Arguments: # [ --try ] Don't fail if module doesn't exist # module1 # [ "must" "pass" ] # [ module arguments ... ] function mkl_require { local try="require" if [[ $1 = "--try" ]]; then local try="try" shift fi local mod=$1 shift local override_action= # Check for cyclic dependencies if mkl_in_list "$MKL_LOAD_STACK" "$mod"; then mkl_err "Cyclic dependency detected while loading $mod module:" local cmod= local lmod=$mod for cmod in $MKL_LOAD_STACK ; do mkl_err " $lmod required by $cmod" lmod=$cmod done mkl_fail base "" fail "Cyclic dependency detected while loading module $mod" return 1 fi mkl_var_prepend MKL_LOAD_STACK "$mod" if [[ "$1 $2" == "must pass" ]]; then shift shift override_action="fail" fi if [[ ! -z $override_action ]]; then mkl_meta_set "MOD__$mod" "override_action" "$override_action" fi mkl_module_load $try $mod "$@" local ret=$? mkl_var_shift MKL_LOAD_STACK return $ret } ########################################################################### # # # Usage options # # ########################################################################### MKL_USAGE="Usage: ./configure [OPTIONS...] mklove configure script - mklove, not autoconf Copyright (c) 2014 Magnus Edenhill - https://github.com/edenhill/mklove " function mkl_usage { echo "$MKL_USAGE" local og for og in $MKL_USAGE_GROUPS ; do og="MKL_USAGE_GROUP__$og" echo "${!og}" done echo "Honoured environment variables: CC, CPP, CXX, CFLAGS, CPPFLAGS, CXXFLAGS, LDFLAGS, LIBS, LD, NM, OBJDUMP, STRIP, PKG_CONFIG, PKG_CONFIG_PATH, STATIC_LIB_<libname>=.../libname.a " } # Add usage option informative text # Arguments: # text function mkl_usage_info { MKL_USAGE="$MKL_USAGE $1" } # Add option to usage output # Arguments: # option group ("Standard", "Cross-Compilation", etc..) # variable name # option ("--foo=feh") # help # default (optional) # assignvalue (optional, default:"y") # function block (optional) function mkl_option { local optgroup=$1 local varname=$2 # Fixed width between option name and help in usage output local pad="                        " if [[ ${#3} -lt ${#pad} ]]; then pad=${pad:0:$(expr ${#pad} - ${#3})} else pad="" fi # Add to usage output local optgroup_safe=$(mkl_env_esc $optgroup) if ! mkl_in_list "$MKL_USAGE_GROUPS" "$optgroup_safe" ; then mkl_env_append MKL_USAGE_GROUPS "$optgroup_safe" mkl_env_set "MKL_USAGE_GROUP__$optgroup_safe" "$optgroup options: " fi local defstr="" [[ ! 
-z $5 ]] && defstr=" [$5]" mkl_env_append "MKL_USAGE_GROUP__$optgroup_safe" " $3 $pad $4$defstr " local optname="${3#--}" local safeopt= local optval="" if [[ $3 == *=* ]]; then optname="${optname%=*}" optval="${3#*=}" fi safeopt=$(mkl_env_esc $optname) mkl_meta_set "MKL_OPT_ARGS" "$safeopt" "$optval" # # Optional variable scoping by prefix: "env:", "mk:", "def:" # local setallvar="mkl_allvar_set ''" local setmkvar="mkl_mkvar_set ''" if [[ $varname = env:* ]]; then # Set environment variable (during configure runtime only) varname=${varname#*:} setallvar=mkl_env_set setmkvar=mkl_env_set elif [[ $varname = mk:* ]]; then # Set Makefile.config variable varname=${varname#*:} setallvar="mkl_mkvar_append ''" setmkvar="mkl_mkvar_append ''" elif [[ $varname = def:* ]]; then # Set config.h define varname=${varname#*:} setallvar="mkl_define_set ''" setmkvar="mkl_define_set ''" fi if [[ ! -z $7 ]]; then # Function block specified. eval "function opt_$safeopt { $7 }" else # Add default implementation of function simply setting the value. # Application may override this by redefining the function after calling # mkl_option. if [[ $optval = "PATH" ]]; then # PATH argument: make it an absolute path. # Only set the make variable (not config.h) eval "function opt_$safeopt { $setmkvar $varname \"\$(mkl_abspath \$(mkl_render \$1))\"; }" else # Standard argument: simply set the value if [[ -z "$6" ]]; then eval "function opt_$safeopt { $setallvar $varname \"\$1\"; }" else eval "function opt_$safeopt { $setallvar $varname \"$6\"; }" fi fi fi # If default value is provided and does not start with "$" (variable ref) # then set it right away. # $ variable refs are set after all checks have run during the # generating step. if [[ ${#5} != 0 ]] ; then if [[ $5 = *\$* ]]; then mkl_var_append "MKL_LATE_VARS" "opt_$safeopt:$5" else opt_$safeopt $5 fi fi if [[ ! -z $varname ]]; then # Add variable to list MKL_CONFVARS="$MKL_CONFVARS $varname" fi } # Adds a toggle (--enable-X, --disable-X) option. # Arguments: # option group ("Standard", ..) # variable name (WITH_FOO) # option (--enable-foo) # help ("foo.." ("Enable" and "Disable" will be prepended)) # default (y or n) function mkl_toggle_option { # Add option argument mkl_option "$1" "$2" "$3" "$4" "$5" # Add corresponding "--disable-foo" option for "--enable-foo". local disname="${3/--enable/--disable}" local dishelp="${4/Enable/Disable}" mkl_option "$1" "$2" "$disname" "$dishelp" "" "n" } # Adds a toggle (--enable-X, --disable-X) option with builtin checker. # This is the library version. # Arguments: # option group ("Standard", ..) # config name (foo, must be same as pkg-config name) # variable name (WITH_FOO) # action (fail or disable) # option (--enable-foo) # help (defaults to "Enable ") # linker flags (-lfoo) # default (y or n) function mkl_toggle_option_lib { local help="$6" [[ -z "$help" ]] && help="Enable $2" # Add option argument mkl_option "$1" "$3" "$5" "$help" "$8" # Add corresponding "--disable-foo" option for "--enable-foo". local disname="${5/--enable/--disable}" local dishelp="${help/Enable/Disable}" mkl_option "$1" "$3" "$disname" "$dishelp" "" "n" # Create checks eval "function _tmp_func { mkl_lib_check \"$2\" \"$3\" \"$4\" CC \"$7\"; }" mkl_func_push MKL_CHECKS "$MKL_MODULE" _tmp_func } kafkacat-1.2.0/mklove/modules/configure.builtin000066400000000000000000000051301253367242100216010ustar00rootroot00000000000000#!/bin/bash # # mklove builtin checks and options # Sets: # prefix, etc.. 
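# Example (illustrative): the options declared below make invocations such
# as the following possible (the paths are only an example):
#   ./configure --prefix=/opt/kafkacat --sysconfdir=/etc --no-cache --debug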
mkl_option "Standard" prefix "--prefix=PATH" \ "Install arch-independent files in PATH" "/usr/local" mkl_option "Standard" exec_prefix "--exec-prefix=PATH" \ "Install arch-dependent files in PATH" "\$prefix" mkl_option "Standard" bindir "--bindir=PATH" "User executables" "\$exec_prefix/bin" mkl_option "Standard" sbindir "--sbindir=PATH" "System admin executables" \ "\$exec_prefix/sbin" mkl_option "Standard" libexecdir "--libexecdir=PATH" "Program executables" \ "\$exec_prefix/libexec" mkl_option "Standard" datadir "--datadir=PATH" "Read-only arch-independent data" \ "\$prefix/share" mkl_option "Standard" sysconfdir "--sysconfdir=PATH" "Configuration data" \ "\$prefix/etc" mkl_option "Standard" sharedstatedir "--sharedstatedir=PATH" \ "Modifiable arch-independent data" "\$prefix/com" mkl_option "Standard" localstatedir "--localstatedir=PATH" \ "Modifiable local state data" "\$prefix/var" mkl_option "Standard" libdir "--libdir=PATH" "Libraries" "\$exec_prefix/lib" mkl_option "Standard" includedir "--includedir=PATH" "C/C++ header files" \ "\$prefix/include" mkl_option "Standard" infodir "--infodir=PATH" "Info documentation" "\$prefix/info" mkl_option "Standard" mandir "--mandir=PATH" "Manual pages" "\$prefix/man" mkl_option "Configure tool" "" "--list-modules" "List loaded mklove modules" mkl_option "Configure tool" "" "--list-checks" "List checks" mkl_option "Configure tool" env:MKL_FAILFATAL "--fail-fatal" "All failures are fatal" mkl_option "Configure tool" env:MKL_NOCACHE "--no-cache" "Dont use or generate config.cache" mkl_option "Configure tool" env:MKL_DEBUG "--debug" "Enable configure debugging" mkl_option "Configure tool" env:MKL_CLEAN "--clean" "Remove generated configure files" mkl_option "Configure tool" "" "--reconfigure" "Rerun configure with same arguments as last run" mkl_option "Configure tool" env:MKL_NO_DOWNLOAD "--no-download" "Disable downloads of required mklove modules" mkl_option "Configure tool" env:MKL_UPDATE_MODS "--update-modules" "Update modules from global repository" mkl_option "Configure tool" env:MKL_REPO_URL "--repo-url=URL_OR_PATH" "Override mklove modules repo URL" "$MKL_REPO_URL" mkl_option "Configure tool" "" "--help" "Show configure usage" mkl_toggle_option "Compatibility" "mk:MKL_MAINT_MODE" "--enable-maintainer-mode" "Maintainer mode (no-op)" mkl_option "Configure tool" "mk:PROGRAM_PREFIX" "--program-prefix=PFX" "Program prefix" mkl_option "Compatibility" "mk:DISABL_DEP_TRACK" "--disable-dependency-tracking" "Disable dependency tracking (no-op)" kafkacat-1.2.0/mklove/modules/configure.cc000066400000000000000000000137161253367242100205310ustar00rootroot00000000000000#!/bin/bash # # Compiler detection # Sets: # CC, CXX, CFLAGS, CPPFLAGS, LDFLAGS, ARFLAGS, PKG_CONFIG, INSTALL, MBITS mkl_require host function checks { # C compiler mkl_meta_set "ccenv" "name" "C compiler from CC env" if ! mkl_command_check "ccenv" "WITH_CC" cont "$CC --version"; then if mkl_command_check "gcc" "WITH_GCC" cont "gcc --version"; then CC=gcc elif mkl_command_check "clang" "WITH_CLANG" cont "clang --version"; then CC=clang elif mkl_command_check "cc" "WITH_CC" fail "cc --version"; then CC=cc fi fi export CC="${CC}" mkl_mkvar_set CC CC "$CC" if [[ $MKL_CC_WANT_CXX == 1 ]]; then # C++ compiler mkl_meta_set "cxxenv" "name" "C++ compiler from CXX env" if ! 
mkl_command_check "cxxenv" "WITH_CXX" cont "$CXX --version" ; then mkl_meta_set "gxx" "name" "C++ compiler (g++)" mkl_meta_set "clangxx" "name" "C++ compiler (clang++)" mkl_meta_set "cxx" "name" "C++ compiler (c++)" if mkl_command_check "gxx" "WITH_GXX" cont "g++ --version"; then CXX=g++ elif mkl_command_check "clangxx" "WITH_CLANGXX" cont "clang++ --version"; then CXX=clang++ elif mkl_command_check "cxx" "WITH_CXX" fail "c++ --version"; then CXX=c++ fi fi export CXX="${CXX}" mkl_mkvar_set "CXX" CXX $CXX fi # Handle machine bits, if specified. if [[ ! -z "$MBITS" ]]; then mkl_meta_set mbits_m name "mbits compiler flag (-m$MBITS)" if mkl_compile_check mbits_m "" fail CC "-m$MBITS"; then mkl_mkvar_append CPPFLAGS CPPFLAGS "-m$MBITS" mkl_mkvar_append LDFLAGS LDFLAGS "-m$MBITS" fi if [[ -z "$ARFLAGS" && $MBITS == 64 && $MKL_DISTRO == "SunOS" ]]; then # Turn on 64-bit archives on SunOS mkl_mkvar_append ARFLAGS ARFLAGS "S" fi fi # Provide prefix and checks for various other build tools. local t= for t in LD:ld NM:nm OBJDUMP:objdump STRIP:strip ; do local tenv=${t%:*} t=${t#*:} local tval="${!tenv}" [[ -z $tval ]] && tval="$t" if mkl_prog_check "$t" "" disable "$tval" ; then if [[ $tval != ${!tenv} ]]; then export "$tenv"="$tval" fi mkl_mkvar_set $tenv $tenv "$tval" fi done # Compiler and linker flags [[ ! -z $CFLAGS ]] && mkl_mkvar_set "CFLAGS" "CFLAGS" "$CFLAGS" [[ ! -z $CPPFLAGS ]] && mkl_mkvar_set "CPPFLAGS" "CPPFLAGS" "$CPPFLAGS" [[ ! -z $CXXFLAGS ]] && mkl_mkvar_set "CXXFLAGS" "CXXFLAGS" "$CXXFLAGS" [[ ! -z $LDFLAGS ]] && mkl_mkvar_set "LDFLAGS" "LDFLAGS" "$LDFLAGS" [[ ! -z $ARFLAGS ]] && mkl_mkvar_set "ARFLAGS" "ARFLAGS" "$ARFLAGS" if [[ $MKL_NO_DEBUG_SYMBOLS != "y" ]]; then # Add debug symbol flag (-g) # OSX 10.9 requires -gstrict-dwarf for some reason. mkl_meta_set cc_g_dwarf name "debug symbols compiler flag (-g...)" if [[ $MKL_DISTRO == "osx" ]]; then if mkl_compile_check cc_g_dwarf "" cont CC "-gstrict-dwarf"; then mkl_mkvar_append CPPFLAGS CPPFLAGS "-gstrict-dwarf" else mkl_mkvar_append CPPFLAGS CPPFLAGS "-g" fi else mkl_mkvar_append CPPFLAGS CPPFLAGS "-g" fi fi # pkg-config if [ -z "$PKG_CONFIG" ]; then PKG_CONFIG=pkg-config fi if mkl_command_check "pkgconfig" "WITH_PKGCONFIG" cont "$PKG_CONFIG --version"; then export PKG_CONFIG fi mkl_mkvar_set "pkgconfig" PKG_CONFIG $PKG_CONFIG [[ ! -z "$PKG_CONFIG_PATH" ]] && mkl_env_append PKG_CONFIG_PATH "$PKG_CONFIG_PATH" # install if [ -z "$INSTALL" ]; then if [[ $MKL_DISTRO == "SunOS" ]]; then mkl_meta_set ginstall name "GNU install" if mkl_command_check ginstall "" ignore "ginstall --version"; then INSTALL=ginstall else INSTALL=install fi else INSTALL=install fi fi if mkl_command_check "install" "WITH_INSTALL" cont "$INSTALL --version"; then export INSTALL fi mkl_mkvar_set "install" INSTALL $INSTALL # Enable profiling if desired if [[ $WITH_PROFILING == y ]]; then mkl_allvar_set "" "WITH_PROFILING" "y" mkl_mkvar_append CPPFLAGS CPPFLAGS "-pg" mkl_mkvar_append LDFLAGS LDFLAGS "-pg" fi # Optimization if [[ $WITHOUT_OPTIMIZATION == n ]]; then mkl_mkvar_append CPPFLAGS CPPFLAGS "-O2" else mkl_mkvar_append CPPFLAGS CPPFLAGS "-O0" fi # Static linking if [[ $WITH_STATIC_LINKING == y ]]; then # LDFLAGS_STATIC is the LDFLAGS needed to enable static linking # of sub-sequent libraries, while # LDFLAGS_DYNAMIC is the LDFLAGS needed to enable dynamic linking. 
mkl_mkvar_set staticlinking LDFLAGS_STATIC "-Wl,-Bstatic" mkl_mkvar_set staticlinking LDFLAGS_DYNAMIC "-Wl,-Bdynamic" fi } mkl_option "Compiler" "env:CC" "--cc=CC" "Build using C compiler CC" "\$CC" mkl_option "Compiler" "env:CXX" "--cxx=CXX" "Build using C++ compiler CXX" "\$CXX" mkl_option "Compiler" "ARCH" "--arch=ARCH" "Build for architecture" "$(uname -m)" mkl_option "Compiler" "CPU" "--cpu=CPU" "Build and optimize for specific CPU" "generic" mkl_option "Compiler" "MBITS" "--mbits=BITS" "Machine bits (32 or 64)" "" for n in CFLAGS CPPFLAGS CXXFLAGS LDFLAGS ARFLAGS; do mkl_option "Compiler" "mk:$n" "--$n=$n" "Add $n flags" done mkl_option "Compiler" "env:PKG_CONFIG_PATH" "--pkg-config-path" "Extra paths for pkg-config" mkl_option "Compiler" "WITH_PROFILING" "--enable-profiling" "Enable profiling" mkl_option "Compiler" "WITH_STATIC_LINKING" "--enable-static" "Enable static linking" mkl_option "Compiler" "WITHOUT_OPTIMIZATION" "--disable-optimization" "Disable optimization flag to compiler" "n" mkl_option "Compiler" "env:MKL_NO_DEBUG_SYMBOLS" "--disable-debug-symbols" "Disable debugging symbols" "n" mkl_option "Compiler" "env:MKL_WANT_WERROR" "--enable-werror" "Enable compiler warnings as errors" "n" kafkacat-1.2.0/mklove/modules/configure.gitversion000066400000000000000000000006741253367242100223340ustar00rootroot00000000000000#!/bin/bash # # Sets version variable from git information. # Optional arguments: # "as" # VARIABLE_NAME # # Example: Set version in variable named "MYVERSION": # mkl_require gitversion as MYVERSION if [[ $1 == "as" ]]; then __MKL_GITVERSION_VARNAME="$2" else __MKL_GITVERSION_VARNAME="VERSION" fi function checks { mkl_allvar_set "gitversion" "$__MKL_GITVERSION_VARNAME" "$(git describe --abbrev=6 --tags HEAD --always)" } kafkacat-1.2.0/mklove/modules/configure.good_cflags000066400000000000000000000004741253367242100224100ustar00rootroot00000000000000#!/bin/bash # # Provides some known-good CFLAGS # Sets: # CFLAGS # CXXFLAGS # CPPFLAGS function checks { mkl_mkvar_append CPPFLAGS CPPFLAGS \ "-Wall -Wfloat-equal -Wpointer-arith" if [[ $MKL_WANT_WERROR = "y" ]]; then mkl_mkvar_append CPPFLAGS CPPFLAGS \ "-Werror" fi } kafkacat-1.2.0/mklove/modules/configure.host000066400000000000000000000053661253367242100211230ustar00rootroot00000000000000#!/bin/bash # # Host OS support # Sets: # HOST # BUILD # TARGET # FIXME: No need for this right now #mkl_require host_linux #mkl_require host_osx #mkl_require host_cygwin #mkl_option "Cross-compilation" "mk:HOST_OS" "--host-os=osname" "Host OS (linux,osx,cygwin,..)" "auto" # autoconf compatibility - does nothing at this point mkl_option "Cross-compilation" "mk:HOST" "--host=HOST" "Configure to build programs to run on HOST (no-op)" mkl_option "Cross-compilation" "mk:BUILD" "--build=BUILD" "Configure for building on BUILD (no-op)" mkl_option "Cross-compilation" "mk:TARGET" "--target=TARGET" "Configure for building cross-toolkits for platform TARGET (no-op)" function checks { # Try to figure out what OS/distro we are running on. mkl_check_begin "distro" "" "no-cache" "OS or distribution" # Try lsb_release local sys sys=$(lsb_release -is 2>/dev/null) if [[ $? -gt 0 ]]; then # That didnt work, try uname. 
local kn=$(uname -s) case $kn in Linux) sys=Linux ;; Darwin) sys=osx ;; CYGWIN*) sys=Cygwin ;; *) sys="$kn" ;; esac fi if [[ -z $sys ]]; then mkl_check_failed "distro" "" "ignore" "" else mkl_check_done "distro" "" "ignore" "ok" "$sys" mkl_mkvar_set "distro" "MKL_DISTRO" "$sys" fi } #function checks { # mkl_check_begin "host" "HOST_OS" "no-cache" "host OS" # # # # # If --host-os=.. was not specified then this is most likely not a # # a cross-compilation and we can base the host-os on the native OS. # # # if [[ $HOST_OS != "auto" ]]; then # mkl_check_done "host" "HOST_OS" "cont" "ok" "$HOST_OS" # return 0 # fi # # kn=$(uname -s) # case $kn in # Linux) # hostos=linux # ;; # Darwin) # hostos=osx # ;; # CYGWIN*) # hostos=cygwin # ;; # *) # hostos="$(mkl_lower $kn)" # mkl_err "Unknown host OS kernel name: $kn" # mkl_err0 " Will attempt to load module host_$hostos anyway." # mkl_err0 " Please consider writing a configure.host_$hostos" # ;; # esac # # if ! mkl_require --try "host_$hostos"; then # # Module not found # mkl_check_done "host" "HOST_OS" "cont" "failed" "$kn?" # else # # Module loaded # # if mkl_func_exists "host_${hostos}_setup" ; then # "host_${hostos}_setup" # fi # # mkl_check_done "host" "HOST_OS" "cont" "ok" "$hostos" # fi # # # Set HOST_OS var even if probing failed. # mkl_mkvar_set "host" "HOST_OS" "$hostos" #} kafkacat-1.2.0/rpm/000077500000000000000000000000001253367242100140625ustar00rootroot00000000000000kafkacat-1.2.0/rpm/kafkacat.spec000066400000000000000000000030601253367242100165020ustar00rootroot00000000000000Name: kafkacat Version: 1.2.0 Release: 1%{?dist} Summary: kafkacat is a generic non-JVM producer and consumer for Apache Kafka 0.8, think of it as a netcat for Kafka. Group: Productivity/Networking/Other License: BSD-2-Clause URL: https://github.com/edenhill/kafkacat Source: kafkacat-%{version}.tar.gz Requires: librdkafka1 BuildRequires: zlib-devel gcc >= 4.1 librdkafka-devel BuildRoot: %(mktemp -ud %{_tmppath}/%{name}-%{version}-%{release}-XXXXXX) %description kafkacat is a generic non-JVM producer and consumer for Apache Kafka 0.8, think of it as a netcat for Kafka. In producer mode kafkacat reads messages from stdin, delimited with a configurable delimeter (-D, defaults to newline), and produces them to the provided Kafka cluster (-b), topic (-t) and partition (-p). In consumer mode kafkacat reads messages from a topic and partition and prints them to stdout using the configured message delimiter. kafkacat also features a Metadata list (-L) mode to display the current state of the Kafka cluster and its topics and partitions. kafkacat is fast and lightweight; statically linked it is no more than 150Kb. %prep %setup -q %configure %build make %install rm -rf %{buildroot} DESTDIR=%{buildroot} make install %clean rm -rf %{buildroot} %files -n %{name} %defattr(755,root,root) %{_bindir}/kafkacat %defattr(644,root,root) %doc README.md %doc LICENSE %changelog * Wed Jun 03 2015 Magnus Edenhill 1.2.0-1 - Relase 1.2.0 * Fri Dec 19 2014 François Saint-Jacques 1.1.0-1 - Initial RPM package
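# Illustrative usage note (assumption, not part of the original spec):
# since the release tarball ships this spec file, a binary RPM can
# typically be built directly from it with:
#   rpmbuild -ta kafkacat-1.2.0.tar.gz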