cuttlefish-3.0.1/0000755000232200023220000000000014027401005014224 5ustar debalancedebalancecuttlefish-3.0.1/priv/0000755000232200023220000000000014027401005015204 5ustar debalancedebalancecuttlefish-3.0.1/priv/erlang_vm.schema0000644000232200023220000003025114027401005020341 0ustar debalancedebalance%%-*- mode: erlang -*- %% @doc Starts the Erlang runtime system with SMP support %% enabled. This may fail if no runtime system with SMP support is %% available. The 'auto' setting starts the Erlang runtime system with %% SMP support enabled if it is available and more than one logical %% processor are detected. -smp disable starts a runtime system %% without SMP support. %% %% NOTE: The runtime system with SMP support will not be available on %% all supported platforms. See also the erlang.schedulers settings. %% %% NOTE: Some native extensions (NIFs) require use of the SMP %% emulator. %% %% More information at: http://erlang.org/doc/man/erl.html {mapping, "erlang.smp", "vm_args.-smp", [ {default, enable}, {datatype, {enum, [enable, auto, disable]}}, hidden ]}. %% @doc Sets the mapping of warning messages for error_logger. %% Messages sent to the error logger using one of the warning %% routines can be mapped either to errors (default), warnings %% (w - default), or info reports (i). {mapping, "erlang.W", "vm_args.+W", [ {default, "w"}, hidden ]}. %% @doc Sets the number of scheduler threads to create and scheduler %% threads to set online when erlang.smp support has been enabled. The %% maximum for both values is 1024. If the Erlang runtime system is %% able to determine the amount of logical processors configured and %% logical processors available, schedulers.total will default to %% logical processors configured, and schedulers.online will default %% to logical processors available; otherwise, the default values will %% be 1. Schedulers may be omitted if schedulers.online is not and %% vice versa. 
%% %% If schedulers.total or schedulers.online is specified as a negative %% number, the value is subtracted from the default number of logical %% processors configured or logical processors available, %% respectively. %% %% Specifying the value 0 for Schedulers or SchedulersOnline resets %% the number of scheduler threads or scheduler threads online %% respectively to its default value. %% %% This option is ignored if the emulator doesn't have SMP support %% enabled (see the erlang.smp flag). %% %% More information at: http://erlang.org/doc/man/erl.html %% +S Schedulers:SchedulerOnline {mapping, "erlang.schedulers.total", "vm_args.+S", [ {default, undefined}, {datatype, integer}, {validators, ["=<1024"]} ]}. %% @see erlang.schedulers.total {mapping, "erlang.schedulers.online", "vm_args.+S", [ {default, undefined}, {datatype, integer}, {validators, ["=<1024"]} ]}. {translation, "vm_args.+S", fun(Conf) -> Total = cuttlefish:conf_get("erlang.schedulers.total", Conf, undefined), Online = cuttlefish:conf_get("erlang.schedulers.online", Conf, undefined), case {Total, Online} of {undefined, undefined} -> cuttlefish:unset(); {undefined, O} -> ":" ++ integer_to_list(O); {T, undefined} -> integer_to_list(T); _ -> integer_to_list(Total) ++ ":" ++ integer_to_list(Online) end end }. {validator, "=<1024", "has a maximum value of 1024", fun(X) -> X =< 1024 end}. %% @doc Enables or disables the kernel poll functionality if the %% emulator supports it. If the emulator does not support kernel poll, %% and the K flag is passed to the emulator, a warning is issued at %% startup. %% %% Similar information at: http://erlang.org/doc/man/erl.html {mapping, "erlang.K", "vm_args.+K", [ {default, on}, {datatype, flag}, hidden ]}. %%%% Tunables %% @doc Name of the Erlang node {mapping, "nodename", "vm_args.-name", [ {default, "{{node}}"} ]}. %% @doc Cookie for distributed node communication. 
All nodes in the %% same cluster should use the same cookie or they will not be able to %% communicate. {mapping, "distributed_cookie", "vm_args.-setcookie", [ {default, "erlang"} ]}. %% @doc Sets the number of threads in async thread pool, valid range %% is 0-1024. If thread support is available, the default is 64. %% %% More information at: http://erlang.org/doc/man/erl.html {mapping, "erlang.async_threads", "vm_args.+A", [ {default, 64}, {datatype, integer}, {validators, ["range:0-1024"]} ]}. {validator, "range:0-1024", "must be 0 to 1024", fun(X) -> X >= 0 andalso X =< 1024 end}. %% @doc Suggested stack size, in bytes, for threads in the %% async-thread pool. Valid range is 16-8192 kilowords. The default %% suggested stack size is 16 kilowords, i.e, 64 kilobyte on 32-bit %% architectures. This small default size has been chosen since the %% amount of async-threads might be quite large. The default size is %% enough for drivers delivered with Erlang/OTP, but might not be %% sufficiently large for other dynamically linked in drivers that use %% the driver_async() functionality. Note that the value passed is %% only a suggestion, and it might even be ignored on some platforms. %% %% More information at: http://erlang.org/doc/man/erl.html {mapping, "erlang.async_threads.stack_size", "vm_args.+a", [ {datatype, bytesize}, {validators, [ "stack-size-divisible", "stack-size-range"]}, hidden ]}. {validator, "stack-size-divisible", ("must be divisible by " ++ integer_to_list(erlang:system_info({wordsize,external}))), fun(X) -> X rem (erlang:system_info({wordsize, external})) == 0 end}. {validator, "stack-size-range", begin WordSize = erlang:system_info({wordsize, external}), ("must be in the range of " ++ cuttlefish_bytesize:to_string(16 * 1024 * WordSize) ++ " to " ++ cuttlefish_bytesize:to_string(8192 * 1024 * WordSize)) end, fun(X) -> Scaled = X div (1024 * erlang:system_info({wordsize, external})), Scaled =< 8192 andalso Scaled >= 16 end}. 
{translation, "vm_args.+a", fun(Conf) -> RawValue = cuttlefish:conf_get("erlang.async_threads.stack_size", Conf), RawValue div (1024 * erlang:system_info({wordsize, external})) end}. %% Note: OTP R15 and earlier uses -env ERL_MAX_PORTS, R16+ uses +Q %% @doc The number of concurrent ports/sockets %% Valid range is 1024-134217727 {mapping, "erlang.max_ports", cuttlefish:otp("R16", "vm_args.+Q", "vm_args.-env ERL_MAX_PORTS"), [ {default, 262144}, {datatype, integer}, {validators, ["range4ports"]} ]}. {validator, "range4ports", "must be 1024 to 134217727", fun(X) -> X >= 1024 andalso X =< 134217727 end}. %% @doc A non-negative integer which indicates how many times %% generational garbage collections can be done without forcing a %% fullsweep collection. In low-memory systems (especially without %% virtual memory), setting the value to 0 can help to conserve %% memory. %% %% More information at: %% http://www.erlang.org/doc/man/erlang.html#system_flag-2 {mapping, "erlang.fullsweep_after", "vm_args.-env ERL_FULLSWEEP_AFTER", [ {default, 0}, {datatype, integer}, hidden, {validators, ["positive_integer"]} ]}. {validator, "positive_integer", "must be a positive integer", fun(X) -> X >= 0 end}. %% @doc Set the location of crash dumps {mapping, "erlang.crash_dump", "vm_args.-env ERL_CRASH_DUMP", [ {default, "{{crash_dump}}"}, {datatype, file}, hidden ]}. %% Note: OTP R15 and earlier uses -env ERL_MAX_ETS_TABLES, %% R16+ uses +e %% @doc Raise the ETS table limit {mapping, "erlang.max_ets_tables", cuttlefish:otp("R16", "vm_args.+e", "vm_args.-env ERL_MAX_ETS_TABLES"), [ {default, 256000}, {datatype, integer}, hidden ]}. %% @doc Raise the default erlang process limit {mapping, "erlang.process_limit", "vm_args.+P", [ {datatype, integer}, {default, 256000}, hidden ]}. %% @doc For nodes with many busy_dist_port events, Basho recommends %% raising the sender-side network distribution buffer size. %% 32MB may not be sufficient for some workloads and is a suggested %% starting point. 
Erlangers may know this as +zdbbl. %% The Erlang/OTP default is 1024 (1 megabyte). %% See: http://www.erlang.org/doc/man/erl.html#%2bzdbbl {mapping, "erlang.distribution_buffer_size", "vm_args.+zdbbl", [ {datatype, bytesize}, {commented, "32MB"}, hidden, {validators, ["zdbbl_range"]} ]}. {translation, "vm_args.+zdbbl", fun(Conf) -> ZDBBL = cuttlefish:conf_get("erlang.distribution_buffer_size", Conf, undefined), case ZDBBL of undefined -> undefined; X when is_integer(X) -> cuttlefish_util:ceiling(X / 1024); %% Bytes to Kilobytes; _ -> undefined end end }. {validator, "zdbbl_range", "must be between 1KB and 2097151KB", fun(ZDBBL) -> %% 2097151KB = 2147482624 ZDBBL >= 1024 andalso ZDBBL =< 2147482624 end }. %% @doc Set scheduler forced wakeup interval. All run queues will be %% scanned each Interval milliseconds. While there are sleeping %% schedulers in the system, one scheduler will be woken for each %% non-empty run queue found. An Interval of zero disables this %% feature, which also is the default. %% %% This feature is a workaround for lengthy executing native code, and %% native code that do not bump reductions properly. %% %% More information: http://www.erlang.org/doc/man/erl.html#+sfwi {mapping, "erlang.schedulers.force_wakeup_interval", "vm_args.+sfwi", [ {commented, 500}, {datatype, integer} ]}. %% @doc Enable or disable scheduler compaction of load. By default %% scheduler compaction of load is enabled. When enabled, load %% balancing will strive for a load distribution which causes as many %% scheduler threads as possible to be fully loaded (i.e., not run out %% of work). This is accomplished by migrating load (e.g. runnable %% processes) into a smaller set of schedulers when schedulers %% frequently run out of work. When disabled, the frequency with which %% schedulers run out of work will not be taken into account by the %% load balancing logic. 
%% %% More information: http://www.erlang.org/doc/man/erl.html#+scl {mapping, "erlang.schedulers.compaction_of_load", "vm_args.+scl", [ {commented, "false"}, {datatype, {enum, [true, false]}} ]}. %% @doc Enable or disable scheduler utilization balancing of load. By %% default scheduler utilization balancing is disabled and instead %% scheduler compaction of load is enabled which will strive for a %% load distribution which causes as many scheduler threads as %% possible to be fully loaded (i.e., not run out of work). When %% scheduler utilization balancing is enabled the system will instead %% try to balance scheduler utilization between schedulers. That is, %% strive for equal scheduler utilization on all schedulers. %% %% More information: http://www.erlang.org/doc/man/erl.html#+sub {mapping, "erlang.schedulers.utilization_balancing", "vm_args.+sub", [ {commented, "true"}, {datatype, {enum, [true, false]}} ]}. %% @doc For ease of firewall configuration, the Erlang distribution %% can be bound to a limited range of TCP ports. If this is set, and %% erlang.distribution.port_range.maximum is *unset*, only this port %% will be used. If the minimum is *unset*, no restriction will be %% made on the port range; instead Erlang will listen on a random %% high-numbered port. %% %% More information: http://www.erlang.org/faq/how_do_i.html#id55090 %% http://www.erlang.org/doc/man/kernel_app.html {mapping, "erlang.distribution.port_range.minimum", "kernel.inet_dist_listen_min", [ {commented, 6000}, {datatype, integer}, hidden ]}. %% @see erlang.distribution.port_range.minimum {mapping, "erlang.distribution.port_range.maximum", "kernel.inet_dist_listen_max", [ {commented, 7999}, {datatype, integer}, hidden ]}. %% @doc Set the interface/IP to listen for distributed Erlang connections. 
%% %% More information: http://erlang.org/doc/man/kernel_app.html {mapping, "erlang.distribution.interface", "kernel.inet_dist_use_interface", [ {commented, "true"}, {datatype, string}, {validators, ["ip_strict"]}, hidden ]}. {translation, "kernel.inet_dist_use_interface", fun(Conf) -> IPStr = cuttlefish:conf_get("erlang.distribution.interface", Conf), {ok, IP_address} = inet:parse_strict_address(IPStr), IP_address end }. {validator, "ip_strict", "must be a valid IPv4 or IPv6 address", fun(String) -> try inet:parse_strict_address(String) of {ok,{_,_,_,_}} -> true; {ok,{_,_,_,_,_,_,_,_}} -> true; _ -> false catch _:_ -> false end end}. %% @doc Set the net_kernel's net_ticktime. %% %% More information: http://www.erlang.org/doc/man/kernel_app.html#net_ticktime %% and http://www.erlang.org/doc/man/net_kernel.html#set_net_ticktime-1 {mapping, "erlang.distribution.net_ticktime", "vm_args.-kernel net_ticktime", [ {commented, 60}, {datatype, integer}, hidden ]}. cuttlefish-3.0.1/test/0000755000232200023220000000000014027401005015203 5ustar debalancedebalancecuttlefish-3.0.1/test/value20000644000232200023220000000002114027401005016315 0ustar debalancedebalancemulti line value cuttlefish-3.0.1/test/bad_erlang.schema0000644000232200023220000000056114027401005020445 0ustar debalancedebalance%% this is a bad erlang schema file %% it's purpose is to validate that we're good about %% reporting WHERE things went wrong. %% @doc this is some mapping that is ok! {mapping, "setting", "nested.setting", []}. %% but this one is bad! %% it has no closing bracket on the proplist %% @doc bad setting {mapping, "setting2, "nested.nesting", [ {datatype, string} }. cuttlefish-3.0.1/test/cuttlefish_test_logging.erl0000644000232200023220000000434014027401005022627 0ustar debalancedebalance-module(cuttlefish_test_logging). -behaviour(gen_event). -include_lib("kernel/include/logger.hrl"). %% API -export([start_link/0, add_handler/0]). 
-export([set_up/0, log/2, reset/0, get_logs/0, bounce/0, bounce/1]). %% gen_event callbacks -export([init/1, handle_event/2, handle_call/2, handle_info/2, terminate/2, code_change/3]). -define(SERVER, ?MODULE). -record(state, { logs :: [string() | binary() ]}). %% %% API %% set_up() -> Pid = case start_link() of {ok, Pid0} -> Pid0; {error, {already_started, Pid1}} -> Pid1 end, case lists:member(?MODULE, gen_event:which_handlers(Pid)) of true -> ok; false -> gen_event:add_handler(Pid, ?MODULE, []) end, ok. -spec get_logs() -> [iolist()] | {error, term()}. get_logs() -> gen_event:call(?SERVER, ?MODULE, get_logs, infinity). bounce() -> bounce(error). bounce(Level) -> gen_event:call(?SERVER, ?MODULE, reset), logger:remove_handler(?MODULE), logger:add_handler(?SERVER, ?MODULE, #{ level => Level }), ok. start_link() -> gen_event:start_link({local, ?SERVER}). log(LogEvent, Config) -> gen_event:call(?SERVER, ?MODULE, {log, LogEvent, Config}). reset() -> gen_event:call(?SERVER, ?MODULE, reset). add_handler() -> gen_event:add_handler(?SERVER, ?MODULE, []). %% %% Callbacks %% init([]) -> {ok, #state{ logs = [] }}. handle_event(Event, State = #state{logs = Logs}) -> {ok, State#state{logs = [Event | Logs]}}; handle_event(_Event, State) -> {ok, State}. handle_call({log, LogEvent, LogConfig}, State = #state{logs = Logs0}) -> #{formatter := {FModule, FConfig}} = LogConfig, %% [Time, " ", LevelStr, Message] Log = FModule:format(LogEvent, FConfig), Logs = [Log | Logs0], {ok, Logs, State#state{logs = Logs}}; handle_call(get_logs, State = #state{logs = Logs}) -> {ok, lists:reverse(Logs), State}; handle_call(reset, State) -> Reply = ok, {ok, Reply, State#state{logs = []}}; handle_call(_Request, State) -> Reply = ok, {ok, Reply, State}. handle_info(_Info, State) -> {ok, State}. terminate(_Reason, _State) -> ok. code_change(_OldVsn, State, _Extra) -> {ok, State}. 
cuttlefish-3.0.1/test/durations.schema0000644000232200023220000000032614027401005020376 0ustar debalancedebalance{mapping, "a.b.c", "cuttlefish.duration_extended", [ {default, "1m"}, {datatype, [{duration, s}, {atom, foo}]} ]}. {mapping, "b.c", "cuttlefish.duration", [ {default, "10s"}, {datatype, {duration, ms}} ]}.cuttlefish-3.0.1/test/include_dir.conf0000644000232200023220000000002614027401005020331 0ustar debalancedebalanceinclude conf.d/*.conf cuttlefish-3.0.1/test/included_value.conf0000644000232200023220000000022714027401005021036 0ustar debalancedebalancevalue1 = $( "toplevel" end}. %% And validators! {validator, "a.validator2", "so much validator", fun(_AlwaysFalse) -> false end}. %% But we can have our own mappings too! And they will come cuttlefish_test_logging {mapping, "top_level.var1", "app_a.big_var", []}. {translation, "app_a.big_var", fun(X) -> "tippedy top" end}. {validator, "top.val", "you only validate once.", fun(_AlwaysFalse) -> false end}. cuttlefish-3.0.1/test/cuttlefish_integration_test.erl0000644000232200023220000002437014027401005023531 0ustar debalancedebalance-module(cuttlefish_integration_test). -include_lib("kernel/include/logger.hrl"). -include_lib("eunit/include/eunit.hrl"). %% This test generates a default .conf file from the riak.schema. view it at generated.conf generated_conf_file_test() -> {_, Mappings, _} = cuttlefish_schema:file("test/riak.schema"), cuttlefish_conf:generate_file(Mappings, "generated.conf"), %% Schema generated a conf file, let's parse it! Conf = cuttlefish_conf:file("generated.conf"), ?assertEqual("8099", proplists:get_value(["handoff","port"], Conf)), ok. %% Same as above, but with the files in an .ez archive. generated_conf_file_ez_test() -> {_, Mappings, _} = cuttlefish_schema:file("test/riakconf.ez/riakconf/riak.schema"), cuttlefish_conf:generate_file(Mappings, "generated.conf"), %% Schema generated a conf file, let's parse it! 
Conf = cuttlefish_conf:file("generated.conf"), ?assertEqual("8099", proplists:get_value(["handoff","port"], Conf)), ok. %% This test generates a .config file from the riak.schema. view it at generated.config generated_config_file_test() -> Schema = cuttlefish_schema:file("test/riak.schema"), Conf = [], %% conf_parse:file("test/riak.conf"), NewConfig = cuttlefish_generator:map(Schema, Conf), file:write_file("generated.config",io_lib:fwrite("~p.\n",[NewConfig])), ok. %% Same as above, but with the files in an .ez archive. generated_config_file_ez_test() -> Schema = cuttlefish_schema:file("test/riakconf.ez/riakconf/riak.schema"), Conf = [], %% conf_parse:file("test/riak.conf"), NewConfig = cuttlefish_generator:map(Schema, Conf), file:write_file("generated.config",io_lib:fwrite("~p.\n",[NewConfig])), ok. breaks_on_fuzzy_and_strict_match_test() -> Schema = cuttlefish_schema:file("test/riak.schema"), Conf = [{["listener", "protobuf", "$name"], "127.0.0.1:8087"}], ?assertMatch({error, add_defaults, _}, cuttlefish_generator:map(Schema, Conf)), ok. breaks_on_rhs_not_found_test() -> Schema = cuttlefish_schema:file("test/riak.schema"), Conf = [{["ring", "state_dir"], "$(tyktorp)/ring"}], ?assertMatch({error, rhs_subs, _}, cuttlefish_generator:map(Schema, Conf)), ok. breaks_on_rhs_infinite_loop_test() -> Schema = cuttlefish_schema:file("test/riak.schema"), Conf = [ {["ring", "state_dir"], "$(platform_data_dir)/ring"}, {["platform_data_dir"], "$(ring.state_dir)/data"} ], ?assertMatch({error, rhs_subs, _}, cuttlefish_generator:map(Schema, Conf)), ok. breaks_on_bad_enum_test() -> Schema = cuttlefish_schema:file("test/riak.schema"), Conf = [{["storage_backend"], penguin}], ?assertMatch({error, transform_datatypes, _}, cuttlefish_generator:map(Schema, Conf)), ok. breaks_on_bad_validation_test() -> Schema = cuttlefish_schema:file("test/riak.schema"), Conf = [{["ring_size"], 10}], ?assertMatch({error, validation, _}, cuttlefish_generator:map(Schema, Conf)), ok. 
%% Tests that the schema can generate a default app.config from nothing all_the_marbles_test() -> Schema = cuttlefish_schema:file("test/riak.schema"), Conf = [], %conf_parse:file("test/riak.conf"), NewConfig = cuttlefish_generator:map(Schema, Conf), ?assert(is_proplist(NewConfig)), NewConfigWithoutVmargs = proplists:delete(vm_args, NewConfig), {ok, [AppConfig]} = file:consult("test/default.config"), ?assert(is_proplist(AppConfig)), proplist_equals(AppConfig, NewConfigWithoutVmargs), ok. multibackend_test() -> Schema = cuttlefish_schema:files(["test/riak.schema", "test/multi_backend.schema"]), Conf = [ {["storage_backend"], "multi"}, {["multi_backend","bitcask_mult","storage_backend"], "bitcask"}, {["multi_backend","bitcask_mult","bitcask","data_root"], "/path/to/dat/cask"}, {["multi_backend","leveldb_mult","storage_backend"], "leveldb"}, {["multi_backend","leveldb_mult","leveldb","data_root"], "/path/to/dat/level"}, {["multi_backend","memory_mult","storage_backend"], "memory"}, {["multi_backend","memory_mult","memory_backend","ttl"], "1d"}, {["multi_backend","leveldb_mult2","storage_backend"], "leveldb"}, {["multi_backend","leveldb_mult2","leveldb","data_root"], "/path/to/dat/level2"} ], NewConfig = cuttlefish_generator:map(Schema, Conf), KV = proplists:get_value(riak_kv, NewConfig), Multi = proplists:get_value(multi_backend, KV), {<<"bitcask_mult">>, riak_kv_bitcask_backend, BitcaskProps} = lists:keyfind(<<"bitcask_mult">>, 1, Multi), _ = ?LOG_INFO("BitcaskProps: ~p", [BitcaskProps]), ?assertEqual("/path/to/dat/cask", proplists:get_value(data_root, BitcaskProps)), ?assertEqual(4, proplists:get_value(open_timeout, BitcaskProps)), ?assertEqual(2147483648, proplists:get_value(max_file_size, BitcaskProps)), ?assertEqual(60, proplists:get_value(frag_merge_trigger, BitcaskProps)), ?assertEqual(536870912, proplists:get_value(dead_bytes_merge_trigger, BitcaskProps)), ?assertEqual(40, proplists:get_value(frag_threshold, BitcaskProps)), ?assertEqual(134217728, 
proplists:get_value(dead_bytes_threshold, BitcaskProps)), ?assertEqual(10485760, proplists:get_value(small_file_threshold, BitcaskProps)), ?assertEqual(-1, proplists:get_value(max_fold_age, BitcaskProps)), ?assertEqual(0, proplists:get_value(max_fold_puts, BitcaskProps)), ?assertEqual(-1, proplists:get_value(expiry_secs, BitcaskProps)), ?assertEqual(true, proplists:get_value(require_hint_crc, BitcaskProps)), ?assertEqual(0, proplists:get_value(expiry_grace_time, BitcaskProps)), ?assertEqual(erlang, proplists:get_value(io_mode, BitcaskProps)), ?assertEqual(none, proplists:get_value(sync_strategy, BitcaskProps)), ?assertEqual(always, proplists:get_value(merge_window, BitcaskProps)), {<<"leveldb_mult">>, riak_kv_eleveldb_backend, Level1Props} = lists:keyfind(<<"leveldb_mult">>, 1, Multi), ?assertEqual("/path/to/dat/level", proplists:get_value(data_root, Level1Props)), ?assertEqual(30, proplists:get_value(max_open_files, Level1Props)), ?assertEqual(8388608, proplists:get_value(cache_size, Level1Props)), ?assertEqual(false, proplists:get_value(sync, Level1Props)), ?assertEqual(15728640, proplists:get_value(write_buffer_size_min, Level1Props)), ?assertEqual(31457280, proplists:get_value(write_buffer_size_max, Level1Props)), ?assertEqual(4096, proplists:get_value(sst_block_size, Level1Props)), ?assertEqual(16, proplists:get_value(block_restart_interval, Level1Props)), ?assertEqual(true, proplists:get_value(verify_checksums, Level1Props)), ?assertEqual(true, proplists:get_value(verify_compaction, Level1Props)), ?assertEqual(true, proplists:get_value(use_bloomfilter, Level1Props)), {<<"leveldb_mult2">>, riak_kv_eleveldb_backend, Level2Props} = lists:keyfind(<<"leveldb_mult2">>, 1, Multi), ?assertEqual("/path/to/dat/level2", proplists:get_value(data_root, Level2Props)), ?assertEqual(30, proplists:get_value(max_open_files, Level2Props)), ?assertEqual(8388608, proplists:get_value(cache_size, Level2Props)), ?assertEqual(false, proplists:get_value(sync, Level2Props)), 
?assertEqual(15728640, proplists:get_value(write_buffer_size_min, Level2Props)), ?assertEqual(31457280, proplists:get_value(write_buffer_size_max, Level2Props)), ?assertEqual(4096, proplists:get_value(sst_block_size, Level2Props)), ?assertEqual(16, proplists:get_value(block_restart_interval, Level2Props)), ?assertEqual(true, proplists:get_value(verify_checksums, Level2Props)), ?assertEqual(true, proplists:get_value(verify_compaction, Level2Props)), ?assertEqual(true, proplists:get_value(use_bloomfilter, Level2Props)), {<<"memory_mult">>, riak_kv_memory_backend, MemProps} = lists:keyfind(<<"memory_mult">>, 1, Multi), ?assertEqual(86400, proplists:get_value(ttl, MemProps)), ?assertEqual(4096, proplists:get_value(max_memory, MemProps)), ok. unset_translation_test() -> Schema = cuttlefish_schema:files(["test/unset_translation.schema"]), Conf = [ {["a", "b"], "8"} ], NewConfig = cuttlefish_generator:map(Schema, Conf), Props = proplists:get_value(erlang, NewConfig), _ = ?LOG_INFO("~p", [NewConfig]), ?assertEqual(8, proplists:get_value(key, Props)). not_found_error_test() -> Schema = cuttlefish_schema:files(["test/throw_not_found.schema"]), Conf = [], NewConfig = cuttlefish_generator:map(Schema, Conf), ?assertMatch({error, apply_translations, _}, NewConfig). duration_test() -> Schema = cuttlefish_schema:files(["test/durations.schema"]), %% Test that the duration parsing doesn't emit "error" into the %% config instead of the extended type. Conf = conf_parse:parse(<<"a.b.c = foo\n">>), NewConfig = cuttlefish_generator:map(Schema, Conf), ?assertEqual(foo, proplists:get_value(duration_extended, proplists:get_value(cuttlefish, NewConfig))), %% Test that for a non-extended duration, a bad value results in %% an erroroneous config, not emitting error. Conf2 = conf_parse:parse(<<"b.c = fish\n">>), ErrConfig = cuttlefish_generator:map(Schema, Conf2), ?assertMatch({error, transform_datatypes, _}, ErrConfig). 
proplist_equals(Expected, Actual) -> ExpectedKeys = lists:sort(proplists:get_keys(Expected)), ActualKeys = lists:sort(proplists:get_keys(Actual)), ?assertEqual(ExpectedKeys, ActualKeys), [ begin ExpectedValue = proplists:get_value(EKey, Expected), ActualValue = proplists:get_value(EKey, Actual, undefined), case {is_proplist(ExpectedValue), is_proplist(ActualValue)} of {true, true} -> proplist_equals(ExpectedValue, ActualValue); {false, false} -> ?assertEqual({EKey, ExpectedValue}, {EKey, ActualValue}); _ -> ?assertEqual({EKey, ExpectedValue}, {EKey, ActualValue}) end end || EKey <- ExpectedKeys]. is_proplist(Proplist) when is_list(Proplist) -> lists:all( fun(X) -> is_tuple(X) andalso tuple_size(X) =:= 2 end, Proplist); is_proplist(_) -> false. cuttlefish-3.0.1/test/multi2.schema0000644000232200023220000000145314027401005017604 0ustar debalancedebalance%% This'll be the 'a' namespace {mapping, "a.some.var1", "app_a.some_var1", []}. {translation, "app_a.some_var1", fun(X) -> "a1" end}. {mapping, "a.some.var2", "app_a.some_var2", []}. {translation, "app_a.some_var2", fun(X) -> "a2" end}. {mapping, "a.some.var3", "app_a.some_var3", []}. {translation, "app_a.some_var3", fun(X) -> "a3" end}. {validator, "a.validator1", "validate! validate!", fun(_AlwaysTrue) -> true end}. {validator, "a.validator2", "validate! validate!", fun(_AlwaysTrue) -> true end}. %% In a weird and highly unlikely cross namespace definition. some developer of app A has decided %% that they want to override "b.validator2". They can do that here, but it will still occur after %% "b.validator2" {validator, "b.validator2", "validators are just ok", fun(_AlwaysTrue) -> true end}. cuttlefish-3.0.1/test/value10000644000232200023220000000000314027401005016314 0ustar debalancedebalance42 cuttlefish-3.0.1/test/cuttlefish_escript_integration_tests.erl0000644000232200023220000000731714027401005025447 0ustar debalancedebalance-module(cuttlefish_escript_integration_tests). 
-include_lib("kernel/include/logger.hrl"). -include_lib("eunit/include/eunit.hrl"). escript_utf8_test() -> _ = cuttlefish_test_logging:set_up(), _ = cuttlefish_test_logging:bounce(error), ?assertThrow(stop_deactivate, cuttlefish_escript:main( "-d test_fixtures/escript_utf8_test/generated.config " "-s test_fixtures/escript_utf8_test/lib " "-e test_fixtures/escript_utf8_test/etc " "-c test_fixtures/escript_utf8_test/etc/utf8.conf generate" )), [Log] = cuttlefish_test_logging:get_logs(), ?assertMatch({match, _}, re:run(Log, "utf8.conf: Error converting value on line #1 to latin1")), ok. advanced_config_format_test() -> _ = cuttlefish_test_logging:set_up(), _ = cuttlefish_test_logging:bounce(error), ?assertThrow(stop_deactivate, cuttlefish_escript:main( "-d test_fixtures/acformat/generated.config " "-s test_fixtures/acformat/lib " "-e test_fixtures/acformat/etc " "-c test_fixtures/acformat/etc/acformat.conf generate" )), [Log] = cuttlefish_test_logging:get_logs(), ?assertMatch({match, _}, re:run(Log, "Error parsing test_fixtures/acformat/etc/advanced.config, incorrect format: \\[\\[a\\],\\[b\\]\\]")), ok. escript_prune_test_() -> {timeout, 20, [ escript_prune("-m 3", 3), escript_prune("", 3), %% default escript_prune("-m 6", 6) ]}. 
escript_prune(DashM, ExpectedMax) -> %% Empty workspace case file:list_dir("test_fixtures/escript_prune_test/generated.config") of {ok, FilenamesToDelete} -> [ file:delete(filename:join(["test_fixtures/escript_prune_test/generated.config",F])) || F <- FilenamesToDelete ]; _ -> ok end, {_, _, T} = lists:foldl( fun(Counter, {PrevConfigs, PrevVMArgs, Tests}) -> io:format("Running iteration: ~p", [Counter]), %% Timer to keep from generating more than one file per second timer:sleep(1100), cuttlefish_escript:main( "-d test_fixtures/escript_prune_test/generated.config " "-s test_fixtures/escript_prune_test/lib " "-e test_fixtures/escript_prune_test/etc " ++ DashM ++ " generate" ), AppConfigs = lists:sort( filelib:wildcard("app.*.config", "test_fixtures/escript_prune_test/generated.config")), VMArgs = lists:sort( filelib:wildcard("vm.*.args", "test_fixtures/escript_prune_test/generated.config")), {AppConfigs, VMArgs, [?_assert(length(AppConfigs) =< ExpectedMax), ?_assert(length(VMArgs) =< ExpectedMax), compare_lists(PrevConfigs, AppConfigs), compare_lists(PrevVMArgs, VMArgs) | Tests]} end, {[], [], []}, lists:seq(1,10)), T. %% This function is asserting that Previous is the tail of Current OR %% that the tail of Previous is equal to the first length(Previous) %% elements of Current compare_lists(Previous, Current) when (length(Previous) +1) =:= length(Current) -> compare_lists([stub|Previous], Current); compare_lists([_|PTail] = Previous, Current) when length(Previous) =:= length(Current) -> NewPrevious = PTail ++ [lists:last(Current)], ?_assertEqual(NewPrevious, Current); compare_lists(_Previous, _Current) -> ?_assert(false). cuttlefish-3.0.1/test/default.config0000644000232200023220000003747414027401005020035 0ustar debalancedebalance%% -*- mode: erlang;erlang-indent-level: 4;indent-tabs-mode: nil -*- %% ex: ft=erlang ts=4 sw=4 et [ %% Riak Client APIs config {riak_api, [ %% pb_backlog is the maximum length to which the queue of pending %% connections may grow. 
If set, it must be an integer >= 0. %% By default the value is 5. If you anticipate a huge number of %% connections being initialised *simultaneously*, set this number %% higher. %% {pb_backlog, 64}, %% pb is a list of IP addresses and TCP ports that the Riak %% Protocol Buffers interface will bind. %% {pb, [ {"127.0.0.1", 8087 } ]} {pb, []} ]}, %% Riak Core config {riak_core, [ %% Default location of ringstate {ring_state_dir, "./data/ring"}, %% Default ring creation size. Make sure it is a power of 2, %% e.g. 16, 32, 64, 128, 256, 512 etc {ring_creation_size, 64}, %% http is a list of IP addresses and TCP ports that the Riak %% HTTP interface will bind. %% {http, [ {"127.0.0.1", 8098 } ]}, {http, []}, %% https is a list of IP addresses and TCP ports that the Riak %% HTTPS interface will bind. %{https, [{ "127.0.0.1", 8098 }]}, %% Default cert and key locations for https can be overridden %% with the ssl config variable, for example: %{ssl, [ % {certfile, "./etc/cert.pem"}, % {keyfile, "./etc/key.pem"} % ]}, %% riak_handoff_port is the TCP port that Riak uses for %% intra-cluster data handoff. {handoff_port, 8099 }, %% To encrypt riak_core intra-cluster data handoff traffic, %% uncomment the following line and edit its path to an %% appropriate certfile and keyfile. (This example uses a %% single file with both items concatenated together.) %{handoff_ssl_options, [{certfile, "/tmp/erlserver.pem"}]}, %% DTrace support %% Do not enable 'dtrace_support' unless your Erlang/OTP %% runtime is compiled to support DTrace. DTrace is %% available in R15B01 (supported by the Erlang/OTP %% official source package) and in R14B04 via a custom %% source repository & branch. {dtrace_support, false}, %% Health Checks %% If disabled, health checks registered by an application will %% be ignored. NOTE: this option cannot be changed at runtime. %% To re-enable, the setting must be changed and the node restarted. 
%% NOTE: As of Riak 1.3.2, health checks are deprecated as they %% may interfere with the new overload protection mechanisms. %% If there is a good reason to re-enable them, you must uncomment %% this line and also add an entry in the riak_kv section: %% {riak_kv, [ ..., {enable_health_checks, true}, ...]} %% {enable_health_checks, true}, %% Platform-specific installation paths (substituted by rebar) {platform_bin_dir, "./bin"}, {platform_data_dir, "./data"}, {platform_etc_dir, "./etc"}, {platform_lib_dir, "./lib"}, {platform_log_dir, "./log"} ]}, %% Riak KV config {riak_kv, [ %% Storage_backend specifies the Erlang module defining the storage %% mechanism that will be used on this node. {storage_backend, riak_kv_bitcask_backend}, %% raw_name is the first part of all URLS used by the Riak raw HTTP %% interface. See riak_web.erl and raw_http_resource.erl for %% details. %{raw_name, "riak"}, %% Enable active anti-entropy subsystem + optional debug messages: %% {anti_entropy, {on|off, []}}, %% {anti_entropy, {on|off, [debug]}}, {anti_entropy, {on, []}}, %% Restrict how fast AAE can build hash trees. Building the tree %% for a given partition requires a full scan over that partition's %% data. Once built, trees stay built until they are expired. %% Config is of the form: %% {num-builds, per-timespan-in-milliseconds} %% Default is 1 build per hour. {anti_entropy_build_limit, {1, 3600000}}, %% Determine how often hash trees are expired after being built. %% Periodically expiring a hash tree ensures the on-disk hash tree %% data stays consistent with the actual k/v backend data. It also %% helps Riak identify silent disk failures and bit rot. However, %% expiration is not needed for normal AAE operation and should be %% infrequent for performance reasons. The time is specified in %% milliseconds. The default is 1 week. {anti_entropy_expire, 604800000}, %% Limit how many AAE exchanges/builds can happen concurrently. 
{anti_entropy_concurrency, 2}, %% The tick determines how often the AAE manager looks for work %% to do (building/expiring trees, triggering exchanges, etc). %% The default is every 15 seconds. Lowering this value will %% speedup the rate that all replicas are synced across the cluster. %% Increasing the value is not recommended. {anti_entropy_tick, 15000}, %% The directory where AAE hash trees are stored. {anti_entropy_data_dir, "./data/anti_entropy"}, %% The LevelDB options used by AAE to generate the LevelDB-backed %% on-disk hashtrees. {anti_entropy_leveldb_opts, [{write_buffer_size, 4194304}, {max_open_files, 20}]}, %% mapred_name is URL used to submit map/reduce requests to Riak. {mapred_name, "mapred"}, %% mapred_2i_pipe indicates whether secondary-index %% MapReduce inputs are queued in parallel via their own %% pipe ('true'), or serially via a helper process %% ('false' or undefined). Set to 'false' or leave %% undefined during a rolling upgrade from 1.0. {mapred_2i_pipe, true}, %% Each of the following entries control how many Javascript %% virtual machines are available for executing map, reduce, %% pre- and post-commit hook functions. {map_js_vm_count, 8 }, {reduce_js_vm_count, 6 }, {hook_js_vm_count, 2 }, %% js_max_vm_mem is the maximum amount of memory, in megabytes, %% allocated to the Javascript VMs. If unset, the default is %% 8MB. {js_max_vm_mem, 8}, %% js_thread_stack is the maximum amount of thread stack, in megabyes, %% allocate to the Javascript VMs. If unset, the default is 16MB. %% NOTE: This is not the same as the C thread stack. {js_thread_stack, 16}, %% js_source_dir should point to a directory containing Javascript %% source files which will be loaded by Riak when it initializes %% Javascript VMs. %{js_source_dir, "/tmp/js_source"}, %% http_url_encoding determines how Riak treats URL encoded %% buckets, keys, and links over the REST API. When set to 'on' %% Riak always decodes encoded values sent as URLs and Headers. 
%% Otherwise, Riak defaults to compatibility mode where links %% are decoded, but buckets and keys are not. The compatibility %% mode will be removed in a future release. {http_url_encoding, on}, %% Switch to vnode-based vclocks rather than client ids. This %% significantly reduces the number of vclock entries. %% Only set true if *all* nodes in the cluster are upgraded to 1.0 {vnode_vclocks, true}, %% This option toggles compatibility of keylisting with 1.0 %% and earlier versions. Once a rolling upgrade to a version %% > 1.0 is completed for a cluster, this should be set to %% true for better control of memory usage during key listing %% operations {listkeys_backpressure, true}, %% This option specifies how many of each type of fsm may exist %% concurrently. This is for overload protection and is a new %% mechanism that obsoletes 1.3's health checks. Note that this number %% represents two potential processes, so +P in vm.args should be at %% least 3X the fsm_limit. {fsm_limit, 50000}, %% object_format controls which binary representation of a riak_object %% is stored on disk. %% Current options are: v0, v1. %% v0: Original erlang:term_to_binary format. Higher space overhead. %% v1: New format for more compact storage of small values. {object_format, v1}, {memory_backend,[{max_memory,4096}]} ]}, %% Riak Search Config {riak_search, [ %% To enable Search functionality set this 'true'. {enabled, false} ]}, %% Merge Index Config {merge_index, [ %% The root dir to store search merge_index data {data_root, "./data/merge_index"}, %% Size, in bytes, of the in-memory buffer. When this %% threshold has been reached the data is transformed %% into a segment file which resides on disk. {buffer_rollover_size, 1048576}, %% Overtime the segment files need to be compacted. %% This is the maximum number of segments that will be %% compacted at once. A lower value will lead to %% quicker but more frequent compactions. 
{max_compact_segments, 20} ]}, %% Bitcask Config {bitcask, [ %% Configure how Bitcask writes data to disk. %% erlang: Erlang's built-in file API %% nif: Direct calls to the POSIX C API %% %% The NIF mode provides higher throughput for certain %% workloads, but has the potential to negatively impact %% the Erlang VM, leading to higher worst-case latencies %% and possible throughput collapse. {io_mode, erlang}, {data_root, "./data/bitcask"}, {open_timeout,4}, {max_file_size,2147483648}, {frag_merge_trigger,60}, {dead_bytes_merge_trigger,536870912}, {frag_threshold,40}, {dead_bytes_threshold,134217728}, {small_file_threshold,10485760}, {max_fold_age,-1}, {max_fold_puts,0}, {expiry_secs,-1}, {require_hint_crc,true}, {expiry_grace_time,0}, {sync_strategy,none}, {merge_window,always} ]}, %% eLevelDB Config {eleveldb, [ {data_root, "./data/leveldb"}, {max_open_files,30}, {cache_size, 8388608}, {sync,false}, {write_buffer_size_min,31457280}, {write_buffer_size_max,62914560}, {sst_block_size,4096}, {block_restart_interval,16}, {verify_checksums,true}, {verify_compaction,true}, {use_bloomfilter,true} ]}, %% Lager Config {lager, [ %% What handlers to install with what arguments %% The defaults for the logfiles are to rotate the files when %% they reach 10Mb or at midnight, whichever comes first, and keep %% the last 5 rotations. See the lager README for a description of %% the time rotation format: %% https://github.com/basho/lager/blob/master/README.org %% %% If you wish to disable rotation, you can either set the size to 0 %% and the rotation time to "", or instead specify a 2-tuple that only %% consists of {Logfile, Level}. 
%% %% If you wish to have riak log messages to syslog, you can use a handler %% like this: %% {lager_syslog_backend, ["riak", daemon, info]}, %% {handlers, [{lager_file_backend, [{file,"./log/console.log"}, {level,info}, {size,10485760}, {date,"$D0"}, {count,5}]}, {lager_file_backend, [{file,"./log/error.log"}, {level,error}, {size,10485760}, {date,"$D0"}, {count,5}]}]}, %% Whether to write a crash log, and where. %% Commented/omitted/undefined means no crash logger. {crash_log, "./log/crash.log"}, %% Maximum size in bytes of events in the crash log - defaults to 65536 {crash_log_msg_size, 65536}, %% Maximum size of the crash log in bytes, before its rotated, set %% to 0 to disable rotation - default is 0 {crash_log_size, 10485760}, %% What time to rotate the crash log - default is no time %% rotation. See the lager README for a description of this format: %% https://github.com/basho/lager/blob/master/README.org {crash_log_date, "$D0"}, %% Number of rotated crash logs to keep, 0 means keep only the %% current one - default is 0 {crash_log_count, 5}, %% Whether to redirect error_logger messages into lager - defaults to true {error_logger_redirect, true}, %% maximum number of error_logger messages to handle in a second %% lager 2.0.0 shipped with a limit of 50, which is a little low for riak's startup {error_logger_hwm, 100} ]}, %% riak_sysmon config {riak_sysmon, [ %% To disable forwarding events of a particular type, use a %% limit of 0. {process_limit, 30}, {port_limit, 2}, %% Finding reasonable limits for a given workload is a matter %% of experimentation. %% NOTE: Enabling the 'gc_ms_limit' monitor (by setting non-zero) %% can cause performance problems on multi-CPU systems. {gc_ms_limit, 0}, {heap_word_limit, 40111000}, %% Configure the following items to 'false' to disable logging %% of that event type. 
{busy_port, true}, {busy_dist_port, true} ]}, %% SASL config {sasl, [ {sasl_error_logger, false} ]}, %% riak_control config {riak_control, [ %% Set to false to disable the admin panel. {enabled, false}, %% Authentication style used for access to the admin %% panel. Valid styles are 'userlist' . {auth, userlist}, %% If auth is set to 'userlist' then this is the %% list of usernames and passwords for access to the %% admin panel. {userlist, [ %%{"user", "pass"} ]}, %% The admin panel is broken up into multiple %% components, each of which is enabled or disabled %% by one of these settings. {admin, true} ]} ]. cuttlefish-3.0.1/test/advanced.config0000644000232200023220000000110114027401005020130 0ustar debalancedebalance%% -*- mode: erlang;erlang-indent-level: 4;indent-tabs-mode: nil -*- %% ex: ft=erlang ts=4 sw=4 et [ %% Riak KV config {riak_kv, [ %% Restrict how fast AAE can build hash trees. Building the tree %% for a given partition requires a full scan over that partition's %% data. Once built, trees stay built until they are expired. %% Config is of the form: %% {num-builds, per-timespan-in-milliseconds} %% Default is 1 build per hour. {anti_entropy_build_limit, {1, 3600000}} ]} ]. cuttlefish-3.0.1/test/multi3.schema0000644000232200023220000000135214027401005017603 0ustar debalancedebalance%%% We'll call this a 'b' namespace {mapping, "b.some.var1", "app_b.some_var1", []}. {translation, "app_b.some_var1", fun(X) -> "b1" end}. {mapping, "b.some.var2", "app_b.some_var2", []}. {translation, "app_b.some_var2", fun(X) -> "b2" end}. {validator, "b.validator1", "validators are magic!", fun(_AlwaysFalse) -> false end}. {validator, "b.validator2", "validators are magic!", fun(_AlwaysFalse) -> false end}. %% For some nutty reason, somebody has decided to redefine "b.some.var1" later in the same schema. %% This should appear in the same place the original definition above was, but with the new values. {mapping, "b.some.var1", "app_b.some_var3", []}. 
%% Same for translations {translation, "app_b.some_var1", fun(X) -> "b3" end}. cuttlefish-3.0.1/test/multi1.conf0000644000232200023220000000005014027401005017260 0ustar debalancedebalancea.b.c = 1 a.b.c = 2 a.b.d = 1 a.b.c = 3 cuttlefish-3.0.1/test/sample_mustache.schema0000644000232200023220000000011714027401005021536 0ustar debalancedebalance{mapping, "a.b", "app_a.setting_b", [ {default, "$(c)/{{mustache}}/a.b"} ]}. cuttlefish-3.0.1/test/cuttlefish_escript_test.erl0000644000232200023220000001175714027401005022664 0ustar debalancedebalance-module(cuttlefish_escript_test). -ifdef(TEST). -include_lib("eunit/include/eunit.hrl"). -define(assertPrinted(___Text), ?assertPrinted(___Text, [])). -define(assertPrinted(___Text, ___Opts), begin ((fun() -> case cuttlefish_test_group_leader:get_output() of {ok, ___Output} -> case {lists:member(exact, ___Opts), re:run(___Output, ___Text)} of {true, _} when ___Output =:= ___Text -> ok; {true, _} when ___Output =/= ___Text -> erlang:error({assertPrinted_failed, [{module, ?MODULE}, {line, ?LINE}, {expected, ___Text}, {actual, unicode:characters_to_list(___Output)}]}); {_, {match, _}} -> ok; {_, nomatch} -> erlang:error({assertPrinted_failed, [{module, ?MODULE}, {line, ?LINE}, {expected, ___Text}, {actual, unicode:characters_to_list(___Output)}]}) end; error -> erlang:error({assertPrinted_failed, [{module, ?MODULE}, {line, ?LINE}, {expected, ___Text}, {reason, timed_out_on_receive}]}) end end)()) end). -define(capturing(__Forms), begin ___OldLeader = group_leader(), group_leader(cuttlefish_test_group_leader:new_group_leader(self()), self()), try __Forms after cuttlefish_test_group_leader:tidy_up(___OldLeader) end end). 
describe_test_() -> [ {"`cuttlefish describe` prints documentation", fun describe_prints_docs/0}, {"`cuttlefish describe` prints datatype's valid values", fun describe_prints_datatype/0}, {"`cuttlefish describe` prints default", fun describe_prints_default/0}, {"`cuttlefish describe` prints configured value", fun describe_prints_configured/0}, {"`cuttlefish describe` prints erlang application key", fun describe_prints_app_key/0}, {"`cuttlefish describe` prints message when no default exists", fun describe_prints_no_default/0}, {"`cuttlefish describe` prints message when value not configured", fun describe_prints_not_configured/0} ]. describe(Key) -> ?assertThrow(stop_deactivate, cuttlefish_escript:main(["-i", "test/riak.schema", "-c", "test/riak.conf", "describe", Key])). describe_prints_docs() -> ?capturing(begin describe("ring_size"), ?assertPrinted("Documentation for ring_size"), ?assertPrinted("Default ring creation size\\. Make sure it is a power of 2") end). describe_prints_datatype() -> ?capturing(begin describe("storage_backend"), ?assertPrinted("- one of: bitcask, leveldb, memory, multi") end). describe_prints_default() -> ?capturing(begin describe("ring_size"), ?assertPrinted("Default Value : 64") end). describe_prints_configured() -> ?capturing(begin describe("anti_entropy"), ?assertPrinted("Set Value : debug") end). describe_prints_app_key() -> ?capturing(begin describe("leveldb.bloomfilter"), ?assertPrinted("Internal key : eleveldb\\.use_bloomfilter") end). describe_prints_no_default() -> ?capturing(begin describe("listener.https.foo"), ?assertPrinted("No default set") end). describe_prints_not_configured() -> ?capturing(begin describe("ssl.keyfile"), ?assertPrinted("Value not set in test/riak.conf") end). silent_test() -> ?capturing(begin cuttlefish_escript:main(["-i", "test/riak.schema", "-c", "test/riak.conf", "--etc_dir", "etc", "--silent"]), ?assertPrinted("", [exact]) end). 
vm_args_test() -> ?capturing(begin cuttlefish_escript:main(["--schema_file", "priv/erlang_vm.schema", "--conf_file", "test/riak.conf", "--etc_dir", "etc", "--dest_file", "vm.generated.args", "--allow_extra", "--silent"]), ?assertPrinted("", [exact]) end). -endif. cuttlefish-3.0.1/test/multi2.conf0000644000232200023220000000001214027401005017257 0ustar debalancedebalancea.b.c = 4 cuttlefish-3.0.1/test/include_file.conf0000644000232200023220000000002214027401005020466 0ustar debalancedebalanceinclude riak.conf cuttlefish-3.0.1/test/throw_not_found.schema0000644000232200023220000000020114027401005021574 0ustar debalancedebalance{mapping, "a.b", "c.d", []}. {translation, "c.d.", fun(Conf) -> Value = cuttlefish:conf_get("a.b", Conf), Value end}. cuttlefish-3.0.1/test/riak.schema0000644000232200023220000011313514027401005017317 0ustar debalancedebalance%% example of super basic mapping %% @doc Default ring creation size. Make sure it is a power of 2, %% e.g. 16, 32, 64, 128, 256, 512 etc {mapping, "ring_size", "riak_core.ring_creation_size", [ {datatype, integer}, {default, 64}, {commented, 64}, {validators, ["ring_size"]} ]}. {validator, "ring_size", "not a power of 2 greater than 1", fun(Size) -> Size > 1 andalso (Size band (Size-1) =:= 0) end}. %% Slightly more complex mapping with translation layer %% @doc enable active anti-entropy subsystem {mapping, "anti_entropy", "riak_kv.anti_entropy", [ {datatype, {enum, [on, off, debug]}}, {default, on} ]}. { translation, "riak_kv.anti_entropy", fun(Conf) -> Setting = cuttlefish:conf_get("anti_entropy", Conf), case Setting of on -> {on, []}; debug -> {on, [debug]}; off -> {off, []}; _Default -> {on, []} end end }. %% complex lager example %% @doc where do you want the console.log output: %% off : nowhere %% file: the file specified by log.console.file %% console : standard out %% both : log.console.file and standard out. 
{mapping, "log.console", "lager.handlers", [ {default, file}, {datatype, {enum, [off, file, console, both]}} ]}. %% @doc the log level of the console log {mapping, "log.console.level", "lager.handlers", [ {default, info}, {datatype, {enum, [debug, info, warning, error]}} ]}. %% @doc location of the console log {mapping, "log.console.file", "lager.handlers", [ {default, "./log/console.log"} ]}. %% *gasp* notice the same @mapping! %% @doc location of the error log {mapping, "log.error.file", "lager.handlers", [ {default, "./log/error.log"} ]}. %% *gasp* notice the same @mapping! %% @doc turn on syslog {mapping, "log.syslog", "lager.handlers", [ {default, off}, {datatype, {enum, [on, off]}} ]}. { translation, "lager.handlers", fun(Conf) -> SyslogHandler = case cuttlefish:conf_get("log.syslog", Conf) of on -> [{lager_syslog_backend, ["riak", daemon, info]}]; _ -> [] end, ErrorHandler = case cuttlefish:conf_get("log.error.file", Conf) of undefined -> []; ErrorFilename -> [{lager_file_backend, [{file, ErrorFilename}, {level, error}, {size, 10485760}, {date, "$D0"}, {count, 5}]}] end, ConsoleLogLevel = cuttlefish:conf_get("log.console.level", Conf), ConsoleLogFile = cuttlefish:conf_get("log.console.file", Conf), ConsoleHandler = {lager_console_handler, ConsoleLogLevel}, ConsoleFileHandler = {lager_file_backend, [{file, ConsoleLogFile}, {level, ConsoleLogLevel}, {size, 10485760}, {date, "$D0"}, {count, 5}]}, ConsoleHandlers = case cuttlefish:conf_get("log.console", Conf) of off -> []; file -> [ConsoleFileHandler]; console -> [ConsoleHandler]; both -> [ConsoleHandler, ConsoleFileHandler]; _ -> [] end, SyslogHandler ++ ConsoleHandlers ++ ErrorHandler end }. %% SASL %% We should never care about this {mapping, "sasl", "sasl.sasl_error_logger", [ {default, off}, {datatype, {enum, [on, off]}}, {level, advanced} ]}. { translation, "sasl.sasl_error_logger", fun(Conf) -> case cuttlefish:conf_get("sasl", Conf) of %%how to pull default? on -> true; _ -> false end end }. 
%% HTTP Listeners %% @doc listener.http. is an IP address and TCP port that the Riak %% HTTP interface will bind. {mapping, "listener.http.$name", "riak_core.http", [ {default, {"127.0.0.1",8098}}, {datatype, ip}, {include_default, "internal"} ]}. { translation, "riak_core.http", fun(Conf) -> HTTP = cuttlefish_variable:filter_by_prefix("listener.http", Conf), [ IP || {_, IP} <- HTTP] end }. %% protobuf Listeners %% @doc listener.protobuf. is an IP address and TCP port that the Riak %% Protocol Buffers interface will bind. {mapping, "listener.protobuf.$name", "riak_api.pb", [ {default, {"127.0.0.1", 8087}}, {datatype, ip}, {include_default, "internal"} ]}. { translation, "riak_api.pb", fun(Conf) -> PB = cuttlefish_variable:filter_by_prefix("listener.protobuf", Conf), [ IP || {_, IP} <- PB] end }. %% @doc pb_backlog is the maximum length to which the queue of pending %% connections may grow. If set, it must be an integer >= 0. %% By default the value is 5. If you anticipate a huge number of %% connections being initialised *simultaneously*, set this number %% higher. {mapping, "protobuf.backlog", "riak_api.pb_backlog", [ {datatype, integer}, {commented, 64} ]}. %% @doc Default location of ringstate {mapping, "ring.state_dir", "riak_core.ring_state_dir", [ {default, "$(platform_data_dir)/ring"} ]}. %% @doc listener.https. is an IP address and TCP port that the Riak %% HTTPS interface will bind. {mapping, "listener.https.$name", "riak_core.https", [ {commented, {"127.0.0.1", 8098}}, {datatype, ip}, {include_default, "internal"} ]}. { translation, "riak_core.https", fun(Conf) -> HTTPS = cuttlefish_variable:filter_by_prefix("listener.https", Conf), [ IP || {_, IP} <- HTTPS] end }. %% @doc Default cert location for https can be overridden %% with the ssl config variable, for example: {mapping, "ssl.certfile", "riak_core.ssl.certfile", [ {commented, "./etc/cert.pem"} ]}. 
%% @doc Default key location for https can be overridden %% with the ssl config variable, for example: {mapping, "ssl.keyfile", "riak_core.ssl.keyfile", [ {commented, "./etc/key.pem"} ]}. %% @doc handoff.port is the TCP port that Riak uses for %% intra-cluster data handoff. {mapping, "handoff.port", "riak_core.handoff_port", [ {default, 8099}, {datatype, integer} ]}. %% @doc To encrypt riak_core intra-cluster data handoff traffic, %% uncomment the following line and edit its path to an %% appropriate certfile and keyfile. (This example uses a %% single file with both items concatenated together.) {mapping, "handoff.ssl.certfile", "riak_core.handoff_ssl_options.certfile", [ {commented, "/tmp/erlserver.pem"} ]}. %% @doc if you need a seperate keyfile for handoff {mapping, "handoff.ssl.keyfile", "riak_core.handoff_ssl_options.keyfile", []}. %% @doc DTrace support %% Do not enable 'dtrace' unless your Erlang/OTP %% runtime is compiled to support DTrace. DTrace is %% available in R15B01 (supported by the Erlang/OTP %% official source package) and in R14B04 via a custom %% source repository & branch. {mapping, "dtrace", "riak_core.dtrace_support", [ {default, off}, {datatype, {enum, [on, off]}} ]}. %% consistent on/off (in lieu of enabled/disabled, true/false) { translation, "riak_core.dtrace_support", fun(Conf) -> Setting = cuttlefish:conf_get("dtrace", Conf), case Setting of on -> true; off -> false; _Default -> false end end }. %% Platform-specific installation paths (substituted by rebar) {mapping, "platform_bin_dir", "riak_core.platform_bin_dir", [ {default, "./bin"} ]}. {mapping, "platform_data_dir", "riak_core.platform_data_dir", [ {default, "./data"} ]}. {mapping, "platform_etc_dir", "riak_core.platform_etc_dir", [ {default, "./etc"} ]}. {mapping, "platform_lib_dir", "riak_core.platform_lib_dir", [ {default, "./lib"} ]}. {mapping, "platform_log_dir", "riak_core.platform_log_dir", [ {default, "./log"} ]}. %% @doc To enable Search functionality set this 'on'. 
%% @datatype enum on, off {mapping, "search", "riak_search.enabled", [ {default, off}, {datatype, {enum, [on, off]}} ]}. { translation, "riak_search.enabled", fun(Conf) -> Setting = cuttlefish:conf_get("search", Conf), case Setting of on -> true; off -> false; _Default -> false end end}. %% Merge Index Config %% @doc The root dir to store search merge_index data {mapping, "merge_index.data_root", "merge_index.data_root", [ {default, "./data/merge_index"} ]}. %% @doc Size, in bytes, of the in-memory buffer. When this %% threshold has been reached the data is transformed %% into a segment file which resides on disk. {mapping, "merge_index.buffer_rollover_size", "merge_index.buffer_rollover_size", [ {default, "1MB"}, {datatype, bytesize} ]}. %% @doc Overtime the segment files need to be compacted. %% This is the maximum number of segments that will be %% compacted at once. A lower value will lead to %% quicker but more frequent compactions. {mapping, "merge_index.max_compact_segments", "merge_index.max_compact_segments", [ {default, 20}, {datatype, integer} ]}. %% Lager Config %% @doc Whether to write a crash log, and where. %% Commented/omitted/undefined means no crash logger. {mapping, "log.crash.file", "lager.crash_log", [ {default, "./log/crash.log"} ]}. %% @doc Maximum size in bytes of events in the crash log - defaults to 65536 %% @datatype integer %% @mapping {mapping, "log.crash.msg_size", "lager.crash_log_msg_size", [ {default, "64KB"}, {datatype, bytesize} ]}. %% @doc Maximum size of the crash log in bytes, before its rotated, set %% to 0 to disable rotation - default is 0 {mapping, "log.crash.size", "lager.crash_log_size", [ {default, "10MB"}, {datatype, bytesize} ]}. %% @doc What time to rotate the crash log - default is no time %% rotation. See the lager README for a description of this format: %% https://github.com/basho/lager/blob/master/README.org {mapping, "log.crash.date", "lager.crash_log_date", [ {default, "$D0"} ]}. 
%% @doc Number of rotated crash logs to keep, 0 means keep only the %% current one - default is 0 {mapping, "log.crash.count", "lager.crash_log_count", [ {default, 5}, {datatype, integer} ]}. %% @doc Whether to redirect error_logger messages into lager - defaults to true {mapping, "log.error.redirect", "lager.error_logger_redirect", [ {default, on}, {datatype, {enum, [on, off]}} ]}. { translation, "lager.error_logger_redirect", fun(Conf) -> Setting = cuttlefish:conf_get("log.error.redirect", Conf), case Setting of on -> true; off -> false; _Default -> true end end}. %% @doc maximum number of error_logger messages to handle in a second %% lager 2.0.0 shipped with a limit of 50, which is a little low for riak's startup {mapping, "log.error.messages_per_second", "lager.error_logger_hwm", [ {default, 100}, {datatype, integer} ]}. %% Riak KV config %% @doc Storage_backend specifies the Erlang module defining the storage %% mechanism that will be used on this node. {mapping, "storage_backend", "riak_kv.storage_backend", [ {default, bitcask}, {datatype, {enum, [bitcask, leveldb, memory, multi]}} ]}. { translation, "riak_kv.storage_backend", fun(Conf) -> Setting = cuttlefish:conf_get("storage_backend", Conf), case Setting of bitcask -> riak_kv_bitcask_backend; leveldb -> riak_kv_eleveldb_backend; memory -> riak_kv_memory_backend; multi -> riak_kv_multi_backend; _Default -> riak_kv_bitcask_backend end end}. %% @doc raw_name is the first part of all URLS used by the Riak raw HTTP %% interface. See riak_web.erl and raw_http_resource.erl for %% details. {mapping, "raw_name", "riak_kv.raw_name", [ {commented, "riak"} ]}. %% @doc Restrict how fast AAE can build hash trees. Building the tree %% for a given partition requires a full scan over that partition's %% data. Once built, trees stay built until they are expired. %% Config is of the form: %% {num-builds, per-timespan} %% Default is 1 build per hour. 
{mapping, "anti_entropy.build_limit.number", "riak_kv.anti_entropy_build_limit", [ {default, 1}, {datatype, integer} ]}. {mapping, "anti_entropy.build_limit.per_timespan", "riak_kv.anti_entropy_build_limit", [ {default, "1h"}, {datatype, {duration, ms}} ]}. {translation, "riak_kv.anti_entropy_build_limit", fun(Conf) -> {cuttlefish:conf_get("anti_entropy.build_limit.number", Conf), cuttlefish:conf_get("anti_entropy.build_limit.per_timespan", Conf)} end}. %% @doc Determine how often hash trees are expired after being built. %% Periodically expiring a hash tree ensures the on-disk hash tree %% data stays consistent with the actual k/v backend data. It also %% helps Riak identify silent disk failures and bit rot. However, %% expiration is not needed for normal AAE operation and should be %% infrequent for performance reasons. The time is specified in %% milliseconds. The default is 1 week. {mapping, "anti_entropy.expire", "riak_kv.anti_entropy_expire", [ {default, "1w"}, {datatype, {duration, ms}} ]}. %% @doc Limit how many AAE exchanges/builds can happen concurrently. {mapping, "anti_entropy.concurrency", "riak_kv.anti_entropy_concurrency", [ {default, 2}, {datatype, integer} ]}. %% @doc The tick determines how often the AAE manager looks for work %% to do (building/expiring trees, triggering exchanges, etc). %% The default is every 15 seconds. Lowering this value will %% speedup the rate that all replicas are synced across the cluster. %% Increasing the value is not recommended. {mapping, "anti_entropy.tick", "riak_kv.anti_entropy_tick", [ {default, "15s"}, {datatype, {duration, ms}} ]}. %% @doc The directory where AAE hash trees are stored. {mapping, "anti_entropy.data_dir", "riak_kv.anti_entropy_data_dir", [ {default, "./data/anti_entropy"} ]}. %% @doc The LevelDB options used by AAE to generate the LevelDB-backed %% on-disk hashtrees. 
{mapping, "anti_entropy.write_buffer_size", "riak_kv.anti_entropy_leveldb_opts.write_buffer_size", [ {default, "4MB"}, {datatype, bytesize} ]}. {mapping, "anti_entropy.max_open_files", "riak_kv.anti_entropy_leveldb_opts.max_open_files", [ {default, 20}, {datatype, integer} ]}. %% @doc mapred_name is URL used to submit map/reduce requests to Riak. {mapping, "mapred_name", "riak_kv.mapred_name", [ {default, "mapred"} ]}. %% @doc mapred_2i_pipe indicates whether secondary-index %% MapReduce inputs are queued in parallel via their own %% pipe ('true'), or serially via a helper process %% ('false' or undefined). Set to 'false' or leave %% undefined during a rolling upgrade from 1.0. {mapping, "mapred_2i_pipe", "riak_kv.mapred_2i_pipe", [ {default, on}, {datatype, {enum, [on, off]}} ]}. { translation, "riak_kv.mapred_2i_pipe", fun(Conf) -> Setting = cuttlefish:conf_get("mapred_2i_pipe", Conf), case Setting of on -> true; off -> false; _Default -> true end end}. %% @doc Each of the following entries control how many Javascript %% virtual machines are available for executing map, reduce, %% pre- and post-commit hook functions. %% @datatype integer %% @mapping {mapping, "javascript_vm.map_js_vm_count", "riak_kv.map_js_vm_count", [ {default, 8}, {datatype, integer} ]}. {mapping, "javascript_vm.reduce_js_vm_count", "riak_kv.reduce_js_vm_count", [ {default, 6}, {datatype, integer} ]}. {mapping, "javascript_vm.hook_js_vm_count", "riak_kv.hook_js_vm_count", [ {default, 2}, {datatype, integer} ]}. %% @doc js_max_vm_mem is the maximum amount of memory, in megabytes, %% allocated to the Javascript VMs. If unset, the default is %% 8mb. {mapping, "javascript_vm.max_vm_mem", "riak_kv.js_max_vm_mem", [ {default, 8}, {datatype, integer} ]}. %% @doc js_thread_stack is the maximum amount of thread stack, in megabyes, %% allocate to the Javascript VMs. If unset, the default is 16mb. %% NOTE: This is not the same as the C thread stack. 
{mapping, "javascript_vm.thread_stack", "riak_kv.js_thread_stack", [ {default, 16}, {datatype, integer} ]}. %% @doc js_source_dir should point to a directory containing Javascript %% source files which will be loaded by Riak when it initializes %% Javascript VMs. {mapping, "javascript_vm.source_dir", "riak_kv.js_source_dir", [ {commented, "/tmp/js_source"} ]}. %% @doc http_url_encoding determines how Riak treats URL encoded %% buckets, keys, and links over the REST API. When set to 'on' %% Riak always decodes encoded values sent as URLs and Headers. %% Otherwise, Riak defaults to compatibility mode where links %% are decoded, but buckets and keys are not. The compatibility %% mode will be removed in a future release. {mapping, "http_url_encoding", "riak_kv.http_url_encoding", [ {default, on}, {datatype, {enum, [on, off]}} ]}. %% @doc Switch to vnode-based vclocks rather than client ids. This %% significantly reduces the number of vclock entries. %% Only set on if *all* nodes in the cluster are upgraded to 1.0 {mapping, "vnode_vclocks", "riak_kv.vnode_vclocks", [ {default, on}, {datatype, {enum, [on, off]}} ]}. { translation, "riak_kv.vnode_vclocks", fun(Conf) -> Setting = cuttlefish:conf_get("vnode_vclocks", Conf), case Setting of on -> true; off -> false; _Default -> true end end}. %% @doc This option toggles compatibility of keylisting with 1.0 %% and earlier versions. Once a rolling upgrade to a version %% > 1.0 is completed for a cluster, this should be set to %% true for better control of memory usage during key listing %% operations {mapping, "listkeys_backpressure", "riak_kv.listkeys_backpressure", [ {default, on}, {datatype, {enum, [on, off]}} ]}. { translation, "riak_kv.listkeys_backpressure", fun(Conf) -> Setting = cuttlefish:conf_get("listkeys_backpressure", Conf), case Setting of on -> true; off -> false; _Default -> true end end}. %% @doc This option specifies how many of each type of fsm may exist %% concurrently. 
This is for overload protection and is a new %% mechanism that obsoletes 1.3's health checks. Note that this number %% represents two potential processes, so +P in vm.args should be at %% least 3X the fsm_limit. {mapping, "fsm_limit", "riak_kv.fsm_limit", [ {default, 50000}, {datatype, integer} ]}. %% @doc object_format controls which binary representation of a riak_object %% is stored on disk. %% Current options are: v0, v1. %% v0: Original erlang:term_to_binary format. Higher space overhead. %% v1: New format for more compact storage of small values. {mapping, "object_format", "riak_kv.object_format", [ {default, v1}, {datatype, {enum, [v0, v1]}} ]}. %% riak_sysmon config %% @doc To disable forwarding events of a particular type, use a %% limit of 0. {mapping, "riak_sysmon.process_limit", "riak_sysmon.process_limit", [ {default, 30}, {datatype, integer}, {level, advanced} ]}. {mapping, "riak_sysmon.port_limit", "riak_sysmon.port_limit", [ {default, 2}, {datatype, integer}, {level, advanced} ]}. %% Finding reasonable limits for a given workload is a matter %% of experimentation. %% NOTE: Enabling the 'gc_ms_limit' monitor (by setting non-zero) %% can cause performance problems on multi-CPU systems. {mapping, "riak_sysmon.gc_ms_limit", "riak_sysmon.gc_ms_limit", [ {default, 0}, {datatype, integer}, {level, advanced} ]}. {mapping, "riak_sysmon.heap_word_limit", "riak_sysmon.heap_word_limit", [ {default, 40111000}, {datatype, integer}, {level, advanced} ]}. %% @doc Configure the following items to 'false' to disable logging %% of that event type. {mapping, "riak_sysmon.busy_port", "riak_sysmon.busy_port", [ {default, true}, {datatype, {enum, [true, false]}}, {level, advanced} ]}. {mapping, "riak_sysmon.busy_dist_port", "riak_sysmon.busy_dist_port", [ {default, true}, {datatype, {enum, [true, false]}}, {level, advanced} ]}. %% riak_control config %% @doc Set to false to disable the admin panel. 
{mapping, "riak_control", "riak_control.enabled", [ {default, off}, {datatype, {enum, [on, off]}} ]}. {translation, "riak_control.enabled", fun(Conf) -> Setting = cuttlefish:conf_get("riak_control", Conf), case Setting of on -> true; off -> false; _Default -> false end end}. %% @doc Authentication style used for access to the admin %% panel. Valid styles are: off, userlist {mapping, "riak_control.auth", "riak_control.auth", [ {default, userlist}, {datatype, {enum, [off, userlist]}} ]}. {translation, "riak_control.auth", fun(Conf) -> case cuttlefish:conf_get("riak_control.auth", Conf) of userlist -> userlist; off -> none; _ -> none end end}. %% @doc If auth is set to 'userlist' then this is the %% list of usernames and passwords for access to the %% admin panel. {mapping, "riak_control.user.$username.password", "riak_control.userlist", [ {default, "pass"}, {include_default, "user"} ]}. {translation, "riak_control.userlist", fun(Conf) -> UserList1 = lists:filter( fun({Var, _V}) -> cuttlefish_variable:is_fuzzy_match(Var, ["riak_control","user","$username","password"]) end, Conf), UserList = [ begin [_, _, Username, _] = UserVariable, {Username, Password} end || {UserVariable, Password} <- UserList1] end}. %% @doc The admin panel is broken up into multiple %% components, each of which is enabled or disabled %% by one of these settings. {mapping, "riak_control.admin", "riak_control.admin", [ {default, on}, {datatype, {enum, [on, off]}} ]}. {translation, "riak_control.admin", fun(Conf) -> Setting = cuttlefish:conf_get("riak_control.admin", Conf), case Setting of on -> true; off -> false; _Default -> true end end}. %%%% bitcask %% @doc bitcask data root {mapping, "bitcask.data_root", "bitcask.data_root", [ {default, "./data/bitcask"} ]}. %% @doc The open_timeout setting specifies the maximum time Bitcask will %% block on startup while attempting to create or open the data directory. %% The value is in seconds and the default is 4. 
You generally need not %% change this value. If for some reason the timeout is exceeded on open %% you'll see a log message of the form: %% "Failed to start bitcask backend: .... " %% Only then should you consider a longer timeout. {mapping, "bitcask.open_timeout", "bitcask.open_timeout", [ {default, 4}, {datatype, integer}, {level, advanced} ]}. %% @doc The `sync_strategy` setting changes the durability of writes by specifying %% when to synchronize data to disk. The default setting protects against data %% loss in the event of application failure (process death) but leaves open a %% small window wherein data could be lost in the event of complete system %% failure (e.g. hardware, O/S, power). %% %% The default mode, `none`, writes data into operating system buffers which %% which will be written to the disks when those buffers are flushed by the %% operating system. If the system fails (power loss, crash, etc.) before %% before those buffers are flushed to stable storage that data is lost. %% %% This is prevented by the setting `o_sync` which forces the operating system %% to flush to stable storage at every write. The effect of flushing each %% write is better durability, however write throughput will suffer as each %% write will have to wait for the write to complete. %% %% ___Available Sync Strategies___ %% %% * `none` - (default) Lets the operating system manage syncing writes. %% * `o_sync` - Uses the O_SYNC flag which forces syncs on every write. %% * `interval` - Riak will force Bitcask to sync every `bitcask.sync_interval` seconds. {mapping, "bitcask.sync_strategy", "bitcask.sync_strategy", [ {default, none}, {datatype, {enum, [none, o_sync, interval]}}, {level, advanced} ]}. {mapping, "bitcask.sync_interval", "bitcask.sync_strategy", [ {datatype, {duration, s}}, {level, advanced} ]}. 
{translation, "bitcask.sync_strategy", fun(Conf) -> Setting = cuttlefish:conf_get("bitcask.sync_strategy", Conf), case Setting of none -> none; o_sync -> o_sync; interval -> Interval = cuttlefish:conf_get("bitcask.sync_interval", Conf, undefined), {seconds, Interval}; _Default -> none end end}. %% @doc The `max_file_size` setting describes the maximum permitted size for any %% single data file in the Bitcask directory. If a write causes the current %% file to exceed this size threshold then that file is closed, and a new file %% is opened for writes. {mapping, "bitcask.max_file_size", "bitcask.max_file_size", [ {default, "2GB"}, {datatype, bytesize}, {level, advanced} ]}. %% @doc The `merge_window` setting lets you specify when during the day merge %% operations are allowed to be triggered. Valid options are: %% %% * `always` (default) No restrictions %% * `never` Merge will never be attempted %% * `window` Hours during which merging is permitted, where %% `bitcask.merge_window.start` and %% `bitcask.merge_window.end` are integers between 0 and 23. %% %% If merging has a significant impact on performance of your cluster, or your %% cluster has quiet periods in which little storage activity occurs, you may %% want to change this setting from the default. {mapping, "bitcask.merge_window", "bitcask.merge_window", [ {default, always}, {datatype, {enum, [always, never, window]}}, {level, advanced} ]}. {mapping, "bitcask.merge_window.start", "bitcask.merge_window", [ {default, 0}, {datatype, integer}, {level, advanced} ]}. {mapping, "bitcask.merge_window.end", "bitcask.merge_window", [ {default, 23}, {datatype, integer}, {level, advanced} ]}. 
{translation, "bitcask.merge_window", fun(Conf) -> Setting = cuttlefish:conf_get("bitcask.merge_window", Conf), case Setting of always -> always; never -> never; window -> Start = cuttlefish:conf_get("bitcask.merge_window.start", Conf, undefined), End = cuttlefish:conf_get("bitcask.merge_window.end", Conf, undefined), {Start, End}; _Default -> always end end}. %% @doc `frag_merge_trigger` setting describes what ratio of %% dead keys to total keys in a file will trigger merging. The value of this %% setting is a percentage (0-100). For example, if a data file contains 6 %% dead keys and 4 live keys, then merge will be triggered at the default %% setting. Increasing this value will cause merging to occur less often, %% whereas decreasing the value will cause merging to happen more often. %% %% Default is: `60` {mapping, "bitcask.frag_merge_trigger", "bitcask.frag_merge_trigger", [ {datatype, integer}, {level, advanced}, {default, 60} ]}. %% @doc `dead_bytes_merge_trigger` setting describes how much %% data stored for dead keys in a single file will trigger merging. The %% value is in bytes. If a file meets or exceeds the trigger value for dead %% bytes, merge will be triggered. Increasing the value will cause merging %% to occur less often, whereas decreasing the value will cause merging to %% happen more often. %% %% When either of these constraints are met by any file in the directory, %% Bitcask will attempt to merge files. %% %% Default is: 512mb in bytes {mapping, "bitcask.dead_bytes_merge_trigger", "bitcask.dead_bytes_merge_trigger", [ {datatype, bytesize}, {level, advanced}, {default, "512MB"} ]}. %% @doc `frag_threshold` setting describes what ratio of %% dead keys to total keys in a file will cause it to be included in the %% merge. The value of this setting is a percentage (0-100). For example, %% if a data file contains 4 dead keys and 6 live keys, it will be included %% in the merge at the default ratio. 
Increasing the value will cause fewer %% files to be merged, decreasing the value will cause more files to be %% merged. %% %% Default is: `40` {mapping, "bitcask.frag_threshold", "bitcask.frag_threshold", [ {datatype, integer}, {level, advanced}, {default, 40} ]}. %% @doc `dead_bytes_threshold` setting describes the minimum %% amount of data occupied by dead keys in a file to cause it to be included %% in the merge. Increasing the value will cause fewer files to be merged, %% decreasing the value will cause more files to be merged. %% %% Default is: 128mb in bytes {mapping, "bitcask.dead_bytes_threshold", "bitcask.dead_bytes_threshold", [ {datatype, bytesize}, {level, advanced}, {default, "128MB"} ]}. %% @doc `small_file_threshold` setting describes the minimum %% size a file must have to be _excluded_ from the merge. Files smaller %% than the threshold will be included. Increasing the value will cause %% _more_ files to be merged, decreasing the value will cause _fewer_ files %% to be merged. %% %% Default is: 10mb in bytes {mapping, "bitcask.small_file_threshold", "bitcask.small_file_threshold", [ {datatype, bytesize}, {level, advanced}, {default, "10MB"} ]}. %% @doc Fold keys thresholds will reuse the keydir if another fold was started less %% than `max_fold_age` ago and there were less than `max_fold_puts` updates. %% Otherwise it will wait until all current fold keys complete and then start. %% Set either option to -1 to disable. %% Age in micro seconds (-1 means "unlimited") {mapping, "bitcask.max_fold_age", "bitcask.max_fold_age", [ {datatype, integer}, {level, advanced}, {default, -1} ]}. {mapping, "bitcask.max_fold_puts", "bitcask.max_fold_puts", [ {datatype, integer}, {level, advanced}, {default, 0} ]}. %% @doc By default, Bitcask keeps all of your data around. If your data has %% limited time-value, or if for space reasons you need to purge data, you can %% set the `expiry_secs` option. 
If you needed to purge data automatically %% after 1 day, set the value to `1d`. %% %% Default is: `-1` which disables automatic expiration {mapping, "bitcask.expiry", "bitcask.expiry_secs", [ {datatype, {duration, s}}, {level, advanced}, {default, -1} ]}. %% @doc Require the CRC to be present at the end of hintfiles. %% Bitcask defaults to a backward compatible mode where %% old hint files will still be accepted without them. %% It is safe to set this true for new deployments and will %% become the default setting in a future release. {mapping, "bitcask.require_hint_crc", "bitcask.require_hint_crc", [ {default, true}, {datatype, {enum, [true, false]}}, {level, advanced} ]}. %% By default, Bitcask will trigger a merge whenever a data file contains %% an expired key. This may result in excessive merging under some usage %% patterns. To prevent this you can set the `expiry_grace_time` option. %% Bitcask will defer triggering a merge solely for key expiry by the %% configured number of seconds. Setting this to `1h` effectively limits %% each cask to merging for expiry once per hour. %% %% Default is: `0` {mapping, "bitcask.expiry_grace_time", "bitcask.expiry_grace_time", [ {datatype, {duration, s}}, {level, advanced}, {default, 0} ]}. %% @doc Configure how Bitcask writes data to disk. %% erlang: Erlang's built-in file API %% nif: Direct calls to the POSIX C API %% %% The NIF mode provides higher throughput for certain %% workloads, but has the potential to negatively impact %% the Erlang VM, leading to higher worst-case latencies %% and possible throughput collapse. {mapping, "bitcask.io_mode", "bitcask.io_mode", [ {default, erlang}, {datatype, {enum, [erlang, nif]}} ]}. %%%% This is the leveldb section %% @doc leveldb data_root {mapping, "leveldb.data_root", "eleveldb.data_root", [ {default, "./data/leveldb"} ]}. %% @doc The `max_open_files` value is multiplied by 4 megabytes to create a %% file cache. 
The file cache may end up holding more or fewer files at any %% given moment due to variations in file metadata size. `max_open_files` %% applies to a single vnode, not to the entire server. {mapping, "leveldb.max_open_files", "eleveldb.max_open_files", [ {datatype, integer}, {default, 30}, {level, advanced} ]}. %% @doc The cache_size determines the size of each vnode's block cache. The %% block cache holds data blocks that leveldb has recently retrieved from %% `.sst` table files. Any given block contains one or more complete key/value %% pairs. The cache speeds up repeat access to the same key and potential %% access to adjacent keys. {mapping, "leveldb.cache_size", "eleveldb.cache_size", [ {datatype, bytesize}, {default, "8MB"}, {level, advanced} ]}. %% @doc The 'sync' parameter defines how new key/value data is placed in the %% recovery log. The recovery log is only used if the Riak program crashes or %% the server loses power unexpectedly. The parameter's original intent was %% to guarantee that each new key / value was written to the physical disk %% before leveldb responded with “write goodâ€. The reality in modern servers %% is that many layers of data caching exist between the database program and %% the physical disks. This flag influences only one of the layers. {mapping, "leveldb.sync", "eleveldb.sync", [ {default, false}, {datatype, {enum, [true, false]}}, {level, advanced} ]}. %% @doc Each vnode first stores new key/value data in a memory based write %% buffer. This write buffer is in parallel to the recovery log mentioned %% in the “sync†parameter. Riak creates each vnode with a randomly sized %% write buffer for performance reasons. The random size is somewhere %% between write_buffer_size_min and write_buffer_size_max. {mapping, "leveldb.write_buffer_size_min", "eleveldb.write_buffer_size_min", [ {default, "30MB"}, {datatype, bytesize}, {level, advanced} ]}. 
{mapping, "leveldb.write_buffer_size_max", "eleveldb.write_buffer_size_max", [ {default, "60MB"}, {datatype, bytesize}, {level, advanced} ]}. %% @doc Each database .sst table file can include an optional "bloom filter" %% that is highly effective in shortcutting data queries that are destined %% to not find the requested key. The bloom_filter typically increases the %% size of an .sst table file by about 2%. This option must be set to true %% in the riak.conf to take effect. {mapping, "leveldb.bloomfilter", "eleveldb.use_bloomfilter", [ {default, on}, {datatype, {enum, [on, off]}} ]}. { translation, "eleveldb.use_bloomfilter", fun(Conf) -> case cuttlefish:conf_get("leveldb.bloomfilter", Conf) of on -> true; off -> false; _ -> true end end }. %% @doc sst_block_size defines the size threshold for a block / chunk of data %% within one .sst table file. Each new block gets an index entry in the .sst %% table file's master index. {mapping, "leveldb.block_size", "eleveldb.sst_block_size", [ {default, "4KB"}, {datatype, bytesize}, {level, advanced} ]}. %% @doc block_restart_interval defines the key count threshold for a new key %% entry in the key index for a block. %% Most clients should leave this parameter alone. {mapping, "leveldb.block_restart_interval", "eleveldb.block_restart_interval", [ {default, 16}, {datatype, integer}, {level, advanced} ]}. %% @doc verify_checksums controls whether or not validation occurs when Riak %% requests data from the leveldb database on behalf of the user. {mapping, "leveldb.verify_checksums", "eleveldb.verify_checksums", [ {default, true}, {datatype, {enum, [true, false]}}, {level, advanced} ]}. %% @doc verify_compaction controls whether or not validation occurs when %% leveldb reads data as part of its background compaction operations. {mapping, "leveldb.verify_compaction", "eleveldb.verify_compaction", [ {default, true}, {datatype, {enum, [true, false]}}, {level, advanced} ]}. 
%%%% Memory backend section {mapping, "memory_backend.max_memory", "riak_kv.memory_backend.max_memory", [ {datatype, bytesize}, {default, "4GB"}, {level, advanced} ]}. {translation, "riak_kv.memory_backend.max_memory", fun(Conf) -> Bytes = cuttlefish:conf_get("memory_backend.max_memory", Conf), cuttlefish_util:ceiling(Bytes / 1048576) end }. {mapping, "memory_backend.ttl", "riak_kv.memory_backend.ttl", [ {datatype, {duration, s}}, {commented, "1d"}, %% no default, it's undefined. {level, advanced} ]}. %% o man o man o man vm.args! %%% Things that need to be in vm.args, but never tuned {mapping, "erlang.smp", "vm_args.-smp", [ {default, "enable"}, {level, advanced} ]}. {mapping, "erlang.W", "vm_args.+W", [ {default, "w"}, {level, advanced} ]}. {mapping, "erlang.K", "vm_args.+K", [ {default, "true"}, {level, advanced} ]}. %%%% Tunables %% @doc Name of the riak node {mapping, "nodename", "vm_args.-name", [ {default, "dev1@127.0.0.1"} ]}. %% @doc Cookie for distributed node communication. All nodes in the same cluster %% should use the same cookie or they will not be able to communicate. {mapping, "distributed_cookie", "vm_args.-setcookie", [ {default, "riak"} ]}. {mapping, "erlang.asyc_threads", "vm_args.+A", [ {default, "64"} ]}. %% @doc Increase number of concurrent ports/sockets {mapping, "erlang.max_ports", "vm_args.-env ERL_MAX_PORTS", [ {default, "64000"} ]}. %% @doc Tweak GC to run more often {mapping, "erlang.fullsweep_after", "vm_args.-env ERL_FULLSWEEP_AFTER", [ {default, "0"}, {level, advanced} ]}. %% @doc Set the location of crash dumps {mapping, "erlang.crash_dump", "vm_args.-env ERL_CRASH_DUMP", [ {default, "./log/erl_crash.dump"} ]}. %% @doc Raise the ETS table limit {mapping, "erlang.max_ets_tables", "vm_args.-env ERL_MAX_ETS_TABLES", [ {default, "256000"} ]}. %% @doc Raise the default erlang process limit {mapping, "process_limit", "vm_args.+P", [ {datatype, integer}, {default, 256000} ]}. 
{translation, "vm_args.+P", fun(Conf) -> Procs = cuttlefish:conf_get("process_limit", Conf), integer_to_list(Procs) end}. %% @doc For nodes with many busy_dist_port events, Basho recommends %% raising the sender-side network distribution buffer size. %% 32mb may not be sufficient for some workloads and is a suggested %% starting point. %% The Erlang/OTP default is 1024 (1 megabyte). %% See: http://www.erlang.org/doc/man/erl.html#%2bzdbbl {mapping, "erlang.zdouble", "vm_args.+zdbbl", [ {commented, "32MB"}, {datatype, bytesize} ]}. %% @doc Erlang VM scheduler tuning. %% Prerequisite: a patched VM from Basho, or a VM compiled separately %% with this patch applied (R15B only): %% https://gist.github.com/evanmcc/a599f4c6374338ed672e {mapping, "erlang.swiffy", "vm_args.+sfwi", [ {commented, "500"} ]}. {mapping, "erlang.scl", "vm_args.+scl", [ {commented, "false"}, {datatype, {enum, [true, false]}}, {level, advanced} ]}. {mapping, "erlang.sub", "vm_args.+sub", [ {commented, "true"}, {datatype, {enum, [true, false]}}, {level, advanced} ]}. 
cuttlefish-3.0.1/test/riakconf.ez0000644000232200023220000002620114027401005017340 0ustar debalancedebalancePK p€âN riakconf/UT 4d]*d]ux ääPK‚áN¿äâVÿ)]–riakconf/riak.schemaUT ¼]¼]ux ääÄñ$‹Ss¦ËLä,àZ†,áY&ÓÅÞ½{ìï‘ Ùs1çe\°>²0¼*eZþ)<ÆÞð+«sÁdÁ¤fœej  æƒ ‚ÞÂc³Göà {ôpÂfO؃Sør:{ÀDî]Û'l÷ðô>ý¯üPå£Ïnk7þa±ëˆ¼ØdbÂdZˆ…Èo&ôÙàŒ;š¡J3¢úÓŠÇ–«\¬ÆÎoö>Þx{{õ„.b©*Ze D ~)–9±µ,–¬ÈyªcÓ˜oD^ñM¤<îò°+ø'-ä1P WÙè.DÒ¤=Îð팊üW+¯ó½Cök‘– |Uéh1Ÿ°HåâãM‡*uTmbŒsFö¤á3•Î- Š¢(ðà?±°,ŠXÌ¥^ž…0Ã_ˆâ°{Z:¡…!×¢Z­æôN˜]#æ>ÞüÕ~%üë{7 G4cxÔÆ*ßiI" 3÷Ìÿ[ž:.ƤÕé`Å´õR§#Å6ªdk8Èr>Õ*^¬ý²ÈÊâ W 2g,U´?Ìe,ÎhþÄt&B9—"bÁ†ÁZÏÁÁQƒ ý@t’™GG²uÖ[„òÛšë5%¨1ˆNèJD±Èu%:N$ž•’¾4!yqÂÄá8!”@®Œ9záY‘*±X‰•±A.AÏ£ù; )Ó¹C’DÃL™¯ò”¶y®òž± ŽîŽ" ÷½)¬š6Dd¿Þûþ‚ëì>ˆH!CAkžöw»ãŒâG§ÂŽn‰›YsGÌŠ2OQUÁ`  c¾î€ HÔ¨°ÓUñ¬o¢º°aòÒÌA…6gÐNµð60:6‰}¸¦-}3Íxx&ÄÜSü –E\$ˆ7 ßÇ›•ÂÕ>:Ëc¬ß $ÿ®ˆµøÛA®Lœ2{B»¸M þ?`EŠÄ!‹=Bià~mº5ýfbìúß5i®U³Û¯Æ+|“‡O|trûõ =°~ÿ»ç'û·_ª2Q<½ŽÕ,¢Ÿžõ}­¯É’\qC&¬qÏÕP¾»i2ºJ-0–›v‰oÕ`ÒÅù¦…"Ð1,m¬oMÖZ&†º¤/”/”¶Œ òCVƒë¸k[ŒïR+.Ýèø¥Ï³jŽ»¨ÓºSÈWèODa»¡j›Ðï¿ï>5YÛ±ºxrñÿý ®’¥*㈥ 9P <*€³׌ÔÍDsMNþëáÿùdV|ÀÄõnwM°É£OC^,c]0ã\·Çpìf÷î-ÕšŠee3‹þU²hî˜"/Å_«oÄ9„&bØk}yyyÎ^KˆR`Dí6Ø/Þ²(2ïohѦ00e¯ÎáøQ.´&§ñòÙ9DL9ò€§öÜdhŒÞò9W`-ç@¦QÛµlmônÔñ{a×û³?z'ð?ÊŸüåñM—23_dÆe$üÚu!ŒRïo\›åzlÃÿèˆMkì¯8¬†ðì töñƒŸå0ôé°}Ô¶}Çÿ> eÿõ/víOà§ö·c_©’ãX–«Bå|×Üœ;rî—‡*fOËùõsg.V·9É3éeÁv6²Ç'üú|t[3ñüéíYèιÏŸö˜Hìʺ31Ì!þI&eQOºÀ€‚¸Q†Kû£%åt2€có8`*Râ5¬Ý°E®Ö{5gZ`ŒS°¤Ô 0¼syöóOìÄÃÅO7΄ü> rJ0NÅÀ;”\|Œ³e¹ Œb@É‘îîÀ\¦²¨®ä} G‰!¤ªÔñæþ‘"Km¡ ˆ¥\@ŒÛ’¥J„,m:Bäן·%ŠÚy¡vÌæBúfl„y ˆ ÑÍZyôÕd>ºjŽu"¤ï3ŹÊQÄIGS\´ß ›fA­½¸±Õcֶб4ìjê7µ¸ús&÷ân6Wïlt/†Ö N(ò†ô‡›1OÓ·—ç¸ ‡8Q&tcJvA;v`œÝJ’ûÇW\Æ´ƒLÙ»ÙéÓ“;´KL𧽑Bñ‚Ûši@ ep±B4DB`>=yÈV’5CP•`˜Ù¹È”–…Ê7ì?Yö<\¶ôÉ´MIóÍ·x}I*Ïæ¸ÉºøLQcéXŠïsCïhIM?L(.šR$t´õê"y‡‚EuöKÍÍó„èPbK,wnŽc[HÁC‰m MƒF©t!‹ÒÊE.ž·T·ò[Àèû=£½Ü0 Uwý\çn ÷ ãØ6Ðpul\öÃÐ6¸± ¶À­Gûirl…«ÛàV£Cé÷®ù£ †lÀ…à9D ¶äƒC^ljïû@¥žYeõŒ¡–1«c­;œUØ™_=«[_7?¼Ám5¯Bø›k^]Ì}#rˆŒ^¥‘øDÛʺÄ~ F7W`þ‹dÄ ¼ô ’,Áu¾¤uH±&ácéBÙß20¨)ÓÆì®´\PB,f°)„ž¸úL‘ U(ö‡‹æ·¥HM† ÖKˆF–*Žà*Ũ†rÁÃ%^R°œ<ôË»(Ã"²>˜¢+yANŒq*(²h2ŸØ¦úÊ#‚ÁN ®Ï (¶d¿ÃœifožîwÅ”h³;4z Pè.&ç·¼6N*0å{‚9%¥"wªÎWQ³¢¯j¢4áªÇ„q¼Ïè†:¶"˜Èœ&ǂ㶸âR†WØÜQ¦`ž‹?JÄÐÂ"GeŒž€šoçù©.=Gæ´éùàä³îïkªˆ[Õ¨ˆ 
b…n#%9rI)†0çz‰ÅÉ ù Tþ&¢>s~àT%²Àë’U"@ÔÀǪW/:Jžã`§¼Ißüf.¡[{¥E¦öe,ÊL¥Cäv¬ˆ»25Å`‡;v©ç}tzúã¶¶D£®Qgð‰®ûU:ÇðcóHET‘“áŽnDOv×çß(mB ­,Îc|Bü@¦p29åö\$fióx÷âÉó7/(´à°T‡¹Ìêr»¤ 4ᦫƒ"ì³étÑTx YÓöUS5 bLŽàÔ€õT>"X‹"‘ýÞMEaɪM‰_*‹dYWÓ€$ôJˆl<4Ê…¿QŠ)Š kUæ¹q¼ÅN,¥â×Ân ñénuÂäç",X³ÜØk jsÿ޵5ïÿáN±Æº Úo wœ ôKÛÆö¹­/4x–oà ûn±¨3XaÊÏh_ÐQ€cç`˜öSL/e–¨R:‚C¨w‚<=™Xo‚ÁÛ-¨gMz‰ŽåÆæ¦¼(³af;<ü ÝÚ|„ïËuÒcùìäów ¥¥þùÞ¦Ïj/ BØÄÕΫÎ.݈×ávJ8]xÈ+òFÌB”ˆh'uÒr*0•!7MÆ]E¢u'êöÆÍ¾ÀþPû¸,@t®ÆÄÜOLëVL˜q(á_X.? !p‡¼ˆÏJ¾EEÚbâÛOŠ“~{²æLa¿u§Zgº1Ó|éÍCÚ´¦á‡î¬¦ÚàØN 4u1çkŸ:y¬w:—¹.XjJÄAj~}÷úÂÈM‘ÔÂ:JŠ[GÞTn‚.“(¢›ŠDÁeÜvJVM l|륩Oªs¼ºÈ%X,Ïá eOž¼0¹ìRšPj…Ð{Š_œá7ꯤû{!Á{#‚HºÃÑŸ»‰6eŽw1²0ŠVM< È5ÁcoÁu§]‹‰Ù ÏÆ|a˜Ù‹qã ÃÞñ)ð‘õp)­.µó¸Ðq ·ÎŠuL'¯ ,Ô1z%:ãé %ëÛwfÏ‹=ßK ~‹ÐÍ.ZæùdA=c›Ç:ƒýÆÔ¾åÛn÷vÙ-®;ÏpØŸ-{þßuTæÆ®°DW§movØi¯ov®?Û¡)xŸ†öTí Ð>ÁŒ)L)Éò‚EÖ&jÇû¡Ù¬!5OÐþÜ%šíJ„]ó@ú™¤¼Þ‰˜Ý[ÛlƒQsžoŽ)KJž½3XÊ4+ #ØÔ¯E…TpAÁEL¥T€!!^ÓMA[`}p4a ¡òx›²+Þ‡°g–«BT\rx@øN®’ GäŽH›Æp,øJ˜ö—,…5×:&Éñ§2[ä<Âä±JØ Âì²ZZ P¶ùzI±-îö°üÖÙs9azÁÃeíл® Ô) ñQP®¸¾…ÿ›¯¸I!"„•ÌÉaJÝqÔï\úM߀ƒ#L8ê„U™˜.QqLžM¦tqŒW]÷ꪪ êN ð3èß+ÜüU‚Ìñ×ðS•Ákò­3Ôé˜ÙjFw4gÛtp´½ï£»í‹4Ûu`ìN €@“ `‘t‹G1Á|•'²¦Ò p$šyð]šu몢¾Î÷¦Îkƒ?Å.ÕzÀ†P¥)«Ÿá€²›ðõÅjd§;ˆ×Îÿ¯bVWÜ*7«”踓[ ¿ÌuB¯kÄ'8€m=­³a¬òPð&Ák”ži™Ç1¦Ÿ«©XìT€o!1Äœy?‚Ç ÖáR€zìå²D·3¹@šn¢µ/Ó­ØÏk£ELZižÇGx ®cž/šÌé(hñ öÃÿ˜E'6éÜ”ÅêkSþš;Åû““ê¡Äü$òM„Óç”2… »>eõT‡›ª“YLùZm³HuVT\‚Më€á>c«“ [Íh|urÆÞærÛÄLP¥õ  ¿P¾ÝÛ æ±—ôJ D®‘ÃÀ"S¶YÍÎØ/bmg’TÒ…‡…+ÓRëW‚Y>ã&´èÛ¢B“ÆÝ6W½â‹ÓmsÈæÍjš+7:Qi·}YwéÀFkž“Oeû˜ˆÚTç ˘çÌìS‚ŽÒªßN4vó¬,vhd°}ÀF¥hÛSÜ4T^ŒàÐÙ1zÛ²?På’ÞçÙZQ–àëV™“Ñd(È2ƒ £Õh|>Á 'óå‚si߃Eè'–x v)vã³Ã€2 ©Jÿ¹:2uDc`A8r°YÖFš µaRáùøÙù¯Ìü)!=ÊÛÆþ]ª¶‡Úd½[G5Ì| d4ŒD¸ÈÓÙl¶ÅXmg3i)Û¢óÜNñ˜w2t\£;IÜå?7–Ô´kô¬A©7~ëqÒÀ@û|x ŽÙ›˜ûsëÛömØÀŠq”š£_¯Êª9o«ÛXc"7Ô$>"£„²²©ˆû´ðö;¿•¦÷¡tþ@Ñù3~Uïoÿè¤ãN=)ªøTØþ¹bÛö#2x!Úx—!ÚS‚’ÈÏÞãßb3KìM" Vçè.ޱÇã°gGöc›AÔÖ?Iå&ðjh›½›ÆÿŒÃŽ­?áávGR»Ÿ ,cÀ[¾øî×½Á~»Wpa—5¹܅x€ô·í`&Õf®pM78ÎÂ:‰É&d\k4œºÏA µvP !zß9¸žÙcšÃ¯_ÕÂ%ûcŠqÙþ̪Áwö+ŒàßP˜^á mß¡qòõ{Ÿÿþ¦ñyèݱÔþ¼üóÏ×x¸<¤U:jižìWŸ+š| üõ½{—kºUêçv¯#8ôè¹µ ž¯ +â|ÜëÊØeÛ„¢0¹ºÙ*3ÓiK.DW@\SL» [©š5­ÑÃb•{¨Gyº u›Â‰ΟðD*­B|úöÀ~½sP¼Í`[à_f®+¿y l™ÈîÝC›žÁš§®÷‘ZxðIQ“Òv°ýièãpýÝÎtÊÚ’#ª"c þç¶¶›`]=€ºmžZ<];E@™<ºt¨½E Ÿ8ƒ7d 
S«ôWÏ:í¿×líO0²Á™2bú噓ۓMÏmViå&dÚD ]"òt«6·„5@+ÓâteŽM¿ª<ˆ™ø!LÙ²©?ÇÈÃ$EY­”졽RtÔK,†<ŽÍKfó¼‚$­%{zÞ5m§ëé>}øŒÑ—"˜Òœø Ì)bTÜHÀ´Íñ ­ëÆô¸nôh—zlc[®g¢õNÍáÞð ñß^uç–)M|ן4BÝd;cе””Ú>èwõYb©²/­TÏÙ,¡…µPŒ2T.•Ó¢ÌÜ÷p®g›rêRÙX½?®(Æè¤{L³4äeµØÑ•r&mõöµR‰MiÑR{·²ˆpT°¾žÐla#Hï‘Ll©åŠ\¨ò¦­Ð Š>¤l ä0]ÛhD}Jl #29$"‘ ÌÍÌè‡ä=¿¢ß&µôgI¶fqJWk£ G'ÔäµY›Ét¹ÑÒ’DA «ÖŸN¸²d/ÍÅÝ…ÉhC…“AG¡~¿î2® )’+YÂF»›göµ¿©¬%ÉžÊà~ò*[Çi¥©}\ÀŒP>{¶%Ks1ÒÂÛþÕ?ß<7dJ'!+ð&½>qmCLv7*MI@à—n#R=¢ß¬.¤¶ª¾·©1u(¶Î–¡B½Úì<3– é šÃØa×22ûõî‘ÅÒËí}Öì«–ö&Ö«m˜;m¬ëÙüãV¢Tįò/û»¥‹MôÒþv—y„ÅÀº^6W…Oô•œ®k;ŠL­ÂøñœÛ'Ò5ˆÒ‰”Àjû”’Åëš©µD<e¶RÊ3àâV«D…¡6d·îBv9T&¢ªèpÔ4Vñʳèß›õ#¶Æ×Ð_Uc®'ÖTKÆž3 ñoXe ¨$ó:—]T½V'DÄ: Eg˃š9{òí–$Ë-R^c ¬e{¯ø0ƒ®ƒ)¦¶ŠlŽ1ƒõV˜¡ä„ÁëèÊÑ5{9YçŠQ…k”›¤1 <}ûe”RHÃp¤¼G¬Í‚P úæµ³²åÌf^µ“ú®Ø˜MU/êC§_´¬„«›8|süçt§O¡ Úؽý=#óCíJn|7€'8¢¬œœÚ=é娩Ÿ2ä¶ZŠç/ÆÔãbÁFÀðÍ}Í @[?oòl/óbD“P&¯‰n+p?Ô¡Y fiƒïó”ëKá‹àae„)šÞI$.ÜQ:í¿‡¢-²Ð¶›ÈӮ𽫖é½7“&Wï<¼OŒµÉËû?9}ÀZ7·°«‡îmµïÜÚ4kÉìò/»‹ÈbÆ‚Ø_õôámmW5l~yrpì¥ñµó m€y>\.¸gÞ ¬ÎfÁT²lÔÙou-éÜž¥ê÷‡ë؃ã-ç[Ÿ˜ÑÞõÁû¯køê ®³’V’CÖmB-ž®ŒRÙ«ÖuéòHáÀÊçÙ—],¹†M™v«È Â3¯AÁ®Œ–ĉ2ÔkA:Í’ ûoèd„KØîî6PTˆ äÎót-(ÜŠˆ•q ÷[×µ+ Â|98 ¢ÛðÁ± ù¨p•UÓBEŒ‹2æNãÎâA¾ö rL|'07Rèöî¹j7ͺ¶6RF ?§FXœÁ[y}½:¦”Qgä»Ê 쮞‰þK`òäKSÂNkÕêÈ6‚S‘År¹O6؆ÊËtLâ;<[Woð([Ί[ÁØ$†¤M,B˜oήÜQeù p@}Œ¼?\ ;ÛŸý99±œS{@3ú¾˜)LÑ^8D€íK0e†ñäÏ…e Ùž¹àvÑDµH&"ŒE)l(T8£k²± 'DØÆjw˲qÎ2³DBÅa'†b‰sóYJ#MzÑâ¡Mɸ«ŠÍac"9XÂÓRÍw-ŽP8ºG¥ ¡P_¾tùå‹|ü,ùF ¡D]ÞçåÛ«—?%Ïõe›oóæå…¨†åªøH`¹ÆJƒâ¸ •¼Q›7_J äT+t«êÓá"›¤ÊNqè‹uâõþñºK$]ëa’>MË(•‡ût†Û †y¦ …Uôe.ùÃn`Ãb6K—-Ê"/ú˜X'úS¨„¼mÊAžvAY¯¢ãóhØÒ•Rqä].£\–VPd&ü®,ök<±Kߌaª BˆAåˆÒÜ@=ÏU•ì^¦VêƒC@K‹S ú[б W6Ë6+ñèÿY8ëHØRJ.æœG²y3{S"cV’çÙ:Ï/Á®O‚€¼žLw:uñl²+…ã…î–ëœ@ß¼ª"Fó&ìJÖúèNbÛj™Ý©T¤!cœ~é4ÓVñ—š³ÂbgŠ]ň*íN˜!§’íZŠt°µ’‡jÔA¦õ¸(¹EÝ1Î}hiÐ+Ëõ ‘d±,’óÅ­rLû±&…®üK³åzCJ—ìEùJ1·dtÄ[*!)«l™A"‚ttùc‘E®:„Œv北§?QfV ü¼õìV-­§Ég7#!àáeæ“|O2Œ!G—¾´œ¥ÃÀë¸(¦® Uðê½_TGÆ ³ùsIºb*ŒQØfšKžØVVÇŠÐ#QÊü,)Y›Jš€n>»•NܘáS-ƒ`0.µ‡ìÉÆ¼f~Ô,Š¥Î+9´çt#_µô²åô¶„ Ï=LŒ?&}Y‘4β@–ÔÿþÛ%MaR£ßûŸ%ƒ\›€Ó Y«…N¬Ô\Žˆå¢¼½tîª!áZ&R-ê¢Ð6éåìŽxÕnŒ¹TkŒùJùblæ¹fÊŠ*3Z{ŽŠ$D#Fý¡vU7lÎ?Â,­Ðn¨6[”Œ2*Š ÚsrÑ+üÉ2Kï „1ò“†1š’r;W(x£Òןá/&o8[‰]O¤Xö›ÒSx\x…Kæ0‹”­CÙŠ (ß*8|IkÙ}¬<4@½úH¨ç‰¥ù$ý%ÊãhÓÛÞ¨íç§[1ÔïeßÒeúË®Añ:dýCˆ£[sØg¼m†çu 
â($dzþ:fÏ1Ì“RŽºµxð„™lL{X`’ø´X­Á¦/’ýó&[Iê:6Bv Ö_dh-—A-=RÙ%¨YuËvß—îQ¨ˆœ¹x83W¸c÷j3öúÔÐ⵸'Ÿ÷‚Zw{¤–±@ Z'¨K„ã˜ÏÒ6Á4*{¦Ég®Ù(úá³?‘ ½“Ïîu­n|µK•ã×éÆj.´(« 9ÜhxÔ§9c-¯qhvUîm©Óç0N7‹vs¡2 Êp Û@ñ=y¨[ùz"è0‰Ü1¼“[Ël|J‰t_?Â韩LrûG ×uáÞÌ-O¸íò‰;¬diÉiéjí'Â$ WƒºûðDïS_a<‚óÄûéõ‚.㘧/^ƒÊÞJg† [Uq@´ÖWîŠ u™}1ßöJb3/}ŒA3$(î¶T>R„f¥I¢½^Ô/Š+ÉÆlüÓ%•lÛ#§éll-ÆTY@šÈÃ?Ç›ÉÝšsO*уíLËÔ†ÒÒa«£ö^Ñ }àÞý<^&çV*¹¢dòŸþ)tš°¶šÞS` ÁõB>°=±òS€¸å¥;‘ξÝ~DjÃùÞÒu# ïkz$Ú`%·ÌÁ%âyu¢›u>{6Ìr 7ìIˇÉñÑÙÓÇ_>ÙÿÌíí¤4-m¡¡<Ýí$ô1ÖŽG†„‰á뢨üÉùÚì.±®·uå8ûÿW„™¿P``v,&jyweUH4â¾õzûÈ›¿8½zå÷¤w>Îûø¢w ×6©!Ý.–ôÛýâÇf«7÷lðû Áï› b¹í8ÙƒlΦºÒý ªLÁ‚’ùýão‹{ëå_žP a”}<þ{usÃ+\|È5ã+Gø5¯ˆä¡¢³Yh=n&C¸ŽF?ЦBÓ:•]Ó‘å±t 5C·šK^Èõä×3[?¤ý„Ûª7 ¾4ÊE¶v¿Öæì_ÑäZZÞ˜± x9œÕ)¦Y>´\… Å+‰Ëò F_¤o¨ ¾L%[|L^¼{Õ}þSÿòí»÷W±5.tz“áø–Ñ»ÕÆÏ‡‹tÛ,Js ]öߌàâ‡W¯®~|ñâ²~ñþÅ»æ0Žî䧺ÒÐ׺\55ÚÌ—1ºÈåPx×ówçWßõ¿ùáõeÌ/[ÛL;}½& Ôï IsÊï¯Ôüȩ…E†y}¾ÕÎ'ÓPÿýùׯ^D˜uòøI„[Õ l0Rz´Ð]Í5p”œ¨^6•ÜC,C‰m’akuœ„KÓuë^X—Ûÿt @Ó¸Â[Ù¯c \в1¼žŠ?-DQ(ªnò5îJ«àí©"W†Ž6m§æýê…Ìf[Y#NT)2HžºnèÛÇǧHÏD(A• óan¯}`ÓE›*·r3™ðÌN=ãåÁÀ`uuäÕEËàìÑÉY²wìBûš™’=#ªé³ÃÛ››žÊ^±š ’@¢{Óõ|ö×ÏO®]_Ï"²úë¨Ø\Ͻøßµâáoÿ§'w¿4ÏÅÌ’×|nf²a#Ëï\®2¿Ë|mæ´°5o5ïóœ@¶1#ÅoîJë2Ã9jIž…^ÂÓà?%Ì2Jöp—5ýžûÏl±ºG¯\÷¼Ëô2£€æÃáaúø«¯ÆgÃ'§_žž>ÍFO¾<ÉbÆÅM>ßô*Ç7yŒ\«ig8 FINC»yƒÂ}ñHÿ›ë°þÝèß³?ÞýÿPK‚áN_mŽŽêriakconf/riak.confUT ¼]¼]ux ääeÌÁ Â0 €áûž"à½íæÁ)ø,£j¬…˜Ž´sΧ7S¦ ôðç£9t9>ö°n`#Âè¹€‡|õD( J`&•öØ!Iý¤þ„‡!T”‚A‘$æiþÆÞ¼X­ö]u{™câœÿÔÒ—§¬CEâŠb.È(æRJo"ö¤·ºÙ§¯ÞµnÛþ8¼û°'PK p€âN íAriakconf/UT4d]ux ääPK‚áN¿äâVÿ)]–¤Criakconf/riak.schemaUT¼]ux ääPK‚áN_mŽŽê¤*riakconf/riak.confUT¼]ux ääPKj+cuttlefish-3.0.1/test/unset_translation.schema0000644000232200023220000000034214027401005022140 0ustar debalancedebalance{mapping, "a.b", "erlang.key", [ {datatype, integer} ]}. {translation, "erlang.key", fun(Conf) -> cuttlefish:conf_get("a.b", Conf) * 17 end }. %% nevermind, I don't want this translated. {translation, "erlang.key"}. 
cuttlefish-3.0.1/test/riak.conf0000644000232200023220000000035214027401005017000 0ustar debalancedebalancering_size = 32 # we want a smaller ring size anti_entropy = debug log.error.file = /var/log/error.log log.console.file = /var/log/console.log log.syslog = on listener.http.internal = 127.0.0.1:8098 listener.http.external = 10.0.0.1:80cuttlefish-3.0.1/test/conf.d/0000755000232200023220000000000014027401005016352 5ustar debalancedebalancecuttlefish-3.0.1/test/conf.d/dir.d/0000755000232200023220000000000014027401005017352 5ustar debalancedebalancecuttlefish-3.0.1/test/conf.d/dir.d/riak.conf0000644000232200023220000000007214027401005021146 0ustar debalancedebalancelog.console.file = /var/log/console.log rogue.option = 42 cuttlefish-3.0.1/test/conf.d/riak2.conf0000644000232200023220000000005214027401005020226 0ustar debalancedebalancelog.syslog = off include dir.d/riak.conf cuttlefish-3.0.1/test/conf.d/riak.conf0000644000232200023220000000026214027401005020147 0ustar debalancedebalancering_size = 5 # we want a smaller ring size anti_entropy = debug log.error.file = /var/log/error.log listener.http.internal = 127.0.0.1:8098 listener.http.external = 10.0.0.1:80 cuttlefish-3.0.1/test/erlang_vm_schema_tests.erl0000644000232200023220000002276414027401005022436 0ustar debalancedebalance-module(erlang_vm_schema_tests). -include_lib("eunit/include/eunit.hrl"). %% basic schema test will check to make sure that all defaults from the schema %% make it into the generated app.config basic_schema_test() -> %% The defaults are defined in priv/riak_kv.schema and multi_backend.schema. %% they are the files under test. 
Config = cuttlefish_unit:generate_templated_config( ["priv/erlang_vm.schema"], [], context()), cuttlefish_unit:assert_config(Config, "vm_args.-smp", enable), cuttlefish_unit:assert_config(Config, "vm_args.+W", "w"), cuttlefish_unit:assert_config(Config, "vm_args.+K", true), cuttlefish_unit:assert_not_configured(Config, "vm_args.+S"), cuttlefish_unit:assert_config(Config, "vm_args.-name", "node@host"), cuttlefish_unit:assert_config(Config, "vm_args.-setcookie", "erlang"), cuttlefish_unit:assert_config(Config, "vm_args.+A", 64), cuttlefish_unit:assert_config(Config, "vm_args.-env ERL_FULLSWEEP_AFTER", 0), cuttlefish_unit:assert_config(Config, "vm_args.-env ERL_CRASH_DUMP", "dump"), cuttlefish_unit:assert_config(Config, "vm_args.+P", 256000), cuttlefish_unit:assert_not_configured(Config, "vm_args.+zdbbl"), cuttlefish_unit:assert_not_configured(Config, "vm_args.+sfwi"), cuttlefish_unit:assert_not_configured(Config, "vm_args.+scl"), cuttlefish_unit:assert_not_configured(Config, "vm_args.+sub"), cuttlefish_unit:assert_not_configured(Config, "vm_args.-kernel net_ticktime"), cuttlefish_unit:assert_not_configured(Config, "kernel.inet_dist_listen_min"), cuttlefish_unit:assert_not_configured(Config, "kernel.inet_dist_listen_max"), case cuttlefish:otp("R16", erlang:system_info(otp_release)) of true -> cuttlefish_unit:assert_config(Config, "vm_args.+Q", 262144), cuttlefish_unit:assert_config(Config, "vm_args.+e", 256000); _ -> cuttlefish_unit:assert_config(Config, "vm_args.-env ERL_MAX_PORTS", 262144), cuttlefish_unit:assert_config(Config, "vm_args.-env ERL_MAX_ETS_TABLES", 256000) end, ok. override_schema_test() -> %% Conf represents the riak.conf file that would be read in by cuttlefish. 
%% this proplists is what would be output by the conf_parse module Conf = [ {["erlang", "smp"], "disable"}, {["erlang", "W"], "i"}, {["erlang", "K"], off}, {["erlang", "schedulers", "total"], 4}, {["erlang", "schedulers", "online"], 4}, {["nodename"], "mynode@myhost"}, {["distributed_cookie"], "riak"}, {["erlang", "async_threads"], 22}, {["erlang", "max_ports"], 32000}, {["erlang", "fullsweep_after"], 1}, {["erlang", "crash_dump"], "place"}, {["erlang", "max_ets_tables"], 128000}, {["erlang", "process_limit"], 128001}, {["erlang", "distribution_buffer_size"], 1024}, {["erlang", "schedulers", "force_wakeup_interval"], 500}, {["erlang", "schedulers", "compaction_of_load"], true}, {["erlang", "schedulers", "utilization_balancing"], false}, {["erlang", "distribution", "port_range", "minimum"], 6000}, {["erlang", "distribution", "port_range", "maximum"], 7999}, {["erlang", "distribution", "net_ticktime"], 43} ], Config = cuttlefish_unit:generate_templated_config( ["priv/erlang_vm.schema"], Conf, context()), cuttlefish_unit:assert_config(Config, "vm_args.-smp", disable), cuttlefish_unit:assert_config(Config, "vm_args.+W", "i"), cuttlefish_unit:assert_config(Config, "vm_args.+K", false), cuttlefish_unit:assert_config(Config, "vm_args.+S", "4:4"), cuttlefish_unit:assert_config(Config, "vm_args.-name", "mynode@myhost"), cuttlefish_unit:assert_config(Config, "vm_args.-setcookie", "riak"), cuttlefish_unit:assert_config(Config, "vm_args.+A", 22), cuttlefish_unit:assert_config(Config, "vm_args.-env ERL_FULLSWEEP_AFTER", 1), cuttlefish_unit:assert_config(Config, "vm_args.-env ERL_CRASH_DUMP", "place"), cuttlefish_unit:assert_config(Config, "vm_args.+P", 128001), cuttlefish_unit:assert_config(Config, "vm_args.+zdbbl", 1), cuttlefish_unit:assert_config(Config, "vm_args.+sfwi", 500), cuttlefish_unit:assert_config(Config, "vm_args.+scl", true), cuttlefish_unit:assert_config(Config, "vm_args.+sub", false), cuttlefish_unit:assert_config(Config, "kernel.inet_dist_listen_min", 6000), 
cuttlefish_unit:assert_config(Config, "kernel.inet_dist_listen_max", 7999), cuttlefish_unit:assert_config(Config, "vm_args.-kernel net_ticktime", 43), %% These settings are version dependent, so we won't even test them here %% because we don't know what version you're running, so we'll cover it %% in two tests below case cuttlefish:otp("R16", erlang:system_info(otp_release)) of true -> cuttlefish_unit:assert_config(Config, "vm_args.+Q", 32000), cuttlefish_unit:assert_config(Config, "vm_args.+e", 128000); _ -> cuttlefish_unit:assert_config(Config, "vm_args.-env ERL_MAX_PORTS", 32000), cuttlefish_unit:assert_config(Config, "vm_args.-env ERL_MAX_ETS_TABLES", 128000) end, ok. erlang_scheduler_test() -> Conf1 = [ {["erlang", "schedulers", "total"], 4}, {["erlang", "schedulers", "online"], 1} ], Config1 = cuttlefish_unit:generate_templated_config( ["priv/erlang_vm.schema"], Conf1, context()), cuttlefish_unit:assert_config(Config1, "vm_args.+S", "4:1"), Conf2 = [ {["erlang", "schedulers", "total"], 4} ], Config2 = cuttlefish_unit:generate_templated_config( ["priv/erlang_vm.schema"], Conf2, context()), cuttlefish_unit:assert_config(Config2, "vm_args.+S", "4"), Conf3 = [ {["erlang", "schedulers", "online"], 4} ], Config3 = cuttlefish_unit:generate_templated_config( ["priv/erlang_vm.schema"], Conf3, context()), cuttlefish_unit:assert_config(Config3, "vm_args.+S", ":4"), Config4 = cuttlefish_unit:generate_templated_config( ["priv/erlang_vm.schema"], [], context()), cuttlefish_unit:assert_not_configured(Config4, "vm_args.+S"), ok. 
%% @doc Covers the erlang.async_threads.stack_size setting (the +a emulator
%% flag). Valid values are bounded by a word-size-dependent range and must
%% be evenly divisible by the word size, so every fixture here is derived
%% from erlang:system_info({wordsize, external}).
async_threads_stack_size_test() ->
    WordSize = erlang:system_info({wordsize, external}),
    KiloWords = WordSize * 1024,
    %% Fixtures, all expressed through cuttlefish_bytesize so they match the
    %% schema's bytesize datatype.
    TooSmall = cuttlefish_bytesize:to_string(KiloWords * 10),
    TooLarge = cuttlefish_bytesize:to_string(KiloWords * 9000),
    Indivisible = cuttlefish_bytesize:to_string(KiloWords * 16 - 2),
    Correct = cuttlefish_bytesize:to_string(KiloWords * 32),
    %% Human-readable range bounds, used to build the expected error text.
    MinSize = cuttlefish_bytesize:to_string(KiloWords * 16),
    MaxSize = cuttlefish_bytesize:to_string(KiloWords * 8192),
    %% Raw +a value expected for Correct.
    CorrectRaw = 32,
    RangeError = "erlang.async_threads.stack_size invalid, "
                 "must be in the range of " ++ MinSize ++ " to " ++ MaxSize,
    %% Local helpers: render a conf through the schema, and build the
    %% single-setting conf proplist under test.
    Generate = fun(Conf) ->
                       cuttlefish_unit:generate_templated_config(
                         ["priv/erlang_vm.schema"], Conf, context())
               end,
    StackSize = fun(Value) ->
                        [{["erlang", "async_threads", "stack_size"], Value}]
                end,
    %% No setting at all: +a must stay unset.
    cuttlefish_unit:assert_not_configured(Generate([]), "vm_args.+a"),
    %% A valid size translates to the raw +a value.
    cuttlefish_unit:assert_config(Generate(StackSize(Correct)),
                                  "vm_args.+a", CorrectRaw),
    %% Out of range on either side: the same range error is reported.
    cuttlefish_unit:assert_error_message(Generate(StackSize(TooSmall)),
                                         RangeError),
    cuttlefish_unit:assert_error_message(Generate(StackSize(TooLarge)),
                                         RangeError),
    %% In range but not divisible by the word size: a distinct error.
    cuttlefish_unit:assert_error_message(
      Generate(StackSize(Indivisible)),
      "erlang.async_threads.stack_size invalid, must be divisible by "
          ++ integer_to_list(WordSize)),
    ok.

%% this context() represents the substitution variables that rebar
%% will use during the build process.
riak_core's schema file is %% written with some {{mustache_vars}} for substitution during %% packaging cuttlefish doesn't have a great time parsing those, so we %% perform the substitutions first, because that's how it would work %% in real life. context() -> [ {node, "node@host"}, {crash_dump, "dump"} ]. inet_dist_use_interface_test() -> InputConfig = "erlang.distribution.interface", GeneratedConfig = "kernel.inet_dist_use_interface", InputConfigPoint = string:tokens(InputConfig, "."), Pass =[ {"127.0.0.1",{127,0,0,1}}, {"0.0.0.0",{0,0,0,0}}, {"fe80:1200::1",{65152,4608,0,0,0,0,0,1}} ], Fail = [ "127.0.0.1:8080", "127.1", "fe80:1200::g", "Not an IP" ], lists:foreach(fun({Input, Expected}) -> Config = cuttlefish_unit:generate_templated_config( ["priv/erlang_vm.schema"], [{InputConfigPoint, Input}], context()), cuttlefish_unit:assert_config(Config, GeneratedConfig, Expected) end, Pass), lists:foreach(fun(Input) -> Config = cuttlefish_unit:generate_templated_config( ["priv/erlang_vm.schema"], [{InputConfigPoint, Input}], context()), cuttlefish_unit:assert_error_message(Config, InputConfig ++ " invalid, must be a valid IPv4 or IPv6 address") end, Fail). cuttlefish-3.0.1/test/dir with spaces/0000755000232200023220000000000014027401005020154 5ustar debalancedebalancecuttlefish-3.0.1/test/dir with spaces/value 30000644000232200023220000000000614027401005021332 0ustar debalancedebalance12.34 cuttlefish-3.0.1/test/multi_backend.schema0000644000232200023220000003721114027401005021172 0ustar debalancedebalance{mapping, "multi_backend.default", "riak_kv.multi_backend_default", [ {level, advanced} ]}. %% Riak KV config %% @doc Storage_backend specifies the Erlang module defining the storage %% mechanism that will be used on this node. {mapping, "multi_backend.$name.storage_backend", "riak_kv.multi_backend", [ {default, bitcask}, {datatype, {enum, [bitcask, leveldb, memory]}} ]}. 
{translation, "riak_kv.multi_backend", fun(Conf, Schema) -> %% group by $name into list, also cut the "multi_backend.$name" off every key BackendNames = cuttlefish_variable:fuzzy_matches(["multi_backend","$name","storage_backend"], Conf), %% for each in list, case statement on backend type [ begin BackendConfigName = ["multi_backend", Name], {BackendModule, BackendConfig} = case cuttlefish:conf_get(BackendConfigName ++ ["storage_backend"], Conf) of bitcask -> BackendConfigPrefix = BackendConfigName ++ ["bitcask"], SubConf = [ begin {Key -- BackendConfigName, Value} end || {Key, Value} <- cuttlefish_variable:filter_by_prefix(BackendConfigPrefix, Conf)], BackendProplist = cuttlefish_generator:map(Schema, SubConf), {riak_kv_bitcask_backend, proplists:get_value(bitcask, BackendProplist)}; leveldb -> BackendConfigPrefix = BackendConfigName ++ ["leveldb"], SubConf = [ begin {Key -- BackendConfigName, Value} end || {Key, Value} <- cuttlefish_variable:filter_by_prefix(BackendConfigPrefix, Conf)], BackendProplist = cuttlefish_generator:map(Schema, SubConf), {riak_kv_eleveldb_backend, proplists:get_value(eleveldb, BackendProplist)}; memory -> BackendConfigPrefix = BackendConfigName ++ ["memory_backend"], SubConf = [ begin {Key -- BackendConfigName, Value} end || {Key, Value} <- cuttlefish_variable:filter_by_prefix(BackendConfigPrefix, Conf)], BackendProplist = cuttlefish_generator:map(Schema, SubConf), {riak_kv_memory_backend, proplists:get_value(memory_backend, proplists:get_value(riak_kv, BackendProplist))}; _ -> oops_all_berries end, {list_to_binary(Name), BackendModule, BackendConfig} end || {"$name", Name} <- BackendNames] end }. %% @doc bitcask data root {mapping, "multi_backend.$name.bitcask.data_root", "riak_kv.multi_backend", [ {level, advanced} ]}. %% @doc The open_timeout setting specifies the maximum time Bitcask will %% block on startup while attempting to create or open the data directory. %% The value is in seconds and the default is 4. 
You generally need not %% change this value. If for some reason the timeout is exceeded on open %% you'll see a log message of the form: %% "Failed to start bitcask backend: .... " %% Only then should you consider a longer timeout. {mapping, "multi_backend.$name.bitcask.open_timeout", "riak_kv.multi_backend", [ {default, 4}, {datatype, integer}, {level, advanced} ]}. % @doc The `sync_strategy` setting changes the durability of writes by % specifying when to synchronize data to disk. The default setting protects % against data loss in the event of application failure (process death) but % leaves open a small window wherein data could be lost in the event of complete % system failure (e.g. hardware, O/S, power). % % The default mode, `none`, writes data into operating system buffers which % which will be written to the disks when those buffers are flushed by the % operating system. If the system fails (power loss, crash, etc.) before before % those buffers are flushed to stable storage that data is lost. % % This is prevented by the setting `o_sync` which forces the operating system to % flush to stable storage at every write. The effect of flushing each write is % better durability, however write throughput will suffer as each write will % have to wait for the write to complete. % % ___Available Sync Strategies___ % % * `none` - (default) Lets the operating system manage syncing writes. % * `o_sync` - Uses the O_SYNC flag which forces syncs on every write. % % * `interval` - Riak will force Bitcask to sync every `bitcask.sync_interval` % `seconds. {mapping, "multi_backend.$name.bitcask.sync_strategy", "riak_kv.multi_backend", [ {default, none}, {datatype, {enum, [none, o_sync, interval]}}, {level, advanced} ]}. {mapping, "multi_backend.$name.bitcask.sync_interval", "riak_kv.multi_backend", [ {datatype, {duration, s}}, {level, advanced} ]}. %% @doc The `max_file_size` setting describes the maximum permitted size for any %% single data file in the Bitcask directory. 
If a write causes the current %% file to exceed this size threshold then that file is closed, and a new file %% is opened for writes. {mapping, "multi_backend.$name.bitcask.max_file_size", "riak_kv.multi_backend", [ {default, "2GB"}, {datatype, bytesize}, {level, advanced} ]}. %% @doc The `merge_window` setting lets you specify when during the day merge %% operations are allowed to be triggered. Valid options are: %% %% * `always` (default) No restrictions %% * `never` Merge will never be attempted %% * `window` Hours during which merging is permitted, where %% `bitcask.merge_window.start` and %% `bitcask.merge_window.end` are integers between 0 and 23. %% %% If merging has a significant impact on performance of your cluster, or your %% cluster has quiet periods in which little storage activity occurs, you may %% want to change this setting from the default. {mapping, "multi_backend.$name.bitcask.merge_window", "riak_kv.multi_backend", [ {default, always}, {datatype, {enum, [always, never, window]}}, {level, advanced} ]}. {mapping, "multi_backend.$name.bitcask.merge_window.start", "riak_kv.multi_backend", [ {default, 0}, {datatype, integer}, {level, advanced} ]}. {mapping, "multi_backend.$name.bitcask.merge_window.end", "riak_kv.multi_backend", [ {default, 23}, {datatype, integer}, {level, advanced} ]}. %% @doc `frag_merge_trigger` setting describes what ratio of %% dead keys to total keys in a file will trigger merging. The value of this %% setting is a percentage (0-100). For example, if a data file contains 6 %% dead keys and 4 live keys, then merge will be triggered at the default %% setting. Increasing this value will cause merging to occur less often, %% whereas decreasing the value will cause merging to happen more often. %% %% Default is: `60` {mapping, "multi_backend.$name.bitcask.frag_merge_trigger", "riak_kv.multi_backend", [ {datatype, integer}, {level, advanced}, {default, 60} ]}. 
%% @doc `dead_bytes_merge_trigger` setting describes how much %% data stored for dead keys in a single file will trigger merging. The %% value is in bytes. If a file meets or exceeds the trigger value for dead %% bytes, merge will be triggered. Increasing the value will cause merging %% to occur less often, whereas decreasing the value will cause merging to %% happen more often. %% %% When either of these constraints are met by any file in the directory, %% Bitcask will attempt to merge files. %% %% Default is: 512mb in bytes {mapping, "multi_backend.$name.bitcask.dead_bytes_merge_trigger", "riak_kv.multi_backend", [ {datatype, bytesize}, {level, advanced}, {default, "512MB"} ]}. %% @doc `frag_threshold` setting describes what ratio of %% dead keys to total keys in a file will cause it to be included in the %% merge. The value of this setting is a percentage (0-100). For example, %% if a data file contains 4 dead keys and 6 live keys, it will be included %% in the merge at the default ratio. Increasing the value will cause fewer %% files to be merged, decreasing the value will cause more files to be %% merged. %% %% Default is: `40` {mapping, "multi_backend.$name.bitcask.frag_threshold", "riak_kv.multi_backend", [ {datatype, integer}, {level, advanced}, {default, 40} ]}. %% @doc `dead_bytes_threshold` setting describes the minimum %% amount of data occupied by dead keys in a file to cause it to be included %% in the merge. Increasing the value will cause fewer files to be merged, %% decreasing the value will cause more files to be merged. %% %% Default is: 128mb in bytes {mapping, "multi_backend.$name.bitcask.dead_bytes_threshold", "riak_kv.multi_backend", [ {datatype, bytesize}, {level, advanced}, {default, "128MB"} ]}. %% @doc `small_file_threshold` setting describes the minimum %% size a file must have to be _excluded_ from the merge. Files smaller %% than the threshold will be included. 
Increasing the value will cause %% _more_ files to be merged, decreasing the value will cause _fewer_ files %% to be merged. %% %% Default is: 10mb in bytes {mapping, "multi_backend.$name.bitcask.small_file_threshold", "riak_kv.multi_backend", [ {datatype, bytesize}, {level, advanced}, {default, "10MB"} ]}. %% @doc Fold keys thresholds will reuse the keydir if another fold was started less %% than `max_fold_age` ago and there were less than `max_fold_puts` updates. %% Otherwise it will wait until all current fold keys complete and then start. %% Set either option to -1 to disable. %% Age in micro seconds (-1 means "unlimited") {mapping, "multi_backend.$name.bitcask.max_fold_age", "riak_kv.multi_backend", [ {datatype, integer}, {level, advanced}, {default, -1} ]}. {mapping, "multi_backend.$name.bitcask.max_fold_puts", "riak_kv.multi_backend", [ {datatype, integer}, {level, advanced}, {default, 0} ]}. %% @doc By default, Bitcask keeps all of your data around. If your data has %% limited time-value, or if for space reasons you need to purge data, you can %% set the `expiry_secs` option. If you needed to purge data automatically %% after 1 day, set the value to `1d`. %% %% Default is: `-1` which disables automatic expiration {mapping, "multi_backend.$name.bitcask.expiry", "riak_kv.multi_backend", [ {datatype, {duration, s}}, {level, advanced}, {default, -1} ]}. %% @doc Require the CRC to be present at the end of hintfiles. %% Bitcask defaults to a backward compatible mode where %% old hint files will still be accepted without them. %% It is safe to set this true for new deployments and will %% become the default setting in a future release. {mapping, "multi_backend.$name.bitcask.require_hint_crc", "riak_kv.multi_backend", [ {default, true}, {datatype, {enum, [true, false]}}, {level, advanced} ]}. %% By default, Bitcask will trigger a merge whenever a data file contains %% an expired key. This may result in excessive merging under some usage %% patterns. 
To prevent this you can set the `expiry_grace_time` option. %% Bitcask will defer triggering a merge solely for key expiry by the %% configured number of seconds. Setting this to `1h` effectively limits %% each cask to merging for expiry once per hour. %% %% Default is: `0` {mapping, "multi_backend.$name.bitcask.expiry_grace_time", "riak_kv.multi_backend", [ {datatype, {duration, s}}, {level, advanced}, {default, 0} ]}. %% @doc Configure how Bitcask writes data to disk. %% erlang: Erlang's built-in file API %% nif: Direct calls to the POSIX C API %% %% The NIF mode provides higher throughput for certain %% workloads, but has the potential to negatively impact %% the Erlang VM, leading to higher worst-case latencies %% and possible throughput collapse. {mapping, "multi_backend.$name.bitcask.io_mode", "riak_kv.multi_backend", [ {default, erlang}, {datatype, {enum, [erlang, nif]}} ]}. %%%% This is the leveldb section %% @doc leveldb data_root {mapping, "multi_backend.$name.leveldb.data_root", "riak_kv.multi_backend", [ {default, "./data/leveldb"} ]}. %% @doc The `max_open_files` value is multiplied by 4 megabytes to create a %% file cache. The file cache may end up holding more or fewer files at any %% given moment due to variations in file metadata size. `max_open_files` %% applies to a single vnode, not to the entire server. {mapping, "multi_backend.$name.leveldb.max_open_files", "riak_kv.multi_backend", [ {datatype, integer}, {default, 30}, {level, advanced} ]}. %% @doc The cache_size determines the size of each vnode's block cache. The %% block cache holds data blocks that leveldb has recently retrieved from %% `.sst` table files. Any given block contains one or more complete key/value %% pairs. The cache speeds up repeat access to the same key and potential %% access to adjacent keys. {mapping, "multi_backend.$name.leveldb.cache_size", "riak_kv.multi_backend", [ {datatype, bytesize}, {default, "8MB"}, {level, advanced} ]}. 
%% @doc The 'sync' parameter defines how new key/value data is placed in the %% recovery log. The recovery log is only used if the Riak program crashes or %% the server loses power unexpectedly. The parameter's original intent was %% to guarantee that each new key / value was written to the physical disk %% before leveldb responded with “write goodâ€. The reality in modern servers %% is that many layers of data caching exist between the database program and %% the physical disks. This flag influences only one of the layers. {mapping, "multi_backend.$name.leveldb.sync", "riak_kv.multi_backend", [ {default, false}, {datatype, {enum, [true, false]}}, {level, advanced} ]}. %% @doc Each vnode first stores new key/value data in a memory based write %% buffer. This write buffer is in parallel to the recovery log mentioned %% in the “sync†parameter. Riak creates each vnode with a randomly sized %% write buffer for performance reasons. The random size is somewhere %% between write_buffer_size_min and write_buffer_size_max. {mapping, "multi_backend.$name.leveldb.write_buffer_size_min", "riak_kv.multi_backend", [ {default, "15MB"}, {datatype, bytesize}, {level, advanced} ]}. {mapping, "multi_backend.$name.leveldb.write_buffer_size_max", "riak_kv.multi_backend", [ {default, "30MB"}, {datatype, bytesize}, {level, advanced} ]}. %% @doc Each database .sst table file can include an optional "bloom filter" %% that is highly effective in shortcutting data queries that are destined %% to not find the requested key. The bloom_filter typically increases the %% size of an .sst table file by about 2%. This option must be set to true %% in the riak.conf to take effect. {mapping, "multi_backend.$name.leveldb.bloomfilter", "riak_kv.multi_backend", [ {default, on}, {datatype, {enum, [on, off]}} ]}. %% @doc sst_block_size defines the size threshold for a block / chunk of data %% within one .sst table file. Each new block gets an index entry in the .sst %% table file's master index. 
{mapping, "multi_backend.$name.leveldb.block_size", "riak_kv.multi_backend", [ {default, "4KB"}, {datatype, bytesize}, {level, advanced} ]}. %% @doc block_restart_interval defines the key count threshold for a new key %% entry in the key index for a block. %% Most clients should leave this parameter alone. {mapping, "multi_backend.$name.leveldb.block_restart_interval", "riak_kv.multi_backend", [ {default, 16}, {datatype, integer}, {level, advanced} ]}. %% @doc verify_checksums controls whether or not validation occurs when Riak %% requests data from the leveldb database on behalf of the user. {mapping, "multi_backend.$name.leveldb.verify_checksums", "riak_kv.multi_backend", [ {default, true}, {datatype, {enum, [true, false]}}, {level, advanced} ]}. %% @doc verify_compaction controls whether or not validation occurs when %% leveldb reads data as part of its background compaction operations. {mapping, "multi_backend.$name.leveldb.verify_compaction", "riak_kv.multi_backend", [ {default, true}, {datatype, {enum, [true, false]}}, {level, advanced} ]}. %%%% Memory backend section {mapping, "multi_backend.$name.memory_backend.max_memory", "riak_kv.multi_backend", [ {datatype, bytesize}, {default, "4GB"}, {level, advanced} ]}. {mapping, "multi_backend.$name.memory_backend.ttl", "riak_kv.multi_backend", [ {datatype, {duration, s}}, {commented, "1d"}, %% no default, it's undefined. {level, advanced} ]}. cuttlefish-3.0.1/test/cuttlefish_nested_schema_test.erl0000644000232200023220000000373514027401005024012 0ustar debalancedebalance-module(cuttlefish_nested_schema_test). -include_lib("eunit/include/eunit.hrl"). nested_schema_test() -> Conf = [ {["thing", "a"], foo}, {["nested", "thing1", "type"], "thing"}, {["nested", "thing1", "thing", "a"], 0}, {["nested", "thing2", "type"], "thing"} ], Config = cuttlefish_generator:map(schema(), Conf), ?assertEqual( [{thing, [{a, foo}]}, {nested_things, [{thing, [{a, 0}]}, {thing, [{a, 5}]}]}], Config ), ok. 
schema() -> Mappings = [ {mapping, "thing.a", "thing.a", [ {datatype, [{atom, foo}, integer]}, {default, 5} ]}, {mapping, "nested.$name.type", "nested_things", [ {datatype, {enum, [thing]}} ]}, {mapping, "nested.$name.thing.a", "nested_things", [ {datatype, [{atom, foo}, integer]}, {default, 5} ]} ], Translations = [ {translation, "nested_things", fun(Conf, Schema) -> NestedNames = cuttlefish_variable:fuzzy_matches(["nested","$name","type"], Conf), Things = [ begin ConfigName = ["nested", Name], Prefix = ConfigName ++ ["thing"], SubConf = [ begin {Key -- ConfigName, Value} end || {Key, Value} <- cuttlefish_variable:filter_by_prefix(Prefix, Conf)], Proplist = cuttlefish_generator:map(Schema, SubConf), case cuttlefish_error:is_error(Proplist) of true -> cuttlefish:invalid("gtfo"); _ -> {thing, proplists:get_value(thing, Proplist)} end end|| {"$name", Name} <- NestedNames], case Things of [] -> cuttlefish:unset(); _ -> Things end end} ], { [ cuttlefish_translation:parse(T) || T <- Translations], [ cuttlefish_mapping:parse(M) || M <- Mappings], []}. cuttlefish-3.0.1/test/cuttlefish_test_group_leader.erl0000644000232200023220000001001114027401005023641 0ustar debalancedebalance%% ------------------------------------------------------------------- %% %% Copyright (c) 2012 Basho Technologies, Inc. %% %% This file is provided to you under the Apache License, %% Version 2.0 (the "License"); you may not use this file %% except in compliance with the License. You may obtain %% a copy of the License at %% %% http://www.apache.org/licenses/LICENSE-2.0 %% %% Unless required by applicable law or agreed to in writing, %% software distributed under the License is distributed on an %% "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY %% KIND, either express or implied. See the License for the %% specific language governing permissions and limitations %% under the License. 
%%
%% -------------------------------------------------------------------

%% @doc A replacement group leader used by the test suite to capture io
%% output produced while tests run. A runner installs one of these as its
%% group leader, lets the code under test print, then pulls the captured
%% chunks back out with get_output/0.
-module(cuttlefish_test_group_leader).

-export([new_group_leader/1, group_leader_loop/2, tidy_up/1, get_output/0]).
-compile(nowarn_deprecated_function).

%% @doc spawns the new group leader, linked to the caller. The capture
%% loop starts with an empty queue as its output accumulator.
new_group_leader(Runner) ->
    spawn_link(?MODULE, group_leader_loop, [Runner, queue:new()]).

%% @doc listens for io_requests, and pipes them into an array (a queue of
%% captured chunks). Also answers {get_output, Ref, From} with everything
%% collected so far, and terminates on the 'stab' message sent by
%% tidy_up/1. Any other message is silently discarded.
group_leader_loop(Runner, Output) ->
    receive
        {io_request, From, ReplyAs, Req} ->
            P = process_flag(priority, normal),
            %% run this part under normal priority always
            NewOutput = io_request(From, ReplyAs, Req, Output),
            process_flag(priority, P),
            group_leader_loop(Runner, NewOutput);
        {get_output, Ref, From} ->
            From ! {Ref, queue:to_list(Output)},
            group_leader_loop(Runner, Output);
        stab ->
            %% stop recursing; the return value is never used
            kthxbye;
        _ ->
            %% discard any other messages
            group_leader_loop(Runner, Output)
    end.

%% @doc closes group leader down: restores the former group leader for the
%% calling process, then tells the capturing process to stop.
tidy_up(FormerGroupLeader) ->
    GroupLeaderToMurder = group_leader(),
    group_leader(FormerGroupLeader, self()),
    GroupLeaderToMurder ! stab.

%% @doc Retrieves the io output from the group leader as a flat list of
%% captured chunks. Returns {ok, Output}, or 'error' if the group leader
%% does not answer within one second.
get_output() ->
    GL = group_leader(),
    Ref = make_ref(),
    GL ! {get_output, Ref, self()},
    receive
        {Ref, Output} -> {ok, Output}
    after 1000 ->
        error
    end.

%% Processes an io_request, sends the protocol reply back to the
%% requester, and returns the (possibly extended) output queue.
io_request(From, ReplyAs, Req, Output) ->
    {Reply, NewOutput} = io_request(Req, Output),
    io_reply(From, ReplyAs, Reply),
    NewOutput.

%% sends a reply back to the sending process
io_reply(From, ReplyAs, Reply) ->
    From ! {io_reply, ReplyAs, Reply}.

%% If we're processing io:put_chars, Chars shows up as binary
io_request({put_chars, Chars}, Output) when is_binary(Chars); is_list(Chars) ->
    {ok, queue:in(Chars, Output)};
%% put_chars given as {M, F, As}: evaluate the call to obtain the chars;
%% a crash in the callback is reported instead of killing the leader.
io_request({put_chars, M, F, As}, Output) ->
    try apply(M, F, As) of
        Chars ->
            {ok, queue:in(Chars, Output)}
    catch
        C:T:Trace ->
            {{error, {C, T, Trace}}, Output}
    end;
%% Encoding-tagged variants delegate to the plain forms above.
io_request({put_chars, _Enc, Chars}, Output) ->
    io_request({put_chars, Chars}, Output);
io_request({put_chars, _Enc, Mod, Func, Args}, Output) ->
    io_request({put_chars, Mod, Func, Args}, Output);
%% The rest of these functions just handle expected messages from
%% the io module. They're mostly i, but we only care about o.
io_request({get_chars, _Enc, _Prompt, _N}, O) -> {eof, O};
io_request({get_chars, _Prompt, _N}, O) -> {eof, O};
io_request({get_line, _Prompt}, O) -> {eof, O};
io_request({get_line, _Enc, _Prompt}, O) -> {eof, O};
io_request({get_until, _Prompt, _M, _F, _As}, O) -> {eof, O};
io_request({setopts, _Opts}, O) -> {ok, O};
io_request(getopts, O) -> {{error, enotsup}, O};
io_request({get_geometry,columns}, O) -> {{error, enotsup}, O};
io_request({get_geometry,rows}, O) -> {{error, enotsup}, O};
%% A batch of requests: thread the output queue through every request and
%% reply with the result of the last one, per the Erlang I/O protocol.
%% FIX: previously this returned {io_requests(Reqs, ok, O), O}, which
%% discarded the updated queue (output captured inside a batch was lost)
%% and wrapped the protocol reply in an extra tuple.
io_request({requests, Reqs}, O) ->
    io_requests(Reqs, ok, O);
io_request(_, O) -> {{error, request}, O}.

%% Folds a list of requests through io_request/2 while the running result
%% stays 'ok'; stops at the first non-ok result.
io_requests([R | Rs], ok, Output) ->
    {Result, NewOutput} = io_request(R, Output),
    io_requests(Rs, Result, NewOutput);
io_requests(_, Result, Output) ->
    {Result, Output}.
cuttlefish-3.0.1/README.md0000644000232200023220000001172014027401005015504 0ustar debalancedebalance# Cuttlefish [![GitHub Actions Status](https://github.com/Kyorai/cuttlefish/workflows/CI/badge.svg)](https://github.com/Kyorai/cuttlefish/actions) [![Travis CI Build Status](https://travis-ci.org/Kyorai/cuttlefish.svg?branch=master)](https://travis-ci.org/Kyorai/cuttlefish) [![Coverage Status](https://coveralls.io/repos/github/Kyorai/cuttlefish/badge.svg?branch=master)](https://coveralls.io/github/Kyorai/cuttlefish) [![Hex version](https://img.shields.io/hexpm/v/cuttlefish.svg "Hex version")](https://hex.pm/packages/cuttlefish) Cuttlefish is a library for Erlang applications that wish to walk the fine line between Erlang `app.config`s and a sysctl-like syntax. The name is a pun on the pronunciation of 'sysctl' and jokes are better explained. This repository retains full history of the original repository, [basho/cuttlefish/](https://github.com/basho/cuttlefish/), but intentionally cut ties with that repo to avoid confusion as to where is the most up-to-date, maintained version is. This is the repository used to produce [Hex.pm releases](https://hex.pm/packages/cuttlefish) of the project. ## Supported Erlang/OTP Versions * Cuttlefish 2.7.0 and later versions support Erlang 22 through 24 * Cuttlefish releases up to and including 2.6.0 support Erlang/OTP 17 through 23 ## Riak Disclaimer While this readme and test suite is Riak-heavy, the fact is that this library can be used with any Erlang application that wants a more universally accessible configuration syntax. Still, I built this for Riak, and it's nice to have a concrete example to work with. ## The Vision Currently, Riak's `app.config` is **the** definitive place for configuring Riak. It's not odd for Erlang applications to be configured this way, but it is a struggle for non-Erlang programmers and automated deployment tools to manipulate these files. 
On the other hand, the `app.config` is a useful construct for Erlang
programmers, and it is pretty coupled to OTP applications. Cuttlefish's
goal is to put a layer of abstraction on top of the `app.config` that is
easier to work with outside of the Erlang world.

It will allow Erlang programmers to write a schema for their
application's configuration file, which is independent of the
applications included in the project. The schema is one of the more
important parts of Cuttlefish, so we'll go into more detail on it below,
but it is written in Erlang and defines how the non-Erlang configuration
file works.

From this schema, you can generate a default `.conf` file for your
application. This will be the file that is packaged with your
application as the default configuration. The schema is also used to
generate an `app.config` that will be used to start your application.
Using the schema alone will generate all the proper defaults. Your users
can make changes to the `.conf` file and those changes will overwrite
the schema's defaults. You can also have an `advanced.config` which
looks like the old `app.config` for anything that no schema mapping is
created for.

What does this look like for an application like Riak? Well, the authors
of Riak maintain a schema for Riak's config. It defines all sorts of
things we'll get into later. When we build Riak, Cuttlefish generates a
`riak.conf` file that contains the default shipping configuration of
Riak. When a script to start Riak is run, a Cuttlefish escript is spun
up, reads the `riak.conf` file and combines that with the Schema to
generate an `app.config`. The script then exits, and a new Erlang VM
(destined to run Riak) is started with that generated `app.config`.

Down the line somewhere, you may be troubleshooting some part of Riak,
and the support organization at Basho may need you to manipulate a
configuration setting that is not exposed
In that case, we can set that setting directly in an `advanced.config` which sits in the same directory as `riak.conf`. I hope that gives you a good idea about how this works at a high level. ## What's it look like to Erlang Developers? You can learn more about the technical implementation of schemas at: https://github.com/basho/cuttlefish/wiki/Cuttlefish-for-Erlang-Developers ## What's it look like to users? Riak uses the semantic of `$conf_dir/app.config` for configuration. We're going to replace that with a file called `riak.conf`, with a syntax that looks like this: ```ini ring_size = 32 anti_entropy = debug log.error.file = /var/log/error.log log.console.file = /var/log/console.log log.syslog = on ``` More information for users here: https://github.com/basho/cuttlefish/wiki/Cuttlefish-for-Application-Users ## What's it look like to application packagers? * [node_package](https://github.com/basho/cuttlefish/wiki/Cuttlefish-for-node_package-users) * [non node_package](https://github.com/basho/cuttlefish/wiki/Cuttlefish-for-non-node_package-users) ## Current Status Cuttlefish is ready for production deployments. ## Re-generating parser ``` rebar3 as dev neotoma ``` Please see the *NOTE* in `src/conf_parse.peg` as well. cuttlefish-3.0.1/LICENSE0000644000232200023220000002613614027401005015241 0ustar debalancedebalance Apache License Version 2.0, January 2004 http://www.apache.org/licenses/ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 1. Definitions. "License" shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document. "Licensor" shall mean the copyright owner or entity authorized by the copyright owner that is granting the License. "Legal Entity" shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. 
For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity. "You" (or "Your") shall mean an individual or Legal Entity exercising permissions granted by this License. "Source" form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files. "Object" form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types. "Work" shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below). "Derivative Works" shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof. "Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. 
For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as "Not a Contribution." "Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work. 2. Grant of Copyright License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form. 3. Grant of Patent License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. 
If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed. 4. Redistribution. You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions: (a) You must give any other recipients of the Work or Derivative Works a copy of this License; and (b) You must cause any modified files to carry prominent notices stating that You changed the files; and (c) You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and (d) If the Work includes a "NOTICE" text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. 
You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License. You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License. 5. Submission of Contributions. Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions. 6. Trademarks. This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file. 7. Disclaimer of Warranty. Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License. 8. Limitation of Liability. 
In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages. 9. Accepting Warranty or Additional Liability. While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability. END OF TERMS AND CONDITIONS APPENDIX: How to apply the Apache License to your work. To apply the Apache License to your work, attach the following boilerplate notice, with the fields enclosed by brackets "[]" replaced with your own identifying information. (Don't include the brackets!) The text should be enclosed in the appropriate comment syntax for the file format. We also recommend that a file or class name and description of purpose be included on the same "printed page" as the copyright notice for easier identification within third-party archives. 
Copyright [yyyy] [name of copyright owner] Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. cuttlefish-3.0.1/appveyor.yml0000644000232200023220000000034014027401005016611 0ustar debalancedebalancebuild: off # Erlang 22.3 is only available in this image image: "Visual Studio 2019" install: - curl -OL https://s3.amazonaws.com/rebar3/rebar3 test_script: - escript ./rebar3 do compile,eunit,dialyzer deploy: false cuttlefish-3.0.1/.gitattributes0000644000232200023220000000012514027401005017115 0ustar debalancedebalancetest/value* text eol=lf "test/dir with spaces/value 3" text eol=lf cuttlefish-3.0.1/rebar.config0000644000232200023220000000117314027401005016510 0ustar debalancedebalance{minimum_otp_vsn, "21.3"}. {project_plugins, [rebar3_hex]}. {erl_opts, [warnings_as_errors, debug_info, warn_untyped_record]}. {deps, [getopt]}. {escript_emu_args, "%%! -escript main cuttlefish_escript +S 1 +A 0\n"}. {escript_incl_apps, [getopt, cuttlefish]}. {escript_main_app, cuttlefish}. {provider_hooks, [{post, [{compile, {default, escriptize}}]}]}. {eunit_opts, [verbose]}. {cover_enabled, true}. {cover_print_enabled, true}. {cover_export_enabled, true}. {profiles, [{dev, [{deps, [neotoma]}, {plugins, [rebar3_neotoma_plugin]}]}, {test, [{deps, [bbmustache]}]}]}. 
cuttlefish-3.0.1/test_fixtures/0000755000232200023220000000000014027401005017134 5ustar debalancedebalancecuttlefish-3.0.1/test_fixtures/acformat/0000755000232200023220000000000014027401005020730 5ustar debalancedebalancecuttlefish-3.0.1/test_fixtures/acformat/lib/0000755000232200023220000000000014027401005021476 5ustar debalancedebalancecuttlefish-3.0.1/test_fixtures/acformat/lib/01-setting.schema0000644000232200023220000000004514027401005024552 0ustar debalancedebalance{mapping, "setting", "setting", []}. cuttlefish-3.0.1/test_fixtures/acformat/etc/0000755000232200023220000000000014027401005021503 5ustar debalancedebalancecuttlefish-3.0.1/test_fixtures/acformat/etc/advanced.config0000644000232200023220000000001114027401005024427 0ustar debalancedebalance[a]. [b].cuttlefish-3.0.1/test_fixtures/acformat/etc/acformat.conf0000644000232200023220000000002014027401005024136 0ustar debalancedebalancesetting = thing cuttlefish-3.0.1/test_fixtures/escript_utf8_test/0000755000232200023220000000000014027401005022612 5ustar debalancedebalancecuttlefish-3.0.1/test_fixtures/escript_utf8_test/lib/0000755000232200023220000000000014027401005023360 5ustar debalancedebalancecuttlefish-3.0.1/test_fixtures/escript_utf8_test/lib/01-setting.schema0000644000232200023220000000004514027401005026434 0ustar debalancedebalance{mapping, "setting", "setting", []}. 
cuttlefish-3.0.1/test_fixtures/escript_utf8_test/etc/0000755000232200023220000000000014027401005023365 5ustar debalancedebalancecuttlefish-3.0.1/test_fixtures/escript_utf8_test/etc/utf8.conf0000644000232200023220000000002214027401005025114 0ustar debalancedebalancesetting = thingÅ’ cuttlefish-3.0.1/test_fixtures/escript_prune_test/0000755000232200023220000000000014027401005023055 5ustar debalancedebalancecuttlefish-3.0.1/test_fixtures/escript_prune_test/lib/0000755000232200023220000000000014027401005023623 5ustar debalancedebalancecuttlefish-3.0.1/test_fixtures/escript_prune_test/lib/basic.schema0000644000232200023220000000015214027401005026064 0ustar debalancedebalance{mapping, "a.b", "a.b", [ {default, "c"} ]}. {mapping, "a.c", "vm_args.+B", [ {default, "c"} ]}. cuttlefish-3.0.1/rebar.lock0000644000232200023220000000042414027401005016171 0ustar debalancedebalance{"1.2.0", [{<<"getopt">>,{pkg,<<"getopt">>,<<"1.0.2">>},0}]}. [ {pkg_hash,[ {<<"getopt">>, <<"33D9B44289FE7AD08627DDFE1D798E30B2DA0033B51DA1B3A2D64E72CD581D02">>}]}, {pkg_hash_ext,[ {<<"getopt">>, <<"A0029AEA4322FB82A61F6876A6D9C66DC9878B6CB61FAA13DF3187384FD4EA26">>}]} ]. cuttlefish-3.0.1/rebar.config.script0000644000232200023220000000070314027401005020011 0ustar debalancedebalance%% vim:ft=erlang: case os:getenv("TRAVIS_JOB_ID") of false -> CONFIG; JobId -> %% coveralls.io. [{plugins, [{coveralls, {git, "https://github.com/markusn/coveralls-erl", {branch, "master"}}}]} ,{coveralls_coverdata, "_build/test/cover/eunit.coverdata"} ,{coveralls_service_name, "travis-ci"} ,{coveralls_service_job_id, JobId} |CONFIG ] end. cuttlefish-3.0.1/ChangeLog.md0000644000232200023220000000173014027401005016376 0ustar debalancedebalance# Cuttlefish Change Log ## Next version (in development) No changes yet. ## 3.0.1 (Mar 26, 2021) * Fix `$(< filename)` include directive parsing (#25). ## 3.0.0 (Mar 13, 2021) ### OTP Logger Instead of Lager The library now uses standard OTP logger instead of Lager. 
Log entries use a very similar format but the timestamp formatting has changed. In older releases with Lager: ``` 15:12:26.054 [info] No app.config or vm.args detected in /etc, activating cuttlefish ``` In 3.0, on a cutting edge Erlang version: ``` 2021-03-08T15:14:55.963768+03:00 [info] No app.config or vm.args detected in /etc, activating cuttlefish ``` GitHub issue: [#19](https://github.com/Kyorai/cuttlefish/issues/19). ## 2.7.0 (Mar 7, 2021) ### Erlang 24 Support Cuttlefish is now compatible (builds, runs) with Erlang 24. ### Older Erlang Releases Support Dropped Cuttlefish now supports Erlang 22 through 24. `2.6.0` was the last release to support older Erlang versions, e.g. 18 or 19. cuttlefish-3.0.1/src/0000755000232200023220000000000014027401005015013 5ustar debalancedebalancecuttlefish-3.0.1/src/cuttlefish_util.erl0000644000232200023220000001442114027401005020730 0ustar debalancedebalance%% ------------------------------------------------------------------- %% %% cuttlefish_util: various cuttlefish utility functions %% %% Copyright (c) 2013 Basho Technologies, Inc. All Rights Reserved. %% %% This file is provided to you under the Apache License, %% Version 2.0 (the "License"); you may not use this file %% except in compliance with the License. You may obtain %% a copy of the License at %% %% http://www.apache.org/licenses/LICENSE-2.0 %% %% Unless required by applicable law or agreed to in writing, %% software distributed under the License is distributed on an %% "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY %% KIND, either express or implied. See the License for the %% specific language governing permissions and limitations %% under the License. %% %% ------------------------------------------------------------------- -module(cuttlefish_util). -include_lib("kernel/include/logger.hrl"). -ifdef(TEST). -include_lib("eunit/include/eunit.hrl"). -endif. -export([ replace_proplist_value/3, numerify/1, ceiling/1, levenshtein/2]). 
%% Legacy API -export([ conf_get_value/2, conf_get_value/3, filter_by_variable_starts_with/2, matches_for_variable_def/2, fuzzy_variable_match/2 ]). %% @deprecated conf_get_value(Key, Conf) -> _ = ?LOG_WARNING("cuttlefish_util:conf_get_value/2 has been deprecated. use cuttlefish:conf_get/2"), cuttlefish:conf_get(Key, Conf). %% @deprecated conf_get_value(Key, Conf, Default) -> _ = ?LOG_WARNING("cuttlefish_util:conf_get_value/3 has been deprecated. use cuttlefish:conf_get/3"), cuttlefish:conf_get(Key, Conf, Default). %% @deprecated filter_by_variable_starts_with(Prefix, Conf) -> _ = ?LOG_WARNING("cuttlefish_util:filter_by_variable_starts_with/2 has been deprecated. use cuttlefish_variable:filter_by_prefix/2"), cuttlefish_variable:filter_by_prefix(Prefix, Conf). %% @deprecated matches_for_variable_def(VarDef, Conf) -> _ = ?LOG_WARNING("cuttlefish_util:matches_for_variable_def/2 has been deprecated. use cuttlefish_variable:fuzzy_matches/2"), cuttlefish_variable:fuzzy_matches(VarDef, Conf). %% @deprecated fuzzy_variable_match(Var, VarDef) -> _ = ?LOG_WARNING("cuttlefish_util:fuzzy_variable_match/2 has been deprecated. use cuttlefish_variable:is_fuzzy_match/2"), cuttlefish_variable:is_fuzzy_match(Var, VarDef). %% @doc replace the element in a proplist -spec replace_proplist_value(atom() | string(), any(), [{string(), any()}]) -> [{string(), any()}]. replace_proplist_value(Key, Value, Proplist) -> lists:keystore(Key, 1, Proplist, {Key, Value}). %% @doc Turn a string into a number if `list_to_float' or %% `list_to_integer' accept it as valid -spec numerify(string()) -> integer()|float()|cuttlefish_error:error(). numerify([$.|_]=Num) -> numerify([$0|Num]); numerify(String) -> try list_to_float(String) of Float -> Float catch _:_ -> try list_to_integer(String) of Int -> Int catch _:_ -> {error, {number_parse, String}} end end. %% @doc remember when you learned about decimal places. about a minute %% later, you learned about rounding up and down. This is rounding up. 
-spec ceiling(float()) -> integer(). ceiling(X) -> T = erlang:trunc(X), case (X - T) of Neg when Neg < 0 -> T; Pos when Pos > 0 -> T + 1; _ -> T end. %% Levenshtein code by Adam Lindberg, Fredrik Svensson via %% http://www.trapexit.org/String_similar_to_(Levenshtein) %% %%------------------------------------------------------------------------------ %% @spec levenshtein(StringA :: string(), StringB :: string()) -> integer() %% @doc Calculates the Levenshtein distance between two strings %% @end %%------------------------------------------------------------------------------ levenshtein(Samestring, Samestring) -> 0; levenshtein(String, []) -> length(String); levenshtein([], String) -> length(String); levenshtein(Source, Target) -> levenshtein_rec(Source, Target, lists:seq(0, length(Target)), 1). %% Recurses over every character in the source string and calculates a list of distances levenshtein_rec([SrcHead|SrcTail], Target, DistList, Step) -> levenshtein_rec(SrcTail, Target, levenshtein_distlist(Target, DistList, SrcHead, [Step], Step), Step + 1); levenshtein_rec([], _, DistList, _) -> lists:last(DistList). %% Generates a distance list with distance values for every character in the target string levenshtein_distlist([TargetHead|TargetTail], [DLH|DLT], SourceChar, NewDistList, LastDist) when length(DLT) > 0 -> Min = lists:min([LastDist + 1, hd(DLT) + 1, DLH + dif(TargetHead, SourceChar)]), levenshtein_distlist(TargetTail, DLT, SourceChar, NewDistList ++ [Min], Min); levenshtein_distlist([], _, _, NewDistList, _) -> NewDistList. % Calculates the difference between two characters or other values dif(C, C) -> 0; dif(_, _) -> 1. -ifdef(TEST). replace_proplist_value_test() -> Proplist = [ {"test1", 1}, {"test2", 2}, {"test3", 3} ], NewProplist = replace_proplist_value("test2", 8, Proplist), ?assertEqual( 8, proplists:get_value("test2", NewProplist) ), ok. 
replace_proplist_value_when_undefined_test() -> Proplist = [ {"test1", 1}, {"test2", 2} ], NewProplist = replace_proplist_value("test3", 3, Proplist), ?assertEqual( 3, proplists:get_value("test3", NewProplist) ), ok. levenshtein_test() -> ?assertEqual(0, levenshtein("X", "X")), ?assertEqual(1, levenshtein("X", "XX")), ?assertEqual(1, levenshtein("penguin", "penguino")), ?assertEqual(1, levenshtein("dtrace", "ctrace")), ?assertEqual(5, levenshtein("anti_entropy", "anti_entropy.tick")), ?assertEqual(1, levenshtein("anti_entropy", "anti-entropy")), ?assertEqual(4, levenshtein("", "four")), ?assertEqual(4, levenshtein("four", "")), ok. ceiling_test() -> ?assertEqual(9, ceiling(8.99999)), ?assertEqual(9, ceiling(8.00001)), ?assertEqual(9, ceiling(9.0)), ?assertEqual(-2, ceiling(-2.0000001)), ?assertEqual(-2, ceiling(-2.9999999)), ok. numerify_test() -> ?assertEqual(42, numerify("42")), ?assertEqual(42.0, numerify("42.0")), ?assertEqual(0.5, numerify(".5")), ok. -endif. cuttlefish-3.0.1/src/cuttlefish_enum.erl0000644000232200023220000001326614027401005020725 0ustar debalancedebalance%% ------------------------------------------------------------------- %% %% cuttlefish_enum: datatype for simple enum settings with %% customizable names and values %% %% Copyright (c) 2013 Basho Technologies, Inc. All Rights Reserved. %% %% This file is provided to you under the Apache License, %% Version 2.0 (the "License"); you may not use this file %% except in compliance with the License. You may obtain %% a copy of the License at %% %% http://www.apache.org/licenses/LICENSE-2.0 %% %% Unless required by applicable law or agreed to in writing, %% software distributed under the License is distributed on an %% "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY %% KIND, either express or implied. See the License for the %% specific language governing permissions and limitations %% under the License. 
%% %% ------------------------------------------------------------------- -module(cuttlefish_enum). -type enum_list() :: [{atom()|string(), term()}]. -type enum() :: {enum, enum_list()}. -type strict_enum_list() :: [{string(), term()}]. -type strict_enum() :: {enum, strict_enum_list()}. -export_type([enum/0, strict_enum/0]). -ifdef(TEST). -include_lib("eunit/include/eunit.hrl"). -endif. -export([ to_string/2, parse/2 ]). -define(FMT(F, A), lists:flatten(io_lib:format(F, A))). -spec to_string(string() | atom(), enum() ) -> string() | cuttlefish_error:error(). to_string(Value, {enum, _Raw} = Enum) -> FriendlEnum = assuage_enum(Enum), case to_string_by_value(Value, FriendlEnum) of {K, _} -> atom_to_list_maybe(K); false -> to_string_by_key(atom_to_list_maybe(Value), FriendlEnum) end. -spec to_string_by_value(term(), strict_enum()) -> term(). to_string_by_value(Value, {enum, FriendlEnum}) -> lists:keyfind(Value, 2, FriendlEnum). -spec to_string_by_key(atom() | string(), strict_enum()) -> string(). to_string_by_key(Key, {enum, FriendlEnum}) -> case lists:keyfind(Key, 1, FriendlEnum) of {K, _} -> K; false -> to_error(Key, FriendlEnum) end. -spec parse(term(), enum()) -> term() | cuttlefish_error:error(). parse(Value, {enum, _Raw} = Enum) -> FriendlEnum = assuage_enum(Enum), case parse_by_key(atom_to_list_maybe(Value), FriendlEnum) of {_Key, Val} -> Val; false -> parse_by_value(Value, FriendlEnum) end. -spec parse_by_key(atom() | string() | term(), strict_enum()) -> {string(), term()} | false. parse_by_key(Key, {enum, FriendlEnum}) -> lists:keyfind(Key, 1, FriendlEnum). -spec parse_by_value(term(), strict_enum()) -> term() | cuttlefish_error:error(). parse_by_value(Value, {enum, FriendlEnum}) -> case lists:keyfind(Value, 2, FriendlEnum) of false -> to_error(Value, FriendlEnum); {_Key, Value} -> Value end. -spec to_error(atom() | string()| term(), strict_enum_list()) -> cuttlefish_error:error(). 
to_error(Value, FriendlEnum) -> Acceptable = [Key || {Key, _} <- FriendlEnum], {error, {enum_name, {atom_to_list_maybe(Value), Acceptable}}}. -spec atom_to_list_maybe(term()) -> term(). atom_to_list_maybe(Atom) when is_atom(Atom) -> atom_to_list(Atom); atom_to_list_maybe(Other) -> Other. -spec assuage_enum(enum()) -> strict_enum() | cuttlefish_error:error(). assuage_enum({enum, Enum}) -> FriendlEnum = assuage_enum(Enum, []), case cuttlefish_error:is_error(FriendlEnum) of true -> FriendlEnum; _ -> {enum, FriendlEnum} end. -spec assuage_enum(enum_list(), strict_enum_list()) -> strict_enum_list() | cuttlefish_error:error(). assuage_enum([], FriendlEnum) -> lists:reverse(FriendlEnum); %% If the head is a 2-tuple; yay! assuage_enum([{Key, Value}|EnumTail], FriendlEnum) when is_atom(Key) -> assuage_enum(EnumTail, [ { cuttlefish_datatypes:to_string(Key, atom), Value } | FriendlEnum ]); assuage_enum([{Key, Value}|EnumTail], FriendlEnum) when is_list(Key) -> assuage_enum(EnumTail, [ { Key, Value } | FriendlEnum ]); %% If the head is just a string or atom, fall here. assuage_enum([Key|EnumTail], FriendlEnum) when is_atom(Key) -> assuage_enum(EnumTail, [ { cuttlefish_datatypes:to_string(Key, atom), Key } | FriendlEnum]); assuage_enum([Key|EnumTail], FriendlEnum) when is_list(Key) -> assuage_enum(EnumTail, [{Key, Key} | FriendlEnum]); assuage_enum([BadTuple|_], _) when is_tuple(BadTuple) -> {error, {enum_format, BadTuple}}; assuage_enum([ErroneousItem|_], _) -> {error, {enum_format, ErroneousItem}}. -ifdef(TEST). -define(XLATE(X), lists:flatten(cuttlefish_error:xlate(X))). parse_test() -> ?assertEqual(1, parse("one", {enum, [{"one", 1}, two]})), ?assertEqual(two, parse("two", {enum, [{"one", 1}, two]})), ok. 
assuage_enum_test() -> ?assertEqual({enum, [{"true", true}, {"false", false}]}, assuage_enum({enum, [true, false]})), ?assertEqual({enum, [{"true", "true"}, {"false", "false"}]}, assuage_enum({enum, ["true", "false"]})), ?assertEqual({enum, [{"one", 1}, {"two", 2}]}, assuage_enum({enum, [{one, 1}, {"two", 2}]})), ?assertEqual({enum, [{"off", off}, {"on", "On"}]}, assuage_enum({enum, [off, {on, "On"}]})), ok. assuage_enum_error_test() -> ?assertEqual( "Enum elements must be atoms, strings, or 2-tuples with " "atom or string as first element. Bad value: {one,two,three}", ?XLATE(assuage_enum({enum, [{one, two, three}, oops]})) ), ?assertEqual( "Enum elements must be atoms, strings, or 2-tuples with " "atom or string as first element. Bad value: 7", ?XLATE(assuage_enum({enum, [oops, 7]})) ), ok. -endif. cuttlefish-3.0.1/src/cuttlefish_mapping.erl0000644000232200023220000004272314027401005021414 0ustar debalancedebalance%% ------------------------------------------------------------------- %% %% cuttlefish_mapping: models a single mapping %% %% Copyright (c) 2013 Basho Technologies, Inc. All Rights Reserved. %% %% This file is provided to you under the Apache License, %% Version 2.0 (the "License"); you may not use this file %% except in compliance with the License. You may obtain %% a copy of the License at %% %% http://www.apache.org/licenses/LICENSE-2.0 %% %% Unless required by applicable law or agreed to in writing, %% software distributed under the License is distributed on an %% "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY %% KIND, either express or implied. See the License for the %% specific language governing permissions and limitations %% under the License. %% %% ------------------------------------------------------------------- -module(cuttlefish_mapping). -ifdef(TEST). -include_lib("eunit/include/eunit.hrl"). -endif. 
-record(mapping, { variable::cuttlefish_variable:variable(), mapping::string(), default::term(), commented::term(), datatype = [string] :: cuttlefish_datatypes:datatype_list(), level = basic :: basic | intermediate | advanced, doc = [] :: list(), include_default = undefined :: string() | undefined, new_conf_value = undefined :: string() | undefined, validators = [] :: [string()], is_merge = false :: boolean(), see = [] :: [cuttlefish_variable:variable()], hidden = false :: boolean() }). -type mapping() :: #mapping{}. -type raw_mapping() :: {mapping, string(), string(), [proplists:property()]}. -export_type([mapping/0]). -export([ parse/1, parse_and_merge/2, is_mapping/1, variable/1, is_fuzzy_variable/1, mapping/1, default/1, has_default/1, commented/1, datatype/1, level/1, hidden/1, doc/1, see/1, include_default/1, new_conf_value/1, replace/2, validators/1, validators/2, remove_all_but_first/2 ]). -spec parse(raw_mapping()) -> mapping() | cuttlefish_error:error(). parse({mapping, Variable, Mapping, Proplist}) -> Datatype = case proplists:get_value(datatype, Proplist, string) of {enum, Enums} -> AtomEnums = [ begin case is_list(E) of true -> list_to_atom(E); _ -> E end end || E <- Enums ], [{enum, AtomEnums}]; L when is_list(L) -> case cuttlefish_datatypes:is_valid_list(L) of true -> L; _ -> {error, {mapping_types, L}} end; D -> [D] end, case Datatype of {error, _} -> Datatype; _ -> #mapping{ variable = cuttlefish_variable:tokenize(Variable), default = proplists:get_value(default, Proplist), commented = proplists:get_value(commented, Proplist), mapping = Mapping, level = proplists:get_value(level, Proplist, basic), datatype = Datatype, doc = proplists:get_value(doc, Proplist, []), see = proplists:get_value(see, Proplist, []), include_default = proplists:get_value(include_default, Proplist), new_conf_value = proplists:get_value(new_conf_value, Proplist), validators = proplists:get_value(validators, Proplist, []), hidden = proplists:get_value(hidden, Proplist, false) 
} end; parse(X) -> {error, {mapping_parse, X}}. %% If this mapping exists, do something. %% That something is usually a simple replace, unless the proplist in %% the raw mapping contains the atom 'merge'. %% %% This fuction assumes it's run as part of a foldl over new schema elements %% in which case, there's only ever one instance of a key in the list %% so keyreplace works fine. -spec parse_and_merge( raw_mapping(), [mapping()]) -> [mapping()]. parse_and_merge({mapping, Variable, _Mapping, Props} = MappingSource, Mappings) -> Var = cuttlefish_variable:tokenize(Variable), case lists:keyfind(Var, #mapping.variable, Mappings) of false -> [ parse(MappingSource) | Mappings]; OldMapping -> MaybeMergedMapping = case proplists:is_defined(merge, Props) of true -> merge(MappingSource, OldMapping); _ -> parse(MappingSource) end, lists:keyreplace(Var, #mapping.variable, Mappings, MaybeMergedMapping) end. -spec merge(raw_mapping(), mapping()) -> mapping(). merge(NewMappingSource, OldMapping) -> MergeMapping = parse(NewMappingSource), #mapping{ variable = variable(MergeMapping), mapping = mapping(MergeMapping), default = choose(default, NewMappingSource, MergeMapping, OldMapping), commented = choose(commented, NewMappingSource, MergeMapping, OldMapping), datatype = choose(datatype, NewMappingSource, MergeMapping, OldMapping), level = choose(level, NewMappingSource, MergeMapping, OldMapping), doc = choose(doc, NewMappingSource, MergeMapping, OldMapping), include_default = choose(include_default, NewMappingSource, MergeMapping, OldMapping), new_conf_value = choose(include_default, NewMappingSource, MergeMapping, OldMapping), validators = choose(validators, NewMappingSource, MergeMapping, OldMapping), see = choose(see, NewMappingSource, MergeMapping, OldMapping), hidden = choose(hidden, NewMappingSource, MergeMapping, OldMapping) }. 
choose(Field, {_, _, _, PreParseMergeProps}, MergeMapping, OldMapping) -> Which = case {Field, proplists:is_defined(Field, PreParseMergeProps), proplists:get_value(Field, PreParseMergeProps)} of {see, _, []} -> old; {doc, _, []} -> old; {_, true, _} -> new; _ -> old end, case Which of new -> ?MODULE:Field(MergeMapping); old -> ?MODULE:Field(OldMapping) end. -spec is_mapping(any()) -> boolean(). is_mapping(M) -> is_tuple(M) andalso element(1, M) =:= mapping. -spec variable(mapping()) -> [string()]. variable(M) -> M#mapping.variable. -spec is_fuzzy_variable(mapping()) -> boolean(). is_fuzzy_variable(#mapping{variable=VariableDef}) -> lists:any(fun(X) -> hd(X) =:= $$ end, VariableDef). -spec mapping(mapping()) -> string(). mapping(M) -> M#mapping.mapping. -spec default(mapping()) -> term(). default(M) -> M#mapping.default. -spec has_default(mapping()) -> boolean(). has_default(MappingRecord) -> default(MappingRecord) =/= undefined. -spec commented(mapping()) -> term(). commented(M) -> M#mapping.commented. -spec datatype(mapping()) -> cuttlefish_datatypes:datatype_list(). datatype(M) -> M#mapping.datatype. -spec level(mapping()) -> basic | intermediate | advanced. level(M) -> M#mapping.level. -spec hidden(mapping()) -> boolean(). hidden(M) -> M#mapping.hidden. -spec doc(mapping()) -> [string()]. doc(M) -> M#mapping.doc. -spec see(mapping()) -> [cuttlefish_variable:variable()]. see(M) -> M#mapping.see. -spec include_default(mapping()) -> string() | undefined. include_default(M) -> M#mapping.include_default. -spec new_conf_value(mapping()) -> string() | undefined. new_conf_value(M) -> M#mapping.new_conf_value. -spec validators(mapping()) -> [string()]. validators(M) -> M#mapping.validators. -spec validators(mapping(), [cuttlefish_validator:validator()]) -> [cuttlefish_validator:validator()]. validators(M, Validators) -> lists:foldr(fun(VName, Vs) -> case lists:keyfind(VName, 2, Validators) of false -> Vs; V -> [V|Vs] end end, [], M#mapping.validators). 
-spec replace(mapping(), [mapping()]) -> [mapping()]. replace(Mapping, ListOfMappings) -> Exists = lists:keymember(variable(Mapping), #mapping.variable, ListOfMappings), case Exists of true -> lists:keyreplace(variable(Mapping), #mapping.variable, ListOfMappings, Mapping); _ -> [Mapping | ListOfMappings] end. -spec remove_all_but_first(string(), [mapping()]) -> [mapping()]. remove_all_but_first(MappingName, Mappings) -> lists:foldr( fun(#mapping{mapping=MN}=M, Acc) when MN =:= MappingName-> [M|lists:keydelete(MN, #mapping.mapping, Acc)]; (M, Acc) -> [M|Acc] end, [], Mappings). -ifdef(TEST). -define(XLATE(X), lists:flatten(cuttlefish_error:xlate(X))). mapping_test() -> SampleMapping = { mapping, "conf.key", "erlang.key", [ {level, advanced}, {default, "default value"}, {datatype, {enum, [on, off]}}, {commented, "commented value"}, {include_default, "default_substitution"}, {new_conf_value, "config_file_val"}, {doc, ["documentation", "for feature"]}, {validators, ["valid.the.impailer"]}, hidden ] }, Record = parse(SampleMapping), ?assertEqual(["conf","key"], Record#mapping.variable), ?assertEqual("default value", Record#mapping.default), ?assertEqual("erlang.key", Record#mapping.mapping), ?assertEqual(advanced, Record#mapping.level), ?assertEqual([{enum, [on, off]}], Record#mapping.datatype), ?assertEqual(["documentation", "for feature"], Record#mapping.doc), ?assertEqual("default_substitution", Record#mapping.include_default), ?assertEqual("config_file_val", Record#mapping.new_conf_value), ?assertEqual(["valid.the.impailer"], Record#mapping.validators), ?assertEqual(true, Record#mapping.hidden), %% funciton tests ?assertEqual(["conf","key"], variable(Record)), ?assertEqual("default value", default(Record)), ?assertEqual("erlang.key", mapping(Record)), ?assertEqual(advanced, level(Record)), ?assertEqual([{enum, [on, off]}], datatype(Record)), ?assertEqual(["documentation", "for feature"], doc(Record)), ?assertEqual("default_substitution", include_default(Record)), 
?assertEqual("config_file_val", new_conf_value(Record)), ?assertEqual(["valid.the.impailer"], validators(Record)), ?assertEqual(true, hidden(Record)), ok. replace_test() -> Element1 = parse({ mapping, "conf.key18", "erlang.key4", [ {level, advanced}, {default, "default value"}, {datatype, {enum, [on, off]}}, {commented, "commented value"}, {include_default, "default_substitution"}, {new_conf_value, "conf_val18"}, {doc, ["documentation", "for feature"]} ] }), SampleMappings = [Element1, parse({ mapping, "conf.key", "erlang.key2", [ {level, advanced}, {default, "default value"}, {datatype, {enum, [on, off]}}, {commented, "commented value"}, {include_default, "default_substitution"}, {new_conf_value, "conf_val_A"}, {doc, ["documentation", "for feature"]} ] }) ], Override = parse({ mapping, "conf.key", "erlang.key", [ {level, advanced}, {default, "default value"}, {datatype, {enum, [on, off]}}, {commented, "commented value"}, {include_default, "default_substitution"}, {new_conf_value, "conf_val_B"}, {doc, ["documentation", "for feature"]} ] }), NewMappings = replace(Override, SampleMappings), ?assertEqual([Element1, Override], NewMappings), ok. validators_test() -> Validators = [ cuttlefish_validator:parse( {validator, "a", "a desc", fun(_X) -> true end} ), cuttlefish_validator:parse({ validator, "b", "b desc", fun(_X) -> true end }), cuttlefish_validator:parse({ validator, "c", "c desc", fun(_X) -> true end }) ], %% Hack for coverage [ begin Fun = cuttlefish_validator:func(V), ?assert(Fun(x)) end || V <- Validators], Mapping = parse({ mapping, "conf.key", "erlang.key1", [ {validators, ["a", "b"]} ] }), [A, B, _C] = Validators, ?assertEqual([A,B], validators(Mapping, Validators)), MappingWithMissingValidator = parse({ mapping, "conf.key", "erlang.key1", [ {validators, ["a", "d"]} %% There is no "d" ] }), ?assertEqual([A], validators(MappingWithMissingValidator, Validators)), ok. 
parse_and_merge_test() -> SampleMappings = [parse({ mapping, "conf.key", "erlang.key1", [ {level, advanced}, {default, "default value"}, {datatype, {enum, [on, off]}}, {commented, "commented value"}, {include_default, "default_substitution"}, {new_conf_value, "conf_val_A"}, {doc, ["documentation", "for feature"]}, hidden ] }), parse({ mapping, "conf.key2", "erlang.key2", [ {level, advanced}, {default, "default value"}, {datatype, {enum, [on, off]}}, {commented, "commented value"}, {include_default, "default_substitution"}, {new_conf_value, "conf_val_B"}, {doc, ["documentation", "for feature"]} ] }) ], NewMappings = parse_and_merge({mapping, "conf.key", "erlang.key3", [{hidden, false}]}, SampleMappings), ?assertEqual("erlang.key3", mapping(hd(NewMappings))), ?assertEqual(false, hidden(hd(NewMappings))), ok. smart_merge_test() -> OldM1 = parse({mapping, "thing.to.merge", "some.key", [ {default, 7}, {datatype, integer}, {doc, ["documentation", "for feature"]} ]}), OldM2 = parse({mapping, "thing.not.merged", "some.other_key", [{default, 6}, {datatype, integer}]}), OriginalMappings = [OldM1, OldM2], NewRawNoMergeMapping = {mapping, "thing.to.merge", "some.new_other_key", [{level, advanced}]}, [NewUnMergedMapping, OldM2] = parse_and_merge( NewRawNoMergeMapping, OriginalMappings), ?assertEqual(["thing", "to", "merge"], variable(NewUnMergedMapping)), ?assertEqual(undefined, default(NewUnMergedMapping)), ?assertEqual("some.new_other_key", mapping(NewUnMergedMapping)), ?assertEqual(advanced, level(NewUnMergedMapping)), ?assertEqual([string], datatype(NewUnMergedMapping)), ?assertEqual([], doc(NewUnMergedMapping)), ?assertEqual(undefined, include_default(NewUnMergedMapping)), ?assertEqual([], validators(NewUnMergedMapping)), NewRawMergeMapping = {mapping, "thing.to.merge", "some.new_key", [merge, {level, advanced}]}, [NewMergedMapping, OldM2] = NewMappings = parse_and_merge( NewRawMergeMapping, OriginalMappings), ?assertEqual(["thing", "to", "merge"], 
variable(NewMergedMapping)), ?assertEqual(7, default(NewMergedMapping)), ?assertEqual("some.new_key", mapping(NewMergedMapping)), ?assertEqual(advanced, level(NewMergedMapping)), ?assertEqual([integer], datatype(NewMergedMapping)), ?assertEqual(["documentation", "for feature"], doc(NewMergedMapping)), ?assertEqual(undefined, include_default(NewMergedMapping)), ?assertEqual([], validators(NewMergedMapping)), NewerRawMergeMapping = {mapping, "thing.to.merge", "some.third_key", [merge, {default, 42}]}, [NewerMergedMapping, OldM2] = parse_and_merge( NewerRawMergeMapping, NewMappings), ?assertEqual(["thing", "to", "merge"], variable(NewerMergedMapping)), ?assertEqual(42, default(NewerMergedMapping)), ?assertEqual("some.third_key", mapping(NewerMergedMapping)), ?assertEqual(advanced, level(NewerMergedMapping)), ?assertEqual([integer], datatype(NewerMergedMapping)), ?assertEqual(["documentation", "for feature"], doc(NewerMergedMapping)), ?assertEqual(undefined, include_default(NewerMergedMapping)), ?assertEqual([], validators(NewerMergedMapping)), ok. accidentally_used_strings_for_enums_test() -> Mapping = parse({ mapping, "conf.key2", "erlang.key2", [ {datatype, {enum, ["on", "off"]}} ] }), ?assertEqual([{enum, [on, off]}], cuttlefish_mapping:datatype(Mapping)), ok. parse_error_test() -> {ErrorAtom, ErrorTuple} = parse(not_a_raw_mapping), ?assertEqual(error, ErrorAtom), ?assertEqual( "Poorly formatted input to cuttlefish_mapping:parse/1 : not_a_raw_mapping", ?XLATE(ErrorTuple)), ok. is_mapping_test() -> ?assert(not(is_mapping(not_a_mapping))), M = parse({ mapping, "conf.key2", "erlang.key2", [ {datatype, {enum, ["on", "off"]}} ] }), ?assert(is_mapping(M)), ok. %% conf.key can be any integer or the atom undefined. extended_types_parse_test() -> Mapping = parse({ mapping, "conf.key", "erlang.key", [ {datatype, [integer, {atom, undefined}]} ] }), ?assertEqual([integer, {atom, undefined}], cuttlefish_mapping:datatype(Mapping)), ok. 
datatype_cannot_be_empty_list_test() -> Mapping = parse({ mapping, "conf.key", "erlang.key", [ {datatype, []} ] }), ?assertMatch({error, _}, Mapping), ok. -endif. cuttlefish-3.0.1/src/conf_parse.erl0000644000232200023220000003736614027401005017655 0ustar debalancedebalance-module(conf_parse). -export([parse/1,file/1]). -define(p_anything,true). -define(p_charclass,true). -define(p_choose,true). -define(p_label,true). -define(p_not,true). -define(p_one_or_more,true). -define(p_optional,true). -define(p_scan,true). -define(p_seq,true). -define(p_string,true). -define(p_zero_or_more,true). %% ------------------------------------------------------------------- %% %% conf_parse: for all your .conf parsing needs. %% %% Copyright (c) 2013 Basho Technologies, Inc. All Rights Reserved. %% Copyright (c) 2019 Pivotal Software, Inc. All rights reserved. %% Copyright (c) 2020 VMware, Inc. or its affiliates. All rights reserved. %% %% This file is provided to you under the Apache License, %% Version 2.0 (the "License"); you may not use this file %% except in compliance with the License. You may obtain %% a copy of the License at %% %% http://www.apache.org/licenses/LICENSE-2.0 %% %% Unless required by applicable law or agreed to in writing, %% software distributed under the License is distributed on an %% "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY %% KIND, either express or implied. See the License for the %% specific language governing permissions and limitations %% under the License. %% %% ------------------------------------------------------------------- %% This module implements the parser for a sysctl-style %% configuration format. 
Example: %% %% ``` %% riak.local.node = riak@127.0.0.1 %% riak.local.http = 127.0.0.1:8098 %% riak.local.pb = 127.0.0.1:8087 %% riak.local.storage.backend = bitcask''' %% %% This would parse into the following flat proplist: %% %% ``` %% [{<<"riak.local.node">>,<<"riak@127.0.0.1">>}, %% {<<"riak.local.http">>,<<"127.0.0.1:8098">>}, %% {<<"riak.local.pb">>,<<"127.0.0.1:8087">>}, %% {<<"riak.local.storage.backend">>,<<"bitcask">>}]''' %% %% Other modules in this application interpret and validate the %% result of a successful parse. %% @end -define(line, true). -define(FMT(F,A), lists:flatten(io_lib:format(F,A))). -ifdef(TEST). -include_lib("eunit/include/eunit.hrl"). -endif. %% @doc Only let through lines that are not comments or whitespace. is_setting(ws) -> false; is_setting([ws]) -> false; is_setting(comment) -> false; is_setting(_) -> true. %% @doc Removes escaped dots from keys unescape_dots([$\\,$.|Rest]) -> [$.|unescape_dots(Rest)]; unescape_dots([]) -> []; unescape_dots([C|Rest]) -> [C|unescape_dots(Rest)]. -ifdef(TEST). file_test() -> Conf = conf_parse:file("test/riak.conf"), ?assertEqual([ {["ring_size"],"32"}, {["anti_entropy"],"debug"}, {["log","error","file"],"/var/log/error.log"}, {["log","console","file"],"/var/log/console.log"}, {["log","syslog"],"on"}, {["listener","http","internal"],"127.0.0.1:8098"}, {["listener","http","external"],"10.0.0.1:80"} ], Conf), ok. included_file_test() -> Conf = conf_parse:file("test/include_file.conf"), ?assertEqual([ {include,<<"riak.conf">>} ], Conf), ok. included_dir_test() -> Conf = conf_parse:file("test/include_dir.conf"), ?assertEqual([ {include,<<"conf.d/*.conf">>} ], Conf), ok. escaped_dots_are_removed_test() -> Conf = conf_parse:parse("#comment\nsetting\\.0 = thing0\n"), ?assertEqual([ {["setting.0"],"thing0"} ], Conf), ok. utf8_test() -> Conf = conf_parse:parse("setting = thing" ++ [338] ++ "\n"), ?assertEqual([{["setting"], {error, {conf_to_latin1, 1}} }], Conf), ok. 
gh_1_two_tab_test() -> Conf = conf_parse:parse("setting0 = thing0\n\t\t\nsetting1 = thing1\n"), ?assertEqual([ {["setting0"],"thing0"}, {["setting1"],"thing1"} ], Conf), ok. gh_1_three_tab_test() -> Conf = conf_parse:parse("setting0 = thing0\n\t\t\t\nsetting1 = thing1\n"), ?assertEqual([ {["setting0"],"thing0"}, {["setting1"],"thing1"} ], Conf), ok. -endif. -spec file(file:name()) -> any(). file(Filename) -> AbsFilename = filename:absname(Filename), case erl_prim_loader:get_file(AbsFilename) of {ok, Bin, _} -> parse(Bin); error -> {error, undefined} end. -spec parse(binary() | list()) -> any(). parse(List) when is_list(List) -> parse(unicode:characters_to_binary(List)); parse(Input) when is_binary(Input) -> _ = setup_memo(), Result = case 'config'(Input,{{line,1},{column,1}}) of {AST, <<>>, _Index} -> AST; Any -> Any end, release_memo(), Result. -spec 'config'(input(), index()) -> parse_result(). 'config'(Input, Index) -> p(Input, Index, 'config', fun(I,D) -> (p_zero_or_more(fun 'line'/2))(I,D) end, fun(Node, _Idx) -> [ L || L <- Node, is_setting(L) ] end). -spec 'line'(input(), index()) -> parse_result(). 'line'(Input, Index) -> p(Input, Index, 'line', fun(I,D) -> (p_choose([p_seq([p_choose([fun 'setting'/2, fun 'include'/2, fun 'comment'/2, p_one_or_more(fun 'ws'/2)]), p_choose([fun 'crlf'/2, fun 'eof'/2])]), fun 'crlf'/2]))(I,D) end, fun(Node, _Idx) -> case Node of [ Line, _EOL ] -> Line; Line -> Line end end). -spec 'setting'(input(), index()) -> parse_result(). 'setting'(Input, Index) -> p(Input, Index, 'setting', fun(I,D) -> (p_seq([p_zero_or_more(fun 'ws'/2), fun 'key'/2, p_zero_or_more(fun 'ws'/2), p_string(<<"=">>), p_zero_or_more(fun 'ws'/2), fun 'value'/2, p_zero_or_more(fun 'ws'/2), p_optional(fun 'comment'/2)]))(I,D) end, fun(Node, _Idx) -> [ _, Key, _, _Eq, _, Value, _, _ ] = Node, {Key, Value} end). -spec 'key'(input(), index()) -> parse_result(). 
'key'(Input, Index) -> p(Input, Index, 'key', fun(I,D) -> (p_seq([p_label('head', fun 'word'/2), p_label('tail', p_zero_or_more(p_seq([p_string(<<".">>), fun 'word'/2])))]))(I,D) end, fun(Node, _Idx) -> [{head, H}, {tail, T}] = Node, [unicode:characters_to_list(H)| [ unicode:characters_to_list(W) || [_, W] <- T]] end). -spec 'value'(input(), index()) -> parse_result(). 'value'(Input, Index) -> p(Input, Index, 'value', fun(I,D) -> (p_one_or_more(p_seq([p_not(p_choose([p_seq([p_zero_or_more(fun 'ws'/2), fun 'crlf'/2]), fun 'comment'/2])), p_anything()])))(I,D) end, fun(Node, Idx) -> case unicode:characters_to_binary(Node, utf8, latin1) of {_Status, _Begining, _Rest} -> {error, {conf_to_latin1, line(Idx)}}; Bin -> binary_to_list(Bin) end end). -spec 'comment'(input(), index()) -> parse_result(). 'comment'(Input, Index) -> p(Input, Index, 'comment', fun(I,D) -> (p_seq([p_zero_or_more(fun 'ws'/2), p_string(<<"#">>), p_zero_or_more(p_seq([p_not(fun 'crlf'/2), p_anything()]))]))(I,D) end, fun(_Node, _Idx) ->comment end). -spec 'include'(input(), index()) -> parse_result(). 'include'(Input, Index) -> p(Input, Index, 'include', fun(I,D) -> (p_seq([p_zero_or_more(fun 'ws'/2), p_string(<<"include">>), p_zero_or_more(fun 'ws'/2), fun 'included_file_or_dir'/2, p_optional(fun 'comment'/2)]))(I,D) end, fun(Node, _Idx) -> [_, _Include, _, Included, _] = Node, {include, Included} end). -spec 'included_file_or_dir'(input(), index()) -> parse_result(). 'included_file_or_dir'(Input, Index) -> p(Input, Index, 'included_file_or_dir', fun(I,D) -> (p_one_or_more(p_charclass(<<"[A-Za-z0-9-\_\.\*\\/]">>)))(I,D) end, fun(Node, _Idx) -> unicode:characters_to_binary(Node, utf8, latin1) end). -spec 'word'(input(), index()) -> parse_result(). 'word'(Input, Index) -> p(Input, Index, 'word', fun(I,D) -> (p_one_or_more(p_choose([p_string(<<"\\.">>), p_charclass(<<"[A-Za-z0-9_-]">>)])))(I,D) end, fun(Node, _Idx) -> unescape_dots(unicode:characters_to_list(Node)) end). 
-spec 'crlf'(input(), index()) -> parse_result(). 'crlf'(Input, Index) -> p(Input, Index, 'crlf', fun(I,D) -> (p_seq([p_optional(p_string(<<"\r">>)), p_string(<<"\n">>)]))(I,D) end, fun(_Node, _Idx) ->ws end). -spec 'eof'(input(), index()) -> parse_result(). 'eof'(Input, Index) -> p(Input, Index, 'eof', fun(I,D) -> (p_not(p_anything()))(I,D) end, fun(_Node, _Idx) ->ws end). -spec 'ws'(input(), index()) -> parse_result(). 'ws'(Input, Index) -> p(Input, Index, 'ws', fun(I,D) -> (p_one_or_more(p_charclass(<<"[\s\t]">>)))(I,D) end, fun(_Node, _Idx) ->ws end). -file("peg_includes.hrl", 1). -type index() :: {{line, pos_integer()}, {column, pos_integer()}}. -type input() :: binary(). -type parse_failure() :: {fail, term()}. -type parse_success() :: {term(), input(), index()}. -type parse_result() :: parse_failure() | parse_success(). -type parse_fun() :: fun((input(), index()) -> parse_result()). -type xform_fun() :: fun((input(), index()) -> term()). -spec p(input(), index(), atom(), parse_fun(), xform_fun()) -> parse_result(). p(Inp, StartIndex, Name, ParseFun, TransformFun) -> case get_memo(StartIndex, Name) of % See if the current reduction is memoized {ok, Memo} -> %Memo; % If it is, return the stored result Memo; _ -> % If not, attempt to parse Result = case ParseFun(Inp, StartIndex) of {fail,_} = Failure -> % If it fails, memoize the failure Failure; {Match, InpRem, NewIndex} -> % If it passes, transform and memoize the result. Transformed = TransformFun(Match, StartIndex), {Transformed, InpRem, NewIndex} end, memoize(StartIndex, Name, Result), Result end. -spec setup_memo() -> ets:tid(). setup_memo() -> put({parse_memo_table, ?MODULE}, ets:new(?MODULE, [set])). -spec release_memo() -> true. release_memo() -> ets:delete(memo_table_name()). -spec memoize(index(), atom(), parse_result()) -> true. 
memoize(Index, Name, Result) -> Memo = case ets:lookup(memo_table_name(), Index) of [] -> []; [{Index, Plist}] -> Plist end, ets:insert(memo_table_name(), {Index, [{Name, Result}|Memo]}). -spec get_memo(index(), atom()) -> {ok, term()} | {error, not_found}. get_memo(Index, Name) -> case ets:lookup(memo_table_name(), Index) of [] -> {error, not_found}; [{Index, Plist}] -> case proplists:lookup(Name, Plist) of {Name, Result} -> {ok, Result}; _ -> {error, not_found} end end. -spec memo_table_name() -> ets:tid(). memo_table_name() -> get({parse_memo_table, ?MODULE}). -ifdef(p_eof). -spec p_eof() -> parse_fun(). p_eof() -> fun(<<>>, Index) -> {eof, [], Index}; (_, Index) -> {fail, {expected, eof, Index}} end. -endif. -ifdef(p_optional). -spec p_optional(parse_fun()) -> parse_fun(). p_optional(P) -> fun(Input, Index) -> case P(Input, Index) of {fail,_} -> {[], Input, Index}; {_, _, _} = Success -> Success end end. -endif. -ifdef(p_not). -spec p_not(parse_fun()) -> parse_fun(). p_not(P) -> fun(Input, Index)-> case P(Input,Index) of {fail,_} -> {[], Input, Index}; {Result, _, _} -> {fail, {expected, {no_match, Result},Index}} end end. -endif. -ifdef(p_assert). -spec p_assert(parse_fun()) -> parse_fun(). p_assert(P) -> fun(Input,Index) -> case P(Input,Index) of {fail,_} = Failure-> Failure; _ -> {[], Input, Index} end end. -endif. -ifdef(p_seq). -spec p_seq([parse_fun()]) -> parse_fun(). p_seq(P) -> fun(Input, Index) -> p_all(P, Input, Index, []) end. -spec p_all([parse_fun()], input(), index(), [term()]) -> parse_result(). p_all([], Inp, Index, Accum ) -> {lists:reverse( Accum ), Inp, Index}; p_all([P|Parsers], Inp, Index, Accum) -> case P(Inp, Index) of {fail, _} = Failure -> Failure; {Result, InpRem, NewIndex} -> p_all(Parsers, InpRem, NewIndex, [Result|Accum]) end. -endif. -ifdef(p_choose). -spec p_choose([parse_fun()]) -> parse_fun(). p_choose(Parsers) -> fun(Input, Index) -> p_attempt(Parsers, Input, Index, none) end. 
-spec p_attempt([parse_fun()], input(), index(), none | parse_failure()) -> parse_result(). p_attempt([], _Input, _Index, Failure) -> Failure; p_attempt([P|Parsers], Input, Index, FirstFailure)-> case P(Input, Index) of {fail, _} = Failure -> case FirstFailure of none -> p_attempt(Parsers, Input, Index, Failure); _ -> p_attempt(Parsers, Input, Index, FirstFailure) end; Result -> Result end. -endif. -ifdef(p_zero_or_more). -spec p_zero_or_more(parse_fun()) -> parse_fun(). p_zero_or_more(P) -> fun(Input, Index) -> p_scan(P, Input, Index, []) end. -endif. -ifdef(p_one_or_more). -spec p_one_or_more(parse_fun()) -> parse_fun(). p_one_or_more(P) -> fun(Input, Index)-> Result = p_scan(P, Input, Index, []), case Result of {[_|_], _, _} -> Result; _ -> {fail, {expected, Failure, _}} = P(Input,Index), {fail, {expected, {at_least_one, Failure}, Index}} end end. -endif. -ifdef(p_label). -spec p_label(atom(), parse_fun()) -> parse_fun(). p_label(Tag, P) -> fun(Input, Index) -> case P(Input, Index) of {fail,_} = Failure -> Failure; {Result, InpRem, NewIndex} -> {{Tag, Result}, InpRem, NewIndex} end end. -endif. -ifdef(p_scan). -spec p_scan(parse_fun(), input(), index(), [term()]) -> {[term()], input(), index()}. p_scan(_, <<>>, Index, Accum) -> {lists:reverse(Accum), <<>>, Index}; p_scan(P, Inp, Index, Accum) -> case P(Inp, Index) of {fail,_} -> {lists:reverse(Accum), Inp, Index}; {Result, InpRem, NewIndex} -> p_scan(P, InpRem, NewIndex, [Result | Accum]) end. -endif. -ifdef(p_string). -spec p_string(binary()) -> parse_fun(). p_string(S) -> Length = erlang:byte_size(S), fun(Input, Index) -> try <> = Input, {S, Rest, p_advance_index(S, Index)} catch error:{badmatch,_} -> {fail, {expected, {string, S}, Index}} end end. -endif. -ifdef(p_anything). -spec p_anything() -> parse_fun(). p_anything() -> fun(<<>>, Index) -> {fail, {expected, any_character, Index}}; (Input, Index) when is_binary(Input) -> <> = Input, {<>, Rest, p_advance_index(<>, Index)} end. -endif. -ifdef(p_charclass). 
-spec p_charclass(string() | binary()) -> parse_fun(). p_charclass(Class) -> {ok, RE} = re:compile(Class, [unicode, dotall]), fun(Inp, Index) -> case re:run(Inp, RE, [anchored]) of {match, [{0, Length}|_]} -> {Head, Tail} = erlang:split_binary(Inp, Length), {Head, Tail, p_advance_index(Head, Index)}; _ -> {fail, {expected, {character_class, binary_to_list(Class)}, Index}} end end. -endif. -ifdef(p_regexp). -spec p_regexp(binary()) -> parse_fun(). p_regexp(Regexp) -> {ok, RE} = re:compile(Regexp, [unicode, dotall, anchored]), fun(Inp, Index) -> case re:run(Inp, RE) of {match, [{0, Length}|_]} -> {Head, Tail} = erlang:split_binary(Inp, Length), {Head, Tail, p_advance_index(Head, Index)}; _ -> {fail, {expected, {regexp, binary_to_list(Regexp)}, Index}} end end. -endif. -ifdef(line). -spec line(index() | term()) -> pos_integer() | undefined. line({{line,L},_}) -> L; line(_) -> undefined. -endif. -ifdef(column). -spec column(index() | term()) -> pos_integer() | undefined. column({_,{column,C}}) -> C; column(_) -> undefined. -endif. -spec p_advance_index(input() | unicode:charlist() | pos_integer(), index()) -> index(). p_advance_index(MatchedInput, Index) when is_list(MatchedInput) orelse is_binary(MatchedInput)-> % strings lists:foldl(fun p_advance_index/2, Index, unicode:characters_to_list(MatchedInput)); p_advance_index(MatchedInput, Index) when is_integer(MatchedInput) -> % single characters {{line, Line}, {column, Col}} = Index, case MatchedInput of $\n -> {{line, Line+1}, {column, 1}}; _ -> {{line, Line}, {column, Col+1}} end. 
cuttlefish-3.0.1/src/cuttlefish_duration_parse.peg0000644000232200023220000000302614027401005022762 0ustar debalancedebalance%% Duration string parser duration <- duration_segment+ `lists:sum(Node)`; duration_segment <- (float / integer) unit ` [Amount, Span] = Node, {Span, Multiplier} = lists:keyfind(Span, 1, ?MULTIPLIERS), Amount * Multiplier `; integer <- [1-9] [0-9]* `list_to_integer(?FLATTEN(Node))`; unit <- "f" / "w" / "d" / "h" / "ms" / "m" / "s" `binary_to_atom(Node, latin1)`; float <- ( [0-9]+ "." [0-9]+ ) / ( "." [0-9]+ ) ` case Node of [<<".">>, Mantissa] -> list_to_float(?FLATTEN(["0.", Mantissa])); _ -> list_to_float(?FLATTEN(Node)) end `; ` %% ------------------------------------------------------------------- %% %% cuttlefish_duration_parse: parses duration strings %% %% Copyright (c) 2014 Basho Technologies, Inc. All Rights Reserved. %% %% This file is provided to you under the Apache License, %% Version 2.0 (the "License"); you may not use this file %% except in compliance with the License. You may obtain %% a copy of the License at %% %% http://www.apache.org/licenses/LICENSE-2.0 %% %% Unless required by applicable law or agreed to in writing, %% software distributed under the License is distributed on an %% "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY %% KIND, either express or implied. See the License for the %% specific language governing permissions and limitations %% under the License. %% %% ------------------------------------------------------------------- -include("cuttlefish_duration.hrl"). -define(FLATTEN(S), binary_to_list(iolist_to_binary(S))). ` cuttlefish-3.0.1/src/cuttlefish_duration.erl0000644000232200023220000001267714027401005021613 0ustar debalancedebalance%% ------------------------------------------------------------------- %% %% cuttlefish_duration: complexity for parsing durations %% %% Copyright (c) 2013 Basho Technologies, Inc. All Rights Reserved. 
%% %% This file is provided to you under the Apache License, %% Version 2.0 (the "License"); you may not use this file %% except in compliance with the License. You may obtain %% a copy of the License at %% %% http://www.apache.org/licenses/LICENSE-2.0 %% %% Unless required by applicable law or agreed to in writing, %% software distributed under the License is distributed on an %% "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY %% KIND, either express or implied. See the License for the %% specific language governing permissions and limitations %% under the License. %% %% ------------------------------------------------------------------- -module(cuttlefish_duration). -opaque time_unit() :: f | w | d | h | m | s | ms. -export_type([time_unit/0]). -include("cuttlefish_duration.hrl"). -ifdef(TEST). -include_lib("eunit/include/eunit.hrl"). -endif. -export([parse/1, parse/2, to_string/2]). -spec to_string(integer(), time_unit()) -> string(). to_string(Fortnights, f) -> milliseconds(Fortnights * ?FORTNIGHT); to_string(Weeks, w) -> milliseconds(Weeks * ?WEEK); to_string(Days, d) -> milliseconds(Days * ?DAY); to_string(Hours, h) -> milliseconds(Hours * ?HOUR); to_string(Minutes, m) -> milliseconds(Minutes * ?MINUTE); to_string(Seconds, s) -> milliseconds(Seconds * ?SECOND); to_string(Millis, ms) -> milliseconds(Millis). milliseconds(Millis) -> Units = lists:filter(fun({N, _Unit}) -> N =/= 0 end, [ { Millis div ?WEEK, "w"}, {(Millis rem ?WEEK) div ?DAY, "d"}, {(Millis rem ?DAY) div ?HOUR, "h"}, {(Millis rem ?HOUR) div ?MINUTE, "m"}, {(Millis rem ?MINUTE) div ?SECOND, "s"}, { Millis rem ?SECOND, "ms"} ]), lists:flatten([ integer_to_list(N) ++ Unit || {N, Unit} <- Units]). -spec parse(string(), time_unit()) -> integer(). 
parse(DurationString, ms) -> parse(DurationString); parse(DurationString, Unit) -> case parse(DurationString) of {error, _}=Err -> Err; Num when is_number(Num) -> {Unit, Multiplier} = lists:keyfind(Unit, 1, ?MULTIPLIERS), cuttlefish_util:ceiling(Num / Multiplier) end. -spec parse(string()) -> integer() | cuttlefish_error:error(). parse(InputDurationString) -> DurationString = string:to_lower(InputDurationString), case cuttlefish_duration_parse:parse(DurationString) of Float when is_float(Float) -> cuttlefish_util:ceiling(Float); Int when is_integer(Int) -> Int; _ -> {error, {duration, InputDurationString}} end. -ifdef(TEST). milliseconds_test() -> ?assertEqual("500ms", milliseconds(500)), ?assertEqual("1s500ms", milliseconds(1500)), ?assertEqual("30m", milliseconds(1800000)), ?assertEqual("1w1d1h1m1s1ms", milliseconds(694861001)), ok. seconds_test() -> ?assertEqual("50s", to_string(50, s)), ?assertEqual("1m1s", to_string(61, s)), ?assertEqual("30m", to_string(1800, s)), ?assertEqual("1w1d1h1m1s", to_string(694861, s)), ok. 
parse_test() -> test_parse(500, "500ms"), test_parse(500, ".5s"), test_parse(1001, "1s1ms"), test_parse(1599, "1s599ms"), test_parse(1599, "1.599s"), test_parse(1600, "1.5999s"), test_parse(60000, "1m"), test_parse(60000, "60s"), test_parse(60000, "60000ms"), test_parse(90000, "1.5m"), test_parse(90000, "1m30s"), test_parse(90000, "1m29s1000ms"), test_parse(1800000, ".5h"), test_parse(3600000, "1h"), test_parse(3601000, "1h1s"), test_parse(3660000, "1h1m"), test_parse(3661000, "1h1m1s"), test_parse(3661001, "1h1m1s1ms"), test_parse(3600000, "60m"), test_parse(5400000, "90m"), test_parse(5401000, "90m1s"), test_parse(5401001, "90m1s1ms"), test_parse(5400000, "1h30m"), test_parse(5401000, "1h30m1s"), test_parse(3660000, "1h1m"), test_parse(86400000, "1d"), test_parse(86401000, "1d1s"), test_parse(86401001, "1d1s1ms"), test_parse(604800000, "1w"), test_parse(691200000, "1w1d"), test_parse(694800000, "1w1d1h"), test_parse(694860000, "1w1d1h1m"), test_parse(694861000, "1w1d1h1m1s"), test_parse(694861001, "1w1d1h1m1s1ms"), %% Weird but ok? test_parse(121001, "1m1s1ms1m"), %% Easter Egg test_parse(1904461001, "1f1w1d1h1m1s1ms"), ok. test_parse(ExpectedMillis, StringToParse) -> ?assertEqual(ExpectedMillis, parse(StringToParse)). parse_2_test() -> ?assertEqual(1, parse("1ms", ms)), ?assertEqual(1, parse("1ms", s)), ?assertEqual(1, parse("1ms", m)), ?assertEqual(1, parse("1ms", h)), ?assertEqual(1, parse("1ms", d)), ?assertEqual(1, parse("1ms", w)), ?assertEqual(1, parse("1ms", f)), ok. to_string_test() -> ?assertEqual("2w", to_string(1, f)), ?assertEqual("2w", to_string(2, w)), ?assertEqual("1w", to_string(1, w)), ?assertEqual("1d", to_string(1, d)), ?assertEqual("1w", to_string(7, d)), ?assertEqual("1h", to_string(1, h)), ?assertEqual("1m", to_string(1, m)), ok. parse_error_test() -> ?assertMatch({error, _}, parse("1q")), %% This previously raised badarith because it did not check the %% return value of parse/1. ?assertMatch({error, _}, catch parse("1q", h)), ok. -endif. 
cuttlefish-3.0.1/src/cuttlefish_conf.erl0000644000232200023220000004322214027401005020701 0ustar debalancedebalance%% ------------------------------------------------------------------- %% %% cuttlefish_conf: handles the reading and generation of .conf files %% %% Copyright (c) 2013 Basho Technologies, Inc. All Rights Reserved. %% %% This file is provided to you under the Apache License, %% Version 2.0 (the "License"); you may not use this file %% except in compliance with the License. You may obtain %% a copy of the License at %% %% http://www.apache.org/licenses/LICENSE-2.0 %% %% Unless required by applicable law or agreed to in writing, %% software distributed under the License is distributed on an %% "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY %% KIND, either express or implied. See the License for the %% specific language governing permissions and limitations %% under the License. %% %% ------------------------------------------------------------------- -module(cuttlefish_conf). -include_lib("kernel/include/logger.hrl"). -export([ generate/1, generate_file/2, file/1, files/1, is_variable_defined/2, pretty_datatype/1]). -type conf_pair() :: {cuttlefish_variable:variable(), any()}. -type conf() :: [conf_pair()]. -export_type([conf_pair/0, conf/0]). -define(FMT(F, Args), lists:flatten(io_lib:format(F, Args))). is_variable_defined(VariableDef, Conf) -> lists:any(fun({X, _}) -> cuttlefish_variable:is_fuzzy_match(X, VariableDef) end, Conf). -spec files([file:name()]) -> conf() | cuttlefish_error:errorlist(). files(ListOfConfFiles) -> {ValidConf, Errors} = lists:foldl( fun(ConfFile, {ConfAcc, ErrorAcc}) -> case cuttlefish_conf:file(ConfFile) of {errorlist, ErrorList} -> {ConfAcc, ErrorList ++ ErrorAcc}; Conf -> {lists:foldl( fun({K,V}, MiniAcc) -> cuttlefish_util:replace_proplist_value(K, V, MiniAcc) end, ConfAcc, Conf), ErrorAcc} end end, {[], []}, ListOfConfFiles), case {ValidConf, Errors} of {_, []} -> ValidConf; _ -> {errorlist, Errors} end. 
-spec file(file:name()) -> conf() | cuttlefish_error:errorlist(). file(Filename) -> case conf_parse:file(Filename) of {error, Reason} -> %% Reason is an atom via file:open {errorlist, [{error, {file_open, {Filename, Reason}}}]}; {_Conf, Remainder, {{line, L}, {column, C}}} when is_binary(Remainder) -> {errorlist, [{error, {conf_syntax, {Filename, {L, C}}}}]}; Conf0 -> % go through the conf looking for include directives, fold all into % a single proplist Conf = fold_conf_files(Filename, Conf0), %% Conf is a proplist, check if any of the values are cuttlefish_errors {_, Values} = lists:unzip(Conf), case cuttlefish_error:filter(Values) of {errorlist, []} -> % expand any non-literal values (ie. included values) expand_values(Filename, remove_duplicates(Conf)); {errorlist, ErrorList} -> NewErrorList = [ {error, {in_file, {Filename, E}}} || {error, E} <- ErrorList ], {errorlist, NewErrorList} end end. fold_conf_files(Filename, Conf0) -> lists:foldl(fun({include, Included}, Acc) -> DirName = binary_to_list(filename:join(filename:dirname(Filename), filename:dirname(Included))), BaseName = binary_to_list(filename:basename(Included)), Acc ++ lists:flatten( [file(filename:join(DirName, F)) || F <- filelib:wildcard(BaseName, DirName)]); (KeyValue, Acc) -> Acc ++ [KeyValue] end, [], Conf0). expand_values(Filename, Conf) -> lists:map(fun({K, Value0}) -> case re:run(Value0, "^\\$\\(\\<(.+)\\)$", [{capture, all_but_first, list}]) of {match, [IncludeFilename0]} -> % This is a value of the format "$( {K, re:replace(Value, "[\n\r]$", "", [{return, list}])}; {error, Reason} -> throw({unable_to_open, IncludeFilename, Reason}) end; _ -> % normal value, nothing to do {K, Value0} end end, Conf). -spec generate([cuttlefish_mapping:mapping()]) -> [string()]. generate(Mappings) -> lists:foldl( fun(Mapping, ConfFile) -> ConfFile ++ generate_element(Mapping) end, [], Mappings). -spec generate_file([cuttlefish_mapping:mapping()], string()) -> ok. 
generate_file(Mappings, Filename) -> ConfFileLines = generate(Mappings), {ok, S} = file:open(Filename, [write]), _ = [ begin io:format(S, "~s~n", [lists:flatten(Line)]) end || Line <- ConfFileLines], % add an include directive at the end that will allow % other conf files in `conf.d` and have them get picked up % in order to override settings % example use case is a k8s configMap that is mapped as a file % to conf.d/erlang_vm.conf io:format(S, "include conf.d/*.conf~n~n", []), _ = file:close(S), ok. -spec generate_element(cuttlefish_mapping:mapping()) -> [string()]. generate_element(MappingRecord) -> Default = get_default(MappingRecord), Key = cuttlefish_mapping:variable(MappingRecord), Commented = cuttlefish_mapping:commented(MappingRecord), Level = cuttlefish_mapping:level(MappingRecord), Hidden = cuttlefish_mapping:hidden(MappingRecord), IncDef = cuttlefish_mapping:include_default(MappingRecord), [Datatype|_] = cuttlefish_mapping:datatype(MappingRecord), %% level != basic OR hidden == true: leave out of generated .conf file %% commeneted $val: insert into .conf file, but commented out with $val %% include_default $val: substitute '$name' or whatever in the key for $val %% e.g. {include_default, "internal"} %% listener.http.$name -> listener.http.internal Field = cuttlefish_variable:format(cuttlefish_variable:replace_match(Key, IncDef)), case Level of basic -> ok; Level -> _ = ?LOG_WARNING("{level, ~p} has been deprecated. Use 'hidden' or '{hidden, true}'", [Level]) end, case generate_element(Hidden, Level, Default, Commented) of no -> []; commented -> Comments = generate_comments(MappingRecord), Comments ++ [lists:flatten([ "## ", Field, " = ", cuttlefish_datatypes:to_string(Commented, Datatype) ]), ""]; default -> Comments = generate_comments(MappingRecord), Comments ++ [lists:flatten([ Field, " = ", cuttlefish_datatypes:to_string(Default, Datatype) ]), ""] end. 
%% @doc Returns the value to render for a mapping when generating a
%% .conf file: `new_conf_value' when set, otherwise `default'.
get_default(MappingRecord) ->
    %% Normally we use `default` to determine what value to use when generating
    %% a config file, but `new_conf_value` can override that. The reason we need
    %% a separate attribute to override `default` (instead of just changing the
    %% default directly) is that `default` also affects default values used for
    %% config keys that haven't been set to any particular value in the .conf file.
    %% (See `cuttlefish_generator:add_defaults` for the relevant bits of code.)
    case cuttlefish_mapping:new_conf_value(MappingRecord) of
        undefined -> cuttlefish_mapping:default(MappingRecord);
        Value -> Value
    end.

%% Decides how a mapping appears in the generated file.
%% Args: (Hidden, Level, Default, Commented) -> no | default | commented.
generate_element(true, _, _, _) -> no;
generate_element(false, _, undefined, undefined) -> no;
generate_element(false, basic, _Default, undefined) -> default;
generate_element(false, basic, _, _Comment) -> commented;
generate_element(false, _Level, _Default, _Commented) -> no.

%% @doc Builds the "## "-prefixed comment lines for a mapping: its doc
%% strings, a "Default:" line when a default exists, and an "Acceptable
%% values:" section listing each datatype in human-readable form.
-spec generate_comments(cuttlefish_mapping:mapping()) -> [string()].
generate_comments(M) ->
    DocString = cuttlefish_mapping:doc(M),
    %% the first datatype renders the default value
    [DefaultDT|_] = cuttlefish_mapping:datatype(M),
    Default = case cuttlefish_mapping:default(M) of
        undefined -> [];
        Other ->
            [ "", ?FMT("Default: ~s", [cuttlefish_datatypes:to_string(Other, DefaultDT)]) ]
    end,
    Datatypes = ["", "Acceptable values:"
                 | [ ?FMT("  - ~s", [pretty_datatype(DT)]) || DT <- cuttlefish_mapping:datatype(M)]],
    Doc = DocString ++ Default ++ Datatypes,
    [ "## " ++ D || D <- Doc].

%% @doc Human-readable description of a datatype (or extended datatype)
%% for the "Acceptable values" comment section.
-spec pretty_datatype(cuttlefish_datatypes:datatype() |
                      cuttlefish_datatypes:extended()) -> string().
pretty_datatype(integer) -> "an integer";
pretty_datatype({enum, L}) ->
    "one of: " ++ string:join([ atom_to_list(A) || A <- L], ", ");
pretty_datatype(ip) -> "an IP/port pair, e.g. 127.0.0.1:10011";
pretty_datatype({duration, _}) -> "a time duration with units, e.g. '10s' for 10 seconds";
pretty_datatype(bytesize) -> "a byte size with units, e.g. 10GB";
pretty_datatype({integer, I}) -> "the integer " ++ integer_to_list(I);
pretty_datatype({string, S}) -> "the text \"" ++ S ++ "\"";
pretty_datatype({atom, A}) -> "the text \"" ++ atom_to_list(A) ++ "\"";
pretty_datatype({ip, {IP, Port}}) -> ?FMT("the address ~s:~p", [IP, Port]);
pretty_datatype({{duration,_}, D}) -> "the time duration " ++ D;
pretty_datatype({bytesize, B}) -> "the bytesize " ++ B;
pretty_datatype(file) -> "the path to a file";
pretty_datatype(directory) -> "the path to a directory";
pretty_datatype({file, F}) -> "the file " ++ F;
pretty_datatype({directory, D}) -> "the directory " ++ D;
pretty_datatype(flag) -> "on or off";
pretty_datatype({flag, On, Off}) when is_atom(On), is_atom(Off) ->
    ?FMT("~p or ~p", [On, Off]);
pretty_datatype({flag, {On,_}, {Off,_}}) ->
    ?FMT("~p or ~p", [On, Off]);
pretty_datatype(_) -> "text". %% string and atom

%% @doc Collapses duplicate keys in a conf proplist; the last
%% occurrence of a key wins, original ordering otherwise kept.
remove_duplicates(Conf) ->
    lists:foldl(
        fun({K,V}, MiniAcc) ->
            cuttlefish_util:replace_proplist_value(K, V, MiniAcc)
        end,
        [], Conf).

-ifdef(TEST).
-include_lib("eunit/include/eunit.hrl").
-endif.

-ifdef(TEST).

%% Verifies the rendered lines for a commented mapping: docs, datatype
%% section, and the commented-out "## ring_size = 64" line.
generate_element_test() ->
    TestSchemaElement = cuttlefish_mapping:parse({mapping, "ring_size", "riak_core.ring_creation_size",
        [
            {datatype, integer},
            {commented, 64},
            {doc, ["Default ring creation size.  Make sure it is a power of 2,",
                   "e.g. 16, 32, 64, 128, 256, 512 etc"]}
        ]
    }),
    GeneratedConf = generate_element(TestSchemaElement),
    ?assertEqual(7, length(GeneratedConf)),
    ?assertEqual(
        "## Default ring creation size.  Make sure it is a power of 2,",
        lists:nth(1, GeneratedConf)
        ),
    ?assertEqual(
        "## e.g. 16, 32, 64, 128, 256, 512 etc",
        lists:nth(2, GeneratedConf)
        ),
    ?assertEqual(
        "## ",
        lists:nth(3, GeneratedConf)
        ),
    ?assertEqual(
        "## Acceptable values:",
        lists:nth(4, GeneratedConf)
        ),
    ?assertEqual(
        "##   - an integer",
        lists:nth(5, GeneratedConf)
        ),
    ?assertEqual(
        "## ring_size = 64",
        lists:nth(6, GeneratedConf)
        ),
    ok.
%% new_conf_value must override default (or supply a value where no
%% default exists) in the generated file.
generate_conf_default_test() ->
    TestMappings = [{mapping, "default.absent", "undefined",
                     [{datatype, integer}, {new_conf_value, 42}]},
                    {mapping, "default.present", "undefined",
                     [{datatype, integer}, {default, -1}, {new_conf_value, 9001}]}],
    TestSchema = lists:map(fun cuttlefish_mapping:parse/1, TestMappings),
    GeneratedConf = generate(TestSchema),
    %% TODO Feels pretty fragile to rely on the number of comment lines not changing...
    %% Would be nice if we had a good way to pinpoint the line we want to check without
    %% having to hardcode the line numbers into the lists:nth calls.
    ?assertEqual(
       "default.absent = 42",
       lists:nth(4, GeneratedConf)
      ),
    ?assertEqual(
       "default.present = 9001",
       lists:nth(11, GeneratedConf)
      ),
    ok.

%% Smoke test: a fuzzy ($name) mapping with include_default renders
%% without crashing.
generate_dollar_test() ->
    TestSchemaElement = cuttlefish_mapping:parse({ mapping, "listener.http.$name", "riak_core.http", [
            {datatype, ip},
            {default, "127.0.0.1:8098"},
            {mapping, "riak_core.http"},
            {include_default,"internal"}
        ]}),
    _GeneratedConf = generate_element(TestSchemaElement),
    ok.

%% Doc lines plus the datatype section, each prefixed with "## ".
generate_comments_test() ->
    SchemaElement = cuttlefish_mapping:parse({ mapping, "dont.care", "undefined", [
        {doc, ["Hi!", "Bye!"]}
    ]}),
    Comments = generate_comments(SchemaElement),
    ?assertEqual(["## Hi!", "## Bye!",
                  "## ",
                  "## Acceptable values:",
                  "##   - text"], Comments).

%% Later occurrences of a duplicated key win within a single file.
duplicates_test() ->
    Conf = file("test/multi1.conf"),
    ?assertEqual(2, length(Conf)),
    ?assertEqual("3", proplists:get_value(["a","b","c"], Conf)),
    ?assertEqual("1", proplists:get_value(["a","b","d"], Conf)),
    ok.

%% Later files win when the same key appears in multiple files.
duplicates_multi_test() ->
    Conf = files(["test/multi1.conf", "test/multi2.conf"]),
    ?assertEqual(2, length(Conf)),
    ?assertEqual("4", proplists:get_value(["a","b","c"], Conf)),
    ?assertEqual("1", proplists:get_value(["a","b","d"], Conf)),
    ok.

%% A missing file in the list surfaces as a file_open errorlist.
files_one_nonent_test() ->
    Conf = files(["test/multi1.conf", "test/nonent.conf"]),
    ?assertMatch({errorlist,[{error, {file_open, {"test/nonent.conf", _}}}]}, Conf),
    ok.
%% A syntax error reports the file plus {line, column} of the failure.
files_incomplete_parse_test() ->
    Conf = file("test/incomplete.conf"),
    ?assertEqual({errorlist, [{error, {conf_syntax, {"test/incomplete.conf", {3, 1}}}}]}, Conf),
    ok.

%% {level, advanced} produces no output and logs a deprecation warning.
generate_element_level_advanced_test() ->
    _ = cuttlefish_test_logging:set_up(),
    _ = cuttlefish_test_logging:bounce(warning),
    assert_no_output({level, advanced}),
    [Log] = cuttlefish_test_logging:get_logs(),
    ?assertMatch({match, _}, re:run(Log, "{level, advanced} has been deprecated. Use 'hidden' or '{hidden, true}'")),
    ok.

%% {level, intermediate} behaves like advanced: hidden + warning.
generate_element_level_intermediate_test() ->
    _ = cuttlefish_test_logging:set_up(),
    _ = cuttlefish_test_logging:bounce(warning),
    assert_no_output({level, intermediate}),
    [Log] = cuttlefish_test_logging:get_logs(),
    ?assertMatch({match, _}, re:run(Log, "{level, intermediate} has been deprecated. Use 'hidden' or '{hidden, true}'")),
    ok.

%% 'hidden' suppresses output without any deprecation warning.
generate_element_hidden_test() ->
    _ = cuttlefish_test_logging:set_up(),
    _ = cuttlefish_test_logging:bounce(warning),
    assert_no_output(hidden),
    assert_no_output({hidden, true}),
    ?assertEqual([], cuttlefish_test_logging:get_logs()),
    ok.

%% 'include <file>' directives merge the included file's settings.
included_file_test() ->
    Conf = file("test/include_file.conf"),
    ?assertEqual(lists:sort([
        {["ring_size"],"32"},
        {["anti_entropy"],"debug"},
        {["log","error","file"],"/var/log/error.log"},
        {["log","console","file"],"/var/log/console.log"},
        {["log","syslog"],"on"},
        {["listener","http","internal"],"127.0.0.1:8098"},
        {["listener","http","external"],"10.0.0.1:80"}
    ]), lists:sort(Conf)),
    ok.

%% 'include <dir>/*.conf' wildcard includes merge every matching file.
included_dir_test() ->
    Conf = file("test/include_dir.conf"),
    ?assertEqual(lists:sort([
        {["anti_entropy"],"debug"},
        {["log","error","file"],"/var/log/error.log"},
        {["log","console","file"],"/var/log/console.log"},
        {["ring_size"],"5"},
        {["rogue","option"],"42"},
        {["listener","http","internal"],"127.0.0.1:8098"},
        {["listener","http","external"],"10.0.0.1:80"},
        {["log","syslog"],"off"}
    ]), lists:sort(Conf)),
    ok.
%% "$(<file)" values are read from the referenced file; literal "$("
%% text that doesn't match the pattern is left alone (value6).
included_value_test() ->
    Conf = file("test/included_value.conf"),
    ?assertEqual(lists:sort([
        {["value1"], "42"},
        {["value2"], "43"},
        {["value3"], "42"},
        {["value4"], "multi\nline\nvalue"},
        {["value5"], "12.34"},
        {["value6"], "$v1 [$v2] $v3 $v4"}
    ]), lists:sort(Conf)),
    ok.

%% Helper: a mapping with Setting applied must render to no lines.
assert_no_output(Setting) ->
    Mapping = cuttlefish_mapping:parse(
                {mapping, "a", "b", [
                                     {doc, ["blah"]},
                                     Setting
                                    ]}),
    ?assertEqual([], generate_element(Mapping)).

-endif.
cuttlefish-3.0.1/src/cuttlefish_generator.erl0000644000232200023220000013624514027401005021752 0ustar debalancedebalance%% -------------------------------------------------------------------
%%
%% cuttlefish_generator: this is where the action is
%%
%% Copyright (c) 2013 Basho Technologies, Inc.  All Rights Reserved.
%%
%% This file is provided to you under the Apache License,
%% Version 2.0 (the "License"); you may not use this file
%% except in compliance with the License.  You may obtain
%% a copy of the License at
%%
%%   http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing,
%% software distributed under the License is distributed on an
%% "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
%% KIND, either express or implied.  See the License for the
%% specific language governing permissions and limitations
%% under the License.
%%
%% -------------------------------------------------------------------
-module(cuttlefish_generator).

-include_lib("kernel/include/logger.hrl").

-ifdef(TEST).
-include_lib("eunit/include/eunit.hrl").
-endif.

-define(FMT(F,A), lists:flatten(io_lib:format(F,A))).
%% Right-hand-side substitution delimiters: "$(" ... ")"
-define(LSUB, "$(").
-define(RSUB, ")").
-define(LSUBLEN, 2).
-define(RSUBLEN, 1).

-export([map/2, map/3, find_mapping/2, add_defaults/2, minimal_map/2]).

%% @doc Generates the full erlang app.config-style proplist from a
%% schema and a parsed .conf proplist, with no command line overrides.
-spec map(cuttlefish_schema:schema(), cuttlefish_conf:conf()) ->
    [proplists:property()] | {error, atom(), cuttlefish_error:errorlist()}.
map(Schema, Config) ->
    map(Schema, Config, []).
%% @doc As map/2, but with extra parsed command-line options (e.g.
%% allow_extra) threaded through the pipeline.
-spec map(cuttlefish_schema:schema(), cuttlefish_conf:conf(), [proplists:property()]) ->
    [proplists:property()] | {error, atom(), cuttlefish_error:errorlist()}.
map(Schema, Config, ParsedArgs) ->
    map_add_defaults(Schema, Config, ParsedArgs).

%% @doc Generates an Erlang config that only includes the settings
%% encompassed by the passed Config, excluding defaults from the
%% schema for unspecified settings.
-spec minimal_map(cuttlefish_schema:schema(), cuttlefish_conf:conf()) ->
    [proplists:property()] | {error, atom(), cuttlefish_error:errorlist()}.
minimal_map({AllTranslations, AllMappings, Validators}, Config) ->
    %% Set of variables the user actually configured.
    ConfKeySet = sets:from_list([Key || {Key, _Val} <- Config]),
    %% Keep only mappings for configured variables, remembering which
    %% app.config keys those mappings target.
    Restrict = fun(Mapping, Acc) -> restrict_mappings(Mapping, Acc, ConfKeySet) end,
    {KeptMappings, TargetKeys} = lists:foldr(Restrict, {[], sets:new()}, AllMappings),
    %% Keep only translations targeting one of the surviving keys.
    KeepTranslation =
        fun(T) -> sets:is_element(cuttlefish_translation:mapping(T), TargetKeys) end,
    KeptTranslations = lists:filter(KeepTranslation, AllTranslations),
    map({KeptTranslations, KeptMappings, Validators}, Config, []).

%% Fold step for minimal_map/2: keep Mapping iff its variable was
%% configured, accumulating both the mapping and its target key.
restrict_mappings(Mapping, {Kept, TargetKeys}, ConfKeySet) ->
    case sets:is_element(cuttlefish_mapping:variable(Mapping), ConfKeySet) of
        true ->
            Target = cuttlefish_mapping:mapping(Mapping),
            {[Mapping | Kept], sets:add_element(Target, TargetKeys)};
        false ->
            {Kept, TargetKeys}
    end.

%% @doc First pipeline stage: roll schema defaults into the .conf
%% settings, then continue to right-hand-side substitution. Errors from
%% add_defaults/2 short-circuit the pipeline.
-spec map_add_defaults(cuttlefish_schema:schema(), cuttlefish_conf:conf(), [proplists:property()]) ->
    [proplists:property()] | {error, atom(), cuttlefish_error:errorlist()}.
map_add_defaults({_, Mappings, _} = Schema, Config, ParsedArgs) ->
    %% Config at this point is just what's in the .conf file.
    _ = ?LOG_DEBUG("Adding Defaults"),
    WithDefaults = add_defaults(Config, Mappings),
    case cuttlefish_error:errorlist_maybe(WithDefaults) of
        {errorlist, Errors} ->
            {error, add_defaults, {errorlist, Errors}};
        _NoErrors ->
            map_value_sub(Schema, WithDefaults, ParsedArgs)
    end.

-spec map_value_sub(cuttlefish_schema:schema(), cuttlefish_conf:conf(), [proplists:property()]) ->
    [proplists:property()] | {error, atom(), cuttlefish_error:errorlist()}.
%% Second pipeline stage: perform "$(var)" substitutions on the right
%% hand sides, then continue to datatype transformation.
map_value_sub(Schema, Config, ParsedArgs) ->
    _ = ?LOG_DEBUG("Right Hand Side Substitutions"),
    case value_sub(Config) of
        {SubbedConfig, []} ->
            map_transform_datatypes(Schema, SubbedConfig, ParsedArgs);
        {_, EList} ->
            {error, rhs_subs, {errorlist, EList}}
    end.

%% Third pipeline stage: convert string values into typed terms.
-spec map_transform_datatypes(cuttlefish_schema:schema(), cuttlefish_conf:conf(), [proplists:property()]) ->
    [proplists:property()] | {error, atom(), cuttlefish_error:errorlist()}.
map_transform_datatypes({_, Mappings, _} = Schema, DConfig, ParsedArgs) ->
    %% Everything in DConfig is of datatype "string",
    %% transform_datatypes turns them into other erlang terms
    %% based on the schema
    _ = ?LOG_DEBUG("Applying Datatypes"),
    case transform_datatypes(DConfig, Mappings, ParsedArgs) of
        {NewConf, []} -> map_validate(Schema, NewConf);
        {_, EList} -> {error, transform_datatypes, {errorlist, EList}}
    end.

%% Final pipeline stage: run validators, then apply 1:1 mappings and
%% translations to produce the app.config proplist.
-spec map_validate(cuttlefish_schema:schema(), cuttlefish_conf:conf()) ->
    [proplists:property()] | {error, atom(), cuttlefish_error:errorlist()}.
map_validate(Schema, Conf) ->
    %% Any more advanced validators
    _ = ?LOG_DEBUG("Validation"),
    case cuttlefish_error:errorlist_maybe(run_validations(Schema, Conf)) of
        {errorlist, EList} ->
            {error, validation, {errorlist, EList}};
        true ->
            {DirectMappings, TranslationsToDrop} = apply_mappings(Schema, Conf),
            apply_translations(Schema, Conf, DirectMappings, TranslationsToDrop)
    end.

-spec apply_mappings(cuttlefish_schema:schema(), cuttlefish_conf:conf()) ->
    {[proplists:property()], [string()]}.
apply_mappings({Translations, Mappings, _Validators}, Conf) ->
    %% This fold handles 1:1 mappings, that have no corresponding translations
    %% The accumulator is the app.config proplist that we start building from
    %% these 1:1 mappings, hence the return "DirectMappings".
    %% It also builds a list of "TranslationsToDrop". It's basically saying that
    %% if a user didn't actually configure this setting in the .conf file and
    %% there's no default in the schema, then there won't be enough information
    %% during the translation phase to succeed, so we'll earmark it to be skipped
    {DirectMappings, {TranslationsToMaybeDrop, TranslationsToKeep}} =
        lists:foldr(
          fun(MappingRecord, {ConfAcc, {MaybeDrop, Keep}}) ->
              Mapping = cuttlefish_mapping:mapping(MappingRecord),
              Default = cuttlefish_mapping:default(MappingRecord),
              Variable = cuttlefish_mapping:variable(MappingRecord),
              case {
                  %% is there a value (explicit or default) for this variable?
                  Default =/= undefined orelse cuttlefish_conf:is_variable_defined(Variable, Conf),
                  %% does a translation own this app.config key?
                  lists:any(
                      fun(T) ->
                          cuttlefish_translation:mapping(T) =:= Mapping
                      end,
                      Translations)
                  } of
                  {true, false} ->
                      %% value exists, no translation: write it directly
                      Tokens = cuttlefish_variable:tokenize(Mapping),
                      NewValue = proplists:get_value(Variable, Conf),
                      {set_value(Tokens, ConfAcc, NewValue),
                       {MaybeDrop, ordsets:add_element(Mapping,Keep)}};
                  {true, true} ->
                      %% value exists but a translation will handle it
                      {ConfAcc, {MaybeDrop, ordsets:add_element(Mapping,Keep)}};
                  _ ->
                      %% no value at all: candidate for dropping its translation
                      {ConfAcc, {ordsets:add_element(Mapping,MaybeDrop), Keep}}
              end
          end,
          {[], {ordsets:new(),ordsets:new()}},
          Mappings),
    _ = ?LOG_DEBUG("Applied 1:1 Mappings"),
    %% a mapping kept by any sibling rescues the translation
    TranslationsToDrop = TranslationsToMaybeDrop -- TranslationsToKeep,
    {DirectMappings, TranslationsToDrop}.

-spec apply_translations(cuttlefish_schema:schema(), cuttlefish_conf:conf(),
                         [proplists:property()], [string()]) ->
    [proplists:property()] | {error, atom(), cuttlefish_error:errorlist()}.
apply_translations({Translations, _, _} = Schema, Conf, DirectMappings, TranslationsToDrop) ->
    %% The fold handles the translations. After we've built the DirectMappings,
    %% we use that to seed this fold's accumulator. As we go through each translation
    %% we write that to the `app.config` that lives in the accumulator.
    {Proplist, Errorlist} =
        lists:foldl(
          fold_apply_translation(Conf, Schema, TranslationsToDrop),
          {DirectMappings, []},
          Translations),
    case Errorlist of
        [] ->
            _ = ?LOG_DEBUG("Applied Translations"),
            Proplist;
        Es ->
            {error, apply_translations, {errorlist, Es}}
    end.

%% Builds the fold fun used by apply_translations/4: runs each
%% translation not earmarked for dropping and writes its result (or
%% accumulates its error).
fold_apply_translation(Conf, Schema, TranslationsToDrop) ->
    fun(TranslationRecord, {Acc, Errors}) ->
        Mapping = cuttlefish_translation:mapping(TranslationRecord),
        Xlat = cuttlefish_translation:func(TranslationRecord),
        case lists:member(Mapping, TranslationsToDrop) of
            false ->
                {XlatFun, XlatArgs} = prepare_translation_fun(Conf, Schema, Mapping, Xlat),
                _ = ?LOG_DEBUG("Running translation for ~s", [Mapping]),
                case try_apply_translation(Mapping, XlatFun, XlatArgs) of
                    unset ->
                        {Acc, Errors};
                    {set, NewValue} ->
                        {set_value(cuttlefish_variable:tokenize(Mapping), Acc, NewValue), Errors};
                    {error, Term} ->
                        {Acc, [{error, Term}|Errors]}
                end;
            _ ->
                _ = ?LOG_DEBUG("~p in Translations to drop...", [Mapping]),
                {Acc, Errors}
        end
    end.

%% Matches the translation fun's arity to its argument list: arity 1
%% gets [Conf], arity 2 gets [Conf, Schema]. Any other arity yields a
%% fun producing a translation_arity error.
%% NOTE(review): in the fallback the args term is the atom 'error', not
%% a list, so erlang:apply/2 raises and the failure is reported by
%% try_apply_translation as translation_unknown_error rather than
%% translation_arity — confirm whether this is intended.
prepare_translation_fun(Conf, Schema, Mapping, Xlat) ->
    case proplists:get_value(arity, erlang:fun_info(Xlat)) of
        1 -> {Xlat, [Conf]};
        2 -> {Xlat, [Conf, Schema]};
        OtherArity ->
            {fun(_) -> {error, {translation_arity, {Mapping, OtherArity}}} end, error}
    end.

%% Runs one translation, converting its throws into tagged results:
%% unset | {set, Value} | {error, Reason}.
try_apply_translation(Mapping, XlatFun, XlatArgs) ->
    try erlang:apply(XlatFun, XlatArgs) of
        {ok, Value} -> {set, Value};
        X -> {set, X}
    catch
        %% cuttlefish:conf_get/2 threw not_found
        throw:{not_found, NotFound} ->
            {error, {translation_missing_setting,
                     {Mapping, cuttlefish_variable:format(NotFound)}}};
        %% For explicitly omitting an output setting.
        %% See cuttlefish:unset/0
        throw:unset ->
            unset;
        %% For translations that found invalid
        %% settings, even after mapping. See
        %% cuttlefish:invalid/1.
        throw:{invalid, Invalid} ->
            {error, {translation_invalid_configuration, {Mapping, Invalid}}};
        %% Any unknown error, perhaps caused by stdlib
        %% stuff.
        E:R ->
            {error, {translation_unknown_error, {Mapping, {E, R}}}}
    end.

%for each token, is it special?
%
%if yes, special processing
%if no, recurse into this with the value from the proplist and tail of tokens
%
%unless the tail of tokens is []

%% This is the last token, so things ends with replacing the proplist value.
set_value([LastToken], Acc, NewValue) ->
    cuttlefish_util:replace_proplist_value(list_to_atom(LastToken), NewValue, Acc);
%% This is the case of all but the last token.
%% recurse until you hit a leaf.
set_value([HeadToken|MoreTokens], PList, NewValue) ->
    Token = list_to_atom(HeadToken),
    %% missing intermediate levels default to an empty proplist
    OldValue = proplists:get_value(Token, PList, []),
    cuttlefish_util:replace_proplist_value(
        Token,
        set_value(MoreTokens, OldValue, NewValue),
        PList).

%% @doc adds default values from the schema when something's not
%% defined in the Conf, to give a complete app.config
add_defaults(Conf, Mappings) ->
    Prefixes = get_possible_values_for_fuzzy_matches(Conf, Mappings),
    lists:foldl(
        fun(MappingRecord, Acc) ->
            case cuttlefish_mapping:has_default(MappingRecord) of
                false -> Acc;
                true -> add_default(Conf, Prefixes, MappingRecord, Acc)
            end
        end,
        Conf,
        Mappings).

%% Adds one mapping's default to Acc unless the .conf already covers it
%% (strictly, or via a fuzzy $name match handled by add_fuzzy_default).
add_default(Conf, Prefixes, MappingRecord, Acc) ->
    Default = cuttlefish_mapping:default(MappingRecord),
    VariableDef = cuttlefish_mapping:variable(MappingRecord),
    IsFuzzyMatch = cuttlefish_mapping:is_fuzzy_variable(MappingRecord),
    IsStrictMatch = lists:keymember(VariableDef, 1, Conf),
    %% No, then plug in the default
    case {IsStrictMatch, IsFuzzyMatch} of
        %% Strict match means we have the setting already
        {true, false} -> Acc;
        %% If IsStrictMatch =:= false, IsFuzzyMatch =:= true, we've got a setting, but
        %% it's part of a complex data structure.
        {false, true} ->
            add_fuzzy_default(Prefixes, Acc, Default, VariableDef);
        %% If Match =:= FuzzyMatch =:= false, use the default, key not set in .conf
        {false, false} -> [{VariableDef, Default}|Acc];
        %% If Match =:= true, do nothing, the value is set in the .conf file
        _ ->
            %% TODO: Handle with more style and grace
            _ = ?LOG_ERROR("Both fuzzy and strict match! should not happen"),
            [{error, {map_multiple_match, VariableDef}}|Acc]
    end.

%% true iff the first list is a proper prefix of the second and the
%% next segment of the second starts with '$' (a fuzzy segment).
is_strict_prefix([H|T1], [H|T2]) ->
    is_strict_prefix(T1, T2);
is_strict_prefix([], [H2|_]) when hd(H2) =:= $$ ->
    true;
is_strict_prefix(_, _) ->
    false.

%% Expands a fuzzy default across every substitution seen in the .conf
%% for the variable's prefix, skipping combinations already set.
add_fuzzy_default(Prefixes, Conf, Default, VariableDef) ->
    PotentialMatch = lists:dropwhile(fun({Prefix, _}) ->
                                             not is_strict_prefix(Prefix, VariableDef)
                                     end,
                                     Prefixes),
    case PotentialMatch of
        %% None of the prefixes match, so we don't generate a default.
        [] ->
            Conf;
        [{_Prefix, Substitutions}|_] ->
            %% This means that we found the key.
            %% ToAdd will be the list of all the things we're adding to the defaults.
            %% So, let's say you have the following mappings defined:
            %%   namespace.$named_thing.a
            %%   namespace.$named_thing.b
            %%   namespace.$named_thing.c
            %% and in your conf, you defined the following:
            %%   namespace.strong_bad.a = 10
            %%   namespace.senor_cardgage.b = percent_sign
            %%   namespace.trogdor.c = burninate
            %% Well, Prefixes would look like this:
            %%   [{"namespace", ["strong_bad", "senor_cardgage", "trogdor"]}]
            %% The ToAdd list comp is going through and saying: ok, I know there are
            %% defaults for namespace.$named_thing.a, b, and c. And I know the possible
            %% values of $named_thing are strong_bad, senor_cardgage, and trogdor.
            %% so I want to ensure that there are values for the following:
            %%
            %%   namespace.strong_bad.a
            %%   namespace.strong_bad.b
            %%   namespace.strong_bad.c
            %%   namespace.senor_cardgage.a
            %%   namespace.senor_cardgage.b
            %%   namespace.senor_cardgage.c
            %%   namespace.trogdor.a
            %%   namespace.trogdor.b
            %%   namespace.trogdor.c
            %% So, we go through the List of possible substitutions
            %% and apply the substitution to the variable. If it
            %% already exists in the Conf, then we skip it, otherwise
            %% we include the Default value.
            ToAdd = [ {VariableToAdd, Default}
                      || Subst <- Substitutions,
                         VariableToAdd <- [cuttlefish_variable:replace_match(VariableDef, Subst)],
                         not lists:keymember(VariableToAdd, 1, Conf)],
            Conf ++ ToAdd
    end.
%%%%%%%%%%%%%%%%%%%%%%%%
%% Prefixes is the thing we need for defaults of named keys
%% it looks like this:
%%
%% Prefixes: [{"riak_control.user",["user"]},
%%            {"listener.https",["internal"]},
%%            {"listener.protobuf",["internal"]},
%%            {"listener.http",["internal"]},
%%            {"multi_backend",
%%             ["bitcask_mult","leveldb_mult","leveldb_mult2","memory_mult"}]
%%%%%%%%%%%%%%%%%%%%%%%%
-spec get_possible_values_for_fuzzy_matches(cuttlefish_conf:conf(), [cuttlefish_mapping:mapping()]) ->
    [{string(), [string()]}].
get_possible_values_for_fuzzy_matches(Conf, Mappings) ->
    %% Now, get all the variables that could match, i.e. all the names
    %% it found referenced in the Conf proplist. It may look something
    %% like this: [{"n",["ck","ak","bk"]}]
    lists:foldl(
      fun(Mapping, FuzzyMatches) ->
              case cuttlefish_mapping:is_fuzzy_variable(Mapping) of
                  false ->
                      FuzzyMatches; %% Strict match
                  true ->
                      %% Fuzzy match, extract the matching settings from the conf
                      VD = cuttlefish_mapping:variable(Mapping),
                      ListOfVars = [Var || {_, Var} <- cuttlefish_variable:fuzzy_matches(VD, Conf)],
                      %% index by the prefix before the $ segment
                      {Prefix, _, _} = cuttlefish_variable:split_on_match(VD),
                      orddict:append_list(Prefix, ListOfVars, FuzzyMatches)
              end
      end,
      orddict:new(),
      Mappings).

%% @doc Converts each string value in Conf into a typed term using the
%% mapping's datatype list. Unknown variables are errors (with
%% "did you mean" suggestions logged) unless allow_extra is set.
-spec transform_datatypes(
        cuttlefish_conf:conf(),
        [cuttlefish_mapping:mapping()],
        [proplists:property()]
       ) -> {cuttlefish_conf:conf(), [cuttlefish_error:error()]}.
transform_datatypes(Conf, Mappings, ParsedArgs) ->
    lists:foldl(
      fun({Variable, Value}, {Acc, ErrorAcc}) ->
              AllowExtra = proplists:get_value(allow_extra, ParsedArgs, false),
              %% Look up mapping from schema
              case find_mapping(Variable, Mappings) of
                  {error, _} when AllowExtra ->
                      %% user asked for us to tolerate variables
                      %% that are not present in the mapping so
                      %% do nothing here
                      {Acc, ErrorAcc};
                  {error, _} ->
                      %% So, this error message isn't so performant (s/o @argv0)
                      %% but it shouldn't happen too often, and I think it's important
                      %% to give users this feedback.
                      %% It will prevent anything from starting, and will let you know
                      %% that you're trying to set something that has no effect
                      VarName = cuttlefish_variable:format(Variable),
                      _ = ?LOG_ERROR("You've tried to set ~s, but there is no setting with that name.", [VarName]),
                      _ = ?LOG_ERROR("  Did you mean one of these?"),
                      %% rank all schema variables by edit distance
                      Possibilities = [
                                       begin
                                           MapVarName = cuttlefish_variable:format(cuttlefish_mapping:variable(M)),
                                           {cuttlefish_util:levenshtein(VarName, MapVarName), MapVarName}
                                       end
                                       || M <- Mappings],
                      Sorted = lists:sort(Possibilities),
                      _ = [ _ = ?LOG_ERROR("    ~s", [T]) || {_, T} <- lists:sublist(Sorted, 3) ],
                      {Acc, [ {error, {unknown_variable, VarName}} | ErrorAcc ]};
                  MappingRecord ->
                      DTs = cuttlefish_mapping:datatype(MappingRecord),
                      %% DTs is a list now, which means we'll receive an
                      %% errorlist, not a single error
                      case transform_type(DTs, Value) of
                          {ok, NewValue} ->
                              {[{Variable, NewValue}|Acc], ErrorAcc};
                          {errorlist, EList} ->
                              NewError = {transform_type, cuttlefish_variable:format(Variable)},
                              {Acc, [{error, NewError}] ++ EList ++ ErrorAcc}
                      end
              end
      end,
      {[], []},
      Conf).

%% @doc Performs "$(var)" right-hand-side substitution over the whole
%% conf, collecting per-value errors.
-spec value_sub(cuttlefish_conf:conf()) ->
    {cuttlefish_conf:conf(), [cuttlefish_error:error()]}.
value_sub(Conf) ->
    lists:foldr(
      fun({Var, Val}, {Acc, ErrorAcc}) ->
              case value_sub(Var, Val, Conf) of
                  {error, _E} = Error ->
                      {Acc, [Error|ErrorAcc]};
                  {NewVal, _NewConf} ->
                      {[{Var, NewVal}|Acc], ErrorAcc}
              end
      end, {[],[]}, Conf).

-spec value_sub(cuttlefish_variable:variable(), string(), cuttlefish_conf:conf()) ->
    {string(), cuttlefish_conf:conf()} | cuttlefish_error:error().
value_sub(Var, Value, Conf) ->
    value_sub(Var, Value, Conf, []).

%% Substitutes every "$(other.var)" occurrence in Value, recursively
%% expanding substituted values; History tracks the chain of variables
%% being expanded so circular references can be detected.
-spec value_sub(cuttlefish_variable:variable(), string(), cuttlefish_conf:conf(), [string()]) ->
    {string(), cuttlefish_conf:conf()} | cuttlefish_error:error().
value_sub(Var, Value, Conf, History) when is_list(Value) ->
    %% Check if history contains duplicates. if so error
    case erlang:length(History) == sets:size(sets:from_list(History)) of
        false ->
            {error, {circular_rhs, History}};
        _ ->
            case head_sub(Value) of
                none ->
                    %% no more "$(...)" left; done
                    {Value, Conf};
                {sub, NextVar, {SubFront, SubBack}} ->
                    case subbed_value(NextVar, Conf) of
                        undefined ->
                            {error, {substitution_missing_config,
                                     {cuttlefish_variable:format(Var), cuttlefish_variable:format(NextVar)}}};
                        SubVal ->
                            %% Do a sub-substitution, in case the substituted
                            %% value contains substitutions itself. Do this as
                            %% its own separate recursion so that circular
                            %% substitutions can be detected.
                            case value_sub(NextVar, SubVal, Conf, [Var|History]) of
                                {error, _} = Error ->
                                    Error;
                                {NewSubVal, NewConf} ->
                                    NewValue = SubFront ++ NewSubVal ++ SubBack,
                                    value_sub(Var, NewValue, NewConf, History)
                            end
                    end
            end
    end;
%% non-string values (already-typed defaults) pass through untouched
value_sub(_Var, Value, Conf, _History) ->
    {Value, Conf}.

%% Resolves a substitution variable: first from the conf proplist,
%% falling back to an OS environment variable of the same name.
-spec subbed_value(Var :: cuttlefish_variable:variable(),
                   Conf :: cuttlefish_conf:conf()) -> undefined | string().
subbed_value(Var, Conf) ->
    case proplists:get_value(Var, Conf) of
        undefined ->
            % we couldn't find the var in the conf file, let's
            % look at the environment and check for it
            case os:getenv(cuttlefish_variable:format(Var)) of
                false ->
                    undefined;
                Value ->
                    Value
            end;
        Value ->
            Value
    end.

%% Finds the first "$(name)" in Value. Returns none, or
%% {sub, TokenizedName, {TextBefore, TextAfter}}.
-spec head_sub(string()) -> none | {sub, cuttlefish_variable:variable(), {string(), string()}}.
head_sub(Value) ->
    %% L: 1-based position of "$(" (0 if absent)
    L = string:str(Value, ?LSUB),
    case L > 0 of
        false -> none;
        _ ->
            %% R: position of the matching ")" after "$("
            R = string:str(string:substr(Value, L + ?RSUBLEN), ?RSUB) + L,
            case L < R of
                false -> none;
                _ ->
                    Var = cuttlefish_variable:tokenize(string:strip(string:substr(Value, L+?LSUBLEN, R-L-?LSUBLEN))),
                    Front = string:substr(Value, 1, L-1),
                    Back = string:substr(Value, R+?RSUBLEN),
                    {sub, Var, {Front, Back}}
            end
    end.
%% If transform_type takes a list as first argument, foldm_either will
%% give us back an errorlist for a single error
%% @doc Tries to convert Value with each datatype in turn, returning
%% the first success, otherwise an errorlist of every failure.
-spec transform_type(cuttlefish_datatypes:datatype_list() | cuttlefish_datatypes:datatype(),
                     term()) ->
    {ok, term()} | cuttlefish_error:error() | cuttlefish_error:errorlist().
transform_type(DTs, Value) when is_list(DTs) ->
    foldm_either(fun(DT) -> transform_type(DT, Value) end, DTs);
transform_type(DT, Value) ->
    Supported = cuttlefish_datatypes:is_supported(DT),
    Extended = cuttlefish_datatypes:is_extended(DT),
    if Supported -> transform_supported_type(DT, Value);
       Extended -> transform_extended_type(DT, Value);
       true -> {error, {unsupported_type, DT}}
    end.

%% Converts Value with a plain datatype, turning exceptions from the
%% converter into tagged errors.
-spec transform_supported_type(cuttlefish_datatypes:datatype(), any()) ->
    {ok, term()} | cuttlefish_error:error().
transform_supported_type(DT, Value) ->
    try cuttlefish_datatypes:from_string(Value, DT) of
        {error, Message} -> {error, Message};
        NewValue -> {ok, NewValue}
    catch
        Class:Error ->
            {error, {transform_type_exception, {DT, {Class, Error}}}}
    end.

%% Extended datatypes like {integer, 5} require the converted value to
%% equal the specified acceptable value exactly.
-spec transform_extended_type(cuttlefish_datatypes:extended(), any()) ->
    {ok, term()} | cuttlefish_error:error().
transform_extended_type({DT, AcceptableValue}, Value) ->
    case transform_supported_type(DT, Value) of
        {ok, AcceptableValue} -> {ok, AcceptableValue};
        {ok, _NewValue} -> {error, {transform_type_unacceptable, {Value, AcceptableValue}}};
        {error, Term} -> {error, Term}
    end.

%% Ok, this is tricky
%% There are three scenarios we have to deal with:
%% 1. The mapping is there! -> return mapping
%% 2. The mapping is not there -> error
%% 3. The mapping is there, but the key in the schema contains a $.
%%    (fuzzy match)
find_mapping([H|_]=Variable, Mappings) when is_list(H) ->
    {HardMappings, FuzzyMappings} =  lists:foldl(
        fun(Mapping, {HM, FM}) ->
            VariableDef = cuttlefish_mapping:variable(Mapping),
            case {Variable =:= VariableDef, cuttlefish_variable:is_fuzzy_match(Variable, VariableDef)} of
                {true, _} -> {[Mapping|HM], FM};
                {_, true} -> {HM, [Mapping|FM]};
                _ -> {HM, FM}
            end
        end,
        {[], []},
        Mappings),
    %% The input to this function is massaged enough that you'll never see a hard mapping count > 1
    %% You might see more than one fuzzy match, there's really nothing to stop that.
    FVariable = cuttlefish_variable:format(Variable),
    case {length(HardMappings), length(FuzzyMappings)} of
        {1, _} -> hd(HardMappings);
        {0, 1} -> hd(FuzzyMappings);
        {0, 0} -> {error, {mapping_not_found, FVariable}};
        {X, Y} -> {error, {mapping_multiple, {FVariable, {X, Y}}}}
    end;
%% string form: tokenize first, then dispatch to the clause above
find_mapping(Variable, Mappings) ->
    find_mapping(cuttlefish_variable:tokenize(Variable), Mappings).

%% @doc Runs every applicable validator over the conf. Returns true
%% when all pass, otherwise the list of results including
%% {error, {validation, _}} tuples. Validators only run for mappings
%% that have a default or are present in the conf; undefined values
%% are treated as valid.
-spec run_validations(cuttlefish_schema:schema(), cuttlefish_conf:conf()) ->
    boolean()|list(cuttlefish_error:error()).
run_validations({_, Mappings, Validators}, Conf) ->
    Validations = lists:flatten([
        begin
            Vs = cuttlefish_mapping:validators(M, Validators),
            Value = proplists:get_value(cuttlefish_mapping:variable(M), Conf),
            [ begin
                Validator = cuttlefish_validator:func(V),
                case {Value, Validator(Value)} of
                    {undefined, _} -> true;
                    {_, true} ->
                        true;
                    _ ->
                        Error = {validation,
                                 {
                                   cuttlefish_variable:format(
                                     cuttlefish_mapping:variable(M)),
                                   cuttlefish_validator:description(V)
                                 }},
                        _ = ?LOG_ERROR(cuttlefish_error:xlate(Error)),
                        {error, Error}
                end
            end || V <- Vs]
        end || M <- Mappings,
               cuttlefish_mapping:validators(M) =/= [],
               cuttlefish_mapping:default(M) =/= undefined
                   orelse proplists:is_defined(cuttlefish_mapping:variable(M), Conf)
    ]),
    case lists:all(fun(X) -> X =:= true end, Validations) of
        true -> true;
        _ -> Validations
    end.
%% @doc Calls Fun on each element of the list until it returns {ok,
%% term()}, otherwise accumulates {error, term()} into a list,
%% wrapping in {errorlist, _} at the end.
-spec foldm_either(fun((term()) -> {ok, term()} | cuttlefish_error:errorlist()), list()) ->
    {ok, term()} | cuttlefish_error:errorlist().
foldm_either(Fun, List) ->
    foldm_either(Fun, List, []).

%% @doc Calls Fun on each element of the list until it returns {ok,
%% term()}, otherwise accumulates {error, term()} into a list,
%% wrapping in {errorlist, _} at the end.
-spec foldm_either(fun((term()) -> {ok, term()} | cuttlefish_error:error()), list(), list()) ->
    {ok, term()} | cuttlefish_error:errorlist().
foldm_either(_Fun, [], Acc) ->
    %% exhausted without success: errors in original order
    {errorlist, lists:reverse(Acc)};
foldm_either(Fun, [H|T], Acc) ->
    case Fun(H) of
        {ok, Result} -> {ok, Result};
        {error, _}=Error -> foldm_either(Fun, T, [Error|Acc])
    end.

-ifdef(TEST).

-define(XLATE(X), lists:flatten(cuttlefish_error:xlate(X))).

%% Values that fail datatype conversion must surface as a
%% transform_datatypes phase error.
bad_conf_test() ->
    Conf = [
        {["integer_thing"], "thirty_two"},
        {["enum_thing"], bad_enum_value},
        {["ip_thing"], "not an IP address"}
    ],
    Mappings = [
        cuttlefish_mapping:parse({mapping, "integer_thing", "to.int", [
                {datatype, integer}
            ]}),
        cuttlefish_mapping:parse({mapping, "enum_thing", "to.enum", [
                {datatype, {enum, [on, off]}}
            ]}),
        cuttlefish_mapping:parse({mapping, "ip_thing", "to.ip", [
                {datatype, ip}
            ]})
    ],
    Translations = [
        cuttlefish_translation:parse({translation, "to.enum", fun(_ConfConf) -> whatev end})
    ],
    NewConfig = map({Translations, Mappings, []}, Conf),
    io:format("NewConf: ~p~n", [NewConfig]),
    ?assertMatch({error, transform_datatypes, _}, NewConfig),
    ok.

%% Exercises strict defaults, overrides, and fuzzy ($name) defaults
%% expanded across the names observed in the conf.
add_defaults_test() ->
    Conf = [
        %%{["a","b","c"], "override"}, %% Specifically left out. Uncomment line to break test,
        {["a","c","d"], "override"},
        {["no","match"], "unchanged"},
        %%{"m.rk.x", "defined"}, %% since this is undefined no defaults should be created for "m",

        %% two matches on a name "ak" and "bk"
        {["n","ak","x"], "set_n_name_x"},
        {["n","bk","x"], "set_n_name_x2"},
        {["n","ck","y"], "set_n_name_y3"}
    ],
    Mappings = [
        %% First mapping, direct, not in .conf, will be default
        cuttlefish_mapping:parse({mapping, "a.b.c", "b.c", [
                {default, "q"}
            ]}),
        %% default is "l", but since "a.c.d" is in Conf, it will be "override"
        cuttlefish_mapping:parse({mapping, "a.c.d", "c.d", [
                {default, "l"}
            ]}),
        cuttlefish_mapping:parse({mapping, "m.$name.x", "some.proplist", [
                {default, "m_name_x"}
            ]}),
        cuttlefish_mapping:parse({mapping, "n.$name.x", "some.proplist", [
                {default, "n_name_x"}
            ]}),
        cuttlefish_mapping:parse({mapping, "n.$name.y", "some.proplist", [
                {default, "n_name_y"}
            ]}),
        cuttlefish_mapping:parse({mapping, "o.$name.z", "some.proplist", [
                {default, "o_name_z"},
                {include_default, "blue"}
            ]})
    ],
    DConf = add_defaults(Conf, Mappings),
    io:format("DConf: ~p~n", [DConf]),
    ?assertEqual(9, length(DConf)),
    ?assertEqual("q", proplists:get_value(["a","b","c"], DConf)),
    ?assertNotEqual("l", proplists:get_value(["a","c","d"], DConf)),
    ?assertEqual("override", proplists:get_value(["a","c","d"], DConf)),
    ?assertEqual("unchanged", proplists:get_value(["no","match"], DConf)),
    ?assertEqual("set_n_name_x", proplists:get_value(["n","ak","x"], DConf)),
    ?assertEqual("set_n_name_x2", proplists:get_value(["n","bk","x"], DConf)),
    ?assertEqual("n_name_x", proplists:get_value(["n","ck","x"], DConf)),
    ?assertEqual("n_name_y", proplists:get_value(["n","ak","y"], DConf)),
    ?assertEqual("n_name_y", proplists:get_value(["n","bk","y"], DConf)),
    ?assertEqual("set_n_name_y3", proplists:get_value(["n","ck","y"], DConf)),
    ?assertEqual(undefined, proplists:get_value(["o","blue","z"], DConf)),
    ok.
map_test() -> Schema = cuttlefish_schema:file("test/riak.schema"), Conf = conf_parse:file("test/riak.conf"), NewConfig = map(Schema, Conf), NewRingSize = proplists:get_value(ring_creation_size, proplists:get_value(riak_core, NewConfig)), ?assertEqual(32, NewRingSize), NewAAE = proplists:get_value(anti_entropy, proplists:get_value(riak_kv, NewConfig)), ?assertEqual({on,[debug]}, NewAAE), NewSASL = proplists:get_value(sasl_error_logger, proplists:get_value(sasl, NewConfig)), ?assertEqual(false, NewSASL), NewHTTP = proplists:get_value(http, proplists:get_value(riak_core, NewConfig)), ?assertEqual([{"10.0.0.1", 80}, {"127.0.0.1", 8098}], NewHTTP), NewPB = proplists:get_value(pb, proplists:get_value(riak_api, NewConfig)), ?assertEqual([], NewPB), NewHTTPS = proplists:get_value(https, proplists:get_value(riak_core, NewConfig)), ?assertEqual(undefined, NewHTTPS), ok. minimal_map_test() -> Schema = cuttlefish_schema:file("test/riak.schema"), Conf = [{["ring_size"], "32"}, {["anti_entropy"], "debug"}], NewConfig = minimal_map(Schema, Conf), ?assertEqual([{riak_core, [{ring_creation_size, 32}]},{riak_kv,[{anti_entropy, {on, [debug]}}]}], lists:sort(NewConfig)). apply_mappings_test() -> %% Two mappings, both alike in dignity, %% In fair unit test, where we lay our scene, %% From ancient failure break to new mutiny, %% Where civil overrides makes civil priority unclean. %% From forth the fatal loins of these two foes %% A pair of star-cross'd mappings write one app var; %% Whose misadventured piteous overthrows %% Do with their merge behave unexpectedly. %% Assume add_defaults has already run Conf = [ {["conf", "key1"], "1"}, {["conf", "key2"], "2"} ], Mappings = [ cuttlefish_mapping:parse({ mapping, "conf.key1", "erlang.key", [ {default, "1"} ] }), cuttlefish_mapping:parse({ mapping, "conf.key2", "erlang.key", [ {default, "2"} ] }) ], {DirectMappings, []} = apply_mappings({[], Mappings, []}, Conf), cuttlefish_unit:assert_config(DirectMappings, "erlang.key", "1"), ok. 
%% find_mapping/2: an exact (hard) match beats a fuzzy ($matched)
%% match; a fuzzy mapping matches any value in the wildcard segment;
%% no match at all yields a not_found error term.
find_mapping_test() ->
    Mappings = [
        cuttlefish_mapping:parse({mapping, "variable.with.fixed.name", "", [{ default, 0}]}),
        cuttlefish_mapping:parse({mapping, "variable.with.$matched.name", "", [{ default, 1}]})
    ],
    io:format("Mappings: ~p~n", [Mappings]),
    %% exact match, looked up by tokenized variable and by string
    ?assertEqual(
        ["variable","with","fixed","name"],
        cuttlefish_mapping:variable(find_mapping(["variable","with","fixed","name"], Mappings))
    ),
    ?assertEqual(
        ["variable","with","fixed","name"],
        cuttlefish_mapping:variable(find_mapping("variable.with.fixed.name", Mappings))
    ),
    ?assertEqual(
        0,
        cuttlefish_mapping:default(find_mapping(["variable","with","fixed","name"], Mappings))
    ),
    %% fuzzy matches for various substituted segments
    ?assertEqual(
        ["variable","with","$matched","name"],
        cuttlefish_mapping:variable(find_mapping(["variable","with","A","name"], Mappings))
    ),
    ?assertEqual(
        1,
        cuttlefish_mapping:default(find_mapping(["variable","with","A","name"], Mappings))
    ),
    ?assertEqual(
        ["variable","with","$matched","name"],
        cuttlefish_mapping:variable(find_mapping(["variable","with","B","name"], Mappings))
    ),
    ?assertEqual(
        1,
        cuttlefish_mapping:default(find_mapping(["variable","with","B","name"], Mappings))
    ),
    ?assertEqual(
        ["variable","with","$matched","name"],
        cuttlefish_mapping:variable(find_mapping(["variable","with","C","name"], Mappings))
    ),
    ?assertEqual(
        1,
        cuttlefish_mapping:default(find_mapping(["variable","with","C","name"], Mappings))
    ),
    ?assertEqual(
        ["variable","with","$matched","name"],
        cuttlefish_mapping:variable(find_mapping(["variable","with","D","name"], Mappings))
    ),
    ?assertEqual(
        1,
        cuttlefish_mapping:default(find_mapping(["variable","with","D","name"], Mappings))
    ),
    ?assertEqual(
        ["variable","with","$matched","name"],
        cuttlefish_mapping:variable(find_mapping(["variable","with","E","name"], Mappings))
    ),
    ?assertEqual(
        1,
        cuttlefish_mapping:default(find_mapping(["variable","with","E","name"], Mappings))
    ),
    %% Test variable name with dot
    ?assertEqual(
        "variable.with.E.F.name not_found",
        ?XLATE(find_mapping(["variable","with","E","F","name"], Mappings))
    ),
    %% Test variable name with escaped dot
    ?assertEqual(
        1,
        cuttlefish_mapping:default(find_mapping(["variable","with","E.F","name"], Mappings))
    ),
    ok.

%% In real life this should never happen, but if it does, I'd love a log message
multiple_hard_match_test() ->
    Mappings = [
        cuttlefish_mapping:parse({mapping, "variable.with.fixed.name", "", [{ default, 0}]}),
        cuttlefish_mapping:parse({mapping, "variable.with.fixed.name", "", [{ default, 1}]})
    ],
    ?assertEqual(
        "2 hard mappings and 0 fuzzy mappings found for variable.with.fixed.name",
        ?XLATE(find_mapping(["variable","with","fixed","name"], Mappings))
    ),
    ok.

%% A translation whose only mapping has a default is kept (nothing to
%% drop) even when the .conf provides no value.
apply_mappings_translations_dropped_correctly_test() ->
    Fun = fun(X) -> X end,
    ?assertEqual(1, Fun(1)), %% coverage kludge
    Translations = [
        cuttlefish_translation:parse({
            translation, "mapping.name", Fun
        })
    ],
    Mappings = [
        cuttlefish_mapping:parse({
            mapping, "conf.key", "mapping.name", [{default, 6}]
        })
    ],
    %% So, we have a translation for the corresponding mapping, but that mapping has no default
    {_DirectMappings, TranslationsToDrop} = apply_mappings({Translations, Mappings, []}, []),
    ?assertEqual([], TranslationsToDrop),
    ok.

%% As above, but with a second mapping (no default) sharing the same
%% translation target: the translation must still not be dropped.
apply_mappings_translations_dropped_correctly_mixed_test() ->
    Fun = fun(X) -> X end,
    ?assertEqual(1, Fun(1)), %% coverage kludge
    Translations = [
        cuttlefish_translation:parse({
            translation, "mapping.name", Fun
        })
    ],
    Mappings = [
        cuttlefish_mapping:parse({
            mapping, "conf.key", "mapping.name", [{default, 6}]
        }),
        cuttlefish_mapping:parse({
            mapping, "conf.key2", "mapping.name", []
        })
    ],
    %% One valid mapping, and one that should be dropped (no default)
    {_DirectMappings, TranslationsToDrop} = apply_mappings({Translations, Mappings, []}, []),
    ?assertEqual([], TranslationsToDrop),
    ok.
apply_mappings_translations_dropped_correctly_mixed2_test() -> Fun = fun(X) -> X end, ?assertEqual(1, Fun(1)), %% coverage kludge Translations = [ cuttlefish_translation:parse({ translation, "mapping.name", Fun }) ], Mappings = [ cuttlefish_mapping:parse({ mapping, "conf.key", "mapping.name", [{default, 6}] }), cuttlefish_mapping:parse({ mapping, "conf.key2", "mapping.name", [] }), cuttlefish_mapping:parse({ mapping, "conf.key3", "mapping.name", [] }) ], %% One valid mapping and two that should be dropped (no default) {_DirectMappings, TranslationsToDrop} = apply_mappings({Translations, Mappings, []}, []), ?assertEqual([], TranslationsToDrop), ok. transform_datatypes_not_found_test() -> Mappings = [ cuttlefish_mapping:parse({ mapping, "conf.key", "erlang.key", [] }) ], Conf = [ {["conf", "other"], "string"} ], NewConf = transform_datatypes(Conf, Mappings, []), ?assertEqual({[], [{error, {unknown_variable, "conf.other"}}]}, NewConf), ok. transform_datatypes_allowed_not_found_test() -> Mappings = [ cuttlefish_mapping:parse({ mapping, "conf.key", "erlang.key", [] }) ], Conf = [ {["conf", "other"], "string"} ], NewConf = transform_datatypes(Conf, Mappings, [{allow_extra, true}]), ?assertEqual({[], []}, NewConf), ok. validation_test() -> Pid = self(), Mappings = [cuttlefish_mapping:parse( {mapping, "a", "b.c", [{validators, ["a"]}, {datatype, {enum, [true, false]}}]} ) ], Validators = [cuttlefish_validator:parse( {validator, "a", "error msg", fun(X) -> Pid ! X, true end} ) ], Conf = [ {["a"], true} ], AppConf = map({[], Mappings, Validators}, Conf), receive X -> ?assert(X) after 1000 -> ?assert(false) end, ?assertEqual([{b, [{c, true}]}], AppConf), ok. throw_unset_test() -> Mappings = [cuttlefish_mapping:parse({mapping, "a", "b.c", []})], Translations = [cuttlefish_translation:parse( {translation, "b.c", fun(_X) -> cuttlefish:unset() end}) ], AppConf = map({Translations, Mappings, []}, []), ?assertEqual([], AppConf), ok. 
bad_prefix_match_test() -> Prefixes = [ {["prefix", "one"], ["one", "two"]}, {["prefix", "two"], ["one", "two"]} ], Conf = [], Default = 8, VariableDef = ["prefix", "one", "other_thing", "$name"], ?assertEqual([], add_fuzzy_default(Prefixes, Conf, Default, VariableDef)), ok. assert_extended_datatype( Datatype, Setting, Expected) -> Mappings = [ cuttlefish_mapping:parse({mapping, "a.b", "e.k", [ {datatype, Datatype} ]}) ], Conf = [ {["a","b"], Setting} ], Actual = map({[], Mappings, []}, Conf), case Expected of {error, Phase, EMsg} -> ?assertMatch({error, Phase, _}, Actual), ?assertEqual(EMsg, ?XLATE(hd(element(2, element(3, Actual))))); _ -> ?assertEqual([{e, [{k, Expected}]}], map({[], Mappings, []}, Conf)) end, ok. extended_datatypes_test() -> assert_extended_datatype([integer, {atom, never}], "1", 1), assert_extended_datatype([integer, {atom, never}], "never", never), assert_extended_datatype([integer, {atom, never}], "always", {error, transform_datatypes, "Error transforming datatype for: a.b"}), assert_extended_datatype([{duration, s}, {atom, never}], "never", never), assert_extended_datatype([{atom, never}, integer], "1", 1), assert_extended_datatype([{enum, [never, always]}, {duration, s}], "1s", 1), assert_extended_datatype([{atom, never}, {atom, always}], "foo", {error, transform_datatypes, "Error transforming datatype for: a.b"}), ok. not_found_test() -> Mappings = [cuttlefish_mapping:parse({mapping, "a", "b.c", []})], Translations = [cuttlefish_translation:parse( {translation, "b.c", fun(Conf) -> cuttlefish:conf_get("d", Conf) end}) ], AppConf = map({Translations, Mappings, []}, [{["a"], "foo"}]), ?assertEqual({error, apply_translations, {errorlist, [{error, {translation_missing_setting, {"b.c", "d"}}}]}}, AppConf). 
invalid_test() -> Mappings = [cuttlefish_mapping:parse({mapping, "a", "b.c", []})], Translations = [cuttlefish_translation:parse( {translation, "b.c", fun(_Conf) -> cuttlefish:invalid("review all files") end}) ], AppConf = map({Translations, Mappings, []}, [{["a"], "foo"}]), ?assertEqual({error, apply_translations, {errorlist, [{error, {translation_invalid_configuration, {"b.c", "review all files"}}}]}}, AppConf). value_env_sub_test() -> os:putenv("ENV_TEST_VAL", "/a/b"), Conf = [ {["a","b","c"], "$(ENV_TEST_VAL)/c"}, {["a","b"], "/b/a"} ], {NewConf, Errors} = value_sub(Conf), os:unsetenv("ENV_TEST_VAL"), ?assertEqual([], Errors), ABC = proplists:get_value(["a","b","c"], NewConf), ?assertEqual("/a/b/c", ABC), ok. value_sub_test() -> Conf = [ {["a","b","c"], "$(a.b)/c"}, {["a","b"], "/a/b"} ], {NewConf, Errors} = value_sub(Conf), ?assertEqual([], Errors), ABC = proplists:get_value(["a","b","c"], NewConf), ?assertEqual("/a/b/c", ABC), ok. value_sub_infinite_loop_test() -> Conf = [ {["a"], "$(c)/d"}, {["b"], "$(a)/d"}, {["c"], "$(b)/d"} ], {_NewConf, Errors} = value_sub(Conf), ?assertEqual( "Circular RHS substitutions: [[\"a\"],[\"b\"],[\"c\"],[\"a\"]]", ?XLATE(hd(Errors)) ), ?assertEqual( "Circular RHS substitutions: [[\"b\"],[\"c\"],[\"a\"],[\"b\"]]", ?XLATE(hd(tl(Errors))) ), ?assertEqual( "Circular RHS substitutions: [[\"c\"],[\"a\"],[\"b\"],[\"c\"]]", ?XLATE(hd(tl(tl(Errors)))) ), ok. value_sub_not_found_test() -> Conf = [ {["a"], "$(b)/c"} ], {_NewConf, Errors} = value_sub(Conf), ?assertEqual( "'a' substitution requires a config variable 'b' to be set", ?XLATE(hd(Errors)) ), ok. 
value_sub_whitespace_test() -> Conf = [ {["a", "b", "c"], "/tyktorp"}, {["a"], "$(a.b.c)/svagen"}, {["b"], "$( a.b.c)/svagen"}, {["c"], "$(a.b.c )/svagen"}, {["d"], "$( a.b.c )/svagen"} ], {NewConf, []} = value_sub(Conf), ?assertEqual("/tyktorp/svagen", proplists:get_value(["a"], NewConf)), ?assertEqual("/tyktorp/svagen", proplists:get_value(["b"], NewConf)), ?assertEqual("/tyktorp/svagen", proplists:get_value(["c"], NewConf)), ?assertEqual("/tyktorp/svagen", proplists:get_value(["d"], NewConf)), ok. value_sub_multiple_sub_test() -> Conf = [ {["a"], "/a"}, {["b"], "/b"}, {["c"], "$(a)$(b)"} ], {NewConf, []} = value_sub(Conf), ?assertEqual("/a/b", proplists:get_value(["c"], NewConf)), ok. value_sub_error_in_second_sub_test() -> Conf = [ {["a"], "$(b)/$(c)"}, {["b"], "/b"}, {["c"], "$(a)/c"} ], {_NewConf, Errors} = value_sub(Conf), ?assertEqual( "Circular RHS substitutions: [[\"a\"],[\"c\"],[\"a\"]]", ?XLATE(hd(Errors)) ), ?assertEqual( "Circular RHS substitutions: [[\"c\"],[\"a\"],[\"c\"]]", ?XLATE(hd(tl(Errors))) ), ok. value_sub_false_circle_test() -> Conf = [ {["a"], "$(c)/$(c)"}, {["c"], "C"} ], {NewConf, Errors} = value_sub(Conf), ?assertEqual([], Errors), ?assertEqual("C/C", proplists:get_value(["a"], NewConf)), ok. value_sub_paren_test() -> Conf = [ {["a"], "$(c)/$(c)"}, {["c"], "C)"} ], {NewConf, Errors} = value_sub(Conf), ?assertEqual([], Errors), ?assertEqual("C)/C)", proplists:get_value(["a"], NewConf)), ok. -endif. cuttlefish-3.0.1/src/cuttlefish_schema.erl0000644000232200023220000004515314027401005021221 0ustar debalancedebalance%% ------------------------------------------------------------------- %% %% cuttlefish_schema: slurps schema files %% %% Copyright (c) 2013 Basho Technologies, Inc. All Rights Reserved. %% %% This file is provided to you under the Apache License, %% Version 2.0 (the "License"); you may not use this file %% except in compliance with the License. 
%% You may obtain
%% a copy of the License at
%%
%%   http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing,
%% software distributed under the License is distributed on an
%% "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
%% KIND, either express or implied. See the License for the
%% specific language governing permissions and limitations
%% under the License.
%%
%% -------------------------------------------------------------------
-module(cuttlefish_schema).

-include_lib("kernel/include/logger.hrl").

-ifdef(TEST).
-include_lib("eunit/include/eunit.hrl").
-export([file/1]).
-endif.

-export([files/1, strings/1]).

%% Exported for unit testing in other projects
-export([merger/1, string_fun_factory/0]).

-type schema() :: {
    [cuttlefish_translation:translation()],
    [cuttlefish_mapping:mapping()],
    [cuttlefish_validator:validator()]}.
-export_type([schema/0]).

%% @doc Parse and merge a list of schema files, given in priority order.
-spec files([string()]) -> schema() | cuttlefish_error:errorlist().
files(ListOfSchemaFiles) ->
    merger(fun file/2, ListOfSchemaFiles).

%% @doc Parse and merge a list of schema strings, given in priority order.
-spec strings([string()]) -> schema() | cuttlefish_error:errorlist().
strings(ListOfStrings) ->
    merger(fun string/2, ListOfStrings).

-spec merger(fun((string(), schema()) -> schema() | cuttlefish_error:errorlist()),
             [string()]) -> schema() | cuttlefish_error:errorlist().
merger(Fun, ListOfInputs) ->
    merger([ {Fun, Input} || Input <- ListOfInputs ]).

%% @doc Folds the {ParseFun, Input} pairs from right to left (lowest
%% priority first) so later inputs in the list are overridden by
%% earlier ones, then runs filter/1 over the merged schema.
-spec merger([{fun((string(), schema()) -> schema() | cuttlefish_error:errorlist()),
               string()}]) -> schema() | cuttlefish_error:errorlist().
merger(ListOfFunInputPairs) ->
    Schema = lists:foldr(
        %% BUGFIX: once any input has produced {errorlist, _}, the
        %% accumulator is no longer a 3-tuple. The previous version
        %% only matched {TranslationAcc, MappingAcc, ValidatorAcc},
        %% so every remaining input crashed the fold with a
        %% function_clause instead of propagating the errors.
        fun(_, {errorlist, _} = Errors) ->
                Errors;
           ({Fun, Input}, {TranslationAcc, MappingAcc, ValidatorAcc}) ->
                case Fun(Input, {TranslationAcc, MappingAcc, ValidatorAcc}) of
                    {errorlist, Errors} ->
                        %% These have already been logged. We're not
                        %% moving forward with this but, return them
                        %% anyway so the rebar plugin can display them
                        {errorlist, Errors};
                    {Translations, Mappings, Validators} ->
                        NewMappings = lists:foldr(
                            fun cuttlefish_mapping:replace/2,
                            MappingAcc, Mappings),
                        NewTranslations = lists:foldr(
                            fun cuttlefish_translation:replace/2,
                            TranslationAcc, Translations),
                        NewValidators = lists:foldr(
                            fun cuttlefish_validator:replace/2,
                            ValidatorAcc, Validators),
                        {NewTranslations, NewMappings, NewValidators}
                end
        end,
        {[], [], []},
        ListOfFunInputPairs),
    filter(Schema).

%% This filter is *ONLY* for the case of multiple mappings to a single
%% erlang app setting, *AND* there's no corresponding translation for
%% that app setting
-spec filter(schema() | cuttlefish_error:errorlist()) ->
    schema() | cuttlefish_error:errorlist().
filter({errorlist, Errorlist}) ->
    {errorlist, Errorlist};
filter({Translations, Mappings, Validators}) ->
    Counts = count_mappings(Mappings),
    {MappingsToCheck, _} = lists:unzip(Counts),
    NewMappings = lists:foldl(
        fun(MappingName, Acc) ->
            case lists:any(
                   fun(T) -> cuttlefish_translation:mapping(T) =:= MappingName end,
                   Translations) of
                false ->
                    %% no translation combines these; only the highest
                    %% priority mapping may survive
                    cuttlefish_mapping:remove_all_but_first(MappingName, Acc);
                _ -> Acc
            end
        end,
        Mappings,
        MappingsToCheck),
    {Translations, NewMappings, Validators}.

%% Count how many mappings target each erlang app setting.
count_mappings(Mappings) ->
    lists:foldl(
        fun(M, Acc) ->
            orddict:update_counter(cuttlefish_mapping:mapping(M), 1, Acc)
        end,
        orddict:new(),
        Mappings).

%% @doc Read a schema file (also works from .ez archives via
%% erl_prim_loader) and merge it into the accumulated schema.
-spec file(string(), schema()) -> schema() | cuttlefish_error:errorlist().
file(Filename, Schema) ->
    {ok, B, _} = erl_prim_loader:get_file(filename:absname(Filename)),
    %% latin-1 is easier to support generically. We'll revisit utf-8
    %% support in the future.
    S = unicode:characters_to_list(B, latin1),
    case string(S, Schema) of
        {errorlist, Errors} ->
            cuttlefish_error:print("Error parsing schema: ~s", [Filename]),
            {errorlist, Errors};
        NewSchema -> NewSchema
    end.
%% @doc this exists so that we can create the fun using non exported %% functions for unit testing -spec string_fun_factory() -> fun((string(), schema()) -> schema() | cuttlefish_error:errorlist()). string_fun_factory() -> fun string/2. -spec string(string(), schema()) -> schema() | cuttlefish_error:errorlist(). string(S, {T, M, V}) -> case erl_scan:string(S) of {ok, Tokens, _} -> CommentTokens = erl_comment_scan:string(S), {Translations, Mappings, Validators, Errors} = parse_schema(Tokens, CommentTokens, {T, M, V, []}), case length(Errors) of 0 -> {Translations, Mappings, Validators}; _ -> lists:foreach(fun({error, _Term}=E) -> cuttlefish_error:print(E) end, Errors), {errorlist, Errors} end; {error, {Line, erl_scan, _}, _} -> Error = {erl_scan, Line}, ErrStr = cuttlefish_error:xlate(Error), _ = ?LOG_ERROR(lists:flatten(ErrStr)), {errorlist, [{error, Error}]} end. -spec parse_schema( [any()], [any()], {[cuttlefish_translation:translation()], [cuttlefish_mapping:mapping()], [cuttlefish_validator:validator()], [cuttlefish_error:error()]} ) -> {[cuttlefish_translation:translation()], [cuttlefish_mapping:mapping()], [cuttlefish_validator:validator()], [cuttlefish_error:error()]}. %% We're done! 
We don't care about any comments after the last schema item parse_schema([], _LeftoverComments, {TAcc, MAcc, VAcc, EAcc}) -> {lists:reverse(TAcc), lists:reverse(MAcc), lists:reverse(VAcc), lists:reverse(EAcc)}; parse_schema(ScannedTokens, CommentTokens, {TAcc, MAcc, VAcc, EAcc}) -> {LineNo, Tokens, TailTokens } = parse_schema_tokens(ScannedTokens), {Comments, TailComments} = lists:foldr( fun(X={CommentLineNo, _, _, Comment}, {C, TC}) -> case CommentLineNo < LineNo of true -> {Comment ++ C, TC}; _ -> {C, [X|TC]} end end, {[], []}, CommentTokens), NewAcc = case parse(Tokens) of {error, {erl_parse, Reason}} -> {TAcc, MAcc, VAcc, [{error, {erl_parse, {Reason, LineNo}}} | EAcc]}; {mapping, {mapping, Variable, Mapping, Proplist}} -> Attributes = comment_parser(Comments), Doc = proplists:get_value(doc, Attributes, []), See = get_see(Attributes), MappingSource = {mapping, Variable, Mapping, [{see, See},{doc, Doc}|Proplist]}, {TAcc, cuttlefish_mapping:parse_and_merge(MappingSource, MAcc), VAcc, EAcc}; {translation, Return} -> {cuttlefish_translation:parse_and_merge(Return, TAcc), MAcc, VAcc, EAcc}; {validator, Return} -> {TAcc, MAcc, cuttlefish_validator:parse_and_merge(Return, VAcc), EAcc}; Other -> {TAcc, MAcc, VAcc, [{error, {parse_schema, Other}} | EAcc]} end, parse_schema(TailTokens, TailComments, NewAcc). parse_schema_tokens(Scanned) -> parse_schema_tokens(Scanned, []). parse_schema_tokens([], Acc=[Last|_]) -> %% When you've reached the end of file without encountering a dot, %% return the result anyway and let erl_parse produce the error. {element(2, Last), lists:reverse(Acc), []}; parse_schema_tokens(Scanned, Acc=[{dot, LineNo}|_]) -> {LineNo, lists:reverse(Acc), Scanned}; parse_schema_tokens([H|Scanned], Acc) -> parse_schema_tokens(Scanned, [H|Acc]). -spec parse(list()) -> { mapping | translation | validator, tuple()} | cuttlefish_error:error(). 
parse(Scanned) -> case erl_parse:parse_exprs(Scanned) of {ok, Parsed} -> {value, X, _} = erl_eval:exprs(Parsed,[]), {element(1, X), X}; {error, {_Line, erl_parse, [H|_T]=Strings}} when is_list(H) -> {error, {erl_parse, lists:flatten(Strings)}}; {error, {_Line, erl_parse, Term}} -> {error, {erl_parse, io_lib:format("~p", [Term])}}; E -> {error, {erl_parse_unexpected, E}} end. -spec get_see([proplists:property()]) -> [cuttlefish_variable:variable()]. get_see(Proplist) -> [ cuttlefish_variable:tokenize(Line) || [Line] <- proplists:get_all_values(see, Proplist)]. comment_parser(Comments) -> StrippedComments = lists:filter(fun(X) -> X =/= [] end, [percent_stripper(C) || C <- Comments]), %% now, let's go annotation hunting AttrList = lists:foldl( fun(Line, Acc) -> case {Line, Acc} of {[ $@ | T], _} -> Annotation = hd(string:tokens(T, [$\s])), [{list_to_atom(Annotation), [percent_stripper(T -- Annotation)] }|Acc]; { _, []} -> []; {String, _} -> [{Annotation, Strings}|T] = Acc, [{Annotation, [String|Strings]}|T] end end, [], StrippedComments), SortedList = lists:reverse([ {Attr, lists:reverse(Value)} || {Attr, Value} <- AttrList]), CorrectedList = attribute_formatter(SortedList), CorrectedList. %% Just handles the @doc business attribute_formatter([Other | T]) -> [ Other | attribute_formatter(T)]; attribute_formatter([]) -> []. percent_stripper(Line) -> percent_stripper_r(percent_stripper_l(Line)). percent_stripper_l([$%|T]) -> percent_stripper_l(T); percent_stripper_l([$\s|T]) -> percent_stripper_l(T); percent_stripper_l(Line) -> Line. percent_stripper_r(Line) -> lists:reverse( percent_stripper_l( lists:reverse(Line))). -ifdef(TEST). -define(XLATE(X), lists:flatten(cuttlefish_error:xlate(X))). %% Test helpers -spec file(string()) -> schema() | cuttlefish_error:errorlist(). file(Filename) -> file(Filename, {[], [], []}). -spec string(string()) -> schema() | cuttlefish_error:errorlist(). string(S) -> string(S, {[], [], []}). 
percent_stripper_test() -> ?assertEqual("hi!", percent_stripper("%%% hi!")), ?assertEqual("hi!", percent_stripper("%% hi!")), ?assertEqual("hi!", percent_stripper("% hi!")), ?assertEqual("hi!", percent_stripper(" hi!")), ?assertEqual("hi!", percent_stripper(" % % hi!")), ?assertEqual("hi!", percent_stripper("% % % hi!")), ?assertEqual("hi!", percent_stripper("% % % hi! % % %")), ok. comment_parser_test() -> Comments = [ " ", "%% @doc this is a sample doc", "%% it spans multiple lines %%", "", "%% there can be line breaks", "%% @datatype enum on, off", "%% @advanced", "%% @include_default name_substitution", "%% @mapping riak_kv.anti_entropy", "%% @see mapping.a", "%% @see mapping.b" ], ParsedComments = comment_parser(Comments), ?assertEqual(["this is a sample doc", "it spans multiple lines", "there can be line breaks"], proplists:get_value(doc, ParsedComments)), ?assertEqual([["mapping.a"], ["mapping.b"]], proplists:get_all_values(see, ParsedComments)), ok. bad_file_test() -> _ = cuttlefish_test_logging:set_up(), _ = cuttlefish_test_logging:bounce(), {errorlist, ErrorList} = file("test/bad_erlang.schema"), Logs = cuttlefish_test_logging:get_logs(), [L1|Tail] = Logs, [L2|[]] = Tail, ?assertMatch({match, _}, re:run(L1, "Error scanning erlang near line 10")), ?assertMatch({match, _}, re:run(L2, "Error parsing schema: test/bad_erlang.schema")), ?assertEqual([ {error, {erl_scan, 10}} ], ErrorList), ok. 
parse_invalid_erlang_test() -> _ = cuttlefish_test_logging:set_up(), _ = cuttlefish_test_logging:bounce(), SchemaString = lists:flatten([ "%% @doc some doc\n", "%% the doc continues!\n", "{mapping, \"ring_size\", \"riak_core.ring_creation_size\", [\n", " {datatype, penguin}" "}.\n" ]), Parsed = string(SchemaString), [Log] = cuttlefish_test_logging:get_logs(), ?assertMatch({match, _}, re:run(Log, "Schema parse error near line number 4")), ?assertMatch({match, _}, re:run(Log, "syntax error before: ")), ?assertMatch({match, _}, re:run(Log, "'}'")), ?assertEqual({errorlist, [{error, {erl_parse, {"syntax error before: '}'", 4}}}]}, Parsed). parse_bad_datatype_test() -> _ = cuttlefish_test_logging:set_up(), _ = cuttlefish_test_logging:bounce(), SchemaString = lists:flatten([ "%% @doc some doc\n", "%% the doc continues!\n", "{mapping, \"ring_size\", \"riak_core.ring_creation_size\", [\n", " {default, \"blue\"}, ", " {datatype, penguin}" "]}.\n" ]), _Parsed = string(SchemaString), ?assertEqual([], cuttlefish_test_logging:get_logs()). files_test() -> %% files/1 takes a list of schemas in priority order. 
%% Loads them in reverse order, as things are overridden {Translations, Mappings, Validators} = files( [ "test/multi1.schema", "test/multi2.schema", "test/multi3.schema" ]), ?assertEqual(6, length(Mappings)), [M1, M2, M3, M4, M5, M6] = Mappings, %% Check mappings in correct order io:format("~p", [Mappings]), ?assertEqual(["top_level", "var1"], cuttlefish_mapping:variable(M1)), ?assertEqual(["a", "some", "var1"], cuttlefish_mapping:variable(M2)), ?assertEqual(["a", "some", "var2"], cuttlefish_mapping:variable(M3)), ?assertEqual(["a", "some", "var3"], cuttlefish_mapping:variable(M4)), ?assertEqual(["b", "some", "var1"], cuttlefish_mapping:variable(M5)), ?assertEqual(["b", "some", "var2"], cuttlefish_mapping:variable(M6)), %% Check correct mapping overrides ?assertEqual("app_a.big_var", cuttlefish_mapping:mapping(M1)), ?assertEqual("app_a.some_var1", cuttlefish_mapping:mapping(M2)), ?assertEqual("app_a.some_var", cuttlefish_mapping:mapping(M3)), ?assertEqual("app_a.some_var3", cuttlefish_mapping:mapping(M4)), ?assertEqual("app_b.some_var3", cuttlefish_mapping:mapping(M5)), ?assertEqual("app_b.some_var2", cuttlefish_mapping:mapping(M6)), ?assertEqual(6, length(Translations)), [T1, T2, T3, T4, T5, T6] = Translations, %% Check translation overrides AssertTran = fun(Mapping, Translation, Expected) -> %% Check Order ?assertEqual(Mapping, cuttlefish_translation:mapping(Translation)), %% Check Override F1 = cuttlefish_translation:func(Translation), ?assertEqual(Expected, F1(x)) end, AssertTran("app_a.big_var", T1, "tippedy top"), AssertTran("app_a.some_var1", T2, "a1"), AssertTran("app_a.some_var2", T3, "a2"), AssertTran("app_a.some_var3", T4, "toplevel"), AssertTran("app_b.some_var1", T5, "b3"), AssertTran("app_b.some_var2", T6, "b2"), %% One more time, for validators! 
?assertEqual(5, length(Validators)), [V1, V2, V3, V4, V5] = Validators, %% Now check overrides AssertVal = fun(Name, Validator, Expected) -> %% Check Order ?assertEqual(Name, cuttlefish_validator:name(Validator)), %% Check Override F1 = cuttlefish_validator:func(Validator), ?assertEqual(Expected, F1(x)) end, AssertVal("top.val", V1, false), AssertVal("a.validator1", V2, true), AssertVal("a.validator2", V3, false), AssertVal("b.validator1", V4, false), AssertVal("b.validator2", V5, true), ok. get_see_test() -> Proplist = [ {doc, ["line1", "line2", "line3"]}, {see, ["a.b"]}, {see, ["a.c"]} ], ?assertEqual([["a","b"],["a","c"]], get_see(Proplist)), ok. see_test() -> String = "{mapping, \"a.b\", \"e.k\", []}.\n" ++ "%% @see a.b\n" ++ "{mapping, \"a.c\", \"e.j\", []}.\n", {_, Mappings, _} = strings([String]), ?assertEqual(2, length(Mappings)), [M1, M2] = Mappings, ?assertEqual([], cuttlefish_mapping:see(M1)), ?assertEqual([["a", "b"]], cuttlefish_mapping:see(M2)), ok. strings_filtration_test() -> String = "{mapping, \"a.b\", \"e.k\", []}.\n" ++ "{mapping, \"a.c\", \"e.k\", []}.\n" ++ "{mapping, \"a.d\", \"e.j\", []}.\n" ++ "{mapping, \"a.e\", \"e.j\", []}.\n" ++ "{translation, \"e.j\", fun(X) -> \"1\" end}.\n" ++ "{mapping, \"b.a\", \"e.i\", []}.\n" ++ "{mapping, \"b.b\", \"e.i\", []}.\n" ++ "{mapping, \"b.c\", \"e.i\", []}.\n" ++ "{translation, \"e.i\", fun(X) -> \"1\" end}.\n", {Translations, Mappings, _} = strings([String]), ?assertEqual(2, length(Translations)), ?assertEqual(6, length(Mappings)), ?assertEqual(["a", "b"], cuttlefish_mapping:variable(hd(Mappings))), ?assertEqual(["b", "b"], cuttlefish_mapping:variable(lists:nth(5, Mappings))), ok. 
error_test() -> {ErrorAtom, Errors} = strings(["tyktorp"]), io:format("~p", [Errors]), ?assertEqual(errorlist, ErrorAtom), {errorlist, [{error, Error}]} = strings(["{mapping, \"a\", [{datatype, unsupported_datatype}]}."]), ?assertEqual( "Unknown parse return: {mapping,\n {mapping,\"a\",[{datatype,unsupported_datatype}]}}", ?XLATE(Error)), ok. merge_across_multiple_schemas_test() -> StringSchema1 = "{mapping, \"a.b\", \"erlang.key\", [merge, {default, on}]}.", StringSchema2 = "%%@doc hi\n{mapping, \"a.b\", \"erlang.key\", [{default, off}, {datatype, flag}]}.", {_, Mappings, _} = strings([StringSchema1, StringSchema2]), ?assertEqual(1, length(Mappings)), [Mapping] = Mappings, ?assertEqual([flag], cuttlefish_mapping:datatype(Mapping)), ?assertEqual(on, cuttlefish_mapping:default(Mapping)), ?assertEqual(["hi"], cuttlefish_mapping:doc(Mapping)), ok. -endif. cuttlefish-3.0.1/src/conf_parse.peg0000644000232200023220000001630214027401005017631 0ustar debalancedebalance%% ------------------------------------------------------------------- %% %% conf_parse: for all your .conf parsing needs. %% %% Copyright (c) 2013 Basho Technologies, Inc. All Rights Reserved. %% Copyright (c) 2019 Pivotal Software, Inc. All rights reserved. %% Copyright (c) 2020 VMware, Inc. or its affiliates. All rights reserved. %% %% This file is provided to you under the Apache License, %% Version 2.0 (the "License"); you may not use this file %% except in compliance with the License. You may obtain %% a copy of the License at %% %% http://www.apache.org/licenses/LICENSE-2.0 %% %% Unless required by applicable law or agreed to in writing, %% software distributed under the License is distributed on an %% "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY %% KIND, either express or implied. See the License for the %% specific language governing permissions and limitations %% under the License. 
%% %% ------------------------------------------------------------------- % ------------------------------------------------------------------- % NOTE: IMPORTANT % % After using neotoma to re-generate conf_parse.erl, you MUST % edit that file to change the exported file/1 function to this code: % % -spec file(file:name()) -> any(). % file(Filename) -> % AbsFilename = filename:absname(Filename), % case erl_prim_loader:get_file(AbsFilename) of % {ok, Bin, _} -> parse(Bin); % error -> {error, undefined} % end. % % The reason is that the above code allows for reading cuttlefish % schemas from .ez archives % ------------------------------------------------------------------- %% A configuration file may have zero-or-more lines. config <- line* %{ [ L || L <- Node, is_setting(L) ] %}; %% Lines are actual settings, includes, comments, or horizontal whitespace, %% terminated by an end-of-line or end-of-file. line <- ((setting / include / comment / ws+) (crlf / eof)) / crlf %{ case Node of [ Line, _EOL ] -> Line; Line -> Line end %}; %% A setting is a key and a value, joined by =, with surrounding %% whitespace ignored. setting <- ws* key ws* "=" ws* value ws* comment? %{ [ _, Key, _, _Eq, _, Value, _, _ ] = Node, {Key, Value} %}; %% A key is a series of dot-separated identifiers. key <- head:word tail:("." word)* %{ [{head, H}, {tail, T}] = Node, [unicode:characters_to_list(H)| [ unicode:characters_to_list(W) || [_, W] <- T]] %}; %% A value is any character, with trailing whitespace stripped. value <- (!((ws* crlf) / comment) .)+ %{ case unicode:characters_to_binary(Node, utf8, latin1) of {_Status, _Begining, _Rest} -> {error, {conf_to_latin1, line(Idx)}}; Bin -> binary_to_list(Bin) end %}; %% A comment is any line that begins with a # sign, leading whitespace %% allowed. comment <- ws* "#" (!crlf .)* `comment`; %% An include is a line that begins with 'include' and something. include <- ws* "include" ws* included_file_or_dir comment? 
%{ [_, _Include, _, Included, _] = Node, {include, Included} %}; included_file_or_dir <- [A-Za-z0-9-\_\.\*\/]+ %{ unicode:characters_to_binary(Node, utf8, latin1) %}; %% A word is one or more of letters, numbers and dashes or %% underscores. word <- ("\\." / [A-Za-z0-9_-])+ %{ unescape_dots(unicode:characters_to_list(Node)) %}; %% An end-of-line is signified by a line-feed with an optional %% preceding carriage-return. crlf <- "\r"? "\n" `ws`; %% The end-of-file is where no character matches. eof <- !. `ws`; %% Whitespace is either spaces or tabs. ws <- [ \t]+ `ws`; % Erlang code %{ %% ------------------------------------------------------------------- %% %% conf_parse: for all your .conf parsing needs. %% %% Copyright (c) 2013 Basho Technologies, Inc. All Rights Reserved. %% Copyright (c) 2019 Pivotal Software, Inc. All rights reserved. %% Copyright (c) 2020 VMware, Inc. or its affiliates. All rights reserved. %% %% This file is provided to you under the Apache License, %% Version 2.0 (the "License"); you may not use this file %% except in compliance with the License. You may obtain %% a copy of the License at %% %% http://www.apache.org/licenses/LICENSE-2.0 %% %% Unless required by applicable law or agreed to in writing, %% software distributed under the License is distributed on an %% "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY %% KIND, either express or implied. See the License for the %% specific language governing permissions and limitations %% under the License. %% %% ------------------------------------------------------------------- %% This module implements the parser for a sysctl-style %% configuration format. 
Example: %% %% ``` %% riak.local.node = riak@127.0.0.1 %% riak.local.http = 127.0.0.1:8098 %% riak.local.pb = 127.0.0.1:8087 %% riak.local.storage.backend = bitcask''' %% %% This would parse into the following flat proplist: %% %% ``` %% [{<<"riak.local.node">>,<<"riak@127.0.0.1">>}, %% {<<"riak.local.http">>,<<"127.0.0.1:8098">>}, %% {<<"riak.local.pb">>,<<"127.0.0.1:8087">>}, %% {<<"riak.local.storage.backend">>,<<"bitcask">>}]''' %% %% Other modules in this application interpret and validate the %% result of a successful parse. %% @end -define(line, true). -define(FMT(F,A), lists:flatten(io_lib:format(F,A))). -ifdef(TEST). -include_lib("eunit/include/eunit.hrl"). -endif. %% @doc Only let through lines that are not comments or whitespace. is_setting(ws) -> false; is_setting([ws]) -> false; is_setting(comment) -> false; is_setting(_) -> true. %% @doc Removes escaped dots from keys unescape_dots([$\\,$.|Rest]) -> [$.|unescape_dots(Rest)]; unescape_dots([]) -> []; unescape_dots([C|Rest]) -> [C|unescape_dots(Rest)]. -ifdef(TEST). file_test() -> Conf = conf_parse:file("test/riak.conf"), ?assertEqual([ {["ring_size"],"32"}, {["anti_entropy"],"debug"}, {["log","error","file"],"/var/log/error.log"}, {["log","console","file"],"/var/log/console.log"}, {["log","syslog"],"on"}, {["listener","http","internal"],"127.0.0.1:8098"}, {["listener","http","external"],"10.0.0.1:80"} ], Conf), ok. included_file_test() -> Conf = conf_parse:file("test/include_file.conf"), ?assertEqual([ {include,<<"riak.conf">>} ], Conf), ok. included_dir_test() -> Conf = conf_parse:file("test/include_dir.conf"), ?assertEqual([ {include,<<"conf.d/*.conf">>} ], Conf), ok. escaped_dots_are_removed_test() -> Conf = conf_parse:parse("#comment\nsetting\\.0 = thing0\n"), ?assertEqual([ {["setting.0"],"thing0"} ], Conf), ok. utf8_test() -> Conf = conf_parse:parse("setting = thing" ++ [338] ++ "\n"), ?assertEqual([{["setting"], {error, {conf_to_latin1, 1}} }], Conf), ok. 
gh_1_two_tab_test() -> Conf = conf_parse:parse("setting0 = thing0\n\t\t\nsetting1 = thing1\n"), ?assertEqual([ {["setting0"],"thing0"}, {["setting1"],"thing1"} ], Conf), ok. gh_1_three_tab_test() -> Conf = conf_parse:parse("setting0 = thing0\n\t\t\t\nsetting1 = thing1\n"), ?assertEqual([ {["setting0"],"thing0"}, {["setting1"],"thing1"} ], Conf), ok. -endif. %} cuttlefish-3.0.1/src/cuttlefish_rebar_plugin.erl0000644000232200023220000000725014027401005022426 0ustar debalancedebalance%% ------------------------------------------------------------------- %% %% cuttlefish_rebar_plugin: generates an application's default .conf %% as part of the build %% %% Copyright (c) 2013 Basho Technologies, Inc. %% %% This file is provided to you under the Apache License, %% Version 2.0 (the "License"); you may not use this file %% except in compliance with the License. You may obtain %% a copy of the License at %% %% http://www.apache.org/licenses/LICENSE-2.0 %% %% Unless required by applicable law or agreed to in writing, %% software distributed under the License is distributed on an %% "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY %% KIND, either express or implied. See the License for the %% specific language governing permissions and limitations %% under the License. %% %% ------------------------------------------------------------------- -module(cuttlefish_rebar_plugin). -export([ generate/2 ]). 
%% =================================================================== %% Public API %% =================================================================== generate(Config0, ReltoolFile) -> case should_i_run(Config0, ReltoolFile) of {ok, Config, ReltoolConfig} -> TargetDir = rebar_rel_utils:get_target_dir(Config, ReltoolConfig), %% Finally, overlay the files specified by the overlay section case lists:keyfind(overlay, 1, ReltoolConfig) of {overlay, Overlays} when is_list(Overlays) -> SchemaOverlays = lists:filter(fun(Overlay) -> element(1, Overlay) =:= template andalso filename:extension(element(3, Overlay)) =:= ".schema" end, Overlays), Schemas = lists:sort([ lists:flatten(filename:join(TargetDir, element(3, Schema))) || Schema <- SchemaOverlays]), io:format("Schema: ~p~n", [Schemas]), case cuttlefish_schema:files(Schemas) of {errorlist, _Es} -> %% These errors were already printed error; {_Translations, Mappings, _Validators} -> make_default_file(Config, TargetDir, Mappings) end; false -> %%io:format("No {overlay, [...]} found in reltool.config.\n", []); ok; _ -> io:format("{overlay, [...]} entry in reltool.config " "must be a list.\n", []) end, ok; no -> ok end, ok. make_default_file(Config, TargetDir, Mappings) -> %% I really wanted this to default to the application name. The problem %% is that the type of application that uses cuttlefish is also the kind %% that doesn't have an .app.src file, so rebar doesn't get it. %% I could have done something with cwd, but I didn't like that because you %% could be building anywhere. So, cuttlefish it is. he's pretty cool anyway. File = rebar_config:get_local(Config, cuttlefish_filename, "cuttlefish.conf"), Filename = filename:join([TargetDir, "etc", File]), cuttlefish_conf:generate_file(Mappings, Filename), ok. 
%% Only run for rel directory should_i_run(Config0, ReltoolFile) -> case rebar_rel_utils:is_rel_dir() of {true, _} -> %% Load the reltool configuration from the file {Config, ReltoolConfig} = rebar_rel_utils:load_config(Config0, ReltoolFile), {ok, Config, ReltoolConfig}; false -> no end. cuttlefish-3.0.1/src/cuttlefish_escript.erl0000644000232200023220000005422514027401005021432 0ustar debalancedebalance%% ------------------------------------------------------------------- %% %% cuttlefish_escript: used by sh scripts to parse configs %% %% Copyright (c) 2013 Basho Technologies, Inc. All Rights Reserved. %% %% This file is provided to you under the Apache License, %% Version 2.0 (the "License"); you may not use this file %% except in compliance with the License. You may obtain %% a copy of the License at %% %% http://www.apache.org/licenses/LICENSE-2.0 %% %% Unless required by applicable law or agreed to in writing, %% software distributed under the License is distributed on an %% "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY %% KIND, either express or implied. See the License for the %% specific language governing permissions and limitations %% under the License. %% %% ------------------------------------------------------------------- -module(cuttlefish_escript). -define(LOGGER_HANDLER, default). -define(STDOUT(Str, Args), io:format(Str ++ "~n", Args)). -define(FORMAT(Str, Args), io_lib:format(Str, Args)). -export([main/1]). -include_lib("kernel/include/logger.hrl"). -ifdef(TEST). -include_lib("eunit/include/eunit.hrl"). -endif. 
cli_options() -> %% Option Name, Short Code, Long Code, Argument Spec, Help Message [ {help, $h, "help", undefined, "Print this usage page"}, {etc_dir, $e, "etc_dir", {string, "/etc"}, "etc dir"}, {dest_dir, $d, "dest_dir", string, "specifies the directory to write the config file to"}, {dest_file, $f, "dest_file", string, "the file name to write"}, {schema_dir, $s, "schema_dir", string, "a directory containing .schema files"}, %% one or more schema file paths {schema_file, $i, "schema_file", string, "individual schema file, will be processed in command line order, after -s"}, %% one or more sysctl-style configuration file paths {conf_file, $c, "conf_file", string, "a cuttlefish conf file path, multiple files allowed"}, %% overrides advanced.config file path {advanced_conf_file, $a, "advanced_conf_file", string, "the advanced config file path"}, {log_level, $l, "log_level", {string, "notice"}, "log level for cuttlefish output"}, {print_schema, $p, "print", undefined, "prints schema mappings on stderr"}, {max_history, $m, "max_history", {integer, 3}, "the maximum number of generated config files to keep"}, {silent, $t, "silent", {boolean, false}, "silent operation, no output"}, {allow_extra, $x, "allow_extra", {boolean, false}, "don't fail if extra keys not belonging to a schema are found"} ]. %% LOL! I wanted this to be halt 0, but honestly, if this escript does anything %% except return the path to a generated config file, it should return a non-zero %% return code print_help() -> getopt:usage(cli_options(), escript:script_name()), stop_deactivate(). parse_and_command(Args) -> {ParsedArgs, Extra} = case getopt:parse(cli_options(), Args) of {ok, {P, H}} -> {P, H}; _ -> {[help], []} end, {Command, ExtraArgs} = case {lists:member(help, ParsedArgs), Extra} of {false, []} -> {generate, []}; {false, [Cmd|E]} -> {list_to_atom(Cmd), E}; _ -> {help, []} end, {Command, ParsedArgs, ExtraArgs}. 
%% @doc main method for generating erlang term config files main(Args) -> {Command, ParsedArgs, Extra} = parse_and_command(Args), SuggestedLogLevel = list_to_atom(proplists:get_value(log_level, ParsedArgs)), LogLevel = case lists:member(SuggestedLogLevel, [debug, info, notice, warning, error, critical, alert, emergency]) of true -> SuggestedLogLevel; _ -> notice end, logger:set_primary_config(#{ level => LogLevel }), {ok, LC0} = logger:get_handler_config(?LOGGER_HANDLER), %% override logger formatter to match what 2.7.0 and earlier versions %% used with Lager LC1 = maps:update(formatter, {logger_formatter, #{ legacy_header => false, single_line => true, template => [time," [", level ,"] ", msg, "\n"] }}, LC0), logger:update_handler_config(?LOGGER_HANDLER, LC1), _ = ?LOG_DEBUG("Cuttlefish log level is set to ~s", [LogLevel]), _ = ?LOG_DEBUG("Parsed arguments: ~p", [ParsedArgs]), case Command of help -> print_help(); generate -> generate(ParsedArgs); effective -> effective(ParsedArgs); describe -> describe(ParsedArgs, Extra); _Other -> print_help() end. %% This shows the effective configuration, including defaults effective(ParsedArgs) -> _ = ?LOG_DEBUG("cuttlefish `effective`", []), EtcDir = proplists:get_value(etc_dir, ParsedArgs), %% Should we even show this? 
{AppConfigExists, ExistingAppConfigName} = check_existence(EtcDir, "app.config"), {VMArgsExists, ExistingVMArgsName} = check_existence(EtcDir, "vm.args"), case {AppConfigExists, VMArgsExists} of {false, false} -> AdvancedConfigFile = proplists:get_value(advanced_conf_file, ParsedArgs, filename:join(EtcDir, "advanced.config")), _ = ?LOG_DEBUG("Will look for advanced.config at '~s'", [AdvancedConfigFile]), AdvConfig = case filelib:is_file(AdvancedConfigFile) of true -> _ = ?LOG_DEBUG("~s detected, overlaying proplists", [AdvancedConfigFile]), case file:consult(AdvancedConfigFile) of {ok, [AdvancedConfig]} -> AdvancedConfig; {error, Error} -> _ = ?LOG_ERROR("Error parsing advanced.config: ~s", [file:format_error(Error)]), stop_deactivate() end; _ -> [] end, EffectiveConfig = cuttlefish_effective:build( load_conf(ParsedArgs), load_schema(ParsedArgs), AdvConfig), _ = [ ?STDOUT(Line, []) || Line <- EffectiveConfig], ok; _ -> ?STDOUT("Disabling cuttlefish, legacy configuration files found:", []), case AppConfigExists of true -> ?STDOUT(" ~s", [ExistingAppConfigName]); _ -> ok end, case VMArgsExists of true -> ?STDOUT(" ~s", [ExistingVMArgsName]); _ -> ok end, ?STDOUT("Effective config is only visible for cuttlefish conf files.", []) end, ok. %% This is the function that dumps the docs for a single setting describe(_ParsedArgs, []) -> %% No query, you get nothing. 
?STDOUT("cuttlefish's describe command required a variable to query.", []), ?STDOUT("Try `describe setting.name`", []), stop_deactivate(); describe(ParsedArgs, [Query|_]) when is_list(Query) -> QDef = cuttlefish_variable:tokenize(Query), _ = ?LOG_DEBUG("cuttlefish describe '~s'", [Query]), {_, Mappings, _} = load_schema(ParsedArgs), FindResults = fun(QueryVar) -> lists:filter( fun(X) -> cuttlefish_variable:is_fuzzy_match(QueryVar, cuttlefish_mapping:variable(X)) end, Mappings) end, case FindResults(QDef) of [] -> ?STDOUT("Variable '~s' not found", [Query]); [Match|_] -> ?STDOUT("Documentation for ~s", [cuttlefish_variable:format(cuttlefish_mapping:variable(Match))]), _ = case {cuttlefish_mapping:doc(Match), cuttlefish_mapping:see(Match)} of {[], []} -> ok; {[], See} -> _ = [ begin M = hd(FindResults(S)), [ ?STDOUT("~s", [Line]) || Line <- cuttlefish_mapping:doc(M)] end || S <- See], ok; {Docs, []} -> [ ?STDOUT("~s", [Line]) || Line <- Docs]; {Docs, See} -> _ = [ ?STDOUT("~s", [Line]) || Line <- Docs], ?STDOUT("See also:", []), [?STDOUT(" ~s", [cuttlefish_variable:format(S)]) || S <- See] end, ?STDOUT("", []), ValidValues = [ ?FORMAT("~n - ~s", [cuttlefish_conf:pretty_datatype(Type)]) || Type <- lists:flatten([cuttlefish_mapping:datatype(Match)]) ], ?STDOUT(" Valid Values: ~s", [ValidValues]), case cuttlefish_mapping:has_default(Match) of true -> ?STDOUT(" Default Value : ~s", [format_datatype(cuttlefish_mapping:default(Match), cuttlefish_mapping:datatype(Match))]); false -> ?STDOUT(" No default set", []) end, Conf = load_conf(ParsedArgs), case lists:keyfind(QDef, 1, Conf) of false -> ConfFile = proplists:get_value(conf_file, ParsedArgs), ?STDOUT(" Value not set in ~s", [ConfFile]); {_, CValue} -> ConfiguredValue = format_datatype(CValue, cuttlefish_mapping:datatype(Match)), ?STDOUT(" Set Value : ~s", [ConfiguredValue]) end, ?STDOUT(" Internal key : ~s", [cuttlefish_mapping:mapping(Match)]) end, stop_deactivate(). -ifndef(TEST). 
stop_deactivate() -> init:stop(1), timer:sleep(250), stop_deactivate(). stop_ok() -> init:stop(0). -endif. -ifdef(TEST). %% In test mode we don't want to kill the test VM prematurely. stop_deactivate() -> throw(stop_deactivate). stop_ok() -> ok. -endif. generate(ParsedArgs) -> EtcDir = proplists:get_value(etc_dir, ParsedArgs), {AppConfigExists, ExistingAppConfigName} = check_existence(EtcDir, "app.config"), {VMArgsExists, ExistingVMArgsName} = check_existence(EtcDir, "vm.args"), %% If /etc/app.config exists, use it and disable cuttlefish %% even though cuttlefish is awesome FilesToUse = case {AppConfigExists, VMArgsExists} of {true, true} -> _ = ?LOG_INFO("~s and ~s exists, disabling cuttlefish.", [ExistingAppConfigName, ExistingVMArgsName]), _ = ?LOG_INFO("If you'd like to know more about cuttlefish, check your local library!", []), _ = ?LOG_INFO(" or see http://github.com/Kyorai/cuttlefish", []), {ExistingAppConfigName, ExistingVMArgsName}; {true, false} -> _ = ?LOG_INFO("~s exists, generating vm.args", [ExistingAppConfigName]), {_, NewVMArgs} = engage_cuttlefish(ParsedArgs), {ExistingAppConfigName, NewVMArgs}; {false, true} -> _ = ?LOG_INFO("~s exists, generating app.config", [ExistingVMArgsName]), {NewAppConfig, _} = engage_cuttlefish(ParsedArgs), {NewAppConfig, ExistingVMArgsName}; _ -> _ = ?LOG_INFO("No app.config or vm.args detected in ~s, activating cuttlefish", [EtcDir]), engage_cuttlefish(ParsedArgs) end, Silent = proplists:get_value(silent, ParsedArgs, false), case Silent orelse FilesToUse of true -> %% user requested for silent operation, ie. not cli args stop_ok(); %% this is nice and all, but currently all error paths of engage_cuttlefish end with %% stop_deactivate() hopefully factor that to be cleaner. error -> stop_deactivate(); {AppConf, VMArgs} -> %% Note: we have added a parameter '-vm_args' to this. It appears redundant %% but it is not! 
the erlang vm allows us to access all arguments to the erl %% command EXCEPT '-args_file', so in order to get access to this file location %% from within the vm, we need to pass it in twice. ?STDOUT(" -config ~s -args_file ~s -vm_args ~s ", [AppConf, VMArgs, VMArgs]), stop_ok() end. load_schema(ParsedArgs) -> SchemaDir = proplists:get_value(schema_dir, ParsedArgs), SchemaDirFiles = case SchemaDir of undefined -> []; _ -> [ filename:join(SchemaDir, Filename) || Filename <- filelib:wildcard("*.schema", SchemaDir)] end, IndividualSchemaFiles = proplists:get_all_values(schema_file, ParsedArgs), SchemaFiles = SchemaDirFiles ++ IndividualSchemaFiles, SortedSchemaFiles = lists:sort(fun(A,B) -> A < B end, SchemaFiles), case length(SortedSchemaFiles) of 0 -> _ = ?LOG_DEBUG("No Schema files found in specified", []), stop_deactivate(); _ -> _ = ?LOG_DEBUG("SchemaFiles: ~p", [SortedSchemaFiles]) end, Schema = cuttlefish_schema:files(SortedSchemaFiles), case proplists:is_defined(print_schema, ParsedArgs) of true -> _ = print_schema(Schema), Schema; _ -> Schema end. load_conf(ParsedArgs) -> ConfFiles = proplists:get_all_values(conf_file, ParsedArgs), _ = ?LOG_DEBUG("ConfFiles: ~p", [ConfFiles]), case cuttlefish_conf:files(ConfFiles) of {errorlist, Errors} -> _ = [ _ = ?LOG_ERROR(cuttlefish_error:xlate(E)) || {error, E} <- Errors], stop_deactivate(), {errorlist, Errors}; GoodConf -> GoodConf end. -spec writable_destination_path([proplists:property()]) -> file:filename() | error. 
writable_destination_path(ParsedArgs) -> EtcDir = proplists:get_value(etc_dir, ParsedArgs), DestinationPath = proplists:get_value(dest_dir, ParsedArgs, filename:join(EtcDir, "generated")), AbsoluteDestPath = case DestinationPath of [$/|_] -> DestinationPath; _ -> filename:join(element(2,file:get_cwd()), DestinationPath) end, %% Check Permissions case filelib:ensure_dir(filename:join(AbsoluteDestPath, "weaksauce.dummy")) of %% filelib:ensure_dir/1 requires a dummy filename in the argument, %% I think that is weaksauce, hence "weaksauce.dummy" ok -> AbsoluteDestPath; {error, E} -> _ = ?LOG_ERROR( "Error creating ~s: ~s", [AbsoluteDestPath, file:format_error(E)]), error end. -spec engage_cuttlefish([proplists:property()]) -> {string(), string()} | error. engage_cuttlefish(ParsedArgs) -> EtcDir = proplists:get_value(etc_dir, ParsedArgs), AbsPath = case writable_destination_path(ParsedArgs) of error -> stop_deactivate(), error; Path -> Path end, DestinationFilename = filename_maker(proplists:get_value(dest_file, ParsedArgs, "app"), "config"), Destination = filename:join(AbsPath, DestinationFilename), DestinationVMArgsFilename = filename_maker(proplists:get_value(dest_file, ParsedArgs, "vm"), "args"), DestinationVMArgs = filename:join(AbsPath, DestinationVMArgsFilename), _ = ?LOG_DEBUG("Generating config in: ~p", [Destination]), _ = ?LOG_DEBUG("Generating vm.args in: ~p", [DestinationVMArgs]), Schema = load_schema(ParsedArgs), Conf = load_conf(ParsedArgs), NewConfig = case cuttlefish_generator:map(Schema, Conf, ParsedArgs) of {error, Phase, {errorlist, Errors}} -> _ = ?LOG_ERROR("Error generating configuration in phase ~s", [Phase]), _ = [ cuttlefish_error:print(E) || E <- Errors], stop_deactivate(); ValidConfig -> ValidConfig end, AdvancedConfigFile = proplists:get_value(advanced_conf_file, ParsedArgs, filename:join(EtcDir, "advanced.config")), _ = ?LOG_DEBUG("AdvancedConfigFile: ~p", [AdvancedConfigFile]), FinalConfig = case filelib:is_file(AdvancedConfigFile) of true 
-> _ = ?LOG_INFO("advanced config file is detected at ~s, overlaying proplists", [AdvancedConfigFile]), case file:consult(AdvancedConfigFile) of {ok, [AdvancedConfig]} -> cuttlefish_advanced:overlay(NewConfig, AdvancedConfig); {ok, OtherTerms} -> _ = ?LOG_ERROR("Error parsing ~s, incorrect format: ~p", [AdvancedConfigFile, OtherTerms]), stop_deactivate(); {error, Error} -> _ = ?LOG_ERROR("Error parsing ~s: ~s", [AdvancedConfigFile, file:format_error(Error)]), stop_deactivate() end; _ -> %% Nothing to see here, these aren't the droids you're looking for. NewConfig end, case FinalConfig of {error, _X} -> error; _ -> FinalAppConfig = proplists:delete(vm_args, FinalConfig), FinalVMArgs = cuttlefish_vmargs:stringify(proplists:get_value(vm_args, FinalConfig, [])), %% Prune excess files MaxHistory = proplists:get_value(max_history, ParsedArgs, 3) - 1, prune(Destination, MaxHistory), prune(DestinationVMArgs, MaxHistory), case {maybe_write_file(Destination, "~p.\n", FinalAppConfig), maybe_write_file(DestinationVMArgs, "~s", string:join(FinalVMArgs, "\n"))} of {ok, ok} -> {Destination, DestinationVMArgs}; {Err1, Err2} -> maybe_log_file_error(Destination, Err1), maybe_log_file_error(DestinationVMArgs, Err2), error end end. -spec maybe_write_file(Filename :: string(), Format :: string(), Data :: string()) -> ok | {error, file:posix() | badarg | terminated | system_limit}. maybe_write_file(_, _, []) -> % nothing to write, write nothing ok; maybe_write_file(Filename, Format, Data) -> file:write_file(Filename, io_lib:fwrite(Format, [Data])). -spec prune(file:name_all(), integer()) -> ok. 
prune(Filename, MaxHistory) -> %% A Filename comes in /Abs/Path/To/something.YYYY.MM.DD.HH.mm.SS.ext %% We want `ls /Abs/Path/To/something.*.ext and delete all but the most %% recent MaxHistory Path = filename:dirname(Filename), Ext = filename:extension(Filename), Base = hd(string:tokens(filename:basename(Filename, Ext), ".")), Files = lists:sort(filelib:wildcard(Base ++ ".*" ++ Ext, Path)), delete([ filename:join([Path, F]) || F <- Files], MaxHistory), ok. -spec delete(file:name_all(), integer()) -> ok. delete(Files, MaxHistory) when length(Files) =< MaxHistory -> ok; delete([File|Files], MaxHistory) -> case file:delete(File) of ok -> ok; {error, Reason} -> _ = ?LOG_ERROR("Could not delete ~s, ~p", [File, Reason]) end, delete(Files, MaxHistory). -spec maybe_log_file_error( file:filename(), ok | {error, file:posix() %% copied from file:format_error/1 | badarg | terminated | system_limit | { integer(), module(), term() }}) -> ok. maybe_log_file_error(_, ok) -> ok; maybe_log_file_error(Filename, {error, Reason}) -> _ = ?LOG_ERROR("Error writing ~s: ~s", [Filename, file:format_error(Reason)]), ok. -spec check_existence(string(), string()) -> {boolean(), string()}. check_existence(EtcDir, Filename) -> FullName = filename:join(EtcDir, Filename), %% Barfolomew Exists = filelib:is_file(FullName), _ = ?LOG_INFO("Checking ~s exists... ~p", [FullName, Exists]), {Exists, FullName}. filename_maker(Filename, Extension) -> case length(string:tokens(Filename, ".")) of 1 -> filename_maker(add_suffix, Filename, Extension); _ -> filename_maker(no_suffix, Filename, Extension) end. filename_maker(no_suffix, Filename, _Extension) -> Filename; filename_maker(add_suffix, Filename, Extension) -> {{Y, M, D}, {HH, MM, SS}} = calendar:local_time(), _DestinationFilename = io_lib:format("~s.~p.~s.~s.~s.~s.~s.~s", [Filename, Y, zero_pad(M), zero_pad(D), zero_pad(HH), zero_pad(MM), zero_pad(SS), Extension ]). 
zero_pad(Integer) -> S = integer_to_list(Integer), case Integer > 9 of true -> S; _ -> [$0|S] end. print_schema(Schema) -> _ = ?LOG_INFO("Printing Schema Mappings"), {_, Mappings, _} = Schema, {Max, ListOfMappings} = lists:foldr( fun(M, {OldMax, List}) -> CandidateMax = length(cuttlefish_mapping:mapping(M)), NewMax = case CandidateMax > OldMax of true -> CandidateMax; _ -> OldMax end, {NewMax, [{cuttlefish_mapping:mapping(M), cuttlefish_variable:format(cuttlefish_mapping:variable(M))}|List]} end, {0, []}, Mappings ), [ io:format(standard_error, "~s ~s~n", [string:left(M, Max+2, $\s), V]) || {M, V} <- ListOfMappings]. format_datatype(Value, Datatypes) when is_list(Datatypes) -> %% We're not sure which datatype the default or set value is going %% to match, so let's find one that does. [H|_] = lists:dropwhile( fun(D0) -> D = cuttlefish_datatypes:extended_from(D0), case cuttlefish_datatypes:from_string(Value, D) of {error, _} -> true; _ -> false end end, Datatypes), format_datatype(Value, cuttlefish_datatypes:extended_from(H)); format_datatype(Value, Datatype) -> cuttlefish_datatypes:to_string(cuttlefish_datatypes:from_string(Value, Datatype), Datatype). -ifdef(TEST). zero_pad_test() -> ?assertEqual("00", zero_pad(0)), ?assertEqual("01", zero_pad(1)), ?assertEqual("02", zero_pad(2)), ?assertEqual("03", zero_pad(3)), ?assertEqual("04", zero_pad(4)), ?assertEqual("05", zero_pad(5)), ?assertEqual("06", zero_pad(6)), ?assertEqual("07", zero_pad(7)), ?assertEqual("08", zero_pad(8)), ?assertEqual("09", zero_pad(9)), ?assertEqual("10", zero_pad(10)), ?assertEqual("11", zero_pad(11)), ?assertEqual("12", zero_pad(12)), ok. -endif. cuttlefish-3.0.1/src/cuttlefish_advanced.erl0000644000232200023220000000464414027401005021526 0ustar debalancedebalance%% ------------------------------------------------------------------- %% %% cuttlefish_advanced: handles merging of advanced configs %% %% Copyright (c) 2013 Basho Technologies, Inc. All Rights Reserved. 
%% %% This file is provided to you under the Apache License, %% Version 2.0 (the "License"); you may not use this file %% except in compliance with the License. You may obtain %% a copy of the License at %% %% http://www.apache.org/licenses/LICENSE-2.0 %% %% Unless required by applicable law or agreed to in writing, %% software distributed under the License is distributed on an %% "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY %% KIND, either express or implied. See the License for the %% specific language governing permissions and limitations %% under the License. %% %% ------------------------------------------------------------------- -module(cuttlefish_advanced). -export([overlay/2]). -ifdef(TEST). -include_lib("eunit/include/eunit.hrl"). -endif. %% @doc this function overlays the values in proplist 'AdvancedConfig' %% on top of 'GeneratedConfig' overlay(GeneratedConfig, AdvancedConfig) -> lists:foldl( fun({ApplicationName, ApplicationConfig}, OuterAcc) -> GeneratedApplicationConfig = proplists:get_value(ApplicationName, GeneratedConfig, []), Updated = lists:foldl( fun({ConfigElementName, ConfigElement}, Acc) -> cuttlefish_util:replace_proplist_value(ConfigElementName, ConfigElement, Acc) end, GeneratedApplicationConfig, ApplicationConfig), cuttlefish_util:replace_proplist_value(ApplicationName, Updated, OuterAcc) end, GeneratedConfig, AdvancedConfig). -ifdef(TEST). overlay_test() -> GeneratedConfig = [ {app1, [{'setting1.1', "value1.1"}]}, {app2, [{'setting2.1', "value2.1"}]}, {app3, [{'setting3.1', [{"blah", "blah"}, {"blarg", "blarg"}]}]} ], AdvancedConfig = [ {app3, [{'setting3.1', i_dont_care}]}, {app4, [{'some_unschemad_thing', 'like_a_penguin'}]} ], Expected = [ {app1, [{'setting1.1', "value1.1"}]}, {app2, [{'setting2.1', "value2.1"}]}, {app3, [{'setting3.1', i_dont_care}]}, {app4, [{'some_unschemad_thing', 'like_a_penguin'}]} ], NewConfig = overlay(GeneratedConfig, AdvancedConfig), ?assertEqual(Expected, NewConfig), ok. -endif. 
cuttlefish-3.0.1/src/cuttlefish_flag.erl0000644000232200023220000000706714027401005020674 0ustar debalancedebalance%% ------------------------------------------------------------------- %% %% cuttlefish_flag: datatype for simple boolean settings with %% customizable names and values %% %% Copyright (c) 2013 Basho Technologies, Inc. All Rights Reserved. %% %% This file is provided to you under the Apache License, %% Version 2.0 (the "License"); you may not use this file %% except in compliance with the License. You may obtain %% a copy of the License at %% %% http://www.apache.org/licenses/LICENSE-2.0 %% %% Unless required by applicable law or agreed to in writing, %% software distributed under the License is distributed on an %% "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY %% KIND, either express or implied. See the License for the %% specific language governing permissions and limitations %% under the License. %% %% ------------------------------------------------------------------- -module(cuttlefish_flag). -ifdef(TEST). -include_lib("eunit/include/eunit.hrl"). -endif. -export([ parse/1, parse/2, to_string/2 ]). -define(FMT(F, A), lists:flatten(io_lib:format(F, A))). parse(Value) -> cuttlefish_enum:parse(Value, to_enum(flag)). parse(Value, Flag) -> cuttlefish_enum:parse(Value, to_enum(Flag)). to_string(Value, Flag) -> cuttlefish_enum:to_string(Value, to_enum(Flag)). to_enum({flag, {On, OnValue}, {Off, OffValue}}) -> {enum, [{On, OnValue}, {Off, OffValue}]}; to_enum({flag, On, Off}) -> {enum, [{On, true}, {Off, false}]}; to_enum(flag) -> {enum, [{on, true}, {off, false}]}. -ifdef(TEST). 
parse_test() -> ?assertEqual(true, parse("on")), ?assertEqual(false, parse("off")), ?assertEqual(true, parse("enabled", {flag, enabled, disabled})), ?assertEqual(false, parse("disabled", {flag, enabled, disabled})), ?assertEqual(tyk, parse("on", {flag, {on, tyk}, {off, torp}})), ?assertEqual(torp, parse("off", {flag, {on, tyk}, {off, torp}})), ?assertEqual({long, tuple, value}, parse("foo", {flag, {simple, ok}, {foo, {long, tuple, value}}})), ?assertEqual(ok, parse("simple", {flag, {simple, ok}, {foo, {long, tuple, value}}})). to_string_test() -> ?assertEqual(to_string(true, flag), "on"), ?assertEqual(to_string(on, flag), "on"), ?assertEqual(to_string(false, flag), "off"), ?assertEqual(to_string(off, flag), "off"), ?assertEqual(to_string(true, {flag, enabled, disabled}), "enabled"), ?assertEqual(to_string(enabled, {flag, enabled, disabled}), "enabled"), ?assertEqual(to_string(false, {flag, enabled, disabled}), "disabled"), ?assertEqual(to_string(disabled, {flag, enabled, disabled}), "disabled"), ?assertEqual(to_string(tyk, {flag, {on, tyk}, {off, torp}}), "on"), ?assertEqual(to_string(on, {flag, {on, tyk}, {off, torp}}), "on"), ?assertEqual(to_string(torp, {flag, {on, tyk}, {off, torp}}), "off"), ?assertEqual(to_string(off, {flag, {on, tyk}, {off, torp}}), "off"), ?assertEqual(to_string({long, tuple, value}, {flag, {simple, ok}, {foo, {long, tuple, value}}}), "foo"), ?assertEqual(to_string(foo, {flag, {simple, ok}, {foo, {long, tuple, value}}}), "foo"), ?assertEqual(to_string(ok, {flag, {simple, ok}, {foo, {long, tuple, value}}}), "simple"), ?assertEqual(to_string(simple, {flag, {simple, ok}, {foo, {long, tuple, value}}}), "simple"). -endif. cuttlefish-3.0.1/src/cuttlefish_vmargs.erl0000644000232200023220000000265614027401005021261 0ustar debalancedebalance-module(cuttlefish_vmargs). -export([stringify/1]). -ifdef(TEST). -include_lib("eunit/include/eunit.hrl"). -endif. 
%% @doc turns a proplist into a list of strings suitable for vm.args files -spec stringify([{any(), string()}]) -> [string()]. stringify(VMArgsProplist) -> [ stringify_line(K, V) || {K, V} <- VMArgsProplist ]. stringify_line(K, V) when is_list(V) -> lists:flatten(io_lib:format("~s ~s", [K, V])); stringify_line(K, V) -> lists:flatten(io_lib:format("~s ~w", [K, V])). -ifdef(TEST). stringify_test() -> VMArgsProplist = [ {'-name', "dev1@127.0.0.1"}, {'-setcookie', 'riak'}, {'-smp', enable}, {'+W',"w"}, {'+K',"true"}, {'+A',"64"}, {'-env ERL_MAX_PORTS',"64000"}, {'-env ERL_FULLSWEEP_AFTER',"0"}, {'-env ERL_CRASH_DUMP',"./log/erl_crash.dump"}, {'-env ERL_MAX_ETS_TABLES',"256000"}, {'+P', "256000"}, {'-kernel net_ticktime', "42"} ], VMArgs = stringify(VMArgsProplist), Expected = [ "-name dev1@127.0.0.1", "-setcookie riak", "-smp enable", "+W w", "+K true", "+A 64", "-env ERL_MAX_PORTS 64000", "-env ERL_FULLSWEEP_AFTER 0", "-env ERL_CRASH_DUMP ./log/erl_crash.dump", "-env ERL_MAX_ETS_TABLES 256000", "+P 256000", "-kernel net_ticktime 42" ], [ ?assertEqual(E, V) || {E, V} <- lists:zip(Expected, VMArgs)], ok. -endif. cuttlefish-3.0.1/src/cuttlefish_unit.erl0000644000232200023220000001524414027401005020736 0ustar debalancedebalance-module(cuttlefish_unit). -include_lib("kernel/include/logger.hrl"). -include_lib("eunit/include/eunit.hrl"). -compile([nowarn_export_all, export_all]). generate_templated_config(FileName, Conf, Context) -> generate_templated_config(FileName, Conf, Context, {[], [], []}). 
generate_templated_config(FileName, Conf, Context, PreexistingSchema) -> RenderedSchemas = case lists:all(fun(X) -> not is_list(X) end, FileName) of true -> %% it's a single depth list, aka string [{ cuttlefish_schema:string_fun_factory(), render_template(FileName, Context)}]; _ -> %% It's a list of lists, aka multiple strings [{ cuttlefish_schema:string_fun_factory(), render_template(F, Context)} || F <- FileName] end, Schema = cuttlefish_schema:merger(RenderedSchemas ++ [ { fun(_, _) -> PreexistingSchema end, ""} ]), cuttlefish_generator:map(Schema, Conf). render_template(FileName, Context) -> {ok, Bin, _} = erl_prim_loader:get_file(filename:absname(FileName)), %% Stolen from rebar_templater:render/2 %% Be sure to escape any double-quotes before rendering... ReOpts = [global, {return, list}], Str0 = re:replace(Bin, "\\\\", "\\\\\\", ReOpts), Str1 = re:replace(Str0, "\"", "\\\\\"", ReOpts), %% the mustache module is only available in the context of a rebar run. case {code:ensure_loaded(mustache), code:ensure_loaded(rebar_mustache), code:ensure_loaded(bbmustache)} of {{module, mustache}, _, _} -> mustache:render(Str1, dict:from_list(Context)); {_, {module, rebar_mustache}, _} -> rebar_mustache:render(Str1, dict:from_list(Context)); {_, _, {module, bbmustache}} -> Ret = bbmustache:render( Bin, maps:from_list( [case is_atom(K) of true -> {atom_to_list(K), V}; false -> I end || I = {K, V} <- Context])), binary_to_list(Ret); _ -> io:format("mustache and/or rebar_mustache module not loaded. " "This test can only be run in a rebar context.~n") end. -spec generate_config(atom(), [string()]|string(), list()) -> list(). generate_config(strings, SchemaStrings, Conf) -> Schema = cuttlefish_schema:strings(SchemaStrings), cuttlefish_generator:map(Schema, Conf); generate_config(string, SchemaString, Conf) -> Schema = cuttlefish_schema:strings([SchemaString]), cuttlefish_generator:map(Schema, Conf); generate_config(file, SchemaFile, Conf) -> generate_config(SchemaFile, Conf). 
-spec generate_config(string(), list()) -> list(). generate_config(SchemaFile, Conf) -> Schema = cuttlefish_schema:files([SchemaFile]), cuttlefish_generator:map(Schema, Conf). assert_valid_config(Config) -> case Config of List when is_list(List) -> ok; {error, Phase, {errorlist, Errors}} -> erlang:exit({assert_valid_config_failed, [{phase, Phase}, {errorlist, Errors}]}); Other -> erlang:exit({assert_valid_config_failed, [{bad_value, Other}]}) end. assert_config(Config, Path, Value) -> ok = assert_valid_config(Config), ActualValue = case path(cuttlefish_variable:tokenize(Path), Config) of {error, bad_nesting} -> ?assertEqual({Path, Value}, {Path, nesting_error}); notset -> ?assertEqual({Path, Value}, {Path, notset}); {ok, X} -> X end, ?assertEqual({Path, Value}, {Path, ActualValue}). assert_not_configured(Config, Path) -> ok = assert_valid_config(Config), case path(cuttlefish_variable:tokenize(Path), Config) of {error, bad_nesting} -> erlang:exit({assert_not_configured_failed, [{bad_nesting, Path}, {config, Config}]}); {ok, Value} -> erlang:exit({assert_not_configured_failed, [{key, Path}, {configured_to, Value}, {config, Config}]}); notset -> ok end. %% @doc Asserts that the generated configuration is in error. assert_error(Config) -> ?assertMatch({error, _, {errorlist, _}}, Config). %% @doc Asserts that the generated configuration is in error and %% contains an error tuple that translates to the given error message assert_error_message(Config, Message) -> ok = assert_error(Config), {errorlist, Errors} = element(3, Config), chase_message(Message, Errors, Errors). chase_message(Message, [], Errors) -> erlang:exit({assert_error_message_failed, [{expected, Message}, {actual, Errors}]}); chase_message(Message, [{error, ErrorTerm}|T], Errors) -> case lists:flatten(cuttlefish_error:xlate(ErrorTerm)) of Message -> ok; _ -> chase_message(Message, T, Errors) end. 
-spec path(cuttlefish_variable:variable(), [{ string() | atom() | binary() , term()}]) -> {ok, any()} | notset | {error, bad_nesting}. path(_, []) -> {error, bad_nesting}; path(_, undefined) -> notset; path([Last], Proplist) -> case lists:dropwhile(key_no_match(Last), Proplist) of [] -> notset; [{_, V}|_] -> {ok, V} end; path([H|T], Proplist) when is_list(H)-> case path([H], Proplist) of {ok, SmallerProplist} -> path(T, SmallerProplist); Other -> Other end. -spec key_no_match(string()) -> fun((atom() | string() | binary()) -> boolean()). key_no_match(Key) -> fun({E, _}) when is_atom(E) -> E =/= list_to_atom(Key); ({E, _}) when is_list(E) -> E =/= Key; ({E, _}) when is_binary(E) -> E =/= list_to_binary(Key); (_) -> true end. -spec dump_to_file(any(), string()) -> ok. dump_to_file(ErlangTerm, Filename) -> {ok, S} = file:open(Filename, [write,append]), io:format(S, "~p~n", [ErlangTerm]), _ = file:close(S), ok. -ifdef(TEST). path_test() -> ?assertEqual( {ok, "disable"}, path(["vm_args", "-smp"], [{vm_args, [{'-smp', "disable"}]}])), ok. multiple_schema_generate_templated_config_test() -> Context = [ {mustache, "mustache"} ], PrereqSchema = {[], [ cuttlefish_mapping:parse( {mapping, "c", "app.c", [ {default, "/c"} ]}) ], []}, Config = cuttlefish_unit:generate_templated_config("test/sample_mustache.schema", [], Context, PrereqSchema), _ = ?LOG_ERROR("~p", [Config]), assert_config(Config, "app_a.setting_b", "/c/mustache/a.b"), ok. -endif. cuttlefish-3.0.1/src/cuttlefish_duration_parse.erl0000644000232200023220000002570314027401005022777 0ustar debalancedebalance-module(cuttlefish_duration_parse). -export([parse/1,file/1]). -define(p_charclass,true). -define(p_choose,true). -define(p_one_or_more,true). -define(p_scan,true). -define(p_seq,true). -define(p_string,true). -define(p_zero_or_more,true). %% ------------------------------------------------------------------- %% %% cuttlefish_duration_parse: parses duration strings %% %% Copyright (c) 2014 Basho Technologies, Inc. 
All Rights Reserved. %% %% This file is provided to you under the Apache License, %% Version 2.0 (the "License"); you may not use this file %% except in compliance with the License. You may obtain %% a copy of the License at %% %% http://www.apache.org/licenses/LICENSE-2.0 %% %% Unless required by applicable law or agreed to in writing, %% software distributed under the License is distributed on an %% "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY %% KIND, either express or implied. See the License for the %% specific language governing permissions and limitations %% under the License. %% %% ------------------------------------------------------------------- -include("cuttlefish_duration.hrl"). -define(FLATTEN(S), binary_to_list(iolist_to_binary(S))). -spec file(file:name()) -> any(). file(Filename) -> AbsFilename = filename:absname(Filename), case erl_prim_loader:get_file(AbsFilename) of {ok, Bin, _} -> parse(Bin); error -> {error, undefined} end. -spec parse(binary() | list()) -> any(). parse(List) when is_list(List) -> parse(list_to_binary(List)); parse(Input) when is_binary(Input) -> _ = setup_memo(), Result = case 'duration'(Input,{{line,1},{column,1}}) of {AST, <<>>, _Index} -> AST; Any -> Any end, release_memo(), Result. -spec 'duration'(input(), index()) -> parse_result(). 'duration'(Input, Index) -> p(Input, Index, 'duration', fun(I,D) -> (p_one_or_more(fun 'duration_segment'/2))(I,D) end, fun(Node, _Idx) ->lists:sum(Node) end). -spec 'duration_segment'(input(), index()) -> parse_result(). 'duration_segment'(Input, Index) -> p(Input, Index, 'duration_segment', fun(I,D) -> (p_seq([p_choose([fun 'float'/2, fun 'integer'/2]), fun 'unit'/2]))(I,D) end, fun(Node, _Idx) -> [Amount, Span] = Node, {Span, Multiplier} = lists:keyfind(Span, 1, ?MULTIPLIERS), Amount * Multiplier end). -spec 'integer'(input(), index()) -> parse_result(). 
'integer'(Input, Index) -> p(Input, Index, 'integer', fun(I,D) -> (p_seq([p_charclass(<<"[1-9]">>), p_zero_or_more(p_charclass(<<"[0-9]">>))]))(I,D) end, fun(Node, _Idx) ->list_to_integer(?FLATTEN(Node)) end). -spec 'unit'(input(), index()) -> parse_result(). 'unit'(Input, Index) -> p(Input, Index, 'unit', fun(I,D) -> (p_choose([p_string(<<"f">>), p_string(<<"w">>), p_string(<<"d">>), p_string(<<"h">>), p_string(<<"ms">>), p_string(<<"m">>), p_string(<<"s">>)]))(I,D) end, fun(Node, _Idx) ->binary_to_atom(Node, latin1) end). -spec 'float'(input(), index()) -> parse_result(). 'float'(Input, Index) -> p(Input, Index, 'float', fun(I,D) -> (p_choose([p_seq([p_one_or_more(p_charclass(<<"[0-9]">>)), p_string(<<".">>), p_one_or_more(p_charclass(<<"[0-9]">>))]), p_seq([p_string(<<".">>), p_one_or_more(p_charclass(<<"[0-9]">>))])]))(I,D) end, fun(Node, _Idx) -> case Node of [<<".">>, Mantissa] -> list_to_float(?FLATTEN(["0.", Mantissa])); _ -> list_to_float(?FLATTEN(Node)) end end). -file("peg_includes.hrl", 1). -type index() :: {{line, pos_integer()}, {column, pos_integer()}}. -type input() :: binary(). -type parse_failure() :: {fail, term()}. -type parse_success() :: {term(), input(), index()}. -type parse_result() :: parse_failure() | parse_success(). -type parse_fun() :: fun((input(), index()) -> parse_result()). -type xform_fun() :: fun((input(), index()) -> term()). -spec p(input(), index(), atom(), parse_fun(), xform_fun()) -> parse_result(). p(Inp, StartIndex, Name, ParseFun, TransformFun) -> case get_memo(StartIndex, Name) of % See if the current reduction is memoized {ok, Memo} -> %Memo; % If it is, return the stored result Memo; _ -> % If not, attempt to parse Result = case ParseFun(Inp, StartIndex) of {fail,_} = Failure -> % If it fails, memoize the failure Failure; {Match, InpRem, NewIndex} -> % If it passes, transform and memoize the result. 
Transformed = TransformFun(Match, StartIndex), {Transformed, InpRem, NewIndex} end, memoize(StartIndex, Name, Result), Result end. -spec setup_memo() -> ets:tid(). setup_memo() -> put({parse_memo_table, ?MODULE}, ets:new(?MODULE, [set])). -spec release_memo() -> true. release_memo() -> ets:delete(memo_table_name()). -spec memoize(index(), atom(), parse_result()) -> true. memoize(Index, Name, Result) -> Memo = case ets:lookup(memo_table_name(), Index) of [] -> []; [{Index, Plist}] -> Plist end, ets:insert(memo_table_name(), {Index, [{Name, Result}|Memo]}). -spec get_memo(index(), atom()) -> {ok, term()} | {error, not_found}. get_memo(Index, Name) -> case ets:lookup(memo_table_name(), Index) of [] -> {error, not_found}; [{Index, Plist}] -> case proplists:lookup(Name, Plist) of {Name, Result} -> {ok, Result}; _ -> {error, not_found} end end. -spec memo_table_name() -> ets:tid(). memo_table_name() -> get({parse_memo_table, ?MODULE}). -ifdef(p_eof). -spec p_eof() -> parse_fun(). p_eof() -> fun(<<>>, Index) -> {eof, [], Index}; (_, Index) -> {fail, {expected, eof, Index}} end. -endif. -ifdef(p_optional). -spec p_optional(parse_fun()) -> parse_fun(). p_optional(P) -> fun(Input, Index) -> case P(Input, Index) of {fail,_} -> {[], Input, Index}; {_, _, _} = Success -> Success end end. -endif. -ifdef(p_not). -spec p_not(parse_fun()) -> parse_fun(). p_not(P) -> fun(Input, Index)-> case P(Input,Index) of {fail,_} -> {[], Input, Index}; {Result, _, _} -> {fail, {expected, {no_match, Result},Index}} end end. -endif. -ifdef(p_assert). -spec p_assert(parse_fun()) -> parse_fun(). p_assert(P) -> fun(Input,Index) -> case P(Input,Index) of {fail,_} = Failure-> Failure; _ -> {[], Input, Index} end end. -endif. -ifdef(p_seq). -spec p_seq([parse_fun()]) -> parse_fun(). p_seq(P) -> fun(Input, Index) -> p_all(P, Input, Index, []) end. -spec p_all([parse_fun()], input(), index(), [term()]) -> parse_result(). 
p_all([], Inp, Index, Accum ) -> {lists:reverse( Accum ), Inp, Index}; p_all([P|Parsers], Inp, Index, Accum) -> case P(Inp, Index) of {fail, _} = Failure -> Failure; {Result, InpRem, NewIndex} -> p_all(Parsers, InpRem, NewIndex, [Result|Accum]) end. -endif. -ifdef(p_choose). -spec p_choose([parse_fun()]) -> parse_fun(). p_choose(Parsers) -> fun(Input, Index) -> p_attempt(Parsers, Input, Index, none) end. -spec p_attempt([parse_fun()], input(), index(), none | parse_failure()) -> parse_result(). p_attempt([], _Input, _Index, Failure) -> Failure; p_attempt([P|Parsers], Input, Index, FirstFailure)-> case P(Input, Index) of {fail, _} = Failure -> case FirstFailure of none -> p_attempt(Parsers, Input, Index, Failure); _ -> p_attempt(Parsers, Input, Index, FirstFailure) end; Result -> Result end. -endif. -ifdef(p_zero_or_more). -spec p_zero_or_more(parse_fun()) -> parse_fun(). p_zero_or_more(P) -> fun(Input, Index) -> p_scan(P, Input, Index, []) end. -endif. -ifdef(p_one_or_more). -spec p_one_or_more(parse_fun()) -> parse_fun(). p_one_or_more(P) -> fun(Input, Index)-> Result = p_scan(P, Input, Index, []), case Result of {[_|_], _, _} -> Result; _ -> {fail, {expected, Failure, _}} = P(Input,Index), {fail, {expected, {at_least_one, Failure}, Index}} end end. -endif. -ifdef(p_label). -spec p_label(atom(), parse_fun()) -> parse_fun(). p_label(Tag, P) -> fun(Input, Index) -> case P(Input, Index) of {fail,_} = Failure -> Failure; {Result, InpRem, NewIndex} -> {{Tag, Result}, InpRem, NewIndex} end end. -endif. -ifdef(p_scan). -spec p_scan(parse_fun(), input(), index(), [term()]) -> {[term()], input(), index()}. p_scan(_, <<>>, Index, Accum) -> {lists:reverse(Accum), <<>>, Index}; p_scan(P, Inp, Index, Accum) -> case P(Inp, Index) of {fail,_} -> {lists:reverse(Accum), Inp, Index}; {Result, InpRem, NewIndex} -> p_scan(P, InpRem, NewIndex, [Result | Accum]) end. -endif. -ifdef(p_string). -spec p_string(binary()) -> parse_fun(). 
%% Matches the literal binary S at the head of the input.
%%
%% NOTE(review): the binary patterns in this line were garbled (they read
%% `<> = Input`), apparently by an HTML/angle-bracket stripping step during
%% packaging; restored from neotoma's standard peg_includes.hrl runtime.
p_string(S) ->
    Length = erlang:byte_size(S),
    fun(Input, Index) ->
        try
            %% Bind the first Length bytes; a badmatch means the literal
            %% is not at the head of the input.
            <<S:Length/binary, Rest/binary>> = Input,
            {S, Rest, p_advance_index(S, Index)}
        catch
            error:{badmatch,_} ->
                {fail, {expected, {string, S}, Index}}
        end
    end.
-endif.

-ifdef(p_anything).
-spec p_anything() -> parse_fun().
%% Consumes exactly one (UTF-8) character; fails only at end of input.
p_anything() ->
    fun(<<>>, Index) ->
            {fail, {expected, any_character, Index}};
       (Input, Index) when is_binary(Input) ->
            <<C/utf8, Rest/binary>> = Input,
            {<<C/utf8>>, Rest, p_advance_index(<<C/utf8>>, Index)}
    end.
-endif.

-ifdef(p_charclass).
-spec p_charclass(string() | binary()) -> parse_fun().
%% Matches a single character against a regex character class, anchored
%% at the start of the remaining input.
p_charclass(Class) ->
    {ok, RE} = re:compile(Class, [unicode, dotall]),
    fun(Inp, Index) ->
        case re:run(Inp, RE, [anchored]) of
            {match, [{0, Length}|_]} ->
                {Head, Tail} = erlang:split_binary(Inp, Length),
                {Head, Tail, p_advance_index(Head, Index)};
            _ ->
                {fail, {expected, {character_class, binary_to_list(Class)}, Index}}
        end
    end.
-endif.

-ifdef(p_regexp).
-spec p_regexp(binary()) -> parse_fun().
%% Matches a full regular expression (compiled anchored) at the head of
%% the remaining input.
p_regexp(Regexp) ->
    {ok, RE} = re:compile(Regexp, [unicode, dotall, anchored]),
    fun(Inp, Index) ->
        case re:run(Inp, RE) of
            {match, [{0, Length}|_]} ->
                {Head, Tail} = erlang:split_binary(Inp, Length),
                {Head, Tail, p_advance_index(Head, Index)};
            _ ->
                {fail, {expected, {regexp, binary_to_list(Regexp)}, Index}}
        end
    end.
-endif.

-ifdef(line).
-spec line(index() | term()) -> pos_integer() | undefined.
%% Extracts the line number from a parse index.
line({{line,L},_}) -> L;
line(_) -> undefined.
-endif.

-ifdef(column).
-spec column(index() | term()) -> pos_integer() | undefined.
%% Extracts the column number from a parse index.
column({_,{column,C}}) -> C;
column(_) -> undefined.
-endif.

-spec p_advance_index(input() | unicode:charlist() | pos_integer(), index()) -> index().
p_advance_index(MatchedInput, Index) when is_list(MatchedInput) orelse is_binary(MatchedInput)-> % strings lists:foldl(fun p_advance_index/2, Index, unicode:characters_to_list(MatchedInput)); p_advance_index(MatchedInput, Index) when is_integer(MatchedInput) -> % single characters {{line, Line}, {column, Col}} = Index, case MatchedInput of $\n -> {{line, Line+1}, {column, 1}}; _ -> {{line, Line}, {column, Col+1}} end. cuttlefish-3.0.1/src/cuttlefish_validator.erl0000644000232200023220000001316614027401005021745 0ustar debalancedebalance%% ------------------------------------------------------------------- %% %% cuttlefish_validator: models a cuttlefish validator %% %% Copyright (c) 2013 Basho Technologies, Inc. All Rights Reserved. %% %% This file is provided to you under the Apache License, %% Version 2.0 (the "License"); you may not use this file %% except in compliance with the License. You may obtain %% a copy of the License at %% %% http://www.apache.org/licenses/LICENSE-2.0 %% %% Unless required by applicable law or agreed to in writing, %% software distributed under the License is distributed on an %% "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY %% KIND, either express or implied. See the License for the %% specific language governing permissions and limitations %% under the License. %% %% ------------------------------------------------------------------- -module(cuttlefish_validator). -ifdef(TEST). -include_lib("eunit/include/eunit.hrl"). -endif. -record(validator, { name::string(), description::string(), func::fun() }). -type validator() :: #validator{}. -type validator_fun() :: fun((any()) -> boolean()). -type raw_validator() :: {validator, string(), string(), validator_fun()}. -export_type([validator/0]). -export([ parse/1, parse_and_merge/2, is_validator/1, name/1, description/1, func/1, replace/2]). -spec parse(raw_validator()) -> validator() | cuttlefish_error:error(). 
parse({validator, Name, Description, Fun}) -> #validator{ name = Name, description = Description, func = Fun }; parse(X) -> {error, {validator_parse, X}}. %% This assumes it's run as part of a foldl over new schema elements %% in which case, there's only ever one instance of a key in the list %% so keyreplace works fine. -spec parse_and_merge( raw_validator(), [validator()]) -> [validator()|cuttlefish_error:error()]. parse_and_merge({validator, ValidatorName, _, _} = ValidatorSource, Validators) -> NewValidator = parse(ValidatorSource), case lists:keyfind(ValidatorName, #validator.name, Validators) of false -> [ NewValidator | Validators]; _OldMapping -> lists:keyreplace(ValidatorName, #validator.name, Validators, NewValidator) end. -spec is_validator(any()) -> boolean(). is_validator(V) -> is_tuple(V) andalso element(1, V) =:= validator. -spec name(validator()) -> string(). name(V) -> V#validator.name. -spec description(validator()) -> string(). description(V) -> V#validator.description. -spec func(validator()) -> fun(). func(V) -> V#validator.func. -spec replace(validator(), [validator()]) -> [validator()]. replace(Validator, ListOfValidators) -> Exists = lists:keymember(name(Validator), #validator.name, ListOfValidators), case Exists of true -> lists:keyreplace(name(Validator), #validator.name, ListOfValidators, Validator); _ -> [Validator | ListOfValidators] end. -ifdef(TEST). -define(XLATE(X), lists:flatten(cuttlefish_error:xlate(X))). parse_test() -> ValidatorDataStruct = { validator, "name", "description", fun(X) -> X*2 end }, Validator = parse(ValidatorDataStruct), ?assertEqual("name", Validator#validator.name), ?assertEqual("description", Validator#validator.description), F = Validator#validator.func, ?assertEqual(4, F(2)), ok. 
getter_test() -> Validator = #validator{ name = "name", description = "description", func = fun(X) -> X*2 end }, ?assertEqual("name", name(Validator)), ?assertEqual("description", description(Validator)), Fun = func(Validator), ?assertEqual(4, Fun(2)), ok. replace_test() -> Element1 = #validator{ name = "name18", description = "description18", func = fun(X) -> X*2 end }, ?assertEqual(4, (Element1#validator.func)(2)), Element2 = #validator{ name = "name1", description = "description1", func = fun(X) -> X*4 end }, ?assertEqual(8, (Element2#validator.func)(2)), SampleValidators = [Element1, Element2], Override = #validator{ name = "name1", description = "description1", func = fun(X) -> X*5 end }, ?assertEqual(25, (Override#validator.func)(5)), NewValidators = replace(Override, SampleValidators), ?assertEqual([Element1, Override], NewValidators), ok. remove_duplicates_test() -> Sample1 = #validator{ name = "name1", description = "description1", func = fun(X) -> X*3 end }, ?assertEqual(6, (Sample1#validator.func)(2)), Sample2 = #validator{ name = "name1", description = "description1", func = fun(X) -> X*4 end }, ?assertEqual(8, (Sample2#validator.func)(2)), SampleValidators = [Sample1, Sample2], [NewValidator|_] = parse_and_merge( {validator, "name1", "description2", fun(X) -> X*10 end}, SampleValidators), F = func(NewValidator), ?assertEqual(50, F(5)), ?assertEqual("description2", description(NewValidator)), ?assertEqual("name1", name(NewValidator)), ok. parse_error_test() -> {ErrorAtom, ErrorTerm} = parse(not_a_raw_validator), ?assertEqual(error, ErrorAtom), ?assertEqual( "Poorly formatted input to cuttlefish_validator:parse/1 : not_a_raw_validator", ?XLATE(ErrorTerm)), ok. is_validator_test() -> ?assert(not(is_validator(not_a_validator))), V = #validator{ name = "name1", description = "description1", func = fun(X) -> X*4 end }, ?assertEqual(8, (V#validator.func)(2)), ?assert(is_validator(V)), ok. -endif. 
cuttlefish-3.0.1/src/cuttlefish.erl0000644000232200023220000001206114027401005017671 0ustar debalancedebalance%% ------------------------------------------------------------------- %% %% External functions for schema writers and cuttlefish invokers %% %% Copyright (c) 2013 Basho Technologies, Inc. All Rights Reserved. %% %% This file is provided to you under the Apache License, %% Version 2.0 (the "License"); you may not use this file %% except in compliance with the License. You may obtain %% a copy of the License at %% %% http://www.apache.org/licenses/LICENSE-2.0 %% %% Unless required by applicable law or agreed to in writing, %% software distributed under the License is distributed on an %% "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY %% KIND, either express or implied. See the License for the %% specific language governing permissions and limitations %% under the License. %% %% ------------------------------------------------------------------- -module(cuttlefish). -include_lib("kernel/include/logger.hrl"). -ifdef(TEST). -include_lib("eunit/include/eunit.hrl"). -endif. -export([ conf_get/2, conf_get/3, unset/0, invalid/1, otp/2, otp/3, warn/1 ]). % @doc If DesiredMinimum =< the OTP you're running, then return % IfGreaterOrEqual, otherwise IfLessThan. -spec otp(string(), any(), any()) -> any(). otp(DesiredMinimumOTPVersion, IfGreaterOrEqual, IfLessThan) -> ActualOTPVersion = erlang:system_info(otp_release), case otp(DesiredMinimumOTPVersion, ActualOTPVersion) of true -> IfGreaterOrEqual; _ -> IfLessThan end. % @doc is ActualOTPVersion >= DesiredMinimumOTPVersion? -spec otp(string(), string()) -> boolean(). 
otp([], []) -> %% They're the same length AND all previous chars were matches true; otp([$R|TMin], Ver) -> otp(TMin, Ver); otp(Min, [$R|TVer]) -> otp(Min, TVer); otp([H|TMin], [H|TVer]) -> %% The head chars are equal, test the tails otp(TMin, TVer); otp([HMin|_], [HVer|_]) -> %% The heads are different, check which is greater HVer >= HMin; otp([], _Ver) -> %% The actual OTP release is a longer string, but %% everything matched up until this point %% e.g. R16B02, R16B02-basho4 true; otp(_Min, []) -> %% Our Min is a longer string %% e.g. R16B02-basho4, R16B02 false. % @doc conf_get/2 is a convenience wrapper for proplists:get_value/2 % for schema writers. Keys to a Conf proplist are variable()s which % are a list of strings. This function will look for those, but if % you pass it a string() instead, it will be nice and split that % string on "." since that's how cuttlefish do. Also, it will % throw(not_found) if the key is not found in the list which is % different that proplists:get_value/2's default behavior of returning % 'undefined'. This makes it easy for cuttlefish translations to abort % and on error, and not assume a value. If that's what you want, % please use conf_get/3. formerly cuttlefish_util:conf_get_value/2 -spec conf_get( string() | cuttlefish_variable:variable(), cuttlefish_conf:conf()) -> any(). conf_get([H|_T]=Variable, ConfigProplist) when is_list(H) -> case proplists:is_defined(Variable, ConfigProplist) of true -> proplists:get_value(Variable, ConfigProplist); false -> throw({not_found, Variable}) end; conf_get(Variable, ConfigProplist) -> conf_get( cuttlefish_variable:tokenize(Variable), ConfigProplist). % @doc conf_get/3 works just like proplists:get_value/3. It expects a % variable() as the Key, but is nice enough to take a string() and % split it on "." formerly cuttlefish_util:conf_get_value/3 -spec conf_get( string() | cuttlefish_variable:variable(), cuttlefish_conf:conf(), any()) -> any(). 
conf_get([H|_T]=Variable, ConfigProplist, Default) when is_list(H) -> proplists:get_value(Variable, ConfigProplist, Default); conf_get(Variable, ConfigProplist, Default) -> conf_get(cuttlefish_variable:tokenize(Variable), ConfigProplist, Default). %% @doc When called inside a translation, tells cuttlefish to omit the %% Erlang setting from the generated configuration. -spec unset() -> no_return(). unset() -> throw(unset). %% @doc When called inside a translation, informs the user that input %% configuration is invalid, using the supplied reason string. -spec invalid(string()) -> no_return(). invalid(Reason) -> throw({invalid, Reason}). %% @doc When called inside a translation, results in a warning message %% being logged. -spec warn(iodata()) -> ok. warn(Str) -> _ = ?LOG_WARNING(Str, []). -ifdef(TEST). otp_test() -> ?assert(otp("R15B02", "R15B02-basho3")), ?assert(not(otp("R15B02-basho3", "R15B02"))), ?assert(otp("R16B02-basho3", "R16B03")), ?assert(otp("R15B01", "R15B02")), ?assert(otp("R15B01", "R15B02-basho3")), ?assert(not(otp("R16B01", "R15B02"))), ?assert(otp("R16", "R16B03")), ?assert(otp("R16", "R16A")), ?assert(not(otp("R16B01", "R16A"))), ?assert(otp("R16A", "R16A")), ?assert(otp("R16", "17")), ?assert(otp("R16B03-1", "17")), ?assert(not(otp("17", "R16"))), ?assert(otp("R16A", "17")), ?assert(not(otp("18", "17"))), ok. -endif. cuttlefish-3.0.1/src/cuttlefish_translation.erl0000644000232200023220000001443214027401005022313 0ustar debalancedebalance%% ------------------------------------------------------------------- %% %% cuttlefish_translation: models a cuttlefish translation %% %% Copyright (c) 2013 Basho Technologies, Inc. All Rights Reserved. %% %% This file is provided to you under the Apache License, %% Version 2.0 (the "License"); you may not use this file %% except in compliance with the License. 
You may obtain %% a copy of the License at %% %% http://www.apache.org/licenses/LICENSE-2.0 %% %% Unless required by applicable law or agreed to in writing, %% software distributed under the License is distributed on an %% "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY %% KIND, either express or implied. See the License for the %% specific language governing permissions and limitations %% under the License. %% %% ------------------------------------------------------------------- -module(cuttlefish_translation). -ifdef(TEST). -include_lib("eunit/include/eunit.hrl"). -endif. -record(translation, { mapping::string(), func::fun() | undefined }). -type translation() :: #translation{}. -type translation_fun() :: fun(([proplists:property()]) -> any()). -type raw_translation() :: {translation, string(), translation_fun()} | {translation, string()}. -export_type([translation/0]). -export([ parse/1, parse_and_merge/2, is_translation/1, mapping/1, func/1, replace/2]). -spec parse(raw_translation()) -> translation() | cuttlefish_error:error(). parse({translation, Mapping}) -> #translation{ mapping = Mapping }; parse({translation, Mapping, Fun}) -> #translation{ mapping = Mapping, func = Fun }; parse(X) -> {error, {translation_parse, X}}. %% This assumes it's run as part of a foldl over new schema elements %% in which case, there's only ever one instance of a key in the list %% so keyreplace works fine. -spec parse_and_merge( raw_translation(), [translation()]) -> [translation()]. parse_and_merge({translation, Mapping}, Translations) -> lists:keydelete(Mapping, #translation.mapping, Translations); parse_and_merge({translation, Mapping, _} = TranslationSource, Translations) -> NewTranslation = parse(TranslationSource), case lists:keyfind(Mapping, #translation.mapping, Translations) of false -> [ NewTranslation | Translations]; _OldMapping -> lists:keyreplace(Mapping, #translation.mapping, Translations, NewTranslation) end. -spec is_translation(any()) -> boolean(). 
is_translation(T) -> is_tuple(T) andalso element(1, T) =:= translation. -spec mapping(translation()) -> string(). mapping(T) -> T#translation.mapping. -spec func(translation()) -> fun(). func(T) -> T#translation.func. -spec replace(translation(), [translation()]) -> [translation()]. replace(Translation, ListOfTranslations) -> Exists = lists:keymember(mapping(Translation), #translation.mapping, ListOfTranslations), case Exists of true -> lists:keyreplace(mapping(Translation), #translation.mapping, ListOfTranslations, Translation); _ -> [Translation | ListOfTranslations] end. -ifdef(TEST). -define(XLATE(X), lists:flatten(cuttlefish_error:xlate(X))). parse_test() -> TranslationDataStruct = { translation, "mapping", fun(X) -> X*2 end }, Translation = parse(TranslationDataStruct), ?assertEqual("mapping", Translation#translation.mapping), F = Translation#translation.func, ?assertEqual(4, F(2)), ok. getter_test() -> Translation = #translation{ mapping = "mapping", func = fun(X) -> X*2 end }, ?assertEqual("mapping", mapping(Translation)), Fun = func(Translation), ?assertEqual(4, Fun(2)), ok. replace_test() -> Element1 = #translation{ mapping = "mapping18", func = fun(X) -> X*2 end }, ?assertEqual(4, (Element1#translation.func)(2)), Element2 = #translation{ mapping = "mapping1", func = fun(X) -> X*4 end }, ?assertEqual(8, (Element2#translation.func)(2)), SampleTranslations = [Element1, Element2], Override = #translation{ mapping = "mapping1", func = fun(X) -> X*5 end }, ?assertEqual(25, (Override#translation.func)(5)), NewTranslations = replace(Override, SampleTranslations), ?assertEqual([Element1, Override], NewTranslations), ok. 
parse_and_merge_test() -> Sample1 = #translation{ mapping = "mapping1", func = fun(X) -> X*3 end }, ?assertEqual(6, (Sample1#translation.func)(2)), Sample2 = #translation{ mapping = "mapping2", func = fun(X) -> X*4 end }, ?assertEqual(8, (Sample2#translation.func)(2)), SampleTranslations = [Sample1, Sample2], NewTranslations = parse_and_merge( {translation, "mapping1", fun(X) -> X * 10 end}, SampleTranslations), F = func(hd(NewTranslations)), ?assertEqual(50, F(5)), ok. parse_error_test() -> {ErrorAtom, ErrorTuple} = parse(not_a_raw_translation), ?assertEqual(error, ErrorAtom), ?assertEqual( "Poorly formatted input to cuttlefish_translation:parse/1 : not_a_raw_translation", ?XLATE(ErrorTuple)), ok. parse_empty_test() -> TranslationDataStruct = { translation, "mapping" }, Translation = parse(TranslationDataStruct), ?assertEqual("mapping", Translation#translation.mapping), F = Translation#translation.func, ?assertEqual(undefined, F), ok. parse_and_merge_empty_test() -> Sample1 = #translation{ mapping = "mapping1", func = fun(X) -> X*3 end }, ?assertEqual(6, (Sample1#translation.func)(2)), Sample2 = #translation{ mapping = "mapping2", func = fun(X) -> X*4 end }, ?assertEqual(8, (Sample2#translation.func)(2)), SampleTranslations = [Sample1, Sample2], NewTranslations = parse_and_merge( {translation, "mapping1"}, SampleTranslations), F = func(hd(NewTranslations)), ?assertEqual(1, length(NewTranslations)), ?assertEqual(40, F(10)), ok. is_translation_test() -> ?assert(not(is_translation(not_a_translation))), T = #translation{ mapping = "mapping1", func = fun(X) -> X*3 end }, ?assertEqual(6, (T#translation.func)(2)), ?assert(is_translation(T)), ok. -endif. cuttlefish-3.0.1/src/cuttlefish_datatypes.erl0000644000232200023220000005775114027401005021766 0ustar debalancedebalance%% ------------------------------------------------------------------- %% %% cuttlefish_datatypes: handles datatypes in cuttlefish schemas %% %% Copyright (c) 2013 Basho Technologies, Inc. 
All Rights Reserved. %% %% This file is provided to you under the Apache License, %% Version 2.0 (the "License"); you may not use this file %% except in compliance with the License. You may obtain %% a copy of the License at %% %% http://www.apache.org/licenses/LICENSE-2.0 %% %% Unless required by applicable law or agreed to in writing, %% software distributed under the License is distributed on an %% "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY %% KIND, either express or implied. See the License for the %% specific language governing permissions and limitations %% under the License. %% %% ------------------------------------------------------------------- -module(cuttlefish_datatypes). -ifdef(TEST). -include_lib("eunit/include/eunit.hrl"). -endif. -type datatype() :: integer | string | atom | file | directory | flag | {flag, atom(), atom()} | {flag, {atom(), term()}, {atom(), term()}} | {enum, [atom()]} | ip | fqdn | {duration, cuttlefish_duration:time_unit() } | bytesize | {percent, integer} | {percent, float} | float | {list, datatype()}. -type extended() :: { integer, integer() } | { string, string() } | { file, file:filename() } | { directory, file:filename() } | { atom, atom() } | { ip, { string(), integer() } } | { {duration, cuttlefish_duration:time_unit() }, string() } | { bytesize, string() } | { {percent, integer}, integer() } | { {percent, float}, float() } | { float, float() }. -type datatype_list() :: [ datatype() | extended() ]. -export_type([datatype/0, extended/0, datatype_list/0]). -export([ is_supported/1, is_extended/1, is_valid_list/1, from_string/2, to_string/2, extended_from/1 ]). -spec is_supported(any()) -> boolean(). 
%% @doc Predicate: is the given term a datatype this module knows how to
%% convert? Simple scalar tags, flag/enum forms, the seven duration
%% units, percents, and single-level lists are supported; nested lists
%% and anything unrecognized are not.
is_supported(Tag) when Tag =:= integer;
                       Tag =:= string;
                       Tag =:= file;
                       Tag =:= directory;
                       Tag =:= flag;
                       Tag =:= atom;
                       Tag =:= ip;
                       Tag =:= fqdn;
                       Tag =:= bytesize;
                       Tag =:= float ->
    true;
is_supported({flag, On, Off}) when is_atom(On), is_atom(Off) ->
    true;
is_supported({flag, {On, _}, {Off, _}}) when is_atom(On), is_atom(Off) ->
    true;
is_supported({enum, Choices}) when is_list(Choices) ->
    true;
is_supported({duration, Unit}) when Unit =:= f; Unit =:= w; Unit =:= d;
                                    Unit =:= h; Unit =:= m; Unit =:= s;
                                    Unit =:= ms ->
    true;
is_supported({percent, integer}) ->
    true;
is_supported({percent, float}) ->
    true;
%% A list of lists is explicitly rejected before the recursive clause.
is_supported({list, {list, _}}) ->
    false;
is_supported({list, Inner}) ->
    is_supported(Inner);
is_supported(_) ->
    false.
-spec is_extended(any()) -> boolean().
%% @doc Returns true when Term is a well-formed extended datatype,
%% i.e. a {DatatypeTag, Value} pair whose Value has the expected shape.
%% Anything else returns false.
is_extended({integer, I}) when is_integer(I) -> true;
is_extended({string, S}) when is_list(S) -> true;
is_extended({atom, A}) when is_atom(A) -> true;
is_extended({file, F}) when is_list(F) -> true;
is_extended({directory, D}) when is_list(D) -> true;
is_extended({ip, {IP, Port}}) when is_list(IP) andalso is_integer(Port) -> true;
is_extended({ip, StringIP}) when is_list(StringIP) -> true;
is_extended({fqdn, {FQDN, Port}}) when is_list(FQDN) andalso is_integer(Port) -> true;
is_extended({fqdn, StringFQDN}) when is_list(StringFQDN) -> true;
is_extended({{duration, f}, D}) when is_list(D) -> true;
is_extended({{duration, w}, D}) when is_list(D) -> true;
is_extended({{duration, d}, D}) when is_list(D) -> true;
%% BUGFIX: hours ('h') is a supported duration unit (see is_supported/1
%% and cuttlefish_duration:time_unit()) but had no clause here, so
%% extended {{duration, h}, _} values were rejected and to_string/2
%% fell through to a type error for them. The pre-existing 'g' clause
%% looks like a typo for 'h', but it is kept for backward compatibility
%% because the eunit suite asserts is_extended({{duration, g}, _}).
is_extended({{duration, h}, D}) when is_list(D) -> true;
is_extended({{duration, g}, D}) when is_list(D) -> true;
is_extended({{duration, m}, D}) when is_list(D) -> true;
is_extended({{duration, s}, D}) when is_list(D) -> true;
is_extended({{duration, ms}, D}) when is_list(D) -> true;
is_extended({bytesize, B}) when is_list(B) -> true;
is_extended({{percent, integer}, _Int}) -> true;
is_extended({{percent, float}, _Float}) -> true;
is_extended({float, F}) when is_float(F) -> true;
is_extended(_) -> false.

%% @doc Maps an extended datatype back to its plain datatype tag
%% (e.g. {integer, 32} -> integer). If the input is already a supported
%% plain datatype it is returned unchanged; otherwise returns 'error'.
-spec extended_from(extended()) -> datatype().
extended_from({integer, _}) -> integer;
extended_from({string, _}) -> string;
extended_from({atom, _}) -> atom;
extended_from({file, _}) -> file;
extended_from({directory, _}) -> directory;
extended_from({ip, _}) -> ip;
extended_from({fqdn, _}) -> fqdn;
extended_from({{duration, Unit}, _}) -> {duration, Unit};
extended_from({bytesize, _}) -> bytesize;
extended_from({{percent, integer}, _}) -> {percent, integer};
extended_from({{percent, float}, _}) -> {percent, float};
extended_from({float, _}) -> float;
extended_from(Other) ->
    case is_supported(Other) of
        true -> Other;
        _ -> error
    end.

-spec is_valid_list(any()) -> boolean().
%% @doc A valid datatype list is a non-empty list whose every element
%% is either a supported plain datatype or an extended datatype.
is_valid_list(NotList) when not is_list(NotList) -> false;
is_valid_list([]) -> false;
is_valid_list(List) ->
    lists:all(fun(X) -> is_supported(X) orelse is_extended(X) end, List).

%% @doc Renders a value of the given datatype as a string. Values that
%% are already strings pass through unchanged; native values are
%% formatted via the type-specific helper module. Clause order matters:
%% the final "Pokemon" clause resolves extended datatypes and reports a
%% type error for anything else.
-spec to_string(term(), datatype()) -> string() | cuttlefish_error:error().
to_string(Atom, atom) when is_list(Atom) -> Atom;
to_string(Atom, atom) when is_atom(Atom) -> atom_to_list(Atom);

to_string(Integer, integer) when is_integer(Integer) -> integer_to_list(Integer);
to_string(Integer, integer) when is_list(Integer) -> Integer;

%% ip/fqdn pairs render as "Host:Port"
to_string({IP, Port}, ip) when is_list(IP), is_integer(Port) -> IP ++ ":" ++ integer_to_list(Port);
to_string(IPString, ip) when is_list(IPString) -> IPString;

to_string({FQDN, Port}, fqdn) when is_list(FQDN), is_integer(Port) -> FQDN ++ ":" ++ integer_to_list(Port);
to_string(FQDNString, fqdn) when is_list(FQDNString) -> FQDNString;

to_string(Enum, {enum, _}) when is_list(Enum) -> Enum;
to_string(Enum, {enum, _}) when is_atom(Enum) -> atom_to_list(Enum);

to_string(Duration, {duration, _}) when is_list(Duration) -> Duration;
to_string(Duration, {duration, Unit}) when is_integer(Duration) -> cuttlefish_duration:to_string(Duration, Unit);

to_string(Bytesize, bytesize) when is_list(Bytesize) -> Bytesize;
to_string(Bytesize, bytesize) when is_integer(Bytesize) -> cuttlefish_bytesize:to_string(Bytesize);

to_string(String, string) when is_list(String) -> String;

to_string(File, file) when is_list(File) -> File;
to_string(Directory, directory) when is_list(Directory) -> Directory;

to_string(Flag, flag) when is_atom(Flag) -> cuttlefish_flag:to_string(Flag, flag);
to_string(Flag, flag) when is_list(Flag) -> cuttlefish_flag:to_string(Flag, flag);
to_string(Flag, {flag, _, _}=Type) when is_atom(Flag) -> cuttlefish_flag:to_string(Flag, Type);
to_string(Flag, {flag, _, _}=Type) when is_list(Flag) -> cuttlefish_flag:to_string(Flag, Type);

to_string(Percent, {percent, integer}) when is_integer(Percent) -> integer_to_list(Percent) ++ "%";
to_string(Percent, {percent, integer}) when is_list(Percent) -> Percent;

%% A float percent (0.0..1.0) is scaled to 0..100 and rounded UP to a
%% whole-number percentage.
to_string(Percent, {percent, float}) when is_float(Percent) ->
    P = list_to_float(float_to_list(Percent * 100, [{decimals, 6}, compact])),
    integer_to_list(cuttlefish_util:ceiling(P)) ++ "%";
to_string(Percent, {percent, float}) when is_list(Percent) -> Percent;

to_string(Float, float) when is_float(Float) -> float_to_list(Float, [{decimals, 6}, compact]);
to_string(Float, float) when is_list(Float) -> Float;

%% The Pokemon Clause: Gotta Catch 'em all!
%% Extended datatypes are reduced to their plain tag and retried;
%% anything else is a type error.
to_string(Value, MaybeExtendedDatatype) ->
    case is_extended(MaybeExtendedDatatype) of
        true ->
            to_string(Value, extended_from(MaybeExtendedDatatype));
        _ ->
            {error, {type, {Value, MaybeExtendedDatatype}}}
    end.

%% @doc Parses a (usually string) value into the native representation
%% of the given datatype. Values already in native form pass through.
%% Returns {error, {conversion | range | type, _}} tuples on failure.
-spec from_string(term(), datatype()) -> term() | cuttlefish_error:error().
from_string(Atom, atom) when is_atom(Atom) -> Atom;
%% NOTE(review): list_to_atom/1 on config input grows the atom table;
%% presumably acceptable because schemas bound the input — confirm.
from_string(String, atom) when is_list(String) -> list_to_atom(String);

from_string(Value, {enum, Enum}) -> cuttlefish_enum:parse(Value, {enum, Enum});

from_string(Integer, integer) when is_integer(Integer) -> Integer;
from_string(String, integer) when is_list(String) ->
    try list_to_integer(String) of
        X -> X
    catch
        _:_ -> {error, {conversion, {String, integer}}}
    end;

from_string({IP, Port}, ip) when is_list(IP), is_integer(Port) -> {IP, Port};
%% Split on the LAST colon so IPv6 addresses keep their colons.
%% NOTE(review): string:rchr/2 is deprecated in modern OTP — candidate
%% for string:find(String, ":", trailing) in a follow-up.
from_string(String, ip) when is_list(String) ->
    from_string_to_ip(String, lists:split(string:rchr(String, $:), String));

from_string({FQDN, Port}, fqdn) when is_list(FQDN), is_integer(Port) -> {FQDN, Port};
from_string(String, fqdn) when is_list(String) ->
    from_string_to_fqdn(String, lists:split(string:rchr(String, $:), String));

from_string(Duration, {duration, _}) when is_integer(Duration) -> Duration;
from_string(Duration, {duration, Unit}) when is_list(Duration) -> cuttlefish_duration:parse(Duration, Unit);

from_string(Bytesize, bytesize) when is_integer(Bytesize) -> Bytesize;
from_string(Bytesize, bytesize) when is_list(Bytesize) -> cuttlefish_bytesize:parse(Bytesize);

from_string(String, string) when is_list(String) -> String;

from_string(File, file) when is_list(File) -> File;
from_string(Directory, directory) when is_list(Directory) -> Directory;

from_string(Flag, flag) when is_list(Flag) -> cuttlefish_flag:parse(Flag);
from_string(Flag, flag) when is_atom(Flag) -> cuttlefish_flag:parse(Flag);
from_string(Flag, {flag, _, _}=Type) when is_list(Flag) -> cuttlefish_flag:parse(Flag, Type);
from_string(Flag, {flag, _, _}=Type) when is_atom(Flag) -> cuttlefish_flag:parse(Flag, Type);

%% Integer percents must land in 0..100.
from_string(Percent, {percent, integer}) when is_integer(Percent), Percent >= 0, Percent =< 100 ->
    Percent;
from_string(Percent, {percent, integer}) when is_integer(Percent) ->
    {error, {range, {{Percent, {percent, integer}}, "0 - 100%"}}};
%% The string form ends with a percent sign; strip it, then re-enter
%% the integer clauses above for range checking.
from_string(Percent, {percent, integer}) when is_list(Percent) ->
    from_string(
        list_to_integer(string:sub_string(Percent, 1, length(Percent) - 1)),
        {percent, integer});

%% Float percents are stored as a fraction in 0.0..1.0.
from_string(Percent, {percent, float}) when is_float(Percent), Percent >= 0, Percent =< 1 ->
    Percent;
from_string(Percent, {percent, float}) when is_float(Percent) ->
    {error, {range, {{Percent, {percent, float}}, "0 - 100%"}}};
%% The string form ends with a percent sign; strip it and divide by 100
%% before re-entering the float clauses above.
from_string(Percent, {percent, float}) when is_list(Percent) ->
    from_string(
        list_to_integer(string:sub_string(Percent, 1, length(Percent) - 1)) / 100.0,
        {percent, float});

from_string(Float, float) when is_float(Float) -> Float;
from_string(String, float) when is_list(String) ->
    try list_to_float(String) of
        X -> X
    catch
        _:_ -> {error, {conversion, {String, float}}}
    end;

%% Comma-separated list: each trimmed element is parsed with the inner
%% datatype; a failing element becomes an in-place error tuple.
from_string(List, {list, DataType}) when is_list(List) ->
    lists:map(fun(El) -> from_string(string:trim(El), DataType) end,
              string:split(List, ",", all));

from_string(Thing, InvalidDatatype) ->
    {error, {type, {Thing, InvalidDatatype}}}.
%%% Utility functions for IP conversion

%% @doc Parses a port-number string. Returns the integer, or
%% 'undefined' when the string is not a non-negative integer.
port_to_integer(Str) ->
    try list_to_integer(Str) of
        X when X >= 0 -> X;
        %% Negative ports are nonsensical
        _X -> undefined
    catch
        _:_ -> undefined
    end.

%% @doc Combines an inet:parse_address/1 result with a parsed port into
%% either {IPString, Port} or a conversion error.
ip_conversions(String, _IPStr, {error, einval}, _Port) ->
    {error, {conversion, {String, 'IP'}}};
ip_conversions(String, _IPStr, _IP, undefined) ->
    {error, {conversion, {String, 'IP'}}};
ip_conversions(_String, IPStr, {ok, _}, Port) ->
    {IPStr, Port}.

%% @doc Same as ip_conversions/4 but for a validate_fqdn/1 regex result.
fqdn_conversions(String, _FQDNStr, nomatch, _Port) ->
    {error, {conversion, {String, 'FQDN'}}};
fqdn_conversions(String, _FQDNStr, _, undefined) ->
    {error, {conversion, {String, 'FQDN'}}};
fqdn_conversions(_String, FQDNStr, {match, _}, Port) ->
    {FQDNStr, Port}.

%% @doc Validates a fully-qualified domain name via regex;
%% returns {match, _} or nomatch (the shape fqdn_conversions/4 expects).
validate_fqdn(Str) ->
    %% inspired by https://regexr.com/3g5j0, amended to disallow [:space:]
    re:run(Str, "^(?!:\/\/)(?=[^[:space:]]{1,255}$)((.{1,63}\.){1,127}(?![0-9]*$)[a-z0-9-]+\.?)$").

%% @doc Drops the final element of a list (used to strip the trailing
%% ':' from the host part of "host:port").
%% IMPROVED: lists:droplast/1 has been in stdlib since OTP 17, and this
%% module already requires OTP 20+ (string:trim/1 in from_string/2),
%% so the hand-rolled lists:sublist(List, length(List)-1) was replaced.
%% Like the original, this raises for the empty list.
droplast(List) ->
    lists:droplast(List).

%% @doc Second stage of IP parsing. Split is {HostPlusColon, PortString}
%% produced by splitting the input at its last colon; an empty prefix
%% means no colon (hence no port) was present.
from_string_to_ip(String, {[], String}) ->
    {error, {conversion, {String, 'IP'}}}; %% No port
from_string_to_ip(String, {IpPlusColon, PortString}) ->
    %% Drop the trailing colon from the host part.
    IP = droplast(IpPlusColon),
    ip_conversions(String, IP, inet:parse_address(IP), port_to_integer(PortString)).

%% @doc Second stage of FQDN parsing; mirrors from_string_to_ip/2.
from_string_to_fqdn(String, {[], String}) ->
    {error, {conversion, {String, 'FQDN'}}};
from_string_to_fqdn(String, {FQDNPlusColon, PortString}) ->
    FQDN = droplast(FQDNPlusColon),
    fqdn_conversions(String, FQDN, validate_fqdn(FQDN), port_to_integer(PortString)).

-ifdef(TEST).

-define(XLATE(X), lists:flatten(cuttlefish_error:xlate(X))).

to_string_atom_test() ->
    ?assertEqual("split_the", to_string(split_the, atom)),
    ?assertEqual("split_the", to_string("split_the", atom)).

to_string_integer_test() ->
    ?assertEqual("32", to_string(32, integer)),
    ?assertEqual("32", to_string("32", integer)).
%% to_string/2 accepts both the native representation and the
%% already-rendered string form for every datatype below.
to_string_ip_test() ->
    ?assertEqual("127.0.0.1:8098", to_string("127.0.0.1:8098", ip)),
    ?assertEqual("127.0.0.1:8098", to_string({"127.0.0.1", 8098}, ip)).

to_string_enum_test() ->
    ?assertEqual("true", to_string("true", {enum, [true, false]})),
    ?assertEqual("true", to_string(true, {enum, [true, false]})).

to_string_string_test() ->
    ?assertEqual("string", to_string("string", string)).

to_string_duration_test() ->
    %% Strings pass through untouched (even with a "wrong" unit);
    %% integers are rendered in the given unit.
    ?assertEqual("1w", to_string("1w", {duration, s})),
    ?assertEqual("1w", to_string(604800000, {duration, ms})).

to_string_bytesize_test() ->
    ?assertEqual("1GB", to_string(1073741824, bytesize)),
    ?assertEqual("1GB", to_string("1GB", bytesize)).

to_string_percent_integer_test() ->
    ?assertEqual("10%", to_string(10, {percent, integer})),
    ?assertEqual("10%", to_string("10%", {percent, integer})),
    ok.

to_string_percent_float_test() ->
    %% 0.1 is scaled to a whole-number percentage
    ?assertEqual("10%", to_string(0.1, {percent, float})),
    ?assertEqual("10%", to_string("10%", {percent, float})),
    ok.

to_string_float_test() ->
    ?assertEqual("0.1", to_string(0.1, float)),
    ?assertEqual("0.1", to_string("0.1", float)),
    ok.
%% Extended datatypes ({Tag, DefaultValue}) convert via their plain tag.
to_string_extended_type_test() ->
    ?assertEqual("split_the", to_string(split_the, {atom, split_the})),
    ?assertEqual("split_the", to_string("split_the", {atom, split_the})),
    ?assertEqual("32", to_string(32, {integer, 32})),
    ?assertEqual("32", to_string("32", {integer, 32})),
    ?assertEqual("127.0.0.1:8098", to_string("127.0.0.1:8098", {ip, "127.0.0.1:8098"})),
    ?assertEqual("127.0.0.1:8098", to_string({"127.0.0.1", 8098}, {ip, {"127.0.0.1", 8098}})),
    ?assertEqual("example.com:8098", to_string("example.com:8098", {fqdn, "example.com:8098"})),
    ?assertEqual("example.com:8098", to_string({"example.com", 8098}, {fqdn, {"example.com", 8098}})),
    ?assertEqual("string", to_string("string", {string, "string"})),
    ?assertEqual("1w", to_string("1w", {{duration, s}, "1w"})),
    ?assertEqual("1w", to_string(604800000, {{duration, ms}, "1w"})),
    ?assertEqual("1GB", to_string(1073741824, {bytesize, "1GB"})),
    ?assertEqual("1GB", to_string("1GB", {bytesize, "1GB"})),
    ?assertEqual("10%", to_string(10, {{percent, integer}, "10%"})),
    ?assertEqual("10%", to_string("10%", {{percent, integer}, "10%"})),
    ?assertEqual("10%", to_string(0.1, {{percent, float}, "10%"})),
    ?assertEqual("10%", to_string("10%", {{percent, float}, "10%"})),
    ?assertEqual("0.1", to_string(0.1, {float, 0.1})),
    ?assertEqual("0.1", to_string("0.1", {float, 0.1})),
    ok.

%% Unknown datatypes surface as a translatable {error, {type, _}}.
to_string_unsupported_datatype_test() ->
    ?assertEqual("Tried to convert \"Something\" but invalid datatype: unsupported_datatype",
                 ?XLATE(to_string("Something", unsupported_datatype))).

from_string_atom_test() ->
    ?assertEqual(split_the, from_string(split_the, atom)),
    ?assertEqual(split_the, from_string("split_the", atom)).

from_string_integer_test() ->
    ?assertEqual(32, from_string(32, integer)),
    ?assertEqual(32, from_string("32", integer)),
    ?assertEqual("\"thirty_two\" cannot be converted to a(n) integer",
                 ?XLATE(from_string("thirty_two", integer))),
    ok.
%% IPv4 and IPv6 forms split host/port at the LAST colon.
from_string_ip_test() ->
    ?assertEqual({"127.0.0.1", 8098}, from_string("127.0.0.1:8098", ip)),
    ?assertEqual(
        {"2001:0db8:85a3:0042:1000:8a2e:0370:7334", 8098},
        from_string("2001:0db8:85a3:0042:1000:8a2e:0370:7334:8098", ip)),
    ?assertEqual(
        {"2001:0db8:85a3::0370:7334", 8098},
        from_string("2001:0db8:85a3::0370:7334:8098", ip)),
    ?assertEqual(
        {"::1", 1},
        from_string("::1:1", ip)),
    BadIPs = [
        "This is not an IP:80",
        "2001:0db8:85a3:0042:1000:8a2e:0370:80",
        "127.0.0.1.1:80",
        "127.256.0.1:80",
        "127.0.0.1", %% No port
        "127.0.0.1:-5",
        "0:127.0.0.1:80",
        "127.0.0.1:80l",
        ":1:1"
    ],
    lists:foreach(fun(Bad) ->
        ?assertEqual({error, {conversion, {Bad, 'IP'}}}, from_string(Bad, ip))
    end, BadIPs),
    ok.

from_string_fqdn_test() ->
    ?assertEqual({"fqdn.com", 8098}, from_string("fqdn.com:8098", fqdn)),
    ?assertEqual(
        {"f.q.d.n.com", 8098},
        from_string("f.q.d.n.com:8098", fqdn)),
    ?assertEqual(
        {"fqdn.com.", 8098},
        from_string("fqdn.com.:8098", fqdn)),
    ?assertEqual(
        {"FqDn.com", 8098},
        from_string("FqDn.com:8098", fqdn)),
    ?assertEqual(
        {"ec2-35-160-210-253.us-west-2-.compute.amazonaws.com", 1},
        from_string("ec2-35-160-210-253.us-west-2-.compute.amazonaws.com:1", fqdn)),
    BadFQDNs = [
        "This is not an fqdn:80",
        "This.is not.an.fqdn:80",
        "",
        "127.0.0.1:80",
        "fqdn.com", %% No port
        "fqdn.com:-5",
        "fqdn.com:80:81"
    ],
    lists:foreach(fun(Bad) ->
        ?assertEqual({error, {conversion, {Bad, 'FQDN'}}}, from_string(Bad, fqdn))
    end, BadFQDNs),
    ok.

from_string_enum_test() ->
    ?assertEqual("\"a\" is not a valid enum value, acceptable values are: b, c",
                 ?XLATE(from_string(a, {enum, [b, c]}))),
    ?assertEqual(true, from_string("true", {enum, [true, false]})),
    ?assertEqual(true, from_string(true, {enum, [true, false]})).

from_string_duration_test() ->
    %% more examples in the cuttlefish_duration tests
    ?assertEqual(1100, from_string("1s100ms", {duration, ms})),
    ?assertEqual(1100, from_string(1100, {duration, ms})),
    ok.
from_string_duration_secs_test() ->
    %% more examples in the cuttlefish_duration tests
    %% also rounds up for smaller units
    ?assertEqual(2, from_string("1s100ms", {duration, s})),
    ?assertEqual(2, from_string(2, {duration, s})),
    ok.

from_string_percent_integer_test() ->
    ?assertEqual(10, from_string("10%", {percent, integer})),
    ?assertEqual(10, from_string(10, {percent, integer})),
    %% Range!
    ?assertEqual(0, from_string("0%", {percent, integer})),
    ?assertEqual(100, from_string("100%", {percent, integer})),
    ?assertEqual("110% can't be outside the range 0 - 100%",
                 ?XLATE(from_string("110%", {percent, integer}))),
    ?assertEqual("-1% can't be outside the range 0 - 100%",
                 ?XLATE(from_string("-1%", {percent, integer}))),
    ok.

%% Float percents parse to a fraction in 0.0..1.0.
from_string_percent_float_test() ->
    ?assertEqual(0.10, from_string("10%", {percent, float})),
    ?assertEqual(0.10, from_string(0.1, {percent, float})),
    %% Range!
    ?assertEqual(0.0, from_string("0%", {percent, float})),
    ?assertEqual(1.0, from_string("100%", {percent, float})),
    ?assertEqual("110% can't be outside the range 0 - 100%",
                 ?XLATE(from_string("110%", {percent, float}))),
    ?assertEqual("-1% can't be outside the range 0 - 100%",
                 ?XLATE(from_string("-1%", {percent, float}))),
    ok.

from_string_float_test() ->
    ?assertEqual(0.1, from_string("0.100", float)),
    ?assertEqual(0.1, from_string(0.1, float)),
    ok.

from_string_string_test() ->
    ?assertEqual("string", from_string("string", string)).

%% List parsing trims whitespace around each comma-separated element.
from_string_string_list_test() ->
    ?assertEqual(["v1", "v2", "v3"], from_string("v1, v2,v3", {list, string})),
    ok.

from_string_integer_list_test() ->
    ?assertEqual([1, 2, 3], from_string("1, 2,3", {list, integer})),
    ok.

from_string_atom_list_test() ->
    ?assertEqual([a, b, c], from_string("a, b,c", {list, atom})),
    ok.
%% A failing list element becomes an in-place error tuple; the
%% remaining elements still parse.
from_string_string_in_integer_list_test() ->
    ?assertEqual([{error, {conversion, {"a", integer}}}, 1, 2],
                 from_string("a, 1,2", {list, integer})),
    ok.

from_string_unsupported_datatype_test() ->
    ?assertEqual("Tried to convert \"string\" but invalid datatype: unsupported_datatype",
                 ?XLATE(from_string("string", unsupported_datatype))).

is_supported_test() ->
    ?assert(is_supported(integer)),
    ?assert(is_supported(string)),
    ?assert(is_supported(atom)),
    ?assert(is_supported(file)),
    ?assert(is_supported(directory)),
    ?assert(is_supported({enum, [one, two, three]})),
    ?assert(not(is_supported({enum, not_a_list}))),
    ?assert(is_supported(ip)),
    ?assert(is_supported({duration, f})),
    ?assert(is_supported({duration, w})),
    ?assert(is_supported({duration, d})),
    ?assert(is_supported({duration, h})),
    ?assert(is_supported({duration, m})),
    ?assert(is_supported({duration, s})),
    ?assert(is_supported({duration, ms})),
    ?assert(is_supported(bytesize)),
    ?assert(is_supported({list, string})),
    %% lists of lists are explicitly unsupported
    ?assert(not(is_supported({list, {list, string}}))),
    ?assert(not(is_supported(some_unsupported_type))),
    ok.
is_extended_test() ->
    ?assertEqual(true, is_extended({integer, 10})),
    ?assertEqual(true, is_extended({integer, -10})),
    ?assertEqual(false, is_extended({integer, "ten"})),
    ?assertEqual(true, is_extended({string, "string"})),
    ?assertEqual(false, is_extended({string, string})),
    ?assertEqual(false, is_extended({string, 10})),
    ?assertEqual(true, is_extended({atom, atom})),
    ?assertEqual(false, is_extended({atom, "atom"})),
    ?assertEqual(false, is_extended({atom, 10})),
    ?assertEqual(true, is_extended({file, "/tmp/foo.txt"})),
    ?assertEqual(true, is_extended({file, ""})),
    ?assertEqual(false, is_extended({file, this})),
    ?assertEqual(true, is_extended({directory, "/tmp/foo.txt"})),
    ?assertEqual(true, is_extended({directory, ""})),
    ?assertEqual(false, is_extended({directory, this})),
    ?assertEqual(true, is_extended({ip, {"1.2.3.4", 1234}})),
    ?assertEqual(false, is_extended({ip, {1234, 1234}})),
    ?assertEqual(false, is_extended({ip, {"1.2.3.4", "1234"}})),
    ?assertEqual(true, is_extended({{duration, f}, "10f"})),
    ?assertEqual(true, is_extended({{duration, w}, "10f"})),
    ?assertEqual(true, is_extended({{duration, d}, "10f"})),
    %% NOTE(review): 'g' is not a unit is_supported/1 accepts — this
    %% looks like a typo for 'h'; confirm before changing either side.
    ?assertEqual(true, is_extended({{duration, g}, "10f"})),
    ?assertEqual(true, is_extended({{duration, m}, "10f"})),
    ?assertEqual(true, is_extended({{duration, s}, "10f"})),
    ?assertEqual(true, is_extended({{duration, ms}, "10ms"})),
    ?assertEqual(true, is_extended({bytesize, "10GB"})),
    ?assertEqual(true, is_extended({{percent, integer}, "10%"})),
    ?assertEqual(true, is_extended({{percent, integer}, 10})),
    ?assertEqual(true, is_extended({{percent, float}, "10%"})),
    ?assertEqual(true, is_extended({{percent, float}, 0.1})),
    ?assertEqual(true, is_extended({float, 0.1})),
    ok.

-endif.
cuttlefish-3.0.1/src/cuttlefish_effective.erl0000644000232200023220000002446514027401005021724 0ustar debalancedebalance
%% -------------------------------------------------------------------
%%
%% cuttlefish_effective: handles generating the effective configuration
%%
%% Copyright (c) 2014 Basho Technologies, Inc. All Rights Reserved.
%%
%% This file is provided to you under the Apache License,
%% Version 2.0 (the "License"); you may not use this file
%% except in compliance with the License. You may obtain
%% a copy of the License at
%%
%%   http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing,
%% software distributed under the License is distributed on an
%% "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
%% KIND, either express or implied. See the License for the
%% specific language governing permissions and limitations
%% under the License.
%%
%% -------------------------------------------------------------------
-module(cuttlefish_effective).

-define(FMT(F,A), lists:flatten(io_lib:format(F,A))).

-export([build/3]).

-ifdef(TEST).
-include_lib("eunit/include/eunit.hrl").
-endif.

%% @doc Builds the "effective configuration" listing: one line per
%% config variable (with defaults filled in), sorted by variable name.
%% Variables shadowed by advanced.config are commented out and flagged,
%% and the advanced.config itself is appended as comments.
-spec build(cuttlefish_conf:conf(), cuttlefish_schema:schema(), [proplists:property()]) -> [string()].
build(Conf, {_Translations, Mappings, _Validators} = _Schema, AdvConfig) ->
    EffectiveConfig = lists:reverse(lists:sort(cuttlefish_generator:add_defaults(Conf, Mappings))),
    %% EffectiveConfig is a list of { [string()], term() }

    %% Returns the list of cuttlefish variables that have been
    %% overridden in advanced.config
    KeysToHateOn = process_advanced(Mappings, AdvConfig),

    EffectiveOutput = lists:foldl(
        fun({Var, Value}, Acc) ->
            Variable = string:join(Var, "."),
            %% Is this variable shadowed by an advanced.config entry?
            IsHater = lists:any(
                fun(X) -> cuttlefish_variable:is_fuzzy_match(Var, X) end,
                KeysToHateOn
            ),
            Line = try ?FMT("~s = ~s", [Variable, Value]) of
                X -> X
            catch
                _:_ ->
                    %% I hate that I had to do this, 'cause you know...
                    %% Erlang and Strings, but actually this is ok because
                    %% sometimes there are going to be weird tuply things
                    %% in here, so always good to fall back on ~p.
                    %% honestly, I think this try should be built into io:format
                    ?FMT("~s = ~p", [Variable, Value])
            end,
            case IsHater of
                true ->
                    %% Note: we're prepending, so lines come out in
                    %% reverse; EffectiveConfig was reverse-sorted above.
                    [?FMT("## ~s was overridden in advanced.config", [Variable]),
                     "## " ++ Line] ++ Acc;
                _ -> [Line | Acc]
            end
        end,
        [],
        EffectiveConfig
    ),
    case AdvConfig of
        [] -> EffectiveOutput;
        _ ->
            EffectiveOutput ++ [
                "## The following advanced.config was used in generating the ",
                "## configuration and may have overridden some options that were ",
                "## commented out above."
            ] ++ advanced_as_comment(AdvConfig)
    end.

%% Renders the advanced.config term as "## "-prefixed comment lines.
advanced_as_comment(AdvConfig) ->
    Str = lists:flatten(io_lib:format("~p", [AdvConfig])),
    [ "## " ++ L || L <- string:tokens(Str, "$\n")].

%% @doc checks a mapping's "mapping" is in the set of kvc paths in
%% the provided advanced.config
-spec process_advanced(
        [cuttlefish_mapping:mapping()],
        [proplists:property()]) -> [cuttlefish_variable:variable()].
process_advanced(Mappings, AdvancedConfig) ->
    AdvKeys = proplist_to_kvcpaths(AdvancedConfig),
    lists:foldl(
        fun(M, Acc) ->
            case lists:member(cuttlefish_mapping:mapping(M), AdvKeys) of
                true -> [cuttlefish_mapping:variable(M)|Acc];
                _ -> Acc
            end
        end,
        [],
        Mappings).

%% @doc returns a list of kvcesque paths that represent the structure
%% of a proplist of proplists of proplists etc...
%% e.g. [{parent, [{child1, [{grandchild1, _}]}, {child2, _}]}] ->
%%      ["parent.child1.grandchild1", "parent.child2"]
-spec proplist_to_kvcpaths([proplists:property()]) -> [string()].
proplist_to_kvcpaths(Proplist) ->
    proplist_to_kvcpaths("", Proplist).

-spec proplist_to_kvcpaths(string(), [proplists:property()]) -> [string()].
proplist_to_kvcpaths(Prefix, Proplist) ->
    %% Handles the base case, without this, all keys would start with "."
    NewPrefix = case Prefix of
        "" -> "";
        _ -> Prefix ++ "."
    end,
    lists:foldl(fun(K, Acc) ->
            KeyedPrefix = NewPrefix ++ canonicalize_key(K),
            %% Recurse into the value; a leaf (non-proplist value)
            %% yields [] and terminates the path here.
            R = proplist_to_kvcpaths(
                KeyedPrefix,
                proplists:get_value(K, Proplist)),
            case R of
                [] -> [KeyedPrefix|Acc];
                _ -> Acc ++ R
            end
        end,
        [],
        keys_if_you_got_em(Proplist)
    ).

%% So this is gross, but is the simplest scheme for determining the
%% type of data coming into this function. It doesn't really
%% matter how we handle non-atoms because cuttlefish only creates
%% proplists with atoms as the keynames.
-spec canonicalize_key(atom() | list() | binary()) -> string().
canonicalize_key(K) when is_atom(K) ->
    atom_to_list(K);
canonicalize_key(K) when is_list(K) ->
    "\"" ++ K ++ "\"";
canonicalize_key(K) when is_binary(K) ->
    "<<\""++binary_to_list(K)++"\">>".

%% Returns the keys when the argument is a proplist, [] for leaves.
-spec keys_if_you_got_em([proplists:property()]) -> [term()].
keys_if_you_got_em(Proplist) when is_list(Proplist) ->
    proplists:get_keys(Proplist);
keys_if_you_got_em(_) -> [].

-ifdef(TEST).

%% This is the comprehensive test of all functionality of this module
%% working together in perfect harmony
probably_the_most_important_test() ->
    Mappings = [
        cuttlefish_mapping:parse(
            {mapping, "namespace.var1", "app.setting1", []}
        ),
        cuttlefish_mapping:parse(
            {mapping, "namespace.2.$sub", "app.setting2", []}
        ),
        cuttlefish_mapping:parse(
            {mapping, "namespace.var3", "app.setting3", []}
        ),
        cuttlefish_mapping:parse(
            {mapping, "namespace.4.$sub", "app.setting4", []}
        )
    ],
    Conf = [
        {["namespace", "var1"], "x"},
        {["namespace", "2", "1"], "x"},
        {["namespace", "2", "2"], "x"},
        {["namespace", "2", "3"], "x"},
        {["namespace", "var3"], "y"},
        {["namespace", "4", "1"], "y"},
        {["namespace", "4", "2"], "y"},
        {["namespace", "4", "3"], "y"}
    ],
    AdvConfig = [{app, [{setting3, "z"}, {setting4, "zz"}]}],
    Effective = build(Conf, {[], Mappings, []}, AdvConfig),
    ?assertEqual(16, length(Effective)),
    %% Remember, this output is sorted by variable, even if there's a comment
    ?assertEqual("namespace.2.1 = x", lists:nth(1, Effective)),
    ?assertEqual("namespace.2.2 = x", lists:nth(2, Effective)),
    ?assertEqual("namespace.2.3 = x", lists:nth(3, Effective)),
    ?assertEqual("## namespace.4.1 was overridden in advanced.config", lists:nth(4, Effective)),
    ?assertEqual("## namespace.4.1 = y", lists:nth(5, Effective)),
    ?assertEqual("## namespace.4.2 was overridden in advanced.config", lists:nth(6, Effective)),
    ?assertEqual("## namespace.4.2 = y", lists:nth(7, Effective)),
    ?assertEqual("## namespace.4.3 was overridden in advanced.config", lists:nth(8, Effective)),
    ?assertEqual("## namespace.4.3 = y", lists:nth(9, Effective)),
    ?assertEqual("namespace.var1 = x", lists:nth(10, Effective)),
    ?assertEqual("## namespace.var3 was overridden in advanced.config", lists:nth(11, Effective)),
    ?assertEqual("## namespace.var3 = y", lists:nth(12, Effective)),
    ?assertEqual("## The following advanced.config was used in generating the ", lists:nth(13, Effective)),
    ?assertEqual("## configuration and may have overridden some options that were ", lists:nth(14, Effective)),
    ?assertEqual("## commented out above.", lists:nth(15, Effective)),
    ?assertEqual("## [{app,[{setting3,\"z\"},{setting4,\"zz\"}]}]", lists:nth(16, Effective)),
    ok.

process_advanced_test() ->
    Mappings = [
        cuttlefish_mapping:parse(
            {mapping, "thing.1", "a.b.c", []}
        )
    ],
    AdvConfig = [{a, [{b, [{c, ""}, {d, ""}]}]}],
    KeysToWatch = process_advanced(Mappings, AdvConfig),
    ?assertEqual([["thing", "1"]], KeysToWatch),
    ok.
%% Fuzzy ($whatev) mappings: every concrete instantiation is flagged
%% when the underlying kvc path appears in advanced.config.
build_with_sub_test() ->
    Mappings = [
        cuttlefish_mapping:parse(
            {mapping, "a.$whatev.thing", "a.b.c", []}
        )
    ],
    AdvConfig = [{a, [{b, [{c, ""}, {d, ""}]}]}],
    Conf = [
        {["a", "1", "thing"], "x"},
        {["a", "2", "thing"], "x"},
        {["a", "3", "thing"], "x"}
    ],
    Effective = build(Conf, {[], Mappings, []}, AdvConfig),
    ?assertEqual(10, length(Effective)),
    ?assertEqual("## a.1.thing was overridden in advanced.config", lists:nth(1, Effective)),
    ?assertEqual("## a.1.thing = x", lists:nth(2, Effective)),
    ?assertEqual("## a.2.thing was overridden in advanced.config", lists:nth(3, Effective)),
    ?assertEqual("## a.2.thing = x", lists:nth(4, Effective)),
    ?assertEqual("## a.3.thing was overridden in advanced.config", lists:nth(5, Effective)),
    ?assertEqual("## a.3.thing = x", lists:nth(6, Effective)),
    ?assertEqual("## The following advanced.config was used in generating the ", lists:nth(7, Effective)),
    ?assertEqual("## configuration and may have overridden some options that were ", lists:nth(8, Effective)),
    ?assertEqual("## commented out above.", lists:nth(9, Effective)),
    ?assertEqual("## [{a,[{b,[{c,[]},{d,[]}]}]}]", lists:nth(10, Effective)),
    ok.

proplist_to_kvcpath_test() ->
    Proplist = [{a, [
                  {b, [
                      {c, "x"}
                  ]},
                  {d, [
                      {e, "y"}
                  ]},
                  {f , "z"}
                ]
               }, {g, "q"}],
    Paths = proplist_to_kvcpaths(Proplist),
    %% Compared as sets: sibling path order is not guaranteed.
    ?assertEqual(sets:from_list([
        "a.b.c",
        "a.d.e",
        "a.f",
        "g"
    ]), sets:from_list(Paths)),
    ok.

%% A tuple value (not a proplist) is a leaf and terminates the path.
proplists_to_kvcpath_riak_core_test() ->
    Proplist = [{riak_core,[
        {ring_creation_size,128},
        {cluster_mgr, {"127.0.0.1", 9080 } }
    ]}],
    Paths = proplist_to_kvcpaths(Proplist),
    ?assertEqual([
        "riak_core.ring_creation_size",
        "riak_core.cluster_mgr"
    ], Paths),
    ok.

-endif.
cuttlefish-3.0.1/src/cuttlefish.app.src0000644000232200023220000000052114027401005020453 0ustar debalancedebalance
{application,cuttlefish,
 [{description,"cuttlefish configuration abstraction"},
  {vsn,"3.0.1"},
  {registered,[]},
  {applications,[kernel,stdlib]},
  {env,[]},
  {licenses,["Apache"]},
  {links,[{"GitHub","https://github.com/Kyorai/cuttlefish"}]}]}.
cuttlefish-3.0.1/src/cuttlefish_variable.erl0000644000232200023220000002302314027401005021536 0ustar debalancedebalance
%% -------------------------------------------------------------------
%%
%% handles both variable and variable definitions
%%
%% Copyright (c) 2013 Basho Technologies, Inc. All Rights Reserved.
%%
%% This file is provided to you under the Apache License,
%% Version 2.0 (the "License"); you may not use this file
%% except in compliance with the License. You may obtain
%% a copy of the License at
%%
%%   http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing,
%% software distributed under the License is distributed on an
%% "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
%% KIND, either express or implied. See the License for the
%% specific language governing permissions and limitations
%% under the License.
%%
%% -------------------------------------------------------------------
-module(cuttlefish_variable).

%% A variable is the tokenized form of a dotted config key:
%% "a.b.c" <-> ["a", "b", "c"]. Segments beginning with '$' are
%% fuzzy/wildcard segments in variable *definitions*.
-type variable() :: [string()].
-export_type([variable/0]).

-ifdef(TEST).
-ifdef(EQC).
-include_lib("eqc/include/eqc.hrl").
-define(QC_OUT(Prop), on_output(fun(F,A) -> io:format(user, F, A) end, Prop)).
-endif.
-include_lib("eunit/include/eunit.hrl").
-endif.

-export([ format/1, tokenize/1, split_on_match/1, replace_match/2,
          extract_first_match/2, fuzzy_matches/2, is_fuzzy_match/2,
          filter_by_prefix/2 ]).

%% @doc Formats a variable back into its dot-separated version.
%% Inverse of tokenize/1.
-spec format(variable()) -> string().
format(Key=[H|_]) when is_list(H) ->
    %% Escape literal dots inside each segment so they survive the join
    Escaped = [re:replace(Word, "[.]", "\\\\&", [{return, list}, global]) || Word <- Key],
    string:join(Escaped, ".").

%% @doc like string:tokens(Key, "."), but if the dot was escaped
%% i.e. \\., don't tokenize that
-spec tokenize(string()) -> variable().
tokenize(Key) ->
    tokenize(Key, "", []).

%% tokenize/3: Part accumulates the current segment (in reverse),
%% Acc the finished segments (in reverse).
tokenize([$\\, $. |Rest], Part, Acc) ->
    %% Escaped dot: keep it inside the current segment
    tokenize(Rest, [$. |Part], Acc);
tokenize([$. |Rest], Part, Acc) ->
    tokenize(Rest, "", [lists:reverse(Part)|Acc]);
%% Trailing dot leaves an empty Part; drop it rather than emit ""
tokenize([], "", Acc) ->
    lists:reverse(Acc);
tokenize([], Part, Acc) ->
    lists:reverse([lists:reverse(Part)|Acc]);
tokenize([Char|Rest], Part, Acc) ->
    tokenize(Rest, [Char|Part], Acc).

%% @doc split a key definition into:
%% * Prefix: Things before the $var
%% * Var: The $var itself
%% * Suffix: Things after the $var
-spec split_on_match(variable()) -> {variable(), string(), variable()}.
split_on_match(Variable) ->
    {PrefixToks, MatchGroup, SuffixToks} = lists:foldl(
        fun(T, {Prefix, MatchGroup, Suffix}) ->
            case {T, MatchGroup} of
                %% First '$'-segment becomes the match group
                {[$$|_], []} -> {Prefix, T, Suffix};
                {_, []} -> {[T|Prefix], MatchGroup, Suffix};
                {_, _} -> {Prefix, MatchGroup, [T|Suffix]}
            end
        end,
        {[], [], []},
        Variable),
    {
        lists:reverse(PrefixToks),
        MatchGroup,
        lists:reverse(SuffixToks)
    }.

%% @doc replaces the $var in Key with Sub; with Sub = undefined the
%% '$' prefix is simply stripped from the fuzzy segment.
-spec replace_match(variable(), string()) -> variable().
replace_match(Variable, Sub) ->
    [ begin
          case {H, Sub} of
              {$$, undefined} -> T;
              {$$, Sub} -> Sub;
              _ -> Tok
          end
      end || [H|T]=Tok <- Variable].

%% @doc Pairs each fuzzy segment of VariableDef with the concrete
%% segment it binds in Variable; nomatch when they cannot correspond.
-spec extract_first_match(variable(), variable()) -> nomatch | [{string(), string()}].
%% If the lengths are equal, try to pair up a fuzzy segment with its match.
extract_first_match(VariableDef, Variable) when length(VariableDef) == length(Variable) ->
    extract_first_match(VariableDef, Variable, nomatch);
%% This could never match because they are different lengths.
extract_first_match(_,_) -> nomatch.

%% We have a perfect match, or no match at all, so return the result.
extract_first_match([], [], Result) when is_list(Result) ->
    %% If the Result is 'nomatch', the last function clause will be
    %% the only one that matches.
    lists:reverse(Result);
%% We found the first fuzzy segment, grab the binding of the segment.
extract_first_match([[$$|_]=Fuzzy|VariableDef], [Value|Variable], nomatch) ->
    extract_first_match(VariableDef, Variable, [{Fuzzy, Value}]);
%% We found a fuzzy segment and already have a match, so just recurse.
extract_first_match([[$$|_]=Fuzzy|VariableDef], [Value|Variable], Result) ->
    extract_first_match(VariableDef, Variable, [{Fuzzy, Value}|Result]);
%% We found two segments that are static and equal.
extract_first_match([X|VariableDef], [X|Variable], Result) ->
    extract_first_match(VariableDef, Variable, Result);
%% Something else happened, so we don't match!
extract_first_match(_,_,_) -> nomatch.

%% @doc given a KeyDef "a.b.$c.d", what are the possible values for $c
%% in the set of Keys in Conf = [{Key, Value}]?
%% Only the FIRST fuzzy binding of each matching key is collected.
-spec fuzzy_matches(variable(), cuttlefish_conf:conf()) -> [{string(), any()}].
fuzzy_matches(VariableDef, Conf) ->
    lists:foldl(
        fun({Variable, _}, Acc) ->
            case extract_first_match(VariableDef, Variable) of
                nomatch -> Acc;
                [Match|_] -> [Match|Acc]
            end
        end,
        [],
        Conf).

%% @doc could this fixed Key be a match for the variable key KeyDef?
%% e.g. could a.b.$var.d =:= a.b.c.d?
-spec is_fuzzy_match(variable(), variable()) -> boolean().
is_fuzzy_match(Variable, VariableDef) ->
    case length(Variable) =:= length(VariableDef) of
        true ->
            Zipped = lists:zip(Variable, VariableDef),
            %% Segments match pairwise when equal, or when the
            %% definition's segment is a '$'-wildcard.
            lists:all(
                fun({X,Y}) ->
                    X =:= Y orelse hd(Y) =:= $$
                end,
                Zipped);
        _ -> false
    end.

%% @doc For Proplist, return the subset of the proplist that starts
%% with "Key"
-spec filter_by_prefix(string() | [string()], [{[string()], any()}]) -> [{[string()], any()}].
%% Prefix already tokenized: keep the entries whose key begins with it.
filter_by_prefix([H|_T]=Prefix, Proplist) when is_list(H) ->
    lists:filter(fun({Key, _}) -> lists:prefix(Prefix, Key) end, Proplist);
%% Dotted-string prefix: tokenize it first, then filter.
filter_by_prefix(StringPrefix, Proplist) ->
    filter_by_prefix(tokenize(StringPrefix), Proplist).

-ifdef(TEST).

tokenize_variable_key_test() ->
    ?assertEqual(["a", "b", "c", "d"], tokenize("a.b.c.d")),
    %% An escaped dot stays inside its segment.
    ?assertEqual(["a", "b.c", "d"], tokenize("a.b\\.c.d")),
    %% Covers GH #22: a trailing dot produces no empty segment.
    ?assertEqual(["listener", "http"], tokenize("listener.http.")),
    ok.

split_variable_on_match_test() ->
    ?assertEqual({["a", "b"], "$c", ["d", "e"]},
                 split_on_match(["a", "b", "$c", "d", "e"])),
    ?assertEqual({["a", "b", "c", "d", "e"], [], []},
                 split_on_match(["a", "b", "c", "d", "e"])),
    ?assertEqual({[], "$a", ["b", "c", "d", "e"]},
                 split_on_match(["$a", "b", "c", "d", "e"])),
    ok.

variable_match_replace_test() ->
    %% Without a "$" segment the variable comes back unchanged,
    %% whatever the substitution is.
    [?assertEqual(["a", "b", "c"], replace_match(["a", "b", "c"], Sub))
     || Sub <- ["d", "e", "f", "g"]],
    ?assertEqual(["a", "g", "c"], replace_match(["a", "$b", "c"], "g")),
    ?assertEqual(["a", "b", "c"], replace_match(["a", "$b", "c"], undefined)),
    ok.

fuzzy_variable_match_test() ->
    ?assert(is_fuzzy_match(["alpha","bravo","charlie","delta"],
                           ["alpha","bravo","charlie","delta"])),
    ?assert(is_fuzzy_match(["alpha","bravo","anything","delta"],
                           ["alpha","bravo","$charlie","delta"])),
    ?assertNot(is_fuzzy_match(["alpha","bravo.anything","delta"],
                              ["alpha","bravo","charlie","delta"])),
    ?assert(is_fuzzy_match(["alpha","bravo","any.thing","delta"],
                           ["alpha","bravo","$charlie","delta"])),
    ?assert(is_fuzzy_match(["alpha","bravo","any.thing.you.need","delta"],
                           ["alpha","bravo","$charlie","delta"])),
    ok.
%% Every distinct middle segment should be reported as a binding for
%% $name, including one that contains a (non-escaped) dot.
matches_for_variable_def_test() ->
    Conf = [
        {["multi_backend","backend1","storage_backend"], 1},
        {["multi_backend","backend2","storage_backend"], 2},
        {["multi_backend","backend.3","storage_backend"], 3},
        {["multi_backend","backend4","storage_backend"], 4}
    ],
    Vars = proplists:get_all_values("$name",
        fuzzy_matches(["multi_backend","$name","storage_backend"], Conf)
    ),
    ?assertEqual(4, (length(Vars))),
    ?assert(lists:member("backend1", Vars)),
    ?assert(lists:member("backend2", Vars)),
    ?assert(lists:member("backend.3", Vars)),
    ?assert(lists:member("backend4", Vars)),
    ?assertEqual(4, (length(Vars))),
    ok.

%% filter_by_prefix/2 keeps only entries under the given prefix and
%% accepts the prefix either pre-tokenized or as a dotted string.
filter_by_variable_starts_with_test() ->
    Proplist = [
        {["regular","key"], 1},
        {["other","normal","key"], 2},
        {["prefixed","key1"], 3},
        {["prefixed","key2"], 4},
        {["interleaved","key"], 5},
        {["prefixed","key3"], 6}
    ],
    FilteredByList = filter_by_prefix(["prefixed"], Proplist),
    ?assertEqual([
        {["prefixed","key1"], 3},
        {["prefixed","key2"], 4},
        {["prefixed","key3"], 6}
    ], FilteredByList),
    FilteredByString = filter_by_prefix("prefixed", Proplist),
    ?assertEqual([
        {["prefixed","key1"], 3},
        {["prefixed","key2"], 4},
        {["prefixed","key3"], 6}
    ], FilteredByString),
    ok.

-ifdef(EQC).
%% Property-based check (only when EQC is available): formatting a
%% variable and tokenizing the result must give back the original.
variable_roundtrip_test_() ->
    {timeout, 15,
     [?_assert(quickcheck(eqc:testing_time(3,?QC_OUT(prop_format_tokenize_roundtrip()))))]}.

prop_format_tokenize_roundtrip() ->
    ?FORALL(Variable, non_empty(list(gen_word())),
            tokenize(format(Variable)) == Variable).

gen_word() ->
    ?LET(F, non_empty(list(gen_word_char())), lists:flatten(F)).

gen_word_char() ->
    oneof([$., $_, $-, choose($0, $9), choose($A, $Z), choose($a, $z)]).

-endif.
-endif.
cuttlefish-3.0.1/src/cuttlefish_bytesize.erl0000644000232200023220000000640414027401005021613 0ustar debalancedebalance%% -------------------------------------------------------------------
%%
%% cuttlefish_bytesize: complexity for parsing bytesizes
%%
%% Copyright (c) 2013 Basho Technologies, Inc. All Rights Reserved.
%%
%% This file is provided to you under the Apache License,
%% Version 2.0 (the "License"); you may not use this file
%% except in compliance with the License. You may obtain
%% a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing,
%% software distributed under the License is distributed on an
%% "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
%% KIND, either express or implied. See the License for the
%% specific language governing permissions and limitations
%% under the License.
%%
%% -------------------------------------------------------------------
-module(cuttlefish_bytesize).

%% Binary byte-size multipliers (powers of 1024).
-define(KILOBYTE, 1024).
-define(MEGABYTE, 1048576).
-define(GIGABYTE, 1073741824).

-ifdef(TEST).
-include_lib("eunit/include/eunit.hrl").
-endif.

-export([parse/1, to_string/1]).

%% @doc turns an integer of bytes into a string.
%% Uses the largest unit that represents the value exactly (GB is
%% tried first, then MB, then KB); a value divisible by no unit is
%% rendered as a plain integer. e.g. 1024 -> "1KB", 10485760 ->
%% "10MB", but 1025 -> "1025".
-spec to_string(integer()) -> string().
to_string(Bytez) ->
    case { Bytez rem ?GIGABYTE, Bytez rem ?MEGABYTE, Bytez rem ?KILOBYTE} of
        {0, _, _} -> integer_to_list(Bytez div ?GIGABYTE) ++ "GB";
        {_, 0, _} -> integer_to_list(Bytez div ?MEGABYTE) ++ "MB";
        {_, _, 0} -> integer_to_list(Bytez div ?KILOBYTE) ++ "KB";
        _ -> integer_to_list(Bytez)
    end.

%% @doc the reverse of to_string/1. turns "1kb" or "1KB" into 1024.
-spec parse(string()) -> integer()|cuttlefish_error:error().
%% Works on the reversed string so the unit suffix can be matched by
%% pattern. The suffix is matched case-insensitively, so mixed-case
%% spellings such as "1Kb" or "1kB" are accepted as well as "1kb" and
%% "1KB". A string with no recognized suffix is parsed as a bare
%% number; a malformed number yields cuttlefish_util:numerify/1's
%% {error, _} term.
parse(String) ->
    case lists:reverse(String) of
        [B, K|BSize] when (B =:= $B orelse B =:= $b) andalso
                          (K =:= $K orelse K =:= $k) ->
            bmult(cuttlefish_util:numerify(lists:reverse(BSize)), ?KILOBYTE);
        [B, M|BSize] when (B =:= $B orelse B =:= $b) andalso
                          (M =:= $M orelse M =:= $m) ->
            bmult(cuttlefish_util:numerify(lists:reverse(BSize)), ?MEGABYTE);
        [B, G|BSize] when (B =:= $B orelse B =:= $b) andalso
                          (G =:= $G orelse G =:= $g) ->
            bmult(cuttlefish_util:numerify(lists:reverse(BSize)), ?GIGABYTE);
        BSize ->
            %% No unit suffix: the whole string is the number.
            cuttlefish_util:numerify(lists:reverse(BSize))
    end.

%% Multiply a parsed quantity by its unit multiplier, passing any
%% numerify error through unchanged.
-spec bmult(number()|cuttlefish_error:error(), integer()) ->
    number()|cuttlefish_error:error().
bmult({error, _ErrorTerm}=Error, _Mult) ->
    Error;
bmult(Quantity, Multiplier) ->
    Quantity * Multiplier.

-ifdef(TEST).

to_string_test() ->
    ?assertEqual("1KB", to_string(1024)),
    ?assertEqual("2KB", to_string(2048)),
    ?assertEqual("10MB", to_string(10485760)),
    ?assertEqual("1GB", to_string(1073741824)),
    ?assertEqual("20", to_string(20)),
    ok.

parse_test() ->
    ?assertEqual(1024, parse("1kb")),
    ?assertEqual(2048, parse("2KB")),
    %% Mixed-case suffixes are also accepted.
    ?assertEqual(1024, parse("1Kb")),
    ?assertEqual(1024, parse("1kB")),
    ?assertEqual(10485760, parse("10mb")),
    ?assertEqual(10485760, parse("10MB")),
    ?assertEqual(1073741824, parse("1GB")),
    ?assertEqual(1073741824, parse("1gb")),
    ?assertEqual(20, parse("20")),
    %% Only the trailing suffix is stripped; the remainder must still
    %% be a well-formed number.
    ?assertEqual({error, {number_parse, "10MB10"}}, parse("10MB10kb")),
    ok.

-endif.
cuttlefish-3.0.1/src/cuttlefish_duration.hrl0000644000232200023220000000062514027401005021604 0ustar debalancedebalance-define(FMT(F,A), lists:flatten(io_lib:format(F,A))).

%% Duration multipliers, all expressed in milliseconds.
-define(FORTNIGHT, 1209600000).
-define(WEEK, 604800000).
-define(DAY, 86400000).
-define(HOUR, 3600000).
-define(MINUTE, 60000).
-define(SECOND, 1000).

-define(MULTIPLIERS, [{f, ?FORTNIGHT}, {w, ?WEEK}, {d, ?DAY}, {h, ?HOUR}, {m, ?MINUTE}, {s, ?SECOND}, {ms, 1}]).
cuttlefish-3.0.1/src/cuttlefish_error.erl0000644000232200023220000002033114027401005021101 0ustar debalancedebalance%% -------------------------------------------------------------------
%%
%% cuttlefish_error: error representation, filtering and reporting
%%
%% Copyright (c) 2013 Basho Technologies, Inc. All Rights Reserved.
%%
%% This file is provided to you under the Apache License,
%% Version 2.0 (the "License"); you may not use this file
%% except in compliance with the License. You may obtain
%% a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing,
%% software distributed under the License is distributed on an
%% "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
%% KIND, either express or implied. See the License for the
%% specific language governing permissions and limitations
%% under the License.
%%
%% -------------------------------------------------------------------
-module(cuttlefish_error).

-include_lib("kernel/include/logger.hrl").

%% A single error: {error, {Class, Details}} where Class names the
%% kind of failure and Details is class-specific (see xlate/1).
-type error() :: {'error', {atom(), term()}}.
%% A collection of errors, tagged so it cannot be mistaken for an
%% ordinary result list.
-type errorlist() :: {'errorlist', [error()]}.
-export_type([error/0, errorlist/0]).

-export([
    contains_error/1,
    is_error/1,
    filter/1,
    errorlist_maybe/1,
    print/1,
    print/2,
    xlate/1
]).

-ifdef(TEST).
-include_lib("eunit/include/eunit.hrl").
-endif.

%% We'll be calling this a lot from `xlate'
-define(STR(X, Y), xlate(cuttlefish_datatypes:to_string(X, Y))).

-spec xlate({atom(), term()}|string()) -> iolist().
%% Translate an error term into a human-readable iolist; one clause
%% per error class produced elsewhere in cuttlefish.
xlate(Message) when is_list(Message) ->
    %% We allow for strings so that we can safely call
    %% `cuttlefish_datatypes:to_string` when creating these messages
    Message;
xlate({error, Details}) ->
    xlate(Details);
%% Unwrap a nested error, discarding the outer class tag.
xlate({_Error, {error, NestedError}}) ->
    xlate(NestedError);
xlate({type, {Value, Type}}) ->
    io_lib:format("Tried to convert ~p but invalid datatype: ~p", [Value, Type]);
xlate({range, {{Value, Type}, Range}}) ->
    [?STR(Value, Type), " can't be outside the range ", Range];
xlate({conversion, {Value, Type}}) ->
    io_lib:format("~p cannot be converted to a(n) ~s", [Value, Type]);
xlate({duration, Value}) ->
    io_lib:format("Invalid duration value: ~ts", [Value]);
xlate({enum_name, {Value, EnumNames}}) ->
    io_lib:format("~p is not a valid enum value, acceptable values are: ~ts",
                  [Value, string:join(EnumNames, ", ")]);
xlate({enum_format, Value}) ->
    %% This collapses two different type of formatting errors into one
    %% error message
    io_lib:format("Enum elements must be atoms, strings, or 2-tuples with "
                  "atom or string as first element. Bad value: ~w", [Value]);
xlate({mapping_types, List}) ->
    io_lib:format("Invalid datatype list for mapping: ~ts",
                  [string:join(List, ", ")]);
xlate({mapping_parse, Term}) ->
    io_lib:format(
        "Poorly formatted input to cuttlefish_mapping:parse/1 : ~p",
        [Term]
    );
xlate({translation_parse, Term}) ->
    io_lib:format(
        "Poorly formatted input to cuttlefish_translation:parse/1 : ~p",
        [Term]
    );
xlate({validator_parse, Term}) ->
    io_lib:format(
        "Poorly formatted input to cuttlefish_validator:parse/1 : ~p",
        [Term]
    );
xlate({conf_to_latin1, LineNum}) ->
    io_lib:format("Error converting value on line #~p to latin1", [LineNum]);
xlate({bytesize_parse, Value}) ->
    io_lib:format("Error converting value ~p to a number of bytes", [Value]);
xlate({file_open, {File, Reason}}) ->
    io_lib:format("Could not open file (~s) for Reason ~s", [File, Reason]);
xlate({conf_syntax, {File, {Line, Col}}}) ->
    io_lib:format("Syntax error in ~s after line ~p column ~p, "
                  "parsing incomplete", [File, Line, Col]);
%% Prefix a nested error with the file it occurred in.
xlate({in_file, {File, Error}}) ->
    [File, ": ", xlate(Error)];
xlate({translation_missing_setting, {Translation, Setting}}) ->
    io_lib:format("Translation for '~s' expected to find setting '~s' but was missing",
                  [Translation, Setting]);
xlate({translation_invalid_configuration, {Translation, Invalid}}) ->
    io_lib:format("Translation for '~s' found invalid configuration: ~s",
                  [Translation, Invalid]);
xlate({translation_unknown_error, {Translation, {Class, Error}}}) ->
    io_lib:format("Error running translation for ~s, [~p, ~p]",
                  [Translation, Class, Error]);
xlate({translation_arity, {Translation, Arity}}) ->
    io_lib:format("~p is not a valid arity for translation fun() ~s."
                  " Try 1 or 2", [Arity, Translation]);
xlate({map_multiple_match, VariableDefinition}) ->
    io_lib:format("~p has both a fuzzy and strict match", [VariableDefinition]);
xlate({unknown_variable, Variable}) ->
    ["Conf file attempted to set unknown variable: ", Variable];
xlate({unsupported_type, Type}) ->
    io_lib:format("~p is not a supported datatype", [Type]);
xlate({transform_type, Type}) ->
    ["Error transforming datatype for: ", Type];
xlate({transform_type_exception, {Type, {Class, Error}}}) ->
    io_lib:format("Caught exception converting to ~p: ~p:~p",
                  [Type, Class, Error]);
xlate({transform_type_unacceptable, {Value, BadValue}}) ->
    io_lib:format("~p is not accepted value: ~p", [Value, BadValue]);
xlate({circular_rhs, History}) ->
    io_lib:format("Circular RHS substitutions: ~p", [History]);
xlate({substitution_missing_config, {Substitution, Variable}}) ->
    io_lib:format("'~s' substitution requires a config variable '~s' to be set",
                  [Substitution, Variable]);
xlate({mapping_not_found, Variable}) ->
    [Variable, " not_found"];
xlate({mapping_multiple, {Variable, {Hard, Fuzzy}}}) ->
    io_lib:format("~p hard mappings and ~p fuzzy mappings found "
                  "for ~s", [Hard, Fuzzy, Variable]);
xlate({validation, {Variable, Description}}) ->
    [Variable, " invalid, ", Description];
xlate({erl_parse, {Reason, LineNo}}) ->
    ["Schema parse error near line number ", integer_to_list(LineNo),
     ": ", Reason];
xlate({erl_parse, Reason}) ->
    io_lib:format("Schema parse error: ~p", [Reason]);
xlate({erl_parse_unexpected, Error}) ->
    io_lib:format("Unexpected return from erl_parse:parse_exprs/1: ~p", [Error]);
xlate({parse_schema, Value}) ->
    io_lib:format("Unknown parse return: ~p", [Value]);
xlate({erl_scan, LineNo}) ->
    ["Error scanning erlang near line ", integer_to_list(LineNo)].

%% True if any element of List is an error() tuple.
-spec contains_error(list()) -> boolean().
contains_error(List) ->
    lists:any(fun is_error/1, List).

%% Recognize an error() tuple; everything else is not an error.
-spec is_error(any()) -> boolean().
is_error({error, _}) -> true;
is_error(_) -> false.

-spec filter(list()) -> errorlist().
%% Collect the error() members of List into a tagged errorlist().
filter(List) ->
    {errorlist, [Elem || Elem <- List, is_error(Elem)]}.

%% @doc Pass a list through unchanged unless it contains errors, in
%% which case return just those errors as an errorlist(). Non-list
%% terms are always passed through.
-spec errorlist_maybe(any()) -> any().
errorlist_maybe(List) when is_list(List) ->
    case filter(List) of
        {errorlist, []} -> List;
        NonEmptyErrorlist -> NonEmptyErrorlist
    end;
errorlist_maybe(AnythingElse) ->
    AnythingElse.

%% Format the arguments, then report via print/1.
-spec print(string(), [any()]) -> ok.
print(FormatString, Args) ->
    print(io_lib:format(FormatString, Args)).

-spec print(string() | error()) -> ok.
print({error, ErrorTerm}) ->
    print(lists:flatten(xlate(ErrorTerm)));
print(Message) ->
    %% Deliberate fallback: if the logger is not usable, write the
    %% message to stdout instead of losing it.
    try
        ?LOG_ERROR("~s", [Message])
    catch
        _:_:_ ->
            io:format("~s~n", [Message]),
            ok
    end.

-ifdef(TEST).

is_error_test() ->
    ?assert(is_error({error, "oh no!"})),
    ?assert(not(is_error("just an innocent string... I mean a list... I mean... argh, erlang"))),
    ok.

contains_error_test() ->
    ?assert(contains_error(["hi", {error, "hi!"}, "bye"])),
    ?assert(not(contains_error(["hi", "I'm not an error", "bye"]))),
    ok.

filter_test() ->
    ?assertEqual({errorlist, []},
                 filter(["hi", "what even is an error?", "bye"])),
    ?assertEqual({errorlist, [{error, "etoomanythings"}]},
                 filter(["hi", {error, "etoomanythings"}, "bye"])),
    ok.

errorlist_maybe_test() ->
    ?assertEqual(atom, errorlist_maybe(atom)),
    ?assertEqual(12, errorlist_maybe(12)),
    %% Fool you! "string" is a list!, but doesn't contain an error()
    ?assertEqual("string", errorlist_maybe("string")),
    ?assertEqual(
        {errorlist, [{error, "etoomanythings"}]},
        errorlist_maybe(["hi", {error, "etoomanythings"}, "bye"])),
    ?assertEqual(
        ["hi", "what even is an error?", "bye"],
        errorlist_maybe(["hi", "what even is an error?", "bye"])),
    ok.

-endif.