Commit

Add --quiet to not output stats (nor anything else) on stdout
rixed committed Jan 25, 2022
1 parent bff6cae commit a529509
Showing 4 changed files with 118 additions and 92 deletions.
39 changes: 28 additions & 11 deletions README.adoc
@@ -319,21 +319,35 @@ let check_command_line output_file discard kafka_brokers kafka_topic kafka_parti
raise (Failure "--kafka-compression-level must be between -1 and 12")
----

=== Options to control verbosity

Datasino will regularly print its actual output rate on stdout unless
instructed to be quiet (recommended if one intends to output data on stdout):

.command line arguments
[source,ml]
----
let quiet =
let env = Term.env_info "DATASINO_QUIET" in
let doc = "Do not print actual output rate on stdout." in
let i = Arg.info ~doc ~env [ "q" ; "quiet" ] in
Arg.(value (flag i))
----
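
For readers new to cmdliner, here is a minimal standalone sketch (not datasino
code; it reuses the same pre-1.1 cmdliner calls as above, with hypothetical
names) of how such a flag term is declared, combined into a command, and
finally received as a plain boolean:

.standalone sketch of a cmdliner flag (hypothetical)
[source,ml]
----
open Cmdliner

(* Declare a -q/--quiet flag, also settable via the QUIET_DEMO env var: *)
let quiet =
  let env = Term.env_info "QUIET_DEMO" in
  let doc = "Do not print anything on stdout." in
  let i = Arg.info ~doc ~env [ "q" ; "quiet" ] in
  Arg.(value (flag i))

(* The main function receives the flag as a plain bool: *)
let main quiet =
  if not quiet then print_endline "starting..."

(* Evaluate the term: cmdliner parses argv and calls main: *)
let () =
  let cmd = Term.(const main $ quiet), Term.info "quiet_demo" in
  Term.eval cmd |> Term.exit
----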

== Main function

These are all the command line arguments that are needed.
After displaying the version of the program (always useful when all we have are
the logs), cmdliner can parse them all and call the +start+ function:
We rely on cmdliner to parse them all and call the +start+ function:

.main function
[source,ml]
----
let () =
Printf.printf "Datasino v%s\n%!" version ;
let start_cmd =
let doc = "Datasino - random data generator" in
Term.(
(const start
$ quiet
$ schema
$ rate_limit
$ stutter
@@ -355,17 +369,19 @@ let () =
Term.eval start_cmd |> Term.exit
----

The first thing this +start+ function should do is to call the +check_command_line+
function:
After displaying the version of the program (always useful when all we have are
the logs), the first thing this +start+ function should do is to call the
+check_command_line+ function:

.start function
[source,ml]
----
let start
schema rate_limit stutter encoding output_file discard
quiet schema rate_limit stutter encoding output_file discard
kafka_brokers kafka_topic kafka_partitions kafka_timeout kafka_wait_confirm
kafka_compression_codec kafka_compression_level
max_size max_count (* ...extra command line parameters... *) =
if not quiet then Printf.printf "Datasino v%s\n%!" version ;
check_command_line
output_file discard
kafka_brokers kafka_topic kafka_partitions kafka_timeout kafka_wait_confirm
@@ -728,7 +744,7 @@ let output =
else if discard then
ignore
else
output_to_kafka kafka_brokers kafka_topic kafka_partitions kafka_timeout
output_to_kafka quiet kafka_brokers kafka_topic kafka_partitions kafka_timeout
kafka_wait_confirm kafka_compression_codec kafka_compression_level
max_msg_size
in
@@ -765,10 +781,10 @@ As for kafka, we merely rely on the bindings to rdkafka client library:
.output functions
[source,ml]
----
let output_to_kafka brokers topic partitions timeout wait_confirm
let output_to_kafka quiet brokers topic partitions timeout wait_confirm
compression_codec compression_level max_msg_size =
let open Kafka in
Printf.printf "Connecting to Kafka at %s\n%!" brokers ;
if not quiet then Printf.printf "Connecting to Kafka at %s\n%!" brokers ;
let delivery_callback msg_id = function
| None -> (* No error *) ()
| Some err_code ->
@@ -881,7 +897,7 @@ counts, and a function being called every time +rate_limit+ is:
Avg.update avg_5m now |||
Avg.update avg_1m now |||
Avg.update avg_10s now in
if display then
if not quiet && display then
Printf.printf "%sRates: 10s: %a, 1min: %a, 5min: %a, global: %a\n%!"
prefix (* <2> *)
Avg.print avg_10s
@@ -933,7 +949,7 @@ struct
let print oc t =
if t.last_avg >= 0. then
Printf.printf "%g" t.last_avg
Printf.fprintf oc "%g" t.last_avg
else
String.print oc "n.a."
end
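
The last hunk above also fixes a subtle bug in +Avg.print+: it is used as a
+%a+ printer in the rate display, so it must write to the channel +oc+ it is
given rather than unconditionally to stdout. A standalone sketch of that
convention with the vanilla standard-library +Printf+ (hypothetical names, not
datasino code, which goes through Batteries):

.the %a printer convention (sketch)
[source,ml]
----
(* With the stdlib Printf, a %a directive consumes two arguments: a printer
   of type out_channel -> 'a -> unit, then the value to print. *)
let print_avg oc avg =
  if avg >= 0. then Printf.fprintf oc "%g" avg
  else output_string oc "n.a."

let () =
  (* The same printer works for stdout, stderr or any other channel: *)
  Printf.printf "global rate: %a values/s\n" print_avg 1234.5 ;
  Printf.eprintf "global rate: %a values/s\n" print_avg (-1.)
----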
@@ -1293,6 +1309,7 @@ open Datasino_main
[source,ml]
----
(* ...external modules... *)
open Datasino_config
open Datasino_tools
(* ...registering callback... *)
53 changes: 30 additions & 23 deletions src/datasino_cli.ml
@@ -1,7 +1,7 @@

# 1286 "README.adoc"
# 1298 "README.adoc"

# 32 "README.adoc"
# 28 "README.adoc"
open Batteries
open Cmdliner

@@ -12,14 +12,14 @@ module DM = DessserMasks
module DT = DessserTypes
module DU = DessserCompilationUnit

# 1286 "README.adoc"
# 1298 "README.adoc"

open Datasino_config
open Datasino_tools
open Datasino_main


# 91 "README.adoc"
# 87 "README.adoc"
let mn_t =
let parse s =
let s =
@@ -37,7 +37,7 @@ let mn_t =
in
Arg.conv ~docv:"TYPE" (parse, print)

# 652 "README.adoc"
# 664 "README.adoc"
let better_char =
let parse = function
| "\\t" ->
@@ -52,31 +52,31 @@ let better_char =
in
Arg.conv ~docv:"CHAR" (parse, print)

# 1291 "README.adoc"
# 1303 "README.adoc"


# 78 "README.adoc"
# 74 "README.adoc"
let schema =
let env = Term.env_info "SCHEMA" in
let doc = "The type of the data to be generated (inline or @file)." in
let i = Arg.info ~doc ~env ~docv:"TYPE" [ "s" ; "schema" ] in
Arg.(required (opt (some mn_t) None i))

# 123 "README.adoc"
# 119 "README.adoc"
let rate_limit =
let env = Term.env_info "RATE_LIMIT" in
let doc = "Maximum number of generated values per seconds." in
let i = Arg.info ~doc ~env [ "r" ; "rate-limit" ] in
Arg.(value (opt float 0. i))

# 143 "README.adoc"
# 139 "README.adoc"
let stutter =
let env = Term.env_info "STUTTER" in
let doc = "Reuse each generated value that many time." in
let i = Arg.info ~doc ~env [ "stutter" ] in
Arg.(value (opt float 0. i))

# 162 "README.adoc"
# 158 "README.adoc"
let encoding =
let encodings =
[ "null", Null ; (* <1> *)
@@ -91,7 +91,7 @@ let encoding =
let i = Arg.info ~doc ~docv ~env [ "e" ; "encoding" ] in
Arg.(value (opt (enum encodings) SExpr i))

# 208 "README.adoc"
# 204 "README.adoc"
let output_file =
let doc = "File name where to append the generated values." in
let i = Arg.info ~doc [ "o" ; "output-file" ] in
@@ -145,7 +145,7 @@ let kafka_compression_level =
let i = Arg.info ~doc ~env [ "kafka-compression-level" ] in
Arg.(value (opt int ~-1 i))

# 277 "README.adoc"
# 273 "README.adoc"
let max_size =
let env = Term.env_info "MAX_SIZE" in
let doc = "Rotate the current output file/kafka message after that size \
@@ -160,7 +160,7 @@ let max_count =
let i = Arg.info ~doc ~env [ "max-count" ] in
Arg.(value (opt int 0 (* <1> *) i))

# 598 "README.adoc"
# 330 "README.adoc"
let quiet =
let env = Term.env_info "DATASINO_QUIET" in
let doc = "Do not print actual output rate on stdout." in
let i = Arg.info ~doc ~env [ "q" ; "quiet" ] in
Arg.(value (flag i))

# 610 "README.adoc"
let separator =
let env = Term.env_info "CSV_SEPARATOR" in
let doc = "Character to use as a separator." in
@@ -191,30 +198,30 @@ let with_newlines =
let i = Arg.info ~doc ~env [ "with-newlines" ] in
Arg.(value (flag i))

# 966 "README.adoc"
# 978 "README.adoc"
let prefix =
let env = Term.env_info "PREFIX" in
let doc = "Any string to prefix the stdout logs with." in
let i = Arg.info ~doc ~env [ "prefix" ] in
Arg.(value (opt string "" i))

# 1036 "README.adoc"
# 1048 "README.adoc"
let extra_search_paths =
let env = Term.env_info "EXTRA_SEARCH_PATHS" in
let doc = "Where to find datasino libraries." in
let i = Arg.info ~doc ~env [ "I" ; "extra-search-paths" ] in
Arg.(value (opt_all string [] i))

# 1292 "README.adoc"
# 1304 "README.adoc"


# 335 "README.adoc"
# 345 "README.adoc"
let () =
Printf.printf "Datasino v%s\n%!" version ;
let start_cmd =
let doc = "Datasino - random data generator" in
Term.(
(const start
$ quiet
$ schema
$ rate_limit
$ stutter
@@ -231,24 +238,24 @@ let () =
$ max_size
$ max_count

# 632 "README.adoc"
# 644 "README.adoc"
$ separator
$ null
$ quote
$ clickhouse_syntax
$ with_newlines

# 976 "README.adoc"
# 988 "README.adoc"
$ prefix

# 1046 "README.adoc"
# 1058 "README.adoc"
$ extra_search_paths

# 356 "README.adoc"
# 366 "README.adoc"
),
info "datasino" ~version ~doc)
in
Term.eval start_cmd |> Term.exit

# 1293 "README.adoc"
# 1305 "README.adoc"
