diff --git a/tokenize_args.lua b/tokenize_args.lua
index 16b3b76..bb434e4 100644
--- a/tokenize_args.lua
+++ b/tokenize_args.lua
@@ -8,7 +8,7 @@ function tokenize_args(raw)
   -- '*?' does not work
   -- '|' is not or
   --
-  -- This means we're better of implementing a programatic lexer.
+  -- This means we're better off implementing the lexer by hand.
   local t = {}
   local current = ""
   local in_str = false
@@ -33,7 +33,9 @@ function tokenize_args(raw)
       current = current .. c
     end
   end
-  table.insert(t, current)
+  if string.len(current) > 0 then
+    table.insert(t, current)
+  end
   return t
 end
 
@@ -58,7 +60,8 @@ function dump(t)
 end
 
 function main()
-  local mockargs = [[-iab --foo '{"QUX": "BAR"}' --ala=boa]]
+  local mockargs =
+    [[--name "tls_service_rs" --base-dir ${PWD%/*/*} --log-level "TRACE" -j '{ "MODE": "server", "CERT": "data/server.crt", "KEY": "data/server.key", "CHAIN": "data/root.crt", "ADDR": "127.0.0.1:9999", "HOSTNAME": "localhost" }']]
   print(mockargs)
   local split = tokenize_args(mockargs)
   print(dump(split))
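
For context, a minimal standalone sketch of what the new `string.len(current) > 0` guard protects against: when the input ends in whitespace, `current` is empty after the character loop, and unconditionally inserting it would append a spurious "" token. `flush_last_token` is a hypothetical helper named here only for illustration; it is not part of tokenize_args.lua, which performs this check inline.

-- Sketch only: mirrors the patched end-of-input handling, not the full tokenizer.
local function flush_last_token(t, current)
  -- Same guard as the patch: skip the leftover buffer if it is empty.
  if string.len(current) > 0 then
    table.insert(t, current)
  end
  return t
end

-- Input ended in whitespace: `current` is "", so no empty token is appended.
print(#flush_last_token({ "-iab", "--foo" }, ""))   --> 2
-- Input ended mid-token: the last buffered token is still flushed as before.
print(#flush_last_token({ "-iab" }, "--foo"))       --> 2

The same guard could also be written as `current ~= ""`; the sketch follows the patch and uses `string.len`.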