function tokenize_args(raw)
    -- NOTE: string.gmatch does not use regex, but a smaller pattern matcher!
    -- A complete regex parser would be larger than Lua itself. See
    -- [Programming in Lua 20.2](https://www.lua.org/pil/20.2.html).
    --
    -- Notable differences:
    --   '-' is the non-greedy (shortest-match) repetition
    --   '*?' does not work
    --   '|' is not alternation ("or")
    --
    -- This means we're better off implementing the lexer with an algorithm.
    local t = {}
    local current = ""
    local in_str = false
    local str_seek
    for c in string.gmatch(raw, ".") do -- iterate through all chars
        if c == ' ' and not in_str then
            if string.len(current) > 0 then
                table.insert(t, current)
                current = ""
            end
        elseif c == '"' and not in_str then
            in_str = true
            str_seek = '"'
        elseif c == "'" and not in_str then
            in_str = true
            str_seek = "'"
        elseif c == str_seek and in_str then
            in_str = false
            table.insert(t, current)
            current = ""
        else
            current = current .. c
        end
    end
    if string.len(current) > 0 then
        table.insert(t, current)
    end
    return t
end

--- Dumps a variable into a string so it can be printed. This is meant for
--- debug prints.
--- @param t any any variable
--- @return string t_dumped t dumped to string
function dump(t)
    if type(t) == 'table' then
        local s = '{ '
        local first = true
        for k, v in pairs(t) do
            if type(k) ~= 'number' then
                k = '"' .. k .. '"'
            end
            if not first then
                s = s .. ', '
            end
            first = false
            s = s .. '[' .. k .. '] = \'' .. dump(v) .. '\''
        end
        return s .. ' }'
    else
        return tostring(t)
    end
end

function main()
    if arg[1] == nil then
        print("need to give an argument (probably a string)")
        return
    end
    local split = tokenize_args(arg[1])
    print(dump(split))
end

-- Detect whether this file was loaded as a library or run directly as a script.
if pcall(getfenv, 4) then
    print("Library")
else
    main()
end
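
-- The following self-check is an illustrative addition (a minimal sketch, not
-- part of the original script); the sample inputs and expected tokens are
-- assumptions chosen for demonstration. It exercises tokenize_args on a mixed
-- quoted/unquoted argument string and demonstrates the non-greedy '-'
-- repetition mentioned in the pattern note above. It is not run automatically;
-- call self_test() manually, e.g. from an interactive session after loading
-- this file.
function self_test()
    -- quoted substrings become single tokens, unquoted words split on spaces
    local tokens = tokenize_args([[hello "big world" 'single quoted']])
    assert(tokens[1] == "hello")
    assert(tokens[2] == "big world")
    assert(tokens[3] == "single quoted")
    -- '-' is the shortest-match repetition: "<(.-)>" captures "a",
    -- while the greedy "<(.*)>" captures "a><b"
    assert(("<a><b>"):match("<(.-)>") == "a")
    assert(("<a><b>"):match("<(.*)>") == "a><b")
    print("self_test passed: " .. dump(tokens))
end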