commit b925fbc3789a8230185368f2bc3793fc914b7fc4 Author: Oliver Hartmann Date: Mon Feb 28 20:15:23 2022 +0100 Initial commit diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..a7e02ee --- /dev/null +++ b/.gitignore @@ -0,0 +1,2 @@ +clink_history* +clink.log diff --git a/.init.lua b/.init.lua new file mode 100644 index 0000000..406a789 --- /dev/null +++ b/.init.lua @@ -0,0 +1,3 @@ +-- The line below extends package.path with modules +-- directory to allow to require them +package.path = debug.getinfo(1, "S").source:match[[^@?(.*[\/])[^\/]-$]] .."modules/?.lua;".. package.path \ No newline at end of file diff --git a/.inputrc b/.inputrc new file mode 100644 index 0000000..c48fdae --- /dev/null +++ b/.inputrc @@ -0,0 +1,5 @@ +$if clink + +"\t": clink-popup-complete + +$endif diff --git a/clink_settings b/clink_settings new file mode 100644 index 0000000..f29a1be --- /dev/null +++ b/clink_settings @@ -0,0 +1,126 @@ +# name: Enable automatic suggestions +# type: boolean +autosuggest.enable = True + +# name: Selects default key bindings +# type: enum +# options: bash,windows +clink.default_bindings = windows + +# name: Use GUI popup windows +# type: boolean +clink.gui_popups = False + +# name: Pressing Ctrl-D exits session +# type: boolean +cmd.ctrld_exits = False + +# name: Argument color +# type: color +color.arg = bold + +# name: Argument info color +# type: color +color.arginfo = sgr 1;38;5;172 + +# name: Argmatcher color +# type: color +color.argmatcher = sgr 1;38;5;40 + +# name: Shell command completions +# type: color +color.cmd = sgr 1;38;5;231 + +# name: Color for < and > redirection symbols +# type: color +color.cmdredir = sgr 38;5;172 + +# name: Color for & and | command separators +# type: color +color.cmdsep = sgr 38;5;214 + +# name: Color for comment row +# type: color +color.comment_row = sgr 38;5;87;48;5;18 + +# name: Description completion color +# type: color +color.description = sgr 38;5;39 + +# name: Doskey completions +# type: 
color +color.doskey = sgr 1;38;5;75 + +# name: Color for executable command word +# type: color +color.executable = sgr 38;5;33 + +# name: Filtered completion color +# type: color +color.filtered = sgr 38;5;231 + +# name: Flag color +# type: color +color.flag = sgr 38;5;117 + +# name: Hidden file completions +# type: color +color.hidden = sgr 38;5;160 + +# name: Horizontal scroll marker color +# type: color +color.horizscroll = sgr 38;5;16;48;5;30 + +# name: Input text color +# type: color +color.input = sgr 38;5;222 + +# name: For user-interaction prompts +# type: color +color.interact = bold + +# name: Message area color +# type: color +color.message = default + +# name: Readonly file completions +# type: color +color.readonly = sgr 38;5;28 + +# name: Selected completion color +# type: color +color.selected_completion = sgr 38;5;16;48;5;254 + +# name: Selection color +# type: color +color.selection = sgr 38;5;16;48;5;179 + +# name: Color for suggestion text +# type: color +color.suggestion = sgr 38;5;239 + +# name: Unexpected argument color +# type: color +color.unexpected = default + +# name: Color for unrecognized command word +# type: color +color.unrecognized = sgr 38;5;203 + +# name: The number of history lines to save +# type: integer +history.max_lines = 25000 + +# name: Expand envvars when completing +# type: boolean +match.expand_envvars = True + +# name: Try substring if no prefix matches +# type: boolean +match.substring = True + +# name: Controls when past prompts are collapsed +# type: enum +# options: off,always,same_dir +prompt.transient = off + diff --git a/fzf.lua b/fzf.lua new file mode 100644 index 0000000..f9cd5bd --- /dev/null +++ b/fzf.lua @@ -0,0 +1,334 @@ +-------------------------------------------------------------------------------- +-- FZF integration for Clink. +-- +-- Clink is available at https://chrisant996.github.io/clink +-- FZF is available from https://nicedoc.io/junegunn/fzf +-- +-- To use this: +-- +-- 1. 
Copy this script into your Clink scripts directory. +-- +-- 2. Either put fzf.exe in a directory listed in the system PATH environment +-- variable, or run 'clink set fzf.exe_location ' to tell +-- Clink where to find fzf.exe. +-- +-- 3. The default key bindings are as follows, when using Clink v1.2.46 or +-- higher: +--[[ + +# Default key bindings for fzf with Clink. +"\C-t": "luafunc:fzf_file" # Ctrl+T lists files recursively; choose one or multiple to insert them. +"\C-r": "luafunc:fzf_history" # Ctrl+R lists history entries; choose one to insert it. +"\M-c": "luafunc:fzf_directory" # Alt+C lists subdirectories; choose one to 'cd /d' to it. +"\M-b": "luafunc:fzf_bindings" # Alt+B lists key bindings; choose one to invoke it. +"\e[27;5;32~": "luafunc:fzf_complete" # Ctrl+Space uses fzf to filter match completions. + +]] +-- 4. Optional: You can use your own custom key bindings if you want. +-- Run 'clink set fzf.default_bindings false' and add key bindings to +-- your .inputrc file manually. The default key bindings are listed +-- above in .inputrc format for convenience. +-- +-- 5. Optional: You can set the following environment variables to +-- customize the behavior: +-- +-- FZF_CTRL_T_OPTS = fzf options for fzf_file() function. +-- FZF_CTRL_R_OPTS = fzf options for fzf_history() function. +-- FZF_ALT_C_OPTS = fzf options for fzf_directory() function. +-- FZF_BINDINGS_OPTS = fzf options for fzf_bindings() function. +-- FZF_COMPLETE_OPTS = fzf options for fzf_complete() function. +-- +-- FZF_CTRL_T_COMMAND = command to run for collecting files for fzf_file() function. +-- FZF_ALT_C_COMMAND = command to run for collecting directories for fzf_directory() function. + +-------------------------------------------------------------------------------- +-- Compatibility check. 
+if not io.popenrw then + print('fzf.lua requires a newer version of Clink; please upgrade.') + return +end + +-------------------------------------------------------------------------------- +-- Settings available via 'clink set'. + +settings.add('fzf.height', '40%', 'Height to use for the --height flag') +settings.add('fzf.exe_location', '', 'Location of fzf.exe if not on the PATH') + +if rl.setbinding then + + settings.add('fzf.default_bindings', true, 'Use default key bindings', 'If the default key bindings interfere with your own, you can turn off the\ndefault key bindings and add bindings manually to your .inputrc file.\n\nChanging this takes effect for the next session.') + + if settings.get('fzf.default_bindings') then + rl.setbinding([["\C-t"]], [["luafunc:fzf_file"]]) + rl.setbinding([["\C-r"]], [["luafunc:fzf_history"]]) + rl.setbinding([["\M-c"]], [["luafunc:fzf_directory"]]) + rl.setbinding([["\M-b"]], [["luafunc:fzf_bindings"]]) + rl.setbinding([["\e[27;5;32~"]], [["luafunc:fzf_complete"]]) + end + +end + +-------------------------------------------------------------------------------- +-- Helpers. + +local diag = false +local fzf_complete_intercept = false + +local function get_fzf(env) + local height = settings.get('fzf.height') + local command = settings.get('fzf.exe_location') + if os.expandenv and command then + -- Expand so that os.getshortpathname() can work even when envvars are + -- present. + command = os.expandenv(command) + end + if not command or command == '' then + command = 'fzf.exe' + else + -- CMD.exe cannot use pipe redirection with a quoted program name, so + -- try to use a short name. + local short = os.getshortpathname(command) + if short then + command = short + end + end + if command and command ~= '' and height and height ~= '' then + command = command..' --height '..height + end + if env then + local options = os.getenv(env) + if options then + command = command..' 
'..options + end + end + return command +end + +local function get_clink() + local clink_alias = os.getalias('clink') + if not clink_alias or clink_alias == '' then + return '' + end + return clink_alias:gsub(' $[*]', '') +end + +local function replace_dir(str, line_state) + local dir = '.' + if line_state:getwordcount() > 0 then + local info = line_state:getwordinfo(line_state:getwordcount()) + if info then + local word = line_state:getline():sub(info.offset, line_state:getcursor()) + if word and #word > 0 then + dir = word + end + end + end + return str:gsub('$dir', dir) +end + +-------------------------------------------------------------------------------- +-- Functions for use with 'luafunc:' key bindings. + +function fzf_complete(rl_buffer) + fzf_complete_intercept = true + rl.invokecommand('complete') + if fzf_complete_intercept then + rl_buffer:ding() + end + fzf_complete_intercept = false + rl_buffer:refreshline() +end + +function fzf_history(rl_buffer) + local clink_command = get_clink() + if #clink_command == 0 then + rl_buffer:ding() + return + end + + -- Build command to get history for the current Clink session. + local history = clink_command..' --session '..clink.getsession()..' history --bare' + if diag then + history = history..' --diag' + end + + -- This intentionally does not use '--query' because that isn't safe: + -- Depending on what the user has typed so far, passing it as an argument + -- may cause the command to interpreted differently than expected. + -- E.g. suppose the user typed: "pgm.exe & rd /s + -- Then fzf would be invoked as: fzf.exe --query""pgm.exe & rd /s" + -- And since the & is not inside quotes, the 'rd /s' command gets actually + -- run by mistake! + local r = io.popen(history..' | '..get_fzf("FZF_CTRL_R_OPTS")..' -i --tac') + if not r then + rl_buffer:ding() + return + end + + local str = r:read('*all') + str = str and str:gsub('[\r\n]', '') or '' + r:close() + + -- If something was selected, insert it. 
+ if #str > 0 then + rl_buffer:beginundogroup() + rl_buffer:remove(0, -1) + rl_buffer:insert(str) + rl_buffer:endundogroup() + end + + rl_buffer:refreshline() +end + +function fzf_file(rl_buffer, line_state) + local ctrl_t_command = os.getenv('FZF_CTRL_T_COMMAND') + if not ctrl_t_command then + ctrl_t_command = 'dir /b /s /a:-s $dir' + end + + ctrl_t_command = replace_dir(ctrl_t_command, line_state) + +print('"'..ctrl_t_command..'"') + local r = io.popen(ctrl_t_command..' | '..get_fzf('FZF_CTRL_T_OPTS')..' -i -m') + if not r then + rl_buffer:ding() + return + end + + local str = r:read('*line') + str = str and str:gsub('[\r\n]+', ' ') or '' + str = str:gsub(' +$', '') + r:close() + + if #str > 0 then + rl_buffer:insert(str) + end + + rl_buffer:refreshline() +end + +function fzf_directory(rl_buffer, line_state) + local alt_c_opts = os.getenv('FZF_ALT_C_OPTS') + if not alt_c_opts then + alt_c_opts = "" + end + + local alt_c_command = os.getenv('FZF_ALT_C_COMMAND') + if not alt_c_command then + alt_c_command = 'dir /b /s /a:d-s $dir' + end + + alt_c_command = replace_dir(alt_c_command, line_state) + + local temp_contents = rl_buffer:getbuffer() + local r = io.popen(alt_c_command..' | '..get_fzf('FZF_ALT_C_OPTS')..' -i') + if not r then + rl_buffer:ding() + return + end + + local str = r:read('*all') + str = str and str:gsub('[\r\n]', '') or '' + r:close() + + if #str > 0 then + rl_buffer:beginundogroup() + rl_buffer:remove(0, -1) + rl_buffer:insert('cd /d '..str) + rl_buffer:endundogroup() + rl_buffer:refreshline() + rl.invokecommand('accept-line') + return + end + + rl_buffer:refreshline() +end + +function fzf_bindings(rl_buffer) + if not rl.getkeybindings then + rl_buffer:beginoutput() + print('fzf_bindings() in fzf.lua requires a newer version of Clink; please upgrade.') + return + end + + local bindings = rl.getkeybindings() + if #bindings <= 0 then + rl_buffer:refreshline() + return + end + + local line + local r,w = io.popenrw(get_fzf('FZF_BINDINGS_OPTS')..' 
-i') + if r and w then + -- Write key bindings to the write pipe. + for _,kb in ipairs(bindings) do + w:write(kb.key..' : '..kb.binding..'\n') + end + w:close() + + -- Read filtered matches. + local ret = {} + line = r:read('*line') + r:close() + end + + rl_buffer:refreshline() + + if line and #line > 0 then + local binding = line:sub(#bindings[1].key + 3 + 1) + rl.invokecommand(binding) + end +end + +-------------------------------------------------------------------------------- +-- Match generator. + +local function filter_matches(matches, completion_type, filename_completion_desired) + if not fzf_complete_intercept then + return + end + + -- Start fzf. + local r,w = io.popenrw(get_fzf("FZF_COMPLETE_OPTS")) + if not r or not w then + return + end + + -- Write matches to the write pipe. + for _,m in ipairs(matches) do + w:write(m.match..'\n') + end + w:close() + + -- Read filtered matches. + local ret = {} + while (true) do + local line = r:read('*line') + if not line then + break + end + for _,m in ipairs(matches) do + if m.match == line then + table.insert(ret, m) + end + end + end + r:close() + + -- Yay, successful; clear it to not ding. 
+ fzf_complete_intercept = false + return ret +end + +local interceptor = clink.generator(0) +function interceptor:generate(line_state, match_builder) + if fzf_complete_intercept then + clink.onfiltermatches(filter_matches) + end + return false +end + +clink.onbeginedit(function () + fzf_complete_intercept = false +end) + diff --git a/git.lua b/git.lua new file mode 100644 index 0000000..577cb3a --- /dev/null +++ b/git.lua @@ -0,0 +1,992 @@ +-- preamble: common routines + +local path = require('path') +local git = require('gitutil') +local matchers = require('matchers') +local w = require('tables').wrap +local clink_version = require('clink_version') +local color = require('color') +local parser = clink.arg.new_parser + +if clink_version.supports_color_settings then + settings.add('color.git.star', 'bright green', 'Color for preferred branch completions') +end + +--- + -- Lists remote branches based on packed-refs file from git directory + -- @param string [dir] Directory where to search file for + -- @return table List of remote branches +local function list_packed_refs(dir) + local result = w() + local git_dir = dir or git.get_git_common_dir() + if not git_dir then return result end + + local packed_refs_file = io.open(git_dir..'/packed-refs') + if packed_refs_file == nil then return {} end + + for line in packed_refs_file:lines() do + -- SHA is 40 char length + 1 char for space + if #line > 41 then + local match = line:sub(41):match('refs/remotes/(.*)') + if match then table.insert(result, match) end + end + end + + packed_refs_file:close() + return result +end + +local function list_remote_branches(dir) + local git_dir = dir or git.get_git_common_dir() + if not git_dir then return w() end + + return w(path.list_files(git_dir..'/refs/remotes', '/*', + --[[recursive=]]true, --[[reverse_separator=]]true)) + :concat(list_packed_refs(git_dir)) + :sort():dedupe() +end + +--- + -- Lists local branches for git repo in git_dir directory. 
@param string [dir] Git directory, where to search for local branches
end) +end + +local function checkout_spec_generator(token) + local files = matchers.files(token) + :filter(function(file) + return path.is_real_dir(file) + end) + + local git_dir = git.get_git_common_dir() + + local local_branches = branches(token) + local remote_branches = list_remote_branches(git_dir) + :filter(function(branch) + return clink.is_match(token, branch) + end) + + local predicted_branches = list_remote_branches(git_dir) + :map(function (remote_branch) + return remote_branch:match('.-/(.+)') + end) + :filter(function(branch) + return branch + and clink.is_match(token, branch) + -- Filter out those predictions which are already exists as local branches + and not local_branches:contains(branch) + end) + + if (#local_branches + #remote_branches + #predicted_branches) == 0 then return files end + + -- if there is any refspec that matches token then: + -- * disable readline's filename completion, otherwise we'll get a list of these specs + -- threaten as list of files (without 'path' part), ie. 
'some_branch' instead of 'my_remote/some_branch' + -- * create display filter for completion table to append path separator to each directory entry + -- since it is not added automatically by readline (see previous point) + clink.matches_are_files(0) + clink.match_display_filter = function () + local star = '*' + if clink_version.supports_query_rl_var and rl.isvariabletrue('colored-stats') then + star = color.get_clink_color('color.git.star')..star..color.get_clink_color('color.filtered') + end + return files:map(function(file) + return clink.is_dir(file) and file..'\\' or file + end) + :concat(local_branches) + :concat(predicted_branches:map(function(branch) return star..branch end)) + :concat(remote_branches) + end + + return files + :concat(local_branches) + :concat(predicted_branches) + :concat(remote_branches) +end + +local function push_branch_spec(token) + local git_dir = git.get_git_common_dir() + if not git_dir then return w() end + + local plus_prefix = token:sub(0, 1) == '+' + -- cut out leading '+' symbol as it is a part of branch spec + local branch_spec = plus_prefix and token:sub(2) or token + -- check if there a local/remote branch separator + local s, e = branch_spec:find(':') + + -- starting from here we have 2 options: + -- * if there is no branch separator complete word with local branches + if not s then + local b = branches(branch_spec) + + -- setup display filter to prevent display '+' symbol in completion list + clink.match_display_filter = function () + return b + end + + return b:map(function(branch) + -- append '+' to results if it was specified + return plus_prefix and '+'..branch or branch + end) + else + -- * if there is ':' separator then we need to complete remote branch + local local_branch_spec = branch_spec:sub(1, s - 1) + local remote_branch_spec = branch_spec:sub(e + 1) + + -- TODO: show remote branches only for remote that has been specified as previous argument + local b = w(clink.find_dirs(git_dir..'/refs/remotes/*')) + 
:filter(function(remote) return path.is_real_dir(remote) end) + :reduce({}, function(result, remote) + return w(path.list_files(git_dir..'/refs/remotes/'..remote, '/*', + --[[recursive=]]true, --[[reverse_separator=]]true)) + :filter(function(remote_branch) + return clink.is_match(remote_branch_spec, remote_branch) + end) + :concat(result) + end) + + -- setup display filter to prevent display '+' symbol in completion list + clink.match_display_filter = function () + return b + end + + return b:map(function(branch) + return (plus_prefix and '+'..local_branch_spec or local_branch_spec)..':'..branch + end) + end +end + +local stashes = function(token) -- luacheck: no unused args + + local git_dir = git.get_git_dir() + if not git_dir then return w() end + + local stash_file = io.open(git_dir..'/logs/refs/stash') + -- if there is no stash file, return empty list + if stash_file == nil then return w() end + + local stashes = {} + -- make a dictionary of stash time and stash comment to + -- be able to sort stashes by date/time created + for stash in stash_file:lines() do + local stash_time, stash_name = stash:match('(%d%d%d%d%d%d%d%d%d%d) [+-]%d%d%d%d%s+(.*)') + if (stash_name and stash_name) then + stashes[stash_time] = stash_name + end + end + + stash_file:close() + + -- get times for available stashes into separate table and sort it + -- from newest to oldest. This is required because of stash@{0} + -- represents _latest_ stash, not the last one in file + local stash_times = {} + for k in pairs(stashes) do + table.insert(stash_times, k) + end + + table.sort(stash_times, function (a, b) + return a > b + end) + + -- generate matches and match filter table + local ret = {} + local ret_filter = {} + for i,v in ipairs(stash_times) do + local match = "stash@{"..(i-1).."}" + table.insert(ret, match) + if clink_version.supports_display_filter_description then + -- Clink now has a richer match interface. 
By returning a table, + -- the script is able to provide the stash name separately from the + -- description. If the script does so, then the popup completion + -- window is able to show the stash name plus a dimmed description, + -- but only insert the stash name. + table.insert(ret_filter, { match=match, type="word", description=stashes[v] }) + else + table.insert(ret_filter, match.." "..stashes[v]) + end + end + + local function filter() + return ret_filter + end + + if clink_version.supports_display_filter_description then + clink.ondisplaymatches(filter) + else + clink.match_display_filter = filter + end + + return ret +end + +local color_opts = parser({"true", "false", "always"}) + +local git_options = { + "core.editor", + "core.pager", + "core.excludesfile", + "core.autocrlf"..parser({"true", "false", "input"}), + "core.trustctime"..parser({"true", "false"}), + "core.whitespace"..parser({ + "cr-at-eol", + "-cr-at-eol", + "indent-with-non-tab", + "-indent-with-non-tab", + "space-before-tab", + "-space-before-tab", + "trailing-space", + "-trailing-space" + }), + "commit.template", + "color.ui"..color_opts, "color.*"..color_opts, "color.branch"..color_opts, + "color.diff"..color_opts, "color.interactive"..color_opts, "color.status"..color_opts, + "help.autocorrect", + "merge.tool", "mergetool.*.cmd", "mergetool.trustExitCode"..parser({"true", "false"}), "diff.external", + "user.name", "user.email", "user.signingkey", +} + +local config_parser = parser( + "--system", "--global", "--local", "--file"..parser({matchers.files}), + "--int", "--bool", "--path", + "-z", "--null", + "--add", + "--replace-all", + "--get", "--get-all", "--get-regexp", "--get-urlmatch", + "--unset", "--unset-all", + "--rename-section", "--remove-section", + "-l", "--list", + "--get-color", "--get-colorbool", + "-e", "--edit", + {git_options} +) + +local merge_recursive_options = parser({ + "ours", + "theirs", + "renormalize", + "no-renormalize", + "diff-algorithm="..parser({ + "patience", 
+ "minimal", + "histogram", + "myers" + }), + "patience", + "ignore-space-change", + "ignore-all-space", + "ignore-space-at-eol", + "rename-threshold=", + -- "subtree="..parser(), + "subtree" +}) + +local merge_strategies = parser({ + "resolve", + "recursive", + "ours", + "octopus", + "subtree" +}) + +local cleanup_options = parser({ + "strip", + "whitespace", + "verbatim", + "scissors", + "default" +}) + +local git_parser = parser( + { + {alias}, + "add" .. parser({matchers.files}, + "-n", "--dry-run", + "-v", "--verbose", + "-f", "--force", + "-i", "--interactive", + "-p", "--patch", + "-e", "--edit", + "-u", "--update", + "-A", "--all", + "--no-all", + "--ignore-removal", + "--no-ignore-removal", + "-N", "--intent-to-add", + "--refresh", + "--ignore-errors", + "--ignore-missing" + ), + "add--interactive", + "am", + "annotate" .. parser({matchers.files}, + "-b", + "--root", + "--show-stats", + "-L", + "-l", + "-t", + "-S", + "--reverse", + "-p", + "--porcelain", + "--line-porcelain", + "--incremental", + "--encoding=", + "--contents", + "--date", + "-M", + "-C", + "-h" + ), + "apply" .. parser( + "--stat", + "--numstat", + "--summary", + "--check", + "--index", + "--cached", + "-3", "--3way", + "--build-fake-ancestor=", + "-R", "--reverse", + "--reject", + "-z", + "-p", + "-C", + "--unidiff-zero", + "--apply", + "--no-add", + "--allow-binary-replacement", "--binary", + "--exclude=", + "--include=", + "--ignore-space-change", "--ignore-whitespace", + "--whitespace=", + "--inaccurate-eof", + "-v", "--verbose", + "--recount", + "--directory=" + ), + "archive", + "bisect", + "bisect--helper", + "blame", + "branch" .. parser( + "-v", "--verbose", + "-q", "--quiet", + "-t", "--track", + "--set-upstream", + "-u", "--set-upstream-to", + "--unset-upstream", + "--color", + "-r", "--remotes", + "--contains" , + "--abbrev", + "-a", "--all", + "-d" .. parser({branches}):loop(1), + "--delete" .. parser({branches}):loop(1), + "-D" .. 
parser({branches}):loop(1), + "-m", "--move", + "-M", + "--list", + "-l", "--create-reflog", + "--edit-description", + "-f", "--force", + "--no-merged", + "--merged", + "--column" + ), + "bundle", + "cat-file", + "check-attr", + "check-ignore", + "check-mailmap", + "check-ref-format", + "checkout" .. parser({checkout_spec_generator}, + "-q", "--quiet", + "-b", + "-B", + "-l", + "--detach", + "-t", "--track", + "--orphan", + "-2", "--ours", + "-3", "--theirs", + "-f", "--force", + "-m", "--merge", + "--overwrite-ignore", + "--conflict", + "-p", "--patch", + "--ignore-skip-worktree-bits" + ), + "checkout-index", + "cherry", + "cherry-pick"..parser( + "-e", "--edit", + "-m", "--mainline ", + "-n", "--no-commit", + "-r", + "-x", + "--ff", + "-s", "-S", "--gpg-sign", + "--allow-empty", + "--allow-empty-message", + "--keep-redundant-commits", + "--strategy"..parser({merge_strategies}), + "-X"..parser({merge_recursive_options}), + "--strategy-option"..parser({merge_recursive_options}), + "--continue", + "--quit", + "--abort" + ), + "citool", + "clean", + "clone" .. parser( + "--template", + "-l", "--local", + "-s", "--shared", + "--no-hardlinks", + "-q", "--quiet", + "-n", "--no-checkout", + "--bare", + "--mirror", + "-o", "--origin", + "-b", "--branch", + "-u", "--upload-pack", + "--reference", + "--dissociate", + "--separate-git-dir", + "--depth", + "--single-branch", "--no-single-branch", + "--no-tags", + "--recurse-submodules", "--shallow-submodules", "--no-shallow-submodules", + "--jobs" + ), + "column", + "commit" .. 
parser( + "-a", "--all", + "-p", "--patch", + "-C", "--reuse-message=", + "-c", "--reedit-message=", + "--fixup=", + "--squash=", + "--reset-author", + "--short", + "--branch", + "--porcelain", + "--long", + "-z", + "--null", + "-F", "--file=", + "--author=", + "--date=", + "-m", "--message=", + "-t", "--template=", + "-s", "--signoff", + "-n", "--no-verify", + "--allow-empty", + "--allow-empty-message", + "--cleanup"..cleanup_options, + "-e", "--edit", + "--no-edit", + "--amend", + "--no-post-rewrite", + "-i", "--include", + "-o", "--only", + "-u", "--untracked-files", "--untracked-files=", -- .. parser({"no", "normal", "all"}), + "-v", "--verbose", + "-q", "--quiet", + "--dry-run", + "--status", + "--no-status", + "-S", "--gpg-sign", "--gpg-sign=", + "--" + ), + "commit-tree", + "config"..config_parser, + "count-objects", + "credential", + "credential-store", + "credential-wincred", + "daemon", + "describe", + "diff" .. parser({local_or_remote_branches, matchers.files}), + "diff-files", + "diff-index", + "diff-tree", + "difftool"..parser( + "-d", "--dir-diff", + "-y", "--no-prompt", "--prompt", + "-t", "--tool=" -- TODO: complete tool (take from config) + ), + "difftool--helper", + "fast-export", + "fast-import", + "fetch" .. parser({remotes}, + "--all", + "--prune", + "--tags" + ), + "fetch-pack", + "filter-branch", + "fmt-merge-msg", + "for-each-ref", + "format-patch", + "fsck", + "fsck-objects", + "gc", + "get-tar-commit-id", + "grep", + "gui", + "gui--askpass", + "gui--askyesno", + "gui.tcl", + "hash-object", + "help", + "http-backend", + "http-fetch", + "http-push", + "imap-send", + "index-pack", + "init", + "init-db", + "log", + "lost-found", + "ls-files", + "ls-remote", + "ls-tree", + "mailinfo", + "mailsplit", + "merge" .. parser({branches}, + "--commit", "--no-commit", + "--edit", "-e", "--no-edit", + "--ff", "--no-ff", "--ff-only", + "--log", "--no-log", + "--stat", "-n", "--no-stat", + "--squash", "--no-squash", + "-s" .. 
merge_strategies, + "--strategy" .. merge_strategies, + "-X" .. merge_recursive_options, + "--strategy-option" .. merge_recursive_options, + "--verify-signatures", "--no-verify-signatures", + "-q", "--quiet", "-v", "--verbose", + "--progress", "--no-progress", + "-S", "--gpg-sign", + "-m", + "--rerere-autoupdate", "--no-rerere-autoupdate", + "--abort" + ), + "merge-base", + "merge-file", + "merge-index", + "merge-octopus", + "merge-one-file", + "merge-ours", + "merge-recursive", + "merge-resolve", + "merge-subtree", + "merge-tree", + "mergetool", + "mergetool--lib", + "mktag", + "mktree", + "mv", + "name-rev", + "notes", + "p4", + "pack-objects", + "pack-redundant", + "pack-refs", + "parse-remote", + "patch-id", + "peek-remote", + "prune", + "prune-packed", + "pull" .. parser( + {remotes}, {branches}, + "-q", "--quiet", + "-v", "--verbose", + "--recurse-submodules", --[no-]recurse-submodules[=yes|on-demand|no] + "--no-recurse-submodules", + "--commit", "--no-commit", + "-e", "--edit", "--no-edit", + "--ff", "--no-ff", "--ff-only", + "--log", "--no-log", + "--stat", "-n", "--no-stat", + "--squash", "--no-squash", + "-s"..merge_strategies, + "--strategy"..merge_strategies, + "-X"..merge_recursive_options, + "--strategy-option"..merge_recursive_options, + "--verify-signatures", "--no-verify-signatures", + "--summary", "--no-summary", + "-r", "--rebase", "--no-rebase", + "--all", + "-a", "--append", + "--depth", "--unshallow", "--update-shallow", + "-f", "--force", + "-k", "--keep", + "--no-tags", + "-u", "--update-head-ok", + "--upload-pack", + "--progress" + ), + "push" .. 
parser( + {remotes}, + {push_branch_spec}, + "-v", "--verbose", + "-q", "--quiet", + "--repo", + "--all", + "--mirror", + "--delete", + "--tags", + "-n", "--dry-run", + "--porcelain", + "-f", "--force", + "--force-with-lease", + "--recurse-submodules", + "--thin", + "--receive-pack", + "--exec", + "-u", "--set-upstream", + "--progress", + "--prune", + "--no-verify", + "--follow-tags" + ), + "quiltimport", + "read-tree", + "rebase" .. parser({local_or_remote_branches}, {branches}, + "-i", "--interactive", + "--onto" .. parser({branches}), + "--continue", + "--abort", + "--keep-empty", + "--skip", + "--edit-todo", + "-m", "--merge", + "-s" .. merge_strategies, + "--strategy"..merge_strategies, + "-X" .. merge_recursive_options, + "--strategy-option"..merge_recursive_options, + "-S", "--gpg-sign", + "-q", "--quiet", + "-v", "--verbose", + "--stat", "-n", "--no-stat", + "--no-verify", "--verify", + "-C", + "-f", "--force-rebase", + "--fork-point", "--no-fork-point", + "--ignore-whitespace", "--whitespace", + "--committer-date-is-author-date", "--ignore-date", + "-i", "--interactive", + "-p", "--preserve-merges", + "-x", "--exec", + "--root", + "--autosquash", "--no-autosquash", + "--autostash", "--no-autostash", + "--no-ff" + ), + "receive-pack", + "reflog", + "remote"..parser({ + "add" ..parser( + "-t"..parser({branches}), + "-m", + "-f", + "--mirror", + "--tags", "--no-tags" + ), + "rename"..parser({remotes}), + "remove"..parser({remotes}), + "rm"..parser({remotes}), + "set-head"..parser({remotes}, {branches}, + "-a", "--auto", + "-d", "--delete" + ), + "set-branches"..parser("--add", {remotes}, {branches}), + "set-url"..parser( + "--add"..parser("--push", {remotes}), + "--delete"..parser("--push", {remotes}) + ), + "get-url"..parser({remotes}, "--push", "--all"), + "show"..parser("-n", {remotes}), + "prune"..parser("-n", "--dry-run", {remotes}), + "update"..parser({remotes}, "-p", "--prune") + }, "-v", "--verbose"), + "remote-ext", + "remote-fd", + "remote-ftp", + 
"remote-ftps", + "remote-hg", + "remote-http", + "remote-https", + "remote-testsvn", + "repack", + "replace", + "repo-config", + "request-pull", + "rerere", + -- TODO: Add commit completions + "reset"..parser({local_or_remote_branches}, + "-q", + "-p", "--patch", + "--soft", "--mixed", "--hard", + "--merge", "--keep" + ), + "restore"..parser({matchers.files}, + "-s", "--source", + "-p", "--patch", + "-W", "--worktree", + "-S", "--staged", + "-q", "--quiet", + "--progress", "--no-progress", + "--ours", "--theirs", + "-m", "--merge", + "--conflict", + "--ignore-unmerged", + "--ignore-skip-worktree-bits", + "--overlay", "--no-overlay" + ), + "rev-list", + "rev-parse", + "revert"..parser( + "-e", "--edit", + "-m", "--mainline", + "--no-edit", + "--cleanup"..cleanup_options, + "-n", "--no-commit", + "-S", "--gpg-sign", + "--no-gpg-sign", + "-s", "--signoff", + "--strategy"..merge_strategies, + "-X"..merge_recursive_options, + "--strategy-option"..merge_recursive_options, + "--rerere-autoupdate", + "--no-rerere-autoupdate", + "--continue", + "--skip", + "--quit", + "--abort" + ), + "rm", + "send-email", + "send-pack", + "sh-i18n", + "sh-i18n--envsubst", + "sh-setup", + "shortlog", + "show", + "show-branch", + "show-index", + "show-ref", + "stage", + "stash"..parser({ + "list", -- TODO: The command takes options applicable to the git log + -- command to control what is shown and how it's done + "show"..parser({stashes}), + "drop"..parser({stashes}, "-q", "--quiet"), + "pop"..parser({stashes}, "--index", "-q", "--quiet"), + "apply"..parser({stashes}, "--index", "-q", "--quiet"), + "branch"..parser({branches}, {stashes}), + "save"..parser( + "-p", "--patch", + "-k", "--no-keep-index", "--keep-index", + "-q", "--quiet", + "-u", "--include-untracked", + "-a", "--all" + ), + "clear" + }), + "status", + "stripspace", + "submodule"..parser({ + "add", + "init", + "deinit", + "foreach", + "status"..parser("--cached", "--recursive"), + "summary", + "sync", + "update" + }, 
'--quiet'), + "subtree", + "switch"..parser({local_or_remote_branches}, + "-c", "-C", "--create", + "--force-create", + "-d", "--detach", + "--guess", "--no-guess", + "-f", "--force", "--discard-changes", + "-m", "--merge", + "--conflict", + "-q", "--quiet", + "--progress", "--no-progress", + "-t", "--track", + "--no-track", + "--orphan", + "--ignore-other-worktrees", + "--recurse-submodules", "--no-recurse-submodules" + ), + "svn"..parser({ + "init"..parser("-T", "--trunk", "-t", "--tags", "-b", "--branches", "-s", "--stdlayout", + "--no-metadata", "--use-svm-props", "--use-svnsync-props", "--rewrite-root", + "--rewrite-uuid", "--username", "--prefix"..parser({"origin"}), "--ignore-paths", + "--include-paths", "--no-minimize-url"), + "fetch"..parser({remotes}, "--localtime", "--parent", "--ignore-paths", "--include-paths", + "--log-window-size"), + "clone"..parser("-T", "--trunk", "-t", "--tags", "-b", "--branches", "-s", "--stdlayout", + "--no-metadata", "--use-svm-props", "--use-svnsync-props", "--rewrite-root", + "--rewrite-uuid", "--username", "--prefix"..parser({"origin"}), "--ignore-paths", + "--include-paths", "--no-minimize-url", "--preserve-empty-dirs", + "--placeholder-filename"), + "rebase"..parser({local_or_remote_branches}, {branches}), + "dcommit"..parser("--no-rebase", "--commit-url", "--mergeinfo", "--interactive"), + "branch"..parser("-m","--message","-t", "--tags", "-d", "--destination", + "--username", "--commit-url", "--parents"), + "log"..parser("-r", "--revision", "-v", "--verbose", "--limit", + "--incremental", "--show-commit", "--oneline"), + "find-rev"..parser("--before", "--after"), + "reset"..parser("-r", "--revision", "-p", "--parent"), + "tag", + "blame", + "set-tree", + "create-ignore", + "show-ignore", + "mkdirs", + "commit-diff", + "info", + "proplist", + "propget", + "show-externals", + "gc" + }), + "symbolic-ref", + "tag", + "tar-tree", + "unpack-file", + "unpack-objects", + "update-index", + "update-ref", + "update-server-info", 
+ "upload-archive", + "upload-pack", + "var", + "verify-pack", + "verify-tag", + "web--browse", + "whatchanged", + "worktree"..parser({ + "add"..parser( + {matchers.dirs}, + {branches}, + "-f", "--force", + "--detach", + "--checkout", + "--lock", + "-b"..parser({branches}) + ), + "list"..parser("--porcelain"), + "lock"..parser("--reason"), + "move", + "prune"..parser( + "-n", "--dry-run", + "-v", "--verbose", + "--expire" + ), + "remove"..parser("-f"), + "unlock" + }), + "write-tree", + }, + "--version", + "--help", + "-c", + "--exec-path", + "--html-path", + "--man-path", + "--info-path", + "-p", "--paginate", "--no-pager", + "--no-replace-objects", + "--bare", + "--git-dir=", + "--work-tree=", + "--namespace=" +) + +clink.arg.register_parser("git", git_parser) diff --git a/modules/JSON.lua b/modules/JSON.lua new file mode 100644 index 0000000..4f89372 --- /dev/null +++ b/modules/JSON.lua @@ -0,0 +1,1724 @@ +-- -*- coding: utf-8 -*- +-- +-- Simple JSON encoding and decoding in pure Lua. +-- +-- Copyright 2010-2017 Jeffrey Friedl +-- http://regex.info/blog/ +-- Latest version: http://regex.info/blog/lua/json +-- +-- This code is released under a Creative Commons CC-BY "Attribution" License: +-- http://creativecommons.org/licenses/by/3.0/deed.en_US +-- +-- It can be used for any purpose so long as: +-- 1) the copyright notice above is maintained +-- 2) the web-page links above are maintained +-- 3) the 'AUTHOR_NOTE' string below is maintained +-- +local VERSION = '20170927.26' -- version history at end of file +local AUTHOR_NOTE = "-[ JSON.lua package by Jeffrey Friedl (http://regex.info/blog/lua/json) version 20170927.26 ]-" + +-- +-- The 'AUTHOR_NOTE' variable exists so that information about the source +-- of the package is maintained even in compiled versions. It's also +-- included in OBJDEF below mostly to quiet warnings about unused variables. 
+-- +local OBJDEF = { + VERSION = VERSION, + AUTHOR_NOTE = AUTHOR_NOTE, +} + + +-- +-- Simple JSON encoding and decoding in pure Lua. +-- JSON definition: http://www.json.org/ +-- +-- +-- JSON = assert(loadfile "JSON.lua")() -- one-time load of the routines +-- +-- local lua_value = JSON:decode(raw_json_text) +-- +-- local raw_json_text = JSON:encode(lua_table_or_value) +-- local pretty_json_text = JSON:encode_pretty(lua_table_or_value) -- "pretty printed" version for human readability +-- +-- +-- +-- DECODING (from a JSON string to a Lua table) +-- +-- +-- JSON = assert(loadfile "JSON.lua")() -- one-time load of the routines +-- +-- local lua_value = JSON:decode(raw_json_text) +-- +-- If the JSON text is for an object or an array, e.g. +-- { "what": "books", "count": 3 } +-- or +-- [ "Larry", "Curly", "Moe" ] +-- +-- the result is a Lua table, e.g. +-- { what = "books", count = 3 } +-- or +-- { "Larry", "Curly", "Moe" } +-- +-- +-- The encode and decode routines accept an optional second argument, +-- "etc", which is not used during encoding or decoding, but upon error +-- is passed along to error handlers. It can be of any type (including nil). +-- +-- +-- +-- ERROR HANDLING DURING DECODE +-- +-- With most errors during decoding, this code calls +-- +-- JSON:onDecodeError(message, text, location, etc) +-- +-- with a message about the error, and if known, the JSON text being +-- parsed and the byte count where the problem was discovered. You can +-- replace the default JSON:onDecodeError() with your own function. +-- +-- The default onDecodeError() merely augments the message with data +-- about the text and the location (and, an 'etc' argument had been +-- provided to decode(), its value is tacked onto the message as well), +-- and then calls JSON.assert(), which itself defaults to Lua's built-in +-- assert(), and can also be overridden. 
+-- +-- For example, in an Adobe Lightroom plugin, you might use something like +-- +-- function JSON:onDecodeError(message, text, location, etc) +-- LrErrors.throwUserError("Internal Error: invalid JSON data") +-- end +-- +-- or even just +-- +-- function JSON.assert(message) +-- LrErrors.throwUserError("Internal Error: " .. message) +-- end +-- +-- If JSON:decode() is passed a nil, this is called instead: +-- +-- JSON:onDecodeOfNilError(message, nil, nil, etc) +-- +-- and if JSON:decode() is passed HTML instead of JSON, this is called: +-- +-- JSON:onDecodeOfHTMLError(message, text, nil, etc) +-- +-- The use of the 'etc' argument allows stronger coordination between +-- decoding and error reporting, especially when you provide your own +-- error-handling routines. Continuing with the the Adobe Lightroom +-- plugin example: +-- +-- function JSON:onDecodeError(message, text, location, etc) +-- local note = "Internal Error: invalid JSON data" +-- if type(etc) = 'table' and etc.photo then +-- note = note .. " while processing for " .. etc.photo:getFormattedMetadata('fileName') +-- end +-- LrErrors.throwUserError(note) +-- end +-- +-- : +-- : +-- +-- for i, photo in ipairs(photosToProcess) do +-- : +-- : +-- local data = JSON:decode(someJsonText, { photo = photo }) +-- : +-- : +-- end +-- +-- +-- +-- If the JSON text passed to decode() has trailing garbage (e.g. as with the JSON "[123]xyzzy"), +-- the method +-- +-- JSON:onTrailingGarbage(json_text, location, parsed_value, etc) +-- +-- is invoked, where: +-- +-- 'json_text' is the original JSON text being parsed, +-- 'location' is the count of bytes into 'json_text' where the garbage starts (6 in the example), +-- 'parsed_value' is the Lua result of what was successfully parsed ({123} in the example), +-- 'etc' is as above. +-- +-- If JSON:onTrailingGarbage() does not abort, it should return the value decode() should return, +-- or nil + an error message. 
+-- +-- local new_value, error_message = JSON:onTrailingGarbage() +-- +-- The default JSON:onTrailingGarbage() simply invokes JSON:onDecodeError("trailing garbage"...), +-- but you can have this package ignore trailing garbage via +-- +-- function JSON:onTrailingGarbage(json_text, location, parsed_value, etc) +-- return parsed_value +-- end +-- +-- +-- DECODING AND STRICT TYPES +-- +-- Because both JSON objects and JSON arrays are converted to Lua tables, +-- it's not normally possible to tell which original JSON type a +-- particular Lua table was derived from, or guarantee decode-encode +-- round-trip equivalency. +-- +-- However, if you enable strictTypes, e.g. +-- +-- JSON = assert(loadfile "JSON.lua")() --load the routines +-- JSON.strictTypes = true +-- +-- then the Lua table resulting from the decoding of a JSON object or +-- JSON array is marked via Lua metatable, so that when re-encoded with +-- JSON:encode() it ends up as the appropriate JSON type. +-- +-- (This is not the default because other routines may not work well with +-- tables that have a metatable set, for example, Lightroom API calls.) +-- +-- +-- ENCODING (from a lua table to a JSON string) +-- +-- JSON = assert(loadfile "JSON.lua")() -- one-time load of the routines +-- +-- local raw_json_text = JSON:encode(lua_table_or_value) +-- local pretty_json_text = JSON:encode_pretty(lua_table_or_value) -- "pretty printed" version for human readability +-- local custom_pretty = JSON:encode(lua_table_or_value, etc, { pretty = true, indent = "| ", align_keys = false }) +-- +-- On error during encoding, this code calls: +-- +-- JSON:onEncodeError(message, etc) +-- +-- which you can override in your local JSON object. Also see "HANDLING UNSUPPORTED VALUE TYPES" below. +-- +-- The 'etc' in the error call is the second argument to encode() and encode_pretty(), or nil if it wasn't provided. 
+-- +-- +-- +-- +-- ENCODING OPTIONS +-- +-- An optional third argument, a table of options, can be provided to encode(). +-- +-- encode_options = { +-- -- options for making "pretty" human-readable JSON (see "PRETTY-PRINTING" below) +-- pretty = true, -- turn pretty formatting on +-- indent = " ", -- use this indent for each level of an array/object +-- align_keys = false, -- if true, align the keys in a way that sounds like it should be nice, but is actually ugly +-- array_newline = false, -- if true, array elements become one to a line rather than inline +-- +-- -- other output-related options +-- null = "\0", -- see "ENCODING JSON NULL VALUES" below +-- stringsAreUtf8 = false, -- see "HANDLING UNICODE LINE AND PARAGRAPH SEPARATORS FOR JAVA" below +-- } +-- +-- json_string = JSON:encode(mytable, etc, encode_options) +-- +-- +-- +-- For reference, the defaults are: +-- +-- pretty = false +-- null = nil, +-- stringsAreUtf8 = false, +-- +-- +-- +-- PRETTY-PRINTING +-- +-- Enabling the 'pretty' encode option helps generate human-readable JSON. 
+-- +-- pretty = JSON:encode(val, etc, { +-- pretty = true, +-- indent = " ", +-- align_keys = false, +-- }) +-- +-- encode_pretty() is also provided: it's identical to encode() except +-- that encode_pretty() provides a default options table if none given in the call: +-- +-- { pretty = true, indent = " ", align_keys = false, array_newline = false } +-- +-- For example, if +-- +-- JSON:encode(data) +-- +-- produces: +-- +-- {"city":"Kyoto","climate":{"avg_temp":16,"humidity":"high","snowfall":"minimal"},"country":"Japan","wards":11} +-- +-- then +-- +-- JSON:encode_pretty(data) +-- +-- produces: +-- +-- { +-- "city": "Kyoto", +-- "climate": { +-- "avg_temp": 16, +-- "humidity": "high", +-- "snowfall": "minimal" +-- }, +-- "country": "Japan", +-- "wards": 11 +-- } +-- +-- The following lines all return identical strings: +-- JSON:encode_pretty(data) +-- JSON:encode_pretty(data, nil, { pretty = true, indent = " ", align_keys = false, array_newline = false}) +-- JSON:encode_pretty(data, nil, { pretty = true, indent = " " }) +-- JSON:encode (data, nil, { pretty = true, indent = " " }) +-- +-- An example of setting your own indent string: +-- +-- JSON:encode_pretty(data, nil, { pretty = true, indent = "| " }) +-- +-- produces: +-- +-- { +-- | "city": "Kyoto", +-- | "climate": { +-- | | "avg_temp": 16, +-- | | "humidity": "high", +-- | | "snowfall": "minimal" +-- | }, +-- | "country": "Japan", +-- | "wards": 11 +-- } +-- +-- An example of setting align_keys to true: +-- +-- JSON:encode_pretty(data, nil, { pretty = true, indent = " ", align_keys = true }) +-- +-- produces: +-- +-- { +-- "city": "Kyoto", +-- "climate": { +-- "avg_temp": 16, +-- "humidity": "high", +-- "snowfall": "minimal" +-- }, +-- "country": "Japan", +-- "wards": 11 +-- } +-- +-- which I must admit is kinda ugly, sorry. This was the default for +-- encode_pretty() prior to version 20141223.14. 
+-- +-- +-- HANDLING UNICODE LINE AND PARAGRAPH SEPARATORS FOR JAVA +-- +-- If the 'stringsAreUtf8' encode option is set to true, consider Lua strings not as a sequence of bytes, +-- but as a sequence of UTF-8 characters. +-- +-- Currently, the only practical effect of setting this option is that Unicode LINE and PARAGRAPH +-- separators, if found in a string, are encoded with a JSON escape instead of being dumped as is. +-- The JSON is valid either way, but encoding this way, apparently, allows the resulting JSON +-- to also be valid Java. +-- +-- AMBIGUOUS SITUATIONS DURING THE ENCODING +-- +-- During the encode, if a Lua table being encoded contains both string +-- and numeric keys, it fits neither JSON's idea of an object, nor its +-- idea of an array. To get around this, when any string key exists (or +-- when non-positive numeric keys exist), numeric keys are converted to +-- strings. +-- +-- For example, +-- JSON:encode({ "one", "two", "three", SOMESTRING = "some string" })) +-- produces the JSON object +-- {"1":"one","2":"two","3":"three","SOMESTRING":"some string"} +-- +-- To prohibit this conversion and instead make it an error condition, set +-- JSON.noKeyConversion = true +-- +-- +-- ENCODING JSON NULL VALUES +-- +-- Lua tables completely omit keys whose value is nil, so without special handling there's +-- no way to represent JSON object's null value in a Lua table. For example +-- JSON:encode({ username = "admin", password = nil }) +-- +-- produces: +-- +-- {"username":"admin"} +-- +-- In order to actually produce +-- +-- {"username":"admin", "password":null} +-- + +-- one can include a string value for a "null" field in the options table passed to encode().... +-- any Lua table entry with that value becomes null in the JSON output: +-- +-- JSON:encode({ username = "admin", password = "xyzzy" }, -- First arg is the Lua table to encode as JSON. 
+-- nil, -- Second arg is the 'etc' value, ignored here +-- { null = "xyzzy" }) -- Third arg is th options table +-- +-- produces: +-- +-- {"username":"admin", "password":null} +-- +-- Just be sure to use a string that is otherwise unlikely to appear in your data. +-- The string "\0" (a string with one null byte) may well be appropriate for many applications. +-- +-- The "null" options also applies to Lua tables that become JSON arrays. +-- JSON:encode({ "one", "two", nil, nil }) +-- +-- produces +-- +-- ["one","two"] +-- +-- while +-- +-- NullPlaceholder = "\0" +-- encode_options = { null = NullPlaceholder } +-- JSON:encode({ "one", "two", NullPlaceholder, NullPlaceholder}, nil, encode_options) +-- produces +-- +-- ["one","two",null,null] +-- +-- +-- +-- HANDLING LARGE AND/OR PRECISE NUMBERS +-- +-- +-- Without special handling, numbers in JSON can lose precision in Lua. +-- For example: +-- +-- T = JSON:decode('{ "small":12345, "big":12345678901234567890123456789, "precise":9876.67890123456789012345 }') +-- +-- print("small: ", type(T.small), T.small) +-- print("big: ", type(T.big), T.big) +-- print("precise: ", type(T.precise), T.precise) +-- +-- produces +-- +-- small: number 12345 +-- big: number 1.2345678901235e+28 +-- precise: number 9876.6789012346 +-- +-- Precision is lost with both 'big' and 'precise'. +-- +-- This package offers ways to try to handle this better (for some definitions of "better")... +-- +-- The most precise method is by setting the global: +-- +-- JSON.decodeNumbersAsObjects = true +-- +-- When this is set, numeric JSON data is encoded into Lua in a form that preserves the exact +-- JSON numeric presentation when re-encoded back out to JSON, or accessed in Lua as a string. +-- +-- This is done by encoding the numeric data with a Lua table/metatable that returns +-- the possibly-imprecise numeric form when accessed numerically, but the original precise +-- representation when accessed as a string. 
+-- +-- Consider the example above, with this option turned on: +-- +-- JSON.decodeNumbersAsObjects = true +-- +-- T = JSON:decode('{ "small":12345, "big":12345678901234567890123456789, "precise":9876.67890123456789012345 }') +-- +-- print("small: ", type(T.small), T.small) +-- print("big: ", type(T.big), T.big) +-- print("precise: ", type(T.precise), T.precise) +-- +-- This now produces: +-- +-- small: table 12345 +-- big: table 12345678901234567890123456789 +-- precise: table 9876.67890123456789012345 +-- +-- However, within Lua you can still use the values (e.g. T.precise in the example above) in numeric +-- contexts. In such cases you'll get the possibly-imprecise numeric version, but in string contexts +-- and when the data finds its way to this package's encode() function, the original full-precision +-- representation is used. +-- +-- You can force access to the string or numeric version via +-- JSON:forceString() +-- JSON:forceNumber() +-- For example, +-- local probably_okay = JSON:forceNumber(T.small) -- 'probably_okay' is a number +-- +-- Code the inspects the JSON-turned-Lua data using type() can run into troubles because what used to +-- be a number can now be a table (e.g. as the small/big/precise example above shows). Update these +-- situations to use JSON:isNumber(item), which returns nil if the item is neither a number nor one +-- of these number objects. If it is either, it returns the number itself. For completeness there's +-- also JSON:isString(item). +-- +-- If you want to try to avoid the hassles of this "number as an object" kludge for all but really +-- big numbers, you can set JSON.decodeNumbersAsObjects and then also set one or both of +-- JSON:decodeIntegerObjectificationLength +-- JSON:decodeDecimalObjectificationLength +-- They refer to the length of the part of the number before and after a decimal point. If they are +-- set and their part is at least that number of digits, objectification occurs. 
If both are set, +-- objectification occurs when either length is met. +-- +-- ----------------------- +-- +-- Even without using the JSON.decodeNumbersAsObjects option, you can encode numbers in your Lua +-- table that retain high precision upon encoding to JSON, by using the JSON:asNumber() function: +-- +-- T = { +-- imprecise = 123456789123456789.123456789123456789, +-- precise = JSON:asNumber("123456789123456789.123456789123456789") +-- } +-- +-- print(JSON:encode_pretty(T)) +-- +-- This produces: +-- +-- { +-- "precise": 123456789123456789.123456789123456789, +-- "imprecise": 1.2345678912346e+17 +-- } +-- +-- +-- ----------------------- +-- +-- A different way to handle big/precise JSON numbers is to have decode() merely return the exact +-- string representation of the number instead of the number itself. This approach might be useful +-- when the numbers are merely some kind of opaque object identifier and you want to work with them +-- in Lua as strings anyway. +-- +-- This approach is enabled by setting +-- +-- JSON.decodeIntegerStringificationLength = 10 +-- +-- The value is the number of digits (of the integer part of the number) at which to stringify numbers. +-- NOTE: this setting is ignored if JSON.decodeNumbersAsObjects is true, as that takes precedence. +-- +-- Consider our previous example with this option set to 10: +-- +-- JSON.decodeIntegerStringificationLength = 10 +-- +-- T = JSON:decode('{ "small":12345, "big":12345678901234567890123456789, "precise":9876.67890123456789012345 }') +-- +-- print("small: ", type(T.small), T.small) +-- print("big: ", type(T.big), T.big) +-- print("precise: ", type(T.precise), T.precise) +-- +-- This produces: +-- +-- small: number 12345 +-- big: string 12345678901234567890123456789 +-- precise: number 9876.6789012346 +-- +-- The long integer of the 'big' field is at least JSON.decodeIntegerStringificationLength digits +-- in length, so it's converted not to a Lua integer but to a Lua string. 
Using a value of 0 or 1 ensures +-- that all JSON numeric data becomes strings in Lua. +-- +-- Note that unlike +-- JSON.decodeNumbersAsObjects = true +-- this stringification is simple and unintelligent: the JSON number simply becomes a Lua string, and that's the end of it. +-- If the string is then converted back to JSON, it's still a string. After running the code above, adding +-- print(JSON:encode(T)) +-- produces +-- {"big":"12345678901234567890123456789","precise":9876.6789012346,"small":12345} +-- which is unlikely to be desired. +-- +-- There's a comparable option for the length of the decimal part of a number: +-- +-- JSON.decodeDecimalStringificationLength +-- +-- This can be used alone or in conjunction with +-- +-- JSON.decodeIntegerStringificationLength +-- +-- to trip stringification on precise numbers with at least JSON.decodeIntegerStringificationLength digits after +-- the decimal point. (Both are ignored if JSON.decodeNumbersAsObjects is true.) +-- +-- This example: +-- +-- JSON.decodeIntegerStringificationLength = 10 +-- JSON.decodeDecimalStringificationLength = 5 +-- +-- T = JSON:decode('{ "small":12345, "big":12345678901234567890123456789, "precise":9876.67890123456789012345 }') +-- +-- print("small: ", type(T.small), T.small) +-- print("big: ", type(T.big), T.big) +-- print("precise: ", type(T.precise), T.precise) +-- +-- produces: +-- +-- small: number 12345 +-- big: string 12345678901234567890123456789 +-- precise: string 9876.67890123456789012345 +-- +-- +-- HANDLING UNSUPPORTED VALUE TYPES +-- +-- Among the encoding errors that might be raised is an attempt to convert a table value that has a type +-- that this package hasn't accounted for: a function, userdata, or a thread. 
You can handle these types as table +-- values (but not as table keys) if you supply a JSON:unsupportedTypeEncoder() method along the lines of the +-- following example: +-- +-- function JSON:unsupportedTypeEncoder(value_of_unsupported_type) +-- if type(value_of_unsupported_type) == 'function' then +-- return "a function value" +-- else +-- return nil +-- end +-- end +-- +-- Your unsupportedTypeEncoder() method is actually called with a bunch of arguments: +-- +-- self:unsupportedTypeEncoder(value, parents, etc, options, indent, for_key) +-- +-- The 'value' is the function, thread, or userdata to be converted to JSON. +-- +-- The 'etc' and 'options' arguments are those passed to the original encode(). The other arguments are +-- probably of little interest; see the source code. (Note that 'for_key' is never true, as this function +-- is invoked only on table values; table keys of these types still trigger the onEncodeError method.) +-- +-- If your unsupportedTypeEncoder() method returns a string, it's inserted into the JSON as is. +-- If it returns nil plus an error message, that error message is passed through to an onEncodeError invocation. +-- If it returns only nil, processing falls through to a default onEncodeError invocation. +-- +-- If you want to handle everything in a simple way: +-- +-- function JSON:unsupportedTypeEncoder(value) +-- return tostring(value) +-- end +-- +-- +-- SUMMARY OF METHODS YOU CAN OVERRIDE IN YOUR LOCAL LUA JSON OBJECT +-- +-- assert +-- onDecodeError +-- onDecodeOfNilError +-- onDecodeOfHTMLError +-- onTrailingGarbage +-- onEncodeError +-- unsupportedTypeEncoder +-- +-- If you want to create a separate Lua JSON object with its own error handlers, +-- you can reload JSON.lua or use the :new() method. 
+-- +--------------------------------------------------------------------------- + +local default_pretty_indent = " " +local default_pretty_options = { pretty = true, indent = default_pretty_indent, align_keys = false, array_newline = false } + +local isArray = { __tostring = function() return "JSON array" end } isArray.__index = isArray +local isObject = { __tostring = function() return "JSON object" end } isObject.__index = isObject + +function OBJDEF:newArray(tbl) + return setmetatable(tbl or {}, isArray) +end + +function OBJDEF:newObject(tbl) + return setmetatable(tbl or {}, isObject) +end + + + + +local function getnum(op) + return type(op) == 'number' and op or op.N +end + +local isNumber = { + __tostring = function(T) return T.S end, + __unm = function(op) return getnum(op) end, + + __concat = function(op1, op2) return tostring(op1) .. tostring(op2) end, + __add = function(op1, op2) return getnum(op1) + getnum(op2) end, + __sub = function(op1, op2) return getnum(op1) - getnum(op2) end, + __mul = function(op1, op2) return getnum(op1) * getnum(op2) end, + __div = function(op1, op2) return getnum(op1) / getnum(op2) end, + __mod = function(op1, op2) return getnum(op1) % getnum(op2) end, + __pow = function(op1, op2) return getnum(op1) ^ getnum(op2) end, + __lt = function(op1, op2) return getnum(op1) < getnum(op2) end, + __eq = function(op1, op2) return getnum(op1) == getnum(op2) end, + __le = function(op1, op2) return getnum(op1) <= getnum(op2) end, +} +isNumber.__index = isNumber + +function OBJDEF:asNumber(item) + + if getmetatable(item) == isNumber then + -- it's already a JSON number object. + return item + elseif type(item) == 'table' and type(item.S) == 'string' and type(item.N) == 'number' then + -- it's a number-object table that lost its metatable, so give it one + return setmetatable(item, isNumber) + else + -- the normal situation... given a number or a string representation of a number.... 
+ local holder = { + S = tostring(item), -- S is the representation of the number as a string, which remains precise + N = tonumber(item), -- N is the number as a Lua number. + } + return setmetatable(holder, isNumber) + end +end + +-- +-- Given an item that might be a normal string or number, or might be an 'isNumber' object defined above, +-- return the string version. This shouldn't be needed often because the 'isNumber' object should autoconvert +-- to a string in most cases, but it's here to allow it to be forced when needed. +-- +function OBJDEF:forceString(item) + if type(item) == 'table' and type(item.S) == 'string' then + return item.S + else + return tostring(item) + end +end + +-- +-- Given an item that might be a normal string or number, or might be an 'isNumber' object defined above, +-- return the numeric version. +-- +function OBJDEF:forceNumber(item) + if type(item) == 'table' and type(item.N) == 'number' then + return item.N + else + return tonumber(item) + end +end + +-- +-- If the given item is a number, return it. Otherwise, return nil. +-- This, this can be used both in a conditional and to access the number when you're not sure its form. 
+-- +function OBJDEF:isNumber(item) + if type(item) == 'number' then + return item + elseif type(item) == 'table' and type(item.N) == 'number' then + return item.N + else + return nil + end +end + +function OBJDEF:isString(item) + if type(item) == 'string' then + return item + elseif type(item) == 'table' and type(item.S) == 'string' then + return item.S + else + return nil + end +end + + +local function unicode_codepoint_as_utf8(codepoint) + -- + -- codepoint is a number + -- + if codepoint <= 127 then + return string.char(codepoint) + + elseif codepoint <= 2047 then + -- + -- 110yyyxx 10xxxxxx <-- useful notation from http://en.wikipedia.org/wiki/Utf8 + -- + local highpart = math.floor(codepoint / 0x40) + local lowpart = codepoint - (0x40 * highpart) + return string.char(0xC0 + highpart, + 0x80 + lowpart) + + elseif codepoint <= 65535 then + -- + -- 1110yyyy 10yyyyxx 10xxxxxx + -- + local highpart = math.floor(codepoint / 0x1000) + local remainder = codepoint - 0x1000 * highpart + local midpart = math.floor(remainder / 0x40) + local lowpart = remainder - 0x40 * midpart + + highpart = 0xE0 + highpart + midpart = 0x80 + midpart + lowpart = 0x80 + lowpart + + -- + -- Check for an invalid character (thanks Andy R. at Adobe). + -- See table 3.7, page 93, in http://www.unicode.org/versions/Unicode5.2.0/ch03.pdf#G28070 + -- + if ( highpart == 0xE0 and midpart < 0xA0 ) or + ( highpart == 0xED and midpart > 0x9F ) or + ( highpart == 0xF0 and midpart < 0x90 ) or + ( highpart == 0xF4 and midpart > 0x8F ) + then + return "?" 
+ else + return string.char(highpart, + midpart, + lowpart) + end + + else + -- + -- 11110zzz 10zzyyyy 10yyyyxx 10xxxxxx + -- + local highpart = math.floor(codepoint / 0x40000) + local remainder = codepoint - 0x40000 * highpart + local midA = math.floor(remainder / 0x1000) + remainder = remainder - 0x1000 * midA + local midB = math.floor(remainder / 0x40) + local lowpart = remainder - 0x40 * midB + + return string.char(0xF0 + highpart, + 0x80 + midA, + 0x80 + midB, + 0x80 + lowpart) + end +end + +function OBJDEF:onDecodeError(message, text, location, etc) + if text then + if location then + message = string.format("%s at byte %d of: %s", message, location, text) + else + message = string.format("%s: %s", message, text) + end + end + + if etc ~= nil then + message = message .. " (" .. OBJDEF:encode(etc) .. ")" + end + + if self.assert then + self.assert(false, message) + else + assert(false, message) + end +end + +function OBJDEF:onTrailingGarbage(json_text, location, parsed_value, etc) + return self:onDecodeError("trailing garbage", json_text, location, etc) +end + +OBJDEF.onDecodeOfNilError = OBJDEF.onDecodeError +OBJDEF.onDecodeOfHTMLError = OBJDEF.onDecodeError + +function OBJDEF:onEncodeError(message, etc) + if etc ~= nil then + message = message .. " (" .. OBJDEF:encode(etc) .. 
")" + end + + if self.assert then + self.assert(false, message) + else + assert(false, message) + end +end + +local function grok_number(self, text, start, options) + -- + -- Grab the integer part + -- + local integer_part = text:match('^-?[1-9]%d*', start) + or text:match("^-?0", start) + + if not integer_part then + self:onDecodeError("expected number", text, start, options.etc) + return nil, start -- in case the error method doesn't abort, return something sensible + end + + local i = start + integer_part:len() + + -- + -- Grab an optional decimal part + -- + local decimal_part = text:match('^%.%d+', i) or "" + + i = i + decimal_part:len() + + -- + -- Grab an optional exponential part + -- + local exponent_part = text:match('^[eE][-+]?%d+', i) or "" + + i = i + exponent_part:len() + + local full_number_text = integer_part .. decimal_part .. exponent_part + + if options.decodeNumbersAsObjects then + + local objectify = false + + if not options.decodeIntegerObjectificationLength and not options.decodeDecimalObjectificationLength then + -- no options, so objectify + objectify = true + + elseif (options.decodeIntegerObjectificationLength + and + (integer_part:len() >= options.decodeIntegerObjectificationLength or exponent_part:len() > 0)) + + or + (options.decodeDecimalObjectificationLength + and + (decimal_part:len() >= options.decodeDecimalObjectificationLength or exponent_part:len() > 0)) + then + -- have options and they are triggered, so objectify + objectify = true + end + + if objectify then + return OBJDEF:asNumber(full_number_text), i + end + -- else, fall through to try to return as a straight-up number + + else + + -- Not always decoding numbers as objects, so perhaps encode as strings? + + -- + -- If we're told to stringify only under certain conditions, so do. + -- We punt a bit when there's an exponent by just stringifying no matter what. 
+ -- I suppose we should really look to see whether the exponent is actually big enough one + -- way or the other to trip stringification, but I'll be lazy about it until someone asks. + -- + if (options.decodeIntegerStringificationLength + and + (integer_part:len() >= options.decodeIntegerStringificationLength or exponent_part:len() > 0)) + + or + + (options.decodeDecimalStringificationLength + and + (decimal_part:len() >= options.decodeDecimalStringificationLength or exponent_part:len() > 0)) + then + return full_number_text, i -- this returns the exact string representation seen in the original JSON + end + + end + + + local as_number = tonumber(full_number_text) + + if not as_number then + self:onDecodeError("bad number", text, start, options.etc) + return nil, start -- in case the error method doesn't abort, return something sensible + end + + return as_number, i +end + + +local function grok_string(self, text, start, options) + + if text:sub(start,start) ~= '"' then + self:onDecodeError("expected string's opening quote", text, start, options.etc) + return nil, start -- in case the error method doesn't abort, return something sensible + end + + local i = start + 1 -- +1 to bypass the initial quote + local text_len = text:len() + local VALUE = "" + while i <= text_len do + local c = text:sub(i,i) + if c == '"' then + return VALUE, i + 1 + end + if c ~= '\\' then + VALUE = VALUE .. c + i = i + 1 + elseif text:match('^\\b', i) then + VALUE = VALUE .. "\b" + i = i + 2 + elseif text:match('^\\f', i) then + VALUE = VALUE .. "\f" + i = i + 2 + elseif text:match('^\\n', i) then + VALUE = VALUE .. "\n" + i = i + 2 + elseif text:match('^\\r', i) then + VALUE = VALUE .. "\r" + i = i + 2 + elseif text:match('^\\t', i) then + VALUE = VALUE .. 
"\t" + i = i + 2 + else + local hex = text:match('^\\u([0123456789aAbBcCdDeEfF][0123456789aAbBcCdDeEfF][0123456789aAbBcCdDeEfF][0123456789aAbBcCdDeEfF])', i) + if hex then + i = i + 6 -- bypass what we just read + + -- We have a Unicode codepoint. It could be standalone, or if in the proper range and + -- followed by another in a specific range, it'll be a two-code surrogate pair. + local codepoint = tonumber(hex, 16) + if codepoint >= 0xD800 and codepoint <= 0xDBFF then + -- it's a hi surrogate... see whether we have a following low + local lo_surrogate = text:match('^\\u([dD][cdefCDEF][0123456789aAbBcCdDeEfF][0123456789aAbBcCdDeEfF])', i) + if lo_surrogate then + i = i + 6 -- bypass the low surrogate we just read + codepoint = 0x2400 + (codepoint - 0xD800) * 0x400 + tonumber(lo_surrogate, 16) + else + -- not a proper low, so we'll just leave the first codepoint as is and spit it out. + end + end + VALUE = VALUE .. unicode_codepoint_as_utf8(codepoint) + + else + + -- just pass through what's escaped + VALUE = VALUE .. 
text:match('^\\(.)', i) + i = i + 2 + end + end + end + + self:onDecodeError("unclosed string", text, start, options.etc) + return nil, start -- in case the error method doesn't abort, return something sensible +end + +local function skip_whitespace(text, start) + + local _, match_end = text:find("^[ \n\r\t]+", start) -- [http://www.ietf.org/rfc/rfc4627.txt] Section 2 + if match_end then + return match_end + 1 + else + return start + end +end + +local grok_one -- assigned later + +local function grok_object(self, text, start, options) + + if text:sub(start,start) ~= '{' then + self:onDecodeError("expected '{'", text, start, options.etc) + return nil, start -- in case the error method doesn't abort, return something sensible + end + + local i = skip_whitespace(text, start + 1) -- +1 to skip the '{' + + local VALUE = self.strictTypes and self:newObject { } or { } + + if text:sub(i,i) == '}' then + return VALUE, i + 1 + end + local text_len = text:len() + while i <= text_len do + local key, new_i = grok_string(self, text, i, options) + + i = skip_whitespace(text, new_i) + + if text:sub(i, i) ~= ':' then + self:onDecodeError("expected colon", text, i, options.etc) + return nil, i -- in case the error method doesn't abort, return something sensible + end + + i = skip_whitespace(text, i + 1) + + local new_val, new_i = grok_one(self, text, i, options) + + VALUE[key] = new_val + + -- + -- Expect now either '}' to end things, or a ',' to allow us to continue. 
+ -- + i = skip_whitespace(text, new_i) + + local c = text:sub(i,i) + + if c == '}' then + return VALUE, i + 1 + end + + if text:sub(i, i) ~= ',' then + self:onDecodeError("expected comma or '}'", text, i, options.etc) + return nil, i -- in case the error method doesn't abort, return something sensible + end + + i = skip_whitespace(text, i + 1) + end + + self:onDecodeError("unclosed '{'", text, start, options.etc) + return nil, start -- in case the error method doesn't abort, return something sensible +end + +local function grok_array(self, text, start, options) + if text:sub(start,start) ~= '[' then + self:onDecodeError("expected '['", text, start, options.etc) + return nil, start -- in case the error method doesn't abort, return something sensible + end + + local i = skip_whitespace(text, start + 1) -- +1 to skip the '[' + local VALUE = self.strictTypes and self:newArray { } or { } + if text:sub(i,i) == ']' then + return VALUE, i + 1 + end + + local VALUE_INDEX = 1 + + local text_len = text:len() + while i <= text_len do + local val, new_i = grok_one(self, text, i, options) + + -- can't table.insert(VALUE, val) here because it's a no-op if val is nil + VALUE[VALUE_INDEX] = val + VALUE_INDEX = VALUE_INDEX + 1 + + i = skip_whitespace(text, new_i) + + -- + -- Expect now either ']' to end things, or a ',' to allow us to continue. 
+ -- + local c = text:sub(i,i) + if c == ']' then + return VALUE, i + 1 + end + if text:sub(i, i) ~= ',' then + self:onDecodeError("expected comma or ']'", text, i, options.etc) + return nil, i -- in case the error method doesn't abort, return something sensible + end + i = skip_whitespace(text, i + 1) + end + self:onDecodeError("unclosed '['", text, start, options.etc) + return nil, i -- in case the error method doesn't abort, return something sensible +end + + +grok_one = function(self, text, start, options) + -- Skip any whitespace + start = skip_whitespace(text, start) + + if start > text:len() then + self:onDecodeError("unexpected end of string", text, nil, options.etc) + return nil, start -- in case the error method doesn't abort, return something sensible + end + + if text:find('^"', start) then + return grok_string(self, text, start, options.etc) + + elseif text:find('^[-0123456789 ]', start) then + return grok_number(self, text, start, options) + + elseif text:find('^%{', start) then + return grok_object(self, text, start, options) + + elseif text:find('^%[', start) then + return grok_array(self, text, start, options) + + elseif text:find('^true', start) then + return true, start + 4 + + elseif text:find('^false', start) then + return false, start + 5 + + elseif text:find('^null', start) then + return options.null, start + 4 + + else + self:onDecodeError("can't parse JSON", text, start, options.etc) + return nil, 1 -- in case the error method doesn't abort, return something sensible + end +end + +function OBJDEF:decode(text, etc, options) + -- + -- If the user didn't pass in a table of decode options, make an empty one. + -- + if type(options) ~= 'table' then + options = {} + end + + -- + -- If they passed in an 'etc' argument, stuff it into the options. 
+ -- (If not, any 'etc' field in the options they passed in remains to be used) + -- + if etc ~= nil then + options.etc = etc + end + + + if type(self) ~= 'table' or self.__index ~= OBJDEF then + local error_message = "JSON:decode must be called in method format" + OBJDEF:onDecodeError(error_message, nil, nil, options.etc) + return nil, error_message -- in case the error method doesn't abort, return something sensible + end + + if text == nil then + local error_message = "nil passed to JSON:decode()" + self:onDecodeOfNilError(error_message, nil, nil, options.etc) + return nil, error_message -- in case the error method doesn't abort, return something sensible + + elseif type(text) ~= 'string' then + local error_message = "expected string argument to JSON:decode()" + self:onDecodeError(string.format("%s, got %s", error_message, type(text)), nil, nil, options.etc) + return nil, error_message -- in case the error method doesn't abort, return something sensible + end + + if text:match('^%s*$') then + -- an empty string is nothing, but not an error + return nil + end + + if text:match('^%s*<') then + -- Can't be JSON... we'll assume it's HTML + local error_message = "HTML passed to JSON:decode()" + self:onDecodeOfHTMLError(error_message, text, nil, options.etc) + return nil, error_message -- in case the error method doesn't abort, return something sensible + end + + -- + -- Ensure that it's not UTF-32 or UTF-16. + -- Those are perfectly valid encodings for JSON (as per RFC 4627 section 3), + -- but this package can't handle them. 
+ -- + if text:sub(1,1):byte() == 0 or (text:len() >= 2 and text:sub(2,2):byte() == 0) then + local error_message = "JSON package groks only UTF-8, sorry" + self:onDecodeError(error_message, text, nil, options.etc) + return nil, error_message -- in case the error method doesn't abort, return something sensible + end + + -- + -- apply global options + -- + if options.decodeNumbersAsObjects == nil then + options.decodeNumbersAsObjects = self.decodeNumbersAsObjects + end + if options.decodeIntegerObjectificationLength == nil then + options.decodeIntegerObjectificationLength = self.decodeIntegerObjectificationLength + end + if options.decodeDecimalObjectificationLength == nil then + options.decodeDecimalObjectificationLength = self.decodeDecimalObjectificationLength + end + if options.decodeIntegerStringificationLength == nil then + options.decodeIntegerStringificationLength = self.decodeIntegerStringificationLength + end + if options.decodeDecimalStringificationLength == nil then + options.decodeDecimalStringificationLength = self.decodeDecimalStringificationLength + end + + + -- + -- Finally, go parse it + -- + local success, value, next_i = pcall(grok_one, self, text, 1, options) + + if success then + + local error_message = nil + if next_i ~= #text + 1 then + -- something's left over after we parsed the first thing.... whitespace is allowed. + next_i = skip_whitespace(text, next_i) + + -- if we have something left over now, it's trailing garbage + if next_i ~= #text + 1 then + value, error_message = self:onTrailingGarbage(text, next_i, value, options.etc) + end + end + return value, error_message + + else + + -- If JSON:onDecodeError() didn't abort out of the pcall, we'll have received + -- the error message here as "value", so pass it along as an assert. 
+ local error_message = value + if self.assert then + self.assert(false, error_message) + else + assert(false, error_message) + end + -- ...and if we're still here (because the assert didn't throw an error), + -- return a nil and throw the error message on as a second arg + return nil, error_message + + end +end + +local function backslash_replacement_function(c) + if c == "\n" then + return "\\n" + elseif c == "\r" then + return "\\r" + elseif c == "\t" then + return "\\t" + elseif c == "\b" then + return "\\b" + elseif c == "\f" then + return "\\f" + elseif c == '"' then + return '\\"' + elseif c == '\\' then + return '\\\\' + else + return string.format("\\u%04x", c:byte()) + end +end + +local chars_to_be_escaped_in_JSON_string + = '[' + .. '"' -- class sub-pattern to match a double quote + .. '%\\' -- class sub-pattern to match a backslash + .. '%z' -- class sub-pattern to match a null + .. '\001' .. '-' .. '\031' -- class sub-pattern to match control characters + .. ']' + + +local LINE_SEPARATOR_as_utf8 = unicode_codepoint_as_utf8(0x2028) +local PARAGRAPH_SEPARATOR_as_utf8 = unicode_codepoint_as_utf8(0x2029) +local function json_string_literal(value, options) + local newval = value:gsub(chars_to_be_escaped_in_JSON_string, backslash_replacement_function) + if options.stringsAreUtf8 then + -- + -- This feels really ugly to just look into a string for the sequence of bytes that we know to be a particular utf8 character, + -- but utf8 was designed purposefully to make this kind of thing possible. Still, feels dirty. + -- I'd rather decode the byte stream into a character stream, but it's not technically needed so + -- not technically worth it. + -- + newval = newval:gsub(LINE_SEPARATOR_as_utf8, '\\u2028'):gsub(PARAGRAPH_SEPARATOR_as_utf8,'\\u2029') + end + return '"' .. newval .. '"' +end + +local function object_or_array(self, T, etc) + -- + -- We need to inspect all the keys... if there are any strings, we'll convert to a JSON + -- object. 
If there are only numbers, it's a JSON array. + -- + -- If we'll be converting to a JSON object, we'll want to sort the keys so that the + -- end result is deterministic. + -- + local string_keys = { } + local number_keys = { } + local number_keys_must_be_strings = false + local maximum_number_key + + for key in pairs(T) do + if type(key) == 'string' then + table.insert(string_keys, key) + elseif type(key) == 'number' then + table.insert(number_keys, key) + if key <= 0 or key >= math.huge then + number_keys_must_be_strings = true + elseif not maximum_number_key or key > maximum_number_key then + maximum_number_key = key + end + elseif type(key) == 'boolean' then + table.insert(string_keys, tostring(key)) + else + self:onEncodeError("can't encode table with a key of type " .. type(key), etc) + end + end + + if #string_keys == 0 and not number_keys_must_be_strings then + -- + -- An empty table, or a numeric-only array + -- + if #number_keys > 0 then + return nil, maximum_number_key -- an array + elseif tostring(T) == "JSON array" then + return nil + elseif tostring(T) == "JSON object" then + return { } + else + -- have to guess, so we'll pick array, since empty arrays are likely more common than empty objects + return nil + end + end + + table.sort(string_keys) + + local map + if #number_keys > 0 then + -- + -- If we're here then we have either mixed string/number keys, or numbers inappropriate for a JSON array + -- It's not ideal, but we'll turn the numbers into strings so that we can at least create a JSON object. 
+ -- + + if self.noKeyConversion then + self:onEncodeError("a table with both numeric and string keys could be an object or array; aborting", etc) + end + + -- + -- Have to make a shallow copy of the source table so we can remap the numeric keys to be strings + -- + map = { } + for key, val in pairs(T) do + map[key] = val + end + + table.sort(number_keys) + + -- + -- Throw numeric keys in there as strings + -- + for _, number_key in ipairs(number_keys) do + local string_key = tostring(number_key) + if map[string_key] == nil then + table.insert(string_keys , string_key) + map[string_key] = T[number_key] + else + self:onEncodeError("conflict converting table with mixed-type keys into a JSON object: key " .. number_key .. " exists both as a string and a number.", etc) + end + end + end + + return string_keys, nil, map +end + +-- +-- Encode +-- +-- 'options' is nil, or a table with possible keys: +-- +-- pretty -- If true, return a pretty-printed version. +-- +-- indent -- A string (usually of spaces) used to indent each nested level. +-- +-- align_keys -- If true, align all the keys when formatting a table. The result is uglier than one might at first imagine. +-- Results are undefined if 'align_keys' is true but 'pretty' is not. +-- +-- array_newline -- If true, array elements are formatted each to their own line. The default is to all fall inline. +-- Results are undefined if 'array_newline' is true but 'pretty' is not. +-- +-- null -- If this exists with a string value, table elements with this value are output as JSON null. +-- +-- stringsAreUtf8 -- If true, consider Lua strings not as a sequence of bytes, but as a sequence of UTF-8 characters. +-- (Currently, the only practical effect of setting this option is that Unicode LINE and PARAGRAPH +-- separators, if found in a string, are encoded with a JSON escape instead of as raw UTF-8. +-- The JSON is valid either way, but encoding this way, apparently, allows the resulting JSON +-- to also be valid Java.) 
+-- +-- +local function encode_value(self, value, parents, etc, options, indent, for_key) + + -- + -- keys in a JSON object can never be null, so we don't even consider options.null when converting a key value + -- + if value == nil or (not for_key and options and options.null and value == options.null) then + return 'null' + + elseif type(value) == 'string' then + return json_string_literal(value, options) + + elseif type(value) == 'number' then + if value ~= value then + -- + -- NaN (Not a Number). + -- JSON has no NaN, so we have to fudge the best we can. This should really be a package option. + -- + return "null" + elseif value >= math.huge then + -- + -- Positive infinity. JSON has no INF, so we have to fudge the best we can. This should + -- really be a package option. Note: at least with some implementations, positive infinity + -- is both ">= math.huge" and "<= -math.huge", which makes no sense but that's how it is. + -- Negative infinity is properly "<= -math.huge". So, we must be sure to check the ">=" + -- case first. + -- + return "1e+9999" + elseif value <= -math.huge then + -- + -- Negative infinity. + -- JSON has no INF, so we have to fudge the best we can. This should really be a package option. + -- + return "-1e+9999" + else + return tostring(value) + end + + elseif type(value) == 'boolean' then + return tostring(value) + + elseif type(value) ~= 'table' then + + if self.unsupportedTypeEncoder then + local user_value, user_error = self:unsupportedTypeEncoder(value, parents, etc, options, indent, for_key) + -- If the user's handler returns a string, use that. If it returns nil plus an error message, bail with that. + -- If only nil returned, fall through to the default error handler. + if type(user_value) == 'string' then + return user_value + elseif user_value ~= nil then + self:onEncodeError("unsupportedTypeEncoder method returned a " .. 
type(user_value), etc) + elseif user_error then + self:onEncodeError(tostring(user_error), etc) + end + end + + self:onEncodeError("can't convert " .. type(value) .. " to JSON", etc) + + elseif getmetatable(value) == isNumber then + return tostring(value) + else + -- + -- A table to be converted to either a JSON object or array. + -- + local T = value + + if type(options) ~= 'table' then + options = {} + end + if type(indent) ~= 'string' then + indent = "" + end + + if parents[T] then + self:onEncodeError("table " .. tostring(T) .. " is a child of itself", etc) + else + parents[T] = true + end + + local result_value + + local object_keys, maximum_number_key, map = object_or_array(self, T, etc) + if maximum_number_key then + -- + -- An array... + -- + local key_indent + if options.array_newline then + key_indent = indent .. tostring(options.indent or "") + else + key_indent = indent + end + + local ITEMS = { } + for i = 1, maximum_number_key do + table.insert(ITEMS, encode_value(self, T[i], parents, etc, options, key_indent)) + end + + if options.array_newline then + result_value = "[\n" .. key_indent .. table.concat(ITEMS, ",\n" .. key_indent) .. "\n" .. indent .. "]" + elseif options.pretty then + result_value = "[ " .. table.concat(ITEMS, ", ") .. " ]" + else + result_value = "[" .. table.concat(ITEMS, ",") .. "]" + end + + elseif object_keys then + -- + -- An object + -- + local TT = map or T + + if options.pretty then + + local KEYS = { } + local max_key_length = 0 + for _, key in ipairs(object_keys) do + local encoded = encode_value(self, tostring(key), parents, etc, options, indent, true) + if options.align_keys then + max_key_length = math.max(max_key_length, #encoded) + end + table.insert(KEYS, encoded) + end + local key_indent = indent .. tostring(options.indent or "") + local subtable_indent = key_indent .. string.rep(" ", max_key_length) .. (options.align_keys and " " or "") + local FORMAT = "%s%" .. string.format("%d", max_key_length) .. 
"s: %s" + + local COMBINED_PARTS = { } + for i, key in ipairs(object_keys) do + local encoded_val = encode_value(self, TT[key], parents, etc, options, subtable_indent) + table.insert(COMBINED_PARTS, string.format(FORMAT, key_indent, KEYS[i], encoded_val)) + end + result_value = "{\n" .. table.concat(COMBINED_PARTS, ",\n") .. "\n" .. indent .. "}" + + else + + local PARTS = { } + for _, key in ipairs(object_keys) do + local encoded_val = encode_value(self, TT[key], parents, etc, options, indent) + local encoded_key = encode_value(self, tostring(key), parents, etc, options, indent, true) + table.insert(PARTS, string.format("%s:%s", encoded_key, encoded_val)) + end + result_value = "{" .. table.concat(PARTS, ",") .. "}" + + end + else + -- + -- An empty array/object... we'll treat it as an array, though it should really be an option + -- + result_value = "[]" + end + + parents[T] = false + return result_value + end +end + +local function top_level_encode(self, value, etc, options) + local val = encode_value(self, value, {}, etc, options) + if val == nil then + --PRIVATE("may need to revert to the previous public verison if I can't figure out what the guy wanted") + return val + else + return val + end +end + +function OBJDEF:encode(value, etc, options) + if type(self) ~= 'table' or self.__index ~= OBJDEF then + OBJDEF:onEncodeError("JSON:encode must be called in method format", etc) + end + + -- + -- If the user didn't pass in a table of decode options, make an empty one. 
+ -- + if type(options) ~= 'table' then + options = {} + end + + return top_level_encode(self, value, etc, options) +end + +function OBJDEF:encode_pretty(value, etc, options) + if type(self) ~= 'table' or self.__index ~= OBJDEF then + OBJDEF:onEncodeError("JSON:encode_pretty must be called in method format", etc) + end + + -- + -- If the user didn't pass in a table of decode options, use the default pretty ones + -- + if type(options) ~= 'table' then + options = default_pretty_options + end + + return top_level_encode(self, value, etc, options) +end + +function OBJDEF.__tostring() + return "JSON encode/decode package" +end + +OBJDEF.__index = OBJDEF + +function OBJDEF:new(args) + local new = { } + + if args then + for key, val in pairs(args) do + new[key] = val + end + end + + return setmetatable(new, OBJDEF) +end + +return OBJDEF:new() + +-- +-- Version history: +-- +-- 20170927.26 Use option.null in decoding as well. Thanks to Max Sindwani for the bump, and sorry to Oliver Hitz +-- whose first mention of it four years ago was completely missed by me. +-- +-- 20170823.25 Added support for JSON:unsupportedTypeEncoder(). +-- Thanks to Chronos Phaenon Eosphoros (https://github.com/cpeosphoros) for the idea. +-- +-- 20170819.24 Added support for boolean keys in tables. +-- +-- 20170416.23 Added the "array_newline" formatting option suggested by yurenchen (http://www.yurenchen.com/) +-- +-- 20161128.22 Added: +-- JSON:isString() +-- JSON:isNumber() +-- JSON:decodeIntegerObjectificationLength +-- JSON:decodeDecimalObjectificationLength +-- +-- 20161109.21 Oops, had a small boo-boo in the previous update. +-- +-- 20161103.20 Used to silently ignore trailing garbage when decoding. Now fails via JSON:onTrailingGarbage() +-- http://seriot.ch/parsing_json.php +-- +-- Built-in error message about "expected comma or ']'" had mistakenly referred to '[' +-- +-- Updated the built-in error reporting to refer to bytes rather than characters. 
+-- +-- The decode() method no longer assumes that error handlers abort. +-- +-- Made the VERSION string a string instead of a number +-- + +-- 20160916.19 Fixed the isNumber.__index assignment (thanks to Jack Taylor) +-- +-- 20160730.18 Added JSON:forceString() and JSON:forceNumber() +-- +-- 20160728.17 Added concatenation to the metatable for JSON:asNumber() +-- +-- 20160709.16 Could crash if not passed an options table (thanks jarno heikkinen ). +-- +-- Made JSON:asNumber() a bit more resilient to being passed the results of itself. +-- +-- 20160526.15 Added the ability to easily encode null values in JSON, via the new "null" encoding option. +-- (Thanks to Adam B for bringing up the issue.) +-- +-- Added some support for very large numbers and precise floats via +-- JSON.decodeNumbersAsObjects +-- JSON.decodeIntegerStringificationLength +-- JSON.decodeDecimalStringificationLength +-- +-- Added the "stringsAreUtf8" encoding option. (Hat tip to http://lua-users.org/wiki/JsonModules ) +-- +-- 20141223.14 The encode_pretty() routine produced fine results for small datasets, but isn't really +-- appropriate for anything large, so with help from Alex Aulbach I've made the encode routines +-- more flexible, and changed the default encode_pretty() to be more generally useful. +-- +-- Added a third 'options' argument to the encode() and encode_pretty() routines, to control +-- how the encoding takes place. +-- +-- Updated docs to add assert() call to the loadfile() line, just as good practice so that +-- if there is a problem loading JSON.lua, the appropriate error message will percolate up. +-- +-- 20140920.13 Put back (in a way that doesn't cause warnings about unused variables) the author string, +-- so that the source of the package, and its version number, are visible in compiled copies. +-- +-- 20140911.12 Minor lua cleanup. +-- Fixed internal reference to 'JSON.noKeyConversion' to reference 'self' instead of 'JSON'. 
+-- (Thanks to SmugMug's David Parry for these.) +-- +-- 20140418.11 JSON nulls embedded within an array were being ignored, such that +-- ["1",null,null,null,null,null,"seven"], +-- would return +-- {1,"seven"} +-- It's now fixed to properly return +-- {1, nil, nil, nil, nil, nil, "seven"} +-- Thanks to "haddock" for catching the error. +-- +-- 20140116.10 The user's JSON.assert() wasn't always being used. Thanks to "blue" for the heads up. +-- +-- 20131118.9 Update for Lua 5.3... it seems that tostring(2/1) produces "2.0" instead of "2", +-- and this caused some problems. +-- +-- 20131031.8 Unified the code for encode() and encode_pretty(); they had been stupidly separate, +-- and had of course diverged (encode_pretty didn't get the fixes that encode got, so +-- sometimes produced incorrect results; thanks to Mattie for the heads up). +-- +-- Handle encoding tables with non-positive numeric keys (unlikely, but possible). +-- +-- If a table has both numeric and string keys, or its numeric keys are inappropriate +-- (such as being non-positive or infinite), the numeric keys are turned into +-- string keys appropriate for a JSON object. So, as before, +-- JSON:encode({ "one", "two", "three" }) +-- produces the array +-- ["one","two","three"] +-- but now something with mixed key types like +-- JSON:encode({ "one", "two", "three", SOMESTRING = "some string" })) +-- instead of throwing an error produces an object: +-- {"1":"one","2":"two","3":"three","SOMESTRING":"some string"} +-- +-- To maintain the prior throw-an-error semantics, set +-- JSON.noKeyConversion = true +-- +-- 20131004.7 Release under a Creative Commons CC-BY license, which I should have done from day one, sorry. +-- +-- 20130120.6 Comment update: added a link to the specific page on my blog where this code can +-- be found, so that folks who come across the code outside of my blog can find updates +-- more easily. +-- +-- 20111207.5 Added support for the 'etc' arguments, for better error reporting. 
+-- +-- 20110731.4 More feedback from David Kolf on how to make the tests for Nan/Infinity system independent. +-- +-- 20110730.3 Incorporated feedback from David Kolf at http://lua-users.org/wiki/JsonModules: +-- +-- * When encoding lua for JSON, Sparse numeric arrays are now handled by +-- spitting out full arrays, such that +-- JSON:encode({"one", "two", [10] = "ten"}) +-- returns +-- ["one","two",null,null,null,null,null,null,null,"ten"] +-- +-- In 20100810.2 and earlier, only up to the first non-null value would have been retained. +-- +-- * When encoding lua for JSON, numeric value NaN gets spit out as null, and infinity as "1+e9999". +-- Version 20100810.2 and earlier created invalid JSON in both cases. +-- +-- * Unicode surrogate pairs are now detected when decoding JSON. +-- +-- 20100810.2 added some checking to ensure that an invalid Unicode character couldn't leak in to the UTF-8 encoding +-- +-- 20100731.1 initial public release +-- diff --git a/modules/clink_version.lua b/modules/clink_version.lua new file mode 100644 index 0000000..d8e8e35 --- /dev/null +++ b/modules/clink_version.lua @@ -0,0 +1,13 @@ +local exports = {} + +-- Busted runs these modules scripts *outside* of Clink. +-- So these Clink scripts have to work without any Clink APIs being available. 
+clink = clink or {} + +local clink_version_encoded = clink.version_encoded or 0 + +exports.supports_display_filter_description = (clink_version_encoded >= 10010012) +exports.supports_color_settings = (clink_version_encoded >= 10010009) +exports.supports_query_rl_var = (clink_version_encoded >= 10010009) + +return exports diff --git a/modules/color.lua b/modules/color.lua new file mode 100644 index 0000000..0b28d7a --- /dev/null +++ b/modules/color.lua @@ -0,0 +1,41 @@ +local clink_version = require('clink_version') + +local exports = {} + +exports.BLACK = 0 +exports.RED = 1 +exports.GREEN = 2 +exports.YELLOW = 3 +exports.BLUE = 4 +exports.MAGENTA = 5 +exports.CYAN = 6 +exports.WHITE = 7 +exports.DEFAULT = 9 +exports.BOLD = 1 + +exports.set_color = function (fore, back, bold) + local err_message = "All arguments must be either nil or numbers between 0-9" + assert(fore == nil or (type(fore) == "number" and fore >= 0 and fore <=9), err_message) + assert(back == nil or (type(back) == "number" and back >= 0 and back <=9), err_message) + + fore = fore or exports.DEFAULT + back = back or exports.DEFAULT + bold = bold and exports.BOLD or 22 + + return "\x1b[3"..fore..";"..bold..";".."4"..back.."m" +end + +exports.get_clink_color = function (setting_name) + -- Clink's settings.get() returns SGR parameters for a CSI SGR escape code. + local sgr = clink_version.supports_color_settings and settings.get(setting_name) or "" + if sgr ~= "" then + sgr = "\x1b["..sgr.."m" + end + return sgr +end + +exports.color_text = function (text, fore, back, bold) + return exports.set_color(fore, back, bold)..text..exports.set_color() +end + +return exports diff --git a/modules/funclib.lua b/modules/funclib.lua new file mode 100644 index 0000000..0880cd7 --- /dev/null +++ b/modules/funclib.lua @@ -0,0 +1,100 @@ + +local exports = {} + +--- Implementation of table.filter function. 
Applies filter function to each + -- element of table and returns a new table with values for which filter + -- returns 'true'. + -- + -- @param tbl a table to filter. Default is an empty table. + -- @param filter function that accepts an element of table, specified in the + -- first argument and returns either 'true' or 'false'. If not specified, + -- then default function is used that returns its argument. + -- + -- @return a new table with values that are not filtered out by 'filter' function. +exports.filter = function (tbl, filter) + if not tbl then return {} end + if not filter then filter = function(v) return v end end + local ret = {} + for _,v in ipairs(tbl) do + if filter(v) then table.insert(ret, v) end + end + return ret +end + +--- Implementation of table.map function. Applies filter function to each + -- element of table and returns a new table with values returned by mapper + -- function. + -- + -- @param tbl a table to filter. Default is an empty table. + -- @param map_func function that accepts an element of table, specified in the + -- first argument and returns a new value for resultant table. If not + -- specified, then 'map' function returns it input table. + -- + -- @return a new table with values produced by 'map_func'. +exports.map = function (tbl, map_func) + assert(tbl == nil or type(tbl) == "table", + "First argument must be either table or nil") + + assert(map_func == nil or type(map_func) == "function", + "Second argument must be either function or nil") + + if tbl == nil then return {} end + if not map_func then return tbl end + local ret = {} + for _,v in ipairs(tbl) do + table.insert(ret, map_func(v)) + end + return ret +end + +--- Implementation of table.reduce function. Iterates through table and calls + -- 'func' function passing an accumulator and an entry from the original + -- table. The result of table is stored in accumulator and passed to next + -- 'func' call. 
+ -- + -- @param accum an accumulator, initial value that will be passed to first + -- 'func' call. + -- @param tbl a table to reduce. Default is an empty table. + -- @param func function that accepts two params: an accumulator and an element + -- of table, specified in the first argument and returns a new value for + -- accumulator. + -- + -- @return a resultant accumulator value. +exports.reduce = function (accum, tbl, func) + assert(type(func) == "function", + "Third argument must be a function") + + if not tbl then return accum end + for _,v in ipairs(tbl) do + accum = func(accum, v) + end + return accum +end + +--- Concatenates any number of input values into one table. If input parameter is + -- a table then its values is copied to the end of resultant table. If the + -- parameter is single value, then it is appended to the resultant table. If + -- the input value is 'nil', then it is omitted. + -- + -- @return a result of concatenation. The result is always a table. +exports.concat = function (...) + local input = {...} + local ret = {} + local i = 1 + + while i <= #input do + local arg = input[i] + if type(arg) == 'table' then + for _,v in ipairs(arg) do + table.insert(ret, v) + end + elseif arg ~= nil then + table.insert(ret, arg) + end + i = i + 1 + end + + return ret +end + +return exports diff --git a/modules/gitutil.lua b/modules/gitutil.lua new file mode 100644 index 0000000..1cf1594 --- /dev/null +++ b/modules/gitutil.lua @@ -0,0 +1,83 @@ +local path = require('path') + +local exports = {} + +--- + -- Resolves closest .git directory location. + -- Navigates subsequently up one level and tries to find .git directory + -- @param {string} path Path to directory will be checked. 
If not provided
+ -- current directory will be used
+ -- @return {string} Path to .git directory or nil if such dir not found
+exports.get_git_dir = function (start_dir)
+
+    -- Checks if provided directory contains '.git' directory
+    -- and returns path to that directory
+    local function has_git_dir(dir)
+        return #clink.find_dirs(dir..'/.git') > 0 and dir..'/.git'
+    end
+
+    -- checks if directory contains '.git' _file_ and if it does
+    -- parses it and returns a path to git directory from that file
+    local function has_git_file(dir)
+        local gitfile = io.open(dir..'/.git')
+        if not gitfile then return false end
+
+        local git_dir = gitfile:read():match('gitdir: (.*)')
+        gitfile:close()
+
+        if not git_dir then return false end
+        -- If found path is absolute don't prepend initial
+        -- directory - return absolute path value
+        return path.is_absolute(git_dir) and git_dir
+            or dir..'/'..git_dir
+    end
+
+    -- Set default path to current directory
+    if not start_dir or start_dir == '.' then start_dir = clink.get_cwd() end
+
+    -- Calculate parent path now otherwise we won't be
+    -- able to do that inside of logical operator
+    local parent_path = path.pathname(start_dir)
+
+    return has_git_dir(start_dir)
+        or has_git_file(start_dir)
+        -- Otherwise go up one level and make a recursive call
+        or (parent_path ~= start_dir and exports.get_git_dir(parent_path) or nil)
+end
+
+exports.get_git_common_dir = function (start_dir)
+    local git_dir = exports.get_git_dir(start_dir)
+    if not git_dir then return git_dir end
+    local commondirfile = io.open(git_dir..'/commondir')
+    if commondirfile then
+        -- If there's a commondir file, we're in a git worktree
+        local commondir = commondirfile:read()
+        -- must be a method call (colon) so the file handle is passed as self;
+        -- 'commondirfile.close()' passed no argument and errored at runtime
+        commondirfile:close()
+        return path.is_absolute(commondir) and commondir
+            or git_dir..'/'..commondir
+    end
+    return git_dir
+end
+
+---
+ -- Find out current branch
+ -- @return {nil|git branch name}
+---
+exports.get_git_branch = function (dir)
+    local git_dir = dir or exports.get_git_dir()
+
+    
-- If git directory not found then we're probably outside of repo + -- or something went wrong. The same is when head_file is nil + local head_file = git_dir and io.open(git_dir..'/HEAD') + if not head_file then return end + + local HEAD = head_file:read() + head_file:close() + + -- if HEAD matches branch expression, then we're on named branch + -- otherwise it is a detached commit + local branch_name = HEAD:match('ref: refs/heads/(.+)') + return branch_name or 'HEAD detached at '..HEAD:sub(1, 7) +end + +return exports diff --git a/modules/matchers.lua b/modules/matchers.lua new file mode 100644 index 0000000..b9a9861 --- /dev/null +++ b/modules/matchers.lua @@ -0,0 +1,80 @@ + +local exports = {} + +local path = require('path') +local w = require('tables').wrap + +exports.dirs = function(word) + -- Strip off any path components that may be on text. + local prefix = "" + local i = word:find("[\\/:][^\\/:]*$") + if i then + prefix = word:sub(1, i) + end + local include_dots = word:find("%.+$") ~= nil + + -- Find matches. + local matches = w(clink.find_dirs(word.."*", true)) + :filter(function (dir) + return clink.is_match(word, prefix..dir) and + (include_dots or path.is_real_dir(dir)) + end) + :map(function(dir) + return prefix..dir + end) + + -- If there was no matches but word is a dir then use it as the single match. + -- Otherwise tell readline that matches are files and it will do magic. + if #matches == 0 and clink.is_dir(rl_state.text) then + return {rl_state.text} + end + + clink.matches_are_files() + return matches +end + +exports.files = function (word) + -- Strip off any path components that may be on text. + local prefix = "" + local i = word:find("[\\/:][^\\/:]*$") + if i then + prefix = word:sub(1, i) + end + + -- Find matches. 
+ local matches = w(clink.find_files(word.."*", true)) + :filter(function (file) + return clink.is_match(word, prefix..file) + end) + :map(function(file) + return prefix..file + end) + + -- Tell readline that matches are files and it will do magic. + if #matches ~= 0 then + clink.matches_are_files() + end + + return matches +end + +exports.create_dirs_matcher = function (dir_pattern, show_dotfiles) + return function (token) + return w(clink.find_dirs(dir_pattern)) + :filter(function(dir) + return clink.is_match(token, dir) and (path.is_real_dir(dir) or show_dotfiles) + end ) + end +end + +exports.create_files_matcher = function (file_pattern) + return function (token) + return w(clink.find_files(file_pattern)) + :filter(function(file) + -- Filter out '.' and '..' entries as well + return clink.is_match(token, file) and path.is_real_dir(file) + end ) + end +end + +return exports diff --git a/modules/path.lua b/modules/path.lua new file mode 100644 index 0000000..75114d6 --- /dev/null +++ b/modules/path.lua @@ -0,0 +1,69 @@ +local exports = {} + +local w = require('tables').wrap + +exports.list_files = function (base_path, glob, recursive, reverse_separator) + local mask = glob or '/*' + + local entries = w(clink.find_files(base_path..mask)) + :filter(function(entry) + return exports.is_real_dir(entry) + end) + + local files = entries:filter(function(entry) + return not clink.is_dir(base_path..'/'..entry) + end) + + -- if 'recursive' flag is not set, we don't need to iterate + -- through directories, so just return files found + if not recursive then return files end + + local sep = reverse_separator and '/' or '\\' + + return entries + :filter(function(entry) + return clink.is_dir(base_path..'/'..entry) + end) + :reduce(files, function(accum, dir) + -- iterate through directories and call list_files recursively + return exports.list_files(base_path..'/'..dir, mask, recursive, reverse_separator) + :map(function(entry) + return dir..sep..entry + end) + :concat(accum) 
+ end) +end + +exports.basename = function (path) + local prefix = path + local i = path:find("[\\/:][^\\/:]*$") + if i then + prefix = path:sub(i + 1) + end + return prefix +end + +exports.pathname = function (path) + local prefix = "" + local i = path:find("[\\/:][^\\/:]*$") + if i then + prefix = path:sub(1, i-1) + end + return prefix +end + +exports.is_absolute = function (path) + local drive = path:find("^%s?[%l%a]:[\\/]") + if drive then return true else return false end +end + +exports.is_metadir = function (dirname) + return exports.basename(dirname) == '.' + or exports.basename(dirname) == '..' +end + +exports.is_real_dir = function (dirname) + return not exports.is_metadir(dirname) +end + +return exports diff --git a/modules/tables.lua b/modules/tables.lua new file mode 100644 index 0000000..c42637f --- /dev/null +++ b/modules/tables.lua @@ -0,0 +1,74 @@ +local concat = require('funclib').concat +local filter = require('funclib').filter +local map = require('funclib').map +local reduce = require('funclib').reduce + +local exports = {} + +local wrap_filter = function (tbl, filter_func) + return exports.wrap(filter(tbl, filter_func)) +end + +local wrap_map = function (tbl, map_func) + return exports.wrap(map(tbl, map_func)) +end + +local wrap_reduce = function (tbl, accum, reduce_func) + local res = reduce(accum, tbl, reduce_func) + return (type(res) == "table" and exports.wrap(res) or res) +end + +local wrap_concat = function (tbl, ...) 
+ return exports.wrap(concat(tbl, ...)) +end + +local wrap_print = function (tbl) + return exports.wrap(filter(tbl, function (item) + print(item) + return true + end)) +end + +exports.wrap = function (tbl) + if tbl == nil then tbl = {} end + if type(tbl) ~= "table" then tbl = {tbl} end + + local mt = getmetatable(tbl) or {} + mt.__index = mt.__index or {} + mt.__index.filter = wrap_filter + mt.__index.map = wrap_map + mt.__index.reduce = wrap_reduce + mt.__index.concat = wrap_concat + mt.__index.print = wrap_print + mt.__index.keys = function (arg) + local res = {} + for k,_ in pairs(arg) do + table.insert(res, k) + end + return exports.wrap(res) + end + mt.__index.sort = function (arg) + table.sort(arg) + return arg + end + mt.__index.dedupe = function (arg) + local res, hash = {}, {} + for _,v in ipairs(arg) do + if not hash[v] then + hash[v] = true + table.insert(res, v) + end + end + return exports.wrap(res) + end + mt.__index.contains = function (arg, value) + for _,v in ipairs(arg) do + if v == value then return true, _ end + end + return false + end + + return setmetatable(tbl, mt) +end + +return exports diff --git a/pip.lua b/pip.lua new file mode 100644 index 0000000..41bb448 --- /dev/null +++ b/pip.lua @@ -0,0 +1,240 @@ +-- -*- coding: utf-8 -*- +-- preamble: common routines + +local matchers = require("matchers") +local w = require("tables").wrap + +local parser = clink.arg.new_parser + +local function pip_libs_list(token) + local handle = io.popen('python -c "from distutils.sysconfig import get_python_lib; print(get_python_lib())"') + local python_lib_path = handle:read("*a") + handle:close() + + -- trim spaces + python_lib_path = python_lib_path:gsub("^%s*(.-)%s*$", "%1") + + local finder = matchers.create_files_matcher(python_lib_path .. 
"\\*.dist-info")
+
+    local list = w(finder(token))
+
+    list =
+        list:map(
+        function(package)
+            package = package:gsub("-[%d%.]+dist%-info$", "")
+            return package
+        end
+    )
+
+    return list
+end
+
+local pip_default_flags = {
+    "--help",
+    "-h",
+    "--isolated",
+    "--verbose",
+    "-v",
+    "--version",
+    "-V",
+    "--quiet",
+    "-q",
+    "--log",
+    "--proxy",
+    "--retries",
+    "--timeout",
+    "--exists-action",
+    "--trusted-host",
+    "--cert",
+    "--client-cert",
+    "--cache-dir",
+    "--no-cache-dir",
+    "--disable-pip-version-check",
+    "--no-color"
+}
+
+local pip_requirement_flags = {
+    "--requirement" .. parser({clink.matches_are_files}),
+    "-r" .. parser({clink.matches_are_files})
+}
+
+local pip_index_flags = {
+    "--index-url",
+    "-i",
+    "--extra-index-url",
+    "--no-index",
+    "--find-links",
+    "-f"
+}
+
+local pip_install_download_wheel_flags = {
+    pip_requirement_flags,
+    "--no-binary",
+    "--only-binary",
+    "--prefer-binary",
+    "--no-build-isolation",
+    "--use-pep517",
+    "--constraint",
+    "-c",
+    "--src",
+    "--no-deps",
+    "--progress-bar" .. parser({"off", "on", "ascii", "pretty", "emoji"}),
+    "--global-option",
+    "--pre",
+    "--no-clean",
+    "--require-hashes"
+}
+
+local pip_install_download_flags = {
+    pip_install_download_wheel_flags,
+    "--platform",
+    "--python-version",
+    "--implementation" .. parser({"pp", "jy", "cp", "ip"}),
+    "--abi"
+}
+
+local pip_install_parser =
+    parser(
+    {},
+    "--editable",
+    "-e",
+    "--target",
+    "-t",
+    "--user",
+    "--root",
+    "--prefix",
+    "--build",
+    "-b",
+    "--upgrade",
+    "-U",
+    "--upgrade-strategy" .. 
parser({"eager", "only-if-needed"}), + "--force-reinstall", + "--ignore-installed", + "-I", + "--ignore-requires-python", + "--install-option", + "--compile", + "--no-compile", + "--no-warn-script-location", + "--no-warn-conflicts" +):loop(1) +pip_install_parser:add_flags(pip_install_download_flags) +pip_install_parser:add_flags(pip_index_flags) +pip_install_parser:add_flags(pip_default_flags) + +local pip_download_parser = parser({}, "--build", "-b", "--dest", "-d"):loop(1) +pip_download_parser:add_flags(pip_install_download_flags) +pip_download_parser:add_flags(pip_index_flags) +pip_download_parser:add_flags(pip_default_flags) + +local pip_uninstall_parser = + parser({pip_libs_list}, "--yes", "-y"):add_flags(pip_default_flags, pip_requirement_flags):loop(1) + +local pip_freeze_parser = parser({}, "--find-links", "--local", "-l", "--user", "--all", "--exclude-editable") +pip_freeze_parser:add_flags(pip_default_flags, pip_requirement_flags) + +local pip_list_parser = + parser( + {}, + "--outdated", + "-o", + "--uptodate", + "-u", + "--editable", + "-e", + "--local", + "-l", + "--user", + "--pre", + "--format" .. parser({"columns", "freeze", "json"}), + "--not-required", + "--exclude-editable", + "--include-editable" +) +pip_list_parser:add_flags(pip_default_flags) + +local pip_config_parser = + parser( + { + "list", + "edit", + "get", + "set", + "unset" + }, + "--editor", + "--global", + "--user", + "--venv", + pip_default_flags +) +pip_config_parser:add_flags(pip_default_flags) + +local pip_search_parser = parser({}, "--index", "-i"):add_flags(pip_default_flags) + +local pip_wheel_parser = + parser( + {}, + "--wheel-dir", + "-w", + "--build-option", + "--editable", + "-e", + "--ignore-requires-python", + "--build", + "-b" +):loop(1) +pip_wheel_parser:add_flags(pip_install_download_flags) +pip_wheel_parser:add_flags(pip_index_flags) +pip_wheel_parser:add_flags(pip_default_flags) + +local pip_hash_parser = + parser( + {}, + "--algorithm" .. 
parser({"sha256", "sha384", "sha512"}), + "-a" .. parser({"sha256", "sha384", "sha512"}), + pip_default_flags +) +pip_hash_parser:add_flags(pip_default_flags) + +local pip_completion_parser = parser({}, "--bash", "-b", "--zsh", "-z", "--fish", "-f"):add_flags(pip_default_flags) + +local pip_help_parser = + parser( + { + "install", + "download", + "uninstall", + "freeze", + "list", + "show", + "config", + "search", + "wheel", + "hash", + "completion", + "help" + } +) +pip_help_parser:add_flags(pip_default_flags) + +local pip_parser = + parser( + { + "install" .. pip_install_parser, + "download" .. pip_download_parser, + "uninstall" .. pip_uninstall_parser, + "freeze" .. pip_freeze_parser, + "list" .. pip_list_parser, + "show" .. parser({pip_libs_list}, pip_default_flags), + "config" .. pip_config_parser, + "search" .. pip_search_parser, + "wheel" .. pip_wheel_parser, + "hash" .. pip_hash_parser, + "completion" .. pip_completion_parser, + "help" .. pip_help_parser + } +) +pip_parser:add_flags(pip_default_flags) + +clink.arg.register_parser("pip", pip_parser) diff --git a/scoop.lua b/scoop.lua new file mode 100644 index 0000000..25958b6 --- /dev/null +++ b/scoop.lua @@ -0,0 +1,343 @@ +-- -*- coding: utf-8 -*- +-- preamble: common routines + +local JSON = require("JSON") + +local matchers = require("matchers") +local path = require("path") +local w = require("tables").wrap +local concat = require("funclib").concat + +local parser = clink.arg.new_parser +local profile = os.getenv("home") or os.getenv("USERPROFILE") + +local function scoop_folder() + local folder = os.getenv("SCOOP") + + if not folder then + folder = profile .. "\\scoop" + end + + return folder +end + +local function scoop_global_folder() + local folder = os.getenv("SCOOP_GLOBAL") + + if not folder then + folder = os.getenv("ProgramData") .. "\\scoop" + end + + return folder +end + +local function scoop_load_config() -- luacheck: no unused args + local file = io.open(profile .. 
"\\.config\\scoop\\config.json") + -- If there is no such file, then close handle and return + if file == nil then + return w() + end + + -- Read the whole file contents + local contents = file:read("*a") + file:close() + + -- strip UTF-8-BOM + local utf8_len = contents:len() + local pat_start, _ = string.find(contents, "{") + contents = contents:sub(pat_start, utf8_len) + + local data = JSON:decode(contents) + + if data == nil then + return w() + end + + return data +end + +local function scoop_alias_list(token) -- luacheck: no unused args + local data = scoop_load_config() + + return w(data.alias):keys() +end + +local function scoop_config_list(token) -- luacheck: no unused args + local data = scoop_load_config() + + return w(data):keys() +end + +local function scoop_bucket_known_list(token) -- luacheck: no unused args + local file = io.open(scoop_folder() .. "\\apps\\scoop\\current\\buckets.json") + -- If there is no such file, then close handle and return + if file == nil then + return w() + end + + -- Read the whole file contents + local contents = file:read("*a") + file:close() + + local data = JSON:decode(contents) + + return w(data):keys() +end + +local function scoop_bucket_list(token) + local finder = matchers.create_files_matcher(scoop_folder() .. "\\buckets\\*") + + local list = finder(token) + + return list:filter(path.is_real_dir) +end + +local function scoop_apps_list(token) + local folders = {scoop_folder(), scoop_global_folder()} + + local list = w() + for _, folder in pairs(folders) do + local finder = matchers.create_files_matcher(folder .. "\\apps\\*") + + local new_list = finder(token) + list = w(concat(list, new_list)) + end + + return list:filter(path.is_real_dir) +end + +local function scoop_available_apps_list(token) + -- search in default bucket + local finder = matchers.create_files_matcher(scoop_folder() .. 
"\\apps\\scoop\\current\\bucket\\*.json")
+    local list = finder(token)
+
+    -- search in each installed bucket
+    local buckets = scoop_bucket_list("")
+    for _, bucket in pairs(buckets) do
+        local bucket_folder = scoop_folder() .. "\\buckets\\" .. bucket
+
+        -- check the bucket folder exists
+        if clink.is_dir(bucket_folder .. "\\bucket") then
+            bucket_folder = bucket_folder .. "\\bucket"
+        end
+
+        local b_finder = matchers.create_files_matcher(bucket_folder .. "\\*.json")
+        local b_list = b_finder(token)
+        list = w(concat(list, b_list))
+    end
+
+    -- remove ".json" of file name; the dot must be escaped and the pattern
+    -- anchored, otherwise gsub mangles names merely containing "json"
+    -- (e.g. "myjson-tool.json" -> "m-tool" with the unescaped pattern)
+    for k, v in pairs(list) do
+        list[k] = v:gsub("%.json$", "")
+    end
+
+    return list
+end
+
+local function scoop_cache_apps_list(token)
+    local cache_folder = os.getenv("SCOOP_CACHE")
+    if not cache_folder then
+        cache_folder = scoop_folder() .. "\\cache"
+    end
+
+    local finder = matchers.create_files_matcher(cache_folder .. "\\*")
+
+    local list = finder(token)
+    list = w(list:filter(path.is_real_dir))
+
+    -- get name before "#" from cache list (name#version#url)
+    for k, v in pairs(list) do
+        list[k] = v:gsub("#.*$", "")
+    end
+
+    return list
+end
+
+local scoop_default_flags = {
+    "--help",
+    "-h"
+}
+
+local scoop_alias_parser =
+    parser(
+    {
+        "add",
+        "list" .. parser("-v", "--verbose"),
+        "rm" .. parser({scoop_alias_list})
+    }
+)
+
+local scoop_bucket_parser =
+    parser(
+    {
+        "add" .. parser({scoop_bucket_known_list}),
+        "list",
+        "known",
+        "rm" .. parser({scoop_bucket_list})
+    }
+)
+
+local scoop_cache_parser =
+    parser(
+    {
+        "show" .. parser({scoop_cache_apps_list, scoop_apps_list, "*"}),
+        "rm" .. parser({scoop_cache_apps_list, "*"})
+    }
+)
+
+local scoop_cleanup_parser =
+    parser(
+    {
+        scoop_apps_list,
+        "*"
+    },
+    "--global",
+    "-g",
+    "--cache",
+    "-k"
+):loop(1)
+
+local scoop_config_parser =
+    parser(
+    {
+        "rm" .. parser({scoop_config_list}),
+        scoop_config_list,
+        "aria2-enabled" .. 
parser({"true", "false"}), + "aria2-max-connection-per-server", + "aria2-min-split-size", + "aria2-options", + "aria2-retry-wait", + "aria2-split", + "debug" .. parser({"true", "false"}), + "proxy", + "show_update_log" .. parser({"true", "false"}), + "virustotal_api_key" + } +) + +local scoop_uninstall_parser = + parser( + { + scoop_apps_list + }, + "--global", + "-g", + "--purge", + "-p" +):loop(1) + +local scoop_update_parser = + parser( + { + scoop_apps_list, + "*" + }, + "--force", + "-f", + "--global", + "-g", + "--independent", + "-i", + "--no-cache", + "-k", + "--skip", + "-s", + "--quiet", + "-q" +):loop(1) + +local scoop_install_parser = + parser( + {scoop_available_apps_list}, + "--global", + "-g", + "--independent", + "-i", + "--no-cache", + "-k", + "--skip", + "-s", + "--arch" .. parser({"32bit", "64bit"}), + "-a" .. parser({"32bit", "64bit"}) +):loop(1) + +local scoop_help_parser = + parser( + { + "alias", + "bucket", + "cache", + "checkup", + "cleanup", + "config", + "create", + "depends", + "export", + "help", + "home", + "hold", + "info", + "install", + "list", + "prefix", + "reset", + "search", + "status", + "unhold", + "uninstall", + "update", + "virustotal", + "which" + }, + "/?", + "--help", + "-h", + "--version" +) + +local scoop_parser = parser() +scoop_parser:set_flags(scoop_default_flags) +scoop_parser:set_arguments( + { + scoop_alias_list, + "alias" .. scoop_alias_parser, + "bucket" .. scoop_bucket_parser, + "cache" .. scoop_cache_parser, + "checkup", + "cleanup" .. scoop_cleanup_parser, + "config" .. scoop_config_parser, + "create", + "depends" .. + parser( + {scoop_available_apps_list, scoop_apps_list}, + "--arch" .. parser({"32bit", "64bit"}), + "-a" .. parser({"32bit", "64bit"}) + ), + "export", + "help" .. scoop_help_parser, + "hold" .. parser({scoop_apps_list}), + "home" .. parser({scoop_available_apps_list, scoop_apps_list}), + "info" .. parser({scoop_available_apps_list, scoop_apps_list}), + "install" .. 
scoop_install_parser, + "list", + "prefix" .. parser({scoop_apps_list}), + "reset" .. parser({scoop_apps_list}):loop(1), + "search", + "status", + "unhold" .. parser({scoop_apps_list}), + "uninstall" .. scoop_uninstall_parser, + "update" .. scoop_update_parser, + "virustotal" .. + parser( + {scoop_apps_list, "*"}, + "--arch" .. parser({"32bit", "64bit"}), + "-a" .. parser({"32bit", "64bit"}), + "--scan", + "-s", + "--no-depends", + "-n" + ):loop(1), + "which" + } +) +clink.arg.register_parser("scoop", scoop_parser) diff --git a/ssh.lua b/ssh.lua new file mode 100644 index 0000000..fe93f10 --- /dev/null +++ b/ssh.lua @@ -0,0 +1,39 @@ +local w = require('tables').wrap +local parser = clink.arg.new_parser + +local function read_lines (filename) + local lines = w({}) + local f = io.open(filename) + if not f then return lines end + + for line in f:lines() do table.insert(lines, line) end + + f:close() + return lines +end + +-- read all Host entries in the user's ssh config file +local function list_ssh_hosts() + return read_lines(clink.get_env("userprofile") .. "/.ssh/config") + :map(function (line) + return line:match('^Host%s+(.*)$') + end) + :filter() +end + +local function list_known_hosts() + return read_lines(clink.get_env("userprofile") .. "/.ssh/known_hosts") + :map(function (line) + return line:match('^([%w-.]*).*') + end) + :filter() +end + +local hosts = function (token) -- luacheck: no unused args + return list_ssh_hosts() + :concat(list_known_hosts()) +end + +local ssh_hosts_parser = parser({hosts}) + +clink.arg.register_parser("ssh", ssh_hosts_parser)