path: root/Data/Libraries/LDoc/ldoc/prettify.lua
-- Making Lua source code look pretty.
-- A simple scanner-based prettifier, which scans comments for @{ref} and code
-- for known modules and functions.
-- A module reference to an example `test-fun.lua` would look like
-- `@{example:test-fun}`.
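
-- Minimal usage sketch (illustrative only; assumes the module is loaded as
-- `ldoc.prettify` and that `source` already holds the text of a Lua file):
--
--    local prettify = require 'ldoc.prettify'
--    prettify.set_prettifier 'lxsh'   -- optional; falls back to the built-in scanner
--    local html = prettify.code('lua', 'example.lua', source, 0, true)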
local List = require 'pl.List'
local tablex = require 'pl.tablex'
local globals = require 'ldoc.builtin.globals'
local prettify = {}

local user_keywords = {}

local escaped_chars = {
   ['&'] = '&amp;',
   ['<'] = '&lt;',
   ['>'] = '&gt;',
}
local escape_pat = '[&<>]'

local function escape(str)
   return (str:gsub(escape_pat,escaped_chars))
end

local function span(t,val)
   return ('<span class="%s">%s</span>'):format(t,val)
end

local spans = {keyword=true,number=true,string=true,comment=true,global=true,backtick=true}

local cpp_lang = {C = true, c = true, cpp = true, cxx = true, h = true}

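--- Prettify source with the built-in scanner.
-- Uses the bundled lexer: C-like extensions (C, c, cpp, cxx, h) get the C
-- tokenizer, everything else the Lua tokenizer.
-- @param lang language hint used to pick the tokenizer
-- @param fname file name, used when reporting reference warnings
-- @param code the source text to highlight
-- @param initial_lineno offset added to reported line numbers (defaults to 0)
-- @param pre if truthy, wrap the result in a <pre> block
-- @param linenos optional sorted list of line numbers to mark with <a> anchors
-- @return the highlighted HTML, or nil and an error message for an empty file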
function prettify.lua (lang, fname, code, initial_lineno, pre, linenos)
   local res, lexer = List(), require 'ldoc.lexer'
   local tokenizer
   local ik = 1
   if not cpp_lang[lang] then
      tokenizer = lexer.lua
   else
      tokenizer = lexer.cpp
   end

   if pre then
      res:append '<pre>\n'
   end
   initial_lineno = initial_lineno or 0

   local tok = tokenizer(code,{},{})
   local error_reporter = {
      warning = function (self,msg)
         io.stderr:write(fname..':'..tok:lineno()+initial_lineno..': '..msg,'\n')
      end
   }
   local last_t, last_val
   local t,val = tok()
   if not t then return nil,"empty file" end
   while t do
      val = escape(val)
      if linenos and tok:lineno() == linenos[ik] then
         res:append('<a id="'..linenos[ik]..'"></a>')
         ik = ik + 1
      end
      if globals.functions[val] or globals.tables[val] then
         t = 'global'
      end
      if user_keywords[val] then
        res:append(span('user-keyword keyword-' .. val,val))
      elseif spans[t] then
         if t == 'comment' or t == 'backtick' then -- may contain @{ref} or `..`
            val = prettify.resolve_inline_references(val,error_reporter)
         end
         res:append(span(t,val))
      else
         res:append(val)
      end
      last_t, last_val = t,val
      t,val = tok()
   end
   if last_t == 'comment' then
      res[#res] = span('comment',last_val:gsub('\r*\n$',''))
   end
   local last = res[#res]
   if last:match '\n$' then
      res[#res] = last:gsub('\n+','')
   end
   if pre then
      res:append '</pre>\n'
   end
   return res:join ()
end

local lxsh

local lxsh_highlighters = {bib=true,c=true,lua=true,sh=true}

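--- Prettify a code snippet, using lxsh when it has been enabled.
-- Without lxsh this falls back to the built-in scanner; with lxsh, languages
-- it does not know are highlighted as Lua.
-- @param lang language name
-- @param fname file name, passed on for warning messages
-- @param code the source text to highlight
-- @param initial_lineno offset added to reported line numbers
-- @param pre if truthy, keep the surrounding <pre> block
-- @return the highlighted HTML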
function prettify.code (lang,fname,code,initial_lineno,pre)
   if not lxsh then
      return prettify.lua (lang,fname, code, initial_lineno, pre)
   else
      if not lxsh_highlighters[lang] then
         lang = 'lua'
      end
      code = lxsh.highlighters[lang](code, {
         formatter = lxsh.formatters.html,
         external = true
      })
      if not pre then
         code = code:gsub("^<pre.->(.-)%s*</pre>$", '%1')
      end
      return code
   end
end

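--- Choose the external prettifier.
-- Only 'lxsh' is recognized; if the lxsh module cannot be loaded, a message is
-- printed and the built-in Lua prettifier is used instead.
-- @param pretty name of the prettifier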
function prettify.set_prettifier (pretty)
   local ok
   if pretty == 'lxsh' then
      ok,lxsh = pcall(require,'lxsh')
      if not ok then
         print('pretty: '..pretty..' not found, using built-in Lua')
         lxsh = nil
      end
   end
end

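--- Register extra identifiers to highlight as keywords.
-- Each occurrence is wrapped in a span with the classes 'user-keyword' and
-- 'keyword-<name>'.
-- @param keywords an optional list of identifier names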
function prettify.set_user_keywords(keywords)
  if keywords then
    user_keywords = tablex.makeset(keywords)
  end
end

return prettify