"pycomplete.vim - Omni Completion for python
" Maintainer: Aaron Griffin
" Version: 0.2
" Last Updated: 5 January 2006
"
" TODO
" * local variables *inside* class members
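"
" Usage: as an autoload script this file belongs in an 'autoload' directory
" on 'runtimepath'. Enable it per buffer (e.g. from a Python ftplugin or by
" hand) with ':setlocal omnifunc=pycomplete#Complete', then trigger
" completion in Insert mode with CTRL-X CTRL-O.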

if !has('python')
    echo "Error: Requires Vim compiled with +python"
    finish
endif

function! pycomplete#Complete(findstart, base)
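    " Vim's 'omnifunc' protocol calls this function twice: first with
    " a:findstart set, to locate the column where the completion text starts,
    " then with a:findstart == 0 and the typed text in a:base to collect the
    " actual matches.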
    "findstart = 1 when we need to get the text length
    if a:findstart
        let line = getline('.')
        let idx = col('.')
        while idx > 0
            let idx -= 1
            let c = line[idx-1]
            if c =~ '\w'
                continue
            elseif ! c =~ '\.'
                let idx = -1
                break
            else
                break
            endif
        endwhile

        return idx
    "findstart = 0 when we need to return the list of completions
    else
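        " get_completions() below stores its result in the global
        " g:pycomplete_completions, which is what we hand back to Vim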
        execute "python get_completions('" . a:base . "')"
        return g:pycomplete_completions
    endif
endfunction

function! s:DefPython()
python << PYTHONEOF
import vim
import sys
import __builtin__

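# LOCALDEFS lists every name this script defines itself; clean_up() removes
# everything else from globals() so symbols pulled in from the edited buffer
# do not leak between completions.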
LOCALDEFS = \
    ['LOCALDEFS', 'clean_up', 'eval_source_code', \
     'get_completions', '__builtin__', '__builtins__', \
     'dbg', '__name__', 'vim', 'sys']
#comment/uncomment one line at a time to enable/disable debugging
def dbg(msg):
    pass
#    print(msg)

#it seems that by this point, vim has already stripped the base
# matched in the findstart=1 section, so we will create the
# statement from scratch
def get_completions(base):
    stmt = vim.eval('expand("<cWORD>")')+base
    dbg("parsed statement => %s" % stmt)
    eval_source_code()
    try:
        dbg("eval: %s" % stmt)
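        # a bare name is matched against our globals plus __builtin__;
        # a dotted statement such as 'os.pa' is split at the last dot,
        # everything before the dot is eval'd and its attributes are
        # matched against the trailing fragment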
        if len(stmt.split('.')) == 1:
            all = globals().keys() + dir(__builtin__)
            match = stmt
        else:
            rindex = stmt.rfind('.')
            all = dir(eval(stmt[:rindex]))
            match = stmt[rindex+1:]

        completions = []
        dbg("match == %s" % match)
        for m in all:
            #TODO: remove private (_foo) functions?
            if m.find('__') != 0 and \
               m.find(match) == 0 and \
               m not in LOCALDEFS:
                dbg("matched... %s, %s" % (m, m.find(match)))
                completions.append(m)
        dbg("all completions: %s" % completions)
        vim.command("let g:pycomplete_completions = %s" % completions)
    except:
        dbg("exception: %s" % sys.exc_info()[1])
        vim.command("let g:pycomplete_completions = []")
    clean_up()

#yes, this is a quasi-functional python lexer
def eval_source_code():
    import tokenize
    import keyword
    import StringIO
    s = StringIO.StringIO('\n'.join(vim.current.buffer[:]) + '\n')
    g = tokenize.generate_tokens(s.readline)
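    # walk the token stream and rebuild a harmless skeleton of the buffer:
    # import statements are kept as-is, classes become stubs whose method
    # bodies are just 'pass', and simple assignments are reduced to a
    # guessed type; the collected statements are exec'd into globals() below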

    stmts = []
    lineNo = 0
    try:
        for type, str, begin, end, line in g:
            if begin[0] == lineNo:
                continue
            #junk
            elif type == tokenize.INDENT or \
                 type == tokenize.DEDENT or \
                 type == tokenize.ERRORTOKEN or \
                 type == tokenize.ENDMARKER or \
                 type == tokenize.NEWLINE:
                continue
            #import statement
            elif str == 'import':
                for type, str, begin, end, line in g:
                    if str == ';' or type == tokenize.NEWLINE: break
                    dbg("found [import %s]" % str)
                    stmts.append("import %s" % str)
            #import from statement
            elif str == 'from':
                type, str, begin, end, line = g.next()
                mod = str

                type, str, begin, end, line = g.next()
                if str != "import": break
                mem = ''
                for type, str, begin, end, line in g:
                    if str == ';' or type == tokenize.NEWLINE: break
                    mem += (str + ',')
                if len(mem) > 0:
                    dbg("found [from %s import %s]" % (mod, mem[:-1]))
                    stmts.append("from %s import %s" % (mod, mem[:-1]))
            #class declaration
            elif str == 'class':
                type, str, begin, end, line = g.next()
                classname = str
                dbg("found [class %s]" % classname)

                level = 0
                members = []
                #we don't care about the meat of the members,
                # only the signatures, so we'll replace the bodies
                # with 'pass' for evaluation
                for type, str, begin, end, line in g:
                    if type == tokenize.INDENT:
                        level += 1
                    elif type == tokenize.DEDENT:
                        level -= 1
                        if level == 0: break
                    elif str == 'def':
                        #TODO: if name begins with '_', keep private
                        memberstr = ''
                        for type, str, begin, end, line in g:
                            if str == ':': break
                            memberstr += str
                        dbg("  member [%s]" % memberstr)
                        members.append(memberstr)
                    #TODO parse self.blah = something lines
                    #elif str == "self" && next && str == "." ...blah...
                classstr = 'class %s:' % classname
                for m in members:
                    classstr += ("\n    def %s:\n        pass" % m)
                stmts.append("%s\n" % classstr)
            elif keyword.iskeyword(str) or str in globals():
                dbg("keyword = %s" % str)
                lineNo = begin[0]
            else:
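                #plain assignment: look at the first token on the right-hand
                # side and record a rough type for the variable so that
                # dir() on it later yields something useful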
                if line.find("=") == -1: continue
                var = str
                type, str, begin, end, line = g.next()
                dbg('next = %s' % str)
                if str != '=': continue

                type, str, begin, end, line = g.next()
                if type == tokenize.NEWLINE:
                    continue
                elif type == tokenize.STRING or str == 'str':
                    stmts.append('%s = str' % var)
                elif str == '[' or str == 'list':
                    stmts.append('%s = list' % var)
                elif str == '{' or str == 'dict':
                    stmts.append('%s = dict' % var)
                elif type == tokenize.NUMBER:
                    continue
                elif str == 'Set':
                    stmts.append('%s = Set' % var)
                elif str == 'open' or str == 'file':
                    stmts.append('%s = file' % var)
                else:
                    inst = str
                    for type, str, begin, end, line in g:
                        if type == tokenize.NEWLINE:
                            break
                        inst += str
                    if len(inst) > 0:
                        dbg("found [%s = %s]" % (var, inst))
                        stmts.append('%s = %s' % (var, inst))
                lineNo = begin[0]
        for s in stmts:
            try:
                dbg("evaluating: %s\n" % s)
                exec(s) in globals()
            except:
                pass
    except:
        dbg("exception: %s" % sys.exc_info()[1])

def clean_up():
    for o in globals().keys():
        if o not in LOCALDEFS:
            try:
                exec('del %s' % o) in globals()
            except: pass

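# make Vim's current working directory and its parent importable so that
# local modules referenced by the buffer have a chance to resolve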
sys.path.extend(['.','..'])
PYTHONEOF
endfunction

call s:DefPython()
" vim: set et ts=4: