wiki

  1
  2
  3
  4
  5
  6
  7
  8
  9
 10
 11
 12
 13
 14
 15
 16
 17
 18
 19
 20
 21
 22
 23
 24
 25
 26
 27
 28
 29
 30
 31
 32
 33
 34
 35
 36
 37
 38
 39
 40
 41
 42
 43
 44
 45
 46
 47
 48
 49
 50
 51
 52
 53
 54
 55
 56
 57
 58
 59
 60
 61
 62
 63
 64
 65
 66
 67
 68
 69
 70
 71
 72
 73
 74
 75
 76
 77
 78
 79
 80
 81
 82
 83
 84
 85
 86
 87
 88
 89
 90
 91
 92
 93
 94
 95
 96
 97
 98
 99
100
101
102
103
# -*- coding: utf-8 -*-
import re

from tekisuto import BaseLexer, BaseDirective, Token
# Sample wiki-markup document exercising every directive defined in this
# module (bold, italic, nesting, and both link forms); rendered by the
# __main__ demo at the bottom of the file.
simple = u"""
Simple input:
*bold*
//italics//. And longer
*really bold*
* 1 *2* 3 *
//and italic//
//*bold and italic*//
*//italic and bold//*
[[url]]
[[url|name]]
"""
def ioken(data):
    """Shorthand for wrapping *data* in an ``html/token`` Token."""
    return Token('html/token', data)
class SimpleDirective(BaseDirective):
    """Generic paired-delimiter directive driven by class attributes.

    Subclasses supply ``name``, ``tag`` and the start/end patterns.
    When ``escape`` is true the patterns are treated as literal text
    and regex-escaped once at construction time.
    """
    start_pattern = None
    end_pattern = None
    escape = True
    name = None
    tag = None

    def __init__(self, lexer):
        super(SimpleDirective, self).__init__(lexer)
        if self.escape:
            # Escaped copies land on the instance; the class attributes
            # keep the raw literal text.
            self.start_pattern = re.escape(self.start_pattern)
            self.end_pattern = re.escape(self.end_pattern)

    def start_directive(self, ctx):
        """Open the directive wherever the start pattern matches."""
        return self.lexer.match(self.start_pattern) is not None

    def end_directive(self, ctx):
        """Close the directive wherever the end pattern matches."""
        return self.lexer.match(self.end_pattern) is not None

    def process(self, stream, ctx):
        """Bracket the inner token stream with name/start .. name/end."""
        yield Token('%s/start' % self.name, self.tag)
        for inner in stream:
            yield inner
        yield Token('%s/end' % self.name, self.tag)
class Italic(BaseDirective):
    """``//text//`` -> ``<em>text</em>``."""

    def start_directive(self, ctx):
        return self.lexer.match(r"//") is not None

    def end_directive(self, ctx):
        return self.lexer.match(r"//") is not None

    def process(self, stream, ctx):
        # Emit the opening tag, pass the inner tokens through untouched,
        # then close the element.
        yield ioken('<em>')
        for inner in stream:
            yield inner
        yield ioken('</em>')
class Strong(BaseDirective):
    """``*text*`` -> ``<strong>text</strong>``."""

    def start_directive(self, ctx):
        return self.lexer.match(r"\*") is not None

    def end_directive(self, ctx):
        return self.lexer.match(r"\*") is not None

    def process(self, stream, ctx):
        # Emit the opening tag, pass the inner tokens through untouched,
        # then close the element.
        yield ioken('<strong>')
        for inner in stream:
            yield inner
        yield ioken('</strong>')
class Url(BaseDirective):
    """``[[url]]`` or ``[[url|name]]`` -> a single HTML anchor token.

    The whole bracketed expression is consumed by ``start_directive``;
    ``process`` then emits one anchor token built from the context and
    wraps no inner stream.
    """

    def start_directive(self, ctx):
        m = self.lexer.match(r'\[\[(.*?)\]\]')
        if not m:
            return False
        # Split at the FIRST '|' only, so the link text may itself
        # contain '|'.  The previous split('|') raised ValueError for
        # more than one pipe and silently fell back to using the entire
        # match as both the url and the name.
        target, sep, label = m.group(1).partition('|')
        ctx['url'] = target
        ctx['name'] = label if sep else target
        return True

    def process(self, stream, ctx):
        yield ioken('<a href="%s">%s</a>' % (ctx['url'], ctx['name']))
class WikiLexer(BaseLexer):
    # Directives the lexer tries while scanning input.
    # NOTE(review): presumably tried in list order with first match
    # winning — confirm against BaseLexer.
    directives = [Strong, Italic, Url]
    # Token name given to plain text found between directives.
    text_token_name = 'wiki/text'
if __name__ == '__main__':
print simple
print '------------'
print ''.join([token.data for token in WikiLexer(simple)])
#for token in WikiLexer(simple):
# print token