1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
import re

from trac.cache import cached
from trac.config import BoolOption, ListOption
from trac.core import *
from trac.resource import IResourceManager
from trac.util.html import is_safe_origin, tag
from trac.util.text import unquote_label
from trac.util.translation import _
from trac.wiki.parser import WikiParser
32 """Components that want to get notified about the creation,
33 deletion and modification of wiki pages should implement that
34 interface.
35 """
36
38 """Called whenever a new Wiki page is added."""
39
40 - def wiki_page_changed(page, version, t, comment, author):
41 """Called when a page has been modified."""
42
44 """Called when a page has been deleted."""
45
47 """Called when a version of a page has been deleted."""
48
49 - def wiki_page_renamed(page, old_name):
50 """Called when a page has been renamed."""
51
53 """Called when a page comment has been modified."""
54
55
class IWikiPageManipulator(Interface):
    """Components that need to do specific pre- and post- processing of
    wiki page changes have to implement this interface.

    Unlike change listeners, a manipulator can reject changes being
    committed to the database.
    """

    def prepare_wiki_page(req, page, fields):
        """Validate a wiki page before rendering it.

        :param page: is the `WikiPage` being viewed.

        :param fields: is a dictionary which contains the wiki `text`
            of the page, initially identical to `page.text` but it can
            eventually be transformed in place before being used as
            input to the formatter.
        """

    def validate_wiki_page(req, page):
        """Validate a wiki page after it's been populated from user input.

        :param page: is the `WikiPage` being edited.

        :return: a list of `(field, message)` tuples, one for each
            problem detected. `field` can be `None` to indicate an
            overall problem with the page. Therefore, a return value of
            `[]` means everything is OK.
        """
85
88 """Augment the Wiki markup with new Wiki macros.
89
90 .. versionchanged :: 0.12
91 new Wiki processors can also be added that way.
92 """
93
95 """Return an iterable that provides the names of the provided macros.
96 """
97
99 """Return a tuple of a domain name to translate and plain text
100 description of the macro or only the description with the specified
101 name.
102
103 .. versionchanged :: 1.0
104 `get_macro_description` can return a domain to translate the
105 description.
106
107 .. versionchanged :: 1.3.6
108 the macro will be hidden from the macro index (`[[MacroList]]`)
109 if `None` is returned.
110 """
111
113 """Return `True` if the content generated is an inline XHTML element.
114
115 .. versionadded :: 1.0
116 """
117
119 """Called by the formatter when rendering the parsed wiki text.
120
121 .. versionadded:: 0.11
122
123 .. versionchanged:: 0.12
124 added the `args` parameter
125
126 :param formatter: the wiki `Formatter` currently processing
127 the wiki markup
128
129 :param name: is the name by which the macro has been called;
130 remember that via `get_macros`, multiple names could be
131 associated to this macros. Note that the macro names are
132 case sensitive.
133
134 :param content: is the content of the macro call. When called
135 using macro syntax (`[[Macro(content)]]`), this is the
136 string contained between parentheses, usually containing
137 macro arguments. When called using wiki processor syntax
138 (`{{{!#Macro ...}}}`), it is the content of the processor
139 block, that is, the text starting on the line following the
140 macro name.
141
142 :param args: will be a dictionary containing the named
143 parameters passed when using the Wiki processor syntax.
144
145 The named parameters can be specified when calling the macro
146 using the wiki processor syntax::
147
148 {{{#!Macro arg1=value1 arg2="value 2"`
149 ... some content ...
150 }}}
151
152 In this example, `args` will be
153 `{'arg1': 'value1', 'arg2': 'value 2'}`
154 and `content` will be `"... some content ..."`.
155
156 If no named parameters are given like in::
157
158 {{{#!Macro
159 ...
160 }}}
161
162 then `args` will be `{}`. That makes it possible to
163 differentiate the above situation from a call
164 made using the macro syntax::
165
166 [[Macro(arg1=value1, arg2="value 2", ... some content...)]]
167
168 in which case `args` will always be `None`. Here `content`
169 will be the
170 `"arg1=value1, arg2="value 2", ... some content..."` string.
171 If like in this example, `content` is expected to contain
172 some arguments and named parameters, one can use the
173 `parse_args` function to conveniently extract them.
174 """
175
178 """Enrich the Wiki syntax with new markup."""
179
181 """Return an iterable that provides additional wiki syntax.
182
183 Additional wiki syntax correspond to a pair of `(regexp, cb)`,
184 the `regexp` for the additional syntax and the callback `cb`
185 which will be called if there's a match. That function is of
186 the form `cb(formatter, ns, match)`.
187 """
188
190 """Return an iterable over `(namespace, formatter)` tuples.
191
192 Each formatter should be a function of the form::
193
194 def format(formatter, ns, target, label, fullmatch=None):
195 pass
196
197 and should return some HTML fragment. The `label` is already
198 HTML escaped, whereas the `target` is not. The `fullmatch`
199 argument is optional, and is bound to the regexp match object
200 for the link.
201 """
202
204 """Utility for parsing macro "content" and splitting them into arguments.
205
206 The content is split along commas, unless they are escaped with a
207 backquote (see example below).
208
209 :param args: a string containing macros arguments
210 :param strict: if `True`, only Python-like identifiers will be
211 recognized as keyword arguments
212
213 Example usage::
214
215 >>> parse_args('')
216 ([], {})
217 >>> parse_args('Some text')
218 (['Some text'], {})
219 >>> parse_args('Some text, mode= 3, some other arg\, with a comma.')
220 (['Some text', ' some other arg, with a comma.'], {'mode': ' 3'})
221 >>> parse_args('milestone=milestone1,status!=closed', strict=False)
222 ([], {'status!': 'closed', 'milestone': 'milestone1'})
223
224 """
225 largs, kwargs = [], {}
226 if args:
227 for arg in re.split(r'(?<!\\),', args):
228 arg = arg.replace(r'\,', ',')
229 if strict:
230 m = re.match(r'\s*[a-zA-Z_]\w+=', arg)
231 else:
232 m = re.match(r'\s*[^=]+=', arg)
233 if m:
234 kw = arg[:m.end()-1].strip()
235 if strict:
236 kw = unicode(kw).encode('utf-8')
237 kwargs[kw] = arg[m.end():]
238 else:
239 largs.append(arg)
240 return largs, kwargs
241
242
def validate_page_name(pagename):
    """Utility for validating wiki page name.

    A name is valid when it is non-empty and none of its `/`-separated
    parts is empty, `.` or `..`.

    :param pagename: wiki page name to validate
    """
    return pagename and \
           all(part not in ('', '.', '..') for part in pagename.split('/'))
250
253 """Wiki system manager."""
254
255 implements(IResourceManager, IWikiSyntaxProvider)
256
257 change_listeners = ExtensionPoint(IWikiChangeListener)
258 macro_providers = ExtensionPoint(IWikiMacroProvider)
259 syntax_providers = ExtensionPoint(IWikiSyntaxProvider)
260
261 realm = 'wiki'
262 START_PAGE = 'WikiStart'
263 TITLE_INDEX_PAGE = 'TitleIndex'
264
265 ignore_missing_pages = BoolOption('wiki', 'ignore_missing_pages', 'false',
266 """Enable/disable highlighting CamelCase links to missing pages.
267 """)
268
269 split_page_names = BoolOption('wiki', 'split_page_names', 'false',
270 """Enable/disable splitting the WikiPageNames with space characters.
271 """)
272
273 render_unsafe_content = BoolOption('wiki', 'render_unsafe_content', 'false',
274 """Enable/disable the use of unsafe HTML tags such as `<script>` or
275 `<embed>` with the HTML [wiki:WikiProcessors WikiProcessor].
276
277 For public sites where anonymous users can edit the wiki it is
278 recommended to leave this option disabled.
279 """)
280
281 safe_schemes = ListOption('wiki', 'safe_schemes',
282 'cvs, file, ftp, git, irc, http, https, news, sftp, smb, ssh, svn, '
283 'svn+ssh',
284 doc="""List of URI schemes considered "safe", that will be rendered as
285 external links even if `[wiki] render_unsafe_content` is `false`.
286 """)
287
288 safe_origins = ListOption('wiki', 'safe_origins',
289 'data:',
290 doc="""List of URIs considered "safe cross-origin", that will be
291 rendered as `img` element without `crossorigin="anonymous"` attribute
292 or used in `url()` of inline style attribute even if
293 `[wiki] render_unsafe_content` is `false` (''since 1.0.15'').
294
295 To make any origins safe, specify "*" in the list.""")
296
297 @cached
299 """Return the names of all existing wiki pages."""
300 return {name for name,
301 in self.env.db_query("SELECT DISTINCT name FROM wiki")}
302
303
304
305 - def get_pages(self, prefix=None):
306 """Iterate over the names of existing Wiki pages.
307
308 :param prefix: if given, only names that start with that
309 prefix are included.
310 """
311 for page in self.pages:
312 if not prefix or page.startswith(prefix):
313 yield page
314
315 - def has_page(self, pagename):
316 """Whether a page with the specified name exists."""
317 return pagename.rstrip('/') in self.pages
318
321
323 """Resolves a pagename relative to a referrer pagename."""
324 if pagename.startswith(('./', '../')) or pagename in ('.', '..'):
325 return self._resolve_relative_name(pagename, referrer)
326 return pagename
327
328
329
330 XML_NAME = r"[\w:](?<!\d)(?:[\w:.-]*[\w-])?"
331
332
333
334 PAGE_SPLIT_RE = re.compile(r"([a-z])([A-Z])(?=[a-z])")
335
336 Lu = ''.join(unichr(c) for c in xrange(0x10000) if unichr(c).isupper())
337 Ll = ''.join(unichr(c) for c in xrange(0x10000) if unichr(c).islower())
338
343
345 """Create a label from a wiki target.
346
347 A trailing fragment and query string is stripped. Then, leading ./,
348 ../ and / elements are stripped, except when this would lead to an
349 empty label. Finally, if `split_page_names` is true, the label
350 is split accordingly.
351 """
352 label = target.split('#', 1)[0].split('?', 1)[0]
353 if not label:
354 return target
355 components = label.split('/')
356 for i, comp in enumerate(components):
357 if comp not in ('', '.', '..'):
358 label = '/'.join(components[i:])
359 break
360 return self.format_page_name(label)
361
363 wiki_page_name = (
364 r"(?:[%(upper)s](?:[%(lower)s])+/?){2,}"
365 r"(?:@[0-9]+)?"
366 r"(?:#%(xml)s)?"
367 r"(?=:(?:\Z|\s)|[^:\w%(upper)s%(lower)s]|\s|\Z)"
368
369 % {'upper': self.Lu, 'lower': self.Ll, 'xml': self.XML_NAME})
370
371
372 def wikipagename_link(formatter, match, fullmatch):
373 return self._format_link(formatter, 'wiki', match,
374 self.format_page_name(match),
375 self.ignore_missing_pages, match)
376
377
378
379 yield (r"!?(?<![\w/])(?:\.?\.?/)*"
380 + wiki_page_name, wikipagename_link)
381
382
383 def wikipagename_with_label_link(formatter, match, fullmatch):
384 page = fullmatch.group('wiki_page')
385 label = fullmatch.group('wiki_label')
386 return self._format_link(formatter, 'wiki', page, label.strip(),
387 self.ignore_missing_pages, match)
388 yield (r"!?\[(?P<wiki_page>%s)\s+(?P<wiki_label>%s|[^\]]+)\]"
389 % (wiki_page_name, WikiParser.QUOTED_STRING),
390 wikipagename_with_label_link)
391
392
393 def internal_free_link(fmt, m, fullmatch):
394 page = fullmatch.group('ifl_page')[1:-1]
395 label = fullmatch.group('ifl_label')
396 if label is None:
397 label = self.make_label_from_target(page)
398 return self._format_link(fmt, 'wiki', page, label.strip(), False)
399 yield (r"!?\[(?P<ifl_page>%s)(?:\s+(?P<ifl_label>%s|[^\]]+))?\]"
400 % (WikiParser.QUOTED_STRING, WikiParser.QUOTED_STRING),
401 internal_free_link)
402
404 def link_resolver(formatter, ns, target, label, fullmatch=None):
405 if fullmatch is not None:
406
407
408
409 groups = fullmatch.groupdict()
410 if groups.get('lns') and not groups.get('label'):
411 label = self.make_label_from_target(target)
412 return self._format_link(formatter, ns, target, label, False)
413 yield ('wiki', link_resolver)
414
453
455 base = referrer.split('/')
456 components = pagename.split('/')
457 for i, comp in enumerate(components):
458 if comp == '..':
459 if base:
460 base.pop()
461 elif comp != '.':
462 base.extend(components[i:])
463 break
464 return '/'.join(base)
465
467 referrer = referrer.split('/')
468 if len(referrer) == 1:
469 return pagename
470
471 for i in xrange(len(referrer) - 1, 0, -1):
472 name = '/'.join(referrer[:i]) + '/' + pagename
473 if self.has_page(name):
474 return name
475 if self.has_page(pagename):
476 return pagename
477
478
479
480 if '/' in pagename:
481 (first, rest) = pagename.split('/', 1)
482 for (i, part) in enumerate(referrer):
483 if first == part:
484 anchor = '/'.join(referrer[:i + 1])
485 if self.has_page(anchor):
486 return anchor + '/' + rest
487
488 return '/'.join(referrer[:-1]) + '/' + pagename
489
490
491
494
496 """
497 >>> from trac.test import EnvironmentStub
498 >>> from trac.resource import Resource, get_resource_description
499 >>> env = EnvironmentStub()
500 >>> main = Resource('wiki', 'WikiStart')
501 >>> get_resource_description(env, main)
502 'WikiStart'
503
504 >>> get_resource_description(env, main(version=3))
505 'WikiStart'
506
507 >>> get_resource_description(env, main(version=3), format='summary')
508 'WikiStart'
509
510 >>> env.config['wiki'].set('split_page_names', 'true')
511 >>> get_resource_description(env, main(version=3))
512 'Wiki Start'
513 """
514 return self.format_page_name(resource.id)
515
517 """
518 >>> from trac.test import EnvironmentStub
519 >>> from trac.resource import Resource, resource_exists
520 >>> env = EnvironmentStub()
521
522 >>> resource_exists(env, Resource('wiki', 'WikiStart'))
523 False
524
525 >>> from trac.wiki.model import WikiPage
526 >>> main = WikiPage(env, 'WikiStart')
527 >>> main.text = 'some content'
528 >>> main.save('author', 'no comment')
529 >>> resource_exists(env, main.resource)
530 True
531 """
532 if resource.version is None:
533 return resource.id in self.pages
534 return bool(self.env.db_query(
535 "SELECT name FROM wiki WHERE name=%s AND version=%s",
536 (resource.id, resource.version)))
537