gnunet-svn
[Top][All Lists]
Advanced

[Date Prev][Date Next][Thread Prev][Thread Next][Date Index][Thread Index]

[GNUnet-SVN] [taler-docs] 04/05: replace old extension with new one


From: gnunet
Subject: [GNUnet-SVN] [taler-docs] 04/05: replace old extension with new one
Date: Fri, 27 Sep 2019 00:55:12 +0200

This is an automated email from the git hooks/post-receive script.

dold pushed a commit to branch master
in repository docs.

commit a1ce922c1efc95e963ce7d2ff694de4e78b4a1c9
Author: Florian Dold <address@hidden>
AuthorDate: Fri Sep 27 00:54:16 2019 +0200

    replace old extension with new one
---
 _exts/tslex.py |  88 ---------------------
 _exts/tsref.py | 241 ---------------------------------------------------------
 conf.py        |   4 +-
 3 files changed, 2 insertions(+), 331 deletions(-)

diff --git a/_exts/tslex.py b/_exts/tslex.py
deleted file mode 100644
index 2be6f29..0000000
--- a/_exts/tslex.py
+++ /dev/null
@@ -1,88 +0,0 @@
-from pygments.token import *
-from pygments.lexer import RegexLexer, ExtendedRegexLexer, bygroups, using, \
-     include, this
-import re
-
-class BetterTypeScriptLexer(RegexLexer):
-    """
-    For `TypeScript <https://www.typescriptlang.org/>`_ source code.
-    """
-
-    name = 'TypeScript'
-    aliases = ['ts']
-    filenames = ['*.ts']
-    mimetypes = ['text/x-typescript']
-
-    flags = re.DOTALL
-    tokens = {
-        'commentsandwhitespace': [
-            (r'\s+', Text),
-            (r'<!--', Comment),
-            (r'//.*?\n', Comment.Single),
-            (r'/\*.*?\*/', Comment.Multiline)
-        ],
-        'slashstartsregex': [
-            include('commentsandwhitespace'),
-            (r'/(\\.|[^[/\\\n]|\[(\\.|[^\]\\\n])*])+/'
-             r'([gim]+\b|\B)', String.Regex, '#pop'),
-            (r'(?=/)', Text, ('#pop', 'badregex')),
-            (r'', Text, '#pop')
-        ],
-        'badregex': [
-            (r'\n', Text, '#pop')
-        ],
-        'typeexp': [
-            (r'[a-zA-Z]+', Keyword.Type),
-            (r'\s+', Text),
-            (r'[|]', Text),
-            (r'\n', Text, "#pop"),
-            (r';', Text, "#pop"),
-            (r'', Text, "#pop"),
-        ],
-        'root': [
-            (r'^(?=\s|/|<!--)', Text, 'slashstartsregex'),
-            include('commentsandwhitespace'),
-            (r'\+\+|--|~|&&|\?|:|\|\||\\(?=\n)|'
-             r'(<<|>>>?|==?|!=?|[-<>+*%&\|\^/])=?', Operator, 
'slashstartsregex'),
-            (r'[{(\[;,]', Punctuation, 'slashstartsregex'),
-            (r'[})\].]', Punctuation),
-            
(r'(for|in|while|do|break|return|continue|switch|case|default|if|else|'
-             r'throw|try|catch|finally|new|delete|typeof|instanceof|void|'
-             r'this)\b', Keyword, 'slashstartsregex'),
-            (r'(var|let|const|with|function)\b', Keyword.Declaration, 
'slashstartsregex'),
-            
(r'(abstract|boolean|byte|char|class|const|debugger|double|enum|export|'
-             
r'extends|final|float|goto|implements|import|int|interface|long|native|'
-             
r'package|private|protected|public|short|static|super|synchronized|throws|'
-             r'transient|volatile)\b', Keyword.Reserved),
-            (r'(true|false|null|NaN|Infinity|undefined)\b', Keyword.Constant),
-            (r'(Array|Boolean|Date|Error|Function|Math|netscape|'
-             r'Number|Object|Packages|RegExp|String|sun|decodeURI|'
-             r'decodeURIComponent|encodeURI|encodeURIComponent|'
-             r'Error|eval|isFinite|isNaN|parseFloat|parseInt|document|this|'
-             r'window)\b', Name.Builtin),
-            # Match stuff like: module name {...}
-            (r'\b(module)(\s*)(\s*[a-zA-Z0-9_?.$][\w?.$]*)(\s*)',
-             bygroups(Keyword.Reserved, Text, Name.Other, Text), 
'slashstartsregex'),
-            # Match variable type keywords
-            (r'\b(string|bool|number)\b', Keyword.Type),
-            # Match stuff like: constructor
-            (r'\b(constructor|declare|interface|as|AS)\b', Keyword.Reserved),
-            # Match stuff like: super(argument, list)
-            (r'(super)(\s*)\(([a-zA-Z0-9,_?.$\s]+\s*)\)',
-             bygroups(Keyword.Reserved, Text), 'slashstartsregex'),
-            # Match stuff like: function() {...}
-            (r'([a-zA-Z_?.$][\w?.$]*)\(\) \{', Name.Other, 'slashstartsregex'),
-            # Match stuff like: (function: return type)
-            (r'([a-zA-Z0-9_?.$][\w?.$]*)(\s*:\s*)([a-zA-Z0-9_?.$][\w?.$]*)',
-             bygroups(Name.Other, Text, Keyword.Type)),
-            # Match stuff like: type Foo = Bar | Baz
-            (r'\b(type)(\s*)([a-zA-Z0-9_?.$]+)(\s*)(=)(\s*)',
-             bygroups(Keyword.Reserved, Text, Name.Other, Text, Operator, 
Text), 'typeexp'),
-            (r'[$a-zA-Z_][a-zA-Z0-9_]*', Name.Other),
-            (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
-            (r'0x[0-9a-fA-F]+', Number.Hex),
-            (r'[0-9]+', Number.Integer),
-            (r'"(\\\\|\\"|[^"])*"', String.Double),
-            (r"'(\\\\|\\'|[^'])*'", String.Single),
-        ]
-    }
diff --git a/_exts/tsref.py b/_exts/tsref.py
deleted file mode 100644
index 980b9a5..0000000
--- a/_exts/tsref.py
+++ /dev/null
@@ -1,241 +0,0 @@
-"""
-  This file is part of GNU TALER.
-  Copyright (C) 2014, 2015 GNUnet e.V. and INRIA
-  TALER is free software; you can redistribute it and/or modify it under the
-  terms of the GNU Lesser General Public License as published by the Free 
Software
-  Foundation; either version 2.1, or (at your option) any later version.
-  TALER is distributed in the hope that it will be useful, but WITHOUT ANY
-  WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
-  A PARTICULAR PURPOSE.  See the GNU Lesser General Public License for more 
details.
-  You should have received a copy of the GNU Lesser General Public License 
along with
-  TALER; see the file COPYING.  If not, see <http://www.gnu.org/licenses/>
-
-  @author Florian Dold
-"""
-
-"""
-This extension adds a new lexer "tsref" for TypeScript, which
-allows reST-style links inside comments (`LinkName`_),
-and semi-automatically adds links to the definition of types.
-
-For type TYPE, a reference to tsref-type-TYPE is added.
-
-Known bugs and limitations:
- - The way the extension works right now interferes with
-   Sphinx's caching, the build directory should be cleared
-   before every build.
-"""
-
-
-from pygments.util import get_bool_opt
-from pygments.token import Name, Comment, Token, _TokenType
-from pygments.filter import Filter
-from sphinx.highlighting import PygmentsBridge
-from sphinx.builders.html import StandaloneHTMLBuilder
-from sphinx.pygments_styles import SphinxStyle
-from pygments.formatters import HtmlFormatter
-from docutils import nodes
-from docutils.nodes import make_id
-from sphinx.util import logging
-import re
-import sys
-
-
-logger = logging.getLogger(__name__)
-
-_escape_html_table = {
-    ord('&'): u'&amp;',
-    ord('<'): u'&lt;',
-    ord('>'): u'&gt;',
-    ord('"'): u'&quot;',
-    ord("'"): u'&#39;',
-}
-
-
-class LinkingHtmlFormatter(HtmlFormatter):
-    def __init__(self, **kwargs):
-        super(LinkingHtmlFormatter, self).__init__(**kwargs)
-        self._builder = kwargs['_builder']
-
-    def _fmt(self, value, tok):
-        cls = self._get_css_class(tok)
-        href = tok_getprop(tok, "href")
-        caption = tok_getprop(tok, "caption")
-        content = caption if caption is not None else value
-        if href:
-            value = '<a style="color:inherit;text-decoration:underline" 
href="%s">%s</a>' % (href, content)
-        if cls is None or cls == "":
-            return value
-        return '<span class="%s">%s</span>' % (cls, value)
-
-    def _format_lines(self, tokensource):
-        """
-        Just format the tokens, without any wrapping tags.
-        Yield individual lines.
-        """
-        lsep = self.lineseparator
-        escape_table = _escape_html_table
-
-        line = ''
-        for ttype, value in tokensource:
-            link = get_annotation(ttype, "link")
-
-            parts = value.translate(escape_table).split('\n')
-
-            if len(parts) == 0:
-                # empty token, usually should not happen
-                pass
-            elif len(parts) == 1:
-                # no newline before or after token
-                line += self._fmt(parts[0], ttype)
-            else:
-                line += self._fmt(parts[0], ttype)
-                yield 1, line + lsep
-                for part in parts[1:-1]:
-                    yield 1, self._fmt(part, ttype) + lsep
-                line = self._fmt(parts[-1], ttype)
-
-        if line:
-            yield 1, line + lsep
-
-
-class MyPygmentsBridge(PygmentsBridge):
-    def __init__(self, builder, trim_doctest_flags):
-        self.dest = "html"
-        self.trim_doctest_flags = trim_doctest_flags
-        self.formatter_args = {'style': SphinxStyle, '_builder': builder}
-        self.formatter = LinkingHtmlFormatter
-
-
-class MyHtmlBuilder(StandaloneHTMLBuilder):
-    name = "html-linked"
-    def init_highlighter(self):
-        if self.config.pygments_style is not None:
-            style = self.config.pygments_style
-        elif self.theme:
-            style = self.theme.get_confstr('theme', 'pygments_style', 'none')
-        else:
-            style = 'sphinx'
-        self.highlighter = MyPygmentsBridge(self, 
self.config.trim_doctest_flags)
-
-    def write_doc(self, docname, doctree):
-        self._current_docname = docname
-        super(MyHtmlBuilder, self).write_doc(docname, doctree)
-
-def get_annotation(tok, key):
-    if not hasattr(tok, "kv"):
-        return None
-    return tok.kv.get(key)
-
-
-def copy_token(tok):
-    new_tok = _TokenType(tok)
-    # This part is very fragile against API changes ...
-    new_tok.subtypes = set(tok.subtypes)
-    new_tok.parent = tok.parent
-    return new_tok
-
-
-def tok_setprop(tok, key, value):
-    tokid = id(tok)
-    e = token_props.get(tokid)
-    if e is None:
-        e = token_props[tokid] = (tok, {})
-    _, kv = e
-    kv[key] = value
-
-
-def tok_getprop(tok, key):
-    tokid = id(tok)
-    e = token_props.get(tokid)
-    if e is None:
-        return None
-    _, kv = e
-    return kv.get(key)
-
-
-link_reg = re.compile(r"`([^`<]+)\s*(?:<([^>]+)>)?\s*`_")
-
-# Map from token id to props.
-# Properties can't be added to tokens
-# since they derive from Python's tuple.
-token_props = {}
-
-
-class LinkFilter(Filter):
-    def __init__(self, app, **options):
-        self.app = app
-        Filter.__init__(self, **options)
-
-    def filter(self, lexer, stream):
-        id_to_doc = self.app.env.domaindata.get("_tsref", {})
-        for ttype, value in stream:
-            if ttype in Token.Keyword.Type:
-                defname = make_id('tsref-type-' + value);
-                t = copy_token(ttype)
-                if defname in id_to_doc:
-                    if hasattr(self.app.builder, "_current_docname"):
-                        current_docname = self.app.builder._current_docname
-                    else:
-                        current_docname = "(unknown-doc)"
-                    docname = id_to_doc[defname]
-                    uri = self.app.builder.get_relative_uri(current_docname, 
docname)
-                    href = uri + "#" + defname
-                    tok_setprop(t, "href", href)
-
-                yield t, value
-            elif ttype in Token.Comment:
-                last = 0
-                for m in re.finditer(link_reg, value):
-                    pre = value[last:m.start()]
-                    if pre:
-                        yield ttype, pre
-                    t = copy_token(ttype)
-                    x1, x2 = m.groups()
-                    if x2 is None:
-                        caption = x1.strip()
-                        id = make_id(x1)
-                    else:
-                        caption = x1.strip()
-                        id = make_id(x2)
-                    if id in id_to_doc:
-                        docname = id_to_doc[id]
-                        href = self.app.builder.get_target_uri(docname) + "#" 
+ id
-                        tok_setprop(t, "href", href)
-                        tok_setprop(t, "caption", caption)
-                    else:
-                        logger.warning("unresolved link target in comment: " + 
id)
-                    yield t, m.group(1)
-                    last = m.end()
-                post = value[last:]
-                if post:
-                    yield ttype, post
-            else:
-                yield ttype, value
-
-
-
-def remember_targets(app, doctree):
-    docname = app.env.docname
-    id_to_doc = app.env.domaindata.get("_tsref", None)
-    if id_to_doc is None:
-        id_to_doc = app.env.domaindata["_tsref"] = {}
-    for node in doctree.traverse():
-        if not isinstance(node, nodes.Element):
-            continue
-        ids = node.get("ids")
-        if ids:
-            for id in ids:
-                id_to_doc[id] = docname
-
-
-def setup(app): 
-    from sphinx.highlighting import lexers
-    from pygments.token import Name
-    from pygments.filters import NameHighlightFilter
-    from tslex import BetterTypeScriptLexer
-    lexer = BetterTypeScriptLexer()
-    lexer.add_filter(LinkFilter(app))
-    app.add_lexer('tsref', lexer)
-    app.add_builder(MyHtmlBuilder)
-    app.connect("doctree-read", remember_targets)
diff --git a/conf.py b/conf.py
index 1bd0bb2..4c81529 100644
--- a/conf.py
+++ b/conf.py
@@ -51,7 +51,7 @@ needs_sphinx = '1.3'
 # ones.
 extensions = [
     'ebicsdomain',
-    'tsref',
+    'typescriptdomain',
     'taler_sphinx_theme',
     'sphinx.ext.todo',
     'sphinx.ext.imgmath',
@@ -103,7 +103,7 @@ exclude_patterns = ['_build', '_exts', 'cf', 'prebuilt']
 
 # The reST default role (used for this markup: `text`) to use for all
 # documents.
-#default_role = None
+default_role = "ts:type"
 
 # If true, '()' will be appended to :func: etc. cross-reference text.
 #add_function_parentheses = True

-- 
To stop receiving notification emails like this one, please contact
address@hidden.



reply via email to

[Prev in Thread] Current Thread [Next in Thread]