Merge pull request #3 from ipython/no-py2
Simplify to only support IPython with Python 3 syntax
takluyver authored Jan 15, 2025
2 parents bb234c8 + 1f91256 commit e4c264d
Showing 1 changed file with 55 additions and 110 deletions.
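For orientation, a minimal usage sketch of what the simplification means for callers, assuming the module is imported under its file name ipython_pygments_lexers; the sample cell and formatter are illustrative, not part of the commit. After this change IPythonLexer is simply another name for IPython3Lexer, so both highlight IPython input with Python 3 rules.

from pygments import highlight
from pygments.formatters import HtmlFormatter
from ipython_pygments_lexers import IPython3Lexer, IPythonLexer

# The two public names now refer to the same class (based on the alias
# introduced in this diff).
assert IPythonLexer is IPython3Lexer

# Cell magics such as %%timeit fall back to Python 3 highlighting for the body.
cell = "%%timeit\nsum(range(10))\n"
print(highlight(cell, IPython3Lexer(), HtmlFormatter()))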
165 changes: 55 additions & 110 deletions ipython_pygments_lexers.py
@@ -50,80 +50,58 @@

line_re = re.compile('.*?\n')

__all__ = ['build_ipy_lexer', 'IPython3Lexer', 'IPythonLexer',
'IPythonPartialTracebackLexer', 'IPythonTracebackLexer',
'IPythonConsoleLexer', 'IPyLexer']


def build_ipy_lexer(python3):
"""Builds IPython lexers depending on the value of `python3`.
The lexer inherits from an appropriate Python lexer and then adds
information about IPython specific keywords (i.e. magic commands,
shell commands, etc.)
Parameters
----------
python3 : bool
If `True`, then build an IPython lexer from a Python 3 lexer.
"""
# It would be nice to have a single IPython lexer class which takes
# a boolean `python3`. But since there are two Python lexer classes,
# we will also have two IPython lexer classes.
if python3:
PyLexer = Python3Lexer
name = 'IPython3'
aliases = ['ipython3']
doc = """IPython3 Lexer"""
else:
PyLexer = PythonLexer
name = 'IPython'
aliases = ['ipython2', 'ipython']
doc = """IPython Lexer"""

ipython_tokens = [
(r'(?s)(\s*)(%%capture)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(PyLexer))),
(r'(?s)(\s*)(%%debug)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(PyLexer))),
(r'(?is)(\s*)(%%html)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(HtmlLexer))),
(r'(?s)(\s*)(%%javascript)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(JavascriptLexer))),
(r'(?s)(\s*)(%%js)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(JavascriptLexer))),
(r'(?s)(\s*)(%%latex)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(TexLexer))),
(r'(?s)(\s*)(%%perl)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(PerlLexer))),
(r'(?s)(\s*)(%%prun)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(PyLexer))),
(r'(?s)(\s*)(%%pypy)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(PyLexer))),
(r'(?s)(\s*)(%%python)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(PyLexer))),
(r'(?s)(\s*)(%%python2)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(PythonLexer))),
(r'(?s)(\s*)(%%python3)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(Python3Lexer))),
(r'(?s)(\s*)(%%ruby)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(RubyLexer))),
(r'(?s)(\s*)(%%time)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(PyLexer))),
(r'(?s)(\s*)(%%timeit)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(PyLexer))),
(r'(?s)(\s*)(%%writefile)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(PyLexer))),
(r'(?s)(\s*)(%%file)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(PyLexer))),
(r"(?s)(\s*)(%%)(\w+)(.*)", bygroups(Text, Operator, Keyword, Text)),
(r'(?s)(^\s*)(%%!)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(BashLexer))),
(r"(%%?)(\w+)(\?\??)$", bygroups(Operator, Keyword, Operator)),
(r"\b(\?\??)(\s*)$", bygroups(Operator, Text)),
(r'(%)(sx|sc|system)(.*)(\n)', bygroups(Operator, Keyword,
using(BashLexer), Text)),
(r'(%)(\w+)(.*\n)', bygroups(Operator, Keyword, Text)),
(r'^(!!)(.+)(\n)', bygroups(Operator, using(BashLexer), Text)),
(r'(!)(?!=)(.+)(\n)', bygroups(Operator, using(BashLexer), Text)),
(r'^(\s*)(\?\??)(\s*%{0,2}[\w\.\*]*)', bygroups(Text, Operator, Text)),
(r'(\s*%{0,2}[\w\.\*]*)(\?\??)(\s*)$', bygroups(Text, Operator, Text)),
]

tokens = PyLexer.tokens.copy()
__all__ = [
'IPython3Lexer',
'IPythonLexer',
'IPythonPartialTracebackLexer',
'IPythonTracebackLexer',
'IPythonConsoleLexer',
'IPyLexer'
]


ipython_tokens = [
(r'(?s)(\s*)(%%capture)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(Python3Lexer))),
(r'(?s)(\s*)(%%debug)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(Python3Lexer))),
(r'(?is)(\s*)(%%html)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(HtmlLexer))),
(r'(?s)(\s*)(%%javascript)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(JavascriptLexer))),
(r'(?s)(\s*)(%%js)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(JavascriptLexer))),
(r'(?s)(\s*)(%%latex)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(TexLexer))),
(r'(?s)(\s*)(%%perl)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(PerlLexer))),
(r'(?s)(\s*)(%%prun)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(Python3Lexer))),
(r'(?s)(\s*)(%%pypy)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(Python3Lexer))),
(r'(?s)(\s*)(%%python)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(Python3Lexer))),
(r'(?s)(\s*)(%%python2)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(PythonLexer))),
(r'(?s)(\s*)(%%python3)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(Python3Lexer))),
(r'(?s)(\s*)(%%ruby)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(RubyLexer))),
(r'(?s)(\s*)(%%time)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(Python3Lexer))),
(r'(?s)(\s*)(%%timeit)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(Python3Lexer))),
(r'(?s)(\s*)(%%writefile)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(Python3Lexer))),
(r'(?s)(\s*)(%%file)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(Python3Lexer))),
(r"(?s)(\s*)(%%)(\w+)(.*)", bygroups(Text, Operator, Keyword, Text)),
(r'(?s)(^\s*)(%%!)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(BashLexer))),
(r"(%%?)(\w+)(\?\??)$", bygroups(Operator, Keyword, Operator)),
(r"\b(\?\??)(\s*)$", bygroups(Operator, Text)),
(r'(%)(sx|sc|system)(.*)(\n)', bygroups(Operator, Keyword,
using(BashLexer), Text)),
(r'(%)(\w+)(.*\n)', bygroups(Operator, Keyword, Text)),
(r'^(!!)(.+)(\n)', bygroups(Operator, using(BashLexer), Text)),
(r'(!)(?!=)(.+)(\n)', bygroups(Operator, using(BashLexer), Text)),
(r'^(\s*)(\?\??)(\s*%{0,2}[\w\.\*]*)', bygroups(Text, Operator, Text)),
(r'(\s*%{0,2}[\w\.\*]*)(\?\??)(\s*)$', bygroups(Text, Operator, Text)),
]


class IPython3Lexer(Python3Lexer):
"""IPython code lexer (based on Python 3)"""
name = "IPython"
aliases = ["ipython", "ipython3"]

tokens = Python3Lexer.tokens.copy()
tokens['root'] = ipython_tokens + tokens['root']

attrs = {'name': name, 'aliases': aliases, 'filenames': [],
'__doc__': doc, 'tokens': tokens}

return type(name, (PyLexer,), attrs)


IPython3Lexer = build_ipy_lexer(python3=True)
IPythonLexer = build_ipy_lexer(python3=False)
IPythonLexer = IPython3Lexer


class IPythonPartialTracebackLexer(RegexLexer):
@@ -192,7 +170,7 @@ class IPythonTracebackLexer(DelegatingLexer):
# lexer.
#
name = 'IPython Traceback'
aliases = ['ipythontb']
aliases = ['ipythontb', 'ipython3tb']

def __init__(self, **options):
"""
@@ -202,18 +180,7 @@ def __init__(self, **options):
# note we need an __init__ doc, as otherwise it inherits the doc from the super class
# which will fail the documentation build as it references sections of the pygments docs that
# do not exist when building IPython's docs.
self.python3 = get_bool_opt(options, 'python3', False)
if self.python3:
self.aliases = ['ipython3tb']
else:
self.aliases = ['ipython2tb', 'ipythontb']

if self.python3:
IPyLexer = IPython3Lexer
else:
IPyLexer = IPythonLexer

DelegatingLexer.__init__(self, IPyLexer,
DelegatingLexer.__init__(self, IPython3Lexer,
IPythonPartialTracebackLexer, **options)

class IPythonConsoleLexer(Lexer):
@@ -246,7 +213,7 @@ class IPythonConsoleLexer(Lexer):
"""
name = 'IPython console session'
aliases = ['ipythonconsole']
aliases = ['ipythonconsole', 'ipython3console']
mimetypes = ['text/x-ipython-console']

# The regexps used to determine what is input and what is output.
@@ -271,9 +238,6 @@ def __init__(self, **options):
Parameters
----------
python3 : bool
If `True`, then the console inputs are parsed using a Python 3
lexer. Otherwise, they are parsed using a Python 2 lexer.
in1_regex : RegexObject
The compiled regular expression used to detect the start
of inputs. Although the IPython configuration setting may have a
@@ -289,12 +253,6 @@ def __init__(self, **options):
then the default output prompt is assumed.
"""
self.python3 = get_bool_opt(options, 'python3', False)
if self.python3:
self.aliases = ['ipython3console']
else:
self.aliases = ['ipython2console', 'ipythonconsole']

in1_regex = options.get('in1_regex', self.in1_regex)
in2_regex = options.get('in2_regex', self.in2_regex)
out_regex = options.get('out_regex', self.out_regex)
@@ -319,15 +277,8 @@ def __init__(self, **options):

Lexer.__init__(self, **options)

if self.python3:
pylexer = IPython3Lexer
tblexer = IPythonTracebackLexer
else:
pylexer = IPythonLexer
tblexer = IPythonTracebackLexer

self.pylexer = pylexer(**options)
self.tblexer = tblexer(**options)
self.pylexer = IPython3Lexer(**options)
self.tblexer = IPythonTracebackLexer(**options)

self.reset()

@@ -508,7 +459,7 @@ class IPyLexer(Lexer):
"""
name = 'IPy session'
aliases = ['ipy']
aliases = ['ipy', 'ipy3']

def __init__(self, **options):
"""
@@ -518,12 +469,6 @@ def __init__(self, **options):
"""
# init docstring is necessary for docs not to fail to build due to parent
# docs referencing a section in pygments docs.
self.python3 = get_bool_opt(options, 'python3', False)
if self.python3:
self.aliases = ['ipy3']
else:
self.aliases = ['ipy2', 'ipy']

Lexer.__init__(self, **options)

self.IPythonLexer = IPythonLexer(**options)
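A second hedged sketch, again assuming the published module name and an illustrative console transcript: the console, traceback, and IPy lexers now each carry their old and new aliases on a single class, and none of them accepts a python3 option any more.

from pygments import highlight
from pygments.formatters import HtmlFormatter
from ipython_pygments_lexers import IPythonConsoleLexer, IPyLexer

# 'ipythonconsole' and 'ipython3console' now point at this one lexer class.
session = "In [1]: %time sum(range(10))\nOut[1]: 45\n"
print(highlight(session, IPythonConsoleLexer(), HtmlFormatter()))

# IPyLexer still dispatches between plain IPython code and console transcripts,
# but is constructed without any Python-version flag.
print(highlight(session, IPyLexer(), HtmlFormatter()))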
