From 42ce358c2f9831c1bb02f5b36b4c16ef050d8256 Mon Sep 17 00:00:00 2001
From: Thomas Kluyver
Date: Wed, 15 Jan 2025 10:31:35 +0000
Subject: [PATCH] Fix some tests

---
 ipython_pygments_lexers.py      | 12 ++++++------
 test_ipython_pygments_lexers.py | 20 ++++++++++----------
 2 files changed, 16 insertions(+), 16 deletions(-)

diff --git a/ipython_pygments_lexers.py b/ipython_pygments_lexers.py
index 03ef4e2..09e8926 100644
--- a/ipython_pygments_lexers.py
+++ b/ipython_pygments_lexers.py
@@ -115,10 +115,6 @@
         r"(?s)(\s*)(%%pypy)([^\n]*\n)(.*)",
         bygroups(Text, Operator, Text, using(Python3Lexer)),
     ),
-    (
-        r"(?s)(\s*)(%%python)([^\n]*\n)(.*)",
-        bygroups(Text, Operator, Text, using(Python3Lexer)),
-    ),
     (
         r"(?s)(\s*)(%%python2)([^\n]*\n)(.*)",
         bygroups(Text, Operator, Text, using(Python2Lexer)),
@@ -127,16 +123,20 @@
         r"(?s)(\s*)(%%python3)([^\n]*\n)(.*)",
         bygroups(Text, Operator, Text, using(Python3Lexer)),
     ),
+    (
+        r"(?s)(\s*)(%%python)([^\n]*\n)(.*)",
+        bygroups(Text, Operator, Text, using(Python3Lexer)),
+    ),
     (
         r"(?s)(\s*)(%%ruby)([^\n]*\n)(.*)",
         bygroups(Text, Operator, Text, using(RubyLexer)),
     ),
     (
-        r"(?s)(\s*)(%%time)([^\n]*\n)(.*)",
+        r"(?s)(\s*)(%%timeit)([^\n]*\n)(.*)",
         bygroups(Text, Operator, Text, using(Python3Lexer)),
     ),
     (
-        r"(?s)(\s*)(%%timeit)([^\n]*\n)(.*)",
+        r"(?s)(\s*)(%%time)([^\n]*\n)(.*)",
         bygroups(Text, Operator, Text, using(Python3Lexer)),
     ),
     (
diff --git a/test_ipython_pygments_lexers.py b/test_ipython_pygments_lexers.py
index 2893014..5fb442c 100644
--- a/test_ipython_pygments_lexers.py
+++ b/test_ipython_pygments_lexers.py
@@ -12,6 +12,8 @@
 pyg214 = tuple(int(x) for x in pygments_version.split(".")[:2]) >= (2, 14)
 
 
+TOKEN_WS = Token.Text.Whitespace if pyg214 else Token.Text
+
 class TestLexers(TestCase):
     """Collection of lexers tests"""
 
@@ -71,10 +73,8 @@ def testIPythonLexer(self):
             (Token.Text, " "),
             (Token.Operator, "%"),
             (Token.Keyword, "sx"),
-            (Token.Text, " "),
+            (TOKEN_WS, " "),
         ] + bash_tokens[1:]
-        if tokens_2[7] == (Token.Text, " ") and pyg214:  # pygments 2.14+
-            tokens_2[7] = (Token.Text.Whitespace, " ")
         assert tokens_2[:-1] == list(self.lexer.get_tokens(fragment_2))[:-1]
 
         fragment_2 = "f = %R function () {}\n"
@@ -147,26 +147,26 @@ def testIPythonLexer(self):
             (Token.Text, " "),
             (Token.Name, "b"),
             (Token.Punctuation, ":"),
-            (Token.Text, "\n"),
+            (TOKEN_WS, "\n"),
             (Token.Text, "    "),
             (Token.Keyword, "pass"),
-            (Token.Text, "\n"),
+            (TOKEN_WS, "\n"),
         ]
-        if tokens[10] == (Token.Text, "\n") and pyg214:  # pygments 2.14+
-            tokens[10] = (Token.Text.Whitespace, "\n")
-        assert tokens[:-1] == list(self.lexer.get_tokens(fragment))[:-1]
+        assert tokens == list(self.lexer.get_tokens(fragment))
 
         fragment = "%%timeit\nmath.sin(0)"
         tokens = [
-            (Token.Operator, "%%timeit\n"),
+            (Token.Operator, "%%timeit"),
+            (Token.Text, "\n"),
             (Token.Name, "math"),
             (Token.Operator, "."),
             (Token.Name, "sin"),
             (Token.Punctuation, "("),
             (Token.Literal.Number.Integer, "0"),
             (Token.Punctuation, ")"),
-            (Token.Text, "\n"),
+            (TOKEN_WS, "\n"),
         ]
+        assert tokens == list(self.lexer.get_tokens(fragment))
 
         fragment = "%%HTML\n<div>foo</div>\n"
         tokens = [