Merge branch 'main' into tokenizer-return-token
lysnikolaou authored Oct 6, 2022
2 parents 35b6dd9 + 23e83a8 commit cdfc30a
Showing 2 changed files with 24 additions and 2 deletions.
Lib/asyncio/base_subprocess.py: 3 additions, 1 deletion
@@ -216,7 +216,9 @@ def _process_exited(self, returncode):
             self._proc.returncode = returncode
         self._call(self._protocol.process_exited)
         for p in self._pipes.values():
-            p.pipe.close()
+            if p is not None:
+                p.pipe.close()
+
         self._try_finish()
 
     async def _wait(self):
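
A plausible reading of this hunk: values in the transport's _pipes mapping start out as None placeholders and only become pipe protocol objects once _connect_pipes() runs, so a child that exits early can leave None entries behind, and the unconditional p.pipe.close() then raises AttributeError. Below is a minimal stand-alone sketch of that failure mode; the Fake* classes are hypothetical stand-ins, not the asyncio internals.

class FakePipeTransport:
    def close(self):
        print("pipe closed")

class FakePipeProto:
    def __init__(self):
        # Each connected entry exposes its transport as .pipe, mirroring
        # the shape the loop in _process_exited relies on.
        self.pipe = FakePipeTransport()

# Hypothetical snapshot: fd 1 is connected; fd 0 was requested as a pipe
# but the child exited before the placeholder was filled in.
pipes = {0: None, 1: FakePipeProto()}

for p in pipes.values():
    if p is not None:   # the guard this commit adds
        p.pipe.close()  # without it, None.pipe raises AttributeError
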
Lib/test/test_tokenize.py: 21 additions, 1 deletion
@@ -3,7 +3,7 @@
 from tokenize import (tokenize, _tokenize, untokenize, NUMBER, NAME, OP,
                       STRING, ENDMARKER, ENCODING, tok_name, detect_encoding,
                       open as tokenize_open, Untokenizer, generate_tokens,
-                      NEWLINE, _generate_tokens_from_c_tokenizer)
+                      NEWLINE, _generate_tokens_from_c_tokenizer, DEDENT)
 from io import BytesIO, StringIO
 import unittest
 from textwrap import dedent
@@ -2512,6 +2512,26 @@ def get_tokens(string):
         self.assertRaises(SyntaxError, get_tokens, "("*1000+"a"+")"*1000)
         self.assertRaises(SyntaxError, get_tokens, "]")
 
+    def test_max_indent(self):
+        MAXINDENT = 100
+
+        def generate_source(indents):
+            source = ''.join((' ' * x) + 'if True:\n' for x in range(indents))
+            source += ' ' * indents + 'pass\n'
+            return source
+
+        valid = generate_source(MAXINDENT - 1)
+        tokens = list(_generate_tokens_from_c_tokenizer(valid))
+        self.assertEqual(tokens[-1].type, DEDENT)
+        compile(valid, "<string>", "exec")
+
+        invalid = generate_source(MAXINDENT)
+        tokens = list(_generate_tokens_from_c_tokenizer(invalid))
+        self.assertEqual(tokens[-1].type, NEWLINE)
+        self.assertRaises(
+            IndentationError, compile, invalid, "<string>", "exec"
+        )
+
     def test_continuation_lines_indentation(self):
         def get_tokens(string):
             return [(kind, string) for (kind, string, *_) in _generate_tokens_from_c_tokenizer(string)]
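
The new test pins the C tokenizer's indentation-depth cap at the MAXINDENT value it hardcodes (100): at 99 nested blocks the token stream ends with the final DEDENT and the source compiles, while at 100 the tokenizer stops at NEWLINE and compile() raises IndentationError. A small stand-alone sketch of the same behavior, reusing the test's source generator:

# Mirrors generate_source() from the new test.
def generate_source(levels):
    source = ''.join((' ' * x) + 'if True:\n' for x in range(levels))
    source += ' ' * levels + 'pass\n'
    return source

# One level under the cap compiles cleanly.
compile(generate_source(99), "<string>", "exec")

# At the cap, compilation is rejected.
try:
    compile(generate_source(100), "<string>", "exec")
except IndentationError as exc:
    print(exc)  # e.g. "too many levels of indentation"
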
