Skip to content

Commit

Permalink
Allow new lines in between everything and handle them correctly
Browse files Browse the repository at this point in the history
  • Loading branch information
DaDeather committed Feb 11, 2025
1 parent 81de606 commit e7ef350
Show file tree
Hide file tree
Showing 3 changed files with 273 additions and 38 deletions.
9 changes: 9 additions & 0 deletions src/Parser/TokenIterator.php
Original file line number Diff line number Diff line change
Expand Up @@ -205,6 +205,15 @@ public function tryConsumeTokenType(int $tokenType): bool
}


/**
 * Consumes every consecutive PHPDOC_EOL token at the current position,
 * leaving the iterator on the first non-newline token. A no-op when the
 * current token is not a newline.
 *
 * @phpstan-impure
 */
public function skipNewLineTokens(): void
{
	while ($this->tryConsumeTokenType(Lexer::TOKEN_PHPDOC_EOL)) {
		// intentionally empty — tryConsumeTokenType() advances the iterator
	}
}


private function detectNewline(): void
{
$value = $this->currentTokenValue();
Expand Down
81 changes: 43 additions & 38 deletions src/Parser/TypeParser.php
Original file line number Diff line number Diff line change
Expand Up @@ -90,7 +90,7 @@ private function subParse(TokenIterator $tokens): Ast\Type\TypeNode
if ($tokens->isCurrentTokenValue('is')) {
$type = $this->parseConditional($tokens, $type);
} else {
$tokens->tryConsumeTokenType(Lexer::TOKEN_PHPDOC_EOL);
$tokens->skipNewLineTokens();

if ($tokens->isCurrentTokenType(Lexer::TOKEN_UNION)) {
$type = $this->subParseUnion($tokens, $type);
Expand All @@ -112,9 +112,9 @@ private function parseAtomic(TokenIterator $tokens): Ast\Type\TypeNode
$startIndex = $tokens->currentTokenIndex();

if ($tokens->tryConsumeTokenType(Lexer::TOKEN_OPEN_PARENTHESES)) {
$tokens->tryConsumeTokenType(Lexer::TOKEN_PHPDOC_EOL);
$tokens->skipNewLineTokens();
$type = $this->subParse($tokens);
$tokens->tryConsumeTokenType(Lexer::TOKEN_PHPDOC_EOL);
$tokens->skipNewLineTokens();

$tokens->consumeTokenType(Lexer::TOKEN_CLOSE_PARENTHESES);

Expand Down Expand Up @@ -256,9 +256,9 @@ private function subParseUnion(TokenIterator $tokens, Ast\Type\TypeNode $type):
$types = [$type];

while ($tokens->tryConsumeTokenType(Lexer::TOKEN_UNION)) {
$tokens->tryConsumeTokenType(Lexer::TOKEN_PHPDOC_EOL);
$tokens->skipNewLineTokens();
$types[] = $this->parseAtomic($tokens);
$tokens->tryConsumeTokenType(Lexer::TOKEN_PHPDOC_EOL);
$tokens->skipNewLineTokens();
}

return new Ast\Type\UnionTypeNode($types);
Expand All @@ -284,9 +284,9 @@ private function subParseIntersection(TokenIterator $tokens, Ast\Type\TypeNode $
$types = [$type];

while ($tokens->tryConsumeTokenType(Lexer::TOKEN_INTERSECTION)) {
$tokens->tryConsumeTokenType(Lexer::TOKEN_PHPDOC_EOL);
$tokens->skipNewLineTokens();
$types[] = $this->parseAtomic($tokens);
$tokens->tryConsumeTokenType(Lexer::TOKEN_PHPDOC_EOL);
$tokens->skipNewLineTokens();
}

return new Ast\Type\IntersectionTypeNode($types);
Expand All @@ -306,15 +306,15 @@ private function parseConditional(TokenIterator $tokens, Ast\Type\TypeNode $subj

$targetType = $this->parse($tokens);

$tokens->tryConsumeTokenType(Lexer::TOKEN_PHPDOC_EOL);
$tokens->skipNewLineTokens();
$tokens->consumeTokenType(Lexer::TOKEN_NULLABLE);
$tokens->tryConsumeTokenType(Lexer::TOKEN_PHPDOC_EOL);
$tokens->skipNewLineTokens();

$ifType = $this->parse($tokens);

$tokens->tryConsumeTokenType(Lexer::TOKEN_PHPDOC_EOL);
$tokens->skipNewLineTokens();
$tokens->consumeTokenType(Lexer::TOKEN_COLON);
$tokens->tryConsumeTokenType(Lexer::TOKEN_PHPDOC_EOL);
$tokens->skipNewLineTokens();

$elseType = $this->subParse($tokens);

Expand All @@ -335,15 +335,15 @@ private function parseConditionalForParameter(TokenIterator $tokens, string $par

$targetType = $this->parse($tokens);

$tokens->tryConsumeTokenType(Lexer::TOKEN_PHPDOC_EOL);
$tokens->skipNewLineTokens();
$tokens->consumeTokenType(Lexer::TOKEN_NULLABLE);
$tokens->tryConsumeTokenType(Lexer::TOKEN_PHPDOC_EOL);
$tokens->skipNewLineTokens();

$ifType = $this->parse($tokens);

$tokens->tryConsumeTokenType(Lexer::TOKEN_PHPDOC_EOL);
$tokens->skipNewLineTokens();
$tokens->consumeTokenType(Lexer::TOKEN_COLON);
$tokens->tryConsumeTokenType(Lexer::TOKEN_PHPDOC_EOL);
$tokens->skipNewLineTokens();

$elseType = $this->subParse($tokens);

Expand Down Expand Up @@ -409,8 +409,13 @@ public function parseGeneric(TokenIterator $tokens, Ast\Type\IdentifierTypeNode
$variances = [];

$isFirst = true;
while ($isFirst || $tokens->tryConsumeTokenType(Lexer::TOKEN_COMMA)) {
$tokens->tryConsumeTokenType(Lexer::TOKEN_PHPDOC_EOL);
while (
$isFirst
|| $tokens->tryConsumeTokenType(Lexer::TOKEN_COMMA)
|| $tokens->tryConsumeTokenType(Lexer::TOKEN_UNION)
|| $tokens->tryConsumeTokenType(Lexer::TOKEN_INTERSECTION)
) {
$tokens->skipNewLineTokens();

// trailing comma case
if (!$isFirst && $tokens->isCurrentTokenType(Lexer::TOKEN_CLOSE_ANGLE_BRACKET)) {
Expand All @@ -419,7 +424,7 @@ public function parseGeneric(TokenIterator $tokens, Ast\Type\IdentifierTypeNode
$isFirst = false;

[$genericTypes[], $variances[]] = $this->parseGenericTypeArgument($tokens);
$tokens->tryConsumeTokenType(Lexer::TOKEN_PHPDOC_EOL);
$tokens->skipNewLineTokens();
}

$type = new Ast\Type\GenericTypeNode($baseType, $genericTypes, $variances);
Expand Down Expand Up @@ -510,19 +515,19 @@ private function parseCallable(TokenIterator $tokens, Ast\Type\IdentifierTypeNod
: [];

$tokens->consumeTokenType(Lexer::TOKEN_OPEN_PARENTHESES);
$tokens->tryConsumeTokenType(Lexer::TOKEN_PHPDOC_EOL);
$tokens->skipNewLineTokens();

$parameters = [];
if (!$tokens->isCurrentTokenType(Lexer::TOKEN_CLOSE_PARENTHESES)) {
$parameters[] = $this->parseCallableParameter($tokens);
$tokens->tryConsumeTokenType(Lexer::TOKEN_PHPDOC_EOL);
$tokens->skipNewLineTokens();
while ($tokens->tryConsumeTokenType(Lexer::TOKEN_COMMA)) {
$tokens->tryConsumeTokenType(Lexer::TOKEN_PHPDOC_EOL);
$tokens->skipNewLineTokens();
if ($tokens->isCurrentTokenType(Lexer::TOKEN_CLOSE_PARENTHESES)) {
break;
}
$parameters[] = $this->parseCallableParameter($tokens);
$tokens->tryConsumeTokenType(Lexer::TOKEN_PHPDOC_EOL);
$tokens->skipNewLineTokens();
}
}

Expand Down Expand Up @@ -550,7 +555,7 @@ private function parseCallableTemplates(TokenIterator $tokens): array

$isFirst = true;
while ($isFirst || $tokens->tryConsumeTokenType(Lexer::TOKEN_COMMA)) {
$tokens->tryConsumeTokenType(Lexer::TOKEN_PHPDOC_EOL);
$tokens->skipNewLineTokens();

// trailing comma case
if (!$isFirst && $tokens->isCurrentTokenType(Lexer::TOKEN_CLOSE_ANGLE_BRACKET)) {
Expand All @@ -559,7 +564,7 @@ private function parseCallableTemplates(TokenIterator $tokens): array
$isFirst = false;

$templates[] = $this->parseCallableTemplateArgument($tokens);
$tokens->tryConsumeTokenType(Lexer::TOKEN_PHPDOC_EOL);
$tokens->skipNewLineTokens();
}

$tokens->consumeTokenType(Lexer::TOKEN_CLOSE_ANGLE_BRACKET);
Expand Down Expand Up @@ -830,7 +835,7 @@ private function parseArrayShape(TokenIterator $tokens, Ast\Type\TypeNode $type,
$unsealedType = null;

do {
$tokens->tryConsumeTokenType(Lexer::TOKEN_PHPDOC_EOL);
$tokens->skipNewLineTokens();

if ($tokens->tryConsumeTokenType(Lexer::TOKEN_CLOSE_CURLY_BRACKET)) {
return Ast\Type\ArrayShapeNode::createSealed($items, $kind);
Expand All @@ -839,14 +844,14 @@ private function parseArrayShape(TokenIterator $tokens, Ast\Type\TypeNode $type,
if ($tokens->tryConsumeTokenType(Lexer::TOKEN_VARIADIC)) {
$sealed = false;

$tokens->tryConsumeTokenType(Lexer::TOKEN_PHPDOC_EOL);
$tokens->skipNewLineTokens();
if ($tokens->isCurrentTokenType(Lexer::TOKEN_OPEN_ANGLE_BRACKET)) {
if ($kind === Ast\Type\ArrayShapeNode::KIND_ARRAY) {
$unsealedType = $this->parseArrayShapeUnsealedType($tokens);
} else {
$unsealedType = $this->parseListShapeUnsealedType($tokens);
}
$tokens->tryConsumeTokenType(Lexer::TOKEN_PHPDOC_EOL);
$tokens->skipNewLineTokens();
}

$tokens->tryConsumeTokenType(Lexer::TOKEN_COMMA);
Expand All @@ -855,10 +860,10 @@ private function parseArrayShape(TokenIterator $tokens, Ast\Type\TypeNode $type,

$items[] = $this->parseArrayShapeItem($tokens);

$tokens->tryConsumeTokenType(Lexer::TOKEN_PHPDOC_EOL);
$tokens->skipNewLineTokens();
} while ($tokens->tryConsumeTokenType(Lexer::TOKEN_COMMA));

$tokens->tryConsumeTokenType(Lexer::TOKEN_PHPDOC_EOL);
$tokens->skipNewLineTokens();
$tokens->consumeTokenType(Lexer::TOKEN_CLOSE_CURLY_BRACKET);

if ($sealed) {
Expand Down Expand Up @@ -945,18 +950,18 @@ private function parseArrayShapeUnsealedType(TokenIterator $tokens): Ast\Type\Ar
$startIndex = $tokens->currentTokenIndex();

$tokens->consumeTokenType(Lexer::TOKEN_OPEN_ANGLE_BRACKET);
$tokens->tryConsumeTokenType(Lexer::TOKEN_PHPDOC_EOL);
$tokens->skipNewLineTokens();

$valueType = $this->parse($tokens);
$tokens->tryConsumeTokenType(Lexer::TOKEN_PHPDOC_EOL);
$tokens->skipNewLineTokens();

$keyType = null;
if ($tokens->tryConsumeTokenType(Lexer::TOKEN_COMMA)) {
$tokens->tryConsumeTokenType(Lexer::TOKEN_PHPDOC_EOL);
$tokens->skipNewLineTokens();

$keyType = $valueType;
$valueType = $this->parse($tokens);
$tokens->tryConsumeTokenType(Lexer::TOKEN_PHPDOC_EOL);
$tokens->skipNewLineTokens();
}

$tokens->consumeTokenType(Lexer::TOKEN_CLOSE_ANGLE_BRACKET);
Expand All @@ -978,10 +983,10 @@ private function parseListShapeUnsealedType(TokenIterator $tokens): Ast\Type\Arr
$startIndex = $tokens->currentTokenIndex();

$tokens->consumeTokenType(Lexer::TOKEN_OPEN_ANGLE_BRACKET);
$tokens->tryConsumeTokenType(Lexer::TOKEN_PHPDOC_EOL);
$tokens->skipNewLineTokens();

$valueType = $this->parse($tokens);
$tokens->tryConsumeTokenType(Lexer::TOKEN_PHPDOC_EOL);
$tokens->skipNewLineTokens();

$tokens->consumeTokenType(Lexer::TOKEN_CLOSE_ANGLE_BRACKET);

Expand All @@ -1003,18 +1008,18 @@ private function parseObjectShape(TokenIterator $tokens): Ast\Type\ObjectShapeNo
$items = [];

do {
$tokens->tryConsumeTokenType(Lexer::TOKEN_PHPDOC_EOL);
$tokens->skipNewLineTokens();

if ($tokens->tryConsumeTokenType(Lexer::TOKEN_CLOSE_CURLY_BRACKET)) {
return new Ast\Type\ObjectShapeNode($items);
}

$items[] = $this->parseObjectShapeItem($tokens);

$tokens->tryConsumeTokenType(Lexer::TOKEN_PHPDOC_EOL);
$tokens->skipNewLineTokens();
} while ($tokens->tryConsumeTokenType(Lexer::TOKEN_COMMA));

$tokens->tryConsumeTokenType(Lexer::TOKEN_PHPDOC_EOL);
$tokens->skipNewLineTokens();
$tokens->consumeTokenType(Lexer::TOKEN_CLOSE_CURLY_BRACKET);

return new Ast\Type\ObjectShapeNode($items);
Expand Down
Loading

0 comments on commit e7ef350

Please sign in to comment.