From d3316f9217bd40d21f5ac030577007115757eff5 Mon Sep 17 00:00:00 2001 From: Alex Hoppen Date: Mon, 15 May 2023 19:40:29 -0700 Subject: [PATCH] Drop `Keyword` suffix from `TokenSpecSet` MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Since we started modelling keywords as a single `RawTokenKind` there hasn’t been an `ifKeyword` etc. anymore. Instead we always referred to them as `Keyword.if` or `.if` everywhere. We should be consistent about this in `TokenSpecSet` as well, which also shouldn’t have a `Keyword` suffix on the cases. --- Sources/SwiftParser/Attributes.swift | 20 +- Sources/SwiftParser/Declarations.swift | 104 +++---- Sources/SwiftParser/Expressions.swift | 56 ++-- Sources/SwiftParser/Names.swift | 2 +- Sources/SwiftParser/Patterns.swift | 56 ++-- Sources/SwiftParser/Statements.swift | 60 ++-- Sources/SwiftParser/TokenSpecSet.swift | 368 ++++++++++++------------- 7 files changed, 331 insertions(+), 335 deletions(-) diff --git a/Sources/SwiftParser/Attributes.swift b/Sources/SwiftParser/Attributes.swift index 081d9e2801a..5e92df0c54a 100644 --- a/Sources/SwiftParser/Attributes.swift +++ b/Sources/SwiftParser/Attributes.swift @@ -362,14 +362,14 @@ extension Parser { enum DifferentiabilityKind: TokenSpecSet { case reverse - case linear - case forward + case _linear + case _forward init?(lexeme: Lexer.Lexeme) { switch PrepareForKeywordMatch(lexeme) { case TokenSpec(.reverse): self = .reverse - case TokenSpec(._linear): self = .linear - case TokenSpec(._forward): self = .forward + case TokenSpec(._linear): self = ._linear + case TokenSpec(._forward): self = ._forward default: return nil } } @@ -377,8 +377,8 @@ extension Parser { var spec: TokenSpec { switch self { case .reverse: return .keyword(.reverse) - case .linear: return .keyword(._linear) - case .forward: return .keyword(._forward) + case ._linear: return .keyword(._linear) + case ._forward: return .keyword(._forward) } } } @@ -474,13 +474,13 @@ extension
Parser { enum ExpectedTokenKind: TokenSpecSet { case identifier case integerLiteral - case selfKeyword + case `self` init?(lexeme: Lexer.Lexeme) { switch PrepareForKeywordMatch(lexeme) { case TokenSpec(.identifier): self = .identifier case TokenSpec(.integerLiteral): self = .integerLiteral - case TokenSpec(.self): self = .selfKeyword + case TokenSpec(.self): self = .self default: return nil } } @@ -489,7 +489,7 @@ extension Parser { switch self { case .identifier: return .identifier case .integerLiteral: return .integerLiteral - case .selfKeyword: return .keyword(.self) + case .self: return .keyword(.self) } } } @@ -511,7 +511,7 @@ extension Parser { trailingComma: comma, arena: self.arena ) - case (.selfKeyword, let handle)?: + case (.self, let handle)?: let token = self.eat(handle) let comma = self.consume(if: .comma) return RawDifferentiabilityParamSyntax( diff --git a/Sources/SwiftParser/Declarations.swift b/Sources/SwiftParser/Declarations.swift index 689be886f77..2ec0b8f5658 100644 --- a/Sources/SwiftParser/Declarations.swift +++ b/Sources/SwiftParser/Declarations.swift @@ -100,7 +100,7 @@ extension TokenConsumer { declStartKeyword = subparser.at(anyIn: DeclarationStart.self)?.0 } switch declStartKeyword { - case .actorKeyword: + case .actor: // actor Foo {} if subparser.peek().rawTokenKind == .identifier { return true @@ -113,13 +113,13 @@ extension TokenConsumer { lookahead.consumeAnyToken() } while lookahead.atStartOfDeclaration(isAtTopLevel: isAtTopLevel, allowInitDecl: allowInitDecl) return lookahead.at(.identifier) - case .caseKeyword: + case .case: // When 'case' appears inside a function, it's probably a switch // case, not an enum case declaration. return false - case .initKeyword: + case .`init`: return allowInitDecl - case .macroKeyword: + case .macro: // macro Foo ... return subparser.peek().rawTokenKind == .identifier case .pound: @@ -234,42 +234,42 @@ extension Parser { let recoveryPrecedence = inMemberDeclList ? 
TokenPrecedence.closingBrace : nil switch self.canRecoverTo(anyIn: DeclarationStart.self, overrideRecoveryPrecedence: recoveryPrecedence) { - case (.importKeyword, let handle)?: + case (.import, let handle)?: return RawDeclSyntax(self.parseImportDeclaration(attrs, handle)) - case (.classKeyword, let handle)?: + case (.class, let handle)?: return RawDeclSyntax(self.parseNominalTypeDeclaration(for: RawClassDeclSyntax.self, attrs: attrs, introucerHandle: handle)) - case (.enumKeyword, let handle)?: + case (.enum, let handle)?: return RawDeclSyntax(self.parseNominalTypeDeclaration(for: RawEnumDeclSyntax.self, attrs: attrs, introucerHandle: handle)) - case (.caseKeyword, let handle)?: + case (.case, let handle)?: return RawDeclSyntax(self.parseEnumCaseDeclaration(attrs, handle)) - case (.structKeyword, let handle)?: + case (.struct, let handle)?: return RawDeclSyntax(self.parseNominalTypeDeclaration(for: RawStructDeclSyntax.self, attrs: attrs, introucerHandle: handle)) - case (.protocolKeyword, let handle)?: + case (.protocol, let handle)?: return RawDeclSyntax(self.parseNominalTypeDeclaration(for: RawProtocolDeclSyntax.self, attrs: attrs, introucerHandle: handle)) - case (.associatedtypeKeyword, let handle)?: + case (.associatedtype, let handle)?: return RawDeclSyntax(self.parseAssociatedTypeDeclaration(attrs, handle)) - case (.typealiasKeyword, let handle)?: + case (.typealias, let handle)?: return RawDeclSyntax(self.parseTypealiasDeclaration(attrs, handle)) - case (.extensionKeyword, let handle)?: + case (.extension, let handle)?: return RawDeclSyntax(self.parseExtensionDeclaration(attrs, handle)) - case (.funcKeyword, let handle)?: + case (.func, let handle)?: return RawDeclSyntax(self.parseFuncDeclaration(attrs, handle)) - case (.subscriptKeyword, let handle)?: + case (.subscript, let handle)?: return RawDeclSyntax(self.parseSubscriptDeclaration(attrs, handle)) - case (.letKeyword, let handle)?, (.varKeyword, let handle)?, - (.inoutKeyword, let handle)?: + case 
(.let, let handle)?, (.var, let handle)?, + (.inout, let handle)?: return RawDeclSyntax(self.parseBindingDeclaration(attrs, handle, inMemberDeclList: inMemberDeclList)) - case (.initKeyword, let handle)?: + case (.`init`, let handle)?: return RawDeclSyntax(self.parseInitializerDeclaration(attrs, handle)) - case (.deinitKeyword, let handle)?: + case (.deinit, let handle)?: return RawDeclSyntax(self.parseDeinitializerDeclaration(attrs, handle)) - case (.operatorKeyword, let handle)?: + case (.operator, let handle)?: return RawDeclSyntax(self.parseOperatorDeclaration(attrs, handle)) - case (.precedencegroupKeyword, let handle)?: + case (.precedencegroup, let handle)?: return RawDeclSyntax(self.parsePrecedenceGroupDeclaration(attrs, handle)) - case (.actorKeyword, let handle)?: + case (.actor, let handle)?: return RawDeclSyntax(self.parseNominalTypeDeclaration(for: RawActorDeclSyntax.self, attrs: attrs, introucerHandle: handle)) - case (.macroKeyword, let handle)?: + case (.macro, let handle)?: return RawDeclSyntax(self.parseMacroDeclaration(attrs: attrs, introducerHandle: handle)) case (.pound, let handle)?: return RawDeclSyntax(self.parseMacroExpansionDeclaration(attrs, handle)) @@ -575,50 +575,50 @@ extension Parser { } enum LayoutConstraint: TokenSpecSet { - case trivialLayout - case trivialAtMostLayout - case unknownLayout - case refCountedObjectLayout - case nativeRefCountedObjectLayout - case classLayout - case nativeClassLayout + case _Trivial + case _TrivialAtMost + case _UnknownLayout + case _RefCountedObjectLayout + case _NativeRefCountedObjectLayout + case _Class + case _NativeClass init?(lexeme: Lexer.Lexeme) { switch PrepareForKeywordMatch(lexeme) { - case TokenSpec(._Trivial): self = .trivialLayout - case TokenSpec(._TrivialAtMost): self = .trivialAtMostLayout - case TokenSpec(._UnknownLayout): self = .unknownLayout - case TokenSpec(._RefCountedObject): self = .refCountedObjectLayout - case TokenSpec(._NativeRefCountedObject): self = 
.nativeRefCountedObjectLayout - case TokenSpec(._Class): self = .classLayout - case TokenSpec(._NativeClass): self = .nativeClassLayout + case TokenSpec(._Trivial): self = ._Trivial + case TokenSpec(._TrivialAtMost): self = ._TrivialAtMost + case TokenSpec(._UnknownLayout): self = ._UnknownLayout + case TokenSpec(._RefCountedObject): self = ._RefCountedObjectLayout + case TokenSpec(._NativeRefCountedObject): self = ._NativeRefCountedObjectLayout + case TokenSpec(._Class): self = ._Class + case TokenSpec(._NativeClass): self = ._NativeClass default: return nil } } var spec: TokenSpec { switch self { - case .trivialLayout: return .keyword(._Trivial) - case .trivialAtMostLayout: return .keyword(._TrivialAtMost) - case .unknownLayout: return .keyword(._UnknownLayout) - case .refCountedObjectLayout: return .keyword(._RefCountedObject) - case .nativeRefCountedObjectLayout: return .keyword(._NativeRefCountedObject) - case .classLayout: return .keyword(._Class) - case .nativeClassLayout: return .keyword(._NativeClass) + case ._Trivial: return .keyword(._Trivial) + case ._TrivialAtMost: return .keyword(._TrivialAtMost) + case ._UnknownLayout: return .keyword(._UnknownLayout) + case ._RefCountedObjectLayout: return .keyword(._RefCountedObject) + case ._NativeRefCountedObjectLayout: return .keyword(._NativeRefCountedObject) + case ._Class: return .keyword(._Class) + case ._NativeClass: return .keyword(._NativeClass) } } var hasArguments: Bool { switch self { - case .trivialLayout, - .trivialAtMostLayout: + case ._Trivial, + ._TrivialAtMost: return true - case .unknownLayout, - .refCountedObjectLayout, - .nativeRefCountedObjectLayout, - .classLayout, - .nativeClassLayout: + case ._UnknownLayout, + ._RefCountedObjectLayout, + ._NativeRefCountedObjectLayout, + ._Class, + ._NativeClass: return false } } @@ -696,7 +696,7 @@ extension Parser { let rightParen: RawTokenSyntax? // Unlike the other layout constraints, _Trivial's argument list // is optional. 
- if layoutConstraint.hasArguments && (layoutConstraint != .trivialLayout || self.at(.leftParen)) { + if layoutConstraint.hasArguments && (layoutConstraint != ._Trivial || self.at(.leftParen)) { (unexpectedBeforeLeftParen, leftParen) = self.expect(.leftParen) size = self.expectWithoutRecovery(.integerLiteral) comma = self.consume(if: .comma) diff --git a/Sources/SwiftParser/Expressions.swift b/Sources/SwiftParser/Expressions.swift index 25c68955d83..0211a1d5596 100644 --- a/Sources/SwiftParser/Expressions.swift +++ b/Sources/SwiftParser/Expressions.swift @@ -259,22 +259,22 @@ extension Parser { case binaryOperator case infixQuestionMark case equal - case isKeyword - case asKeyword + case `is` + case `as` case async case arrow - case throwsKeyword + case `throws` init?(lexeme: Lexer.Lexeme) { switch PrepareForKeywordMatch(lexeme) { case TokenSpec(.binaryOperator): self = .binaryOperator case TokenSpec(.infixQuestionMark): self = .infixQuestionMark case TokenSpec(.equal): self = .equal - case TokenSpec(.is): self = .isKeyword - case TokenSpec(.as): self = .asKeyword + case TokenSpec(.is): self = .is + case TokenSpec(.as): self = .as case TokenSpec(.async): self = .async case TokenSpec(.arrow): self = .arrow - case TokenSpec(.throws): self = .throwsKeyword + case TokenSpec(.throws): self = .throws default: return nil } } @@ -284,11 +284,11 @@ extension Parser { case .binaryOperator: return .binaryOperator case .infixQuestionMark: return .infixQuestionMark case .equal: return .equal - case .isKeyword: return .keyword(.is) - case .asKeyword: return .keyword(.as) + case .is: return .keyword(.is) + case .as: return .keyword(.as) case .async: return .keyword(.async) case .arrow: return .arrow - case .throwsKeyword: return .keyword(.throws) + case .throws: return .keyword(.throws) } } } @@ -336,7 +336,7 @@ extension Parser { return (RawExprSyntax(op), nil) } - case (.isKeyword, let handle)?: + case (.is, let handle)?: let isKeyword = self.eat(handle) let op = 
RawUnresolvedIsExprSyntax( isTok: isKeyword, @@ -349,7 +349,7 @@ extension Parser { return (RawExprSyntax(op), RawExprSyntax(rhs)) - case (.asKeyword, let handle)?: + case (.as, let handle)?: return parseUnresolvedAsExpr(handle: handle) case (.async, _)?: @@ -358,7 +358,7 @@ extension Parser { } else { return nil } - case (.arrow, _)?, (.throwsKeyword, _)?: + case (.arrow, _)?, (.throws, _)?: var effectSpecifiers = self.parseTypeEffectSpecifiers() let (unexpectedBeforeArrow, arrow) = self.expect(.arrow) @@ -408,7 +408,7 @@ extension Parser { } EXPR_PREFIX: switch self.at(anyIn: ExpressionModifierKeyword.self) { - case (.awaitKeyword, let handle)?: + case (.await, let handle)?: let awaitTok = self.eat(handle) let sub = self.parseSequenceExpressionElement( flavor, @@ -422,7 +422,7 @@ extension Parser { arena: self.arena ) ) - case (.tryKeyword, let handle)?: + case (.try, let handle)?: let tryKeyword = self.eat(handle) let mark = self.consume(if: .exclamationMark, .postfixQuestionMark) @@ -439,7 +439,7 @@ extension Parser { arena: self.arena ) ) - case (._moveKeyword, let handle)?: + case (._move, let handle)?: let moveTok = self.eat(handle) let sub = self.parseSequenceExpressionElement( flavor, @@ -453,7 +453,7 @@ extension Parser { arena: self.arena ) ) - case (._borrowKeyword, let handle)?: + case (._borrow, let handle)?: let borrowTok = self.eat(handle) let sub = self.parseSequenceExpressionElement( flavor, @@ -468,7 +468,7 @@ extension Parser { ) ) - case (.copyKeyword, let handle)?: + case (.copy, let handle)?: // `copy` is only contextually a keyword, if it's followed by an // identifier or keyword on the same line. We do this to ensure that we do // not break any copy functions defined by users. This is following with @@ -497,7 +497,7 @@ extension Parser { ) ) - case (.consumeKeyword, let handle)?: + case (.consume, let handle)?: // `consume` is only contextually a keyword, if it's followed by an // identifier or keyword on the same line. 
We do this to ensure that we do // not break any copy functions defined by users. This is following with @@ -1200,7 +1200,7 @@ extension Parser { return RawExprSyntax(self.parseStringLiteral()) case (.extendedRegexDelimiter, _)?, (.regexSlash, _)?: return RawExprSyntax(self.parseRegexLiteral()) - case (.nilKeyword, let handle)?: + case (.nil, let handle)?: let nilKeyword = self.eat(handle) return RawExprSyntax( RawNilLiteralExprSyntax( @@ -1208,8 +1208,8 @@ extension Parser { arena: self.arena ) ) - case (.trueKeyword, let handle)?, - (.falseKeyword, let handle)?: + case (.true, let handle)?, + (.false, let handle)?: let tok = self.eat(handle) return RawExprSyntax( RawBooleanLiteralExprSyntax( @@ -1217,7 +1217,7 @@ extension Parser { arena: self.arena ) ) - case (.identifier, let handle)?, (.selfKeyword, let handle)?, (.initKeyword, let handle)?: + case (.identifier, let handle)?, (.self, let handle)?, (.`init`, let handle)?: // If we have "case let x." or "case let x(", we parse x as a normal // name, not a binding, because it is the start of an enum pattern or // call pattern. @@ -1254,9 +1254,9 @@ extension Parser { } return RawExprSyntax(self.parseIdentifierExpression()) - case (.capitalSelfKeyword, _)?: // Self + case (.Self, _)?: // Self return RawExprSyntax(self.parseIdentifierExpression()) - case (.anyKeyword, _)?: // Any + case (.Any, _)?: // Any let anyType = RawTypeSyntax(self.parseAnyType()) return RawExprSyntax(RawTypeExprSyntax(type: anyType, arena: self.arena)) case (.dollarIdentifier, _)?: @@ -1322,7 +1322,7 @@ extension Parser { arena: self.arena ) ) - case (.superKeyword, _)?: // 'super' + case (.super, _)?: // 'super' return RawExprSyntax(self.parseSuperExpression()) case (.leftParen, _)?: @@ -1344,7 +1344,7 @@ extension Parser { // try to parse a primary expression for a directive mutating func parsePrimaryExprForDirective() -> RawExprSyntax? 
{ switch self.at(anyIn: CompilationCondition.self) { - case (.canImportKeyword, let handle)?: + case (.canImport, let handle)?: return RawExprSyntax(self.parseCanImportExpression(handle)) // TODO: add case `swift` and `compiler` here @@ -2476,9 +2476,9 @@ extension Parser { let label: RawSwitchCaseSyntax.Label switch self.canRecoverTo(anyIn: SwitchCaseStart.self) { - case (.caseKeyword, let handle)?: + case (.case, let handle)?: label = .case(self.parseSwitchCaseLabel(handle)) - case (.defaultKeyword, let handle)?: + case (.default, let handle)?: label = .default(self.parseSwitchDefaultLabel(handle)) case nil: label = .case( diff --git a/Sources/SwiftParser/Names.swift b/Sources/SwiftParser/Names.swift index 563ed4b4eb5..9daeff401ad 100644 --- a/Sources/SwiftParser/Names.swift +++ b/Sources/SwiftParser/Names.swift @@ -263,7 +263,7 @@ extension Parser.Lookahead { extension Lexer.Lexeme { func canBeArgumentLabel(allowDollarIdentifier: Bool = false) -> Bool { // `inout` is reserved as an argument label for historical reasons. 
- if TypeSpecifier(lexeme: self) == .inoutKeyword { + if TypeSpecifier(lexeme: self) == .inout { return false } diff --git a/Sources/SwiftParser/Patterns.swift b/Sources/SwiftParser/Patterns.swift index 1ac96f67bf7..2df50d56dd2 100644 --- a/Sources/SwiftParser/Patterns.swift +++ b/Sources/SwiftParser/Patterns.swift @@ -50,9 +50,9 @@ extension Parser { case wildcard case identifier case dollarIdentifier // For recovery - case letKeyword - case varKeyword - case inoutKeyword + case `let` + case `var` + case `inout` init?(lexeme: Lexer.Lexeme) { switch PrepareForKeywordMatch(lexeme) { @@ -60,9 +60,9 @@ extension Parser { case TokenSpec(.wildcard): self = .wildcard case TokenSpec(.identifier): self = .identifier case TokenSpec(.dollarIdentifier): self = .dollarIdentifier - case TokenSpec(.let): self = .letKeyword - case TokenSpec(.var): self = .varKeyword - case TokenSpec(.inout): self = .inoutKeyword + case TokenSpec(.let): self = .let + case TokenSpec(.var): self = .var + case TokenSpec(.inout): self = .inout default: return nil } } @@ -73,9 +73,9 @@ extension Parser { case .wildcard: return .wildcard case .identifier: return .identifier case .dollarIdentifier: return .dollarIdentifier - case .letKeyword: return .keyword(.let) - case .varKeyword: return .keyword(.var) - case .inoutKeyword: return .keyword(.inout) + case .let: return .keyword(.let) + case .var: return .keyword(.var) + case .inout: return .keyword(.inout) } } } @@ -121,9 +121,9 @@ extension Parser { arena: self.arena ) ) - case (.letKeyword, let handle)?, - (.varKeyword, let handle)?, - (.inoutKeyword, let handle)?: + case (.let, let handle)?, + (.var, let handle)?, + (.inout, let handle)?: let bindingKeyword = self.eat(handle) let value = self.parsePattern() return RawPatternSyntax( @@ -256,9 +256,9 @@ extension Parser { mutating func parseMatchingPattern(context: PatternContext) -> RawPatternSyntax { // Parse productions that can only be patterns. 
switch self.at(anyIn: MatchingPatternStart.self) { - case (.varKeyword, let handle)?, - (.letKeyword, let handle)?, - (.inoutKeyword, let handle)?: + case (.var, let handle)?, + (.let, let handle)?, + (.inout, let handle)?: let bindingKeyword = self.eat(handle) let value = self.parseMatchingPattern(context: .bindingIntroducer) return RawPatternSyntax( @@ -268,7 +268,7 @@ extension Parser { arena: self.arena ) ) - case (.isKeyword, let handle)?: + case (.is, let handle)?: let isKeyword = self.eat(handle) let type = self.parseType() return RawPatternSyntax( @@ -311,19 +311,19 @@ extension Parser.Lookahead { enum PatternStartTokens: TokenSpecSet { case identifier case wildcard - case letKeyword - case varKeyword + case `let` + case `var` case leftParen - case inoutKeyword + case `inout` init?(lexeme: Lexer.Lexeme) { switch PrepareForKeywordMatch(lexeme) { case TokenSpec(.identifier): self = .identifier case TokenSpec(.wildcard): self = .wildcard - case TokenSpec(.let): self = .letKeyword - case TokenSpec(.var): self = .varKeyword + case TokenSpec(.let): self = .let + case TokenSpec(.var): self = .var case TokenSpec(.leftParen): self = .leftParen - case TokenSpec(.inout): self = .inoutKeyword + case TokenSpec(.inout): self = .inout default: return nil } } @@ -332,10 +332,10 @@ extension Parser.Lookahead { switch self { case .identifier: return .identifier case .wildcard: return .wildcard - case .letKeyword: return .keyword(.let) - case .varKeyword: return .keyword(.var) + case .let: return .keyword(.let) + case .var: return .keyword(.var) case .leftParen: return .leftParen - case .inoutKeyword: return .keyword(.inout) + case .inout: return .keyword(.inout) } } } @@ -345,9 +345,9 @@ extension Parser.Lookahead { (.wildcard, let handle)?: self.eat(handle) return true - case (.letKeyword, let handle)?, - (.varKeyword, let handle)?, - (.inoutKeyword, let handle)?: + case (.let, let handle)?, + (.var, let handle)?, + (.inout, let handle)?: self.eat(handle) return 
self.canParsePattern() case (.leftParen, _)?: diff --git a/Sources/SwiftParser/Statements.swift b/Sources/SwiftParser/Statements.swift index e761f1c4bd4..edb647c1b10 100644 --- a/Sources/SwiftParser/Statements.swift +++ b/Sources/SwiftParser/Statements.swift @@ -94,46 +94,44 @@ extension Parser { let optLabel = self.parseOptionalStatementLabel() switch self.canRecoverTo(anyIn: CanBeStatementStart.self) { - case (.forKeyword, let handle)?: + case (.for, let handle)?: return label(self.parseForEachStatement(forHandle: handle), with: optLabel) - case (.whileKeyword, let handle)?: + case (.while, let handle)?: return label(self.parseWhileStatement(whileHandle: handle), with: optLabel) - case (.repeatKeyword, let handle)?: + case (.repeat, let handle)?: return label(self.parseRepeatWhileStatement(repeatHandle: handle), with: optLabel) - case (.ifKeyword, let handle)?: + case (.if, let handle)?: let ifExpr = self.parseIfExpression(ifHandle: handle) let ifStmt = RawExpressionStmtSyntax( expression: RawExprSyntax(ifExpr), arena: self.arena ) return label(ifStmt, with: optLabel) - case (.guardKeyword, let handle)?: + case (.guard, let handle)?: return label(self.parseGuardStatement(guardHandle: handle), with: optLabel) - case (.switchKeyword, let handle)?: + case (.switch, let handle)?: let switchExpr = self.parseSwitchExpression(switchHandle: handle) let switchStmt = RawExpressionStmtSyntax( expression: RawExprSyntax(switchExpr), arena: self.arena ) return label(switchStmt, with: optLabel) - case (.breakKeyword, let handle)?: + case (.break, let handle)?: return label(self.parseBreakStatement(breakHandle: handle), with: optLabel) - case (.continueKeyword, let handle)?: + case (.continue, let handle)?: return label(self.parseContinueStatement(continueHandle: handle), with: optLabel) - case (.fallthroughKeyword, let handle)?: + case (.fallthrough, let handle)?: return label(self.parseFallthroughStatement(fallthroughHandle: handle), with: optLabel) - case (.forgetKeyword, let 
handle)?: // NOTE: support for deprecated _forget - fallthrough - case (.discardKeyword, let handle)?: + case (._forget, let handle)?, (.discard, let handle)?: // NOTE: support for deprecated _forget return label(self.parseDiscardStatement(discardHandle: handle), with: optLabel) - case (.returnKeyword, let handle)?: + case (.return, let handle)?: return label(self.parseReturnStatement(returnHandle: handle), with: optLabel) - case (.throwKeyword, let handle)?: + case (.throw, let handle)?: return label(self.parseThrowStatement(throwHandle: handle), with: optLabel) - case (.deferKeyword, let handle)?: + case (.defer, let handle)?: return label(self.parseDeferStatement(deferHandle: handle), with: optLabel) - case (.doKeyword, let handle)?: + case (.do, let handle)?: return label(self.parseDoStatement(doHandle: handle), with: optLabel) case (.yield, let handle)?: return label(self.parseYieldStatement(yieldHandle: handle), with: optLabel) @@ -955,20 +953,20 @@ extension Parser.Lookahead { switchSubject = self.at(anyIn: CanBeStatementStart.self)?.0 } switch switchSubject { - case .returnKeyword?, - .throwKeyword?, - .deferKeyword?, - .ifKeyword?, - .guardKeyword?, - .whileKeyword?, - .doKeyword?, - .forKeyword?, - .breakKeyword?, - .continueKeyword?, - .fallthroughKeyword?, - .switchKeyword?: + case .return?, + .throw?, + .defer?, + .if?, + .guard?, + .while?, + .do?, + .for?, + .break?, + .continue?, + .fallthrough?, + .switch?: return true - case .repeatKeyword?: + case .repeat?: // 'repeat' followed by anything other than a brace stmt // is a pack expansion expression. // FIXME: 'repeat' followed by '{' could be a pack expansion @@ -992,9 +990,7 @@ extension Parser.Lookahead { // yield statement of some singular expression. 
return !self.peek().isAtStartOfLine } - case .forgetKeyword?: // NOTE: support for deprecated _forget - fallthrough - case .discardKeyword?: + case ._forget?, .discard?: // NOTE: support for deprecated _forget let next = peek() // The thing to be discarded must be on the same line as `discard`. if next.isAtStartOfLine { diff --git a/Sources/SwiftParser/TokenSpecSet.swift b/Sources/SwiftParser/TokenSpecSet.swift index c590acebc33..79cc38cfa76 100644 --- a/Sources/SwiftParser/TokenSpecSet.swift +++ b/Sources/SwiftParser/TokenSpecSet.swift @@ -74,40 +74,40 @@ enum AccessorKind: TokenSpecSet { } enum CanBeStatementStart: TokenSpecSet { - case breakKeyword - case continueKeyword - case deferKeyword - case doKeyword - case fallthroughKeyword - case forKeyword - case forgetKeyword // NOTE: support for deprecated _forget - case discardKeyword - case guardKeyword - case ifKeyword - case repeatKeyword - case returnKeyword - case switchKeyword - case throwKeyword - case whileKeyword + case _forget // NOTE: support for deprecated _forget + case `break` + case `continue` + case `defer` + case `do` + case `fallthrough` + case `for` + case discard + case `guard` + case `if` + case `repeat` + case `return` + case `switch` + case `throw` + case `while` case yield init?(lexeme: Lexer.Lexeme) { switch PrepareForKeywordMatch(lexeme) { - case TokenSpec(.break): self = .breakKeyword - case TokenSpec(.continue): self = .continueKeyword - case TokenSpec(.defer): self = .deferKeyword - case TokenSpec(.do): self = .doKeyword - case TokenSpec(.fallthrough): self = .fallthroughKeyword - case TokenSpec(.for): self = .forKeyword - case TokenSpec(._forget): self = .forgetKeyword - case TokenSpec(.discard): self = .discardKeyword - case TokenSpec(.guard): self = .guardKeyword - case TokenSpec(.if): self = .ifKeyword - case TokenSpec(.repeat): self = .repeatKeyword - case TokenSpec(.return): self = .returnKeyword - case TokenSpec(.switch): self = .switchKeyword - case TokenSpec(.throw): self = 
.throwKeyword - case TokenSpec(.while): self = .whileKeyword + case TokenSpec(._forget): self = ._forget + case TokenSpec(.break): self = .break + case TokenSpec(.continue): self = .continue + case TokenSpec(.defer): self = .defer + case TokenSpec(.do): self = .do + case TokenSpec(.fallthrough): self = .fallthrough + case TokenSpec(.for): self = .for + case TokenSpec(.discard): self = .discard + case TokenSpec(.guard): self = .guard + case TokenSpec(.if): self = .if + case TokenSpec(.repeat): self = .repeat + case TokenSpec(.return): self = .return + case TokenSpec(.switch): self = .switch + case TokenSpec(.throw): self = .throw + case TokenSpec(.while): self = .while case TokenSpec(.yield): self = .yield default: return nil } @@ -115,45 +115,45 @@ enum CanBeStatementStart: TokenSpecSet { var spec: TokenSpec { switch self { - case .breakKeyword: return .keyword(.break) - case .continueKeyword: return .keyword(.continue) - case .deferKeyword: return .keyword(.defer) - case .doKeyword: return .keyword(.do) - case .fallthroughKeyword: return .keyword(.fallthrough) - case .forKeyword: return .keyword(.for) - case .forgetKeyword: return TokenSpec(._forget, recoveryPrecedence: .stmtKeyword) - case .discardKeyword: return TokenSpec(.discard, recoveryPrecedence: .stmtKeyword) - case .guardKeyword: return .keyword(.guard) - case .ifKeyword: return .keyword(.if) - case .repeatKeyword: return .keyword(.repeat) - case .returnKeyword: return .keyword(.return) - case .switchKeyword: return .keyword(.switch) - case .throwKeyword: return .keyword(.throw) - case .whileKeyword: return .keyword(.while) + case ._forget: return TokenSpec(._forget, recoveryPrecedence: .stmtKeyword) + case .break: return .keyword(.break) + case .continue: return .keyword(.continue) + case .defer: return .keyword(.defer) + case .do: return .keyword(.do) + case .fallthrough: return .keyword(.fallthrough) + case .for: return .keyword(.for) + case .discard: return TokenSpec(.discard, recoveryPrecedence: 
.stmtKeyword) + case .guard: return .keyword(.guard) + case .if: return .keyword(.if) + case .repeat: return .keyword(.repeat) + case .return: return .keyword(.return) + case .switch: return .keyword(.switch) + case .throw: return .keyword(.throw) + case .while: return .keyword(.while) case .yield: return .keyword(.yield) } } } enum CompilationCondition: TokenSpecSet { - case swiftKeyword - case compilerKeyword - case canImportKeyword + case swift + case compiler + case canImport init?(lexeme: Lexer.Lexeme) { switch PrepareForKeywordMatch(lexeme) { - case TokenSpec(.swift): self = .swiftKeyword - case TokenSpec(.compiler): self = .compilerKeyword - case TokenSpec(.canImport): self = .canImportKeyword + case TokenSpec(.swift): self = .swift + case TokenSpec(.compiler): self = .compiler + case TokenSpec(.canImport): self = .canImport default: return nil } } var spec: TokenSpec { switch self { - case .swiftKeyword: return .keyword(.swift) - case .compilerKeyword: return .keyword(.compiler) - case .canImportKeyword: return .keyword(.canImport) + case .swift: return .keyword(.swift) + case .compiler: return .keyword(.compiler) + case .canImport: return .keyword(.canImport) } } @@ -252,50 +252,50 @@ enum ContextualDeclKeyword: TokenSpecSet { } enum DeclarationStart: TokenSpecSet { - case actorKeyword - case associatedtypeKeyword - case caseKeyword - case classKeyword - case deinitKeyword - case enumKeyword - case extensionKeyword - case funcKeyword - case importKeyword - case initKeyword - case letKeyword - case macroKeyword - case operatorKeyword - case precedencegroupKeyword - case protocolKeyword - case structKeyword - case subscriptKeyword - case typealiasKeyword - case varKeyword - case inoutKeyword + case actor + case `associatedtype` + case `case` + case `class` + case `deinit` + case `enum` + case `extension` + case `func` + case `import` + case `init` + case `let` + case macro + case `operator` + case `precedencegroup` + case `protocol` + case `struct` + case 
`subscript` + case `typealias` + case `var` + case `inout` case pound init?(lexeme: Lexer.Lexeme) { switch PrepareForKeywordMatch(lexeme) { - case TokenSpec(.actor): self = .actorKeyword - case TokenSpec(.macro): self = .macroKeyword - case TokenSpec(.associatedtype): self = .associatedtypeKeyword - case TokenSpec(.case): self = .caseKeyword - case TokenSpec(.class): self = .classKeyword - case TokenSpec(.deinit): self = .deinitKeyword - case TokenSpec(.enum): self = .enumKeyword - case TokenSpec(.extension): self = .extensionKeyword - case TokenSpec(.func): self = .funcKeyword - case TokenSpec(.import): self = .importKeyword - case TokenSpec(.`init`): self = .initKeyword - case TokenSpec(.let): self = .letKeyword - case TokenSpec(.operator): self = .operatorKeyword - case TokenSpec(.precedencegroup): self = .precedencegroupKeyword - case TokenSpec(.protocol): self = .protocolKeyword - case TokenSpec(.struct): self = .structKeyword - case TokenSpec(.subscript): self = .subscriptKeyword - case TokenSpec(.typealias): self = .typealiasKeyword - case TokenSpec(.var): self = .varKeyword - case TokenSpec(.inout): self = .inoutKeyword + case TokenSpec(.actor): self = .actor + case TokenSpec(.macro): self = .macro + case TokenSpec(.associatedtype): self = .associatedtype + case TokenSpec(.case): self = .case + case TokenSpec(.class): self = .class + case TokenSpec(.deinit): self = .deinit + case TokenSpec(.enum): self = .enum + case TokenSpec(.extension): self = .extension + case TokenSpec(.func): self = .func + case TokenSpec(.import): self = .import + case TokenSpec(.`init`): self = .`init` + case TokenSpec(.let): self = .let + case TokenSpec(.operator): self = .operator + case TokenSpec(.precedencegroup): self = .precedencegroup + case TokenSpec(.protocol): self = .protocol + case TokenSpec(.struct): self = .struct + case TokenSpec(.subscript): self = .subscript + case TokenSpec(.typealias): self = .typealias + case TokenSpec(.var): self = .var + case TokenSpec(.inout): 
self = .inout case TokenSpec(.pound): self = .pound default: return nil } @@ -303,26 +303,26 @@ enum DeclarationStart: TokenSpecSet { var spec: TokenSpec { switch self { - case .actorKeyword: return TokenSpec(.actor, recoveryPrecedence: .declKeyword) - case .associatedtypeKeyword: return .keyword(.associatedtype) - case .caseKeyword: return TokenSpec(.case, recoveryPrecedence: .declKeyword) - case .classKeyword: return .keyword(.class) - case .deinitKeyword: return .keyword(.deinit) - case .enumKeyword: return .keyword(.enum) - case .extensionKeyword: return .keyword(.extension) - case .funcKeyword: return .keyword(.func) - case .importKeyword: return .keyword(.import) - case .initKeyword: return .keyword(.`init`) - case .letKeyword: return .keyword(.let) - case .macroKeyword: return TokenSpec(.macro, recoveryPrecedence: .declKeyword) - case .operatorKeyword: return .keyword(.operator) - case .precedencegroupKeyword: return .keyword(.precedencegroup) - case .protocolKeyword: return .keyword(.protocol) - case .structKeyword: return .keyword(.struct) - case .subscriptKeyword: return .keyword(.subscript) - case .typealiasKeyword: return .keyword(.typealias) - case .varKeyword: return .keyword(.var) - case .inoutKeyword: return TokenSpec(.inout, recoveryPrecedence: .declKeyword) + case .actor: return TokenSpec(.actor, recoveryPrecedence: .declKeyword) + case .associatedtype: return .keyword(.associatedtype) + case .case: return TokenSpec(.case, recoveryPrecedence: .declKeyword) + case .class: return .keyword(.class) + case .deinit: return .keyword(.deinit) + case .enum: return .keyword(.enum) + case .extension: return .keyword(.extension) + case .func: return .keyword(.func) + case .import: return .keyword(.import) + case .`init`: return .keyword(.`init`) + case .let: return .keyword(.let) + case .macro: return TokenSpec(.macro, recoveryPrecedence: .declKeyword) + case .operator: return .keyword(.operator) + case .precedencegroup: return .keyword(.precedencegroup) + 
case .protocol: return .keyword(.protocol) + case .struct: return .keyword(.struct) + case .subscript: return .keyword(.subscript) + case .typealias: return .keyword(.typealias) + case .var: return .keyword(.var) + case .inout: return TokenSpec(.inout, recoveryPrecedence: .declKeyword) case .pound: return TokenSpec(.pound, recoveryPrecedence: .openingPoundIf) } } @@ -415,28 +415,28 @@ enum PoundDeclarationStart: TokenSpecSet { } enum SwitchCaseStart: TokenSpecSet { - case caseKeyword - case defaultKeyword + case `case` + case `default` init?(lexeme: Lexer.Lexeme) { switch PrepareForKeywordMatch(lexeme) { - case TokenSpec(.case): self = .caseKeyword - case TokenSpec(.default): self = .defaultKeyword + case TokenSpec(.case): self = .case + case TokenSpec(.default): self = .default default: return nil } } var spec: TokenSpec { switch self { - case .caseKeyword: return .keyword(.case) - case .defaultKeyword: return .keyword(.default) + case .case: return .keyword(.case) + case .default: return .keyword(.default) } } } @_spi(Diagnostics) public enum TypeSpecifier: TokenSpecSet { - case inoutKeyword + case `inout` case owned case shared case borrowing @@ -444,7 +444,7 @@ public enum TypeSpecifier: TokenSpecSet { init?(lexeme: Lexer.Lexeme) { switch PrepareForKeywordMatch(lexeme) { - case TokenSpec(.inout): self = .inoutKeyword + case TokenSpec(.inout): self = .inout case TokenSpec(.__owned): self = .owned case TokenSpec(.__shared): self = .shared case TokenSpec(.consuming): self = .consuming @@ -456,7 +456,7 @@ public enum TypeSpecifier: TokenSpecSet { @_spi(Diagnostics) public init?(token: TokenSyntax) { switch token { - case TokenSpec(.inout): self = .inoutKeyword + case TokenSpec(.inout): self = .inout case TokenSpec(.__owned): self = .owned case TokenSpec(.__shared): self = .shared case TokenSpec(.consuming): self = .shared @@ -467,7 +467,7 @@ public enum TypeSpecifier: TokenSpecSet { var spec: TokenSpec { switch self { - case .inoutKeyword: return .keyword(.inout) + 
case .inout: return .keyword(.inout) case .owned: return .keyword(.__owned) case .shared: return .keyword(.__shared) case .borrowing: return .keyword(.borrowing) @@ -479,53 +479,53 @@ public enum TypeSpecifier: TokenSpecSet { // MARK: Expression start enum ExpressionModifierKeyword: TokenSpecSet { - case awaitKeyword - case _moveKeyword - case _borrowKeyword - case tryKeyword - case consumeKeyword - case copyKeyword + case await + case _move + case _borrow + case `try` + case consume + case copy init?(lexeme: Lexer.Lexeme) { switch PrepareForKeywordMatch(lexeme) { - case TokenSpec(.await): self = .awaitKeyword - case TokenSpec(._move): self = ._moveKeyword - case TokenSpec(._borrow): self = ._borrowKeyword - case TokenSpec(.try): self = .tryKeyword - case TokenSpec(.consume): self = .consumeKeyword - case TokenSpec(.copy): self = .copyKeyword + case TokenSpec(.await): self = .await + case TokenSpec(._move): self = ._move + case TokenSpec(._borrow): self = ._borrow + case TokenSpec(.try): self = .try + case TokenSpec(.consume): self = .consume + case TokenSpec(.copy): self = .copy default: return nil } } var spec: TokenSpec { switch self { - case .awaitKeyword: return .keyword(.await) - case ._moveKeyword: return .keyword(._move) - case ._borrowKeyword: return .keyword(._borrow) - case .consumeKeyword: return .keyword(.consume) - case .copyKeyword: return .keyword(.copy) - case .tryKeyword: return .keyword(.try) + case .await: return .keyword(.await) + case ._move: return .keyword(._move) + case ._borrow: return .keyword(._borrow) + case .consume: return .keyword(.consume) + case .copy: return .keyword(.copy) + case .try: return .keyword(.try) } } } enum IfOrSwitch: TokenSpecSet { - case ifKeyword - case switchKeyword + case `if` + case `switch` init?(lexeme: Lexer.Lexeme) { switch PrepareForKeywordMatch(lexeme) { - case TokenSpec(.if): self = .ifKeyword - case TokenSpec(.switch): self = .switchKeyword + case TokenSpec(.if): self = .if + case TokenSpec(.switch): 
self = .switch default: return nil } } var spec: TokenSpec { switch self { - case .ifKeyword: return .keyword(.if) - case .switchKeyword: return .keyword(.switch) + case .if: return .keyword(.if) + case .switch: return .keyword(.switch) } } } @@ -554,27 +554,27 @@ enum ExpressionPrefixOperator: TokenSpecSet { } enum MatchingPatternStart: TokenSpecSet { - case isKeyword - case letKeyword - case varKeyword - case inoutKeyword + case `is` + case `let` + case `var` + case `inout` init?(lexeme: Lexer.Lexeme) { switch PrepareForKeywordMatch(lexeme) { - case TokenSpec(.is): self = .isKeyword - case TokenSpec(.let): self = .letKeyword - case TokenSpec(.var): self = .varKeyword - case TokenSpec(.inout): self = .inoutKeyword + case TokenSpec(.is): self = .is + case TokenSpec(.let): self = .let + case TokenSpec(.var): self = .var + case TokenSpec(.inout): self = .inout default: return nil } } var spec: TokenSpec { switch self { - case .isKeyword: return .keyword(.is) - case .letKeyword: return .keyword(.let) - case .varKeyword: return .keyword(.var) - case .inoutKeyword: return .keyword(.inout) + case .is: return .keyword(.is) + case .let: return .keyword(.let) + case .var: return .keyword(.var) + case .inout: return .keyword(.inout) } } } @@ -600,28 +600,28 @@ enum ParameterModifier: TokenSpecSet { } enum PrimaryExpressionStart: TokenSpecSet { - case anyKeyword + case `Any` case atSign // For recovery - case capitalSelfKeyword + case `Self` case dollarIdentifier - case falseKeyword + case `false` case floatingLiteral case identifier - case initKeyword + case `init` case integerLiteral case leftBrace case leftParen case leftSquareBracket - case nilKeyword + case `nil` case period case pound case poundAvailableKeyword // For recovery case poundUnavailableKeyword // For recovery case regexSlash case extendedRegexDelimiter - case selfKeyword - case superKeyword - case trueKeyword + case `self` + case `super` + case `true` case wildcard case rawStringDelimiter case stringQuote @@ 
-630,28 +630,28 @@ enum PrimaryExpressionStart: TokenSpecSet { init?(lexeme: Lexer.Lexeme) { switch PrepareForKeywordMatch(lexeme) { - case TokenSpec(.Any): self = .anyKeyword + case TokenSpec(.Any): self = .Any case TokenSpec(.atSign): self = .atSign - case TokenSpec(.Self): self = .capitalSelfKeyword + case TokenSpec(.Self): self = .Self case TokenSpec(.dollarIdentifier): self = .dollarIdentifier - case TokenSpec(.false): self = .falseKeyword + case TokenSpec(.false): self = .false case TokenSpec(.floatingLiteral): self = .floatingLiteral case TokenSpec(.identifier): self = .identifier - case TokenSpec(.`init`): self = .initKeyword + case TokenSpec(.`init`): self = .`init` case TokenSpec(.integerLiteral): self = .integerLiteral case TokenSpec(.leftBrace): self = .leftBrace case TokenSpec(.leftParen): self = .leftParen case TokenSpec(.leftSquareBracket): self = .leftSquareBracket - case TokenSpec(.nil): self = .nilKeyword + case TokenSpec(.nil): self = .nil case TokenSpec(.period): self = .period case TokenSpec(.pound): self = .pound case TokenSpec(.poundAvailableKeyword): self = .poundAvailableKeyword case TokenSpec(.poundUnavailableKeyword): self = .poundUnavailableKeyword case TokenSpec(.regexSlash): self = .regexSlash case TokenSpec(.extendedRegexDelimiter): self = .extendedRegexDelimiter - case TokenSpec(.self): self = .selfKeyword - case TokenSpec(.super): self = .superKeyword - case TokenSpec(.true): self = .trueKeyword + case TokenSpec(.self): self = .self + case TokenSpec(.super): self = .super + case TokenSpec(.true): self = .true case TokenSpec(.wildcard): self = .wildcard case TokenSpec(.rawStringDelimiter): self = .rawStringDelimiter case TokenSpec(.stringQuote): self = .stringQuote @@ -663,28 +663,28 @@ enum PrimaryExpressionStart: TokenSpecSet { var spec: TokenSpec { switch self { - case .anyKeyword: return .keyword(.Any) + case .Any: return .keyword(.Any) case .atSign: return .atSign - case .capitalSelfKeyword: return .keyword(.Self) + case .Self: 
return .keyword(.Self) case .dollarIdentifier: return .dollarIdentifier - case .falseKeyword: return .keyword(.false) + case .false: return .keyword(.false) case .floatingLiteral: return .floatingLiteral case .identifier: return .identifier - case .initKeyword: return .keyword(.`init`) + case .`init`: return .keyword(.`init`) case .integerLiteral: return .integerLiteral case .leftBrace: return .leftBrace case .leftParen: return .leftParen case .leftSquareBracket: return .leftSquareBracket - case .nilKeyword: return .keyword(.nil) + case .nil: return .keyword(.nil) case .period: return .period case .pound: return .pound case .poundAvailableKeyword: return .poundAvailableKeyword case .poundUnavailableKeyword: return .poundUnavailableKeyword case .regexSlash: return .regexSlash case .extendedRegexDelimiter: return .extendedRegexDelimiter - case .selfKeyword: return .keyword(.self) - case .superKeyword: return .keyword(.super) - case .trueKeyword: return .keyword(.true) + case .self: return .keyword(.self) + case .super: return .keyword(.super) + case .true: return .keyword(.true) case .wildcard: return .wildcard case .rawStringDelimiter: return .rawStringDelimiter case .stringQuote: return .stringQuote