
Use TokenSpec (nee RawTokenKindMatch) in consume functions #1328

Merged 1 commit on Feb 9, 2023
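
This change renames RawTokenKindMatch to TokenSpec and RawTokenKindSubset to TokenSpecSet (see the CMakeLists.txt diff below) and moves the consume/expect entry points over to the new types. Judging from the conformances in this diff, a TokenSpecSet now provides a failable init from the current lexeme plus a spec property in place of the old rawTokenKind. A minimal sketch of that protocol shape, assuming SwiftParser's existing Lexer.Lexeme and TokenSpec types are in scope (the real declaration in TokenSpecSet.swift may carry further requirements):

protocol TokenSpecSet {
  // Classify the lexeme under the parser's cursor, or fail if it matches no case.
  init?(lexeme: Lexer.Lexeme)

  // The spec this case expects; replaces the old `var rawTokenKind: RawTokenKind`.
  var spec: TokenSpec { get }
}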
@@ -66,6 +66,7 @@ struct GenerateSwiftSyntax: ParsableCommand {
// SwiftParser
TemplateSpec(sourceFile: declarationModifierFile, module: swiftParserDir, filename: "DeclarationModifier.swift"),
TemplateSpec(sourceFile: parserEntryFile, module: swiftParserDir, filename: "Parser+Entry.swift"),
TemplateSpec(sourceFile: tokenSpecStaticMembersFile, module: swiftParserDir, filename: "TokenSpecStaticMembers.swift"),
TemplateSpec(sourceFile: typeAttributeFile, module: swiftParserDir, filename: "TypeAttribute.swift"),

// SwiftSyntax
@@ -24,15 +24,15 @@ let declarationModifierFile = SourceFileSyntax {
"""
)

try! EnumDeclSyntax("enum DeclarationModifier: RawTokenKindSubset") {
try! EnumDeclSyntax("enum DeclarationModifier: TokenSpecSet") {
for attribute in DECL_MODIFIER_KINDS {
DeclSyntax("case \(raw: attribute.swiftName)")
}

try InitializerDeclSyntax("init?(lexeme: Lexer.Lexeme)") {
try SwitchExprSyntax("switch lexeme") {
for attribute in DECL_MODIFIER_KINDS {
SwitchCaseSyntax("case RawTokenKindMatch(.\(raw: attribute.swiftName)):") {
SwitchCaseSyntax("case TokenSpec(.\(raw: attribute.swiftName)):") {
ExprSyntax("self = .\(raw: attribute.swiftName)")
}
}
@@ -42,7 +42,7 @@
}
}

try VariableDeclSyntax("var rawTokenKind: RawTokenKind") {
try VariableDeclSyntax("var spec: TokenSpec") {
try SwitchExprSyntax("switch self") {
for attribute in DECL_MODIFIER_KINDS {
SwitchCaseSyntax("case .\(raw: attribute.swiftName):") {
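
For reference, the DeclarationModifier template above expands to an enum of roughly this shape. This is a sketch with two illustrative modifier kinds only; the real DECL_MODIFIER_KINDS list is much longer, and the bodies of the spec switch fall outside the hunk shown, so the return values here are assumed.

enum DeclarationModifier: TokenSpecSet {
  case final
  case lazy

  init?(lexeme: Lexer.Lexeme) {
    switch lexeme {
    case TokenSpec(.final): self = .final
    case TokenSpec(.lazy): self = .lazy
    default: return nil
    }
  }

  var spec: TokenSpec {
    switch self {
    case .final: return .keyword(.final)
    case .lazy: return .keyword(.lazy)
    }
  }
}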
@@ -0,0 +1,32 @@
//===----------------------------------------------------------------------===//
//
// This source file is part of the Swift.org open source project
//
// Copyright (c) 2014 - 2023 Apple Inc. and the Swift project authors
// Licensed under Apache License v2.0 with Runtime Library Exception
//
// See https://swift.org/LICENSE.txt for license information
// See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors
//
//===----------------------------------------------------------------------===//

import SwiftSyntax
import SwiftSyntaxBuilder
import SyntaxSupport
import Utils

let tokenSpecStaticMembersFile = SourceFileSyntax(
leadingTrivia: .docLineComment(generateCopyrightHeader(for: "generate-swiftparser"))
) {
DeclSyntax("import SwiftSyntax")

try! ExtensionDeclSyntax("extension TokenSpec") {
DeclSyntax("static var eof: TokenSpec { return TokenSpec(.eof) }")

for token in SYNTAX_TOKENS where token.swiftKind != "keyword" {
DeclSyntax("static var \(raw: token.swiftKind): TokenSpec { return TokenSpec(.\(raw: token.swiftKind)) }")
}

DeclSyntax("static func keyword(_ keyword: Keyword) -> TokenSpec { return TokenSpec(.keyword(keyword)) }")
}
}
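
The generated Sources/SwiftParser/generated/TokenSpecStaticMembers.swift therefore looks roughly like this (abridged to a few token kinds). These static members are what keep shorthand such as .leftParen or .keyword(.rethrows) compiling at call sites that now expect a TokenSpec.

import SwiftSyntax

extension TokenSpec {
  static var eof: TokenSpec { return TokenSpec(.eof) }
  static var identifier: TokenSpec { return TokenSpec(.identifier) }
  static var leftParen: TokenSpec { return TokenSpec(.leftParen) }
  static var atSign: TokenSpec { return TokenSpec(.atSign) }
  static func keyword(_ keyword: Keyword) -> TokenSpec { return TokenSpec(.keyword(keyword)) }
}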
@@ -25,15 +25,15 @@ let typeAttributeFile = SourceFileSyntax {
)

try! ExtensionDeclSyntax("extension Parser") {
try EnumDeclSyntax("enum TypeAttribute: RawTokenKindSubset") {
try EnumDeclSyntax("enum TypeAttribute: TokenSpecSet") {
for attribute in TYPE_ATTR_KINDS {
DeclSyntax("case \(raw: attribute.name)")
}

try InitializerDeclSyntax("init?(lexeme: Lexer.Lexeme)") {
SwitchExprSyntax(switchKeyword: .keyword(.switch), expression: ExprSyntax("lexeme")) {
for attribute in TYPE_ATTR_KINDS {
SwitchCaseSyntax("case RawTokenKindMatch(.\(raw: attribute.name)):") {
SwitchCaseSyntax("case TokenSpec(.\(raw: attribute.name)):") {
ExprSyntax("self = .\(raw: attribute.swiftName)")
}
}
@@ -43,7 +43,7 @@
}
}

try VariableDeclSyntax("var rawTokenKind: RawTokenKind") {
try VariableDeclSyntax("var spec: TokenSpec") {
SwitchExprSyntax(switchKeyword: .keyword(.switch), expression: ExprSyntax("self")) {
for attribute in TYPE_ATTR_KINDS {
SwitchCaseSyntax("case .\(raw: attribute.swiftName):") {
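
Every init?(lexeme:) in this PR writes case TokenSpec(...) patterns inside a switch over the lexeme. That only compiles because a pattern-matching operator (~=) lets a TokenSpec value act as an expression pattern against a Lexer.Lexeme. A self-contained toy model of the mechanism, using stand-in types rather than SwiftParser's real ones:

struct Lexeme {
  var kind: String
  var text: String
}

struct Spec {
  var kind: String
  init(_ kind: String) { self.kind = kind }

  // Declaring ~= is what allows a Spec value to be used as a case pattern in a switch.
  static func ~= (spec: Spec, lexeme: Lexeme) -> Bool {
    return spec.kind == lexeme.kind
  }
}

let lexeme = Lexeme(kind: "leftParen", text: "(")
switch lexeme {
case Spec("leftParen"):
  print("matched a left parenthesis")
default:
  print("no match")
}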
126 changes: 63 additions & 63 deletions Sources/SwiftParser/Attributes.swift
@@ -30,7 +30,7 @@ extension Parser {

extension Parser {
/// Compiler-known attributes that take arguments.
enum DeclarationAttributeWithSpecialSyntax: RawTokenKindSubset {
enum DeclarationAttributeWithSpecialSyntax: TokenSpecSet {
case _alignment
case _cdecl
case _documentation
@@ -65,44 +65,44 @@

init?(lexeme: Lexer.Lexeme) {
switch lexeme {
case RawTokenKindMatch(._alignment): self = ._alignment
case RawTokenKindMatch(._backDeploy): self = .backDeployed
case RawTokenKindMatch(._cdecl): self = ._cdecl
case RawTokenKindMatch(._documentation): self = ._documentation
case RawTokenKindMatch(._dynamicReplacement): self = ._dynamicReplacement
case RawTokenKindMatch(._effects): self = ._effects
case RawTokenKindMatch(._expose): self = ._expose
case RawTokenKindMatch(._implements): self = ._implements
case RawTokenKindMatch(._nonSendable): self = ._nonSendable
case RawTokenKindMatch(._objcImplementation): self = ._objcImplementation
case RawTokenKindMatch(._objcRuntimeName): self = ._objcRuntimeName
case RawTokenKindMatch(._optimize): self = ._optimize
case RawTokenKindMatch(._originallyDefinedIn): self = ._originallyDefinedIn
case RawTokenKindMatch(._private): self = ._private
case RawTokenKindMatch(._projectedValueProperty): self = ._projectedValueProperty
case RawTokenKindMatch(._semantics): self = ._semantics
case RawTokenKindMatch(._silgen_name): self = ._silgen_name
case RawTokenKindMatch(._specialize): self = ._specialize
case RawTokenKindMatch(._spi): self = ._spi
case RawTokenKindMatch(._spi_available): self = ._spi_available
case RawTokenKindMatch(._swift_native_objc_runtime_base): self = ._swift_native_objc_runtime_base
case RawTokenKindMatch(._typeEraser): self = ._typeEraser
case RawTokenKindMatch(._unavailableFromAsync): self = ._unavailableFromAsync
case RawTokenKindMatch(.`rethrows`): self = .rethrows
case RawTokenKindMatch(.available): self = .available
case RawTokenKindMatch(.backDeployed): self = .backDeployed
case RawTokenKindMatch(.derivative): self = .derivative
case RawTokenKindMatch(.differentiable): self = .differentiable
case RawTokenKindMatch(.exclusivity): self = .exclusivity
case RawTokenKindMatch(.inline): self = .inline
case RawTokenKindMatch(.objc): self = .objc
case RawTokenKindMatch(.transpose): self = .transpose
case TokenSpec(._alignment): self = ._alignment
case TokenSpec(._backDeploy): self = .backDeployed
case TokenSpec(._cdecl): self = ._cdecl
case TokenSpec(._documentation): self = ._documentation
case TokenSpec(._dynamicReplacement): self = ._dynamicReplacement
case TokenSpec(._effects): self = ._effects
case TokenSpec(._expose): self = ._expose
case TokenSpec(._implements): self = ._implements
case TokenSpec(._nonSendable): self = ._nonSendable
case TokenSpec(._objcImplementation): self = ._objcImplementation
case TokenSpec(._objcRuntimeName): self = ._objcRuntimeName
case TokenSpec(._optimize): self = ._optimize
case TokenSpec(._originallyDefinedIn): self = ._originallyDefinedIn
case TokenSpec(._private): self = ._private
case TokenSpec(._projectedValueProperty): self = ._projectedValueProperty
case TokenSpec(._semantics): self = ._semantics
case TokenSpec(._silgen_name): self = ._silgen_name
case TokenSpec(._specialize): self = ._specialize
case TokenSpec(._spi): self = ._spi
case TokenSpec(._spi_available): self = ._spi_available
case TokenSpec(._swift_native_objc_runtime_base): self = ._swift_native_objc_runtime_base
case TokenSpec(._typeEraser): self = ._typeEraser
case TokenSpec(._unavailableFromAsync): self = ._unavailableFromAsync
case TokenSpec(.`rethrows`): self = .rethrows
case TokenSpec(.available): self = .available
case TokenSpec(.backDeployed): self = .backDeployed
case TokenSpec(.derivative): self = .derivative
case TokenSpec(.differentiable): self = .differentiable
case TokenSpec(.exclusivity): self = .exclusivity
case TokenSpec(.inline): self = .inline
case TokenSpec(.objc): self = .objc
case TokenSpec(.transpose): self = .transpose
default:
return nil
}
}

var rawTokenKind: RawTokenKind {
var spec: TokenSpec {
switch self {
case ._alignment: return .keyword(._alignment)
case ._cdecl: return .keyword(._cdecl)
@@ -171,7 +171,7 @@ extension Parser {
case .required:
shouldParseArgument = true
case .customAttribute:
shouldParseArgument = self.withLookahead { $0.isCustomAttributeArgument() } && self.at(.leftParen, allowTokenAtStartOfLine: false)
shouldParseArgument = self.withLookahead { $0.isCustomAttributeArgument() } && self.at(TokenSpec(.leftParen, allowAtStartOfLine: false))
case .optional:
shouldParseArgument = self.at(.leftParen)
}
@@ -313,7 +313,7 @@ extension Parser {
}
case .rethrows:
let (unexpectedBeforeAtSign, atSign) = self.expect(.atSign)
let (unexpectedBeforeAttributeName, attributeName) = self.expect(.keyword(.rethrows), remapping: .identifier)
let (unexpectedBeforeAttributeName, attributeName) = self.expect(TokenSpec(.keyword(.rethrows), remapping: .identifier))
return .attribute(
RawAttributeSyntax(
unexpectedBeforeAtSign,
@@ -358,21 +358,21 @@
)
}

enum DifferentiabilityKind: RawTokenKindSubset {
enum DifferentiabilityKind: TokenSpecSet {
case reverse
case linear
case forward

init?(lexeme: Lexer.Lexeme) {
switch lexeme {
case RawTokenKindMatch(.reverse): self = .reverse
case RawTokenKindMatch(._linear): self = .linear
case RawTokenKindMatch(._forward): self = .forward
case TokenSpec(.reverse): self = .reverse
case TokenSpec(._linear): self = .linear
case TokenSpec(._forward): self = .forward
default: return nil
}
}

var rawTokenKind: RawTokenKind {
var spec: TokenSpec {
switch self {
case .reverse: return .keyword(.reverse)
case .linear: return .keyword(._linear)
@@ -469,21 +469,21 @@ extension Parser {
}

mutating func parseDifferentiabilityParameter() -> RawDifferentiabilityParamSyntax? {
enum ExpectedTokenKind: RawTokenKindSubset {
enum ExpectedTokenKind: TokenSpecSet {
case identifier
case integerLiteral
case selfKeyword

init?(lexeme: Lexer.Lexeme) {
switch lexeme {
case RawTokenKindMatch(.identifier): self = .identifier
case RawTokenKindMatch(.integerLiteral): self = .integerLiteral
case RawTokenKindMatch(.self): self = .selfKeyword
case TokenSpec(.identifier): self = .identifier
case TokenSpec(.integerLiteral): self = .integerLiteral
case TokenSpec(.self): self = .selfKeyword
default: return nil
}
}

var rawTokenKind: RawTokenKind {
var spec: TokenSpec {
switch self {
case .identifier: return .identifier
case .integerLiteral: return .integerLiteral
@@ -649,7 +649,7 @@ extension Parser {
}

extension Parser {
enum SpecializeParameter: RawTokenKindSubset {
enum SpecializeParameter: TokenSpecSet {
case target
case availability
case exported
@@ -660,18 +660,18 @@

init?(lexeme: Lexer.Lexeme) {
switch lexeme {
case RawTokenKindMatch(.target): self = .target
case RawTokenKindMatch(.availability): self = .availability
case RawTokenKindMatch(.exported): self = .exported
case RawTokenKindMatch(.kind): self = .kind
case RawTokenKindMatch(.spi): self = .spi
case RawTokenKindMatch(.spiModule): self = .spiModule
case RawTokenKindMatch(.available): self = .available
case TokenSpec(.target): self = .target
case TokenSpec(.availability): self = .availability
case TokenSpec(.exported): self = .exported
case TokenSpec(.kind): self = .kind
case TokenSpec(.spi): self = .spi
case TokenSpec(.spiModule): self = .spiModule
case TokenSpec(.available): self = .available
default: return nil
}
}

var rawTokenKind: RawTokenKind {
var spec: TokenSpec {
switch self {
case .target: return .keyword(.target)
case .availability: return .keyword(.availability)
@@ -1082,14 +1082,14 @@ extension Parser {
let value: RawDocumentationAttributeArgumentSyntax.Value
switch label.tokenText {
case "visibility":
enum AccessLevelModifier: RawTokenKindSubset {
enum AccessLevelModifier: TokenSpecSet {
case `private`
case `fileprivate`
case `internal`
case `public`
case `open`

var rawTokenKind: RawTokenKind {
var spec: TokenSpec {
switch self {
case .private: return .keyword(.private)
case .fileprivate: return .keyword(.fileprivate)
@@ -1101,11 +1101,11 @@ extension Parser {

init?(lexeme: Lexer.Lexeme) {
switch lexeme {
case RawTokenKindMatch(.private): self = .private
case RawTokenKindMatch(.fileprivate): self = .fileprivate
case RawTokenKindMatch(.internal): self = .internal
case RawTokenKindMatch(.public): self = .public
case RawTokenKindMatch(.open): self = .open
case TokenSpec(.private): self = .private
case TokenSpec(.fileprivate): self = .fileprivate
case TokenSpec(.internal): self = .internal
case TokenSpec(.public): self = .public
case TokenSpec(.open): self = .open
default: return nil
}
}
@@ -1180,7 +1180,7 @@ extension Parser.Lookahead {
return false
}

if self.at(.leftParen, allowTokenAtStartOfLine: false) && self.withLookahead({ $0.isCustomAttributeArgument() }) {
if self.at(TokenSpec(.leftParen, allowAtStartOfLine: false)) && self.withLookahead({ $0.isCustomAttributeArgument() }) {
self.skipSingle()
}

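
Two call-site changes in Attributes.swift show that options which used to be extra parameters on at(_:) and expect(_:), namely allowTokenAtStartOfLine: and remapping:, now ride along on the TokenSpec value itself, e.g. self.at(TokenSpec(.leftParen, allowAtStartOfLine: false)) and self.expect(TokenSpec(.keyword(.rethrows), remapping: .identifier)). A simplified, self-contained sketch of that idea; the property names and initializer overloads here are assumptions, not SwiftParser's actual TokenSpec declaration:

struct TokenSpecSketch {
  enum Kind { case leftParen, identifier, keyword(String) }

  var kind: Kind
  var remapping: Kind?          // remap the matched token's kind when it is consumed
  var allowAtStartOfLine: Bool  // whether a token at the start of a new line still matches

  init(_ kind: Kind, remapping: Kind? = nil, allowAtStartOfLine: Bool = true) {
    self.kind = kind
    self.remapping = remapping
    self.allowAtStartOfLine = allowAtStartOfLine
  }
}

// Mirrors the two call sites above:
let closureParen = TokenSpecSketch(.leftParen, allowAtStartOfLine: false)
let rethrowsAsIdentifier = TokenSpecSketch(.keyword("rethrows"), remapping: .identifier)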
22 changes: 11 additions & 11 deletions Sources/SwiftParser/Availability.swift
@@ -54,7 +54,7 @@ extension Parser {
return RawAvailabilitySpecListSyntax(elements: elements, arena: self.arena)
}

enum AvailabilityArgumentKind: RawTokenKindSubset {
enum AvailabilityArgumentKind: TokenSpecSet {
case message
case renamed
case introduced
@@ -67,20 +67,20 @@

init?(lexeme: Lexer.Lexeme) {
switch lexeme {
case RawTokenKindMatch(.message): self = .message
case RawTokenKindMatch(.renamed): self = .renamed
case RawTokenKindMatch(.introduced): self = .introduced
case RawTokenKindMatch(.deprecated): self = .deprecated
case RawTokenKindMatch(.obsoleted): self = .obsoleted
case RawTokenKindMatch(.unavailable): self = .unavailable
case RawTokenKindMatch(.noasync): self = .noasync
case RawTokenKindMatch(.binaryOperator) where lexeme.tokenText == "*": self = .star
case RawTokenKindMatch(.identifier): self = .identifier
case TokenSpec(.message): self = .message
case TokenSpec(.renamed): self = .renamed
case TokenSpec(.introduced): self = .introduced
case TokenSpec(.deprecated): self = .deprecated
case TokenSpec(.obsoleted): self = .obsoleted
case TokenSpec(.unavailable): self = .unavailable
case TokenSpec(.noasync): self = .noasync
case TokenSpec(.binaryOperator) where lexeme.tokenText == "*": self = .star
case TokenSpec(.identifier): self = .identifier
default: return nil
}
}

var rawTokenKind: RawTokenKind {
var spec: TokenSpec {
switch self {
case .message: return .keyword(.message)
case .renamed: return .keyword(.renamed)
5 changes: 3 additions & 2 deletions Sources/SwiftParser/CMakeLists.txt
@@ -20,8 +20,8 @@ add_swift_host_library(SwiftParser
Nominals.swift
Parser.swift
Patterns.swift
RawTokenKindMatch.swift
RawTokenKindSubset.swift
TokenSpec.swift
TokenSpecSet.swift
Recovery.swift
Specifiers.swift
Statements.swift
@@ -35,6 +35,7 @@ add_swift_host_library(SwiftParser

generated/DeclarationModifier.swift
generated/Parser+Entry.swift
generated/TokenSpecStaticMembers.swift
generated/TypeAttribute.swift

Lexer/Cursor.swift