diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml
index 8f3635a..11e9dce 100644
--- a/.github/workflows/build.yml
+++ b/.github/workflows/build.yml
@@ -11,9 +11,9 @@ jobs:
     strategy:
       matrix:
         # Use these Java versions
-        java: [16]
+        java: [17]
         # and run on Linux, Windows and MacOS
-        os: [ubuntu-20.04, windows-latest, macos-11]
+        os: [ubuntu-20.04, windows-latest, macos-latest]
     runs-on: ${{ matrix.os }}
     steps:
       - name: checkout repository
@@ -32,8 +32,9 @@ jobs:
       - name: validate gradle wrapper
         uses: gradle/wrapper-validation-action@v1
       - name: setup jdk ${{ matrix.java }}
-        uses: actions/setup-java@v1
+        uses: actions/setup-java@v2.3.1
         with:
+          distribution: 'zulu'
           java-version: ${{ matrix.java }}
       - name: make gradle wrapper executable
         if: ${{ runner.os != 'Windows' }}
diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml
index 31f22fe..97ed652 100644
--- a/.github/workflows/publish.yml
+++ b/.github/workflows/publish.yml
@@ -12,9 +12,9 @@ jobs:
     strategy:
       matrix:
         # Use these Java versions
-        java: [16]
+        java: [17]
        # and run on Linux, Windows and MacOS
-        os: [ubuntu-20.04, windows-latest, macos-11]
+        os: [ubuntu-20.04, windows-latest, macos-latest]
     runs-on: ${{ matrix.os }}
     steps:
       - name: checkout repository
@@ -22,8 +22,9 @@ jobs:
       - name: validate gradle wrapper
         uses: gradle/wrapper-validation-action@v1
       - name: setup jdk ${{ matrix.java }}
-        uses: actions/setup-java@v1
+        uses: actions/setup-java@v2.3.1
         with:
+          distribution: 'zulu'
           java-version: ${{ matrix.java }}
       - name: make gradle wrapper executable
         if: ${{ runner.os != 'Windows' }}
@@ -42,7 +43,7 @@ jobs:
           MCD_PASSWORD: ${{ secrets.MCD_PASSWORD }}
       - name: Publish to GitHub Packages (macOS)
         if: ${{ runner.os == 'macOS' }}
-        run: gradle publishMacosX64PublicationToMavenRepository
+        run: gradle publishMacosX64PublicationToMavenRepository publishMacosArm64PublicationToMavenRepository
         env:
           MCD_USERNAME: ${{ secrets.MCD_USERNAME }}
           MCD_PASSWORD: ${{ secrets.MCD_PASSWORD }}
diff --git a/README.md b/README.md
index b9b6a58..7acfa76 100644
--- a/README.md
+++ b/README.md
@@ -56,7 +56,7 @@ To create a parser use `createParser(grammar) { ... }`, which will run the block
 
 ```kotlin
 // Create a lexer as simple as a method call.
-val lexer: Lexer<Token<TokenType>> = createLexer {
+val lexer = Lexer.create<Token<TokenType>> {
     // Implement a token type per line.
     '+' { process(makeToken(PLUS)) }
     // Built-in extension functions.
@@ -67,24 +67,25 @@
 }
 
 // Create a pratt-parser grammar. Dead simple.
-val grammar: Grammar<TokenType, String> = createGrammar {
+val grammar = Grammar.create<TokenType, String> {
     // Create a prefix parselet as a lambda function.
-    prefix(STRING) { token -> token.value.removeSurrounding("") }
-    // Create a infix parselet, with support to precedence as a lambda function.
-    infix(PLUS, 1) { left, token -> left + parseExpression() }
+    prefix(STRING) { token -> token.value }
+    // Create an infix parselet, with support to precedence as a lambda function.
+    infix(PLUS, 1) { left, _ -> left + parseExpression() }
 }
 
 // Use your grammar to create a pratt-parser.
-val parser = createParser(grammar) { // Actual code run by the parser.
+val parser = SourceParser.create(lexer, grammar) { // Actual code run by the parser.
     // Extension function: Throws if there's still tokens.
-    ensureEOF { 
+    ensureEOF {
         // Parses a expression using this parsers' grammar.
         parseExpression()
     }
 }
 
 // One line of code to rule them all.
-val result = parser.parse(classpathSource { "input.str" }, lexer)
+val result = parser.parse(Source.classpath { "input.str" })
+println(result)
 ```
 
 See the full code [here](https://github.com/adriantodt/tartar/blob/master/src/test/java/examples/StringJoiner.kt),
diff --git a/build.gradle.kts b/build.gradle.kts
index bf62992..352279c 100644
--- a/build.gradle.kts
+++ b/build.gradle.kts
@@ -1,17 +1,19 @@
 plugins {
-    kotlin("multiplatform") version "1.5.30"
+    kotlin("multiplatform") version "1.5.31"
     `maven-publish`
-    id("org.jetbrains.dokka") version "1.5.0"
+    id("org.jetbrains.dokka") version "1.5.31"
 }
 
 group = "com.github.adriantodt"
-version = "2.3"
+version = "3.0"
 
 repositories {
     mavenCentral()
 }
 
 kotlin {
+    explicitApi()
+
     jvm {
         compilations.all {
             kotlinOptions.jvmTarget = "13"
@@ -24,10 +26,11 @@ kotlin {
         browser()
         nodejs()
     }
-
-    linuxX64("linuxX64")
-    macosX64("macosX64")
-    mingwX64("mingwX64")
+    linuxX64()
+    linuxArm64()
+    macosX64()
+    macosArm64()
+    mingwX64()
 
     sourceSets {
         val commonMain by getting
@@ -51,16 +54,21 @@ kotlin {
         val linuxX64Main by getting {
             dependsOn(nativeMain)
         }
+        val linuxArm64Main by getting {
+            dependsOn(nativeMain)
+        }
         val mingwX64Main by getting {
             dependsOn(nativeMain)
         }
         val macosX64Main by getting {
             dependsOn(nativeMain)
         }
+        val macosArm64Main by getting {
+            dependsOn(nativeMain)
+        }
     }
 }
 
-
 tasks {
     register<Jar>("dokkaJar") {
         from(dokkaHtml)
@@ -68,11 +76,12 @@ tasks {
         archiveClassifier.set("javadoc")
     }
 }
+
 publishing {
     publications.withType<MavenPublication> {
         artifact(tasks["dokkaJar"])
     }
-    // select the repositories you want to publish to
+
     repositories {
         maven {
             url = uri("https://maven.cafeteria.dev/releases")
diff --git a/src/commonMain/kotlin/com/github/adriantodt/tartar/Tartar.kt b/src/commonMain/kotlin/com/github/adriantodt/tartar/Tartar.kt
deleted file mode 100644
index 8f3526b..0000000
--- a/src/commonMain/kotlin/com/github/adriantodt/tartar/Tartar.kt
+++ /dev/null
@@ -1,53 +0,0 @@
-package com.github.adriantodt.tartar
-
-import com.github.adriantodt.tartar.api.Closure
-import com.github.adriantodt.tartar.api.GrammarDSL
-import com.github.adriantodt.tartar.api.LexerDSL
-import com.github.adriantodt.tartar.api.lexer.Lexer
-import com.github.adriantodt.tartar.api.parser.Grammar
-import com.github.adriantodt.tartar.api.parser.Parser
-import com.github.adriantodt.tartar.api.parser.ParserContext
-import com.github.adriantodt.tartar.impl.GrammarBuilder
-import com.github.adriantodt.tartar.impl.LexerImpl
-import com.github.adriantodt.tartar.impl.MatcherImpl
-import com.github.adriantodt.tartar.impl.ParserImpl
-
-/**
- * Creates and configures a [Lexer].
- *
- * @param T The type of tokens the lexer generates.
- * @param block The lexer configurator.
- * @return A configured Lexer.
- * @author AdrianTodt
- */
-fun <T> createLexer(block: Closure<LexerDSL<T>, Unit>): Lexer<T> {
-    return LexerImpl(MatcherImpl<T>().apply(block))
-}
-
-/**
- * Creates and configures a [Grammar].
- *
- * @param T The grammar's token type.
- * @param E The grammar's expression result.
- * @param block The grammar configurator.
- * @return A configured Grammar.
- * @author AdrianTodt
- */
-fun <T, E> createGrammar(block: Closure<GrammarDSL<T, E>, Unit>): Grammar<T, E> {
-    return GrammarBuilder<T, E>().apply(block).build()
-}
-
-/**
- * Creates and configures a [Parser].
- *
- * @param T The parser's (and grammar's) token type.
- * @param E The parser's (and grammar's) expression result.
- * @param R The parser's result.
- * @param grammar The grammar used by this parser.
- * @param block The parser function.
- * @return A configured Parser.
- * @author AdrianTodt - */ -fun createParser(grammar: Grammar, block: Closure, R>): Parser { - return ParserImpl(grammar, block) -} diff --git a/src/commonMain/kotlin/com/github/adriantodt/tartar/api/FunctionTypes.kt b/src/commonMain/kotlin/com/github/adriantodt/tartar/api/FunctionTypes.kt deleted file mode 100644 index 8b5cca3..0000000 --- a/src/commonMain/kotlin/com/github/adriantodt/tartar/api/FunctionTypes.kt +++ /dev/null @@ -1,14 +0,0 @@ -package com.github.adriantodt.tartar.api - -import com.github.adriantodt.tartar.api.parser.ParserContext -import com.github.adriantodt.tartar.api.parser.Token - -typealias Closure = T.() -> R - -typealias ClosureFunction = T.(E) -> R - -typealias PrefixFunction = ParserContext.(Token) -> E - -typealias InfixFunction = ParserContext.(E, Token) -> E - -typealias CharPredicate = (Char) -> Boolean diff --git a/src/commonMain/kotlin/com/github/adriantodt/tartar/api/dsl/CharPredicate.kt b/src/commonMain/kotlin/com/github/adriantodt/tartar/api/dsl/CharPredicate.kt new file mode 100644 index 0000000..3c90e7c --- /dev/null +++ b/src/commonMain/kotlin/com/github/adriantodt/tartar/api/dsl/CharPredicate.kt @@ -0,0 +1,33 @@ +package com.github.adriantodt.tartar.api.dsl + +/** + * Represents a predicate (boolean-valued function) of one [Char]-valued argument. + * + * @since 3.0 + */ +public fun interface CharPredicate { + /** + * Evaluates this predicate on the given argument. + * + * @param value the input argument + * @return true if the input argument matches the predicate, otherwise false. + */ + public fun test(value: Char): Boolean + + public companion object { + /** + * [CharPredicate] which predicate is the function [Char.isLetter]. + */ + public val isLetter: CharPredicate = CharPredicate(Char::isLetter) + + /** + * [CharPredicate] which predicate is the function [Char.isDigit]. + */ + public val isDigit: CharPredicate = CharPredicate(Char::isDigit) + + /** + * [CharPredicate] which predicate is the function [Char.isLetterOrDigit]. + */ + public val isLetterOrDigit: CharPredicate = CharPredicate(Char::isLetterOrDigit) + } +} diff --git a/src/commonMain/kotlin/com/github/adriantodt/tartar/api/GrammarDSL.kt b/src/commonMain/kotlin/com/github/adriantodt/tartar/api/dsl/GrammarDSL.kt similarity index 69% rename from src/commonMain/kotlin/com/github/adriantodt/tartar/api/GrammarDSL.kt rename to src/commonMain/kotlin/com/github/adriantodt/tartar/api/dsl/GrammarDSL.kt index e8e4f90..593c43c 100644 --- a/src/commonMain/kotlin/com/github/adriantodt/tartar/api/GrammarDSL.kt +++ b/src/commonMain/kotlin/com/github/adriantodt/tartar/api/dsl/GrammarDSL.kt @@ -1,8 +1,8 @@ -package com.github.adriantodt.tartar.api +package com.github.adriantodt.tartar.api.dsl -import com.github.adriantodt.tartar.api.parser.Grammar -import com.github.adriantodt.tartar.api.parser.InfixParser -import com.github.adriantodt.tartar.api.parser.PrefixParser +import com.github.adriantodt.tartar.api.grammar.Grammar +import com.github.adriantodt.tartar.api.grammar.InfixParselet +import com.github.adriantodt.tartar.api.grammar.PrefixParselet /** * A builder of [Grammars][com.github.adriantodt.tartar.api.parser.Grammar], as a domain-specific language (DSL). @@ -11,14 +11,14 @@ import com.github.adriantodt.tartar.api.parser.PrefixParser * @param E The grammar's expression result. * @author AdrianTodt */ -interface GrammarDSL { +public interface GrammarDSL { /** * Imports all parselets from other grammars. * @param override If set to true, imported parselets overrides existing ones. 
If false, they throw. * @param grammars The grammars to import. */ - fun import(override: Boolean = false, vararg grammars: Grammar) + public fun import(override: Boolean = false, vararg grammars: Grammar) /** * Registers a prefix parselets into the grammar. @@ -26,7 +26,7 @@ interface GrammarDSL { * @param parselet The prefix parselet to register. * @param override If set to true, imported parselets overrides existing ones. If false, they throw. */ - fun prefix(type: T, parselet: PrefixParser, override: Boolean = false) + public fun prefix(type: T, parselet: PrefixParselet, override: Boolean = false) /** * Registers a prefix parselets into the grammar. @@ -34,7 +34,7 @@ interface GrammarDSL { * @param override If set to true, imported parselets overrides existing ones. If false, they throw. * @param block The code to execute when the type matches. */ - fun prefix(type: T, override: Boolean = false, block: PrefixFunction) + public fun prefix(type: T, override: Boolean = false, block: PrefixFunction) /** * Registers a infix parselets into the grammar. @@ -42,7 +42,7 @@ interface GrammarDSL { * @param parselet The infix parselet to register. * @param override If set to true, imported parselets overrides existing ones. If false, they throw. */ - fun infix(type: T, parselet: InfixParser, override: Boolean = false) + public fun infix(type: T, parselet: InfixParselet, override: Boolean = false) /** * Registers a infix parselets into the grammar. @@ -50,5 +50,5 @@ interface GrammarDSL { * @param override If set to true, imported parselets overrides existing ones. If false, they throw. * @param block The code to execute when the type matches. */ - fun infix(type: T, precedence: Int, override: Boolean = false, block: InfixFunction) + public fun infix(type: T, precedence: Int, override: Boolean = false, block: InfixFunction) } diff --git a/src/commonMain/kotlin/com/github/adriantodt/tartar/api/dsl/GrammarFunctionTypes.kt b/src/commonMain/kotlin/com/github/adriantodt/tartar/api/dsl/GrammarFunctionTypes.kt new file mode 100644 index 0000000..457537e --- /dev/null +++ b/src/commonMain/kotlin/com/github/adriantodt/tartar/api/dsl/GrammarFunctionTypes.kt @@ -0,0 +1,23 @@ +package com.github.adriantodt.tartar.api.dsl + +import com.github.adriantodt.tartar.api.parser.ParserContext +import com.github.adriantodt.tartar.api.parser.Token + +/** + * Function used by [GrammarDSL] to configure a + * [Prefix Parselet][com.github.adriantodt.tartar.api.parser.PrefixParselet] + * in a functional way. + */ +public typealias PrefixFunction = ParserContext.(token: Token) -> E + +/** + * Function used by [GrammarDSL] to configure a + * [Infix Parselet][com.github.adriantodt.tartar.api.parser.InfixParselet] + * in a functional way. + */ +public typealias InfixFunction = ParserContext.(left: E, token: Token) -> E + +/** + * Function which receives a [LexerDSL] as its receiver. 
+ */ +public typealias GrammarConfig = GrammarDSL.() -> Unit diff --git a/src/commonMain/kotlin/com/github/adriantodt/tartar/api/LexerDSL.kt b/src/commonMain/kotlin/com/github/adriantodt/tartar/api/dsl/LexerDSL.kt similarity index 61% rename from src/commonMain/kotlin/com/github/adriantodt/tartar/api/LexerDSL.kt rename to src/commonMain/kotlin/com/github/adriantodt/tartar/api/dsl/LexerDSL.kt index 9b402f7..d1ecea8 100644 --- a/src/commonMain/kotlin/com/github/adriantodt/tartar/api/LexerDSL.kt +++ b/src/commonMain/kotlin/com/github/adriantodt/tartar/api/dsl/LexerDSL.kt @@ -1,6 +1,4 @@ -package com.github.adriantodt.tartar.api - -import com.github.adriantodt.tartar.api.lexer.LexerContext +package com.github.adriantodt.tartar.api.dsl /** * A builder of [Lexers][com.github.adriantodt.tartar.api.lexer.Lexer], as a domain-specific language (DSL). @@ -8,52 +6,52 @@ import com.github.adriantodt.tartar.api.lexer.LexerContext * @param T The type of tokens the lexer generates. * @author AdrianTodt */ -interface LexerDSL { +public interface LexerDSL { /** * Configures the lexer to execute a specific block of code when this character matches. */ - operator fun Char.invoke(block: ClosureFunction, Char, Unit> = {}) { + public operator fun Char.invoke(block: MatchFunction = {}) { matching(this).configure(block) } /** * Configures the lexer to execute a specific block of code when this sequence of characters matches. */ - operator fun String.invoke(block: ClosureFunction, Char, Unit> = {}) { + public operator fun String.invoke(block: MatchFunction = {}) { matching(this).configure(block) } /** * Returns a matcher for a sequence of characters. */ - fun matching(string: String): LexerDSL + public fun matching(string: String): LexerDSL /** * Returns a matcher for a sequence of characters and configures it. */ - fun matching(string: String, block: Closure, Unit>) { + public fun matching(string: String, block: LexerConfig) { matching(string).block() } /** * Returns a matcher for a character. */ - fun matching(char: Char): LexerDSL + public fun matching(char: Char): LexerDSL /** * Returns a matcher for a characters and configures it. */ - fun matching(char: Char, block: Closure, Unit>) { + public fun matching(char: Char, block: LexerConfig) { matching(char).block() } /** * Returns a matcher for a predicate. */ - fun matching(block: CharPredicate): LexerDSL + public fun matching(block: CharPredicate): LexerDSL /** * Configures the lexer to execute a specific block of code when match. */ - fun configure(block: ClosureFunction, Char, Unit>) + public fun configure(block: MatchFunction) } diff --git a/src/commonMain/kotlin/com/github/adriantodt/tartar/api/dsl/LexerFunctionTypes.kt b/src/commonMain/kotlin/com/github/adriantodt/tartar/api/dsl/LexerFunctionTypes.kt new file mode 100644 index 0000000..0748348 --- /dev/null +++ b/src/commonMain/kotlin/com/github/adriantodt/tartar/api/dsl/LexerFunctionTypes.kt @@ -0,0 +1,13 @@ +package com.github.adriantodt.tartar.api.dsl + +import com.github.adriantodt.tartar.api.lexer.LexerContext + +/** + * Function which receives a [LexerContext] as its receiver and a matched [Char] as its parameter. + */ +public typealias MatchFunction = LexerContext.(char: Char) -> Unit + +/** + * Function which receives a [LexerDSL] as its receiver. 
+ */ +public typealias LexerConfig = LexerDSL.() -> Unit diff --git a/src/commonMain/kotlin/com/github/adriantodt/tartar/api/dsl/ParserFunctionTypes.kt b/src/commonMain/kotlin/com/github/adriantodt/tartar/api/dsl/ParserFunctionTypes.kt new file mode 100644 index 0000000..9465ef0 --- /dev/null +++ b/src/commonMain/kotlin/com/github/adriantodt/tartar/api/dsl/ParserFunctionTypes.kt @@ -0,0 +1,8 @@ +package com.github.adriantodt.tartar.api.dsl + +import com.github.adriantodt.tartar.api.parser.ParserContext + +/** + * Function which receives a [ParserContext] as its receiver, and returns a result. + */ +public typealias ParserFunction = ParserContext.() -> R diff --git a/src/commonMain/kotlin/com/github/adriantodt/tartar/api/grammar/Grammar.kt b/src/commonMain/kotlin/com/github/adriantodt/tartar/api/grammar/Grammar.kt new file mode 100644 index 0000000..9e87f31 --- /dev/null +++ b/src/commonMain/kotlin/com/github/adriantodt/tartar/api/grammar/Grammar.kt @@ -0,0 +1,35 @@ +package com.github.adriantodt.tartar.api.grammar + +import com.github.adriantodt.tartar.api.dsl.GrammarConfig +import com.github.adriantodt.tartar.impl.GrammarBuilder +import kotlin.jvm.JvmStatic + +/** + * A grammar for pratt-parsers. + * + * @param T The grammar's token type. + * @param E The grammar's expression result. + * @param prefix A map of prefix parsers for each token type. + * @param infix A map of prefix parsers for each token type. + * @author An Tran + */ +public data class Grammar( + public val prefix: Map>, + public val infix: Map> +) { + public companion object { + /** + * Creates and configures a [Grammar]. + * + * @param T The grammar's token type. + * @param E The grammar's expression result. + * @param block The grammar configurator. + * @return A configured Grammar. + * @author AdrianTodt + */ + @JvmStatic + public fun create(block: GrammarConfig): Grammar { + return GrammarBuilder().apply(block).build() + } + } +} diff --git a/src/commonMain/kotlin/com/github/adriantodt/tartar/api/grammar/InfixParselet.kt b/src/commonMain/kotlin/com/github/adriantodt/tartar/api/grammar/InfixParselet.kt new file mode 100644 index 0000000..51a9b0d --- /dev/null +++ b/src/commonMain/kotlin/com/github/adriantodt/tartar/api/grammar/InfixParselet.kt @@ -0,0 +1,23 @@ +package com.github.adriantodt.tartar.api.grammar + +import com.github.adriantodt.tartar.api.parser.ParserContext +import com.github.adriantodt.tartar.api.parser.Token + +/** + * A interface for infix-based parsing, with support to precedence. + * + * @param T The grammar's token type. + * @param E The grammar's expression result. + * @author An Tran + */ +public interface InfixParselet { + /** + * This infix parser's precedence. + */ + public val precedence: Int + + /** + * This infix parser's parsing implementation. + */ + public fun parse(ctx: ParserContext, left: E, token: Token): E +} diff --git a/src/commonMain/kotlin/com/github/adriantodt/tartar/api/grammar/PrefixParselet.kt b/src/commonMain/kotlin/com/github/adriantodt/tartar/api/grammar/PrefixParselet.kt new file mode 100644 index 0000000..8a607f8 --- /dev/null +++ b/src/commonMain/kotlin/com/github/adriantodt/tartar/api/grammar/PrefixParselet.kt @@ -0,0 +1,18 @@ +package com.github.adriantodt.tartar.api.grammar + +import com.github.adriantodt.tartar.api.parser.ParserContext +import com.github.adriantodt.tartar.api.parser.Token + +/** + * A interface for prefix-based parsing. + * + * @param T The grammar's token type. + * @param E The grammar's expression result. 
+ * @author An Tran + */ +public interface PrefixParselet { + /** + * This prefix parser's parsing implementation. + */ + public fun parse(ctx: ParserContext, token: Token): E +} diff --git a/src/commonMain/kotlin/com/github/adriantodt/tartar/api/lexer/Lexer.kt b/src/commonMain/kotlin/com/github/adriantodt/tartar/api/lexer/Lexer.kt index ccedfa8..2c3e1c5 100644 --- a/src/commonMain/kotlin/com/github/adriantodt/tartar/api/lexer/Lexer.kt +++ b/src/commonMain/kotlin/com/github/adriantodt/tartar/api/lexer/Lexer.kt @@ -1,19 +1,24 @@ package com.github.adriantodt.tartar.api.lexer +import com.github.adriantodt.tartar.api.dsl.LexerConfig +import com.github.adriantodt.tartar.impl.LexerImpl +import com.github.adriantodt.tartar.impl.MatcherImpl +import kotlin.jvm.JvmStatic + /** * Reads characters from a [Source] and outputs tokens. * * @param T The type of tokens the lexer generates. * @author AdrianTodt */ -interface Lexer { +public interface Lexer { /** * Parses a source and outputs tokens into a consumer. * * @param source A source of characters. * @param output The consumer of tokens. */ - fun parse(source: Source, output: (T) -> Unit) + public fun parse(source: Source, output: (T) -> Unit) /** * Parses a source and adds all tokens into a collection. @@ -22,7 +27,7 @@ interface Lexer { * @param collection The collection to add all tokens into. * @return The collection with the tokens. */ - fun > parseTo(source: Source, collection: C): C { + public fun > parseTo(source: Source, collection: C): C { parse(source) { collection.add(it) } return collection } @@ -33,7 +38,22 @@ interface Lexer { * @param source A source of characters. * @return A list with the tokens. */ - fun parseToList(source: Source): List { + public fun parseToList(source: Source): List { return parseTo(source, ArrayList()) } + + public companion object { + /** + * Creates and configures a [Lexer]. + * + * @param T The type of tokens the lexer generates. + * @param block The lexer configurator. + * @return A configured Lexer. + * @author AdrianTodt + */ + @JvmStatic + public fun create(block: LexerConfig): Lexer { + return LexerImpl(MatcherImpl().apply(block)) + } + } } diff --git a/src/commonMain/kotlin/com/github/adriantodt/tartar/api/lexer/LexerContext.kt b/src/commonMain/kotlin/com/github/adriantodt/tartar/api/lexer/LexerContext.kt index ecef5d2..971b482 100644 --- a/src/commonMain/kotlin/com/github/adriantodt/tartar/api/lexer/LexerContext.kt +++ b/src/commonMain/kotlin/com/github/adriantodt/tartar/api/lexer/LexerContext.kt @@ -6,65 +6,65 @@ package com.github.adriantodt.tartar.api.lexer * @param T The type of tokens the lexer generates. * @author AdrianTodt */ -interface LexerContext { +public interface LexerContext { /** * The original source of this context. */ - val source: Source + public val source: Source /** * This context' reader. */ - val reader: StringReader + public val reader: StringReader /** * The current index. */ - val index: Int + public val index: Int /** * Peeks the next character of the reader. */ - fun peek(): Char + public fun peek(): Char /** * Peeks a character a distance far away of the reader. */ - fun peek(distance: Int): Char + public fun peek(distance: Int): Char /** * Peeks a string with a predefined length of the reader. */ - fun peekString(length: Int): String + public fun peekString(length: Int): String /** * Peeks the next character and, if equals the expected character, consumes it. * Returns true if the peeked character were equals the expected consumer and consumed. 
*/ - fun match(expect: Char): Boolean + public fun match(expect: Char): Boolean /** * Checks if there's more characters ahead. */ - fun hasNext(): Boolean + public fun hasNext(): Boolean /** * Returns the next character of the reader. */ - fun next(): Char + public fun next(): Char /** * Returns a predefined length of characters of the reader, as a String. */ - fun nextString(length: Int): String + public fun nextString(length: Int): String /** * Calls the [Lexer.parse]'s token consumer. */ - fun process(token: T) + public fun process(token: T) /** * Lexes once and return the processed tokens. */ - fun parseOnce(): List + public fun parseOnce(): List } diff --git a/src/commonMain/kotlin/com/github/adriantodt/tartar/api/lexer/Section.kt b/src/commonMain/kotlin/com/github/adriantodt/tartar/api/lexer/Section.kt index ae58209..17deb9c 100644 --- a/src/commonMain/kotlin/com/github/adriantodt/tartar/api/lexer/Section.kt +++ b/src/commonMain/kotlin/com/github/adriantodt/tartar/api/lexer/Section.kt @@ -11,14 +11,13 @@ import kotlin.math.min * @param length The section's length. * @author An Tran, AdrianTodt */ -data class Section(val source: Source, val index: Int, val length: Int) { +public data class Section(public val source: Source, public val index: Int, public val length: Int) { init { - val bounds = 0..source.content.length - require(index in bounds) { + require(index in source.bounds) { "Section index ($index) must be within content's bounds (0..${source.content.length})" } val end = index + length - require(end in bounds) { + require(end in source.bounds) { "Section end ($end) must be within content's bounds (0..${source.content.length})" } } @@ -26,44 +25,45 @@ data class Section(val source: Source, val index: Int, val length: Int) { /** * The range of this section. */ - val range by lazy { index..(index + length) } + public val range: IntRange = index..(index + length) /** * The substring this section represents. */ - val substring by lazy { source.content.substring(range) } + public val substring: String = source.content.substring(range) /** * The lines of this section. */ - val lines by lazy { - source.lines.dropWhile { range.first > it.range.last }.takeWhile { range.last > it.range.first } - } + public val lines: List = source.lines + .dropWhile { range.first > it.range.last } + .takeWhile { range.last > it.range.first } + /** * The line number of the start of the section. */ - val startLineNumber by lazy { lines.first().lineNumber } + public val startLineNumber: Int = lines.first().lineNumber /** * The line index of the start of the section. */ - val startLineIndex by lazy { range.first - lines.first().range.first } + public val startLineIndex: Int = range.first - lines.first().range.first /** * The line number of the end of the section. */ - val endLineNumber by lazy { lines.last().lineNumber } + public val endLineNumber: Int = lines.last().lineNumber /** * The line index of the end of the section. */ - val endLineIndex by lazy { range.last - lines.last().range.last } + public val endLineIndex: Int = range.last - lines.last().range.last /** * Creates a new section which spans across this and another section. */ - fun span(other: Section): Section { + public fun span(other: Section): Section { require(source == other.source) { "Sections $this and $other have different sources and thus can't be spanned." } @@ -77,5 +77,7 @@ data class Section(val source: Source, val index: Int, val length: Int) { /** * Returns a string representation of the section. 
*/ - override fun toString() = "(${source.name}:$startLineNumber:$startLineIndex)" + override fun toString(): String { + return "(${source.name}:$startLineNumber:$startLineIndex)" + } } diff --git a/src/commonMain/kotlin/com/github/adriantodt/tartar/api/lexer/Sectional.kt b/src/commonMain/kotlin/com/github/adriantodt/tartar/api/lexer/Sectional.kt index d140a78..9285bc8 100644 --- a/src/commonMain/kotlin/com/github/adriantodt/tartar/api/lexer/Sectional.kt +++ b/src/commonMain/kotlin/com/github/adriantodt/tartar/api/lexer/Sectional.kt @@ -5,14 +5,16 @@ package com.github.adriantodt.tartar.api.lexer * * @author An Tran */ -interface Sectional { +public interface Sectional { /** * The assigned section. */ - val section: Section + public val section: Section /** * Creates a section which spans across this and another section. */ - fun span(other: Sectional) = section.span(other.section) + public fun span(other: Sectional): Section { + return section.span(other.section) + } } diff --git a/src/commonMain/kotlin/com/github/adriantodt/tartar/api/lexer/Source.kt b/src/commonMain/kotlin/com/github/adriantodt/tartar/api/lexer/Source.kt index c2d1dc7..f41546d 100644 --- a/src/commonMain/kotlin/com/github/adriantodt/tartar/api/lexer/Source.kt +++ b/src/commonMain/kotlin/com/github/adriantodt/tartar/api/lexer/Source.kt @@ -11,21 +11,26 @@ import com.github.adriantodt.tartar.impl.calculateLineRanges * @param path The path to the source. * @author AdrianTodt, An Tran */ -data class Source(val content: String, val name: String = "?", val path: String = "!!no path!!") { +public data class Source(val content: String, val name: String = "?", val path: String = "!!no path!!") { /** * The lines of the content. */ - val lines by lazy { - content.calculateLineRanges().mapIndexed { index, range -> - Line(index + 1, content.substring(range), range) - } + val lines: List = content.calculateLineRanges().mapIndexed { index, range -> + Line(index + 1, content.substring(range), range) } + /** + * The bounds of a given source. + */ + val bounds: IntRange = 0..content.length + /** * Represents a line from the source. * @param lineNumber The line's number. * @param content The line's content, including the line separator. * @param range The line's range spanning the source's content. */ - data class Line internal constructor(val lineNumber: Int, val content: String, val range: IntRange) + public data class Line internal constructor(val lineNumber: Int, val content: String, val range: IntRange) + + public companion object } diff --git a/src/commonMain/kotlin/com/github/adriantodt/tartar/api/lexer/StringReader.kt b/src/commonMain/kotlin/com/github/adriantodt/tartar/api/lexer/StringReader.kt index 5ab64aa..c63a852 100644 --- a/src/commonMain/kotlin/com/github/adriantodt/tartar/api/lexer/StringReader.kt +++ b/src/commonMain/kotlin/com/github/adriantodt/tartar/api/lexer/StringReader.kt @@ -3,25 +3,25 @@ package com.github.adriantodt.tartar.api.lexer /** * Portable implementation of a character stream whose source is a string. */ -expect class StringReader(s: String) { +public expect class StringReader(s: String) { /** * Reads a single character. */ - fun read(): Int + public fun read(): Int /** * Skips a specified amount of characters in the stream, returning the amount of characters actually skipped. */ - fun skip(ns: Long): Long + public fun skip(ns: Long): Long /** * Marks the stream's current position. Calling reset() reverts the stream to this position. 
*/ - fun mark(readAheadLimit: Int) + public fun mark(readAheadLimit: Int) /** * Resets the stream to the last marked position. * If mark() was never called, the stream resets to the beginning of the string. */ - fun reset() + public fun reset() } diff --git a/src/commonMain/kotlin/com/github/adriantodt/tartar/api/parser/Grammar.kt b/src/commonMain/kotlin/com/github/adriantodt/tartar/api/parser/Grammar.kt deleted file mode 100644 index 6aa0402..0000000 --- a/src/commonMain/kotlin/com/github/adriantodt/tartar/api/parser/Grammar.kt +++ /dev/null @@ -1,12 +0,0 @@ -package com.github.adriantodt.tartar.api.parser - -/** - * A grammar for pratt-parsers. - * - * @param T The grammar's token type. - * @param E The grammar's expression result. - * @param prefixParsers A map of prefix parsers for each token type. - * @param infixParsers A map of prefix parsers for each token type. - * @author An Tran - */ -data class Grammar(val prefixParsers: Map>, val infixParsers: Map>) diff --git a/src/commonMain/kotlin/com/github/adriantodt/tartar/api/parser/InfixParser.kt b/src/commonMain/kotlin/com/github/adriantodt/tartar/api/parser/InfixParser.kt deleted file mode 100644 index 27ad42c..0000000 --- a/src/commonMain/kotlin/com/github/adriantodt/tartar/api/parser/InfixParser.kt +++ /dev/null @@ -1,20 +0,0 @@ -package com.github.adriantodt.tartar.api.parser - -/** - * A interface for infix-based parsing, with support to precedence. - * - * @param T The grammar's token type. - * @param E The grammar's expression result. - * @author An Tran - */ -interface InfixParser { - /** - * This infix parser's precedence. - */ - val precedence: Int - - /** - * This infix parser's parsing implementation. - */ - fun parse(ctx: ParserContext, left: E, token: Token): E -} diff --git a/src/commonMain/kotlin/com/github/adriantodt/tartar/api/parser/Parser.kt b/src/commonMain/kotlin/com/github/adriantodt/tartar/api/parser/Parser.kt index ea8c722..7621f32 100644 --- a/src/commonMain/kotlin/com/github/adriantodt/tartar/api/parser/Parser.kt +++ b/src/commonMain/kotlin/com/github/adriantodt/tartar/api/parser/Parser.kt @@ -1,7 +1,11 @@ package com.github.adriantodt.tartar.api.parser +import com.github.adriantodt.tartar.api.dsl.ParserFunction +import com.github.adriantodt.tartar.api.grammar.Grammar import com.github.adriantodt.tartar.api.lexer.Lexer import com.github.adriantodt.tartar.api.lexer.Source +import com.github.adriantodt.tartar.impl.ParserImpl +import kotlin.jvm.JvmStatic /** * Reads tokens from a list and parses it with a pratt-parser. @@ -11,11 +15,11 @@ import com.github.adriantodt.tartar.api.lexer.Source * @param R The parser's result. * @author AdrianTodt */ -interface Parser { +public interface Parser { /** * The [Grammar] of this pratt-parser. */ - val grammar: Grammar + public val grammar: Grammar /** * Parses tokens with this pratt-parser and returns the computed result. @@ -24,7 +28,7 @@ interface Parser { * @param tokens A list of tokens, probably created with [Lexer]. * @return The computed result. */ - fun parse(source: Source, tokens: List>): R + public fun parse(source: Source, tokens: List>): R /** * Parses tokens from a source, using a specified lexer, with this pratt-parser, and returns the computed result. @@ -33,7 +37,25 @@ interface Parser { * @param lexer A lexer to parse the source. * @return The computed result. 
*/ - fun parse(source: Source, lexer: Lexer>): R { + public fun parse(source: Source, lexer: Lexer>): R { return parse(source, lexer.parseToList(source)) } + + public companion object { + /** + * Creates and configures a [Parser]. + * + * @param T The parser's (and grammar's) token type. + * @param E The parser's (and grammar's) expression result. + * @param R The parser's result. + * @param grammar The grammar used by this parser. + * @param block The parser function. + * @return A configured Parser. + * @author AdrianTodt + */ + @JvmStatic + public fun create(grammar: Grammar, block: ParserFunction): Parser { + return ParserImpl(grammar, block) + } + } } diff --git a/src/commonMain/kotlin/com/github/adriantodt/tartar/api/parser/ParserContext.kt b/src/commonMain/kotlin/com/github/adriantodt/tartar/api/parser/ParserContext.kt index 4a8189c..249a79f 100644 --- a/src/commonMain/kotlin/com/github/adriantodt/tartar/api/parser/ParserContext.kt +++ b/src/commonMain/kotlin/com/github/adriantodt/tartar/api/parser/ParserContext.kt @@ -1,5 +1,6 @@ package com.github.adriantodt.tartar.api.parser +import com.github.adriantodt.tartar.api.grammar.Grammar import com.github.adriantodt.tartar.api.lexer.Source /** @@ -9,94 +10,94 @@ import com.github.adriantodt.tartar.api.lexer.Source * @param E The parser's (and grammar's) expression result. * @author AdrianTodt */ -interface ParserContext { +public interface ParserContext { /** * The source of this grammar's tokens. */ - val source: Source + public val source: Source /** * The grammar of this parser's context. */ - val grammar: Grammar + public val grammar: Grammar /** * The current index in the list of tokens. */ - var index: Int + public var index: Int /** * A property which is true if there's no more tokens. */ - val eof: Boolean + public val eof: Boolean /** * A property which contains the last token. */ - val last: Token + public val last: Token /** * Creates a child parser context with the specified grammar. */ - fun withGrammar(grammar: Grammar): ParserContext + public fun withGrammar(grammar: Grammar): ParserContext /** * Parses the expression using this parser's grammar. */ - fun parseExpression(precedence: Int = 0): E + public fun parseExpression(precedence: Int = 0): E /** * Parses the expression using another grammar. */ - fun Grammar.parseExpression(precedence: Int = 0): E = withGrammar(grammar).parseExpression(precedence) + public fun Grammar.parseExpression(precedence: Int = 0): E = withGrammar(grammar).parseExpression(precedence) /** * Eats the current token, advancing the index by one. */ - fun eat(): Token + public fun eat(): Token /** * Eats the current token, advancing the index by one. Throws a [SyntaxException] if the token type doesn't match. */ - fun eat(type: T): Token + public fun eat(type: T): Token /** * Equivalent to [nextIs], but eats the current token if true. */ - fun match(type: T): Boolean + public fun match(type: T): Boolean /** * Equivalent to [nextIsAny], but eats the current token if true. */ - fun matchAny(vararg type: T): Boolean + public fun matchAny(vararg type: T): Boolean /** * Move the index backwards one token and returns it. */ - fun back(): Token + public fun back(): Token /** * Peeks a token a distance far away of the reader. */ - fun peek(distance: Int = 0): Token + public fun peek(distance: Int = 0): Token /** * Peeks the next token and if the token types are equal, returns true. 
*/ - fun nextIs(type: T): Boolean + public fun nextIs(type: T): Boolean /** * Peeks the next token and if the token type is equal to any of the types, returns true. */ - fun nextIsAny(vararg types: T): Boolean + public fun nextIsAny(vararg types: T): Boolean /** * Peeks the tokens ahead until a token of any of the types is found. */ - fun peekAheadUntil(vararg type: T): List> + public fun peekAheadUntil(vararg type: T): List> /** * Skips tokens ahead until a token of any of the types is found. */ - fun skipUntil(vararg type: T) + public fun skipUntil(vararg type: T) } diff --git a/src/commonMain/kotlin/com/github/adriantodt/tartar/api/parser/PrefixParser.kt b/src/commonMain/kotlin/com/github/adriantodt/tartar/api/parser/PrefixParser.kt deleted file mode 100644 index 84743cb..0000000 --- a/src/commonMain/kotlin/com/github/adriantodt/tartar/api/parser/PrefixParser.kt +++ /dev/null @@ -1,15 +0,0 @@ -package com.github.adriantodt.tartar.api.parser - -/** - * A interface for prefix-based parsing. - * - * @param T The grammar's token type. - * @param E The grammar's expression result. - * @author An Tran - */ -interface PrefixParser { - /** - * This prefix parser's parsing implementation. - */ - fun parse(ctx: ParserContext, token: Token): E -} diff --git a/src/commonMain/kotlin/com/github/adriantodt/tartar/api/parser/SourceParser.kt b/src/commonMain/kotlin/com/github/adriantodt/tartar/api/parser/SourceParser.kt new file mode 100644 index 0000000..573f20f --- /dev/null +++ b/src/commonMain/kotlin/com/github/adriantodt/tartar/api/parser/SourceParser.kt @@ -0,0 +1,50 @@ +package com.github.adriantodt.tartar.api.parser + +import com.github.adriantodt.tartar.api.dsl.ParserFunction +import com.github.adriantodt.tartar.api.grammar.Grammar +import com.github.adriantodt.tartar.api.lexer.Lexer +import com.github.adriantodt.tartar.api.lexer.Source + +/** + * Reads characters from a [Source] with a [lexer][Lexer], parsing the resulting tokens with a [pratt-parser][Parser]. + * + * @param T The parser's (and grammar's) token type. + * @param E The parser's (and grammar's) expression result. + * @param R The parser's result. + * @param lexer The underlying lexer. + * @param parser The underlying parser. + * @author AdrianTodt + */ +public data class SourceParser(public val lexer: Lexer>, public val parser: Parser) { + /** + * Parses tokens from a source, using the bundled lexer, with this pratt-parser, and returns the computed result. + * + * @param source A source of characters. + * @return The computed result. + */ + public fun parse(source: Source): R { + return parser.parse(source, lexer) + } + + public companion object { + /** + * Creates and configures a [SourceParser]. + * + * @param T The parser's (and grammar's) token type. + * @param E The parser's (and grammar's) expression result. + * @param R The parser's result. + * @param lexer The underlying parser. + * @param grammar The grammar used by this parser. + * @param block The parser function. + * @return A configured Parser. 
+ * @author AdrianTodt + */ + public fun create( + lexer: Lexer>, + grammar: Grammar, + block: ParserFunction + ): SourceParser { + return SourceParser(lexer, Parser.create(grammar, block)) + } + } +} diff --git a/src/commonMain/kotlin/com/github/adriantodt/tartar/api/parser/SyntaxException.kt b/src/commonMain/kotlin/com/github/adriantodt/tartar/api/parser/SyntaxException.kt index 610a620..6a7528e 100644 --- a/src/commonMain/kotlin/com/github/adriantodt/tartar/api/parser/SyntaxException.kt +++ b/src/commonMain/kotlin/com/github/adriantodt/tartar/api/parser/SyntaxException.kt @@ -9,4 +9,4 @@ import com.github.adriantodt.tartar.api.lexer.Section * @param position The [section][Section] where it happened. * @author An Tran */ -open class SyntaxException(message: String, val position: Section) : RuntimeException("$message at $position") +public open class SyntaxException(message: String, public val position: Section) : RuntimeException("$message at $position") diff --git a/src/commonMain/kotlin/com/github/adriantodt/tartar/api/parser/Token.kt b/src/commonMain/kotlin/com/github/adriantodt/tartar/api/parser/Token.kt index e5f4f37..75a47ea 100644 --- a/src/commonMain/kotlin/com/github/adriantodt/tartar/api/parser/Token.kt +++ b/src/commonMain/kotlin/com/github/adriantodt/tartar/api/parser/Token.kt @@ -12,9 +12,11 @@ import com.github.adriantodt.tartar.api.lexer.Sectional * @param section The section of this token. * @author An Tran */ -data class Token(val type: T, val value: String, override val section: Section) : Sectional { +public data class Token(public val type: T, public val value: String, override val section: Section) : Sectional { /** * Returns a string representation of the token. */ - override fun toString() = "$type[$value] $section" + override fun toString(): String { + return "$type[$value] $section" + } } diff --git a/src/commonMain/kotlin/com/github/adriantodt/tartar/extensions/Extensions.kt b/src/commonMain/kotlin/com/github/adriantodt/tartar/extensions/Extensions.kt deleted file mode 100644 index 3448806..0000000 --- a/src/commonMain/kotlin/com/github/adriantodt/tartar/extensions/Extensions.kt +++ /dev/null @@ -1,22 +0,0 @@ -package com.github.adriantodt.tartar.extensions - -/** - * Returns true if the character is an ASCII letter. - */ -fun Char.isLetter(): Boolean { - return this in ('a'..'z') || this in ('A'..'Z') -} - -/** - * Returns true if the character is an ASCII digit. - */ -fun Char.isDigit(): Boolean { - return this in ('0'..'9') -} - -/** - * Returns true if the character is an ASCII letter or digit. - */ -fun Char.isLetterOrDigit(): Boolean { - return isLetter() || isDigit() -} diff --git a/src/commonMain/kotlin/com/github/adriantodt/tartar/extensions/LexerContext.kt b/src/commonMain/kotlin/com/github/adriantodt/tartar/extensions/LexerContext.kt index bc4fe50..90b7725 100644 --- a/src/commonMain/kotlin/com/github/adriantodt/tartar/extensions/LexerContext.kt +++ b/src/commonMain/kotlin/com/github/adriantodt/tartar/extensions/LexerContext.kt @@ -7,28 +7,29 @@ import com.github.adriantodt.tartar.api.parser.Token /** * Creates a section. */ -fun LexerContext<*>.section(offset: Int, length: Int = 0): Section { +public fun LexerContext<*>.section(offset: Int, length: Int = 0): Section { return Section(source, index - length - offset, length + offset) } /** * Creates a token. 
*/ -fun LexerContext>.makeToken(tokenType: T, offset: Int = 1) = makeToken(tokenType, "", offset) +public fun LexerContext>.makeToken(type: T, offset: Int = 1): Token { + return makeToken(type, "", offset) +} /** * Creates a token. */ -fun LexerContext>.makeToken(tokenType: T, string: String, offset: Int = 0) = Token( - tokenType, - string, - Section(source, index - string.length - offset, string.length + offset) -) +public fun LexerContext>.makeToken(type: T, string: String, offset: Int = 0): Token { + val section = Section(source, index - string.length - offset, string.length + offset) + return Token(type, string, section) +} /** * Reads a C-like identifier. */ -fun LexerContext<*>.readIdentifier(firstChar: Char? = null): String { +public fun LexerContext<*>.readIdentifier(firstChar: Char? = null): String { val buf = StringBuilder() firstChar?.let(buf::append) while (hasNext()) { @@ -46,7 +47,7 @@ fun LexerContext<*>.readIdentifier(firstChar: Char? = null): String { /** * Reads a String up until a delimiter. */ -fun LexerContext<*>.readString(delimiter: Char): String { +public fun LexerContext<*>.readString(delimiter: Char): String { val buf = StringBuilder() var eol = false while (hasNext()) { @@ -69,7 +70,7 @@ fun LexerContext<*>.readString(delimiter: Char): String { /** * Reads a number. */ -fun LexerContext<*>.readNumber(c: Char): LexicalNumber { +public fun LexerContext<*>.readNumber(c: Char): LexicalNumber { val buf = StringBuilder() if (c == '0') { @@ -125,28 +126,28 @@ fun LexerContext<*>.readNumber(c: Char): LexicalNumber { /** * Result of [readNumber]. */ -sealed class LexicalNumber { +public sealed class LexicalNumber { /** * The original string value of the number. */ - abstract val string: String + public abstract val string: String /** * Read number is invalid. */ - data class Invalid(override val string: String) : LexicalNumber() + public data class Invalid(override val string: String) : LexicalNumber() /** * Read number is a decimal. */ - data class Decimal( + public data class Decimal( override val string: String, val value: Double, val isFloat: Boolean = false ) : LexicalNumber() /** * Read number is an integer. */ - data class Integer( + public data class Integer( override val string: String, val value: Long, val radix: Int = 10, val isLong: Boolean = false ) : LexicalNumber() } diff --git a/src/commonMain/kotlin/com/github/adriantodt/tartar/extensions/ParserContext.kt b/src/commonMain/kotlin/com/github/adriantodt/tartar/extensions/ParserContext.kt index 9e4d7d7..bfcfc65 100644 --- a/src/commonMain/kotlin/com/github/adriantodt/tartar/extensions/ParserContext.kt +++ b/src/commonMain/kotlin/com/github/adriantodt/tartar/extensions/ParserContext.kt @@ -2,11 +2,12 @@ package com.github.adriantodt.tartar.extensions import com.github.adriantodt.tartar.api.parser.ParserContext import com.github.adriantodt.tartar.api.parser.SyntaxException +import com.github.adriantodt.tartar.api.parser.Token /** * Ensures there's no character files after this block of code. */ -fun ParserContext<*, *>.ensureEOF(block: () -> R): R { +public fun ParserContext<*, *>.ensureEOF(block: () -> R): R { val r = block() if (!eof) throw SyntaxException("Should've reached end of content", eat().section) return r @@ -15,4 +16,6 @@ fun ParserContext<*, *>.ensureEOF(block: () -> R): R { /** * Eats tokens in a row. Returns a list, which can be used with a destructuring declaration. 
*/ -fun ParserContext.eatMulti(vararg types: T) = types.map(this::eat) +public fun ParserContext.eatMulti(vararg types: T): List> { + return types.map(this::eat) +} diff --git a/src/commonMain/kotlin/com/github/adriantodt/tartar/impl/GrammarBuilder.kt b/src/commonMain/kotlin/com/github/adriantodt/tartar/impl/GrammarBuilder.kt index 383f7b5..20a0131 100644 --- a/src/commonMain/kotlin/com/github/adriantodt/tartar/impl/GrammarBuilder.kt +++ b/src/commonMain/kotlin/com/github/adriantodt/tartar/impl/GrammarBuilder.kt @@ -1,45 +1,45 @@ package com.github.adriantodt.tartar.impl -import com.github.adriantodt.tartar.api.GrammarDSL -import com.github.adriantodt.tartar.api.InfixFunction -import com.github.adriantodt.tartar.api.PrefixFunction -import com.github.adriantodt.tartar.api.parser.Grammar -import com.github.adriantodt.tartar.api.parser.InfixParser -import com.github.adriantodt.tartar.api.parser.PrefixParser +import com.github.adriantodt.tartar.api.dsl.GrammarDSL +import com.github.adriantodt.tartar.api.dsl.InfixFunction +import com.github.adriantodt.tartar.api.dsl.PrefixFunction +import com.github.adriantodt.tartar.api.grammar.Grammar +import com.github.adriantodt.tartar.api.grammar.InfixParselet +import com.github.adriantodt.tartar.api.grammar.PrefixParselet -class GrammarBuilder : GrammarDSL { - private val prefixParsers = LinkedHashMap>() - private val infixParsers = LinkedHashMap>() +internal class GrammarBuilder : GrammarDSL { + private val prefix = LinkedHashMap>() + private val infix = LinkedHashMap>() override fun import(override: Boolean, vararg grammars: Grammar) { grammars.forEach { - it.prefixParsers.forEach { (k, v) -> prefix(k, v, override) } - it.infixParsers.forEach { (k, v) -> infix(k, v, override) } + it.prefix.forEach { (k, v) -> prefix(k, v, override) } + it.infix.forEach { (k, v) -> infix(k, v, override) } } } - override fun prefix(type: T, parselet: PrefixParser, override: Boolean) { - if (!override && type in prefixParsers) { + override fun prefix(type: T, parselet: PrefixParselet, override: Boolean) { + if (!override && type in prefix) { throw IllegalArgumentException("Prefix parselet associated with $type already exists. Did you forget to enable overriding?") } - prefixParsers[type] = parselet + prefix[type] = parselet } override fun prefix(type: T, override: Boolean, block: PrefixFunction) { - prefix(type, PrefixParserImpl(block)) + prefix(type, PrefixParseletImpl(block)) } - override fun infix(type: T, parselet: InfixParser, override: Boolean) { - if (!override && type in infixParsers) { + override fun infix(type: T, parselet: InfixParselet, override: Boolean) { + if (!override && type in infix) { throw IllegalArgumentException("Infix parselet associated with $type already exists. 
Did you forget to enable overriding?") } - infixParsers[type] = parselet + infix[type] = parselet } override fun infix(type: T, precedence: Int, override: Boolean, block: InfixFunction) { - infix(type, InfixParserImpl(precedence, block)) + infix(type, InfixParseletImpl(precedence, block)) } - fun build() = Grammar(prefixParsers.toMap(), infixParsers.toMap()) + internal fun build() = Grammar(prefix.toMap(), infix.toMap()) } diff --git a/src/commonMain/kotlin/com/github/adriantodt/tartar/impl/InfixParseletImpl.kt b/src/commonMain/kotlin/com/github/adriantodt/tartar/impl/InfixParseletImpl.kt new file mode 100644 index 0000000..565aa34 --- /dev/null +++ b/src/commonMain/kotlin/com/github/adriantodt/tartar/impl/InfixParseletImpl.kt @@ -0,0 +1,13 @@ +package com.github.adriantodt.tartar.impl + +import com.github.adriantodt.tartar.api.dsl.InfixFunction +import com.github.adriantodt.tartar.api.grammar.InfixParselet +import com.github.adriantodt.tartar.api.parser.ParserContext +import com.github.adriantodt.tartar.api.parser.Token + +internal class InfixParseletImpl( + override val precedence: Int, + private val block: InfixFunction +) : InfixParselet { + override fun parse(ctx: ParserContext, left: E, token: Token) = block(ctx, left, token) +} diff --git a/src/commonMain/kotlin/com/github/adriantodt/tartar/impl/InfixParserImpl.kt b/src/commonMain/kotlin/com/github/adriantodt/tartar/impl/InfixParserImpl.kt deleted file mode 100644 index 67b0a57..0000000 --- a/src/commonMain/kotlin/com/github/adriantodt/tartar/impl/InfixParserImpl.kt +++ /dev/null @@ -1,10 +0,0 @@ -package com.github.adriantodt.tartar.impl - -import com.github.adriantodt.tartar.api.InfixFunction -import com.github.adriantodt.tartar.api.parser.InfixParser -import com.github.adriantodt.tartar.api.parser.ParserContext -import com.github.adriantodt.tartar.api.parser.Token - -class InfixParserImpl(override val precedence: Int, private val block: InfixFunction) : InfixParser { - override fun parse(ctx: ParserContext, left: E, token: Token) = block(ctx, left, token) -} diff --git a/src/commonMain/kotlin/com/github/adriantodt/tartar/impl/LexerImpl.kt b/src/commonMain/kotlin/com/github/adriantodt/tartar/impl/LexerImpl.kt index c45d819..dc095f1 100644 --- a/src/commonMain/kotlin/com/github/adriantodt/tartar/impl/LexerImpl.kt +++ b/src/commonMain/kotlin/com/github/adriantodt/tartar/impl/LexerImpl.kt @@ -1,15 +1,12 @@ package com.github.adriantodt.tartar.impl -import com.github.adriantodt.tartar.api.CharPredicate -import com.github.adriantodt.tartar.api.ClosureFunction -import com.github.adriantodt.tartar.api.lexer.Lexer -import com.github.adriantodt.tartar.api.lexer.LexerContext -import com.github.adriantodt.tartar.api.lexer.Source -import com.github.adriantodt.tartar.api.lexer.StringReader +import com.github.adriantodt.tartar.api.dsl.CharPredicate +import com.github.adriantodt.tartar.api.dsl.MatchFunction +import com.github.adriantodt.tartar.api.lexer.* import com.github.adriantodt.tartar.api.parser.SyntaxException import com.github.adriantodt.tartar.extensions.section -class LexerImpl(root: MatcherImpl) : Lexer { +internal class LexerImpl(root: MatcherImpl) : Lexer { private val matcher = LexerMatcher(root) override fun parse(source: Source, output: (T) -> Unit) { @@ -18,7 +15,7 @@ class LexerImpl(root: MatcherImpl) : Lexer { } } - fun doParse(impl: ContextImpl, ctx: LexerContext = impl) { + private fun doParse(impl: ContextImpl, ctx: LexerContext = impl) { if (impl.hasNext()) { impl.read = 0 @@ -35,22 +32,26 @@ class LexerImpl(root: 
MatcherImpl) : Lexer { } } - data class LexerMatcher( - val trie: Map>, - val predicates: List>>, - val onMatch: ClosureFunction, Char, Unit>? + private class LexerMatcher( + private val trie: Map>, + private val predicates: List>, + val onMatch: MatchFunction? ) { constructor(m: MatcherImpl) : this( m.trie.filterNot { it.value.isEmpty() }.mapValues { LexerMatcher(it.value) }, - m.predicates.filterNot { it.second.isEmpty() }.map { it.first to LexerMatcher(it.second) }, + m.predicates.filterNot { it.isMatcherEmpty() }.map { LexerMatcherWithPredicate(it) }, m.onMatch ) fun tryMatchChild(char: Char): LexerMatcher? { - return trie[char] ?: predicates.firstOrNull { it.first(char) }?.second + return trie[char] ?: predicates.firstOrNull { it.predicate.test(char) }?.matcher } } + private class LexerMatcherWithPredicate(val predicate: CharPredicate, val matcher: LexerMatcher) { + constructor(m: MatcherImpl.MatcherWithPredicate) : this(m.predicate, LexerMatcher(m.matcher)) + } + private tailrec fun LexerMatcher<*>.skipUntilMatch(ctx: ContextImpl) { if (!ctx.hasNext()) return val char = ctx.peek() @@ -64,8 +65,8 @@ class LexerImpl(root: MatcherImpl) : Lexer { return (tryMatchChild(ctx.peek()) ?: return this).doMatch(ctx, true) } - inner class ContextImpl(override val source: Source, private val output: (T) -> Unit) : LexerContext { - inner class CollectingContext(private val collection: MutableCollection) : LexerContext by this { + private inner class ContextImpl(override val source: Source, private val output: (T) -> Unit) : LexerContext { + private inner class CollectingContext(private val collection: MutableCollection) : LexerContext by this { override fun process(token: T) { collection.add(token) } @@ -89,17 +90,31 @@ class LexerImpl(root: MatcherImpl) : Lexer { override fun peek(distance: Int): Char { reader.mark(distance + 1) - val value = generateSequence { reader.read().takeUnless { it == -1 } }.elementAtOrNull(distance) ?: -1 + var value = -1 + for (i in 0 until distance) { + val next = reader.read() + if (next == -1) { + value = -1 + break + } else if (i != distance -1) { + continue + } + value = next + } + // val value = generateSequence { reader.read().takeUnless { it == -1 } }.elementAtOrNull(distance) ?: -1 reader.reset() return value.toChar() } override fun peekString(length: Int): String { reader.mark(length) - val value = generateSequence { reader.read().takeUnless { it == -1 }?.toChar() } - .take(length) - .fold(StringBuilder(), StringBuilder::append) - .toString() + val value = buildString(length) { + for (i in 0 until length) { + val next = reader.read() + if (next == -1) break + append(next.toChar()) + } + } reader.reset() return value } @@ -140,7 +155,7 @@ class LexerImpl(root: MatcherImpl) : Lexer { return mutableListOf().also { doParse(this, CollectingContext(it)) } } - fun use(block: (ContextImpl) -> R): R { + internal inline fun use(block: (ContextImpl) -> R): R { return reader.using { block(this) } } } diff --git a/src/commonMain/kotlin/com/github/adriantodt/tartar/impl/MatcherImpl.kt b/src/commonMain/kotlin/com/github/adriantodt/tartar/impl/MatcherImpl.kt index 945c0bc..2eaec84 100644 --- a/src/commonMain/kotlin/com/github/adriantodt/tartar/impl/MatcherImpl.kt +++ b/src/commonMain/kotlin/com/github/adriantodt/tartar/impl/MatcherImpl.kt @@ -1,14 +1,16 @@ package com.github.adriantodt.tartar.impl -import com.github.adriantodt.tartar.api.CharPredicate -import com.github.adriantodt.tartar.api.ClosureFunction -import com.github.adriantodt.tartar.api.LexerDSL -import 
com.github.adriantodt.tartar.api.lexer.LexerContext +import com.github.adriantodt.tartar.api.dsl.LexerDSL +import com.github.adriantodt.tartar.api.dsl.MatchFunction +import com.github.adriantodt.tartar.api.dsl.CharPredicate -class MatcherImpl : LexerDSL { +internal class MatcherImpl : LexerDSL { + internal class MatcherWithPredicate(val predicate: CharPredicate, val matcher: MatcherImpl) { + fun isMatcherEmpty() = matcher.isEmpty() + } val trie = LinkedHashMap>() - val predicates = ArrayList>>() - var onMatch: ClosureFunction, Char, Unit>? = null + val predicates = ArrayList>() + var onMatch: MatchFunction? = null fun isEmpty() = trie.isEmpty() && predicates.isEmpty() && onMatch == null @@ -26,11 +28,11 @@ class MatcherImpl : LexerDSL { override fun matching(block: CharPredicate): MatcherImpl { val matcher = MatcherImpl() - predicates += block to matcher + predicates += MatcherWithPredicate(block, matcher) return matcher } - override fun configure(block: ClosureFunction, Char, Unit>) { + override fun configure(block: MatchFunction) { onMatch = block } } diff --git a/src/commonMain/kotlin/com/github/adriantodt/tartar/impl/ParserImpl.kt b/src/commonMain/kotlin/com/github/adriantodt/tartar/impl/ParserImpl.kt index 7ebedc5..9a64ef7 100644 --- a/src/commonMain/kotlin/com/github/adriantodt/tartar/impl/ParserImpl.kt +++ b/src/commonMain/kotlin/com/github/adriantodt/tartar/impl/ParserImpl.kt @@ -1,19 +1,20 @@ package com.github.adriantodt.tartar.impl -import com.github.adriantodt.tartar.api.Closure +import com.github.adriantodt.tartar.api.dsl.ParserFunction +import com.github.adriantodt.tartar.api.grammar.Grammar import com.github.adriantodt.tartar.api.lexer.Source import com.github.adriantodt.tartar.api.parser.* -class ParserImpl( +internal class ParserImpl( override val grammar: Grammar, - private val block: Closure, R> + private val block: ParserFunction ) : Parser { override fun parse(source: Source, tokens: List>): R { return ContextImpl(source, tokens, grammar).block() } - inner class ContextImpl( + private inner class ContextImpl( override val source: Source, tokens: List>, override val grammar: Grammar @@ -81,14 +82,14 @@ class ParserImpl( while (!eof && !nextIsAny(*type)) eat() } - fun parseExpr(grammar: Grammar, precedence: Int): E { + private fun parseExpr(grammar: Grammar, precedence: Int): E { var left: E = eat().let { - grammar.prefixParsers[it.type]?.parse(this, it) + grammar.prefix[it.type]?.parse(this, it) ?: throw SyntaxException("Unexpected $it", it.section) } - while (!eof && precedence < (this.grammar.infixParsers[this.peek(0).type]?.precedence ?: 0)) { + while (!eof && precedence < (this.grammar.infix[this.peek(0).type]?.precedence ?: 0)) { left = eat().let { - grammar.infixParsers[it.type]?.parse(this, left, it) + grammar.infix[it.type]?.parse(this, left, it) ?: throw SyntaxException("Unexpected $it", it.section) } } diff --git a/src/commonMain/kotlin/com/github/adriantodt/tartar/impl/PrefixParserImpl.kt b/src/commonMain/kotlin/com/github/adriantodt/tartar/impl/PrefixParseletImpl.kt similarity index 51% rename from src/commonMain/kotlin/com/github/adriantodt/tartar/impl/PrefixParserImpl.kt rename to src/commonMain/kotlin/com/github/adriantodt/tartar/impl/PrefixParseletImpl.kt index 837957a..f59f56a 100644 --- a/src/commonMain/kotlin/com/github/adriantodt/tartar/impl/PrefixParserImpl.kt +++ b/src/commonMain/kotlin/com/github/adriantodt/tartar/impl/PrefixParseletImpl.kt @@ -1,10 +1,10 @@ package com.github.adriantodt.tartar.impl -import 
com.github.adriantodt.tartar.api.PrefixFunction +import com.github.adriantodt.tartar.api.dsl.PrefixFunction import com.github.adriantodt.tartar.api.parser.ParserContext -import com.github.adriantodt.tartar.api.parser.PrefixParser +import com.github.adriantodt.tartar.api.grammar.PrefixParselet import com.github.adriantodt.tartar.api.parser.Token -class PrefixParserImpl(private val block: PrefixFunction) : PrefixParser { +internal class PrefixParseletImpl(private val block: PrefixFunction) : PrefixParselet { override fun parse(ctx: ParserContext, token: Token) = block(ctx, token) } diff --git a/src/commonMain/kotlin/com/github/adriantodt/tartar/impl/Using.kt b/src/commonMain/kotlin/com/github/adriantodt/tartar/impl/Using.kt index bb7efd0..0116919 100644 --- a/src/commonMain/kotlin/com/github/adriantodt/tartar/impl/Using.kt +++ b/src/commonMain/kotlin/com/github/adriantodt/tartar/impl/Using.kt @@ -1,3 +1,3 @@ package com.github.adriantodt.tartar.impl -internal expect fun T.using(block: (T) -> R): R +internal expect inline fun T.using(block: (T) -> R): R diff --git a/src/commonNonJvmMain/kotlin/com/github/adriantodt/tartar/api/lexer/StringReader.kt b/src/commonNonJvmMain/kotlin/com/github/adriantodt/tartar/api/lexer/StringReader.kt index 1b614df..315aff6 100644 --- a/src/commonNonJvmMain/kotlin/com/github/adriantodt/tartar/api/lexer/StringReader.kt +++ b/src/commonNonJvmMain/kotlin/com/github/adriantodt/tartar/api/lexer/StringReader.kt @@ -3,17 +3,26 @@ package com.github.adriantodt.tartar.api.lexer import kotlin.math.max import kotlin.math.min -actual class StringReader actual constructor(s: String) { +/** + * Portable (non-jvm) implementation of a character stream whose source is a string. + */ +public actual class StringReader actual constructor(s: String) { private var str: String = s private val length: Int = s.length private var next = 0 private var mark = 0 - actual fun read(): Int { + /** + * Reads a single character. + */ + public actual fun read(): Int { return if (next >= length) -1 else str[next++].code } - actual fun skip(ns: Long): Long { + /** + * Skips a specified amount of characters in the stream, returning the amount of characters actually skipped. + */ + public actual fun skip(ns: Long): Long { if (next >= length) return 0 var n = min(length - next.toLong(), ns) n = max(-next.toLong(), n) @@ -21,12 +30,19 @@ actual class StringReader actual constructor(s: String) { return n } - actual fun mark(readAheadLimit: Int) { + /** + * Marks the stream's current position. Calling reset() reverts the stream to this position. + */ + public actual fun mark(readAheadLimit: Int) { require(readAheadLimit >= 0) { "Read-ahead limit < 0" } mark = next } - actual fun reset() { + /** + * Resets the stream to the last marked position. + * If mark() was never called, the stream resets to the beginning of the string. 
+ */ + public actual fun reset() { next = mark } } diff --git a/src/commonNonJvmMain/kotlin/com/github/adriantodt/tartar/impl/UsingFallback.kt b/src/commonNonJvmMain/kotlin/com/github/adriantodt/tartar/impl/UsingFallback.kt index ac0b8a0..71e6cea 100644 --- a/src/commonNonJvmMain/kotlin/com/github/adriantodt/tartar/impl/UsingFallback.kt +++ b/src/commonNonJvmMain/kotlin/com/github/adriantodt/tartar/impl/UsingFallback.kt @@ -1,5 +1,5 @@ package com.github.adriantodt.tartar.impl -internal actual fun T.using(block: (T) -> R): R { - return this.let(block) +internal actual inline fun T.using(block: (T) -> R): R { + return block(this) } diff --git a/src/jvmMain/kotlin/com/github/adriantodt/tartar/TartarJVM.kt b/src/jvmMain/kotlin/com/github/adriantodt/tartar/api/lexer/ClasspathSource.kt similarity index 84% rename from src/jvmMain/kotlin/com/github/adriantodt/tartar/TartarJVM.kt rename to src/jvmMain/kotlin/com/github/adriantodt/tartar/api/lexer/ClasspathSource.kt index fa8e4ab..570d934 100644 --- a/src/jvmMain/kotlin/com/github/adriantodt/tartar/TartarJVM.kt +++ b/src/jvmMain/kotlin/com/github/adriantodt/tartar/api/lexer/ClasspathSource.kt @@ -1,6 +1,4 @@ -package com.github.adriantodt.tartar - -import com.github.adriantodt.tartar.api.lexer.Source +package com.github.adriantodt.tartar.api.lexer /** * Creates a source from the classpath. @@ -8,7 +6,7 @@ import com.github.adriantodt.tartar.api.lexer.Source * @return A source loaded from the classpath. * @author AdrianTodt */ -fun classpathSource(lazyName: () -> String): Source { +public fun Source.Companion.classpath(lazyName: () -> String): Source { /* * RE: But WHY? * diff --git a/src/jvmMain/kotlin/com/github/adriantodt/tartar/api/lexer/StringReader.kt b/src/jvmMain/kotlin/com/github/adriantodt/tartar/api/lexer/StringReader.kt index cc2d1f5..e0116dc 100644 --- a/src/jvmMain/kotlin/com/github/adriantodt/tartar/api/lexer/StringReader.kt +++ b/src/jvmMain/kotlin/com/github/adriantodt/tartar/api/lexer/StringReader.kt @@ -1,3 +1,3 @@ package com.github.adriantodt.tartar.api.lexer -actual typealias StringReader = java.io.StringReader +public actual typealias StringReader = java.io.StringReader diff --git a/src/jvmMain/kotlin/com/github/adriantodt/tartar/impl/UsingJVM.kt b/src/jvmMain/kotlin/com/github/adriantodt/tartar/impl/UsingJVM.kt index 4a43754..ddb9999 100644 --- a/src/jvmMain/kotlin/com/github/adriantodt/tartar/impl/UsingJVM.kt +++ b/src/jvmMain/kotlin/com/github/adriantodt/tartar/impl/UsingJVM.kt @@ -2,6 +2,6 @@ package com.github.adriantodt.tartar.impl import java.io.Closeable -internal actual fun T.using(block: (T) -> R): R { - return if (this is Closeable) this.use(block) else this.let(block) +internal actual inline fun T.using(block: (T) -> R): R { + return if (this is Closeable) this.use(block) else block(this) } diff --git a/src/jvmTest/kotlin/examples/LexicalEquality.kt b/src/jvmTest/kotlin/examples/LexicalEquality.kt index 91dd0aa..e52efa8 100644 --- a/src/jvmTest/kotlin/examples/LexicalEquality.kt +++ b/src/jvmTest/kotlin/examples/LexicalEquality.kt @@ -1,10 +1,11 @@ package examples +import com.github.adriantodt.tartar.api.dsl.CharPredicate import com.github.adriantodt.tartar.api.lexer.Lexer +import com.github.adriantodt.tartar.api.lexer.Source +import com.github.adriantodt.tartar.api.lexer.classpath import com.github.adriantodt.tartar.api.parser.SyntaxException import com.github.adriantodt.tartar.api.parser.Token -import com.github.adriantodt.tartar.classpathSource -import com.github.adriantodt.tartar.createLexer import 
com.github.adriantodt.tartar.extensions.* import examples.extra.CTokenType import examples.extra.CTokenType.* @@ -13,7 +14,7 @@ fun main() { /* * A minimal subset of C was implemented. */ - val lexer: Lexer> = createLexer { + val lexer = Lexer.create> { // NOOP ' '() '\n'() @@ -35,7 +36,7 @@ fun main() { ')' { process(makeToken(RPAREN)) } '{' { process(makeToken(LBRACKET)) } '}' { process(makeToken(RBRACKET)) } - matching { it.isDigit() }.configure { + matching(CharPredicate.isDigit).configure { when (val n = readNumber(it)) { is LexicalNumber.Decimal -> process(makeToken(NUMBER, n.string)) is LexicalNumber.Integer -> process(makeToken(NUMBER, n.string)) @@ -47,8 +48,8 @@ fun main() { } } - val list1 = lexer.parseToList(classpathSource { "input.c" }) - val list2 = lexer.parseToList(classpathSource { "input.min.c" }) + val list1 = lexer.parseToList(Source.classpath { "input.c" }) + val list2 = lexer.parseToList(Source.classpath { "input.min.c" }) // Compares types and values, but not sections. val isEqual = list1.zip(list2).all { (o1, o2) -> o1.type == o2.type && o1.value == o2.value } diff --git a/src/jvmTest/kotlin/examples/StringJoiner.kt b/src/jvmTest/kotlin/examples/StringJoiner.kt index 539b3a7..9f68ff2 100644 --- a/src/jvmTest/kotlin/examples/StringJoiner.kt +++ b/src/jvmTest/kotlin/examples/StringJoiner.kt @@ -1,12 +1,11 @@ package examples +import com.github.adriantodt.tartar.api.grammar.Grammar import com.github.adriantodt.tartar.api.lexer.Lexer -import com.github.adriantodt.tartar.api.parser.Grammar +import com.github.adriantodt.tartar.api.lexer.Source +import com.github.adriantodt.tartar.api.lexer.classpath +import com.github.adriantodt.tartar.api.parser.SourceParser import com.github.adriantodt.tartar.api.parser.Token -import com.github.adriantodt.tartar.classpathSource -import com.github.adriantodt.tartar.createGrammar -import com.github.adriantodt.tartar.createLexer -import com.github.adriantodt.tartar.createParser import com.github.adriantodt.tartar.extensions.ensureEOF import com.github.adriantodt.tartar.extensions.makeToken import com.github.adriantodt.tartar.extensions.readString @@ -20,7 +19,7 @@ private enum class TokenType { fun main() { // Create a lexer as simple as a method call. - val lexer: Lexer> = createLexer { + val lexer = Lexer.create> { // Implement a token type per line. '+' { process(makeToken(PLUS)) } // Built-in extension functions. @@ -31,7 +30,7 @@ fun main() { } // Create a pratt-parser grammar. Dead simple. - val grammar: Grammar = createGrammar { + val grammar = Grammar.create { // Create a prefix parselet as a lambda function. prefix(STRING) { token -> token.value } // Create an infix parselet, with support to precedence as a lambda function. @@ -39,7 +38,7 @@ fun main() { } // Use your grammar to create a pratt-parser. - val parser = createParser(grammar) { // Actual code run by the parser. + val parser = SourceParser.create(lexer, grammar) { // Actual code run by the parser. // Extension function: Throws if there's still tokens. ensureEOF { // Parses a expression using this parsers' grammar. @@ -48,6 +47,6 @@ fun main() { } // One line of code to rule them all. - val result = parser.parse(classpathSource { "input.str" }, lexer) + val result = parser.parse(Source.classpath { "input.str" }) println(result) }
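
---

A few illustrative sketches of the patterns this patch touches follow; none of this code is part of the patch, and every name in it is made up unless it also appears in the diff above.

The renamed `grammar.prefix`/`grammar.infix` maps and the new `PrefixParseletImpl`/`InfixParseletImpl` feed the Pratt loop in `ParserImpl.parseExpr`: consume one token, dispatch to its prefix parselet, then keep folding infix parselets for as long as the upcoming operator's precedence beats the current binding power. This is a minimal, self-contained sketch of that loop, with hard-coded `+`/`*` handling instead of Tartar's parselet interfaces and illustrative names (`Tok`, `Pratt`):

```kotlin
// Minimal Pratt-parsing sketch: illustrative only, not Tartar's API.
// Tokens are pre-lexed; Num carries its text, Op carries a precedence.
sealed class Tok {
    data class Num(val text: String) : Tok()
    data class Op(val symbol: Char, val precedence: Int) : Tok()
}

class Pratt(private val tokens: List<Tok>) {
    private var index = 0
    private fun peek(): Tok? = tokens.getOrNull(index)
    private fun eat(): Tok = tokens[index++]

    // Same shape as parseExpr: prefix dispatch, then fold infix operators
    // while their precedence is strictly higher than the caller's.
    fun parseExpression(precedence: Int = 0): Long {
        var left = when (val t = eat()) {
            is Tok.Num -> t.text.toLong()
            else -> error("Unexpected $t")
        }
        while (true) {
            val op = peek() as? Tok.Op ?: break
            if (op.precedence <= precedence) break
            eat()
            val right = parseExpression(op.precedence)
            left = when (op.symbol) {
                '+' -> left + right
                '*' -> left * right
                else -> error("Unknown operator ${op.symbol}")
            }
        }
        return left
    }
}

fun main() {
    // 1 + 2 * 3 => 7, because '*' binds tighter than '+'.
    val tokens = listOf(
        Tok.Num("1"), Tok.Op('+', 1),
        Tok.Num("2"), Tok.Op('*', 2), Tok.Num("3")
    )
    println(Pratt(tokens).parseExpression()) // 7
}
```

Note that an operator of equal precedence does not continue the loop, which is what makes operators left-associative and mirrors the strict `<` comparison in `parseExpr`.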
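`LexerMatcher` flattens the DSL's `MatcherImpl` into an immutable character trie plus a list of predicate-guarded sub-matchers, and `tryMatchChild` consults the trie first and the predicates second. The sketch below shows that dispatch order on a plain trie together with a greedy longest-match walk; `Node` and `longestMatch` are illustrative names, and the real matcher drives the walk through `LexerContext` rather than over a raw string.

```kotlin
// Sketch of trie-plus-predicate dispatch, analogous in spirit to
// LexerMatcher.tryMatchChild. All names here are illustrative.
class Node(val label: String? = null) {
    val children = mutableMapOf<Char, Node>()
    val predicates = mutableListOf<Pair<(Char) -> Boolean, Node>>()

    // Literal edges win; predicate edges are tried in insertion order.
    fun child(c: Char): Node? =
        children[c] ?: predicates.firstOrNull { it.first(c) }?.second
}

// Walks the trie as far as the input allows and reports the last node
// that carried a label, i.e. the longest recognised token.
fun longestMatch(root: Node, input: String, start: Int): Pair<String, Int>? {
    var node = root
    var best: Pair<String, Int>? = null
    var i = start
    while (i < input.length) {
        node = node.child(input[i]) ?: break
        i++
        node.label?.let { best = it to i }
    }
    return best
}

fun main() {
    val root = Node()
    // Literal tokens "+" and "+=" share a trie path.
    val plus = Node("PLUS").also { root.children['+'] = it }
    plus.children['='] = Node("PLUS_ASSIGN")
    // Predicate edge: any digit leads to a NUMBER node that loops on itself.
    val number = Node("NUMBER")
    root.predicates += { c: Char -> c.isDigit() } to number
    number.predicates += { c: Char -> c.isDigit() } to number

    println(longestMatch(root, "+= 12", 0)) // (PLUS_ASSIGN, 2)
    println(longestMatch(root, "123+", 0))  // (NUMBER, 3)
}
```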
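One thing worth double-checking in `ContextImpl.peek(distance)`: the commented-out `generateSequence { ... }.elementAtOrNull(distance)` read up to `distance + 1` characters and returned the one at index `distance`, whereas the replacement loop over `0 until distance` performs only `distance` reads, so `peek(0)` would no longer see the next character. If a plain loop is preferred over the sequence, something like the sketch below keeps the old semantics; it uses `java.io.StringReader` directly so it runs standalone, and `peekAt` is an illustrative name rather than the library's method.

```kotlin
import java.io.StringReader

// Sketch of a loop-based peek that preserves the semantics of the
// commented-out generateSequence version: mark(), read distance + 1
// characters, return the last one (or -1 if the input ends first), reset().
fun StringReader.peekAt(distance: Int): Char {
    mark(distance + 1)
    var value = -1
    for (i in 0..distance) {      // distance + 1 reads, not distance
        val next = read()
        if (next == -1) {
            value = -1            // input ended before reaching the index
            break
        }
        value = next              // after the loop: char at index `distance`
    }
    reset()
    return value.toChar()
}

fun main() {
    val reader = StringReader("abc")
    println(reader.peekAt(0)) // a
    println(reader.peekAt(1)) // b
    println(reader.peekAt(2)) // c
    println(reader.peekAt(0)) // still a: peeking never consumes input
}
```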
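`using` is now an `expect inline fun`, with the JVM actual routing `Closeable` receivers through `use` and everything else straight into the block (the non-JVM fallback likewise drops the extra `let`). A JVM-only sketch of that behaviour, under made-up names (`usingSketch`, `Resource`):

```kotlin
import java.io.Closeable

// JVM-flavoured sketch of the `using` helper: Closeable receivers are
// closed via `use`, anything else just runs the block. Illustrative names.
inline fun <T, R> T.usingSketch(block: (T) -> R): R =
    if (this is Closeable) this.use(block) else block(this)

class Resource : Closeable {
    var closed = false
        private set
    override fun close() { closed = true }
}

fun main() {
    val resource = Resource()
    val result = resource.usingSketch { "worked with $it" }
    println(result)
    println("closed after block: ${resource.closed}") // true: `use` closed it

    // Non-Closeable receivers skip the close path entirely.
    println(42.usingSketch { it * 2 }) // 84
}
```

The point of the `Closeable` branch is that the reader wrapped by `ContextImpl.use` is closed even when the block throws, while plain values pay no extra cost.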
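Finally, `classpathSource { ... }` moved onto the companion object as `Source.Companion.classpath`, which is why the examples now import `com.github.adriantodt.tartar.api.lexer.classpath` and call `Source.classpath { ... }`. The companion-object-extension pattern itself looks like this sketch; `Point` and `fromPair` are made-up names, not Tartar API.

```kotlin
// Sketch of the companion-object-extension pattern used for Source.classpath.
data class Point(val x: Int, val y: Int) {
    companion object // empty companion so extensions have a receiver to attach to
}

// Reads like a static factory at the call site: Point.fromPair(...)
fun Point.Companion.fromPair(pair: Pair<Int, Int>): Point =
    Point(pair.first, pair.second)

fun main() {
    println(Point.fromPair(3 to 4)) // Point(x=3, y=4)
}
```

The upside over a top-level factory function is discoverability at the call site; the trade-off is that the extension still needs an explicit import, exactly as seen in the updated examples.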