diff --git a/library/src/commonMain/kotlin/dev/usbharu/markdown/Lexer.kt b/library/src/commonMain/kotlin/dev/usbharu/markdown/Lexer.kt
index 0127ff4..853fb69 100644
--- a/library/src/commonMain/kotlin/dev/usbharu/markdown/Lexer.kt
+++ b/library/src/commonMain/kotlin/dev/usbharu/markdown/Lexer.kt
@@ -6,7 +6,7 @@ class Lexer {
     fun lex(input: String): List<Token> {
         val tokens = mutableListOf<Token>()
         val lines = PeekableStringIterator(input.lines())
-        while (lines.hasNext()) {
+        line@ while (lines.hasNext()) {
             if (lines.peekOrNull() == "") {
                 blankLine(lines, tokens)
@@ -14,7 +14,7 @@ class Lexer {
                 val line = lines.next()
                 val iterator = PeekableCharIterator(line.toCharArray())
-                while (iterator.hasNext()) {
+                char@ while (iterator.hasNext()) {
                     when (val next = iterator.next()) {
                         '#', '＃' -> header(iterator, tokens)
                         '>', '＞' -> quote(iterator, tokens)
@@ -37,22 +37,62 @@ class Lexer {
                             tokens.add(SquareBracketEnd)
                         }
+                        '(', '（' -> {
+                            tokens.add(ParenthesesStart)
+                        }
+
+                        ')', '）' -> {
+                            tokens.add(ParenthesesEnd)
+                        }
+
                         ' ', '　' -> {
                             tokens.add(Whitespace(skipWhitespace(iterator) + 1, next)) //nextの分1足す
                         }
+                        'h' -> {
+                            //todo httpにも対応
+                            val charIterator = "ttps://".iterator()
+                            val urlBuilder = StringBuilder()
+                            urlBuilder.append(next)
+                            while (charIterator.hasNext() && iterator.hasNext()) {
+                                val nextC = charIterator.next()
+                                val nextC2 = iterator.next()
+                                urlBuilder.append(nextC2)
+                                if (nextC != nextC2) {
+                                    tokens.add(Text(urlBuilder.toString()))
+                                    continue@char
+                                }
+                            }
+                            if (urlBuilder.length == 1) {
+                                tokens.add(Text(urlBuilder.toString())) //hだけのときはURLじゃないのでテキストとして追加
+                            } else {
+                                while (iterator.hasNext() && iterator.peekOrNull()?.isWhitespace() != true) {
+                                    urlBuilder.append(iterator.next())
+                                }
+                                tokens.add(Url(urlBuilder.toString()))
+                            }
+
+
+                        }
+
+
                         else -> {
-                            tokens.add(Text(next + collect(iterator)))
-                            tokens.add(Break(1))
+                            val lastToken = tokens.lastOrNull()
+                            if (lastToken is Text) {
+                                lastToken.text += next.toString()
+                            } else {
+                                tokens.add(Text(next.toString()))
+                            }
                         }
                     }
                 }
-
+                tokens.add(Break(1))
             }
         }
+        println(tokens)
         val lastToken = tokens.lastOrNull()
         if (lastToken is Break) {
             if (lastToken.count == 1) {
@@ -151,7 +191,6 @@ class Lexer {
         tokens.add(Quote(count))
         skipWhitespace(iterator)
         tokens.add(Text(collect(iterator)))
-        tokens.add(Break(1))
     }
 
     private fun header(
@@ -166,7 +205,6 @@ class Lexer {
         tokens.add(Header(count))
         skipWhitespace(iterator)
         tokens.add(Text(collect(iterator)))
-        tokens.add(Break(1))
     }
 
     fun skipWhitespace(iterator: PeekableCharIterator): Int {
diff --git a/library/src/commonMain/kotlin/dev/usbharu/markdown/Token.kt b/library/src/commonMain/kotlin/dev/usbharu/markdown/Token.kt
index 3552e0e..6223a06 100644
--- a/library/src/commonMain/kotlin/dev/usbharu/markdown/Token.kt
+++ b/library/src/commonMain/kotlin/dev/usbharu/markdown/Token.kt
@@ -19,4 +19,7 @@ data object DiscList : List(ListType.DISC)
 data class DecimalList(val number: Char) : List(ListType.DECIMAL)
 data class CheckBox(val checked: Boolean) : Token()
 data object SquareBracketStart : Token()
-data object SquareBracketEnd : Token()
\ No newline at end of file
+data object SquareBracketEnd : Token()
+data object ParenthesesStart : Token()
+data object ParenthesesEnd : Token()
+data class Url(var url: String) : Token()
\ No newline at end of file
diff --git a/library/src/commonTest/kotlin/dev/usbharu/markdown/LexerTest.kt b/library/src/commonTest/kotlin/dev/usbharu/markdown/LexerTest.kt
index 58785dd..fc2cbb1 100644
--- a/library/src/commonTest/kotlin/dev/usbharu/markdown/LexerTest.kt
+++ b/library/src/commonTest/kotlin/dev/usbharu/markdown/LexerTest.kt
@@ -57,7 +57,7 @@ class LexerTest {
 
         println(actual)
 
-        assertContentEquals(listOf(Text("abcd"), Break(1), Text("efgh")), actual)
+        assertContentEquals(listOf(Text("abcd"), Break(1), Text("efg"), Text("h")), actual)
     }
 
     @Test
@@ -68,7 +68,7 @@ class LexerTest {
 
         println(actual)
 
-        assertContentEquals(listOf(Text("abcd"), Break(2), Text("efgh")), actual)
+        assertContentEquals(listOf(Text("abcd"), Break(2), Text("efg"), Text("h")), actual)
     }
 
     @Test
@@ -223,7 +223,7 @@ class LexerTest {
 
         println(actual)
 
-        assertContentEquals(listOf(Text("---"), Text("aiueo")), actual)
+        assertContentEquals(listOf(Text("---aiueo")), actual)
     }
 
     @Test
@@ -256,7 +256,7 @@ class LexerTest {
 
         println(actual)
 
-        assertContentEquals(listOf(DiscList, Text("[x"), Text("a a")), actual)
+        assertContentEquals(listOf(DiscList, Text("[xa"), Whitespace(1, ' '), Text("a")), actual)
     }
 
     @Test
@@ -278,7 +278,7 @@ class LexerTest {
 
         println(actual)
 
-        assertContentEquals(listOf(Text("-"), Text("aiueo")), actual)
+        assertContentEquals(listOf(Text("-aiueo")), actual)
     }
 
     @Test
@@ -408,4 +408,79 @@ class LexerTest {
             actual
         )
     }
+
+    @Test
+    fun url() {
+        val lexer = Lexer()
+
+        val actual = lexer.lex("https://example.com")
+
+        println(actual)
+
+        assertContentEquals(listOf(Url("https://example.com")), actual)
+    }
+
+    @Test
+    fun url2() {
+        val lexer = Lexer()
+
+        val actual =
+            lexer.lex("https://ja.wikipedia.org/wiki/%E3%83%A4%E3%83%B3%E3%83%90%E3%83%AB%E3%82%AF%E3%82%A4%E3%83%8A#%E6%8E%A1%E9%A4%8C")
+
+        println(actual)
+
+        assertContentEquals(
+            listOf(Url("https://ja.wikipedia.org/wiki/%E3%83%A4%E3%83%B3%E3%83%90%E3%83%AB%E3%82%AF%E3%82%A4%E3%83%8A#%E6%8E%A1%E9%A4%8C")),
+            actual
+        )
+    }
+
+    @Test
+    fun 文中にurl() {
+        val lexer = Lexer()
+
+        val actual = lexer.lex("こんにちは~ https://example.com\nあいうえお")
+
+        println(actual)
+
+        assertContentEquals(
+            listOf(
+                Text("こんにちは~"),
+                Whitespace(1, ' '),
+                Url("https://example.com"),
+                Break(1),
+                Text("あいうえお")
+            ), actual
+        )
+    }
+
+    @Test
+    fun urlかと思ったら違った() {
+        val lexer = Lexer()
+
+        val actual = lexer.lex("httppppp")
+
+        println(actual)
+
+        assertContentEquals(
+            listOf(
+                Text("httppppp")
+            ), actual
+        )
+    }
+
+    @Test
+    fun urlかと思ったら違った2() {
+        val lexer = Lexer()
+
+        val actual = lexer.lex("ha")
+
+        println(actual)
+
+        assertContentEquals(
+            listOf(
+                Text("ha")
+            ), actual
+        )
+    }
 }
\ No newline at end of file
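
For quick reference, a minimal usage sketch of the URL lexing added above. It only restates what the new url, 文中にurl, and urlかと思ったら違った test cases in this diff already assert; the main wrapper and the inline expectation comments are illustrative and not part of the change itself.

import dev.usbharu.markdown.Lexer

fun main() {
    val lexer = Lexer()

    // A bare URL is emitted as a single Url token.
    // Expected, per the url test: listOf(Url("https://example.com"))
    println(lexer.lex("https://example.com"))

    // A URL inside prose is cut off at the surrounding whitespace and the end of the line.
    // Expected, per the 文中にurl test:
    // listOf(Text("こんにちは~"), Whitespace(1, ' '), Url("https://example.com"), Break(1), Text("あいうえお"))
    println(lexer.lex("こんにちは~ https://example.com\nあいうえお"))

    // An 'h' that is not followed by "ttps://" falls back to plain text.
    // Expected, per the urlかと思ったら違った test: listOf(Text("httppppp"))
    println(lexer.lex("httppppp"))
}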