Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 4 additions & 0 deletions src/main/java/tech/mlsql/autosuggest/SpecialTableConst.scala
Original file line number Diff line number Diff line change
Expand Up @@ -11,6 +11,8 @@ object SpecialTableConst {
// Sentinel table name for the synthetic "options" table (see OPTION_TABLE below).
val OPTION_KEY = "__OPTION__"
// Sentinel pseudo-database name under which temp tables are keyed (see tempTable below).
val TEMP_TABLE_DB_KEY = "__TEMP_TABLE__"

// Sentinel table name for the catch-all "other" table (see OTHER_TABLE below);
// used e.g. when suggesting a quoted path/table placeholder after `load <format>.`.
val OTHER_TABLE_KEY = "__OTHER__TABLE__"

// Sentinel marking top-level scope. NOTE(review): no usage visible in this chunk — confirm against callers.
val TOP_LEVEL_KEY = "__TOP_LEVEL__"

// Synthetic, column-less MetaTable keyed by the KEY_WORD sentinel (no db/schema).
def KEY_WORD_TABLE = MetaTable(MetaTableKey(None, None, SpecialTableConst.KEY_WORD), List())
Expand All @@ -19,6 +21,8 @@ object SpecialTableConst {

// Synthetic, column-less MetaTable keyed by OPTION_KEY (no db/schema).
def OPTION_TABLE = MetaTable(MetaTableKey(None, None, SpecialTableConst.OPTION_KEY), List())

// Synthetic, column-less MetaTable keyed by OTHER_TABLE_KEY; used when a suggestion
// is neither a concrete table nor an option (e.g. the path/table quote hint).
def OTHER_TABLE = MetaTable(MetaTableKey(None, None, SpecialTableConst.OTHER_TABLE_KEY), List())

// Builds a column-less MetaTable for temp table `name`, namespaced under the TEMP_TABLE_DB_KEY pseudo-database.
def tempTable(name: String) = MetaTable(MetaTableKey(None, Option(TEMP_TABLE_DB_KEY), name), List())

def subQueryAliasTable = {
Expand Down
87 changes: 53 additions & 34 deletions src/main/java/tech/mlsql/autosuggest/dsl/TokenMatcher.scala
Original file line number Diff line number Diff line change
Expand Up @@ -157,28 +157,37 @@ class TokenMatcher(tokens: List[Token], val start: Int) {
var isFail = false


foods.map { foodw =>
val stepSize = foodw.foods.count
var matchValue = 0
foodw.foods.foods.zipWithIndex.foreach { case (food, idx) =>
if (matchValue == 0 && matchToken(food, currentIndex + idx) == -1) {
matchValue = -1
}
}
if (foodw.optional) {
if (matchValue != -1) {
currentIndex = currentIndex + stepSize
}
} else {
if (matchValue != -1) {
currentIndex = currentIndex + stepSize
foods.foreach { foodw =>

if (currentIndex >= tokens.size) {
isFail = true
} else {
val stepSize = foodw.foods.count
var matchValue = 0
foodw.foods.foods.zipWithIndex.foreach { case (food, idx) =>
if (matchValue == 0 && matchToken(food, currentIndex + idx) == -1) {
matchValue = -1
}
}
if (foodw.optional) {
if (matchValue != -1) {
currentIndex = currentIndex + stepSize
}
} else {
//mark fail
isFail = true
if (matchValue != -1) {
currentIndex = currentIndex + stepSize

} else {
//mark fail
isFail = true
}
}
}
}

if (!isFail && currentIndex == tokens.size) {
currentIndex = tokens.size - 1
}
val targetIndex = if (isFail) -1 else currentIndex
cacheResult = targetIndex
this
Expand All @@ -189,27 +198,37 @@ class TokenMatcher(tokens: List[Token], val start: Int) {
var isFail = false


foods.map { foodw =>
val stepSize = foodw.foods.count
var matchValue = 0
foodw.foods.foods.zipWithIndex.foreach { case (food, idx) =>
if (matchValue == 0 && matchToken(food, currentIndex - idx) == -1) {
matchValue = -1
}
}
if (foodw.optional) {
if (matchValue != -1) {
currentIndex = currentIndex - stepSize
}
foods.foreach { foodw =>
// if out of bound then mark fail
if (currentIndex <= -1) {
isFail = true
} else {
if (matchValue != -1) {
currentIndex = currentIndex - stepSize

val stepSize = foodw.foods.count
var matchValue = 0
foodw.foods.foods.zipWithIndex.foreach { case (food, idx) =>
if (matchValue == 0 && matchToken(food, currentIndex - idx) == -1) {
matchValue = -1
}
}
if (foodw.optional) {
if (matchValue != -1) {
currentIndex = currentIndex - stepSize
}
} else {
//mark fail
isFail = true
if (matchValue != -1) {
currentIndex = currentIndex - stepSize

} else {
//mark fail
isFail = true
}
}
}

}

if (!isFail && currentIndex == -1) {
currentIndex = 0
}
val targetIndex = if (isFail) -1 else currentIndex
cacheResult = targetIndex
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -81,7 +81,12 @@ object LexerUtils {
return TokenPos(lastToken._2, TokenPosType.NEXT, 0)
}

if (colNum > lastToken._1.getCharPositionInLine && colNum <= lastToken._1.getCharPositionInLine + lastToken._1.getText.size) {
if (colNum > lastToken._1.getCharPositionInLine
&& colNum <= lastToken._1.getCharPositionInLine + lastToken._1.getText.size
&&
(lastToken._1.getType != DSLSQLLexer.UNRECOGNIZED
&& lastToken._1.getType != MLSQLTokenTypeWrapper.DOT)
) {
return TokenPos(lastToken._2, TokenPosType.CURRENT, colNum - lastToken._1.getCharPositionInLine)
}
oneLineTokens.map { case (token, index) =>
Expand Down
22 changes: 22 additions & 0 deletions src/main/java/tech/mlsql/autosuggest/statement/LoadSuggester.scala
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,7 @@ class LoadSuggester(val context: AutoSuggestContext, val _tokens: List[Token], v
register(classOf[LoadPathSuggester])
register(classOf[LoadFormatSuggester])
register(classOf[LoadOptionsSuggester])
register(classOf[LoadPathQuoteSuggester])

override def register(clzz: Class[_ <: StatementSuggester]): SuggesterRegister = {
val instance = clzz.getConstructor(classOf[LoadSuggester]).newInstance(this).asInstanceOf[StatementSuggester]
Expand Down Expand Up @@ -120,6 +121,27 @@ class LoadOptionsSuggester(loadSuggester: LoadSuggester) extends StatementSugges
override def tokenPos: TokenPos = loadSuggester._tokenPos
}

/**
 * Suggests an empty backtick pair (``` `` ```) as the next completion once the user
 * has typed `load <format>.` — i.e. the spot where a path or table name belongs.
 *
 * Delegates token stream and cursor position to the owning [[LoadSuggester]].
 */
class LoadPathQuoteSuggester(loadSuggester: LoadSuggester) extends StatementSuggester with StatementUtils {
  override def name: String = "pathQuote"

  // Matches when, walking backwards from the cursor, the stream reads: '.' <- IDENTIFIER <- LOAD.
  override def isMatch(): Boolean = {
    TokenMatcher(tokens, tokenPos.pos)
      .back
      .eat(Food(None, MLSQLTokenTypeWrapper.DOT))
      .eat(Food(None, DSLSQLLexer.IDENTIFIER))
      .eat(Food(None, DSLSQLLexer.LOAD))
      .build
      .isSuccess
  }

  override def suggest(): List[SuggestItem] = {
    // Single candidate: an empty quoted placeholder described as "path or table".
    val quoteHint = SuggestItem("``", SpecialTableConst.OTHER_TABLE, Map("desc" -> "path or table"))
    LexerUtils.filterPrefixIfNeeded(List(quoteHint), tokens, tokenPos)
  }

  override def tokens: List[Token] = loadSuggester._tokens

  override def tokenPos: TokenPos = loadSuggester._tokenPos
}

//Here you can implement Hive table / HDFS Path auto suggestion
class LoadPathSuggester(loadSuggester: LoadSuggester) extends StatementSuggester with StatementUtils {
override def isMatch(): Boolean = {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -38,4 +38,9 @@ class LexerUtilsTest extends BaseTest {
assert(LexerUtils.toTokenPos(context.rawTokens, 1, 14) == TokenPos(3, TokenPosType.CURRENT, 3))
}

test("load csv.") {
  // Cursor at line 1, column 9 sits just past the trailing dot of `load csv.`;
  // the resolver should report a NEXT position (new token expected), not CURRENT.
  context.buildFromString("load csv.")
  val pos = LexerUtils.toTokenPos(context.rawTokens, 1, 9)
  assert(pos == TokenPos(2, TokenPosType.NEXT, 0))
}

}
Original file line number Diff line number Diff line change
@@ -1,7 +1,8 @@
package com.intigua.antlr4.autosuggest

import org.apache.spark.sql.catalyst.parser.SqlBaseLexer
import tech.mlsql.autosuggest.dsl.{Food, TokenMatcher}
import streaming.dsl.parser.DSLSQLLexer
import tech.mlsql.autosuggest.dsl.{Food, MLSQLTokenTypeWrapper, TokenMatcher}
import tech.mlsql.autosuggest.statement.LexerUtils

import scala.collection.JavaConverters._
Expand Down Expand Up @@ -31,4 +32,58 @@ class MatchTokenTest extends BaseTest {
val temp = TokenMatcher(tokens, 0).forward.orIndex(Array(Food(None, SqlBaseLexer.FROM), Food(None, SqlBaseLexer.SELECT)))
assert(temp == 0)
}

test("forward out of bound success") {
  // `load csv.` ends exactly at the dot: a forward match that consumes the
  // final token must succeed rather than trip over the end of the stream.
  val tokens = context.lexer.tokenizeNonDefaultChannel(
    """
      |load csv.
      |""".stripMargin).tokens.asScala.toList

  val result = TokenMatcher(tokens, 0)
    .forward
    .eat(Food(None, DSLSQLLexer.LOAD))
    .eat(Food(None, DSLSQLLexer.IDENTIFIER))
    .eat(Food(None, MLSQLTokenTypeWrapper.DOT))
    .build
  assert(result.isSuccess)
}
test("forward out of bound fail") {
  // Without the trailing dot the last expected token is absent, so the
  // forward match must run out of tokens and report failure.
  val tokens = context.lexer.tokenizeNonDefaultChannel(
    """
      |load csv
      |""".stripMargin).tokens.asScala.toList

  val result = TokenMatcher(tokens, 0)
    .forward
    .eat(Food(None, DSLSQLLexer.LOAD))
    .eat(Food(None, DSLSQLLexer.IDENTIFIER))
    .eat(Food(None, MLSQLTokenTypeWrapper.DOT))
    .build
  assert(!result.isSuccess)
}

test("back out of bound success") {
  val tokens = context.lexer.tokenizeNonDefaultChannel(
    """
      |load csv.
      |""".stripMargin).tokens.asScala.toList

  // Walk backwards from index 2: '.' <- IDENTIFIER <- LOAD. Reaching the
  // front of the stream exactly at the last expected token must succeed.
  val result = TokenMatcher(tokens, 2)
    .back
    .eat(Food(None, MLSQLTokenTypeWrapper.DOT))
    .eat(Food(None, DSLSQLLexer.IDENTIFIER))
    .eat(Food(None, DSLSQLLexer.LOAD))
    .build
  assert(result.isSuccess)
}
test("back out of bound fail") {
  // `csv.` has no LOAD token before it: the backward match runs off the
  // front of the stream before all foods are consumed and must fail.
  val tokens = context.lexer.tokenizeNonDefaultChannel(
    """
      |csv.
      |""".stripMargin).tokens.asScala.toList

  val result = TokenMatcher(tokens, 1)
    .back
    .eat(Food(None, MLSQLTokenTypeWrapper.DOT))
    .eat(Food(None, DSLSQLLexer.IDENTIFIER))
    .eat(Food(None, DSLSQLLexer.LOAD))
    .build
  assert(!result.isSuccess)
}
}