Skip to content

Commit

Permalink
Minor refactor: unify the operation type into the token kind (#115)
Browse files Browse the repository at this point in the history
  • Loading branch information
git-hulk authored Dec 27, 2024
1 parent 5e79b05 commit 72b088c
Show file tree
Hide file tree
Showing 10 changed files with 143 additions and 154 deletions.
6 changes: 3 additions & 3 deletions parser/ast.go
Original file line number Diff line number Diff line change
Expand Up @@ -159,7 +159,7 @@ func (p *BinaryOperation) End() Pos {
func (p *BinaryOperation) String() string {
var builder strings.Builder
builder.WriteString(p.LeftExpr.String())
if p.Operation != opTypeCast {
if p.Operation != TokenKindDash {
builder.WriteByte(' ')
}
if p.HasNot {
Expand All @@ -168,7 +168,7 @@ func (p *BinaryOperation) String() string {
builder.WriteString("GLOBAL ")
}
builder.WriteString(string(p.Operation))
if p.Operation != opTypeCast {
if p.Operation != TokenKindDash {
builder.WriteByte(' ')
}
builder.WriteString(p.RightExpr.String())
Expand Down Expand Up @@ -1914,7 +1914,7 @@ func (s *SettingPair) String() string {
var builder strings.Builder
builder.WriteString(s.Name.String())
if s.Value != nil {
if s.Operation == opTypeEQ {
if s.Operation == TokenKindSingleEQ {
builder.WriteString(string(s.Operation))
} else {
builder.WriteByte(' ')
Expand Down
61 changes: 39 additions & 22 deletions parser/lexer.go
Original file line number Diff line number Diff line change
Expand Up @@ -9,13 +9,30 @@ import (
)

const (
TokenEOF TokenKind = "<eof>"
TokenIdent TokenKind = "<ident>"
TokenKeyword TokenKind = "<keyword>"
TokenInt TokenKind = "<int>"
TokenFloat TokenKind = "<float>"
TokenString TokenKind = "<string>"
TokenDot = "."
TokenKindEOF TokenKind = "<eof>"
TokenKindIdent TokenKind = "<ident>"
TokenKindKeyword TokenKind = "<keyword>"
TokenKindInt TokenKind = "<int>"
TokenKindFloat TokenKind = "<float>"
TokenKindString TokenKind = "<string>"
TokenKindDot = "."
TokenKindSingleEQ TokenKind = "="
TokenKindDoubleEQ TokenKind = "=="
TokenKindNE TokenKind = "!="
TokenKindLT TokenKind = "<"
TokenKindLE TokenKind = "<="
TokenKindGT TokenKind = ">"
TokenKindGE TokenKind = ">="
TokenKindQuery = "?"

TokenKindPlus TokenKind = "+"
TokenKindMinus TokenKind = "-"
TokenKindMul TokenKind = "*"
TokenKindDiv TokenKind = "/"
TokenKindMod TokenKind = "%"

TokenKindArrow TokenKind = "->"
TokenKindDash TokenKind = "::"
)

const (
Expand All @@ -33,12 +50,12 @@ type Token struct {

Kind TokenKind
String string
Base int // 10 or 16 on TokenInt
Base int // 10 or 16 on TokenKindInt
QuoteType int
}

func (t *Token) ToString() string {
if t.Kind == TokenKeyword {
if t.Kind == TokenKindKeyword {
return strings.ToUpper(t.String)
}
return t.String
Expand Down Expand Up @@ -87,7 +104,7 @@ func (l *Lexer) consumeNumber() error {
}

hasExp := false
tokenKind := TokenInt
tokenKind := TokenKindInt
hasNumberPart := false
for l.peekOk(i) {
hasNumberPart = true
Expand All @@ -100,7 +117,7 @@ func (l *Lexer) consumeNumber() error {
i++
continue
case c == '.': // float
tokenKind = TokenFloat
tokenKind = TokenKindFloat
i++
continue
case base != 16 && (c == 'e' || c == 'E' || c == 'p' || c == 'P'):
Expand Down Expand Up @@ -165,9 +182,9 @@ func (l *Lexer) consumeIdent(_ Pos) error {
}
slice := l.slice(0, i)
if quoteType == Unquoted && l.isKeyword(strings.ToUpper(slice)) {
token.Kind = TokenKeyword
token.Kind = TokenKindKeyword
} else {
token.Kind = TokenIdent
token.Kind = TokenKindIdent
}
token.Pos = Pos(l.current)
token.End = Pos(l.current + i)
Expand Down Expand Up @@ -214,7 +231,7 @@ func (l *Lexer) consumeString() error {
return errors.New("invalid string")
}
l.lastToken = &Token{
Kind: TokenString,
Kind: TokenKindString,
String: l.slice(1, i),
Pos: Pos(l.current + 1),
End: Pos(l.current + i),
Expand Down Expand Up @@ -265,11 +282,11 @@ func (l *Lexer) peekToken() (*Token, error) {
}

func (l *Lexer) hasPrecedenceToken(last *Token) bool {
return last != nil && (last.Kind == TokenIdent ||
last.Kind == TokenKeyword ||
last.Kind == TokenInt ||
last.Kind == TokenFloat ||
last.Kind == TokenString)
return last != nil && (last.Kind == TokenKindIdent ||
last.Kind == TokenKindKeyword ||
last.Kind == TokenKindInt ||
last.Kind == TokenKindFloat ||
last.Kind == TokenKindString)
}

func (l *Lexer) consumeToken() error {
Expand Down Expand Up @@ -304,7 +321,7 @@ func (l *Lexer) consumeToken() error {
} else if l.peekOk(1) && l.peekN(1) == '>' {
l.lastToken = &Token{
String: l.slice(0, 2),
Kind: opTypeArrow,
Kind: TokenKindArrow,
Pos: Pos(l.current),
End: Pos(l.current + 2),
}
Expand All @@ -321,7 +338,7 @@ func (l *Lexer) consumeToken() error {
if l.peekOk(1) && l.peekN(1) == ':' {
l.lastToken = &Token{
String: l.slice(0, 2),
Kind: opTypeCast,
Kind: TokenKindDash,
Pos: Pos(l.current),
End: Pos(l.current + 2),
}
Expand All @@ -331,7 +348,7 @@ func (l *Lexer) consumeToken() error {
case '.':
l.lastToken = &Token{
String: l.slice(0, 1),
Kind: TokenDot,
Kind: TokenKindDot,
Pos: Pos(l.current),
End: Pos(l.current + 1),
}
Expand Down
12 changes: 6 additions & 6 deletions parser/lexer_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -40,7 +40,7 @@ func TestConsumeString(t *testing.T) {
lexer := NewLexer(s)
err := lexer.consumeToken()
require.NoError(t, err)
require.Equal(t, TokenString, lexer.lastToken.Kind)
require.Equal(t, TokenKindString, lexer.lastToken.Kind)
require.Equal(t, strings.Trim(s, "'"), lexer.lastToken.String)
require.True(t, lexer.isEOF())
}
Expand All @@ -61,7 +61,7 @@ func TestConsumeNumber(t *testing.T) {
lexer := NewLexer(i)
err := lexer.consumeToken()
require.NoError(t, err)
require.Equal(t, TokenInt, lexer.lastToken.Kind)
require.Equal(t, TokenKindInt, lexer.lastToken.Kind)
require.Equal(t, 10, lexer.lastToken.Base)
require.Equal(t, i, lexer.lastToken.String)
require.True(t, lexer.isEOF())
Expand All @@ -77,7 +77,7 @@ func TestConsumeNumber(t *testing.T) {
lexer := NewLexer(n)
err := lexer.consumeToken()
require.NoError(t, err)
require.Equal(t, TokenInt, lexer.lastToken.Kind)
require.Equal(t, TokenKindInt, lexer.lastToken.Kind)
require.Equal(t, 16, lexer.lastToken.Base)
require.Equal(t, n, lexer.lastToken.String)
require.True(t, lexer.isEOF())
Expand Down Expand Up @@ -117,7 +117,7 @@ func TestConsumeNumber(t *testing.T) {
lexer := NewLexer(f)
err := lexer.consumeToken()
require.NoError(t, err)
require.Equal(t, TokenFloat, lexer.lastToken.Kind)
require.Equal(t, TokenKindFloat, lexer.lastToken.Kind)
require.Equal(t, f, lexer.lastToken.String)
require.True(t, lexer.isEOF())
}
Expand Down Expand Up @@ -167,7 +167,7 @@ func TestConsumeNumber(t *testing.T) {
lexer := NewLexer(i)
err := lexer.consumeToken()
require.NoError(t, err)
require.Equal(t, TokenIdent, lexer.lastToken.Kind)
require.Equal(t, TokenKindIdent, lexer.lastToken.Kind)
require.Equal(t, strings.Trim(i, "`"), lexer.lastToken.String)
require.True(t, lexer.isEOF())
}
Expand All @@ -178,7 +178,7 @@ func TestConsumeNumber(t *testing.T) {
lexer := NewLexer(k)
err := lexer.consumeToken()
require.NoError(t, err)
require.Equal(t, TokenKeyword, lexer.lastToken.Kind)
require.Equal(t, TokenKindKeyword, lexer.lastToken.Kind)
require.Equal(t, k, lexer.lastToken.String)
require.True(t, lexer.isEOF())
}
Expand Down
22 changes: 11 additions & 11 deletions parser/parse_system.go
Original file line number Diff line number Diff line change
Expand Up @@ -343,7 +343,7 @@ func (p *Parser) parseCheckStmt(pos Pos) (*CheckStmt, error) {

func (p *Parser) parseRoleName(_ Pos) (*RoleName, error) {
switch {
case p.matchTokenKind(TokenIdent):
case p.matchTokenKind(TokenKindIdent):
name, err := p.parseIdent()
if err != nil {
return nil, err
Expand All @@ -364,7 +364,7 @@ func (p *Parser) parseRoleName(_ Pos) (*RoleName, error) {
Scope: scope,
OnCluster: onCluster,
}, nil
case p.matchTokenKind(TokenString):
case p.matchTokenKind(TokenKindString):
name, err := p.parseString(p.Pos())
if err != nil {
return nil, err
Expand All @@ -391,7 +391,7 @@ func (p *Parser) tryParseRoleSettings(pos Pos) ([]*RoleSetting, error) {

func (p *Parser) parseRoleSetting(_ Pos) (*RoleSetting, error) {
pairs := make([]*SettingPair, 0)
for p.matchTokenKind(TokenIdent) {
for p.matchTokenKind(TokenKindIdent) {
name, err := p.parseIdent()
if err != nil {
return nil, err
Expand All @@ -404,12 +404,12 @@ func (p *Parser) parseRoleSetting(_ Pos) (*RoleSetting, error) {
}, nil
}
switch {
case p.matchTokenKind(opTypeEQ),
p.matchTokenKind(TokenInt),
p.matchTokenKind(TokenFloat),
p.matchTokenKind(TokenString):
case p.matchTokenKind(TokenKindSingleEQ),
p.matchTokenKind(TokenKindInt),
p.matchTokenKind(TokenKindFloat),
p.matchTokenKind(TokenKindString):
var op TokenKind
if token := p.tryConsumeTokenKind(opTypeEQ); token != nil {
if token := p.tryConsumeTokenKind(TokenKindSingleEQ); token != nil {
op = token.Kind
}
value, err := p.parseLiteral(p.Pos())
Expand All @@ -419,7 +419,7 @@ func (p *Parser) parseRoleSetting(_ Pos) (*RoleSetting, error) {
// docs: https://clickhouse.com/docs/en/sql-reference/statements/alter/role
// the operator "=" was required if the variable name is NOT in
// ["MIN", "MAX", "PROFILE"] and value is existed.
if value != nil && name.Name != "MIN" && name.Name != "MAX" && name.Name != "PROFILE" && op != opTypeEQ {
if value != nil && name.Name != "MIN" && name.Name != "MAX" && name.Name != "PROFILE" && op != TokenKindSingleEQ {
return nil, fmt.Errorf("expected operator = or no value, but got %s", op)
}
pairs = append(pairs, &SettingPair{
Expand Down Expand Up @@ -809,7 +809,7 @@ func (p *Parser) parsePrivilegeSystem(pos Pos) (*PrivilegeClause, error) {
}

func (p *Parser) parsePrivilegeClause(pos Pos) (*PrivilegeClause, error) {
if p.matchTokenKind(TokenIdent) {
if p.matchTokenKind(TokenKindIdent) {
if p.last().String == "dictGet" {
_ = p.lexer.consumeToken()
return &PrivilegeClause{
Expand Down Expand Up @@ -921,7 +921,7 @@ func (p *Parser) parseGrantSource(_ Pos) (*TableIdentifier, error) {
return nil, err
}

if p.tryConsumeTokenKind(TokenDot) == nil {
if p.tryConsumeTokenKind(TokenKindDot) == nil {
return &TableIdentifier{
Table: ident,
}, nil
Expand Down
Loading

0 comments on commit 72b088c

Please sign in to comment.