Commit 9fec2d8

able to parse most column attributes; more work is needed to support additional datatypes
1 parent 08d6657 commit 9fec2d8

4 files changed (+117 −57 lines)

ast.go

Lines changed: 27 additions & 29 deletions
@@ -200,50 +200,44 @@ type DDL struct {
 	NewName []byte
 }
 
+type ColumnAtts []string
+
+func (node ColumnAtts) Format(buf *TrackedBuffer) {
+	prefix := " "
+	for _, v := range node {
+		if v != "" {
+			buf.Myprintf("%s%s", prefix, v)
+		}
+	}
+}
+
 type ColumnDefinition struct {
-	ColName      string
-	ColType      string
-	IsPrimaryKey string
+	ColName    string
+	ColType    string
+	ColumnAtts ColumnAtts
 }
 
 func (node ColumnDefinition) Format(buf *TrackedBuffer) {
-	s := ""
-	s += node.ColName
-	s += " " + node.ColType
-	if node.IsPrimaryKey != "" {
-		s += " " + node.IsPrimaryKey
-	}
-	buf.Myprintf(s)
+	buf.Myprintf("%s %s%v", node.ColName, node.ColType, node.ColumnAtts)
 }
 
 type ColumnDefinitions []ColumnDefinition
 
 func (node ColumnDefinitions) Format(buf *TrackedBuffer) {
-	sep := ",\n"
+	prefix := ""
 	buf.Myprintf("(\n")
 	for i := 0; i < len(node); i++ {
-		if i == len(node)-1 {
-			sep = "\n"
-		}
-		buf.Myprintf("\t%v%s", node[i], sep)
+		buf.Myprintf("%s\t%v", prefix, node[i])
+		prefix = ",\n"
 	}
-	buf.Myprintf(")")
+	buf.Myprintf("\n)")
 }
 
 type CreateTable struct {
 	Name              []byte
 	ColumnDefinitions ColumnDefinitions
 }
 
-func (node *CreateTable) FindPrimaryKey() string {
-	for _, col := range node.ColumnDefinitions {
-		if col.IsPrimaryKey != "" {
-			return col.ColName
-		}
-	}
-	return ""
-}
-
 func (node *CreateTable) Format(buf *TrackedBuffer) {
 	buf.Myprintf("create table %s %v", node.Name, node.ColumnDefinitions)
 }
@@ -254,10 +248,6 @@ const (
 	AST_VIEW = "view"
 )
 
-const (
-	AST_PRIMARY_KEY = "primary key"
-)
-
 const (
 	AST_CREATE = "create"
 	AST_ALTER  = "alter"
@@ -1040,4 +1030,12 @@ const (
 	AST_CHAR    = "char"
 	AST_VARCHAR = "varchar"
 	AST_TEXT    = "text"
+
+	AST_PRIMARY_KEY = "primary key"
+
+	AST_UNIQUE_KEY     = "unique key"
+	AST_AUTO_INCREMENT = "auto_increment"
+	AST_NOT_NULL       = "not null"
+	AST_DEFAULT        = "default"
+	AST_KEY            = "key"
 )

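The rewritten ColumnDefinitions.Format drops the look-ahead on the last element and instead flips a separator prefix after the first column, moving the closing parenthesis onto its own line. A minimal standalone sketch of that pattern (bytes.Buffer and fmt stand in for the package's TrackedBuffer here, purely for illustration; the column strings are borrowed from the tests below):

package main

import (
	"bytes"
	"fmt"
)

func main() {
	cols := []string{
		"ID int primary key not null auto_increment",
		"LastName varchar(255)",
		"FirstName varchar(255)",
	}

	var buf bytes.Buffer
	buf.WriteString("(\n")
	prefix := ""
	for _, c := range cols {
		fmt.Fprintf(&buf, "%s\t%s", prefix, c)
		prefix = ",\n" // every column after the first is preceded by a comma
	}
	buf.WriteString("\n)")

	fmt.Println(buf.String())
	// (
	//	ID int primary key not null auto_increment,
	//	LastName varchar(255),
	//	FirstName varchar(255)
	// )
}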
parse_test.go

Lines changed: 21 additions & 9 deletions
@@ -24,7 +24,7 @@ func TestParse(t *testing.T) {
 	}
 }
 
-func TestCreatTable(t *testing.T) {
+func TestCreatTable1(t *testing.T) {
 	sql := `create table t1 (
 	ID int primary key,
 	LastName varchar(255),
@@ -39,22 +39,34 @@ func TestCreatTable(t *testing.T) {
 	assert.Equal(t, sql, s)
 }
 
-func TestPrimaryKey(t *testing.T) {
+func TestCreatTable2(t *testing.T) {
 	sql := `create table t1 (
+	ID int primary key not null auto_increment,
 	LastName varchar(255),
-	FirstName varchar(255),
-	ID int primary key
+	FirstName varchar(255)
 )`
 	tree, err := Parse(sql)
 	if err != nil {
 		t.Fatal(err)
 	}
-	create, ok := tree.(*CreateTable)
-	if !ok {
-		t.Fatal("not CreateTable")
+	s := String(tree)
+
+	assert.Equal(t, sql, s)
+}
+
+func TestCreatTable3(t *testing.T) {
+	sql := `create table t1 (
+	ID int unique key not null auto_increment,
+	LastName varchar(255),
+	FirstName varchar(255)
+)`
+	tree, err := Parse(sql)
+	if err != nil {
+		t.Fatal(err)
 	}
-	primary_key := create.FindPrimaryKey()
-	assert.Equal(t, "ID", primary_key)
+	s := String(tree)
+
+	assert.Equal(t, sql, s)
 }
 
 func BenchmarkParse1(b *testing.B) {

sql.y

Lines changed: 65 additions & 16 deletions
@@ -63,18 +63,24 @@ var (
 	updateExprs UpdateExprs
 	updateExpr  *UpdateExpr
 
+	/*
+		for CreateTable
+	*/
 	createTableStmt   CreateTable
 	columnDefinition  ColumnDefinition
 	columnDefinitions []ColumnDefinition
+	columnAtts        ColumnAtts
 }
 
 %token LEX_ERROR
-%token <empty> SELECT INSERT UPDATE DELETE FROM WHERE GROUP HAVING ORDER BY LIMIT FOR PRIMARY
+%token <empty> SELECT INSERT UPDATE DELETE FROM WHERE GROUP HAVING ORDER BY LIMIT FOR
 %token <empty> ALL DISTINCT AS EXISTS IN IS LIKE BETWEEN NULL ASC DESC VALUES INTO DUPLICATE KEY DEFAULT SET LOCK
 %token <bytes> ID STRING NUMBER VALUE_ARG LIST_ARG COMMENT
 %token <empty> LE GE NE NULL_SAFE_EQUAL
 %token <empty> '(' '=' '<' '>' '~'
 
+%token <empty> PRIMARY
+%token <empty> UNIQUE
 %left <empty> UNION MINUS EXCEPT INTERSECT
 %left <empty> ','
 %left <empty> JOIN STRAIGHT_JOIN LEFT RIGHT INNER OUTER CROSS NATURAL USE FORCE
@@ -85,14 +91,14 @@ var (
 %left <empty> '&' '|' '^'
 %left <empty> '+' '-'
 %left <empty> '*' '/' '%'
-%nonassoc <empty> '.'
-%left <empty> UNARY
+%nonassoc <empty> '.'
+%left <empty> UNARY
 %right <empty> CASE WHEN THEN ELSE
 %left <empty> END
 
 // DDL Tokens
 %token <empty> CREATE ALTER DROP RENAME ANALYZE
-%token <empty> TABLE INDEX VIEW TO IGNORE IF UNIQUE USING
+%token <empty> TABLE INDEX VIEW TO IGNORE IF USING
 %token <empty> SHOW DESCRIBE EXPLAIN
 
 %start any_command
@@ -151,17 +157,21 @@ var (
 /*
   Below are modification to extract primary key
 */
-%type <str> data_type
-%type <columnDefinition> column_definition
-%type <columnDefinitions> column_definition_list
-%type <statement> create_table_statement
-%type <str> primary_key_opt
 /*
-  Datatypes
+  keywords
 */
 %token <empty> BIT TINYINT SMALLINT MEDIUMINT INT INTEGER BIGINT REAL DOUBLE FLOAT UNSIGNED ZEROFILL DECIMAL NUMERIC
 %token <empty> TEXT CHAR VARCHAR
-%type <str> length_opt char_type numeric_type unsigned_opt zero_fill_opt
+
+%token <empty> NULLX AUTO_INCREMENT BOOL APPROXNUM INTNUM
+
+%type <str> data_type
+%type <columnDefinition> column_definition
+%type <columnDefinitions> column_definition_list
+%type <statement> create_table_statement
+%type <str> length_opt char_type numeric_type unsigned_opt zero_fill_opt key_att
+%type <columnAtts> column_atts
+
 
 
 %%
@@ -328,19 +338,58 @@ unsigned_opt:
 	{
 		$$ = AST_UNSIGNED
 	}
-primary_key_opt:
+
+column_atts:
 	{
-		$$ = ""
+		$$ = ColumnAtts{}
 	}
-| PRIMARY KEY
+| column_atts NOT NULL
 	{
+		$$ = append($$, AST_NOT_NULL)
+	}
+
+| column_atts NULL
+| column_atts DEFAULT STRING
+	{
+		node := StrVal($3)
+		$$ = append($$, "default " + String(node))
+	}
+| column_atts DEFAULT NUMBER
+	{
+		node := NumVal($3)
+		$$ = append($$, "default " + String(node))
+	}
+| column_atts AUTO_INCREMENT
+	{
+		$$ = append($$, AST_AUTO_INCREMENT)
+	}
+| column_atts key_att
+	{
+		$$ = append($$, $2)
+	}
+
+key_att:
+	primary_key
+	{
 		$$ = AST_PRIMARY_KEY
 	}
+| unique_key
+	{
+		$$ = AST_UNIQUE_KEY
+	}
+
+primary_key:
+	PRIMARY KEY
+| KEY
+
+unique_key:
+	UNIQUE
+| UNIQUE KEY
 
 column_definition:
-	ID data_type primary_key_opt
+	ID data_type column_atts
 	{
-		$$ = ColumnDefinition{ColName: string($1), ColType: $2, IsPrimaryKey: $3 }
+		$$ = ColumnDefinition{ColName: string($1), ColType: $2, ColumnAtts: $3 }
 	}
 
 column_definition_list:

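The new column_atts production is left-recursive: it starts from an empty ColumnAtts and each repetition appends exactly one attribute (AST_NOT_NULL, a default clause, AST_AUTO_INCREMENT, or a key_att), so attributes are stored and re-printed in source order. A hedged sketch of a test that could sit next to the ones in parse_test.go (TestColumnAttsOrder is hypothetical and not part of this commit; it assumes the same Parse, CreateTable, and testify assert setup the existing tests use):

func TestColumnAttsOrder(t *testing.T) {
	sql := `create table t1 (
	ID int primary key not null auto_increment,
	LastName varchar(255),
	FirstName varchar(255)
)`
	tree, err := Parse(sql)
	if err != nil {
		t.Fatal(err)
	}

	create, ok := tree.(*CreateTable)
	if !ok {
		t.Fatal("not CreateTable")
	}
	// One attribute per repetition of the left-recursive rule, in source order.
	assert.Equal(t, ColumnAtts{"primary key", "not null", "auto_increment"},
		create.ColumnDefinitions[0].ColumnAtts)
}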
token.go

Lines changed: 4 additions & 3 deletions
@@ -126,9 +126,10 @@ var keywords = map[string]int{
 	"text": TEXT,
 
 	//other keywords
-	"unsigned": UNSIGNED,
-	"zerofill": ZEROFILL,
-	"primary":  PRIMARY,
+	"unsigned":       UNSIGNED,
+	"zerofill":       ZEROFILL,
+	"primary":        PRIMARY,
+	"auto_increment": AUTO_INCREMENT,
 }
 
 // Lex returns the next token form the Tokenizer.
