From b9e090fccccb80ade8aeb07943c8b0e3049fc96d Mon Sep 17 00:00:00 2001
From: Brennan Lamey
Date: Mon, 16 Dec 2024 19:44:50 -0600
Subject: [PATCH] added engine v2

Rips the old engine out and replaces it with the new interpreter-based
engine.
---
 Taskfile.yml | 2 -
 _previous/core/types/schema.go | 4 +-
 app/node/build.go | 16 +-
 app/shared/display/message_test.go | 1 -
 cmd/kwil-cli/cmds/database/call.go | 204 +-
 cmd/kwil-cli/cmds/database/cmd.go | 4 -
 cmd/kwil-cli/cmds/database/deploy.go | 131 -
 cmd/kwil-cli/cmds/database/drop.go | 57 -
 cmd/kwil-cli/cmds/database/execute.go | 8 +-
 cmd/kwil-cli/cmds/database/list.go | 82 -
 cmd/kwil-cli/cmds/database/message.go | 69 +-
 cmd/kwil-cli/cmds/database/message_test.go | 139 +-
 cmd/kwil-cli/cmds/database/query.go | 9 +-
 cmd/kwil-cli/cmds/database/read_schema.go | 53 -
 cmd/kwil-cli/cmds/utils/parse.go | 199 -
 cmd/kwil-cli/cmds/utils/test.go | 4 +-
 cmd/kwil-cli/cmds/utils/utils.go | 1 -
 common/common.go | 57 +-
 core/client/client.go | 79 +-
 core/client/types/client.go | 15 +-
 core/gatewayclient/client.go | 3 +-
 core/rpc/client/user/jsonrpc/methods.go | 52 +-
 core/rpc/client/user/txsvc.go | 6 +-
 core/rpc/json/errors.go | 1 -
 core/rpc/json/user/commands.go | 4 +-
 core/rpc/json/user/responses.go | 24 +-
 core/types/data_types.go | 402 +
 core/types/data_types_test.go | 198 +
 core/types/decimal/decimal.go | 9 +
 core/types/marshal_fuzz_test.go | 94 +-
 core/types/message.go | 11 +
 core/types/payloads.go | 30 +-
 core/types/results.go | 29 +-
 core/types/schema.go | 3081 ---
 core/types/schema_test.go | 2383 --
 core/types/transaction_test.go | 6 +-
 extensions/precompiles/actions.go | 252 +-
 extensions/precompiles/adhoc.go | 76 -
 extensions/precompiles/math.go | 218 +-
 go.mod | 5 +-
 go.sum | 8 +-
 node/engine/execution/dataset.go | 103 -
 node/engine/execution/execution_test.go | 645 -
 node/engine/execution/global.go | 502 -
 node/engine/execution/procedure.go | 656 -
 node/engine/execution/procedure_test.go | 59 -
 node/engine/execution/queries.go | 374 -
 node/engine/execution/queries_test.go | 61 -
 node/engine/execution/schema_v07.go | 239 -
 node/engine/generate/actions.go | 177 -
 node/engine/generate/actions_test.go | 141 -
 node/engine/generate/foreign_procedure.go | 277 -
 node/engine/generate/generate.go | 36 -
 node/engine/generate/generate_test.go | 301 -
 node/engine/generate/index.go | 54 -
 node/engine/generate/metadata.go | 8 -
 node/engine/generate/procedure.go | 276 -
 node/engine/generate/procedure_test.go | 226 -
 node/engine/generate/sql.go | 32 -
 node/engine/generate/table.go | 113 -
 node/engine/generate/utils.go | 62 -
 node/engine/integration/deployment_test.go | 256 -
 node/engine/integration/execution_test.go | 816 -
 node/engine/integration/procedure_test.go | 914 -
 node/engine/integration/schema_test.go | 304 -
 .../integration/schemas/social_media.kf | 130 -
 node/engine/integration/schemas/users.kf | 40 -
 node/engine/integration/schemas/video_game.kf | 77 -
 node/engine/integration/setup_test.go | 137 -
 node/engine/integration/sql_test.go | 265 -
 node/engine/interpreter/README.md | 12 +
 node/engine/interpreter/benchmark_test.go | 66 +
 node/engine/interpreter/context.go | 360 +
 node/engine/interpreter/errors.go | 32 +
 node/engine/interpreter/extension.go | 82 +
 node/engine/interpreter/interpreter.go | 494 +
 node/engine/interpreter/interpreter_test.go | 415 +
 node/engine/interpreter/ops.go | 142 +
 node/engine/interpreter/planner.go | 1674 ++
 node/engine/interpreter/roles.go | 616 +
 node/engine/interpreter/schema.sql | 438 +
 node/engine/interpreter/schema_testdata.sql | 33 +
 node/engine/interpreter/sql.go | 497 +
 node/engine/interpreter/sql_test.go | 342 +
 node/engine/interpreter/types.go | 243 +
 node/engine/interpreter/types_test.go | 53 +
 node/engine/interpreter/values.go | 2436 ++
 node/engine/interpreter/values_test.go | 830 +
 {parse => node/engine/parse}/.gitignore | 0
 {parse => node/engine/parse}/antlr.go | 1543 +-
 node/engine/parse/ast.go | 1948 ++
 {parse => node/engine/parse}/contextual.go | 11 +-
 {parse => node/engine/parse}/errors.go | 37 +-
 {parse => node/engine/parse}/functions.go | 561 +-
 node/engine/parse/functions_test.go | 41 +
 node/engine/parse/gen/kuneiform_lexer.go | 806 +
 .../engine/parse}/gen/kuneiform_parser.go | 22780 +++++++++-------
 .../gen/kuneiformparser_base_visitor.go | 190 +-
 .../parse}/gen/kuneiformparser_visitor.go | 240 +-
 .../engine/parse}/grammar/KuneiformLexer.g4 | 35 +-
 node/engine/parse/grammar/KuneiformParser.g4 | 452 +
 .../engine/parse}/grammar/README.md | 0
 .../engine/parse}/grammar/generate.sh | 0
 .../engine/parse}/grammar/rrdiagrams.html | 0
 node/engine/parse/parse.go | 196 +
 node/engine/parse/parse_test.go | 1965 ++
 {parse => node/engine/parse}/postgres/doc.go | 0
 .../engine/parse}/postgres/parse.go | 2 +-
 .../engine/parse/postgres/parse_cgo.go | 0
 .../engine/parse}/postgres/parse_test.go | 2 +-
 node/engine/parse/types.go | 60 +
 .../plpgsql.go => pg_generate/generate.go} | 776 +-
 node/engine/pg_generate/generate_test.go | 327 +
 .../engine}/planner/logical/errors.go | 6 +-
 .../engine}/planner/logical/mappings.go | 2 +-
 .../engine}/planner/logical/nodes.go | 376 +-
 .../engine}/planner/logical/planner.go | 1857 +-
 .../engine}/planner/logical/planner_test.go | 328 +-
 .../engine}/planner/logical/relation.go | 40 +-
 .../engine}/planner/logical/rewriter.go | 21 +-
 .../engine}/planner/optimizer/pushdown.go | 18 +-
 .../planner/optimizer/pushdown_test.go | 133 +-
 node/engine/testdata/actions.go | 98 -
 node/engine/testdata/extension.go | 16 -
 node/engine/testdata/procedures.go | 71 -
 node/engine/testdata/schema.go | 33 -
 node/engine/testdata/tables.go | 183 -
 node/engine/types.go | 183 +
 node/pg/db.go | 24 +
 node/pg/repl_changeset.go | 1 -
 node/pg/repl_changeset_test.go | 5 +-
 node/pg/types.go | 1 -
 node/services/jsonrpc/usersvc/service.go | 271 +-
 node/txapp/interfaces.go | 3 -
 node/txapp/mempool.go | 6 +-
 node/txapp/routes.go | 110 +-
 node/txapp/txapp.go | 6 +-
 parse/actions.go | 69 -
 parse/analyze.go | 2783 --
 parse/analyze_test.go | 29 -
 parse/ast.go | 1151 -
 parse/gen/kuneiform_lexer.go | 698 -
 parse/go.mod | 23 -
 parse/go.sum | 39 -
 parse/grammar/KuneiformParser.g4 | 423 -
 parse/parse.go | 559 -
 parse/parse_test.go | 3223 ---
 parse/types.go | 215 -
 parse/wasm/wasm.go | 59 -
 testing/proxy/impl_1.kf | 20 -
 testing/proxy/proxy_test.go | 203 +-
 testing/proxy/seed_1.sql | 15 +
 testing/testing.go | 161 +-
 153 files changed, 32515 insertions(+), 36991 deletions(-)
 delete mode 100644 cmd/kwil-cli/cmds/database/deploy.go
 delete mode 100644 cmd/kwil-cli/cmds/database/drop.go
 delete mode 100644 cmd/kwil-cli/cmds/database/list.go
 delete mode 100644 cmd/kwil-cli/cmds/database/read_schema.go
 delete mode 100644 cmd/kwil-cli/cmds/utils/parse.go
 create mode 100644 core/types/data_types.go
 create mode 100644 core/types/data_types_test.go
 delete mode 100644 core/types/schema.go
 delete mode 100644 core/types/schema_test.go
 delete mode 100644 extensions/precompiles/adhoc.go
 delete mode 100644 node/engine/execution/dataset.go
 delete mode 100644 node/engine/execution/execution_test.go
 delete mode 100644 node/engine/execution/global.go
 delete mode 100644 node/engine/execution/procedure.go
 delete mode 100644 node/engine/execution/procedure_test.go
 delete mode 100644 node/engine/execution/queries.go
 delete mode 100644 node/engine/execution/queries_test.go
 delete mode 100644 node/engine/execution/schema_v07.go
 delete mode 100644 node/engine/generate/actions.go
 delete mode 100644 node/engine/generate/actions_test.go
 delete mode 100644 node/engine/generate/foreign_procedure.go
 delete mode 100644 node/engine/generate/generate.go
 delete mode 100644 node/engine/generate/generate_test.go
 delete mode 100644 node/engine/generate/index.go
 delete mode 100644 node/engine/generate/metadata.go
 delete mode 100644 node/engine/generate/procedure.go
 delete mode 100644 node/engine/generate/procedure_test.go
 delete mode 100644 node/engine/generate/sql.go
 delete mode 100644 node/engine/generate/table.go
 delete mode 100644 node/engine/generate/utils.go
 delete mode 100644 node/engine/integration/deployment_test.go
 delete mode 100644 node/engine/integration/execution_test.go
 delete mode 100644 node/engine/integration/procedure_test.go
 delete mode 100644 node/engine/integration/schema_test.go
 delete mode 100644 node/engine/integration/schemas/social_media.kf
 delete mode 100644 node/engine/integration/schemas/users.kf
 delete mode 100644 node/engine/integration/schemas/video_game.kf
 delete mode 100644 node/engine/integration/setup_test.go
 delete mode 100644 node/engine/integration/sql_test.go
 create mode 100644 node/engine/interpreter/README.md
 create mode 100644 node/engine/interpreter/benchmark_test.go
 create mode 100644 node/engine/interpreter/context.go
 create mode 100644 node/engine/interpreter/errors.go
 create mode 100644 node/engine/interpreter/extension.go
 create mode 100644 node/engine/interpreter/interpreter.go
 create mode 100644 node/engine/interpreter/interpreter_test.go
 create mode 100644 node/engine/interpreter/ops.go
 create mode 100644 node/engine/interpreter/planner.go
 create mode 100644 node/engine/interpreter/roles.go
 create mode 100644 node/engine/interpreter/schema.sql
 create mode 100644 node/engine/interpreter/schema_testdata.sql
 create mode 100644 node/engine/interpreter/sql.go
 create mode 100644 node/engine/interpreter/sql_test.go
 create mode 100644 node/engine/interpreter/types.go
 create mode 100644 node/engine/interpreter/types_test.go
 create mode 100644 node/engine/interpreter/values.go
 create mode 100644 node/engine/interpreter/values_test.go
 rename {parse => node/engine/parse}/.gitignore (100%)
 rename {parse => node/engine/parse}/antlr.go (56%)
 create mode 100644 node/engine/parse/ast.go
 rename {parse => node/engine/parse}/contextual.go (80%)
 rename {parse => node/engine/parse}/errors.go (78%)
 rename {parse => node/engine/parse}/functions.go (56%)
 create mode 100644 node/engine/parse/functions_test.go
 create mode 100644 node/engine/parse/gen/kuneiform_lexer.go
 rename {parse => node/engine/parse}/gen/kuneiform_parser.go (59%)
 rename {parse => node/engine/parse}/gen/kuneiformparser_base_visitor.go (62%)
 rename {parse => node/engine/parse}/gen/kuneiformparser_visitor.go (55%)
 rename {parse => node/engine/parse}/grammar/KuneiformLexer.g4 (83%)
 create mode 100644 node/engine/parse/grammar/KuneiformParser.g4
 rename {parse => node/engine/parse}/grammar/README.md (100%)
 rename {parse => node/engine/parse}/grammar/generate.sh (100%)
 rename {parse => node/engine/parse}/grammar/rrdiagrams.html (100%)
 create mode 100644 node/engine/parse/parse.go
 create mode 100644 node/engine/parse/parse_test.go
 rename {parse =>
node/engine/parse}/postgres/doc.go (100%) rename {parse => node/engine/parse}/postgres/parse.go (89%) rename parse/postgres/_parse_cgo.go => node/engine/parse/postgres/parse_cgo.go (100%) rename {parse => node/engine/parse}/postgres/parse_test.go (82%) create mode 100644 node/engine/parse/types.go rename node/engine/{generate/plpgsql.go => pg_generate/generate.go} (53%) create mode 100644 node/engine/pg_generate/generate_test.go rename {parse => node/engine}/planner/logical/errors.go (66%) rename {parse => node/engine}/planner/logical/mappings.go (97%) rename {parse => node/engine}/planner/logical/nodes.go (88%) rename {parse => node/engine}/planner/logical/planner.go (52%) rename {parse => node/engine}/planner/logical/planner_test.go (72%) rename {parse => node/engine}/planner/logical/relation.go (86%) rename {parse => node/engine}/planner/logical/rewriter.go (96%) rename {parse => node/engine}/planner/optimizer/pushdown.go (96%) rename {parse => node/engine}/planner/optimizer/pushdown_test.go (65%) delete mode 100644 node/engine/testdata/actions.go delete mode 100644 node/engine/testdata/extension.go delete mode 100644 node/engine/testdata/procedures.go delete mode 100644 node/engine/testdata/schema.go delete mode 100644 node/engine/testdata/tables.go create mode 100644 node/engine/types.go delete mode 100644 parse/actions.go delete mode 100644 parse/analyze.go delete mode 100644 parse/analyze_test.go delete mode 100644 parse/ast.go delete mode 100644 parse/gen/kuneiform_lexer.go delete mode 100644 parse/go.mod delete mode 100644 parse/go.sum delete mode 100644 parse/grammar/KuneiformParser.g4 delete mode 100644 parse/parse.go delete mode 100644 parse/parse_test.go delete mode 100644 parse/types.go delete mode 100644 parse/wasm/wasm.go delete mode 100644 testing/proxy/impl_1.kf create mode 100644 testing/proxy/seed_1.sql diff --git a/Taskfile.yml b/Taskfile.yml index de91241f6..09613f025 100644 --- a/Taskfile.yml +++ b/Taskfile.yml @@ -230,14 +230,12 @@ tasks: desc: Run unit tests cmds: - go test ./core/... -tags=ext_test -count=1 - - CGO_ENABLED=1 go test ./parse/... -tags=ext_test -count=1 - CGO_ENABLED=1 go test ./... -tags=ext_test,pglive -count=1 -p=1 # no parallel for now because several try to use one pg database test:unit:race: desc: Run unit tests with the race detector cmds: - go test ./core/... -tags=ext_test -count=1 -race - - CGO_ENABLED=1 go test ./parse/... -tags=ext_test -count=1 -race - CGO_ENABLED=1 go test ./... 
-tags=ext_test,pglive -count=1 -race # test:it: diff --git a/_previous/core/types/schema.go b/_previous/core/types/schema.go index 803060332..1c7c8a9cc 100644 --- a/_previous/core/types/schema.go +++ b/_previous/core/types/schema.go @@ -1141,7 +1141,7 @@ func (c *DataType) PGString() (string, error) { scalar = "UINT256" case DecimalStr: if c.Metadata == ZeroMetadata { - return "", fmt.Errorf("decimal type must have metadata") + return "NUMERIC", nil } scalar = fmt.Sprintf("NUMERIC(%d,%d)", c.Metadata[0], c.Metadata[1]) @@ -1171,7 +1171,7 @@ func (c *DataType) Clean() error { return nil case DecimalStr: if c.Metadata == ZeroMetadata { - return fmt.Errorf("decimal type must have metadata") + return nil } err := decimal.CheckPrecisionAndScale(c.Metadata[0], c.Metadata[1]) diff --git a/app/node/build.go b/app/node/build.go index 8a2e123ed..e86842f05 100644 --- a/app/node/build.go +++ b/app/node/build.go @@ -26,7 +26,7 @@ import ( "github.com/kwilteam/kwil-db/node/accounts" blockprocessor "github.com/kwilteam/kwil-db/node/block_processor" "github.com/kwilteam/kwil-db/node/consensus" - "github.com/kwilteam/kwil-db/node/engine/execution" + "github.com/kwilteam/kwil-db/node/engine/interpreter" "github.com/kwilteam/kwil-db/node/listeners" "github.com/kwilteam/kwil-db/node/mempool" "github.com/kwilteam/kwil-db/node/meta" @@ -346,7 +346,7 @@ func (c *coreDependencies) service(loggerName string) *common.Service { } func buildTxApp(ctx context.Context, d *coreDependencies, db *pg.DB, accounts *accounts.Accounts, - votestore *voting.VoteStore, engine *execution.GlobalContext) *txapp.TxApp { + votestore *voting.VoteStore, engine common.Engine) *txapp.TxApp { signer := auth.GetNodeSigner(d.privKey) txapp, err := txapp.NewTxApp(ctx, db, engine, signer, nil, d.service("TxAPP"), accounts, votestore) @@ -472,7 +472,7 @@ func failBuild(err error, msg string) { }) } -func buildEngine(d *coreDependencies, db *pg.DB) *execution.GlobalContext { +func buildEngine(d *coreDependencies, db *pg.DB) *interpreter.ThreadSafeInterpreter { extensions := precompiles.RegisteredPrecompiles() for name := range extensions { d.logger.Info("registered extension", "name", name) @@ -484,23 +484,17 @@ func buildEngine(d *coreDependencies, db *pg.DB) *execution.GlobalContext { } defer tx.Rollback(d.ctx) - err = execution.InitializeEngine(d.ctx, tx) + interp, err := interpreter.NewInterpreter(d.ctx, tx, d.service("engine")) if err != nil { failBuild(err, "failed to initialize engine") } - eng, err := execution.NewGlobalContext(d.ctx, tx, - extensions, d.newService("engine")) - if err != nil { - failBuild(err, "failed to build engine") - } - err = tx.Commit(d.ctx) if err != nil { failBuild(err, "failed to commit engine init db txn") } - return eng + return interp } func buildSnapshotStore(d *coreDependencies) *snapshotter.SnapshotStore { diff --git a/app/shared/display/message_test.go b/app/shared/display/message_test.go index ffb8f6a68..2c4d92753 100644 --- a/app/shared/display/message_test.go +++ b/app/shared/display/message_test.go @@ -12,7 +12,6 @@ import ( "github.com/kwilteam/kwil-db/core/crypto/auth" "github.com/kwilteam/kwil-db/core/types" - "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) diff --git a/cmd/kwil-cli/cmds/database/call.go b/cmd/kwil-cli/cmds/database/call.go index 06d04f0cd..1c0001855 100644 --- a/cmd/kwil-cli/cmds/database/call.go +++ b/cmd/kwil-cli/cmds/database/call.go @@ -84,6 +84,9 @@ func callCmd() *cobra.Command { if len(tuples) == 0 { tuples = append(tuples, []any{}) } + if 
len(tuples) > 1 { + return display.PrintErr(cmd, errors.New("only one set of inputs can be provided to call")) + } data, err := clnt.Call(ctx, dbid, action, tuples[0]) if err != nil { @@ -91,7 +94,7 @@ func callCmd() *cobra.Command { } if data == nil { - data = &clientType.CallResult{} + data = &types.CallResult{} } return display.PrintCmd(cmd, &respCall{ @@ -109,13 +112,13 @@ func callCmd() *cobra.Command { } type respCall struct { - Data *clientType.CallResult + Data *types.CallResult PrintLogs bool } func (r *respCall) MarshalJSON() ([]byte, error) { if !r.PrintLogs { - return json.Marshal(r.Data.Records.ToStrings()) // this is for backwards compatibility + return json.Marshal(r.Data.QueryResult) // this is for backwards compatibility } bts, err := json.Marshal(r.Data) @@ -128,10 +131,10 @@ func (r *respCall) MarshalJSON() ([]byte, error) { func (r *respCall) MarshalText() (text []byte, err error) { if !r.PrintLogs { - return recordsToTable(r.Data.Records), nil + return recordsToTable(r.Data.QueryResult.ExportToStringMap()), nil } - bts := recordsToTable(r.Data.Records) + bts := recordsToTable(r.Data.QueryResult.ExportToStringMap()) if len(r.Data.Logs) > 0 { bts = append(bts, []byte("\n\nLogs:")...) @@ -145,25 +148,90 @@ func (r *respCall) MarshalText() (text []byte, err error) { // buildProcedureInputs will build the inputs for either // an action or procedure executon/call. -func buildExecutionInputs(ctx context.Context, client clientType.Client, dbid string, proc string, inputs []map[string]string) ([][]any, error) { - schema, err := client.GetSchema(ctx, dbid) +func buildExecutionInputs(ctx context.Context, client clientType.Client, namespace string, action string, inputs []map[string]string) ([][]any, error) { + params, err := getParamList(ctx, client, namespace, action) if err != nil { - return nil, fmt.Errorf("error getting schema: %w", err) + return nil, err } - for _, a := range schema.Actions { - if strings.EqualFold(a.Name, proc) { - return buildActionInputs(a, inputs) + var results [][]any + for _, in := range inputs { + var tuple []any + for _, p := range params { + val, ok := in[p.Name] + if !ok { + tuple = append(tuple, nil) + continue + } + + encoded, err := encodeBasedOnType(p.Type, val) + if err != nil { + return nil, err + } + + tuple = append(tuple, encoded) } + + results = append(results, tuple) + } + + return results, nil +} + +func getParamList(ctx context.Context, client clientType.Client, namespace, action string) ([]paramList, error) { + res, err := client.Query(ctx, "{info}SELECT parameters FROM actions WHERE namespace = $namespace AND name = $action", map[string]any{ + "namespace": namespace, + "action": action, + }) + if err != nil { + return nil, err + } + + if len(res.Values) == 0 { + return nil, errors.New(`action "%s" not found in namespace "%s"`) + } + if len(res.Values) > 1 { + return nil, errors.New(`action "%s" is ambiguous in namespace "%s"`) + } + + var strVal string + switch res.Values[0][0].(type) { + case nil: + return nil, nil // no inputs + case string: + strVal = res.Values[0][0].(string) + default: + return nil, errors.New("unexpected type for action parameters. 
this is a bug") + } + + p := []struct { + Name string `json:"name"` + DataType string `json:"data_type"` + }{} + + if err := json.Unmarshal([]byte(strVal), &p); err != nil { + return nil, err } - for _, p := range schema.Procedures { - if strings.EqualFold(p.Name, proc) { - return buildProcedureInputs(p, inputs) + params := make([]paramList, len(p)) + for i, param := range p { + dt, err := types.ParseDataType(param.DataType) + if err != nil { + return nil, err + } + + params[i] = paramList{ + Name: param.Name, + Type: dt, } } - return nil, errors.New("procedure/action not found") + return params, nil +} + +type paramList struct { + Name string + Type *types.DataType } // decodeMany attempts to parse command-line inputs as base64 encoded values. @@ -190,97 +258,33 @@ func decodeMany(inputs []string) ([][]byte, bool) { return b64Arr, b64Ok } -func buildActionInputs(a *types.Action, inputs []map[string]string) ([][]any, error) { - tuples := [][]any{} - for _, input := range inputs { - newTuple := []any{} - for _, inputField := range a.Parameters { - // unlike procedures, actions do not have typed parameters, - // so we should try to always parse arrays. - - val, ok := input[inputField] - if !ok { - fmt.Println(len(newTuple)) - // if not found, we should just add nil - newTuple = append(newTuple, nil) - continue - } - - split, err := splitIgnoringQuotedCommas(val) - if err != nil { - return nil, err - } - - // attempt to decode base64 encoded values - b64Arr, b64Ok := decodeMany(split) - if b64Ok { - // additional check here in case user is sending a single base64 value, we don't - // want to encode it as an array. - if len(b64Arr) == 1 { - newTuple = append(newTuple, b64Arr[0]) - continue - } - - newTuple = append(newTuple, b64Arr) - } else { - // if nothing was split, then keep the original value, not the []string{} - if len(split) == 1 { - newTuple = append(newTuple, split[0]) - continue - } - - newTuple = append(newTuple, split) - } +// encodeBasedOnType will encode the input value based on the type of the input. +// If it is an array, it will properly split the input value by commas. +// If the input value is base64 encoded, it will decode it. 
+func encodeBasedOnType(t *types.DataType, v string) (any, error) { + if t.IsArray { + split, err := splitIgnoringQuotedCommas(v) + if err != nil { + return nil, err } - tuples = append(tuples, newTuple) - } - - return tuples, nil -} - -func buildProcedureInputs(p *types.Procedure, inputs []map[string]string) ([][]any, error) { - tuples := [][]any{} - for _, input := range inputs { - newTuple := []any{} - for _, inputField := range p.Parameters { - v, ok := input[inputField.Name] - if !ok { - // if not found, we should just add nil - newTuple = append(newTuple, nil) - continue - } - - // if the input is an array, split it by commas - if inputField.Type.IsArray { - split, err := splitIgnoringQuotedCommas(v) - if err != nil { - return nil, err - } - // attempt to decode base64 encoded values - b64Arr, b64Ok := decodeMany(split) - if b64Ok { - newTuple = append(newTuple, b64Arr) - } else { - newTuple = append(newTuple, split) - } - continue - } - - // attempt to decode base64 encoded values - - bts, ok := decodeMany([]string{v}) - if ok { - newTuple = append(newTuple, bts[0]) - } else { - newTuple = append(newTuple, input[inputField.Name]) - } + // attempt to decode base64 encoded values + b64Arr, b64Ok := decodeMany(split) + if b64Ok { + return b64Arr, nil } - tuples = append(tuples, newTuple) + return split, nil + } + + // attempt to decode base64 encoded values + bts, ok := decodeMany([]string{v}) + if ok { + return bts[0], nil } - return tuples, nil + // otherwise, just keep it as string and let the server handle it + return v, nil } // splitIgnoringQuotedCommas splits a string by commas, but ignores commas that are inside single or double quotes. diff --git a/cmd/kwil-cli/cmds/database/cmd.go b/cmd/kwil-cli/cmds/database/cmd.go index 54ba49193..054a701f6 100644 --- a/cmd/kwil-cli/cmds/database/cmd.go +++ b/cmd/kwil-cli/cmds/database/cmd.go @@ -19,8 +19,6 @@ var ( func NewCmdDatabase() *cobra.Command { // readOnlyCmds do not create a transaction. readOnlyCmds := []*cobra.Command{ - listCmd(), - readSchemaCmd(), queryCmd(), callCmd(), // no tx, but may required key for signature, for now } @@ -28,8 +26,6 @@ func NewCmdDatabase() *cobra.Command { // writeCmds create a transactions, requiring a private key for signing/ writeCmds := []*cobra.Command{ - deployCmd(), - dropCmd(), executeCmd(), batchCmd(), } diff --git a/cmd/kwil-cli/cmds/database/deploy.go b/cmd/kwil-cli/cmds/database/deploy.go deleted file mode 100644 index 62fa84862..000000000 --- a/cmd/kwil-cli/cmds/database/deploy.go +++ /dev/null @@ -1,131 +0,0 @@ -package database - -import ( - "context" - "encoding/json" - "fmt" - "io" - "os" - "time" - - "github.com/spf13/cobra" - - "github.com/kwilteam/kwil-db/app/shared/display" - "github.com/kwilteam/kwil-db/cmd/kwil-cli/client" - "github.com/kwilteam/kwil-db/cmd/kwil-cli/config" - clientType "github.com/kwilteam/kwil-db/core/client/types" - "github.com/kwilteam/kwil-db/core/types" - "github.com/kwilteam/kwil-db/parse" -) - -var ( - deployLong = `Deploy a database schema to the target Kwil node. -A path to a file containing the database schema must be provided as the first positional argument. - -Either a Kuneiform or a JSON file can be provided. The file type is determined by the --type flag. -By default, the file type is kf (Kuneiform). 
Pass --type json to deploy a JSON file.` - - deployExample = `# Deploy a database schema to the target Kwil node -kwil-cli database deploy ./schema.kf` -) - -func deployCmd() *cobra.Command { - var filePath, fileType, overrideName string - - cmd := &cobra.Command{ - Use: "deploy ", - Short: "Deploy a database schema to the target Kwil node.", - Long: deployLong, - Example: deployExample, - RunE: func(cmd *cobra.Command, args []string) error { - return client.DialClient(cmd.Context(), cmd, 0, func(ctx context.Context, cl clientType.Client, conf *config.KwilCliConfig) error { - if cmd.Flags().Changed("path") { - if len(args) > 0 { - return display.PrintErr(cmd, fmt.Errorf("no positional arguments are allowed when using the --path flag")) - } - } else { - if len(args) == 0 { - return display.PrintErr(cmd, fmt.Errorf("must provide a path to the database schema file")) - } - filePath = args[0] - } - - // read in the file - file, err := os.Open(filePath) - if err != nil { - return display.PrintErr(cmd, fmt.Errorf("failed to read file: %w", err)) - } - defer file.Close() - - var db *types.Schema - if fileType == "kf" { - db, err = UnmarshalKf(file) - } else if fileType == "json" { - db, err = UnmarshalJson(file) - } else { - return display.PrintErr(cmd, fmt.Errorf("invalid file type: %s", fileType)) - } - if err != nil { - return display.PrintErr(cmd, fmt.Errorf("failed to unmarshal file: %w", err)) - } - - if cmd.Flags().Changed("name") { - if overrideName == "" { - return display.PrintErr(cmd, fmt.Errorf("--name flag cannot be empty string")) - } - db.Name = overrideName - } - - txHash, err := cl.DeployDatabase(ctx, db, clientType.WithNonce(nonceOverride), - clientType.WithSyncBroadcast(syncBcast)) - if err != nil { - return display.PrintErr(cmd, fmt.Errorf("failed to deploy database: %w", err)) - } - // If sycnBcast, and we have a txHash (error or not), do a query-tx. 
- if len(txHash) != 0 && syncBcast { - time.Sleep(500 * time.Millisecond) // otherwise it says not found at first - resp, err := cl.TxQuery(ctx, txHash) - if err != nil { - return display.PrintErr(cmd, fmt.Errorf("tx query failed: %w", err)) - } - return display.PrintCmd(cmd, display.NewTxHashAndExecResponse(resp)) - } - return display.PrintCmd(cmd, display.RespTxHash(txHash)) - }) - }, - } - - cmd.Flags().StringVarP(&filePath, "path", "p", "", "path to the database definition file (required)") - err := cmd.Flags().MarkDeprecated("path", "specify the path as the first positional argument instead") - if err != nil { - panic(err) - } - - cmd.Flags().StringVarP(&fileType, "type", "t", "kf", "file type of the database definition file (kf or json)") - cmd.Flags().StringVarP(&overrideName, "name", "n", "", "set the name of the database, overriding the name in the schema file") - return cmd -} - -func UnmarshalKf(file *os.File) (*types.Schema, error) { - source, err := io.ReadAll(file) - if err != nil { - return nil, fmt.Errorf("failed to read Kuneiform source file: %w", err) - } - - return parse.Parse(source) -} - -func UnmarshalJson(file *os.File) (*types.Schema, error) { - bts, err := io.ReadAll(file) - if err != nil { - return nil, fmt.Errorf("failed to read file: %w", err) - } - - var db types.Schema - err = json.Unmarshal(bts, &db) - if err != nil { - return nil, fmt.Errorf("failed to unmarshal file: %w", err) - } - - return &db, nil -} diff --git a/cmd/kwil-cli/cmds/database/drop.go b/cmd/kwil-cli/cmds/database/drop.go deleted file mode 100644 index ab4b664a2..000000000 --- a/cmd/kwil-cli/cmds/database/drop.go +++ /dev/null @@ -1,57 +0,0 @@ -package database - -import ( - "context" - "fmt" - "time" - - "github.com/spf13/cobra" - - "github.com/kwilteam/kwil-db/app/shared/display" - "github.com/kwilteam/kwil-db/cmd/kwil-cli/client" - "github.com/kwilteam/kwil-db/cmd/kwil-cli/config" - clientType "github.com/kwilteam/kwil-db/core/client/types" -) - -var ( - dropLong = `Drops a database from the connected network. - -The drop coommand will drop a database schema, and all of its data, from the connected network. -This will only work if the wallet address that signs the transaction is the owner of the database. - -Drop takes one argument: the name of the database to drop.` - - dropExample = `# Drop a database deployed by the current wallet named "mydb" -kwil-cli database drop mydb` -) - -func dropCmd() *cobra.Command { - cmd := &cobra.Command{ - Use: "drop ", - Short: "Drops a database from the connected network.", - Long: dropLong, - Example: dropExample, - Args: cobra.ExactArgs(1), - RunE: func(cmd *cobra.Command, args []string) error { - return client.DialClient(cmd.Context(), cmd, 0, func(ctx context.Context, cl clientType.Client, conf *config.KwilCliConfig) error { - var err error - txHash, err := cl.DropDatabase(ctx, args[0], clientType.WithNonce(nonceOverride), - clientType.WithSyncBroadcast(syncBcast)) - if err != nil { - return display.PrintErr(cmd, fmt.Errorf("error dropping database: %w", err)) - } - // If sycnBcast, and we have a txHash (error or not), do a query-tx. 
- if len(txHash) != 0 && syncBcast { - time.Sleep(500 * time.Millisecond) // otherwise it says not found at first - resp, err := cl.TxQuery(ctx, txHash) - if err != nil { - return display.PrintErr(cmd, fmt.Errorf("tx query failed: %w", err)) - } - return display.PrintCmd(cmd, display.NewTxHashAndExecResponse(resp)) - } - return display.PrintCmd(cmd, display.RespTxHash(txHash)) - }) - }, - } - return cmd -} diff --git a/cmd/kwil-cli/cmds/database/execute.go b/cmd/kwil-cli/cmds/database/execute.go index 10cf71791..9c6e0277a 100644 --- a/cmd/kwil-cli/cmds/database/execute.go +++ b/cmd/kwil-cli/cmds/database/execute.go @@ -15,9 +15,9 @@ import ( ) var ( - executeLong = `Execute a procedure or action against a database. + executeLong = `Execute an action against a database. -The procedure or action name is specified as the first positional argument, and the procedure parameters as all subsequent arguments. +The action name is specified as the first positional argument, and the procedure parameters as all subsequent arguments. In order to specify a procedure parameter, you first need to specify the parameter name, then the parameter value, delimited by a colon. For example, for procedure ` + "`" + `get_user($username)` + "`" + `, you would specify the procedure as follows: @@ -36,8 +36,8 @@ kwil-cli database execute create_user username:satoshi age:32 --dbid 0x9228624C3 func executeCmd() *cobra.Command { cmd := &cobra.Command{ - Use: "execute ...", - Short: "Execute a procedure or action against a database.", + Use: "execute ...", + Short: "Execute an action against a database.", Long: executeLong, Example: executeExample, RunE: func(cmd *cobra.Command, args []string) error { diff --git a/cmd/kwil-cli/cmds/database/list.go b/cmd/kwil-cli/cmds/database/list.go deleted file mode 100644 index 512b31db1..000000000 --- a/cmd/kwil-cli/cmds/database/list.go +++ /dev/null @@ -1,82 +0,0 @@ -package database - -import ( - "context" - "encoding/hex" - "errors" - "fmt" - - "github.com/spf13/cobra" - - "github.com/kwilteam/kwil-db/app/shared/display" - "github.com/kwilteam/kwil-db/cmd/kwil-cli/client" - "github.com/kwilteam/kwil-db/cmd/kwil-cli/config" - clientType "github.com/kwilteam/kwil-db/core/client/types" - "github.com/kwilteam/kwil-db/core/crypto/auth" -) - -var ( - listLong = `List databases owned by a wallet. - -An owner can be specified with the ` + "`" + `--owner` + "`" + ` flag. If no owner is specified, then it will return all databases deployed on the network. 
-If the ` + "`" + `--self` + "`" + ` flag is specified, then the owner will be set to the current configured wallet.` - - listExample = `# list databases owned by the wallet "0x9228624C3185FCBcf24c1c9dB76D8Bef5f5DAd64" -kwil-cli database list --owner 0x9228624C3185FCBcf24c1c9dB76D8Bef5f5DAd64 - -# list all databases deployed on the network -kwil-cli database list - -# list databases owned by the current configured wallet -kwil-cli database list --self` -) - -func listCmd() *cobra.Command { - var owner string - var self bool - - cmd := &cobra.Command{ - Use: "list", - Short: "List databases owned by a wallet.", - Long: listLong, - Example: listExample, - Args: cobra.NoArgs, - SilenceUsage: true, - RunE: func(cmd *cobra.Command, args []string) error { - return client.DialClient(cmd.Context(), cmd, client.WithoutPrivateKey, func(ctx context.Context, client clientType.Client, conf *config.KwilCliConfig) error { - if owner != "" && self { - return display.PrintErr(cmd, errors.New("cannot specify both --owner and --self")) - } - - var ownerIdent []byte - if self { - if conf.PrivateKey == nil { - return display.PrintErr(cmd, errors.New("must have a configured wallet to use --self")) - } - ownerIdent = (&auth.EthPersonalSigner{Key: *conf.PrivateKey}).Identity() - } else if owner != "" { - var err error - ownerIdent, err = hex.DecodeString(owner) - if err != nil { - return display.PrintErr(cmd, fmt.Errorf("failed to decode hex owner: %w", err)) - } - } - - dbs, err := client.ListDatabases(ctx, ownerIdent) - if err != nil { - return display.PrintErr(cmd, err) - } - - return display.PrintCmd(cmd, &respDBList{ - Info: dbs, - owner: ownerIdent, - }) - }) - }, - } - - cmd.Flags().StringVarP(&owner, ownerFlag, "o", "", "the owner of the database") - cmd.Flags().BoolVar(&self, "self", false, "use the current configured wallet as the owner") - - return cmd -} diff --git a/cmd/kwil-cli/cmds/database/message.go b/cmd/kwil-cli/cmds/database/message.go index 8edc934c0..0b50d6e83 100644 --- a/cmd/kwil-cli/cmds/database/message.go +++ b/cmd/kwil-cli/cmds/database/message.go @@ -8,7 +8,6 @@ import ( "github.com/olekukonko/tablewriter" - clientType "github.com/kwilteam/kwil-db/core/client/types" "github.com/kwilteam/kwil-db/core/types" ) @@ -56,7 +55,7 @@ func (d *respDBList) MarshalText() ([]byte, error) { // of a database in cli type respRelations struct { // to avoid recursive call of MarshalJSON - Data clientType.Records + Data *types.QueryResult } func (r *respRelations) MarshalJSON() ([]byte, error) { @@ -64,14 +63,12 @@ func (r *respRelations) MarshalJSON() ([]byte, error) { } func (r *respRelations) MarshalText() ([]byte, error) { - return recordsToTable(r.Data), nil + return recordsToTable(r.Data.ExportToStringMap()), nil } // recordsToTable converts records to a formatted table structure // that can be printed -func recordsToTable(r clientType.Records) []byte { - data := r.ToStrings() - +func recordsToTable(data []map[string]string) []byte { if len(data) == 0 { return []byte("No data to display.") } @@ -103,63 +100,3 @@ func recordsToTable(r clientType.Records) []byte { table.Render() return buf.Bytes() } - -// respSchema is used to represent a database schema in cli -type respSchema struct { - Schema *types.Schema -} - -func (s *respSchema) MarshalJSON() ([]byte, error) { - return json.Marshal(s.Schema) -} - -func (s *respSchema) MarshalText() ([]byte, error) { - // TODO: make output more readable - var msg bytes.Buffer - - // now we print the metadata - msg.WriteString("Tables:\n") - for _, t := range 
s.Schema.Tables { - msg.WriteString(fmt.Sprintf(" %s\n", t.Name)) - msg.WriteString(" Columns:\n") - for _, c := range t.Columns { - msg.WriteString(fmt.Sprintf(" %s\n", c.Name)) - msg.WriteString(fmt.Sprintf(" Type: %s\n", c.Type.String())) - - for _, a := range c.Attributes { - msg.WriteString(fmt.Sprintf(" %s\n", a.Type)) - if a.Value != "" { - msg.WriteString(fmt.Sprintf(" %s\n", a.Value)) - } - } - } - } - - // print queries - msg.WriteString("Actions:\n") - for _, q := range s.Schema.Actions { - public := "private" - if q.Public { - public = "public" - } - - msg.WriteString(fmt.Sprintf(" %s (%s)\n", q.Name, public)) - msg.WriteString(fmt.Sprintf(" Inputs: %s\n", q.Parameters)) - } - - // print procedures - msg.WriteString("Procedures:\n") - for _, p := range s.Schema.Procedures { - public := "private" - if p.Public { - public = "public" - } - - msg.WriteString(fmt.Sprintf(" %s (%s)\n", p.Name, public)) - for _, param := range p.Parameters { - msg.WriteString(fmt.Sprintf(" %s: %s\n", param.Name, param.Type.String())) - } - } - - return msg.Bytes(), nil -} diff --git a/cmd/kwil-cli/cmds/database/message_test.go b/cmd/kwil-cli/cmds/database/message_test.go index 1b1b26894..95b03f186 100644 --- a/cmd/kwil-cli/cmds/database/message_test.go +++ b/cmd/kwil-cli/cmds/database/message_test.go @@ -4,7 +4,6 @@ import ( "encoding/hex" "github.com/kwilteam/kwil-db/app/shared/display" - clientType "github.com/kwilteam/kwil-db/core/client/types" "github.com/kwilteam/kwil-db/core/types" ) @@ -95,7 +94,12 @@ func Example_respDBlist_json() { func Example_respRelations_text() { display.Print(&respRelations{ - Data: clientType.Records([]map[string]any{{"a": "1", "b": "2"}, {"a": "3", "b": "4"}})}, + Data: &types.QueryResult{ + ColumnNames: []string{"a", "b"}, + ColumnTypes: []*types.DataType{types.TextType, types.TextType}, + Values: [][]any{{"1", "2"}, {"3", "4"}}, + }, + }, nil, "text") // Output: // | a | b | @@ -106,100 +110,49 @@ func Example_respRelations_text() { func Example_respRelations_json() { display.Print(&respRelations{ - Data: clientType.Records([]map[string]any{{"a": "1", "b": "2"}, {"a": "3", "b": "4"}})}, + Data: &types.QueryResult{ + ColumnNames: []string{"a", "b"}, + ColumnTypes: []*types.DataType{types.TextType, types.TextType}, + Values: [][]any{{"1", "2"}, {"3", "4"}}, + }, + }, nil, "json") // Output: // { - // "result": [ - // { - // "a": "1", - // "b": "2" - // }, - // { - // "a": "3", - // "b": "4" - // } - // ], + // "result": { + // "column_names": [ + // "a", + // "b" + // ], + // "column_types": [ + // { + // "name": "text", + // "is_array": false, + // "metadata": [ + // 0, + // 0 + // ] + // }, + // { + // "name": "text", + // "is_array": false, + // "metadata": [ + // 0, + // 0 + // ] + // } + // ], + // "values": [ + // [ + // "1", + // "2" + // ], + // [ + // "3", + // "4" + // ] + // ] + // }, // "error": "" // } } - -var demoSchema = &respSchema{ - Schema: &types.Schema{ - Owner: []byte("user"), - Name: "test_schema", - Tables: []*types.Table{ - { - Name: "users", - Columns: []*types.Column{ - { - Name: "id", - Type: types.IntType, - Attributes: []*types.Attribute{ - { - Type: "primary_key", - Value: "true", - }, - }, - }, - }, - ForeignKeys: []*types.ForeignKey{ - { - ChildKeys: []string{"child_id"}, - ParentKeys: []string{"parent_id"}, - ParentTable: "parent_table", - Actions: []*types.ForeignKeyAction{ - { - On: "delete", - Do: "cascade", - }, - }, - }, - }, - Indexes: []*types.Index{ - { - Name: "index_name", - Columns: []string{"id", "name"}, - Type: "btree", - 
}, - }, - }, - }, - Actions: []*types.Action{ - { - Name: "get_user", - Parameters: []string{"user_id"}, - Public: true, - Body: "SELECT * FROM users WHERE id = $user_id", - }, - }, - Extensions: []*types.Extension{ - { - Name: "auth", - Initialization: []*types.ExtensionConfig{ - { - Key: "token", - Value: "abc123", - }, - }, - Alias: "authentication", - }, - }, - }, -} - -func Example_respSchema_text() { - display.Print(demoSchema, nil, "text") - // Output: - // Tables: - // users - // Columns: - // id - // Type: int - // primary_key - // true - // Actions: - // get_user (public) - // Inputs: [user_id] - // Procedures: -} diff --git a/cmd/kwil-cli/cmds/database/query.go b/cmd/kwil-cli/cmds/database/query.go index 614c89926..a202696d5 100644 --- a/cmd/kwil-cli/cmds/database/query.go +++ b/cmd/kwil-cli/cmds/database/query.go @@ -38,12 +38,7 @@ func queryCmd() *cobra.Command { RunE: func(cmd *cobra.Command, args []string) error { return client.DialClient(cmd.Context(), cmd, client.WithoutPrivateKey, func(ctx context.Context, client clientType.Client, conf *config.KwilCliConfig) error { - dbid, err := getSelectedDbid(cmd, conf) - if err != nil { - return display.PrintErr(cmd, fmt.Errorf("target database not properly specified: %w", err)) - } - - data, err := client.Query(ctx, dbid, args[0]) + data, err := client.Query(ctx, args[0], nil) if err != nil { return display.PrintErr(cmd, fmt.Errorf("error querying database: %w", err)) } @@ -52,7 +47,5 @@ func queryCmd() *cobra.Command { }) }, } - - bindFlagsTargetingDatabase(cmd) return cmd } diff --git a/cmd/kwil-cli/cmds/database/read_schema.go b/cmd/kwil-cli/cmds/database/read_schema.go deleted file mode 100644 index b6fe4c624..000000000 --- a/cmd/kwil-cli/cmds/database/read_schema.go +++ /dev/null @@ -1,53 +0,0 @@ -package database - -import ( - "context" - "fmt" - - "github.com/spf13/cobra" - - "github.com/kwilteam/kwil-db/app/shared/display" - "github.com/kwilteam/kwil-db/cmd/kwil-cli/client" - "github.com/kwilteam/kwil-db/cmd/kwil-cli/config" - clientType "github.com/kwilteam/kwil-db/core/client/types" -) - -// TODO: @brennan: make the way this prints out the metadata more readable -var ( - readSchemaLong = `Read schema is used to view the details of a deployed database schema. - -You can either specify the database to execute this against with the ` + "`" + `--name` + "`" + ` and ` + "`" + `--owner` + "`" + ` -flags, or you can specify the database by passing the database id with the ` + "`" + `--dbid` + "`" + ` flag. 
If a ` + "`" + `--name` + "`" + ` -flag is passed and no ` + "`" + `--owner` + "`" + ` flag is passed, the owner will be inferred from your configured wallet.` - - readSchemaExample = `# Reading the schema of the "mydb" database, owned by 0x9228624C3185FCBcf24c1c9dB76D8Bef5f5DAd64 -kwil-cli database read-schema --name mydb --owner 0x9228624C3185FCBcf24c1c9dB76D8Bef5f5DAd64` -) - -func readSchemaCmd() *cobra.Command { - var cmd = &cobra.Command{ - Use: "read-schema", - Short: "Read schema is used to view the details of a deployed database schema.", - Long: readSchemaLong, - Example: readSchemaExample, - Args: cobra.NoArgs, - RunE: func(cmd *cobra.Command, _ []string) error { - return client.DialClient(cmd.Context(), cmd, client.WithoutPrivateKey, func(ctx context.Context, client clientType.Client, conf *config.KwilCliConfig) error { - dbid, err := getSelectedDbid(cmd, conf) - if err != nil { - return display.PrintErr(cmd, err) - } - - schema, err := client.GetSchema(ctx, dbid) - if err != nil { - return display.PrintErr(cmd, fmt.Errorf("error getting schema: %w", err)) - } - - return display.PrintCmd(cmd, &respSchema{Schema: schema}) - }) - }, - } - - bindFlagsTargetingDatabase(cmd) - return cmd -} diff --git a/cmd/kwil-cli/cmds/utils/parse.go b/cmd/kwil-cli/cmds/utils/parse.go deleted file mode 100644 index 6de9dc50e..000000000 --- a/cmd/kwil-cli/cmds/utils/parse.go +++ /dev/null @@ -1,199 +0,0 @@ -package utils - -import ( - "encoding/json" - "errors" - "fmt" - "os" - - "github.com/spf13/cobra" - - "github.com/kwilteam/kwil-db/app/shared/display" - "github.com/kwilteam/kwil-db/core/types" - "github.com/kwilteam/kwil-db/node/engine/generate" - "github.com/kwilteam/kwil-db/parse" -) - -func newParseCmd() *cobra.Command { - var debug, includePositions bool - var out string - - cmd := &cobra.Command{ - Use: "parse ", - Short: "Parse a Kuneiform schema", - Long: `Parse a Kuneiform schema and output the JSON schema.`, - Args: cobra.ExactArgs(1), - RunE: func(cmd *cobra.Command, args []string) error { - if args[0] == "" { - return display.PrintErr(cmd, errors.New("file path is required")) - } - - if includePositions && !debug { - return display.PrintErr(cmd, errors.New("include-positions flag can only be used with debug")) - } - - file, err := os.ReadFile(args[0]) - if err != nil { - return display.PrintErr(cmd, err) - } - - res, err := parse.ParseAndValidate(file) - if err != nil { - return display.PrintErr(cmd, err) - } - - // if not in debug mode, throw any errors and swallow the schema, - // since the schema is invalid. - if !debug { - if res.Err() != nil { - return display.PrintErr(cmd, res.Err()) - } - - if out == "" { - return display.PrintCmd(cmd, &schemaDisplay{Result: res.Schema}) - } - - bts, err := json.MarshalIndent(res.Schema, "", " ") - if err != nil { - return display.PrintErr(cmd, err) - } - - err = os.WriteFile(out, bts, 0644) - if err != nil { - return display.PrintErr(cmd, err) - } - - return display.PrintCmd(cmd, display.RespString("Schema written to "+out)) - } - - // if in debug mode, output the schema and the debug information. - // We also want to attempt to generate plpgsql functions. 
- dis := &debugDisplay{ - Result: res, - Generated: generateAll(res.Schema), - } - - if !includePositions { - parse.RecursivelyVisitPositions(dis, func(gp parse.GetPositioner) { - gp.Clear() - }) - } - - if out == "" { - return display.PrintCmd(cmd, dis) - } - - bts, err := dis.MarshalText() - if err != nil { - return display.PrintErr(cmd, err) - } - - err = os.WriteFile(out, bts, 0644) - if err != nil { - return display.PrintErr(cmd, err) - } - - return display.PrintCmd(cmd, display.RespString("Debug information written to "+out)) - }, - } - - cmd.Flags().BoolVarP(&debug, "debug", "d", false, "Display debug information") - cmd.Flags().BoolVarP(&includePositions, "include-positions", "p", false, "Include positions in the debug output") - cmd.Flags().StringVarP(&out, "out", "o", "", "Output file. If debug is true, errors will also be written to this file") - - return cmd -} - -// schemaDisplay is a struct that will be used to display the schema. -type schemaDisplay struct { - Result *types.Schema -} - -func (s *schemaDisplay) MarshalJSON() ([]byte, error) { - return json.Marshal(s.Result) -} - -func (s *schemaDisplay) MarshalText() (text []byte, err error) { - return json.MarshalIndent(s.Result, "", " ") -} - -// debugDisplay is a struct that will be used to display the schema. -// It is used to display the debug information. -type debugDisplay struct { - Result *parse.SchemaParseResult `json:"parse_result"` - Generated *genResult `json:"generated"` -} - -func (d *debugDisplay) MarshalJSON() ([]byte, error) { - type res debugDisplay // prevent recursion - return json.Marshal((*res)(d)) -} - -func (d *debugDisplay) MarshalText() (text []byte, err error) { - return json.MarshalIndent(d, "", " ") -} - -// generateAll attempts to generate all ddl statements, sql, and plpgsql functions. 
-func generateAll(schema *types.Schema) *genResult { - r := genResult{ - Tables: make(map[string][]string), - Actions: make(map[string][]generate.GeneratedActionStmt), - Procedures: make(map[string]string), - ForeignProcedures: make(map[string]string), - Errors: make([]error, 0), - } - defer func() { - // catch any panics - if e := recover(); e != nil { - e2, ok := e.(error) - if !ok { - r.Errors = append(r.Errors, fmt.Errorf("panic: %v", e)) - } else { - r.Errors = append(r.Errors, e2) - } - } - }() - - wrapErr := func(s string, e error) error { - return fmt.Errorf("%s: %w", s, e) - } - - var err error - for _, table := range schema.Tables { - r.Tables[table.Name], err = generate.GenerateDDL(schema.Name, table) - if err != nil { - r.Errors = append(r.Errors, wrapErr("table "+table.Name, err)) - } - } - - for _, action := range schema.Actions { - r.Actions[action.Name], err = generate.GenerateActionBody(action, schema, schema.Name) - if err != nil { - r.Errors = append(r.Errors, wrapErr("action "+action.Name, err)) - } - } - - for _, proc := range schema.Procedures { - r.Procedures[proc.Name], err = generate.GenerateProcedure(proc, schema, schema.Name) - if err != nil { - r.Errors = append(r.Errors, wrapErr("procedure "+proc.Name, err)) - } - } - - for _, proc := range schema.ForeignProcedures { - r.ForeignProcedures[proc.Name], err = generate.GenerateForeignProcedure(proc, schema.Name, schema.DBID()) - if err != nil { - r.Errors = append(r.Errors, wrapErr("foreign procedure "+proc.Name, err)) - } - } - - return &r -} - -type genResult struct { - Tables map[string][]string `json:"tables"` - Actions map[string][]generate.GeneratedActionStmt `json:"actions"` - Procedures map[string]string `json:"procedures"` - ForeignProcedures map[string]string `json:"foreign_procedures"` - Errors []error `json:"gen_errors"` -} diff --git a/cmd/kwil-cli/cmds/utils/test.go b/cmd/kwil-cli/cmds/utils/test.go index edfe66792..4af63dc56 100644 --- a/cmd/kwil-cli/cmds/utils/test.go +++ b/cmd/kwil-cli/cmds/utils/test.go @@ -242,13 +242,13 @@ func expandHome(s *string) (changed bool, err error) { // makeSchemaPathsRelative makes all schema paths relative for a test. func makeSchemaPathsRelative(test *testing.SchemaTest, jsonFilepath string) error { - for i, path := range test.SchemaFiles { + for i, path := range test.SeedScripts { adjusted, err := adjustPath(path, jsonFilepath) if err != nil { return err } - test.SchemaFiles[i] = adjusted + test.SeedScripts[i] = adjusted } return nil diff --git a/cmd/kwil-cli/cmds/utils/utils.go b/cmd/kwil-cli/cmds/utils/utils.go index 31ca453d3..4720591a4 100644 --- a/cmd/kwil-cli/cmds/utils/utils.go +++ b/cmd/kwil-cli/cmds/utils/utils.go @@ -19,7 +19,6 @@ func NewCmdUtils() *cobra.Command { decodeTxCmd(), chainInfoCmd(), kgwAuthnCmd(), - newParseCmd(), testCmd(), dbidCmd(), generateKeyCmd(), diff --git a/common/common.go b/common/common.go index 38045a7d5..f751b3e90 100644 --- a/common/common.go +++ b/common/common.go @@ -72,28 +72,36 @@ type TxContext struct { Authenticator string } -// Engine is an interface for the main database engine that is responsible for deploying -// and executing Kuneiform datasets. type Engine interface { - SchemaGetter - // CreateDataset deploys a new dataset from a schema. - // The dataset will be owned by the caller. - CreateDataset(ctx *TxContext, tx sql.DB, schema *types.Schema) error - // DeleteDataset deletes a dataset. - // The caller must be the owner of the dataset. 
- DeleteDataset(ctx *TxContext, tx sql.DB, dbid string) error - // Procedure executes a procedure in a dataset. It can be given - // either a readwrite or readonly database transaction. If it is - // given a read-only transaction, it will not be able to execute - // any procedures that are not `view`. - Procedure(ctx *TxContext, tx sql.DB, options *ExecutionData) (*sql.ResultSet, error) - // ListDatasets returns a list of all datasets on the network. - ListDatasets(caller []byte) ([]*types.DatasetIdentifier, error) - // Execute executes a SQL statement on a dataset. - // It uses Kwil's SQL dialect. - Execute(ctx *TxContext, tx sql.DB, dbid, query string, values map[string]any) (*sql.ResultSet, error) - // Reload reloads the engine with the latest db state - Reload(ctx context.Context, tx sql.Executor) error + // Call calls an action in the database. The resultFn callback is + // called for each row in the result set. If the resultFn returns + // an error, the call will be aborted and the error will be returned. + Call(ctx *TxContext, db sql.DB, namespace, action string, args []any, resultFn func(*Row) error) (*CallResult, error) + // Execute executes a statement in the database. The fn callback is + // called for each row in the result set. If the fn returns an error, + // the call will be aborted and the error will be returned. + Execute(ctx *TxContext, db sql.DB, statement string, params map[string]any, fn func(*Row) error) error + // SetOwner sets the owner of the database. + // There can only be one owner of a database at a time. + SetOwner(ctx context.Context, db sql.DB, owner string) error +} + +// CallResult is the result of a call to an action. +// It does not include the records, as they should be consumed +// via the resultFn callback. +type CallResult struct { + // Logs are the logs generated by the action. + Logs []string +} + +// Row contains information about a row in a table. +type Row struct { + // ColumnNames are the names of the columns in the row. + ColumnNames []string + // ColumnTypes are the types of the columns in the row. + ColumnTypes []*types.DataType + // Values are the values of the columns in the row. + Values []any } // Accounts is an interface for managing accounts on the Kwil network. It @@ -136,13 +144,6 @@ type Validators interface { SetValidatorPower(ctx context.Context, tx sql.Executor, validator []byte, power int64) error } -// SchemaGetter is an interface for getting the schema of a dataset. -type SchemaGetter interface { - // GetSchema returns the schema of a dataset. - // It will return an error if the dataset does not exist. - GetSchema(dbid string) (*types.Schema, error) -} - // ExecutionOptions is contextual data that is passed to a procedure // during call / execution. It is scoped to the lifetime of a single // execution. diff --git a/core/client/client.go b/core/client/client.go index 7ecd6cea2..e4365fbe9 100644 --- a/core/client/client.go +++ b/core/client/client.go @@ -19,7 +19,6 @@ import ( "github.com/kwilteam/kwil-db/core/rpc/client/user" userClient "github.com/kwilteam/kwil-db/core/rpc/client/user/jsonrpc" "github.com/kwilteam/kwil-db/core/types" - "github.com/kwilteam/kwil-db/core/utils" ) // Client is a client that interacts with a public Kwil provider. @@ -222,63 +221,6 @@ func (c *Client) ChainInfo(ctx context.Context) (*types.ChainInfo, error) { return c.txClient.ChainInfo(ctx) } -// GetSchema gets a schema by dbid. 
-func (c *Client) GetSchema(ctx context.Context, dbid string) (*types.Schema, error) { - ds, err := c.txClient.GetSchema(ctx, dbid) - if err != nil { - return nil, err - } - - return ds, nil -} - -// DeployDatabase deploys a database. TODO: remove -func (c *Client) DeployDatabase(ctx context.Context, schema *types.Schema, opts ...clientType.TxOpt) (types.Hash, error) { - txOpts := clientType.GetTxOpts(opts) - tx, err := c.newTx(ctx, schema, txOpts) - if err != nil { - return types.Hash{}, err - } - - c.logger.Debug("deploying database", - "signature_type", tx.Signature.Type, - "signature", base64.StdEncoding.EncodeToString(tx.Signature.Data), - "fee", tx.Body.Fee.String(), "nonce", tx.Body.Nonce) - return c.txClient.Broadcast(ctx, tx, syncBcastFlag(txOpts.SyncBcast)) -} - -// DropDatabase drops a database by name, using the configured signer to derive -// the DB ID. TODO: remove -func (c *Client) DropDatabase(ctx context.Context, name string, opts ...clientType.TxOpt) (types.Hash, error) { - dbid := utils.GenerateDBID(name, c.Signer.Identity()) - return c.DropDatabaseID(ctx, dbid, opts...) -} - -// DropDatabaseID drops a database by ID. TODO: remove -func (c *Client) DropDatabaseID(ctx context.Context, dbid string, opts ...clientType.TxOpt) (types.Hash, error) { - identifier := &types.DropSchema{ - DBID: dbid, - } - - txOpts := clientType.GetTxOpts(opts) - tx, err := c.newTx(ctx, identifier, txOpts) - if err != nil { - return types.Hash{}, err - } - - c.logger.Debug("deploying database", - "signature_type", tx.Signature.Type, - "signature", base64.StdEncoding.EncodeToString(tx.Signature.Data), - "fee", tx.Body.Fee.String(), "nonce", tx.Body.Nonce) - - res, err := c.txClient.Broadcast(ctx, tx, syncBcastFlag(txOpts.SyncBcast)) - if err != nil { - return types.Hash{}, err - } - - return res, nil -} - // Execute executes a procedure or action. // It returns the receipt, as well as outputs which is the decoded body of the receipt. // It can take any number of inputs, and if multiple tuples of inputs are passed, @@ -315,7 +257,7 @@ func (c *Client) Execute(ctx context.Context, dbid string, procedure string, tup } // Call calls a procedure or action. It returns the result records. -func (c *Client) Call(ctx context.Context, dbid string, procedure string, inputs []any) (*clientType.CallResult, error) { +func (c *Client) Call(ctx context.Context, dbid string, procedure string, inputs []any) (*types.CallResult, error) { encoded, err := encodeTuple(inputs) if err != nil { return nil, err @@ -345,31 +287,22 @@ func (c *Client) Call(ctx context.Context, dbid string, procedure string, inputs return nil, fmt.Errorf("create signed message: %w", err) } - res, logs, err := c.txClient.Call(ctx, msg) + res, err := c.txClient.Call(ctx, msg) if err != nil { return nil, fmt.Errorf("call action: %w", err) } - return &clientType.CallResult{ - Records: clientType.Records(res), - Logs: logs, - }, nil + return res, nil } // Query executes a query. -func (c *Client) Query(ctx context.Context, dbid string, query string) (clientType.Records, error) { - res, err := c.txClient.Query(ctx, dbid, query) +func (c *Client) Query(ctx context.Context, query string, params map[string]any) (*types.QueryResult, error) { + res, err := c.txClient.Query(ctx, query, params) if err != nil { return nil, err } - return clientType.Records(res), nil -} - -// ListDatabases lists databases belonging to an owner. -// If no owner is passed, it will list all databases. 
-func (c *Client) ListDatabases(ctx context.Context, owner []byte) ([]*types.DatasetIdentifier, error) { - return c.txClient.ListDatabases(ctx, owner) + return res, nil } // Ping pings the remote host. diff --git a/core/client/types/client.go b/core/client/types/client.go index 7b6326fec..f3103f1a1 100644 --- a/core/client/types/client.go +++ b/core/client/types/client.go @@ -15,25 +15,14 @@ import ( // Client defines methods are used to talk to a Kwil provider. type Client interface { - Call(ctx context.Context, dbid string, procedure string, inputs []any) (*CallResult, error) + Call(ctx context.Context, dbid string, procedure string, inputs []any) (*types.CallResult, error) ChainID() string ChainInfo(ctx context.Context) (*types.ChainInfo, error) - DeployDatabase(ctx context.Context, payload *types.Schema, opts ...TxOpt) (types.Hash, error) - DropDatabase(ctx context.Context, name string, opts ...TxOpt) (types.Hash, error) - DropDatabaseID(ctx context.Context, dbid string, opts ...TxOpt) (types.Hash, error) Execute(ctx context.Context, dbid string, action string, tuples [][]any, opts ...TxOpt) (types.Hash, error) GetAccount(ctx context.Context, pubKey []byte, status types.AccountStatus) (*types.Account, error) - GetSchema(ctx context.Context, dbid string) (*types.Schema, error) - ListDatabases(ctx context.Context, owner []byte) ([]*types.DatasetIdentifier, error) Ping(ctx context.Context) (string, error) - Query(ctx context.Context, dbid string, query string) (Records, error) + Query(ctx context.Context, query string, params map[string]any) (*types.QueryResult, error) TxQuery(ctx context.Context, txHash types.Hash) (*types.TxQueryResponse, error) WaitTx(ctx context.Context, txHash types.Hash, interval time.Duration) (*types.TxQueryResponse, error) Transfer(ctx context.Context, to []byte, amount *big.Int, opts ...TxOpt) (types.Hash, error) } - -// CallResult is the result of a call to a procedure. -type CallResult struct { - Records Records `json:"records"` - Logs []string `json:"logs,omitempty"` -} diff --git a/core/gatewayclient/client.go b/core/gatewayclient/client.go index df0fd4d0e..d2ae7e35b 100644 --- a/core/gatewayclient/client.go +++ b/core/gatewayclient/client.go @@ -18,6 +18,7 @@ import ( gwClient "github.com/kwilteam/kwil-db/core/rpc/client/gateway/jsonrpc" userClient "github.com/kwilteam/kwil-db/core/rpc/client/user/jsonrpc" jsonrpc "github.com/kwilteam/kwil-db/core/rpc/json" + "github.com/kwilteam/kwil-db/core/types" ) // GatewayClient is a client that is made to interact with a kwil gateway. @@ -141,7 +142,7 @@ func NewClient(ctx context.Context, target string, opts *GatewayOptions) (*Gatew // Call call an action. It returns the result records. If authentication is needed, // it will call the gatewaySigner to sign the authentication message. -func (c *GatewayClient) Call(ctx context.Context, dbid string, action string, inputs []any) (*clientType.CallResult, error) { +func (c *GatewayClient) Call(ctx context.Context, dbid string, action string, inputs []any) (*types.CallResult, error) { // we will try to call with the current cookies set. If we receive an error and it is an auth error, // we will re-auth and retry. We will only retry once. 
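A minimal, hypothetical sketch of the reworked client surface shown above: Query now takes a statement plus named parameters instead of a DBID, and Call returns the shared *types.CallResult. The client value, statement, parameter name, action name, and the `$id` binding syntax are placeholders, not part of this patch.

// Hypothetical usage; assumes `cl` is a connected *client.Client and `dbid` identifies the target dataset.
qr, err := cl.Query(ctx, "SELECT id, name FROM users WHERE id = $id", map[string]any{"id": int64(1)})
if err != nil {
	return err
}
for _, row := range qr.ExportToStringMap() {
	fmt.Println(row["id"], row["name"])
}

cr, err := cl.Call(ctx, dbid, "get_user", []any{int64(1)})
if err != nil {
	return err
}
fmt.Println(cr.QueryResult.ColumnNames, cr.Logs)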
res, err := c.Client.Call(ctx, dbid, action, inputs) diff --git a/core/rpc/client/user/jsonrpc/methods.go b/core/rpc/client/user/jsonrpc/methods.go index 973c40cd4..9bccd8b44 100644 --- a/core/rpc/client/user/jsonrpc/methods.go +++ b/core/rpc/client/user/jsonrpc/methods.go @@ -15,7 +15,6 @@ import ( jsonrpc "github.com/kwilteam/kwil-db/core/rpc/json" userjson "github.com/kwilteam/kwil-db/core/rpc/json/user" "github.com/kwilteam/kwil-db/core/types" - jsonUtil "github.com/kwilteam/kwil-db/core/utils/json" ) // Client is a JSON-RPC client for the Kwil user service. It use the JSONRPCClient @@ -79,19 +78,15 @@ func (cl *Client) Broadcast(ctx context.Context, tx *types.Transaction, sync rpc return res.TxHash, nil } -func (cl *Client) Call(ctx context.Context, msg *types.CallMessage, opts ...rpcclient.ActionCallOption) ([]map[string]any, []string, error) { +func (cl *Client) Call(ctx context.Context, msg *types.CallMessage, opts ...rpcclient.ActionCallOption) (*types.CallResult, error) { cmd := msg // same underlying type presently - res := &userjson.CallResponse{} - err := cl.CallMethod(ctx, string(userjson.MethodCall), cmd, res) + res := userjson.CallResponse{} + err := cl.CallMethod(ctx, string(userjson.MethodCall), cmd, &res) if err != nil { - return nil, nil, err - } - records, err := jsonUtil.UnmarshalMapWithoutFloat[[]map[string]any](res.Result) - if err != nil { - return nil, nil, err + return nil, err } - return records, res.Logs, nil + return (*types.CallResult)(&res), nil } func (cl *Client) ChainInfo(ctx context.Context) (*types.ChainInfo, error) { @@ -154,45 +149,18 @@ func (cl *Client) GetAccount(ctx context.Context, pubKey []byte, status types.Ac }, nil } -func (cl *Client) GetSchema(ctx context.Context, dbid string) (*types.Schema, error) { - cmd := &userjson.SchemaRequest{ - DBID: dbid, - } - res := &userjson.SchemaResponse{} - err := cl.CallMethod(ctx, string(userjson.MethodSchema), cmd, res) - if err != nil { - return nil, err - } - return res.Schema, nil -} - -func (cl *Client) ListDatabases(ctx context.Context, ownerPubKey []byte) ([]*types.DatasetIdentifier, error) { - cmd := &userjson.ListDatabasesRequest{ - Owner: ownerPubKey, - } - res := &userjson.ListDatabasesResponse{} - err := cl.CallMethod(ctx, string(userjson.MethodDatabases), cmd, res) - if err != nil { - return nil, err - } - if res.Databases == nil { - return nil, err - } - // A type alias makes a slice copy and conversions unnecessary. - return res.Databases, nil -} - -func (cl *Client) Query(ctx context.Context, dbid, query string) ([]map[string]any, error) { +func (cl *Client) Query(ctx context.Context, query string, params map[string]any) (*types.QueryResult, error) { cmd := &userjson.QueryRequest{ - DBID: dbid, - Query: query, + Query: query, + Params: params, } res := &userjson.QueryResponse{} err := cl.CallMethod(ctx, string(userjson.MethodQuery), cmd, res) if err != nil { return nil, err } - return jsonUtil.UnmarshalMapWithoutFloat[[]map[string]any](res.Result) + + return (*types.QueryResult)(res), nil } func (cl *Client) TxQuery(ctx context.Context, txHash types.Hash) (*types.TxQueryResponse, error) { diff --git a/core/rpc/client/user/txsvc.go b/core/rpc/client/user/txsvc.go index bb59d5875..38fa23b39 100644 --- a/core/rpc/client/user/txsvc.go +++ b/core/rpc/client/user/txsvc.go @@ -14,14 +14,12 @@ import ( // The txsvc is the main service for end users to interact with a Kwil network. 
type TxSvcClient interface { Broadcast(ctx context.Context, tx *types.Transaction, sync client.BroadcastWait) (types.Hash, error) - Call(ctx context.Context, msg *types.CallMessage, opts ...client.ActionCallOption) ([]map[string]any, []string, error) + Call(ctx context.Context, msg *types.CallMessage, opts ...client.ActionCallOption) (*types.CallResult, error) ChainInfo(ctx context.Context) (*types.ChainInfo, error) EstimateCost(ctx context.Context, tx *types.Transaction) (*big.Int, error) GetAccount(ctx context.Context, pubKey []byte, status types.AccountStatus) (*types.Account, error) - GetSchema(ctx context.Context, dbid string) (*types.Schema, error) - ListDatabases(ctx context.Context, ownerPubKey []byte) ([]*types.DatasetIdentifier, error) Ping(ctx context.Context) (string, error) - Query(ctx context.Context, dbid string, query string) ([]map[string]any, error) + Query(ctx context.Context, query string, params map[string]any) (*types.QueryResult, error) TxQuery(ctx context.Context, txHash types.Hash) (*types.TxQueryResponse, error) // Migration methods diff --git a/core/rpc/json/errors.go b/core/rpc/json/errors.go index 00b0f06e2..6f97f7663 100644 --- a/core/rpc/json/errors.go +++ b/core/rpc/json/errors.go @@ -27,7 +27,6 @@ const ( ErrorEngineInternal ErrorCode = -300 ErrorEngineDatasetNotFound ErrorCode = -301 ErrorEngineDatasetExists ErrorCode = -302 - ErrorEngineInvalidSchema ErrorCode = -303 ErrorDBInternal ErrorCode = -400 diff --git a/core/rpc/json/user/commands.go b/core/rpc/json/user/commands.go index faca1454f..064758655 100644 --- a/core/rpc/json/user/commands.go +++ b/core/rpc/json/user/commands.go @@ -88,8 +88,8 @@ type EstimatePriceRequest struct { // QueryRequest contains the request parameters for MethodQuery. type QueryRequest struct { - DBID string `json:"dbid"` - Query string `json:"query"` + Query string `json:"query"` + Params map[string]interface{} `json:"params"` } // TxQueryRequest contains the request parameters for MethodTxQuery. diff --git a/core/rpc/json/user/responses.go b/core/rpc/json/user/responses.go index caeee9a7a..e3339a173 100644 --- a/core/rpc/json/user/responses.go +++ b/core/rpc/json/user/responses.go @@ -32,14 +32,11 @@ type Result struct { // for other types, but embedding it is kinda annoying when Result []byte `json:"result,omitempty"` } -// CallResponse contains the response object for MethodCall. -type CallResponse struct { - Result []byte `json:"result,omitempty"` - Logs []string `json:"logs,omitempty"` -} +// QueryResponse contains the response object for MethodCall and MethodQuery. +type QueryResponse types.QueryResult -// QueryResponse contains the response object for MethodQuery. -type QueryResponse Result +// CallResponse contains the response object for MethodCall. +type CallResponse types.CallResult // ChainInfoResponse contains the response object for MethodChainInfo. type ChainInfoResponse = types.ChainInfo @@ -49,19 +46,6 @@ type ChainInfoResponse = types.ChainInfo // interface. This is the response with which most health checks will be concerned. type HealthResponse = types.Health -// SchemaResponse contains the response object for MethodSchema. -type SchemaResponse struct { - Schema *types.Schema `json:"schema,omitempty"` -} - -// SchemaResponse contains the response object for MethodSchema. -type ListDatabasesResponse struct { - Databases []*DatasetInfo `json:"databases,omitempty"` -} - -// SchemaResponse contains the response object for MethodSchema. 
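Since the QueryRequest above drops the DBID field in favor of named parameters, a hypothetical request built against the new wire shape could look like the following; the statement and parameter are placeholders, and only the struct fields and their JSON tags come from this patch.

// Hypothetical construction of the reshaped query request; assumes encoding/json is imported.
req := &userjson.QueryRequest{
	Query:  "SELECT count(*) AS n FROM users WHERE age >= $min_age",
	Params: map[string]interface{}{"min_age": 21},
}
body, _ := json.Marshal(req)
fmt.Println(string(body)) // {"query":"SELECT count(*) AS n FROM users WHERE age >= $min_age","params":{"min_age":21}}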
-type DatasetInfo = types.DatasetIdentifier - // SchemaResponse contains the response object for MethodSchema. type PingResponse struct { Message string `json:"message,omitempty"` diff --git a/core/types/data_types.go b/core/types/data_types.go new file mode 100644 index 000000000..1586bd782 --- /dev/null +++ b/core/types/data_types.go @@ -0,0 +1,402 @@ +package types + +import ( + "encoding/binary" + "errors" + "fmt" + "regexp" + "strconv" + "strings" + + "github.com/kwilteam/kwil-db/core/types/decimal" +) + +// DataType is a data type. +// It includes both built-in types and user-defined types. +type DataType struct { + // Name is the name of the type. + Name string `json:"name"` + // IsArray is true if the type is an array. + IsArray bool `json:"is_array"` + // Metadata is the metadata of the type. + Metadata [2]uint16 `json:"metadata"` +} + +func (c DataType) SerializeSize() int { + // uint16 version + uint32 name length + name + uint8 is_array + + // 2 x uint16 metadata + return 2 + 4 + len(c.Name) + 1 + 4 +} + +func boolToByte(b bool) byte { + if b { + return 1 + } + return 0 +} + +func (c DataType) MarshalBinary() ([]byte, error) { + b := make([]byte, c.SerializeSize()) + const ver uint16 = 0 + binary.BigEndian.PutUint16(b, ver) + offset := 2 + binary.BigEndian.PutUint32(b[offset:], uint32(len(c.Name))) + offset += 4 + copy(b[offset:], c.Name) + offset += len(c.Name) + b[offset] = boolToByte(c.IsArray) + offset++ + binary.BigEndian.PutUint16(b[offset:], c.Metadata[0]) + offset += 2 + binary.BigEndian.PutUint16(b[offset:], c.Metadata[1]) + return b, nil +} + +func (c *DataType) UnmarshalBinary(data []byte) error { + if len(data) < 6 { + return fmt.Errorf("invalid data length: %d", len(data)) + } + ver := binary.BigEndian.Uint16(data) + if ver != 0 { + return fmt.Errorf("invalid tuple data, unknown version %d", ver) + } + offset := 2 + nameLen := int(binary.BigEndian.Uint32(data[offset:])) + offset += 4 + if len(data) < offset+nameLen+1+2*2 { + return fmt.Errorf("invalid data length: %d", len(data)) + } + c.Name = string(data[offset : offset+nameLen]) + offset += nameLen + + switch data[offset] { + case 0: + case 1: + c.IsArray = true + default: + return fmt.Errorf("invalid data length: %d", len(data)) + } + offset++ + + c.Metadata[0] = binary.BigEndian.Uint16(data[offset : offset+2]) + offset += 2 + c.Metadata[1] = binary.BigEndian.Uint16(data[offset : offset+2]) + offset += 2 + if offset != c.SerializeSize() { // bug, must match + return fmt.Errorf("invalid data length: %d", len(data)) + } + return nil +} + +// String returns the string representation of the type. +func (c *DataType) String() string { + str := strings.Builder{} + str.WriteString(c.Name) + if c.IsArray { + return str.String() + "[]" + } + + if c.Name == DecimalStr { + str.WriteString("(") + str.WriteString(strconv.FormatUint(uint64(c.Metadata[0]), 10)) + str.WriteString(",") + str.WriteString(strconv.FormatUint(uint64(c.Metadata[1]), 10)) + str.WriteString(")") + } + + return str.String() +} + +func (c *DataType) HasMetadata() bool { + return c.Metadata != ZeroMetadata +} + +var ZeroMetadata = [2]uint16{} + +// PGString returns the string representation of the type in Postgres. 
+func (c *DataType) PGString() (string, error) { + var scalar string + switch strings.ToLower(c.Name) { + case intStr: + scalar = "INT8" + case textStr: + scalar = "TEXT" + case boolStr: + scalar = "BOOL" + case blobStr: + scalar = "BYTEA" + case uuidStr: + scalar = "UUID" + case uint256Str: + scalar = "UINT256" + case DecimalStr: + if c.Metadata == ZeroMetadata { + scalar = "NUMERIC" + } + + scalar = fmt.Sprintf("NUMERIC(%d,%d)", c.Metadata[0], c.Metadata[1]) + case nullStr: + return "", errors.New("cannot have null column type") + case unknownStr: + return "", errors.New("cannot have unknown column type") + default: + return "", fmt.Errorf("unknown column type: %s", c.Name) + } + + if c.IsArray { + return scalar + "[]", nil + } + + return scalar, nil +} + +func (c *DataType) Clean() error { + lName := strings.ToLower(c.Name) + + referencedType, ok := typeAlias[lName] + if !ok { + return fmt.Errorf("unknown type: %s", c.Name) + } + + switch referencedType { + case intStr, textStr, boolStr, blobStr, uuidStr, uint256Str: // ok + if c.Metadata != ZeroMetadata { + return fmt.Errorf("type %s cannot have metadata", c.Name) + } + case DecimalStr: + if c.Metadata != ZeroMetadata { + err := decimal.CheckPrecisionAndScale(c.Metadata[0], c.Metadata[1]) + if err != nil { + return err + } + } + case nullStr, unknownStr: + if c.IsArray { + return fmt.Errorf("type %s cannot be an array", c.Name) + } + + if c.Metadata != ZeroMetadata { + return fmt.Errorf("type %s cannot have metadata", c.Name) + } + default: + return fmt.Errorf("unknown type: %s", c.Name) + } + + c.Name = referencedType + + return nil +} + +// Copy returns a copy of the type. +func (c *DataType) Copy() *DataType { + d := &DataType{ + Name: c.Name, + IsArray: c.IsArray, + Metadata: c.Metadata, + } + + return d +} + +// EqualsStrict returns true if the type is equal to the other type. +// The types must be exactly the same, including metadata. +func (c *DataType) EqualsStrict(other *DataType) bool { + // if unknown, return true. unknown is a special case used + // internally when type checking is disabled. + if c.Name == unknownStr || other.Name == unknownStr { + return true + } + + if c.IsArray != other.IsArray { + return false + } + + if (c.Metadata == ZeroMetadata) != (other.Metadata == ZeroMetadata) { + return false + } + if c.Metadata != ZeroMetadata { + if c.Metadata[0] != other.Metadata[0] || c.Metadata[1] != other.Metadata[1] { + return false + } + } + + return strings.EqualFold(c.Name, other.Name) +} + +// Equals returns true if the type is equal to the other type, or if either type is null. +func (c *DataType) Equals(other *DataType) bool { + if c.Name == nullStr || other.Name == nullStr { + return true + } + + return c.EqualsStrict(other) +} + +func (c *DataType) IsNumeric() bool { + if c.IsArray { + return false + } + + return c.Name == intStr || c.Name == DecimalStr || c.Name == uint256Str || c.Name == unknownStr +} + +// declared DataType constants. +// We do not have one for fixed because fixed types require metadata. +var ( + IntType = &DataType{ + Name: intStr, + } + IntArrayType = ArrayType(IntType) + TextType = &DataType{ + Name: textStr, + } + TextArrayType = ArrayType(TextType) + BoolType = &DataType{ + Name: boolStr, + } + BoolArrayType = ArrayType(BoolType) + BlobType = &DataType{ + Name: blobStr, + } + BlobArrayType = ArrayType(BlobType) + UUIDType = &DataType{ + Name: uuidStr, + } + UUIDArrayType = ArrayType(UUIDType) + // DecimalType contains 1,0 metadata. 
+ // For type detection, users should prefer compare a datatype + // name with the DecimalStr constant. + DecimalType = &DataType{ + Name: DecimalStr, + Metadata: [2]uint16{1, 0}, // the minimum precision and scale + } + DecimalArrayType = ArrayType(DecimalType) + Uint256Type = &DataType{ + Name: uint256Str, + } + Uint256ArrayType = ArrayType(Uint256Type) + // NullType is a special type used internally + NullType = &DataType{ + Name: nullStr, + } + // Unknown is a special type used internally + // when a type is unknown until runtime. + UnknownType = &DataType{ + Name: unknownStr, + } +) + +// ArrayType creates an array type of the given type. +// It panics if the type is already an array. +func ArrayType(t *DataType) *DataType { + if t.IsArray { + panic("cannot create an array of an array") + } + return &DataType{ + Name: t.Name, + IsArray: true, + Metadata: t.Metadata, + } +} + +const ( + textStr = "text" + intStr = "int8" + boolStr = "bool" + blobStr = "blob" + uuidStr = "uuid" + uint256Str = "uint256" + // DecimalStr is a fixed point number. + DecimalStr = "decimal" + nullStr = "null" + unknownStr = "unknown" +) + +// NewDecimalType creates a new fixed point decimal type. +func NewDecimalType(precision, scale uint16) (*DataType, error) { + err := decimal.CheckPrecisionAndScale(precision, scale) + if err != nil { + return nil, err + } + + return &DataType{ + Name: DecimalStr, + Metadata: [2]uint16{precision, scale}, + }, nil +} + +// ParseDataType parses a string into a data type. +func ParseDataType(s string) (*DataType, error) { + // four cases: TEXT, TEXT[], TEXT(1,2), TEXT(1,2)[] + // we will parse the type first, then the array, then the metadata + // we will not allow metadata without an array + + s = strings.TrimSpace(strings.ToLower(s)) + if s == "" { + return nil, errors.New("empty data type") + } + + // Regular expression to parse the data type + re := regexp.MustCompile(`^([a-z0-9]+)(\(([\d, ]+)\))?(\[\])?$`) + matches := re.FindStringSubmatch(s) + + if len(matches) == 0 { + return nil, fmt.Errorf("invalid data type format: %s", s) + } + + baseType := matches[1] + rawMetadata := matches[3] + isArray := matches[4] == "[]" + + var metadata [2]uint16 + if rawMetadata != "" { + metadata = [2]uint16{} + // only decimal types can have metadata + if baseType != DecimalStr { + return nil, fmt.Errorf("metadata is only allowed for decimal type") + } + + parts := strings.Split(rawMetadata, ",") + // can be either DECIMAL(10,5) or just DECIMAL + if len(parts) != 2 && len(parts) != 0 { + return nil, fmt.Errorf("invalid metadata format: %s", rawMetadata) + } + for i, part := range parts { + num, err := strconv.Atoi(strings.TrimSpace(part)) + if err != nil { + return nil, fmt.Errorf("invalid metadata value: %s", part) + } + metadata[i] = uint16(num) + } + } + + baseName, ok := typeAlias[baseType] + if !ok { + return nil, fmt.Errorf("unknown data type: %s", baseType) + } + + dt := &DataType{ + Name: baseName, + Metadata: metadata, + IsArray: isArray, + } + + return dt, dt.Clean() +} + +// maps type names to their base names +var typeAlias = map[string]string{ + "string": textStr, + "text": textStr, + "int": intStr, + "integer": intStr, + "bigint": intStr, + "int8": intStr, + "bool": boolStr, + "boolean": boolStr, + "blob": blobStr, + "bytea": blobStr, + "uuid": uuidStr, + "decimal": DecimalStr, + "numeric": DecimalStr, +} diff --git a/core/types/data_types_test.go b/core/types/data_types_test.go new file mode 100644 index 000000000..9c4c9be5a --- /dev/null +++ b/core/types/data_types_test.go 
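Before the new unit tests, a brief hypothetical round trip through the data-type helpers defined above; the type literal is a placeholder, and the printed values follow from the String, PGString, and binary marshaling code in this file.

// Hypothetical round trip; assumes the core/types package is imported as `types`.
dt, err := types.ParseDataType("decimal(10,2)[]")
if err != nil {
	panic(err)
}
fmt.Println(dt.String()) // "decimal[]" — String omits precision for array types
pg, err := dt.PGString()
if err != nil {
	panic(err)
}
fmt.Println(pg) // "NUMERIC(10,2)[]"

bts, _ := dt.MarshalBinary()
var back types.DataType
if err := back.UnmarshalBinary(bts); err != nil {
	panic(err)
}
fmt.Println(back.EqualsStrict(dt)) // true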
@@ -0,0 +1,198 @@ +package types + +import ( + "testing" +) + +func TestDataTypeBinaryMarshaling(t *testing.T) { + t.Run("marshal and unmarshal valid data type", func(t *testing.T) { + original := DataType{ + Name: "test_type", + IsArray: true, + Metadata: [2]uint16{42, 123}, + } + + data, err := original.MarshalBinary() + if err != nil { + t.Fatal(err) + } + + var decoded DataType + err = decoded.UnmarshalBinary(data) + if err != nil { + t.Fatal(err) + } + + if decoded.Name != original.Name { + t.Errorf("got name %s, want %s", decoded.Name, original.Name) + } + if decoded.IsArray != original.IsArray { + t.Errorf("got isArray %v, want %v", decoded.IsArray, original.IsArray) + } + if decoded.Metadata != original.Metadata { + t.Errorf("got metadata %v, want %v", decoded.Metadata, original.Metadata) + } + }) + + t.Run("unmarshal with insufficient data length", func(t *testing.T) { + data := []byte{0, 0, 0, 0} + var dt DataType + err := dt.UnmarshalBinary(data) + if err == nil { + t.Error("expected error for insufficient data length") + } + }) + + t.Run("unmarshal with invalid version", func(t *testing.T) { + data := []byte{0, 1, 0, 0, 0, 0} + var dt DataType + err := dt.UnmarshalBinary(data) + if err == nil { + t.Error("expected error for invalid version") + } + }) + + t.Run("unmarshal with invalid name length", func(t *testing.T) { + data := []byte{0, 0, 255, 255, 255, 255} + var dt DataType + err := dt.UnmarshalBinary(data) + if err == nil { + t.Error("expected error for invalid name length") + } + }) + + t.Run("marshal empty name", func(t *testing.T) { + original := DataType{ + Name: "", + IsArray: false, + Metadata: [2]uint16{0, 0}, + } + + data, err := original.MarshalBinary() + if err != nil { + t.Fatal(err) + } + + var decoded DataType + err = decoded.UnmarshalBinary(data) + if err != nil { + t.Fatal(err) + } + + if decoded != original { + t.Errorf("got %v, want %v", decoded, original) + } + }) + + t.Run("marshal with maximum metadata values", func(t *testing.T) { + original := DataType{ + Name: "test", + IsArray: true, + Metadata: [2]uint16{65535, 65535}, + } + + data, err := original.MarshalBinary() + if err != nil { + t.Fatal(err) + } + + var decoded DataType + err = decoded.UnmarshalBinary(data) + if err != nil { + t.Fatal(err) + } + + if decoded != original { + t.Errorf("got %v, want %v", decoded, original) + } + }) +} + +func Test_ParseDataTypes(t *testing.T) { + type testcase struct { + in string + out DataType + wantError bool + } + + tests := []testcase{ + { + in: "int8", + out: DataType{ + Name: "int8", + }, + }, + { + in: "int8[]", + out: DataType{ + Name: "int8", + IsArray: true, + }, + }, + { + in: "text[]", + out: DataType{ + Name: "text", + IsArray: true, + }, + }, + { + in: "decimal(10, 2)", + out: DataType{ + Name: "decimal", + Metadata: [2]uint16{10, 2}, + }, + }, + { + in: "decimal(10, 2)[]", + out: DataType{ + Name: "decimal", + Metadata: [2]uint16{10, 2}, + IsArray: true, + }, + }, + { + in: "decimal(10, 2)[][]", + wantError: true, + }, + { + in: "text(10, 2)", + wantError: true, + }, + { + in: "text(10)", + wantError: true, + }, + { + in: "decimal(10)", + wantError: true, + }, + { + in: "decimal(10, 2, 3)", + wantError: true, + }, + { + in: "decimal(10, a)", + wantError: true, + }, + } + + for _, tt := range tests { + t.Run(tt.in, func(t *testing.T) { + res, err := ParseDataType(tt.in) + if tt.wantError { + if err == nil { + t.Fatalf("expected error, got nil") + } + return + } + if err != nil { + t.Fatalf("unexpected error: %v", err) + } + + if !tt.out.Equals(res) 
{ + t.Fatalf("expected %v, got %v", tt.out, res) + } + }) + } +} diff --git a/core/types/decimal/decimal.go b/core/types/decimal/decimal.go index 2f85a09d2..ba94c0113 100644 --- a/core/types/decimal/decimal.go +++ b/core/types/decimal/decimal.go @@ -101,6 +101,15 @@ func NewFromBigInt(i *big.Int, exp int32) (*Decimal, error) { return dec, nil } +// NewNaN creates a new NaN Decimal. +func NewNaN() *Decimal { + return &Decimal{ + dec: apd.Decimal{ + Form: apd.NaN, + }, + } +} + // SetString sets the value of the decimal from a string. func (d *Decimal) SetString(s string) error { res, _, err := d.context().NewFromString(s) diff --git a/core/types/marshal_fuzz_test.go b/core/types/marshal_fuzz_test.go index 8b51bff63..ae49fa3ca 100644 --- a/core/types/marshal_fuzz_test.go +++ b/core/types/marshal_fuzz_test.go @@ -60,52 +60,11 @@ func FuzzTransactionUnmarshalBinary(f *testing.F) { }) } -func FuzzProcedureReturnUnmarshalBinary(f *testing.F) { - // Add seed corpus - seeds := [][]byte{ - // Valid empty return - {0, 0, 0, 0, 0, 0, 0, 0}, - // Valid table return with one field - {0, 0, 1, 0, 0, 0, 1, // version, isTable, fieldCount - 0, 0, 0, 0, 3, 'i', 'd', 't', // field name "idt" - 0, 0, 0, 0, 3, 'i', 'n', 't', 0, 0, 0, 0, 0}, // field type "int" - } - - for _, seed := range seeds { - f.Add(seed) - } - - f.Fuzz(func(t *testing.T, data []byte) { - pr := &ProcedureReturn{} - err := pr.UnmarshalBinary(data) - - if err == nil { - // Verify round-trip marshaling - marshaled, marshalErr := pr.MarshalBinary() - if marshalErr != nil { - t.Errorf("failed to marshal unmarshaled data: %v", marshalErr) - } - - // Verify fields if present - for _, field := range pr.Fields { - if field.Type == nil { - t.Error("field type cannot be nil") - } - } - - // Verify size calculation - if len(marshaled) != pr.SerializeSize() { - t.Errorf("size mismatch: got %d, want %d", len(marshaled), pr.SerializeSize()) - } - } - }) -} - func FuzzDataTypeUnmarshalBinary(f *testing.F) { // Add seed corpus seeds := [][]byte{ // Valid int type - {0, 0, 0, 0, 0, 3, 'i', 'n', 't', 0, 0, 0, 0, 0}, + {0, 0, 0, 0, 0, 3, 'i', 'n', 't', '8', 0, 0, 0, 0, 0}, // Valid text array type {0, 0, 0, 0, 0, 4, 't', 'e', 'x', 't', 1, 0, 0, 0, 0}, // Valid decimal type with metadata @@ -133,7 +92,7 @@ func FuzzDataTypeUnmarshalBinary(f *testing.F) { if cleanErr == nil { // If Clean() succeeds, verify the type is one of the known types validTypes := map[string]bool{ - "int": true, "text": true, "bool": true, + "int8": true, "text": true, "bool": true, "blob": true, "uuid": true, "uint256": true, "decimal": true, "null": true, "unknown": true, } @@ -150,52 +109,3 @@ func FuzzDataTypeUnmarshalBinary(f *testing.F) { } }) } - -func FuzzForeignProcedureUnmarshalBinary(f *testing.F) { - // Add seed corpus with valid structures - seeds := [][]byte{ - // Basic foreign procedure with no params and nil returns - {0, 0, // version - 0, 0, 0, 0, // empty name - 0, 0, 0, 0}, // no parameters - - // Foreign procedure with int parameter and table return - {0, 0, // version - 0, 0, 0, 3, 'f', 'o', 'o', // name - 0, 0, 0, 1, // one parameter - 0, 0, 0, 0, 0, 3, 'i', 'n', 't', 0, 0, 3, 0, 4, // int parameter - 1, // non-nil returns - 0, 0, 1, 0, 0, 0, 1, // returns table with 1 field - 0, 0, 0, 0, 0, 2, 'i', 'd', // field name - 0, 0, 0, 0, 0, 3, 'i', 'n', 't', 0, 0, 1, 0, 2}, // field type - } - - for _, seed := range seeds { - f.Add(seed) - } - - f.Fuzz(func(t *testing.T, data []byte) { - fp := &ForeignProcedure{} - err := fp.UnmarshalBinary(data) - - if err == nil { - 
// Verify parameters - for _, param := range fp.Parameters { - if param == nil { - t.Error("parameter cannot be nil after successful unmarshal") - } - } - - // Verify round-trip marshaling - marshaled, marshalErr := fp.MarshalBinary() - if marshalErr != nil { - t.Errorf("marshal failed after successful unmarshal: %v", marshalErr) - } - - // Verify marshaled length matches original - if !bytes.Equal(marshaled, data[:len(marshaled)]) { - t.Errorf("marshaled data mismatch") - } - } - }) -} diff --git a/core/types/message.go b/core/types/message.go index 0ebb9ee1a..eec36cab6 100644 --- a/core/types/message.go +++ b/core/types/message.go @@ -55,6 +55,17 @@ func CallSigText(dbid, action string, payload []byte, challenge []byte) string { return fmt.Sprintf(callMsgToSignTmplV0, dbid, action, digest[:20], challenge) } +const stmtMsgToSignTmplV0 = `Kwil SQL statement. + +Statement: %s +Digest: %x +Challenge: %x +` + +func StmtSigText(stmt string, digest []byte, challenge []byte) string { + return fmt.Sprintf(stmtMsgToSignTmplV0, stmt, digest[:20], challenge) +} + // CreateCallMessage creates a new call message from a ActionCall payload. If a // signer is provided, the sender and authenticator type are set. If a challenge // is also provided, it will also sign a serialization of the request that diff --git a/core/types/payloads.go b/core/types/payloads.go index 723ff4493..19d3c20fa 100644 --- a/core/types/payloads.go +++ b/core/types/payloads.go @@ -30,8 +30,7 @@ type Payload interface { } const ( - PayloadTypeDeploySchema PayloadType = "deploy_schema" - PayloadTypeDropSchema PayloadType = "drop_schema" + PayloadTypeRawStatement PayloadType = "raw_statement" PayloadTypeExecute PayloadType = "execute" PayloadTypeTransfer PayloadType = "transfer" PayloadTypeValidatorJoin PayloadType = "validator_join" @@ -50,6 +49,7 @@ const ( var payloadConcreteTypes = map[PayloadType]Payload{ // PayloadTypeDropSchema: &DropSchema{}, // PayloadTypeDeploySchema: &Schema{}, + PayloadTypeRawStatement: &RawStatement{}, PayloadTypeExecute: &ActionExecution{}, PayloadTypeValidatorJoin: &ValidatorJoin{}, PayloadTypeValidatorApprove: &ValidatorApprove{}, @@ -90,8 +90,7 @@ func UnmarshalPayload(payloadType PayloadType, payload []byte) (Payload, error) // payloadTypes includes native types and types registered from extensions. var payloadTypes = map[PayloadType]bool{ - PayloadTypeDeploySchema: true, - PayloadTypeDropSchema: true, + PayloadTypeRawStatement: true, PayloadTypeExecute: true, PayloadTypeTransfer: true, PayloadTypeValidatorJoin: true, @@ -118,8 +117,7 @@ func (p PayloadType) Valid() bool { PayloadTypeCreateResolution, PayloadTypeApproveResolution, PayloadTypeDeleteResolution, - PayloadTypeDeploySchema, - PayloadTypeDropSchema, + PayloadTypeRawStatement, PayloadTypeExecute, // These should not come in user transactions, but they are not invalid // payload types in general. 
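The new StmtSigText helper above formats the human-readable text a signer approves for a raw SQL statement. A hypothetical sketch of producing that text follows; how the digest and challenge are actually derived is an assumption here, since only the helper's signature and the fact that the first 20 digest bytes are printed come from this patch.

// Hypothetical signing-text construction; assumes crypto/sha256 and fmt are imported.
stmt := "DELETE FROM users WHERE id = $id"
digest := sha256.Sum256([]byte(stmt)) // 32 bytes; the template prints the first 20
challenge := make([]byte, 32)         // placeholder challenge bytes from the server
fmt.Println(types.StmtSigText(stmt, digest[:], challenge))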
@@ -143,23 +141,23 @@ func RegisterPayload(pType PayloadType) { payloadTypes[pType] = true } -// DropSchema is the payload that is used to drop a schema -type DropSchema struct { - DBID string +// RawStatement is a raw SQL statement that is executed as a transaction +type RawStatement struct { + Statement string } -var _ Payload = (*DropSchema)(nil) +var _ Payload = (*RawStatement)(nil) -func (s *DropSchema) MarshalBinary() ([]byte, error) { - return serialize.Encode(s) +func (r *RawStatement) MarshalBinary() ([]byte, error) { + return serialize.Encode(r) } -func (s *DropSchema) UnmarshalBinary(b []byte) error { - return serialize.Decode(b, s) +func (r *RawStatement) UnmarshalBinary(b []byte) error { + return serialize.Decode(b, r) } -func (s *DropSchema) Type() PayloadType { - return PayloadTypeDropSchema +func (r *RawStatement) Type() PayloadType { + return PayloadTypeRawStatement } // ActionExecution is the payload that is used to execute an action diff --git a/core/types/results.go b/core/types/results.go index f89627b05..ce5613034 100644 --- a/core/types/results.go +++ b/core/types/results.go @@ -3,6 +3,7 @@ package types import ( "encoding/binary" "errors" + "fmt" "math" ) @@ -21,7 +22,7 @@ const ( CodeInvalidSender TxCode = 9 // engine-related error code - CodeInvalidSchema TxCode = 100 + CodeInvalidSchema TxCode = 100 // TODO: remove, as this is not applicable to the engine CodeDatasetMissing TxCode = 110 CodeDatasetExists TxCode = 120 CodeInvalidResolutionType TxCode = 130 @@ -134,3 +135,29 @@ func (e Event) MarshalBinary() ([]byte, error) { func (e *Event) UnmarshalBinary(data []byte) error { return nil } + +// QueryResult is the result of a SQL query or action. +type QueryResult struct { + ColumnNames []string `json:"column_names"` + ColumnTypes []*DataType `json:"column_types"` + Values [][]any `json:"values"` +} + +// ExportToStringMap converts the QueryResult to a slice of maps. +func (qr *QueryResult) ExportToStringMap() []map[string]string { + var res []map[string]string + for _, row := range qr.Values { + m := make(map[string]string) + for i, val := range row { + m[qr.ColumnNames[i]] = fmt.Sprintf("%v", val) + } + res = append(res, m) + } + return res +} + +// CallResult is the result of a procedure call. +type CallResult struct { + QueryResult *QueryResult `json:"query_result"` + Logs []string `json:"logs"` +} diff --git a/core/types/schema.go b/core/types/schema.go deleted file mode 100644 index 30e09f126..000000000 --- a/core/types/schema.go +++ /dev/null @@ -1,3081 +0,0 @@ -package types - -import ( - "encoding/binary" - "errors" - "fmt" - "math" - "slices" - "strconv" - "strings" - - "github.com/kwilteam/kwil-db/core/types/decimal" - "github.com/kwilteam/kwil-db/core/types/validation" - "github.com/kwilteam/kwil-db/core/utils" -) - -// Schema is a database schema that contains tables, procedures, and extensions. -type Schema struct { - // Name is the name of the schema given by the deployer. 
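A short hypothetical round trip of the RawStatement payload introduced above, which replaces the schema deploy/drop payloads; the SQL text is a placeholder.

// Hypothetical payload round trip; RawStatement is encoded with the serialize package like other payloads.
rs := &types.RawStatement{Statement: "CREATE TABLE posts (id uuid PRIMARY KEY, body text)"}
bts, err := rs.MarshalBinary()
if err != nil {
	return err
}
var back types.RawStatement
if err := back.UnmarshalBinary(bts); err != nil {
	return err
}
fmt.Println(back.Statement, rs.Type()) // prints the statement and "raw_statement"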
- Name string `json:"name"` - // Owner is the identifier (generally an address in bytes or public key) of the owner of the schema - Owner HexBytes `json:"owner"` - Extensions []*Extension `json:"extensions"` - Tables []*Table `json:"tables"` - Actions []*Action `json:"actions"` - Procedures []*Procedure `json:"procedures"` - ForeignProcedures []*ForeignProcedure `json:"foreign_calls"` -} - -var _ Payload = (*Schema)(nil) - -func (s *Schema) Type() PayloadType { - return PayloadTypeDeploySchema -} - -func (s Schema) SerializeSize() int { - // uint16 version + uint32 name length + name + owner length + owner + - // uint32 extensions length + extensions + uint32 tables length + tables + - // uint32 actions length + actions + uint32 procedures length + procedures + - // uint32 foreign_procedures length + foreign_procedures - size := 2 + 4 + len(s.Name) + 4 + len(s.Owner) + 4 + 4 + 4 + 4 + 4 - - for _, ext := range s.Extensions { - size += ext.SerializeSize() - } - for _, table := range s.Tables { - size += table.SerializeSize() - } - for _, action := range s.Actions { - size += action.SerializeSize() - } - for _, proc := range s.Procedures { - size += proc.SerializeSize() - } - for _, fp := range s.ForeignProcedures { - size += fp.SerializeSize() - } - return size -} - -func (s Schema) MarshalBinary() ([]byte, error) { - b := make([]byte, s.SerializeSize()) - const ver uint16 = 0 - binary.BigEndian.PutUint16(b, ver) - offset := 2 - - binary.BigEndian.PutUint32(b[offset:], uint32(len(s.Name))) - offset += 4 - copy(b[offset:], s.Name) - offset += len(s.Name) - - binary.BigEndian.PutUint32(b[offset:], uint32(len(s.Owner))) - offset += 4 - copy(b[offset:], s.Owner) - offset += len(s.Owner) - - binary.BigEndian.PutUint32(b[offset:], uint32(len(s.Extensions))) - offset += 4 - for _, ext := range s.Extensions { - extData, err := ext.MarshalBinary() - if err != nil { - return nil, err - } - copy(b[offset:], extData) - offset += len(extData) - } - - binary.BigEndian.PutUint32(b[offset:], uint32(len(s.Tables))) - offset += 4 - for _, table := range s.Tables { - tableData, err := table.MarshalBinary() - if err != nil { - return nil, err - } - copy(b[offset:], tableData) - offset += len(tableData) - } - - binary.BigEndian.PutUint32(b[offset:], uint32(len(s.Actions))) - offset += 4 - for _, action := range s.Actions { - actionData, err := action.MarshalBinary() - if err != nil { - return nil, err - } - copy(b[offset:], actionData) - offset += len(actionData) - } - - binary.BigEndian.PutUint32(b[offset:], uint32(len(s.Procedures))) - offset += 4 - for _, proc := range s.Procedures { - procData, err := proc.MarshalBinary() - if err != nil { - return nil, err - } - copy(b[offset:], procData) - offset += len(procData) - } - - binary.BigEndian.PutUint32(b[offset:], uint32(len(s.ForeignProcedures))) - offset += 4 - for _, fp := range s.ForeignProcedures { - fpData, err := fp.MarshalBinary() - if err != nil { - return nil, err - } - copy(b[offset:], fpData) - offset += len(fpData) - } - - return b, nil -} - -func (s *Schema) UnmarshalBinary(data []byte) error { - if len(data) < 6 { - return fmt.Errorf("invalid data length: %d", len(data)) - } - - ver := binary.BigEndian.Uint16(data) - if ver != 0 { - return fmt.Errorf("invalid schema data, unknown version %d", ver) - } - - offset := 2 - nameLen := int(binary.BigEndian.Uint32(data[offset:])) - offset += 4 - if len(data) < offset+nameLen { - return fmt.Errorf("invalid data length: %d", len(data)) - } - - s.Name = string(data[offset : offset+nameLen]) - offset += 
nameLen - - ownerLen := int(binary.BigEndian.Uint32(data[offset:])) - offset += 4 - if len(data) < offset+ownerLen { - return fmt.Errorf("invalid data length: %d", len(data)) - } - s.Owner = make([]byte, ownerLen) - copy(s.Owner, data[offset:offset+ownerLen]) - offset += ownerLen - - extCount := int(binary.BigEndian.Uint32(data[offset:])) - offset += 4 - if extCount > len(data) { - return fmt.Errorf("invalid data length: %d", len(data)) - } - s.Extensions = make([]*Extension, extCount) - for i := range extCount { - ext := &Extension{} - if err := ext.UnmarshalBinary(data[offset:]); err != nil { - return err - } - s.Extensions[i] = ext - offset += ext.SerializeSize() - } - - tableCount := int(binary.BigEndian.Uint32(data[offset:])) - offset += 4 - if tableCount > len(data) { - return fmt.Errorf("invalid data length: %d", len(data)) - } - s.Tables = make([]*Table, tableCount) - for i := range tableCount { - table := &Table{} - if err := table.UnmarshalBinary(data[offset:]); err != nil { - return err - } - s.Tables[i] = table - offset += table.SerializeSize() - } - - actionCount := int(binary.BigEndian.Uint32(data[offset:])) - offset += 4 - if actionCount > len(data) { - return fmt.Errorf("invalid data length: %d", len(data)) - } - s.Actions = make([]*Action, actionCount) - for i := range actionCount { - action := &Action{} - if err := action.UnmarshalBinary(data[offset:]); err != nil { - return err - } - s.Actions[i] = action - offset += action.SerializeSize() - } - - procCount := int(binary.BigEndian.Uint32(data[offset:])) - offset += 4 - if procCount > len(data) { - return fmt.Errorf("invalid data length: %d", len(data)) - } - s.Procedures = make([]*Procedure, procCount) - for i := range procCount { - proc := &Procedure{} - if err := proc.UnmarshalBinary(data[offset:]); err != nil { - return err - } - s.Procedures[i] = proc - offset += proc.SerializeSize() - } - - fpCount := int(binary.BigEndian.Uint32(data[offset:])) - offset += 4 - if fpCount > len(data) { - return fmt.Errorf("invalid data length: %d", len(data)) - } - s.ForeignProcedures = make([]*ForeignProcedure, fpCount) - for i := range fpCount { - fp := &ForeignProcedure{} - if err := fp.UnmarshalBinary(data[offset:]); err != nil { - return err - } - s.ForeignProcedures[i] = fp - offset += fp.SerializeSize() - } - - if offset != s.SerializeSize() { - return fmt.Errorf("invalid data length: %d", len(data)) - } - - return nil -} - -// Clean validates rules about the data in the struct (naming conventions, syntax, etc.). 
-func (s *Schema) Clean() (err error) { - err = cleanIdent(&s.Name) - if err != nil { - return err - } - - // nameSet is used to check for duplicate names - // among tables, procedures, and extensions - nameSet := make(map[string]struct{}) - checkName := func(name string) error { - _, ok := nameSet[name] - if ok { - return fmt.Errorf(`duplicate name: "%s"`, name) - } - - nameSet[name] = struct{}{} - return nil - } - - for _, table := range s.Tables { - err := table.Clean(s.Tables) - if err != nil { - return err - } - - err = checkName(table.Name) - if err != nil { - return err - } - } - - for _, action := range s.Actions { - err := action.Clean() - if err != nil { - return err - } - - err = checkName(action.Name) - if err != nil { - return err - } - } - - for _, procedure := range s.Procedures { - err := procedure.Clean() - if err != nil { - return err - } - - err = checkName(procedure.Name) - if err != nil { - return err - } - } - - for _, extension := range s.Extensions { - err := extension.Clean() - if err != nil { - return err - } - - err = checkName(extension.Alias) - if err != nil { - return err - } - } - - for _, foreignCall := range s.ForeignProcedures { - err := foreignCall.Clean() - if err != nil { - return err - } - - err = checkName(foreignCall.Name) - if err != nil { - return err - } - } - - return nil -} - -// FindTable finds a table based on its name. -// It returns false if the table is not found. -func (s *Schema) FindTable(name string) (table *Table, found bool) { - for _, tbl := range s.Tables { - if strings.EqualFold(tbl.Name, name) { - return tbl, true - } - } - - return nil, false -} - -// FindAction finds an action based on its name. -// It returns false if the action is not found. -func (s *Schema) FindAction(name string) (action *Action, found bool) { - for _, act := range s.Actions { - if strings.EqualFold(act.Name, name) { - return act, true - } - } - - return nil, false -} - -// FindProcedure finds a procedure based on its name. -// It returns false if the procedure is not found. -func (s *Schema) FindProcedure(name string) (procedure *Procedure, found bool) { - for _, proc := range s.Procedures { - if strings.EqualFold(proc.Name, name) { - return proc, true - } - } - - return nil, false -} - -// FindForeignProcedure finds a foreign procedure based on its name. -// It returns false if the procedure is not found. -func (s *Schema) FindForeignProcedure(name string) (procedure *ForeignProcedure, found bool) { - for _, proc := range s.ForeignProcedures { - if strings.EqualFold(proc.Name, name) { - return proc, true - } - } - - return nil, false -} - -// FindExtensionImport finds an extension based on its alias. -// It returns false if the extension is not found. -func (s *Schema) FindExtensionImport(alias string) (extension *Extension, found bool) { - for _, ext := range s.Extensions { - if strings.EqualFold(ext.Alias, alias) { - return ext, true - } - } - - return nil, false -} - -func (s *Schema) DBID() string { - return utils.GenerateDBID(s.Name, s.Owner) -} - -// Table is a table in a database schema. 
-type Table struct { - Name string `json:"name"` - Columns []*Column `json:"columns"` - Indexes []*Index `json:"indexes"` - ForeignKeys []*ForeignKey `json:"foreign_keys"` -} - -func (t Table) SerializeSize() int { - // uint16 version + uint32 name length + name + uint32 columns length + columns + uint32 indexes length + indexes + uint32 foreignKeys length + foreignKeys - size := 2 + 4 + len(t.Name) + 4 + 4 + 4 - for _, col := range t.Columns { - size += col.SerializeSize() - } - for _, idx := range t.Indexes { - size += idx.SerializeSize() - } - for _, fk := range t.ForeignKeys { - size += fk.SerializeSize() - } - return size -} - -func (t Table) MarshalBinary() ([]byte, error) { - b := make([]byte, t.SerializeSize()) - const ver uint16 = 0 - binary.BigEndian.PutUint16(b, ver) - offset := 2 - - binary.BigEndian.PutUint32(b[offset:], uint32(len(t.Name))) - offset += 4 - copy(b[offset:], t.Name) - offset += len(t.Name) - - binary.BigEndian.PutUint32(b[offset:], uint32(len(t.Columns))) - offset += 4 - for _, col := range t.Columns { - colData, err := col.MarshalBinary() - if err != nil { - return nil, err - } - copy(b[offset:], colData) - offset += len(colData) - } - - binary.BigEndian.PutUint32(b[offset:], uint32(len(t.Indexes))) - offset += 4 - for _, idx := range t.Indexes { - idxData, err := idx.MarshalBinary() - if err != nil { - return nil, err - } - copy(b[offset:], idxData) - offset += len(idxData) - } - - binary.BigEndian.PutUint32(b[offset:], uint32(len(t.ForeignKeys))) - offset += 4 - for _, fk := range t.ForeignKeys { - fkData, err := fk.MarshalBinary() - if err != nil { - return nil, err - } - copy(b[offset:], fkData) - offset += len(fkData) - } - - return b, nil -} - -func (t *Table) UnmarshalBinary(data []byte) error { - if len(data) < 6 { - return fmt.Errorf("invalid data length: %d", len(data)) - } - - ver := binary.BigEndian.Uint16(data) - if ver != 0 { - return fmt.Errorf("invalid table data, unknown version %d", ver) - } - - offset := 2 - nameLen := int(binary.BigEndian.Uint32(data[offset:])) - offset += 4 - if len(data) < offset+nameLen { - return fmt.Errorf("invalid data length: %d", len(data)) - } - - t.Name = string(data[offset : offset+nameLen]) - offset += nameLen - - if len(data) < offset+4 { - return fmt.Errorf("invalid data length: %d", len(data)) - } - colCount := int(binary.BigEndian.Uint32(data[offset:])) - offset += 4 - - if colCount > len(data) { - return fmt.Errorf("invalid data length: %d", len(data)) - } - - t.Columns = make([]*Column, colCount) - for i := range colCount { - col := &Column{} - if err := col.UnmarshalBinary(data[offset:]); err != nil { - return err - } - t.Columns[i] = col - offset += col.SerializeSize() - } - - if len(data) < offset+4 { - return fmt.Errorf("invalid data length: %d", len(data)) - } - idxCount := int(binary.BigEndian.Uint32(data[offset:])) - offset += 4 - - if idxCount > len(data) { - return fmt.Errorf("invalid data length: %d", len(data)) - } - - t.Indexes = make([]*Index, idxCount) - for i := range idxCount { - idx := &Index{} - if err := idx.UnmarshalBinary(data[offset:]); err != nil { - return err - } - t.Indexes[i] = idx - offset += idx.SerializeSize() - } - - if len(data) < offset+4 { - return fmt.Errorf("invalid data length: %d", len(data)) - } - fkCount := int(binary.BigEndian.Uint32(data[offset:])) - offset += 4 - - if fkCount > len(data) { - return fmt.Errorf("invalid data length: %d", len(data)) - } - - t.ForeignKeys = make([]*ForeignKey, fkCount) - for i := range fkCount { - fk := &ForeignKey{} - if err := 
fk.UnmarshalBinary(data[offset:]); err != nil { - return err - } - t.ForeignKeys[i] = fk - offset += fk.SerializeSize() - } - - if offset != t.SerializeSize() { - return fmt.Errorf("invalid data length: %d", len(data)) - } - - return nil -} - -// Clean validates rules about the data in the struct (naming conventions, syntax, etc.). -// It takes a slice of all tables in the schema, which is used to check for foreign key references. -func (t *Table) Clean(tables []*Table) error { - hasPrimaryAttribute := false - for _, col := range t.Columns { - if err := col.Clean(); err != nil { - return err - } - if col.hasPrimary() { - if hasPrimaryAttribute { - return fmt.Errorf("table %s has multiple primary attributes", t.Name) - } - hasPrimaryAttribute = true - } - } - - hasPrimaryIndex := false - idxNames := make(map[string]struct{}) - for _, idx := range t.Indexes { - if err := idx.Clean(t); err != nil { - return err - } - - _, ok := idxNames[idx.Name] - if ok { - return fmt.Errorf("table %s has multiple indexes with the same name: %s", t.Name, idx.Name) - } - idxNames[idx.Name] = struct{}{} - - if idx.Type == PRIMARY { - if hasPrimaryIndex { - return fmt.Errorf("table %s has multiple primary indexes", t.Name) - } - hasPrimaryIndex = true - } - } - - if !hasPrimaryAttribute && !hasPrimaryIndex { - return fmt.Errorf("table %s has no primary key", t.Name) - } - - if hasPrimaryAttribute && hasPrimaryIndex { - return fmt.Errorf("table %s has both primary attribute and primary index", t.Name) - } - - _, err := t.GetPrimaryKey() - if err != nil { - return err - } - - for _, fk := range t.ForeignKeys { - if err := fk.Clean(t, tables); err != nil { - return err - } - } - - return cleanIdent(&t.Name) - -} - -// GetPrimaryKey returns the names of the column(s) that make up the primary key. -// If there is more than one, or no primary key, an error is returned. -func (t *Table) GetPrimaryKey() ([]string, error) { - var primaryKey []string - - hasAttributePrimaryKey := false - for _, col := range t.Columns { - for _, attr := range col.Attributes { - if attr.Type == PRIMARY_KEY { - if hasAttributePrimaryKey { - return nil, fmt.Errorf("table %s has multiple primary attributes", t.Name) - } - hasAttributePrimaryKey = true - primaryKey = []string{col.Name} - } - } - } - - hasIndexPrimaryKey := false - for _, idx := range t.Indexes { - if idx.Type == PRIMARY { - if hasIndexPrimaryKey { - return nil, fmt.Errorf("table %s has multiple primary indexes", t.Name) - } - hasIndexPrimaryKey = true - - // copy - // if we do not copy, then the returned slice will allow modification of the index - primaryKey = make([]string, len(idx.Columns)) - copy(primaryKey, idx.Columns) - } - } - - if !hasAttributePrimaryKey && !hasIndexPrimaryKey { - return nil, fmt.Errorf("table %s has no primary key", t.Name) - } - - if hasAttributePrimaryKey && hasIndexPrimaryKey { - return nil, fmt.Errorf("table %s has both primary attribute and primary index", t.Name) - } - - return primaryKey, nil -} - -// Copy returns a copy of the table -func (t *Table) Copy() *Table { - res := &Table{ - Name: t.Name, - } - - for _, col := range t.Columns { - res.Columns = append(res.Columns, col.Copy()) - } - - for _, idx := range t.Indexes { - res.Indexes = append(res.Indexes, idx.Copy()) - } - - for _, fk := range t.ForeignKeys { - res.ForeignKeys = append(res.ForeignKeys, fk.Copy()) - } - - return res -} - -// FindColumn finds a column based on its name. -// It returns false if the column is not found. 
-func (t *Table) FindColumn(name string) (column *Column, found bool) { - for _, col := range t.Columns { - if strings.EqualFold(col.Name, name) { - return col, true - } - } - - return nil, false -} - -// Column is a column in a table. -type Column struct { - Name string `json:"name"` - Type *DataType `json:"type"` - Attributes []*Attribute `json:"attributes"` -} - -func (c Column) SerializeSize() int { - // uint16 version + uint32 name length + name + type size + uint32 attributes length + attributes - size := 2 + 4 + len(c.Name) + c.Type.SerializeSize() + 4 - for _, attr := range c.Attributes { - size += attr.SerializeSize() - } - return size -} - -func (c Column) MarshalBinary() ([]byte, error) { - b := make([]byte, c.SerializeSize()) - const ver uint16 = 0 - binary.BigEndian.PutUint16(b, ver) - offset := 2 - - binary.BigEndian.PutUint32(b[offset:], uint32(len(c.Name))) - offset += 4 - copy(b[offset:], c.Name) - offset += len(c.Name) - - typeData, err := c.Type.MarshalBinary() - if err != nil { - return nil, err - } - copy(b[offset:], typeData) - offset += len(typeData) - - binary.BigEndian.PutUint32(b[offset:], uint32(len(c.Attributes))) - offset += 4 - - for _, attr := range c.Attributes { - attrData, err := attr.MarshalBinary() - if err != nil { - return nil, err - } - copy(b[offset:], attrData) - offset += len(attrData) - } - - return b, nil -} - -func (c *Column) UnmarshalBinary(data []byte) error { - if len(data) < 6 { - return fmt.Errorf("invalid data length: %d", len(data)) - } - - ver := binary.BigEndian.Uint16(data) - if ver != 0 { - return fmt.Errorf("invalid column data, unknown version %d", ver) - } - - offset := 2 - nameLen := int(binary.BigEndian.Uint32(data[offset:])) - offset += 4 - if len(data) < offset+nameLen { - return fmt.Errorf("invalid data length: %d", len(data)) - } - - c.Name = string(data[offset : offset+nameLen]) - offset += nameLen - - c.Type = &DataType{} - if err := c.Type.UnmarshalBinary(data[offset:]); err != nil { - return err - } - offset += c.Type.SerializeSize() - - if len(data) < offset+4 { - return fmt.Errorf("invalid data length: %d", len(data)) - } - - attrCount := int(binary.BigEndian.Uint32(data[offset:])) - offset += 4 - - if attrCount > len(data) { - return fmt.Errorf("invalid data length: %d", len(data)) - } - - c.Attributes = make([]*Attribute, attrCount) - for i := range attrCount { - attr := &Attribute{} - if err := attr.UnmarshalBinary(data[offset:]); err != nil { - return err - } - c.Attributes[i] = attr - offset += attr.SerializeSize() - } - - if offset != c.SerializeSize() { - return fmt.Errorf("invalid data length: %d", len(data)) - } - - return nil -} - -func (c *Column) Clean() error { - for _, attr := range c.Attributes { - if err := attr.Clean(c); err != nil { - return err - } - } - - return errors.Join( - cleanIdent(&c.Name), - c.Type.Clean(), - ) -} - -// Copy returns a copy of the column -func (c *Column) Copy() *Column { - res := &Column{ - Name: c.Name, - Type: c.Type.Copy(), - } - - for _, attr := range c.Attributes { - res.Attributes = append(res.Attributes, attr.Copy()) - } - - return res -} - -// HasAttribute returns true if the column has the given attribute. -func (c *Column) HasAttribute(attr AttributeType) bool { - for _, a := range c.Attributes { - if a.Type == attr { - return true - } - } - - return false -} - -func (c *Column) hasPrimary() bool { - for _, attr := range c.Attributes { - if attr.Type == PRIMARY_KEY { - return true - } - } - return false -} - -// Attribute is a column attribute. 
-// These are constraints and default values. -type Attribute struct { - Type AttributeType `json:"type"` - Value string `json:"value"` -} - -func (a Attribute) SerializeSize() int { - // uint16 version + uint32 type length + type + uint32 value length + value - return 2 + 4 + len(a.Type) + 4 + len(a.Value) -} - -func (a Attribute) MarshalBinary() ([]byte, error) { - b := make([]byte, a.SerializeSize()) - const ver uint16 = 0 - binary.BigEndian.PutUint16(b, ver) - offset := 2 - - binary.BigEndian.PutUint32(b[offset:], uint32(len(a.Type))) - offset += 4 - copy(b[offset:], a.Type) - offset += len(a.Type) - - binary.BigEndian.PutUint32(b[offset:], uint32(len(a.Value))) - offset += 4 - copy(b[offset:], a.Value) - - return b, nil -} - -func (a *Attribute) UnmarshalBinary(data []byte) error { - if len(data) < 6 { - return fmt.Errorf("invalid data length: %d", len(data)) - } - - ver := binary.BigEndian.Uint16(data) - if ver != 0 { - return fmt.Errorf("invalid attribute data, unknown version %d", ver) - } - - offset := 2 - typeLen := int(binary.BigEndian.Uint32(data[offset:])) - offset += 4 - if len(data) < offset+typeLen+4 { - return fmt.Errorf("invalid data length: %d", len(data)) - } - - a.Type = AttributeType(data[offset : offset+typeLen]) - offset += typeLen - - valueLen := int(binary.BigEndian.Uint32(data[offset:])) - offset += 4 - if len(data) < offset+valueLen { - return fmt.Errorf("invalid data length: %d", len(data)) - } - - a.Value = string(data[offset : offset+valueLen]) - offset += valueLen - - if offset != a.SerializeSize() { - return fmt.Errorf("invalid data length: %d", len(data)) - } - - return nil -} - -// Clean validates rules about the data in the struct (naming conventions, syntax, etc.). -func (a *Attribute) Clean(col *Column) error { - switch a.Type { - case MIN, MAX: - if !col.Type.EqualsStrict(IntType) && !col.Type.EqualsStrict(Uint256Type) && col.Type.Name != DecimalStr { - return fmt.Errorf("attribute %s is only valid for int columns", a.Type) - } - case MIN_LENGTH, MAX_LENGTH: - if !col.Type.EqualsStrict(TextType) && !col.Type.EqualsStrict(BlobType) { - return fmt.Errorf("attribute %s is only valid for text and blob columns", a.Type) - } - } - - return a.Type.Clean() -} - -// Copy returns a copy of the attribute -func (a *Attribute) Copy() *Attribute { - return &Attribute{ - Type: a.Type, - Value: a.Value, - } -} - -// IndexType is a type of index (e.g. BTREE, UNIQUE_BTREE, PRIMARY) -type IndexType string - -// Index is an index on a table. 
-type Index struct { - Name string `json:"name"` - Columns []string `json:"columns"` - Type IndexType `json:"type"` -} - -func (i Index) SerializeSize() int { - // uint16 version + uint32 name length + name + uint32 columns length + columns + uint32 type length + type - size := 2 + 4 + len(i.Name) + 4 - for _, col := range i.Columns { - size += 4 + len(col) - } - size += 4 + len(i.Type) - return size -} - -func (i Index) MarshalBinary() ([]byte, error) { - b := make([]byte, i.SerializeSize()) - const ver uint16 = 0 - binary.BigEndian.PutUint16(b, ver) - offset := 2 - - binary.BigEndian.PutUint32(b[offset:], uint32(len(i.Name))) - offset += 4 - copy(b[offset:], i.Name) - offset += len(i.Name) - - binary.BigEndian.PutUint32(b[offset:], uint32(len(i.Columns))) - offset += 4 - - for _, col := range i.Columns { - binary.BigEndian.PutUint32(b[offset:], uint32(len(col))) - offset += 4 - copy(b[offset:], col) - offset += len(col) - } - - binary.BigEndian.PutUint32(b[offset:], uint32(len(i.Type))) - offset += 4 - copy(b[offset:], i.Type) - - return b, nil -} - -func (i *Index) UnmarshalBinary(data []byte) error { - if len(data) < 6 { - return fmt.Errorf("invalid data length: %d", len(data)) - } - - ver := binary.BigEndian.Uint16(data) - if ver != 0 { - return fmt.Errorf("invalid index data, unknown version %d", ver) - } - - offset := 2 - nameLen := int(binary.BigEndian.Uint32(data[offset:])) - offset += 4 - if len(data) < offset+nameLen { - return fmt.Errorf("invalid data length: %d", len(data)) - } - - i.Name = string(data[offset : offset+nameLen]) - offset += nameLen - - if len(data) < offset+4 { - return fmt.Errorf("invalid data length: %d", len(data)) - } - - colCount := int(binary.BigEndian.Uint32(data[offset:])) - offset += 4 - - if colCount > len(data) { - return fmt.Errorf("invalid data length: %d", len(data)) - } - - i.Columns = make([]string, colCount) - for j := range colCount { - if len(data) < offset+4 { - return fmt.Errorf("invalid data length: %d", len(data)) - } - colLen := int(binary.BigEndian.Uint32(data[offset:])) - offset += 4 - if len(data) < offset+colLen { - return fmt.Errorf("invalid data length: %d", len(data)) - } - i.Columns[j] = string(data[offset : offset+colLen]) - offset += colLen - } - - if len(data) < offset+4 { - return fmt.Errorf("invalid data length: %d", len(data)) - } - - typeLen := int(binary.BigEndian.Uint32(data[offset:])) - offset += 4 - if len(data) < offset+typeLen { - return fmt.Errorf("invalid data length: %d", len(data)) - } - - i.Type = IndexType(data[offset : offset+typeLen]) - offset += typeLen - - if offset != i.SerializeSize() { - return fmt.Errorf("invalid data length: %d", len(data)) - } - - return nil -} - -// Clean validates rules about the data in the struct (naming conventions, syntax, etc.). -func (i *Index) Clean(tbl *Table) error { - for _, col := range i.Columns { - if !hasColumn(tbl, col) { - return fmt.Errorf("column %s not found in table %s", col, tbl.Name) - } - } - - return errors.Join( - cleanIdent(&i.Name), - cleanIdents(&i.Columns), - i.Type.Clean(), - ) -} - -// Copy returns a copy of the index. -func (i *Index) Copy() *Index { - return &Index{ - Name: i.Name, - Columns: i.Columns, - Type: i.Type, - } -} - -// index types -const ( - // BTREE is the default index type. - BTREE IndexType = "BTREE" - // UNIQUE_BTREE is a unique BTREE index. - UNIQUE_BTREE IndexType = "UNIQUE_BTREE" - // PRIMARY is a primary index. - // Only one primary index is allowed per table. 
- // A primary index cannot exist on a table that also has a primary key. - PRIMARY IndexType = "PRIMARY" -) - -func (i IndexType) String() string { - return string(i) -} - -func (i *IndexType) IsValid() bool { - upper := strings.ToUpper(i.String()) - - return upper == BTREE.String() || - upper == UNIQUE_BTREE.String() || - upper == PRIMARY.String() -} - -func (i *IndexType) Clean() error { - if !i.IsValid() { - return fmt.Errorf("invalid index type: %s", i.String()) - } - - *i = IndexType(strings.ToUpper(i.String())) - - return nil -} - -// ForeignKey is a foreign key in a table. -type ForeignKey struct { - // ChildKeys are the columns that are referencing another. - // For example, in FOREIGN KEY (a) REFERENCES tbl2(b), "a" is the child key - ChildKeys []string `json:"child_keys"` - - // ParentKeys are the columns that are being referred to. - // For example, in FOREIGN KEY (a) REFERENCES tbl2(b), "b" is the parent key - ParentKeys []string `json:"parent_keys"` - - // ParentTable is the table that holds the parent columns. - // For example, in FOREIGN KEY (a) REFERENCES tbl2(b), "tbl2" is the parent table - ParentTable string `json:"parent_table"` - - // Do we need parent schema stored with meta data or should assume and - // enforce same schema when creating the dataset with generated DDL. - // ParentSchema string `json:"parent_schema"` - - // Action refers to what the foreign key should do when the parent is altered. - // This is NOT the same as a database action; - // however sqlite's docs refer to these as actions, - // so we should be consistent with that. - // For example, ON DELETE CASCADE is a foreign key action - Actions []*ForeignKeyAction `json:"actions"` -} - -func (f ForeignKey) SerializeSize() int { - // uint16 version + - // uint32 childKeys length + childKeys + - // uint32 parentKeys length + parentKeys + - // uint32 parentTable length + parentTable + - // uint32 actions length + actions - size := 2 + 4 + 4 + 4 + len(f.ParentTable) + 4 - for _, key := range f.ChildKeys { - size += 4 + len(key) - } - for _, key := range f.ParentKeys { - size += 4 + len(key) - } - for _, action := range f.Actions { - size += action.SerializeSize() - } - return size -} - -func (f ForeignKey) MarshalBinary() ([]byte, error) { - b := make([]byte, f.SerializeSize()) - const ver uint16 = 0 - binary.BigEndian.PutUint16(b, ver) - offset := 2 - - binary.BigEndian.PutUint32(b[offset:], uint32(len(f.ChildKeys))) - offset += 4 - for _, key := range f.ChildKeys { - binary.BigEndian.PutUint32(b[offset:], uint32(len(key))) - offset += 4 - copy(b[offset:], key) - offset += len(key) - } - - binary.BigEndian.PutUint32(b[offset:], uint32(len(f.ParentKeys))) - offset += 4 - for _, key := range f.ParentKeys { - binary.BigEndian.PutUint32(b[offset:], uint32(len(key))) - offset += 4 - copy(b[offset:], key) - offset += len(key) - } - - binary.BigEndian.PutUint32(b[offset:], uint32(len(f.ParentTable))) - offset += 4 - copy(b[offset:], f.ParentTable) - offset += len(f.ParentTable) - - binary.BigEndian.PutUint32(b[offset:], uint32(len(f.Actions))) - offset += 4 - for _, action := range f.Actions { - actionData, err := action.MarshalBinary() - if err != nil { - return nil, err - } - copy(b[offset:], actionData) - offset += len(actionData) - } - - return b, nil -} - -func (f *ForeignKey) UnmarshalBinary(data []byte) error { - if len(data) < 6 { - return fmt.Errorf("invalid data length: %d", len(data)) - } - - ver := binary.BigEndian.Uint16(data) - if ver != 0 { - return fmt.Errorf("invalid foreign key data, unknown 
version %d", ver) - } - - offset := 2 - childKeyCount := int(binary.BigEndian.Uint32(data[offset:])) - offset += 4 - - if childKeyCount > len(data) { - return fmt.Errorf("invalid data length: %d", len(data)) - } - - f.ChildKeys = make([]string, childKeyCount) - for i := range childKeyCount { - if len(data) < offset+4 { - return fmt.Errorf("invalid data length: %d", len(data)) - } - keyLen := int(binary.BigEndian.Uint32(data[offset:])) - offset += 4 - if len(data) < offset+keyLen { - return fmt.Errorf("invalid data length: %d", len(data)) - } - f.ChildKeys[i] = string(data[offset : offset+keyLen]) - offset += keyLen - } - - if offset+4 > len(data) { - return fmt.Errorf("invalid data length: %d", len(data)) - } - - parentKeyCount := int(binary.BigEndian.Uint32(data[offset:])) - offset += 4 - - if parentKeyCount > len(data) { - return fmt.Errorf("invalid data length: %d", len(data)) - } - - f.ParentKeys = make([]string, parentKeyCount) - for i := range parentKeyCount { - if len(data) < offset+4 { - return fmt.Errorf("invalid data length: %d", len(data)) - } - keyLen := int(binary.BigEndian.Uint32(data[offset:])) - offset += 4 - if len(data) < offset+keyLen { - return fmt.Errorf("invalid data length: %d", len(data)) - } - f.ParentKeys[i] = string(data[offset : offset+keyLen]) - offset += keyLen - } - - if len(data) < offset+4 { - return fmt.Errorf("invalid data length: %d", len(data)) - } - tableLen := int(binary.BigEndian.Uint32(data[offset:])) - offset += 4 - if len(data) < offset+tableLen { - return fmt.Errorf("invalid data length: %d", len(data)) - } - f.ParentTable = string(data[offset : offset+tableLen]) - offset += tableLen - - if len(data) < offset+4 { - return fmt.Errorf("invalid data length: %d", len(data)) - } - actionCount := int(binary.BigEndian.Uint32(data[offset:])) - offset += 4 - - if actionCount > len(data) { - return fmt.Errorf("invalid data length: %d", len(data)) - } - - f.Actions = make([]*ForeignKeyAction, actionCount) - for i := range actionCount { - action := &ForeignKeyAction{} - if err := action.UnmarshalBinary(data[offset:]); err != nil { - return err - } - f.Actions[i] = action - offset += action.SerializeSize() - } - - if offset != f.SerializeSize() { - return fmt.Errorf("invalid data length: %d", len(data)) - } - - return nil -} - -// Clean runs a set of validations and cleans the foreign key -func (f *ForeignKey) Clean(currentTable *Table, allTables []*Table) error { - if len(f.ChildKeys) != len(f.ParentKeys) { - return errors.New("foreign key must have same number of child and parent keys") - } - - for _, action := range f.Actions { - err := action.Clean() - if err != nil { - return err - } - } - - for _, childKey := range f.ChildKeys { - if !hasColumn(currentTable, childKey) { - return fmt.Errorf("column %s not found in table %s", childKey, currentTable.Name) - } - } - - found := false - for _, table := range allTables { - // we need to use equal fold since this can be used - // in a case insensitive context - if strings.EqualFold(table.Name, f.ParentTable) { - found = true - for _, parentKey := range f.ParentKeys { - if !hasColumn(table, parentKey) { - return fmt.Errorf("column %s not found in table %s", parentKey, table.Name) - } - } - break - } - } - - if !found { - return fmt.Errorf("parent table %s not found", f.ParentTable) - } - - return errors.Join( - cleanIdents(&f.ChildKeys), - cleanIdents(&f.ParentKeys), - // cleanIdent(&f.ParentSchema), - cleanIdent(&f.ParentTable), - ) -} - -func hasColumn(table *Table, colName string) bool { - return 
slices.ContainsFunc(table.Columns, func(col *Column) bool { - return strings.EqualFold(col.Name, colName) - }) -} - -// Copy returns a copy of the foreign key -func (f *ForeignKey) Copy() *ForeignKey { - actions := make([]*ForeignKeyAction, len(f.Actions)) - for i, action := range f.Actions { - actions[i] = action.Copy() - } - - return &ForeignKey{ - ChildKeys: f.ChildKeys, - ParentKeys: f.ParentKeys, - ParentTable: f.ParentTable, - // ParentSchema: f.ParentSchema, - Actions: actions, - } -} - -// ForeignKeyAction is used to specify what should occur -// if a parent key is updated or deleted -type ForeignKeyAction struct { - // On can be either "UPDATE" or "DELETE" - On ForeignKeyActionOn `json:"on"` - - // Do specifies what a foreign key action should do - Do ForeignKeyActionDo `json:"do"` -} - -func (f ForeignKeyAction) SerializeSize() int { - // uint16 version + uint32 on length + on + uint32 do length + do - return 2 + 4 + len(f.On) + 4 + len(f.Do) -} - -func (f ForeignKeyAction) MarshalBinary() ([]byte, error) { - b := make([]byte, f.SerializeSize()) - const ver uint16 = 0 - binary.BigEndian.PutUint16(b, ver) - offset := 2 - - binary.BigEndian.PutUint32(b[offset:], uint32(len(f.On))) - offset += 4 - copy(b[offset:], f.On) - offset += len(f.On) - - binary.BigEndian.PutUint32(b[offset:], uint32(len(f.Do))) - offset += 4 - copy(b[offset:], f.Do) - - return b, nil -} - -func (f *ForeignKeyAction) UnmarshalBinary(data []byte) error { - if len(data) < 6 { - return fmt.Errorf("invalid data length: %d", len(data)) - } - - ver := binary.BigEndian.Uint16(data) - if ver != 0 { - return fmt.Errorf("invalid foreign key action data, unknown version %d", ver) - } - - offset := 2 - onLen := int(binary.BigEndian.Uint32(data[offset:])) - offset += 4 - if len(data) < offset+onLen+4 { - return fmt.Errorf("invalid data length: %d", len(data)) - } - - f.On = ForeignKeyActionOn(data[offset : offset+onLen]) - offset += onLen - - doLen := int(binary.BigEndian.Uint32(data[offset:])) - offset += 4 - if len(data) < offset+doLen { - return fmt.Errorf("invalid data length: %d", len(data)) - } - - f.Do = ForeignKeyActionDo(data[offset : offset+doLen]) - offset += doLen - - if offset != f.SerializeSize() { - return fmt.Errorf("invalid data length: %d", len(data)) - } - - return nil -} - -// Clean runs a set of validations and cleans the attributes in ForeignKeyAction -func (f *ForeignKeyAction) Clean() error { - return errors.Join( - f.On.Clean(), - f.Do.Clean(), - ) -} - -// Copy returns a copy of the foreign key action -func (f *ForeignKeyAction) Copy() *ForeignKeyAction { - return &ForeignKeyAction{ - On: f.On, - Do: f.Do, - } -} - -// ForeignKeyActionOn specifies when a foreign key action should occur. -// It can be either "UPDATE" or "DELETE". -type ForeignKeyActionOn string - -// ForeignKeyActionOn types -const ( - // ON_UPDATE is used to specify an action should occur when a parent key is updated - ON_UPDATE ForeignKeyActionOn = "UPDATE" - // ON_DELETE is used to specify an action should occur when a parent key is deleted - ON_DELETE ForeignKeyActionOn = "DELETE" -) - -// IsValid checks whether or not the string is a valid ForeignKeyActionOn -func (f *ForeignKeyActionOn) IsValid() bool { - upper := strings.ToUpper(f.String()) - return upper == ON_UPDATE.String() || - upper == ON_DELETE.String() -} - -// Clean checks whether the string is valid, and will convert it to the correct case. 
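As a concrete reading of the ForeignKey comments above, FOREIGN KEY (a) REFERENCES tbl2(b) ON DELETE CASCADE maps roughly onto the struct as follows (a sketch, using the ForeignKey and ForeignKeyAction types and constants from this file; the column and table names are illustrative):

fk := &ForeignKey{
    ChildKeys:   []string{"a"},  // the referencing column
    ParentKeys:  []string{"b"},  // the referenced column
    ParentTable: "tbl2",
    Actions: []*ForeignKeyAction{
        {On: ON_DELETE, Do: DO_CASCADE},
    },
}
// fk.Clean(currentTable, allTables) would then verify that "a" exists in the
// child table and that "b" exists in "tbl2".
_ = fk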
-func (f *ForeignKeyActionOn) Clean() error { - upper := strings.ToUpper(f.String()) - - if !f.IsValid() { - return fmt.Errorf("invalid ForeignKeyActionOn. received: %s", f.String()) - } - - *f = ForeignKeyActionOn(upper) - - return nil -} - -// String returns the ForeignKeyActionOn as a string -func (f ForeignKeyActionOn) String() string { - return string(f) -} - -// ForeignKeyActionDo specifies what should be done when a foreign key action is triggered. -type ForeignKeyActionDo string - -// ForeignKeyActionDo types -const ( - // DO_NO_ACTION does nothing when a parent key is altered - DO_NO_ACTION ForeignKeyActionDo = "NO ACTION" - - // DO_RESTRICT prevents the parent key from being altered - DO_RESTRICT ForeignKeyActionDo = "RESTRICT" - - // DO_SET_NULL sets the child key(s) to NULL - DO_SET_NULL ForeignKeyActionDo = "SET NULL" - - // DO_SET_DEFAULT sets the child key(s) to their default values - DO_SET_DEFAULT ForeignKeyActionDo = "SET DEFAULT" - - // DO_CASCADE updates the child key(s) or deletes the records (depending on the action type) - DO_CASCADE ForeignKeyActionDo = "CASCADE" -) - -// String returns the ForeignKeyActionDo as a string -func (f ForeignKeyActionDo) String() string { - return string(f) -} - -// IsValid checks if the string is a valid ForeignKeyActionDo -func (f *ForeignKeyActionDo) IsValid() bool { - upper := strings.ToUpper(f.String()) - - return upper == DO_NO_ACTION.String() || - upper == DO_RESTRICT.String() || - upper == DO_SET_NULL.String() || - upper == DO_SET_DEFAULT.String() || - upper == DO_CASCADE.String() -} - -// Clean checks the validity or the string, and converts it to the correct case -func (f *ForeignKeyActionDo) Clean() error { - upper := strings.ToUpper(f.String()) - - if !f.IsValid() { - return fmt.Errorf("invalid ForeignKeyActionDo. received: %s", upper) - } - - *f = ForeignKeyActionDo(upper) - - return nil -} - -// Extension defines what extensions the schema uses, and how they are initialized. 
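Clean on the two action enums only validates and upper-cases the strings; a short sketch of that normalization, assuming the ForeignKeyAction type above:

act := &ForeignKeyAction{On: "delete", Do: "set default"}
if err := act.Clean(); err != nil {
    panic(err)
}
// act.On == ON_DELETE ("DELETE"), act.Do == DO_SET_DEFAULT ("SET DEFAULT");
// an unrecognized string such as "TRUNCATE" would instead return an error.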
-type Extension struct { - // Name is the name of the extension registered in the node - Name string `json:"name"` - // Initialization is a list of key value pairs that are used to initialize the extension - Initialization []*ExtensionConfig `json:"initialization"` - // Alias is the alias of the extension, which is how its instance is referred to in the schema - Alias string `json:"alias"` -} - -func (e Extension) SerializeSize() int { - // uint16 version + uint32 name length + name + uint32 initialization length + initialization + uint32 alias length + alias - size := 2 + 4 + len(e.Name) + 4 + 4 + len(e.Alias) - for _, init := range e.Initialization { - size += init.SerializeSize() - } - return size -} - -func (e Extension) MarshalBinary() ([]byte, error) { - b := make([]byte, e.SerializeSize()) - const ver uint16 = 0 - binary.BigEndian.PutUint16(b, ver) - offset := 2 - - binary.BigEndian.PutUint32(b[offset:], uint32(len(e.Name))) - offset += 4 - copy(b[offset:], e.Name) - offset += len(e.Name) - - binary.BigEndian.PutUint32(b[offset:], uint32(len(e.Initialization))) - offset += 4 - for _, init := range e.Initialization { - initData, err := init.MarshalBinary() - if err != nil { - return nil, err - } - copy(b[offset:], initData) - offset += len(initData) - } - - binary.BigEndian.PutUint32(b[offset:], uint32(len(e.Alias))) - offset += 4 - copy(b[offset:], e.Alias) - - return b, nil -} - -func (e *Extension) UnmarshalBinary(data []byte) error { - if len(data) < 6 { - return fmt.Errorf("invalid data length: %d", len(data)) - } - - ver := binary.BigEndian.Uint16(data) - if ver != 0 { - return fmt.Errorf("invalid extension data, unknown version %d", ver) - } - - offset := 2 - nameLen := int(binary.BigEndian.Uint32(data[offset:])) - offset += 4 - if len(data) < offset+nameLen { - return fmt.Errorf("invalid data length: %d", len(data)) - } - - e.Name = string(data[offset : offset+nameLen]) - offset += nameLen - - if len(data) < offset+4 { - return fmt.Errorf("invalid data length: %d", len(data)) - } - initCount := int(binary.BigEndian.Uint32(data[offset:])) - offset += 4 - - if initCount > len(data) { - return fmt.Errorf("invalid data length: %d", len(data)) - } - - e.Initialization = make([]*ExtensionConfig, initCount) - for i := range initCount { - config := &ExtensionConfig{} - if err := config.UnmarshalBinary(data[offset:]); err != nil { - return err - } - e.Initialization[i] = config - offset += config.SerializeSize() - } - - if len(data) < offset+4 { - return fmt.Errorf("invalid data length: %d", len(data)) - } - aliasLen := int(binary.BigEndian.Uint32(data[offset:])) - offset += 4 - if len(data) < offset+aliasLen { - return fmt.Errorf("invalid data length: %d", len(data)) - } - - e.Alias = string(data[offset : offset+aliasLen]) - offset += aliasLen - - if offset != e.SerializeSize() { - return fmt.Errorf("invalid data length: %d", len(data)) - } - - return nil -} - -// Clean validates rules about the data in the struct (naming conventions, syntax, etc.). -func (e *Extension) Clean() error { - keys := make(map[string]struct{}) - for _, config := range e.Initialization { - _, ok := keys[config.Key] - if ok { - return fmt.Errorf("duplicate key %s in extension %s", config.Key, e.Name) - } - - keys[config.Key] = struct{}{} - } - - return errors.Join( - cleanIdent(&e.Name), - cleanIdent(&e.Alias), - ) -} - -// CleanMap returns a map of the config values for the extension. -// Since the Kuneiform parser parses all values as strings, it cleans -// the single quotes from the values. 
-func (e *Extension) CleanMap() map[string]string { - config := make(map[string]string) - for _, c := range e.Initialization { - config[c.Key] = strings.Trim(c.Value, "'") - } - - return config -} - -// ExtensionConfig is a key value pair that represents a configuration value for an extension -type ExtensionConfig struct { - Key string `json:"name"` - Value string `json:"value"` -} - -func (e ExtensionConfig) SerializeSize() int { - // uint16 version + uint32 key length + key + uint32 value length + value - return 2 + 4 + len(e.Key) + 4 + len(e.Value) -} - -func (e ExtensionConfig) MarshalBinary() ([]byte, error) { - b := make([]byte, e.SerializeSize()) - const ver uint16 = 0 - binary.BigEndian.PutUint16(b, ver) - offset := 2 - - binary.BigEndian.PutUint32(b[offset:], uint32(len(e.Key))) - offset += 4 - copy(b[offset:], e.Key) - offset += len(e.Key) - - binary.BigEndian.PutUint32(b[offset:], uint32(len(e.Value))) - offset += 4 - copy(b[offset:], e.Value) - - return b, nil -} - -func (e *ExtensionConfig) UnmarshalBinary(data []byte) error { - if len(data) < 6 { - return fmt.Errorf("invalid data length: %d", len(data)) - } - - ver := binary.BigEndian.Uint16(data) - if ver != 0 { - return fmt.Errorf("invalid extension config data, unknown version %d", ver) - } - - offset := 2 - keyLen := int(binary.BigEndian.Uint32(data[offset:])) - offset += 4 - if len(data) < offset+keyLen { - return fmt.Errorf("invalid data length: %d", len(data)) - } - - e.Key = string(data[offset : offset+keyLen]) - offset += keyLen - - if len(data) < offset+4 { - return fmt.Errorf("invalid data length: %d", len(data)) - } - valueLen := int(binary.BigEndian.Uint32(data[offset:])) - offset += 4 - if len(data) < offset+valueLen { - return fmt.Errorf("invalid data length: %d", len(data)) - } - - e.Value = string(data[offset : offset+valueLen]) - offset += valueLen - - if offset != e.SerializeSize() { - return fmt.Errorf("invalid data length: %d", len(data)) - } - - return nil -} - -func cleanIdent(ident *string) error { - if ident == nil { - return errors.New("ident cannot be nil") - } - - *ident = strings.TrimSpace(*ident) - *ident = strings.ToLower(*ident) - - err := validation.ValidateIdentifier(*ident) - if err != nil { - return err - } - - return nil -} - -func cleanIdents(idents *[]string) error { - if idents == nil { - return errors.New("identifiers cannot be nil") - } - - for i := range *idents { - err := cleanIdent(&(*idents)[i]) - if err != nil { - return err - } - } - - return nil -} - -func cleanActionParameters(inputs *[]string) error { - if inputs == nil { - return nil - } - - for i := range *inputs { - err := cleanParameter(&(*inputs)[i]) - if err != nil { - return err - } - } - - return nil -} - -// cleanParameter applies only to the unparsed instructions/statements. -func cleanParameter(input *string) error { - if len(*input) < 2 { - return errors.New("parameter cannot be empty") - } - - if len(*input) > validation.MAX_IDENT_NAME_LENGTH { - return fmt.Errorf("parameter cannot be longer than %d characters", validation.MAX_IDENT_NAME_LENGTH) - } - - if !strings.HasPrefix(*input, "$") { - return errors.New("parameter must start with $") - } - - *input = strings.ToLower(*input) - - return nil -} - -// AttributeType is a type of attribute (e.g. 
PRIMARY_KEY, UNIQUE, NOT_NULL, DEFAULT, MIN, MAX, MIN_LENGTH, MAX_LENGTH) -type AttributeType string - -// Attribute Types -const ( - PRIMARY_KEY AttributeType = "PRIMARY_KEY" - UNIQUE AttributeType = "UNIQUE" - NOT_NULL AttributeType = "NOT_NULL" - DEFAULT AttributeType = "DEFAULT" - MIN AttributeType = "MIN" - MAX AttributeType = "MAX" - MIN_LENGTH AttributeType = "MIN_LENGTH" - MAX_LENGTH AttributeType = "MAX_LENGTH" // is this kwil custom? -) - -func (a AttributeType) String() string { - return string(a) -} - -func (a *AttributeType) IsValid() bool { - upper := strings.ToUpper(a.String()) - - return upper == PRIMARY_KEY.String() || - upper == UNIQUE.String() || - upper == NOT_NULL.String() || - upper == DEFAULT.String() || - upper == MIN.String() || - upper == MAX.String() || - upper == MIN_LENGTH.String() || - upper == MAX_LENGTH.String() -} - -// Clean validates rules about the data in the struct (naming conventions, syntax, etc.). -func (a *AttributeType) Clean() error { - if !a.IsValid() { - return fmt.Errorf("invalid attribute type: %s", a.String()) - } - - *a = AttributeType(strings.ToUpper(a.String())) - - return nil -} - -// Action is a procedure in a database schema. -// These are defined by Kuneiform's `action` keyword. -type Action struct { - Name string `json:"name"` - Annotations []string `json:"annotations"` - Parameters []string `json:"parameters"` - Public bool `json:"public"` - Modifiers []Modifier `json:"modifiers"` - Body string `json:"body"` -} - -func (a Action) SerializeSize() int { - // uint16 version + uint32 name length + name + uint32 annotations length + annotations + uint32 parameters length + parameters + uint8 public + uint32 modifiers length + modifiers + uint32 body length + body - size := 2 + 4 + len(a.Name) + 4 + 4 + 1 + 4 + 4 + len(a.Body) - for _, ann := range a.Annotations { - size += 4 + len(ann) - } - for _, param := range a.Parameters { - size += 4 + len(param) - } - for _, mod := range a.Modifiers { - size += 4 + len(mod) - } - return size -} - -func (a Action) MarshalBinary() ([]byte, error) { - b := make([]byte, a.SerializeSize()) - const ver uint16 = 0 - binary.BigEndian.PutUint16(b, ver) - offset := 2 - - binary.BigEndian.PutUint32(b[offset:], uint32(len(a.Name))) - offset += 4 - copy(b[offset:], a.Name) - offset += len(a.Name) - - binary.BigEndian.PutUint32(b[offset:], uint32(len(a.Annotations))) - offset += 4 - for _, ann := range a.Annotations { - binary.BigEndian.PutUint32(b[offset:], uint32(len(ann))) - offset += 4 - copy(b[offset:], ann) - offset += len(ann) - } - - binary.BigEndian.PutUint32(b[offset:], uint32(len(a.Parameters))) - offset += 4 - for _, param := range a.Parameters { - binary.BigEndian.PutUint32(b[offset:], uint32(len(param))) - offset += 4 - copy(b[offset:], param) - offset += len(param) - } - - if a.Public { - b[offset] = 1 - } - offset++ - - binary.BigEndian.PutUint32(b[offset:], uint32(len(a.Modifiers))) - offset += 4 - for _, mod := range a.Modifiers { - binary.BigEndian.PutUint32(b[offset:], uint32(len(mod))) - offset += 4 - copy(b[offset:], mod) - offset += len(mod) - } - - binary.BigEndian.PutUint32(b[offset:], uint32(len(a.Body))) - offset += 4 - copy(b[offset:], a.Body) - - return b, nil -} - -func (a *Action) UnmarshalBinary(data []byte) error { - if len(data) < 6 { - return fmt.Errorf("invalid data length: %d", len(data)) - } - - ver := binary.BigEndian.Uint16(data) - if ver != 0 { - return fmt.Errorf("invalid action data, unknown version %d", ver) - } - - offset := 2 - nameLen := 
int(binary.BigEndian.Uint32(data[offset:])) - offset += 4 - if len(data) < offset+nameLen { - return fmt.Errorf("invalid data length: %d", len(data)) - } - - a.Name = string(data[offset : offset+nameLen]) - offset += nameLen - - if len(data) < offset+4 { - return fmt.Errorf("invalid data length: %d", len(data)) - } - annCount := int(binary.BigEndian.Uint32(data[offset:])) - offset += 4 - - if annCount > len(data) { // do no over allocate - return fmt.Errorf("invalid data length: %d", len(data)) - } - - a.Annotations = make([]string, annCount) - for i := range annCount { - if len(data) < offset+4 { - return fmt.Errorf("invalid data length: %d", len(data)) - } - annLen := int(binary.BigEndian.Uint32(data[offset:])) - offset += 4 - if len(data) < offset+annLen { - return fmt.Errorf("invalid data length: %d", len(data)) - } - a.Annotations[i] = string(data[offset : offset+annLen]) - offset += annLen - } - - if len(data) < offset+4 { - return fmt.Errorf("invalid data length: %d", len(data)) - } - paramCount := int(binary.BigEndian.Uint32(data[offset:])) - offset += 4 - - if paramCount > len(data) { // do no over allocate - return fmt.Errorf("invalid data length: %d", len(data)) - } - - a.Parameters = make([]string, paramCount) - for i := range paramCount { - if len(data) < offset+4 { - return fmt.Errorf("invalid data length: %d", len(data)) - } - paramLen := int(binary.BigEndian.Uint32(data[offset:])) - offset += 4 - if len(data) < offset+paramLen { - return fmt.Errorf("invalid data length: %d", len(data)) - } - a.Parameters[i] = string(data[offset : offset+paramLen]) - offset += paramLen - } - - if len(data) < offset+1 { - return fmt.Errorf("invalid data length: %d", len(data)) - } - - switch data[offset] { - case 0: - case 1: - a.Public = true - default: - return fmt.Errorf("invalid is-public flag: %d", data[offset]) - } - offset++ - - if len(data) < offset+4 { - return fmt.Errorf("invalid data length: %d", len(data)) - } - modCount := int(binary.BigEndian.Uint32(data[offset:])) - offset += 4 - - if modCount > len(data) { // do no over allocate - return fmt.Errorf("invalid data length: %d", len(data)) - } - - a.Modifiers = make([]Modifier, modCount) - for i := range modCount { - if len(data) < offset+4 { - return fmt.Errorf("invalid data length: %d", len(data)) - } - modLen := int(binary.BigEndian.Uint32(data[offset:])) - offset += 4 - if len(data) < offset+modLen { - return fmt.Errorf("invalid data length: %d", len(data)) - } - a.Modifiers[i] = Modifier(data[offset : offset+modLen]) - offset += modLen - } - - if len(data) < offset+4 { - return fmt.Errorf("invalid data length: %d", len(data)) - } - bodyLen := int(binary.BigEndian.Uint32(data[offset:])) - offset += 4 - if len(data) < offset+bodyLen { - return fmt.Errorf("invalid data length: %d", len(data)) - } - - a.Body = string(data[offset : offset+bodyLen]) - offset += bodyLen - - if offset != a.SerializeSize() { - return fmt.Errorf("invalid data length: %d", len(data)) - } - - return nil -} - -// Clean validates rules about the data in the struct (naming conventions, syntax, etc.). -func (p *Action) Clean() error { - for _, m := range p.Modifiers { - if err := m.Clean(); err != nil { - return err - } - } - - p.Body = strings.TrimSpace(p.Body) - - return errors.Join( - cleanIdent(&p.Name), - cleanActionParameters(&p.Parameters), - ) -} - -// IsView returns true if the procedure has a view modifier. 
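Putting the Action helpers above together, a small usage sketch (assuming this package's Action and Modifier types; the action name, table, and parameter are made up for illustration):

act := &Action{
    Name:       "Get_User",
    Public:     true,
    Parameters: []string{"$ID"},
    Modifiers:  []Modifier{ModifierView},
    Body:       "  SELECT * FROM users WHERE id = $ID;  ",
}
if err := act.Clean(); err != nil {
    panic(err)
}
// act.Name == "get_user", act.Parameters[0] == "$id",
// act.Body is trimmed, and act.IsView() reports true.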
-func (p *Action) IsView() bool { - for _, m := range p.Modifiers { - if m == ModifierView { - return true - } - } - - return false -} - -// IsOwnerOnly returns true if the procedure has an owner modifier. -func (p *Action) IsOwnerOnly() bool { - for _, m := range p.Modifiers { - if m == ModifierOwner { - return true - } - } - - return false -} - -// Modifier modifies the access to a procedure. -type Modifier string - -const ( - // View means that an action does not modify the database. - ModifierView Modifier = "VIEW" - - // Authenticated requires that the caller is identified. - ModifierAuthenticated Modifier = "AUTHENTICATED" - - // Owner requires that the caller is the owner of the database. - ModifierOwner Modifier = "OWNER" -) - -func (m *Modifier) IsValid() bool { - upper := strings.ToUpper(m.String()) - - return upper == ModifierView.String() || - upper == ModifierAuthenticated.String() || - upper == ModifierOwner.String() -} - -// Clean validates rules about the data in the struct (naming conventions, syntax, etc.). -func (m *Modifier) Clean() error { - if !m.IsValid() { - return fmt.Errorf("invalid modifier: %s", m.String()) - } - - *m = Modifier(strings.ToUpper(m.String())) - - return nil -} - -func (m Modifier) String() string { - return string(m) -} - -type Procedure struct { - // Name is the name of the procedure. - // It should always be lower case. - Name string `json:"name"` - - // Parameters are the parameters of the procedure. - Parameters []*ProcedureParameter `json:"parameters"` - - // Public is true if the procedure is public. - Public bool `json:"public"` - - // Modifiers are the modifiers of the procedure. - Modifiers []Modifier `json:"modifiers"` - - // Body is the body of the procedure. - Body string `json:"body"` - - // Returns is the return type of the procedure. This may be nil. - Returns *ProcedureReturn `json:"return_types"` - // Annotations are the annotations of the procedure. 
- Annotations []string `json:"annotations"` -} - -func (p Procedure) SerializeSize() int { - // uint16 version + uint32 name length + name + uint32 parameters length + - // parameters + uint8 public + uint32 modifiers length + modifiers + - // uint32 body length + body + return types size + uint32 annotations length + annotations - size := 2 + 4 + len(p.Name) + 4 + 1 + 4 + 4 + len(p.Body) + 4 - for _, param := range p.Parameters { - size += param.SerializeSize() - } - for _, mod := range p.Modifiers { - size += 4 + len(mod) - } - if p.Returns != nil { - size += p.Returns.SerializeSize() - } else { - size += 4 - } - for _, ann := range p.Annotations { - size += 4 + len(ann) - } - return size -} - -func (p Procedure) MarshalBinary() ([]byte, error) { - b := make([]byte, p.SerializeSize()) - const ver uint16 = 0 - binary.BigEndian.PutUint16(b, ver) - offset := 2 - - binary.BigEndian.PutUint32(b[offset:], uint32(len(p.Name))) - offset += 4 - copy(b[offset:], p.Name) - offset += len(p.Name) - - binary.BigEndian.PutUint32(b[offset:], uint32(len(p.Parameters))) - offset += 4 - for _, param := range p.Parameters { - paramData, err := param.MarshalBinary() - if err != nil { - return nil, err - } - copy(b[offset:], paramData) - offset += len(paramData) - } - - if p.Public { - b[offset] = 1 - } - offset++ - - binary.BigEndian.PutUint32(b[offset:], uint32(len(p.Modifiers))) - offset += 4 - for _, mod := range p.Modifiers { - binary.BigEndian.PutUint32(b[offset:], uint32(len(mod))) - offset += 4 - copy(b[offset:], mod) - offset += len(mod) - } - - binary.BigEndian.PutUint32(b[offset:], uint32(len(p.Body))) - offset += 4 - copy(b[offset:], p.Body) - offset += len(p.Body) - - if p.Returns == nil { - // write math.MaxUint32 to indicate that there is no return type - binary.BigEndian.PutUint32(b[offset:], math.MaxUint32) - offset += 4 - } else { - returnData, err := p.Returns.MarshalBinary() - if err != nil { - return nil, err - } - copy(b[offset:], returnData) - offset += len(returnData) - } - - binary.BigEndian.PutUint32(b[offset:], uint32(len(p.Annotations))) - offset += 4 - for _, ann := range p.Annotations { - binary.BigEndian.PutUint32(b[offset:], uint32(len(ann))) - offset += 4 - copy(b[offset:], ann) - offset += len(ann) - } - - return b, nil -} - -func (p *Procedure) UnmarshalBinary(data []byte) error { - if len(data) < 6 { - return fmt.Errorf("invalid data length: %d", len(data)) - } - - ver := binary.BigEndian.Uint16(data) - if ver != 0 { - return fmt.Errorf("invalid procedure data, unknown version %d", ver) - } - - offset := 2 - nameLen := int(binary.BigEndian.Uint32(data[offset:])) - offset += 4 - if len(data) < offset+nameLen { - return fmt.Errorf("invalid data length: %d", len(data)) - } - - p.Name = string(data[offset : offset+nameLen]) - offset += nameLen - - if len(data) < offset+4 { - return fmt.Errorf("invalid data length: %d", len(data)) - } - paramCount := int(binary.BigEndian.Uint32(data[offset:])) - offset += 4 - - if paramCount > len(data) { - return fmt.Errorf("invalid data length: %d", len(data)) - } - - p.Parameters = make([]*ProcedureParameter, paramCount) - for i := range paramCount { - param := &ProcedureParameter{} - if err := param.UnmarshalBinary(data[offset:]); err != nil { - return err - } - p.Parameters[i] = param - offset += param.SerializeSize() - } - - if len(data) < offset+1 { - return fmt.Errorf("invalid data length: %d", len(data)) - } - - switch data[offset] { - case 0: - case 1: - p.Public = true - default: - return fmt.Errorf("invalid is-public flag: %d", 
data[offset]) - } - offset++ - - if len(data) < offset+4 { - return fmt.Errorf("invalid data length: %d", len(data)) - } - modCount := int(binary.BigEndian.Uint32(data[offset:])) - offset += 4 - - if modCount > len(data) { - return fmt.Errorf("invalid data length: %d", len(data)) - } - - p.Modifiers = make([]Modifier, modCount) - for i := range modCount { - if len(data) < offset+4 { - return fmt.Errorf("invalid data length: %d", len(data)) - } - modLen := int(binary.BigEndian.Uint32(data[offset:])) - offset += 4 - if len(data) < offset+modLen { - return fmt.Errorf("invalid data length: %d", len(data)) - } - p.Modifiers[i] = Modifier(data[offset : offset+modLen]) - offset += modLen - } - - if len(data) < offset+4 { - return fmt.Errorf("invalid data length: %d", len(data)) - } - bodyLen := int(binary.BigEndian.Uint32(data[offset:])) - offset += 4 - if len(data) < offset+bodyLen { - return fmt.Errorf("invalid data length: %d", len(data)) - } - - p.Body = string(data[offset : offset+bodyLen]) - offset += bodyLen - - // compare to math.MaxUint32 to determine if there is a return type - if binary.BigEndian.Uint32(data[offset:]) == math.MaxUint32 { - offset += 4 - } else { - p.Returns = &ProcedureReturn{} - if err := p.Returns.UnmarshalBinary(data[offset:]); err != nil { - return err - } - offset += p.Returns.SerializeSize() - } - - if len(data) < offset+4 { - return fmt.Errorf("invalid data length: %d", len(data)) - } - annCount := int(binary.BigEndian.Uint32(data[offset:])) - offset += 4 - - if annCount > len(data) { - return fmt.Errorf("invalid data length: %d", len(data)) - } - - p.Annotations = make([]string, annCount) - for i := range annCount { - if len(data) < offset+4 { - return fmt.Errorf("invalid data length: %d", len(data)) - } - annLen := int(binary.BigEndian.Uint32(data[offset:])) - offset += 4 - if len(data) < offset+annLen { - return fmt.Errorf("invalid data length: %d", len(data)) - } - p.Annotations[i] = string(data[offset : offset+annLen]) - offset += annLen - } - - if offset != p.SerializeSize() { - return fmt.Errorf("invalid data length: %d", len(data)) - } - - return nil -} - -func (p *Procedure) Clean() error { - params := make(map[string]struct{}) - for _, param := range p.Parameters { - err := param.Clean() - if err != nil { - return err - } - - _, ok := params[param.Name] - if ok { - return fmt.Errorf(`duplicate parameter name: "%s"`, param.Name) - } - - params[param.Name] = struct{}{} - } - - if p.Returns != nil { - err := p.Returns.Clean() - if err != nil { - return err - } - } - - p.Body = strings.TrimSpace(p.Body) - - return cleanIdent(&p.Name) -} - -// IsView returns true if the procedure has a view modifier. -func (p *Procedure) IsView() bool { - for _, m := range p.Modifiers { - if m == ModifierView { - return true - } - } - - return false -} - -// IsOwnerOnly returns true if the procedure has an owner modifier. -func (p *Procedure) IsOwnerOnly() bool { - for _, m := range p.Modifiers { - if m == ModifierOwner { - return true - } - } - - return false -} - -// ProcedureReturn holds the return type of a procedure. 
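One encoding detail worth noting from Procedure.MarshalBinary/UnmarshalBinary above: a nil Returns is written as a bare uint32 of math.MaxUint32 where the ProcedureReturn would otherwise begin, and the decoder keys off that sentinel. A round-trip sketch under that assumption:

p := Procedure{Name: "noop", Public: true, Body: "return;"}
raw, _ := p.MarshalBinary()
var out Procedure
if err := out.UnmarshalBinary(raw); err != nil {
    panic(err)
}
// out.Returns == nil: the decoder saw math.MaxUint32 and skipped the
// ProcedureReturn block entirely.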
-type ProcedureReturn struct { - IsTable bool `json:"is_table"` - Fields []*NamedType `json:"fields"` -} - -func (p ProcedureReturn) SerializeSize() int { - // uint16 version + uint8 isTable + uint32 fields length + fields - size := 2 + 1 + 4 - for _, field := range p.Fields { - size += field.SerializeSize() - } - return size -} - -func (p ProcedureReturn) MarshalBinary() ([]byte, error) { - b := make([]byte, p.SerializeSize()) - const ver uint16 = 0 - binary.BigEndian.PutUint16(b, ver) - offset := 2 - - if p.IsTable { - b[offset] = 1 - } - offset++ - - binary.BigEndian.PutUint32(b[offset:], uint32(len(p.Fields))) - offset += 4 - for _, field := range p.Fields { - fieldData, err := field.MarshalBinary() - if err != nil { - return nil, err - } - copy(b[offset:], fieldData) - offset += len(fieldData) - } - - return b, nil -} - -func (p *ProcedureReturn) UnmarshalBinary(data []byte) error { - if len(data) < 7 { // version(2) + isTable(1) + fieldsLen(4) - return fmt.Errorf("invalid data length: %d", len(data)) - } - - ver := binary.BigEndian.Uint16(data) - if ver != 0 { - return fmt.Errorf("invalid procedure return data, unknown version %d", ver) - } - - offset := 2 - switch data[offset] { - case 0: - case 1: - p.IsTable = true - default: - return fmt.Errorf("invalid is-table flag: %d", data[offset]) - } - offset++ - - fieldCount := int(binary.BigEndian.Uint32(data[offset:])) - offset += 4 - - if fieldCount > len(data) { // don't over-allocate - return fmt.Errorf("invalid data length: %d", len(data)) - } - - p.Fields = make([]*NamedType, fieldCount) - for i := range fieldCount { - field := &NamedType{} - if err := field.UnmarshalBinary(data[offset:]); err != nil { - return err - } - p.Fields[i] = field - offset += field.SerializeSize() - } - - if offset != p.SerializeSize() { - return fmt.Errorf("invalid data length: %d", len(data)) - } - - return nil -} - -func (p *ProcedureReturn) Clean() error { - for _, t := range p.Fields { - return t.Clean() - } - - return nil -} - -func (p *ProcedureReturn) Copy() *ProcedureReturn { - fields := make([]*NamedType, len(p.Fields)) - for i, field := range p.Fields { - fields[i] = field.Copy() - } - - return &ProcedureReturn{ - IsTable: p.IsTable, - Fields: fields, - } -} - -// NamedType is a single column in a -// RETURN TABLE(...) statement in a procedure. -type NamedType struct { - // Name is the name of the column. - Name string `json:"name"` - // Type is the type of the column. 
- Type *DataType `json:"type"` -} - -func (n NamedType) SerializeSize() int { - // uint16 version + uint32 name length + name + type size - if n.Type == nil { - return 0 - } - return 2 + 4 + len(n.Name) + n.Type.SerializeSize() -} - -func (n NamedType) MarshalBinary() ([]byte, error) { - if n.Type == nil { - return nil, errors.New("invalid procedure parameter, type is nil") - } - b := make([]byte, n.SerializeSize()) - const ver uint16 = 0 - binary.BigEndian.PutUint16(b, ver) - offset := 2 - - binary.BigEndian.PutUint32(b[offset:], uint32(len(n.Name))) - offset += 4 - copy(b[offset:], n.Name) - offset += len(n.Name) - - if n.Type == nil { - return nil, errors.New("invalid procedure parameter, type is nil") - } - - typeData, err := n.Type.MarshalBinary() - if err != nil { - return nil, err - } - copy(b[offset:], typeData) - - return b, nil -} - -func (n *NamedType) UnmarshalBinary(data []byte) error { - if len(data) < 6 { - return fmt.Errorf("invalid data length: %d", len(data)) - } - - ver := binary.BigEndian.Uint16(data) - if ver != 0 { - return fmt.Errorf("invalid procedure parameter data, unknown version %d", ver) - } - - offset := 2 - nameLen := int(binary.BigEndian.Uint32(data[offset:])) - offset += 4 - if len(data) < offset+nameLen { - return fmt.Errorf("invalid data length: %d", len(data)) - } - - n.Name = string(data[offset : offset+nameLen]) - offset += nameLen - - n.Type = &DataType{} - if err := n.Type.UnmarshalBinary(data[offset:]); err != nil { - return err - } - offset += n.Type.SerializeSize() - - if offset != n.SerializeSize() { - return fmt.Errorf("invalid data length: %d", len(data)) - } - - return nil -} - -func (p *NamedType) Clean() error { - return errors.Join( - cleanIdent(&p.Name), - p.Type.Clean(), - ) -} - -func (p *NamedType) Copy() *NamedType { - return &NamedType{ - Name: p.Name, - Type: p.Type.Copy(), - } -} - -// ProcedureParameter is a parameter in a procedure. -type ProcedureParameter struct { - // Name is the name of the parameter. - // It should always be lower case. - Name string `json:"name"` - // Type is the type of the parameter. - Type *DataType `json:"type"` -} - -func (p ProcedureParameter) SerializeSize() int { - return NamedType(p).SerializeSize() -} - -func (p ProcedureParameter) MarshalBinary() ([]byte, error) { - return NamedType(p).MarshalBinary() -} - -func (p *ProcedureParameter) UnmarshalBinary(data []byte) error { - n := (*NamedType)(p) - return n.UnmarshalBinary(data) -} - -func (c *ProcedureParameter) Clean() error { - return errors.Join( - cleanParameter(&c.Name), - c.Type.Clean(), - ) -} - -// ForeignProcedure is used to define foreign procedures that can be -// dynamically called by the procedure. -type ForeignProcedure struct { - // Name is the name of the foreign procedure. - Name string `json:"name"` - // Parameters are the parameters of the foreign procedure. - Parameters []*DataType `json:"parameters"` - // Returns specifies what the foreign procedure returns. - // If it does not return a table, the names of the return - // values are not needed, and should be left empty. 
- Returns *ProcedureReturn `json:"return_types"` -} - -func (f ForeignProcedure) SerializeSize() int { - // uint16 version + uint32 name length + name + uint32 parameters length + - // parameters + returns nil flag + return types size - size := 2 + 4 + len(f.Name) + 4 + 1 - for _, param := range f.Parameters { - size += param.SerializeSize() - } - if f.Returns != nil { - size += f.Returns.SerializeSize() - } - return size -} - -func (f ForeignProcedure) MarshalBinary() ([]byte, error) { - b := make([]byte, f.SerializeSize()) - const ver uint16 = 0 - binary.BigEndian.PutUint16(b, ver) - offset := 2 - - binary.BigEndian.PutUint32(b[offset:], uint32(len(f.Name))) - offset += 4 - copy(b[offset:], f.Name) - offset += len(f.Name) - - binary.BigEndian.PutUint32(b[offset:], uint32(len(f.Parameters))) - offset += 4 - for _, param := range f.Parameters { - paramData, err := param.MarshalBinary() - if err != nil { - return nil, err - } - copy(b[offset:], paramData) - offset += len(paramData) - } - - if f.Returns == nil { - b[offset] = 0 - // offset++ - } else { - b[offset] = 1 - offset++ - returnData, err := f.Returns.MarshalBinary() - if err != nil { - return nil, err - } - copy(b[offset:], returnData) - } - - return b, nil -} - -func (f *ForeignProcedure) UnmarshalBinary(data []byte) error { - if len(data) < 6 { - return fmt.Errorf("invalid data length: %d", len(data)) - } - - ver := binary.BigEndian.Uint16(data) - if ver != 0 { - return fmt.Errorf("invalid foreign procedure data, unknown version %d", ver) - } - - offset := 2 - nameLen := int(binary.BigEndian.Uint32(data[offset:])) - offset += 4 - if len(data) < offset+nameLen+4 { - return fmt.Errorf("invalid data length: %d", len(data)) - } - - f.Name = string(data[offset : offset+nameLen]) - offset += nameLen - - paramCount := int(binary.BigEndian.Uint32(data[offset:])) - offset += 4 - if paramCount > len(data) { - return fmt.Errorf("invalid data length: %d", len(data)) - } - f.Parameters = make([]*DataType, paramCount) - for i := range paramCount { - param := &DataType{} - if err := param.UnmarshalBinary(data[offset:]); err != nil { - return err - } - f.Parameters[i] = param - offset += param.SerializeSize() - } - - if offset >= len(data) { - return fmt.Errorf("invalid data length: %d", len(data)) - } - var haveReturns bool - switch data[offset] { - case 0: - case 1: - haveReturns = true - default: - return fmt.Errorf("invalid have-returns flag: %d", data[offset]) - } - offset++ - if haveReturns { - if offset >= len(data) { - return fmt.Errorf("invalid data length: %d", len(data)) - } - f.Returns = &ProcedureReturn{} - if err := f.Returns.UnmarshalBinary(data[offset:]); err != nil { - return err - } - offset += f.Returns.SerializeSize() - } - - if offset != f.SerializeSize() { - return fmt.Errorf("invalid data length: %d", len(data)) - } - - return nil -} - -func (f *ForeignProcedure) Clean() error { - err := cleanIdent(&f.Name) - if err != nil { - return err - } - - for _, param := range f.Parameters { - err := param.Clean() - if err != nil { - return err - } - } - - if f.Returns != nil { - err := f.Returns.Clean() - if err != nil { - return err - } - } - - return nil -} - -// DataType is a data type. -// It includes both built-in types and user-defined types. -type DataType struct { - // Name is the name of the type. - Name string `json:"name"` - // IsArray is true if the type is an array. - IsArray bool `json:"is_array"` - // Metadata is the metadata of the type. 
- Metadata [2]uint16 `json:"metadata"` -} - -func (c DataType) SerializeSize() int { - // uint16 version + uint32 name length + name + uint8 is_array + - // 2 x uint16 metadata - return 2 + 4 + len(c.Name) + 1 + 4 -} - -func boolToByte(b bool) byte { - if b { - return 1 - } - return 0 -} - -func (c DataType) MarshalBinary() ([]byte, error) { - b := make([]byte, c.SerializeSize()) - const ver uint16 = 0 - binary.BigEndian.PutUint16(b, ver) - offset := 2 - binary.BigEndian.PutUint32(b[offset:], uint32(len(c.Name))) - offset += 4 - copy(b[offset:], c.Name) - offset += len(c.Name) - b[offset] = boolToByte(c.IsArray) - offset++ - binary.BigEndian.PutUint16(b[offset:], c.Metadata[0]) - offset += 2 - binary.BigEndian.PutUint16(b[offset:], c.Metadata[1]) - return b, nil -} - -func (c *DataType) UnmarshalBinary(data []byte) error { - if len(data) < 6 { - return fmt.Errorf("invalid data length: %d", len(data)) - } - ver := binary.BigEndian.Uint16(data) - if ver != 0 { - return fmt.Errorf("invalid tuple data, unknown version %d", ver) - } - offset := 2 - nameLen := int(binary.BigEndian.Uint32(data[offset:])) - offset += 4 - if len(data) < offset+nameLen+1+2*2 { - return fmt.Errorf("invalid data length: %d", len(data)) - } - c.Name = string(data[offset : offset+nameLen]) - offset += nameLen - - switch data[offset] { - case 0: - case 1: - c.IsArray = true - default: - return fmt.Errorf("invalid data length: %d", len(data)) - } - offset++ - - c.Metadata[0] = binary.BigEndian.Uint16(data[offset : offset+2]) - offset += 2 - c.Metadata[1] = binary.BigEndian.Uint16(data[offset : offset+2]) - offset += 2 - if offset != c.SerializeSize() { // bug, must match - return fmt.Errorf("invalid data length: %d", len(data)) - } - return nil -} - -// String returns the string representation of the type. -func (c *DataType) String() string { - str := strings.Builder{} - str.WriteString(c.Name) - if c.IsArray { - return str.String() + "[]" - } - - if c.Name == DecimalStr { - str.WriteString("(") - str.WriteString(strconv.FormatUint(uint64(c.Metadata[0]), 10)) - str.WriteString(",") - str.WriteString(strconv.FormatUint(uint64(c.Metadata[1]), 10)) - str.WriteString(")") - } - - return str.String() -} - -var ZeroMetadata = [2]uint16{} - -// PGString returns the string representation of the type in Postgres. 
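For DataType, Metadata is only meaningful for decimals, where it carries precision and scale; a round-trip sketch, assuming the DataType type and the DecimalStr and IntType declarations later in this file:

dt := &DataType{Name: DecimalStr, Metadata: [2]uint16{10, 2}}
raw, _ := dt.MarshalBinary()
var out DataType
if err := out.UnmarshalBinary(raw); err != nil {
    panic(err)
}
// out.String() == "decimal(10,2)"; for non-decimal scalars such as IntType,
// Metadata must remain ZeroMetadata or Clean() returns an error.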
-func (c *DataType) PGString() (string, error) { - var scalar string - switch strings.ToLower(c.Name) { - case intStr: - scalar = "INT8" - case textStr: - scalar = "TEXT" - case boolStr: - scalar = "BOOL" - case blobStr: - scalar = "BYTEA" - case uuidStr: - scalar = "UUID" - case uint256Str: - scalar = "UINT256" - case DecimalStr: - if c.Metadata == ZeroMetadata { - return "", errors.New("decimal type must have metadata") - } - - scalar = fmt.Sprintf("NUMERIC(%d,%d)", c.Metadata[0], c.Metadata[1]) - case nullStr: - return "", errors.New("cannot have null column type") - case unknownStr: - return "", errors.New("cannot have unknown column type") - default: - return "", fmt.Errorf("unknown column type: %s", c.Name) - } - - if c.IsArray { - return scalar + "[]", nil - } - - return scalar, nil -} - -func (c *DataType) Clean() error { - lName := strings.ToLower(c.Name) - switch lName { - case intStr, textStr, boolStr, blobStr, uuidStr, uint256Str: // ok - if c.Metadata != ZeroMetadata { - return fmt.Errorf("type %s cannot have metadata", c.Name) - } - case DecimalStr: - if c.Metadata == ZeroMetadata { - return errors.New("decimal type must have metadata") - } - - err := decimal.CheckPrecisionAndScale(c.Metadata[0], c.Metadata[1]) - if err != nil { - return err - } - case nullStr, unknownStr: - if c.IsArray { - return fmt.Errorf("type %s cannot be an array", c.Name) - } - - if c.Metadata != ZeroMetadata { - return fmt.Errorf("type %s cannot have metadata", c.Name) - } - default: - return fmt.Errorf("unknown type: %s", c.Name) - } - - c.Name = lName - - return nil -} - -// Copy returns a copy of the type. -func (c *DataType) Copy() *DataType { - d := &DataType{ - Name: c.Name, - IsArray: c.IsArray, - Metadata: c.Metadata, - } - - return d -} - -// EqualsStrict returns true if the type is equal to the other type. -// The types must be exactly the same, including metadata. -func (c *DataType) EqualsStrict(other *DataType) bool { - // if unknown, return true. unknown is a special case used - // internally when type checking is disabled. - if c.Name == unknownStr || other.Name == unknownStr { - return true - } - - if c.IsArray != other.IsArray { - return false - } - - if (c.Metadata == ZeroMetadata) != (other.Metadata == ZeroMetadata) { - return false - } - if c.Metadata != ZeroMetadata { - if c.Metadata[0] != other.Metadata[0] || c.Metadata[1] != other.Metadata[1] { - return false - } - } - - return strings.EqualFold(c.Name, other.Name) -} - -// Equals returns true if the type is equal to the other type, or if either type is null. -func (c *DataType) Equals(other *DataType) bool { - if c.Name == nullStr || other.Name == nullStr { - return true - } - - return c.EqualsStrict(other) -} - -func (c *DataType) IsNumeric() bool { - if c.IsArray { - return false - } - - return c.Name == intStr || c.Name == DecimalStr || c.Name == uint256Str || c.Name == unknownStr -} - -// declared DataType constants. -// We do not have one for fixed because fixed types require metadata. -var ( - IntType = &DataType{ - Name: intStr, - } - IntArrayType = ArrayType(IntType) - TextType = &DataType{ - Name: textStr, - } - TextArrayType = ArrayType(TextType) - BoolType = &DataType{ - Name: boolStr, - } - BoolArrayType = ArrayType(BoolType) - BlobType = &DataType{ - Name: blobStr, - } - BlobArrayType = ArrayType(BlobType) - UUIDType = &DataType{ - Name: uuidStr, - } - UUIDArrayType = ArrayType(UUIDType) - // DecimalType contains 1,0 metadata. 
- // For type detection, users should prefer compare a datatype - // name with the DecimalStr constant. - DecimalType = &DataType{ - Name: DecimalStr, - Metadata: [2]uint16{1, 0}, // the minimum precision and scale - } - DecimalArrayType = ArrayType(DecimalType) - Uint256Type = &DataType{ - Name: uint256Str, - } - Uint256ArrayType = ArrayType(Uint256Type) - // NullType is a special type used internally - NullType = &DataType{ - Name: nullStr, - } - // Unknown is a special type used internally - // when a type is unknown until runtime. - UnknownType = &DataType{ - Name: unknownStr, - } -) - -// ArrayType creates an array type of the given type. -// It panics if the type is already an array. -func ArrayType(t *DataType) *DataType { - if t.IsArray { - panic("cannot create an array of an array") - } - return &DataType{ - Name: t.Name, - IsArray: true, - Metadata: t.Metadata, - } -} - -const ( - textStr = "text" - intStr = "int" - boolStr = "bool" - blobStr = "blob" - uuidStr = "uuid" - uint256Str = "uint256" - // DecimalStr is a fixed point number. - DecimalStr = "decimal" - nullStr = "null" - unknownStr = "unknown" -) - -// NewDecimalType creates a new fixed point decimal type. -func NewDecimalType(precision, scale uint16) (*DataType, error) { - err := decimal.CheckPrecisionAndScale(precision, scale) - if err != nil { - return nil, err - } - - return &DataType{ - Name: DecimalStr, - Metadata: [2]uint16{precision, scale}, - }, nil -} diff --git a/core/types/schema_test.go b/core/types/schema_test.go deleted file mode 100644 index 3ed0d1067..000000000 --- a/core/types/schema_test.go +++ /dev/null @@ -1,2383 +0,0 @@ -package types - -import ( - "encoding/binary" - "fmt" - "reflect" - "strings" - "testing" -) - -func TestDataTypeBinaryMarshaling(t *testing.T) { - t.Run("marshal and unmarshal valid data type", func(t *testing.T) { - original := DataType{ - Name: "test_type", - IsArray: true, - Metadata: [2]uint16{42, 123}, - } - - data, err := original.MarshalBinary() - if err != nil { - t.Fatal(err) - } - - var decoded DataType - err = decoded.UnmarshalBinary(data) - if err != nil { - t.Fatal(err) - } - - if decoded.Name != original.Name { - t.Errorf("got name %s, want %s", decoded.Name, original.Name) - } - if decoded.IsArray != original.IsArray { - t.Errorf("got isArray %v, want %v", decoded.IsArray, original.IsArray) - } - if decoded.Metadata != original.Metadata { - t.Errorf("got metadata %v, want %v", decoded.Metadata, original.Metadata) - } - }) - - t.Run("unmarshal with insufficient data length", func(t *testing.T) { - data := []byte{0, 0, 0, 0} - var dt DataType - err := dt.UnmarshalBinary(data) - if err == nil { - t.Error("expected error for insufficient data length") - } - }) - - t.Run("unmarshal with invalid version", func(t *testing.T) { - data := []byte{0, 1, 0, 0, 0, 0} - var dt DataType - err := dt.UnmarshalBinary(data) - if err == nil { - t.Error("expected error for invalid version") - } - }) - - t.Run("unmarshal with invalid name length", func(t *testing.T) { - data := []byte{0, 0, 255, 255, 255, 255} - var dt DataType - err := dt.UnmarshalBinary(data) - if err == nil { - t.Error("expected error for invalid name length") - } - }) - - t.Run("marshal empty name", func(t *testing.T) { - original := DataType{ - Name: "", - IsArray: false, - Metadata: [2]uint16{0, 0}, - } - - data, err := original.MarshalBinary() - if err != nil { - t.Fatal(err) - } - - var decoded DataType - err = decoded.UnmarshalBinary(data) - if err != nil { - t.Fatal(err) - } - - if decoded != original { - 
t.Errorf("got %v, want %v", decoded, original) - } - }) - - t.Run("marshal with maximum metadata values", func(t *testing.T) { - original := DataType{ - Name: "test", - IsArray: true, - Metadata: [2]uint16{65535, 65535}, - } - - data, err := original.MarshalBinary() - if err != nil { - t.Fatal(err) - } - - var decoded DataType - err = decoded.UnmarshalBinary(data) - if err != nil { - t.Fatal(err) - } - - if decoded != original { - t.Errorf("got %v, want %v", decoded, original) - } - }) -} - -func TestAttributeBinaryMarshaling(t *testing.T) { - t.Run("marshal and unmarshal valid attribute", func(t *testing.T) { - original := Attribute{ - Type: "test_type", - Value: "test_value", - } - - data, err := original.MarshalBinary() - if err != nil { - t.Fatal(err) - } - - var decoded Attribute - err = decoded.UnmarshalBinary(data) - if err != nil { - t.Fatal(err) - } - - if decoded.Type != original.Type { - t.Errorf("got type %s, want %s", decoded.Type, original.Type) - } - if decoded.Value != original.Value { - t.Errorf("got value %s, want %s", decoded.Value, original.Value) - } - }) - - t.Run("marshal and unmarshal empty attribute", func(t *testing.T) { - original := Attribute{ - Type: "", - Value: "", - } - - data, err := original.MarshalBinary() - if err != nil { - t.Fatal(err) - } - - var decoded Attribute - err = decoded.UnmarshalBinary(data) - if err != nil { - t.Fatal(err) - } - - if decoded != original { - t.Errorf("got %v, want %v", decoded, original) - } - }) - - t.Run("unmarshal with truncated version", func(t *testing.T) { - data := []byte{0} - var a Attribute - err := a.UnmarshalBinary(data) - if err == nil { - t.Error("expected error for truncated version") - } - }) - - t.Run("unmarshal with truncated type length", func(t *testing.T) { - data := []byte{0, 0, 0, 0} - var a Attribute - err := a.UnmarshalBinary(data) - if err == nil { - t.Error("expected error for truncated type length") - } - }) - - t.Run("unmarshal with truncated type data", func(t *testing.T) { - data := []byte{0, 0, 0, 0, 0, 5, 't', 'e'} - var a Attribute - err := a.UnmarshalBinary(data) - if err == nil { - t.Error("expected error for truncated type data") - } - }) - - t.Run("unmarshal with truncated value length", func(t *testing.T) { - data := []byte{0, 0, 0, 0, 0, 1, 'x'} - var a Attribute - err := a.UnmarshalBinary(data) - if err == nil { - t.Error("expected error for truncated value length") - } - }) - - t.Run("unmarshal with truncated value data", func(t *testing.T) { - data := []byte{0, 0, 0, 0, 0, 1, 'x', 0, 0, 0, 5, 'v'} - var a Attribute - err := a.UnmarshalBinary(data) - if err == nil { - t.Error("expected error for truncated value data") - } - }) - - t.Run("verify serialize size calculation", func(t *testing.T) { - a := Attribute{ - Type: "test_type", - Value: "test_value", - } - expected := 2 + 4 + len(a.Type) + 4 + len(a.Value) - if size := a.SerializeSize(); size != expected { - t.Errorf("got size %d, want %d", size, expected) - } - }) -} - -func TestColumnBinaryMarshaling(t *testing.T) { - t.Run("marshal and unmarshal valid column", func(t *testing.T) { - original := Column{ - Name: "test_column", - Type: &DataType{ - Name: "string", - IsArray: false, - Metadata: [2]uint16{1, 2}, - }, - Attributes: []*Attribute{ - {Type: "attr1", Value: "val1"}, - {Type: "attr2", Value: "val2"}, - }, - } - - data, err := original.MarshalBinary() - if err != nil { - t.Fatal(err) - } - - var decoded Column - err = decoded.UnmarshalBinary(data) - if err != nil { - t.Fatal(err) - } - - if decoded.Name != original.Name { 
- t.Errorf("got name %s, want %s", decoded.Name, original.Name) - } - if decoded.Type.Name != original.Type.Name { - t.Errorf("got type name %s, want %s", decoded.Type.Name, original.Type.Name) - } - if len(decoded.Attributes) != len(original.Attributes) { - t.Errorf("got %d attributes, want %d", len(decoded.Attributes), len(original.Attributes)) - } - }) - - t.Run("marshal and unmarshal column with no attributes", func(t *testing.T) { - original := Column{ - Name: "empty_attrs", - Type: &DataType{ - Name: "int", - IsArray: false, - Metadata: [2]uint16{0, 0}, - }, - Attributes: []*Attribute{}, - } - - data, err := original.MarshalBinary() - if err != nil { - t.Fatal(err) - } - - var decoded Column - err = decoded.UnmarshalBinary(data) - if err != nil { - t.Fatal(err) - } - - if len(decoded.Attributes) != 0 { - t.Errorf("got %d attributes, want 0", len(decoded.Attributes)) - } - }) - - t.Run("unmarshal with insufficient data", func(t *testing.T) { - data := []byte{0, 0, 0, 0} - var c Column - err := c.UnmarshalBinary(data) - if err == nil { - t.Error("expected error for insufficient data") - } - }) - - t.Run("unmarshal with invalid version", func(t *testing.T) { - data := []byte{0, 1, 0, 0, 0, 0} - var c Column - err := c.UnmarshalBinary(data) - if err == nil { - t.Error("expected error for invalid version") - } - }) - - t.Run("unmarshal with invalid name length", func(t *testing.T) { - data := []byte{0, 0, 255, 255, 255, 255} - var c Column - err := c.UnmarshalBinary(data) - if err == nil { - t.Error("expected error for invalid name length") - } - }) - - t.Run("verify serialize size calculation", func(t *testing.T) { - c := Column{ - Name: "test", - Type: &DataType{ - Name: "int", - IsArray: false, - Metadata: [2]uint16{1, 1}, - }, - Attributes: []*Attribute{ - {Type: "attr", Value: "val"}, - }, - } - - data, err := c.MarshalBinary() - if err != nil { - t.Fatal(err) - } - - if len(data) != c.SerializeSize() { - t.Errorf("got size %d, want %d", len(data), c.SerializeSize()) - } - }) -} - -func TestIndexBinaryMarshaling(t *testing.T) { - t.Run("marshal and unmarshal valid index", func(t *testing.T) { - original := Index{ - Name: "test_index", - Columns: []string{"col1", "col2", "col3"}, - Type: "BTREE", - } - - data, err := original.MarshalBinary() - if err != nil { - t.Fatal(err) - } - - var decoded Index - err = decoded.UnmarshalBinary(data) - if err != nil { - t.Fatal(err) - } - - if decoded.Name != original.Name { - t.Errorf("got name %s, want %s", decoded.Name, original.Name) - } - if len(decoded.Columns) != len(original.Columns) { - t.Errorf("got %d columns, want %d", len(decoded.Columns), len(original.Columns)) - } - if decoded.Type != original.Type { - t.Errorf("got type %s, want %s", decoded.Type, original.Type) - } - }) - - t.Run("marshal and unmarshal index with empty columns", func(t *testing.T) { - original := Index{ - Name: "empty_cols", - Columns: []string{}, - Type: "HASH", - } - - data, err := original.MarshalBinary() - if err != nil { - t.Fatal(err) - } - - var decoded Index - err = decoded.UnmarshalBinary(data) - if err != nil { - t.Fatal(err) - } - - if len(decoded.Columns) != 0 { - t.Errorf("got %d columns, want 0", len(decoded.Columns)) - } - }) - - t.Run("unmarshal with truncated columns count", func(t *testing.T) { - data := []byte{0, 0, 0, 0, 0, 4, 't', 'e', 's', 't'} - var idx Index - err := idx.UnmarshalBinary(data) - if err == nil { - t.Error("expected error for truncated columns count") - } - }) - - t.Run("unmarshal with invalid column length", func(t *testing.T) 
{ - data := []byte{0, 0, 0, 0, 0, 1, 'x', 0, 0, 0, 1, 255, 255, 255, 255} - var idx Index - err := idx.UnmarshalBinary(data) - if err == nil { - t.Error("expected error for invalid column length") - } - }) - - t.Run("verify serialize size calculation", func(t *testing.T) { - idx := Index{ - Name: "test", - Columns: []string{"col1", "col2"}, - Type: "BTREE", - } - - data, err := idx.MarshalBinary() - if err != nil { - t.Fatal(err) - } - - if len(data) != idx.SerializeSize() { - t.Errorf("got size %d, want %d", len(data), idx.SerializeSize()) - } - }) -} - -func TestForeignKeyActionBinaryMarshaling(t *testing.T) { - t.Run("marshal and unmarshal valid foreign key action", func(t *testing.T) { - original := ForeignKeyAction{ - On: "UPDATE", - Do: "CASCADE", - } - - data, err := original.MarshalBinary() - if err != nil { - t.Fatal(err) - } - - var decoded ForeignKeyAction - err = decoded.UnmarshalBinary(data) - if err != nil { - t.Fatal(err) - } - - if decoded.On != original.On { - t.Errorf("got On %s, want %s", decoded.On, original.On) - } - if decoded.Do != original.Do { - t.Errorf("got Do %s, want %s", decoded.Do, original.Do) - } - }) - - t.Run("marshal and unmarshal with empty values", func(t *testing.T) { - original := ForeignKeyAction{ - On: "", - Do: "", - } - - data, err := original.MarshalBinary() - if err != nil { - t.Fatal(err) - } - - var decoded ForeignKeyAction - err = decoded.UnmarshalBinary(data) - if err != nil { - t.Fatal(err) - } - - if decoded != original { - t.Errorf("got %v, want %v", decoded, original) - } - }) - - t.Run("unmarshal with truncated on length", func(t *testing.T) { - data := []byte{0, 0, 255, 255} - var fka ForeignKeyAction - err := fka.UnmarshalBinary(data) - if err == nil { - t.Error("expected error for truncated on length") - } - }) - - t.Run("unmarshal with truncated do length", func(t *testing.T) { - data := []byte{0, 0, 0, 0, 0, 1, 'X', 255, 255} - var fka ForeignKeyAction - err := fka.UnmarshalBinary(data) - if err == nil { - t.Error("expected error for truncated do length") - } - }) - - t.Run("verify serialize size calculation", func(t *testing.T) { - fka := ForeignKeyAction{ - On: "DELETE", - Do: "SET NULL", - } - - data, err := fka.MarshalBinary() - if err != nil { - t.Fatal(err) - } - - if len(data) != fka.SerializeSize() { - t.Errorf("got size %d, want %d", len(data), fka.SerializeSize()) - } - }) - - t.Run("unmarshal with invalid version", func(t *testing.T) { - data := []byte{0, 1, 0, 0, 0, 0} - var fka ForeignKeyAction - err := fka.UnmarshalBinary(data) - if err == nil { - t.Error("expected error for invalid version") - } - }) -} - -func TestForeignKeyBinaryMarshaling(t *testing.T) { - t.Run("marshal and unmarshal valid foreign key", func(t *testing.T) { - original := ForeignKey{ - ChildKeys: []string{"id", "type"}, - ParentKeys: []string{"parent_id", "parent_type"}, - ParentTable: "parent_table", - Actions: []*ForeignKeyAction{ - {On: "DELETE", Do: "CASCADE"}, - {On: "UPDATE", Do: "SET NULL"}, - }, - } - - data, err := original.MarshalBinary() - if err != nil { - t.Fatal(err) - } - - var decoded ForeignKey - err = decoded.UnmarshalBinary(data) - if err != nil { - t.Fatal(err) - } - - if !reflect.DeepEqual(decoded.ChildKeys, original.ChildKeys) { - t.Errorf("got child keys %v, want %v", decoded.ChildKeys, original.ChildKeys) - } - if !reflect.DeepEqual(decoded.ParentKeys, original.ParentKeys) { - t.Errorf("got parent keys %v, want %v", decoded.ParentKeys, original.ParentKeys) - } - if decoded.ParentTable != original.ParentTable { - 
t.Errorf("got parent table %s, want %s", decoded.ParentTable, original.ParentTable) - } - if !reflect.DeepEqual(decoded.Actions, original.Actions) { - t.Errorf("got actions %v, want %v", decoded.Actions, original.Actions) - } - }) - - t.Run("marshal and unmarshal empty foreign key", func(t *testing.T) { - original := ForeignKey{ - ChildKeys: []string{}, - ParentKeys: []string{}, - ParentTable: "", - Actions: []*ForeignKeyAction{}, - } - - data, err := original.MarshalBinary() - if err != nil { - t.Fatal(err) - } - - var decoded ForeignKey - err = decoded.UnmarshalBinary(data) - if err != nil { - t.Fatal(err) - } - - if len(decoded.ChildKeys) != 0 { - t.Errorf("got %d child keys, want 0", len(decoded.ChildKeys)) - } - if len(decoded.ParentKeys) != 0 { - t.Errorf("got %d parent keys, want 0", len(decoded.ParentKeys)) - } - if len(decoded.Actions) != 0 { - t.Errorf("got %d actions, want 0", len(decoded.Actions)) - } - }) - - t.Run("unmarshal with truncated child keys count", func(t *testing.T) { - data := []byte{0, 0, 255, 255} - var fk ForeignKey - err := fk.UnmarshalBinary(data) - if err == nil { - t.Error("expected error for truncated child keys count") - } - }) - - t.Run("unmarshal with invalid child key length", func(t *testing.T) { - data := []byte{0, 0, 0, 0, 0, 1, 255, 255, 255, 255} - var fk ForeignKey - err := fk.UnmarshalBinary(data) - if err == nil { - t.Error("expected error for invalid child key length") - } - }) - - t.Run("unmarshal with truncated parent keys count", func(t *testing.T) { - data := []byte{0, 0, 0, 0, 0, 0} - var fk ForeignKey - err := fk.UnmarshalBinary(data) - if err == nil { - t.Error("expected error for truncated parent keys count") - } - }) - - t.Run("verify serialize size calculation", func(t *testing.T) { - fk := ForeignKey{ - ChildKeys: []string{"id"}, - ParentKeys: []string{"parent_id"}, - ParentTable: "users", - Actions: []*ForeignKeyAction{ - {On: "DELETE", Do: "CASCADE"}, - }, - } - - data, err := fk.MarshalBinary() - if err != nil { - t.Fatal(err) - } - - if len(data) != fk.SerializeSize() { - t.Errorf("got size %d, want %d", len(data), fk.SerializeSize()) - } - }) - - t.Run("unmarshal with mismatched data length", func(t *testing.T) { - original := ForeignKey{ - ChildKeys: []string{"id"}, - ParentKeys: []string{"parent_id"}, - ParentTable: "users", - Actions: []*ForeignKeyAction{{On: "DELETE", Do: "CASCADE"}}, - } - - data, err := original.MarshalBinary() - if err != nil { - t.Fatal(err) - } - - truncatedData := data[:len(data)-1] - var decoded ForeignKey - err = decoded.UnmarshalBinary(truncatedData) - if err == nil { - t.Error("expected error for mismatched data length") - } - }) -} - -func TestTableBinaryMarshaling(t *testing.T) { - t.Run("marshal and unmarshal complex table", func(t *testing.T) { - original := Table{ - Name: "users", - Columns: []*Column{ - { - Name: "id", - Type: &DataType{ - Name: "int", - IsArray: false, - Metadata: [2]uint16{1, 0}, - }, - Attributes: []*Attribute{ - {Type: "primary_key", Value: "true"}, - }, - }, - { - Name: "emails", - Type: &DataType{ - Name: "string", - IsArray: true, - Metadata: [2]uint16{255, 0}, - }, - }, - }, - Indexes: []*Index{ - { - Name: "email_idx", - Columns: []string{"emails"}, - Type: "HASH", - }, - }, - ForeignKeys: []*ForeignKey{ - { - ChildKeys: []string{"department_id"}, - ParentKeys: []string{"id"}, - ParentTable: "departments", - Actions: []*ForeignKeyAction{ - {On: "DELETE", Do: "SET NULL"}, - }, - }, - }, - } - - data, err := original.MarshalBinary() - if err != nil { - t.Fatal(err) - 
} - - var decoded Table - err = decoded.UnmarshalBinary(data) - if err != nil { - t.Fatal(err) - } - - if decoded.Name != original.Name { - t.Errorf("got name %s, want %s", decoded.Name, original.Name) - } - if len(decoded.Columns) != len(original.Columns) { - t.Errorf("got %d columns, want %d", len(decoded.Columns), len(original.Columns)) - } - if len(decoded.Indexes) != len(original.Indexes) { - t.Errorf("got %d indexes, want %d", len(decoded.Indexes), len(original.Indexes)) - } - if len(decoded.ForeignKeys) != len(original.ForeignKeys) { - t.Errorf("got %d foreign keys, want %d", len(decoded.ForeignKeys), len(original.ForeignKeys)) - } - }) - - t.Run("marshal and unmarshal empty table", func(t *testing.T) { - original := Table{ - Name: "empty", - Columns: []*Column{}, - Indexes: []*Index{}, - ForeignKeys: []*ForeignKey{}, - } - - data, err := original.MarshalBinary() - if err != nil { - t.Fatal(err) - } - - var decoded Table - err = decoded.UnmarshalBinary(data) - if err != nil { - t.Fatal(err) - } - - if decoded.Name != original.Name { - t.Errorf("got name %s, want %s", decoded.Name, original.Name) - } - if len(decoded.Columns) != 0 { - t.Errorf("got %d columns, want 0", len(decoded.Columns)) - } - if len(decoded.Indexes) != 0 { - t.Errorf("got %d indexes, want 0", len(decoded.Indexes)) - } - if len(decoded.ForeignKeys) != 0 { - t.Errorf("got %d foreign keys, want 0", len(decoded.ForeignKeys)) - } - }) - - t.Run("unmarshal with insufficient version data", func(t *testing.T) { - data := []byte{0} - var table Table - err := table.UnmarshalBinary(data) - if err == nil { - t.Error("expected error for insufficient version data") - } - }) - - t.Run("unmarshal with invalid version", func(t *testing.T) { - data := []byte{0, 1, 0, 0, 0, 0} - var table Table - err := table.UnmarshalBinary(data) - if err == nil { - t.Error("expected error for invalid version") - } - }) - - t.Run("verify serialize size calculation", func(t *testing.T) { - table := Table{ - Name: "test", - Columns: []*Column{ - { - Name: "id", - Type: &DataType{ - Name: "int", - IsArray: false, - Metadata: [2]uint16{1, 0}, - }, - }, - }, - Indexes: []*Index{ - { - Name: "idx", - Columns: []string{"id"}, - Type: "BTREE", - }, - }, - ForeignKeys: []*ForeignKey{ - { - ChildKeys: []string{"id"}, - ParentKeys: []string{"id"}, - ParentTable: "parent", - Actions: []*ForeignKeyAction{{On: "DELETE", Do: "CASCADE"}}, - }, - }, - } - - data, err := table.MarshalBinary() - if err != nil { - t.Fatal(err) - } - - if len(data) != table.SerializeSize() { - t.Errorf("got size %d, want %d", len(data), table.SerializeSize()) - } - }) - - t.Run("unmarshal with truncated name length", func(t *testing.T) { - data := []byte{0, 0, 255, 255} - var table Table - err := table.UnmarshalBinary(data) - if err == nil { - t.Error("expected error for truncated name length") - } - }) - - t.Run("unmarshal with invalid name data", func(t *testing.T) { - data := []byte{0, 0, 0, 0, 0, 10, 't', 'e', 's', 't'} - var table Table - err := table.UnmarshalBinary(data) - if err == nil { - t.Error("expected error for invalid name data") - } - }) -} - -func TestActionBinaryMarshaling(t *testing.T) { - t.Run("marshal and unmarshal complex action", func(t *testing.T) { - original := Action{ - Name: "ComplexAction", - Annotations: []string{"@Deprecated", "@Beta"}, - Parameters: []string{"param1: string", "param2: int"}, - Public: true, - Modifiers: []Modifier{"async", "final"}, - Body: "return x + y;", - } - - data, err := original.MarshalBinary() - if err != nil { - t.Fatal(err) 
- } - - var decoded Action - err = decoded.UnmarshalBinary(data) - if err != nil { - t.Fatal(err) - } - - if decoded.Name != original.Name { - t.Errorf("got name %s, want %s", decoded.Name, original.Name) - } - if !reflect.DeepEqual(decoded.Annotations, original.Annotations) { - t.Errorf("got annotations %v, want %v", decoded.Annotations, original.Annotations) - } - if !reflect.DeepEqual(decoded.Parameters, original.Parameters) { - t.Errorf("got parameters %v, want %v", decoded.Parameters, original.Parameters) - } - if decoded.Public != original.Public { - t.Errorf("got public %v, want %v", decoded.Public, original.Public) - } - if !reflect.DeepEqual(decoded.Modifiers, original.Modifiers) { - t.Errorf("got modifiers %v, want %v", decoded.Modifiers, original.Modifiers) - } - if decoded.Body != original.Body { - t.Errorf("got body %s, want %s", decoded.Body, original.Body) - } - }) - - t.Run("marshal and unmarshal minimal action", func(t *testing.T) { - original := Action{ - Name: "MinimalAction", - Annotations: []string{}, - Parameters: []string{}, - Public: false, - Modifiers: []Modifier{}, - Body: "", - } - - data, err := original.MarshalBinary() - if err != nil { - t.Fatal(err) - } - - var decoded Action - err = decoded.UnmarshalBinary(data) - if err != nil { - t.Fatal(err) - } - - if !reflect.DeepEqual(decoded, original) { - t.Errorf("got %v, want %v", decoded, original) - } - }) - - t.Run("verify serialize size with unicode characters", func(t *testing.T) { - action := Action{ - Name: "测试", - Annotations: []string{"🔥"}, - Parameters: []string{"param1: 字符串"}, - Public: true, - Modifiers: []Modifier{"异步"}, - Body: "返回;", - } - - data, err := action.MarshalBinary() - if err != nil { - t.Fatal(err) - } - - if len(data) != action.SerializeSize() { - t.Errorf("got size %d, want %d", len(data), action.SerializeSize()) - } - }) - - t.Run("unmarshal with corrupted annotation length", func(t *testing.T) { - original := Action{ - Name: "Test", - Annotations: []string{"test"}, - } - data, _ := original.MarshalBinary() - - // Corrupt annotation length - offset := 6 + len(original.Name) - binary.BigEndian.PutUint32(data[offset:], uint32(255)) - - var decoded Action - err := decoded.UnmarshalBinary(data) - if err == nil { - t.Error("expected error for corrupted annotation length") - } - }) - - t.Run("unmarshal with corrupted parameter length", func(t *testing.T) { - original := Action{ - Name: "Test", - Parameters: []string{"param"}, - } - data, _ := original.MarshalBinary() - - // Corrupt parameter count - offset := 10 + len(original.Name) - binary.BigEndian.PutUint32(data[offset:], uint32(255)) - - var decoded Action - err := decoded.UnmarshalBinary(data) - if err == nil { - t.Error("expected error for corrupted parameter length") - } - }) - - t.Run("unmarshal with corrupted annotations length", func(t *testing.T) { - original := Action{ - Name: "Test", - Annotations: []string{"test"}, - } - data, _ := original.MarshalBinary() - - // Corrupt modifier count - offset := 11 + len(original.Name) - binary.BigEndian.PutUint32(data[offset:], uint32(255)) - - var decoded Action - err := decoded.UnmarshalBinary(data) - if err == nil { - t.Error("expected error for corrupted modifier length") - } - }) -} - -func TestProcedureParameterBinaryMarshaling(t *testing.T) { - t.Run("marshal and unmarshal valid procedure parameter", func(t *testing.T) { - original := ProcedureParameter{ - Name: "test_param", - Type: &DataType{ - Name: "varchar", - IsArray: false, - Metadata: [2]uint16{100, 0}, - }, - } - - data, err 
:= original.MarshalBinary() - if err != nil { - t.Fatal(err) - } - - var decoded ProcedureParameter - err = decoded.UnmarshalBinary(data) - if err != nil { - t.Fatal(err) - } - - if decoded.Name != original.Name { - t.Errorf("got name %s, want %s", decoded.Name, original.Name) - } - if decoded.Type.Name != original.Type.Name { - t.Errorf("got type name %s, want %s", decoded.Type.Name, original.Type.Name) - } - if decoded.Type.IsArray != original.Type.IsArray { - t.Errorf("got type isArray %v, want %v", decoded.Type.IsArray, original.Type.IsArray) - } - if decoded.Type.Metadata != original.Type.Metadata { - t.Errorf("got type metadata %v, want %v", decoded.Type.Metadata, original.Type.Metadata) - } - }) - - t.Run("marshal and unmarshal with unicode name", func(t *testing.T) { - original := ProcedureParameter{ - Name: "测试参数", - Type: &DataType{ - Name: "text", - IsArray: false, - Metadata: [2]uint16{0, 0}, - }, - } - - data, err := original.MarshalBinary() - if err != nil { - t.Fatal(err) - } - - var decoded ProcedureParameter - err = decoded.UnmarshalBinary(data) - if err != nil { - t.Fatal(err) - } - - if decoded.Name != original.Name { - t.Errorf("got name %s, want %s", decoded.Name, original.Name) - } - }) - - t.Run("unmarshal with truncated type data", func(t *testing.T) { - original := ProcedureParameter{ - Name: "test", - Type: &DataType{ - Name: "int", - IsArray: false, - Metadata: [2]uint16{0, 0}, - }, - } - - data, err := original.MarshalBinary() - if err != nil { - t.Fatal(err) - } - - truncatedData := data[:len(data)-1] - var decoded ProcedureParameter - err = decoded.UnmarshalBinary(truncatedData) - if err == nil { - t.Error("expected error for truncated type data") - } - }) - - t.Run("verify serialize size calculation", func(t *testing.T) { - param := ProcedureParameter{ - Name: "param", - Type: &DataType{ - Name: "decimal", - IsArray: true, - Metadata: [2]uint16{10, 2}, - }, - } - - data, err := param.MarshalBinary() - if err != nil { - t.Fatal(err) - } - - if len(data) != param.SerializeSize() { - t.Errorf("got size %d, want %d", len(data), param.SerializeSize()) - } - }) - - t.Run("unmarshal with empty name", func(t *testing.T) { - original := ProcedureParameter{ - Name: "", - Type: &DataType{ - Name: "bool", - IsArray: false, - Metadata: [2]uint16{0, 0}, - }, - } - - data, err := original.MarshalBinary() - if err != nil { - t.Fatal(err) - } - - var decoded ProcedureParameter - err = decoded.UnmarshalBinary(data) - if err != nil { - t.Fatal(err) - } - - if decoded.Name != "" { - t.Errorf("got name %s, want empty string", decoded.Name) - } - }) -} - -func TestNamedTypeBinaryMarshaling(t *testing.T) { - t.Run("marshal and unmarshal named type", func(t *testing.T) { - original := NamedType{ - Name: "CustomType", - Type: &DataType{ - Name: "int", - IsArray: true, - Metadata: [2]uint16{8, 0}, - }, - } - - data, err := original.MarshalBinary() - if err != nil { - t.Fatal(err) - } - - var decoded NamedType - err = decoded.UnmarshalBinary(data) - if err != nil { - t.Fatal(err) - } - - if decoded.Name != original.Name { - t.Errorf("got name %s, want %s", decoded.Name, original.Name) - } - if decoded.Type.Name != original.Type.Name { - t.Errorf("got type name %s, want %s", decoded.Type.Name, original.Type.Name) - } - if decoded.Type.IsArray != original.Type.IsArray { - t.Errorf("got isArray %v, want %v", decoded.Type.IsArray, original.Type.IsArray) - } - if decoded.Type.Metadata != original.Type.Metadata { - t.Errorf("got metadata %v, want %v", decoded.Type.Metadata, 
original.Type.Metadata) - } - }) - - t.Run("verify serialize size matches actual size", func(t *testing.T) { - nt := NamedType{ - Name: "MyType", - Type: &DataType{ - Name: "varchar", - IsArray: false, - Metadata: [2]uint16{255, 0}, - }, - } - - data, err := nt.MarshalBinary() - if err != nil { - t.Fatal(err) - } - - if len(data) != nt.SerializeSize() { - t.Errorf("got size %d, want %d", len(data), nt.SerializeSize()) - } - }) - - t.Run("marshal and unmarshal with empty name", func(t *testing.T) { - original := NamedType{ - Name: "", - Type: &DataType{ - Name: "bool", - IsArray: false, - Metadata: [2]uint16{0, 0}, - }, - } - - data, err := original.MarshalBinary() - if err != nil { - t.Fatal(err) - } - - var decoded NamedType - err = decoded.UnmarshalBinary(data) - if err != nil { - t.Fatal(err) - } - - if decoded.Name != "" { - t.Errorf("got name %s, want empty string", decoded.Name) - } - }) - - t.Run("unmarshal with nil type", func(t *testing.T) { - original := NamedType{ - Name: "Test", - Type: nil, - } - - _, err := original.MarshalBinary() - if err == nil { - t.Error("expected error for nil type") - } - }) - - t.Run("unmarshal with invalid data length", func(t *testing.T) { - data := []byte{0, 0} - var nt NamedType - err := nt.UnmarshalBinary(data) - if err == nil { - t.Error("expected error for invalid data length") - } - }) -} - -func TestProcedureReturnBinaryMarshaling(t *testing.T) { - t.Run("marshal and unmarshal complex procedure return", func(t *testing.T) { - original := ProcedureReturn{ - IsTable: true, - Fields: []*NamedType{ - { - Name: "id", - Type: &DataType{ - Name: "int", - IsArray: false, - Metadata: [2]uint16{4, 0}, - }, - }, - { - Name: "names", - Type: &DataType{ - Name: "varchar", - IsArray: true, - Metadata: [2]uint16{100, 0}, - }, - }, - }, - } - - data, err := original.MarshalBinary() - if err != nil { - t.Fatal(err) - } - - var decoded ProcedureReturn - err = decoded.UnmarshalBinary(data) - if err != nil { - t.Fatal(err) - } - - if decoded.IsTable != original.IsTable { - t.Errorf("got isTable %v, want %v", decoded.IsTable, original.IsTable) - } - if len(decoded.Fields) != len(original.Fields) { - t.Errorf("got %d fields, want %d", len(decoded.Fields), len(original.Fields)) - } - for i, field := range decoded.Fields { - if field.Name != original.Fields[i].Name { - t.Errorf("field %d: got name %s, want %s", i, field.Name, original.Fields[i].Name) - } - if field.Type.Name != original.Fields[i].Type.Name { - t.Errorf("field %d: got type %s, want %s", i, field.Type.Name, original.Fields[i].Type.Name) - } - } - }) - - t.Run("marshal and unmarshal scalar return", func(t *testing.T) { - original := ProcedureReturn{ - IsTable: false, - Fields: []*NamedType{ - { - Name: "result", - Type: &DataType{ - Name: "boolean", - IsArray: false, - Metadata: [2]uint16{0, 0}, - }, - }, - }, - } - - data, err := original.MarshalBinary() - if err != nil { - t.Fatal(err) - } - - var decoded ProcedureReturn - err = decoded.UnmarshalBinary(data) - if err != nil { - t.Fatal(err) - } - - if decoded.IsTable { - t.Error("got isTable true, want false") - } - if len(decoded.Fields) != 1 { - t.Errorf("got %d fields, want 1", len(decoded.Fields)) - } - }) - - t.Run("marshal and unmarshal empty fields", func(t *testing.T) { - original := ProcedureReturn{ - IsTable: false, - Fields: []*NamedType{}, - } - - data, err := original.MarshalBinary() - if err != nil { - t.Fatal(err) - } - - var decoded ProcedureReturn - err = decoded.UnmarshalBinary(data) - if err != nil { - t.Fatal(err) - } - - if 
len(decoded.Fields) != 0 { - t.Errorf("got %d fields, want 0", len(decoded.Fields)) - } - }) - - t.Run("unmarshal with insufficient data", func(t *testing.T) { - data := []byte{0, 0, 1} - var pr ProcedureReturn - err := pr.UnmarshalBinary(data) - if err == nil { - t.Error("expected error for insufficient data") - } - }) - - t.Run("unmarshal with invalid version", func(t *testing.T) { - data := []byte{0, 1, 0, 0, 0, 0, 0} - var pr ProcedureReturn - err := pr.UnmarshalBinary(data) - if err == nil { - t.Error("expected error for invalid version") - } - }) - - t.Run("verify serialize size calculation", func(t *testing.T) { - pr := ProcedureReturn{ - IsTable: true, - Fields: []*NamedType{ - { - Name: "field1", - Type: &DataType{ - Name: "int", - IsArray: false, - Metadata: [2]uint16{0, 0}, - }, - }, - }, - } - - data, err := pr.MarshalBinary() - if err != nil { - t.Fatal(err) - } - - if len(data) != pr.SerializeSize() { - t.Errorf("got size %d, want %d", len(data), pr.SerializeSize()) - } - }) -} - -func TestProcedureReturnBinaryMarshalingExtended(t *testing.T) { - t.Run("marshal and unmarshal with max fields", func(t *testing.T) { - original := ProcedureReturn{ - IsTable: true, - Fields: []*NamedType{ - { - Name: "field1", - Type: &DataType{ - Name: "int", - IsArray: false, - Metadata: [2]uint16{0, 0}, - }, - }, - { - Name: "field2", - Type: &DataType{ - Name: "varchar", - IsArray: true, - Metadata: [2]uint16{255, 0}, - }, - }, - { - Name: "field3", - Type: &DataType{ - Name: "decimal", - IsArray: false, - Metadata: [2]uint16{10, 2}, - }, - }, - }, - } - - data, err := original.MarshalBinary() - if err != nil { - t.Fatal(err) - } - - var decoded ProcedureReturn - err = decoded.UnmarshalBinary(data) - if err != nil { - t.Fatal(err) - } - - if len(decoded.Fields) != len(original.Fields) { - t.Errorf("got %d fields, want %d", len(decoded.Fields), len(original.Fields)) - } - for i, field := range decoded.Fields { - if !reflect.DeepEqual(field, original.Fields[i]) { - t.Errorf("field %d mismatch: got %v, want %v", i, field, original.Fields[i]) - } - } - }) - - t.Run("unmarshal with corrupted field data", func(t *testing.T) { - original := ProcedureReturn{ - IsTable: true, - Fields: []*NamedType{ - { - Name: "test", - Type: &DataType{ - Name: "int", - IsArray: false, - Metadata: [2]uint16{0, 0}, - }, - }, - }, - } - - data, err := original.MarshalBinary() - if err != nil { - t.Fatal(err) - } - - // Corrupt the field data - data = data[:len(data)-1] - - var decoded ProcedureReturn - err = decoded.UnmarshalBinary(data) - if err == nil { - t.Error("expected error for corrupted field data") - } - }) - - t.Run("marshal with nil field type", func(t *testing.T) { - pr := ProcedureReturn{ - IsTable: false, - Fields: []*NamedType{ - { - Name: "test", - Type: nil, - }, - }, - } - - _, err := pr.MarshalBinary() - if err == nil { - t.Error("expected error for nil field type") - } - }) - - t.Run("verify size calculation with unicode field names", func(t *testing.T) { - pr := ProcedureReturn{ - IsTable: true, - Fields: []*NamedType{ - { - Name: "测试字段", - Type: &DataType{ - Name: "text", - IsArray: false, - Metadata: [2]uint16{0, 0}, - }, - }, - }, - } - - data, err := pr.MarshalBinary() - if err != nil { - t.Fatal(err) - } - - if len(data) != pr.SerializeSize() { - t.Errorf("got size %d, want %d", len(data), pr.SerializeSize()) - } - }) -} - -func TestExtensionConfigBinaryMarshaling(t *testing.T) { - t.Run("marshal and unmarshal with special characters", func(t *testing.T) { - original := ExtensionConfig{ - Key: 
"config.🔑", - Value: "value.⚡", - } - - data, err := original.MarshalBinary() - if err != nil { - t.Fatal(err) - } - - var decoded ExtensionConfig - err = decoded.UnmarshalBinary(data) - if err != nil { - t.Fatal(err) - } - - if decoded.Key != original.Key { - t.Errorf("got key %s, want %s", decoded.Key, original.Key) - } - if decoded.Value != original.Value { - t.Errorf("got value %s, want %s", decoded.Value, original.Value) - } - }) - - t.Run("marshal and unmarshal with very long strings", func(t *testing.T) { - key := strings.Repeat("k", 1000) - value := strings.Repeat("v", 1000) - original := ExtensionConfig{ - Key: key, - Value: value, - } - - data, err := original.MarshalBinary() - if err != nil { - t.Fatal(err) - } - - var decoded ExtensionConfig - err = decoded.UnmarshalBinary(data) - if err != nil { - t.Fatal(err) - } - - if decoded.Key != original.Key { - t.Errorf("key length mismatch: got %d, want %d", len(decoded.Key), len(original.Key)) - } - if decoded.Value != original.Value { - t.Errorf("value length mismatch: got %d, want %d", len(decoded.Value), len(original.Value)) - } - }) - - t.Run("unmarshal with truncated value length", func(t *testing.T) { - original := ExtensionConfig{ - Key: "test", - Value: "value", - } - data, _ := original.MarshalBinary() - truncatedData := data[:len(data)-2] - - var decoded ExtensionConfig - err := decoded.UnmarshalBinary(truncatedData) - if err == nil { - t.Error("expected error for truncated value length") - } - }) - - t.Run("verify serialize size calculation", func(t *testing.T) { - configs := []ExtensionConfig{ - {Key: "", Value: ""}, - {Key: "a", Value: "b"}, - {Key: "key", Value: strings.Repeat("v", 100)}, - } - - for _, config := range configs { - data, err := config.MarshalBinary() - if err != nil { - t.Fatal(err) - } - - if len(data) != config.SerializeSize() { - t.Errorf("size mismatch for config %v: got %d, want %d", - config, len(data), config.SerializeSize()) - } - } - }) - - t.Run("unmarshal with invalid key length", func(t *testing.T) { - data := []byte{0, 0, 255, 255, 255, 255} - var config ExtensionConfig - err := config.UnmarshalBinary(data) - if err == nil { - t.Error("expected error for invalid key length") - } - }) - - t.Run("unmarshal with invalid value length", func(t *testing.T) { - data := []byte{0, 0, 0, 0, 0, 1, 'x', 255, 255, 255, 255} - var config ExtensionConfig - err := config.UnmarshalBinary(data) - if err == nil { - t.Error("expected error for invalid value length") - } - }) -} - -func TestExtensionBinaryMarshaling(t *testing.T) { - t.Run("marshal and unmarshal complex extension", func(t *testing.T) { - original := Extension{ - Name: "test_extension", - Initialization: []*ExtensionConfig{ - {Key: "key1", Value: "value1"}, - {Key: "key2", Value: "value2"}, - {Key: "key3", Value: "value3"}, - }, - Alias: "test_alias", - } - - data, err := original.MarshalBinary() - if err != nil { - t.Fatal(err) - } - - var decoded Extension - err = decoded.UnmarshalBinary(data) - if err != nil { - t.Fatal(err) - } - - if decoded.Name != original.Name { - t.Errorf("got name %s, want %s", decoded.Name, original.Name) - } - if decoded.Alias != original.Alias { - t.Errorf("got alias %s, want %s", decoded.Alias, original.Alias) - } - if len(decoded.Initialization) != len(original.Initialization) { - t.Errorf("got %d configs, want %d", len(decoded.Initialization), len(original.Initialization)) - } - for i, config := range decoded.Initialization { - if config.Key != original.Initialization[i].Key { - t.Errorf("config %d: got key %s, 
want %s", i, config.Key, original.Initialization[i].Key) - } - if config.Value != original.Initialization[i].Value { - t.Errorf("config %d: got value %s, want %s", i, config.Value, original.Initialization[i].Value) - } - } - }) - - t.Run("marshal and unmarshal empty extension", func(t *testing.T) { - original := Extension{ - Name: "", - Initialization: []*ExtensionConfig{}, - Alias: "", - } - - data, err := original.MarshalBinary() - if err != nil { - t.Fatal(err) - } - - var decoded Extension - err = decoded.UnmarshalBinary(data) - if err != nil { - t.Fatal(err) - } - - if decoded.Name != "" { - t.Error("expected empty name") - } - if len(decoded.Initialization) != 0 { - t.Error("expected empty initialization") - } - if decoded.Alias != "" { - t.Error("expected empty alias") - } - }) - - t.Run("unmarshal with invalid initialization data", func(t *testing.T) { - original := Extension{ - Name: "test", - Initialization: []*ExtensionConfig{ - {Key: "key", Value: "value"}, - }, - Alias: "alias", - } - - data, err := original.MarshalBinary() - if err != nil { - t.Fatal(err) - } - - // Corrupt the initialization data - corrupted := make([]byte, len(data)) - copy(corrupted, data) - offset := 6 + len(original.Name) + 4 - corrupted[offset] = 255 - - var decoded Extension - err = decoded.UnmarshalBinary(corrupted) - if err == nil { - t.Error("expected error for corrupted initialization data") - } - }) - - t.Run("verify serialize size with large initialization", func(t *testing.T) { - configs := make([]*ExtensionConfig, 100) - for i := range configs { - configs[i] = &ExtensionConfig{ - Key: fmt.Sprintf("key%d", i), - Value: fmt.Sprintf("value%d", i), - } - } - - ext := Extension{ - Name: "large_test", - Initialization: configs, - Alias: "large_alias", - } - - data, err := ext.MarshalBinary() - if err != nil { - t.Fatal(err) - } - - if len(data) != ext.SerializeSize() { - t.Errorf("got size %d, want %d", len(data), ext.SerializeSize()) - } - }) - - t.Run("unmarshal with truncated alias length", func(t *testing.T) { - original := Extension{ - Name: "test", - Initialization: []*ExtensionConfig{}, - Alias: "alias", - } - - data, err := original.MarshalBinary() - if err != nil { - t.Fatal(err) - } - - truncated := data[:len(data)-2] - var decoded Extension - err = decoded.UnmarshalBinary(truncated) - if err == nil { - t.Error("expected error for truncated alias length") - } - }) - - t.Run("unmarshal with invalid version", func(t *testing.T) { - data := []byte{0, 1, 0, 0, 0, 0} - var ext Extension - err := ext.UnmarshalBinary(data) - if err == nil { - t.Error("expected error for invalid version") - } - }) -} - -func TestForeignProcedureBinaryMarshaling(t *testing.T) { - t.Run("marshal and unmarshal complex foreign procedure", func(t *testing.T) { - original := ForeignProcedure{ - Name: "complex_proc", - Parameters: []*DataType{ - { - Name: "param1", - IsArray: true, - Metadata: [2]uint16{10, 2}, - }, - { - Name: "param2", - IsArray: false, - Metadata: [2]uint16{0, 0}, - }, - }, - Returns: &ProcedureReturn{ - IsTable: true, - Fields: []*NamedType{ - { - Name: "result", - Type: &DataType{ - Name: "int", - IsArray: false, - Metadata: [2]uint16{4, 0}, - }, - }, - }, - }, - } - - data, err := original.MarshalBinary() - if err != nil { - t.Fatal(err) - } - - var decoded ForeignProcedure - err = decoded.UnmarshalBinary(data) - if err != nil { - t.Fatal(err) - } - - if decoded.Name != original.Name { - t.Errorf("got name %s, want %s", decoded.Name, original.Name) - } - if len(decoded.Parameters) != 
len(original.Parameters) { - t.Errorf("got %d parameters, want %d", len(decoded.Parameters), len(original.Parameters)) - } - if !reflect.DeepEqual(decoded.Returns, original.Returns) { - t.Errorf("got returns %v, want %v", decoded.Returns, original.Returns) - } - }) - - t.Run("marshal and unmarshal with no parameters", func(t *testing.T) { - original := ForeignProcedure{ - Name: "no_params", - Parameters: []*DataType{}, - Returns: &ProcedureReturn{ - IsTable: false, - Fields: []*NamedType{ - { - Name: "result", - Type: &DataType{ - Name: "bool", - IsArray: false, - Metadata: [2]uint16{0, 0}, - }, - }, - }, - }, - } - - data, err := original.MarshalBinary() - if err != nil { - t.Fatal(err) - } - - var decoded ForeignProcedure - err = decoded.UnmarshalBinary(data) - if err != nil { - t.Fatal(err) - } - - if len(decoded.Parameters) != 0 { - t.Errorf("got %d parameters, want 0", len(decoded.Parameters)) - } - }) - - t.Run("unmarshal with invalid parameter data", func(t *testing.T) { - original := ForeignProcedure{ - Name: "test", - Parameters: []*DataType{ - { - Name: "param", - IsArray: false, - Metadata: [2]uint16{0, 0}, - }, - }, - Returns: &ProcedureReturn{ - IsTable: false, - Fields: []*NamedType{ - { - Name: "result", - Type: &DataType{ - Name: "int", - IsArray: false, - Metadata: [2]uint16{0, 0}, - }, - }, - }, - }, - } - - data, _ := original.MarshalBinary() - corrupted := make([]byte, len(data)) - copy(corrupted, data) - offset := 10 + len(original.Name) - corrupted[offset] = 255 - - var decoded ForeignProcedure - err := decoded.UnmarshalBinary(corrupted) - if err == nil { - t.Error("expected error for corrupted parameter data") - } - }) - - t.Run("verify serialize size calculation", func(t *testing.T) { - fp := ForeignProcedure{ - Name: "size_test", - Parameters: []*DataType{ - { - Name: "param", - IsArray: true, - Metadata: [2]uint16{1, 1}, - }, - }, - Returns: &ProcedureReturn{ - IsTable: true, - Fields: []*NamedType{ - { - Name: "result", - Type: &DataType{ - Name: "varchar", - IsArray: false, - Metadata: [2]uint16{100, 0}, - }, - }, - }, - }, - } - - data, err := fp.MarshalBinary() - if err != nil { - t.Fatal(err) - } - - if len(data) != fp.SerializeSize() { - t.Errorf("got size %d, want %d", len(data), fp.SerializeSize()) - } - }) - - t.Run("unmarshal with nil returns", func(t *testing.T) { - fp := ForeignProcedure{ - Name: "test", - Parameters: []*DataType{ - { - Name: "param", - IsArray: false, - Metadata: [2]uint16{0, 0}, - }, - }, - Returns: nil, - } - - data, err := fp.MarshalBinary() - if err != nil { - t.Fatal(err) - } - - var decoded ForeignProcedure - err = decoded.UnmarshalBinary(data) - if err != nil { - t.Fatal(err) - } - }) - - t.Run("unmarshal with invalid version", func(t *testing.T) { - data := []byte{0, 1, 0, 0, 0, 0} - var fp ForeignProcedure - err := fp.UnmarshalBinary(data) - if err == nil { - t.Error("expected error for invalid version") - } - }) -} - -func TestSchemaBinaryMarshaling(t *testing.T) { - t.Run("marshal and unmarshal complex schema", func(t *testing.T) { - original := Schema{ - Name: "test_schema", - Owner: []byte{0x1, 0x2, 0x3}, - Extensions: []*Extension{ - { - Name: "ext1", - Initialization: []*ExtensionConfig{ - {Key: "k1", Value: "v1"}, - }, - Alias: "e1", - }, - }, - Tables: []*Table{ - { - Name: "table1", - Columns: []*Column{ - { - Name: "col1", - Type: &DataType{ - Name: "int", - IsArray: false, - Metadata: [2]uint16{4, 0}, - }, - }, - }, - }, - }, - Actions: []*Action{ - { - Name: "action1", - Annotations: []string{"@test"}, - 
Parameters: []string{"p1"}, - Public: true, - Body: "body", - }, - }, - Procedures: []*Procedure{ - { - Name: "proc1", - Parameters: []*ProcedureParameter{ - { - Name: "param1", - Type: &DataType{ - Name: "text", - IsArray: false, - Metadata: [2]uint16{0, 0}, - }, - }, - }, - }, - }, - ForeignProcedures: []*ForeignProcedure{ - { - Name: "fp1", - Parameters: []*DataType{ - { - Name: "param1", - IsArray: false, - Metadata: [2]uint16{0, 0}, - }, - }, - Returns: &ProcedureReturn{ - IsTable: false, - Fields: []*NamedType{ - { - Name: "result", - Type: &DataType{ - Name: "bool", - IsArray: false, - Metadata: [2]uint16{0, 0}, - }, - }, - }, - }, - }, - }, - } - - data, err := original.MarshalBinary() - if err != nil { - t.Fatal(err) - } - - var decoded Schema - err = decoded.UnmarshalBinary(data) - if err != nil { - t.Fatal(err) - } - - if decoded.Name != original.Name { - t.Errorf("got name %s, want %s", decoded.Name, original.Name) - } - if !reflect.DeepEqual(decoded.Owner, original.Owner) { - t.Errorf("got owner %v, want %v", decoded.Owner, original.Owner) - } - if len(decoded.Extensions) != len(original.Extensions) { - t.Errorf("got %d extensions, want %d", len(decoded.Extensions), len(original.Extensions)) - } - if len(decoded.Tables) != len(original.Tables) { - t.Errorf("got %d tables, want %d", len(decoded.Tables), len(original.Tables)) - } - if len(decoded.Actions) != len(original.Actions) { - t.Errorf("got %d actions, want %d", len(decoded.Actions), len(original.Actions)) - } - if len(decoded.Procedures) != len(original.Procedures) { - t.Errorf("got %d procedures, want %d", len(decoded.Procedures), len(original.Procedures)) - } - if len(decoded.ForeignProcedures) != len(original.ForeignProcedures) { - t.Errorf("got %d foreign procedures, want %d", len(decoded.ForeignProcedures), len(original.ForeignProcedures)) - } - }) - - t.Run("marshal and unmarshal empty schema", func(t *testing.T) { - original := Schema{ - Name: "", - Owner: []byte{}, - Extensions: []*Extension{}, - Tables: []*Table{}, - Actions: []*Action{}, - Procedures: []*Procedure{}, - ForeignProcedures: []*ForeignProcedure{}, - } - - data, err := original.MarshalBinary() - if err != nil { - t.Fatal(err) - } - - var decoded Schema - err = decoded.UnmarshalBinary(data) - if err != nil { - t.Fatal(err) - } - - if decoded.Name != "" { - t.Error("expected empty name") - } - if len(decoded.Owner) != 0 { - t.Error("expected empty owner") - } - if len(decoded.Extensions) != 0 { - t.Error("expected empty extensions") - } - if len(decoded.Tables) != 0 { - t.Error("expected empty tables") - } - if len(decoded.Actions) != 0 { - t.Error("expected empty actions") - } - if len(decoded.Procedures) != 0 { - t.Error("expected empty procedures") - } - if len(decoded.ForeignProcedures) != 0 { - t.Error("expected empty foreign procedures") - } - }) - - t.Run("unmarshal with insufficient data", func(t *testing.T) { - data := []byte{0, 0} - var s Schema - err := s.UnmarshalBinary(data) - if err == nil { - t.Error("expected error for insufficient data") - } - }) - - t.Run("unmarshal with invalid version", func(t *testing.T) { - data := []byte{0, 1, 0, 0, 0, 0} - var s Schema - err := s.UnmarshalBinary(data) - if err == nil { - t.Error("expected error for invalid version") - } - }) - - t.Run("unmarshal with invalid name length", func(t *testing.T) { - data := []byte{0, 0, 255, 255, 255, 255} - var s Schema - err := s.UnmarshalBinary(data) - if err == nil { - t.Error("expected error for invalid name length") - } - }) - - t.Run("verify serialize size 
calculation", func(t *testing.T) { - schema := Schema{ - Name: "test", - Owner: []byte{0x1}, - Extensions: []*Extension{ - { - Name: "ext", - Alias: "e", - }, - }, - Tables: []*Table{ - { - Name: "table", - }, - }, - Actions: []*Action{ - { - Name: "action", - Body: "body", - }, - }, - Procedures: []*Procedure{ - { - Name: "proc", - }, - }, - ForeignProcedures: []*ForeignProcedure{ - { - Name: "fp", - Returns: &ProcedureReturn{ - IsTable: false, - Fields: []*NamedType{ - { - Name: "result", - Type: &DataType{ - Name: "void", - }, - }, - }, - }, - }, - }, - } - - data, err := schema.MarshalBinary() - if err != nil { - t.Fatal(err) - } - - if len(data) != schema.SerializeSize() { - t.Errorf("got size %d, want %d", len(data), schema.SerializeSize()) - } - }) -} - -func TestProcedureBinaryMarshaling(t *testing.T) { - t.Run("marshal and unmarshal with all fields populated", func(t *testing.T) { - original := Procedure{ - Name: "test_procedure", - Parameters: []*ProcedureParameter{ - { - Name: "param1", - Type: &DataType{ - Name: "int", - IsArray: false, - Metadata: [2]uint16{4, 0}, - }, - }, - { - Name: "param2", - Type: &DataType{ - Name: "varchar", - IsArray: true, - Metadata: [2]uint16{255, 0}, - }, - }, - }, - Public: true, - Modifiers: []Modifier{ - "IMMUTABLE", - "STRICT", - }, - Body: "SELECT * FROM table WHERE id = $1", - Returns: &ProcedureReturn{ - IsTable: true, - Fields: []*NamedType{ - { - Name: "id", - Type: &DataType{ - Name: "int", - IsArray: false, - Metadata: [2]uint16{4, 0}, - }, - }, - }, - }, - Annotations: []string{ - "@deprecated", - "@returns(int)", - }, - } - - data, err := original.MarshalBinary() - if err != nil { - t.Fatal(err) - } - - var decoded Procedure - err = decoded.UnmarshalBinary(data) - if err != nil { - t.Fatal(err) - } - - if !reflect.DeepEqual(decoded, original) { - t.Errorf("decoded procedure does not match original") - } - }) - - t.Run("marshal and unmarshal with no returns", func(t *testing.T) { - original := Procedure{ - Name: "void_proc", - Parameters: []*ProcedureParameter{ - { - Name: "param", - Type: &DataType{ - Name: "text", - IsArray: false, - Metadata: [2]uint16{0, 0}, - }, - }, - }, - Public: true, - Modifiers: []Modifier{"VOLATILE"}, - Body: "INSERT INTO logs(message) VALUES ($1)", - Returns: nil, - Annotations: []string{}, - } - - data, err := original.MarshalBinary() - if err != nil { - t.Fatal(err) - } - - var decoded Procedure - err = decoded.UnmarshalBinary(data) - if err != nil { - t.Fatal(err) - } - - if decoded.Returns != nil { - t.Error("expected nil returns") - } - if !reflect.DeepEqual(decoded, original) { - t.Errorf("decoded procedure does not match original") - } - }) - - t.Run("unmarshal with truncated modifier length", func(t *testing.T) { - original := Procedure{ - Name: "test", - Public: false, - Modifiers: []Modifier{"TEST"}, - } - - data, err := original.MarshalBinary() - if err != nil { - t.Fatal(err) - } - - truncatedData := data[:len(data)-2] - var decoded Procedure - err = decoded.UnmarshalBinary(truncatedData) - if err == nil { - t.Error("expected error for truncated modifier length") - } - }) - - t.Run("verify serialize size with unicode characters", func(t *testing.T) { - proc := Procedure{ - Name: "测试过程", - Parameters: []*ProcedureParameter{ - { - Name: "参数", - Type: &DataType{ - Name: "text", - IsArray: false, - Metadata: [2]uint16{0, 0}, - }, - }, - }, - Public: true, - Modifiers: []Modifier{"异步"}, - Body: "返回 TRUE;", - Returns: nil, - Annotations: []string{"@测试"}, - } - - data, err := proc.MarshalBinary() - if 
err != nil { - t.Fatal(err) - } - - if len(data) != proc.SerializeSize() { - t.Errorf("got size %d, want %d", len(data), proc.SerializeSize()) - } - }) - - t.Run("unmarshal with invalid parameter data", func(t *testing.T) { - data := []byte{ - 0, 0, // version - 0, 0, 0, 4, 't', 'e', 's', 't', // name - 0, 0, 0, 1, // parameter count - 255, 255, // invalid parameter data - } - var proc Procedure - err := proc.UnmarshalBinary(data) - if err == nil { - t.Error("expected error for invalid parameter data") - } - }) - - t.Run("unmarshal with invalid return data", func(t *testing.T) { - original := Procedure{ - Name: "test", - Returns: &ProcedureReturn{ - IsTable: false, - Fields: []*NamedType{ - { - Name: "result", - Type: &DataType{ - Name: "int", - IsArray: false, - Metadata: [2]uint16{0, 0}, - }, - }, - }, - }, - } - - data, err := original.MarshalBinary() - if err != nil { - t.Fatal(err) - } - - // Corrupt return data - offset := 6 + len(original.Name) + 4 + 1 + 4 + 4 - data[offset] = 255 - - var decoded Procedure - err = decoded.UnmarshalBinary(data) - if err == nil { - t.Error("expected error for invalid return data") - } - }) -} diff --git a/core/types/transaction_test.go b/core/types/transaction_test.go index 548c0b15f..e9164873a 100644 --- a/core/types/transaction_test.go +++ b/core/types/transaction_test.go @@ -797,7 +797,7 @@ func Test_TransactionBodyJSONEdgeCases(t *testing.T) { txB := TransactionBody{ Description: "test", Payload: []byte("test payload"), - PayloadType: PayloadTypeDeploySchema, + PayloadType: PayloadTypeExecute, Fee: big.NewInt(0), Nonce: 1, ChainID: "test-chain", @@ -817,7 +817,7 @@ func Test_TransactionBodyJSONEdgeCases(t *testing.T) { txB := TransactionBody{ Description: "test", Payload: []byte("test payload"), - PayloadType: PayloadTypeDeploySchema, + PayloadType: PayloadTypeExecute, Fee: largeFee, Nonce: 1, ChainID: "test-chain", @@ -883,7 +883,7 @@ func Test_TransactionBodyJSONEdgeCases(t *testing.T) { txB := TransactionBody{ Description: "test", Payload: []byte("test payload"), - PayloadType: PayloadTypeDeploySchema, + PayloadType: PayloadTypeExecute, Fee: nil, Nonce: 1, ChainID: "test-chain", diff --git a/extensions/precompiles/actions.go b/extensions/precompiles/actions.go index 742ccea2f..512b953c6 100644 --- a/extensions/precompiles/actions.go +++ b/extensions/precompiles/actions.go @@ -8,18 +8,17 @@ import ( "strings" "github.com/kwilteam/kwil-db/common" - "github.com/kwilteam/kwil-db/core/types" "github.com/kwilteam/kwil-db/node/types/sql" ) -// Initializer initializes a new instance of a precompile. -// It is called when a Kuneiform schema is deployed that calls -// "use {key: "value"} as ". It is also called -// when the node starts up, if a database is already deployed that -// uses the precompile. The key/value pairs are passed as the -// metadata parameter. When initialize is called, the dataset is not -// yet accessible. -type Initializer func(ctx *DeploymentContext, service *common.Service, metadata map[string]string) (Instance, error) +// Initializer is a function that creates a new instance of an extension. +// It is called: +// - Each time an extension is instantiated using `USE ... AS ...` +// - Once for every instantiated extension on node startup +// It should be used for reading values into memory, creating +// connections, and other setup that should only be done once per +// extension instance. 
+type Initializer func(ctx context.Context, service *common.Service, db sql.DB, metadata map[string]any) (Instance, error) // Instance is a named initialized instance of a precompile. It is // returned from the precompile initialization, as specified by the @@ -27,98 +26,152 @@ type Initializer func(ctx *DeploymentContext, service *common.Service, metadata // dataset, and a single dataset can have multiple instances of the // same precompile. type Instance interface { - // Call executes the requested method of the precompile. It is up - // to the instance implementation to determine if a method is - // valid, and to subsequently decode the arguments. The arguments - // passed in as args, as well as returned, are scalar values. - Call(scoper *ProcedureContext, app *common.App, method string, inputs []any) ([]any, error) + // OnUse is called when a `USE ... AS ...` statement is executed. + // It is only called once per "USE" function, and is called after the + // initializer. + // It should be used for setting up state such as tables, indexes, + // and other data structures that are part of the application state. + OnUse(ctx *common.TxContext, app *common.App) error + // Methods returns the methods that are available on the instance. + Methods() []*ExportedMethod + // OnUnuse is called when a `UNUSE ...` statement is executed. + OnUnuse(ctx *common.TxContext, app *common.App) error } -// DeploymentContext is the context for a dataset deployment -// transaction. -type DeploymentContext struct { - Ctx context.Context - Schema *types.Schema +// ConcreteInstance is a concrete implementation of an extension instance. +type PrecompileExtension[T any] struct { + // Initialize is the function that creates a new instance of the extension. + Initialize func(ctx context.Context, service *common.Service, db sql.DB, metadata map[string]any) (*T, error) + // OnUse is called when a `USE ... AS ...` statement is executed + OnUse func(ctx *common.TxContext, app *common.App, t *T) error + // Methods is a map of method names to method implementations. + Methods []*Method[T] + // OnUnuse is called when a `UNUSE ...` statement is executed + OnUnuse func(ctx *common.TxContext, app *common.App, t *T) error } -// ProcedureContext is the context for a procedure and action execution. -type ProcedureContext struct { - // TxCtx is the transaction context of the current execution. - TxCtx *common.TxContext - // Signer is the address or public key of the caller. - - // values are the variables that are available to the execution. - values map[string]any // note: bind $args or @caller - - // DBID is the database identifier for the current scope. - // if calling a precompile instance instead of a procedure, it - // will be the last used DBID. - DBID string - - // Procedure is the Procedure identifier for the current scope. - // if calling a precompile instance instead of a Procedure, it - // will be the last used Procedure. - Procedure string - // Result is the result of the most recent SQL query. - Result *sql.ResultSet - - // StackDepth tracks the current depth of the procedure call stack. It is - // incremented each time a procedure calls another procedure. - StackDepth int - // UsedGas is the amount of gas used in the current execution. - UsedGas uint64 +// Export exports the extension to a form that does not rely on generics, allowing the extension to be consumed by callers without forcing +// the callers to know the generic type. 
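[Editor's illustration, not part of this patch: a minimal sketch of how the pieces declared above fit together, using a hypothetical "counter" extension. It assumes the imports already present in this file (context, common, sql); the Export implementation that follows converts such a value into the non-generic Initializer.]

    // Hypothetical example only; all names here are illustrative.
    var counterExt = PrecompileExtension[int64]{
        // Initialize builds the per-instance state (*T) once per instance.
        Initialize: func(ctx context.Context, service *common.Service, db sql.DB, metadata map[string]any) (*int64, error) {
            start := int64(0)
            return &start, nil
        },
        // OnUse runs once when `USE ... AS ...` is executed; set up application
        // state (tables, indexes, ...) here.
        OnUse: func(ctx *common.TxContext, app *common.App, t *int64) error {
            return nil
        },
        Methods: []*Method[int64]{
            {
                Name:            "increment",
                AccessModifiers: []Modifier{PUBLIC},
                Call: func(ctx *common.TxContext, app *common.App, inputs []any, resultFn func([]any) error, t *int64) error {
                    *t += 1
                    return resultFn([]any{*t})
                },
            },
        },
    }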
+func (p *PrecompileExtension[T]) Export() Initializer { + return func(ctx context.Context, service *common.Service, db sql.DB, metadata map[string]any) (Instance, error) { + var t *T + if p.Initialize == nil { + t = new(T) + } else { + t2, err := p.Initialize(ctx, service, db, metadata) + if err != nil { + return nil, err + } + + t = t2 + } + + methods := make([]*ExportedMethod, len(p.Methods)) + for i, method := range p.Methods { + methods[i] = method.export(t) + } + + return &ExportedExtension{ + onUse: func(ctx *common.TxContext, app *common.App) error { + if p.OnUse == nil { + return nil + } + return p.OnUse(ctx, app, t) + }, + methods: methods, + onUnuse: func(ctx *common.TxContext, app *common.App) error { + if p.OnUnuse == nil { + return nil + } + return p.OnUnuse(ctx, app, t) + }, + }, nil + } } -// SetValue sets a value in the scope. -// Values are case-insensitive. -// If a value for the key already exists, it will be overwritten. -func (p *ProcedureContext) SetValue(key string, value any) { - if p.values == nil { - p.values = make(map[string]any) - } - p.values[strings.ToLower(key)] = value +type ExportedExtension struct { + methods []*ExportedMethod + onUse func(ctx *common.TxContext, app *common.App) error + onUnuse func(ctx *common.TxContext, app *common.App) error } -// Values copies the values from the scope into a map. It will also -// include contextual variables, such as the caller. If a context -// variable has the same name as a scope variable, the scope variable -// will be overwritten. -func (p *ProcedureContext) Values() map[string]any { - if p.values == nil { - p.values = make(map[string]any) - } +func (e *ExportedExtension) OnUse(ctx *common.TxContext, app *common.App) error { + return e.onUse(ctx, app) +} - values := make(map[string]any) - for k, v := range p.values { - values[strings.ToLower(k)] = v - } +func (e *ExportedExtension) Methods() []*ExportedMethod { + return e.methods +} - // set environment variables - values["@caller"] = p.TxCtx.Caller - values["@txid"] = p.TxCtx.TxID - values["@signer"] = p.TxCtx.Signer - values["@height"] = p.TxCtx.BlockContext.Height - values["@foreign_caller"] = p.DBID - values["@block_timestamp"] = p.TxCtx.BlockContext.Timestamp - values["@authenticator"] = p.TxCtx.Authenticator +func (e *ExportedExtension) OnUnuse(ctx *common.TxContext, app *common.App) error { + return e.onUnuse(ctx, app) +} - return values +type Method[T any] struct { + // Name is the name of the method. + // It is case-insensitive, and should be unique within the extension. + Name string + // AccessModifiers is a list of access modifiers for the method. + // It must have exactly one of PUBLIC, PRIVATE, or SYSTEM, + // and can have any number of other modifiers. + AccessModifiers []Modifier + // Call is the function that is called when the method is invoked. + Call func(ctx *common.TxContext, app *common.App, inputs []any, resultFn func([]any) error, t *T) error + // ReturnColumns is a list of the returned column names. It is optional. If it is set, its length must be + // equal to the length of the returned values passed to the resultFn. If it is not set, the returned column + // names will be generated based on their position in the returned values. + ReturnColumns []string } -// NewScope creates a new procedure context for a child procedure. -// It will not inherit the values or last result from the parent. -// It will inherit the dbid, procedure, and stack depth from the parent. 
-func (p *ProcedureContext) NewScope() *ProcedureContext { - return &ProcedureContext{ - TxCtx: p.TxCtx, - values: make(map[string]any), - DBID: p.DBID, - Procedure: p.Procedure, - StackDepth: p.StackDepth, - UsedGas: p.UsedGas, +// Modifier modifies the access to a procedure. +type Modifier string + +const ( + // PUBLIC means that the action is public. + PUBLIC Modifier = "PUBLIC" + // PRIVATE means that the action is private. + PRIVATE Modifier = "PRIVATE" + // SYSTEM means that the action can only be called by the system. + SYSTEM Modifier = "SYSTEM" + // View means that an action does not modify the database. + VIEW Modifier = "VIEW" + + // Owner requires that the caller is the owner of the database. + OWNER Modifier = "OWNER" +) + +type Modifiers []Modifier + +func (m Modifiers) Has(mod Modifier) bool { + for _, mod2 := range m { + if mod2 == mod { + return true + } + } + return false +} + +// export exports the method to a form that does not rely on generics, allowing the method to be consumed by callers without forcing +// the callers to know the generic type. +func (m *Method[T]) export(t *T) *ExportedMethod { + return &ExportedMethod{ + Name: m.Name, + AccessModifiers: m.AccessModifiers, + Call: func(ctx *common.TxContext, app *common.App, inputs []any, resultFn func([]any) error) error { + return m.Call(ctx, app, inputs, resultFn, t) + }, } } +type ExportedMethod struct { + Name string + AccessModifiers []Modifier + Call func(ctx *common.TxContext, app *common.App, inputs []any, resultFn func([]any) error) error + // ReturnColumns is a list of the returned column names. It is optional. If it is set, its length must be + // equal to the length of the returned values passed to the resultFn. + ReturnColumns []string +} + var registeredPrecompiles = make(map[string]Initializer) func RegisteredPrecompiles() map[string]Initializer { @@ -127,12 +180,37 @@ func RegisteredPrecompiles() map[string]Initializer { // RegisterPrecompile registers a precompile extension with the // engine. -func RegisterPrecompile(name string, ext Initializer) error { +func RegisterPrecompile[T any](name string, ext PrecompileExtension[T]) error { name = strings.ToLower(name) if _, ok := registeredPrecompiles[name]; ok { return fmt.Errorf("precompile of same name already registered:%s ", name) } - registeredPrecompiles[name] = ext + methodNames := make(map[string]struct{}) + for _, method := range ext.Methods { + lowerName := strings.ToLower(method.Name) + if _, ok := methodNames[lowerName]; ok { + return fmt.Errorf("duplicate method %s", lowerName) + } + + methodNames[lowerName] = struct{}{} + + if len(method.AccessModifiers) == 0 { + return fmt.Errorf("method %s has no access modifiers", method.Name) + } + + found := 0 + for _, mod := range method.AccessModifiers { + if mod == PUBLIC || mod == PRIVATE || mod == SYSTEM { + found++ + } + } + + if found != 1 { + return fmt.Errorf("method %s must have exactly one of PUBLIC, PRIVATE, or SYSTEM", method.Name) + } + } + + registeredPrecompiles[name] = ext.Export() return nil } diff --git a/extensions/precompiles/adhoc.go b/extensions/precompiles/adhoc.go deleted file mode 100644 index a337f7515..000000000 --- a/extensions/precompiles/adhoc.go +++ /dev/null @@ -1,76 +0,0 @@ -//go:build precompiles_adhoc || ext_test - -package precompiles - -import ( - "fmt" - "strings" - - "github.com/kwilteam/kwil-db/common" -) - -/* - This file contains an extension that allows Kwil users to execute - ad-hoc SQL statements. 
This works both in regular and view - actions, however view actions will not be able to modify data. - It is expected that query strings are passed as arguments in the - action. - - It has one method, "Execute", which takes a string as an argument. - When executed, it will execute the query against the dataset, and - return the result. If called during a blockchain tx, the query - can modify the underlying dataset. - - While it is mostly meant to be an example, it likely has some - practical use cases. Some examples include: - - a user might want to give users ad-hoc read access based - on some access control / authentication mechanism. -*/ - -const adhocName = "adhoc" - -func init() { - err := RegisterPrecompile(adhocName, InitializeAdhoc) - if err != nil { - panic(err) - } -} - -// Takes no initialization parameters. -func InitializeAdhoc(ctx *DeploymentContext, service *common.Service, metadata map[string]string) (Instance, error) { - return &adhocExtension{}, nil -} - -// adhocExtension is an extension that is not registered with the -// extension registry. It allows execution of ad-hoc SQL statements -// in the engine. It will return results to the engine. -type adhocExtension struct{} - -// Has one method: Call. It takes a string as an argument, which is -// the ad-hoc SQL statement to execute. -func (adhocExtension) Call(scope *ProcedureContext, app *common.App, method string, inputs []any) ([]any, error) { - if len(inputs) != 1 { - return nil, fmt.Errorf("adhoc: expected 1 string argument, got %d", len(inputs)) - } - stmt, ok := inputs[0].(string) - if !ok { - return nil, fmt.Errorf("adhoc: expected string argument, got %T", inputs[0]) - } - - // we will pass the scope.Values() as the arguments. This makes - // it possible to use @caller, etc in the ad-hoc statement. - if strings.ToLower(method) != "execute" { - return nil, fmt.Errorf(`adhoc: unknown method "%s"`, method) - } - - res, err := app.Engine.Execute(scope.TxCtx, app.DB, scope.DBID, stmt, scope.Values()) - if err != nil { - return nil, err - } - - // We set the result, so that if an ad-hoc read is executed in a - // view action, the result will be returned to the engine. - scope.Result = res - - return nil, nil -} diff --git a/extensions/precompiles/math.go b/extensions/precompiles/math.go index cfc7236ac..2d25d75bf 100644 --- a/extensions/precompiles/math.go +++ b/extensions/precompiles/math.go @@ -3,149 +3,125 @@ package precompiles import ( + "context" "fmt" "math/big" "github.com/kwilteam/kwil-db/common" + "github.com/kwilteam/kwil-db/node/types/sql" ) func init() { - err := RegisterPrecompile("math-precompile", InitializeMath) + err := RegisterPrecompile("math-precompile", PrecompileExtension[MathExtension]{ + Initialize: func(ctx context.Context, service *common.Service, db sql.DB, metadata map[string]any) (*MathExtension, error) { + _, ok := metadata["round"] + if !ok { + metadata["round"] = "up" + } + + roundVal := metadata["round"] + if roundVal != "up" && roundVal != "down" { + return nil, fmt.Errorf("round must be either 'up' or 'down'. 
default is 'up'") + } + + roundUp := roundVal == "up" + + return &MathExtension{roundUp: roundUp}, nil + }, + Methods: []*Method[MathExtension]{ + { + Name: "add", + AccessModifiers: []Modifier{SYSTEM}, + Call: func(ctx *common.TxContext, app *common.App, inputs []any, resultFn func([]any) error, t *MathExtension) error { + a, b, err := getArgs(inputs) + if err != nil { + return err + } + + return resultFn([]any{a + b}) + }, + }, + { + Name: "subtract", + AccessModifiers: []Modifier{SYSTEM}, + Call: func(ctx *common.TxContext, app *common.App, inputs []any, resultFn func([]any) error, t *MathExtension) error { + a, b, err := getArgs(inputs) + if err != nil { + return err + } + + return resultFn([]any{a - b}) + }, + }, + { + Name: "multiply", + AccessModifiers: []Modifier{SYSTEM}, + Call: func(ctx *common.TxContext, app *common.App, inputs []any, resultFn func([]any) error, t *MathExtension) error { + a, b, err := getArgs(inputs) + if err != nil { + return err + } + + return resultFn([]any{a * b}) + }, + }, + { + Name: "divide", + AccessModifiers: []Modifier{SYSTEM}, + Call: func(ctx *common.TxContext, app *common.App, inputs []any, resultFn func([]any) error, t *MathExtension) error { + a, b, err := getArgs(inputs) + if err != nil { + return err + } + + bigVal1 := newBigFloat(float64(a)) + + bigVal2 := newBigFloat(float64(b)) + + result := new(big.Float).Quo(bigVal1, bigVal2) + + var IntResult *big.Int + var results []any + if t.roundUp { + IntResult = roundUp(result) + } else { + IntResult = roundDown(result) + } + results = append(results, IntResult) + return resultFn(results) + }, + }, + }, + }) if err != nil { panic(err) } } -type MathExtension struct { - roundUp bool // if true, round up. If false, round down. -} - -// this initialize function checks if round is set. If not, it sets it to "up" -func InitializeMath(ctx *DeploymentContext, service *common.Service, metadata map[string]string) (Instance, error) { - _, ok := metadata["round"] - if !ok { - metadata["round"] = "up" - } - - roundVal := metadata["round"] - if roundVal != "up" && roundVal != "down" { - return nil, fmt.Errorf("round must be either 'up' or 'down'. default is 'up'") - } - - roundUp := roundVal == "up" - - return &MathExtension{roundUp: roundUp}, nil -} - -func (e *MathExtension) Call(ctx *ProcedureContext, app *common.App, method string, inputs []any) ([]any, error) { - switch method { - case "add": - return e.add(inputs...) - case "subtract": - return e.subtract(inputs...) - case "multiply": - return e.multiply(inputs...) - case "divide": - return e.divide(inputs...) 
- default: - return nil, fmt.Errorf("method %s not found", method) - } -} - -// add takes two integers and returns their sum -func (e *MathExtension) add(values ...any) ([]any, error) { - if len(values) != 2 { - return nil, fmt.Errorf("expected 2 values for method Add, got %d", len(values)) - } - - val0Int, ok := values[0].(int) - if !ok { - return nil, fmt.Errorf("argument 1 is not an int") - } - - val1Int, ok := values[1].(int) - if !ok { - return nil, fmt.Errorf("argument 2 is not an int") - } - - var results []any - results = append(results, val0Int+val1Int) - return results, nil -} - -// subtract takes two integers and returns their difference -func (e *MathExtension) subtract(values ...any) ([]any, error) { - if len(values) != 2 { - return nil, fmt.Errorf("expected 2 values for method Add, got %d", len(values)) +// getArgs is a helper function that takes a slice of any and returns two integers and an error +func getArgs(args []any) (a, b int64, err error) { + if len(args) != 2 { + err = fmt.Errorf("expected 2 values, got %d", len(args)) + return } - val0Int, ok := values[0].(int) + a, ok := args[0].(int64) if !ok { - return nil, fmt.Errorf("argument 1 is not an int") + err = fmt.Errorf("argument 1 is not an int") + return } - val1Int, ok := values[1].(int) + b, ok = args[1].(int64) if !ok { - return nil, fmt.Errorf("argument 2 is not an int") + err = fmt.Errorf("argument 2 is not an int") + return } - var results []any - results = append(results, val0Int-val1Int) - return results, nil + return a, b, nil } -// multiply takes two integers and returns their product -func (e *MathExtension) multiply(values ...any) ([]any, error) { - if len(values) != 2 { - return nil, fmt.Errorf("expected 2 values for method Add, got %d", len(values)) - } - - val0Int, ok := values[0].(int) - if !ok { - return nil, fmt.Errorf("argument 1 is not an int") - } - - val1Int, ok := values[1].(int) - if !ok { - return nil, fmt.Errorf("argument 2 is not an int") - } - - var results []any - results = append(results, val0Int*val1Int) - return results, nil -} - -// divide takes two integers and returns their quotient rounded up or down depending on how the extension was initialized -func (e *MathExtension) divide(values ...any) ([]any, error) { - if len(values) != 2 { - return nil, fmt.Errorf("expected 2 values for method Divide, got %d", len(values)) - } - - val0Int, ok := values[0].(int) - if !ok { - return nil, fmt.Errorf("argument 1 is not an int") - } - - val1Int, ok := values[1].(int) - if !ok { - return nil, fmt.Errorf("argument 2 is not an int") - } - - bigVal1 := newBigFloat(float64(val0Int)) - - bigVal2 := newBigFloat(float64(val1Int)) - - result := new(big.Float).Quo(bigVal1, bigVal2) - - var IntResult *big.Int - var results []any - if e.roundUp { - IntResult = roundUp(result) - } else { - IntResult = roundDown(result) - } - results = append(results, IntResult) - return results, nil +type MathExtension struct { + roundUp bool // if true, round up. If false, round down. } // roundUp takes a big.Float and returns a new big.Float rounded up. 
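For reference, below is a minimal sketch of how a third-party extension could target the new generic registration API shown above (the same API the rewritten math precompile uses). The "greeter" package name, its state struct, and the "prefix" metadata key are hypothetical illustrations only; RegisterPrecompile, PrecompileExtension, Method, Modifier, and the PUBLIC/VIEW constants come from this patch, and registration in an init() mirrors how adhoc.go and math.go register themselves.

package greeter

import (
	"context"
	"fmt"

	"github.com/kwilteam/kwil-db/common"
	"github.com/kwilteam/kwil-db/extensions/precompiles"
	"github.com/kwilteam/kwil-db/node/types/sql"
)

// greeter holds per-deployment state produced by Initialize.
type greeter struct {
	prefix string
}

func init() {
	err := precompiles.RegisterPrecompile("greeter", precompiles.PrecompileExtension[greeter]{
		// Initialize runs when the extension is instantiated and may read metadata.
		Initialize: func(ctx context.Context, service *common.Service, db sql.DB, metadata map[string]any) (*greeter, error) {
			// "prefix" is a hypothetical metadata key used only for this sketch.
			prefix, _ := metadata["prefix"].(string)
			if prefix == "" {
				prefix = "hello"
			}
			return &greeter{prefix: prefix}, nil
		},
		Methods: []*precompiles.Method[greeter]{
			{
				Name: "greet",
				// RegisterPrecompile requires exactly one of PUBLIC, PRIVATE, or SYSTEM;
				// VIEW additionally marks the method as non-mutating.
				AccessModifiers: []precompiles.Modifier{precompiles.PUBLIC, precompiles.VIEW},
				Call: func(ctx *common.TxContext, app *common.App, inputs []any, resultFn func([]any) error, t *greeter) error {
					if len(inputs) != 1 {
						return fmt.Errorf("expected 1 argument, got %d", len(inputs))
					}
					name, ok := inputs[0].(string)
					if !ok {
						return fmt.Errorf("expected string argument, got %T", inputs[0])
					}
					// Rows are handed back to the engine through the resultFn callback.
					return resultFn([]any{t.prefix + ", " + name})
				},
			},
		},
	})
	if err != nil {
		panic(err)
	}
}

Note that, unlike the old Instance.Call interface, per-method access control is declared up front and validated at registration time rather than checked inside each method body.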
diff --git a/go.mod b/go.mod index dfbf43de1..51b922fb7 100644 --- a/go.mod +++ b/go.mod @@ -6,6 +6,7 @@ require ( github.com/dgraph-io/badger/v4 v4.5.0 github.com/ethereum/go-ethereum v1.14.12 github.com/go-chi/chi/v5 v5.2.0 + github.com/google/go-cmp v0.6.0 github.com/jackc/pglogrepl v0.0.0-20240307033717-828fbfe908e9 github.com/jackc/pgx/v5 v5.7.1 github.com/jpillora/backoff v1.0.0 @@ -18,7 +19,6 @@ require ( github.com/knadh/koanf/providers/structs v0.1.0 github.com/knadh/koanf/v2 v2.1.2 github.com/kwilteam/kwil-db/core v0.3.1-0.20241212163115-7353f2761884 - github.com/kwilteam/kwil-db/parse v0.3.1-0.20241212163115-7353f2761884 github.com/libp2p/go-libp2p v0.37.2 github.com/libp2p/go-libp2p-kad-dht v0.28.1 github.com/libp2p/go-libp2p-pubsub v0.12.0 @@ -38,7 +38,7 @@ require ( require ( github.com/Microsoft/go-winio v0.6.2 // indirect github.com/StackExchange/wmi v1.2.1 // indirect - github.com/antlr4-go/antlr/v4 v4.13.1 // indirect + github.com/antlr4-go/antlr/v4 v4.13.1 github.com/bits-and-blooms/bitset v1.13.0 // indirect github.com/chzyer/readline v1.5.1 // indirect github.com/cockroachdb/apd/v3 v3.2.1 // indirect @@ -84,6 +84,7 @@ require ( github.com/mitchellh/copystructure v1.2.0 // indirect github.com/mitchellh/reflectwalk v1.0.2 // indirect github.com/mmcloughlin/addchain v0.4.0 // indirect + github.com/pganalyze/pg_query_go/v5 v5.1.0 github.com/polydawn/refmt v0.89.0 // indirect github.com/rivo/uniseg v0.4.7 // indirect github.com/russross/blackfriday/v2 v2.1.0 // indirect diff --git a/go.sum b/go.sum index b31492360..687d23e16 100644 --- a/go.sum +++ b/go.sum @@ -182,6 +182,7 @@ github.com/golang/protobuf v1.4.0-rc.4.0.20200313231945-b860323f09d0/go.mod h1:W github.com/golang/protobuf v1.4.0/go.mod h1:jodUvKwWbYaEsadDk5Fwe5c77LiNKVO9IDvqG2KuDX0= github.com/golang/protobuf v1.4.1/go.mod h1:U8fpvMrcmy5pZrNK1lt4xCsGvpyWQ/VVv6QDs8UjoX8= github.com/golang/protobuf v1.4.3/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI= +github.com/golang/protobuf v1.5.0/go.mod h1:FsONVRAS9T7sI+LIUmWTfcYkHO4aIWwzhcaSAoJOfIk= github.com/golang/snappy v0.0.5-0.20220116011046-fa5810519dcb h1:PBC98N2aIaM3XXiurYmW7fx4GZkL8feAMVq7nEjURHk= github.com/golang/snappy v0.0.5-0.20220116011046-fa5810519dcb/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= github.com/google/btree v0.0.0-20180813153112-4030bb1f1f0c/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= @@ -194,6 +195,7 @@ github.com/google/go-cmp v0.4.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/ github.com/google/go-cmp v0.5.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.2/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.3/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI= github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= github.com/google/go-github v17.0.0+incompatible/go.mod h1:zLgOLi98H3fifZn+44m+umXrS52loVEgC2AApnigrVQ= @@ -323,8 +325,6 @@ github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= github.com/kwilteam/kwil-db/core v0.3.1-0.20241212163115-7353f2761884 h1:R8nARxaVrX5wlIIoB02NCcdLQmhwXamyWhw86VrCk1w= github.com/kwilteam/kwil-db/core v0.3.1-0.20241212163115-7353f2761884/go.mod h1:v3YA0w26s82aSVWoZMFFxFD39Bqvy/8eHKLEZhn6bBQ= 
-github.com/kwilteam/kwil-db/parse v0.3.1-0.20241212163115-7353f2761884 h1:DyPpPyiuO2MwBrQ1QPkILJXgzdmFjVlRlJerICSp6ks= -github.com/kwilteam/kwil-db/parse v0.3.1-0.20241212163115-7353f2761884/go.mod h1:REQKF6pOCxTzY6WaZwRU+QWuIWsRmviYcSgqdKa5zfc= github.com/kylelemons/godebug v1.1.0 h1:RPNrshWIDI6G2gRW9EHilWtl7Z6Sb1BR0xunSBf0SNc= github.com/kylelemons/godebug v1.1.0/go.mod h1:9/0rRGxNHcop5bhtWyNeEfOS8JIWk580+fNqagV/RAw= github.com/leanovate/gopter v0.2.9 h1:fQjYxZaynp97ozCzfOyOuAGOU4aU/z37zf/tOujFk7c= @@ -449,6 +449,8 @@ github.com/pbnjay/memory v0.0.0-20210728143218-7b4eea64cf58 h1:onHthvaw9LFnH4t2D github.com/pbnjay/memory v0.0.0-20210728143218-7b4eea64cf58/go.mod h1:DXv8WO4yhMYhSNPKjeNKa5WY9YCIEBRbNzFFPJbWO6Y= github.com/pelletier/go-toml/v2 v2.2.3 h1:YmeHyLY8mFWbdkNWwpr+qIL2bEqT0o95WSdkNHvL12M= github.com/pelletier/go-toml/v2 v2.2.3/go.mod h1:MfCQTFTvCcUyyvvwm1+G6H/jORL20Xlb6rzQu9GuUkc= +github.com/pganalyze/pg_query_go/v5 v5.1.0 h1:MlxQqHZnvA3cbRQYyIrjxEjzo560P6MyTgtlaf3pmXg= +github.com/pganalyze/pg_query_go/v5 v5.1.0/go.mod h1:FsglvxidZsVN+Ltw3Ai6nTgPVcK2BPukH3jCDEqc1Ug= github.com/pion/datachannel v1.5.9 h1:LpIWAOYPyDrXtU+BW7X0Yt/vGtYxtXQ8ql7dFfYUVZA= github.com/pion/datachannel v1.5.9/go.mod h1:kDUuk4CU4Uxp82NH4LQZbISULkX/HtzKa4P7ldf9izE= github.com/pion/dtls/v2 v2.2.7/go.mod h1:8WiMkebSHFD0T+dIU+UeBaoV7kDhOW5oDCzZ7WZ/F9s= @@ -822,6 +824,8 @@ google.golang.org/protobuf v1.22.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2 google.golang.org/protobuf v1.23.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= google.golang.org/protobuf v1.23.1-0.20200526195155-81db48ad09cc/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= google.golang.org/protobuf v1.25.0/go.mod h1:9JNX74DMeImyA3h4bdi1ymwjUzf21/xIlbajtzgsN7c= +google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw= +google.golang.org/protobuf v1.31.0/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I= google.golang.org/protobuf v1.35.2 h1:8Ar7bF+apOIoThw1EdZl0p1oWvMqTHmpA2fRTyZO8io= google.golang.org/protobuf v1.35.2/go.mod h1:9fA7Ob0pmnwhb644+1+CVWFRbNajQ6iRojtC/QF5bRE= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= diff --git a/node/engine/execution/dataset.go b/node/engine/execution/dataset.go deleted file mode 100644 index e8d3aa6cd..000000000 --- a/node/engine/execution/dataset.go +++ /dev/null @@ -1,103 +0,0 @@ -package execution - -import ( - "bytes" - "errors" - "fmt" - - "github.com/kwilteam/kwil-db/common" - "github.com/kwilteam/kwil-db/core/types" - "github.com/kwilteam/kwil-db/extensions/precompiles" - "github.com/kwilteam/kwil-db/node/pg" - "github.com/kwilteam/kwil-db/node/types/sql" -) - -// baseDataset is a deployed database schema. -// It implements the precompiles.Instance interface. -type baseDataset struct { - // schema is the schema of the dataset. - schema *types.Schema - - // extensions are the extensions available for use in the dataset. - extensions map[string]precompiles.Instance - - // actions are the actions that are available for use in the dataset. - actions map[string]*preparedAction - - // procedures are the procedures that are available for use in the dataset. - // It only includes public procedures. - procedures map[string]*preparedProcedure - - // global is the global context. 
- global *GlobalContext -} - -var _ precompiles.Instance = (*baseDataset)(nil) - -var ( - ErrPrivate = errors.New("procedure/action is not public") - ErrOwnerOnly = errors.New("procedure/action is owner only") -) - -// Call calls a procedure from the dataset. -// If the procedure is not public, it will return an error. -// It satisfies precompiles.Instance. -func (d *baseDataset) Call(caller *precompiles.ProcedureContext, app *common.App, method string, inputs []any) ([]any, error) { - // check if it is a procedure - proc, ok := d.procedures[method] - if ok { - if !proc.public { - return nil, fmt.Errorf(`%w: "%s"`, ErrPrivate, method) - } - if proc.ownerOnly && !bytes.Equal(caller.TxCtx.Signer, d.schema.Owner) { - return nil, fmt.Errorf(`%w: "%s"`, ErrOwnerOnly, method) - } - if !proc.view && app.DB.(sql.AccessModer).AccessMode() == sql.ReadOnly { - return nil, fmt.Errorf(`%w: "%s"`, ErrMutativeProcedure, method) - } - - // this is not a strictly necessary check, as postgres will throw an error, but this gives a more - // helpful error message - if len(inputs) != len(proc.parameters) { - return nil, fmt.Errorf(`procedure "%s" expects %d argument(s), got %d`, method, len(proc.parameters), len(inputs)) - } - - res, err := app.DB.Execute(caller.TxCtx.Ctx, proc.callString(d.schema.DBID()), append([]any{pg.QueryModeExec}, inputs...)...) - if err != nil { - return nil, err - } - - err = proc.shapeReturn(res) - if err != nil { - return nil, err - } - - caller.Result = res - return nil, nil - } - - // otherwise, it is an action - act, ok := d.actions[method] - if !ok { - return nil, fmt.Errorf(`action "%s" not found`, method) - } - - if !act.public { - return nil, fmt.Errorf(`%w: "%s"`, ErrPrivate, method) - } - - newCtx := caller.NewScope() - newCtx.DBID = d.schema.DBID() - newCtx.Procedure = method - - err := act.call(newCtx, d.global, app.DB, inputs) - if err != nil { - return nil, err - } - - caller.Result = newCtx.Result - - // we currently do not support returning values from dataset procedures - // if we do, then we will need to return the result here - return nil, nil -} diff --git a/node/engine/execution/execution_test.go b/node/engine/execution/execution_test.go deleted file mode 100644 index e17636fb7..000000000 --- a/node/engine/execution/execution_test.go +++ /dev/null @@ -1,645 +0,0 @@ -package execution - -import ( - "context" - "math/rand" - "testing" - "time" - - "github.com/kwilteam/kwil-db/common" - "github.com/kwilteam/kwil-db/core/log" - "github.com/kwilteam/kwil-db/core/types" - "github.com/kwilteam/kwil-db/extensions/precompiles" - "github.com/kwilteam/kwil-db/node/engine/testdata" - "github.com/kwilteam/kwil-db/node/types/sql" - - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" -) - -func Test_Execution(t *testing.T) { - type testCase struct { - name string - fn func(t *testing.T, ctx *GlobalContext) - } - - tests := []testCase{ - { - name: "create database", - fn: func(t *testing.T, eng *GlobalContext) { - ctx := context.Background() - db := newDB(false) - - err := eng.CreateDataset(&common.TxContext{ - BlockContext: &common.BlockContext{}, - Signer: testdata.TestSchema.Owner, - Caller: string(testdata.TestSchema.Owner), - TxID: "txid1", - Ctx: ctx, - }, db, testdata.TestSchema) - assert.NoError(t, err) - - schema, err := eng.GetSchema(testdata.TestSchema.DBID()) - assert.NoError(t, err) - - assert.EqualValues(t, testdata.TestSchema, schema) - }, - }, - { - name: "drop database", - fn: func(t *testing.T, eng *GlobalContext) { - ctx := 
context.Background() - db := newDB(false) - - err := eng.CreateDataset(&common.TxContext{ - BlockContext: &common.BlockContext{}, - Signer: testdata.TestSchema.Owner, - Caller: string(testdata.TestSchema.Owner), - TxID: "txid1", - Ctx: ctx, - }, db, testdata.TestSchema) - assert.NoError(t, err) - - _, ok := db.dbs[testdata.TestSchema.DBID()] - assert.True(t, ok) - - err = eng.DeleteDataset(&common.TxContext{ - BlockContext: &common.BlockContext{}, - Signer: testdata.TestSchema.Owner, - Caller: string(testdata.TestSchema.Owner), - TxID: "txid2", - Ctx: ctx, - }, db, testdata.TestSchema.DBID()) - assert.NoError(t, err) - - _, ok = db.dbs[testdata.TestSchema.DBID()] - assert.False(t, ok) - }, - }, - { - name: "drop database with non-owner fails", - fn: func(t *testing.T, eng *GlobalContext) { - ctx := context.Background() - db := newDB(false) - - err := eng.CreateDataset(&common.TxContext{ - BlockContext: &common.BlockContext{}, - Signer: testdata.TestSchema.Owner, - Caller: string(testdata.TestSchema.Owner), - TxID: "txid1", - Ctx: ctx, - }, db, testdata.TestSchema) - assert.NoError(t, err) - - err = eng.DeleteDataset(&common.TxContext{ - BlockContext: &common.BlockContext{}, - Signer: []byte("not_owner"), - Caller: "not_owner", - TxID: "txid1", - Ctx: ctx, - }, db, testdata.TestSchema.DBID()) - assert.Error(t, err) - }, - }, - { - name: "drop non-existent database fails", - fn: func(t *testing.T, eng *GlobalContext) { - ctx := context.Background() - db := newDB(false) - - err := eng.DeleteDataset(&common.TxContext{ - BlockContext: &common.BlockContext{}, - Signer: testdata.TestSchema.Owner, - Caller: string(testdata.TestSchema.Owner), - TxID: "txid2", - Ctx: ctx, - }, db, "not_a_real_db") - assert.Error(t, err) - }, - }, - { - name: "call an action", - fn: func(t *testing.T, eng *GlobalContext) { - ctx := context.Background() - db := newDB(false) - - err := eng.CreateDataset(&common.TxContext{ - BlockContext: &common.BlockContext{}, - Signer: testdata.TestSchema.Owner, - Caller: string(testdata.TestSchema.Owner), - TxID: "txid1", - Ctx: ctx, - }, db, testdata.TestSchema) - assert.NoError(t, err) - - _, err = eng.Procedure(&common.TxContext{ - BlockContext: &common.BlockContext{}, - Signer: testdata.TestSchema.Owner, - Caller: string(testdata.TestSchema.Owner), - TxID: "txid2", - Ctx: ctx, - }, db, &common.ExecutionData{ - Dataset: testdata.TestSchema.DBID(), - Procedure: "create_user", - Args: []any{1, "brennan", 22}, - }) - assert.NoError(t, err) - }, - }, - { - name: "call an action with invalid arguments", - fn: func(t *testing.T, eng *GlobalContext) { - ctx := context.Background() - db := newDB(false) - - err := eng.CreateDataset(&common.TxContext{ - BlockContext: &common.BlockContext{}, - Signer: testdata.TestSchema.Owner, - Caller: string(testdata.TestSchema.Owner), - TxID: "txid1", - Ctx: ctx, - }, db, testdata.TestSchema) - assert.NoError(t, err) - - _, err = eng.Procedure(&common.TxContext{ - BlockContext: &common.BlockContext{}, - Signer: testdata.TestSchema.Owner, - Caller: string(testdata.TestSchema.Owner), - TxID: "txid2", - Ctx: ctx, - }, db, &common.ExecutionData{ - Dataset: testdata.TestSchema.DBID(), - Procedure: "create_user", - Args: []any{1, "brennan"}, // missing age - - }) - assert.Error(t, err) - }, - }, - { - name: "call a recursive procedure", - fn: func(t *testing.T, eng *GlobalContext) { - ctx := context.Background() - db := newDB(false) - - err := eng.CreateDataset(&common.TxContext{ - BlockContext: &common.BlockContext{}, - Signer: testdata.TestSchema.Owner, - 
Caller: string(testdata.TestSchema.Owner), - TxID: "txid1", - Ctx: ctx, - }, db, testdata.TestSchema) - assert.NoError(t, err) - - _, err = eng.Procedure(&common.TxContext{ - BlockContext: &common.BlockContext{}, - Signer: testdata.TestSchema.Owner, - Caller: string(testdata.TestSchema.Owner), - TxID: "txid2", - Ctx: ctx, - }, db, &common.ExecutionData{ - Dataset: testdata.TestSchema.DBID(), - Procedure: testdata.ActionRecursive.Name, - Args: []any{"id000000", "asdfasdfasdfasdf", "bigbigbigbigbigbigbigbigbigbig"}, - }) - assert.ErrorIs(t, err, ErrMaxStackDepth) - }, - }, - { - name: "call a procedure that hits max call stack depth less directly", - fn: func(t *testing.T, eng *GlobalContext) { - ctx := context.Background() - db := newDB(false) - - err := eng.CreateDataset(&common.TxContext{ - BlockContext: &common.BlockContext{}, - Signer: testdata.TestSchema.Owner, - Caller: string(testdata.TestSchema.Owner), - TxID: "txid1", - Ctx: ctx, - }, db, testdata.TestSchema) - assert.NoError(t, err) - - _, err = eng.Procedure(&common.TxContext{ - BlockContext: &common.BlockContext{}, - Signer: testdata.TestSchema.Owner, - Caller: string(testdata.TestSchema.Owner), - TxID: "txid2", - Ctx: ctx, - }, db, &common.ExecutionData{ - Dataset: testdata.TestSchema.DBID(), - Procedure: testdata.ActionRecursiveSneakyA.Name, - Args: []any{}, - }) - assert.ErrorIs(t, err, ErrMaxStackDepth) - }, - }, - { - name: "call a non-view action fails if not mutative; view action succeeds", - fn: func(t *testing.T, eng *GlobalContext) { - ctx := context.Background() - db := newDB(false) - - err := eng.CreateDataset(&common.TxContext{ - BlockContext: &common.BlockContext{}, - Signer: testdata.TestSchema.Owner, - Caller: string(testdata.TestSchema.Owner), - TxID: "txid1", - Ctx: ctx, - }, db, testdata.TestSchema) - assert.NoError(t, err) - - db2 := newDB(true) - - _, err = eng.Procedure(&common.TxContext{ - BlockContext: &common.BlockContext{}, - Signer: testdata.TestSchema.Owner, - Caller: string(testdata.TestSchema.Owner), - TxID: "txid2", - Ctx: ctx, - }, db2, &common.ExecutionData{ - Dataset: testdata.TestSchema.DBID(), - Procedure: "create_user", - Args: []any{1, "brennan", 22}, - }) - assert.Error(t, err) - assert.ErrorIs(t, err, ErrMutativeProcedure) - - _, err = eng.Procedure(&common.TxContext{ - BlockContext: &common.BlockContext{}, - Signer: testdata.TestSchema.Owner, - Caller: string(testdata.TestSchema.Owner), - TxID: "txid3", - Ctx: ctx, - }, db2, &common.ExecutionData{ - Dataset: testdata.TestSchema.DBID(), - Procedure: "get_user_by_address", - Args: []any{"address"}, - }) - assert.NoError(t, err) - }, - }, - { - name: "call an extension", - fn: func(t *testing.T, eng *GlobalContext) { - ctx := context.Background() - db := newDB(false) - - err := eng.CreateDataset(&common.TxContext{ - BlockContext: &common.BlockContext{}, - Signer: testSchema.Owner, - Caller: string(testSchema.Owner), - TxID: "txid1", - Ctx: ctx, - }, db, testSchema) - assert.NoError(t, err) - - _, err = eng.Procedure(&common.TxContext{ - BlockContext: &common.BlockContext{}, - Signer: testSchema.Owner, - Caller: string(testSchema.Owner), - // no txid since it is non-mutative - Ctx: ctx, - }, db, &common.ExecutionData{ - Dataset: testSchema.DBID(), - Procedure: "use_math", - Args: []any{1, 2}, - }) - assert.NoError(t, err) - - // call non-mutative - // since we do not have a sql connection, we cannot evaluate the result - _, err = eng.Procedure(&common.TxContext{ - BlockContext: &common.BlockContext{}, - Signer: testSchema.Owner, - Caller: 
string(testSchema.Owner), - TxID: "txid3", - Ctx: ctx, - }, db, &common.ExecutionData{ - Dataset: testSchema.DBID(), - Procedure: "use_math", - Args: []any{1, 2}, - }) - assert.NoError(t, err) - }, - }, - { - name: "list datasets", - fn: func(t *testing.T, eng *GlobalContext) { - ctx := context.Background() - db := newDB(false) - - owner := "owner" - - err := eng.CreateDataset(&common.TxContext{ - BlockContext: &common.BlockContext{}, - Signer: []byte(owner), - Caller: owner, - TxID: "txid1", - Ctx: ctx, - }, db, testdata.TestSchema) - assert.NoError(t, err) - - datasets, err := eng.ListDatasets([]byte(owner)) - assert.NoError(t, err) - - assert.Equal(t, 1, len(datasets)) - assert.Equal(t, testdata.TestSchema.Name, datasets[0].Name) - assert.Equal(t, testdata.TestSchema.Owner, datasets[0].Owner) - assert.Equal(t, testdata.TestSchema.DBID(), datasets[0].DBID) - }, - }, - { - name: "procedure returning table", - fn: func(t *testing.T, eng *GlobalContext) { - ctx := context.Background() - db := mockResultDB(&sql.ResultSet{ - Columns: []string{"_out_id", "_out_name", "_out_age"}, - }) - - owner := "owner" - - err := eng.CreateDataset(&common.TxContext{ - BlockContext: &common.BlockContext{}, - Signer: []byte(owner), - Caller: owner, - TxID: "txid1", - Ctx: ctx, - }, db, testdata.TestSchema) - assert.NoError(t, err) - - res, err := eng.Procedure(&common.TxContext{ - BlockContext: &common.BlockContext{}, - Signer: []byte(owner), - Caller: owner, - TxID: "txid2", - Ctx: ctx, - }, db, &common.ExecutionData{ - Dataset: testdata.TestSchema.DBID(), - Procedure: testdata.ProcGetUsersByAge.Name, - Args: []any{22}, - }) - assert.NoError(t, err) - - for i, expected := range testdata.ProcGetUsersByAge.Returns.Fields { - assert.Equal(t, expected.Name, res.Columns[i]) - } - }, - }, - } - - for _, tc := range tests { - t.Run(tc.name, func(t *testing.T) { - mth := &mathInitializer{} - - ctx := context.Background() - - engine, err := NewGlobalContext(ctx, - newDB(false), map[string]precompiles.Initializer{ - "math": mth.initialize, - }, &common.Service{ - Logger: log.DiscardLogger, - }, - ) - require.NoError(t, err) - tc.fn(t, engine) - }) - } -} - -func newDB(readonly bool) *mockDB { - am := sql.ReadWrite - if readonly { - am = sql.ReadOnly - } - - return &mockDB{ - accessMode: am, - dbs: make(map[string][]byte), - executedStmts: make([]string, 0), - } -} - -// mockResultDB can be used to mock a result set for a query -func mockResultDB(result *sql.ResultSet) *mockDB { - db := newDB(false) - db.resultSet = result - - return db -} - -type mockDB struct { - accessMode sql.AccessMode - dbs map[string][]byte // serialized schemas - executedStmts []string - resultSet *sql.ResultSet -} - -var _ sql.AccessModer = (*mockDB)(nil) - -func (m *mockDB) AccessMode() sql.AccessMode { - return m.accessMode -} - -func (m *mockDB) BeginTx(ctx context.Context) (sql.Tx, error) { - return &mockTx{ - m, - }, nil -} - -func (m *mockDB) Execute(ctx context.Context, stmt string, args ...any) (*sql.ResultSet, error) { - // mock some expected queries used internally - switch stmt { - case sqlStoreKwilSchema: - // first arg is uuid, 2nd is dbid, 3rd is schema content, 4th is schema version - m.dbs[args[1].(string)] = args[2].([]byte) - case sqlListSchemaContent: - rows := make([][]any, 0) - for _, bts := range m.dbs { - rows = append(rows, []any{bts}) - } - - return &sql.ResultSet{ - Columns: []string{"schema_content"}, - Rows: rows, - }, nil - case sqlDeleteKwilSchema: - delete(m.dbs, args[0].(string)) - default: - m.executedStmts = 
append(m.executedStmts, stmt) - - if m.resultSet != nil { - return m.resultSet, nil - } - } - - return &sql.ResultSet{ - Columns: []string{}, - Rows: [][]any{}, - }, nil -} - -type mockTx struct { - *mockDB -} - -func (m *mockTx) Commit(ctx context.Context) error { - return nil -} - -func (m *mockTx) Rollback(ctx context.Context) error { - return nil -} - -// identitySchema is a schema that relies on the testdata user's schema -// it creates an example credential application -var testSchema = &types.Schema{ - Name: "identity_db", - Owner: []byte(`owner`), - Tables: []*types.Table{ - { - Name: "credentials", - Columns: []*types.Column{ - { - Name: "id", - Type: types.IntType, - Attributes: []*types.Attribute{ - { - Type: types.PRIMARY_KEY, - }, - }, - }, - { - Name: "user_id", - Type: types.IntType, - Attributes: []*types.Attribute{ - { - Type: types.NOT_NULL, - }, - }, - }, - { - Name: "credential", - Type: types.TextType, - }, - }, - Indexes: []*types.Index{ - { - Name: "user_id", - Columns: []string{"user_id"}, - Type: types.BTREE, - }, - }, - }, - }, - Actions: []*types.Action{ - { - Name: "use_math", - Parameters: []string{"$a", "$b"}, - Public: true, - Modifiers: []types.Modifier{ - types.ModifierView, - }, - Body: `math.add($a, $b);`, - }, - }, - Extensions: []*types.Extension{ - { - Name: "math", - Initialization: []*types.ExtensionConfig{ - { - Key: "math_key", - Value: "math_val", - }, - }, - Alias: "math", - }, - }, -} - -// mocks a namespace initializer -type mathInitializer struct { - vals map[string]string -} - -func (m *mathInitializer) initialize(_ *precompiles.DeploymentContext, _ *common.Service, mp map[string]string) (precompiles.Instance, error) { - m.vals = mp - - return &mathExt{}, nil -} - -type mathExt struct{} - -var _ precompiles.Instance = &mathExt{} - -func (m *mathExt) Call(caller *precompiles.ProcedureContext, app *common.App, method string, inputs []any) ([]any, error) { - return nil, nil -} - -// Test_OrderSchemas tests that schemas are ordered correctly when importing with dependencies -func Test_OrderSchemas(t *testing.T) { - // create random schemas, and randomly add others as dependencies - schemas := make([]*types.Schema, 0) - - for range 100 { - schema := randomSchema() - - for _, schema2 := range schemas { - schema2.Extensions = append(schema.Extensions, &types.Extension{ - Name: schema.DBID(), - Alias: schema.Name, - }) - } - - schemas = append(schemas, schema) - } - - // add some more that have zero dependencies - for _, schema := range schemas { - for range 10 { - dep := randomSchema() - schema.Extensions = append(schema.Extensions, &types.Extension{ - Name: dep.DBID(), - Alias: dep.Name, - }) - - // add the dependency to the list of schemas - schemas = append(schemas, dep) - } - } - - // now create a datastore to see if it imports the schemas in the correct order - - ctx := context.Background() - mth := &mathInitializer{} - _, err := NewGlobalContext(ctx, - newDB(false), map[string]precompiles.Initializer{ - "math": mth.initialize, - }, &common.Service{ - Logger: log.DiscardLogger, - }, - ) - require.NoError(t, err) - -} - -const charset = "abcdefghijklmnopqrstuvwxyz" + - "ABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789" - -func randomString(length int) string { - var seededRand *rand.Rand = rand.New( - rand.NewSource(time.Now().UnixNano())) - - b := make([]byte, length) - for i := range b { - b[i] = charset[seededRand.Intn(len(charset))] - } - return string(b) -} - -func randomSchema() *types.Schema { - return &types.Schema{ - Name: randomString(10), - 
Owner: []byte(randomString(10)), - } -} diff --git a/node/engine/execution/global.go b/node/engine/execution/global.go deleted file mode 100644 index 9181e4038..000000000 --- a/node/engine/execution/global.go +++ /dev/null @@ -1,502 +0,0 @@ -package execution - -import ( - "bytes" - "context" - "encoding/json" - "errors" - "fmt" - "sort" - "sync" - - "github.com/kwilteam/kwil-db/common" - "github.com/kwilteam/kwil-db/core/types" - "github.com/kwilteam/kwil-db/extensions/precompiles" - "github.com/kwilteam/kwil-db/node/engine/generate" - "github.com/kwilteam/kwil-db/node/pg" - "github.com/kwilteam/kwil-db/node/types/sql" - "github.com/kwilteam/kwil-db/node/versioning" - "github.com/kwilteam/kwil-db/parse" -) - -// GlobalContext is the context for the entire execution. -// It exists for the lifetime of the server. -// It stores information about deployed datasets in-memory, and provides methods to interact with them. -type GlobalContext struct { - // mu protects the datasets maps, which is written to during block execution - // and read from during calls / queries. - // It also implicitly protects maps held in the *baseDataset struct. - mu sync.RWMutex - - // initializers are the namespaces that are available to datasets. - // This includes other datasets, or loaded extensions. - initializers map[string]precompiles.Initializer - - // datasets are the top level namespaces that are available to engine callers. - // These only include datasets, and do not include extensions. - datasets map[string]*baseDataset - - service *common.Service -} - -var ( - ErrDatasetNotFound = errors.New("dataset not found") - ErrDatasetExists = errors.New("dataset exists") - ErrInvalidSchema = errors.New("invalid schema") - ErrDBInternal = errors.New("internal database error") -) - -func InitializeEngine(ctx context.Context, tx sql.DB) error { - upgradeFns := map[int64]versioning.UpgradeFunc{ - 0: initTables, - 1: func(ctx context.Context, db sql.DB) error { - - // add the uuid column to the kwil_schemas table - _, err := db.Execute(ctx, sqlUpgradeSchemaTableV1AddUUIDColumn) - if err != nil { - return err - } - - // backfill the uuid column with uuids - _, err = db.Execute(ctx, sqlBackfillSchemaTableV1UUID) - if err != nil { - return err - } - - // remove the primary key constraint from the kwil_schemas table - _, err = db.Execute(ctx, sqlUpgradeRemovePrimaryKey) - if err != nil { - return err - } - - // add the new primary key constraint to the kwil_schemas table - _, err = db.Execute(ctx, sqlUpgradeAddPrimaryKeyV1UUID) - if err != nil { - return err - } - - // add a unique constraint to the dbid column - _, err = db.Execute(ctx, sqlUpgradeAddUniqueConstraintV1DBID) - if err != nil { - return err - } - - _, err = db.Execute(ctx, sqlUpgradeSchemaTableV1AddOwnerColumn) - if err != nil { - return err - } - - _, err = db.Execute(ctx, sqlUpgradeSchemaTableV1AddNameColumn) - if err != nil { - return err - } - - // we now need to read out all schemas to backfill the changes to - // the datasets table. 
This includes: - // - upgrading the version of the schema - // - setting the owner of the schema - // - setting the name of the schema - schemas, err := getSchemas(ctx, db, convertV07Schema) - if err != nil { - return err - } - - for _, schema := range schemas { - bts, err := json.Marshal(schema) - if err != nil { - return err - } - - _, err = db.Execute(ctx, sqlBackfillSchemaTableV1, schema.Owner, schema.Name, schema.DBID(), bts) - if err != nil { - return err - } - } - - _, err = db.Execute(ctx, sqlAddProceduresTableV1) - if err != nil { - return err - } - - _, err = db.Execute(ctx, sqlIndexProceduresTableV1SchemaID) - if err != nil { - return err - } - - return nil - }, - } - - err := versioning.Upgrade(ctx, tx, pg.InternalSchemaName, upgradeFns, engineVersion) - if err != nil { - return err - } - - return nil -} - -// NewGlobalContext creates a new global context. It will load any persisted -// datasets from the datastore. The provided database is only used for -// construction. -func NewGlobalContext(ctx context.Context, db sql.Executor, extensionInitializers map[string]precompiles.Initializer, - service *common.Service) (*GlobalContext, error) { - g := &GlobalContext{ - initializers: extensionInitializers, - datasets: make(map[string]*baseDataset), - service: service, - } - - schemas, err := getSchemas(ctx, db, nil) - if err != nil { - return nil, err - } - - // we need to make sure schemas are ordered by their dependencies - // if one schema is dependent on another, it must be loaded after the other - // this is handled by the orderSchemas function - for _, schema := range orderSchemas(schemas) { - err := g.loadDataset(ctx, schema) - if err != nil { - return nil, fmt.Errorf("%w: schema (%s / %s / %s)", err, schema.Name, schema.DBID(), schema.Owner) - } - } - - return g, nil -} - -// Reload is used to reload the global context based on the current state of the database. -// It is used after state sync to ensure that the global context is up to date. -func (g *GlobalContext) Reload(ctx context.Context, db sql.Executor) error { - g.mu.Lock() - defer g.mu.Unlock() - - schemas, err := getSchemas(ctx, db, nil) - if err != nil { - return err - } - - for _, schema := range orderSchemas(schemas) { - err := g.loadDataset(ctx, schema) - if err != nil { - return err - } - } - - return nil -} - -// CreateDataset deploys a schema. -// It will create the requisite tables, and perform the required initializations. -func (g *GlobalContext) CreateDataset(ctx *common.TxContext, tx sql.DB, schema *types.Schema) (err error) { - g.mu.Lock() - defer g.mu.Unlock() - - err = schema.Clean() - if err != nil { - return errors.Join(err, ErrInvalidSchema) - } - schema.Owner = ctx.Signer - - err = g.loadDataset(ctx.Ctx, schema) - if err != nil { - return err - } - - // it is critical that the schema is loaded before being created. - // the engine will not be able to parse the schema if it is not loaded. - err = createSchema(ctx.Ctx, tx, schema, ctx.TxID) - if err != nil { - g.unloadDataset(schema.DBID()) - return err - } - - return nil -} - -// DeleteDataset deletes a dataset. -// It will ensure that the caller is the owner of the dataset. 
-func (g *GlobalContext) DeleteDataset(ctx *common.TxContext, tx sql.DB, dbid string) error { - g.mu.Lock() - defer g.mu.Unlock() - - dataset, ok := g.datasets[dbid] - if !ok { - return ErrDatasetNotFound - } - - if !bytes.Equal(ctx.Signer, dataset.schema.Owner) { - return fmt.Errorf(`cannot delete dataset "%s", not owner`, dbid) - } - - err := deleteSchema(ctx.Ctx, tx, dbid) - if err != nil { - return errors.Join(err, ErrDBInternal) - } - - g.unloadDataset(dbid) - - return nil -} - -// Procedure calls a procedure on a dataset. It can be given either a readwrite or -// readonly transaction. If it is given a read-only transaction, it will not be -// able to execute any procedures that are not `view`. -func (g *GlobalContext) Procedure(ctx *common.TxContext, tx sql.DB, options *common.ExecutionData) (*sql.ResultSet, error) { - err := options.Clean() - if err != nil { - return nil, err - } - - g.mu.RLock() // even if tx is readwrite, we will not change GlobalContext state, so we can use RLock - defer g.mu.RUnlock() - - dataset, ok := g.datasets[options.Dataset] - if !ok { - return nil, ErrDatasetNotFound - } - - procedureCtx := &precompiles.ProcedureContext{ - TxCtx: ctx, - DBID: options.Dataset, - Procedure: options.Procedure, - // starting with stack depth 0, increment in each action call - } - - tx2, err := tx.BeginTx(ctx.Ctx) - if err != nil { - return nil, errors.Join(err, ErrDBInternal) - } - defer tx2.Rollback(ctx.Ctx) - - err = setContextualVars(ctx, tx2, options) - if err != nil { - return nil, err - } - - _, err = dataset.Call(procedureCtx, &common.App{ - Service: g.service, - DB: tx2, - Engine: g, - }, options.Procedure, options.Args) - if err != nil { - return nil, err - } - - return procedureCtx.Result, tx2.Commit(ctx.Ctx) -} - -// ListDatasets list datasets deployed by a specific caller. -// If caller is empty, it will list all datasets. -func (g *GlobalContext) ListDatasets(caller []byte) ([]*types.DatasetIdentifier, error) { - g.mu.RLock() - defer g.mu.RUnlock() - - var datasets []*types.DatasetIdentifier - if len(caller) == 0 { // prealloc only for all users' dataset - datasets = make([]*types.DatasetIdentifier, 0, len(g.datasets)) - } - for dbid, dataset := range g.datasets { - if len(caller) == 0 || bytes.Equal(dataset.schema.Owner, caller) { - datasets = append(datasets, &types.DatasetIdentifier{ - Name: dataset.schema.Name, - Owner: dataset.schema.Owner, - DBID: dbid, - }) - } - } - - return datasets, nil -} - -// GetSchema gets a schema from a deployed dataset. -func (g *GlobalContext) GetSchema(dbid string) (*types.Schema, error) { - g.mu.RLock() - defer g.mu.RUnlock() - - dataset, ok := g.datasets[dbid] - if !ok { - return nil, ErrDatasetNotFound - } - - return dataset.schema, nil -} - -// Execute executes a SQL statement on a dataset. If the statement is mutative, -// the tx must also be a sql.AccessModer. It uses Kwil's SQL dialect. 
-func (g *GlobalContext) Execute(ctx *common.TxContext, tx sql.DB, dbid, query string, values map[string]any) (*sql.ResultSet, error) { - g.mu.RLock() - defer g.mu.RUnlock() - dataset, ok := g.datasets[dbid] - if !ok { - return nil, ErrDatasetNotFound - } - - res, err := parse.ParseSQL(query, dataset.schema, false) - if err != nil { - return nil, err - } - - if res.ParseErrs.Err() != nil { - return nil, res.ParseErrs.Err() - } - - sqlStmt, params, err := generate.WriteSQL(res.AST, true, dbidSchema(dbid)) - if err != nil { - return nil, err - } - - if res.Mutative { - txm, ok := tx.(sql.AccessModer) - if !ok { - return nil, errors.New("DB does not provide access mode needed for mutative statement") - } - if txm.AccessMode() == sql.ReadOnly { - return nil, errors.New("cannot execute a mutative query in a read-only transaction") - } - } - - args := orderAndCleanValueMap(values, params) - args = append([]any{pg.QueryModeExec}, args...) - - // all execution data is empty, but things like @caller can still be used - err = setContextualVars(ctx, tx, &common.ExecutionData{}) - if err != nil { - return nil, err - } - - result, err := tx.Execute(ctx.Ctx, sqlStmt, args...) - if err != nil { - return nil, decorateExecuteErr(err, query) - } - - return result, nil -} - -type dbQueryFn func(ctx context.Context, stmt string, args ...any) (*sql.ResultSet, error) - -// loadDataset loads a dataset into the global context. -// It does not create the dataset in the datastore. -func (g *GlobalContext) loadDataset(ctx context.Context, schema *types.Schema) error { - dbid := schema.DBID() - _, ok := g.initializers[dbid] - if ok { - return fmt.Errorf("%w: %s", ErrDatasetExists, dbid) - } - - datasetCtx := &baseDataset{ - schema: schema, - extensions: make(map[string]precompiles.Instance), - actions: make(map[string]*preparedAction), - procedures: make(map[string]*preparedProcedure), - global: g, - } - - preparedActions, err := prepareActions(schema) - if err != nil { - return errors.Join(err, ErrInvalidSchema) - } - - for _, prepared := range preparedActions { - _, ok := datasetCtx.actions[prepared.name] - if ok { - return fmt.Errorf(`%w: duplicate action name: "%s"`, ErrInvalidSchema, prepared.name) - } - - datasetCtx.actions[prepared.name] = prepared - } - - for _, unprepared := range schema.Procedures { - prepared := prepareProcedure(unprepared) - - _, ok := datasetCtx.procedures[prepared.name] - if ok { - return fmt.Errorf(`%w: duplicate procedure name: "%s"`, ErrInvalidSchema, prepared.name) - } - - datasetCtx.procedures[prepared.name] = prepared - } - - for _, ext := range schema.Extensions { - _, ok := datasetCtx.extensions[ext.Alias] - if ok { - return fmt.Errorf(`%w duplicate namespace assignment: "%s"`, ErrInvalidSchema, ext.Alias) - } - - initializer, ok := g.initializers[ext.Name] - if !ok { - return fmt.Errorf(`namespace "%s" not found`, ext.Name) // ErrMissingExtension? - } - - namespace, err := initializer(&precompiles.DeploymentContext{ - Ctx: ctx, - Schema: schema, - }, g.service, ext.CleanMap()) - if err != nil { - return err - } - - datasetCtx.extensions[ext.Alias] = namespace - } - - g.initializers[dbid] = func(_ *precompiles.DeploymentContext, _ *common.Service, _ map[string]string) (precompiles.Instance, error) { - return datasetCtx, nil - } - g.datasets[dbid] = datasetCtx - - return nil -} - -// unloadDataset unloads a dataset from the global context. -// It does not delete the dataset from the datastore. 
-func (g *GlobalContext) unloadDataset(dbid string) { - delete(g.datasets, dbid) - delete(g.initializers, dbid) -} - -// orderSchemas orders schemas based on their dependencies to other schemas. -func orderSchemas(schemas []*types.Schema) []*types.Schema { - // Mapping from schema DBID to its extensions - schemaMap := make(map[string][]string) - for _, schema := range schemas { - var exts []string - for _, ext := range schema.Extensions { - exts = append(exts, ext.Name) - } - schemaMap[schema.DBID()] = exts - } - - // Topological sort - var result []string - visited := make(map[string]bool) - var visitAll func(items []string) - - visitAll = func(items []string) { - for _, item := range items { - if !visited[item] { - visited[item] = true - visitAll(schemaMap[item]) - result = append(result, item) - } - } - } - - keys := make([]string, 0, len(schemaMap)) - for key := range schemaMap { - keys = append(keys, key) - } - sort.Strings(keys) // sort the keys for deterministic output - visitAll(keys) - - // Reorder schemas based on result - var orderedSchemas []*types.Schema - for _, dbid := range result { - for _, schema := range schemas { - if schema.DBID() == dbid { - orderedSchemas = append(orderedSchemas, schema) - break - } - } - } - - return orderedSchemas -} diff --git a/node/engine/execution/procedure.go b/node/engine/execution/procedure.go deleted file mode 100644 index 4dd835a26..000000000 --- a/node/engine/execution/procedure.go +++ /dev/null @@ -1,656 +0,0 @@ -package execution - -import ( - "bytes" - "context" - "errors" - "fmt" - "maps" - "strings" - - "github.com/jackc/pgx/v5/pgconn" - - "github.com/kwilteam/kwil-db/common" - "github.com/kwilteam/kwil-db/core/types" - "github.com/kwilteam/kwil-db/core/types/decimal" - "github.com/kwilteam/kwil-db/extensions/precompiles" - "github.com/kwilteam/kwil-db/node/engine/generate" - "github.com/kwilteam/kwil-db/node/pg" - "github.com/kwilteam/kwil-db/node/types/sql" - "github.com/kwilteam/kwil-db/node/utils/conv" -) - -// MaxStackDepth is the limit on the number of nested procedure calls allowed. -// This is different from the Go call stack depth, which may be much higher as -// it depends on the program design. The value 1,000 was empirically selected to -// be a call stack size of about 1MB and to provide a very high limit that no -// reasonable schema would exceed (even 100 would suggest a poorly designed -// schema). -// -// In addition to exorbitant memory required to support a call stack 1 million -// deep (>1GB), the execution of that many calls can take seconds, even if they -// do nothing else. -// -// Progressive gas metering may be used in the future to limit resources used by -// abusive recursive calls, but a hard upper limit will likely be necessary -// unless the price of an action call is extremely expensive or rises -// exponentially at each level of the call stack. -const MaxStackDepth = 1000 - -var ( - ErrIncorrectNumberOfArguments = errors.New("incorrect number of arguments") - ErrPrivateProcedure = errors.New("procedure is private") - ErrMutativeProcedure = errors.New("procedure is mutative") - ErrMaxStackDepth = errors.New("max call stack depth reached") - ErrCannotInferType = errors.New("cannot infer type") -) - -// instruction is an instruction that can be executed. -// It is used to define the behavior of a procedure. -type instruction interface { // i.e. 
dmlStmt, callMethod, or instructionFunc - execute(scope *precompiles.ProcedureContext, global *GlobalContext, db sql.DB) error -} - -// preparedAction is a predefined action that can be executed. -// Unlike the action declared in the shared types, -// preparedAction's statements are parsed into a set of instructions. -type preparedAction struct { - // name is the name of the procedure. - name string - - // public indicates whether the procedure is public or privately scoped. - public bool - - // parameters are the parameters of the procedure. - parameters []string - - // view indicates whether the procedure has a `view` tag. - view bool - - // instructions are the instructions that the procedure executes when called. - instructions []instruction -} - -// prepareActions parses all actions. -// It converts all modifiers and statements into instructions. -// these instructions are then used to execute the action. -// It will convert modifiers first, since these should be checked immediately -// when the action is called. It will then convert the statements into -// instructions. -func prepareActions(schema *types.Schema) ([]*preparedAction, error) { - owner := make([]byte, len(schema.Owner)) - copy(owner, schema.Owner) // copy this here since caller may modify the passed schema. maybe not necessary - - preparedActions := make([]*preparedAction, len(schema.Actions)) - - for idx, action := range schema.Actions { - instructions := make([]instruction, 0) - - actionStmt, err := generate.GenerateActionBody(action, schema, dbidSchema(schema.DBID())) - if err != nil { - return nil, err - } - - // add instructions for both owner only and view procedures - if action.IsOwnerOnly() { - instructions = append(instructions, instructionFunc(func(scope *precompiles.ProcedureContext, global *GlobalContext, db sql.DB) error { - if !bytes.Equal(scope.TxCtx.Signer, owner) { - return errors.New("cannot call owner action, not owner") - } - - return nil - })) - } - - if !action.IsView() { - instructions = append(instructions, instructionFunc(func(scope *precompiles.ProcedureContext, global *GlobalContext, db sql.DB) error { - tx, ok := db.(sql.AccessModer) - if !ok { - return errors.New("DB does not provide access mode needed for mutative action") - } - if tx.AccessMode() != sql.ReadWrite { - return fmt.Errorf("%w, not in a chain transaction", ErrMutativeProcedure) - } - - return nil - })) - } - - for _, parsedStmt := range actionStmt { - switch stmt := parsedStmt.(type) { - default: - return nil, fmt.Errorf("unknown statement type %T", stmt) - case *generate.ActionExtensionCall: - i := &callMethod{ - Namespace: stmt.Extension, - Method: stmt.Method, - Args: makeExecutables(stmt.Params), - Receivers: stmt.Receivers, - } - instructions = append(instructions, i) - case *generate.ActionSQL: - i := &dmlStmt{ - SQLStatement: stmt.Statement, - OrderedParameters: stmt.ParameterOrder, - } - instructions = append(instructions, i) - case *generate.ActionCall: - - var calledAction *types.Action - for _, p := range schema.Actions { - if p.Name == stmt.Action { - calledAction = p - break - } - } - if calledAction == nil { - return nil, fmt.Errorf(`action "%s" not found`, stmt.Action) - } - - // we leave the namespace and receivers empty, since action calls can only - // call actions within the same schema, and actions cannot return values. 
- i := &callMethod{ - Method: stmt.Action, - Args: makeExecutables(stmt.Params), - } - instructions = append(instructions, i) - } - } - - preparedActions[idx] = &preparedAction{ - name: action.Name, - public: action.Public, - parameters: action.Parameters, - view: action.IsView(), - instructions: instructions, - } - } - - return preparedActions, nil -} - -// Call executes an action. -func (p *preparedAction) call(scope *precompiles.ProcedureContext, global *GlobalContext, db sql.DB, inputs []any) error { - if len(inputs) != len(p.parameters) { - return fmt.Errorf(`%w: action "%s" requires %d arguments, but %d were provided`, ErrIncorrectNumberOfArguments, p.name, len(p.parameters), len(inputs)) - } - - for i, param := range p.parameters { - scope.SetValue(param, inputs[i]) - } - - for _, inst := range p.instructions { - if err := inst.execute(scope, global, db); err != nil { - return err - } - } - - return nil -} - -// callMethod is a statement that calls a method. -// This can be a local method, or a method from a namespace. -type callMethod struct { - // Namespace is the namespace that the method is in. - // If no namespace is specified, the local namespace is used. - Namespace string - - // Method is the name of the method. - Method string - - // Args are the arguments to the method. - // They are evaluated in order, and passed to the method. - Args []evaluatable - // for Args we might consider some literals to avoid pointless and error - // prone evaluation of certain trivial in-line expressions such as `SELECT @arg`; - - // Receivers are the variables that the return values are assigned to. - Receivers []string -} - -var _ instructionFunc = (&callMethod{}).execute - -// Execute calls a method from a namespace that is accessible within this dataset. -// If no namespace is specified, the local namespace is used. -// It will pass all arguments to the method, and assign the return values to the receivers. -func (e *callMethod) execute(scope *precompiles.ProcedureContext, global *GlobalContext, db sql.DB) error { - // This instruction is about to call into another procedure in this dataset - // or another baseDataset. Check current call stack depth first. - if scope.StackDepth >= MaxStackDepth { - // NOTE: the actual Go call stack depth can be much more (e.g. more than - // double) the procedure call depth depending on program design and the - // number of Go function calls for each procedure. As of writing, it is - // approximately double plus a handful from the caller: - // - // var pcs [4096]uintptr; fmt.Println("call stack depth", runtime.Callers(0, pcs[:])) - return ErrMaxStackDepth - } - - dataset, ok := global.datasets[scope.DBID] - if !ok { - return fmt.Errorf("%w: %s", ErrDatasetNotFound, scope.DBID) - } - - // getting these types to match the type required by the the ultimate DML - // statement is tricky. stuff like `SELECT $1;` breaks extended query - // protocol mechanisms or ends up with the return as a string even if it's - // input as an int like 1. If we decide to be more type-strict, we should - // consider special Arg types that are literals (pass through functions?) - // that avoid the round trip to the database. Expressions with arithmetic, - // unary, binary, etc. operators still need to go through the DB. 
- var inputs []any - vals := scope.Values() // declare here since scope.Values() is expensive - for _, arg := range e.Args { - val, err := arg(scope.TxCtx.Ctx, db.Execute, vals) - if err != nil { - return err - } - - inputs = append(inputs, val) - } - - var results []any - var err error - - scope.UsedGas += 10 - if scope.UsedGas >= 10000000 { - return errors.New("out of gas") - } - - newScope := scope.NewScope() - newScope.StackDepth++ // not done by NewScope since (*baseDataset).Call would do it again - - // if no namespace is specified, we call a local procedure. - // this can access public and private procedures. - if e.Namespace == "" { - procedure, ok := dataset.actions[e.Method] - if !ok { - return fmt.Errorf(`action "%s" not found`, e.Method) - } - - err = procedure.call(newScope, global, db, inputs) - } else { - namespace, ok := dataset.extensions[e.Namespace] - if !ok { - return fmt.Errorf(`namespace "%s" not found`, e.Namespace) - } - - // new scope since we are calling a namespace - results, err = namespace.Call(newScope, &common.App{ - Service: global.service, - DB: db, - Engine: global, - }, e.Method, inputs) - } - if err != nil { - return err - } - - scope.Result = newScope.Result - - if len(e.Receivers) > len(results) { - return fmt.Errorf(`%w: action "%s" returned %d values, but only %d receivers were specified`, ErrIncorrectNumberOfArguments, e.Method, len(results), len(e.Receivers)) - } - - // Make the result available to either subsequent instructions or as the FinalResult. - for i, result := range results { // fmt.Println("res::", i, e.Receivers[i], result) - // make sure there is a receiver for the result - if i >= len(e.Receivers) { - break - } - - scope.SetValue(e.Receivers[i], result) - } - - return nil -} - -// dmlStmt is a DML statement, we leave the parsing to sqlparser -type dmlStmt struct { - // SQLStatement is the transformed, deterministic, Postgres compatible SQL statement. - SQLStatement string - - // OrderedParameters is the named parameters in the order they need to be passed to the database. - // Since Postgres doesn't support named parameters, we parse them to positional params, and then - // pass them to the database in the order they are expected. - OrderedParameters []string -} - -// decorateExecuteErr parses an execute error from postgres and tries to give a more helpful error message. -// this allows us to give a more helpful error message when users hit this, -// since the Postgres error message is not helpful, and this is a common error. -func decorateExecuteErr(err error, stmt string) error { - // this catches a common error case for in-line expressions, where the type cannot be inferred - var pgErr *pgconn.PgError - if errors.As(err, &pgErr) && pgErr.Code == "42P08" || pgErr.Code == "42P18" { - return fmt.Errorf(`%w: could not dynamically determine the data type in statement "%s". try type casting using ::, e.g. $id::text`, - ErrCannotInferType, stmt) - } - - return err -} - -var _ instructionFunc = (&dmlStmt{}).execute - -func (e *dmlStmt) execute(scope *precompiles.ProcedureContext, _ *GlobalContext, db sql.DB) error { - // Expend the arguments based on the ordered parameters for the DML statement. - params := orderAndCleanValueMap(scope.Values(), e.OrderedParameters) - // args := append([]any{pg.QueryModeExec}, params...) - results, err := db.Execute(scope.TxCtx.Ctx, e.SQLStatement, append([]any{pg.QueryModeExec}, params...)...) 
- if err != nil { - return decorateExecuteErr(err, e.SQLStatement) - } - - // we need to check for any pg numeric types returned, and convert them to int64 - for i, row := range results.Rows { - for j, val := range row { - int64Val, ok := sql.Int64(val) - if ok { - results.Rows[i][j] = int64Val - } - } - } - - scope.Result = results - - return nil -} - -type instructionFunc func(scope *precompiles.ProcedureContext, global *GlobalContext, db sql.DB) error - -// implement instruction -func (f instructionFunc) execute(scope *precompiles.ProcedureContext, global *GlobalContext, db sql.DB) error { - return f(scope, global, db) -} - -// evaluatable is an expression that can be evaluated to a scalar value. -// It is used to handle inline expressions, such as within action calls. -type evaluatable func(ctx context.Context, exec dbQueryFn, values map[string]any) (any, error) - -// makeExecutables converts inline expressions into a set of evaluatables. -// These are SQL statements that executed with arguments from previously bound -// values (either from the action call params or results from preceding -// instructions in the procedure), and whose results are used as the input -// arguments for action or extension calls. -// -// See their execution in (*callMethod).execute inside the `range e.Args` to -// collect the `inputs` passed to the call of a dataset method or other -// "namespace" method, such as an extension method. -func makeExecutables(params []*generate.InlineExpression) []evaluatable { - var evaluatables []evaluatable - - for _, param := range params { - // copy the param to avoid loop variable capture - param2 := &generate.InlineExpression{ - Statement: param.Statement, - OrderedParams: param.OrderedParams, - } - evaluatables = append(evaluatables, func(ctx context.Context, exec dbQueryFn, values map[string]any) (any, error) { - // we need to start with a slice of the mode key - // for in-line expressions, we need to use the inferred arg types - valSlice := []any{pg.QueryModeInferredArgTypes} - - // ordering the map values according to the bind names - valSlice = append(valSlice, orderAndCleanValueMap(values, param2.OrderedParams)...) - - result, err := exec(ctx, param2.Statement, valSlice...) // more values than binds - if err != nil { - return nil, err - } - - if len(result.Rows) == 0 { - return nil, nil - } - if len(result.Rows) > 1 { - return nil, fmt.Errorf("expected max 1 row for in-line expression, got %d", len(result.Rows)) - } - - record := result.Rows[0] - if len(record) != 1 { - return nil, fmt.Errorf("expected 1 value for in-line expression, got %d", len(record)) - } - - // Kwil supports nils in in-line expressions, so we need to check for nils - if record[0] == nil { - return nil, nil - } - // TODO: I am currently making changes to PG that will remove the need for this (I think) - // there is an edge case here where if the value is an array, it needs to be of the exact array type. - // For example, pgx only understands []string, and not []any, however it will return arrays to us as - // []any. If the returned type here is an array, we need to convert it to an array of the correct type. - // typeOf := reflect.TypeOf(record[0]) - // if typeOf.Kind() == reflect.Slice && typeOf.Elem().Kind() != reflect.Uint8 { - // // if it is an array, we need to convert it to the correct type. 
- // // if of length 0, we can simply set it to a text array - // if len(record[0].([]any)) == 0 { - // return []string{}, nil - // } - - // switch v := record[0].([]any)[0].(type) { - // case string: - // textArr := make([]string, len(record[0].([]any))) - // for i, val := range record[0].([]any) { - // textArr[i] = val.(string) - // } - // return textArr, nil - // case int64: - // intArr := make([]int64, len(record[0].([]any))) - // for i, val := range record[0].([]any) { - // intArr[i] = val.(int64) - // } - // return intArr, nil - // case []byte: - // blobArr := make([][]byte, len(record[0].([]any))) - // for i, val := range record[0].([]any) { - // blobArr[i] = val.([]byte) - // } - // return blobArr, nil - // case bool: - // boolArr := make([]bool, len(record[0].([]any))) - // for i, val := range record[0].([]any) { - // boolArr[i] = val.(bool) - // } - // return boolArr, nil - // case *types.UUID: - // uuidArr := make(types.UUIDArray, len(record[0].([]any))) - // for i, val := range record[0].([]any) { - // uuidArr[i] = val.(*types.UUID) - // } - // return uuidArr, nil - // case *types.Uint256: - // uint256Arr := make(types.Uint256Array, len(record[0].([]any))) - // for i, val := range record[0].([]any) { - // uint256Arr[i] = val.(*types.Uint256) - // } - // return uint256Arr, nil - // case *decimal.Decimal: - // decArr := make(decimal.DecimalArray, len(record[0].([]any))) - // for i, val := range record[0].([]any) { - // decArr[i] = val.(*decimal.Decimal) - // } - // return decArr, nil - // default: - // return nil, fmt.Errorf("unsupported in-line array type %T", v) - // } - // } - - return record[0], nil - }) - } - - return evaluatables -} - -// orderAndCleanValueMap takes a map of values and a slice of keys, and returns -// a slice of values in the order of the keys. If a value can be converted to an -// int, it will be. If a value does not exist, it will be set to nil. -// They keys are expected to match Kwil's bind syntax, i.e. $key. -// The values in the value map can be either $key or key. -func orderAndCleanValueMap(values map[string]any, keys []string) []any { - // we need to iterate over all values, and see if it has a $. If not, - // we need to add one so that the keys (which have $) match the values - // (which do not have $) - cloned := false - for k, v := range values { - if k[0] != '$' { - // we need to copy the values map to ensure - // we do not modify the original map - if !cloned { - values = maps.Clone(values) - cloned = true - } - - delete(values, k) - values["$"+k] = v - } - } - - ordered := make([]any, 0, len(keys)) - for _, key := range keys { - val, ok := values[key] - if ok { - val = cleanseIntValue(val) - } // leave nil if it doesn't exist, still append - - ordered = append(ordered, val) - } - - return ordered -} - -// cleanseIntValue attempts to coerce a value to an int64. -// bools are not converted. -// -// Client tooling sends everything as a string, and we don't have typing in any -// action arguments or variables. So we have no choice but to attempt to coerce -// a string or other value into an int so that the inline expression, which is -// basically always expecting integer arguments, does not bomb. I don't like -// this a lot, but it's essentially what SQLite did although maybe more -// judiciously depending on the needs of the query? 
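A minimal, self-contained sketch of the ordering contract described above for orderAndCleanValueMap; the package layout, the inputs, and the simplified helper are illustrative assumptions, and the integer coercion performed by cleanseIntValue is deliberately omitted.

package main

import (
	"fmt"
	"maps"
)

// orderValues mirrors the documented behavior: bind keys use Kwil's $name
// syntax, stored values may or may not carry the $ prefix, and a missing key
// yields nil at its position. The real function also coerces string values to
// int64 via cleanseIntValue; that step is left out here.
func orderValues(values map[string]any, keys []string) []any {
	cloned := false
	for k, v := range values {
		if len(k) > 0 && k[0] != '$' {
			if !cloned {
				values = maps.Clone(values) // never mutate the caller's map
				cloned = true
			}
			delete(values, k)
			values["$"+k] = v
		}
	}
	ordered := make([]any, 0, len(keys))
	for _, key := range keys {
		ordered = append(ordered, values[key]) // nil when the key is absent
	}
	return ordered
}

func main() {
	vals := map[string]any{"$id": int64(7), "name": "alice"}
	fmt.Println(orderValues(vals, []string{"$id", "$name", "$missing"}))
	// Prints: [7 alice <nil>]
}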
-func cleanseIntValue(val any) any { - if _, isBool := val.(bool); isBool { - return val - } - intVal, err := conv.Int(val) - if err == nil { - return intVal - } - - return val -} - -func prepareProcedure(proc *types.Procedure) *preparedProcedure { - return &preparedProcedure{ - name: proc.Name, - public: proc.Public, - parameters: proc.Parameters, - ownerOnly: proc.IsOwnerOnly(), - view: proc.IsView(), - returns: proc.Returns, - } -} - -// preparedProcedure is a predefined procedure that can be executed. -type preparedProcedure struct { - // name is the name of the procedure. - name string - - // public indicates whether the procedure is public or privately scoped. - public bool - // ownerOnly indicates whether the procedure is owner only. - ownerOnly bool - - // parameters are the parameters of the procedure. - parameters []*types.ProcedureParameter - - // view indicates whether the procedure has a `view` tag. - view bool - - returns *types.ProcedureReturn -} - -func (p *preparedProcedure) callString(schema string) string { - str := strings.Builder{} - str.WriteString("SELECT * FROM ") - str.WriteString(dbidSchema(schema)) - str.WriteString(".") - str.WriteString(p.name) - str.WriteString("(") - for i := range p.parameters { - if i != 0 { - str.WriteString(", ") - } - str.WriteString(fmt.Sprintf("$%d", i+1)) - } - str.WriteString(");") - - return str.String() -} - -// shapeReturn takes a sql result and ensures it matches the expected return shape -// of the procedure. It will modify the passed result to match the expected shape. -func (p *preparedProcedure) shapeReturn(result *sql.ResultSet) error { - // in postgres, `select * from proc()`, where proc() returns nothing, - // will return a single empty column and row. We need to remove this. - if p.returns == nil { - result.Columns = nil - result.Rows = nil - return nil - } - - if len(p.returns.Fields) != len(result.Columns) { - // I'm quite positive this will get caught before the schema is even deployed, - // but just in case, we should check here. 
- return fmt.Errorf("shapeReturn: procedure definition expects result %d columns, but returned %d", len(p.returns.Fields), len(result.Columns)) - } - - for i, col := range p.returns.Fields { - result.Columns[i] = col.Name - - // if the column is a decimal or a decimal array, we need to convert the values to - // the specified scale and precision - if col.Type.Name == types.DecimalStr { - // if it is an array, we need to convert each value in the array - if col.Type.IsArray { - for _, row := range result.Rows { - if row[i] == nil { - continue - } - - arr, ok := row[i].(decimal.DecimalArray) - if !ok { - return fmt.Errorf("shapeReturn: expected decimal array, got %T", row[i]) - } - - for _, v := range arr { - if v == nil { - continue - } - err := v.SetPrecisionAndScale(col.Type.Metadata[0], col.Type.Metadata[1]) - if err != nil { - return err - } - } - } - } else { - for _, row := range result.Rows { - if row[i] == nil { - continue - } - - dec, ok := row[i].(*decimal.Decimal) - if !ok { - return fmt.Errorf("shapeReturn: expected decimal, got %T", row[i]) - } - - err := dec.SetPrecisionAndScale(col.Type.Metadata[0], col.Type.Metadata[1]) - if err != nil { - return err - } - } - } - } - } - - return nil -} diff --git a/node/engine/execution/procedure_test.go b/node/engine/execution/procedure_test.go deleted file mode 100644 index 6ce16240f..000000000 --- a/node/engine/execution/procedure_test.go +++ /dev/null @@ -1,59 +0,0 @@ -package execution - -import ( - "maps" - "testing" - - "github.com/stretchr/testify/require" -) - -func Test_OrderAndClean(t *testing.T) { - type testcase struct { - name string - values map[string]any - keys []string - res []any - } - - tests := []testcase{ - { - name: "using $", - values: map[string]any{ - "$key1": "value1", - "$key2": []byte("value2"), - }, - keys: []string{"$key1", "$key2"}, - res: []any{"value1", []byte("value2")}, - }, - { - name: "using $ and without $", - values: map[string]any{ - "$key1": "value1", - "key2": []byte("value2"), - }, - keys: []string{"$key1", "$key2"}, - res: []any{"value1", []byte("value2")}, - }, - { - name: "missing key", - values: map[string]any{ - "$key1": "value1", - "key2": []byte("value2"), - }, - keys: []string{"$key1", "$key3"}, - res: []any{"value1", nil}, - }, - } - - for _, test := range tests { - t.Run(test.name, func(t *testing.T) { - // copy the values to check that the function does not modify the input - oldVals := maps.Clone(test.values) - - res := orderAndCleanValueMap(test.values, test.keys) - require.EqualValues(t, test.res, res) - - require.EqualValues(t, oldVals, test.values) - }) - } -} diff --git a/node/engine/execution/queries.go b/node/engine/execution/queries.go deleted file mode 100644 index 37357a008..000000000 --- a/node/engine/execution/queries.go +++ /dev/null @@ -1,374 +0,0 @@ -package execution - -import ( - "context" - "encoding/base64" - "encoding/json" - "fmt" - - _ "embed" - - "github.com/kwilteam/kwil-db/common" - "github.com/kwilteam/kwil-db/core/types" - "github.com/kwilteam/kwil-db/node/engine/generate" - "github.com/kwilteam/kwil-db/node/pg" - "github.com/kwilteam/kwil-db/node/types/sql" - "github.com/kwilteam/kwil-db/parse" -) - -var ( - // engineVersion is the version of the 'kwild_internal' schema - engineVersion int64 = 1 - - schemaVersion = 0 // schema version allows upgrading schemas in the future - sqlCreateSchemaTable = fmt.Sprintf(`CREATE TABLE IF NOT EXISTS %s.kwil_schemas ( - dbid TEXT PRIMARY KEY, - schema_content BYTEA, - version INT DEFAULT %d -);`, pg.InternalSchemaName, 
schemaVersion) - sqlCreateSchema = `CREATE SCHEMA "%s";` - sqlStoreKwilSchema = fmt.Sprintf(`INSERT INTO %s.kwil_schemas (id, dbid, schema_content, version, owner, name) - VALUES ($1, $2, $3, $4, $5, $6) - ON CONFLICT (dbid) DO UPDATE SET schema_content = $3, version = $4, owner = $5, name = $6;`, pg.InternalSchemaName) - sqlStoreProcedure = fmt.Sprintf(`INSERT INTO %s.procedures (name, schema_id, param_types, param_names, return_types, return_names, returns_table, public, owner_only, is_view) - VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10);`, pg.InternalSchemaName) - sqlListSchemaContent = fmt.Sprintf(`SELECT schema_content FROM %s.kwil_schemas;`, pg.InternalSchemaName) - sqlDropSchema = `DROP SCHEMA "%s" CASCADE;` - sqlDeleteKwilSchema = fmt.Sprintf(`DELETE FROM %s.kwil_schemas WHERE dbid = $1;`, pg.InternalSchemaName) - - // v1 upgrades the schema to be: - // TABLE kwil_schemas ( - // ID uuid PRIMARY KEY, - // dbid TEXT, - // schema_content BYTEA, - // version INT DEFAULT 0, - // owner BYTEA, - // name TEXT - // ) - // TABLE procedures ( - // name TEXT, - // schema_id UUID, - // param_types TEXT[], - // param_names TEXT[], - // return_types TEXT[], - // return_names TEXT[], - // returns_table BOOLEAN, - // public BOOLEAN, - // owner_only BOOLEAN, - // is_view BOOLEAN, - // primary key (name, schema) - // FOREIGN KEY (schema) REFERENCES kwil_schemas (id) ON UPDATE CASCADE ON DELETE CASCADE - // - - // upgrades for v1: - // to change the primary key, we will: - // 1. add a new uuid column, and generate a uuid for each schema - // 2. remove the old primary key - // 3. add a new primary key - sqlUpgradeSchemaTableV1AddUUIDColumn = fmt.Sprintf(` - ALTER TABLE %s.kwil_schemas ADD COLUMN id UUID; - `, pg.InternalSchemaName) - // sqlBackfillSchemaTableV1UUID adds a UUID to all existing schemas. - // It uses a random UUID namespace to generate the UUIDs from the dbid. - // This namespace is not used anywhere else. We want to decouple the UUID - // from the DBID, so separate UUIDs will be used in the future that are based on - // the txid. - sqlBackfillSchemaTableV1UUID = fmt.Sprintf(` - UPDATE %s.kwil_schemas SET id = uuid_generate_v5('052d10c4-acf8-4ec9-a616-105bf1d1e873'::uuid, dbid); - `, pg.InternalSchemaName) - sqlUpgradeRemovePrimaryKey = fmt.Sprintf(` - ALTER TABLE %s.kwil_schemas DROP CONSTRAINT kwil_schemas_pkey; - `, pg.InternalSchemaName) - sqlUpgradeAddPrimaryKeyV1UUID = fmt.Sprintf(` - ALTER TABLE %s.kwil_schemas ADD PRIMARY KEY (id); - `, pg.InternalSchemaName) - - // unique constraint for the dbid - sqlUpgradeAddUniqueConstraintV1DBID = fmt.Sprintf(` - ALTER TABLE %s.kwil_schemas ADD CONSTRAINT kwil_schemas_dbid_unique UNIQUE (dbid); - `, pg.InternalSchemaName) - - sqlUpgradeSchemaTableV1AddOwnerColumn = fmt.Sprintf(` - ALTER TABLE %s.kwil_schemas ADD COLUMN name TEXT; - `, pg.InternalSchemaName) - sqlUpgradeSchemaTableV1AddNameColumn = fmt.Sprintf(` - ALTER TABLE %s.kwil_schemas ADD COLUMN owner BYTEA; - `, pg.InternalSchemaName) - // sqlBackfillSchemaTableV1 adds the owner and name to all existing schemas, - // and updates the version to 1. 
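The UUID backfill above is deterministic: each existing schema's new id is the RFC 4122 version 5 UUID of its dbid under the namespace quoted in sqlBackfillSchemaTableV1UUID. Below is a sketch of the same derivation done in Go, assuming the github.com/google/uuid package and a made-up dbid; uuid.NewSHA1 implements the same SHA-1 based version 5 scheme as Postgres's uuid_generate_v5, so the values should agree.

package main

import (
	"fmt"

	"github.com/google/uuid"
)

func main() {
	// Namespace from sqlBackfillSchemaTableV1UUID; it is used only for this
	// one-time backfill, while newly deployed schemas derive their id from
	// the deployment txid instead.
	ns := uuid.MustParse("052d10c4-acf8-4ec9-a616-105bf1d1e873")

	dbid := "x1234abcd" // hypothetical dbid, for illustration only
	fmt.Println(uuid.NewSHA1(ns, []byte(dbid)))
}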
- sqlBackfillSchemaTableV1 = fmt.Sprintf(` - UPDATE %s.kwil_schemas SET owner = $1, name = $2, version = 1, schema_content = $4 WHERE dbid = $3; - `, pg.InternalSchemaName) - - sqlAddProceduresTableV1 = fmt.Sprintf(` - CREATE TABLE %s.procedures ( - name TEXT not null, - schema_id uuid not null, - param_types TEXT[], - param_names TEXT[], - return_types TEXT[], - return_names TEXT[], - returns_table BOOLEAN not null, - public BOOLEAN not null, - owner_only BOOLEAN not null, - is_view BOOLEAN not null, - primary key (name, schema_id), - FOREIGN KEY (schema_id) REFERENCES %s.kwil_schemas (id) ON UPDATE CASCADE ON DELETE CASCADE - ) - `, pg.InternalSchemaName, pg.InternalSchemaName) - sqlIndexProceduresTableV1SchemaID = fmt.Sprintf(` - CREATE INDEX procedures_schema_id ON %s.procedures (schema_id); - `, pg.InternalSchemaName) -) - -func initTables(ctx context.Context, db sql.DB) error { - if err := createSchemasTableIfNotExists(ctx, db); err != nil { - return err - } - - return nil -} - -func dbidSchema(dbid string) string { - return pg.DefaultSchemaFilterPrefix + dbid -} - -// createSchemasTableIfNotExists creates the schemas table if it does not exist -func createSchemasTableIfNotExists(ctx context.Context, tx sql.DB) error { - _, err := tx.Execute(ctx, sqlCreateSchemaTable) - return err -} - -// a random uuidNamespace for generating UUIDs for schemas. -var uuidNamespace = "01c32544-c21f-4522-98c5-40e6fb0a0831" - -// createSchema creates a schema in the database. -// It will also store the schema in the kwil_schemas table. -// It also creates the relevant tables, indexes, etc. -// If the schema already exists in the Kwil schemas table, it will be updated. -func createSchema(ctx context.Context, tx sql.TxMaker, schema *types.Schema, txid string) error { - schemaName := dbidSchema(schema.DBID()) - - sp, err := tx.BeginTx(ctx) - if err != nil { - return err - } - defer sp.Rollback(ctx) - - _, err = sp.Execute(ctx, fmt.Sprintf(sqlCreateSchema, schemaName)) - if err != nil { - return err - } - - // we can json marshal without concern for non-determinism - // because kwil_schemas exists outside of consensus / replicated state - schemaBts, err := json.Marshal(schema) - if err != nil { - return err - } - - uuidNamespace, err := types.ParseUUID(uuidNamespace) - if err != nil { - return err - } - - uuid := types.NewUUIDV5WithNamespace(*uuidNamespace, []byte(txid)) - - // since we will fail if the schema already exists, we can assume that it does not exist - // in the kwil_schemas table. If it does for some reason, we will update it. - _, err = sp.Execute(ctx, sqlStoreKwilSchema, uuid, schema.DBID(), schemaBts, schemaVersion, schema.Owner, schema.Name) - if err != nil { - return err - } - - for _, table := range schema.Tables { - statements, err := generate.GenerateDDL(schemaName, table) - if err != nil { - return err - } - - for _, stmt := range statements { - _, err = sp.Execute(ctx, stmt) - if err != nil { - return err - } - } - } - - for _, proc := range schema.Procedures { - stmt, err := generate.GenerateProcedure(proc, schema, schemaName) - if err != nil { - return err - } - - _, err = sp.Execute(ctx, stmt) - if err != nil { - return err - } - } - - // To support foreign procedure calls, we generate a function for each procedure. - // The function ensures that, whatever target procedure is chosen at runtime, that - // its input and output types are compatible with the expected types. 
- for _, proc := range schema.ForeignProcedures { - stmt, err := generate.GenerateForeignProcedure(proc, schemaName, schema.DBID()) - if err != nil { - return err - } - - _, err = sp.Execute(ctx, stmt) - if err != nil { - return err - } - } - - // store the procedures in the kwil_procedures table - for _, proc := range schema.Procedures { - - var paramTypes []string - var paramNames []string - for _, col := range proc.Parameters { - paramTypes = append(paramTypes, col.Type.String()) - paramNames = append(paramNames, col.Name) - } - - var returnTypes []string - var returnNames []string - returnsTable := false - if proc.Returns != nil { - returnsTable = proc.Returns.IsTable - for _, col := range proc.Returns.Fields { - returnTypes = append(returnTypes, col.Type.String()) - returnNames = append(returnNames, col.Name) - } - } - - _, err = sp.Execute(ctx, sqlStoreProcedure, - proc.Name, - uuid, - paramTypes, - paramNames, - returnTypes, - returnNames, - returnsTable, - proc.Public, - proc.IsOwnerOnly(), - proc.IsView()) - if err != nil { - return err - } - - } - - return sp.Commit(ctx) -} - -// getSchemas returns all schemas in the kwil_schemas table. -// convertFunc converts bytes into a schema. If nil, it will simply unmarshal the bytes. -func getSchemas(ctx context.Context, tx sql.Executor, convertFunc func([]byte) (*types.Schema, error)) ([]*types.Schema, error) { - res, err := tx.Execute(ctx, sqlListSchemaContent) - if err != nil { - return nil, err - } - - if convertFunc == nil { - convertFunc = func(b []byte) (*types.Schema, error) { - schema := &types.Schema{} - err := json.Unmarshal(b, schema) - return schema, err - } - } - - schemas := make([]*types.Schema, len(res.Rows)) - for i, row := range res.Rows { - if len(row) != 1 { - return nil, fmt.Errorf("expected 1 column, got %d", len(row)) - } - - bts, ok := row[0].([]byte) - if !ok { - return nil, fmt.Errorf("expected []byte, got %T", row[0]) - } - - schema, err := convertFunc(bts) - if err != nil { - return nil, err - } - - schemas[i] = schema - } - - return schemas, nil -} - -// deleteSchema deletes a schema from the database. -// It will also delete the schema from the kwil_schemas table. -func deleteSchema(ctx context.Context, tx sql.TxMaker, dbid string) error { - schemaName := dbidSchema(dbid) - - sp, err := tx.BeginTx(ctx) - if err != nil { - return err - } - defer sp.Rollback(ctx) - - _, err = sp.Execute(ctx, fmt.Sprintf(sqlDropSchema, schemaName)) - if err != nil { - return err - } - - _, err = sp.Execute(ctx, sqlDeleteKwilSchema, dbid) - if err != nil { - return err - } - - return sp.Commit(ctx) -} - -// setContextualVars sets the contextual variables for the given postgres session. -func setContextualVars(ctx *common.TxContext, db sql.DB, _ *common.ExecutionData) error { - // for contextual parameters, we use postgres's current_setting() - // feature for setting session variables. 
For example, @caller - // is accessed via current_setting('ctx.caller') - - _, err := db.Execute(ctx.Ctx, fmt.Sprintf(`SET LOCAL %s.%s = '%s';`, generate.PgSessionPrefix, parse.CallerVar, ctx.Caller)) - if err != nil { - return err - } - - _, err = db.Execute(ctx.Ctx, fmt.Sprintf(`SET LOCAL %s.%s = '%s';`, generate.PgSessionPrefix, parse.TxidVar, ctx.TxID)) - if err != nil { - return err - } - - _, err = db.Execute(ctx.Ctx, fmt.Sprintf(`SET LOCAL %s.%s = '%s';`, generate.PgSessionPrefix, parse.SignerVar, base64.StdEncoding.EncodeToString(ctx.Signer))) - if err != nil { - return err - } - - _, err = db.Execute(ctx.Ctx, fmt.Sprintf(`SET LOCAL %s.%s = %d;`, generate.PgSessionPrefix, parse.HeightVar, ctx.BlockContext.Height)) - if err != nil { - return err - } - - _, err = db.Execute(ctx.Ctx, fmt.Sprintf(`SET LOCAL %s.%s = %d;`, generate.PgSessionPrefix, parse.BlockTimestamp, ctx.BlockContext.Timestamp)) - if err != nil { - return err - } - - _, err = db.Execute(ctx.Ctx, fmt.Sprintf(`SET LOCAL %s.%s = '%s';`, generate.PgSessionPrefix, parse.Authenticator, ctx.Authenticator)) - if err != nil { - return err - } - - // we have to set the foreign caller to the empty string if it is nil. - // We can't leave it nil because once a config parameter is set, it cannot be unset. - // This means that we cannot properly handle scoping of the foreign caller in the outermost - // function call. - _, err = db.Execute(ctx.Ctx, fmt.Sprintf(`SET LOCAL %s.%s = '';`, generate.PgSessionPrefix, parse.ForeignCaller)) - if err != nil { - return err - } - - return nil -} diff --git a/node/engine/execution/queries_test.go b/node/engine/execution/queries_test.go deleted file mode 100644 index 70828f605..000000000 --- a/node/engine/execution/queries_test.go +++ /dev/null @@ -1,61 +0,0 @@ -//go:build pglive - -package execution - -import ( - "context" - "strings" - "testing" - - "github.com/kwilteam/kwil-db/node/engine/testdata" - "github.com/kwilteam/kwil-db/node/pg" - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" -) - -func Test_StoringSchemas(t *testing.T) { - ctx := context.Background() - cfg := &pg.DBConfig{ - PoolConfig: pg.PoolConfig{ - ConnConfig: pg.ConnConfig{ - Host: "127.0.0.1", - Port: "5432", - User: "kwild", - Pass: "kwild", // would be ignored if pg_hba.conf set with trust - DBName: "kwil_test_db", - }, - MaxConns: 11, - }, - SchemaFilter: func(s string) bool { - return strings.Contains(s, pg.DefaultSchemaFilterPrefix) - }, - } - - db, err := pg.NewDB(ctx, cfg) - require.NoError(t, err) - - tx, err := db.BeginTx(ctx) - require.NoError(t, err) - defer tx.Rollback(ctx) // we always want to rollback, never commit - - err = InitializeEngine(ctx, tx) - require.NoError(t, err) - - err = createSchemasTableIfNotExists(ctx, tx) - require.NoError(t, err) - - err = createSchema(ctx, tx, testdata.TestSchema, "txid") - require.NoError(t, err) - - defer func() { - err := deleteSchema(ctx, tx, testdata.TestSchema.DBID()) - require.NoError(t, err) - }() - - schemas, err := getSchemas(ctx, tx, nil) - require.NoError(t, err) - - require.Len(t, schemas, 1) - - assert.EqualValuesf(t, testdata.TestSchema, schemas[0], "expected: %v, got: %v", testdata.TestSchema, schemas[0]) -} diff --git a/node/engine/execution/schema_v07.go b/node/engine/execution/schema_v07.go deleted file mode 100644 index fa172229f..000000000 --- a/node/engine/execution/schema_v07.go +++ /dev/null @@ -1,239 +0,0 @@ -package execution - -import ( - "encoding/json" - - "github.com/kwilteam/kwil-db/core/types" -) - -// this file 
contains the schema from Kwil v0.7, which is used for proper migration to -// the new schema in v0.8 - -// convertV07Schema converts a v0.7 schema to a v0.8 schema -func convertV07Schema(bts []byte) (*types.Schema, error) { - s := &v07Schema{} - err := json.Unmarshal(bts, s) - if err != nil { - return nil, err - } - - tables := make([]*types.Table, len(s.Tables)) - for i, t := range s.Tables { - columns := make([]*types.Column, len(t.Columns)) - for j, c := range t.Columns { - attrs := make([]*types.Attribute, len(c.Attributes)) - for k, a := range c.Attributes { - attrs[k] = &types.Attribute{ - Type: types.AttributeType(a.Type), - Value: a.Value, - } - } - columns[j] = &types.Column{ - Name: c.Name, - Type: c.Type.convert(), - Attributes: attrs, - } - } - - indexes := make([]*types.Index, len(t.Indexes)) - for j, idx := range t.Indexes { - indexes[j] = &types.Index{ - Name: idx.Name, - Columns: idx.Columns, - Type: types.IndexType(idx.Type), - } - } - - foreignKeys := make([]*types.ForeignKey, len(t.ForeignKeys)) - for j, fk := range t.ForeignKeys { - fkActions := make([]*types.ForeignKeyAction, len(fk.Actions)) - for k, fkAct := range fk.Actions { - fkActions[k] = &types.ForeignKeyAction{ - On: types.ForeignKeyActionOn(fkAct.On), - Do: types.ForeignKeyActionDo(fkAct.Do), - } - } - foreignKeys[j] = &types.ForeignKey{ - ChildKeys: fk.ChildKeys, - ParentKeys: fk.ParentKeys, - ParentTable: fk.ParentTable, - Actions: fkActions, - } - } - - tables[i] = &types.Table{ - Name: t.Name, - Columns: columns, - Indexes: indexes, - ForeignKeys: foreignKeys, - } - } - - extensions := make([]*types.Extension, len(s.Extensions)) - for i, e := range s.Extensions { - init := make([]*types.ExtensionConfig, len(e.Initialization)) - for j, i := range e.Initialization { - init[j] = &types.ExtensionConfig{ - Key: i.Key, - Value: i.Value, - } - } - extensions[i] = &types.Extension{ - Name: e.Name, - Initialization: init, - Alias: e.Alias, - } - } - - actions := make([]*types.Action, len(s.Procedures)) - for i, p := range s.Procedures { - actions[i] = &types.Action{ - Name: p.Name, - Annotations: p.Annotations, - Parameters: p.Args, - Public: p.Public, - } - - for _, m := range p.Modifiers { - actions[i].Modifiers = append(actions[i].Modifiers, types.Modifier(m)) - } - - var body string - for _, s := range p.Statements { - body += s + "\n" - } - actions[i].Body = body - } - - return &types.Schema{ - Name: s.Name, - Owner: s.Owner, - Extensions: extensions, - Tables: tables, - Actions: actions, - }, nil - -} - -// v07Schema is a database schema that contains tables, procedures, and extensions. -type v07Schema struct { - // Name is the name of the schema given by the deployer. - Name string `json:"name"` - // Owner is the identifier (generally an address in bytes or public key) of the owner of the schema - Owner []byte `json:"owner"` - Extensions []*v07Extension `json:"extensions"` - Tables []*v07Table `json:"tables"` - Procedures []*v07Procedure `json:"procedures"` -} - -// v07Table is a table in a database schema. -type v07Table struct { - Name string `json:"name"` - Columns []*v07Column `json:"columns"` - Indexes []*v07Index `json:"indexes,omitempty"` - ForeignKeys []*v07ForeignKey `json:"foreign_keys"` -} - -// v07Column is a column in a table. -type v07Column struct { - Name string `json:"name"` - Type v07DataType `json:"type"` - Attributes []*v07Attribute `json:"attributes,omitempty"` -} - -// v07Attribute is a column attribute. -// These are constraints and default values. 
-type v07Attribute struct { - Type string `json:"type"` - Value string `json:"value,omitempty"` -} - -// v07Index is an index on a table. -type v07Index struct { - Name string `json:"name"` - Columns []string `json:"columns"` - Type string `json:"type"` -} - -// v07ForeignKey is a foreign key in a table. -type v07ForeignKey struct { - // ChildKeys are the columns that are referencing another. - // For example, in FOREIGN KEY (a) REFERENCES tbl2(b), "a" is the child key - ChildKeys []string `json:"child_keys"` - - // ParentKeys are the columns that are being referred to. - // For example, in FOREIGN KEY (a) REFERENCES tbl2(b), "b" is the parent key - ParentKeys []string `json:"parent_keys"` - - // ParentTable is the table that holds the parent columns. - // For example, in FOREIGN KEY (a) REFERENCES tbl2(b), "tbl2" is the parent table - ParentTable string `json:"parent_table"` - - // Do we need parent schema stored with meta data or should assume and - // enforce same schema when creating the dataset with generated DDL. - // ParentSchema string `json:"parent_schema"` - - // Action refers to what the foreign key should do when the parent is altered. - // This is NOT the same as a database action; - // however sqlite's docs refer to these as actions, - // so we should be consistent with that. - // For example, ON DELETE CASCADE is a foreign key action - Actions []*v07ForeignKeyAction `json:"actions"` -} - -// v07ForeignKeyAction is used to specify what should occur -// if a parent key is updated or deleted -type v07ForeignKeyAction struct { - // On can be either "UPDATE" or "DELETE" - On string `json:"on"` - - // Do specifies what a foreign key action should do - Do string `json:"do"` -} - -// v07Extension defines what extensions the schema uses, and how they are initialized. -type v07Extension struct { - // Name is the name of the extension registered in the node - Name string `json:"name"` - // Initialization is a list of key value pairs that are used to initialize the extension - Initialization []*v07ExtensionConfig `json:"initialization"` - // Alias is the alias of the extension, which is how its instance is referred to in the schema - Alias string `json:"alias"` -} - -// v07ExtensionConfig is a key value pair that represents a configuration value for an extension -type v07ExtensionConfig struct { - Key string `json:"name"` - Value string `json:"value"` -} - -// v07DataType is a type of data (e.g. NULL, TEXT, INT, BLOB, BOOLEAN) -type v07DataType string - -func (d v07DataType) convert() *types.DataType { - switch d { - case "NULL": - return types.NullType - case "TEXT": - return types.TextType - case "INT": - return types.IntType - case "BLOB": - return types.BlobType - case "BOOLEAN", "BOOL": - return types.BoolType - default: - panic("unknown data type") - } -} - -// v07Procedure is a procedure in a database schema. -// These are defined by Kuneiform's `action` keyword. 
-type v07Procedure struct { - Name string `json:"name"` - Annotations []string `json:"annotations,omitempty"` - Args []string `json:"inputs"` - Public bool `json:"public"` - Modifiers []string `json:"modifiers"` - Statements []string `json:"statements"` -} diff --git a/node/engine/generate/actions.go b/node/engine/generate/actions.go deleted file mode 100644 index 00d1c45d4..000000000 --- a/node/engine/generate/actions.go +++ /dev/null @@ -1,177 +0,0 @@ -package generate - -import ( - "fmt" - - "github.com/kwilteam/kwil-db/core/types" - "github.com/kwilteam/kwil-db/parse" -) - -// this package handles generating code for actions. - -// GeneratedActionStmt is an interface for analyzed statements. -type GeneratedActionStmt interface { - generatedAction() -} - -// there are exactly three types of analyzed statements: -// - ActionExtensionCall: a statement that calls an extension -// - ActionCall: a statement that calls an action -// - ActionSQL: a statement that contains SQL - -// ActionExtensionCall is an analyzed statement that calls an action or extension. -type ActionExtensionCall struct { - // Extension is the name of the extension alias. - Extension string - // Method is the name of the method being called. - Method string - // Params are the parameters to the method. - Params []*InlineExpression - // Receivers are the receivers of the method. - Receivers []string -} - -func (c *ActionExtensionCall) generatedAction() {} - -// ActionCall is an analyzed statement that calls an action. -type ActionCall struct { - // Action is the name of the action being called. - Action string - // Params are the parameters to the action. - Params []*InlineExpression -} - -func (c *ActionCall) generatedAction() {} - -// ActionSQL is an analyzed statement that contains SQL. -type ActionSQL struct { - // Statement is the Statement statement that should be executed. - // It is deterministic. - Statement string - // ParameterOrder is a list of the parameters in the order they appear in the statement. - // This is set if the ReplaceNamedParameters flag is set. - // For example, if the statement is "SELECT * FROM table WHERE id = $id AND name = $name", - // then the parameter order would be ["$id", "$name"] - ParameterOrder []string -} - -func (s *ActionSQL) generatedAction() {} - -// InlineExpression is an expression that is inlined in an action or procedure call. -// For example, this can be "extension.call($id+1)" -type InlineExpression struct { - // Statement is the sql statement that is inlined. - Statement string - // OrderedParams is the order of the parameters in the statement. - OrderedParams []string -} - -// GenerateActionBody generates the body of an action. -// If the action is a VIEW and contains mutative SQL, it will return an error. -func GenerateActionBody(action *types.Action, schema *types.Schema, pgSchema string) (stmts []GeneratedActionStmt, err error) { - defer func() { - if r := recover(); r != nil { - var ok bool - err, ok = r.(error) - if !ok { - err = fmt.Errorf("panic: %v", r) - } - } - - // add action name to error - if err != nil { - err = fmt.Errorf("action %s: %w", action.Name, err) - } - }() - - res, err := parse.ParseAction(action, schema) - if err != nil { - return nil, err - } - - // syntax errors, as well as mutative SQL, will be thrown here. 
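A text-only sketch of the ParameterOrder contract documented for ActionSQL above: named binds become numbered Postgres placeholders, and the names are remembered in first-use order so the caller can build the positional argument list. The real generator performs this rewrite while walking the parsed AST and also handles type casts; the regular expression here is only an illustration.

package main

import (
	"fmt"
	"regexp"
)

// rewriteNamedParams is a toy stand-in for the sqlGenerator visitor: each
// $name bind is replaced with a numbered placeholder, repeated names reuse
// their first number, and the order of first use is recorded.
func rewriteNamedParams(stmt string) (string, []string) {
	var order []string
	seen := map[string]int{}
	re := regexp.MustCompile(`\$[a-zA-Z_][a-zA-Z0-9_]*`)
	out := re.ReplaceAllStringFunc(stmt, func(name string) string {
		idx, ok := seen[name]
		if !ok {
			order = append(order, name)
			idx = len(order)
			seen[name] = idx
		}
		return fmt.Sprintf("$%d", idx)
	})
	return out, order
}

func main() {
	stmt, order := rewriteNamedParams(`SELECT * FROM users WHERE id = $id AND name = $name;`)
	fmt.Println(stmt)  // SELECT * FROM users WHERE id = $1 AND name = $2;
	fmt.Println(order) // [$id $name]
}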
- if res.ParseErrs.Err() != nil { - return nil, res.ParseErrs.Err() - } - - g := &actionGenerator{ - sqlGenerator: sqlGenerator{ - pgSchema: pgSchema, - numberParameters: true, - }, - } - for _, stmt := range res.AST { - stmt.Accept(g) - } - - return g.actions, nil -} - -// actionGenerator is a struct that generates code for actions. -// it totally relies on the SQL generator, except for the action-specific -// visits and Variables, since it needs to rewrite the variables to be numbered. -type actionGenerator struct { - sqlGenerator - // actions is the order of all actions that are generated. - actions []GeneratedActionStmt -} - -func (a *actionGenerator) VisitActionStmtSQL(p0 *parse.ActionStmtSQL) any { - a.sqlGenerator.orderedParams = nil // reset order since it is a new statement - stmt := p0.SQL.Accept(a).(string) - - params := make([]string, len(a.sqlGenerator.orderedParams)) - copy(params, a.sqlGenerator.orderedParams) - - a.actions = append(a.actions, &ActionSQL{ - Statement: stmt + ";", - ParameterOrder: params, - }) - - return nil -} - -func (a *actionGenerator) VisitActionStmtExtensionCall(p0 *parse.ActionStmtExtensionCall) any { - inlines := make([]*InlineExpression, len(p0.Args)) - for i, arg := range p0.Args { - inlines[i] = a.createInline(arg) - } - - a.actions = append(a.actions, &ActionExtensionCall{ - Receivers: p0.Receivers, - Extension: p0.Extension, - Method: p0.Method, - Params: inlines, - }) - - return nil -} - -func (a *actionGenerator) VisitActionStmtActionCall(p0 *parse.ActionStmtActionCall) any { - inlines := make([]*InlineExpression, len(p0.Args)) - for i, arg := range p0.Args { - inlines[i] = a.createInline(arg) - } - - a.actions = append(a.actions, &ActionCall{ - Action: p0.Action, - Params: inlines, - }) - - return nil -} - -// createInline creates an inline from an expression -func (a *actionGenerator) createInline(p0 parse.Expression) *InlineExpression { - a.sqlGenerator.orderedParams = nil // reset order since it is a new statement - - str := p0.Accept(a).(string) - - params := make([]string, len(a.sqlGenerator.orderedParams)) - copy(params, a.sqlGenerator.orderedParams) - - return &InlineExpression{ - Statement: "SELECT " + str + ";", - OrderedParams: params, - } -} diff --git a/node/engine/generate/actions_test.go b/node/engine/generate/actions_test.go deleted file mode 100644 index a518711a7..000000000 --- a/node/engine/generate/actions_test.go +++ /dev/null @@ -1,141 +0,0 @@ -package generate - -import ( - "testing" - - "github.com/kwilteam/kwil-db/core/types" - "github.com/stretchr/testify/assert" -) - -func TestGenerateActionBody(t *testing.T) { - tests := []struct { - name string - action *types.Action - schema *types.Schema - pgSchema string - wantErr bool - wantGenActStmts []any - }{ - { - name: "invalid schema", - action: &types.Action{ - Name: "test_action", - Body: "SELECT * FROM nonexistent_table;", - }, - schema: nil, - pgSchema: "test_schema", - wantErr: true, - }, - { - name: "invalid SQL syntax", - action: &types.Action{ - Name: "invalid_syntax", - Body: "SELECT * FROM;", - }, - schema: &types.Schema{}, - pgSchema: "test_schema", - wantErr: true, - }, - { - name: "valid select action", - action: &types.Action{ - Name: "valid_action", - Body: "SELECT id FROM users;", - }, - schema: &types.Schema{ - Tables: []*types.Table{ - { - Name: "users", - Columns: []*types.Column{ - { - Name: "id", - Type: types.IntType, - Attributes: []*types.Attribute{ - { - Type: types.PRIMARY_KEY, - }, - }, - }, - }, - }, - }, - }, - pgSchema: "test_schema", - 
wantErr: false, - }, - { - name: "faithful typecast with repeat variable number in (*sqlGenerator).VisitExpressionVariable", - action: &types.Action{ - Name: "insert_typecasts", - Parameters: []string{"$id", "$intval"}, - Body: `INSERT INTO id_and_int ( - id, - intval -) VALUES ( - uuid_generate_v5('31276fd4-105f-4ff7-9f64-644942c14b79'::uuid, format('%s-%s', $id::text, $intval::text)), - $intval::int -);`, - }, - schema: &types.Schema{ - Tables: []*types.Table{ - { - Name: "id_and_int", - Columns: []*types.Column{ - { - Name: "id", - Type: types.UUIDType, - Attributes: []*types.Attribute{ - { - Type: types.PRIMARY_KEY, - }, - }, - }, - { - Name: "intval", - Type: types.IntType, - }, - }, - }, - }, - }, - pgSchema: "test_schema", - wantErr: false, - wantGenActStmts: []any{ - &ActionSQL{ - Statement: ` -INSERT INTO test_schema.id_and_int (id, intval) -VALUES -(uuid_generate_v5('31276fd4-105f-4ff7-9f64-644942c14b79'::UUID, format('%s-%s'::TEXT, $1::TEXT, $2::TEXT)), $2::INT8);`, - }, - }, - }, - } - - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - stmts, err := GenerateActionBody(tt.action, tt.schema, tt.pgSchema) - if tt.wantErr { - assert.Error(t, err) - return - } - assert.NoError(t, err) - assert.NotNil(t, stmts) - - if len(tt.wantGenActStmts) == 0 { - return - } - for i, stmt := range stmts { - switch st := stmt.(type) { - case *ActionSQL: - wantStmt, ok := tt.wantGenActStmts[i].(*ActionSQL) - if !ok { - t.Errorf("wanted *ActionSQL, got %T", tt.wantGenActStmts[i]) - return - } - assert.Equal(t, wantStmt.Statement, st.Statement) - default: - } - } - }) - } -} diff --git a/node/engine/generate/foreign_procedure.go b/node/engine/generate/foreign_procedure.go deleted file mode 100644 index 5de6c8fe2..000000000 --- a/node/engine/generate/foreign_procedure.go +++ /dev/null @@ -1,277 +0,0 @@ -package generate - -import ( - "fmt" - "strings" - - "github.com/kwilteam/kwil-db/core/types" -) - -// This is implicitly -// coupled to the schema defined in internal/engine.execution/queries.go, and therefore is implicitly -// a circular dependency. I am unsure how to resolve this, but am punting on it for now since the structure -// of the new parts of the engine are still in flux. - -// GenerateForeignProcedure generates a plpgsql function that allows the schema to dynamically -// call procedures in other schemas, expecting certain inputs and return values. It will prefix -// the generated function with _fp_ (for "foreign procedure"). -func GenerateForeignProcedure(proc *types.ForeignProcedure, pgSchema string, dbid string) (string, error) { - str := strings.Builder{} - - // first write the header - str.WriteString(fmt.Sprintf(`CREATE OR REPLACE FUNCTION %s._fp_%s(_dbid TEXT, _procedure TEXT`, pgSchema, proc.Name)) - - // we now need to format the inputs. Inputs will be named _arg1, _arg2, etc. - // we start at 1 since postgres is 1-indexed. - argList := make([]string, len(proc.Parameters)) - for i, in := range proc.Parameters { - pgStr, err := in.PGString() - if err != nil { - return "", err - } - name := fmt.Sprintf("_arg%d", i+1) - str.WriteString(fmt.Sprintf(", %s %s", name, pgStr)) - argList[i] = name - } - - var outList []string - // if there are non-table outputs, we need to format them. 
- // we can ignore the name of the output, since it is not a table - if proc.Returns != nil && !proc.Returns.IsTable { - for i, out := range proc.Returns.Fields { - str.WriteString(", OUT ") - pgStr, err := out.Type.PGString() - if err != nil { - return "", err - } - - name := fmt.Sprintf("_out_%d", i+1) - str.WriteString(name) - str.WriteString(" ") - str.WriteString(pgStr) - outList = append(outList, name) - } - } - - str.WriteString(`)`) - - // If the return type is a table, we need to format the returns as a table. - if proc.Returns != nil && proc.Returns.IsTable { - str.WriteString(" RETURNS TABLE(") - for i, out := range proc.Returns.Fields { - if i > 0 { - str.WriteString(", ") - } - - str.WriteString(out.Name) - str.WriteString(" ") - pgStr, err := out.Type.PGString() - if err != nil { - return "", err - } - - str.WriteString(pgStr) - } - str.WriteString(")") - } else if proc.Returns != nil && len(proc.Returns.Fields) == 0 { - // if we are returning nothing, we need to specify that we are returning nothing. - str.WriteString(" RETURNS VOID") - } else if proc.Returns == nil { - // if we are returning nothing, we need to specify that we are returning nothing. - str.WriteString(" RETURNS VOID") - } //if none of the above trigger, then there must be OUT variables, so we do not need to specify a return type. - - str.WriteString(` AS $$ `) - - // declare variables - str.WriteString(`DECLARE - _schema_owner BYTEA; - _is_view BOOLEAN; - _is_owner_only BOOLEAN; - _is_public BOOLEAN; - _returns_table BOOLEAN; - _expected_input_types TEXT[]; - _expected_return_names TEXT[]; - _expected_return_types TEXT[]; - __old_foreign_caller TEXT;`) - - // begin block - str.WriteString("\nBEGIN") - - // select the procedure info, and perform checks 1-3 - str.WriteString(` - SELECT p.param_types, p.return_types, p.return_names, p.is_view, p.owner_only, p.public, s.owner, p.returns_table - INTO _expected_input_types, _expected_return_types, _expected_return_names, _is_view, _is_owner_only, _is_public, _schema_owner, _returns_table - FROM kwild_internal.procedures as p INNER JOIN kwild_internal.kwil_schemas as s - ON p.schema_id = s.id - WHERE p.name = _procedure AND s.dbid = _dbid; - - IF _schema_owner IS NULL THEN - RAISE EXCEPTION 'Procedure "%" not found in schema "%"', _procedure, _dbid; - END IF; - - IF _is_view = FALSE AND current_setting('transaction_read_only')::boolean = true THEN - RAISE EXCEPTION 'Non-view procedure "%" called in view-only connection', _procedure; - END IF; - - IF _is_owner_only = TRUE AND _schema_owner != decode(current_setting('ctx.signer'), 'base64') THEN - RAISE EXCEPTION 'Procedure "%" is owner-only and cannot be called by signer "%" in schema "%", expected signer "%"', _procedure, decode(current_setting('ctx.signer'), 'base64'), _dbid, _schema_owner; - END IF; - - IF _is_public = FALSE THEN - RAISE EXCEPTION 'Procedure "%" is not public and cannot be foreign called', _procedure; - END IF; - `) - - // check the length of the expected input types - // if no proc inputs, we check that inputs in the schema should be nil. - // If there are proc inputs, we first check that the array_length is not null, - // and then that it is equal, and then that the types match. 
- if len(proc.Parameters) == 0 { - // first check the length of the array - str.WriteString(fmt.Sprintf(` - IF array_length(_expected_input_types, 1) IS NOT NULL THEN - RAISE EXCEPTION 'Foreign procedure definition "%s" expects no args, but procedure "%%" located at DBID "%%" requires %% arg(s)', _procedure, _dbid, array_length(_expected_input_types, 1); - END IF; - `, proc.Name)) - } else { - str.WriteString(fmt.Sprintf(` - IF array_length(_expected_input_types, 1) IS NULL THEN - RAISE EXCEPTION 'Foreign procedure definition "%s" expects %d args, but procedure "%%" located at DBID "%%" requires no args', _procedure, _dbid; - END IF; - - IF array_length(_expected_input_types, 1) != %d THEN - RAISE EXCEPTION 'Foreign procedure definition "%s" expects %d args, but procedure "%%" located at DBID "%%" requires %% arg(s)', _procedure, _dbid, array_length(_expected_input_types, 1); - END IF;`, proc.Name, len(proc.Parameters), len(proc.Parameters), proc.Name, len(proc.Parameters))) - } - - // now we check that the types match - for i, in := range proc.Parameters { - str.WriteString(fmt.Sprintf(` - IF _expected_input_types[%d] != '%s' THEN - RAISE EXCEPTION 'Foreign procedure definition "%s" expects arg type "%s", but procedure "%%" located at DBID "%%" requires %%', _procedure, _dbid, _expected_input_types[%d]; - END IF;`, i+1, in.String(), proc.Name, in.String(), i+1)) - } - - // if there is an expected return, check that the return fields are the same count and type. - // If it returns a table, also check to make sure that the return names are the same. - if proc.Returns != nil { - // if foreign proc returns a table, check that the called procedure returns a table - // if foreign proc does not return a table, check that the called procedure does not return a table - if proc.Returns.IsTable { - str.WriteString(fmt.Sprintf(` - IF _returns_table = FALSE THEN - RAISE EXCEPTION 'Foreign procedure definition "%s" expects a table return, but procedure "%%" located at DBID "%%" does not return a table', _procedure, _dbid; - END IF;`, proc.Name)) - } else { - str.WriteString(fmt.Sprintf(` - IF _returns_table = TRUE THEN - RAISE EXCEPTION 'Foreign procedure definition "%s" expects a non-table return, but procedure "%%" located at DBID "%%" returns a table', _procedure, _dbid; - END IF;`, proc.Name)) - } - - str.WriteString(fmt.Sprintf(` - IF array_length(_expected_return_types, 1) IS NULL THEN - RAISE EXCEPTION 'Foreign procedure definition "%s" expects %d returns, but procedure "%%" located at DBID "%%" returns nothing', _procedure, _dbid; - END IF; - - IF array_length(_expected_return_types, 1) != %d THEN - RAISE EXCEPTION 'Foreign procedure definition "%s" expects %d returns, but procedure "%%" located at DBID "%%" returns %% fields', _procedure, _dbid, array_length(_expected_return_types, 1); - END IF;`, proc.Name, len(proc.Returns.Fields), len(proc.Returns.Fields), proc.Name, len(proc.Returns.Fields))) - - // check that the return types match - for i, out := range proc.Returns.Fields { - str.WriteString(fmt.Sprintf(` - IF _expected_return_types[%d] != '%s' THEN - RAISE EXCEPTION 'Foreign procedure definition "%s" expects return type "%s" at return position %d, but procedure "%%" located at DBID "%%" returns %%', _procedure, _dbid, _expected_return_types[%d]; - END IF;`, i+1, out.Type.String(), proc.Name, out.Type.String(), i+1, i+1)) - - // if it returns a table, check that the return names match - if proc.Returns.IsTable { - str.WriteString(fmt.Sprintf(` - IF _expected_return_names[%d] != '%s' THEN 
- RAISE EXCEPTION 'Foreign procedure definition "%s" expects return name "%s" at return column position %d, but procedure "%%" located at DBID "%%" returns %%', _procedure, _dbid, _expected_return_names[%d]; - END IF;`, i+1, out.Name, proc.Name, out.Name, i+1, i+1)) - } - } - - } else { - // if not expecting returns, ensure that the expected return types are nil - str.WriteString(fmt.Sprintf(` - IF _expected_return_types IS NOT NULL THEN - RAISE EXCEPTION 'Foreign procedure definition "%s" expects no returns, but procedure "%%" located at DBID "%%" returns non-nil value(s)', _procedure, _dbid; - END IF;`, proc.Name)) - } - - // we need to set the @foreign_caller variable to the schema calling this, - // and then set it back to what it was originally after the call. - str.WriteString(` - __old_foreign_caller := current_setting('ctx.foreign_caller'); - SET LOCAL ctx.foreign_caller =`) - str.WriteString(fmt.Sprintf(` '%s';`, dbid)) - - // now we call the procedure. - // If we are calling a table procedure, we need to use RETURN QUERY EXECUTE. - // Otherwise, we can just use EXECUTE INTO. - // we only have to worry about SQL injection for the DBID and the procedure name. - // Everything else is a string variable defined in this function - if proc.Returns != nil && proc.Returns.IsTable { - // if it returns a table, we need to use RETURN QUERY EXECUTE - str.WriteString(` - RETURN QUERY EXECUTE format('SELECT * FROM ds_%%I.%%I(`) - str.WriteString(dollarsignVars(argList)) - str.WriteString(`)', _dbid, _procedure)`) - } else { - // if it returns nothing, we do not need to worry - // about selecting INTO - str.WriteString(` - EXECUTE format('SELECT * FROM ds_%%I.%%I(`) - str.WriteString(dollarsignVars(argList)) - str.WriteString(`)', _dbid, _procedure)`) - - if proc.Returns != nil { - str.WriteString(` INTO `) - str.WriteString(formatStringList(outList)) - } - } - if len(argList) > 0 { - str.WriteString(" USING ") - str.WriteString(formatStringList(argList)) - } - - // set the foreign caller back to what it was - str.WriteString(`;`) - str.WriteString(`PERFORM set_config('ctx.foreign_caller', __old_foreign_caller, true);`) - - // end block - str.WriteString(` END; $$ LANGUAGE plpgsql;`) - - return str.String(), nil -} - -// dollarsignVars returns enough dollar signs to be used as a variable in a plpgsql function. 
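Condensing the generator above, the wrapper emitted for a hypothetical foreign procedure (called get_name here) that takes one int argument and returns a single text value would look roughly like the sketch below, held in a Go constant to match the surrounding code; the schema name ds_caller, the dbid literal, and the summarized validation comments are illustrative, not literal generator output.

package main

import "fmt"

// wrapperSketch is a hand-condensed illustration of what GenerateForeignProcedure
// builds for a foreign procedure with one INT8 parameter and one TEXT return,
// deployed to the hypothetical Postgres schema ds_caller.
const wrapperSketch = `
CREATE OR REPLACE FUNCTION ds_caller._fp_get_name(_dbid TEXT, _procedure TEXT, _arg1 INT8, OUT _out_1 TEXT) AS $$
DECLARE
    _schema_owner BYTEA;
    -- ...remaining metadata variables elided...
    __old_foreign_caller TEXT;
BEGIN
    -- 1. look up the target procedure in kwild_internal.procedures / kwil_schemas,
    --    raising if it is missing, mutative on a read-only connection,
    --    owner-only for a different signer, or not public;
    -- 2. compare the argument count and types against the foreign definition;
    -- 3. compare the return arity and types (and column names for table returns);
    __old_foreign_caller := current_setting('ctx.foreign_caller');
    SET LOCAL ctx.foreign_caller = 'caller_dbid'; -- hypothetical calling dbid
    EXECUTE format('SELECT * FROM ds_%I.%I($1)', _dbid, _procedure) INTO _out_1 USING _arg1;
    PERFORM set_config('ctx.foreign_caller', __old_foreign_caller, true);
END;
$$ LANGUAGE plpgsql;`

func main() { fmt.Println(wrapperSketch) }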
-func dollarsignVars(strs []string) string { - str := strings.Builder{} - for i := range strs { - if i > 0 { - str.WriteString(", ") - } - - str.WriteString(fmt.Sprintf("$%d", i+1)) - } - - return str.String() -} - -func formatStringList(strs []string) string { - str := strings.Builder{} - for i, s := range strs { - if i > 0 { - str.WriteString(", ") - } - - str.WriteString(s) - } - - return str.String() -} diff --git a/node/engine/generate/generate.go b/node/engine/generate/generate.go deleted file mode 100644 index 3ee4076df..000000000 --- a/node/engine/generate/generate.go +++ /dev/null @@ -1,36 +0,0 @@ -package generate - -import ( - "fmt" - - "github.com/kwilteam/kwil-db/core/types" -) - -// GenerateDDL generates the necessary table and index ddl statements for the given table -func GenerateDDL(pgSchema string, table *types.Table) ([]string, error) { - var statements []string - - createTableStatement, err := GenerateCreateTableStatement(pgSchema, table) - if err != nil { - return nil, err - } - statements = append(statements, createTableStatement) - - createIndexStatements, err := GenerateCreateIndexStatements(pgSchema, table.Name, table.Indexes) - if err != nil { - return nil, err - } - statements = append(statements, createIndexStatements...) - - for _, stmt := range statements { - if containsDisallowedDelimiter(stmt) { - return nil, fmt.Errorf("statement contains disallowed delimiter: %s", stmt) - } - } - - return statements, nil -} - -func wrapIdent(str string) string { - return fmt.Sprintf(`"%s"`, str) -} diff --git a/node/engine/generate/generate_test.go b/node/engine/generate/generate_test.go deleted file mode 100644 index 498362584..000000000 --- a/node/engine/generate/generate_test.go +++ /dev/null @@ -1,301 +0,0 @@ -package generate_test - -import ( - "strings" - "testing" - "unicode" - - "github.com/kwilteam/kwil-db/core/types" - "github.com/kwilteam/kwil-db/node/engine/generate" - "github.com/kwilteam/kwil-db/parse/postgres" - "github.com/stretchr/testify/assert" -) - -func TestGenerateDDL(t *testing.T) { - type args struct { - table *types.Table - } - tests := []struct { - name string - args args - want []string - wantErr bool - }{ - { - name: "table with composite primary key", - args: args{ - table: &types.Table{ - Name: "test", - Columns: []*types.Column{ - { - Name: "id", - Type: types.IntType, - Attributes: []*types.Attribute{ - { - Type: types.NOT_NULL, - }, - }, - }, - { - Name: "name", - Type: types.TextType, - Attributes: []*types.Attribute{ - { - Type: types.NOT_NULL, - }, - { - Type: types.DEFAULT, - Value: "'foo'", - }, - }, - }, - }, - Indexes: []*types.Index{ - { - Name: "test_index", - Type: types.UNIQUE_BTREE, - Columns: []string{"id", "name"}, - }, - { - Name: "CompositePrimaryKey", - Type: types.PRIMARY, - Columns: []string{"id", "name"}, - }, - }, - }, - }, - want: []string{ - `CREATE TABLE "dbid"."test" ("id" INT8 NOT NULL, "name" TEXT NOT NULL DEFAULT 'foo', PRIMARY KEY ("id", "name"));`, - `CREATE UNIQUE INDEX "test_index" ON "dbid"."test" ("id", "name");`, - }, - }, - { - name: "table with composite primary key and composite index", - args: args{ - table: &types.Table{ - Name: "test", - Columns: []*types.Column{ - { - Name: "id", - Type: types.IntType, - Attributes: []*types.Attribute{ - { - Type: types.NOT_NULL, - }, - }, - }, - { - Name: "name", - Type: types.TextType, - Attributes: []*types.Attribute{ - { - Type: types.NOT_NULL, - }, - { - Type: types.DEFAULT, - Value: "'foo'", - }, - }, - }, - }, - Indexes: []*types.Index{ - { - Name: 
"test_index", - Type: types.UNIQUE_BTREE, - Columns: []string{"id", "name"}, - }, - }, - }, - }, - wantErr: true, - }, - { - name: "table with foreign key on update set cascade", - args: args{ - table: &types.Table{ - Name: "test", - Columns: []*types.Column{ - { - Name: "id", - Type: types.IntType, - Attributes: []*types.Attribute{ - { - Type: types.PRIMARY_KEY, - }, - }, - }, - { - Name: "name", - Type: types.TextType, - Attributes: []*types.Attribute{ - { - Type: types.DEFAULT, - Value: "'foo'", - }, - }, - }, - }, - ForeignKeys: []*types.ForeignKey{ - { - - ChildKeys: []string{"name"}, - ParentKeys: []string{"username"}, - ParentTable: "users", - Actions: []*types.ForeignKeyAction{ - { - On: types.ON_UPDATE, - Do: types.DO_CASCADE, - }, - }, - }, - }, - }, - }, - want: []string{`CREATE TABLE "dbid"."test" ("id" INT8, "name" TEXT DEFAULT 'foo', FOREIGN KEY ("name") REFERENCES "dbid"."users"("username") ON UPDATE CASCADE, PRIMARY KEY ("id"));`}, - }, - { - name: "table with multiple foreign keys and multiple actions per foreign key", - args: args{ - table: &types.Table{ - Name: "table1", - Columns: []*types.Column{ - { - Name: "id", - Type: types.IntType, - Attributes: []*types.Attribute{ - { - Type: types.PRIMARY_KEY, - }, - }, - }, - { - Name: "name", - Type: types.TextType, - Attributes: []*types.Attribute{ - { - Type: types.DEFAULT, - Value: "'foo'", - }, - }, - }, - }, - ForeignKeys: []*types.ForeignKey{ - { - ChildKeys: []string{"name"}, - ParentKeys: []string{"username"}, - ParentTable: "users", - Actions: []*types.ForeignKeyAction{ - { - On: types.ON_UPDATE, - Do: types.DO_CASCADE, - }, - { - On: types.ON_DELETE, - Do: types.DO_SET_DEFAULT, - }, - }, - }, - { - ChildKeys: []string{"id", "name"}, - ParentKeys: []string{"id", "username"}, - ParentTable: "table2", - Actions: []*types.ForeignKeyAction{ - { - On: types.ON_UPDATE, - Do: types.DO_SET_NULL, - }, - { - On: types.ON_DELETE, - Do: types.DO_SET_NULL, - }, - }, - }, - }, - }, - }, - want: []string{`CREATE TABLE "dbid"."table1" ("id" INT8, "name" TEXT DEFAULT 'foo', FOREIGN KEY ("name") REFERENCES "dbid"."users"("username") ON UPDATE CASCADE ON DELETE SET DEFAULT, FOREIGN KEY ("id", "name") REFERENCES "dbid"."table2"("id", "username") ON UPDATE SET NULL ON DELETE SET NULL, PRIMARY KEY ("id"));`}, - }, - } - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - got, err := generate.GenerateDDL("dbid", tt.args.table) - if (err != nil) != tt.wantErr { - t.Errorf("GenerateDDL() error = %v, wantErr %v", err, tt.wantErr) - return - } - - if len(got) != len(tt.want) { - t.Errorf("GenerateDDL(): Got and Want have different lengths") - } - - for i, statement := range got { - want := tt.want[i] - if !compareIgnoringWhitespace(statement, want) { - t.Errorf("GenerateDDL() got = %v, want %v", got, tt.want) - } - - err = postgres.CheckSyntaxReplaceDollar(statement) - assert.NoErrorf(t, err, "postgres syntax check failed: %s", err) - } - }) - } -} - -// there used to be a bug where the DDL generator would edit a table's primary key index, -// if one existed. It would add an extra '\"' to the beginning and end of each column name. 
-func Test_PrimaryIndexModification(t *testing.T) { - testTable := &types.Table{ - Name: "test", - Columns: []*types.Column{ - { - Name: "id1", - Type: types.IntType, - }, - { - Name: "id2", // doing this to check composite primary keys - Type: types.IntType, - }, - }, - Indexes: []*types.Index{ - { - Name: "primary", - Columns: []string{ - "id1", - "id2", - }, - Type: types.PRIMARY, - }, - }, - } - - _, err := generate.GenerateDDL("dbid", testTable) - if err != nil { - t.Errorf("unexpected error: %v", err) - } - - // check that the primary key index was not modified - if testTable.Indexes[0].Columns[0] != "id1" { - t.Errorf("primary key index was modified. Expected 'id1', got '%s'", testTable.Indexes[0].Columns[0]) - } - - if testTable.Indexes[0].Columns[1] != "id2" { - t.Errorf("primary key index was modified. Expected 'id2', got '%s'", testTable.Indexes[0].Columns[1]) - } -} - -func removeWhitespace(s string) string { - return strings.Map(func(r rune) rune { - if unicode.IsSpace(r) { - return -1 // skip this rune - } - return r - }, s) -} - -// compareIgnoringWhitespace compares two strings while ignoring whitespace characters. -func compareIgnoringWhitespace(a, b string) bool { - aWithoutWhitespace := removeWhitespace(a) - bWithoutWhitespace := removeWhitespace(b) - - return aWithoutWhitespace == bWithoutWhitespace -} diff --git a/node/engine/generate/index.go b/node/engine/generate/index.go deleted file mode 100644 index 2e2cdb8ec..000000000 --- a/node/engine/generate/index.go +++ /dev/null @@ -1,54 +0,0 @@ -package generate - -import ( - "fmt" - "strings" - - "github.com/kwilteam/kwil-db/core/types" -) - -func indexTypeToSQLString(indexType types.IndexType) (string, error) { - err := indexType.Clean() - if err != nil { - return "", err - } - - switch indexType { - case types.BTREE: - return "", nil - case types.UNIQUE_BTREE: - return " UNIQUE", nil - case types.PRIMARY: - return " PRIMARY KEY", nil - default: - return "", fmt.Errorf("unknown index type: %s", indexType) - } -} - -func GenerateCreateIndexStatements(pgSchema, tableName string, indexes []*types.Index) ([]string, error) { - var statements []string - - for _, index := range indexes { - indexType, err := indexTypeToSQLString(index.Type) - if err != nil { - return nil, err - } - - // Skip primary indexes, as they are created with the table - if strings.EqualFold(index.Type.String(), types.PRIMARY.String()) { - continue - } - - cols := make([]string, len(index.Columns)) - for i, col := range index.Columns { - cols[i] = wrapIdent(col) - } - columns := strings.Join(cols, ", ") - - statement := fmt.Sprintf("CREATE%s INDEX %s ON %s.%s (%s);", indexType, wrapIdent(index.Name), - wrapIdent(pgSchema), wrapIdent(tableName), columns) - statements = append(statements, strings.TrimSpace(statement)) - } - - return statements, nil -} diff --git a/node/engine/generate/metadata.go b/node/engine/generate/metadata.go deleted file mode 100644 index a1b4abc83..000000000 --- a/node/engine/generate/metadata.go +++ /dev/null @@ -1,8 +0,0 @@ -package generate - -var ( - // PgSessionPrefix is the prefix for all session variables. - // It is used in combination with Postgre's current_setting function - // to set contextual variables. 
- PgSessionPrefix = "ctx" -) diff --git a/node/engine/generate/procedure.go b/node/engine/generate/procedure.go deleted file mode 100644 index 803988e29..000000000 --- a/node/engine/generate/procedure.go +++ /dev/null @@ -1,276 +0,0 @@ -package generate - -import ( - "fmt" - "strings" - - "github.com/kwilteam/kwil-db/core/types" - "github.com/kwilteam/kwil-db/core/utils/order" - "github.com/kwilteam/kwil-db/parse" -) - -// GenerateProcedure generates the plpgsql code for a procedure. -func GenerateProcedure(proc *types.Procedure, schema *types.Schema, pgSchema string) (ddl string, err error) { - defer func() { - if e := recover(); e != nil { - err = fmt.Errorf("panic: %v", e) - } - - // annotate the error with the procedure name - if err != nil { - err = fmt.Errorf("procedure %s: %w", proc.Name, err) - } - }() - - res, err := parse.ParseProcedure(proc, schema) - if err != nil { - return "", err - } - - if res.ParseErrs.Err() != nil { - return "", res.ParseErrs.Err() - } - - vars := make([]*types.NamedType, len(proc.Parameters)) - for i, param := range proc.Parameters { - vars[i] = &types.NamedType{ - Name: formatParameterName(param.Name[1:]), - Type: param.Type, - } - } - - // we copy the return as to not modify it - // we need to write return types if there are any. - // If it returns a table, we do not want to change the column names, - // since it will change the result. However, if there are out variables, - // we want to format them - var ret types.ProcedureReturn - if proc.Returns != nil { - ret.IsTable = proc.Returns.IsTable - ret.Fields = make([]*types.NamedType, len(proc.Returns.Fields)) - for i, field := range proc.Returns.Fields { - if ret.IsTable { - ret.Fields[i] = field - } else { - ret.Fields[i] = &types.NamedType{ - Name: formatReturnVar(i), - Type: field.Type, - } - } - } - } - - analyzed := &analyzedProcedure{ - Name: proc.Name, - Parameters: vars, - Returns: ret, - IsView: proc.IsView(), - OwnerOnly: proc.IsOwnerOnly(), - } - - // we need to get the variables and anonymous variables (loop targets) - for _, v := range order.OrderMap(res.Variables) { - if v.Key[0] != '$' { - panic(fmt.Sprintf("internal bug: expected variable name to start with $, got name %s", v.Key)) - } - - analyzed.DeclaredVariables = append(analyzed.DeclaredVariables, &types.NamedType{ - Name: formatParameterName(v.Key[1:]), - Type: v.Value, - }) - } - - for _, v := range order.OrderMap(res.CompoundVariables) { - // TODO: this isn't a perfect solution. Only loop targets for - // SQL statements are put here. Loops targets over ranges and arrays - // would be raw variables. These should be declared as their base types, not RECORD. 
- if v.Key[0] != '$' { - panic(fmt.Sprintf("internal bug: expected variable name to start with $, got name %s", v.Key)) - } - analyzed.LoopTargets = append(analyzed.LoopTargets, formatParameterName(v.Key[1:])) - } - - // we need to visit the AST to get the generated body - sqlGen := &procedureGenerator{ - sqlGenerator: sqlGenerator{ - pgSchema: pgSchema, - }, - procedure: proc, - } - - str := strings.Builder{} - for _, stmt := range res.AST { - str.WriteString(stmt.Accept(sqlGen).(string)) - } - - // little sanity check: - if len(res.AnonymousReceivers) != sqlGen.anonymousReceivers { - return "", fmt.Errorf("internal bug: expected %d anonymous variables, got %d", sqlGen.anonymousReceivers, len(res.CompoundVariables)) - } - // append all anonymous variables to the declared variables - for i, v := range res.AnonymousReceivers { - analyzed.DeclaredVariables = append(analyzed.DeclaredVariables, &types.NamedType{ - Name: formatAnonymousReceiver(i), - Type: v, - }) - } - - analyzed.Body = str.String() - - return generateProcedureWrapper(analyzed, pgSchema) -} - -type analyzedProcedure struct { - // Name is the name of the procedure. - Name string - // Parameters are the parameters, in order, that the procedure is expecting. - // If no parameters are expected, this will be nil. - Parameters []*types.NamedType - // Returns is the expected return type(s) of the procedure. - // If no return is expected, this will be nil. - Returns types.ProcedureReturn - // DeclaredVariables are the variables that need to be declared. - DeclaredVariables []*types.NamedType - // LoopTargets is a list of all variables that are loop targets. - // They should be declared as RECORD in plpgsql. - LoopTargets []string - // Body is the plpgsql code for the procedure. - Body string - // IsView is true if the procedure is a view. - IsView bool - // OwnerOnly is true if the procedure is owner-only. - OwnerOnly bool -} - -// generateProcedureWrapper generates the plpgsql code for a procedure, not including the body. -// It takes a procedure and the body of the procedure and returns the plpgsql code that creates -// the procedure. -func generateProcedureWrapper(proc *analyzedProcedure, pgSchema string) (string, error) { - if containsDisallowedDelimiter(proc.Body) { - return "", fmt.Errorf("procedure body contains disallowed delimiter") - } - - str := strings.Builder{} - str.WriteString("CREATE OR REPLACE FUNCTION ") - str.WriteString(fmt.Sprintf("%s.%s(", pgSchema, proc.Name)) - - // writing the function parameters - - // paramSet tracks the used params, and will not allow them - // to be redeclared in the DECLARE section. 
- paramSet := make(map[string]struct{}) - i := -1 - var field *types.NamedType - for i, field = range proc.Parameters { - if i != 0 { - str.WriteString(", ") - } - - paramSet[field.Name] = struct{}{} - - typ, err := field.Type.PGString() - if err != nil { - return "", err - } - - str.WriteString(fmt.Sprintf("%s %s", field.Name, typ)) - } - - hasOutReturns := false - if len(proc.Returns.Fields) > 0 && !proc.Returns.IsTable { - hasOutReturns = true - if i != -1 { - str.WriteString(", ") - } - - for i, field := range proc.Returns.Fields { - if i != 0 { - str.WriteString(", ") - } - - typ, err := field.Type.PGString() - if err != nil { - return "", err - } - - str.WriteString(fmt.Sprintf("OUT %s %s", field.Name, typ)) - } - } - - str.WriteString(") ") - - // writing the return type - if proc.Returns.IsTable && len(proc.Returns.Fields) > 0 { - str.WriteString("\nRETURNS ") - - str.WriteString("TABLE(") - for i, field := range proc.Returns.Fields { - if i != 0 { - str.WriteString(", ") - } - - typ, err := field.Type.PGString() - if err != nil { - return "", err - } - - str.WriteString(fmt.Sprintf("%s %s", field.Name, typ)) - } - str.WriteString(") ") - } else if !hasOutReturns { - str.WriteString("\nRETURNS void ") - } - - str.WriteString("AS ") - str.WriteString(delimiter) - str.WriteString("\n") - - // we can only have conflict if we use RETURN TABLE. Since we don't allow - // direct assignment to columns like plpgsql, we always want these conflicts - // to refer to the columns. - // see: https://www.postgresql.org/docs/current/plpgsql-implementation.html - str.WriteString("#variable_conflict use_column\n") - - // writing the variable declarations - - // declaresTypes tracks if the DECLARE section is needed. - declaresTypes := false - declareSection := strings.Builder{} - if len(proc.DeclaredVariables) > 0 { - for _, declare := range proc.DeclaredVariables { - _, ok := paramSet[declare.Name] - if ok { - continue - } - - typ, err := declare.Type.PGString() - if err != nil { - return "", err - } - - declaresTypes = true - declareSection.WriteString(fmt.Sprintf("%s %s;\n", declare.Name, typ)) - } - } - if len(proc.LoopTargets) > 0 { - declaresTypes = true - for _, loopTarget := range proc.LoopTargets { - declareSection.WriteString(fmt.Sprintf("%s RECORD;\n", loopTarget)) - } - } - - if declaresTypes { - str.WriteString("DECLARE\n") - str.WriteString(declareSection.String()) - } - - // finishing the function - - str.WriteString("BEGIN\n") - str.WriteString(proc.Body) - str.WriteString("\nEND;\n") - str.WriteString(delimiter) - str.WriteString(" LANGUAGE plpgsql;") - - return str.String(), nil -} diff --git a/node/engine/generate/procedure_test.go b/node/engine/generate/procedure_test.go deleted file mode 100644 index ed0f4edda..000000000 --- a/node/engine/generate/procedure_test.go +++ /dev/null @@ -1,226 +0,0 @@ -package generate - -import ( - "fmt" - "testing" - - "github.com/kwilteam/kwil-db/core/types" - "github.com/stretchr/testify/assert" -) - -func Test_Procedure(t *testing.T) { - name := "test_procedure" - schema := "test_schema" - body := "test_body" - - type testcase struct { - name string - fields []*types.NamedType - returns *types.ProcedureReturn - decls []*types.NamedType - loopTargets []string - want string - } - - tests := []testcase{ - { - name: "basic usage", - fields: []*types.NamedType{ - { - Name: "field1", - Type: &types.DataType{ - Name: "text", - }, - }, - }, - returns: nil, - decls: nil, - want: "CREATE OR REPLACE FUNCTION test_schema.test_procedure(field1 TEXT) \nRETURNS 
void AS $kwil_reserved_delim$\n#variable_conflict use_column\nBEGIN\ntest_body\nEND;\n$kwil_reserved_delim$ LANGUAGE plpgsql;", - }, - { - name: "multiple fields and return types", - fields: []*types.NamedType{ - { - Name: "field1", - Type: &types.DataType{ - Name: "text", - IsArray: true, - }, - }, - { - Name: "field2", - Type: types.TextType, - }, - }, - returns: &types.ProcedureReturn{ - Fields: []*types.NamedType{ - { - Name: "field1", // gets ignored - Type: &types.DataType{ - Name: "text", - IsArray: true, - }, - }, - { - Name: "field2", // gets ignored - Type: &types.DataType{ - Name: "bool", - }, - }, - }, - }, - decls: nil, - want: "CREATE OR REPLACE FUNCTION test_schema.test_procedure(field1 TEXT[], field2 TEXT, OUT _out_0 TEXT[], OUT _out_1 BOOL) AS $kwil_reserved_delim$\n#variable_conflict use_column\nBEGIN\ntest_body\nEND;\n$kwil_reserved_delim$ LANGUAGE plpgsql;", - }, - { - name: "no fields, multiple return types", - fields: nil, - returns: &types.ProcedureReturn{ - Fields: []*types.NamedType{ - { - Name: "field1", // gets ignored - Type: &types.DataType{ - Name: "text", - IsArray: true, - }, - }, - { - Name: "field2", // gets ignored - Type: &types.DataType{ - Name: "bool", - }, - }, - }, - }, - decls: nil, - want: "CREATE OR REPLACE FUNCTION test_schema.test_procedure(OUT _out_0 TEXT[], OUT _out_1 BOOL) AS $kwil_reserved_delim$\n#variable_conflict use_column\nBEGIN\ntest_body\nEND;\n$kwil_reserved_delim$ LANGUAGE plpgsql;", - }, - { - name: "single field, single return type", - fields: []*types.NamedType{ - { - Name: "field1", - Type: types.TextType, - }, - }, - returns: &types.ProcedureReturn{ - Fields: []*types.NamedType{ - { - Name: "field1", // gets ignored - Type: types.TextType, - }, - }, - }, - decls: nil, - want: "CREATE OR REPLACE FUNCTION test_schema.test_procedure(field1 TEXT, OUT _out_0 TEXT) AS $kwil_reserved_delim$\n#variable_conflict use_column\nBEGIN\ntest_body\nEND;\n$kwil_reserved_delim$ LANGUAGE plpgsql;", - }, - { - name: "return table", - fields: nil, - returns: &types.ProcedureReturn{ - IsTable: true, - Fields: []*types.NamedType{ - { - Name: "field1", - Type: types.TextType, - }, - { - Name: "field2", - Type: &types.DataType{ - Name: "int", - IsArray: true, - }, - }, - }, - }, - decls: []*types.NamedType{ - { - Name: "local_type", - Type: types.TextType, - }, - { - Name: "cars", - Type: &types.DataType{ - Name: "int", - IsArray: true, - }, - }, - }, - want: "CREATE OR REPLACE FUNCTION test_schema.test_procedure() \nRETURNS TABLE(field1 TEXT, field2 INT8[]) AS $kwil_reserved_delim$\n#variable_conflict use_column\nDECLARE\nlocal_type TEXT;\ncars INT8[];\nBEGIN\ntest_body\nEND;\n$kwil_reserved_delim$ LANGUAGE plpgsql;", - }, - { - name: "variable is declared as parameter", - fields: []*types.NamedType{ - { - Name: "field1", - Type: types.TextType, - }, - }, - returns: nil, - decls: []*types.NamedType{ - { - Name: "field1", - Type: types.TextType, - }, - }, - want: "CREATE OR REPLACE FUNCTION test_schema.test_procedure(field1 TEXT) \nRETURNS void AS $kwil_reserved_delim$\n#variable_conflict use_column\nBEGIN\ntest_body\nEND;\n$kwil_reserved_delim$ LANGUAGE plpgsql;", - }, - { - name: "loops", - fields: nil, - returns: nil, - decls: nil, - loopTargets: []string{ - "loop1", - }, - want: "CREATE OR REPLACE FUNCTION test_schema.test_procedure() \nRETURNS void AS $kwil_reserved_delim$\n#variable_conflict use_column\nDECLARE\nloop1 RECORD;\nBEGIN\ntest_body\nEND;\n$kwil_reserved_delim$ LANGUAGE plpgsql;", - }, - } - - for _, test := range tests { - 
t.Run(test.name, func(t *testing.T) { - var outParams []*types.NamedType - if test.returns != nil && !test.returns.IsTable { - for i, t := range test.returns.Fields { - outParams = append(outParams, &types.NamedType{ - Name: fmt.Sprintf("_out_%d", i), - Type: t.Type, - }) - } - - test.returns.Fields = outParams - } - - ret := types.ProcedureReturn{} - if test.returns != nil { - ret = *test.returns - } - - got, err := generateProcedureWrapper(&analyzedProcedure{ - Name: name, - Parameters: test.fields, - Returns: ret, - DeclaredVariables: test.decls, - LoopTargets: test.loopTargets, - Body: body, - IsView: false, - OwnerOnly: false, - }, schema) - if err != nil { - t.Errorf("ddl.GeneratedProcedure() error = %v", err) - return - } - assert.Equal(t, test.want, got) - }) - } -} - -func Test_Delimiter(t *testing.T) { - _, err := generateProcedureWrapper(&analyzedProcedure{ - Name: "test_procedure", - Parameters: nil, - Returns: types.ProcedureReturn{}, - Body: "'" + delimiter + " other stuff'", - }, "test_schema") - assert.Error(t, err) -} diff --git a/node/engine/generate/sql.go b/node/engine/generate/sql.go deleted file mode 100644 index 7fd2e188c..000000000 --- a/node/engine/generate/sql.go +++ /dev/null @@ -1,32 +0,0 @@ -package generate - -import ( - "fmt" - - "github.com/kwilteam/kwil-db/parse" -) - -// WriteSQL converts a SQL node to a string. -// It can optionally rewrite named parameters to numbered parameters. -// If so, it returns the order of the parameters in the order they appear in the statement. -func WriteSQL(node *parse.SQLStatement, orderParams bool, pgSchema string) (stmt string, params []string, err error) { - if node == nil { - return "", nil, fmt.Errorf("SQL parse node is nil") - } - - defer func() { - if e := recover(); e != nil { - err = fmt.Errorf("panic: %v", e) - } - }() - - sqlGen := &sqlGenerator{ - pgSchema: pgSchema, - } - if orderParams { - sqlGen.numberParameters = true - } - stmt = node.Accept(sqlGen).(string) - - return stmt + ";", sqlGen.orderedParams, nil -} diff --git a/node/engine/generate/table.go b/node/engine/generate/table.go deleted file mode 100644 index 0fcb43567..000000000 --- a/node/engine/generate/table.go +++ /dev/null @@ -1,113 +0,0 @@ -package generate - -import ( - "fmt" - "strings" - - "github.com/kwilteam/kwil-db/core/types" -) - -func GenerateCreateTableStatement(pgSchema string, table *types.Table) (string, error) { - var columnsAndKeys []string - - for _, column := range table.Columns { - colName := wrapIdent(column.Name) - colType, err := column.Type.PGString() - if err != nil { - return "", err - } - - var colAttributes []string - - for _, attr := range column.Attributes { - attrStr, err := attributeToSQLString(column, attr) - if err != nil { - return "", err - } - if attrStr != "" { - colAttributes = append(colAttributes, attrStr) - } - } - - columnDef := fmt.Sprintf("%s %s %s", colName, colType, strings.Join(colAttributes, " ")) - columnsAndKeys = append(columnsAndKeys, strings.TrimSpace(columnDef)) - } - - // now add foreign keys - for _, fk := range table.ForeignKeys { - fkStmt, err := generateForeignKeyStmt(pgSchema, fk) // for now assume that schema for all foreign key tables is same - if err != nil { - return "", err - } - columnsAndKeys = append(columnsAndKeys, fkStmt) - } - - // now build the primary key - pkColumns, err := table.GetPrimaryKey() - if err != nil { - return "", err - } - - columnsAndKeys = append(columnsAndKeys, fmt.Sprintf("PRIMARY KEY (%s)", strings.Join(wrapIdents(pkColumns), ", "))) - - return 
fmt.Sprintf("CREATE TABLE %s.%s ( %s) ;", wrapIdent(pgSchema), - wrapIdent(table.Name), strings.Join(columnsAndKeys, ", ")), nil -} - -func wrapIdents(idents []string) []string { - for i, ident := range idents { - idents[i] = wrapIdent(ident) - } - return idents -} - -func generateForeignKeyStmt(pgSchema string, fk *types.ForeignKey) (string, error) { - stmt := strings.Builder{} - stmt.WriteString(` FOREIGN KEY (`) - writeDelimitedStrings(&stmt, fk.ChildKeys) - stmt.WriteString(`) REFERENCES `) - if pgSchema != "" { - stmt.WriteString(wrapIdent(pgSchema)) // fk.ParentSchema maybe - stmt.WriteString(".") - } - stmt.WriteString(wrapIdent(fk.ParentTable)) - stmt.WriteString("(") - writeDelimitedStrings(&stmt, fk.ParentKeys) - stmt.WriteString(") ") - - for _, action := range fk.Actions { - actionStmt, err := generateForeignKeyActionClause(action) - if err != nil { - return "", err - } - stmt.WriteString(actionStmt) - } - - return stmt.String(), nil -} - -func writeDelimitedStrings(stmt *strings.Builder, strs []string) { - for i, str := range strs { - if i > 0 && i < len(strs) { - stmt.WriteString(", ") - } - - stmt.WriteString(wrapIdent(str)) - } -} - -func generateForeignKeyActionClause(action *types.ForeignKeyAction) (string, error) { - err := action.Clean() - if err != nil { - return "", err - } - - stmt := strings.Builder{} - stmt.WriteString(" ON ") - stmt.WriteString(action.On.String()) - stmt.WriteString(" ") - stmt.WriteString(action.Do.String()) - stmt.WriteString(" ") - - return stmt.String(), nil -} diff --git a/node/engine/generate/utils.go b/node/engine/generate/utils.go deleted file mode 100644 index 97d28824a..000000000 --- a/node/engine/generate/utils.go +++ /dev/null @@ -1,62 +0,0 @@ -package generate - -import ( - "strings" - - "github.com/kwilteam/kwil-db/core/types" - "github.com/kwilteam/kwil-db/node/utils/conv" -) - -func attributeToSQLString(col *types.Column, attr *types.Attribute) (string, error) { - switch attr.Type { - case types.PRIMARY_KEY: - return "", nil - case types.DEFAULT: - return "DEFAULT " + attr.Value, nil - case types.NOT_NULL: - return "NOT NULL", nil - case types.UNIQUE: - return "UNIQUE", nil - case types.MIN: - return "CHECK (" + col.Name + " >= " + attr.Value + ")", nil - case types.MAX: - return "CHECK (" + col.Name + " <= " + attr.Value + ")", nil - case types.MIN_LENGTH: - - // for max_len and min_len, we want to check that the value is an int. - // For regular max and min, it can be a decimal or uint256. 
- - _, err := conv.Int(attr.Value) - if err != nil { - return "", err - } - - fn := "LENGTH" - if col.Type.Equals(types.BlobType) { - fn = "OCTET_LENGTH" - } - - return "CHECK (" + fn + "(" + col.Name + ") >= " + attr.Value + ")", nil - case types.MAX_LENGTH: - _, err := conv.Int(attr.Value) - if err != nil { - return "", err - } - - fn := "LENGTH" - if col.Type.Equals(types.BlobType) { - fn = "OCTET_LENGTH" - } - - return "CHECK (" + fn + "(" + col.Name + ") <= " + attr.Value + ")", nil - default: - return "", nil - } -} - -const delimiter = "$kwil_reserved_delim$" - -// containsDisallowedDelimiter checks if the string contains the delimiter -func containsDisallowedDelimiter(s string) bool { - return strings.Contains(s, delimiter) -} diff --git a/node/engine/integration/deployment_test.go b/node/engine/integration/deployment_test.go deleted file mode 100644 index 9f8bdbb84..000000000 --- a/node/engine/integration/deployment_test.go +++ /dev/null @@ -1,256 +0,0 @@ -//go:build pglive && engineinteg - -package integration_test - -import ( - "context" - "testing" - - "github.com/kwilteam/kwil-db/common" - "github.com/kwilteam/kwil-db/parse" - "github.com/stretchr/testify/require" -) - -// TestDeployment tests the negative cases for deployment of schemas -func Test_Deployment(t *testing.T) { - type testCase struct { - name string - // either schema or procedure should be set - // If schema is set, it will deploy the whole schema. - // If only procedure is set, it expects only a procedure - // body, and will wrap the procedure in a schema. - schema string - procedure string - err error - } - - testCases := []testCase{ - { - name: "view procedure mutates", - schema: ` - database mutative_view; - - table users { - id int primary key - } - - procedure mutate_in_view() public view { - INSERT INTO users (id) VALUES (1); - }`, - err: parse.ErrViewMutatesState, - }, - { - name: "view procedure calls non-view", - schema: ` - database view_calls_non_view; - - table users { - id int primary key - } - - procedure view_calls_non_view() public view { - not_a_view(); - } - - procedure not_a_view() public { - INSERT INTO users (id) VALUES (1); - }`, - err: parse.ErrViewMutatesState, - }, - { - name: "empty procedure", - schema: ` - database empty_procedure; - - procedure empty_procedure() public {} - `, - }, - { - name: "untyped variable", - procedure: `$intval := 1;`, // this can infer the type - }, - { - name: "undeclared variable", - procedure: `$intval int := $a;`, - err: parse.ErrUndeclaredVariable, - }, - { - name: "non-existent @ variable", - procedure: `$id int := @ethereum_height;`, - err: parse.ErrUnknownContextualVariable, - }, - { - name: "unknown function", - procedure: ` - $int int := unknown_function(); - `, - err: parse.ErrUnknownFunctionOrProcedure, - }, - { - name: "known procedure", - schema: ` - database known_procedure; - - procedure known_procedure() public returns table(id int) { - select 1 as id; - } - - procedure known_procedure_2() public { - for $row in select * from known_procedure() as k { - - } - } - `, - }, - { - name: "unknown function in SQL", - procedure: ` - for $row in select * from unknown_function() { - break; - } - `, - err: parse.ErrUnknownFunctionOrProcedure, - }, - { - name: "various foreign procedures", - schema: `database foreign_procedures; - - foreign procedure get_tbl() returns table(id int) - foreign procedure get_scalar(int) returns (int) - foreign procedure get_named_scalar(int) returns (id int) - - procedure call_all() public returns table(id int) { - $int1 
int := get_scalar['dbid', 'get_scalar'](1); - $int2 int := get_named_scalar['dbid', 'get_scalar'](1); - - return select * from get_tbl['dbid', 'get_table']() as u; - } - `, - }, - { - name: "procedure returns select join from others", - schema: `database select_join; - - table users { - id int primary key, - name text - } - - foreign procedure get_tbl() returns table(id int) - - procedure get_users() public returns table(id int, name text) { - return select * from users; - } - - // get_all joins the users table with the result of get_tbl - procedure get_all() public returns table(id int, name text) { - return select a.id as id, u.name as name from get_tbl['dbid', 'get_tbl']() AS a - INNER JOIN get_users() AS u ON a.id = u.id; - } - `, - }, - { - name: "action references foreign procedure and local procedure", - schema: `database select_join; - - table users { - id int primary key, - name text - } - - foreign procedure get_tbl() returns table(id int) - - procedure get_users() public returns table(id int, name text) { - return select * from users; - } - - // get_all joins the users table with the result of get_tbl - action get_all() public view { - select a.id as id, u.name as name from get_tbl['dbid', 'get_tbl']() AS a - INNER JOIN get_users() AS u ON a.id = u.id; - } - `, - }, - { - name: "action references unknown foreign procedure", - schema: `database select_join; - - action get_all() public view { - select * from get_tbl['dbid', 'get_tbl'](); - } - `, - err: parse.ErrUnknownFunctionOrProcedure, - }, - { - name: "schema with max_len blob", - schema: `database max_len_blob; - - table users { - id int primary key, - data blob max_len(10) - }`, - }, - { - name: "max on uint256", - schema: `database max_uint256; - - table users { - id int primary key, - data uint256 max(1000000000000000000000000000000000000000) - }`, - }, - } - - for _, tc := range testCases { - t.Run(tc.name, func(t *testing.T) { - if tc.schema != "" && tc.procedure != "" { - t.Fatal("both schema and procedure set") - } - - schema := tc.schema - if tc.procedure != "" { - schema = `database t; - - procedure t() public { - ` + tc.procedure + ` - }` - } - - global, db, err := setup(t) - if err != nil { - t.Fatal(err) - } - defer cleanup(t, db) - - ctx := context.Background() - - tx, err := db.BeginTx(ctx) - require.NoError(t, err) - defer tx.Rollback(ctx) - - readonly, err := db.BeginReadTx(ctx) - require.NoError(t, err) - defer readonly.Rollback(ctx) - - // we intentionally use the bare kuneiform parser and don't - // perform extra checks because we want to test that the engine - // catches these errors - parsed, err := parse.ParseSchemaWithoutValidation([]byte(schema)) - require.NoError(t, err) - parsed.Schema.Owner = owner - - err = global.CreateDataset(&common.TxContext{ - BlockContext: &common.BlockContext{}, - Signer: owner, - Caller: string(owner), - TxID: "test", - Ctx: ctx, - }, tx, parsed.Schema) - if tc.err != nil { - require.ErrorIs(t, err, tc.err) - } else { - require.NoError(t, err) - } - }) - } -} diff --git a/node/engine/integration/execution_test.go b/node/engine/integration/execution_test.go deleted file mode 100644 index b2092f414..000000000 --- a/node/engine/integration/execution_test.go +++ /dev/null @@ -1,816 +0,0 @@ -//go:build pglive && engineinteg - -package integration_test - -import ( - "context" - "fmt" - "testing" - - "github.com/kwilteam/kwil-db/common" - "github.com/kwilteam/kwil-db/core/types" - "github.com/kwilteam/kwil-db/node/engine/execution" - 
"github.com/kwilteam/kwil-db/node/engine/testdata" - "github.com/kwilteam/kwil-db/node/pg" - "github.com/kwilteam/kwil-db/node/types/sql" - - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" -) - -func Test_Engine(t *testing.T) { - type testCase struct { - name string - // ses1 is the first round of execution - ses1 func(t *testing.T, global *execution.GlobalContext, tx sql.DB) - - // ses2 is the second round of execution - ses2 func(t *testing.T, global *execution.GlobalContext, tx sql.DB) - // after is called after the second round - // It is not called in a session, and therefore can only read from the database. - after func(t *testing.T, global *execution.GlobalContext, tx sql.DB) - } - - tests := []testCase{ - { - name: "create database", - ses1: func(t *testing.T, global *execution.GlobalContext, tx sql.DB) { - ctx := context.Background() - err := global.CreateDataset(&common.TxContext{ - BlockContext: &common.BlockContext{}, - Signer: testdata.TestSchema.Owner, - Caller: string(testdata.TestSchema.Owner), - TxID: "txid1", - Ctx: ctx, - }, tx, testdata.TestSchema) - require.NoError(t, err) - }, - after: func(t *testing.T, global *execution.GlobalContext, tx sql.DB) { - schema, err := global.GetSchema(testdata.TestSchema.DBID()) - require.NoError(t, err) - - require.EqualValues(t, testdata.TestSchema, schema) - - dbs, err := global.ListDatasets(testdata.TestSchema.Owner) - require.NoError(t, err) - - require.Equal(t, 1, len(dbs)) - require.Equal(t, testdata.TestSchema.Name, dbs[0].Name) - }, - }, - { - name: "drop database", - ses1: func(t *testing.T, global *execution.GlobalContext, tx sql.DB) { - ctx := context.Background() - err := global.CreateDataset(&common.TxContext{ - BlockContext: &common.BlockContext{}, - Signer: testdata.TestSchema.Owner, - Caller: string(testdata.TestSchema.Owner), - TxID: "txid1", - Ctx: ctx, - }, tx, testdata.TestSchema) - require.NoError(t, err) - - }, - ses2: func(t *testing.T, global *execution.GlobalContext, tx sql.DB) { - ctx := context.Background() - err := global.DeleteDataset(&common.TxContext{ - BlockContext: &common.BlockContext{}, - Signer: testdata.TestSchema.Owner, - Caller: string(testdata.TestSchema.Owner), - TxID: "txid2", - Ctx: ctx, - }, tx, testdata.TestSchema.DBID()) - require.NoError(t, err) - }, - after: func(t *testing.T, global *execution.GlobalContext, tx sql.DB) { - dbs, err := global.ListDatasets(testdata.TestSchema.Owner) - require.NoError(t, err) - - require.Equal(t, 0, len(dbs)) - }, - }, - { - name: "execute procedures", - ses1: func(t *testing.T, global *execution.GlobalContext, tx sql.DB) { - ctx := context.Background() - err := global.CreateDataset(&common.TxContext{ - BlockContext: &common.BlockContext{}, - Signer: testdata.TestSchema.Owner, - Caller: string(testdata.TestSchema.Owner), - TxID: "txid1", - Ctx: ctx, - }, tx, testdata.TestSchema) - require.NoError(t, err) - }, - ses2: func(t *testing.T, global *execution.GlobalContext, tx sql.DB) { - signer := "signer" - - ctx := context.Background() - _, err := global.Procedure(&common.TxContext{ - BlockContext: &common.BlockContext{}, - Signer: []byte(signer), - Caller: signer, - Ctx: ctx, - }, tx, &common.ExecutionData{ - Dataset: testdata.TestSchema.DBID(), - Procedure: testdata.ActionCreateUser.Name, - Args: []any{1, "satoshi", 42}, - }) - require.NoError(t, err) - - _, err = global.Procedure(&common.TxContext{ - BlockContext: &common.BlockContext{}, - Signer: []byte(signer), - Caller: signer, - Ctx: ctx, - }, tx, &common.ExecutionData{ - 
Dataset: testdata.TestSchema.DBID(), - Procedure: testdata.ActionCreatePost.Name, - Args: []any{1, "Bitcoin!", "The Bitcoin Whitepaper", "9/31/2008"}, - }) - require.NoError(t, err) - }, - after: func(t *testing.T, global *execution.GlobalContext, tx sql.DB) { - ctx := context.Background() - - res, err := global.Procedure(&common.TxContext{ - BlockContext: &common.BlockContext{}, - Signer: []byte("signer"), - Caller: "signer", - Ctx: ctx, - }, tx, &common.ExecutionData{ - Dataset: testdata.TestSchema.DBID(), - Procedure: testdata.ActionGetPosts.Name, - Args: []any{"satoshi"}, - }) - require.NoError(t, err) - - require.Equal(t, res.Columns, []string{"id", "title", "content", "post_date", "author"}) - require.Equal(t, len(res.Rows), 1) - - row1 := res.Rows[0] - - require.Equal(t, row1[0], int64(1)) - require.Equal(t, row1[1], "Bitcoin!") - require.Equal(t, row1[2], "The Bitcoin Whitepaper") - require.Equal(t, row1[3], "9/31/2008") - require.Equal(t, row1[4], "satoshi") - - dbid := testdata.TestSchema.DBID() - // pgSchema := common.DBIDSchema(dbid) - res2, err := global.Execute(&common.TxContext{ - BlockContext: &common.BlockContext{}, - Ctx: ctx, - }, tx, dbid, `SELECT * from posts;`, nil) // or do we require callers to set qualify schema like `SELECT * from `+pgSchema+`.posts;` ? - require.NoError(t, err) - - require.Equal(t, res2.Columns, []string{"id", "title", "content", "author_id", "post_date"}) - require.Equal(t, len(res2.Rows), 1) - require.Equal(t, res2.Rows[0], []any{int64(1), "Bitcoin!", "The Bitcoin Whitepaper", int64(1), "9/31/2008"}) - }, - }, - { - name: "executing outside of a commit", - ses1: func(t *testing.T, global *execution.GlobalContext, tx sql.DB) { - ctx := context.Background() - err := global.CreateDataset(&common.TxContext{ - BlockContext: &common.BlockContext{}, - Signer: testdata.TestSchema.Owner, - Caller: string(testdata.TestSchema.Owner), - TxID: "txid1", - Ctx: ctx, - }, tx, testdata.TestSchema) - require.NoError(t, err) - }, - after: func(t *testing.T, global *execution.GlobalContext, tx sql.DB) { - ctx := context.Background() - - _, err := global.Procedure(&common.TxContext{ - BlockContext: &common.BlockContext{}, - Signer: []byte("signer"), - Caller: "signer", - Ctx: ctx, - }, tx, &common.ExecutionData{ - Dataset: testdata.TestSchema.DBID(), - Procedure: testdata.ActionCreatePost.Name, - Args: []any{1, "Bitcoin!", "The Bitcoin Whitepaper", "9/31/2008"}, - }) - require.NotNil(t, err) - }, - }, - { - name: "calling outside of a commit", - ses1: func(t *testing.T, global *execution.GlobalContext, tx sql.DB) { - ctx := context.Background() - err := global.CreateDataset(&common.TxContext{ - BlockContext: &common.BlockContext{}, - Signer: testdata.TestSchema.Owner, - Caller: string(testdata.TestSchema.Owner), - TxID: "txid1", - Ctx: ctx, - }, tx, testdata.TestSchema) - require.NoError(t, err) - - _, err = global.Procedure(&common.TxContext{ - BlockContext: &common.BlockContext{}, - Signer: []byte("signer"), - Caller: "signer", - Ctx: ctx, - }, tx, &common.ExecutionData{ - Dataset: testdata.TestSchema.DBID(), - Procedure: testdata.ActionCreateUser.Name, - Args: []any{1, "satoshi", 42}, - }) - require.NoError(t, err) - }, - after: func(t *testing.T, global *execution.GlobalContext, tx sql.DB) { - ctx := context.Background() - - users, err := global.Procedure(&common.TxContext{ - BlockContext: &common.BlockContext{}, - Signer: []byte("signer"), - Caller: "signer", - Ctx: ctx, - }, tx, &common.ExecutionData{ - Dataset: testdata.TestSchema.DBID(), - Procedure: 
testdata.ActionGetUserByAddress.Name, - Args: []any{"signer"}, - }) - require.NoError(t, err) - - require.Equal(t, len(users.Rows), 1) - require.Equal(t, []any{int64(1), "satoshi", int64(42)}, []any{users.Rows[0][0], users.Rows[0][1], users.Rows[0][2]}) - }, - }, - { - name: "deploying database and immediately calling procedure", - ses1: func(t *testing.T, global *execution.GlobalContext, tx sql.DB) { - ctx := context.Background() - - err := global.CreateDataset(&common.TxContext{ - BlockContext: &common.BlockContext{}, - Signer: testdata.TestSchema.Owner, - Caller: string(testdata.TestSchema.Owner), - TxID: "txid1", - Ctx: ctx, - }, tx, testdata.TestSchema) - require.NoError(t, err) - - _, err = global.Procedure(&common.TxContext{ - BlockContext: &common.BlockContext{}, - Signer: []byte("signer"), - Caller: "signer", - Ctx: ctx, - }, tx, &common.ExecutionData{ - Dataset: testdata.TestSchema.DBID(), - Procedure: testdata.ActionCreateUser.Name, - Args: []any{1, "satoshi", 42}, - }) - require.NoError(t, err) - }, - after: func(t *testing.T, global *execution.GlobalContext, tx sql.DB) { - ctx := context.Background() - - users, err := global.Procedure(&common.TxContext{ - BlockContext: &common.BlockContext{}, - Signer: []byte("signer"), - Caller: "signer", - Ctx: ctx, - }, tx, &common.ExecutionData{ - Dataset: testdata.TestSchema.DBID(), - Procedure: testdata.ActionGetUserByAddress.Name, - Args: []any{"signer"}, - }) - require.NoError(t, err) - - require.Equal(t, len(users.Rows), 1) - require.Equal(t, []any{int64(1), "satoshi", int64(42)}, []any{users.Rows[0][0], users.Rows[0][1], users.Rows[0][2]}) - }, - }, - { - name: "test failed extension init", - ses1: func(t *testing.T, global *execution.GlobalContext, tx sql.DB) { - ctx := context.Background() - - oldExtensions := []*types.Extension{} - copy(oldExtensions, testdata.TestSchema.Extensions) - - testdata.TestSchema.Extensions = append(testdata.TestSchema.Extensions, - &types.Extension{ - Name: "math", - Initialization: []*types.ExtensionConfig{ - { - Key: "fail", - Value: "true", - }, - }, - Alias: "fail_math", - }, - ) - - err := global.CreateDataset(&common.TxContext{ - BlockContext: &common.BlockContext{}, - Signer: testdata.TestSchema.Owner, - Caller: string(testdata.TestSchema.Owner), - TxID: "txid1", - Ctx: ctx, - }, tx, testdata.TestSchema) - require.Error(t, err) - - testdata.TestSchema.Extensions = oldExtensions - - err = global.CreateDataset(&common.TxContext{ - BlockContext: &common.BlockContext{}, - Signer: testdata.TestSchema.Owner, - Caller: string(testdata.TestSchema.Owner), - TxID: "txid1", - Ctx: ctx, - }, tx, testdata.TestSchema) - assert.NoError(t, err) - }, - }, - { - name: "owner only action", - ses1: func(t *testing.T, global *execution.GlobalContext, tx sql.DB) { - ctx := context.Background() - - err := global.CreateDataset(&common.TxContext{ - BlockContext: &common.BlockContext{}, - Signer: testdata.TestSchema.Owner, - Caller: string(testdata.TestSchema.Owner), - TxID: "txid1", - Ctx: ctx, - }, tx, testdata.TestSchema) - require.NoError(t, err) - - _, err = global.Procedure(&common.TxContext{ - BlockContext: &common.BlockContext{}, - Signer: []byte("signer"), - Caller: "signer", - Ctx: ctx, - }, tx, &common.ExecutionData{ - Dataset: testdata.TestSchema.DBID(), - Procedure: testdata.ActionAdminDeleteUser.Name, - Args: []any{1}, - }) - require.Error(t, err) - - _, err = global.Procedure(&common.TxContext{ - BlockContext: &common.BlockContext{}, - Signer: testdata.TestSchema.Owner, - Caller: 
string(testdata.TestSchema.Owner), - Ctx: ctx, - }, tx, &common.ExecutionData{ - Dataset: testdata.TestSchema.DBID(), - Procedure: testdata.ActionAdminDeleteUser.Name, - Args: []any{1}, - }) - require.NoError(t, err) - }, - }, - { - name: "private action", - ses1: func(t *testing.T, global *execution.GlobalContext, tx sql.DB) { - ctx := context.Background() - - err := global.CreateDataset(&common.TxContext{ - BlockContext: &common.BlockContext{}, - Signer: testdata.TestSchema.Owner, - Caller: string(testdata.TestSchema.Owner), - TxID: "txid1", - Ctx: ctx, - }, tx, testdata.TestSchema) - require.NoError(t, err) - - // calling private fails - _, err = global.Procedure(&common.TxContext{ - BlockContext: &common.BlockContext{}, - Signer: []byte("signer"), - Caller: "signer", - Ctx: ctx, - }, tx, &common.ExecutionData{ - Dataset: testdata.TestSchema.DBID(), - Procedure: testdata.ActionPrivate.Name, - Args: []any{}, - }) - require.Error(t, err) - - // calling a public which calls private succeeds - _, err = global.Procedure(&common.TxContext{ - BlockContext: &common.BlockContext{}, - Signer: []byte("signer"), - Caller: "signer", - Ctx: ctx, - }, tx, &common.ExecutionData{ - Dataset: testdata.TestSchema.DBID(), - Procedure: testdata.ActionCallsPrivate.Name, - Args: []any{}, - }) - require.NoError(t, err) - }, - }, - { - // this test used to track that this was not possible, because it was necessary - // to protect our old SQLite atomicity model. This is no longer necessary, - // and it's actually preferable that we can support this. Logically, it makes sense - // that a deploy tx followed by an execute tx in the same block should work. - name: "deploy and call at the same time", - ses1: func(t *testing.T, global *execution.GlobalContext, tx sql.DB) { - ctx := context.Background() - - err := global.CreateDataset(&common.TxContext{ - BlockContext: &common.BlockContext{}, - Signer: testdata.TestSchema.Owner, - Caller: string(testdata.TestSchema.Owner), - TxID: "txid1", - Ctx: ctx, - }, tx, testdata.TestSchema) - require.NoError(t, err) - - _, err = global.Procedure(&common.TxContext{ - BlockContext: &common.BlockContext{}, - Signer: []byte("signer"), - Caller: "signer", - Ctx: ctx, - }, tx, &common.ExecutionData{ - Dataset: testdata.TestSchema.DBID(), - Procedure: testdata.ActionCreateUser.Name, - Args: []any{1, "satoshi", 42}, - }) - require.NoError(t, err) - - _, err = global.Procedure(&common.TxContext{ - BlockContext: &common.BlockContext{}, - Signer: []byte("signer"), - Caller: "signer", - Ctx: ctx, - }, tx, &common.ExecutionData{ - Dataset: testdata.TestSchema.DBID(), - Procedure: testdata.ActionGetUserByAddress.Name, - Args: []any{"signer"}, - }) - require.NoError(t, err) - }, - }, - { - name: "deploy many databases", - ses1: func(t *testing.T, global *execution.GlobalContext, tx sql.DB) { - ctx := context.Background() - - for i := 0; i < 10; i++ { - newSchema := *testdata.TestSchema - newSchema.Name = testdata.TestSchema.Name + fmt.Sprint(i) - - err := global.CreateDataset(&common.TxContext{ - BlockContext: &common.BlockContext{}, - Signer: testdata.TestSchema.Owner, - Caller: string(testdata.TestSchema.Owner), - TxID: "txid" + fmt.Sprint(i), - Ctx: ctx, - }, tx, &newSchema) - require.NoError(t, err) - } - }, - }, - { - name: "deploying and immediately dropping", - ses1: func(t *testing.T, global *execution.GlobalContext, tx sql.DB) { - ctx := context.Background() - - err := global.CreateDataset(&common.TxContext{ - BlockContext: &common.BlockContext{}, - Signer: testdata.TestSchema.Owner, - 
Caller: string(testdata.TestSchema.Owner), - TxID: "txid1", - Ctx: ctx, - }, tx, testdata.TestSchema) - require.NoError(t, err) - - err = global.DeleteDataset(&common.TxContext{ - BlockContext: &common.BlockContext{}, - Signer: testdata.TestSchema.Owner, - Caller: string(testdata.TestSchema.Owner), - TxID: "txid3", - Ctx: ctx, - }, tx, testdata.TestSchema.DBID()) - require.NoError(t, err) - }, - }, - { - name: "case insensitive", - ses1: func(t *testing.T, global *execution.GlobalContext, tx sql.DB) { - ctx := context.Background() - - schema := *caseSchema - - err := global.CreateDataset(&common.TxContext{ - BlockContext: &common.BlockContext{}, - Signer: testdata.TestSchema.Owner, - Caller: string(testdata.TestSchema.Owner), - TxID: "txid1", - Ctx: ctx, - }, tx, &schema) - require.NoError(t, err) - - caller := "signer" - signer := []byte("signer") - - _, err = global.Procedure(&common.TxContext{ - BlockContext: &common.BlockContext{}, - Signer: []byte(caller), - Caller: string(signer), - Ctx: ctx, - }, tx, &common.ExecutionData{ - Dataset: schema.DBID(), - Procedure: "CREATE_USER", - Args: []any{1, "satoshi"}, - }) - require.NoError(t, err) - - _, err = global.Procedure(&common.TxContext{ - BlockContext: &common.BlockContext{}, - Signer: []byte(caller), - Caller: string(signer), - Ctx: ctx, - }, tx, &common.ExecutionData{ - Dataset: schema.DBID(), - Procedure: "CREATE_USER", - Args: []any{"2", "vitalik"}, - }) - require.NoError(t, err) - - _, err = global.Procedure(&common.TxContext{ - BlockContext: &common.BlockContext{}, - Signer: []byte(caller), - Caller: string(signer), - Ctx: ctx, - }, tx, &common.ExecutionData{ - Dataset: schema.DBID(), - Procedure: "CREATE_FOLLOWER", - Args: []any{"satoshi", "vitalik"}, - }) - require.NoError(t, err) - - res, err := global.Procedure(&common.TxContext{ - BlockContext: &common.BlockContext{}, - Signer: []byte(caller), - Caller: string(signer), - Ctx: ctx, - }, tx, &common.ExecutionData{ - Dataset: schema.DBID(), - Procedure: "USE_EXTENSION", - Args: []any{1, "2"}, // math_ext.add($arg1 + $arg2, 1) - - }) - require.NoError(t, err) - - // "SELECT $rES as res;" will be a string because arg type - // inference based on Go variables is only used for inline - // expressions since postgres prepare/describe is desirable for - // statements that actually reference a table (but this one does - // not). 
- require.Equal(t, "4", res.Rows[0][0]) - require.Equal(t, []string{"res"}, res.Columns) // without the `AS res`, it would be `?column?` - }, - }, - { - name: "procedure", - ses1: func(t *testing.T, global *execution.GlobalContext, tx sql.DB) { - ctx := context.Background() - - err := global.CreateDataset(&common.TxContext{ - BlockContext: &common.BlockContext{}, - Signer: testdata.TestSchema.Owner, - Caller: string(testdata.TestSchema.Owner), - TxID: "txid1", - Ctx: ctx, - }, tx, testdata.TestSchema) - require.NoError(t, err) - }, - ses2: func(t *testing.T, global *execution.GlobalContext, tx sql.DB) { - ctx := context.Background() - - _, err := global.Procedure(&common.TxContext{ - BlockContext: &common.BlockContext{}, - Signer: []byte("signer"), - Caller: "signer", - Ctx: ctx, - }, tx, &common.ExecutionData{ - Dataset: testdata.TestSchema.DBID(), - Procedure: testdata.ProcCreateUser.Name, - Args: []any{1, "satoshi", 42}, - }) - require.NoError(t, err) - - user, err := global.Procedure(&common.TxContext{ - BlockContext: &common.BlockContext{}, - Signer: []byte("signer"), - Caller: "signer", - Ctx: ctx, - }, tx, &common.ExecutionData{ - Dataset: testdata.TestSchema.DBID(), - Procedure: testdata.ProcGetUserByAddress.Name, - Args: []any{"signer"}, - }) - require.NoError(t, err) - - require.Equal(t, len(user.Rows), 1) - - require.Equal(t, []any{int64(1), "satoshi", int64(42)}, []any{user.Rows[0][0], user.Rows[0][1], user.Rows[0][2]}) - }, - }, - } - - for _, test := range tests { - t.Run(test.name, func(t *testing.T) { - if test.ses1 == nil { - test.ses1 = func(t *testing.T, global *execution.GlobalContext, tx sql.DB) {} - } - if test.ses2 == nil { - test.ses2 = func(t *testing.T, global *execution.GlobalContext, tx sql.DB) {} - } - if test.after == nil { - test.after = func(t *testing.T, global *execution.GlobalContext, tx sql.DB) {} - } - - global, db, err := setup(t) - if err != nil { - t.Fatal(err) - } - defer cleanup(t, db) - - ctx := context.Background() - - tx, err := db.BeginPreparedTx(ctx) - require.NoError(t, err) - defer tx.Rollback(ctx) - - test.ses1(t, global, tx) - - changes := make(chan any, 1) - go func() { - for ch := range changes { - switch ch.(type) { - case *pg.ChangesetEntry: - // t.Log("entry", ct) - case *pg.Relation: - // t.Log("relation") - } - } - }() - id, err := tx.Precommit(ctx, changes) // not needed, but test how txApp would use the engine - require.NoError(t, err) - require.NotEmpty(t, id) - - err = tx.Commit(ctx) - require.NoError(t, err) - - tx2, err := db.BeginTx(ctx) - require.NoError(t, err) - defer tx2.Rollback(ctx) - - test.ses2(t, global, tx2) - - // Omit Precommit here, just to test that it's allowed even though - // txApp would want the commit ID. 
- - err = tx2.Commit(ctx) - require.NoError(t, err) - - readOnly, err := db.BeginTx(ctx) - require.NoError(t, err) - defer readOnly.Rollback(ctx) - - test.after(t, global, readOnly) - }) - } -} - -var ( - caseSchema = &types.Schema{ - Name: "case_insensITive", - Tables: []*types.Table{ - { - Name: "usErs", - Columns: []*types.Column{ - { - Name: "id", - Type: types.IntType, - Attributes: []*types.Attribute{ - { - Type: types.PRIMARY_KEY, - }, - }, - }, - { - Name: "nAMe", - Type: types.TextType, - }, - }, - Indexes: []*types.Index{ - { - Name: "usErs_name", - Columns: []string{ - "nAmE", - }, - Type: types.BTREE, - }, - }, - }, - { - Name: "fOllOwers", - Columns: []*types.Column{ - { - Name: "foLlOwer_id", - Type: types.IntType, - Attributes: []*types.Attribute{ - { - Type: types.NOT_NULL, - }, - }, - }, - { - Name: "fOllOwee_id", - Type: types.IntType, - Attributes: []*types.Attribute{ - { - Type: types.NOT_NULL, - }, - }, - }, - }, - Indexes: []*types.Index{ - { - Name: "fOllOwers_pk", - Columns: []string{ - "foLlowEr_id", - "fOllOwee_Id", - }, - Type: types.PRIMARY, - }, - }, - ForeignKeys: []*types.ForeignKey{ - { - ChildKeys: []string{ - "FoLlOwer_id", - }, - ParentKeys: []string{ - "iD", - }, - ParentTable: "useRS", - }, - { - ChildKeys: []string{ - "FoLlOweE_id", - }, - ParentKeys: []string{ - "ID", - }, - ParentTable: "UseRS", - }, - }, - }, - }, - Actions: []*types.Action{ - { - Name: "CrEaTe_UsEr", - Parameters: []string{ - "$Id", - "$nAmE", - }, - Public: true, - Body: "INSERT INTO UseRs (ID, nAme) VALUES ($iD, $nAME);", - }, - { - Name: "CrEaTe_FoLlOwEr", - Parameters: []string{ - "$FoLlOwer_nAme", - "$FoLlOwee_nAme", - }, - Public: true, - Body: `INSERT INTO FollOweRS (FOLlOwer_id, FOLlOwee_id) - VALUES ( - (SELECT ID FROM USErs WHERE NAmE = $FoLlOwer_nAME), - (SELECT ID FROM UsErS WHERE nAME = $FoLlOwee_nAME) - );`, - }, - { - Name: "use_ExTension", - Parameters: []string{ - "$vAl1", - "$vAl2", - }, - Public: true, - Body: "$rEs = Math_Ext.AdD($VAl1 + $VAl2, 1); SELECT $rES as res;", // type? procedure execution is not strongly typed... - - }, - }, - Extensions: []*types.Extension{ - { - Name: "maTh", - Alias: "Math_Ext", - }, - }, - } -) diff --git a/node/engine/integration/procedure_test.go b/node/engine/integration/procedure_test.go deleted file mode 100644 index be202915e..000000000 --- a/node/engine/integration/procedure_test.go +++ /dev/null @@ -1,914 +0,0 @@ -//go:build pglive && engineinteg - -package integration_test - -import ( - "context" - "encoding/base64" - "encoding/hex" - "fmt" - "strings" - "testing" - - "github.com/kwilteam/kwil-db/common" - "github.com/kwilteam/kwil-db/core/crypto" - "github.com/kwilteam/kwil-db/core/types" - "github.com/kwilteam/kwil-db/core/types/decimal" - "github.com/kwilteam/kwil-db/core/utils/order" - "github.com/kwilteam/kwil-db/node/engine/execution" - "github.com/kwilteam/kwil-db/node/types/sql" - "github.com/kwilteam/kwil-db/parse" - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" -) - -// This test is used to easily test procedure inputs/outputs and logic. -// All tests are given the same schema with a few tables and procedures, as well -// as mock data. The test is then able to define its own procedure, the inputs, -// outputs, and expected error (if any). 
-func Test_Procedures(t *testing.T) { - type testcase struct { - name string - procedure string - inputs []any // can be nil - outputs [][]any // can be nil - err error // can be nil - caller string // can be empty, if set it will override the default caller in the transaction data - readOnly bool // if true, the procedure will be executed in a read-only transaction - notices []string // expected notices, if any - } - - tests := []testcase{ - { - name: "basic test", - procedure: `procedure create_user2($name text, $usernum int) public { - INSERT INTO users (id, name, wallet_address, user_num) - VALUES (uuid_generate_v5('985b93a4-2045-44d6-bde4-442a4e498bc6'::uuid, @txid), - $name, - @caller, - $usernum - ); - }`, - inputs: []any{"test_user", 4}, - }, - { - name: "for loop", - procedure: `procedure get_all_users($ints int[]) public view returns (ints int[]) { - $result int[]; - for $i in $ints { - $result := array_append($result, $i*2); - } - return $result; - } - `, - inputs: []any{[]int64{1, 2, 3}}, - outputs: [][]any{{[]int64{int64(2), int64(4), int64(6)}}}, // returns 1 row, 1 column, with an array of ints - }, - { - name: "is (null)", - procedure: `procedure is_null($val text) public view returns (is_null bool, is_null2 bool, is_null3 bool, is_null4 bool) { - $val2 := 1; - return $val is not distinct from null, $val2 is not distinct from null, $val is distinct from null, $val2 is distinct from null; - }`, - inputs: []any{nil}, - outputs: [][]any{{true, false, false, true}}, - }, - { - name: "is (concrete)", - procedure: `procedure is_equal() public view returns (is_equal bool, is_equal2 bool, is_equal3 bool, is_equal4 bool) { - $val := 'hello'; - return $val is not distinct from 'hello', $val is not distinct from 'world', $val is distinct from 'hello', $val is distinct from 'world'; - }`, - outputs: [][]any{{true, false, false, true}}, - }, - { - name: "equals", - procedure: `procedure equals($val text) public view returns (is_equal bool, is_equal2 bool, is_equal3 bool, is_equal4 bool) { - $val2 text; - return $val = 'hello', $val = 'world', $val != null, $val2 != null; - }`, - inputs: []any{"hello"}, - outputs: [][]any{{true, false, nil, nil}}, // equals with null should return null - }, - { - name: "and/or", - procedure: `procedure and_or() public view returns (count int) { - $count := 0; - if true and true { - $count := $count + 1; - } - if true and false { - $count := $count + 100; - } - - if (true or false) or (true or true) { - $count := $count + 10; - } - - return $count; - }`, - outputs: [][]any{{int64(11)}}, - }, - { - name: "return next from a non-table", - procedure: `procedure return_next($vals int[]) public view returns table(val int) { - for $i in $vals { - return next $i*2; - } - }`, - inputs: []any{[]int64{1, 2, 3}}, - outputs: [][]any{{int64(2)}, {int64(4)}, {int64(6)}}, - }, - { - name: "table return with no hits doesn't return postgres no-return error", - procedure: `procedure return_next($vals int[]) public view returns table(val int) { - for $i in $vals { - error('unreachable'); - } - }`, - inputs: []any{[]int64{}}, - outputs: [][]any{}, - }, - { - name: "loop over null array", - procedure: `procedure loop_over_null() public view returns (count int) { - $vals int[]; - $count := 0; - for $i in $vals { - $count := $count + 1; - } - return $count; - }`, - outputs: [][]any{{int64(0)}}, - }, - { - name: "encode, decode, and digest functions", - procedure: `procedure encode_decode_digest($hex text) public view returns (encoded text, decoded blob, digest blob) { - 
$decoded := decode($hex, 'hex'); - $encoded := encode($decoded, 'base64'); - $digest := digest($decoded, 'sha256'); - return $encoded, $decoded, $digest; - }`, - inputs: []any{hex.EncodeToString([]byte("hello"))}, - outputs: [][]any{{base64.StdEncoding.EncodeToString([]byte("hello")), []byte("hello"), crypto.Sha256([]byte("hello"))}}, - }, - { - name: "join on subquery", - procedure: `procedure join_on_subquery() public view returns table(name text, content text) { - return SELECT u.name, p.content FROM users u - INNER JOIN (select content, user_id from posts) p ON u.id = p.user_id - WHERE u.name = 'satoshi'; - }`, - // should come out LIFO, due to default ordering - outputs: [][]any{ - {"satoshi", "buy $btc to grow laser eyes"}, - {"satoshi", "goodbye world"}, - {"satoshi", "hello world"}, - }, - }, - { - name: "string functions", - procedure: `procedure string_funcs() public view { - $val := 'hello world'; - $val := $val || '!!!'; - $val := upper($val); - if $val != 'HELLO WORLD!!!' { - error('upper failed'); - } - $val := lower($val); - if $val != 'hello world!!!' { - error('lower failed'); - } - - if bit_length($val) != 112 { - error('bit_length failed'); - } - if char_length($val) != 14 or character_length($val) != 14 or length($val) != 14 { - error('length failed'); - } - if octet_length($val) != 14 { - error('octet_length failed'); - } - $val := rtrim($val, '!'); - if $val != 'hello world' { - error('rtrim failed'); - } - if rtrim($val||' ') != 'hello world' { - error('rtrim 2 failed'); - } - - $val := ltrim($val, 'h'); - if $val != 'ello world' { - error('ltrim failed'); - } - if ltrim(' '||$val) != 'ello world' { // add a space and trim it off - error('ltrim 2 failed'); - } - - $val := lpad($val, 11, 'h'); - if $val != 'hello world' { - error('lpad failed'); - } - if lpad($val, 12) != ' hello world' { - error('lpad 2 failed'); - } - - $val := rpad($val, 12, '!'); - if $val != 'hello world!' { - error('rpad failed'); - } - if rpad($val, 13) != 'hello world! ' { - error('rpad 2 failed'); - } - - if overlay($val, 'xx', 2, 5) != 'hxxworld!' { - error('overlay failed'); - } - if overlay($val, 'xx', 2) != 'hxxlo world!' { - error('overlay 2 failed'); - } - - if position('world', $val) != 7 { - error('position failed'); - } - if substring($val, 7, 5) != 'world' { - error('substring failed'); - } - if substring($val, 7) != 'world!' { - error('substring 2 failed'); - } - - if trim(' ' || $val || ' ') != 'hello world!' { - error('trim failed'); - } - if trim('a'||$val||'a', 'a') != 'hello world!' 
{ - error('trim 2 failed'); - } - - if parse_unix_timestamp('2021-01-01 00:00:00:123456', 'YYYY-MM-DD HH24:MI:SS:US') != 1609459200.123456 { - error('parse_unix_timestamp failed'); - } - - if format_unix_timestamp(1609459200.123456, 'YYYY-MM-DD HH24:MI:SS:US') != '2021-01-01 00:00:00:123456' { - error('format_unix_timestamp failed'); - } - - if generate_dbid('aa', decode('B7E2d6DABaf3B0038cFAaf09688Fa104f4409697', 'hex')) != 'xacfa19c2d4af530c6225ea139d611f91e7a55222a362dfd5eb70a826' { - error('generate_dbid failed'); - } - - // regression test for invalid generated sql - $dbid := generate_dbid('aa', decode('B7E2d6DABaf3B0038cFAaf09688Fa104f4409697', 'hex')); - if $dbid != 'xacfa19c2d4af530c6225ea139d611f91e7a55222a362dfd5eb70a826' { - error('generate_dbid regression test failed'); - } - }`, - }, - { - name: "arrays", - // all arrays are 1-indexed - procedure: `procedure array_funcs() public view { - $arr int[] := [1, 2, 3]; - $arr := array_append($arr, 4); - if $arr != [1, 2, 3, 4] { - error('array_append failed'); - } - - $arr2 := $arr[2:4]; // should be [2, 3, 4] - if $arr2 != [2, 3, 4] { - error('array slice failed'); - } - - $arr3 := array_prepend(0, $arr); - if $arr3 != [0, 1, 2, 3, 4] { - error('array_prepend failed'); - } - - if array_remove($arr3, 3) != [0, 1, 2, 4] { - error('array_remove failed'); - } - - if array_cat($arr[:2], $arr[4:]) != [1, 2, 4] { - error('array_cat failed'); - } - - $count := 0; - for $row in select array_agg(a) as a2 from (select 1 as a union select 2 as a) as b { - $count := $count + 1; - if $row.a2 != [1, 2] { - error('array_agg failed'); - } - } - if $count != 1 { - error('array_agg failed'); - } - }`, - }, - { - name: "min/max", - procedure: `procedure min_max() public view returns (min int, max int) { - $max := 0; - for $row in select max(user_num) as m from users { - $max := $row.m; - } - $min := 0; - for $row2 in select min(user_num) as m from users { - $min := $row2.m; - } - return $min, $max; - }`, - outputs: [][]any{{int64(1), int64(3)}}, - }, - { - name: "sum", - // cannot use duplicate function names, so we use sum2 - procedure: `procedure sum2() public view returns (sum decimal(1000,0)) { - for $row in select sum(user_num) as s from users { - return $row.s; - } - }`, - outputs: [][]any{{mustDecimal("6", 1000, 0)}}, - }, - { - name: "decimal array", - procedure: `procedure decimal_array() public view returns (decimals decimal(2,1)[]) { - $a := 2.5; - $b := 3.5; - $c := $a/$b; - return [$a, $b, $c]; - }`, - outputs: [][]any{{decimal.DecimalArray{mustDecimal("2.5", 2, 1), mustDecimal("3.5", 2, 1), mustDecimal("0.7", 2, 1)}}}, - }, - { - name: "decimal", - procedure: `procedure d() public view { - $i := 100.423; - $j decimal(16,8) := 46728954.23743892; - $k := $i::decimal(16,8) + $j; - if $k != 46729054.66043892 { - error('decimal failed'); - } - if $k::text != '46729054.66043892' { - error('decimal text failed'); - } - if ($k::decimal(16,2))::text != '46729054.66' { - error('decimal 2 failed'); - } - }`, - }, - { - name: "early empty return", - procedure: `procedure return_early() public view { - $exit := true; - if $exit { - return; - } - error('should not reach here'); - }`, - }, - { - name: "private procedure", - procedure: `procedure private_proc() private view { - error('should not reach here'); - }`, - err: execution.ErrPrivate, - }, - { - name: "owner procedure - success", - procedure: `procedure owner_proc() public owner view returns (is_owner bool) { - return true; - }`, - outputs: [][]any{{true}}, - }, - { - name: "owner procedure - 
fail", - procedure: `procedure owner_proc() public owner view returns (is_owner bool) { - return false; - }`, - err: execution.ErrOwnerOnly, - caller: "some_other_wallet", - }, - { - name: "mutative procedure in read-only tx", - procedure: `procedure mutative() public { - return; - }`, - err: execution.ErrMutativeProcedure, - readOnly: true, - }, - { - // this is a regression test for a previous bug - name: "unary", - procedure: `procedure unary() public view returns (bool) { - return !true; - }`, - outputs: [][]any{{false}}, - }, - { - name: "array", - procedure: `procedure assign_array() public view returns (ints int[]) { - $arr int[] := [1, 2, 3]; - - $arr[2] := 4; - return $arr; - }`, - outputs: [][]any{{[]int64{int64(1), int64(4), int64(3)}}}, - }, - { - name: "notice", - procedure: `procedure notice_fn() public { - for $i in 1..3 { - notice($i::text); - } - }`, - notices: []string{"1", "2", "3"}, - }, - } - - for _, test := range tests { - t.Run(test.name, func(t *testing.T) { - global, db, err := setup(t) - if err != nil { - t.Fatal(err) - } - defer cleanup(t, db) - - ctx := context.Background() - - tx, err := db.BeginPreparedTx(ctx) - require.NoError(t, err) - defer tx.Rollback(ctx) - - // deploy schema - dbid := deployAndSeed(t, global, tx, test.procedure) - - // parse out procedure name - procedureName := parseProcedureName(test.procedure) - - d := txData() - if test.caller != "" { - d.Caller = test.caller - d.Signer = []byte(test.caller) - } - - var execTx interface { - sql.Tx - sql.Subscriber - } = tx - if test.readOnly { - execTx, err = db.BeginReadTx(ctx) - require.NoError(t, err) - defer execTx.Rollback(ctx) - } - - // listen for notices - notice, done, err := execTx.Subscribe(ctx) - require.NoError(t, err) - defer done(ctx) - - var rec []string - go func() { - for n := range notice { - _, notc, err := parse.ParseNotice(n) - require.NoError(t, err) - rec = append(rec, notc) - } - }() - - // execute test procedure - res, err := global.Procedure(d, execTx, &common.ExecutionData{ - Dataset: dbid, - Procedure: procedureName, - Args: test.inputs, - }) - if test.err != nil { - require.Error(t, err) - require.ErrorIs(t, err, test.err) - return - } - require.NoError(t, err) - - require.Len(t, res.Rows, len(test.outputs)) - - for i, output := range test.outputs { - require.Len(t, res.Rows[i], len(output)) - for j, val := range output { - if dec, ok := val.(*decimal.Decimal); ok { - received := res.Rows[i][j].(*decimal.Decimal) - - assert.Equal(t, dec.String(), received.String()) - assert.Equal(t, dec.Precision(), received.Precision()) - assert.Equal(t, dec.Scale(), received.Scale()) - continue - } - - require.Equal(t, val, res.Rows[i][j]) - } - } - - // check notices - require.Equal(t, test.notices, rec) - }) - } -} - -func mustDecimal(val string, precision, scale uint16) *decimal.Decimal { - d, err := decimal.NewExplicit(val, precision, scale) - if err != nil { - panic(err) - } - - return d -} - -func Test_ForeignProcedures(t *testing.T) { - type testcase struct { - name string - // foreign is the foreign procedure definition. - // It will be deployed in a separate schema. - foreign string - // otherProc is the procedure that calls the foreign procedure. - // It will be included with the foreign procedure. - // It should be formattable to allow the caller to format with - // the target dbid, and the target procedure should be hardcoded. - otherProc string - // inputs are the inputs to the test procedure. - inputs []any - // outputs are the expected outputs from the test procedure. 
- outputs [][]any - // caller is the calling address. If empty, it defaults to the package default. - caller string - // if wantErr is not empty, the test will expect an error containing this string. - // We use a string, instead go Go's error type, because we are reading errors raised - // from Postgres, which are strings. - wantErr string - } - - tests := []testcase{ - { - name: "foreign procedure takes nothing, returns nothing", - foreign: `foreign procedure do_something()`, - otherProc: `procedure call_foreign() public { - do_something['%s', 'delete_users'](); - }`, - }, - { - name: "foreign procedure takes nothing, returns table", - foreign: `foreign procedure get_users() returns table(id uuid, name text, wallet_address text)`, - otherProc: `procedure call_foreign() public returns table(username text) { - return select name as username from get_users['%s', 'get_users'](); - }`, - outputs: [][]any{ - {"satoshi"}, - {"wendys_drive_through_lady"}, - {"zeus"}, - }, - }, - { - name: "foreign procedure takes values, returns values", - foreign: `foreign procedure id_from_name($name text) returns (id uuid)`, - otherProc: `procedure call_foreign($name text) public returns (id uuid) { - return id_from_name['%s', 'id_from_name']($name); - }`, - inputs: []any{"satoshi"}, - outputs: [][]any{{satoshisUUID}}, - }, - { - name: "foreign procedure expects no args, implementation expects some", - foreign: `foreign procedure id_from_name() returns (id uuid)`, - otherProc: `procedure call_foreign() public returns (id uuid) { - return id_from_name['%s', 'id_from_name'](); - }`, - wantErr: `requires 1 arg(s)`, - }, - { - name: "foreign procedure expects args, implementation expects none", - foreign: `foreign procedure get_users($name text) returns table(id uuid, name text, wallet_address text)`, - otherProc: `procedure call_foreign() public returns table(username text) { - return select name as username from get_users['%s', 'get_users']('satoshi'); - }`, - wantErr: "requires no args", - }, - { - name: "foreign procedure expects 2 args, implementation expects 2", - foreign: `foreign procedure id_from_name($name text, $name2 text) returns (id uuid)`, - otherProc: `procedure call_foreign() public returns (id uuid) { - return id_from_name['%s', 'id_from_name']('satoshi', 'zeus'); - }`, - wantErr: "requires 1 arg(s)", - }, - { - name: "foreign procedure returns 1 arg, implementation returns none", - foreign: `foreign procedure delete_users() returns (text)`, - otherProc: `procedure call_foreign() public returns (text) { - return delete_users['%s', 'delete_users'](); - }`, - wantErr: "returns nothing", - }, - { - name: "foreign procedure returns 0 args, implementation returns 1", - foreign: `foreign procedure id_from_name($name text)`, - otherProc: `procedure call_foreign() public { - id_from_name['%s', 'id_from_name']('satoshi'); - }`, - wantErr: "returns non-nil value(s)", - }, - { - name: "foreign procedure returns table, implementation returns non-table", - foreign: `foreign procedure id_from_name($name text) returns table(id uuid)`, - otherProc: `procedure call_foreign() public { - select id from id_from_name['%s', 'id_from_name']('satoshi'); - }`, - wantErr: "does not return a table", - }, - { - name: "foreign procedure does not return table, implementation returns table", - foreign: `foreign procedure get_users() returns (id uuid, name text, wallet_address text)`, - otherProc: `procedure call_foreign() public returns table(username text) { - $id, $name, $wallet := get_users['%s', 'get_users'](); - }`, - 
wantErr: "returns a table", - }, - { - name: "foreign procedure returns table, implementation returns nothing", - foreign: `foreign procedure create_user($name text) returns table(id uuid)`, - otherProc: `procedure call_foreign() public { - create_user['%s', 'create_user']('satoshi'); - }`, - wantErr: "does not return a table", - }, - { - name: "procedures returning scalar return different named values (ok)", - // returns value "uid" instead of impl's "id" - foreign: `foreign procedure id_from_name($name text) returns (uid uuid)`, - otherProc: `procedure call_foreign() public returns (id uuid) { - return id_from_name['%s', 'id_from_name']('satoshi'); - }`, - outputs: [][]any{{satoshisUUID}}, - }, - { - name: "procedure returning table return different column names (failure)", - foreign: `foreign procedure get_users() returns table(uid uuid, name text, wallet_address text)`, - otherProc: `procedure call_foreign() public returns table(name text) { - return select name from get_users['%s', 'get_users'](); - }`, - wantErr: "returns id", - }, - { - name: "private procedure via foreign call", - foreign: `foreign procedure is_private($name text)`, - otherProc: `procedure call_foreign() public { - is_private['%s', 'is_private']('satoshi'); - }`, - wantErr: "not public", - }, - // { - // name: "foreign call owner - fail", - // foreign: `foreign procedure is_owner($name text)`, - // otherProc: `procedure call_foreign() public owner { - // is_owner['%s', 'is_owner']('satoshi'); - // }`, - // caller: "some_other_wallet", - // wantErr: "is owner-only", - // }, - { - name: "foreign call owner - success", - foreign: `foreign procedure is_owner($name text)`, - otherProc: `procedure call_foreign() public owner { - is_owner['%s', 'is_owner']('satoshi'); - }`, - }, - // this test tests that foreign caller properly works, and is unset at the end of the - // foreign call. 
- { - name: "testing foreign caller", - foreign: `foreign procedure return_foreign_caller() returns (caller text)`, - otherProc: `procedure call_foreign() public returns (one text, two text, three text) { - $one := @foreign_caller; - $two := return_foreign_caller['%s', 'return_foreign_caller'](); - $three := @foreign_caller; - - return $one, $two, $three; - }`, - outputs: [][]any{{"", "x93c803781453c866b8e1277d6d13eaa17935d891544fd223e0ea75b0", ""}}, - }, - } - - for _, test := range tests { - t.Run(test.name, func(t *testing.T) { - global, db, err := setup(t) - if err != nil { - t.Fatal(err) - } - defer cleanup(t, db) - - ctx := context.Background() - - tx, err := db.BeginTx(ctx) - require.NoError(t, err) - defer tx.Rollback(ctx) - - // deploy the main test schema - foreignDBID := deployAndSeed(t, global, tx) - - // deploy the new schema that will call the main one - // first, format the procedure with the foreign DBID - otherProc := fmt.Sprintf(test.otherProc, foreignDBID) - - // deploy the new schema - mainDBID := deploy(t, global, tx, fmt.Sprintf("database db2;\n%s\n%s", test.foreign, otherProc)) - - procedureName := parseProcedureName(otherProc) - - d := txData() - if test.caller != "" { - d.Caller = test.caller - d.Signer = []byte(test.caller) - } - - // execute test procedure - res, err := global.Procedure(d, tx, &common.ExecutionData{ - Dataset: mainDBID, - Procedure: procedureName, - Args: test.inputs, - }) - if test.wantErr != "" { - require.Error(t, err) - require.Contains(t, err.Error(), test.wantErr) - return - } - require.NoError(t, err) - - require.Len(t, res.Rows, len(test.outputs)) - for i, output := range test.outputs { - require.Len(t, res.Rows[i], len(output)) - for j, val := range output { - require.Equal(t, val, res.Rows[i][j]) - } - } - }) - } -} - -// testSchema is a schema that can be deployed with deployAndSeed -var testSchema = ` -database ecclesia; - -table users { - id uuid primary key, - name text not null maxlen(100) minlen(4) unique, - wallet_address text not null, - user_num int unique notnull // this could be the primary key, but it's more for testing than to be useful -} - -table posts { - id uuid primary key, - user_id uuid not null, - content text not null maxlen(300), - foreign key (user_id) references users(id) on delete cascade -} - -procedure create_user($name text) public { - $max int; - for $row in select max(user_num) as m from users { - $max := $row.m; - } - - if $max is null { - $max := 0; - } - - INSERT INTO users (id, name, wallet_address, user_num) - VALUES (uuid_generate_v5('985b93a4-2045-44d6-bde4-442a4e498bc6'::uuid, @txid), - $name, - @caller, - $max + 1 - ); -} - -procedure owns_user($wallet text, $name text) public view returns (owns bool) { - $exists bool := false; - for $row in SELECT * FROM users WHERE wallet_address = $wallet - AND name = $name { - $exists := true; - } - - return $exists; -} - -procedure id_from_name($name text) public view returns (id uuid) { - for $row in SELECT id FROM users WHERE name = $name { - return $row.id; - } - error('user not found'); -} - -procedure create_post($username text, $content text) public { - if owns_user(@caller, $username) == false { - error('caller does not own user'); - } - - INSERT INTO posts (id, user_id, content) - VALUES (uuid_generate_v5('985b93a4-2045-44d6-bde4-442a4e498bc6'::uuid, @txid), - id_from_name($username), - $content - ); -} - -// the following procedures serve no utility, and are made only to test foreign calls -// to different signatures. 
-procedure delete_users() public { - DELETE FROM users; -} - -procedure get_users() public returns table(id uuid, name text, wallet_address text) { - return SELECT id, name, wallet_address FROM users; -} - -// matches create_user signature -procedure is_private($name text) private { - error('should not reach here'); -} - -procedure is_owner($name text) public owner view { - $exists bool := false; -} - -procedure return_foreign_caller() public returns (caller text) { - return @foreign_caller; -} -` - -// maps usernames to post content. -var initialData = map[string][]string{ - "satoshi": {"hello world", "goodbye world", "buy $btc to grow laser eyes"}, - "zeus": {"i am zeus", "i am the god of thunder", "i am the god of lightning"}, - "wendys_drive_through_lady": {"hi how can I help you", "no I don't know what the federal reserve is", "sir this is a wendys"}, -} - -var satoshisUUID = &types.UUID{0x38, 0xeb, 0x77, 0xcb, 0x1e, 0x5a, 0x56, 0xc0, 0x85, 0x63, 0x2e, 0x25, 0x34, 0xd6, 0x7b, 0x96} - -// deploy deploys a schema. -// if deployer is not "", it will set the deployer as the owner. -func deploy(t *testing.T, global *execution.GlobalContext, db sql.DB, schema string) (dbid string) { - parsed, err := parse.Parse([]byte(schema)) - require.NoError(t, err) - - d := txData() - - err = global.CreateDataset(d, db, parsed) - require.NoError(t, err) - - // get dbid - dbs, err := global.ListDatasets(owner) - require.NoError(t, err) - - for _, db := range dbs { - if db.Name == parsed.Name { - dbid = db.DBID - break - } - } - - return dbid -} - -// deployAndSeed deploys the test schema and seeds it with data -func deployAndSeed(t *testing.T, global *execution.GlobalContext, db sql.DB, extraProcedures ...string) (dbid string) { - schema := testSchema - for _, proc := range extraProcedures { - schema += proc + "\n" - } - - // deploy schema - dbid = deploy(t, global, db, schema) - - // create initial data - for _, kv := range order.OrderMap(initialData) { - _, err := global.Procedure(txData(), db, &common.ExecutionData{ - Dataset: dbid, - Procedure: "create_user", - Args: []any{kv.Key}, - }) - require.NoError(t, err) - - for _, post := range kv.Value { - _, err = global.Procedure(txData(), db, &common.ExecutionData{ - Dataset: dbid, - Procedure: "create_post", - Args: []any{kv.Key, post}, - }) - require.NoError(t, err) - } - } - - return dbid -} - -// parseProcedureName parses the procedure name from a procedure definition -func parseProcedureName(proc string) string { - procs := strings.Split(proc, " ") - procedureName := strings.Split(procs[1], "(")[0] - procedureName = strings.TrimSpace(procedureName) - return procedureName -} diff --git a/node/engine/integration/schema_test.go b/node/engine/integration/schema_test.go deleted file mode 100644 index 77f4f5fa5..000000000 --- a/node/engine/integration/schema_test.go +++ /dev/null @@ -1,304 +0,0 @@ -//go:build pglive && engineinteg - -package integration_test - -import ( - "context" - "fmt" - "os" - "testing" - - "github.com/kwilteam/kwil-db/common" - "github.com/kwilteam/kwil-db/core/types" - "github.com/kwilteam/kwil-db/node/engine/execution" - "github.com/kwilteam/kwil-db/node/types/sql" - "github.com/kwilteam/kwil-db/parse" - "github.com/stretchr/testify/require" -) - -var ( - owner = []byte("test_owner") -) - -// Test_Schemas is made to test full kuneiform schemas against the engine. -// The intent of this is to test the full engine, with expected error messages, -// without having to write a full integration test. 
-func Test_Schemas(t *testing.T) { - type testCase struct { - name string - // fn is the test function - // the passed db will be in a transaction - fn func(t *testing.T, global *execution.GlobalContext, db sql.DB) - } - - // the tests rely on three schemas: - // users: a table of users, which maps a wallet address to a human readable name - // social_media: a table of posts and post_counts. posts contains posts, and post_counts contains the number of posts a user has made. - // video_game: a table of scores tracks users high scores in a video game. - // posts and video+game also have admin commands for setting the dbid and procedure names. - testCases := []testCase{ - { - name: "create user, make several posts, and get posts", - fn: func(t *testing.T, global *execution.GlobalContext, db sql.DB) { - usersDBID, socialDBID, _ := deployAllSchemas(t, global, db) - _ = socialDBID - - // create user - _, err := global.Procedure(txData(), db, &common.ExecutionData{ - Dataset: usersDBID, - Procedure: "create_user", - Args: []any{"satoshi"}, - }) - require.NoError(t, err) - - // make a post - _, err = global.Procedure(txData(), db, &common.ExecutionData{ - Dataset: socialDBID, - Procedure: "create_post", - Args: []any{"hello world"}, - }) - require.NoError(t, err) - - // make another post - _, err = global.Procedure(txData(), db, &common.ExecutionData{ - Dataset: socialDBID, - Procedure: "create_post", - Args: []any{"goodbye world"}, - }) - require.NoError(t, err) - - // make one more large post - _, err = global.Procedure(txData(), db, &common.ExecutionData{ - Dataset: socialDBID, - Procedure: "create_post", - Args: []any{"this is a longer post than the others`"}, - }) - require.NoError(t, err) - - // get posts using get_recent_posts - res, err := global.Procedure(txData(), db, &common.ExecutionData{ - Dataset: socialDBID, - Procedure: "get_recent_posts", - Args: []any{"satoshi"}, - }) - require.NoError(t, err) - - // check the columns. should be id, content - require.Len(t, res.Columns, 2) - require.Equal(t, "id", res.Columns[0]) - require.Equal(t, "content", res.Columns[1]) - - // check the values - // the last post should be the first one returned - require.Len(t, res.Rows, 3) - require.Equal(t, "this is a longer post than the others`", res.Rows[0][1]) - require.Equal(t, "goodbye world", res.Rows[1][1]) - require.Equal(t, "hello world", res.Rows[2][1]) - - // use get_recent_posts_by_size to only get posts larger than 20 characters - res, err = global.Procedure(txData(), db, &common.ExecutionData{ - Dataset: socialDBID, - Procedure: "get_recent_posts_by_size", - Args: []any{"satoshi", 20, 10}, // takes username, size, limit - }) - require.NoError(t, err) - - // check the columns. should be id, content - require.Len(t, res.Columns, 2) - require.Equal(t, "id", res.Columns[0]) - require.Equal(t, "content", res.Columns[1]) - - // check the values - // the last post should be the first one returned - require.Len(t, res.Rows, 1) - require.Equal(t, "this is a longer post than the others`", res.Rows[0][1]) - }, - }, - { - // video game schema contains other functionalities, such as type assertions - // arithmetic, etc. 
TODO: add here once we support fixed point arithmetic - name: "test video game schema", - fn: func(t *testing.T, global *execution.GlobalContext, db sql.DB) { - usersDBID, _, gameDBID := deployAllSchemas(t, global, db) - - // create user - _, err := global.Procedure(txData(), db, &common.ExecutionData{ - Dataset: usersDBID, - Procedure: "create_user", - Args: []any{"satoshi"}, - }) - require.NoError(t, err) - - // set the user's high score - _, err = global.Procedure(txData(), db, &common.ExecutionData{ - Dataset: gameDBID, - Procedure: "set_high_score", - Args: []any{100}, - }) - require.NoError(t, err) - - // get the user's high score - res, err := global.Procedure(txData(), db, &common.ExecutionData{ - Dataset: gameDBID, - Procedure: "get_high_score", - Args: []any{"satoshi"}, - }) - require.NoError(t, err) - - // check the columns. should be score, as an int - require.Len(t, res.Columns, 1) - require.Equal(t, "score", res.Columns[0]) - - // check the values - require.Len(t, res.Rows, 1) - require.Equal(t, int64(100), res.Rows[0][0]) - }, - }, - { - name: "write data to foreign procedure", - fn: func(t *testing.T, global *execution.GlobalContext, db sql.DB) { - usersDBID, social_media, _ := deployAllSchemas(t, global, db) - - // create user. we do this in the social_media db to ensure the - // procedure can write to a foreign dataset - _, err := global.Procedure(txData(), db, &common.ExecutionData{ - Dataset: social_media, - Procedure: "create_user", - Args: []any{"satoshi"}, - }) - require.NoError(t, err) - - // get the user by name - res, err := global.Procedure(txData(), db, &common.ExecutionData{ - Dataset: usersDBID, - Procedure: "get_user_by_name", - Args: []any{"satoshi"}, - }) - require.NoError(t, err) - - // check the columns. should be owner, name - require.Len(t, res.Columns, 2) - - // check the values - require.Len(t, res.Rows, 1) - require.Equal(t, "test_owner", res.Rows[0][1]) - }, - }, - } - - for _, tc := range testCases { - t.Run(tc.name, func(t *testing.T) { - global, db, err := setup(t) - if err != nil { - t.Fatal(err) - } - defer cleanup(t, db) - - ctx := context.Background() - - tx, err := db.BeginTx(ctx) - require.NoError(t, err) - defer tx.Rollback(ctx) - - tc.fn(t, global, tx) - }) - } -} - -// loadSchema loads a schema from the schemas directory. -func loadSchema(file string) (*types.Schema, error) { - d, err := os.ReadFile("./schemas/" + file) - if err != nil { - return nil, err - } - - db, err := parse.Parse(d) - if err != nil { - return nil, err - } - - return db, nil -} - -// deployAllSchemas deploys all schemas in the schemas directory. -// it returns the dbid of the deployed schemas. -// It will also properly configure the metadata for social_media and video_game. 
-func deployAllSchemas(t *testing.T, global *execution.GlobalContext, db sql.DB) (usersDBID, socialMediaDBID, videoGameDBID string) { - schemas := []string{"users.kf", "social_media.kf", "video_game.kf"} - for _, schema := range schemas { - schema, err := loadSchema(schema) - require.NoError(t, err) - - transactionData := txData() - err = global.CreateDataset(transactionData, db, schema) - require.NoError(t, err) - } - - datasets, err := global.ListDatasets(owner) - require.NoError(t, err) - - // get the dbids for the three datasets - var users, socialMedia, videoGame string - for _, dataset := range datasets { - switch dataset.Name { - case "users": - users = dataset.DBID - case "social_media": - socialMedia = dataset.DBID - case "video_game": - videoGame = dataset.DBID - } - } - require.NotEmpty(t, users) - require.NotEmpty(t, socialMedia) - require.NotEmpty(t, videoGame) - - // set the metadata for social_media and video_game - // they each need three types of metadata: - // - dbid: the dbid of the dataset - // - userbyname: the procedure to get a user by name - // - userbyowner: the procedure to get a user by owner - for _, dbid := range []string{socialMedia, videoGame} { - _, err := global.Procedure(txData(), db, &common.ExecutionData{ - Dataset: dbid, - Procedure: "admin_set", - Args: []any{"dbid", users}, - }) - require.NoError(t, err) - - _, err = global.Procedure(txData(), db, &common.ExecutionData{ - Dataset: dbid, - Procedure: "admin_set", - Args: []any{"userbyname", "get_user_by_name"}, - }) - require.NoError(t, err) - - _, err = global.Procedure(txData(), db, &common.ExecutionData{ - Dataset: dbid, - Procedure: "admin_set", - Args: []any{"userbyowner", "get_user_by_owner"}, - }) - require.NoError(t, err) - } - - return users, socialMedia, videoGame -} - -// txCounter is a global counter for transaction ids. -var txCounter int - -func nextTxID() string { - txCounter++ - return fmt.Sprintf("tx_%d", txCounter) -} - -// txData returns a common.TxContext with the owner as the signer and caller. -func txData() *common.TxContext { - return &common.TxContext{ - Ctx: context.Background(), - BlockContext: &common.BlockContext{}, - Signer: owner, - Caller: string(owner), - TxID: nextTxID(), - } -} diff --git a/node/engine/integration/schemas/social_media.kf b/node/engine/integration/schemas/social_media.kf deleted file mode 100644 index 447caacde..000000000 --- a/node/engine/integration/schemas/social_media.kf +++ /dev/null @@ -1,130 +0,0 @@ -database social_media; - -// This schema is meant to compose with the user.kf, and implement a basic -// social media / blogging platform - -table post_counts { - user_id uuid primary key, - post_count int not null default(0) -} - -table posts { - id uuid primary key, - content text maxlen(300) not null, - author_id uuid not null, // references users db - // post_num is the index of the post for the author - // the first post for a user will be 1, then 2, 3, etc. - // it is used for chronological ordering - post_num int not null, - #author_idx index(author_id) -} - -// increment_post_count increments the post count for the user, identified by -// its user_id. It returns the user id and the new post count. 
If the user has not -// yet posted, it will set the post count to 1 -procedure increment_post_count($user_id uuid) private returns (post_count int) { - insert into post_counts (user_id, post_count) - values ($user_id, 1) - on conflict (user_id) do update - set post_count = post_counts.post_count + 1; - - for $row IN select post_count from post_counts where user_id = $user_id limit 1 { - return $row.post_count; - } -} - -// create_post creates a post -procedure create_post($content text) public { - $user_id uuid := get_user_id(@caller); - - // increment the post count - $post_count int := increment_post_count($user_id); - - INSERT INTO posts (id, content, author_id, post_num) - VALUES ( - uuid_generate_v5('985b93a4-2045-44d6-bde4-442a4e498bc6'::uuid, @txid), - $content, - $user_id, - $post_count - ); -} - -// get_recent_posts gets a users most recent posts, -// in descending order -procedure get_recent_posts($username text) public view returns table(id uuid, content text) { - // get the configured dbid and procedure - $user_id uuid := get_user_id(@caller); - - return SELECT id, content from posts - WHERE author_id = $user_id - ORDER BY post_num DESC - LIMIT 10; -} - -// get_recent_posts_by_size gets the $limit most recent posts from a user larger than size $size -// this obviously could be written more simply and efficiently with pure SQL. -// but the intent is to test RETURN NEXT and loops over procedures -procedure get_recent_posts_by_size($username text, $size int, $limit int) public view returns table(id uuid, content text) { - // set max limit of 50 - if $limit > 50 { - $limit := 50; - } - - $count int := 0; - for $row in select * from get_recent_posts($username) as a { - if $count == $limit { - break; - } - if length($row.content) >= $size { - $count := $count + 1; - return next $row.id, $row.content; - } - } -} - -// the following foreign procedures define procedures -// that the users db has. It returns redundant data since it needs -// to match the procedure signature defines in users. -foreign procedure get_user($address text) returns (uuid, text) -foreign procedure foreign_create_user(text) - -// table keyvalue is a kv table to track metadata -// for foreign calls. -table keyvalue { - k text primary, // key is a reserved word - value text not null -} - -procedure admin_set($key text, $value text) public owner { - insert into keyvalue (k, value) - values ($key, $value) - on conflict (k) do update set value = $value; -} - -procedure admin_get($key text) public view returns (value text) { - for $row in select value from keyvalue where k = $key { - return $row.value; - } - - error(format('admin has not set a value for k %s', $key)); -} - -// gets the user's id -procedure get_user_id($address text) public view returns (id uuid) { - $dbid text := admin_get('dbid'); - $procedure text := admin_get('userbyowner'); - - // get the user id - $user_id uuid; - $user_id, _ := get_user[$dbid, $procedure](@caller); - - return $user_id; -} - -// this simply tests that we can write data to foreign procedures. 
-procedure create_user($name text) public { - $dbid text := admin_get('dbid'); - $procedure text := admin_get('userbyowner'); - - foreign_create_user[$dbid, 'create_user']($name); -} diff --git a/node/engine/integration/schemas/users.kf b/node/engine/integration/schemas/users.kf deleted file mode 100644 index 06591c16d..000000000 --- a/node/engine/integration/schemas/users.kf +++ /dev/null @@ -1,40 +0,0 @@ -database users; - -// This schema implements a basic username registry, where a wallet can register one a unique -// username. - -table users { - id uuid primary key, - name text maxlen(30) not null unique, - address text not null unique -} - - -// create_user creates a user in the database. -// It is assigned a unique uuid. -procedure create_user($name text) public { - // we will generate a uuid from the txid - INSERT INTO users (id, name, address) - VALUES (uuid_generate_v5('985b93a4-2045-44d6-bde4-442a4e498bc6'::uuid, @txid), - $name, - @caller - ); -} - -// get_user_by_name gets a user id and address from a username -procedure get_user_by_name($name text) public view returns (id uuid, address text) { - for $row in SELECT id, address FROM users WHERE name = $name { - return $row.id, $row.address; // will return on the first iteration - } - - error(format('user "%s" not found', $name)); -} - -// get_user_by_owner gets a user id and name by the owner -procedure get_user_by_owner($address text) public view returns (id uuid, name text) { - for $row in SELECT id, name FROM users WHERE address = $address { - return $row.id, $row.name; // will return on the first iteration - } - - error(format('user owned by "%s" not found', $address)); -} \ No newline at end of file diff --git a/node/engine/integration/schemas/video_game.kf b/node/engine/integration/schemas/video_game.kf deleted file mode 100644 index 8ef15738b..000000000 --- a/node/engine/integration/schemas/video_game.kf +++ /dev/null @@ -1,77 +0,0 @@ -database video_game; - -// the video_game database is meant to compose with the users -// database, found in ./users.kf. The video_game database is -// a basic video game that tracks user scores. - -// table scores tracks user high scores -table scores { - // user_id references the user's uuid in the users database - // ideally, this would use a foreign key, but we do not have - // cross-db foreign keys yet. - user_id uuid primary key, - high_score int notnull min(0) default(0) -} - -// set_high_score registers a use that already exists in the foreign users db. -// if the user does not exist, it will return an error -procedure set_high_score($initial_score int) public { - $dbid text := admin_get('dbid'); - $procedure text := admin_get('userbyowner'); - - // calling the users's schema "get_user_by_owner" - $user_id uuid; - $user_id, _:= get_user[$dbid, $procedure](@caller); - - INSERT INTO scores(user_id, high_score) - VALUES ($user_id, $initial_score); -} - -// get_high_score gets the high score of a user, identified by username. 
-// if the user does not exist, it will return an error -procedure get_high_score($username text) public view returns (score int) { - // this is very convoluted, but it is meant to test type assertion - return get_high_score_text($username)::int; -} - -// get_high_score_text gets a users high score and returns it as text -procedure get_high_score_text($username text) public view returns (score text) { - $dbid text := admin_get('dbid'); - $procedure text := admin_get('userbyname'); - - // calling the users's schema "get_user_by_name" - $user_id uuid; - $user_id, _ := get_user[$dbid, $procedure]($username); - - for $row in select high_score from scores where user_id = $user_id { - return $row.high_score::text; - } - - error(format('user %s not found', $username)); -} - -// the following foreign procedures define procedures -// that the users db has. It returns redundant data since it needs -// to match the procedure signature defines in users. -foreign procedure get_user($address text) returns (uuid, text) - -// table keyvalue is a kv table to track metadata -// for foreign calls. -table keyvalue { - k text primary, - value text not null -} - -procedure admin_set($key text, $value text) public owner { - insert into keyvalue (k, value) - values ($key, $value) - on conflict (k) do update set value = $value; -} - -procedure admin_get($key text) public view returns (value text) { - for $row in select value from keyvalue where k = $key { - return $row.value; - } - - error(format('admin has not set a value for key %s', $key)); -} \ No newline at end of file diff --git a/node/engine/integration/setup_test.go b/node/engine/integration/setup_test.go deleted file mode 100644 index b7a033b76..000000000 --- a/node/engine/integration/setup_test.go +++ /dev/null @@ -1,137 +0,0 @@ -//go:build pglive && engineinteg - -// package integration_test contains full engine integration tests -package integration_test - -import ( - "context" - "fmt" - "strings" - "testing" - - "github.com/kwilteam/kwil-db/common" - "github.com/kwilteam/kwil-db/core/log" - actions "github.com/kwilteam/kwil-db/extensions/precompiles" - "github.com/kwilteam/kwil-db/node/engine/execution" - "github.com/kwilteam/kwil-db/node/pg" - "github.com/kwilteam/kwil-db/node/utils/conv" - "github.com/stretchr/testify/require" -) - -func TestMain(m *testing.M) { - // pg.UseLogger(log.NewStdOut(log.InfoLevel)) // uncomment for debugging - m.Run() -} - -// cleanup deletes all schemas and closes the database -func cleanup(t *testing.T, db *pg.DB) { - txCounter = 0 // reset the global tx counter, which is necessary to properly - // encapsulate each test and make their results independent of each other - - db.AutoCommit(true) - defer db.AutoCommit(false) - defer db.Close() - ctx := context.Background() - - _, err := db.Execute(ctx, `DO $$ - DECLARE - sn text; - BEGIN - FOR sn IN SELECT schema_name FROM information_schema.schemata WHERE schema_name LIKE 'ds_%' - LOOP - EXECUTE 'DROP SCHEMA ' || quote_ident(sn) || ' CASCADE'; - END LOOP; - END $$;`) - require.NoError(t, err) - - _, err = db.Execute(ctx, `DROP SCHEMA IF EXISTS kwild_internal CASCADE`) - require.NoError(t, err) -} - -// setup sets up the global context and registry for the tests -func setup(t *testing.T) (global *execution.GlobalContext, db *pg.DB, err error) { - ctx := context.Background() - - cfg := &pg.DBConfig{ - PoolConfig: pg.PoolConfig{ - ConnConfig: pg.ConnConfig{ - Host: "127.0.0.1", - Port: "5432", - User: "kwild", - Pass: "kwild", // would be ignored if pg_hba.conf set with trust 
- DBName: "kwil_test_db", - }, - MaxConns: 11, - }, - SchemaFilter: func(s string) bool { - return strings.Contains(s, pg.DefaultSchemaFilterPrefix) - }, - } - db, err = pg.NewDB(ctx, cfg) - if err != nil { - return nil, nil, err - } - - tx, err := db.BeginTx(ctx) - require.NoError(t, err) - defer tx.Rollback(ctx) - - err = execution.InitializeEngine(ctx, tx) - require.NoError(t, err) - - global, err = execution.NewGlobalContext(ctx, tx, map[string]actions.Initializer{ - "math": (&mathInitializer{}).initialize, - }, &common.Service{ - Logger: log.DiscardLogger, - }) - require.NoError(t, err) - - err = tx.Commit(ctx) - require.NoError(t, err) - - return global, db, nil -} - -// mocks a namespace initializer -type mathInitializer struct { - vals map[string]string -} - -func (m *mathInitializer) initialize(_ *actions.DeploymentContext, _ *common.Service, mp map[string]string) (actions.Instance, error) { - m.vals = mp - - _, ok := m.vals["fail"] - if ok { - return nil, fmt.Errorf("mock extension failed to initialize") - } - - return &mathExt{}, nil -} - -type mathExt struct{} - -var _ actions.Instance = &mathExt{} - -func (m *mathExt) Call(caller *actions.ProcedureContext, _ *common.App, method string, inputs []any) ([]any, error) { - if method != "add" { - return nil, fmt.Errorf("unknown method: %s", method) - } - - if len(inputs) != 2 { - return nil, fmt.Errorf("expected 2 inputs, got %d", len(inputs)) - } - - // The extension needs to tolerate any compatible input type. - - a, err := conv.Int(inputs[0]) - if err != nil { - return nil, fmt.Errorf("expected int64, got %T (%w)", inputs[0], err) - } - - b, err := conv.Int(inputs[1]) - if err != nil { - return nil, fmt.Errorf("expected int64, got %T (%w)", inputs[1], err) - } - - return []any{a + b}, nil -} diff --git a/node/engine/integration/sql_test.go b/node/engine/integration/sql_test.go deleted file mode 100644 index e701d48b0..000000000 --- a/node/engine/integration/sql_test.go +++ /dev/null @@ -1,265 +0,0 @@ -//go:build pglive && engineinteg - -package integration_test - -import ( - "context" - "testing" - - "github.com/stretchr/testify/require" - - "github.com/kwilteam/kwil-db/node/engine/execution" -) - -// the schema deployed here can be found in ./procedure_test.go -func Test_SQL(t *testing.T) { - type testcase struct { - name string - // pre is a sql statement that will be executed before the test - pre string - // sql is a sql statement that can be executed - sql string - // values is a map of values that can be used in the sql statement - values map[string]any - // want is the expected result of the sql statement - want [][]any - // err is the expected error, if any - err error - } - - tests := []testcase{ - { - name: "simple select", - sql: "SELECT name FROM users", - want: [][]any{ - {"satoshi"}, - {"wendys_drive_through_lady"}, - {"zeus"}, - }, - }, - { - name: "select with join", - sql: "select u.name, p.content from users u inner join posts p on u.id = p.user_id limit 1;", - want: [][]any{ - {"satoshi", "goodbye world"}, - }, - }, - { - name: "aggregate", - // getting the user and number of posts that have been made by that user - sql: "select u.name, count(p.id) from users u inner join posts p on u.id = p.user_id group by u.name;", - want: [][]any{ - {"satoshi", int64(3)}, - {"wendys_drive_through_lady", int64(3)}, - {"zeus", int64(3)}, - }, - }, - { - name: "compound select", - sql: `select name from users union all select name from users`, - want: [][]any{ - {"satoshi"}, - {"satoshi"}, - {"wendys_drive_through_lady"}, - 
{"wendys_drive_through_lady"}, - {"zeus"}, - {"zeus"}, - }, - }, - { - name: "convoluted", - sql: `select u.name, count(p.id) from ( - select id, name from users union all select '4a67d6ea-7ac8-453c-964e-5a144f9e3004'::uuid, 'hello' - ) u - left join ( - select id, user_id from posts union all select id, user_id from posts union all select '699e53a3-079c-40a6-b8ae-0d7bb7b40369'::uuid, '4a67d6ea-7ac8-453c-964e-5a144f9e3004'::uuid - ) as p on u.id = p.user_id group by u.name;`, - want: [][]any{ - {"hello", int64(1)}, - {"satoshi", int64(6)}, - {"wendys_drive_through_lady", int64(6)}, - {"zeus", int64(6)}, - }, - }, - { - name: "exists and collate", - sql: `select exists (select id from users where name = 'SATOSHI' collate nocase)`, - want: [][]any{ - {true}, - }, - }, - { - name: "in", - sql: `select name from users where name in ('satoshi', 'wendys_drive_through_lady')`, - want: [][]any{ - {"satoshi"}, - {"wendys_drive_through_lady"}, - }, - }, - { - name: "like and ilike", - sql: `select name from users where name like 's%' or name ilike 'w_Nd%'`, - want: [][]any{ - {"satoshi"}, - {"wendys_drive_through_lady"}, - }, - }, - { - name: "unary", - sql: `select 22.22=-22.22`, - want: [][]any{ - {false}, - }, - }, - { - name: "between", - sql: `select name from users where user_num between 2 and 3`, - want: [][]any{ - {"wendys_drive_through_lady"}, - {"zeus"}, - }, - }, - { - name: "is, case", - sql: `select name from (select - case when name like 's%' then true - else null end as is_satoshi, - name - from users - ) u where is_satoshi is true`, - want: [][]any{ - {"satoshi"}, - }, - }, - { - name: "null", - sql: `select name from users where user_num is null`, - want: [][]any{}, - }, - { - name: "is distinct from", - sql: `select name from users where user_num is distinct from 2`, - want: [][]any{ - {"satoshi"}, - {"zeus"}, - }, - }, - { - name: "insert with conflict", - // this will conflict on user_num = 1 - pre: `insert into users (id, name, user_num, wallet_address) values ('4a67d6ea-7ac8-453c-964e-5a144f9e3004'::uuid, 'hello', 1, '0xa'), ('4a67d6ea-7ac8-453c-964e-5a144f9e3005'::uuid, 'hello2', 4, '0xb') - on conflict (user_num) do update set name = 'hello3', user_num = excluded.user_num*10`, - sql: `select user_num from users where name like 'hello%'`, - want: [][]any{ - {int64(10)}, - {int64(4)}, - }, - }, - { - name: "update with subquery", - pre: `update users set wallet_address = 'hello' where id in ( - // hack here since we can't use aggregates in where clauses - select id from users u inner join (select count(*) as count, user_id from posts group by user_id) p on u.id = p.user_id where count >= 3 - )`, - sql: `select count(*) from users where wallet_address = 'hello'`, - want: [][]any{ - {int64(3)}, - }, - }, - { - name: "update from", - pre: `update posts p set content = u.name from users u where p.user_id = u.id`, - sql: `select distinct content from posts`, - want: [][]any{ - {"satoshi"}, - {"wendys_drive_through_lady"}, - {"zeus"}, - }, - }, - { - name: "delete", - pre: `delete from users where name = 'satoshi'`, - sql: `select name from users`, - want: [][]any{ - {"wendys_drive_through_lady"}, - {"zeus"}, - }, - }, - { - name: "select constant", - sql: "select 1", - want: [][]any{ - {int64(1)}, - }, - }, - { - // this is a regression test for a bug introduced - // in v0.8 - name: "values", - sql: "select $id", - values: map[string]any{ - "id": "4a67d6ea-7ac8-453c-964e-5a144f9e3004", - }, - want: [][]any{ - {"4a67d6ea-7ac8-453c-964e-5a144f9e3004"}, - }, - }, - { - name: "inferred 
type - failure", - sql: "select $id is null", - values: map[string]any{ - "id": "4a67d6ea-7ac8-453c-964e-5a144f9e3004", - }, - err: execution.ErrCannotInferType, - }, - { - name: "inferred type - success", - sql: "select $id::text is null", - values: map[string]any{ - "id": "4a67d6ea-7ac8-453c-964e-5a144f9e3004", - }, - want: [][]any{{false}}, - }, - } - - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - global, db, err := setup(t) - require.NoError(t, err) - defer cleanup(t, db) - - ctx := context.Background() - - tx, err := db.BeginTx(ctx) - require.NoError(t, err) - defer tx.Rollback(ctx) - - // deploy schema - dbid := deployAndSeed(t, global, tx) - - // if there is a pre statement, execute it - if tt.pre != "" { - _, err := global.Execute(txData(), tx, dbid, tt.pre, nil) - require.NoError(t, err) - } - - // execute sql - res, err := global.Execute(txData(), tx, dbid, tt.sql, tt.values) - if tt.err != nil { - require.Error(t, err) - require.ErrorIs(t, err, tt.err) - return - } - require.NoError(t, err) - - require.Len(t, res.Rows, len(tt.want)) - for i, row := range res.Rows { - require.Len(t, row, len(tt.want[i])) - for j, col := range row { - require.Equal(t, tt.want[i][j], col) - } - } - - }) - } -} diff --git a/node/engine/interpreter/README.md b/node/engine/interpreter/README.md new file mode 100644 index 000000000..057bd04bc --- /dev/null +++ b/node/engine/interpreter/README.md @@ -0,0 +1,12 @@ +# Kuneiform Interpreter + +The Kuneiform interpreter is meant to be a simple interpreter for performing basic arithmetic and access control logic. It is capable of: + +- if/then/else statements +- for loops +- basic arithmetic +- executing functions / other actions +- executing SQL statements + +For all function calls, it will make a call to Postgres, so that it can 100% match the functionality provided by Postgres. This is obviously very inefficient, +but it can be optimized later by mirroring Postgres functionality in Go. For now, we are prioritizing speed of development and breadth of supported functions. 
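A rough sketch of what that delegation could look like, assuming a minimal `rowQuerier` interface (both the interface and the `callBuiltin` helper below are illustrative, not the engine's actual types):

```go
package example

import (
	"context"
	"fmt"
	"strings"
)

// rowQuerier is a minimal stand-in for the engine's database handle;
// the real interpreter goes through its own query path and value types.
type rowQuerier interface {
	Execute(ctx context.Context, stmt string, args ...any) ([][]any, error)
}

// callBuiltin forwards a built-in function call to Postgres instead of
// re-implementing it in Go. For example, callBuiltin(ctx, db, "abs", -2)
// runs `SELECT abs($1)` and returns 2. name is assumed to be a known built-in.
func callBuiltin(ctx context.Context, db rowQuerier, name string, args ...any) (any, error) {
	placeholders := make([]string, len(args))
	for i := range placeholders {
		placeholders[i] = fmt.Sprintf("$%d", i+1)
	}
	rows, err := db.Execute(ctx, fmt.Sprintf("SELECT %s(%s)", name, strings.Join(placeholders, ", ")), args...)
	if err != nil {
		return nil, err
	}
	if len(rows) != 1 || len(rows[0]) != 1 {
		return nil, fmt.Errorf("expected a single value from %s", name)
	}
	return rows[0][0], nil
}
```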
\ No newline at end of file diff --git a/node/engine/interpreter/benchmark_test.go b/node/engine/interpreter/benchmark_test.go new file mode 100644 index 000000000..f0e512677 --- /dev/null +++ b/node/engine/interpreter/benchmark_test.go @@ -0,0 +1,66 @@ +package interpreter + +// func BenchmarkLoops(b *testing.B) { +// tests := []struct { +// name string +// proc *types.Procedure +// args []any +// }{ +// // { +// // name: "simple_loop", +// // proc: &types.Procedure{ +// // Name: "simple_loop", +// // Body: ` +// // $result int[]; +// // for $i in 1..100 { +// // $result := array_append($result, $i*2); +// // } +// // return $result; +// // `, +// // }, +// // }, +// { +// name: "test_loop", +// args: []any{1, 1000000}, +// proc: &types.Procedure{ +// Name: "loop", +// Body: ` +// $res := 0; +// for $i in $start..$end { +// $res := $res + $i; +// } +// return $res; +// `, +// Parameters: []*types.ProcedureParameter{ +// {Name: "$start", Type: types.IntType}, +// {Name: "$end", Type: types.IntType}, +// }, +// Returns: &types.ProcedureReturn{ +// IsTable: false, +// Fields: []*types.NamedType{ +// {Name: "res", Type: types.IntType}, +// }, +// }, +// }, +// }, +// } + +// ctx := context.Background() +// schema := &types.Schema{ +// Name: "test_schema", +// Procedures: []*types.Procedure{}, +// } + +// for _, tt := range tests { +// b.Run(tt.name, func(b *testing.B) { +// schema.Procedures = []*types.Procedure{tt.proc} +// b.ResetTimer() +// for i := 0; i < b.N; i++ { +// _, err := Run(ctx, tt.proc, schema, tt.args) +// if err != nil { +// b.Fatal(err) +// } +// } +// }) +// } +// } diff --git a/node/engine/interpreter/context.go b/node/engine/interpreter/context.go new file mode 100644 index 000000000..cd01e19c8 --- /dev/null +++ b/node/engine/interpreter/context.go @@ -0,0 +1,360 @@ +package interpreter + +import ( + "fmt" + + "github.com/kwilteam/kwil-db/common" + "github.com/kwilteam/kwil-db/core/types" + "github.com/kwilteam/kwil-db/extensions/precompiles" + "github.com/kwilteam/kwil-db/node/engine" + "github.com/kwilteam/kwil-db/node/engine/parse" + pggenerate "github.com/kwilteam/kwil-db/node/engine/pg_generate" + "github.com/kwilteam/kwil-db/node/engine/planner/logical" + "github.com/kwilteam/kwil-db/node/types/sql" +) + +// executionContext is the context of the entire execution. +type executionContext struct { + // txCtx is the transaction context. + txCtx *common.TxContext + // scope is the current scope. + scope *scopeContext + // canMutateState is true if the execution is capable of mutating state. + // If true, it must also be deterministic. + canMutateState bool + // db is the database to execute against. + db sql.DB + // interpreter is the interpreter that created this execution context. + interpreter *BaseInterpreter + // logs are the logs that have been generated. + logs []string +} + +// checkPrivilege checks that the current user has a privilege, +// and returns an error if they do not. +func (e *executionContext) checkPrivilege(priv privilege) error { + if !e.interpreter.accessController.HasPrivilege(e.txCtx.Caller, &e.scope.namespace, priv) { + return fmt.Errorf("%w: %s", ErrDoesNotHavePriv, priv) + } + + return nil +} + +// getNamespace gets the specified namespace. +// If the namespace does not exist, it will return an error. +// If the namespace is empty, it will return the current namespace. 
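// For example (illustrative; "my_ns" is a hypothetical user namespace):
//
//	ns, err := e.getNamespace("")      // namespace of the current scope
//	ns, err = e.getNamespace("my_ns")  // a named namespace, or ErrNamespaceNotFound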
+func (e *executionContext) getNamespace(namespace string) (*namespace, error) { + if namespace == "" { + namespace = e.scope.namespace + } + + ns, ok := e.interpreter.namespaces[namespace] + if !ok { + return nil, fmt.Errorf("%w: %s", ErrNamespaceNotFound, namespace) + } + + return ns, nil +} + +// getTable gets a table from the interpreter. +// It can optionally be given a namespace to search in. +// If the namespace is empty, it will search the current namespace. +func (e *executionContext) getTable(namespace, tableName string) (*engine.Table, bool) { + ns, err := e.getNamespace(namespace) + if err != nil { + panic(err) // we should never hit an error here + } + + table, ok := ns.tables[tableName] + return table, ok +} + +// query executes a query. +// It will parse the SQL, create a logical plan, and execute the query. +func (e *executionContext) query(sql string, fn func(*row) error) error { + res, err := parse.Parse(sql) + if err != nil { + return err + } + + if len(res) != 1 { + // this is an node bug b/c `query` is only called with a single statement + // from the interpreter + return fmt.Errorf("node bug: expected exactly 1 statement, got %d", len(res)) + } + + sqlStmt, ok := res[0].(*parse.SQLStatement) + if !ok { + return fmt.Errorf("node bug: expected *parse.SQLStatement, got %T", res[0]) + } + + // create a logical plan. This will make the query deterministic (if necessary), + // as well as tell us what the return types will be. + analyzed, err := logical.CreateLogicalPlan( + sqlStmt, + e.getTable, + func(varName string) (dataType *types.DataType, found bool) { + val, found := e.getVariable(varName) + if !found { + return nil, false + } + + // if it is a record, then return nil + if _, ok := val.(*RecordValue); ok { + return nil, false + } + + return val.Type(), true + }, + func(objName string) (obj map[string]*types.DataType, found bool) { + val, found := e.getVariable(objName) + if !found { + return nil, false + } + + if rec, ok := val.(*RecordValue); ok { + dt := make(map[string]*types.DataType) + for _, field := range rec.Order { + dt[field] = rec.Fields[field].Type() + } + + return dt, true + } + + return nil, false + }, + e.canMutateState, + e.scope.namespace, + ) + if err != nil { + return err + } + + generatedSQL, params, err := pggenerate.GenerateSQL(sqlStmt, e.scope.namespace) + if err != nil { + return err + } + + // get the params we will pass + var args []Value + for _, param := range params { + val, found := e.getVariable(param) + if !found { + return fmt.Errorf("%w: %s", ErrVariableNotFound, param) + } + + args = append(args, val) + } + + // get the scan values as well: + var scanValues []Value + for _, field := range analyzed.Plan.Relation().Fields { + scalar, err := field.Scalar() + if err != nil { + return err + } + + zVal, err := NewZeroValue(scalar) + if err != nil { + return err + } + + scanValues = append(scanValues, zVal) + } + + cols := make([]string, len(analyzed.Plan.Relation().Fields)) + for i, field := range analyzed.Plan.Relation().Fields { + cols[i] = field.Name + } + + return query(e.txCtx.Ctx, e.db, generatedSQL, scanValues, func() error { + if len(scanValues) != len(cols) { + // should never happen, but just in case + return fmt.Errorf("node bug: scan values and columns are not the same length") + } + + return fn(&row{ + columns: cols, + Values: scanValues, + }) + }, args) +} + +// executable is the interface and function to call a built-in Postgres function, +// a user-defined Postgres procedure, or a user-defined Kwil action. 
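// As a hypothetical sketch, a built-in such as abs() could be wrapped like so
// (absFunc here is an illustrative execFunc that forwards the call to Postgres):
//
//	abs := &executable{
//		Name: "abs",
//		Type: executableTypeFunction,
//		Func: absFunc,
//	}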
+type executable struct { + // Name is the name of the function. + Name string + // Func is a function that executes the function. + Func execFunc + // Type is the type of the executable. + Type executableType +} + +type executableType string + +const ( + // executableTypeFunction is a built-in Postgres function. + executableTypeFunction executableType = "function" + // executableTypeAction is a user-defined Kwil action. + executableTypeAction executableType = "action" + // executableTypePrecompile is a precompiled extension. + executableTypePrecompile executableType = "precompile" +) + +type execFunc func(exec *executionContext, args []Value, returnFn resultFunc) error + +// newScope creates a new scope. +func newScope(namespace string) *scopeContext { + return &scopeContext{ + variables: make(map[string]Value), + namespace: namespace, + } +} + +// subScope creates a new sub-scope, which has access to the parent scope. +func (s *scopeContext) subScope() *scopeContext { + return &scopeContext{ + parent: s, + variables: make(map[string]Value), + } +} + +// setVariable sets a variable in the current scope. +// It will allocate the variable if it does not exist. +// if we are setting a variable that was defined in an outer scope, +// it will overwrite the variable in the outer scope. +func (e *executionContext) setVariable(name string, value Value) error { + _, foundScope, found := getVarFromScope(name, e.scope) + if !found { + return e.allocateVariable(name, value) + } + + foundScope.variables[name] = value + return nil +} + +// allocateVariable allocates a variable in the current scope. +func (e *executionContext) allocateVariable(name string, value Value) error { + _, ok := e.scope.variables[name] + if ok { + return fmt.Errorf(`variable "%s" already exists`, name) + } + + e.scope.variables[name] = value + return nil +} + +// getVariable gets a variable from the current scope. +// It searches the parent scopes if the variable is not found. +// It returns the value and a boolean indicating if the variable was found. +func (e *executionContext) getVariable(name string) (Value, bool) { + if len(name) == 0 { + return nil, false + } + + switch name[0] { + case '$': + v, _, f := getVarFromScope(name, e.scope) + return v, f + case '@': + switch name[1:] { + case "caller": + return newText(e.txCtx.Caller), true + case "txid": + return newText(e.txCtx.TxID), true + case "signer": + return newBlob(e.txCtx.Signer), true + case "height": + return newInt(e.txCtx.BlockContext.Height), true + case "foreign_caller": + if e.scope.parent != nil { + return newText(e.scope.parent.namespace), true + } else { + return newText(""), true + } + case "block_timestamp": + return newInt(e.txCtx.BlockContext.Timestamp), true + case "authenticator": + return newText(e.txCtx.Authenticator), true + } + } + + return nil, false +} + +// reloadTables reloads the cached tables from the database for the current namespace. +func (e *executionContext) reloadTables() error { + tables, err := listTablesInNamespace(e.txCtx.Ctx, e.db, e.scope.namespace) + if err != nil { + return err + } + + ns := e.interpreter.namespaces[e.scope.namespace] + + ns.tables = make(map[string]*engine.Table) + for _, table := range tables { + ns.tables[table.Name] = table + } + + return nil +} + +// canExecute checks if the context can execute the action. +// It returns an error if it cannot. 
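// The checks correspond one-to-one to the action modifiers: a non-VIEW action
// needs a transaction that can mutate state, a PRIVATE action must be called
// from its own namespace, a SYSTEM action must have an outer calling scope,
// and an OWNER action requires the caller to be the database owner.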
+func (e *executionContext) canExecute(namespace string, name string, modifiers precompiles.Modifiers) error { + // if the ctx cannot mutate state and the action is not a view (and thus might try to mutate state), + // then return an error + if !modifiers.Has(precompiles.VIEW) && !e.canMutateState { + return fmt.Errorf("%w: cannot execute action %s in a read-only transaction", ErrActionMutatesState, name) + } + + // if the action is private, then the calling namespace must be the same as the action's namespace + if modifiers.Has(precompiles.PRIVATE) && e.scope.namespace != namespace { + return fmt.Errorf("%w: action %s is private", ErrActionPrivate, name) + } + + // if it is system-only, then this must not be called without an outer scope + if modifiers.Has(precompiles.SYSTEM) && e.scope.parent == nil { + return fmt.Errorf("%w: action %s is system-only", ErrSystemOnly, name) + } + + // if the action is owner only, then check if the user is the owner + if modifiers.Has(precompiles.OWNER) && !e.interpreter.accessController.IsOwner(e.txCtx.Caller) { + return fmt.Errorf("%w: action %s can only be executed by the owner", ErrActionOwnerOnly, name) + } + + return nil +} + +func (e *executionContext) app() *common.App { + // we need to wait until we make changes to the engine interface for extensions before we can implement this + return &common.App{ + Service: e.interpreter.service, + DB: e.db, + Engine: e.interpreter, + } +} + +// getVarFromScope recursively searches the scopes for a variable. +// It returns the value, as well as the scope it was found in. +func getVarFromScope(variable string, scope *scopeContext) (Value, *scopeContext, bool) { + if v, ok := scope.variables[variable]; ok { + return v, scope, true + } + if scope.parent == nil { + return nil, nil, false + } + return getVarFromScope(variable, scope.parent) +} + +// scopeContext is the context for the current block of code. +type scopeContext struct { + // parent is the parent scope. + // if the parent is nil, this is the root + parent *scopeContext + // variables are the variables stored in memory. + variables map[string]Value + // namespace is the current namespace. 
+ namespace string +} diff --git a/node/engine/interpreter/errors.go b/node/engine/interpreter/errors.go new file mode 100644 index 000000000..49adce06b --- /dev/null +++ b/node/engine/interpreter/errors.go @@ -0,0 +1,32 @@ +package interpreter + +import ( + "errors" + "fmt" +) + +var ( + ErrUnaryOnNonScalar = errors.New("cannot perform unary operation on a non-scalar value") + ErrTypeMismatch = errors.New("type mismatch") + ErrIndexOutOfBounds = errors.New("index out of bounds") + ErrVariableNotFound = errors.New("variable not found") + ErrStatementMutatesState = errors.New("statement mutates state") + ErrActionMutatesState = errors.New("action mutates state") + ErrActionOwnerOnly = errors.New("action is owner-only") + ErrActionPrivate = errors.New("action is private") + ErrSystemOnly = errors.New("system-only action") + ErrCannotDrop = errors.New("cannot drop") + ErrCannotCall = errors.New("cannot call action") + ErrDoesNotHavePriv = errors.New("does not have privilege") + ErrNamespaceNotFound = errors.New("namespace not found") + ErrNamespaceExists = errors.New("namespace already exists") + ErrArithmetic = errors.New("arithmetic error") + ErrComparison = errors.New("comparison error") + ErrCast = errors.New("type cast error") + ErrUnary = errors.New("unary operation error") + ErrArrayMixedTypes = errors.New("array contains mixed types") +) + +func castErr(e error) error { + return fmt.Errorf("%w: %s", ErrCast, e) +} diff --git a/node/engine/interpreter/extension.go b/node/engine/interpreter/extension.go new file mode 100644 index 000000000..12539aaec --- /dev/null +++ b/node/engine/interpreter/extension.go @@ -0,0 +1,82 @@ +package interpreter + +import ( + "context" + "fmt" + "strings" + + "github.com/kwilteam/kwil-db/common" + "github.com/kwilteam/kwil-db/extensions/precompiles" + "github.com/kwilteam/kwil-db/node/engine" + "github.com/kwilteam/kwil-db/node/types/sql" +) + +// initializeExtension initializes an extension. 
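// It converts the metadata Values to plain Go values, invokes the extension's
// Initializer, and wraps each method the instance exposes in an executable of
// type executableTypePrecompile, so the resulting namespace can be called like
// any other namespace.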
+func initializeExtension(ctx context.Context, svc *common.Service, db sql.DB, i precompiles.Initializer, metadata map[string]Value) (*namespace, error) { + convertedMetadata := make(map[string]any) + for k, v := range metadata { + convertedMetadata[k] = v.RawValue() + } + + inst, err := i(ctx, svc, db, convertedMetadata) + if err != nil { + return nil, err + } + + // we construct a map of methods + methods := make(map[string]*executable) + for _, method := range inst.Methods() { + lowerName := strings.ToLower(method.Name) + + _, ok := methods[lowerName] + if ok { + return nil, fmt.Errorf("duplicate method %s", lowerName) + } + methods[lowerName] = &executable{ + Name: lowerName, + Func: func(exec *executionContext, args []Value, fn resultFunc) error { + argVals := make([]any, len(args)) + for i, arg := range args { + var err error + argVals[i], err = NewValue(arg) + if err != nil { + return err + } + } + + return method.Call(exec.txCtx, exec.app(), argVals, func(a []any) error { + resultVals := make([]Value, len(a)) + for i, result := range a { + var err error + resultVals[i], err = NewValue(result) + if err != nil { + return err + } + } + + if len(method.ReturnColumns) != 0 && len(method.ReturnColumns) != len(resultVals) { + return fmt.Errorf("method %s returned %d values, but expected %d", method.Name, len(resultVals), len(method.ReturnColumns)) + } + + return fn(&row{ + columns: method.ReturnColumns, // it is ok if this is nil + Values: resultVals, + }) + }) + }, + Type: executableTypePrecompile, + } + } + + return &namespace{ + availableFunctions: methods, + tables: make(map[string]*engine.Table), + onDeploy: func(ctx *executionContext) error { + return inst.OnUse(ctx.txCtx, ctx.app()) + }, + onUndeploy: func(ctx *executionContext) error { + return inst.OnUnuse(ctx.txCtx, ctx.app()) + }, + namespaceType: namespaceTypeExtension, + }, nil +} diff --git a/node/engine/interpreter/interpreter.go b/node/engine/interpreter/interpreter.go new file mode 100644 index 000000000..5f17492f9 --- /dev/null +++ b/node/engine/interpreter/interpreter.go @@ -0,0 +1,494 @@ +package interpreter + +import ( + "context" + _ "embed" + "fmt" + "regexp" + "strings" + "sync" + + "github.com/kwilteam/kwil-db/common" + "github.com/kwilteam/kwil-db/core/types" + "github.com/kwilteam/kwil-db/core/types/validation" + "github.com/kwilteam/kwil-db/core/utils/order" + "github.com/kwilteam/kwil-db/extensions/precompiles" + "github.com/kwilteam/kwil-db/node/engine" + "github.com/kwilteam/kwil-db/node/engine/parse" + "github.com/kwilteam/kwil-db/node/types/sql" +) + +// ThreadSafeInterpreter is a thread-safe interpreter. +// It is defined as a separate struct because there are time where +// the interpreter recursively calls itself, and we need to avoid +// deadlocks. +type ThreadSafeInterpreter struct { + mu sync.RWMutex + i *BaseInterpreter +} + +// lock locks the interpreter with either a read or write lock, depending on the access mode of the database. 
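+// Read-only transactions cannot alter the interpreter's in-memory state (namespaces,
+// tables, roles), so they share a read lock; read-write transactions may run DDL that
+// mutates those caches and therefore take the exclusive lock.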
+func (t *ThreadSafeInterpreter) lock(db sql.DB) (unlock func(), err error) { + am, ok := db.(sql.AccessModer) + if !ok { + return nil, fmt.Errorf("database does not implement AccessModer") + } + + if am.AccessMode() == sql.ReadOnly { + t.mu.RLock() + return t.mu.RUnlock, nil + } + + t.mu.Lock() + return t.mu.Unlock, nil +} + +func (t *ThreadSafeInterpreter) Call(ctx *common.TxContext, db sql.DB, namespace string, action string, args []any, resultFn func(*common.Row) error) (*common.CallResult, error) { + unlock, err := t.lock(db) + if err != nil { + return nil, err + } + defer unlock() + + return t.i.Call(ctx, db, namespace, action, args, resultFn) +} + +func (t *ThreadSafeInterpreter) Execute(ctx *common.TxContext, db sql.DB, statement string, params map[string]any, fn func(*common.Row) error) error { + unlock, err := t.lock(db) + if err != nil { + return err + } + defer unlock() + + return t.i.Execute(ctx, db, statement, params, fn) +} + +func (t *ThreadSafeInterpreter) SetOwner(ctx context.Context, db sql.DB, owner string) error { + // we always need to lock for this + t.mu.Lock() + defer t.mu.Unlock() + + return t.i.SetOwner(ctx, db, owner) +} + +// BaseInterpreter interprets Kwil SQL statements. +type BaseInterpreter struct { + namespaces map[string]*namespace + // accessController is used to check if a user has access to a namespace + accessController *accessController + // service is the base application + service *common.Service +} + +// a namespace is a collection of tables and actions. +// It is conceptually equivalent to Postgres's schema, but is given a +// different name to avoid confusion. +type namespace struct { + // availableFunctions is a map of both built-in functions and user-defined PL/pgSQL functions. + // When the interpreter planner is created, it will be populated with all built-in functions, + // and then it will be updated with user-defined functions, effectively allowing users to override + // some function name with their own implementation. This allows Kwil to add new built-in + // functions without worrying about breaking user schemas. + // This will not include aggregate and window functions, as those can only be used in SQL. + // availableFunctions maps local action names to their execution func. + availableFunctions map[string]*executable + tables map[string]*engine.Table + + // onDeploy is called exactly once when the namespace is deployed. + // It is used to set up the namespace. + onDeploy func(ctx *executionContext) error + // onUndeploy is called exactly once when the namespace is undeployed. + // It is used to clean up the namespace. + onUndeploy func(ctx *executionContext) error + + // namespaceType is the type of namespace. + // It can be user-created, built-in, or extension. + namespaceType namespaceType +} + +type namespaceType string + +const ( + namespaceTypeUser namespaceType = "USER" + namespaceTypeSystem namespaceType = "SYSTEM" + namespaceTypeExtension namespaceType = "EXTENSION" +) + +func (n namespaceType) valid() bool { + switch n { + case namespaceTypeUser, namespaceTypeSystem, namespaceTypeExtension: + return true + default: + return false + } +} + +// NewInterpreter creates a new interpreter. +// It reads currently stored namespaces and loads them into memory. +func NewInterpreter(ctx context.Context, db sql.DB, service *common.Service) (*ThreadSafeInterpreter, error) { + var exists bool + count := 0 + // we need to check if it is initialized. 
We will do this by checking if the schema kwild_engine exists + err := queryRowFunc(ctx, db, "SELECT EXISTS (SELECT 1 FROM information_schema.schemata WHERE schema_name = 'kwild_engine')", []any{&exists}, func() error { + count++ + return nil + }) + if err != nil { + return nil, err + } + + switch count { + case 0: + return nil, fmt.Errorf("could not determine if the database is initialized") + case 1: + if !exists { + err = initSQL(ctx, db) + if err != nil { + return nil, err + } + } + default: + return nil, fmt.Errorf("unexpected number of rows returned") + } + + namespaces, err := listNamespaces(ctx, db) + if err != nil { + return nil, err + } + + interpreter := &BaseInterpreter{ + namespaces: make(map[string]*namespace), + service: service, + } + for _, ns := range namespaces { + tables, err := listTablesInNamespace(ctx, db, ns.Name) + if err != nil { + return nil, err + } + + tblMap := make(map[string]*engine.Table) + for _, tbl := range tables { + tblMap[tbl.Name] = tbl + } + + actions, err := listActionsInNamespace(ctx, db, ns.Name) + if err != nil { + return nil, err + } + + // now, we override the built-in functions with the actions + namespaceFunctions := copyBuiltinExecutables() + for _, action := range actions { + exec := makeActionToExecutable(ns.Name, action) + namespaceFunctions[exec.Name] = exec + } + + interpreter.namespaces[ns.Name] = &namespace{ + tables: tblMap, + availableFunctions: namespaceFunctions, + namespaceType: ns.Type, + onDeploy: func(ctx *executionContext) error { return nil }, + onUndeploy: func(ctx *executionContext) error { return nil }, + } + } + + accessController, err := newAccessController(ctx, db) + if err != nil { + return nil, err + } + interpreter.accessController = accessController + + // get and initialize all used extensions + storedExts, err := getExtensionInitializationMetadata(ctx, db) + if err != nil { + return nil, err + } + + systemExtensions := precompiles.RegisteredPrecompiles() + for _, ext := range storedExts { + sysExt, ok := systemExtensions[ext.ExtName] + if !ok { + return nil, fmt.Errorf("the database has an extension in use that is unknown to the system: %s", ext.ExtName) + } + + namespace, err := initializeExtension(ctx, service, db, sysExt, ext.Metadata) + if err != nil { + return nil, err + } + + _, ok = interpreter.namespaces[ext.Alias] + if ok { + // should never happen, as this should have been caught during initialization + return nil, fmt.Errorf("internal bug on startup: extension alias %s is already in use", ext.Alias) + } + + interpreter.namespaces[ext.Alias] = namespace + } + + return &ThreadSafeInterpreter{ + i: interpreter, + }, nil +} + +// funcDefToExecutable converts a function definition to an executable. +func funcDefToExecutable(funcName string, funcDef *parse.ScalarFunctionDefinition) *executable { + return &executable{ + Name: funcName, + Func: func(e *executionContext, args []Value, fn resultFunc) error { + //convert args to any + params := make([]string, len(args)) + argTypes := make([]*types.DataType, len(args)) + for i, arg := range args { + params[i] = fmt.Sprintf("$%d", i+1) + argTypes[i] = arg.Type() + } + + // get the expected return type + retTyp, err := funcDef.ValidateArgsFunc(argTypes) + if err != nil { + return err + } + + zeroVal, err := NewZeroValue(retTyp) + if err != nil { + return err + } + + if funcName == "notice" { + // if the function name is notice, then we need to get write the notice to our logs locally. + // This is a special case, as we don't want to execute the query. 
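+	// The first argument's raw string value is appended to the execution context's
+	// logs, which are surfaced to the caller in CallResult.Logs.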
+ e.logs = append(e.logs, args[0].RawValue().(string)) + return nil + } + // format the function + pgFormat, err := funcDef.PGFormatFunc(params) + if err != nil { + return err + } + + // execute the query + // We could avoid a roundtrip here by having go implementating of the function. + // Since for now we are more concerned about expanding functionality than scalability, + // we will use the roundtrip. + iters := 0 + err = query(e.txCtx.Ctx, e.db, "SELECT "+pgFormat+";", []Value{zeroVal}, func() error { + iters++ + return nil + }, args) + if err != nil { + return err + } + if iters != 1 { + return fmt.Errorf("expected 1 row, got %d", iters) + } + + return fn(&row{ + columns: []string{funcName}, + Values: []Value{zeroVal}, + }) + }, + Type: executableTypeFunction, + } +} + +// Execute executes a statement against the database. +func (i *BaseInterpreter) Execute(ctx *common.TxContext, db sql.DB, statement string, params map[string]any, fn func(*common.Row) error) error { + if fn == nil { + fn = func(*common.Row) error { return nil } + } + + // parse the statement + ast, err := parse.Parse(statement) + if err != nil { + return err + } + + if len(ast) == 0 { + return fmt.Errorf("no valid statements provided: %s", statement) + } + + execCtx, err := i.newExecCtx(ctx, db, defaultNamespace) + if err != nil { + return err + } + + for _, param := range order.OrderMap(params) { + val, err := NewValue(param.Value) + if err != nil { + return err + } + + name := strings.ToLower(param.Key) + if !strings.HasPrefix(name, "$") { + name = "$" + name + } + if err := isValidVarName(name); err != nil { + return err + } + + err = execCtx.setVariable(name, val) + if err != nil { + return err + } + } + + interpPlanner := interpreterPlanner{} + + for _, stmt := range ast { + err = stmt.Accept(&interpPlanner).(stmtFunc)(execCtx, func(row *row) error { + return fn(rowToCommonRow(row)) + }) + if err != nil { + return err + } + } + + return nil +} + +var identRegexp = regexp.MustCompile(`^[A-Za-z][A-Za-z0-9_]*$`) + +// isValidVarName checks if a string is a valid variable name. +func isValidVarName(s string) error { + if !strings.HasPrefix(s, "$") { + return fmt.Errorf("variable name must start with $") + } + + if !identRegexp.MatchString(s[1:]) { + return fmt.Errorf("variable name must only contain letters, numbers, and underscores") + } + + // we ignore the $ as part of the "name" + if len(s[1:]) > validation.MAX_IDENT_NAME_LENGTH { + return fmt.Errorf("variable name cannot be longer than %d characters, received %s", validation.MAX_IDENT_NAME_LENGTH, s) + } + + return nil +} + +// Call executes an action against the database. +// The resultFn is called with the result of the action, if any. +func (i *BaseInterpreter) Call(ctx *common.TxContext, db sql.DB, namespace, action string, args []any, resultFn func(*common.Row) error) (*common.CallResult, error) { + if resultFn == nil { + resultFn = func(*common.Row) error { return nil } + } + + if namespace == "" { + namespace = defaultNamespace + } + + ns, ok := i.namespaces[namespace] + if !ok { + return nil, fmt.Errorf(`namespace "%s" does not exist`, namespace) + } + + // now we can call the executable. The executable checks that the caller is allowed to call the action + // (e.g. 
in case of a private action or owner action) + exec, ok := ns.availableFunctions[action] + if !ok { + // this should never happen + return nil, fmt.Errorf(`node bug: action "%s" does not exist in namespace "%s"`, action, namespace) + } + + switch exec.Type { + case executableTypeFunction: + return nil, fmt.Errorf(`%w: action "%s" is a built-in function and cannot be called directly`, ErrCannotCall, action) + case executableTypeAction, executableTypePrecompile: + // do nothing, this is what we want + default: + return nil, fmt.Errorf(`node bug: unknown executable type "%s"`, exec.Type) + } + + argVals := make([]Value, len(args)) + for i, arg := range args { + val, err := NewValue(arg) + if err != nil { + return nil, err + } + + argVals[i] = val + } + + execCtx, err := i.newExecCtx(ctx, db, namespace) + if err != nil { + return nil, err + } + + err = exec.Func(execCtx, argVals, func(row *row) error { + return resultFn(rowToCommonRow(row)) + }) + if err != nil { + return nil, err + } + + return &common.CallResult{ + Logs: execCtx.logs, + }, nil +} + +func rowToCommonRow(row *row) *common.Row { + // convert the results to any + anyResults := make([]any, len(row.Values)) + dataTypes := make([]*types.DataType, len(row.Values)) + for i, result := range row.Values { + anyResults[i] = result.RawValue() + dataTypes[i] = result.Type() + } + + return &common.Row{ + ColumnNames: row.Columns(), + ColumnTypes: dataTypes, + Values: anyResults, + } +} + +// newExecCtx creates a new execution context. +func (i *BaseInterpreter) newExecCtx(txCtx *common.TxContext, db sql.DB, namespace string) (*executionContext, error) { + am, ok := db.(sql.AccessModer) + if !ok { + return nil, fmt.Errorf("database does not implement AccessModer") + } + + return &executionContext{ + txCtx: txCtx, + scope: newScope(namespace), + canMutateState: am.AccessMode() == sql.ReadWrite, + db: db, + interpreter: i, + }, nil +} + +// SetOwner initializes the interpreter's database by setting the owner. +// It will overwrite the owner if it is already set. +func (i *BaseInterpreter) SetOwner(ctx context.Context, db sql.DB, owner string) error { + err := i.accessController.SetOwnership(ctx, db, string(owner)) + if err != nil { + return err + } + return nil +} + +const ( + defaultNamespace = "main" +) + +var builtInExecutables = func() map[string]*executable { + execs := make(map[string]*executable) + for funcName, impl := range parse.Functions { + if scalarImpl, ok := impl.(*parse.ScalarFunctionDefinition); ok { + execs[funcName] = funcDefToExecutable(funcName, scalarImpl) + } + } + + return execs +}() + +// copyBuiltinExecutables returns a map of built-in functions to their executables. 
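+// Each namespace gets its own copy so that a user-defined action can shadow a
+// built-in function locally without mutating the shared builtInExecutables map.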
+func copyBuiltinExecutables() map[string]*executable { + b := make(map[string]*executable) + for k, v := range builtInExecutables { + b[k] = v + } + + return b +} diff --git a/node/engine/interpreter/interpreter_test.go b/node/engine/interpreter/interpreter_test.go new file mode 100644 index 000000000..8da31628c --- /dev/null +++ b/node/engine/interpreter/interpreter_test.go @@ -0,0 +1,415 @@ +//go:build pglive + +package interpreter_test + +import ( + "context" + "testing" + + "github.com/kwilteam/kwil-db/common" + "github.com/kwilteam/kwil-db/node/engine/interpreter" + "github.com/kwilteam/kwil-db/node/pg" + "github.com/stretchr/testify/require" +) + +const ( + defaultCaller = "owner" + createUsersTable = ` +CREATE TABLE users ( + id INT PRIMARY KEY, + name TEXT, + age INT +); + ` + + createPostsTable = ` +CREATE TABLE posts ( + id INT PRIMARY KEY, + owner_id INT NOT NULL REFERENCES users(id), + content TEXT, + created_at INT +); + ` +) + +func Test_SQL(t *testing.T) { + type testcase struct { + name string // name of the test + // array of sql statements, first element is the namespace, second is the sql statement + // they can begin with {namespace}sql, or just sql + sql []string + execSQL string // sql to return the results. Either this or execAction must be set + results [][]any // table of results + err error // expected error, can be nil. Errors _MUST_ occur on the exec. This is a direct match + errContains string // expected error message, can be empty. Errors _MUST_ occur on the exec. This is a substring match + } + + tests := []testcase{ + { + name: "insert and select", + sql: []string{ + "INSERT INTO users (id, name, age) VALUES (1, 'Alice', 30);", + }, + execSQL: "SELECT name, age FROM users;", + results: [][]any{ + {"Alice", int64(30)}, + }, + }, + { + name: "create namespace, add table, add record, alter table, select", + sql: []string{ + "CREATE NAMESPACE test;", + "{test}CREATE TABLE users (id INT PRIMARY KEY, name TEXT, age INT);", + "{test}INSERT INTO users (id, name, age) VALUES (1, 'Bob', 30);", + "{test}ALTER TABLE users DROP COLUMN age;", + }, + execSQL: "{test}SELECT * FROM users;", + results: [][]any{ + {int64(1), "Bob"}, + }, + }, + { + name: "foreign key across namespaces", + sql: []string{ + "CREATE NAMESPACE test1;", + "CREATE NAMESPACE test2;", + "{test1}CREATE TABLE users (id INT PRIMARY KEY, name TEXT);", + `{test2}CREATE TABLE posts (id INT PRIMARY KEY, + owner_id INT NOT NULL REFERENCES test1.users(id) ON UPDATE CASCADE ON DELETE CASCADE, + content TEXT, created_at INT);`, + "{test1}INSERT INTO users (id, name) VALUES (1, 'Alice'), (2, 'Bob');", + "{test2}INSERT INTO posts (id, owner_id, content, created_at) VALUES (1, 1, 'Hello', @height), (2, 2, 'World', @height);", + "{test1}DELETE FROM users WHERE id = 1;", + }, + execSQL: `{test2}SELECT * FROM posts;`, + results: [][]any{ + {int64(2), int64(2), "World", int64(1)}, + }, + }, + { + name: "update and delete", + sql: []string{ + "INSERT INTO users (id, name, age) VALUES (1, 'Alice', 30), (2, 'Bob', 40);", + "UPDATE users SET age = 50 WHERE name = 'Alice';", + "DELETE FROM users WHERE age = 40;", + }, + execSQL: "SELECT name, age FROM users;", + results: [][]any{ + {"Alice", int64(50)}, + }, + }, + { + name: "recursive common table expression", + execSQL: ` + with recursive r as ( + select 1 as n + union all + select n+1 from r where n < 6 + ) + select * from r; + `, + results: [][]any{ + {int64(1)}, {int64(2)}, {int64(3)}, {int64(4)}, {int64(5)}, {int64(6)}, + }, + }, + { + name: "alter table add 
column", + sql: []string{ + "ALTER TABLE users ADD COLUMN email TEXT;", + "INSERT INTO users (id, name, age, email) VALUES (1, 'Alice', 30, 'alice@kwil.com');", + }, + execSQL: "SELECT name, age, email FROM users;", + results: [][]any{ + {"Alice", int64(30), "alice@kwil.com"}, + }, + }, + { + name: "alter table drop column", + sql: []string{ + "INSERT INTO users (id, name, age) VALUES (1, 'Alice', 30);", + "ALTER TABLE users DROP COLUMN age;", + }, + execSQL: "SELECT * FROM users;", + results: [][]any{ + {1, "Alice"}, + }, + }, + + // Setting a column to be NOT NULL + { + name: "alter table set column not null", + sql: []string{ + "ALTER TABLE users ALTER COLUMN name SET NOT NULL;", + }, + execSQL: "INSERT INTO users (id, name, age) VALUES (1, null, 30);", + errContains: "violates not-null constraint (SQLSTATE 23502)", + }, + + // Setting a default on a column + { + name: "alter table set column default", + sql: []string{ + "ALTER TABLE users ALTER COLUMN age SET DEFAULT 25;", + "INSERT INTO users (id, name) VALUES (1, 'Alice');", + }, + execSQL: "SELECT id, name, age FROM users;", + results: [][]any{ + {int64(1), "Alice", int64(25)}, + }, + }, + + // Removing a default from a column + { + name: "alter table drop column default", + sql: []string{ + "ALTER TABLE users ALTER COLUMN age SET DEFAULT 25;", + "ALTER TABLE users ALTER COLUMN age DROP DEFAULT;", + "INSERT INTO users (id, name) VALUES (1, 'Alice');", + }, + execSQL: "SELECT id, name, age FROM users;", + results: [][]any{ + {int64(1), "Alice", nil}, // Age will be NULL since the default is removed + }, + }, + + // Removing NOT NULL from a column + { + name: "alter table drop column not null", + sql: []string{ + "ALTER TABLE users ALTER COLUMN name SET NOT NULL;", + "ALTER TABLE users ALTER COLUMN name DROP NOT NULL;", + "INSERT INTO users (id, age) VALUES (1, 30);", + }, + execSQL: "SELECT id, name, age FROM users;", + results: [][]any{ + {int64(1), nil, int64(30)}, + }, + }, + + // Renaming a column + { + name: "alter table rename column", + sql: []string{ + "ALTER TABLE users RENAME COLUMN name TO full_name;", + "INSERT INTO users (id, full_name, age) VALUES (1, 'Alice', 30);", + }, + execSQL: "SELECT full_name, age FROM users;", + results: [][]any{ + {"Alice", int64(30)}, + }, + }, + + // Renaming a table + { + name: "alter table rename table", + sql: []string{ + "ALTER TABLE users RENAME TO app_users;", + "INSERT INTO app_users (id, name, age) VALUES (1, 'Alice', 30);", + }, + execSQL: "SELECT name, age FROM app_users;", + results: [][]any{ + {"Alice", int64(30)}, + }, + }, + { + name: "drop default namespace", + execSQL: "DROP NAMESPACE main;", + errContains: "cannot drop built-in namespace", + }, + { + name: "drop info namespace", + execSQL: "DROP NAMESPACE info;", + errContains: "cannot drop built-in namespace", + }, + { + name: "drop non-existent namespace", + execSQL: "DROP NAMESPACE some_ns;", + err: interpreter.ErrNamespaceNotFound, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + db, err := newTestDB() + require.NoError(t, err) + defer db.Close() + + ctx := context.Background() + tx, err := db.BeginTx(ctx) + require.NoError(t, err) + defer tx.Rollback(ctx) // always rollback + + interp, err := interpreter.NewInterpreter(ctx, tx, &common.Service{}) + require.NoError(t, err) + + err = interp.SetOwner(ctx, tx, defaultCaller) + require.NoError(t, err) + + err = interp.Execute(newTxCtx(), tx, createUsersTable, nil, nil) + require.NoError(t, err) + + err = interp.Execute(newTxCtx(), tx, 
createPostsTable, nil, nil) + require.NoError(t, err) + var values [][]any + + for _, sql := range test.sql { + err = interp.Execute(newTxCtx(), tx, sql, nil, func(v *common.Row) error { + values = append(values, v.Values) + return nil + }) + require.NoError(t, err) + } + + if test.execSQL != "" { + err = interp.Execute(newTxCtx(), tx, test.execSQL, nil, func(v *common.Row) error { + values = append(values, v.Values) + return nil + }) + if test.err != nil { + require.Error(t, err) + require.ErrorIs(t, err, test.err) + } else if test.errContains != "" { + require.Contains(t, err.Error(), test.errContains) + } else { + require.NoError(t, err) + } + } + + require.Equal(t, len(test.results), len(values)) + for i, row := range values { + require.Equal(t, len(test.results[i]), len(row)) + for j, val := range row { + require.EqualValues(t, test.results[i][j], val) + } + } + }) + } +} + +func newTxCtx() *common.TxContext { + return &common.TxContext{ + Ctx: context.Background(), + BlockContext: &common.BlockContext{ + Height: 1, + ChainContext: &common.ChainContext{ + NetworkParameters: &common.NetworkParameters{}, + MigrationParams: &common.MigrationContext{}, + }, + }, + Caller: defaultCaller, + Signer: []byte(defaultCaller), + Authenticator: "test_authenticator", + } +} + +func Test_Actions(t *testing.T) { + type testcase struct { + name string // name of the test + // array of sql statements, first element is the namespace, second is the sql statement + // they can begin with {namespace}sql, or just sql + stmt []string + // namespace in which the action is defined + namespace string + // action to execute + action string + // values to pass to the action + values []any + // expected results + results [][]any + // expected error + err error + } + + tests := []testcase{ + { + name: "insert and select", + stmt: []string{` + CREATE ACTION create_user($name text, $age int) public returns (count int) { + INSERT INTO users (id, name, age) + VALUES (1, $name, $age); + + for $row in SELECT count(*) as count FROM users WHERE name = $name { + RETURN $row.count; + }; + + error('user not found'); + } + `}, + action: "create_user", + values: []any{"Alice", int64(30)}, + results: [][]any{ + {int64(1)}, + }, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + db, err := newTestDB() + require.NoError(t, err) + defer db.Close() + + ctx := context.Background() + tx, err := db.BeginTx(ctx) + require.NoError(t, err) + defer tx.Rollback(ctx) // always rollback + + interp, err := interpreter.NewInterpreter(ctx, tx, &common.Service{}) + require.NoError(t, err) + + err = interp.SetOwner(ctx, tx, defaultCaller) + require.NoError(t, err) + + err = interp.Execute(newTxCtx(), tx, createUsersTable, nil, nil) + require.NoError(t, err) + + err = interp.Execute(newTxCtx(), tx, createPostsTable, nil, nil) + require.NoError(t, err) + + for _, stmt := range test.stmt { + err = interp.Execute(newTxCtx(), tx, stmt, nil, nil) + require.NoError(t, err) + } + + var results [][]any + // TODO: add expected logs + _, err = interp.Call(newTxCtx(), tx, test.namespace, test.action, test.values, func(v *common.Row) error { + results = append(results, v.Values) + return nil + }) + if test.err != nil { + require.Error(t, err) + require.ErrorIs(t, err, test.err) + } else { + require.NoError(t, err) + } + + require.Equal(t, len(test.results), len(results)) + for i, row := range results { + require.Equal(t, len(test.results[i]), len(row)) + for j, val := range row { + require.EqualValues(t, test.results[i][j], 
val) + } + } + }) + } +} + +func newTestDB() (*pg.DB, error) { + cfg := &pg.DBConfig{ + PoolConfig: pg.PoolConfig{ + ConnConfig: pg.ConnConfig{ + Host: "127.0.0.1", + Port: "5432", + User: "kwild", + Pass: "kwild", // would be ignored if pg_hba.conf set with trust + DBName: "kwil_test_db", + }, + MaxConns: 11, + }, + } + + ctx := context.Background() + + return pg.NewDB(ctx, cfg) +} diff --git a/node/engine/interpreter/ops.go b/node/engine/interpreter/ops.go new file mode 100644 index 000000000..54ee63a40 --- /dev/null +++ b/node/engine/interpreter/ops.go @@ -0,0 +1,142 @@ +package interpreter + +import ( + "fmt" + + "github.com/kwilteam/kwil-db/node/engine/parse" +) + +type ComparisonOp uint8 + +const ( + equal ComparisonOp = iota + lessThan + greaterThan + is + isDistinctFrom +) + +type UnaryOp uint8 + +const ( + not UnaryOp = iota + neg + pos +) + +func (op UnaryOp) String() string { + switch op { + case not: + return "NOT" + case neg: + return "-" + case pos: + return "+" + } + + panic(fmt.Sprintf("unknown unary operator: %d", op)) +} + +type ArithmeticOp uint8 + +const ( + add ArithmeticOp = iota + sub + mul + div + mod + concat +) + +func (op ArithmeticOp) String() string { + switch op { + case add: + return "+" + case sub: + return "-" + case mul: + return "*" + case div: + return "/" + case mod: + return "%" + case concat: + return "||" + } + + panic(fmt.Sprintf("unknown arithmetic operator: %d", op)) +} + +func (op ComparisonOp) String() string { + switch op { + case equal: + return "=" + case lessThan: + return "<" + case greaterThan: + return ">" + case is: + return "IS" + case isDistinctFrom: + return "IS DISTINCT FROM" + } + + panic(fmt.Sprintf("unknown comparison operator: %d", op)) +} + +// GetComparisonOps gets the comparison operators for the given operator. +// Since the interpreter has a restricted subset of comparison operators compared to the parser, +// it is possible that one parser operator maps to multiple interpreter operators (which should be +// combined using OR). It also returns a boolean indicating if the operator should be negated. +func getComparisonOps(op parse.ComparisonOperator) (ops []ComparisonOp, negate bool) { + switch op { + case parse.ComparisonOperatorEqual: + return []ComparisonOp{equal}, false + case parse.ComparisonOperatorNotEqual: + return []ComparisonOp{equal}, true + case parse.ComparisonOperatorLessThan: + return []ComparisonOp{lessThan}, false + case parse.ComparisonOperatorLessThanOrEqual: + return []ComparisonOp{lessThan, equal}, false + case parse.ComparisonOperatorGreaterThan: + return []ComparisonOp{greaterThan}, false + case parse.ComparisonOperatorGreaterThanOrEqual: + return []ComparisonOp{greaterThan, equal}, false + } + + panic(fmt.Sprintf("unknown ast comparison operator: %v", op)) +} + +// ConvertArithmeticOp converts an arithmetic operator from the parser to the interpreter. +func convertArithmeticOp(op parse.ArithmeticOperator) ArithmeticOp { + ar, ok := arithmeticOps[op] + if !ok { + panic(fmt.Sprintf("unknown ast arithmetic operator: %v", op)) + } + return ar +} + +// ConvertUnaryOp converts a unary operator from the parser to the interpreter. 
+func convertUnaryOp(op parse.UnaryOperator) UnaryOp { + ar, ok := unaryOps[op] + if !ok { + panic(fmt.Sprintf("unknown ast unary operator: %v", op)) + } + + return ar +} + +var arithmeticOps = map[parse.ArithmeticOperator]ArithmeticOp{ + parse.ArithmeticOperatorAdd: add, + parse.ArithmeticOperatorSubtract: sub, + parse.ArithmeticOperatorMultiply: mul, + parse.ArithmeticOperatorDivide: div, + parse.ArithmeticOperatorModulo: mod, + parse.ArithmeticOperatorConcat: concat, +} + +var unaryOps = map[parse.UnaryOperator]UnaryOp{ + parse.UnaryOperatorNot: not, + parse.UnaryOperatorNeg: neg, + parse.UnaryOperatorPos: pos, +} diff --git a/node/engine/interpreter/planner.go b/node/engine/interpreter/planner.go new file mode 100644 index 000000000..7a89cae68 --- /dev/null +++ b/node/engine/interpreter/planner.go @@ -0,0 +1,1674 @@ +// package interpreter provides a basic interpreter for Kuneiform procedures. +// It allows running procedures as standalone programs (instead of generating +// PL/pgSQL code). +package interpreter + +import ( + "errors" + "fmt" + "strings" + + "github.com/kwilteam/kwil-db/core/types" + "github.com/kwilteam/kwil-db/extensions/precompiles" + "github.com/kwilteam/kwil-db/node/engine" + "github.com/kwilteam/kwil-db/node/engine/parse" + pggenerate "github.com/kwilteam/kwil-db/node/engine/pg_generate" +) + +// makeActionToExecutable creates an executable from an action +func makeActionToExecutable(namespace string, act *Action) *executable { + planner := &interpreterPlanner{} + stmtFns := make([]stmtFunc, len(act.Body)) + for j, stmt := range act.Body { + stmtFns[j] = stmt.Accept(planner).(stmtFunc) + } + + validateArgs := func(v []Value) error { + if len(v) != len(act.Parameters) { + return fmt.Errorf("expected %d arguments, got %d", len(act.Parameters), len(v)) + } + + for i, arg := range v { + if !act.Parameters[i].Type.EqualsStrict(arg.Type()) { + return fmt.Errorf("expected argument %d to be %s, got %s", i+1, act.Parameters[i].Type, arg.Type()) + } + } + + return nil + } + + return &executable{ + Name: act.Name, + Func: func(exec *executionContext, args []Value, fn resultFunc) error { + if err := exec.canExecute(namespace, act.Name, act.Modifiers); err != nil { + return err + } + + // validate the args + err := validateArgs(args) + if err != nil { + return err + } + + // get the expected return col names + var returnColNames []string + if act.Returns != nil { + for i, f := range act.Returns.Fields { + cName := f.Name + if cName == "" { + cName = fmt.Sprintf("column%d", i+1) + } + returnColNames = append(returnColNames, cName) + } + } + + // create a new scope for the action + oldScope := exec.scope + defer func() { + exec.scope = oldScope + }() + exec.scope = newScope(namespace) + + for j, param := range act.Parameters { + err = exec.allocateVariable(param.Name, args[j]) + if err != nil { + return err + } + } + + // execute the statements + for _, stmt := range stmtFns { + err := stmt(exec, func(row *row) error { + row.columns = returnColNames + err := fn(row) + if err != nil { + return err + } + + return nil + }) + switch err { + case nil: + // do nothing + case errReturn: + // the procedure is done, exit early + return nil + default: + return err + } + } + + return nil + }, + Type: executableTypeAction, + } +} + +// interpreterPlanner creates functions for running Kuneiform logic. +type interpreterPlanner struct{} + +var ( + + // errBreak is an error returned when a break statement is encountered. 
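+	// It is swallowed by the enclosing loop visitor rather than returned to the caller.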
+ errBreak = errors.New("break") + // errReturn is an error returned when a return statement is encountered. + errReturn = errors.New("return") +) + +func makeRow(v []Value) *row { + return &row{ + Values: v, + } +} + +// row represents a row of values. +type row struct { + // columns is a list of column names. + // It can be nil and/or not match the length of values. + // The Columns() method should always be used. + columns []string + // Values is a list of values. + Values []Value +} + +const unknownColName = "?column?" + +func (r *row) Columns() []string { + switch len(r.columns) { + case 0: + for range r.Values { + r.columns = append(r.columns, unknownColName) + } + return r.columns + case len(r.Values): + return r.columns + default: + panic(fmt.Errorf("columns and values do not match: %d columns, %d values", len(r.columns), len(r.Values))) + } +} + +// fillUnnamed fills all empty strings in the columns with the unknown column name. +func (r *row) fillUnnamed() { + r.Columns() // make sure the columns are initialized + for i, col := range r.columns { + if col == "" { + r.columns[i] = unknownColName + } + } +} + +type resultFunc func(*row) error + +type stmtFunc func(exec *executionContext, fn resultFunc) error + +func (i *interpreterPlanner) VisitActionStmtDeclaration(p0 *parse.ActionStmtDeclaration) any { + return stmtFunc(func(exec *executionContext, fn resultFunc) error { + return exec.allocateVariable(p0.Variable.Name, newNull(p0.Type)) + }) +} + +func (i *interpreterPlanner) VisitActionStmtAssignment(p0 *parse.ActionStmtAssign) any { + valFn := p0.Value.Accept(i).(exprFunc) + + var arrFn exprFunc + var indexFn exprFunc + if a, ok := p0.Variable.(*parse.ExpressionArrayAccess); ok { + arrFn = a.Array.Accept(i).(exprFunc) + indexFn = a.Index.Accept(i).(exprFunc) + } + return stmtFunc(func(exec *executionContext, fn resultFunc) error { + val, err := valFn(exec) + if err != nil { + return err + } + + switch a := p0.Variable.(type) { + case *parse.ExpressionVariable: + return exec.setVariable(a.Name, val) + case *parse.ExpressionArrayAccess: + scalarVal, ok := val.(ScalarValue) + if !ok { + return fmt.Errorf("expected scalar value, got %T", val) + } + + arrVal, err := arrFn(exec) + if err != nil { + return err + } + + arr, ok := arrVal.(ArrayValue) + if !ok { + return fmt.Errorf("expected array, got %T", arrVal) + } + + index, err := indexFn(exec) + if err != nil { + return err + } + + if !index.Type().EqualsStrict(types.IntType) { + return fmt.Errorf("array index must be integer, got %s", index.Type()) + } + + err = arr.Set(int32(index.RawValue().(int64)), scalarVal) + if err != nil { + return err + } + + // TODO: do I need to re-set the array? I dont think so b/c the implementation is a pointer. Should make a unit test for this. + + return nil + default: + panic(fmt.Errorf("unexpected assignable variable type: %T", p0.Variable)) + } + }) +} + +func (i *interpreterPlanner) VisitActionStmtCall(p0 *parse.ActionStmtCall) any { + + // we cannot simply use the same visitor as the expression function call, because expression function + // calls always return exactly one value. Here, we can return 0 values, many values, or a table. 
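+	// When receivers are present, the called action must produce exactly one row,
+	// and the receivers are bound positionally from that row's values (enforced below).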
+ + receivers := make([]string, len(p0.Receivers)) + for j, r := range p0.Receivers { + receivers[j] = r.Name + } + + args := make([]exprFunc, len(p0.Call.Args)) + for j, arg := range p0.Call.Args { + args[j] = arg.Accept(i).(exprFunc) + } + + return stmtFunc(func(exec *executionContext, fn resultFunc) error { + ns, err := exec.getNamespace(p0.Call.Namespace) + if err != nil { + return err + } + + funcDef, ok := ns.availableFunctions[p0.Call.Name] + if !ok { + return fmt.Errorf(`unknown action "%s" in namespace "%s"`, p0.Call.Name, p0.Call.Namespace) + } + + vals := make([]Value, len(args)) + for j, valFn := range args { + val, err := valFn(exec) + if err != nil { + return err + } + + vals[j] = val + } + + iter := 0 + err = funcDef.Func(exec, vals, func(row *row) error { + iter++ + + // re-verify the returns, since the above checks only for what the function signature + // says, but this checks what the function actually returns. + if len(receivers) > len(row.Values) { + return fmt.Errorf(`expected action "%s" to return at least %d values, but it returned %d`, funcDef.Name, len(receivers), len(row.Values)) + } + + for j, r := range receivers { + err = exec.setVariable(r, row.Values[j]) + if err != nil { + return err + } + } + + return nil + }) + if err != nil { + return err + } + if len(receivers) > 0 { + if iter == 0 { + return fmt.Errorf(`expected action "%s" to return a single record, but it returned nothing`, funcDef.Name) + } + if iter > 1 { + return fmt.Errorf(`expected action "%s" to return a single record, but it returned %d records`, funcDef.Name, iter) + } + } + + return nil + }) +} + +// executeBlock executes a block of statements with their own sub-scope. +// It takes a list of statements, and a list of variable allocations that will be made in the sub-scope. +func executeBlock(exec *executionContext, fn resultFunc, + stmtFuncs []stmtFunc) error { + oldScope := exec.scope + defer func() { + exec.scope = oldScope + }() + + exec.scope = exec.scope.subScope() + + for _, stmt := range stmtFuncs { + err := stmt(exec, fn) + if err != nil { + return err + } + } + + return nil +} + +func (i *interpreterPlanner) VisitActionStmtForLoop(p0 *parse.ActionStmtForLoop) any { + stmtFns := make([]stmtFunc, len(p0.Body)) + for j, stmt := range p0.Body { + stmtFns[j] = stmt.Accept(i).(stmtFunc) + } + + loopFn := p0.LoopTerm.Accept(i).(loopTermFunc) + + return stmtFunc(func(exec *executionContext, fn resultFunc) error { + oldScope := exec.scope + defer func() { + exec.scope = oldScope + }() + + err := loopFn(exec, func(term Value) error { + exec.scope = oldScope.subScope() + err := exec.allocateVariable(p0.Receiver.Name, term) + if err != nil { + return err + } + + for _, stmt := range stmtFns { + err := stmt(exec, fn) + if err != nil { + return err + } + } + + return nil + }) + switch err { + case nil, errBreak: + // swallow break errors since we are breaking out of the loop + return nil + default: + return err + } + }) +} + +// loopTermFunc is a function that allows iterating over a loop term. +// It calls the function passed to it with each value. 
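+// Implementations exist for integer ranges, SQL statements (each row is wrapped
+// in a record value), and array variables (iterated with 1-based indexing).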
+type loopTermFunc func(exec *executionContext, fn func(Value) error) (err error) + +func (i *interpreterPlanner) VisitLoopTermRange(p0 *parse.LoopTermRange) any { + startFn := p0.Start.Accept(i).(exprFunc) + endFn := p0.End.Accept(i).(exprFunc) + + return loopTermFunc(func(exec *executionContext, fn func(Value) error) (err error) { + start, err := startFn(exec) + if err != nil { + return err + } + + end, err := endFn(exec) + if err != nil { + return err + } + + if !start.Type().EqualsStrict(types.IntType) { + return fmt.Errorf("expected integer, got %s", start.Type()) + } + + if !end.Type().EqualsStrict(types.IntType) { + return fmt.Errorf("expected integer, got %s", end.Type()) + } + + for i := start.RawValue().(int64); i <= end.RawValue().(int64); i++ { + err = fn(newInt(i)) + if err != nil { + return err + } + } + + return nil + }) +} + +func (i *interpreterPlanner) VisitLoopTermSQL(p0 *parse.LoopTermSQL) any { + return loopTermFunc(func(exec *executionContext, fn func(Value) error) error { + raw, err := p0.Statement.Raw() + if err != nil { + return err + } + + // query executes a Kuneiform query and returns a cursor. + return exec.query(raw, func(r *row) error { + // we will add any named row to the scope. + // unnamed we will skip. + rec := newRecordValue() + for i, col := range r.Columns() { + if col == "" { + continue + } + + err = rec.AddValue(col, r.Values[i]) + if err != nil { + return err + } + } + + return fn(rec) + }) + }) +} + +func (i *interpreterPlanner) VisitLoopTermVariable(p0 *parse.LoopTermVariable) any { + return loopTermFunc(func(exec *executionContext, fn func(Value) error) (err error) { + val, found := exec.getVariable(p0.Variable.Name) + if !found { + return fmt.Errorf("%w: %s", ErrVariableNotFound, p0.Variable.Name) + } + + arr, ok := val.(ArrayValue) + if !ok { + return fmt.Errorf("expected array, got %T", val) + } + + for i := int32(0); i < arr.Len(); i++ { + scalar, err := arr.Index(i + 1) // all arrays are 1-indexed + if err != nil { + return err + } + + err = fn(scalar) + if err != nil { + return err + } + } + + return nil + }) +} + +func (i *interpreterPlanner) VisitActionStmtIf(p0 *parse.ActionStmtIf) any { + var ifThenFns []struct { + If exprFunc + Then []stmtFunc + } + + for _, ifThen := range p0.IfThens { + ifFn := ifThen.If.Accept(i).(exprFunc) + var thenFns []stmtFunc + for _, stmt := range ifThen.Then { + thenFns = append(thenFns, stmt.Accept(i).(stmtFunc)) + } + + ifThenFns = append(ifThenFns, struct { + If exprFunc + Then []stmtFunc + }{ + If: ifFn, + Then: thenFns, + }) + } + + var elseFns []stmtFunc + if p0.Else != nil { + for _, stmt := range p0.Else { + elseFns = append(elseFns, stmt.Accept(i).(stmtFunc)) + } + } + + return stmtFunc(func(exec *executionContext, fn resultFunc) error { + branchRun := false // tracks if any IF branch has been run + for _, ifThen := range ifThenFns { + if branchRun { + break + } + + cond, err := ifThen.If(exec) + if err != nil { + return err + } + + if boolVal, ok := cond.(*BoolValue); ok { + if boolVal.Null() { + continue + } + if !boolVal.Bool.Bool { + continue + } + } else { + return fmt.Errorf("expected bool, got %s", cond.Type()) + } + + branchRun = true + + err = executeBlock(exec, fn, ifThen.Then) + if err != nil { + return err + } + } + + if !branchRun && p0.Else != nil { + err := executeBlock(exec, fn, elseFns) + if err != nil { + return err + } + } + + return nil + }) +} + +func (i *interpreterPlanner) VisitActionStmtSQL(p0 *parse.ActionStmtSQL) any { + return stmtFunc(func(exec *executionContext, fn 
resultFunc) error { + raw, err := p0.SQL.Raw() + if err != nil { + return err + } + + // query executes any arbitrary SQL. + err = exec.query(raw, func(rv *row) error { + // we ignore results here since we are not returning anything. + return nil + }) + if err != nil { + return err + } + + return nil + }) +} + +func (i *interpreterPlanner) VisitActionStmtBreak(p0 *parse.ActionStmtBreak) any { + return stmtFunc(func(exec *executionContext, fn resultFunc) error { + return errBreak + }) +} + +func (i *interpreterPlanner) VisitActionStmtReturn(p0 *parse.ActionStmtReturn) any { + var valFns []exprFunc + var sqlStmt stmtFunc + + if len(p0.Values) > 0 { + for _, v := range p0.Values { + valFns = append(valFns, v.Accept(i).(exprFunc)) + } + } else { + sqlStmt = p0.SQL.Accept(i).(stmtFunc) + } + + return stmtFunc(func(exec *executionContext, fn resultFunc) error { + if len(valFns) > 0 { + vals := make([]Value, len(p0.Values)) + for j, valFn := range valFns { + val, err := valFn(exec) + if err != nil { + return err + } + + vals[j] = val + } + + err := fn(makeRow(vals)) + if err != nil { + return err + } + + // we return a special error to indicate that the procedure is done. + return errReturn + } + + // otherwise, we execute the SQL statement. + return sqlStmt(exec, func(row *row) error { + row.fillUnnamed() + return fn(row) + }) + }) +} + +func (i *interpreterPlanner) VisitActionStmtReturnNext(p0 *parse.ActionStmtReturnNext) any { + valFns := make([]exprFunc, len(p0.Values)) + for j, v := range p0.Values { + valFns[j] = v.Accept(i).(exprFunc) + } + + return stmtFunc(func(exec *executionContext, fn resultFunc) error { + vals := make([]Value, len(p0.Values)) + for j, valFn := range valFns { + val, err := valFn(exec) + if err != nil { + return err + } + + vals[j] = val + } + + err := fn(makeRow(vals)) + if err != nil { + return err + } + + // we don't return an errReturn or mark done here because return next is not the last statement in a procedure. + return nil + }) +} + +// everything in this section is for expressions, which evaluate to exactly one value. + +// handleTypeCast is a helper function that handles type casting. +func cast(t parse.Typecasted, s exprFunc) exprFunc { + if t.GetTypeCast() == nil { + return s + } + + return exprFunc(func(exec *executionContext) (Value, error) { + val, err := s(exec) + if err != nil { + return nil, err + } + + return val.Cast(t.GetTypeCast()) + }) +} + +// exprFunc is a function that returns a value. 
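+// Unlike a stmtFunc, it never emits rows to a resultFunc; it evaluates to exactly one Value.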
+type exprFunc func(exec *executionContext) (Value, error) + +func (i *interpreterPlanner) VisitExpressionLiteral(p0 *parse.ExpressionLiteral) any { + return cast(p0, func(exec *executionContext) (Value, error) { + return NewValue(p0.Value) + }) +} + +func (i *interpreterPlanner) VisitExpressionFunctionCall(p0 *parse.ExpressionFunctionCall) any { + args := make([]exprFunc, len(p0.Args)) + for j, arg := range p0.Args { + args[j] = arg.Accept(i).(exprFunc) + } + + return cast(p0, func(exec *executionContext) (Value, error) { + ns, err := exec.getNamespace(p0.Namespace) + if err != nil { + return nil, err + } + + execute, ok := ns.availableFunctions[p0.Name] + if !ok { + return nil, fmt.Errorf(`unknown function "%s" in namespace "%s"`, p0.Name, p0.Namespace) + } + + vals := make([]Value, len(args)) + for j, arg := range args { + val, err := arg(exec) + if err != nil { + return nil, err + } + + vals[j] = val + } + + var val Value + iters := 0 + err = execute.Func(exec, vals, func(received *row) error { + iters++ + if len(received.Values) != 1 { + return fmt.Errorf(`expected function "%s" to return 1 value, but it returned %d`, p0.Name, len(received.Values)) + } + + val = received.Values[0] + + return nil + }) + if err != nil { + return nil, err + } + + if iters == 0 { + return nil, fmt.Errorf(`expected function "%s" to return a single value, but it returned nothing`, p0.Name) + } else if iters > 1 { + return nil, fmt.Errorf(`expected function "%s" to return a single value, but it returned %d values`, p0.Name, iters) + } + + return val, nil + }) +} + +func (i *interpreterPlanner) VisitExpressionVariable(p0 *parse.ExpressionVariable) any { + return cast(p0, func(exec *executionContext) (Value, error) { + val, found := exec.getVariable(p0.Name) + if !found { + return nil, fmt.Errorf("%w: %s", ErrVariableNotFound, p0.Name) + } + + return val, nil + }) +} + +func (i *interpreterPlanner) VisitExpressionArrayAccess(p0 *parse.ExpressionArrayAccess) any { + arrFn := p0.Array.Accept(i).(exprFunc) + indexFn := p0.Index.Accept(i).(exprFunc) + + return cast(p0, func(exec *executionContext) (Value, error) { + arrVal, err := arrFn(exec) + if err != nil { + return nil, err + } + + arr, ok := arrVal.(ArrayValue) + if !ok { + return nil, fmt.Errorf("expected array, got %T", arrVal) + } + + index, err := indexFn(exec) + if err != nil { + return nil, err + } + + if !index.Type().EqualsStrict(types.IntType) { + return nil, fmt.Errorf("array index must be integer, got %s", index.Type()) + } + + return arr.Index(int32(index.RawValue().(int64))) + }) +} + +func (i *interpreterPlanner) VisitExpressionMakeArray(p0 *parse.ExpressionMakeArray) any { + valFns := make([]exprFunc, len(p0.Values)) + for j, v := range p0.Values { + valFns[j] = v.Accept(i).(exprFunc) + } + + return cast(p0, func(exec *executionContext) (Value, error) { + if len(valFns) == 0 { + return nil, fmt.Errorf("array must have at least one element") + } + + val0, err := valFns[0](exec) + if err != nil { + return nil, err + } + + scal, ok := val0.(ScalarValue) + if !ok { + return nil, fmt.Errorf("expected scalar value, got %T", val0) + } + + var vals []ScalarValue + for j, valFn := range valFns { + if j == 0 { + continue + } + + val, err := valFn(exec) + if err != nil { + return nil, err + } + + scal, ok := val.(ScalarValue) + if !ok { + return nil, fmt.Errorf("expected scalar value, got %T", val) + } + + vals = append(vals, scal) + } + + return scal.Array(vals...) 
+ }) +} + +func (i *interpreterPlanner) VisitExpressionFieldAccess(p0 *parse.ExpressionFieldAccess) any { + recordFn := p0.Record.Accept(i).(exprFunc) + + return cast(p0, func(exec *executionContext) (Value, error) { + objVal, err := recordFn(exec) + if err != nil { + return nil, err + } + + obj, ok := objVal.(*RecordValue) + if !ok { + return nil, fmt.Errorf("expected object, got %T", objVal) + } + + f, ok := obj.Fields[p0.Field] + if !ok { + return nil, fmt.Errorf("field %s not found in object", p0.Field) + } + + return f, nil + }) +} + +func (i *interpreterPlanner) VisitExpressionParenthesized(p0 *parse.ExpressionParenthesized) any { + return p0.Inner.Accept(i) +} + +func (i *interpreterPlanner) VisitExpressionComparison(p0 *parse.ExpressionComparison) any { + cmpOps, negate := getComparisonOps(p0.Operator) + + left := p0.Left.Accept(i).(exprFunc) + right := p0.Right.Accept(i).(exprFunc) + + retFn := makeComparisonFunc(left, right, cmpOps[0]) + + for _, op := range cmpOps[1:] { + retFn = makeLogicalFunc(retFn, makeComparisonFunc(left, right, op), false) + } + + if negate { + return makeUnaryFunc(retFn, not) + } + + return retFn +} + +// makeComparisonFunc returns a function that compares two values. +func makeComparisonFunc(left, right exprFunc, cmpOps ComparisonOp) exprFunc { + return func(exec *executionContext) (Value, error) { + leftVal, err := left(exec) + if err != nil { + return nil, err + } + + rightVal, err := right(exec) + if err != nil { + return nil, err + } + + return leftVal.Compare(rightVal, cmpOps) + } +} + +func (i *interpreterPlanner) VisitExpressionLogical(p0 *parse.ExpressionLogical) any { + left := p0.Left.Accept(i).(exprFunc) + right := p0.Right.Accept(i).(exprFunc) + and := p0.Operator == parse.LogicalOperatorAnd + + return makeLogicalFunc(left, right, and) +} + +// makeLogicalFunc returns a function that performs a logical operation. +// If and is true, it performs an AND operation, otherwise it performs an OR operation. 
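+// As implemented, a NULL operand is returned as-is, so both AND and OR propagate
+// NULL here (e.g. FALSE AND NULL yields NULL rather than FALSE as in strict SQL
+// three-valued logic).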
+func makeLogicalFunc(left, right exprFunc, and bool) exprFunc { + return func(exec *executionContext) (Value, error) { + leftVal, err := left(exec) + if err != nil { + return nil, err + } + + rightVal, err := right(exec) + if err != nil { + return nil, err + } + + if leftVal.Type() != types.BoolType || rightVal.Type() != types.BoolType { + return nil, fmt.Errorf("expected bools, got %s and %s", leftVal.Type(), rightVal.Type()) + } + + if leftVal.Null() { + return leftVal, nil + } + + if rightVal.Null() { + return rightVal, nil + } + + if and { + return newBool(leftVal.RawValue().(bool) && rightVal.RawValue().(bool)), nil + } + + return newBool(leftVal.RawValue().(bool) || rightVal.RawValue().(bool)), nil + } +} + +func (i *interpreterPlanner) VisitExpressionArithmetic(p0 *parse.ExpressionArithmetic) any { + op := convertArithmeticOp(p0.Operator) + + leftFn := p0.Left.Accept(i).(exprFunc) + rightFn := p0.Right.Accept(i).(exprFunc) + return exprFunc(func(exec *executionContext) (Value, error) { + left, err := leftFn(exec) + if err != nil { + return nil, err + } + + right, err := rightFn(exec) + if err != nil { + return nil, err + } + + leftScalar, ok := left.(ScalarValue) + if !ok { + return nil, fmt.Errorf("expected scalar, got %T", left) + } + + rightScalar, ok := right.(ScalarValue) + if !ok { + return nil, fmt.Errorf("expected scalar, got %T", right) + } + + return leftScalar.Arithmetic(rightScalar, op) + }) +} + +func (i *interpreterPlanner) VisitExpressionUnary(p0 *parse.ExpressionUnary) any { + op := convertUnaryOp(p0.Operator) + val := p0.Expression.Accept(i).(exprFunc) + return makeUnaryFunc(val, op) +} + +// makeUnaryFunc returns a function that performs a unary operation. +func makeUnaryFunc(val exprFunc, op UnaryOp) exprFunc { + return exprFunc(func(exec *executionContext) (Value, error) { + v, err := val(exec) + if err != nil { + return nil, err + } + + vScalar, ok := v.(ScalarValue) + if !ok { + return nil, fmt.Errorf("%w: expected scalar, got %T", ErrUnaryOnNonScalar, v) + } + + return vScalar.Unary(op) + }) +} + +func (i *interpreterPlanner) VisitExpressionIs(p0 *parse.ExpressionIs) any { + left := p0.Left.Accept(i).(exprFunc) + right := p0.Right.Accept(i).(exprFunc) + + op := is + if p0.Distinct { + op = isDistinctFrom + } + + retFn := makeComparisonFunc(left, right, op) + + if p0.Not { + return makeUnaryFunc(retFn, not) + } + + return retFn +} + +/* +Role management +*/ +func (i *interpreterPlanner) VisitGrantOrRevokeStatement(p0 *parse.GrantOrRevokeStatement) any { + return stmtFunc(func(exec *executionContext, fn resultFunc) error { + if !exec.interpreter.accessController.HasPrivilege(exec.txCtx.Caller, nil, RolesPrivilege) { + return fmt.Errorf("%w: %s", ErrDoesNotHavePriv, RolesPrivilege) + } + + switch { + case len(p0.Privileges) > 0 && p0.ToRole != "": + fn := exec.interpreter.accessController.GrantPrivileges + if !p0.IsGrant { + fn = exec.interpreter.accessController.RevokePrivileges + } + return fn(exec.txCtx.Ctx, exec.db, p0.ToRole, p0.Privileges, p0.Namespace) + case p0.GrantRole != "" && p0.ToUser != "": + fn := exec.interpreter.accessController.AssignRole + if !p0.IsGrant { + fn = exec.interpreter.accessController.UnassignRole + } + return fn(exec.txCtx.Ctx, exec.db, p0.ToUser, p0.GrantRole) + default: + // failure to hit these cases should have been caught by the parser, where better error + // messages can be generated. This is a catch-all for any other invalid cases. 
+ return fmt.Errorf("invalid grant/revoke statement") + } + }) +} + +func (i *interpreterPlanner) VisitCreateRoleStatement(p0 *parse.CreateRoleStatement) any { + return stmtFunc(func(exec *executionContext, fn resultFunc) error { + if !exec.interpreter.accessController.HasPrivilege(exec.txCtx.Caller, nil, RolesPrivilege) { + return fmt.Errorf("%w: %s", ErrDoesNotHavePriv, RolesPrivilege) + } + + if p0.IfNotExists { + if exec.interpreter.accessController.RoleExists(p0.Role) { + return nil + } + } + + return exec.interpreter.accessController.CreateRole(exec.txCtx.Ctx, exec.db, p0.Role) + }) +} + +func (i *interpreterPlanner) VisitDropRoleStatement(p0 *parse.DropRoleStatement) any { + return stmtFunc(func(exec *executionContext, fn resultFunc) error { + if !exec.interpreter.accessController.HasPrivilege(exec.txCtx.Caller, nil, RolesPrivilege) { + return fmt.Errorf("%w: %s", ErrDoesNotHavePriv, RolesPrivilege) + } + + if p0.IfExists { + if !exec.interpreter.accessController.RoleExists(p0.Role) { + return nil + } + } + + return exec.interpreter.accessController.DeleteRole(exec.txCtx.Ctx, exec.db, p0.Role) + }) +} + +func (i *interpreterPlanner) VisitTransferOwnershipStatement(p0 *parse.TransferOwnershipStatement) any { + return stmtFunc(func(exec *executionContext, fn resultFunc) error { + if !exec.interpreter.accessController.IsOwner(exec.txCtx.Caller) { + return fmt.Errorf("%w: %s", ErrDoesNotHavePriv, "caller must be owner") + } + + return exec.interpreter.accessController.SetOwnership(exec.txCtx.Ctx, exec.db, p0.To) + }) +} + +/* + top-level adhoc +*/ + +// handleNamespaced is a helper function that handles statements namespaced with curly braces. +func handleNamespaced(exec *executionContext, stmt parse.Namespaceable) (reset func(), err error) { + // if no special namespace is set, we can just return a no-op function + if stmt.GetNamespacePrefix() == "" { + return func() {}, nil + } + + // otherwise, we need to set the current namespace + oldNs := exec.scope.namespace + + // ensure the new namespace exists + _, err = exec.getNamespace(stmt.GetNamespacePrefix()) + if err != nil { + return nil, err + } + + // set the new namespace + exec.scope.namespace = stmt.GetNamespacePrefix() + + return func() { + exec.scope.namespace = oldNs + }, nil +} + +func (i *interpreterPlanner) VisitSQLStatement(p0 *parse.SQLStatement) any { + mutatesState := true + var privilege privilege + switch p0.SQL.(type) { + case *parse.InsertStatement: + privilege = InsertPrivilege + case *parse.UpdateStatement: + privilege = UpdatePrivilege + case *parse.DeleteStatement: + privilege = DeletePrivilege + case *parse.SelectStatement: + privilege = SelectPrivilege + mutatesState = false + default: + panic(fmt.Errorf("unexpected SQL statement type: %T", p0.SQL)) + } + raw, err := p0.Raw() + if err != nil { + panic(err) + } + return stmtFunc(func(exec *executionContext, fn resultFunc) error { + reset, err := handleNamespaced(exec, p0) + if err != nil { + return err + } + defer reset() + + if err := exec.checkPrivilege(privilege); err != nil { + return err + } + + // if the query is trying to mutate state but the exec ctx cant then we should error + if mutatesState && !exec.canMutateState { + return fmt.Errorf("%w: SQL statement mutates state, but the execution context is read-only: %s", ErrStatementMutatesState, raw) + } + + return exec.query(raw, fn) + }) +} + +// here, we other top-level statements that are not covered by the other visitors. + +// genAndExec generates and executes a DML statement. 
+// It should only be used for DDL statements, which do not bind or return values. +func genAndExec(exec *executionContext, stmt parse.TopLevelStatement) error { + sql, _, err := pggenerate.GenerateSQL(stmt, exec.scope.namespace) + if err != nil { + return err + } + + return execute(exec.txCtx.Ctx, exec.db, sql) +} + +func (i *interpreterPlanner) VisitAlterTableStatement(p0 *parse.AlterTableStatement) any { + return stmtFunc(func(exec *executionContext, fn resultFunc) error { + reset, err := handleNamespaced(exec, p0) + if err != nil { + return err + } + defer reset() + + // ensure that the caller has the necessary privileges + if err := exec.checkPrivilege(AlterPrivilege); err != nil { + return err + } + + // ensure the table exists + _, found := exec.getTable("", p0.Table) + if !found { + return fmt.Errorf("table %s does not exist", p0.Table) + } + + // instead of handling every case and how it should change the in-memory objects, we just + // generate the SQL and execute it, and then completely refresh the in-memory objects for this schema. + // This isn't the most efficient way to do it, but it's the easiest to implement, and since DDL isn't + // really a hotpath, it's fine. + err = genAndExec(exec, p0) + if err != nil { + return err + } + + return exec.reloadTables() + }) +} + +func (i *interpreterPlanner) VisitCreateTableStatement(p0 *parse.CreateTableStatement) any { + return stmtFunc(func(exec *executionContext, fn resultFunc) error { + reset, err := handleNamespaced(exec, p0) + if err != nil { + return err + } + defer reset() + + // ensure that the caller has the necessary privileges + if err := exec.checkPrivilege(CreatePrivilege); err != nil { + return err + } + + // ensure the table does not already exist + _, found := exec.getTable("", p0.Name) + if found { + if p0.IfNotExists { + return nil + } + + return fmt.Errorf(`table "%s" already exists`, p0.Name) + } + + err = genAndExec(exec, p0) + if err != nil { + return err + } + + return exec.reloadTables() + }) +} + +func (i *interpreterPlanner) VisitDropTableStatement(p0 *parse.DropTableStatement) any { + return stmtFunc(func(exec *executionContext, fn resultFunc) error { + reset, err := handleNamespaced(exec, p0) + if err != nil { + return err + } + defer reset() + + // ensure that the caller has the necessary privileges + if err := exec.checkPrivilege(DropPrivilege); err != nil { + return err + } + + for _, table := range p0.Tables { + // ensure the table exists + _, found := exec.getTable("", table) + if !found { + if p0.IfExists { + continue + } + + return fmt.Errorf(`table "%s" does not exist`, table) + } + } + + if err := genAndExec(exec, p0); err != nil { + return err + } + + return exec.reloadTables() + }) +} + +func (i *interpreterPlanner) VisitCreateIndexStatement(p0 *parse.CreateIndexStatement) any { + return stmtFunc(func(exec *executionContext, fn resultFunc) error { + reset, err := handleNamespaced(exec, p0) + if err != nil { + return err + } + defer reset() + + // ensure that the caller has the necessary privileges + if err := exec.checkPrivilege(CreatePrivilege); err != nil { + return err + } + + // ensure the table exists + tbl, found := exec.getTable("", p0.On) + if !found { + return fmt.Errorf(`table "%s" does not exist`, p0.On) + } + + // ensure the columns exist + tblCols := make(map[string]struct{}, len(tbl.Columns)) + for _, col := range tbl.Columns { + tblCols[col.Name] = struct{}{} + } + + for _, col := range p0.Columns { + if _, found := tblCols[col]; !found { + return fmt.Errorf(`column "%s" does not 
exist in table "%s"`, col, p0.On) + } + } + + if err := genAndExec(exec, p0); err != nil { + return err + } + + // we reload tables here because we track indexes in the table object + return exec.reloadTables() + }) +} + +func (i *interpreterPlanner) VisitDropIndexStatement(p0 *parse.DropIndexStatement) any { + return stmtFunc(func(exec *executionContext, fn resultFunc) error { + reset, err := handleNamespaced(exec, p0) + if err != nil { + return err + } + defer reset() + + // ensure that the caller has the necessary privileges + if err := exec.checkPrivilege(DropPrivilege); err != nil { + return err + } + + if err := genAndExec(exec, p0); err != nil { + return err + } + + // we reload tables here because we track indexes in the table object + return exec.reloadTables() + }) +} + +func (i *interpreterPlanner) VisitUseExtensionStatement(p0 *parse.UseExtensionStatement) any { + configValues := make([]exprFunc, len(p0.Config)) + for j, config := range p0.Config { + configValues[j] = config.Value.Accept(i).(exprFunc) + } + + return stmtFunc(func(exec *executionContext, fn resultFunc) error { + // ensure that the caller has the necessary privileges + if err := exec.checkPrivilege(UsePrivilege); err != nil { + return err + } + + config := make(map[string]Value, len(p0.Config)) + + for j, configValue := range configValues { + val, err := configValue(exec) + if err != nil { + return err + } + + config[p0.Config[j].Key] = val + } + + initializer, ok := precompiles.RegisteredPrecompiles()[strings.ToLower(p0.ExtName)] + if !ok { + return fmt.Errorf(`extension "%s" does not exist`, p0.ExtName) + } + + extNamespace, err := initializeExtension(exec.txCtx.Ctx, exec.interpreter.service, exec.db, initializer, config) + if err != nil { + return err + } + + err = extNamespace.onDeploy(exec) + if err != nil { + return err + } + + err = registerExtensionInitialization(exec.txCtx.Ctx, exec.db, p0.Alias, p0.ExtName, config) + if err != nil { + return err + } + + exec.interpreter.namespaces[p0.Alias] = extNamespace + + return nil + }) +} + +func (i *interpreterPlanner) VisitUnuseExtensionStatement(p0 *parse.UnuseExtensionStatement) any { + return stmtFunc(func(exec *executionContext, fn resultFunc) error { + // ensure that the caller has the necessary privileges + if err := exec.checkPrivilege(UsePrivilege); err != nil { + return err + } + + ns, exists := exec.interpreter.namespaces[p0.Alias] + if !exists { + if p0.IfExists { + return nil + } + + return fmt.Errorf(`extension initialized with alias "%s" does not exist`, p0.Alias) + } + + if ns.namespaceType != namespaceTypeExtension { + return fmt.Errorf(`namespace "%s" is not an extension`, p0.Alias) + } + + err := ns.onUndeploy(exec) + if err != nil { + return err + } + + err = unregisterExtensionInitialization(exec.txCtx.Ctx, exec.db, p0.Alias) + if err != nil { + return err + } + + delete(exec.interpreter.namespaces, p0.Alias) + + return nil + }) +} + +func (i *interpreterPlanner) VisitCreateActionStatement(p0 *parse.CreateActionStatement) any { + return stmtFunc(func(exec *executionContext, fn resultFunc) error { + reset, err := handleNamespaced(exec, p0) + if err != nil { + return err + } + defer reset() + + if err := exec.checkPrivilege(CreatePrivilege); err != nil { + return err + } + + namespace := exec.interpreter.namespaces[exec.scope.namespace] + + // we check in the available functions map because there is a chance that the user is overwriting an existing function. 
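+		// For example (illustrative, not part of this change): `CREATE OR REPLACE ACTION abs($v int) ...`
+		// would shadow a built-in function such as abs() in this namespace until the action is dropped.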
+ if _, exists := namespace.availableFunctions[p0.Name]; exists { + if p0.IfNotExists { + return nil + } else if p0.OrReplace { + delete(namespace.availableFunctions, p0.Name) + } else { + return fmt.Errorf(`action/function "%s" already exists`, p0.Name) + } + } + + act := Action{} + if err := act.FromAST(p0); err != nil { + return err + } + + err = storeAction(exec.txCtx.Ctx, exec.db, exec.scope.namespace, &act) + if err != nil { + return err + } + + execute := makeActionToExecutable(exec.scope.namespace, &act) + namespace.availableFunctions[p0.Name] = execute + + return nil + }) +} + +func (i *interpreterPlanner) VisitDropActionStatement(p0 *parse.DropActionStatement) any { + return stmtFunc(func(exec *executionContext, fn resultFunc) error { + reset, err := handleNamespaced(exec, p0) + if err != nil { + return err + } + defer reset() + + if err := exec.checkPrivilege(DropPrivilege); err != nil { + return err + } + + namespace := exec.interpreter.namespaces[exec.scope.namespace] + + // we check that the referenced executable is an action + executable, exists := namespace.availableFunctions[p0.Name] + if !exists { + if p0.IfExists { + return nil + } + + return fmt.Errorf(`action "%s" does not exist`, p0.Name) + } + if executable.Type != executableTypeAction { + return fmt.Errorf(`%w: cannot drop executable "%s" of type %s`, ErrCannotDrop, p0.Name, executable.Type) + } + + delete(namespace.availableFunctions, p0.Name) + + // there is a case where an action overwrites a function. We should restore the function if it exists. + if funcDef, ok := parse.Functions[p0.Name]; ok { + if scalarFunc, ok := funcDef.(*parse.ScalarFunctionDefinition); ok { + namespace.availableFunctions[p0.Name] = funcDefToExecutable(p0.Name, scalarFunc) + } + } + + return nil + }) +} + +func (i *interpreterPlanner) VisitCreateNamespaceStatement(p0 *parse.CreateNamespaceStatement) any { + return stmtFunc(func(exec *executionContext, fn resultFunc) error { + if err := exec.checkPrivilege(CreatePrivilege); err != nil { + return err + } + + if _, exists := exec.interpreter.namespaces[p0.Namespace]; exists { + if p0.IfNotExists { + return nil + } + + return fmt.Errorf(`%w: "%s"`, ErrNamespaceExists, p0.Namespace) + } + + if _, err := createNamespace(exec.txCtx.Ctx, exec.db, p0.Namespace, namespaceTypeUser); err != nil { + return err + } + + exec.interpreter.namespaces[p0.Namespace] = &namespace{ + availableFunctions: make(map[string]*executable), + tables: make(map[string]*engine.Table), + onDeploy: func(*executionContext) error { return nil }, + onUndeploy: func(*executionContext) error { return nil }, + } + + return nil + }) +} + +func (i *interpreterPlanner) VisitDropNamespaceStatement(p0 *parse.DropNamespaceStatement) any { + return stmtFunc(func(exec *executionContext, fn resultFunc) error { + if err := exec.checkPrivilege(DropPrivilege); err != nil { + return err + } + + ns, exists := exec.interpreter.namespaces[p0.Namespace] + if !exists { + if p0.IfExists { + return nil + } + + return fmt.Errorf(`%w: namespace "%s" does not exist`, ErrNamespaceNotFound, p0.Namespace) + } + + if ns.namespaceType == namespaceTypeSystem { + return fmt.Errorf(`cannot drop built-in namespace "%s"`, p0.Namespace) + } + if ns.namespaceType == namespaceTypeExtension { + return fmt.Errorf(`cannot drop extension namespace using DROP "%s"`, p0.Namespace) + } + + if err := dropNamespace(exec.txCtx.Ctx, exec.db, p0.Namespace); err != nil { + return err + } + + delete(exec.interpreter.namespaces, p0.Namespace) + 
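+		// also drop any role privileges cached for this namespace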
+		exec.interpreter.accessController.DeleteNamespace(p0.Namespace)
+
+		return nil
+	})
+}
+
+// below are the alter table statements
+
+func (i *interpreterPlanner) VisitAddColumn(p0 *parse.AddColumn) any {
+	panic("interpreter planner should not be called for alter table statements")
+}
+
+func (i *interpreterPlanner) VisitDropColumn(p0 *parse.DropColumn) any {
+	panic("interpreter planner should not be called for alter table statements")
+}
+
+func (i *interpreterPlanner) VisitRenameColumn(p0 *parse.RenameColumn) any {
+	panic("interpreter planner should not be called for alter table statements")
+}
+
+func (i *interpreterPlanner) VisitRenameTable(p0 *parse.RenameTable) any {
+	panic("interpreter planner should not be called for alter table statements")
+}
+
+func (i *interpreterPlanner) VisitAddTableConstraint(p0 *parse.AddTableConstraint) any {
+	panic("interpreter planner should not be called for alter table statements")
+}
+
+func (i *interpreterPlanner) VisitDropTableConstraint(p0 *parse.DropTableConstraint) any {
+	panic("interpreter planner should not be called for alter table statements")
+}
+
+func (i *interpreterPlanner) VisitColumn(p0 *parse.Column) any {
+	panic("interpreter planner should not be called for alter table statements")
+}
+
+func (i *interpreterPlanner) VisitAlterColumnSet(p0 *parse.AlterColumnSet) any {
+	panic("interpreter planner should not be called for alter table statements")
+}
+
+func (i *interpreterPlanner) VisitAlterColumnDrop(p0 *parse.AlterColumnDrop) any {
+	panic("interpreter planner should not be called for alter table statements")
+}
+
+// below this are all visitors that are SQL-specific. We don't need to implement them,
+// since we will have separate handling for SQL statements at a later stage.
+
+func (i *interpreterPlanner) VisitExpressionColumn(p0 *parse.ExpressionColumn) any {
+	panic("interpreter planner should not be called for SQL expressions")
+}
+
+func (i *interpreterPlanner) VisitExpressionCollate(p0 *parse.ExpressionCollate) any {
+	panic("interpreter planner should not be called for SQL expressions")
+}
+
+func (i *interpreterPlanner) VisitExpressionStringComparison(p0 *parse.ExpressionStringComparison) any {
+	panic("interpreter planner should not be called for SQL expressions")
+}
+
+func (i *interpreterPlanner) VisitExpressionIn(p0 *parse.ExpressionIn) any {
+	panic("interpreter planner should not be called for SQL expressions")
+}
+
+func (i *interpreterPlanner) VisitExpressionBetween(p0 *parse.ExpressionBetween) any {
+	panic("interpreter planner should not be called for SQL expressions")
+}
+
+func (i *interpreterPlanner) VisitExpressionSubquery(p0 *parse.ExpressionSubquery) any {
+	panic("interpreter planner should not be called for SQL expressions")
+}
+
+func (i *interpreterPlanner) VisitExpressionCase(p0 *parse.ExpressionCase) any {
+	panic("interpreter planner should not be called for SQL expressions")
+}
+
+func (i *interpreterPlanner) VisitCommonTableExpression(p0 *parse.CommonTableExpression) any {
+	panic("interpreter planner should not be called for SQL expressions")
+}
+
+func (i *interpreterPlanner) VisitSelectStatement(p0 *parse.SelectStatement) any {
+	panic("interpreter planner should not be called for SQL expressions")
+}
+
+func (i *interpreterPlanner) VisitSelectCore(p0 *parse.SelectCore) any {
+	panic("interpreter planner should not be called for SQL expressions")
+}
+
+func (i *interpreterPlanner) VisitResultColumnExpression(p0 *parse.ResultColumnExpression) any {
+	panic("interpreter planner should not be called for SQL expressions")
+}
+
+func (i *interpreterPlanner) VisitResultColumnWildcard(p0 *parse.ResultColumnWildcard) any {
+	panic("interpreter planner should not be called for SQL expressions")
+}
+
+func (i *interpreterPlanner) VisitRelationTable(p0 *parse.RelationTable) any {
+	panic("interpreter planner should not be called for SQL expressions")
+}
+
+func (i *interpreterPlanner) VisitRelationSubquery(p0 *parse.RelationSubquery) any {
+	panic("interpreter planner should not be called for SQL expressions")
+}
+
+func (i *interpreterPlanner) VisitJoin(p0 *parse.Join) any {
+	panic("interpreter planner should not be called for SQL expressions")
+}
+
+func (i *interpreterPlanner) VisitUpdateStatement(p0 *parse.UpdateStatement) any {
+	panic("interpreter planner should not be called for SQL expressions")
+}
+
+func (i *interpreterPlanner) VisitUpdateSetClause(p0 *parse.UpdateSetClause) any {
+	panic("interpreter planner should not be called for SQL expressions")
+}
+
+func (i *interpreterPlanner) VisitDeleteStatement(p0 *parse.DeleteStatement) any {
+	panic("interpreter planner should not be called for SQL expressions")
+}
+
+func (i *interpreterPlanner) VisitInsertStatement(p0 *parse.InsertStatement) any {
+	panic("interpreter planner should not be called for SQL expressions")
+}
+
+func (i *interpreterPlanner) VisitUpsertClause(p0 *parse.OnConflict) any {
+	panic("interpreter planner should not be called for SQL expressions")
+}
+
+func (i *interpreterPlanner) VisitOrderingTerm(p0 *parse.OrderingTerm) any {
+	panic("interpreter planner should not be called for SQL expressions")
+}
+
+func (i *interpreterPlanner) VisitIfThen(p0 *parse.IfThen) any {
+	// we handle this directly in VisitActionStmtIf
+	panic("VisitIfThen should never be called by the interpreter")
+}
+
+func (i *interpreterPlanner) VisitWindowImpl(p0 *parse.WindowImpl) any {
+	panic("interpreter planner should not be called for SQL expressions")
+}
+
+func (i *interpreterPlanner) VisitWindowReference(p0 *parse.WindowReference) any {
+	panic("interpreter planner should not be called for SQL expressions")
+}
+
+func (i *interpreterPlanner) VisitExpressionWindowFunctionCall(p0 *parse.ExpressionWindowFunctionCall) any {
+	panic("interpreter planner should not be called for SQL expressions")
+}
+
+func (i *interpreterPlanner) VisitPrimaryKeyInlineConstraint(p0 *parse.PrimaryKeyInlineConstraint) any {
+	panic("interpreter planner should never be called for table constraints")
+}
+
+func (i *interpreterPlanner) VisitPrimaryKeyOutOfLineConstraint(p0 *parse.PrimaryKeyOutOfLineConstraint) any {
+	panic("interpreter planner should never be called for table constraints")
+}
+
+func (i *interpreterPlanner) VisitUniqueInlineConstraint(p0 *parse.UniqueInlineConstraint) any {
+	panic("interpreter planner should never be called for table constraints")
+}
+
+func (i *interpreterPlanner) VisitUniqueOutOfLineConstraint(p0 *parse.UniqueOutOfLineConstraint) any {
+	panic("interpreter planner should never be called for table constraints")
+}
+
+func (i *interpreterPlanner) VisitDefaultConstraint(p0 *parse.DefaultConstraint) any {
+	panic("interpreter planner should never be called for table constraints")
+}
+
+func (i *interpreterPlanner) VisitNotNullConstraint(p0 *parse.NotNullConstraint) any {
+	panic("interpreter planner should never be called for table constraints")
+}
+
+func (i *interpreterPlanner) VisitCheckConstraint(p0 *parse.CheckConstraint) any {
+	panic("interpreter planner should never be called for table constraints")
+}
+
+func (i *interpreterPlanner)
VisitForeignKeyReferences(p0 *parse.ForeignKeyReferences) any { + panic("interpreter planner should never be called for table constraints") +} + +func (i *interpreterPlanner) VisitForeignKeyOutOfLineConstraint(p0 *parse.ForeignKeyOutOfLineConstraint) any { + panic("interpreter planner should never be called for table constraints") +} diff --git a/node/engine/interpreter/roles.go b/node/engine/interpreter/roles.go new file mode 100644 index 000000000..415a49480 --- /dev/null +++ b/node/engine/interpreter/roles.go @@ -0,0 +1,616 @@ +package interpreter + +import ( + "context" + "fmt" + + "github.com/kwilteam/kwil-db/node/types/sql" +) + +// the following are built-in roles that are always available. +const ( + ownerRole = "owner" + defaultRole = "default" +) + +func isBuiltInRole(role string) bool { + return role == ownerRole || role == defaultRole +} + +/* + This file includes a lot of the functionality for roles and access control. + This could be made much more efficient by caching role information, but for simplicity + we will access the db each time. The only exception is for the default role, since it will cover + the majority of cases. +*/ + +func newAccessController(ctx context.Context, db sql.DB) (*accessController, error) { + ac := &accessController{ + roles: make(map[string]*perms), + userRoles: make(map[string][]string), + } + + // get the owner + err := queryRowFunc(ctx, db, "SELECT value FROM kwild_engine.metadata WHERE key = $1", []any{&ac.owner}, func() error { + return nil + }, ownerKey) + if err != nil { + return nil, err + } + + getRolesStmt := `SELECT r.name, array_agg(rp.privilege_type), array_agg(n.name) + FROM kwild_engine.roles r + LEFT JOIN kwild_engine.role_privileges rp ON rp.role_id = r.id + LEFT JOIN kwild_engine.namespaces n ON rp.namespace_id = n.id + GROUP BY r.name + ORDER BY 1,2,3` + + // list all roles, their perms, and users + var roleName string + var privileges []*string + var namespaces []*string + err = queryRowFunc(ctx, db, getRolesStmt, []any{&roleName, &privileges, &namespaces}, func() error { + perm := &perms{ + namespacePrivileges: make(map[string]map[privilege]struct{}), + globalPrivileges: make(map[privilege]struct{}), + } + + for i, priv := range privileges { + if priv == nil { + panic("unexpected error: privilege is nil") + } + + // check that the privilege exists + // This should never not be the case, but it is good to check + _, ok := privilegeNames[privilege(*priv)] + if !ok { + return fmt.Errorf(`unknown privilege "%s" stored in DB`, *priv) + } + + if namespaces[i] == nil { + perm.globalPrivileges[privilege(*priv)] = struct{}{} + } else { + if _, ok := perm.namespacePrivileges[*namespaces[i]]; !ok { + perm.namespacePrivileges[*namespaces[i]] = make(map[privilege]struct{}) + } + + perm.namespacePrivileges[*namespaces[i]][privilege(*priv)] = struct{}{} + } + } + + ac.roles[roleName] = perm + + return nil + }) + if err != nil { + return nil, err + } + + // get all users and their roles + getUsersStmt := `SELECT u.user_identifier, array_agg(r.name) + FROM kwild_engine.user_roles u + JOIN kwild_engine.roles r ON r.id = u.role_id + GROUP BY u.user_identifier + ORDER BY 1, 2` + + var user string + var roles []*string + err = queryRowFunc(ctx, db, getUsersStmt, []any{&user, &roles}, func() error { + for _, role := range roles { + if role == nil { + panic("unexpected error: role is nil") + } + + ac.userRoles[user] = append(ac.userRoles[user], *role) + } + + return nil + }) + if err != nil { + return nil, err + } + + return ac, nil +} + +// 
accessController enforces access control on the database. +type accessController struct { + owner string // the db owner + roles map[string]*perms + userRoles map[string][]string // a map of user public keys to the roles they have. It does _not_ include the default role. +} + +// CreateRole adds a new role to the access controller. +func (a *accessController) CreateRole(ctx context.Context, db sql.DB, role string) error { + if isBuiltInRole(role) { + return fmt.Errorf(`role "%s" is a built-in role and cannot be added`, role) + } + + _, ok := a.roles[role] + if ok { + return fmt.Errorf(`role "%s" already exists`, role) + } + + err := createRole(ctx, db, role) + if err != nil { + return err + } + + a.roles[role] = &perms{ + namespacePrivileges: make(map[string]map[privilege]struct{}), + globalPrivileges: make(map[privilege]struct{}), + } + + return nil +} + +func (a *accessController) DeleteRole(ctx context.Context, db sql.DB, role string) error { + if isBuiltInRole(role) { + return fmt.Errorf(`role "%s" is a built-in role and cannot be removed`, role) + } + + _, ok := a.roles[role] + if !ok { + return fmt.Errorf(`role "%s" does not exist`, role) + } + + // remove the role from the db + err := execute(ctx, db, "DELETE FROM kwild_engine.roles WHERE name = $1", role) + if err != nil { + return err + } + + delete(a.roles, role) + + // iterate over all users and remove the role from them + for user, roles := range a.userRoles { + for i, r := range roles { + if r == role { + a.userRoles[user] = append(roles[:i], roles[i+1:]...) + break + } + } + } + + return nil +} + +// DeleteNamespace deletes all roles and privileges associated with a namespace. +func (a *accessController) DeleteNamespace(namespace string) { + for _, role := range a.roles { + delete(role.namespacePrivileges, namespace) + } +} + +func (a *accessController) HasPrivilege(user string, namespace *string, privilege privilege) bool { + // if it is the owner, they have all privileges + if user == a.owner { + return true + } + + // since all users have the default role, we can check that first + if a.roles[defaultRole].canDo(privilege, namespace) { + return true + } + + // otherwise, we need to check the user's roles + roles, ok := a.userRoles[user] + if !ok { + return false + } + + for _, role := range roles { + perms, ok := a.roles[role] + if !ok { + fmt.Println("Unexpected cache error: role does not exist. This is a bug.") + continue + } + + if perms.canDo(privilege, namespace) { + return true + } + } + + return false +} + +func (a *accessController) GrantPrivileges(ctx context.Context, db sql.DB, role string, privs []string, namespace *string) error { + if role == ownerRole { + return fmt.Errorf(`owner role already has all privileges`) + } + + perms, ok := a.roles[role] + if !ok { + return fmt.Errorf(`role "%s" does not exist`, role) + } + + // verify that the privileges are valid + convPrivs, err := validatePrivileges(privs...) + if err != nil { + return err + } + + // if a namespace is provided, check that it exists and that all privileges can be namespaced + if namespace != nil { + _, ok := perms.namespacePrivileges[*namespace] + if !ok { + return fmt.Errorf(`namespace "%s" does not exist`, *namespace) + } + + err = canBeNamespaced(convPrivs...) 
+ if err != nil { + return err + } + } + + for _, p := range convPrivs { + if perms.canDo(p, namespace) { + return fmt.Errorf(`role "%s" already has some or all of the specified privileges`, role) + } + } + + // update the cache if the db operation is successful + defer func() { + if err == nil { + a.roles[role].grant(namespace, convPrivs...) + } + }() + + err = grantPrivileges(ctx, db, role, privs, namespace) + if err != nil { + return err + } + + return nil +} + +func (a *accessController) RevokePrivileges(ctx context.Context, db sql.DB, role string, privs []string, namespace *string) error { + if role == ownerRole { + return fmt.Errorf(`owner role cannot have privileges revoked`) + } + + perms, ok := a.roles[role] + if !ok { + return fmt.Errorf(`role "%s" does not exist`, role) + } + + // verify that the privileges are valid + convPrivs, err := validatePrivileges(privs...) + if err != nil { + return err + } + + // if a namespace is provided, check that it exists and that all privileges can be namespaced + if namespace != nil { + _, ok := perms.namespacePrivileges[*namespace] + if !ok { + return fmt.Errorf(`namespace "%s" does not exist`, *namespace) + } + + err = canBeNamespaced(convPrivs...) + if err != nil { + return err + } + } + + for _, p := range convPrivs { + if !perms.canDo(p, namespace) { + return fmt.Errorf(`role "%s" does not have some or all of the specified privileges`, role) + } + } + + // update the cache if the db operation is successful + defer func() { + if err == nil { + a.roles[role].revoke(namespace, convPrivs...) + } + }() + + err = revokePrivileges(ctx, db, role, privs, namespace) + if err != nil { + return err + } + + return nil +} + +func (a *accessController) AssignRole(ctx context.Context, db sql.DB, role string, user string) error { + if isBuiltInRole(role) { + return fmt.Errorf(`role "%s" is a built-in role and cannot be assigned`, role) + } + + // check that the role exists + _, ok := a.roles[role] + if !ok { + return fmt.Errorf(`role "%s" does not exist`, role) + } + + // ensure that the user exists + _, ok = a.userRoles[user] + if !ok { + a.userRoles[user] = []string{} + } + + // check if the user already has the role + for _, r := range a.userRoles[user] { + if r == role { + return fmt.Errorf(`user "%s" already has role "%s"`, user, role) + } + } + + var err error + // update the cache if the db operation is successful + defer func() { + if err == nil { + a.userRoles[user] = append(a.userRoles[user], role) + } + }() + + err = assignRole(ctx, db, role, user) + if err != nil { + return err + } + + return nil +} + +func (a *accessController) UnassignRole(ctx context.Context, db sql.DB, role string, user string) error { + if isBuiltInRole(role) { + return fmt.Errorf(`role "%s" is a built-in role and cannot be unassigned`, role) + } + + _, ok := a.roles[role] + if !ok { + return fmt.Errorf(`role "%s" does not exist`, role) + } + + roles, ok := a.userRoles[user] + if !ok { + return fmt.Errorf(`user "%s" does not exist`, user) + } + + // check if the user has the role + var hasRole bool + for i, r := range roles { + if r == role { + hasRole = true + // remove the role from the user's roles + a.userRoles[user] = append(roles[:i], roles[i+1:]...) + break + } + } + + if !hasRole { + return fmt.Errorf(`user "%s" does not have role "%s"`, user, role) + } + + err := unassignRole(ctx, db, role, user) + if err != nil { + return err + } + + return nil +} + +const ownerKey = "db_owner" + +// SetOwnership sets the owner of the database. 
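+// The owner implicitly passes every privilege check (see HasPrivilege).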
+// It will overwrite the current owner.
+func (a *accessController) SetOwnership(ctx context.Context, db sql.DB, user string) error {
+	// update the db
+	err := execute(ctx, db, "INSERT INTO kwild_engine.metadata (key, value) VALUES ($1, $2) ON CONFLICT (key) DO UPDATE SET value = $2", ownerKey, user)
+	if err != nil {
+		return err
+	}
+
+	// update the cache
+	a.owner = user
+
+	return nil
+}
+
+func (a *accessController) IsOwner(user string) bool {
+	return user == a.owner
+}
+
+func (a *accessController) RoleExists(role string) bool {
+	_, ok := a.roles[role]
+	return ok
+}
+
+// createRole creates a role in the db
+func createRole(ctx context.Context, db sql.DB, roleName string) error {
+	err := execute(ctx, db, "INSERT INTO kwild_engine.roles (name) VALUES ($1)", roleName)
+	return err
+}
+
+// grantPrivileges grants privileges to a role.
+// If the privileges do not exist, it will return an error.
+// It can optionally be applied to a specific namespace.
+func grantPrivileges(ctx context.Context, db sql.DB, roleName string, privileges []string, namespace *string) error {
+	if namespace == nil {
+		err := execute(ctx, db, `INSERT INTO kwild_engine.role_privileges (role_id, privilege_type)
+		SELECT r.id, unnest($2::text[]) FROM kwild_engine.roles r WHERE r.name = $1`, roleName, privileges)
+		return err
+	}
+
+	err := execute(ctx, db, `INSERT INTO kwild_engine.role_privileges (role_id, namespace_id, privilege_type)
+	SELECT r.id, n.id, unnest($3::text[]) FROM kwild_engine.roles r
+	JOIN kwild_engine.namespaces n ON n.name = $2
+	WHERE r.name = $1`, roleName, *namespace, privileges)
+	return err
+}
+
+// revokePrivileges revokes privileges from a role.
+// If the privileges do not exist, it will return an error.
+// It can optionally be applied to a specific namespace.
+func revokePrivileges(ctx context.Context, db sql.DB, roleName string, privileges []string, namespace *string) error {
+	if namespace == nil {
+		err := execute(ctx, db, `DELETE FROM kwild_engine.role_privileges
+		WHERE role_id = (SELECT id FROM kwild_engine.roles WHERE name = $1) AND privilege_type = ANY($2::text[])`, roleName, privileges)
+		return err
+	}
+
+	err := execute(ctx, db, `DELETE FROM kwild_engine.role_privileges
+	WHERE role_id = (SELECT id FROM kwild_engine.roles WHERE name = $1)
+	AND namespace_id = (SELECT id FROM kwild_engine.namespaces WHERE name = $2)
+	AND privilege_type = ANY($3::text[])`, roleName, *namespace, privileges)
+	return err
+}
+
+// assignRole assigns a role to a user.
+// If the role does not exist, it will return an error.
+func assignRole(ctx context.Context, db sql.DB, roleName, user string) error {
+	err := execute(ctx, db, `INSERT INTO kwild_engine.user_roles (user_identifier, role_id)
+	VALUES ($1, (SELECT id FROM kwild_engine.roles WHERE name = $2))`, user, roleName)
+	return err
+}
+
+// unassignRole unassigns a role from a user.
+// If the role does not exist, it will return an error.
+func unassignRole(ctx context.Context, db sql.DB, roleName, user string) error {
+	err := execute(ctx, db, `DELETE FROM kwild_engine.user_roles
+	WHERE user_identifier = $1 AND role_id = (SELECT id FROM kwild_engine.roles WHERE name = $2)`, user, roleName)
+	return err
+}
+
+var privilegeNames = map[privilege]struct{}{
+	CallPrivilege:   {},
+	SelectPrivilege: {},
+	InsertPrivilege: {},
+	UpdatePrivilege: {},
+	DeletePrivilege: {},
+	CreatePrivilege: {},
+	DropPrivilege:   {},
+	AlterPrivilege:  {},
+	RolesPrivilege:  {},
+	UsePrivilege:    {},
+}
+
+type privilege string
+
+func (p privilege) String() string {
+	return string(p)
+}
+
+const (
+	// can execute actions
+	CallPrivilege privilege = "CALL"
+	// can execute ad-hoc select queries
+	SelectPrivilege privilege = "SELECT"
+	// can insert data
+	InsertPrivilege privilege = "INSERT"
+	// can update data
+	UpdatePrivilege privilege = "UPDATE"
+	// can delete data
+	DeletePrivilege privilege = "DELETE"
+	// can create new objects
+	CreatePrivilege privilege = "CREATE"
+	// can drop objects
+	DropPrivilege privilege = "DROP"
+	// can use extensions
+	UsePrivilege privilege = "USE"
+	// can alter objects
+	AlterPrivilege privilege = "ALTER"
+	// can manage roles.
+	// roles are global, and are not tied to a specific namespace or object.
+	RolesPrivilege privilege = "ROLES"
+)
+
+// perms is a struct that holds the permissions for a role.
+type perms struct {
+	// namespacePrivileges is a map of namespace names to the privileges that are allowed on that namespace.
+	// It does NOT include inherited privileges.
+	namespacePrivileges map[string]map[privilege]struct{}
+	// globalPrivileges is a set of privileges that are allowed globally.
+	// it does NOT include inherited privileges.
+	globalPrivileges map[privilege]struct{}
+}
+
+// canDo returns true if the role can perform the specified action.
+func (p *perms) canDo(priv privilege, namespace *string) bool {
+	// if the user has the global privilege, return true
+	_, hasGlobal := p.globalPrivileges[priv]
+	if hasGlobal {
+		return true
+	}
+	// if the user does not have global and no namespace is provided, return false
+	if namespace == nil {
+		return false
+	}
+
+	// otherwise, check the namespace
+	np, ok := p.namespacePrivileges[*namespace]
+	if !ok {
+		return false
+	}
+
+	_, has := np[priv]
+	return has
+}
+
+// grant adds the privileges to the set.
+func (p *perms) grant(namespace *string, privs ...privilege) {
+	if namespace == nil {
+		for _, priv := range privs {
+			p.globalPrivileges[priv] = struct{}{}
+		}
+	} else {
+		np, ok := p.namespacePrivileges[*namespace]
+		if !ok {
+			panic("unexpected error: namespace does not exist")
+		}
+
+		for _, priv := range privs {
+			np[priv] = struct{}{}
+		}
+
+		p.namespacePrivileges[*namespace] = np
+	}
+}
+
+// revoke removes the privileges from the set.
+func (p *perms) revoke(namespace *string, privs ...privilege) {
+	if namespace == nil {
+		for _, priv := range privs {
+			delete(p.globalPrivileges, priv)
+		}
+	} else {
+		np, ok := p.namespacePrivileges[*namespace]
+		if !ok {
+			panic("unexpected error: namespace does not exist")
+		}
+
+		for _, priv := range privs {
+			delete(np, priv)
+		}
+
+		p.namespacePrivileges[*namespace] = np
+	}
+}
+
+// canBeNamespaced returns a nil error if the privilege can be namespaced.
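+// For example, ROLES is a global-only privilege: a statement like `GRANT ROLES TO reviewer`
+// is valid, while `GRANT ROLES ON some_namespace TO reviewer` is rejected (illustrative syntax;
+// `reviewer` and `some_namespace` are placeholder names).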
+func canBeNamespaced(ps ...privilege) error {
+	for _, p := range ps {
+		if p == RolesPrivilege {
+			return fmt.Errorf(`privilege "%s" cannot be namespaced`, p)
+		}
+	}
+
+	return nil
+}
+
+// validatePrivileges returns a nil error if the privileges are valid.
+func validatePrivileges(ps ...string) ([]privilege, error) {
+	ps2 := make([]privilege, len(ps))
+	for i, p := range ps {
+		_, ok := privilegeNames[privilege(p)]
+		if !ok {
+			return nil, fmt.Errorf(`privilege "%s" does not exist`, p)
+		}
+
+		ps2[i] = privilege(p)
+	}
+
+	return ps2, nil
+}
diff --git a/node/engine/interpreter/schema.sql b/node/engine/interpreter/schema.sql
new file mode 100644
index 000000000..a0aa5f80f
--- /dev/null
+++ b/node/engine/interpreter/schema.sql
@@ -0,0 +1,438 @@
+/*
+    This section contains all of the DDL for creating the schema for `kwild_engine`, which is
+    the internal schema for the engine. This stores all metadata for actions.
+*/
+CREATE SCHEMA IF NOT EXISTS kwild_engine;
+
+DO $$
+BEGIN
+    -- scalar_data_type is an enumeration of all scalar data types supported by the engine
+    BEGIN
+        CREATE TYPE kwild_engine.scalar_data_type AS ENUM (
+            'INT8', 'TEXT', 'BOOL', 'UUID', 'NUMERIC', 'BYTEA'
+        );
+    EXCEPTION
+        WHEN duplicate_object THEN NULL;
+    END;
+
+    -- modifiers is an enumeration of all modifiers that can be applied to an action
+    BEGIN
+        CREATE TYPE kwild_engine.modifiers AS ENUM (
+            'VIEW', 'OWNER', 'PUBLIC', 'PRIVATE', 'SYSTEM'
+        );
+    EXCEPTION
+        WHEN duplicate_object THEN NULL;
+    END;
+
+    -- privilege_type is an enumeration of all privilege types that can be applied to a role
+    BEGIN
+        CREATE TYPE kwild_engine.privilege_type AS ENUM (
+            'SELECT', 'INSERT', 'UPDATE', 'DELETE', 'CREATE', 'DROP', 'ALTER', 'CALL', 'ROLES', 'USE'
+        );
+    EXCEPTION
+        WHEN duplicate_object THEN NULL;
+    END;
+
+    BEGIN
+        CREATE TYPE kwild_engine.namespace_type AS ENUM (
+            'USER', 'SYSTEM', 'EXTENSION'
+        );
+    EXCEPTION
+        WHEN duplicate_object THEN NULL;
+    END;
+END $$;
+
+-- metadata stores all metadata for the engine
+CREATE TABLE IF NOT EXISTS kwild_engine.metadata (
+    id BIGSERIAL PRIMARY KEY,
+    key TEXT NOT NULL UNIQUE,
+    value TEXT NOT NULL
+);
+
+-- namespaces is a table that stores all user schemas in the engine
+CREATE TABLE IF NOT EXISTS kwild_engine.namespaces (
+    id BIGSERIAL PRIMARY KEY,
+    name TEXT NOT NULL UNIQUE,
+    type kwild_engine.namespace_type NOT NULL DEFAULT 'USER'
+);
+
+
+CREATE TABLE IF NOT EXISTS kwild_engine.initialized_extensions (
+    id BIGSERIAL PRIMARY KEY,
+    namespace_id INT8 NOT NULL REFERENCES kwild_engine.namespaces(id) ON UPDATE CASCADE ON DELETE CASCADE,
+    base_extension TEXT NOT NULL
+);
+
+CREATE TABLE IF NOT EXISTS kwild_engine.extension_initialization_parameters (
+    id BIGSERIAL PRIMARY KEY,
+    extension_id INT8 NOT NULL REFERENCES kwild_engine.initialized_extensions(id) ON UPDATE CASCADE ON DELETE CASCADE,
+    key TEXT NOT NULL,
+    value TEXT NOT NULL,
+    data_type TEXT NOT NULL,
+    UNIQUE (extension_id, key)
+);
+
+-- actions is a table that stores all actions in the engine
+CREATE TABLE IF NOT EXISTS kwild_engine.actions (
+    id BIGSERIAL PRIMARY KEY,
+    schema_name TEXT NOT NULL REFERENCES kwild_engine.namespaces(name) ON UPDATE CASCADE ON DELETE CASCADE,
+    name TEXT NOT NULL UNIQUE,
+    raw_statement TEXT NOT NULL,
+    returns_table BOOLEAN NOT NULL DEFAULT FALSE,
+    modifiers kwild_engine.modifiers[]
+);
+
+-- parameters is a table that stores all parameters for actions in the engine
+CREATE TABLE IF NOT EXISTS kwild_engine.parameters (
+    id BIGSERIAL PRIMARY KEY,
+    action_id INT8 NOT NULL REFERENCES kwild_engine.actions(id) ON UPDATE CASCADE ON DELETE CASCADE,
+    name TEXT NOT NULL CHECK (name = lower(name)),
+    position INT8 NOT NULL,
+    scalar_type kwild_engine.scalar_data_type NOT NULL,
+    is_array BOOLEAN NOT NULL,
+    metadata BYTEA DEFAULT NULL
+);
+
+-- return_fields is a table that stores all return types for actions in the engine
+CREATE TABLE IF NOT EXISTS kwild_engine.return_fields (
+    id BIGSERIAL PRIMARY KEY,
+    action_id INT8 NOT NULL REFERENCES kwild_engine.actions(id) ON UPDATE CASCADE ON DELETE CASCADE,
+    name TEXT NOT NULL CHECK (name = lower(name)),
+    position INT8 NOT NULL,
+    scalar_type kwild_engine.scalar_data_type NOT NULL,
+    is_array BOOLEAN NOT NULL,
+    metadata BYTEA DEFAULT NULL
+);
+
+-- roles is a table that stores all role information.
+-- since Kwil uses its own roles system, which is in no way related to the Postgres roles system, we need to store this information ourselves
+CREATE TABLE IF NOT EXISTS kwild_engine.roles (
+    id BIGSERIAL PRIMARY KEY,
+    name TEXT NOT NULL UNIQUE
+);
+
+CREATE TABLE IF NOT EXISTS kwild_engine.role_privileges (
+    id BIGSERIAL PRIMARY KEY,
+    privilege_type kwild_engine.privilege_type NOT NULL,
+    namespace_id INT8 REFERENCES kwild_engine.namespaces(id) ON UPDATE CASCADE ON DELETE CASCADE, -- the namespace it is targeting. Can be null if it is a global privilege
+    role_id INT8 NOT NULL REFERENCES kwild_engine.roles(id) ON UPDATE CASCADE ON DELETE CASCADE
+);
+
+-- user_roles is a table that stores all users who have been assigned roles
+CREATE TABLE IF NOT EXISTS kwild_engine.user_roles (
+    id BIGSERIAL PRIMARY KEY,
+    user_identifier TEXT NOT NULL,
+    role_id INT8 NOT NULL
+);
+
+-- an index here helps with performance when querying for a user's roles
+CREATE INDEX IF NOT EXISTS user_roles_user_identifier_idx ON kwild_engine.user_roles(user_identifier);
+
+-- create a single default role that will be used for all users
+INSERT INTO kwild_engine.roles (name) VALUES ('default') ON CONFLICT DO NOTHING;
+-- default role can select and call by default
+INSERT INTO kwild_engine.role_privileges (privilege_type, role_id) VALUES ('SELECT', (
+    SELECT id
+    FROM kwild_engine.roles
+    WHERE name = 'default'
+)), ('CALL', (
+    SELECT id
+    FROM kwild_engine.roles
+    WHERE name = 'default'
+)) ON CONFLICT DO NOTHING;
+
+-- format_type is a function that formats a data type for display
+CREATE OR REPLACE FUNCTION kwild_engine.format_type(scal kwild_engine.scalar_data_type, is_arr BOOLEAN, meta BYTEA)
+RETURNS TEXT AS $$
+DECLARE
+    result TEXT;
+BEGIN
+    result := lower(scal::text);
+
+    IF result = 'numeric' THEN
+        IF octet_length(meta) = 4 THEN
+            -- precision and scale are uint16, precision is first 2 bytes, scale is next 2 bytes
+            result := result || '(' ||
+                ((get_byte(meta, 0) << 8 | get_byte(meta, 1))::TEXT) || ',' ||
+                ((get_byte(meta, 2) << 8 | get_byte(meta, 3))::TEXT) || ')';
+        ELSIF octet_length(meta) != 0 THEN
+            -- should never happen, would suggest some sort of serious internal error
+            RAISE EXCEPTION 'Invalid metadata length for numeric data type';
+        END IF;
+    ELSIF octet_length(meta) != 0 THEN
+        -- should never happen, would suggest some sort of serious internal error
+        RAISE EXCEPTION 'Invalid metadata length for non-numeric data type';
+    END IF;
+
+    IF is_arr THEN
+        result := result || '[]';
+    END IF;
+
+    RETURN result;
+END;
+$$ LANGUAGE plpgsql;
+
+/*
+    This section creates the `info` schema, which is the public user-facing schema.
+    End users can access the views in this schema to get information about the database.
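+    For example, an ad-hoc `SELECT * FROM info.tables` (illustrative) lists every user table
+    together with the namespace (schema) it belongs to.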
+ + All views are ordered to ensure that they are deterministic when queried. +*/ +CREATE SCHEMA IF NOT EXISTS info; +INSERT INTO kwild_engine.namespaces (name, type) VALUES ('info', 'SYSTEM') ON CONFLICT DO NOTHING; + +-- info.namespaces is a public view that provides a list of all namespaces in the database +CREATE VIEW info.namespaces AS +SELECT + name, + type::TEXT +FROM + kwild_engine.namespaces +ORDER BY + name; + +-- info.tables is a public view that provides a list of all tables in the database +CREATE VIEW info.tables AS +SELECT tablename::TEXT AS name, schemaname::TEXT AS schema +FROM pg_tables +JOIN kwild_engine.namespaces us + ON schemaname = us.name +ORDER BY 1, 2; + +-- info.columns is a public view that provides a list of all columns in the database +CREATE VIEW info.columns AS +SELECT + c.table_schema::TEXT AS schema_name, + c.table_name::TEXT AS table_name, + c.column_name::TEXT AS column_name, + CASE + WHEN t.typcategory = 'A' + THEN (pg_catalog.format_type(a.atttypid, a.atttypmod)) + ELSE c.data_type + END AS data_type, + c.is_nullable::bool AS is_nullable, + c.column_default AS default_value, + CASE + WHEN tc.constraint_type = 'PRIMARY KEY' + THEN true + ELSE false + END AS is_primary_key, + c.ordinal_position AS ordinal_position +FROM information_schema.columns c +JOIN pg_namespace n + ON c.table_schema = n.nspname::TEXT +JOIN pg_class cl + ON cl.relname = c.table_name + AND cl.relnamespace = n.oid +JOIN pg_attribute a + ON a.attname = c.column_name + AND a.attrelid = cl.oid +JOIN pg_type t + ON t.oid = a.atttypid +LEFT JOIN information_schema.key_column_usage kcu + ON c.table_name = kcu.table_name + AND c.column_name = kcu.column_name + AND c.table_schema = kcu.table_schema +LEFT JOIN information_schema.table_constraints tc + ON kcu.constraint_name = tc.constraint_name + AND tc.constraint_type = 'PRIMARY KEY' + AND tc.table_schema = c.table_schema +JOIN + kwild_engine.namespaces us ON n.nspname::TEXT = us.name +WHERE cl.relkind = 'r' -- Only include regular tables +ORDER BY + c.table_name, + c.ordinal_position, + 1, 2, 3, 4, 5, 6, 7, 8; + +-- info.indexes is a public view that provides a list of all indexes in the database +CREATE VIEW info.indexes AS +SELECT + n.nspname::TEXT AS schema_name, + c.relname::TEXT AS table_name, + ic.relname::TEXT AS index_name, + i.indisprimary AS is_pk, + i.indisunique AS is_unique, + array_agg(a.attname ORDER BY x.ordinality) AS column_names +FROM pg_index i +JOIN pg_class c ON c.oid = i.indrelid +JOIN pg_class ic ON ic.oid = i.indexrelid +JOIN pg_namespace n ON c.relnamespace = n.oid +JOIN pg_am am ON ic.relam = am.oid +JOIN pg_attribute a ON a.attnum = ANY(i.indkey) AND a.attrelid = c.oid +JOIN LATERAL unnest(i.indkey) WITH ORDINALITY AS x(colnum, ordinality) ON x.colnum = a.attnum +JOIN + kwild_engine.namespaces us ON n.nspname::TEXT = us.name +GROUP BY n.nspname, c.relname, ic.relname, i.indisprimary, i.indisunique +ORDER BY 1,2,3,4,5,6; + +-- info.constraints is a public view that provides a list of all constraints in the database +CREATE VIEW info.constraints AS +SELECT + pg_namespace.nspname::TEXT AS schema_name, + conname AS constraint_name, + split_part(conrelid::regclass::text, '.', 2) AS table_name, + array_agg(attname) AS columns, + pg_get_constraintdef(pg_constraint.oid) AS expression, + CASE contype + WHEN 'c' THEN 'CHECK' + WHEN 'u' THEN 'UNIQUE' + END AS constraint_type +FROM + pg_constraint +JOIN + pg_class ON conrelid = pg_class.oid +JOIN + pg_namespace ON pg_class.relnamespace = pg_namespace.oid +LEFT JOIN + 
unnest(conkey) AS cols(colnum) ON true +LEFT JOIN + pg_attribute ON pg_attribute.attnum = cols.colnum AND pg_attribute.attrelid = pg_class.oid +JOIN + kwild_engine.namespaces us ON pg_namespace.nspname::TEXT = us.name +WHERE + contype = 'c' -- Only check constraints + OR contype = 'u' -- Only unique constraints +GROUP BY + pg_namespace.nspname, conname, conrelid, pg_constraint.oid +ORDER BY + 1, 2, 3, 4, 5, 6; + +-- info.foreign_keys is a public view that provides a list of all foreign keys in the database +CREATE VIEW info.foreign_keys AS +SELECT + pg_namespace.nspname::TEXT AS schema_name, + conname AS constraint_name, + split_part(conrelid::regclass::text, '.', 2) AS table_name, + array_agg(attname) AS columns, + CASE confupdtype + WHEN 'a' THEN 'NO ACTION' + WHEN 'r' THEN 'RESTRICT' + WHEN 'c' THEN 'CASCADE' + WHEN 'n' THEN 'SET NULL' + WHEN 'd' THEN 'SET DEFAULT' + END AS on_update, + CASE confdeltype + WHEN 'a' THEN 'NO ACTION' + WHEN 'r' THEN 'RESTRICT' + WHEN 'c' THEN 'CASCADE' + WHEN 'n' THEN 'SET NULL' + WHEN 'd' THEN 'SET DEFAULT' + END AS on_delete +FROM + pg_constraint +JOIN + pg_class ON conrelid = pg_class.oid +JOIN + pg_namespace ON pg_class.relnamespace = pg_namespace.oid +LEFT JOIN + unnest(conkey) AS cols(colnum) ON true +LEFT JOIN + pg_attribute ON pg_attribute.attnum = cols.colnum AND pg_attribute.attrelid = pg_class.oid +JOIN + kwild_engine.namespaces us ON pg_namespace.nspname::TEXT = us.name +WHERE + contype = 'f' -- Only foreign key constraints +GROUP BY + pg_namespace.nspname, conname, conrelid, confupdtype, confdeltype +ORDER BY + table_name, constraint_name, + 1, 2, 3, 4, 5, 6; + +-- actions is a public view that provides a list of all actions in the database +CREATE VIEW info.actions AS +SELECT + a.schema_name AS namespace, + a.name::TEXT, + a.raw_statement, + a.modifiers::TEXT[] AS modifiers, + a.returns_table, + array_agg( + json_build_object( + 'name', p.name, + 'data_type', kwild_engine.format_type(p.scalar_type, p.is_array, p.metadata) + ) + ORDER BY p.position, p.name, kwild_engine.format_type(p.scalar_type, p.is_array, p.metadata) + ) AS parameters, + array_agg( + json_build_object( + 'name', r.name, + 'data_type', kwild_engine.format_type(r.scalar_type, r.is_array, r.metadata) + ) + ORDER BY r.position, r.name, kwild_engine.format_type(r.scalar_type, r.is_array, r.metadata) + ) AS return_types +FROM kwild_engine.actions a +JOIN kwild_engine.parameters p + ON a.id = p.action_id +LEFT JOIN kwild_engine.return_fields r + ON a.id = r.action_id +GROUP BY a.schema_name, a.name, a.modifiers, a.raw_statement, a.returns_table +ORDER BY a.name, + 1, 2, 3, 4, 5 --TODO: do we need to order the aggregates? 
+; + +-- roles is a public view that provides a list of all roles in the database +CREATE VIEW info.roles AS +SELECT + name +FROM + kwild_engine.roles +ORDER BY + name; + +CREATE VIEW info.user_roles AS +SELECT + user_identifier, + r.name AS role +FROM + kwild_engine.user_roles ur +JOIN + kwild_engine.roles r + ON ur.role_id = r.id +ORDER BY + 1, 2; + +-- role_privileges is a public view that provides a list of all role privileges in the database +CREATE VIEW info.role_privileges AS +SELECT + r.name AS role, + p.privilege_type AS privilege, + n.name AS namespace +FROM + kwild_engine.role_privileges p +JOIN + kwild_engine.roles r + ON p.role_id = r.id +LEFT JOIN + kwild_engine.namespaces n + ON p.namespace_id = n.id +ORDER BY + 1, 2, 3; + +CREATE VIEW info.extensions AS +SELECT + n.name AS namespace, + ie.base_extension AS extension, + array_agg( + json_build_object( + 'key', eip.key, + 'value', eip.value + ) + ORDER BY eip.key, eip.value + ) AS parameters +FROM + kwild_engine.initialized_extensions ie +JOIN + kwild_engine.namespaces n + ON ie.namespace_id = n.id +LEFT JOIN + kwild_engine.extension_initialization_parameters eip + ON ie.id = eip.extension_id +GROUP BY + n.name, ie.base_extension +ORDER BY + 1, 2; + +-- lastly, we need to create a default namespace for the user +CREATE SCHEMA IF NOT EXISTS main; +INSERT INTO kwild_engine.namespaces (name, type) VALUES ('main', 'SYSTEM') ON CONFLICT DO NOTHING; \ No newline at end of file diff --git a/node/engine/interpreter/schema_testdata.sql b/node/engine/interpreter/schema_testdata.sql new file mode 100644 index 000000000..0b3f86655 --- /dev/null +++ b/node/engine/interpreter/schema_testdata.sql @@ -0,0 +1,33 @@ +--TODO: delete this file, it is just for testing + +-- DELETE ME! just for testing +INSERT INTO kwild_engine.roles (name) VALUES ('admin') ON CONFLICT DO NOTHING; +INSERT INTO kwild_engine.role_inheritance (inheriter_id, inherited_from_id) VALUES ( + (SELECT id FROM kwild_engine.roles WHERE name = 'admin'), + (SELECT id FROM kwild_engine.roles WHERE name = 'default') +) ON CONFLICT DO NOTHING; +INSERT INTO kwild_engine.privileges (privilege_type, role_id) VALUES ('INSERT', ( + SELECT id + FROM kwild_engine.roles + WHERE name = 'admin' +)), ('UPDATE', ( + SELECT id + FROM kwild_engine.roles + WHERE name = 'admin' +)) ON CONFLICT DO NOTHING; + +-- leader role +INSERT INTO kwild_engine.roles (name) VALUES ('leader') ON CONFLICT DO NOTHING; +INSERT INTO kwild_engine.role_inheritance (inheriter_id, inherited_from_id) VALUES ( + (SELECT id FROM kwild_engine.roles WHERE name = 'leader'), + (SELECT id FROM kwild_engine.roles WHERE name = 'admin') +) ON CONFLICT DO NOTHING; +INSERT INTO kwild_engine.privileges (privilege_type, role_id) VALUES ('DELETE', ( + SELECT id + FROM kwild_engine.roles + WHERE name = 'leader' +)), ('DROP', ( + SELECT id + FROM kwild_engine.roles + WHERE name = 'leader' +)) ON CONFLICT DO NOTHING; \ No newline at end of file diff --git a/node/engine/interpreter/sql.go b/node/engine/interpreter/sql.go new file mode 100644 index 000000000..1cc8b49e7 --- /dev/null +++ b/node/engine/interpreter/sql.go @@ -0,0 +1,497 @@ +package interpreter + +import ( + "context" + _ "embed" + "encoding/binary" + "errors" + "fmt" + "strings" + + "github.com/kwilteam/kwil-db/core/types" + "github.com/kwilteam/kwil-db/core/utils/order" + "github.com/kwilteam/kwil-db/node/engine" + "github.com/kwilteam/kwil-db/node/engine/parse" + "github.com/kwilteam/kwil-db/node/pg" + "github.com/kwilteam/kwil-db/node/types/sql" +) + +var ( + //go:embed 
schema.sql + schemaInitSQL string +) + +// initSQL initializes the SQL schema. +func initSQL(ctx context.Context, db sql.DB) error { + return pg.Exec(ctx, db, schemaInitSQL) +} + +// queryOneInt64 queries for a single int64 value. +func queryOneInt64(ctx context.Context, db sql.DB, query string, args ...any) (int64, error) { + var res *int64 + err := queryRowFunc(ctx, db, query, []any{&res}, func() error { return nil }, args...) + if err != nil { + return 0, err + } + if res == nil { + return 0, errors.New("expected exactly one row") + } + + return *res, nil +} + +// createNamespace creates a new schema for a user. +func createNamespace(ctx context.Context, db sql.DB, name string, nsType namespaceType) (int64, error) { + err := execute(ctx, db, `CREATE SCHEMA `+name) + if err != nil { + return 0, err + } + + return queryOneInt64(ctx, db, `INSERT INTO kwild_engine.namespaces (name, type) VALUES ($1, $2) RETURNING id`, name, nsType) +} + +// dropNamespace drops a schema for a user. +func dropNamespace(ctx context.Context, db sql.DB, name string) error { + err := execute(ctx, db, `DROP SCHEMA `+name+` CASCADE`) + if err != nil { + return err + } + + return execute(ctx, db, `DELETE FROM kwild_engine.namespaces WHERE name = $1`, name) +} + +// storeAction stores an action in the database. +// It should always be called within a transaction. +func storeAction(ctx context.Context, db sql.DB, namespace string, action *Action) error { + returnsTable := false + if action.Returns != nil { + returnsTable = action.Returns.IsTable + } + + modStrs := make([]string, len(action.Modifiers)) + for i, mod := range action.Modifiers { + modStrs[i] = string(mod) + } + + actionID, err := queryOneInt64(ctx, db, `INSERT INTO kwild_engine.actions (name, schema_name, raw_statement, modifiers, returns_table) + VALUES ($1, $2, $3, $4, $5) RETURNING id`, + action.Name, namespace, action.RawStatement, modStrs, returnsTable) + if err != nil { + return err + } + + for i, param := range action.Parameters { + dt, err := param.Type.PGString() + if err != nil { + return err + } + + err = execute(ctx, db, `INSERT INTO kwild_engine.parameters (action_id, name, scalar_type, is_array, metadata, position) + VALUES ($1, $2, $3, $4, $5, $6)`, + actionID, param.Name, dt, param.Type.IsArray, getTypeMetadata(param.Type), i+1) + if err != nil { + return err + } + } + + if action.Returns != nil { + for i, field := range action.Returns.Fields { + dt, err := field.Type.PGString() + if err != nil { + return err + } + + err = execute(ctx, db, `INSERT INTO kwild_engine.return_fields (action_id, name, scalar_type, is_array, metadata, position) + VALUES ($1, $2, $3, $4, $5, $6)`, + actionID, field.Name, dt, field.Type.IsArray, getTypeMetadata(field.Type), i+1) + if err != nil { + return err + } + } + } + + return nil +} + +// listNamespaces lists all namespaces that are created. 
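+// This includes the built-in system namespaces (such as info and main), not only user-created ones.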
+func listNamespaces(ctx context.Context, db sql.DB) ([]struct { + Name string + Type namespaceType +}, error) { + var namespaces []struct { + Name string + Type namespaceType + } + var namespace string + var nsType string + err := queryRowFunc(ctx, db, `SELECT name, type::TEXT FROM kwild_engine.namespaces`, []any{&namespace, &nsType}, + func() error { + nsT := namespaceType(nsType) + if !nsT.valid() { + return fmt.Errorf("unknown namespace type %s", nsType) + } + + namespaces = append(namespaces, struct { + Name string + Type namespaceType + }{Name: namespace, Type: nsT}) + return nil + }, + ) + if err != nil { + return nil, err + } + + return namespaces, nil +} + +// listTablesInNamespace lists all tables in a namespace. +func listTablesInNamespace(ctx context.Context, db sql.DB, namespace string) ([]*engine.Table, error) { + tables := make([]*engine.Table, 0) + var schemaName string + var tblName string + var colNames, dataTypes, indexNames, constraintNames, constraintTypes, fkNames, fkOnUpdate, fkOnDelete []string + var indexCols, constraintCols, fkCols [][]string + var isNullables, isPrimaryKeys, isPKs, isUniques []bool + + scans := []any{ + &schemaName, + &tblName, + &colNames, + &dataTypes, + &isNullables, + &isPrimaryKeys, + &indexNames, + &isPKs, + &isUniques, + &indexCols, + &constraintNames, + &constraintTypes, + &constraintCols, + &fkNames, + &fkCols, + &fkOnUpdate, + &fkOnDelete, + } + err := queryRowFunc(ctx, db, ` + WITH columns AS ( + SELECT c.schema_name, c.table_name, + array_agg(c.column_name ORDER BY c.ordinal_position) AS column_names, + array_agg(c.data_type ORDER BY c.ordinal_position) AS data_types, + array_agg(c.is_nullable ORDER BY c.ordinal_position) AS is_nullables, + array_agg(c.is_primary_key ORDER BY c.ordinal_position) AS is_primary_keys + FROM info.columns c + GROUP BY c.schema_name, c.table_name + ), + indexes AS ( + SELECT i.schema_name, i.table_name, + array_agg(i.index_name ORDER BY i.index_name) AS index_names, + array_agg(i.is_pk ORDER BY i.index_name) AS is_pks, + array_agg(i.is_unique ORDER BY i.index_name) AS is_uniques, + array_agg(i.column_names ORDER BY i.index_name) AS column_names + FROM info.indexes i + GROUP BY i.schema_name, i.table_name + ), constraints AS ( + SELECT c.schema_name, c.table_name, + array_agg(c.constraint_name ORDER BY c.constraint_name) AS constraint_names, + array_agg(c.constraint_type ORDER BY c.constraint_name) AS constraint_types, + array_agg(c.columns ORDER BY c.constraint_name) AS columns + FROM info.constraints c + GROUP BY c.schema_name, c.table_name + ), foreign_keys AS ( + SELECT f.schema_name, f.table_name, + array_agg(f.constraint_name ORDER BY f.constraint_name) AS constraint_names, + array_agg(f.columns ORDER BY f.constraint_name) AS columns, + array_agg(f.on_update ORDER BY f.constraint_name) AS on_updates, + array_agg(f.on_delete ORDER BY f.constraint_name) AS on_deletes + FROM info.foreign_keys f + GROUP BY f.schema_name, f.table_name + ) + SELECT + t.schema, t.name, + c.column_names, c.data_types, c.is_nullables, c.is_primary_keys, + i.index_names, i.is_pks, i.is_uniques, i.column_names, + co.constraint_names, co.constraint_types, co.columns, + f.constraint_names, f.columns, f.on_updates, f.on_deletes + FROM info.tables t + JOIN columns c ON t.name = c.table_name AND t.schema = c.schema_name + LEFT JOIN indexes i ON t.name = i.table_name AND t.schema = i.schema_name + LEFT JOIN constraints co ON t.name = co.table_name AND t.schema = co.schema_name + LEFT JOIN foreign_keys f ON t.name = f.table_name AND 
t.schema = f.schema_name + WHERE t.schema = $1`, scans, + func() error { + tbl := &engine.Table{ + Name: tblName, + Constraints: make(map[string]*engine.Constraint), + } + + tables = append(tables, tbl) + + // add columns + for i, colName := range colNames { + dt, err := types.ParseDataType(dataTypes[i]) + if err != nil { + return err + } + + tbl.Columns = append(tbl.Columns, &engine.Column{ + Name: colName, + DataType: dt, + Nullable: isNullables[i], + IsPrimaryKey: isPrimaryKeys[i], + }) + } + + // add indexes + for i, indexName := range indexNames { + indexType := engine.BTREE + if isPKs[i] { + indexType = engine.PRIMARY + } else if isUniques[i] { + indexType = engine.UNIQUE_BTREE + } + + tbl.Indexes = append(tbl.Indexes, &engine.Index{ + Name: indexName, + Columns: indexCols[i], + Type: indexType, + }) + } + + // add constraints + for i, constraintName := range constraintNames { + var constraintType engine.ConstraintType + switch strings.ToLower(constraintTypes[i]) { + case "unique": + constraintType = engine.ConstraintUnique + case "check": + constraintType = engine.ConstraintCheck + default: + return fmt.Errorf("unknown constraint type %s", constraintTypes[i]) + } + + _, ok := tbl.Constraints[constraintName] + if ok { + return fmt.Errorf("duplicate constraint %s", constraintName) + } + + tbl.Constraints[constraintName] = &engine.Constraint{ + Type: constraintType, + Columns: constraintCols[i], + } + } + + // add foreign keys + for i, fkName := range fkNames { + _, ok := tbl.Constraints[fkName] + if ok { + return fmt.Errorf("duplicate foreign key %s", fkName) + } + + fk := &engine.Constraint{ + Type: engine.ConstraintFK, + Columns: fkCols[i], + } + + tbl.Constraints[fkName] = fk + } + return nil + }, namespace, + ) + if err != nil { + return nil, err + } + + return tables, nil +} + +// listActionsInNamespace lists all actions in a namespace. +func listActionsInNamespace(ctx context.Context, db sql.DB, namespace string) ([]*Action, error) { + var actions []*Action + var rawStmt string + scans := []any{ + &rawStmt, + } + + err := queryRowFunc(ctx, db, `SELECT raw_statement FROM kwild_engine.actions WHERE schema_name = $1`, scans, + func() error { + res, err := parse.Parse(rawStmt) + if err != nil { + return err + } + + if len(res) != 1 { + return fmt.Errorf("expected exactly 1 statement, got %d", len(res)) + } + + createActionStmt, ok := res[0].(*parse.CreateActionStatement) + if !ok { + return fmt.Errorf("expected CreateActionStatement, got %T", res[0]) + } + + act := &Action{} + err = act.FromAST(createActionStmt) + if err != nil { + return err + } + + actions = append(actions, act) + return nil + }, namespace, + ) + if err != nil { + return nil, err + } + + return actions, nil +} + +// registerExtensionInitialization registers that an extension was initialized with some values. 
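+// The metadata INSERT below is built dynamically; with two metadata entries the
+// statement becomes (illustrative):
+//
+//	INSERT INTO kwild_engine.extension_initialization_parameters (extension_id, key, value, data_type)
+//	VALUES ($1, $2, $3, $4),($1, $5, $6, $7)
+//
+// where $1 is always the initialized extension's id and each entry supplies its key,
+// stringified value, and data type.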
+func registerExtensionInitialization(ctx context.Context, db sql.DB, name, baseExtName string, metadata map[string]Value) error { + id, err := createNamespace(ctx, db, name, namespaceTypeExtension) + if err != nil { + return err + } + + extId, err := queryOneInt64(ctx, db, `INSERT INTO kwild_engine.initialized_extensions (namespace_id, base_extension) VALUES ( + $1, + $2 + ) RETURNING id + `, id, baseExtName) + if err != nil { + return err + } + + insertMetaStmt := `INSERT INTO kwild_engine.extension_initialization_parameters (extension_id, key, value, data_type) VALUES ` + i := 2 + rawVals := []any{extId} + for k, v := range metadata { + if i > 2 { + insertMetaStmt += `,` + } + + strVal, err := valueToString(v) + if err != nil { + return err + } + + rawVals = append(rawVals, k, strVal, v.Type().String()) + insertMetaStmt += fmt.Sprintf(`($1, $%d, $%d, $%d)`, i, i+1, i+2) + i += 3 + } + + return execute(ctx, db, insertMetaStmt, rawVals...) +} + +// unregisterExtensionInitialization unregisters that an extension was initialized. +// It simply wraps dropNamespace, relying on foreign key constraints to delete all related data. +// I wrap it in case we need to do more in the future. +func unregisterExtensionInitialization(ctx context.Context, db sql.DB, alias string) error { + return dropNamespace(ctx, db, alias) +} + +type storedExtension struct { + // ExtName is the name of the extension. + ExtName string + // Alias is the alias of the extension. + Alias string + // Metadata is the metadata of the extension. + Metadata map[string]Value +} + +// getExtensionInitializationMetadata gets all initialized extensions and their metadata. +func getExtensionInitializationMetadata(ctx context.Context, db sql.DB) ([]*storedExtension, error) { + extMap := make(map[string]*storedExtension) // maps the alias to the extension, will be sorted later + + var extName, alias string + var key, val, dt string + err := queryRowFunc(ctx, db, ` + SELECT n.name AS alias, ie.base_extension AS ext_name, eip.key, eip.value, eip.data_type + FROM kwild_engine.initialized_extensions ie + JOIN kwild_engine.namespaces n ON ie.namespace_id = n.id + LEFT JOIN kwild_engine.extension_initialization_parameters eip ON ie.id = eip.extension_id`, + []any{&alias, &extName, &key, &val, &dt}, + func() error { + ext, ok := extMap[alias] + if !ok { + ext = &storedExtension{ + Alias: alias, + ExtName: extName, + Metadata: make(map[string]Value), + } + extMap[alias] = ext + } + + datatype, err := types.ParseDataType(dt) + if err != nil { + return err + } + + v, err := parseValue(val, datatype) + if err != nil { + return err + } + + ext.Metadata[key] = v + return nil + }) + if err != nil { + return nil, err + } + + var fin []*storedExtension + ordered := order.OrderMap(extMap) + for _, o := range ordered { + fin = append(fin, o.Value) + } + + return fin, nil +} + +// getTypeMetadata gets the serialized type metadata. +// If there is none, it returns nil. +func getTypeMetadata(t *types.DataType) []byte { + if !t.HasMetadata() { + return nil + } + + meta := make([]byte, 4) + binary.LittleEndian.PutUint16(meta[:2], t.Metadata[0]) + binary.LittleEndian.PutUint16(meta[2:], t.Metadata[1]) + + return meta +} + +// query executes a SQL query with the given values. +// It is a utility function to help reduce boilerplate when executing +// SQL with Value types. 
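+// A sketch of a typical call (hypothetical variables, for illustration only):
+//
+//	name := newText("")
+//	err := query(ctx, db, `SELECT name FROM kwild_engine.namespaces WHERE name = $1`,
+//		[]Value{name}, func() error { found = append(found, name.String); return nil },
+//		[]Value{newText("main")})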
+func query(ctx context.Context, db sql.DB, query string, scanVals []Value, fn func() error, args []Value) error { + argVals := make([]any, len(args)) + var err error + for i, v := range args { + argVals[i] = v + } + + recVals := make([]any, len(scanVals)) + for i := range scanVals { + recVals[i] = scanVals[i] + } + + err = queryRowFunc(ctx, db, query, recVals, fn, argVals...) + if err != nil { + return err + } + + return nil +} + +// queryRowFunc executes a SQL query with the given values. +func queryRowFunc(ctx context.Context, tx sql.Executor, stmt string, + scans []any, fn func() error, args ...any) error { + return pg.QueryRowFunc(ctx, tx, stmt, scans, fn, append([]any{pg.QueryModeExec}, args...)...) +} + +// execute executes a SQL statement with the given values. +func execute(ctx context.Context, db sql.DB, stmt string, args ...any) error { + return queryRowFunc(ctx, db, stmt, nil, func() error { return nil }, args...) +} diff --git a/node/engine/interpreter/sql_test.go b/node/engine/interpreter/sql_test.go new file mode 100644 index 000000000..b5f6053bf --- /dev/null +++ b/node/engine/interpreter/sql_test.go @@ -0,0 +1,342 @@ +//go:build pglive + +package interpreter + +import ( + "context" + "testing" + + "github.com/kwilteam/kwil-db/common" + "github.com/kwilteam/kwil-db/core/types" + "github.com/kwilteam/kwil-db/core/types/decimal" + "github.com/kwilteam/kwil-db/node/engine" + "github.com/kwilteam/kwil-db/node/pg" + "github.com/kwilteam/kwil-db/node/types/sql" + "github.com/stretchr/testify/require" +) + +func Test_built_in_sql(t *testing.T) { + type testcase struct { + name string + fn func(ctx context.Context, db sql.DB) + } + tests := []testcase{ + { + name: "test store and load actions", + fn: func(ctx context.Context, db sql.DB) { + for _, act := range all_test_actions { + err := storeAction(ctx, db, "main", act) + require.NoError(t, err) + } + + actions, err := listActionsInNamespace(ctx, db, "main") + require.NoError(t, err) + + actMap := map[string]*Action{} + for _, act := range actions { + actMap[act.Name] = act + } + + require.Equal(t, len(all_test_actions), len(actMap)) + for _, act := range all_test_actions { + stored, ok := actMap[act.Name] + require.True(t, ok) + require.Equal(t, act.Name, stored.Name) + require.Equal(t, act.RawStatement, stored.RawStatement) + require.Equal(t, act.Modifiers, stored.Modifiers) + namedTypesEq(t, act.Parameters, stored.Parameters) + + if act.Returns != nil { + require.NotNil(t, stored.Returns) + require.Equal(t, act.Returns.IsTable, stored.Returns.IsTable) + namedTypesEq(t, act.Returns.Fields, stored.Returns.Fields) + } else { + require.Nil(t, stored.Returns) + } + + require.Equal(t, len(act.Body), len(stored.Body)) + } + }, + }, + { + name: "test store and load tables", + fn: func(ctx context.Context, db sql.DB) { + _, err := db.Execute(ctx, ` + CREATE TABLE main.users ( + id UUID PRIMARY KEY, + name TEXT NOT NULL CHECK (name <> '' AND length(name) <= 100), + age INT CHECK (age >= 0), + wallet_address TEXT UNIQUE NOT NULL + );`) + require.NoError(t, err) + + _, err = db.Execute(ctx, ` + CREATE TABLE main.posts ( + id UUID PRIMARY KEY, + title TEXT NOT NULL, + author_id UUID REFERENCES main.users (id) ON DELETE CASCADE + ); + `) + require.NoError(t, err) + + _, err = db.Execute(ctx, `CREATE UNIQUE INDEX ON main.users (name);`) + require.NoError(t, err) + + _, err = db.Execute(ctx, `CREATE INDEX user_ages ON main.users (age);`) + require.NoError(t, err) + + _, err = createNamespace(ctx, db, "other", namespaceTypeUser) + 
require.NoError(t, err) + + _, err = db.Execute(ctx, `CREATE TABLE other.my_table (id UUID PRIMARY KEY);`) + require.NoError(t, err) + + wantSchemas := map[string]map[string]*engine.Table{ + "main": { + "users": { + Name: "users", + Columns: []*engine.Column{ + { + Name: "id", + DataType: types.UUIDType, + IsPrimaryKey: true, + }, + { + Name: "name", + DataType: types.TextType, + }, + { + Name: "age", + DataType: types.IntType, + Nullable: true, + }, + { + Name: "wallet_address", + DataType: types.TextType, + }, + }, + Indexes: []*engine.Index{ + { + Name: "user_ages", + Columns: []string{"age"}, + Type: engine.BTREE, + }, + { + Name: "users_name_idx", + Columns: []string{"name"}, + Type: engine.UNIQUE_BTREE, + }, + { + Name: "users_pkey", + Columns: []string{"id"}, + Type: engine.PRIMARY, + }, + { + Name: "users_wallet_address_key", + Columns: []string{"wallet_address"}, + Type: engine.UNIQUE_BTREE, + }, + }, + Constraints: map[string]*engine.Constraint{ + "users_name_check": { + Type: engine.ConstraintCheck, + Columns: []string{"name"}, + }, + "users_age_check": { + Type: engine.ConstraintCheck, + Columns: []string{"age"}, + }, + "users_wallet_address_key": { + Type: engine.ConstraintUnique, + Columns: []string{"wallet_address"}, + }, + }, + }, + "posts": { + Name: "posts", + Columns: []*engine.Column{ + { + Name: "id", + DataType: types.UUIDType, + IsPrimaryKey: true, + }, + { + Name: "title", + DataType: types.TextType, + }, + { + Name: "author_id", + DataType: types.UUIDType, + Nullable: true, + }, + }, + Indexes: []*engine.Index{ + { + Name: "posts_pkey", + Columns: []string{"id"}, + Type: engine.PRIMARY, + }, + }, + Constraints: map[string]*engine.Constraint{ + "posts_author_id_fkey": { + Type: engine.ConstraintFK, + Columns: []string{"author_id"}, + }, + }, + }, + }, + "other": { + "my_table": { + Name: "my_table", + Columns: []*engine.Column{ + { + Name: "id", + DataType: types.UUIDType, + IsPrimaryKey: true, + }, + }, + Indexes: []*engine.Index{ + { + Name: "my_table_pkey", + Columns: []string{"id"}, + Type: engine.PRIMARY, + }, + }, + }, + }, + } + + tables := map[string]map[string]*engine.Table{} + + for schemaName := range wantSchemas { + tbls, err := listTablesInNamespace(ctx, db, schemaName) + require.NoError(t, err) + tables[schemaName] = map[string]*engine.Table{} + for _, tbl := range tbls { + tables[schemaName][tbl.Name] = tbl + } + } + + require.Equal(t, len(wantSchemas), len(tables)) + for schemaName, wantSchema := range wantSchemas { + storedTbls, ok := tables[schemaName] + require.True(t, ok) + for _, want := range wantSchema { + stored, ok := storedTbls[want.Name] + require.True(t, ok) + require.Equal(t, want.Name, stored.Name) + require.Equal(t, len(want.Columns), len(stored.Columns)) + for i, wc := range want.Columns { + sc := stored.Columns[i] + require.Equal(t, wc.Name, sc.Name) + require.Equal(t, wc.DataType.String(), sc.DataType.String()) + require.Equal(t, wc.IsPrimaryKey, sc.IsPrimaryKey) + require.Equal(t, wc.Nullable, sc.Nullable) + } + require.Equal(t, len(want.Indexes), len(stored.Indexes)) + for i, wi := range want.Indexes { + si := stored.Indexes[i] + require.Equal(t, wi.Columns, si.Columns) + require.Equal(t, wi.Type, si.Type) + require.Equal(t, wi.Name, si.Name) + } + require.Equal(t, len(stored.Constraints), len(want.Constraints)) + for i, wc := range want.Constraints { + sc := stored.Constraints[i] + require.Equal(t, wc.Type, sc.Type) + require.Equal(t, wc.Columns, sc.Columns) + } + } + } + }, + }, + { + name: "test store and load extensions", + 
fn: func(ctx context.Context, db sql.DB) { + vals := func() map[string]Value { + return map[string]Value{ + "str": mustNewVal("val1"), + "int": mustNewVal(123), + "bool": mustNewVal(true), + "dec": mustNewVal(mustDec("123.456")), + "uuid": mustNewVal(mustUUID("c7b6a54c-392c-48f9-803d-31cb97e76052")), + "blob": mustNewVal([]byte{1, 2, 3}), + "strarr": mustNewVal([]string{"a", "b", "c"}), + "intarr": mustNewVal([]int{1, 2, 3}), + "boolarr": mustNewVal([]bool{true, false, true}), + "decarr": mustNewVal([]*decimal.Decimal{mustDec("1.23"), mustDec("4.56")}), + "uuidarr": mustNewVal([]*types.UUID{mustUUID("c7b6a54c-392c-48f9-803d-31cb97e76052"), mustUUID("c7b6a54c-392c-48f9-803d-31cb97e76053")}), + "blobarr": mustNewVal([][]byte{{1, 2, 3}, {4, 5, 6}}), + } + } + + err := registerExtensionInitialization(ctx, db, "ext1_init", "ext1", vals()) + require.NoError(t, err) + + err = registerExtensionInitialization(ctx, db, "ext2_init", "ext2", vals()) + require.NoError(t, err) + + exts, err := getExtensionInitializationMetadata(ctx, db) + require.NoError(t, err) + + require.Equal(t, 2, len(exts)) + require.Equal(t, "ext1", exts[0].ExtName) + require.Equal(t, "ext1_init", exts[0].Alias) + require.EqualValues(t, vals(), exts[0].Metadata) + + require.Equal(t, "ext2", exts[1].ExtName) + require.Equal(t, "ext2_init", exts[1].Alias) + require.EqualValues(t, vals(), exts[1].Metadata) + }, + }, + } + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + cfg := &pg.DBConfig{ + PoolConfig: pg.PoolConfig{ + ConnConfig: pg.ConnConfig{ + Host: "127.0.0.1", + Port: "5432", + User: "kwild", + Pass: "kwild", // would be ignored if pg_hba.conf set with trust + DBName: "kwil_test_db", + }, + MaxConns: 11, + }, + } + + ctx := context.Background() + + db, err := pg.NewDB(ctx, cfg) + require.NoError(t, err) + defer db.Close() + tx, err := db.BeginTx(ctx) + require.NoError(t, err) + defer tx.Rollback(ctx) // always rollback to avoid cleanup + + interp, err := NewInterpreter(ctx, tx, &common.Service{}) + require.NoError(t, err) + + err = interp.SetOwner(ctx, tx, "owner") + require.NoError(t, err) + + test.fn(ctx, tx) + }) + } +} + +func namedTypesEq(t *testing.T, a, b []*NamedType) { + require.Equal(t, len(a), len(b)) + for i, at := range a { + require.Equal(t, at.Name, b[i].Name) + require.Equal(t, at.Type.String(), b[i].Type.String()) + } +} + +func mustNewVal(v any) Value { + val, err := NewValue(v) + if err != nil { + panic(err) + } + return val +} diff --git a/node/engine/interpreter/types.go b/node/engine/interpreter/types.go new file mode 100644 index 000000000..d7a13a870 --- /dev/null +++ b/node/engine/interpreter/types.go @@ -0,0 +1,243 @@ +package interpreter + +import ( + "fmt" + "strings" + + "github.com/kwilteam/kwil-db/core/types" + "github.com/kwilteam/kwil-db/extensions/precompiles" + "github.com/kwilteam/kwil-db/node/engine/parse" +) + +type Action struct { + // Name is the name of the action. + // It should always be lower case. + Name string `json:"name"` + + // Parameters are the input parameters of the action. + Parameters []*NamedType `json:"parameters"` + // Modifiers modify the access to the action. + Modifiers []precompiles.Modifier `json:"modifiers"` + + // Body is the logic of the action. + // TODO: delete this and just pass around strings + Body []parse.ActionStmt + + // RawStatement is the unparsed CREATE ACTION statement. + RawStatement string `json:"raw_statement"` + + // Returns specifies the return types of the action. 
+ Returns *ActionReturn `json:"return_types"` +} + +func (a *Action) GetName() string { + return a.Name +} + +// FromAST sets the fields of the action from an AST node. +func (a *Action) FromAST(ast *parse.CreateActionStatement) error { + a.Name = ast.Name + a.RawStatement = ast.Raw + a.Body = ast.Statements + + a.Parameters = convertNamedTypes(ast.Parameters) + + if ast.Returns != nil { + a.Returns = &ActionReturn{ + IsTable: ast.Returns.IsTable, + Fields: convertNamedTypes(ast.Returns.Fields), + } + } + + modSet := make(map[precompiles.Modifier]struct{}) + a.Modifiers = []precompiles.Modifier{} + hasPublicPrivateOrSystem := false + for _, m := range ast.Modifiers { + mod, err := stringToMod(m) + if err != nil { + return err + } + + if mod == precompiles.PUBLIC || mod == precompiles.PRIVATE || mod == precompiles.SYSTEM { + if hasPublicPrivateOrSystem { + return fmt.Errorf("only one of PUBLIC, PRIVATE, or SYSTEM is allowed") + } + + hasPublicPrivateOrSystem = true + } + + if _, ok := modSet[mod]; !ok { + modSet[mod] = struct{}{} + a.Modifiers = append(a.Modifiers, mod) + } + } + + if !hasPublicPrivateOrSystem { + return fmt.Errorf(`one of PUBLIC, PRIVATE, or SYSTEM access modifier is required. received: "%s"`, strings.Join(ast.Modifiers, ", ")) + } + + return nil +} + +// convertNamedTypes converts a list of named types from the AST to the internal representation. +func convertNamedTypes(params []*parse.NamedType) []*NamedType { + namedTypes := make([]*NamedType, len(params)) + for i, p := range params { + namedTypes[i] = &NamedType{ + Name: p.Name, + Type: p.Type, + } + } + return namedTypes +} + +// NamedType is a parameter in a procedure. +type NamedType struct { + // Name is the name of the parameter. + // It should always be lower case. + // If it is a procedure parameter, it should begin + // with a $. + Name string `json:"name"` + // Type is the type of the parameter. + Type *types.DataType `json:"type"` +} + +// ActionReturn holds the return type of a procedure. +// EITHER the Type field is set, OR the Table field is set. +type ActionReturn struct { + IsTable bool `json:"is_table"` + Fields []*NamedType `json:"fields"` +} + +func stringToMod(s string) (precompiles.Modifier, error) { + switch strings.ToLower(s) { + case "public": + return precompiles.PUBLIC, nil + case "private": + return precompiles.PRIVATE, nil + case "system": + return precompiles.SYSTEM, nil + case "owner": + return precompiles.OWNER, nil + case "view": + return precompiles.VIEW, nil + default: + return "", fmt.Errorf("unknown modifier %s", s) + } +} + +// // Table is a table in the schema. +// type Table struct { +// // Name is the name of the table. +// Name string +// // Columns is a list of columns in the table. +// Columns []*Column +// // Indexes is a list of indexes on the table. +// Indexes []*Index +// // Constraints are constraints on the table. +// Constraints map[string]*Constraint +// } + +// func (t *Table) PrimaryKeyCols() []*Column { +// var pkCols []*Column +// for _, col := range t.Columns { +// if col.IsPrimaryKey { +// pkCols = append(pkCols, col) +// } +// } + +// return pkCols +// } + +// // HasPrimaryKey returns true if the column is part of the primary key. +// func (t *Table) HasPrimaryKey(col string) bool { +// col = strings.ToLower(col) +// for _, c := range t.Columns { +// if c.Name == col && c.IsPrimaryKey { +// return true +// } +// } +// return false +// } + +// // Column returns a column by name. +// // If the column is not found, the second return value is false. 
+// func (t *Table) Column(name string) (*Column, bool) { +// for _, col := range t.Columns { +// if col.Name == name { +// return col, true +// } +// } +// return nil, false +// } + +// // SearchConstraint returns a list of constraints that match the given column and type. +// func (t *Table) SearchConstraint(column string, constraint ConstraintType) []*Constraint { +// var constraints []*Constraint +// for _, c := range t.Constraints { +// if c.Type == constraint { +// for _, col := range c.Columns { +// if col == column { +// constraints = append(constraints, c) +// } +// } +// } +// } +// return constraints +// } + +// // Column is a column in a table. +// type Column struct { +// // Name is the name of the column. +// Name string +// // DataType is the data type of the column. +// DataType *types.DataType +// // DefaultValue is the default value of the column. +// DefaultValue any // can be nil +// // Nullable is true if the column can be null. +// Nullable bool +// // IsPrimaryKey is true if the column is part of the primary key. +// IsPrimaryKey bool +// } + +// // TODO: constraints should be tied to the table +// // Constraint is a constraint in the schema. +// type Constraint struct { +// // Name is the name of the constraint. +// // It must be unique within the schema. +// Name string +// // Type is the type of the constraint. +// Type ConstraintType +// // Columns is a list of column names that the constraint is on. +// Columns []string +// } + +// type ConstraintType string + +// const ( +// ConstraintUnique ConstraintType = "unique" +// ConstraintCheck ConstraintType = "check" +// ConstraintFK ConstraintType = "foreign_key" +// ) + +// // IndexType is a type of index (e.g. BTREE, UNIQUE_BTREE, PRIMARY) +// type IndexType string + +// // Index is an index on a table. +// type Index struct { +// Name string `json:"name"` +// Columns []string `json:"columns"` +// Type IndexType `json:"type"` +// } + +// // index types +// const ( +// // BTREE is the default index type. +// BTREE IndexType = "BTREE" +// // UNIQUE_BTREE is a unique BTREE index. +// UNIQUE_BTREE IndexType = "UNIQUE_BTREE" +// // PRIMARY is a primary index. +// // Only one primary index is allowed per table. +// // A primary index cannot exist on a table that also has a primary key. +// PRIMARY IndexType = "PRIMARY" +// ) diff --git a/node/engine/interpreter/types_test.go b/node/engine/interpreter/types_test.go new file mode 100644 index 000000000..13050b807 --- /dev/null +++ b/node/engine/interpreter/types_test.go @@ -0,0 +1,53 @@ +//go:build pglive + +package interpreter + +import ( + "github.com/kwilteam/kwil-db/node/engine/parse" +) + +// mustParse is a helper function to parse an action and panic on error. 
+func mustParse(s string) *Action { + res, err := parse.Parse(s) + if err != nil { + panic(err) + } + + act := Action{} + err = act.FromAST(res[0].(*parse.CreateActionStatement)) + if err != nil { + panic(err) + } + + return &act +} + +// actions +var ( + all_test_actions = []*Action{ + action_create_user, + action_list_users, + action_get_user_by_name, + } + action_create_user = mustParse(`CREATE ACTION create_user ($name TEXT, $age INT) public { + INSERT INTO users (id, name, age) + VALUES ( + uuid_generate_v5('c7b6a54c-392c-48f9-803d-31cb97e76052'::uuid, @txid), + $name, + $age + ); + };`) + + action_list_users = mustParse(`CREATE ACTION list_users () public view owner { + RETURN SELECT id, name, age + FROM users; + };`) + + action_get_user_by_name = mustParse(`CREATE ACTION get_user_by_name ($name TEXT) public view { + FOR $row IN SELECT id, age + FROM users + WHERE name = $name { + RETURN $row.id, $row.age; + } + };`) +) diff --git a/node/engine/interpreter/values.go b/node/engine/interpreter/values.go new file mode 100644 index 000000000..9706e4e88 --- /dev/null +++ b/node/engine/interpreter/values.go @@ -0,0 +1,2436 @@ +package interpreter + +import ( + "fmt" + "strconv" + "strings" + + "github.com/jackc/pgx/v5/pgtype" + "github.com/kwilteam/kwil-db/core/types" + "github.com/kwilteam/kwil-db/core/types/decimal" +) + +// ValueMapping maps Go types and Kwil native types. +type ValueMapping struct { + // KwilType is the Kwil type that the value maps to. + // It will ignore the metadata of the type. + KwilType *types.DataType + // ZeroValue creates a zero-value of the type. + ZeroValue func() (Value, error) +} + +var ( + kwilTypeToValue = map[struct { + name string + isArray bool + }]ValueMapping{} +) + +func registerValueMapping(ms ...ValueMapping) { + for _, m := range ms { + k := struct { + name string + isArray bool + }{ + name: m.KwilType.Name, + isArray: m.KwilType.IsArray, + } + + _, ok := kwilTypeToValue[k] + if ok { + panic(fmt.Sprintf("type %s already registered", m.KwilType.Name)) + } + + kwilTypeToValue[k] = m + } +} + +func init() { + registerValueMapping( + ValueMapping{ + KwilType: types.IntType, + ZeroValue: func() (Value, error) { + return newInt(0), nil + }, + }, + ValueMapping{ + KwilType: types.TextType, + ZeroValue: func() (Value, error) { + return newText(""), nil + }, + }, + ValueMapping{ + KwilType: types.BoolType, + ZeroValue: func() (Value, error) { + return newBool(false), nil + }, + }, + ValueMapping{ + KwilType: types.BlobType, + ZeroValue: func() (Value, error) { + return newBlob([]byte{}), nil + }, + }, + ValueMapping{ + KwilType: types.UUIDType, + ZeroValue: func() (Value, error) { + return newUUID(&types.UUID{}), nil + }, + }, + ValueMapping{ + KwilType: types.DecimalType, + ZeroValue: func() (Value, error) { + dec, err := decimal.NewFromString("0") + if err != nil { + return nil, err + } + return newDec(dec), nil + }, + }, + ValueMapping{ + KwilType: types.IntArrayType, + ZeroValue: func() (Value, error) { + return &IntArrayValue{ + Array: pgtype.Array[pgtype.Int8]{}, + }, nil + }, + }, + ValueMapping{ + KwilType: types.TextArrayType, + ZeroValue: func() (Value, error) { + return &TextArrayValue{ + Array: pgtype.Array[pgtype.Text]{}, + }, nil + }, + }, + ValueMapping{ + KwilType: types.BoolArrayType, + ZeroValue: func() (Value, error) { + return &BoolArrayValue{ + Array: pgtype.Array[pgtype.Bool]{}, + }, nil + }, + }, + ValueMapping{ + KwilType: types.BlobArrayType, + ZeroValue: func() (Value, error) { + return &BlobArrayValue{ + Array: 
pgtype.Array[*BlobValue]{}, + }, nil + }, + }, + ValueMapping{ + KwilType: types.DecimalArrayType, + ZeroValue: func() (Value, error) { + return &DecimalArrayValue{ + Array: pgtype.Array[pgtype.Numeric]{}, + }, nil + }, + }, + ) +} + +// NewZeroValue creates a new zero value of the given type. +func NewZeroValue(t *types.DataType) (Value, error) { + m, ok := kwilTypeToValue[struct { + name string + isArray bool + }{ + name: t.Name, + isArray: t.IsArray, + }] + if !ok { + return nil, fmt.Errorf("type %s not found", t.Name) + } + + return m.ZeroValue() +} + +// Value is a value that can be compared, used in arithmetic operations, +// and have unary operations applied to it. +type Value interface { + // Compare compares the variable with another variable using the given comparison operator. + // It will return a boolean value or null, depending on the comparison and the values. + Compare(v Value, op ComparisonOp) (*BoolValue, error) + // Type returns the type of the variable. + Type() *types.DataType + // RawValue returns the value of the variable. + // This is one of: nil, int64, string, bool, []byte, *types.UUID, *decimal.Decimal, + // []*int64, []*string, []*bool, [][]byte, []*decimal.Decimal, []*types.UUID + RawValue() any + // Cast casts the variable to the given type. + // It is meant to mirror Postgres's type casting behavior. + Cast(t *types.DataType) (Value, error) + // Null returns true if the variable is null. + Null() bool +} + +// ScalarValue is a scalar value that can be computed on and have unary operations applied to it. +type ScalarValue interface { + Value + // Arithmetic performs an arithmetic operation on the variable with another variable. + Arithmetic(v ScalarValue, op ArithmeticOp) (ScalarValue, error) + // Unary applies a unary operation to the variable. + Unary(op UnaryOp) (ScalarValue, error) + // Array creates an array from this scalar value and any other scalar values. + Array(v ...ScalarValue) (ArrayValue, error) +} + +// ArrayValue is an array value that can be compared and have unary operations applied to it. +type ArrayValue interface { + Value + // Len returns the length of the array. + Len() int32 + // Index returns the value at the given index. + // If the index is out of bounds, an error is returned. + // All indexing is 1-based. + Index(i int32) (ScalarValue, error) + // Set sets the value at the given index. + // If the index is out of bounds, enough space is allocated to set the value. + // This matches the behavior of Postgres. + // All indexing is 1-based. + Set(i int32, v ScalarValue) error +} + +func newValidArr[T any](a []T) pgtype.Array[T] { + return pgtype.Array[T]{ + Elements: a, + Dims: []pgtype.ArrayDimension{{Length: int32(len(a)), LowerBound: 1}}, + Valid: true, + } +} + +// NewValue creates a new Value from the given any val. 
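+// For example (illustrative of the switch below):
+//
+//	NewValue(int64(1))      // -> *IntValue
+//	NewValue("hello")       // -> *TextValue
+//	NewValue([]*int64{nil}) // -> *IntArrayValue containing a single null element
+//	NewValue(nil)           // -> a null *TextValue; use newNull for a null of a specific type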
+func NewValue(v any) (Value, error) { + switch v := v.(type) { + case Value: + return v, nil + case int64: + return newInt(v), nil + case int: + return newInt(int64(v)), nil + case string: + return newText(v), nil + case bool: + return newBool(v), nil + case []byte: + return newBlob(v), nil + case *types.UUID: + return newUUID(v), nil + case types.UUID: + return newUUID(&v), nil + case *decimal.Decimal: + return newDec(v), nil + case decimal.Decimal: + return newDec(&v), nil + case []int64: + pgInts := make([]pgtype.Int8, len(v)) + for i, val := range v { + pgInts[i].Int64 = val + pgInts[i].Valid = true + } + + return &IntArrayValue{ + Array: newValidArr(pgInts), + }, nil + case []*int64: + pgInts := make([]pgtype.Int8, len(v)) + for i, val := range v { + if val == nil { + pgInts[i].Valid = false + } else { + pgInts[i].Int64 = *val + pgInts[i].Valid = true + } + } + return &IntArrayValue{ + Array: newValidArr(pgInts), + }, nil + case []int: + pgInts := make([]pgtype.Int8, len(v)) + for i, val := range v { + pgInts[i].Int64 = int64(val) + pgInts[i].Valid = true + } + + return &IntArrayValue{ + Array: newValidArr(pgInts), + }, nil + case []*int: + pgInts := make([]pgtype.Int8, len(v)) + for i, val := range v { + if val == nil { + pgInts[i].Valid = false + } else { + pgInts[i].Int64 = int64(*val) + pgInts[i].Valid = true + } + } + return &IntArrayValue{ + Array: newValidArr(pgInts), + }, nil + case []string: + pgTexts := make([]pgtype.Text, len(v)) + for i, val := range v { + pgTexts[i].String = val + pgTexts[i].Valid = true + } + + return &TextArrayValue{ + Array: newValidArr(pgTexts), + }, nil + case []*string: + pgTexts := make([]pgtype.Text, len(v)) + for i, val := range v { + if val == nil { + pgTexts[i].Valid = false + } else { + pgTexts[i].String = *val + pgTexts[i].Valid = true + } + } + + return &TextArrayValue{ + Array: newValidArr(pgTexts), + }, nil + case []bool: + pgBools := make([]pgtype.Bool, len(v)) + for i, val := range v { + pgBools[i].Bool = val + pgBools[i].Valid = true + } + + return &BoolArrayValue{ + Array: newValidArr(pgBools), + }, nil + case []*bool: + pgBools := make([]pgtype.Bool, len(v)) + for i, val := range v { + if val == nil { + pgBools[i].Valid = false + } else { + pgBools[i].Bool = *val + pgBools[i].Valid = true + } + } + + return &BoolArrayValue{ + Array: newValidArr(pgBools), + }, nil + case [][]byte: + pgBlobs := make([]*BlobValue, len(v)) + for i, val := range v { + pgBlobs[i] = newBlob(val) + } + + return &BlobArrayValue{ + Array: newValidArr(pgBlobs), + }, nil + case []*[]byte: + pgBlobs := make([]*BlobValue, len(v)) + for i, val := range v { + if val == nil { + pgBlobs[i] = &BlobValue{} + } else { + pgBlobs[i] = newBlob(*val) + } + } + + return &BlobArrayValue{ + Array: newValidArr(pgBlobs), + }, nil + case []*decimal.Decimal: + pgDecs := make([]pgtype.Numeric, len(v)) + for i, val := range v { + pgDecs[i] = pgTypeFromDec(val) + } + + return &DecimalArrayValue{ + Array: newValidArr(pgDecs), + }, nil + case []*types.UUID: + pgUUIDs := make([]pgtype.UUID, len(v)) + for i, val := range v { + if val == nil { + pgUUIDs[i].Valid = false + } else { + pgUUIDs[i].Bytes = *val + pgUUIDs[i].Valid = true + } + } + + return &UuidArrayValue{ + Array: newValidArr(pgUUIDs), + }, nil + case nil: + return &TextValue{ + Text: pgtype.Text{ + Valid: false, + }, + }, nil + default: + return nil, fmt.Errorf("unexpected type %T", v) + } +} + +func makeTypeErr(left, right Value) error { + return fmt.Errorf("%w: left: %s right: %s", ErrTypeMismatch, left.Type(), right.Type()) 
+} + +// makeArrTypeErr returns an error for when an array operation is performed on a non-array type. +func makeArrTypeErr(arrVal Value, newVal Value) error { + return fmt.Errorf("%w: cannot create an array of different types %s and %s", ErrArrayMixedTypes, arrVal.Type(), newVal.Type()) +} + +func newInt(i int64) *IntValue { + return &IntValue{ + Int8: pgtype.Int8{ + Int64: i, + Valid: true, + }, + } +} + +type IntValue struct { + pgtype.Int8 +} + +func (i *IntValue) Null() bool { + return !i.Valid +} + +func (v *IntValue) Compare(v2 Value, op ComparisonOp) (*BoolValue, error) { + if res, early := nullCmp(v, v2, op); early { + return res, nil + } + + val2, ok := v2.(*IntValue) + if !ok { + return nil, makeTypeErr(v, v2) + } + + var b bool + switch op { + case equal: + b = v.Int64 == val2.Int64 + case lessThan: + b = v.Int64 < val2.Int64 + case greaterThan: + b = v.Int64 > val2.Int64 + case isDistinctFrom: + b = v.Int64 != val2.Int64 + default: + return nil, fmt.Errorf("%w: cannot compare int with operator %s", ErrComparison, op) + } + + return newBool(b), nil +} + +// nullCmp is a helper function for comparing null values. +// It takes two values and a comparison operator. +// If the operator is IS or IS DISTINCT FROM, it will return a boolean value +// based on the comparison of the two values. +// If the operator is any other operator and either of the values is null, +// it will return a null value. +func nullCmp(a, b Value, op ComparisonOp) (*BoolValue, bool) { + // if it is isDistinctFrom or is, we should handle nulls + // Otherwise, if either is a null, we return early because we cannot compare + // a null value with a non-null value. + if op == isDistinctFrom { + if a.Null() && b.Null() { + return newBool(false), true + } + if a.Null() || b.Null() { + return newBool(true), true + } + + // otherwise, we let equality handle it + } + + if op == is { + if a.Null() && b.Null() { + return newBool(true), true + } + if a.Null() || b.Null() { + return newBool(false), true + } + } + + if a.Null() || b.Null() { + // the type of this null doesnt really matter. + return newNull(types.BoolType).(*BoolValue), true + } + + return nil, false +} + +// checks if any value is null. If so, it will return the null value. 
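+// It lets the Arithmetic implementations propagate SQL NULL semantics: an arithmetic
+// operation with a null operand short-circuits and returns that null.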
+func checkScalarNulls(v ...ScalarValue) (ScalarValue, bool) { + for _, val := range v { + if val.Null() { + return val, true + } + } + + return nil, false +} + +func (i *IntValue) Arithmetic(v ScalarValue, op ArithmeticOp) (ScalarValue, error) { + if res, early := checkScalarNulls(i, v); early { + return res, nil + } + + val2, ok := v.(*IntValue) + if !ok { + return nil, makeTypeErr(i, v) + } + + var r int64 + + switch op { + case add: + r = i.Int64 + val2.Int64 + case sub: + r = i.Int64 - val2.Int64 + case mul: + r = i.Int64 * val2.Int64 + case div: + if val2.Int64 == 0 { + return nil, fmt.Errorf("%w: cannot divide by zero", ErrArithmetic) + } + r = i.Int64 / val2.Int64 + case mod: + if val2.Int64 == 0 { + return nil, fmt.Errorf("%w: cannot modulo by zero", ErrArithmetic) + } + r = i.Int64 % val2.Int64 + default: + return nil, fmt.Errorf("%w: cannot perform arithmetic operation %s on type int", ErrArithmetic, op) + } + + return &IntValue{ + Int8: pgtype.Int8{ + Int64: r, + Valid: true, + }, + }, nil +} + +func (i *IntValue) Unary(op UnaryOp) (ScalarValue, error) { + if i.Null() { + return i, nil + } + + switch op { + case neg: + return &IntValue{Int8: pgtype.Int8{Int64: -i.Int64, Valid: true}}, nil + case not: + return nil, fmt.Errorf("%w: cannot apply logical NOT to an integer", ErrUnary) + case pos: + return i, nil + default: + return nil, fmt.Errorf("%w: unknown unary operator: %s", ErrUnary, op) + } +} + +func (i *IntValue) Type() *types.DataType { + return types.IntType +} + +func (i *IntValue) RawValue() any { + if !i.Valid { + return nil + } + + return i.Int64 +} + +func (i *IntValue) Array(v ...ScalarValue) (ArrayValue, error) { + pgtArr := make([]pgtype.Int8, len(v)+1) + pgtArr[0] = i.Int8 + for j, val := range v { + if intVal, ok := val.(*IntValue); !ok { + return nil, makeArrTypeErr(i, val) + } else { + pgtArr[j+1] = intVal.Int8 + } + } + + arr := newValidArr(pgtArr) + + return &IntArrayValue{ + Array: arr, + }, nil +} + +func (i *IntValue) Cast(t *types.DataType) (Value, error) { + if i.Null() { + return newNull(t), nil + } + + // we check for decimal first since type switching on it + // doesn't work, since it has precision and scale + if t.Name == types.DecimalStr { + if t.IsArray { + return nil, fmt.Errorf("%w: cannot cast int to decimal array", ErrCast) + } + + dec, err := decimal.NewFromString(fmt.Sprint(i.Int64)) + if err != nil { + return nil, castErr(err) + } + + return newDec(dec), nil + } + + switch *t { + case *types.IntType: + return i, nil + case *types.TextType: + return newText(fmt.Sprint(i.Int64)), nil + case *types.BoolType: + return newBool(i.Int64 != 0), nil + default: + return nil, fmt.Errorf("%w: cannot cast int to %s", ErrCast, t) + } +} + +// newNull creates a new null value of the given type. 
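+// For example, newNull(types.IntType) returns an *IntValue whose RawValue is nil, and
+// newNull(types.DecimalType) returns a null *DecimalValue; decimal types are matched by
+// name first because they carry precision and scale metadata.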
+func newNull(t *types.DataType) Value { + if t.Name == types.DecimalStr { + if t.IsArray { + return newNullDecArr() + } + + return newDec(nil) + } + + switch *t { + case *types.IntType: + return &IntValue{ + Int8: pgtype.Int8{ + Valid: false, + }, + } + case *types.TextType: + return &TextValue{ + Text: pgtype.Text{ + Valid: false, + }, + } + case *types.BoolType: + return &BoolValue{ + Bool: pgtype.Bool{ + Valid: false, + }, + } + case *types.BlobType: + return &BlobValue{} + case *types.UUIDType: + return &UUIDValue{ + UUID: pgtype.UUID{ + Valid: false, + }, + } + case *types.DecimalType: + return newDec(nil) + case *types.IntArrayType: + return &IntArrayValue{ + Array: pgtype.Array[pgtype.Int8]{Valid: false}, + } + case *types.TextArrayType: + return &TextArrayValue{ + Array: pgtype.Array[pgtype.Text]{Valid: false}, + } + case *types.BoolArrayType: + return &BoolArrayValue{ + Array: pgtype.Array[pgtype.Bool]{Valid: false}, + } + case *types.BlobArrayType: + return &BlobArrayValue{ + Array: pgtype.Array[*BlobValue]{Valid: false}, + } + case *types.UUIDArrayType: + return &UuidArrayValue{ + Array: pgtype.Array[pgtype.UUID]{Valid: false}, + } + default: + panic(fmt.Sprintf("tried to create null with unexpected type %s", t.Name)) + } +} + +func newText(s string) *TextValue { + return &TextValue{ + Text: pgtype.Text{ + String: s, + Valid: true, + }, + } +} + +type TextValue struct { + pgtype.Text +} + +func (t *TextValue) Null() bool { + return !t.Valid +} + +func (s *TextValue) Compare(v Value, op ComparisonOp) (*BoolValue, error) { + if res, early := nullCmp(s, v, op); early { + return res, nil + } + + val2, ok := v.(*TextValue) + if !ok { + return nil, makeTypeErr(s, v) + } + + var b bool + switch op { + case equal: + b = s.String == val2.String + case lessThan: + b = s.String < val2.String + case greaterThan: + b = s.String > val2.String + case isDistinctFrom: + b = s.String != val2.String + default: + return nil, fmt.Errorf("%w: cannot use comparison operator %s with type %s", ErrComparison, s.Type(), op) + } + + return newBool(b), nil +} + +func (s *TextValue) Arithmetic(v ScalarValue, op ArithmeticOp) (ScalarValue, error) { + if res, early := checkScalarNulls(s, v); early { + return res, nil + } + + val2, ok := v.(*TextValue) + if !ok { + return nil, makeTypeErr(s, v) + } + + if op == concat { + return newText(s.String + val2.String), nil + } + + return nil, fmt.Errorf("%w: cannot perform arithmetic operation %s on type string", ErrArithmetic, op) +} + +func (s *TextValue) Unary(op UnaryOp) (ScalarValue, error) { + return nil, fmt.Errorf("%w: cannot perform unary operation on string", ErrUnary) +} + +func (s *TextValue) Type() *types.DataType { + return types.TextType +} + +func (s *TextValue) RawValue() any { + if !s.Valid { + return nil + } + + return s.String +} + +func (s *TextValue) Array(v ...ScalarValue) (ArrayValue, error) { + pgtArr := make([]pgtype.Text, len(v)+1) + pgtArr[0] = s.Text + for j, val := range v { + if textVal, ok := val.(*TextValue); !ok { + return nil, makeArrTypeErr(s, val) + } else { + pgtArr[j+1] = textVal.Text + } + } + + arr := newValidArr(pgtArr) + + return &TextArrayValue{ + Array: arr, + }, nil +} + +func (s *TextValue) Cast(t *types.DataType) (Value, error) { + if s.Null() { + return newNull(t), nil + } + + if t.Name == types.DecimalStr { + if t.IsArray { + return nil, fmt.Errorf("%w: cannot cast text to decimal array", ErrCast) + } + + dec, err := decimal.NewFromString(s.String) + if err != nil { + return nil, castErr(err) + } + + return 
newDec(dec), nil + } + + switch *t { + case *types.IntType: + i, err := strconv.ParseInt(s.String, 10, 64) + if err != nil { + return nil, castErr(err) + } + + return newInt(int64(i)), nil + case *types.TextType: + return s, nil + case *types.BoolType: + b, err := strconv.ParseBool(s.String) + if err != nil { + return nil, castErr(err) + } + + return newBool(b), nil + case *types.UUIDType: + u, err := types.ParseUUID(s.String) + if err != nil { + return nil, castErr(err) + } + + return newUUID(u), nil + case *types.BlobType: + return newBlob([]byte(s.String)), nil + default: + return nil, fmt.Errorf("%w: cannot cast text to %s", ErrCast, t) + } +} + +func newBool(b bool) *BoolValue { + return &BoolValue{ + Bool: pgtype.Bool{ + Bool: b, + Valid: true, + }, + } +} + +type BoolValue struct { + pgtype.Bool +} + +func (b *BoolValue) Null() bool { + return !b.Valid +} + +func (b *BoolValue) Compare(v Value, op ComparisonOp) (*BoolValue, error) { + if res, early := nullCmp(b, v, op); early { + return res, nil + } + + val2, ok := v.(*BoolValue) + if !ok { + return nil, makeTypeErr(b, v) + } + + var b2 bool + switch op { + case equal: + b2 = b.Bool.Bool == val2.Bool.Bool + case isDistinctFrom: + b2 = b.Bool.Bool != val2.Bool.Bool + case lessThan: + b2 = !b.Bool.Bool && val2.Bool.Bool + case greaterThan: + b2 = b.Bool.Bool && !val2.Bool.Bool + case is: + b2 = b.Bool.Bool == val2.Bool.Bool + default: + return nil, fmt.Errorf("%w: cannot use comparison operator %s with type %s", ErrComparison, b.Type(), op) + } + + return newBool(b2), nil +} + +func (b *BoolValue) Arithmetic(v ScalarValue, op ArithmeticOp) (ScalarValue, error) { + return nil, fmt.Errorf("%w: cannot perform arithmetic operation on bool", ErrArithmetic) +} + +func (b *BoolValue) Unary(op UnaryOp) (ScalarValue, error) { + if b.Null() { + return b, nil + } + + switch op { + case not: + return newBool(!b.Bool.Bool), nil + case neg, pos: + return nil, fmt.Errorf("%w: cannot perform unary operation %s on bool", ErrUnary, op) + default: + return nil, fmt.Errorf("%w: unexpected operator id %s for bool", ErrUnary, op) + } +} + +func (b *BoolValue) Type() *types.DataType { + return types.BoolType +} + +func (b *BoolValue) RawValue() any { + if !b.Valid { + return nil + } + + return b.Bool.Bool +} + +func (b *BoolValue) Array(v ...ScalarValue) (ArrayValue, error) { + pgtArr := make([]pgtype.Bool, len(v)+1) + pgtArr[0] = b.Bool + for j, val := range v { + if boolVal, ok := val.(*BoolValue); !ok { + return nil, makeArrTypeErr(b, val) + } else { + pgtArr[j+1] = boolVal.Bool + } + } + + arr := newValidArr(pgtArr) + + return &BoolArrayValue{ + Array: arr, + }, nil +} + +func (b *BoolValue) Cast(t *types.DataType) (Value, error) { + if b.Null() { + return newNull(t), nil + } + + switch *t { + case *types.IntType: + if b.Bool.Bool { + return newInt(1), nil + } + + return newInt(0), nil + case *types.TextType: + return newText(strconv.FormatBool(b.Bool.Bool)), nil + case *types.BoolType: + return b, nil + default: + return nil, fmt.Errorf("%w: cannot cast bool to %s", ErrCast, t) + } +} + +func newBlob(b []byte) *BlobValue { + return &BlobValue{ + bts: b, + } +} + +type BlobValue struct { + bts []byte +} + +func (b *BlobValue) Null() bool { + return b.bts == nil +} + +func (b *BlobValue) Compare(v Value, op ComparisonOp) (*BoolValue, error) { + if res, early := nullCmp(b, v, op); early { + return res, nil + } + + val2, ok := v.(*BlobValue) + if !ok { + return nil, makeTypeErr(b, v) + } + + var b2 bool + switch op { + case equal: + b2 = string(b.bts) == 
string(val2.bts) + case isDistinctFrom: + b2 = string(b.bts) != string(val2.bts) + default: + return nil, fmt.Errorf("%w: cannot use comparison operator %s with type %s", ErrComparison, b.Type(), op) + } + + return newBool(b2), nil +} + +func (b *BlobValue) Arithmetic(v ScalarValue, op ArithmeticOp) (ScalarValue, error) { + if res, early := checkScalarNulls(b, v); early { + return res, nil + } + + val2, ok := v.(*BlobValue) + if !ok { + return nil, makeTypeErr(b, v) + } + + if op == concat { + return newBlob(append(b.bts, val2.bts...)), nil + } + + return nil, fmt.Errorf("%w: cannot perform arithmetic operation %s on blob", ErrArithmetic, op) +} + +func (b *BlobValue) Unary(op UnaryOp) (ScalarValue, error) { + return nil, fmt.Errorf("%w: cannot perform unary operation on blob", ErrUnary) +} + +func (b *BlobValue) Type() *types.DataType { + return types.BlobType +} + +func (b *BlobValue) RawValue() any { + return b.bts +} + +func (b *BlobValue) Array(v ...ScalarValue) (ArrayValue, error) { + pgtArr := make([]*BlobValue, len(v)+1) + pgtArr[0] = b + for j, val := range v { + if blobVal, ok := val.(*BlobValue); !ok { + return nil, makeArrTypeErr(b, val) + } else { + pgtArr[j+1] = blobVal + } + } + + arr := newValidArr(pgtArr) + + return &BlobArrayValue{ + Array: arr, + }, nil +} + +func (b *BlobValue) Cast(t *types.DataType) (Value, error) { + switch *t { + case *types.IntType: + i, err := strconv.ParseInt(string(b.bts), 10, 64) + if err != nil { + return nil, castErr(err) + } + + return newInt(i), nil + case *types.TextType: + return newText(string(b.bts)), nil + case *types.BlobType: + return b, nil + default: + return nil, fmt.Errorf("%w: cannot cast blob to %s", ErrCast, t) + } +} + +var _ pgtype.BytesScanner = (*BlobValue)(nil) +var _ pgtype.BytesValuer = (*BlobValue)(nil) + +// ScanBytes implements the pgtype.BytesScanner interface. +func (b *BlobValue) ScanBytes(src []byte) error { + if src == nil { + b.bts = nil + return nil + } + + // copy the src bytes into the prealloc bytes + b.bts = make([]byte, len(src)) + copy(b.bts, src) + return nil +} + +// BytesValue implements the pgtype.BytesValuer interface. 
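+// Returning a nil slice (with a nil error) is how a SQL NULL blob is encoded for pgtype.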
+func (b *BlobValue) BytesValue() ([]byte, error) { + if b.Null() { + return nil, nil + } + + return b.bts, nil +} + +func newUUID(u *types.UUID) *UUIDValue { + if u == nil { + return &UUIDValue{ + UUID: pgtype.UUID{ + Valid: false, + }, + } + } + return &UUIDValue{ + UUID: pgtype.UUID{ + Bytes: *u, + Valid: true, + }, + } +} + +type UUIDValue struct { + pgtype.UUID +} + +func (u *UUIDValue) Null() bool { + return !u.Valid +} + +func (u *UUIDValue) Compare(v Value, op ComparisonOp) (*BoolValue, error) { + if res, early := nullCmp(u, v, op); early { + return res, nil + } + + val2, ok := v.(*UUIDValue) + if !ok { + return nil, makeTypeErr(u, v) + } + + var b bool + switch op { + case equal: + b = u.Bytes == val2.Bytes + case isDistinctFrom: + b = u.Bytes != val2.Bytes + default: + return nil, fmt.Errorf("%w: cannot use comparison operator %s with type %s", ErrComparison, u.Type(), op) + } + + return newBool(b), nil +} + +func (u *UUIDValue) Arithmetic(v ScalarValue, op ArithmeticOp) (ScalarValue, error) { + return nil, fmt.Errorf("%w: cannot perform arithmetic operation on uuid", ErrArithmetic) +} + +func (u *UUIDValue) Unary(op UnaryOp) (ScalarValue, error) { + return nil, fmt.Errorf("%w: cannot perform unary operation on uuid", ErrUnary) +} + +func (u *UUIDValue) Type() *types.DataType { + return types.UUIDType +} + +func (u *UUIDValue) RawValue() any { + if !u.Valid { + return nil + } + + // kwil always handled uuids as pointers + u2 := types.UUID(u.Bytes) + return &u2 +} + +func (u *UUIDValue) Array(v ...ScalarValue) (ArrayValue, error) { + pgtArr := make([]pgtype.UUID, len(v)+1) + pgtArr[0] = u.UUID + for j, val := range v { + if uuidVal, ok := val.(*UUIDValue); !ok { + return nil, makeArrTypeErr(u, val) + } else { + pgtArr[j+1] = uuidVal.UUID + } + } + + arr := newValidArr(pgtArr) + + return &UuidArrayValue{ + Array: arr, + }, nil +} + +func (u *UUIDValue) Cast(t *types.DataType) (Value, error) { + if u.Null() { + return newNull(t), nil + } + + switch *t { + case *types.TextType: + return newText(types.UUID(u.Bytes).String()), nil + case *types.BlobType: + return newBlob(u.Bytes[:]), nil + case *types.UUIDType: + return u, nil + default: + return nil, fmt.Errorf("%w: cannot cast uuid to %s", ErrCast, t) + } +} + +func pgTypeFromDec(d *decimal.Decimal) pgtype.Numeric { + if d == nil { + return pgtype.Numeric{ + Valid: false, + } + } + if d.NaN() { + return pgtype.Numeric{ + NaN: true, + Valid: true, + } + } + + bigint := d.BigInt() + // cockroach's APD library tracks negativity outside of the BigInt, + // so here we need to check if the decimal is negative, and if so, + // apply it to the big int we are putting into the pgtype. 
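+	// For example, -1.50 is stored as coefficient 150 with exponent -2 plus a negative
+	// flag, so the coefficient is negated here before being handed to pgtype.Numeric.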
+ if d.IsNegative() { + bigint = bigint.Neg(bigint) + } + + return pgtype.Numeric{ + Int: bigint, + Exp: d.Exp(), + Valid: true, + } +} + +func decFromPgType(n pgtype.Numeric) (*decimal.Decimal, error) { + if n.NaN { + return decimal.NewNaN(), nil + } + if !n.Valid { + // we should never get here, but just in case + return nil, fmt.Errorf("internal bug: null decimal") + } + + return decimal.NewFromBigInt(n.Int, int32(n.Exp)) +} + +func newDec(d *decimal.Decimal) *DecimalValue { + if d == nil { + return &DecimalValue{ + Numeric: pgtype.Numeric{ + Valid: false, + }, + } + } + + return &DecimalValue{ + Numeric: pgTypeFromDec(d), + } +} + +type DecimalValue struct { + pgtype.Numeric +} + +func (d *DecimalValue) Null() bool { + return !d.Valid +} + +func (d *DecimalValue) dec() (*decimal.Decimal, error) { + if d.NaN { + return nil, fmt.Errorf("NaN") + } + if !d.Valid { + // we should never get here, but just in case + return nil, fmt.Errorf("internal bug: null decimal") + } + + d2, err := decimal.NewFromBigInt(d.Int, int32(d.Exp)) + if err != nil { + return nil, err + } + + return d2, nil +} + +func (d *DecimalValue) Compare(v Value, op ComparisonOp) (*BoolValue, error) { + if res, early := nullCmp(d, v, op); early { + return res, nil + } + + val2, ok := v.(*DecimalValue) + if !ok { + return nil, makeTypeErr(d, v) + } + + dec1, err := d.dec() + if err != nil { + return nil, err + } + + dec2, err := val2.dec() + if err != nil { + return nil, err + } + + res, err := dec1.Cmp(dec2) + if err != nil { + return nil, err + } + + return cmpIntegers(res, 0, op) +} + +func (d *DecimalValue) Arithmetic(v ScalarValue, op ArithmeticOp) (ScalarValue, error) { + if res, early := checkScalarNulls(d, v); early { + return res, nil + } + + // we check they are both decimal, but we don't check the precision and scale + // because our decimal library will calculate with higher precision and scale anyways. + if v.Type().Name != d.Type().Name { + return nil, makeTypeErr(d, v) + } + + val2, ok := v.(*DecimalValue) + if !ok { + return nil, makeTypeErr(d, v) + } + + dec1, err := d.dec() + if err != nil { + return nil, err + } + + dec2, err := val2.dec() + if err != nil { + return nil, err + } + + var d2 *decimal.Decimal + switch op { + case add: + d2, err = decimal.Add(dec1, dec2) + case sub: + d2, err = decimal.Sub(dec1, dec2) + case mul: + d2, err = decimal.Mul(dec1, dec2) + case div: + d2, err = decimal.Div(dec1, dec2) + case mod: + d2, err = decimal.Mod(dec1, dec2) + default: + return nil, fmt.Errorf("%w: unexpected operator id %d for decimal", ErrArithmetic, op) + } + if err != nil { + return nil, err + } + + return newDec(d2), nil +} + +func (d *DecimalValue) Unary(op UnaryOp) (ScalarValue, error) { + if d.Null() { + return d, nil + } + + switch op { + case neg: + dec, err := d.dec() + if err != nil { + return nil, err + } + + err = dec.Neg() + if err != nil { + return nil, err + } + + return newDec(dec), nil + case pos: + return d, nil + default: + return nil, fmt.Errorf("%w: unexpected operator id %s for decimal", ErrUnary, op) + } +} + +func (d *DecimalValue) Type() *types.DataType { + // we will try to get the precision and scale from the decimal, + // but if we can't, we will return the default. 
+ dec, err := d.dec() + if err != nil { + return types.DecimalType + } + + res, err := types.NewDecimalType(dec.Precision(), dec.Scale()) + if err != nil { + return types.DecimalType + } + + return res +} + +func (d *DecimalValue) RawValue() any { + if !d.Valid { + return nil + } + dec, err := d.dec() + if err != nil { + return nil + } + + return dec +} + +func (d *DecimalValue) Array(v ...ScalarValue) (ArrayValue, error) { + pgtArr := make([]pgtype.Numeric, len(v)+1) + pgtArr[0] = d.Numeric + for j, val := range v { + if decVal, ok := val.(*DecimalValue); !ok { + return nil, makeArrTypeErr(d, val) + } else { + pgtArr[j+1] = decVal.Numeric + } + } + + arr := newValidArr(pgtArr) + + return &DecimalArrayValue{ + Array: arr, + }, nil +} + +func (d *DecimalValue) Cast(t *types.DataType) (Value, error) { + if t.Name == types.DecimalStr { + if t.IsArray { + return nil, fmt.Errorf("%w: cannot cast decimal to decimal array", ErrCast) + } + + // if no metadata, then its a noop + + if !t.HasMetadata() { + return d, nil + } + + // otherwise, we need to alter the precision and scale + + dec, err := d.dec() + if err != nil { + return nil, castErr(err) + } + + err = dec.SetPrecisionAndScale(t.Metadata[0], t.Metadata[1]) + if err != nil { + return nil, castErr(err) + } + + return newDec(dec), nil + } + + switch *t { + case *types.IntType: + dec, err := d.dec() + if err != nil { + return nil, castErr(err) + } + + i, err := dec.Int64() + if err != nil { + return nil, castErr(err) + } + + return newInt(i), nil + case *types.TextType: + dec, err := d.dec() + if err != nil { + return nil, castErr(err) + } + + return newText(dec.String()), nil + default: + return nil, fmt.Errorf("%w: cannot cast decimal to %s", ErrCast, t) + } +} + +func newIntArr(v []*int64) *IntArrayValue { + pgInts := make([]pgtype.Int8, len(v)) + for i, val := range v { + if val == nil { + pgInts[i].Valid = false + } else { + pgInts[i].Int64 = *val + pgInts[i].Valid = true + } + } + + return &IntArrayValue{ + Array: newValidArr(pgInts), + } +} + +type IntArrayValue struct { + pgtype.Array[pgtype.Int8] +} + +func (a *IntArrayValue) Null() bool { + return !a.Valid +} + +func (a *IntArrayValue) Compare(v Value, op ComparisonOp) (*BoolValue, error) { + return cmpArrs(a, v, op) +} + +func (a *IntArrayValue) Len() int32 { + return int32(len(a.Elements)) +} + +func (a *IntArrayValue) Index(i int32) (ScalarValue, error) { + if i < 1 || i > a.Len() { + return nil, fmt.Errorf("index out of bounds") + } + + return &IntValue{a.Elements[i-1]}, nil // indexing is 1-based +} + +// allocArr checks that the array has index i, and if not, it allocates enough space to set the value. +func allocArr[T any](p *pgtype.Array[T], i int32) error { + if i < 1 { + return fmt.Errorf("index out of bounds") + } + + if i > int32(len(p.Elements)) { + // Allocate enough space to set the value. + // This matches the behavior of Postgres. + newVal := make([]T, i) + copy(newVal, p.Elements) + p.Elements = newVal + p.Dims[0] = pgtype.ArrayDimension{ + Length: int32(i), + LowerBound: 1, + } + } + + return nil +} + +func (a *IntArrayValue) Set(i int32, v ScalarValue) error { + // we do not need to worry about nulls here. Postgres will automatically make an array + // not null if we set a value in it. 
+ // to test it: + // CREATE TABLE test (arr int[]); + // INSERT INTO test VALUES (NULL); + // UPDATE test SET arr[1] = 1; + err := allocArr(&a.Array, i) + if err != nil { + return err + } + + val, ok := v.(*IntValue) + if !ok { + return fmt.Errorf("cannot set non-int value in int array") + } + + a.Elements[i-1] = val.Int8 + return nil +} + +func (a *IntArrayValue) Type() *types.DataType { + return types.IntArrayType +} + +func (a *IntArrayValue) RawValue() any { + if !a.Valid { + return nil + } + + var res []*int64 + for _, v := range a.Elements { + if v.Valid { + res = append(res, &v.Int64) + } else { + res = append(res, nil) + } + } + + return res +} + +func (a *IntArrayValue) Cast(t *types.DataType) (Value, error) { + if a.Null() { + return newNull(t), nil + } + + if t.Name == types.DecimalStr { + if !t.IsArray { + return nil, fmt.Errorf("%w: cannot cast int array to decimal", ErrCast) + } + + return castArrWithPtr(a, func(i int64) (*decimal.Decimal, error) { + if !t.HasMetadata() { + return decimal.NewFromString(strconv.FormatInt(i, 10)) + } + + return decimal.NewExplicit(strconv.FormatInt(i, 10), t.Metadata[0], t.Metadata[1]) + }, newDecimalArrayValue) + } + + switch *t { + case *types.IntArrayType: + return a, nil + case *types.TextArrayType: + return castArr(a, func(i int64) (string, error) { return strconv.FormatInt(i, 10), nil }, newTextArrayValue) + case *types.BoolArrayType: + return castArr(a, func(i int64) (bool, error) { return i != 0, nil }, newBoolArrayValue) + default: + return nil, fmt.Errorf("%w: cannot cast int array to %s", ErrCast, t) + } +} + +func newTextArrayValue(s []*string) *TextArrayValue { + vals := make([]pgtype.Text, len(s)) + for i, v := range s { + if v == nil { + vals[i] = pgtype.Text{Valid: false} + } else { + vals[i] = pgtype.Text{String: *v, Valid: true} + } + } + + return &TextArrayValue{ + Array: newValidArr(vals), + } +} + +type TextArrayValue struct { + pgtype.Array[pgtype.Text] +} + +func (a *TextArrayValue) Null() bool { + return !a.Valid +} + +func (a *TextArrayValue) Compare(v Value, op ComparisonOp) (*BoolValue, error) { + return cmpArrs(a, v, op) +} + +func (a *TextArrayValue) Len() int32 { + return int32(len(a.Elements)) +} + +func (a *TextArrayValue) Index(i int32) (ScalarValue, error) { + if i < 1 || i > a.Len() { + return nil, fmt.Errorf("index out of bounds") + } + + return &TextValue{a.Elements[i-1]}, nil +} + +func (a *TextArrayValue) Set(i int32, v ScalarValue) error { + err := allocArr(&a.Array, i) + if err != nil { + return err + } + + val, ok := v.(*TextValue) + if !ok { + return fmt.Errorf("cannot set non-text value in text array") + } + + a.Elements[i-1] = val.Text + return nil +} + +func (a *TextArrayValue) Type() *types.DataType { + return types.TextArrayType +} + +func (a *TextArrayValue) RawValue() any { + if !a.Valid { + return nil + } + + res := make([]*string, len(a.Elements)) + for i, v := range a.Elements { + if v.Valid { + res[i] = &v.String + } + } + + return res +} + +func (a *TextArrayValue) Cast(t *types.DataType) (Value, error) { + if t.Name == types.DecimalStr { + if !t.IsArray { + return nil, fmt.Errorf("%w: cannot cast text array to decimal", ErrCast) + } + + return castArrWithPtr(a, func(s string) (*decimal.Decimal, error) { + if !t.HasMetadata() { + return decimal.NewFromString(s) + } + + return decimal.NewExplicit(s, t.Metadata[0], t.Metadata[1]) + }, newDecimalArrayValue) + } + + switch *t { + case *types.IntArrayType: + return castArr(a, func(s string) (int64, error) { return strconv.ParseInt(s, 10, 64) }, 
newIntArr) + case *types.BoolArrayType: + return castArr(a, strconv.ParseBool, newBoolArrayValue) + case *types.UUIDArrayType: + return castArrWithPtr(a, types.ParseUUID, newUUIDArrayValue) + case *types.TextArrayType: + return a, nil + case *types.BlobArrayType: + return castArr(a, func(s string) ([]byte, error) { return []byte(s), nil }, newBlobArrayValue) + default: + return nil, fmt.Errorf("%w: cannot cast text array to %s", ErrCast, t) + } +} + +// castArr casts an array of one type to an array of another type. +// Generics: +// A is the current scalar Kwil type +// B is the desired scalar Kwil type +// C is the current array Kwil type +// D is the desired array Kwil type +// Params: +// c: the current array +// get: a function that converts the current array's scalar type to the desired scalar type +// newArr: a function that creates a new array of the desired type +func castArr[A any, B any, C ArrayValue, D ArrayValue](c C, get func(a A) (B, error), newArr func([]*B) D) (D, error) { + return castArrWithPtr(c, func(b A) (*B, error) { + res, err := get(b) + if err != nil { + return nil, err + } + + return &res, nil + }, newArr) +} + +// castArrWithPtr casts an array of one type to an array of another type. +// It expects that the get function will return a pointer to the desired type. +func castArrWithPtr[A any, B any, C ArrayValue, D ArrayValue](c C, get func(a A) (*B, error), newArr func([]*B) D) (D, error) { + res := make([]*B, c.Len()) + for i := range c.Len() { + v, err := c.Index(i + 1) // SQL Indexes are 1-based + if err != nil { + return *new(D), castErr(err) + } + + // if the value is nil, we dont need to do anything; a nil value is already + // in the array + if !v.Null() { + raw, ok := v.RawValue().(A) + if !ok { + // should never happen unless I messed up the usage of generics or implementation + // of RawValue + return *new(D), castErr(fmt.Errorf("internal bug: unexpected type %T", v.RawValue())) + } + + res[i], err = get(raw) + if err != nil { + return *new(D), castErr(err) + } + } + } + + return newArr(res), nil +} + +func newBoolArrayValue(b []*bool) *BoolArrayValue { + vals := make([]pgtype.Bool, len(b)) + for i, v := range b { + if v == nil { + vals[i] = pgtype.Bool{Valid: false} + } else { + vals[i] = pgtype.Bool{Bool: *v, Valid: true} + } + } + + return &BoolArrayValue{ + Array: newValidArr(vals), + } +} + +type BoolArrayValue struct { + pgtype.Array[pgtype.Bool] +} + +func (a *BoolArrayValue) Null() bool { + return !a.Valid +} + +func (a *BoolArrayValue) Compare(v Value, op ComparisonOp) (*BoolValue, error) { + return cmpArrs(a, v, op) +} + +func (a *BoolArrayValue) Len() int32 { + return int32(len(a.Elements)) +} + +func (a *BoolArrayValue) Index(i int32) (ScalarValue, error) { + if i < 1 || i > a.Len() { + return nil, fmt.Errorf("index out of bounds") + } + + return &BoolValue{a.Elements[i-1]}, nil +} + +func (a *BoolArrayValue) Set(i int32, v ScalarValue) error { + err := allocArr(&a.Array, i) + if err != nil { + return err + } + + val, ok := v.(*BoolValue) + if !ok { + return fmt.Errorf("cannot set non-bool value in bool array") + } + + a.Elements[i-1] = val.Bool + return nil +} + +func (a *BoolArrayValue) Type() *types.DataType { + return types.BoolArrayType +} + +func (a *BoolArrayValue) RawValue() any { + if !a.Valid { + return nil + } + + barr := make([]*bool, len(a.Elements)) + for i, v := range a.Elements { + if v.Valid { + barr[i] = &v.Bool + } + } + + return barr +} + +func (a *BoolArrayValue) Cast(t *types.DataType) (Value, error) { + switch *t { + 
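+ // a bool array can be cast to a text array, an int array (true -> 1, false -> 0), or returned as-is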
case *types.TextArrayType: + return castArr(a, func(b bool) (string, error) { return strconv.FormatBool(b), nil }, newTextArrayValue) + case *types.IntArrayType: + return castArr(a, func(b bool) (int64, error) { + if b { + return 1, nil + } else { + return 0, nil + } + }, newIntArr) + case *types.BoolArrayType: + return a, nil + default: + return nil, fmt.Errorf("%w: cannot cast bool array to %s", ErrCast, t) + } +} + +func newNullDecArr() *DecimalArrayValue { + return &DecimalArrayValue{ + Array: pgtype.Array[pgtype.Numeric]{Valid: false}, + } +} + +func newDecimalArrayValue(d []*decimal.Decimal) *DecimalArrayValue { + vals := make([]pgtype.Numeric, len(d)) + for i, v := range d { + if v == nil { + vals[i] = pgtype.Numeric{Valid: false} + } else { + vals[i] = pgTypeFromDec(v) + } + } + + return &DecimalArrayValue{ + Array: newValidArr(vals), + } +} + +type DecimalArrayValue struct { + pgtype.Array[pgtype.Numeric] +} + +func (a *DecimalArrayValue) Null() bool { + return !a.Valid +} + +func (a *DecimalArrayValue) Compare(v Value, op ComparisonOp) (*BoolValue, error) { + return cmpArrs(a, v, op) +} + +// cmpArrs compares two Kwil array types. +func cmpArrs[M ArrayValue](a M, b Value, op ComparisonOp) (*BoolValue, error) { + if res, early := nullCmp(a, b, op); early { + return res, nil + } + + val2, ok := b.(M) + if !ok { + return nil, makeTypeErr(a, b) + } + + isEqual := func(a, b ArrayValue) (isEq bool, err error) { + if a.Len() != b.Len() { + return false, nil + } + + for i := int32(1); i <= a.Len(); i++ { + v1, err := a.Index(i) + if err != nil { + return false, err + } + + v2, err := b.Index(i) + if err != nil { + return false, err + } + + if v1.Null() && v2.Null() { + continue + } + + if v1.Null() || v2.Null() { + return false, nil + } + + res, err := v1.Compare(v2, equal) + if err != nil { + return false, err + } + + if !res.Bool.Bool { + return false, nil + } + } + + return true, nil + } + + eq, err := isEqual(a, val2) + if err != nil { + return nil, err + } + + switch op { + case equal: + return newBool(eq), nil + case isDistinctFrom: + return newBool(!eq), nil + default: + return nil, fmt.Errorf("%w: only =, IS DISTINCT FROM are supported for array comparison", ErrComparison) + } +} + +func (a *DecimalArrayValue) Len() int32 { + return int32(len(a.Elements)) +} + +func (a *DecimalArrayValue) Index(i int32) (ScalarValue, error) { + if i < 1 || i > a.Len() { + return nil, fmt.Errorf("index out of bounds") + } + + return &DecimalValue{Numeric: a.Elements[i-1]}, nil +} + +func (a *DecimalArrayValue) Set(i int32, v ScalarValue) error { + err := allocArr(&a.Array, i) + if err != nil { + return err + } + + val, ok := v.(*DecimalValue) + if !ok { + return fmt.Errorf("cannot set non-decimal value in decimal array") + } + + a.Elements[i-1] = val.Numeric + return nil +} + +func (a *DecimalArrayValue) Type() *types.DataType { + return types.DecimalArrayType +} + +func (a *DecimalArrayValue) RawValue() any { + if !a.Valid { + return nil + } + + res := make([]*decimal.Decimal, len(a.Elements)) + for i, v := range a.Elements { + if v.Valid { + dec, err := decFromPgType(v) + if err != nil { + panic(err) + } + + res[i] = dec + } + } + + return res +} + +func (a *DecimalArrayValue) Cast(t *types.DataType) (Value, error) { + if t.Name == types.DecimalStr { + if !t.IsArray { + return nil, fmt.Errorf("%w: cannot cast decimal array to decimal", ErrCast) + } + + // if no metadata, then its a noop + if !t.HasMetadata() { + return a, nil + } + + // otherwise, we need to alter the precision and scale + res 
:= make([]*decimal.Decimal, a.Len()) + for i := int32(1); i <= a.Len(); i++ { + v, err := a.Index(i) + if err != nil { + return nil, err + } + + dec, err := v.(*DecimalValue).dec() + if err != nil { + return nil, err + } + + err = dec.SetPrecisionAndScale(t.Metadata[0], t.Metadata[1]) + if err != nil { + return nil, err + } + + res[i-1] = dec + } + + return newDecimalArrayValue(res), nil + } + + switch *t { + case *types.TextArrayType: + return castArr(a, func(d *decimal.Decimal) (string, error) { return d.String(), nil }, newTextArrayValue) + case *types.IntArrayType: + return castArr(a, func(d *decimal.Decimal) (int64, error) { return d.Int64() }, newIntArr) + case *types.DecimalArrayType: + return a, nil + default: + return nil, fmt.Errorf("%w: cannot cast decimal array to %s", ErrCast, t) + } +} + +func newBlobArrayValue(b []*[]byte) *BlobArrayValue { + vals := make([]*BlobValue, len(b)) + for i, v := range b { + if v == nil { + vals[i] = &BlobValue{bts: nil} + } else { + vals[i] = &BlobValue{bts: *v} + } + } + + return &BlobArrayValue{ + Array: newValidArr(vals), + } +} + +type BlobArrayValue struct { + // we embed BlobValue because unlike other types, there is no native pgtype embedded within + // blob value that allows pgx to scan the value into the struct. + pgtype.Array[*BlobValue] +} + +func (a *BlobArrayValue) Null() bool { + return !a.Valid +} + +func (a *BlobArrayValue) Compare(v Value, op ComparisonOp) (*BoolValue, error) { + return cmpArrs(a, v, op) +} + +func (a *BlobArrayValue) Len() int32 { + return int32(len(a.Elements)) +} + +func (a *BlobArrayValue) Index(i int32) (ScalarValue, error) { + if i < 1 || i > a.Len() { + return nil, fmt.Errorf("index out of bounds") + } + + return a.Elements[i-1], nil +} + +func (a *BlobArrayValue) Set(i int32, v ScalarValue) error { + err := allocArr(&a.Array, i) + if err != nil { + return err + } + + val, ok := v.(*BlobValue) + if !ok { + return fmt.Errorf("cannot set non-blob value in blob array") + } + + a.Elements[i-1] = val + return nil +} + +func (a *BlobArrayValue) Type() *types.DataType { + return types.BlobArrayType +} + +func (a *BlobArrayValue) RawValue() any { + if !a.Valid { + return nil + } + + res := make([][]byte, len(a.Elements)) + for i, v := range a.Elements { + if v != nil { + res[i] = make([]byte, len(v.bts)) + copy(res[i], v.bts) + } + } + + return res +} + +func (a *BlobArrayValue) Cast(t *types.DataType) (Value, error) { + switch *t { + case *types.TextArrayType: + return castArr(a, func(b []byte) (string, error) { return string(b), nil }, newTextArrayValue) + case *types.BlobArrayType: + return a, nil + default: + return nil, fmt.Errorf("%w: cannot cast blob array to %s", ErrCast, t) + } +} + +func newUUIDArrayValue(u []*types.UUID) *UuidArrayValue { + vals := make([]pgtype.UUID, len(u)) + for i, v := range u { + if v == nil { + vals[i] = pgtype.UUID{Valid: false} + } else { + vals[i] = pgtype.UUID{Bytes: *v, Valid: true} + } + } + + return &UuidArrayValue{ + Array: newValidArr(vals), + } +} + +type UuidArrayValue struct { + pgtype.Array[pgtype.UUID] +} + +func (a *UuidArrayValue) Null() bool { + return !a.Valid +} + +func (a *UuidArrayValue) Compare(v Value, op ComparisonOp) (*BoolValue, error) { + return cmpArrs(a, v, op) +} + +func (a *UuidArrayValue) Len() int32 { + return int32(len(a.Elements)) +} + +func (a *UuidArrayValue) Index(i int32) (ScalarValue, error) { + if i < 1 || i > a.Len() { + return nil, fmt.Errorf("index out of bounds") + } + + return &UUIDValue{a.Elements[i-1]}, nil +} + +func (a 
*UuidArrayValue) Set(i int32, v ScalarValue) error { + err := allocArr(&a.Array, i) + if err != nil { + return err + } + + val, ok := v.(*UUIDValue) + if !ok { + return fmt.Errorf("cannot set non-uuid value in uuid array") + } + + a.Elements[i-1] = val.UUID + return nil +} + +func (a *UuidArrayValue) Type() *types.DataType { + return types.UUIDArrayType +} + +func (a *UuidArrayValue) RawValue() any { + if !a.Valid { + return nil + } + + res := make([]*types.UUID, len(a.Elements)) + for i, v := range a.Elements { + if v.Valid { + u := types.UUID(v.Bytes) + res[i] = &u + } + } + + return res +} + +func (a *UuidArrayValue) Cast(t *types.DataType) (Value, error) { + switch *t { + case *types.TextArrayType: + return castArr(a, func(u *types.UUID) (string, error) { return u.String(), nil }, newTextArrayValue) + case *types.UUIDArrayType: + return a, nil + case *types.BlobArrayType: + return castArr(a, func(u *types.UUID) ([]byte, error) { return u.Bytes(), nil }, newBlobArrayValue) + default: + return nil, fmt.Errorf("%w: cannot cast uuid array to %s", ErrCast, t) + } +} + +func newRecordValue() *RecordValue { + return &RecordValue{ + Fields: make(map[string]Value), + } +} + +// RecordValue is a special type that represents a row in a table. +type RecordValue struct { + Fields map[string]Value + Order []string +} + +func (r *RecordValue) Null() bool { + return len(r.Fields) == 0 +} + +func (r *RecordValue) AddValue(k string, v Value) error { + _, ok := r.Fields[k] + if ok { + // protecting against this since it would detect non-determinism, + // but our query planner should already protect against this + return fmt.Errorf("record already has field %s", k) + } + + r.Fields[k] = v + r.Order = append(r.Order, k) + return nil +} + +func (o *RecordValue) Compare(v Value, op ComparisonOp) (*BoolValue, error) { + if res, early := nullCmp(o, v, op); early { + return res, nil + } + + val2, ok := v.(*RecordValue) + if !ok { + return nil, makeTypeErr(o, v) + } + + isSame := true + if len(o.Fields) != len(val2.Fields) { + isSame = false + } + + if isSame { + for i, field := range o.Order { + v2, ok := val2.Fields[field] + if !ok { + isSame = false + break + } + + eq, err := o.Fields[field].Compare(v2, equal) + if err != nil { + return nil, err + } + + if !eq.RawValue().(bool) { + isSame = false + break + } + + // check the order + if field != val2.Order[i] { + isSame = false + break + } + } + } + + switch op { + case equal: + return newBool(isSame), nil + default: + return nil, fmt.Errorf("%w: cannot use comparison operator %s with record type", ErrComparison, op) + } +} + +func (o *RecordValue) Type() *types.DataType { + return &types.DataType{ + Name: "record", // special type that is not in the types package + } +} + +func (o *RecordValue) RawValue() any { + return o.Fields +} + +func (o *RecordValue) Cast(t *types.DataType) (Value, error) { + return nil, fmt.Errorf("%w: cannot cast record to %s", ErrCast, t) +} + +func cmpIntegers(a, b int, op ComparisonOp) (*BoolValue, error) { + switch op { + case equal: + return newBool(a == b), nil + case lessThan: + return newBool(a < b), nil + case greaterThan: + return newBool(a > b), nil + case isDistinctFrom: + return newBool(a != b), nil + default: + return nil, fmt.Errorf("%w: cannot use comparison operator %s with numeric types", ErrComparison, op) + } +} + +// valueToString converts a value to a string. 
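+// Arrays are serialized by converting each element to a string and joining the elements with commas
+// (e.g. an int array holding 1, 2, and 3 becomes "1,2,3"); parseValue and parseArray below perform the inverse.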
+func valueToString(v Value) (string, error) { + if v.Null() { + return "NULL", nil + } + + array, ok := v.(ArrayValue) + if ok { + // we will convert each element to a string and join them with a comma + strs := make([]string, array.Len()) + for i := int32(1); i <= array.Len(); i++ { + val, err := array.Index(i) + if err != nil { + return "", err + } + + str, err := valueToString(val) + if err != nil { + return "", err + } + + strs[i-1] = str + } + + return strings.Join(strs, ","), nil + } + + switch val := v.(type) { + case *TextValue: + return val.Text.String, nil + case *IntValue: + return strconv.FormatInt(val.Int64, 10), nil + case *BoolValue: + return strconv.FormatBool(val.Bool.Bool), nil + case *UUIDValue: + return types.UUID(val.UUID.Bytes).String(), nil + case *DecimalValue: + dec, err := val.dec() + if err != nil { + return "", err + } + + return dec.String(), nil + case *BlobValue: + return string(val.bts), nil + case *RecordValue: + return "", fmt.Errorf("cannot convert record to string") + default: + return "", fmt.Errorf("unexpected type %T", v) + } +} + +// parseValue parses a string into a value. +func parseValue(s string, t *types.DataType) (Value, error) { + if s == "NULL" { + return newNull(t), nil + } + + if t.IsArray { + return parseArray(s, t) + } + + if t.Name == types.DecimalStr { + dec, err := decimal.NewFromString(s) + if err != nil { + return nil, err + } + + return newDec(dec), nil + } + + switch *t { + case *types.TextType: + return newText(s), nil + case *types.IntType: + i, err := strconv.ParseInt(s, 10, 64) + if err != nil { + return nil, err + } + + return newInt(i), nil + case *types.BoolType: + b, err := strconv.ParseBool(s) + if err != nil { + return nil, err + } + + return newBool(b), nil + case *types.UUIDType: + u, err := types.ParseUUID(s) + if err != nil { + return nil, err + } + + return newUUID(u), nil + case *types.BlobType: + return newBlob([]byte(s)), nil + default: + return nil, fmt.Errorf("unexpected type %s", t) + } +} + +// parseArray parses a string into an array value. +func parseArray(s string, t *types.DataType) (ArrayValue, error) { + if s == "NULL" { + return newNull(t).(ArrayValue), nil + } + + // we will parse the string into individual values and then cast them to the + // correct type + strs := strings.Split(s, ",") + fields := make([]ScalarValue, len(strs)) + scalarType := t.Copy() + scalarType.IsArray = false + for i, str := range strs { + val, err := parseValue(str, scalarType) + if err != nil { + return nil, err + } + + scalar, ok := val.(ScalarValue) + if !ok { + return nil, fmt.Errorf("unexpected type %T", val) + } + + fields[i] = scalar + } + + if len(fields) == 0 { + // if 0-length, then we return a new zero-length array + zv, err := NewZeroValue(t) + if err != nil { + return nil, err + } + + zva, ok := zv.(ArrayValue) + if !ok { + return nil, fmt.Errorf("unexpected type %T", zv) + } + + return zva, nil + } + + arrType, err := fields[0].Array(fields[1:]...) 
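+ // Array fails if the provided elements are not all of the same scalar type (e.g. mixing ints and text)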
+ if err != nil { + return nil, err + } + + return arrType, nil +} diff --git a/node/engine/interpreter/values_test.go b/node/engine/interpreter/values_test.go new file mode 100644 index 000000000..3dc0e3d1f --- /dev/null +++ b/node/engine/interpreter/values_test.go @@ -0,0 +1,830 @@ +package interpreter + +import ( + "testing" + + "github.com/kwilteam/kwil-db/core/types" + "github.com/kwilteam/kwil-db/core/types/decimal" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func Test_Arithmetic(t *testing.T) { + type testcase struct { + name string + a any + b any + add any + sub any + mul any + div any + mod any + concat any + } + + tests := []testcase{ + { + name: "int", + a: int64(10), + b: int64(5), + add: int64(15), + sub: int64(5), + mul: int64(50), + div: int64(2), + mod: int64(0), + concat: ErrArithmetic, + }, + { + name: "decimal", + a: mustDec("10.00"), + b: mustDec("5.00"), + add: mustDec("15.00"), + sub: mustDec("5.00"), + mul: mustDec("50.00"), + div: mustDec("2.00"), + mod: mustDec("0.00"), + concat: ErrArithmetic, + }, + { + name: "text", + a: "hello", + b: "world", + add: ErrArithmetic, + sub: ErrArithmetic, + mul: ErrArithmetic, + div: ErrArithmetic, + mod: ErrArithmetic, + concat: "helloworld", + }, + { + name: "uuid", + a: mustUUID("550e8400-e29b-41d4-a716-446655440000"), + b: mustUUID("550e8400-e29b-41d4-a716-446655440000"), + add: ErrArithmetic, + sub: ErrArithmetic, + mul: ErrArithmetic, + div: ErrArithmetic, + mod: ErrArithmetic, + concat: ErrArithmetic, + }, + { + name: "blob", + a: []byte("hello"), + b: []byte("world"), + add: ErrArithmetic, + sub: ErrArithmetic, + mul: ErrArithmetic, + div: ErrArithmetic, + mod: ErrArithmetic, + concat: []byte("helloworld"), + }, + { + name: "bool", + a: true, + b: false, + add: ErrArithmetic, + sub: ErrArithmetic, + mul: ErrArithmetic, + div: ErrArithmetic, + mod: ErrArithmetic, + concat: ErrArithmetic, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + makeVal := func(v any) ScalarValue { + val, err := NewValue(v) + require.NoError(t, err) + return val.(ScalarValue) + } + + a := makeVal(tt.a) + b := makeVal(tt.b) + + isErrOrResult := func(a, b ScalarValue, op ArithmeticOp, want any) { + res, err := a.Arithmetic(b, op) + if wantErr, ok := want.(error); ok { + require.Error(t, err) + require.ErrorIs(t, err, wantErr) + return + } + require.NoError(t, err) + + raw := res.RawValue() + + eq(t, want, raw) + + // operations on null values should always return null + null := newNull(a.Type()).(ScalarValue) + + res, err = a.Arithmetic(null, op) + require.NoError(t, err) + + require.True(t, res.Null()) + require.Nil(t, res.RawValue()) + } + + isErrOrResult(a, b, add, tt.add) + isErrOrResult(a, b, sub, tt.sub) + isErrOrResult(a, b, mul, tt.mul) + isErrOrResult(a, b, div, tt.div) + isErrOrResult(a, b, mod, tt.mod) + isErrOrResult(a, b, concat, tt.concat) + + // test roundtripping strings + testRoundTripParse(t, a) + testRoundTripParse(t, b) + }) + } +} + +// eq is a helper function that checks if two values are equal. +// It handles the semantics of comparing decimal values. 
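+// Decimals (and slices of decimals) are compared numerically via Cmp; all other values fall through to assert.EqualValues.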
+func eq(t *testing.T, a, b any) { + // if the values are decimals, we need to compare them manually + if aDec, ok := a.(*decimal.Decimal); ok { + bDec, ok := b.(*decimal.Decimal) + require.True(t, ok) + + rec, err := aDec.Cmp(bDec) + require.NoError(t, err) + assert.Zero(t, rec) + return + } + + if aDec, ok := a.([]*decimal.Decimal); ok { + bDec, ok := b.([]*decimal.Decimal) + require.True(t, ok) + + require.Len(t, aDec, len(bDec)) + for i := range aDec { + eq(t, aDec[i], bDec[i]) + } + return + } + + assert.EqualValues(t, a, b) +} + +func Test_Comparison(t *testing.T) { + type testcase struct { + name string + a any + b any + gt any + lt any + eq any + is any + distinctFrom any + } + + // there are 6 types: int, text, bool, blob, uuid, decimal + // Each type can also have a one dimensional array of that type + // We need tests for each type and each array type, testing comparison against each other + // as well as against null values. + tests := []testcase{ + { + name: "int", + a: int64(10), + b: int64(5), + eq: false, + gt: true, + lt: false, + is: ErrComparison, + distinctFrom: true, + }, + { + name: "decimal", + a: mustDec("10.00"), + b: mustDec("5.00"), + eq: false, + gt: true, + lt: false, + is: ErrComparison, + distinctFrom: true, + }, + { + name: "text", + a: "hello", + b: "world", + eq: false, + gt: false, + lt: true, + is: ErrComparison, + distinctFrom: true, + }, + { + name: "uuid", + a: mustUUID("550e8400-e29b-41d4-a716-446655440000"), + b: mustUUID("550e8400-e29b-41d4-a716-446655440000"), + eq: true, + gt: ErrComparison, + lt: ErrComparison, + is: ErrComparison, + distinctFrom: false, + }, + { + name: "blob", + a: []byte("hello"), + b: []byte("world"), + eq: false, + gt: ErrComparison, + lt: ErrComparison, + is: ErrComparison, + distinctFrom: true, + }, + { + name: "bool", + a: true, + b: false, + eq: false, + gt: true, + lt: false, + is: false, + distinctFrom: true, + }, + { + name: "int-null", + a: int64(10), + b: nil, + eq: nil, + gt: nil, + lt: nil, + is: false, + distinctFrom: true, + }, + { + name: "null-null", + a: nil, + b: nil, + eq: nil, + gt: nil, + lt: nil, + is: true, + distinctFrom: false, + }, + // array tests + { + name: "int-array", + a: []int64{1, 2, 3}, + b: []int64{1, 2, 3}, + eq: true, + gt: ErrComparison, + lt: ErrComparison, + is: ErrComparison, + distinctFrom: false, + }, + { + name: "text-array", + a: []string{"hello", "world"}, + b: []string{"hello", "world"}, + eq: true, + gt: ErrComparison, + lt: ErrComparison, + is: ErrComparison, + distinctFrom: false, + }, + { + name: "decimal-array", + a: []*decimal.Decimal{mustDec("1.00"), mustDec("2.00"), mustDec("3.00")}, + b: []*decimal.Decimal{mustDec("1.00"), mustDec("2.00"), mustDec("3.00")}, + eq: true, + gt: ErrComparison, + lt: ErrComparison, + is: ErrComparison, + distinctFrom: false, + }, + { + name: "text array not equal", + a: []string{"hello", "world"}, + b: []string{"world", "hello"}, + eq: false, + gt: ErrComparison, + lt: ErrComparison, + is: ErrComparison, + distinctFrom: true, + }, + { + name: "uuid-array", + a: []*types.UUID{mustUUID("550e8400-e29b-41d4-a716-446655440000")}, + b: []*types.UUID{mustUUID("550e8400-e29b-41d4-a716-446655440000")}, + eq: true, + gt: ErrComparison, + lt: ErrComparison, + is: ErrComparison, + distinctFrom: false, + }, + { + name: "blob-array", + a: [][]byte{[]byte("hello"), []byte("world")}, + b: [][]byte{[]byte("hello"), []byte("world")}, + eq: true, + gt: ErrComparison, + lt: ErrComparison, + is: ErrComparison, + distinctFrom: false, + }, + { + name: 
"bool-array", + a: []bool{true, false}, + b: []bool{true, false}, + eq: true, + gt: ErrComparison, + lt: ErrComparison, + is: ErrComparison, + distinctFrom: false, + }, + { + name: "int-array-null", + a: []int64{1, 2, 3}, + b: nil, + eq: nil, + gt: nil, + lt: nil, + is: false, + distinctFrom: true, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + makeVal := func(v any) Value { + val, err := NewValue(v) + require.NoError(t, err) + return val + } + + a := makeVal(tt.a) + b := makeVal(tt.b) + + isErrOrResult := func(a, b Value, op ComparisonOp, want any) { + t.Log(op.String()) + res, err := a.Compare(b, op) + if wantErr, ok := want.(error); ok { + require.Error(t, err) + require.ErrorIs(t, err, wantErr) + return + } + require.NoError(t, err) + + switch wantVal := want.(type) { + default: + require.EqualValues(t, wantVal, res.RawValue()) + case nil: + require.True(t, res.Null()) + require.Nil(t, res.RawValue()) + case bool: + require.Equal(t, wantVal, res.RawValue()) + case *bool: + require.Equal(t, *wantVal, res.RawValue()) + } + } + + isErrOrResult(a, b, lessThan, tt.lt) + isErrOrResult(a, b, greaterThan, tt.gt) + isErrOrResult(a, b, equal, tt.eq) + isErrOrResult(a, b, is, tt.is) + isErrOrResult(a, b, isDistinctFrom, tt.distinctFrom) + + // test rountripping strings + testRoundTripParse(t, a) + testRoundTripParse(t, b) + }) + } +} + +func Test_Cast(t *testing.T) { + // for this test, we want to test each type and array type, + // and ensure it can be casted to each other type and array type + // all numerics will be precision 10, scale 5. + // If a value is left as nil, it will expect an error when casted to that type. + type testcase struct { + name string + val any + intVal any + text any + boolVal any + decimalVal any + uuidVal any + blobVal any + intArr any + textArr any + boolArr any + decimalArr any + uuidArr any + blobArr any + } + + mDec := func(dec string) *decimal.Decimal { + // all decimals will be precision 10, scale 5 + d, err := decimal.NewFromString(dec) + require.NoError(t, err) + + err = d.SetPrecisionAndScale(10, 5) + require.NoError(t, err) + return d + } + + mDecArr := func(decimals ...string) []*decimal.Decimal { + var res []*decimal.Decimal + for _, dec := range decimals { + res = append(res, mDec(dec)) + } + return res + } + + tests := []testcase{ + { + name: "int", + val: int64(10), + intVal: int64(10), + text: "10", + boolVal: true, + decimalVal: mDec("10.00000"), + }, + { + name: "text", + val: "hello", + text: "hello", + blobVal: []byte("hello"), + }, + { + name: "text (number)", + val: "10", + intVal: 10, + text: "10", + decimalVal: mDec("10.00000"), + blobVal: []byte("10"), + }, + { + name: "text (bool)", + val: "true", + boolVal: true, + text: "true", + blobVal: []byte("true"), + }, + { + name: "text (decimal)", + val: "10.5", + decimalVal: mDec("10.50000"), + text: "10.5", + blobVal: []byte("10.5"), + }, + { + name: "text (uuid)", + val: "550e8400-e29b-41d4-a716-446655440000", + uuidVal: mustUUID("550e8400-e29b-41d4-a716-446655440000"), + text: "550e8400-e29b-41d4-a716-446655440000", + blobVal: []byte("550e8400-e29b-41d4-a716-446655440000"), + }, + { + name: "bool", + val: true, + boolVal: true, + text: "true", + intVal: int64(1), + }, + { + name: "decimal", + val: mDec("10.00000"), + decimalVal: mDec("10.00000"), + text: "10.00000", + intVal: int64(10), + }, + { + name: "uuid", + val: mustUUID("550e8400-e29b-41d4-a716-446655440000"), + uuidVal: mustUUID("550e8400-e29b-41d4-a716-446655440000"), + text: 
"550e8400-e29b-41d4-a716-446655440000", + blobVal: mustUUID("550e8400-e29b-41d4-a716-446655440000").Bytes(), + }, + { + name: "blob", + val: []byte("hello"), + blobVal: []byte("hello"), + text: "hello", + }, + { + name: "int-array", + val: []int64{1, 2, 3}, + intArr: []int64{1, 2, 3}, + textArr: []string{"1", "2", "3"}, + boolArr: []bool{true, true, true}, + decimalArr: mDecArr("1", "2", "3"), + }, + { + name: "text-array", + val: []string{"hello", "world"}, + textArr: []string{"hello", "world"}, + blobArr: [][]byte{[]byte("hello"), []byte("world")}, + }, + { + name: "text-array (uuid)", + val: []string{"550e8400-e29b-41d4-a716-446655440000"}, + uuidArr: []*types.UUID{mustUUID("550e8400-e29b-41d4-a716-446655440000")}, + textArr: []string{"550e8400-e29b-41d4-a716-446655440000"}, + blobArr: [][]byte{[]byte("550e8400-e29b-41d4-a716-446655440000")}, + }, + { + name: "bool-array", + val: []bool{true, false}, + boolArr: []bool{true, false}, + textArr: []string{"true", "false"}, + intArr: []int64{1, 0}, + }, + { + name: "decimal-array", + val: mDecArr("1", "2", "3"), + decimalArr: mDecArr("1", "2", "3"), + textArr: []string{"1.00000", "2.00000", "3.00000"}, + intArr: []int64{1, 2, 3}, + }, + { + name: "uuid-array", + val: []*types.UUID{mustUUID("550e8400-e29b-41d4-a716-446655440000")}, + uuidArr: []*types.UUID{mustUUID("550e8400-e29b-41d4-a716-446655440000")}, + textArr: []string{"550e8400-e29b-41d4-a716-446655440000"}, + blobArr: [][]byte{mustUUID("550e8400-e29b-41d4-a716-446655440000").Bytes()}, + }, + { + name: "blob-array", + val: [][]byte{[]byte("hello"), []byte("world")}, + blobArr: [][]byte{[]byte("hello"), []byte("world")}, + textArr: []string{"hello", "world"}, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + val, err := NewValue(tt.val) + require.NoError(t, err) + + check := func(dataType *types.DataType, want any) { + t.Log(dataType.String()) + if want == nil { + want = ErrCast + } + + res, err := val.Cast(dataType) + if wantErr, ok := want.(error); ok { + assert.Error(t, err) + assert.ErrorIs(t, err, wantErr) + return + } + require.NoError(t, err) + + eq(t, want, res.RawValue()) + } + + decimalType, err := types.NewDecimalType(10, 5) + require.NoError(t, err) + + decArrType := decimalType.Copy() + decArrType.IsArray = true + + check(types.IntType, tt.intVal) + check(types.TextType, tt.text) + check(types.BoolType, tt.boolVal) + check(decimalType, tt.decimalVal) + check(types.UUIDType, tt.uuidVal) + check(types.BlobType, tt.blobVal) + + if intArr, ok := tt.intArr.([]int64); ok { + tt.intArr = ptrArr(intArr) + } + if textArr, ok := tt.textArr.([]string); ok { + tt.textArr = ptrArr(textArr) + } + if boolArr, ok := tt.boolArr.([]bool); ok { + tt.boolArr = ptrArr(boolArr) + } + + check(types.IntArrayType, tt.intArr) + check(types.TextArrayType, tt.textArr) + check(types.BoolArrayType, tt.boolArr) + check(decArrType, tt.decimalArr) + check(types.UUIDArrayType, tt.uuidArr) + check(types.BlobArrayType, tt.blobArr) + + // test rountripping strings + testRoundTripParse(t, val) + }) + } +} + +func Test_Unary(t *testing.T) { + type testcase struct { + name string + val any + pos any + neg any + not any + } + + // any values left nil will expect an error when the unary operator is applied + tests := []testcase{ + { + name: "int", + val: int64(10), + pos: int64(10), + neg: int64(-10), + }, + { + name: "decimal", + val: mustDec("10.00"), + pos: mustDec("10.00"), + neg: mustDec("-10.00"), + }, + { + name: "text", + // text values should not be able to be used with unary 
operators + }, + { + name: "uuid", + val: mustUUID("550e8400-e29b-41d4-a716-446655440000"), + // uuid values should not be able to be used with unary operators + }, + { + name: "blob", + // blob values should not be able to be used with unary operators + val: []byte("hello"), + }, + { + name: "bool", + val: true, + not: false, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + val, err := NewValue(tt.val) + require.NoError(t, err) + scal, ok := val.(ScalarValue) + require.True(t, ok) + + check := func(op UnaryOp, want any) { + if want == nil { + want = ErrUnary + } + + t.Log(op.String()) + res, err := scal.Unary(op) + if wantErr, ok := want.(error); ok { + require.Error(t, err) + require.ErrorIs(t, err, wantErr) + return + } + + require.NoError(t, err) + eq(t, want, res.RawValue()) + } + + check(pos, tt.pos) + check(neg, tt.neg) + check(not, tt.not) + + // test roundtripping strings + testRoundTripParse(t, val) + }) + } +} + +func Test_Array(t *testing.T) { + type testcase struct { + name string + vals []any + wantErr error + } + + // all values will be put into an array. + // unless wantErr is specified, the array is expected to be created successfully + + tests := []testcase{ + { + name: "int", + vals: []any{int64(1), int64(2), int64(3)}, + }, + { + name: "decimal", + vals: []any{mustDec("1.00"), mustDec("2.00"), mustDec("3.00")}, + }, + { + name: "text", + vals: []any{"hello", "world"}, + }, + { + name: "uuid", + vals: []any{mustUUID("550e8400-e29b-41d4-a716-446655440000"), mustUUID("550e8400-e29b-41d4-a716-446655440001")}, + }, + { + name: "blob", + vals: []any{[]byte("hello"), []byte("world")}, + }, + { + name: "bool", + vals: []any{true, false}, + }, + { + name: "mixed", + vals: []any{int64(1), "hello"}, + wantErr: ErrArrayMixedTypes, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + if len(tt.vals) == 0 { + t.Fatal("no values provided") + } + + var vals []ScalarValue + for _, v := range tt.vals { + val, err := NewValue(v) + require.NoError(t, err) + vals = append(vals, val.(ScalarValue)) + } + + res, err := vals[0].Array(vals[1:]...) + if tt.wantErr != nil { + require.Error(t, err) + require.ErrorIs(t, err, tt.wantErr) + return + } + + for i := range res.Len() { + s, err := res.Index(i + 1) // 1-indexed + require.NoError(t, err) + + eq(t, tt.vals[i], s.RawValue()) + } + + // we will now set them all to nulls and test that the values read back as null + dt := vals[0].Type() + for i := range vals { + err = res.Set(int32(i+1), newNull(dt).(ScalarValue)) + require.NoError(t, err) + } + + for i := range res.Len() { + s, err := res.Index(i + 1) // 1-indexed + require.NoError(t, err) + + isNull := s.Null() + _ = isNull + require.True(t, s.Null()) + require.Nil(t, s.RawValue()) + } + + // test roundtripping strings + testRoundTripParse(t, res) + }) + } +} + +// ptrArr is a helper function that converts a slice of values to a slice of pointers to those values. 
+// Since Kwil returns pointers to account for nulls, we need to convert the slice of values to pointers +func ptrArr[T any](arr []T) []*T { + var res []*T + for i := range arr { + res = append(res, &arr[i]) + } + return res +} + +func mustDec(dec string) *decimal.Decimal { + d, err := decimal.NewFromString(dec) + if err != nil { + panic(err) + } + return d +} + +func mustUUID(s string) *types.UUID { + u, err := types.ParseUUID(s) + if err != nil { + panic(err) + } + return u +} + +// testRoundTripParse is a helper function that formats a value to a string, then parses it back to a value. +// It is meant to be used within these other tests. +func testRoundTripParse(t *testing.T, v Value) { + if v.Null() { + return + } + str, err := valueToString(v) + require.NoError(t, err) + + val2, err := parseValue(str, v.Type()) + require.NoError(t, err) + + equal, err := v.Compare(val2, equal) + require.NoError(t, err) + + if !equal.RawValue().(bool) { + t.Fatalf("values not equal: %v != %v", v.RawValue(), val2.RawValue()) + } +} diff --git a/parse/.gitignore b/node/engine/parse/.gitignore similarity index 100% rename from parse/.gitignore rename to node/engine/parse/.gitignore diff --git a/parse/antlr.go b/node/engine/parse/antlr.go similarity index 56% rename from parse/antlr.go rename to node/engine/parse/antlr.go index f240eb566..4bdb73a64 100644 --- a/parse/antlr.go +++ b/node/engine/parse/antlr.go @@ -2,40 +2,28 @@ package parse import ( "encoding/hex" + "fmt" "math" "math/big" "strconv" "strings" - "github.com/antlr4-go/antlr/v4" + antlr "github.com/antlr4-go/antlr/v4" "github.com/kwilteam/kwil-db/core/types" "github.com/kwilteam/kwil-db/core/types/decimal" "github.com/kwilteam/kwil-db/core/types/validation" - "github.com/kwilteam/kwil-db/parse/gen" + "github.com/kwilteam/kwil-db/node/engine/parse/gen" ) // schemaVisitor is a visitor for converting Kuneiform's ANTLR // generated parse tree into our native schema type. It will perform -// syntax validation on actions and procedures. +// syntax validation on actions. type schemaVisitor struct { antlr.BaseParseTreeVisitor - // schema is the schema that was parsed. - // If no schema was parsed, it will be nil. - schema *types.Schema - // schemaInfo holds information on the position - // of certain blocks in the schema. - schemaInfo *SchemaInfo // errs is used for passing errors back to the caller. errs *errorListener // stream is the input stream stream *antlr.InputStream - // both procedures and actions are only needed if parsing - // an entire top-level schema, and will not be called if - // parsing only an action or procedure body, or SQL. - // procedures maps the asts of all parsed procedures - procedures map[string][]ProcedureStmt - // actions maps the asts of all parsed actions - actions map[string][]ActionStmt } // getTextFromStream gets the text from the input stream for a given range. @@ -54,9 +42,6 @@ func (s *schemaVisitor) getTextFromStream(start, stop int) (str string) { // newSchemaVisitor creates a new schema visitor. func newSchemaVisitor(stream *antlr.InputStream, errLis *errorListener) *schemaVisitor { return &schemaVisitor{ - schemaInfo: &SchemaInfo{ - Blocks: make(map[string]*Block), - }, errs: errLis, stream: stream, } @@ -64,21 +49,159 @@ func newSchemaVisitor(stream *antlr.InputStream, errLis *errorListener) *schemaV var _ gen.KuneiformParserVisitor = (*schemaVisitor)(nil) -// below are the 4 top-level entry points for the visitor. 
-func (s *schemaVisitor) VisitSchema_entry(ctx *gen.Schema_entryContext) any { - return ctx.Schema().Accept(s) +func (s *schemaVisitor) VisitEntry(ctx *gen.EntryContext) any { + var stmts []TopLevelStatement + for _, stmt := range ctx.AllStatement() { + stmts = append(stmts, stmt.Accept(s).(TopLevelStatement)) + } + + return stmts } -func (s *schemaVisitor) VisitAction_entry(ctx *gen.Action_entryContext) any { - return ctx.Action_block().Accept(s) +func (s *schemaVisitor) VisitStatement(ctx *gen.StatementContext) any { + var s2 TopLevelStatement + switch { + case ctx.Sql_statement() != nil: + s2 = ctx.Sql_statement().Accept(s).(*SQLStatement) + case ctx.Create_table_statement() != nil: + s2 = ctx.Create_table_statement().Accept(s).(TopLevelStatement) + case ctx.Alter_table_statement() != nil: + s2 = ctx.Alter_table_statement().Accept(s).(TopLevelStatement) + case ctx.Drop_table_statement() != nil: + s2 = ctx.Drop_table_statement().Accept(s).(TopLevelStatement) + case ctx.Create_index_statement() != nil: + s2 = ctx.Create_index_statement().Accept(s).(TopLevelStatement) + case ctx.Drop_index_statement() != nil: + s2 = ctx.Drop_index_statement().Accept(s).(TopLevelStatement) + case ctx.Create_role_statement() != nil: + s2 = ctx.Create_role_statement().Accept(s).(TopLevelStatement) + case ctx.Drop_role_statement() != nil: + s2 = ctx.Drop_role_statement().Accept(s).(TopLevelStatement) + case ctx.Grant_statement() != nil: + s2 = ctx.Grant_statement().Accept(s).(TopLevelStatement) + case ctx.Revoke_statement() != nil: + s2 = ctx.Revoke_statement().Accept(s).(TopLevelStatement) + case ctx.Transfer_ownership_statement() != nil: + s2 = ctx.Transfer_ownership_statement().Accept(s).(TopLevelStatement) + case ctx.Create_action_statement() != nil: + s3 := ctx.Create_action_statement().Accept(s).(*CreateActionStatement) + r := s.getTextFromStream(ctx.GetStart().GetStart(), ctx.GetStop().GetStop()) + ";" + s3.Raw = r + s2 = s3 + case ctx.Drop_action_statement() != nil: + s2 = ctx.Drop_action_statement().Accept(s).(TopLevelStatement) + case ctx.Create_namespace_statement() != nil: + s2 = ctx.Create_namespace_statement().Accept(s).(TopLevelStatement) + case ctx.Drop_namespace_statement() != nil: + s2 = ctx.Drop_namespace_statement().Accept(s).(TopLevelStatement) + case ctx.Use_extension_statement() != nil: + s2 = ctx.Use_extension_statement().Accept(s).(TopLevelStatement) + case ctx.Unuse_extension_statement() != nil: + s2 = ctx.Unuse_extension_statement().Accept(s).(TopLevelStatement) + default: + panic(fmt.Sprintf("unknown parser entry: %s", ctx.GetText())) + } + + if ctx.GetNamespace() != nil { + namespaceable, ok := s2.(Namespaceable) + if !ok { + s.errs.RuleErr(ctx, ErrSyntax, fmt.Sprintf("statement %T cannot have a namespace", s2)) + } else { + namespaceable.SetNamespacePrefix(s.getIdent(ctx.GetNamespace())) + } + } + + return s2 } -func (s *schemaVisitor) VisitProcedure_entry(ctx *gen.Procedure_entryContext) any { - return ctx.Procedure_block().Accept(s) +func (s *schemaVisitor) VisitCreate_action_statement(ctx *gen.Create_action_statementContext) any { + cas := &CreateActionStatement{ + IfNotExists: ctx.EXISTS() != nil, + OrReplace: ctx.REPLACE() != nil, + Name: s.getIdent(ctx.Identifier(0)), + Parameters: arr[*NamedType](len(ctx.AllType_())), + Statements: arr[ActionStmt](len(ctx.AllAction_statement())), + Raw: s.getTextFromStream(ctx.GetStart().GetStart(), ctx.GetStop().GetStop()), + } + + if cas.IfNotExists && cas.OrReplace { + s.errs.RuleErr(ctx, ErrSyntax, "cannot have both IF NOT EXISTS and OR 
REPLACE") + return cas + } + + allIdents := ctx.AllIdentifier() + foundMods := make(map[string]struct{}) + for _, id := range allIdents[1:] { + modText := s.getIdent(id) + if _, ok := foundMods[modText]; ok { + s.errs.RuleErr(ctx, ErrSyntax, "modifier %s redeclared", modText) + } + foundMods[modText] = struct{}{} + + cas.Modifiers = append(cas.Modifiers, modText) + } + + paramSet := make(map[string]struct{}) + for i, t := range ctx.AllType_() { + name := s.cleanStringIdent(ctx.VARIABLE(i)) + + // check for duplicate parameters + if _, ok := paramSet[name]; ok { + s.errs.RuleErr(ctx, ErrDuplicateParameterName, "parameter %s redeclared", name) + } + paramSet[name] = struct{}{} + + // parameters must start with $ + if !strings.HasPrefix(name, "$") { + s.errs.RuleErr(ctx, ErrSyntax, "parameter name must start with $") + } + + typ := t.Accept(s).(*types.DataType) + cas.Parameters[i] = &NamedType{ + Name: name, + Type: typ, + } + } + + if ctx.Action_return() != nil { + cas.Returns = ctx.Action_return().Accept(s).(*ActionReturn) + } + + for i, stmt := range ctx.AllAction_statement() { + cas.Statements[i] = stmt.Accept(s).(ActionStmt) + } + + cas.Set(ctx) + return cas +} + +func (s *schemaVisitor) VisitDrop_action_statement(ctx *gen.Drop_action_statementContext) any { + das := &DropActionStatement{ + IfExists: ctx.EXISTS() != nil, + Name: s.getIdent(ctx.Identifier()), + } + das.Set(ctx) + return das } -func (s *schemaVisitor) VisitSql_entry(ctx *gen.Sql_entryContext) any { - return ctx.Sql().Accept(s) +func (s *schemaVisitor) VisitCreate_namespace_statement(ctx *gen.Create_namespace_statementContext) any { + cns := &CreateNamespaceStatement{ + IfNotExists: ctx.EXISTS() != nil, + Namespace: s.getIdent(ctx.Identifier()), + } + + cns.Set(ctx) + return cns +} + +func (s *schemaVisitor) VisitDrop_namespace_statement(ctx *gen.Drop_namespace_statementContext) any { + dns := &DropNamespaceStatement{ + IfExists: ctx.EXISTS() != nil, + Namespace: s.getIdent(ctx.Identifier()), + } + + dns.Set(ctx) + return dns } // unknownExpression creates a new literal with an unknown type and null value. 
@@ -248,12 +371,12 @@ func (s *schemaVisitor) VisitIdentifier_list(ctx *gen.Identifier_listContext) an } func (s *schemaVisitor) VisitIdentifier(ctx *gen.IdentifierContext) any { - return s.getIdent(ctx.IDENTIFIER()) + return s.getIdent(ctx) } func (s *schemaVisitor) VisitType(ctx *gen.TypeContext) any { dt := &types.DataType{ - Name: s.getIdent(ctx.IDENTIFIER()), + Name: s.getIdent(ctx.Identifier()), } if ctx.LPAREN() != nil { @@ -270,7 +393,8 @@ func (s *schemaVisitor) VisitType(ctx *gen.TypeContext) any { return types.UnknownType } - dt.Metadata = [2]uint16{uint16(prec), uint16(scale)} + met := [2]uint16{uint16(prec), uint16(scale)} + dt.Metadata = met } if ctx.LBRACKET() != nil { @@ -296,22 +420,16 @@ func (s *schemaVisitor) VisitVariable(ctx *gen.VariableContext) any { switch { case ctx.VARIABLE() != nil: e = &ExpressionVariable{ - Name: strings.ToLower(strings.TrimLeft(ctx.GetText(), "$")), + Name: strings.ToLower(ctx.GetText()), Prefix: VariablePrefixDollar, } tok = ctx.VARIABLE().GetSymbol() case ctx.CONTEXTUAL_VARIABLE() != nil: e = &ExpressionVariable{ - Name: strings.ToLower(strings.TrimLeft(ctx.GetText(), "@")), + Name: strings.ToLower(ctx.GetText()), Prefix: VariablePrefixAt, } tok = ctx.CONTEXTUAL_VARIABLE().GetSymbol() - - _, ok := SessionVars[e.Name] - if !ok { - s.errs.RuleErr(ctx, ErrUnknownContextualVariable, e.Name) - } - default: panic("unknown variable") } @@ -322,654 +440,670 @@ func (s *schemaVisitor) VisitVariable(ctx *gen.VariableContext) any { return e } -func (s *schemaVisitor) VisitVariable_list(ctx *gen.Variable_listContext) any { - var vars []*ExpressionVariable - for _, v := range ctx.AllVariable() { - vars = append(vars, v.Accept(s).(*ExpressionVariable)) +// arr will make an array of type A if the input is greater than 0 +func arr[A any](b int) []A { + if b > 0 { + return make([]A, b) } - - return vars + return nil } -func (s *schemaVisitor) VisitSchema(ctx *gen.SchemaContext) any { - s.schema = &types.Schema{ - Name: ctx.Database_declaration().Accept(s).(string), - Tables: arr[*types.Table](len(ctx.AllTable_declaration())), - Extensions: arr[*types.Extension](len(ctx.AllUse_declaration())), - Actions: arr[*types.Action](len(ctx.AllAction_declaration())), - Procedures: arr[*types.Procedure](len(ctx.AllProcedure_declaration())), - ForeignProcedures: arr[*types.ForeignProcedure](len(ctx.AllForeign_procedure_declaration())), +func (s *schemaVisitor) VisitUse_extension_statement(ctx *gen.Use_extension_statementContext) any { + e := &UseExtensionStatement{ + IfNotExists: ctx.EXISTS() != nil, + ExtName: s.getIdent(ctx.GetExtension_name()), + Alias: s.getIdent(ctx.GetAlias()), } - for i, t := range ctx.AllTable_declaration() { - s.schema.Tables[i] = t.Accept(s).(*types.Table) - s.registerBlock(t, s.schema.Tables[i].Name) + allIdent := ctx.AllIdentifier() + for i, id := range allIdent[1 : len(allIdent)-1] { + e.Config = append(e.Config, &struct { + Key string + Value Expression + }{ + Key: s.getIdent(id), + Value: ctx.Action_expr(i).Accept(s).(Expression), + }) } + e.Set(ctx) + return e +} - // only now that we have visited all tables can we validate - // foreign keys - for _, t := range s.schema.Tables { - for _, fk := range t.ForeignKeys { - // the best we can do is get the position of the full - // table. 
- pos, ok := s.schemaInfo.Blocks[strings.ToLower(fk.ParentTable)] - if !ok { - pos2, ok2 := s.schemaInfo.Blocks[t.Name] - if ok2 { - s.errs.AddErr(pos2, ErrUnknownTable, fk.ParentTable) - } else { - s.errs.RuleErr(ctx, ErrUnknownTable, fk.ParentTable) - } - continue - } - - // check that all ParentKeys exist - parentTable, ok := s.schema.FindTable(fk.ParentTable) - if !ok { - s.errs.AddErr(pos, ErrUnknownTable, fk.ParentTable) - continue - } - - for _, col := range fk.ParentKeys { - if _, ok := parentTable.FindColumn(col); !ok { - s.errs.AddErr(pos, ErrUnknownColumn, col) - } - } - } +func (s *schemaVisitor) VisitUnuse_extension_statement(ctx *gen.Unuse_extension_statementContext) any { + e := &UnuseExtensionStatement{ + IfExists: ctx.EXISTS() != nil, + Alias: s.getIdent(ctx.GetAlias()), } - for i, e := range ctx.AllUse_declaration() { - s.schema.Extensions[i] = e.Accept(s).(*types.Extension) - s.registerBlock(e, s.schema.Extensions[i].Alias) - } + e.Set(ctx) + return e +} - for i, a := range ctx.AllAction_declaration() { - s.schema.Actions[i] = a.Accept(s).(*types.Action) - s.registerBlock(a, s.schema.Actions[i].Name) +func (s *schemaVisitor) VisitType_list(ctx *gen.Type_listContext) any { + var ts []*types.DataType + for _, t := range ctx.AllType_() { + ts = append(ts, t.Accept(s).(*types.DataType)) } - for i, p := range ctx.AllProcedure_declaration() { - s.schema.Procedures[i] = p.Accept(s).(*types.Procedure) - s.registerBlock(p, s.schema.Procedures[i].Name) - } + return ts +} - for i, p := range ctx.AllForeign_procedure_declaration() { - s.schema.ForeignProcedures[i] = p.Accept(s).(*types.ForeignProcedure) - s.registerBlock(p, s.schema.ForeignProcedures[i].Name) +func (s *schemaVisitor) VisitNamed_type_list(ctx *gen.Named_type_listContext) any { + var ts []*NamedType + for i, t := range ctx.AllIdentifier() { + ts = append(ts, &NamedType{ + Name: s.getIdent(t), + Type: ctx.Type_(i).Accept(s).(*types.DataType), + }) } - return s.schema + return ts } -// registerBlock registers a top-level block (table, action, procedure, etc.), -// ensuring uniqueness -func (s *schemaVisitor) registerBlock(ctx antlr.ParserRuleContext, name string) { - lower := strings.ToLower(name) - if _, ok := s.schemaInfo.Blocks[lower]; ok { - s.errs.RuleErr(ctx, ErrDuplicateBlock, lower) - return - } +func (s *schemaVisitor) VisitAction_return(ctx *gen.Action_returnContext) any { + ret := &ActionReturn{} - if _, ok := Functions[lower]; ok { - s.errs.RuleErr(ctx, ErrReservedKeyword, lower) - return - } - - if validation.IsKeyword(lower) { - s.errs.RuleErr(ctx, ErrReservedKeyword, lower) - return + usesNamedFields := false + switch { + case ctx.GetReturn_columns() != nil: + ret.Fields = ctx.GetReturn_columns().Accept(s).([]*NamedType) + usesNamedFields = true + case ctx.GetUnnamed_return_types() != nil: + ret.Fields = make([]*NamedType, len(ctx.GetUnnamed_return_types().AllType_())) + for i, t := range ctx.GetUnnamed_return_types().AllType_() { + ret.Fields[i] = &NamedType{ + Name: "", + Type: t.Accept(s).(*types.DataType), + } + } + default: + panic("unknown return type") } - node := &Position{} - node.Set(ctx) - - s.schemaInfo.Blocks[lower] = &Block{ - Position: *node, - AbsStart: ctx.GetStart().GetStart(), - AbsEnd: ctx.GetStop().GetStop(), - } -} + if ctx.TABLE() != nil { + ret.IsTable = true -// arr will make an array of type A if the input is greater than 0 -func arr[A any](b int) []A { - if b > 0 { - return make([]A, b) + // if it returns a table, it _must_ use named fields + if !usesNamedFields { + 
s.errs.RuleErr(ctx, ErrSyntax, "actions returning tables must use named fields in the return clause") + } } - return nil -} -func (s *schemaVisitor) VisitAnnotation(ctx *gen.AnnotationContext) any { - // we will parse but reconstruct annotations, so they can later be consumed by the gateway - str := strings.Builder{} + ret.Set(ctx) - str.WriteString(s.getIdent(ctx.CONTEXTUAL_VARIABLE())) - str.WriteString("(") - for i, l := range ctx.AllLiteral() { - if i > 0 { - str.WriteString(", ") + // validate that the return fields are unique + seen := make(map[string]struct{}) + for _, f := range ret.Fields { + if f.Name == "" { + continue } - str.WriteString(s.getIdent(ctx.IDENTIFIER(i))) - str.WriteString("=") - // we do not touch the literal, since case should be preserved - str.WriteString(l.GetText()) + if _, ok := seen[f.Name]; ok { + s.errs.RuleErr(ctx, ErrDuplicateResultColumnName, "field %s redeclared", f.Name) + } + seen[f.Name] = struct{}{} } - str.WriteString(")") - return str.String() -} - -// isErrNode is true if an antlr terminal node is an error node. -func isErrNode(node antlr.TerminalNode) bool { - _, ok := node.(antlr.ErrorNode) - return ok + return ret } -func (s *schemaVisitor) VisitDatabase_declaration(ctx *gen.Database_declarationContext) any { - // needed to avoid https://github.com/kwilteam/kwil-db/issues/752 - if isErrNode(ctx.DATABASE()) { - return "" +// VisitSql_statement visits a SQL DML statement. It is called by all nested +// sql statements (e.g. in actions) +func (s *schemaVisitor) VisitSql_statement(ctx *gen.Sql_statementContext) any { + stmt := &SQLStatement{ + CTEs: arr[*CommonTableExpression](len(ctx.AllCommon_table_expression())), } - return s.getIdent(ctx.IDENTIFIER()) -} - -func (s *schemaVisitor) VisitUse_declaration(ctx *gen.Use_declarationContext) any { - // the first identifier is the extension name, the last is the alias, - // and all in between are keys in the initialization. 
- e := &types.Extension{ - Name: s.getIdent(ctx.IDENTIFIER(0)), - Initialization: arr[*types.ExtensionConfig](len(ctx.AllIDENTIFIER()) - 2), - Alias: s.getIdent(ctx.IDENTIFIER(len(ctx.AllIDENTIFIER()) - 1)), + for i, cte := range ctx.AllCommon_table_expression() { + stmt.CTEs[i] = cte.Accept(s).(*CommonTableExpression) } - for i, id := range ctx.AllIDENTIFIER()[1 : len(ctx.AllIDENTIFIER())-1] { - val := ctx.Literal(i).Accept(s).(*ExpressionLiteral) - - e.Initialization[i] = &types.ExtensionConfig{ - Key: s.getIdent(id), - Value: val.String(), - } + if ctx.RECURSIVE() != nil { + stmt.Recursive = true } - return e -} - -func (s *schemaVisitor) VisitTable_declaration(ctx *gen.Table_declarationContext) any { - t := &types.Table{ - Name: s.getIdent(ctx.IDENTIFIER()), - Columns: arr[*types.Column](len(ctx.AllColumn_def())), - Indexes: arr[*types.Index](len(ctx.AllIndex_def())), - ForeignKeys: arr[*types.ForeignKey](len(ctx.AllForeign_key_def())), + switch { + case ctx.Select_statement() != nil: + stmt.SQL = ctx.Select_statement().Accept(s).(*SelectStatement) + case ctx.Update_statement() != nil: + stmt.SQL = ctx.Update_statement().Accept(s).(*UpdateStatement) + case ctx.Insert_statement() != nil: + stmt.SQL = ctx.Insert_statement().Accept(s).(*InsertStatement) + case ctx.Delete_statement() != nil: + stmt.SQL = ctx.Delete_statement().Accept(s).(*DeleteStatement) + default: + panic("unknown dml statement") } - for i, c := range ctx.AllColumn_def() { - t.Columns[i] = c.Accept(s).(*types.Column) - } + raw := s.getTextFromStream(ctx.GetStart().GetStart(), ctx.GetStop().GetStop()) + stmt.raw = &raw - for i, idx := range ctx.AllIndex_def() { - t.Indexes[i] = idx.Accept(s).(*types.Index) + stmt.Set(ctx) + return stmt +} - // check that all columns in indexes and foreign key children exist - for _, col := range t.Indexes[i].Columns { - if _, ok := t.FindColumn(col); !ok { - s.errs.RuleErr(idx, ErrUnknownColumn, col) - } - } +func (s *schemaVisitor) VisitCommon_table_expression(ctx *gen.Common_table_expressionContext) any { + // first identifier is the table name, the rest are the columns + cte := &CommonTableExpression{ + Name: ctx.Identifier(0).Accept(s).(string), + Query: ctx.Select_statement().Accept(s).(*SelectStatement), } - for i, fk := range ctx.AllForeign_key_def() { - t.ForeignKeys[i] = fk.Accept(s).(*types.ForeignKey) - - // check that all ChildKeys exist. - // we will have to check for parent keys in a later stage, - // since not all tables are parsed yet. - for _, col := range t.ForeignKeys[i].ChildKeys { - if _, ok := t.FindColumn(col); !ok { - s.errs.RuleErr(fk, ErrUnknownColumn, col) - } - } + for _, id := range ctx.AllIdentifier()[1:] { + cte.Columns = append(cte.Columns, id.Accept(s).(string)) } - _, err := t.GetPrimaryKey() - if err != nil { - s.errs.RuleErr(ctx, ErrNoPrimaryKey, err.Error()) - } + cte.Set(ctx) - return t + return cte } -func (s *schemaVisitor) VisitColumn_def(ctx *gen.Column_defContext) any { - col := &types.Column{ - Name: s.getIdent(ctx.IDENTIFIER()), - Type: ctx.Type_().Accept(s).(*types.DataType), +func (s *schemaVisitor) VisitCreate_table_statement(ctx *gen.Create_table_statementContext) any { + stmt := &CreateTableStatement{ + Name: s.getIdent(ctx.GetName()), + IfNotExists: ctx.EXISTS() != nil, + Columns: arr[*Column](len(ctx.AllTable_column_def())), + Constraints: arr[*OutOfLineConstraint](len(ctx.AllTable_constraint_def())), } - // due to unfortunate lexing edge cases to support min/max, we - // have to parse the constraints here. 
Each constraint is a text, and should be - // one of: - // MIN/MAX/MINLEN/MAXLEN/MIN_LENGTH/MAX_LENGTH/NOTNULL/NOT/NULL/PRIMARY/KEY/PRIMARY_KEY/PK/DEFAULT/UNIQUE - // If NOT is present, it needs to be followed by NULL; similarly, if NULL is present, it needs to be preceded by NOT. - // If PRIMARY is present, it can be followed by key, but does not have to be. key must be preceded by primary. - // MIN, MAX, MINLEN, MAXLEN, MIN_LENGTH, MAX_LENGTH, and DEFAULT must also have a literal following them. - type constraint struct { - ident string - lit *string - } - constraints := make([]constraint, len(ctx.AllConstraint())) - for i, c := range ctx.AllConstraint() { - con := constraint{} - switch { - case c.IDENTIFIER() != nil: - con.ident = c.IDENTIFIER().GetText() - case c.PRIMARY() != nil: - con.ident = "primary_key" - case c.NOT() != nil: - con.ident = "notnull" - case c.DEFAULT() != nil: - con.ident = "default" - case c.UNIQUE() != nil: - con.ident = "unique" - default: - panic("unknown constraint") - } + // for basic validation + var primaryKey []string + allColumns := make(map[string]bool) - if c.Literal() != nil { - l := strings.ToLower(c.Literal().Accept(s).(*ExpressionLiteral).String()) - con.lit = &l + if len(ctx.AllTable_column_def()) == 0 { + s.errs.RuleErr(ctx, ErrTableDefinition, "no column definitions found") + } + for i, c := range ctx.AllTable_column_def() { + col := c.Accept(s).(*Column) + stmt.Columns[i] = col + if allColumns[col.Name] { + s.errs.RuleErr(c, ErrCollation, "constraint name exists") + } else { + allColumns[col.Name] = true } - constraints[i] = con } - for i := range constraints { - switch constraints[i].ident { - case "min": - if constraints[i].lit == nil { - s.errs.RuleErr(ctx, ErrSyntax, "missing literal for min constraint") - return col - } - col.Attributes = append(col.Attributes, &types.Attribute{ - Type: types.MIN, - Value: *constraints[i].lit, - }) - case "max": - if constraints[i].lit == nil { - s.errs.RuleErr(ctx, ErrSyntax, "missing literal for max constraint") - return col - } - col.Attributes = append(col.Attributes, &types.Attribute{ - Type: types.MAX, - Value: *constraints[i].lit, - }) - case "minlen", "min_length": - if constraints[i].lit == nil { - s.errs.RuleErr(ctx, ErrSyntax, "missing literal for min length constraint") - return col - } - col.Attributes = append(col.Attributes, &types.Attribute{ - Type: types.MIN_LENGTH, - Value: *constraints[i].lit, - }) - case "maxlen", "max_length": - if constraints[i].lit == nil { - s.errs.RuleErr(ctx, ErrSyntax, "missing literal for max length constraint") - return col - } - col.Attributes = append(col.Attributes, &types.Attribute{ - Type: types.MAX_LENGTH, - Value: *constraints[i].lit, - }) - case "notnull": - if constraints[i].lit != nil { - s.errs.RuleErr(ctx, ErrSyntax, "unexpected literal for not null constraint") - return col + // we iterate through all columns to see if the primary key has been declared. + // This allows us to check if it gets doubley declared. 
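+ // A primary key may be declared either inline on a single column or as an out-of-line table constraint, but not both.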
+ for _, column := range stmt.Columns { + for _, constraint := range column.Constraints { + switch constraint.(type) { + case *PrimaryKeyInlineConstraint: + // ensure that the primary key is not redeclared + if len(primaryKey) != 0 { + s.errs.AddErr(column, ErrRedeclaredPrimaryKey, "primary key redeclared") + continue + } + primaryKey = []string{column.Name} } - col.Attributes = append(col.Attributes, &types.Attribute{ - Type: types.NOT_NULL, - }) - case "primary_key", "pk": - if constraints[i].lit != nil { - s.errs.RuleErr(ctx, ErrSyntax, "unexpected literal for primary key constraint") - return col + } + } + + constraintSet := make(map[string]struct{}) + // we will validate that columns referenced in constraints exist. + // We will also check that the primary key is not redeclared. + for i, c := range ctx.AllTable_constraint_def() { + constraint := c.Accept(s).(*OutOfLineConstraint) + + // if the constraint was named, we will check that it is not redeclared. + // If not named, it will be auto-named. + if constraint.Name != "" { + _, ok := constraintSet[constraint.Name] + if ok { + s.errs.RuleErr(c, ErrRedeclaredConstraint, "constraint name exists") + } else { + constraintSet[constraint.Name] = struct{}{} } - col.Attributes = append(col.Attributes, &types.Attribute{ - Type: types.PRIMARY_KEY, - }) - case "default": - if constraints[i].lit == nil { - s.errs.RuleErr(ctx, ErrSyntax, "missing literal for default constraint") - return col + } + + stmt.Constraints[i] = constraint + + // if it is a primary key, we need to check that it is not redeclared + if pk, ok := constraint.Constraint.(*PrimaryKeyOutOfLineConstraint); ok { + if len(primaryKey) != 0 { + s.errs.AddErr(constraint, ErrRedeclaredPrimaryKey, "primary key redeclared") + continue } - col.Attributes = append(col.Attributes, &types.Attribute{ - Type: types.DEFAULT, - Value: *constraints[i].lit, - }) - case "unique": - if constraints[i].lit != nil { - s.errs.RuleErr(ctx, ErrSyntax, "unexpected literal for unique constraint") - return col + primaryKey = pk.Columns + } + + for _, col := range constraint.Constraint.LocalColumns() { + if !allColumns[col] { + s.errs.RuleErr(c, ErrUnknownColumn, "constraint on unknown column") } - col.Attributes = append(col.Attributes, &types.Attribute{ - Type: types.UNIQUE, - }) - default: - s.errs.RuleErr(ctx, ErrSyntax, "unknown constraint: %s", constraints[i].ident) - return col } } - for _, con := range col.Attributes { - err := con.Clean(col) - if err != nil { - s.errs.RuleErr(ctx, ErrColumnConstraint, err.Error()) - return col - } + if len(primaryKey) == 0 { + s.errs.RuleErr(ctx, ErrNoPrimaryKey, "no primary key declared") } - return col + stmt.Set(ctx) + return stmt } -func (s *schemaVisitor) VisitConstraint(ctx *gen.ConstraintContext) any { - panic("VisitConstraint should not be called, as the logic should be implemented in VisitColumn_def") -} +func (s *schemaVisitor) VisitTable_column_def(ctx *gen.Table_column_defContext) interface{} { + column := &Column{ + Name: s.getIdent(ctx.Identifier()), + Type: ctx.Type_().Accept(s).(*types.DataType), + Constraints: arr[InlineConstraint](len(ctx.AllInline_constraint())), + } -func (s *schemaVisitor) VisitIndex_def(ctx *gen.Index_defContext) any { - name := ctx.HASH_IDENTIFIER().GetText() - name = strings.TrimLeft(name, "#") - idx := &types.Index{ - Name: strings.ToLower(name), - Columns: ctx.Identifier_list().Accept(s).([]string), + for i, c := range ctx.AllInline_constraint() { + column.Constraints[i] = c.Accept(s).(InlineConstraint) } - 
s.validateVariableIdentifier(ctx.HASH_IDENTIFIER().GetSymbol(), idx.Name) + column.Set(ctx) + return column +} +func (s *schemaVisitor) VisitInline_constraint(ctx *gen.Inline_constraintContext) any { + var c InlineConstraint switch { - case ctx.INDEX() != nil: - idx.Type = types.BTREE - case ctx.UNIQUE() != nil: - idx.Type = types.UNIQUE_BTREE case ctx.PRIMARY() != nil: - idx.Type = types.PRIMARY + c = &PrimaryKeyInlineConstraint{} + case ctx.UNIQUE() != nil: + c = &UniqueInlineConstraint{} + case ctx.NOT() != nil: + c = &NotNullConstraint{} + case ctx.DEFAULT() != nil: + c = &DefaultConstraint{ + Value: ctx.Action_expr().Accept(s).(Expression), + } + case ctx.CHECK() != nil: + c = &CheckConstraint{ + Expression: ctx.Sql_expr().Accept(s).(Expression), + } + case ctx.Fk_constraint() != nil: + c = ctx.Fk_constraint().Accept(s).(*ForeignKeyReferences) default: - panic("unknown index type") + panic("unknown constraint") } - return idx + c.Set(ctx) + return c } -func (s *schemaVisitor) VisitForeign_key_def(ctx *gen.Foreign_key_defContext) any { - fk := &types.ForeignKey{ - ChildKeys: ctx.GetChild_keys().Accept(s).([]string), - ParentKeys: ctx.GetParent_keys().Accept(s).([]string), - ParentTable: strings.ToLower(ctx.GetParent_table().GetText()), - Actions: arr[*types.ForeignKeyAction](len(ctx.AllForeign_key_action())), +func (s *schemaVisitor) VisitFk_constraint(ctx *gen.Fk_constraintContext) any { + c := &ForeignKeyReferences{ + RefTable: s.getIdent(ctx.GetTable()), + RefColumns: ctx.Identifier_list().Accept(s).([]string), + Actions: arr[*ForeignKeyAction](len(ctx.AllFk_action())), } - for i, a := range ctx.AllForeign_key_action() { - fk.Actions[i] = a.Accept(s).(*types.ForeignKeyAction) + if ctx.GetNamespace() != nil { + c.RefTableNamespace = s.getIdent(ctx.GetNamespace()) } - return fk + for i, a := range ctx.AllFk_action() { + c.Actions[i] = a.Accept(s).(*ForeignKeyAction) + } + + c.Set(ctx) + return c } -func (s *schemaVisitor) VisitForeign_key_action(ctx *gen.Foreign_key_actionContext) any { - ac := &types.ForeignKeyAction{} +func (s *schemaVisitor) VisitFk_action(ctx *gen.Fk_actionContext) interface{} { + act := &ForeignKeyAction{} switch { - case ctx.UPDATE() != nil, ctx.LEGACY_ON_UPDATE() != nil: - ac.On = types.ON_UPDATE - case ctx.DELETE() != nil, ctx.LEGACY_ON_DELETE() != nil: - ac.On = types.ON_DELETE + case ctx.UPDATE() != nil: + act.On = ON_UPDATE + case ctx.DELETE() != nil: + act.On = ON_DELETE default: panic("unknown foreign key action") } switch { - case ctx.ACTION() != nil, ctx.LEGACY_NO_ACTION() != nil: - ac.Do = types.DO_NO_ACTION - case ctx.NULL() != nil, ctx.LEGACY_SET_NULL() != nil: - ac.Do = types.DO_SET_NULL - case ctx.DEFAULT() != nil, ctx.LEGACY_SET_DEFAULT() != nil: - ac.Do = types.DO_SET_DEFAULT - // cascade and restrict do not have legacys case ctx.CASCADE() != nil: - ac.Do = types.DO_CASCADE + act.Do = DO_CASCADE case ctx.RESTRICT() != nil: - ac.Do = types.DO_RESTRICT + act.Do = DO_RESTRICT + case ctx.SET() != nil: + if ctx.NULL() != nil { + act.Do = DO_SET_NULL + } else { + act.Do = DO_SET_DEFAULT + } + case ctx.NO() != nil: + act.Do = DO_NO_ACTION default: panic("unknown foreign key action") } - return ac + return act } -func (s *schemaVisitor) VisitType_list(ctx *gen.Type_listContext) any { - var ts []*types.DataType - for _, t := range ctx.AllType_() { - ts = append(ts, t.Accept(s).(*types.DataType)) +func (s *schemaVisitor) VisitTable_constraint_def(ctx *gen.Table_constraint_defContext) any { + name := "" + if ctx.GetName() != nil { + name = 
s.getIdent(ctx.GetName()) } - return ts -} + var c OutOfLineConstraintClause + switch { + case ctx.PRIMARY() != nil: + c = &PrimaryKeyOutOfLineConstraint{ + Columns: ctx.Identifier_list().Accept(s).([]string), + } + case ctx.UNIQUE() != nil: + c = &UniqueOutOfLineConstraint{ + Columns: ctx.Identifier_list().Accept(s).([]string), + } + case ctx.CHECK() != nil: + c = &CheckConstraint{ + Expression: ctx.Sql_expr().Accept(s).(Expression), + } + case ctx.FOREIGN() != nil: + c = &ForeignKeyOutOfLineConstraint{ + Columns: ctx.Identifier_list().Accept(s).([]string), + References: ctx.Fk_constraint().Accept(s).(*ForeignKeyReferences), + } + default: + panic("unknown constraint") + } -func (s *schemaVisitor) VisitNamed_type_list(ctx *gen.Named_type_listContext) any { - var ts []*types.NamedType - for i, t := range ctx.AllIDENTIFIER() { - ts = append(ts, &types.NamedType{ - Name: s.getIdent(t), - Type: ctx.Type_(i).Accept(s).(*types.DataType), - }) + c.Set(ctx) + oolc := &OutOfLineConstraint{ + Name: name, + Constraint: c, } - return ts + oolc.Set(ctx) + + return oolc } -func (s *schemaVisitor) VisitTyped_variable_list(ctx *gen.Typed_variable_listContext) any { - var vars []*types.ProcedureParameter - for i, v := range ctx.AllVariable() { - vars = append(vars, &types.ProcedureParameter{ - Name: v.Accept(s).(*ExpressionVariable).String(), - Type: ctx.Type_(i).Accept(s).(*types.DataType), - }) +func (s *schemaVisitor) VisitDrop_table_statement(ctx *gen.Drop_table_statementContext) any { + stmt := &DropTableStatement{ + Tables: ctx.GetTables().Accept(s).([]string), } - return vars -} + if ctx.Opt_drop_behavior() != nil { + stmt.Behavior = ctx.Opt_drop_behavior().Accept(s).(DropBehavior) + } + + if ctx.EXISTS() != nil { + stmt.IfExists = true + } -func (s *schemaVisitor) VisitAccess_modifier(ctx *gen.Access_modifierContext) any { - // we will have to parse this at a later stage, since this is either public/private, - // or a types.Modifier - panic("VisitAccess_modifier should not be called") + stmt.Set(ctx) + return stmt } -// getModifiersAndPublicity parses access modifiers and returns. 
it should be used when -// parsing procedures and actions -func getModifiersAndPublicity(ctxs []gen.IAccess_modifierContext) (public bool, mods []types.Modifier) { - for _, ctx := range ctxs { - switch { - case ctx.PUBLIC() != nil: - public = true - case ctx.PRIVATE() != nil: - public = false - case ctx.VIEW() != nil: - mods = append(mods, types.ModifierView) - case ctx.OWNER() != nil: - mods = append(mods, types.ModifierOwner) - default: - // should not happen, as this would suggest a bug in the parser - panic("unknown access modifier") - } +func (s *schemaVisitor) VisitOpt_drop_behavior(ctx *gen.Opt_drop_behaviorContext) any { + switch { + case ctx.CASCADE() != nil: + return DropBehaviorCascade + case ctx.RESTRICT() != nil: + return DropBehaviorRestrict + default: + return DropBehaviorDefault // restrict is the default } - - return } -func (s *schemaVisitor) VisitAction_declaration(ctx *gen.Action_declarationContext) any { - act := &types.Action{ - Name: s.getIdent(ctx.IDENTIFIER()), - Annotations: arr[string](len(ctx.AllAnnotation())), +func (s *schemaVisitor) VisitAlter_table_statement(ctx *gen.Alter_table_statementContext) any { + stmt := &AlterTableStatement{ + Table: ctx.Identifier().Accept(s).(string), + Action: ctx.Alter_table_action().Accept(s).(AlterTableAction), } - for i, a := range ctx.AllAnnotation() { - act.Annotations[i] = a.Accept(s).(string) + stmt.Set(ctx) + return stmt +} + +func (s *schemaVisitor) VisitAdd_column_constraint(ctx *gen.Add_column_constraintContext) any { + a := &AlterColumnSet{ + Column: ctx.Identifier().Accept(s).(string), } - public, mods := getModifiersAndPublicity(ctx.AllAccess_modifier()) - act.Public = public - act.Modifiers = mods + if ctx.NULL() != nil { + a.Type = ConstraintTypeNotNull + } else { + a.Type = ConstraintTypeDefault - if ctx.Variable_list() != nil { - params := ctx.Variable_list().Accept(s).([]*ExpressionVariable) - paramStrs := make([]string, len(params)) - for i, p := range params { - paramStrs[i] = p.String() + if ctx.Action_expr() == nil { + s.errs.RuleErr(ctx, ErrSyntax, "missing literal for default constraint") + return a } - act.Parameters = paramStrs + + a.Value = ctx.Action_expr().Accept(s).(Expression) } - act.Body = s.getTextFromStream(ctx.Action_block().GetStart().GetStart(), ctx.Action_block().GetStop().GetStop()) + a.Set(ctx) + return a +} - ast := ctx.Action_block().Accept(s).([]ActionStmt) - s.actions[act.Name] = ast +func (s *schemaVisitor) VisitDrop_column_constraint(ctx *gen.Drop_column_constraintContext) any { + a := &AlterColumnDrop{ + Column: ctx.Identifier().Accept(s).(string), + } - return act + switch { + case ctx.NULL() != nil: + a.Type = ConstraintTypeNotNull + case ctx.DEFAULT() != nil: + a.Type = ConstraintTypeDefault + default: + panic("unknown constraint") + } + + a.Set(ctx) + return a } -func (s *schemaVisitor) VisitProcedure_declaration(ctx *gen.Procedure_declarationContext) any { - proc := &types.Procedure{ - Name: s.getIdent(ctx.IDENTIFIER()), - Annotations: arr[string](len(ctx.AllAnnotation())), +func (s *schemaVisitor) VisitAdd_column(ctx *gen.Add_columnContext) any { + a := &AddColumn{ + Name: ctx.Identifier().Accept(s).(string), + Type: ctx.Type_().Accept(s).(*types.DataType), } - if ctx.Typed_variable_list() != nil { - proc.Parameters = ctx.Typed_variable_list().Accept(s).([]*types.ProcedureParameter) - } + a.Set(ctx) + return a +} - if ctx.Procedure_return() != nil { - proc.Returns = ctx.Procedure_return().Accept(s).(*types.ProcedureReturn) +func (s *schemaVisitor) VisitDrop_column(ctx 
*gen.Drop_columnContext) any { + a := &DropColumn{ + Name: ctx.Identifier().Accept(s).(string), } - for i, a := range ctx.AllAnnotation() { - proc.Annotations[i] = a.Accept(s).(string) + a.Set(ctx) + return a +} + +func (s *schemaVisitor) VisitRename_column(ctx *gen.Rename_columnContext) any { + a := &RenameColumn{ + OldName: ctx.GetOld_column().Accept(s).(string), + NewName: ctx.GetNew_column().Accept(s).(string), } - public, mods := getModifiersAndPublicity(ctx.AllAccess_modifier()) - proc.Public = public - proc.Modifiers = mods + a.Set(ctx) + return a +} - ast := ctx.Procedure_block().Accept(s).([]ProcedureStmt) - s.procedures[proc.Name] = ast +func (s *schemaVisitor) VisitRename_table(ctx *gen.Rename_tableContext) any { + a := &RenameTable{ + Name: ctx.Identifier().Accept(s).(string), + } - proc.Body = s.getTextFromStream(ctx.Procedure_block().GetStart().GetStart(), ctx.Procedure_block().GetStop().GetStop()) + a.Set(ctx) + return a +} - return proc +func (s *schemaVisitor) VisitAdd_table_constraint(ctx *gen.Add_table_constraintContext) any { + a := &AddTableConstraint{ + Constraint: ctx.Table_constraint_def().Accept(s).(*OutOfLineConstraint), + } + + a.Set(ctx) + return a } -func (s *schemaVisitor) VisitForeign_procedure_declaration(ctx *gen.Foreign_procedure_declarationContext) any { - // similar to https://github.com/kwilteam/kwil-db/issues/752, the parser will recognize - // `foreign proced`` as a foreign procedure named `proced`. it will throw an error, but we - // don't want to return this to the client either. - fp := &types.ForeignProcedure{} - if isErrNode(ctx.FOREIGN()) { - return fp +func (s *schemaVisitor) VisitDrop_table_constraint(ctx *gen.Drop_table_constraintContext) any { + a := &DropTableConstraint{ + Name: ctx.Identifier().Accept(s).(string), } - if isErrNode(ctx.PROCEDURE()) { - return fp + + a.Set(ctx) + return a +} + +func (s *schemaVisitor) VisitCreate_index_statement(ctx *gen.Create_index_statementContext) any { + a := &CreateIndexStatement{ + On: ctx.GetTable().Accept(s).(string), + Columns: ctx.GetColumns().Accept(s).([]string), + Type: IndexTypeBTree, } - fp.Name = s.getIdent(ctx.IDENTIFIER()) + if ctx.EXISTS() != nil { + a.IfNotExists = true + } - if ctx.Procedure_return() != nil { - fp.Returns = ctx.Procedure_return().Accept(s).(*types.ProcedureReturn) + if ctx.GetName() != nil { + a.Name = ctx.GetName().Accept(s).(string) } - // no default, since foreign procedures can take no inputs optionally - switch { - case ctx.GetUnnamed_params() != nil: - fp.Parameters = ctx.GetUnnamed_params().Accept(s).([]*types.DataType) - case ctx.GetNamed_params() != nil: - ps := ctx.GetNamed_params().Accept(s).([]*types.ProcedureParameter) - var dataTypes []*types.DataType - for _, p := range ps { - dataTypes = append(dataTypes, p.Type) - } - fp.Parameters = dataTypes + if ctx.UNIQUE() != nil { + a.Type = IndexTypeUnique } - return fp + a.Set(ctx) + return a } -func (s *schemaVisitor) VisitProcedure_return(ctx *gen.Procedure_returnContext) any { - ret := &types.ProcedureReturn{} - - switch { - case ctx.GetReturn_columns() != nil: - ret.Fields = ctx.GetReturn_columns().Accept(s).([]*types.NamedType) - case ctx.GetUnnamed_return_types() != nil: - ret.Fields = make([]*types.NamedType, len(ctx.GetUnnamed_return_types().AllType_())) - for i, t := range ctx.GetUnnamed_return_types().AllType_() { - ret.Fields[i] = &types.NamedType{ - Name: "col" + strconv.Itoa(i), - Type: t.Accept(s).(*types.DataType), - } - } - default: - panic("unknown return type") +func (s *schemaVisitor) 
VisitDrop_index_statement(ctx *gen.Drop_index_statementContext) interface{} { + a := &DropIndexStatement{ + Name: ctx.Identifier().Accept(s).(string), } - if ctx.TABLE() != nil { - ret.IsTable = true + if ctx.EXISTS() != nil { + a.CheckExist = true } - return ret + a.Set(ctx) + return a } -// VisitSQL visits a SQL statement. It is the top-level SQL visitor. -func (s *schemaVisitor) VisitSql(ctx *gen.SqlContext) any { - return ctx.Sql_statement().Accept(s) +func (s *schemaVisitor) VisitCreate_role_statement(ctx *gen.Create_role_statementContext) any { + stmt := &CreateRoleStatement{ + Role: s.getIdent(ctx.Identifier()), + } + if ctx.EXISTS() != nil { + stmt.IfNotExists = true + } + + stmt.Set(ctx) + return stmt } -// VisitSql_statement visits a SQL statement. It is called by all nested -// sql statements (e.g. in procedures and actions) -func (s *schemaVisitor) VisitSql_statement(ctx *gen.Sql_statementContext) any { - stmt := &SQLStatement{ - CTEs: arr[*CommonTableExpression](len(ctx.AllCommon_table_expression())), +func (s *schemaVisitor) VisitDrop_role_statement(ctx *gen.Drop_role_statementContext) any { + stmt := &DropRoleStatement{ + Role: s.getIdent(ctx.Identifier()), + } + if ctx.EXISTS() != nil { + stmt.IfExists = true } - for i, cte := range ctx.AllCommon_table_expression() { - stmt.CTEs[i] = cte.Accept(s).(*CommonTableExpression) + stmt.Set(ctx) + return stmt +} + +func (s *schemaVisitor) VisitGrant_statement(ctx *gen.Grant_statementContext) any { + c := s.parseGrantOrRevoke(ctx) + c.IsGrant = true + return c +} + +func (s *schemaVisitor) VisitRevoke_statement(ctx *gen.Revoke_statementContext) any { + c := s.parseGrantOrRevoke(ctx) + c.IsGrant = false // not necessary, but for clarity + return c +} + +// parseGrantOrRevoke parses a GRANT or REVOKE statement. +// It is the responsibility of the caller to set the correct IsGrant field. 
+func (s *schemaVisitor) parseGrantOrRevoke(ctx interface { + antlr.ParserRuleContext + Privilege_list() gen.IPrivilege_listContext + GetGrant_role() gen.IIdentifierContext + GetRole() gen.IIdentifierContext + GetUser() antlr.Token + GetNamespace() gen.IIdentifierContext +}) *GrantOrRevokeStatement { + // can be: + // GRANT/REVOKE privilege_list/role TO/FROM role/user + + c := &GrantOrRevokeStatement{} + switch { + case ctx.Privilege_list() != nil: + c.Privileges = ctx.Privilege_list().Accept(s).([]string) + case ctx.GetGrant_role() != nil: + c.GrantRole = ctx.GetGrant_role().Accept(s).(string) + default: + // should not happen, as this would suggest a bug in the parser + panic("invalid grant/revoke statement") } switch { - case ctx.Select_statement() != nil: - stmt.SQL = ctx.Select_statement().Accept(s).(*SelectStatement) - case ctx.Update_statement() != nil: - stmt.SQL = ctx.Update_statement().Accept(s).(*UpdateStatement) - case ctx.Insert_statement() != nil: - stmt.SQL = ctx.Insert_statement().Accept(s).(*InsertStatement) - case ctx.Delete_statement() != nil: - stmt.SQL = ctx.Delete_statement().Accept(s).(*DeleteStatement) + case ctx.GetRole() != nil: + c.ToRole = ctx.GetRole().Accept(s).(string) + case ctx.GetUser() != nil: + c.ToUser = ctx.GetUser().GetText() default: - panic("unknown sql statement") + // should not happen, as this would suggest a bug in the parser + panic("invalid grant/revoke statement") } + c.Set(ctx) - stmt.Set(ctx) - return stmt -} + if ctx.GetNamespace() != nil { + ns := s.getIdent(ctx.GetNamespace()) + c.Namespace = &ns + } -func (s *schemaVisitor) VisitCommon_table_expression(ctx *gen.Common_table_expressionContext) any { - // first identifier is the table name, the rest are the columns - cte := &CommonTableExpression{ - Name: ctx.Identifier(0).Accept(s).(string), - Query: ctx.Select_statement().Accept(s).(*SelectStatement), + // either privileges can be granted to roles, or roles can be granted to users. + // Other permutations are invalid. 
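+	// Shapes accepted here, sketched for clarity (keywords as in the grammar comment above):
+	//   GRANT <privilege_list> TO <role>  -- privileges are granted to a role
+	//   GRANT <role> TO <user>            -- a role is granted to a user
+	// Granting a role to another role, granting privileges directly to a user, or scoping a role grant to a namespace is rejected below.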
+ + // if granting roles, then recipient must be a user + if len(c.Privileges) == 0 { + // no privileges, so we are granting a role + if c.ToRole != "" { + s.errs.RuleErr(ctx, ErrGrantOrRevoke, "cannot grant or revoke a role to another role") + } + + if c.Namespace != nil { + s.errs.RuleErr(ctx, ErrGrantOrRevoke, "cannot grant or revoke a role on a namespace") + } + } else { + // if granting privileges, then recipient must be a role + if c.ToUser != "" { + s.errs.RuleErr(ctx, ErrGrantOrRevoke, "cannot grant or revoke privileges to a user") + } } - for _, id := range ctx.AllIdentifier()[1:] { - cte.Columns = append(cte.Columns, id.Accept(s).(string)) + return c +} + +func (s *schemaVisitor) VisitPrivilege_list(ctx *gen.Privilege_listContext) any { + var privs []string + for _, p := range ctx.AllPrivilege() { + privs = append(privs, p.Accept(s).(string)) } - cte.Set(ctx) + return privs +} - return cte +func (s *schemaVisitor) VisitPrivilege(ctx *gen.PrivilegeContext) any { + // since there is only one token, we can just get all text + return ctx.GetText() +} + +func (s *schemaVisitor) VisitTransfer_ownership_statement(ctx *gen.Transfer_ownership_statementContext) any { + stmt := &TransferOwnershipStatement{ + To: s.getIdent(ctx.Identifier()), + } + + stmt.Set(ctx) + return stmt } func (s *schemaVisitor) VisitSelect_statement(ctx *gen.Select_statementContext) any { @@ -1073,17 +1207,39 @@ func (s *schemaVisitor) VisitSelect_core(ctx *gen.Select_coreContext) any { stmt.Having = ctx.GetHaving().Accept(s).(Expression) } + if ctx.WINDOW() != nil { + // the only Identifier used in the SELECT CORE grammar is for naming windows, + // so we can safely get identifiers by index here. + for i, window := range ctx.AllWindow() { + name := s.getIdent(ctx.Identifier(i)) + + win := window.Accept(s).(*WindowImpl) + + stmt.Windows = append(stmt.Windows, &struct { + Name string + Window *WindowImpl + }{ + Name: name, + Window: win, + }) + } + } + stmt.Set(ctx) return stmt } func (s *schemaVisitor) VisitTable_relation(ctx *gen.Table_relationContext) any { t := &RelationTable{ - Table: strings.ToLower(ctx.GetTable_name().Accept(s).(string)), + Table: s.getIdent(ctx.GetTable_name()), } if ctx.GetAlias() != nil { - t.Alias = strings.ToLower(ctx.GetAlias().Accept(s).(string)) + t.Alias = s.getIdent(ctx.GetAlias()) + } + + if ctx.GetNamespace() != nil { + t.Namespace = s.getIdent(ctx.GetNamespace()) } t.Set(ctx) @@ -1105,21 +1261,6 @@ func (s *schemaVisitor) VisitSubquery_relation(ctx *gen.Subquery_relationContext return t } -func (s *schemaVisitor) VisitFunction_relation(ctx *gen.Function_relationContext) any { - t := &RelationFunctionCall{ - FunctionCall: ctx.Sql_function_call().Accept(s).(ExpressionCall), - } - - // alias is technially required here, but we allow it in the grammar - // to throw a better error message here. 
- if ctx.Identifier() != nil { - t.Alias = ctx.Identifier().Accept(s).(string) - } - - t.Set(ctx) - return t -} - func (s *schemaVisitor) VisitJoin(ctx *gen.JoinContext) any { j := &Join{ Relation: ctx.Relation().Accept(s).(Table), @@ -1213,8 +1354,14 @@ func (s *schemaVisitor) VisitInsert_statement(ctx *gen.Insert_statementContext) Table: ctx.GetTable_name().Accept(s).(string), } - for _, valList := range ctx.AllSql_expr_list() { - ins.Values = append(ins.Values, valList.Accept(s).([]Expression)) + // can either be INSERT INTO table VALUES (1, 2, 3) or + // INSERT INTO table SELECT * FROM table2 + if ctx.Select_statement() != nil { + ins.Select = ctx.Select_statement().Accept(s).(*SelectStatement) + } else { + for _, valList := range ctx.AllSql_expr_list() { + ins.Values = append(ins.Values, valList.Accept(s).([]Expression)) + } } if ctx.GetAlias() != nil { @@ -1226,7 +1373,7 @@ func (s *schemaVisitor) VisitInsert_statement(ctx *gen.Insert_statementContext) } if ctx.Upsert_clause() != nil { - ins.Upsert = ctx.Upsert_clause().Accept(s).(*UpsertClause) + ins.OnConflict = ctx.Upsert_clause().Accept(s).(*OnConflict) } ins.Set(ctx) @@ -1234,7 +1381,7 @@ func (s *schemaVisitor) VisitInsert_statement(ctx *gen.Insert_statementContext) } func (s *schemaVisitor) VisitUpsert_clause(ctx *gen.Upsert_clauseContext) any { - u := &UpsertClause{} + u := &OnConflict{} if ctx.GetConflict_columns() != nil { u.ConflictColumns = ctx.GetConflict_columns().Accept(s).([]string) @@ -1413,7 +1560,7 @@ func (s *schemaVisitor) VisitBetween_sql_expr(ctx *gen.Between_sql_exprContext) } func (s *schemaVisitor) VisitFunction_call_sql_expr(ctx *gen.Function_call_sql_exprContext) any { - call := ctx.Sql_function_call().Accept(s).(ExpressionCall) + call := ctx.Sql_function_call().Accept(s).(*ExpressionFunctionCall) if ctx.Type_cast() != nil { call.Cast(ctx.Type_cast().Accept(s).(*types.DataType)) @@ -1424,6 +1571,31 @@ func (s *schemaVisitor) VisitFunction_call_sql_expr(ctx *gen.Function_call_sql_e return call } +func (s *schemaVisitor) VisitWindow_function_call_sql_expr(ctx *gen.Window_function_call_sql_exprContext) any { + e := &ExpressionWindowFunctionCall{ + FunctionCall: ctx.Sql_function_call().Accept(s).(*ExpressionFunctionCall), + } + + if ctx.Identifier() != nil { + name := s.getIdent(ctx.Identifier()) + wr := &WindowReference{ + Name: name, + } + wr.SetToken(ctx.Identifier().IDENTIFIER().GetSymbol()) + e.Window = wr + } else { + e.Window = ctx.Window().Accept(s).(*WindowImpl) + } + + if ctx.FILTER() != nil { + e.Filter = ctx.Sql_expr().Accept(s).(Expression) + } + + e.Set(ctx) + + return e +} + func (s *schemaVisitor) VisitParen_sql_expr(ctx *gen.Paren_sql_exprContext) any { e := &ExpressionParenthesized{ Inner: ctx.Sql_expr().Accept(s).(Expression), @@ -1568,6 +1740,19 @@ func (s *schemaVisitor) VisitUnary_sql_expr(ctx *gen.Unary_sql_exprContext) any return e } +func (s *schemaVisitor) VisitMake_array_sql_expr(ctx *gen.Make_array_sql_exprContext) any { + e := &ExpressionMakeArray{ + Values: ctx.Sql_expr_list().Accept(s).([]Expression), + } + + if ctx.Type_cast() != nil { + e.TypeCast = ctx.Type_cast().Accept(s).(*types.DataType) + } + + e.Set(ctx) + return e +} + func (s *schemaVisitor) VisitSubquery_sql_expr(ctx *gen.Subquery_sql_exprContext) any { e := &ExpressionSubquery{ Subquery: ctx.Select_statement().Accept(s).(*SelectStatement), @@ -1665,91 +1850,10 @@ func (s *schemaVisitor) VisitNormal_call_sql(ctx *gen.Normal_call_sqlContext) an return call } -func (s *schemaVisitor) VisitForeign_call_sql(ctx 
*gen.Foreign_call_sqlContext) any { - e := &ExpressionForeignCall{ - Name: ctx.Identifier().Accept(s).(string), - } - - if ctx.Sql_expr_list() != nil { - e.Args = ctx.Sql_expr_list().Accept(s).([]Expression) - } - - dbid := ctx.GetDbid().Accept(s).(Expression) - proc := ctx.GetProcedure().Accept(s).(Expression) - e.ContextualArgs = append(e.ContextualArgs, dbid, proc) - - e.Set(ctx) - - return e -} - -func (s *schemaVisitor) VisitAction_block(ctx *gen.Action_blockContext) any { - var stmts []ActionStmt - - for _, stmt := range ctx.AllAction_statement() { - stmts = append(stmts, stmt.Accept(s).(ActionStmt)) - } - - return stmts -} - -func (s *schemaVisitor) VisitSql_action(ctx *gen.Sql_actionContext) any { - stmt := &ActionStmtSQL{ - SQL: ctx.Sql_statement().Accept(s).(*SQLStatement), - } - - stmt.Set(ctx) - return stmt -} - -func (s *schemaVisitor) VisitLocal_action(ctx *gen.Local_actionContext) any { - stmt := &ActionStmtActionCall{ - Action: s.getIdent(ctx.IDENTIFIER()), - } - - if ctx.Procedure_expr_list() != nil { - stmt.Args = ctx.Procedure_expr_list().Accept(s).([]Expression) - } - - stmt.Set(ctx) - return stmt -} - -func (s *schemaVisitor) VisitExtension_action(ctx *gen.Extension_actionContext) any { - stmt := &ActionStmtExtensionCall{ - Extension: s.getIdent(ctx.IDENTIFIER(0)), - Method: s.getIdent(ctx.IDENTIFIER(1)), - } - - if ctx.Procedure_expr_list() != nil { - stmt.Args = ctx.Procedure_expr_list().Accept(s).([]Expression) - } - - if ctx.Variable_list() != nil { - varList := ctx.Variable_list().Accept(s).([]*ExpressionVariable) - for _, v := range varList { - stmt.Receivers = append(stmt.Receivers, v.String()) - } - } - - stmt.Set(ctx) - return stmt -} - -func (s *schemaVisitor) VisitProcedure_block(ctx *gen.Procedure_blockContext) any { - var stmts []ProcedureStmt - - for _, stmt := range ctx.AllProc_statement() { - stmts = append(stmts, stmt.Accept(s).(ProcedureStmt)) - } - - return stmts -} - -func (s *schemaVisitor) VisitField_access_procedure_expr(ctx *gen.Field_access_procedure_exprContext) any { +func (s *schemaVisitor) VisitField_access_action_expr(ctx *gen.Field_access_action_exprContext) any { e := &ExpressionFieldAccess{ - Record: ctx.Procedure_expr().Accept(s).(Expression), - Field: s.getIdent(ctx.IDENTIFIER()), + Record: ctx.Action_expr().Accept(s).(Expression), + Field: s.getIdent(ctx.Identifier()), } if ctx.Type_cast() != nil { @@ -1761,7 +1865,7 @@ func (s *schemaVisitor) VisitField_access_procedure_expr(ctx *gen.Field_access_p return e } -func (s *schemaVisitor) VisitLiteral_procedure_expr(ctx *gen.Literal_procedure_exprContext) any { +func (s *schemaVisitor) VisitLiteral_action_expr(ctx *gen.Literal_action_exprContext) any { e := ctx.Literal().Accept(s).(*ExpressionLiteral) if ctx.Type_cast() != nil { @@ -1772,9 +1876,9 @@ func (s *schemaVisitor) VisitLiteral_procedure_expr(ctx *gen.Literal_procedure_e return e } -func (s *schemaVisitor) VisitParen_procedure_expr(ctx *gen.Paren_procedure_exprContext) any { +func (s *schemaVisitor) VisitParen_action_expr(ctx *gen.Paren_action_exprContext) any { e := &ExpressionParenthesized{ - Inner: ctx.Procedure_expr().Accept(s).(Expression), + Inner: ctx.Action_expr().Accept(s).(Expression), } if ctx.Type_cast() != nil { @@ -1785,7 +1889,7 @@ func (s *schemaVisitor) VisitParen_procedure_expr(ctx *gen.Paren_procedure_exprC return e } -func (s *schemaVisitor) VisitVariable_procedure_expr(ctx *gen.Variable_procedure_exprContext) any { +func (s *schemaVisitor) VisitVariable_action_expr(ctx *gen.Variable_action_exprContext) any { 
e := ctx.Variable().Accept(s).(*ExpressionVariable) if ctx.Type_cast() != nil { @@ -1796,9 +1900,9 @@ func (s *schemaVisitor) VisitVariable_procedure_expr(ctx *gen.Variable_procedure return e } -func (s *schemaVisitor) VisitIs_procedure_expr(ctx *gen.Is_procedure_exprContext) any { +func (s *schemaVisitor) VisitIs_action_expr(ctx *gen.Is_action_exprContext) any { e := &ExpressionIs{ - Left: ctx.Procedure_expr(0).Accept(s).(Expression), + Left: ctx.Action_expr(0).Accept(s).(Expression), } if ctx.NOT() != nil { @@ -1837,10 +1941,10 @@ func (s *schemaVisitor) VisitIs_procedure_expr(ctx *gen.Is_procedure_exprContext return e } -func (s *schemaVisitor) VisitLogical_procedure_expr(ctx *gen.Logical_procedure_exprContext) any { +func (s *schemaVisitor) VisitLogical_action_expr(ctx *gen.Logical_action_exprContext) any { e := &ExpressionLogical{ - Left: ctx.Procedure_expr(0).Accept(s).(Expression), - Right: ctx.Procedure_expr(1).Accept(s).(Expression), + Left: ctx.Action_expr(0).Accept(s).(Expression), + Right: ctx.Action_expr(1).Accept(s).(Expression), } switch { @@ -1856,10 +1960,10 @@ func (s *schemaVisitor) VisitLogical_procedure_expr(ctx *gen.Logical_procedure_e return e } -func (s *schemaVisitor) VisitProcedure_expr_arithmetic(ctx *gen.Procedure_expr_arithmeticContext) any { +func (s *schemaVisitor) VisitAction_expr_arithmetic(ctx *gen.Action_expr_arithmeticContext) any { e := &ExpressionArithmetic{ - Left: ctx.Procedure_expr(0).Accept(s).(Expression), - Right: ctx.Procedure_expr(1).Accept(s).(Expression), + Left: ctx.Action_expr(0).Accept(s).(Expression), + Right: ctx.Action_expr(1).Accept(s).(Expression), } switch { @@ -1883,10 +1987,10 @@ func (s *schemaVisitor) VisitProcedure_expr_arithmetic(ctx *gen.Procedure_expr_a return e } -func (s *schemaVisitor) VisitComparison_procedure_expr(ctx *gen.Comparison_procedure_exprContext) any { +func (s *schemaVisitor) VisitComparison_action_expr(ctx *gen.Comparison_action_exprContext) any { e := &ExpressionComparison{ - Left: ctx.Procedure_expr(0).Accept(s).(Expression), - Right: ctx.Procedure_expr(1).Accept(s).(Expression), + Left: ctx.Action_expr(0).Accept(s).(Expression), + Right: ctx.Action_expr(1).Accept(s).(Expression), } switch { @@ -1910,8 +2014,8 @@ func (s *schemaVisitor) VisitComparison_procedure_expr(ctx *gen.Comparison_proce return e } -func (s *schemaVisitor) VisitFunction_call_procedure_expr(ctx *gen.Function_call_procedure_exprContext) any { - call := ctx.Procedure_function_call().Accept(s).(ExpressionCall) +func (s *schemaVisitor) VisitFunction_call_action_expr(ctx *gen.Function_call_action_exprContext) any { + call := ctx.Action_function_call().Accept(s).(*ExpressionFunctionCall) if ctx.Type_cast() != nil { call.Cast(ctx.Type_cast().Accept(s).(*types.DataType)) @@ -1922,9 +2026,9 @@ func (s *schemaVisitor) VisitFunction_call_procedure_expr(ctx *gen.Function_call return call } -func (s *schemaVisitor) VisitArray_access_procedure_expr(ctx *gen.Array_access_procedure_exprContext) any { +func (s *schemaVisitor) VisitArray_access_action_expr(ctx *gen.Array_access_action_exprContext) any { e := &ExpressionArrayAccess{ - Array: ctx.Procedure_expr(0).Accept(s).(Expression), + Array: ctx.Action_expr(0).Accept(s).(Expression), } s.makeArray(e, ctx.GetSingle(), ctx.GetLeft(), ctx.GetRight()) @@ -1937,9 +2041,9 @@ func (s *schemaVisitor) VisitArray_access_procedure_expr(ctx *gen.Array_access_p return e } -func (s *schemaVisitor) VisitUnary_procedure_expr(ctx *gen.Unary_procedure_exprContext) any { +func (s *schemaVisitor) 
VisitUnary_action_expr(ctx *gen.Unary_action_exprContext) any { e := &ExpressionUnary{ - Expression: ctx.Procedure_expr().Accept(s).(Expression), + Expression: ctx.Action_expr().Accept(s).(Expression), } // this is the only known unary right now @@ -1958,19 +2062,19 @@ func (s *schemaVisitor) VisitUnary_procedure_expr(ctx *gen.Unary_procedure_exprC return e } -func (s *schemaVisitor) VisitMake_array_procedure_expr(ctx *gen.Make_array_procedure_exprContext) any { +func (s *schemaVisitor) VisitMake_array_action_expr(ctx *gen.Make_array_action_exprContext) any { e := &ExpressionMakeArray{ // golang interface assertions do not work for slices, so we simply - // cast the result to []Expression. This comes from VisitProcedure_expr_list, + // cast the result to []Expression. This comes from VisitAction_expr_list, // directly below. } // we could enforce this in the parser, but it is not super intuitive, // so we want to control the error message - if ctx.Procedure_expr_list() == nil { + if ctx.Action_expr_list() == nil { s.errs.RuleErr(ctx, ErrSyntax, "cannot assign empty arrays. declare using `$arr type[];` instead`") } - e.Values = ctx.Procedure_expr_list().Accept(s).([]Expression) + e.Values = ctx.Action_expr_list().Accept(s).([]Expression) if ctx.Type_cast() != nil { e.TypeCast = ctx.Type_cast().Accept(s).(*types.DataType) @@ -1980,12 +2084,12 @@ func (s *schemaVisitor) VisitMake_array_procedure_expr(ctx *gen.Make_array_proce return e } -func (s *schemaVisitor) VisitProcedure_expr_list(ctx *gen.Procedure_expr_listContext) any { +func (s *schemaVisitor) VisitAction_expr_list(ctx *gen.Action_expr_listContext) any { // we do not return a type ExpressionList here, since ExpressionList is SQL specific, - // and not supported in procedures. Instead, we return a slice of Expression. + // and not supported in actions. Instead, we return a slice of Expression. 
var exprs []Expression - for _, e := range ctx.AllProcedure_expr() { + for _, e := range ctx.AllAction_expr() { exprs = append(exprs, e.Accept(s).(Expression)) } @@ -1993,7 +2097,7 @@ func (s *schemaVisitor) VisitProcedure_expr_list(ctx *gen.Procedure_expr_listCon } func (s *schemaVisitor) VisitStmt_variable_declaration(ctx *gen.Stmt_variable_declarationContext) any { - stmt := &ProcedureStmtDeclaration{ + stmt := &ActionStmtDeclaration{ Variable: varFromTerminalNode(ctx.VARIABLE()), } @@ -2034,14 +2138,14 @@ func varFromString(s string) *ExpressionVariable { panic("invalid variable: " + s) } - e.Name = strings.ToLower(s[1:]) + e.Name = strings.ToLower(s) return e } -func (s *schemaVisitor) VisitStmt_procedure_call(ctx *gen.Stmt_procedure_callContext) any { - stmt := &ProcedureStmtCall{ - Call: ctx.Procedure_function_call().Accept(s).(ExpressionCall), +func (s *schemaVisitor) VisitStmt_action_call(ctx *gen.Stmt_action_callContext) any { + stmt := &ActionStmtCall{ + Call: ctx.Action_function_call().Accept(s).(*ExpressionFunctionCall), } for i, v := range ctx.AllVariable_or_underscore() { @@ -2064,22 +2168,21 @@ func (s *schemaVisitor) VisitVariable_or_underscore(ctx *gen.Variable_or_undersc return nil } - str := s.getIdent(ctx.VARIABLE()) + str := s.cleanStringIdent(ctx.VARIABLE()) return &str } func (s *schemaVisitor) VisitStmt_variable_assignment(ctx *gen.Stmt_variable_assignmentContext) any { - stmt := &ProcedureStmtAssign{} + stmt := &ActionStmtAssign{} - stmt.Variable = ctx.Procedure_expr(0).Accept(s).(Expression) - // this can either be a variable or an array access - switch v := stmt.Variable.(type) { - case *ExpressionVariable, *ExpressionArrayAccess: - // ok - default: - s.errs.RuleErr(ctx.Procedure_expr(0), ErrSyntax, "cannot assign to %T", v) + assignVariable := ctx.Action_expr(0).Accept(s).(Expression) + + assignable, ok := assignVariable.(Assignable) + if !ok { + s.errs.RuleErr(ctx.Action_expr(0), ErrSyntax, "cannot assign to %T", assignVariable) } - stmt.Value = ctx.Procedure_expr(1).Accept(s).(Expression) + stmt.Variable = assignable + stmt.Value = ctx.Action_expr(1).Accept(s).(Expression) if ctx.Type_() != nil { stmt.Type = ctx.Type_().Accept(s).(*types.DataType) @@ -2090,9 +2193,9 @@ func (s *schemaVisitor) VisitStmt_variable_assignment(ctx *gen.Stmt_variable_ass } func (s *schemaVisitor) VisitStmt_for_loop(ctx *gen.Stmt_for_loopContext) any { - stmt := &ProcedureStmtForLoop{ + stmt := &ActionStmtForLoop{ Receiver: varFromTerminalNode(ctx.VARIABLE()), - Body: arr[ProcedureStmt](len(ctx.AllProc_statement())), + Body: arr[ActionStmt](len(ctx.AllAction_statement())), } switch { @@ -2119,8 +2222,8 @@ func (s *schemaVisitor) VisitStmt_for_loop(ctx *gen.Stmt_for_loopContext) any { panic("unknown loop term") } - for i, st := range ctx.AllProc_statement() { - stmt.Body[i] = st.Accept(s).(ProcedureStmt) + for i, st := range ctx.AllAction_statement() { + stmt.Body[i] = st.Accept(s).(ActionStmt) } stmt.Set(ctx) @@ -2128,17 +2231,17 @@ func (s *schemaVisitor) VisitStmt_for_loop(ctx *gen.Stmt_for_loopContext) any { } func (s *schemaVisitor) VisitStmt_if(ctx *gen.Stmt_ifContext) any { - stmt := &ProcedureStmtIf{ + stmt := &ActionStmtIf{ IfThens: arr[*IfThen](len(ctx.AllIf_then_block())), - Else: arr[ProcedureStmt](len(ctx.AllProc_statement())), + Else: arr[ActionStmt](len(ctx.AllAction_statement())), } for i, th := range ctx.AllIf_then_block() { stmt.IfThens[i] = th.Accept(s).(*IfThen) } - for i, st := range ctx.AllProc_statement() { - stmt.Else[i] = st.Accept(s).(ProcedureStmt) + for i, 
st := range ctx.AllAction_statement() { + stmt.Else[i] = st.Accept(s).(ActionStmt) } stmt.Set(ctx) @@ -2147,12 +2250,12 @@ func (s *schemaVisitor) VisitStmt_if(ctx *gen.Stmt_ifContext) any { func (s *schemaVisitor) VisitIf_then_block(ctx *gen.If_then_blockContext) any { ifthen := &IfThen{ - If: ctx.Procedure_expr().Accept(s).(Expression), - Then: arr[ProcedureStmt](len(ctx.AllProc_statement())), + If: ctx.Action_expr().Accept(s).(Expression), + Then: arr[ActionStmt](len(ctx.AllAction_statement())), } - for i, st := range ctx.AllProc_statement() { - ifthen.Then[i] = st.Accept(s).(ProcedureStmt) + for i, st := range ctx.AllAction_statement() { + ifthen.Then[i] = st.Accept(s).(ActionStmt) } ifthen.Set(ctx) @@ -2161,7 +2264,7 @@ func (s *schemaVisitor) VisitIf_then_block(ctx *gen.If_then_blockContext) any { } func (s *schemaVisitor) VisitStmt_sql(ctx *gen.Stmt_sqlContext) any { - stmt := &ProcedureStmtSQL{ + stmt := &ActionStmtSQL{ SQL: ctx.Sql_statement().Accept(s).(*SQLStatement), } @@ -2170,22 +2273,22 @@ func (s *schemaVisitor) VisitStmt_sql(ctx *gen.Stmt_sqlContext) any { } func (s *schemaVisitor) VisitStmt_break(ctx *gen.Stmt_breakContext) any { - stmt := &ProcedureStmtBreak{} + stmt := &ActionStmtBreak{} stmt.Set(ctx) return stmt } func (s *schemaVisitor) VisitStmt_return(ctx *gen.Stmt_returnContext) any { - stmt := &ProcedureStmtReturn{} + stmt := &ActionStmtReturn{} switch { case ctx.Sql_statement() != nil: stmt.SQL = ctx.Sql_statement().Accept(s).(*SQLStatement) - case ctx.Procedure_expr_list() != nil: + case ctx.Action_expr_list() != nil: // loop through and add since these are Expressions, not Expressions - exprs := ctx.Procedure_expr_list().Accept(s).([]Expression) + exprs := ctx.Action_expr_list().Accept(s).([]Expression) stmt.Values = append(stmt.Values, exprs...) - // return can be nil if a procedure simply wants to exit early + // return can be nil if an action simply wants to exit early } stmt.Set(ctx) @@ -2193,55 +2296,59 @@ func (s *schemaVisitor) VisitStmt_return(ctx *gen.Stmt_returnContext) any { } func (s *schemaVisitor) VisitStmt_return_next(ctx *gen.Stmt_return_nextContext) any { - stmt := &ProcedureStmtReturnNext{} + stmt := &ActionStmtReturnNext{} - vals := ctx.Procedure_expr_list().Accept(s).([]Expression) + vals := ctx.Action_expr_list().Accept(s).([]Expression) stmt.Values = append(stmt.Values, vals...) 
stmt.Set(ctx) return stmt } -func (s *schemaVisitor) VisitNormal_call_procedure(ctx *gen.Normal_call_procedureContext) any { - call := &ExpressionFunctionCall{ - Name: s.getIdent(ctx.IDENTIFIER()), +func (s *schemaVisitor) VisitNormal_call_action(ctx *gen.Normal_call_actionContext) any { + call := &ExpressionFunctionCall{} + + call.Name = s.getIdent(ctx.GetFunction()) + if ctx.GetNamespace() != nil { + call.Namespace = s.getIdent(ctx.GetNamespace()) } - // distinct and * cannot be used in procedure function calls - if ctx.Procedure_expr_list() != nil { - call.Args = ctx.Procedure_expr_list().Accept(s).([]Expression) + // distinct and * cannot be used in action function calls + if ctx.Action_expr_list() != nil { + call.Args = ctx.Action_expr_list().Accept(s).([]Expression) } call.Set(ctx) return call } -func (s *schemaVisitor) VisitForeign_call_procedure(ctx *gen.Foreign_call_procedureContext) any { - e := &ExpressionForeignCall{ - Name: s.getIdent(ctx.IDENTIFIER()), - } - - if ctx.Procedure_expr_list() != nil { - e.Args = ctx.Procedure_expr_list().Accept(s).([]Expression) +func (s *schemaVisitor) VisitRange(ctx *gen.RangeContext) any { + r := &LoopTermRange{ + Start: ctx.Action_expr(0).Accept(s).(Expression), + End: ctx.Action_expr(1).Accept(s).(Expression), } - dbid := ctx.GetDbid().Accept(s).(Expression) - proc := ctx.GetProcedure().Accept(s).(Expression) - e.ContextualArgs = append(e.ContextualArgs, dbid, proc) + r.Set(ctx) + return r +} - e.Set(ctx) +func (s *schemaVisitor) VisitWindow(ctx *gen.WindowContext) any { + win := &WindowImpl{} - return e -} + if ctx.GetPartition() != nil { + win.PartitionBy = ctx.GetPartition().Accept(s).([]Expression) + } -func (s *schemaVisitor) VisitRange(ctx *gen.RangeContext) any { - r := &LoopTermRange{ - Start: ctx.Procedure_expr(0).Accept(s).(Expression), - End: ctx.Procedure_expr(1).Accept(s).(Expression), + if ctx.ORDER() != nil { + for _, o := range ctx.AllOrdering_term() { + win.OrderBy = append(win.OrderBy, o.Accept(s).(*OrderingTerm)) + } } - r.Set(ctx) - return r + // currently does not support frame + + win.Set(ctx) + return win } func (s *schemaVisitor) Visit(tree antlr.ParseTree) interface { @@ -2252,7 +2359,11 @@ func (s *schemaVisitor) Visit(tree antlr.ParseTree) interface { // getIdent returns the text of an identifier. // It checks that the identifier is not too long. // It also converts the identifier to lowercase. -func (s *schemaVisitor) getIdent(i antlr.TerminalNode) string { +func (s *schemaVisitor) getIdent(i gen.IIdentifierContext) string { + return strings.ToLower(s.cleanStringIdent(i.IDENTIFIER())) +} + +func (s *schemaVisitor) cleanStringIdent(i antlr.TerminalNode) string { ident := i.GetText() s.validateVariableIdentifier(i.GetSymbol(), ident) return strings.ToLower(ident) diff --git a/node/engine/parse/ast.go b/node/engine/parse/ast.go new file mode 100644 index 000000000..1154d7777 --- /dev/null +++ b/node/engine/parse/ast.go @@ -0,0 +1,1948 @@ +package parse + +import ( + "encoding/hex" + "fmt" + "strings" + + antlr "github.com/antlr4-go/antlr/v4" + "github.com/kwilteam/kwil-db/core/types" + "github.com/kwilteam/kwil-db/core/types/decimal" +) + +// this file contains the ASTs for SQL, DDL, and actions. + +// Node is a node in the AST. 
+type Node interface { + Positionable + Accept(Visitor) any +} + +type GetPositioner interface { + GetPosition() *Position + Clear() +} + +type Positionable interface { + GetPositioner + Set(r antlr.ParserRuleContext) + SetToken(t antlr.Token) +} + +type Typecastable struct { + TypeCast *types.DataType +} + +func (t *Typecastable) Cast(t2 *types.DataType) { + t.TypeCast = t2 +} + +func (t *Typecastable) GetTypeCast() *types.DataType { + return t.TypeCast +} + +type Typecasted interface { + GetTypeCast() *types.DataType +} + +// Expression is an interface for all expressions. +type Expression interface { + Node +} + +// Assignable is an interface for all expressions that can be assigned to. +type Assignable interface { + Expression + assignable() +} + +// TopLevelStatement is a top-level statement. +// By itself, it is a valid statement. +type TopLevelStatement interface { + Node + topLevelStatement() +} + +// ExpressionLiteral is a literal expression. +type ExpressionLiteral struct { + Position + Typecastable + Type *types.DataType + // Value is the value of the literal. + // It must be of type string, int64, bool, *uint256.Int, *decimal.Decimal, + // or nil + Value any +} + +func (e *ExpressionLiteral) Accept(v Visitor) any { + return v.VisitExpressionLiteral(e) +} + +// String returns the string representation of the literal. +func (e *ExpressionLiteral) String() string { + s, err := literalToString(e.Value) + if err != nil { + panic(err.Error() + ": " + fmt.Sprintf("%T", e.Value)) + } + return s +} + +// literalToString formats a literal value to be used in a SQL / DDL statement. +func literalToString(value any) (string, error) { + str := strings.Builder{} + switch v := value.(type) { + case string: // for text type + str.WriteString("'" + v + "'") + case int64, int, int32: // for int type + str.WriteString(fmt.Sprint(v)) + case *types.Uint256: + str.WriteString(v.String()) + case *decimal.Decimal: + str.WriteString(v.String()) + case bool: // for bool type + if v { + str.WriteString("true") + } else { + str.WriteString("false") + } + case []byte: + str.WriteString("0x" + hex.EncodeToString(v)) + case nil: + // do nothing + default: + return "", fmt.Errorf("unsupported literal type: %T", v) + } + + return str.String(), nil +} + +// ExpressionFunctionCall is a function call expression. +type ExpressionFunctionCall struct { + Position + Typecastable + // Namespace is the namespace/schema that the function is in. + // It can be empty if the function is in the default namespace. + Namespace string + // Name is the name of the function. + Name string + // Args are the arguments to the function call. + // They are passed using () + Args []Expression + // Distinct is true if the function call is a DISTINCT function call. + Distinct bool + // Star is true if the function call is a * function call. + // If it is set, then Args must be empty. + Star bool +} + +func (e *ExpressionFunctionCall) Accept(v Visitor) any { + return v.VisitExpressionFunctionCall(e) +} + +// ExpressionWindowFunctionCall is a window function call expression. +type ExpressionWindowFunctionCall struct { + Position + FunctionCall *ExpressionFunctionCall + // Filter is the filter clause. + // If nil, then there is no filter clause. + Filter Expression + // Window is the window function that is being called. + Window Window +} + +func (e *ExpressionWindowFunctionCall) Accept(v Visitor) any { + return v.VisitExpressionWindowFunctionCall(e) +} + +// Window is an interface for all window functions.
+// It can either reference an exact window (e.g. OVER (partition by ... order by ...)) +// or it can reference a window function name (e.g. OVER my_window). +type Window interface { + Node + window() +} + +type WindowImpl struct { + Position + // PartitionBy is the partition by clause. + PartitionBy []Expression + // OrderBy is the order by clause. + OrderBy []*OrderingTerm + // In the future, when/if we support frame clauses, we can add it here. +} + +func (w *WindowImpl) Accept(v Visitor) any { + return v.VisitWindowImpl(w) +} + +func (w *WindowImpl) window() {} + +type WindowReference struct { + Position + // Name is the name of the window. + Name string +} + +func (w *WindowReference) Accept(v Visitor) any { + return v.VisitWindowReference(w) +} + +func (w *WindowReference) window() {} + +// ExpressionVariable is a variable. +// It can be either a $ or @ variable. +type ExpressionVariable struct { + Position + Typecastable + // Name is the name of the variable, + // without the $ or @. + Name string + // Prefix is the $ or @ prefix. + Prefix VariablePrefix +} + +func (e *ExpressionVariable) Accept(v Visitor) any { + return v.VisitExpressionVariable(e) +} + +// String returns the string representation, as it was passed +// in Kuneiform. +func (e *ExpressionVariable) String() string { + return e.Name +} + +func (e *ExpressionVariable) assignable() {} + +type VariablePrefix string + +const ( + VariablePrefixDollar VariablePrefix = "$" + VariablePrefixAt VariablePrefix = "@" +) + +// ExpressionArrayAccess accesses an array value. +type ExpressionArrayAccess struct { + Position + Typecastable + // Array is the array that is being accessed. + Array Expression + // Index is the index that is being accessed. + // Either Index or FromTo is set, but not both. + Index Expression + // FromTo is the range that is being accessed. + // Either Index or FromTo is set, but not both. + // If FromTo is set, then it is a range access. + // If both values are set, then it is arr[FROM:TO]. + // If only From is set, then it is arr[FROM:]. + // If only To is set, then it is arr[:TO]. + // If neither are set and index is not set, then it is arr[:]. + FromTo [2]Expression +} + +func (e *ExpressionArrayAccess) Accept(v Visitor) any { + return v.VisitExpressionArrayAccess(e) +} + +func (e *ExpressionArrayAccess) assignable() {} + +// ExpressionMakeArray makes a new array. +type ExpressionMakeArray struct { + Position + Typecastable + Values []Expression +} + +func (e *ExpressionMakeArray) Accept(v Visitor) any { + return v.VisitExpressionMakeArray(e) +} + +// ExpressionFieldAccess accesses a field in a record. +type ExpressionFieldAccess struct { + Position + Typecastable + // Record is the record that is being accessed. + Record Expression + // Field is the field that is being accessed. + Field string +} + +func (e *ExpressionFieldAccess) Accept(v Visitor) any { + return v.VisitExpressionFieldAccess(e) +} + +// ExpressionParenthesized is a parenthesized expression. +type ExpressionParenthesized struct { + Position + Typecastable + // Inner is the inner expression. + Inner Expression +} + +func (e *ExpressionParenthesized) Accept(v Visitor) any { + return v.VisitExpressionParenthesized(e) +} + +// ExpressionComparison is a comparison expression. +type ExpressionComparison struct { + Position + // Left is the left side of the comparison. + Left Expression + // Right is the right side of the comparison. + Right Expression + // Operator is the operator of the comparison.
+ Operator ComparisonOperator +} + +func (e *ExpressionComparison) Accept(v Visitor) any { + return v.VisitExpressionComparison(e) +} + +type ComparisonOperator string + +const ( + ComparisonOperatorEqual ComparisonOperator = "=" + ComparisonOperatorNotEqual ComparisonOperator = "<>" + ComparisonOperatorGreaterThan ComparisonOperator = ">" + ComparisonOperatorLessThan ComparisonOperator = "<" + ComparisonOperatorGreaterThanOrEqual ComparisonOperator = ">=" + ComparisonOperatorLessThanOrEqual ComparisonOperator = "<=" +) + +// ExpressionLogical is a logical expression. +type ExpressionLogical struct { + Position + // Left is the left side of the logical expression. + Left Expression + // Right is the right side of the logical expression. + Right Expression + // Operator is the operator of the logical expression. + Operator LogicalOperator +} + +func (e *ExpressionLogical) Accept(v Visitor) any { + return v.VisitExpressionLogical(e) +} + +type LogicalOperator string + +const ( + LogicalOperatorAnd LogicalOperator = "AND" + LogicalOperatorOr LogicalOperator = "OR" +) + +// ExpressionArithmetic is an arithmetic expression. +type ExpressionArithmetic struct { + Position + // Left is the left side of the arithmetic expression. + Left Expression + // Right is the right side of the arithmetic expression. + Right Expression + // Operator is the operator of the arithmetic expression. + Operator ArithmeticOperator +} + +func (e *ExpressionArithmetic) Accept(v Visitor) any { + return v.VisitExpressionArithmetic(e) +} + +type ArithmeticOperator string + +const ( + ArithmeticOperatorAdd ArithmeticOperator = "+" + ArithmeticOperatorSubtract ArithmeticOperator = "-" + ArithmeticOperatorMultiply ArithmeticOperator = "*" + ArithmeticOperatorDivide ArithmeticOperator = "/" + ArithmeticOperatorModulo ArithmeticOperator = "%" + ArithmeticOperatorConcat ArithmeticOperator = "||" +) + +type ExpressionUnary struct { + Position + // Expression is the expression that is being operated on. + Expression Expression + // Operator is the operator of the unary expression. + Operator UnaryOperator +} + +func (e *ExpressionUnary) Accept(v Visitor) any { + return v.VisitExpressionUnary(e) +} + +type UnaryOperator string + +const ( + // Not can be either NOT or ! + UnaryOperatorNot UnaryOperator = "NOT" + UnaryOperatorNeg UnaryOperator = "-" + UnaryOperatorPos UnaryOperator = "+" +) + +// ExpressionColumn is a column in a table. +type ExpressionColumn struct { + Position + Typecastable + // Table is the table that the column is in. + Table string // can be empty + // Column is the name of the column. + Column string +} + +func (e *ExpressionColumn) String() string { + if e.Table == "" { + return e.Column + } + return e.Table + "." + e.Column +} + +func (e *ExpressionColumn) Accept(v Visitor) any { + return v.VisitExpressionColumn(e) +} + +// ExpressionCollate is an expression with a collation. +type ExpressionCollate struct { + Position + // Expression is the expression that is being collated. + Expression Expression + // Collation is the collation that is being used. + Collation string +} + +func (e *ExpressionCollate) Accept(v Visitor) any { + return v.VisitExpressionCollate(e) +} + +// ExpressionStringComparison is a string comparison expression. +type ExpressionStringComparison struct { + Position + // Left is the left side of the comparison. + Left Expression + // Right is the right side of the comparison. + Right Expression + Not bool + // Operator is the operator of the comparison. 
+ Operator StringComparisonOperator +} + +func (e *ExpressionStringComparison) Accept(v Visitor) any { + return v.VisitExpressionStringComparison(e) +} + +type StringComparisonOperator string + +const ( + StringComparisonOperatorLike StringComparisonOperator = "LIKE" + StringComparisonOperatorILike StringComparisonOperator = "ILIKE" +) + +// ExpressionIs is an IS expression. +type ExpressionIs struct { + Position + // Left is the left side of the IS expression. + Left Expression + // Right is the right side of the IS expression. + Right Expression + // Not is true if the IS expression is a NOT IS expression. + Not bool + // Distinct is true if the IS expression is a DISTINCT IS expression. + Distinct bool +} + +func (e *ExpressionIs) Accept(v Visitor) any { + return v.VisitExpressionIs(e) +} + +// ExpressionBetween is a BETWEEN expression. +type ExpressionBetween struct { + Position + // Expression is the expression that is being compared. + Expression Expression + // Lower is the left side of the BETWEEN expression. + Lower Expression + // Upper is the right side of the BETWEEN expression. + Upper Expression + // Not is true if the BETWEEN expression is a NOT BETWEEN expression. + Not bool +} + +func (e *ExpressionBetween) Accept(v Visitor) any { + return v.VisitExpressionBetween(e) +} + +type ExpressionIn struct { + Position + // Expression is the expression that is being compared. + Expression Expression + // List is the list of expressions that the expression is being compared to. + // Either List or Subquery is set, but not both. + List []Expression + // Subquery is the subquery that the expression is being compared to. + // Either List or Subquery is set, but not both. + Subquery *SelectStatement + // Not is true if the IN expression is a NOT IN expression. + Not bool +} + +func (e *ExpressionIn) Accept(v Visitor) any { + return v.VisitExpressionIn(e) +} + +// ExpressionSubquery is a subquery expression. +type ExpressionSubquery struct { + Position + Typecastable + Not bool + Exists bool + Subquery *SelectStatement +} + +func (e *ExpressionSubquery) Accept(v Visitor) any { + return v.VisitExpressionSubquery(e) +} + +// ExpressionCase is a CASE expression. +type ExpressionCase struct { + Position + Case Expression + WhenThen [][2]Expression + Else Expression +} + +func (e *ExpressionCase) Accept(v Visitor) any { + return v.VisitExpressionCase(e) +} + +// CommonTableExpression is a common table expression. +type CommonTableExpression struct { + Position + // Name is the name of the CTE. + Name string + // Columns are the columns of the CTE. + Columns []string + // Query is the query of the CTE. + Query *SelectStatement +} + +func (c *CommonTableExpression) Accept(v Visitor) any { + return v.VisitCommonTableExpression(c) +} + +// Namespacing is a struct that can have a namespace prefix. +// This is used for top-level statements that can have a namespace prefix +// using curly braces. +type Namespacing struct { + NamespacePrefix string +} + +func (n *Namespacing) SetNamespacePrefix(prefix string) { + n.NamespacePrefix = prefix +} + +func (n *Namespacing) GetNamespacePrefix() string { + return n.NamespacePrefix +} + +type Namespaceable interface { + TopLevelStatement + SetNamespacePrefix(string) + GetNamespacePrefix() string +} + +// SQLStatement is a DML statement with common table expression. +type SQLStatement struct { + Position + Namespacing + CTEs []*CommonTableExpression + // Recursive is true if the RECURSIVE keyword is present. 
+ Recursive bool + // SQL can be an insert, update, delete, or select statement. + SQL SQLCore + // raw is the raw SQL string. + raw *string +} + +func (s *SQLStatement) topLevelStatement() {} + +func (s *SQLStatement) Accept(v Visitor) any { + return v.VisitSQLStatement(s) +} + +func (s *SQLStatement) Raw() (string, error) { + if s.raw == nil { + return "", fmt.Errorf("raw SQL is not set") + } + + return *s.raw, nil +} + +// SQLCore is a DML statement. +// It can be INSERT, UPDATE, DELETE, SELECT. +type SQLCore interface { + Node + sqlCore() +} + +// CreateActionStatement is a CREATE ACTION statement. +type CreateActionStatement struct { + Position + Namespacing + // Either IfNotExists or OrReplace can be true, but not both. + // Both can be false. + IfNotExists bool + OrReplace bool + + // Name is the name of the action. + Name string + + // Parameters are the parameters of the action. + Parameters []*NamedType + // Public is true if the action is public. + // Public bool + + // Modifiers are things like VIEW, OWNER, etc. + Modifiers []string + // Returns specifies the return type of the action. + // It can be nil if the action does not return anything. + Returns *ActionReturn + // Statements are the statements in the action. + Statements []ActionStmt + // Raw is the raw CREATE ACTION statement. + Raw string +} + +func (c *CreateActionStatement) topLevelStatement() {} + +func (c *CreateActionStatement) Accept(v Visitor) any { + return v.VisitCreateActionStatement(c) +} + +// NamedType is a type with a name. +type NamedType struct { + Name string + Type *types.DataType +} + +type DropActionStatement struct { + Position + Namespacing + // IfExists is true if the IF EXISTS clause is present. + IfExists bool + // Name is the name of the action. + Name string +} + +func (d *DropActionStatement) topLevelStatement() {} + +func (d *DropActionStatement) Accept(v Visitor) any { + return v.VisitDropActionStatement(d) +} + +// ActionReturn is the return struct of the action. +type ActionReturn struct { + Position + // IsTable is true if the return type is a table. + IsTable bool + // Fields are the fields of the return type. + Fields []*NamedType +} + +// CreateTableStatement is a CREATE TABLE statement. +type CreateTableStatement struct { + Position + Namespacing + IfNotExists bool + Name string + Columns []*Column + // Constraints contains the non-inline constraints + Constraints []*OutOfLineConstraint +} + +func (c *CreateTableStatement) topLevelStatement() {} + +func (c *CreateTableStatement) Accept(v Visitor) any { + return v.VisitCreateTableStatement(c) +} + +// Column represents a table column. +type Column struct { + Position + + Name string + Type *types.DataType + Constraints []InlineConstraint +} + +func (c *Column) Accept(v Visitor) any { + return v.VisitColumn(c) +} + +// OutOfLineConstraint is a constraint that is not inline with the column. +// e.g. CREATE TABLE t (a INT, CONSTRAINT c CHECK (a > 0)) +type OutOfLineConstraint struct { + Position + Name string // can be empty if the name should be auto-generated + Constraint OutOfLineConstraintClause +} + +// InlineConstraint is a constraint that is inline with the column. +type InlineConstraint interface { + Node + inlineConstraint() +} + +// OutOfLineConstraintClause is a constraint that is not inline with the column. +type OutOfLineConstraintClause interface { + Node + outOfLineConstraintClause() + // LocalColumns returns the local columns that the constraint is applied to. 
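+	// For example, the UNIQUE and PRIMARY KEY out-of-line constraints below return their
+	// Columns slice, while CHECK returns nil.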
+ LocalColumns() []string +} + +type PrimaryKeyInlineConstraint struct { + Position +} + +func (c *PrimaryKeyInlineConstraint) inlineConstraint() {} + +func (c *PrimaryKeyInlineConstraint) Accept(v Visitor) any { + return v.VisitPrimaryKeyInlineConstraint(c) +} + +type PrimaryKeyOutOfLineConstraint struct { + Position + Columns []string +} + +func (c *PrimaryKeyOutOfLineConstraint) Accept(v Visitor) any { + return v.VisitPrimaryKeyOutOfLineConstraint(c) +} + +func (c *PrimaryKeyOutOfLineConstraint) outOfLineConstraintClause() {} + +func (c *PrimaryKeyOutOfLineConstraint) LocalColumns() []string { return c.Columns } + +type UniqueInlineConstraint struct { + Position +} + +func (c *UniqueInlineConstraint) Accept(v Visitor) any { + return v.VisitUniqueInlineConstraint(c) +} + +func (c *UniqueInlineConstraint) inlineConstraint() {} + +type UniqueOutOfLineConstraint struct { + Position + Columns []string +} + +func (c *UniqueOutOfLineConstraint) Accept(v Visitor) any { + return v.VisitUniqueOutOfLineConstraint(c) +} + +func (c *UniqueOutOfLineConstraint) outOfLineConstraintClause() {} + +func (c *UniqueOutOfLineConstraint) LocalColumns() []string { return c.Columns } + +type DefaultConstraint struct { + Position + Value Expression +} + +func (c *DefaultConstraint) Accept(v Visitor) any { + return v.VisitDefaultConstraint(c) +} + +func (c *DefaultConstraint) inlineConstraint() {} + +type NotNullConstraint struct { + Position +} + +func (c *NotNullConstraint) Accept(v Visitor) any { + return v.VisitNotNullConstraint(c) +} + +func (c *NotNullConstraint) inlineConstraint() {} + +type CheckConstraint struct { + Position + Expression Expression +} + +func (c *CheckConstraint) Accept(v Visitor) any { + return v.VisitCheckConstraint(c) +} + +func (c *CheckConstraint) inlineConstraint() {} + +func (c *CheckConstraint) outOfLineConstraintClause() {} + +func (c *CheckConstraint) LocalColumns() []string { return nil } + +type ForeignKeyReferences struct { + Position + + // RefTableNamespace is the qualifier of the referenced table. + // It can be empty if the table is in the same schema. + RefTableNamespace string + RefTable string + RefColumns []string + Actions []*ForeignKeyAction +} + +func (c *ForeignKeyReferences) Accept(v Visitor) any { + return v.VisitForeignKeyReferences(c) +} + +func (c *ForeignKeyReferences) inlineConstraint() {} + +type ForeignKeyOutOfLineConstraint struct { + Position + Columns []string + References *ForeignKeyReferences +} + +func (c *ForeignKeyOutOfLineConstraint) Accept(v Visitor) any { + return v.VisitForeignKeyOutOfLineConstraint(c) +} + +func (c *ForeignKeyOutOfLineConstraint) outOfLineConstraintClause() {} + +func (c *ForeignKeyOutOfLineConstraint) LocalColumns() []string { return c.Columns } + +type IndexType string + +const ( + // IndexTypeBTree is the default index, created by using `INDEX`. + IndexTypeBTree IndexType = "BTREE" + // IndexTypeUnique is a unique BTree index, created by using `UNIQUE INDEX`. + IndexTypeUnique IndexType = "UNIQUE" +) + +// ForeignKey is a foreign key in a table. +type ForeignKey struct { + // ChildKeys are the columns that are referencing another. + // For example, in FOREIGN KEY (a) REFERENCES tbl2(b), "a" is the child key + ChildKeys []string `json:"child_keys"` + + // ParentKeys are the columns that are being referred to. + // For example, in FOREIGN KEY (a) REFERENCES tbl2(b), "b" is the parent key + ParentKeys []string `json:"parent_keys"` + + // ParentTable is the table that holds the parent columns. 
+ // For example, in FOREIGN KEY (a) REFERENCES tbl2(b), "tbl2" is the parent table + ParentTable string `json:"parent_table"` + + // Action refers to what the foreign key should do when the parent is altered. + // This is NOT the same as a database action. + // For example, ON DELETE CASCADE is a foreign key action + Actions []*ForeignKeyAction `json:"actions"` +} + +// ForeignKeyActionOn specifies when a foreign key action should occur. +// It can be either "UPDATE" or "DELETE". +type ForeignKeyActionOn string + +// ForeignKeyActionOn types +const ( + // ON_UPDATE is used to specify an action should occur when a parent key is updated + ON_UPDATE ForeignKeyActionOn = "UPDATE" + // ON_DELETE is used to specify an action should occur when a parent key is deleted + ON_DELETE ForeignKeyActionOn = "DELETE" +) + +// ForeignKeyActionDo specifies what should be done when a foreign key action is triggered. +type ForeignKeyActionDo string + +// ForeignKeyActionDo types +const ( + // DO_NO_ACTION does nothing when a parent key is altered + DO_NO_ACTION ForeignKeyActionDo = "NO ACTION" + + // DO_RESTRICT prevents the parent key from being altered + DO_RESTRICT ForeignKeyActionDo = "RESTRICT" + + // DO_SET_NULL sets the child key(s) to NULL + DO_SET_NULL ForeignKeyActionDo = "SET NULL" + + // DO_SET_DEFAULT sets the child key(s) to their default values + DO_SET_DEFAULT ForeignKeyActionDo = "SET DEFAULT" + + // DO_CASCADE updates the child key(s) or deletes the records (depending on the action type) + DO_CASCADE ForeignKeyActionDo = "CASCADE" +) + +// ForeignKeyAction is used to specify what should occur +// if a parent key is updated or deleted +type ForeignKeyAction struct { + // On can be either "UPDATE" or "DELETE" + On ForeignKeyActionOn `json:"on"` + + // Do specifies what a foreign key action should do + Do ForeignKeyActionDo `json:"do"` +} + +type DropBehavior string + +const ( + DropBehaviorDefault DropBehavior = "" + DropBehaviorCascade DropBehavior = "CASCADE" + DropBehaviorRestrict DropBehavior = "RESTRICT" +) + +type DropTableStatement struct { + Position + Namespacing + Tables []string + IfExists bool + Behavior DropBehavior +} + +func (s *DropTableStatement) topLevelStatement() {} + +func (s *DropTableStatement) Accept(v Visitor) any { + return v.VisitDropTableStatement(s) +} + +type AlterTableAction interface { + Node + + alterTableAction() +} + +// AlterTableStatement is a ALTER TABLE statement. +type AlterTableStatement struct { + Position + Namespacing + Table string + Action AlterTableAction +} + +func (a *AlterTableStatement) topLevelStatement() {} + +func (a *AlterTableStatement) alterTableAction() {} + +func (a *AlterTableStatement) Accept(v Visitor) any { + return v.VisitAlterTableStatement(a) +} + +// ConstraintType is a constraint in a table. +type ConstraintType interface { + String() string + constraint() +} + +// SingleColumnConstraintType is a constraint type that can only ever +// be applied to a single column. These are NOT NULL and DEFAULT. +type SingleColumnConstraintType string + +func (t SingleColumnConstraintType) String() string { + return string(t) +} + +func (t SingleColumnConstraintType) constraint() {} + +const ( + ConstraintTypeNotNull SingleColumnConstraintType = "NOT NULL" + ConstraintTypeDefault SingleColumnConstraintType = "DEFAULT" +) + +// MultiColumnConstraintType is a constraint type that can be applied +// to multiple columns. These are PRIMARY KEY, FOREIGN KEY, UNIQUE, and CHECK. 
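+// For illustration, `ALTER TABLE users ALTER COLUMN name SET NOT NULL` would use a
+// SingleColumnConstraintType, while `ALTER TABLE users ADD CONSTRAINT users_pk
+// PRIMARY KEY (id)` adds one of the multi-column constraints (a sketch of the intended
+// mapping; the table and constraint names are made up).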
+ +type MultiColumnConstraintType string + +func (t MultiColumnConstraintType) String() string { + return string(t) +} + +func (t MultiColumnConstraintType) constraint() {} + +const ( + ConstraintTypeUnique MultiColumnConstraintType = "UNIQUE" + ConstraintTypeCheck MultiColumnConstraintType = "CHECK" + ConstraintTypeForeignKey MultiColumnConstraintType = "FOREIGN KEY" + ConstraintTypePrimaryKey MultiColumnConstraintType = "PRIMARY KEY" +) + +// AlterColumnSet is "ALTER COLUMN ... SET ..." statement. +type AlterColumnSet struct { + Position + // Column is the column that is being altered. + Column string + // Type is the type of constraint that is being set. + Type SingleColumnConstraintType + // Value is the value of the constraint. + // It is only set if the type is DEFAULT. + Value Expression +} + +func (a *AlterColumnSet) alterTableAction() {} + +func (a *AlterColumnSet) Accept(v Visitor) any { + return v.VisitAlterColumnSet(a) +} + +// AlterColumnDrop is "ALTER COLUMN ... DROP ..." statement. +type AlterColumnDrop struct { + Position + Column string + Type SingleColumnConstraintType +} + +func (a *AlterColumnDrop) alterTableAction() {} + +func (a *AlterColumnDrop) Accept(v Visitor) any { + return v.VisitAlterColumnDrop(a) +} + +type AddColumn struct { + Position + Name string + Type *types.DataType +} + +func (a *AddColumn) alterTableAction() {} + +func (a *AddColumn) Accept(v Visitor) any { + return v.VisitAddColumn(a) +} + +type DropColumn struct { + Position + Name string +} + +func (a *DropColumn) alterTableAction() {} + +func (a *DropColumn) Accept(v Visitor) any { + return v.VisitDropColumn(a) +} + +type RenameColumn struct { + Position + OldName string + NewName string +} + +func (a *RenameColumn) alterTableAction() {} + +func (a *RenameColumn) Accept(v Visitor) any { + return v.VisitRenameColumn(a) +} + +type RenameTable struct { + Position + Name string +} + +func (a *RenameTable) alterTableAction() {} + +func (a *RenameTable) Accept(v Visitor) any { + return v.VisitRenameTable(a) +} + +// AddTableConstraint is a constraint that is being added to a table. +// It is used to specify multi-column constraints. +type AddTableConstraint struct { + Position + Constraint *OutOfLineConstraint +} + +func (a *AddTableConstraint) alterTableAction() {} + +func (a *AddTableConstraint) Accept(v Visitor) any { + return v.VisitAddTableConstraint(a) +} + +type DropTableConstraint struct { + Position + Name string +} + +func (a *DropTableConstraint) alterTableAction() {} + +func (a *DropTableConstraint) Accept(v Visitor) any { + return v.VisitDropTableConstraint(a) +} + +type CreateIndexStatement struct { + Position + Namespacing + IfNotExists bool + Name string + On string + Columns []string + Type IndexType +} + +func (s *CreateIndexStatement) topLevelStatement() {} + +func (s *CreateIndexStatement) Accept(v Visitor) any { + return v.VisitCreateIndexStatement(s) +} + +type DropIndexStatement struct { + Position + Namespacing + Name string + CheckExist bool +} + +func (s *DropIndexStatement) topLevelStatement() {} + +func (s *DropIndexStatement) Accept(v Visitor) any { + return v.VisitDropIndexStatement(s) +} + +type GrantOrRevokeStatement struct { + Position + // IsGrant is true if the statement is a GRANT statement. + // If it is false, then it is a REVOKE statement. + IsGrant bool + // Privileges are the privileges that are being granted. + // Either Privileges or Role must be set, but not both. + Privileges []string + // Namespace is the namespace that the privileges are being granted on. 
+ // It can be nil if they are global. + Namespace *string + // OnNam + // Role is the role being granted + // Either Privileges or Role must be set, but not both. + GrantRole string + // ToRole is the role being granted to. + // Either ToUser or ToRole must be set, but not both. + ToRole string + // ToUser is the user being granted to. + // Either ToUser or ToRole must be set, but not both. + ToUser string +} + +func (g *GrantOrRevokeStatement) topLevelStatement() {} + +func (g *GrantOrRevokeStatement) Accept(v Visitor) any { + return v.VisitGrantOrRevokeStatement(g) +} + +type CreateRoleStatement struct { + Position + // IfNotExists is true if the IF NOT EXISTS clause is present. + IfNotExists bool + // Role is the role that is being created or dropped. + Role string +} + +func (c *CreateRoleStatement) topLevelStatement() {} + +func (c *CreateRoleStatement) Accept(v Visitor) any { + return v.VisitCreateRoleStatement(c) +} + +type DropRoleStatement struct { + Position + // IfExists is true if the IF EXISTS clause is present. + IfExists bool + // Role is the role that is being created or dropped. + Role string +} + +func (d *DropRoleStatement) topLevelStatement() {} + +func (d *DropRoleStatement) Accept(v Visitor) any { + return v.VisitDropRoleStatement(d) +} + +type TransferOwnershipStatement struct { + Position + // To is the user that the ownership is being transferred to. + To string +} + +func (t *TransferOwnershipStatement) topLevelStatement() {} + +func (t *TransferOwnershipStatement) Accept(v Visitor) any { + return v.VisitTransferOwnershipStatement(t) +} + +type UseExtensionStatement struct { + Position + IfNotExists bool + ExtName string + Config []*struct { + Key string + Value Expression + } + Alias string +} + +func (u *UseExtensionStatement) topLevelStatement() {} + +func (u *UseExtensionStatement) Accept(v Visitor) any { + return v.VisitUseExtensionStatement(u) +} + +type UnuseExtensionStatement struct { + Position + IfExists bool + Alias string +} + +func (u *UnuseExtensionStatement) topLevelStatement() {} + +func (u *UnuseExtensionStatement) Accept(v Visitor) any { + return v.VisitUnuseExtensionStatement(u) +} + +type CreateNamespaceStatement struct { + Position + // IfNotExists is true if the IF NOT EXISTS clause is present. + IfNotExists bool + // Namespace is the namespace that is being created. + Namespace string +} + +func (c *CreateNamespaceStatement) topLevelStatement() {} + +func (c *CreateNamespaceStatement) Accept(v Visitor) any { + return v.VisitCreateNamespaceStatement(c) +} + +type DropNamespaceStatement struct { + Position + // IfExists is true if the IF EXISTS clause is present. + IfExists bool + // Namespace is the namespace that is being dropped. + Namespace string +} + +func (d *DropNamespaceStatement) topLevelStatement() {} + +func (d *DropNamespaceStatement) Accept(v Visitor) any { + return v.VisitDropNamespaceStatement(d) +} + +// SelectStatement is a SELECT statement. 
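+// For illustration, a query such as
+//
+//	SELECT id FROM users WHERE age > 18 ORDER BY id LIMIT 10
+//
+// would be expected to parse into a single SelectCore (result column, table, and WHERE
+// clause), one OrderingTerm, and a Limit expression on the enclosing SelectStatement
+// (a sketch of the intended shape; the table and columns are made up).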
+type SelectStatement struct { + Position + SelectCores []*SelectCore + CompoundOperators []CompoundOperator + Ordering []*OrderingTerm + Limit Expression + Offset Expression +} + +func (s *SelectStatement) Accept(v Visitor) any { + return v.VisitSelectStatement(s) +} + +func (SelectStatement) sqlCore() {} + +type CompoundOperator string + +const ( + CompoundOperatorUnion CompoundOperator = "UNION" + CompoundOperatorUnionAll CompoundOperator = "UNION ALL" + CompoundOperatorIntersect CompoundOperator = "INTERSECT" + CompoundOperatorExcept CompoundOperator = "EXCEPT" +) + +// OrderingTerm is a term in an order by clause +type OrderingTerm struct { + Position + Expression Expression + Order OrderType + Nulls NullOrder +} + +func (o *OrderingTerm) Accept(v Visitor) any { + return v.VisitOrderingTerm(o) +} + +type OrderType string + +const ( + OrderTypeAsc OrderType = "ASC" + OrderTypeDesc OrderType = "DESC" +) + +type NullOrder string + +const ( + NullOrderFirst NullOrder = "FIRST" + NullOrderLast NullOrder = "LAST" +) + +type SelectCore struct { + Position + // Distinct is true if the SELECT statement is a DISTINCT SELECT statement. + Distinct bool + Columns []ResultColumn + From Table // can be nil + Joins []*Join // can be nil + Where Expression // can be nil + GroupBy []Expression // can be nil + Having Expression // can be nil + Windows []*struct { + Name string + Window *WindowImpl + } // can be nil +} + +func (s *SelectCore) Accept(v Visitor) any { + return v.VisitSelectCore(s) +} + +type ResultColumn interface { + Node + ResultColumnType() ResultColumnType +} + +type ResultColumnType string + +const ( + ResultColumnTypeExpression ResultColumnType = "expression" + ResultColumnTypeWildcard ResultColumnType = "wildcare" +) + +type ResultColumnExpression struct { + Position + + Expression Expression + Alias string // can be empty +} + +func (r *ResultColumnExpression) Accept(v Visitor) any { + return v.VisitResultColumnExpression(r) +} + +func (r *ResultColumnExpression) ResultColumnType() ResultColumnType { + return ResultColumnTypeExpression +} + +type ResultColumnWildcard struct { + Position + Table string // can be empty +} + +func (r *ResultColumnWildcard) Accept(v Visitor) any { + return v.VisitResultColumnWildcard(r) +} + +func (r *ResultColumnWildcard) ResultColumnType() ResultColumnType { + return ResultColumnTypeWildcard +} + +type Table interface { + Node + table() +} + +type RelationTable struct { + Position + // Namespace is the namespace of the table. + // If it is empty, then the table is in the current namespace. + Namespace string + // Table is the name of the table. + Table string + Alias string // can be empty +} + +func (r *RelationTable) Accept(v Visitor) any { + return v.VisitRelationTable(r) +} + +func (RelationTable) table() {} + +type RelationSubquery struct { + Position + Subquery *SelectStatement + // Alias cannot be empty, as our syntax + // forces it for subqueries. + Alias string +} + +func (r *RelationSubquery) Accept(v Visitor) any { + return v.VisitRelationSubquery(r) +} + +func (RelationSubquery) table() {} + +// Join is a join in a SELECT statement. 
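+// For illustration, `... INNER JOIN posts AS p ON p.user_id = u.id` would be expected
+// to produce a Join with Type JoinTypeInner, Relation a RelationTable for posts, and
+// On the equality comparison (a sketch; the table names are made up).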
+type Join struct { + Position + Type JoinType + Relation Table + On Expression +} + +func (j *Join) Accept(v Visitor) any { + return v.VisitJoin(j) +} + +type JoinType string + +const ( + JoinTypeInner JoinType = "INNER" + JoinTypeLeft JoinType = "LEFT" + JoinTypeRight JoinType = "RIGHT" + JoinTypeFull JoinType = "FULL" +) + +type UpdateStatement struct { + Position + Table string + Alias string // can be empty + SetClause []*UpdateSetClause + From Table // can be nil + Joins []*Join // can be nil + Where Expression // can be nil +} + +func (u *UpdateStatement) Accept(v Visitor) any { + return v.VisitUpdateStatement(u) +} + +func (u *UpdateStatement) sqlCore() {} + +type UpdateSetClause struct { + Position + Column string + Value Expression +} + +func (u *UpdateSetClause) Accept(v Visitor) any { + return v.VisitUpdateSetClause(u) +} + +type DeleteStatement struct { + Position + + Table string + Alias string // can be empty + From Table // can be nil + Joins []*Join // can be nil + Where Expression // can be nil +} + +func (d *DeleteStatement) Accept(v Visitor) any { + return v.VisitDeleteStatement(d) +} + +func (d *DeleteStatement) sqlCore() {} + +type InsertStatement struct { + Position + Table string + Alias string // can be empty + Columns []string // can be empty + // Either Values or Select is set, but not both. + Values [][]Expression // can be empty + Select *SelectStatement // can be nil + OnConflict *OnConflict // can be nil +} + +func (i *InsertStatement) Accept(v Visitor) any { + return v.VisitInsertStatement(i) +} + +func (i InsertStatement) sqlCore() {} + +type OnConflict struct { + Position + ConflictColumns []string // can be empty + ConflictWhere Expression // can be nil + DoUpdate []*UpdateSetClause // if nil, then do nothing + UpdateWhere Expression // can be nil +} + +func (u *OnConflict) Accept(v Visitor) any { + return v.VisitUpsertClause(u) +} + +// action logic ast: + +// ActionStmt is a statement in a actiob. +// it is the top-level interface for all action statements. +type ActionStmt interface { + Node + actionStmt() +} + +type baseActionStmt struct { + Position +} + +func (baseActionStmt) actionStmt() {} + +// ActionStmtDeclaration is a variable declaration in an action. +type ActionStmtDeclaration struct { + baseActionStmt + // Variable is the variable that is being declared. + Variable *ExpressionVariable + Type *types.DataType +} + +func (p *ActionStmtDeclaration) Accept(v Visitor) any { + return v.VisitActionStmtDeclaration(p) +} + +// ActionStmtAssign is a variable assignment in an action. +// It should only be called on variables that have already been declared. +type ActionStmtAssign struct { + baseActionStmt + // Variable is the variable that is being assigned. + Variable Assignable + // Type is the type of the variable. + // It can be nil if the variable is not being assigned, + // or if the type should be inferred. + Type *types.DataType + // Value is the value that is being assigned. + Value Expression +} + +func (p *ActionStmtAssign) Accept(v Visitor) any { + return v.VisitActionStmtAssignment(p) +} + +// ActionStmtCall is a call to another action or built-in function. +type ActionStmtCall struct { + baseActionStmt + // Receivers are the variables being assigned. If nil, then the + // receiver can be ignored. 
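+	// For illustration, calling another action and assigning two of its returned values
+	// would set Receivers to those two variables and Call to the ExpressionFunctionCall
+	// for the callee (a sketch of the intended shape).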
+ Receivers []*ExpressionVariable + Call *ExpressionFunctionCall +} + +func (p *ActionStmtCall) Accept(v Visitor) any { + return v.VisitActionStmtCall(p) +} + +type ActionStmtForLoop struct { + baseActionStmt + // Receiver is the variable that is assigned on each iteration. + Receiver *ExpressionVariable + // LoopTerm is what the loop is looping through. + LoopTerm LoopTerm + // Body is the body of the loop. + Body []ActionStmt +} + +func (p *ActionStmtForLoop) Accept(v Visitor) any { + return v.VisitActionStmtForLoop(p) +} + +// LoopTerm what the loop is looping through. +type LoopTerm interface { + Node + loopTerm() +} + +type baseLoopTerm struct { + Position +} + +func (baseLoopTerm) loopTerm() {} + +type LoopTermRange struct { + baseLoopTerm + // Start is the start of the range. + Start Expression + // End is the end of the range. + End Expression +} + +func (e *LoopTermRange) Accept(v Visitor) interface{} { + return v.VisitLoopTermRange(e) +} + +type LoopTermSQL struct { + baseLoopTerm + // Statement is the Statement statement to execute. + Statement *SQLStatement +} + +func (e *LoopTermSQL) Accept(v Visitor) interface{} { + return v.VisitLoopTermSQL(e) +} + +type LoopTermVariable struct { + baseLoopTerm + // Variable is the variable to loop through. + // It must be an array. + Variable *ExpressionVariable +} + +func (e *LoopTermVariable) Accept(v Visitor) interface{} { + return v.VisitLoopTermVariable(e) +} + +type ActionStmtIf struct { + baseActionStmt + // IfThens are the if statements. + // They are evaluated in order, as + // IF ... THEN ... ELSEIF ... THEN ... + IfThens []*IfThen + // Else is the else statement. + // It is evaluated if no other if statement + // is true. + Else []ActionStmt +} + +func (p *ActionStmtIf) Accept(v Visitor) any { + return v.VisitActionStmtIf(p) +} + +type IfThen struct { + Position + If Expression + Then []ActionStmt +} + +func (i *IfThen) Accept(v Visitor) any { + return v.VisitIfThen(i) +} + +type ActionStmtSQL struct { + baseActionStmt + SQL *SQLStatement +} + +func (p *ActionStmtSQL) Accept(v Visitor) any { + return v.VisitActionStmtSQL(p) +} + +type ActionStmtBreak struct { + baseActionStmt +} + +func (p *ActionStmtBreak) Accept(v Visitor) any { + return v.VisitActionStmtBreak(p) +} + +type ActionStmtReturn struct { + baseActionStmt + // Values are the values to return. + // Either values is set or SQL is set, but not both. + Values []Expression + // SQL is the SQL statement to return. + // Either values is set or SQL is set, but not both. + SQL *SQLStatement +} + +func (p *ActionStmtReturn) Accept(v Visitor) any { + return v.VisitActionStmtReturn(p) +} + +type ActionStmtReturnNext struct { + baseActionStmt + // Values are the values to return. + Values []Expression +} + +func (p *ActionStmtReturnNext) Accept(v Visitor) any { + return v.VisitActionStmtReturnNext(p) +} + +/* + There are three types of visitors, all which compose on each other: + - Visitor: top-level visitor capable of visiting actions, DDL, and SQL. + - ActionVisitor: a visitor capable of only visiting actions and SQL. It must include + SQL because actions themselves rely on SQL/ + - SQLVisitor: a visitor capable of only visiting SQL. +*/ + +// Visitor is an interface for visiting nodes in the parse tree. +type Visitor interface { + ActionVisitor + DDLVisitor +} + +// DDLVisitor includes visit methods only needed to analyze DDL statements. 
+type DDLVisitor interface {
+	// DDL
+	VisitCreateTableStatement(*CreateTableStatement) any
+	VisitAlterTableStatement(*AlterTableStatement) any
+	VisitDropTableStatement(*DropTableStatement) any
+	VisitCreateIndexStatement(*CreateIndexStatement) any
+	VisitDropIndexStatement(*DropIndexStatement) any
+	VisitGrantOrRevokeStatement(*GrantOrRevokeStatement) any
+	VisitAlterColumnSet(*AlterColumnSet) any
+	VisitAlterColumnDrop(*AlterColumnDrop) any
+	VisitAddColumn(*AddColumn) any
+	VisitDropColumn(*DropColumn) any
+	VisitRenameColumn(*RenameColumn) any
+	VisitRenameTable(*RenameTable) any
+	VisitAddTableConstraint(*AddTableConstraint) any
+	VisitDropTableConstraint(*DropTableConstraint) any
+	VisitColumn(*Column) any
+	VisitCreateRoleStatement(*CreateRoleStatement) any
+	VisitDropRoleStatement(*DropRoleStatement) any
+	VisitTransferOwnershipStatement(*TransferOwnershipStatement) any
+	VisitUseExtensionStatement(*UseExtensionStatement) any
+	VisitUnuseExtensionStatement(*UnuseExtensionStatement) any
+	VisitCreateNamespaceStatement(*CreateNamespaceStatement) any
+	VisitDropNamespaceStatement(*DropNamespaceStatement) any
+	VisitCreateActionStatement(*CreateActionStatement) any
+	VisitDropActionStatement(*DropActionStatement) any
+	// Constraints
+	VisitPrimaryKeyInlineConstraint(*PrimaryKeyInlineConstraint) any
+	VisitPrimaryKeyOutOfLineConstraint(*PrimaryKeyOutOfLineConstraint) any
+	VisitUniqueInlineConstraint(*UniqueInlineConstraint) any
+	VisitUniqueOutOfLineConstraint(*UniqueOutOfLineConstraint) any
+	VisitDefaultConstraint(*DefaultConstraint) any
+	VisitNotNullConstraint(*NotNullConstraint) any
+	VisitCheckConstraint(*CheckConstraint) any
+	VisitForeignKeyReferences(*ForeignKeyReferences) any
+	VisitForeignKeyOutOfLineConstraint(*ForeignKeyOutOfLineConstraint) any
+}
+
+// ActionVisitor includes the visit methods needed to analyze actions.
+// It embeds SQLVisitor because action bodies contain SQL statements; it does not
+// include visit methods for DDL statements.
+type ActionVisitor interface {
+	SQLVisitor
+	VisitActionStmtDeclaration(*ActionStmtDeclaration) any
+	VisitActionStmtAssignment(*ActionStmtAssign) any
+	VisitActionStmtCall(*ActionStmtCall) any
+	VisitActionStmtForLoop(*ActionStmtForLoop) any
+	VisitLoopTermRange(*LoopTermRange) any
+	VisitLoopTermSQL(*LoopTermSQL) any
+	VisitLoopTermVariable(*LoopTermVariable) any
+	VisitActionStmtIf(*ActionStmtIf) any
+	VisitIfThen(*IfThen) any
+	VisitActionStmtSQL(*ActionStmtSQL) any
+	VisitActionStmtBreak(*ActionStmtBreak) any
+	VisitActionStmtReturn(*ActionStmtReturn) any
+	VisitActionStmtReturnNext(*ActionStmtReturnNext) any
+}
+
+// SQLVisitor is a visitor that only has methods for SQL nodes.
+type SQLVisitor interface {
+	VisitExpressionLiteral(*ExpressionLiteral) any
+	VisitExpressionFunctionCall(*ExpressionFunctionCall) any
+	VisitExpressionWindowFunctionCall(*ExpressionWindowFunctionCall) any
+	VisitWindowImpl(*WindowImpl) any
+	VisitWindowReference(*WindowReference) any
+	VisitExpressionVariable(*ExpressionVariable) any
+	VisitExpressionArrayAccess(*ExpressionArrayAccess) any
+	VisitExpressionMakeArray(*ExpressionMakeArray) any
+	VisitExpressionFieldAccess(*ExpressionFieldAccess) any
+	VisitExpressionParenthesized(*ExpressionParenthesized) any
+	VisitExpressionComparison(*ExpressionComparison) any
+	VisitExpressionLogical(*ExpressionLogical) any
+	VisitExpressionArithmetic(*ExpressionArithmetic) any
+	VisitExpressionUnary(*ExpressionUnary) any
+	VisitExpressionColumn(*ExpressionColumn) any
+	VisitExpressionCollate(*ExpressionCollate) any
+	VisitExpressionStringComparison(*ExpressionStringComparison) any
+	VisitExpressionIs(*ExpressionIs) any
+	VisitExpressionIn(*ExpressionIn) any
+	VisitExpressionBetween(*ExpressionBetween) any
+	VisitExpressionSubquery(*ExpressionSubquery) any
+	VisitExpressionCase(*ExpressionCase) any
+	VisitCommonTableExpression(*CommonTableExpression) any
+	VisitSQLStatement(*SQLStatement) any
+	VisitSelectStatement(*SelectStatement) any
+	VisitSelectCore(*SelectCore) any
+	VisitResultColumnExpression(*ResultColumnExpression) any
+	VisitResultColumnWildcard(*ResultColumnWildcard) any
+	VisitRelationTable(*RelationTable) any
+	VisitRelationSubquery(*RelationSubquery) any
+	VisitJoin(*Join) any
+	VisitUpdateStatement(*UpdateStatement) any
+	VisitUpdateSetClause(*UpdateSetClause) any
+	VisitDeleteStatement(*DeleteStatement) any
+	VisitInsertStatement(*InsertStatement) any
+	VisitUpsertClause(*OnConflict) any
+	VisitOrderingTerm(*OrderingTerm) any
+}
+
+// UnimplementedActionVisitor is meant to be embedded by visitors that do not intend
+// to handle action statements. It provides the action-specific visit methods of
+// ActionVisitor, each of which panics if called. It does not implement the SQLVisitor
+// (and therefore ActionVisitor) interface on its own, so it alone cannot be used as a visitor.
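+//
+// A minimal sketch of the intended embedding (the visitor name below is illustrative):
+//
+//	type ddlOnlyVisitor struct {
+//		UnimplementedActionVisitor
+//		// concrete implementations of the DDLVisitor and SQLVisitor methods go here
+//	}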
+type UnimplementedActionVisitor struct{} + +func (s *UnimplementedActionVisitor) VisitActionStmtDeclaration(p0 *ActionStmtDeclaration) any { + panic(fmt.Sprintf("api misuse: cannot visit %T in constrained visitor", s)) +} + +func (s *UnimplementedActionVisitor) VisitActionStmtAssignment(p0 *ActionStmtAssign) any { + panic(fmt.Sprintf("api misuse: cannot visit %T in constrained visitor", s)) +} + +func (s *UnimplementedActionVisitor) VisitActionStmtCall(p0 *ActionStmtCall) any { + panic(fmt.Sprintf("api misuse: cannot visit %T in constrained visitor", s)) +} + +func (s *UnimplementedActionVisitor) VisitActionStmtForLoop(p0 *ActionStmtForLoop) any { + panic(fmt.Sprintf("api misuse: cannot visit %T in constrained visitor", s)) +} + +func (s *UnimplementedActionVisitor) VisitLoopTermRange(p0 *LoopTermRange) any { + panic(fmt.Sprintf("api misuse: cannot visit %T in constrained visitor", s)) +} + +func (s *UnimplementedActionVisitor) VisitLoopTermSQL(p0 *LoopTermSQL) any { + panic(fmt.Sprintf("api misuse: cannot visit %T in constrained visitor", s)) +} + +func (s *UnimplementedActionVisitor) VisitLoopTermVariable(p0 *LoopTermVariable) any { + panic(fmt.Sprintf("api misuse: cannot visit %T in constrained visitor", s)) +} + +func (s *UnimplementedActionVisitor) VisitActionStmtIf(p0 *ActionStmtIf) any { + panic(fmt.Sprintf("api misuse: cannot visit %T in constrained visitor", s)) +} + +func (s *UnimplementedActionVisitor) VisitIfThen(p0 *IfThen) any { + panic(fmt.Sprintf("api misuse: cannot visit %T in constrained visitor", s)) +} + +func (s *UnimplementedActionVisitor) VisitActionStmtSQL(p0 *ActionStmtSQL) any { + panic(fmt.Sprintf("api misuse: cannot visit %T in constrained visitor", s)) +} + +func (s *UnimplementedActionVisitor) VisitActionStmtBreak(p0 *ActionStmtBreak) any { + panic(fmt.Sprintf("api misuse: cannot visit %T in constrained visitor", s)) +} + +func (s *UnimplementedActionVisitor) VisitActionStmtReturn(p0 *ActionStmtReturn) any { + panic(fmt.Sprintf("api misuse: cannot visit %T in constrained visitor", s)) +} + +func (s *UnimplementedActionVisitor) VisitActionStmtReturnNext(p0 *ActionStmtReturnNext) any { + panic(fmt.Sprintf("api misuse: cannot visit %T in constrained visitor", s)) +} + +type UnimplementedDDLVisitor struct{} + +func (u *UnimplementedDDLVisitor) VisitCreateTableStatement(p0 *CreateTableStatement) any { + panic(fmt.Sprintf("api misuse: cannot visit %T in constrained visitor", u)) +} + +func (u *UnimplementedDDLVisitor) VisitAlterTableStatement(p0 *AlterTableStatement) any { + panic(fmt.Sprintf("api misuse: cannot visit %T in constrained visitor", u)) +} + +func (u *UnimplementedDDLVisitor) VisitDropTableStatement(p0 *DropTableStatement) any { + panic(fmt.Sprintf("api misuse: cannot visit %T in constrained visitor", u)) +} + +func (u *UnimplementedDDLVisitor) VisitCreateIndexStatement(p0 *CreateIndexStatement) any { + panic(fmt.Sprintf("api misuse: cannot visit %T in constrained visitor", u)) +} + +func (u *UnimplementedDDLVisitor) VisitDropIndexStatement(p0 *DropIndexStatement) any { + panic(fmt.Sprintf("api misuse: cannot visit %T in constrained visitor", u)) +} + +func (u *UnimplementedDDLVisitor) VisitGrantOrRevokeStatement(p0 *GrantOrRevokeStatement) any { + panic(fmt.Sprintf("api misuse: cannot visit %T in constrained visitor", u)) +} + +func (u *UnimplementedDDLVisitor) VisitAlterColumnSet(p0 *AlterColumnSet) any { + panic(fmt.Sprintf("api misuse: cannot visit %T in constrained visitor", u)) +} + +func (u *UnimplementedDDLVisitor) VisitAlterColumnDrop(p0 
*AlterColumnDrop) any { + panic(fmt.Sprintf("api misuse: cannot visit %T in constrained visitor", u)) +} + +func (u *UnimplementedDDLVisitor) VisitAddColumn(p0 *AddColumn) any { + panic(fmt.Sprintf("api misuse: cannot visit %T in constrained visitor", u)) +} + +func (u *UnimplementedDDLVisitor) VisitDropColumn(p0 *DropColumn) any { + panic(fmt.Sprintf("api misuse: cannot visit %T in constrained visitor", u)) +} + +func (u *UnimplementedDDLVisitor) VisitRenameColumn(p0 *RenameColumn) any { + panic(fmt.Sprintf("api misuse: cannot visit %T in constrained visitor", u)) +} + +func (u *UnimplementedDDLVisitor) VisitRenameTable(p0 *RenameTable) any { + panic(fmt.Sprintf("api misuse: cannot visit %T in constrained visitor", u)) +} + +func (u *UnimplementedDDLVisitor) VisitAddTableConstraint(p0 *AddTableConstraint) any { + panic(fmt.Sprintf("api misuse: cannot visit %T in constrained visitor", u)) +} + +func (u *UnimplementedDDLVisitor) VisitDropTableConstraint(p0 *DropTableConstraint) any { + panic(fmt.Sprintf("api misuse: cannot visit %T in constrained visitor", u)) +} + +func (u *UnimplementedDDLVisitor) VisitColumn(p0 *Column) any { + panic(fmt.Sprintf("api misuse: cannot visit %T in constrained visitor", u)) +} + +func (u *UnimplementedDDLVisitor) VisitCreateRoleStatement(p0 *CreateRoleStatement) any { + panic(fmt.Sprintf("api misuse: cannot visit %T in constrained visitor", u)) +} + +func (u *UnimplementedDDLVisitor) VisitDropRoleStatement(p0 *DropRoleStatement) any { + panic(fmt.Sprintf("api misuse: cannot visit %T in constrained visitor", u)) +} + +func (u *UnimplementedDDLVisitor) VisitTransferOwnershipStatement(p0 *TransferOwnershipStatement) any { + panic(fmt.Sprintf("api misuse: cannot visit %T in constrained visitor", u)) +} + +func (u *UnimplementedDDLVisitor) VisitPrimaryKeyInlineConstraint(p0 *PrimaryKeyInlineConstraint) any { + panic(fmt.Sprintf("api misuse: cannot visit %T in constrained visitor", u)) +} + +func (u *UnimplementedDDLVisitor) VisitPrimaryKeyOutOfLineConstraint(p0 *PrimaryKeyOutOfLineConstraint) any { + panic(fmt.Sprintf("api misuse: cannot visit %T in constrained visitor", u)) +} + +func (u *UnimplementedDDLVisitor) VisitUniqueInlineConstraint(p0 *UniqueInlineConstraint) any { + panic(fmt.Sprintf("api misuse: cannot visit %T in constrained visitor", u)) +} + +func (u *UnimplementedDDLVisitor) VisitUniqueOutOfLineConstraint(p0 *UniqueOutOfLineConstraint) any { + panic(fmt.Sprintf("api misuse: cannot visit %T in constrained visitor", u)) +} + +func (u *UnimplementedDDLVisitor) VisitDefaultConstraint(p0 *DefaultConstraint) any { + panic(fmt.Sprintf("api misuse: cannot visit %T in constrained visitor", u)) +} + +func (u *UnimplementedDDLVisitor) VisitNotNullConstraint(p0 *NotNullConstraint) any { + panic(fmt.Sprintf("api misuse: cannot visit %T in constrained visitor", u)) +} + +func (u *UnimplementedDDLVisitor) VisitCheckConstraint(p0 *CheckConstraint) any { + panic(fmt.Sprintf("api misuse: cannot visit %T in constrained visitor", u)) +} + +func (u *UnimplementedDDLVisitor) VisitForeignKeyReferences(p0 *ForeignKeyReferences) any { + panic(fmt.Sprintf("api misuse: cannot visit %T in constrained visitor", u)) +} + +func (u *UnimplementedDDLVisitor) VisitForeignKeyOutOfLineConstraint(p0 *ForeignKeyOutOfLineConstraint) any { + panic(fmt.Sprintf("api misuse: cannot visit %T in constrained visitor", u)) +} + +func (u *UnimplementedDDLVisitor) VisitCreateActionStatement(p0 *CreateActionStatement) any { + panic(fmt.Sprintf("api misuse: cannot visit %T in constrained visitor", 
u)) +} + +func (u *UnimplementedDDLVisitor) VisitDropActionStatement(p0 *DropActionStatement) any { + panic(fmt.Sprintf("api misuse: cannot visit %T in constrained visitor", u)) +} + +func (u *UnimplementedDDLVisitor) VisitUseExtensionStatement(p0 *UseExtensionStatement) any { + panic(fmt.Sprintf("api misuse: cannot visit %T in constrained visitor", u)) +} + +func (u *UnimplementedDDLVisitor) VisitUnuseExtensionStatement(p0 *UnuseExtensionStatement) any { + panic(fmt.Sprintf("api misuse: cannot visit %T in constrained visitor", u)) +} + +func (u *UnimplementedDDLVisitor) VisitCreateNamespaceStatement(p0 *CreateNamespaceStatement) any { + panic(fmt.Sprintf("api misuse: cannot visit %T in constrained visitor", u)) +} + +func (u *UnimplementedDDLVisitor) VisitDropNamespaceStatement(p0 *DropNamespaceStatement) any { + panic(fmt.Sprintf("api misuse: cannot visit %T in constrained visitor", u)) +} diff --git a/parse/contextual.go b/node/engine/parse/contextual.go similarity index 80% rename from parse/contextual.go rename to node/engine/parse/contextual.go index 67c41594c..55550172b 100644 --- a/parse/contextual.go +++ b/node/engine/parse/contextual.go @@ -4,6 +4,7 @@ import ( "github.com/kwilteam/kwil-db/core/types" ) +// TODO: all of these should be moved to the engine. var ( // caller is the session variable for the caller. CallerVar = "caller" @@ -31,13 +32,3 @@ var ( Authenticator: types.TextType, } ) - -// makeSessionVars creates a new map of session variables. -// It includes the @ symbol in the keys. -func makeSessionVars() map[string]*types.DataType { - newMap := make(map[string]*types.DataType) - for k, v := range SessionVars { - newMap["@"+k] = v.Copy() - } - return newMap -} diff --git a/parse/errors.go b/node/engine/parse/errors.go similarity index 78% rename from parse/errors.go rename to node/engine/parse/errors.go index eb8b34c89..84721a3f0 100644 --- a/parse/errors.go +++ b/node/engine/parse/errors.go @@ -223,39 +223,20 @@ func (e *errorListener) ReportContextSensitivity(recognizer antlr.Parser, dfa *a var ( ErrSyntax = errors.New("syntax error") - ErrDuplicateBlock = errors.New("duplicate block name") ErrUndeclaredVariable = errors.New("undeclared variable") - ErrVariableAlreadyDeclared = errors.New("variable already declared") ErrType = errors.New("type error") - ErrAssignment = errors.New("assignment error") - ErrUnknownTable = errors.New("unknown table reference") ErrTableDefinition = errors.New("table definition error") ErrUnknownColumn = errors.New("unknown column reference") ErrColumnConstraint = errors.New("column constraint error") - ErrAmbiguousColumn = errors.New("ambiguous column reference") + ErrDuplicateParameterName = errors.New("duplicate parameter name") ErrDuplicateResultColumnName = errors.New("duplicate result column name") ErrUnknownFunctionOrProcedure = errors.New("unknown function or procedure") - // ErrFunctionSignature is returned when a function/procedure is called with the wrong number of arguments, - // or returns an unexpected number of values / table. - ErrFunctionSignature = errors.New("function/procedure signature error") - ErrTableAlreadyExists = errors.New("table already exists") - // ErrResultShape is used if the result of a query is not in a shape we expect. 
- ErrResultShape = errors.New("result shape error") - ErrUnnamedResultColumn = errors.New("unnamed result column") - ErrTableAlreadyJoined = errors.New("table already joined") - ErrUnnamedJoin = errors.New("unnamed join") - ErrBreak = errors.New("break error") - ErrReturn = errors.New("return type error") - ErrAggregate = errors.New("aggregate error") - ErrUnknownContextualVariable = errors.New("unknown contextual variable") - ErrIdentifier = errors.New("identifier error") - ErrActionNotFound = errors.New("action not found") - ErrViewMutatesState = errors.New("view mutates state") - ErrOrdering = errors.New("ordering error") - ErrCrossScopeDeclaration = errors.New("cross-scope declaration") - ErrInvalidExcludedTable = errors.New("invalid excluded table usage") - ErrAmbiguousConflictTable = errors.New("ambiguous conflict table") - ErrCollation = errors.New("collation error") - ErrNoPrimaryKey = errors.New("missing primary key") - ErrReservedKeyword = errors.New("reserved keyword") + ErrUnknownContextualVariable = errors.New("unknown contextual variable") + ErrIdentifier = errors.New("identifier error") + ErrViewMutatesState = errors.New("view mutates state") + ErrCollation = errors.New("collation error") + ErrNoPrimaryKey = errors.New("missing primary key") + ErrRedeclaredPrimaryKey = errors.New("redeclare primary key") + ErrRedeclaredConstraint = errors.New("redeclared constraint") + ErrGrantOrRevoke = errors.New("grant or revoke error") ) diff --git a/parse/functions.go b/node/engine/parse/functions.go similarity index 56% rename from parse/functions.go rename to node/engine/parse/functions.go index 14fbfd5bd..d45e9b223 100644 --- a/parse/functions.go +++ b/node/engine/parse/functions.go @@ -8,9 +8,9 @@ import ( ) var ( - Functions = map[string]*FunctionDefinition{ - "abs": { - ValidateArgs: func(args []*types.DataType) (*types.DataType, error) { + Functions = map[string]FunctionDefinition{ + "abs": &ScalarFunctionDefinition{ + ValidateArgsFunc: func(args []*types.DataType) (*types.DataType, error) { if len(args) != 1 { return nil, wrapErrArgumentNumber(1, len(args)) } @@ -21,10 +21,10 @@ var ( return args[0], nil }, - PGFormat: defaultFormat("abs"), + PGFormatFunc: defaultFormat("abs"), }, - "error": { - ValidateArgs: func(args []*types.DataType) (*types.DataType, error) { + "error": &ScalarFunctionDefinition{ + ValidateArgsFunc: func(args []*types.DataType) (*types.DataType, error) { if len(args) != 1 { return nil, wrapErrArgumentNumber(1, len(args)) } @@ -37,10 +37,10 @@ var ( // It doesn't really matter, since error will cancel execution anyways. 
return types.NullType, nil }, - PGFormat: defaultFormat("error"), + PGFormatFunc: defaultFormat("error"), }, - "parse_unix_timestamp": { - ValidateArgs: func(args []*types.DataType) (*types.DataType, error) { + "parse_unix_timestamp": &ScalarFunctionDefinition{ + ValidateArgsFunc: func(args []*types.DataType) (*types.DataType, error) { // two args, both text if len(args) != 2 { return nil, wrapErrArgumentNumber(2, len(args)) @@ -56,10 +56,10 @@ var ( return decimal16_6, nil }, - PGFormat: defaultFormat("parse_unix_timestamp"), + PGFormatFunc: defaultFormat("parse_unix_timestamp"), }, - "format_unix_timestamp": { - ValidateArgs: func(args []*types.DataType) (*types.DataType, error) { + "format_unix_timestamp": &ScalarFunctionDefinition{ + ValidateArgsFunc: func(args []*types.DataType) (*types.DataType, error) { // first arg must be decimal(16, 6), second arg must be text if len(args) != 2 { return nil, wrapErrArgumentNumber(2, len(args)) @@ -75,10 +75,10 @@ var ( return types.TextType, nil }, - PGFormat: defaultFormat("format_unix_timestamp"), + PGFormatFunc: defaultFormat("format_unix_timestamp"), }, - "notice": { - ValidateArgs: func(args []*types.DataType) (*types.DataType, error) { + "notice": &ScalarFunctionDefinition{ + ValidateArgsFunc: func(args []*types.DataType) (*types.DataType, error) { if len(args) != 1 { return nil, wrapErrArgumentNumber(1, len(args)) } @@ -91,22 +91,12 @@ var ( // It doesn't really matter, since error will cancel execution anyways. return types.NullType, nil }, - PGFormat: func(inputs []string, distinct, star bool) (string, error) { - if star { - return "", errStar("notice") - } - if distinct { - return "", errDistinct("notice") - } - - // TODO: this is implicitly coupled to internal/engine/generate, and should be moved there. - // we can only move this there once we move all PGFormat, which will also be affected by - // v0.9 changes, so leaving it here for now. 
- return fmt.Sprintf("notice('txid:' || current_setting('ctx.txid') || ' ' || %s)", inputs[0]), nil + PGFormatFunc: func(inputs []string) (string, error) { + return "", fmt.Errorf("notice cannot be used in SQL statements") }, }, - "uuid_generate_v5": { - ValidateArgs: func(args []*types.DataType) (*types.DataType, error) { + "uuid_generate_v5": &ScalarFunctionDefinition{ + ValidateArgsFunc: func(args []*types.DataType) (*types.DataType, error) { // first argument must be a uuid, second argument must be text if len(args) != 2 { return nil, wrapErrArgumentNumber(2, len(args)) @@ -122,10 +112,10 @@ var ( return types.UUIDType, nil }, - PGFormat: defaultFormat("uuid_generate_v5"), + PGFormatFunc: defaultFormat("uuid_generate_v5"), }, - "encode": { - ValidateArgs: func(args []*types.DataType) (*types.DataType, error) { + "encode": &ScalarFunctionDefinition{ + ValidateArgsFunc: func(args []*types.DataType) (*types.DataType, error) { // first must be blob, second must be text if len(args) != 2 { return nil, wrapErrArgumentNumber(2, len(args)) @@ -141,10 +131,10 @@ var ( return types.TextType, nil }, - PGFormat: defaultFormat("encode"), + PGFormatFunc: defaultFormat("encode"), }, - "decode": { - ValidateArgs: func(args []*types.DataType) (*types.DataType, error) { + "decode": &ScalarFunctionDefinition{ + ValidateArgsFunc: func(args []*types.DataType) (*types.DataType, error) { // first must be text, second must be text if len(args) != 2 { return nil, wrapErrArgumentNumber(2, len(args)) @@ -160,10 +150,10 @@ var ( return types.BlobType, nil }, - PGFormat: defaultFormat("decode"), + PGFormatFunc: defaultFormat("decode"), }, - "digest": { - ValidateArgs: func(args []*types.DataType) (*types.DataType, error) { + "digest": &ScalarFunctionDefinition{ + ValidateArgsFunc: func(args []*types.DataType) (*types.DataType, error) { // first must be either text or blob, second must be text if len(args) != 2 { return nil, wrapErrArgumentNumber(2, len(args)) @@ -179,10 +169,10 @@ var ( return types.BlobType, nil }, - PGFormat: defaultFormat("digest"), + PGFormatFunc: defaultFormat("digest"), }, - "generate_dbid": { - ValidateArgs: func(args []*types.DataType) (*types.DataType, error) { + "generate_dbid": &ScalarFunctionDefinition{ + ValidateArgsFunc: func(args []*types.DataType) (*types.DataType, error) { // first should be text, second should be blob if len(args) != 2 { return nil, wrapErrArgumentNumber(2, len(args)) @@ -198,87 +188,79 @@ var ( return types.TextType, nil }, - PGFormat: func(inputs []string, distinct, star bool) (string, error) { - if star { - return "", errStar("generate_dbid") - } - - if distinct { - return "", errDistinct("generate_dbid") - } - + PGFormatFunc: func(inputs []string) (string, error) { return fmt.Sprintf(`(select 'x' || encode(sha224(lower(%s)::bytea || %s), 'hex'))`, inputs[0], inputs[1]), nil }, }, // array functions - "array_append": { - ValidateArgs: func(args []*types.DataType) (*types.DataType, error) { + "array_append": &ScalarFunctionDefinition{ + ValidateArgsFunc: func(args []*types.DataType) (*types.DataType, error) { if len(args) != 2 { return nil, wrapErrArgumentNumber(2, len(args)) } if !args[0].IsArray { - return nil, fmt.Errorf("%w: expected first argument to be an array, got %s", ErrType, args[0].String()) + return nil, fmt.Errorf("expected first argument to be an array, got %s", args[0].String()) } if args[1].IsArray { - return nil, fmt.Errorf("%w: expected second argument to be a scalar, got %s", ErrType, args[1].String()) + return nil, fmt.Errorf("expected 
second argument to be a scalar, got %s", args[1].String()) } if !strings.EqualFold(args[0].Name, args[1].Name) { - return nil, fmt.Errorf("%w: append type must be equal to scalar array type: array type: %s append type: %s", ErrType, args[0].Name, args[1].Name) + return nil, fmt.Errorf("append type must be equal to scalar array type: array type: %s append type: %s", args[0].Name, args[1].Name) } return args[0], nil }, - PGFormat: defaultFormat("array_append"), + PGFormatFunc: defaultFormat("array_append"), }, - "array_prepend": { - ValidateArgs: func(args []*types.DataType) (*types.DataType, error) { + "array_prepend": &ScalarFunctionDefinition{ + ValidateArgsFunc: func(args []*types.DataType) (*types.DataType, error) { if len(args) != 2 { return nil, wrapErrArgumentNumber(2, len(args)) } if args[0].IsArray { - return nil, fmt.Errorf("%w: expected first argument to be a scalar, got %s", ErrType, args[0].String()) + return nil, fmt.Errorf("expected first argument to be a scalar, got %s", args[0].String()) } if !args[1].IsArray { - return nil, fmt.Errorf("%w: expected second argument to be an array, got %s", ErrType, args[1].String()) + return nil, fmt.Errorf("expected second argument to be an array, got %s", args[1].String()) } if !strings.EqualFold(args[0].Name, args[1].Name) { - return nil, fmt.Errorf("%w: prepend type must be equal to scalar array type: array type: %s prepend type: %s", ErrType, args[1].Name, args[0].Name) + return nil, fmt.Errorf("prepend type must be equal to scalar array type: array type: %s prepend type: %s", args[1].Name, args[0].Name) } return args[1], nil }, - PGFormat: defaultFormat("array_prepend"), + PGFormatFunc: defaultFormat("array_prepend"), }, - "array_cat": { - ValidateArgs: func(args []*types.DataType) (*types.DataType, error) { + "array_cat": &ScalarFunctionDefinition{ + ValidateArgsFunc: func(args []*types.DataType) (*types.DataType, error) { if len(args) != 2 { return nil, wrapErrArgumentNumber(2, len(args)) } if !args[0].IsArray { - return nil, fmt.Errorf("%w: expected first argument to be an array, got %s", ErrType, args[0].String()) + return nil, fmt.Errorf("expected first argument to be an array, got %s", args[0].String()) } if !args[1].IsArray { - return nil, fmt.Errorf("%w: expected second argument to be an array, got %s", ErrType, args[1].String()) + return nil, fmt.Errorf("expected second argument to be an array, got %s", args[1].String()) } if !strings.EqualFold(args[0].Name, args[1].Name) { - return nil, fmt.Errorf("%w: expected both arrays to be of the same scalar type, got %s and %s", ErrType, args[0].Name, args[1].Name) + return nil, fmt.Errorf("expected both arrays to be of the same scalar type, got %s and %s", args[0].Name, args[1].Name) } return args[0], nil }, - PGFormat: defaultFormat("array_cat"), + PGFormatFunc: defaultFormat("array_cat"), }, - "array_length": { - ValidateArgs: func(args []*types.DataType) (*types.DataType, error) { + "array_length": &ScalarFunctionDefinition{ + ValidateArgsFunc: func(args []*types.DataType) (*types.DataType, error) { if len(args) != 1 { return nil, wrapErrArgumentNumber(1, len(args)) } @@ -289,19 +271,12 @@ var ( return types.IntType, nil }, - PGFormat: func(inputs []string, distinct bool, star bool) (string, error) { - if star { - return "", errStar("array_length") - } - if distinct { - return "", errDistinct("array_length") - } - + PGFormatFunc: func(inputs []string) (string, error) { return fmt.Sprintf("array_length(%s, 1)", inputs[0]), nil }, }, - "array_remove": { - ValidateArgs: func(args 
[]*types.DataType) (*types.DataType, error) { + "array_remove": &ScalarFunctionDefinition{ + ValidateArgsFunc: func(args []*types.DataType) (*types.DataType, error) { if len(args) != 2 { return nil, wrapErrArgumentNumber(2, len(args)) } @@ -320,12 +295,12 @@ var ( return args[0], nil }, - PGFormat: defaultFormat("array_remove"), + PGFormatFunc: defaultFormat("array_remove"), }, // string functions // the main SQL string functions defined here: https://www.postgresql.org/docs/16.1/functions-string.html - "bit_length": { - ValidateArgs: func(args []*types.DataType) (*types.DataType, error) { + "bit_length": &ScalarFunctionDefinition{ + ValidateArgsFunc: func(args []*types.DataType) (*types.DataType, error) { if len(args) != 1 { return nil, wrapErrArgumentNumber(1, len(args)) } @@ -336,10 +311,10 @@ var ( return types.IntType, nil }, - PGFormat: defaultFormat("bit_length"), + PGFormatFunc: defaultFormat("bit_length"), }, - "char_length": { - ValidateArgs: func(args []*types.DataType) (*types.DataType, error) { + "char_length": &ScalarFunctionDefinition{ + ValidateArgsFunc: func(args []*types.DataType) (*types.DataType, error) { if len(args) != 1 { return nil, wrapErrArgumentNumber(1, len(args)) } @@ -350,10 +325,10 @@ var ( return types.IntType, nil }, - PGFormat: defaultFormat("char_length"), + PGFormatFunc: defaultFormat("char_length"), }, - "character_length": { - ValidateArgs: func(args []*types.DataType) (*types.DataType, error) { + "character_length": &ScalarFunctionDefinition{ + ValidateArgsFunc: func(args []*types.DataType) (*types.DataType, error) { if len(args) != 1 { return nil, wrapErrArgumentNumber(1, len(args)) } @@ -364,10 +339,10 @@ var ( return types.IntType, nil }, - PGFormat: defaultFormat("character_length"), + PGFormatFunc: defaultFormat("character_length"), }, - "length": { - ValidateArgs: func(args []*types.DataType) (*types.DataType, error) { + "length": &ScalarFunctionDefinition{ + ValidateArgsFunc: func(args []*types.DataType) (*types.DataType, error) { if len(args) != 1 { return nil, wrapErrArgumentNumber(1, len(args)) } @@ -378,10 +353,10 @@ var ( return types.IntType, nil }, - PGFormat: defaultFormat("length"), + PGFormatFunc: defaultFormat("length"), }, - "lower": { - ValidateArgs: func(args []*types.DataType) (*types.DataType, error) { + "lower": &ScalarFunctionDefinition{ + ValidateArgsFunc: func(args []*types.DataType) (*types.DataType, error) { if len(args) != 1 { return nil, wrapErrArgumentNumber(1, len(args)) } @@ -392,10 +367,10 @@ var ( return types.TextType, nil }, - PGFormat: defaultFormat("lower"), + PGFormatFunc: defaultFormat("lower"), }, - "lpad": { - ValidateArgs: func(args []*types.DataType) (*types.DataType, error) { + "lpad": &ScalarFunctionDefinition{ + ValidateArgsFunc: func(args []*types.DataType) (*types.DataType, error) { //can have 2-3 args. 
1 and 3 must be text, 2 must be int if len(args) < 2 || len(args) > 3 { return nil, fmt.Errorf("invalid number of arguments: expected 2 or 3, got %d", len(args)) @@ -415,16 +390,7 @@ var ( return types.TextType, nil }, - PGFormat: func(inputs []string, distinct, star bool) (string, error) { - // we need a custom function to type cast big int to int - if distinct { - return "", errDistinct("lpad") - } - - if star { - return "", errStar("lpad") - } - + PGFormatFunc: func(inputs []string) (string, error) { str := strings.Builder{} str.WriteString("lpad(") str.WriteString(inputs[0]) @@ -440,8 +406,8 @@ var ( return str.String(), nil }, }, - "ltrim": { - ValidateArgs: func(args []*types.DataType) (*types.DataType, error) { + "ltrim": &ScalarFunctionDefinition{ + ValidateArgsFunc: func(args []*types.DataType) (*types.DataType, error) { //can have 1 or 2 args. both must be text if len(args) < 1 || len(args) > 2 { return nil, fmt.Errorf("invalid number of arguments: expected 1 or 2, got %d", len(args)) @@ -455,10 +421,10 @@ var ( return types.TextType, nil }, - PGFormat: defaultFormat("ltrim"), + PGFormatFunc: defaultFormat("ltrim"), }, - "octet_length": { - ValidateArgs: func(args []*types.DataType) (*types.DataType, error) { + "octet_length": &ScalarFunctionDefinition{ + ValidateArgsFunc: func(args []*types.DataType) (*types.DataType, error) { if len(args) != 1 { return nil, wrapErrArgumentNumber(1, len(args)) } @@ -469,10 +435,10 @@ var ( return types.IntType, nil }, - PGFormat: defaultFormat("octet_length"), + PGFormatFunc: defaultFormat("octet_length"), }, - "overlay": { - ValidateArgs: func(args []*types.DataType) (*types.DataType, error) { + "overlay": &ScalarFunctionDefinition{ + ValidateArgsFunc: func(args []*types.DataType) (*types.DataType, error) { // 3-4 arguments. 1 and 2 must be text, 3 must be int, 4 must be int if len(args) < 3 || len(args) > 4 { return nil, fmt.Errorf("invalid number of arguments: expected 3 or 4, got %d", len(args)) @@ -496,15 +462,7 @@ var ( return types.TextType, nil }, - PGFormat: func(inputs []string, distinct bool, star bool) (string, error) { - if distinct { - return "", errDistinct("overlay") - } - - if star { - return "", errStar("overlay") - } - + PGFormatFunc: func(inputs []string) (string, error) { str := strings.Builder{} str.WriteString("overlay(") str.WriteString(inputs[0]) @@ -523,8 +481,8 @@ var ( return str.String(), nil }, }, - "position": { - ValidateArgs: func(args []*types.DataType) (*types.DataType, error) { + "position": &ScalarFunctionDefinition{ + ValidateArgsFunc: func(args []*types.DataType) (*types.DataType, error) { // 2 arguments. 
both must be text if len(args) != 2 { return nil, wrapErrArgumentNumber(2, len(args)) @@ -538,20 +496,12 @@ var ( return types.IntType, nil }, - PGFormat: func(inputs []string, distinct bool, star bool) (string, error) { - if distinct { - return "", errDistinct("position") - } - - if star { - return "", errStar("position") - } - + PGFormatFunc: func(inputs []string) (string, error) { return fmt.Sprintf("position(%s in %s)", inputs[0], inputs[1]), nil }, }, - "rpad": { - ValidateArgs: func(args []*types.DataType) (*types.DataType, error) { + "rpad": &ScalarFunctionDefinition{ + ValidateArgsFunc: func(args []*types.DataType) (*types.DataType, error) { // 2-3 args, 1 and 3 must be text, 2 must be int if len(args) < 2 || len(args) > 3 { return nil, fmt.Errorf("invalid number of arguments: expected 2 or 3, got %d", len(args)) @@ -571,16 +521,7 @@ var ( return types.TextType, nil }, - PGFormat: func(inputs []string, distinct, star bool) (string, error) { - // we need a custom function to type cast big int to int - if distinct { - return "", errDistinct("lpad") - } - - if star { - return "", errStar("lpad") - } - + PGFormatFunc: func(inputs []string) (string, error) { str := strings.Builder{} str.WriteString("rpad(") str.WriteString(inputs[0]) @@ -596,8 +537,8 @@ var ( return str.String(), nil }, }, - "rtrim": { - ValidateArgs: func(args []*types.DataType) (*types.DataType, error) { + "rtrim": &ScalarFunctionDefinition{ + ValidateArgsFunc: func(args []*types.DataType) (*types.DataType, error) { // 1-2 args, both must be text if len(args) < 1 || len(args) > 2 { return nil, fmt.Errorf("invalid number of arguments: expected 1 or 2, got %d", len(args)) @@ -611,10 +552,10 @@ var ( return types.TextType, nil }, - PGFormat: defaultFormat("rtrim"), + PGFormatFunc: defaultFormat("rtrim"), }, - "substring": { - ValidateArgs: func(args []*types.DataType) (*types.DataType, error) { + "substring": &ScalarFunctionDefinition{ + ValidateArgsFunc: func(args []*types.DataType) (*types.DataType, error) { // 2-3 args, 1 must be text, 2 and 3 must be int // Postgres supports several different usages of substring, however Kwil only supports 1. 
// In Postgres, substring can be used to both impose a string over a range, or to perform @@ -638,15 +579,7 @@ var ( return types.TextType, nil }, - PGFormat: func(inputs []string, distinct bool, star bool) (string, error) { - if distinct { - return "", errDistinct("substring") - } - - if star { - return "", errStar("substring") - } - + PGFormatFunc: func(inputs []string) (string, error) { str := strings.Builder{} str.WriteString("substring(") str.WriteString(inputs[0]) @@ -663,8 +596,8 @@ var ( return str.String(), nil }, }, - "trim": { // kwil only supports trim both - ValidateArgs: func(args []*types.DataType) (*types.DataType, error) { + "trim": &ScalarFunctionDefinition{ + ValidateArgsFunc: func(args []*types.DataType) (*types.DataType, error) { // 1-2 args, both must be text if len(args) < 1 || len(args) > 2 { return nil, fmt.Errorf("invalid number of arguments: expected 1 or 2, got %d", len(args)) @@ -678,10 +611,10 @@ var ( return types.TextType, nil }, - PGFormat: defaultFormat("trim"), + PGFormatFunc: defaultFormat("trim"), }, - "upper": { - ValidateArgs: func(args []*types.DataType) (*types.DataType, error) { + "upper": &ScalarFunctionDefinition{ + ValidateArgsFunc: func(args []*types.DataType) (*types.DataType, error) { if len(args) != 1 { return nil, wrapErrArgumentNumber(1, len(args)) } @@ -692,10 +625,10 @@ var ( return types.TextType, nil }, - PGFormat: defaultFormat("upper"), + PGFormatFunc: defaultFormat("upper"), }, - "format": { - ValidateArgs: func(args []*types.DataType) (*types.DataType, error) { + "format": &ScalarFunctionDefinition{ + ValidateArgsFunc: func(args []*types.DataType) (*types.DataType, error) { if len(args) < 1 { return nil, fmt.Errorf("invalid number of arguments: expected at least 1, got %d", len(args)) } @@ -706,20 +639,40 @@ var ( return types.TextType, nil }, - PGFormat: defaultFormat("format"), + PGFormatFunc: defaultFormat("format"), + }, + "coalesce": &ScalarFunctionDefinition{ + ValidateArgsFunc: func(args []*types.DataType) (*types.DataType, error) { + if len(args) < 1 { + return nil, fmt.Errorf("invalid number of arguments: expected at least 1, got %d", len(args)) + } + + firstType := args[0] + // all arguments must be the same type + for i, arg := range args { + if !firstType.EqualsStrict(arg) { + return nil, fmt.Errorf("all arguments must be the same type, but argument %d is %s and argument 1 is %s", i+1, arg.String(), firstType.String()) + } + } + + return firstType, nil + }, + PGFormatFunc: defaultFormat("coalesce"), }, // Aggregate functions - "count": { - ValidateArgs: func(args []*types.DataType) (*types.DataType, error) { + "count": &AggregateFunctionDefinition{ + ValidateArgsFunc: func(args []*types.DataType) (*types.DataType, error) { if len(args) > 1 { return nil, fmt.Errorf("invalid number of arguments: expected at most 1, got %d", len(args)) } return types.IntType, nil }, - IsAggregate: true, - PGFormat: func(inputs []string, distinct bool, star bool) (string, error) { - if star { + PGFormatFunc: func(inputs []string, distinct bool) (string, error) { + if len(inputs) == 0 { + if distinct { + return "", fmt.Errorf("count(DISTINCT *) is not supported") + } return "count(*)", nil } if distinct { @@ -728,10 +681,9 @@ var ( return fmt.Sprintf("count(%s)", inputs[0]), nil }, - StarArgReturn: types.IntType, }, - "sum": { - ValidateArgs: func(args []*types.DataType) (*types.DataType, error) { + "sum": &AggregateFunctionDefinition{ + ValidateArgsFunc: func(args []*types.DataType) (*types.DataType, error) { // per 
https://www.postgresql.org/docs/current/datatype-numeric.html#DATATYPE-NUMERIC-TABLE // the result of sum will be made a decimal(1000, 0) if len(args) != 1 { @@ -758,26 +710,21 @@ var ( case args[0].EqualsStrict(types.Uint256Type): retType = decimal1000.Copy() default: - panic("unexpected numeric type: " + retType.String()) + panic(fmt.Sprintf("unexpected numeric type: %s", retType.String())) } return retType, nil }, - IsAggregate: true, - PGFormat: func(inputs []string, distinct bool, star bool) (string, error) { - if star { - return "", errStar("sum") - } + PGFormatFunc: func(inputs []string, distinct bool) (string, error) { if distinct { return "sum(DISTINCT %s)", nil } return fmt.Sprintf("sum(%s)", inputs[0]), nil }, - StarArgReturn: types.IntType, }, - "min": { - ValidateArgs: func(args []*types.DataType) (*types.DataType, error) { + "min": &AggregateFunctionDefinition{ + ValidateArgsFunc: func(args []*types.DataType) (*types.DataType, error) { // as per postgres docs, min can take any numeric or string type: https://www.postgresql.org/docs/8.0/functions-aggregate.html if len(args) != 1 { return nil, wrapErrArgumentNumber(1, len(args)) @@ -789,11 +736,7 @@ var ( return args[0], nil }, - IsAggregate: true, - PGFormat: func(inputs []string, distinct bool, star bool) (string, error) { - if star { - return "", errStar("min") - } + PGFormatFunc: func(inputs []string, distinct bool) (string, error) { if distinct { return "min(DISTINCT %s)", nil } @@ -801,8 +744,8 @@ var ( return fmt.Sprintf("min(%s)", inputs[0]), nil }, }, - "max": { - ValidateArgs: func(args []*types.DataType) (*types.DataType, error) { + "max": &AggregateFunctionDefinition{ + ValidateArgsFunc: func(args []*types.DataType) (*types.DataType, error) { // as per postgres docs, max can take any numeric or string type: https://www.postgresql.org/docs/8.0/functions-aggregate.html if len(args) != 1 { return nil, wrapErrArgumentNumber(1, len(args)) @@ -814,11 +757,7 @@ var ( return args[0], nil }, - IsAggregate: true, - PGFormat: func(inputs []string, distinct bool, star bool) (string, error) { - if star { - return "", errStar("max") - } + PGFormatFunc: func(inputs []string, distinct bool) (string, error) { if distinct { return "max(DISTINCT %s)", nil } @@ -826,8 +765,8 @@ var ( return fmt.Sprintf("max(%s)", inputs[0]), nil }, }, - "array_agg": { - ValidateArgs: func(args []*types.DataType) (*types.DataType, error) { + "array_agg": &AggregateFunctionDefinition{ + ValidateArgsFunc: func(args []*types.DataType) (*types.DataType, error) { if len(args) != 1 { return nil, wrapErrArgumentNumber(1, len(args)) } @@ -840,11 +779,7 @@ var ( a2.IsArray = true return a2, nil }, - IsAggregate: true, - PGFormat: func(inputs []string, distinct bool, star bool) (string, error) { - if star { - return "", errStar("array_agg") - } + PGFormatFunc: func(inputs []string, distinct bool) (string, error) { if distinct { return "array_agg(DISTINCT %s)", nil } @@ -852,19 +787,129 @@ var ( return fmt.Sprintf("array_agg(%s ORDER BY %s)", inputs[0], inputs[0]), nil }, }, + "avg": &AggregateFunctionDefinition{ + ValidateArgsFunc: func(args []*types.DataType) (*types.DataType, error) { + // Postgres supports any numeric type for average, but we will enforce that it is numeric + // to guarantee determinism. 
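+ // For example, an integer argument is rejected by the check below; only arguments whose type
+ // name matches types.DecimalStr (the decimal type name) validate, so callers would need to cast
+ // other numeric values to a decimal type before averaging them.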
+ if len(args) != 1 { + return nil, wrapErrArgumentNumber(1, len(args)) + } + + if !strings.EqualFold(args[0].Name, types.DecimalStr) { + return nil, fmt.Errorf("expected argument to be numeric, got %s", args[0].String()) + } + + return args[0], nil + }, + PGFormatFunc: func(inputs []string, distinct bool) (string, error) { + if distinct { + return "avg(DISTINCT %s)", nil + } + + return fmt.Sprintf("avg(%s)", inputs[0]), nil + }, + }, + // Window functions + "lag": &WindowFunctionDefinition{ + ValidateArgsFunc: func(args []*types.DataType) (*types.DataType, error) { + // LAG(value_expression [, offset [, default_value]]) + if len(args) < 1 || len(args) > 3 { + return nil, fmt.Errorf("invalid number of arguments: expected 1-3, got %d", len(args)) + } + + if len(args) >= 2 { + if !args[1].EqualsStrict(types.IntType) { + return nil, wrapErrArgumentType(types.IntType, args[1]) + } + } + + if len(args) == 3 { + if !args[2].EqualsStrict(args[0]) { + return nil, fmt.Errorf("expected default value to be the same type as the value expression: %s != %s", args[0].String(), args[2].String()) + } + } + + return args[0], nil + }, + PGFormatFunc: defaultFormat("lag"), + }, + "lead": &WindowFunctionDefinition{ + ValidateArgsFunc: func(args []*types.DataType) (*types.DataType, error) { + // LEAD(value_expression [, offset [, default_value]]) + if len(args) < 1 || len(args) > 3 { + return nil, fmt.Errorf("invalid number of arguments: expected 1-3, got %d", len(args)) + } + + if len(args) >= 2 { + if !args[1].EqualsStrict(types.IntType) { + return nil, wrapErrArgumentType(types.IntType, args[1]) + } + } + + if len(args) == 3 { + if !args[2].EqualsStrict(args[0]) { + return nil, fmt.Errorf("expected default value to be the same type as the value expression: %s != %s", args[0].String(), args[2].String()) + } + } + + return args[0], nil + }, + PGFormatFunc: defaultFormat("lead"), + }, + "first_value": &WindowFunctionDefinition{ + ValidateArgsFunc: func(args []*types.DataType) (*types.DataType, error) { + // FIRST_VALUE(value_expression) OVER (...) + if len(args) != 1 { + return nil, wrapErrArgumentNumber(1, len(args)) + } + + return args[0], nil + }, + PGFormatFunc: defaultFormat("first_value"), + }, + "last_value": &WindowFunctionDefinition{ + ValidateArgsFunc: func(args []*types.DataType) (*types.DataType, error) { + // LAST_VALUE(value_expression) OVER (...) + if len(args) != 1 { + return nil, wrapErrArgumentNumber(1, len(args)) + } + + return args[0], nil + }, + PGFormatFunc: defaultFormat("last_value"), + }, + "nth_value": &WindowFunctionDefinition{ + ValidateArgsFunc: func(args []*types.DataType) (*types.DataType, error) { + // NTH_VALUE(value_expression, nth) OVER (...) + if len(args) != 2 { + return nil, wrapErrArgumentNumber(2, len(args)) + } + + if !args[1].EqualsStrict(types.IntType) { + return nil, wrapErrArgumentType(types.IntType, args[1]) + } + + return args[0], nil + }, + PGFormatFunc: defaultFormat("nth_value"), + }, + "row_number": &WindowFunctionDefinition{ + ValidateArgsFunc: func(args []*types.DataType) (*types.DataType, error) { + // ROW_NUMBER() OVER (...) + if len(args) != 0 { + return nil, wrapErrArgumentNumber(0, len(args)) + } + + return types.IntType, nil + }, + PGFormatFunc: defaultFormat("row_number"), + }, } ) // defaultFormat is the default PGFormat function for functions that do not have a custom one. 
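// For example, defaultFormat("lower") returns a formatter that renders inputs ["$1"] as "lower($1)",
// joining multiple inputs with ", " (see the implementation below).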
-func defaultFormat(name string) FormatFunc { - return func(inputs []string, distinct bool, star bool) (string, error) { - if star { - return "", errStar(name) - } - if distinct { - return "", errDistinct(name) - } - +func defaultFormat(name string) func(inputs []string) (string, error) { + return func(inputs []string) (string, error) { return fmt.Sprintf("%s(%s)", name, strings.Join(inputs, ", ")), nil } } @@ -891,57 +936,69 @@ func init() { } } -func errDistinct(funcName string) error { - return fmt.Errorf(`%w: cannot use DISTINCT with function "%s"`, ErrFunctionSignature, funcName) +// FunctionDefinition is a definition of a function. +// It has three implementations: ScalarFunctionDefinition, AggregateFunctionDefinition, and WindowFunctionDefinition. +type FunctionDefinition interface { + // ValidateArgs is a function that checks the arguments passed to the function. + // It can check the argument type and amount of arguments. + // It returns the expected return type based on the arguments. + ValidateArgs(args []*types.DataType) (*types.DataType, error) + funcdef() +} + +// ScalarFunctionDefinition is a definition of a scalar function. +type ScalarFunctionDefinition struct { + ValidateArgsFunc func(args []*types.DataType) (*types.DataType, error) + PGFormatFunc func(inputs []string) (string, error) } -func errStar(funcName string) error { - return fmt.Errorf(`%w: cannot use * with function "%s"`, ErrFunctionSignature, funcName) +func (s *ScalarFunctionDefinition) ValidateArgs(args []*types.DataType) (*types.DataType, error) { + return s.ValidateArgsFunc(args) } -// FunctionDefinition defines a function that can be used in the database. -type FunctionDefinition struct { +func (s *ScalarFunctionDefinition) funcdef() {} + +// AggregateFunctionDefinition is a definition of an aggregate function. +type AggregateFunctionDefinition struct { // ValidateArgs is a function that checks the arguments passed to the function. // It can check the argument type and amount of arguments. - // It returns the expected return type based on the arguments. - ValidateArgs func(args []*types.DataType) (*types.DataType, error) - // StarArgReturn is the type the function returns if * is passed as the sole - // argument. If it is nil, the function does not support *. - StarArgReturn *types.DataType - // IsAggregate is true if the function is an aggregate function. - IsAggregate bool + ValidateArgsFunc func(args []*types.DataType) (*types.DataType, error) // PGFormat is a function that formats the inputs to the function in Postgres format. // For example, the function `sum` would format the inputs as `sum($1)`. - // It will be given the same amount of inputs as ValidateArgs() was given. - // ValidateArgs will always be called first. - PGFormat FormatFunc - // TODO: PGFormat is related to the plpgsql generation, and therefore should be moved to - // internal/engine/generate. There is some implicit coupling here. + // It can also format the inputs with DISTINCT. If no inputs are given, it is a *. + PGFormatFunc func(inputs []string, distinct bool) (string, error) + // We currently don't need to evaluate aggregates since they are handled by the engine. }
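The refactor above replaces the single flat FunctionDefinition struct with an interface and three concrete types. Below is a minimal sketch (not part of this patch) of how a caller in this package might dispatch on the three concrete types when rendering a call to Postgres SQL. The exported Functions map and the definition types are from this file; formatCall and its parameters are hypothetical names used only for illustration.

func formatCall(name string, inputs []string, distinct bool) (string, error) {
	def, ok := Functions[name]
	if !ok {
		return "", fmt.Errorf("unknown function: %s", name)
	}
	switch d := def.(type) {
	case *ScalarFunctionDefinition:
		// scalar formatters take only the rendered argument strings
		return d.PGFormatFunc(inputs)
	case *AggregateFunctionDefinition:
		// aggregate formatters also handle DISTINCT (and an empty input list, e.g. count(*))
		return d.PGFormatFunc(inputs, distinct)
	case *WindowFunctionDefinition:
		// window formatters render only the call itself; any OVER clause would be appended by the caller
		return d.PGFormatFunc(inputs)
	default:
		return "", fmt.Errorf("unsupported function definition for %q", name)
	}
}

Branching on the concrete type, as sketched here, replaces the old IsAggregate flag, StarArgReturn field, and the errStar/errDistinct paths that this patch removes.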
-// FormatFunc is a function that formats a string of inputs for a SQL function. -type FormatFunc func(inputs []string, distinct bool, star bool) (string, error) +func (a *AggregateFunctionDefinition) ValidateArgs(args []*types.DataType) (*types.DataType, error) { + return a.ValidateArgsFunc(args) } -func wrapErrArgumentNumber(expected, got int) error { - return fmt.Errorf("%w: expected %d, got %d", ErrFunctionSignature, expected, got) +func (a *AggregateFunctionDefinition) funcdef() {} + +// WindowFunctionDefinition is a definition of a window function. +type WindowFunctionDefinition struct { + // ValidateArgs is a function that checks the arguments passed to the function. + // It can check the argument type and amount of arguments. + ValidateArgsFunc func(args []*types.DataType) (*types.DataType, error) + // PGFormat is a function that formats the inputs to the function in Postgres format. + // For example, the function `lag` would format the inputs as `lag($1)`. + PGFormatFunc func(inputs []string) (string, error) } -func wrapErrArgumentType(expected, got *types.DataType) error { - return fmt.Errorf("%w: expected %s, got %s", ErrType, expected.String(), got.String()) +func (w *WindowFunctionDefinition) ValidateArgs(args []*types.DataType) (*types.DataType, error) { + return w.ValidateArgsFunc(args) } -// ParseNotice parses a log raised from a notice() function. -// It returns an error if the log is not in the expected format. -func ParseNotice(log string) (txID string, notice string, err error) { - _, after, found := strings.Cut(log, "txid:") - if !found { - return "", "", fmt.Errorf("notice log does not contain txid prefix: %s", log) - } +func (w *WindowFunctionDefinition) funcdef() {} - parts := strings.SplitN(after, " ", 2) - if len(parts) != 2 { - return "", "", fmt.Errorf("notice log does not contain txid and notice separated by space: %s", log) - } +// FormatFunc is a function that formats a string of inputs for a SQL function.
+type FormatFunc func(inputs []string) (string, error) - return parts[0], parts[1], nil +func wrapErrArgumentNumber(expected, got int) error { + return fmt.Errorf("expected %d, got %d", expected, got) +} + +func wrapErrArgumentType(expected, got *types.DataType) error { + return fmt.Errorf("expected %s, got %s", expected.String(), got.String()) } diff --git a/node/engine/parse/functions_test.go b/node/engine/parse/functions_test.go new file mode 100644 index 000000000..48af6155b --- /dev/null +++ b/node/engine/parse/functions_test.go @@ -0,0 +1,41 @@ +package parse_test + +import ( + "testing" + + "github.com/kwilteam/kwil-db/node/engine/parse" +) + +// tests that we have implemented all functions +func Test_AllFunctionsImplemented(t *testing.T) { + for name, fn := range parse.Functions { + switch fnt := fn.(type) { + case *parse.ScalarFunctionDefinition: + if fnt.PGFormatFunc == nil { + t.Errorf("function %s has no PGFormatFunc", name) + } + + if fnt.ValidateArgsFunc == nil { + t.Errorf("function %s has no ValidateArgsFunc", name) + } + case *parse.AggregateFunctionDefinition: + if fnt.PGFormatFunc == nil { + t.Errorf("function %s has no PGFormatFunc", name) + } + + if fnt.ValidateArgsFunc == nil { + t.Errorf("function %s has no ValidateArgsFunc", name) + } + case *parse.WindowFunctionDefinition: + if fnt.PGFormatFunc == nil { + t.Errorf("function %s has no PGFormatFunc", name) + } + + if fnt.ValidateArgsFunc == nil { + t.Errorf("function %s has no ValidateArgsFunc", name) + } + default: + t.Errorf("function %s is not a scalar, aggregate, or window function", name) + } + } +} diff --git a/node/engine/parse/gen/kuneiform_lexer.go b/node/engine/parse/gen/kuneiform_lexer.go new file mode 100644 index 000000000..f4f41ec12 --- /dev/null +++ b/node/engine/parse/gen/kuneiform_lexer.go @@ -0,0 +1,806 @@ +// Code generated from KuneiformLexer.g4 by ANTLR 4.13.1. DO NOT EDIT. 
+ +package gen + +import ( + "fmt" + "github.com/antlr4-go/antlr/v4" + "sync" + "unicode" +) + +// Suppress unused import error +var _ = fmt.Printf +var _ = sync.Once{} +var _ = unicode.IsLetter + +type KuneiformLexer struct { + *antlr.BaseLexer + channelNames []string + modeNames []string + // TODO: EOF string +} + +var KuneiformLexerLexerStaticData struct { + once sync.Once + serializedATN []int32 + ChannelNames []string + ModeNames []string + LiteralNames []string + SymbolicNames []string + RuleNames []string + PredictionContextCache *antlr.PredictionContextCache + atn *antlr.ATN + decisionToDFA []*antlr.DFA +} + +func kuneiformlexerLexerInit() { + staticData := &KuneiformLexerLexerStaticData + staticData.ChannelNames = []string{ + "DEFAULT_TOKEN_CHANNEL", "HIDDEN", + } + staticData.ModeNames = []string{ + "DEFAULT_MODE", + } + staticData.LiteralNames = []string{ + "", "'{'", "'}'", "'['", "']'", "':'", "';'", "'('", "')'", "','", "'@'", + "'!'", "'.'", "'||'", "'*'", "'='", "'=='", "'#'", "'$'", "'%'", "'+'", + "'-'", "'/'", "", "'<'", "'<='", "'>'", "'>='", "'::'", "'_'", "':='", + "'..'", "'\"'", "'use'", "'unuse'", "'table'", "'action'", "'create'", + "'alter'", "'column'", "'add'", "'drop'", "'rename'", "'to'", "'constraint'", + "'check'", "'foreign'", "'primary'", "'key'", "'on'", "'do'", "'unique'", + "'cascade'", "'restrict'", "'set'", "'default'", "'null'", "'delete'", + "'update'", "'references'", "'ref'", "'not'", "'index'", "'and'", "'or'", + "'like'", "'ilike'", "'in'", "'between'", "'is'", "'exists'", "'all'", + "'any'", "'join'", "'left'", "'right'", "'inner'", "'as'", "'asc'", + "'desc'", "'limit'", "'offset'", "'order'", "'by'", "'group'", "'having'", + "'returns'", "'no'", "'with'", "'case'", "'when'", "'then'", "'end'", + "'distinct'", "'from'", "'where'", "'collate'", "'select'", "'insert'", + "'values'", "'full'", "'union'", "'intersect'", "'except'", "'nulls'", + "'first'", "'last'", "'returning'", "'into'", "'conflict'", "'nothing'", + "'for'", "'if'", "'elseif'", "'else'", "'break'", "'return'", "'next'", + "'over'", "'partition'", "'window'", "'filter'", "'recursive'", "'schema'", + "'grant'", "'revoke'", "'role'", "'transfer'", "'ownership'", "'replace'", + "'array'", "'namespace'", "'roles'", "'call'", "", "'true'", "'false'", + "", "", "", "'on_update'", "'on_delete'", "'set_default'", "'set_null'", + "'no_action'", + } + staticData.SymbolicNames = []string{ + "", "LBRACE", "RBRACE", "LBRACKET", "RBRACKET", "COL", "SCOL", "LPAREN", + "RPAREN", "COMMA", "AT", "EXCL", "PERIOD", "CONCAT", "STAR", "EQUALS", + "EQUATE", "HASH", "DOLLAR", "MOD", "PLUS", "MINUS", "DIV", "NEQ", "LT", + "LTE", "GT", "GTE", "TYPE_CAST", "UNDERSCORE", "ASSIGN", "RANGE", "DOUBLE_QUOTE", + "USE", "UNUSE", "TABLE", "ACTION", "CREATE", "ALTER", "COLUMN", "ADD", + "DROP", "RENAME", "TO", "CONSTRAINT", "CHECK", "FOREIGN", "PRIMARY", + "KEY", "ON", "DO", "UNIQUE", "CASCADE", "RESTRICT", "SET", "DEFAULT", + "NULL", "DELETE", "UPDATE", "REFERENCES", "REF", "NOT", "INDEX", "AND", + "OR", "LIKE", "ILIKE", "IN", "BETWEEN", "IS", "EXISTS", "ALL", "ANY", + "JOIN", "LEFT", "RIGHT", "INNER", "AS", "ASC", "DESC", "LIMIT", "OFFSET", + "ORDER", "BY", "GROUP", "HAVING", "RETURNS", "NO", "WITH", "CASE", "WHEN", + "THEN", "END", "DISTINCT", "FROM", "WHERE", "COLLATE", "SELECT", "INSERT", + "VALUES", "FULL", "UNION", "INTERSECT", "EXCEPT", "NULLS", "FIRST", + "LAST", "RETURNING", "INTO", "CONFLICT", "NOTHING", "FOR", "IF", "ELSEIF", + "ELSE", "BREAK", "RETURN", "NEXT", "OVER", "PARTITION", "WINDOW", "FILTER", 
+ "RECURSIVE", "SCHEMA", "GRANT", "REVOKE", "ROLE", "TRANSFER", "OWNERSHIP", + "REPLACE", "ARRAY", "NAMESPACE", "ROLES", "CALL", "STRING_", "TRUE", + "FALSE", "DIGITS_", "BINARY_", "LEGACY_FOREIGN_KEY", "LEGACY_ON_UPDATE", + "LEGACY_ON_DELETE", "LEGACY_SET_DEFAULT", "LEGACY_SET_NULL", "LEGACY_NO_ACTION", + "IDENTIFIER", "VARIABLE", "CONTEXTUAL_VARIABLE", "HASH_IDENTIFIER", + "WS", "BLOCK_COMMENT", "LINE_COMMENT", + } + staticData.RuleNames = []string{ + "LBRACE", "RBRACE", "LBRACKET", "RBRACKET", "COL", "SCOL", "LPAREN", + "RPAREN", "COMMA", "AT", "EXCL", "PERIOD", "CONCAT", "STAR", "EQUALS", + "EQUATE", "HASH", "DOLLAR", "MOD", "PLUS", "MINUS", "DIV", "NEQ", "LT", + "LTE", "GT", "GTE", "TYPE_CAST", "UNDERSCORE", "ASSIGN", "RANGE", "DOUBLE_QUOTE", + "USE", "UNUSE", "TABLE", "ACTION", "CREATE", "ALTER", "COLUMN", "ADD", + "DROP", "RENAME", "TO", "CONSTRAINT", "CHECK", "FOREIGN", "PRIMARY", + "KEY", "ON", "DO", "UNIQUE", "CASCADE", "RESTRICT", "SET", "DEFAULT", + "NULL", "DELETE", "UPDATE", "REFERENCES", "REF", "NOT", "INDEX", "AND", + "OR", "LIKE", "ILIKE", "IN", "BETWEEN", "IS", "EXISTS", "ALL", "ANY", + "JOIN", "LEFT", "RIGHT", "INNER", "AS", "ASC", "DESC", "LIMIT", "OFFSET", + "ORDER", "BY", "GROUP", "HAVING", "RETURNS", "NO", "WITH", "CASE", "WHEN", + "THEN", "END", "DISTINCT", "FROM", "WHERE", "COLLATE", "SELECT", "INSERT", + "VALUES", "FULL", "UNION", "INTERSECT", "EXCEPT", "NULLS", "FIRST", + "LAST", "RETURNING", "INTO", "CONFLICT", "NOTHING", "FOR", "IF", "ELSEIF", + "ELSE", "BREAK", "RETURN", "NEXT", "OVER", "PARTITION", "WINDOW", "FILTER", + "RECURSIVE", "SCHEMA", "GRANT", "REVOKE", "ROLE", "TRANSFER", "OWNERSHIP", + "REPLACE", "ARRAY", "NAMESPACE", "ROLES", "CALL", "STRING_", "TRUE", + "FALSE", "DIGITS_", "BINARY_", "LEGACY_FOREIGN_KEY", "LEGACY_ON_UPDATE", + "LEGACY_ON_DELETE", "LEGACY_SET_DEFAULT", "LEGACY_SET_NULL", "LEGACY_NO_ACTION", + "IDENTIFIER", "VARIABLE", "CONTEXTUAL_VARIABLE", "HASH_IDENTIFIER", + "WS", "BLOCK_COMMENT", "LINE_COMMENT", + } + staticData.PredictionContextCache = antlr.NewPredictionContextCache() + staticData.serializedATN = []int32{ + 4, 0, 151, 1141, 6, -1, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, + 2, 4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, + 2, 10, 7, 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 2, 14, 7, 14, 2, + 15, 7, 15, 2, 16, 7, 16, 2, 17, 7, 17, 2, 18, 7, 18, 2, 19, 7, 19, 2, 20, + 7, 20, 2, 21, 7, 21, 2, 22, 7, 22, 2, 23, 7, 23, 2, 24, 7, 24, 2, 25, 7, + 25, 2, 26, 7, 26, 2, 27, 7, 27, 2, 28, 7, 28, 2, 29, 7, 29, 2, 30, 7, 30, + 2, 31, 7, 31, 2, 32, 7, 32, 2, 33, 7, 33, 2, 34, 7, 34, 2, 35, 7, 35, 2, + 36, 7, 36, 2, 37, 7, 37, 2, 38, 7, 38, 2, 39, 7, 39, 2, 40, 7, 40, 2, 41, + 7, 41, 2, 42, 7, 42, 2, 43, 7, 43, 2, 44, 7, 44, 2, 45, 7, 45, 2, 46, 7, + 46, 2, 47, 7, 47, 2, 48, 7, 48, 2, 49, 7, 49, 2, 50, 7, 50, 2, 51, 7, 51, + 2, 52, 7, 52, 2, 53, 7, 53, 2, 54, 7, 54, 2, 55, 7, 55, 2, 56, 7, 56, 2, + 57, 7, 57, 2, 58, 7, 58, 2, 59, 7, 59, 2, 60, 7, 60, 2, 61, 7, 61, 2, 62, + 7, 62, 2, 63, 7, 63, 2, 64, 7, 64, 2, 65, 7, 65, 2, 66, 7, 66, 2, 67, 7, + 67, 2, 68, 7, 68, 2, 69, 7, 69, 2, 70, 7, 70, 2, 71, 7, 71, 2, 72, 7, 72, + 2, 73, 7, 73, 2, 74, 7, 74, 2, 75, 7, 75, 2, 76, 7, 76, 2, 77, 7, 77, 2, + 78, 7, 78, 2, 79, 7, 79, 2, 80, 7, 80, 2, 81, 7, 81, 2, 82, 7, 82, 2, 83, + 7, 83, 2, 84, 7, 84, 2, 85, 7, 85, 2, 86, 7, 86, 2, 87, 7, 87, 2, 88, 7, + 88, 2, 89, 7, 89, 2, 90, 7, 90, 2, 91, 7, 91, 2, 92, 7, 92, 2, 93, 7, 93, + 2, 94, 7, 94, 2, 95, 7, 95, 2, 96, 7, 96, 2, 97, 7, 97, 2, 98, 7, 98, 2, + 99, 7, 99, 2, 
100, 7, 100, 2, 101, 7, 101, 2, 102, 7, 102, 2, 103, 7, 103, + 2, 104, 7, 104, 2, 105, 7, 105, 2, 106, 7, 106, 2, 107, 7, 107, 2, 108, + 7, 108, 2, 109, 7, 109, 2, 110, 7, 110, 2, 111, 7, 111, 2, 112, 7, 112, + 2, 113, 7, 113, 2, 114, 7, 114, 2, 115, 7, 115, 2, 116, 7, 116, 2, 117, + 7, 117, 2, 118, 7, 118, 2, 119, 7, 119, 2, 120, 7, 120, 2, 121, 7, 121, + 2, 122, 7, 122, 2, 123, 7, 123, 2, 124, 7, 124, 2, 125, 7, 125, 2, 126, + 7, 126, 2, 127, 7, 127, 2, 128, 7, 128, 2, 129, 7, 129, 2, 130, 7, 130, + 2, 131, 7, 131, 2, 132, 7, 132, 2, 133, 7, 133, 2, 134, 7, 134, 2, 135, + 7, 135, 2, 136, 7, 136, 2, 137, 7, 137, 2, 138, 7, 138, 2, 139, 7, 139, + 2, 140, 7, 140, 2, 141, 7, 141, 2, 142, 7, 142, 2, 143, 7, 143, 2, 144, + 7, 144, 2, 145, 7, 145, 2, 146, 7, 146, 2, 147, 7, 147, 2, 148, 7, 148, + 2, 149, 7, 149, 2, 150, 7, 150, 1, 0, 1, 0, 1, 1, 1, 1, 1, 2, 1, 2, 1, + 3, 1, 3, 1, 4, 1, 4, 1, 5, 1, 5, 1, 6, 1, 6, 1, 7, 1, 7, 1, 8, 1, 8, 1, + 9, 1, 9, 1, 10, 1, 10, 1, 11, 1, 11, 1, 12, 1, 12, 1, 12, 1, 13, 1, 13, + 1, 14, 1, 14, 1, 15, 1, 15, 1, 15, 1, 16, 1, 16, 1, 17, 1, 17, 1, 18, 1, + 18, 1, 19, 1, 19, 1, 20, 1, 20, 1, 21, 1, 21, 1, 22, 1, 22, 1, 22, 1, 22, + 3, 22, 354, 8, 22, 1, 23, 1, 23, 1, 24, 1, 24, 1, 24, 1, 25, 1, 25, 1, + 26, 1, 26, 1, 26, 1, 27, 1, 27, 1, 27, 1, 28, 1, 28, 1, 29, 1, 29, 1, 29, + 1, 30, 1, 30, 1, 30, 1, 31, 1, 31, 1, 32, 1, 32, 1, 32, 1, 32, 1, 33, 1, + 33, 1, 33, 1, 33, 1, 33, 1, 33, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, + 1, 35, 1, 35, 1, 35, 1, 35, 1, 35, 1, 35, 1, 35, 1, 36, 1, 36, 1, 36, 1, + 36, 1, 36, 1, 36, 1, 36, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 38, + 1, 38, 1, 38, 1, 38, 1, 38, 1, 38, 1, 38, 1, 39, 1, 39, 1, 39, 1, 39, 1, + 40, 1, 40, 1, 40, 1, 40, 1, 40, 1, 41, 1, 41, 1, 41, 1, 41, 1, 41, 1, 41, + 1, 41, 1, 42, 1, 42, 1, 42, 1, 43, 1, 43, 1, 43, 1, 43, 1, 43, 1, 43, 1, + 43, 1, 43, 1, 43, 1, 43, 1, 43, 1, 44, 1, 44, 1, 44, 1, 44, 1, 44, 1, 44, + 1, 45, 1, 45, 1, 45, 1, 45, 1, 45, 1, 45, 1, 45, 1, 45, 1, 46, 1, 46, 1, + 46, 1, 46, 1, 46, 1, 46, 1, 46, 1, 46, 1, 47, 1, 47, 1, 47, 1, 47, 1, 48, + 1, 48, 1, 48, 1, 49, 1, 49, 1, 49, 1, 50, 1, 50, 1, 50, 1, 50, 1, 50, 1, + 50, 1, 50, 1, 51, 1, 51, 1, 51, 1, 51, 1, 51, 1, 51, 1, 51, 1, 51, 1, 52, + 1, 52, 1, 52, 1, 52, 1, 52, 1, 52, 1, 52, 1, 52, 1, 52, 1, 53, 1, 53, 1, + 53, 1, 53, 1, 54, 1, 54, 1, 54, 1, 54, 1, 54, 1, 54, 1, 54, 1, 54, 1, 55, + 1, 55, 1, 55, 1, 55, 1, 55, 1, 56, 1, 56, 1, 56, 1, 56, 1, 56, 1, 56, 1, + 56, 1, 57, 1, 57, 1, 57, 1, 57, 1, 57, 1, 57, 1, 57, 1, 58, 1, 58, 1, 58, + 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 59, 1, 59, 1, + 59, 1, 59, 1, 60, 1, 60, 1, 60, 1, 60, 1, 61, 1, 61, 1, 61, 1, 61, 1, 61, + 1, 61, 1, 62, 1, 62, 1, 62, 1, 62, 1, 63, 1, 63, 1, 63, 1, 64, 1, 64, 1, + 64, 1, 64, 1, 64, 1, 65, 1, 65, 1, 65, 1, 65, 1, 65, 1, 65, 1, 66, 1, 66, + 1, 66, 1, 67, 1, 67, 1, 67, 1, 67, 1, 67, 1, 67, 1, 67, 1, 67, 1, 68, 1, + 68, 1, 68, 1, 69, 1, 69, 1, 69, 1, 69, 1, 69, 1, 69, 1, 69, 1, 70, 1, 70, + 1, 70, 1, 70, 1, 71, 1, 71, 1, 71, 1, 71, 1, 72, 1, 72, 1, 72, 1, 72, 1, + 72, 1, 73, 1, 73, 1, 73, 1, 73, 1, 73, 1, 74, 1, 74, 1, 74, 1, 74, 1, 74, + 1, 74, 1, 75, 1, 75, 1, 75, 1, 75, 1, 75, 1, 75, 1, 76, 1, 76, 1, 76, 1, + 77, 1, 77, 1, 77, 1, 77, 1, 78, 1, 78, 1, 78, 1, 78, 1, 78, 1, 79, 1, 79, + 1, 79, 1, 79, 1, 79, 1, 79, 1, 80, 1, 80, 1, 80, 1, 80, 1, 80, 1, 80, 1, + 80, 1, 81, 1, 81, 1, 81, 1, 81, 1, 81, 1, 81, 1, 82, 1, 82, 1, 82, 1, 83, + 1, 83, 1, 83, 1, 83, 1, 83, 1, 83, 1, 84, 1, 84, 1, 84, 1, 84, 1, 84, 1, + 84, 1, 84, 1, 85, 1, 85, 1, 85, 1, 85, 1, 
85, 1, 85, 1, 85, 1, 85, 1, 86, + 1, 86, 1, 86, 1, 87, 1, 87, 1, 87, 1, 87, 1, 87, 1, 88, 1, 88, 1, 88, 1, + 88, 1, 88, 1, 89, 1, 89, 1, 89, 1, 89, 1, 89, 1, 90, 1, 90, 1, 90, 1, 90, + 1, 90, 1, 91, 1, 91, 1, 91, 1, 91, 1, 92, 1, 92, 1, 92, 1, 92, 1, 92, 1, + 92, 1, 92, 1, 92, 1, 92, 1, 93, 1, 93, 1, 93, 1, 93, 1, 93, 1, 94, 1, 94, + 1, 94, 1, 94, 1, 94, 1, 94, 1, 95, 1, 95, 1, 95, 1, 95, 1, 95, 1, 95, 1, + 95, 1, 95, 1, 96, 1, 96, 1, 96, 1, 96, 1, 96, 1, 96, 1, 96, 1, 97, 1, 97, + 1, 97, 1, 97, 1, 97, 1, 97, 1, 97, 1, 98, 1, 98, 1, 98, 1, 98, 1, 98, 1, + 98, 1, 98, 1, 99, 1, 99, 1, 99, 1, 99, 1, 99, 1, 100, 1, 100, 1, 100, 1, + 100, 1, 100, 1, 100, 1, 101, 1, 101, 1, 101, 1, 101, 1, 101, 1, 101, 1, + 101, 1, 101, 1, 101, 1, 101, 1, 102, 1, 102, 1, 102, 1, 102, 1, 102, 1, + 102, 1, 102, 1, 103, 1, 103, 1, 103, 1, 103, 1, 103, 1, 103, 1, 104, 1, + 104, 1, 104, 1, 104, 1, 104, 1, 104, 1, 105, 1, 105, 1, 105, 1, 105, 1, + 105, 1, 106, 1, 106, 1, 106, 1, 106, 1, 106, 1, 106, 1, 106, 1, 106, 1, + 106, 1, 106, 1, 107, 1, 107, 1, 107, 1, 107, 1, 107, 1, 108, 1, 108, 1, + 108, 1, 108, 1, 108, 1, 108, 1, 108, 1, 108, 1, 108, 1, 109, 1, 109, 1, + 109, 1, 109, 1, 109, 1, 109, 1, 109, 1, 109, 1, 110, 1, 110, 1, 110, 1, + 110, 1, 111, 1, 111, 1, 111, 1, 112, 1, 112, 1, 112, 1, 112, 1, 112, 1, + 112, 1, 112, 1, 113, 1, 113, 1, 113, 1, 113, 1, 113, 1, 114, 1, 114, 1, + 114, 1, 114, 1, 114, 1, 114, 1, 115, 1, 115, 1, 115, 1, 115, 1, 115, 1, + 115, 1, 115, 1, 116, 1, 116, 1, 116, 1, 116, 1, 116, 1, 117, 1, 117, 1, + 117, 1, 117, 1, 117, 1, 118, 1, 118, 1, 118, 1, 118, 1, 118, 1, 118, 1, + 118, 1, 118, 1, 118, 1, 118, 1, 119, 1, 119, 1, 119, 1, 119, 1, 119, 1, + 119, 1, 119, 1, 120, 1, 120, 1, 120, 1, 120, 1, 120, 1, 120, 1, 120, 1, + 121, 1, 121, 1, 121, 1, 121, 1, 121, 1, 121, 1, 121, 1, 121, 1, 121, 1, + 121, 1, 122, 1, 122, 1, 122, 1, 122, 1, 122, 1, 122, 1, 122, 1, 123, 1, + 123, 1, 123, 1, 123, 1, 123, 1, 123, 1, 124, 1, 124, 1, 124, 1, 124, 1, + 124, 1, 124, 1, 124, 1, 125, 1, 125, 1, 125, 1, 125, 1, 125, 1, 126, 1, + 126, 1, 126, 1, 126, 1, 126, 1, 126, 1, 126, 1, 126, 1, 126, 1, 127, 1, + 127, 1, 127, 1, 127, 1, 127, 1, 127, 1, 127, 1, 127, 1, 127, 1, 127, 1, + 128, 1, 128, 1, 128, 1, 128, 1, 128, 1, 128, 1, 128, 1, 128, 1, 129, 1, + 129, 1, 129, 1, 129, 1, 129, 1, 129, 1, 130, 1, 130, 1, 130, 1, 130, 1, + 130, 1, 130, 1, 130, 1, 130, 1, 130, 1, 130, 1, 131, 1, 131, 1, 131, 1, + 131, 1, 131, 1, 131, 1, 132, 1, 132, 1, 132, 1, 132, 1, 132, 1, 133, 1, + 133, 1, 133, 1, 133, 5, 133, 1000, 8, 133, 10, 133, 12, 133, 1003, 9, 133, + 1, 133, 1, 133, 1, 134, 1, 134, 1, 134, 1, 134, 1, 134, 1, 135, 1, 135, + 1, 135, 1, 135, 1, 135, 1, 135, 1, 136, 4, 136, 1019, 8, 136, 11, 136, + 12, 136, 1020, 1, 137, 1, 137, 1, 137, 1, 137, 4, 137, 1027, 8, 137, 11, + 137, 12, 137, 1028, 1, 138, 1, 138, 1, 138, 1, 138, 1, 138, 1, 138, 1, + 138, 1, 138, 1, 138, 1, 138, 1, 138, 1, 138, 1, 138, 3, 138, 1044, 8, 138, + 1, 139, 1, 139, 1, 139, 1, 139, 1, 139, 1, 139, 1, 139, 1, 139, 1, 139, + 1, 139, 1, 140, 1, 140, 1, 140, 1, 140, 1, 140, 1, 140, 1, 140, 1, 140, + 1, 140, 1, 140, 1, 141, 1, 141, 1, 141, 1, 141, 1, 141, 1, 141, 1, 141, + 1, 141, 1, 141, 1, 141, 1, 141, 1, 141, 1, 142, 1, 142, 1, 142, 1, 142, + 1, 142, 1, 142, 1, 142, 1, 142, 1, 142, 1, 143, 1, 143, 1, 143, 1, 143, + 1, 143, 1, 143, 1, 143, 1, 143, 1, 143, 1, 143, 1, 144, 1, 144, 5, 144, + 1099, 8, 144, 10, 144, 12, 144, 1102, 9, 144, 1, 145, 1, 145, 1, 145, 1, + 146, 1, 146, 1, 146, 1, 147, 1, 147, 1, 147, 1, 148, 1, 148, 1, 148, 1, + 148, 1, 149, 1, 149, 1, 
149, 1, 149, 5, 149, 1121, 8, 149, 10, 149, 12, + 149, 1124, 9, 149, 1, 149, 1, 149, 1, 149, 1, 149, 1, 149, 1, 150, 1, 150, + 1, 150, 1, 150, 5, 150, 1135, 8, 150, 10, 150, 12, 150, 1138, 9, 150, 1, + 150, 1, 150, 1, 1122, 0, 151, 1, 1, 3, 2, 5, 3, 7, 4, 9, 5, 11, 6, 13, + 7, 15, 8, 17, 9, 19, 10, 21, 11, 23, 12, 25, 13, 27, 14, 29, 15, 31, 16, + 33, 17, 35, 18, 37, 19, 39, 20, 41, 21, 43, 22, 45, 23, 47, 24, 49, 25, + 51, 26, 53, 27, 55, 28, 57, 29, 59, 30, 61, 31, 63, 32, 65, 33, 67, 34, + 69, 35, 71, 36, 73, 37, 75, 38, 77, 39, 79, 40, 81, 41, 83, 42, 85, 43, + 87, 44, 89, 45, 91, 46, 93, 47, 95, 48, 97, 49, 99, 50, 101, 51, 103, 52, + 105, 53, 107, 54, 109, 55, 111, 56, 113, 57, 115, 58, 117, 59, 119, 60, + 121, 61, 123, 62, 125, 63, 127, 64, 129, 65, 131, 66, 133, 67, 135, 68, + 137, 69, 139, 70, 141, 71, 143, 72, 145, 73, 147, 74, 149, 75, 151, 76, + 153, 77, 155, 78, 157, 79, 159, 80, 161, 81, 163, 82, 165, 83, 167, 84, + 169, 85, 171, 86, 173, 87, 175, 88, 177, 89, 179, 90, 181, 91, 183, 92, + 185, 93, 187, 94, 189, 95, 191, 96, 193, 97, 195, 98, 197, 99, 199, 100, + 201, 101, 203, 102, 205, 103, 207, 104, 209, 105, 211, 106, 213, 107, 215, + 108, 217, 109, 219, 110, 221, 111, 223, 112, 225, 113, 227, 114, 229, 115, + 231, 116, 233, 117, 235, 118, 237, 119, 239, 120, 241, 121, 243, 122, 245, + 123, 247, 124, 249, 125, 251, 126, 253, 127, 255, 128, 257, 129, 259, 130, + 261, 131, 263, 132, 265, 133, 267, 134, 269, 135, 271, 136, 273, 137, 275, + 138, 277, 139, 279, 140, 281, 141, 283, 142, 285, 143, 287, 144, 289, 145, + 291, 146, 293, 147, 295, 148, 297, 149, 299, 150, 301, 151, 1, 0, 32, 2, + 0, 85, 85, 117, 117, 2, 0, 83, 83, 115, 115, 2, 0, 69, 69, 101, 101, 2, + 0, 78, 78, 110, 110, 2, 0, 84, 84, 116, 116, 2, 0, 65, 65, 97, 97, 2, 0, + 66, 66, 98, 98, 2, 0, 76, 76, 108, 108, 2, 0, 67, 67, 99, 99, 2, 0, 73, + 73, 105, 105, 2, 0, 79, 79, 111, 111, 2, 0, 82, 82, 114, 114, 2, 0, 77, + 77, 109, 109, 2, 0, 68, 68, 100, 100, 2, 0, 80, 80, 112, 112, 2, 0, 72, + 72, 104, 104, 2, 0, 75, 75, 107, 107, 2, 0, 70, 70, 102, 102, 2, 0, 71, + 71, 103, 103, 2, 0, 89, 89, 121, 121, 2, 0, 81, 81, 113, 113, 2, 0, 88, + 88, 120, 120, 2, 0, 87, 87, 119, 119, 2, 0, 74, 74, 106, 106, 2, 0, 86, + 86, 118, 118, 2, 0, 39, 39, 92, 92, 1, 0, 48, 57, 3, 0, 48, 57, 65, 70, + 97, 102, 2, 0, 65, 90, 97, 122, 4, 0, 48, 57, 65, 90, 95, 95, 97, 122, + 3, 0, 9, 11, 13, 13, 32, 32, 2, 0, 10, 10, 13, 13, 1149, 0, 1, 1, 0, 0, + 0, 0, 3, 1, 0, 0, 0, 0, 5, 1, 0, 0, 0, 0, 7, 1, 0, 0, 0, 0, 9, 1, 0, 0, + 0, 0, 11, 1, 0, 0, 0, 0, 13, 1, 0, 0, 0, 0, 15, 1, 0, 0, 0, 0, 17, 1, 0, + 0, 0, 0, 19, 1, 0, 0, 0, 0, 21, 1, 0, 0, 0, 0, 23, 1, 0, 0, 0, 0, 25, 1, + 0, 0, 0, 0, 27, 1, 0, 0, 0, 0, 29, 1, 0, 0, 0, 0, 31, 1, 0, 0, 0, 0, 33, + 1, 0, 0, 0, 0, 35, 1, 0, 0, 0, 0, 37, 1, 0, 0, 0, 0, 39, 1, 0, 0, 0, 0, + 41, 1, 0, 0, 0, 0, 43, 1, 0, 0, 0, 0, 45, 1, 0, 0, 0, 0, 47, 1, 0, 0, 0, + 0, 49, 1, 0, 0, 0, 0, 51, 1, 0, 0, 0, 0, 53, 1, 0, 0, 0, 0, 55, 1, 0, 0, + 0, 0, 57, 1, 0, 0, 0, 0, 59, 1, 0, 0, 0, 0, 61, 1, 0, 0, 0, 0, 63, 1, 0, + 0, 0, 0, 65, 1, 0, 0, 0, 0, 67, 1, 0, 0, 0, 0, 69, 1, 0, 0, 0, 0, 71, 1, + 0, 0, 0, 0, 73, 1, 0, 0, 0, 0, 75, 1, 0, 0, 0, 0, 77, 1, 0, 0, 0, 0, 79, + 1, 0, 0, 0, 0, 81, 1, 0, 0, 0, 0, 83, 1, 0, 0, 0, 0, 85, 1, 0, 0, 0, 0, + 87, 1, 0, 0, 0, 0, 89, 1, 0, 0, 0, 0, 91, 1, 0, 0, 0, 0, 93, 1, 0, 0, 0, + 0, 95, 1, 0, 0, 0, 0, 97, 1, 0, 0, 0, 0, 99, 1, 0, 0, 0, 0, 101, 1, 0, + 0, 0, 0, 103, 1, 0, 0, 0, 0, 105, 1, 0, 0, 0, 0, 107, 1, 0, 0, 0, 0, 109, + 1, 0, 0, 0, 0, 111, 1, 0, 0, 0, 0, 113, 1, 0, 0, 0, 0, 115, 1, 
0, 0, 0, + 0, 117, 1, 0, 0, 0, 0, 119, 1, 0, 0, 0, 0, 121, 1, 0, 0, 0, 0, 123, 1, + 0, 0, 0, 0, 125, 1, 0, 0, 0, 0, 127, 1, 0, 0, 0, 0, 129, 1, 0, 0, 0, 0, + 131, 1, 0, 0, 0, 0, 133, 1, 0, 0, 0, 0, 135, 1, 0, 0, 0, 0, 137, 1, 0, + 0, 0, 0, 139, 1, 0, 0, 0, 0, 141, 1, 0, 0, 0, 0, 143, 1, 0, 0, 0, 0, 145, + 1, 0, 0, 0, 0, 147, 1, 0, 0, 0, 0, 149, 1, 0, 0, 0, 0, 151, 1, 0, 0, 0, + 0, 153, 1, 0, 0, 0, 0, 155, 1, 0, 0, 0, 0, 157, 1, 0, 0, 0, 0, 159, 1, + 0, 0, 0, 0, 161, 1, 0, 0, 0, 0, 163, 1, 0, 0, 0, 0, 165, 1, 0, 0, 0, 0, + 167, 1, 0, 0, 0, 0, 169, 1, 0, 0, 0, 0, 171, 1, 0, 0, 0, 0, 173, 1, 0, + 0, 0, 0, 175, 1, 0, 0, 0, 0, 177, 1, 0, 0, 0, 0, 179, 1, 0, 0, 0, 0, 181, + 1, 0, 0, 0, 0, 183, 1, 0, 0, 0, 0, 185, 1, 0, 0, 0, 0, 187, 1, 0, 0, 0, + 0, 189, 1, 0, 0, 0, 0, 191, 1, 0, 0, 0, 0, 193, 1, 0, 0, 0, 0, 195, 1, + 0, 0, 0, 0, 197, 1, 0, 0, 0, 0, 199, 1, 0, 0, 0, 0, 201, 1, 0, 0, 0, 0, + 203, 1, 0, 0, 0, 0, 205, 1, 0, 0, 0, 0, 207, 1, 0, 0, 0, 0, 209, 1, 0, + 0, 0, 0, 211, 1, 0, 0, 0, 0, 213, 1, 0, 0, 0, 0, 215, 1, 0, 0, 0, 0, 217, + 1, 0, 0, 0, 0, 219, 1, 0, 0, 0, 0, 221, 1, 0, 0, 0, 0, 223, 1, 0, 0, 0, + 0, 225, 1, 0, 0, 0, 0, 227, 1, 0, 0, 0, 0, 229, 1, 0, 0, 0, 0, 231, 1, + 0, 0, 0, 0, 233, 1, 0, 0, 0, 0, 235, 1, 0, 0, 0, 0, 237, 1, 0, 0, 0, 0, + 239, 1, 0, 0, 0, 0, 241, 1, 0, 0, 0, 0, 243, 1, 0, 0, 0, 0, 245, 1, 0, + 0, 0, 0, 247, 1, 0, 0, 0, 0, 249, 1, 0, 0, 0, 0, 251, 1, 0, 0, 0, 0, 253, + 1, 0, 0, 0, 0, 255, 1, 0, 0, 0, 0, 257, 1, 0, 0, 0, 0, 259, 1, 0, 0, 0, + 0, 261, 1, 0, 0, 0, 0, 263, 1, 0, 0, 0, 0, 265, 1, 0, 0, 0, 0, 267, 1, + 0, 0, 0, 0, 269, 1, 0, 0, 0, 0, 271, 1, 0, 0, 0, 0, 273, 1, 0, 0, 0, 0, + 275, 1, 0, 0, 0, 0, 277, 1, 0, 0, 0, 0, 279, 1, 0, 0, 0, 0, 281, 1, 0, + 0, 0, 0, 283, 1, 0, 0, 0, 0, 285, 1, 0, 0, 0, 0, 287, 1, 0, 0, 0, 0, 289, + 1, 0, 0, 0, 0, 291, 1, 0, 0, 0, 0, 293, 1, 0, 0, 0, 0, 295, 1, 0, 0, 0, + 0, 297, 1, 0, 0, 0, 0, 299, 1, 0, 0, 0, 0, 301, 1, 0, 0, 0, 1, 303, 1, + 0, 0, 0, 3, 305, 1, 0, 0, 0, 5, 307, 1, 0, 0, 0, 7, 309, 1, 0, 0, 0, 9, + 311, 1, 0, 0, 0, 11, 313, 1, 0, 0, 0, 13, 315, 1, 0, 0, 0, 15, 317, 1, + 0, 0, 0, 17, 319, 1, 0, 0, 0, 19, 321, 1, 0, 0, 0, 21, 323, 1, 0, 0, 0, + 23, 325, 1, 0, 0, 0, 25, 327, 1, 0, 0, 0, 27, 330, 1, 0, 0, 0, 29, 332, + 1, 0, 0, 0, 31, 334, 1, 0, 0, 0, 33, 337, 1, 0, 0, 0, 35, 339, 1, 0, 0, + 0, 37, 341, 1, 0, 0, 0, 39, 343, 1, 0, 0, 0, 41, 345, 1, 0, 0, 0, 43, 347, + 1, 0, 0, 0, 45, 353, 1, 0, 0, 0, 47, 355, 1, 0, 0, 0, 49, 357, 1, 0, 0, + 0, 51, 360, 1, 0, 0, 0, 53, 362, 1, 0, 0, 0, 55, 365, 1, 0, 0, 0, 57, 368, + 1, 0, 0, 0, 59, 370, 1, 0, 0, 0, 61, 373, 1, 0, 0, 0, 63, 376, 1, 0, 0, + 0, 65, 378, 1, 0, 0, 0, 67, 382, 1, 0, 0, 0, 69, 388, 1, 0, 0, 0, 71, 394, + 1, 0, 0, 0, 73, 401, 1, 0, 0, 0, 75, 408, 1, 0, 0, 0, 77, 414, 1, 0, 0, + 0, 79, 421, 1, 0, 0, 0, 81, 425, 1, 0, 0, 0, 83, 430, 1, 0, 0, 0, 85, 437, + 1, 0, 0, 0, 87, 440, 1, 0, 0, 0, 89, 451, 1, 0, 0, 0, 91, 457, 1, 0, 0, + 0, 93, 465, 1, 0, 0, 0, 95, 473, 1, 0, 0, 0, 97, 477, 1, 0, 0, 0, 99, 480, + 1, 0, 0, 0, 101, 483, 1, 0, 0, 0, 103, 490, 1, 0, 0, 0, 105, 498, 1, 0, + 0, 0, 107, 507, 1, 0, 0, 0, 109, 511, 1, 0, 0, 0, 111, 519, 1, 0, 0, 0, + 113, 524, 1, 0, 0, 0, 115, 531, 1, 0, 0, 0, 117, 538, 1, 0, 0, 0, 119, + 549, 1, 0, 0, 0, 121, 553, 1, 0, 0, 0, 123, 557, 1, 0, 0, 0, 125, 563, + 1, 0, 0, 0, 127, 567, 1, 0, 0, 0, 129, 570, 1, 0, 0, 0, 131, 575, 1, 0, + 0, 0, 133, 581, 1, 0, 0, 0, 135, 584, 1, 0, 0, 0, 137, 592, 1, 0, 0, 0, + 139, 595, 1, 0, 0, 0, 141, 602, 1, 0, 0, 0, 143, 606, 1, 0, 0, 0, 145, + 610, 1, 0, 0, 0, 147, 615, 1, 0, 0, 0, 149, 620, 1, 0, 
0, 0, 151, 626, + 1, 0, 0, 0, 153, 632, 1, 0, 0, 0, 155, 635, 1, 0, 0, 0, 157, 639, 1, 0, + 0, 0, 159, 644, 1, 0, 0, 0, 161, 650, 1, 0, 0, 0, 163, 657, 1, 0, 0, 0, + 165, 663, 1, 0, 0, 0, 167, 666, 1, 0, 0, 0, 169, 672, 1, 0, 0, 0, 171, + 679, 1, 0, 0, 0, 173, 687, 1, 0, 0, 0, 175, 690, 1, 0, 0, 0, 177, 695, + 1, 0, 0, 0, 179, 700, 1, 0, 0, 0, 181, 705, 1, 0, 0, 0, 183, 710, 1, 0, + 0, 0, 185, 714, 1, 0, 0, 0, 187, 723, 1, 0, 0, 0, 189, 728, 1, 0, 0, 0, + 191, 734, 1, 0, 0, 0, 193, 742, 1, 0, 0, 0, 195, 749, 1, 0, 0, 0, 197, + 756, 1, 0, 0, 0, 199, 763, 1, 0, 0, 0, 201, 768, 1, 0, 0, 0, 203, 774, + 1, 0, 0, 0, 205, 784, 1, 0, 0, 0, 207, 791, 1, 0, 0, 0, 209, 797, 1, 0, + 0, 0, 211, 803, 1, 0, 0, 0, 213, 808, 1, 0, 0, 0, 215, 818, 1, 0, 0, 0, + 217, 823, 1, 0, 0, 0, 219, 832, 1, 0, 0, 0, 221, 840, 1, 0, 0, 0, 223, + 844, 1, 0, 0, 0, 225, 847, 1, 0, 0, 0, 227, 854, 1, 0, 0, 0, 229, 859, + 1, 0, 0, 0, 231, 865, 1, 0, 0, 0, 233, 872, 1, 0, 0, 0, 235, 877, 1, 0, + 0, 0, 237, 882, 1, 0, 0, 0, 239, 892, 1, 0, 0, 0, 241, 899, 1, 0, 0, 0, + 243, 906, 1, 0, 0, 0, 245, 916, 1, 0, 0, 0, 247, 923, 1, 0, 0, 0, 249, + 929, 1, 0, 0, 0, 251, 936, 1, 0, 0, 0, 253, 941, 1, 0, 0, 0, 255, 950, + 1, 0, 0, 0, 257, 960, 1, 0, 0, 0, 259, 968, 1, 0, 0, 0, 261, 974, 1, 0, + 0, 0, 263, 984, 1, 0, 0, 0, 265, 990, 1, 0, 0, 0, 267, 995, 1, 0, 0, 0, + 269, 1006, 1, 0, 0, 0, 271, 1011, 1, 0, 0, 0, 273, 1018, 1, 0, 0, 0, 275, + 1022, 1, 0, 0, 0, 277, 1043, 1, 0, 0, 0, 279, 1045, 1, 0, 0, 0, 281, 1055, + 1, 0, 0, 0, 283, 1065, 1, 0, 0, 0, 285, 1077, 1, 0, 0, 0, 287, 1086, 1, + 0, 0, 0, 289, 1096, 1, 0, 0, 0, 291, 1103, 1, 0, 0, 0, 293, 1106, 1, 0, + 0, 0, 295, 1109, 1, 0, 0, 0, 297, 1112, 1, 0, 0, 0, 299, 1116, 1, 0, 0, + 0, 301, 1130, 1, 0, 0, 0, 303, 304, 5, 123, 0, 0, 304, 2, 1, 0, 0, 0, 305, + 306, 5, 125, 0, 0, 306, 4, 1, 0, 0, 0, 307, 308, 5, 91, 0, 0, 308, 6, 1, + 0, 0, 0, 309, 310, 5, 93, 0, 0, 310, 8, 1, 0, 0, 0, 311, 312, 5, 58, 0, + 0, 312, 10, 1, 0, 0, 0, 313, 314, 5, 59, 0, 0, 314, 12, 1, 0, 0, 0, 315, + 316, 5, 40, 0, 0, 316, 14, 1, 0, 0, 0, 317, 318, 5, 41, 0, 0, 318, 16, + 1, 0, 0, 0, 319, 320, 5, 44, 0, 0, 320, 18, 1, 0, 0, 0, 321, 322, 5, 64, + 0, 0, 322, 20, 1, 0, 0, 0, 323, 324, 5, 33, 0, 0, 324, 22, 1, 0, 0, 0, + 325, 326, 5, 46, 0, 0, 326, 24, 1, 0, 0, 0, 327, 328, 5, 124, 0, 0, 328, + 329, 5, 124, 0, 0, 329, 26, 1, 0, 0, 0, 330, 331, 5, 42, 0, 0, 331, 28, + 1, 0, 0, 0, 332, 333, 5, 61, 0, 0, 333, 30, 1, 0, 0, 0, 334, 335, 5, 61, + 0, 0, 335, 336, 5, 61, 0, 0, 336, 32, 1, 0, 0, 0, 337, 338, 5, 35, 0, 0, + 338, 34, 1, 0, 0, 0, 339, 340, 5, 36, 0, 0, 340, 36, 1, 0, 0, 0, 341, 342, + 5, 37, 0, 0, 342, 38, 1, 0, 0, 0, 343, 344, 5, 43, 0, 0, 344, 40, 1, 0, + 0, 0, 345, 346, 5, 45, 0, 0, 346, 42, 1, 0, 0, 0, 347, 348, 5, 47, 0, 0, + 348, 44, 1, 0, 0, 0, 349, 350, 5, 33, 0, 0, 350, 354, 5, 61, 0, 0, 351, + 352, 5, 60, 0, 0, 352, 354, 5, 62, 0, 0, 353, 349, 1, 0, 0, 0, 353, 351, + 1, 0, 0, 0, 354, 46, 1, 0, 0, 0, 355, 356, 5, 60, 0, 0, 356, 48, 1, 0, + 0, 0, 357, 358, 5, 60, 0, 0, 358, 359, 5, 61, 0, 0, 359, 50, 1, 0, 0, 0, + 360, 361, 5, 62, 0, 0, 361, 52, 1, 0, 0, 0, 362, 363, 5, 62, 0, 0, 363, + 364, 5, 61, 0, 0, 364, 54, 1, 0, 0, 0, 365, 366, 5, 58, 0, 0, 366, 367, + 5, 58, 0, 0, 367, 56, 1, 0, 0, 0, 368, 369, 5, 95, 0, 0, 369, 58, 1, 0, + 0, 0, 370, 371, 5, 58, 0, 0, 371, 372, 5, 61, 0, 0, 372, 60, 1, 0, 0, 0, + 373, 374, 5, 46, 0, 0, 374, 375, 5, 46, 0, 0, 375, 62, 1, 0, 0, 0, 376, + 377, 5, 34, 0, 0, 377, 64, 1, 0, 0, 0, 378, 379, 7, 0, 0, 0, 379, 380, + 7, 1, 0, 0, 380, 381, 7, 2, 0, 0, 381, 66, 1, 0, 
0, 0, 382, 383, 7, 0, + 0, 0, 383, 384, 7, 3, 0, 0, 384, 385, 7, 0, 0, 0, 385, 386, 7, 1, 0, 0, + 386, 387, 7, 2, 0, 0, 387, 68, 1, 0, 0, 0, 388, 389, 7, 4, 0, 0, 389, 390, + 7, 5, 0, 0, 390, 391, 7, 6, 0, 0, 391, 392, 7, 7, 0, 0, 392, 393, 7, 2, + 0, 0, 393, 70, 1, 0, 0, 0, 394, 395, 7, 5, 0, 0, 395, 396, 7, 8, 0, 0, + 396, 397, 7, 4, 0, 0, 397, 398, 7, 9, 0, 0, 398, 399, 7, 10, 0, 0, 399, + 400, 7, 3, 0, 0, 400, 72, 1, 0, 0, 0, 401, 402, 7, 8, 0, 0, 402, 403, 7, + 11, 0, 0, 403, 404, 7, 2, 0, 0, 404, 405, 7, 5, 0, 0, 405, 406, 7, 4, 0, + 0, 406, 407, 7, 2, 0, 0, 407, 74, 1, 0, 0, 0, 408, 409, 7, 5, 0, 0, 409, + 410, 7, 7, 0, 0, 410, 411, 7, 4, 0, 0, 411, 412, 7, 2, 0, 0, 412, 413, + 7, 11, 0, 0, 413, 76, 1, 0, 0, 0, 414, 415, 7, 8, 0, 0, 415, 416, 7, 10, + 0, 0, 416, 417, 7, 7, 0, 0, 417, 418, 7, 0, 0, 0, 418, 419, 7, 12, 0, 0, + 419, 420, 7, 3, 0, 0, 420, 78, 1, 0, 0, 0, 421, 422, 7, 5, 0, 0, 422, 423, + 7, 13, 0, 0, 423, 424, 7, 13, 0, 0, 424, 80, 1, 0, 0, 0, 425, 426, 7, 13, + 0, 0, 426, 427, 7, 11, 0, 0, 427, 428, 7, 10, 0, 0, 428, 429, 7, 14, 0, + 0, 429, 82, 1, 0, 0, 0, 430, 431, 7, 11, 0, 0, 431, 432, 7, 2, 0, 0, 432, + 433, 7, 3, 0, 0, 433, 434, 7, 5, 0, 0, 434, 435, 7, 12, 0, 0, 435, 436, + 7, 2, 0, 0, 436, 84, 1, 0, 0, 0, 437, 438, 7, 4, 0, 0, 438, 439, 7, 10, + 0, 0, 439, 86, 1, 0, 0, 0, 440, 441, 7, 8, 0, 0, 441, 442, 7, 10, 0, 0, + 442, 443, 7, 3, 0, 0, 443, 444, 7, 1, 0, 0, 444, 445, 7, 4, 0, 0, 445, + 446, 7, 11, 0, 0, 446, 447, 7, 5, 0, 0, 447, 448, 7, 9, 0, 0, 448, 449, + 7, 3, 0, 0, 449, 450, 7, 4, 0, 0, 450, 88, 1, 0, 0, 0, 451, 452, 7, 8, + 0, 0, 452, 453, 7, 15, 0, 0, 453, 454, 7, 2, 0, 0, 454, 455, 7, 8, 0, 0, + 455, 456, 7, 16, 0, 0, 456, 90, 1, 0, 0, 0, 457, 458, 7, 17, 0, 0, 458, + 459, 7, 10, 0, 0, 459, 460, 7, 11, 0, 0, 460, 461, 7, 2, 0, 0, 461, 462, + 7, 9, 0, 0, 462, 463, 7, 18, 0, 0, 463, 464, 7, 3, 0, 0, 464, 92, 1, 0, + 0, 0, 465, 466, 7, 14, 0, 0, 466, 467, 7, 11, 0, 0, 467, 468, 7, 9, 0, + 0, 468, 469, 7, 12, 0, 0, 469, 470, 7, 5, 0, 0, 470, 471, 7, 11, 0, 0, + 471, 472, 7, 19, 0, 0, 472, 94, 1, 0, 0, 0, 473, 474, 7, 16, 0, 0, 474, + 475, 7, 2, 0, 0, 475, 476, 7, 19, 0, 0, 476, 96, 1, 0, 0, 0, 477, 478, + 7, 10, 0, 0, 478, 479, 7, 3, 0, 0, 479, 98, 1, 0, 0, 0, 480, 481, 7, 13, + 0, 0, 481, 482, 7, 10, 0, 0, 482, 100, 1, 0, 0, 0, 483, 484, 7, 0, 0, 0, + 484, 485, 7, 3, 0, 0, 485, 486, 7, 9, 0, 0, 486, 487, 7, 20, 0, 0, 487, + 488, 7, 0, 0, 0, 488, 489, 7, 2, 0, 0, 489, 102, 1, 0, 0, 0, 490, 491, + 7, 8, 0, 0, 491, 492, 7, 5, 0, 0, 492, 493, 7, 1, 0, 0, 493, 494, 7, 8, + 0, 0, 494, 495, 7, 5, 0, 0, 495, 496, 7, 13, 0, 0, 496, 497, 7, 2, 0, 0, + 497, 104, 1, 0, 0, 0, 498, 499, 7, 11, 0, 0, 499, 500, 7, 2, 0, 0, 500, + 501, 7, 1, 0, 0, 501, 502, 7, 4, 0, 0, 502, 503, 7, 11, 0, 0, 503, 504, + 7, 9, 0, 0, 504, 505, 7, 8, 0, 0, 505, 506, 7, 4, 0, 0, 506, 106, 1, 0, + 0, 0, 507, 508, 7, 1, 0, 0, 508, 509, 7, 2, 0, 0, 509, 510, 7, 4, 0, 0, + 510, 108, 1, 0, 0, 0, 511, 512, 7, 13, 0, 0, 512, 513, 7, 2, 0, 0, 513, + 514, 7, 17, 0, 0, 514, 515, 7, 5, 0, 0, 515, 516, 7, 0, 0, 0, 516, 517, + 7, 7, 0, 0, 517, 518, 7, 4, 0, 0, 518, 110, 1, 0, 0, 0, 519, 520, 7, 3, + 0, 0, 520, 521, 7, 0, 0, 0, 521, 522, 7, 7, 0, 0, 522, 523, 7, 7, 0, 0, + 523, 112, 1, 0, 0, 0, 524, 525, 7, 13, 0, 0, 525, 526, 7, 2, 0, 0, 526, + 527, 7, 7, 0, 0, 527, 528, 7, 2, 0, 0, 528, 529, 7, 4, 0, 0, 529, 530, + 7, 2, 0, 0, 530, 114, 1, 0, 0, 0, 531, 532, 7, 0, 0, 0, 532, 533, 7, 14, + 0, 0, 533, 534, 7, 13, 0, 0, 534, 535, 7, 5, 0, 0, 535, 536, 7, 4, 0, 0, + 536, 537, 7, 2, 0, 0, 537, 116, 1, 0, 
0, 0, 538, 539, 7, 11, 0, 0, 539, + 540, 7, 2, 0, 0, 540, 541, 7, 17, 0, 0, 541, 542, 7, 2, 0, 0, 542, 543, + 7, 11, 0, 0, 543, 544, 7, 2, 0, 0, 544, 545, 7, 3, 0, 0, 545, 546, 7, 8, + 0, 0, 546, 547, 7, 2, 0, 0, 547, 548, 7, 1, 0, 0, 548, 118, 1, 0, 0, 0, + 549, 550, 7, 11, 0, 0, 550, 551, 7, 2, 0, 0, 551, 552, 7, 17, 0, 0, 552, + 120, 1, 0, 0, 0, 553, 554, 7, 3, 0, 0, 554, 555, 7, 10, 0, 0, 555, 556, + 7, 4, 0, 0, 556, 122, 1, 0, 0, 0, 557, 558, 7, 9, 0, 0, 558, 559, 7, 3, + 0, 0, 559, 560, 7, 13, 0, 0, 560, 561, 7, 2, 0, 0, 561, 562, 7, 21, 0, + 0, 562, 124, 1, 0, 0, 0, 563, 564, 7, 5, 0, 0, 564, 565, 7, 3, 0, 0, 565, + 566, 7, 13, 0, 0, 566, 126, 1, 0, 0, 0, 567, 568, 7, 10, 0, 0, 568, 569, + 7, 11, 0, 0, 569, 128, 1, 0, 0, 0, 570, 571, 7, 7, 0, 0, 571, 572, 7, 9, + 0, 0, 572, 573, 7, 16, 0, 0, 573, 574, 7, 2, 0, 0, 574, 130, 1, 0, 0, 0, + 575, 576, 7, 9, 0, 0, 576, 577, 7, 7, 0, 0, 577, 578, 7, 9, 0, 0, 578, + 579, 7, 16, 0, 0, 579, 580, 7, 2, 0, 0, 580, 132, 1, 0, 0, 0, 581, 582, + 7, 9, 0, 0, 582, 583, 7, 3, 0, 0, 583, 134, 1, 0, 0, 0, 584, 585, 7, 6, + 0, 0, 585, 586, 7, 2, 0, 0, 586, 587, 7, 4, 0, 0, 587, 588, 7, 22, 0, 0, + 588, 589, 7, 2, 0, 0, 589, 590, 7, 2, 0, 0, 590, 591, 7, 3, 0, 0, 591, + 136, 1, 0, 0, 0, 592, 593, 7, 9, 0, 0, 593, 594, 7, 1, 0, 0, 594, 138, + 1, 0, 0, 0, 595, 596, 7, 2, 0, 0, 596, 597, 7, 21, 0, 0, 597, 598, 7, 9, + 0, 0, 598, 599, 7, 1, 0, 0, 599, 600, 7, 4, 0, 0, 600, 601, 7, 1, 0, 0, + 601, 140, 1, 0, 0, 0, 602, 603, 7, 5, 0, 0, 603, 604, 7, 7, 0, 0, 604, + 605, 7, 7, 0, 0, 605, 142, 1, 0, 0, 0, 606, 607, 7, 5, 0, 0, 607, 608, + 7, 3, 0, 0, 608, 609, 7, 19, 0, 0, 609, 144, 1, 0, 0, 0, 610, 611, 7, 23, + 0, 0, 611, 612, 7, 10, 0, 0, 612, 613, 7, 9, 0, 0, 613, 614, 7, 3, 0, 0, + 614, 146, 1, 0, 0, 0, 615, 616, 7, 7, 0, 0, 616, 617, 7, 2, 0, 0, 617, + 618, 7, 17, 0, 0, 618, 619, 7, 4, 0, 0, 619, 148, 1, 0, 0, 0, 620, 621, + 7, 11, 0, 0, 621, 622, 7, 9, 0, 0, 622, 623, 7, 18, 0, 0, 623, 624, 7, + 15, 0, 0, 624, 625, 7, 4, 0, 0, 625, 150, 1, 0, 0, 0, 626, 627, 7, 9, 0, + 0, 627, 628, 7, 3, 0, 0, 628, 629, 7, 3, 0, 0, 629, 630, 7, 2, 0, 0, 630, + 631, 7, 11, 0, 0, 631, 152, 1, 0, 0, 0, 632, 633, 7, 5, 0, 0, 633, 634, + 7, 1, 0, 0, 634, 154, 1, 0, 0, 0, 635, 636, 7, 5, 0, 0, 636, 637, 7, 1, + 0, 0, 637, 638, 7, 8, 0, 0, 638, 156, 1, 0, 0, 0, 639, 640, 7, 13, 0, 0, + 640, 641, 7, 2, 0, 0, 641, 642, 7, 1, 0, 0, 642, 643, 7, 8, 0, 0, 643, + 158, 1, 0, 0, 0, 644, 645, 7, 7, 0, 0, 645, 646, 7, 9, 0, 0, 646, 647, + 7, 12, 0, 0, 647, 648, 7, 9, 0, 0, 648, 649, 7, 4, 0, 0, 649, 160, 1, 0, + 0, 0, 650, 651, 7, 10, 0, 0, 651, 652, 7, 17, 0, 0, 652, 653, 7, 17, 0, + 0, 653, 654, 7, 1, 0, 0, 654, 655, 7, 2, 0, 0, 655, 656, 7, 4, 0, 0, 656, + 162, 1, 0, 0, 0, 657, 658, 7, 10, 0, 0, 658, 659, 7, 11, 0, 0, 659, 660, + 7, 13, 0, 0, 660, 661, 7, 2, 0, 0, 661, 662, 7, 11, 0, 0, 662, 164, 1, + 0, 0, 0, 663, 664, 7, 6, 0, 0, 664, 665, 7, 19, 0, 0, 665, 166, 1, 0, 0, + 0, 666, 667, 7, 18, 0, 0, 667, 668, 7, 11, 0, 0, 668, 669, 7, 10, 0, 0, + 669, 670, 7, 0, 0, 0, 670, 671, 7, 14, 0, 0, 671, 168, 1, 0, 0, 0, 672, + 673, 7, 15, 0, 0, 673, 674, 7, 5, 0, 0, 674, 675, 7, 24, 0, 0, 675, 676, + 7, 9, 0, 0, 676, 677, 7, 3, 0, 0, 677, 678, 7, 18, 0, 0, 678, 170, 1, 0, + 0, 0, 679, 680, 7, 11, 0, 0, 680, 681, 7, 2, 0, 0, 681, 682, 7, 4, 0, 0, + 682, 683, 7, 0, 0, 0, 683, 684, 7, 11, 0, 0, 684, 685, 7, 3, 0, 0, 685, + 686, 7, 1, 0, 0, 686, 172, 1, 0, 0, 0, 687, 688, 7, 3, 0, 0, 688, 689, + 7, 10, 0, 0, 689, 174, 1, 0, 0, 0, 690, 691, 7, 22, 0, 0, 691, 692, 7, + 9, 0, 0, 692, 693, 7, 4, 0, 
0, 693, 694, 7, 15, 0, 0, 694, 176, 1, 0, 0, + 0, 695, 696, 7, 8, 0, 0, 696, 697, 7, 5, 0, 0, 697, 698, 7, 1, 0, 0, 698, + 699, 7, 2, 0, 0, 699, 178, 1, 0, 0, 0, 700, 701, 7, 22, 0, 0, 701, 702, + 7, 15, 0, 0, 702, 703, 7, 2, 0, 0, 703, 704, 7, 3, 0, 0, 704, 180, 1, 0, + 0, 0, 705, 706, 7, 4, 0, 0, 706, 707, 7, 15, 0, 0, 707, 708, 7, 2, 0, 0, + 708, 709, 7, 3, 0, 0, 709, 182, 1, 0, 0, 0, 710, 711, 7, 2, 0, 0, 711, + 712, 7, 3, 0, 0, 712, 713, 7, 13, 0, 0, 713, 184, 1, 0, 0, 0, 714, 715, + 7, 13, 0, 0, 715, 716, 7, 9, 0, 0, 716, 717, 7, 1, 0, 0, 717, 718, 7, 4, + 0, 0, 718, 719, 7, 9, 0, 0, 719, 720, 7, 3, 0, 0, 720, 721, 7, 8, 0, 0, + 721, 722, 7, 4, 0, 0, 722, 186, 1, 0, 0, 0, 723, 724, 7, 17, 0, 0, 724, + 725, 7, 11, 0, 0, 725, 726, 7, 10, 0, 0, 726, 727, 7, 12, 0, 0, 727, 188, + 1, 0, 0, 0, 728, 729, 7, 22, 0, 0, 729, 730, 7, 15, 0, 0, 730, 731, 7, + 2, 0, 0, 731, 732, 7, 11, 0, 0, 732, 733, 7, 2, 0, 0, 733, 190, 1, 0, 0, + 0, 734, 735, 7, 8, 0, 0, 735, 736, 7, 10, 0, 0, 736, 737, 7, 7, 0, 0, 737, + 738, 7, 7, 0, 0, 738, 739, 7, 5, 0, 0, 739, 740, 7, 4, 0, 0, 740, 741, + 7, 2, 0, 0, 741, 192, 1, 0, 0, 0, 742, 743, 7, 1, 0, 0, 743, 744, 7, 2, + 0, 0, 744, 745, 7, 7, 0, 0, 745, 746, 7, 2, 0, 0, 746, 747, 7, 8, 0, 0, + 747, 748, 7, 4, 0, 0, 748, 194, 1, 0, 0, 0, 749, 750, 7, 9, 0, 0, 750, + 751, 7, 3, 0, 0, 751, 752, 7, 1, 0, 0, 752, 753, 7, 2, 0, 0, 753, 754, + 7, 11, 0, 0, 754, 755, 7, 4, 0, 0, 755, 196, 1, 0, 0, 0, 756, 757, 7, 24, + 0, 0, 757, 758, 7, 5, 0, 0, 758, 759, 7, 7, 0, 0, 759, 760, 7, 0, 0, 0, + 760, 761, 7, 2, 0, 0, 761, 762, 7, 1, 0, 0, 762, 198, 1, 0, 0, 0, 763, + 764, 7, 17, 0, 0, 764, 765, 7, 0, 0, 0, 765, 766, 7, 7, 0, 0, 766, 767, + 7, 7, 0, 0, 767, 200, 1, 0, 0, 0, 768, 769, 7, 0, 0, 0, 769, 770, 7, 3, + 0, 0, 770, 771, 7, 9, 0, 0, 771, 772, 7, 10, 0, 0, 772, 773, 7, 3, 0, 0, + 773, 202, 1, 0, 0, 0, 774, 775, 7, 9, 0, 0, 775, 776, 7, 3, 0, 0, 776, + 777, 7, 4, 0, 0, 777, 778, 7, 2, 0, 0, 778, 779, 7, 11, 0, 0, 779, 780, + 7, 1, 0, 0, 780, 781, 7, 2, 0, 0, 781, 782, 7, 8, 0, 0, 782, 783, 7, 4, + 0, 0, 783, 204, 1, 0, 0, 0, 784, 785, 7, 2, 0, 0, 785, 786, 7, 21, 0, 0, + 786, 787, 7, 8, 0, 0, 787, 788, 7, 2, 0, 0, 788, 789, 7, 14, 0, 0, 789, + 790, 7, 4, 0, 0, 790, 206, 1, 0, 0, 0, 791, 792, 7, 3, 0, 0, 792, 793, + 7, 0, 0, 0, 793, 794, 7, 7, 0, 0, 794, 795, 7, 7, 0, 0, 795, 796, 7, 1, + 0, 0, 796, 208, 1, 0, 0, 0, 797, 798, 7, 17, 0, 0, 798, 799, 7, 9, 0, 0, + 799, 800, 7, 11, 0, 0, 800, 801, 7, 1, 0, 0, 801, 802, 7, 4, 0, 0, 802, + 210, 1, 0, 0, 0, 803, 804, 7, 7, 0, 0, 804, 805, 7, 5, 0, 0, 805, 806, + 7, 1, 0, 0, 806, 807, 7, 4, 0, 0, 807, 212, 1, 0, 0, 0, 808, 809, 7, 11, + 0, 0, 809, 810, 7, 2, 0, 0, 810, 811, 7, 4, 0, 0, 811, 812, 7, 0, 0, 0, + 812, 813, 7, 11, 0, 0, 813, 814, 7, 3, 0, 0, 814, 815, 7, 9, 0, 0, 815, + 816, 7, 3, 0, 0, 816, 817, 7, 18, 0, 0, 817, 214, 1, 0, 0, 0, 818, 819, + 7, 9, 0, 0, 819, 820, 7, 3, 0, 0, 820, 821, 7, 4, 0, 0, 821, 822, 7, 10, + 0, 0, 822, 216, 1, 0, 0, 0, 823, 824, 7, 8, 0, 0, 824, 825, 7, 10, 0, 0, + 825, 826, 7, 3, 0, 0, 826, 827, 7, 17, 0, 0, 827, 828, 7, 7, 0, 0, 828, + 829, 7, 9, 0, 0, 829, 830, 7, 8, 0, 0, 830, 831, 7, 4, 0, 0, 831, 218, + 1, 0, 0, 0, 832, 833, 7, 3, 0, 0, 833, 834, 7, 10, 0, 0, 834, 835, 7, 4, + 0, 0, 835, 836, 7, 15, 0, 0, 836, 837, 7, 9, 0, 0, 837, 838, 7, 3, 0, 0, + 838, 839, 7, 18, 0, 0, 839, 220, 1, 0, 0, 0, 840, 841, 7, 17, 0, 0, 841, + 842, 7, 10, 0, 0, 842, 843, 7, 11, 0, 0, 843, 222, 1, 0, 0, 0, 844, 845, + 7, 9, 0, 0, 845, 846, 7, 17, 0, 0, 846, 224, 1, 0, 0, 0, 847, 848, 7, 2, + 0, 0, 848, 849, 
7, 7, 0, 0, 849, 850, 7, 1, 0, 0, 850, 851, 7, 2, 0, 0, + 851, 852, 7, 9, 0, 0, 852, 853, 7, 17, 0, 0, 853, 226, 1, 0, 0, 0, 854, + 855, 7, 2, 0, 0, 855, 856, 7, 7, 0, 0, 856, 857, 7, 1, 0, 0, 857, 858, + 7, 2, 0, 0, 858, 228, 1, 0, 0, 0, 859, 860, 7, 6, 0, 0, 860, 861, 7, 11, + 0, 0, 861, 862, 7, 2, 0, 0, 862, 863, 7, 5, 0, 0, 863, 864, 7, 16, 0, 0, + 864, 230, 1, 0, 0, 0, 865, 866, 7, 11, 0, 0, 866, 867, 7, 2, 0, 0, 867, + 868, 7, 4, 0, 0, 868, 869, 7, 0, 0, 0, 869, 870, 7, 11, 0, 0, 870, 871, + 7, 3, 0, 0, 871, 232, 1, 0, 0, 0, 872, 873, 7, 3, 0, 0, 873, 874, 7, 2, + 0, 0, 874, 875, 7, 21, 0, 0, 875, 876, 7, 4, 0, 0, 876, 234, 1, 0, 0, 0, + 877, 878, 7, 10, 0, 0, 878, 879, 7, 24, 0, 0, 879, 880, 7, 2, 0, 0, 880, + 881, 7, 11, 0, 0, 881, 236, 1, 0, 0, 0, 882, 883, 7, 14, 0, 0, 883, 884, + 7, 5, 0, 0, 884, 885, 7, 11, 0, 0, 885, 886, 7, 4, 0, 0, 886, 887, 7, 9, + 0, 0, 887, 888, 7, 4, 0, 0, 888, 889, 7, 9, 0, 0, 889, 890, 7, 10, 0, 0, + 890, 891, 7, 3, 0, 0, 891, 238, 1, 0, 0, 0, 892, 893, 7, 22, 0, 0, 893, + 894, 7, 9, 0, 0, 894, 895, 7, 3, 0, 0, 895, 896, 7, 13, 0, 0, 896, 897, + 7, 10, 0, 0, 897, 898, 7, 22, 0, 0, 898, 240, 1, 0, 0, 0, 899, 900, 7, + 17, 0, 0, 900, 901, 7, 9, 0, 0, 901, 902, 7, 7, 0, 0, 902, 903, 7, 4, 0, + 0, 903, 904, 7, 2, 0, 0, 904, 905, 7, 11, 0, 0, 905, 242, 1, 0, 0, 0, 906, + 907, 7, 11, 0, 0, 907, 908, 7, 2, 0, 0, 908, 909, 7, 8, 0, 0, 909, 910, + 7, 0, 0, 0, 910, 911, 7, 11, 0, 0, 911, 912, 7, 1, 0, 0, 912, 913, 7, 9, + 0, 0, 913, 914, 7, 24, 0, 0, 914, 915, 7, 2, 0, 0, 915, 244, 1, 0, 0, 0, + 916, 917, 7, 1, 0, 0, 917, 918, 7, 8, 0, 0, 918, 919, 7, 15, 0, 0, 919, + 920, 7, 2, 0, 0, 920, 921, 7, 12, 0, 0, 921, 922, 7, 5, 0, 0, 922, 246, + 1, 0, 0, 0, 923, 924, 7, 18, 0, 0, 924, 925, 7, 11, 0, 0, 925, 926, 7, + 5, 0, 0, 926, 927, 7, 3, 0, 0, 927, 928, 7, 4, 0, 0, 928, 248, 1, 0, 0, + 0, 929, 930, 7, 11, 0, 0, 930, 931, 7, 2, 0, 0, 931, 932, 7, 24, 0, 0, + 932, 933, 7, 10, 0, 0, 933, 934, 7, 16, 0, 0, 934, 935, 7, 2, 0, 0, 935, + 250, 1, 0, 0, 0, 936, 937, 7, 11, 0, 0, 937, 938, 7, 10, 0, 0, 938, 939, + 7, 7, 0, 0, 939, 940, 7, 2, 0, 0, 940, 252, 1, 0, 0, 0, 941, 942, 7, 4, + 0, 0, 942, 943, 7, 11, 0, 0, 943, 944, 7, 5, 0, 0, 944, 945, 7, 3, 0, 0, + 945, 946, 7, 1, 0, 0, 946, 947, 7, 17, 0, 0, 947, 948, 7, 2, 0, 0, 948, + 949, 7, 11, 0, 0, 949, 254, 1, 0, 0, 0, 950, 951, 7, 10, 0, 0, 951, 952, + 7, 22, 0, 0, 952, 953, 7, 3, 0, 0, 953, 954, 7, 2, 0, 0, 954, 955, 7, 11, + 0, 0, 955, 956, 7, 1, 0, 0, 956, 957, 7, 15, 0, 0, 957, 958, 7, 9, 0, 0, + 958, 959, 7, 14, 0, 0, 959, 256, 1, 0, 0, 0, 960, 961, 7, 11, 0, 0, 961, + 962, 7, 2, 0, 0, 962, 963, 7, 14, 0, 0, 963, 964, 7, 7, 0, 0, 964, 965, + 7, 5, 0, 0, 965, 966, 7, 8, 0, 0, 966, 967, 7, 2, 0, 0, 967, 258, 1, 0, + 0, 0, 968, 969, 7, 5, 0, 0, 969, 970, 7, 11, 0, 0, 970, 971, 7, 11, 0, + 0, 971, 972, 7, 5, 0, 0, 972, 973, 7, 19, 0, 0, 973, 260, 1, 0, 0, 0, 974, + 975, 7, 3, 0, 0, 975, 976, 7, 5, 0, 0, 976, 977, 7, 12, 0, 0, 977, 978, + 7, 2, 0, 0, 978, 979, 7, 1, 0, 0, 979, 980, 7, 14, 0, 0, 980, 981, 7, 5, + 0, 0, 981, 982, 7, 8, 0, 0, 982, 983, 7, 2, 0, 0, 983, 262, 1, 0, 0, 0, + 984, 985, 7, 11, 0, 0, 985, 986, 7, 10, 0, 0, 986, 987, 7, 7, 0, 0, 987, + 988, 7, 2, 0, 0, 988, 989, 7, 1, 0, 0, 989, 264, 1, 0, 0, 0, 990, 991, + 7, 8, 0, 0, 991, 992, 7, 5, 0, 0, 992, 993, 7, 7, 0, 0, 993, 994, 7, 7, + 0, 0, 994, 266, 1, 0, 0, 0, 995, 1001, 5, 39, 0, 0, 996, 1000, 8, 25, 0, + 0, 997, 998, 5, 92, 0, 0, 998, 1000, 9, 0, 0, 0, 999, 996, 1, 0, 0, 0, + 999, 997, 1, 0, 0, 0, 1000, 1003, 1, 0, 0, 0, 1001, 999, 1, 0, 0, 0, 
1001, + 1002, 1, 0, 0, 0, 1002, 1004, 1, 0, 0, 0, 1003, 1001, 1, 0, 0, 0, 1004, + 1005, 5, 39, 0, 0, 1005, 268, 1, 0, 0, 0, 1006, 1007, 7, 4, 0, 0, 1007, + 1008, 7, 11, 0, 0, 1008, 1009, 7, 0, 0, 0, 1009, 1010, 7, 2, 0, 0, 1010, + 270, 1, 0, 0, 0, 1011, 1012, 7, 17, 0, 0, 1012, 1013, 7, 5, 0, 0, 1013, + 1014, 7, 7, 0, 0, 1014, 1015, 7, 1, 0, 0, 1015, 1016, 7, 2, 0, 0, 1016, + 272, 1, 0, 0, 0, 1017, 1019, 7, 26, 0, 0, 1018, 1017, 1, 0, 0, 0, 1019, + 1020, 1, 0, 0, 0, 1020, 1018, 1, 0, 0, 0, 1020, 1021, 1, 0, 0, 0, 1021, + 274, 1, 0, 0, 0, 1022, 1023, 5, 48, 0, 0, 1023, 1024, 7, 21, 0, 0, 1024, + 1026, 1, 0, 0, 0, 1025, 1027, 7, 27, 0, 0, 1026, 1025, 1, 0, 0, 0, 1027, + 1028, 1, 0, 0, 0, 1028, 1026, 1, 0, 0, 0, 1028, 1029, 1, 0, 0, 0, 1029, + 276, 1, 0, 0, 0, 1030, 1031, 7, 17, 0, 0, 1031, 1032, 7, 10, 0, 0, 1032, + 1033, 7, 11, 0, 0, 1033, 1034, 7, 2, 0, 0, 1034, 1035, 7, 9, 0, 0, 1035, + 1036, 7, 18, 0, 0, 1036, 1037, 7, 3, 0, 0, 1037, 1038, 5, 95, 0, 0, 1038, + 1039, 7, 16, 0, 0, 1039, 1040, 7, 2, 0, 0, 1040, 1044, 7, 19, 0, 0, 1041, + 1042, 7, 17, 0, 0, 1042, 1044, 7, 16, 0, 0, 1043, 1030, 1, 0, 0, 0, 1043, + 1041, 1, 0, 0, 0, 1044, 278, 1, 0, 0, 0, 1045, 1046, 7, 10, 0, 0, 1046, + 1047, 7, 3, 0, 0, 1047, 1048, 5, 95, 0, 0, 1048, 1049, 7, 0, 0, 0, 1049, + 1050, 7, 14, 0, 0, 1050, 1051, 7, 13, 0, 0, 1051, 1052, 7, 5, 0, 0, 1052, + 1053, 7, 4, 0, 0, 1053, 1054, 7, 2, 0, 0, 1054, 280, 1, 0, 0, 0, 1055, + 1056, 7, 10, 0, 0, 1056, 1057, 7, 3, 0, 0, 1057, 1058, 5, 95, 0, 0, 1058, + 1059, 7, 13, 0, 0, 1059, 1060, 7, 2, 0, 0, 1060, 1061, 7, 7, 0, 0, 1061, + 1062, 7, 2, 0, 0, 1062, 1063, 7, 4, 0, 0, 1063, 1064, 7, 2, 0, 0, 1064, + 282, 1, 0, 0, 0, 1065, 1066, 7, 1, 0, 0, 1066, 1067, 7, 2, 0, 0, 1067, + 1068, 7, 4, 0, 0, 1068, 1069, 5, 95, 0, 0, 1069, 1070, 7, 13, 0, 0, 1070, + 1071, 7, 2, 0, 0, 1071, 1072, 7, 17, 0, 0, 1072, 1073, 7, 5, 0, 0, 1073, + 1074, 7, 0, 0, 0, 1074, 1075, 7, 7, 0, 0, 1075, 1076, 7, 4, 0, 0, 1076, + 284, 1, 0, 0, 0, 1077, 1078, 7, 1, 0, 0, 1078, 1079, 7, 2, 0, 0, 1079, + 1080, 7, 4, 0, 0, 1080, 1081, 5, 95, 0, 0, 1081, 1082, 7, 3, 0, 0, 1082, + 1083, 7, 0, 0, 0, 1083, 1084, 7, 7, 0, 0, 1084, 1085, 7, 7, 0, 0, 1085, + 286, 1, 0, 0, 0, 1086, 1087, 7, 3, 0, 0, 1087, 1088, 7, 10, 0, 0, 1088, + 1089, 5, 95, 0, 0, 1089, 1090, 7, 5, 0, 0, 1090, 1091, 7, 8, 0, 0, 1091, + 1092, 7, 4, 0, 0, 1092, 1093, 7, 9, 0, 0, 1093, 1094, 7, 10, 0, 0, 1094, + 1095, 7, 3, 0, 0, 1095, 288, 1, 0, 0, 0, 1096, 1100, 7, 28, 0, 0, 1097, + 1099, 7, 29, 0, 0, 1098, 1097, 1, 0, 0, 0, 1099, 1102, 1, 0, 0, 0, 1100, + 1098, 1, 0, 0, 0, 1100, 1101, 1, 0, 0, 0, 1101, 290, 1, 0, 0, 0, 1102, + 1100, 1, 0, 0, 0, 1103, 1104, 3, 35, 17, 0, 1104, 1105, 3, 289, 144, 0, + 1105, 292, 1, 0, 0, 0, 1106, 1107, 3, 19, 9, 0, 1107, 1108, 3, 289, 144, + 0, 1108, 294, 1, 0, 0, 0, 1109, 1110, 3, 33, 16, 0, 1110, 1111, 3, 289, + 144, 0, 1111, 296, 1, 0, 0, 0, 1112, 1113, 7, 30, 0, 0, 1113, 1114, 1, + 0, 0, 0, 1114, 1115, 6, 148, 0, 0, 1115, 298, 1, 0, 0, 0, 1116, 1117, 5, + 47, 0, 0, 1117, 1118, 5, 42, 0, 0, 1118, 1122, 1, 0, 0, 0, 1119, 1121, + 9, 0, 0, 0, 1120, 1119, 1, 0, 0, 0, 1121, 1124, 1, 0, 0, 0, 1122, 1123, + 1, 0, 0, 0, 1122, 1120, 1, 0, 0, 0, 1123, 1125, 1, 0, 0, 0, 1124, 1122, + 1, 0, 0, 0, 1125, 1126, 5, 42, 0, 0, 1126, 1127, 5, 47, 0, 0, 1127, 1128, + 1, 0, 0, 0, 1128, 1129, 6, 149, 0, 0, 1129, 300, 1, 0, 0, 0, 1130, 1131, + 5, 47, 0, 0, 1131, 1132, 5, 47, 0, 0, 1132, 1136, 1, 0, 0, 0, 1133, 1135, + 8, 31, 0, 0, 1134, 1133, 1, 0, 0, 0, 1135, 1138, 1, 0, 0, 0, 1136, 1134, + 1, 0, 0, 0, 1136, 1137, 1, 0, 0, 0, 1137, 
1139, 1, 0, 0, 0, 1138, 1136, + 1, 0, 0, 0, 1139, 1140, 6, 150, 0, 0, 1140, 302, 1, 0, 0, 0, 10, 0, 353, + 999, 1001, 1020, 1028, 1043, 1100, 1122, 1136, 1, 0, 1, 0, + } + deserializer := antlr.NewATNDeserializer(nil) + staticData.atn = deserializer.Deserialize(staticData.serializedATN) + atn := staticData.atn + staticData.decisionToDFA = make([]*antlr.DFA, len(atn.DecisionToState)) + decisionToDFA := staticData.decisionToDFA + for index, state := range atn.DecisionToState { + decisionToDFA[index] = antlr.NewDFA(state, index) + } +} + +// KuneiformLexerInit initializes any static state used to implement KuneiformLexer. By default the +// static state used to implement the lexer is lazily initialized during the first call to +// NewKuneiformLexer(). You can call this function if you wish to initialize the static state ahead +// of time. +func KuneiformLexerInit() { + staticData := &KuneiformLexerLexerStaticData + staticData.once.Do(kuneiformlexerLexerInit) +} + +// NewKuneiformLexer produces a new lexer instance for the optional input antlr.CharStream. +func NewKuneiformLexer(input antlr.CharStream) *KuneiformLexer { + KuneiformLexerInit() + l := new(KuneiformLexer) + l.BaseLexer = antlr.NewBaseLexer(input) + staticData := &KuneiformLexerLexerStaticData + l.Interpreter = antlr.NewLexerATNSimulator(l, staticData.atn, staticData.decisionToDFA, staticData.PredictionContextCache) + l.channelNames = staticData.ChannelNames + l.modeNames = staticData.ModeNames + l.RuleNames = staticData.RuleNames + l.LiteralNames = staticData.LiteralNames + l.SymbolicNames = staticData.SymbolicNames + l.GrammarFileName = "KuneiformLexer.g4" + // TODO: l.EOF = antlr.TokenEOF + + return l +} + +// KuneiformLexer tokens. +const ( + KuneiformLexerLBRACE = 1 + KuneiformLexerRBRACE = 2 + KuneiformLexerLBRACKET = 3 + KuneiformLexerRBRACKET = 4 + KuneiformLexerCOL = 5 + KuneiformLexerSCOL = 6 + KuneiformLexerLPAREN = 7 + KuneiformLexerRPAREN = 8 + KuneiformLexerCOMMA = 9 + KuneiformLexerAT = 10 + KuneiformLexerEXCL = 11 + KuneiformLexerPERIOD = 12 + KuneiformLexerCONCAT = 13 + KuneiformLexerSTAR = 14 + KuneiformLexerEQUALS = 15 + KuneiformLexerEQUATE = 16 + KuneiformLexerHASH = 17 + KuneiformLexerDOLLAR = 18 + KuneiformLexerMOD = 19 + KuneiformLexerPLUS = 20 + KuneiformLexerMINUS = 21 + KuneiformLexerDIV = 22 + KuneiformLexerNEQ = 23 + KuneiformLexerLT = 24 + KuneiformLexerLTE = 25 + KuneiformLexerGT = 26 + KuneiformLexerGTE = 27 + KuneiformLexerTYPE_CAST = 28 + KuneiformLexerUNDERSCORE = 29 + KuneiformLexerASSIGN = 30 + KuneiformLexerRANGE = 31 + KuneiformLexerDOUBLE_QUOTE = 32 + KuneiformLexerUSE = 33 + KuneiformLexerUNUSE = 34 + KuneiformLexerTABLE = 35 + KuneiformLexerACTION = 36 + KuneiformLexerCREATE = 37 + KuneiformLexerALTER = 38 + KuneiformLexerCOLUMN = 39 + KuneiformLexerADD = 40 + KuneiformLexerDROP = 41 + KuneiformLexerRENAME = 42 + KuneiformLexerTO = 43 + KuneiformLexerCONSTRAINT = 44 + KuneiformLexerCHECK = 45 + KuneiformLexerFOREIGN = 46 + KuneiformLexerPRIMARY = 47 + KuneiformLexerKEY = 48 + KuneiformLexerON = 49 + KuneiformLexerDO = 50 + KuneiformLexerUNIQUE = 51 + KuneiformLexerCASCADE = 52 + KuneiformLexerRESTRICT = 53 + KuneiformLexerSET = 54 + KuneiformLexerDEFAULT = 55 + KuneiformLexerNULL = 56 + KuneiformLexerDELETE = 57 + KuneiformLexerUPDATE = 58 + KuneiformLexerREFERENCES = 59 + KuneiformLexerREF = 60 + KuneiformLexerNOT = 61 + KuneiformLexerINDEX = 62 + KuneiformLexerAND = 63 + KuneiformLexerOR = 64 + KuneiformLexerLIKE = 65 + KuneiformLexerILIKE = 66 + KuneiformLexerIN = 67 + 
KuneiformLexerBETWEEN = 68 + KuneiformLexerIS = 69 + KuneiformLexerEXISTS = 70 + KuneiformLexerALL = 71 + KuneiformLexerANY = 72 + KuneiformLexerJOIN = 73 + KuneiformLexerLEFT = 74 + KuneiformLexerRIGHT = 75 + KuneiformLexerINNER = 76 + KuneiformLexerAS = 77 + KuneiformLexerASC = 78 + KuneiformLexerDESC = 79 + KuneiformLexerLIMIT = 80 + KuneiformLexerOFFSET = 81 + KuneiformLexerORDER = 82 + KuneiformLexerBY = 83 + KuneiformLexerGROUP = 84 + KuneiformLexerHAVING = 85 + KuneiformLexerRETURNS = 86 + KuneiformLexerNO = 87 + KuneiformLexerWITH = 88 + KuneiformLexerCASE = 89 + KuneiformLexerWHEN = 90 + KuneiformLexerTHEN = 91 + KuneiformLexerEND = 92 + KuneiformLexerDISTINCT = 93 + KuneiformLexerFROM = 94 + KuneiformLexerWHERE = 95 + KuneiformLexerCOLLATE = 96 + KuneiformLexerSELECT = 97 + KuneiformLexerINSERT = 98 + KuneiformLexerVALUES = 99 + KuneiformLexerFULL = 100 + KuneiformLexerUNION = 101 + KuneiformLexerINTERSECT = 102 + KuneiformLexerEXCEPT = 103 + KuneiformLexerNULLS = 104 + KuneiformLexerFIRST = 105 + KuneiformLexerLAST = 106 + KuneiformLexerRETURNING = 107 + KuneiformLexerINTO = 108 + KuneiformLexerCONFLICT = 109 + KuneiformLexerNOTHING = 110 + KuneiformLexerFOR = 111 + KuneiformLexerIF = 112 + KuneiformLexerELSEIF = 113 + KuneiformLexerELSE = 114 + KuneiformLexerBREAK = 115 + KuneiformLexerRETURN = 116 + KuneiformLexerNEXT = 117 + KuneiformLexerOVER = 118 + KuneiformLexerPARTITION = 119 + KuneiformLexerWINDOW = 120 + KuneiformLexerFILTER = 121 + KuneiformLexerRECURSIVE = 122 + KuneiformLexerSCHEMA = 123 + KuneiformLexerGRANT = 124 + KuneiformLexerREVOKE = 125 + KuneiformLexerROLE = 126 + KuneiformLexerTRANSFER = 127 + KuneiformLexerOWNERSHIP = 128 + KuneiformLexerREPLACE = 129 + KuneiformLexerARRAY = 130 + KuneiformLexerNAMESPACE = 131 + KuneiformLexerROLES = 132 + KuneiformLexerCALL = 133 + KuneiformLexerSTRING_ = 134 + KuneiformLexerTRUE = 135 + KuneiformLexerFALSE = 136 + KuneiformLexerDIGITS_ = 137 + KuneiformLexerBINARY_ = 138 + KuneiformLexerLEGACY_FOREIGN_KEY = 139 + KuneiformLexerLEGACY_ON_UPDATE = 140 + KuneiformLexerLEGACY_ON_DELETE = 141 + KuneiformLexerLEGACY_SET_DEFAULT = 142 + KuneiformLexerLEGACY_SET_NULL = 143 + KuneiformLexerLEGACY_NO_ACTION = 144 + KuneiformLexerIDENTIFIER = 145 + KuneiformLexerVARIABLE = 146 + KuneiformLexerCONTEXTUAL_VARIABLE = 147 + KuneiformLexerHASH_IDENTIFIER = 148 + KuneiformLexerWS = 149 + KuneiformLexerBLOCK_COMMENT = 150 + KuneiformLexerLINE_COMMENT = 151 +) diff --git a/parse/gen/kuneiform_parser.go b/node/engine/parse/gen/kuneiform_parser.go similarity index 59% rename from parse/gen/kuneiform_parser.go rename to node/engine/parse/gen/kuneiform_parser.go index 54cb4188b..32ac02f0f 100644 --- a/parse/gen/kuneiform_parser.go +++ b/node/engine/parse/gen/kuneiform_parser.go @@ -35,60 +35,69 @@ func kuneiformparserParserInit() { "", "'{'", "'}'", "'['", "']'", "':'", "';'", "'('", "')'", "','", "'@'", "'!'", "'.'", "'||'", "'*'", "'='", "'=='", "'#'", "'$'", "'%'", "'+'", "'-'", "'/'", "", "'<'", "'<='", "'>'", "'>='", "'::'", "'_'", "':='", - "'..'", "'\"'", "'database'", "'use'", "'table'", "'action'", "'procedure'", - "'public'", "'private'", "'view'", "'owner'", "'foreign'", "'primary'", - "'key'", "'on'", "'do'", "'unique'", "'cascade'", "'restrict'", "'set'", - "'default'", "'null'", "'delete'", "'update'", "'references'", "'ref'", - "'not'", "'index'", "'and'", "'or'", "'like'", "'ilike'", "'in'", "'between'", - "'is'", "'exists'", "'all'", "'any'", "'join'", "'left'", "'right'", - "'inner'", "'as'", "'asc'", "'desc'", "'limit'", 
"'offset'", "'order'", - "'by'", "'group'", "'having'", "'returns'", "'no'", "'with'", "'case'", - "'when'", "'then'", "'end'", "'distinct'", "'from'", "'where'", "'collate'", - "'select'", "'insert'", "'values'", "'full'", "'union'", "'intersect'", - "'except'", "'nulls'", "'first'", "'last'", "'returning'", "'into'", - "'conflict'", "'nothing'", "'for'", "'if'", "'elseif'", "'else'", "'break'", - "'return'", "'next'", "", "'true'", "'false'", "", "", "", "'on_update'", - "'on_delete'", "'set_default'", "'set_null'", "'no_action'", + "'..'", "'\"'", "'use'", "'unuse'", "'table'", "'action'", "'create'", + "'alter'", "'column'", "'add'", "'drop'", "'rename'", "'to'", "'constraint'", + "'check'", "'foreign'", "'primary'", "'key'", "'on'", "'do'", "'unique'", + "'cascade'", "'restrict'", "'set'", "'default'", "'null'", "'delete'", + "'update'", "'references'", "'ref'", "'not'", "'index'", "'and'", "'or'", + "'like'", "'ilike'", "'in'", "'between'", "'is'", "'exists'", "'all'", + "'any'", "'join'", "'left'", "'right'", "'inner'", "'as'", "'asc'", + "'desc'", "'limit'", "'offset'", "'order'", "'by'", "'group'", "'having'", + "'returns'", "'no'", "'with'", "'case'", "'when'", "'then'", "'end'", + "'distinct'", "'from'", "'where'", "'collate'", "'select'", "'insert'", + "'values'", "'full'", "'union'", "'intersect'", "'except'", "'nulls'", + "'first'", "'last'", "'returning'", "'into'", "'conflict'", "'nothing'", + "'for'", "'if'", "'elseif'", "'else'", "'break'", "'return'", "'next'", + "'over'", "'partition'", "'window'", "'filter'", "'recursive'", "'schema'", + "'grant'", "'revoke'", "'role'", "'transfer'", "'ownership'", "'replace'", + "'array'", "'namespace'", "'roles'", "'call'", "", "'true'", "'false'", + "", "", "", "'on_update'", "'on_delete'", "'set_default'", "'set_null'", + "'no_action'", } staticData.SymbolicNames = []string{ "", "LBRACE", "RBRACE", "LBRACKET", "RBRACKET", "COL", "SCOL", "LPAREN", "RPAREN", "COMMA", "AT", "EXCL", "PERIOD", "CONCAT", "STAR", "EQUALS", "EQUATE", "HASH", "DOLLAR", "MOD", "PLUS", "MINUS", "DIV", "NEQ", "LT", "LTE", "GT", "GTE", "TYPE_CAST", "UNDERSCORE", "ASSIGN", "RANGE", "DOUBLE_QUOTE", - "DATABASE", "USE", "TABLE", "ACTION", "PROCEDURE", "PUBLIC", "PRIVATE", - "VIEW", "OWNER", "FOREIGN", "PRIMARY", "KEY", "ON", "DO", "UNIQUE", - "CASCADE", "RESTRICT", "SET", "DEFAULT", "NULL", "DELETE", "UPDATE", - "REFERENCES", "REF", "NOT", "INDEX", "AND", "OR", "LIKE", "ILIKE", "IN", - "BETWEEN", "IS", "EXISTS", "ALL", "ANY", "JOIN", "LEFT", "RIGHT", "INNER", - "AS", "ASC", "DESC", "LIMIT", "OFFSET", "ORDER", "BY", "GROUP", "HAVING", - "RETURNS", "NO", "WITH", "CASE", "WHEN", "THEN", "END", "DISTINCT", - "FROM", "WHERE", "COLLATE", "SELECT", "INSERT", "VALUES", "FULL", "UNION", - "INTERSECT", "EXCEPT", "NULLS", "FIRST", "LAST", "RETURNING", "INTO", - "CONFLICT", "NOTHING", "FOR", "IF", "ELSEIF", "ELSE", "BREAK", "RETURN", - "NEXT", "STRING_", "TRUE", "FALSE", "DIGITS_", "BINARY_", "LEGACY_FOREIGN_KEY", - "LEGACY_ON_UPDATE", "LEGACY_ON_DELETE", "LEGACY_SET_DEFAULT", "LEGACY_SET_NULL", - "LEGACY_NO_ACTION", "IDENTIFIER", "VARIABLE", "CONTEXTUAL_VARIABLE", - "HASH_IDENTIFIER", "WS", "BLOCK_COMMENT", "LINE_COMMENT", + "USE", "UNUSE", "TABLE", "ACTION", "CREATE", "ALTER", "COLUMN", "ADD", + "DROP", "RENAME", "TO", "CONSTRAINT", "CHECK", "FOREIGN", "PRIMARY", + "KEY", "ON", "DO", "UNIQUE", "CASCADE", "RESTRICT", "SET", "DEFAULT", + "NULL", "DELETE", "UPDATE", "REFERENCES", "REF", "NOT", "INDEX", "AND", + "OR", "LIKE", "ILIKE", "IN", "BETWEEN", "IS", "EXISTS", "ALL", 
"ANY", + "JOIN", "LEFT", "RIGHT", "INNER", "AS", "ASC", "DESC", "LIMIT", "OFFSET", + "ORDER", "BY", "GROUP", "HAVING", "RETURNS", "NO", "WITH", "CASE", "WHEN", + "THEN", "END", "DISTINCT", "FROM", "WHERE", "COLLATE", "SELECT", "INSERT", + "VALUES", "FULL", "UNION", "INTERSECT", "EXCEPT", "NULLS", "FIRST", + "LAST", "RETURNING", "INTO", "CONFLICT", "NOTHING", "FOR", "IF", "ELSEIF", + "ELSE", "BREAK", "RETURN", "NEXT", "OVER", "PARTITION", "WINDOW", "FILTER", + "RECURSIVE", "SCHEMA", "GRANT", "REVOKE", "ROLE", "TRANSFER", "OWNERSHIP", + "REPLACE", "ARRAY", "NAMESPACE", "ROLES", "CALL", "STRING_", "TRUE", + "FALSE", "DIGITS_", "BINARY_", "LEGACY_FOREIGN_KEY", "LEGACY_ON_UPDATE", + "LEGACY_ON_DELETE", "LEGACY_SET_DEFAULT", "LEGACY_SET_NULL", "LEGACY_NO_ACTION", + "IDENTIFIER", "VARIABLE", "CONTEXTUAL_VARIABLE", "HASH_IDENTIFIER", + "WS", "BLOCK_COMMENT", "LINE_COMMENT", } staticData.RuleNames = []string{ - "schema_entry", "sql_entry", "action_entry", "procedure_entry", "literal", - "identifier", "identifier_list", "type", "type_cast", "variable", "variable_list", - "schema", "annotation", "database_declaration", "use_declaration", "table_declaration", - "column_def", "index_def", "foreign_key_def", "foreign_key_action", - "type_list", "named_type_list", "typed_variable_list", "constraint", - "access_modifier", "action_declaration", "procedure_declaration", "foreign_procedure_declaration", - "procedure_return", "sql", "sql_statement", "common_table_expression", - "select_statement", "compound_operator", "ordering_term", "select_core", - "relation", "join", "result_column", "update_statement", "update_set_clause", - "insert_statement", "upsert_clause", "delete_statement", "sql_expr", - "when_then_clause", "sql_expr_list", "sql_function_call", "action_block", - "action_statement", "procedure_block", "procedure_expr", "procedure_expr_list", - "proc_statement", "variable_or_underscore", "procedure_function_call", - "if_then_block", "range", + "entry", "statement", "literal", "identifier", "identifier_list", "type", + "type_cast", "variable", "table_column_def", "type_list", "named_type_list", + "inline_constraint", "fk_action", "fk_constraint", "action_return", + "sql_statement", "common_table_expression", "create_table_statement", + "table_constraint_def", "opt_drop_behavior", "drop_table_statement", + "alter_table_statement", "alter_table_action", "create_index_statement", + "drop_index_statement", "create_role_statement", "drop_role_statement", + "grant_statement", "revoke_statement", "privilege_list", "privilege", + "transfer_ownership_statement", "create_action_statement", "drop_action_statement", + "use_extension_statement", "unuse_extension_statement", "create_namespace_statement", + "drop_namespace_statement", "select_statement", "compound_operator", + "ordering_term", "select_core", "relation", "join", "result_column", + "update_statement", "update_set_clause", "insert_statement", "upsert_clause", + "delete_statement", "sql_expr", "window", "when_then_clause", "sql_expr_list", + "sql_function_call", "action_expr", "action_expr_list", "action_statement", + "variable_or_underscore", "action_function_call", "if_then_block", "range", } staticData.PredictionContextCache = antlr.NewPredictionContextCache() staticData.serializedATN = []int32{ - 4, 1, 131, 1162, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, + 4, 1, 151, 1324, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2, 10, 7, 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 
13, 7, 13, 2, 14, 7, 14, 2, 15, 7, 15, 2, 16, 7, 16, 2, 17, 7, 17, 2, 18, 7, 18, 2, 19, 7, 19, 2, 20, 7, 20, @@ -99,559 +108,645 @@ func kuneiformparserParserInit() { 2, 42, 7, 42, 2, 43, 7, 43, 2, 44, 7, 44, 2, 45, 7, 45, 2, 46, 7, 46, 2, 47, 7, 47, 2, 48, 7, 48, 2, 49, 7, 49, 2, 50, 7, 50, 2, 51, 7, 51, 2, 52, 7, 52, 2, 53, 7, 53, 2, 54, 7, 54, 2, 55, 7, 55, 2, 56, 7, 56, 2, 57, 7, - 57, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 2, 1, 2, 1, 2, 1, 3, 1, 3, 1, - 3, 1, 4, 1, 4, 3, 4, 131, 8, 4, 1, 4, 1, 4, 3, 4, 135, 8, 4, 1, 4, 1, 4, - 1, 4, 1, 4, 1, 4, 1, 4, 3, 4, 143, 8, 4, 1, 5, 1, 5, 1, 5, 1, 5, 3, 5, - 149, 8, 5, 1, 6, 1, 6, 1, 6, 5, 6, 154, 8, 6, 10, 6, 12, 6, 157, 9, 6, - 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 3, 7, 165, 8, 7, 1, 7, 1, 7, 3, 7, - 169, 8, 7, 1, 8, 1, 8, 1, 8, 1, 9, 1, 9, 1, 10, 1, 10, 1, 10, 5, 10, 179, - 8, 10, 10, 10, 12, 10, 182, 9, 10, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, - 11, 5, 11, 190, 8, 11, 10, 11, 12, 11, 193, 9, 11, 1, 12, 1, 12, 1, 12, - 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 5, 12, 204, 8, 12, 10, 12, 12, - 12, 207, 9, 12, 3, 12, 209, 8, 12, 1, 12, 1, 12, 1, 13, 1, 13, 1, 13, 1, - 13, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, - 5, 14, 227, 8, 14, 10, 14, 12, 14, 230, 9, 14, 1, 14, 1, 14, 3, 14, 234, - 8, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, - 15, 1, 15, 1, 15, 3, 15, 248, 8, 15, 5, 15, 250, 8, 15, 10, 15, 12, 15, - 253, 9, 15, 1, 15, 1, 15, 1, 16, 1, 16, 1, 16, 5, 16, 260, 8, 16, 10, 16, - 12, 16, 263, 9, 16, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 18, 1, - 18, 1, 18, 3, 18, 274, 8, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, - 1, 18, 1, 18, 1, 18, 5, 18, 285, 8, 18, 10, 18, 12, 18, 288, 9, 18, 1, - 19, 1, 19, 1, 19, 3, 19, 293, 8, 19, 1, 19, 1, 19, 1, 19, 3, 19, 298, 8, - 19, 3, 19, 300, 8, 19, 1, 19, 3, 19, 303, 8, 19, 1, 19, 1, 19, 1, 19, 3, - 19, 308, 8, 19, 1, 19, 1, 19, 1, 19, 1, 19, 3, 19, 314, 8, 19, 1, 19, 1, - 19, 1, 19, 3, 19, 319, 8, 19, 1, 19, 3, 19, 322, 8, 19, 1, 20, 1, 20, 1, - 20, 5, 20, 327, 8, 20, 10, 20, 12, 20, 330, 9, 20, 1, 21, 1, 21, 1, 21, - 1, 21, 1, 21, 5, 21, 337, 8, 21, 10, 21, 12, 21, 340, 9, 21, 1, 22, 1, - 22, 1, 22, 1, 22, 1, 22, 1, 22, 5, 22, 348, 8, 22, 10, 22, 12, 22, 351, - 9, 22, 1, 23, 1, 23, 1, 23, 3, 23, 356, 8, 23, 1, 23, 1, 23, 1, 23, 1, - 23, 3, 23, 362, 8, 23, 1, 23, 1, 23, 1, 23, 1, 23, 3, 23, 368, 8, 23, 1, - 24, 1, 24, 1, 25, 5, 25, 373, 8, 25, 10, 25, 12, 25, 376, 9, 25, 1, 25, - 1, 25, 1, 25, 1, 25, 3, 25, 382, 8, 25, 1, 25, 1, 25, 4, 25, 386, 8, 25, - 11, 25, 12, 25, 387, 1, 25, 1, 25, 1, 25, 1, 25, 1, 26, 5, 26, 395, 8, - 26, 10, 26, 12, 26, 398, 9, 26, 1, 26, 1, 26, 1, 26, 1, 26, 3, 26, 404, - 8, 26, 1, 26, 1, 26, 4, 26, 408, 8, 26, 11, 26, 12, 26, 409, 1, 26, 3, - 26, 413, 8, 26, 1, 26, 1, 26, 1, 26, 1, 26, 1, 27, 1, 27, 1, 27, 1, 27, - 1, 27, 1, 27, 3, 27, 425, 8, 27, 1, 27, 1, 27, 3, 27, 429, 8, 27, 1, 28, - 1, 28, 3, 28, 433, 8, 28, 1, 28, 1, 28, 1, 28, 1, 28, 1, 28, 1, 28, 1, - 28, 1, 28, 3, 28, 443, 8, 28, 1, 29, 1, 29, 1, 29, 1, 30, 1, 30, 1, 30, - 1, 30, 5, 30, 452, 8, 30, 10, 30, 12, 30, 455, 9, 30, 3, 30, 457, 8, 30, - 1, 30, 1, 30, 1, 30, 1, 30, 3, 30, 463, 8, 30, 1, 31, 1, 31, 1, 31, 1, - 31, 1, 31, 5, 31, 470, 8, 31, 10, 31, 12, 31, 473, 9, 31, 3, 31, 475, 8, - 31, 1, 31, 3, 31, 478, 8, 31, 1, 31, 1, 31, 1, 31, 1, 31, 1, 31, 1, 32, - 1, 32, 1, 32, 1, 32, 5, 32, 489, 8, 32, 10, 32, 12, 32, 492, 9, 32, 1, - 32, 1, 32, 1, 32, 1, 32, 1, 32, 5, 32, 499, 8, 32, 10, 32, 12, 32, 502, - 9, 32, 3, 32, 504, 8, 32, 1, 32, 1, 32, 3, 32, 
508, 8, 32, 1, 32, 1, 32, - 3, 32, 512, 8, 32, 1, 33, 1, 33, 3, 33, 516, 8, 33, 1, 33, 1, 33, 3, 33, - 520, 8, 33, 1, 34, 1, 34, 3, 34, 524, 8, 34, 1, 34, 1, 34, 3, 34, 528, - 8, 34, 1, 35, 1, 35, 3, 35, 532, 8, 35, 1, 35, 1, 35, 1, 35, 5, 35, 537, - 8, 35, 10, 35, 12, 35, 540, 9, 35, 1, 35, 1, 35, 1, 35, 5, 35, 545, 8, - 35, 10, 35, 12, 35, 548, 9, 35, 3, 35, 550, 8, 35, 1, 35, 1, 35, 3, 35, - 554, 8, 35, 1, 35, 1, 35, 1, 35, 1, 35, 1, 35, 3, 35, 561, 8, 35, 3, 35, - 563, 8, 35, 1, 36, 1, 36, 3, 36, 567, 8, 36, 1, 36, 3, 36, 570, 8, 36, - 1, 36, 1, 36, 1, 36, 1, 36, 3, 36, 576, 8, 36, 1, 36, 3, 36, 579, 8, 36, - 1, 36, 1, 36, 3, 36, 583, 8, 36, 1, 36, 3, 36, 586, 8, 36, 3, 36, 588, - 8, 36, 1, 37, 3, 37, 591, 8, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, - 38, 1, 38, 3, 38, 600, 8, 38, 1, 38, 3, 38, 603, 8, 38, 1, 38, 1, 38, 1, - 38, 3, 38, 608, 8, 38, 1, 38, 3, 38, 611, 8, 38, 1, 39, 1, 39, 1, 39, 3, - 39, 616, 8, 39, 1, 39, 3, 39, 619, 8, 39, 1, 39, 1, 39, 1, 39, 1, 39, 5, - 39, 625, 8, 39, 10, 39, 12, 39, 628, 9, 39, 1, 39, 1, 39, 1, 39, 5, 39, - 633, 8, 39, 10, 39, 12, 39, 636, 9, 39, 3, 39, 638, 8, 39, 1, 39, 1, 39, - 3, 39, 642, 8, 39, 1, 40, 1, 40, 1, 40, 1, 40, 1, 41, 1, 41, 1, 41, 1, - 41, 3, 41, 652, 8, 41, 1, 41, 3, 41, 655, 8, 41, 1, 41, 1, 41, 1, 41, 1, - 41, 3, 41, 661, 8, 41, 1, 41, 1, 41, 1, 41, 1, 41, 1, 41, 1, 41, 1, 41, - 1, 41, 1, 41, 5, 41, 672, 8, 41, 10, 41, 12, 41, 675, 9, 41, 1, 41, 3, - 41, 678, 8, 41, 1, 42, 1, 42, 1, 42, 1, 42, 1, 42, 1, 42, 1, 42, 3, 42, - 687, 8, 42, 3, 42, 689, 8, 42, 1, 42, 1, 42, 1, 42, 1, 42, 1, 42, 1, 42, - 1, 42, 5, 42, 698, 8, 42, 10, 42, 12, 42, 701, 9, 42, 1, 42, 1, 42, 3, - 42, 705, 8, 42, 3, 42, 707, 8, 42, 1, 43, 1, 43, 1, 43, 1, 43, 3, 43, 713, - 8, 43, 1, 43, 3, 43, 716, 8, 43, 1, 43, 1, 43, 3, 43, 720, 8, 43, 1, 44, - 1, 44, 1, 44, 1, 44, 1, 44, 3, 44, 727, 8, 44, 1, 44, 1, 44, 1, 44, 1, - 44, 3, 44, 733, 8, 44, 1, 44, 1, 44, 3, 44, 737, 8, 44, 1, 44, 1, 44, 3, - 44, 741, 8, 44, 1, 44, 1, 44, 1, 44, 3, 44, 746, 8, 44, 1, 44, 1, 44, 3, - 44, 750, 8, 44, 1, 44, 1, 44, 3, 44, 754, 8, 44, 1, 44, 4, 44, 757, 8, - 44, 11, 44, 12, 44, 758, 1, 44, 1, 44, 3, 44, 763, 8, 44, 1, 44, 1, 44, - 1, 44, 3, 44, 768, 8, 44, 1, 44, 3, 44, 771, 8, 44, 1, 44, 1, 44, 1, 44, - 1, 44, 3, 44, 777, 8, 44, 1, 44, 1, 44, 3, 44, 781, 8, 44, 1, 44, 1, 44, - 1, 44, 1, 44, 1, 44, 1, 44, 1, 44, 1, 44, 1, 44, 1, 44, 1, 44, 3, 44, 794, - 8, 44, 1, 44, 1, 44, 1, 44, 1, 44, 3, 44, 800, 8, 44, 1, 44, 1, 44, 1, - 44, 1, 44, 1, 44, 1, 44, 1, 44, 1, 44, 1, 44, 1, 44, 1, 44, 1, 44, 1, 44, - 1, 44, 1, 44, 1, 44, 1, 44, 1, 44, 3, 44, 820, 8, 44, 1, 44, 1, 44, 1, - 44, 1, 44, 3, 44, 826, 8, 44, 1, 44, 1, 44, 3, 44, 830, 8, 44, 3, 44, 832, - 8, 44, 1, 44, 1, 44, 3, 44, 836, 8, 44, 1, 44, 1, 44, 1, 44, 1, 44, 1, - 44, 3, 44, 843, 8, 44, 1, 44, 1, 44, 1, 44, 1, 44, 3, 44, 849, 8, 44, 1, - 44, 1, 44, 1, 44, 1, 44, 1, 44, 3, 44, 856, 8, 44, 1, 44, 1, 44, 1, 44, - 1, 44, 1, 44, 1, 44, 3, 44, 864, 8, 44, 5, 44, 866, 8, 44, 10, 44, 12, - 44, 869, 9, 44, 1, 45, 1, 45, 1, 45, 1, 45, 1, 45, 1, 46, 1, 46, 1, 46, - 5, 46, 879, 8, 46, 10, 46, 12, 46, 882, 9, 46, 1, 47, 1, 47, 1, 47, 3, - 47, 887, 8, 47, 1, 47, 1, 47, 3, 47, 891, 8, 47, 1, 47, 1, 47, 1, 47, 1, - 47, 1, 47, 1, 47, 1, 47, 1, 47, 1, 47, 1, 47, 3, 47, 903, 8, 47, 1, 47, - 1, 47, 3, 47, 907, 8, 47, 1, 48, 1, 48, 1, 48, 5, 48, 912, 8, 48, 10, 48, - 12, 48, 915, 9, 48, 1, 49, 1, 49, 1, 49, 1, 49, 3, 49, 921, 8, 49, 1, 49, - 1, 49, 1, 49, 1, 49, 3, 49, 927, 8, 49, 1, 49, 1, 49, 1, 49, 1, 49, 1, - 49, 3, 49, 934, 8, 49, 1, 49, 3, 
49, 937, 8, 49, 1, 50, 5, 50, 940, 8, - 50, 10, 50, 12, 50, 943, 9, 50, 1, 51, 1, 51, 1, 51, 1, 51, 1, 51, 3, 51, - 950, 8, 51, 1, 51, 1, 51, 1, 51, 1, 51, 3, 51, 956, 8, 51, 1, 51, 1, 51, - 3, 51, 960, 8, 51, 1, 51, 1, 51, 3, 51, 964, 8, 51, 1, 51, 1, 51, 3, 51, - 968, 8, 51, 1, 51, 1, 51, 3, 51, 972, 8, 51, 1, 51, 1, 51, 3, 51, 976, - 8, 51, 1, 51, 1, 51, 1, 51, 1, 51, 1, 51, 1, 51, 1, 51, 1, 51, 1, 51, 1, - 51, 1, 51, 1, 51, 1, 51, 1, 51, 1, 51, 1, 51, 1, 51, 1, 51, 1, 51, 1, 51, - 1, 51, 1, 51, 3, 51, 1000, 8, 51, 1, 51, 1, 51, 1, 51, 1, 51, 3, 51, 1006, - 8, 51, 1, 51, 1, 51, 3, 51, 1010, 8, 51, 3, 51, 1012, 8, 51, 1, 51, 1, - 51, 3, 51, 1016, 8, 51, 1, 51, 1, 51, 1, 51, 3, 51, 1021, 8, 51, 1, 51, - 1, 51, 1, 51, 1, 51, 1, 51, 1, 51, 3, 51, 1029, 8, 51, 5, 51, 1031, 8, - 51, 10, 51, 12, 51, 1034, 9, 51, 1, 52, 1, 52, 1, 52, 5, 52, 1039, 8, 52, - 10, 52, 12, 52, 1042, 9, 52, 1, 53, 1, 53, 1, 53, 1, 53, 1, 53, 1, 53, - 1, 53, 5, 53, 1051, 8, 53, 10, 53, 12, 53, 1054, 9, 53, 1, 53, 1, 53, 3, - 53, 1058, 8, 53, 1, 53, 1, 53, 1, 53, 1, 53, 1, 53, 3, 53, 1065, 8, 53, - 1, 53, 1, 53, 1, 53, 1, 53, 1, 53, 1, 53, 1, 53, 1, 53, 1, 53, 1, 53, 3, - 53, 1077, 8, 53, 1, 53, 1, 53, 5, 53, 1081, 8, 53, 10, 53, 12, 53, 1084, - 9, 53, 1, 53, 1, 53, 1, 53, 1, 53, 1, 53, 1, 53, 5, 53, 1092, 8, 53, 10, - 53, 12, 53, 1095, 9, 53, 1, 53, 1, 53, 1, 53, 5, 53, 1100, 8, 53, 10, 53, - 12, 53, 1103, 9, 53, 1, 53, 3, 53, 1106, 8, 53, 1, 53, 1, 53, 1, 53, 1, - 53, 1, 53, 1, 53, 1, 53, 1, 53, 3, 53, 1116, 8, 53, 1, 53, 1, 53, 1, 53, - 1, 53, 1, 53, 1, 53, 3, 53, 1124, 8, 53, 1, 54, 1, 54, 1, 55, 1, 55, 1, - 55, 3, 55, 1131, 8, 55, 1, 55, 1, 55, 1, 55, 1, 55, 1, 55, 1, 55, 1, 55, - 1, 55, 1, 55, 3, 55, 1142, 8, 55, 1, 55, 1, 55, 3, 55, 1146, 8, 55, 1, - 56, 1, 56, 1, 56, 5, 56, 1151, 8, 56, 10, 56, 12, 56, 1154, 9, 56, 1, 56, - 1, 56, 1, 57, 1, 57, 1, 57, 1, 57, 1, 57, 0, 2, 88, 102, 58, 0, 2, 4, 6, - 8, 10, 12, 14, 16, 18, 20, 22, 24, 26, 28, 30, 32, 34, 36, 38, 40, 42, - 44, 46, 48, 50, 52, 54, 56, 58, 60, 62, 64, 66, 68, 70, 72, 74, 76, 78, - 80, 82, 84, 86, 88, 90, 92, 94, 96, 98, 100, 102, 104, 106, 108, 110, 112, - 114, 0, 14, 1, 0, 20, 21, 1, 0, 115, 116, 1, 0, 126, 127, 3, 0, 43, 43, - 47, 47, 58, 58, 1, 0, 55, 56, 1, 0, 38, 41, 1, 0, 74, 75, 1, 0, 101, 102, - 2, 0, 70, 72, 96, 96, 3, 0, 14, 14, 19, 19, 22, 22, 1, 0, 61, 62, 2, 0, - 15, 16, 23, 27, 2, 0, 11, 11, 20, 21, 2, 0, 29, 29, 126, 126, 1332, 0, - 116, 1, 0, 0, 0, 2, 119, 1, 0, 0, 0, 4, 122, 1, 0, 0, 0, 6, 125, 1, 0, - 0, 0, 8, 142, 1, 0, 0, 0, 10, 148, 1, 0, 0, 0, 12, 150, 1, 0, 0, 0, 14, - 158, 1, 0, 0, 0, 16, 170, 1, 0, 0, 0, 18, 173, 1, 0, 0, 0, 20, 175, 1, - 0, 0, 0, 22, 183, 1, 0, 0, 0, 24, 194, 1, 0, 0, 0, 26, 212, 1, 0, 0, 0, - 28, 216, 1, 0, 0, 0, 30, 239, 1, 0, 0, 0, 32, 256, 1, 0, 0, 0, 34, 264, - 1, 0, 0, 0, 36, 273, 1, 0, 0, 0, 38, 299, 1, 0, 0, 0, 40, 323, 1, 0, 0, - 0, 42, 331, 1, 0, 0, 0, 44, 341, 1, 0, 0, 0, 46, 361, 1, 0, 0, 0, 48, 369, - 1, 0, 0, 0, 50, 374, 1, 0, 0, 0, 52, 396, 1, 0, 0, 0, 54, 418, 1, 0, 0, - 0, 56, 430, 1, 0, 0, 0, 58, 444, 1, 0, 0, 0, 60, 456, 1, 0, 0, 0, 62, 464, - 1, 0, 0, 0, 64, 484, 1, 0, 0, 0, 66, 519, 1, 0, 0, 0, 68, 521, 1, 0, 0, - 0, 70, 529, 1, 0, 0, 0, 72, 587, 1, 0, 0, 0, 74, 590, 1, 0, 0, 0, 76, 610, - 1, 0, 0, 0, 78, 612, 1, 0, 0, 0, 80, 643, 1, 0, 0, 0, 82, 647, 1, 0, 0, - 0, 84, 679, 1, 0, 0, 0, 86, 708, 1, 0, 0, 0, 88, 780, 1, 0, 0, 0, 90, 870, - 1, 0, 0, 0, 92, 875, 1, 0, 0, 0, 94, 906, 1, 0, 0, 0, 96, 913, 1, 0, 0, - 0, 98, 936, 1, 0, 0, 0, 100, 941, 1, 0, 0, 0, 102, 975, 1, 0, 0, 0, 104, - 
1035, 1, 0, 0, 0, 106, 1123, 1, 0, 0, 0, 108, 1125, 1, 0, 0, 0, 110, 1145, - 1, 0, 0, 0, 112, 1147, 1, 0, 0, 0, 114, 1157, 1, 0, 0, 0, 116, 117, 3, - 22, 11, 0, 117, 118, 5, 0, 0, 1, 118, 1, 1, 0, 0, 0, 119, 120, 3, 58, 29, - 0, 120, 121, 5, 0, 0, 1, 121, 3, 1, 0, 0, 0, 122, 123, 3, 96, 48, 0, 123, - 124, 5, 0, 0, 1, 124, 5, 1, 0, 0, 0, 125, 126, 3, 100, 50, 0, 126, 127, - 5, 0, 0, 1, 127, 7, 1, 0, 0, 0, 128, 143, 5, 114, 0, 0, 129, 131, 7, 0, - 0, 0, 130, 129, 1, 0, 0, 0, 130, 131, 1, 0, 0, 0, 131, 132, 1, 0, 0, 0, - 132, 143, 5, 117, 0, 0, 133, 135, 7, 0, 0, 0, 134, 133, 1, 0, 0, 0, 134, - 135, 1, 0, 0, 0, 135, 136, 1, 0, 0, 0, 136, 137, 5, 117, 0, 0, 137, 138, - 5, 12, 0, 0, 138, 143, 5, 117, 0, 0, 139, 143, 7, 1, 0, 0, 140, 143, 5, - 52, 0, 0, 141, 143, 5, 118, 0, 0, 142, 128, 1, 0, 0, 0, 142, 130, 1, 0, - 0, 0, 142, 134, 1, 0, 0, 0, 142, 139, 1, 0, 0, 0, 142, 140, 1, 0, 0, 0, - 142, 141, 1, 0, 0, 0, 143, 9, 1, 0, 0, 0, 144, 145, 5, 32, 0, 0, 145, 146, - 5, 125, 0, 0, 146, 149, 5, 32, 0, 0, 147, 149, 5, 125, 0, 0, 148, 144, - 1, 0, 0, 0, 148, 147, 1, 0, 0, 0, 149, 11, 1, 0, 0, 0, 150, 155, 3, 10, - 5, 0, 151, 152, 5, 9, 0, 0, 152, 154, 3, 10, 5, 0, 153, 151, 1, 0, 0, 0, - 154, 157, 1, 0, 0, 0, 155, 153, 1, 0, 0, 0, 155, 156, 1, 0, 0, 0, 156, - 13, 1, 0, 0, 0, 157, 155, 1, 0, 0, 0, 158, 164, 5, 125, 0, 0, 159, 160, - 5, 7, 0, 0, 160, 161, 5, 117, 0, 0, 161, 162, 5, 9, 0, 0, 162, 163, 5, - 117, 0, 0, 163, 165, 5, 8, 0, 0, 164, 159, 1, 0, 0, 0, 164, 165, 1, 0, - 0, 0, 165, 168, 1, 0, 0, 0, 166, 167, 5, 3, 0, 0, 167, 169, 5, 4, 0, 0, - 168, 166, 1, 0, 0, 0, 168, 169, 1, 0, 0, 0, 169, 15, 1, 0, 0, 0, 170, 171, - 5, 28, 0, 0, 171, 172, 3, 14, 7, 0, 172, 17, 1, 0, 0, 0, 173, 174, 7, 2, - 0, 0, 174, 19, 1, 0, 0, 0, 175, 180, 3, 18, 9, 0, 176, 177, 5, 9, 0, 0, - 177, 179, 3, 18, 9, 0, 178, 176, 1, 0, 0, 0, 179, 182, 1, 0, 0, 0, 180, - 178, 1, 0, 0, 0, 180, 181, 1, 0, 0, 0, 181, 21, 1, 0, 0, 0, 182, 180, 1, - 0, 0, 0, 183, 191, 3, 26, 13, 0, 184, 190, 3, 28, 14, 0, 185, 190, 3, 30, - 15, 0, 186, 190, 3, 50, 25, 0, 187, 190, 3, 52, 26, 0, 188, 190, 3, 54, - 27, 0, 189, 184, 1, 0, 0, 0, 189, 185, 1, 0, 0, 0, 189, 186, 1, 0, 0, 0, - 189, 187, 1, 0, 0, 0, 189, 188, 1, 0, 0, 0, 190, 193, 1, 0, 0, 0, 191, - 189, 1, 0, 0, 0, 191, 192, 1, 0, 0, 0, 192, 23, 1, 0, 0, 0, 193, 191, 1, - 0, 0, 0, 194, 195, 5, 127, 0, 0, 195, 208, 5, 7, 0, 0, 196, 197, 5, 125, - 0, 0, 197, 198, 5, 15, 0, 0, 198, 205, 3, 8, 4, 0, 199, 200, 5, 9, 0, 0, - 200, 201, 5, 125, 0, 0, 201, 202, 5, 15, 0, 0, 202, 204, 3, 8, 4, 0, 203, - 199, 1, 0, 0, 0, 204, 207, 1, 0, 0, 0, 205, 203, 1, 0, 0, 0, 205, 206, - 1, 0, 0, 0, 206, 209, 1, 0, 0, 0, 207, 205, 1, 0, 0, 0, 208, 196, 1, 0, - 0, 0, 208, 209, 1, 0, 0, 0, 209, 210, 1, 0, 0, 0, 210, 211, 5, 8, 0, 0, - 211, 25, 1, 0, 0, 0, 212, 213, 5, 33, 0, 0, 213, 214, 5, 125, 0, 0, 214, - 215, 5, 6, 0, 0, 215, 27, 1, 0, 0, 0, 216, 217, 5, 34, 0, 0, 217, 233, - 5, 125, 0, 0, 218, 219, 5, 1, 0, 0, 219, 220, 5, 125, 0, 0, 220, 221, 5, - 5, 0, 0, 221, 228, 3, 8, 4, 0, 222, 223, 5, 9, 0, 0, 223, 224, 5, 125, - 0, 0, 224, 225, 5, 5, 0, 0, 225, 227, 3, 8, 4, 0, 226, 222, 1, 0, 0, 0, - 227, 230, 1, 0, 0, 0, 228, 226, 1, 0, 0, 0, 228, 229, 1, 0, 0, 0, 229, - 231, 1, 0, 0, 0, 230, 228, 1, 0, 0, 0, 231, 232, 5, 2, 0, 0, 232, 234, - 1, 0, 0, 0, 233, 218, 1, 0, 0, 0, 233, 234, 1, 0, 0, 0, 234, 235, 1, 0, - 0, 0, 235, 236, 5, 73, 0, 0, 236, 237, 5, 125, 0, 0, 237, 238, 5, 6, 0, - 0, 238, 29, 1, 0, 0, 0, 239, 240, 5, 35, 0, 0, 240, 241, 5, 125, 0, 0, - 241, 242, 5, 1, 0, 0, 242, 251, 3, 32, 16, 0, 243, 247, 5, 
9, 0, 0, 244, - 248, 3, 32, 16, 0, 245, 248, 3, 34, 17, 0, 246, 248, 3, 36, 18, 0, 247, - 244, 1, 0, 0, 0, 247, 245, 1, 0, 0, 0, 247, 246, 1, 0, 0, 0, 248, 250, - 1, 0, 0, 0, 249, 243, 1, 0, 0, 0, 250, 253, 1, 0, 0, 0, 251, 249, 1, 0, - 0, 0, 251, 252, 1, 0, 0, 0, 252, 254, 1, 0, 0, 0, 253, 251, 1, 0, 0, 0, - 254, 255, 5, 2, 0, 0, 255, 31, 1, 0, 0, 0, 256, 257, 5, 125, 0, 0, 257, - 261, 3, 14, 7, 0, 258, 260, 3, 46, 23, 0, 259, 258, 1, 0, 0, 0, 260, 263, - 1, 0, 0, 0, 261, 259, 1, 0, 0, 0, 261, 262, 1, 0, 0, 0, 262, 33, 1, 0, - 0, 0, 263, 261, 1, 0, 0, 0, 264, 265, 5, 128, 0, 0, 265, 266, 7, 3, 0, - 0, 266, 267, 5, 7, 0, 0, 267, 268, 3, 12, 6, 0, 268, 269, 5, 8, 0, 0, 269, - 35, 1, 0, 0, 0, 270, 271, 5, 42, 0, 0, 271, 274, 5, 44, 0, 0, 272, 274, - 5, 119, 0, 0, 273, 270, 1, 0, 0, 0, 273, 272, 1, 0, 0, 0, 274, 275, 1, - 0, 0, 0, 275, 276, 5, 7, 0, 0, 276, 277, 3, 12, 6, 0, 277, 278, 5, 8, 0, - 0, 278, 279, 7, 4, 0, 0, 279, 280, 5, 125, 0, 0, 280, 281, 5, 7, 0, 0, - 281, 282, 3, 12, 6, 0, 282, 286, 5, 8, 0, 0, 283, 285, 3, 38, 19, 0, 284, - 283, 1, 0, 0, 0, 285, 288, 1, 0, 0, 0, 286, 284, 1, 0, 0, 0, 286, 287, - 1, 0, 0, 0, 287, 37, 1, 0, 0, 0, 288, 286, 1, 0, 0, 0, 289, 290, 5, 45, - 0, 0, 290, 293, 5, 54, 0, 0, 291, 293, 5, 120, 0, 0, 292, 289, 1, 0, 0, - 0, 292, 291, 1, 0, 0, 0, 293, 300, 1, 0, 0, 0, 294, 295, 5, 45, 0, 0, 295, - 298, 5, 53, 0, 0, 296, 298, 5, 121, 0, 0, 297, 294, 1, 0, 0, 0, 297, 296, - 1, 0, 0, 0, 298, 300, 1, 0, 0, 0, 299, 292, 1, 0, 0, 0, 299, 297, 1, 0, - 0, 0, 300, 302, 1, 0, 0, 0, 301, 303, 5, 46, 0, 0, 302, 301, 1, 0, 0, 0, - 302, 303, 1, 0, 0, 0, 303, 321, 1, 0, 0, 0, 304, 305, 5, 83, 0, 0, 305, - 308, 5, 36, 0, 0, 306, 308, 5, 124, 0, 0, 307, 304, 1, 0, 0, 0, 307, 306, - 1, 0, 0, 0, 308, 322, 1, 0, 0, 0, 309, 322, 5, 48, 0, 0, 310, 311, 5, 50, - 0, 0, 311, 314, 5, 52, 0, 0, 312, 314, 5, 123, 0, 0, 313, 310, 1, 0, 0, - 0, 313, 312, 1, 0, 0, 0, 314, 322, 1, 0, 0, 0, 315, 316, 5, 50, 0, 0, 316, - 319, 5, 51, 0, 0, 317, 319, 5, 122, 0, 0, 318, 315, 1, 0, 0, 0, 318, 317, - 1, 0, 0, 0, 319, 322, 1, 0, 0, 0, 320, 322, 5, 49, 0, 0, 321, 307, 1, 0, - 0, 0, 321, 309, 1, 0, 0, 0, 321, 313, 1, 0, 0, 0, 321, 318, 1, 0, 0, 0, - 321, 320, 1, 0, 0, 0, 322, 39, 1, 0, 0, 0, 323, 328, 3, 14, 7, 0, 324, - 325, 5, 9, 0, 0, 325, 327, 3, 14, 7, 0, 326, 324, 1, 0, 0, 0, 327, 330, - 1, 0, 0, 0, 328, 326, 1, 0, 0, 0, 328, 329, 1, 0, 0, 0, 329, 41, 1, 0, - 0, 0, 330, 328, 1, 0, 0, 0, 331, 332, 5, 125, 0, 0, 332, 338, 3, 14, 7, - 0, 333, 334, 5, 9, 0, 0, 334, 335, 5, 125, 0, 0, 335, 337, 3, 14, 7, 0, - 336, 333, 1, 0, 0, 0, 337, 340, 1, 0, 0, 0, 338, 336, 1, 0, 0, 0, 338, - 339, 1, 0, 0, 0, 339, 43, 1, 0, 0, 0, 340, 338, 1, 0, 0, 0, 341, 342, 3, - 18, 9, 0, 342, 349, 3, 14, 7, 0, 343, 344, 5, 9, 0, 0, 344, 345, 3, 18, - 9, 0, 345, 346, 3, 14, 7, 0, 346, 348, 1, 0, 0, 0, 347, 343, 1, 0, 0, 0, - 348, 351, 1, 0, 0, 0, 349, 347, 1, 0, 0, 0, 349, 350, 1, 0, 0, 0, 350, - 45, 1, 0, 0, 0, 351, 349, 1, 0, 0, 0, 352, 362, 5, 125, 0, 0, 353, 355, - 5, 43, 0, 0, 354, 356, 5, 44, 0, 0, 355, 354, 1, 0, 0, 0, 355, 356, 1, - 0, 0, 0, 356, 362, 1, 0, 0, 0, 357, 358, 5, 57, 0, 0, 358, 362, 5, 52, - 0, 0, 359, 362, 5, 51, 0, 0, 360, 362, 5, 47, 0, 0, 361, 352, 1, 0, 0, - 0, 361, 353, 1, 0, 0, 0, 361, 357, 1, 0, 0, 0, 361, 359, 1, 0, 0, 0, 361, - 360, 1, 0, 0, 0, 362, 367, 1, 0, 0, 0, 363, 364, 5, 7, 0, 0, 364, 365, - 3, 8, 4, 0, 365, 366, 5, 8, 0, 0, 366, 368, 1, 0, 0, 0, 367, 363, 1, 0, - 0, 0, 367, 368, 1, 0, 0, 0, 368, 47, 1, 0, 0, 0, 369, 370, 7, 5, 0, 0, - 370, 49, 1, 0, 0, 0, 371, 373, 3, 24, 12, 0, 
372, 371, 1, 0, 0, 0, 373, - 376, 1, 0, 0, 0, 374, 372, 1, 0, 0, 0, 374, 375, 1, 0, 0, 0, 375, 377, - 1, 0, 0, 0, 376, 374, 1, 0, 0, 0, 377, 378, 5, 36, 0, 0, 378, 379, 5, 125, - 0, 0, 379, 381, 5, 7, 0, 0, 380, 382, 3, 20, 10, 0, 381, 380, 1, 0, 0, - 0, 381, 382, 1, 0, 0, 0, 382, 383, 1, 0, 0, 0, 383, 385, 5, 8, 0, 0, 384, - 386, 3, 48, 24, 0, 385, 384, 1, 0, 0, 0, 386, 387, 1, 0, 0, 0, 387, 385, - 1, 0, 0, 0, 387, 388, 1, 0, 0, 0, 388, 389, 1, 0, 0, 0, 389, 390, 5, 1, - 0, 0, 390, 391, 3, 96, 48, 0, 391, 392, 5, 2, 0, 0, 392, 51, 1, 0, 0, 0, - 393, 395, 3, 24, 12, 0, 394, 393, 1, 0, 0, 0, 395, 398, 1, 0, 0, 0, 396, - 394, 1, 0, 0, 0, 396, 397, 1, 0, 0, 0, 397, 399, 1, 0, 0, 0, 398, 396, - 1, 0, 0, 0, 399, 400, 5, 37, 0, 0, 400, 401, 5, 125, 0, 0, 401, 403, 5, - 7, 0, 0, 402, 404, 3, 44, 22, 0, 403, 402, 1, 0, 0, 0, 403, 404, 1, 0, - 0, 0, 404, 405, 1, 0, 0, 0, 405, 407, 5, 8, 0, 0, 406, 408, 3, 48, 24, - 0, 407, 406, 1, 0, 0, 0, 408, 409, 1, 0, 0, 0, 409, 407, 1, 0, 0, 0, 409, - 410, 1, 0, 0, 0, 410, 412, 1, 0, 0, 0, 411, 413, 3, 56, 28, 0, 412, 411, - 1, 0, 0, 0, 412, 413, 1, 0, 0, 0, 413, 414, 1, 0, 0, 0, 414, 415, 5, 1, - 0, 0, 415, 416, 3, 100, 50, 0, 416, 417, 5, 2, 0, 0, 417, 53, 1, 0, 0, - 0, 418, 419, 5, 42, 0, 0, 419, 420, 5, 37, 0, 0, 420, 421, 5, 125, 0, 0, - 421, 424, 5, 7, 0, 0, 422, 425, 3, 40, 20, 0, 423, 425, 3, 44, 22, 0, 424, - 422, 1, 0, 0, 0, 424, 423, 1, 0, 0, 0, 424, 425, 1, 0, 0, 0, 425, 426, - 1, 0, 0, 0, 426, 428, 5, 8, 0, 0, 427, 429, 3, 56, 28, 0, 428, 427, 1, - 0, 0, 0, 428, 429, 1, 0, 0, 0, 429, 55, 1, 0, 0, 0, 430, 442, 5, 82, 0, - 0, 431, 433, 5, 35, 0, 0, 432, 431, 1, 0, 0, 0, 432, 433, 1, 0, 0, 0, 433, - 434, 1, 0, 0, 0, 434, 435, 5, 7, 0, 0, 435, 436, 3, 42, 21, 0, 436, 437, - 5, 8, 0, 0, 437, 443, 1, 0, 0, 0, 438, 439, 5, 7, 0, 0, 439, 440, 3, 40, - 20, 0, 440, 441, 5, 8, 0, 0, 441, 443, 1, 0, 0, 0, 442, 432, 1, 0, 0, 0, - 442, 438, 1, 0, 0, 0, 443, 57, 1, 0, 0, 0, 444, 445, 3, 60, 30, 0, 445, - 446, 5, 6, 0, 0, 446, 59, 1, 0, 0, 0, 447, 448, 5, 84, 0, 0, 448, 453, - 3, 62, 31, 0, 449, 450, 5, 9, 0, 0, 450, 452, 3, 62, 31, 0, 451, 449, 1, - 0, 0, 0, 452, 455, 1, 0, 0, 0, 453, 451, 1, 0, 0, 0, 453, 454, 1, 0, 0, - 0, 454, 457, 1, 0, 0, 0, 455, 453, 1, 0, 0, 0, 456, 447, 1, 0, 0, 0, 456, - 457, 1, 0, 0, 0, 457, 462, 1, 0, 0, 0, 458, 463, 3, 64, 32, 0, 459, 463, - 3, 78, 39, 0, 460, 463, 3, 82, 41, 0, 461, 463, 3, 86, 43, 0, 462, 458, - 1, 0, 0, 0, 462, 459, 1, 0, 0, 0, 462, 460, 1, 0, 0, 0, 462, 461, 1, 0, - 0, 0, 463, 61, 1, 0, 0, 0, 464, 477, 3, 10, 5, 0, 465, 474, 5, 7, 0, 0, - 466, 471, 3, 10, 5, 0, 467, 468, 5, 9, 0, 0, 468, 470, 3, 10, 5, 0, 469, - 467, 1, 0, 0, 0, 470, 473, 1, 0, 0, 0, 471, 469, 1, 0, 0, 0, 471, 472, - 1, 0, 0, 0, 472, 475, 1, 0, 0, 0, 473, 471, 1, 0, 0, 0, 474, 466, 1, 0, - 0, 0, 474, 475, 1, 0, 0, 0, 475, 476, 1, 0, 0, 0, 476, 478, 5, 8, 0, 0, - 477, 465, 1, 0, 0, 0, 477, 478, 1, 0, 0, 0, 478, 479, 1, 0, 0, 0, 479, - 480, 5, 73, 0, 0, 480, 481, 5, 7, 0, 0, 481, 482, 3, 64, 32, 0, 482, 483, - 5, 8, 0, 0, 483, 63, 1, 0, 0, 0, 484, 490, 3, 70, 35, 0, 485, 486, 3, 66, - 33, 0, 486, 487, 3, 70, 35, 0, 487, 489, 1, 0, 0, 0, 488, 485, 1, 0, 0, - 0, 489, 492, 1, 0, 0, 0, 490, 488, 1, 0, 0, 0, 490, 491, 1, 0, 0, 0, 491, - 503, 1, 0, 0, 0, 492, 490, 1, 0, 0, 0, 493, 494, 5, 78, 0, 0, 494, 495, - 5, 79, 0, 0, 495, 500, 3, 68, 34, 0, 496, 497, 5, 9, 0, 0, 497, 499, 3, - 68, 34, 0, 498, 496, 1, 0, 0, 0, 499, 502, 1, 0, 0, 0, 500, 498, 1, 0, - 0, 0, 500, 501, 1, 0, 0, 0, 501, 504, 1, 0, 0, 0, 502, 500, 1, 0, 0, 0, - 503, 493, 1, 0, 0, 0, 503, 
504, 1, 0, 0, 0, 504, 507, 1, 0, 0, 0, 505, - 506, 5, 76, 0, 0, 506, 508, 3, 88, 44, 0, 507, 505, 1, 0, 0, 0, 507, 508, - 1, 0, 0, 0, 508, 511, 1, 0, 0, 0, 509, 510, 5, 77, 0, 0, 510, 512, 3, 88, - 44, 0, 511, 509, 1, 0, 0, 0, 511, 512, 1, 0, 0, 0, 512, 65, 1, 0, 0, 0, - 513, 515, 5, 97, 0, 0, 514, 516, 5, 67, 0, 0, 515, 514, 1, 0, 0, 0, 515, - 516, 1, 0, 0, 0, 516, 520, 1, 0, 0, 0, 517, 520, 5, 98, 0, 0, 518, 520, - 5, 99, 0, 0, 519, 513, 1, 0, 0, 0, 519, 517, 1, 0, 0, 0, 519, 518, 1, 0, - 0, 0, 520, 67, 1, 0, 0, 0, 521, 523, 3, 88, 44, 0, 522, 524, 7, 6, 0, 0, - 523, 522, 1, 0, 0, 0, 523, 524, 1, 0, 0, 0, 524, 527, 1, 0, 0, 0, 525, - 526, 5, 100, 0, 0, 526, 528, 7, 7, 0, 0, 527, 525, 1, 0, 0, 0, 527, 528, - 1, 0, 0, 0, 528, 69, 1, 0, 0, 0, 529, 531, 5, 93, 0, 0, 530, 532, 5, 89, - 0, 0, 531, 530, 1, 0, 0, 0, 531, 532, 1, 0, 0, 0, 532, 533, 1, 0, 0, 0, - 533, 538, 3, 76, 38, 0, 534, 535, 5, 9, 0, 0, 535, 537, 3, 76, 38, 0, 536, - 534, 1, 0, 0, 0, 537, 540, 1, 0, 0, 0, 538, 536, 1, 0, 0, 0, 538, 539, - 1, 0, 0, 0, 539, 549, 1, 0, 0, 0, 540, 538, 1, 0, 0, 0, 541, 542, 5, 90, - 0, 0, 542, 546, 3, 72, 36, 0, 543, 545, 3, 74, 37, 0, 544, 543, 1, 0, 0, - 0, 545, 548, 1, 0, 0, 0, 546, 544, 1, 0, 0, 0, 546, 547, 1, 0, 0, 0, 547, - 550, 1, 0, 0, 0, 548, 546, 1, 0, 0, 0, 549, 541, 1, 0, 0, 0, 549, 550, - 1, 0, 0, 0, 550, 553, 1, 0, 0, 0, 551, 552, 5, 91, 0, 0, 552, 554, 3, 88, - 44, 0, 553, 551, 1, 0, 0, 0, 553, 554, 1, 0, 0, 0, 554, 562, 1, 0, 0, 0, - 555, 556, 5, 80, 0, 0, 556, 557, 5, 79, 0, 0, 557, 560, 3, 92, 46, 0, 558, - 559, 5, 81, 0, 0, 559, 561, 3, 88, 44, 0, 560, 558, 1, 0, 0, 0, 560, 561, - 1, 0, 0, 0, 561, 563, 1, 0, 0, 0, 562, 555, 1, 0, 0, 0, 562, 563, 1, 0, - 0, 0, 563, 71, 1, 0, 0, 0, 564, 569, 3, 10, 5, 0, 565, 567, 5, 73, 0, 0, - 566, 565, 1, 0, 0, 0, 566, 567, 1, 0, 0, 0, 567, 568, 1, 0, 0, 0, 568, - 570, 3, 10, 5, 0, 569, 566, 1, 0, 0, 0, 569, 570, 1, 0, 0, 0, 570, 588, - 1, 0, 0, 0, 571, 572, 5, 7, 0, 0, 572, 573, 3, 64, 32, 0, 573, 578, 5, - 8, 0, 0, 574, 576, 5, 73, 0, 0, 575, 574, 1, 0, 0, 0, 575, 576, 1, 0, 0, - 0, 576, 577, 1, 0, 0, 0, 577, 579, 3, 10, 5, 0, 578, 575, 1, 0, 0, 0, 578, - 579, 1, 0, 0, 0, 579, 588, 1, 0, 0, 0, 580, 582, 3, 94, 47, 0, 581, 583, - 5, 73, 0, 0, 582, 581, 1, 0, 0, 0, 582, 583, 1, 0, 0, 0, 583, 585, 1, 0, - 0, 0, 584, 586, 3, 10, 5, 0, 585, 584, 1, 0, 0, 0, 585, 586, 1, 0, 0, 0, - 586, 588, 1, 0, 0, 0, 587, 564, 1, 0, 0, 0, 587, 571, 1, 0, 0, 0, 587, - 580, 1, 0, 0, 0, 588, 73, 1, 0, 0, 0, 589, 591, 7, 8, 0, 0, 590, 589, 1, - 0, 0, 0, 590, 591, 1, 0, 0, 0, 591, 592, 1, 0, 0, 0, 592, 593, 5, 69, 0, - 0, 593, 594, 3, 72, 36, 0, 594, 595, 5, 45, 0, 0, 595, 596, 3, 88, 44, - 0, 596, 75, 1, 0, 0, 0, 597, 602, 3, 88, 44, 0, 598, 600, 5, 73, 0, 0, - 599, 598, 1, 0, 0, 0, 599, 600, 1, 0, 0, 0, 600, 601, 1, 0, 0, 0, 601, - 603, 3, 10, 5, 0, 602, 599, 1, 0, 0, 0, 602, 603, 1, 0, 0, 0, 603, 611, - 1, 0, 0, 0, 604, 605, 3, 10, 5, 0, 605, 606, 5, 12, 0, 0, 606, 608, 1, - 0, 0, 0, 607, 604, 1, 0, 0, 0, 607, 608, 1, 0, 0, 0, 608, 609, 1, 0, 0, - 0, 609, 611, 5, 14, 0, 0, 610, 597, 1, 0, 0, 0, 610, 607, 1, 0, 0, 0, 611, - 77, 1, 0, 0, 0, 612, 613, 5, 54, 0, 0, 613, 618, 3, 10, 5, 0, 614, 616, - 5, 73, 0, 0, 615, 614, 1, 0, 0, 0, 615, 616, 1, 0, 0, 0, 616, 617, 1, 0, - 0, 0, 617, 619, 3, 10, 5, 0, 618, 615, 1, 0, 0, 0, 618, 619, 1, 0, 0, 0, - 619, 620, 1, 0, 0, 0, 620, 621, 5, 50, 0, 0, 621, 626, 3, 80, 40, 0, 622, - 623, 5, 9, 0, 0, 623, 625, 3, 80, 40, 0, 624, 622, 1, 0, 0, 0, 625, 628, - 1, 0, 0, 0, 626, 624, 1, 0, 0, 0, 626, 627, 1, 0, 0, 0, 627, 637, 1, 0, 
- 0, 0, 628, 626, 1, 0, 0, 0, 629, 630, 5, 90, 0, 0, 630, 634, 3, 72, 36, - 0, 631, 633, 3, 74, 37, 0, 632, 631, 1, 0, 0, 0, 633, 636, 1, 0, 0, 0, - 634, 632, 1, 0, 0, 0, 634, 635, 1, 0, 0, 0, 635, 638, 1, 0, 0, 0, 636, - 634, 1, 0, 0, 0, 637, 629, 1, 0, 0, 0, 637, 638, 1, 0, 0, 0, 638, 641, - 1, 0, 0, 0, 639, 640, 5, 91, 0, 0, 640, 642, 3, 88, 44, 0, 641, 639, 1, - 0, 0, 0, 641, 642, 1, 0, 0, 0, 642, 79, 1, 0, 0, 0, 643, 644, 3, 10, 5, - 0, 644, 645, 5, 15, 0, 0, 645, 646, 3, 88, 44, 0, 646, 81, 1, 0, 0, 0, - 647, 648, 5, 94, 0, 0, 648, 649, 5, 104, 0, 0, 649, 654, 3, 10, 5, 0, 650, - 652, 5, 73, 0, 0, 651, 650, 1, 0, 0, 0, 651, 652, 1, 0, 0, 0, 652, 653, - 1, 0, 0, 0, 653, 655, 3, 10, 5, 0, 654, 651, 1, 0, 0, 0, 654, 655, 1, 0, - 0, 0, 655, 660, 1, 0, 0, 0, 656, 657, 5, 7, 0, 0, 657, 658, 3, 12, 6, 0, - 658, 659, 5, 8, 0, 0, 659, 661, 1, 0, 0, 0, 660, 656, 1, 0, 0, 0, 660, - 661, 1, 0, 0, 0, 661, 662, 1, 0, 0, 0, 662, 663, 5, 95, 0, 0, 663, 664, - 5, 7, 0, 0, 664, 665, 3, 92, 46, 0, 665, 673, 5, 8, 0, 0, 666, 667, 5, - 9, 0, 0, 667, 668, 5, 7, 0, 0, 668, 669, 3, 92, 46, 0, 669, 670, 5, 8, - 0, 0, 670, 672, 1, 0, 0, 0, 671, 666, 1, 0, 0, 0, 672, 675, 1, 0, 0, 0, - 673, 671, 1, 0, 0, 0, 673, 674, 1, 0, 0, 0, 674, 677, 1, 0, 0, 0, 675, - 673, 1, 0, 0, 0, 676, 678, 3, 84, 42, 0, 677, 676, 1, 0, 0, 0, 677, 678, - 1, 0, 0, 0, 678, 83, 1, 0, 0, 0, 679, 680, 5, 45, 0, 0, 680, 688, 5, 105, - 0, 0, 681, 682, 5, 7, 0, 0, 682, 683, 3, 12, 6, 0, 683, 686, 5, 8, 0, 0, - 684, 685, 5, 91, 0, 0, 685, 687, 3, 88, 44, 0, 686, 684, 1, 0, 0, 0, 686, - 687, 1, 0, 0, 0, 687, 689, 1, 0, 0, 0, 688, 681, 1, 0, 0, 0, 688, 689, - 1, 0, 0, 0, 689, 690, 1, 0, 0, 0, 690, 706, 5, 46, 0, 0, 691, 707, 5, 106, - 0, 0, 692, 693, 5, 54, 0, 0, 693, 694, 5, 50, 0, 0, 694, 699, 3, 80, 40, - 0, 695, 696, 5, 9, 0, 0, 696, 698, 3, 80, 40, 0, 697, 695, 1, 0, 0, 0, - 698, 701, 1, 0, 0, 0, 699, 697, 1, 0, 0, 0, 699, 700, 1, 0, 0, 0, 700, - 704, 1, 0, 0, 0, 701, 699, 1, 0, 0, 0, 702, 703, 5, 91, 0, 0, 703, 705, - 3, 88, 44, 0, 704, 702, 1, 0, 0, 0, 704, 705, 1, 0, 0, 0, 705, 707, 1, - 0, 0, 0, 706, 691, 1, 0, 0, 0, 706, 692, 1, 0, 0, 0, 707, 85, 1, 0, 0, - 0, 708, 709, 5, 53, 0, 0, 709, 710, 5, 90, 0, 0, 710, 715, 3, 10, 5, 0, - 711, 713, 5, 73, 0, 0, 712, 711, 1, 0, 0, 0, 712, 713, 1, 0, 0, 0, 713, - 714, 1, 0, 0, 0, 714, 716, 3, 10, 5, 0, 715, 712, 1, 0, 0, 0, 715, 716, - 1, 0, 0, 0, 716, 719, 1, 0, 0, 0, 717, 718, 5, 91, 0, 0, 718, 720, 3, 88, - 44, 0, 719, 717, 1, 0, 0, 0, 719, 720, 1, 0, 0, 0, 720, 87, 1, 0, 0, 0, - 721, 722, 6, 44, -1, 0, 722, 723, 5, 7, 0, 0, 723, 724, 3, 88, 44, 0, 724, - 726, 5, 8, 0, 0, 725, 727, 3, 16, 8, 0, 726, 725, 1, 0, 0, 0, 726, 727, - 1, 0, 0, 0, 727, 781, 1, 0, 0, 0, 728, 729, 7, 0, 0, 0, 729, 781, 3, 88, - 44, 19, 730, 732, 3, 8, 4, 0, 731, 733, 3, 16, 8, 0, 732, 731, 1, 0, 0, - 0, 732, 733, 1, 0, 0, 0, 733, 781, 1, 0, 0, 0, 734, 736, 3, 94, 47, 0, - 735, 737, 3, 16, 8, 0, 736, 735, 1, 0, 0, 0, 736, 737, 1, 0, 0, 0, 737, - 781, 1, 0, 0, 0, 738, 740, 3, 18, 9, 0, 739, 741, 3, 16, 8, 0, 740, 739, - 1, 0, 0, 0, 740, 741, 1, 0, 0, 0, 741, 781, 1, 0, 0, 0, 742, 743, 3, 10, - 5, 0, 743, 744, 5, 12, 0, 0, 744, 746, 1, 0, 0, 0, 745, 742, 1, 0, 0, 0, - 745, 746, 1, 0, 0, 0, 746, 747, 1, 0, 0, 0, 747, 749, 3, 10, 5, 0, 748, - 750, 3, 16, 8, 0, 749, 748, 1, 0, 0, 0, 749, 750, 1, 0, 0, 0, 750, 781, - 1, 0, 0, 0, 751, 753, 5, 85, 0, 0, 752, 754, 3, 88, 44, 0, 753, 752, 1, - 0, 0, 0, 753, 754, 1, 0, 0, 0, 754, 756, 1, 0, 0, 0, 755, 757, 3, 90, 45, - 0, 756, 755, 1, 0, 0, 0, 757, 758, 1, 0, 0, 0, 758, 756, 1, 0, 
0, 0, 758, - 759, 1, 0, 0, 0, 759, 762, 1, 0, 0, 0, 760, 761, 5, 110, 0, 0, 761, 763, - 3, 88, 44, 0, 762, 760, 1, 0, 0, 0, 762, 763, 1, 0, 0, 0, 763, 764, 1, - 0, 0, 0, 764, 765, 5, 88, 0, 0, 765, 781, 1, 0, 0, 0, 766, 768, 5, 57, - 0, 0, 767, 766, 1, 0, 0, 0, 767, 768, 1, 0, 0, 0, 768, 769, 1, 0, 0, 0, - 769, 771, 5, 66, 0, 0, 770, 767, 1, 0, 0, 0, 770, 771, 1, 0, 0, 0, 771, - 772, 1, 0, 0, 0, 772, 773, 5, 7, 0, 0, 773, 774, 3, 64, 32, 0, 774, 776, - 5, 8, 0, 0, 775, 777, 3, 16, 8, 0, 776, 775, 1, 0, 0, 0, 776, 777, 1, 0, - 0, 0, 777, 781, 1, 0, 0, 0, 778, 779, 5, 57, 0, 0, 779, 781, 3, 88, 44, - 3, 780, 721, 1, 0, 0, 0, 780, 728, 1, 0, 0, 0, 780, 730, 1, 0, 0, 0, 780, - 734, 1, 0, 0, 0, 780, 738, 1, 0, 0, 0, 780, 745, 1, 0, 0, 0, 780, 751, - 1, 0, 0, 0, 780, 770, 1, 0, 0, 0, 780, 778, 1, 0, 0, 0, 781, 867, 1, 0, - 0, 0, 782, 783, 10, 17, 0, 0, 783, 784, 7, 9, 0, 0, 784, 866, 3, 88, 44, - 18, 785, 786, 10, 16, 0, 0, 786, 787, 7, 0, 0, 0, 787, 866, 3, 88, 44, - 17, 788, 789, 10, 9, 0, 0, 789, 790, 5, 13, 0, 0, 790, 866, 3, 88, 44, - 10, 791, 793, 10, 7, 0, 0, 792, 794, 5, 57, 0, 0, 793, 792, 1, 0, 0, 0, - 793, 794, 1, 0, 0, 0, 794, 795, 1, 0, 0, 0, 795, 796, 7, 10, 0, 0, 796, - 866, 3, 88, 44, 8, 797, 799, 10, 6, 0, 0, 798, 800, 5, 57, 0, 0, 799, 798, - 1, 0, 0, 0, 799, 800, 1, 0, 0, 0, 800, 801, 1, 0, 0, 0, 801, 802, 5, 64, - 0, 0, 802, 803, 3, 88, 44, 0, 803, 804, 5, 59, 0, 0, 804, 805, 3, 88, 44, - 7, 805, 866, 1, 0, 0, 0, 806, 807, 10, 5, 0, 0, 807, 808, 7, 11, 0, 0, - 808, 866, 3, 88, 44, 6, 809, 810, 10, 2, 0, 0, 810, 811, 5, 59, 0, 0, 811, - 866, 3, 88, 44, 3, 812, 813, 10, 1, 0, 0, 813, 814, 5, 60, 0, 0, 814, 866, - 3, 88, 44, 2, 815, 816, 10, 21, 0, 0, 816, 817, 5, 12, 0, 0, 817, 819, - 3, 10, 5, 0, 818, 820, 3, 16, 8, 0, 819, 818, 1, 0, 0, 0, 819, 820, 1, - 0, 0, 0, 820, 866, 1, 0, 0, 0, 821, 822, 10, 20, 0, 0, 822, 831, 5, 3, - 0, 0, 823, 832, 3, 88, 44, 0, 824, 826, 3, 88, 44, 0, 825, 824, 1, 0, 0, - 0, 825, 826, 1, 0, 0, 0, 826, 827, 1, 0, 0, 0, 827, 829, 5, 5, 0, 0, 828, - 830, 3, 88, 44, 0, 829, 828, 1, 0, 0, 0, 829, 830, 1, 0, 0, 0, 830, 832, - 1, 0, 0, 0, 831, 823, 1, 0, 0, 0, 831, 825, 1, 0, 0, 0, 832, 833, 1, 0, - 0, 0, 833, 835, 5, 4, 0, 0, 834, 836, 3, 16, 8, 0, 835, 834, 1, 0, 0, 0, - 835, 836, 1, 0, 0, 0, 836, 866, 1, 0, 0, 0, 837, 838, 10, 18, 0, 0, 838, - 839, 5, 92, 0, 0, 839, 866, 3, 10, 5, 0, 840, 842, 10, 8, 0, 0, 841, 843, - 5, 57, 0, 0, 842, 841, 1, 0, 0, 0, 842, 843, 1, 0, 0, 0, 843, 844, 1, 0, - 0, 0, 844, 845, 5, 63, 0, 0, 845, 848, 5, 7, 0, 0, 846, 849, 3, 92, 46, - 0, 847, 849, 3, 64, 32, 0, 848, 846, 1, 0, 0, 0, 848, 847, 1, 0, 0, 0, - 849, 850, 1, 0, 0, 0, 850, 851, 5, 8, 0, 0, 851, 866, 1, 0, 0, 0, 852, - 853, 10, 4, 0, 0, 853, 855, 5, 65, 0, 0, 854, 856, 5, 57, 0, 0, 855, 854, - 1, 0, 0, 0, 855, 856, 1, 0, 0, 0, 856, 863, 1, 0, 0, 0, 857, 858, 5, 89, - 0, 0, 858, 859, 5, 90, 0, 0, 859, 864, 3, 88, 44, 0, 860, 864, 5, 52, 0, - 0, 861, 864, 5, 115, 0, 0, 862, 864, 5, 116, 0, 0, 863, 857, 1, 0, 0, 0, - 863, 860, 1, 0, 0, 0, 863, 861, 1, 0, 0, 0, 863, 862, 1, 0, 0, 0, 864, - 866, 1, 0, 0, 0, 865, 782, 1, 0, 0, 0, 865, 785, 1, 0, 0, 0, 865, 788, - 1, 0, 0, 0, 865, 791, 1, 0, 0, 0, 865, 797, 1, 0, 0, 0, 865, 806, 1, 0, - 0, 0, 865, 809, 1, 0, 0, 0, 865, 812, 1, 0, 0, 0, 865, 815, 1, 0, 0, 0, - 865, 821, 1, 0, 0, 0, 865, 837, 1, 0, 0, 0, 865, 840, 1, 0, 0, 0, 865, - 852, 1, 0, 0, 0, 866, 869, 1, 0, 0, 0, 867, 865, 1, 0, 0, 0, 867, 868, - 1, 0, 0, 0, 868, 89, 1, 0, 0, 0, 869, 867, 1, 0, 0, 0, 870, 871, 5, 86, - 0, 0, 871, 872, 3, 88, 44, 0, 872, 873, 5, 87, 
0, 0, 873, 874, 3, 88, 44, - 0, 874, 91, 1, 0, 0, 0, 875, 880, 3, 88, 44, 0, 876, 877, 5, 9, 0, 0, 877, - 879, 3, 88, 44, 0, 878, 876, 1, 0, 0, 0, 879, 882, 1, 0, 0, 0, 880, 878, - 1, 0, 0, 0, 880, 881, 1, 0, 0, 0, 881, 93, 1, 0, 0, 0, 882, 880, 1, 0, - 0, 0, 883, 884, 3, 10, 5, 0, 884, 890, 5, 7, 0, 0, 885, 887, 5, 89, 0, - 0, 886, 885, 1, 0, 0, 0, 886, 887, 1, 0, 0, 0, 887, 888, 1, 0, 0, 0, 888, - 891, 3, 92, 46, 0, 889, 891, 5, 14, 0, 0, 890, 886, 1, 0, 0, 0, 890, 889, - 1, 0, 0, 0, 890, 891, 1, 0, 0, 0, 891, 892, 1, 0, 0, 0, 892, 893, 5, 8, - 0, 0, 893, 907, 1, 0, 0, 0, 894, 895, 3, 10, 5, 0, 895, 896, 5, 3, 0, 0, - 896, 897, 3, 88, 44, 0, 897, 898, 5, 9, 0, 0, 898, 899, 3, 88, 44, 0, 899, - 900, 5, 4, 0, 0, 900, 902, 5, 7, 0, 0, 901, 903, 3, 92, 46, 0, 902, 901, - 1, 0, 0, 0, 902, 903, 1, 0, 0, 0, 903, 904, 1, 0, 0, 0, 904, 905, 5, 8, - 0, 0, 905, 907, 1, 0, 0, 0, 906, 883, 1, 0, 0, 0, 906, 894, 1, 0, 0, 0, - 907, 95, 1, 0, 0, 0, 908, 909, 3, 98, 49, 0, 909, 910, 5, 6, 0, 0, 910, - 912, 1, 0, 0, 0, 911, 908, 1, 0, 0, 0, 912, 915, 1, 0, 0, 0, 913, 911, - 1, 0, 0, 0, 913, 914, 1, 0, 0, 0, 914, 97, 1, 0, 0, 0, 915, 913, 1, 0, - 0, 0, 916, 937, 3, 60, 30, 0, 917, 918, 5, 125, 0, 0, 918, 920, 5, 7, 0, - 0, 919, 921, 3, 104, 52, 0, 920, 919, 1, 0, 0, 0, 920, 921, 1, 0, 0, 0, - 921, 922, 1, 0, 0, 0, 922, 937, 5, 8, 0, 0, 923, 924, 3, 20, 10, 0, 924, - 925, 5, 15, 0, 0, 925, 927, 1, 0, 0, 0, 926, 923, 1, 0, 0, 0, 926, 927, - 1, 0, 0, 0, 927, 928, 1, 0, 0, 0, 928, 929, 5, 125, 0, 0, 929, 930, 5, - 12, 0, 0, 930, 931, 5, 125, 0, 0, 931, 933, 5, 7, 0, 0, 932, 934, 3, 104, - 52, 0, 933, 932, 1, 0, 0, 0, 933, 934, 1, 0, 0, 0, 934, 935, 1, 0, 0, 0, - 935, 937, 5, 8, 0, 0, 936, 916, 1, 0, 0, 0, 936, 917, 1, 0, 0, 0, 936, - 926, 1, 0, 0, 0, 937, 99, 1, 0, 0, 0, 938, 940, 3, 106, 53, 0, 939, 938, - 1, 0, 0, 0, 940, 943, 1, 0, 0, 0, 941, 939, 1, 0, 0, 0, 941, 942, 1, 0, - 0, 0, 942, 101, 1, 0, 0, 0, 943, 941, 1, 0, 0, 0, 944, 945, 6, 51, -1, - 0, 945, 946, 5, 7, 0, 0, 946, 947, 3, 102, 51, 0, 947, 949, 5, 8, 0, 0, - 948, 950, 3, 16, 8, 0, 949, 948, 1, 0, 0, 0, 949, 950, 1, 0, 0, 0, 950, - 976, 1, 0, 0, 0, 951, 952, 7, 12, 0, 0, 952, 976, 3, 102, 51, 13, 953, - 955, 3, 8, 4, 0, 954, 956, 3, 16, 8, 0, 955, 954, 1, 0, 0, 0, 955, 956, - 1, 0, 0, 0, 956, 976, 1, 0, 0, 0, 957, 959, 3, 110, 55, 0, 958, 960, 3, - 16, 8, 0, 959, 958, 1, 0, 0, 0, 959, 960, 1, 0, 0, 0, 960, 976, 1, 0, 0, - 0, 961, 963, 3, 18, 9, 0, 962, 964, 3, 16, 8, 0, 963, 962, 1, 0, 0, 0, - 963, 964, 1, 0, 0, 0, 964, 976, 1, 0, 0, 0, 965, 967, 5, 3, 0, 0, 966, - 968, 3, 104, 52, 0, 967, 966, 1, 0, 0, 0, 967, 968, 1, 0, 0, 0, 968, 969, - 1, 0, 0, 0, 969, 971, 5, 4, 0, 0, 970, 972, 3, 16, 8, 0, 971, 970, 1, 0, - 0, 0, 971, 972, 1, 0, 0, 0, 972, 976, 1, 0, 0, 0, 973, 974, 5, 57, 0, 0, - 974, 976, 3, 102, 51, 3, 975, 944, 1, 0, 0, 0, 975, 951, 1, 0, 0, 0, 975, - 953, 1, 0, 0, 0, 975, 957, 1, 0, 0, 0, 975, 961, 1, 0, 0, 0, 975, 965, - 1, 0, 0, 0, 975, 973, 1, 0, 0, 0, 976, 1032, 1, 0, 0, 0, 977, 978, 10, - 12, 0, 0, 978, 979, 7, 9, 0, 0, 979, 1031, 3, 102, 51, 13, 980, 981, 10, - 11, 0, 0, 981, 982, 7, 0, 0, 0, 982, 1031, 3, 102, 51, 12, 983, 984, 10, - 6, 0, 0, 984, 985, 5, 13, 0, 0, 985, 1031, 3, 102, 51, 7, 986, 987, 10, - 5, 0, 0, 987, 988, 7, 11, 0, 0, 988, 1031, 3, 102, 51, 6, 989, 990, 10, - 2, 0, 0, 990, 991, 5, 59, 0, 0, 991, 1031, 3, 102, 51, 3, 992, 993, 10, - 1, 0, 0, 993, 994, 5, 60, 0, 0, 994, 1031, 3, 102, 51, 2, 995, 996, 10, - 15, 0, 0, 996, 997, 5, 12, 0, 0, 997, 999, 5, 125, 0, 0, 998, 1000, 3, - 16, 8, 0, 999, 998, 1, 0, 0, 0, 
999, 1000, 1, 0, 0, 0, 1000, 1031, 1, 0, - 0, 0, 1001, 1002, 10, 14, 0, 0, 1002, 1011, 5, 3, 0, 0, 1003, 1012, 3, - 102, 51, 0, 1004, 1006, 3, 102, 51, 0, 1005, 1004, 1, 0, 0, 0, 1005, 1006, - 1, 0, 0, 0, 1006, 1007, 1, 0, 0, 0, 1007, 1009, 5, 5, 0, 0, 1008, 1010, - 3, 102, 51, 0, 1009, 1008, 1, 0, 0, 0, 1009, 1010, 1, 0, 0, 0, 1010, 1012, - 1, 0, 0, 0, 1011, 1003, 1, 0, 0, 0, 1011, 1005, 1, 0, 0, 0, 1012, 1013, - 1, 0, 0, 0, 1013, 1015, 5, 4, 0, 0, 1014, 1016, 3, 16, 8, 0, 1015, 1014, - 1, 0, 0, 0, 1015, 1016, 1, 0, 0, 0, 1016, 1031, 1, 0, 0, 0, 1017, 1018, - 10, 4, 0, 0, 1018, 1020, 5, 65, 0, 0, 1019, 1021, 5, 57, 0, 0, 1020, 1019, - 1, 0, 0, 0, 1020, 1021, 1, 0, 0, 0, 1021, 1028, 1, 0, 0, 0, 1022, 1023, - 5, 89, 0, 0, 1023, 1024, 5, 90, 0, 0, 1024, 1029, 3, 102, 51, 0, 1025, - 1029, 5, 52, 0, 0, 1026, 1029, 5, 115, 0, 0, 1027, 1029, 5, 116, 0, 0, - 1028, 1022, 1, 0, 0, 0, 1028, 1025, 1, 0, 0, 0, 1028, 1026, 1, 0, 0, 0, - 1028, 1027, 1, 0, 0, 0, 1029, 1031, 1, 0, 0, 0, 1030, 977, 1, 0, 0, 0, - 1030, 980, 1, 0, 0, 0, 1030, 983, 1, 0, 0, 0, 1030, 986, 1, 0, 0, 0, 1030, - 989, 1, 0, 0, 0, 1030, 992, 1, 0, 0, 0, 1030, 995, 1, 0, 0, 0, 1030, 1001, - 1, 0, 0, 0, 1030, 1017, 1, 0, 0, 0, 1031, 1034, 1, 0, 0, 0, 1032, 1030, - 1, 0, 0, 0, 1032, 1033, 1, 0, 0, 0, 1033, 103, 1, 0, 0, 0, 1034, 1032, - 1, 0, 0, 0, 1035, 1040, 3, 102, 51, 0, 1036, 1037, 5, 9, 0, 0, 1037, 1039, - 3, 102, 51, 0, 1038, 1036, 1, 0, 0, 0, 1039, 1042, 1, 0, 0, 0, 1040, 1038, - 1, 0, 0, 0, 1040, 1041, 1, 0, 0, 0, 1041, 105, 1, 0, 0, 0, 1042, 1040, - 1, 0, 0, 0, 1043, 1044, 5, 126, 0, 0, 1044, 1045, 3, 14, 7, 0, 1045, 1046, - 5, 6, 0, 0, 1046, 1124, 1, 0, 0, 0, 1047, 1052, 3, 108, 54, 0, 1048, 1049, - 5, 9, 0, 0, 1049, 1051, 3, 108, 54, 0, 1050, 1048, 1, 0, 0, 0, 1051, 1054, - 1, 0, 0, 0, 1052, 1050, 1, 0, 0, 0, 1052, 1053, 1, 0, 0, 0, 1053, 1055, - 1, 0, 0, 0, 1054, 1052, 1, 0, 0, 0, 1055, 1056, 5, 30, 0, 0, 1056, 1058, - 1, 0, 0, 0, 1057, 1047, 1, 0, 0, 0, 1057, 1058, 1, 0, 0, 0, 1058, 1059, - 1, 0, 0, 0, 1059, 1060, 3, 110, 55, 0, 1060, 1061, 5, 6, 0, 0, 1061, 1124, - 1, 0, 0, 0, 1062, 1064, 3, 102, 51, 0, 1063, 1065, 3, 14, 7, 0, 1064, 1063, - 1, 0, 0, 0, 1064, 1065, 1, 0, 0, 0, 1065, 1066, 1, 0, 0, 0, 1066, 1067, - 5, 30, 0, 0, 1067, 1068, 3, 102, 51, 0, 1068, 1069, 5, 6, 0, 0, 1069, 1124, - 1, 0, 0, 0, 1070, 1071, 5, 107, 0, 0, 1071, 1072, 5, 126, 0, 0, 1072, 1076, - 5, 63, 0, 0, 1073, 1077, 3, 114, 57, 0, 1074, 1077, 3, 18, 9, 0, 1075, - 1077, 3, 60, 30, 0, 1076, 1073, 1, 0, 0, 0, 1076, 1074, 1, 0, 0, 0, 1076, - 1075, 1, 0, 0, 0, 1077, 1078, 1, 0, 0, 0, 1078, 1082, 5, 1, 0, 0, 1079, - 1081, 3, 106, 53, 0, 1080, 1079, 1, 0, 0, 0, 1081, 1084, 1, 0, 0, 0, 1082, - 1080, 1, 0, 0, 0, 1082, 1083, 1, 0, 0, 0, 1083, 1085, 1, 0, 0, 0, 1084, - 1082, 1, 0, 0, 0, 1085, 1086, 5, 2, 0, 0, 1086, 1124, 1, 0, 0, 0, 1087, - 1088, 5, 108, 0, 0, 1088, 1093, 3, 112, 56, 0, 1089, 1090, 5, 109, 0, 0, - 1090, 1092, 3, 112, 56, 0, 1091, 1089, 1, 0, 0, 0, 1092, 1095, 1, 0, 0, - 0, 1093, 1091, 1, 0, 0, 0, 1093, 1094, 1, 0, 0, 0, 1094, 1105, 1, 0, 0, - 0, 1095, 1093, 1, 0, 0, 0, 1096, 1097, 5, 110, 0, 0, 1097, 1101, 5, 1, - 0, 0, 1098, 1100, 3, 106, 53, 0, 1099, 1098, 1, 0, 0, 0, 1100, 1103, 1, - 0, 0, 0, 1101, 1099, 1, 0, 0, 0, 1101, 1102, 1, 0, 0, 0, 1102, 1104, 1, - 0, 0, 0, 1103, 1101, 1, 0, 0, 0, 1104, 1106, 5, 2, 0, 0, 1105, 1096, 1, - 0, 0, 0, 1105, 1106, 1, 0, 0, 0, 1106, 1124, 1, 0, 0, 0, 1107, 1108, 3, - 60, 30, 0, 1108, 1109, 5, 6, 0, 0, 1109, 1124, 1, 0, 0, 0, 1110, 1111, - 5, 111, 0, 0, 1111, 1124, 5, 6, 0, 0, 1112, 1115, 5, 112, 0, 0, 
1113, 1116, - 3, 104, 52, 0, 1114, 1116, 3, 60, 30, 0, 1115, 1113, 1, 0, 0, 0, 1115, - 1114, 1, 0, 0, 0, 1115, 1116, 1, 0, 0, 0, 1116, 1117, 1, 0, 0, 0, 1117, - 1124, 5, 6, 0, 0, 1118, 1119, 5, 112, 0, 0, 1119, 1120, 5, 113, 0, 0, 1120, - 1121, 3, 104, 52, 0, 1121, 1122, 5, 6, 0, 0, 1122, 1124, 1, 0, 0, 0, 1123, - 1043, 1, 0, 0, 0, 1123, 1057, 1, 0, 0, 0, 1123, 1062, 1, 0, 0, 0, 1123, - 1070, 1, 0, 0, 0, 1123, 1087, 1, 0, 0, 0, 1123, 1107, 1, 0, 0, 0, 1123, - 1110, 1, 0, 0, 0, 1123, 1112, 1, 0, 0, 0, 1123, 1118, 1, 0, 0, 0, 1124, - 107, 1, 0, 0, 0, 1125, 1126, 7, 13, 0, 0, 1126, 109, 1, 0, 0, 0, 1127, - 1128, 5, 125, 0, 0, 1128, 1130, 5, 7, 0, 0, 1129, 1131, 3, 104, 52, 0, - 1130, 1129, 1, 0, 0, 0, 1130, 1131, 1, 0, 0, 0, 1131, 1132, 1, 0, 0, 0, - 1132, 1146, 5, 8, 0, 0, 1133, 1134, 5, 125, 0, 0, 1134, 1135, 5, 3, 0, - 0, 1135, 1136, 3, 102, 51, 0, 1136, 1137, 5, 9, 0, 0, 1137, 1138, 3, 102, - 51, 0, 1138, 1139, 5, 4, 0, 0, 1139, 1141, 5, 7, 0, 0, 1140, 1142, 3, 104, - 52, 0, 1141, 1140, 1, 0, 0, 0, 1141, 1142, 1, 0, 0, 0, 1142, 1143, 1, 0, - 0, 0, 1143, 1144, 5, 8, 0, 0, 1144, 1146, 1, 0, 0, 0, 1145, 1127, 1, 0, - 0, 0, 1145, 1133, 1, 0, 0, 0, 1146, 111, 1, 0, 0, 0, 1147, 1148, 3, 102, - 51, 0, 1148, 1152, 5, 1, 0, 0, 1149, 1151, 3, 106, 53, 0, 1150, 1149, 1, - 0, 0, 0, 1151, 1154, 1, 0, 0, 0, 1152, 1150, 1, 0, 0, 0, 1152, 1153, 1, - 0, 0, 0, 1153, 1155, 1, 0, 0, 0, 1154, 1152, 1, 0, 0, 0, 1155, 1156, 5, - 2, 0, 0, 1156, 113, 1, 0, 0, 0, 1157, 1158, 3, 102, 51, 0, 1158, 1159, - 5, 31, 0, 0, 1159, 1160, 3, 102, 51, 0, 1160, 115, 1, 0, 0, 0, 165, 130, - 134, 142, 148, 155, 164, 168, 180, 189, 191, 205, 208, 228, 233, 247, 251, - 261, 273, 286, 292, 297, 299, 302, 307, 313, 318, 321, 328, 338, 349, 355, - 361, 367, 374, 381, 387, 396, 403, 409, 412, 424, 428, 432, 442, 453, 456, - 462, 471, 474, 477, 490, 500, 503, 507, 511, 515, 519, 523, 527, 531, 538, - 546, 549, 553, 560, 562, 566, 569, 575, 578, 582, 585, 587, 590, 599, 602, - 607, 610, 615, 618, 626, 634, 637, 641, 651, 654, 660, 673, 677, 686, 688, - 699, 704, 706, 712, 715, 719, 726, 732, 736, 740, 745, 749, 753, 758, 762, - 767, 770, 776, 780, 793, 799, 819, 825, 829, 831, 835, 842, 848, 855, 863, - 865, 867, 880, 886, 890, 902, 906, 913, 920, 926, 933, 936, 941, 949, 955, - 959, 963, 967, 971, 975, 999, 1005, 1009, 1011, 1015, 1020, 1028, 1030, - 1032, 1040, 1052, 1057, 1064, 1076, 1082, 1093, 1101, 1105, 1115, 1123, - 1130, 1141, 1145, 1152, + 57, 2, 58, 7, 58, 2, 59, 7, 59, 2, 60, 7, 60, 2, 61, 7, 61, 1, 0, 1, 0, + 1, 0, 5, 0, 128, 8, 0, 10, 0, 12, 0, 131, 9, 0, 1, 0, 3, 0, 134, 8, 0, + 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 3, 1, 142, 8, 1, 1, 1, 1, 1, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, + 1, 1, 1, 1, 3, 1, 161, 8, 1, 1, 2, 1, 2, 3, 2, 165, 8, 2, 1, 2, 1, 2, 3, + 2, 169, 8, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 3, 2, 177, 8, 2, 1, 3, + 1, 3, 1, 3, 1, 3, 3, 3, 183, 8, 3, 1, 4, 1, 4, 1, 4, 5, 4, 188, 8, 4, 10, + 4, 12, 4, 191, 9, 4, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 3, 5, 199, 8, + 5, 1, 5, 1, 5, 3, 5, 203, 8, 5, 1, 6, 1, 6, 1, 6, 1, 7, 1, 7, 1, 8, 1, + 8, 1, 8, 5, 8, 213, 8, 8, 10, 8, 12, 8, 216, 9, 8, 1, 9, 1, 9, 1, 9, 5, + 9, 221, 8, 9, 10, 9, 12, 9, 224, 9, 9, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, + 1, 10, 5, 10, 232, 8, 10, 10, 10, 12, 10, 235, 9, 10, 1, 11, 1, 11, 1, + 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, + 3, 11, 250, 8, 11, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, + 12, 1, 12, 1, 12, 3, 12, 262, 8, 12, 1, 13, 1, 13, 1, 13, 1, 13, 3, 
13, + 268, 8, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 3, 13, 276, 8, 13, + 3, 13, 278, 8, 13, 1, 14, 1, 14, 3, 14, 282, 8, 14, 1, 14, 1, 14, 1, 14, + 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 3, 14, 292, 8, 14, 1, 15, 1, 15, 3, + 15, 296, 8, 15, 1, 15, 1, 15, 1, 15, 5, 15, 301, 8, 15, 10, 15, 12, 15, + 304, 9, 15, 3, 15, 306, 8, 15, 1, 15, 1, 15, 1, 15, 1, 15, 3, 15, 312, + 8, 15, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 5, 16, 319, 8, 16, 10, 16, 12, + 16, 322, 9, 16, 3, 16, 324, 8, 16, 1, 16, 3, 16, 327, 8, 16, 1, 16, 1, + 16, 1, 16, 1, 16, 1, 16, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 3, 17, 339, + 8, 17, 1, 17, 1, 17, 1, 17, 1, 17, 3, 17, 345, 8, 17, 1, 17, 1, 17, 1, + 17, 3, 17, 350, 8, 17, 5, 17, 352, 8, 17, 10, 17, 12, 17, 355, 9, 17, 1, + 17, 1, 17, 1, 18, 1, 18, 3, 18, 361, 8, 18, 1, 18, 1, 18, 1, 18, 1, 18, + 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, + 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 3, 18, 386, + 8, 18, 1, 19, 1, 19, 1, 20, 1, 20, 1, 20, 1, 20, 3, 20, 394, 8, 20, 1, + 20, 1, 20, 3, 20, 398, 8, 20, 1, 21, 1, 21, 1, 21, 1, 21, 1, 21, 1, 22, + 1, 22, 1, 22, 1, 22, 1, 22, 1, 22, 1, 22, 1, 22, 3, 22, 413, 8, 22, 1, + 22, 1, 22, 1, 22, 1, 22, 1, 22, 1, 22, 1, 22, 3, 22, 422, 8, 22, 1, 22, + 1, 22, 1, 22, 1, 22, 1, 22, 1, 22, 1, 22, 1, 22, 1, 22, 1, 22, 1, 22, 1, + 22, 1, 22, 1, 22, 1, 22, 1, 22, 1, 22, 1, 22, 1, 22, 1, 22, 1, 22, 1, 22, + 3, 22, 446, 8, 22, 1, 23, 1, 23, 3, 23, 450, 8, 23, 1, 23, 1, 23, 1, 23, + 1, 23, 3, 23, 456, 8, 23, 1, 23, 3, 23, 459, 8, 23, 1, 23, 1, 23, 1, 23, + 1, 23, 1, 23, 1, 23, 1, 24, 1, 24, 1, 24, 1, 24, 3, 24, 471, 8, 24, 1, + 24, 1, 24, 1, 25, 1, 25, 1, 25, 1, 25, 1, 25, 3, 25, 480, 8, 25, 1, 25, + 1, 25, 1, 26, 1, 26, 1, 26, 1, 26, 3, 26, 488, 8, 26, 1, 26, 1, 26, 1, + 27, 1, 27, 1, 27, 3, 27, 495, 8, 27, 1, 27, 1, 27, 3, 27, 499, 8, 27, 1, + 27, 1, 27, 1, 27, 3, 27, 504, 8, 27, 1, 28, 1, 28, 1, 28, 3, 28, 509, 8, + 28, 1, 28, 1, 28, 3, 28, 513, 8, 28, 1, 28, 1, 28, 1, 28, 3, 28, 518, 8, + 28, 1, 29, 1, 29, 1, 29, 5, 29, 523, 8, 29, 10, 29, 12, 29, 526, 9, 29, + 1, 30, 1, 30, 1, 31, 1, 31, 1, 31, 1, 31, 1, 31, 1, 32, 1, 32, 1, 32, 1, + 32, 1, 32, 1, 32, 1, 32, 3, 32, 542, 8, 32, 1, 32, 1, 32, 1, 32, 1, 32, + 1, 32, 1, 32, 1, 32, 5, 32, 551, 8, 32, 10, 32, 12, 32, 554, 9, 32, 3, + 32, 556, 8, 32, 1, 32, 1, 32, 5, 32, 560, 8, 32, 10, 32, 12, 32, 563, 9, + 32, 1, 32, 3, 32, 566, 8, 32, 1, 32, 1, 32, 5, 32, 570, 8, 32, 10, 32, + 12, 32, 573, 9, 32, 1, 32, 1, 32, 1, 33, 1, 33, 1, 33, 1, 33, 3, 33, 581, + 8, 33, 1, 33, 1, 33, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 3, 34, 590, 8, + 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 5, 34, + 601, 8, 34, 10, 34, 12, 34, 604, 9, 34, 3, 34, 606, 8, 34, 1, 34, 3, 34, + 609, 8, 34, 1, 34, 1, 34, 1, 34, 1, 35, 1, 35, 1, 35, 1, 35, 3, 35, 618, + 8, 35, 1, 36, 1, 36, 1, 36, 1, 36, 1, 36, 3, 36, 625, 8, 36, 1, 36, 1, + 36, 1, 37, 1, 37, 1, 37, 1, 37, 3, 37, 633, 8, 37, 1, 37, 1, 37, 1, 38, + 1, 38, 1, 38, 1, 38, 5, 38, 641, 8, 38, 10, 38, 12, 38, 644, 9, 38, 1, + 38, 1, 38, 1, 38, 1, 38, 1, 38, 5, 38, 651, 8, 38, 10, 38, 12, 38, 654, + 9, 38, 3, 38, 656, 8, 38, 1, 38, 1, 38, 3, 38, 660, 8, 38, 1, 38, 1, 38, + 3, 38, 664, 8, 38, 1, 39, 1, 39, 3, 39, 668, 8, 39, 1, 39, 1, 39, 3, 39, + 672, 8, 39, 1, 40, 1, 40, 3, 40, 676, 8, 40, 1, 40, 1, 40, 3, 40, 680, + 8, 40, 1, 41, 1, 41, 3, 41, 684, 8, 41, 1, 41, 1, 41, 1, 41, 5, 41, 689, + 8, 41, 10, 41, 12, 41, 692, 9, 41, 1, 41, 1, 41, 1, 41, 5, 41, 697, 8, + 41, 10, 41, 12, 41, 700, 9, 41, 3, 41, 702, 8, 41, 1, 41, 1, 
41, 3, 41, + 706, 8, 41, 1, 41, 1, 41, 1, 41, 1, 41, 1, 41, 3, 41, 713, 8, 41, 3, 41, + 715, 8, 41, 1, 41, 1, 41, 1, 41, 1, 41, 1, 41, 1, 41, 1, 41, 1, 41, 1, + 41, 5, 41, 726, 8, 41, 10, 41, 12, 41, 729, 9, 41, 3, 41, 731, 8, 41, 1, + 42, 1, 42, 1, 42, 3, 42, 736, 8, 42, 1, 42, 1, 42, 3, 42, 740, 8, 42, 1, + 42, 3, 42, 743, 8, 42, 1, 42, 1, 42, 1, 42, 1, 42, 3, 42, 749, 8, 42, 1, + 42, 3, 42, 752, 8, 42, 3, 42, 754, 8, 42, 1, 43, 3, 43, 757, 8, 43, 1, + 43, 1, 43, 1, 43, 1, 43, 1, 43, 1, 44, 1, 44, 3, 44, 766, 8, 44, 1, 44, + 3, 44, 769, 8, 44, 1, 44, 1, 44, 1, 44, 3, 44, 774, 8, 44, 1, 44, 3, 44, + 777, 8, 44, 1, 45, 1, 45, 1, 45, 3, 45, 782, 8, 45, 1, 45, 3, 45, 785, + 8, 45, 1, 45, 1, 45, 1, 45, 1, 45, 5, 45, 791, 8, 45, 10, 45, 12, 45, 794, + 9, 45, 1, 45, 1, 45, 1, 45, 5, 45, 799, 8, 45, 10, 45, 12, 45, 802, 9, + 45, 3, 45, 804, 8, 45, 1, 45, 1, 45, 3, 45, 808, 8, 45, 1, 46, 1, 46, 1, + 46, 1, 46, 1, 47, 1, 47, 1, 47, 1, 47, 3, 47, 818, 8, 47, 1, 47, 3, 47, + 821, 8, 47, 1, 47, 1, 47, 1, 47, 1, 47, 3, 47, 827, 8, 47, 1, 47, 1, 47, + 1, 47, 1, 47, 1, 47, 1, 47, 1, 47, 1, 47, 1, 47, 5, 47, 838, 8, 47, 10, + 47, 12, 47, 841, 9, 47, 1, 47, 3, 47, 844, 8, 47, 1, 47, 3, 47, 847, 8, + 47, 1, 48, 1, 48, 1, 48, 1, 48, 1, 48, 1, 48, 1, 48, 3, 48, 856, 8, 48, + 3, 48, 858, 8, 48, 1, 48, 1, 48, 1, 48, 1, 48, 1, 48, 1, 48, 1, 48, 5, + 48, 867, 8, 48, 10, 48, 12, 48, 870, 9, 48, 1, 48, 1, 48, 3, 48, 874, 8, + 48, 3, 48, 876, 8, 48, 1, 49, 1, 49, 1, 49, 1, 49, 3, 49, 882, 8, 49, 1, + 49, 3, 49, 885, 8, 49, 1, 49, 1, 49, 3, 49, 889, 8, 49, 1, 50, 1, 50, 1, + 50, 1, 50, 1, 50, 3, 50, 896, 8, 50, 1, 50, 1, 50, 1, 50, 1, 50, 3, 50, + 902, 8, 50, 1, 50, 1, 50, 1, 50, 1, 50, 1, 50, 1, 50, 1, 50, 3, 50, 911, + 8, 50, 1, 50, 1, 50, 1, 50, 3, 50, 916, 8, 50, 1, 50, 1, 50, 3, 50, 920, + 8, 50, 1, 50, 1, 50, 3, 50, 924, 8, 50, 1, 50, 1, 50, 1, 50, 3, 50, 929, + 8, 50, 1, 50, 1, 50, 3, 50, 933, 8, 50, 1, 50, 1, 50, 1, 50, 3, 50, 938, + 8, 50, 1, 50, 1, 50, 3, 50, 942, 8, 50, 1, 50, 1, 50, 3, 50, 946, 8, 50, + 1, 50, 4, 50, 949, 8, 50, 11, 50, 12, 50, 950, 1, 50, 1, 50, 3, 50, 955, + 8, 50, 1, 50, 1, 50, 1, 50, 3, 50, 960, 8, 50, 1, 50, 3, 50, 963, 8, 50, + 1, 50, 1, 50, 1, 50, 1, 50, 3, 50, 969, 8, 50, 1, 50, 1, 50, 3, 50, 973, + 8, 50, 1, 50, 1, 50, 1, 50, 1, 50, 1, 50, 1, 50, 1, 50, 1, 50, 1, 50, 1, + 50, 1, 50, 3, 50, 986, 8, 50, 1, 50, 1, 50, 1, 50, 1, 50, 3, 50, 992, 8, + 50, 1, 50, 1, 50, 1, 50, 1, 50, 1, 50, 1, 50, 1, 50, 1, 50, 1, 50, 1, 50, + 1, 50, 1, 50, 1, 50, 1, 50, 1, 50, 1, 50, 1, 50, 1, 50, 3, 50, 1012, 8, + 50, 1, 50, 1, 50, 1, 50, 1, 50, 3, 50, 1018, 8, 50, 1, 50, 1, 50, 3, 50, + 1022, 8, 50, 3, 50, 1024, 8, 50, 1, 50, 1, 50, 3, 50, 1028, 8, 50, 1, 50, + 1, 50, 1, 50, 1, 50, 1, 50, 3, 50, 1035, 8, 50, 1, 50, 1, 50, 1, 50, 1, + 50, 3, 50, 1041, 8, 50, 1, 50, 1, 50, 1, 50, 1, 50, 1, 50, 3, 50, 1048, + 8, 50, 1, 50, 1, 50, 1, 50, 1, 50, 1, 50, 1, 50, 3, 50, 1056, 8, 50, 5, + 50, 1058, 8, 50, 10, 50, 12, 50, 1061, 9, 50, 1, 51, 1, 51, 1, 51, 1, 51, + 3, 51, 1067, 8, 51, 1, 51, 1, 51, 1, 51, 1, 51, 1, 51, 5, 51, 1074, 8, + 51, 10, 51, 12, 51, 1077, 9, 51, 3, 51, 1079, 8, 51, 1, 51, 1, 51, 1, 52, + 1, 52, 1, 52, 1, 52, 1, 52, 1, 53, 1, 53, 1, 53, 5, 53, 1091, 8, 53, 10, + 53, 12, 53, 1094, 9, 53, 1, 54, 1, 54, 1, 54, 3, 54, 1099, 8, 54, 1, 54, + 1, 54, 3, 54, 1103, 8, 54, 1, 54, 1, 54, 1, 55, 1, 55, 1, 55, 1, 55, 1, + 55, 3, 55, 1112, 8, 55, 1, 55, 1, 55, 1, 55, 1, 55, 3, 55, 1118, 8, 55, + 1, 55, 1, 55, 3, 55, 1122, 8, 55, 1, 55, 1, 55, 3, 55, 1126, 8, 55, 1, + 55, 3, 55, 1129, 8, 55, 1, 55, 1, 
55, 3, 55, 1133, 8, 55, 1, 55, 1, 55, + 3, 55, 1137, 8, 55, 1, 55, 1, 55, 3, 55, 1141, 8, 55, 1, 55, 1, 55, 1, + 55, 1, 55, 1, 55, 1, 55, 1, 55, 1, 55, 1, 55, 1, 55, 1, 55, 1, 55, 1, 55, + 1, 55, 1, 55, 1, 55, 1, 55, 1, 55, 1, 55, 1, 55, 1, 55, 1, 55, 3, 55, 1165, + 8, 55, 1, 55, 1, 55, 1, 55, 1, 55, 3, 55, 1171, 8, 55, 1, 55, 1, 55, 3, + 55, 1175, 8, 55, 3, 55, 1177, 8, 55, 1, 55, 1, 55, 3, 55, 1181, 8, 55, + 1, 55, 1, 55, 1, 55, 3, 55, 1186, 8, 55, 1, 55, 1, 55, 1, 55, 1, 55, 1, + 55, 1, 55, 3, 55, 1194, 8, 55, 5, 55, 1196, 8, 55, 10, 55, 12, 55, 1199, + 9, 55, 1, 56, 1, 56, 1, 56, 5, 56, 1204, 8, 56, 10, 56, 12, 56, 1207, 9, + 56, 1, 57, 1, 57, 1, 57, 1, 57, 1, 57, 1, 57, 1, 57, 5, 57, 1216, 8, 57, + 10, 57, 12, 57, 1219, 9, 57, 1, 57, 1, 57, 3, 57, 1223, 8, 57, 1, 57, 1, + 57, 1, 57, 1, 57, 1, 57, 3, 57, 1230, 8, 57, 1, 57, 1, 57, 1, 57, 1, 57, + 1, 57, 1, 57, 1, 57, 1, 57, 1, 57, 1, 57, 3, 57, 1242, 8, 57, 1, 57, 1, + 57, 5, 57, 1246, 8, 57, 10, 57, 12, 57, 1249, 9, 57, 1, 57, 1, 57, 3, 57, + 1253, 8, 57, 1, 57, 1, 57, 1, 57, 1, 57, 5, 57, 1259, 8, 57, 10, 57, 12, + 57, 1262, 9, 57, 1, 57, 1, 57, 1, 57, 5, 57, 1267, 8, 57, 10, 57, 12, 57, + 1270, 9, 57, 1, 57, 3, 57, 1273, 8, 57, 1, 57, 3, 57, 1276, 8, 57, 1, 57, + 1, 57, 1, 57, 1, 57, 1, 57, 1, 57, 1, 57, 1, 57, 3, 57, 1286, 8, 57, 1, + 57, 1, 57, 1, 57, 1, 57, 1, 57, 1, 57, 3, 57, 1294, 8, 57, 1, 58, 1, 58, + 1, 59, 1, 59, 1, 59, 3, 59, 1301, 8, 59, 1, 59, 1, 59, 1, 59, 3, 59, 1306, + 8, 59, 1, 59, 1, 59, 1, 60, 1, 60, 1, 60, 5, 60, 1313, 8, 60, 10, 60, 12, + 60, 1316, 9, 60, 1, 60, 1, 60, 1, 61, 1, 61, 1, 61, 1, 61, 1, 61, 0, 2, + 100, 110, 62, 0, 2, 4, 6, 8, 10, 12, 14, 16, 18, 20, 22, 24, 26, 28, 30, + 32, 34, 36, 38, 40, 42, 44, 46, 48, 50, 52, 54, 56, 58, 60, 62, 64, 66, + 68, 70, 72, 74, 76, 78, 80, 82, 84, 86, 88, 90, 92, 94, 96, 98, 100, 102, + 104, 106, 108, 110, 112, 114, 116, 118, 120, 122, 0, 14, 1, 0, 20, 21, + 1, 0, 135, 136, 1, 0, 146, 147, 1, 0, 57, 58, 1, 0, 52, 53, 6, 0, 33, 33, + 37, 38, 41, 41, 57, 58, 97, 98, 132, 133, 1, 0, 78, 79, 1, 0, 105, 106, + 2, 0, 74, 76, 100, 100, 3, 0, 14, 14, 19, 19, 22, 22, 1, 0, 65, 66, 2, + 0, 15, 16, 23, 27, 2, 0, 11, 11, 20, 21, 2, 0, 29, 29, 146, 146, 1529, + 0, 124, 1, 0, 0, 0, 2, 141, 1, 0, 0, 0, 4, 176, 1, 0, 0, 0, 6, 182, 1, + 0, 0, 0, 8, 184, 1, 0, 0, 0, 10, 192, 1, 0, 0, 0, 12, 204, 1, 0, 0, 0, + 14, 207, 1, 0, 0, 0, 16, 209, 1, 0, 0, 0, 18, 217, 1, 0, 0, 0, 20, 225, + 1, 0, 0, 0, 22, 249, 1, 0, 0, 0, 24, 251, 1, 0, 0, 0, 26, 263, 1, 0, 0, + 0, 28, 279, 1, 0, 0, 0, 30, 305, 1, 0, 0, 0, 32, 313, 1, 0, 0, 0, 34, 333, + 1, 0, 0, 0, 36, 360, 1, 0, 0, 0, 38, 387, 1, 0, 0, 0, 40, 389, 1, 0, 0, + 0, 42, 399, 1, 0, 0, 0, 44, 445, 1, 0, 0, 0, 46, 447, 1, 0, 0, 0, 48, 466, + 1, 0, 0, 0, 50, 474, 1, 0, 0, 0, 52, 483, 1, 0, 0, 0, 54, 491, 1, 0, 0, + 0, 56, 505, 1, 0, 0, 0, 58, 519, 1, 0, 0, 0, 60, 527, 1, 0, 0, 0, 62, 529, + 1, 0, 0, 0, 64, 534, 1, 0, 0, 0, 66, 576, 1, 0, 0, 0, 68, 584, 1, 0, 0, + 0, 70, 613, 1, 0, 0, 0, 72, 619, 1, 0, 0, 0, 74, 628, 1, 0, 0, 0, 76, 636, + 1, 0, 0, 0, 78, 671, 1, 0, 0, 0, 80, 673, 1, 0, 0, 0, 82, 681, 1, 0, 0, + 0, 84, 753, 1, 0, 0, 0, 86, 756, 1, 0, 0, 0, 88, 776, 1, 0, 0, 0, 90, 778, + 1, 0, 0, 0, 92, 809, 1, 0, 0, 0, 94, 813, 1, 0, 0, 0, 96, 848, 1, 0, 0, + 0, 98, 877, 1, 0, 0, 0, 100, 972, 1, 0, 0, 0, 102, 1062, 1, 0, 0, 0, 104, + 1082, 1, 0, 0, 0, 106, 1087, 1, 0, 0, 0, 108, 1095, 1, 0, 0, 0, 110, 1140, + 1, 0, 0, 0, 112, 1200, 1, 0, 0, 0, 114, 1293, 1, 0, 0, 0, 116, 1295, 1, + 0, 0, 0, 118, 1300, 1, 0, 0, 0, 120, 1309, 1, 0, 0, 0, 122, 1319, 1, 
0, + 0, 0, 124, 129, 3, 2, 1, 0, 125, 126, 5, 6, 0, 0, 126, 128, 3, 2, 1, 0, + 127, 125, 1, 0, 0, 0, 128, 131, 1, 0, 0, 0, 129, 127, 1, 0, 0, 0, 129, + 130, 1, 0, 0, 0, 130, 133, 1, 0, 0, 0, 131, 129, 1, 0, 0, 0, 132, 134, + 5, 6, 0, 0, 133, 132, 1, 0, 0, 0, 133, 134, 1, 0, 0, 0, 134, 135, 1, 0, + 0, 0, 135, 136, 5, 0, 0, 1, 136, 1, 1, 0, 0, 0, 137, 138, 5, 1, 0, 0, 138, + 139, 3, 6, 3, 0, 139, 140, 5, 2, 0, 0, 140, 142, 1, 0, 0, 0, 141, 137, + 1, 0, 0, 0, 141, 142, 1, 0, 0, 0, 142, 160, 1, 0, 0, 0, 143, 161, 3, 30, + 15, 0, 144, 161, 3, 34, 17, 0, 145, 161, 3, 42, 21, 0, 146, 161, 3, 40, + 20, 0, 147, 161, 3, 46, 23, 0, 148, 161, 3, 48, 24, 0, 149, 161, 3, 50, + 25, 0, 150, 161, 3, 52, 26, 0, 151, 161, 3, 54, 27, 0, 152, 161, 3, 56, + 28, 0, 153, 161, 3, 62, 31, 0, 154, 161, 3, 64, 32, 0, 155, 161, 3, 66, + 33, 0, 156, 161, 3, 68, 34, 0, 157, 161, 3, 70, 35, 0, 158, 161, 3, 72, + 36, 0, 159, 161, 3, 74, 37, 0, 160, 143, 1, 0, 0, 0, 160, 144, 1, 0, 0, + 0, 160, 145, 1, 0, 0, 0, 160, 146, 1, 0, 0, 0, 160, 147, 1, 0, 0, 0, 160, + 148, 1, 0, 0, 0, 160, 149, 1, 0, 0, 0, 160, 150, 1, 0, 0, 0, 160, 151, + 1, 0, 0, 0, 160, 152, 1, 0, 0, 0, 160, 153, 1, 0, 0, 0, 160, 154, 1, 0, + 0, 0, 160, 155, 1, 0, 0, 0, 160, 156, 1, 0, 0, 0, 160, 157, 1, 0, 0, 0, + 160, 158, 1, 0, 0, 0, 160, 159, 1, 0, 0, 0, 161, 3, 1, 0, 0, 0, 162, 177, + 5, 134, 0, 0, 163, 165, 7, 0, 0, 0, 164, 163, 1, 0, 0, 0, 164, 165, 1, + 0, 0, 0, 165, 166, 1, 0, 0, 0, 166, 177, 5, 137, 0, 0, 167, 169, 7, 0, + 0, 0, 168, 167, 1, 0, 0, 0, 168, 169, 1, 0, 0, 0, 169, 170, 1, 0, 0, 0, + 170, 171, 5, 137, 0, 0, 171, 172, 5, 12, 0, 0, 172, 177, 5, 137, 0, 0, + 173, 177, 7, 1, 0, 0, 174, 177, 5, 56, 0, 0, 175, 177, 5, 138, 0, 0, 176, + 162, 1, 0, 0, 0, 176, 164, 1, 0, 0, 0, 176, 168, 1, 0, 0, 0, 176, 173, + 1, 0, 0, 0, 176, 174, 1, 0, 0, 0, 176, 175, 1, 0, 0, 0, 177, 5, 1, 0, 0, + 0, 178, 179, 5, 32, 0, 0, 179, 180, 5, 145, 0, 0, 180, 183, 5, 32, 0, 0, + 181, 183, 5, 145, 0, 0, 182, 178, 1, 0, 0, 0, 182, 181, 1, 0, 0, 0, 183, + 7, 1, 0, 0, 0, 184, 189, 3, 6, 3, 0, 185, 186, 5, 9, 0, 0, 186, 188, 3, + 6, 3, 0, 187, 185, 1, 0, 0, 0, 188, 191, 1, 0, 0, 0, 189, 187, 1, 0, 0, + 0, 189, 190, 1, 0, 0, 0, 190, 9, 1, 0, 0, 0, 191, 189, 1, 0, 0, 0, 192, + 198, 3, 6, 3, 0, 193, 194, 5, 7, 0, 0, 194, 195, 5, 137, 0, 0, 195, 196, + 5, 9, 0, 0, 196, 197, 5, 137, 0, 0, 197, 199, 5, 8, 0, 0, 198, 193, 1, + 0, 0, 0, 198, 199, 1, 0, 0, 0, 199, 202, 1, 0, 0, 0, 200, 201, 5, 3, 0, + 0, 201, 203, 5, 4, 0, 0, 202, 200, 1, 0, 0, 0, 202, 203, 1, 0, 0, 0, 203, + 11, 1, 0, 0, 0, 204, 205, 5, 28, 0, 0, 205, 206, 3, 10, 5, 0, 206, 13, + 1, 0, 0, 0, 207, 208, 7, 2, 0, 0, 208, 15, 1, 0, 0, 0, 209, 210, 3, 6, + 3, 0, 210, 214, 3, 10, 5, 0, 211, 213, 3, 22, 11, 0, 212, 211, 1, 0, 0, + 0, 213, 216, 1, 0, 0, 0, 214, 212, 1, 0, 0, 0, 214, 215, 1, 0, 0, 0, 215, + 17, 1, 0, 0, 0, 216, 214, 1, 0, 0, 0, 217, 222, 3, 10, 5, 0, 218, 219, + 5, 9, 0, 0, 219, 221, 3, 10, 5, 0, 220, 218, 1, 0, 0, 0, 221, 224, 1, 0, + 0, 0, 222, 220, 1, 0, 0, 0, 222, 223, 1, 0, 0, 0, 223, 19, 1, 0, 0, 0, + 224, 222, 1, 0, 0, 0, 225, 226, 3, 6, 3, 0, 226, 233, 3, 10, 5, 0, 227, + 228, 5, 9, 0, 0, 228, 229, 3, 6, 3, 0, 229, 230, 3, 10, 5, 0, 230, 232, + 1, 0, 0, 0, 231, 227, 1, 0, 0, 0, 232, 235, 1, 0, 0, 0, 233, 231, 1, 0, + 0, 0, 233, 234, 1, 0, 0, 0, 234, 21, 1, 0, 0, 0, 235, 233, 1, 0, 0, 0, + 236, 237, 5, 47, 0, 0, 237, 250, 5, 48, 0, 0, 238, 250, 5, 51, 0, 0, 239, + 240, 5, 61, 0, 0, 240, 250, 5, 56, 0, 0, 241, 242, 5, 55, 0, 0, 242, 250, + 3, 110, 55, 0, 243, 250, 3, 26, 13, 0, 244, 245, 5, 45, 0, 0, 
245, 246, + 5, 7, 0, 0, 246, 247, 3, 100, 50, 0, 247, 248, 5, 8, 0, 0, 248, 250, 1, + 0, 0, 0, 249, 236, 1, 0, 0, 0, 249, 238, 1, 0, 0, 0, 249, 239, 1, 0, 0, + 0, 249, 241, 1, 0, 0, 0, 249, 243, 1, 0, 0, 0, 249, 244, 1, 0, 0, 0, 250, + 23, 1, 0, 0, 0, 251, 252, 5, 49, 0, 0, 252, 261, 7, 3, 0, 0, 253, 254, + 5, 54, 0, 0, 254, 262, 5, 56, 0, 0, 255, 256, 5, 54, 0, 0, 256, 262, 5, + 55, 0, 0, 257, 262, 5, 53, 0, 0, 258, 259, 5, 87, 0, 0, 259, 262, 5, 36, + 0, 0, 260, 262, 5, 52, 0, 0, 261, 253, 1, 0, 0, 0, 261, 255, 1, 0, 0, 0, + 261, 257, 1, 0, 0, 0, 261, 258, 1, 0, 0, 0, 261, 260, 1, 0, 0, 0, 262, + 25, 1, 0, 0, 0, 263, 267, 5, 59, 0, 0, 264, 265, 3, 6, 3, 0, 265, 266, + 5, 12, 0, 0, 266, 268, 1, 0, 0, 0, 267, 264, 1, 0, 0, 0, 267, 268, 1, 0, + 0, 0, 268, 269, 1, 0, 0, 0, 269, 270, 3, 6, 3, 0, 270, 271, 5, 7, 0, 0, + 271, 272, 3, 8, 4, 0, 272, 277, 5, 8, 0, 0, 273, 275, 3, 24, 12, 0, 274, + 276, 3, 24, 12, 0, 275, 274, 1, 0, 0, 0, 275, 276, 1, 0, 0, 0, 276, 278, + 1, 0, 0, 0, 277, 273, 1, 0, 0, 0, 277, 278, 1, 0, 0, 0, 278, 27, 1, 0, + 0, 0, 279, 291, 5, 86, 0, 0, 280, 282, 5, 35, 0, 0, 281, 280, 1, 0, 0, + 0, 281, 282, 1, 0, 0, 0, 282, 283, 1, 0, 0, 0, 283, 284, 5, 7, 0, 0, 284, + 285, 3, 20, 10, 0, 285, 286, 5, 8, 0, 0, 286, 292, 1, 0, 0, 0, 287, 288, + 5, 7, 0, 0, 288, 289, 3, 18, 9, 0, 289, 290, 5, 8, 0, 0, 290, 292, 1, 0, + 0, 0, 291, 281, 1, 0, 0, 0, 291, 287, 1, 0, 0, 0, 292, 29, 1, 0, 0, 0, + 293, 295, 5, 88, 0, 0, 294, 296, 5, 122, 0, 0, 295, 294, 1, 0, 0, 0, 295, + 296, 1, 0, 0, 0, 296, 297, 1, 0, 0, 0, 297, 302, 3, 32, 16, 0, 298, 299, + 5, 9, 0, 0, 299, 301, 3, 32, 16, 0, 300, 298, 1, 0, 0, 0, 301, 304, 1, + 0, 0, 0, 302, 300, 1, 0, 0, 0, 302, 303, 1, 0, 0, 0, 303, 306, 1, 0, 0, + 0, 304, 302, 1, 0, 0, 0, 305, 293, 1, 0, 0, 0, 305, 306, 1, 0, 0, 0, 306, + 311, 1, 0, 0, 0, 307, 312, 3, 76, 38, 0, 308, 312, 3, 90, 45, 0, 309, 312, + 3, 94, 47, 0, 310, 312, 3, 98, 49, 0, 311, 307, 1, 0, 0, 0, 311, 308, 1, + 0, 0, 0, 311, 309, 1, 0, 0, 0, 311, 310, 1, 0, 0, 0, 312, 31, 1, 0, 0, + 0, 313, 326, 3, 6, 3, 0, 314, 323, 5, 7, 0, 0, 315, 320, 3, 6, 3, 0, 316, + 317, 5, 9, 0, 0, 317, 319, 3, 6, 3, 0, 318, 316, 1, 0, 0, 0, 319, 322, + 1, 0, 0, 0, 320, 318, 1, 0, 0, 0, 320, 321, 1, 0, 0, 0, 321, 324, 1, 0, + 0, 0, 322, 320, 1, 0, 0, 0, 323, 315, 1, 0, 0, 0, 323, 324, 1, 0, 0, 0, + 324, 325, 1, 0, 0, 0, 325, 327, 5, 8, 0, 0, 326, 314, 1, 0, 0, 0, 326, + 327, 1, 0, 0, 0, 327, 328, 1, 0, 0, 0, 328, 329, 5, 77, 0, 0, 329, 330, + 5, 7, 0, 0, 330, 331, 3, 76, 38, 0, 331, 332, 5, 8, 0, 0, 332, 33, 1, 0, + 0, 0, 333, 334, 5, 37, 0, 0, 334, 338, 5, 35, 0, 0, 335, 336, 5, 112, 0, + 0, 336, 337, 5, 61, 0, 0, 337, 339, 5, 70, 0, 0, 338, 335, 1, 0, 0, 0, + 338, 339, 1, 0, 0, 0, 339, 340, 1, 0, 0, 0, 340, 341, 3, 6, 3, 0, 341, + 344, 5, 7, 0, 0, 342, 345, 3, 16, 8, 0, 343, 345, 3, 36, 18, 0, 344, 342, + 1, 0, 0, 0, 344, 343, 1, 0, 0, 0, 345, 353, 1, 0, 0, 0, 346, 349, 5, 9, + 0, 0, 347, 350, 3, 16, 8, 0, 348, 350, 3, 36, 18, 0, 349, 347, 1, 0, 0, + 0, 349, 348, 1, 0, 0, 0, 350, 352, 1, 0, 0, 0, 351, 346, 1, 0, 0, 0, 352, + 355, 1, 0, 0, 0, 353, 351, 1, 0, 0, 0, 353, 354, 1, 0, 0, 0, 354, 356, + 1, 0, 0, 0, 355, 353, 1, 0, 0, 0, 356, 357, 5, 8, 0, 0, 357, 35, 1, 0, + 0, 0, 358, 359, 5, 44, 0, 0, 359, 361, 3, 6, 3, 0, 360, 358, 1, 0, 0, 0, + 360, 361, 1, 0, 0, 0, 361, 385, 1, 0, 0, 0, 362, 363, 5, 51, 0, 0, 363, + 364, 5, 7, 0, 0, 364, 365, 3, 8, 4, 0, 365, 366, 5, 8, 0, 0, 366, 386, + 1, 0, 0, 0, 367, 368, 5, 45, 0, 0, 368, 369, 5, 7, 0, 0, 369, 370, 3, 100, + 50, 0, 370, 371, 5, 8, 0, 0, 371, 386, 1, 0, 0, 
0, 372, 373, 5, 46, 0, + 0, 373, 374, 5, 48, 0, 0, 374, 375, 5, 7, 0, 0, 375, 376, 3, 8, 4, 0, 376, + 377, 5, 8, 0, 0, 377, 378, 3, 26, 13, 0, 378, 386, 1, 0, 0, 0, 379, 380, + 5, 47, 0, 0, 380, 381, 5, 48, 0, 0, 381, 382, 5, 7, 0, 0, 382, 383, 3, + 8, 4, 0, 383, 384, 5, 8, 0, 0, 384, 386, 1, 0, 0, 0, 385, 362, 1, 0, 0, + 0, 385, 367, 1, 0, 0, 0, 385, 372, 1, 0, 0, 0, 385, 379, 1, 0, 0, 0, 386, + 37, 1, 0, 0, 0, 387, 388, 7, 4, 0, 0, 388, 39, 1, 0, 0, 0, 389, 390, 5, + 41, 0, 0, 390, 393, 5, 35, 0, 0, 391, 392, 5, 112, 0, 0, 392, 394, 5, 70, + 0, 0, 393, 391, 1, 0, 0, 0, 393, 394, 1, 0, 0, 0, 394, 395, 1, 0, 0, 0, + 395, 397, 3, 8, 4, 0, 396, 398, 3, 38, 19, 0, 397, 396, 1, 0, 0, 0, 397, + 398, 1, 0, 0, 0, 398, 41, 1, 0, 0, 0, 399, 400, 5, 38, 0, 0, 400, 401, + 5, 35, 0, 0, 401, 402, 3, 6, 3, 0, 402, 403, 3, 44, 22, 0, 403, 43, 1, + 0, 0, 0, 404, 405, 5, 38, 0, 0, 405, 406, 5, 39, 0, 0, 406, 407, 3, 6, + 3, 0, 407, 412, 5, 54, 0, 0, 408, 409, 5, 61, 0, 0, 409, 413, 5, 56, 0, + 0, 410, 411, 5, 55, 0, 0, 411, 413, 3, 110, 55, 0, 412, 408, 1, 0, 0, 0, + 412, 410, 1, 0, 0, 0, 413, 446, 1, 0, 0, 0, 414, 415, 5, 38, 0, 0, 415, + 416, 5, 39, 0, 0, 416, 417, 3, 6, 3, 0, 417, 421, 5, 41, 0, 0, 418, 419, + 5, 61, 0, 0, 419, 422, 5, 56, 0, 0, 420, 422, 5, 55, 0, 0, 421, 418, 1, + 0, 0, 0, 421, 420, 1, 0, 0, 0, 422, 446, 1, 0, 0, 0, 423, 424, 5, 40, 0, + 0, 424, 425, 5, 39, 0, 0, 425, 426, 3, 6, 3, 0, 426, 427, 3, 10, 5, 0, + 427, 446, 1, 0, 0, 0, 428, 429, 5, 41, 0, 0, 429, 430, 5, 39, 0, 0, 430, + 446, 3, 6, 3, 0, 431, 432, 5, 42, 0, 0, 432, 433, 5, 39, 0, 0, 433, 434, + 3, 6, 3, 0, 434, 435, 5, 43, 0, 0, 435, 436, 3, 6, 3, 0, 436, 446, 1, 0, + 0, 0, 437, 438, 5, 42, 0, 0, 438, 439, 5, 43, 0, 0, 439, 446, 3, 6, 3, + 0, 440, 441, 5, 40, 0, 0, 441, 446, 3, 36, 18, 0, 442, 443, 5, 41, 0, 0, + 443, 444, 5, 44, 0, 0, 444, 446, 3, 6, 3, 0, 445, 404, 1, 0, 0, 0, 445, + 414, 1, 0, 0, 0, 445, 423, 1, 0, 0, 0, 445, 428, 1, 0, 0, 0, 445, 431, + 1, 0, 0, 0, 445, 437, 1, 0, 0, 0, 445, 440, 1, 0, 0, 0, 445, 442, 1, 0, + 0, 0, 446, 45, 1, 0, 0, 0, 447, 449, 5, 37, 0, 0, 448, 450, 5, 51, 0, 0, + 449, 448, 1, 0, 0, 0, 449, 450, 1, 0, 0, 0, 450, 451, 1, 0, 0, 0, 451, + 455, 5, 62, 0, 0, 452, 453, 5, 112, 0, 0, 453, 454, 5, 61, 0, 0, 454, 456, + 5, 70, 0, 0, 455, 452, 1, 0, 0, 0, 455, 456, 1, 0, 0, 0, 456, 458, 1, 0, + 0, 0, 457, 459, 3, 6, 3, 0, 458, 457, 1, 0, 0, 0, 458, 459, 1, 0, 0, 0, + 459, 460, 1, 0, 0, 0, 460, 461, 5, 49, 0, 0, 461, 462, 3, 6, 3, 0, 462, + 463, 5, 7, 0, 0, 463, 464, 3, 8, 4, 0, 464, 465, 5, 8, 0, 0, 465, 47, 1, + 0, 0, 0, 466, 467, 5, 41, 0, 0, 467, 470, 5, 62, 0, 0, 468, 469, 5, 112, + 0, 0, 469, 471, 5, 70, 0, 0, 470, 468, 1, 0, 0, 0, 470, 471, 1, 0, 0, 0, + 471, 472, 1, 0, 0, 0, 472, 473, 3, 6, 3, 0, 473, 49, 1, 0, 0, 0, 474, 475, + 5, 37, 0, 0, 475, 479, 5, 126, 0, 0, 476, 477, 5, 112, 0, 0, 477, 478, + 5, 61, 0, 0, 478, 480, 5, 70, 0, 0, 479, 476, 1, 0, 0, 0, 479, 480, 1, + 0, 0, 0, 480, 481, 1, 0, 0, 0, 481, 482, 3, 6, 3, 0, 482, 51, 1, 0, 0, + 0, 483, 484, 5, 41, 0, 0, 484, 487, 5, 126, 0, 0, 485, 486, 5, 112, 0, + 0, 486, 488, 5, 70, 0, 0, 487, 485, 1, 0, 0, 0, 487, 488, 1, 0, 0, 0, 488, + 489, 1, 0, 0, 0, 489, 490, 3, 6, 3, 0, 490, 53, 1, 0, 0, 0, 491, 494, 5, + 124, 0, 0, 492, 495, 3, 58, 29, 0, 493, 495, 3, 6, 3, 0, 494, 492, 1, 0, + 0, 0, 494, 493, 1, 0, 0, 0, 495, 498, 1, 0, 0, 0, 496, 497, 5, 49, 0, 0, + 497, 499, 3, 6, 3, 0, 498, 496, 1, 0, 0, 0, 498, 499, 1, 0, 0, 0, 499, + 500, 1, 0, 0, 0, 500, 503, 5, 43, 0, 0, 501, 504, 3, 6, 3, 0, 502, 504, + 5, 134, 0, 0, 503, 501, 1, 0, 
0, 0, 503, 502, 1, 0, 0, 0, 504, 55, 1, 0, + 0, 0, 505, 508, 5, 125, 0, 0, 506, 509, 3, 58, 29, 0, 507, 509, 3, 6, 3, + 0, 508, 506, 1, 0, 0, 0, 508, 507, 1, 0, 0, 0, 509, 512, 1, 0, 0, 0, 510, + 511, 5, 49, 0, 0, 511, 513, 3, 6, 3, 0, 512, 510, 1, 0, 0, 0, 512, 513, + 1, 0, 0, 0, 513, 514, 1, 0, 0, 0, 514, 517, 5, 94, 0, 0, 515, 518, 3, 6, + 3, 0, 516, 518, 5, 134, 0, 0, 517, 515, 1, 0, 0, 0, 517, 516, 1, 0, 0, + 0, 518, 57, 1, 0, 0, 0, 519, 524, 3, 60, 30, 0, 520, 521, 5, 9, 0, 0, 521, + 523, 3, 60, 30, 0, 522, 520, 1, 0, 0, 0, 523, 526, 1, 0, 0, 0, 524, 522, + 1, 0, 0, 0, 524, 525, 1, 0, 0, 0, 525, 59, 1, 0, 0, 0, 526, 524, 1, 0, + 0, 0, 527, 528, 7, 5, 0, 0, 528, 61, 1, 0, 0, 0, 529, 530, 5, 127, 0, 0, + 530, 531, 5, 128, 0, 0, 531, 532, 5, 43, 0, 0, 532, 533, 3, 6, 3, 0, 533, + 63, 1, 0, 0, 0, 534, 535, 5, 37, 0, 0, 535, 541, 5, 36, 0, 0, 536, 537, + 5, 112, 0, 0, 537, 538, 5, 61, 0, 0, 538, 542, 5, 70, 0, 0, 539, 540, 5, + 64, 0, 0, 540, 542, 5, 129, 0, 0, 541, 536, 1, 0, 0, 0, 541, 539, 1, 0, + 0, 0, 541, 542, 1, 0, 0, 0, 542, 543, 1, 0, 0, 0, 543, 544, 3, 6, 3, 0, + 544, 555, 5, 7, 0, 0, 545, 546, 5, 146, 0, 0, 546, 552, 3, 10, 5, 0, 547, + 548, 5, 9, 0, 0, 548, 549, 5, 146, 0, 0, 549, 551, 3, 10, 5, 0, 550, 547, + 1, 0, 0, 0, 551, 554, 1, 0, 0, 0, 552, 550, 1, 0, 0, 0, 552, 553, 1, 0, + 0, 0, 553, 556, 1, 0, 0, 0, 554, 552, 1, 0, 0, 0, 555, 545, 1, 0, 0, 0, + 555, 556, 1, 0, 0, 0, 556, 557, 1, 0, 0, 0, 557, 561, 5, 8, 0, 0, 558, + 560, 3, 6, 3, 0, 559, 558, 1, 0, 0, 0, 560, 563, 1, 0, 0, 0, 561, 559, + 1, 0, 0, 0, 561, 562, 1, 0, 0, 0, 562, 565, 1, 0, 0, 0, 563, 561, 1, 0, + 0, 0, 564, 566, 3, 28, 14, 0, 565, 564, 1, 0, 0, 0, 565, 566, 1, 0, 0, + 0, 566, 567, 1, 0, 0, 0, 567, 571, 5, 1, 0, 0, 568, 570, 3, 114, 57, 0, + 569, 568, 1, 0, 0, 0, 570, 573, 1, 0, 0, 0, 571, 569, 1, 0, 0, 0, 571, + 572, 1, 0, 0, 0, 572, 574, 1, 0, 0, 0, 573, 571, 1, 0, 0, 0, 574, 575, + 5, 2, 0, 0, 575, 65, 1, 0, 0, 0, 576, 577, 5, 41, 0, 0, 577, 580, 5, 36, + 0, 0, 578, 579, 5, 112, 0, 0, 579, 581, 5, 70, 0, 0, 580, 578, 1, 0, 0, + 0, 580, 581, 1, 0, 0, 0, 581, 582, 1, 0, 0, 0, 582, 583, 3, 6, 3, 0, 583, + 67, 1, 0, 0, 0, 584, 585, 5, 33, 0, 0, 585, 589, 3, 6, 3, 0, 586, 587, + 5, 112, 0, 0, 587, 588, 5, 61, 0, 0, 588, 590, 5, 70, 0, 0, 589, 586, 1, + 0, 0, 0, 589, 590, 1, 0, 0, 0, 590, 608, 1, 0, 0, 0, 591, 605, 5, 1, 0, + 0, 592, 593, 3, 6, 3, 0, 593, 594, 5, 5, 0, 0, 594, 602, 3, 110, 55, 0, + 595, 596, 5, 9, 0, 0, 596, 597, 3, 6, 3, 0, 597, 598, 5, 5, 0, 0, 598, + 599, 3, 110, 55, 0, 599, 601, 1, 0, 0, 0, 600, 595, 1, 0, 0, 0, 601, 604, + 1, 0, 0, 0, 602, 600, 1, 0, 0, 0, 602, 603, 1, 0, 0, 0, 603, 606, 1, 0, + 0, 0, 604, 602, 1, 0, 0, 0, 605, 592, 1, 0, 0, 0, 605, 606, 1, 0, 0, 0, + 606, 607, 1, 0, 0, 0, 607, 609, 5, 2, 0, 0, 608, 591, 1, 0, 0, 0, 608, + 609, 1, 0, 0, 0, 609, 610, 1, 0, 0, 0, 610, 611, 5, 77, 0, 0, 611, 612, + 3, 6, 3, 0, 612, 69, 1, 0, 0, 0, 613, 614, 5, 34, 0, 0, 614, 617, 3, 6, + 3, 0, 615, 616, 5, 112, 0, 0, 616, 618, 5, 70, 0, 0, 617, 615, 1, 0, 0, + 0, 617, 618, 1, 0, 0, 0, 618, 71, 1, 0, 0, 0, 619, 620, 5, 37, 0, 0, 620, + 624, 5, 131, 0, 0, 621, 622, 5, 112, 0, 0, 622, 623, 5, 61, 0, 0, 623, + 625, 5, 70, 0, 0, 624, 621, 1, 0, 0, 0, 624, 625, 1, 0, 0, 0, 625, 626, + 1, 0, 0, 0, 626, 627, 3, 6, 3, 0, 627, 73, 1, 0, 0, 0, 628, 629, 5, 41, + 0, 0, 629, 632, 5, 131, 0, 0, 630, 631, 5, 112, 0, 0, 631, 633, 5, 70, + 0, 0, 632, 630, 1, 0, 0, 0, 632, 633, 1, 0, 0, 0, 633, 634, 1, 0, 0, 0, + 634, 635, 3, 6, 3, 0, 635, 75, 1, 0, 0, 0, 636, 642, 3, 82, 41, 0, 637, + 638, 3, 78, 39, 0, 
638, 639, 3, 82, 41, 0, 639, 641, 1, 0, 0, 0, 640, 637, + 1, 0, 0, 0, 641, 644, 1, 0, 0, 0, 642, 640, 1, 0, 0, 0, 642, 643, 1, 0, + 0, 0, 643, 655, 1, 0, 0, 0, 644, 642, 1, 0, 0, 0, 645, 646, 5, 82, 0, 0, + 646, 647, 5, 83, 0, 0, 647, 652, 3, 80, 40, 0, 648, 649, 5, 9, 0, 0, 649, + 651, 3, 80, 40, 0, 650, 648, 1, 0, 0, 0, 651, 654, 1, 0, 0, 0, 652, 650, + 1, 0, 0, 0, 652, 653, 1, 0, 0, 0, 653, 656, 1, 0, 0, 0, 654, 652, 1, 0, + 0, 0, 655, 645, 1, 0, 0, 0, 655, 656, 1, 0, 0, 0, 656, 659, 1, 0, 0, 0, + 657, 658, 5, 80, 0, 0, 658, 660, 3, 100, 50, 0, 659, 657, 1, 0, 0, 0, 659, + 660, 1, 0, 0, 0, 660, 663, 1, 0, 0, 0, 661, 662, 5, 81, 0, 0, 662, 664, + 3, 100, 50, 0, 663, 661, 1, 0, 0, 0, 663, 664, 1, 0, 0, 0, 664, 77, 1, + 0, 0, 0, 665, 667, 5, 101, 0, 0, 666, 668, 5, 71, 0, 0, 667, 666, 1, 0, + 0, 0, 667, 668, 1, 0, 0, 0, 668, 672, 1, 0, 0, 0, 669, 672, 5, 102, 0, + 0, 670, 672, 5, 103, 0, 0, 671, 665, 1, 0, 0, 0, 671, 669, 1, 0, 0, 0, + 671, 670, 1, 0, 0, 0, 672, 79, 1, 0, 0, 0, 673, 675, 3, 100, 50, 0, 674, + 676, 7, 6, 0, 0, 675, 674, 1, 0, 0, 0, 675, 676, 1, 0, 0, 0, 676, 679, + 1, 0, 0, 0, 677, 678, 5, 104, 0, 0, 678, 680, 7, 7, 0, 0, 679, 677, 1, + 0, 0, 0, 679, 680, 1, 0, 0, 0, 680, 81, 1, 0, 0, 0, 681, 683, 5, 97, 0, + 0, 682, 684, 5, 93, 0, 0, 683, 682, 1, 0, 0, 0, 683, 684, 1, 0, 0, 0, 684, + 685, 1, 0, 0, 0, 685, 690, 3, 88, 44, 0, 686, 687, 5, 9, 0, 0, 687, 689, + 3, 88, 44, 0, 688, 686, 1, 0, 0, 0, 689, 692, 1, 0, 0, 0, 690, 688, 1, + 0, 0, 0, 690, 691, 1, 0, 0, 0, 691, 701, 1, 0, 0, 0, 692, 690, 1, 0, 0, + 0, 693, 694, 5, 94, 0, 0, 694, 698, 3, 84, 42, 0, 695, 697, 3, 86, 43, + 0, 696, 695, 1, 0, 0, 0, 697, 700, 1, 0, 0, 0, 698, 696, 1, 0, 0, 0, 698, + 699, 1, 0, 0, 0, 699, 702, 1, 0, 0, 0, 700, 698, 1, 0, 0, 0, 701, 693, + 1, 0, 0, 0, 701, 702, 1, 0, 0, 0, 702, 705, 1, 0, 0, 0, 703, 704, 5, 95, + 0, 0, 704, 706, 3, 100, 50, 0, 705, 703, 1, 0, 0, 0, 705, 706, 1, 0, 0, + 0, 706, 714, 1, 0, 0, 0, 707, 708, 5, 84, 0, 0, 708, 709, 5, 83, 0, 0, + 709, 712, 3, 106, 53, 0, 710, 711, 5, 85, 0, 0, 711, 713, 3, 100, 50, 0, + 712, 710, 1, 0, 0, 0, 712, 713, 1, 0, 0, 0, 713, 715, 1, 0, 0, 0, 714, + 707, 1, 0, 0, 0, 714, 715, 1, 0, 0, 0, 715, 730, 1, 0, 0, 0, 716, 717, + 5, 120, 0, 0, 717, 718, 3, 6, 3, 0, 718, 719, 5, 77, 0, 0, 719, 727, 3, + 102, 51, 0, 720, 721, 5, 9, 0, 0, 721, 722, 3, 6, 3, 0, 722, 723, 5, 77, + 0, 0, 723, 724, 3, 102, 51, 0, 724, 726, 1, 0, 0, 0, 725, 720, 1, 0, 0, + 0, 726, 729, 1, 0, 0, 0, 727, 725, 1, 0, 0, 0, 727, 728, 1, 0, 0, 0, 728, + 731, 1, 0, 0, 0, 729, 727, 1, 0, 0, 0, 730, 716, 1, 0, 0, 0, 730, 731, + 1, 0, 0, 0, 731, 83, 1, 0, 0, 0, 732, 733, 3, 6, 3, 0, 733, 734, 5, 12, + 0, 0, 734, 736, 1, 0, 0, 0, 735, 732, 1, 0, 0, 0, 735, 736, 1, 0, 0, 0, + 736, 737, 1, 0, 0, 0, 737, 742, 3, 6, 3, 0, 738, 740, 5, 77, 0, 0, 739, + 738, 1, 0, 0, 0, 739, 740, 1, 0, 0, 0, 740, 741, 1, 0, 0, 0, 741, 743, + 3, 6, 3, 0, 742, 739, 1, 0, 0, 0, 742, 743, 1, 0, 0, 0, 743, 754, 1, 0, + 0, 0, 744, 745, 5, 7, 0, 0, 745, 746, 3, 76, 38, 0, 746, 751, 5, 8, 0, + 0, 747, 749, 5, 77, 0, 0, 748, 747, 1, 0, 0, 0, 748, 749, 1, 0, 0, 0, 749, + 750, 1, 0, 0, 0, 750, 752, 3, 6, 3, 0, 751, 748, 1, 0, 0, 0, 751, 752, + 1, 0, 0, 0, 752, 754, 1, 0, 0, 0, 753, 735, 1, 0, 0, 0, 753, 744, 1, 0, + 0, 0, 754, 85, 1, 0, 0, 0, 755, 757, 7, 8, 0, 0, 756, 755, 1, 0, 0, 0, + 756, 757, 1, 0, 0, 0, 757, 758, 1, 0, 0, 0, 758, 759, 5, 73, 0, 0, 759, + 760, 3, 84, 42, 0, 760, 761, 5, 49, 0, 0, 761, 762, 3, 100, 50, 0, 762, + 87, 1, 0, 0, 0, 763, 768, 3, 100, 50, 0, 764, 766, 5, 77, 0, 0, 765, 764, + 1, 0, 0, 
0, 765, 766, 1, 0, 0, 0, 766, 767, 1, 0, 0, 0, 767, 769, 3, 6, + 3, 0, 768, 765, 1, 0, 0, 0, 768, 769, 1, 0, 0, 0, 769, 777, 1, 0, 0, 0, + 770, 771, 3, 6, 3, 0, 771, 772, 5, 12, 0, 0, 772, 774, 1, 0, 0, 0, 773, + 770, 1, 0, 0, 0, 773, 774, 1, 0, 0, 0, 774, 775, 1, 0, 0, 0, 775, 777, + 5, 14, 0, 0, 776, 763, 1, 0, 0, 0, 776, 773, 1, 0, 0, 0, 777, 89, 1, 0, + 0, 0, 778, 779, 5, 58, 0, 0, 779, 784, 3, 6, 3, 0, 780, 782, 5, 77, 0, + 0, 781, 780, 1, 0, 0, 0, 781, 782, 1, 0, 0, 0, 782, 783, 1, 0, 0, 0, 783, + 785, 3, 6, 3, 0, 784, 781, 1, 0, 0, 0, 784, 785, 1, 0, 0, 0, 785, 786, + 1, 0, 0, 0, 786, 787, 5, 54, 0, 0, 787, 792, 3, 92, 46, 0, 788, 789, 5, + 9, 0, 0, 789, 791, 3, 92, 46, 0, 790, 788, 1, 0, 0, 0, 791, 794, 1, 0, + 0, 0, 792, 790, 1, 0, 0, 0, 792, 793, 1, 0, 0, 0, 793, 803, 1, 0, 0, 0, + 794, 792, 1, 0, 0, 0, 795, 796, 5, 94, 0, 0, 796, 800, 3, 84, 42, 0, 797, + 799, 3, 86, 43, 0, 798, 797, 1, 0, 0, 0, 799, 802, 1, 0, 0, 0, 800, 798, + 1, 0, 0, 0, 800, 801, 1, 0, 0, 0, 801, 804, 1, 0, 0, 0, 802, 800, 1, 0, + 0, 0, 803, 795, 1, 0, 0, 0, 803, 804, 1, 0, 0, 0, 804, 807, 1, 0, 0, 0, + 805, 806, 5, 95, 0, 0, 806, 808, 3, 100, 50, 0, 807, 805, 1, 0, 0, 0, 807, + 808, 1, 0, 0, 0, 808, 91, 1, 0, 0, 0, 809, 810, 3, 6, 3, 0, 810, 811, 5, + 15, 0, 0, 811, 812, 3, 100, 50, 0, 812, 93, 1, 0, 0, 0, 813, 814, 5, 98, + 0, 0, 814, 815, 5, 108, 0, 0, 815, 820, 3, 6, 3, 0, 816, 818, 5, 77, 0, + 0, 817, 816, 1, 0, 0, 0, 817, 818, 1, 0, 0, 0, 818, 819, 1, 0, 0, 0, 819, + 821, 3, 6, 3, 0, 820, 817, 1, 0, 0, 0, 820, 821, 1, 0, 0, 0, 821, 826, + 1, 0, 0, 0, 822, 823, 5, 7, 0, 0, 823, 824, 3, 8, 4, 0, 824, 825, 5, 8, + 0, 0, 825, 827, 1, 0, 0, 0, 826, 822, 1, 0, 0, 0, 826, 827, 1, 0, 0, 0, + 827, 843, 1, 0, 0, 0, 828, 829, 5, 99, 0, 0, 829, 830, 5, 7, 0, 0, 830, + 831, 3, 106, 53, 0, 831, 839, 5, 8, 0, 0, 832, 833, 5, 9, 0, 0, 833, 834, + 5, 7, 0, 0, 834, 835, 3, 106, 53, 0, 835, 836, 5, 8, 0, 0, 836, 838, 1, + 0, 0, 0, 837, 832, 1, 0, 0, 0, 838, 841, 1, 0, 0, 0, 839, 837, 1, 0, 0, + 0, 839, 840, 1, 0, 0, 0, 840, 844, 1, 0, 0, 0, 841, 839, 1, 0, 0, 0, 842, + 844, 3, 76, 38, 0, 843, 828, 1, 0, 0, 0, 843, 842, 1, 0, 0, 0, 844, 846, + 1, 0, 0, 0, 845, 847, 3, 96, 48, 0, 846, 845, 1, 0, 0, 0, 846, 847, 1, + 0, 0, 0, 847, 95, 1, 0, 0, 0, 848, 849, 5, 49, 0, 0, 849, 857, 5, 109, + 0, 0, 850, 851, 5, 7, 0, 0, 851, 852, 3, 8, 4, 0, 852, 855, 5, 8, 0, 0, + 853, 854, 5, 95, 0, 0, 854, 856, 3, 100, 50, 0, 855, 853, 1, 0, 0, 0, 855, + 856, 1, 0, 0, 0, 856, 858, 1, 0, 0, 0, 857, 850, 1, 0, 0, 0, 857, 858, + 1, 0, 0, 0, 858, 859, 1, 0, 0, 0, 859, 875, 5, 50, 0, 0, 860, 876, 5, 110, + 0, 0, 861, 862, 5, 58, 0, 0, 862, 863, 5, 54, 0, 0, 863, 868, 3, 92, 46, + 0, 864, 865, 5, 9, 0, 0, 865, 867, 3, 92, 46, 0, 866, 864, 1, 0, 0, 0, + 867, 870, 1, 0, 0, 0, 868, 866, 1, 0, 0, 0, 868, 869, 1, 0, 0, 0, 869, + 873, 1, 0, 0, 0, 870, 868, 1, 0, 0, 0, 871, 872, 5, 95, 0, 0, 872, 874, + 3, 100, 50, 0, 873, 871, 1, 0, 0, 0, 873, 874, 1, 0, 0, 0, 874, 876, 1, + 0, 0, 0, 875, 860, 1, 0, 0, 0, 875, 861, 1, 0, 0, 0, 876, 97, 1, 0, 0, + 0, 877, 878, 5, 57, 0, 0, 878, 879, 5, 94, 0, 0, 879, 884, 3, 6, 3, 0, + 880, 882, 5, 77, 0, 0, 881, 880, 1, 0, 0, 0, 881, 882, 1, 0, 0, 0, 882, + 883, 1, 0, 0, 0, 883, 885, 3, 6, 3, 0, 884, 881, 1, 0, 0, 0, 884, 885, + 1, 0, 0, 0, 885, 888, 1, 0, 0, 0, 886, 887, 5, 95, 0, 0, 887, 889, 3, 100, + 50, 0, 888, 886, 1, 0, 0, 0, 888, 889, 1, 0, 0, 0, 889, 99, 1, 0, 0, 0, + 890, 891, 6, 50, -1, 0, 891, 892, 5, 7, 0, 0, 892, 893, 3, 100, 50, 0, + 893, 895, 5, 8, 0, 0, 894, 896, 3, 12, 6, 0, 895, 894, 1, 0, 0, 0, 895, 
+ 896, 1, 0, 0, 0, 896, 973, 1, 0, 0, 0, 897, 898, 7, 0, 0, 0, 898, 973, + 3, 100, 50, 21, 899, 901, 3, 4, 2, 0, 900, 902, 3, 12, 6, 0, 901, 900, + 1, 0, 0, 0, 901, 902, 1, 0, 0, 0, 902, 973, 1, 0, 0, 0, 903, 910, 3, 108, + 54, 0, 904, 905, 5, 121, 0, 0, 905, 906, 5, 7, 0, 0, 906, 907, 5, 95, 0, + 0, 907, 908, 3, 100, 50, 0, 908, 909, 5, 8, 0, 0, 909, 911, 1, 0, 0, 0, + 910, 904, 1, 0, 0, 0, 910, 911, 1, 0, 0, 0, 911, 912, 1, 0, 0, 0, 912, + 915, 5, 118, 0, 0, 913, 916, 3, 102, 51, 0, 914, 916, 3, 6, 3, 0, 915, + 913, 1, 0, 0, 0, 915, 914, 1, 0, 0, 0, 916, 973, 1, 0, 0, 0, 917, 919, + 3, 108, 54, 0, 918, 920, 3, 12, 6, 0, 919, 918, 1, 0, 0, 0, 919, 920, 1, + 0, 0, 0, 920, 973, 1, 0, 0, 0, 921, 923, 3, 14, 7, 0, 922, 924, 3, 12, + 6, 0, 923, 922, 1, 0, 0, 0, 923, 924, 1, 0, 0, 0, 924, 973, 1, 0, 0, 0, + 925, 926, 5, 130, 0, 0, 926, 928, 5, 3, 0, 0, 927, 929, 3, 106, 53, 0, + 928, 927, 1, 0, 0, 0, 928, 929, 1, 0, 0, 0, 929, 930, 1, 0, 0, 0, 930, + 932, 5, 4, 0, 0, 931, 933, 3, 12, 6, 0, 932, 931, 1, 0, 0, 0, 932, 933, + 1, 0, 0, 0, 933, 973, 1, 0, 0, 0, 934, 935, 3, 6, 3, 0, 935, 936, 5, 12, + 0, 0, 936, 938, 1, 0, 0, 0, 937, 934, 1, 0, 0, 0, 937, 938, 1, 0, 0, 0, + 938, 939, 1, 0, 0, 0, 939, 941, 3, 6, 3, 0, 940, 942, 3, 12, 6, 0, 941, + 940, 1, 0, 0, 0, 941, 942, 1, 0, 0, 0, 942, 973, 1, 0, 0, 0, 943, 945, + 5, 89, 0, 0, 944, 946, 3, 100, 50, 0, 945, 944, 1, 0, 0, 0, 945, 946, 1, + 0, 0, 0, 946, 948, 1, 0, 0, 0, 947, 949, 3, 104, 52, 0, 948, 947, 1, 0, + 0, 0, 949, 950, 1, 0, 0, 0, 950, 948, 1, 0, 0, 0, 950, 951, 1, 0, 0, 0, + 951, 954, 1, 0, 0, 0, 952, 953, 5, 114, 0, 0, 953, 955, 3, 100, 50, 0, + 954, 952, 1, 0, 0, 0, 954, 955, 1, 0, 0, 0, 955, 956, 1, 0, 0, 0, 956, + 957, 5, 92, 0, 0, 957, 973, 1, 0, 0, 0, 958, 960, 5, 61, 0, 0, 959, 958, + 1, 0, 0, 0, 959, 960, 1, 0, 0, 0, 960, 961, 1, 0, 0, 0, 961, 963, 5, 70, + 0, 0, 962, 959, 1, 0, 0, 0, 962, 963, 1, 0, 0, 0, 963, 964, 1, 0, 0, 0, + 964, 965, 5, 7, 0, 0, 965, 966, 3, 76, 38, 0, 966, 968, 5, 8, 0, 0, 967, + 969, 3, 12, 6, 0, 968, 967, 1, 0, 0, 0, 968, 969, 1, 0, 0, 0, 969, 973, + 1, 0, 0, 0, 970, 971, 5, 61, 0, 0, 971, 973, 3, 100, 50, 3, 972, 890, 1, + 0, 0, 0, 972, 897, 1, 0, 0, 0, 972, 899, 1, 0, 0, 0, 972, 903, 1, 0, 0, + 0, 972, 917, 1, 0, 0, 0, 972, 921, 1, 0, 0, 0, 972, 925, 1, 0, 0, 0, 972, + 937, 1, 0, 0, 0, 972, 943, 1, 0, 0, 0, 972, 962, 1, 0, 0, 0, 972, 970, + 1, 0, 0, 0, 973, 1059, 1, 0, 0, 0, 974, 975, 10, 19, 0, 0, 975, 976, 7, + 9, 0, 0, 976, 1058, 3, 100, 50, 20, 977, 978, 10, 18, 0, 0, 978, 979, 7, + 0, 0, 0, 979, 1058, 3, 100, 50, 19, 980, 981, 10, 9, 0, 0, 981, 982, 5, + 13, 0, 0, 982, 1058, 3, 100, 50, 10, 983, 985, 10, 7, 0, 0, 984, 986, 5, + 61, 0, 0, 985, 984, 1, 0, 0, 0, 985, 986, 1, 0, 0, 0, 986, 987, 1, 0, 0, + 0, 987, 988, 7, 10, 0, 0, 988, 1058, 3, 100, 50, 8, 989, 991, 10, 6, 0, + 0, 990, 992, 5, 61, 0, 0, 991, 990, 1, 0, 0, 0, 991, 992, 1, 0, 0, 0, 992, + 993, 1, 0, 0, 0, 993, 994, 5, 68, 0, 0, 994, 995, 3, 100, 50, 0, 995, 996, + 5, 63, 0, 0, 996, 997, 3, 100, 50, 7, 997, 1058, 1, 0, 0, 0, 998, 999, + 10, 5, 0, 0, 999, 1000, 7, 11, 0, 0, 1000, 1058, 3, 100, 50, 6, 1001, 1002, + 10, 2, 0, 0, 1002, 1003, 5, 63, 0, 0, 1003, 1058, 3, 100, 50, 3, 1004, + 1005, 10, 1, 0, 0, 1005, 1006, 5, 64, 0, 0, 1006, 1058, 3, 100, 50, 2, + 1007, 1008, 10, 23, 0, 0, 1008, 1009, 5, 12, 0, 0, 1009, 1011, 3, 6, 3, + 0, 1010, 1012, 3, 12, 6, 0, 1011, 1010, 1, 0, 0, 0, 1011, 1012, 1, 0, 0, + 0, 1012, 1058, 1, 0, 0, 0, 1013, 1014, 10, 22, 0, 0, 1014, 1023, 5, 3, + 0, 0, 1015, 1024, 3, 100, 50, 0, 1016, 1018, 3, 100, 50, 0, 
1017, 1016, + 1, 0, 0, 0, 1017, 1018, 1, 0, 0, 0, 1018, 1019, 1, 0, 0, 0, 1019, 1021, + 5, 5, 0, 0, 1020, 1022, 3, 100, 50, 0, 1021, 1020, 1, 0, 0, 0, 1021, 1022, + 1, 0, 0, 0, 1022, 1024, 1, 0, 0, 0, 1023, 1015, 1, 0, 0, 0, 1023, 1017, + 1, 0, 0, 0, 1024, 1025, 1, 0, 0, 0, 1025, 1027, 5, 4, 0, 0, 1026, 1028, + 3, 12, 6, 0, 1027, 1026, 1, 0, 0, 0, 1027, 1028, 1, 0, 0, 0, 1028, 1058, + 1, 0, 0, 0, 1029, 1030, 10, 20, 0, 0, 1030, 1031, 5, 96, 0, 0, 1031, 1058, + 3, 6, 3, 0, 1032, 1034, 10, 8, 0, 0, 1033, 1035, 5, 61, 0, 0, 1034, 1033, + 1, 0, 0, 0, 1034, 1035, 1, 0, 0, 0, 1035, 1036, 1, 0, 0, 0, 1036, 1037, + 5, 67, 0, 0, 1037, 1040, 5, 7, 0, 0, 1038, 1041, 3, 106, 53, 0, 1039, 1041, + 3, 76, 38, 0, 1040, 1038, 1, 0, 0, 0, 1040, 1039, 1, 0, 0, 0, 1041, 1042, + 1, 0, 0, 0, 1042, 1043, 5, 8, 0, 0, 1043, 1058, 1, 0, 0, 0, 1044, 1045, + 10, 4, 0, 0, 1045, 1047, 5, 69, 0, 0, 1046, 1048, 5, 61, 0, 0, 1047, 1046, + 1, 0, 0, 0, 1047, 1048, 1, 0, 0, 0, 1048, 1055, 1, 0, 0, 0, 1049, 1050, + 5, 93, 0, 0, 1050, 1051, 5, 94, 0, 0, 1051, 1056, 3, 100, 50, 0, 1052, + 1056, 5, 56, 0, 0, 1053, 1056, 5, 135, 0, 0, 1054, 1056, 5, 136, 0, 0, + 1055, 1049, 1, 0, 0, 0, 1055, 1052, 1, 0, 0, 0, 1055, 1053, 1, 0, 0, 0, + 1055, 1054, 1, 0, 0, 0, 1056, 1058, 1, 0, 0, 0, 1057, 974, 1, 0, 0, 0, + 1057, 977, 1, 0, 0, 0, 1057, 980, 1, 0, 0, 0, 1057, 983, 1, 0, 0, 0, 1057, + 989, 1, 0, 0, 0, 1057, 998, 1, 0, 0, 0, 1057, 1001, 1, 0, 0, 0, 1057, 1004, + 1, 0, 0, 0, 1057, 1007, 1, 0, 0, 0, 1057, 1013, 1, 0, 0, 0, 1057, 1029, + 1, 0, 0, 0, 1057, 1032, 1, 0, 0, 0, 1057, 1044, 1, 0, 0, 0, 1058, 1061, + 1, 0, 0, 0, 1059, 1057, 1, 0, 0, 0, 1059, 1060, 1, 0, 0, 0, 1060, 101, + 1, 0, 0, 0, 1061, 1059, 1, 0, 0, 0, 1062, 1066, 5, 7, 0, 0, 1063, 1064, + 5, 119, 0, 0, 1064, 1065, 5, 83, 0, 0, 1065, 1067, 3, 106, 53, 0, 1066, + 1063, 1, 0, 0, 0, 1066, 1067, 1, 0, 0, 0, 1067, 1078, 1, 0, 0, 0, 1068, + 1069, 5, 82, 0, 0, 1069, 1070, 5, 83, 0, 0, 1070, 1075, 3, 80, 40, 0, 1071, + 1072, 5, 9, 0, 0, 1072, 1074, 3, 80, 40, 0, 1073, 1071, 1, 0, 0, 0, 1074, + 1077, 1, 0, 0, 0, 1075, 1073, 1, 0, 0, 0, 1075, 1076, 1, 0, 0, 0, 1076, + 1079, 1, 0, 0, 0, 1077, 1075, 1, 0, 0, 0, 1078, 1068, 1, 0, 0, 0, 1078, + 1079, 1, 0, 0, 0, 1079, 1080, 1, 0, 0, 0, 1080, 1081, 5, 8, 0, 0, 1081, + 103, 1, 0, 0, 0, 1082, 1083, 5, 90, 0, 0, 1083, 1084, 3, 100, 50, 0, 1084, + 1085, 5, 91, 0, 0, 1085, 1086, 3, 100, 50, 0, 1086, 105, 1, 0, 0, 0, 1087, + 1092, 3, 100, 50, 0, 1088, 1089, 5, 9, 0, 0, 1089, 1091, 3, 100, 50, 0, + 1090, 1088, 1, 0, 0, 0, 1091, 1094, 1, 0, 0, 0, 1092, 1090, 1, 0, 0, 0, + 1092, 1093, 1, 0, 0, 0, 1093, 107, 1, 0, 0, 0, 1094, 1092, 1, 0, 0, 0, + 1095, 1096, 3, 6, 3, 0, 1096, 1102, 5, 7, 0, 0, 1097, 1099, 5, 93, 0, 0, + 1098, 1097, 1, 0, 0, 0, 1098, 1099, 1, 0, 0, 0, 1099, 1100, 1, 0, 0, 0, + 1100, 1103, 3, 106, 53, 0, 1101, 1103, 5, 14, 0, 0, 1102, 1098, 1, 0, 0, + 0, 1102, 1101, 1, 0, 0, 0, 1102, 1103, 1, 0, 0, 0, 1103, 1104, 1, 0, 0, + 0, 1104, 1105, 5, 8, 0, 0, 1105, 109, 1, 0, 0, 0, 1106, 1107, 6, 55, -1, + 0, 1107, 1108, 5, 7, 0, 0, 1108, 1109, 3, 110, 55, 0, 1109, 1111, 5, 8, + 0, 0, 1110, 1112, 3, 12, 6, 0, 1111, 1110, 1, 0, 0, 0, 1111, 1112, 1, 0, + 0, 0, 1112, 1141, 1, 0, 0, 0, 1113, 1114, 7, 12, 0, 0, 1114, 1141, 3, 110, + 55, 13, 1115, 1117, 3, 4, 2, 0, 1116, 1118, 3, 12, 6, 0, 1117, 1116, 1, + 0, 0, 0, 1117, 1118, 1, 0, 0, 0, 1118, 1141, 1, 0, 0, 0, 1119, 1121, 3, + 118, 59, 0, 1120, 1122, 3, 12, 6, 0, 1121, 1120, 1, 0, 0, 0, 1121, 1122, + 1, 0, 0, 0, 1122, 1141, 1, 0, 0, 0, 1123, 1125, 3, 14, 7, 0, 1124, 1126, + 3, 12, 6, 0, 1125, 
1124, 1, 0, 0, 0, 1125, 1126, 1, 0, 0, 0, 1126, 1141, + 1, 0, 0, 0, 1127, 1129, 5, 130, 0, 0, 1128, 1127, 1, 0, 0, 0, 1128, 1129, + 1, 0, 0, 0, 1129, 1130, 1, 0, 0, 0, 1130, 1132, 5, 3, 0, 0, 1131, 1133, + 3, 112, 56, 0, 1132, 1131, 1, 0, 0, 0, 1132, 1133, 1, 0, 0, 0, 1133, 1134, + 1, 0, 0, 0, 1134, 1136, 5, 4, 0, 0, 1135, 1137, 3, 12, 6, 0, 1136, 1135, + 1, 0, 0, 0, 1136, 1137, 1, 0, 0, 0, 1137, 1141, 1, 0, 0, 0, 1138, 1139, + 5, 61, 0, 0, 1139, 1141, 3, 110, 55, 3, 1140, 1106, 1, 0, 0, 0, 1140, 1113, + 1, 0, 0, 0, 1140, 1115, 1, 0, 0, 0, 1140, 1119, 1, 0, 0, 0, 1140, 1123, + 1, 0, 0, 0, 1140, 1128, 1, 0, 0, 0, 1140, 1138, 1, 0, 0, 0, 1141, 1197, + 1, 0, 0, 0, 1142, 1143, 10, 12, 0, 0, 1143, 1144, 7, 9, 0, 0, 1144, 1196, + 3, 110, 55, 13, 1145, 1146, 10, 11, 0, 0, 1146, 1147, 7, 0, 0, 0, 1147, + 1196, 3, 110, 55, 12, 1148, 1149, 10, 6, 0, 0, 1149, 1150, 5, 13, 0, 0, + 1150, 1196, 3, 110, 55, 7, 1151, 1152, 10, 5, 0, 0, 1152, 1153, 7, 11, + 0, 0, 1153, 1196, 3, 110, 55, 6, 1154, 1155, 10, 2, 0, 0, 1155, 1156, 5, + 63, 0, 0, 1156, 1196, 3, 110, 55, 3, 1157, 1158, 10, 1, 0, 0, 1158, 1159, + 5, 64, 0, 0, 1159, 1196, 3, 110, 55, 2, 1160, 1161, 10, 15, 0, 0, 1161, + 1162, 5, 12, 0, 0, 1162, 1164, 3, 6, 3, 0, 1163, 1165, 3, 12, 6, 0, 1164, + 1163, 1, 0, 0, 0, 1164, 1165, 1, 0, 0, 0, 1165, 1196, 1, 0, 0, 0, 1166, + 1167, 10, 14, 0, 0, 1167, 1176, 5, 3, 0, 0, 1168, 1177, 3, 110, 55, 0, + 1169, 1171, 3, 110, 55, 0, 1170, 1169, 1, 0, 0, 0, 1170, 1171, 1, 0, 0, + 0, 1171, 1172, 1, 0, 0, 0, 1172, 1174, 5, 5, 0, 0, 1173, 1175, 3, 110, + 55, 0, 1174, 1173, 1, 0, 0, 0, 1174, 1175, 1, 0, 0, 0, 1175, 1177, 1, 0, + 0, 0, 1176, 1168, 1, 0, 0, 0, 1176, 1170, 1, 0, 0, 0, 1177, 1178, 1, 0, + 0, 0, 1178, 1180, 5, 4, 0, 0, 1179, 1181, 3, 12, 6, 0, 1180, 1179, 1, 0, + 0, 0, 1180, 1181, 1, 0, 0, 0, 1181, 1196, 1, 0, 0, 0, 1182, 1183, 10, 4, + 0, 0, 1183, 1185, 5, 69, 0, 0, 1184, 1186, 5, 61, 0, 0, 1185, 1184, 1, + 0, 0, 0, 1185, 1186, 1, 0, 0, 0, 1186, 1193, 1, 0, 0, 0, 1187, 1188, 5, + 93, 0, 0, 1188, 1189, 5, 94, 0, 0, 1189, 1194, 3, 110, 55, 0, 1190, 1194, + 5, 56, 0, 0, 1191, 1194, 5, 135, 0, 0, 1192, 1194, 5, 136, 0, 0, 1193, + 1187, 1, 0, 0, 0, 1193, 1190, 1, 0, 0, 0, 1193, 1191, 1, 0, 0, 0, 1193, + 1192, 1, 0, 0, 0, 1194, 1196, 1, 0, 0, 0, 1195, 1142, 1, 0, 0, 0, 1195, + 1145, 1, 0, 0, 0, 1195, 1148, 1, 0, 0, 0, 1195, 1151, 1, 0, 0, 0, 1195, + 1154, 1, 0, 0, 0, 1195, 1157, 1, 0, 0, 0, 1195, 1160, 1, 0, 0, 0, 1195, + 1166, 1, 0, 0, 0, 1195, 1182, 1, 0, 0, 0, 1196, 1199, 1, 0, 0, 0, 1197, + 1195, 1, 0, 0, 0, 1197, 1198, 1, 0, 0, 0, 1198, 111, 1, 0, 0, 0, 1199, + 1197, 1, 0, 0, 0, 1200, 1205, 3, 110, 55, 0, 1201, 1202, 5, 9, 0, 0, 1202, + 1204, 3, 110, 55, 0, 1203, 1201, 1, 0, 0, 0, 1204, 1207, 1, 0, 0, 0, 1205, + 1203, 1, 0, 0, 0, 1205, 1206, 1, 0, 0, 0, 1206, 113, 1, 0, 0, 0, 1207, + 1205, 1, 0, 0, 0, 1208, 1209, 5, 146, 0, 0, 1209, 1210, 3, 10, 5, 0, 1210, + 1211, 5, 6, 0, 0, 1211, 1294, 1, 0, 0, 0, 1212, 1217, 3, 116, 58, 0, 1213, + 1214, 5, 9, 0, 0, 1214, 1216, 3, 116, 58, 0, 1215, 1213, 1, 0, 0, 0, 1216, + 1219, 1, 0, 0, 0, 1217, 1215, 1, 0, 0, 0, 1217, 1218, 1, 0, 0, 0, 1218, + 1220, 1, 0, 0, 0, 1219, 1217, 1, 0, 0, 0, 1220, 1221, 5, 30, 0, 0, 1221, + 1223, 1, 0, 0, 0, 1222, 1212, 1, 0, 0, 0, 1222, 1223, 1, 0, 0, 0, 1223, + 1224, 1, 0, 0, 0, 1224, 1225, 3, 118, 59, 0, 1225, 1226, 5, 6, 0, 0, 1226, + 1294, 1, 0, 0, 0, 1227, 1229, 3, 110, 55, 0, 1228, 1230, 3, 10, 5, 0, 1229, + 1228, 1, 0, 0, 0, 1229, 1230, 1, 0, 0, 0, 1230, 1231, 1, 0, 0, 0, 1231, + 1232, 5, 30, 0, 0, 1232, 1233, 3, 110, 55, 0, 1233, 
1234, 5, 6, 0, 0, 1234, + 1294, 1, 0, 0, 0, 1235, 1236, 5, 111, 0, 0, 1236, 1237, 5, 146, 0, 0, 1237, + 1241, 5, 67, 0, 0, 1238, 1242, 3, 122, 61, 0, 1239, 1242, 3, 14, 7, 0, + 1240, 1242, 3, 30, 15, 0, 1241, 1238, 1, 0, 0, 0, 1241, 1239, 1, 0, 0, + 0, 1241, 1240, 1, 0, 0, 0, 1242, 1243, 1, 0, 0, 0, 1243, 1247, 5, 1, 0, + 0, 1244, 1246, 3, 114, 57, 0, 1245, 1244, 1, 0, 0, 0, 1246, 1249, 1, 0, + 0, 0, 1247, 1245, 1, 0, 0, 0, 1247, 1248, 1, 0, 0, 0, 1248, 1250, 1, 0, + 0, 0, 1249, 1247, 1, 0, 0, 0, 1250, 1252, 5, 2, 0, 0, 1251, 1253, 5, 6, + 0, 0, 1252, 1251, 1, 0, 0, 0, 1252, 1253, 1, 0, 0, 0, 1253, 1294, 1, 0, + 0, 0, 1254, 1255, 5, 112, 0, 0, 1255, 1260, 3, 120, 60, 0, 1256, 1257, + 5, 113, 0, 0, 1257, 1259, 3, 120, 60, 0, 1258, 1256, 1, 0, 0, 0, 1259, + 1262, 1, 0, 0, 0, 1260, 1258, 1, 0, 0, 0, 1260, 1261, 1, 0, 0, 0, 1261, + 1272, 1, 0, 0, 0, 1262, 1260, 1, 0, 0, 0, 1263, 1264, 5, 114, 0, 0, 1264, + 1268, 5, 1, 0, 0, 1265, 1267, 3, 114, 57, 0, 1266, 1265, 1, 0, 0, 0, 1267, + 1270, 1, 0, 0, 0, 1268, 1266, 1, 0, 0, 0, 1268, 1269, 1, 0, 0, 0, 1269, + 1271, 1, 0, 0, 0, 1270, 1268, 1, 0, 0, 0, 1271, 1273, 5, 2, 0, 0, 1272, + 1263, 1, 0, 0, 0, 1272, 1273, 1, 0, 0, 0, 1273, 1275, 1, 0, 0, 0, 1274, + 1276, 5, 6, 0, 0, 1275, 1274, 1, 0, 0, 0, 1275, 1276, 1, 0, 0, 0, 1276, + 1294, 1, 0, 0, 0, 1277, 1278, 3, 30, 15, 0, 1278, 1279, 5, 6, 0, 0, 1279, + 1294, 1, 0, 0, 0, 1280, 1281, 5, 115, 0, 0, 1281, 1294, 5, 6, 0, 0, 1282, + 1285, 5, 116, 0, 0, 1283, 1286, 3, 112, 56, 0, 1284, 1286, 3, 30, 15, 0, + 1285, 1283, 1, 0, 0, 0, 1285, 1284, 1, 0, 0, 0, 1285, 1286, 1, 0, 0, 0, + 1286, 1287, 1, 0, 0, 0, 1287, 1294, 5, 6, 0, 0, 1288, 1289, 5, 116, 0, + 0, 1289, 1290, 5, 117, 0, 0, 1290, 1291, 3, 112, 56, 0, 1291, 1292, 5, + 6, 0, 0, 1292, 1294, 1, 0, 0, 0, 1293, 1208, 1, 0, 0, 0, 1293, 1222, 1, + 0, 0, 0, 1293, 1227, 1, 0, 0, 0, 1293, 1235, 1, 0, 0, 0, 1293, 1254, 1, + 0, 0, 0, 1293, 1277, 1, 0, 0, 0, 1293, 1280, 1, 0, 0, 0, 1293, 1282, 1, + 0, 0, 0, 1293, 1288, 1, 0, 0, 0, 1294, 115, 1, 0, 0, 0, 1295, 1296, 7, + 13, 0, 0, 1296, 117, 1, 0, 0, 0, 1297, 1298, 3, 6, 3, 0, 1298, 1299, 5, + 12, 0, 0, 1299, 1301, 1, 0, 0, 0, 1300, 1297, 1, 0, 0, 0, 1300, 1301, 1, + 0, 0, 0, 1301, 1302, 1, 0, 0, 0, 1302, 1303, 3, 6, 3, 0, 1303, 1305, 5, + 7, 0, 0, 1304, 1306, 3, 112, 56, 0, 1305, 1304, 1, 0, 0, 0, 1305, 1306, + 1, 0, 0, 0, 1306, 1307, 1, 0, 0, 0, 1307, 1308, 5, 8, 0, 0, 1308, 119, + 1, 0, 0, 0, 1309, 1310, 3, 110, 55, 0, 1310, 1314, 5, 1, 0, 0, 1311, 1313, + 3, 114, 57, 0, 1312, 1311, 1, 0, 0, 0, 1313, 1316, 1, 0, 0, 0, 1314, 1312, + 1, 0, 0, 0, 1314, 1315, 1, 0, 0, 0, 1315, 1317, 1, 0, 0, 0, 1316, 1314, + 1, 0, 0, 0, 1317, 1318, 5, 2, 0, 0, 1318, 121, 1, 0, 0, 0, 1319, 1320, + 3, 110, 55, 0, 1320, 1321, 5, 31, 0, 0, 1321, 1322, 3, 110, 55, 0, 1322, + 123, 1, 0, 0, 0, 184, 129, 133, 141, 160, 164, 168, 176, 182, 189, 198, + 202, 214, 222, 233, 249, 261, 267, 275, 277, 281, 291, 295, 302, 305, 311, + 320, 323, 326, 338, 344, 349, 353, 360, 385, 393, 397, 412, 421, 445, 449, + 455, 458, 470, 479, 487, 494, 498, 503, 508, 512, 517, 524, 541, 552, 555, + 561, 565, 571, 580, 589, 602, 605, 608, 617, 624, 632, 642, 652, 655, 659, + 663, 667, 671, 675, 679, 683, 690, 698, 701, 705, 712, 714, 727, 730, 735, + 739, 742, 748, 751, 753, 756, 765, 768, 773, 776, 781, 784, 792, 800, 803, + 807, 817, 820, 826, 839, 843, 846, 855, 857, 868, 873, 875, 881, 884, 888, + 895, 901, 910, 915, 919, 923, 928, 932, 937, 941, 945, 950, 954, 959, 962, + 968, 972, 985, 991, 1011, 1017, 1021, 1023, 1027, 1034, 1040, 1047, 1055, + 1057, 1059, 
1066, 1075, 1078, 1092, 1098, 1102, 1111, 1117, 1121, 1125, + 1128, 1132, 1136, 1140, 1164, 1170, 1174, 1176, 1180, 1185, 1193, 1195, + 1197, 1205, 1217, 1222, 1229, 1241, 1247, 1252, 1260, 1268, 1272, 1275, + 1285, 1293, 1300, 1305, 1314, } deserializer := antlr.NewATNDeserializer(nil) staticData.atn = deserializer.Deserialize(staticData.serializedATN) @@ -722,222 +817,274 @@ const ( KuneiformParserASSIGN = 30 KuneiformParserRANGE = 31 KuneiformParserDOUBLE_QUOTE = 32 - KuneiformParserDATABASE = 33 - KuneiformParserUSE = 34 + KuneiformParserUSE = 33 + KuneiformParserUNUSE = 34 KuneiformParserTABLE = 35 KuneiformParserACTION = 36 - KuneiformParserPROCEDURE = 37 - KuneiformParserPUBLIC = 38 - KuneiformParserPRIVATE = 39 - KuneiformParserVIEW = 40 - KuneiformParserOWNER = 41 - KuneiformParserFOREIGN = 42 - KuneiformParserPRIMARY = 43 - KuneiformParserKEY = 44 - KuneiformParserON = 45 - KuneiformParserDO = 46 - KuneiformParserUNIQUE = 47 - KuneiformParserCASCADE = 48 - KuneiformParserRESTRICT = 49 - KuneiformParserSET = 50 - KuneiformParserDEFAULT = 51 - KuneiformParserNULL = 52 - KuneiformParserDELETE = 53 - KuneiformParserUPDATE = 54 - KuneiformParserREFERENCES = 55 - KuneiformParserREF = 56 - KuneiformParserNOT = 57 - KuneiformParserINDEX = 58 - KuneiformParserAND = 59 - KuneiformParserOR = 60 - KuneiformParserLIKE = 61 - KuneiformParserILIKE = 62 - KuneiformParserIN = 63 - KuneiformParserBETWEEN = 64 - KuneiformParserIS = 65 - KuneiformParserEXISTS = 66 - KuneiformParserALL = 67 - KuneiformParserANY = 68 - KuneiformParserJOIN = 69 - KuneiformParserLEFT = 70 - KuneiformParserRIGHT = 71 - KuneiformParserINNER = 72 - KuneiformParserAS = 73 - KuneiformParserASC = 74 - KuneiformParserDESC = 75 - KuneiformParserLIMIT = 76 - KuneiformParserOFFSET = 77 - KuneiformParserORDER = 78 - KuneiformParserBY = 79 - KuneiformParserGROUP = 80 - KuneiformParserHAVING = 81 - KuneiformParserRETURNS = 82 - KuneiformParserNO = 83 - KuneiformParserWITH = 84 - KuneiformParserCASE = 85 - KuneiformParserWHEN = 86 - KuneiformParserTHEN = 87 - KuneiformParserEND = 88 - KuneiformParserDISTINCT = 89 - KuneiformParserFROM = 90 - KuneiformParserWHERE = 91 - KuneiformParserCOLLATE = 92 - KuneiformParserSELECT = 93 - KuneiformParserINSERT = 94 - KuneiformParserVALUES = 95 - KuneiformParserFULL = 96 - KuneiformParserUNION = 97 - KuneiformParserINTERSECT = 98 - KuneiformParserEXCEPT = 99 - KuneiformParserNULLS = 100 - KuneiformParserFIRST = 101 - KuneiformParserLAST = 102 - KuneiformParserRETURNING = 103 - KuneiformParserINTO = 104 - KuneiformParserCONFLICT = 105 - KuneiformParserNOTHING = 106 - KuneiformParserFOR = 107 - KuneiformParserIF = 108 - KuneiformParserELSEIF = 109 - KuneiformParserELSE = 110 - KuneiformParserBREAK = 111 - KuneiformParserRETURN = 112 - KuneiformParserNEXT = 113 - KuneiformParserSTRING_ = 114 - KuneiformParserTRUE = 115 - KuneiformParserFALSE = 116 - KuneiformParserDIGITS_ = 117 - KuneiformParserBINARY_ = 118 - KuneiformParserLEGACY_FOREIGN_KEY = 119 - KuneiformParserLEGACY_ON_UPDATE = 120 - KuneiformParserLEGACY_ON_DELETE = 121 - KuneiformParserLEGACY_SET_DEFAULT = 122 - KuneiformParserLEGACY_SET_NULL = 123 - KuneiformParserLEGACY_NO_ACTION = 124 - KuneiformParserIDENTIFIER = 125 - KuneiformParserVARIABLE = 126 - KuneiformParserCONTEXTUAL_VARIABLE = 127 - KuneiformParserHASH_IDENTIFIER = 128 - KuneiformParserWS = 129 - KuneiformParserBLOCK_COMMENT = 130 - KuneiformParserLINE_COMMENT = 131 + KuneiformParserCREATE = 37 + KuneiformParserALTER = 38 + KuneiformParserCOLUMN = 39 + KuneiformParserADD = 40 
+ KuneiformParserDROP = 41 + KuneiformParserRENAME = 42 + KuneiformParserTO = 43 + KuneiformParserCONSTRAINT = 44 + KuneiformParserCHECK = 45 + KuneiformParserFOREIGN = 46 + KuneiformParserPRIMARY = 47 + KuneiformParserKEY = 48 + KuneiformParserON = 49 + KuneiformParserDO = 50 + KuneiformParserUNIQUE = 51 + KuneiformParserCASCADE = 52 + KuneiformParserRESTRICT = 53 + KuneiformParserSET = 54 + KuneiformParserDEFAULT = 55 + KuneiformParserNULL = 56 + KuneiformParserDELETE = 57 + KuneiformParserUPDATE = 58 + KuneiformParserREFERENCES = 59 + KuneiformParserREF = 60 + KuneiformParserNOT = 61 + KuneiformParserINDEX = 62 + KuneiformParserAND = 63 + KuneiformParserOR = 64 + KuneiformParserLIKE = 65 + KuneiformParserILIKE = 66 + KuneiformParserIN = 67 + KuneiformParserBETWEEN = 68 + KuneiformParserIS = 69 + KuneiformParserEXISTS = 70 + KuneiformParserALL = 71 + KuneiformParserANY = 72 + KuneiformParserJOIN = 73 + KuneiformParserLEFT = 74 + KuneiformParserRIGHT = 75 + KuneiformParserINNER = 76 + KuneiformParserAS = 77 + KuneiformParserASC = 78 + KuneiformParserDESC = 79 + KuneiformParserLIMIT = 80 + KuneiformParserOFFSET = 81 + KuneiformParserORDER = 82 + KuneiformParserBY = 83 + KuneiformParserGROUP = 84 + KuneiformParserHAVING = 85 + KuneiformParserRETURNS = 86 + KuneiformParserNO = 87 + KuneiformParserWITH = 88 + KuneiformParserCASE = 89 + KuneiformParserWHEN = 90 + KuneiformParserTHEN = 91 + KuneiformParserEND = 92 + KuneiformParserDISTINCT = 93 + KuneiformParserFROM = 94 + KuneiformParserWHERE = 95 + KuneiformParserCOLLATE = 96 + KuneiformParserSELECT = 97 + KuneiformParserINSERT = 98 + KuneiformParserVALUES = 99 + KuneiformParserFULL = 100 + KuneiformParserUNION = 101 + KuneiformParserINTERSECT = 102 + KuneiformParserEXCEPT = 103 + KuneiformParserNULLS = 104 + KuneiformParserFIRST = 105 + KuneiformParserLAST = 106 + KuneiformParserRETURNING = 107 + KuneiformParserINTO = 108 + KuneiformParserCONFLICT = 109 + KuneiformParserNOTHING = 110 + KuneiformParserFOR = 111 + KuneiformParserIF = 112 + KuneiformParserELSEIF = 113 + KuneiformParserELSE = 114 + KuneiformParserBREAK = 115 + KuneiformParserRETURN = 116 + KuneiformParserNEXT = 117 + KuneiformParserOVER = 118 + KuneiformParserPARTITION = 119 + KuneiformParserWINDOW = 120 + KuneiformParserFILTER = 121 + KuneiformParserRECURSIVE = 122 + KuneiformParserSCHEMA = 123 + KuneiformParserGRANT = 124 + KuneiformParserREVOKE = 125 + KuneiformParserROLE = 126 + KuneiformParserTRANSFER = 127 + KuneiformParserOWNERSHIP = 128 + KuneiformParserREPLACE = 129 + KuneiformParserARRAY = 130 + KuneiformParserNAMESPACE = 131 + KuneiformParserROLES = 132 + KuneiformParserCALL = 133 + KuneiformParserSTRING_ = 134 + KuneiformParserTRUE = 135 + KuneiformParserFALSE = 136 + KuneiformParserDIGITS_ = 137 + KuneiformParserBINARY_ = 138 + KuneiformParserLEGACY_FOREIGN_KEY = 139 + KuneiformParserLEGACY_ON_UPDATE = 140 + KuneiformParserLEGACY_ON_DELETE = 141 + KuneiformParserLEGACY_SET_DEFAULT = 142 + KuneiformParserLEGACY_SET_NULL = 143 + KuneiformParserLEGACY_NO_ACTION = 144 + KuneiformParserIDENTIFIER = 145 + KuneiformParserVARIABLE = 146 + KuneiformParserCONTEXTUAL_VARIABLE = 147 + KuneiformParserHASH_IDENTIFIER = 148 + KuneiformParserWS = 149 + KuneiformParserBLOCK_COMMENT = 150 + KuneiformParserLINE_COMMENT = 151 ) // KuneiformParser rules. 
const ( - KuneiformParserRULE_schema_entry = 0 - KuneiformParserRULE_sql_entry = 1 - KuneiformParserRULE_action_entry = 2 - KuneiformParserRULE_procedure_entry = 3 - KuneiformParserRULE_literal = 4 - KuneiformParserRULE_identifier = 5 - KuneiformParserRULE_identifier_list = 6 - KuneiformParserRULE_type = 7 - KuneiformParserRULE_type_cast = 8 - KuneiformParserRULE_variable = 9 - KuneiformParserRULE_variable_list = 10 - KuneiformParserRULE_schema = 11 - KuneiformParserRULE_annotation = 12 - KuneiformParserRULE_database_declaration = 13 - KuneiformParserRULE_use_declaration = 14 - KuneiformParserRULE_table_declaration = 15 - KuneiformParserRULE_column_def = 16 - KuneiformParserRULE_index_def = 17 - KuneiformParserRULE_foreign_key_def = 18 - KuneiformParserRULE_foreign_key_action = 19 - KuneiformParserRULE_type_list = 20 - KuneiformParserRULE_named_type_list = 21 - KuneiformParserRULE_typed_variable_list = 22 - KuneiformParserRULE_constraint = 23 - KuneiformParserRULE_access_modifier = 24 - KuneiformParserRULE_action_declaration = 25 - KuneiformParserRULE_procedure_declaration = 26 - KuneiformParserRULE_foreign_procedure_declaration = 27 - KuneiformParserRULE_procedure_return = 28 - KuneiformParserRULE_sql = 29 - KuneiformParserRULE_sql_statement = 30 - KuneiformParserRULE_common_table_expression = 31 - KuneiformParserRULE_select_statement = 32 - KuneiformParserRULE_compound_operator = 33 - KuneiformParserRULE_ordering_term = 34 - KuneiformParserRULE_select_core = 35 - KuneiformParserRULE_relation = 36 - KuneiformParserRULE_join = 37 - KuneiformParserRULE_result_column = 38 - KuneiformParserRULE_update_statement = 39 - KuneiformParserRULE_update_set_clause = 40 - KuneiformParserRULE_insert_statement = 41 - KuneiformParserRULE_upsert_clause = 42 - KuneiformParserRULE_delete_statement = 43 - KuneiformParserRULE_sql_expr = 44 - KuneiformParserRULE_when_then_clause = 45 - KuneiformParserRULE_sql_expr_list = 46 - KuneiformParserRULE_sql_function_call = 47 - KuneiformParserRULE_action_block = 48 - KuneiformParserRULE_action_statement = 49 - KuneiformParserRULE_procedure_block = 50 - KuneiformParserRULE_procedure_expr = 51 - KuneiformParserRULE_procedure_expr_list = 52 - KuneiformParserRULE_proc_statement = 53 - KuneiformParserRULE_variable_or_underscore = 54 - KuneiformParserRULE_procedure_function_call = 55 - KuneiformParserRULE_if_then_block = 56 - KuneiformParserRULE_range = 57 + KuneiformParserRULE_entry = 0 + KuneiformParserRULE_statement = 1 + KuneiformParserRULE_literal = 2 + KuneiformParserRULE_identifier = 3 + KuneiformParserRULE_identifier_list = 4 + KuneiformParserRULE_type = 5 + KuneiformParserRULE_type_cast = 6 + KuneiformParserRULE_variable = 7 + KuneiformParserRULE_table_column_def = 8 + KuneiformParserRULE_type_list = 9 + KuneiformParserRULE_named_type_list = 10 + KuneiformParserRULE_inline_constraint = 11 + KuneiformParserRULE_fk_action = 12 + KuneiformParserRULE_fk_constraint = 13 + KuneiformParserRULE_action_return = 14 + KuneiformParserRULE_sql_statement = 15 + KuneiformParserRULE_common_table_expression = 16 + KuneiformParserRULE_create_table_statement = 17 + KuneiformParserRULE_table_constraint_def = 18 + KuneiformParserRULE_opt_drop_behavior = 19 + KuneiformParserRULE_drop_table_statement = 20 + KuneiformParserRULE_alter_table_statement = 21 + KuneiformParserRULE_alter_table_action = 22 + KuneiformParserRULE_create_index_statement = 23 + KuneiformParserRULE_drop_index_statement = 24 + KuneiformParserRULE_create_role_statement = 25 + KuneiformParserRULE_drop_role_statement = 26 
+ KuneiformParserRULE_grant_statement = 27 + KuneiformParserRULE_revoke_statement = 28 + KuneiformParserRULE_privilege_list = 29 + KuneiformParserRULE_privilege = 30 + KuneiformParserRULE_transfer_ownership_statement = 31 + KuneiformParserRULE_create_action_statement = 32 + KuneiformParserRULE_drop_action_statement = 33 + KuneiformParserRULE_use_extension_statement = 34 + KuneiformParserRULE_unuse_extension_statement = 35 + KuneiformParserRULE_create_namespace_statement = 36 + KuneiformParserRULE_drop_namespace_statement = 37 + KuneiformParserRULE_select_statement = 38 + KuneiformParserRULE_compound_operator = 39 + KuneiformParserRULE_ordering_term = 40 + KuneiformParserRULE_select_core = 41 + KuneiformParserRULE_relation = 42 + KuneiformParserRULE_join = 43 + KuneiformParserRULE_result_column = 44 + KuneiformParserRULE_update_statement = 45 + KuneiformParserRULE_update_set_clause = 46 + KuneiformParserRULE_insert_statement = 47 + KuneiformParserRULE_upsert_clause = 48 + KuneiformParserRULE_delete_statement = 49 + KuneiformParserRULE_sql_expr = 50 + KuneiformParserRULE_window = 51 + KuneiformParserRULE_when_then_clause = 52 + KuneiformParserRULE_sql_expr_list = 53 + KuneiformParserRULE_sql_function_call = 54 + KuneiformParserRULE_action_expr = 55 + KuneiformParserRULE_action_expr_list = 56 + KuneiformParserRULE_action_statement = 57 + KuneiformParserRULE_variable_or_underscore = 58 + KuneiformParserRULE_action_function_call = 59 + KuneiformParserRULE_if_then_block = 60 + KuneiformParserRULE_range = 61 ) -// ISchema_entryContext is an interface to support dynamic dispatch. -type ISchema_entryContext interface { +// IEntryContext is an interface to support dynamic dispatch. +type IEntryContext interface { antlr.ParserRuleContext // GetParser returns the parser. GetParser() antlr.Parser // Getter signatures - Schema() ISchemaContext + AllStatement() []IStatementContext + Statement(i int) IStatementContext EOF() antlr.TerminalNode + AllSCOL() []antlr.TerminalNode + SCOL(i int) antlr.TerminalNode - // IsSchema_entryContext differentiates from other interfaces. - IsSchema_entryContext() + // IsEntryContext differentiates from other interfaces. 
+ IsEntryContext() } -type Schema_entryContext struct { +type EntryContext struct { antlr.BaseParserRuleContext parser antlr.Parser } -func NewEmptySchema_entryContext() *Schema_entryContext { - var p = new(Schema_entryContext) +func NewEmptyEntryContext() *EntryContext { + var p = new(EntryContext) antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1) - p.RuleIndex = KuneiformParserRULE_schema_entry + p.RuleIndex = KuneiformParserRULE_entry return p } -func InitEmptySchema_entryContext(p *Schema_entryContext) { +func InitEmptyEntryContext(p *EntryContext) { antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1) - p.RuleIndex = KuneiformParserRULE_schema_entry + p.RuleIndex = KuneiformParserRULE_entry } -func (*Schema_entryContext) IsSchema_entryContext() {} +func (*EntryContext) IsEntryContext() {} -func NewSchema_entryContext(parser antlr.Parser, parent antlr.ParserRuleContext, invokingState int) *Schema_entryContext { - var p = new(Schema_entryContext) +func NewEntryContext(parser antlr.Parser, parent antlr.ParserRuleContext, invokingState int) *EntryContext { + var p = new(EntryContext) antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, parent, invokingState) p.parser = parser - p.RuleIndex = KuneiformParserRULE_schema_entry + p.RuleIndex = KuneiformParserRULE_entry return p } -func (s *Schema_entryContext) GetParser() antlr.Parser { return s.parser } +func (s *EntryContext) GetParser() antlr.Parser { return s.parser } + +func (s *EntryContext) AllStatement() []IStatementContext { + children := s.GetChildren() + len := 0 + for _, ctx := range children { + if _, ok := ctx.(IStatementContext); ok { + len++ + } + } + + tst := make([]IStatementContext, len) + i := 0 + for _, ctx := range children { + if t, ok := ctx.(IStatementContext); ok { + tst[i] = t.(IStatementContext) + i++ + } + } + + return tst +} -func (s *Schema_entryContext) Schema() ISchemaContext { +func (s *EntryContext) Statement(i int) IStatementContext { var t antlr.RuleContext + j := 0 for _, ctx := range s.GetChildren() { - if _, ok := ctx.(ISchemaContext); ok { - t = ctx.(antlr.RuleContext) - break + if _, ok := ctx.(IStatementContext); ok { + if j == i { + t = ctx.(antlr.RuleContext) + break + } + j++ } } @@ -945,41 +1092,106 @@ func (s *Schema_entryContext) Schema() ISchemaContext { return nil } - return t.(ISchemaContext) + return t.(IStatementContext) } -func (s *Schema_entryContext) EOF() antlr.TerminalNode { +func (s *EntryContext) EOF() antlr.TerminalNode { return s.GetToken(KuneiformParserEOF, 0) } -func (s *Schema_entryContext) GetRuleContext() antlr.RuleContext { +func (s *EntryContext) AllSCOL() []antlr.TerminalNode { + return s.GetTokens(KuneiformParserSCOL) +} + +func (s *EntryContext) SCOL(i int) antlr.TerminalNode { + return s.GetToken(KuneiformParserSCOL, i) +} + +func (s *EntryContext) GetRuleContext() antlr.RuleContext { return s } -func (s *Schema_entryContext) ToStringTree(ruleNames []string, recog antlr.Recognizer) string { +func (s *EntryContext) ToStringTree(ruleNames []string, recog antlr.Recognizer) string { return antlr.TreesStringTree(s, ruleNames, recog) } -func (s *Schema_entryContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { +func (s *EntryContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { switch t := visitor.(type) { case KuneiformParserVisitor: - return t.VisitSchema_entry(s) + return t.VisitEntry(s) default: return t.VisitChildren(s) } } -func (p *KuneiformParser) Schema_entry() (localctx ISchema_entryContext) { - localctx = 
NewSchema_entryContext(p, p.GetParserRuleContext(), p.GetState()) - p.EnterRule(localctx, 0, KuneiformParserRULE_schema_entry) +func (p *KuneiformParser) Entry() (localctx IEntryContext) { + localctx = NewEntryContext(p, p.GetParserRuleContext(), p.GetState()) + p.EnterRule(localctx, 0, KuneiformParserRULE_entry) + var _la int + + var _alt int + p.EnterOuterAlt(localctx, 1) { - p.SetState(116) - p.Schema() + p.SetState(124) + p.Statement() + } + p.SetState(129) + p.GetErrorHandler().Sync(p) + if p.HasError() { + goto errorExit + } + _alt = p.GetInterpreter().AdaptivePredict(p.BaseParser, p.GetTokenStream(), 0, p.GetParserRuleContext()) + if p.HasError() { + goto errorExit + } + for _alt != 2 && _alt != antlr.ATNInvalidAltNumber { + if _alt == 1 { + { + p.SetState(125) + p.Match(KuneiformParserSCOL) + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } + } + { + p.SetState(126) + p.Statement() + } + + } + p.SetState(131) + p.GetErrorHandler().Sync(p) + if p.HasError() { + goto errorExit + } + _alt = p.GetInterpreter().AdaptivePredict(p.BaseParser, p.GetTokenStream(), 0, p.GetParserRuleContext()) + if p.HasError() { + goto errorExit + } + } + p.SetState(133) + p.GetErrorHandler().Sync(p) + if p.HasError() { + goto errorExit + } + _la = p.GetTokenStream().LA(1) + + if _la == KuneiformParserSCOL { + { + p.SetState(132) + p.Match(KuneiformParserSCOL) + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } + } + } { - p.SetState(117) + p.SetState(135) p.Match(KuneiformParserEOF) if p.HasError() { // Recognition error - abort rule @@ -1000,57 +1212,86 @@ errorExit: goto errorExit // Trick to prevent compiler error if the label is not used } -// ISql_entryContext is an interface to support dynamic dispatch. -type ISql_entryContext interface { +// IStatementContext is an interface to support dynamic dispatch. +type IStatementContext interface { antlr.ParserRuleContext // GetParser returns the parser. GetParser() antlr.Parser + // GetNamespace returns the namespace rule contexts. + GetNamespace() IIdentifierContext + + // SetNamespace sets the namespace rule contexts. + SetNamespace(IIdentifierContext) + // Getter signatures - Sql() ISqlContext - EOF() antlr.TerminalNode + Sql_statement() ISql_statementContext + Create_table_statement() ICreate_table_statementContext + Alter_table_statement() IAlter_table_statementContext + Drop_table_statement() IDrop_table_statementContext + Create_index_statement() ICreate_index_statementContext + Drop_index_statement() IDrop_index_statementContext + Create_role_statement() ICreate_role_statementContext + Drop_role_statement() IDrop_role_statementContext + Grant_statement() IGrant_statementContext + Revoke_statement() IRevoke_statementContext + Transfer_ownership_statement() ITransfer_ownership_statementContext + Create_action_statement() ICreate_action_statementContext + Drop_action_statement() IDrop_action_statementContext + Use_extension_statement() IUse_extension_statementContext + Unuse_extension_statement() IUnuse_extension_statementContext + Create_namespace_statement() ICreate_namespace_statementContext + Drop_namespace_statement() IDrop_namespace_statementContext + LBRACE() antlr.TerminalNode + RBRACE() antlr.TerminalNode + Identifier() IIdentifierContext - // IsSql_entryContext differentiates from other interfaces. - IsSql_entryContext() + // IsStatementContext differentiates from other interfaces. 
+ IsStatementContext() } -type Sql_entryContext struct { +type StatementContext struct { antlr.BaseParserRuleContext - parser antlr.Parser + parser antlr.Parser + namespace IIdentifierContext } -func NewEmptySql_entryContext() *Sql_entryContext { - var p = new(Sql_entryContext) +func NewEmptyStatementContext() *StatementContext { + var p = new(StatementContext) antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1) - p.RuleIndex = KuneiformParserRULE_sql_entry + p.RuleIndex = KuneiformParserRULE_statement return p } -func InitEmptySql_entryContext(p *Sql_entryContext) { +func InitEmptyStatementContext(p *StatementContext) { antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1) - p.RuleIndex = KuneiformParserRULE_sql_entry + p.RuleIndex = KuneiformParserRULE_statement } -func (*Sql_entryContext) IsSql_entryContext() {} +func (*StatementContext) IsStatementContext() {} -func NewSql_entryContext(parser antlr.Parser, parent antlr.ParserRuleContext, invokingState int) *Sql_entryContext { - var p = new(Sql_entryContext) +func NewStatementContext(parser antlr.Parser, parent antlr.ParserRuleContext, invokingState int) *StatementContext { + var p = new(StatementContext) antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, parent, invokingState) p.parser = parser - p.RuleIndex = KuneiformParserRULE_sql_entry + p.RuleIndex = KuneiformParserRULE_statement return p } -func (s *Sql_entryContext) GetParser() antlr.Parser { return s.parser } +func (s *StatementContext) GetParser() antlr.Parser { return s.parser } + +func (s *StatementContext) GetNamespace() IIdentifierContext { return s.namespace } -func (s *Sql_entryContext) Sql() ISqlContext { +func (s *StatementContext) SetNamespace(v IIdentifierContext) { s.namespace = v } + +func (s *StatementContext) Sql_statement() ISql_statementContext { var t antlr.RuleContext for _, ctx := range s.GetChildren() { - if _, ok := ctx.(ISqlContext); ok { + if _, ok := ctx.(ISql_statementContext); ok { t = ctx.(antlr.RuleContext) break } @@ -1060,112 +1301,109 @@ func (s *Sql_entryContext) Sql() ISqlContext { return nil } - return t.(ISqlContext) + return t.(ISql_statementContext) } -func (s *Sql_entryContext) EOF() antlr.TerminalNode { - return s.GetToken(KuneiformParserEOF, 0) -} +func (s *StatementContext) Create_table_statement() ICreate_table_statementContext { + var t antlr.RuleContext + for _, ctx := range s.GetChildren() { + if _, ok := ctx.(ICreate_table_statementContext); ok { + t = ctx.(antlr.RuleContext) + break + } + } -func (s *Sql_entryContext) GetRuleContext() antlr.RuleContext { - return s -} + if t == nil { + return nil + } -func (s *Sql_entryContext) ToStringTree(ruleNames []string, recog antlr.Recognizer) string { - return antlr.TreesStringTree(s, ruleNames, recog) + return t.(ICreate_table_statementContext) } -func (s *Sql_entryContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { - switch t := visitor.(type) { - case KuneiformParserVisitor: - return t.VisitSql_entry(s) +func (s *StatementContext) Alter_table_statement() IAlter_table_statementContext { + var t antlr.RuleContext + for _, ctx := range s.GetChildren() { + if _, ok := ctx.(IAlter_table_statementContext); ok { + t = ctx.(antlr.RuleContext) + break + } + } - default: - return t.VisitChildren(s) + if t == nil { + return nil } + + return t.(IAlter_table_statementContext) } -func (p *KuneiformParser) Sql_entry() (localctx ISql_entryContext) { - localctx = NewSql_entryContext(p, p.GetParserRuleContext(), p.GetState()) - p.EnterRule(localctx, 2, 
KuneiformParserRULE_sql_entry) - p.EnterOuterAlt(localctx, 1) - { - p.SetState(119) - p.Sql() - } - { - p.SetState(120) - p.Match(KuneiformParserEOF) - if p.HasError() { - // Recognition error - abort rule - goto errorExit +func (s *StatementContext) Drop_table_statement() IDrop_table_statementContext { + var t antlr.RuleContext + for _, ctx := range s.GetChildren() { + if _, ok := ctx.(IDrop_table_statementContext); ok { + t = ctx.(antlr.RuleContext) + break } } -errorExit: - if p.HasError() { - v := p.GetError() - localctx.SetException(v) - p.GetErrorHandler().ReportError(p, v) - p.GetErrorHandler().Recover(p, v) - p.SetError(nil) + if t == nil { + return nil } - p.ExitRule() - return localctx - goto errorExit // Trick to prevent compiler error if the label is not used -} -// IAction_entryContext is an interface to support dynamic dispatch. -type IAction_entryContext interface { - antlr.ParserRuleContext + return t.(IDrop_table_statementContext) +} - // GetParser returns the parser. - GetParser() antlr.Parser +func (s *StatementContext) Create_index_statement() ICreate_index_statementContext { + var t antlr.RuleContext + for _, ctx := range s.GetChildren() { + if _, ok := ctx.(ICreate_index_statementContext); ok { + t = ctx.(antlr.RuleContext) + break + } + } - // Getter signatures - Action_block() IAction_blockContext - EOF() antlr.TerminalNode + if t == nil { + return nil + } - // IsAction_entryContext differentiates from other interfaces. - IsAction_entryContext() + return t.(ICreate_index_statementContext) } -type Action_entryContext struct { - antlr.BaseParserRuleContext - parser antlr.Parser -} +func (s *StatementContext) Drop_index_statement() IDrop_index_statementContext { + var t antlr.RuleContext + for _, ctx := range s.GetChildren() { + if _, ok := ctx.(IDrop_index_statementContext); ok { + t = ctx.(antlr.RuleContext) + break + } + } -func NewEmptyAction_entryContext() *Action_entryContext { - var p = new(Action_entryContext) - antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1) - p.RuleIndex = KuneiformParserRULE_action_entry - return p -} + if t == nil { + return nil + } -func InitEmptyAction_entryContext(p *Action_entryContext) { - antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1) - p.RuleIndex = KuneiformParserRULE_action_entry + return t.(IDrop_index_statementContext) } -func (*Action_entryContext) IsAction_entryContext() {} - -func NewAction_entryContext(parser antlr.Parser, parent antlr.ParserRuleContext, invokingState int) *Action_entryContext { - var p = new(Action_entryContext) - - antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, parent, invokingState) +func (s *StatementContext) Create_role_statement() ICreate_role_statementContext { + var t antlr.RuleContext + for _, ctx := range s.GetChildren() { + if _, ok := ctx.(ICreate_role_statementContext); ok { + t = ctx.(antlr.RuleContext) + break + } + } - p.parser = parser - p.RuleIndex = KuneiformParserRULE_action_entry + if t == nil { + return nil + } - return p + return t.(ICreate_role_statementContext) } -func (s *Action_entryContext) GetParser() antlr.Parser { return s.parser } - -func (s *Action_entryContext) Action_block() IAction_blockContext { +func (s *StatementContext) Drop_role_statement() IDrop_role_statementContext { var t antlr.RuleContext for _, ctx := range s.GetChildren() { - if _, ok := ctx.(IAction_blockContext); ok { + if _, ok := ctx.(IDrop_role_statementContext); ok { t = ctx.(antlr.RuleContext) break } @@ -1175,112 +1413,141 @@ func (s *Action_entryContext) 
Action_block() IAction_blockContext { return nil } - return t.(IAction_blockContext) + return t.(IDrop_role_statementContext) } -func (s *Action_entryContext) EOF() antlr.TerminalNode { - return s.GetToken(KuneiformParserEOF, 0) -} +func (s *StatementContext) Grant_statement() IGrant_statementContext { + var t antlr.RuleContext + for _, ctx := range s.GetChildren() { + if _, ok := ctx.(IGrant_statementContext); ok { + t = ctx.(antlr.RuleContext) + break + } + } -func (s *Action_entryContext) GetRuleContext() antlr.RuleContext { - return s -} + if t == nil { + return nil + } -func (s *Action_entryContext) ToStringTree(ruleNames []string, recog antlr.Recognizer) string { - return antlr.TreesStringTree(s, ruleNames, recog) + return t.(IGrant_statementContext) } -func (s *Action_entryContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { - switch t := visitor.(type) { - case KuneiformParserVisitor: - return t.VisitAction_entry(s) +func (s *StatementContext) Revoke_statement() IRevoke_statementContext { + var t antlr.RuleContext + for _, ctx := range s.GetChildren() { + if _, ok := ctx.(IRevoke_statementContext); ok { + t = ctx.(antlr.RuleContext) + break + } + } - default: - return t.VisitChildren(s) + if t == nil { + return nil } + + return t.(IRevoke_statementContext) } -func (p *KuneiformParser) Action_entry() (localctx IAction_entryContext) { - localctx = NewAction_entryContext(p, p.GetParserRuleContext(), p.GetState()) - p.EnterRule(localctx, 4, KuneiformParserRULE_action_entry) - p.EnterOuterAlt(localctx, 1) - { - p.SetState(122) - p.Action_block() - } - { - p.SetState(123) - p.Match(KuneiformParserEOF) - if p.HasError() { - // Recognition error - abort rule - goto errorExit +func (s *StatementContext) Transfer_ownership_statement() ITransfer_ownership_statementContext { + var t antlr.RuleContext + for _, ctx := range s.GetChildren() { + if _, ok := ctx.(ITransfer_ownership_statementContext); ok { + t = ctx.(antlr.RuleContext) + break } } -errorExit: - if p.HasError() { - v := p.GetError() - localctx.SetException(v) - p.GetErrorHandler().ReportError(p, v) - p.GetErrorHandler().Recover(p, v) - p.SetError(nil) + if t == nil { + return nil } - p.ExitRule() - return localctx - goto errorExit // Trick to prevent compiler error if the label is not used -} -// IProcedure_entryContext is an interface to support dynamic dispatch. -type IProcedure_entryContext interface { - antlr.ParserRuleContext + return t.(ITransfer_ownership_statementContext) +} - // GetParser returns the parser. - GetParser() antlr.Parser +func (s *StatementContext) Create_action_statement() ICreate_action_statementContext { + var t antlr.RuleContext + for _, ctx := range s.GetChildren() { + if _, ok := ctx.(ICreate_action_statementContext); ok { + t = ctx.(antlr.RuleContext) + break + } + } - // Getter signatures - Procedure_block() IProcedure_blockContext - EOF() antlr.TerminalNode + if t == nil { + return nil + } - // IsProcedure_entryContext differentiates from other interfaces. 
- IsProcedure_entryContext() + return t.(ICreate_action_statementContext) } -type Procedure_entryContext struct { - antlr.BaseParserRuleContext - parser antlr.Parser -} +func (s *StatementContext) Drop_action_statement() IDrop_action_statementContext { + var t antlr.RuleContext + for _, ctx := range s.GetChildren() { + if _, ok := ctx.(IDrop_action_statementContext); ok { + t = ctx.(antlr.RuleContext) + break + } + } -func NewEmptyProcedure_entryContext() *Procedure_entryContext { - var p = new(Procedure_entryContext) - antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1) - p.RuleIndex = KuneiformParserRULE_procedure_entry - return p -} + if t == nil { + return nil + } -func InitEmptyProcedure_entryContext(p *Procedure_entryContext) { - antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1) - p.RuleIndex = KuneiformParserRULE_procedure_entry + return t.(IDrop_action_statementContext) } -func (*Procedure_entryContext) IsProcedure_entryContext() {} +func (s *StatementContext) Use_extension_statement() IUse_extension_statementContext { + var t antlr.RuleContext + for _, ctx := range s.GetChildren() { + if _, ok := ctx.(IUse_extension_statementContext); ok { + t = ctx.(antlr.RuleContext) + break + } + } -func NewProcedure_entryContext(parser antlr.Parser, parent antlr.ParserRuleContext, invokingState int) *Procedure_entryContext { - var p = new(Procedure_entryContext) + if t == nil { + return nil + } - antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, parent, invokingState) + return t.(IUse_extension_statementContext) +} - p.parser = parser - p.RuleIndex = KuneiformParserRULE_procedure_entry +func (s *StatementContext) Unuse_extension_statement() IUnuse_extension_statementContext { + var t antlr.RuleContext + for _, ctx := range s.GetChildren() { + if _, ok := ctx.(IUnuse_extension_statementContext); ok { + t = ctx.(antlr.RuleContext) + break + } + } - return p + if t == nil { + return nil + } + + return t.(IUnuse_extension_statementContext) } -func (s *Procedure_entryContext) GetParser() antlr.Parser { return s.parser } +func (s *StatementContext) Create_namespace_statement() ICreate_namespace_statementContext { + var t antlr.RuleContext + for _, ctx := range s.GetChildren() { + if _, ok := ctx.(ICreate_namespace_statementContext); ok { + t = ctx.(antlr.RuleContext) + break + } + } + + if t == nil { + return nil + } + + return t.(ICreate_namespace_statementContext) +} -func (s *Procedure_entryContext) Procedure_block() IProcedure_blockContext { +func (s *StatementContext) Drop_namespace_statement() IDrop_namespace_statementContext { var t antlr.RuleContext for _, ctx := range s.GetChildren() { - if _, ok := ctx.(IProcedure_blockContext); ok { + if _, ok := ctx.(IDrop_namespace_statementContext); ok { t = ctx.(antlr.RuleContext) break } @@ -1290,46 +1557,201 @@ func (s *Procedure_entryContext) Procedure_block() IProcedure_blockContext { return nil } - return t.(IProcedure_blockContext) + return t.(IDrop_namespace_statementContext) } -func (s *Procedure_entryContext) EOF() antlr.TerminalNode { - return s.GetToken(KuneiformParserEOF, 0) +func (s *StatementContext) LBRACE() antlr.TerminalNode { + return s.GetToken(KuneiformParserLBRACE, 0) +} + +func (s *StatementContext) RBRACE() antlr.TerminalNode { + return s.GetToken(KuneiformParserRBRACE, 0) +} + +func (s *StatementContext) Identifier() IIdentifierContext { + var t antlr.RuleContext + for _, ctx := range s.GetChildren() { + if _, ok := ctx.(IIdentifierContext); ok { + t = ctx.(antlr.RuleContext) + break + } 
+ } + + if t == nil { + return nil + } + + return t.(IIdentifierContext) } -func (s *Procedure_entryContext) GetRuleContext() antlr.RuleContext { +func (s *StatementContext) GetRuleContext() antlr.RuleContext { return s } -func (s *Procedure_entryContext) ToStringTree(ruleNames []string, recog antlr.Recognizer) string { +func (s *StatementContext) ToStringTree(ruleNames []string, recog antlr.Recognizer) string { return antlr.TreesStringTree(s, ruleNames, recog) } -func (s *Procedure_entryContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { +func (s *StatementContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { switch t := visitor.(type) { case KuneiformParserVisitor: - return t.VisitProcedure_entry(s) + return t.VisitStatement(s) default: return t.VisitChildren(s) } } -func (p *KuneiformParser) Procedure_entry() (localctx IProcedure_entryContext) { - localctx = NewProcedure_entryContext(p, p.GetParserRuleContext(), p.GetState()) - p.EnterRule(localctx, 6, KuneiformParserRULE_procedure_entry) +func (p *KuneiformParser) Statement() (localctx IStatementContext) { + localctx = NewStatementContext(p, p.GetParserRuleContext(), p.GetState()) + p.EnterRule(localctx, 2, KuneiformParserRULE_statement) + var _la int + p.EnterOuterAlt(localctx, 1) - { - p.SetState(125) - p.Procedure_block() + p.SetState(141) + p.GetErrorHandler().Sync(p) + if p.HasError() { + goto errorExit } - { - p.SetState(126) - p.Match(KuneiformParserEOF) - if p.HasError() { - // Recognition error - abort rule - goto errorExit + _la = p.GetTokenStream().LA(1) + + if _la == KuneiformParserLBRACE { + { + p.SetState(137) + p.Match(KuneiformParserLBRACE) + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } + } + { + p.SetState(138) + + var _x = p.Identifier() + + localctx.(*StatementContext).namespace = _x + } + { + p.SetState(139) + p.Match(KuneiformParserRBRACE) + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } + } + + } + p.SetState(160) + p.GetErrorHandler().Sync(p) + if p.HasError() { + goto errorExit + } + + switch p.GetInterpreter().AdaptivePredict(p.BaseParser, p.GetTokenStream(), 3, p.GetParserRuleContext()) { + case 1: + { + p.SetState(143) + p.Sql_statement() + } + + case 2: + { + p.SetState(144) + p.Create_table_statement() + } + + case 3: + { + p.SetState(145) + p.Alter_table_statement() + } + + case 4: + { + p.SetState(146) + p.Drop_table_statement() + } + + case 5: + { + p.SetState(147) + p.Create_index_statement() + } + + case 6: + { + p.SetState(148) + p.Drop_index_statement() + } + + case 7: + { + p.SetState(149) + p.Create_role_statement() + } + + case 8: + { + p.SetState(150) + p.Drop_role_statement() + } + + case 9: + { + p.SetState(151) + p.Grant_statement() + } + + case 10: + { + p.SetState(152) + p.Revoke_statement() + } + + case 11: + { + p.SetState(153) + p.Transfer_ownership_statement() + } + + case 12: + { + p.SetState(154) + p.Create_action_statement() + } + + case 13: + { + p.SetState(155) + p.Drop_action_statement() + } + + case 14: + { + p.SetState(156) + p.Use_extension_statement() + } + + case 15: + { + p.SetState(157) + p.Unuse_extension_statement() + } + + case 16: + { + p.SetState(158) + p.Create_namespace_statement() + } + + case 17: + { + p.SetState(159) + p.Drop_namespace_statement() } + + case antlr.ATNInvalidAltNumber: + goto errorExit } errorExit: @@ -1621,21 +2043,21 @@ func (s *Binary_literalContext) Accept(visitor antlr.ParseTreeVisitor) interface func (p *KuneiformParser) Literal() (localctx ILiteralContext) { localctx 
= NewLiteralContext(p, p.GetParserRuleContext(), p.GetState()) - p.EnterRule(localctx, 8, KuneiformParserRULE_literal) + p.EnterRule(localctx, 4, KuneiformParserRULE_literal) var _la int - p.SetState(142) + p.SetState(176) p.GetErrorHandler().Sync(p) if p.HasError() { goto errorExit } - switch p.GetInterpreter().AdaptivePredict(p.BaseParser, p.GetTokenStream(), 2, p.GetParserRuleContext()) { + switch p.GetInterpreter().AdaptivePredict(p.BaseParser, p.GetTokenStream(), 6, p.GetParserRuleContext()) { case 1: localctx = NewString_literalContext(p, localctx) p.EnterOuterAlt(localctx, 1) { - p.SetState(128) + p.SetState(162) p.Match(KuneiformParserSTRING_) if p.HasError() { // Recognition error - abort rule @@ -1646,7 +2068,7 @@ func (p *KuneiformParser) Literal() (localctx ILiteralContext) { case 2: localctx = NewInteger_literalContext(p, localctx) p.EnterOuterAlt(localctx, 2) - p.SetState(130) + p.SetState(164) p.GetErrorHandler().Sync(p) if p.HasError() { goto errorExit @@ -1655,7 +2077,7 @@ func (p *KuneiformParser) Literal() (localctx ILiteralContext) { if _la == KuneiformParserPLUS || _la == KuneiformParserMINUS { { - p.SetState(129) + p.SetState(163) _la = p.GetTokenStream().LA(1) if !(_la == KuneiformParserPLUS || _la == KuneiformParserMINUS) { @@ -1668,7 +2090,7 @@ func (p *KuneiformParser) Literal() (localctx ILiteralContext) { } { - p.SetState(132) + p.SetState(166) p.Match(KuneiformParserDIGITS_) if p.HasError() { // Recognition error - abort rule @@ -1679,7 +2101,7 @@ func (p *KuneiformParser) Literal() (localctx ILiteralContext) { case 3: localctx = NewDecimal_literalContext(p, localctx) p.EnterOuterAlt(localctx, 3) - p.SetState(134) + p.SetState(168) p.GetErrorHandler().Sync(p) if p.HasError() { goto errorExit @@ -1688,7 +2110,7 @@ func (p *KuneiformParser) Literal() (localctx ILiteralContext) { if _la == KuneiformParserPLUS || _la == KuneiformParserMINUS { { - p.SetState(133) + p.SetState(167) _la = p.GetTokenStream().LA(1) if !(_la == KuneiformParserPLUS || _la == KuneiformParserMINUS) { @@ -1701,7 +2123,7 @@ func (p *KuneiformParser) Literal() (localctx ILiteralContext) { } { - p.SetState(136) + p.SetState(170) p.Match(KuneiformParserDIGITS_) if p.HasError() { // Recognition error - abort rule @@ -1709,7 +2131,7 @@ func (p *KuneiformParser) Literal() (localctx ILiteralContext) { } } { - p.SetState(137) + p.SetState(171) p.Match(KuneiformParserPERIOD) if p.HasError() { // Recognition error - abort rule @@ -1717,7 +2139,7 @@ func (p *KuneiformParser) Literal() (localctx ILiteralContext) { } } { - p.SetState(138) + p.SetState(172) p.Match(KuneiformParserDIGITS_) if p.HasError() { // Recognition error - abort rule @@ -1729,7 +2151,7 @@ func (p *KuneiformParser) Literal() (localctx ILiteralContext) { localctx = NewBoolean_literalContext(p, localctx) p.EnterOuterAlt(localctx, 4) { - p.SetState(139) + p.SetState(173) _la = p.GetTokenStream().LA(1) if !(_la == KuneiformParserTRUE || _la == KuneiformParserFALSE) { @@ -1744,7 +2166,7 @@ func (p *KuneiformParser) Literal() (localctx ILiteralContext) { localctx = NewNull_literalContext(p, localctx) p.EnterOuterAlt(localctx, 5) { - p.SetState(140) + p.SetState(174) p.Match(KuneiformParserNULL) if p.HasError() { // Recognition error - abort rule @@ -1756,7 +2178,7 @@ func (p *KuneiformParser) Literal() (localctx ILiteralContext) { localctx = NewBinary_literalContext(p, localctx) p.EnterOuterAlt(localctx, 6) { - p.SetState(141) + p.SetState(175) p.Match(KuneiformParserBINARY_) if p.HasError() { // Recognition error - abort rule @@ -1861,8 
+2283,8 @@ func (s *IdentifierContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { func (p *KuneiformParser) Identifier() (localctx IIdentifierContext) { localctx = NewIdentifierContext(p, p.GetParserRuleContext(), p.GetState()) - p.EnterRule(localctx, 10, KuneiformParserRULE_identifier) - p.SetState(148) + p.EnterRule(localctx, 6, KuneiformParserRULE_identifier) + p.SetState(182) p.GetErrorHandler().Sync(p) if p.HasError() { goto errorExit @@ -1872,7 +2294,7 @@ func (p *KuneiformParser) Identifier() (localctx IIdentifierContext) { case KuneiformParserDOUBLE_QUOTE: p.EnterOuterAlt(localctx, 1) { - p.SetState(144) + p.SetState(178) p.Match(KuneiformParserDOUBLE_QUOTE) if p.HasError() { // Recognition error - abort rule @@ -1880,7 +2302,7 @@ func (p *KuneiformParser) Identifier() (localctx IIdentifierContext) { } } { - p.SetState(145) + p.SetState(179) p.Match(KuneiformParserIDENTIFIER) if p.HasError() { // Recognition error - abort rule @@ -1888,7 +2310,7 @@ func (p *KuneiformParser) Identifier() (localctx IIdentifierContext) { } } { - p.SetState(146) + p.SetState(180) p.Match(KuneiformParserDOUBLE_QUOTE) if p.HasError() { // Recognition error - abort rule @@ -1899,7 +2321,7 @@ func (p *KuneiformParser) Identifier() (localctx IIdentifierContext) { case KuneiformParserIDENTIFIER: p.EnterOuterAlt(localctx, 2) { - p.SetState(147) + p.SetState(181) p.Match(KuneiformParserIDENTIFIER) if p.HasError() { // Recognition error - abort rule @@ -2043,15 +2465,15 @@ func (s *Identifier_listContext) Accept(visitor antlr.ParseTreeVisitor) interfac func (p *KuneiformParser) Identifier_list() (localctx IIdentifier_listContext) { localctx = NewIdentifier_listContext(p, p.GetParserRuleContext(), p.GetState()) - p.EnterRule(localctx, 12, KuneiformParserRULE_identifier_list) + p.EnterRule(localctx, 8, KuneiformParserRULE_identifier_list) var _la int p.EnterOuterAlt(localctx, 1) { - p.SetState(150) + p.SetState(184) p.Identifier() } - p.SetState(155) + p.SetState(189) p.GetErrorHandler().Sync(p) if p.HasError() { goto errorExit @@ -2060,7 +2482,7 @@ func (p *KuneiformParser) Identifier_list() (localctx IIdentifier_listContext) { for _la == KuneiformParserCOMMA { { - p.SetState(151) + p.SetState(185) p.Match(KuneiformParserCOMMA) if p.HasError() { // Recognition error - abort rule @@ -2068,11 +2490,11 @@ func (p *KuneiformParser) Identifier_list() (localctx IIdentifier_listContext) { } } { - p.SetState(152) + p.SetState(186) p.Identifier() } - p.SetState(157) + p.SetState(191) p.GetErrorHandler().Sync(p) if p.HasError() { goto errorExit @@ -2101,7 +2523,7 @@ type ITypeContext interface { GetParser() antlr.Parser // Getter signatures - IDENTIFIER() antlr.TerminalNode + Identifier() IIdentifierContext LPAREN() antlr.TerminalNode AllDIGITS_() []antlr.TerminalNode DIGITS_(i int) antlr.TerminalNode @@ -2146,8 +2568,20 @@ func NewTypeContext(parser antlr.Parser, parent antlr.ParserRuleContext, invokin func (s *TypeContext) GetParser() antlr.Parser { return s.parser } -func (s *TypeContext) IDENTIFIER() antlr.TerminalNode { - return s.GetToken(KuneiformParserIDENTIFIER, 0) +func (s *TypeContext) Identifier() IIdentifierContext { + var t antlr.RuleContext + for _, ctx := range s.GetChildren() { + if _, ok := ctx.(IIdentifierContext); ok { + t = ctx.(antlr.RuleContext) + break + } + } + + if t == nil { + return nil + } + + return t.(IIdentifierContext) } func (s *TypeContext) LPAREN() antlr.TerminalNode { @@ -2198,22 +2632,18 @@ func (s *TypeContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { func (p 
*KuneiformParser) Type_() (localctx ITypeContext) { localctx = NewTypeContext(p, p.GetParserRuleContext(), p.GetState()) - p.EnterRule(localctx, 14, KuneiformParserRULE_type) + p.EnterRule(localctx, 10, KuneiformParserRULE_type) p.EnterOuterAlt(localctx, 1) { - p.SetState(158) - p.Match(KuneiformParserIDENTIFIER) - if p.HasError() { - // Recognition error - abort rule - goto errorExit - } + p.SetState(192) + p.Identifier() } - p.SetState(164) + p.SetState(198) p.GetErrorHandler().Sync(p) - if p.GetInterpreter().AdaptivePredict(p.BaseParser, p.GetTokenStream(), 5, p.GetParserRuleContext()) == 1 { + if p.GetInterpreter().AdaptivePredict(p.BaseParser, p.GetTokenStream(), 9, p.GetParserRuleContext()) == 1 { { - p.SetState(159) + p.SetState(193) p.Match(KuneiformParserLPAREN) if p.HasError() { // Recognition error - abort rule @@ -2221,7 +2651,7 @@ func (p *KuneiformParser) Type_() (localctx ITypeContext) { } } { - p.SetState(160) + p.SetState(194) p.Match(KuneiformParserDIGITS_) if p.HasError() { // Recognition error - abort rule @@ -2229,7 +2659,7 @@ func (p *KuneiformParser) Type_() (localctx ITypeContext) { } } { - p.SetState(161) + p.SetState(195) p.Match(KuneiformParserCOMMA) if p.HasError() { // Recognition error - abort rule @@ -2237,7 +2667,7 @@ func (p *KuneiformParser) Type_() (localctx ITypeContext) { } } { - p.SetState(162) + p.SetState(196) p.Match(KuneiformParserDIGITS_) if p.HasError() { // Recognition error - abort rule @@ -2245,7 +2675,7 @@ func (p *KuneiformParser) Type_() (localctx ITypeContext) { } } { - p.SetState(163) + p.SetState(197) p.Match(KuneiformParserRPAREN) if p.HasError() { // Recognition error - abort rule @@ -2256,12 +2686,12 @@ func (p *KuneiformParser) Type_() (localctx ITypeContext) { } else if p.HasError() { // JIM goto errorExit } - p.SetState(168) + p.SetState(202) p.GetErrorHandler().Sync(p) - if p.GetInterpreter().AdaptivePredict(p.BaseParser, p.GetTokenStream(), 6, p.GetParserRuleContext()) == 1 { + if p.GetInterpreter().AdaptivePredict(p.BaseParser, p.GetTokenStream(), 10, p.GetParserRuleContext()) == 1 { { - p.SetState(166) + p.SetState(200) p.Match(KuneiformParserLBRACKET) if p.HasError() { // Recognition error - abort rule @@ -2269,7 +2699,7 @@ func (p *KuneiformParser) Type_() (localctx ITypeContext) { } } { - p.SetState(167) + p.SetState(201) p.Match(KuneiformParserRBRACKET) if p.HasError() { // Recognition error - abort rule @@ -2381,10 +2811,10 @@ func (s *Type_castContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { func (p *KuneiformParser) Type_cast() (localctx IType_castContext) { localctx = NewType_castContext(p, p.GetParserRuleContext(), p.GetState()) - p.EnterRule(localctx, 16, KuneiformParserRULE_type_cast) + p.EnterRule(localctx, 12, KuneiformParserRULE_type_cast) p.EnterOuterAlt(localctx, 1) { - p.SetState(170) + p.SetState(204) p.Match(KuneiformParserTYPE_CAST) if p.HasError() { // Recognition error - abort rule @@ -2392,7 +2822,7 @@ func (p *KuneiformParser) Type_cast() (localctx IType_castContext) { } } { - p.SetState(171) + p.SetState(205) p.Type_() } @@ -2484,12 +2914,12 @@ func (s *VariableContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { func (p *KuneiformParser) Variable() (localctx IVariableContext) { localctx = NewVariableContext(p, p.GetParserRuleContext(), p.GetState()) - p.EnterRule(localctx, 18, KuneiformParserRULE_variable) + p.EnterRule(localctx, 14, KuneiformParserRULE_variable) var _la int p.EnterOuterAlt(localctx, 1) { - p.SetState(173) + p.SetState(207) _la = p.GetTokenStream().LA(1) if !(_la 
== KuneiformParserVARIABLE || _la == KuneiformParserCONTEXTUAL_VARIABLE) { @@ -2513,81 +2943,124 @@ errorExit: goto errorExit // Trick to prevent compiler error if the label is not used } -// IVariable_listContext is an interface to support dynamic dispatch. -type IVariable_listContext interface { +// ITable_column_defContext is an interface to support dynamic dispatch. +type ITable_column_defContext interface { antlr.ParserRuleContext // GetParser returns the parser. GetParser() antlr.Parser + // GetName returns the name rule contexts. + GetName() IIdentifierContext + + // SetName sets the name rule contexts. + SetName(IIdentifierContext) + // Getter signatures - AllVariable() []IVariableContext - Variable(i int) IVariableContext - AllCOMMA() []antlr.TerminalNode - COMMA(i int) antlr.TerminalNode + Type_() ITypeContext + Identifier() IIdentifierContext + AllInline_constraint() []IInline_constraintContext + Inline_constraint(i int) IInline_constraintContext - // IsVariable_listContext differentiates from other interfaces. - IsVariable_listContext() + // IsTable_column_defContext differentiates from other interfaces. + IsTable_column_defContext() } -type Variable_listContext struct { +type Table_column_defContext struct { antlr.BaseParserRuleContext parser antlr.Parser + name IIdentifierContext } -func NewEmptyVariable_listContext() *Variable_listContext { - var p = new(Variable_listContext) +func NewEmptyTable_column_defContext() *Table_column_defContext { + var p = new(Table_column_defContext) antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1) - p.RuleIndex = KuneiformParserRULE_variable_list + p.RuleIndex = KuneiformParserRULE_table_column_def return p } -func InitEmptyVariable_listContext(p *Variable_listContext) { +func InitEmptyTable_column_defContext(p *Table_column_defContext) { antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1) - p.RuleIndex = KuneiformParserRULE_variable_list + p.RuleIndex = KuneiformParserRULE_table_column_def } -func (*Variable_listContext) IsVariable_listContext() {} +func (*Table_column_defContext) IsTable_column_defContext() {} -func NewVariable_listContext(parser antlr.Parser, parent antlr.ParserRuleContext, invokingState int) *Variable_listContext { - var p = new(Variable_listContext) +func NewTable_column_defContext(parser antlr.Parser, parent antlr.ParserRuleContext, invokingState int) *Table_column_defContext { + var p = new(Table_column_defContext) antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, parent, invokingState) p.parser = parser - p.RuleIndex = KuneiformParserRULE_variable_list + p.RuleIndex = KuneiformParserRULE_table_column_def return p } -func (s *Variable_listContext) GetParser() antlr.Parser { return s.parser } +func (s *Table_column_defContext) GetParser() antlr.Parser { return s.parser } -func (s *Variable_listContext) AllVariable() []IVariableContext { - children := s.GetChildren() - len := 0 - for _, ctx := range children { - if _, ok := ctx.(IVariableContext); ok { - len++ +func (s *Table_column_defContext) GetName() IIdentifierContext { return s.name } + +func (s *Table_column_defContext) SetName(v IIdentifierContext) { s.name = v } + +func (s *Table_column_defContext) Type_() ITypeContext { + var t antlr.RuleContext + for _, ctx := range s.GetChildren() { + if _, ok := ctx.(ITypeContext); ok { + t = ctx.(antlr.RuleContext) + break } } - tst := make([]IVariableContext, len) - i := 0 - for _, ctx := range children { - if t, ok := ctx.(IVariableContext); ok { - tst[i] = t.(IVariableContext) - i++ 
- } + if t == nil { + return nil } - return tst + return t.(ITypeContext) } -func (s *Variable_listContext) Variable(i int) IVariableContext { +func (s *Table_column_defContext) Identifier() IIdentifierContext { + var t antlr.RuleContext + for _, ctx := range s.GetChildren() { + if _, ok := ctx.(IIdentifierContext); ok { + t = ctx.(antlr.RuleContext) + break + } + } + + if t == nil { + return nil + } + + return t.(IIdentifierContext) +} + +func (s *Table_column_defContext) AllInline_constraint() []IInline_constraintContext { + children := s.GetChildren() + len := 0 + for _, ctx := range children { + if _, ok := ctx.(IInline_constraintContext); ok { + len++ + } + } + + tst := make([]IInline_constraintContext, len) + i := 0 + for _, ctx := range children { + if t, ok := ctx.(IInline_constraintContext); ok { + tst[i] = t.(IInline_constraintContext) + i++ + } + } + + return tst +} + +func (s *Table_column_defContext) Inline_constraint(i int) IInline_constraintContext { var t antlr.RuleContext j := 0 for _, ctx := range s.GetChildren() { - if _, ok := ctx.(IVariableContext); ok { + if _, ok := ctx.(IInline_constraintContext); ok { if j == i { t = ctx.(antlr.RuleContext) break @@ -2600,67 +3073,58 @@ func (s *Variable_listContext) Variable(i int) IVariableContext { return nil } - return t.(IVariableContext) -} - -func (s *Variable_listContext) AllCOMMA() []antlr.TerminalNode { - return s.GetTokens(KuneiformParserCOMMA) + return t.(IInline_constraintContext) } -func (s *Variable_listContext) COMMA(i int) antlr.TerminalNode { - return s.GetToken(KuneiformParserCOMMA, i) -} - -func (s *Variable_listContext) GetRuleContext() antlr.RuleContext { +func (s *Table_column_defContext) GetRuleContext() antlr.RuleContext { return s } -func (s *Variable_listContext) ToStringTree(ruleNames []string, recog antlr.Recognizer) string { +func (s *Table_column_defContext) ToStringTree(ruleNames []string, recog antlr.Recognizer) string { return antlr.TreesStringTree(s, ruleNames, recog) } -func (s *Variable_listContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { +func (s *Table_column_defContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { switch t := visitor.(type) { case KuneiformParserVisitor: - return t.VisitVariable_list(s) + return t.VisitTable_column_def(s) default: return t.VisitChildren(s) } } -func (p *KuneiformParser) Variable_list() (localctx IVariable_listContext) { - localctx = NewVariable_listContext(p, p.GetParserRuleContext(), p.GetState()) - p.EnterRule(localctx, 20, KuneiformParserRULE_variable_list) +func (p *KuneiformParser) Table_column_def() (localctx ITable_column_defContext) { + localctx = NewTable_column_defContext(p, p.GetParserRuleContext(), p.GetState()) + p.EnterRule(localctx, 16, KuneiformParserRULE_table_column_def) var _la int p.EnterOuterAlt(localctx, 1) { - p.SetState(175) - p.Variable() + p.SetState(209) + + var _x = p.Identifier() + + localctx.(*Table_column_defContext).name = _x + } + { + p.SetState(210) + p.Type_() } - p.SetState(180) + p.SetState(214) p.GetErrorHandler().Sync(p) if p.HasError() { goto errorExit } _la = p.GetTokenStream().LA(1) - for _la == KuneiformParserCOMMA { - { - p.SetState(176) - p.Match(KuneiformParserCOMMA) - if p.HasError() { - // Recognition error - abort rule - goto errorExit - } - } + for (int64(_la) & ^0x3f) == 0 && ((int64(1)<<_la)&2920760280210210816) != 0 { { - p.SetState(177) - p.Variable() + p.SetState(211) + p.Inline_constraint() } - p.SetState(182) + p.SetState(216) p.GetErrorHandler().Sync(p) if p.HasError() { goto 
errorExit @@ -2681,92 +3145,69 @@ errorExit: goto errorExit // Trick to prevent compiler error if the label is not used } -// ISchemaContext is an interface to support dynamic dispatch. -type ISchemaContext interface { +// IType_listContext is an interface to support dynamic dispatch. +type IType_listContext interface { antlr.ParserRuleContext // GetParser returns the parser. GetParser() antlr.Parser // Getter signatures - Database_declaration() IDatabase_declarationContext - AllUse_declaration() []IUse_declarationContext - Use_declaration(i int) IUse_declarationContext - AllTable_declaration() []ITable_declarationContext - Table_declaration(i int) ITable_declarationContext - AllAction_declaration() []IAction_declarationContext - Action_declaration(i int) IAction_declarationContext - AllProcedure_declaration() []IProcedure_declarationContext - Procedure_declaration(i int) IProcedure_declarationContext - AllForeign_procedure_declaration() []IForeign_procedure_declarationContext - Foreign_procedure_declaration(i int) IForeign_procedure_declarationContext - - // IsSchemaContext differentiates from other interfaces. - IsSchemaContext() -} - -type SchemaContext struct { + AllType_() []ITypeContext + Type_(i int) ITypeContext + AllCOMMA() []antlr.TerminalNode + COMMA(i int) antlr.TerminalNode + + // IsType_listContext differentiates from other interfaces. + IsType_listContext() +} + +type Type_listContext struct { antlr.BaseParserRuleContext parser antlr.Parser } -func NewEmptySchemaContext() *SchemaContext { - var p = new(SchemaContext) +func NewEmptyType_listContext() *Type_listContext { + var p = new(Type_listContext) antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1) - p.RuleIndex = KuneiformParserRULE_schema + p.RuleIndex = KuneiformParserRULE_type_list return p } -func InitEmptySchemaContext(p *SchemaContext) { +func InitEmptyType_listContext(p *Type_listContext) { antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1) - p.RuleIndex = KuneiformParserRULE_schema + p.RuleIndex = KuneiformParserRULE_type_list } -func (*SchemaContext) IsSchemaContext() {} +func (*Type_listContext) IsType_listContext() {} -func NewSchemaContext(parser antlr.Parser, parent antlr.ParserRuleContext, invokingState int) *SchemaContext { - var p = new(SchemaContext) +func NewType_listContext(parser antlr.Parser, parent antlr.ParserRuleContext, invokingState int) *Type_listContext { + var p = new(Type_listContext) antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, parent, invokingState) p.parser = parser - p.RuleIndex = KuneiformParserRULE_schema + p.RuleIndex = KuneiformParserRULE_type_list return p } -func (s *SchemaContext) GetParser() antlr.Parser { return s.parser } - -func (s *SchemaContext) Database_declaration() IDatabase_declarationContext { - var t antlr.RuleContext - for _, ctx := range s.GetChildren() { - if _, ok := ctx.(IDatabase_declarationContext); ok { - t = ctx.(antlr.RuleContext) - break - } - } - - if t == nil { - return nil - } - - return t.(IDatabase_declarationContext) -} +func (s *Type_listContext) GetParser() antlr.Parser { return s.parser } -func (s *SchemaContext) AllUse_declaration() []IUse_declarationContext { +func (s *Type_listContext) AllType_() []ITypeContext { children := s.GetChildren() len := 0 for _, ctx := range children { - if _, ok := ctx.(IUse_declarationContext); ok { + if _, ok := ctx.(ITypeContext); ok { len++ } } - tst := make([]IUse_declarationContext, len) + tst := make([]ITypeContext, len) i := 0 for _, ctx := range children { - if t, 
ok := ctx.(IUse_declarationContext); ok { - tst[i] = t.(IUse_declarationContext) + if t, ok := ctx.(ITypeContext); ok { + tst[i] = t.(ITypeContext) i++ } } @@ -2774,11 +3215,11 @@ func (s *SchemaContext) AllUse_declaration() []IUse_declarationContext { return tst } -func (s *SchemaContext) Use_declaration(i int) IUse_declarationContext { +func (s *Type_listContext) Type_(i int) ITypeContext { var t antlr.RuleContext j := 0 for _, ctx := range s.GetChildren() { - if _, ok := ctx.(IUse_declarationContext); ok { + if _, ok := ctx.(ITypeContext); ok { if j == i { t = ctx.(antlr.RuleContext) break @@ -2791,64 +3232,152 @@ func (s *SchemaContext) Use_declaration(i int) IUse_declarationContext { return nil } - return t.(IUse_declarationContext) + return t.(ITypeContext) } -func (s *SchemaContext) AllTable_declaration() []ITable_declarationContext { - children := s.GetChildren() - len := 0 - for _, ctx := range children { - if _, ok := ctx.(ITable_declarationContext); ok { - len++ - } - } +func (s *Type_listContext) AllCOMMA() []antlr.TerminalNode { + return s.GetTokens(KuneiformParserCOMMA) +} - tst := make([]ITable_declarationContext, len) - i := 0 - for _, ctx := range children { - if t, ok := ctx.(ITable_declarationContext); ok { - tst[i] = t.(ITable_declarationContext) - i++ - } - } +func (s *Type_listContext) COMMA(i int) antlr.TerminalNode { + return s.GetToken(KuneiformParserCOMMA, i) +} - return tst +func (s *Type_listContext) GetRuleContext() antlr.RuleContext { + return s } -func (s *SchemaContext) Table_declaration(i int) ITable_declarationContext { - var t antlr.RuleContext - j := 0 - for _, ctx := range s.GetChildren() { - if _, ok := ctx.(ITable_declarationContext); ok { - if j == i { - t = ctx.(antlr.RuleContext) - break +func (s *Type_listContext) ToStringTree(ruleNames []string, recog antlr.Recognizer) string { + return antlr.TreesStringTree(s, ruleNames, recog) +} + +func (s *Type_listContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { + switch t := visitor.(type) { + case KuneiformParserVisitor: + return t.VisitType_list(s) + + default: + return t.VisitChildren(s) + } +} + +func (p *KuneiformParser) Type_list() (localctx IType_listContext) { + localctx = NewType_listContext(p, p.GetParserRuleContext(), p.GetState()) + p.EnterRule(localctx, 18, KuneiformParserRULE_type_list) + var _la int + + p.EnterOuterAlt(localctx, 1) + { + p.SetState(217) + p.Type_() + } + p.SetState(222) + p.GetErrorHandler().Sync(p) + if p.HasError() { + goto errorExit + } + _la = p.GetTokenStream().LA(1) + + for _la == KuneiformParserCOMMA { + { + p.SetState(218) + p.Match(KuneiformParserCOMMA) + if p.HasError() { + // Recognition error - abort rule + goto errorExit } - j++ } + { + p.SetState(219) + p.Type_() + } + + p.SetState(224) + p.GetErrorHandler().Sync(p) + if p.HasError() { + goto errorExit + } + _la = p.GetTokenStream().LA(1) } - if t == nil { - return nil +errorExit: + if p.HasError() { + v := p.GetError() + localctx.SetException(v) + p.GetErrorHandler().ReportError(p, v) + p.GetErrorHandler().Recover(p, v) + p.SetError(nil) } + p.ExitRule() + return localctx + goto errorExit // Trick to prevent compiler error if the label is not used +} + +// INamed_type_listContext is an interface to support dynamic dispatch. +type INamed_type_listContext interface { + antlr.ParserRuleContext + + // GetParser returns the parser. 
+ GetParser() antlr.Parser - return t.(ITable_declarationContext) + // Getter signatures + AllIdentifier() []IIdentifierContext + Identifier(i int) IIdentifierContext + AllType_() []ITypeContext + Type_(i int) ITypeContext + AllCOMMA() []antlr.TerminalNode + COMMA(i int) antlr.TerminalNode + + // IsNamed_type_listContext differentiates from other interfaces. + IsNamed_type_listContext() +} + +type Named_type_listContext struct { + antlr.BaseParserRuleContext + parser antlr.Parser +} + +func NewEmptyNamed_type_listContext() *Named_type_listContext { + var p = new(Named_type_listContext) + antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1) + p.RuleIndex = KuneiformParserRULE_named_type_list + return p +} + +func InitEmptyNamed_type_listContext(p *Named_type_listContext) { + antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1) + p.RuleIndex = KuneiformParserRULE_named_type_list +} + +func (*Named_type_listContext) IsNamed_type_listContext() {} + +func NewNamed_type_listContext(parser antlr.Parser, parent antlr.ParserRuleContext, invokingState int) *Named_type_listContext { + var p = new(Named_type_listContext) + + antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, parent, invokingState) + + p.parser = parser + p.RuleIndex = KuneiformParserRULE_named_type_list + + return p } -func (s *SchemaContext) AllAction_declaration() []IAction_declarationContext { +func (s *Named_type_listContext) GetParser() antlr.Parser { return s.parser } + +func (s *Named_type_listContext) AllIdentifier() []IIdentifierContext { children := s.GetChildren() len := 0 for _, ctx := range children { - if _, ok := ctx.(IAction_declarationContext); ok { + if _, ok := ctx.(IIdentifierContext); ok { len++ } } - tst := make([]IAction_declarationContext, len) + tst := make([]IIdentifierContext, len) i := 0 for _, ctx := range children { - if t, ok := ctx.(IAction_declarationContext); ok { - tst[i] = t.(IAction_declarationContext) + if t, ok := ctx.(IIdentifierContext); ok { + tst[i] = t.(IIdentifierContext) i++ } } @@ -2856,11 +3385,11 @@ func (s *SchemaContext) AllAction_declaration() []IAction_declarationContext { return tst } -func (s *SchemaContext) Action_declaration(i int) IAction_declarationContext { +func (s *Named_type_listContext) Identifier(i int) IIdentifierContext { var t antlr.RuleContext j := 0 for _, ctx := range s.GetChildren() { - if _, ok := ctx.(IAction_declarationContext); ok { + if _, ok := ctx.(IIdentifierContext); ok { if j == i { t = ctx.(antlr.RuleContext) break @@ -2873,23 +3402,23 @@ func (s *SchemaContext) Action_declaration(i int) IAction_declarationContext { return nil } - return t.(IAction_declarationContext) + return t.(IIdentifierContext) } -func (s *SchemaContext) AllProcedure_declaration() []IProcedure_declarationContext { +func (s *Named_type_listContext) AllType_() []ITypeContext { children := s.GetChildren() len := 0 for _, ctx := range children { - if _, ok := ctx.(IProcedure_declarationContext); ok { + if _, ok := ctx.(ITypeContext); ok { len++ } } - tst := make([]IProcedure_declarationContext, len) + tst := make([]ITypeContext, len) i := 0 for _, ctx := range children { - if t, ok := ctx.(IProcedure_declarationContext); ok { - tst[i] = t.(IProcedure_declarationContext) + if t, ok := ctx.(ITypeContext); ok { + tst[i] = t.(ITypeContext) i++ } } @@ -2897,11 +3426,11 @@ func (s *SchemaContext) AllProcedure_declaration() []IProcedure_declarationConte return tst } -func (s *SchemaContext) Procedure_declaration(i int) IProcedure_declarationContext { +func 
(s *Named_type_listContext) Type_(i int) ITypeContext { var t antlr.RuleContext j := 0 for _, ctx := range s.GetChildren() { - if _, ok := ctx.(IProcedure_declarationContext); ok { + if _, ok := ctx.(ITypeContext); ok { if j == i { t = ctx.(antlr.RuleContext) break @@ -2914,128 +3443,75 @@ func (s *SchemaContext) Procedure_declaration(i int) IProcedure_declarationConte return nil } - return t.(IProcedure_declarationContext) + return t.(ITypeContext) } -func (s *SchemaContext) AllForeign_procedure_declaration() []IForeign_procedure_declarationContext { - children := s.GetChildren() - len := 0 - for _, ctx := range children { - if _, ok := ctx.(IForeign_procedure_declarationContext); ok { - len++ - } - } - - tst := make([]IForeign_procedure_declarationContext, len) - i := 0 - for _, ctx := range children { - if t, ok := ctx.(IForeign_procedure_declarationContext); ok { - tst[i] = t.(IForeign_procedure_declarationContext) - i++ - } - } - - return tst +func (s *Named_type_listContext) AllCOMMA() []antlr.TerminalNode { + return s.GetTokens(KuneiformParserCOMMA) } -func (s *SchemaContext) Foreign_procedure_declaration(i int) IForeign_procedure_declarationContext { - var t antlr.RuleContext - j := 0 - for _, ctx := range s.GetChildren() { - if _, ok := ctx.(IForeign_procedure_declarationContext); ok { - if j == i { - t = ctx.(antlr.RuleContext) - break - } - j++ - } - } - - if t == nil { - return nil - } - - return t.(IForeign_procedure_declarationContext) +func (s *Named_type_listContext) COMMA(i int) antlr.TerminalNode { + return s.GetToken(KuneiformParserCOMMA, i) } -func (s *SchemaContext) GetRuleContext() antlr.RuleContext { +func (s *Named_type_listContext) GetRuleContext() antlr.RuleContext { return s } -func (s *SchemaContext) ToStringTree(ruleNames []string, recog antlr.Recognizer) string { +func (s *Named_type_listContext) ToStringTree(ruleNames []string, recog antlr.Recognizer) string { return antlr.TreesStringTree(s, ruleNames, recog) } -func (s *SchemaContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { +func (s *Named_type_listContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { switch t := visitor.(type) { case KuneiformParserVisitor: - return t.VisitSchema(s) + return t.VisitNamed_type_list(s) default: return t.VisitChildren(s) } } -func (p *KuneiformParser) Schema() (localctx ISchemaContext) { - localctx = NewSchemaContext(p, p.GetParserRuleContext(), p.GetState()) - p.EnterRule(localctx, 22, KuneiformParserRULE_schema) +func (p *KuneiformParser) Named_type_list() (localctx INamed_type_listContext) { + localctx = NewNamed_type_listContext(p, p.GetParserRuleContext(), p.GetState()) + p.EnterRule(localctx, 20, KuneiformParserRULE_named_type_list) var _la int p.EnterOuterAlt(localctx, 1) { - p.SetState(183) - p.Database_declaration() + p.SetState(225) + p.Identifier() + } + { + p.SetState(226) + p.Type_() } - p.SetState(191) + p.SetState(233) p.GetErrorHandler().Sync(p) if p.HasError() { goto errorExit } _la = p.GetTokenStream().LA(1) - for ((int64(_la) & ^0x3f) == 0 && ((int64(1)<<_la)&4655744548864) != 0) || _la == KuneiformParserCONTEXTUAL_VARIABLE { - p.SetState(189) - p.GetErrorHandler().Sync(p) - if p.HasError() { - goto errorExit - } - - switch p.GetInterpreter().AdaptivePredict(p.BaseParser, p.GetTokenStream(), 8, p.GetParserRuleContext()) { - case 1: - { - p.SetState(184) - p.Use_declaration() - } - - case 2: - { - p.SetState(185) - p.Table_declaration() - } - - case 3: - { - p.SetState(186) - p.Action_declaration() - } - - case 4: - { - p.SetState(187) - 
p.Procedure_declaration() - } - - case 5: - { - p.SetState(188) - p.Foreign_procedure_declaration() + for _la == KuneiformParserCOMMA { + { + p.SetState(227) + p.Match(KuneiformParserCOMMA) + if p.HasError() { + // Recognition error - abort rule + goto errorExit } - - case antlr.ATNInvalidAltNumber: - goto errorExit + } + { + p.SetState(228) + p.Identifier() + } + { + p.SetState(229) + p.Type_() } - p.SetState(193) + p.SetState(235) p.GetErrorHandler().Sync(p) if p.HasError() { goto errorExit @@ -3056,121 +3532,109 @@ errorExit: goto errorExit // Trick to prevent compiler error if the label is not used } -// IAnnotationContext is an interface to support dynamic dispatch. -type IAnnotationContext interface { +// IInline_constraintContext is an interface to support dynamic dispatch. +type IInline_constraintContext interface { antlr.ParserRuleContext // GetParser returns the parser. GetParser() antlr.Parser // Getter signatures - CONTEXTUAL_VARIABLE() antlr.TerminalNode + PRIMARY() antlr.TerminalNode + KEY() antlr.TerminalNode + UNIQUE() antlr.TerminalNode + NOT() antlr.TerminalNode + NULL() antlr.TerminalNode + DEFAULT() antlr.TerminalNode + Action_expr() IAction_exprContext + Fk_constraint() IFk_constraintContext + CHECK() antlr.TerminalNode LPAREN() antlr.TerminalNode + Sql_expr() ISql_exprContext RPAREN() antlr.TerminalNode - AllIDENTIFIER() []antlr.TerminalNode - IDENTIFIER(i int) antlr.TerminalNode - AllEQUALS() []antlr.TerminalNode - EQUALS(i int) antlr.TerminalNode - AllLiteral() []ILiteralContext - Literal(i int) ILiteralContext - AllCOMMA() []antlr.TerminalNode - COMMA(i int) antlr.TerminalNode - // IsAnnotationContext differentiates from other interfaces. - IsAnnotationContext() + // IsInline_constraintContext differentiates from other interfaces. 
+ IsInline_constraintContext() } -type AnnotationContext struct { +type Inline_constraintContext struct { antlr.BaseParserRuleContext parser antlr.Parser } -func NewEmptyAnnotationContext() *AnnotationContext { - var p = new(AnnotationContext) +func NewEmptyInline_constraintContext() *Inline_constraintContext { + var p = new(Inline_constraintContext) antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1) - p.RuleIndex = KuneiformParserRULE_annotation + p.RuleIndex = KuneiformParserRULE_inline_constraint return p } -func InitEmptyAnnotationContext(p *AnnotationContext) { +func InitEmptyInline_constraintContext(p *Inline_constraintContext) { antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1) - p.RuleIndex = KuneiformParserRULE_annotation + p.RuleIndex = KuneiformParserRULE_inline_constraint } -func (*AnnotationContext) IsAnnotationContext() {} +func (*Inline_constraintContext) IsInline_constraintContext() {} -func NewAnnotationContext(parser antlr.Parser, parent antlr.ParserRuleContext, invokingState int) *AnnotationContext { - var p = new(AnnotationContext) +func NewInline_constraintContext(parser antlr.Parser, parent antlr.ParserRuleContext, invokingState int) *Inline_constraintContext { + var p = new(Inline_constraintContext) antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, parent, invokingState) p.parser = parser - p.RuleIndex = KuneiformParserRULE_annotation + p.RuleIndex = KuneiformParserRULE_inline_constraint return p } -func (s *AnnotationContext) GetParser() antlr.Parser { return s.parser } - -func (s *AnnotationContext) CONTEXTUAL_VARIABLE() antlr.TerminalNode { - return s.GetToken(KuneiformParserCONTEXTUAL_VARIABLE, 0) -} +func (s *Inline_constraintContext) GetParser() antlr.Parser { return s.parser } -func (s *AnnotationContext) LPAREN() antlr.TerminalNode { - return s.GetToken(KuneiformParserLPAREN, 0) +func (s *Inline_constraintContext) PRIMARY() antlr.TerminalNode { + return s.GetToken(KuneiformParserPRIMARY, 0) } -func (s *AnnotationContext) RPAREN() antlr.TerminalNode { - return s.GetToken(KuneiformParserRPAREN, 0) +func (s *Inline_constraintContext) KEY() antlr.TerminalNode { + return s.GetToken(KuneiformParserKEY, 0) } -func (s *AnnotationContext) AllIDENTIFIER() []antlr.TerminalNode { - return s.GetTokens(KuneiformParserIDENTIFIER) +func (s *Inline_constraintContext) UNIQUE() antlr.TerminalNode { + return s.GetToken(KuneiformParserUNIQUE, 0) } -func (s *AnnotationContext) IDENTIFIER(i int) antlr.TerminalNode { - return s.GetToken(KuneiformParserIDENTIFIER, i) +func (s *Inline_constraintContext) NOT() antlr.TerminalNode { + return s.GetToken(KuneiformParserNOT, 0) } -func (s *AnnotationContext) AllEQUALS() []antlr.TerminalNode { - return s.GetTokens(KuneiformParserEQUALS) +func (s *Inline_constraintContext) NULL() antlr.TerminalNode { + return s.GetToken(KuneiformParserNULL, 0) } -func (s *AnnotationContext) EQUALS(i int) antlr.TerminalNode { - return s.GetToken(KuneiformParserEQUALS, i) +func (s *Inline_constraintContext) DEFAULT() antlr.TerminalNode { + return s.GetToken(KuneiformParserDEFAULT, 0) } -func (s *AnnotationContext) AllLiteral() []ILiteralContext { - children := s.GetChildren() - len := 0 - for _, ctx := range children { - if _, ok := ctx.(ILiteralContext); ok { - len++ +func (s *Inline_constraintContext) Action_expr() IAction_exprContext { + var t antlr.RuleContext + for _, ctx := range s.GetChildren() { + if _, ok := ctx.(IAction_exprContext); ok { + t = ctx.(antlr.RuleContext) + break } } - tst := 
make([]ILiteralContext, len) - i := 0 - for _, ctx := range children { - if t, ok := ctx.(ILiteralContext); ok { - tst[i] = t.(ILiteralContext) - i++ - } + if t == nil { + return nil } - return tst + return t.(IAction_exprContext) } -func (s *AnnotationContext) Literal(i int) ILiteralContext { +func (s *Inline_constraintContext) Fk_constraint() IFk_constraintContext { var t antlr.RuleContext - j := 0 for _, ctx := range s.GetChildren() { - if _, ok := ctx.(ILiteralContext); ok { - if j == i { - t = ctx.(antlr.RuleContext) - break - } - j++ + if _, ok := ctx.(IFk_constraintContext); ok { + t = ctx.(antlr.RuleContext) + break } } @@ -3178,258 +3642,407 @@ func (s *AnnotationContext) Literal(i int) ILiteralContext { return nil } - return t.(ILiteralContext) + return t.(IFk_constraintContext) } -func (s *AnnotationContext) AllCOMMA() []antlr.TerminalNode { - return s.GetTokens(KuneiformParserCOMMA) +func (s *Inline_constraintContext) CHECK() antlr.TerminalNode { + return s.GetToken(KuneiformParserCHECK, 0) } -func (s *AnnotationContext) COMMA(i int) antlr.TerminalNode { - return s.GetToken(KuneiformParserCOMMA, i) +func (s *Inline_constraintContext) LPAREN() antlr.TerminalNode { + return s.GetToken(KuneiformParserLPAREN, 0) +} + +func (s *Inline_constraintContext) Sql_expr() ISql_exprContext { + var t antlr.RuleContext + for _, ctx := range s.GetChildren() { + if _, ok := ctx.(ISql_exprContext); ok { + t = ctx.(antlr.RuleContext) + break + } + } + + if t == nil { + return nil + } + + return t.(ISql_exprContext) +} + +func (s *Inline_constraintContext) RPAREN() antlr.TerminalNode { + return s.GetToken(KuneiformParserRPAREN, 0) } -func (s *AnnotationContext) GetRuleContext() antlr.RuleContext { +func (s *Inline_constraintContext) GetRuleContext() antlr.RuleContext { return s } -func (s *AnnotationContext) ToStringTree(ruleNames []string, recog antlr.Recognizer) string { +func (s *Inline_constraintContext) ToStringTree(ruleNames []string, recog antlr.Recognizer) string { return antlr.TreesStringTree(s, ruleNames, recog) } -func (s *AnnotationContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { +func (s *Inline_constraintContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { switch t := visitor.(type) { case KuneiformParserVisitor: - return t.VisitAnnotation(s) + return t.VisitInline_constraint(s) default: return t.VisitChildren(s) } } -func (p *KuneiformParser) Annotation() (localctx IAnnotationContext) { - localctx = NewAnnotationContext(p, p.GetParserRuleContext(), p.GetState()) - p.EnterRule(localctx, 24, KuneiformParserRULE_annotation) - var _la int - - p.EnterOuterAlt(localctx, 1) - { - p.SetState(194) - p.Match(KuneiformParserCONTEXTUAL_VARIABLE) - if p.HasError() { - // Recognition error - abort rule - goto errorExit - } - } - { - p.SetState(195) - p.Match(KuneiformParserLPAREN) - if p.HasError() { - // Recognition error - abort rule - goto errorExit - } - } - p.SetState(208) +func (p *KuneiformParser) Inline_constraint() (localctx IInline_constraintContext) { + localctx = NewInline_constraintContext(p, p.GetParserRuleContext(), p.GetState()) + p.EnterRule(localctx, 22, KuneiformParserRULE_inline_constraint) + p.SetState(249) p.GetErrorHandler().Sync(p) if p.HasError() { goto errorExit } - _la = p.GetTokenStream().LA(1) - if _la == KuneiformParserIDENTIFIER { + switch p.GetTokenStream().LA(1) { + case KuneiformParserPRIMARY: + p.EnterOuterAlt(localctx, 1) { - p.SetState(196) - p.Match(KuneiformParserIDENTIFIER) + p.SetState(236) + p.Match(KuneiformParserPRIMARY) if 
p.HasError() { // Recognition error - abort rule goto errorExit } } { - p.SetState(197) - p.Match(KuneiformParserEQUALS) + p.SetState(237) + p.Match(KuneiformParserKEY) if p.HasError() { // Recognition error - abort rule goto errorExit } } + + case KuneiformParserUNIQUE: + p.EnterOuterAlt(localctx, 2) { - p.SetState(198) - p.Literal() - } - p.SetState(205) - p.GetErrorHandler().Sync(p) - if p.HasError() { - goto errorExit + p.SetState(238) + p.Match(KuneiformParserUNIQUE) + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } } - _la = p.GetTokenStream().LA(1) - for _la == KuneiformParserCOMMA { - { - p.SetState(199) - p.Match(KuneiformParserCOMMA) - if p.HasError() { - // Recognition error - abort rule - goto errorExit - } - } - { - p.SetState(200) - p.Match(KuneiformParserIDENTIFIER) - if p.HasError() { - // Recognition error - abort rule - goto errorExit - } - } - { - p.SetState(201) - p.Match(KuneiformParserEQUALS) - if p.HasError() { - // Recognition error - abort rule - goto errorExit - } + case KuneiformParserNOT: + p.EnterOuterAlt(localctx, 3) + { + p.SetState(239) + p.Match(KuneiformParserNOT) + if p.HasError() { + // Recognition error - abort rule + goto errorExit } - { - p.SetState(202) - p.Literal() + } + { + p.SetState(240) + p.Match(KuneiformParserNULL) + if p.HasError() { + // Recognition error - abort rule + goto errorExit } + } - p.SetState(207) - p.GetErrorHandler().Sync(p) + case KuneiformParserDEFAULT: + p.EnterOuterAlt(localctx, 4) + { + p.SetState(241) + p.Match(KuneiformParserDEFAULT) if p.HasError() { + // Recognition error - abort rule goto errorExit } - _la = p.GetTokenStream().LA(1) } - - } - { - p.SetState(210) - p.Match(KuneiformParserRPAREN) - if p.HasError() { - // Recognition error - abort rule - goto errorExit + { + p.SetState(242) + p.action_expr(0) } - } -errorExit: - if p.HasError() { - v := p.GetError() - localctx.SetException(v) - p.GetErrorHandler().ReportError(p, v) - p.GetErrorHandler().Recover(p, v) - p.SetError(nil) - } - p.ExitRule() - return localctx - goto errorExit // Trick to prevent compiler error if the label is not used -} + case KuneiformParserREFERENCES: + p.EnterOuterAlt(localctx, 5) + { + p.SetState(243) + p.Fk_constraint() + } -// IDatabase_declarationContext is an interface to support dynamic dispatch. -type IDatabase_declarationContext interface { - antlr.ParserRuleContext + case KuneiformParserCHECK: + p.EnterOuterAlt(localctx, 6) + { + p.SetState(244) + p.Match(KuneiformParserCHECK) + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } + } + + { + p.SetState(245) + p.Match(KuneiformParserLPAREN) + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } + } + { + p.SetState(246) + p.sql_expr(0) + } + { + p.SetState(247) + p.Match(KuneiformParserRPAREN) + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } + } + + default: + p.SetError(antlr.NewNoViableAltException(p, nil, nil, nil, nil, nil)) + goto errorExit + } + +errorExit: + if p.HasError() { + v := p.GetError() + localctx.SetException(v) + p.GetErrorHandler().ReportError(p, v) + p.GetErrorHandler().Recover(p, v) + p.SetError(nil) + } + p.ExitRule() + return localctx + goto errorExit // Trick to prevent compiler error if the label is not used +} + +// IFk_actionContext is an interface to support dynamic dispatch. +type IFk_actionContext interface { + antlr.ParserRuleContext // GetParser returns the parser. 
GetParser() antlr.Parser // Getter signatures - DATABASE() antlr.TerminalNode - IDENTIFIER() antlr.TerminalNode - SCOL() antlr.TerminalNode + ON() antlr.TerminalNode + UPDATE() antlr.TerminalNode + DELETE() antlr.TerminalNode + SET() antlr.TerminalNode + NULL() antlr.TerminalNode + DEFAULT() antlr.TerminalNode + RESTRICT() antlr.TerminalNode + NO() antlr.TerminalNode + ACTION() antlr.TerminalNode + CASCADE() antlr.TerminalNode - // IsDatabase_declarationContext differentiates from other interfaces. - IsDatabase_declarationContext() + // IsFk_actionContext differentiates from other interfaces. + IsFk_actionContext() } -type Database_declarationContext struct { +type Fk_actionContext struct { antlr.BaseParserRuleContext parser antlr.Parser } -func NewEmptyDatabase_declarationContext() *Database_declarationContext { - var p = new(Database_declarationContext) +func NewEmptyFk_actionContext() *Fk_actionContext { + var p = new(Fk_actionContext) antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1) - p.RuleIndex = KuneiformParserRULE_database_declaration + p.RuleIndex = KuneiformParserRULE_fk_action return p } -func InitEmptyDatabase_declarationContext(p *Database_declarationContext) { +func InitEmptyFk_actionContext(p *Fk_actionContext) { antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1) - p.RuleIndex = KuneiformParserRULE_database_declaration + p.RuleIndex = KuneiformParserRULE_fk_action } -func (*Database_declarationContext) IsDatabase_declarationContext() {} +func (*Fk_actionContext) IsFk_actionContext() {} -func NewDatabase_declarationContext(parser antlr.Parser, parent antlr.ParserRuleContext, invokingState int) *Database_declarationContext { - var p = new(Database_declarationContext) +func NewFk_actionContext(parser antlr.Parser, parent antlr.ParserRuleContext, invokingState int) *Fk_actionContext { + var p = new(Fk_actionContext) antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, parent, invokingState) p.parser = parser - p.RuleIndex = KuneiformParserRULE_database_declaration + p.RuleIndex = KuneiformParserRULE_fk_action return p } -func (s *Database_declarationContext) GetParser() antlr.Parser { return s.parser } +func (s *Fk_actionContext) GetParser() antlr.Parser { return s.parser } -func (s *Database_declarationContext) DATABASE() antlr.TerminalNode { - return s.GetToken(KuneiformParserDATABASE, 0) +func (s *Fk_actionContext) ON() antlr.TerminalNode { + return s.GetToken(KuneiformParserON, 0) } -func (s *Database_declarationContext) IDENTIFIER() antlr.TerminalNode { - return s.GetToken(KuneiformParserIDENTIFIER, 0) +func (s *Fk_actionContext) UPDATE() antlr.TerminalNode { + return s.GetToken(KuneiformParserUPDATE, 0) } -func (s *Database_declarationContext) SCOL() antlr.TerminalNode { - return s.GetToken(KuneiformParserSCOL, 0) +func (s *Fk_actionContext) DELETE() antlr.TerminalNode { + return s.GetToken(KuneiformParserDELETE, 0) +} + +func (s *Fk_actionContext) SET() antlr.TerminalNode { + return s.GetToken(KuneiformParserSET, 0) +} + +func (s *Fk_actionContext) NULL() antlr.TerminalNode { + return s.GetToken(KuneiformParserNULL, 0) +} + +func (s *Fk_actionContext) DEFAULT() antlr.TerminalNode { + return s.GetToken(KuneiformParserDEFAULT, 0) +} + +func (s *Fk_actionContext) RESTRICT() antlr.TerminalNode { + return s.GetToken(KuneiformParserRESTRICT, 0) +} + +func (s *Fk_actionContext) NO() antlr.TerminalNode { + return s.GetToken(KuneiformParserNO, 0) +} + +func (s *Fk_actionContext) ACTION() antlr.TerminalNode { + return 
s.GetToken(KuneiformParserACTION, 0) } -func (s *Database_declarationContext) GetRuleContext() antlr.RuleContext { +func (s *Fk_actionContext) CASCADE() antlr.TerminalNode { + return s.GetToken(KuneiformParserCASCADE, 0) +} + +func (s *Fk_actionContext) GetRuleContext() antlr.RuleContext { return s } -func (s *Database_declarationContext) ToStringTree(ruleNames []string, recog antlr.Recognizer) string { +func (s *Fk_actionContext) ToStringTree(ruleNames []string, recog antlr.Recognizer) string { return antlr.TreesStringTree(s, ruleNames, recog) } -func (s *Database_declarationContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { +func (s *Fk_actionContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { switch t := visitor.(type) { case KuneiformParserVisitor: - return t.VisitDatabase_declaration(s) + return t.VisitFk_action(s) default: return t.VisitChildren(s) } } -func (p *KuneiformParser) Database_declaration() (localctx IDatabase_declarationContext) { - localctx = NewDatabase_declarationContext(p, p.GetParserRuleContext(), p.GetState()) - p.EnterRule(localctx, 26, KuneiformParserRULE_database_declaration) +func (p *KuneiformParser) Fk_action() (localctx IFk_actionContext) { + localctx = NewFk_actionContext(p, p.GetParserRuleContext(), p.GetState()) + p.EnterRule(localctx, 24, KuneiformParserRULE_fk_action) + var _la int + p.EnterOuterAlt(localctx, 1) { - p.SetState(212) - p.Match(KuneiformParserDATABASE) + p.SetState(251) + p.Match(KuneiformParserON) if p.HasError() { // Recognition error - abort rule goto errorExit } } { - p.SetState(213) - p.Match(KuneiformParserIDENTIFIER) - if p.HasError() { - // Recognition error - abort rule - goto errorExit + p.SetState(252) + _la = p.GetTokenStream().LA(1) + + if !(_la == KuneiformParserDELETE || _la == KuneiformParserUPDATE) { + p.GetErrorHandler().RecoverInline(p) + } else { + p.GetErrorHandler().ReportMatch(p) + p.Consume() } } - { - p.SetState(214) - p.Match(KuneiformParserSCOL) - if p.HasError() { - // Recognition error - abort rule - goto errorExit + p.SetState(261) + p.GetErrorHandler().Sync(p) + if p.HasError() { + goto errorExit + } + + switch p.GetInterpreter().AdaptivePredict(p.BaseParser, p.GetTokenStream(), 15, p.GetParserRuleContext()) { + case 1: + { + p.SetState(253) + p.Match(KuneiformParserSET) + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } + } + { + p.SetState(254) + p.Match(KuneiformParserNULL) + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } + } + + case 2: + { + p.SetState(255) + p.Match(KuneiformParserSET) + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } + } + { + p.SetState(256) + p.Match(KuneiformParserDEFAULT) + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } + } + + case 3: + { + p.SetState(257) + p.Match(KuneiformParserRESTRICT) + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } + } + + case 4: + { + p.SetState(258) + p.Match(KuneiformParserNO) + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } + } + { + p.SetState(259) + p.Match(KuneiformParserACTION) + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } + } + + case 5: + { + p.SetState(260) + p.Match(KuneiformParserCASCADE) + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } } + + case antlr.ATNInvalidAltNumber: + goto errorExit } errorExit: @@ -3445,110 +4058,124 @@ errorExit: goto errorExit // Trick to prevent compiler error if the label is 
not used } -// IUse_declarationContext is an interface to support dynamic dispatch. -type IUse_declarationContext interface { +// IFk_constraintContext is an interface to support dynamic dispatch. +type IFk_constraintContext interface { antlr.ParserRuleContext // GetParser returns the parser. GetParser() antlr.Parser + // GetNamespace returns the namespace rule contexts. + GetNamespace() IIdentifierContext + + // GetTable returns the table rule contexts. + GetTable() IIdentifierContext + + // SetNamespace sets the namespace rule contexts. + SetNamespace(IIdentifierContext) + + // SetTable sets the table rule contexts. + SetTable(IIdentifierContext) + // Getter signatures - USE() antlr.TerminalNode - AllIDENTIFIER() []antlr.TerminalNode - IDENTIFIER(i int) antlr.TerminalNode - AS() antlr.TerminalNode - SCOL() antlr.TerminalNode - LBRACE() antlr.TerminalNode - AllCOL() []antlr.TerminalNode - COL(i int) antlr.TerminalNode - AllLiteral() []ILiteralContext - Literal(i int) ILiteralContext - RBRACE() antlr.TerminalNode - AllCOMMA() []antlr.TerminalNode - COMMA(i int) antlr.TerminalNode + REFERENCES() antlr.TerminalNode + LPAREN() antlr.TerminalNode + Identifier_list() IIdentifier_listContext + RPAREN() antlr.TerminalNode + AllIdentifier() []IIdentifierContext + Identifier(i int) IIdentifierContext + PERIOD() antlr.TerminalNode + AllFk_action() []IFk_actionContext + Fk_action(i int) IFk_actionContext - // IsUse_declarationContext differentiates from other interfaces. - IsUse_declarationContext() + // IsFk_constraintContext differentiates from other interfaces. + IsFk_constraintContext() } -type Use_declarationContext struct { +type Fk_constraintContext struct { antlr.BaseParserRuleContext - parser antlr.Parser + parser antlr.Parser + namespace IIdentifierContext + table IIdentifierContext } -func NewEmptyUse_declarationContext() *Use_declarationContext { - var p = new(Use_declarationContext) +func NewEmptyFk_constraintContext() *Fk_constraintContext { + var p = new(Fk_constraintContext) antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1) - p.RuleIndex = KuneiformParserRULE_use_declaration + p.RuleIndex = KuneiformParserRULE_fk_constraint return p } -func InitEmptyUse_declarationContext(p *Use_declarationContext) { +func InitEmptyFk_constraintContext(p *Fk_constraintContext) { antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1) - p.RuleIndex = KuneiformParserRULE_use_declaration + p.RuleIndex = KuneiformParserRULE_fk_constraint } -func (*Use_declarationContext) IsUse_declarationContext() {} +func (*Fk_constraintContext) IsFk_constraintContext() {} -func NewUse_declarationContext(parser antlr.Parser, parent antlr.ParserRuleContext, invokingState int) *Use_declarationContext { - var p = new(Use_declarationContext) +func NewFk_constraintContext(parser antlr.Parser, parent antlr.ParserRuleContext, invokingState int) *Fk_constraintContext { + var p = new(Fk_constraintContext) antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, parent, invokingState) p.parser = parser - p.RuleIndex = KuneiformParserRULE_use_declaration + p.RuleIndex = KuneiformParserRULE_fk_constraint return p } -func (s *Use_declarationContext) GetParser() antlr.Parser { return s.parser } +func (s *Fk_constraintContext) GetParser() antlr.Parser { return s.parser } -func (s *Use_declarationContext) USE() antlr.TerminalNode { - return s.GetToken(KuneiformParserUSE, 0) -} +func (s *Fk_constraintContext) GetNamespace() IIdentifierContext { return s.namespace } -func (s *Use_declarationContext) 
AllIDENTIFIER() []antlr.TerminalNode { - return s.GetTokens(KuneiformParserIDENTIFIER) -} +func (s *Fk_constraintContext) GetTable() IIdentifierContext { return s.table } -func (s *Use_declarationContext) IDENTIFIER(i int) antlr.TerminalNode { - return s.GetToken(KuneiformParserIDENTIFIER, i) -} +func (s *Fk_constraintContext) SetNamespace(v IIdentifierContext) { s.namespace = v } -func (s *Use_declarationContext) AS() antlr.TerminalNode { - return s.GetToken(KuneiformParserAS, 0) -} +func (s *Fk_constraintContext) SetTable(v IIdentifierContext) { s.table = v } -func (s *Use_declarationContext) SCOL() antlr.TerminalNode { - return s.GetToken(KuneiformParserSCOL, 0) +func (s *Fk_constraintContext) REFERENCES() antlr.TerminalNode { + return s.GetToken(KuneiformParserREFERENCES, 0) } -func (s *Use_declarationContext) LBRACE() antlr.TerminalNode { - return s.GetToken(KuneiformParserLBRACE, 0) +func (s *Fk_constraintContext) LPAREN() antlr.TerminalNode { + return s.GetToken(KuneiformParserLPAREN, 0) } -func (s *Use_declarationContext) AllCOL() []antlr.TerminalNode { - return s.GetTokens(KuneiformParserCOL) +func (s *Fk_constraintContext) Identifier_list() IIdentifier_listContext { + var t antlr.RuleContext + for _, ctx := range s.GetChildren() { + if _, ok := ctx.(IIdentifier_listContext); ok { + t = ctx.(antlr.RuleContext) + break + } + } + + if t == nil { + return nil + } + + return t.(IIdentifier_listContext) } -func (s *Use_declarationContext) COL(i int) antlr.TerminalNode { - return s.GetToken(KuneiformParserCOL, i) +func (s *Fk_constraintContext) RPAREN() antlr.TerminalNode { + return s.GetToken(KuneiformParserRPAREN, 0) } -func (s *Use_declarationContext) AllLiteral() []ILiteralContext { +func (s *Fk_constraintContext) AllIdentifier() []IIdentifierContext { children := s.GetChildren() len := 0 for _, ctx := range children { - if _, ok := ctx.(ILiteralContext); ok { + if _, ok := ctx.(IIdentifierContext); ok { len++ } } - tst := make([]ILiteralContext, len) + tst := make([]IIdentifierContext, len) i := 0 for _, ctx := range children { - if t, ok := ctx.(ILiteralContext); ok { - tst[i] = t.(ILiteralContext) + if t, ok := ctx.(IIdentifierContext); ok { + tst[i] = t.(IIdentifierContext) i++ } } @@ -3556,11 +4183,11 @@ func (s *Use_declarationContext) AllLiteral() []ILiteralContext { return tst } -func (s *Use_declarationContext) Literal(i int) ILiteralContext { +func (s *Fk_constraintContext) Identifier(i int) IIdentifierContext { var t antlr.RuleContext j := 0 for _, ctx := range s.GetChildren() { - if _, ok := ctx.(ILiteralContext); ok { + if _, ok := ctx.(IIdentifierContext); ok { if j == i { t = ctx.(antlr.RuleContext) break @@ -3573,174 +4200,163 @@ func (s *Use_declarationContext) Literal(i int) ILiteralContext { return nil } - return t.(ILiteralContext) + return t.(IIdentifierContext) } -func (s *Use_declarationContext) RBRACE() antlr.TerminalNode { - return s.GetToken(KuneiformParserRBRACE, 0) +func (s *Fk_constraintContext) PERIOD() antlr.TerminalNode { + return s.GetToken(KuneiformParserPERIOD, 0) } -func (s *Use_declarationContext) AllCOMMA() []antlr.TerminalNode { - return s.GetTokens(KuneiformParserCOMMA) +func (s *Fk_constraintContext) AllFk_action() []IFk_actionContext { + children := s.GetChildren() + len := 0 + for _, ctx := range children { + if _, ok := ctx.(IFk_actionContext); ok { + len++ + } + } + + tst := make([]IFk_actionContext, len) + i := 0 + for _, ctx := range children { + if t, ok := ctx.(IFk_actionContext); ok { + tst[i] = t.(IFk_actionContext) + i++ + } + } + 
+ return tst } -func (s *Use_declarationContext) COMMA(i int) antlr.TerminalNode { - return s.GetToken(KuneiformParserCOMMA, i) +func (s *Fk_constraintContext) Fk_action(i int) IFk_actionContext { + var t antlr.RuleContext + j := 0 + for _, ctx := range s.GetChildren() { + if _, ok := ctx.(IFk_actionContext); ok { + if j == i { + t = ctx.(antlr.RuleContext) + break + } + j++ + } + } + + if t == nil { + return nil + } + + return t.(IFk_actionContext) } -func (s *Use_declarationContext) GetRuleContext() antlr.RuleContext { +func (s *Fk_constraintContext) GetRuleContext() antlr.RuleContext { return s } -func (s *Use_declarationContext) ToStringTree(ruleNames []string, recog antlr.Recognizer) string { +func (s *Fk_constraintContext) ToStringTree(ruleNames []string, recog antlr.Recognizer) string { return antlr.TreesStringTree(s, ruleNames, recog) } -func (s *Use_declarationContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { +func (s *Fk_constraintContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { switch t := visitor.(type) { case KuneiformParserVisitor: - return t.VisitUse_declaration(s) + return t.VisitFk_constraint(s) default: return t.VisitChildren(s) } } -func (p *KuneiformParser) Use_declaration() (localctx IUse_declarationContext) { - localctx = NewUse_declarationContext(p, p.GetParserRuleContext(), p.GetState()) - p.EnterRule(localctx, 28, KuneiformParserRULE_use_declaration) +func (p *KuneiformParser) Fk_constraint() (localctx IFk_constraintContext) { + localctx = NewFk_constraintContext(p, p.GetParserRuleContext(), p.GetState()) + p.EnterRule(localctx, 26, KuneiformParserRULE_fk_constraint) var _la int p.EnterOuterAlt(localctx, 1) { - p.SetState(216) - p.Match(KuneiformParserUSE) + p.SetState(263) + p.Match(KuneiformParserREFERENCES) if p.HasError() { // Recognition error - abort rule goto errorExit } } - { - p.SetState(217) - p.Match(KuneiformParserIDENTIFIER) - if p.HasError() { - // Recognition error - abort rule - goto errorExit - } - } - p.SetState(233) + p.SetState(267) p.GetErrorHandler().Sync(p) - if p.HasError() { - goto errorExit - } - _la = p.GetTokenStream().LA(1) - if _la == KuneiformParserLBRACE { - { - p.SetState(218) - p.Match(KuneiformParserLBRACE) - if p.HasError() { - // Recognition error - abort rule - goto errorExit - } - } - { - p.SetState(219) - p.Match(KuneiformParserIDENTIFIER) - if p.HasError() { - // Recognition error - abort rule - goto errorExit - } - } - { - p.SetState(220) - p.Match(KuneiformParserCOL) - if p.HasError() { - // Recognition error - abort rule - goto errorExit - } - } + if p.GetInterpreter().AdaptivePredict(p.BaseParser, p.GetTokenStream(), 16, p.GetParserRuleContext()) == 1 { { - p.SetState(221) - p.Literal() - } - p.SetState(228) - p.GetErrorHandler().Sync(p) - if p.HasError() { - goto errorExit - } - _la = p.GetTokenStream().LA(1) + p.SetState(264) - for _la == KuneiformParserCOMMA { - { - p.SetState(222) - p.Match(KuneiformParserCOMMA) - if p.HasError() { - // Recognition error - abort rule - goto errorExit - } - } - { - p.SetState(223) - p.Match(KuneiformParserIDENTIFIER) - if p.HasError() { - // Recognition error - abort rule - goto errorExit - } - } - { - p.SetState(224) - p.Match(KuneiformParserCOL) - if p.HasError() { - // Recognition error - abort rule - goto errorExit - } - } - { - p.SetState(225) - p.Literal() - } + var _x = p.Identifier() - p.SetState(230) - p.GetErrorHandler().Sync(p) - if p.HasError() { - goto errorExit - } - _la = p.GetTokenStream().LA(1) + localctx.(*Fk_constraintContext).namespace = _x 
} { - p.SetState(231) - p.Match(KuneiformParserRBRACE) + p.SetState(265) + p.Match(KuneiformParserPERIOD) if p.HasError() { // Recognition error - abort rule goto errorExit } } + } else if p.HasError() { // JIM + goto errorExit + } + { + p.SetState(269) + + var _x = p.Identifier() + + localctx.(*Fk_constraintContext).table = _x } { - p.SetState(235) - p.Match(KuneiformParserAS) + p.SetState(270) + p.Match(KuneiformParserLPAREN) if p.HasError() { // Recognition error - abort rule goto errorExit } } { - p.SetState(236) - p.Match(KuneiformParserIDENTIFIER) + p.SetState(271) + p.Identifier_list() + } + { + p.SetState(272) + p.Match(KuneiformParserRPAREN) if p.HasError() { // Recognition error - abort rule goto errorExit } } - { - p.SetState(237) - p.Match(KuneiformParserSCOL) + p.SetState(277) + p.GetErrorHandler().Sync(p) + if p.HasError() { + goto errorExit + } + _la = p.GetTokenStream().LA(1) + + if _la == KuneiformParserON { + { + p.SetState(273) + p.Fk_action() + } + p.SetState(275) + p.GetErrorHandler().Sync(p) if p.HasError() { - // Recognition error - abort rule goto errorExit } + _la = p.GetTokenStream().LA(1) + + if _la == KuneiformParserON { + { + p.SetState(274) + p.Fk_action() + } + + } + } errorExit: @@ -3756,159 +4372,101 @@ errorExit: goto errorExit // Trick to prevent compiler error if the label is not used } -// ITable_declarationContext is an interface to support dynamic dispatch. -type ITable_declarationContext interface { +// IAction_returnContext is an interface to support dynamic dispatch. +type IAction_returnContext interface { antlr.ParserRuleContext // GetParser returns the parser. GetParser() antlr.Parser + // GetReturn_columns returns the return_columns rule contexts. + GetReturn_columns() INamed_type_listContext + + // GetUnnamed_return_types returns the unnamed_return_types rule contexts. + GetUnnamed_return_types() IType_listContext + + // SetReturn_columns sets the return_columns rule contexts. + SetReturn_columns(INamed_type_listContext) + + // SetUnnamed_return_types sets the unnamed_return_types rule contexts. + SetUnnamed_return_types(IType_listContext) + // Getter signatures + RETURNS() antlr.TerminalNode + LPAREN() antlr.TerminalNode + RPAREN() antlr.TerminalNode + Named_type_list() INamed_type_listContext + Type_list() IType_listContext TABLE() antlr.TerminalNode - IDENTIFIER() antlr.TerminalNode - LBRACE() antlr.TerminalNode - AllColumn_def() []IColumn_defContext - Column_def(i int) IColumn_defContext - RBRACE() antlr.TerminalNode - AllCOMMA() []antlr.TerminalNode - COMMA(i int) antlr.TerminalNode - AllIndex_def() []IIndex_defContext - Index_def(i int) IIndex_defContext - AllForeign_key_def() []IForeign_key_defContext - Foreign_key_def(i int) IForeign_key_defContext - // IsTable_declarationContext differentiates from other interfaces. - IsTable_declarationContext() + // IsAction_returnContext differentiates from other interfaces. 
+ IsAction_returnContext() } -type Table_declarationContext struct { +type Action_returnContext struct { antlr.BaseParserRuleContext - parser antlr.Parser + parser antlr.Parser + return_columns INamed_type_listContext + unnamed_return_types IType_listContext } -func NewEmptyTable_declarationContext() *Table_declarationContext { - var p = new(Table_declarationContext) +func NewEmptyAction_returnContext() *Action_returnContext { + var p = new(Action_returnContext) antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1) - p.RuleIndex = KuneiformParserRULE_table_declaration + p.RuleIndex = KuneiformParserRULE_action_return return p } -func InitEmptyTable_declarationContext(p *Table_declarationContext) { +func InitEmptyAction_returnContext(p *Action_returnContext) { antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1) - p.RuleIndex = KuneiformParserRULE_table_declaration + p.RuleIndex = KuneiformParserRULE_action_return } -func (*Table_declarationContext) IsTable_declarationContext() {} +func (*Action_returnContext) IsAction_returnContext() {} -func NewTable_declarationContext(parser antlr.Parser, parent antlr.ParserRuleContext, invokingState int) *Table_declarationContext { - var p = new(Table_declarationContext) +func NewAction_returnContext(parser antlr.Parser, parent antlr.ParserRuleContext, invokingState int) *Action_returnContext { + var p = new(Action_returnContext) antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, parent, invokingState) p.parser = parser - p.RuleIndex = KuneiformParserRULE_table_declaration + p.RuleIndex = KuneiformParserRULE_action_return return p } -func (s *Table_declarationContext) GetParser() antlr.Parser { return s.parser } - -func (s *Table_declarationContext) TABLE() antlr.TerminalNode { - return s.GetToken(KuneiformParserTABLE, 0) -} - -func (s *Table_declarationContext) IDENTIFIER() antlr.TerminalNode { - return s.GetToken(KuneiformParserIDENTIFIER, 0) -} - -func (s *Table_declarationContext) LBRACE() antlr.TerminalNode { - return s.GetToken(KuneiformParserLBRACE, 0) -} - -func (s *Table_declarationContext) AllColumn_def() []IColumn_defContext { - children := s.GetChildren() - len := 0 - for _, ctx := range children { - if _, ok := ctx.(IColumn_defContext); ok { - len++ - } - } +func (s *Action_returnContext) GetParser() antlr.Parser { return s.parser } - tst := make([]IColumn_defContext, len) - i := 0 - for _, ctx := range children { - if t, ok := ctx.(IColumn_defContext); ok { - tst[i] = t.(IColumn_defContext) - i++ - } - } +func (s *Action_returnContext) GetReturn_columns() INamed_type_listContext { return s.return_columns } - return tst +func (s *Action_returnContext) GetUnnamed_return_types() IType_listContext { + return s.unnamed_return_types } -func (s *Table_declarationContext) Column_def(i int) IColumn_defContext { - var t antlr.RuleContext - j := 0 - for _, ctx := range s.GetChildren() { - if _, ok := ctx.(IColumn_defContext); ok { - if j == i { - t = ctx.(antlr.RuleContext) - break - } - j++ - } - } - - if t == nil { - return nil - } - - return t.(IColumn_defContext) -} +func (s *Action_returnContext) SetReturn_columns(v INamed_type_listContext) { s.return_columns = v } -func (s *Table_declarationContext) RBRACE() antlr.TerminalNode { - return s.GetToken(KuneiformParserRBRACE, 0) +func (s *Action_returnContext) SetUnnamed_return_types(v IType_listContext) { + s.unnamed_return_types = v } -func (s *Table_declarationContext) AllCOMMA() []antlr.TerminalNode { - return s.GetTokens(KuneiformParserCOMMA) +func (s 
*Action_returnContext) RETURNS() antlr.TerminalNode { + return s.GetToken(KuneiformParserRETURNS, 0) } -func (s *Table_declarationContext) COMMA(i int) antlr.TerminalNode { - return s.GetToken(KuneiformParserCOMMA, i) +func (s *Action_returnContext) LPAREN() antlr.TerminalNode { + return s.GetToken(KuneiformParserLPAREN, 0) } -func (s *Table_declarationContext) AllIndex_def() []IIndex_defContext { - children := s.GetChildren() - len := 0 - for _, ctx := range children { - if _, ok := ctx.(IIndex_defContext); ok { - len++ - } - } - - tst := make([]IIndex_defContext, len) - i := 0 - for _, ctx := range children { - if t, ok := ctx.(IIndex_defContext); ok { - tst[i] = t.(IIndex_defContext) - i++ - } - } - - return tst +func (s *Action_returnContext) RPAREN() antlr.TerminalNode { + return s.GetToken(KuneiformParserRPAREN, 0) } -func (s *Table_declarationContext) Index_def(i int) IIndex_defContext { +func (s *Action_returnContext) Named_type_list() INamed_type_listContext { var t antlr.RuleContext - j := 0 for _, ctx := range s.GetChildren() { - if _, ok := ctx.(IIndex_defContext); ok { - if j == i { - t = ctx.(antlr.RuleContext) - break - } - j++ + if _, ok := ctx.(INamed_type_listContext); ok { + t = ctx.(antlr.RuleContext) + break } } @@ -3916,40 +4474,15 @@ func (s *Table_declarationContext) Index_def(i int) IIndex_defContext { return nil } - return t.(IIndex_defContext) -} - -func (s *Table_declarationContext) AllForeign_key_def() []IForeign_key_defContext { - children := s.GetChildren() - len := 0 - for _, ctx := range children { - if _, ok := ctx.(IForeign_key_defContext); ok { - len++ - } - } - - tst := make([]IForeign_key_defContext, len) - i := 0 - for _, ctx := range children { - if t, ok := ctx.(IForeign_key_defContext); ok { - tst[i] = t.(IForeign_key_defContext) - i++ - } - } - - return tst + return t.(INamed_type_listContext) } -func (s *Table_declarationContext) Foreign_key_def(i int) IForeign_key_defContext { +func (s *Action_returnContext) Type_list() IType_listContext { var t antlr.RuleContext - j := 0 for _, ctx := range s.GetChildren() { - if _, ok := ctx.(IForeign_key_defContext); ok { - if j == i { - t = ctx.(antlr.RuleContext) - break - } - j++ + if _, ok := ctx.(IType_listContext); ok { + t = ctx.(antlr.RuleContext) + break } } @@ -3957,121 +4490,122 @@ func (s *Table_declarationContext) Foreign_key_def(i int) IForeign_key_defContex return nil } - return t.(IForeign_key_defContext) + return t.(IType_listContext) +} + +func (s *Action_returnContext) TABLE() antlr.TerminalNode { + return s.GetToken(KuneiformParserTABLE, 0) } -func (s *Table_declarationContext) GetRuleContext() antlr.RuleContext { +func (s *Action_returnContext) GetRuleContext() antlr.RuleContext { return s } -func (s *Table_declarationContext) ToStringTree(ruleNames []string, recog antlr.Recognizer) string { +func (s *Action_returnContext) ToStringTree(ruleNames []string, recog antlr.Recognizer) string { return antlr.TreesStringTree(s, ruleNames, recog) } -func (s *Table_declarationContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { +func (s *Action_returnContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { switch t := visitor.(type) { case KuneiformParserVisitor: - return t.VisitTable_declaration(s) + return t.VisitAction_return(s) default: return t.VisitChildren(s) } } -func (p *KuneiformParser) Table_declaration() (localctx ITable_declarationContext) { - localctx = NewTable_declarationContext(p, p.GetParserRuleContext(), p.GetState()) - p.EnterRule(localctx, 30, 
KuneiformParserRULE_table_declaration) +func (p *KuneiformParser) Action_return() (localctx IAction_returnContext) { + localctx = NewAction_returnContext(p, p.GetParserRuleContext(), p.GetState()) + p.EnterRule(localctx, 28, KuneiformParserRULE_action_return) var _la int p.EnterOuterAlt(localctx, 1) { - p.SetState(239) - p.Match(KuneiformParserTABLE) - if p.HasError() { - // Recognition error - abort rule - goto errorExit - } - } - { - p.SetState(240) - p.Match(KuneiformParserIDENTIFIER) - if p.HasError() { - // Recognition error - abort rule - goto errorExit - } - } - { - p.SetState(241) - p.Match(KuneiformParserLBRACE) + p.SetState(279) + p.Match(KuneiformParserRETURNS) if p.HasError() { // Recognition error - abort rule goto errorExit } } - { - p.SetState(242) - p.Column_def() - } - p.SetState(251) + p.SetState(291) p.GetErrorHandler().Sync(p) if p.HasError() { goto errorExit } - _la = p.GetTokenStream().LA(1) - for _la == KuneiformParserCOMMA { - { - p.SetState(243) - p.Match(KuneiformParserCOMMA) - if p.HasError() { - // Recognition error - abort rule - goto errorExit - } - } - p.SetState(247) + switch p.GetInterpreter().AdaptivePredict(p.BaseParser, p.GetTokenStream(), 20, p.GetParserRuleContext()) { + case 1: + p.SetState(281) p.GetErrorHandler().Sync(p) if p.HasError() { goto errorExit } + _la = p.GetTokenStream().LA(1) - switch p.GetTokenStream().LA(1) { - case KuneiformParserIDENTIFIER: + if _la == KuneiformParserTABLE { { - p.SetState(244) - p.Column_def() + p.SetState(280) + p.Match(KuneiformParserTABLE) + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } } - case KuneiformParserHASH_IDENTIFIER: - { - p.SetState(245) - p.Index_def() + } + { + p.SetState(283) + p.Match(KuneiformParserLPAREN) + if p.HasError() { + // Recognition error - abort rule + goto errorExit } + } + { + p.SetState(284) - case KuneiformParserFOREIGN, KuneiformParserLEGACY_FOREIGN_KEY: - { - p.SetState(246) - p.Foreign_key_def() + var _x = p.Named_type_list() + + localctx.(*Action_returnContext).return_columns = _x + } + { + p.SetState(285) + p.Match(KuneiformParserRPAREN) + if p.HasError() { + // Recognition error - abort rule + goto errorExit } + } - default: - p.SetError(antlr.NewNoViableAltException(p, nil, nil, nil, nil, nil)) - goto errorExit + case 2: + { + p.SetState(287) + p.Match(KuneiformParserLPAREN) + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } } + { + p.SetState(288) - p.SetState(253) - p.GetErrorHandler().Sync(p) - if p.HasError() { - goto errorExit + var _x = p.Type_list() + + localctx.(*Action_returnContext).unnamed_return_types = _x } - _la = p.GetTokenStream().LA(1) - } - { - p.SetState(254) - p.Match(KuneiformParserRBRACE) - if p.HasError() { - // Recognition error - abort rule - goto errorExit + { + p.SetState(289) + p.Match(KuneiformParserRPAREN) + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } } + + case antlr.ATNInvalidAltNumber: + goto errorExit } errorExit: @@ -4087,70 +4621,97 @@ errorExit: goto errorExit // Trick to prevent compiler error if the label is not used } -// IColumn_defContext is an interface to support dynamic dispatch. -type IColumn_defContext interface { +// ISql_statementContext is an interface to support dynamic dispatch. +type ISql_statementContext interface { antlr.ParserRuleContext // GetParser returns the parser. GetParser() antlr.Parser - // GetName returns the name token. - GetName() antlr.Token - - // SetName sets the name token. 
- SetName(antlr.Token) - // Getter signatures - Type_() ITypeContext - IDENTIFIER() antlr.TerminalNode - AllConstraint() []IConstraintContext - Constraint(i int) IConstraintContext + Select_statement() ISelect_statementContext + Update_statement() IUpdate_statementContext + Insert_statement() IInsert_statementContext + Delete_statement() IDelete_statementContext + WITH() antlr.TerminalNode + AllCommon_table_expression() []ICommon_table_expressionContext + Common_table_expression(i int) ICommon_table_expressionContext + RECURSIVE() antlr.TerminalNode + AllCOMMA() []antlr.TerminalNode + COMMA(i int) antlr.TerminalNode - // IsColumn_defContext differentiates from other interfaces. - IsColumn_defContext() + // IsSql_statementContext differentiates from other interfaces. + IsSql_statementContext() } -type Column_defContext struct { +type Sql_statementContext struct { antlr.BaseParserRuleContext parser antlr.Parser - name antlr.Token } -func NewEmptyColumn_defContext() *Column_defContext { - var p = new(Column_defContext) +func NewEmptySql_statementContext() *Sql_statementContext { + var p = new(Sql_statementContext) antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1) - p.RuleIndex = KuneiformParserRULE_column_def + p.RuleIndex = KuneiformParserRULE_sql_statement return p } -func InitEmptyColumn_defContext(p *Column_defContext) { +func InitEmptySql_statementContext(p *Sql_statementContext) { antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1) - p.RuleIndex = KuneiformParserRULE_column_def + p.RuleIndex = KuneiformParserRULE_sql_statement } -func (*Column_defContext) IsColumn_defContext() {} +func (*Sql_statementContext) IsSql_statementContext() {} -func NewColumn_defContext(parser antlr.Parser, parent antlr.ParserRuleContext, invokingState int) *Column_defContext { - var p = new(Column_defContext) +func NewSql_statementContext(parser antlr.Parser, parent antlr.ParserRuleContext, invokingState int) *Sql_statementContext { + var p = new(Sql_statementContext) antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, parent, invokingState) p.parser = parser - p.RuleIndex = KuneiformParserRULE_column_def + p.RuleIndex = KuneiformParserRULE_sql_statement return p } -func (s *Column_defContext) GetParser() antlr.Parser { return s.parser } +func (s *Sql_statementContext) GetParser() antlr.Parser { return s.parser } + +func (s *Sql_statementContext) Select_statement() ISelect_statementContext { + var t antlr.RuleContext + for _, ctx := range s.GetChildren() { + if _, ok := ctx.(ISelect_statementContext); ok { + t = ctx.(antlr.RuleContext) + break + } + } + + if t == nil { + return nil + } + + return t.(ISelect_statementContext) +} + +func (s *Sql_statementContext) Update_statement() IUpdate_statementContext { + var t antlr.RuleContext + for _, ctx := range s.GetChildren() { + if _, ok := ctx.(IUpdate_statementContext); ok { + t = ctx.(antlr.RuleContext) + break + } + } -func (s *Column_defContext) GetName() antlr.Token { return s.name } + if t == nil { + return nil + } -func (s *Column_defContext) SetName(v antlr.Token) { s.name = v } + return t.(IUpdate_statementContext) +} -func (s *Column_defContext) Type_() ITypeContext { +func (s *Sql_statementContext) Insert_statement() IInsert_statementContext { var t antlr.RuleContext for _, ctx := range s.GetChildren() { - if _, ok := ctx.(ITypeContext); ok { + if _, ok := ctx.(IInsert_statementContext); ok { t = ctx.(antlr.RuleContext) break } @@ -4160,27 +4721,43 @@ func (s *Column_defContext) Type_() ITypeContext { return nil } - 
return t.(ITypeContext) + return t.(IInsert_statementContext) } -func (s *Column_defContext) IDENTIFIER() antlr.TerminalNode { - return s.GetToken(KuneiformParserIDENTIFIER, 0) +func (s *Sql_statementContext) Delete_statement() IDelete_statementContext { + var t antlr.RuleContext + for _, ctx := range s.GetChildren() { + if _, ok := ctx.(IDelete_statementContext); ok { + t = ctx.(antlr.RuleContext) + break + } + } + + if t == nil { + return nil + } + + return t.(IDelete_statementContext) +} + +func (s *Sql_statementContext) WITH() antlr.TerminalNode { + return s.GetToken(KuneiformParserWITH, 0) } -func (s *Column_defContext) AllConstraint() []IConstraintContext { +func (s *Sql_statementContext) AllCommon_table_expression() []ICommon_table_expressionContext { children := s.GetChildren() len := 0 for _, ctx := range children { - if _, ok := ctx.(IConstraintContext); ok { + if _, ok := ctx.(ICommon_table_expressionContext); ok { len++ } } - tst := make([]IConstraintContext, len) + tst := make([]ICommon_table_expressionContext, len) i := 0 for _, ctx := range children { - if t, ok := ctx.(IConstraintContext); ok { - tst[i] = t.(IConstraintContext) + if t, ok := ctx.(ICommon_table_expressionContext); ok { + tst[i] = t.(ICommon_table_expressionContext) i++ } } @@ -4188,11 +4765,11 @@ func (s *Column_defContext) AllConstraint() []IConstraintContext { return tst } -func (s *Column_defContext) Constraint(i int) IConstraintContext { +func (s *Sql_statementContext) Common_table_expression(i int) ICommon_table_expressionContext { var t antlr.RuleContext j := 0 for _, ctx := range s.GetChildren() { - if _, ok := ctx.(IConstraintContext); ok { + if _, ok := ctx.(ICommon_table_expressionContext); ok { if j == i { t = ctx.(antlr.RuleContext) break @@ -4205,67 +4782,147 @@ func (s *Column_defContext) Constraint(i int) IConstraintContext { return nil } - return t.(IConstraintContext) + return t.(ICommon_table_expressionContext) +} + +func (s *Sql_statementContext) RECURSIVE() antlr.TerminalNode { + return s.GetToken(KuneiformParserRECURSIVE, 0) +} + +func (s *Sql_statementContext) AllCOMMA() []antlr.TerminalNode { + return s.GetTokens(KuneiformParserCOMMA) +} + +func (s *Sql_statementContext) COMMA(i int) antlr.TerminalNode { + return s.GetToken(KuneiformParserCOMMA, i) } -func (s *Column_defContext) GetRuleContext() antlr.RuleContext { +func (s *Sql_statementContext) GetRuleContext() antlr.RuleContext { return s } -func (s *Column_defContext) ToStringTree(ruleNames []string, recog antlr.Recognizer) string { +func (s *Sql_statementContext) ToStringTree(ruleNames []string, recog antlr.Recognizer) string { return antlr.TreesStringTree(s, ruleNames, recog) } -func (s *Column_defContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { +func (s *Sql_statementContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { switch t := visitor.(type) { case KuneiformParserVisitor: - return t.VisitColumn_def(s) + return t.VisitSql_statement(s) default: return t.VisitChildren(s) } } -func (p *KuneiformParser) Column_def() (localctx IColumn_defContext) { - localctx = NewColumn_defContext(p, p.GetParserRuleContext(), p.GetState()) - p.EnterRule(localctx, 32, KuneiformParserRULE_column_def) +func (p *KuneiformParser) Sql_statement() (localctx ISql_statementContext) { + localctx = NewSql_statementContext(p, p.GetParserRuleContext(), p.GetState()) + p.EnterRule(localctx, 30, KuneiformParserRULE_sql_statement) var _la int p.EnterOuterAlt(localctx, 1) - { - p.SetState(256) + p.SetState(305) + p.GetErrorHandler().Sync(p) 
+ if p.HasError() { + goto errorExit + } + _la = p.GetTokenStream().LA(1) + + if _la == KuneiformParserWITH { + { + p.SetState(293) + p.Match(KuneiformParserWITH) + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } + } + p.SetState(295) + p.GetErrorHandler().Sync(p) + if p.HasError() { + goto errorExit + } + _la = p.GetTokenStream().LA(1) - var _m = p.Match(KuneiformParserIDENTIFIER) + if _la == KuneiformParserRECURSIVE { + { + p.SetState(294) + p.Match(KuneiformParserRECURSIVE) + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } + } - localctx.(*Column_defContext).name = _m + } + { + p.SetState(297) + p.Common_table_expression() + } + p.SetState(302) + p.GetErrorHandler().Sync(p) if p.HasError() { - // Recognition error - abort rule goto errorExit } + _la = p.GetTokenStream().LA(1) + + for _la == KuneiformParserCOMMA { + { + p.SetState(298) + p.Match(KuneiformParserCOMMA) + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } + } + { + p.SetState(299) + p.Common_table_expression() + } + + p.SetState(304) + p.GetErrorHandler().Sync(p) + if p.HasError() { + goto errorExit + } + _la = p.GetTokenStream().LA(1) + } + } - { - p.SetState(257) - p.Type_() - } - p.SetState(261) + p.SetState(311) p.GetErrorHandler().Sync(p) if p.HasError() { goto errorExit } - _la = p.GetTokenStream().LA(1) - for ((int64(_la) & ^0x3f) == 0 && ((int64(1)<<_la)&146516521470918656) != 0) || _la == KuneiformParserIDENTIFIER { + switch p.GetTokenStream().LA(1) { + case KuneiformParserSELECT: { - p.SetState(258) - p.Constraint() + p.SetState(307) + p.Select_statement() } - p.SetState(263) - p.GetErrorHandler().Sync(p) - if p.HasError() { - goto errorExit + case KuneiformParserUPDATE: + { + p.SetState(308) + p.Update_statement() } - _la = p.GetTokenStream().LA(1) + + case KuneiformParserINSERT: + { + p.SetState(309) + p.Insert_statement() + } + + case KuneiformParserDELETE: + { + p.SetState(310) + p.Delete_statement() + } + + default: + p.SetError(antlr.NewNoViableAltException(p, nil, nil, nil, nil, nil)) + goto errorExit } errorExit: @@ -4281,97 +4938,118 @@ errorExit: goto errorExit // Trick to prevent compiler error if the label is not used } -// IIndex_defContext is an interface to support dynamic dispatch. -type IIndex_defContext interface { +// ICommon_table_expressionContext is an interface to support dynamic dispatch. +type ICommon_table_expressionContext interface { antlr.ParserRuleContext // GetParser returns the parser. GetParser() antlr.Parser - // GetColumns returns the columns rule contexts. - GetColumns() IIdentifier_listContext - - // SetColumns sets the columns rule contexts. - SetColumns(IIdentifier_listContext) - // Getter signatures - HASH_IDENTIFIER() antlr.TerminalNode - LPAREN() antlr.TerminalNode - RPAREN() antlr.TerminalNode - UNIQUE() antlr.TerminalNode - INDEX() antlr.TerminalNode - PRIMARY() antlr.TerminalNode - Identifier_list() IIdentifier_listContext + AllIdentifier() []IIdentifierContext + Identifier(i int) IIdentifierContext + AS() antlr.TerminalNode + AllLPAREN() []antlr.TerminalNode + LPAREN(i int) antlr.TerminalNode + Select_statement() ISelect_statementContext + AllRPAREN() []antlr.TerminalNode + RPAREN(i int) antlr.TerminalNode + AllCOMMA() []antlr.TerminalNode + COMMA(i int) antlr.TerminalNode - // IsIndex_defContext differentiates from other interfaces. - IsIndex_defContext() + // IsCommon_table_expressionContext differentiates from other interfaces. 
+ IsCommon_table_expressionContext() } -type Index_defContext struct { +type Common_table_expressionContext struct { antlr.BaseParserRuleContext - parser antlr.Parser - columns IIdentifier_listContext + parser antlr.Parser } -func NewEmptyIndex_defContext() *Index_defContext { - var p = new(Index_defContext) +func NewEmptyCommon_table_expressionContext() *Common_table_expressionContext { + var p = new(Common_table_expressionContext) antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1) - p.RuleIndex = KuneiformParserRULE_index_def + p.RuleIndex = KuneiformParserRULE_common_table_expression return p } -func InitEmptyIndex_defContext(p *Index_defContext) { +func InitEmptyCommon_table_expressionContext(p *Common_table_expressionContext) { antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1) - p.RuleIndex = KuneiformParserRULE_index_def + p.RuleIndex = KuneiformParserRULE_common_table_expression } -func (*Index_defContext) IsIndex_defContext() {} +func (*Common_table_expressionContext) IsCommon_table_expressionContext() {} -func NewIndex_defContext(parser antlr.Parser, parent antlr.ParserRuleContext, invokingState int) *Index_defContext { - var p = new(Index_defContext) +func NewCommon_table_expressionContext(parser antlr.Parser, parent antlr.ParserRuleContext, invokingState int) *Common_table_expressionContext { + var p = new(Common_table_expressionContext) antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, parent, invokingState) p.parser = parser - p.RuleIndex = KuneiformParserRULE_index_def + p.RuleIndex = KuneiformParserRULE_common_table_expression return p } -func (s *Index_defContext) GetParser() antlr.Parser { return s.parser } +func (s *Common_table_expressionContext) GetParser() antlr.Parser { return s.parser } -func (s *Index_defContext) GetColumns() IIdentifier_listContext { return s.columns } +func (s *Common_table_expressionContext) AllIdentifier() []IIdentifierContext { + children := s.GetChildren() + len := 0 + for _, ctx := range children { + if _, ok := ctx.(IIdentifierContext); ok { + len++ + } + } -func (s *Index_defContext) SetColumns(v IIdentifier_listContext) { s.columns = v } + tst := make([]IIdentifierContext, len) + i := 0 + for _, ctx := range children { + if t, ok := ctx.(IIdentifierContext); ok { + tst[i] = t.(IIdentifierContext) + i++ + } + } -func (s *Index_defContext) HASH_IDENTIFIER() antlr.TerminalNode { - return s.GetToken(KuneiformParserHASH_IDENTIFIER, 0) + return tst } -func (s *Index_defContext) LPAREN() antlr.TerminalNode { - return s.GetToken(KuneiformParserLPAREN, 0) -} +func (s *Common_table_expressionContext) Identifier(i int) IIdentifierContext { + var t antlr.RuleContext + j := 0 + for _, ctx := range s.GetChildren() { + if _, ok := ctx.(IIdentifierContext); ok { + if j == i { + t = ctx.(antlr.RuleContext) + break + } + j++ + } + } -func (s *Index_defContext) RPAREN() antlr.TerminalNode { - return s.GetToken(KuneiformParserRPAREN, 0) + if t == nil { + return nil + } + + return t.(IIdentifierContext) } -func (s *Index_defContext) UNIQUE() antlr.TerminalNode { - return s.GetToken(KuneiformParserUNIQUE, 0) +func (s *Common_table_expressionContext) AS() antlr.TerminalNode { + return s.GetToken(KuneiformParserAS, 0) } -func (s *Index_defContext) INDEX() antlr.TerminalNode { - return s.GetToken(KuneiformParserINDEX, 0) +func (s *Common_table_expressionContext) AllLPAREN() []antlr.TerminalNode { + return s.GetTokens(KuneiformParserLPAREN) } -func (s *Index_defContext) PRIMARY() antlr.TerminalNode { - return 
s.GetToken(KuneiformParserPRIMARY, 0) +func (s *Common_table_expressionContext) LPAREN(i int) antlr.TerminalNode { + return s.GetToken(KuneiformParserLPAREN, i) } -func (s *Index_defContext) Identifier_list() IIdentifier_listContext { +func (s *Common_table_expressionContext) Select_statement() ISelect_statementContext { var t antlr.RuleContext for _, ctx := range s.GetChildren() { - if _, ok := ctx.(IIdentifier_listContext); ok { + if _, ok := ctx.(ISelect_statementContext); ok { t = ctx.(antlr.RuleContext) break } @@ -4381,54 +5059,131 @@ func (s *Index_defContext) Identifier_list() IIdentifier_listContext { return nil } - return t.(IIdentifier_listContext) + return t.(ISelect_statementContext) +} + +func (s *Common_table_expressionContext) AllRPAREN() []antlr.TerminalNode { + return s.GetTokens(KuneiformParserRPAREN) +} + +func (s *Common_table_expressionContext) RPAREN(i int) antlr.TerminalNode { + return s.GetToken(KuneiformParserRPAREN, i) +} + +func (s *Common_table_expressionContext) AllCOMMA() []antlr.TerminalNode { + return s.GetTokens(KuneiformParserCOMMA) +} + +func (s *Common_table_expressionContext) COMMA(i int) antlr.TerminalNode { + return s.GetToken(KuneiformParserCOMMA, i) } -func (s *Index_defContext) GetRuleContext() antlr.RuleContext { +func (s *Common_table_expressionContext) GetRuleContext() antlr.RuleContext { return s } -func (s *Index_defContext) ToStringTree(ruleNames []string, recog antlr.Recognizer) string { +func (s *Common_table_expressionContext) ToStringTree(ruleNames []string, recog antlr.Recognizer) string { return antlr.TreesStringTree(s, ruleNames, recog) } -func (s *Index_defContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { +func (s *Common_table_expressionContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { switch t := visitor.(type) { case KuneiformParserVisitor: - return t.VisitIndex_def(s) + return t.VisitCommon_table_expression(s) default: return t.VisitChildren(s) } } -func (p *KuneiformParser) Index_def() (localctx IIndex_defContext) { - localctx = NewIndex_defContext(p, p.GetParserRuleContext(), p.GetState()) - p.EnterRule(localctx, 34, KuneiformParserRULE_index_def) +func (p *KuneiformParser) Common_table_expression() (localctx ICommon_table_expressionContext) { + localctx = NewCommon_table_expressionContext(p, p.GetParserRuleContext(), p.GetState()) + p.EnterRule(localctx, 32, KuneiformParserRULE_common_table_expression) var _la int p.EnterOuterAlt(localctx, 1) { - p.SetState(264) - p.Match(KuneiformParserHASH_IDENTIFIER) + p.SetState(313) + p.Identifier() + } + p.SetState(326) + p.GetErrorHandler().Sync(p) + if p.HasError() { + goto errorExit + } + _la = p.GetTokenStream().LA(1) + + if _la == KuneiformParserLPAREN { + { + p.SetState(314) + p.Match(KuneiformParserLPAREN) + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } + } + p.SetState(323) + p.GetErrorHandler().Sync(p) if p.HasError() { - // Recognition error - abort rule goto errorExit } - } - { - p.SetState(265) _la = p.GetTokenStream().LA(1) - if !((int64(_la) & ^0x3f) == 0 && ((int64(1)<<_la)&288379909733089280) != 0) { - p.GetErrorHandler().RecoverInline(p) - } else { - p.GetErrorHandler().ReportMatch(p) - p.Consume() + if _la == KuneiformParserDOUBLE_QUOTE || _la == KuneiformParserIDENTIFIER { + { + p.SetState(315) + p.Identifier() + } + p.SetState(320) + p.GetErrorHandler().Sync(p) + if p.HasError() { + goto errorExit + } + _la = p.GetTokenStream().LA(1) + + for _la == KuneiformParserCOMMA { + { + p.SetState(316) + 
p.Match(KuneiformParserCOMMA) + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } + } + { + p.SetState(317) + p.Identifier() + } + + p.SetState(322) + p.GetErrorHandler().Sync(p) + if p.HasError() { + goto errorExit + } + _la = p.GetTokenStream().LA(1) + } + + } + { + p.SetState(325) + p.Match(KuneiformParserRPAREN) + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } + } + + } + { + p.SetState(328) + p.Match(KuneiformParserAS) + if p.HasError() { + // Recognition error - abort rule + goto errorExit } } { - p.SetState(266) + p.SetState(329) p.Match(KuneiformParserLPAREN) if p.HasError() { // Recognition error - abort rule @@ -4436,14 +5191,11 @@ func (p *KuneiformParser) Index_def() (localctx IIndex_defContext) { } } { - p.SetState(267) - - var _x = p.Identifier_list() - - localctx.(*Index_defContext).columns = _x + p.SetState(330) + p.Select_statement() } { - p.SetState(268) + p.SetState(331) p.Match(KuneiformParserRPAREN) if p.HasError() { // Recognition error - abort rule @@ -4464,128 +5216,122 @@ errorExit: goto errorExit // Trick to prevent compiler error if the label is not used } -// IForeign_key_defContext is an interface to support dynamic dispatch. -type IForeign_key_defContext interface { +// ICreate_table_statementContext is an interface to support dynamic dispatch. +type ICreate_table_statementContext interface { antlr.ParserRuleContext // GetParser returns the parser. GetParser() antlr.Parser - // GetParent_table returns the parent_table token. - GetParent_table() antlr.Token - - // SetParent_table sets the parent_table token. - SetParent_table(antlr.Token) - - // GetChild_keys returns the child_keys rule contexts. - GetChild_keys() IIdentifier_listContext + // GetName returns the name rule contexts. + GetName() IIdentifierContext - // GetParent_keys returns the parent_keys rule contexts. - GetParent_keys() IIdentifier_listContext - - // SetChild_keys sets the child_keys rule contexts. - SetChild_keys(IIdentifier_listContext) - - // SetParent_keys sets the parent_keys rule contexts. - SetParent_keys(IIdentifier_listContext) + // SetName sets the name rule contexts. + SetName(IIdentifierContext) // Getter signatures - AllLPAREN() []antlr.TerminalNode - LPAREN(i int) antlr.TerminalNode - AllRPAREN() []antlr.TerminalNode - RPAREN(i int) antlr.TerminalNode - AllIdentifier_list() []IIdentifier_listContext - Identifier_list(i int) IIdentifier_listContext - REFERENCES() antlr.TerminalNode - REF() antlr.TerminalNode - IDENTIFIER() antlr.TerminalNode - FOREIGN() antlr.TerminalNode - KEY() antlr.TerminalNode - LEGACY_FOREIGN_KEY() antlr.TerminalNode - AllForeign_key_action() []IForeign_key_actionContext - Foreign_key_action(i int) IForeign_key_actionContext + CREATE() antlr.TerminalNode + TABLE() antlr.TerminalNode + LPAREN() antlr.TerminalNode + RPAREN() antlr.TerminalNode + Identifier() IIdentifierContext + AllTable_column_def() []ITable_column_defContext + Table_column_def(i int) ITable_column_defContext + AllTable_constraint_def() []ITable_constraint_defContext + Table_constraint_def(i int) ITable_constraint_defContext + IF() antlr.TerminalNode + NOT() antlr.TerminalNode + EXISTS() antlr.TerminalNode + AllCOMMA() []antlr.TerminalNode + COMMA(i int) antlr.TerminalNode - // IsForeign_key_defContext differentiates from other interfaces. - IsForeign_key_defContext() + // IsCreate_table_statementContext differentiates from other interfaces. 
+ IsCreate_table_statementContext() } -type Foreign_key_defContext struct { +type Create_table_statementContext struct { antlr.BaseParserRuleContext - parser antlr.Parser - child_keys IIdentifier_listContext - parent_table antlr.Token - parent_keys IIdentifier_listContext + parser antlr.Parser + name IIdentifierContext } -func NewEmptyForeign_key_defContext() *Foreign_key_defContext { - var p = new(Foreign_key_defContext) +func NewEmptyCreate_table_statementContext() *Create_table_statementContext { + var p = new(Create_table_statementContext) antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1) - p.RuleIndex = KuneiformParserRULE_foreign_key_def + p.RuleIndex = KuneiformParserRULE_create_table_statement return p } -func InitEmptyForeign_key_defContext(p *Foreign_key_defContext) { +func InitEmptyCreate_table_statementContext(p *Create_table_statementContext) { antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1) - p.RuleIndex = KuneiformParserRULE_foreign_key_def + p.RuleIndex = KuneiformParserRULE_create_table_statement } -func (*Foreign_key_defContext) IsForeign_key_defContext() {} +func (*Create_table_statementContext) IsCreate_table_statementContext() {} -func NewForeign_key_defContext(parser antlr.Parser, parent antlr.ParserRuleContext, invokingState int) *Foreign_key_defContext { - var p = new(Foreign_key_defContext) +func NewCreate_table_statementContext(parser antlr.Parser, parent antlr.ParserRuleContext, invokingState int) *Create_table_statementContext { + var p = new(Create_table_statementContext) antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, parent, invokingState) p.parser = parser - p.RuleIndex = KuneiformParserRULE_foreign_key_def + p.RuleIndex = KuneiformParserRULE_create_table_statement return p } -func (s *Foreign_key_defContext) GetParser() antlr.Parser { return s.parser } - -func (s *Foreign_key_defContext) GetParent_table() antlr.Token { return s.parent_table } - -func (s *Foreign_key_defContext) SetParent_table(v antlr.Token) { s.parent_table = v } +func (s *Create_table_statementContext) GetParser() antlr.Parser { return s.parser } -func (s *Foreign_key_defContext) GetChild_keys() IIdentifier_listContext { return s.child_keys } +func (s *Create_table_statementContext) GetName() IIdentifierContext { return s.name } -func (s *Foreign_key_defContext) GetParent_keys() IIdentifier_listContext { return s.parent_keys } +func (s *Create_table_statementContext) SetName(v IIdentifierContext) { s.name = v } -func (s *Foreign_key_defContext) SetChild_keys(v IIdentifier_listContext) { s.child_keys = v } - -func (s *Foreign_key_defContext) SetParent_keys(v IIdentifier_listContext) { s.parent_keys = v } +func (s *Create_table_statementContext) CREATE() antlr.TerminalNode { + return s.GetToken(KuneiformParserCREATE, 0) +} -func (s *Foreign_key_defContext) AllLPAREN() []antlr.TerminalNode { - return s.GetTokens(KuneiformParserLPAREN) +func (s *Create_table_statementContext) TABLE() antlr.TerminalNode { + return s.GetToken(KuneiformParserTABLE, 0) } -func (s *Foreign_key_defContext) LPAREN(i int) antlr.TerminalNode { - return s.GetToken(KuneiformParserLPAREN, i) +func (s *Create_table_statementContext) LPAREN() antlr.TerminalNode { + return s.GetToken(KuneiformParserLPAREN, 0) } -func (s *Foreign_key_defContext) AllRPAREN() []antlr.TerminalNode { - return s.GetTokens(KuneiformParserRPAREN) +func (s *Create_table_statementContext) RPAREN() antlr.TerminalNode { + return s.GetToken(KuneiformParserRPAREN, 0) } -func (s *Foreign_key_defContext) 
RPAREN(i int) antlr.TerminalNode { - return s.GetToken(KuneiformParserRPAREN, i) +func (s *Create_table_statementContext) Identifier() IIdentifierContext { + var t antlr.RuleContext + for _, ctx := range s.GetChildren() { + if _, ok := ctx.(IIdentifierContext); ok { + t = ctx.(antlr.RuleContext) + break + } + } + + if t == nil { + return nil + } + + return t.(IIdentifierContext) } -func (s *Foreign_key_defContext) AllIdentifier_list() []IIdentifier_listContext { +func (s *Create_table_statementContext) AllTable_column_def() []ITable_column_defContext { children := s.GetChildren() len := 0 for _, ctx := range children { - if _, ok := ctx.(IIdentifier_listContext); ok { + if _, ok := ctx.(ITable_column_defContext); ok { len++ } } - tst := make([]IIdentifier_listContext, len) + tst := make([]ITable_column_defContext, len) i := 0 for _, ctx := range children { - if t, ok := ctx.(IIdentifier_listContext); ok { - tst[i] = t.(IIdentifier_listContext) + if t, ok := ctx.(ITable_column_defContext); ok { + tst[i] = t.(ITable_column_defContext) i++ } } @@ -4593,11 +5339,11 @@ func (s *Foreign_key_defContext) AllIdentifier_list() []IIdentifier_listContext return tst } -func (s *Foreign_key_defContext) Identifier_list(i int) IIdentifier_listContext { +func (s *Create_table_statementContext) Table_column_def(i int) ITable_column_defContext { var t antlr.RuleContext j := 0 for _, ctx := range s.GetChildren() { - if _, ok := ctx.(IIdentifier_listContext); ok { + if _, ok := ctx.(ITable_column_defContext); ok { if j == i { t = ctx.(antlr.RuleContext) break @@ -4610,47 +5356,23 @@ func (s *Foreign_key_defContext) Identifier_list(i int) IIdentifier_listContext return nil } - return t.(IIdentifier_listContext) -} - -func (s *Foreign_key_defContext) REFERENCES() antlr.TerminalNode { - return s.GetToken(KuneiformParserREFERENCES, 0) -} - -func (s *Foreign_key_defContext) REF() antlr.TerminalNode { - return s.GetToken(KuneiformParserREF, 0) -} - -func (s *Foreign_key_defContext) IDENTIFIER() antlr.TerminalNode { - return s.GetToken(KuneiformParserIDENTIFIER, 0) -} - -func (s *Foreign_key_defContext) FOREIGN() antlr.TerminalNode { - return s.GetToken(KuneiformParserFOREIGN, 0) -} - -func (s *Foreign_key_defContext) KEY() antlr.TerminalNode { - return s.GetToken(KuneiformParserKEY, 0) -} - -func (s *Foreign_key_defContext) LEGACY_FOREIGN_KEY() antlr.TerminalNode { - return s.GetToken(KuneiformParserLEGACY_FOREIGN_KEY, 0) + return t.(ITable_column_defContext) } -func (s *Foreign_key_defContext) AllForeign_key_action() []IForeign_key_actionContext { +func (s *Create_table_statementContext) AllTable_constraint_def() []ITable_constraint_defContext { children := s.GetChildren() len := 0 for _, ctx := range children { - if _, ok := ctx.(IForeign_key_actionContext); ok { + if _, ok := ctx.(ITable_constraint_defContext); ok { len++ } } - tst := make([]IForeign_key_actionContext, len) + tst := make([]ITable_constraint_defContext, len) i := 0 for _, ctx := range children { - if t, ok := ctx.(IForeign_key_actionContext); ok { - tst[i] = t.(IForeign_key_actionContext) + if t, ok := ctx.(ITable_constraint_defContext); ok { + tst[i] = t.(ITable_constraint_defContext) i++ } } @@ -4658,11 +5380,11 @@ func (s *Foreign_key_defContext) AllForeign_key_action() []IForeign_key_actionCo return tst } -func (s *Foreign_key_defContext) Foreign_key_action(i int) IForeign_key_actionContext { +func (s *Create_table_statementContext) Table_constraint_def(i int) ITable_constraint_defContext { var t antlr.RuleContext j := 0 for _, ctx := range 
s.GetChildren() { - if _, ok := ctx.(IForeign_key_actionContext); ok { + if _, ok := ctx.(ITable_constraint_defContext); ok { if j == i { t = ctx.(antlr.RuleContext) break @@ -4675,160 +5397,196 @@ func (s *Foreign_key_defContext) Foreign_key_action(i int) IForeign_key_actionCo return nil } - return t.(IForeign_key_actionContext) + return t.(ITable_constraint_defContext) } -func (s *Foreign_key_defContext) GetRuleContext() antlr.RuleContext { - return s +func (s *Create_table_statementContext) IF() antlr.TerminalNode { + return s.GetToken(KuneiformParserIF, 0) } -func (s *Foreign_key_defContext) ToStringTree(ruleNames []string, recog antlr.Recognizer) string { - return antlr.TreesStringTree(s, ruleNames, recog) +func (s *Create_table_statementContext) NOT() antlr.TerminalNode { + return s.GetToken(KuneiformParserNOT, 0) +} + +func (s *Create_table_statementContext) EXISTS() antlr.TerminalNode { + return s.GetToken(KuneiformParserEXISTS, 0) +} + +func (s *Create_table_statementContext) AllCOMMA() []antlr.TerminalNode { + return s.GetTokens(KuneiformParserCOMMA) +} + +func (s *Create_table_statementContext) COMMA(i int) antlr.TerminalNode { + return s.GetToken(KuneiformParserCOMMA, i) +} + +func (s *Create_table_statementContext) GetRuleContext() antlr.RuleContext { + return s +} + +func (s *Create_table_statementContext) ToStringTree(ruleNames []string, recog antlr.Recognizer) string { + return antlr.TreesStringTree(s, ruleNames, recog) } -func (s *Foreign_key_defContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { +func (s *Create_table_statementContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { switch t := visitor.(type) { case KuneiformParserVisitor: - return t.VisitForeign_key_def(s) + return t.VisitCreate_table_statement(s) default: return t.VisitChildren(s) } } -func (p *KuneiformParser) Foreign_key_def() (localctx IForeign_key_defContext) { - localctx = NewForeign_key_defContext(p, p.GetParserRuleContext(), p.GetState()) - p.EnterRule(localctx, 36, KuneiformParserRULE_foreign_key_def) +func (p *KuneiformParser) Create_table_statement() (localctx ICreate_table_statementContext) { + localctx = NewCreate_table_statementContext(p, p.GetParserRuleContext(), p.GetState()) + p.EnterRule(localctx, 34, KuneiformParserRULE_create_table_statement) var _la int p.EnterOuterAlt(localctx, 1) - p.SetState(273) + { + p.SetState(333) + p.Match(KuneiformParserCREATE) + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } + } + { + p.SetState(334) + p.Match(KuneiformParserTABLE) + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } + } + p.SetState(338) p.GetErrorHandler().Sync(p) if p.HasError() { goto errorExit } + _la = p.GetTokenStream().LA(1) - switch p.GetTokenStream().LA(1) { - case KuneiformParserFOREIGN: + if _la == KuneiformParserIF { { - p.SetState(270) - p.Match(KuneiformParserFOREIGN) + p.SetState(335) + p.Match(KuneiformParserIF) if p.HasError() { // Recognition error - abort rule goto errorExit } } { - p.SetState(271) - p.Match(KuneiformParserKEY) + p.SetState(336) + p.Match(KuneiformParserNOT) if p.HasError() { // Recognition error - abort rule goto errorExit } } - - case KuneiformParserLEGACY_FOREIGN_KEY: { - p.SetState(272) - p.Match(KuneiformParserLEGACY_FOREIGN_KEY) + p.SetState(337) + p.Match(KuneiformParserEXISTS) if p.HasError() { // Recognition error - abort rule goto errorExit } } - default: - p.SetError(antlr.NewNoViableAltException(p, nil, nil, nil, nil, nil)) - goto errorExit - } - { - p.SetState(275) - 
p.Match(KuneiformParserLPAREN) - if p.HasError() { - // Recognition error - abort rule - goto errorExit - } } { - p.SetState(276) + p.SetState(340) - var _x = p.Identifier_list() + var _x = p.Identifier() - localctx.(*Foreign_key_defContext).child_keys = _x + localctx.(*Create_table_statementContext).name = _x } { - p.SetState(277) - p.Match(KuneiformParserRPAREN) + p.SetState(341) + p.Match(KuneiformParserLPAREN) if p.HasError() { // Recognition error - abort rule goto errorExit } } - { - p.SetState(278) - _la = p.GetTokenStream().LA(1) - - if !(_la == KuneiformParserREFERENCES || _la == KuneiformParserREF) { - p.GetErrorHandler().RecoverInline(p) - } else { - p.GetErrorHandler().ReportMatch(p) - p.Consume() - } + p.SetState(344) + p.GetErrorHandler().Sync(p) + if p.HasError() { + goto errorExit } - { - p.SetState(279) - - var _m = p.Match(KuneiformParserIDENTIFIER) - localctx.(*Foreign_key_defContext).parent_table = _m - if p.HasError() { - // Recognition error - abort rule - goto errorExit - } - } - { - p.SetState(280) - p.Match(KuneiformParserLPAREN) - if p.HasError() { - // Recognition error - abort rule - goto errorExit + switch p.GetTokenStream().LA(1) { + case KuneiformParserDOUBLE_QUOTE, KuneiformParserIDENTIFIER: + { + p.SetState(342) + p.Table_column_def() } - } - { - p.SetState(281) - - var _x = p.Identifier_list() - localctx.(*Foreign_key_defContext).parent_keys = _x - } - { - p.SetState(282) - p.Match(KuneiformParserRPAREN) - if p.HasError() { - // Recognition error - abort rule - goto errorExit + case KuneiformParserCONSTRAINT, KuneiformParserCHECK, KuneiformParserFOREIGN, KuneiformParserPRIMARY, KuneiformParserUNIQUE: + { + p.SetState(343) + p.Table_constraint_def() } + + default: + p.SetError(antlr.NewNoViableAltException(p, nil, nil, nil, nil, nil)) + goto errorExit } - p.SetState(286) + p.SetState(353) p.GetErrorHandler().Sync(p) if p.HasError() { goto errorExit } _la = p.GetTokenStream().LA(1) - for _la == KuneiformParserON || _la == KuneiformParserLEGACY_ON_UPDATE || _la == KuneiformParserLEGACY_ON_DELETE { + for _la == KuneiformParserCOMMA { { - p.SetState(283) - p.Foreign_key_action() + p.SetState(346) + p.Match(KuneiformParserCOMMA) + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } + } + p.SetState(349) + p.GetErrorHandler().Sync(p) + if p.HasError() { + goto errorExit + } + + switch p.GetTokenStream().LA(1) { + case KuneiformParserDOUBLE_QUOTE, KuneiformParserIDENTIFIER: + { + p.SetState(347) + p.Table_column_def() + } + + case KuneiformParserCONSTRAINT, KuneiformParserCHECK, KuneiformParserFOREIGN, KuneiformParserPRIMARY, KuneiformParserUNIQUE: + { + p.SetState(348) + p.Table_constraint_def() + } + + default: + p.SetError(antlr.NewNoViableAltException(p, nil, nil, nil, nil, nil)) + goto errorExit } - p.SetState(288) + p.SetState(355) p.GetErrorHandler().Sync(p) if p.HasError() { goto errorExit } _la = p.GetTokenStream().LA(1) } + { + p.SetState(356) + p.Match(KuneiformParserRPAREN) + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } + } errorExit: if p.HasError() { @@ -4843,416 +5601,368 @@ errorExit: goto errorExit // Trick to prevent compiler error if the label is not used } -// IForeign_key_actionContext is an interface to support dynamic dispatch. -type IForeign_key_actionContext interface { +// ITable_constraint_defContext is an interface to support dynamic dispatch. +type ITable_constraint_defContext interface { antlr.ParserRuleContext // GetParser returns the parser. 
GetParser() antlr.Parser + // GetName returns the name rule contexts. + GetName() IIdentifierContext + + // SetName sets the name rule contexts. + SetName(IIdentifierContext) + // Getter signatures - CASCADE() antlr.TerminalNode - RESTRICT() antlr.TerminalNode - DO() antlr.TerminalNode - ON() antlr.TerminalNode - UPDATE() antlr.TerminalNode - LEGACY_ON_UPDATE() antlr.TerminalNode - DELETE() antlr.TerminalNode - LEGACY_ON_DELETE() antlr.TerminalNode - NO() antlr.TerminalNode - ACTION() antlr.TerminalNode - LEGACY_NO_ACTION() antlr.TerminalNode - SET() antlr.TerminalNode - NULL() antlr.TerminalNode - LEGACY_SET_NULL() antlr.TerminalNode - DEFAULT() antlr.TerminalNode - LEGACY_SET_DEFAULT() antlr.TerminalNode + UNIQUE() antlr.TerminalNode + LPAREN() antlr.TerminalNode + Identifier_list() IIdentifier_listContext + RPAREN() antlr.TerminalNode + CHECK() antlr.TerminalNode + Sql_expr() ISql_exprContext + FOREIGN() antlr.TerminalNode + KEY() antlr.TerminalNode + Fk_constraint() IFk_constraintContext + PRIMARY() antlr.TerminalNode + CONSTRAINT() antlr.TerminalNode + Identifier() IIdentifierContext - // IsForeign_key_actionContext differentiates from other interfaces. - IsForeign_key_actionContext() + // IsTable_constraint_defContext differentiates from other interfaces. + IsTable_constraint_defContext() } -type Foreign_key_actionContext struct { +type Table_constraint_defContext struct { antlr.BaseParserRuleContext parser antlr.Parser + name IIdentifierContext } -func NewEmptyForeign_key_actionContext() *Foreign_key_actionContext { - var p = new(Foreign_key_actionContext) +func NewEmptyTable_constraint_defContext() *Table_constraint_defContext { + var p = new(Table_constraint_defContext) antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1) - p.RuleIndex = KuneiformParserRULE_foreign_key_action + p.RuleIndex = KuneiformParserRULE_table_constraint_def return p } -func InitEmptyForeign_key_actionContext(p *Foreign_key_actionContext) { +func InitEmptyTable_constraint_defContext(p *Table_constraint_defContext) { antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1) - p.RuleIndex = KuneiformParserRULE_foreign_key_action + p.RuleIndex = KuneiformParserRULE_table_constraint_def } -func (*Foreign_key_actionContext) IsForeign_key_actionContext() {} +func (*Table_constraint_defContext) IsTable_constraint_defContext() {} -func NewForeign_key_actionContext(parser antlr.Parser, parent antlr.ParserRuleContext, invokingState int) *Foreign_key_actionContext { - var p = new(Foreign_key_actionContext) +func NewTable_constraint_defContext(parser antlr.Parser, parent antlr.ParserRuleContext, invokingState int) *Table_constraint_defContext { + var p = new(Table_constraint_defContext) antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, parent, invokingState) p.parser = parser - p.RuleIndex = KuneiformParserRULE_foreign_key_action + p.RuleIndex = KuneiformParserRULE_table_constraint_def return p } -func (s *Foreign_key_actionContext) GetParser() antlr.Parser { return s.parser } +func (s *Table_constraint_defContext) GetParser() antlr.Parser { return s.parser } -func (s *Foreign_key_actionContext) CASCADE() antlr.TerminalNode { - return s.GetToken(KuneiformParserCASCADE, 0) -} +func (s *Table_constraint_defContext) GetName() IIdentifierContext { return s.name } -func (s *Foreign_key_actionContext) RESTRICT() antlr.TerminalNode { - return s.GetToken(KuneiformParserRESTRICT, 0) -} +func (s *Table_constraint_defContext) SetName(v IIdentifierContext) { s.name = v } -func (s 
*Foreign_key_actionContext) DO() antlr.TerminalNode { - return s.GetToken(KuneiformParserDO, 0) +func (s *Table_constraint_defContext) UNIQUE() antlr.TerminalNode { + return s.GetToken(KuneiformParserUNIQUE, 0) } -func (s *Foreign_key_actionContext) ON() antlr.TerminalNode { - return s.GetToken(KuneiformParserON, 0) +func (s *Table_constraint_defContext) LPAREN() antlr.TerminalNode { + return s.GetToken(KuneiformParserLPAREN, 0) } -func (s *Foreign_key_actionContext) UPDATE() antlr.TerminalNode { - return s.GetToken(KuneiformParserUPDATE, 0) -} +func (s *Table_constraint_defContext) Identifier_list() IIdentifier_listContext { + var t antlr.RuleContext + for _, ctx := range s.GetChildren() { + if _, ok := ctx.(IIdentifier_listContext); ok { + t = ctx.(antlr.RuleContext) + break + } + } -func (s *Foreign_key_actionContext) LEGACY_ON_UPDATE() antlr.TerminalNode { - return s.GetToken(KuneiformParserLEGACY_ON_UPDATE, 0) -} + if t == nil { + return nil + } -func (s *Foreign_key_actionContext) DELETE() antlr.TerminalNode { - return s.GetToken(KuneiformParserDELETE, 0) + return t.(IIdentifier_listContext) } -func (s *Foreign_key_actionContext) LEGACY_ON_DELETE() antlr.TerminalNode { - return s.GetToken(KuneiformParserLEGACY_ON_DELETE, 0) +func (s *Table_constraint_defContext) RPAREN() antlr.TerminalNode { + return s.GetToken(KuneiformParserRPAREN, 0) } -func (s *Foreign_key_actionContext) NO() antlr.TerminalNode { - return s.GetToken(KuneiformParserNO, 0) +func (s *Table_constraint_defContext) CHECK() antlr.TerminalNode { + return s.GetToken(KuneiformParserCHECK, 0) } -func (s *Foreign_key_actionContext) ACTION() antlr.TerminalNode { - return s.GetToken(KuneiformParserACTION, 0) +func (s *Table_constraint_defContext) Sql_expr() ISql_exprContext { + var t antlr.RuleContext + for _, ctx := range s.GetChildren() { + if _, ok := ctx.(ISql_exprContext); ok { + t = ctx.(antlr.RuleContext) + break + } + } + + if t == nil { + return nil + } + + return t.(ISql_exprContext) } -func (s *Foreign_key_actionContext) LEGACY_NO_ACTION() antlr.TerminalNode { - return s.GetToken(KuneiformParserLEGACY_NO_ACTION, 0) +func (s *Table_constraint_defContext) FOREIGN() antlr.TerminalNode { + return s.GetToken(KuneiformParserFOREIGN, 0) } -func (s *Foreign_key_actionContext) SET() antlr.TerminalNode { - return s.GetToken(KuneiformParserSET, 0) +func (s *Table_constraint_defContext) KEY() antlr.TerminalNode { + return s.GetToken(KuneiformParserKEY, 0) } -func (s *Foreign_key_actionContext) NULL() antlr.TerminalNode { - return s.GetToken(KuneiformParserNULL, 0) +func (s *Table_constraint_defContext) Fk_constraint() IFk_constraintContext { + var t antlr.RuleContext + for _, ctx := range s.GetChildren() { + if _, ok := ctx.(IFk_constraintContext); ok { + t = ctx.(antlr.RuleContext) + break + } + } + + if t == nil { + return nil + } + + return t.(IFk_constraintContext) } -func (s *Foreign_key_actionContext) LEGACY_SET_NULL() antlr.TerminalNode { - return s.GetToken(KuneiformParserLEGACY_SET_NULL, 0) +func (s *Table_constraint_defContext) PRIMARY() antlr.TerminalNode { + return s.GetToken(KuneiformParserPRIMARY, 0) } -func (s *Foreign_key_actionContext) DEFAULT() antlr.TerminalNode { - return s.GetToken(KuneiformParserDEFAULT, 0) +func (s *Table_constraint_defContext) CONSTRAINT() antlr.TerminalNode { + return s.GetToken(KuneiformParserCONSTRAINT, 0) } -func (s *Foreign_key_actionContext) LEGACY_SET_DEFAULT() antlr.TerminalNode { - return s.GetToken(KuneiformParserLEGACY_SET_DEFAULT, 0) +func (s *Table_constraint_defContext) 
Identifier() IIdentifierContext { + var t antlr.RuleContext + for _, ctx := range s.GetChildren() { + if _, ok := ctx.(IIdentifierContext); ok { + t = ctx.(antlr.RuleContext) + break + } + } + + if t == nil { + return nil + } + + return t.(IIdentifierContext) } -func (s *Foreign_key_actionContext) GetRuleContext() antlr.RuleContext { +func (s *Table_constraint_defContext) GetRuleContext() antlr.RuleContext { return s } -func (s *Foreign_key_actionContext) ToStringTree(ruleNames []string, recog antlr.Recognizer) string { +func (s *Table_constraint_defContext) ToStringTree(ruleNames []string, recog antlr.Recognizer) string { return antlr.TreesStringTree(s, ruleNames, recog) } -func (s *Foreign_key_actionContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { +func (s *Table_constraint_defContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { switch t := visitor.(type) { case KuneiformParserVisitor: - return t.VisitForeign_key_action(s) + return t.VisitTable_constraint_def(s) default: return t.VisitChildren(s) } } -func (p *KuneiformParser) Foreign_key_action() (localctx IForeign_key_actionContext) { - localctx = NewForeign_key_actionContext(p, p.GetParserRuleContext(), p.GetState()) - p.EnterRule(localctx, 38, KuneiformParserRULE_foreign_key_action) +func (p *KuneiformParser) Table_constraint_def() (localctx ITable_constraint_defContext) { + localctx = NewTable_constraint_defContext(p, p.GetParserRuleContext(), p.GetState()) + p.EnterRule(localctx, 36, KuneiformParserRULE_table_constraint_def) var _la int p.EnterOuterAlt(localctx, 1) - p.SetState(299) + p.SetState(360) p.GetErrorHandler().Sync(p) if p.HasError() { goto errorExit } + _la = p.GetTokenStream().LA(1) - switch p.GetInterpreter().AdaptivePredict(p.BaseParser, p.GetTokenStream(), 21, p.GetParserRuleContext()) { - case 1: - p.SetState(292) - p.GetErrorHandler().Sync(p) - if p.HasError() { - goto errorExit + if _la == KuneiformParserCONSTRAINT { + { + p.SetState(358) + p.Match(KuneiformParserCONSTRAINT) + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } } + { + p.SetState(359) - switch p.GetTokenStream().LA(1) { - case KuneiformParserON: - { - p.SetState(289) - p.Match(KuneiformParserON) - if p.HasError() { - // Recognition error - abort rule - goto errorExit - } - } - { - p.SetState(290) - p.Match(KuneiformParserUPDATE) - if p.HasError() { - // Recognition error - abort rule - goto errorExit - } - } + var _x = p.Identifier() - case KuneiformParserLEGACY_ON_UPDATE: - { - p.SetState(291) - p.Match(KuneiformParserLEGACY_ON_UPDATE) - if p.HasError() { - // Recognition error - abort rule - goto errorExit - } - } + localctx.(*Table_constraint_defContext).name = _x + } - default: - p.SetError(antlr.NewNoViableAltException(p, nil, nil, nil, nil, nil)) - goto errorExit - } - - case 2: - p.SetState(297) - p.GetErrorHandler().Sync(p) - if p.HasError() { - goto errorExit - } - - switch p.GetTokenStream().LA(1) { - case KuneiformParserON: - { - p.SetState(294) - p.Match(KuneiformParserON) - if p.HasError() { - // Recognition error - abort rule - goto errorExit - } - } - { - p.SetState(295) - p.Match(KuneiformParserDELETE) - if p.HasError() { - // Recognition error - abort rule - goto errorExit - } - } - - case KuneiformParserLEGACY_ON_DELETE: - { - p.SetState(296) - p.Match(KuneiformParserLEGACY_ON_DELETE) - if p.HasError() { - // Recognition error - abort rule - goto errorExit - } - } - - default: - p.SetError(antlr.NewNoViableAltException(p, nil, nil, nil, nil, nil)) - goto errorExit - } - - case 
antlr.ATNInvalidAltNumber: - goto errorExit } - p.SetState(302) + p.SetState(385) p.GetErrorHandler().Sync(p) if p.HasError() { goto errorExit } - _la = p.GetTokenStream().LA(1) - if _la == KuneiformParserDO { + switch p.GetTokenStream().LA(1) { + case KuneiformParserUNIQUE: { - p.SetState(301) - p.Match(KuneiformParserDO) + p.SetState(362) + p.Match(KuneiformParserUNIQUE) if p.HasError() { // Recognition error - abort rule goto errorExit } } - - } - p.SetState(321) - p.GetErrorHandler().Sync(p) - if p.HasError() { - goto errorExit - } - - switch p.GetInterpreter().AdaptivePredict(p.BaseParser, p.GetTokenStream(), 26, p.GetParserRuleContext()) { - case 1: - p.SetState(307) - p.GetErrorHandler().Sync(p) - if p.HasError() { - goto errorExit - } - - switch p.GetTokenStream().LA(1) { - case KuneiformParserNO: - { - p.SetState(304) - p.Match(KuneiformParserNO) - if p.HasError() { - // Recognition error - abort rule - goto errorExit - } + { + p.SetState(363) + p.Match(KuneiformParserLPAREN) + if p.HasError() { + // Recognition error - abort rule + goto errorExit } - { - p.SetState(305) - p.Match(KuneiformParserACTION) - if p.HasError() { - // Recognition error - abort rule - goto errorExit - } + } + { + p.SetState(364) + p.Identifier_list() + } + { + p.SetState(365) + p.Match(KuneiformParserRPAREN) + if p.HasError() { + // Recognition error - abort rule + goto errorExit } + } - case KuneiformParserLEGACY_NO_ACTION: - { - p.SetState(306) - p.Match(KuneiformParserLEGACY_NO_ACTION) - if p.HasError() { - // Recognition error - abort rule - goto errorExit - } + case KuneiformParserCHECK: + { + p.SetState(367) + p.Match(KuneiformParserCHECK) + if p.HasError() { + // Recognition error - abort rule + goto errorExit } - - default: - p.SetError(antlr.NewNoViableAltException(p, nil, nil, nil, nil, nil)) - goto errorExit } - - case 2: { - p.SetState(309) - p.Match(KuneiformParserCASCADE) + p.SetState(368) + p.Match(KuneiformParserLPAREN) if p.HasError() { // Recognition error - abort rule goto errorExit } } - - case 3: - p.SetState(313) - p.GetErrorHandler().Sync(p) - if p.HasError() { - goto errorExit + { + p.SetState(369) + p.sql_expr(0) + } + { + p.SetState(370) + p.Match(KuneiformParserRPAREN) + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } } - switch p.GetTokenStream().LA(1) { - case KuneiformParserSET: - { - p.SetState(310) - p.Match(KuneiformParserSET) - if p.HasError() { - // Recognition error - abort rule - goto errorExit - } + case KuneiformParserFOREIGN: + { + p.SetState(372) + p.Match(KuneiformParserFOREIGN) + if p.HasError() { + // Recognition error - abort rule + goto errorExit } - { - p.SetState(311) - p.Match(KuneiformParserNULL) - if p.HasError() { - // Recognition error - abort rule - goto errorExit - } + } + { + p.SetState(373) + p.Match(KuneiformParserKEY) + if p.HasError() { + // Recognition error - abort rule + goto errorExit } - - case KuneiformParserLEGACY_SET_NULL: - { - p.SetState(312) - p.Match(KuneiformParserLEGACY_SET_NULL) - if p.HasError() { - // Recognition error - abort rule - goto errorExit - } + } + { + p.SetState(374) + p.Match(KuneiformParserLPAREN) + if p.HasError() { + // Recognition error - abort rule + goto errorExit } - - default: - p.SetError(antlr.NewNoViableAltException(p, nil, nil, nil, nil, nil)) - goto errorExit } - - case 4: - p.SetState(318) - p.GetErrorHandler().Sync(p) - if p.HasError() { - goto errorExit + { + p.SetState(375) + p.Identifier_list() + } + { + p.SetState(376) + p.Match(KuneiformParserRPAREN) + if p.HasError() { + 
// Recognition error - abort rule + goto errorExit + } + } + { + p.SetState(377) + p.Fk_constraint() } - switch p.GetTokenStream().LA(1) { - case KuneiformParserSET: - { - p.SetState(315) - p.Match(KuneiformParserSET) - if p.HasError() { - // Recognition error - abort rule - goto errorExit - } + case KuneiformParserPRIMARY: + { + p.SetState(379) + p.Match(KuneiformParserPRIMARY) + if p.HasError() { + // Recognition error - abort rule + goto errorExit } - { - p.SetState(316) - p.Match(KuneiformParserDEFAULT) - if p.HasError() { - // Recognition error - abort rule - goto errorExit - } + } + { + p.SetState(380) + p.Match(KuneiformParserKEY) + if p.HasError() { + // Recognition error - abort rule + goto errorExit } - - case KuneiformParserLEGACY_SET_DEFAULT: - { - p.SetState(317) - p.Match(KuneiformParserLEGACY_SET_DEFAULT) - if p.HasError() { - // Recognition error - abort rule - goto errorExit - } + } + { + p.SetState(381) + p.Match(KuneiformParserLPAREN) + if p.HasError() { + // Recognition error - abort rule + goto errorExit } - - default: - p.SetError(antlr.NewNoViableAltException(p, nil, nil, nil, nil, nil)) - goto errorExit } - - case 5: { - p.SetState(320) - p.Match(KuneiformParserRESTRICT) + p.SetState(382) + p.Identifier_list() + } + { + p.SetState(383) + p.Match(KuneiformParserRPAREN) if p.HasError() { // Recognition error - abort rule goto errorExit } } - case antlr.ATNInvalidAltNumber: + default: + p.SetError(antlr.NewNoViableAltException(p, nil, nil, nil, nil, nil)) goto errorExit } @@ -5269,159 +5979,95 @@ errorExit: goto errorExit // Trick to prevent compiler error if the label is not used } -// IType_listContext is an interface to support dynamic dispatch. -type IType_listContext interface { +// IOpt_drop_behaviorContext is an interface to support dynamic dispatch. +type IOpt_drop_behaviorContext interface { antlr.ParserRuleContext // GetParser returns the parser. GetParser() antlr.Parser // Getter signatures - AllType_() []ITypeContext - Type_(i int) ITypeContext - AllCOMMA() []antlr.TerminalNode - COMMA(i int) antlr.TerminalNode + CASCADE() antlr.TerminalNode + RESTRICT() antlr.TerminalNode - // IsType_listContext differentiates from other interfaces. - IsType_listContext() + // IsOpt_drop_behaviorContext differentiates from other interfaces. 
+ IsOpt_drop_behaviorContext() } -type Type_listContext struct { +type Opt_drop_behaviorContext struct { antlr.BaseParserRuleContext parser antlr.Parser } -func NewEmptyType_listContext() *Type_listContext { - var p = new(Type_listContext) +func NewEmptyOpt_drop_behaviorContext() *Opt_drop_behaviorContext { + var p = new(Opt_drop_behaviorContext) antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1) - p.RuleIndex = KuneiformParserRULE_type_list + p.RuleIndex = KuneiformParserRULE_opt_drop_behavior return p } -func InitEmptyType_listContext(p *Type_listContext) { +func InitEmptyOpt_drop_behaviorContext(p *Opt_drop_behaviorContext) { antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1) - p.RuleIndex = KuneiformParserRULE_type_list + p.RuleIndex = KuneiformParserRULE_opt_drop_behavior } -func (*Type_listContext) IsType_listContext() {} +func (*Opt_drop_behaviorContext) IsOpt_drop_behaviorContext() {} -func NewType_listContext(parser antlr.Parser, parent antlr.ParserRuleContext, invokingState int) *Type_listContext { - var p = new(Type_listContext) +func NewOpt_drop_behaviorContext(parser antlr.Parser, parent antlr.ParserRuleContext, invokingState int) *Opt_drop_behaviorContext { + var p = new(Opt_drop_behaviorContext) antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, parent, invokingState) p.parser = parser - p.RuleIndex = KuneiformParserRULE_type_list + p.RuleIndex = KuneiformParserRULE_opt_drop_behavior return p } -func (s *Type_listContext) GetParser() antlr.Parser { return s.parser } +func (s *Opt_drop_behaviorContext) GetParser() antlr.Parser { return s.parser } -func (s *Type_listContext) AllType_() []ITypeContext { - children := s.GetChildren() - len := 0 - for _, ctx := range children { - if _, ok := ctx.(ITypeContext); ok { - len++ - } - } +func (s *Opt_drop_behaviorContext) CASCADE() antlr.TerminalNode { + return s.GetToken(KuneiformParserCASCADE, 0) +} - tst := make([]ITypeContext, len) - i := 0 - for _, ctx := range children { - if t, ok := ctx.(ITypeContext); ok { - tst[i] = t.(ITypeContext) - i++ - } - } - - return tst -} - -func (s *Type_listContext) Type_(i int) ITypeContext { - var t antlr.RuleContext - j := 0 - for _, ctx := range s.GetChildren() { - if _, ok := ctx.(ITypeContext); ok { - if j == i { - t = ctx.(antlr.RuleContext) - break - } - j++ - } - } - - if t == nil { - return nil - } - - return t.(ITypeContext) -} - -func (s *Type_listContext) AllCOMMA() []antlr.TerminalNode { - return s.GetTokens(KuneiformParserCOMMA) -} - -func (s *Type_listContext) COMMA(i int) antlr.TerminalNode { - return s.GetToken(KuneiformParserCOMMA, i) +func (s *Opt_drop_behaviorContext) RESTRICT() antlr.TerminalNode { + return s.GetToken(KuneiformParserRESTRICT, 0) } -func (s *Type_listContext) GetRuleContext() antlr.RuleContext { +func (s *Opt_drop_behaviorContext) GetRuleContext() antlr.RuleContext { return s } -func (s *Type_listContext) ToStringTree(ruleNames []string, recog antlr.Recognizer) string { +func (s *Opt_drop_behaviorContext) ToStringTree(ruleNames []string, recog antlr.Recognizer) string { return antlr.TreesStringTree(s, ruleNames, recog) } -func (s *Type_listContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { +func (s *Opt_drop_behaviorContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { switch t := visitor.(type) { case KuneiformParserVisitor: - return t.VisitType_list(s) + return t.VisitOpt_drop_behavior(s) default: return t.VisitChildren(s) } } -func (p *KuneiformParser) Type_list() (localctx IType_listContext) { - 
localctx = NewType_listContext(p, p.GetParserRuleContext(), p.GetState()) - p.EnterRule(localctx, 40, KuneiformParserRULE_type_list) +func (p *KuneiformParser) Opt_drop_behavior() (localctx IOpt_drop_behaviorContext) { + localctx = NewOpt_drop_behaviorContext(p, p.GetParserRuleContext(), p.GetState()) + p.EnterRule(localctx, 38, KuneiformParserRULE_opt_drop_behavior) var _la int p.EnterOuterAlt(localctx, 1) { - p.SetState(323) - p.Type_() - } - p.SetState(328) - p.GetErrorHandler().Sync(p) - if p.HasError() { - goto errorExit - } - _la = p.GetTokenStream().LA(1) - - for _la == KuneiformParserCOMMA { - { - p.SetState(324) - p.Match(KuneiformParserCOMMA) - if p.HasError() { - // Recognition error - abort rule - goto errorExit - } - } - { - p.SetState(325) - p.Type_() - } + p.SetState(387) + _la = p.GetTokenStream().LA(1) - p.SetState(330) - p.GetErrorHandler().Sync(p) - if p.HasError() { - goto errorExit + if !(_la == KuneiformParserCASCADE || _la == KuneiformParserRESTRICT) { + p.GetErrorHandler().RecoverInline(p) + } else { + p.GetErrorHandler().ReportMatch(p) + p.Consume() } - _la = p.GetTokenStream().LA(1) } errorExit: @@ -5437,96 +6083,106 @@ errorExit: goto errorExit // Trick to prevent compiler error if the label is not used } -// INamed_type_listContext is an interface to support dynamic dispatch. -type INamed_type_listContext interface { +// IDrop_table_statementContext is an interface to support dynamic dispatch. +type IDrop_table_statementContext interface { antlr.ParserRuleContext // GetParser returns the parser. GetParser() antlr.Parser + // GetTables returns the tables rule contexts. + GetTables() IIdentifier_listContext + + // SetTables sets the tables rule contexts. + SetTables(IIdentifier_listContext) + // Getter signatures - AllIDENTIFIER() []antlr.TerminalNode - IDENTIFIER(i int) antlr.TerminalNode - AllType_() []ITypeContext - Type_(i int) ITypeContext - AllCOMMA() []antlr.TerminalNode - COMMA(i int) antlr.TerminalNode + DROP() antlr.TerminalNode + TABLE() antlr.TerminalNode + Identifier_list() IIdentifier_listContext + IF() antlr.TerminalNode + EXISTS() antlr.TerminalNode + Opt_drop_behavior() IOpt_drop_behaviorContext - // IsNamed_type_listContext differentiates from other interfaces. - IsNamed_type_listContext() + // IsDrop_table_statementContext differentiates from other interfaces. 
+ IsDrop_table_statementContext() } -type Named_type_listContext struct { +type Drop_table_statementContext struct { antlr.BaseParserRuleContext parser antlr.Parser + tables IIdentifier_listContext } -func NewEmptyNamed_type_listContext() *Named_type_listContext { - var p = new(Named_type_listContext) +func NewEmptyDrop_table_statementContext() *Drop_table_statementContext { + var p = new(Drop_table_statementContext) antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1) - p.RuleIndex = KuneiformParserRULE_named_type_list + p.RuleIndex = KuneiformParserRULE_drop_table_statement return p } -func InitEmptyNamed_type_listContext(p *Named_type_listContext) { +func InitEmptyDrop_table_statementContext(p *Drop_table_statementContext) { antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1) - p.RuleIndex = KuneiformParserRULE_named_type_list + p.RuleIndex = KuneiformParserRULE_drop_table_statement } -func (*Named_type_listContext) IsNamed_type_listContext() {} +func (*Drop_table_statementContext) IsDrop_table_statementContext() {} -func NewNamed_type_listContext(parser antlr.Parser, parent antlr.ParserRuleContext, invokingState int) *Named_type_listContext { - var p = new(Named_type_listContext) +func NewDrop_table_statementContext(parser antlr.Parser, parent antlr.ParserRuleContext, invokingState int) *Drop_table_statementContext { + var p = new(Drop_table_statementContext) antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, parent, invokingState) p.parser = parser - p.RuleIndex = KuneiformParserRULE_named_type_list + p.RuleIndex = KuneiformParserRULE_drop_table_statement return p } -func (s *Named_type_listContext) GetParser() antlr.Parser { return s.parser } +func (s *Drop_table_statementContext) GetParser() antlr.Parser { return s.parser } -func (s *Named_type_listContext) AllIDENTIFIER() []antlr.TerminalNode { - return s.GetTokens(KuneiformParserIDENTIFIER) +func (s *Drop_table_statementContext) GetTables() IIdentifier_listContext { return s.tables } + +func (s *Drop_table_statementContext) SetTables(v IIdentifier_listContext) { s.tables = v } + +func (s *Drop_table_statementContext) DROP() antlr.TerminalNode { + return s.GetToken(KuneiformParserDROP, 0) } -func (s *Named_type_listContext) IDENTIFIER(i int) antlr.TerminalNode { - return s.GetToken(KuneiformParserIDENTIFIER, i) +func (s *Drop_table_statementContext) TABLE() antlr.TerminalNode { + return s.GetToken(KuneiformParserTABLE, 0) } -func (s *Named_type_listContext) AllType_() []ITypeContext { - children := s.GetChildren() - len := 0 - for _, ctx := range children { - if _, ok := ctx.(ITypeContext); ok { - len++ +func (s *Drop_table_statementContext) Identifier_list() IIdentifier_listContext { + var t antlr.RuleContext + for _, ctx := range s.GetChildren() { + if _, ok := ctx.(IIdentifier_listContext); ok { + t = ctx.(antlr.RuleContext) + break } } - tst := make([]ITypeContext, len) - i := 0 - for _, ctx := range children { - if t, ok := ctx.(ITypeContext); ok { - tst[i] = t.(ITypeContext) - i++ - } + if t == nil { + return nil } - return tst + return t.(IIdentifier_listContext) } -func (s *Named_type_listContext) Type_(i int) ITypeContext { +func (s *Drop_table_statementContext) IF() antlr.TerminalNode { + return s.GetToken(KuneiformParserIF, 0) +} + +func (s *Drop_table_statementContext) EXISTS() antlr.TerminalNode { + return s.GetToken(KuneiformParserEXISTS, 0) +} + +func (s *Drop_table_statementContext) Opt_drop_behavior() IOpt_drop_behaviorContext { var t antlr.RuleContext - j := 0 for _, ctx := 
range s.GetChildren() { - if _, ok := ctx.(ITypeContext); ok { - if j == i { - t = ctx.(antlr.RuleContext) - break - } - j++ + if _, ok := ctx.(IOpt_drop_behaviorContext); ok { + t = ctx.(antlr.RuleContext) + break } } @@ -5534,88 +6190,95 @@ func (s *Named_type_listContext) Type_(i int) ITypeContext { return nil } - return t.(ITypeContext) -} - -func (s *Named_type_listContext) AllCOMMA() []antlr.TerminalNode { - return s.GetTokens(KuneiformParserCOMMA) -} - -func (s *Named_type_listContext) COMMA(i int) antlr.TerminalNode { - return s.GetToken(KuneiformParserCOMMA, i) + return t.(IOpt_drop_behaviorContext) } -func (s *Named_type_listContext) GetRuleContext() antlr.RuleContext { +func (s *Drop_table_statementContext) GetRuleContext() antlr.RuleContext { return s } -func (s *Named_type_listContext) ToStringTree(ruleNames []string, recog antlr.Recognizer) string { +func (s *Drop_table_statementContext) ToStringTree(ruleNames []string, recog antlr.Recognizer) string { return antlr.TreesStringTree(s, ruleNames, recog) } -func (s *Named_type_listContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { +func (s *Drop_table_statementContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { switch t := visitor.(type) { case KuneiformParserVisitor: - return t.VisitNamed_type_list(s) + return t.VisitDrop_table_statement(s) default: return t.VisitChildren(s) } } -func (p *KuneiformParser) Named_type_list() (localctx INamed_type_listContext) { - localctx = NewNamed_type_listContext(p, p.GetParserRuleContext(), p.GetState()) - p.EnterRule(localctx, 42, KuneiformParserRULE_named_type_list) +func (p *KuneiformParser) Drop_table_statement() (localctx IDrop_table_statementContext) { + localctx = NewDrop_table_statementContext(p, p.GetParserRuleContext(), p.GetState()) + p.EnterRule(localctx, 40, KuneiformParserRULE_drop_table_statement) var _la int p.EnterOuterAlt(localctx, 1) { - p.SetState(331) - p.Match(KuneiformParserIDENTIFIER) + p.SetState(389) + p.Match(KuneiformParserDROP) if p.HasError() { // Recognition error - abort rule goto errorExit } } { - p.SetState(332) - p.Type_() + p.SetState(390) + p.Match(KuneiformParserTABLE) + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } } - p.SetState(338) + p.SetState(393) p.GetErrorHandler().Sync(p) if p.HasError() { goto errorExit } _la = p.GetTokenStream().LA(1) - for _la == KuneiformParserCOMMA { + if _la == KuneiformParserIF { { - p.SetState(333) - p.Match(KuneiformParserCOMMA) + p.SetState(391) + p.Match(KuneiformParserIF) if p.HasError() { // Recognition error - abort rule goto errorExit } } { - p.SetState(334) - p.Match(KuneiformParserIDENTIFIER) + p.SetState(392) + p.Match(KuneiformParserEXISTS) if p.HasError() { // Recognition error - abort rule goto errorExit } } + + } + { + p.SetState(395) + + var _x = p.Identifier_list() + + localctx.(*Drop_table_statementContext).tables = _x + } + p.SetState(397) + p.GetErrorHandler().Sync(p) + if p.HasError() { + goto errorExit + } + _la = p.GetTokenStream().LA(1) + + if _la == KuneiformParserCASCADE || _la == KuneiformParserRESTRICT { { - p.SetState(335) - p.Type_() + p.SetState(396) + p.Opt_drop_behavior() } - p.SetState(340) - p.GetErrorHandler().Sync(p) - if p.HasError() { - goto errorExit - } - _la = p.GetTokenStream().LA(1) } errorExit: @@ -5631,88 +6294,80 @@ errorExit: goto errorExit // Trick to prevent compiler error if the label is not used } -// ITyped_variable_listContext is an interface to support dynamic dispatch. 
-type ITyped_variable_listContext interface { +// IAlter_table_statementContext is an interface to support dynamic dispatch. +type IAlter_table_statementContext interface { antlr.ParserRuleContext // GetParser returns the parser. GetParser() antlr.Parser + // GetTable returns the table rule contexts. + GetTable() IIdentifierContext + + // SetTable sets the table rule contexts. + SetTable(IIdentifierContext) + // Getter signatures - AllVariable() []IVariableContext - Variable(i int) IVariableContext - AllType_() []ITypeContext - Type_(i int) ITypeContext - AllCOMMA() []antlr.TerminalNode - COMMA(i int) antlr.TerminalNode + ALTER() antlr.TerminalNode + TABLE() antlr.TerminalNode + Alter_table_action() IAlter_table_actionContext + Identifier() IIdentifierContext - // IsTyped_variable_listContext differentiates from other interfaces. - IsTyped_variable_listContext() + // IsAlter_table_statementContext differentiates from other interfaces. + IsAlter_table_statementContext() } -type Typed_variable_listContext struct { +type Alter_table_statementContext struct { antlr.BaseParserRuleContext parser antlr.Parser + table IIdentifierContext } -func NewEmptyTyped_variable_listContext() *Typed_variable_listContext { - var p = new(Typed_variable_listContext) +func NewEmptyAlter_table_statementContext() *Alter_table_statementContext { + var p = new(Alter_table_statementContext) antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1) - p.RuleIndex = KuneiformParserRULE_typed_variable_list + p.RuleIndex = KuneiformParserRULE_alter_table_statement return p } -func InitEmptyTyped_variable_listContext(p *Typed_variable_listContext) { +func InitEmptyAlter_table_statementContext(p *Alter_table_statementContext) { antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1) - p.RuleIndex = KuneiformParserRULE_typed_variable_list + p.RuleIndex = KuneiformParserRULE_alter_table_statement } -func (*Typed_variable_listContext) IsTyped_variable_listContext() {} +func (*Alter_table_statementContext) IsAlter_table_statementContext() {} -func NewTyped_variable_listContext(parser antlr.Parser, parent antlr.ParserRuleContext, invokingState int) *Typed_variable_listContext { - var p = new(Typed_variable_listContext) +func NewAlter_table_statementContext(parser antlr.Parser, parent antlr.ParserRuleContext, invokingState int) *Alter_table_statementContext { + var p = new(Alter_table_statementContext) antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, parent, invokingState) p.parser = parser - p.RuleIndex = KuneiformParserRULE_typed_variable_list + p.RuleIndex = KuneiformParserRULE_alter_table_statement return p } -func (s *Typed_variable_listContext) GetParser() antlr.Parser { return s.parser } +func (s *Alter_table_statementContext) GetParser() antlr.Parser { return s.parser } -func (s *Typed_variable_listContext) AllVariable() []IVariableContext { - children := s.GetChildren() - len := 0 - for _, ctx := range children { - if _, ok := ctx.(IVariableContext); ok { - len++ - } - } +func (s *Alter_table_statementContext) GetTable() IIdentifierContext { return s.table } - tst := make([]IVariableContext, len) - i := 0 - for _, ctx := range children { - if t, ok := ctx.(IVariableContext); ok { - tst[i] = t.(IVariableContext) - i++ - } - } +func (s *Alter_table_statementContext) SetTable(v IIdentifierContext) { s.table = v } - return tst +func (s *Alter_table_statementContext) ALTER() antlr.TerminalNode { + return s.GetToken(KuneiformParserALTER, 0) +} + +func (s *Alter_table_statementContext) TABLE() 
antlr.TerminalNode { + return s.GetToken(KuneiformParserTABLE, 0) } -func (s *Typed_variable_listContext) Variable(i int) IVariableContext { +func (s *Alter_table_statementContext) Alter_table_action() IAlter_table_actionContext { var t antlr.RuleContext - j := 0 for _, ctx := range s.GetChildren() { - if _, ok := ctx.(IVariableContext); ok { - if j == i { - t = ctx.(antlr.RuleContext) - break - } - j++ + if _, ok := ctx.(IAlter_table_actionContext); ok { + t = ctx.(antlr.RuleContext) + break } } @@ -5720,40 +6375,15 @@ func (s *Typed_variable_listContext) Variable(i int) IVariableContext { return nil } - return t.(IVariableContext) -} - -func (s *Typed_variable_listContext) AllType_() []ITypeContext { - children := s.GetChildren() - len := 0 - for _, ctx := range children { - if _, ok := ctx.(ITypeContext); ok { - len++ - } - } - - tst := make([]ITypeContext, len) - i := 0 - for _, ctx := range children { - if t, ok := ctx.(ITypeContext); ok { - tst[i] = t.(ITypeContext) - i++ - } - } - - return tst + return t.(IAlter_table_actionContext) } -func (s *Typed_variable_listContext) Type_(i int) ITypeContext { +func (s *Alter_table_statementContext) Identifier() IIdentifierContext { var t antlr.RuleContext - j := 0 for _, ctx := range s.GetChildren() { - if _, ok := ctx.(ITypeContext); ok { - if j == i { - t = ctx.(antlr.RuleContext) - break - } - j++ + if _, ok := ctx.(IIdentifierContext); ok { + t = ctx.(antlr.RuleContext) + break } } @@ -5761,80 +6391,57 @@ func (s *Typed_variable_listContext) Type_(i int) ITypeContext { return nil } - return t.(ITypeContext) -} - -func (s *Typed_variable_listContext) AllCOMMA() []antlr.TerminalNode { - return s.GetTokens(KuneiformParserCOMMA) -} - -func (s *Typed_variable_listContext) COMMA(i int) antlr.TerminalNode { - return s.GetToken(KuneiformParserCOMMA, i) + return t.(IIdentifierContext) } -func (s *Typed_variable_listContext) GetRuleContext() antlr.RuleContext { +func (s *Alter_table_statementContext) GetRuleContext() antlr.RuleContext { return s } -func (s *Typed_variable_listContext) ToStringTree(ruleNames []string, recog antlr.Recognizer) string { +func (s *Alter_table_statementContext) ToStringTree(ruleNames []string, recog antlr.Recognizer) string { return antlr.TreesStringTree(s, ruleNames, recog) } -func (s *Typed_variable_listContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { +func (s *Alter_table_statementContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { switch t := visitor.(type) { case KuneiformParserVisitor: - return t.VisitTyped_variable_list(s) + return t.VisitAlter_table_statement(s) default: return t.VisitChildren(s) } } -func (p *KuneiformParser) Typed_variable_list() (localctx ITyped_variable_listContext) { - localctx = NewTyped_variable_listContext(p, p.GetParserRuleContext(), p.GetState()) - p.EnterRule(localctx, 44, KuneiformParserRULE_typed_variable_list) - var _la int - +func (p *KuneiformParser) Alter_table_statement() (localctx IAlter_table_statementContext) { + localctx = NewAlter_table_statementContext(p, p.GetParserRuleContext(), p.GetState()) + p.EnterRule(localctx, 42, KuneiformParserRULE_alter_table_statement) p.EnterOuterAlt(localctx, 1) { - p.SetState(341) - p.Variable() + p.SetState(399) + p.Match(KuneiformParserALTER) + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } } { - p.SetState(342) - p.Type_() - } - p.SetState(349) - p.GetErrorHandler().Sync(p) - if p.HasError() { - goto errorExit - } - _la = p.GetTokenStream().LA(1) - - for _la == KuneiformParserCOMMA { - { 
- p.SetState(343) - p.Match(KuneiformParserCOMMA) - if p.HasError() { - // Recognition error - abort rule - goto errorExit - } - } - { - p.SetState(344) - p.Variable() - } - { - p.SetState(345) - p.Type_() - } - - p.SetState(351) - p.GetErrorHandler().Sync(p) + p.SetState(400) + p.Match(KuneiformParserTABLE) if p.HasError() { + // Recognition error - abort rule goto errorExit } - _la = p.GetTokenStream().LA(1) + } + { + p.SetState(401) + + var _x = p.Identifier() + + localctx.(*Alter_table_statementContext).table = _x + } + { + p.SetState(402) + p.Alter_table_action() } errorExit: @@ -5850,93 +6457,99 @@ errorExit: goto errorExit // Trick to prevent compiler error if the label is not used } -// IConstraintContext is an interface to support dynamic dispatch. -type IConstraintContext interface { +// IAlter_table_actionContext is an interface to support dynamic dispatch. +type IAlter_table_actionContext interface { antlr.ParserRuleContext // GetParser returns the parser. GetParser() antlr.Parser - - // Getter signatures - IDENTIFIER() antlr.TerminalNode - PRIMARY() antlr.TerminalNode - NOT() antlr.TerminalNode - NULL() antlr.TerminalNode - DEFAULT() antlr.TerminalNode - UNIQUE() antlr.TerminalNode - LPAREN() antlr.TerminalNode - Literal() ILiteralContext - RPAREN() antlr.TerminalNode - KEY() antlr.TerminalNode - - // IsConstraintContext differentiates from other interfaces. - IsConstraintContext() + // IsAlter_table_actionContext differentiates from other interfaces. + IsAlter_table_actionContext() } -type ConstraintContext struct { +type Alter_table_actionContext struct { antlr.BaseParserRuleContext parser antlr.Parser } -func NewEmptyConstraintContext() *ConstraintContext { - var p = new(ConstraintContext) +func NewEmptyAlter_table_actionContext() *Alter_table_actionContext { + var p = new(Alter_table_actionContext) antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1) - p.RuleIndex = KuneiformParserRULE_constraint + p.RuleIndex = KuneiformParserRULE_alter_table_action return p } -func InitEmptyConstraintContext(p *ConstraintContext) { +func InitEmptyAlter_table_actionContext(p *Alter_table_actionContext) { antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1) - p.RuleIndex = KuneiformParserRULE_constraint + p.RuleIndex = KuneiformParserRULE_alter_table_action } -func (*ConstraintContext) IsConstraintContext() {} +func (*Alter_table_actionContext) IsAlter_table_actionContext() {} -func NewConstraintContext(parser antlr.Parser, parent antlr.ParserRuleContext, invokingState int) *ConstraintContext { - var p = new(ConstraintContext) +func NewAlter_table_actionContext(parser antlr.Parser, parent antlr.ParserRuleContext, invokingState int) *Alter_table_actionContext { + var p = new(Alter_table_actionContext) antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, parent, invokingState) p.parser = parser - p.RuleIndex = KuneiformParserRULE_constraint + p.RuleIndex = KuneiformParserRULE_alter_table_action return p } -func (s *ConstraintContext) GetParser() antlr.Parser { return s.parser } +func (s *Alter_table_actionContext) GetParser() antlr.Parser { return s.parser } -func (s *ConstraintContext) IDENTIFIER() antlr.TerminalNode { - return s.GetToken(KuneiformParserIDENTIFIER, 0) +func (s *Alter_table_actionContext) CopyAll(ctx *Alter_table_actionContext) { + s.CopyFrom(&ctx.BaseParserRuleContext) } -func (s *ConstraintContext) PRIMARY() antlr.TerminalNode { - return s.GetToken(KuneiformParserPRIMARY, 0) +func (s *Alter_table_actionContext) GetRuleContext() 
antlr.RuleContext { + return s } -func (s *ConstraintContext) NOT() antlr.TerminalNode { - return s.GetToken(KuneiformParserNOT, 0) +func (s *Alter_table_actionContext) ToStringTree(ruleNames []string, recog antlr.Recognizer) string { + return antlr.TreesStringTree(s, ruleNames, recog) } -func (s *ConstraintContext) NULL() antlr.TerminalNode { - return s.GetToken(KuneiformParserNULL, 0) +type Drop_column_constraintContext struct { + Alter_table_actionContext + column IIdentifierContext } -func (s *ConstraintContext) DEFAULT() antlr.TerminalNode { - return s.GetToken(KuneiformParserDEFAULT, 0) +func NewDrop_column_constraintContext(parser antlr.Parser, ctx antlr.ParserRuleContext) *Drop_column_constraintContext { + var p = new(Drop_column_constraintContext) + + InitEmptyAlter_table_actionContext(&p.Alter_table_actionContext) + p.parser = parser + p.CopyAll(ctx.(*Alter_table_actionContext)) + + return p } -func (s *ConstraintContext) UNIQUE() antlr.TerminalNode { - return s.GetToken(KuneiformParserUNIQUE, 0) +func (s *Drop_column_constraintContext) GetColumn() IIdentifierContext { return s.column } + +func (s *Drop_column_constraintContext) SetColumn(v IIdentifierContext) { s.column = v } + +func (s *Drop_column_constraintContext) GetRuleContext() antlr.RuleContext { + return s } -func (s *ConstraintContext) LPAREN() antlr.TerminalNode { - return s.GetToken(KuneiformParserLPAREN, 0) +func (s *Drop_column_constraintContext) ALTER() antlr.TerminalNode { + return s.GetToken(KuneiformParserALTER, 0) +} + +func (s *Drop_column_constraintContext) COLUMN() antlr.TerminalNode { + return s.GetToken(KuneiformParserCOLUMN, 0) +} + +func (s *Drop_column_constraintContext) DROP() antlr.TerminalNode { + return s.GetToken(KuneiformParserDROP, 0) } -func (s *ConstraintContext) Literal() ILiteralContext { +func (s *Drop_column_constraintContext) Identifier() IIdentifierContext { var t antlr.RuleContext for _, ctx := range s.GetChildren() { - if _, ok := ctx.(ILiteralContext); ok { + if _, ok := ctx.(IIdentifierContext); ok { t = ctx.(antlr.RuleContext) break } @@ -5946,367 +6559,282 @@ func (s *ConstraintContext) Literal() ILiteralContext { return nil } - return t.(ILiteralContext) -} - -func (s *ConstraintContext) RPAREN() antlr.TerminalNode { - return s.GetToken(KuneiformParserRPAREN, 0) + return t.(IIdentifierContext) } -func (s *ConstraintContext) KEY() antlr.TerminalNode { - return s.GetToken(KuneiformParserKEY, 0) +func (s *Drop_column_constraintContext) NOT() antlr.TerminalNode { + return s.GetToken(KuneiformParserNOT, 0) } -func (s *ConstraintContext) GetRuleContext() antlr.RuleContext { - return s +func (s *Drop_column_constraintContext) NULL() antlr.TerminalNode { + return s.GetToken(KuneiformParserNULL, 0) } -func (s *ConstraintContext) ToStringTree(ruleNames []string, recog antlr.Recognizer) string { - return antlr.TreesStringTree(s, ruleNames, recog) +func (s *Drop_column_constraintContext) DEFAULT() antlr.TerminalNode { + return s.GetToken(KuneiformParserDEFAULT, 0) } -func (s *ConstraintContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { +func (s *Drop_column_constraintContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { switch t := visitor.(type) { case KuneiformParserVisitor: - return t.VisitConstraint(s) + return t.VisitDrop_column_constraint(s) default: return t.VisitChildren(s) } } -func (p *KuneiformParser) Constraint() (localctx IConstraintContext) { - localctx = NewConstraintContext(p, p.GetParserRuleContext(), p.GetState()) - p.EnterRule(localctx, 46, 
KuneiformParserRULE_constraint) - var _la int +type Add_columnContext struct { + Alter_table_actionContext + column IIdentifierContext +} - p.EnterOuterAlt(localctx, 1) - p.SetState(361) - p.GetErrorHandler().Sync(p) - if p.HasError() { - goto errorExit - } +func NewAdd_columnContext(parser antlr.Parser, ctx antlr.ParserRuleContext) *Add_columnContext { + var p = new(Add_columnContext) - switch p.GetTokenStream().LA(1) { - case KuneiformParserIDENTIFIER: - { - p.SetState(352) - p.Match(KuneiformParserIDENTIFIER) - if p.HasError() { - // Recognition error - abort rule - goto errorExit - } - } + InitEmptyAlter_table_actionContext(&p.Alter_table_actionContext) + p.parser = parser + p.CopyAll(ctx.(*Alter_table_actionContext)) - case KuneiformParserPRIMARY: - { - p.SetState(353) - p.Match(KuneiformParserPRIMARY) - if p.HasError() { - // Recognition error - abort rule - goto errorExit - } - } - p.SetState(355) - p.GetErrorHandler().Sync(p) - if p.HasError() { - goto errorExit - } - _la = p.GetTokenStream().LA(1) + return p +} - if _la == KuneiformParserKEY { - { - p.SetState(354) - p.Match(KuneiformParserKEY) - if p.HasError() { - // Recognition error - abort rule - goto errorExit - } - } +func (s *Add_columnContext) GetColumn() IIdentifierContext { return s.column } - } +func (s *Add_columnContext) SetColumn(v IIdentifierContext) { s.column = v } - case KuneiformParserNOT: - { - p.SetState(357) - p.Match(KuneiformParserNOT) - if p.HasError() { - // Recognition error - abort rule - goto errorExit - } - } - { - p.SetState(358) - p.Match(KuneiformParserNULL) - if p.HasError() { - // Recognition error - abort rule - goto errorExit - } - } +func (s *Add_columnContext) GetRuleContext() antlr.RuleContext { + return s +} - case KuneiformParserDEFAULT: - { - p.SetState(359) - p.Match(KuneiformParserDEFAULT) - if p.HasError() { - // Recognition error - abort rule - goto errorExit - } - } +func (s *Add_columnContext) ADD() antlr.TerminalNode { + return s.GetToken(KuneiformParserADD, 0) +} - case KuneiformParserUNIQUE: - { - p.SetState(360) - p.Match(KuneiformParserUNIQUE) - if p.HasError() { - // Recognition error - abort rule - goto errorExit - } - } +func (s *Add_columnContext) COLUMN() antlr.TerminalNode { + return s.GetToken(KuneiformParserCOLUMN, 0) +} - default: - p.SetError(antlr.NewNoViableAltException(p, nil, nil, nil, nil, nil)) - goto errorExit +func (s *Add_columnContext) Type_() ITypeContext { + var t antlr.RuleContext + for _, ctx := range s.GetChildren() { + if _, ok := ctx.(ITypeContext); ok { + t = ctx.(antlr.RuleContext) + break + } } - p.SetState(367) - p.GetErrorHandler().Sync(p) - if p.HasError() { - goto errorExit + + if t == nil { + return nil } - _la = p.GetTokenStream().LA(1) - if _la == KuneiformParserLPAREN { - { - p.SetState(363) - p.Match(KuneiformParserLPAREN) - if p.HasError() { - // Recognition error - abort rule - goto errorExit - } - } - { - p.SetState(364) - p.Literal() - } - { - p.SetState(365) - p.Match(KuneiformParserRPAREN) - if p.HasError() { - // Recognition error - abort rule - goto errorExit - } - } + return t.(ITypeContext) +} +func (s *Add_columnContext) Identifier() IIdentifierContext { + var t antlr.RuleContext + for _, ctx := range s.GetChildren() { + if _, ok := ctx.(IIdentifierContext); ok { + t = ctx.(antlr.RuleContext) + break + } } -errorExit: - if p.HasError() { - v := p.GetError() - localctx.SetException(v) - p.GetErrorHandler().ReportError(p, v) - p.GetErrorHandler().Recover(p, v) - p.SetError(nil) + if t == nil { + return nil } - p.ExitRule() - 
return localctx - goto errorExit // Trick to prevent compiler error if the label is not used -} -// IAccess_modifierContext is an interface to support dynamic dispatch. -type IAccess_modifierContext interface { - antlr.ParserRuleContext - - // GetParser returns the parser. - GetParser() antlr.Parser + return t.(IIdentifierContext) +} - // Getter signatures - PUBLIC() antlr.TerminalNode - PRIVATE() antlr.TerminalNode - VIEW() antlr.TerminalNode - OWNER() antlr.TerminalNode +func (s *Add_columnContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { + switch t := visitor.(type) { + case KuneiformParserVisitor: + return t.VisitAdd_column(s) - // IsAccess_modifierContext differentiates from other interfaces. - IsAccess_modifierContext() + default: + return t.VisitChildren(s) + } } -type Access_modifierContext struct { - antlr.BaseParserRuleContext - parser antlr.Parser +type Rename_columnContext struct { + Alter_table_actionContext + old_column IIdentifierContext + new_column IIdentifierContext } -func NewEmptyAccess_modifierContext() *Access_modifierContext { - var p = new(Access_modifierContext) - antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1) - p.RuleIndex = KuneiformParserRULE_access_modifier +func NewRename_columnContext(parser antlr.Parser, ctx antlr.ParserRuleContext) *Rename_columnContext { + var p = new(Rename_columnContext) + + InitEmptyAlter_table_actionContext(&p.Alter_table_actionContext) + p.parser = parser + p.CopyAll(ctx.(*Alter_table_actionContext)) + return p } -func InitEmptyAccess_modifierContext(p *Access_modifierContext) { - antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1) - p.RuleIndex = KuneiformParserRULE_access_modifier -} +func (s *Rename_columnContext) GetOld_column() IIdentifierContext { return s.old_column } -func (*Access_modifierContext) IsAccess_modifierContext() {} +func (s *Rename_columnContext) GetNew_column() IIdentifierContext { return s.new_column } -func NewAccess_modifierContext(parser antlr.Parser, parent antlr.ParserRuleContext, invokingState int) *Access_modifierContext { - var p = new(Access_modifierContext) +func (s *Rename_columnContext) SetOld_column(v IIdentifierContext) { s.old_column = v } - antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, parent, invokingState) - - p.parser = parser - p.RuleIndex = KuneiformParserRULE_access_modifier +func (s *Rename_columnContext) SetNew_column(v IIdentifierContext) { s.new_column = v } - return p +func (s *Rename_columnContext) GetRuleContext() antlr.RuleContext { + return s } -func (s *Access_modifierContext) GetParser() antlr.Parser { return s.parser } - -func (s *Access_modifierContext) PUBLIC() antlr.TerminalNode { - return s.GetToken(KuneiformParserPUBLIC, 0) +func (s *Rename_columnContext) RENAME() antlr.TerminalNode { + return s.GetToken(KuneiformParserRENAME, 0) } -func (s *Access_modifierContext) PRIVATE() antlr.TerminalNode { - return s.GetToken(KuneiformParserPRIVATE, 0) +func (s *Rename_columnContext) COLUMN() antlr.TerminalNode { + return s.GetToken(KuneiformParserCOLUMN, 0) } -func (s *Access_modifierContext) VIEW() antlr.TerminalNode { - return s.GetToken(KuneiformParserVIEW, 0) +func (s *Rename_columnContext) TO() antlr.TerminalNode { + return s.GetToken(KuneiformParserTO, 0) } -func (s *Access_modifierContext) OWNER() antlr.TerminalNode { - return s.GetToken(KuneiformParserOWNER, 0) -} +func (s *Rename_columnContext) AllIdentifier() []IIdentifierContext { + children := s.GetChildren() + len := 0 + for _, ctx := range children { + if _, ok 
:= ctx.(IIdentifierContext); ok { + len++ + } + } -func (s *Access_modifierContext) GetRuleContext() antlr.RuleContext { - return s + tst := make([]IIdentifierContext, len) + i := 0 + for _, ctx := range children { + if t, ok := ctx.(IIdentifierContext); ok { + tst[i] = t.(IIdentifierContext) + i++ + } + } + + return tst } -func (s *Access_modifierContext) ToStringTree(ruleNames []string, recog antlr.Recognizer) string { - return antlr.TreesStringTree(s, ruleNames, recog) +func (s *Rename_columnContext) Identifier(i int) IIdentifierContext { + var t antlr.RuleContext + j := 0 + for _, ctx := range s.GetChildren() { + if _, ok := ctx.(IIdentifierContext); ok { + if j == i { + t = ctx.(antlr.RuleContext) + break + } + j++ + } + } + + if t == nil { + return nil + } + + return t.(IIdentifierContext) } -func (s *Access_modifierContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { +func (s *Rename_columnContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { switch t := visitor.(type) { case KuneiformParserVisitor: - return t.VisitAccess_modifier(s) + return t.VisitRename_column(s) default: return t.VisitChildren(s) } } -func (p *KuneiformParser) Access_modifier() (localctx IAccess_modifierContext) { - localctx = NewAccess_modifierContext(p, p.GetParserRuleContext(), p.GetState()) - p.EnterRule(localctx, 48, KuneiformParserRULE_access_modifier) - var _la int +type Add_table_constraintContext struct { + Alter_table_actionContext +} - p.EnterOuterAlt(localctx, 1) - { - p.SetState(369) - _la = p.GetTokenStream().LA(1) +func NewAdd_table_constraintContext(parser antlr.Parser, ctx antlr.ParserRuleContext) *Add_table_constraintContext { + var p = new(Add_table_constraintContext) - if !((int64(_la) & ^0x3f) == 0 && ((int64(1)<<_la)&4123168604160) != 0) { - p.GetErrorHandler().RecoverInline(p) - } else { - p.GetErrorHandler().ReportMatch(p) - p.Consume() - } - } + InitEmptyAlter_table_actionContext(&p.Alter_table_actionContext) + p.parser = parser + p.CopyAll(ctx.(*Alter_table_actionContext)) -errorExit: - if p.HasError() { - v := p.GetError() - localctx.SetException(v) - p.GetErrorHandler().ReportError(p, v) - p.GetErrorHandler().Recover(p, v) - p.SetError(nil) - } - p.ExitRule() - return localctx - goto errorExit // Trick to prevent compiler error if the label is not used + return p } -// IAction_declarationContext is an interface to support dynamic dispatch. -type IAction_declarationContext interface { - antlr.ParserRuleContext +func (s *Add_table_constraintContext) GetRuleContext() antlr.RuleContext { + return s +} - // GetParser returns the parser. - GetParser() antlr.Parser +func (s *Add_table_constraintContext) ADD() antlr.TerminalNode { + return s.GetToken(KuneiformParserADD, 0) +} - // Getter signatures - ACTION() antlr.TerminalNode - IDENTIFIER() antlr.TerminalNode - LPAREN() antlr.TerminalNode - RPAREN() antlr.TerminalNode - LBRACE() antlr.TerminalNode - Action_block() IAction_blockContext - RBRACE() antlr.TerminalNode - AllAnnotation() []IAnnotationContext - Annotation(i int) IAnnotationContext - Variable_list() IVariable_listContext - AllAccess_modifier() []IAccess_modifierContext - Access_modifier(i int) IAccess_modifierContext +func (s *Add_table_constraintContext) Table_constraint_def() ITable_constraint_defContext { + var t antlr.RuleContext + for _, ctx := range s.GetChildren() { + if _, ok := ctx.(ITable_constraint_defContext); ok { + t = ctx.(antlr.RuleContext) + break + } + } - // IsAction_declarationContext differentiates from other interfaces. 
- IsAction_declarationContext() -} + if t == nil { + return nil + } -type Action_declarationContext struct { - antlr.BaseParserRuleContext - parser antlr.Parser + return t.(ITable_constraint_defContext) } -func NewEmptyAction_declarationContext() *Action_declarationContext { - var p = new(Action_declarationContext) - antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1) - p.RuleIndex = KuneiformParserRULE_action_declaration - return p -} +func (s *Add_table_constraintContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { + switch t := visitor.(type) { + case KuneiformParserVisitor: + return t.VisitAdd_table_constraint(s) -func InitEmptyAction_declarationContext(p *Action_declarationContext) { - antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1) - p.RuleIndex = KuneiformParserRULE_action_declaration + default: + return t.VisitChildren(s) + } } -func (*Action_declarationContext) IsAction_declarationContext() {} - -func NewAction_declarationContext(parser antlr.Parser, parent antlr.ParserRuleContext, invokingState int) *Action_declarationContext { - var p = new(Action_declarationContext) +type Add_column_constraintContext struct { + Alter_table_actionContext + column IIdentifierContext +} - antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, parent, invokingState) +func NewAdd_column_constraintContext(parser antlr.Parser, ctx antlr.ParserRuleContext) *Add_column_constraintContext { + var p = new(Add_column_constraintContext) + InitEmptyAlter_table_actionContext(&p.Alter_table_actionContext) p.parser = parser - p.RuleIndex = KuneiformParserRULE_action_declaration + p.CopyAll(ctx.(*Alter_table_actionContext)) return p } -func (s *Action_declarationContext) GetParser() antlr.Parser { return s.parser } +func (s *Add_column_constraintContext) GetColumn() IIdentifierContext { return s.column } -func (s *Action_declarationContext) ACTION() antlr.TerminalNode { - return s.GetToken(KuneiformParserACTION, 0) -} +func (s *Add_column_constraintContext) SetColumn(v IIdentifierContext) { s.column = v } -func (s *Action_declarationContext) IDENTIFIER() antlr.TerminalNode { - return s.GetToken(KuneiformParserIDENTIFIER, 0) +func (s *Add_column_constraintContext) GetRuleContext() antlr.RuleContext { + return s } -func (s *Action_declarationContext) LPAREN() antlr.TerminalNode { - return s.GetToken(KuneiformParserLPAREN, 0) +func (s *Add_column_constraintContext) ALTER() antlr.TerminalNode { + return s.GetToken(KuneiformParserALTER, 0) } -func (s *Action_declarationContext) RPAREN() antlr.TerminalNode { - return s.GetToken(KuneiformParserRPAREN, 0) +func (s *Add_column_constraintContext) COLUMN() antlr.TerminalNode { + return s.GetToken(KuneiformParserCOLUMN, 0) } -func (s *Action_declarationContext) LBRACE() antlr.TerminalNode { - return s.GetToken(KuneiformParserLBRACE, 0) +func (s *Add_column_constraintContext) SET() antlr.TerminalNode { + return s.GetToken(KuneiformParserSET, 0) } -func (s *Action_declarationContext) Action_block() IAction_blockContext { +func (s *Add_column_constraintContext) Identifier() IIdentifierContext { var t antlr.RuleContext for _, ctx := range s.GetChildren() { - if _, ok := ctx.(IAction_blockContext); ok { + if _, ok := ctx.(IIdentifierContext); ok { t = ctx.(antlr.RuleContext) break } @@ -6316,44 +6844,27 @@ func (s *Action_declarationContext) Action_block() IAction_blockContext { return nil } - return t.(IAction_blockContext) + return t.(IIdentifierContext) } -func (s *Action_declarationContext) RBRACE() antlr.TerminalNode { - return 
s.GetToken(KuneiformParserRBRACE, 0) +func (s *Add_column_constraintContext) NOT() antlr.TerminalNode { + return s.GetToken(KuneiformParserNOT, 0) } -func (s *Action_declarationContext) AllAnnotation() []IAnnotationContext { - children := s.GetChildren() - len := 0 - for _, ctx := range children { - if _, ok := ctx.(IAnnotationContext); ok { - len++ - } - } - - tst := make([]IAnnotationContext, len) - i := 0 - for _, ctx := range children { - if t, ok := ctx.(IAnnotationContext); ok { - tst[i] = t.(IAnnotationContext) - i++ - } - } +func (s *Add_column_constraintContext) NULL() antlr.TerminalNode { + return s.GetToken(KuneiformParserNULL, 0) +} - return tst +func (s *Add_column_constraintContext) DEFAULT() antlr.TerminalNode { + return s.GetToken(KuneiformParserDEFAULT, 0) } -func (s *Action_declarationContext) Annotation(i int) IAnnotationContext { +func (s *Add_column_constraintContext) Action_expr() IAction_exprContext { var t antlr.RuleContext - j := 0 for _, ctx := range s.GetChildren() { - if _, ok := ctx.(IAnnotationContext); ok { - if j == i { - t = ctx.(antlr.RuleContext) - break - } - j++ + if _, ok := ctx.(IAction_exprContext); ok { + t = ctx.(antlr.RuleContext) + break } } @@ -6361,13 +6872,54 @@ func (s *Action_declarationContext) Annotation(i int) IAnnotationContext { return nil } - return t.(IAnnotationContext) + return t.(IAction_exprContext) +} + +func (s *Add_column_constraintContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { + switch t := visitor.(type) { + case KuneiformParserVisitor: + return t.VisitAdd_column_constraint(s) + + default: + return t.VisitChildren(s) + } +} + +type Rename_tableContext struct { + Alter_table_actionContext + new_table IIdentifierContext +} + +func NewRename_tableContext(parser antlr.Parser, ctx antlr.ParserRuleContext) *Rename_tableContext { + var p = new(Rename_tableContext) + + InitEmptyAlter_table_actionContext(&p.Alter_table_actionContext) + p.parser = parser + p.CopyAll(ctx.(*Alter_table_actionContext)) + + return p +} + +func (s *Rename_tableContext) GetNew_table() IIdentifierContext { return s.new_table } + +func (s *Rename_tableContext) SetNew_table(v IIdentifierContext) { s.new_table = v } + +func (s *Rename_tableContext) GetRuleContext() antlr.RuleContext { + return s +} + +func (s *Rename_tableContext) RENAME() antlr.TerminalNode { + return s.GetToken(KuneiformParserRENAME, 0) } -func (s *Action_declarationContext) Variable_list() IVariable_listContext { +func (s *Rename_tableContext) TO() antlr.TerminalNode { + return s.GetToken(KuneiformParserTO, 0) +} + +func (s *Rename_tableContext) Identifier() IIdentifierContext { var t antlr.RuleContext for _, ctx := range s.GetChildren() { - if _, ok := ctx.(IVariable_listContext); ok { + if _, ok := ctx.(IIdentifierContext); ok { t = ctx.(antlr.RuleContext) break } @@ -6377,40 +6929,51 @@ func (s *Action_declarationContext) Variable_list() IVariable_listContext { return nil } - return t.(IVariable_listContext) + return t.(IIdentifierContext) } -func (s *Action_declarationContext) AllAccess_modifier() []IAccess_modifierContext { - children := s.GetChildren() - len := 0 - for _, ctx := range children { - if _, ok := ctx.(IAccess_modifierContext); ok { - len++ - } - } +func (s *Rename_tableContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { + switch t := visitor.(type) { + case KuneiformParserVisitor: + return t.VisitRename_table(s) - tst := make([]IAccess_modifierContext, len) - i := 0 - for _, ctx := range children { - if t, ok := ctx.(IAccess_modifierContext); ok { - 
tst[i] = t.(IAccess_modifierContext) - i++ - } + default: + return t.VisitChildren(s) } +} - return tst +type Drop_table_constraintContext struct { + Alter_table_actionContext +} + +func NewDrop_table_constraintContext(parser antlr.Parser, ctx antlr.ParserRuleContext) *Drop_table_constraintContext { + var p = new(Drop_table_constraintContext) + + InitEmptyAlter_table_actionContext(&p.Alter_table_actionContext) + p.parser = parser + p.CopyAll(ctx.(*Alter_table_actionContext)) + + return p +} + +func (s *Drop_table_constraintContext) GetRuleContext() antlr.RuleContext { + return s +} + +func (s *Drop_table_constraintContext) DROP() antlr.TerminalNode { + return s.GetToken(KuneiformParserDROP, 0) +} + +func (s *Drop_table_constraintContext) CONSTRAINT() antlr.TerminalNode { + return s.GetToken(KuneiformParserCONSTRAINT, 0) } -func (s *Action_declarationContext) Access_modifier(i int) IAccess_modifierContext { +func (s *Drop_table_constraintContext) Identifier() IIdentifierContext { var t antlr.RuleContext - j := 0 for _, ctx := range s.GetChildren() { - if _, ok := ctx.(IAccess_modifierContext); ok { - if j == i { - t = ctx.(antlr.RuleContext) - break - } - j++ + if _, ok := ctx.(IIdentifierContext); ok { + t = ctx.(antlr.RuleContext) + break } } @@ -6418,322 +6981,546 @@ func (s *Action_declarationContext) Access_modifier(i int) IAccess_modifierConte return nil } - return t.(IAccess_modifierContext) + return t.(IIdentifierContext) +} + +func (s *Drop_table_constraintContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { + switch t := visitor.(type) { + case KuneiformParserVisitor: + return t.VisitDrop_table_constraint(s) + + default: + return t.VisitChildren(s) + } +} + +type Drop_columnContext struct { + Alter_table_actionContext + column IIdentifierContext +} + +func NewDrop_columnContext(parser antlr.Parser, ctx antlr.ParserRuleContext) *Drop_columnContext { + var p = new(Drop_columnContext) + + InitEmptyAlter_table_actionContext(&p.Alter_table_actionContext) + p.parser = parser + p.CopyAll(ctx.(*Alter_table_actionContext)) + + return p } -func (s *Action_declarationContext) GetRuleContext() antlr.RuleContext { +func (s *Drop_columnContext) GetColumn() IIdentifierContext { return s.column } + +func (s *Drop_columnContext) SetColumn(v IIdentifierContext) { s.column = v } + +func (s *Drop_columnContext) GetRuleContext() antlr.RuleContext { return s } -func (s *Action_declarationContext) ToStringTree(ruleNames []string, recog antlr.Recognizer) string { - return antlr.TreesStringTree(s, ruleNames, recog) +func (s *Drop_columnContext) DROP() antlr.TerminalNode { + return s.GetToken(KuneiformParserDROP, 0) +} + +func (s *Drop_columnContext) COLUMN() antlr.TerminalNode { + return s.GetToken(KuneiformParserCOLUMN, 0) +} + +func (s *Drop_columnContext) Identifier() IIdentifierContext { + var t antlr.RuleContext + for _, ctx := range s.GetChildren() { + if _, ok := ctx.(IIdentifierContext); ok { + t = ctx.(antlr.RuleContext) + break + } + } + + if t == nil { + return nil + } + + return t.(IIdentifierContext) } -func (s *Action_declarationContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { +func (s *Drop_columnContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { switch t := visitor.(type) { case KuneiformParserVisitor: - return t.VisitAction_declaration(s) + return t.VisitDrop_column(s) default: return t.VisitChildren(s) } } -func (p *KuneiformParser) Action_declaration() (localctx IAction_declarationContext) { - localctx = NewAction_declarationContext(p, 
p.GetParserRuleContext(), p.GetState()) - p.EnterRule(localctx, 50, KuneiformParserRULE_action_declaration) - var _la int - - p.EnterOuterAlt(localctx, 1) - p.SetState(374) +func (p *KuneiformParser) Alter_table_action() (localctx IAlter_table_actionContext) { + localctx = NewAlter_table_actionContext(p, p.GetParserRuleContext(), p.GetState()) + p.EnterRule(localctx, 44, KuneiformParserRULE_alter_table_action) + p.SetState(445) p.GetErrorHandler().Sync(p) if p.HasError() { goto errorExit } - _la = p.GetTokenStream().LA(1) - for _la == KuneiformParserCONTEXTUAL_VARIABLE { + switch p.GetInterpreter().AdaptivePredict(p.BaseParser, p.GetTokenStream(), 38, p.GetParserRuleContext()) { + case 1: + localctx = NewAdd_column_constraintContext(p, localctx) + p.EnterOuterAlt(localctx, 1) { - p.SetState(371) - p.Annotation() + p.SetState(404) + p.Match(KuneiformParserALTER) + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } } - - p.SetState(376) - p.GetErrorHandler().Sync(p) - if p.HasError() { - goto errorExit + { + p.SetState(405) + p.Match(KuneiformParserCOLUMN) + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } } - _la = p.GetTokenStream().LA(1) - } - { - p.SetState(377) - p.Match(KuneiformParserACTION) - if p.HasError() { - // Recognition error - abort rule - goto errorExit + { + p.SetState(406) + + var _x = p.Identifier() + + localctx.(*Add_column_constraintContext).column = _x } - } - { - p.SetState(378) - p.Match(KuneiformParserIDENTIFIER) - if p.HasError() { - // Recognition error - abort rule - goto errorExit + { + p.SetState(407) + p.Match(KuneiformParserSET) + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } } - } - { - p.SetState(379) - p.Match(KuneiformParserLPAREN) + p.SetState(412) + p.GetErrorHandler().Sync(p) if p.HasError() { - // Recognition error - abort rule goto errorExit } - } - p.SetState(381) - p.GetErrorHandler().Sync(p) - if p.HasError() { - goto errorExit - } - _la = p.GetTokenStream().LA(1) - if _la == KuneiformParserVARIABLE || _la == KuneiformParserCONTEXTUAL_VARIABLE { - { - p.SetState(380) - p.Variable_list() - } + switch p.GetTokenStream().LA(1) { + case KuneiformParserNOT: + { + p.SetState(408) + p.Match(KuneiformParserNOT) + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } + } + { + p.SetState(409) + p.Match(KuneiformParserNULL) + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } + } - } - { - p.SetState(383) - p.Match(KuneiformParserRPAREN) - if p.HasError() { - // Recognition error - abort rule + case KuneiformParserDEFAULT: + { + p.SetState(410) + p.Match(KuneiformParserDEFAULT) + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } + } + { + p.SetState(411) + p.action_expr(0) + } + + default: + p.SetError(antlr.NewNoViableAltException(p, nil, nil, nil, nil, nil)) goto errorExit } - } - p.SetState(385) - p.GetErrorHandler().Sync(p) - if p.HasError() { - goto errorExit - } - _la = p.GetTokenStream().LA(1) - for ok := true; ok; ok = ((int64(_la) & ^0x3f) == 0 && ((int64(1)<<_la)&4123168604160) != 0) { + case 2: + localctx = NewDrop_column_constraintContext(p, localctx) + p.EnterOuterAlt(localctx, 2) + { + p.SetState(414) + p.Match(KuneiformParserALTER) + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } + } { - p.SetState(384) - p.Access_modifier() + p.SetState(415) + p.Match(KuneiformParserCOLUMN) + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } } + { + 
p.SetState(416) - p.SetState(387) - p.GetErrorHandler().Sync(p) - if p.HasError() { - goto errorExit + var _x = p.Identifier() + + localctx.(*Drop_column_constraintContext).column = _x } - _la = p.GetTokenStream().LA(1) - } - { - p.SetState(389) - p.Match(KuneiformParserLBRACE) - if p.HasError() { - // Recognition error - abort rule - goto errorExit + { + p.SetState(417) + p.Match(KuneiformParserDROP) + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } } - } - { - p.SetState(390) - p.Action_block() - } - { - p.SetState(391) - p.Match(KuneiformParserRBRACE) + p.SetState(421) + p.GetErrorHandler().Sync(p) if p.HasError() { - // Recognition error - abort rule goto errorExit } - } - -errorExit: - if p.HasError() { - v := p.GetError() - localctx.SetException(v) - p.GetErrorHandler().ReportError(p, v) - p.GetErrorHandler().Recover(p, v) - p.SetError(nil) - } - p.ExitRule() - return localctx - goto errorExit // Trick to prevent compiler error if the label is not used -} - -// IProcedure_declarationContext is an interface to support dynamic dispatch. -type IProcedure_declarationContext interface { - antlr.ParserRuleContext - // GetParser returns the parser. - GetParser() antlr.Parser + switch p.GetTokenStream().LA(1) { + case KuneiformParserNOT: + { + p.SetState(418) + p.Match(KuneiformParserNOT) + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } + } + { + p.SetState(419) + p.Match(KuneiformParserNULL) + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } + } - // Getter signatures - PROCEDURE() antlr.TerminalNode - IDENTIFIER() antlr.TerminalNode - LPAREN() antlr.TerminalNode - RPAREN() antlr.TerminalNode - LBRACE() antlr.TerminalNode - Procedure_block() IProcedure_blockContext - RBRACE() antlr.TerminalNode - AllAnnotation() []IAnnotationContext - Annotation(i int) IAnnotationContext - Typed_variable_list() ITyped_variable_listContext - AllAccess_modifier() []IAccess_modifierContext - Access_modifier(i int) IAccess_modifierContext - Procedure_return() IProcedure_returnContext + case KuneiformParserDEFAULT: + { + p.SetState(420) + p.Match(KuneiformParserDEFAULT) + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } + } - // IsProcedure_declarationContext differentiates from other interfaces. 
- IsProcedure_declarationContext() -} + default: + p.SetError(antlr.NewNoViableAltException(p, nil, nil, nil, nil, nil)) + goto errorExit + } -type Procedure_declarationContext struct { - antlr.BaseParserRuleContext - parser antlr.Parser -} + case 3: + localctx = NewAdd_columnContext(p, localctx) + p.EnterOuterAlt(localctx, 3) + { + p.SetState(423) + p.Match(KuneiformParserADD) + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } + } + { + p.SetState(424) + p.Match(KuneiformParserCOLUMN) + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } + } + { + p.SetState(425) -func NewEmptyProcedure_declarationContext() *Procedure_declarationContext { - var p = new(Procedure_declarationContext) - antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1) - p.RuleIndex = KuneiformParserRULE_procedure_declaration - return p -} + var _x = p.Identifier() -func InitEmptyProcedure_declarationContext(p *Procedure_declarationContext) { - antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1) - p.RuleIndex = KuneiformParserRULE_procedure_declaration -} + localctx.(*Add_columnContext).column = _x + } + { + p.SetState(426) + p.Type_() + } -func (*Procedure_declarationContext) IsProcedure_declarationContext() {} + case 4: + localctx = NewDrop_columnContext(p, localctx) + p.EnterOuterAlt(localctx, 4) + { + p.SetState(428) + p.Match(KuneiformParserDROP) + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } + } + { + p.SetState(429) + p.Match(KuneiformParserCOLUMN) + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } + } + { + p.SetState(430) -func NewProcedure_declarationContext(parser antlr.Parser, parent antlr.ParserRuleContext, invokingState int) *Procedure_declarationContext { - var p = new(Procedure_declarationContext) + var _x = p.Identifier() - antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, parent, invokingState) + localctx.(*Drop_columnContext).column = _x + } - p.parser = parser - p.RuleIndex = KuneiformParserRULE_procedure_declaration + case 5: + localctx = NewRename_columnContext(p, localctx) + p.EnterOuterAlt(localctx, 5) + { + p.SetState(431) + p.Match(KuneiformParserRENAME) + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } + } + { + p.SetState(432) + p.Match(KuneiformParserCOLUMN) + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } + } + { + p.SetState(433) - return p -} + var _x = p.Identifier() -func (s *Procedure_declarationContext) GetParser() antlr.Parser { return s.parser } + localctx.(*Rename_columnContext).old_column = _x + } + { + p.SetState(434) + p.Match(KuneiformParserTO) + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } + } + { + p.SetState(435) -func (s *Procedure_declarationContext) PROCEDURE() antlr.TerminalNode { - return s.GetToken(KuneiformParserPROCEDURE, 0) -} + var _x = p.Identifier() -func (s *Procedure_declarationContext) IDENTIFIER() antlr.TerminalNode { - return s.GetToken(KuneiformParserIDENTIFIER, 0) -} + localctx.(*Rename_columnContext).new_column = _x + } -func (s *Procedure_declarationContext) LPAREN() antlr.TerminalNode { - return s.GetToken(KuneiformParserLPAREN, 0) -} + case 6: + localctx = NewRename_tableContext(p, localctx) + p.EnterOuterAlt(localctx, 6) + { + p.SetState(437) + p.Match(KuneiformParserRENAME) + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } + } + { + p.SetState(438) + p.Match(KuneiformParserTO) + if p.HasError() { + // 
Recognition error - abort rule + goto errorExit + } + } + { + p.SetState(439) -func (s *Procedure_declarationContext) RPAREN() antlr.TerminalNode { - return s.GetToken(KuneiformParserRPAREN, 0) -} + var _x = p.Identifier() -func (s *Procedure_declarationContext) LBRACE() antlr.TerminalNode { - return s.GetToken(KuneiformParserLBRACE, 0) -} + localctx.(*Rename_tableContext).new_table = _x + } -func (s *Procedure_declarationContext) Procedure_block() IProcedure_blockContext { - var t antlr.RuleContext - for _, ctx := range s.GetChildren() { - if _, ok := ctx.(IProcedure_blockContext); ok { - t = ctx.(antlr.RuleContext) - break + case 7: + localctx = NewAdd_table_constraintContext(p, localctx) + p.EnterOuterAlt(localctx, 7) + { + p.SetState(440) + p.Match(KuneiformParserADD) + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } + } + { + p.SetState(441) + p.Table_constraint_def() + } + + case 8: + localctx = NewDrop_table_constraintContext(p, localctx) + p.EnterOuterAlt(localctx, 8) + { + p.SetState(442) + p.Match(KuneiformParserDROP) + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } + } + { + p.SetState(443) + p.Match(KuneiformParserCONSTRAINT) + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } } + { + p.SetState(444) + p.Identifier() + } + + case antlr.ATNInvalidAltNumber: + goto errorExit } - if t == nil { - return nil +errorExit: + if p.HasError() { + v := p.GetError() + localctx.SetException(v) + p.GetErrorHandler().ReportError(p, v) + p.GetErrorHandler().Recover(p, v) + p.SetError(nil) } + p.ExitRule() + return localctx + goto errorExit // Trick to prevent compiler error if the label is not used +} + +// ICreate_index_statementContext is an interface to support dynamic dispatch. +type ICreate_index_statementContext interface { + antlr.ParserRuleContext + + // GetParser returns the parser. + GetParser() antlr.Parser + + // GetName returns the name rule contexts. + GetName() IIdentifierContext + + // GetTable returns the table rule contexts. + GetTable() IIdentifierContext + + // GetColumns returns the columns rule contexts. + GetColumns() IIdentifier_listContext + + // SetName sets the name rule contexts. + SetName(IIdentifierContext) + + // SetTable sets the table rule contexts. + SetTable(IIdentifierContext) + + // SetColumns sets the columns rule contexts. + SetColumns(IIdentifier_listContext) + + // Getter signatures + CREATE() antlr.TerminalNode + INDEX() antlr.TerminalNode + ON() antlr.TerminalNode + LPAREN() antlr.TerminalNode + RPAREN() antlr.TerminalNode + AllIdentifier() []IIdentifierContext + Identifier(i int) IIdentifierContext + Identifier_list() IIdentifier_listContext + UNIQUE() antlr.TerminalNode + IF() antlr.TerminalNode + NOT() antlr.TerminalNode + EXISTS() antlr.TerminalNode - return t.(IProcedure_blockContext) + // IsCreate_index_statementContext differentiates from other interfaces. 
+ IsCreate_index_statementContext() } -func (s *Procedure_declarationContext) RBRACE() antlr.TerminalNode { - return s.GetToken(KuneiformParserRBRACE, 0) +type Create_index_statementContext struct { + antlr.BaseParserRuleContext + parser antlr.Parser + name IIdentifierContext + table IIdentifierContext + columns IIdentifier_listContext } -func (s *Procedure_declarationContext) AllAnnotation() []IAnnotationContext { - children := s.GetChildren() - len := 0 - for _, ctx := range children { - if _, ok := ctx.(IAnnotationContext); ok { - len++ - } - } +func NewEmptyCreate_index_statementContext() *Create_index_statementContext { + var p = new(Create_index_statementContext) + antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1) + p.RuleIndex = KuneiformParserRULE_create_index_statement + return p +} - tst := make([]IAnnotationContext, len) - i := 0 - for _, ctx := range children { - if t, ok := ctx.(IAnnotationContext); ok { - tst[i] = t.(IAnnotationContext) - i++ - } - } +func InitEmptyCreate_index_statementContext(p *Create_index_statementContext) { + antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1) + p.RuleIndex = KuneiformParserRULE_create_index_statement +} - return tst +func (*Create_index_statementContext) IsCreate_index_statementContext() {} + +func NewCreate_index_statementContext(parser antlr.Parser, parent antlr.ParserRuleContext, invokingState int) *Create_index_statementContext { + var p = new(Create_index_statementContext) + + antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, parent, invokingState) + + p.parser = parser + p.RuleIndex = KuneiformParserRULE_create_index_statement + + return p } -func (s *Procedure_declarationContext) Annotation(i int) IAnnotationContext { - var t antlr.RuleContext - j := 0 - for _, ctx := range s.GetChildren() { - if _, ok := ctx.(IAnnotationContext); ok { - if j == i { - t = ctx.(antlr.RuleContext) - break - } - j++ - } - } +func (s *Create_index_statementContext) GetParser() antlr.Parser { return s.parser } - if t == nil { - return nil - } +func (s *Create_index_statementContext) GetName() IIdentifierContext { return s.name } + +func (s *Create_index_statementContext) GetTable() IIdentifierContext { return s.table } + +func (s *Create_index_statementContext) GetColumns() IIdentifier_listContext { return s.columns } - return t.(IAnnotationContext) +func (s *Create_index_statementContext) SetName(v IIdentifierContext) { s.name = v } + +func (s *Create_index_statementContext) SetTable(v IIdentifierContext) { s.table = v } + +func (s *Create_index_statementContext) SetColumns(v IIdentifier_listContext) { s.columns = v } + +func (s *Create_index_statementContext) CREATE() antlr.TerminalNode { + return s.GetToken(KuneiformParserCREATE, 0) } -func (s *Procedure_declarationContext) Typed_variable_list() ITyped_variable_listContext { - var t antlr.RuleContext - for _, ctx := range s.GetChildren() { - if _, ok := ctx.(ITyped_variable_listContext); ok { - t = ctx.(antlr.RuleContext) - break - } - } +func (s *Create_index_statementContext) INDEX() antlr.TerminalNode { + return s.GetToken(KuneiformParserINDEX, 0) +} - if t == nil { - return nil - } +func (s *Create_index_statementContext) ON() antlr.TerminalNode { + return s.GetToken(KuneiformParserON, 0) +} - return t.(ITyped_variable_listContext) +func (s *Create_index_statementContext) LPAREN() antlr.TerminalNode { + return s.GetToken(KuneiformParserLPAREN, 0) +} + +func (s *Create_index_statementContext) RPAREN() antlr.TerminalNode { + return 
s.GetToken(KuneiformParserRPAREN, 0) } -func (s *Procedure_declarationContext) AllAccess_modifier() []IAccess_modifierContext { +func (s *Create_index_statementContext) AllIdentifier() []IIdentifierContext { children := s.GetChildren() len := 0 for _, ctx := range children { - if _, ok := ctx.(IAccess_modifierContext); ok { + if _, ok := ctx.(IIdentifierContext); ok { len++ } } - tst := make([]IAccess_modifierContext, len) + tst := make([]IIdentifierContext, len) i := 0 for _, ctx := range children { - if t, ok := ctx.(IAccess_modifierContext); ok { - tst[i] = t.(IAccess_modifierContext) + if t, ok := ctx.(IIdentifierContext); ok { + tst[i] = t.(IIdentifierContext) i++ } } @@ -6741,11 +7528,11 @@ func (s *Procedure_declarationContext) AllAccess_modifier() []IAccess_modifierCo return tst } -func (s *Procedure_declarationContext) Access_modifier(i int) IAccess_modifierContext { +func (s *Create_index_statementContext) Identifier(i int) IIdentifierContext { var t antlr.RuleContext j := 0 for _, ctx := range s.GetChildren() { - if _, ok := ctx.(IAccess_modifierContext); ok { + if _, ok := ctx.(IIdentifierContext); ok { if j == i { t = ctx.(antlr.RuleContext) break @@ -6758,13 +7545,13 @@ func (s *Procedure_declarationContext) Access_modifier(i int) IAccess_modifierCo return nil } - return t.(IAccess_modifierContext) + return t.(IIdentifierContext) } -func (s *Procedure_declarationContext) Procedure_return() IProcedure_returnContext { +func (s *Create_index_statementContext) Identifier_list() IIdentifier_listContext { var t antlr.RuleContext for _, ctx := range s.GetChildren() { - if _, ok := ctx.(IProcedure_returnContext); ok { + if _, ok := ctx.(IIdentifier_listContext); ok { t = ctx.(antlr.RuleContext) break } @@ -6774,156 +7561,175 @@ func (s *Procedure_declarationContext) Procedure_return() IProcedure_returnConte return nil } - return t.(IProcedure_returnContext) + return t.(IIdentifier_listContext) +} + +func (s *Create_index_statementContext) UNIQUE() antlr.TerminalNode { + return s.GetToken(KuneiformParserUNIQUE, 0) +} + +func (s *Create_index_statementContext) IF() antlr.TerminalNode { + return s.GetToken(KuneiformParserIF, 0) +} + +func (s *Create_index_statementContext) NOT() antlr.TerminalNode { + return s.GetToken(KuneiformParserNOT, 0) +} + +func (s *Create_index_statementContext) EXISTS() antlr.TerminalNode { + return s.GetToken(KuneiformParserEXISTS, 0) } -func (s *Procedure_declarationContext) GetRuleContext() antlr.RuleContext { +func (s *Create_index_statementContext) GetRuleContext() antlr.RuleContext { return s } -func (s *Procedure_declarationContext) ToStringTree(ruleNames []string, recog antlr.Recognizer) string { +func (s *Create_index_statementContext) ToStringTree(ruleNames []string, recog antlr.Recognizer) string { return antlr.TreesStringTree(s, ruleNames, recog) } -func (s *Procedure_declarationContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { +func (s *Create_index_statementContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { switch t := visitor.(type) { case KuneiformParserVisitor: - return t.VisitProcedure_declaration(s) + return t.VisitCreate_index_statement(s) default: return t.VisitChildren(s) } } -func (p *KuneiformParser) Procedure_declaration() (localctx IProcedure_declarationContext) { - localctx = NewProcedure_declarationContext(p, p.GetParserRuleContext(), p.GetState()) - p.EnterRule(localctx, 52, KuneiformParserRULE_procedure_declaration) +func (p *KuneiformParser) Create_index_statement() (localctx ICreate_index_statementContext) 
{ + localctx = NewCreate_index_statementContext(p, p.GetParserRuleContext(), p.GetState()) + p.EnterRule(localctx, 46, KuneiformParserRULE_create_index_statement) var _la int p.EnterOuterAlt(localctx, 1) - p.SetState(396) - p.GetErrorHandler().Sync(p) - if p.HasError() { - goto errorExit - } - _la = p.GetTokenStream().LA(1) - - for _la == KuneiformParserCONTEXTUAL_VARIABLE { - { - p.SetState(393) - p.Annotation() - } - - p.SetState(398) - p.GetErrorHandler().Sync(p) - if p.HasError() { - goto errorExit - } - _la = p.GetTokenStream().LA(1) - } - { - p.SetState(399) - p.Match(KuneiformParserPROCEDURE) - if p.HasError() { - // Recognition error - abort rule - goto errorExit - } - } - { - p.SetState(400) - p.Match(KuneiformParserIDENTIFIER) - if p.HasError() { - // Recognition error - abort rule - goto errorExit - } - } { - p.SetState(401) - p.Match(KuneiformParserLPAREN) + p.SetState(447) + p.Match(KuneiformParserCREATE) if p.HasError() { // Recognition error - abort rule goto errorExit } } - p.SetState(403) + p.SetState(449) p.GetErrorHandler().Sync(p) if p.HasError() { goto errorExit } _la = p.GetTokenStream().LA(1) - if _la == KuneiformParserVARIABLE || _la == KuneiformParserCONTEXTUAL_VARIABLE { + if _la == KuneiformParserUNIQUE { { - p.SetState(402) - p.Typed_variable_list() + p.SetState(448) + p.Match(KuneiformParserUNIQUE) + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } } } { - p.SetState(405) - p.Match(KuneiformParserRPAREN) + p.SetState(451) + p.Match(KuneiformParserINDEX) if p.HasError() { // Recognition error - abort rule goto errorExit } } - p.SetState(407) + p.SetState(455) p.GetErrorHandler().Sync(p) if p.HasError() { goto errorExit } _la = p.GetTokenStream().LA(1) - for ok := true; ok; ok = ((int64(_la) & ^0x3f) == 0 && ((int64(1)<<_la)&4123168604160) != 0) { + if _la == KuneiformParserIF { { - p.SetState(406) - p.Access_modifier() + p.SetState(452) + p.Match(KuneiformParserIF) + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } } - - p.SetState(409) - p.GetErrorHandler().Sync(p) - if p.HasError() { - goto errorExit + { + p.SetState(453) + p.Match(KuneiformParserNOT) + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } } - _la = p.GetTokenStream().LA(1) + { + p.SetState(454) + p.Match(KuneiformParserEXISTS) + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } + } + } - p.SetState(412) + p.SetState(458) p.GetErrorHandler().Sync(p) if p.HasError() { goto errorExit } _la = p.GetTokenStream().LA(1) - if _la == KuneiformParserRETURNS { + if _la == KuneiformParserDOUBLE_QUOTE || _la == KuneiformParserIDENTIFIER { { - p.SetState(411) - p.Procedure_return() + p.SetState(457) + + var _x = p.Identifier() + + localctx.(*Create_index_statementContext).name = _x } } { - p.SetState(414) - p.Match(KuneiformParserLBRACE) + p.SetState(460) + p.Match(KuneiformParserON) if p.HasError() { // Recognition error - abort rule goto errorExit } } { - p.SetState(415) - p.Procedure_block() + p.SetState(461) + + var _x = p.Identifier() + + localctx.(*Create_index_statementContext).table = _x } { - p.SetState(416) - p.Match(KuneiformParserRBRACE) + p.SetState(462) + p.Match(KuneiformParserLPAREN) if p.HasError() { // Recognition error - abort rule goto errorExit } } + { + p.SetState(463) -errorExit: - if p.HasError() { + var _x = p.Identifier_list() + + localctx.(*Create_index_statementContext).columns = _x + } + { + p.SetState(464) + p.Match(KuneiformParserRPAREN) + if p.HasError() { + // Recognition error 
- abort rule + goto errorExit + } + } + +errorExit: + if p.HasError() { v := p.GetError() localctx.SetException(v) p.GetErrorHandler().ReportError(p, v) @@ -6935,113 +7741,79 @@ errorExit: goto errorExit // Trick to prevent compiler error if the label is not used } -// IForeign_procedure_declarationContext is an interface to support dynamic dispatch. -type IForeign_procedure_declarationContext interface { +// IDrop_index_statementContext is an interface to support dynamic dispatch. +type IDrop_index_statementContext interface { antlr.ParserRuleContext // GetParser returns the parser. GetParser() antlr.Parser - // GetUnnamed_params returns the unnamed_params rule contexts. - GetUnnamed_params() IType_listContext - - // GetNamed_params returns the named_params rule contexts. - GetNamed_params() ITyped_variable_listContext - - // SetUnnamed_params sets the unnamed_params rule contexts. - SetUnnamed_params(IType_listContext) + // GetName returns the name rule contexts. + GetName() IIdentifierContext - // SetNamed_params sets the named_params rule contexts. - SetNamed_params(ITyped_variable_listContext) + // SetName sets the name rule contexts. + SetName(IIdentifierContext) // Getter signatures - FOREIGN() antlr.TerminalNode - PROCEDURE() antlr.TerminalNode - IDENTIFIER() antlr.TerminalNode - LPAREN() antlr.TerminalNode - RPAREN() antlr.TerminalNode - Procedure_return() IProcedure_returnContext - Type_list() IType_listContext - Typed_variable_list() ITyped_variable_listContext + DROP() antlr.TerminalNode + INDEX() antlr.TerminalNode + Identifier() IIdentifierContext + IF() antlr.TerminalNode + EXISTS() antlr.TerminalNode - // IsForeign_procedure_declarationContext differentiates from other interfaces. - IsForeign_procedure_declarationContext() + // IsDrop_index_statementContext differentiates from other interfaces. 
+ IsDrop_index_statementContext() } -type Foreign_procedure_declarationContext struct { +type Drop_index_statementContext struct { antlr.BaseParserRuleContext - parser antlr.Parser - unnamed_params IType_listContext - named_params ITyped_variable_listContext + parser antlr.Parser + name IIdentifierContext } -func NewEmptyForeign_procedure_declarationContext() *Foreign_procedure_declarationContext { - var p = new(Foreign_procedure_declarationContext) +func NewEmptyDrop_index_statementContext() *Drop_index_statementContext { + var p = new(Drop_index_statementContext) antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1) - p.RuleIndex = KuneiformParserRULE_foreign_procedure_declaration + p.RuleIndex = KuneiformParserRULE_drop_index_statement return p } -func InitEmptyForeign_procedure_declarationContext(p *Foreign_procedure_declarationContext) { +func InitEmptyDrop_index_statementContext(p *Drop_index_statementContext) { antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1) - p.RuleIndex = KuneiformParserRULE_foreign_procedure_declaration + p.RuleIndex = KuneiformParserRULE_drop_index_statement } -func (*Foreign_procedure_declarationContext) IsForeign_procedure_declarationContext() {} +func (*Drop_index_statementContext) IsDrop_index_statementContext() {} -func NewForeign_procedure_declarationContext(parser antlr.Parser, parent antlr.ParserRuleContext, invokingState int) *Foreign_procedure_declarationContext { - var p = new(Foreign_procedure_declarationContext) +func NewDrop_index_statementContext(parser antlr.Parser, parent antlr.ParserRuleContext, invokingState int) *Drop_index_statementContext { + var p = new(Drop_index_statementContext) antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, parent, invokingState) p.parser = parser - p.RuleIndex = KuneiformParserRULE_foreign_procedure_declaration + p.RuleIndex = KuneiformParserRULE_drop_index_statement return p } -func (s *Foreign_procedure_declarationContext) GetParser() antlr.Parser { return s.parser } - -func (s *Foreign_procedure_declarationContext) GetUnnamed_params() IType_listContext { - return s.unnamed_params -} - -func (s *Foreign_procedure_declarationContext) GetNamed_params() ITyped_variable_listContext { - return s.named_params -} - -func (s *Foreign_procedure_declarationContext) SetUnnamed_params(v IType_listContext) { - s.unnamed_params = v -} - -func (s *Foreign_procedure_declarationContext) SetNamed_params(v ITyped_variable_listContext) { - s.named_params = v -} - -func (s *Foreign_procedure_declarationContext) FOREIGN() antlr.TerminalNode { - return s.GetToken(KuneiformParserFOREIGN, 0) -} +func (s *Drop_index_statementContext) GetParser() antlr.Parser { return s.parser } -func (s *Foreign_procedure_declarationContext) PROCEDURE() antlr.TerminalNode { - return s.GetToken(KuneiformParserPROCEDURE, 0) -} +func (s *Drop_index_statementContext) GetName() IIdentifierContext { return s.name } -func (s *Foreign_procedure_declarationContext) IDENTIFIER() antlr.TerminalNode { - return s.GetToken(KuneiformParserIDENTIFIER, 0) -} +func (s *Drop_index_statementContext) SetName(v IIdentifierContext) { s.name = v } -func (s *Foreign_procedure_declarationContext) LPAREN() antlr.TerminalNode { - return s.GetToken(KuneiformParserLPAREN, 0) +func (s *Drop_index_statementContext) DROP() antlr.TerminalNode { + return s.GetToken(KuneiformParserDROP, 0) } -func (s *Foreign_procedure_declarationContext) RPAREN() antlr.TerminalNode { - return s.GetToken(KuneiformParserRPAREN, 0) +func (s *Drop_index_statementContext) 
INDEX() antlr.TerminalNode { + return s.GetToken(KuneiformParserINDEX, 0) } -func (s *Foreign_procedure_declarationContext) Procedure_return() IProcedure_returnContext { +func (s *Drop_index_statementContext) Identifier() IIdentifierContext { var t antlr.RuleContext for _, ctx := range s.GetChildren() { - if _, ok := ctx.(IProcedure_returnContext); ok { + if _, ok := ctx.(IIdentifierContext); ok { t = ctx.(antlr.RuleContext) break } @@ -7051,146 +7823,89 @@ func (s *Foreign_procedure_declarationContext) Procedure_return() IProcedure_ret return nil } - return t.(IProcedure_returnContext) + return t.(IIdentifierContext) } -func (s *Foreign_procedure_declarationContext) Type_list() IType_listContext { - var t antlr.RuleContext - for _, ctx := range s.GetChildren() { - if _, ok := ctx.(IType_listContext); ok { - t = ctx.(antlr.RuleContext) - break - } - } - - if t == nil { - return nil - } - - return t.(IType_listContext) +func (s *Drop_index_statementContext) IF() antlr.TerminalNode { + return s.GetToken(KuneiformParserIF, 0) } -func (s *Foreign_procedure_declarationContext) Typed_variable_list() ITyped_variable_listContext { - var t antlr.RuleContext - for _, ctx := range s.GetChildren() { - if _, ok := ctx.(ITyped_variable_listContext); ok { - t = ctx.(antlr.RuleContext) - break - } - } - - if t == nil { - return nil - } - - return t.(ITyped_variable_listContext) +func (s *Drop_index_statementContext) EXISTS() antlr.TerminalNode { + return s.GetToken(KuneiformParserEXISTS, 0) } -func (s *Foreign_procedure_declarationContext) GetRuleContext() antlr.RuleContext { +func (s *Drop_index_statementContext) GetRuleContext() antlr.RuleContext { return s } -func (s *Foreign_procedure_declarationContext) ToStringTree(ruleNames []string, recog antlr.Recognizer) string { +func (s *Drop_index_statementContext) ToStringTree(ruleNames []string, recog antlr.Recognizer) string { return antlr.TreesStringTree(s, ruleNames, recog) } -func (s *Foreign_procedure_declarationContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { +func (s *Drop_index_statementContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { switch t := visitor.(type) { case KuneiformParserVisitor: - return t.VisitForeign_procedure_declaration(s) + return t.VisitDrop_index_statement(s) default: return t.VisitChildren(s) } } -func (p *KuneiformParser) Foreign_procedure_declaration() (localctx IForeign_procedure_declarationContext) { - localctx = NewForeign_procedure_declarationContext(p, p.GetParserRuleContext(), p.GetState()) - p.EnterRule(localctx, 54, KuneiformParserRULE_foreign_procedure_declaration) +func (p *KuneiformParser) Drop_index_statement() (localctx IDrop_index_statementContext) { + localctx = NewDrop_index_statementContext(p, p.GetParserRuleContext(), p.GetState()) + p.EnterRule(localctx, 48, KuneiformParserRULE_drop_index_statement) var _la int p.EnterOuterAlt(localctx, 1) { - p.SetState(418) - p.Match(KuneiformParserFOREIGN) - if p.HasError() { - // Recognition error - abort rule - goto errorExit - } - } - { - p.SetState(419) - p.Match(KuneiformParserPROCEDURE) - if p.HasError() { - // Recognition error - abort rule - goto errorExit - } - } - { - p.SetState(420) - p.Match(KuneiformParserIDENTIFIER) + p.SetState(466) + p.Match(KuneiformParserDROP) if p.HasError() { // Recognition error - abort rule goto errorExit } } { - p.SetState(421) - p.Match(KuneiformParserLPAREN) + p.SetState(467) + p.Match(KuneiformParserINDEX) if p.HasError() { // Recognition error - abort rule goto errorExit } } - p.SetState(424) + 
p.SetState(470) p.GetErrorHandler().Sync(p) if p.HasError() { goto errorExit } - switch p.GetTokenStream().LA(1) { - case KuneiformParserIDENTIFIER: - { - p.SetState(422) - - var _x = p.Type_list() + _la = p.GetTokenStream().LA(1) - localctx.(*Foreign_procedure_declarationContext).unnamed_params = _x + if _la == KuneiformParserIF { + { + p.SetState(468) + p.Match(KuneiformParserIF) + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } } - - case KuneiformParserVARIABLE, KuneiformParserCONTEXTUAL_VARIABLE: { - p.SetState(423) - - var _x = p.Typed_variable_list() - - localctx.(*Foreign_procedure_declarationContext).named_params = _x + p.SetState(469) + p.Match(KuneiformParserEXISTS) + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } } - case KuneiformParserRPAREN: - - default: } { - p.SetState(426) - p.Match(KuneiformParserRPAREN) - if p.HasError() { - // Recognition error - abort rule - goto errorExit - } - } - p.SetState(428) - p.GetErrorHandler().Sync(p) - if p.HasError() { - goto errorExit - } - _la = p.GetTokenStream().LA(1) + p.SetState(472) - if _la == KuneiformParserRETURNS { - { - p.SetState(427) - p.Procedure_return() - } + var _x = p.Identifier() + localctx.(*Drop_index_statementContext).name = _x } errorExit: @@ -7206,101 +7921,69 @@ errorExit: goto errorExit // Trick to prevent compiler error if the label is not used } -// IProcedure_returnContext is an interface to support dynamic dispatch. -type IProcedure_returnContext interface { +// ICreate_role_statementContext is an interface to support dynamic dispatch. +type ICreate_role_statementContext interface { antlr.ParserRuleContext // GetParser returns the parser. GetParser() antlr.Parser - // GetReturn_columns returns the return_columns rule contexts. - GetReturn_columns() INamed_type_listContext - - // GetUnnamed_return_types returns the unnamed_return_types rule contexts. - GetUnnamed_return_types() IType_listContext - - // SetReturn_columns sets the return_columns rule contexts. - SetReturn_columns(INamed_type_listContext) - - // SetUnnamed_return_types sets the unnamed_return_types rule contexts. - SetUnnamed_return_types(IType_listContext) - // Getter signatures - RETURNS() antlr.TerminalNode - LPAREN() antlr.TerminalNode - RPAREN() antlr.TerminalNode - Named_type_list() INamed_type_listContext - Type_list() IType_listContext - TABLE() antlr.TerminalNode + CREATE() antlr.TerminalNode + ROLE() antlr.TerminalNode + Identifier() IIdentifierContext + IF() antlr.TerminalNode + NOT() antlr.TerminalNode + EXISTS() antlr.TerminalNode - // IsProcedure_returnContext differentiates from other interfaces. - IsProcedure_returnContext() + // IsCreate_role_statementContext differentiates from other interfaces. 
+ IsCreate_role_statementContext() } -type Procedure_returnContext struct { +type Create_role_statementContext struct { antlr.BaseParserRuleContext - parser antlr.Parser - return_columns INamed_type_listContext - unnamed_return_types IType_listContext + parser antlr.Parser } -func NewEmptyProcedure_returnContext() *Procedure_returnContext { - var p = new(Procedure_returnContext) +func NewEmptyCreate_role_statementContext() *Create_role_statementContext { + var p = new(Create_role_statementContext) antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1) - p.RuleIndex = KuneiformParserRULE_procedure_return + p.RuleIndex = KuneiformParserRULE_create_role_statement return p } -func InitEmptyProcedure_returnContext(p *Procedure_returnContext) { +func InitEmptyCreate_role_statementContext(p *Create_role_statementContext) { antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1) - p.RuleIndex = KuneiformParserRULE_procedure_return + p.RuleIndex = KuneiformParserRULE_create_role_statement } -func (*Procedure_returnContext) IsProcedure_returnContext() {} +func (*Create_role_statementContext) IsCreate_role_statementContext() {} -func NewProcedure_returnContext(parser antlr.Parser, parent antlr.ParserRuleContext, invokingState int) *Procedure_returnContext { - var p = new(Procedure_returnContext) +func NewCreate_role_statementContext(parser antlr.Parser, parent antlr.ParserRuleContext, invokingState int) *Create_role_statementContext { + var p = new(Create_role_statementContext) antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, parent, invokingState) p.parser = parser - p.RuleIndex = KuneiformParserRULE_procedure_return + p.RuleIndex = KuneiformParserRULE_create_role_statement return p } -func (s *Procedure_returnContext) GetParser() antlr.Parser { return s.parser } - -func (s *Procedure_returnContext) GetReturn_columns() INamed_type_listContext { - return s.return_columns -} - -func (s *Procedure_returnContext) GetUnnamed_return_types() IType_listContext { - return s.unnamed_return_types -} - -func (s *Procedure_returnContext) SetReturn_columns(v INamed_type_listContext) { s.return_columns = v } +func (s *Create_role_statementContext) GetParser() antlr.Parser { return s.parser } -func (s *Procedure_returnContext) SetUnnamed_return_types(v IType_listContext) { - s.unnamed_return_types = v -} - -func (s *Procedure_returnContext) RETURNS() antlr.TerminalNode { - return s.GetToken(KuneiformParserRETURNS, 0) +func (s *Create_role_statementContext) CREATE() antlr.TerminalNode { + return s.GetToken(KuneiformParserCREATE, 0) } -func (s *Procedure_returnContext) LPAREN() antlr.TerminalNode { - return s.GetToken(KuneiformParserLPAREN, 0) -} - -func (s *Procedure_returnContext) RPAREN() antlr.TerminalNode { - return s.GetToken(KuneiformParserRPAREN, 0) +func (s *Create_role_statementContext) ROLE() antlr.TerminalNode { + return s.GetToken(KuneiformParserROLE, 0) } -func (s *Procedure_returnContext) Named_type_list() INamed_type_listContext { +func (s *Create_role_statementContext) Identifier() IIdentifierContext { var t antlr.RuleContext for _, ctx := range s.GetChildren() { - if _, ok := ctx.(INamed_type_listContext); ok { + if _, ok := ctx.(IIdentifierContext); ok { t = ctx.(antlr.RuleContext) break } @@ -7310,138 +7993,98 @@ func (s *Procedure_returnContext) Named_type_list() INamed_type_listContext { return nil } - return t.(INamed_type_listContext) + return t.(IIdentifierContext) } -func (s *Procedure_returnContext) Type_list() IType_listContext { - var t antlr.RuleContext - 
for _, ctx := range s.GetChildren() { - if _, ok := ctx.(IType_listContext); ok { - t = ctx.(antlr.RuleContext) - break - } - } - - if t == nil { - return nil - } +func (s *Create_role_statementContext) IF() antlr.TerminalNode { + return s.GetToken(KuneiformParserIF, 0) +} - return t.(IType_listContext) +func (s *Create_role_statementContext) NOT() antlr.TerminalNode { + return s.GetToken(KuneiformParserNOT, 0) } -func (s *Procedure_returnContext) TABLE() antlr.TerminalNode { - return s.GetToken(KuneiformParserTABLE, 0) +func (s *Create_role_statementContext) EXISTS() antlr.TerminalNode { + return s.GetToken(KuneiformParserEXISTS, 0) } -func (s *Procedure_returnContext) GetRuleContext() antlr.RuleContext { +func (s *Create_role_statementContext) GetRuleContext() antlr.RuleContext { return s } -func (s *Procedure_returnContext) ToStringTree(ruleNames []string, recog antlr.Recognizer) string { +func (s *Create_role_statementContext) ToStringTree(ruleNames []string, recog antlr.Recognizer) string { return antlr.TreesStringTree(s, ruleNames, recog) } -func (s *Procedure_returnContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { +func (s *Create_role_statementContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { switch t := visitor.(type) { case KuneiformParserVisitor: - return t.VisitProcedure_return(s) + return t.VisitCreate_role_statement(s) default: return t.VisitChildren(s) } } -func (p *KuneiformParser) Procedure_return() (localctx IProcedure_returnContext) { - localctx = NewProcedure_returnContext(p, p.GetParserRuleContext(), p.GetState()) - p.EnterRule(localctx, 56, KuneiformParserRULE_procedure_return) +func (p *KuneiformParser) Create_role_statement() (localctx ICreate_role_statementContext) { + localctx = NewCreate_role_statementContext(p, p.GetParserRuleContext(), p.GetState()) + p.EnterRule(localctx, 50, KuneiformParserRULE_create_role_statement) var _la int p.EnterOuterAlt(localctx, 1) { - p.SetState(430) - p.Match(KuneiformParserRETURNS) + p.SetState(474) + p.Match(KuneiformParserCREATE) + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } + } + { + p.SetState(475) + p.Match(KuneiformParserROLE) if p.HasError() { // Recognition error - abort rule goto errorExit } } - p.SetState(442) + p.SetState(479) p.GetErrorHandler().Sync(p) if p.HasError() { goto errorExit } + _la = p.GetTokenStream().LA(1) - switch p.GetInterpreter().AdaptivePredict(p.BaseParser, p.GetTokenStream(), 43, p.GetParserRuleContext()) { - case 1: - p.SetState(432) - p.GetErrorHandler().Sync(p) - if p.HasError() { - goto errorExit - } - _la = p.GetTokenStream().LA(1) - - if _la == KuneiformParserTABLE { - { - p.SetState(431) - p.Match(KuneiformParserTABLE) - if p.HasError() { - // Recognition error - abort rule - goto errorExit - } - } - - } - { - p.SetState(434) - p.Match(KuneiformParserLPAREN) - if p.HasError() { - // Recognition error - abort rule - goto errorExit - } - } - { - p.SetState(435) - - var _x = p.Named_type_list() - - localctx.(*Procedure_returnContext).return_columns = _x - } + if _la == KuneiformParserIF { { - p.SetState(436) - p.Match(KuneiformParserRPAREN) + p.SetState(476) + p.Match(KuneiformParserIF) if p.HasError() { // Recognition error - abort rule goto errorExit } } - - case 2: { - p.SetState(438) - p.Match(KuneiformParserLPAREN) + p.SetState(477) + p.Match(KuneiformParserNOT) if p.HasError() { // Recognition error - abort rule goto errorExit } } { - p.SetState(439) - - var _x = p.Type_list() - - 
localctx.(*Procedure_returnContext).unnamed_return_types = _x - } - { - p.SetState(440) - p.Match(KuneiformParserRPAREN) + p.SetState(478) + p.Match(KuneiformParserEXISTS) if p.HasError() { // Recognition error - abort rule goto errorExit } } - case antlr.ATNInvalidAltNumber: - goto errorExit + } + { + p.SetState(481) + p.Identifier() } errorExit: @@ -7457,57 +8100,68 @@ errorExit: goto errorExit // Trick to prevent compiler error if the label is not used } -// ISqlContext is an interface to support dynamic dispatch. -type ISqlContext interface { +// IDrop_role_statementContext is an interface to support dynamic dispatch. +type IDrop_role_statementContext interface { antlr.ParserRuleContext // GetParser returns the parser. GetParser() antlr.Parser // Getter signatures - Sql_statement() ISql_statementContext - SCOL() antlr.TerminalNode + DROP() antlr.TerminalNode + ROLE() antlr.TerminalNode + Identifier() IIdentifierContext + IF() antlr.TerminalNode + EXISTS() antlr.TerminalNode - // IsSqlContext differentiates from other interfaces. - IsSqlContext() + // IsDrop_role_statementContext differentiates from other interfaces. + IsDrop_role_statementContext() } -type SqlContext struct { +type Drop_role_statementContext struct { antlr.BaseParserRuleContext parser antlr.Parser } -func NewEmptySqlContext() *SqlContext { - var p = new(SqlContext) +func NewEmptyDrop_role_statementContext() *Drop_role_statementContext { + var p = new(Drop_role_statementContext) antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1) - p.RuleIndex = KuneiformParserRULE_sql + p.RuleIndex = KuneiformParserRULE_drop_role_statement return p } -func InitEmptySqlContext(p *SqlContext) { +func InitEmptyDrop_role_statementContext(p *Drop_role_statementContext) { antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1) - p.RuleIndex = KuneiformParserRULE_sql + p.RuleIndex = KuneiformParserRULE_drop_role_statement } -func (*SqlContext) IsSqlContext() {} +func (*Drop_role_statementContext) IsDrop_role_statementContext() {} -func NewSqlContext(parser antlr.Parser, parent antlr.ParserRuleContext, invokingState int) *SqlContext { - var p = new(SqlContext) +func NewDrop_role_statementContext(parser antlr.Parser, parent antlr.ParserRuleContext, invokingState int) *Drop_role_statementContext { + var p = new(Drop_role_statementContext) antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, parent, invokingState) p.parser = parser - p.RuleIndex = KuneiformParserRULE_sql + p.RuleIndex = KuneiformParserRULE_drop_role_statement return p } -func (s *SqlContext) GetParser() antlr.Parser { return s.parser } +func (s *Drop_role_statementContext) GetParser() antlr.Parser { return s.parser } + +func (s *Drop_role_statementContext) DROP() antlr.TerminalNode { + return s.GetToken(KuneiformParserDROP, 0) +} + +func (s *Drop_role_statementContext) ROLE() antlr.TerminalNode { + return s.GetToken(KuneiformParserROLE, 0) +} -func (s *SqlContext) Sql_statement() ISql_statementContext { +func (s *Drop_role_statementContext) Identifier() IIdentifierContext { var t antlr.RuleContext for _, ctx := range s.GetChildren() { - if _, ok := ctx.(ISql_statementContext); ok { + if _, ok := ctx.(IIdentifierContext); ok { t = ctx.(antlr.RuleContext) break } @@ -7517,47 +8171,87 @@ func (s *SqlContext) Sql_statement() ISql_statementContext { return nil } - return t.(ISql_statementContext) + return t.(IIdentifierContext) } -func (s *SqlContext) SCOL() antlr.TerminalNode { - return s.GetToken(KuneiformParserSCOL, 0) +func (s 
*Drop_role_statementContext) IF() antlr.TerminalNode { + return s.GetToken(KuneiformParserIF, 0) } -func (s *SqlContext) GetRuleContext() antlr.RuleContext { - return s +func (s *Drop_role_statementContext) EXISTS() antlr.TerminalNode { + return s.GetToken(KuneiformParserEXISTS, 0) +} + +func (s *Drop_role_statementContext) GetRuleContext() antlr.RuleContext { + return s } -func (s *SqlContext) ToStringTree(ruleNames []string, recog antlr.Recognizer) string { +func (s *Drop_role_statementContext) ToStringTree(ruleNames []string, recog antlr.Recognizer) string { return antlr.TreesStringTree(s, ruleNames, recog) } -func (s *SqlContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { +func (s *Drop_role_statementContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { switch t := visitor.(type) { case KuneiformParserVisitor: - return t.VisitSql(s) + return t.VisitDrop_role_statement(s) default: return t.VisitChildren(s) } } -func (p *KuneiformParser) Sql() (localctx ISqlContext) { - localctx = NewSqlContext(p, p.GetParserRuleContext(), p.GetState()) - p.EnterRule(localctx, 58, KuneiformParserRULE_sql) +func (p *KuneiformParser) Drop_role_statement() (localctx IDrop_role_statementContext) { + localctx = NewDrop_role_statementContext(p, p.GetParserRuleContext(), p.GetState()) + p.EnterRule(localctx, 52, KuneiformParserRULE_drop_role_statement) + var _la int + p.EnterOuterAlt(localctx, 1) { - p.SetState(444) - p.Sql_statement() + p.SetState(483) + p.Match(KuneiformParserDROP) + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } } { - p.SetState(445) - p.Match(KuneiformParserSCOL) + p.SetState(484) + p.Match(KuneiformParserROLE) if p.HasError() { // Recognition error - abort rule goto errorExit } } + p.SetState(487) + p.GetErrorHandler().Sync(p) + if p.HasError() { + goto errorExit + } + _la = p.GetTokenStream().LA(1) + + if _la == KuneiformParserIF { + { + p.SetState(485) + p.Match(KuneiformParserIF) + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } + } + { + p.SetState(486) + p.Match(KuneiformParserEXISTS) + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } + } + + } + { + p.SetState(489) + p.Identifier() + } errorExit: if p.HasError() { @@ -7572,112 +8266,114 @@ errorExit: goto errorExit // Trick to prevent compiler error if the label is not used } -// ISql_statementContext is an interface to support dynamic dispatch. -type ISql_statementContext interface { +// IGrant_statementContext is an interface to support dynamic dispatch. +type IGrant_statementContext interface { antlr.ParserRuleContext // GetParser returns the parser. GetParser() antlr.Parser + // GetUser returns the user token. + GetUser() antlr.Token + + // SetUser sets the user token. + SetUser(antlr.Token) + + // GetGrant_role returns the grant_role rule contexts. + GetGrant_role() IIdentifierContext + + // GetNamespace returns the namespace rule contexts. + GetNamespace() IIdentifierContext + + // GetRole returns the role rule contexts. + GetRole() IIdentifierContext + + // SetGrant_role sets the grant_role rule contexts. + SetGrant_role(IIdentifierContext) + + // SetNamespace sets the namespace rule contexts. + SetNamespace(IIdentifierContext) + + // SetRole sets the role rule contexts. 
+ SetRole(IIdentifierContext) + // Getter signatures - Select_statement() ISelect_statementContext - Update_statement() IUpdate_statementContext - Insert_statement() IInsert_statementContext - Delete_statement() IDelete_statementContext - WITH() antlr.TerminalNode - AllCommon_table_expression() []ICommon_table_expressionContext - Common_table_expression(i int) ICommon_table_expressionContext - AllCOMMA() []antlr.TerminalNode - COMMA(i int) antlr.TerminalNode + GRANT() antlr.TerminalNode + TO() antlr.TerminalNode + Privilege_list() IPrivilege_listContext + AllIdentifier() []IIdentifierContext + Identifier(i int) IIdentifierContext + ON() antlr.TerminalNode + STRING_() antlr.TerminalNode - // IsSql_statementContext differentiates from other interfaces. - IsSql_statementContext() + // IsGrant_statementContext differentiates from other interfaces. + IsGrant_statementContext() } -type Sql_statementContext struct { +type Grant_statementContext struct { antlr.BaseParserRuleContext - parser antlr.Parser + parser antlr.Parser + grant_role IIdentifierContext + namespace IIdentifierContext + role IIdentifierContext + user antlr.Token } -func NewEmptySql_statementContext() *Sql_statementContext { - var p = new(Sql_statementContext) +func NewEmptyGrant_statementContext() *Grant_statementContext { + var p = new(Grant_statementContext) antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1) - p.RuleIndex = KuneiformParserRULE_sql_statement + p.RuleIndex = KuneiformParserRULE_grant_statement return p } -func InitEmptySql_statementContext(p *Sql_statementContext) { +func InitEmptyGrant_statementContext(p *Grant_statementContext) { antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1) - p.RuleIndex = KuneiformParserRULE_sql_statement + p.RuleIndex = KuneiformParserRULE_grant_statement } -func (*Sql_statementContext) IsSql_statementContext() {} +func (*Grant_statementContext) IsGrant_statementContext() {} -func NewSql_statementContext(parser antlr.Parser, parent antlr.ParserRuleContext, invokingState int) *Sql_statementContext { - var p = new(Sql_statementContext) +func NewGrant_statementContext(parser antlr.Parser, parent antlr.ParserRuleContext, invokingState int) *Grant_statementContext { + var p = new(Grant_statementContext) antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, parent, invokingState) p.parser = parser - p.RuleIndex = KuneiformParserRULE_sql_statement + p.RuleIndex = KuneiformParserRULE_grant_statement return p } -func (s *Sql_statementContext) GetParser() antlr.Parser { return s.parser } +func (s *Grant_statementContext) GetParser() antlr.Parser { return s.parser } -func (s *Sql_statementContext) Select_statement() ISelect_statementContext { - var t antlr.RuleContext - for _, ctx := range s.GetChildren() { - if _, ok := ctx.(ISelect_statementContext); ok { - t = ctx.(antlr.RuleContext) - break - } - } +func (s *Grant_statementContext) GetUser() antlr.Token { return s.user } - if t == nil { - return nil - } +func (s *Grant_statementContext) SetUser(v antlr.Token) { s.user = v } - return t.(ISelect_statementContext) -} +func (s *Grant_statementContext) GetGrant_role() IIdentifierContext { return s.grant_role } -func (s *Sql_statementContext) Update_statement() IUpdate_statementContext { - var t antlr.RuleContext - for _, ctx := range s.GetChildren() { - if _, ok := ctx.(IUpdate_statementContext); ok { - t = ctx.(antlr.RuleContext) - break - } - } +func (s *Grant_statementContext) GetNamespace() IIdentifierContext { return s.namespace } - if t == nil { - return 
nil - } +func (s *Grant_statementContext) GetRole() IIdentifierContext { return s.role } - return t.(IUpdate_statementContext) -} +func (s *Grant_statementContext) SetGrant_role(v IIdentifierContext) { s.grant_role = v } -func (s *Sql_statementContext) Insert_statement() IInsert_statementContext { - var t antlr.RuleContext - for _, ctx := range s.GetChildren() { - if _, ok := ctx.(IInsert_statementContext); ok { - t = ctx.(antlr.RuleContext) - break - } - } +func (s *Grant_statementContext) SetNamespace(v IIdentifierContext) { s.namespace = v } - if t == nil { - return nil - } +func (s *Grant_statementContext) SetRole(v IIdentifierContext) { s.role = v } - return t.(IInsert_statementContext) +func (s *Grant_statementContext) GRANT() antlr.TerminalNode { + return s.GetToken(KuneiformParserGRANT, 0) } -func (s *Sql_statementContext) Delete_statement() IDelete_statementContext { +func (s *Grant_statementContext) TO() antlr.TerminalNode { + return s.GetToken(KuneiformParserTO, 0) +} + +func (s *Grant_statementContext) Privilege_list() IPrivilege_listContext { var t antlr.RuleContext for _, ctx := range s.GetChildren() { - if _, ok := ctx.(IDelete_statementContext); ok { + if _, ok := ctx.(IPrivilege_listContext); ok { t = ctx.(antlr.RuleContext) break } @@ -7687,27 +8383,23 @@ func (s *Sql_statementContext) Delete_statement() IDelete_statementContext { return nil } - return t.(IDelete_statementContext) -} - -func (s *Sql_statementContext) WITH() antlr.TerminalNode { - return s.GetToken(KuneiformParserWITH, 0) + return t.(IPrivilege_listContext) } -func (s *Sql_statementContext) AllCommon_table_expression() []ICommon_table_expressionContext { +func (s *Grant_statementContext) AllIdentifier() []IIdentifierContext { children := s.GetChildren() len := 0 for _, ctx := range children { - if _, ok := ctx.(ICommon_table_expressionContext); ok { + if _, ok := ctx.(IIdentifierContext); ok { len++ } } - tst := make([]ICommon_table_expressionContext, len) + tst := make([]IIdentifierContext, len) i := 0 for _, ctx := range children { - if t, ok := ctx.(ICommon_table_expressionContext); ok { - tst[i] = t.(ICommon_table_expressionContext) + if t, ok := ctx.(IIdentifierContext); ok { + tst[i] = t.(IIdentifierContext) i++ } } @@ -7715,11 +8407,11 @@ func (s *Sql_statementContext) AllCommon_table_expression() []ICommon_table_expr return tst } -func (s *Sql_statementContext) Common_table_expression(i int) ICommon_table_expressionContext { +func (s *Grant_statementContext) Identifier(i int) IIdentifierContext { var t antlr.RuleContext j := 0 for _, ctx := range s.GetChildren() { - if _, ok := ctx.(ICommon_table_expressionContext); ok { + if _, ok := ctx.(IIdentifierContext); ok { if j == i { t = ctx.(antlr.RuleContext) break @@ -7732,120 +8424,135 @@ func (s *Sql_statementContext) Common_table_expression(i int) ICommon_table_expr return nil } - return t.(ICommon_table_expressionContext) + return t.(IIdentifierContext) } -func (s *Sql_statementContext) AllCOMMA() []antlr.TerminalNode { - return s.GetTokens(KuneiformParserCOMMA) +func (s *Grant_statementContext) ON() antlr.TerminalNode { + return s.GetToken(KuneiformParserON, 0) } -func (s *Sql_statementContext) COMMA(i int) antlr.TerminalNode { - return s.GetToken(KuneiformParserCOMMA, i) +func (s *Grant_statementContext) STRING_() antlr.TerminalNode { + return s.GetToken(KuneiformParserSTRING_, 0) } -func (s *Sql_statementContext) GetRuleContext() antlr.RuleContext { +func (s *Grant_statementContext) GetRuleContext() antlr.RuleContext { return s } -func (s 
*Sql_statementContext) ToStringTree(ruleNames []string, recog antlr.Recognizer) string { +func (s *Grant_statementContext) ToStringTree(ruleNames []string, recog antlr.Recognizer) string { return antlr.TreesStringTree(s, ruleNames, recog) } -func (s *Sql_statementContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { +func (s *Grant_statementContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { switch t := visitor.(type) { case KuneiformParserVisitor: - return t.VisitSql_statement(s) + return t.VisitGrant_statement(s) default: return t.VisitChildren(s) } } -func (p *KuneiformParser) Sql_statement() (localctx ISql_statementContext) { - localctx = NewSql_statementContext(p, p.GetParserRuleContext(), p.GetState()) - p.EnterRule(localctx, 60, KuneiformParserRULE_sql_statement) +func (p *KuneiformParser) Grant_statement() (localctx IGrant_statementContext) { + localctx = NewGrant_statementContext(p, p.GetParserRuleContext(), p.GetState()) + p.EnterRule(localctx, 54, KuneiformParserRULE_grant_statement) var _la int p.EnterOuterAlt(localctx, 1) - p.SetState(456) + { + p.SetState(491) + p.Match(KuneiformParserGRANT) + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } + } + p.SetState(494) + p.GetErrorHandler().Sync(p) + if p.HasError() { + goto errorExit + } + + switch p.GetTokenStream().LA(1) { + case KuneiformParserUSE, KuneiformParserCREATE, KuneiformParserALTER, KuneiformParserDROP, KuneiformParserDELETE, KuneiformParserUPDATE, KuneiformParserSELECT, KuneiformParserINSERT, KuneiformParserROLES, KuneiformParserCALL: + { + p.SetState(492) + p.Privilege_list() + } + + case KuneiformParserDOUBLE_QUOTE, KuneiformParserIDENTIFIER: + { + p.SetState(493) + + var _x = p.Identifier() + + localctx.(*Grant_statementContext).grant_role = _x + } + + default: + p.SetError(antlr.NewNoViableAltException(p, nil, nil, nil, nil, nil)) + goto errorExit + } + p.SetState(498) p.GetErrorHandler().Sync(p) if p.HasError() { goto errorExit } _la = p.GetTokenStream().LA(1) - if _la == KuneiformParserWITH { + if _la == KuneiformParserON { { - p.SetState(447) - p.Match(KuneiformParserWITH) + p.SetState(496) + p.Match(KuneiformParserON) if p.HasError() { // Recognition error - abort rule goto errorExit } } { - p.SetState(448) - p.Common_table_expression() - } - p.SetState(453) - p.GetErrorHandler().Sync(p) - if p.HasError() { - goto errorExit - } - _la = p.GetTokenStream().LA(1) + p.SetState(497) - for _la == KuneiformParserCOMMA { - { - p.SetState(449) - p.Match(KuneiformParserCOMMA) - if p.HasError() { - // Recognition error - abort rule - goto errorExit - } - } - { - p.SetState(450) - p.Common_table_expression() - } + var _x = p.Identifier() - p.SetState(455) - p.GetErrorHandler().Sync(p) - if p.HasError() { - goto errorExit - } - _la = p.GetTokenStream().LA(1) + localctx.(*Grant_statementContext).namespace = _x } } - p.SetState(462) + { + p.SetState(500) + p.Match(KuneiformParserTO) + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } + } + p.SetState(503) p.GetErrorHandler().Sync(p) if p.HasError() { goto errorExit } switch p.GetTokenStream().LA(1) { - case KuneiformParserSELECT: + case KuneiformParserDOUBLE_QUOTE, KuneiformParserIDENTIFIER: { - p.SetState(458) - p.Select_statement() - } + p.SetState(501) - case KuneiformParserUPDATE: - { - p.SetState(459) - p.Update_statement() - } + var _x = p.Identifier() - case KuneiformParserINSERT: - { - p.SetState(460) - p.Insert_statement() + localctx.(*Grant_statementContext).role = _x } - case 
KuneiformParserDELETE: + case KuneiformParserSTRING_: { - p.SetState(461) - p.Delete_statement() + p.SetState(502) + + var _m = p.Match(KuneiformParserSTRING_) + + localctx.(*Grant_statementContext).user = _m + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } } default: @@ -7866,62 +8573,127 @@ errorExit: goto errorExit // Trick to prevent compiler error if the label is not used } -// ICommon_table_expressionContext is an interface to support dynamic dispatch. -type ICommon_table_expressionContext interface { +// IRevoke_statementContext is an interface to support dynamic dispatch. +type IRevoke_statementContext interface { antlr.ParserRuleContext // GetParser returns the parser. GetParser() antlr.Parser + // GetUser returns the user token. + GetUser() antlr.Token + + // SetUser sets the user token. + SetUser(antlr.Token) + + // GetGrant_role returns the grant_role rule contexts. + GetGrant_role() IIdentifierContext + + // GetNamespace returns the namespace rule contexts. + GetNamespace() IIdentifierContext + + // GetRole returns the role rule contexts. + GetRole() IIdentifierContext + + // SetGrant_role sets the grant_role rule contexts. + SetGrant_role(IIdentifierContext) + + // SetNamespace sets the namespace rule contexts. + SetNamespace(IIdentifierContext) + + // SetRole sets the role rule contexts. + SetRole(IIdentifierContext) + // Getter signatures + REVOKE() antlr.TerminalNode + FROM() antlr.TerminalNode + Privilege_list() IPrivilege_listContext AllIdentifier() []IIdentifierContext Identifier(i int) IIdentifierContext - AS() antlr.TerminalNode - AllLPAREN() []antlr.TerminalNode - LPAREN(i int) antlr.TerminalNode - Select_statement() ISelect_statementContext - AllRPAREN() []antlr.TerminalNode - RPAREN(i int) antlr.TerminalNode - AllCOMMA() []antlr.TerminalNode - COMMA(i int) antlr.TerminalNode + ON() antlr.TerminalNode + STRING_() antlr.TerminalNode - // IsCommon_table_expressionContext differentiates from other interfaces. - IsCommon_table_expressionContext() + // IsRevoke_statementContext differentiates from other interfaces. 
+ IsRevoke_statementContext() } -type Common_table_expressionContext struct { +type Revoke_statementContext struct { antlr.BaseParserRuleContext - parser antlr.Parser + parser antlr.Parser + grant_role IIdentifierContext + namespace IIdentifierContext + role IIdentifierContext + user antlr.Token } -func NewEmptyCommon_table_expressionContext() *Common_table_expressionContext { - var p = new(Common_table_expressionContext) +func NewEmptyRevoke_statementContext() *Revoke_statementContext { + var p = new(Revoke_statementContext) antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1) - p.RuleIndex = KuneiformParserRULE_common_table_expression + p.RuleIndex = KuneiformParserRULE_revoke_statement return p } -func InitEmptyCommon_table_expressionContext(p *Common_table_expressionContext) { +func InitEmptyRevoke_statementContext(p *Revoke_statementContext) { antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1) - p.RuleIndex = KuneiformParserRULE_common_table_expression + p.RuleIndex = KuneiformParserRULE_revoke_statement } -func (*Common_table_expressionContext) IsCommon_table_expressionContext() {} +func (*Revoke_statementContext) IsRevoke_statementContext() {} -func NewCommon_table_expressionContext(parser antlr.Parser, parent antlr.ParserRuleContext, invokingState int) *Common_table_expressionContext { - var p = new(Common_table_expressionContext) +func NewRevoke_statementContext(parser antlr.Parser, parent antlr.ParserRuleContext, invokingState int) *Revoke_statementContext { + var p = new(Revoke_statementContext) antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, parent, invokingState) p.parser = parser - p.RuleIndex = KuneiformParserRULE_common_table_expression + p.RuleIndex = KuneiformParserRULE_revoke_statement return p } -func (s *Common_table_expressionContext) GetParser() antlr.Parser { return s.parser } +func (s *Revoke_statementContext) GetParser() antlr.Parser { return s.parser } -func (s *Common_table_expressionContext) AllIdentifier() []IIdentifierContext { +func (s *Revoke_statementContext) GetUser() antlr.Token { return s.user } + +func (s *Revoke_statementContext) SetUser(v antlr.Token) { s.user = v } + +func (s *Revoke_statementContext) GetGrant_role() IIdentifierContext { return s.grant_role } + +func (s *Revoke_statementContext) GetNamespace() IIdentifierContext { return s.namespace } + +func (s *Revoke_statementContext) GetRole() IIdentifierContext { return s.role } + +func (s *Revoke_statementContext) SetGrant_role(v IIdentifierContext) { s.grant_role = v } + +func (s *Revoke_statementContext) SetNamespace(v IIdentifierContext) { s.namespace = v } + +func (s *Revoke_statementContext) SetRole(v IIdentifierContext) { s.role = v } + +func (s *Revoke_statementContext) REVOKE() antlr.TerminalNode { + return s.GetToken(KuneiformParserREVOKE, 0) +} + +func (s *Revoke_statementContext) FROM() antlr.TerminalNode { + return s.GetToken(KuneiformParserFROM, 0) +} + +func (s *Revoke_statementContext) Privilege_list() IPrivilege_listContext { + var t antlr.RuleContext + for _, ctx := range s.GetChildren() { + if _, ok := ctx.(IPrivilege_listContext); ok { + t = ctx.(antlr.RuleContext) + break + } + } + + if t == nil { + return nil + } + + return t.(IPrivilege_listContext) +} + +func (s *Revoke_statementContext) AllIdentifier() []IIdentifierContext { children := s.GetChildren() len := 0 for _, ctx := range children { @@ -7942,7 +8714,7 @@ func (s *Common_table_expressionContext) AllIdentifier() []IIdentifierContext { return tst } -func (s 
*Common_table_expressionContext) Identifier(i int) IIdentifierContext { +func (s *Revoke_statementContext) Identifier(i int) IIdentifierContext { var t antlr.RuleContext j := 0 for _, ctx := range s.GetChildren() { @@ -7962,173 +8734,137 @@ func (s *Common_table_expressionContext) Identifier(i int) IIdentifierContext { return t.(IIdentifierContext) } -func (s *Common_table_expressionContext) AS() antlr.TerminalNode { - return s.GetToken(KuneiformParserAS, 0) -} - -func (s *Common_table_expressionContext) AllLPAREN() []antlr.TerminalNode { - return s.GetTokens(KuneiformParserLPAREN) -} - -func (s *Common_table_expressionContext) LPAREN(i int) antlr.TerminalNode { - return s.GetToken(KuneiformParserLPAREN, i) -} - -func (s *Common_table_expressionContext) Select_statement() ISelect_statementContext { - var t antlr.RuleContext - for _, ctx := range s.GetChildren() { - if _, ok := ctx.(ISelect_statementContext); ok { - t = ctx.(antlr.RuleContext) - break - } - } - - if t == nil { - return nil - } - - return t.(ISelect_statementContext) -} - -func (s *Common_table_expressionContext) AllRPAREN() []antlr.TerminalNode { - return s.GetTokens(KuneiformParserRPAREN) -} - -func (s *Common_table_expressionContext) RPAREN(i int) antlr.TerminalNode { - return s.GetToken(KuneiformParserRPAREN, i) -} - -func (s *Common_table_expressionContext) AllCOMMA() []antlr.TerminalNode { - return s.GetTokens(KuneiformParserCOMMA) +func (s *Revoke_statementContext) ON() antlr.TerminalNode { + return s.GetToken(KuneiformParserON, 0) } -func (s *Common_table_expressionContext) COMMA(i int) antlr.TerminalNode { - return s.GetToken(KuneiformParserCOMMA, i) +func (s *Revoke_statementContext) STRING_() antlr.TerminalNode { + return s.GetToken(KuneiformParserSTRING_, 0) } -func (s *Common_table_expressionContext) GetRuleContext() antlr.RuleContext { +func (s *Revoke_statementContext) GetRuleContext() antlr.RuleContext { return s } -func (s *Common_table_expressionContext) ToStringTree(ruleNames []string, recog antlr.Recognizer) string { +func (s *Revoke_statementContext) ToStringTree(ruleNames []string, recog antlr.Recognizer) string { return antlr.TreesStringTree(s, ruleNames, recog) } -func (s *Common_table_expressionContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { +func (s *Revoke_statementContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { switch t := visitor.(type) { case KuneiformParserVisitor: - return t.VisitCommon_table_expression(s) + return t.VisitRevoke_statement(s) default: return t.VisitChildren(s) } } -func (p *KuneiformParser) Common_table_expression() (localctx ICommon_table_expressionContext) { - localctx = NewCommon_table_expressionContext(p, p.GetParserRuleContext(), p.GetState()) - p.EnterRule(localctx, 62, KuneiformParserRULE_common_table_expression) +func (p *KuneiformParser) Revoke_statement() (localctx IRevoke_statementContext) { + localctx = NewRevoke_statementContext(p, p.GetParserRuleContext(), p.GetState()) + p.EnterRule(localctx, 56, KuneiformParserRULE_revoke_statement) var _la int p.EnterOuterAlt(localctx, 1) { - p.SetState(464) - p.Identifier() + p.SetState(505) + p.Match(KuneiformParserREVOKE) + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } } - p.SetState(477) + p.SetState(508) p.GetErrorHandler().Sync(p) if p.HasError() { goto errorExit } - _la = p.GetTokenStream().LA(1) - if _la == KuneiformParserLPAREN { + switch p.GetTokenStream().LA(1) { + case KuneiformParserUSE, KuneiformParserCREATE, KuneiformParserALTER, KuneiformParserDROP, 
KuneiformParserDELETE, KuneiformParserUPDATE, KuneiformParserSELECT, KuneiformParserINSERT, KuneiformParserROLES, KuneiformParserCALL: { - p.SetState(465) - p.Match(KuneiformParserLPAREN) - if p.HasError() { - // Recognition error - abort rule - goto errorExit - } - } - p.SetState(474) - p.GetErrorHandler().Sync(p) - if p.HasError() { - goto errorExit + p.SetState(506) + p.Privilege_list() } - _la = p.GetTokenStream().LA(1) - - if _la == KuneiformParserDOUBLE_QUOTE || _la == KuneiformParserIDENTIFIER { - { - p.SetState(466) - p.Identifier() - } - p.SetState(471) - p.GetErrorHandler().Sync(p) - if p.HasError() { - goto errorExit - } - _la = p.GetTokenStream().LA(1) - for _la == KuneiformParserCOMMA { - { - p.SetState(467) - p.Match(KuneiformParserCOMMA) - if p.HasError() { - // Recognition error - abort rule - goto errorExit - } - } - { - p.SetState(468) - p.Identifier() - } + case KuneiformParserDOUBLE_QUOTE, KuneiformParserIDENTIFIER: + { + p.SetState(507) - p.SetState(473) - p.GetErrorHandler().Sync(p) - if p.HasError() { - goto errorExit - } - _la = p.GetTokenStream().LA(1) - } + var _x = p.Identifier() + localctx.(*Revoke_statementContext).grant_role = _x } + + default: + p.SetError(antlr.NewNoViableAltException(p, nil, nil, nil, nil, nil)) + goto errorExit + } + p.SetState(512) + p.GetErrorHandler().Sync(p) + if p.HasError() { + goto errorExit + } + _la = p.GetTokenStream().LA(1) + + if _la == KuneiformParserON { { - p.SetState(476) - p.Match(KuneiformParserRPAREN) + p.SetState(510) + p.Match(KuneiformParserON) if p.HasError() { // Recognition error - abort rule goto errorExit } } + { + p.SetState(511) - } - { - p.SetState(479) - p.Match(KuneiformParserAS) - if p.HasError() { - // Recognition error - abort rule - goto errorExit + var _x = p.Identifier() + + localctx.(*Revoke_statementContext).namespace = _x } + } { - p.SetState(480) - p.Match(KuneiformParserLPAREN) + p.SetState(514) + p.Match(KuneiformParserFROM) if p.HasError() { // Recognition error - abort rule goto errorExit } } - { - p.SetState(481) - p.Select_statement() + p.SetState(517) + p.GetErrorHandler().Sync(p) + if p.HasError() { + goto errorExit } - { - p.SetState(482) - p.Match(KuneiformParserRPAREN) - if p.HasError() { - // Recognition error - abort rule - goto errorExit + + switch p.GetTokenStream().LA(1) { + case KuneiformParserDOUBLE_QUOTE, KuneiformParserIDENTIFIER: + { + p.SetState(515) + + var _x = p.Identifier() + + localctx.(*Revoke_statementContext).role = _x + } + + case KuneiformParserSTRING_: + { + p.SetState(516) + + var _m = p.Match(KuneiformParserSTRING_) + + localctx.(*Revoke_statementContext).user = _m + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } } + + default: + p.SetError(antlr.NewNoViableAltException(p, nil, nil, nil, nil, nil)) + goto errorExit } errorExit: @@ -8144,101 +8880,69 @@ errorExit: goto errorExit // Trick to prevent compiler error if the label is not used } -// ISelect_statementContext is an interface to support dynamic dispatch. -type ISelect_statementContext interface { +// IPrivilege_listContext is an interface to support dynamic dispatch. +type IPrivilege_listContext interface { antlr.ParserRuleContext // GetParser returns the parser. GetParser() antlr.Parser - // GetLimit returns the limit rule contexts. - GetLimit() ISql_exprContext - - // GetOffset returns the offset rule contexts. - GetOffset() ISql_exprContext - - // SetLimit sets the limit rule contexts. - SetLimit(ISql_exprContext) - - // SetOffset sets the offset rule contexts. 
- SetOffset(ISql_exprContext) - // Getter signatures - AllSelect_core() []ISelect_coreContext - Select_core(i int) ISelect_coreContext - AllCompound_operator() []ICompound_operatorContext - Compound_operator(i int) ICompound_operatorContext - ORDER() antlr.TerminalNode - BY() antlr.TerminalNode - AllOrdering_term() []IOrdering_termContext - Ordering_term(i int) IOrdering_termContext - LIMIT() antlr.TerminalNode - OFFSET() antlr.TerminalNode - AllSql_expr() []ISql_exprContext - Sql_expr(i int) ISql_exprContext + AllPrivilege() []IPrivilegeContext + Privilege(i int) IPrivilegeContext AllCOMMA() []antlr.TerminalNode COMMA(i int) antlr.TerminalNode - // IsSelect_statementContext differentiates from other interfaces. - IsSelect_statementContext() + // IsPrivilege_listContext differentiates from other interfaces. + IsPrivilege_listContext() } -type Select_statementContext struct { +type Privilege_listContext struct { antlr.BaseParserRuleContext parser antlr.Parser - limit ISql_exprContext - offset ISql_exprContext } -func NewEmptySelect_statementContext() *Select_statementContext { - var p = new(Select_statementContext) +func NewEmptyPrivilege_listContext() *Privilege_listContext { + var p = new(Privilege_listContext) antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1) - p.RuleIndex = KuneiformParserRULE_select_statement + p.RuleIndex = KuneiformParserRULE_privilege_list return p } -func InitEmptySelect_statementContext(p *Select_statementContext) { +func InitEmptyPrivilege_listContext(p *Privilege_listContext) { antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1) - p.RuleIndex = KuneiformParserRULE_select_statement + p.RuleIndex = KuneiformParserRULE_privilege_list } -func (*Select_statementContext) IsSelect_statementContext() {} +func (*Privilege_listContext) IsPrivilege_listContext() {} -func NewSelect_statementContext(parser antlr.Parser, parent antlr.ParserRuleContext, invokingState int) *Select_statementContext { - var p = new(Select_statementContext) +func NewPrivilege_listContext(parser antlr.Parser, parent antlr.ParserRuleContext, invokingState int) *Privilege_listContext { + var p = new(Privilege_listContext) antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, parent, invokingState) p.parser = parser - p.RuleIndex = KuneiformParserRULE_select_statement + p.RuleIndex = KuneiformParserRULE_privilege_list return p } -func (s *Select_statementContext) GetParser() antlr.Parser { return s.parser } - -func (s *Select_statementContext) GetLimit() ISql_exprContext { return s.limit } - -func (s *Select_statementContext) GetOffset() ISql_exprContext { return s.offset } - -func (s *Select_statementContext) SetLimit(v ISql_exprContext) { s.limit = v } - -func (s *Select_statementContext) SetOffset(v ISql_exprContext) { s.offset = v } +func (s *Privilege_listContext) GetParser() antlr.Parser { return s.parser } -func (s *Select_statementContext) AllSelect_core() []ISelect_coreContext { +func (s *Privilege_listContext) AllPrivilege() []IPrivilegeContext { children := s.GetChildren() len := 0 for _, ctx := range children { - if _, ok := ctx.(ISelect_coreContext); ok { + if _, ok := ctx.(IPrivilegeContext); ok { len++ } } - tst := make([]ISelect_coreContext, len) + tst := make([]IPrivilegeContext, len) i := 0 for _, ctx := range children { - if t, ok := ctx.(ISelect_coreContext); ok { - tst[i] = t.(ISelect_coreContext) + if t, ok := ctx.(IPrivilegeContext); ok { + tst[i] = t.(IPrivilegeContext) i++ } } @@ -8246,11 +8950,11 @@ func (s *Select_statementContext) 
AllSelect_core() []ISelect_coreContext { return tst } -func (s *Select_statementContext) Select_core(i int) ISelect_coreContext { +func (s *Privilege_listContext) Privilege(i int) IPrivilegeContext { var t antlr.RuleContext j := 0 for _, ctx := range s.GetChildren() { - if _, ok := ctx.(ISelect_coreContext); ok { + if _, ok := ctx.(IPrivilegeContext); ok { if j == i { t = ctx.(antlr.RuleContext) break @@ -8263,315 +8967,72 @@ func (s *Select_statementContext) Select_core(i int) ISelect_coreContext { return nil } - return t.(ISelect_coreContext) + return t.(IPrivilegeContext) } -func (s *Select_statementContext) AllCompound_operator() []ICompound_operatorContext { - children := s.GetChildren() - len := 0 - for _, ctx := range children { - if _, ok := ctx.(ICompound_operatorContext); ok { - len++ - } - } - - tst := make([]ICompound_operatorContext, len) - i := 0 - for _, ctx := range children { - if t, ok := ctx.(ICompound_operatorContext); ok { - tst[i] = t.(ICompound_operatorContext) - i++ - } - } - - return tst -} - -func (s *Select_statementContext) Compound_operator(i int) ICompound_operatorContext { - var t antlr.RuleContext - j := 0 - for _, ctx := range s.GetChildren() { - if _, ok := ctx.(ICompound_operatorContext); ok { - if j == i { - t = ctx.(antlr.RuleContext) - break - } - j++ - } - } - - if t == nil { - return nil - } - - return t.(ICompound_operatorContext) -} - -func (s *Select_statementContext) ORDER() antlr.TerminalNode { - return s.GetToken(KuneiformParserORDER, 0) -} - -func (s *Select_statementContext) BY() antlr.TerminalNode { - return s.GetToken(KuneiformParserBY, 0) -} - -func (s *Select_statementContext) AllOrdering_term() []IOrdering_termContext { - children := s.GetChildren() - len := 0 - for _, ctx := range children { - if _, ok := ctx.(IOrdering_termContext); ok { - len++ - } - } - - tst := make([]IOrdering_termContext, len) - i := 0 - for _, ctx := range children { - if t, ok := ctx.(IOrdering_termContext); ok { - tst[i] = t.(IOrdering_termContext) - i++ - } - } - - return tst -} - -func (s *Select_statementContext) Ordering_term(i int) IOrdering_termContext { - var t antlr.RuleContext - j := 0 - for _, ctx := range s.GetChildren() { - if _, ok := ctx.(IOrdering_termContext); ok { - if j == i { - t = ctx.(antlr.RuleContext) - break - } - j++ - } - } - - if t == nil { - return nil - } - - return t.(IOrdering_termContext) -} - -func (s *Select_statementContext) LIMIT() antlr.TerminalNode { - return s.GetToken(KuneiformParserLIMIT, 0) -} - -func (s *Select_statementContext) OFFSET() antlr.TerminalNode { - return s.GetToken(KuneiformParserOFFSET, 0) -} - -func (s *Select_statementContext) AllSql_expr() []ISql_exprContext { - children := s.GetChildren() - len := 0 - for _, ctx := range children { - if _, ok := ctx.(ISql_exprContext); ok { - len++ - } - } - - tst := make([]ISql_exprContext, len) - i := 0 - for _, ctx := range children { - if t, ok := ctx.(ISql_exprContext); ok { - tst[i] = t.(ISql_exprContext) - i++ - } - } - - return tst -} - -func (s *Select_statementContext) Sql_expr(i int) ISql_exprContext { - var t antlr.RuleContext - j := 0 - for _, ctx := range s.GetChildren() { - if _, ok := ctx.(ISql_exprContext); ok { - if j == i { - t = ctx.(antlr.RuleContext) - break - } - j++ - } - } - - if t == nil { - return nil - } - - return t.(ISql_exprContext) -} - -func (s *Select_statementContext) AllCOMMA() []antlr.TerminalNode { +func (s *Privilege_listContext) AllCOMMA() []antlr.TerminalNode { return s.GetTokens(KuneiformParserCOMMA) } -func (s 
*Select_statementContext) COMMA(i int) antlr.TerminalNode { +func (s *Privilege_listContext) COMMA(i int) antlr.TerminalNode { return s.GetToken(KuneiformParserCOMMA, i) } -func (s *Select_statementContext) GetRuleContext() antlr.RuleContext { +func (s *Privilege_listContext) GetRuleContext() antlr.RuleContext { return s } -func (s *Select_statementContext) ToStringTree(ruleNames []string, recog antlr.Recognizer) string { +func (s *Privilege_listContext) ToStringTree(ruleNames []string, recog antlr.Recognizer) string { return antlr.TreesStringTree(s, ruleNames, recog) } -func (s *Select_statementContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { +func (s *Privilege_listContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { switch t := visitor.(type) { case KuneiformParserVisitor: - return t.VisitSelect_statement(s) + return t.VisitPrivilege_list(s) default: return t.VisitChildren(s) } } -func (p *KuneiformParser) Select_statement() (localctx ISelect_statementContext) { - localctx = NewSelect_statementContext(p, p.GetParserRuleContext(), p.GetState()) - p.EnterRule(localctx, 64, KuneiformParserRULE_select_statement) +func (p *KuneiformParser) Privilege_list() (localctx IPrivilege_listContext) { + localctx = NewPrivilege_listContext(p, p.GetParserRuleContext(), p.GetState()) + p.EnterRule(localctx, 58, KuneiformParserRULE_privilege_list) var _la int p.EnterOuterAlt(localctx, 1) { - p.SetState(484) - p.Select_core() - } - p.SetState(490) - p.GetErrorHandler().Sync(p) - if p.HasError() { - goto errorExit - } - _la = p.GetTokenStream().LA(1) - - for (int64((_la-97)) & ^0x3f) == 0 && ((int64(1)<<(_la-97))&7) != 0 { - { - p.SetState(485) - p.Compound_operator() - } - { - p.SetState(486) - p.Select_core() - } - - p.SetState(492) - p.GetErrorHandler().Sync(p) - if p.HasError() { - goto errorExit - } - _la = p.GetTokenStream().LA(1) + p.SetState(519) + p.Privilege() } - p.SetState(503) + p.SetState(524) p.GetErrorHandler().Sync(p) if p.HasError() { goto errorExit } _la = p.GetTokenStream().LA(1) - if _la == KuneiformParserORDER { - { - p.SetState(493) - p.Match(KuneiformParserORDER) - if p.HasError() { - // Recognition error - abort rule - goto errorExit - } - } + for _la == KuneiformParserCOMMA { { - p.SetState(494) - p.Match(KuneiformParserBY) + p.SetState(520) + p.Match(KuneiformParserCOMMA) if p.HasError() { // Recognition error - abort rule goto errorExit } } { - p.SetState(495) - p.Ordering_term() + p.SetState(521) + p.Privilege() } - p.SetState(500) + + p.SetState(526) p.GetErrorHandler().Sync(p) if p.HasError() { goto errorExit } _la = p.GetTokenStream().LA(1) - - for _la == KuneiformParserCOMMA { - { - p.SetState(496) - p.Match(KuneiformParserCOMMA) - if p.HasError() { - // Recognition error - abort rule - goto errorExit - } - } - { - p.SetState(497) - p.Ordering_term() - } - - p.SetState(502) - p.GetErrorHandler().Sync(p) - if p.HasError() { - goto errorExit - } - _la = p.GetTokenStream().LA(1) - } - - } - p.SetState(507) - p.GetErrorHandler().Sync(p) - if p.HasError() { - goto errorExit - } - _la = p.GetTokenStream().LA(1) - - if _la == KuneiformParserLIMIT { - { - p.SetState(505) - p.Match(KuneiformParserLIMIT) - if p.HasError() { - // Recognition error - abort rule - goto errorExit - } - } - { - p.SetState(506) - - var _x = p.sql_expr(0) - - localctx.(*Select_statementContext).limit = _x - } - - } - p.SetState(511) - p.GetErrorHandler().Sync(p) - if p.HasError() { - goto errorExit - } - _la = p.GetTokenStream().LA(1) - - if _la == KuneiformParserOFFSET { - { - 
p.SetState(509) - p.Match(KuneiformParserOFFSET) - if p.HasError() { - // Recognition error - abort rule - goto errorExit - } - } - { - p.SetState(510) - - var _x = p.sql_expr(0) - - localctx.(*Select_statementContext).offset = _x - } - } errorExit: @@ -8587,155 +9048,135 @@ errorExit: goto errorExit // Trick to prevent compiler error if the label is not used } -// ICompound_operatorContext is an interface to support dynamic dispatch. -type ICompound_operatorContext interface { +// IPrivilegeContext is an interface to support dynamic dispatch. +type IPrivilegeContext interface { antlr.ParserRuleContext // GetParser returns the parser. GetParser() antlr.Parser // Getter signatures - UNION() antlr.TerminalNode - ALL() antlr.TerminalNode - INTERSECT() antlr.TerminalNode - EXCEPT() antlr.TerminalNode + SELECT() antlr.TerminalNode + INSERT() antlr.TerminalNode + UPDATE() antlr.TerminalNode + DELETE() antlr.TerminalNode + CREATE() antlr.TerminalNode + DROP() antlr.TerminalNode + ALTER() antlr.TerminalNode + ROLES() antlr.TerminalNode + CALL() antlr.TerminalNode + USE() antlr.TerminalNode - // IsCompound_operatorContext differentiates from other interfaces. - IsCompound_operatorContext() + // IsPrivilegeContext differentiates from other interfaces. + IsPrivilegeContext() } -type Compound_operatorContext struct { +type PrivilegeContext struct { antlr.BaseParserRuleContext parser antlr.Parser } -func NewEmptyCompound_operatorContext() *Compound_operatorContext { - var p = new(Compound_operatorContext) +func NewEmptyPrivilegeContext() *PrivilegeContext { + var p = new(PrivilegeContext) antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1) - p.RuleIndex = KuneiformParserRULE_compound_operator + p.RuleIndex = KuneiformParserRULE_privilege return p } -func InitEmptyCompound_operatorContext(p *Compound_operatorContext) { +func InitEmptyPrivilegeContext(p *PrivilegeContext) { antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1) - p.RuleIndex = KuneiformParserRULE_compound_operator + p.RuleIndex = KuneiformParserRULE_privilege } -func (*Compound_operatorContext) IsCompound_operatorContext() {} +func (*PrivilegeContext) IsPrivilegeContext() {} -func NewCompound_operatorContext(parser antlr.Parser, parent antlr.ParserRuleContext, invokingState int) *Compound_operatorContext { - var p = new(Compound_operatorContext) +func NewPrivilegeContext(parser antlr.Parser, parent antlr.ParserRuleContext, invokingState int) *PrivilegeContext { + var p = new(PrivilegeContext) antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, parent, invokingState) p.parser = parser - p.RuleIndex = KuneiformParserRULE_compound_operator + p.RuleIndex = KuneiformParserRULE_privilege return p } -func (s *Compound_operatorContext) GetParser() antlr.Parser { return s.parser } +func (s *PrivilegeContext) GetParser() antlr.Parser { return s.parser } -func (s *Compound_operatorContext) UNION() antlr.TerminalNode { - return s.GetToken(KuneiformParserUNION, 0) +func (s *PrivilegeContext) SELECT() antlr.TerminalNode { + return s.GetToken(KuneiformParserSELECT, 0) } -func (s *Compound_operatorContext) ALL() antlr.TerminalNode { - return s.GetToken(KuneiformParserALL, 0) +func (s *PrivilegeContext) INSERT() antlr.TerminalNode { + return s.GetToken(KuneiformParserINSERT, 0) } -func (s *Compound_operatorContext) INTERSECT() antlr.TerminalNode { - return s.GetToken(KuneiformParserINTERSECT, 0) +func (s *PrivilegeContext) UPDATE() antlr.TerminalNode { + return s.GetToken(KuneiformParserUPDATE, 0) } -func (s 
*Compound_operatorContext) EXCEPT() antlr.TerminalNode { - return s.GetToken(KuneiformParserEXCEPT, 0) +func (s *PrivilegeContext) DELETE() antlr.TerminalNode { + return s.GetToken(KuneiformParserDELETE, 0) } -func (s *Compound_operatorContext) GetRuleContext() antlr.RuleContext { +func (s *PrivilegeContext) CREATE() antlr.TerminalNode { + return s.GetToken(KuneiformParserCREATE, 0) +} + +func (s *PrivilegeContext) DROP() antlr.TerminalNode { + return s.GetToken(KuneiformParserDROP, 0) +} + +func (s *PrivilegeContext) ALTER() antlr.TerminalNode { + return s.GetToken(KuneiformParserALTER, 0) +} + +func (s *PrivilegeContext) ROLES() antlr.TerminalNode { + return s.GetToken(KuneiformParserROLES, 0) +} + +func (s *PrivilegeContext) CALL() antlr.TerminalNode { + return s.GetToken(KuneiformParserCALL, 0) +} + +func (s *PrivilegeContext) USE() antlr.TerminalNode { + return s.GetToken(KuneiformParserUSE, 0) +} + +func (s *PrivilegeContext) GetRuleContext() antlr.RuleContext { return s } -func (s *Compound_operatorContext) ToStringTree(ruleNames []string, recog antlr.Recognizer) string { +func (s *PrivilegeContext) ToStringTree(ruleNames []string, recog antlr.Recognizer) string { return antlr.TreesStringTree(s, ruleNames, recog) } -func (s *Compound_operatorContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { +func (s *PrivilegeContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { switch t := visitor.(type) { case KuneiformParserVisitor: - return t.VisitCompound_operator(s) + return t.VisitPrivilege(s) default: return t.VisitChildren(s) } } -func (p *KuneiformParser) Compound_operator() (localctx ICompound_operatorContext) { - localctx = NewCompound_operatorContext(p, p.GetParserRuleContext(), p.GetState()) - p.EnterRule(localctx, 66, KuneiformParserRULE_compound_operator) +func (p *KuneiformParser) Privilege() (localctx IPrivilegeContext) { + localctx = NewPrivilegeContext(p, p.GetParserRuleContext(), p.GetState()) + p.EnterRule(localctx, 60, KuneiformParserRULE_privilege) var _la int - p.SetState(519) - p.GetErrorHandler().Sync(p) - if p.HasError() { - goto errorExit - } - - switch p.GetTokenStream().LA(1) { - case KuneiformParserUNION: - p.EnterOuterAlt(localctx, 1) - { - p.SetState(513) - p.Match(KuneiformParserUNION) - if p.HasError() { - // Recognition error - abort rule - goto errorExit - } - } - p.SetState(515) - p.GetErrorHandler().Sync(p) - if p.HasError() { - goto errorExit - } + p.EnterOuterAlt(localctx, 1) + { + p.SetState(527) _la = p.GetTokenStream().LA(1) - if _la == KuneiformParserALL { - { - p.SetState(514) - p.Match(KuneiformParserALL) - if p.HasError() { - // Recognition error - abort rule - goto errorExit - } - } - - } - - case KuneiformParserINTERSECT: - p.EnterOuterAlt(localctx, 2) - { - p.SetState(517) - p.Match(KuneiformParserINTERSECT) - if p.HasError() { - // Recognition error - abort rule - goto errorExit - } - } - - case KuneiformParserEXCEPT: - p.EnterOuterAlt(localctx, 3) - { - p.SetState(518) - p.Match(KuneiformParserEXCEPT) - if p.HasError() { - // Recognition error - abort rule - goto errorExit - } + if !(((int64(_la) & ^0x3f) == 0 && ((int64(1)<<_la)&432348184157618176) != 0) || ((int64((_la-97)) & ^0x3f) == 0 && ((int64(1)<<(_la-97))&103079215107) != 0)) { + p.GetErrorHandler().RecoverInline(p) + } else { + p.GetErrorHandler().ReportMatch(p) + p.Consume() } - - default: - p.SetError(antlr.NewNoViableAltException(p, nil, nil, nil, nil, nil)) - goto errorExit } errorExit: @@ -8751,61 +9192,71 @@ errorExit: goto errorExit // Trick to prevent 
compiler error if the label is not used } -// IOrdering_termContext is an interface to support dynamic dispatch. -type IOrdering_termContext interface { +// ITransfer_ownership_statementContext is an interface to support dynamic dispatch. +type ITransfer_ownership_statementContext interface { antlr.ParserRuleContext // GetParser returns the parser. GetParser() antlr.Parser // Getter signatures - Sql_expr() ISql_exprContext - NULLS() antlr.TerminalNode - ASC() antlr.TerminalNode - DESC() antlr.TerminalNode - FIRST() antlr.TerminalNode - LAST() antlr.TerminalNode + TRANSFER() antlr.TerminalNode + OWNERSHIP() antlr.TerminalNode + TO() antlr.TerminalNode + Identifier() IIdentifierContext - // IsOrdering_termContext differentiates from other interfaces. - IsOrdering_termContext() + // IsTransfer_ownership_statementContext differentiates from other interfaces. + IsTransfer_ownership_statementContext() } -type Ordering_termContext struct { +type Transfer_ownership_statementContext struct { antlr.BaseParserRuleContext parser antlr.Parser } -func NewEmptyOrdering_termContext() *Ordering_termContext { - var p = new(Ordering_termContext) +func NewEmptyTransfer_ownership_statementContext() *Transfer_ownership_statementContext { + var p = new(Transfer_ownership_statementContext) antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1) - p.RuleIndex = KuneiformParserRULE_ordering_term + p.RuleIndex = KuneiformParserRULE_transfer_ownership_statement return p } -func InitEmptyOrdering_termContext(p *Ordering_termContext) { +func InitEmptyTransfer_ownership_statementContext(p *Transfer_ownership_statementContext) { antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1) - p.RuleIndex = KuneiformParserRULE_ordering_term + p.RuleIndex = KuneiformParserRULE_transfer_ownership_statement } -func (*Ordering_termContext) IsOrdering_termContext() {} +func (*Transfer_ownership_statementContext) IsTransfer_ownership_statementContext() {} -func NewOrdering_termContext(parser antlr.Parser, parent antlr.ParserRuleContext, invokingState int) *Ordering_termContext { - var p = new(Ordering_termContext) +func NewTransfer_ownership_statementContext(parser antlr.Parser, parent antlr.ParserRuleContext, invokingState int) *Transfer_ownership_statementContext { + var p = new(Transfer_ownership_statementContext) antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, parent, invokingState) p.parser = parser - p.RuleIndex = KuneiformParserRULE_ordering_term + p.RuleIndex = KuneiformParserRULE_transfer_ownership_statement return p } -func (s *Ordering_termContext) GetParser() antlr.Parser { return s.parser } +func (s *Transfer_ownership_statementContext) GetParser() antlr.Parser { return s.parser } -func (s *Ordering_termContext) Sql_expr() ISql_exprContext { +func (s *Transfer_ownership_statementContext) TRANSFER() antlr.TerminalNode { + return s.GetToken(KuneiformParserTRANSFER, 0) +} + +func (s *Transfer_ownership_statementContext) OWNERSHIP() antlr.TerminalNode { + return s.GetToken(KuneiformParserOWNERSHIP, 0) +} + +func (s *Transfer_ownership_statementContext) TO() antlr.TerminalNode { + return s.GetToken(KuneiformParserTO, 0) +} + +func (s *Transfer_ownership_statementContext) Identifier() IIdentifierContext { var t antlr.RuleContext for _, ctx := range s.GetChildren() { - if _, ok := ctx.(ISql_exprContext); ok { + if _, ok := ctx.(IIdentifierContext); ok { t = ctx.(antlr.RuleContext) break } @@ -8815,106 +9266,58 @@ func (s *Ordering_termContext) Sql_expr() ISql_exprContext { return nil } - return 
t.(ISql_exprContext) -} - -func (s *Ordering_termContext) NULLS() antlr.TerminalNode { - return s.GetToken(KuneiformParserNULLS, 0) -} - -func (s *Ordering_termContext) ASC() antlr.TerminalNode { - return s.GetToken(KuneiformParserASC, 0) -} - -func (s *Ordering_termContext) DESC() antlr.TerminalNode { - return s.GetToken(KuneiformParserDESC, 0) -} - -func (s *Ordering_termContext) FIRST() antlr.TerminalNode { - return s.GetToken(KuneiformParserFIRST, 0) -} - -func (s *Ordering_termContext) LAST() antlr.TerminalNode { - return s.GetToken(KuneiformParserLAST, 0) + return t.(IIdentifierContext) } -func (s *Ordering_termContext) GetRuleContext() antlr.RuleContext { +func (s *Transfer_ownership_statementContext) GetRuleContext() antlr.RuleContext { return s } -func (s *Ordering_termContext) ToStringTree(ruleNames []string, recog antlr.Recognizer) string { +func (s *Transfer_ownership_statementContext) ToStringTree(ruleNames []string, recog antlr.Recognizer) string { return antlr.TreesStringTree(s, ruleNames, recog) } -func (s *Ordering_termContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { +func (s *Transfer_ownership_statementContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { switch t := visitor.(type) { case KuneiformParserVisitor: - return t.VisitOrdering_term(s) + return t.VisitTransfer_ownership_statement(s) default: return t.VisitChildren(s) } } -func (p *KuneiformParser) Ordering_term() (localctx IOrdering_termContext) { - localctx = NewOrdering_termContext(p, p.GetParserRuleContext(), p.GetState()) - p.EnterRule(localctx, 68, KuneiformParserRULE_ordering_term) - var _la int - +func (p *KuneiformParser) Transfer_ownership_statement() (localctx ITransfer_ownership_statementContext) { + localctx = NewTransfer_ownership_statementContext(p, p.GetParserRuleContext(), p.GetState()) + p.EnterRule(localctx, 62, KuneiformParserRULE_transfer_ownership_statement) p.EnterOuterAlt(localctx, 1) { - p.SetState(521) - p.sql_expr(0) - } - p.SetState(523) - p.GetErrorHandler().Sync(p) - if p.HasError() { - goto errorExit - } - _la = p.GetTokenStream().LA(1) - - if _la == KuneiformParserASC || _la == KuneiformParserDESC { - { - p.SetState(522) - _la = p.GetTokenStream().LA(1) - - if !(_la == KuneiformParserASC || _la == KuneiformParserDESC) { - p.GetErrorHandler().RecoverInline(p) - } else { - p.GetErrorHandler().ReportMatch(p) - p.Consume() - } + p.SetState(529) + p.Match(KuneiformParserTRANSFER) + if p.HasError() { + // Recognition error - abort rule + goto errorExit } - - } - p.SetState(527) - p.GetErrorHandler().Sync(p) - if p.HasError() { - goto errorExit } - _la = p.GetTokenStream().LA(1) - - if _la == KuneiformParserNULLS { - { - p.SetState(525) - p.Match(KuneiformParserNULLS) - if p.HasError() { - // Recognition error - abort rule - goto errorExit - } + { + p.SetState(530) + p.Match(KuneiformParserOWNERSHIP) + if p.HasError() { + // Recognition error - abort rule + goto errorExit } - { - p.SetState(526) - _la = p.GetTokenStream().LA(1) - - if !(_la == KuneiformParserFIRST || _la == KuneiformParserLAST) { - p.GetErrorHandler().RecoverInline(p) - } else { - p.GetErrorHandler().ReportMatch(p) - p.Consume() - } + } + { + p.SetState(531) + p.Match(KuneiformParserTO) + if p.HasError() { + // Recognition error - abort rule + goto errorExit } - + } + { + p.SetState(532) + p.Identifier() } errorExit: @@ -8930,119 +9333,95 @@ errorExit: goto errorExit // Trick to prevent compiler error if the label is not used } -// ISelect_coreContext is an interface to support dynamic dispatch. 
-type ISelect_coreContext interface { +// ICreate_action_statementContext is an interface to support dynamic dispatch. +type ICreate_action_statementContext interface { antlr.ParserRuleContext // GetParser returns the parser. GetParser() antlr.Parser - // GetWhere returns the where rule contexts. - GetWhere() ISql_exprContext - - // GetGroup_by returns the group_by rule contexts. - GetGroup_by() ISql_expr_listContext + // Getter signatures + CREATE() antlr.TerminalNode + ACTION() antlr.TerminalNode + AllIdentifier() []IIdentifierContext + Identifier(i int) IIdentifierContext + LPAREN() antlr.TerminalNode + RPAREN() antlr.TerminalNode + LBRACE() antlr.TerminalNode + RBRACE() antlr.TerminalNode + AllVARIABLE() []antlr.TerminalNode + VARIABLE(i int) antlr.TerminalNode + AllType_() []ITypeContext + Type_(i int) ITypeContext + Action_return() IAction_returnContext + AllAction_statement() []IAction_statementContext + Action_statement(i int) IAction_statementContext + IF() antlr.TerminalNode + NOT() antlr.TerminalNode + EXISTS() antlr.TerminalNode + OR() antlr.TerminalNode + REPLACE() antlr.TerminalNode + AllCOMMA() []antlr.TerminalNode + COMMA(i int) antlr.TerminalNode - // GetHaving returns the having rule contexts. - GetHaving() ISql_exprContext + // IsCreate_action_statementContext differentiates from other interfaces. + IsCreate_action_statementContext() +} - // SetWhere sets the where rule contexts. - SetWhere(ISql_exprContext) +type Create_action_statementContext struct { + antlr.BaseParserRuleContext + parser antlr.Parser +} - // SetGroup_by sets the group_by rule contexts. - SetGroup_by(ISql_expr_listContext) +func NewEmptyCreate_action_statementContext() *Create_action_statementContext { + var p = new(Create_action_statementContext) + antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1) + p.RuleIndex = KuneiformParserRULE_create_action_statement + return p +} - // SetHaving sets the having rule contexts. - SetHaving(ISql_exprContext) - - // Getter signatures - SELECT() antlr.TerminalNode - AllResult_column() []IResult_columnContext - Result_column(i int) IResult_columnContext - DISTINCT() antlr.TerminalNode - AllCOMMA() []antlr.TerminalNode - COMMA(i int) antlr.TerminalNode - FROM() antlr.TerminalNode - Relation() IRelationContext - WHERE() antlr.TerminalNode - GROUP() antlr.TerminalNode - BY() antlr.TerminalNode - AllSql_expr() []ISql_exprContext - Sql_expr(i int) ISql_exprContext - Sql_expr_list() ISql_expr_listContext - AllJoin() []IJoinContext - Join(i int) IJoinContext - HAVING() antlr.TerminalNode - - // IsSelect_coreContext differentiates from other interfaces. 
- IsSelect_coreContext() -} - -type Select_coreContext struct { - antlr.BaseParserRuleContext - parser antlr.Parser - where ISql_exprContext - group_by ISql_expr_listContext - having ISql_exprContext -} - -func NewEmptySelect_coreContext() *Select_coreContext { - var p = new(Select_coreContext) - antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1) - p.RuleIndex = KuneiformParserRULE_select_core - return p -} - -func InitEmptySelect_coreContext(p *Select_coreContext) { +func InitEmptyCreate_action_statementContext(p *Create_action_statementContext) { antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1) - p.RuleIndex = KuneiformParserRULE_select_core + p.RuleIndex = KuneiformParserRULE_create_action_statement } -func (*Select_coreContext) IsSelect_coreContext() {} +func (*Create_action_statementContext) IsCreate_action_statementContext() {} -func NewSelect_coreContext(parser antlr.Parser, parent antlr.ParserRuleContext, invokingState int) *Select_coreContext { - var p = new(Select_coreContext) +func NewCreate_action_statementContext(parser antlr.Parser, parent antlr.ParserRuleContext, invokingState int) *Create_action_statementContext { + var p = new(Create_action_statementContext) antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, parent, invokingState) p.parser = parser - p.RuleIndex = KuneiformParserRULE_select_core + p.RuleIndex = KuneiformParserRULE_create_action_statement return p } -func (s *Select_coreContext) GetParser() antlr.Parser { return s.parser } - -func (s *Select_coreContext) GetWhere() ISql_exprContext { return s.where } - -func (s *Select_coreContext) GetGroup_by() ISql_expr_listContext { return s.group_by } - -func (s *Select_coreContext) GetHaving() ISql_exprContext { return s.having } - -func (s *Select_coreContext) SetWhere(v ISql_exprContext) { s.where = v } - -func (s *Select_coreContext) SetGroup_by(v ISql_expr_listContext) { s.group_by = v } +func (s *Create_action_statementContext) GetParser() antlr.Parser { return s.parser } -func (s *Select_coreContext) SetHaving(v ISql_exprContext) { s.having = v } +func (s *Create_action_statementContext) CREATE() antlr.TerminalNode { + return s.GetToken(KuneiformParserCREATE, 0) +} -func (s *Select_coreContext) SELECT() antlr.TerminalNode { - return s.GetToken(KuneiformParserSELECT, 0) +func (s *Create_action_statementContext) ACTION() antlr.TerminalNode { + return s.GetToken(KuneiformParserACTION, 0) } -func (s *Select_coreContext) AllResult_column() []IResult_columnContext { +func (s *Create_action_statementContext) AllIdentifier() []IIdentifierContext { children := s.GetChildren() len := 0 for _, ctx := range children { - if _, ok := ctx.(IResult_columnContext); ok { + if _, ok := ctx.(IIdentifierContext); ok { len++ } } - tst := make([]IResult_columnContext, len) + tst := make([]IIdentifierContext, len) i := 0 for _, ctx := range children { - if t, ok := ctx.(IResult_columnContext); ok { - tst[i] = t.(IResult_columnContext) + if t, ok := ctx.(IIdentifierContext); ok { + tst[i] = t.(IIdentifierContext) i++ } } @@ -9050,11 +9429,11 @@ func (s *Select_coreContext) AllResult_column() []IResult_columnContext { return tst } -func (s *Select_coreContext) Result_column(i int) IResult_columnContext { +func (s *Create_action_statementContext) Identifier(i int) IIdentifierContext { var t antlr.RuleContext j := 0 for _, ctx := range s.GetChildren() { - if _, ok := ctx.(IResult_columnContext); ok { + if _, ok := ctx.(IIdentifierContext); ok { if j == i { t = ctx.(antlr.RuleContext) break @@ 
-9067,67 +9446,47 @@ func (s *Select_coreContext) Result_column(i int) IResult_columnContext { return nil } - return t.(IResult_columnContext) -} - -func (s *Select_coreContext) DISTINCT() antlr.TerminalNode { - return s.GetToken(KuneiformParserDISTINCT, 0) -} - -func (s *Select_coreContext) AllCOMMA() []antlr.TerminalNode { - return s.GetTokens(KuneiformParserCOMMA) + return t.(IIdentifierContext) } -func (s *Select_coreContext) COMMA(i int) antlr.TerminalNode { - return s.GetToken(KuneiformParserCOMMA, i) +func (s *Create_action_statementContext) LPAREN() antlr.TerminalNode { + return s.GetToken(KuneiformParserLPAREN, 0) } -func (s *Select_coreContext) FROM() antlr.TerminalNode { - return s.GetToken(KuneiformParserFROM, 0) +func (s *Create_action_statementContext) RPAREN() antlr.TerminalNode { + return s.GetToken(KuneiformParserRPAREN, 0) } -func (s *Select_coreContext) Relation() IRelationContext { - var t antlr.RuleContext - for _, ctx := range s.GetChildren() { - if _, ok := ctx.(IRelationContext); ok { - t = ctx.(antlr.RuleContext) - break - } - } - - if t == nil { - return nil - } - - return t.(IRelationContext) +func (s *Create_action_statementContext) LBRACE() antlr.TerminalNode { + return s.GetToken(KuneiformParserLBRACE, 0) } -func (s *Select_coreContext) WHERE() antlr.TerminalNode { - return s.GetToken(KuneiformParserWHERE, 0) +func (s *Create_action_statementContext) RBRACE() antlr.TerminalNode { + return s.GetToken(KuneiformParserRBRACE, 0) } -func (s *Select_coreContext) GROUP() antlr.TerminalNode { - return s.GetToken(KuneiformParserGROUP, 0) +func (s *Create_action_statementContext) AllVARIABLE() []antlr.TerminalNode { + return s.GetTokens(KuneiformParserVARIABLE) } -func (s *Select_coreContext) BY() antlr.TerminalNode { - return s.GetToken(KuneiformParserBY, 0) +func (s *Create_action_statementContext) VARIABLE(i int) antlr.TerminalNode { + return s.GetToken(KuneiformParserVARIABLE, i) } -func (s *Select_coreContext) AllSql_expr() []ISql_exprContext { +func (s *Create_action_statementContext) AllType_() []ITypeContext { children := s.GetChildren() len := 0 for _, ctx := range children { - if _, ok := ctx.(ISql_exprContext); ok { + if _, ok := ctx.(ITypeContext); ok { len++ } } - tst := make([]ISql_exprContext, len) + tst := make([]ITypeContext, len) i := 0 for _, ctx := range children { - if t, ok := ctx.(ISql_exprContext); ok { - tst[i] = t.(ISql_exprContext) + if t, ok := ctx.(ITypeContext); ok { + tst[i] = t.(ITypeContext) i++ } } @@ -9135,11 +9494,11 @@ func (s *Select_coreContext) AllSql_expr() []ISql_exprContext { return tst } -func (s *Select_coreContext) Sql_expr(i int) ISql_exprContext { +func (s *Create_action_statementContext) Type_(i int) ITypeContext { var t antlr.RuleContext j := 0 for _, ctx := range s.GetChildren() { - if _, ok := ctx.(ISql_exprContext); ok { + if _, ok := ctx.(ITypeContext); ok { if j == i { t = ctx.(antlr.RuleContext) break @@ -9152,13 +9511,13 @@ func (s *Select_coreContext) Sql_expr(i int) ISql_exprContext { return nil } - return t.(ISql_exprContext) + return t.(ITypeContext) } -func (s *Select_coreContext) Sql_expr_list() ISql_expr_listContext { +func (s *Create_action_statementContext) Action_return() IAction_returnContext { var t antlr.RuleContext for _, ctx := range s.GetChildren() { - if _, ok := ctx.(ISql_expr_listContext); ok { + if _, ok := ctx.(IAction_returnContext); ok { t = ctx.(antlr.RuleContext) break } @@ -9168,23 +9527,23 @@ func (s *Select_coreContext) Sql_expr_list() ISql_expr_listContext { return nil } - return 
t.(ISql_expr_listContext) + return t.(IAction_returnContext) } -func (s *Select_coreContext) AllJoin() []IJoinContext { +func (s *Create_action_statementContext) AllAction_statement() []IAction_statementContext { children := s.GetChildren() len := 0 for _, ctx := range children { - if _, ok := ctx.(IJoinContext); ok { + if _, ok := ctx.(IAction_statementContext); ok { len++ } } - tst := make([]IJoinContext, len) + tst := make([]IAction_statementContext, len) i := 0 for _, ctx := range children { - if t, ok := ctx.(IJoinContext); ok { - tst[i] = t.(IJoinContext) + if t, ok := ctx.(IAction_statementContext); ok { + tst[i] = t.(IAction_statementContext) i++ } } @@ -9192,11 +9551,11 @@ func (s *Select_coreContext) AllJoin() []IJoinContext { return tst } -func (s *Select_coreContext) Join(i int) IJoinContext { +func (s *Create_action_statementContext) Action_statement(i int) IAction_statementContext { var t antlr.RuleContext j := 0 for _, ctx := range s.GetChildren() { - if _, ok := ctx.(IJoinContext); ok { + if _, ok := ctx.(IAction_statementContext); ok { if j == i { t = ctx.(antlr.RuleContext) break @@ -9209,129 +9568,193 @@ func (s *Select_coreContext) Join(i int) IJoinContext { return nil } - return t.(IJoinContext) + return t.(IAction_statementContext) } -func (s *Select_coreContext) HAVING() antlr.TerminalNode { - return s.GetToken(KuneiformParserHAVING, 0) +func (s *Create_action_statementContext) IF() antlr.TerminalNode { + return s.GetToken(KuneiformParserIF, 0) } -func (s *Select_coreContext) GetRuleContext() antlr.RuleContext { +func (s *Create_action_statementContext) NOT() antlr.TerminalNode { + return s.GetToken(KuneiformParserNOT, 0) +} + +func (s *Create_action_statementContext) EXISTS() antlr.TerminalNode { + return s.GetToken(KuneiformParserEXISTS, 0) +} + +func (s *Create_action_statementContext) OR() antlr.TerminalNode { + return s.GetToken(KuneiformParserOR, 0) +} + +func (s *Create_action_statementContext) REPLACE() antlr.TerminalNode { + return s.GetToken(KuneiformParserREPLACE, 0) +} + +func (s *Create_action_statementContext) AllCOMMA() []antlr.TerminalNode { + return s.GetTokens(KuneiformParserCOMMA) +} + +func (s *Create_action_statementContext) COMMA(i int) antlr.TerminalNode { + return s.GetToken(KuneiformParserCOMMA, i) +} + +func (s *Create_action_statementContext) GetRuleContext() antlr.RuleContext { return s } -func (s *Select_coreContext) ToStringTree(ruleNames []string, recog antlr.Recognizer) string { +func (s *Create_action_statementContext) ToStringTree(ruleNames []string, recog antlr.Recognizer) string { return antlr.TreesStringTree(s, ruleNames, recog) } -func (s *Select_coreContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { +func (s *Create_action_statementContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { switch t := visitor.(type) { case KuneiformParserVisitor: - return t.VisitSelect_core(s) + return t.VisitCreate_action_statement(s) default: return t.VisitChildren(s) } } -func (p *KuneiformParser) Select_core() (localctx ISelect_coreContext) { - localctx = NewSelect_coreContext(p, p.GetParserRuleContext(), p.GetState()) - p.EnterRule(localctx, 70, KuneiformParserRULE_select_core) +func (p *KuneiformParser) Create_action_statement() (localctx ICreate_action_statementContext) { + localctx = NewCreate_action_statementContext(p, p.GetParserRuleContext(), p.GetState()) + p.EnterRule(localctx, 64, KuneiformParserRULE_create_action_statement) var _la int p.EnterOuterAlt(localctx, 1) { - p.SetState(529) - p.Match(KuneiformParserSELECT) 
+ p.SetState(534) + p.Match(KuneiformParserCREATE) + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } + } + { + p.SetState(535) + p.Match(KuneiformParserACTION) if p.HasError() { // Recognition error - abort rule goto errorExit } } - p.SetState(531) + p.SetState(541) p.GetErrorHandler().Sync(p) if p.HasError() { goto errorExit } - _la = p.GetTokenStream().LA(1) - - if _la == KuneiformParserDISTINCT { + switch p.GetTokenStream().LA(1) { + case KuneiformParserIF: { - p.SetState(530) - p.Match(KuneiformParserDISTINCT) + p.SetState(536) + p.Match(KuneiformParserIF) + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } + } + { + p.SetState(537) + p.Match(KuneiformParserNOT) + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } + } + { + p.SetState(538) + p.Match(KuneiformParserEXISTS) if p.HasError() { // Recognition error - abort rule goto errorExit } } - } - { - p.SetState(533) - p.Result_column() - } - p.SetState(538) - p.GetErrorHandler().Sync(p) - if p.HasError() { - goto errorExit - } - _la = p.GetTokenStream().LA(1) - - for _la == KuneiformParserCOMMA { + case KuneiformParserOR: { - p.SetState(534) - p.Match(KuneiformParserCOMMA) + p.SetState(539) + p.Match(KuneiformParserOR) if p.HasError() { // Recognition error - abort rule goto errorExit } } { - p.SetState(535) - p.Result_column() + p.SetState(540) + p.Match(KuneiformParserREPLACE) + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } } - p.SetState(540) - p.GetErrorHandler().Sync(p) + case KuneiformParserDOUBLE_QUOTE, KuneiformParserIDENTIFIER: + + default: + } + { + p.SetState(543) + p.Identifier() + } + { + p.SetState(544) + p.Match(KuneiformParserLPAREN) if p.HasError() { + // Recognition error - abort rule goto errorExit } - _la = p.GetTokenStream().LA(1) } - p.SetState(549) + p.SetState(555) p.GetErrorHandler().Sync(p) if p.HasError() { goto errorExit } _la = p.GetTokenStream().LA(1) - if _la == KuneiformParserFROM { + if _la == KuneiformParserVARIABLE { { - p.SetState(541) - p.Match(KuneiformParserFROM) + p.SetState(545) + p.Match(KuneiformParserVARIABLE) if p.HasError() { // Recognition error - abort rule goto errorExit } } { - p.SetState(542) - p.Relation() + p.SetState(546) + p.Type_() } - p.SetState(546) + p.SetState(552) p.GetErrorHandler().Sync(p) if p.HasError() { goto errorExit } _la = p.GetTokenStream().LA(1) - for (int64((_la-69)) & ^0x3f) == 0 && ((int64(1)<<(_la-69))&134217743) != 0 { + for _la == KuneiformParserCOMMA { { - p.SetState(543) - p.Join() + p.SetState(547) + p.Match(KuneiformParserCOMMA) + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } + } + { + p.SetState(548) + p.Match(KuneiformParserVARIABLE) + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } + } + { + p.SetState(549) + p.Type_() } - p.SetState(548) + p.SetState(554) p.GetErrorHandler().Sync(p) if p.HasError() { goto errorExit @@ -9340,88 +9763,83 @@ func (p *KuneiformParser) Select_core() (localctx ISelect_coreContext) { } } - p.SetState(553) + { + p.SetState(557) + p.Match(KuneiformParserRPAREN) + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } + } + p.SetState(561) p.GetErrorHandler().Sync(p) if p.HasError() { goto errorExit } _la = p.GetTokenStream().LA(1) - if _la == KuneiformParserWHERE { + for _la == KuneiformParserDOUBLE_QUOTE || _la == KuneiformParserIDENTIFIER { { - p.SetState(551) - p.Match(KuneiformParserWHERE) - if p.HasError() { - // Recognition error - abort rule 
- goto errorExit - } + p.SetState(558) + p.Identifier() } - { - p.SetState(552) - - var _x = p.sql_expr(0) - localctx.(*Select_coreContext).where = _x + p.SetState(563) + p.GetErrorHandler().Sync(p) + if p.HasError() { + goto errorExit } - + _la = p.GetTokenStream().LA(1) } - p.SetState(562) + p.SetState(565) p.GetErrorHandler().Sync(p) if p.HasError() { goto errorExit } _la = p.GetTokenStream().LA(1) - if _la == KuneiformParserGROUP { - { - p.SetState(555) - p.Match(KuneiformParserGROUP) - if p.HasError() { - // Recognition error - abort rule - goto errorExit - } - } + if _la == KuneiformParserRETURNS { { - p.SetState(556) - p.Match(KuneiformParserBY) - if p.HasError() { - // Recognition error - abort rule - goto errorExit - } + p.SetState(564) + p.Action_return() } - { - p.SetState(557) - var _x = p.Sql_expr_list() + } + { + p.SetState(567) + p.Match(KuneiformParserLBRACE) + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } + } + p.SetState(571) + p.GetErrorHandler().Sync(p) + if p.HasError() { + goto errorExit + } + _la = p.GetTokenStream().LA(1) - localctx.(*Select_coreContext).group_by = _x + for ((int64(_la) & ^0x3f) == 0 && ((int64(1)<<_la)&2810246172314175624) != 0) || ((int64((_la-88)) & ^0x3f) == 0 && ((int64(1)<<(_la-88))&1010992146074830337) != 0) { + { + p.SetState(568) + p.Action_statement() } - p.SetState(560) + + p.SetState(573) p.GetErrorHandler().Sync(p) if p.HasError() { goto errorExit } _la = p.GetTokenStream().LA(1) - - if _la == KuneiformParserHAVING { - { - p.SetState(558) - p.Match(KuneiformParserHAVING) - if p.HasError() { - // Recognition error - abort rule - goto errorExit - } - } - { - p.SetState(559) - - var _x = p.sql_expr(0) - - localctx.(*Select_coreContext).having = _x - } - + } + { + p.SetState(574) + p.Match(KuneiformParserRBRACE) + if p.HasError() { + // Recognition error - abort rule + goto errorExit } - } errorExit: @@ -9437,87 +9855,68 @@ errorExit: goto errorExit // Trick to prevent compiler error if the label is not used } -// IRelationContext is an interface to support dynamic dispatch. -type IRelationContext interface { +// IDrop_action_statementContext is an interface to support dynamic dispatch. +type IDrop_action_statementContext interface { antlr.ParserRuleContext // GetParser returns the parser. GetParser() antlr.Parser - // IsRelationContext differentiates from other interfaces. - IsRelationContext() + + // Getter signatures + DROP() antlr.TerminalNode + ACTION() antlr.TerminalNode + Identifier() IIdentifierContext + IF() antlr.TerminalNode + EXISTS() antlr.TerminalNode + + // IsDrop_action_statementContext differentiates from other interfaces. 
+ IsDrop_action_statementContext() } -type RelationContext struct { +type Drop_action_statementContext struct { antlr.BaseParserRuleContext parser antlr.Parser } -func NewEmptyRelationContext() *RelationContext { - var p = new(RelationContext) +func NewEmptyDrop_action_statementContext() *Drop_action_statementContext { + var p = new(Drop_action_statementContext) antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1) - p.RuleIndex = KuneiformParserRULE_relation + p.RuleIndex = KuneiformParserRULE_drop_action_statement return p } -func InitEmptyRelationContext(p *RelationContext) { +func InitEmptyDrop_action_statementContext(p *Drop_action_statementContext) { antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1) - p.RuleIndex = KuneiformParserRULE_relation + p.RuleIndex = KuneiformParserRULE_drop_action_statement } -func (*RelationContext) IsRelationContext() {} +func (*Drop_action_statementContext) IsDrop_action_statementContext() {} -func NewRelationContext(parser antlr.Parser, parent antlr.ParserRuleContext, invokingState int) *RelationContext { - var p = new(RelationContext) +func NewDrop_action_statementContext(parser antlr.Parser, parent antlr.ParserRuleContext, invokingState int) *Drop_action_statementContext { + var p = new(Drop_action_statementContext) antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, parent, invokingState) p.parser = parser - p.RuleIndex = KuneiformParserRULE_relation + p.RuleIndex = KuneiformParserRULE_drop_action_statement return p } -func (s *RelationContext) GetParser() antlr.Parser { return s.parser } - -func (s *RelationContext) CopyAll(ctx *RelationContext) { - s.CopyFrom(&ctx.BaseParserRuleContext) -} - -func (s *RelationContext) GetRuleContext() antlr.RuleContext { - return s -} - -func (s *RelationContext) ToStringTree(ruleNames []string, recog antlr.Recognizer) string { - return antlr.TreesStringTree(s, ruleNames, recog) -} - -type Function_relationContext struct { - RelationContext - alias IIdentifierContext -} - -func NewFunction_relationContext(parser antlr.Parser, ctx antlr.ParserRuleContext) *Function_relationContext { - var p = new(Function_relationContext) - - InitEmptyRelationContext(&p.RelationContext) - p.parser = parser - p.CopyAll(ctx.(*RelationContext)) +func (s *Drop_action_statementContext) GetParser() antlr.Parser { return s.parser } - return p +func (s *Drop_action_statementContext) DROP() antlr.TerminalNode { + return s.GetToken(KuneiformParserDROP, 0) } -func (s *Function_relationContext) GetAlias() IIdentifierContext { return s.alias } - -func (s *Function_relationContext) SetAlias(v IIdentifierContext) { s.alias = v } - -func (s *Function_relationContext) GetRuleContext() antlr.RuleContext { - return s +func (s *Drop_action_statementContext) ACTION() antlr.TerminalNode { + return s.GetToken(KuneiformParserACTION, 0) } -func (s *Function_relationContext) Sql_function_call() ISql_function_callContext { +func (s *Drop_action_statementContext) Identifier() IIdentifierContext { var t antlr.RuleContext for _, ctx := range s.GetChildren() { - if _, ok := ctx.(ISql_function_callContext); ok { + if _, ok := ctx.(IIdentifierContext); ok { t = ctx.(antlr.RuleContext) break } @@ -9527,155 +9926,226 @@ func (s *Function_relationContext) Sql_function_call() ISql_function_callContext return nil } - return t.(ISql_function_callContext) + return t.(IIdentifierContext) } -func (s *Function_relationContext) AS() antlr.TerminalNode { - return s.GetToken(KuneiformParserAS, 0) +func (s *Drop_action_statementContext) IF() 
antlr.TerminalNode { + return s.GetToken(KuneiformParserIF, 0) } -func (s *Function_relationContext) Identifier() IIdentifierContext { - var t antlr.RuleContext - for _, ctx := range s.GetChildren() { - if _, ok := ctx.(IIdentifierContext); ok { - t = ctx.(antlr.RuleContext) - break - } - } +func (s *Drop_action_statementContext) EXISTS() antlr.TerminalNode { + return s.GetToken(KuneiformParserEXISTS, 0) +} - if t == nil { - return nil - } +func (s *Drop_action_statementContext) GetRuleContext() antlr.RuleContext { + return s +} - return t.(IIdentifierContext) +func (s *Drop_action_statementContext) ToStringTree(ruleNames []string, recog antlr.Recognizer) string { + return antlr.TreesStringTree(s, ruleNames, recog) } -func (s *Function_relationContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { +func (s *Drop_action_statementContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { switch t := visitor.(type) { case KuneiformParserVisitor: - return t.VisitFunction_relation(s) + return t.VisitDrop_action_statement(s) default: return t.VisitChildren(s) } } -type Table_relationContext struct { - RelationContext - table_name IIdentifierContext - alias IIdentifierContext -} - -func NewTable_relationContext(parser antlr.Parser, ctx antlr.ParserRuleContext) *Table_relationContext { - var p = new(Table_relationContext) - - InitEmptyRelationContext(&p.RelationContext) - p.parser = parser - p.CopyAll(ctx.(*RelationContext)) - - return p -} - -func (s *Table_relationContext) GetTable_name() IIdentifierContext { return s.table_name } - -func (s *Table_relationContext) GetAlias() IIdentifierContext { return s.alias } - -func (s *Table_relationContext) SetTable_name(v IIdentifierContext) { s.table_name = v } - -func (s *Table_relationContext) SetAlias(v IIdentifierContext) { s.alias = v } - -func (s *Table_relationContext) GetRuleContext() antlr.RuleContext { - return s -} +func (p *KuneiformParser) Drop_action_statement() (localctx IDrop_action_statementContext) { + localctx = NewDrop_action_statementContext(p, p.GetParserRuleContext(), p.GetState()) + p.EnterRule(localctx, 66, KuneiformParserRULE_drop_action_statement) + var _la int -func (s *Table_relationContext) AllIdentifier() []IIdentifierContext { - children := s.GetChildren() - len := 0 - for _, ctx := range children { - if _, ok := ctx.(IIdentifierContext); ok { - len++ + p.EnterOuterAlt(localctx, 1) + { + p.SetState(576) + p.Match(KuneiformParserDROP) + if p.HasError() { + // Recognition error - abort rule + goto errorExit } } - - tst := make([]IIdentifierContext, len) - i := 0 - for _, ctx := range children { - if t, ok := ctx.(IIdentifierContext); ok { - tst[i] = t.(IIdentifierContext) - i++ + { + p.SetState(577) + p.Match(KuneiformParserACTION) + if p.HasError() { + // Recognition error - abort rule + goto errorExit } } + p.SetState(580) + p.GetErrorHandler().Sync(p) + if p.HasError() { + goto errorExit + } + _la = p.GetTokenStream().LA(1) - return tst -} - -func (s *Table_relationContext) Identifier(i int) IIdentifierContext { - var t antlr.RuleContext - j := 0 - for _, ctx := range s.GetChildren() { - if _, ok := ctx.(IIdentifierContext); ok { - if j == i { - t = ctx.(antlr.RuleContext) - break + if _la == KuneiformParserIF { + { + p.SetState(578) + p.Match(KuneiformParserIF) + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } + } + { + p.SetState(579) + p.Match(KuneiformParserEXISTS) + if p.HasError() { + // Recognition error - abort rule + goto errorExit } - j++ } - } - if t == nil { - return nil + 
} + { + p.SetState(582) + p.Identifier() } - return t.(IIdentifierContext) -} - -func (s *Table_relationContext) AS() antlr.TerminalNode { - return s.GetToken(KuneiformParserAS, 0) +errorExit: + if p.HasError() { + v := p.GetError() + localctx.SetException(v) + p.GetErrorHandler().ReportError(p, v) + p.GetErrorHandler().Recover(p, v) + p.SetError(nil) + } + p.ExitRule() + return localctx + goto errorExit // Trick to prevent compiler error if the label is not used } -func (s *Table_relationContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { - switch t := visitor.(type) { - case KuneiformParserVisitor: - return t.VisitTable_relation(s) +// IUse_extension_statementContext is an interface to support dynamic dispatch. +type IUse_extension_statementContext interface { + antlr.ParserRuleContext - default: - return t.VisitChildren(s) - } -} + // GetParser returns the parser. + GetParser() antlr.Parser -type Subquery_relationContext struct { - RelationContext - alias IIdentifierContext -} + // GetExtension_name returns the extension_name rule contexts. + GetExtension_name() IIdentifierContext -func NewSubquery_relationContext(parser antlr.Parser, ctx antlr.ParserRuleContext) *Subquery_relationContext { - var p = new(Subquery_relationContext) + // GetAlias returns the alias rule contexts. + GetAlias() IIdentifierContext + + // SetExtension_name sets the extension_name rule contexts. + SetExtension_name(IIdentifierContext) + + // SetAlias sets the alias rule contexts. + SetAlias(IIdentifierContext) + + // Getter signatures + USE() antlr.TerminalNode + AS() antlr.TerminalNode + AllIdentifier() []IIdentifierContext + Identifier(i int) IIdentifierContext + IF() antlr.TerminalNode + NOT() antlr.TerminalNode + EXISTS() antlr.TerminalNode + LBRACE() antlr.TerminalNode + RBRACE() antlr.TerminalNode + AllCOL() []antlr.TerminalNode + COL(i int) antlr.TerminalNode + AllAction_expr() []IAction_exprContext + Action_expr(i int) IAction_exprContext + AllCOMMA() []antlr.TerminalNode + COMMA(i int) antlr.TerminalNode + + // IsUse_extension_statementContext differentiates from other interfaces. 
+ IsUse_extension_statementContext() +} + +type Use_extension_statementContext struct { + antlr.BaseParserRuleContext + parser antlr.Parser + extension_name IIdentifierContext + alias IIdentifierContext +} + +func NewEmptyUse_extension_statementContext() *Use_extension_statementContext { + var p = new(Use_extension_statementContext) + antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1) + p.RuleIndex = KuneiformParserRULE_use_extension_statement + return p +} + +func InitEmptyUse_extension_statementContext(p *Use_extension_statementContext) { + antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1) + p.RuleIndex = KuneiformParserRULE_use_extension_statement +} + +func (*Use_extension_statementContext) IsUse_extension_statementContext() {} + +func NewUse_extension_statementContext(parser antlr.Parser, parent antlr.ParserRuleContext, invokingState int) *Use_extension_statementContext { + var p = new(Use_extension_statementContext) + + antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, parent, invokingState) - InitEmptyRelationContext(&p.RelationContext) p.parser = parser - p.CopyAll(ctx.(*RelationContext)) + p.RuleIndex = KuneiformParserRULE_use_extension_statement return p } -func (s *Subquery_relationContext) GetAlias() IIdentifierContext { return s.alias } +func (s *Use_extension_statementContext) GetParser() antlr.Parser { return s.parser } -func (s *Subquery_relationContext) SetAlias(v IIdentifierContext) { s.alias = v } +func (s *Use_extension_statementContext) GetExtension_name() IIdentifierContext { + return s.extension_name +} -func (s *Subquery_relationContext) GetRuleContext() antlr.RuleContext { - return s +func (s *Use_extension_statementContext) GetAlias() IIdentifierContext { return s.alias } + +func (s *Use_extension_statementContext) SetExtension_name(v IIdentifierContext) { + s.extension_name = v } -func (s *Subquery_relationContext) LPAREN() antlr.TerminalNode { - return s.GetToken(KuneiformParserLPAREN, 0) +func (s *Use_extension_statementContext) SetAlias(v IIdentifierContext) { s.alias = v } + +func (s *Use_extension_statementContext) USE() antlr.TerminalNode { + return s.GetToken(KuneiformParserUSE, 0) } -func (s *Subquery_relationContext) Select_statement() ISelect_statementContext { +func (s *Use_extension_statementContext) AS() antlr.TerminalNode { + return s.GetToken(KuneiformParserAS, 0) +} + +func (s *Use_extension_statementContext) AllIdentifier() []IIdentifierContext { + children := s.GetChildren() + len := 0 + for _, ctx := range children { + if _, ok := ctx.(IIdentifierContext); ok { + len++ + } + } + + tst := make([]IIdentifierContext, len) + i := 0 + for _, ctx := range children { + if t, ok := ctx.(IIdentifierContext); ok { + tst[i] = t.(IIdentifierContext) + i++ + } + } + + return tst +} + +func (s *Use_extension_statementContext) Identifier(i int) IIdentifierContext { var t antlr.RuleContext + j := 0 for _, ctx := range s.GetChildren() { - if _, ok := ctx.(ISelect_statementContext); ok { - t = ctx.(antlr.RuleContext) - break + if _, ok := ctx.(IIdentifierContext); ok { + if j == i { + t = ctx.(antlr.RuleContext) + break + } + j++ } } @@ -9683,19 +10153,68 @@ func (s *Subquery_relationContext) Select_statement() ISelect_statementContext { return nil } - return t.(ISelect_statementContext) + return t.(IIdentifierContext) } -func (s *Subquery_relationContext) RPAREN() antlr.TerminalNode { - return s.GetToken(KuneiformParserRPAREN, 0) +func (s *Use_extension_statementContext) IF() antlr.TerminalNode { + return 
s.GetToken(KuneiformParserIF, 0) } -func (s *Subquery_relationContext) Identifier() IIdentifierContext { +func (s *Use_extension_statementContext) NOT() antlr.TerminalNode { + return s.GetToken(KuneiformParserNOT, 0) +} + +func (s *Use_extension_statementContext) EXISTS() antlr.TerminalNode { + return s.GetToken(KuneiformParserEXISTS, 0) +} + +func (s *Use_extension_statementContext) LBRACE() antlr.TerminalNode { + return s.GetToken(KuneiformParserLBRACE, 0) +} + +func (s *Use_extension_statementContext) RBRACE() antlr.TerminalNode { + return s.GetToken(KuneiformParserRBRACE, 0) +} + +func (s *Use_extension_statementContext) AllCOL() []antlr.TerminalNode { + return s.GetTokens(KuneiformParserCOL) +} + +func (s *Use_extension_statementContext) COL(i int) antlr.TerminalNode { + return s.GetToken(KuneiformParserCOL, i) +} + +func (s *Use_extension_statementContext) AllAction_expr() []IAction_exprContext { + children := s.GetChildren() + len := 0 + for _, ctx := range children { + if _, ok := ctx.(IAction_exprContext); ok { + len++ + } + } + + tst := make([]IAction_exprContext, len) + i := 0 + for _, ctx := range children { + if t, ok := ctx.(IAction_exprContext); ok { + tst[i] = t.(IAction_exprContext) + i++ + } + } + + return tst +} + +func (s *Use_extension_statementContext) Action_expr(i int) IAction_exprContext { var t antlr.RuleContext + j := 0 for _, ctx := range s.GetChildren() { - if _, ok := ctx.(IIdentifierContext); ok { - t = ctx.(antlr.RuleContext) - break + if _, ok := ctx.(IAction_exprContext); ok { + if j == i { + t = ctx.(antlr.RuleContext) + break + } + j++ } } @@ -9703,186 +10222,196 @@ func (s *Subquery_relationContext) Identifier() IIdentifierContext { return nil } - return t.(IIdentifierContext) + return t.(IAction_exprContext) } -func (s *Subquery_relationContext) AS() antlr.TerminalNode { - return s.GetToken(KuneiformParserAS, 0) +func (s *Use_extension_statementContext) AllCOMMA() []antlr.TerminalNode { + return s.GetTokens(KuneiformParserCOMMA) } -func (s *Subquery_relationContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { +func (s *Use_extension_statementContext) COMMA(i int) antlr.TerminalNode { + return s.GetToken(KuneiformParserCOMMA, i) +} + +func (s *Use_extension_statementContext) GetRuleContext() antlr.RuleContext { + return s +} + +func (s *Use_extension_statementContext) ToStringTree(ruleNames []string, recog antlr.Recognizer) string { + return antlr.TreesStringTree(s, ruleNames, recog) +} + +func (s *Use_extension_statementContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { switch t := visitor.(type) { case KuneiformParserVisitor: - return t.VisitSubquery_relation(s) + return t.VisitUse_extension_statement(s) default: return t.VisitChildren(s) } } -func (p *KuneiformParser) Relation() (localctx IRelationContext) { - localctx = NewRelationContext(p, p.GetParserRuleContext(), p.GetState()) - p.EnterRule(localctx, 72, KuneiformParserRULE_relation) +func (p *KuneiformParser) Use_extension_statement() (localctx IUse_extension_statementContext) { + localctx = NewUse_extension_statementContext(p, p.GetParserRuleContext(), p.GetState()) + p.EnterRule(localctx, 68, KuneiformParserRULE_use_extension_statement) var _la int - p.SetState(587) + p.EnterOuterAlt(localctx, 1) + { + p.SetState(584) + p.Match(KuneiformParserUSE) + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } + } + { + p.SetState(585) + + var _x = p.Identifier() + + localctx.(*Use_extension_statementContext).extension_name = _x + } + p.SetState(589) 
p.GetErrorHandler().Sync(p) if p.HasError() { goto errorExit } + _la = p.GetTokenStream().LA(1) - switch p.GetInterpreter().AdaptivePredict(p.BaseParser, p.GetTokenStream(), 72, p.GetParserRuleContext()) { - case 1: - localctx = NewTable_relationContext(p, localctx) - p.EnterOuterAlt(localctx, 1) + if _la == KuneiformParserIF { { - p.SetState(564) - - var _x = p.Identifier() - - localctx.(*Table_relationContext).table_name = _x - } - p.SetState(569) - p.GetErrorHandler().Sync(p) - if p.HasError() { - goto errorExit - } - _la = p.GetTokenStream().LA(1) - - if _la == KuneiformParserDOUBLE_QUOTE || _la == KuneiformParserAS || _la == KuneiformParserIDENTIFIER { - p.SetState(566) - p.GetErrorHandler().Sync(p) + p.SetState(586) + p.Match(KuneiformParserIF) if p.HasError() { + // Recognition error - abort rule goto errorExit } - _la = p.GetTokenStream().LA(1) - - if _la == KuneiformParserAS { - { - p.SetState(565) - p.Match(KuneiformParserAS) - if p.HasError() { - // Recognition error - abort rule - goto errorExit - } - } - - } - { - p.SetState(568) - - var _x = p.Identifier() - - localctx.(*Table_relationContext).alias = _x - } - } - - case 2: - localctx = NewSubquery_relationContext(p, localctx) - p.EnterOuterAlt(localctx, 2) { - p.SetState(571) - p.Match(KuneiformParserLPAREN) + p.SetState(587) + p.Match(KuneiformParserNOT) if p.HasError() { // Recognition error - abort rule goto errorExit } } { - p.SetState(572) - p.Select_statement() + p.SetState(588) + p.Match(KuneiformParserEXISTS) + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } } + + } + p.SetState(608) + p.GetErrorHandler().Sync(p) + if p.HasError() { + goto errorExit + } + _la = p.GetTokenStream().LA(1) + + if _la == KuneiformParserLBRACE { { - p.SetState(573) - p.Match(KuneiformParserRPAREN) + p.SetState(591) + p.Match(KuneiformParserLBRACE) if p.HasError() { // Recognition error - abort rule goto errorExit } } - p.SetState(578) + p.SetState(605) p.GetErrorHandler().Sync(p) if p.HasError() { goto errorExit } _la = p.GetTokenStream().LA(1) - if _la == KuneiformParserDOUBLE_QUOTE || _la == KuneiformParserAS || _la == KuneiformParserIDENTIFIER { - p.SetState(575) + if _la == KuneiformParserDOUBLE_QUOTE || _la == KuneiformParserIDENTIFIER { + { + p.SetState(592) + p.Identifier() + } + { + p.SetState(593) + p.Match(KuneiformParserCOL) + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } + } + { + p.SetState(594) + p.action_expr(0) + } + p.SetState(602) p.GetErrorHandler().Sync(p) if p.HasError() { goto errorExit } _la = p.GetTokenStream().LA(1) - if _la == KuneiformParserAS { + for _la == KuneiformParserCOMMA { { - p.SetState(574) - p.Match(KuneiformParserAS) + p.SetState(595) + p.Match(KuneiformParserCOMMA) + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } + } + { + p.SetState(596) + p.Identifier() + } + { + p.SetState(597) + p.Match(KuneiformParserCOL) if p.HasError() { // Recognition error - abort rule goto errorExit } } + { + p.SetState(598) + p.action_expr(0) + } - } - { - p.SetState(577) - - var _x = p.Identifier() - - localctx.(*Subquery_relationContext).alias = _x - } - - } - - case 3: - localctx = NewFunction_relationContext(p, localctx) - p.EnterOuterAlt(localctx, 3) - { - p.SetState(580) - p.Sql_function_call() - } - - p.SetState(582) - p.GetErrorHandler().Sync(p) - if p.HasError() { - goto errorExit - } - _la = p.GetTokenStream().LA(1) - - if _la == KuneiformParserAS { - { - p.SetState(581) - p.Match(KuneiformParserAS) + p.SetState(604) + 
p.GetErrorHandler().Sync(p) if p.HasError() { - // Recognition error - abort rule goto errorExit } + _la = p.GetTokenStream().LA(1) } } - p.SetState(585) - p.GetErrorHandler().Sync(p) + { + p.SetState(607) + p.Match(KuneiformParserRBRACE) + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } + } + + } + { + p.SetState(610) + p.Match(KuneiformParserAS) if p.HasError() { + // Recognition error - abort rule goto errorExit } - _la = p.GetTokenStream().LA(1) - - if _la == KuneiformParserDOUBLE_QUOTE || _la == KuneiformParserIDENTIFIER { - { - p.SetState(584) - - var _x = p.Identifier() - - localctx.(*Function_relationContext).alias = _x - } + } + { + p.SetState(611) - } + var _x = p.Identifier() - case antlr.ATNInvalidAltNumber: - goto errorExit + localctx.(*Use_extension_statementContext).alias = _x } errorExit: @@ -9898,87 +10427,74 @@ errorExit: goto errorExit // Trick to prevent compiler error if the label is not used } -// IJoinContext is an interface to support dynamic dispatch. -type IJoinContext interface { +// IUnuse_extension_statementContext is an interface to support dynamic dispatch. +type IUnuse_extension_statementContext interface { antlr.ParserRuleContext // GetParser returns the parser. GetParser() antlr.Parser + // GetAlias returns the alias rule contexts. + GetAlias() IIdentifierContext + + // SetAlias sets the alias rule contexts. + SetAlias(IIdentifierContext) + // Getter signatures - JOIN() antlr.TerminalNode - Relation() IRelationContext - ON() antlr.TerminalNode - Sql_expr() ISql_exprContext - INNER() antlr.TerminalNode - LEFT() antlr.TerminalNode - RIGHT() antlr.TerminalNode - FULL() antlr.TerminalNode + UNUSE() antlr.TerminalNode + Identifier() IIdentifierContext + IF() antlr.TerminalNode + EXISTS() antlr.TerminalNode - // IsJoinContext differentiates from other interfaces. - IsJoinContext() + // IsUnuse_extension_statementContext differentiates from other interfaces. 
+ IsUnuse_extension_statementContext() } -type JoinContext struct { +type Unuse_extension_statementContext struct { antlr.BaseParserRuleContext parser antlr.Parser + alias IIdentifierContext } -func NewEmptyJoinContext() *JoinContext { - var p = new(JoinContext) +func NewEmptyUnuse_extension_statementContext() *Unuse_extension_statementContext { + var p = new(Unuse_extension_statementContext) antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1) - p.RuleIndex = KuneiformParserRULE_join + p.RuleIndex = KuneiformParserRULE_unuse_extension_statement return p } -func InitEmptyJoinContext(p *JoinContext) { +func InitEmptyUnuse_extension_statementContext(p *Unuse_extension_statementContext) { antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1) - p.RuleIndex = KuneiformParserRULE_join + p.RuleIndex = KuneiformParserRULE_unuse_extension_statement } -func (*JoinContext) IsJoinContext() {} +func (*Unuse_extension_statementContext) IsUnuse_extension_statementContext() {} -func NewJoinContext(parser antlr.Parser, parent antlr.ParserRuleContext, invokingState int) *JoinContext { - var p = new(JoinContext) +func NewUnuse_extension_statementContext(parser antlr.Parser, parent antlr.ParserRuleContext, invokingState int) *Unuse_extension_statementContext { + var p = new(Unuse_extension_statementContext) antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, parent, invokingState) p.parser = parser - p.RuleIndex = KuneiformParserRULE_join + p.RuleIndex = KuneiformParserRULE_unuse_extension_statement return p } -func (s *JoinContext) GetParser() antlr.Parser { return s.parser } - -func (s *JoinContext) JOIN() antlr.TerminalNode { - return s.GetToken(KuneiformParserJOIN, 0) -} - -func (s *JoinContext) Relation() IRelationContext { - var t antlr.RuleContext - for _, ctx := range s.GetChildren() { - if _, ok := ctx.(IRelationContext); ok { - t = ctx.(antlr.RuleContext) - break - } - } +func (s *Unuse_extension_statementContext) GetParser() antlr.Parser { return s.parser } - if t == nil { - return nil - } +func (s *Unuse_extension_statementContext) GetAlias() IIdentifierContext { return s.alias } - return t.(IRelationContext) -} +func (s *Unuse_extension_statementContext) SetAlias(v IIdentifierContext) { s.alias = v } -func (s *JoinContext) ON() antlr.TerminalNode { - return s.GetToken(KuneiformParserON, 0) +func (s *Unuse_extension_statementContext) UNUSE() antlr.TerminalNode { + return s.GetToken(KuneiformParserUNUSE, 0) } -func (s *JoinContext) Sql_expr() ISql_exprContext { +func (s *Unuse_extension_statementContext) Identifier() IIdentifierContext { var t antlr.RuleContext for _, ctx := range s.GetChildren() { - if _, ok := ctx.(ISql_exprContext); ok { + if _, ok := ctx.(IIdentifierContext); ok { t = ctx.(antlr.RuleContext) break } @@ -9988,93 +10504,81 @@ func (s *JoinContext) Sql_expr() ISql_exprContext { return nil } - return t.(ISql_exprContext) -} - -func (s *JoinContext) INNER() antlr.TerminalNode { - return s.GetToken(KuneiformParserINNER, 0) -} - -func (s *JoinContext) LEFT() antlr.TerminalNode { - return s.GetToken(KuneiformParserLEFT, 0) + return t.(IIdentifierContext) } -func (s *JoinContext) RIGHT() antlr.TerminalNode { - return s.GetToken(KuneiformParserRIGHT, 0) +func (s *Unuse_extension_statementContext) IF() antlr.TerminalNode { + return s.GetToken(KuneiformParserIF, 0) } -func (s *JoinContext) FULL() antlr.TerminalNode { - return s.GetToken(KuneiformParserFULL, 0) +func (s *Unuse_extension_statementContext) EXISTS() antlr.TerminalNode { + return 
s.GetToken(KuneiformParserEXISTS, 0) } -func (s *JoinContext) GetRuleContext() antlr.RuleContext { +func (s *Unuse_extension_statementContext) GetRuleContext() antlr.RuleContext { return s } -func (s *JoinContext) ToStringTree(ruleNames []string, recog antlr.Recognizer) string { +func (s *Unuse_extension_statementContext) ToStringTree(ruleNames []string, recog antlr.Recognizer) string { return antlr.TreesStringTree(s, ruleNames, recog) } -func (s *JoinContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { +func (s *Unuse_extension_statementContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { switch t := visitor.(type) { case KuneiformParserVisitor: - return t.VisitJoin(s) + return t.VisitUnuse_extension_statement(s) default: return t.VisitChildren(s) } } -func (p *KuneiformParser) Join() (localctx IJoinContext) { - localctx = NewJoinContext(p, p.GetParserRuleContext(), p.GetState()) - p.EnterRule(localctx, 74, KuneiformParserRULE_join) +func (p *KuneiformParser) Unuse_extension_statement() (localctx IUnuse_extension_statementContext) { + localctx = NewUnuse_extension_statementContext(p, p.GetParserRuleContext(), p.GetState()) + p.EnterRule(localctx, 70, KuneiformParserRULE_unuse_extension_statement) var _la int p.EnterOuterAlt(localctx, 1) - p.SetState(590) + { + p.SetState(613) + p.Match(KuneiformParserUNUSE) + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } + } + { + p.SetState(614) + + var _x = p.Identifier() + + localctx.(*Unuse_extension_statementContext).alias = _x + } + p.SetState(617) p.GetErrorHandler().Sync(p) if p.HasError() { goto errorExit } _la = p.GetTokenStream().LA(1) - if (int64((_la-70)) & ^0x3f) == 0 && ((int64(1)<<(_la-70))&67108871) != 0 { + if _la == KuneiformParserIF { { - p.SetState(589) - _la = p.GetTokenStream().LA(1) - - if !((int64((_la-70)) & ^0x3f) == 0 && ((int64(1)<<(_la-70))&67108871) != 0) { - p.GetErrorHandler().RecoverInline(p) - } else { - p.GetErrorHandler().ReportMatch(p) - p.Consume() + p.SetState(615) + p.Match(KuneiformParserIF) + if p.HasError() { + // Recognition error - abort rule + goto errorExit } } - - } - { - p.SetState(592) - p.Match(KuneiformParserJOIN) - if p.HasError() { - // Recognition error - abort rule - goto errorExit - } - } - { - p.SetState(593) - p.Relation() - } - { - p.SetState(594) - p.Match(KuneiformParserON) - if p.HasError() { - // Recognition error - abort rule - goto errorExit + { + p.SetState(616) + p.Match(KuneiformParserEXISTS) + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } } - } - { - p.SetState(595) - p.sql_expr(0) + } errorExit: @@ -10090,82 +10594,69 @@ errorExit: goto errorExit // Trick to prevent compiler error if the label is not used } -// IResult_columnContext is an interface to support dynamic dispatch. -type IResult_columnContext interface { +// ICreate_namespace_statementContext is an interface to support dynamic dispatch. +type ICreate_namespace_statementContext interface { antlr.ParserRuleContext // GetParser returns the parser. GetParser() antlr.Parser - // IsResult_columnContext differentiates from other interfaces. - IsResult_columnContext() + + // Getter signatures + CREATE() antlr.TerminalNode + NAMESPACE() antlr.TerminalNode + Identifier() IIdentifierContext + IF() antlr.TerminalNode + NOT() antlr.TerminalNode + EXISTS() antlr.TerminalNode + + // IsCreate_namespace_statementContext differentiates from other interfaces. 
+ IsCreate_namespace_statementContext() } -type Result_columnContext struct { +type Create_namespace_statementContext struct { antlr.BaseParserRuleContext parser antlr.Parser } -func NewEmptyResult_columnContext() *Result_columnContext { - var p = new(Result_columnContext) +func NewEmptyCreate_namespace_statementContext() *Create_namespace_statementContext { + var p = new(Create_namespace_statementContext) antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1) - p.RuleIndex = KuneiformParserRULE_result_column + p.RuleIndex = KuneiformParserRULE_create_namespace_statement return p } -func InitEmptyResult_columnContext(p *Result_columnContext) { +func InitEmptyCreate_namespace_statementContext(p *Create_namespace_statementContext) { antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1) - p.RuleIndex = KuneiformParserRULE_result_column + p.RuleIndex = KuneiformParserRULE_create_namespace_statement } -func (*Result_columnContext) IsResult_columnContext() {} +func (*Create_namespace_statementContext) IsCreate_namespace_statementContext() {} -func NewResult_columnContext(parser antlr.Parser, parent antlr.ParserRuleContext, invokingState int) *Result_columnContext { - var p = new(Result_columnContext) +func NewCreate_namespace_statementContext(parser antlr.Parser, parent antlr.ParserRuleContext, invokingState int) *Create_namespace_statementContext { + var p = new(Create_namespace_statementContext) antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, parent, invokingState) p.parser = parser - p.RuleIndex = KuneiformParserRULE_result_column + p.RuleIndex = KuneiformParserRULE_create_namespace_statement return p } -func (s *Result_columnContext) GetParser() antlr.Parser { return s.parser } - -func (s *Result_columnContext) CopyAll(ctx *Result_columnContext) { - s.CopyFrom(&ctx.BaseParserRuleContext) -} - -func (s *Result_columnContext) GetRuleContext() antlr.RuleContext { - return s -} - -func (s *Result_columnContext) ToStringTree(ruleNames []string, recog antlr.Recognizer) string { - return antlr.TreesStringTree(s, ruleNames, recog) -} - -type Expression_result_columnContext struct { - Result_columnContext -} - -func NewExpression_result_columnContext(parser antlr.Parser, ctx antlr.ParserRuleContext) *Expression_result_columnContext { - var p = new(Expression_result_columnContext) - - InitEmptyResult_columnContext(&p.Result_columnContext) - p.parser = parser - p.CopyAll(ctx.(*Result_columnContext)) +func (s *Create_namespace_statementContext) GetParser() antlr.Parser { return s.parser } - return p +func (s *Create_namespace_statementContext) CREATE() antlr.TerminalNode { + return s.GetToken(KuneiformParserCREATE, 0) } -func (s *Expression_result_columnContext) GetRuleContext() antlr.RuleContext { - return s +func (s *Create_namespace_statementContext) NAMESPACE() antlr.TerminalNode { + return s.GetToken(KuneiformParserNAMESPACE, 0) } -func (s *Expression_result_columnContext) Sql_expr() ISql_exprContext { +func (s *Create_namespace_statementContext) Identifier() IIdentifierContext { var t antlr.RuleContext for _, ctx := range s.GetChildren() { - if _, ok := ctx.(ISql_exprContext); ok { + if _, ok := ctx.(IIdentifierContext); ok { t = ctx.(antlr.RuleContext) break } @@ -10175,71 +10666,172 @@ func (s *Expression_result_columnContext) Sql_expr() ISql_exprContext { return nil } - return t.(ISql_exprContext) + return t.(IIdentifierContext) } -func (s *Expression_result_columnContext) Identifier() IIdentifierContext { - var t antlr.RuleContext - for _, ctx := range 
s.GetChildren() { - if _, ok := ctx.(IIdentifierContext); ok { - t = ctx.(antlr.RuleContext) - break - } - } +func (s *Create_namespace_statementContext) IF() antlr.TerminalNode { + return s.GetToken(KuneiformParserIF, 0) +} - if t == nil { - return nil - } +func (s *Create_namespace_statementContext) NOT() antlr.TerminalNode { + return s.GetToken(KuneiformParserNOT, 0) +} - return t.(IIdentifierContext) +func (s *Create_namespace_statementContext) EXISTS() antlr.TerminalNode { + return s.GetToken(KuneiformParserEXISTS, 0) } -func (s *Expression_result_columnContext) AS() antlr.TerminalNode { - return s.GetToken(KuneiformParserAS, 0) +func (s *Create_namespace_statementContext) GetRuleContext() antlr.RuleContext { + return s } -func (s *Expression_result_columnContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { +func (s *Create_namespace_statementContext) ToStringTree(ruleNames []string, recog antlr.Recognizer) string { + return antlr.TreesStringTree(s, ruleNames, recog) +} + +func (s *Create_namespace_statementContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { switch t := visitor.(type) { case KuneiformParserVisitor: - return t.VisitExpression_result_column(s) + return t.VisitCreate_namespace_statement(s) default: return t.VisitChildren(s) } } -type Wildcard_result_columnContext struct { - Result_columnContext - table_name IIdentifierContext +func (p *KuneiformParser) Create_namespace_statement() (localctx ICreate_namespace_statementContext) { + localctx = NewCreate_namespace_statementContext(p, p.GetParserRuleContext(), p.GetState()) + p.EnterRule(localctx, 72, KuneiformParserRULE_create_namespace_statement) + var _la int + + p.EnterOuterAlt(localctx, 1) + { + p.SetState(619) + p.Match(KuneiformParserCREATE) + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } + } + { + p.SetState(620) + p.Match(KuneiformParserNAMESPACE) + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } + } + p.SetState(624) + p.GetErrorHandler().Sync(p) + if p.HasError() { + goto errorExit + } + _la = p.GetTokenStream().LA(1) + + if _la == KuneiformParserIF { + { + p.SetState(621) + p.Match(KuneiformParserIF) + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } + } + { + p.SetState(622) + p.Match(KuneiformParserNOT) + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } + } + { + p.SetState(623) + p.Match(KuneiformParserEXISTS) + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } + } + + } + { + p.SetState(626) + p.Identifier() + } + +errorExit: + if p.HasError() { + v := p.GetError() + localctx.SetException(v) + p.GetErrorHandler().ReportError(p, v) + p.GetErrorHandler().Recover(p, v) + p.SetError(nil) + } + p.ExitRule() + return localctx + goto errorExit // Trick to prevent compiler error if the label is not used } -func NewWildcard_result_columnContext(parser antlr.Parser, ctx antlr.ParserRuleContext) *Wildcard_result_columnContext { - var p = new(Wildcard_result_columnContext) +// IDrop_namespace_statementContext is an interface to support dynamic dispatch. +type IDrop_namespace_statementContext interface { + antlr.ParserRuleContext - InitEmptyResult_columnContext(&p.Result_columnContext) - p.parser = parser - p.CopyAll(ctx.(*Result_columnContext)) + // GetParser returns the parser. 
+ GetParser() antlr.Parser + + // Getter signatures + DROP() antlr.TerminalNode + NAMESPACE() antlr.TerminalNode + Identifier() IIdentifierContext + IF() antlr.TerminalNode + EXISTS() antlr.TerminalNode + + // IsDrop_namespace_statementContext differentiates from other interfaces. + IsDrop_namespace_statementContext() +} + +type Drop_namespace_statementContext struct { + antlr.BaseParserRuleContext + parser antlr.Parser +} +func NewEmptyDrop_namespace_statementContext() *Drop_namespace_statementContext { + var p = new(Drop_namespace_statementContext) + antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1) + p.RuleIndex = KuneiformParserRULE_drop_namespace_statement return p } -func (s *Wildcard_result_columnContext) GetTable_name() IIdentifierContext { return s.table_name } +func InitEmptyDrop_namespace_statementContext(p *Drop_namespace_statementContext) { + antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1) + p.RuleIndex = KuneiformParserRULE_drop_namespace_statement +} -func (s *Wildcard_result_columnContext) SetTable_name(v IIdentifierContext) { s.table_name = v } +func (*Drop_namespace_statementContext) IsDrop_namespace_statementContext() {} -func (s *Wildcard_result_columnContext) GetRuleContext() antlr.RuleContext { - return s +func NewDrop_namespace_statementContext(parser antlr.Parser, parent antlr.ParserRuleContext, invokingState int) *Drop_namespace_statementContext { + var p = new(Drop_namespace_statementContext) + + antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, parent, invokingState) + + p.parser = parser + p.RuleIndex = KuneiformParserRULE_drop_namespace_statement + + return p } -func (s *Wildcard_result_columnContext) STAR() antlr.TerminalNode { - return s.GetToken(KuneiformParserSTAR, 0) +func (s *Drop_namespace_statementContext) GetParser() antlr.Parser { return s.parser } + +func (s *Drop_namespace_statementContext) DROP() antlr.TerminalNode { + return s.GetToken(KuneiformParserDROP, 0) } -func (s *Wildcard_result_columnContext) PERIOD() antlr.TerminalNode { - return s.GetToken(KuneiformParserPERIOD, 0) +func (s *Drop_namespace_statementContext) NAMESPACE() antlr.TerminalNode { + return s.GetToken(KuneiformParserNAMESPACE, 0) } -func (s *Wildcard_result_columnContext) Identifier() IIdentifierContext { +func (s *Drop_namespace_statementContext) Identifier() IIdentifierContext { var t antlr.RuleContext for _, ctx := range s.GetChildren() { if _, ok := ctx.(IIdentifierContext); ok { @@ -10255,107 +10847,83 @@ func (s *Wildcard_result_columnContext) Identifier() IIdentifierContext { return t.(IIdentifierContext) } -func (s *Wildcard_result_columnContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { +func (s *Drop_namespace_statementContext) IF() antlr.TerminalNode { + return s.GetToken(KuneiformParserIF, 0) +} + +func (s *Drop_namespace_statementContext) EXISTS() antlr.TerminalNode { + return s.GetToken(KuneiformParserEXISTS, 0) +} + +func (s *Drop_namespace_statementContext) GetRuleContext() antlr.RuleContext { + return s +} + +func (s *Drop_namespace_statementContext) ToStringTree(ruleNames []string, recog antlr.Recognizer) string { + return antlr.TreesStringTree(s, ruleNames, recog) +} + +func (s *Drop_namespace_statementContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { switch t := visitor.(type) { case KuneiformParserVisitor: - return t.VisitWildcard_result_column(s) + return t.VisitDrop_namespace_statement(s) default: return t.VisitChildren(s) } } -func (p *KuneiformParser) Result_column() (localctx 
IResult_columnContext) { - localctx = NewResult_columnContext(p, p.GetParserRuleContext(), p.GetState()) - p.EnterRule(localctx, 76, KuneiformParserRULE_result_column) +func (p *KuneiformParser) Drop_namespace_statement() (localctx IDrop_namespace_statementContext) { + localctx = NewDrop_namespace_statementContext(p, p.GetParserRuleContext(), p.GetState()) + p.EnterRule(localctx, 74, KuneiformParserRULE_drop_namespace_statement) var _la int - p.SetState(610) + p.EnterOuterAlt(localctx, 1) + { + p.SetState(628) + p.Match(KuneiformParserDROP) + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } + } + { + p.SetState(629) + p.Match(KuneiformParserNAMESPACE) + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } + } + p.SetState(632) p.GetErrorHandler().Sync(p) if p.HasError() { goto errorExit } + _la = p.GetTokenStream().LA(1) - switch p.GetInterpreter().AdaptivePredict(p.BaseParser, p.GetTokenStream(), 77, p.GetParserRuleContext()) { - case 1: - localctx = NewExpression_result_columnContext(p, localctx) - p.EnterOuterAlt(localctx, 1) + if _la == KuneiformParserIF { { - p.SetState(597) - p.sql_expr(0) - } - p.SetState(602) - p.GetErrorHandler().Sync(p) - if p.HasError() { - goto errorExit - } - _la = p.GetTokenStream().LA(1) - - if _la == KuneiformParserDOUBLE_QUOTE || _la == KuneiformParserAS || _la == KuneiformParserIDENTIFIER { - p.SetState(599) - p.GetErrorHandler().Sync(p) + p.SetState(630) + p.Match(KuneiformParserIF) if p.HasError() { + // Recognition error - abort rule goto errorExit } - _la = p.GetTokenStream().LA(1) - - if _la == KuneiformParserAS { - { - p.SetState(598) - p.Match(KuneiformParserAS) - if p.HasError() { - // Recognition error - abort rule - goto errorExit - } - } - - } - { - p.SetState(601) - p.Identifier() - } - - } - - case 2: - localctx = NewWildcard_result_columnContext(p, localctx) - p.EnterOuterAlt(localctx, 2) - p.SetState(607) - p.GetErrorHandler().Sync(p) - if p.HasError() { - goto errorExit - } - _la = p.GetTokenStream().LA(1) - - if _la == KuneiformParserDOUBLE_QUOTE || _la == KuneiformParserIDENTIFIER { - { - p.SetState(604) - - var _x = p.Identifier() - - localctx.(*Wildcard_result_columnContext).table_name = _x - } - { - p.SetState(605) - p.Match(KuneiformParserPERIOD) - if p.HasError() { - // Recognition error - abort rule - goto errorExit - } - } - } { - p.SetState(609) - p.Match(KuneiformParserSTAR) + p.SetState(631) + p.Match(KuneiformParserEXISTS) if p.HasError() { // Recognition error - abort rule goto errorExit } } - case antlr.ATNInvalidAltNumber: - goto errorExit + } + { + p.SetState(634) + p.Identifier() } errorExit: @@ -10371,121 +10939,101 @@ errorExit: goto errorExit // Trick to prevent compiler error if the label is not used } -// IUpdate_statementContext is an interface to support dynamic dispatch. -type IUpdate_statementContext interface { +// ISelect_statementContext is an interface to support dynamic dispatch. +type ISelect_statementContext interface { antlr.ParserRuleContext // GetParser returns the parser. GetParser() antlr.Parser - // GetTable_name returns the table_name rule contexts. - GetTable_name() IIdentifierContext - - // GetAlias returns the alias rule contexts. - GetAlias() IIdentifierContext - - // GetWhere returns the where rule contexts. - GetWhere() ISql_exprContext + // GetLimit returns the limit rule contexts. + GetLimit() ISql_exprContext - // SetTable_name sets the table_name rule contexts. 
- SetTable_name(IIdentifierContext) + // GetOffset returns the offset rule contexts. + GetOffset() ISql_exprContext - // SetAlias sets the alias rule contexts. - SetAlias(IIdentifierContext) + // SetLimit sets the limit rule contexts. + SetLimit(ISql_exprContext) - // SetWhere sets the where rule contexts. - SetWhere(ISql_exprContext) + // SetOffset sets the offset rule contexts. + SetOffset(ISql_exprContext) // Getter signatures - UPDATE() antlr.TerminalNode - SET() antlr.TerminalNode - AllUpdate_set_clause() []IUpdate_set_clauseContext - Update_set_clause(i int) IUpdate_set_clauseContext - AllIdentifier() []IIdentifierContext - Identifier(i int) IIdentifierContext + AllSelect_core() []ISelect_coreContext + Select_core(i int) ISelect_coreContext + AllCompound_operator() []ICompound_operatorContext + Compound_operator(i int) ICompound_operatorContext + ORDER() antlr.TerminalNode + BY() antlr.TerminalNode + AllOrdering_term() []IOrdering_termContext + Ordering_term(i int) IOrdering_termContext + LIMIT() antlr.TerminalNode + OFFSET() antlr.TerminalNode + AllSql_expr() []ISql_exprContext + Sql_expr(i int) ISql_exprContext AllCOMMA() []antlr.TerminalNode COMMA(i int) antlr.TerminalNode - FROM() antlr.TerminalNode - Relation() IRelationContext - WHERE() antlr.TerminalNode - Sql_expr() ISql_exprContext - AS() antlr.TerminalNode - AllJoin() []IJoinContext - Join(i int) IJoinContext - // IsUpdate_statementContext differentiates from other interfaces. - IsUpdate_statementContext() + // IsSelect_statementContext differentiates from other interfaces. + IsSelect_statementContext() } -type Update_statementContext struct { +type Select_statementContext struct { antlr.BaseParserRuleContext - parser antlr.Parser - table_name IIdentifierContext - alias IIdentifierContext - where ISql_exprContext + parser antlr.Parser + limit ISql_exprContext + offset ISql_exprContext } -func NewEmptyUpdate_statementContext() *Update_statementContext { - var p = new(Update_statementContext) +func NewEmptySelect_statementContext() *Select_statementContext { + var p = new(Select_statementContext) antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1) - p.RuleIndex = KuneiformParserRULE_update_statement + p.RuleIndex = KuneiformParserRULE_select_statement return p } -func InitEmptyUpdate_statementContext(p *Update_statementContext) { +func InitEmptySelect_statementContext(p *Select_statementContext) { antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1) - p.RuleIndex = KuneiformParserRULE_update_statement + p.RuleIndex = KuneiformParserRULE_select_statement } -func (*Update_statementContext) IsUpdate_statementContext() {} +func (*Select_statementContext) IsSelect_statementContext() {} -func NewUpdate_statementContext(parser antlr.Parser, parent antlr.ParserRuleContext, invokingState int) *Update_statementContext { - var p = new(Update_statementContext) +func NewSelect_statementContext(parser antlr.Parser, parent antlr.ParserRuleContext, invokingState int) *Select_statementContext { + var p = new(Select_statementContext) antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, parent, invokingState) p.parser = parser - p.RuleIndex = KuneiformParserRULE_update_statement + p.RuleIndex = KuneiformParserRULE_select_statement return p } -func (s *Update_statementContext) GetParser() antlr.Parser { return s.parser } - -func (s *Update_statementContext) GetTable_name() IIdentifierContext { return s.table_name } - -func (s *Update_statementContext) GetAlias() IIdentifierContext { return s.alias } - -func (s 
*Update_statementContext) GetWhere() ISql_exprContext { return s.where } - -func (s *Update_statementContext) SetTable_name(v IIdentifierContext) { s.table_name = v } +func (s *Select_statementContext) GetParser() antlr.Parser { return s.parser } -func (s *Update_statementContext) SetAlias(v IIdentifierContext) { s.alias = v } +func (s *Select_statementContext) GetLimit() ISql_exprContext { return s.limit } -func (s *Update_statementContext) SetWhere(v ISql_exprContext) { s.where = v } +func (s *Select_statementContext) GetOffset() ISql_exprContext { return s.offset } -func (s *Update_statementContext) UPDATE() antlr.TerminalNode { - return s.GetToken(KuneiformParserUPDATE, 0) -} +func (s *Select_statementContext) SetLimit(v ISql_exprContext) { s.limit = v } -func (s *Update_statementContext) SET() antlr.TerminalNode { - return s.GetToken(KuneiformParserSET, 0) -} +func (s *Select_statementContext) SetOffset(v ISql_exprContext) { s.offset = v } -func (s *Update_statementContext) AllUpdate_set_clause() []IUpdate_set_clauseContext { +func (s *Select_statementContext) AllSelect_core() []ISelect_coreContext { children := s.GetChildren() len := 0 for _, ctx := range children { - if _, ok := ctx.(IUpdate_set_clauseContext); ok { + if _, ok := ctx.(ISelect_coreContext); ok { len++ } } - tst := make([]IUpdate_set_clauseContext, len) + tst := make([]ISelect_coreContext, len) i := 0 for _, ctx := range children { - if t, ok := ctx.(IUpdate_set_clauseContext); ok { - tst[i] = t.(IUpdate_set_clauseContext) + if t, ok := ctx.(ISelect_coreContext); ok { + tst[i] = t.(ISelect_coreContext) i++ } } @@ -10493,11 +11041,11 @@ func (s *Update_statementContext) AllUpdate_set_clause() []IUpdate_set_clauseCon return tst } -func (s *Update_statementContext) Update_set_clause(i int) IUpdate_set_clauseContext { +func (s *Select_statementContext) Select_core(i int) ISelect_coreContext { var t antlr.RuleContext j := 0 for _, ctx := range s.GetChildren() { - if _, ok := ctx.(IUpdate_set_clauseContext); ok { + if _, ok := ctx.(ISelect_coreContext); ok { if j == i { t = ctx.(antlr.RuleContext) break @@ -10510,23 +11058,23 @@ func (s *Update_statementContext) Update_set_clause(i int) IUpdate_set_clauseCon return nil } - return t.(IUpdate_set_clauseContext) + return t.(ISelect_coreContext) } -func (s *Update_statementContext) AllIdentifier() []IIdentifierContext { +func (s *Select_statementContext) AllCompound_operator() []ICompound_operatorContext { children := s.GetChildren() len := 0 for _, ctx := range children { - if _, ok := ctx.(IIdentifierContext); ok { + if _, ok := ctx.(ICompound_operatorContext); ok { len++ } } - tst := make([]IIdentifierContext, len) + tst := make([]ICompound_operatorContext, len) i := 0 for _, ctx := range children { - if t, ok := ctx.(IIdentifierContext); ok { - tst[i] = t.(IIdentifierContext) + if t, ok := ctx.(ICompound_operatorContext); ok { + tst[i] = t.(ICompound_operatorContext) i++ } } @@ -10534,11 +11082,11 @@ func (s *Update_statementContext) AllIdentifier() []IIdentifierContext { return tst } -func (s *Update_statementContext) Identifier(i int) IIdentifierContext { +func (s *Select_statementContext) Compound_operator(i int) ICompound_operatorContext { var t antlr.RuleContext j := 0 for _, ctx := range s.GetChildren() { - if _, ok := ctx.(IIdentifierContext); ok { + if _, ok := ctx.(ICompound_operatorContext); ok { if j == i { t = ctx.(antlr.RuleContext) break @@ -10551,47 +11099,48 @@ func (s *Update_statementContext) Identifier(i int) IIdentifierContext { return nil } - return 
t.(IIdentifierContext) -} - -func (s *Update_statementContext) AllCOMMA() []antlr.TerminalNode { - return s.GetTokens(KuneiformParserCOMMA) + return t.(ICompound_operatorContext) } -func (s *Update_statementContext) COMMA(i int) antlr.TerminalNode { - return s.GetToken(KuneiformParserCOMMA, i) +func (s *Select_statementContext) ORDER() antlr.TerminalNode { + return s.GetToken(KuneiformParserORDER, 0) } -func (s *Update_statementContext) FROM() antlr.TerminalNode { - return s.GetToken(KuneiformParserFROM, 0) +func (s *Select_statementContext) BY() antlr.TerminalNode { + return s.GetToken(KuneiformParserBY, 0) } -func (s *Update_statementContext) Relation() IRelationContext { - var t antlr.RuleContext - for _, ctx := range s.GetChildren() { - if _, ok := ctx.(IRelationContext); ok { - t = ctx.(antlr.RuleContext) - break +func (s *Select_statementContext) AllOrdering_term() []IOrdering_termContext { + children := s.GetChildren() + len := 0 + for _, ctx := range children { + if _, ok := ctx.(IOrdering_termContext); ok { + len++ } } - if t == nil { - return nil + tst := make([]IOrdering_termContext, len) + i := 0 + for _, ctx := range children { + if t, ok := ctx.(IOrdering_termContext); ok { + tst[i] = t.(IOrdering_termContext) + i++ + } } - return t.(IRelationContext) -} - -func (s *Update_statementContext) WHERE() antlr.TerminalNode { - return s.GetToken(KuneiformParserWHERE, 0) + return tst } -func (s *Update_statementContext) Sql_expr() ISql_exprContext { +func (s *Select_statementContext) Ordering_term(i int) IOrdering_termContext { var t antlr.RuleContext + j := 0 for _, ctx := range s.GetChildren() { - if _, ok := ctx.(ISql_exprContext); ok { - t = ctx.(antlr.RuleContext) - break + if _, ok := ctx.(IOrdering_termContext); ok { + if j == i { + t = ctx.(antlr.RuleContext) + break + } + j++ } } @@ -10599,27 +11148,31 @@ func (s *Update_statementContext) Sql_expr() ISql_exprContext { return nil } - return t.(ISql_exprContext) + return t.(IOrdering_termContext) } -func (s *Update_statementContext) AS() antlr.TerminalNode { - return s.GetToken(KuneiformParserAS, 0) +func (s *Select_statementContext) LIMIT() antlr.TerminalNode { + return s.GetToken(KuneiformParserLIMIT, 0) } -func (s *Update_statementContext) AllJoin() []IJoinContext { +func (s *Select_statementContext) OFFSET() antlr.TerminalNode { + return s.GetToken(KuneiformParserOFFSET, 0) +} + +func (s *Select_statementContext) AllSql_expr() []ISql_exprContext { children := s.GetChildren() len := 0 for _, ctx := range children { - if _, ok := ctx.(IJoinContext); ok { + if _, ok := ctx.(ISql_exprContext); ok { len++ } } - tst := make([]IJoinContext, len) + tst := make([]ISql_exprContext, len) i := 0 for _, ctx := range children { - if t, ok := ctx.(IJoinContext); ok { - tst[i] = t.(IJoinContext) + if t, ok := ctx.(ISql_exprContext); ok { + tst[i] = t.(ISql_exprContext) i++ } } @@ -10627,11 +11180,11 @@ func (s *Update_statementContext) AllJoin() []IJoinContext { return tst } -func (s *Update_statementContext) Join(i int) IJoinContext { +func (s *Select_statementContext) Sql_expr(i int) ISql_exprContext { var t antlr.RuleContext j := 0 for _, ctx := range s.GetChildren() { - if _, ok := ctx.(IJoinContext); ok { + if _, ok := ctx.(ISql_exprContext); ok { if j == i { t = ctx.(antlr.RuleContext) break @@ -10644,157 +11197,119 @@ func (s *Update_statementContext) Join(i int) IJoinContext { return nil } - return t.(IJoinContext) + return t.(ISql_exprContext) } -func (s *Update_statementContext) GetRuleContext() antlr.RuleContext { +func (s 
*Select_statementContext) AllCOMMA() []antlr.TerminalNode { + return s.GetTokens(KuneiformParserCOMMA) +} + +func (s *Select_statementContext) COMMA(i int) antlr.TerminalNode { + return s.GetToken(KuneiformParserCOMMA, i) +} + +func (s *Select_statementContext) GetRuleContext() antlr.RuleContext { return s } -func (s *Update_statementContext) ToStringTree(ruleNames []string, recog antlr.Recognizer) string { +func (s *Select_statementContext) ToStringTree(ruleNames []string, recog antlr.Recognizer) string { return antlr.TreesStringTree(s, ruleNames, recog) } -func (s *Update_statementContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { +func (s *Select_statementContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { switch t := visitor.(type) { case KuneiformParserVisitor: - return t.VisitUpdate_statement(s) + return t.VisitSelect_statement(s) default: return t.VisitChildren(s) } } -func (p *KuneiformParser) Update_statement() (localctx IUpdate_statementContext) { - localctx = NewUpdate_statementContext(p, p.GetParserRuleContext(), p.GetState()) - p.EnterRule(localctx, 78, KuneiformParserRULE_update_statement) +func (p *KuneiformParser) Select_statement() (localctx ISelect_statementContext) { + localctx = NewSelect_statementContext(p, p.GetParserRuleContext(), p.GetState()) + p.EnterRule(localctx, 76, KuneiformParserRULE_select_statement) var _la int p.EnterOuterAlt(localctx, 1) { - p.SetState(612) - p.Match(KuneiformParserUPDATE) - if p.HasError() { - // Recognition error - abort rule - goto errorExit - } - } - { - p.SetState(613) - - var _x = p.Identifier() - - localctx.(*Update_statementContext).table_name = _x + p.SetState(636) + p.Select_core() } - p.SetState(618) + p.SetState(642) p.GetErrorHandler().Sync(p) if p.HasError() { goto errorExit } _la = p.GetTokenStream().LA(1) - if _la == KuneiformParserDOUBLE_QUOTE || _la == KuneiformParserAS || _la == KuneiformParserIDENTIFIER { - p.SetState(615) - p.GetErrorHandler().Sync(p) - if p.HasError() { - goto errorExit - } - _la = p.GetTokenStream().LA(1) - - if _la == KuneiformParserAS { - { - p.SetState(614) - p.Match(KuneiformParserAS) - if p.HasError() { - // Recognition error - abort rule - goto errorExit - } - } - + for (int64((_la-101)) & ^0x3f) == 0 && ((int64(1)<<(_la-101))&7) != 0 { + { + p.SetState(637) + p.Compound_operator() } { - p.SetState(617) - - var _x = p.Identifier() - - localctx.(*Update_statementContext).alias = _x + p.SetState(638) + p.Select_core() } - } - { - p.SetState(620) - p.Match(KuneiformParserSET) + p.SetState(644) + p.GetErrorHandler().Sync(p) if p.HasError() { - // Recognition error - abort rule goto errorExit } + _la = p.GetTokenStream().LA(1) } - { - p.SetState(621) - p.Update_set_clause() - } - p.SetState(626) + p.SetState(655) p.GetErrorHandler().Sync(p) if p.HasError() { goto errorExit } _la = p.GetTokenStream().LA(1) - for _la == KuneiformParserCOMMA { + if _la == KuneiformParserORDER { { - p.SetState(622) - p.Match(KuneiformParserCOMMA) + p.SetState(645) + p.Match(KuneiformParserORDER) if p.HasError() { // Recognition error - abort rule goto errorExit } } { - p.SetState(623) - p.Update_set_clause() - } - - p.SetState(628) - p.GetErrorHandler().Sync(p) - if p.HasError() { - goto errorExit - } - _la = p.GetTokenStream().LA(1) - } - p.SetState(637) - p.GetErrorHandler().Sync(p) - if p.HasError() { - goto errorExit - } - _la = p.GetTokenStream().LA(1) - - if _la == KuneiformParserFROM { - { - p.SetState(629) - p.Match(KuneiformParserFROM) + p.SetState(646) + p.Match(KuneiformParserBY) if 
p.HasError() { // Recognition error - abort rule goto errorExit } } { - p.SetState(630) - p.Relation() + p.SetState(647) + p.Ordering_term() } - p.SetState(634) + p.SetState(652) p.GetErrorHandler().Sync(p) if p.HasError() { goto errorExit } _la = p.GetTokenStream().LA(1) - for (int64((_la-69)) & ^0x3f) == 0 && ((int64(1)<<(_la-69))&134217743) != 0 { + for _la == KuneiformParserCOMMA { { - p.SetState(631) - p.Join() - } + p.SetState(648) + p.Match(KuneiformParserCOMMA) + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } + } + { + p.SetState(649) + p.Ordering_term() + } - p.SetState(636) + p.SetState(654) p.GetErrorHandler().Sync(p) if p.HasError() { goto errorExit @@ -10803,28 +11318,53 @@ func (p *KuneiformParser) Update_statement() (localctx IUpdate_statementContext) } } - p.SetState(641) + p.SetState(659) p.GetErrorHandler().Sync(p) if p.HasError() { goto errorExit } _la = p.GetTokenStream().LA(1) - if _la == KuneiformParserWHERE { + if _la == KuneiformParserLIMIT { { - p.SetState(639) - p.Match(KuneiformParserWHERE) + p.SetState(657) + p.Match(KuneiformParserLIMIT) if p.HasError() { // Recognition error - abort rule goto errorExit } } { - p.SetState(640) + p.SetState(658) var _x = p.sql_expr(0) - localctx.(*Update_statementContext).where = _x + localctx.(*Select_statementContext).limit = _x + } + + } + p.SetState(663) + p.GetErrorHandler().Sync(p) + if p.HasError() { + goto errorExit + } + _la = p.GetTokenStream().LA(1) + + if _la == KuneiformParserOFFSET { + { + p.SetState(661) + p.Match(KuneiformParserOFFSET) + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } + } + { + p.SetState(662) + + var _x = p.sql_expr(0) + + localctx.(*Select_statementContext).offset = _x } } @@ -10842,141 +11382,155 @@ errorExit: goto errorExit // Trick to prevent compiler error if the label is not used } -// IUpdate_set_clauseContext is an interface to support dynamic dispatch. -type IUpdate_set_clauseContext interface { +// ICompound_operatorContext is an interface to support dynamic dispatch. +type ICompound_operatorContext interface { antlr.ParserRuleContext // GetParser returns the parser. GetParser() antlr.Parser - // GetColumn returns the column rule contexts. - GetColumn() IIdentifierContext - - // SetColumn sets the column rule contexts. - SetColumn(IIdentifierContext) - // Getter signatures - EQUALS() antlr.TerminalNode - Sql_expr() ISql_exprContext - Identifier() IIdentifierContext + UNION() antlr.TerminalNode + ALL() antlr.TerminalNode + INTERSECT() antlr.TerminalNode + EXCEPT() antlr.TerminalNode - // IsUpdate_set_clauseContext differentiates from other interfaces. - IsUpdate_set_clauseContext() + // IsCompound_operatorContext differentiates from other interfaces. 
+ IsCompound_operatorContext() } -type Update_set_clauseContext struct { +type Compound_operatorContext struct { antlr.BaseParserRuleContext parser antlr.Parser - column IIdentifierContext } -func NewEmptyUpdate_set_clauseContext() *Update_set_clauseContext { - var p = new(Update_set_clauseContext) +func NewEmptyCompound_operatorContext() *Compound_operatorContext { + var p = new(Compound_operatorContext) antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1) - p.RuleIndex = KuneiformParserRULE_update_set_clause + p.RuleIndex = KuneiformParserRULE_compound_operator return p } -func InitEmptyUpdate_set_clauseContext(p *Update_set_clauseContext) { +func InitEmptyCompound_operatorContext(p *Compound_operatorContext) { antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1) - p.RuleIndex = KuneiformParserRULE_update_set_clause + p.RuleIndex = KuneiformParserRULE_compound_operator } -func (*Update_set_clauseContext) IsUpdate_set_clauseContext() {} +func (*Compound_operatorContext) IsCompound_operatorContext() {} -func NewUpdate_set_clauseContext(parser antlr.Parser, parent antlr.ParserRuleContext, invokingState int) *Update_set_clauseContext { - var p = new(Update_set_clauseContext) +func NewCompound_operatorContext(parser antlr.Parser, parent antlr.ParserRuleContext, invokingState int) *Compound_operatorContext { + var p = new(Compound_operatorContext) antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, parent, invokingState) p.parser = parser - p.RuleIndex = KuneiformParserRULE_update_set_clause + p.RuleIndex = KuneiformParserRULE_compound_operator return p } -func (s *Update_set_clauseContext) GetParser() antlr.Parser { return s.parser } - -func (s *Update_set_clauseContext) GetColumn() IIdentifierContext { return s.column } - -func (s *Update_set_clauseContext) SetColumn(v IIdentifierContext) { s.column = v } +func (s *Compound_operatorContext) GetParser() antlr.Parser { return s.parser } -func (s *Update_set_clauseContext) EQUALS() antlr.TerminalNode { - return s.GetToken(KuneiformParserEQUALS, 0) +func (s *Compound_operatorContext) UNION() antlr.TerminalNode { + return s.GetToken(KuneiformParserUNION, 0) } -func (s *Update_set_clauseContext) Sql_expr() ISql_exprContext { - var t antlr.RuleContext - for _, ctx := range s.GetChildren() { - if _, ok := ctx.(ISql_exprContext); ok { - t = ctx.(antlr.RuleContext) - break - } - } - - if t == nil { - return nil - } - - return t.(ISql_exprContext) +func (s *Compound_operatorContext) ALL() antlr.TerminalNode { + return s.GetToken(KuneiformParserALL, 0) } -func (s *Update_set_clauseContext) Identifier() IIdentifierContext { - var t antlr.RuleContext - for _, ctx := range s.GetChildren() { - if _, ok := ctx.(IIdentifierContext); ok { - t = ctx.(antlr.RuleContext) - break - } - } - - if t == nil { - return nil - } +func (s *Compound_operatorContext) INTERSECT() antlr.TerminalNode { + return s.GetToken(KuneiformParserINTERSECT, 0) +} - return t.(IIdentifierContext) +func (s *Compound_operatorContext) EXCEPT() antlr.TerminalNode { + return s.GetToken(KuneiformParserEXCEPT, 0) } -func (s *Update_set_clauseContext) GetRuleContext() antlr.RuleContext { +func (s *Compound_operatorContext) GetRuleContext() antlr.RuleContext { return s } -func (s *Update_set_clauseContext) ToStringTree(ruleNames []string, recog antlr.Recognizer) string { +func (s *Compound_operatorContext) ToStringTree(ruleNames []string, recog antlr.Recognizer) string { return antlr.TreesStringTree(s, ruleNames, recog) } -func (s *Update_set_clauseContext) 
Accept(visitor antlr.ParseTreeVisitor) interface{} { +func (s *Compound_operatorContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { switch t := visitor.(type) { case KuneiformParserVisitor: - return t.VisitUpdate_set_clause(s) + return t.VisitCompound_operator(s) default: return t.VisitChildren(s) } } -func (p *KuneiformParser) Update_set_clause() (localctx IUpdate_set_clauseContext) { - localctx = NewUpdate_set_clauseContext(p, p.GetParserRuleContext(), p.GetState()) - p.EnterRule(localctx, 80, KuneiformParserRULE_update_set_clause) - p.EnterOuterAlt(localctx, 1) - { - p.SetState(643) - - var _x = p.Identifier() +func (p *KuneiformParser) Compound_operator() (localctx ICompound_operatorContext) { + localctx = NewCompound_operatorContext(p, p.GetParserRuleContext(), p.GetState()) + p.EnterRule(localctx, 78, KuneiformParserRULE_compound_operator) + var _la int - localctx.(*Update_set_clauseContext).column = _x + p.SetState(671) + p.GetErrorHandler().Sync(p) + if p.HasError() { + goto errorExit } - { - p.SetState(644) - p.Match(KuneiformParserEQUALS) + + switch p.GetTokenStream().LA(1) { + case KuneiformParserUNION: + p.EnterOuterAlt(localctx, 1) + { + p.SetState(665) + p.Match(KuneiformParserUNION) + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } + } + p.SetState(667) + p.GetErrorHandler().Sync(p) if p.HasError() { - // Recognition error - abort rule goto errorExit } - } - { - p.SetState(645) - p.sql_expr(0) + _la = p.GetTokenStream().LA(1) + + if _la == KuneiformParserALL { + { + p.SetState(666) + p.Match(KuneiformParserALL) + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } + } + + } + + case KuneiformParserINTERSECT: + p.EnterOuterAlt(localctx, 2) + { + p.SetState(669) + p.Match(KuneiformParserINTERSECT) + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } + } + + case KuneiformParserEXCEPT: + p.EnterOuterAlt(localctx, 3) + { + p.SetState(670) + p.Match(KuneiformParserEXCEPT) + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } + } + + default: + p.SetError(antlr.NewNoViableAltException(p, nil, nil, nil, nil, nil)) + goto errorExit } errorExit: @@ -10992,202 +11546,63 @@ errorExit: goto errorExit // Trick to prevent compiler error if the label is not used } -// IInsert_statementContext is an interface to support dynamic dispatch. -type IInsert_statementContext interface { +// IOrdering_termContext is an interface to support dynamic dispatch. +type IOrdering_termContext interface { antlr.ParserRuleContext // GetParser returns the parser. GetParser() antlr.Parser - // GetTable_name returns the table_name rule contexts. - GetTable_name() IIdentifierContext + // Getter signatures + Sql_expr() ISql_exprContext + NULLS() antlr.TerminalNode + ASC() antlr.TerminalNode + DESC() antlr.TerminalNode + FIRST() antlr.TerminalNode + LAST() antlr.TerminalNode - // GetAlias returns the alias rule contexts. - GetAlias() IIdentifierContext + // IsOrdering_termContext differentiates from other interfaces. + IsOrdering_termContext() +} - // GetTarget_columns returns the target_columns rule contexts. - GetTarget_columns() IIdentifier_listContext +type Ordering_termContext struct { + antlr.BaseParserRuleContext + parser antlr.Parser +} - // SetTable_name sets the table_name rule contexts. 
- SetTable_name(IIdentifierContext) +func NewEmptyOrdering_termContext() *Ordering_termContext { + var p = new(Ordering_termContext) + antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1) + p.RuleIndex = KuneiformParserRULE_ordering_term + return p +} - // SetAlias sets the alias rule contexts. - SetAlias(IIdentifierContext) +func InitEmptyOrdering_termContext(p *Ordering_termContext) { + antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1) + p.RuleIndex = KuneiformParserRULE_ordering_term +} - // SetTarget_columns sets the target_columns rule contexts. - SetTarget_columns(IIdentifier_listContext) +func (*Ordering_termContext) IsOrdering_termContext() {} - // Getter signatures - INSERT() antlr.TerminalNode - INTO() antlr.TerminalNode - VALUES() antlr.TerminalNode - AllLPAREN() []antlr.TerminalNode - LPAREN(i int) antlr.TerminalNode - AllSql_expr_list() []ISql_expr_listContext - Sql_expr_list(i int) ISql_expr_listContext - AllRPAREN() []antlr.TerminalNode - RPAREN(i int) antlr.TerminalNode - AllIdentifier() []IIdentifierContext - Identifier(i int) IIdentifierContext - AllCOMMA() []antlr.TerminalNode - COMMA(i int) antlr.TerminalNode - Upsert_clause() IUpsert_clauseContext - Identifier_list() IIdentifier_listContext - AS() antlr.TerminalNode - - // IsInsert_statementContext differentiates from other interfaces. - IsInsert_statementContext() -} - -type Insert_statementContext struct { - antlr.BaseParserRuleContext - parser antlr.Parser - table_name IIdentifierContext - alias IIdentifierContext - target_columns IIdentifier_listContext -} - -func NewEmptyInsert_statementContext() *Insert_statementContext { - var p = new(Insert_statementContext) - antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1) - p.RuleIndex = KuneiformParserRULE_insert_statement - return p -} - -func InitEmptyInsert_statementContext(p *Insert_statementContext) { - antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1) - p.RuleIndex = KuneiformParserRULE_insert_statement -} - -func (*Insert_statementContext) IsInsert_statementContext() {} - -func NewInsert_statementContext(parser antlr.Parser, parent antlr.ParserRuleContext, invokingState int) *Insert_statementContext { - var p = new(Insert_statementContext) +func NewOrdering_termContext(parser antlr.Parser, parent antlr.ParserRuleContext, invokingState int) *Ordering_termContext { + var p = new(Ordering_termContext) antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, parent, invokingState) p.parser = parser - p.RuleIndex = KuneiformParserRULE_insert_statement + p.RuleIndex = KuneiformParserRULE_ordering_term return p } -func (s *Insert_statementContext) GetParser() antlr.Parser { return s.parser } - -func (s *Insert_statementContext) GetTable_name() IIdentifierContext { return s.table_name } - -func (s *Insert_statementContext) GetAlias() IIdentifierContext { return s.alias } - -func (s *Insert_statementContext) GetTarget_columns() IIdentifier_listContext { - return s.target_columns -} - -func (s *Insert_statementContext) SetTable_name(v IIdentifierContext) { s.table_name = v } - -func (s *Insert_statementContext) SetAlias(v IIdentifierContext) { s.alias = v } - -func (s *Insert_statementContext) SetTarget_columns(v IIdentifier_listContext) { s.target_columns = v } - -func (s *Insert_statementContext) INSERT() antlr.TerminalNode { - return s.GetToken(KuneiformParserINSERT, 0) -} - -func (s *Insert_statementContext) INTO() antlr.TerminalNode { - return s.GetToken(KuneiformParserINTO, 0) -} - -func (s 
*Insert_statementContext) VALUES() antlr.TerminalNode { - return s.GetToken(KuneiformParserVALUES, 0) -} - -func (s *Insert_statementContext) AllLPAREN() []antlr.TerminalNode { - return s.GetTokens(KuneiformParserLPAREN) -} - -func (s *Insert_statementContext) LPAREN(i int) antlr.TerminalNode { - return s.GetToken(KuneiformParserLPAREN, i) -} - -func (s *Insert_statementContext) AllSql_expr_list() []ISql_expr_listContext { - children := s.GetChildren() - len := 0 - for _, ctx := range children { - if _, ok := ctx.(ISql_expr_listContext); ok { - len++ - } - } - - tst := make([]ISql_expr_listContext, len) - i := 0 - for _, ctx := range children { - if t, ok := ctx.(ISql_expr_listContext); ok { - tst[i] = t.(ISql_expr_listContext) - i++ - } - } - - return tst -} - -func (s *Insert_statementContext) Sql_expr_list(i int) ISql_expr_listContext { - var t antlr.RuleContext - j := 0 - for _, ctx := range s.GetChildren() { - if _, ok := ctx.(ISql_expr_listContext); ok { - if j == i { - t = ctx.(antlr.RuleContext) - break - } - j++ - } - } - - if t == nil { - return nil - } - - return t.(ISql_expr_listContext) -} - -func (s *Insert_statementContext) AllRPAREN() []antlr.TerminalNode { - return s.GetTokens(KuneiformParserRPAREN) -} - -func (s *Insert_statementContext) RPAREN(i int) antlr.TerminalNode { - return s.GetToken(KuneiformParserRPAREN, i) -} - -func (s *Insert_statementContext) AllIdentifier() []IIdentifierContext { - children := s.GetChildren() - len := 0 - for _, ctx := range children { - if _, ok := ctx.(IIdentifierContext); ok { - len++ - } - } - - tst := make([]IIdentifierContext, len) - i := 0 - for _, ctx := range children { - if t, ok := ctx.(IIdentifierContext); ok { - tst[i] = t.(IIdentifierContext) - i++ - } - } - - return tst -} +func (s *Ordering_termContext) GetParser() antlr.Parser { return s.parser } -func (s *Insert_statementContext) Identifier(i int) IIdentifierContext { +func (s *Ordering_termContext) Sql_expr() ISql_exprContext { var t antlr.RuleContext - j := 0 for _, ctx := range s.GetChildren() { - if _, ok := ctx.(IIdentifierContext); ok { - if j == i { - t = ctx.(antlr.RuleContext) - break - } - j++ + if _, ok := ctx.(ISql_exprContext); ok { + t = ctx.(antlr.RuleContext) + break } } @@ -11195,253 +11610,106 @@ func (s *Insert_statementContext) Identifier(i int) IIdentifierContext { return nil } - return t.(IIdentifierContext) + return t.(ISql_exprContext) } -func (s *Insert_statementContext) AllCOMMA() []antlr.TerminalNode { - return s.GetTokens(KuneiformParserCOMMA) +func (s *Ordering_termContext) NULLS() antlr.TerminalNode { + return s.GetToken(KuneiformParserNULLS, 0) } -func (s *Insert_statementContext) COMMA(i int) antlr.TerminalNode { - return s.GetToken(KuneiformParserCOMMA, i) +func (s *Ordering_termContext) ASC() antlr.TerminalNode { + return s.GetToken(KuneiformParserASC, 0) } -func (s *Insert_statementContext) Upsert_clause() IUpsert_clauseContext { - var t antlr.RuleContext - for _, ctx := range s.GetChildren() { - if _, ok := ctx.(IUpsert_clauseContext); ok { - t = ctx.(antlr.RuleContext) - break - } - } - - if t == nil { - return nil - } - - return t.(IUpsert_clauseContext) +func (s *Ordering_termContext) DESC() antlr.TerminalNode { + return s.GetToken(KuneiformParserDESC, 0) } -func (s *Insert_statementContext) Identifier_list() IIdentifier_listContext { - var t antlr.RuleContext - for _, ctx := range s.GetChildren() { - if _, ok := ctx.(IIdentifier_listContext); ok { - t = ctx.(antlr.RuleContext) - break - } - } - - if t == nil { - return nil - } - - 
return t.(IIdentifier_listContext) +func (s *Ordering_termContext) FIRST() antlr.TerminalNode { + return s.GetToken(KuneiformParserFIRST, 0) } -func (s *Insert_statementContext) AS() antlr.TerminalNode { - return s.GetToken(KuneiformParserAS, 0) +func (s *Ordering_termContext) LAST() antlr.TerminalNode { + return s.GetToken(KuneiformParserLAST, 0) } -func (s *Insert_statementContext) GetRuleContext() antlr.RuleContext { +func (s *Ordering_termContext) GetRuleContext() antlr.RuleContext { return s } -func (s *Insert_statementContext) ToStringTree(ruleNames []string, recog antlr.Recognizer) string { +func (s *Ordering_termContext) ToStringTree(ruleNames []string, recog antlr.Recognizer) string { return antlr.TreesStringTree(s, ruleNames, recog) } -func (s *Insert_statementContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { +func (s *Ordering_termContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { switch t := visitor.(type) { case KuneiformParserVisitor: - return t.VisitInsert_statement(s) + return t.VisitOrdering_term(s) default: return t.VisitChildren(s) } } -func (p *KuneiformParser) Insert_statement() (localctx IInsert_statementContext) { - localctx = NewInsert_statementContext(p, p.GetParserRuleContext(), p.GetState()) - p.EnterRule(localctx, 82, KuneiformParserRULE_insert_statement) +func (p *KuneiformParser) Ordering_term() (localctx IOrdering_termContext) { + localctx = NewOrdering_termContext(p, p.GetParserRuleContext(), p.GetState()) + p.EnterRule(localctx, 80, KuneiformParserRULE_ordering_term) var _la int p.EnterOuterAlt(localctx, 1) { - p.SetState(647) - p.Match(KuneiformParserINSERT) - if p.HasError() { - // Recognition error - abort rule - goto errorExit - } + p.SetState(673) + p.sql_expr(0) } - { - p.SetState(648) - p.Match(KuneiformParserINTO) - if p.HasError() { - // Recognition error - abort rule - goto errorExit - } + p.SetState(675) + p.GetErrorHandler().Sync(p) + if p.HasError() { + goto errorExit } - { - p.SetState(649) + _la = p.GetTokenStream().LA(1) - var _x = p.Identifier() + if _la == KuneiformParserASC || _la == KuneiformParserDESC { + { + p.SetState(674) + _la = p.GetTokenStream().LA(1) + + if !(_la == KuneiformParserASC || _la == KuneiformParserDESC) { + p.GetErrorHandler().RecoverInline(p) + } else { + p.GetErrorHandler().ReportMatch(p) + p.Consume() + } + } - localctx.(*Insert_statementContext).table_name = _x } - p.SetState(654) + p.SetState(679) p.GetErrorHandler().Sync(p) if p.HasError() { goto errorExit } _la = p.GetTokenStream().LA(1) - if _la == KuneiformParserDOUBLE_QUOTE || _la == KuneiformParserAS || _la == KuneiformParserIDENTIFIER { - p.SetState(651) - p.GetErrorHandler().Sync(p) - if p.HasError() { - goto errorExit - } - _la = p.GetTokenStream().LA(1) - - if _la == KuneiformParserAS { - { - p.SetState(650) - p.Match(KuneiformParserAS) - if p.HasError() { - // Recognition error - abort rule - goto errorExit - } - } - - } - { - p.SetState(653) - - var _x = p.Identifier() - - localctx.(*Insert_statementContext).alias = _x - } - - } - p.SetState(660) - p.GetErrorHandler().Sync(p) - if p.HasError() { - goto errorExit - } - _la = p.GetTokenStream().LA(1) - - if _la == KuneiformParserLPAREN { - { - p.SetState(656) - p.Match(KuneiformParserLPAREN) - if p.HasError() { - // Recognition error - abort rule - goto errorExit - } + if _la == KuneiformParserNULLS { + { + p.SetState(677) + p.Match(KuneiformParserNULLS) + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } } { - p.SetState(657) - - var _x = 
p.Identifier_list() - - localctx.(*Insert_statementContext).target_columns = _x - } - { - p.SetState(658) - p.Match(KuneiformParserRPAREN) - if p.HasError() { - // Recognition error - abort rule - goto errorExit - } - } - - } - { - p.SetState(662) - p.Match(KuneiformParserVALUES) - if p.HasError() { - // Recognition error - abort rule - goto errorExit - } - } - { - p.SetState(663) - p.Match(KuneiformParserLPAREN) - if p.HasError() { - // Recognition error - abort rule - goto errorExit - } - } - { - p.SetState(664) - p.Sql_expr_list() - } - { - p.SetState(665) - p.Match(KuneiformParserRPAREN) - if p.HasError() { - // Recognition error - abort rule - goto errorExit - } - } - p.SetState(673) - p.GetErrorHandler().Sync(p) - if p.HasError() { - goto errorExit - } - _la = p.GetTokenStream().LA(1) + p.SetState(678) + _la = p.GetTokenStream().LA(1) - for _la == KuneiformParserCOMMA { - { - p.SetState(666) - p.Match(KuneiformParserCOMMA) - if p.HasError() { - // Recognition error - abort rule - goto errorExit - } - } - { - p.SetState(667) - p.Match(KuneiformParserLPAREN) - if p.HasError() { - // Recognition error - abort rule - goto errorExit - } - } - { - p.SetState(668) - p.Sql_expr_list() - } - { - p.SetState(669) - p.Match(KuneiformParserRPAREN) - if p.HasError() { - // Recognition error - abort rule - goto errorExit + if !(_la == KuneiformParserFIRST || _la == KuneiformParserLAST) { + p.GetErrorHandler().RecoverInline(p) + } else { + p.GetErrorHandler().ReportMatch(p) + p.Consume() } } - p.SetState(675) - p.GetErrorHandler().Sync(p) - if p.HasError() { - goto errorExit - } - _la = p.GetTokenStream().LA(1) - } - p.SetState(677) - p.GetErrorHandler().Sync(p) - if p.HasError() { - goto errorExit - } - _la = p.GetTokenStream().LA(1) - - if _la == KuneiformParserON { - { - p.SetState(676) - p.Upsert_clause() - } - } errorExit: @@ -11457,141 +11725,126 @@ errorExit: goto errorExit // Trick to prevent compiler error if the label is not used } -// IUpsert_clauseContext is an interface to support dynamic dispatch. -type IUpsert_clauseContext interface { +// ISelect_coreContext is an interface to support dynamic dispatch. +type ISelect_coreContext interface { antlr.ParserRuleContext // GetParser returns the parser. GetParser() antlr.Parser - // GetConflict_columns returns the conflict_columns rule contexts. - GetConflict_columns() IIdentifier_listContext + // GetWhere returns the where rule contexts. + GetWhere() ISql_exprContext - // GetConflict_where returns the conflict_where rule contexts. - GetConflict_where() ISql_exprContext + // GetGroup_by returns the group_by rule contexts. + GetGroup_by() ISql_expr_listContext - // GetUpdate_where returns the update_where rule contexts. - GetUpdate_where() ISql_exprContext + // GetHaving returns the having rule contexts. + GetHaving() ISql_exprContext - // SetConflict_columns sets the conflict_columns rule contexts. - SetConflict_columns(IIdentifier_listContext) + // SetWhere sets the where rule contexts. + SetWhere(ISql_exprContext) - // SetConflict_where sets the conflict_where rule contexts. - SetConflict_where(ISql_exprContext) + // SetGroup_by sets the group_by rule contexts. + SetGroup_by(ISql_expr_listContext) - // SetUpdate_where sets the update_where rule contexts. - SetUpdate_where(ISql_exprContext) + // SetHaving sets the having rule contexts. 
+ SetHaving(ISql_exprContext) // Getter signatures - ON() antlr.TerminalNode - CONFLICT() antlr.TerminalNode - DO() antlr.TerminalNode - NOTHING() antlr.TerminalNode - UPDATE() antlr.TerminalNode - SET() antlr.TerminalNode - AllUpdate_set_clause() []IUpdate_set_clauseContext - Update_set_clause(i int) IUpdate_set_clauseContext - LPAREN() antlr.TerminalNode - RPAREN() antlr.TerminalNode - Identifier_list() IIdentifier_listContext + SELECT() antlr.TerminalNode + AllResult_column() []IResult_columnContext + Result_column(i int) IResult_columnContext + DISTINCT() antlr.TerminalNode AllCOMMA() []antlr.TerminalNode COMMA(i int) antlr.TerminalNode - AllWHERE() []antlr.TerminalNode - WHERE(i int) antlr.TerminalNode + FROM() antlr.TerminalNode + Relation() IRelationContext + WHERE() antlr.TerminalNode + GROUP() antlr.TerminalNode + BY() antlr.TerminalNode + WINDOW() antlr.TerminalNode + AllIdentifier() []IIdentifierContext + Identifier(i int) IIdentifierContext + AllAS() []antlr.TerminalNode + AS(i int) antlr.TerminalNode + AllWindow() []IWindowContext + Window(i int) IWindowContext AllSql_expr() []ISql_exprContext Sql_expr(i int) ISql_exprContext + Sql_expr_list() ISql_expr_listContext + AllJoin() []IJoinContext + Join(i int) IJoinContext + HAVING() antlr.TerminalNode - // IsUpsert_clauseContext differentiates from other interfaces. - IsUpsert_clauseContext() + // IsSelect_coreContext differentiates from other interfaces. + IsSelect_coreContext() } -type Upsert_clauseContext struct { +type Select_coreContext struct { antlr.BaseParserRuleContext - parser antlr.Parser - conflict_columns IIdentifier_listContext - conflict_where ISql_exprContext - update_where ISql_exprContext + parser antlr.Parser + where ISql_exprContext + group_by ISql_expr_listContext + having ISql_exprContext } -func NewEmptyUpsert_clauseContext() *Upsert_clauseContext { - var p = new(Upsert_clauseContext) +func NewEmptySelect_coreContext() *Select_coreContext { + var p = new(Select_coreContext) antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1) - p.RuleIndex = KuneiformParserRULE_upsert_clause + p.RuleIndex = KuneiformParserRULE_select_core return p } -func InitEmptyUpsert_clauseContext(p *Upsert_clauseContext) { +func InitEmptySelect_coreContext(p *Select_coreContext) { antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1) - p.RuleIndex = KuneiformParserRULE_upsert_clause + p.RuleIndex = KuneiformParserRULE_select_core } -func (*Upsert_clauseContext) IsUpsert_clauseContext() {} +func (*Select_coreContext) IsSelect_coreContext() {} -func NewUpsert_clauseContext(parser antlr.Parser, parent antlr.ParserRuleContext, invokingState int) *Upsert_clauseContext { - var p = new(Upsert_clauseContext) +func NewSelect_coreContext(parser antlr.Parser, parent antlr.ParserRuleContext, invokingState int) *Select_coreContext { + var p = new(Select_coreContext) antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, parent, invokingState) p.parser = parser - p.RuleIndex = KuneiformParserRULE_upsert_clause + p.RuleIndex = KuneiformParserRULE_select_core return p } -func (s *Upsert_clauseContext) GetParser() antlr.Parser { return s.parser } - -func (s *Upsert_clauseContext) GetConflict_columns() IIdentifier_listContext { - return s.conflict_columns -} - -func (s *Upsert_clauseContext) GetConflict_where() ISql_exprContext { return s.conflict_where } - -func (s *Upsert_clauseContext) GetUpdate_where() ISql_exprContext { return s.update_where } - -func (s *Upsert_clauseContext) SetConflict_columns(v 
IIdentifier_listContext) { s.conflict_columns = v } - -func (s *Upsert_clauseContext) SetConflict_where(v ISql_exprContext) { s.conflict_where = v } +func (s *Select_coreContext) GetParser() antlr.Parser { return s.parser } -func (s *Upsert_clauseContext) SetUpdate_where(v ISql_exprContext) { s.update_where = v } +func (s *Select_coreContext) GetWhere() ISql_exprContext { return s.where } -func (s *Upsert_clauseContext) ON() antlr.TerminalNode { - return s.GetToken(KuneiformParserON, 0) -} +func (s *Select_coreContext) GetGroup_by() ISql_expr_listContext { return s.group_by } -func (s *Upsert_clauseContext) CONFLICT() antlr.TerminalNode { - return s.GetToken(KuneiformParserCONFLICT, 0) -} +func (s *Select_coreContext) GetHaving() ISql_exprContext { return s.having } -func (s *Upsert_clauseContext) DO() antlr.TerminalNode { - return s.GetToken(KuneiformParserDO, 0) -} +func (s *Select_coreContext) SetWhere(v ISql_exprContext) { s.where = v } -func (s *Upsert_clauseContext) NOTHING() antlr.TerminalNode { - return s.GetToken(KuneiformParserNOTHING, 0) -} +func (s *Select_coreContext) SetGroup_by(v ISql_expr_listContext) { s.group_by = v } -func (s *Upsert_clauseContext) UPDATE() antlr.TerminalNode { - return s.GetToken(KuneiformParserUPDATE, 0) -} +func (s *Select_coreContext) SetHaving(v ISql_exprContext) { s.having = v } -func (s *Upsert_clauseContext) SET() antlr.TerminalNode { - return s.GetToken(KuneiformParserSET, 0) +func (s *Select_coreContext) SELECT() antlr.TerminalNode { + return s.GetToken(KuneiformParserSELECT, 0) } -func (s *Upsert_clauseContext) AllUpdate_set_clause() []IUpdate_set_clauseContext { +func (s *Select_coreContext) AllResult_column() []IResult_columnContext { children := s.GetChildren() len := 0 for _, ctx := range children { - if _, ok := ctx.(IUpdate_set_clauseContext); ok { + if _, ok := ctx.(IResult_columnContext); ok { len++ } } - tst := make([]IUpdate_set_clauseContext, len) + tst := make([]IResult_columnContext, len) i := 0 for _, ctx := range children { - if t, ok := ctx.(IUpdate_set_clauseContext); ok { - tst[i] = t.(IUpdate_set_clauseContext) + if t, ok := ctx.(IResult_columnContext); ok { + tst[i] = t.(IResult_columnContext) i++ } } @@ -11599,11 +11852,11 @@ func (s *Upsert_clauseContext) AllUpdate_set_clause() []IUpdate_set_clauseContex return tst } -func (s *Upsert_clauseContext) Update_set_clause(i int) IUpdate_set_clauseContext { +func (s *Select_coreContext) Result_column(i int) IResult_columnContext { var t antlr.RuleContext j := 0 for _, ctx := range s.GetChildren() { - if _, ok := ctx.(IUpdate_set_clauseContext); ok { + if _, ok := ctx.(IResult_columnContext); ok { if j == i { t = ctx.(antlr.RuleContext) break @@ -11616,21 +11869,29 @@ func (s *Upsert_clauseContext) Update_set_clause(i int) IUpdate_set_clauseContex return nil } - return t.(IUpdate_set_clauseContext) + return t.(IResult_columnContext) } -func (s *Upsert_clauseContext) LPAREN() antlr.TerminalNode { - return s.GetToken(KuneiformParserLPAREN, 0) +func (s *Select_coreContext) DISTINCT() antlr.TerminalNode { + return s.GetToken(KuneiformParserDISTINCT, 0) } -func (s *Upsert_clauseContext) RPAREN() antlr.TerminalNode { - return s.GetToken(KuneiformParserRPAREN, 0) +func (s *Select_coreContext) AllCOMMA() []antlr.TerminalNode { + return s.GetTokens(KuneiformParserCOMMA) } -func (s *Upsert_clauseContext) Identifier_list() IIdentifier_listContext { +func (s *Select_coreContext) COMMA(i int) antlr.TerminalNode { + return s.GetToken(KuneiformParserCOMMA, i) +} + +func (s *Select_coreContext) 
FROM() antlr.TerminalNode { + return s.GetToken(KuneiformParserFROM, 0) +} + +func (s *Select_coreContext) Relation() IRelationContext { var t antlr.RuleContext for _, ctx := range s.GetChildren() { - if _, ok := ctx.(IIdentifier_listContext); ok { + if _, ok := ctx.(IRelationContext); ok { t = ctx.(antlr.RuleContext) break } @@ -11640,26 +11901,116 @@ func (s *Upsert_clauseContext) Identifier_list() IIdentifier_listContext { return nil } - return t.(IIdentifier_listContext) + return t.(IRelationContext) } -func (s *Upsert_clauseContext) AllCOMMA() []antlr.TerminalNode { - return s.GetTokens(KuneiformParserCOMMA) +func (s *Select_coreContext) WHERE() antlr.TerminalNode { + return s.GetToken(KuneiformParserWHERE, 0) } -func (s *Upsert_clauseContext) COMMA(i int) antlr.TerminalNode { - return s.GetToken(KuneiformParserCOMMA, i) +func (s *Select_coreContext) GROUP() antlr.TerminalNode { + return s.GetToken(KuneiformParserGROUP, 0) } -func (s *Upsert_clauseContext) AllWHERE() []antlr.TerminalNode { - return s.GetTokens(KuneiformParserWHERE) +func (s *Select_coreContext) BY() antlr.TerminalNode { + return s.GetToken(KuneiformParserBY, 0) } -func (s *Upsert_clauseContext) WHERE(i int) antlr.TerminalNode { - return s.GetToken(KuneiformParserWHERE, i) +func (s *Select_coreContext) WINDOW() antlr.TerminalNode { + return s.GetToken(KuneiformParserWINDOW, 0) } -func (s *Upsert_clauseContext) AllSql_expr() []ISql_exprContext { +func (s *Select_coreContext) AllIdentifier() []IIdentifierContext { + children := s.GetChildren() + len := 0 + for _, ctx := range children { + if _, ok := ctx.(IIdentifierContext); ok { + len++ + } + } + + tst := make([]IIdentifierContext, len) + i := 0 + for _, ctx := range children { + if t, ok := ctx.(IIdentifierContext); ok { + tst[i] = t.(IIdentifierContext) + i++ + } + } + + return tst +} + +func (s *Select_coreContext) Identifier(i int) IIdentifierContext { + var t antlr.RuleContext + j := 0 + for _, ctx := range s.GetChildren() { + if _, ok := ctx.(IIdentifierContext); ok { + if j == i { + t = ctx.(antlr.RuleContext) + break + } + j++ + } + } + + if t == nil { + return nil + } + + return t.(IIdentifierContext) +} + +func (s *Select_coreContext) AllAS() []antlr.TerminalNode { + return s.GetTokens(KuneiformParserAS) +} + +func (s *Select_coreContext) AS(i int) antlr.TerminalNode { + return s.GetToken(KuneiformParserAS, i) +} + +func (s *Select_coreContext) AllWindow() []IWindowContext { + children := s.GetChildren() + len := 0 + for _, ctx := range children { + if _, ok := ctx.(IWindowContext); ok { + len++ + } + } + + tst := make([]IWindowContext, len) + i := 0 + for _, ctx := range children { + if t, ok := ctx.(IWindowContext); ok { + tst[i] = t.(IWindowContext) + i++ + } + } + + return tst +} + +func (s *Select_coreContext) Window(i int) IWindowContext { + var t antlr.RuleContext + j := 0 + for _, ctx := range s.GetChildren() { + if _, ok := ctx.(IWindowContext); ok { + if j == i { + t = ctx.(antlr.RuleContext) + break + } + j++ + } + } + + if t == nil { + return nil + } + + return t.(IWindowContext) +} + +func (s *Select_coreContext) AllSql_expr() []ISql_exprContext { children := s.GetChildren() len := 0 for _, ctx := range children { @@ -11680,7 +12031,7 @@ func (s *Upsert_clauseContext) AllSql_expr() []ISql_exprContext { return tst } -func (s *Upsert_clauseContext) Sql_expr(i int) ISql_exprContext { +func (s *Select_coreContext) Sql_expr(i int) ISql_exprContext { var t antlr.RuleContext j := 0 for _, ctx := range s.GetChildren() { @@ -11700,207 +12051,347 @@ func 
(s *Upsert_clauseContext) Sql_expr(i int) ISql_exprContext { return t.(ISql_exprContext) } -func (s *Upsert_clauseContext) GetRuleContext() antlr.RuleContext { +func (s *Select_coreContext) Sql_expr_list() ISql_expr_listContext { + var t antlr.RuleContext + for _, ctx := range s.GetChildren() { + if _, ok := ctx.(ISql_expr_listContext); ok { + t = ctx.(antlr.RuleContext) + break + } + } + + if t == nil { + return nil + } + + return t.(ISql_expr_listContext) +} + +func (s *Select_coreContext) AllJoin() []IJoinContext { + children := s.GetChildren() + len := 0 + for _, ctx := range children { + if _, ok := ctx.(IJoinContext); ok { + len++ + } + } + + tst := make([]IJoinContext, len) + i := 0 + for _, ctx := range children { + if t, ok := ctx.(IJoinContext); ok { + tst[i] = t.(IJoinContext) + i++ + } + } + + return tst +} + +func (s *Select_coreContext) Join(i int) IJoinContext { + var t antlr.RuleContext + j := 0 + for _, ctx := range s.GetChildren() { + if _, ok := ctx.(IJoinContext); ok { + if j == i { + t = ctx.(antlr.RuleContext) + break + } + j++ + } + } + + if t == nil { + return nil + } + + return t.(IJoinContext) +} + +func (s *Select_coreContext) HAVING() antlr.TerminalNode { + return s.GetToken(KuneiformParserHAVING, 0) +} + +func (s *Select_coreContext) GetRuleContext() antlr.RuleContext { return s } -func (s *Upsert_clauseContext) ToStringTree(ruleNames []string, recog antlr.Recognizer) string { +func (s *Select_coreContext) ToStringTree(ruleNames []string, recog antlr.Recognizer) string { return antlr.TreesStringTree(s, ruleNames, recog) } -func (s *Upsert_clauseContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { +func (s *Select_coreContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { switch t := visitor.(type) { case KuneiformParserVisitor: - return t.VisitUpsert_clause(s) + return t.VisitSelect_core(s) default: return t.VisitChildren(s) } } -func (p *KuneiformParser) Upsert_clause() (localctx IUpsert_clauseContext) { - localctx = NewUpsert_clauseContext(p, p.GetParserRuleContext(), p.GetState()) - p.EnterRule(localctx, 84, KuneiformParserRULE_upsert_clause) +func (p *KuneiformParser) Select_core() (localctx ISelect_coreContext) { + localctx = NewSelect_coreContext(p, p.GetParserRuleContext(), p.GetState()) + p.EnterRule(localctx, 82, KuneiformParserRULE_select_core) var _la int p.EnterOuterAlt(localctx, 1) { - p.SetState(679) - p.Match(KuneiformParserON) + p.SetState(681) + p.Match(KuneiformParserSELECT) if p.HasError() { // Recognition error - abort rule goto errorExit } } - { - p.SetState(680) - p.Match(KuneiformParserCONFLICT) - if p.HasError() { - // Recognition error - abort rule - goto errorExit + p.SetState(683) + p.GetErrorHandler().Sync(p) + if p.HasError() { + goto errorExit + } + _la = p.GetTokenStream().LA(1) + + if _la == KuneiformParserDISTINCT { + { + p.SetState(682) + p.Match(KuneiformParserDISTINCT) + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } } + + } + { + p.SetState(685) + p.Result_column() } - p.SetState(688) + p.SetState(690) p.GetErrorHandler().Sync(p) if p.HasError() { goto errorExit } _la = p.GetTokenStream().LA(1) - if _la == KuneiformParserLPAREN { + for _la == KuneiformParserCOMMA { { - p.SetState(681) - p.Match(KuneiformParserLPAREN) + p.SetState(686) + p.Match(KuneiformParserCOMMA) if p.HasError() { // Recognition error - abort rule goto errorExit } } { - p.SetState(682) - - var _x = p.Identifier_list() + p.SetState(687) + p.Result_column() + } - localctx.(*Upsert_clauseContext).conflict_columns = 
_x + p.SetState(692) + p.GetErrorHandler().Sync(p) + if p.HasError() { + goto errorExit } + _la = p.GetTokenStream().LA(1) + } + p.SetState(701) + p.GetErrorHandler().Sync(p) + if p.HasError() { + goto errorExit + } + _la = p.GetTokenStream().LA(1) + + if _la == KuneiformParserFROM { { - p.SetState(683) - p.Match(KuneiformParserRPAREN) + p.SetState(693) + p.Match(KuneiformParserFROM) if p.HasError() { // Recognition error - abort rule goto errorExit } } - p.SetState(686) + { + p.SetState(694) + p.Relation() + } + p.SetState(698) p.GetErrorHandler().Sync(p) if p.HasError() { goto errorExit } _la = p.GetTokenStream().LA(1) - if _la == KuneiformParserWHERE { + for (int64((_la-73)) & ^0x3f) == 0 && ((int64(1)<<(_la-73))&134217743) != 0 { { - p.SetState(684) - p.Match(KuneiformParserWHERE) - if p.HasError() { - // Recognition error - abort rule - goto errorExit - } + p.SetState(695) + p.Join() } - { - p.SetState(685) - - var _x = p.sql_expr(0) - localctx.(*Upsert_clauseContext).conflict_where = _x + p.SetState(700) + p.GetErrorHandler().Sync(p) + if p.HasError() { + goto errorExit } - + _la = p.GetTokenStream().LA(1) } } - { - p.SetState(690) - p.Match(KuneiformParserDO) - if p.HasError() { - // Recognition error - abort rule - goto errorExit - } - } - p.SetState(706) + p.SetState(705) p.GetErrorHandler().Sync(p) if p.HasError() { goto errorExit } + _la = p.GetTokenStream().LA(1) - switch p.GetTokenStream().LA(1) { - case KuneiformParserNOTHING: + if _la == KuneiformParserWHERE { { - p.SetState(691) - p.Match(KuneiformParserNOTHING) + p.SetState(703) + p.Match(KuneiformParserWHERE) if p.HasError() { // Recognition error - abort rule goto errorExit } } + { + p.SetState(704) - case KuneiformParserUPDATE: + var _x = p.sql_expr(0) + + localctx.(*Select_coreContext).where = _x + } + + } + p.SetState(714) + p.GetErrorHandler().Sync(p) + if p.HasError() { + goto errorExit + } + _la = p.GetTokenStream().LA(1) + + if _la == KuneiformParserGROUP { { - p.SetState(692) - p.Match(KuneiformParserUPDATE) + p.SetState(707) + p.Match(KuneiformParserGROUP) if p.HasError() { // Recognition error - abort rule goto errorExit } } { - p.SetState(693) - p.Match(KuneiformParserSET) + p.SetState(708) + p.Match(KuneiformParserBY) if p.HasError() { // Recognition error - abort rule goto errorExit } } { - p.SetState(694) - p.Update_set_clause() + p.SetState(709) + + var _x = p.Sql_expr_list() + + localctx.(*Select_coreContext).group_by = _x } - p.SetState(699) + p.SetState(712) p.GetErrorHandler().Sync(p) if p.HasError() { goto errorExit } _la = p.GetTokenStream().LA(1) - for _la == KuneiformParserCOMMA { + if _la == KuneiformParserHAVING { { - p.SetState(695) - p.Match(KuneiformParserCOMMA) + p.SetState(710) + p.Match(KuneiformParserHAVING) if p.HasError() { // Recognition error - abort rule goto errorExit } } { - p.SetState(696) - p.Update_set_clause() + p.SetState(711) + + var _x = p.sql_expr(0) + + localctx.(*Select_coreContext).having = _x } - p.SetState(701) - p.GetErrorHandler().Sync(p) + } + + } + p.SetState(730) + p.GetErrorHandler().Sync(p) + if p.HasError() { + goto errorExit + } + _la = p.GetTokenStream().LA(1) + + if _la == KuneiformParserWINDOW { + { + p.SetState(716) + p.Match(KuneiformParserWINDOW) if p.HasError() { + // Recognition error - abort rule goto errorExit } - _la = p.GetTokenStream().LA(1) } - p.SetState(704) + { + p.SetState(717) + p.Identifier() + } + { + p.SetState(718) + p.Match(KuneiformParserAS) + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } + } + { + 
p.SetState(719) + p.Window() + } + p.SetState(727) p.GetErrorHandler().Sync(p) if p.HasError() { goto errorExit } _la = p.GetTokenStream().LA(1) - if _la == KuneiformParserWHERE { + for _la == KuneiformParserCOMMA { { - p.SetState(702) - p.Match(KuneiformParserWHERE) + p.SetState(720) + p.Match(KuneiformParserCOMMA) if p.HasError() { // Recognition error - abort rule goto errorExit } } { - p.SetState(703) - - var _x = p.sql_expr(0) - - localctx.(*Upsert_clauseContext).update_where = _x + p.SetState(721) + p.Identifier() + } + { + p.SetState(722) + p.Match(KuneiformParserAS) + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } + } + { + p.SetState(723) + p.Window() } + p.SetState(729) + p.GetErrorHandler().Sync(p) + if p.HasError() { + goto errorExit + } + _la = p.GetTokenStream().LA(1) } - default: - p.SetError(antlr.NewNoViableAltException(p, nil, nil, nil, nil, nil)) - goto errorExit } errorExit: @@ -11916,100 +12407,94 @@ errorExit: goto errorExit // Trick to prevent compiler error if the label is not used } -// IDelete_statementContext is an interface to support dynamic dispatch. -type IDelete_statementContext interface { +// IRelationContext is an interface to support dynamic dispatch. +type IRelationContext interface { antlr.ParserRuleContext // GetParser returns the parser. GetParser() antlr.Parser - - // GetTable_name returns the table_name rule contexts. - GetTable_name() IIdentifierContext - - // GetAlias returns the alias rule contexts. - GetAlias() IIdentifierContext - - // GetWhere returns the where rule contexts. - GetWhere() ISql_exprContext - - // SetTable_name sets the table_name rule contexts. - SetTable_name(IIdentifierContext) - - // SetAlias sets the alias rule contexts. - SetAlias(IIdentifierContext) - - // SetWhere sets the where rule contexts. - SetWhere(ISql_exprContext) - - // Getter signatures - DELETE() antlr.TerminalNode - FROM() antlr.TerminalNode - AllIdentifier() []IIdentifierContext - Identifier(i int) IIdentifierContext - WHERE() antlr.TerminalNode - Sql_expr() ISql_exprContext - AS() antlr.TerminalNode - - // IsDelete_statementContext differentiates from other interfaces. - IsDelete_statementContext() + // IsRelationContext differentiates from other interfaces. 
+ IsRelationContext() } -type Delete_statementContext struct { +type RelationContext struct { antlr.BaseParserRuleContext - parser antlr.Parser - table_name IIdentifierContext - alias IIdentifierContext - where ISql_exprContext + parser antlr.Parser } -func NewEmptyDelete_statementContext() *Delete_statementContext { - var p = new(Delete_statementContext) +func NewEmptyRelationContext() *RelationContext { + var p = new(RelationContext) antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1) - p.RuleIndex = KuneiformParserRULE_delete_statement + p.RuleIndex = KuneiformParserRULE_relation return p } -func InitEmptyDelete_statementContext(p *Delete_statementContext) { +func InitEmptyRelationContext(p *RelationContext) { antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1) - p.RuleIndex = KuneiformParserRULE_delete_statement + p.RuleIndex = KuneiformParserRULE_relation } -func (*Delete_statementContext) IsDelete_statementContext() {} +func (*RelationContext) IsRelationContext() {} -func NewDelete_statementContext(parser antlr.Parser, parent antlr.ParserRuleContext, invokingState int) *Delete_statementContext { - var p = new(Delete_statementContext) +func NewRelationContext(parser antlr.Parser, parent antlr.ParserRuleContext, invokingState int) *RelationContext { + var p = new(RelationContext) antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, parent, invokingState) p.parser = parser - p.RuleIndex = KuneiformParserRULE_delete_statement + p.RuleIndex = KuneiformParserRULE_relation return p } -func (s *Delete_statementContext) GetParser() antlr.Parser { return s.parser } +func (s *RelationContext) GetParser() antlr.Parser { return s.parser } -func (s *Delete_statementContext) GetTable_name() IIdentifierContext { return s.table_name } +func (s *RelationContext) CopyAll(ctx *RelationContext) { + s.CopyFrom(&ctx.BaseParserRuleContext) +} -func (s *Delete_statementContext) GetAlias() IIdentifierContext { return s.alias } +func (s *RelationContext) GetRuleContext() antlr.RuleContext { + return s +} -func (s *Delete_statementContext) GetWhere() ISql_exprContext { return s.where } +func (s *RelationContext) ToStringTree(ruleNames []string, recog antlr.Recognizer) string { + return antlr.TreesStringTree(s, ruleNames, recog) +} -func (s *Delete_statementContext) SetTable_name(v IIdentifierContext) { s.table_name = v } +type Table_relationContext struct { + RelationContext + namespace IIdentifierContext + table_name IIdentifierContext + alias IIdentifierContext +} -func (s *Delete_statementContext) SetAlias(v IIdentifierContext) { s.alias = v } +func NewTable_relationContext(parser antlr.Parser, ctx antlr.ParserRuleContext) *Table_relationContext { + var p = new(Table_relationContext) -func (s *Delete_statementContext) SetWhere(v ISql_exprContext) { s.where = v } + InitEmptyRelationContext(&p.RelationContext) + p.parser = parser + p.CopyAll(ctx.(*RelationContext)) -func (s *Delete_statementContext) DELETE() antlr.TerminalNode { - return s.GetToken(KuneiformParserDELETE, 0) + return p } -func (s *Delete_statementContext) FROM() antlr.TerminalNode { - return s.GetToken(KuneiformParserFROM, 0) +func (s *Table_relationContext) GetNamespace() IIdentifierContext { return s.namespace } + +func (s *Table_relationContext) GetTable_name() IIdentifierContext { return s.table_name } + +func (s *Table_relationContext) GetAlias() IIdentifierContext { return s.alias } + +func (s *Table_relationContext) SetNamespace(v IIdentifierContext) { s.namespace = v } + +func (s 
*Table_relationContext) SetTable_name(v IIdentifierContext) { s.table_name = v } + +func (s *Table_relationContext) SetAlias(v IIdentifierContext) { s.alias = v } + +func (s *Table_relationContext) GetRuleContext() antlr.RuleContext { + return s } -func (s *Delete_statementContext) AllIdentifier() []IIdentifierContext { +func (s *Table_relationContext) AllIdentifier() []IIdentifierContext { children := s.GetChildren() len := 0 for _, ctx := range children { @@ -12030,7 +12515,7 @@ func (s *Delete_statementContext) AllIdentifier() []IIdentifierContext { return tst } -func (s *Delete_statementContext) Identifier(i int) IIdentifierContext { +func (s *Table_relationContext) Identifier(i int) IIdentifierContext { var t antlr.RuleContext j := 0 for _, ctx := range s.GetChildren() { @@ -12050,136 +12535,244 @@ func (s *Delete_statementContext) Identifier(i int) IIdentifierContext { return t.(IIdentifierContext) } -func (s *Delete_statementContext) WHERE() antlr.TerminalNode { - return s.GetToken(KuneiformParserWHERE, 0) +func (s *Table_relationContext) PERIOD() antlr.TerminalNode { + return s.GetToken(KuneiformParserPERIOD, 0) } -func (s *Delete_statementContext) Sql_expr() ISql_exprContext { - var t antlr.RuleContext - for _, ctx := range s.GetChildren() { - if _, ok := ctx.(ISql_exprContext); ok { - t = ctx.(antlr.RuleContext) - break - } - } +func (s *Table_relationContext) AS() antlr.TerminalNode { + return s.GetToken(KuneiformParserAS, 0) +} - if t == nil { - return nil +func (s *Table_relationContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { + switch t := visitor.(type) { + case KuneiformParserVisitor: + return t.VisitTable_relation(s) + + default: + return t.VisitChildren(s) } +} - return t.(ISql_exprContext) +type Subquery_relationContext struct { + RelationContext + alias IIdentifierContext } -func (s *Delete_statementContext) AS() antlr.TerminalNode { - return s.GetToken(KuneiformParserAS, 0) +func NewSubquery_relationContext(parser antlr.Parser, ctx antlr.ParserRuleContext) *Subquery_relationContext { + var p = new(Subquery_relationContext) + + InitEmptyRelationContext(&p.RelationContext) + p.parser = parser + p.CopyAll(ctx.(*RelationContext)) + + return p } -func (s *Delete_statementContext) GetRuleContext() antlr.RuleContext { +func (s *Subquery_relationContext) GetAlias() IIdentifierContext { return s.alias } + +func (s *Subquery_relationContext) SetAlias(v IIdentifierContext) { s.alias = v } + +func (s *Subquery_relationContext) GetRuleContext() antlr.RuleContext { return s } -func (s *Delete_statementContext) ToStringTree(ruleNames []string, recog antlr.Recognizer) string { - return antlr.TreesStringTree(s, ruleNames, recog) +func (s *Subquery_relationContext) LPAREN() antlr.TerminalNode { + return s.GetToken(KuneiformParserLPAREN, 0) } -func (s *Delete_statementContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { - switch t := visitor.(type) { - case KuneiformParserVisitor: - return t.VisitDelete_statement(s) +func (s *Subquery_relationContext) Select_statement() ISelect_statementContext { + var t antlr.RuleContext + for _, ctx := range s.GetChildren() { + if _, ok := ctx.(ISelect_statementContext); ok { + t = ctx.(antlr.RuleContext) + break + } + } - default: - return t.VisitChildren(s) + if t == nil { + return nil } + + return t.(ISelect_statementContext) } -func (p *KuneiformParser) Delete_statement() (localctx IDelete_statementContext) { - localctx = NewDelete_statementContext(p, p.GetParserRuleContext(), p.GetState()) - p.EnterRule(localctx, 86, 
KuneiformParserRULE_delete_statement) - var _la int +func (s *Subquery_relationContext) RPAREN() antlr.TerminalNode { + return s.GetToken(KuneiformParserRPAREN, 0) +} - p.EnterOuterAlt(localctx, 1) - { - p.SetState(708) - p.Match(KuneiformParserDELETE) - if p.HasError() { - // Recognition error - abort rule - goto errorExit +func (s *Subquery_relationContext) Identifier() IIdentifierContext { + var t antlr.RuleContext + for _, ctx := range s.GetChildren() { + if _, ok := ctx.(IIdentifierContext); ok { + t = ctx.(antlr.RuleContext) + break } } - { - p.SetState(709) - p.Match(KuneiformParserFROM) - if p.HasError() { - // Recognition error - abort rule - goto errorExit - } + + if t == nil { + return nil } - { - p.SetState(710) - var _x = p.Identifier() + return t.(IIdentifierContext) +} - localctx.(*Delete_statementContext).table_name = _x +func (s *Subquery_relationContext) AS() antlr.TerminalNode { + return s.GetToken(KuneiformParserAS, 0) +} + +func (s *Subquery_relationContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { + switch t := visitor.(type) { + case KuneiformParserVisitor: + return t.VisitSubquery_relation(s) + + default: + return t.VisitChildren(s) } - p.SetState(715) +} + +func (p *KuneiformParser) Relation() (localctx IRelationContext) { + localctx = NewRelationContext(p, p.GetParserRuleContext(), p.GetState()) + p.EnterRule(localctx, 84, KuneiformParserRULE_relation) + var _la int + + p.SetState(753) p.GetErrorHandler().Sync(p) if p.HasError() { goto errorExit } - _la = p.GetTokenStream().LA(1) - if _la == KuneiformParserDOUBLE_QUOTE || _la == KuneiformParserAS || _la == KuneiformParserIDENTIFIER { - p.SetState(712) + switch p.GetTokenStream().LA(1) { + case KuneiformParserDOUBLE_QUOTE, KuneiformParserIDENTIFIER: + localctx = NewTable_relationContext(p, localctx) + p.EnterOuterAlt(localctx, 1) + p.SetState(735) p.GetErrorHandler().Sync(p) - if p.HasError() { - goto errorExit - } - _la = p.GetTokenStream().LA(1) - if _la == KuneiformParserAS { + if p.GetInterpreter().AdaptivePredict(p.BaseParser, p.GetTokenStream(), 84, p.GetParserRuleContext()) == 1 { { - p.SetState(711) - p.Match(KuneiformParserAS) + p.SetState(732) + + var _x = p.Identifier() + + localctx.(*Table_relationContext).namespace = _x + } + { + p.SetState(733) + p.Match(KuneiformParserPERIOD) if p.HasError() { // Recognition error - abort rule goto errorExit } } + } else if p.HasError() { // JIM + goto errorExit } { - p.SetState(714) + p.SetState(737) var _x = p.Identifier() - localctx.(*Delete_statementContext).alias = _x + localctx.(*Table_relationContext).table_name = _x + } + p.SetState(742) + p.GetErrorHandler().Sync(p) + if p.HasError() { + goto errorExit } + _la = p.GetTokenStream().LA(1) - } - p.SetState(719) - p.GetErrorHandler().Sync(p) - if p.HasError() { - goto errorExit - } - _la = p.GetTokenStream().LA(1) + if _la == KuneiformParserDOUBLE_QUOTE || _la == KuneiformParserAS || _la == KuneiformParserIDENTIFIER { + p.SetState(739) + p.GetErrorHandler().Sync(p) + if p.HasError() { + goto errorExit + } + _la = p.GetTokenStream().LA(1) - if _la == KuneiformParserWHERE { + if _la == KuneiformParserAS { + { + p.SetState(738) + p.Match(KuneiformParserAS) + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } + } + + } + { + p.SetState(741) + + var _x = p.Identifier() + + localctx.(*Table_relationContext).alias = _x + } + + } + + case KuneiformParserLPAREN: + localctx = NewSubquery_relationContext(p, localctx) + p.EnterOuterAlt(localctx, 2) { - p.SetState(717) - 
p.Match(KuneiformParserWHERE) + p.SetState(744) + p.Match(KuneiformParserLPAREN) if p.HasError() { // Recognition error - abort rule goto errorExit } } { - p.SetState(718) + p.SetState(745) + p.Select_statement() + } + { + p.SetState(746) + p.Match(KuneiformParserRPAREN) + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } + } + p.SetState(751) + p.GetErrorHandler().Sync(p) + if p.HasError() { + goto errorExit + } + _la = p.GetTokenStream().LA(1) - var _x = p.sql_expr(0) + if _la == KuneiformParserDOUBLE_QUOTE || _la == KuneiformParserAS || _la == KuneiformParserIDENTIFIER { + p.SetState(748) + p.GetErrorHandler().Sync(p) + if p.HasError() { + goto errorExit + } + _la = p.GetTokenStream().LA(1) + + if _la == KuneiformParserAS { + { + p.SetState(747) + p.Match(KuneiformParserAS) + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } + } + + } + { + p.SetState(750) + + var _x = p.Identifier() + + localctx.(*Subquery_relationContext).alias = _x + } - localctx.(*Delete_statementContext).where = _x } + default: + p.SetError(antlr.NewNoViableAltException(p, nil, nil, nil, nil, nil)) + goto errorExit } errorExit: @@ -12195,222 +12788,292 @@ errorExit: goto errorExit // Trick to prevent compiler error if the label is not used } -// ISql_exprContext is an interface to support dynamic dispatch. -type ISql_exprContext interface { +// IJoinContext is an interface to support dynamic dispatch. +type IJoinContext interface { antlr.ParserRuleContext // GetParser returns the parser. GetParser() antlr.Parser - // IsSql_exprContext differentiates from other interfaces. - IsSql_exprContext() + + // Getter signatures + JOIN() antlr.TerminalNode + Relation() IRelationContext + ON() antlr.TerminalNode + Sql_expr() ISql_exprContext + INNER() antlr.TerminalNode + LEFT() antlr.TerminalNode + RIGHT() antlr.TerminalNode + FULL() antlr.TerminalNode + + // IsJoinContext differentiates from other interfaces. 
+ IsJoinContext() } -type Sql_exprContext struct { +type JoinContext struct { antlr.BaseParserRuleContext parser antlr.Parser } -func NewEmptySql_exprContext() *Sql_exprContext { - var p = new(Sql_exprContext) +func NewEmptyJoinContext() *JoinContext { + var p = new(JoinContext) antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1) - p.RuleIndex = KuneiformParserRULE_sql_expr + p.RuleIndex = KuneiformParserRULE_join return p } -func InitEmptySql_exprContext(p *Sql_exprContext) { +func InitEmptyJoinContext(p *JoinContext) { antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1) - p.RuleIndex = KuneiformParserRULE_sql_expr + p.RuleIndex = KuneiformParserRULE_join } -func (*Sql_exprContext) IsSql_exprContext() {} +func (*JoinContext) IsJoinContext() {} -func NewSql_exprContext(parser antlr.Parser, parent antlr.ParserRuleContext, invokingState int) *Sql_exprContext { - var p = new(Sql_exprContext) +func NewJoinContext(parser antlr.Parser, parent antlr.ParserRuleContext, invokingState int) *JoinContext { + var p = new(JoinContext) antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, parent, invokingState) p.parser = parser - p.RuleIndex = KuneiformParserRULE_sql_expr + p.RuleIndex = KuneiformParserRULE_join return p } -func (s *Sql_exprContext) GetParser() antlr.Parser { return s.parser } +func (s *JoinContext) GetParser() antlr.Parser { return s.parser } -func (s *Sql_exprContext) CopyAll(ctx *Sql_exprContext) { - s.CopyFrom(&ctx.BaseParserRuleContext) +func (s *JoinContext) JOIN() antlr.TerminalNode { + return s.GetToken(KuneiformParserJOIN, 0) } -func (s *Sql_exprContext) GetRuleContext() antlr.RuleContext { - return s -} +func (s *JoinContext) Relation() IRelationContext { + var t antlr.RuleContext + for _, ctx := range s.GetChildren() { + if _, ok := ctx.(IRelationContext); ok { + t = ctx.(antlr.RuleContext) + break + } + } -func (s *Sql_exprContext) ToStringTree(ruleNames []string, recog antlr.Recognizer) string { - return antlr.TreesStringTree(s, ruleNames, recog) -} + if t == nil { + return nil + } -type Column_sql_exprContext struct { - Sql_exprContext - table IIdentifierContext - column IIdentifierContext + return t.(IRelationContext) } -func NewColumn_sql_exprContext(parser antlr.Parser, ctx antlr.ParserRuleContext) *Column_sql_exprContext { - var p = new(Column_sql_exprContext) +func (s *JoinContext) ON() antlr.TerminalNode { + return s.GetToken(KuneiformParserON, 0) +} - InitEmptySql_exprContext(&p.Sql_exprContext) - p.parser = parser - p.CopyAll(ctx.(*Sql_exprContext)) +func (s *JoinContext) Sql_expr() ISql_exprContext { + var t antlr.RuleContext + for _, ctx := range s.GetChildren() { + if _, ok := ctx.(ISql_exprContext); ok { + t = ctx.(antlr.RuleContext) + break + } + } - return p + if t == nil { + return nil + } + + return t.(ISql_exprContext) } -func (s *Column_sql_exprContext) GetTable() IIdentifierContext { return s.table } +func (s *JoinContext) INNER() antlr.TerminalNode { + return s.GetToken(KuneiformParserINNER, 0) +} -func (s *Column_sql_exprContext) GetColumn() IIdentifierContext { return s.column } +func (s *JoinContext) LEFT() antlr.TerminalNode { + return s.GetToken(KuneiformParserLEFT, 0) +} -func (s *Column_sql_exprContext) SetTable(v IIdentifierContext) { s.table = v } +func (s *JoinContext) RIGHT() antlr.TerminalNode { + return s.GetToken(KuneiformParserRIGHT, 0) +} -func (s *Column_sql_exprContext) SetColumn(v IIdentifierContext) { s.column = v } +func (s *JoinContext) FULL() antlr.TerminalNode { + return 
s.GetToken(KuneiformParserFULL, 0) +} -func (s *Column_sql_exprContext) GetRuleContext() antlr.RuleContext { +func (s *JoinContext) GetRuleContext() antlr.RuleContext { return s } -func (s *Column_sql_exprContext) AllIdentifier() []IIdentifierContext { - children := s.GetChildren() - len := 0 - for _, ctx := range children { - if _, ok := ctx.(IIdentifierContext); ok { - len++ - } - } +func (s *JoinContext) ToStringTree(ruleNames []string, recog antlr.Recognizer) string { + return antlr.TreesStringTree(s, ruleNames, recog) +} - tst := make([]IIdentifierContext, len) - i := 0 - for _, ctx := range children { - if t, ok := ctx.(IIdentifierContext); ok { - tst[i] = t.(IIdentifierContext) - i++ - } - } +func (s *JoinContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { + switch t := visitor.(type) { + case KuneiformParserVisitor: + return t.VisitJoin(s) - return tst + default: + return t.VisitChildren(s) + } } -func (s *Column_sql_exprContext) Identifier(i int) IIdentifierContext { - var t antlr.RuleContext - j := 0 - for _, ctx := range s.GetChildren() { - if _, ok := ctx.(IIdentifierContext); ok { - if j == i { - t = ctx.(antlr.RuleContext) - break - } - j++ - } - } +func (p *KuneiformParser) Join() (localctx IJoinContext) { + localctx = NewJoinContext(p, p.GetParserRuleContext(), p.GetState()) + p.EnterRule(localctx, 86, KuneiformParserRULE_join) + var _la int - if t == nil { - return nil + p.EnterOuterAlt(localctx, 1) + p.SetState(756) + p.GetErrorHandler().Sync(p) + if p.HasError() { + goto errorExit } + _la = p.GetTokenStream().LA(1) - return t.(IIdentifierContext) -} + if (int64((_la-74)) & ^0x3f) == 0 && ((int64(1)<<(_la-74))&67108871) != 0 { + { + p.SetState(755) + _la = p.GetTokenStream().LA(1) -func (s *Column_sql_exprContext) PERIOD() antlr.TerminalNode { - return s.GetToken(KuneiformParserPERIOD, 0) -} + if !((int64((_la-74)) & ^0x3f) == 0 && ((int64(1)<<(_la-74))&67108871) != 0) { + p.GetErrorHandler().RecoverInline(p) + } else { + p.GetErrorHandler().ReportMatch(p) + p.Consume() + } + } -func (s *Column_sql_exprContext) Type_cast() IType_castContext { - var t antlr.RuleContext - for _, ctx := range s.GetChildren() { - if _, ok := ctx.(IType_castContext); ok { - t = ctx.(antlr.RuleContext) - break + } + { + p.SetState(758) + p.Match(KuneiformParserJOIN) + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } + } + { + p.SetState(759) + p.Relation() + } + { + p.SetState(760) + p.Match(KuneiformParserON) + if p.HasError() { + // Recognition error - abort rule + goto errorExit } } + { + p.SetState(761) + p.sql_expr(0) + } - if t == nil { - return nil +errorExit: + if p.HasError() { + v := p.GetError() + localctx.SetException(v) + p.GetErrorHandler().ReportError(p, v) + p.GetErrorHandler().Recover(p, v) + p.SetError(nil) } + p.ExitRule() + return localctx + goto errorExit // Trick to prevent compiler error if the label is not used +} - return t.(IType_castContext) +// IResult_columnContext is an interface to support dynamic dispatch. +type IResult_columnContext interface { + antlr.ParserRuleContext + + // GetParser returns the parser. + GetParser() antlr.Parser + // IsResult_columnContext differentiates from other interfaces. 
+ IsResult_columnContext() } -func (s *Column_sql_exprContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { - switch t := visitor.(type) { - case KuneiformParserVisitor: - return t.VisitColumn_sql_expr(s) +type Result_columnContext struct { + antlr.BaseParserRuleContext + parser antlr.Parser +} - default: - return t.VisitChildren(s) - } +func NewEmptyResult_columnContext() *Result_columnContext { + var p = new(Result_columnContext) + antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1) + p.RuleIndex = KuneiformParserRULE_result_column + return p } -type Logical_sql_exprContext struct { - Sql_exprContext - left ISql_exprContext - right ISql_exprContext +func InitEmptyResult_columnContext(p *Result_columnContext) { + antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1) + p.RuleIndex = KuneiformParserRULE_result_column } -func NewLogical_sql_exprContext(parser antlr.Parser, ctx antlr.ParserRuleContext) *Logical_sql_exprContext { - var p = new(Logical_sql_exprContext) +func (*Result_columnContext) IsResult_columnContext() {} + +func NewResult_columnContext(parser antlr.Parser, parent antlr.ParserRuleContext, invokingState int) *Result_columnContext { + var p = new(Result_columnContext) + + antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, parent, invokingState) - InitEmptySql_exprContext(&p.Sql_exprContext) p.parser = parser - p.CopyAll(ctx.(*Sql_exprContext)) + p.RuleIndex = KuneiformParserRULE_result_column return p } -func (s *Logical_sql_exprContext) GetLeft() ISql_exprContext { return s.left } +func (s *Result_columnContext) GetParser() antlr.Parser { return s.parser } -func (s *Logical_sql_exprContext) GetRight() ISql_exprContext { return s.right } +func (s *Result_columnContext) CopyAll(ctx *Result_columnContext) { + s.CopyFrom(&ctx.BaseParserRuleContext) +} -func (s *Logical_sql_exprContext) SetLeft(v ISql_exprContext) { s.left = v } +func (s *Result_columnContext) GetRuleContext() antlr.RuleContext { + return s +} -func (s *Logical_sql_exprContext) SetRight(v ISql_exprContext) { s.right = v } +func (s *Result_columnContext) ToStringTree(ruleNames []string, recog antlr.Recognizer) string { + return antlr.TreesStringTree(s, ruleNames, recog) +} -func (s *Logical_sql_exprContext) GetRuleContext() antlr.RuleContext { - return s +type Expression_result_columnContext struct { + Result_columnContext } -func (s *Logical_sql_exprContext) AND() antlr.TerminalNode { - return s.GetToken(KuneiformParserAND, 0) +func NewExpression_result_columnContext(parser antlr.Parser, ctx antlr.ParserRuleContext) *Expression_result_columnContext { + var p = new(Expression_result_columnContext) + + InitEmptyResult_columnContext(&p.Result_columnContext) + p.parser = parser + p.CopyAll(ctx.(*Result_columnContext)) + + return p } -func (s *Logical_sql_exprContext) AllSql_expr() []ISql_exprContext { - children := s.GetChildren() - len := 0 - for _, ctx := range children { +func (s *Expression_result_columnContext) GetRuleContext() antlr.RuleContext { + return s +} + +func (s *Expression_result_columnContext) Sql_expr() ISql_exprContext { + var t antlr.RuleContext + for _, ctx := range s.GetChildren() { if _, ok := ctx.(ISql_exprContext); ok { - len++ + t = ctx.(antlr.RuleContext) + break } } - tst := make([]ISql_exprContext, len) - i := 0 - for _, ctx := range children { - if t, ok := ctx.(ISql_exprContext); ok { - tst[i] = t.(ISql_exprContext) - i++ - } + if t == nil { + return nil } - return tst + return t.(ISql_exprContext) } -func (s *Logical_sql_exprContext) 
Sql_expr(i int) ISql_exprContext { +func (s *Expression_result_columnContext) Identifier() IIdentifierContext { var t antlr.RuleContext - j := 0 for _, ctx := range s.GetChildren() { - if _, ok := ctx.(ISql_exprContext); ok { - if j == i { - t = ctx.(antlr.RuleContext) - break - } - j++ + if _, ok := ctx.(IIdentifierContext); ok { + t = ctx.(antlr.RuleContext) + break } } @@ -12418,62 +13081,55 @@ func (s *Logical_sql_exprContext) Sql_expr(i int) ISql_exprContext { return nil } - return t.(ISql_exprContext) + return t.(IIdentifierContext) } -func (s *Logical_sql_exprContext) OR() antlr.TerminalNode { - return s.GetToken(KuneiformParserOR, 0) +func (s *Expression_result_columnContext) AS() antlr.TerminalNode { + return s.GetToken(KuneiformParserAS, 0) } -func (s *Logical_sql_exprContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { +func (s *Expression_result_columnContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { switch t := visitor.(type) { case KuneiformParserVisitor: - return t.VisitLogical_sql_expr(s) + return t.VisitExpression_result_column(s) default: return t.VisitChildren(s) } } -type Field_access_sql_exprContext struct { - Sql_exprContext +type Wildcard_result_columnContext struct { + Result_columnContext + table_name IIdentifierContext } -func NewField_access_sql_exprContext(parser antlr.Parser, ctx antlr.ParserRuleContext) *Field_access_sql_exprContext { - var p = new(Field_access_sql_exprContext) +func NewWildcard_result_columnContext(parser antlr.Parser, ctx antlr.ParserRuleContext) *Wildcard_result_columnContext { + var p = new(Wildcard_result_columnContext) - InitEmptySql_exprContext(&p.Sql_exprContext) + InitEmptyResult_columnContext(&p.Result_columnContext) p.parser = parser - p.CopyAll(ctx.(*Sql_exprContext)) + p.CopyAll(ctx.(*Result_columnContext)) return p } -func (s *Field_access_sql_exprContext) GetRuleContext() antlr.RuleContext { - return s -} +func (s *Wildcard_result_columnContext) GetTable_name() IIdentifierContext { return s.table_name } -func (s *Field_access_sql_exprContext) Sql_expr() ISql_exprContext { - var t antlr.RuleContext - for _, ctx := range s.GetChildren() { - if _, ok := ctx.(ISql_exprContext); ok { - t = ctx.(antlr.RuleContext) - break - } - } +func (s *Wildcard_result_columnContext) SetTable_name(v IIdentifierContext) { s.table_name = v } - if t == nil { - return nil - } +func (s *Wildcard_result_columnContext) GetRuleContext() antlr.RuleContext { + return s +} - return t.(ISql_exprContext) +func (s *Wildcard_result_columnContext) STAR() antlr.TerminalNode { + return s.GetToken(KuneiformParserSTAR, 0) } -func (s *Field_access_sql_exprContext) PERIOD() antlr.TerminalNode { +func (s *Wildcard_result_columnContext) PERIOD() antlr.TerminalNode { return s.GetToken(KuneiformParserPERIOD, 0) } -func (s *Field_access_sql_exprContext) Identifier() IIdentifierContext { +func (s *Wildcard_result_columnContext) Identifier() IIdentifierContext { var t antlr.RuleContext for _, ctx := range s.GetChildren() { if _, ok := ctx.(IIdentifierContext); ok { @@ -12489,191 +13145,237 @@ func (s *Field_access_sql_exprContext) Identifier() IIdentifierContext { return t.(IIdentifierContext) } -func (s *Field_access_sql_exprContext) Type_cast() IType_castContext { - var t antlr.RuleContext - for _, ctx := range s.GetChildren() { - if _, ok := ctx.(IType_castContext); ok { - t = ctx.(antlr.RuleContext) - break - } - } - - if t == nil { - return nil - } - - return t.(IType_castContext) -} - -func (s *Field_access_sql_exprContext) Accept(visitor 
antlr.ParseTreeVisitor) interface{} { +func (s *Wildcard_result_columnContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { switch t := visitor.(type) { case KuneiformParserVisitor: - return t.VisitField_access_sql_expr(s) + return t.VisitWildcard_result_column(s) default: return t.VisitChildren(s) } } -type Array_access_sql_exprContext struct { - Sql_exprContext - array_element ISql_exprContext - single ISql_exprContext - left ISql_exprContext - right ISql_exprContext -} - -func NewArray_access_sql_exprContext(parser antlr.Parser, ctx antlr.ParserRuleContext) *Array_access_sql_exprContext { - var p = new(Array_access_sql_exprContext) - - InitEmptySql_exprContext(&p.Sql_exprContext) - p.parser = parser - p.CopyAll(ctx.(*Sql_exprContext)) - - return p -} - -func (s *Array_access_sql_exprContext) GetArray_element() ISql_exprContext { return s.array_element } - -func (s *Array_access_sql_exprContext) GetSingle() ISql_exprContext { return s.single } +func (p *KuneiformParser) Result_column() (localctx IResult_columnContext) { + localctx = NewResult_columnContext(p, p.GetParserRuleContext(), p.GetState()) + p.EnterRule(localctx, 88, KuneiformParserRULE_result_column) + var _la int -func (s *Array_access_sql_exprContext) GetLeft() ISql_exprContext { return s.left } + p.SetState(776) + p.GetErrorHandler().Sync(p) + if p.HasError() { + goto errorExit + } -func (s *Array_access_sql_exprContext) GetRight() ISql_exprContext { return s.right } + switch p.GetInterpreter().AdaptivePredict(p.BaseParser, p.GetTokenStream(), 94, p.GetParserRuleContext()) { + case 1: + localctx = NewExpression_result_columnContext(p, localctx) + p.EnterOuterAlt(localctx, 1) + { + p.SetState(763) + p.sql_expr(0) + } + p.SetState(768) + p.GetErrorHandler().Sync(p) + if p.HasError() { + goto errorExit + } + _la = p.GetTokenStream().LA(1) -func (s *Array_access_sql_exprContext) SetArray_element(v ISql_exprContext) { s.array_element = v } + if _la == KuneiformParserDOUBLE_QUOTE || _la == KuneiformParserAS || _la == KuneiformParserIDENTIFIER { + p.SetState(765) + p.GetErrorHandler().Sync(p) + if p.HasError() { + goto errorExit + } + _la = p.GetTokenStream().LA(1) -func (s *Array_access_sql_exprContext) SetSingle(v ISql_exprContext) { s.single = v } + if _la == KuneiformParserAS { + { + p.SetState(764) + p.Match(KuneiformParserAS) + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } + } -func (s *Array_access_sql_exprContext) SetLeft(v ISql_exprContext) { s.left = v } + } + { + p.SetState(767) + p.Identifier() + } -func (s *Array_access_sql_exprContext) SetRight(v ISql_exprContext) { s.right = v } + } -func (s *Array_access_sql_exprContext) GetRuleContext() antlr.RuleContext { - return s -} + case 2: + localctx = NewWildcard_result_columnContext(p, localctx) + p.EnterOuterAlt(localctx, 2) + p.SetState(773) + p.GetErrorHandler().Sync(p) + if p.HasError() { + goto errorExit + } + _la = p.GetTokenStream().LA(1) -func (s *Array_access_sql_exprContext) LBRACKET() antlr.TerminalNode { - return s.GetToken(KuneiformParserLBRACKET, 0) -} + if _la == KuneiformParserDOUBLE_QUOTE || _la == KuneiformParserIDENTIFIER { + { + p.SetState(770) -func (s *Array_access_sql_exprContext) RBRACKET() antlr.TerminalNode { - return s.GetToken(KuneiformParserRBRACKET, 0) -} + var _x = p.Identifier() -func (s *Array_access_sql_exprContext) AllSql_expr() []ISql_exprContext { - children := s.GetChildren() - len := 0 - for _, ctx := range children { - if _, ok := ctx.(ISql_exprContext); ok { - len++ - } - } + 
localctx.(*Wildcard_result_columnContext).table_name = _x + } + { + p.SetState(771) + p.Match(KuneiformParserPERIOD) + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } + } - tst := make([]ISql_exprContext, len) - i := 0 - for _, ctx := range children { - if t, ok := ctx.(ISql_exprContext); ok { - tst[i] = t.(ISql_exprContext) - i++ } - } - - return tst -} - -func (s *Array_access_sql_exprContext) Sql_expr(i int) ISql_exprContext { - var t antlr.RuleContext - j := 0 - for _, ctx := range s.GetChildren() { - if _, ok := ctx.(ISql_exprContext); ok { - if j == i { - t = ctx.(antlr.RuleContext) - break + { + p.SetState(775) + p.Match(KuneiformParserSTAR) + if p.HasError() { + // Recognition error - abort rule + goto errorExit } - j++ } - } - if t == nil { - return nil + case antlr.ATNInvalidAltNumber: + goto errorExit } - return t.(ISql_exprContext) -} - -func (s *Array_access_sql_exprContext) Type_cast() IType_castContext { - var t antlr.RuleContext - for _, ctx := range s.GetChildren() { - if _, ok := ctx.(IType_castContext); ok { - t = ctx.(antlr.RuleContext) - break - } +errorExit: + if p.HasError() { + v := p.GetError() + localctx.SetException(v) + p.GetErrorHandler().ReportError(p, v) + p.GetErrorHandler().Recover(p, v) + p.SetError(nil) } + p.ExitRule() + return localctx + goto errorExit // Trick to prevent compiler error if the label is not used +} - if t == nil { - return nil - } +// IUpdate_statementContext is an interface to support dynamic dispatch. +type IUpdate_statementContext interface { + antlr.ParserRuleContext - return t.(IType_castContext) -} + // GetParser returns the parser. + GetParser() antlr.Parser -func (s *Array_access_sql_exprContext) COL() antlr.TerminalNode { - return s.GetToken(KuneiformParserCOL, 0) -} + // GetTable_name returns the table_name rule contexts. + GetTable_name() IIdentifierContext -func (s *Array_access_sql_exprContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { - switch t := visitor.(type) { - case KuneiformParserVisitor: - return t.VisitArray_access_sql_expr(s) + // GetAlias returns the alias rule contexts. + GetAlias() IIdentifierContext - default: - return t.VisitChildren(s) - } -} + // GetWhere returns the where rule contexts. + GetWhere() ISql_exprContext -type Comparison_sql_exprContext struct { - Sql_exprContext - left ISql_exprContext - right ISql_exprContext -} + // SetTable_name sets the table_name rule contexts. + SetTable_name(IIdentifierContext) -func NewComparison_sql_exprContext(parser antlr.Parser, ctx antlr.ParserRuleContext) *Comparison_sql_exprContext { - var p = new(Comparison_sql_exprContext) + // SetAlias sets the alias rule contexts. + SetAlias(IIdentifierContext) + + // SetWhere sets the where rule contexts. + SetWhere(ISql_exprContext) + + // Getter signatures + UPDATE() antlr.TerminalNode + SET() antlr.TerminalNode + AllUpdate_set_clause() []IUpdate_set_clauseContext + Update_set_clause(i int) IUpdate_set_clauseContext + AllIdentifier() []IIdentifierContext + Identifier(i int) IIdentifierContext + AllCOMMA() []antlr.TerminalNode + COMMA(i int) antlr.TerminalNode + FROM() antlr.TerminalNode + Relation() IRelationContext + WHERE() antlr.TerminalNode + Sql_expr() ISql_exprContext + AS() antlr.TerminalNode + AllJoin() []IJoinContext + Join(i int) IJoinContext + + // IsUpdate_statementContext differentiates from other interfaces. 
+ IsUpdate_statementContext() +} + +type Update_statementContext struct { + antlr.BaseParserRuleContext + parser antlr.Parser + table_name IIdentifierContext + alias IIdentifierContext + where ISql_exprContext +} + +func NewEmptyUpdate_statementContext() *Update_statementContext { + var p = new(Update_statementContext) + antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1) + p.RuleIndex = KuneiformParserRULE_update_statement + return p +} + +func InitEmptyUpdate_statementContext(p *Update_statementContext) { + antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1) + p.RuleIndex = KuneiformParserRULE_update_statement +} + +func (*Update_statementContext) IsUpdate_statementContext() {} + +func NewUpdate_statementContext(parser antlr.Parser, parent antlr.ParserRuleContext, invokingState int) *Update_statementContext { + var p = new(Update_statementContext) + + antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, parent, invokingState) - InitEmptySql_exprContext(&p.Sql_exprContext) p.parser = parser - p.CopyAll(ctx.(*Sql_exprContext)) + p.RuleIndex = KuneiformParserRULE_update_statement return p } -func (s *Comparison_sql_exprContext) GetLeft() ISql_exprContext { return s.left } +func (s *Update_statementContext) GetParser() antlr.Parser { return s.parser } -func (s *Comparison_sql_exprContext) GetRight() ISql_exprContext { return s.right } +func (s *Update_statementContext) GetTable_name() IIdentifierContext { return s.table_name } -func (s *Comparison_sql_exprContext) SetLeft(v ISql_exprContext) { s.left = v } +func (s *Update_statementContext) GetAlias() IIdentifierContext { return s.alias } -func (s *Comparison_sql_exprContext) SetRight(v ISql_exprContext) { s.right = v } +func (s *Update_statementContext) GetWhere() ISql_exprContext { return s.where } -func (s *Comparison_sql_exprContext) GetRuleContext() antlr.RuleContext { - return s +func (s *Update_statementContext) SetTable_name(v IIdentifierContext) { s.table_name = v } + +func (s *Update_statementContext) SetAlias(v IIdentifierContext) { s.alias = v } + +func (s *Update_statementContext) SetWhere(v ISql_exprContext) { s.where = v } + +func (s *Update_statementContext) UPDATE() antlr.TerminalNode { + return s.GetToken(KuneiformParserUPDATE, 0) } -func (s *Comparison_sql_exprContext) AllSql_expr() []ISql_exprContext { +func (s *Update_statementContext) SET() antlr.TerminalNode { + return s.GetToken(KuneiformParserSET, 0) +} + +func (s *Update_statementContext) AllUpdate_set_clause() []IUpdate_set_clauseContext { children := s.GetChildren() len := 0 for _, ctx := range children { - if _, ok := ctx.(ISql_exprContext); ok { + if _, ok := ctx.(IUpdate_set_clauseContext); ok { len++ } } - tst := make([]ISql_exprContext, len) + tst := make([]IUpdate_set_clauseContext, len) i := 0 for _, ctx := range children { - if t, ok := ctx.(ISql_exprContext); ok { - tst[i] = t.(ISql_exprContext) + if t, ok := ctx.(IUpdate_set_clauseContext); ok { + tst[i] = t.(IUpdate_set_clauseContext) i++ } } @@ -12681,11 +13383,11 @@ func (s *Comparison_sql_exprContext) AllSql_expr() []ISql_exprContext { return tst } -func (s *Comparison_sql_exprContext) Sql_expr(i int) ISql_exprContext { +func (s *Update_statementContext) Update_set_clause(i int) IUpdate_set_clauseContext { var t antlr.RuleContext j := 0 for _, ctx := range s.GetChildren() { - if _, ok := ctx.(ISql_exprContext); ok { + if _, ok := ctx.(IUpdate_set_clauseContext); ok { if j == i { t = ctx.(antlr.RuleContext) break @@ -12698,69 +13400,66 @@ func (s 
*Comparison_sql_exprContext) Sql_expr(i int) ISql_exprContext { return nil } - return t.(ISql_exprContext) -} - -func (s *Comparison_sql_exprContext) EQUALS() antlr.TerminalNode { - return s.GetToken(KuneiformParserEQUALS, 0) -} - -func (s *Comparison_sql_exprContext) EQUATE() antlr.TerminalNode { - return s.GetToken(KuneiformParserEQUATE, 0) -} - -func (s *Comparison_sql_exprContext) NEQ() antlr.TerminalNode { - return s.GetToken(KuneiformParserNEQ, 0) -} - -func (s *Comparison_sql_exprContext) LT() antlr.TerminalNode { - return s.GetToken(KuneiformParserLT, 0) + return t.(IUpdate_set_clauseContext) } -func (s *Comparison_sql_exprContext) LTE() antlr.TerminalNode { - return s.GetToken(KuneiformParserLTE, 0) -} +func (s *Update_statementContext) AllIdentifier() []IIdentifierContext { + children := s.GetChildren() + len := 0 + for _, ctx := range children { + if _, ok := ctx.(IIdentifierContext); ok { + len++ + } + } -func (s *Comparison_sql_exprContext) GT() antlr.TerminalNode { - return s.GetToken(KuneiformParserGT, 0) -} + tst := make([]IIdentifierContext, len) + i := 0 + for _, ctx := range children { + if t, ok := ctx.(IIdentifierContext); ok { + tst[i] = t.(IIdentifierContext) + i++ + } + } -func (s *Comparison_sql_exprContext) GTE() antlr.TerminalNode { - return s.GetToken(KuneiformParserGTE, 0) + return tst } -func (s *Comparison_sql_exprContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { - switch t := visitor.(type) { - case KuneiformParserVisitor: - return t.VisitComparison_sql_expr(s) +func (s *Update_statementContext) Identifier(i int) IIdentifierContext { + var t antlr.RuleContext + j := 0 + for _, ctx := range s.GetChildren() { + if _, ok := ctx.(IIdentifierContext); ok { + if j == i { + t = ctx.(antlr.RuleContext) + break + } + j++ + } + } - default: - return t.VisitChildren(s) + if t == nil { + return nil } -} -type Literal_sql_exprContext struct { - Sql_exprContext + return t.(IIdentifierContext) } -func NewLiteral_sql_exprContext(parser antlr.Parser, ctx antlr.ParserRuleContext) *Literal_sql_exprContext { - var p = new(Literal_sql_exprContext) - - InitEmptySql_exprContext(&p.Sql_exprContext) - p.parser = parser - p.CopyAll(ctx.(*Sql_exprContext)) +func (s *Update_statementContext) AllCOMMA() []antlr.TerminalNode { + return s.GetTokens(KuneiformParserCOMMA) +} - return p +func (s *Update_statementContext) COMMA(i int) antlr.TerminalNode { + return s.GetToken(KuneiformParserCOMMA, i) } -func (s *Literal_sql_exprContext) GetRuleContext() antlr.RuleContext { - return s +func (s *Update_statementContext) FROM() antlr.TerminalNode { + return s.GetToken(KuneiformParserFROM, 0) } -func (s *Literal_sql_exprContext) Literal() ILiteralContext { +func (s *Update_statementContext) Relation() IRelationContext { var t antlr.RuleContext for _, ctx := range s.GetChildren() { - if _, ok := ctx.(ILiteralContext); ok { + if _, ok := ctx.(IRelationContext); ok { t = ctx.(antlr.RuleContext) break } @@ -12770,13 +13469,17 @@ func (s *Literal_sql_exprContext) Literal() ILiteralContext { return nil } - return t.(ILiteralContext) + return t.(IRelationContext) } -func (s *Literal_sql_exprContext) Type_cast() IType_castContext { +func (s *Update_statementContext) WHERE() antlr.TerminalNode { + return s.GetToken(KuneiformParserWHERE, 0) +} + +func (s *Update_statementContext) Sql_expr() ISql_exprContext { var t antlr.RuleContext for _, ctx := range s.GetChildren() { - if _, ok := ctx.(IType_castContext); ok { + if _, ok := ctx.(ISql_exprContext); ok { t = ctx.(antlr.RuleContext) break } @@ 
-12786,74 +13489,27 @@ func (s *Literal_sql_exprContext) Type_cast() IType_castContext { return nil } - return t.(IType_castContext) -} - -func (s *Literal_sql_exprContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { - switch t := visitor.(type) { - case KuneiformParserVisitor: - return t.VisitLiteral_sql_expr(s) - - default: - return t.VisitChildren(s) - } -} - -type Between_sql_exprContext struct { - Sql_exprContext - element ISql_exprContext - lower ISql_exprContext - upper ISql_exprContext -} - -func NewBetween_sql_exprContext(parser antlr.Parser, ctx antlr.ParserRuleContext) *Between_sql_exprContext { - var p = new(Between_sql_exprContext) - - InitEmptySql_exprContext(&p.Sql_exprContext) - p.parser = parser - p.CopyAll(ctx.(*Sql_exprContext)) - - return p -} - -func (s *Between_sql_exprContext) GetElement() ISql_exprContext { return s.element } - -func (s *Between_sql_exprContext) GetLower() ISql_exprContext { return s.lower } - -func (s *Between_sql_exprContext) GetUpper() ISql_exprContext { return s.upper } - -func (s *Between_sql_exprContext) SetElement(v ISql_exprContext) { s.element = v } - -func (s *Between_sql_exprContext) SetLower(v ISql_exprContext) { s.lower = v } - -func (s *Between_sql_exprContext) SetUpper(v ISql_exprContext) { s.upper = v } - -func (s *Between_sql_exprContext) GetRuleContext() antlr.RuleContext { - return s -} - -func (s *Between_sql_exprContext) BETWEEN() antlr.TerminalNode { - return s.GetToken(KuneiformParserBETWEEN, 0) + return t.(ISql_exprContext) } -func (s *Between_sql_exprContext) AND() antlr.TerminalNode { - return s.GetToken(KuneiformParserAND, 0) +func (s *Update_statementContext) AS() antlr.TerminalNode { + return s.GetToken(KuneiformParserAS, 0) } -func (s *Between_sql_exprContext) AllSql_expr() []ISql_exprContext { +func (s *Update_statementContext) AllJoin() []IJoinContext { children := s.GetChildren() len := 0 for _, ctx := range children { - if _, ok := ctx.(ISql_exprContext); ok { + if _, ok := ctx.(IJoinContext); ok { len++ } } - tst := make([]ISql_exprContext, len) + tst := make([]IJoinContext, len) i := 0 for _, ctx := range children { - if t, ok := ctx.(ISql_exprContext); ok { - tst[i] = t.(ISql_exprContext) + if t, ok := ctx.(IJoinContext); ok { + tst[i] = t.(IJoinContext) i++ } } @@ -12861,11 +13517,11 @@ func (s *Between_sql_exprContext) AllSql_expr() []ISql_exprContext { return tst } -func (s *Between_sql_exprContext) Sql_expr(i int) ISql_exprContext { +func (s *Update_statementContext) Join(i int) IJoinContext { var t antlr.RuleContext j := 0 for _, ctx := range s.GetChildren() { - if _, ok := ctx.(ISql_exprContext); ok { + if _, ok := ctx.(IJoinContext); ok { if j == i { t = ctx.(antlr.RuleContext) break @@ -12878,106 +13534,268 @@ func (s *Between_sql_exprContext) Sql_expr(i int) ISql_exprContext { return nil } - return t.(ISql_exprContext) + return t.(IJoinContext) } -func (s *Between_sql_exprContext) NOT() antlr.TerminalNode { - return s.GetToken(KuneiformParserNOT, 0) +func (s *Update_statementContext) GetRuleContext() antlr.RuleContext { + return s } -func (s *Between_sql_exprContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { +func (s *Update_statementContext) ToStringTree(ruleNames []string, recog antlr.Recognizer) string { + return antlr.TreesStringTree(s, ruleNames, recog) +} + +func (s *Update_statementContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { switch t := visitor.(type) { case KuneiformParserVisitor: - return t.VisitBetween_sql_expr(s) + return t.VisitUpdate_statement(s) 
default: return t.VisitChildren(s) } } -type Function_call_sql_exprContext struct { - Sql_exprContext -} +func (p *KuneiformParser) Update_statement() (localctx IUpdate_statementContext) { + localctx = NewUpdate_statementContext(p, p.GetParserRuleContext(), p.GetState()) + p.EnterRule(localctx, 90, KuneiformParserRULE_update_statement) + var _la int -func NewFunction_call_sql_exprContext(parser antlr.Parser, ctx antlr.ParserRuleContext) *Function_call_sql_exprContext { - var p = new(Function_call_sql_exprContext) + p.EnterOuterAlt(localctx, 1) + { + p.SetState(778) + p.Match(KuneiformParserUPDATE) + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } + } + { + p.SetState(779) - InitEmptySql_exprContext(&p.Sql_exprContext) - p.parser = parser - p.CopyAll(ctx.(*Sql_exprContext)) + var _x = p.Identifier() - return p -} + localctx.(*Update_statementContext).table_name = _x + } + p.SetState(784) + p.GetErrorHandler().Sync(p) + if p.HasError() { + goto errorExit + } + _la = p.GetTokenStream().LA(1) -func (s *Function_call_sql_exprContext) GetRuleContext() antlr.RuleContext { - return s -} + if _la == KuneiformParserDOUBLE_QUOTE || _la == KuneiformParserAS || _la == KuneiformParserIDENTIFIER { + p.SetState(781) + p.GetErrorHandler().Sync(p) + if p.HasError() { + goto errorExit + } + _la = p.GetTokenStream().LA(1) -func (s *Function_call_sql_exprContext) Sql_function_call() ISql_function_callContext { - var t antlr.RuleContext - for _, ctx := range s.GetChildren() { - if _, ok := ctx.(ISql_function_callContext); ok { - t = ctx.(antlr.RuleContext) - break + if _la == KuneiformParserAS { + { + p.SetState(780) + p.Match(KuneiformParserAS) + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } + } + + } + { + p.SetState(783) + + var _x = p.Identifier() + + localctx.(*Update_statementContext).alias = _x + } + + } + { + p.SetState(786) + p.Match(KuneiformParserSET) + if p.HasError() { + // Recognition error - abort rule + goto errorExit } } + { + p.SetState(787) + p.Update_set_clause() + } + p.SetState(792) + p.GetErrorHandler().Sync(p) + if p.HasError() { + goto errorExit + } + _la = p.GetTokenStream().LA(1) - if t == nil { - return nil + for _la == KuneiformParserCOMMA { + { + p.SetState(788) + p.Match(KuneiformParserCOMMA) + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } + } + { + p.SetState(789) + p.Update_set_clause() + } + + p.SetState(794) + p.GetErrorHandler().Sync(p) + if p.HasError() { + goto errorExit + } + _la = p.GetTokenStream().LA(1) + } + p.SetState(803) + p.GetErrorHandler().Sync(p) + if p.HasError() { + goto errorExit } + _la = p.GetTokenStream().LA(1) - return t.(ISql_function_callContext) -} + if _la == KuneiformParserFROM { + { + p.SetState(795) + p.Match(KuneiformParserFROM) + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } + } + { + p.SetState(796) + p.Relation() + } + p.SetState(800) + p.GetErrorHandler().Sync(p) + if p.HasError() { + goto errorExit + } + _la = p.GetTokenStream().LA(1) -func (s *Function_call_sql_exprContext) Type_cast() IType_castContext { - var t antlr.RuleContext - for _, ctx := range s.GetChildren() { - if _, ok := ctx.(IType_castContext); ok { - t = ctx.(antlr.RuleContext) - break + for (int64((_la-73)) & ^0x3f) == 0 && ((int64(1)<<(_la-73))&134217743) != 0 { + { + p.SetState(797) + p.Join() + } + + p.SetState(802) + p.GetErrorHandler().Sync(p) + if p.HasError() { + goto errorExit + } + _la = p.GetTokenStream().LA(1) } + + } + p.SetState(807) + 
p.GetErrorHandler().Sync(p) + if p.HasError() { + goto errorExit } + _la = p.GetTokenStream().LA(1) + + if _la == KuneiformParserWHERE { + { + p.SetState(805) + p.Match(KuneiformParserWHERE) + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } + } + { + p.SetState(806) + + var _x = p.sql_expr(0) + + localctx.(*Update_statementContext).where = _x + } - if t == nil { - return nil } - return t.(IType_castContext) +errorExit: + if p.HasError() { + v := p.GetError() + localctx.SetException(v) + p.GetErrorHandler().ReportError(p, v) + p.GetErrorHandler().Recover(p, v) + p.SetError(nil) + } + p.ExitRule() + return localctx + goto errorExit // Trick to prevent compiler error if the label is not used } -func (s *Function_call_sql_exprContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { - switch t := visitor.(type) { - case KuneiformParserVisitor: - return t.VisitFunction_call_sql_expr(s) +// IUpdate_set_clauseContext is an interface to support dynamic dispatch. +type IUpdate_set_clauseContext interface { + antlr.ParserRuleContext - default: - return t.VisitChildren(s) - } + // GetParser returns the parser. + GetParser() antlr.Parser + + // GetColumn returns the column rule contexts. + GetColumn() IIdentifierContext + + // SetColumn sets the column rule contexts. + SetColumn(IIdentifierContext) + + // Getter signatures + EQUALS() antlr.TerminalNode + Sql_expr() ISql_exprContext + Identifier() IIdentifierContext + + // IsUpdate_set_clauseContext differentiates from other interfaces. + IsUpdate_set_clauseContext() } -type Paren_sql_exprContext struct { - Sql_exprContext +type Update_set_clauseContext struct { + antlr.BaseParserRuleContext + parser antlr.Parser + column IIdentifierContext } -func NewParen_sql_exprContext(parser antlr.Parser, ctx antlr.ParserRuleContext) *Paren_sql_exprContext { - var p = new(Paren_sql_exprContext) +func NewEmptyUpdate_set_clauseContext() *Update_set_clauseContext { + var p = new(Update_set_clauseContext) + antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1) + p.RuleIndex = KuneiformParserRULE_update_set_clause + return p +} + +func InitEmptyUpdate_set_clauseContext(p *Update_set_clauseContext) { + antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1) + p.RuleIndex = KuneiformParserRULE_update_set_clause +} + +func (*Update_set_clauseContext) IsUpdate_set_clauseContext() {} + +func NewUpdate_set_clauseContext(parser antlr.Parser, parent antlr.ParserRuleContext, invokingState int) *Update_set_clauseContext { + var p = new(Update_set_clauseContext) + + antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, parent, invokingState) - InitEmptySql_exprContext(&p.Sql_exprContext) p.parser = parser - p.CopyAll(ctx.(*Sql_exprContext)) + p.RuleIndex = KuneiformParserRULE_update_set_clause return p } -func (s *Paren_sql_exprContext) GetRuleContext() antlr.RuleContext { - return s -} +func (s *Update_set_clauseContext) GetParser() antlr.Parser { return s.parser } -func (s *Paren_sql_exprContext) LPAREN() antlr.TerminalNode { - return s.GetToken(KuneiformParserLPAREN, 0) +func (s *Update_set_clauseContext) GetColumn() IIdentifierContext { return s.column } + +func (s *Update_set_clauseContext) SetColumn(v IIdentifierContext) { s.column = v } + +func (s *Update_set_clauseContext) EQUALS() antlr.TerminalNode { + return s.GetToken(KuneiformParserEQUALS, 0) } -func (s *Paren_sql_exprContext) Sql_expr() ISql_exprContext { +func (s *Update_set_clauseContext) Sql_expr() ISql_exprContext { var t antlr.RuleContext for _, ctx := 
range s.GetChildren() { if _, ok := ctx.(ISql_exprContext); ok { @@ -12993,14 +13811,10 @@ func (s *Paren_sql_exprContext) Sql_expr() ISql_exprContext { return t.(ISql_exprContext) } -func (s *Paren_sql_exprContext) RPAREN() antlr.TerminalNode { - return s.GetToken(KuneiformParserRPAREN, 0) -} - -func (s *Paren_sql_exprContext) Type_cast() IType_castContext { +func (s *Update_set_clauseContext) Identifier() IIdentifierContext { var t antlr.RuleContext for _, ctx := range s.GetChildren() { - if _, ok := ctx.(IType_castContext); ok { + if _, ok := ctx.(IIdentifierContext); ok { t = ctx.(antlr.RuleContext) break } @@ -13010,189 +13824,183 @@ func (s *Paren_sql_exprContext) Type_cast() IType_castContext { return nil } - return t.(IType_castContext) + return t.(IIdentifierContext) } -func (s *Paren_sql_exprContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { +func (s *Update_set_clauseContext) GetRuleContext() antlr.RuleContext { + return s +} + +func (s *Update_set_clauseContext) ToStringTree(ruleNames []string, recog antlr.Recognizer) string { + return antlr.TreesStringTree(s, ruleNames, recog) +} + +func (s *Update_set_clauseContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { switch t := visitor.(type) { case KuneiformParserVisitor: - return t.VisitParen_sql_expr(s) + return t.VisitUpdate_set_clause(s) default: return t.VisitChildren(s) } } -type Collate_sql_exprContext struct { - Sql_exprContext -} - -func NewCollate_sql_exprContext(parser antlr.Parser, ctx antlr.ParserRuleContext) *Collate_sql_exprContext { - var p = new(Collate_sql_exprContext) - - InitEmptySql_exprContext(&p.Sql_exprContext) - p.parser = parser - p.CopyAll(ctx.(*Sql_exprContext)) - - return p -} +func (p *KuneiformParser) Update_set_clause() (localctx IUpdate_set_clauseContext) { + localctx = NewUpdate_set_clauseContext(p, p.GetParserRuleContext(), p.GetState()) + p.EnterRule(localctx, 92, KuneiformParserRULE_update_set_clause) + p.EnterOuterAlt(localctx, 1) + { + p.SetState(809) -func (s *Collate_sql_exprContext) GetRuleContext() antlr.RuleContext { - return s -} + var _x = p.Identifier() -func (s *Collate_sql_exprContext) Sql_expr() ISql_exprContext { - var t antlr.RuleContext - for _, ctx := range s.GetChildren() { - if _, ok := ctx.(ISql_exprContext); ok { - t = ctx.(antlr.RuleContext) - break + localctx.(*Update_set_clauseContext).column = _x + } + { + p.SetState(810) + p.Match(KuneiformParserEQUALS) + if p.HasError() { + // Recognition error - abort rule + goto errorExit } } - - if t == nil { - return nil + { + p.SetState(811) + p.sql_expr(0) } - return t.(ISql_exprContext) +errorExit: + if p.HasError() { + v := p.GetError() + localctx.SetException(v) + p.GetErrorHandler().ReportError(p, v) + p.GetErrorHandler().Recover(p, v) + p.SetError(nil) + } + p.ExitRule() + return localctx + goto errorExit // Trick to prevent compiler error if the label is not used } -func (s *Collate_sql_exprContext) COLLATE() antlr.TerminalNode { - return s.GetToken(KuneiformParserCOLLATE, 0) -} +// IInsert_statementContext is an interface to support dynamic dispatch. +type IInsert_statementContext interface { + antlr.ParserRuleContext -func (s *Collate_sql_exprContext) Identifier() IIdentifierContext { - var t antlr.RuleContext - for _, ctx := range s.GetChildren() { - if _, ok := ctx.(IIdentifierContext); ok { - t = ctx.(antlr.RuleContext) - break - } - } + // GetParser returns the parser. + GetParser() antlr.Parser - if t == nil { - return nil - } + // GetTable_name returns the table_name rule contexts. 
+ GetTable_name() IIdentifierContext - return t.(IIdentifierContext) -} + // GetAlias returns the alias rule contexts. + GetAlias() IIdentifierContext -func (s *Collate_sql_exprContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { - switch t := visitor.(type) { - case KuneiformParserVisitor: - return t.VisitCollate_sql_expr(s) + // GetTarget_columns returns the target_columns rule contexts. + GetTarget_columns() IIdentifier_listContext - default: - return t.VisitChildren(s) - } -} + // SetTable_name sets the table_name rule contexts. + SetTable_name(IIdentifierContext) -type Variable_sql_exprContext struct { - Sql_exprContext -} + // SetAlias sets the alias rule contexts. + SetAlias(IIdentifierContext) -func NewVariable_sql_exprContext(parser antlr.Parser, ctx antlr.ParserRuleContext) *Variable_sql_exprContext { - var p = new(Variable_sql_exprContext) + // SetTarget_columns sets the target_columns rule contexts. + SetTarget_columns(IIdentifier_listContext) - InitEmptySql_exprContext(&p.Sql_exprContext) - p.parser = parser - p.CopyAll(ctx.(*Sql_exprContext)) + // Getter signatures + INSERT() antlr.TerminalNode + INTO() antlr.TerminalNode + AllIdentifier() []IIdentifierContext + Identifier(i int) IIdentifierContext + AllLPAREN() []antlr.TerminalNode + LPAREN(i int) antlr.TerminalNode + AllRPAREN() []antlr.TerminalNode + RPAREN(i int) antlr.TerminalNode + Upsert_clause() IUpsert_clauseContext + Identifier_list() IIdentifier_listContext + VALUES() antlr.TerminalNode + AllSql_expr_list() []ISql_expr_listContext + Sql_expr_list(i int) ISql_expr_listContext + Select_statement() ISelect_statementContext + AS() antlr.TerminalNode + AllCOMMA() []antlr.TerminalNode + COMMA(i int) antlr.TerminalNode - return p + // IsInsert_statementContext differentiates from other interfaces. 
+ IsInsert_statementContext() } -func (s *Variable_sql_exprContext) GetRuleContext() antlr.RuleContext { - return s +type Insert_statementContext struct { + antlr.BaseParserRuleContext + parser antlr.Parser + table_name IIdentifierContext + alias IIdentifierContext + target_columns IIdentifier_listContext } -func (s *Variable_sql_exprContext) Variable() IVariableContext { - var t antlr.RuleContext - for _, ctx := range s.GetChildren() { - if _, ok := ctx.(IVariableContext); ok { - t = ctx.(antlr.RuleContext) - break - } - } - - if t == nil { - return nil - } +func NewEmptyInsert_statementContext() *Insert_statementContext { + var p = new(Insert_statementContext) + antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1) + p.RuleIndex = KuneiformParserRULE_insert_statement + return p +} - return t.(IVariableContext) +func InitEmptyInsert_statementContext(p *Insert_statementContext) { + antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1) + p.RuleIndex = KuneiformParserRULE_insert_statement } -func (s *Variable_sql_exprContext) Type_cast() IType_castContext { - var t antlr.RuleContext - for _, ctx := range s.GetChildren() { - if _, ok := ctx.(IType_castContext); ok { - t = ctx.(antlr.RuleContext) - break - } - } +func (*Insert_statementContext) IsInsert_statementContext() {} - if t == nil { - return nil - } +func NewInsert_statementContext(parser antlr.Parser, parent antlr.ParserRuleContext, invokingState int) *Insert_statementContext { + var p = new(Insert_statementContext) - return t.(IType_castContext) -} + antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, parent, invokingState) -func (s *Variable_sql_exprContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { - switch t := visitor.(type) { - case KuneiformParserVisitor: - return t.VisitVariable_sql_expr(s) + p.parser = parser + p.RuleIndex = KuneiformParserRULE_insert_statement - default: - return t.VisitChildren(s) - } + return p } -type Is_sql_exprContext struct { - Sql_exprContext - left ISql_exprContext - right ISql_exprContext -} +func (s *Insert_statementContext) GetParser() antlr.Parser { return s.parser } -func NewIs_sql_exprContext(parser antlr.Parser, ctx antlr.ParserRuleContext) *Is_sql_exprContext { - var p = new(Is_sql_exprContext) +func (s *Insert_statementContext) GetTable_name() IIdentifierContext { return s.table_name } - InitEmptySql_exprContext(&p.Sql_exprContext) - p.parser = parser - p.CopyAll(ctx.(*Sql_exprContext)) +func (s *Insert_statementContext) GetAlias() IIdentifierContext { return s.alias } - return p +func (s *Insert_statementContext) GetTarget_columns() IIdentifier_listContext { + return s.target_columns } -func (s *Is_sql_exprContext) GetLeft() ISql_exprContext { return s.left } - -func (s *Is_sql_exprContext) GetRight() ISql_exprContext { return s.right } +func (s *Insert_statementContext) SetTable_name(v IIdentifierContext) { s.table_name = v } -func (s *Is_sql_exprContext) SetLeft(v ISql_exprContext) { s.left = v } +func (s *Insert_statementContext) SetAlias(v IIdentifierContext) { s.alias = v } -func (s *Is_sql_exprContext) SetRight(v ISql_exprContext) { s.right = v } +func (s *Insert_statementContext) SetTarget_columns(v IIdentifier_listContext) { s.target_columns = v } -func (s *Is_sql_exprContext) GetRuleContext() antlr.RuleContext { - return s +func (s *Insert_statementContext) INSERT() antlr.TerminalNode { + return s.GetToken(KuneiformParserINSERT, 0) } -func (s *Is_sql_exprContext) IS() antlr.TerminalNode { - return s.GetToken(KuneiformParserIS, 0) +func (s 
*Insert_statementContext) INTO() antlr.TerminalNode { + return s.GetToken(KuneiformParserINTO, 0) } -func (s *Is_sql_exprContext) AllSql_expr() []ISql_exprContext { +func (s *Insert_statementContext) AllIdentifier() []IIdentifierContext { children := s.GetChildren() len := 0 for _, ctx := range children { - if _, ok := ctx.(ISql_exprContext); ok { + if _, ok := ctx.(IIdentifierContext); ok { len++ } } - tst := make([]ISql_exprContext, len) + tst := make([]IIdentifierContext, len) i := 0 for _, ctx := range children { - if t, ok := ctx.(ISql_exprContext); ok { - tst[i] = t.(ISql_exprContext) + if t, ok := ctx.(IIdentifierContext); ok { + tst[i] = t.(IIdentifierContext) i++ } } @@ -13200,11 +14008,11 @@ func (s *Is_sql_exprContext) AllSql_expr() []ISql_exprContext { return tst } -func (s *Is_sql_exprContext) Sql_expr(i int) ISql_exprContext { +func (s *Insert_statementContext) Identifier(i int) IIdentifierContext { var t antlr.RuleContext j := 0 for _, ctx := range s.GetChildren() { - if _, ok := ctx.(ISql_exprContext); ok { + if _, ok := ctx.(IIdentifierContext); ok { if j == i { t = ctx.(antlr.RuleContext) break @@ -13217,102 +14025,47 @@ func (s *Is_sql_exprContext) Sql_expr(i int) ISql_exprContext { return nil } - return t.(ISql_exprContext) + return t.(IIdentifierContext) } -func (s *Is_sql_exprContext) NULL() antlr.TerminalNode { - return s.GetToken(KuneiformParserNULL, 0) +func (s *Insert_statementContext) AllLPAREN() []antlr.TerminalNode { + return s.GetTokens(KuneiformParserLPAREN) } -func (s *Is_sql_exprContext) TRUE() antlr.TerminalNode { - return s.GetToken(KuneiformParserTRUE, 0) +func (s *Insert_statementContext) LPAREN(i int) antlr.TerminalNode { + return s.GetToken(KuneiformParserLPAREN, i) } -func (s *Is_sql_exprContext) FALSE() antlr.TerminalNode { - return s.GetToken(KuneiformParserFALSE, 0) +func (s *Insert_statementContext) AllRPAREN() []antlr.TerminalNode { + return s.GetTokens(KuneiformParserRPAREN) } -func (s *Is_sql_exprContext) NOT() antlr.TerminalNode { - return s.GetToken(KuneiformParserNOT, 0) -} - -func (s *Is_sql_exprContext) DISTINCT() antlr.TerminalNode { - return s.GetToken(KuneiformParserDISTINCT, 0) -} - -func (s *Is_sql_exprContext) FROM() antlr.TerminalNode { - return s.GetToken(KuneiformParserFROM, 0) -} - -func (s *Is_sql_exprContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { - switch t := visitor.(type) { - case KuneiformParserVisitor: - return t.VisitIs_sql_expr(s) - - default: - return t.VisitChildren(s) - } -} - -type Arithmetic_sql_exprContext struct { - Sql_exprContext - left ISql_exprContext - right ISql_exprContext -} - -func NewArithmetic_sql_exprContext(parser antlr.Parser, ctx antlr.ParserRuleContext) *Arithmetic_sql_exprContext { - var p = new(Arithmetic_sql_exprContext) - - InitEmptySql_exprContext(&p.Sql_exprContext) - p.parser = parser - p.CopyAll(ctx.(*Sql_exprContext)) - - return p -} - -func (s *Arithmetic_sql_exprContext) GetLeft() ISql_exprContext { return s.left } - -func (s *Arithmetic_sql_exprContext) GetRight() ISql_exprContext { return s.right } - -func (s *Arithmetic_sql_exprContext) SetLeft(v ISql_exprContext) { s.left = v } - -func (s *Arithmetic_sql_exprContext) SetRight(v ISql_exprContext) { s.right = v } - -func (s *Arithmetic_sql_exprContext) GetRuleContext() antlr.RuleContext { - return s +func (s *Insert_statementContext) RPAREN(i int) antlr.TerminalNode { + return s.GetToken(KuneiformParserRPAREN, i) } -func (s *Arithmetic_sql_exprContext) AllSql_expr() []ISql_exprContext { - children := s.GetChildren() - 
len := 0 - for _, ctx := range children { - if _, ok := ctx.(ISql_exprContext); ok { - len++ +func (s *Insert_statementContext) Upsert_clause() IUpsert_clauseContext { + var t antlr.RuleContext + for _, ctx := range s.GetChildren() { + if _, ok := ctx.(IUpsert_clauseContext); ok { + t = ctx.(antlr.RuleContext) + break } } - tst := make([]ISql_exprContext, len) - i := 0 - for _, ctx := range children { - if t, ok := ctx.(ISql_exprContext); ok { - tst[i] = t.(ISql_exprContext) - i++ - } + if t == nil { + return nil } - return tst + return t.(IUpsert_clauseContext) } -func (s *Arithmetic_sql_exprContext) Sql_expr(i int) ISql_exprContext { +func (s *Insert_statementContext) Identifier_list() IIdentifier_listContext { var t antlr.RuleContext - j := 0 for _, ctx := range s.GetChildren() { - if _, ok := ctx.(ISql_exprContext); ok { - if j == i { - t = ctx.(antlr.RuleContext) - break - } - j++ + if _, ok := ctx.(IIdentifier_listContext); ok { + t = ctx.(antlr.RuleContext) + break } } @@ -13320,85 +14073,27 @@ func (s *Arithmetic_sql_exprContext) Sql_expr(i int) ISql_exprContext { return nil } - return t.(ISql_exprContext) -} - -func (s *Arithmetic_sql_exprContext) STAR() antlr.TerminalNode { - return s.GetToken(KuneiformParserSTAR, 0) -} - -func (s *Arithmetic_sql_exprContext) DIV() antlr.TerminalNode { - return s.GetToken(KuneiformParserDIV, 0) -} - -func (s *Arithmetic_sql_exprContext) MOD() antlr.TerminalNode { - return s.GetToken(KuneiformParserMOD, 0) -} - -func (s *Arithmetic_sql_exprContext) PLUS() antlr.TerminalNode { - return s.GetToken(KuneiformParserPLUS, 0) -} - -func (s *Arithmetic_sql_exprContext) MINUS() antlr.TerminalNode { - return s.GetToken(KuneiformParserMINUS, 0) -} - -func (s *Arithmetic_sql_exprContext) CONCAT() antlr.TerminalNode { - return s.GetToken(KuneiformParserCONCAT, 0) -} - -func (s *Arithmetic_sql_exprContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { - switch t := visitor.(type) { - case KuneiformParserVisitor: - return t.VisitArithmetic_sql_expr(s) - - default: - return t.VisitChildren(s) - } -} - -type Like_sql_exprContext struct { - Sql_exprContext - left ISql_exprContext - right ISql_exprContext -} - -func NewLike_sql_exprContext(parser antlr.Parser, ctx antlr.ParserRuleContext) *Like_sql_exprContext { - var p = new(Like_sql_exprContext) - - InitEmptySql_exprContext(&p.Sql_exprContext) - p.parser = parser - p.CopyAll(ctx.(*Sql_exprContext)) - - return p + return t.(IIdentifier_listContext) } -func (s *Like_sql_exprContext) GetLeft() ISql_exprContext { return s.left } - -func (s *Like_sql_exprContext) GetRight() ISql_exprContext { return s.right } - -func (s *Like_sql_exprContext) SetLeft(v ISql_exprContext) { s.left = v } - -func (s *Like_sql_exprContext) SetRight(v ISql_exprContext) { s.right = v } - -func (s *Like_sql_exprContext) GetRuleContext() antlr.RuleContext { - return s +func (s *Insert_statementContext) VALUES() antlr.TerminalNode { + return s.GetToken(KuneiformParserVALUES, 0) } -func (s *Like_sql_exprContext) AllSql_expr() []ISql_exprContext { +func (s *Insert_statementContext) AllSql_expr_list() []ISql_expr_listContext { children := s.GetChildren() len := 0 for _, ctx := range children { - if _, ok := ctx.(ISql_exprContext); ok { + if _, ok := ctx.(ISql_expr_listContext); ok { len++ } } - tst := make([]ISql_exprContext, len) + tst := make([]ISql_expr_listContext, len) i := 0 for _, ctx := range children { - if t, ok := ctx.(ISql_exprContext); ok { - tst[i] = t.(ISql_exprContext) + if t, ok := ctx.(ISql_expr_listContext); ok { + tst[i] 
= t.(ISql_expr_listContext) i++ } } @@ -13406,11 +14101,11 @@ func (s *Like_sql_exprContext) AllSql_expr() []ISql_exprContext { return tst } -func (s *Like_sql_exprContext) Sql_expr(i int) ISql_exprContext { +func (s *Insert_statementContext) Sql_expr_list(i int) ISql_expr_listContext { var t antlr.RuleContext j := 0 for _, ctx := range s.GetChildren() { - if _, ok := ctx.(ISql_exprContext); ok { + if _, ok := ctx.(ISql_expr_listContext); ok { if j == i { t = ctx.(antlr.RuleContext) break @@ -13423,54 +14118,10 @@ func (s *Like_sql_exprContext) Sql_expr(i int) ISql_exprContext { return nil } - return t.(ISql_exprContext) -} - -func (s *Like_sql_exprContext) LIKE() antlr.TerminalNode { - return s.GetToken(KuneiformParserLIKE, 0) -} - -func (s *Like_sql_exprContext) ILIKE() antlr.TerminalNode { - return s.GetToken(KuneiformParserILIKE, 0) -} - -func (s *Like_sql_exprContext) NOT() antlr.TerminalNode { - return s.GetToken(KuneiformParserNOT, 0) -} - -func (s *Like_sql_exprContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { - switch t := visitor.(type) { - case KuneiformParserVisitor: - return t.VisitLike_sql_expr(s) - - default: - return t.VisitChildren(s) - } -} - -type Subquery_sql_exprContext struct { - Sql_exprContext -} - -func NewSubquery_sql_exprContext(parser antlr.Parser, ctx antlr.ParserRuleContext) *Subquery_sql_exprContext { - var p = new(Subquery_sql_exprContext) - - InitEmptySql_exprContext(&p.Sql_exprContext) - p.parser = parser - p.CopyAll(ctx.(*Sql_exprContext)) - - return p -} - -func (s *Subquery_sql_exprContext) GetRuleContext() antlr.RuleContext { - return s -} - -func (s *Subquery_sql_exprContext) LPAREN() antlr.TerminalNode { - return s.GetToken(KuneiformParserLPAREN, 0) + return t.(ISql_expr_listContext) } -func (s *Subquery_sql_exprContext) Select_statement() ISelect_statementContext { +func (s *Insert_statementContext) Select_statement() ISelect_statementContext { var t antlr.RuleContext for _, ctx := range s.GetChildren() { if _, ok := ctx.(ISelect_statementContext); ok { @@ -13486,284 +14137,404 @@ func (s *Subquery_sql_exprContext) Select_statement() ISelect_statementContext { return t.(ISelect_statementContext) } -func (s *Subquery_sql_exprContext) RPAREN() antlr.TerminalNode { - return s.GetToken(KuneiformParserRPAREN, 0) +func (s *Insert_statementContext) AS() antlr.TerminalNode { + return s.GetToken(KuneiformParserAS, 0) } -func (s *Subquery_sql_exprContext) EXISTS() antlr.TerminalNode { - return s.GetToken(KuneiformParserEXISTS, 0) +func (s *Insert_statementContext) AllCOMMA() []antlr.TerminalNode { + return s.GetTokens(KuneiformParserCOMMA) } -func (s *Subquery_sql_exprContext) Type_cast() IType_castContext { - var t antlr.RuleContext - for _, ctx := range s.GetChildren() { - if _, ok := ctx.(IType_castContext); ok { - t = ctx.(antlr.RuleContext) - break - } - } - - if t == nil { - return nil - } +func (s *Insert_statementContext) COMMA(i int) antlr.TerminalNode { + return s.GetToken(KuneiformParserCOMMA, i) +} - return t.(IType_castContext) +func (s *Insert_statementContext) GetRuleContext() antlr.RuleContext { + return s } -func (s *Subquery_sql_exprContext) NOT() antlr.TerminalNode { - return s.GetToken(KuneiformParserNOT, 0) +func (s *Insert_statementContext) ToStringTree(ruleNames []string, recog antlr.Recognizer) string { + return antlr.TreesStringTree(s, ruleNames, recog) } -func (s *Subquery_sql_exprContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { +func (s *Insert_statementContext) Accept(visitor antlr.ParseTreeVisitor) 
interface{} { switch t := visitor.(type) { case KuneiformParserVisitor: - return t.VisitSubquery_sql_expr(s) + return t.VisitInsert_statement(s) default: return t.VisitChildren(s) } } -type Unary_sql_exprContext struct { - Sql_exprContext -} - -func NewUnary_sql_exprContext(parser antlr.Parser, ctx antlr.ParserRuleContext) *Unary_sql_exprContext { - var p = new(Unary_sql_exprContext) - - InitEmptySql_exprContext(&p.Sql_exprContext) - p.parser = parser - p.CopyAll(ctx.(*Sql_exprContext)) +func (p *KuneiformParser) Insert_statement() (localctx IInsert_statementContext) { + localctx = NewInsert_statementContext(p, p.GetParserRuleContext(), p.GetState()) + p.EnterRule(localctx, 94, KuneiformParserRULE_insert_statement) + var _la int - return p -} - -func (s *Unary_sql_exprContext) GetRuleContext() antlr.RuleContext { - return s -} - -func (s *Unary_sql_exprContext) Sql_expr() ISql_exprContext { - var t antlr.RuleContext - for _, ctx := range s.GetChildren() { - if _, ok := ctx.(ISql_exprContext); ok { - t = ctx.(antlr.RuleContext) - break + p.EnterOuterAlt(localctx, 1) + { + p.SetState(813) + p.Match(KuneiformParserINSERT) + if p.HasError() { + // Recognition error - abort rule + goto errorExit } } - - if t == nil { - return nil + { + p.SetState(814) + p.Match(KuneiformParserINTO) + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } } + { + p.SetState(815) - return t.(ISql_exprContext) -} - -func (s *Unary_sql_exprContext) PLUS() antlr.TerminalNode { - return s.GetToken(KuneiformParserPLUS, 0) -} - -func (s *Unary_sql_exprContext) MINUS() antlr.TerminalNode { - return s.GetToken(KuneiformParserMINUS, 0) -} - -func (s *Unary_sql_exprContext) NOT() antlr.TerminalNode { - return s.GetToken(KuneiformParserNOT, 0) -} - -func (s *Unary_sql_exprContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { - switch t := visitor.(type) { - case KuneiformParserVisitor: - return t.VisitUnary_sql_expr(s) + var _x = p.Identifier() - default: - return t.VisitChildren(s) + localctx.(*Insert_statementContext).table_name = _x } -} - -type Case_exprContext struct { - Sql_exprContext - case_clause ISql_exprContext - else_clause ISql_exprContext -} - -func NewCase_exprContext(parser antlr.Parser, ctx antlr.ParserRuleContext) *Case_exprContext { - var p = new(Case_exprContext) - - InitEmptySql_exprContext(&p.Sql_exprContext) - p.parser = parser - p.CopyAll(ctx.(*Sql_exprContext)) - - return p -} - -func (s *Case_exprContext) GetCase_clause() ISql_exprContext { return s.case_clause } - -func (s *Case_exprContext) GetElse_clause() ISql_exprContext { return s.else_clause } - -func (s *Case_exprContext) SetCase_clause(v ISql_exprContext) { s.case_clause = v } + p.SetState(820) + p.GetErrorHandler().Sync(p) + if p.HasError() { + goto errorExit + } + _la = p.GetTokenStream().LA(1) -func (s *Case_exprContext) SetElse_clause(v ISql_exprContext) { s.else_clause = v } + if _la == KuneiformParserDOUBLE_QUOTE || _la == KuneiformParserAS || _la == KuneiformParserIDENTIFIER { + p.SetState(817) + p.GetErrorHandler().Sync(p) + if p.HasError() { + goto errorExit + } + _la = p.GetTokenStream().LA(1) -func (s *Case_exprContext) GetRuleContext() antlr.RuleContext { - return s -} + if _la == KuneiformParserAS { + { + p.SetState(816) + p.Match(KuneiformParserAS) + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } + } -func (s *Case_exprContext) CASE() antlr.TerminalNode { - return s.GetToken(KuneiformParserCASE, 0) -} + } + { + p.SetState(819) -func (s *Case_exprContext) END() 
antlr.TerminalNode { - return s.GetToken(KuneiformParserEND, 0) -} + var _x = p.Identifier() -func (s *Case_exprContext) AllWhen_then_clause() []IWhen_then_clauseContext { - children := s.GetChildren() - len := 0 - for _, ctx := range children { - if _, ok := ctx.(IWhen_then_clauseContext); ok { - len++ + localctx.(*Insert_statementContext).alias = _x } + + } + p.SetState(826) + p.GetErrorHandler().Sync(p) + if p.HasError() { + goto errorExit } + _la = p.GetTokenStream().LA(1) - tst := make([]IWhen_then_clauseContext, len) - i := 0 - for _, ctx := range children { - if t, ok := ctx.(IWhen_then_clauseContext); ok { - tst[i] = t.(IWhen_then_clauseContext) - i++ + if _la == KuneiformParserLPAREN { + { + p.SetState(822) + p.Match(KuneiformParserLPAREN) + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } } - } + { + p.SetState(823) - return tst -} + var _x = p.Identifier_list() -func (s *Case_exprContext) When_then_clause(i int) IWhen_then_clauseContext { - var t antlr.RuleContext - j := 0 - for _, ctx := range s.GetChildren() { - if _, ok := ctx.(IWhen_then_clauseContext); ok { - if j == i { - t = ctx.(antlr.RuleContext) - break + localctx.(*Insert_statementContext).target_columns = _x + } + { + p.SetState(824) + p.Match(KuneiformParserRPAREN) + if p.HasError() { + // Recognition error - abort rule + goto errorExit } - j++ } - } - if t == nil { - return nil } - - return t.(IWhen_then_clauseContext) -} - -func (s *Case_exprContext) ELSE() antlr.TerminalNode { - return s.GetToken(KuneiformParserELSE, 0) -} - -func (s *Case_exprContext) AllSql_expr() []ISql_exprContext { - children := s.GetChildren() - len := 0 - for _, ctx := range children { - if _, ok := ctx.(ISql_exprContext); ok { - len++ - } + p.SetState(843) + p.GetErrorHandler().Sync(p) + if p.HasError() { + goto errorExit } - tst := make([]ISql_exprContext, len) - i := 0 - for _, ctx := range children { - if t, ok := ctx.(ISql_exprContext); ok { - tst[i] = t.(ISql_exprContext) - i++ + switch p.GetTokenStream().LA(1) { + case KuneiformParserVALUES: + { + p.SetState(828) + p.Match(KuneiformParserVALUES) + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } } - } + { + p.SetState(829) + p.Match(KuneiformParserLPAREN) + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } + } + { + p.SetState(830) + p.Sql_expr_list() + } + { + p.SetState(831) + p.Match(KuneiformParserRPAREN) + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } + } + p.SetState(839) + p.GetErrorHandler().Sync(p) + if p.HasError() { + goto errorExit + } + _la = p.GetTokenStream().LA(1) - return tst -} + for _la == KuneiformParserCOMMA { + { + p.SetState(832) + p.Match(KuneiformParserCOMMA) + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } + } + { + p.SetState(833) + p.Match(KuneiformParserLPAREN) + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } + } + { + p.SetState(834) + p.Sql_expr_list() + } + { + p.SetState(835) + p.Match(KuneiformParserRPAREN) + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } + } -func (s *Case_exprContext) Sql_expr(i int) ISql_exprContext { - var t antlr.RuleContext - j := 0 - for _, ctx := range s.GetChildren() { - if _, ok := ctx.(ISql_exprContext); ok { - if j == i { - t = ctx.(antlr.RuleContext) - break + p.SetState(841) + p.GetErrorHandler().Sync(p) + if p.HasError() { + goto errorExit } - j++ + _la = p.GetTokenStream().LA(1) } - } - if t == nil { - return nil - } + case 
KuneiformParserSELECT: + { + p.SetState(842) + p.Select_statement() + } - return t.(ISql_exprContext) -} + default: + p.SetError(antlr.NewNoViableAltException(p, nil, nil, nil, nil, nil)) + goto errorExit + } + p.SetState(846) + p.GetErrorHandler().Sync(p) + if p.HasError() { + goto errorExit + } + _la = p.GetTokenStream().LA(1) -func (s *Case_exprContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { - switch t := visitor.(type) { - case KuneiformParserVisitor: - return t.VisitCase_expr(s) + if _la == KuneiformParserON { + { + p.SetState(845) + p.Upsert_clause() + } - default: - return t.VisitChildren(s) } + +errorExit: + if p.HasError() { + v := p.GetError() + localctx.SetException(v) + p.GetErrorHandler().ReportError(p, v) + p.GetErrorHandler().Recover(p, v) + p.SetError(nil) + } + p.ExitRule() + return localctx + goto errorExit // Trick to prevent compiler error if the label is not used } -type In_sql_exprContext struct { - Sql_exprContext +// IUpsert_clauseContext is an interface to support dynamic dispatch. +type IUpsert_clauseContext interface { + antlr.ParserRuleContext + + // GetParser returns the parser. + GetParser() antlr.Parser + + // GetConflict_columns returns the conflict_columns rule contexts. + GetConflict_columns() IIdentifier_listContext + + // GetConflict_where returns the conflict_where rule contexts. + GetConflict_where() ISql_exprContext + + // GetUpdate_where returns the update_where rule contexts. + GetUpdate_where() ISql_exprContext + + // SetConflict_columns sets the conflict_columns rule contexts. + SetConflict_columns(IIdentifier_listContext) + + // SetConflict_where sets the conflict_where rule contexts. + SetConflict_where(ISql_exprContext) + + // SetUpdate_where sets the update_where rule contexts. + SetUpdate_where(ISql_exprContext) + + // Getter signatures + ON() antlr.TerminalNode + CONFLICT() antlr.TerminalNode + DO() antlr.TerminalNode + NOTHING() antlr.TerminalNode + UPDATE() antlr.TerminalNode + SET() antlr.TerminalNode + AllUpdate_set_clause() []IUpdate_set_clauseContext + Update_set_clause(i int) IUpdate_set_clauseContext + LPAREN() antlr.TerminalNode + RPAREN() antlr.TerminalNode + Identifier_list() IIdentifier_listContext + AllCOMMA() []antlr.TerminalNode + COMMA(i int) antlr.TerminalNode + AllWHERE() []antlr.TerminalNode + WHERE(i int) antlr.TerminalNode + AllSql_expr() []ISql_exprContext + Sql_expr(i int) ISql_exprContext + + // IsUpsert_clauseContext differentiates from other interfaces. 
+ IsUpsert_clauseContext() } -func NewIn_sql_exprContext(parser antlr.Parser, ctx antlr.ParserRuleContext) *In_sql_exprContext { - var p = new(In_sql_exprContext) +type Upsert_clauseContext struct { + antlr.BaseParserRuleContext + parser antlr.Parser + conflict_columns IIdentifier_listContext + conflict_where ISql_exprContext + update_where ISql_exprContext +} + +func NewEmptyUpsert_clauseContext() *Upsert_clauseContext { + var p = new(Upsert_clauseContext) + antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1) + p.RuleIndex = KuneiformParserRULE_upsert_clause + return p +} + +func InitEmptyUpsert_clauseContext(p *Upsert_clauseContext) { + antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1) + p.RuleIndex = KuneiformParserRULE_upsert_clause +} + +func (*Upsert_clauseContext) IsUpsert_clauseContext() {} + +func NewUpsert_clauseContext(parser antlr.Parser, parent antlr.ParserRuleContext, invokingState int) *Upsert_clauseContext { + var p = new(Upsert_clauseContext) + + antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, parent, invokingState) - InitEmptySql_exprContext(&p.Sql_exprContext) p.parser = parser - p.CopyAll(ctx.(*Sql_exprContext)) + p.RuleIndex = KuneiformParserRULE_upsert_clause return p } -func (s *In_sql_exprContext) GetRuleContext() antlr.RuleContext { - return s +func (s *Upsert_clauseContext) GetParser() antlr.Parser { return s.parser } + +func (s *Upsert_clauseContext) GetConflict_columns() IIdentifier_listContext { + return s.conflict_columns } -func (s *In_sql_exprContext) Sql_expr() ISql_exprContext { - var t antlr.RuleContext - for _, ctx := range s.GetChildren() { - if _, ok := ctx.(ISql_exprContext); ok { - t = ctx.(antlr.RuleContext) - break - } - } +func (s *Upsert_clauseContext) GetConflict_where() ISql_exprContext { return s.conflict_where } - if t == nil { - return nil - } +func (s *Upsert_clauseContext) GetUpdate_where() ISql_exprContext { return s.update_where } - return t.(ISql_exprContext) +func (s *Upsert_clauseContext) SetConflict_columns(v IIdentifier_listContext) { s.conflict_columns = v } + +func (s *Upsert_clauseContext) SetConflict_where(v ISql_exprContext) { s.conflict_where = v } + +func (s *Upsert_clauseContext) SetUpdate_where(v ISql_exprContext) { s.update_where = v } + +func (s *Upsert_clauseContext) ON() antlr.TerminalNode { + return s.GetToken(KuneiformParserON, 0) } -func (s *In_sql_exprContext) IN() antlr.TerminalNode { - return s.GetToken(KuneiformParserIN, 0) +func (s *Upsert_clauseContext) CONFLICT() antlr.TerminalNode { + return s.GetToken(KuneiformParserCONFLICT, 0) } -func (s *In_sql_exprContext) LPAREN() antlr.TerminalNode { - return s.GetToken(KuneiformParserLPAREN, 0) +func (s *Upsert_clauseContext) DO() antlr.TerminalNode { + return s.GetToken(KuneiformParserDO, 0) } -func (s *In_sql_exprContext) RPAREN() antlr.TerminalNode { - return s.GetToken(KuneiformParserRPAREN, 0) +func (s *Upsert_clauseContext) NOTHING() antlr.TerminalNode { + return s.GetToken(KuneiformParserNOTHING, 0) } -func (s *In_sql_exprContext) Sql_expr_list() ISql_expr_listContext { +func (s *Upsert_clauseContext) UPDATE() antlr.TerminalNode { + return s.GetToken(KuneiformParserUPDATE, 0) +} + +func (s *Upsert_clauseContext) SET() antlr.TerminalNode { + return s.GetToken(KuneiformParserSET, 0) +} + +func (s *Upsert_clauseContext) AllUpdate_set_clause() []IUpdate_set_clauseContext { + children := s.GetChildren() + len := 0 + for _, ctx := range children { + if _, ok := ctx.(IUpdate_set_clauseContext); ok { + len++ + } + } + + tst := 
make([]IUpdate_set_clauseContext, len) + i := 0 + for _, ctx := range children { + if t, ok := ctx.(IUpdate_set_clauseContext); ok { + tst[i] = t.(IUpdate_set_clauseContext) + i++ + } + } + + return tst +} + +func (s *Upsert_clauseContext) Update_set_clause(i int) IUpdate_set_clauseContext { var t antlr.RuleContext + j := 0 for _, ctx := range s.GetChildren() { - if _, ok := ctx.(ISql_expr_listContext); ok { - t = ctx.(antlr.RuleContext) - break + if _, ok := ctx.(IUpdate_set_clauseContext); ok { + if j == i { + t = ctx.(antlr.RuleContext) + break + } + j++ } } @@ -13771,13 +14542,21 @@ func (s *In_sql_exprContext) Sql_expr_list() ISql_expr_listContext { return nil } - return t.(ISql_expr_listContext) + return t.(IUpdate_set_clauseContext) } -func (s *In_sql_exprContext) Select_statement() ISelect_statementContext { +func (s *Upsert_clauseContext) LPAREN() antlr.TerminalNode { + return s.GetToken(KuneiformParserLPAREN, 0) +} + +func (s *Upsert_clauseContext) RPAREN() antlr.TerminalNode { + return s.GetToken(KuneiformParserRPAREN, 0) +} + +func (s *Upsert_clauseContext) Identifier_list() IIdentifier_listContext { var t antlr.RuleContext for _, ctx := range s.GetChildren() { - if _, ok := ctx.(ISelect_statementContext); ok { + if _, ok := ctx.(IIdentifier_listContext); ok { t = ctx.(antlr.RuleContext) break } @@ -13787,55 +14566,116 @@ func (s *In_sql_exprContext) Select_statement() ISelect_statementContext { return nil } - return t.(ISelect_statementContext) + return t.(IIdentifier_listContext) } -func (s *In_sql_exprContext) NOT() antlr.TerminalNode { - return s.GetToken(KuneiformParserNOT, 0) +func (s *Upsert_clauseContext) AllCOMMA() []antlr.TerminalNode { + return s.GetTokens(KuneiformParserCOMMA) } -func (s *In_sql_exprContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { - switch t := visitor.(type) { - case KuneiformParserVisitor: - return t.VisitIn_sql_expr(s) +func (s *Upsert_clauseContext) COMMA(i int) antlr.TerminalNode { + return s.GetToken(KuneiformParserCOMMA, i) +} - default: - return t.VisitChildren(s) - } +func (s *Upsert_clauseContext) AllWHERE() []antlr.TerminalNode { + return s.GetTokens(KuneiformParserWHERE) } -func (p *KuneiformParser) Sql_expr() (localctx ISql_exprContext) { - return p.sql_expr(0) +func (s *Upsert_clauseContext) WHERE(i int) antlr.TerminalNode { + return s.GetToken(KuneiformParserWHERE, i) } -func (p *KuneiformParser) sql_expr(_p int) (localctx ISql_exprContext) { - var _parentctx antlr.ParserRuleContext = p.GetParserRuleContext() +func (s *Upsert_clauseContext) AllSql_expr() []ISql_exprContext { + children := s.GetChildren() + len := 0 + for _, ctx := range children { + if _, ok := ctx.(ISql_exprContext); ok { + len++ + } + } - _parentState := p.GetState() - localctx = NewSql_exprContext(p, p.GetParserRuleContext(), _parentState) - var _prevctx ISql_exprContext = localctx - var _ antlr.ParserRuleContext = _prevctx // TODO: To prevent unused variable warning. 
- _startState := 88 - p.EnterRecursionRule(localctx, 88, KuneiformParserRULE_sql_expr, _p) - var _la int + tst := make([]ISql_exprContext, len) + i := 0 + for _, ctx := range children { + if t, ok := ctx.(ISql_exprContext); ok { + tst[i] = t.(ISql_exprContext) + i++ + } + } - var _alt int + return tst +} - p.EnterOuterAlt(localctx, 1) - p.SetState(780) +func (s *Upsert_clauseContext) Sql_expr(i int) ISql_exprContext { + var t antlr.RuleContext + j := 0 + for _, ctx := range s.GetChildren() { + if _, ok := ctx.(ISql_exprContext); ok { + if j == i { + t = ctx.(antlr.RuleContext) + break + } + j++ + } + } + + if t == nil { + return nil + } + + return t.(ISql_exprContext) +} + +func (s *Upsert_clauseContext) GetRuleContext() antlr.RuleContext { + return s +} + +func (s *Upsert_clauseContext) ToStringTree(ruleNames []string, recog antlr.Recognizer) string { + return antlr.TreesStringTree(s, ruleNames, recog) +} + +func (s *Upsert_clauseContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { + switch t := visitor.(type) { + case KuneiformParserVisitor: + return t.VisitUpsert_clause(s) + + default: + return t.VisitChildren(s) + } +} + +func (p *KuneiformParser) Upsert_clause() (localctx IUpsert_clauseContext) { + localctx = NewUpsert_clauseContext(p, p.GetParserRuleContext(), p.GetState()) + p.EnterRule(localctx, 96, KuneiformParserRULE_upsert_clause) + var _la int + + p.EnterOuterAlt(localctx, 1) + { + p.SetState(848) + p.Match(KuneiformParserON) + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } + } + { + p.SetState(849) + p.Match(KuneiformParserCONFLICT) + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } + } + p.SetState(857) p.GetErrorHandler().Sync(p) if p.HasError() { goto errorExit } + _la = p.GetTokenStream().LA(1) - switch p.GetInterpreter().AdaptivePredict(p.BaseParser, p.GetTokenStream(), 109, p.GetParserRuleContext()) { - case 1: - localctx = NewParen_sql_exprContext(p, localctx) - p.SetParserRuleContext(localctx) - _prevctx = localctx - + if _la == KuneiformParserLPAREN { { - p.SetState(722) + p.SetState(850) p.Match(KuneiformParserLPAREN) if p.HasError() { // Recognition error - abort rule @@ -13843,1780 +14683,3403 @@ func (p *KuneiformParser) sql_expr(_p int) (localctx ISql_exprContext) { } } { - p.SetState(723) - p.sql_expr(0) + p.SetState(851) + + var _x = p.Identifier_list() + + localctx.(*Upsert_clauseContext).conflict_columns = _x } { - p.SetState(724) + p.SetState(852) p.Match(KuneiformParserRPAREN) if p.HasError() { // Recognition error - abort rule goto errorExit } } - p.SetState(726) + p.SetState(855) p.GetErrorHandler().Sync(p) - - if p.GetInterpreter().AdaptivePredict(p.BaseParser, p.GetTokenStream(), 97, p.GetParserRuleContext()) == 1 { - { - p.SetState(725) - p.Type_cast() - } - - } else if p.HasError() { // JIM + if p.HasError() { goto errorExit } + _la = p.GetTokenStream().LA(1) - case 2: - localctx = NewUnary_sql_exprContext(p, localctx) - p.SetParserRuleContext(localctx) - _prevctx = localctx - { - p.SetState(728) - _la = p.GetTokenStream().LA(1) - - if !(_la == KuneiformParserPLUS || _la == KuneiformParserMINUS) { - p.GetErrorHandler().RecoverInline(p) - } else { - p.GetErrorHandler().ReportMatch(p) - p.Consume() + if _la == KuneiformParserWHERE { + { + p.SetState(853) + p.Match(KuneiformParserWHERE) + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } } - } - { - p.SetState(729) - p.sql_expr(19) - } + { + p.SetState(854) - case 3: - localctx = NewLiteral_sql_exprContext(p, 
localctx) - p.SetParserRuleContext(localctx) - _prevctx = localctx - { - p.SetState(730) - p.Literal() - } - p.SetState(732) - p.GetErrorHandler().Sync(p) + var _x = p.sql_expr(0) - if p.GetInterpreter().AdaptivePredict(p.BaseParser, p.GetTokenStream(), 98, p.GetParserRuleContext()) == 1 { - { - p.SetState(731) - p.Type_cast() + localctx.(*Upsert_clauseContext).conflict_where = _x } - } else if p.HasError() { // JIM - goto errorExit - } - - case 4: - localctx = NewFunction_call_sql_exprContext(p, localctx) - p.SetParserRuleContext(localctx) - _prevctx = localctx - { - p.SetState(734) - p.Sql_function_call() } - p.SetState(736) - p.GetErrorHandler().Sync(p) - - if p.GetInterpreter().AdaptivePredict(p.BaseParser, p.GetTokenStream(), 99, p.GetParserRuleContext()) == 1 { - { - p.SetState(735) - p.Type_cast() - } - } else if p.HasError() { // JIM + } + { + p.SetState(859) + p.Match(KuneiformParserDO) + if p.HasError() { + // Recognition error - abort rule goto errorExit } + } + p.SetState(875) + p.GetErrorHandler().Sync(p) + if p.HasError() { + goto errorExit + } - case 5: - localctx = NewVariable_sql_exprContext(p, localctx) - p.SetParserRuleContext(localctx) - _prevctx = localctx + switch p.GetTokenStream().LA(1) { + case KuneiformParserNOTHING: { - p.SetState(738) - p.Variable() - } - p.SetState(740) - p.GetErrorHandler().Sync(p) - - if p.GetInterpreter().AdaptivePredict(p.BaseParser, p.GetTokenStream(), 100, p.GetParserRuleContext()) == 1 { - { - p.SetState(739) - p.Type_cast() + p.SetState(860) + p.Match(KuneiformParserNOTHING) + if p.HasError() { + // Recognition error - abort rule + goto errorExit } - - } else if p.HasError() { // JIM - goto errorExit } - case 6: - localctx = NewColumn_sql_exprContext(p, localctx) - p.SetParserRuleContext(localctx) - _prevctx = localctx - p.SetState(745) - p.GetErrorHandler().Sync(p) - - if p.GetInterpreter().AdaptivePredict(p.BaseParser, p.GetTokenStream(), 101, p.GetParserRuleContext()) == 1 { - { - p.SetState(742) - - var _x = p.Identifier() - - localctx.(*Column_sql_exprContext).table = _x - } - { - p.SetState(743) - p.Match(KuneiformParserPERIOD) - if p.HasError() { - // Recognition error - abort rule - goto errorExit - } - } - - } else if p.HasError() { // JIM - goto errorExit - } + case KuneiformParserUPDATE: { - p.SetState(747) - - var _x = p.Identifier() - - localctx.(*Column_sql_exprContext).column = _x - } - p.SetState(749) - p.GetErrorHandler().Sync(p) - - if p.GetInterpreter().AdaptivePredict(p.BaseParser, p.GetTokenStream(), 102, p.GetParserRuleContext()) == 1 { - { - p.SetState(748) - p.Type_cast() + p.SetState(861) + p.Match(KuneiformParserUPDATE) + if p.HasError() { + // Recognition error - abort rule + goto errorExit } - - } else if p.HasError() { // JIM - goto errorExit } - - case 7: - localctx = NewCase_exprContext(p, localctx) - p.SetParserRuleContext(localctx) - _prevctx = localctx { - p.SetState(751) - p.Match(KuneiformParserCASE) + p.SetState(862) + p.Match(KuneiformParserSET) if p.HasError() { // Recognition error - abort rule goto errorExit } } - p.SetState(753) + { + p.SetState(863) + p.Update_set_clause() + } + p.SetState(868) p.GetErrorHandler().Sync(p) if p.HasError() { goto errorExit } _la = p.GetTokenStream().LA(1) - if ((int64(_la) & ^0x3f) == 0 && ((int64(1)<<_la)&148618792001339520) != 0) || ((int64((_la-66)) & ^0x3f) == 0 && ((int64(1)<<(_la-66))&4043950990402519041) != 0) { + for _la == KuneiformParserCOMMA { { - p.SetState(752) - - var _x = p.sql_expr(0) - - localctx.(*Case_exprContext).case_clause = _x + 
p.SetState(864) + p.Match(KuneiformParserCOMMA) + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } } - - } - p.SetState(756) - p.GetErrorHandler().Sync(p) - if p.HasError() { - goto errorExit - } - _la = p.GetTokenStream().LA(1) - - for ok := true; ok; ok = _la == KuneiformParserWHEN { { - p.SetState(755) - p.When_then_clause() + p.SetState(865) + p.Update_set_clause() } - p.SetState(758) + p.SetState(870) p.GetErrorHandler().Sync(p) if p.HasError() { goto errorExit } _la = p.GetTokenStream().LA(1) } - p.SetState(762) + p.SetState(873) p.GetErrorHandler().Sync(p) if p.HasError() { goto errorExit } _la = p.GetTokenStream().LA(1) - if _la == KuneiformParserELSE { + if _la == KuneiformParserWHERE { { - p.SetState(760) - p.Match(KuneiformParserELSE) + p.SetState(871) + p.Match(KuneiformParserWHERE) if p.HasError() { // Recognition error - abort rule goto errorExit } } { - p.SetState(761) + p.SetState(872) var _x = p.sql_expr(0) - localctx.(*Case_exprContext).else_clause = _x + localctx.(*Upsert_clauseContext).update_where = _x } } - { - p.SetState(764) - p.Match(KuneiformParserEND) - if p.HasError() { - // Recognition error - abort rule - goto errorExit - } - } - case 8: - localctx = NewSubquery_sql_exprContext(p, localctx) - p.SetParserRuleContext(localctx) - _prevctx = localctx - p.SetState(770) - p.GetErrorHandler().Sync(p) - if p.HasError() { - goto errorExit - } - _la = p.GetTokenStream().LA(1) + default: + p.SetError(antlr.NewNoViableAltException(p, nil, nil, nil, nil, nil)) + goto errorExit + } - if _la == KuneiformParserNOT || _la == KuneiformParserEXISTS { - p.SetState(767) - p.GetErrorHandler().Sync(p) - if p.HasError() { - goto errorExit - } - _la = p.GetTokenStream().LA(1) +errorExit: + if p.HasError() { + v := p.GetError() + localctx.SetException(v) + p.GetErrorHandler().ReportError(p, v) + p.GetErrorHandler().Recover(p, v) + p.SetError(nil) + } + p.ExitRule() + return localctx + goto errorExit // Trick to prevent compiler error if the label is not used +} - if _la == KuneiformParserNOT { - { - p.SetState(766) - p.Match(KuneiformParserNOT) - if p.HasError() { - // Recognition error - abort rule - goto errorExit - } - } +// IDelete_statementContext is an interface to support dynamic dispatch. +type IDelete_statementContext interface { + antlr.ParserRuleContext - } - { - p.SetState(769) - p.Match(KuneiformParserEXISTS) - if p.HasError() { - // Recognition error - abort rule - goto errorExit - } - } + // GetParser returns the parser. + GetParser() antlr.Parser - } - { - p.SetState(772) - p.Match(KuneiformParserLPAREN) - if p.HasError() { - // Recognition error - abort rule - goto errorExit - } - } - { - p.SetState(773) - p.Select_statement() - } - { - p.SetState(774) - p.Match(KuneiformParserRPAREN) - if p.HasError() { - // Recognition error - abort rule - goto errorExit - } - } - p.SetState(776) - p.GetErrorHandler().Sync(p) + // GetTable_name returns the table_name rule contexts. + GetTable_name() IIdentifierContext - if p.GetInterpreter().AdaptivePredict(p.BaseParser, p.GetTokenStream(), 108, p.GetParserRuleContext()) == 1 { - { - p.SetState(775) - p.Type_cast() - } + // GetAlias returns the alias rule contexts. + GetAlias() IIdentifierContext - } else if p.HasError() { // JIM - goto errorExit - } + // GetWhere returns the where rule contexts. + GetWhere() ISql_exprContext - case 9: - localctx = NewUnary_sql_exprContext(p, localctx) - p.SetParserRuleContext(localctx) - _prevctx = localctx + // SetTable_name sets the table_name rule contexts. 
+ SetTable_name(IIdentifierContext) - { - p.SetState(778) - p.Match(KuneiformParserNOT) - if p.HasError() { - // Recognition error - abort rule - goto errorExit - } - } + // SetAlias sets the alias rule contexts. + SetAlias(IIdentifierContext) - { - p.SetState(779) - p.sql_expr(3) - } + // SetWhere sets the where rule contexts. + SetWhere(ISql_exprContext) - case antlr.ATNInvalidAltNumber: - goto errorExit - } - p.GetParserRuleContext().SetStop(p.GetTokenStream().LT(-1)) - p.SetState(867) - p.GetErrorHandler().Sync(p) - if p.HasError() { - goto errorExit - } - _alt = p.GetInterpreter().AdaptivePredict(p.BaseParser, p.GetTokenStream(), 122, p.GetParserRuleContext()) - if p.HasError() { - goto errorExit - } - for _alt != 2 && _alt != antlr.ATNInvalidAltNumber { - if _alt == 1 { - if p.GetParseListeners() != nil { - p.TriggerExitRuleEvent() - } - _prevctx = localctx - p.SetState(865) - p.GetErrorHandler().Sync(p) - if p.HasError() { - goto errorExit - } + // Getter signatures + DELETE() antlr.TerminalNode + FROM() antlr.TerminalNode + AllIdentifier() []IIdentifierContext + Identifier(i int) IIdentifierContext + WHERE() antlr.TerminalNode + Sql_expr() ISql_exprContext + AS() antlr.TerminalNode - switch p.GetInterpreter().AdaptivePredict(p.BaseParser, p.GetTokenStream(), 121, p.GetParserRuleContext()) { - case 1: - localctx = NewArithmetic_sql_exprContext(p, NewSql_exprContext(p, _parentctx, _parentState)) - localctx.(*Arithmetic_sql_exprContext).left = _prevctx + // IsDelete_statementContext differentiates from other interfaces. + IsDelete_statementContext() +} - p.PushNewRecursionContext(localctx, _startState, KuneiformParserRULE_sql_expr) - p.SetState(782) +type Delete_statementContext struct { + antlr.BaseParserRuleContext + parser antlr.Parser + table_name IIdentifierContext + alias IIdentifierContext + where ISql_exprContext +} - if !(p.Precpred(p.GetParserRuleContext(), 17)) { - p.SetError(antlr.NewFailedPredicateException(p, "p.Precpred(p.GetParserRuleContext(), 17)", "")) - goto errorExit - } - { - p.SetState(783) - _la = p.GetTokenStream().LA(1) +func NewEmptyDelete_statementContext() *Delete_statementContext { + var p = new(Delete_statementContext) + antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1) + p.RuleIndex = KuneiformParserRULE_delete_statement + return p +} - if !((int64(_la) & ^0x3f) == 0 && ((int64(1)<<_la)&4734976) != 0) { - p.GetErrorHandler().RecoverInline(p) - } else { - p.GetErrorHandler().ReportMatch(p) - p.Consume() - } - } - { - p.SetState(784) +func InitEmptyDelete_statementContext(p *Delete_statementContext) { + antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1) + p.RuleIndex = KuneiformParserRULE_delete_statement +} - var _x = p.sql_expr(18) +func (*Delete_statementContext) IsDelete_statementContext() {} - localctx.(*Arithmetic_sql_exprContext).right = _x - } +func NewDelete_statementContext(parser antlr.Parser, parent antlr.ParserRuleContext, invokingState int) *Delete_statementContext { + var p = new(Delete_statementContext) - case 2: - localctx = NewArithmetic_sql_exprContext(p, NewSql_exprContext(p, _parentctx, _parentState)) - localctx.(*Arithmetic_sql_exprContext).left = _prevctx + antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, parent, invokingState) - p.PushNewRecursionContext(localctx, _startState, KuneiformParserRULE_sql_expr) - p.SetState(785) + p.parser = parser + p.RuleIndex = KuneiformParserRULE_delete_statement - if !(p.Precpred(p.GetParserRuleContext(), 16)) { - 
p.SetError(antlr.NewFailedPredicateException(p, "p.Precpred(p.GetParserRuleContext(), 16)", "")) - goto errorExit - } - { - p.SetState(786) - _la = p.GetTokenStream().LA(1) + return p +} - if !(_la == KuneiformParserPLUS || _la == KuneiformParserMINUS) { - p.GetErrorHandler().RecoverInline(p) - } else { - p.GetErrorHandler().ReportMatch(p) - p.Consume() - } - } - { - p.SetState(787) +func (s *Delete_statementContext) GetParser() antlr.Parser { return s.parser } - var _x = p.sql_expr(17) +func (s *Delete_statementContext) GetTable_name() IIdentifierContext { return s.table_name } - localctx.(*Arithmetic_sql_exprContext).right = _x - } +func (s *Delete_statementContext) GetAlias() IIdentifierContext { return s.alias } - case 3: - localctx = NewArithmetic_sql_exprContext(p, NewSql_exprContext(p, _parentctx, _parentState)) - localctx.(*Arithmetic_sql_exprContext).left = _prevctx +func (s *Delete_statementContext) GetWhere() ISql_exprContext { return s.where } - p.PushNewRecursionContext(localctx, _startState, KuneiformParserRULE_sql_expr) - p.SetState(788) +func (s *Delete_statementContext) SetTable_name(v IIdentifierContext) { s.table_name = v } - if !(p.Precpred(p.GetParserRuleContext(), 9)) { - p.SetError(antlr.NewFailedPredicateException(p, "p.Precpred(p.GetParserRuleContext(), 9)", "")) - goto errorExit - } - { - p.SetState(789) - p.Match(KuneiformParserCONCAT) - if p.HasError() { - // Recognition error - abort rule - goto errorExit - } - } - { - p.SetState(790) +func (s *Delete_statementContext) SetAlias(v IIdentifierContext) { s.alias = v } - var _x = p.sql_expr(10) +func (s *Delete_statementContext) SetWhere(v ISql_exprContext) { s.where = v } - localctx.(*Arithmetic_sql_exprContext).right = _x - } +func (s *Delete_statementContext) DELETE() antlr.TerminalNode { + return s.GetToken(KuneiformParserDELETE, 0) +} - case 4: - localctx = NewLike_sql_exprContext(p, NewSql_exprContext(p, _parentctx, _parentState)) - localctx.(*Like_sql_exprContext).left = _prevctx +func (s *Delete_statementContext) FROM() antlr.TerminalNode { + return s.GetToken(KuneiformParserFROM, 0) +} - p.PushNewRecursionContext(localctx, _startState, KuneiformParserRULE_sql_expr) - p.SetState(791) +func (s *Delete_statementContext) AllIdentifier() []IIdentifierContext { + children := s.GetChildren() + len := 0 + for _, ctx := range children { + if _, ok := ctx.(IIdentifierContext); ok { + len++ + } + } - if !(p.Precpred(p.GetParserRuleContext(), 7)) { - p.SetError(antlr.NewFailedPredicateException(p, "p.Precpred(p.GetParserRuleContext(), 7)", "")) - goto errorExit - } - p.SetState(793) - p.GetErrorHandler().Sync(p) - if p.HasError() { - goto errorExit - } - _la = p.GetTokenStream().LA(1) + tst := make([]IIdentifierContext, len) + i := 0 + for _, ctx := range children { + if t, ok := ctx.(IIdentifierContext); ok { + tst[i] = t.(IIdentifierContext) + i++ + } + } - if _la == KuneiformParserNOT { - { - p.SetState(792) - p.Match(KuneiformParserNOT) - if p.HasError() { - // Recognition error - abort rule - goto errorExit - } - } + return tst +} - } - { - p.SetState(795) - _la = p.GetTokenStream().LA(1) - - if !(_la == KuneiformParserLIKE || _la == KuneiformParserILIKE) { - p.GetErrorHandler().RecoverInline(p) - } else { - p.GetErrorHandler().ReportMatch(p) - p.Consume() - } - } - { - p.SetState(796) +func (s *Delete_statementContext) Identifier(i int) IIdentifierContext { + var t antlr.RuleContext + j := 0 + for _, ctx := range s.GetChildren() { + if _, ok := ctx.(IIdentifierContext); ok { + if j == i { + t = 
ctx.(antlr.RuleContext) + break + } + j++ + } + } - var _x = p.sql_expr(8) + if t == nil { + return nil + } - localctx.(*Like_sql_exprContext).right = _x - } + return t.(IIdentifierContext) +} - case 5: - localctx = NewBetween_sql_exprContext(p, NewSql_exprContext(p, _parentctx, _parentState)) - localctx.(*Between_sql_exprContext).element = _prevctx +func (s *Delete_statementContext) WHERE() antlr.TerminalNode { + return s.GetToken(KuneiformParserWHERE, 0) +} - p.PushNewRecursionContext(localctx, _startState, KuneiformParserRULE_sql_expr) - p.SetState(797) +func (s *Delete_statementContext) Sql_expr() ISql_exprContext { + var t antlr.RuleContext + for _, ctx := range s.GetChildren() { + if _, ok := ctx.(ISql_exprContext); ok { + t = ctx.(antlr.RuleContext) + break + } + } - if !(p.Precpred(p.GetParserRuleContext(), 6)) { - p.SetError(antlr.NewFailedPredicateException(p, "p.Precpred(p.GetParserRuleContext(), 6)", "")) - goto errorExit - } - p.SetState(799) - p.GetErrorHandler().Sync(p) - if p.HasError() { - goto errorExit - } - _la = p.GetTokenStream().LA(1) + if t == nil { + return nil + } - if _la == KuneiformParserNOT { - { - p.SetState(798) - p.Match(KuneiformParserNOT) - if p.HasError() { - // Recognition error - abort rule - goto errorExit - } - } + return t.(ISql_exprContext) +} - } - { - p.SetState(801) - p.Match(KuneiformParserBETWEEN) - if p.HasError() { - // Recognition error - abort rule - goto errorExit - } - } - { - p.SetState(802) +func (s *Delete_statementContext) AS() antlr.TerminalNode { + return s.GetToken(KuneiformParserAS, 0) +} - var _x = p.sql_expr(0) +func (s *Delete_statementContext) GetRuleContext() antlr.RuleContext { + return s +} - localctx.(*Between_sql_exprContext).lower = _x - } - { - p.SetState(803) - p.Match(KuneiformParserAND) - if p.HasError() { - // Recognition error - abort rule - goto errorExit - } - } - { - p.SetState(804) +func (s *Delete_statementContext) ToStringTree(ruleNames []string, recog antlr.Recognizer) string { + return antlr.TreesStringTree(s, ruleNames, recog) +} - var _x = p.sql_expr(7) +func (s *Delete_statementContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { + switch t := visitor.(type) { + case KuneiformParserVisitor: + return t.VisitDelete_statement(s) - localctx.(*Between_sql_exprContext).upper = _x - } + default: + return t.VisitChildren(s) + } +} - case 6: - localctx = NewComparison_sql_exprContext(p, NewSql_exprContext(p, _parentctx, _parentState)) - localctx.(*Comparison_sql_exprContext).left = _prevctx +func (p *KuneiformParser) Delete_statement() (localctx IDelete_statementContext) { + localctx = NewDelete_statementContext(p, p.GetParserRuleContext(), p.GetState()) + p.EnterRule(localctx, 98, KuneiformParserRULE_delete_statement) + var _la int - p.PushNewRecursionContext(localctx, _startState, KuneiformParserRULE_sql_expr) - p.SetState(806) + p.EnterOuterAlt(localctx, 1) + { + p.SetState(877) + p.Match(KuneiformParserDELETE) + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } + } + { + p.SetState(878) + p.Match(KuneiformParserFROM) + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } + } + { + p.SetState(879) - if !(p.Precpred(p.GetParserRuleContext(), 5)) { - p.SetError(antlr.NewFailedPredicateException(p, "p.Precpred(p.GetParserRuleContext(), 5)", "")) - goto errorExit - } - { - p.SetState(807) - _la = p.GetTokenStream().LA(1) + var _x = p.Identifier() - if !((int64(_la) & ^0x3f) == 0 && ((int64(1)<<_la)&260145152) != 0) { - p.GetErrorHandler().RecoverInline(p) - 
} else { - p.GetErrorHandler().ReportMatch(p) - p.Consume() - } - } - { - p.SetState(808) + localctx.(*Delete_statementContext).table_name = _x + } + p.SetState(884) + p.GetErrorHandler().Sync(p) + if p.HasError() { + goto errorExit + } + _la = p.GetTokenStream().LA(1) - var _x = p.sql_expr(6) + if _la == KuneiformParserDOUBLE_QUOTE || _la == KuneiformParserAS || _la == KuneiformParserIDENTIFIER { + p.SetState(881) + p.GetErrorHandler().Sync(p) + if p.HasError() { + goto errorExit + } + _la = p.GetTokenStream().LA(1) - localctx.(*Comparison_sql_exprContext).right = _x + if _la == KuneiformParserAS { + { + p.SetState(880) + p.Match(KuneiformParserAS) + if p.HasError() { + // Recognition error - abort rule + goto errorExit } + } - case 7: - localctx = NewLogical_sql_exprContext(p, NewSql_exprContext(p, _parentctx, _parentState)) - localctx.(*Logical_sql_exprContext).left = _prevctx + } + { + p.SetState(883) - p.PushNewRecursionContext(localctx, _startState, KuneiformParserRULE_sql_expr) - p.SetState(809) + var _x = p.Identifier() - if !(p.Precpred(p.GetParserRuleContext(), 2)) { - p.SetError(antlr.NewFailedPredicateException(p, "p.Precpred(p.GetParserRuleContext(), 2)", "")) - goto errorExit - } - { - p.SetState(810) - p.Match(KuneiformParserAND) - if p.HasError() { - // Recognition error - abort rule - goto errorExit - } - } - { - p.SetState(811) + localctx.(*Delete_statementContext).alias = _x + } - var _x = p.sql_expr(3) + } + p.SetState(888) + p.GetErrorHandler().Sync(p) + if p.HasError() { + goto errorExit + } + _la = p.GetTokenStream().LA(1) - localctx.(*Logical_sql_exprContext).right = _x - } + if _la == KuneiformParserWHERE { + { + p.SetState(886) + p.Match(KuneiformParserWHERE) + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } + } + { + p.SetState(887) - case 8: - localctx = NewLogical_sql_exprContext(p, NewSql_exprContext(p, _parentctx, _parentState)) - localctx.(*Logical_sql_exprContext).left = _prevctx + var _x = p.sql_expr(0) - p.PushNewRecursionContext(localctx, _startState, KuneiformParserRULE_sql_expr) - p.SetState(812) + localctx.(*Delete_statementContext).where = _x + } - if !(p.Precpred(p.GetParserRuleContext(), 1)) { - p.SetError(antlr.NewFailedPredicateException(p, "p.Precpred(p.GetParserRuleContext(), 1)", "")) - goto errorExit - } - { - p.SetState(813) - p.Match(KuneiformParserOR) - if p.HasError() { - // Recognition error - abort rule - goto errorExit - } - } - { - p.SetState(814) + } - var _x = p.sql_expr(2) +errorExit: + if p.HasError() { + v := p.GetError() + localctx.SetException(v) + p.GetErrorHandler().ReportError(p, v) + p.GetErrorHandler().Recover(p, v) + p.SetError(nil) + } + p.ExitRule() + return localctx + goto errorExit // Trick to prevent compiler error if the label is not used +} - localctx.(*Logical_sql_exprContext).right = _x - } +// ISql_exprContext is an interface to support dynamic dispatch. +type ISql_exprContext interface { + antlr.ParserRuleContext - case 9: - localctx = NewField_access_sql_exprContext(p, NewSql_exprContext(p, _parentctx, _parentState)) - p.PushNewRecursionContext(localctx, _startState, KuneiformParserRULE_sql_expr) - p.SetState(815) + // GetParser returns the parser. + GetParser() antlr.Parser + // IsSql_exprContext differentiates from other interfaces. 
+ IsSql_exprContext() +} - if !(p.Precpred(p.GetParserRuleContext(), 21)) { - p.SetError(antlr.NewFailedPredicateException(p, "p.Precpred(p.GetParserRuleContext(), 21)", "")) - goto errorExit - } - { - p.SetState(816) - p.Match(KuneiformParserPERIOD) - if p.HasError() { - // Recognition error - abort rule - goto errorExit - } - } - { - p.SetState(817) - p.Identifier() - } - p.SetState(819) - p.GetErrorHandler().Sync(p) +type Sql_exprContext struct { + antlr.BaseParserRuleContext + parser antlr.Parser +} - if p.GetInterpreter().AdaptivePredict(p.BaseParser, p.GetTokenStream(), 112, p.GetParserRuleContext()) == 1 { - { - p.SetState(818) - p.Type_cast() - } +func NewEmptySql_exprContext() *Sql_exprContext { + var p = new(Sql_exprContext) + antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1) + p.RuleIndex = KuneiformParserRULE_sql_expr + return p +} - } else if p.HasError() { // JIM - goto errorExit - } +func InitEmptySql_exprContext(p *Sql_exprContext) { + antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1) + p.RuleIndex = KuneiformParserRULE_sql_expr +} - case 10: - localctx = NewArray_access_sql_exprContext(p, NewSql_exprContext(p, _parentctx, _parentState)) - localctx.(*Array_access_sql_exprContext).array_element = _prevctx +func (*Sql_exprContext) IsSql_exprContext() {} - p.PushNewRecursionContext(localctx, _startState, KuneiformParserRULE_sql_expr) - p.SetState(821) +func NewSql_exprContext(parser antlr.Parser, parent antlr.ParserRuleContext, invokingState int) *Sql_exprContext { + var p = new(Sql_exprContext) - if !(p.Precpred(p.GetParserRuleContext(), 20)) { - p.SetError(antlr.NewFailedPredicateException(p, "p.Precpred(p.GetParserRuleContext(), 20)", "")) - goto errorExit - } - { - p.SetState(822) - p.Match(KuneiformParserLBRACKET) - if p.HasError() { - // Recognition error - abort rule - goto errorExit - } - } - p.SetState(831) - p.GetErrorHandler().Sync(p) - if p.HasError() { - goto errorExit - } + antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, parent, invokingState) - switch p.GetInterpreter().AdaptivePredict(p.BaseParser, p.GetTokenStream(), 115, p.GetParserRuleContext()) { - case 1: - { - p.SetState(823) + p.parser = parser + p.RuleIndex = KuneiformParserRULE_sql_expr - var _x = p.sql_expr(0) + return p +} - localctx.(*Array_access_sql_exprContext).single = _x - } +func (s *Sql_exprContext) GetParser() antlr.Parser { return s.parser } - case 2: - p.SetState(825) - p.GetErrorHandler().Sync(p) - if p.HasError() { - goto errorExit - } - _la = p.GetTokenStream().LA(1) +func (s *Sql_exprContext) CopyAll(ctx *Sql_exprContext) { + s.CopyFrom(&ctx.BaseParserRuleContext) +} - if ((int64(_la) & ^0x3f) == 0 && ((int64(1)<<_la)&148618792001339520) != 0) || ((int64((_la-66)) & ^0x3f) == 0 && ((int64(1)<<(_la-66))&4043950990402519041) != 0) { - { - p.SetState(824) +func (s *Sql_exprContext) GetRuleContext() antlr.RuleContext { + return s +} - var _x = p.sql_expr(0) +func (s *Sql_exprContext) ToStringTree(ruleNames []string, recog antlr.Recognizer) string { + return antlr.TreesStringTree(s, ruleNames, recog) +} - localctx.(*Array_access_sql_exprContext).left = _x - } +type Column_sql_exprContext struct { + Sql_exprContext + table IIdentifierContext + column IIdentifierContext +} - } - { - p.SetState(827) - p.Match(KuneiformParserCOL) - if p.HasError() { - // Recognition error - abort rule - goto errorExit - } - } - p.SetState(829) - p.GetErrorHandler().Sync(p) - if p.HasError() { - goto errorExit - } - _la = p.GetTokenStream().LA(1) +func 
NewColumn_sql_exprContext(parser antlr.Parser, ctx antlr.ParserRuleContext) *Column_sql_exprContext { + var p = new(Column_sql_exprContext) - if ((int64(_la) & ^0x3f) == 0 && ((int64(1)<<_la)&148618792001339520) != 0) || ((int64((_la-66)) & ^0x3f) == 0 && ((int64(1)<<(_la-66))&4043950990402519041) != 0) { - { - p.SetState(828) + InitEmptySql_exprContext(&p.Sql_exprContext) + p.parser = parser + p.CopyAll(ctx.(*Sql_exprContext)) - var _x = p.sql_expr(0) + return p +} - localctx.(*Array_access_sql_exprContext).right = _x - } +func (s *Column_sql_exprContext) GetTable() IIdentifierContext { return s.table } - } +func (s *Column_sql_exprContext) GetColumn() IIdentifierContext { return s.column } - case antlr.ATNInvalidAltNumber: - goto errorExit - } - { - p.SetState(833) - p.Match(KuneiformParserRBRACKET) - if p.HasError() { - // Recognition error - abort rule - goto errorExit - } - } - p.SetState(835) - p.GetErrorHandler().Sync(p) +func (s *Column_sql_exprContext) SetTable(v IIdentifierContext) { s.table = v } - if p.GetInterpreter().AdaptivePredict(p.BaseParser, p.GetTokenStream(), 116, p.GetParserRuleContext()) == 1 { - { - p.SetState(834) - p.Type_cast() - } +func (s *Column_sql_exprContext) SetColumn(v IIdentifierContext) { s.column = v } - } else if p.HasError() { // JIM - goto errorExit - } +func (s *Column_sql_exprContext) GetRuleContext() antlr.RuleContext { + return s +} - case 11: - localctx = NewCollate_sql_exprContext(p, NewSql_exprContext(p, _parentctx, _parentState)) - p.PushNewRecursionContext(localctx, _startState, KuneiformParserRULE_sql_expr) - p.SetState(837) +func (s *Column_sql_exprContext) AllIdentifier() []IIdentifierContext { + children := s.GetChildren() + len := 0 + for _, ctx := range children { + if _, ok := ctx.(IIdentifierContext); ok { + len++ + } + } - if !(p.Precpred(p.GetParserRuleContext(), 18)) { - p.SetError(antlr.NewFailedPredicateException(p, "p.Precpred(p.GetParserRuleContext(), 18)", "")) - goto errorExit - } - { - p.SetState(838) - p.Match(KuneiformParserCOLLATE) - if p.HasError() { - // Recognition error - abort rule - goto errorExit - } - } - { - p.SetState(839) - p.Identifier() - } + tst := make([]IIdentifierContext, len) + i := 0 + for _, ctx := range children { + if t, ok := ctx.(IIdentifierContext); ok { + tst[i] = t.(IIdentifierContext) + i++ + } + } - case 12: - localctx = NewIn_sql_exprContext(p, NewSql_exprContext(p, _parentctx, _parentState)) - p.PushNewRecursionContext(localctx, _startState, KuneiformParserRULE_sql_expr) - p.SetState(840) + return tst +} - if !(p.Precpred(p.GetParserRuleContext(), 8)) { - p.SetError(antlr.NewFailedPredicateException(p, "p.Precpred(p.GetParserRuleContext(), 8)", "")) - goto errorExit - } - p.SetState(842) - p.GetErrorHandler().Sync(p) - if p.HasError() { - goto errorExit - } - _la = p.GetTokenStream().LA(1) +func (s *Column_sql_exprContext) Identifier(i int) IIdentifierContext { + var t antlr.RuleContext + j := 0 + for _, ctx := range s.GetChildren() { + if _, ok := ctx.(IIdentifierContext); ok { + if j == i { + t = ctx.(antlr.RuleContext) + break + } + j++ + } + } - if _la == KuneiformParserNOT { - { - p.SetState(841) - p.Match(KuneiformParserNOT) - if p.HasError() { - // Recognition error - abort rule - goto errorExit - } - } + if t == nil { + return nil + } - } - { - p.SetState(844) - p.Match(KuneiformParserIN) - if p.HasError() { - // Recognition error - abort rule - goto errorExit - } - } - { - p.SetState(845) - p.Match(KuneiformParserLPAREN) - if p.HasError() { - // Recognition error - abort rule - 
goto errorExit - } - } - p.SetState(848) - p.GetErrorHandler().Sync(p) - if p.HasError() { - goto errorExit - } + return t.(IIdentifierContext) +} - switch p.GetTokenStream().LA(1) { - case KuneiformParserLPAREN, KuneiformParserPLUS, KuneiformParserMINUS, KuneiformParserDOUBLE_QUOTE, KuneiformParserNULL, KuneiformParserNOT, KuneiformParserEXISTS, KuneiformParserCASE, KuneiformParserSTRING_, KuneiformParserTRUE, KuneiformParserFALSE, KuneiformParserDIGITS_, KuneiformParserBINARY_, KuneiformParserIDENTIFIER, KuneiformParserVARIABLE, KuneiformParserCONTEXTUAL_VARIABLE: - { - p.SetState(846) - p.Sql_expr_list() - } +func (s *Column_sql_exprContext) PERIOD() antlr.TerminalNode { + return s.GetToken(KuneiformParserPERIOD, 0) +} - case KuneiformParserSELECT: - { - p.SetState(847) - p.Select_statement() - } +func (s *Column_sql_exprContext) Type_cast() IType_castContext { + var t antlr.RuleContext + for _, ctx := range s.GetChildren() { + if _, ok := ctx.(IType_castContext); ok { + t = ctx.(antlr.RuleContext) + break + } + } - default: - p.SetError(antlr.NewNoViableAltException(p, nil, nil, nil, nil, nil)) - goto errorExit - } - { - p.SetState(850) - p.Match(KuneiformParserRPAREN) - if p.HasError() { - // Recognition error - abort rule - goto errorExit - } - } + if t == nil { + return nil + } + + return t.(IType_castContext) +} + +func (s *Column_sql_exprContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { + switch t := visitor.(type) { + case KuneiformParserVisitor: + return t.VisitColumn_sql_expr(s) + + default: + return t.VisitChildren(s) + } +} + +type Logical_sql_exprContext struct { + Sql_exprContext + left ISql_exprContext + right ISql_exprContext +} + +func NewLogical_sql_exprContext(parser antlr.Parser, ctx antlr.ParserRuleContext) *Logical_sql_exprContext { + var p = new(Logical_sql_exprContext) + + InitEmptySql_exprContext(&p.Sql_exprContext) + p.parser = parser + p.CopyAll(ctx.(*Sql_exprContext)) + + return p +} + +func (s *Logical_sql_exprContext) GetLeft() ISql_exprContext { return s.left } + +func (s *Logical_sql_exprContext) GetRight() ISql_exprContext { return s.right } + +func (s *Logical_sql_exprContext) SetLeft(v ISql_exprContext) { s.left = v } + +func (s *Logical_sql_exprContext) SetRight(v ISql_exprContext) { s.right = v } + +func (s *Logical_sql_exprContext) GetRuleContext() antlr.RuleContext { + return s +} + +func (s *Logical_sql_exprContext) AND() antlr.TerminalNode { + return s.GetToken(KuneiformParserAND, 0) +} + +func (s *Logical_sql_exprContext) AllSql_expr() []ISql_exprContext { + children := s.GetChildren() + len := 0 + for _, ctx := range children { + if _, ok := ctx.(ISql_exprContext); ok { + len++ + } + } + + tst := make([]ISql_exprContext, len) + i := 0 + for _, ctx := range children { + if t, ok := ctx.(ISql_exprContext); ok { + tst[i] = t.(ISql_exprContext) + i++ + } + } + + return tst +} + +func (s *Logical_sql_exprContext) Sql_expr(i int) ISql_exprContext { + var t antlr.RuleContext + j := 0 + for _, ctx := range s.GetChildren() { + if _, ok := ctx.(ISql_exprContext); ok { + if j == i { + t = ctx.(antlr.RuleContext) + break + } + j++ + } + } + + if t == nil { + return nil + } + + return t.(ISql_exprContext) +} + +func (s *Logical_sql_exprContext) OR() antlr.TerminalNode { + return s.GetToken(KuneiformParserOR, 0) +} + +func (s *Logical_sql_exprContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { + switch t := visitor.(type) { + case KuneiformParserVisitor: + return t.VisitLogical_sql_expr(s) + + default: + return t.VisitChildren(s) + } +} 
+ +type Field_access_sql_exprContext struct { + Sql_exprContext +} + +func NewField_access_sql_exprContext(parser antlr.Parser, ctx antlr.ParserRuleContext) *Field_access_sql_exprContext { + var p = new(Field_access_sql_exprContext) + + InitEmptySql_exprContext(&p.Sql_exprContext) + p.parser = parser + p.CopyAll(ctx.(*Sql_exprContext)) + + return p +} + +func (s *Field_access_sql_exprContext) GetRuleContext() antlr.RuleContext { + return s +} + +func (s *Field_access_sql_exprContext) Sql_expr() ISql_exprContext { + var t antlr.RuleContext + for _, ctx := range s.GetChildren() { + if _, ok := ctx.(ISql_exprContext); ok { + t = ctx.(antlr.RuleContext) + break + } + } + + if t == nil { + return nil + } + + return t.(ISql_exprContext) +} + +func (s *Field_access_sql_exprContext) PERIOD() antlr.TerminalNode { + return s.GetToken(KuneiformParserPERIOD, 0) +} + +func (s *Field_access_sql_exprContext) Identifier() IIdentifierContext { + var t antlr.RuleContext + for _, ctx := range s.GetChildren() { + if _, ok := ctx.(IIdentifierContext); ok { + t = ctx.(antlr.RuleContext) + break + } + } + + if t == nil { + return nil + } + + return t.(IIdentifierContext) +} + +func (s *Field_access_sql_exprContext) Type_cast() IType_castContext { + var t antlr.RuleContext + for _, ctx := range s.GetChildren() { + if _, ok := ctx.(IType_castContext); ok { + t = ctx.(antlr.RuleContext) + break + } + } + + if t == nil { + return nil + } + + return t.(IType_castContext) +} + +func (s *Field_access_sql_exprContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { + switch t := visitor.(type) { + case KuneiformParserVisitor: + return t.VisitField_access_sql_expr(s) + + default: + return t.VisitChildren(s) + } +} + +type Array_access_sql_exprContext struct { + Sql_exprContext + array_element ISql_exprContext + single ISql_exprContext + left ISql_exprContext + right ISql_exprContext +} + +func NewArray_access_sql_exprContext(parser antlr.Parser, ctx antlr.ParserRuleContext) *Array_access_sql_exprContext { + var p = new(Array_access_sql_exprContext) + + InitEmptySql_exprContext(&p.Sql_exprContext) + p.parser = parser + p.CopyAll(ctx.(*Sql_exprContext)) + + return p +} + +func (s *Array_access_sql_exprContext) GetArray_element() ISql_exprContext { return s.array_element } + +func (s *Array_access_sql_exprContext) GetSingle() ISql_exprContext { return s.single } + +func (s *Array_access_sql_exprContext) GetLeft() ISql_exprContext { return s.left } + +func (s *Array_access_sql_exprContext) GetRight() ISql_exprContext { return s.right } + +func (s *Array_access_sql_exprContext) SetArray_element(v ISql_exprContext) { s.array_element = v } + +func (s *Array_access_sql_exprContext) SetSingle(v ISql_exprContext) { s.single = v } + +func (s *Array_access_sql_exprContext) SetLeft(v ISql_exprContext) { s.left = v } + +func (s *Array_access_sql_exprContext) SetRight(v ISql_exprContext) { s.right = v } + +func (s *Array_access_sql_exprContext) GetRuleContext() antlr.RuleContext { + return s +} + +func (s *Array_access_sql_exprContext) LBRACKET() antlr.TerminalNode { + return s.GetToken(KuneiformParserLBRACKET, 0) +} + +func (s *Array_access_sql_exprContext) RBRACKET() antlr.TerminalNode { + return s.GetToken(KuneiformParserRBRACKET, 0) +} + +func (s *Array_access_sql_exprContext) AllSql_expr() []ISql_exprContext { + children := s.GetChildren() + len := 0 + for _, ctx := range children { + if _, ok := ctx.(ISql_exprContext); ok { + len++ + } + } + + tst := make([]ISql_exprContext, len) + i := 0 + for _, ctx := range children { + 
if t, ok := ctx.(ISql_exprContext); ok { + tst[i] = t.(ISql_exprContext) + i++ + } + } + + return tst +} + +func (s *Array_access_sql_exprContext) Sql_expr(i int) ISql_exprContext { + var t antlr.RuleContext + j := 0 + for _, ctx := range s.GetChildren() { + if _, ok := ctx.(ISql_exprContext); ok { + if j == i { + t = ctx.(antlr.RuleContext) + break + } + j++ + } + } + + if t == nil { + return nil + } + + return t.(ISql_exprContext) +} + +func (s *Array_access_sql_exprContext) Type_cast() IType_castContext { + var t antlr.RuleContext + for _, ctx := range s.GetChildren() { + if _, ok := ctx.(IType_castContext); ok { + t = ctx.(antlr.RuleContext) + break + } + } + + if t == nil { + return nil + } + + return t.(IType_castContext) +} + +func (s *Array_access_sql_exprContext) COL() antlr.TerminalNode { + return s.GetToken(KuneiformParserCOL, 0) +} + +func (s *Array_access_sql_exprContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { + switch t := visitor.(type) { + case KuneiformParserVisitor: + return t.VisitArray_access_sql_expr(s) + + default: + return t.VisitChildren(s) + } +} + +type Comparison_sql_exprContext struct { + Sql_exprContext + left ISql_exprContext + right ISql_exprContext +} + +func NewComparison_sql_exprContext(parser antlr.Parser, ctx antlr.ParserRuleContext) *Comparison_sql_exprContext { + var p = new(Comparison_sql_exprContext) + + InitEmptySql_exprContext(&p.Sql_exprContext) + p.parser = parser + p.CopyAll(ctx.(*Sql_exprContext)) + + return p +} + +func (s *Comparison_sql_exprContext) GetLeft() ISql_exprContext { return s.left } + +func (s *Comparison_sql_exprContext) GetRight() ISql_exprContext { return s.right } + +func (s *Comparison_sql_exprContext) SetLeft(v ISql_exprContext) { s.left = v } + +func (s *Comparison_sql_exprContext) SetRight(v ISql_exprContext) { s.right = v } + +func (s *Comparison_sql_exprContext) GetRuleContext() antlr.RuleContext { + return s +} + +func (s *Comparison_sql_exprContext) AllSql_expr() []ISql_exprContext { + children := s.GetChildren() + len := 0 + for _, ctx := range children { + if _, ok := ctx.(ISql_exprContext); ok { + len++ + } + } + + tst := make([]ISql_exprContext, len) + i := 0 + for _, ctx := range children { + if t, ok := ctx.(ISql_exprContext); ok { + tst[i] = t.(ISql_exprContext) + i++ + } + } + + return tst +} + +func (s *Comparison_sql_exprContext) Sql_expr(i int) ISql_exprContext { + var t antlr.RuleContext + j := 0 + for _, ctx := range s.GetChildren() { + if _, ok := ctx.(ISql_exprContext); ok { + if j == i { + t = ctx.(antlr.RuleContext) + break + } + j++ + } + } + + if t == nil { + return nil + } + + return t.(ISql_exprContext) +} + +func (s *Comparison_sql_exprContext) EQUALS() antlr.TerminalNode { + return s.GetToken(KuneiformParserEQUALS, 0) +} + +func (s *Comparison_sql_exprContext) EQUATE() antlr.TerminalNode { + return s.GetToken(KuneiformParserEQUATE, 0) +} + +func (s *Comparison_sql_exprContext) NEQ() antlr.TerminalNode { + return s.GetToken(KuneiformParserNEQ, 0) +} + +func (s *Comparison_sql_exprContext) LT() antlr.TerminalNode { + return s.GetToken(KuneiformParserLT, 0) +} + +func (s *Comparison_sql_exprContext) LTE() antlr.TerminalNode { + return s.GetToken(KuneiformParserLTE, 0) +} + +func (s *Comparison_sql_exprContext) GT() antlr.TerminalNode { + return s.GetToken(KuneiformParserGT, 0) +} + +func (s *Comparison_sql_exprContext) GTE() antlr.TerminalNode { + return s.GetToken(KuneiformParserGTE, 0) +} + +func (s *Comparison_sql_exprContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { + 
switch t := visitor.(type) { + case KuneiformParserVisitor: + return t.VisitComparison_sql_expr(s) + + default: + return t.VisitChildren(s) + } +} + +type Literal_sql_exprContext struct { + Sql_exprContext +} + +func NewLiteral_sql_exprContext(parser antlr.Parser, ctx antlr.ParserRuleContext) *Literal_sql_exprContext { + var p = new(Literal_sql_exprContext) + + InitEmptySql_exprContext(&p.Sql_exprContext) + p.parser = parser + p.CopyAll(ctx.(*Sql_exprContext)) + + return p +} + +func (s *Literal_sql_exprContext) GetRuleContext() antlr.RuleContext { + return s +} + +func (s *Literal_sql_exprContext) Literal() ILiteralContext { + var t antlr.RuleContext + for _, ctx := range s.GetChildren() { + if _, ok := ctx.(ILiteralContext); ok { + t = ctx.(antlr.RuleContext) + break + } + } + + if t == nil { + return nil + } + + return t.(ILiteralContext) +} + +func (s *Literal_sql_exprContext) Type_cast() IType_castContext { + var t antlr.RuleContext + for _, ctx := range s.GetChildren() { + if _, ok := ctx.(IType_castContext); ok { + t = ctx.(antlr.RuleContext) + break + } + } + + if t == nil { + return nil + } + + return t.(IType_castContext) +} + +func (s *Literal_sql_exprContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { + switch t := visitor.(type) { + case KuneiformParserVisitor: + return t.VisitLiteral_sql_expr(s) + + default: + return t.VisitChildren(s) + } +} + +type Between_sql_exprContext struct { + Sql_exprContext + element ISql_exprContext + lower ISql_exprContext + upper ISql_exprContext +} + +func NewBetween_sql_exprContext(parser antlr.Parser, ctx antlr.ParserRuleContext) *Between_sql_exprContext { + var p = new(Between_sql_exprContext) + + InitEmptySql_exprContext(&p.Sql_exprContext) + p.parser = parser + p.CopyAll(ctx.(*Sql_exprContext)) + + return p +} + +func (s *Between_sql_exprContext) GetElement() ISql_exprContext { return s.element } + +func (s *Between_sql_exprContext) GetLower() ISql_exprContext { return s.lower } + +func (s *Between_sql_exprContext) GetUpper() ISql_exprContext { return s.upper } + +func (s *Between_sql_exprContext) SetElement(v ISql_exprContext) { s.element = v } + +func (s *Between_sql_exprContext) SetLower(v ISql_exprContext) { s.lower = v } + +func (s *Between_sql_exprContext) SetUpper(v ISql_exprContext) { s.upper = v } + +func (s *Between_sql_exprContext) GetRuleContext() antlr.RuleContext { + return s +} + +func (s *Between_sql_exprContext) BETWEEN() antlr.TerminalNode { + return s.GetToken(KuneiformParserBETWEEN, 0) +} + +func (s *Between_sql_exprContext) AND() antlr.TerminalNode { + return s.GetToken(KuneiformParserAND, 0) +} + +func (s *Between_sql_exprContext) AllSql_expr() []ISql_exprContext { + children := s.GetChildren() + len := 0 + for _, ctx := range children { + if _, ok := ctx.(ISql_exprContext); ok { + len++ + } + } + + tst := make([]ISql_exprContext, len) + i := 0 + for _, ctx := range children { + if t, ok := ctx.(ISql_exprContext); ok { + tst[i] = t.(ISql_exprContext) + i++ + } + } + + return tst +} + +func (s *Between_sql_exprContext) Sql_expr(i int) ISql_exprContext { + var t antlr.RuleContext + j := 0 + for _, ctx := range s.GetChildren() { + if _, ok := ctx.(ISql_exprContext); ok { + if j == i { + t = ctx.(antlr.RuleContext) + break + } + j++ + } + } + + if t == nil { + return nil + } + + return t.(ISql_exprContext) +} + +func (s *Between_sql_exprContext) NOT() antlr.TerminalNode { + return s.GetToken(KuneiformParserNOT, 0) +} + +func (s *Between_sql_exprContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { + 
switch t := visitor.(type) { + case KuneiformParserVisitor: + return t.VisitBetween_sql_expr(s) + + default: + return t.VisitChildren(s) + } +} + +type Function_call_sql_exprContext struct { + Sql_exprContext +} + +func NewFunction_call_sql_exprContext(parser antlr.Parser, ctx antlr.ParserRuleContext) *Function_call_sql_exprContext { + var p = new(Function_call_sql_exprContext) + + InitEmptySql_exprContext(&p.Sql_exprContext) + p.parser = parser + p.CopyAll(ctx.(*Sql_exprContext)) + + return p +} + +func (s *Function_call_sql_exprContext) GetRuleContext() antlr.RuleContext { + return s +} + +func (s *Function_call_sql_exprContext) Sql_function_call() ISql_function_callContext { + var t antlr.RuleContext + for _, ctx := range s.GetChildren() { + if _, ok := ctx.(ISql_function_callContext); ok { + t = ctx.(antlr.RuleContext) + break + } + } + + if t == nil { + return nil + } + + return t.(ISql_function_callContext) +} + +func (s *Function_call_sql_exprContext) Type_cast() IType_castContext { + var t antlr.RuleContext + for _, ctx := range s.GetChildren() { + if _, ok := ctx.(IType_castContext); ok { + t = ctx.(antlr.RuleContext) + break + } + } + + if t == nil { + return nil + } + + return t.(IType_castContext) +} + +func (s *Function_call_sql_exprContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { + switch t := visitor.(type) { + case KuneiformParserVisitor: + return t.VisitFunction_call_sql_expr(s) + + default: + return t.VisitChildren(s) + } +} + +type Paren_sql_exprContext struct { + Sql_exprContext +} + +func NewParen_sql_exprContext(parser antlr.Parser, ctx antlr.ParserRuleContext) *Paren_sql_exprContext { + var p = new(Paren_sql_exprContext) + + InitEmptySql_exprContext(&p.Sql_exprContext) + p.parser = parser + p.CopyAll(ctx.(*Sql_exprContext)) + + return p +} + +func (s *Paren_sql_exprContext) GetRuleContext() antlr.RuleContext { + return s +} + +func (s *Paren_sql_exprContext) LPAREN() antlr.TerminalNode { + return s.GetToken(KuneiformParserLPAREN, 0) +} + +func (s *Paren_sql_exprContext) Sql_expr() ISql_exprContext { + var t antlr.RuleContext + for _, ctx := range s.GetChildren() { + if _, ok := ctx.(ISql_exprContext); ok { + t = ctx.(antlr.RuleContext) + break + } + } + + if t == nil { + return nil + } + + return t.(ISql_exprContext) +} + +func (s *Paren_sql_exprContext) RPAREN() antlr.TerminalNode { + return s.GetToken(KuneiformParserRPAREN, 0) +} + +func (s *Paren_sql_exprContext) Type_cast() IType_castContext { + var t antlr.RuleContext + for _, ctx := range s.GetChildren() { + if _, ok := ctx.(IType_castContext); ok { + t = ctx.(antlr.RuleContext) + break + } + } + + if t == nil { + return nil + } + + return t.(IType_castContext) +} + +func (s *Paren_sql_exprContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { + switch t := visitor.(type) { + case KuneiformParserVisitor: + return t.VisitParen_sql_expr(s) + + default: + return t.VisitChildren(s) + } +} + +type Collate_sql_exprContext struct { + Sql_exprContext +} + +func NewCollate_sql_exprContext(parser antlr.Parser, ctx antlr.ParserRuleContext) *Collate_sql_exprContext { + var p = new(Collate_sql_exprContext) + + InitEmptySql_exprContext(&p.Sql_exprContext) + p.parser = parser + p.CopyAll(ctx.(*Sql_exprContext)) + + return p +} + +func (s *Collate_sql_exprContext) GetRuleContext() antlr.RuleContext { + return s +} + +func (s *Collate_sql_exprContext) Sql_expr() ISql_exprContext { + var t antlr.RuleContext + for _, ctx := range s.GetChildren() { + if _, ok := ctx.(ISql_exprContext); ok { + t = 
ctx.(antlr.RuleContext) + break + } + } + + if t == nil { + return nil + } + + return t.(ISql_exprContext) +} + +func (s *Collate_sql_exprContext) COLLATE() antlr.TerminalNode { + return s.GetToken(KuneiformParserCOLLATE, 0) +} + +func (s *Collate_sql_exprContext) Identifier() IIdentifierContext { + var t antlr.RuleContext + for _, ctx := range s.GetChildren() { + if _, ok := ctx.(IIdentifierContext); ok { + t = ctx.(antlr.RuleContext) + break + } + } + + if t == nil { + return nil + } + + return t.(IIdentifierContext) +} + +func (s *Collate_sql_exprContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { + switch t := visitor.(type) { + case KuneiformParserVisitor: + return t.VisitCollate_sql_expr(s) + + default: + return t.VisitChildren(s) + } +} + +type Make_array_sql_exprContext struct { + Sql_exprContext +} + +func NewMake_array_sql_exprContext(parser antlr.Parser, ctx antlr.ParserRuleContext) *Make_array_sql_exprContext { + var p = new(Make_array_sql_exprContext) + + InitEmptySql_exprContext(&p.Sql_exprContext) + p.parser = parser + p.CopyAll(ctx.(*Sql_exprContext)) + + return p +} + +func (s *Make_array_sql_exprContext) GetRuleContext() antlr.RuleContext { + return s +} + +func (s *Make_array_sql_exprContext) ARRAY() antlr.TerminalNode { + return s.GetToken(KuneiformParserARRAY, 0) +} + +func (s *Make_array_sql_exprContext) LBRACKET() antlr.TerminalNode { + return s.GetToken(KuneiformParserLBRACKET, 0) +} + +func (s *Make_array_sql_exprContext) RBRACKET() antlr.TerminalNode { + return s.GetToken(KuneiformParserRBRACKET, 0) +} + +func (s *Make_array_sql_exprContext) Sql_expr_list() ISql_expr_listContext { + var t antlr.RuleContext + for _, ctx := range s.GetChildren() { + if _, ok := ctx.(ISql_expr_listContext); ok { + t = ctx.(antlr.RuleContext) + break + } + } + + if t == nil { + return nil + } + + return t.(ISql_expr_listContext) +} + +func (s *Make_array_sql_exprContext) Type_cast() IType_castContext { + var t antlr.RuleContext + for _, ctx := range s.GetChildren() { + if _, ok := ctx.(IType_castContext); ok { + t = ctx.(antlr.RuleContext) + break + } + } + + if t == nil { + return nil + } + + return t.(IType_castContext) +} + +func (s *Make_array_sql_exprContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { + switch t := visitor.(type) { + case KuneiformParserVisitor: + return t.VisitMake_array_sql_expr(s) + + default: + return t.VisitChildren(s) + } +} + +type Variable_sql_exprContext struct { + Sql_exprContext +} + +func NewVariable_sql_exprContext(parser antlr.Parser, ctx antlr.ParserRuleContext) *Variable_sql_exprContext { + var p = new(Variable_sql_exprContext) + + InitEmptySql_exprContext(&p.Sql_exprContext) + p.parser = parser + p.CopyAll(ctx.(*Sql_exprContext)) + + return p +} + +func (s *Variable_sql_exprContext) GetRuleContext() antlr.RuleContext { + return s +} + +func (s *Variable_sql_exprContext) Variable() IVariableContext { + var t antlr.RuleContext + for _, ctx := range s.GetChildren() { + if _, ok := ctx.(IVariableContext); ok { + t = ctx.(antlr.RuleContext) + break + } + } + + if t == nil { + return nil + } + + return t.(IVariableContext) +} + +func (s *Variable_sql_exprContext) Type_cast() IType_castContext { + var t antlr.RuleContext + for _, ctx := range s.GetChildren() { + if _, ok := ctx.(IType_castContext); ok { + t = ctx.(antlr.RuleContext) + break + } + } + + if t == nil { + return nil + } + + return t.(IType_castContext) +} + +func (s *Variable_sql_exprContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { + switch t := 
visitor.(type) { + case KuneiformParserVisitor: + return t.VisitVariable_sql_expr(s) + + default: + return t.VisitChildren(s) + } +} + +type Window_function_call_sql_exprContext struct { + Sql_exprContext +} + +func NewWindow_function_call_sql_exprContext(parser antlr.Parser, ctx antlr.ParserRuleContext) *Window_function_call_sql_exprContext { + var p = new(Window_function_call_sql_exprContext) + + InitEmptySql_exprContext(&p.Sql_exprContext) + p.parser = parser + p.CopyAll(ctx.(*Sql_exprContext)) + + return p +} + +func (s *Window_function_call_sql_exprContext) GetRuleContext() antlr.RuleContext { + return s +} + +func (s *Window_function_call_sql_exprContext) Sql_function_call() ISql_function_callContext { + var t antlr.RuleContext + for _, ctx := range s.GetChildren() { + if _, ok := ctx.(ISql_function_callContext); ok { + t = ctx.(antlr.RuleContext) + break + } + } + + if t == nil { + return nil + } + + return t.(ISql_function_callContext) +} + +func (s *Window_function_call_sql_exprContext) OVER() antlr.TerminalNode { + return s.GetToken(KuneiformParserOVER, 0) +} + +func (s *Window_function_call_sql_exprContext) Window() IWindowContext { + var t antlr.RuleContext + for _, ctx := range s.GetChildren() { + if _, ok := ctx.(IWindowContext); ok { + t = ctx.(antlr.RuleContext) + break + } + } + + if t == nil { + return nil + } + + return t.(IWindowContext) +} + +func (s *Window_function_call_sql_exprContext) Identifier() IIdentifierContext { + var t antlr.RuleContext + for _, ctx := range s.GetChildren() { + if _, ok := ctx.(IIdentifierContext); ok { + t = ctx.(antlr.RuleContext) + break + } + } + + if t == nil { + return nil + } + + return t.(IIdentifierContext) +} + +func (s *Window_function_call_sql_exprContext) FILTER() antlr.TerminalNode { + return s.GetToken(KuneiformParserFILTER, 0) +} + +func (s *Window_function_call_sql_exprContext) LPAREN() antlr.TerminalNode { + return s.GetToken(KuneiformParserLPAREN, 0) +} + +func (s *Window_function_call_sql_exprContext) WHERE() antlr.TerminalNode { + return s.GetToken(KuneiformParserWHERE, 0) +} + +func (s *Window_function_call_sql_exprContext) Sql_expr() ISql_exprContext { + var t antlr.RuleContext + for _, ctx := range s.GetChildren() { + if _, ok := ctx.(ISql_exprContext); ok { + t = ctx.(antlr.RuleContext) + break + } + } + + if t == nil { + return nil + } + + return t.(ISql_exprContext) +} + +func (s *Window_function_call_sql_exprContext) RPAREN() antlr.TerminalNode { + return s.GetToken(KuneiformParserRPAREN, 0) +} + +func (s *Window_function_call_sql_exprContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { + switch t := visitor.(type) { + case KuneiformParserVisitor: + return t.VisitWindow_function_call_sql_expr(s) + + default: + return t.VisitChildren(s) + } +} + +type Is_sql_exprContext struct { + Sql_exprContext + left ISql_exprContext + right ISql_exprContext +} + +func NewIs_sql_exprContext(parser antlr.Parser, ctx antlr.ParserRuleContext) *Is_sql_exprContext { + var p = new(Is_sql_exprContext) + + InitEmptySql_exprContext(&p.Sql_exprContext) + p.parser = parser + p.CopyAll(ctx.(*Sql_exprContext)) + + return p +} + +func (s *Is_sql_exprContext) GetLeft() ISql_exprContext { return s.left } + +func (s *Is_sql_exprContext) GetRight() ISql_exprContext { return s.right } + +func (s *Is_sql_exprContext) SetLeft(v ISql_exprContext) { s.left = v } + +func (s *Is_sql_exprContext) SetRight(v ISql_exprContext) { s.right = v } + +func (s *Is_sql_exprContext) GetRuleContext() antlr.RuleContext { + return s +} + +func (s 
*Is_sql_exprContext) IS() antlr.TerminalNode { + return s.GetToken(KuneiformParserIS, 0) +} + +func (s *Is_sql_exprContext) AllSql_expr() []ISql_exprContext { + children := s.GetChildren() + len := 0 + for _, ctx := range children { + if _, ok := ctx.(ISql_exprContext); ok { + len++ + } + } + + tst := make([]ISql_exprContext, len) + i := 0 + for _, ctx := range children { + if t, ok := ctx.(ISql_exprContext); ok { + tst[i] = t.(ISql_exprContext) + i++ + } + } + + return tst +} + +func (s *Is_sql_exprContext) Sql_expr(i int) ISql_exprContext { + var t antlr.RuleContext + j := 0 + for _, ctx := range s.GetChildren() { + if _, ok := ctx.(ISql_exprContext); ok { + if j == i { + t = ctx.(antlr.RuleContext) + break + } + j++ + } + } + + if t == nil { + return nil + } + + return t.(ISql_exprContext) +} + +func (s *Is_sql_exprContext) NULL() antlr.TerminalNode { + return s.GetToken(KuneiformParserNULL, 0) +} + +func (s *Is_sql_exprContext) TRUE() antlr.TerminalNode { + return s.GetToken(KuneiformParserTRUE, 0) +} + +func (s *Is_sql_exprContext) FALSE() antlr.TerminalNode { + return s.GetToken(KuneiformParserFALSE, 0) +} + +func (s *Is_sql_exprContext) NOT() antlr.TerminalNode { + return s.GetToken(KuneiformParserNOT, 0) +} + +func (s *Is_sql_exprContext) DISTINCT() antlr.TerminalNode { + return s.GetToken(KuneiformParserDISTINCT, 0) +} + +func (s *Is_sql_exprContext) FROM() antlr.TerminalNode { + return s.GetToken(KuneiformParserFROM, 0) +} + +func (s *Is_sql_exprContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { + switch t := visitor.(type) { + case KuneiformParserVisitor: + return t.VisitIs_sql_expr(s) + + default: + return t.VisitChildren(s) + } +} + +type Arithmetic_sql_exprContext struct { + Sql_exprContext + left ISql_exprContext + right ISql_exprContext +} + +func NewArithmetic_sql_exprContext(parser antlr.Parser, ctx antlr.ParserRuleContext) *Arithmetic_sql_exprContext { + var p = new(Arithmetic_sql_exprContext) + + InitEmptySql_exprContext(&p.Sql_exprContext) + p.parser = parser + p.CopyAll(ctx.(*Sql_exprContext)) + + return p +} + +func (s *Arithmetic_sql_exprContext) GetLeft() ISql_exprContext { return s.left } + +func (s *Arithmetic_sql_exprContext) GetRight() ISql_exprContext { return s.right } + +func (s *Arithmetic_sql_exprContext) SetLeft(v ISql_exprContext) { s.left = v } + +func (s *Arithmetic_sql_exprContext) SetRight(v ISql_exprContext) { s.right = v } + +func (s *Arithmetic_sql_exprContext) GetRuleContext() antlr.RuleContext { + return s +} + +func (s *Arithmetic_sql_exprContext) AllSql_expr() []ISql_exprContext { + children := s.GetChildren() + len := 0 + for _, ctx := range children { + if _, ok := ctx.(ISql_exprContext); ok { + len++ + } + } + + tst := make([]ISql_exprContext, len) + i := 0 + for _, ctx := range children { + if t, ok := ctx.(ISql_exprContext); ok { + tst[i] = t.(ISql_exprContext) + i++ + } + } + + return tst +} + +func (s *Arithmetic_sql_exprContext) Sql_expr(i int) ISql_exprContext { + var t antlr.RuleContext + j := 0 + for _, ctx := range s.GetChildren() { + if _, ok := ctx.(ISql_exprContext); ok { + if j == i { + t = ctx.(antlr.RuleContext) + break + } + j++ + } + } + + if t == nil { + return nil + } + + return t.(ISql_exprContext) +} + +func (s *Arithmetic_sql_exprContext) STAR() antlr.TerminalNode { + return s.GetToken(KuneiformParserSTAR, 0) +} + +func (s *Arithmetic_sql_exprContext) DIV() antlr.TerminalNode { + return s.GetToken(KuneiformParserDIV, 0) +} + +func (s *Arithmetic_sql_exprContext) MOD() antlr.TerminalNode { + return 
s.GetToken(KuneiformParserMOD, 0) +} + +func (s *Arithmetic_sql_exprContext) PLUS() antlr.TerminalNode { + return s.GetToken(KuneiformParserPLUS, 0) +} + +func (s *Arithmetic_sql_exprContext) MINUS() antlr.TerminalNode { + return s.GetToken(KuneiformParserMINUS, 0) +} + +func (s *Arithmetic_sql_exprContext) CONCAT() antlr.TerminalNode { + return s.GetToken(KuneiformParserCONCAT, 0) +} + +func (s *Arithmetic_sql_exprContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { + switch t := visitor.(type) { + case KuneiformParserVisitor: + return t.VisitArithmetic_sql_expr(s) + + default: + return t.VisitChildren(s) + } +} + +type Like_sql_exprContext struct { + Sql_exprContext + left ISql_exprContext + right ISql_exprContext +} + +func NewLike_sql_exprContext(parser antlr.Parser, ctx antlr.ParserRuleContext) *Like_sql_exprContext { + var p = new(Like_sql_exprContext) + + InitEmptySql_exprContext(&p.Sql_exprContext) + p.parser = parser + p.CopyAll(ctx.(*Sql_exprContext)) + + return p +} + +func (s *Like_sql_exprContext) GetLeft() ISql_exprContext { return s.left } + +func (s *Like_sql_exprContext) GetRight() ISql_exprContext { return s.right } + +func (s *Like_sql_exprContext) SetLeft(v ISql_exprContext) { s.left = v } + +func (s *Like_sql_exprContext) SetRight(v ISql_exprContext) { s.right = v } + +func (s *Like_sql_exprContext) GetRuleContext() antlr.RuleContext { + return s +} + +func (s *Like_sql_exprContext) AllSql_expr() []ISql_exprContext { + children := s.GetChildren() + len := 0 + for _, ctx := range children { + if _, ok := ctx.(ISql_exprContext); ok { + len++ + } + } + + tst := make([]ISql_exprContext, len) + i := 0 + for _, ctx := range children { + if t, ok := ctx.(ISql_exprContext); ok { + tst[i] = t.(ISql_exprContext) + i++ + } + } + + return tst +} + +func (s *Like_sql_exprContext) Sql_expr(i int) ISql_exprContext { + var t antlr.RuleContext + j := 0 + for _, ctx := range s.GetChildren() { + if _, ok := ctx.(ISql_exprContext); ok { + if j == i { + t = ctx.(antlr.RuleContext) + break + } + j++ + } + } + + if t == nil { + return nil + } + + return t.(ISql_exprContext) +} + +func (s *Like_sql_exprContext) LIKE() antlr.TerminalNode { + return s.GetToken(KuneiformParserLIKE, 0) +} + +func (s *Like_sql_exprContext) ILIKE() antlr.TerminalNode { + return s.GetToken(KuneiformParserILIKE, 0) +} + +func (s *Like_sql_exprContext) NOT() antlr.TerminalNode { + return s.GetToken(KuneiformParserNOT, 0) +} + +func (s *Like_sql_exprContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { + switch t := visitor.(type) { + case KuneiformParserVisitor: + return t.VisitLike_sql_expr(s) + + default: + return t.VisitChildren(s) + } +} + +type Subquery_sql_exprContext struct { + Sql_exprContext +} + +func NewSubquery_sql_exprContext(parser antlr.Parser, ctx antlr.ParserRuleContext) *Subquery_sql_exprContext { + var p = new(Subquery_sql_exprContext) + + InitEmptySql_exprContext(&p.Sql_exprContext) + p.parser = parser + p.CopyAll(ctx.(*Sql_exprContext)) + + return p +} + +func (s *Subquery_sql_exprContext) GetRuleContext() antlr.RuleContext { + return s +} + +func (s *Subquery_sql_exprContext) LPAREN() antlr.TerminalNode { + return s.GetToken(KuneiformParserLPAREN, 0) +} + +func (s *Subquery_sql_exprContext) Select_statement() ISelect_statementContext { + var t antlr.RuleContext + for _, ctx := range s.GetChildren() { + if _, ok := ctx.(ISelect_statementContext); ok { + t = ctx.(antlr.RuleContext) + break + } + } + + if t == nil { + return nil + } + + return t.(ISelect_statementContext) +} + +func 
(s *Subquery_sql_exprContext) RPAREN() antlr.TerminalNode { + return s.GetToken(KuneiformParserRPAREN, 0) +} + +func (s *Subquery_sql_exprContext) EXISTS() antlr.TerminalNode { + return s.GetToken(KuneiformParserEXISTS, 0) +} + +func (s *Subquery_sql_exprContext) Type_cast() IType_castContext { + var t antlr.RuleContext + for _, ctx := range s.GetChildren() { + if _, ok := ctx.(IType_castContext); ok { + t = ctx.(antlr.RuleContext) + break + } + } + + if t == nil { + return nil + } + + return t.(IType_castContext) +} + +func (s *Subquery_sql_exprContext) NOT() antlr.TerminalNode { + return s.GetToken(KuneiformParserNOT, 0) +} + +func (s *Subquery_sql_exprContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { + switch t := visitor.(type) { + case KuneiformParserVisitor: + return t.VisitSubquery_sql_expr(s) + + default: + return t.VisitChildren(s) + } +} + +type Unary_sql_exprContext struct { + Sql_exprContext +} + +func NewUnary_sql_exprContext(parser antlr.Parser, ctx antlr.ParserRuleContext) *Unary_sql_exprContext { + var p = new(Unary_sql_exprContext) + + InitEmptySql_exprContext(&p.Sql_exprContext) + p.parser = parser + p.CopyAll(ctx.(*Sql_exprContext)) + + return p +} + +func (s *Unary_sql_exprContext) GetRuleContext() antlr.RuleContext { + return s +} + +func (s *Unary_sql_exprContext) Sql_expr() ISql_exprContext { + var t antlr.RuleContext + for _, ctx := range s.GetChildren() { + if _, ok := ctx.(ISql_exprContext); ok { + t = ctx.(antlr.RuleContext) + break + } + } + + if t == nil { + return nil + } + + return t.(ISql_exprContext) +} + +func (s *Unary_sql_exprContext) PLUS() antlr.TerminalNode { + return s.GetToken(KuneiformParserPLUS, 0) +} + +func (s *Unary_sql_exprContext) MINUS() antlr.TerminalNode { + return s.GetToken(KuneiformParserMINUS, 0) +} + +func (s *Unary_sql_exprContext) NOT() antlr.TerminalNode { + return s.GetToken(KuneiformParserNOT, 0) +} + +func (s *Unary_sql_exprContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { + switch t := visitor.(type) { + case KuneiformParserVisitor: + return t.VisitUnary_sql_expr(s) + + default: + return t.VisitChildren(s) + } +} + +type Case_exprContext struct { + Sql_exprContext + case_clause ISql_exprContext + else_clause ISql_exprContext +} + +func NewCase_exprContext(parser antlr.Parser, ctx antlr.ParserRuleContext) *Case_exprContext { + var p = new(Case_exprContext) + + InitEmptySql_exprContext(&p.Sql_exprContext) + p.parser = parser + p.CopyAll(ctx.(*Sql_exprContext)) + + return p +} + +func (s *Case_exprContext) GetCase_clause() ISql_exprContext { return s.case_clause } + +func (s *Case_exprContext) GetElse_clause() ISql_exprContext { return s.else_clause } + +func (s *Case_exprContext) SetCase_clause(v ISql_exprContext) { s.case_clause = v } + +func (s *Case_exprContext) SetElse_clause(v ISql_exprContext) { s.else_clause = v } + +func (s *Case_exprContext) GetRuleContext() antlr.RuleContext { + return s +} + +func (s *Case_exprContext) CASE() antlr.TerminalNode { + return s.GetToken(KuneiformParserCASE, 0) +} + +func (s *Case_exprContext) END() antlr.TerminalNode { + return s.GetToken(KuneiformParserEND, 0) +} + +func (s *Case_exprContext) AllWhen_then_clause() []IWhen_then_clauseContext { + children := s.GetChildren() + len := 0 + for _, ctx := range children { + if _, ok := ctx.(IWhen_then_clauseContext); ok { + len++ + } + } + + tst := make([]IWhen_then_clauseContext, len) + i := 0 + for _, ctx := range children { + if t, ok := ctx.(IWhen_then_clauseContext); ok { + tst[i] = t.(IWhen_then_clauseContext) + i++ 
+ } + } + + return tst +} + +func (s *Case_exprContext) When_then_clause(i int) IWhen_then_clauseContext { + var t antlr.RuleContext + j := 0 + for _, ctx := range s.GetChildren() { + if _, ok := ctx.(IWhen_then_clauseContext); ok { + if j == i { + t = ctx.(antlr.RuleContext) + break + } + j++ + } + } + + if t == nil { + return nil + } + + return t.(IWhen_then_clauseContext) +} + +func (s *Case_exprContext) ELSE() antlr.TerminalNode { + return s.GetToken(KuneiformParserELSE, 0) +} + +func (s *Case_exprContext) AllSql_expr() []ISql_exprContext { + children := s.GetChildren() + len := 0 + for _, ctx := range children { + if _, ok := ctx.(ISql_exprContext); ok { + len++ + } + } + + tst := make([]ISql_exprContext, len) + i := 0 + for _, ctx := range children { + if t, ok := ctx.(ISql_exprContext); ok { + tst[i] = t.(ISql_exprContext) + i++ + } + } + + return tst +} + +func (s *Case_exprContext) Sql_expr(i int) ISql_exprContext { + var t antlr.RuleContext + j := 0 + for _, ctx := range s.GetChildren() { + if _, ok := ctx.(ISql_exprContext); ok { + if j == i { + t = ctx.(antlr.RuleContext) + break + } + j++ + } + } + + if t == nil { + return nil + } + + return t.(ISql_exprContext) +} + +func (s *Case_exprContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { + switch t := visitor.(type) { + case KuneiformParserVisitor: + return t.VisitCase_expr(s) + + default: + return t.VisitChildren(s) + } +} + +type In_sql_exprContext struct { + Sql_exprContext +} + +func NewIn_sql_exprContext(parser antlr.Parser, ctx antlr.ParserRuleContext) *In_sql_exprContext { + var p = new(In_sql_exprContext) + + InitEmptySql_exprContext(&p.Sql_exprContext) + p.parser = parser + p.CopyAll(ctx.(*Sql_exprContext)) + + return p +} + +func (s *In_sql_exprContext) GetRuleContext() antlr.RuleContext { + return s +} + +func (s *In_sql_exprContext) Sql_expr() ISql_exprContext { + var t antlr.RuleContext + for _, ctx := range s.GetChildren() { + if _, ok := ctx.(ISql_exprContext); ok { + t = ctx.(antlr.RuleContext) + break + } + } + + if t == nil { + return nil + } + + return t.(ISql_exprContext) +} + +func (s *In_sql_exprContext) IN() antlr.TerminalNode { + return s.GetToken(KuneiformParserIN, 0) +} + +func (s *In_sql_exprContext) LPAREN() antlr.TerminalNode { + return s.GetToken(KuneiformParserLPAREN, 0) +} + +func (s *In_sql_exprContext) RPAREN() antlr.TerminalNode { + return s.GetToken(KuneiformParserRPAREN, 0) +} + +func (s *In_sql_exprContext) Sql_expr_list() ISql_expr_listContext { + var t antlr.RuleContext + for _, ctx := range s.GetChildren() { + if _, ok := ctx.(ISql_expr_listContext); ok { + t = ctx.(antlr.RuleContext) + break + } + } + + if t == nil { + return nil + } + + return t.(ISql_expr_listContext) +} + +func (s *In_sql_exprContext) Select_statement() ISelect_statementContext { + var t antlr.RuleContext + for _, ctx := range s.GetChildren() { + if _, ok := ctx.(ISelect_statementContext); ok { + t = ctx.(antlr.RuleContext) + break + } + } + + if t == nil { + return nil + } + + return t.(ISelect_statementContext) +} + +func (s *In_sql_exprContext) NOT() antlr.TerminalNode { + return s.GetToken(KuneiformParserNOT, 0) +} + +func (s *In_sql_exprContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { + switch t := visitor.(type) { + case KuneiformParserVisitor: + return t.VisitIn_sql_expr(s) - case 13: - localctx = NewIs_sql_exprContext(p, NewSql_exprContext(p, _parentctx, _parentState)) - localctx.(*Is_sql_exprContext).left = _prevctx + default: + return t.VisitChildren(s) + } +} - 
p.PushNewRecursionContext(localctx, _startState, KuneiformParserRULE_sql_expr) - p.SetState(852) +func (p *KuneiformParser) Sql_expr() (localctx ISql_exprContext) { + return p.sql_expr(0) +} - if !(p.Precpred(p.GetParserRuleContext(), 4)) { - p.SetError(antlr.NewFailedPredicateException(p, "p.Precpred(p.GetParserRuleContext(), 4)", "")) +func (p *KuneiformParser) sql_expr(_p int) (localctx ISql_exprContext) { + var _parentctx antlr.ParserRuleContext = p.GetParserRuleContext() + + _parentState := p.GetState() + localctx = NewSql_exprContext(p, p.GetParserRuleContext(), _parentState) + var _prevctx ISql_exprContext = localctx + var _ antlr.ParserRuleContext = _prevctx // TODO: To prevent unused variable warning. + _startState := 100 + p.EnterRecursionRule(localctx, 100, KuneiformParserRULE_sql_expr, _p) + var _la int + + var _alt int + + p.EnterOuterAlt(localctx, 1) + p.SetState(972) + p.GetErrorHandler().Sync(p) + if p.HasError() { + goto errorExit + } + + switch p.GetInterpreter().AdaptivePredict(p.BaseParser, p.GetTokenStream(), 131, p.GetParserRuleContext()) { + case 1: + localctx = NewParen_sql_exprContext(p, localctx) + p.SetParserRuleContext(localctx) + _prevctx = localctx + + { + p.SetState(891) + p.Match(KuneiformParserLPAREN) + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } + } + { + p.SetState(892) + p.sql_expr(0) + } + { + p.SetState(893) + p.Match(KuneiformParserRPAREN) + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } + } + p.SetState(895) + p.GetErrorHandler().Sync(p) + + if p.GetInterpreter().AdaptivePredict(p.BaseParser, p.GetTokenStream(), 115, p.GetParserRuleContext()) == 1 { + { + p.SetState(894) + p.Type_cast() + } + + } else if p.HasError() { // JIM + goto errorExit + } + + case 2: + localctx = NewUnary_sql_exprContext(p, localctx) + p.SetParserRuleContext(localctx) + _prevctx = localctx + { + p.SetState(897) + _la = p.GetTokenStream().LA(1) + + if !(_la == KuneiformParserPLUS || _la == KuneiformParserMINUS) { + p.GetErrorHandler().RecoverInline(p) + } else { + p.GetErrorHandler().ReportMatch(p) + p.Consume() + } + } + { + p.SetState(898) + p.sql_expr(21) + } + + case 3: + localctx = NewLiteral_sql_exprContext(p, localctx) + p.SetParserRuleContext(localctx) + _prevctx = localctx + { + p.SetState(899) + p.Literal() + } + p.SetState(901) + p.GetErrorHandler().Sync(p) + + if p.GetInterpreter().AdaptivePredict(p.BaseParser, p.GetTokenStream(), 116, p.GetParserRuleContext()) == 1 { + { + p.SetState(900) + p.Type_cast() + } + + } else if p.HasError() { // JIM + goto errorExit + } + + case 4: + localctx = NewWindow_function_call_sql_exprContext(p, localctx) + p.SetParserRuleContext(localctx) + _prevctx = localctx + { + p.SetState(903) + p.Sql_function_call() + } + p.SetState(910) + p.GetErrorHandler().Sync(p) + if p.HasError() { + goto errorExit + } + _la = p.GetTokenStream().LA(1) + + if _la == KuneiformParserFILTER { + { + p.SetState(904) + p.Match(KuneiformParserFILTER) + if p.HasError() { + // Recognition error - abort rule goto errorExit } - { - p.SetState(853) - p.Match(KuneiformParserIS) - if p.HasError() { - // Recognition error - abort rule - goto errorExit - } - } - p.SetState(855) - p.GetErrorHandler().Sync(p) + } + { + p.SetState(905) + p.Match(KuneiformParserLPAREN) if p.HasError() { + // Recognition error - abort rule goto errorExit } - _la = p.GetTokenStream().LA(1) - - if _la == KuneiformParserNOT { - { - p.SetState(854) - p.Match(KuneiformParserNOT) - if p.HasError() { - // Recognition error - abort rule - 
goto errorExit - } - } - + } + { + p.SetState(906) + p.Match(KuneiformParserWHERE) + if p.HasError() { + // Recognition error - abort rule + goto errorExit } - p.SetState(863) - p.GetErrorHandler().Sync(p) + } + { + p.SetState(907) + p.sql_expr(0) + } + { + p.SetState(908) + p.Match(KuneiformParserRPAREN) if p.HasError() { + // Recognition error - abort rule goto errorExit } + } - switch p.GetTokenStream().LA(1) { - case KuneiformParserDISTINCT: - { - p.SetState(857) - p.Match(KuneiformParserDISTINCT) - if p.HasError() { - // Recognition error - abort rule - goto errorExit - } - } - { - p.SetState(858) - p.Match(KuneiformParserFROM) - if p.HasError() { - // Recognition error - abort rule - goto errorExit - } - } - { - p.SetState(859) + } + { + p.SetState(912) + p.Match(KuneiformParserOVER) + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } + } + p.SetState(915) + p.GetErrorHandler().Sync(p) + if p.HasError() { + goto errorExit + } - var _x = p.sql_expr(0) + switch p.GetTokenStream().LA(1) { + case KuneiformParserLPAREN: + { + p.SetState(913) + p.Window() + } - localctx.(*Is_sql_exprContext).right = _x - } + case KuneiformParserDOUBLE_QUOTE, KuneiformParserIDENTIFIER: + { + p.SetState(914) + p.Identifier() + } - case KuneiformParserNULL: - { - p.SetState(860) - p.Match(KuneiformParserNULL) - if p.HasError() { - // Recognition error - abort rule - goto errorExit - } - } + default: + p.SetError(antlr.NewNoViableAltException(p, nil, nil, nil, nil, nil)) + goto errorExit + } - case KuneiformParserTRUE: - { - p.SetState(861) - p.Match(KuneiformParserTRUE) - if p.HasError() { - // Recognition error - abort rule - goto errorExit - } - } + case 5: + localctx = NewFunction_call_sql_exprContext(p, localctx) + p.SetParserRuleContext(localctx) + _prevctx = localctx + { + p.SetState(917) + p.Sql_function_call() + } + p.SetState(919) + p.GetErrorHandler().Sync(p) - case KuneiformParserFALSE: - { - p.SetState(862) - p.Match(KuneiformParserFALSE) - if p.HasError() { - // Recognition error - abort rule - goto errorExit - } - } + if p.GetInterpreter().AdaptivePredict(p.BaseParser, p.GetTokenStream(), 119, p.GetParserRuleContext()) == 1 { + { + p.SetState(918) + p.Type_cast() + } - default: - p.SetError(antlr.NewNoViableAltException(p, nil, nil, nil, nil, nil)) - goto errorExit - } + } else if p.HasError() { // JIM + goto errorExit + } - case antlr.ATNInvalidAltNumber: + case 6: + localctx = NewVariable_sql_exprContext(p, localctx) + p.SetParserRuleContext(localctx) + _prevctx = localctx + { + p.SetState(921) + p.Variable() + } + p.SetState(923) + p.GetErrorHandler().Sync(p) + + if p.GetInterpreter().AdaptivePredict(p.BaseParser, p.GetTokenStream(), 120, p.GetParserRuleContext()) == 1 { + { + p.SetState(922) + p.Type_cast() + } + + } else if p.HasError() { // JIM + goto errorExit + } + + case 7: + localctx = NewMake_array_sql_exprContext(p, localctx) + p.SetParserRuleContext(localctx) + _prevctx = localctx + { + p.SetState(925) + p.Match(KuneiformParserARRAY) + if p.HasError() { + // Recognition error - abort rule goto errorExit } - } - p.SetState(869) - p.GetErrorHandler().Sync(p) - if p.HasError() { - goto errorExit + { + p.SetState(926) + p.Match(KuneiformParserLBRACKET) + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } } - _alt = p.GetInterpreter().AdaptivePredict(p.BaseParser, p.GetTokenStream(), 122, p.GetParserRuleContext()) + p.SetState(928) + p.GetErrorHandler().Sync(p) if p.HasError() { goto errorExit } - } - -errorExit: - if p.HasError() { - v := 
p.GetError() - localctx.SetException(v) - p.GetErrorHandler().ReportError(p, v) - p.GetErrorHandler().Recover(p, v) - p.SetError(nil) - } - p.UnrollRecursionContexts(_parentctx) - return localctx - goto errorExit // Trick to prevent compiler error if the label is not used -} - -// IWhen_then_clauseContext is an interface to support dynamic dispatch. -type IWhen_then_clauseContext interface { - antlr.ParserRuleContext - - // GetParser returns the parser. - GetParser() antlr.Parser - - // GetWhen_condition returns the when_condition rule contexts. - GetWhen_condition() ISql_exprContext - - // GetThen returns the then rule contexts. - GetThen() ISql_exprContext - - // SetWhen_condition sets the when_condition rule contexts. - SetWhen_condition(ISql_exprContext) - - // SetThen sets the then rule contexts. - SetThen(ISql_exprContext) + _la = p.GetTokenStream().LA(1) - // Getter signatures - WHEN() antlr.TerminalNode - THEN() antlr.TerminalNode - AllSql_expr() []ISql_exprContext - Sql_expr(i int) ISql_exprContext + if ((int64((_la-7)) & ^0x3f) == 0 && ((int64(1)<<(_la-7))&-9204794688358293503) != 0) || ((int64((_la-89)) & ^0x3f) == 0 && ((int64(1)<<(_la-89))&505496072823504897) != 0) { + { + p.SetState(927) + p.Sql_expr_list() + } - // IsWhen_then_clauseContext differentiates from other interfaces. - IsWhen_then_clauseContext() -} + } + { + p.SetState(930) + p.Match(KuneiformParserRBRACKET) + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } + } + p.SetState(932) + p.GetErrorHandler().Sync(p) -type When_then_clauseContext struct { - antlr.BaseParserRuleContext - parser antlr.Parser - when_condition ISql_exprContext - then ISql_exprContext -} + if p.GetInterpreter().AdaptivePredict(p.BaseParser, p.GetTokenStream(), 122, p.GetParserRuleContext()) == 1 { + { + p.SetState(931) + p.Type_cast() + } -func NewEmptyWhen_then_clauseContext() *When_then_clauseContext { - var p = new(When_then_clauseContext) - antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1) - p.RuleIndex = KuneiformParserRULE_when_then_clause - return p -} + } else if p.HasError() { // JIM + goto errorExit + } -func InitEmptyWhen_then_clauseContext(p *When_then_clauseContext) { - antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1) - p.RuleIndex = KuneiformParserRULE_when_then_clause -} + case 8: + localctx = NewColumn_sql_exprContext(p, localctx) + p.SetParserRuleContext(localctx) + _prevctx = localctx + p.SetState(937) + p.GetErrorHandler().Sync(p) -func (*When_then_clauseContext) IsWhen_then_clauseContext() {} + if p.GetInterpreter().AdaptivePredict(p.BaseParser, p.GetTokenStream(), 123, p.GetParserRuleContext()) == 1 { + { + p.SetState(934) -func NewWhen_then_clauseContext(parser antlr.Parser, parent antlr.ParserRuleContext, invokingState int) *When_then_clauseContext { - var p = new(When_then_clauseContext) + var _x = p.Identifier() - antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, parent, invokingState) + localctx.(*Column_sql_exprContext).table = _x + } + { + p.SetState(935) + p.Match(KuneiformParserPERIOD) + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } + } - p.parser = parser - p.RuleIndex = KuneiformParserRULE_when_then_clause + } else if p.HasError() { // JIM + goto errorExit + } + { + p.SetState(939) - return p -} + var _x = p.Identifier() -func (s *When_then_clauseContext) GetParser() antlr.Parser { return s.parser } + localctx.(*Column_sql_exprContext).column = _x + } + p.SetState(941) + p.GetErrorHandler().Sync(p) -func (s 
*When_then_clauseContext) GetWhen_condition() ISql_exprContext { return s.when_condition } + if p.GetInterpreter().AdaptivePredict(p.BaseParser, p.GetTokenStream(), 124, p.GetParserRuleContext()) == 1 { + { + p.SetState(940) + p.Type_cast() + } -func (s *When_then_clauseContext) GetThen() ISql_exprContext { return s.then } + } else if p.HasError() { // JIM + goto errorExit + } -func (s *When_then_clauseContext) SetWhen_condition(v ISql_exprContext) { s.when_condition = v } + case 9: + localctx = NewCase_exprContext(p, localctx) + p.SetParserRuleContext(localctx) + _prevctx = localctx + { + p.SetState(943) + p.Match(KuneiformParserCASE) + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } + } + p.SetState(945) + p.GetErrorHandler().Sync(p) + if p.HasError() { + goto errorExit + } + _la = p.GetTokenStream().LA(1) -func (s *When_then_clauseContext) SetThen(v ISql_exprContext) { s.then = v } + if ((int64((_la-7)) & ^0x3f) == 0 && ((int64(1)<<(_la-7))&-9204794688358293503) != 0) || ((int64((_la-89)) & ^0x3f) == 0 && ((int64(1)<<(_la-89))&505496072823504897) != 0) { + { + p.SetState(944) -func (s *When_then_clauseContext) WHEN() antlr.TerminalNode { - return s.GetToken(KuneiformParserWHEN, 0) -} + var _x = p.sql_expr(0) -func (s *When_then_clauseContext) THEN() antlr.TerminalNode { - return s.GetToken(KuneiformParserTHEN, 0) -} + localctx.(*Case_exprContext).case_clause = _x + } -func (s *When_then_clauseContext) AllSql_expr() []ISql_exprContext { - children := s.GetChildren() - len := 0 - for _, ctx := range children { - if _, ok := ctx.(ISql_exprContext); ok { - len++ } - } - - tst := make([]ISql_exprContext, len) - i := 0 - for _, ctx := range children { - if t, ok := ctx.(ISql_exprContext); ok { - tst[i] = t.(ISql_exprContext) - i++ + p.SetState(948) + p.GetErrorHandler().Sync(p) + if p.HasError() { + goto errorExit } - } + _la = p.GetTokenStream().LA(1) - return tst -} + for ok := true; ok; ok = _la == KuneiformParserWHEN { + { + p.SetState(947) + p.When_then_clause() + } -func (s *When_then_clauseContext) Sql_expr(i int) ISql_exprContext { - var t antlr.RuleContext - j := 0 - for _, ctx := range s.GetChildren() { - if _, ok := ctx.(ISql_exprContext); ok { - if j == i { - t = ctx.(antlr.RuleContext) - break + p.SetState(950) + p.GetErrorHandler().Sync(p) + if p.HasError() { + goto errorExit } - j++ + _la = p.GetTokenStream().LA(1) } - } - - if t == nil { - return nil - } - - return t.(ISql_exprContext) -} - -func (s *When_then_clauseContext) GetRuleContext() antlr.RuleContext { - return s -} + p.SetState(954) + p.GetErrorHandler().Sync(p) + if p.HasError() { + goto errorExit + } + _la = p.GetTokenStream().LA(1) -func (s *When_then_clauseContext) ToStringTree(ruleNames []string, recog antlr.Recognizer) string { - return antlr.TreesStringTree(s, ruleNames, recog) -} + if _la == KuneiformParserELSE { + { + p.SetState(952) + p.Match(KuneiformParserELSE) + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } + } + { + p.SetState(953) -func (s *When_then_clauseContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { - switch t := visitor.(type) { - case KuneiformParserVisitor: - return t.VisitWhen_then_clause(s) + var _x = p.sql_expr(0) - default: - return t.VisitChildren(s) - } -} + localctx.(*Case_exprContext).else_clause = _x + } -func (p *KuneiformParser) When_then_clause() (localctx IWhen_then_clauseContext) { - localctx = NewWhen_then_clauseContext(p, p.GetParserRuleContext(), p.GetState()) - p.EnterRule(localctx, 90, 
KuneiformParserRULE_when_then_clause) - p.EnterOuterAlt(localctx, 1) - { - p.SetState(870) - p.Match(KuneiformParserWHEN) - if p.HasError() { - // Recognition error - abort rule - goto errorExit } - } - { - p.SetState(871) - - var _x = p.sql_expr(0) + { + p.SetState(956) + p.Match(KuneiformParserEND) + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } + } - localctx.(*When_then_clauseContext).when_condition = _x - } - { - p.SetState(872) - p.Match(KuneiformParserTHEN) + case 10: + localctx = NewSubquery_sql_exprContext(p, localctx) + p.SetParserRuleContext(localctx) + _prevctx = localctx + p.SetState(962) + p.GetErrorHandler().Sync(p) if p.HasError() { - // Recognition error - abort rule goto errorExit } - } - { - p.SetState(873) - - var _x = p.sql_expr(0) - - localctx.(*When_then_clauseContext).then = _x - } + _la = p.GetTokenStream().LA(1) -errorExit: - if p.HasError() { - v := p.GetError() - localctx.SetException(v) - p.GetErrorHandler().ReportError(p, v) - p.GetErrorHandler().Recover(p, v) - p.SetError(nil) - } - p.ExitRule() - return localctx - goto errorExit // Trick to prevent compiler error if the label is not used -} + if _la == KuneiformParserNOT || _la == KuneiformParserEXISTS { + p.SetState(959) + p.GetErrorHandler().Sync(p) + if p.HasError() { + goto errorExit + } + _la = p.GetTokenStream().LA(1) -// ISql_expr_listContext is an interface to support dynamic dispatch. -type ISql_expr_listContext interface { - antlr.ParserRuleContext + if _la == KuneiformParserNOT { + { + p.SetState(958) + p.Match(KuneiformParserNOT) + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } + } - // GetParser returns the parser. - GetParser() antlr.Parser + } + { + p.SetState(961) + p.Match(KuneiformParserEXISTS) + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } + } - // Getter signatures - AllSql_expr() []ISql_exprContext - Sql_expr(i int) ISql_exprContext - AllCOMMA() []antlr.TerminalNode - COMMA(i int) antlr.TerminalNode + } + { + p.SetState(964) + p.Match(KuneiformParserLPAREN) + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } + } + { + p.SetState(965) + p.Select_statement() + } + { + p.SetState(966) + p.Match(KuneiformParserRPAREN) + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } + } + p.SetState(968) + p.GetErrorHandler().Sync(p) - // IsSql_expr_listContext differentiates from other interfaces. 
- IsSql_expr_listContext() -} + if p.GetInterpreter().AdaptivePredict(p.BaseParser, p.GetTokenStream(), 130, p.GetParserRuleContext()) == 1 { + { + p.SetState(967) + p.Type_cast() + } -type Sql_expr_listContext struct { - antlr.BaseParserRuleContext - parser antlr.Parser -} + } else if p.HasError() { // JIM + goto errorExit + } -func NewEmptySql_expr_listContext() *Sql_expr_listContext { - var p = new(Sql_expr_listContext) - antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1) - p.RuleIndex = KuneiformParserRULE_sql_expr_list - return p -} + case 11: + localctx = NewUnary_sql_exprContext(p, localctx) + p.SetParserRuleContext(localctx) + _prevctx = localctx -func InitEmptySql_expr_listContext(p *Sql_expr_listContext) { - antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1) - p.RuleIndex = KuneiformParserRULE_sql_expr_list -} + { + p.SetState(970) + p.Match(KuneiformParserNOT) + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } + } -func (*Sql_expr_listContext) IsSql_expr_listContext() {} + { + p.SetState(971) + p.sql_expr(3) + } -func NewSql_expr_listContext(parser antlr.Parser, parent antlr.ParserRuleContext, invokingState int) *Sql_expr_listContext { - var p = new(Sql_expr_listContext) + case antlr.ATNInvalidAltNumber: + goto errorExit + } + p.GetParserRuleContext().SetStop(p.GetTokenStream().LT(-1)) + p.SetState(1059) + p.GetErrorHandler().Sync(p) + if p.HasError() { + goto errorExit + } + _alt = p.GetInterpreter().AdaptivePredict(p.BaseParser, p.GetTokenStream(), 144, p.GetParserRuleContext()) + if p.HasError() { + goto errorExit + } + for _alt != 2 && _alt != antlr.ATNInvalidAltNumber { + if _alt == 1 { + if p.GetParseListeners() != nil { + p.TriggerExitRuleEvent() + } + _prevctx = localctx + p.SetState(1057) + p.GetErrorHandler().Sync(p) + if p.HasError() { + goto errorExit + } - antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, parent, invokingState) + switch p.GetInterpreter().AdaptivePredict(p.BaseParser, p.GetTokenStream(), 143, p.GetParserRuleContext()) { + case 1: + localctx = NewArithmetic_sql_exprContext(p, NewSql_exprContext(p, _parentctx, _parentState)) + localctx.(*Arithmetic_sql_exprContext).left = _prevctx - p.parser = parser - p.RuleIndex = KuneiformParserRULE_sql_expr_list + p.PushNewRecursionContext(localctx, _startState, KuneiformParserRULE_sql_expr) + p.SetState(974) - return p -} + if !(p.Precpred(p.GetParserRuleContext(), 19)) { + p.SetError(antlr.NewFailedPredicateException(p, "p.Precpred(p.GetParserRuleContext(), 19)", "")) + goto errorExit + } + { + p.SetState(975) + _la = p.GetTokenStream().LA(1) -func (s *Sql_expr_listContext) GetParser() antlr.Parser { return s.parser } + if !((int64(_la) & ^0x3f) == 0 && ((int64(1)<<_la)&4734976) != 0) { + p.GetErrorHandler().RecoverInline(p) + } else { + p.GetErrorHandler().ReportMatch(p) + p.Consume() + } + } + { + p.SetState(976) -func (s *Sql_expr_listContext) AllSql_expr() []ISql_exprContext { - children := s.GetChildren() - len := 0 - for _, ctx := range children { - if _, ok := ctx.(ISql_exprContext); ok { - len++ - } - } + var _x = p.sql_expr(20) - tst := make([]ISql_exprContext, len) - i := 0 - for _, ctx := range children { - if t, ok := ctx.(ISql_exprContext); ok { - tst[i] = t.(ISql_exprContext) - i++ - } - } + localctx.(*Arithmetic_sql_exprContext).right = _x + } - return tst -} + case 2: + localctx = NewArithmetic_sql_exprContext(p, NewSql_exprContext(p, _parentctx, _parentState)) + localctx.(*Arithmetic_sql_exprContext).left = _prevctx -func (s 
*Sql_expr_listContext) Sql_expr(i int) ISql_exprContext { - var t antlr.RuleContext - j := 0 - for _, ctx := range s.GetChildren() { - if _, ok := ctx.(ISql_exprContext); ok { - if j == i { - t = ctx.(antlr.RuleContext) - break - } - j++ - } - } + p.PushNewRecursionContext(localctx, _startState, KuneiformParserRULE_sql_expr) + p.SetState(977) - if t == nil { - return nil - } + if !(p.Precpred(p.GetParserRuleContext(), 18)) { + p.SetError(antlr.NewFailedPredicateException(p, "p.Precpred(p.GetParserRuleContext(), 18)", "")) + goto errorExit + } + { + p.SetState(978) + _la = p.GetTokenStream().LA(1) - return t.(ISql_exprContext) -} + if !(_la == KuneiformParserPLUS || _la == KuneiformParserMINUS) { + p.GetErrorHandler().RecoverInline(p) + } else { + p.GetErrorHandler().ReportMatch(p) + p.Consume() + } + } + { + p.SetState(979) -func (s *Sql_expr_listContext) AllCOMMA() []antlr.TerminalNode { - return s.GetTokens(KuneiformParserCOMMA) -} + var _x = p.sql_expr(19) -func (s *Sql_expr_listContext) COMMA(i int) antlr.TerminalNode { - return s.GetToken(KuneiformParserCOMMA, i) -} + localctx.(*Arithmetic_sql_exprContext).right = _x + } -func (s *Sql_expr_listContext) GetRuleContext() antlr.RuleContext { - return s -} + case 3: + localctx = NewArithmetic_sql_exprContext(p, NewSql_exprContext(p, _parentctx, _parentState)) + localctx.(*Arithmetic_sql_exprContext).left = _prevctx -func (s *Sql_expr_listContext) ToStringTree(ruleNames []string, recog antlr.Recognizer) string { - return antlr.TreesStringTree(s, ruleNames, recog) -} + p.PushNewRecursionContext(localctx, _startState, KuneiformParserRULE_sql_expr) + p.SetState(980) -func (s *Sql_expr_listContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { - switch t := visitor.(type) { - case KuneiformParserVisitor: - return t.VisitSql_expr_list(s) + if !(p.Precpred(p.GetParserRuleContext(), 9)) { + p.SetError(antlr.NewFailedPredicateException(p, "p.Precpred(p.GetParserRuleContext(), 9)", "")) + goto errorExit + } + { + p.SetState(981) + p.Match(KuneiformParserCONCAT) + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } + } + { + p.SetState(982) - default: - return t.VisitChildren(s) - } -} + var _x = p.sql_expr(10) -func (p *KuneiformParser) Sql_expr_list() (localctx ISql_expr_listContext) { - localctx = NewSql_expr_listContext(p, p.GetParserRuleContext(), p.GetState()) - p.EnterRule(localctx, 92, KuneiformParserRULE_sql_expr_list) - var _la int + localctx.(*Arithmetic_sql_exprContext).right = _x + } - p.EnterOuterAlt(localctx, 1) - { - p.SetState(875) - p.sql_expr(0) - } - p.SetState(880) - p.GetErrorHandler().Sync(p) - if p.HasError() { - goto errorExit - } - _la = p.GetTokenStream().LA(1) + case 4: + localctx = NewLike_sql_exprContext(p, NewSql_exprContext(p, _parentctx, _parentState)) + localctx.(*Like_sql_exprContext).left = _prevctx - for _la == KuneiformParserCOMMA { - { - p.SetState(876) - p.Match(KuneiformParserCOMMA) - if p.HasError() { - // Recognition error - abort rule - goto errorExit - } - } - { - p.SetState(877) - p.sql_expr(0) - } + p.PushNewRecursionContext(localctx, _startState, KuneiformParserRULE_sql_expr) + p.SetState(983) - p.SetState(882) - p.GetErrorHandler().Sync(p) - if p.HasError() { - goto errorExit - } - _la = p.GetTokenStream().LA(1) - } + if !(p.Precpred(p.GetParserRuleContext(), 7)) { + p.SetError(antlr.NewFailedPredicateException(p, "p.Precpred(p.GetParserRuleContext(), 7)", "")) + goto errorExit + } + p.SetState(985) + p.GetErrorHandler().Sync(p) + if p.HasError() { + goto errorExit + } + 
_la = p.GetTokenStream().LA(1) -errorExit: - if p.HasError() { - v := p.GetError() - localctx.SetException(v) - p.GetErrorHandler().ReportError(p, v) - p.GetErrorHandler().Recover(p, v) - p.SetError(nil) - } - p.ExitRule() - return localctx - goto errorExit // Trick to prevent compiler error if the label is not used -} + if _la == KuneiformParserNOT { + { + p.SetState(984) + p.Match(KuneiformParserNOT) + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } + } -// ISql_function_callContext is an interface to support dynamic dispatch. -type ISql_function_callContext interface { - antlr.ParserRuleContext + } + { + p.SetState(987) + _la = p.GetTokenStream().LA(1) - // GetParser returns the parser. - GetParser() antlr.Parser - // IsSql_function_callContext differentiates from other interfaces. - IsSql_function_callContext() -} + if !(_la == KuneiformParserLIKE || _la == KuneiformParserILIKE) { + p.GetErrorHandler().RecoverInline(p) + } else { + p.GetErrorHandler().ReportMatch(p) + p.Consume() + } + } + { + p.SetState(988) -type Sql_function_callContext struct { - antlr.BaseParserRuleContext - parser antlr.Parser -} + var _x = p.sql_expr(8) -func NewEmptySql_function_callContext() *Sql_function_callContext { - var p = new(Sql_function_callContext) - antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1) - p.RuleIndex = KuneiformParserRULE_sql_function_call - return p -} + localctx.(*Like_sql_exprContext).right = _x + } -func InitEmptySql_function_callContext(p *Sql_function_callContext) { - antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1) - p.RuleIndex = KuneiformParserRULE_sql_function_call -} + case 5: + localctx = NewBetween_sql_exprContext(p, NewSql_exprContext(p, _parentctx, _parentState)) + localctx.(*Between_sql_exprContext).element = _prevctx -func (*Sql_function_callContext) IsSql_function_callContext() {} + p.PushNewRecursionContext(localctx, _startState, KuneiformParserRULE_sql_expr) + p.SetState(989) -func NewSql_function_callContext(parser antlr.Parser, parent antlr.ParserRuleContext, invokingState int) *Sql_function_callContext { - var p = new(Sql_function_callContext) + if !(p.Precpred(p.GetParserRuleContext(), 6)) { + p.SetError(antlr.NewFailedPredicateException(p, "p.Precpred(p.GetParserRuleContext(), 6)", "")) + goto errorExit + } + p.SetState(991) + p.GetErrorHandler().Sync(p) + if p.HasError() { + goto errorExit + } + _la = p.GetTokenStream().LA(1) - antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, parent, invokingState) + if _la == KuneiformParserNOT { + { + p.SetState(990) + p.Match(KuneiformParserNOT) + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } + } - p.parser = parser - p.RuleIndex = KuneiformParserRULE_sql_function_call + } + { + p.SetState(993) + p.Match(KuneiformParserBETWEEN) + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } + } + { + p.SetState(994) - return p -} + var _x = p.sql_expr(0) -func (s *Sql_function_callContext) GetParser() antlr.Parser { return s.parser } + localctx.(*Between_sql_exprContext).lower = _x + } + { + p.SetState(995) + p.Match(KuneiformParserAND) + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } + } + { + p.SetState(996) -func (s *Sql_function_callContext) CopyAll(ctx *Sql_function_callContext) { - s.CopyFrom(&ctx.BaseParserRuleContext) -} + var _x = p.sql_expr(7) -func (s *Sql_function_callContext) GetRuleContext() antlr.RuleContext { - return s -} + localctx.(*Between_sql_exprContext).upper = _x 
+ } -func (s *Sql_function_callContext) ToStringTree(ruleNames []string, recog antlr.Recognizer) string { - return antlr.TreesStringTree(s, ruleNames, recog) -} + case 6: + localctx = NewComparison_sql_exprContext(p, NewSql_exprContext(p, _parentctx, _parentState)) + localctx.(*Comparison_sql_exprContext).left = _prevctx -type Normal_call_sqlContext struct { - Sql_function_callContext -} + p.PushNewRecursionContext(localctx, _startState, KuneiformParserRULE_sql_expr) + p.SetState(998) -func NewNormal_call_sqlContext(parser antlr.Parser, ctx antlr.ParserRuleContext) *Normal_call_sqlContext { - var p = new(Normal_call_sqlContext) + if !(p.Precpred(p.GetParserRuleContext(), 5)) { + p.SetError(antlr.NewFailedPredicateException(p, "p.Precpred(p.GetParserRuleContext(), 5)", "")) + goto errorExit + } + { + p.SetState(999) + _la = p.GetTokenStream().LA(1) - InitEmptySql_function_callContext(&p.Sql_function_callContext) - p.parser = parser - p.CopyAll(ctx.(*Sql_function_callContext)) + if !((int64(_la) & ^0x3f) == 0 && ((int64(1)<<_la)&260145152) != 0) { + p.GetErrorHandler().RecoverInline(p) + } else { + p.GetErrorHandler().ReportMatch(p) + p.Consume() + } + } + { + p.SetState(1000) - return p -} + var _x = p.sql_expr(6) -func (s *Normal_call_sqlContext) GetRuleContext() antlr.RuleContext { - return s -} + localctx.(*Comparison_sql_exprContext).right = _x + } -func (s *Normal_call_sqlContext) Identifier() IIdentifierContext { - var t antlr.RuleContext - for _, ctx := range s.GetChildren() { - if _, ok := ctx.(IIdentifierContext); ok { - t = ctx.(antlr.RuleContext) - break - } - } + case 7: + localctx = NewLogical_sql_exprContext(p, NewSql_exprContext(p, _parentctx, _parentState)) + localctx.(*Logical_sql_exprContext).left = _prevctx - if t == nil { - return nil - } + p.PushNewRecursionContext(localctx, _startState, KuneiformParserRULE_sql_expr) + p.SetState(1001) - return t.(IIdentifierContext) -} + if !(p.Precpred(p.GetParserRuleContext(), 2)) { + p.SetError(antlr.NewFailedPredicateException(p, "p.Precpred(p.GetParserRuleContext(), 2)", "")) + goto errorExit + } + { + p.SetState(1002) + p.Match(KuneiformParserAND) + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } + } + { + p.SetState(1003) -func (s *Normal_call_sqlContext) LPAREN() antlr.TerminalNode { - return s.GetToken(KuneiformParserLPAREN, 0) -} + var _x = p.sql_expr(3) -func (s *Normal_call_sqlContext) RPAREN() antlr.TerminalNode { - return s.GetToken(KuneiformParserRPAREN, 0) -} + localctx.(*Logical_sql_exprContext).right = _x + } -func (s *Normal_call_sqlContext) Sql_expr_list() ISql_expr_listContext { - var t antlr.RuleContext - for _, ctx := range s.GetChildren() { - if _, ok := ctx.(ISql_expr_listContext); ok { - t = ctx.(antlr.RuleContext) - break - } - } + case 8: + localctx = NewLogical_sql_exprContext(p, NewSql_exprContext(p, _parentctx, _parentState)) + localctx.(*Logical_sql_exprContext).left = _prevctx - if t == nil { - return nil - } + p.PushNewRecursionContext(localctx, _startState, KuneiformParserRULE_sql_expr) + p.SetState(1004) - return t.(ISql_expr_listContext) -} + if !(p.Precpred(p.GetParserRuleContext(), 1)) { + p.SetError(antlr.NewFailedPredicateException(p, "p.Precpred(p.GetParserRuleContext(), 1)", "")) + goto errorExit + } + { + p.SetState(1005) + p.Match(KuneiformParserOR) + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } + } + { + p.SetState(1006) -func (s *Normal_call_sqlContext) STAR() antlr.TerminalNode { - return s.GetToken(KuneiformParserSTAR, 0) -} + var 
_x = p.sql_expr(2) -func (s *Normal_call_sqlContext) DISTINCT() antlr.TerminalNode { - return s.GetToken(KuneiformParserDISTINCT, 0) -} + localctx.(*Logical_sql_exprContext).right = _x + } -func (s *Normal_call_sqlContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { - switch t := visitor.(type) { - case KuneiformParserVisitor: - return t.VisitNormal_call_sql(s) + case 9: + localctx = NewField_access_sql_exprContext(p, NewSql_exprContext(p, _parentctx, _parentState)) + p.PushNewRecursionContext(localctx, _startState, KuneiformParserRULE_sql_expr) + p.SetState(1007) - default: - return t.VisitChildren(s) - } -} + if !(p.Precpred(p.GetParserRuleContext(), 23)) { + p.SetError(antlr.NewFailedPredicateException(p, "p.Precpred(p.GetParserRuleContext(), 23)", "")) + goto errorExit + } + { + p.SetState(1008) + p.Match(KuneiformParserPERIOD) + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } + } + { + p.SetState(1009) + p.Identifier() + } + p.SetState(1011) + p.GetErrorHandler().Sync(p) -type Foreign_call_sqlContext struct { - Sql_function_callContext - dbid ISql_exprContext - procedure ISql_exprContext -} + if p.GetInterpreter().AdaptivePredict(p.BaseParser, p.GetTokenStream(), 134, p.GetParserRuleContext()) == 1 { + { + p.SetState(1010) + p.Type_cast() + } -func NewForeign_call_sqlContext(parser antlr.Parser, ctx antlr.ParserRuleContext) *Foreign_call_sqlContext { - var p = new(Foreign_call_sqlContext) + } else if p.HasError() { // JIM + goto errorExit + } - InitEmptySql_function_callContext(&p.Sql_function_callContext) - p.parser = parser - p.CopyAll(ctx.(*Sql_function_callContext)) + case 10: + localctx = NewArray_access_sql_exprContext(p, NewSql_exprContext(p, _parentctx, _parentState)) + localctx.(*Array_access_sql_exprContext).array_element = _prevctx - return p -} + p.PushNewRecursionContext(localctx, _startState, KuneiformParserRULE_sql_expr) + p.SetState(1013) -func (s *Foreign_call_sqlContext) GetDbid() ISql_exprContext { return s.dbid } + if !(p.Precpred(p.GetParserRuleContext(), 22)) { + p.SetError(antlr.NewFailedPredicateException(p, "p.Precpred(p.GetParserRuleContext(), 22)", "")) + goto errorExit + } + { + p.SetState(1014) + p.Match(KuneiformParserLBRACKET) + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } + } + p.SetState(1023) + p.GetErrorHandler().Sync(p) + if p.HasError() { + goto errorExit + } -func (s *Foreign_call_sqlContext) GetProcedure() ISql_exprContext { return s.procedure } + switch p.GetInterpreter().AdaptivePredict(p.BaseParser, p.GetTokenStream(), 137, p.GetParserRuleContext()) { + case 1: + { + p.SetState(1015) -func (s *Foreign_call_sqlContext) SetDbid(v ISql_exprContext) { s.dbid = v } + var _x = p.sql_expr(0) -func (s *Foreign_call_sqlContext) SetProcedure(v ISql_exprContext) { s.procedure = v } + localctx.(*Array_access_sql_exprContext).single = _x + } -func (s *Foreign_call_sqlContext) GetRuleContext() antlr.RuleContext { - return s -} + case 2: + p.SetState(1017) + p.GetErrorHandler().Sync(p) + if p.HasError() { + goto errorExit + } + _la = p.GetTokenStream().LA(1) -func (s *Foreign_call_sqlContext) Identifier() IIdentifierContext { - var t antlr.RuleContext - for _, ctx := range s.GetChildren() { - if _, ok := ctx.(IIdentifierContext); ok { - t = ctx.(antlr.RuleContext) - break - } - } + if ((int64((_la-7)) & ^0x3f) == 0 && ((int64(1)<<(_la-7))&-9204794688358293503) != 0) || ((int64((_la-89)) & ^0x3f) == 0 && ((int64(1)<<(_la-89))&505496072823504897) != 0) { + { + p.SetState(1016) - if t == nil { - 
return nil - } + var _x = p.sql_expr(0) - return t.(IIdentifierContext) -} + localctx.(*Array_access_sql_exprContext).left = _x + } -func (s *Foreign_call_sqlContext) LBRACKET() antlr.TerminalNode { - return s.GetToken(KuneiformParserLBRACKET, 0) -} + } + { + p.SetState(1019) + p.Match(KuneiformParserCOL) + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } + } + p.SetState(1021) + p.GetErrorHandler().Sync(p) + if p.HasError() { + goto errorExit + } + _la = p.GetTokenStream().LA(1) -func (s *Foreign_call_sqlContext) COMMA() antlr.TerminalNode { - return s.GetToken(KuneiformParserCOMMA, 0) -} + if ((int64((_la-7)) & ^0x3f) == 0 && ((int64(1)<<(_la-7))&-9204794688358293503) != 0) || ((int64((_la-89)) & ^0x3f) == 0 && ((int64(1)<<(_la-89))&505496072823504897) != 0) { + { + p.SetState(1020) -func (s *Foreign_call_sqlContext) RBRACKET() antlr.TerminalNode { - return s.GetToken(KuneiformParserRBRACKET, 0) -} + var _x = p.sql_expr(0) -func (s *Foreign_call_sqlContext) LPAREN() antlr.TerminalNode { - return s.GetToken(KuneiformParserLPAREN, 0) -} + localctx.(*Array_access_sql_exprContext).right = _x + } -func (s *Foreign_call_sqlContext) RPAREN() antlr.TerminalNode { - return s.GetToken(KuneiformParserRPAREN, 0) -} + } -func (s *Foreign_call_sqlContext) AllSql_expr() []ISql_exprContext { - children := s.GetChildren() - len := 0 - for _, ctx := range children { - if _, ok := ctx.(ISql_exprContext); ok { - len++ - } - } + case antlr.ATNInvalidAltNumber: + goto errorExit + } + { + p.SetState(1025) + p.Match(KuneiformParserRBRACKET) + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } + } + p.SetState(1027) + p.GetErrorHandler().Sync(p) - tst := make([]ISql_exprContext, len) - i := 0 - for _, ctx := range children { - if t, ok := ctx.(ISql_exprContext); ok { - tst[i] = t.(ISql_exprContext) - i++ - } - } + if p.GetInterpreter().AdaptivePredict(p.BaseParser, p.GetTokenStream(), 138, p.GetParserRuleContext()) == 1 { + { + p.SetState(1026) + p.Type_cast() + } - return tst -} + } else if p.HasError() { // JIM + goto errorExit + } -func (s *Foreign_call_sqlContext) Sql_expr(i int) ISql_exprContext { - var t antlr.RuleContext - j := 0 - for _, ctx := range s.GetChildren() { - if _, ok := ctx.(ISql_exprContext); ok { - if j == i { - t = ctx.(antlr.RuleContext) - break - } - j++ - } - } + case 11: + localctx = NewCollate_sql_exprContext(p, NewSql_exprContext(p, _parentctx, _parentState)) + p.PushNewRecursionContext(localctx, _startState, KuneiformParserRULE_sql_expr) + p.SetState(1029) - if t == nil { - return nil - } + if !(p.Precpred(p.GetParserRuleContext(), 20)) { + p.SetError(antlr.NewFailedPredicateException(p, "p.Precpred(p.GetParserRuleContext(), 20)", "")) + goto errorExit + } + { + p.SetState(1030) + p.Match(KuneiformParserCOLLATE) + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } + } + { + p.SetState(1031) + p.Identifier() + } - return t.(ISql_exprContext) -} + case 12: + localctx = NewIn_sql_exprContext(p, NewSql_exprContext(p, _parentctx, _parentState)) + p.PushNewRecursionContext(localctx, _startState, KuneiformParserRULE_sql_expr) + p.SetState(1032) -func (s *Foreign_call_sqlContext) Sql_expr_list() ISql_expr_listContext { - var t antlr.RuleContext - for _, ctx := range s.GetChildren() { - if _, ok := ctx.(ISql_expr_listContext); ok { - t = ctx.(antlr.RuleContext) - break - } - } + if !(p.Precpred(p.GetParserRuleContext(), 8)) { + p.SetError(antlr.NewFailedPredicateException(p, "p.Precpred(p.GetParserRuleContext(), 8)", 
"")) + goto errorExit + } + p.SetState(1034) + p.GetErrorHandler().Sync(p) + if p.HasError() { + goto errorExit + } + _la = p.GetTokenStream().LA(1) - if t == nil { - return nil - } + if _la == KuneiformParserNOT { + { + p.SetState(1033) + p.Match(KuneiformParserNOT) + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } + } - return t.(ISql_expr_listContext) -} + } + { + p.SetState(1036) + p.Match(KuneiformParserIN) + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } + } + { + p.SetState(1037) + p.Match(KuneiformParserLPAREN) + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } + } + p.SetState(1040) + p.GetErrorHandler().Sync(p) + if p.HasError() { + goto errorExit + } -func (s *Foreign_call_sqlContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { - switch t := visitor.(type) { - case KuneiformParserVisitor: - return t.VisitForeign_call_sql(s) + switch p.GetTokenStream().LA(1) { + case KuneiformParserLPAREN, KuneiformParserPLUS, KuneiformParserMINUS, KuneiformParserDOUBLE_QUOTE, KuneiformParserNULL, KuneiformParserNOT, KuneiformParserEXISTS, KuneiformParserCASE, KuneiformParserARRAY, KuneiformParserSTRING_, KuneiformParserTRUE, KuneiformParserFALSE, KuneiformParserDIGITS_, KuneiformParserBINARY_, KuneiformParserIDENTIFIER, KuneiformParserVARIABLE, KuneiformParserCONTEXTUAL_VARIABLE: + { + p.SetState(1038) + p.Sql_expr_list() + } - default: - return t.VisitChildren(s) - } -} + case KuneiformParserSELECT: + { + p.SetState(1039) + p.Select_statement() + } -func (p *KuneiformParser) Sql_function_call() (localctx ISql_function_callContext) { - localctx = NewSql_function_callContext(p, p.GetParserRuleContext(), p.GetState()) - p.EnterRule(localctx, 94, KuneiformParserRULE_sql_function_call) - var _la int + default: + p.SetError(antlr.NewNoViableAltException(p, nil, nil, nil, nil, nil)) + goto errorExit + } + { + p.SetState(1042) + p.Match(KuneiformParserRPAREN) + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } + } - p.SetState(906) - p.GetErrorHandler().Sync(p) - if p.HasError() { - goto errorExit - } + case 13: + localctx = NewIs_sql_exprContext(p, NewSql_exprContext(p, _parentctx, _parentState)) + localctx.(*Is_sql_exprContext).left = _prevctx - switch p.GetInterpreter().AdaptivePredict(p.BaseParser, p.GetTokenStream(), 127, p.GetParserRuleContext()) { - case 1: - localctx = NewNormal_call_sqlContext(p, localctx) - p.EnterOuterAlt(localctx, 1) - { - p.SetState(883) - p.Identifier() - } - { - p.SetState(884) - p.Match(KuneiformParserLPAREN) - if p.HasError() { - // Recognition error - abort rule - goto errorExit - } - } - p.SetState(890) - p.GetErrorHandler().Sync(p) - if p.HasError() { - goto errorExit - } - switch p.GetTokenStream().LA(1) { - case KuneiformParserLPAREN, KuneiformParserPLUS, KuneiformParserMINUS, KuneiformParserDOUBLE_QUOTE, KuneiformParserNULL, KuneiformParserNOT, KuneiformParserEXISTS, KuneiformParserCASE, KuneiformParserDISTINCT, KuneiformParserSTRING_, KuneiformParserTRUE, KuneiformParserFALSE, KuneiformParserDIGITS_, KuneiformParserBINARY_, KuneiformParserIDENTIFIER, KuneiformParserVARIABLE, KuneiformParserCONTEXTUAL_VARIABLE: - p.SetState(886) - p.GetErrorHandler().Sync(p) - if p.HasError() { - goto errorExit - } - _la = p.GetTokenStream().LA(1) + p.PushNewRecursionContext(localctx, _startState, KuneiformParserRULE_sql_expr) + p.SetState(1044) - if _la == KuneiformParserDISTINCT { + if !(p.Precpred(p.GetParserRuleContext(), 4)) { + 
p.SetError(antlr.NewFailedPredicateException(p, "p.Precpred(p.GetParserRuleContext(), 4)", "")) + goto errorExit + } { - p.SetState(885) - p.Match(KuneiformParserDISTINCT) + p.SetState(1045) + p.Match(KuneiformParserIS) if p.HasError() { // Recognition error - abort rule goto errorExit } } + p.SetState(1047) + p.GetErrorHandler().Sync(p) + if p.HasError() { + goto errorExit + } + _la = p.GetTokenStream().LA(1) - } - { - p.SetState(888) - p.Sql_expr_list() - } + if _la == KuneiformParserNOT { + { + p.SetState(1046) + p.Match(KuneiformParserNOT) + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } + } - case KuneiformParserSTAR: - { - p.SetState(889) - p.Match(KuneiformParserSTAR) + } + p.SetState(1055) + p.GetErrorHandler().Sync(p) if p.HasError() { - // Recognition error - abort rule goto errorExit } - } - case KuneiformParserRPAREN: + switch p.GetTokenStream().LA(1) { + case KuneiformParserDISTINCT: + { + p.SetState(1049) + p.Match(KuneiformParserDISTINCT) + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } + } + { + p.SetState(1050) + p.Match(KuneiformParserFROM) + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } + } + { + p.SetState(1051) - default: - } - { - p.SetState(892) - p.Match(KuneiformParserRPAREN) - if p.HasError() { - // Recognition error - abort rule - goto errorExit - } - } + var _x = p.sql_expr(0) - case 2: - localctx = NewForeign_call_sqlContext(p, localctx) - p.EnterOuterAlt(localctx, 2) - { - p.SetState(894) - p.Identifier() - } - { - p.SetState(895) - p.Match(KuneiformParserLBRACKET) - if p.HasError() { - // Recognition error - abort rule - goto errorExit - } - } - { - p.SetState(896) + localctx.(*Is_sql_exprContext).right = _x + } - var _x = p.sql_expr(0) + case KuneiformParserNULL: + { + p.SetState(1052) + p.Match(KuneiformParserNULL) + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } + } - localctx.(*Foreign_call_sqlContext).dbid = _x - } - { - p.SetState(897) - p.Match(KuneiformParserCOMMA) - if p.HasError() { - // Recognition error - abort rule - goto errorExit - } - } - { - p.SetState(898) + case KuneiformParserTRUE: + { + p.SetState(1053) + p.Match(KuneiformParserTRUE) + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } + } - var _x = p.sql_expr(0) + case KuneiformParserFALSE: + { + p.SetState(1054) + p.Match(KuneiformParserFALSE) + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } + } - localctx.(*Foreign_call_sqlContext).procedure = _x - } - { - p.SetState(899) - p.Match(KuneiformParserRBRACKET) - if p.HasError() { - // Recognition error - abort rule - goto errorExit - } - } - { - p.SetState(900) - p.Match(KuneiformParserLPAREN) - if p.HasError() { - // Recognition error - abort rule + default: + p.SetError(antlr.NewNoViableAltException(p, nil, nil, nil, nil, nil)) + goto errorExit + } + + case antlr.ATNInvalidAltNumber: goto errorExit } + } - p.SetState(902) + p.SetState(1061) p.GetErrorHandler().Sync(p) if p.HasError() { goto errorExit } - _la = p.GetTokenStream().LA(1) - - if ((int64(_la) & ^0x3f) == 0 && ((int64(1)<<_la)&148618792001339520) != 0) || ((int64((_la-66)) & ^0x3f) == 0 && ((int64(1)<<(_la-66))&4043950990402519041) != 0) { - { - p.SetState(901) - p.Sql_expr_list() - } - - } - { - p.SetState(904) - p.Match(KuneiformParserRPAREN) - if p.HasError() { - // Recognition error - abort rule - goto errorExit - } + _alt = p.GetInterpreter().AdaptivePredict(p.BaseParser, p.GetTokenStream(), 144, 
p.GetParserRuleContext()) + if p.HasError() { + goto errorExit } - - case antlr.ATNInvalidAltNumber: - goto errorExit } errorExit: @@ -15627,74 +18090,116 @@ errorExit: p.GetErrorHandler().Recover(p, v) p.SetError(nil) } - p.ExitRule() + p.UnrollRecursionContexts(_parentctx) return localctx goto errorExit // Trick to prevent compiler error if the label is not used } -// IAction_blockContext is an interface to support dynamic dispatch. -type IAction_blockContext interface { +// IWindowContext is an interface to support dynamic dispatch. +type IWindowContext interface { antlr.ParserRuleContext // GetParser returns the parser. GetParser() antlr.Parser + // GetPartition returns the partition rule contexts. + GetPartition() ISql_expr_listContext + + // SetPartition sets the partition rule contexts. + SetPartition(ISql_expr_listContext) + // Getter signatures - AllAction_statement() []IAction_statementContext - Action_statement(i int) IAction_statementContext - AllSCOL() []antlr.TerminalNode - SCOL(i int) antlr.TerminalNode + LPAREN() antlr.TerminalNode + RPAREN() antlr.TerminalNode + PARTITION() antlr.TerminalNode + AllBY() []antlr.TerminalNode + BY(i int) antlr.TerminalNode + ORDER() antlr.TerminalNode + AllOrdering_term() []IOrdering_termContext + Ordering_term(i int) IOrdering_termContext + Sql_expr_list() ISql_expr_listContext + AllCOMMA() []antlr.TerminalNode + COMMA(i int) antlr.TerminalNode - // IsAction_blockContext differentiates from other interfaces. - IsAction_blockContext() + // IsWindowContext differentiates from other interfaces. + IsWindowContext() } -type Action_blockContext struct { +type WindowContext struct { antlr.BaseParserRuleContext - parser antlr.Parser + parser antlr.Parser + partition ISql_expr_listContext } -func NewEmptyAction_blockContext() *Action_blockContext { - var p = new(Action_blockContext) +func NewEmptyWindowContext() *WindowContext { + var p = new(WindowContext) antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1) - p.RuleIndex = KuneiformParserRULE_action_block + p.RuleIndex = KuneiformParserRULE_window return p } -func InitEmptyAction_blockContext(p *Action_blockContext) { +func InitEmptyWindowContext(p *WindowContext) { antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1) - p.RuleIndex = KuneiformParserRULE_action_block + p.RuleIndex = KuneiformParserRULE_window } -func (*Action_blockContext) IsAction_blockContext() {} +func (*WindowContext) IsWindowContext() {} -func NewAction_blockContext(parser antlr.Parser, parent antlr.ParserRuleContext, invokingState int) *Action_blockContext { - var p = new(Action_blockContext) +func NewWindowContext(parser antlr.Parser, parent antlr.ParserRuleContext, invokingState int) *WindowContext { + var p = new(WindowContext) + + antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, parent, invokingState) + + p.parser = parser + p.RuleIndex = KuneiformParserRULE_window + + return p +} + +func (s *WindowContext) GetParser() antlr.Parser { return s.parser } + +func (s *WindowContext) GetPartition() ISql_expr_listContext { return s.partition } + +func (s *WindowContext) SetPartition(v ISql_expr_listContext) { s.partition = v } + +func (s *WindowContext) LPAREN() antlr.TerminalNode { + return s.GetToken(KuneiformParserLPAREN, 0) +} + +func (s *WindowContext) RPAREN() antlr.TerminalNode { + return s.GetToken(KuneiformParserRPAREN, 0) +} - antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, parent, invokingState) +func (s *WindowContext) PARTITION() antlr.TerminalNode { + return 
s.GetToken(KuneiformParserPARTITION, 0) +} - p.parser = parser - p.RuleIndex = KuneiformParserRULE_action_block +func (s *WindowContext) AllBY() []antlr.TerminalNode { + return s.GetTokens(KuneiformParserBY) +} - return p +func (s *WindowContext) BY(i int) antlr.TerminalNode { + return s.GetToken(KuneiformParserBY, i) } -func (s *Action_blockContext) GetParser() antlr.Parser { return s.parser } +func (s *WindowContext) ORDER() antlr.TerminalNode { + return s.GetToken(KuneiformParserORDER, 0) +} -func (s *Action_blockContext) AllAction_statement() []IAction_statementContext { +func (s *WindowContext) AllOrdering_term() []IOrdering_termContext { children := s.GetChildren() len := 0 for _, ctx := range children { - if _, ok := ctx.(IAction_statementContext); ok { + if _, ok := ctx.(IOrdering_termContext); ok { len++ } } - tst := make([]IAction_statementContext, len) + tst := make([]IOrdering_termContext, len) i := 0 for _, ctx := range children { - if t, ok := ctx.(IAction_statementContext); ok { - tst[i] = t.(IAction_statementContext) + if t, ok := ctx.(IOrdering_termContext); ok { + tst[i] = t.(IOrdering_termContext) i++ } } @@ -15702,11 +18207,11 @@ func (s *Action_blockContext) AllAction_statement() []IAction_statementContext { return tst } -func (s *Action_blockContext) Action_statement(i int) IAction_statementContext { +func (s *WindowContext) Ordering_term(i int) IOrdering_termContext { var t antlr.RuleContext j := 0 for _, ctx := range s.GetChildren() { - if _, ok := ctx.(IAction_statementContext); ok { + if _, ok := ctx.(IOrdering_termContext); ok { if j == i { t = ctx.(antlr.RuleContext) break @@ -15719,68 +18224,163 @@ func (s *Action_blockContext) Action_statement(i int) IAction_statementContext { return nil } - return t.(IAction_statementContext) + return t.(IOrdering_termContext) } -func (s *Action_blockContext) AllSCOL() []antlr.TerminalNode { - return s.GetTokens(KuneiformParserSCOL) +func (s *WindowContext) Sql_expr_list() ISql_expr_listContext { + var t antlr.RuleContext + for _, ctx := range s.GetChildren() { + if _, ok := ctx.(ISql_expr_listContext); ok { + t = ctx.(antlr.RuleContext) + break + } + } + + if t == nil { + return nil + } + + return t.(ISql_expr_listContext) } -func (s *Action_blockContext) SCOL(i int) antlr.TerminalNode { - return s.GetToken(KuneiformParserSCOL, i) +func (s *WindowContext) AllCOMMA() []antlr.TerminalNode { + return s.GetTokens(KuneiformParserCOMMA) +} + +func (s *WindowContext) COMMA(i int) antlr.TerminalNode { + return s.GetToken(KuneiformParserCOMMA, i) } -func (s *Action_blockContext) GetRuleContext() antlr.RuleContext { +func (s *WindowContext) GetRuleContext() antlr.RuleContext { return s } -func (s *Action_blockContext) ToStringTree(ruleNames []string, recog antlr.Recognizer) string { +func (s *WindowContext) ToStringTree(ruleNames []string, recog antlr.Recognizer) string { return antlr.TreesStringTree(s, ruleNames, recog) } -func (s *Action_blockContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { +func (s *WindowContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { switch t := visitor.(type) { case KuneiformParserVisitor: - return t.VisitAction_block(s) + return t.VisitWindow(s) default: return t.VisitChildren(s) } } -func (p *KuneiformParser) Action_block() (localctx IAction_blockContext) { - localctx = NewAction_blockContext(p, p.GetParserRuleContext(), p.GetState()) - p.EnterRule(localctx, 96, KuneiformParserRULE_action_block) +func (p *KuneiformParser) Window() (localctx IWindowContext) { + localctx = 
NewWindowContext(p, p.GetParserRuleContext(), p.GetState()) + p.EnterRule(localctx, 102, KuneiformParserRULE_window) var _la int p.EnterOuterAlt(localctx, 1) - p.SetState(913) + { + p.SetState(1062) + p.Match(KuneiformParserLPAREN) + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } + } + p.SetState(1066) p.GetErrorHandler().Sync(p) if p.HasError() { goto errorExit } _la = p.GetTokenStream().LA(1) - for _la == KuneiformParserDELETE || _la == KuneiformParserUPDATE || ((int64((_la-84)) & ^0x3f) == 0 && ((int64(1)<<(_la-84))&15393162790401) != 0) { + if _la == KuneiformParserPARTITION { { - p.SetState(908) - p.Action_statement() + p.SetState(1063) + p.Match(KuneiformParserPARTITION) + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } } { - p.SetState(909) - p.Match(KuneiformParserSCOL) + p.SetState(1064) + p.Match(KuneiformParserBY) if p.HasError() { // Recognition error - abort rule goto errorExit } } + { + p.SetState(1065) - p.SetState(915) + var _x = p.Sql_expr_list() + + localctx.(*WindowContext).partition = _x + } + + } + p.SetState(1078) + p.GetErrorHandler().Sync(p) + if p.HasError() { + goto errorExit + } + _la = p.GetTokenStream().LA(1) + + if _la == KuneiformParserORDER { + { + p.SetState(1068) + p.Match(KuneiformParserORDER) + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } + } + { + p.SetState(1069) + p.Match(KuneiformParserBY) + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } + } + { + p.SetState(1070) + p.Ordering_term() + } + p.SetState(1075) p.GetErrorHandler().Sync(p) if p.HasError() { goto errorExit } _la = p.GetTokenStream().LA(1) + + for _la == KuneiformParserCOMMA { + { + p.SetState(1071) + p.Match(KuneiformParserCOMMA) + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } + } + { + p.SetState(1072) + p.Ordering_term() + } + + p.SetState(1077) + p.GetErrorHandler().Sync(p) + if p.HasError() { + goto errorExit + } + _la = p.GetTokenStream().LA(1) + } + + } + { + p.SetState(1080) + p.Match(KuneiformParserRPAREN) + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } } errorExit: @@ -15796,180 +18396,272 @@ errorExit: goto errorExit // Trick to prevent compiler error if the label is not used } -// IAction_statementContext is an interface to support dynamic dispatch. -type IAction_statementContext interface { +// IWhen_then_clauseContext is an interface to support dynamic dispatch. +type IWhen_then_clauseContext interface { antlr.ParserRuleContext // GetParser returns the parser. GetParser() antlr.Parser - // IsAction_statementContext differentiates from other interfaces. - IsAction_statementContext() + + // GetWhen_condition returns the when_condition rule contexts. + GetWhen_condition() ISql_exprContext + + // GetThen returns the then rule contexts. + GetThen() ISql_exprContext + + // SetWhen_condition sets the when_condition rule contexts. + SetWhen_condition(ISql_exprContext) + + // SetThen sets the then rule contexts. + SetThen(ISql_exprContext) + + // Getter signatures + WHEN() antlr.TerminalNode + THEN() antlr.TerminalNode + AllSql_expr() []ISql_exprContext + Sql_expr(i int) ISql_exprContext + + // IsWhen_then_clauseContext differentiates from other interfaces. 
+ IsWhen_then_clauseContext() } -type Action_statementContext struct { +type When_then_clauseContext struct { antlr.BaseParserRuleContext - parser antlr.Parser + parser antlr.Parser + when_condition ISql_exprContext + then ISql_exprContext } -func NewEmptyAction_statementContext() *Action_statementContext { - var p = new(Action_statementContext) +func NewEmptyWhen_then_clauseContext() *When_then_clauseContext { + var p = new(When_then_clauseContext) antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1) - p.RuleIndex = KuneiformParserRULE_action_statement + p.RuleIndex = KuneiformParserRULE_when_then_clause return p } -func InitEmptyAction_statementContext(p *Action_statementContext) { +func InitEmptyWhen_then_clauseContext(p *When_then_clauseContext) { antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1) - p.RuleIndex = KuneiformParserRULE_action_statement + p.RuleIndex = KuneiformParserRULE_when_then_clause } -func (*Action_statementContext) IsAction_statementContext() {} +func (*When_then_clauseContext) IsWhen_then_clauseContext() {} -func NewAction_statementContext(parser antlr.Parser, parent antlr.ParserRuleContext, invokingState int) *Action_statementContext { - var p = new(Action_statementContext) +func NewWhen_then_clauseContext(parser antlr.Parser, parent antlr.ParserRuleContext, invokingState int) *When_then_clauseContext { + var p = new(When_then_clauseContext) antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, parent, invokingState) p.parser = parser - p.RuleIndex = KuneiformParserRULE_action_statement + p.RuleIndex = KuneiformParserRULE_when_then_clause return p } -func (s *Action_statementContext) GetParser() antlr.Parser { return s.parser } +func (s *When_then_clauseContext) GetParser() antlr.Parser { return s.parser } -func (s *Action_statementContext) CopyAll(ctx *Action_statementContext) { - s.CopyFrom(&ctx.BaseParserRuleContext) -} +func (s *When_then_clauseContext) GetWhen_condition() ISql_exprContext { return s.when_condition } -func (s *Action_statementContext) GetRuleContext() antlr.RuleContext { - return s +func (s *When_then_clauseContext) GetThen() ISql_exprContext { return s.then } + +func (s *When_then_clauseContext) SetWhen_condition(v ISql_exprContext) { s.when_condition = v } + +func (s *When_then_clauseContext) SetThen(v ISql_exprContext) { s.then = v } + +func (s *When_then_clauseContext) WHEN() antlr.TerminalNode { + return s.GetToken(KuneiformParserWHEN, 0) } -func (s *Action_statementContext) ToStringTree(ruleNames []string, recog antlr.Recognizer) string { - return antlr.TreesStringTree(s, ruleNames, recog) +func (s *When_then_clauseContext) THEN() antlr.TerminalNode { + return s.GetToken(KuneiformParserTHEN, 0) } -type Extension_actionContext struct { - Action_statementContext +func (s *When_then_clauseContext) AllSql_expr() []ISql_exprContext { + children := s.GetChildren() + len := 0 + for _, ctx := range children { + if _, ok := ctx.(ISql_exprContext); ok { + len++ + } + } + + tst := make([]ISql_exprContext, len) + i := 0 + for _, ctx := range children { + if t, ok := ctx.(ISql_exprContext); ok { + tst[i] = t.(ISql_exprContext) + i++ + } + } + + return tst } -func NewExtension_actionContext(parser antlr.Parser, ctx antlr.ParserRuleContext) *Extension_actionContext { - var p = new(Extension_actionContext) +func (s *When_then_clauseContext) Sql_expr(i int) ISql_exprContext { + var t antlr.RuleContext + j := 0 + for _, ctx := range s.GetChildren() { + if _, ok := ctx.(ISql_exprContext); ok { + if j == i { + t = 
ctx.(antlr.RuleContext) + break + } + j++ + } + } - InitEmptyAction_statementContext(&p.Action_statementContext) - p.parser = parser - p.CopyAll(ctx.(*Action_statementContext)) + if t == nil { + return nil + } - return p + return t.(ISql_exprContext) } -func (s *Extension_actionContext) GetRuleContext() antlr.RuleContext { +func (s *When_then_clauseContext) GetRuleContext() antlr.RuleContext { return s } -func (s *Extension_actionContext) AllIDENTIFIER() []antlr.TerminalNode { - return s.GetTokens(KuneiformParserIDENTIFIER) +func (s *When_then_clauseContext) ToStringTree(ruleNames []string, recog antlr.Recognizer) string { + return antlr.TreesStringTree(s, ruleNames, recog) } -func (s *Extension_actionContext) IDENTIFIER(i int) antlr.TerminalNode { - return s.GetToken(KuneiformParserIDENTIFIER, i) -} +func (s *When_then_clauseContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { + switch t := visitor.(type) { + case KuneiformParserVisitor: + return t.VisitWhen_then_clause(s) -func (s *Extension_actionContext) PERIOD() antlr.TerminalNode { - return s.GetToken(KuneiformParserPERIOD, 0) + default: + return t.VisitChildren(s) + } } -func (s *Extension_actionContext) LPAREN() antlr.TerminalNode { - return s.GetToken(KuneiformParserLPAREN, 0) -} +func (p *KuneiformParser) When_then_clause() (localctx IWhen_then_clauseContext) { + localctx = NewWhen_then_clauseContext(p, p.GetParserRuleContext(), p.GetState()) + p.EnterRule(localctx, 104, KuneiformParserRULE_when_then_clause) + p.EnterOuterAlt(localctx, 1) + { + p.SetState(1082) + p.Match(KuneiformParserWHEN) + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } + } + { + p.SetState(1083) -func (s *Extension_actionContext) RPAREN() antlr.TerminalNode { - return s.GetToken(KuneiformParserRPAREN, 0) -} + var _x = p.sql_expr(0) -func (s *Extension_actionContext) Variable_list() IVariable_listContext { - var t antlr.RuleContext - for _, ctx := range s.GetChildren() { - if _, ok := ctx.(IVariable_listContext); ok { - t = ctx.(antlr.RuleContext) - break + localctx.(*When_then_clauseContext).when_condition = _x + } + { + p.SetState(1084) + p.Match(KuneiformParserTHEN) + if p.HasError() { + // Recognition error - abort rule + goto errorExit } } + { + p.SetState(1085) - if t == nil { - return nil + var _x = p.sql_expr(0) + + localctx.(*When_then_clauseContext).then = _x } - return t.(IVariable_listContext) +errorExit: + if p.HasError() { + v := p.GetError() + localctx.SetException(v) + p.GetErrorHandler().ReportError(p, v) + p.GetErrorHandler().Recover(p, v) + p.SetError(nil) + } + p.ExitRule() + return localctx + goto errorExit // Trick to prevent compiler error if the label is not used } -func (s *Extension_actionContext) EQUALS() antlr.TerminalNode { - return s.GetToken(KuneiformParserEQUALS, 0) -} +// ISql_expr_listContext is an interface to support dynamic dispatch. +type ISql_expr_listContext interface { + antlr.ParserRuleContext -func (s *Extension_actionContext) Procedure_expr_list() IProcedure_expr_listContext { - var t antlr.RuleContext - for _, ctx := range s.GetChildren() { - if _, ok := ctx.(IProcedure_expr_listContext); ok { - t = ctx.(antlr.RuleContext) - break - } - } + // GetParser returns the parser. 
+ GetParser() antlr.Parser - if t == nil { - return nil - } + // Getter signatures + AllSql_expr() []ISql_exprContext + Sql_expr(i int) ISql_exprContext + AllCOMMA() []antlr.TerminalNode + COMMA(i int) antlr.TerminalNode - return t.(IProcedure_expr_listContext) + // IsSql_expr_listContext differentiates from other interfaces. + IsSql_expr_listContext() } -func (s *Extension_actionContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { - switch t := visitor.(type) { - case KuneiformParserVisitor: - return t.VisitExtension_action(s) +type Sql_expr_listContext struct { + antlr.BaseParserRuleContext + parser antlr.Parser +} - default: - return t.VisitChildren(s) - } +func NewEmptySql_expr_listContext() *Sql_expr_listContext { + var p = new(Sql_expr_listContext) + antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1) + p.RuleIndex = KuneiformParserRULE_sql_expr_list + return p } -type Local_actionContext struct { - Action_statementContext +func InitEmptySql_expr_listContext(p *Sql_expr_listContext) { + antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1) + p.RuleIndex = KuneiformParserRULE_sql_expr_list } -func NewLocal_actionContext(parser antlr.Parser, ctx antlr.ParserRuleContext) *Local_actionContext { - var p = new(Local_actionContext) +func (*Sql_expr_listContext) IsSql_expr_listContext() {} + +func NewSql_expr_listContext(parser antlr.Parser, parent antlr.ParserRuleContext, invokingState int) *Sql_expr_listContext { + var p = new(Sql_expr_listContext) + + antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, parent, invokingState) - InitEmptyAction_statementContext(&p.Action_statementContext) p.parser = parser - p.CopyAll(ctx.(*Action_statementContext)) + p.RuleIndex = KuneiformParserRULE_sql_expr_list return p } -func (s *Local_actionContext) GetRuleContext() antlr.RuleContext { - return s -} +func (s *Sql_expr_listContext) GetParser() antlr.Parser { return s.parser } -func (s *Local_actionContext) IDENTIFIER() antlr.TerminalNode { - return s.GetToken(KuneiformParserIDENTIFIER, 0) -} +func (s *Sql_expr_listContext) AllSql_expr() []ISql_exprContext { + children := s.GetChildren() + len := 0 + for _, ctx := range children { + if _, ok := ctx.(ISql_exprContext); ok { + len++ + } + } -func (s *Local_actionContext) LPAREN() antlr.TerminalNode { - return s.GetToken(KuneiformParserLPAREN, 0) -} + tst := make([]ISql_exprContext, len) + i := 0 + for _, ctx := range children { + if t, ok := ctx.(ISql_exprContext); ok { + tst[i] = t.(ISql_exprContext) + i++ + } + } -func (s *Local_actionContext) RPAREN() antlr.TerminalNode { - return s.GetToken(KuneiformParserRPAREN, 0) + return tst } -func (s *Local_actionContext) Procedure_expr_list() IProcedure_expr_listContext { +func (s *Sql_expr_listContext) Sql_expr(i int) ISql_exprContext { var t antlr.RuleContext + j := 0 for _, ctx := range s.GetChildren() { - if _, ok := ctx.(IProcedure_expr_listContext); ok { - t = ctx.(antlr.RuleContext) - break + if _, ok := ctx.(ISql_exprContext); ok { + if j == i { + t = ctx.(antlr.RuleContext) + break + } + j++ } } @@ -15977,207 +18669,72 @@ func (s *Local_actionContext) Procedure_expr_list() IProcedure_expr_listContext return nil } - return t.(IProcedure_expr_listContext) -} - -func (s *Local_actionContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { - switch t := visitor.(type) { - case KuneiformParserVisitor: - return t.VisitLocal_action(s) - - default: - return t.VisitChildren(s) - } + return t.(ISql_exprContext) } -type Sql_actionContext struct { - 
Action_statementContext +func (s *Sql_expr_listContext) AllCOMMA() []antlr.TerminalNode { + return s.GetTokens(KuneiformParserCOMMA) } -func NewSql_actionContext(parser antlr.Parser, ctx antlr.ParserRuleContext) *Sql_actionContext { - var p = new(Sql_actionContext) - - InitEmptyAction_statementContext(&p.Action_statementContext) - p.parser = parser - p.CopyAll(ctx.(*Action_statementContext)) - - return p +func (s *Sql_expr_listContext) COMMA(i int) antlr.TerminalNode { + return s.GetToken(KuneiformParserCOMMA, i) } -func (s *Sql_actionContext) GetRuleContext() antlr.RuleContext { +func (s *Sql_expr_listContext) GetRuleContext() antlr.RuleContext { return s } -func (s *Sql_actionContext) Sql_statement() ISql_statementContext { - var t antlr.RuleContext - for _, ctx := range s.GetChildren() { - if _, ok := ctx.(ISql_statementContext); ok { - t = ctx.(antlr.RuleContext) - break - } - } - - if t == nil { - return nil - } - - return t.(ISql_statementContext) +func (s *Sql_expr_listContext) ToStringTree(ruleNames []string, recog antlr.Recognizer) string { + return antlr.TreesStringTree(s, ruleNames, recog) } -func (s *Sql_actionContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { +func (s *Sql_expr_listContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { switch t := visitor.(type) { case KuneiformParserVisitor: - return t.VisitSql_action(s) + return t.VisitSql_expr_list(s) default: return t.VisitChildren(s) } } -func (p *KuneiformParser) Action_statement() (localctx IAction_statementContext) { - localctx = NewAction_statementContext(p, p.GetParserRuleContext(), p.GetState()) - p.EnterRule(localctx, 98, KuneiformParserRULE_action_statement) +func (p *KuneiformParser) Sql_expr_list() (localctx ISql_expr_listContext) { + localctx = NewSql_expr_listContext(p, p.GetParserRuleContext(), p.GetState()) + p.EnterRule(localctx, 106, KuneiformParserRULE_sql_expr_list) var _la int - p.SetState(936) + p.EnterOuterAlt(localctx, 1) + { + p.SetState(1087) + p.sql_expr(0) + } + p.SetState(1092) p.GetErrorHandler().Sync(p) if p.HasError() { goto errorExit } + _la = p.GetTokenStream().LA(1) - switch p.GetInterpreter().AdaptivePredict(p.BaseParser, p.GetTokenStream(), 132, p.GetParserRuleContext()) { - case 1: - localctx = NewSql_actionContext(p, localctx) - p.EnterOuterAlt(localctx, 1) - { - p.SetState(916) - p.Sql_statement() - } - - case 2: - localctx = NewLocal_actionContext(p, localctx) - p.EnterOuterAlt(localctx, 2) - { - p.SetState(917) - p.Match(KuneiformParserIDENTIFIER) - if p.HasError() { - // Recognition error - abort rule - goto errorExit - } - } + for _la == KuneiformParserCOMMA { { - p.SetState(918) - p.Match(KuneiformParserLPAREN) + p.SetState(1088) + p.Match(KuneiformParserCOMMA) if p.HasError() { // Recognition error - abort rule goto errorExit } } - p.SetState(920) - p.GetErrorHandler().Sync(p) - if p.HasError() { - goto errorExit - } - _la = p.GetTokenStream().LA(1) - - if ((int64(_la) & ^0x3f) == 0 && ((int64(1)<<_la)&148618787706374280) != 0) || ((int64((_la-114)) & ^0x3f) == 0 && ((int64(1)<<(_la-114))&14367) != 0) { - { - p.SetState(919) - p.Procedure_expr_list() - } - - } { - p.SetState(922) - p.Match(KuneiformParserRPAREN) - if p.HasError() { - // Recognition error - abort rule - goto errorExit - } - } - - case 3: - localctx = NewExtension_actionContext(p, localctx) - p.EnterOuterAlt(localctx, 3) - p.SetState(926) - p.GetErrorHandler().Sync(p) - if p.HasError() { - goto errorExit + p.SetState(1089) + p.sql_expr(0) } - _la = p.GetTokenStream().LA(1) - - if _la == 
KuneiformParserVARIABLE || _la == KuneiformParserCONTEXTUAL_VARIABLE { - { - p.SetState(923) - p.Variable_list() - } - { - p.SetState(924) - p.Match(KuneiformParserEQUALS) - if p.HasError() { - // Recognition error - abort rule - goto errorExit - } - } - } - { - p.SetState(928) - p.Match(KuneiformParserIDENTIFIER) - if p.HasError() { - // Recognition error - abort rule - goto errorExit - } - } - { - p.SetState(929) - p.Match(KuneiformParserPERIOD) - if p.HasError() { - // Recognition error - abort rule - goto errorExit - } - } - { - p.SetState(930) - p.Match(KuneiformParserIDENTIFIER) - if p.HasError() { - // Recognition error - abort rule - goto errorExit - } - } - { - p.SetState(931) - p.Match(KuneiformParserLPAREN) - if p.HasError() { - // Recognition error - abort rule - goto errorExit - } - } - p.SetState(933) + p.SetState(1094) p.GetErrorHandler().Sync(p) if p.HasError() { goto errorExit } _la = p.GetTokenStream().LA(1) - - if ((int64(_la) & ^0x3f) == 0 && ((int64(1)<<_la)&148618787706374280) != 0) || ((int64((_la-114)) & ^0x3f) == 0 && ((int64(1)<<(_la-114))&14367) != 0) { - { - p.SetState(932) - p.Procedure_expr_list() - } - - } - { - p.SetState(935) - p.Match(KuneiformParserRPAREN) - if p.HasError() { - // Recognition error - abort rule - goto errorExit - } - } - - case antlr.ATNInvalidAltNumber: - goto errorExit } errorExit: @@ -16193,84 +18750,108 @@ errorExit: goto errorExit // Trick to prevent compiler error if the label is not used } -// IProcedure_blockContext is an interface to support dynamic dispatch. -type IProcedure_blockContext interface { +// ISql_function_callContext is an interface to support dynamic dispatch. +type ISql_function_callContext interface { antlr.ParserRuleContext // GetParser returns the parser. GetParser() antlr.Parser - - // Getter signatures - AllProc_statement() []IProc_statementContext - Proc_statement(i int) IProc_statementContext - - // IsProcedure_blockContext differentiates from other interfaces. - IsProcedure_blockContext() + // IsSql_function_callContext differentiates from other interfaces. 
+ IsSql_function_callContext() } -type Procedure_blockContext struct { +type Sql_function_callContext struct { antlr.BaseParserRuleContext parser antlr.Parser } -func NewEmptyProcedure_blockContext() *Procedure_blockContext { - var p = new(Procedure_blockContext) +func NewEmptySql_function_callContext() *Sql_function_callContext { + var p = new(Sql_function_callContext) antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1) - p.RuleIndex = KuneiformParserRULE_procedure_block + p.RuleIndex = KuneiformParserRULE_sql_function_call return p } -func InitEmptyProcedure_blockContext(p *Procedure_blockContext) { +func InitEmptySql_function_callContext(p *Sql_function_callContext) { antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1) - p.RuleIndex = KuneiformParserRULE_procedure_block + p.RuleIndex = KuneiformParserRULE_sql_function_call } -func (*Procedure_blockContext) IsProcedure_blockContext() {} +func (*Sql_function_callContext) IsSql_function_callContext() {} -func NewProcedure_blockContext(parser antlr.Parser, parent antlr.ParserRuleContext, invokingState int) *Procedure_blockContext { - var p = new(Procedure_blockContext) +func NewSql_function_callContext(parser antlr.Parser, parent antlr.ParserRuleContext, invokingState int) *Sql_function_callContext { + var p = new(Sql_function_callContext) antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, parent, invokingState) p.parser = parser - p.RuleIndex = KuneiformParserRULE_procedure_block + p.RuleIndex = KuneiformParserRULE_sql_function_call return p } -func (s *Procedure_blockContext) GetParser() antlr.Parser { return s.parser } +func (s *Sql_function_callContext) GetParser() antlr.Parser { return s.parser } + +func (s *Sql_function_callContext) CopyAll(ctx *Sql_function_callContext) { + s.CopyFrom(&ctx.BaseParserRuleContext) +} + +func (s *Sql_function_callContext) GetRuleContext() antlr.RuleContext { + return s +} + +func (s *Sql_function_callContext) ToStringTree(ruleNames []string, recog antlr.Recognizer) string { + return antlr.TreesStringTree(s, ruleNames, recog) +} -func (s *Procedure_blockContext) AllProc_statement() []IProc_statementContext { - children := s.GetChildren() - len := 0 - for _, ctx := range children { - if _, ok := ctx.(IProc_statementContext); ok { - len++ +type Normal_call_sqlContext struct { + Sql_function_callContext +} + +func NewNormal_call_sqlContext(parser antlr.Parser, ctx antlr.ParserRuleContext) *Normal_call_sqlContext { + var p = new(Normal_call_sqlContext) + + InitEmptySql_function_callContext(&p.Sql_function_callContext) + p.parser = parser + p.CopyAll(ctx.(*Sql_function_callContext)) + + return p +} + +func (s *Normal_call_sqlContext) GetRuleContext() antlr.RuleContext { + return s +} + +func (s *Normal_call_sqlContext) Identifier() IIdentifierContext { + var t antlr.RuleContext + for _, ctx := range s.GetChildren() { + if _, ok := ctx.(IIdentifierContext); ok { + t = ctx.(antlr.RuleContext) + break } } - tst := make([]IProc_statementContext, len) - i := 0 - for _, ctx := range children { - if t, ok := ctx.(IProc_statementContext); ok { - tst[i] = t.(IProc_statementContext) - i++ - } + if t == nil { + return nil } - return tst + return t.(IIdentifierContext) } -func (s *Procedure_blockContext) Proc_statement(i int) IProc_statementContext { +func (s *Normal_call_sqlContext) LPAREN() antlr.TerminalNode { + return s.GetToken(KuneiformParserLPAREN, 0) +} + +func (s *Normal_call_sqlContext) RPAREN() antlr.TerminalNode { + return s.GetToken(KuneiformParserRPAREN, 0) +} + +func (s 
*Normal_call_sqlContext) Sql_expr_list() ISql_expr_listContext { var t antlr.RuleContext - j := 0 for _, ctx := range s.GetChildren() { - if _, ok := ctx.(IProc_statementContext); ok { - if j == i { - t = ctx.(antlr.RuleContext) - break - } - j++ + if _, ok := ctx.(ISql_expr_listContext); ok { + t = ctx.(antlr.RuleContext) + break } } @@ -16278,52 +18859,97 @@ func (s *Procedure_blockContext) Proc_statement(i int) IProc_statementContext { return nil } - return t.(IProc_statementContext) + return t.(ISql_expr_listContext) } -func (s *Procedure_blockContext) GetRuleContext() antlr.RuleContext { - return s +func (s *Normal_call_sqlContext) STAR() antlr.TerminalNode { + return s.GetToken(KuneiformParserSTAR, 0) } -func (s *Procedure_blockContext) ToStringTree(ruleNames []string, recog antlr.Recognizer) string { - return antlr.TreesStringTree(s, ruleNames, recog) +func (s *Normal_call_sqlContext) DISTINCT() antlr.TerminalNode { + return s.GetToken(KuneiformParserDISTINCT, 0) } -func (s *Procedure_blockContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { +func (s *Normal_call_sqlContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { switch t := visitor.(type) { case KuneiformParserVisitor: - return t.VisitProcedure_block(s) + return t.VisitNormal_call_sql(s) default: return t.VisitChildren(s) } } -func (p *KuneiformParser) Procedure_block() (localctx IProcedure_blockContext) { - localctx = NewProcedure_blockContext(p, p.GetParserRuleContext(), p.GetState()) - p.EnterRule(localctx, 100, KuneiformParserRULE_procedure_block) +func (p *KuneiformParser) Sql_function_call() (localctx ISql_function_callContext) { + localctx = NewSql_function_callContext(p, p.GetParserRuleContext(), p.GetState()) + p.EnterRule(localctx, 108, KuneiformParserRULE_sql_function_call) var _la int + localctx = NewNormal_call_sqlContext(p, localctx) p.EnterOuterAlt(localctx, 1) - p.SetState(941) + { + p.SetState(1095) + p.Identifier() + } + { + p.SetState(1096) + p.Match(KuneiformParserLPAREN) + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } + } + p.SetState(1102) p.GetErrorHandler().Sync(p) if p.HasError() { goto errorExit } - _la = p.GetTokenStream().LA(1) + switch p.GetTokenStream().LA(1) { + case KuneiformParserLPAREN, KuneiformParserPLUS, KuneiformParserMINUS, KuneiformParserDOUBLE_QUOTE, KuneiformParserNULL, KuneiformParserNOT, KuneiformParserEXISTS, KuneiformParserCASE, KuneiformParserDISTINCT, KuneiformParserARRAY, KuneiformParserSTRING_, KuneiformParserTRUE, KuneiformParserFALSE, KuneiformParserDIGITS_, KuneiformParserBINARY_, KuneiformParserIDENTIFIER, KuneiformParserVARIABLE, KuneiformParserCONTEXTUAL_VARIABLE: + p.SetState(1098) + p.GetErrorHandler().Sync(p) + if p.HasError() { + goto errorExit + } + _la = p.GetTokenStream().LA(1) + + if _la == KuneiformParserDISTINCT { + { + p.SetState(1097) + p.Match(KuneiformParserDISTINCT) + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } + } - for ((int64(_la) & ^0x3f) == 0 && ((int64(1)<<_la)&175640386007468168) != 0) || ((int64((_la-84)) & ^0x3f) == 0 && ((int64(1)<<(_la-84))&15426876605953) != 0) { + } { - p.SetState(938) - p.Proc_statement() + p.SetState(1100) + p.Sql_expr_list() } - p.SetState(943) - p.GetErrorHandler().Sync(p) + case KuneiformParserSTAR: + { + p.SetState(1101) + p.Match(KuneiformParserSTAR) + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } + } + + case KuneiformParserRPAREN: + + default: + } + { + p.SetState(1104) + p.Match(KuneiformParserRPAREN) if 
p.HasError() { + // Recognition error - abort rule goto errorExit } - _la = p.GetTokenStream().LA(1) } errorExit: @@ -16339,82 +18965,82 @@ errorExit: goto errorExit // Trick to prevent compiler error if the label is not used } -// IProcedure_exprContext is an interface to support dynamic dispatch. -type IProcedure_exprContext interface { +// IAction_exprContext is an interface to support dynamic dispatch. +type IAction_exprContext interface { antlr.ParserRuleContext // GetParser returns the parser. GetParser() antlr.Parser - // IsProcedure_exprContext differentiates from other interfaces. - IsProcedure_exprContext() + // IsAction_exprContext differentiates from other interfaces. + IsAction_exprContext() } -type Procedure_exprContext struct { +type Action_exprContext struct { antlr.BaseParserRuleContext parser antlr.Parser } -func NewEmptyProcedure_exprContext() *Procedure_exprContext { - var p = new(Procedure_exprContext) +func NewEmptyAction_exprContext() *Action_exprContext { + var p = new(Action_exprContext) antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1) - p.RuleIndex = KuneiformParserRULE_procedure_expr + p.RuleIndex = KuneiformParserRULE_action_expr return p } -func InitEmptyProcedure_exprContext(p *Procedure_exprContext) { +func InitEmptyAction_exprContext(p *Action_exprContext) { antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1) - p.RuleIndex = KuneiformParserRULE_procedure_expr + p.RuleIndex = KuneiformParserRULE_action_expr } -func (*Procedure_exprContext) IsProcedure_exprContext() {} +func (*Action_exprContext) IsAction_exprContext() {} -func NewProcedure_exprContext(parser antlr.Parser, parent antlr.ParserRuleContext, invokingState int) *Procedure_exprContext { - var p = new(Procedure_exprContext) +func NewAction_exprContext(parser antlr.Parser, parent antlr.ParserRuleContext, invokingState int) *Action_exprContext { + var p = new(Action_exprContext) antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, parent, invokingState) p.parser = parser - p.RuleIndex = KuneiformParserRULE_procedure_expr + p.RuleIndex = KuneiformParserRULE_action_expr return p } -func (s *Procedure_exprContext) GetParser() antlr.Parser { return s.parser } +func (s *Action_exprContext) GetParser() antlr.Parser { return s.parser } -func (s *Procedure_exprContext) CopyAll(ctx *Procedure_exprContext) { +func (s *Action_exprContext) CopyAll(ctx *Action_exprContext) { s.CopyFrom(&ctx.BaseParserRuleContext) } -func (s *Procedure_exprContext) GetRuleContext() antlr.RuleContext { +func (s *Action_exprContext) GetRuleContext() antlr.RuleContext { return s } -func (s *Procedure_exprContext) ToStringTree(ruleNames []string, recog antlr.Recognizer) string { +func (s *Action_exprContext) ToStringTree(ruleNames []string, recog antlr.Recognizer) string { return antlr.TreesStringTree(s, ruleNames, recog) } -type Field_access_procedure_exprContext struct { - Procedure_exprContext +type Function_call_action_exprContext struct { + Action_exprContext } -func NewField_access_procedure_exprContext(parser antlr.Parser, ctx antlr.ParserRuleContext) *Field_access_procedure_exprContext { - var p = new(Field_access_procedure_exprContext) +func NewFunction_call_action_exprContext(parser antlr.Parser, ctx antlr.ParserRuleContext) *Function_call_action_exprContext { + var p = new(Function_call_action_exprContext) - InitEmptyProcedure_exprContext(&p.Procedure_exprContext) + InitEmptyAction_exprContext(&p.Action_exprContext) p.parser = parser - p.CopyAll(ctx.(*Procedure_exprContext)) + 
p.CopyAll(ctx.(*Action_exprContext)) return p } -func (s *Field_access_procedure_exprContext) GetRuleContext() antlr.RuleContext { +func (s *Function_call_action_exprContext) GetRuleContext() antlr.RuleContext { return s } -func (s *Field_access_procedure_exprContext) Procedure_expr() IProcedure_exprContext { +func (s *Function_call_action_exprContext) Action_function_call() IAction_function_callContext { var t antlr.RuleContext for _, ctx := range s.GetChildren() { - if _, ok := ctx.(IProcedure_exprContext); ok { + if _, ok := ctx.(IAction_function_callContext); ok { t = ctx.(antlr.RuleContext) break } @@ -16424,18 +19050,10 @@ func (s *Field_access_procedure_exprContext) Procedure_expr() IProcedure_exprCon return nil } - return t.(IProcedure_exprContext) -} - -func (s *Field_access_procedure_exprContext) PERIOD() antlr.TerminalNode { - return s.GetToken(KuneiformParserPERIOD, 0) -} - -func (s *Field_access_procedure_exprContext) IDENTIFIER() antlr.TerminalNode { - return s.GetToken(KuneiformParserIDENTIFIER, 0) + return t.(IAction_function_callContext) } -func (s *Field_access_procedure_exprContext) Type_cast() IType_castContext { +func (s *Function_call_action_exprContext) Type_cast() IType_castContext { var t antlr.RuleContext for _, ctx := range s.GetChildren() { if _, ok := ctx.(IType_castContext); ok { @@ -16451,35 +19069,35 @@ func (s *Field_access_procedure_exprContext) Type_cast() IType_castContext { return t.(IType_castContext) } -func (s *Field_access_procedure_exprContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { +func (s *Function_call_action_exprContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { switch t := visitor.(type) { case KuneiformParserVisitor: - return t.VisitField_access_procedure_expr(s) + return t.VisitFunction_call_action_expr(s) default: return t.VisitChildren(s) } } -type Literal_procedure_exprContext struct { - Procedure_exprContext +type Literal_action_exprContext struct { + Action_exprContext } -func NewLiteral_procedure_exprContext(parser antlr.Parser, ctx antlr.ParserRuleContext) *Literal_procedure_exprContext { - var p = new(Literal_procedure_exprContext) +func NewLiteral_action_exprContext(parser antlr.Parser, ctx antlr.ParserRuleContext) *Literal_action_exprContext { + var p = new(Literal_action_exprContext) - InitEmptyProcedure_exprContext(&p.Procedure_exprContext) + InitEmptyAction_exprContext(&p.Action_exprContext) p.parser = parser - p.CopyAll(ctx.(*Procedure_exprContext)) + p.CopyAll(ctx.(*Action_exprContext)) return p } -func (s *Literal_procedure_exprContext) GetRuleContext() antlr.RuleContext { +func (s *Literal_action_exprContext) GetRuleContext() antlr.RuleContext { return s } -func (s *Literal_procedure_exprContext) Literal() ILiteralContext { +func (s *Literal_action_exprContext) Literal() ILiteralContext { var t antlr.RuleContext for _, ctx := range s.GetChildren() { if _, ok := ctx.(ILiteralContext); ok { @@ -16495,7 +19113,7 @@ func (s *Literal_procedure_exprContext) Literal() ILiteralContext { return t.(ILiteralContext) } -func (s *Literal_procedure_exprContext) Type_cast() IType_castContext { +func (s *Literal_action_exprContext) Type_cast() IType_castContext { var t antlr.RuleContext for _, ctx := range s.GetChildren() { if _, ok := ctx.(IType_castContext); ok { @@ -16511,42 +19129,38 @@ func (s *Literal_procedure_exprContext) Type_cast() IType_castContext { return t.(IType_castContext) } -func (s *Literal_procedure_exprContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { +func (s 
*Literal_action_exprContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { switch t := visitor.(type) { case KuneiformParserVisitor: - return t.VisitLiteral_procedure_expr(s) + return t.VisitLiteral_action_expr(s) default: return t.VisitChildren(s) } } -type Paren_procedure_exprContext struct { - Procedure_exprContext +type Field_access_action_exprContext struct { + Action_exprContext } -func NewParen_procedure_exprContext(parser antlr.Parser, ctx antlr.ParserRuleContext) *Paren_procedure_exprContext { - var p = new(Paren_procedure_exprContext) +func NewField_access_action_exprContext(parser antlr.Parser, ctx antlr.ParserRuleContext) *Field_access_action_exprContext { + var p = new(Field_access_action_exprContext) - InitEmptyProcedure_exprContext(&p.Procedure_exprContext) + InitEmptyAction_exprContext(&p.Action_exprContext) p.parser = parser - p.CopyAll(ctx.(*Procedure_exprContext)) + p.CopyAll(ctx.(*Action_exprContext)) return p } -func (s *Paren_procedure_exprContext) GetRuleContext() antlr.RuleContext { +func (s *Field_access_action_exprContext) GetRuleContext() antlr.RuleContext { return s } -func (s *Paren_procedure_exprContext) LPAREN() antlr.TerminalNode { - return s.GetToken(KuneiformParserLPAREN, 0) -} - -func (s *Paren_procedure_exprContext) Procedure_expr() IProcedure_exprContext { +func (s *Field_access_action_exprContext) Action_expr() IAction_exprContext { var t antlr.RuleContext for _, ctx := range s.GetChildren() { - if _, ok := ctx.(IProcedure_exprContext); ok { + if _, ok := ctx.(IAction_exprContext); ok { t = ctx.(antlr.RuleContext) break } @@ -16556,14 +19170,30 @@ func (s *Paren_procedure_exprContext) Procedure_expr() IProcedure_exprContext { return nil } - return t.(IProcedure_exprContext) + return t.(IAction_exprContext) } -func (s *Paren_procedure_exprContext) RPAREN() antlr.TerminalNode { - return s.GetToken(KuneiformParserRPAREN, 0) +func (s *Field_access_action_exprContext) PERIOD() antlr.TerminalNode { + return s.GetToken(KuneiformParserPERIOD, 0) +} + +func (s *Field_access_action_exprContext) Identifier() IIdentifierContext { + var t antlr.RuleContext + for _, ctx := range s.GetChildren() { + if _, ok := ctx.(IIdentifierContext); ok { + t = ctx.(antlr.RuleContext) + break + } + } + + if t == nil { + return nil + } + + return t.(IIdentifierContext) } -func (s *Paren_procedure_exprContext) Type_cast() IType_castContext { +func (s *Field_access_action_exprContext) Type_cast() IType_castContext { var t antlr.RuleContext for _, ctx := range s.GetChildren() { if _, ok := ctx.(IType_castContext); ok { @@ -16579,35 +19209,142 @@ func (s *Paren_procedure_exprContext) Type_cast() IType_castContext { return t.(IType_castContext) } -func (s *Paren_procedure_exprContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { +func (s *Field_access_action_exprContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { + switch t := visitor.(type) { + case KuneiformParserVisitor: + return t.VisitField_access_action_expr(s) + + default: + return t.VisitChildren(s) + } +} + +type Is_action_exprContext struct { + Action_exprContext + left IAction_exprContext + right IAction_exprContext +} + +func NewIs_action_exprContext(parser antlr.Parser, ctx antlr.ParserRuleContext) *Is_action_exprContext { + var p = new(Is_action_exprContext) + + InitEmptyAction_exprContext(&p.Action_exprContext) + p.parser = parser + p.CopyAll(ctx.(*Action_exprContext)) + + return p +} + +func (s *Is_action_exprContext) GetLeft() IAction_exprContext { return s.left } + +func (s 
*Is_action_exprContext) GetRight() IAction_exprContext { return s.right } + +func (s *Is_action_exprContext) SetLeft(v IAction_exprContext) { s.left = v } + +func (s *Is_action_exprContext) SetRight(v IAction_exprContext) { s.right = v } + +func (s *Is_action_exprContext) GetRuleContext() antlr.RuleContext { + return s +} + +func (s *Is_action_exprContext) IS() antlr.TerminalNode { + return s.GetToken(KuneiformParserIS, 0) +} + +func (s *Is_action_exprContext) AllAction_expr() []IAction_exprContext { + children := s.GetChildren() + len := 0 + for _, ctx := range children { + if _, ok := ctx.(IAction_exprContext); ok { + len++ + } + } + + tst := make([]IAction_exprContext, len) + i := 0 + for _, ctx := range children { + if t, ok := ctx.(IAction_exprContext); ok { + tst[i] = t.(IAction_exprContext) + i++ + } + } + + return tst +} + +func (s *Is_action_exprContext) Action_expr(i int) IAction_exprContext { + var t antlr.RuleContext + j := 0 + for _, ctx := range s.GetChildren() { + if _, ok := ctx.(IAction_exprContext); ok { + if j == i { + t = ctx.(antlr.RuleContext) + break + } + j++ + } + } + + if t == nil { + return nil + } + + return t.(IAction_exprContext) +} + +func (s *Is_action_exprContext) NULL() antlr.TerminalNode { + return s.GetToken(KuneiformParserNULL, 0) +} + +func (s *Is_action_exprContext) TRUE() antlr.TerminalNode { + return s.GetToken(KuneiformParserTRUE, 0) +} + +func (s *Is_action_exprContext) FALSE() antlr.TerminalNode { + return s.GetToken(KuneiformParserFALSE, 0) +} + +func (s *Is_action_exprContext) NOT() antlr.TerminalNode { + return s.GetToken(KuneiformParserNOT, 0) +} + +func (s *Is_action_exprContext) DISTINCT() antlr.TerminalNode { + return s.GetToken(KuneiformParserDISTINCT, 0) +} + +func (s *Is_action_exprContext) FROM() antlr.TerminalNode { + return s.GetToken(KuneiformParserFROM, 0) +} + +func (s *Is_action_exprContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { switch t := visitor.(type) { case KuneiformParserVisitor: - return t.VisitParen_procedure_expr(s) + return t.VisitIs_action_expr(s) default: return t.VisitChildren(s) } } -type Variable_procedure_exprContext struct { - Procedure_exprContext +type Variable_action_exprContext struct { + Action_exprContext } -func NewVariable_procedure_exprContext(parser antlr.Parser, ctx antlr.ParserRuleContext) *Variable_procedure_exprContext { - var p = new(Variable_procedure_exprContext) +func NewVariable_action_exprContext(parser antlr.Parser, ctx antlr.ParserRuleContext) *Variable_action_exprContext { + var p = new(Variable_action_exprContext) - InitEmptyProcedure_exprContext(&p.Procedure_exprContext) + InitEmptyAction_exprContext(&p.Action_exprContext) p.parser = parser - p.CopyAll(ctx.(*Procedure_exprContext)) + p.CopyAll(ctx.(*Action_exprContext)) return p } -func (s *Variable_procedure_exprContext) GetRuleContext() antlr.RuleContext { +func (s *Variable_action_exprContext) GetRuleContext() antlr.RuleContext { return s } -func (s *Variable_procedure_exprContext) Variable() IVariableContext { +func (s *Variable_action_exprContext) Variable() IVariableContext { var t antlr.RuleContext for _, ctx := range s.GetChildren() { if _, ok := ctx.(IVariableContext); ok { @@ -16623,7 +19360,7 @@ func (s *Variable_procedure_exprContext) Variable() IVariableContext { return t.(IVariableContext) } -func (s *Variable_procedure_exprContext) Type_cast() IType_castContext { +func (s *Variable_action_exprContext) Type_cast() IType_castContext { var t antlr.RuleContext for _, ctx := range s.GetChildren() { if _, ok := 
ctx.(IType_castContext); ok { @@ -16639,46 +19376,50 @@ func (s *Variable_procedure_exprContext) Type_cast() IType_castContext { return t.(IType_castContext) } -func (s *Variable_procedure_exprContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { +func (s *Variable_action_exprContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { switch t := visitor.(type) { case KuneiformParserVisitor: - return t.VisitVariable_procedure_expr(s) + return t.VisitVariable_action_expr(s) default: return t.VisitChildren(s) } } -type Make_array_procedure_exprContext struct { - Procedure_exprContext +type Make_array_action_exprContext struct { + Action_exprContext } -func NewMake_array_procedure_exprContext(parser antlr.Parser, ctx antlr.ParserRuleContext) *Make_array_procedure_exprContext { - var p = new(Make_array_procedure_exprContext) +func NewMake_array_action_exprContext(parser antlr.Parser, ctx antlr.ParserRuleContext) *Make_array_action_exprContext { + var p = new(Make_array_action_exprContext) - InitEmptyProcedure_exprContext(&p.Procedure_exprContext) + InitEmptyAction_exprContext(&p.Action_exprContext) p.parser = parser - p.CopyAll(ctx.(*Procedure_exprContext)) + p.CopyAll(ctx.(*Action_exprContext)) return p } -func (s *Make_array_procedure_exprContext) GetRuleContext() antlr.RuleContext { +func (s *Make_array_action_exprContext) GetRuleContext() antlr.RuleContext { return s } -func (s *Make_array_procedure_exprContext) LBRACKET() antlr.TerminalNode { +func (s *Make_array_action_exprContext) LBRACKET() antlr.TerminalNode { return s.GetToken(KuneiformParserLBRACKET, 0) } -func (s *Make_array_procedure_exprContext) RBRACKET() antlr.TerminalNode { +func (s *Make_array_action_exprContext) RBRACKET() antlr.TerminalNode { return s.GetToken(KuneiformParserRBRACKET, 0) } -func (s *Make_array_procedure_exprContext) Procedure_expr_list() IProcedure_expr_listContext { +func (s *Make_array_action_exprContext) ARRAY() antlr.TerminalNode { + return s.GetToken(KuneiformParserARRAY, 0) +} + +func (s *Make_array_action_exprContext) Action_expr_list() IAction_expr_listContext { var t antlr.RuleContext for _, ctx := range s.GetChildren() { - if _, ok := ctx.(IProcedure_expr_listContext); ok { + if _, ok := ctx.(IAction_expr_listContext); ok { t = ctx.(antlr.RuleContext) break } @@ -16688,10 +19429,10 @@ func (s *Make_array_procedure_exprContext) Procedure_expr_list() IProcedure_expr return nil } - return t.(IProcedure_expr_listContext) + return t.(IAction_expr_listContext) } -func (s *Make_array_procedure_exprContext) Type_cast() IType_castContext { +func (s *Make_array_action_exprContext) Type_cast() IType_castContext { var t antlr.RuleContext for _, ctx := range s.GetChildren() { if _, ok := ctx.(IType_castContext); ok { @@ -16707,62 +19448,48 @@ func (s *Make_array_procedure_exprContext) Type_cast() IType_castContext { return t.(IType_castContext) } -func (s *Make_array_procedure_exprContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { +func (s *Make_array_action_exprContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { switch t := visitor.(type) { case KuneiformParserVisitor: - return t.VisitMake_array_procedure_expr(s) + return t.VisitMake_array_action_expr(s) default: return t.VisitChildren(s) } } -type Is_procedure_exprContext struct { - Procedure_exprContext - left IProcedure_exprContext - right IProcedure_exprContext +type Comparison_action_exprContext struct { + Action_exprContext } -func NewIs_procedure_exprContext(parser antlr.Parser, ctx antlr.ParserRuleContext) 
*Is_procedure_exprContext { - var p = new(Is_procedure_exprContext) +func NewComparison_action_exprContext(parser antlr.Parser, ctx antlr.ParserRuleContext) *Comparison_action_exprContext { + var p = new(Comparison_action_exprContext) - InitEmptyProcedure_exprContext(&p.Procedure_exprContext) + InitEmptyAction_exprContext(&p.Action_exprContext) p.parser = parser - p.CopyAll(ctx.(*Procedure_exprContext)) + p.CopyAll(ctx.(*Action_exprContext)) return p } -func (s *Is_procedure_exprContext) GetLeft() IProcedure_exprContext { return s.left } - -func (s *Is_procedure_exprContext) GetRight() IProcedure_exprContext { return s.right } - -func (s *Is_procedure_exprContext) SetLeft(v IProcedure_exprContext) { s.left = v } - -func (s *Is_procedure_exprContext) SetRight(v IProcedure_exprContext) { s.right = v } - -func (s *Is_procedure_exprContext) GetRuleContext() antlr.RuleContext { +func (s *Comparison_action_exprContext) GetRuleContext() antlr.RuleContext { return s } -func (s *Is_procedure_exprContext) IS() antlr.TerminalNode { - return s.GetToken(KuneiformParserIS, 0) -} - -func (s *Is_procedure_exprContext) AllProcedure_expr() []IProcedure_exprContext { +func (s *Comparison_action_exprContext) AllAction_expr() []IAction_exprContext { children := s.GetChildren() len := 0 for _, ctx := range children { - if _, ok := ctx.(IProcedure_exprContext); ok { + if _, ok := ctx.(IAction_exprContext); ok { len++ } } - tst := make([]IProcedure_exprContext, len) + tst := make([]IAction_exprContext, len) i := 0 for _, ctx := range children { - if t, ok := ctx.(IProcedure_exprContext); ok { - tst[i] = t.(IProcedure_exprContext) + if t, ok := ctx.(IAction_exprContext); ok { + tst[i] = t.(IAction_exprContext) i++ } } @@ -16770,11 +19497,11 @@ func (s *Is_procedure_exprContext) AllProcedure_expr() []IProcedure_exprContext return tst } -func (s *Is_procedure_exprContext) Procedure_expr(i int) IProcedure_exprContext { +func (s *Comparison_action_exprContext) Action_expr(i int) IAction_exprContext { var t antlr.RuleContext j := 0 for _, ctx := range s.GetChildren() { - if _, ok := ctx.(IProcedure_exprContext); ok { + if _, ok := ctx.(IAction_exprContext); ok { if j == i { t = ctx.(antlr.RuleContext) break @@ -16787,75 +19514,79 @@ func (s *Is_procedure_exprContext) Procedure_expr(i int) IProcedure_exprContext return nil } - return t.(IProcedure_exprContext) + return t.(IAction_exprContext) } -func (s *Is_procedure_exprContext) NULL() antlr.TerminalNode { - return s.GetToken(KuneiformParserNULL, 0) +func (s *Comparison_action_exprContext) EQUALS() antlr.TerminalNode { + return s.GetToken(KuneiformParserEQUALS, 0) } -func (s *Is_procedure_exprContext) TRUE() antlr.TerminalNode { - return s.GetToken(KuneiformParserTRUE, 0) +func (s *Comparison_action_exprContext) EQUATE() antlr.TerminalNode { + return s.GetToken(KuneiformParserEQUATE, 0) +} + +func (s *Comparison_action_exprContext) NEQ() antlr.TerminalNode { + return s.GetToken(KuneiformParserNEQ, 0) } -func (s *Is_procedure_exprContext) FALSE() antlr.TerminalNode { - return s.GetToken(KuneiformParserFALSE, 0) +func (s *Comparison_action_exprContext) LT() antlr.TerminalNode { + return s.GetToken(KuneiformParserLT, 0) } -func (s *Is_procedure_exprContext) NOT() antlr.TerminalNode { - return s.GetToken(KuneiformParserNOT, 0) +func (s *Comparison_action_exprContext) LTE() antlr.TerminalNode { + return s.GetToken(KuneiformParserLTE, 0) } -func (s *Is_procedure_exprContext) DISTINCT() antlr.TerminalNode { - return s.GetToken(KuneiformParserDISTINCT, 0) +func (s 
*Comparison_action_exprContext) GT() antlr.TerminalNode { + return s.GetToken(KuneiformParserGT, 0) } -func (s *Is_procedure_exprContext) FROM() antlr.TerminalNode { - return s.GetToken(KuneiformParserFROM, 0) +func (s *Comparison_action_exprContext) GTE() antlr.TerminalNode { + return s.GetToken(KuneiformParserGTE, 0) } -func (s *Is_procedure_exprContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { +func (s *Comparison_action_exprContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { switch t := visitor.(type) { case KuneiformParserVisitor: - return t.VisitIs_procedure_expr(s) + return t.VisitComparison_action_expr(s) default: return t.VisitChildren(s) } } -type Procedure_expr_arithmeticContext struct { - Procedure_exprContext +type Action_expr_arithmeticContext struct { + Action_exprContext } -func NewProcedure_expr_arithmeticContext(parser antlr.Parser, ctx antlr.ParserRuleContext) *Procedure_expr_arithmeticContext { - var p = new(Procedure_expr_arithmeticContext) +func NewAction_expr_arithmeticContext(parser antlr.Parser, ctx antlr.ParserRuleContext) *Action_expr_arithmeticContext { + var p = new(Action_expr_arithmeticContext) - InitEmptyProcedure_exprContext(&p.Procedure_exprContext) + InitEmptyAction_exprContext(&p.Action_exprContext) p.parser = parser - p.CopyAll(ctx.(*Procedure_exprContext)) + p.CopyAll(ctx.(*Action_exprContext)) return p } -func (s *Procedure_expr_arithmeticContext) GetRuleContext() antlr.RuleContext { +func (s *Action_expr_arithmeticContext) GetRuleContext() antlr.RuleContext { return s } -func (s *Procedure_expr_arithmeticContext) AllProcedure_expr() []IProcedure_exprContext { +func (s *Action_expr_arithmeticContext) AllAction_expr() []IAction_exprContext { children := s.GetChildren() len := 0 for _, ctx := range children { - if _, ok := ctx.(IProcedure_exprContext); ok { + if _, ok := ctx.(IAction_exprContext); ok { len++ } } - tst := make([]IProcedure_exprContext, len) + tst := make([]IAction_exprContext, len) i := 0 for _, ctx := range children { - if t, ok := ctx.(IProcedure_exprContext); ok { - tst[i] = t.(IProcedure_exprContext) + if t, ok := ctx.(IAction_exprContext); ok { + tst[i] = t.(IAction_exprContext) i++ } } @@ -16863,11 +19594,11 @@ func (s *Procedure_expr_arithmeticContext) AllProcedure_expr() []IProcedure_expr return tst } -func (s *Procedure_expr_arithmeticContext) Procedure_expr(i int) IProcedure_exprContext { +func (s *Action_expr_arithmeticContext) Action_expr(i int) IAction_exprContext { var t antlr.RuleContext j := 0 for _, ctx := range s.GetChildren() { - if _, ok := ctx.(IProcedure_exprContext); ok { + if _, ok := ctx.(IAction_exprContext); ok { if j == i { t = ctx.(antlr.RuleContext) break @@ -16880,135 +19611,107 @@ func (s *Procedure_expr_arithmeticContext) Procedure_expr(i int) IProcedure_expr return nil } - return t.(IProcedure_exprContext) + return t.(IAction_exprContext) } -func (s *Procedure_expr_arithmeticContext) STAR() antlr.TerminalNode { +func (s *Action_expr_arithmeticContext) STAR() antlr.TerminalNode { return s.GetToken(KuneiformParserSTAR, 0) } -func (s *Procedure_expr_arithmeticContext) DIV() antlr.TerminalNode { +func (s *Action_expr_arithmeticContext) DIV() antlr.TerminalNode { return s.GetToken(KuneiformParserDIV, 0) } -func (s *Procedure_expr_arithmeticContext) MOD() antlr.TerminalNode { +func (s *Action_expr_arithmeticContext) MOD() antlr.TerminalNode { return s.GetToken(KuneiformParserMOD, 0) } -func (s *Procedure_expr_arithmeticContext) PLUS() antlr.TerminalNode { +func (s 
*Action_expr_arithmeticContext) PLUS() antlr.TerminalNode { return s.GetToken(KuneiformParserPLUS, 0) } -func (s *Procedure_expr_arithmeticContext) MINUS() antlr.TerminalNode { +func (s *Action_expr_arithmeticContext) MINUS() antlr.TerminalNode { return s.GetToken(KuneiformParserMINUS, 0) } -func (s *Procedure_expr_arithmeticContext) CONCAT() antlr.TerminalNode { +func (s *Action_expr_arithmeticContext) CONCAT() antlr.TerminalNode { return s.GetToken(KuneiformParserCONCAT, 0) } -func (s *Procedure_expr_arithmeticContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { +func (s *Action_expr_arithmeticContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { switch t := visitor.(type) { case KuneiformParserVisitor: - return t.VisitProcedure_expr_arithmetic(s) + return t.VisitAction_expr_arithmetic(s) default: return t.VisitChildren(s) } } -type Unary_procedure_exprContext struct { - Procedure_exprContext +type Array_access_action_exprContext struct { + Action_exprContext + array_element IAction_exprContext + single IAction_exprContext + left IAction_exprContext + right IAction_exprContext } -func NewUnary_procedure_exprContext(parser antlr.Parser, ctx antlr.ParserRuleContext) *Unary_procedure_exprContext { - var p = new(Unary_procedure_exprContext) +func NewArray_access_action_exprContext(parser antlr.Parser, ctx antlr.ParserRuleContext) *Array_access_action_exprContext { + var p = new(Array_access_action_exprContext) - InitEmptyProcedure_exprContext(&p.Procedure_exprContext) + InitEmptyAction_exprContext(&p.Action_exprContext) p.parser = parser - p.CopyAll(ctx.(*Procedure_exprContext)) + p.CopyAll(ctx.(*Action_exprContext)) return p } -func (s *Unary_procedure_exprContext) GetRuleContext() antlr.RuleContext { - return s +func (s *Array_access_action_exprContext) GetArray_element() IAction_exprContext { + return s.array_element } -func (s *Unary_procedure_exprContext) Procedure_expr() IProcedure_exprContext { - var t antlr.RuleContext - for _, ctx := range s.GetChildren() { - if _, ok := ctx.(IProcedure_exprContext); ok { - t = ctx.(antlr.RuleContext) - break - } - } - - if t == nil { - return nil - } - - return t.(IProcedure_exprContext) -} +func (s *Array_access_action_exprContext) GetSingle() IAction_exprContext { return s.single } -func (s *Unary_procedure_exprContext) PLUS() antlr.TerminalNode { - return s.GetToken(KuneiformParserPLUS, 0) -} +func (s *Array_access_action_exprContext) GetLeft() IAction_exprContext { return s.left } -func (s *Unary_procedure_exprContext) MINUS() antlr.TerminalNode { - return s.GetToken(KuneiformParserMINUS, 0) -} +func (s *Array_access_action_exprContext) GetRight() IAction_exprContext { return s.right } -func (s *Unary_procedure_exprContext) EXCL() antlr.TerminalNode { - return s.GetToken(KuneiformParserEXCL, 0) +func (s *Array_access_action_exprContext) SetArray_element(v IAction_exprContext) { + s.array_element = v } -func (s *Unary_procedure_exprContext) NOT() antlr.TerminalNode { - return s.GetToken(KuneiformParserNOT, 0) -} +func (s *Array_access_action_exprContext) SetSingle(v IAction_exprContext) { s.single = v } -func (s *Unary_procedure_exprContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { - switch t := visitor.(type) { - case KuneiformParserVisitor: - return t.VisitUnary_procedure_expr(s) +func (s *Array_access_action_exprContext) SetLeft(v IAction_exprContext) { s.left = v } - default: - return t.VisitChildren(s) - } -} +func (s *Array_access_action_exprContext) SetRight(v IAction_exprContext) { s.right = v } -type 
Comparison_procedure_exprContext struct { - Procedure_exprContext +func (s *Array_access_action_exprContext) GetRuleContext() antlr.RuleContext { + return s } -func NewComparison_procedure_exprContext(parser antlr.Parser, ctx antlr.ParserRuleContext) *Comparison_procedure_exprContext { - var p = new(Comparison_procedure_exprContext) - - InitEmptyProcedure_exprContext(&p.Procedure_exprContext) - p.parser = parser - p.CopyAll(ctx.(*Procedure_exprContext)) - - return p +func (s *Array_access_action_exprContext) LBRACKET() antlr.TerminalNode { + return s.GetToken(KuneiformParserLBRACKET, 0) } -func (s *Comparison_procedure_exprContext) GetRuleContext() antlr.RuleContext { - return s +func (s *Array_access_action_exprContext) RBRACKET() antlr.TerminalNode { + return s.GetToken(KuneiformParserRBRACKET, 0) } -func (s *Comparison_procedure_exprContext) AllProcedure_expr() []IProcedure_exprContext { +func (s *Array_access_action_exprContext) AllAction_expr() []IAction_exprContext { children := s.GetChildren() len := 0 for _, ctx := range children { - if _, ok := ctx.(IProcedure_exprContext); ok { + if _, ok := ctx.(IAction_exprContext); ok { len++ } } - tst := make([]IProcedure_exprContext, len) + tst := make([]IAction_exprContext, len) i := 0 for _, ctx := range children { - if t, ok := ctx.(IProcedure_exprContext); ok { - tst[i] = t.(IProcedure_exprContext) + if t, ok := ctx.(IAction_exprContext); ok { + tst[i] = t.(IAction_exprContext) i++ } } @@ -17016,11 +19719,11 @@ func (s *Comparison_procedure_exprContext) AllProcedure_expr() []IProcedure_expr return tst } -func (s *Comparison_procedure_exprContext) Procedure_expr(i int) IProcedure_exprContext { +func (s *Array_access_action_exprContext) Action_expr(i int) IAction_exprContext { var t antlr.RuleContext j := 0 for _, ctx := range s.GetChildren() { - if _, ok := ctx.(IProcedure_exprContext); ok { + if _, ok := ctx.(IAction_exprContext); ok { if j == i { t = ctx.(antlr.RuleContext) break @@ -17033,82 +19736,10 @@ func (s *Comparison_procedure_exprContext) Procedure_expr(i int) IProcedure_expr return nil } - return t.(IProcedure_exprContext) -} - -func (s *Comparison_procedure_exprContext) EQUALS() antlr.TerminalNode { - return s.GetToken(KuneiformParserEQUALS, 0) -} - -func (s *Comparison_procedure_exprContext) EQUATE() antlr.TerminalNode { - return s.GetToken(KuneiformParserEQUATE, 0) -} - -func (s *Comparison_procedure_exprContext) NEQ() antlr.TerminalNode { - return s.GetToken(KuneiformParserNEQ, 0) -} - -func (s *Comparison_procedure_exprContext) LT() antlr.TerminalNode { - return s.GetToken(KuneiformParserLT, 0) -} - -func (s *Comparison_procedure_exprContext) LTE() antlr.TerminalNode { - return s.GetToken(KuneiformParserLTE, 0) -} - -func (s *Comparison_procedure_exprContext) GT() antlr.TerminalNode { - return s.GetToken(KuneiformParserGT, 0) -} - -func (s *Comparison_procedure_exprContext) GTE() antlr.TerminalNode { - return s.GetToken(KuneiformParserGTE, 0) -} - -func (s *Comparison_procedure_exprContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { - switch t := visitor.(type) { - case KuneiformParserVisitor: - return t.VisitComparison_procedure_expr(s) - - default: - return t.VisitChildren(s) - } -} - -type Function_call_procedure_exprContext struct { - Procedure_exprContext -} - -func NewFunction_call_procedure_exprContext(parser antlr.Parser, ctx antlr.ParserRuleContext) *Function_call_procedure_exprContext { - var p = new(Function_call_procedure_exprContext) - - InitEmptyProcedure_exprContext(&p.Procedure_exprContext) - 
p.parser = parser - p.CopyAll(ctx.(*Procedure_exprContext)) - - return p -} - -func (s *Function_call_procedure_exprContext) GetRuleContext() antlr.RuleContext { - return s -} - -func (s *Function_call_procedure_exprContext) Procedure_function_call() IProcedure_function_callContext { - var t antlr.RuleContext - for _, ctx := range s.GetChildren() { - if _, ok := ctx.(IProcedure_function_callContext); ok { - t = ctx.(antlr.RuleContext) - break - } - } - - if t == nil { - return nil - } - - return t.(IProcedure_function_callContext) + return t.(IAction_exprContext) } -func (s *Function_call_procedure_exprContext) Type_cast() IType_castContext { +func (s *Array_access_action_exprContext) Type_cast() IType_castContext { var t antlr.RuleContext for _, ctx := range s.GetChildren() { if _, ok := ctx.(IType_castContext); ok { @@ -17124,48 +19755,52 @@ func (s *Function_call_procedure_exprContext) Type_cast() IType_castContext { return t.(IType_castContext) } -func (s *Function_call_procedure_exprContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { +func (s *Array_access_action_exprContext) COL() antlr.TerminalNode { + return s.GetToken(KuneiformParserCOL, 0) +} + +func (s *Array_access_action_exprContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { switch t := visitor.(type) { case KuneiformParserVisitor: - return t.VisitFunction_call_procedure_expr(s) + return t.VisitArray_access_action_expr(s) default: return t.VisitChildren(s) } } -type Logical_procedure_exprContext struct { - Procedure_exprContext +type Logical_action_exprContext struct { + Action_exprContext } -func NewLogical_procedure_exprContext(parser antlr.Parser, ctx antlr.ParserRuleContext) *Logical_procedure_exprContext { - var p = new(Logical_procedure_exprContext) +func NewLogical_action_exprContext(parser antlr.Parser, ctx antlr.ParserRuleContext) *Logical_action_exprContext { + var p = new(Logical_action_exprContext) - InitEmptyProcedure_exprContext(&p.Procedure_exprContext) + InitEmptyAction_exprContext(&p.Action_exprContext) p.parser = parser - p.CopyAll(ctx.(*Procedure_exprContext)) + p.CopyAll(ctx.(*Action_exprContext)) return p } -func (s *Logical_procedure_exprContext) GetRuleContext() antlr.RuleContext { +func (s *Logical_action_exprContext) GetRuleContext() antlr.RuleContext { return s } -func (s *Logical_procedure_exprContext) AllProcedure_expr() []IProcedure_exprContext { +func (s *Logical_action_exprContext) AllAction_expr() []IAction_exprContext { children := s.GetChildren() len := 0 for _, ctx := range children { - if _, ok := ctx.(IProcedure_exprContext); ok { + if _, ok := ctx.(IAction_exprContext); ok { len++ } } - tst := make([]IProcedure_exprContext, len) + tst := make([]IAction_exprContext, len) i := 0 for _, ctx := range children { - if t, ok := ctx.(IProcedure_exprContext); ok { - tst[i] = t.(IProcedure_exprContext) + if t, ok := ctx.(IAction_exprContext); ok { + tst[i] = t.(IAction_exprContext) i++ } } @@ -17173,11 +19808,11 @@ func (s *Logical_procedure_exprContext) AllProcedure_expr() []IProcedure_exprCon return tst } -func (s *Logical_procedure_exprContext) Procedure_expr(i int) IProcedure_exprContext { +func (s *Logical_action_exprContext) Action_expr(i int) IAction_exprContext { var t antlr.RuleContext j := 0 for _, ctx := range s.GetChildren() { - if _, ok := ctx.(IProcedure_exprContext); ok { + if _, ok := ctx.(IAction_exprContext); ok { if j == i { t = ctx.(antlr.RuleContext) break @@ -17190,108 +19825,75 @@ func (s *Logical_procedure_exprContext) Procedure_expr(i int) 
IProcedure_exprCon return nil } - return t.(IProcedure_exprContext) + return t.(IAction_exprContext) } -func (s *Logical_procedure_exprContext) AND() antlr.TerminalNode { +func (s *Logical_action_exprContext) AND() antlr.TerminalNode { return s.GetToken(KuneiformParserAND, 0) } -func (s *Logical_procedure_exprContext) OR() antlr.TerminalNode { +func (s *Logical_action_exprContext) OR() antlr.TerminalNode { return s.GetToken(KuneiformParserOR, 0) } -func (s *Logical_procedure_exprContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { +func (s *Logical_action_exprContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { switch t := visitor.(type) { case KuneiformParserVisitor: - return t.VisitLogical_procedure_expr(s) + return t.VisitLogical_action_expr(s) default: return t.VisitChildren(s) } } -type Array_access_procedure_exprContext struct { - Procedure_exprContext - array_element IProcedure_exprContext - single IProcedure_exprContext - left IProcedure_exprContext - right IProcedure_exprContext +type Paren_action_exprContext struct { + Action_exprContext } -func NewArray_access_procedure_exprContext(parser antlr.Parser, ctx antlr.ParserRuleContext) *Array_access_procedure_exprContext { - var p = new(Array_access_procedure_exprContext) +func NewParen_action_exprContext(parser antlr.Parser, ctx antlr.ParserRuleContext) *Paren_action_exprContext { + var p = new(Paren_action_exprContext) - InitEmptyProcedure_exprContext(&p.Procedure_exprContext) + InitEmptyAction_exprContext(&p.Action_exprContext) p.parser = parser - p.CopyAll(ctx.(*Procedure_exprContext)) + p.CopyAll(ctx.(*Action_exprContext)) return p } -func (s *Array_access_procedure_exprContext) GetArray_element() IProcedure_exprContext { - return s.array_element -} - -func (s *Array_access_procedure_exprContext) GetSingle() IProcedure_exprContext { return s.single } - -func (s *Array_access_procedure_exprContext) GetLeft() IProcedure_exprContext { return s.left } - -func (s *Array_access_procedure_exprContext) GetRight() IProcedure_exprContext { return s.right } - -func (s *Array_access_procedure_exprContext) SetArray_element(v IProcedure_exprContext) { - s.array_element = v -} - -func (s *Array_access_procedure_exprContext) SetSingle(v IProcedure_exprContext) { s.single = v } - -func (s *Array_access_procedure_exprContext) SetLeft(v IProcedure_exprContext) { s.left = v } - -func (s *Array_access_procedure_exprContext) SetRight(v IProcedure_exprContext) { s.right = v } - -func (s *Array_access_procedure_exprContext) GetRuleContext() antlr.RuleContext { +func (s *Paren_action_exprContext) GetRuleContext() antlr.RuleContext { return s } -func (s *Array_access_procedure_exprContext) LBRACKET() antlr.TerminalNode { - return s.GetToken(KuneiformParserLBRACKET, 0) -} - -func (s *Array_access_procedure_exprContext) RBRACKET() antlr.TerminalNode { - return s.GetToken(KuneiformParserRBRACKET, 0) +func (s *Paren_action_exprContext) LPAREN() antlr.TerminalNode { + return s.GetToken(KuneiformParserLPAREN, 0) } -func (s *Array_access_procedure_exprContext) AllProcedure_expr() []IProcedure_exprContext { - children := s.GetChildren() - len := 0 - for _, ctx := range children { - if _, ok := ctx.(IProcedure_exprContext); ok { - len++ +func (s *Paren_action_exprContext) Action_expr() IAction_exprContext { + var t antlr.RuleContext + for _, ctx := range s.GetChildren() { + if _, ok := ctx.(IAction_exprContext); ok { + t = ctx.(antlr.RuleContext) + break } } - tst := make([]IProcedure_exprContext, len) - i := 0 - for _, ctx := range children { - 
if t, ok := ctx.(IProcedure_exprContext); ok { - tst[i] = t.(IProcedure_exprContext) - i++ - } + if t == nil { + return nil } - return tst + return t.(IAction_exprContext) +} + +func (s *Paren_action_exprContext) RPAREN() antlr.TerminalNode { + return s.GetToken(KuneiformParserRPAREN, 0) } -func (s *Array_access_procedure_exprContext) Procedure_expr(i int) IProcedure_exprContext { +func (s *Paren_action_exprContext) Type_cast() IType_castContext { var t antlr.RuleContext - j := 0 for _, ctx := range s.GetChildren() { - if _, ok := ctx.(IProcedure_exprContext); ok { - if j == i { - t = ctx.(antlr.RuleContext) - break - } - j++ + if _, ok := ctx.(IType_castContext); ok { + t = ctx.(antlr.RuleContext) + break } } @@ -17299,13 +19901,41 @@ func (s *Array_access_procedure_exprContext) Procedure_expr(i int) IProcedure_ex return nil } - return t.(IProcedure_exprContext) + return t.(IType_castContext) +} + +func (s *Paren_action_exprContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { + switch t := visitor.(type) { + case KuneiformParserVisitor: + return t.VisitParen_action_expr(s) + + default: + return t.VisitChildren(s) + } +} + +type Unary_action_exprContext struct { + Action_exprContext +} + +func NewUnary_action_exprContext(parser antlr.Parser, ctx antlr.ParserRuleContext) *Unary_action_exprContext { + var p = new(Unary_action_exprContext) + + InitEmptyAction_exprContext(&p.Action_exprContext) + p.parser = parser + p.CopyAll(ctx.(*Action_exprContext)) + + return p +} + +func (s *Unary_action_exprContext) GetRuleContext() antlr.RuleContext { + return s } -func (s *Array_access_procedure_exprContext) Type_cast() IType_castContext { +func (s *Unary_action_exprContext) Action_expr() IAction_exprContext { var t antlr.RuleContext for _, ctx := range s.GetChildren() { - if _, ok := ctx.(IType_castContext); ok { + if _, ok := ctx.(IAction_exprContext); ok { t = ctx.(antlr.RuleContext) break } @@ -17315,55 +19945,67 @@ func (s *Array_access_procedure_exprContext) Type_cast() IType_castContext { return nil } - return t.(IType_castContext) + return t.(IAction_exprContext) } -func (s *Array_access_procedure_exprContext) COL() antlr.TerminalNode { - return s.GetToken(KuneiformParserCOL, 0) +func (s *Unary_action_exprContext) PLUS() antlr.TerminalNode { + return s.GetToken(KuneiformParserPLUS, 0) +} + +func (s *Unary_action_exprContext) MINUS() antlr.TerminalNode { + return s.GetToken(KuneiformParserMINUS, 0) +} + +func (s *Unary_action_exprContext) EXCL() antlr.TerminalNode { + return s.GetToken(KuneiformParserEXCL, 0) +} + +func (s *Unary_action_exprContext) NOT() antlr.TerminalNode { + return s.GetToken(KuneiformParserNOT, 0) } -func (s *Array_access_procedure_exprContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { +func (s *Unary_action_exprContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { switch t := visitor.(type) { case KuneiformParserVisitor: - return t.VisitArray_access_procedure_expr(s) + return t.VisitUnary_action_expr(s) default: return t.VisitChildren(s) } } -func (p *KuneiformParser) Procedure_expr() (localctx IProcedure_exprContext) { - return p.procedure_expr(0) +func (p *KuneiformParser) Action_expr() (localctx IAction_exprContext) { + return p.action_expr(0) } -func (p *KuneiformParser) procedure_expr(_p int) (localctx IProcedure_exprContext) { +func (p *KuneiformParser) action_expr(_p int) (localctx IAction_exprContext) { var _parentctx antlr.ParserRuleContext = p.GetParserRuleContext() _parentState := p.GetState() - localctx = NewProcedure_exprContext(p, 
p.GetParserRuleContext(), _parentState) - var _prevctx IProcedure_exprContext = localctx + localctx = NewAction_exprContext(p, p.GetParserRuleContext(), _parentState) + var _prevctx IAction_exprContext = localctx var _ antlr.ParserRuleContext = _prevctx // TODO: To prevent unused variable warning. - _startState := 102 - p.EnterRecursionRule(localctx, 102, KuneiformParserRULE_procedure_expr, _p) + _startState := 110 + p.EnterRecursionRule(localctx, 110, KuneiformParserRULE_action_expr, _p) var _la int var _alt int p.EnterOuterAlt(localctx, 1) - p.SetState(975) + p.SetState(1140) p.GetErrorHandler().Sync(p) if p.HasError() { goto errorExit } - switch p.GetInterpreter().AdaptivePredict(p.BaseParser, p.GetTokenStream(), 140, p.GetParserRuleContext()) { + switch p.GetInterpreter().AdaptivePredict(p.BaseParser, p.GetTokenStream(), 158, p.GetParserRuleContext()) { case 1: - localctx = NewParen_procedure_exprContext(p, localctx) + localctx = NewParen_action_exprContext(p, localctx) p.SetParserRuleContext(localctx) _prevctx = localctx { - p.SetState(945) + p.SetState(1107) p.Match(KuneiformParserLPAREN) if p.HasError() { // Recognition error - abort rule @@ -17371,23 +20013,23 @@ func (p *KuneiformParser) procedure_expr(_p int) (localctx IProcedure_exprContex } } { - p.SetState(946) - p.procedure_expr(0) + p.SetState(1108) + p.action_expr(0) } { - p.SetState(947) + p.SetState(1109) p.Match(KuneiformParserRPAREN) if p.HasError() { // Recognition error - abort rule goto errorExit } } - p.SetState(949) + p.SetState(1111) p.GetErrorHandler().Sync(p) - if p.GetInterpreter().AdaptivePredict(p.BaseParser, p.GetTokenStream(), 134, p.GetParserRuleContext()) == 1 { + if p.GetInterpreter().AdaptivePredict(p.BaseParser, p.GetTokenStream(), 151, p.GetParserRuleContext()) == 1 { { - p.SetState(948) + p.SetState(1110) p.Type_cast() } @@ -17396,11 +20038,11 @@ func (p *KuneiformParser) procedure_expr(_p int) (localctx IProcedure_exprContex } case 2: - localctx = NewUnary_procedure_exprContext(p, localctx) + localctx = NewUnary_action_exprContext(p, localctx) p.SetParserRuleContext(localctx) _prevctx = localctx { - p.SetState(951) + p.SetState(1113) _la = p.GetTokenStream().LA(1) if !((int64(_la) & ^0x3f) == 0 && ((int64(1)<<_la)&3147776) != 0) { @@ -17411,24 +20053,24 @@ func (p *KuneiformParser) procedure_expr(_p int) (localctx IProcedure_exprContex } } { - p.SetState(952) - p.procedure_expr(13) + p.SetState(1114) + p.action_expr(13) } case 3: - localctx = NewLiteral_procedure_exprContext(p, localctx) + localctx = NewLiteral_action_exprContext(p, localctx) p.SetParserRuleContext(localctx) _prevctx = localctx { - p.SetState(953) + p.SetState(1115) p.Literal() } - p.SetState(955) + p.SetState(1117) p.GetErrorHandler().Sync(p) - if p.GetInterpreter().AdaptivePredict(p.BaseParser, p.GetTokenStream(), 135, p.GetParserRuleContext()) == 1 { + if p.GetInterpreter().AdaptivePredict(p.BaseParser, p.GetTokenStream(), 152, p.GetParserRuleContext()) == 1 { { - p.SetState(954) + p.SetState(1116) p.Type_cast() } @@ -17437,19 +20079,19 @@ func (p *KuneiformParser) procedure_expr(_p int) (localctx IProcedure_exprContex } case 4: - localctx = NewFunction_call_procedure_exprContext(p, localctx) + localctx = NewFunction_call_action_exprContext(p, localctx) p.SetParserRuleContext(localctx) _prevctx = localctx { - p.SetState(957) - p.Procedure_function_call() + p.SetState(1119) + p.Action_function_call() } - p.SetState(959) + p.SetState(1121) p.GetErrorHandler().Sync(p) - if p.GetInterpreter().AdaptivePredict(p.BaseParser, 
p.GetTokenStream(), 136, p.GetParserRuleContext()) == 1 { + if p.GetInterpreter().AdaptivePredict(p.BaseParser, p.GetTokenStream(), 153, p.GetParserRuleContext()) == 1 { { - p.SetState(958) + p.SetState(1120) p.Type_cast() } @@ -17458,19 +20100,19 @@ func (p *KuneiformParser) procedure_expr(_p int) (localctx IProcedure_exprContex } case 5: - localctx = NewVariable_procedure_exprContext(p, localctx) + localctx = NewVariable_action_exprContext(p, localctx) p.SetParserRuleContext(localctx) _prevctx = localctx { - p.SetState(961) + p.SetState(1123) p.Variable() } - p.SetState(963) + p.SetState(1125) p.GetErrorHandler().Sync(p) - if p.GetInterpreter().AdaptivePredict(p.BaseParser, p.GetTokenStream(), 137, p.GetParserRuleContext()) == 1 { + if p.GetInterpreter().AdaptivePredict(p.BaseParser, p.GetTokenStream(), 154, p.GetParserRuleContext()) == 1 { { - p.SetState(962) + p.SetState(1124) p.Type_cast() } @@ -17479,45 +20121,63 @@ func (p *KuneiformParser) procedure_expr(_p int) (localctx IProcedure_exprContex } case 6: - localctx = NewMake_array_procedure_exprContext(p, localctx) + localctx = NewMake_array_action_exprContext(p, localctx) p.SetParserRuleContext(localctx) _prevctx = localctx + p.SetState(1128) + p.GetErrorHandler().Sync(p) + if p.HasError() { + goto errorExit + } + _la = p.GetTokenStream().LA(1) + + if _la == KuneiformParserARRAY { + { + p.SetState(1127) + p.Match(KuneiformParserARRAY) + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } + } + + } { - p.SetState(965) + p.SetState(1130) p.Match(KuneiformParserLBRACKET) if p.HasError() { // Recognition error - abort rule goto errorExit } } - p.SetState(967) + p.SetState(1132) p.GetErrorHandler().Sync(p) if p.HasError() { goto errorExit } _la = p.GetTokenStream().LA(1) - if ((int64(_la) & ^0x3f) == 0 && ((int64(1)<<_la)&148618787706374280) != 0) || ((int64((_la-114)) & ^0x3f) == 0 && ((int64(1)<<(_la-114))&14367) != 0) { + if ((int64(_la) & ^0x3f) == 0 && ((int64(1)<<_la)&2377900607549737096) != 0) || ((int64((_la-130)) & ^0x3f) == 0 && ((int64(1)<<(_la-130))&229873) != 0) { { - p.SetState(966) - p.Procedure_expr_list() + p.SetState(1131) + p.Action_expr_list() } } { - p.SetState(969) + p.SetState(1134) p.Match(KuneiformParserRBRACKET) if p.HasError() { // Recognition error - abort rule goto errorExit } } - p.SetState(971) + p.SetState(1136) p.GetErrorHandler().Sync(p) - if p.GetInterpreter().AdaptivePredict(p.BaseParser, p.GetTokenStream(), 139, p.GetParserRuleContext()) == 1 { + if p.GetInterpreter().AdaptivePredict(p.BaseParser, p.GetTokenStream(), 157, p.GetParserRuleContext()) == 1 { { - p.SetState(970) + p.SetState(1135) p.Type_cast() } @@ -17526,12 +20186,12 @@ func (p *KuneiformParser) procedure_expr(_p int) (localctx IProcedure_exprContex } case 7: - localctx = NewUnary_procedure_exprContext(p, localctx) + localctx = NewUnary_action_exprContext(p, localctx) p.SetParserRuleContext(localctx) _prevctx = localctx { - p.SetState(973) + p.SetState(1138) p.Match(KuneiformParserNOT) if p.HasError() { // Recognition error - abort rule @@ -17540,20 +20200,20 @@ func (p *KuneiformParser) procedure_expr(_p int) (localctx IProcedure_exprContex } { - p.SetState(974) - p.procedure_expr(3) + p.SetState(1139) + p.action_expr(3) } case antlr.ATNInvalidAltNumber: goto errorExit } p.GetParserRuleContext().SetStop(p.GetTokenStream().LT(-1)) - p.SetState(1032) + p.SetState(1197) p.GetErrorHandler().Sync(p) if p.HasError() { goto errorExit } - _alt = p.GetInterpreter().AdaptivePredict(p.BaseParser, p.GetTokenStream(), 149, 
p.GetParserRuleContext()) + _alt = p.GetInterpreter().AdaptivePredict(p.BaseParser, p.GetTokenStream(), 167, p.GetParserRuleContext()) if p.HasError() { goto errorExit } @@ -17563,24 +20223,24 @@ func (p *KuneiformParser) procedure_expr(_p int) (localctx IProcedure_exprContex p.TriggerExitRuleEvent() } _prevctx = localctx - p.SetState(1030) + p.SetState(1195) p.GetErrorHandler().Sync(p) if p.HasError() { goto errorExit } - switch p.GetInterpreter().AdaptivePredict(p.BaseParser, p.GetTokenStream(), 148, p.GetParserRuleContext()) { + switch p.GetInterpreter().AdaptivePredict(p.BaseParser, p.GetTokenStream(), 166, p.GetParserRuleContext()) { case 1: - localctx = NewProcedure_expr_arithmeticContext(p, NewProcedure_exprContext(p, _parentctx, _parentState)) - p.PushNewRecursionContext(localctx, _startState, KuneiformParserRULE_procedure_expr) - p.SetState(977) + localctx = NewAction_expr_arithmeticContext(p, NewAction_exprContext(p, _parentctx, _parentState)) + p.PushNewRecursionContext(localctx, _startState, KuneiformParserRULE_action_expr) + p.SetState(1142) if !(p.Precpred(p.GetParserRuleContext(), 12)) { p.SetError(antlr.NewFailedPredicateException(p, "p.Precpred(p.GetParserRuleContext(), 12)", "")) goto errorExit } { - p.SetState(978) + p.SetState(1143) _la = p.GetTokenStream().LA(1) if !((int64(_la) & ^0x3f) == 0 && ((int64(1)<<_la)&4734976) != 0) { @@ -17591,21 +20251,21 @@ func (p *KuneiformParser) procedure_expr(_p int) (localctx IProcedure_exprContex } } { - p.SetState(979) - p.procedure_expr(13) + p.SetState(1144) + p.action_expr(13) } case 2: - localctx = NewProcedure_expr_arithmeticContext(p, NewProcedure_exprContext(p, _parentctx, _parentState)) - p.PushNewRecursionContext(localctx, _startState, KuneiformParserRULE_procedure_expr) - p.SetState(980) + localctx = NewAction_expr_arithmeticContext(p, NewAction_exprContext(p, _parentctx, _parentState)) + p.PushNewRecursionContext(localctx, _startState, KuneiformParserRULE_action_expr) + p.SetState(1145) if !(p.Precpred(p.GetParserRuleContext(), 11)) { p.SetError(antlr.NewFailedPredicateException(p, "p.Precpred(p.GetParserRuleContext(), 11)", "")) goto errorExit } { - p.SetState(981) + p.SetState(1146) _la = p.GetTokenStream().LA(1) if !(_la == KuneiformParserPLUS || _la == KuneiformParserMINUS) { @@ -17616,21 +20276,21 @@ func (p *KuneiformParser) procedure_expr(_p int) (localctx IProcedure_exprContex } } { - p.SetState(982) - p.procedure_expr(12) + p.SetState(1147) + p.action_expr(12) } case 3: - localctx = NewProcedure_expr_arithmeticContext(p, NewProcedure_exprContext(p, _parentctx, _parentState)) - p.PushNewRecursionContext(localctx, _startState, KuneiformParserRULE_procedure_expr) - p.SetState(983) + localctx = NewAction_expr_arithmeticContext(p, NewAction_exprContext(p, _parentctx, _parentState)) + p.PushNewRecursionContext(localctx, _startState, KuneiformParserRULE_action_expr) + p.SetState(1148) if !(p.Precpred(p.GetParserRuleContext(), 6)) { p.SetError(antlr.NewFailedPredicateException(p, "p.Precpred(p.GetParserRuleContext(), 6)", "")) goto errorExit } { - p.SetState(984) + p.SetState(1149) p.Match(KuneiformParserCONCAT) if p.HasError() { // Recognition error - abort rule @@ -17638,21 +20298,21 @@ func (p *KuneiformParser) procedure_expr(_p int) (localctx IProcedure_exprContex } } { - p.SetState(985) - p.procedure_expr(7) + p.SetState(1150) + p.action_expr(7) } case 4: - localctx = NewComparison_procedure_exprContext(p, NewProcedure_exprContext(p, _parentctx, _parentState)) - p.PushNewRecursionContext(localctx, _startState, 
KuneiformParserRULE_procedure_expr) - p.SetState(986) + localctx = NewComparison_action_exprContext(p, NewAction_exprContext(p, _parentctx, _parentState)) + p.PushNewRecursionContext(localctx, _startState, KuneiformParserRULE_action_expr) + p.SetState(1151) if !(p.Precpred(p.GetParserRuleContext(), 5)) { p.SetError(antlr.NewFailedPredicateException(p, "p.Precpred(p.GetParserRuleContext(), 5)", "")) goto errorExit } { - p.SetState(987) + p.SetState(1152) _la = p.GetTokenStream().LA(1) if !((int64(_la) & ^0x3f) == 0 && ((int64(1)<<_la)&260145152) != 0) { @@ -17663,21 +20323,21 @@ func (p *KuneiformParser) procedure_expr(_p int) (localctx IProcedure_exprContex } } { - p.SetState(988) - p.procedure_expr(6) + p.SetState(1153) + p.action_expr(6) } case 5: - localctx = NewLogical_procedure_exprContext(p, NewProcedure_exprContext(p, _parentctx, _parentState)) - p.PushNewRecursionContext(localctx, _startState, KuneiformParserRULE_procedure_expr) - p.SetState(989) + localctx = NewLogical_action_exprContext(p, NewAction_exprContext(p, _parentctx, _parentState)) + p.PushNewRecursionContext(localctx, _startState, KuneiformParserRULE_action_expr) + p.SetState(1154) if !(p.Precpred(p.GetParserRuleContext(), 2)) { p.SetError(antlr.NewFailedPredicateException(p, "p.Precpred(p.GetParserRuleContext(), 2)", "")) goto errorExit } { - p.SetState(990) + p.SetState(1155) p.Match(KuneiformParserAND) if p.HasError() { // Recognition error - abort rule @@ -17685,21 +20345,21 @@ func (p *KuneiformParser) procedure_expr(_p int) (localctx IProcedure_exprContex } } { - p.SetState(991) - p.procedure_expr(3) + p.SetState(1156) + p.action_expr(3) } case 6: - localctx = NewLogical_procedure_exprContext(p, NewProcedure_exprContext(p, _parentctx, _parentState)) - p.PushNewRecursionContext(localctx, _startState, KuneiformParserRULE_procedure_expr) - p.SetState(992) + localctx = NewLogical_action_exprContext(p, NewAction_exprContext(p, _parentctx, _parentState)) + p.PushNewRecursionContext(localctx, _startState, KuneiformParserRULE_action_expr) + p.SetState(1157) if !(p.Precpred(p.GetParserRuleContext(), 1)) { p.SetError(antlr.NewFailedPredicateException(p, "p.Precpred(p.GetParserRuleContext(), 1)", "")) goto errorExit } { - p.SetState(993) + p.SetState(1158) p.Match(KuneiformParserOR) if p.HasError() { // Recognition error - abort rule @@ -17707,21 +20367,21 @@ func (p *KuneiformParser) procedure_expr(_p int) (localctx IProcedure_exprContex } } { - p.SetState(994) - p.procedure_expr(2) + p.SetState(1159) + p.action_expr(2) } case 7: - localctx = NewField_access_procedure_exprContext(p, NewProcedure_exprContext(p, _parentctx, _parentState)) - p.PushNewRecursionContext(localctx, _startState, KuneiformParserRULE_procedure_expr) - p.SetState(995) + localctx = NewField_access_action_exprContext(p, NewAction_exprContext(p, _parentctx, _parentState)) + p.PushNewRecursionContext(localctx, _startState, KuneiformParserRULE_action_expr) + p.SetState(1160) if !(p.Precpred(p.GetParserRuleContext(), 15)) { p.SetError(antlr.NewFailedPredicateException(p, "p.Precpred(p.GetParserRuleContext(), 15)", "")) goto errorExit } { - p.SetState(996) + p.SetState(1161) p.Match(KuneiformParserPERIOD) if p.HasError() { // Recognition error - abort rule @@ -17729,19 +20389,15 @@ func (p *KuneiformParser) procedure_expr(_p int) (localctx IProcedure_exprContex } } { - p.SetState(997) - p.Match(KuneiformParserIDENTIFIER) - if p.HasError() { - // Recognition error - abort rule - goto errorExit - } + p.SetState(1162) + p.Identifier() } - p.SetState(999) + 
p.SetState(1164) p.GetErrorHandler().Sync(p) - if p.GetInterpreter().AdaptivePredict(p.BaseParser, p.GetTokenStream(), 141, p.GetParserRuleContext()) == 1 { + if p.GetInterpreter().AdaptivePredict(p.BaseParser, p.GetTokenStream(), 159, p.GetParserRuleContext()) == 1 { { - p.SetState(998) + p.SetState(1163) p.Type_cast() } @@ -17750,80 +20406,80 @@ func (p *KuneiformParser) procedure_expr(_p int) (localctx IProcedure_exprContex } case 8: - localctx = NewArray_access_procedure_exprContext(p, NewProcedure_exprContext(p, _parentctx, _parentState)) - localctx.(*Array_access_procedure_exprContext).array_element = _prevctx + localctx = NewArray_access_action_exprContext(p, NewAction_exprContext(p, _parentctx, _parentState)) + localctx.(*Array_access_action_exprContext).array_element = _prevctx - p.PushNewRecursionContext(localctx, _startState, KuneiformParserRULE_procedure_expr) - p.SetState(1001) + p.PushNewRecursionContext(localctx, _startState, KuneiformParserRULE_action_expr) + p.SetState(1166) if !(p.Precpred(p.GetParserRuleContext(), 14)) { p.SetError(antlr.NewFailedPredicateException(p, "p.Precpred(p.GetParserRuleContext(), 14)", "")) goto errorExit } { - p.SetState(1002) + p.SetState(1167) p.Match(KuneiformParserLBRACKET) if p.HasError() { // Recognition error - abort rule goto errorExit } } - p.SetState(1011) + p.SetState(1176) p.GetErrorHandler().Sync(p) if p.HasError() { goto errorExit } - switch p.GetInterpreter().AdaptivePredict(p.BaseParser, p.GetTokenStream(), 144, p.GetParserRuleContext()) { + switch p.GetInterpreter().AdaptivePredict(p.BaseParser, p.GetTokenStream(), 162, p.GetParserRuleContext()) { case 1: { - p.SetState(1003) + p.SetState(1168) - var _x = p.procedure_expr(0) + var _x = p.action_expr(0) - localctx.(*Array_access_procedure_exprContext).single = _x + localctx.(*Array_access_action_exprContext).single = _x } case 2: - p.SetState(1005) + p.SetState(1170) p.GetErrorHandler().Sync(p) if p.HasError() { goto errorExit } _la = p.GetTokenStream().LA(1) - if ((int64(_la) & ^0x3f) == 0 && ((int64(1)<<_la)&148618787706374280) != 0) || ((int64((_la-114)) & ^0x3f) == 0 && ((int64(1)<<(_la-114))&14367) != 0) { + if ((int64(_la) & ^0x3f) == 0 && ((int64(1)<<_la)&2377900607549737096) != 0) || ((int64((_la-130)) & ^0x3f) == 0 && ((int64(1)<<(_la-130))&229873) != 0) { { - p.SetState(1004) + p.SetState(1169) - var _x = p.procedure_expr(0) + var _x = p.action_expr(0) - localctx.(*Array_access_procedure_exprContext).left = _x + localctx.(*Array_access_action_exprContext).left = _x } } { - p.SetState(1007) + p.SetState(1172) p.Match(KuneiformParserCOL) if p.HasError() { // Recognition error - abort rule goto errorExit } } - p.SetState(1009) + p.SetState(1174) p.GetErrorHandler().Sync(p) if p.HasError() { goto errorExit } _la = p.GetTokenStream().LA(1) - if ((int64(_la) & ^0x3f) == 0 && ((int64(1)<<_la)&148618787706374280) != 0) || ((int64((_la-114)) & ^0x3f) == 0 && ((int64(1)<<(_la-114))&14367) != 0) { + if ((int64(_la) & ^0x3f) == 0 && ((int64(1)<<_la)&2377900607549737096) != 0) || ((int64((_la-130)) & ^0x3f) == 0 && ((int64(1)<<(_la-130))&229873) != 0) { { - p.SetState(1008) + p.SetState(1173) - var _x = p.procedure_expr(0) + var _x = p.action_expr(0) - localctx.(*Array_access_procedure_exprContext).right = _x + localctx.(*Array_access_action_exprContext).right = _x } } @@ -17832,19 +20488,19 @@ func (p *KuneiformParser) procedure_expr(_p int) (localctx IProcedure_exprContex goto errorExit } { - p.SetState(1013) + p.SetState(1178) p.Match(KuneiformParserRBRACKET) if p.HasError() { 
// Recognition error - abort rule goto errorExit } } - p.SetState(1015) + p.SetState(1180) p.GetErrorHandler().Sync(p) - if p.GetInterpreter().AdaptivePredict(p.BaseParser, p.GetTokenStream(), 145, p.GetParserRuleContext()) == 1 { + if p.GetInterpreter().AdaptivePredict(p.BaseParser, p.GetTokenStream(), 163, p.GetParserRuleContext()) == 1 { { - p.SetState(1014) + p.SetState(1179) p.Type_cast() } @@ -17853,25 +20509,25 @@ func (p *KuneiformParser) procedure_expr(_p int) (localctx IProcedure_exprContex } case 9: - localctx = NewIs_procedure_exprContext(p, NewProcedure_exprContext(p, _parentctx, _parentState)) - localctx.(*Is_procedure_exprContext).left = _prevctx + localctx = NewIs_action_exprContext(p, NewAction_exprContext(p, _parentctx, _parentState)) + localctx.(*Is_action_exprContext).left = _prevctx - p.PushNewRecursionContext(localctx, _startState, KuneiformParserRULE_procedure_expr) - p.SetState(1017) + p.PushNewRecursionContext(localctx, _startState, KuneiformParserRULE_action_expr) + p.SetState(1182) if !(p.Precpred(p.GetParserRuleContext(), 4)) { p.SetError(antlr.NewFailedPredicateException(p, "p.Precpred(p.GetParserRuleContext(), 4)", "")) goto errorExit } { - p.SetState(1018) + p.SetState(1183) p.Match(KuneiformParserIS) if p.HasError() { // Recognition error - abort rule goto errorExit } } - p.SetState(1020) + p.SetState(1185) p.GetErrorHandler().Sync(p) if p.HasError() { goto errorExit @@ -17880,7 +20536,7 @@ func (p *KuneiformParser) procedure_expr(_p int) (localctx IProcedure_exprContex if _la == KuneiformParserNOT { { - p.SetState(1019) + p.SetState(1184) p.Match(KuneiformParserNOT) if p.HasError() { // Recognition error - abort rule @@ -17889,7 +20545,7 @@ func (p *KuneiformParser) procedure_expr(_p int) (localctx IProcedure_exprContex } } - p.SetState(1028) + p.SetState(1193) p.GetErrorHandler().Sync(p) if p.HasError() { goto errorExit @@ -17898,7 +20554,7 @@ func (p *KuneiformParser) procedure_expr(_p int) (localctx IProcedure_exprContex switch p.GetTokenStream().LA(1) { case KuneiformParserDISTINCT: { - p.SetState(1022) + p.SetState(1187) p.Match(KuneiformParserDISTINCT) if p.HasError() { // Recognition error - abort rule @@ -17906,7 +20562,7 @@ func (p *KuneiformParser) procedure_expr(_p int) (localctx IProcedure_exprContex } } { - p.SetState(1023) + p.SetState(1188) p.Match(KuneiformParserFROM) if p.HasError() { // Recognition error - abort rule @@ -17914,16 +20570,16 @@ func (p *KuneiformParser) procedure_expr(_p int) (localctx IProcedure_exprContex } } { - p.SetState(1024) + p.SetState(1189) - var _x = p.procedure_expr(0) + var _x = p.action_expr(0) - localctx.(*Is_procedure_exprContext).right = _x + localctx.(*Is_action_exprContext).right = _x } case KuneiformParserNULL: { - p.SetState(1025) + p.SetState(1190) p.Match(KuneiformParserNULL) if p.HasError() { // Recognition error - abort rule @@ -17933,7 +20589,7 @@ func (p *KuneiformParser) procedure_expr(_p int) (localctx IProcedure_exprContex case KuneiformParserTRUE: { - p.SetState(1026) + p.SetState(1191) p.Match(KuneiformParserTRUE) if p.HasError() { // Recognition error - abort rule @@ -17943,7 +20599,7 @@ func (p *KuneiformParser) procedure_expr(_p int) (localctx IProcedure_exprContex case KuneiformParserFALSE: { - p.SetState(1027) + p.SetState(1192) p.Match(KuneiformParserFALSE) if p.HasError() { // Recognition error - abort rule @@ -17961,12 +20617,12 @@ func (p *KuneiformParser) procedure_expr(_p int) (localctx IProcedure_exprContex } } - p.SetState(1034) + p.SetState(1199) p.GetErrorHandler().Sync(p) if 
p.HasError() { goto errorExit } - _alt = p.GetInterpreter().AdaptivePredict(p.BaseParser, p.GetTokenStream(), 149, p.GetParserRuleContext()) + _alt = p.GetInterpreter().AdaptivePredict(p.BaseParser, p.GetTokenStream(), 167, p.GetParserRuleContext()) if p.HasError() { goto errorExit } @@ -17985,69 +20641,69 @@ errorExit: goto errorExit // Trick to prevent compiler error if the label is not used } -// IProcedure_expr_listContext is an interface to support dynamic dispatch. -type IProcedure_expr_listContext interface { +// IAction_expr_listContext is an interface to support dynamic dispatch. +type IAction_expr_listContext interface { antlr.ParserRuleContext // GetParser returns the parser. GetParser() antlr.Parser // Getter signatures - AllProcedure_expr() []IProcedure_exprContext - Procedure_expr(i int) IProcedure_exprContext + AllAction_expr() []IAction_exprContext + Action_expr(i int) IAction_exprContext AllCOMMA() []antlr.TerminalNode COMMA(i int) antlr.TerminalNode - // IsProcedure_expr_listContext differentiates from other interfaces. - IsProcedure_expr_listContext() + // IsAction_expr_listContext differentiates from other interfaces. + IsAction_expr_listContext() } -type Procedure_expr_listContext struct { +type Action_expr_listContext struct { antlr.BaseParserRuleContext parser antlr.Parser } -func NewEmptyProcedure_expr_listContext() *Procedure_expr_listContext { - var p = new(Procedure_expr_listContext) +func NewEmptyAction_expr_listContext() *Action_expr_listContext { + var p = new(Action_expr_listContext) antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1) - p.RuleIndex = KuneiformParserRULE_procedure_expr_list + p.RuleIndex = KuneiformParserRULE_action_expr_list return p } -func InitEmptyProcedure_expr_listContext(p *Procedure_expr_listContext) { +func InitEmptyAction_expr_listContext(p *Action_expr_listContext) { antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1) - p.RuleIndex = KuneiformParserRULE_procedure_expr_list + p.RuleIndex = KuneiformParserRULE_action_expr_list } -func (*Procedure_expr_listContext) IsProcedure_expr_listContext() {} +func (*Action_expr_listContext) IsAction_expr_listContext() {} -func NewProcedure_expr_listContext(parser antlr.Parser, parent antlr.ParserRuleContext, invokingState int) *Procedure_expr_listContext { - var p = new(Procedure_expr_listContext) +func NewAction_expr_listContext(parser antlr.Parser, parent antlr.ParserRuleContext, invokingState int) *Action_expr_listContext { + var p = new(Action_expr_listContext) antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, parent, invokingState) p.parser = parser - p.RuleIndex = KuneiformParserRULE_procedure_expr_list + p.RuleIndex = KuneiformParserRULE_action_expr_list return p } -func (s *Procedure_expr_listContext) GetParser() antlr.Parser { return s.parser } +func (s *Action_expr_listContext) GetParser() antlr.Parser { return s.parser } -func (s *Procedure_expr_listContext) AllProcedure_expr() []IProcedure_exprContext { +func (s *Action_expr_listContext) AllAction_expr() []IAction_exprContext { children := s.GetChildren() len := 0 for _, ctx := range children { - if _, ok := ctx.(IProcedure_exprContext); ok { + if _, ok := ctx.(IAction_exprContext); ok { len++ } } - tst := make([]IProcedure_exprContext, len) + tst := make([]IAction_exprContext, len) i := 0 for _, ctx := range children { - if t, ok := ctx.(IProcedure_exprContext); ok { - tst[i] = t.(IProcedure_exprContext) + if t, ok := ctx.(IAction_exprContext); ok { + tst[i] = t.(IAction_exprContext) i++ } } @@ 
-18055,11 +20711,11 @@ func (s *Procedure_expr_listContext) AllProcedure_expr() []IProcedure_exprContex return tst } -func (s *Procedure_expr_listContext) Procedure_expr(i int) IProcedure_exprContext { +func (s *Action_expr_listContext) Action_expr(i int) IAction_exprContext { var t antlr.RuleContext j := 0 for _, ctx := range s.GetChildren() { - if _, ok := ctx.(IProcedure_exprContext); ok { + if _, ok := ctx.(IAction_exprContext); ok { if j == i { t = ctx.(antlr.RuleContext) break @@ -18072,46 +20728,46 @@ func (s *Procedure_expr_listContext) Procedure_expr(i int) IProcedure_exprContex return nil } - return t.(IProcedure_exprContext) + return t.(IAction_exprContext) } -func (s *Procedure_expr_listContext) AllCOMMA() []antlr.TerminalNode { +func (s *Action_expr_listContext) AllCOMMA() []antlr.TerminalNode { return s.GetTokens(KuneiformParserCOMMA) } -func (s *Procedure_expr_listContext) COMMA(i int) antlr.TerminalNode { +func (s *Action_expr_listContext) COMMA(i int) antlr.TerminalNode { return s.GetToken(KuneiformParserCOMMA, i) } -func (s *Procedure_expr_listContext) GetRuleContext() antlr.RuleContext { +func (s *Action_expr_listContext) GetRuleContext() antlr.RuleContext { return s } -func (s *Procedure_expr_listContext) ToStringTree(ruleNames []string, recog antlr.Recognizer) string { +func (s *Action_expr_listContext) ToStringTree(ruleNames []string, recog antlr.Recognizer) string { return antlr.TreesStringTree(s, ruleNames, recog) } -func (s *Procedure_expr_listContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { +func (s *Action_expr_listContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { switch t := visitor.(type) { case KuneiformParserVisitor: - return t.VisitProcedure_expr_list(s) + return t.VisitAction_expr_list(s) default: return t.VisitChildren(s) } } -func (p *KuneiformParser) Procedure_expr_list() (localctx IProcedure_expr_listContext) { - localctx = NewProcedure_expr_listContext(p, p.GetParserRuleContext(), p.GetState()) - p.EnterRule(localctx, 104, KuneiformParserRULE_procedure_expr_list) +func (p *KuneiformParser) Action_expr_list() (localctx IAction_expr_listContext) { + localctx = NewAction_expr_listContext(p, p.GetParserRuleContext(), p.GetState()) + p.EnterRule(localctx, 112, KuneiformParserRULE_action_expr_list) var _la int p.EnterOuterAlt(localctx, 1) { - p.SetState(1035) - p.procedure_expr(0) + p.SetState(1200) + p.action_expr(0) } - p.SetState(1040) + p.SetState(1205) p.GetErrorHandler().Sync(p) if p.HasError() { goto errorExit @@ -18120,7 +20776,7 @@ func (p *KuneiformParser) Procedure_expr_list() (localctx IProcedure_expr_listCo for _la == KuneiformParserCOMMA { { - p.SetState(1036) + p.SetState(1201) p.Match(KuneiformParserCOMMA) if p.HasError() { // Recognition error - abort rule @@ -18128,11 +20784,11 @@ func (p *KuneiformParser) Procedure_expr_list() (localctx IProcedure_expr_listCo } } { - p.SetState(1037) - p.procedure_expr(0) + p.SetState(1202) + p.action_expr(0) } - p.SetState(1042) + p.SetState(1207) p.GetErrorHandler().Sync(p) if p.HasError() { goto errorExit @@ -18153,70 +20809,70 @@ errorExit: goto errorExit // Trick to prevent compiler error if the label is not used } -// IProc_statementContext is an interface to support dynamic dispatch. -type IProc_statementContext interface { +// IAction_statementContext is an interface to support dynamic dispatch. +type IAction_statementContext interface { antlr.ParserRuleContext // GetParser returns the parser. 
GetParser() antlr.Parser - // IsProc_statementContext differentiates from other interfaces. - IsProc_statementContext() + // IsAction_statementContext differentiates from other interfaces. + IsAction_statementContext() } -type Proc_statementContext struct { +type Action_statementContext struct { antlr.BaseParserRuleContext parser antlr.Parser } -func NewEmptyProc_statementContext() *Proc_statementContext { - var p = new(Proc_statementContext) +func NewEmptyAction_statementContext() *Action_statementContext { + var p = new(Action_statementContext) antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1) - p.RuleIndex = KuneiformParserRULE_proc_statement + p.RuleIndex = KuneiformParserRULE_action_statement return p } -func InitEmptyProc_statementContext(p *Proc_statementContext) { +func InitEmptyAction_statementContext(p *Action_statementContext) { antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1) - p.RuleIndex = KuneiformParserRULE_proc_statement + p.RuleIndex = KuneiformParserRULE_action_statement } -func (*Proc_statementContext) IsProc_statementContext() {} +func (*Action_statementContext) IsAction_statementContext() {} -func NewProc_statementContext(parser antlr.Parser, parent antlr.ParserRuleContext, invokingState int) *Proc_statementContext { - var p = new(Proc_statementContext) +func NewAction_statementContext(parser antlr.Parser, parent antlr.ParserRuleContext, invokingState int) *Action_statementContext { + var p = new(Action_statementContext) antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, parent, invokingState) p.parser = parser - p.RuleIndex = KuneiformParserRULE_proc_statement + p.RuleIndex = KuneiformParserRULE_action_statement return p } -func (s *Proc_statementContext) GetParser() antlr.Parser { return s.parser } +func (s *Action_statementContext) GetParser() antlr.Parser { return s.parser } -func (s *Proc_statementContext) CopyAll(ctx *Proc_statementContext) { +func (s *Action_statementContext) CopyAll(ctx *Action_statementContext) { s.CopyFrom(&ctx.BaseParserRuleContext) } -func (s *Proc_statementContext) GetRuleContext() antlr.RuleContext { +func (s *Action_statementContext) GetRuleContext() antlr.RuleContext { return s } -func (s *Proc_statementContext) ToStringTree(ruleNames []string, recog antlr.Recognizer) string { +func (s *Action_statementContext) ToStringTree(ruleNames []string, recog antlr.Recognizer) string { return antlr.TreesStringTree(s, ruleNames, recog) } type Stmt_ifContext struct { - Proc_statementContext + Action_statementContext } func NewStmt_ifContext(parser antlr.Parser, ctx antlr.ParserRuleContext) *Stmt_ifContext { var p = new(Stmt_ifContext) - InitEmptyProc_statementContext(&p.Proc_statementContext) + InitEmptyAction_statementContext(&p.Action_statementContext) p.parser = parser - p.CopyAll(ctx.(*Proc_statementContext)) + p.CopyAll(ctx.(*Action_statementContext)) return p } @@ -18290,20 +20946,24 @@ func (s *Stmt_ifContext) RBRACE() antlr.TerminalNode { return s.GetToken(KuneiformParserRBRACE, 0) } -func (s *Stmt_ifContext) AllProc_statement() []IProc_statementContext { +func (s *Stmt_ifContext) SCOL() antlr.TerminalNode { + return s.GetToken(KuneiformParserSCOL, 0) +} + +func (s *Stmt_ifContext) AllAction_statement() []IAction_statementContext { children := s.GetChildren() len := 0 for _, ctx := range children { - if _, ok := ctx.(IProc_statementContext); ok { + if _, ok := ctx.(IAction_statementContext); ok { len++ } } - tst := make([]IProc_statementContext, len) + tst := make([]IAction_statementContext, len) i 
:= 0 for _, ctx := range children { - if t, ok := ctx.(IProc_statementContext); ok { - tst[i] = t.(IProc_statementContext) + if t, ok := ctx.(IAction_statementContext); ok { + tst[i] = t.(IAction_statementContext) i++ } } @@ -18311,11 +20971,11 @@ func (s *Stmt_ifContext) AllProc_statement() []IProc_statementContext { return tst } -func (s *Stmt_ifContext) Proc_statement(i int) IProc_statementContext { +func (s *Stmt_ifContext) Action_statement(i int) IAction_statementContext { var t antlr.RuleContext j := 0 for _, ctx := range s.GetChildren() { - if _, ok := ctx.(IProc_statementContext); ok { + if _, ok := ctx.(IAction_statementContext); ok { if j == i { t = ctx.(antlr.RuleContext) break @@ -18328,7 +20988,7 @@ func (s *Stmt_ifContext) Proc_statement(i int) IProc_statementContext { return nil } - return t.(IProc_statementContext) + return t.(IAction_statementContext) } func (s *Stmt_ifContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { @@ -18342,15 +21002,15 @@ func (s *Stmt_ifContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { } type Stmt_breakContext struct { - Proc_statementContext + Action_statementContext } func NewStmt_breakContext(parser antlr.Parser, ctx antlr.ParserRuleContext) *Stmt_breakContext { var p = new(Stmt_breakContext) - InitEmptyProc_statementContext(&p.Proc_statementContext) + InitEmptyAction_statementContext(&p.Action_statementContext) p.parser = parser - p.CopyAll(ctx.(*Proc_statementContext)) + p.CopyAll(ctx.(*Action_statementContext)) return p } @@ -18378,15 +21038,15 @@ func (s *Stmt_breakContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { } type Stmt_variable_declarationContext struct { - Proc_statementContext + Action_statementContext } func NewStmt_variable_declarationContext(parser antlr.Parser, ctx antlr.ParserRuleContext) *Stmt_variable_declarationContext { var p = new(Stmt_variable_declarationContext) - InitEmptyProc_statementContext(&p.Proc_statementContext) + InitEmptyAction_statementContext(&p.Action_statementContext) p.parser = parser - p.CopyAll(ctx.(*Proc_statementContext)) + p.CopyAll(ctx.(*Action_statementContext)) return p } @@ -18430,15 +21090,15 @@ func (s *Stmt_variable_declarationContext) Accept(visitor antlr.ParseTreeVisitor } type Stmt_return_nextContext struct { - Proc_statementContext + Action_statementContext } func NewStmt_return_nextContext(parser antlr.Parser, ctx antlr.ParserRuleContext) *Stmt_return_nextContext { var p = new(Stmt_return_nextContext) - InitEmptyProc_statementContext(&p.Proc_statementContext) + InitEmptyAction_statementContext(&p.Action_statementContext) p.parser = parser - p.CopyAll(ctx.(*Proc_statementContext)) + p.CopyAll(ctx.(*Action_statementContext)) return p } @@ -18455,10 +21115,10 @@ func (s *Stmt_return_nextContext) NEXT() antlr.TerminalNode { return s.GetToken(KuneiformParserNEXT, 0) } -func (s *Stmt_return_nextContext) Procedure_expr_list() IProcedure_expr_listContext { +func (s *Stmt_return_nextContext) Action_expr_list() IAction_expr_listContext { var t antlr.RuleContext for _, ctx := range s.GetChildren() { - if _, ok := ctx.(IProcedure_expr_listContext); ok { + if _, ok := ctx.(IAction_expr_listContext); ok { t = ctx.(antlr.RuleContext) break } @@ -18468,7 +21128,7 @@ func (s *Stmt_return_nextContext) Procedure_expr_list() IProcedure_expr_listCont return nil } - return t.(IProcedure_expr_listContext) + return t.(IAction_expr_listContext) } func (s *Stmt_return_nextContext) SCOL() antlr.TerminalNode { @@ -18486,7 +21146,7 @@ func (s *Stmt_return_nextContext) Accept(visitor 
antlr.ParseTreeVisitor) interfa } type Stmt_for_loopContext struct { - Proc_statementContext + Action_statementContext receiver antlr.Token target_variable IVariableContext } @@ -18494,9 +21154,9 @@ type Stmt_for_loopContext struct { func NewStmt_for_loopContext(parser antlr.Parser, ctx antlr.ParserRuleContext) *Stmt_for_loopContext { var p = new(Stmt_for_loopContext) - InitEmptyProc_statementContext(&p.Proc_statementContext) + InitEmptyAction_statementContext(&p.Action_statementContext) p.parser = parser - p.CopyAll(ctx.(*Proc_statementContext)) + p.CopyAll(ctx.(*Action_statementContext)) return p } @@ -18581,20 +21241,20 @@ func (s *Stmt_for_loopContext) Variable() IVariableContext { return t.(IVariableContext) } -func (s *Stmt_for_loopContext) AllProc_statement() []IProc_statementContext { +func (s *Stmt_for_loopContext) AllAction_statement() []IAction_statementContext { children := s.GetChildren() len := 0 for _, ctx := range children { - if _, ok := ctx.(IProc_statementContext); ok { + if _, ok := ctx.(IAction_statementContext); ok { len++ } } - tst := make([]IProc_statementContext, len) + tst := make([]IAction_statementContext, len) i := 0 for _, ctx := range children { - if t, ok := ctx.(IProc_statementContext); ok { - tst[i] = t.(IProc_statementContext) + if t, ok := ctx.(IAction_statementContext); ok { + tst[i] = t.(IAction_statementContext) i++ } } @@ -18602,11 +21262,11 @@ func (s *Stmt_for_loopContext) AllProc_statement() []IProc_statementContext { return tst } -func (s *Stmt_for_loopContext) Proc_statement(i int) IProc_statementContext { +func (s *Stmt_for_loopContext) Action_statement(i int) IAction_statementContext { var t antlr.RuleContext j := 0 for _, ctx := range s.GetChildren() { - if _, ok := ctx.(IProc_statementContext); ok { + if _, ok := ctx.(IAction_statementContext); ok { if j == i { t = ctx.(antlr.RuleContext) break @@ -18619,7 +21279,11 @@ func (s *Stmt_for_loopContext) Proc_statement(i int) IProc_statementContext { return nil } - return t.(IProc_statementContext) + return t.(IAction_statementContext) +} + +func (s *Stmt_for_loopContext) SCOL() antlr.TerminalNode { + return s.GetToken(KuneiformParserSCOL, 0) } func (s *Stmt_for_loopContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { @@ -18633,15 +21297,15 @@ func (s *Stmt_for_loopContext) Accept(visitor antlr.ParseTreeVisitor) interface{ } type Stmt_returnContext struct { - Proc_statementContext + Action_statementContext } func NewStmt_returnContext(parser antlr.Parser, ctx antlr.ParserRuleContext) *Stmt_returnContext { var p = new(Stmt_returnContext) - InitEmptyProc_statementContext(&p.Proc_statementContext) + InitEmptyAction_statementContext(&p.Action_statementContext) p.parser = parser - p.CopyAll(ctx.(*Proc_statementContext)) + p.CopyAll(ctx.(*Action_statementContext)) return p } @@ -18658,10 +21322,10 @@ func (s *Stmt_returnContext) SCOL() antlr.TerminalNode { return s.GetToken(KuneiformParserSCOL, 0) } -func (s *Stmt_returnContext) Procedure_expr_list() IProcedure_expr_listContext { +func (s *Stmt_returnContext) Action_expr_list() IAction_expr_listContext { var t antlr.RuleContext for _, ctx := range s.GetChildren() { - if _, ok := ctx.(IProcedure_expr_listContext); ok { + if _, ok := ctx.(IAction_expr_listContext); ok { t = ctx.(antlr.RuleContext) break } @@ -18671,7 +21335,7 @@ func (s *Stmt_returnContext) Procedure_expr_list() IProcedure_expr_listContext { return nil } - return t.(IProcedure_expr_listContext) + return t.(IAction_expr_listContext) } func (s *Stmt_returnContext) Sql_statement() 
ISql_statementContext { @@ -18700,28 +21364,28 @@ func (s *Stmt_returnContext) Accept(visitor antlr.ParseTreeVisitor) interface{} } } -type Stmt_procedure_callContext struct { - Proc_statementContext +type Stmt_action_callContext struct { + Action_statementContext } -func NewStmt_procedure_callContext(parser antlr.Parser, ctx antlr.ParserRuleContext) *Stmt_procedure_callContext { - var p = new(Stmt_procedure_callContext) +func NewStmt_action_callContext(parser antlr.Parser, ctx antlr.ParserRuleContext) *Stmt_action_callContext { + var p = new(Stmt_action_callContext) - InitEmptyProc_statementContext(&p.Proc_statementContext) + InitEmptyAction_statementContext(&p.Action_statementContext) p.parser = parser - p.CopyAll(ctx.(*Proc_statementContext)) + p.CopyAll(ctx.(*Action_statementContext)) return p } -func (s *Stmt_procedure_callContext) GetRuleContext() antlr.RuleContext { +func (s *Stmt_action_callContext) GetRuleContext() antlr.RuleContext { return s } -func (s *Stmt_procedure_callContext) Procedure_function_call() IProcedure_function_callContext { +func (s *Stmt_action_callContext) Action_function_call() IAction_function_callContext { var t antlr.RuleContext for _, ctx := range s.GetChildren() { - if _, ok := ctx.(IProcedure_function_callContext); ok { + if _, ok := ctx.(IAction_function_callContext); ok { t = ctx.(antlr.RuleContext) break } @@ -18731,18 +21395,18 @@ func (s *Stmt_procedure_callContext) Procedure_function_call() IProcedure_functi return nil } - return t.(IProcedure_function_callContext) + return t.(IAction_function_callContext) } -func (s *Stmt_procedure_callContext) SCOL() antlr.TerminalNode { +func (s *Stmt_action_callContext) SCOL() antlr.TerminalNode { return s.GetToken(KuneiformParserSCOL, 0) } -func (s *Stmt_procedure_callContext) ASSIGN() antlr.TerminalNode { +func (s *Stmt_action_callContext) ASSIGN() antlr.TerminalNode { return s.GetToken(KuneiformParserASSIGN, 0) } -func (s *Stmt_procedure_callContext) AllVariable_or_underscore() []IVariable_or_underscoreContext { +func (s *Stmt_action_callContext) AllVariable_or_underscore() []IVariable_or_underscoreContext { children := s.GetChildren() len := 0 for _, ctx := range children { @@ -18763,7 +21427,7 @@ func (s *Stmt_procedure_callContext) AllVariable_or_underscore() []IVariable_or_ return tst } -func (s *Stmt_procedure_callContext) Variable_or_underscore(i int) IVariable_or_underscoreContext { +func (s *Stmt_action_callContext) Variable_or_underscore(i int) IVariable_or_underscoreContext { var t antlr.RuleContext j := 0 for _, ctx := range s.GetChildren() { @@ -18783,18 +21447,18 @@ func (s *Stmt_procedure_callContext) Variable_or_underscore(i int) IVariable_or_ return t.(IVariable_or_underscoreContext) } -func (s *Stmt_procedure_callContext) AllCOMMA() []antlr.TerminalNode { +func (s *Stmt_action_callContext) AllCOMMA() []antlr.TerminalNode { return s.GetTokens(KuneiformParserCOMMA) } -func (s *Stmt_procedure_callContext) COMMA(i int) antlr.TerminalNode { +func (s *Stmt_action_callContext) COMMA(i int) antlr.TerminalNode { return s.GetToken(KuneiformParserCOMMA, i) } -func (s *Stmt_procedure_callContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { +func (s *Stmt_action_callContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { switch t := visitor.(type) { case KuneiformParserVisitor: - return t.VisitStmt_procedure_call(s) + return t.VisitStmt_action_call(s) default: return t.VisitChildren(s) @@ -18802,15 +21466,15 @@ func (s *Stmt_procedure_callContext) Accept(visitor antlr.ParseTreeVisitor) 
inte } type Stmt_variable_assignmentContext struct { - Proc_statementContext + Action_statementContext } func NewStmt_variable_assignmentContext(parser antlr.Parser, ctx antlr.ParserRuleContext) *Stmt_variable_assignmentContext { var p = new(Stmt_variable_assignmentContext) - InitEmptyProc_statementContext(&p.Proc_statementContext) + InitEmptyAction_statementContext(&p.Action_statementContext) p.parser = parser - p.CopyAll(ctx.(*Proc_statementContext)) + p.CopyAll(ctx.(*Action_statementContext)) return p } @@ -18819,20 +21483,20 @@ func (s *Stmt_variable_assignmentContext) GetRuleContext() antlr.RuleContext { return s } -func (s *Stmt_variable_assignmentContext) AllProcedure_expr() []IProcedure_exprContext { +func (s *Stmt_variable_assignmentContext) AllAction_expr() []IAction_exprContext { children := s.GetChildren() len := 0 for _, ctx := range children { - if _, ok := ctx.(IProcedure_exprContext); ok { + if _, ok := ctx.(IAction_exprContext); ok { len++ } } - tst := make([]IProcedure_exprContext, len) + tst := make([]IAction_exprContext, len) i := 0 for _, ctx := range children { - if t, ok := ctx.(IProcedure_exprContext); ok { - tst[i] = t.(IProcedure_exprContext) + if t, ok := ctx.(IAction_exprContext); ok { + tst[i] = t.(IAction_exprContext) i++ } } @@ -18840,11 +21504,11 @@ func (s *Stmt_variable_assignmentContext) AllProcedure_expr() []IProcedure_exprC return tst } -func (s *Stmt_variable_assignmentContext) Procedure_expr(i int) IProcedure_exprContext { +func (s *Stmt_variable_assignmentContext) Action_expr(i int) IAction_exprContext { var t antlr.RuleContext j := 0 for _, ctx := range s.GetChildren() { - if _, ok := ctx.(IProcedure_exprContext); ok { + if _, ok := ctx.(IAction_exprContext); ok { if j == i { t = ctx.(antlr.RuleContext) break @@ -18857,7 +21521,7 @@ func (s *Stmt_variable_assignmentContext) Procedure_expr(i int) IProcedure_exprC return nil } - return t.(IProcedure_exprContext) + return t.(IAction_exprContext) } func (s *Stmt_variable_assignmentContext) ASSIGN() antlr.TerminalNode { @@ -18895,15 +21559,15 @@ func (s *Stmt_variable_assignmentContext) Accept(visitor antlr.ParseTreeVisitor) } type Stmt_sqlContext struct { - Proc_statementContext + Action_statementContext } func NewStmt_sqlContext(parser antlr.Parser, ctx antlr.ParserRuleContext) *Stmt_sqlContext { var p = new(Stmt_sqlContext) - InitEmptyProc_statementContext(&p.Proc_statementContext) + InitEmptyAction_statementContext(&p.Action_statementContext) p.parser = parser - p.CopyAll(ctx.(*Proc_statementContext)) + p.CopyAll(ctx.(*Action_statementContext)) return p } @@ -18942,23 +21606,23 @@ func (s *Stmt_sqlContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { } } -func (p *KuneiformParser) Proc_statement() (localctx IProc_statementContext) { - localctx = NewProc_statementContext(p, p.GetParserRuleContext(), p.GetState()) - p.EnterRule(localctx, 106, KuneiformParserRULE_proc_statement) +func (p *KuneiformParser) Action_statement() (localctx IAction_statementContext) { + localctx = NewAction_statementContext(p, p.GetParserRuleContext(), p.GetState()) + p.EnterRule(localctx, 114, KuneiformParserRULE_action_statement) var _la int - p.SetState(1123) + p.SetState(1293) p.GetErrorHandler().Sync(p) if p.HasError() { goto errorExit } - switch p.GetInterpreter().AdaptivePredict(p.BaseParser, p.GetTokenStream(), 160, p.GetParserRuleContext()) { + switch p.GetInterpreter().AdaptivePredict(p.BaseParser, p.GetTokenStream(), 180, p.GetParserRuleContext()) { case 1: localctx = NewStmt_variable_declarationContext(p, 
localctx) p.EnterOuterAlt(localctx, 1) { - p.SetState(1043) + p.SetState(1208) p.Match(KuneiformParserVARIABLE) if p.HasError() { // Recognition error - abort rule @@ -18966,11 +21630,11 @@ func (p *KuneiformParser) Proc_statement() (localctx IProc_statementContext) { } } { - p.SetState(1044) + p.SetState(1209) p.Type_() } { - p.SetState(1045) + p.SetState(1210) p.Match(KuneiformParserSCOL) if p.HasError() { // Recognition error - abort rule @@ -18979,9 +21643,9 @@ func (p *KuneiformParser) Proc_statement() (localctx IProc_statementContext) { } case 2: - localctx = NewStmt_procedure_callContext(p, localctx) + localctx = NewStmt_action_callContext(p, localctx) p.EnterOuterAlt(localctx, 2) - p.SetState(1057) + p.SetState(1222) p.GetErrorHandler().Sync(p) if p.HasError() { goto errorExit @@ -18990,11 +21654,11 @@ func (p *KuneiformParser) Proc_statement() (localctx IProc_statementContext) { if _la == KuneiformParserUNDERSCORE || _la == KuneiformParserVARIABLE { { - p.SetState(1047) + p.SetState(1212) p.Variable_or_underscore() } - p.SetState(1052) + p.SetState(1217) p.GetErrorHandler().Sync(p) if p.HasError() { goto errorExit @@ -19003,7 +21667,7 @@ func (p *KuneiformParser) Proc_statement() (localctx IProc_statementContext) { for _la == KuneiformParserCOMMA { { - p.SetState(1048) + p.SetState(1213) p.Match(KuneiformParserCOMMA) if p.HasError() { // Recognition error - abort rule @@ -19012,11 +21676,11 @@ func (p *KuneiformParser) Proc_statement() (localctx IProc_statementContext) { } { - p.SetState(1049) + p.SetState(1214) p.Variable_or_underscore() } - p.SetState(1054) + p.SetState(1219) p.GetErrorHandler().Sync(p) if p.HasError() { goto errorExit @@ -19024,7 +21688,7 @@ func (p *KuneiformParser) Proc_statement() (localctx IProc_statementContext) { _la = p.GetTokenStream().LA(1) } { - p.SetState(1055) + p.SetState(1220) p.Match(KuneiformParserASSIGN) if p.HasError() { // Recognition error - abort rule @@ -19034,11 +21698,11 @@ func (p *KuneiformParser) Proc_statement() (localctx IProc_statementContext) { } { - p.SetState(1059) - p.Procedure_function_call() + p.SetState(1224) + p.Action_function_call() } { - p.SetState(1060) + p.SetState(1225) p.Match(KuneiformParserSCOL) if p.HasError() { // Recognition error - abort rule @@ -19050,25 +21714,25 @@ func (p *KuneiformParser) Proc_statement() (localctx IProc_statementContext) { localctx = NewStmt_variable_assignmentContext(p, localctx) p.EnterOuterAlt(localctx, 3) { - p.SetState(1062) - p.procedure_expr(0) + p.SetState(1227) + p.action_expr(0) } - p.SetState(1064) + p.SetState(1229) p.GetErrorHandler().Sync(p) if p.HasError() { goto errorExit } _la = p.GetTokenStream().LA(1) - if _la == KuneiformParserIDENTIFIER { + if _la == KuneiformParserDOUBLE_QUOTE || _la == KuneiformParserIDENTIFIER { { - p.SetState(1063) + p.SetState(1228) p.Type_() } } { - p.SetState(1066) + p.SetState(1231) p.Match(KuneiformParserASSIGN) if p.HasError() { // Recognition error - abort rule @@ -19076,11 +21740,11 @@ func (p *KuneiformParser) Proc_statement() (localctx IProc_statementContext) { } } { - p.SetState(1067) - p.procedure_expr(0) + p.SetState(1232) + p.action_expr(0) } { - p.SetState(1068) + p.SetState(1233) p.Match(KuneiformParserSCOL) if p.HasError() { // Recognition error - abort rule @@ -19092,7 +21756,7 @@ func (p *KuneiformParser) Proc_statement() (localctx IProc_statementContext) { localctx = NewStmt_for_loopContext(p, localctx) p.EnterOuterAlt(localctx, 4) { - p.SetState(1070) + p.SetState(1235) p.Match(KuneiformParserFOR) if p.HasError() { // 
Recognition error - abort rule @@ -19100,7 +21764,7 @@ func (p *KuneiformParser) Proc_statement() (localctx IProc_statementContext) { } } { - p.SetState(1071) + p.SetState(1236) var _m = p.Match(KuneiformParserVARIABLE) @@ -19111,29 +21775,29 @@ func (p *KuneiformParser) Proc_statement() (localctx IProc_statementContext) { } } { - p.SetState(1072) + p.SetState(1237) p.Match(KuneiformParserIN) if p.HasError() { // Recognition error - abort rule goto errorExit } } - p.SetState(1076) + p.SetState(1241) p.GetErrorHandler().Sync(p) if p.HasError() { goto errorExit } - switch p.GetInterpreter().AdaptivePredict(p.BaseParser, p.GetTokenStream(), 154, p.GetParserRuleContext()) { + switch p.GetInterpreter().AdaptivePredict(p.BaseParser, p.GetTokenStream(), 172, p.GetParserRuleContext()) { case 1: { - p.SetState(1073) + p.SetState(1238) p.Range_() } case 2: { - p.SetState(1074) + p.SetState(1239) var _x = p.Variable() @@ -19142,7 +21806,7 @@ func (p *KuneiformParser) Proc_statement() (localctx IProc_statementContext) { case 3: { - p.SetState(1075) + p.SetState(1240) p.Sql_statement() } @@ -19150,27 +21814,27 @@ func (p *KuneiformParser) Proc_statement() (localctx IProc_statementContext) { goto errorExit } { - p.SetState(1078) + p.SetState(1243) p.Match(KuneiformParserLBRACE) if p.HasError() { // Recognition error - abort rule goto errorExit } } - p.SetState(1082) + p.SetState(1247) p.GetErrorHandler().Sync(p) if p.HasError() { goto errorExit } _la = p.GetTokenStream().LA(1) - for ((int64(_la) & ^0x3f) == 0 && ((int64(1)<<_la)&175640386007468168) != 0) || ((int64((_la-84)) & ^0x3f) == 0 && ((int64(1)<<(_la-84))&15426876605953) != 0) { + for ((int64(_la) & ^0x3f) == 0 && ((int64(1)<<_la)&2810246172314175624) != 0) || ((int64((_la-88)) & ^0x3f) == 0 && ((int64(1)<<(_la-88))&1010992146074830337) != 0) { { - p.SetState(1079) - p.Proc_statement() + p.SetState(1244) + p.Action_statement() } - p.SetState(1084) + p.SetState(1249) p.GetErrorHandler().Sync(p) if p.HasError() { goto errorExit @@ -19178,19 +21842,37 @@ func (p *KuneiformParser) Proc_statement() (localctx IProc_statementContext) { _la = p.GetTokenStream().LA(1) } { - p.SetState(1085) + p.SetState(1250) p.Match(KuneiformParserRBRACE) if p.HasError() { // Recognition error - abort rule goto errorExit } } + p.SetState(1252) + p.GetErrorHandler().Sync(p) + if p.HasError() { + goto errorExit + } + _la = p.GetTokenStream().LA(1) + + if _la == KuneiformParserSCOL { + { + p.SetState(1251) + p.Match(KuneiformParserSCOL) + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } + } + + } case 5: localctx = NewStmt_ifContext(p, localctx) p.EnterOuterAlt(localctx, 5) { - p.SetState(1087) + p.SetState(1254) p.Match(KuneiformParserIF) if p.HasError() { // Recognition error - abort rule @@ -19198,10 +21880,10 @@ func (p *KuneiformParser) Proc_statement() (localctx IProc_statementContext) { } } { - p.SetState(1088) + p.SetState(1255) p.If_then_block() } - p.SetState(1093) + p.SetState(1260) p.GetErrorHandler().Sync(p) if p.HasError() { goto errorExit @@ -19210,7 +21892,7 @@ func (p *KuneiformParser) Proc_statement() (localctx IProc_statementContext) { for _la == KuneiformParserELSEIF { { - p.SetState(1089) + p.SetState(1256) p.Match(KuneiformParserELSEIF) if p.HasError() { // Recognition error - abort rule @@ -19218,18 +21900,18 @@ func (p *KuneiformParser) Proc_statement() (localctx IProc_statementContext) { } } { - p.SetState(1090) + p.SetState(1257) p.If_then_block() } - p.SetState(1095) + p.SetState(1262) p.GetErrorHandler().Sync(p) if 
p.HasError() { goto errorExit } _la = p.GetTokenStream().LA(1) } - p.SetState(1105) + p.SetState(1272) p.GetErrorHandler().Sync(p) if p.HasError() { goto errorExit @@ -19238,7 +21920,7 @@ func (p *KuneiformParser) Proc_statement() (localctx IProc_statementContext) { if _la == KuneiformParserELSE { { - p.SetState(1096) + p.SetState(1263) p.Match(KuneiformParserELSE) if p.HasError() { // Recognition error - abort rule @@ -19246,27 +21928,27 @@ func (p *KuneiformParser) Proc_statement() (localctx IProc_statementContext) { } } { - p.SetState(1097) + p.SetState(1264) p.Match(KuneiformParserLBRACE) if p.HasError() { // Recognition error - abort rule goto errorExit } } - p.SetState(1101) + p.SetState(1268) p.GetErrorHandler().Sync(p) if p.HasError() { goto errorExit } _la = p.GetTokenStream().LA(1) - for ((int64(_la) & ^0x3f) == 0 && ((int64(1)<<_la)&175640386007468168) != 0) || ((int64((_la-84)) & ^0x3f) == 0 && ((int64(1)<<(_la-84))&15426876605953) != 0) { + for ((int64(_la) & ^0x3f) == 0 && ((int64(1)<<_la)&2810246172314175624) != 0) || ((int64((_la-88)) & ^0x3f) == 0 && ((int64(1)<<(_la-88))&1010992146074830337) != 0) { { - p.SetState(1098) - p.Proc_statement() + p.SetState(1265) + p.Action_statement() } - p.SetState(1103) + p.SetState(1270) p.GetErrorHandler().Sync(p) if p.HasError() { goto errorExit @@ -19274,7 +21956,7 @@ func (p *KuneiformParser) Proc_statement() (localctx IProc_statementContext) { _la = p.GetTokenStream().LA(1) } { - p.SetState(1104) + p.SetState(1271) p.Match(KuneiformParserRBRACE) if p.HasError() { // Recognition error - abort rule @@ -19283,16 +21965,34 @@ func (p *KuneiformParser) Proc_statement() (localctx IProc_statementContext) { } } + p.SetState(1275) + p.GetErrorHandler().Sync(p) + if p.HasError() { + goto errorExit + } + _la = p.GetTokenStream().LA(1) + + if _la == KuneiformParserSCOL { + { + p.SetState(1274) + p.Match(KuneiformParserSCOL) + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } + } + + } case 6: localctx = NewStmt_sqlContext(p, localctx) p.EnterOuterAlt(localctx, 6) { - p.SetState(1107) + p.SetState(1277) p.Sql_statement() } { - p.SetState(1108) + p.SetState(1278) p.Match(KuneiformParserSCOL) if p.HasError() { // Recognition error - abort rule @@ -19304,7 +22004,7 @@ func (p *KuneiformParser) Proc_statement() (localctx IProc_statementContext) { localctx = NewStmt_breakContext(p, localctx) p.EnterOuterAlt(localctx, 7) { - p.SetState(1110) + p.SetState(1280) p.Match(KuneiformParserBREAK) if p.HasError() { // Recognition error - abort rule @@ -19312,7 +22012,7 @@ func (p *KuneiformParser) Proc_statement() (localctx IProc_statementContext) { } } { - p.SetState(1111) + p.SetState(1281) p.Match(KuneiformParserSCOL) if p.HasError() { // Recognition error - abort rule @@ -19324,28 +22024,28 @@ func (p *KuneiformParser) Proc_statement() (localctx IProc_statementContext) { localctx = NewStmt_returnContext(p, localctx) p.EnterOuterAlt(localctx, 8) { - p.SetState(1112) + p.SetState(1282) p.Match(KuneiformParserRETURN) if p.HasError() { // Recognition error - abort rule goto errorExit } } - p.SetState(1115) + p.SetState(1285) p.GetErrorHandler().Sync(p) if p.HasError() { goto errorExit } switch p.GetTokenStream().LA(1) { - case KuneiformParserLBRACKET, KuneiformParserLPAREN, KuneiformParserEXCL, KuneiformParserPLUS, KuneiformParserMINUS, KuneiformParserNULL, KuneiformParserNOT, KuneiformParserSTRING_, KuneiformParserTRUE, KuneiformParserFALSE, KuneiformParserDIGITS_, KuneiformParserBINARY_, KuneiformParserIDENTIFIER, 
KuneiformParserVARIABLE, KuneiformParserCONTEXTUAL_VARIABLE: + case KuneiformParserLBRACKET, KuneiformParserLPAREN, KuneiformParserEXCL, KuneiformParserPLUS, KuneiformParserMINUS, KuneiformParserDOUBLE_QUOTE, KuneiformParserNULL, KuneiformParserNOT, KuneiformParserARRAY, KuneiformParserSTRING_, KuneiformParserTRUE, KuneiformParserFALSE, KuneiformParserDIGITS_, KuneiformParserBINARY_, KuneiformParserIDENTIFIER, KuneiformParserVARIABLE, KuneiformParserCONTEXTUAL_VARIABLE: { - p.SetState(1113) - p.Procedure_expr_list() + p.SetState(1283) + p.Action_expr_list() } case KuneiformParserDELETE, KuneiformParserUPDATE, KuneiformParserWITH, KuneiformParserSELECT, KuneiformParserINSERT: { - p.SetState(1114) + p.SetState(1284) p.Sql_statement() } @@ -19354,7 +22054,7 @@ func (p *KuneiformParser) Proc_statement() (localctx IProc_statementContext) { default: } { - p.SetState(1117) + p.SetState(1287) p.Match(KuneiformParserSCOL) if p.HasError() { // Recognition error - abort rule @@ -19366,7 +22066,7 @@ func (p *KuneiformParser) Proc_statement() (localctx IProc_statementContext) { localctx = NewStmt_return_nextContext(p, localctx) p.EnterOuterAlt(localctx, 9) { - p.SetState(1118) + p.SetState(1288) p.Match(KuneiformParserRETURN) if p.HasError() { // Recognition error - abort rule @@ -19374,7 +22074,7 @@ func (p *KuneiformParser) Proc_statement() (localctx IProc_statementContext) { } } { - p.SetState(1119) + p.SetState(1289) p.Match(KuneiformParserNEXT) if p.HasError() { // Recognition error - abort rule @@ -19382,11 +22082,11 @@ func (p *KuneiformParser) Proc_statement() (localctx IProc_statementContext) { } } { - p.SetState(1120) - p.Procedure_expr_list() + p.SetState(1290) + p.Action_expr_list() } { - p.SetState(1121) + p.SetState(1291) p.Match(KuneiformParserSCOL) if p.HasError() { // Recognition error - abort rule @@ -19486,12 +22186,12 @@ func (s *Variable_or_underscoreContext) Accept(visitor antlr.ParseTreeVisitor) i func (p *KuneiformParser) Variable_or_underscore() (localctx IVariable_or_underscoreContext) { localctx = NewVariable_or_underscoreContext(p, p.GetParserRuleContext(), p.GetState()) - p.EnterRule(localctx, 108, KuneiformParserRULE_variable_or_underscore) + p.EnterRule(localctx, 116, KuneiformParserRULE_variable_or_underscore) var _la int p.EnterOuterAlt(localctx, 1) { - p.SetState(1125) + p.SetState(1295) _la = p.GetTokenStream().LA(1) if !(_la == KuneiformParserUNDERSCORE || _la == KuneiformParserVARIABLE) { @@ -19515,126 +22215,110 @@ errorExit: goto errorExit // Trick to prevent compiler error if the label is not used } -// IProcedure_function_callContext is an interface to support dynamic dispatch. -type IProcedure_function_callContext interface { +// IAction_function_callContext is an interface to support dynamic dispatch. +type IAction_function_callContext interface { antlr.ParserRuleContext // GetParser returns the parser. GetParser() antlr.Parser - // IsProcedure_function_callContext differentiates from other interfaces. - IsProcedure_function_callContext() + // IsAction_function_callContext differentiates from other interfaces. 
+ IsAction_function_callContext() } -type Procedure_function_callContext struct { +type Action_function_callContext struct { antlr.BaseParserRuleContext parser antlr.Parser } -func NewEmptyProcedure_function_callContext() *Procedure_function_callContext { - var p = new(Procedure_function_callContext) +func NewEmptyAction_function_callContext() *Action_function_callContext { + var p = new(Action_function_callContext) antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1) - p.RuleIndex = KuneiformParserRULE_procedure_function_call + p.RuleIndex = KuneiformParserRULE_action_function_call return p } -func InitEmptyProcedure_function_callContext(p *Procedure_function_callContext) { +func InitEmptyAction_function_callContext(p *Action_function_callContext) { antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1) - p.RuleIndex = KuneiformParserRULE_procedure_function_call + p.RuleIndex = KuneiformParserRULE_action_function_call } -func (*Procedure_function_callContext) IsProcedure_function_callContext() {} +func (*Action_function_callContext) IsAction_function_callContext() {} -func NewProcedure_function_callContext(parser antlr.Parser, parent antlr.ParserRuleContext, invokingState int) *Procedure_function_callContext { - var p = new(Procedure_function_callContext) +func NewAction_function_callContext(parser antlr.Parser, parent antlr.ParserRuleContext, invokingState int) *Action_function_callContext { + var p = new(Action_function_callContext) antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, parent, invokingState) p.parser = parser - p.RuleIndex = KuneiformParserRULE_procedure_function_call + p.RuleIndex = KuneiformParserRULE_action_function_call return p } -func (s *Procedure_function_callContext) GetParser() antlr.Parser { return s.parser } +func (s *Action_function_callContext) GetParser() antlr.Parser { return s.parser } -func (s *Procedure_function_callContext) CopyAll(ctx *Procedure_function_callContext) { +func (s *Action_function_callContext) CopyAll(ctx *Action_function_callContext) { s.CopyFrom(&ctx.BaseParserRuleContext) } -func (s *Procedure_function_callContext) GetRuleContext() antlr.RuleContext { +func (s *Action_function_callContext) GetRuleContext() antlr.RuleContext { return s } -func (s *Procedure_function_callContext) ToStringTree(ruleNames []string, recog antlr.Recognizer) string { +func (s *Action_function_callContext) ToStringTree(ruleNames []string, recog antlr.Recognizer) string { return antlr.TreesStringTree(s, ruleNames, recog) } -type Foreign_call_procedureContext struct { - Procedure_function_callContext - dbid IProcedure_exprContext - procedure IProcedure_exprContext +type Normal_call_actionContext struct { + Action_function_callContext + namespace IIdentifierContext + function IIdentifierContext } -func NewForeign_call_procedureContext(parser antlr.Parser, ctx antlr.ParserRuleContext) *Foreign_call_procedureContext { - var p = new(Foreign_call_procedureContext) +func NewNormal_call_actionContext(parser antlr.Parser, ctx antlr.ParserRuleContext) *Normal_call_actionContext { + var p = new(Normal_call_actionContext) - InitEmptyProcedure_function_callContext(&p.Procedure_function_callContext) + InitEmptyAction_function_callContext(&p.Action_function_callContext) p.parser = parser - p.CopyAll(ctx.(*Procedure_function_callContext)) + p.CopyAll(ctx.(*Action_function_callContext)) return p } -func (s *Foreign_call_procedureContext) GetDbid() IProcedure_exprContext { return s.dbid } +func (s *Normal_call_actionContext) GetNamespace() 
IIdentifierContext { return s.namespace } -func (s *Foreign_call_procedureContext) GetProcedure() IProcedure_exprContext { return s.procedure } +func (s *Normal_call_actionContext) GetFunction() IIdentifierContext { return s.function } -func (s *Foreign_call_procedureContext) SetDbid(v IProcedure_exprContext) { s.dbid = v } +func (s *Normal_call_actionContext) SetNamespace(v IIdentifierContext) { s.namespace = v } -func (s *Foreign_call_procedureContext) SetProcedure(v IProcedure_exprContext) { s.procedure = v } +func (s *Normal_call_actionContext) SetFunction(v IIdentifierContext) { s.function = v } -func (s *Foreign_call_procedureContext) GetRuleContext() antlr.RuleContext { +func (s *Normal_call_actionContext) GetRuleContext() antlr.RuleContext { return s } -func (s *Foreign_call_procedureContext) IDENTIFIER() antlr.TerminalNode { - return s.GetToken(KuneiformParserIDENTIFIER, 0) -} - -func (s *Foreign_call_procedureContext) LBRACKET() antlr.TerminalNode { - return s.GetToken(KuneiformParserLBRACKET, 0) -} - -func (s *Foreign_call_procedureContext) COMMA() antlr.TerminalNode { - return s.GetToken(KuneiformParserCOMMA, 0) -} - -func (s *Foreign_call_procedureContext) RBRACKET() antlr.TerminalNode { - return s.GetToken(KuneiformParserRBRACKET, 0) -} - -func (s *Foreign_call_procedureContext) LPAREN() antlr.TerminalNode { +func (s *Normal_call_actionContext) LPAREN() antlr.TerminalNode { return s.GetToken(KuneiformParserLPAREN, 0) } -func (s *Foreign_call_procedureContext) RPAREN() antlr.TerminalNode { +func (s *Normal_call_actionContext) RPAREN() antlr.TerminalNode { return s.GetToken(KuneiformParserRPAREN, 0) } -func (s *Foreign_call_procedureContext) AllProcedure_expr() []IProcedure_exprContext { +func (s *Normal_call_actionContext) AllIdentifier() []IIdentifierContext { children := s.GetChildren() len := 0 for _, ctx := range children { - if _, ok := ctx.(IProcedure_exprContext); ok { + if _, ok := ctx.(IIdentifierContext); ok { len++ } } - tst := make([]IProcedure_exprContext, len) + tst := make([]IIdentifierContext, len) i := 0 for _, ctx := range children { - if t, ok := ctx.(IProcedure_exprContext); ok { - tst[i] = t.(IProcedure_exprContext) + if t, ok := ctx.(IIdentifierContext); ok { + tst[i] = t.(IIdentifierContext) i++ } } @@ -19642,11 +22326,11 @@ func (s *Foreign_call_procedureContext) AllProcedure_expr() []IProcedure_exprCon return tst } -func (s *Foreign_call_procedureContext) Procedure_expr(i int) IProcedure_exprContext { +func (s *Normal_call_actionContext) Identifier(i int) IIdentifierContext { var t antlr.RuleContext j := 0 for _, ctx := range s.GetChildren() { - if _, ok := ctx.(IProcedure_exprContext); ok { + if _, ok := ctx.(IIdentifierContext); ok { if j == i { t = ctx.(antlr.RuleContext) break @@ -19659,69 +22343,17 @@ func (s *Foreign_call_procedureContext) Procedure_expr(i int) IProcedure_exprCon return nil } - return t.(IProcedure_exprContext) -} - -func (s *Foreign_call_procedureContext) Procedure_expr_list() IProcedure_expr_listContext { - var t antlr.RuleContext - for _, ctx := range s.GetChildren() { - if _, ok := ctx.(IProcedure_expr_listContext); ok { - t = ctx.(antlr.RuleContext) - break - } - } - - if t == nil { - return nil - } - - return t.(IProcedure_expr_listContext) -} - -func (s *Foreign_call_procedureContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { - switch t := visitor.(type) { - case KuneiformParserVisitor: - return t.VisitForeign_call_procedure(s) - - default: - return t.VisitChildren(s) - } -} - -type Normal_call_procedureContext 
struct { - Procedure_function_callContext -} - -func NewNormal_call_procedureContext(parser antlr.Parser, ctx antlr.ParserRuleContext) *Normal_call_procedureContext { - var p = new(Normal_call_procedureContext) - - InitEmptyProcedure_function_callContext(&p.Procedure_function_callContext) - p.parser = parser - p.CopyAll(ctx.(*Procedure_function_callContext)) - - return p -} - -func (s *Normal_call_procedureContext) GetRuleContext() antlr.RuleContext { - return s -} - -func (s *Normal_call_procedureContext) IDENTIFIER() antlr.TerminalNode { - return s.GetToken(KuneiformParserIDENTIFIER, 0) -} - -func (s *Normal_call_procedureContext) LPAREN() antlr.TerminalNode { - return s.GetToken(KuneiformParserLPAREN, 0) + return t.(IIdentifierContext) } -func (s *Normal_call_procedureContext) RPAREN() antlr.TerminalNode { - return s.GetToken(KuneiformParserRPAREN, 0) +func (s *Normal_call_actionContext) PERIOD() antlr.TerminalNode { + return s.GetToken(KuneiformParserPERIOD, 0) } -func (s *Normal_call_procedureContext) Procedure_expr_list() IProcedure_expr_listContext { +func (s *Normal_call_actionContext) Action_expr_list() IAction_expr_listContext { var t antlr.RuleContext for _, ctx := range s.GetChildren() { - if _, ok := ctx.(IProcedure_expr_listContext); ok { + if _, ok := ctx.(IAction_expr_listContext); ok { t = ctx.(antlr.RuleContext) break } @@ -19731,155 +22363,85 @@ func (s *Normal_call_procedureContext) Procedure_expr_list() IProcedure_expr_lis return nil } - return t.(IProcedure_expr_listContext) + return t.(IAction_expr_listContext) } -func (s *Normal_call_procedureContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { +func (s *Normal_call_actionContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { switch t := visitor.(type) { case KuneiformParserVisitor: - return t.VisitNormal_call_procedure(s) + return t.VisitNormal_call_action(s) default: return t.VisitChildren(s) } } -func (p *KuneiformParser) Procedure_function_call() (localctx IProcedure_function_callContext) { - localctx = NewProcedure_function_callContext(p, p.GetParserRuleContext(), p.GetState()) - p.EnterRule(localctx, 110, KuneiformParserRULE_procedure_function_call) +func (p *KuneiformParser) Action_function_call() (localctx IAction_function_callContext) { + localctx = NewAction_function_callContext(p, p.GetParserRuleContext(), p.GetState()) + p.EnterRule(localctx, 118, KuneiformParserRULE_action_function_call) var _la int - p.SetState(1145) + localctx = NewNormal_call_actionContext(p, localctx) + p.EnterOuterAlt(localctx, 1) + p.SetState(1300) p.GetErrorHandler().Sync(p) - if p.HasError() { - goto errorExit - } - switch p.GetInterpreter().AdaptivePredict(p.BaseParser, p.GetTokenStream(), 163, p.GetParserRuleContext()) { - case 1: - localctx = NewNormal_call_procedureContext(p, localctx) - p.EnterOuterAlt(localctx, 1) - { - p.SetState(1127) - p.Match(KuneiformParserIDENTIFIER) - if p.HasError() { - // Recognition error - abort rule - goto errorExit - } - } + if p.GetInterpreter().AdaptivePredict(p.BaseParser, p.GetTokenStream(), 181, p.GetParserRuleContext()) == 1 { { - p.SetState(1128) - p.Match(KuneiformParserLPAREN) - if p.HasError() { - // Recognition error - abort rule - goto errorExit - } - } - p.SetState(1130) - p.GetErrorHandler().Sync(p) - if p.HasError() { - goto errorExit - } - _la = p.GetTokenStream().LA(1) - - if ((int64(_la) & ^0x3f) == 0 && ((int64(1)<<_la)&148618787706374280) != 0) || ((int64((_la-114)) & ^0x3f) == 0 && ((int64(1)<<(_la-114))&14367) != 0) { - { - p.SetState(1129) - 
p.Procedure_expr_list() - } + p.SetState(1297) - } - { - p.SetState(1132) - p.Match(KuneiformParserRPAREN) - if p.HasError() { - // Recognition error - abort rule - goto errorExit - } - } + var _x = p.Identifier() - case 2: - localctx = NewForeign_call_procedureContext(p, localctx) - p.EnterOuterAlt(localctx, 2) - { - p.SetState(1133) - p.Match(KuneiformParserIDENTIFIER) - if p.HasError() { - // Recognition error - abort rule - goto errorExit - } + localctx.(*Normal_call_actionContext).namespace = _x } { - p.SetState(1134) - p.Match(KuneiformParserLBRACKET) + p.SetState(1298) + p.Match(KuneiformParserPERIOD) if p.HasError() { // Recognition error - abort rule goto errorExit } } - { - p.SetState(1135) - - var _x = p.procedure_expr(0) - localctx.(*Foreign_call_procedureContext).dbid = _x - } - { - p.SetState(1136) - p.Match(KuneiformParserCOMMA) - if p.HasError() { - // Recognition error - abort rule - goto errorExit - } - } - { - p.SetState(1137) + } else if p.HasError() { // JIM + goto errorExit + } + { + p.SetState(1302) - var _x = p.procedure_expr(0) + var _x = p.Identifier() - localctx.(*Foreign_call_procedureContext).procedure = _x - } - { - p.SetState(1138) - p.Match(KuneiformParserRBRACKET) - if p.HasError() { - // Recognition error - abort rule - goto errorExit - } - } - { - p.SetState(1139) - p.Match(KuneiformParserLPAREN) - if p.HasError() { - // Recognition error - abort rule - goto errorExit - } - } - p.SetState(1141) - p.GetErrorHandler().Sync(p) + localctx.(*Normal_call_actionContext).function = _x + } + { + p.SetState(1303) + p.Match(KuneiformParserLPAREN) if p.HasError() { + // Recognition error - abort rule goto errorExit } - _la = p.GetTokenStream().LA(1) - - if ((int64(_la) & ^0x3f) == 0 && ((int64(1)<<_la)&148618787706374280) != 0) || ((int64((_la-114)) & ^0x3f) == 0 && ((int64(1)<<(_la-114))&14367) != 0) { - { - p.SetState(1140) - p.Procedure_expr_list() - } + } + p.SetState(1305) + p.GetErrorHandler().Sync(p) + if p.HasError() { + goto errorExit + } + _la = p.GetTokenStream().LA(1) - } + if ((int64(_la) & ^0x3f) == 0 && ((int64(1)<<_la)&2377900607549737096) != 0) || ((int64((_la-130)) & ^0x3f) == 0 && ((int64(1)<<(_la-130))&229873) != 0) { { - p.SetState(1143) - p.Match(KuneiformParserRPAREN) - if p.HasError() { - // Recognition error - abort rule - goto errorExit - } + p.SetState(1304) + p.Action_expr_list() } - case antlr.ATNInvalidAltNumber: - goto errorExit + } + { + p.SetState(1307) + p.Match(KuneiformParserRPAREN) + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } } errorExit: @@ -19903,11 +22465,11 @@ type IIf_then_blockContext interface { GetParser() antlr.Parser // Getter signatures - Procedure_expr() IProcedure_exprContext + Action_expr() IAction_exprContext LBRACE() antlr.TerminalNode RBRACE() antlr.TerminalNode - AllProc_statement() []IProc_statementContext - Proc_statement(i int) IProc_statementContext + AllAction_statement() []IAction_statementContext + Action_statement(i int) IAction_statementContext // IsIf_then_blockContext differentiates from other interfaces. 
IsIf_then_blockContext() @@ -19945,10 +22507,10 @@ func NewIf_then_blockContext(parser antlr.Parser, parent antlr.ParserRuleContext func (s *If_then_blockContext) GetParser() antlr.Parser { return s.parser } -func (s *If_then_blockContext) Procedure_expr() IProcedure_exprContext { +func (s *If_then_blockContext) Action_expr() IAction_exprContext { var t antlr.RuleContext for _, ctx := range s.GetChildren() { - if _, ok := ctx.(IProcedure_exprContext); ok { + if _, ok := ctx.(IAction_exprContext); ok { t = ctx.(antlr.RuleContext) break } @@ -19958,7 +22520,7 @@ func (s *If_then_blockContext) Procedure_expr() IProcedure_exprContext { return nil } - return t.(IProcedure_exprContext) + return t.(IAction_exprContext) } func (s *If_then_blockContext) LBRACE() antlr.TerminalNode { @@ -19969,20 +22531,20 @@ func (s *If_then_blockContext) RBRACE() antlr.TerminalNode { return s.GetToken(KuneiformParserRBRACE, 0) } -func (s *If_then_blockContext) AllProc_statement() []IProc_statementContext { +func (s *If_then_blockContext) AllAction_statement() []IAction_statementContext { children := s.GetChildren() len := 0 for _, ctx := range children { - if _, ok := ctx.(IProc_statementContext); ok { + if _, ok := ctx.(IAction_statementContext); ok { len++ } } - tst := make([]IProc_statementContext, len) + tst := make([]IAction_statementContext, len) i := 0 for _, ctx := range children { - if t, ok := ctx.(IProc_statementContext); ok { - tst[i] = t.(IProc_statementContext) + if t, ok := ctx.(IAction_statementContext); ok { + tst[i] = t.(IAction_statementContext) i++ } } @@ -19990,11 +22552,11 @@ func (s *If_then_blockContext) AllProc_statement() []IProc_statementContext { return tst } -func (s *If_then_blockContext) Proc_statement(i int) IProc_statementContext { +func (s *If_then_blockContext) Action_statement(i int) IAction_statementContext { var t antlr.RuleContext j := 0 for _, ctx := range s.GetChildren() { - if _, ok := ctx.(IProc_statementContext); ok { + if _, ok := ctx.(IAction_statementContext); ok { if j == i { t = ctx.(antlr.RuleContext) break @@ -20007,7 +22569,7 @@ func (s *If_then_blockContext) Proc_statement(i int) IProc_statementContext { return nil } - return t.(IProc_statementContext) + return t.(IAction_statementContext) } func (s *If_then_blockContext) GetRuleContext() antlr.RuleContext { @@ -20030,36 +22592,36 @@ func (s *If_then_blockContext) Accept(visitor antlr.ParseTreeVisitor) interface{ func (p *KuneiformParser) If_then_block() (localctx IIf_then_blockContext) { localctx = NewIf_then_blockContext(p, p.GetParserRuleContext(), p.GetState()) - p.EnterRule(localctx, 112, KuneiformParserRULE_if_then_block) + p.EnterRule(localctx, 120, KuneiformParserRULE_if_then_block) var _la int p.EnterOuterAlt(localctx, 1) { - p.SetState(1147) - p.procedure_expr(0) + p.SetState(1309) + p.action_expr(0) } { - p.SetState(1148) + p.SetState(1310) p.Match(KuneiformParserLBRACE) if p.HasError() { // Recognition error - abort rule goto errorExit } } - p.SetState(1152) + p.SetState(1314) p.GetErrorHandler().Sync(p) if p.HasError() { goto errorExit } _la = p.GetTokenStream().LA(1) - for ((int64(_la) & ^0x3f) == 0 && ((int64(1)<<_la)&175640386007468168) != 0) || ((int64((_la-84)) & ^0x3f) == 0 && ((int64(1)<<(_la-84))&15426876605953) != 0) { + for ((int64(_la) & ^0x3f) == 0 && ((int64(1)<<_la)&2810246172314175624) != 0) || ((int64((_la-88)) & ^0x3f) == 0 && ((int64(1)<<(_la-88))&1010992146074830337) != 0) { { - p.SetState(1149) - p.Proc_statement() + p.SetState(1311) + p.Action_statement() } - p.SetState(1154) + 
p.SetState(1316) p.GetErrorHandler().Sync(p) if p.HasError() { goto errorExit @@ -20067,7 +22629,7 @@ func (p *KuneiformParser) If_then_block() (localctx IIf_then_blockContext) { _la = p.GetTokenStream().LA(1) } { - p.SetState(1155) + p.SetState(1317) p.Match(KuneiformParserRBRACE) if p.HasError() { // Recognition error - abort rule @@ -20096,8 +22658,8 @@ type IRangeContext interface { GetParser() antlr.Parser // Getter signatures - AllProcedure_expr() []IProcedure_exprContext - Procedure_expr(i int) IProcedure_exprContext + AllAction_expr() []IAction_exprContext + Action_expr(i int) IAction_exprContext RANGE() antlr.TerminalNode // IsRangeContext differentiates from other interfaces. @@ -20136,20 +22698,20 @@ func NewRangeContext(parser antlr.Parser, parent antlr.ParserRuleContext, invoki func (s *RangeContext) GetParser() antlr.Parser { return s.parser } -func (s *RangeContext) AllProcedure_expr() []IProcedure_exprContext { +func (s *RangeContext) AllAction_expr() []IAction_exprContext { children := s.GetChildren() len := 0 for _, ctx := range children { - if _, ok := ctx.(IProcedure_exprContext); ok { + if _, ok := ctx.(IAction_exprContext); ok { len++ } } - tst := make([]IProcedure_exprContext, len) + tst := make([]IAction_exprContext, len) i := 0 for _, ctx := range children { - if t, ok := ctx.(IProcedure_exprContext); ok { - tst[i] = t.(IProcedure_exprContext) + if t, ok := ctx.(IAction_exprContext); ok { + tst[i] = t.(IAction_exprContext) i++ } } @@ -20157,11 +22719,11 @@ func (s *RangeContext) AllProcedure_expr() []IProcedure_exprContext { return tst } -func (s *RangeContext) Procedure_expr(i int) IProcedure_exprContext { +func (s *RangeContext) Action_expr(i int) IAction_exprContext { var t antlr.RuleContext j := 0 for _, ctx := range s.GetChildren() { - if _, ok := ctx.(IProcedure_exprContext); ok { + if _, ok := ctx.(IAction_exprContext); ok { if j == i { t = ctx.(antlr.RuleContext) break @@ -20174,7 +22736,7 @@ func (s *RangeContext) Procedure_expr(i int) IProcedure_exprContext { return nil } - return t.(IProcedure_exprContext) + return t.(IAction_exprContext) } func (s *RangeContext) RANGE() antlr.TerminalNode { @@ -20201,14 +22763,14 @@ func (s *RangeContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { func (p *KuneiformParser) Range_() (localctx IRangeContext) { localctx = NewRangeContext(p, p.GetParserRuleContext(), p.GetState()) - p.EnterRule(localctx, 114, KuneiformParserRULE_range) + p.EnterRule(localctx, 122, KuneiformParserRULE_range) p.EnterOuterAlt(localctx, 1) { - p.SetState(1157) - p.procedure_expr(0) + p.SetState(1319) + p.action_expr(0) } { - p.SetState(1158) + p.SetState(1320) p.Match(KuneiformParserRANGE) if p.HasError() { // Recognition error - abort rule @@ -20216,8 +22778,8 @@ func (p *KuneiformParser) Range_() (localctx IRangeContext) { } } { - p.SetState(1159) - p.procedure_expr(0) + p.SetState(1321) + p.action_expr(0) } errorExit: @@ -20235,19 +22797,19 @@ errorExit: func (p *KuneiformParser) Sempred(localctx antlr.RuleContext, ruleIndex, predIndex int) bool { switch ruleIndex { - case 44: + case 50: var t *Sql_exprContext = nil if localctx != nil { t = localctx.(*Sql_exprContext) } return p.Sql_expr_Sempred(t, predIndex) - case 51: - var t *Procedure_exprContext = nil + case 55: + var t *Action_exprContext = nil if localctx != nil { - t = localctx.(*Procedure_exprContext) + t = localctx.(*Action_exprContext) } - return p.Procedure_expr_Sempred(t, predIndex) + return p.Action_expr_Sempred(t, predIndex) default: panic("No predicate with index: " 
+ fmt.Sprint(ruleIndex)) @@ -20257,10 +22819,10 @@ func (p *KuneiformParser) Sempred(localctx antlr.RuleContext, ruleIndex, predInd func (p *KuneiformParser) Sql_expr_Sempred(localctx antlr.RuleContext, predIndex int) bool { switch predIndex { case 0: - return p.Precpred(p.GetParserRuleContext(), 17) + return p.Precpred(p.GetParserRuleContext(), 19) case 1: - return p.Precpred(p.GetParserRuleContext(), 16) + return p.Precpred(p.GetParserRuleContext(), 18) case 2: return p.Precpred(p.GetParserRuleContext(), 9) @@ -20281,13 +22843,13 @@ func (p *KuneiformParser) Sql_expr_Sempred(localctx antlr.RuleContext, predIndex return p.Precpred(p.GetParserRuleContext(), 1) case 8: - return p.Precpred(p.GetParserRuleContext(), 21) + return p.Precpred(p.GetParserRuleContext(), 23) case 9: - return p.Precpred(p.GetParserRuleContext(), 20) + return p.Precpred(p.GetParserRuleContext(), 22) case 10: - return p.Precpred(p.GetParserRuleContext(), 18) + return p.Precpred(p.GetParserRuleContext(), 20) case 11: return p.Precpred(p.GetParserRuleContext(), 8) @@ -20300,7 +22862,7 @@ func (p *KuneiformParser) Sql_expr_Sempred(localctx antlr.RuleContext, predIndex } } -func (p *KuneiformParser) Procedure_expr_Sempred(localctx antlr.RuleContext, predIndex int) bool { +func (p *KuneiformParser) Action_expr_Sempred(localctx antlr.RuleContext, predIndex int) bool { switch predIndex { case 13: return p.Precpred(p.GetParserRuleContext(), 12) diff --git a/parse/gen/kuneiformparser_base_visitor.go b/node/engine/parse/gen/kuneiformparser_base_visitor.go similarity index 62% rename from parse/gen/kuneiformparser_base_visitor.go rename to node/engine/parse/gen/kuneiformparser_base_visitor.go index 8df20e7ee..fdbc9b1fe 100644 --- a/parse/gen/kuneiformparser_base_visitor.go +++ b/node/engine/parse/gen/kuneiformparser_base_visitor.go @@ -7,19 +7,11 @@ type BaseKuneiformParserVisitor struct { *antlr.BaseParseTreeVisitor } -func (v *BaseKuneiformParserVisitor) VisitSchema_entry(ctx *Schema_entryContext) interface{} { +func (v *BaseKuneiformParserVisitor) VisitEntry(ctx *EntryContext) interface{} { return v.VisitChildren(ctx) } -func (v *BaseKuneiformParserVisitor) VisitSql_entry(ctx *Sql_entryContext) interface{} { - return v.VisitChildren(ctx) -} - -func (v *BaseKuneiformParserVisitor) VisitAction_entry(ctx *Action_entryContext) interface{} { - return v.VisitChildren(ctx) -} - -func (v *BaseKuneiformParserVisitor) VisitProcedure_entry(ctx *Procedure_entryContext) interface{} { +func (v *BaseKuneiformParserVisitor) VisitStatement(ctx *StatementContext) interface{} { return v.VisitChildren(ctx) } @@ -67,91 +59,151 @@ func (v *BaseKuneiformParserVisitor) VisitVariable(ctx *VariableContext) interfa return v.VisitChildren(ctx) } -func (v *BaseKuneiformParserVisitor) VisitVariable_list(ctx *Variable_listContext) interface{} { +func (v *BaseKuneiformParserVisitor) VisitTable_column_def(ctx *Table_column_defContext) interface{} { return v.VisitChildren(ctx) } -func (v *BaseKuneiformParserVisitor) VisitSchema(ctx *SchemaContext) interface{} { +func (v *BaseKuneiformParserVisitor) VisitType_list(ctx *Type_listContext) interface{} { return v.VisitChildren(ctx) } -func (v *BaseKuneiformParserVisitor) VisitAnnotation(ctx *AnnotationContext) interface{} { +func (v *BaseKuneiformParserVisitor) VisitNamed_type_list(ctx *Named_type_listContext) interface{} { return v.VisitChildren(ctx) } -func (v *BaseKuneiformParserVisitor) VisitDatabase_declaration(ctx *Database_declarationContext) interface{} { +func (v *BaseKuneiformParserVisitor) 
VisitInline_constraint(ctx *Inline_constraintContext) interface{} { return v.VisitChildren(ctx) } -func (v *BaseKuneiformParserVisitor) VisitUse_declaration(ctx *Use_declarationContext) interface{} { +func (v *BaseKuneiformParserVisitor) VisitFk_action(ctx *Fk_actionContext) interface{} { return v.VisitChildren(ctx) } -func (v *BaseKuneiformParserVisitor) VisitTable_declaration(ctx *Table_declarationContext) interface{} { +func (v *BaseKuneiformParserVisitor) VisitFk_constraint(ctx *Fk_constraintContext) interface{} { return v.VisitChildren(ctx) } -func (v *BaseKuneiformParserVisitor) VisitColumn_def(ctx *Column_defContext) interface{} { +func (v *BaseKuneiformParserVisitor) VisitAction_return(ctx *Action_returnContext) interface{} { return v.VisitChildren(ctx) } -func (v *BaseKuneiformParserVisitor) VisitIndex_def(ctx *Index_defContext) interface{} { +func (v *BaseKuneiformParserVisitor) VisitSql_statement(ctx *Sql_statementContext) interface{} { return v.VisitChildren(ctx) } -func (v *BaseKuneiformParserVisitor) VisitForeign_key_def(ctx *Foreign_key_defContext) interface{} { +func (v *BaseKuneiformParserVisitor) VisitCommon_table_expression(ctx *Common_table_expressionContext) interface{} { return v.VisitChildren(ctx) } -func (v *BaseKuneiformParserVisitor) VisitForeign_key_action(ctx *Foreign_key_actionContext) interface{} { +func (v *BaseKuneiformParserVisitor) VisitCreate_table_statement(ctx *Create_table_statementContext) interface{} { return v.VisitChildren(ctx) } -func (v *BaseKuneiformParserVisitor) VisitType_list(ctx *Type_listContext) interface{} { +func (v *BaseKuneiformParserVisitor) VisitTable_constraint_def(ctx *Table_constraint_defContext) interface{} { return v.VisitChildren(ctx) } -func (v *BaseKuneiformParserVisitor) VisitNamed_type_list(ctx *Named_type_listContext) interface{} { +func (v *BaseKuneiformParserVisitor) VisitOpt_drop_behavior(ctx *Opt_drop_behaviorContext) interface{} { return v.VisitChildren(ctx) } -func (v *BaseKuneiformParserVisitor) VisitTyped_variable_list(ctx *Typed_variable_listContext) interface{} { +func (v *BaseKuneiformParserVisitor) VisitDrop_table_statement(ctx *Drop_table_statementContext) interface{} { return v.VisitChildren(ctx) } -func (v *BaseKuneiformParserVisitor) VisitConstraint(ctx *ConstraintContext) interface{} { +func (v *BaseKuneiformParserVisitor) VisitAlter_table_statement(ctx *Alter_table_statementContext) interface{} { return v.VisitChildren(ctx) } -func (v *BaseKuneiformParserVisitor) VisitAccess_modifier(ctx *Access_modifierContext) interface{} { +func (v *BaseKuneiformParserVisitor) VisitAdd_column_constraint(ctx *Add_column_constraintContext) interface{} { return v.VisitChildren(ctx) } -func (v *BaseKuneiformParserVisitor) VisitAction_declaration(ctx *Action_declarationContext) interface{} { +func (v *BaseKuneiformParserVisitor) VisitDrop_column_constraint(ctx *Drop_column_constraintContext) interface{} { return v.VisitChildren(ctx) } -func (v *BaseKuneiformParserVisitor) VisitProcedure_declaration(ctx *Procedure_declarationContext) interface{} { +func (v *BaseKuneiformParserVisitor) VisitAdd_column(ctx *Add_columnContext) interface{} { return v.VisitChildren(ctx) } -func (v *BaseKuneiformParserVisitor) VisitForeign_procedure_declaration(ctx *Foreign_procedure_declarationContext) interface{} { +func (v *BaseKuneiformParserVisitor) VisitDrop_column(ctx *Drop_columnContext) interface{} { return v.VisitChildren(ctx) } -func (v *BaseKuneiformParserVisitor) VisitProcedure_return(ctx *Procedure_returnContext) interface{} { +func 
(v *BaseKuneiformParserVisitor) VisitRename_column(ctx *Rename_columnContext) interface{} { return v.VisitChildren(ctx) } -func (v *BaseKuneiformParserVisitor) VisitSql(ctx *SqlContext) interface{} { +func (v *BaseKuneiformParserVisitor) VisitRename_table(ctx *Rename_tableContext) interface{} { return v.VisitChildren(ctx) } -func (v *BaseKuneiformParserVisitor) VisitSql_statement(ctx *Sql_statementContext) interface{} { +func (v *BaseKuneiformParserVisitor) VisitAdd_table_constraint(ctx *Add_table_constraintContext) interface{} { return v.VisitChildren(ctx) } -func (v *BaseKuneiformParserVisitor) VisitCommon_table_expression(ctx *Common_table_expressionContext) interface{} { +func (v *BaseKuneiformParserVisitor) VisitDrop_table_constraint(ctx *Drop_table_constraintContext) interface{} { + return v.VisitChildren(ctx) +} + +func (v *BaseKuneiformParserVisitor) VisitCreate_index_statement(ctx *Create_index_statementContext) interface{} { + return v.VisitChildren(ctx) +} + +func (v *BaseKuneiformParserVisitor) VisitDrop_index_statement(ctx *Drop_index_statementContext) interface{} { + return v.VisitChildren(ctx) +} + +func (v *BaseKuneiformParserVisitor) VisitCreate_role_statement(ctx *Create_role_statementContext) interface{} { + return v.VisitChildren(ctx) +} + +func (v *BaseKuneiformParserVisitor) VisitDrop_role_statement(ctx *Drop_role_statementContext) interface{} { + return v.VisitChildren(ctx) +} + +func (v *BaseKuneiformParserVisitor) VisitGrant_statement(ctx *Grant_statementContext) interface{} { + return v.VisitChildren(ctx) +} + +func (v *BaseKuneiformParserVisitor) VisitRevoke_statement(ctx *Revoke_statementContext) interface{} { + return v.VisitChildren(ctx) +} + +func (v *BaseKuneiformParserVisitor) VisitPrivilege_list(ctx *Privilege_listContext) interface{} { + return v.VisitChildren(ctx) +} + +func (v *BaseKuneiformParserVisitor) VisitPrivilege(ctx *PrivilegeContext) interface{} { + return v.VisitChildren(ctx) +} + +func (v *BaseKuneiformParserVisitor) VisitTransfer_ownership_statement(ctx *Transfer_ownership_statementContext) interface{} { + return v.VisitChildren(ctx) +} + +func (v *BaseKuneiformParserVisitor) VisitCreate_action_statement(ctx *Create_action_statementContext) interface{} { + return v.VisitChildren(ctx) +} + +func (v *BaseKuneiformParserVisitor) VisitDrop_action_statement(ctx *Drop_action_statementContext) interface{} { + return v.VisitChildren(ctx) +} + +func (v *BaseKuneiformParserVisitor) VisitUse_extension_statement(ctx *Use_extension_statementContext) interface{} { + return v.VisitChildren(ctx) +} + +func (v *BaseKuneiformParserVisitor) VisitUnuse_extension_statement(ctx *Unuse_extension_statementContext) interface{} { + return v.VisitChildren(ctx) +} + +func (v *BaseKuneiformParserVisitor) VisitCreate_namespace_statement(ctx *Create_namespace_statementContext) interface{} { + return v.VisitChildren(ctx) +} + +func (v *BaseKuneiformParserVisitor) VisitDrop_namespace_statement(ctx *Drop_namespace_statementContext) interface{} { return v.VisitChildren(ctx) } @@ -179,10 +231,6 @@ func (v *BaseKuneiformParserVisitor) VisitSubquery_relation(ctx *Subquery_relati return v.VisitChildren(ctx) } -func (v *BaseKuneiformParserVisitor) VisitFunction_relation(ctx *Function_relationContext) interface{} { - return v.VisitChildren(ctx) -} - func (v *BaseKuneiformParserVisitor) VisitJoin(ctx *JoinContext) interface{} { return v.VisitChildren(ctx) } @@ -255,10 +303,18 @@ func (v *BaseKuneiformParserVisitor) VisitCollate_sql_expr(ctx *Collate_sql_expr return 
v.VisitChildren(ctx) } +func (v *BaseKuneiformParserVisitor) VisitMake_array_sql_expr(ctx *Make_array_sql_exprContext) interface{} { + return v.VisitChildren(ctx) +} + func (v *BaseKuneiformParserVisitor) VisitVariable_sql_expr(ctx *Variable_sql_exprContext) interface{} { return v.VisitChildren(ctx) } +func (v *BaseKuneiformParserVisitor) VisitWindow_function_call_sql_expr(ctx *Window_function_call_sql_exprContext) interface{} { + return v.VisitChildren(ctx) +} + func (v *BaseKuneiformParserVisitor) VisitIs_sql_expr(ctx *Is_sql_exprContext) interface{} { return v.VisitChildren(ctx) } @@ -287,91 +343,71 @@ func (v *BaseKuneiformParserVisitor) VisitIn_sql_expr(ctx *In_sql_exprContext) i return v.VisitChildren(ctx) } -func (v *BaseKuneiformParserVisitor) VisitWhen_then_clause(ctx *When_then_clauseContext) interface{} { - return v.VisitChildren(ctx) -} - -func (v *BaseKuneiformParserVisitor) VisitSql_expr_list(ctx *Sql_expr_listContext) interface{} { - return v.VisitChildren(ctx) -} - -func (v *BaseKuneiformParserVisitor) VisitNormal_call_sql(ctx *Normal_call_sqlContext) interface{} { - return v.VisitChildren(ctx) -} - -func (v *BaseKuneiformParserVisitor) VisitForeign_call_sql(ctx *Foreign_call_sqlContext) interface{} { +func (v *BaseKuneiformParserVisitor) VisitWindow(ctx *WindowContext) interface{} { return v.VisitChildren(ctx) } -func (v *BaseKuneiformParserVisitor) VisitAction_block(ctx *Action_blockContext) interface{} { - return v.VisitChildren(ctx) -} - -func (v *BaseKuneiformParserVisitor) VisitSql_action(ctx *Sql_actionContext) interface{} { - return v.VisitChildren(ctx) -} - -func (v *BaseKuneiformParserVisitor) VisitLocal_action(ctx *Local_actionContext) interface{} { +func (v *BaseKuneiformParserVisitor) VisitWhen_then_clause(ctx *When_then_clauseContext) interface{} { return v.VisitChildren(ctx) } -func (v *BaseKuneiformParserVisitor) VisitExtension_action(ctx *Extension_actionContext) interface{} { +func (v *BaseKuneiformParserVisitor) VisitSql_expr_list(ctx *Sql_expr_listContext) interface{} { return v.VisitChildren(ctx) } -func (v *BaseKuneiformParserVisitor) VisitProcedure_block(ctx *Procedure_blockContext) interface{} { +func (v *BaseKuneiformParserVisitor) VisitNormal_call_sql(ctx *Normal_call_sqlContext) interface{} { return v.VisitChildren(ctx) } -func (v *BaseKuneiformParserVisitor) VisitField_access_procedure_expr(ctx *Field_access_procedure_exprContext) interface{} { +func (v *BaseKuneiformParserVisitor) VisitFunction_call_action_expr(ctx *Function_call_action_exprContext) interface{} { return v.VisitChildren(ctx) } -func (v *BaseKuneiformParserVisitor) VisitLiteral_procedure_expr(ctx *Literal_procedure_exprContext) interface{} { +func (v *BaseKuneiformParserVisitor) VisitLiteral_action_expr(ctx *Literal_action_exprContext) interface{} { return v.VisitChildren(ctx) } -func (v *BaseKuneiformParserVisitor) VisitParen_procedure_expr(ctx *Paren_procedure_exprContext) interface{} { +func (v *BaseKuneiformParserVisitor) VisitField_access_action_expr(ctx *Field_access_action_exprContext) interface{} { return v.VisitChildren(ctx) } -func (v *BaseKuneiformParserVisitor) VisitVariable_procedure_expr(ctx *Variable_procedure_exprContext) interface{} { +func (v *BaseKuneiformParserVisitor) VisitIs_action_expr(ctx *Is_action_exprContext) interface{} { return v.VisitChildren(ctx) } -func (v *BaseKuneiformParserVisitor) VisitMake_array_procedure_expr(ctx *Make_array_procedure_exprContext) interface{} { +func (v *BaseKuneiformParserVisitor) VisitVariable_action_expr(ctx 
*Variable_action_exprContext) interface{} { return v.VisitChildren(ctx) } -func (v *BaseKuneiformParserVisitor) VisitIs_procedure_expr(ctx *Is_procedure_exprContext) interface{} { +func (v *BaseKuneiformParserVisitor) VisitMake_array_action_expr(ctx *Make_array_action_exprContext) interface{} { return v.VisitChildren(ctx) } -func (v *BaseKuneiformParserVisitor) VisitProcedure_expr_arithmetic(ctx *Procedure_expr_arithmeticContext) interface{} { +func (v *BaseKuneiformParserVisitor) VisitComparison_action_expr(ctx *Comparison_action_exprContext) interface{} { return v.VisitChildren(ctx) } -func (v *BaseKuneiformParserVisitor) VisitUnary_procedure_expr(ctx *Unary_procedure_exprContext) interface{} { +func (v *BaseKuneiformParserVisitor) VisitAction_expr_arithmetic(ctx *Action_expr_arithmeticContext) interface{} { return v.VisitChildren(ctx) } -func (v *BaseKuneiformParserVisitor) VisitComparison_procedure_expr(ctx *Comparison_procedure_exprContext) interface{} { +func (v *BaseKuneiformParserVisitor) VisitArray_access_action_expr(ctx *Array_access_action_exprContext) interface{} { return v.VisitChildren(ctx) } -func (v *BaseKuneiformParserVisitor) VisitFunction_call_procedure_expr(ctx *Function_call_procedure_exprContext) interface{} { +func (v *BaseKuneiformParserVisitor) VisitLogical_action_expr(ctx *Logical_action_exprContext) interface{} { return v.VisitChildren(ctx) } -func (v *BaseKuneiformParserVisitor) VisitLogical_procedure_expr(ctx *Logical_procedure_exprContext) interface{} { +func (v *BaseKuneiformParserVisitor) VisitParen_action_expr(ctx *Paren_action_exprContext) interface{} { return v.VisitChildren(ctx) } -func (v *BaseKuneiformParserVisitor) VisitArray_access_procedure_expr(ctx *Array_access_procedure_exprContext) interface{} { +func (v *BaseKuneiformParserVisitor) VisitUnary_action_expr(ctx *Unary_action_exprContext) interface{} { return v.VisitChildren(ctx) } -func (v *BaseKuneiformParserVisitor) VisitProcedure_expr_list(ctx *Procedure_expr_listContext) interface{} { +func (v *BaseKuneiformParserVisitor) VisitAction_expr_list(ctx *Action_expr_listContext) interface{} { return v.VisitChildren(ctx) } @@ -379,7 +415,7 @@ func (v *BaseKuneiformParserVisitor) VisitStmt_variable_declaration(ctx *Stmt_va return v.VisitChildren(ctx) } -func (v *BaseKuneiformParserVisitor) VisitStmt_procedure_call(ctx *Stmt_procedure_callContext) interface{} { +func (v *BaseKuneiformParserVisitor) VisitStmt_action_call(ctx *Stmt_action_callContext) interface{} { return v.VisitChildren(ctx) } @@ -415,11 +451,7 @@ func (v *BaseKuneiformParserVisitor) VisitVariable_or_underscore(ctx *Variable_o return v.VisitChildren(ctx) } -func (v *BaseKuneiformParserVisitor) VisitNormal_call_procedure(ctx *Normal_call_procedureContext) interface{} { - return v.VisitChildren(ctx) -} - -func (v *BaseKuneiformParserVisitor) VisitForeign_call_procedure(ctx *Foreign_call_procedureContext) interface{} { +func (v *BaseKuneiformParserVisitor) VisitNormal_call_action(ctx *Normal_call_actionContext) interface{} { return v.VisitChildren(ctx) } diff --git a/parse/gen/kuneiformparser_visitor.go b/node/engine/parse/gen/kuneiformparser_visitor.go similarity index 55% rename from parse/gen/kuneiformparser_visitor.go rename to node/engine/parse/gen/kuneiformparser_visitor.go index e820fe2c8..c22582668 100644 --- a/parse/gen/kuneiformparser_visitor.go +++ b/node/engine/parse/gen/kuneiformparser_visitor.go @@ -7,17 +7,11 @@ import "github.com/antlr4-go/antlr/v4" type KuneiformParserVisitor interface { antlr.ParseTreeVisitor - // Visit a 
parse tree produced by KuneiformParser#schema_entry. - VisitSchema_entry(ctx *Schema_entryContext) interface{} + // Visit a parse tree produced by KuneiformParser#entry. + VisitEntry(ctx *EntryContext) interface{} - // Visit a parse tree produced by KuneiformParser#sql_entry. - VisitSql_entry(ctx *Sql_entryContext) interface{} - - // Visit a parse tree produced by KuneiformParser#action_entry. - VisitAction_entry(ctx *Action_entryContext) interface{} - - // Visit a parse tree produced by KuneiformParser#procedure_entry. - VisitProcedure_entry(ctx *Procedure_entryContext) interface{} + // Visit a parse tree produced by KuneiformParser#statement. + VisitStatement(ctx *StatementContext) interface{} // Visit a parse tree produced by KuneiformParser#string_literal. VisitString_literal(ctx *String_literalContext) interface{} @@ -52,71 +46,116 @@ type KuneiformParserVisitor interface { // Visit a parse tree produced by KuneiformParser#variable. VisitVariable(ctx *VariableContext) interface{} - // Visit a parse tree produced by KuneiformParser#variable_list. - VisitVariable_list(ctx *Variable_listContext) interface{} + // Visit a parse tree produced by KuneiformParser#table_column_def. + VisitTable_column_def(ctx *Table_column_defContext) interface{} - // Visit a parse tree produced by KuneiformParser#schema. - VisitSchema(ctx *SchemaContext) interface{} + // Visit a parse tree produced by KuneiformParser#type_list. + VisitType_list(ctx *Type_listContext) interface{} - // Visit a parse tree produced by KuneiformParser#annotation. - VisitAnnotation(ctx *AnnotationContext) interface{} + // Visit a parse tree produced by KuneiformParser#named_type_list. + VisitNamed_type_list(ctx *Named_type_listContext) interface{} - // Visit a parse tree produced by KuneiformParser#database_declaration. - VisitDatabase_declaration(ctx *Database_declarationContext) interface{} + // Visit a parse tree produced by KuneiformParser#inline_constraint. + VisitInline_constraint(ctx *Inline_constraintContext) interface{} - // Visit a parse tree produced by KuneiformParser#use_declaration. - VisitUse_declaration(ctx *Use_declarationContext) interface{} + // Visit a parse tree produced by KuneiformParser#fk_action. + VisitFk_action(ctx *Fk_actionContext) interface{} - // Visit a parse tree produced by KuneiformParser#table_declaration. - VisitTable_declaration(ctx *Table_declarationContext) interface{} + // Visit a parse tree produced by KuneiformParser#fk_constraint. + VisitFk_constraint(ctx *Fk_constraintContext) interface{} - // Visit a parse tree produced by KuneiformParser#column_def. - VisitColumn_def(ctx *Column_defContext) interface{} + // Visit a parse tree produced by KuneiformParser#action_return. + VisitAction_return(ctx *Action_returnContext) interface{} - // Visit a parse tree produced by KuneiformParser#index_def. - VisitIndex_def(ctx *Index_defContext) interface{} + // Visit a parse tree produced by KuneiformParser#sql_statement. + VisitSql_statement(ctx *Sql_statementContext) interface{} - // Visit a parse tree produced by KuneiformParser#foreign_key_def. - VisitForeign_key_def(ctx *Foreign_key_defContext) interface{} + // Visit a parse tree produced by KuneiformParser#common_table_expression. + VisitCommon_table_expression(ctx *Common_table_expressionContext) interface{} - // Visit a parse tree produced by KuneiformParser#foreign_key_action. - VisitForeign_key_action(ctx *Foreign_key_actionContext) interface{} + // Visit a parse tree produced by KuneiformParser#create_table_statement. 
+ VisitCreate_table_statement(ctx *Create_table_statementContext) interface{} - // Visit a parse tree produced by KuneiformParser#type_list. - VisitType_list(ctx *Type_listContext) interface{} + // Visit a parse tree produced by KuneiformParser#table_constraint_def. + VisitTable_constraint_def(ctx *Table_constraint_defContext) interface{} - // Visit a parse tree produced by KuneiformParser#named_type_list. - VisitNamed_type_list(ctx *Named_type_listContext) interface{} + // Visit a parse tree produced by KuneiformParser#opt_drop_behavior. + VisitOpt_drop_behavior(ctx *Opt_drop_behaviorContext) interface{} - // Visit a parse tree produced by KuneiformParser#typed_variable_list. - VisitTyped_variable_list(ctx *Typed_variable_listContext) interface{} + // Visit a parse tree produced by KuneiformParser#drop_table_statement. + VisitDrop_table_statement(ctx *Drop_table_statementContext) interface{} - // Visit a parse tree produced by KuneiformParser#constraint. - VisitConstraint(ctx *ConstraintContext) interface{} + // Visit a parse tree produced by KuneiformParser#alter_table_statement. + VisitAlter_table_statement(ctx *Alter_table_statementContext) interface{} - // Visit a parse tree produced by KuneiformParser#access_modifier. - VisitAccess_modifier(ctx *Access_modifierContext) interface{} + // Visit a parse tree produced by KuneiformParser#add_column_constraint. + VisitAdd_column_constraint(ctx *Add_column_constraintContext) interface{} - // Visit a parse tree produced by KuneiformParser#action_declaration. - VisitAction_declaration(ctx *Action_declarationContext) interface{} + // Visit a parse tree produced by KuneiformParser#drop_column_constraint. + VisitDrop_column_constraint(ctx *Drop_column_constraintContext) interface{} - // Visit a parse tree produced by KuneiformParser#procedure_declaration. - VisitProcedure_declaration(ctx *Procedure_declarationContext) interface{} + // Visit a parse tree produced by KuneiformParser#add_column. + VisitAdd_column(ctx *Add_columnContext) interface{} - // Visit a parse tree produced by KuneiformParser#foreign_procedure_declaration. - VisitForeign_procedure_declaration(ctx *Foreign_procedure_declarationContext) interface{} + // Visit a parse tree produced by KuneiformParser#drop_column. + VisitDrop_column(ctx *Drop_columnContext) interface{} - // Visit a parse tree produced by KuneiformParser#procedure_return. - VisitProcedure_return(ctx *Procedure_returnContext) interface{} + // Visit a parse tree produced by KuneiformParser#rename_column. + VisitRename_column(ctx *Rename_columnContext) interface{} - // Visit a parse tree produced by KuneiformParser#sql. - VisitSql(ctx *SqlContext) interface{} + // Visit a parse tree produced by KuneiformParser#rename_table. + VisitRename_table(ctx *Rename_tableContext) interface{} - // Visit a parse tree produced by KuneiformParser#sql_statement. - VisitSql_statement(ctx *Sql_statementContext) interface{} + // Visit a parse tree produced by KuneiformParser#add_table_constraint. + VisitAdd_table_constraint(ctx *Add_table_constraintContext) interface{} - // Visit a parse tree produced by KuneiformParser#common_table_expression. - VisitCommon_table_expression(ctx *Common_table_expressionContext) interface{} + // Visit a parse tree produced by KuneiformParser#drop_table_constraint. + VisitDrop_table_constraint(ctx *Drop_table_constraintContext) interface{} + + // Visit a parse tree produced by KuneiformParser#create_index_statement. 
+ VisitCreate_index_statement(ctx *Create_index_statementContext) interface{} + + // Visit a parse tree produced by KuneiformParser#drop_index_statement. + VisitDrop_index_statement(ctx *Drop_index_statementContext) interface{} + + // Visit a parse tree produced by KuneiformParser#create_role_statement. + VisitCreate_role_statement(ctx *Create_role_statementContext) interface{} + + // Visit a parse tree produced by KuneiformParser#drop_role_statement. + VisitDrop_role_statement(ctx *Drop_role_statementContext) interface{} + + // Visit a parse tree produced by KuneiformParser#grant_statement. + VisitGrant_statement(ctx *Grant_statementContext) interface{} + + // Visit a parse tree produced by KuneiformParser#revoke_statement. + VisitRevoke_statement(ctx *Revoke_statementContext) interface{} + + // Visit a parse tree produced by KuneiformParser#privilege_list. + VisitPrivilege_list(ctx *Privilege_listContext) interface{} + + // Visit a parse tree produced by KuneiformParser#privilege. + VisitPrivilege(ctx *PrivilegeContext) interface{} + + // Visit a parse tree produced by KuneiformParser#transfer_ownership_statement. + VisitTransfer_ownership_statement(ctx *Transfer_ownership_statementContext) interface{} + + // Visit a parse tree produced by KuneiformParser#create_action_statement. + VisitCreate_action_statement(ctx *Create_action_statementContext) interface{} + + // Visit a parse tree produced by KuneiformParser#drop_action_statement. + VisitDrop_action_statement(ctx *Drop_action_statementContext) interface{} + + // Visit a parse tree produced by KuneiformParser#use_extension_statement. + VisitUse_extension_statement(ctx *Use_extension_statementContext) interface{} + + // Visit a parse tree produced by KuneiformParser#unuse_extension_statement. + VisitUnuse_extension_statement(ctx *Unuse_extension_statementContext) interface{} + + // Visit a parse tree produced by KuneiformParser#create_namespace_statement. + VisitCreate_namespace_statement(ctx *Create_namespace_statementContext) interface{} + + // Visit a parse tree produced by KuneiformParser#drop_namespace_statement. + VisitDrop_namespace_statement(ctx *Drop_namespace_statementContext) interface{} // Visit a parse tree produced by KuneiformParser#select_statement. VisitSelect_statement(ctx *Select_statementContext) interface{} @@ -136,9 +175,6 @@ type KuneiformParserVisitor interface { // Visit a parse tree produced by KuneiformParser#subquery_relation. VisitSubquery_relation(ctx *Subquery_relationContext) interface{} - // Visit a parse tree produced by KuneiformParser#function_relation. - VisitFunction_relation(ctx *Function_relationContext) interface{} - // Visit a parse tree produced by KuneiformParser#join. VisitJoin(ctx *JoinContext) interface{} @@ -193,9 +229,15 @@ type KuneiformParserVisitor interface { // Visit a parse tree produced by KuneiformParser#collate_sql_expr. VisitCollate_sql_expr(ctx *Collate_sql_exprContext) interface{} + // Visit a parse tree produced by KuneiformParser#make_array_sql_expr. + VisitMake_array_sql_expr(ctx *Make_array_sql_exprContext) interface{} + // Visit a parse tree produced by KuneiformParser#variable_sql_expr. VisitVariable_sql_expr(ctx *Variable_sql_exprContext) interface{} + // Visit a parse tree produced by KuneiformParser#window_function_call_sql_expr. + VisitWindow_function_call_sql_expr(ctx *Window_function_call_sql_exprContext) interface{} + // Visit a parse tree produced by KuneiformParser#is_sql_expr. 
VisitIs_sql_expr(ctx *Is_sql_exprContext) interface{} @@ -217,6 +259,9 @@ type KuneiformParserVisitor interface { // Visit a parse tree produced by KuneiformParser#in_sql_expr. VisitIn_sql_expr(ctx *In_sql_exprContext) interface{} + // Visit a parse tree produced by KuneiformParser#window. + VisitWindow(ctx *WindowContext) interface{} + // Visit a parse tree produced by KuneiformParser#when_then_clause. VisitWhen_then_clause(ctx *When_then_clauseContext) interface{} @@ -226,68 +271,50 @@ type KuneiformParserVisitor interface { // Visit a parse tree produced by KuneiformParser#normal_call_sql. VisitNormal_call_sql(ctx *Normal_call_sqlContext) interface{} - // Visit a parse tree produced by KuneiformParser#foreign_call_sql. - VisitForeign_call_sql(ctx *Foreign_call_sqlContext) interface{} - - // Visit a parse tree produced by KuneiformParser#action_block. - VisitAction_block(ctx *Action_blockContext) interface{} - - // Visit a parse tree produced by KuneiformParser#sql_action. - VisitSql_action(ctx *Sql_actionContext) interface{} + // Visit a parse tree produced by KuneiformParser#function_call_action_expr. + VisitFunction_call_action_expr(ctx *Function_call_action_exprContext) interface{} - // Visit a parse tree produced by KuneiformParser#local_action. - VisitLocal_action(ctx *Local_actionContext) interface{} + // Visit a parse tree produced by KuneiformParser#literal_action_expr. + VisitLiteral_action_expr(ctx *Literal_action_exprContext) interface{} - // Visit a parse tree produced by KuneiformParser#extension_action. - VisitExtension_action(ctx *Extension_actionContext) interface{} + // Visit a parse tree produced by KuneiformParser#field_access_action_expr. + VisitField_access_action_expr(ctx *Field_access_action_exprContext) interface{} - // Visit a parse tree produced by KuneiformParser#procedure_block. - VisitProcedure_block(ctx *Procedure_blockContext) interface{} + // Visit a parse tree produced by KuneiformParser#is_action_expr. + VisitIs_action_expr(ctx *Is_action_exprContext) interface{} - // Visit a parse tree produced by KuneiformParser#field_access_procedure_expr. - VisitField_access_procedure_expr(ctx *Field_access_procedure_exprContext) interface{} + // Visit a parse tree produced by KuneiformParser#variable_action_expr. + VisitVariable_action_expr(ctx *Variable_action_exprContext) interface{} - // Visit a parse tree produced by KuneiformParser#literal_procedure_expr. - VisitLiteral_procedure_expr(ctx *Literal_procedure_exprContext) interface{} + // Visit a parse tree produced by KuneiformParser#make_array_action_expr. + VisitMake_array_action_expr(ctx *Make_array_action_exprContext) interface{} - // Visit a parse tree produced by KuneiformParser#paren_procedure_expr. - VisitParen_procedure_expr(ctx *Paren_procedure_exprContext) interface{} + // Visit a parse tree produced by KuneiformParser#comparison_action_expr. + VisitComparison_action_expr(ctx *Comparison_action_exprContext) interface{} - // Visit a parse tree produced by KuneiformParser#variable_procedure_expr. - VisitVariable_procedure_expr(ctx *Variable_procedure_exprContext) interface{} + // Visit a parse tree produced by KuneiformParser#action_expr_arithmetic. + VisitAction_expr_arithmetic(ctx *Action_expr_arithmeticContext) interface{} - // Visit a parse tree produced by KuneiformParser#make_array_procedure_expr. - VisitMake_array_procedure_expr(ctx *Make_array_procedure_exprContext) interface{} + // Visit a parse tree produced by KuneiformParser#array_access_action_expr. 
+ VisitArray_access_action_expr(ctx *Array_access_action_exprContext) interface{} - // Visit a parse tree produced by KuneiformParser#is_procedure_expr. - VisitIs_procedure_expr(ctx *Is_procedure_exprContext) interface{} + // Visit a parse tree produced by KuneiformParser#logical_action_expr. + VisitLogical_action_expr(ctx *Logical_action_exprContext) interface{} - // Visit a parse tree produced by KuneiformParser#procedure_expr_arithmetic. - VisitProcedure_expr_arithmetic(ctx *Procedure_expr_arithmeticContext) interface{} + // Visit a parse tree produced by KuneiformParser#paren_action_expr. + VisitParen_action_expr(ctx *Paren_action_exprContext) interface{} - // Visit a parse tree produced by KuneiformParser#unary_procedure_expr. - VisitUnary_procedure_expr(ctx *Unary_procedure_exprContext) interface{} + // Visit a parse tree produced by KuneiformParser#unary_action_expr. + VisitUnary_action_expr(ctx *Unary_action_exprContext) interface{} - // Visit a parse tree produced by KuneiformParser#comparison_procedure_expr. - VisitComparison_procedure_expr(ctx *Comparison_procedure_exprContext) interface{} - - // Visit a parse tree produced by KuneiformParser#function_call_procedure_expr. - VisitFunction_call_procedure_expr(ctx *Function_call_procedure_exprContext) interface{} - - // Visit a parse tree produced by KuneiformParser#logical_procedure_expr. - VisitLogical_procedure_expr(ctx *Logical_procedure_exprContext) interface{} - - // Visit a parse tree produced by KuneiformParser#array_access_procedure_expr. - VisitArray_access_procedure_expr(ctx *Array_access_procedure_exprContext) interface{} - - // Visit a parse tree produced by KuneiformParser#procedure_expr_list. - VisitProcedure_expr_list(ctx *Procedure_expr_listContext) interface{} + // Visit a parse tree produced by KuneiformParser#action_expr_list. + VisitAction_expr_list(ctx *Action_expr_listContext) interface{} // Visit a parse tree produced by KuneiformParser#stmt_variable_declaration. VisitStmt_variable_declaration(ctx *Stmt_variable_declarationContext) interface{} - // Visit a parse tree produced by KuneiformParser#stmt_procedure_call. - VisitStmt_procedure_call(ctx *Stmt_procedure_callContext) interface{} + // Visit a parse tree produced by KuneiformParser#stmt_action_call. + VisitStmt_action_call(ctx *Stmt_action_callContext) interface{} // Visit a parse tree produced by KuneiformParser#stmt_variable_assignment. VisitStmt_variable_assignment(ctx *Stmt_variable_assignmentContext) interface{} @@ -313,11 +340,8 @@ type KuneiformParserVisitor interface { // Visit a parse tree produced by KuneiformParser#variable_or_underscore. VisitVariable_or_underscore(ctx *Variable_or_underscoreContext) interface{} - // Visit a parse tree produced by KuneiformParser#normal_call_procedure. - VisitNormal_call_procedure(ctx *Normal_call_procedureContext) interface{} - - // Visit a parse tree produced by KuneiformParser#foreign_call_procedure. - VisitForeign_call_procedure(ctx *Foreign_call_procedureContext) interface{} + // Visit a parse tree produced by KuneiformParser#normal_call_action. + VisitNormal_call_action(ctx *Normal_call_actionContext) interface{} // Visit a parse tree produced by KuneiformParser#if_then_block. 
VisitIf_then_block(ctx *If_then_blockContext) interface{} diff --git a/parse/grammar/KuneiformLexer.g4 b/node/engine/parse/grammar/KuneiformLexer.g4 similarity index 83% rename from parse/grammar/KuneiformLexer.g4 rename to node/engine/parse/grammar/KuneiformLexer.g4 index 443f49ef1..0f1fdd1f0 100644 --- a/parse/grammar/KuneiformLexer.g4 +++ b/node/engine/parse/grammar/KuneiformLexer.g4 @@ -43,18 +43,21 @@ DOUBLE_QUOTE: '"'; // top-level blocks -DATABASE: 'database'; USE: 'use'; +UNUSE: 'unuse'; TABLE: 'table'; ACTION: 'action'; -PROCEDURE: 'procedure'; - -PUBLIC: 'public'; -PRIVATE: 'private'; -VIEW: 'view'; -OWNER: 'owner'; // keywords +CREATE: 'create'; +ALTER: 'alter'; +COLUMN: 'column'; +ADD: 'add'; +DROP: 'drop'; +RENAME: 'rename'; +TO: 'to'; +CONSTRAINT: 'constraint'; +CHECK: 'check'; FOREIGN: 'foreign'; PRIMARY: 'primary'; KEY: 'key'; @@ -127,6 +130,24 @@ ELSE: 'else'; BREAK: 'break'; RETURN: 'return'; NEXT: 'next'; +OVER: 'over'; +PARTITION: 'partition'; +WINDOW: 'window'; +FILTER: 'filter'; +RECURSIVE: 'recursive'; +SCHEMA: 'schema'; +GRANT: 'grant'; +REVOKE: 'revoke'; +ROLE: 'role'; +TRANSFER: 'transfer'; +OWNERSHIP: 'ownership'; +REPLACE: 'replace'; +ARRAY: 'array'; +NAMESPACE: 'namespace'; + +// role permissions +ROLES: 'roles'; +CALL: 'call'; // Literals diff --git a/node/engine/parse/grammar/KuneiformParser.g4 b/node/engine/parse/grammar/KuneiformParser.g4 new file mode 100644 index 000000000..9c8cee283 --- /dev/null +++ b/node/engine/parse/grammar/KuneiformParser.g4 @@ -0,0 +1,452 @@ +/* + * A ANTLR4 grammar for Kuneiform. + * Developed by the Kwil team. +*/ +parser grammar KuneiformParser; + +options { + tokenVocab = KuneiformLexer; +} + +// entry point for the parser +entry: + // optional semicolons, but required if there are multiple statements to delimit them + statement (SCOL statement)* SCOL? EOF +; + +statement: + (LBRACE namespace=identifier RBRACE)? + ( + sql_statement + | create_table_statement + | alter_table_statement + | drop_table_statement + | create_index_statement + | drop_index_statement + | create_role_statement + | drop_role_statement + | grant_statement + | revoke_statement + | transfer_ownership_statement + | create_action_statement + | drop_action_statement + | use_extension_statement + | unuse_extension_statement + | create_namespace_statement + | drop_namespace_statement + ) +; + +/* + The following section includes the parser rules that are commonly + used among all sections of the grammar. These include literals, +*/ + +literal: + STRING_ # string_literal + | (PLUS | MINUS)? DIGITS_ # integer_literal + | (PLUS | MINUS)? DIGITS_ PERIOD DIGITS_ # decimal_literal + | (TRUE | FALSE) # boolean_literal + | NULL # null_literal + | BINARY_ # binary_literal +; + +// identifier is used for table / column names +identifier: + (DOUBLE_QUOTE IDENTIFIER DOUBLE_QUOTE) | IDENTIFIER +; + +identifier_list: + identifier (COMMA identifier)* +; + +type: + identifier (LPAREN DIGITS_ COMMA DIGITS_ RPAREN)? (LBRACKET RBRACKET)? // Handles arrays of any type, including nested arrays +; + +type_cast: + TYPE_CAST type +; + +variable: + VARIABLE | CONTEXTUAL_VARIABLE +; + +/* + The following section includes parser rules for top-level Kuneiform. + These are the rules that parse the schema / DDL, and are used pre-consensus. 
+*/ + +table_column_def: + name=identifier type inline_constraint* +; + +type_list: + type (COMMA type)* +; + +named_type_list: + identifier type (COMMA identifier type)* +; + +inline_constraint: + PRIMARY KEY + | UNIQUE + | NOT NULL + | DEFAULT action_expr + | fk_constraint + | CHECK (LPAREN sql_expr RPAREN) +; + +fk_action: + ON (UPDATE|DELETE) + (SET NULL | SET DEFAULT | RESTRICT | NO ACTION | CASCADE) +; + +fk_constraint: + REFERENCES (namespace=identifier PERIOD)? table=identifier LPAREN identifier_list RPAREN (fk_action (fk_action)?)? // can be up to 0-2 actions +; + +action_return: + RETURNS (TABLE? LPAREN return_columns=named_type_list RPAREN + | LPAREN unnamed_return_types=type_list RPAREN) +; + +/* + The following section includes parser rules for SQL. +*/ + +sql_statement: // NOTE: This is only DDL. We should combine ddl and dml into sql_stmt in the future. + (WITH RECURSIVE? common_table_expression (COMMA common_table_expression)*)? + (select_statement | update_statement | insert_statement | delete_statement) +; + +common_table_expression: + identifier (LPAREN (identifier (COMMA identifier)*)? RPAREN)? AS LPAREN select_statement RPAREN +; + +create_table_statement: + CREATE TABLE (IF NOT EXISTS)? name=identifier + LPAREN + (table_column_def | table_constraint_def) + (COMMA (table_column_def | table_constraint_def))* + RPAREN +; + +table_constraint_def: + (CONSTRAINT name=identifier)? + ( + UNIQUE LPAREN identifier_list RPAREN + | CHECK LPAREN sql_expr RPAREN + | FOREIGN KEY LPAREN identifier_list RPAREN fk_constraint + | PRIMARY KEY LPAREN identifier_list RPAREN + ) +; + +opt_drop_behavior: + CASCADE + | RESTRICT +; + +drop_table_statement: + DROP TABLE (IF EXISTS)? tables=identifier_list opt_drop_behavior? +; + +alter_table_statement: + ALTER TABLE table=identifier + alter_table_action +; + +alter_table_action: + ALTER COLUMN column=identifier SET (NOT NULL | DEFAULT action_expr) # add_column_constraint + | ALTER COLUMN column=identifier DROP (NOT NULL | DEFAULT) # drop_column_constraint + | ADD COLUMN column=identifier type # add_column + | DROP COLUMN column=identifier # drop_column + | RENAME COLUMN old_column=identifier TO new_column=identifier # rename_column + | RENAME TO new_table=identifier # rename_table + | ADD table_constraint_def # add_table_constraint + | DROP CONSTRAINT identifier # drop_table_constraint +; + +create_index_statement: + CREATE UNIQUE? INDEX (IF NOT EXISTS)? name=identifier? + ON table=identifier LPAREN columns=identifier_list RPAREN +; + +drop_index_statement: + DROP INDEX (IF EXISTS)? name=identifier +; + +create_role_statement: + CREATE ROLE (IF NOT EXISTS)? identifier +; + +drop_role_statement: + DROP ROLE (IF EXISTS)? identifier +; + +grant_statement: + GRANT (privilege_list|grant_role=identifier) (ON namespace=identifier)? TO (role=identifier|user=STRING_) +; + +revoke_statement: + REVOKE (privilege_list|grant_role=identifier) (ON namespace=identifier)? FROM (role=identifier|user=STRING_) +; + +privilege_list: + privilege (COMMA privilege)* +; + +privilege: + SELECT | INSERT | UPDATE | DELETE | CREATE | DROP | ALTER | ROLES | CALL | USE +; + +transfer_ownership_statement: + TRANSFER OWNERSHIP TO identifier +; + +create_action_statement: + CREATE ACTION ((IF NOT EXISTS)|(OR REPLACE))? identifier + LPAREN (VARIABLE type (COMMA VARIABLE type)*)? RPAREN + identifier* + action_return? + LBRACE action_statement* RBRACE +; + +drop_action_statement: + DROP ACTION (IF EXISTS)? 
identifier +; + +use_extension_statement: + USE extension_name=identifier (IF NOT EXISTS)? + (LBRACE (identifier COL action_expr (COMMA identifier COL action_expr)*)? RBRACE)? + AS alias=identifier +; + +unuse_extension_statement: + UNUSE alias=identifier (IF EXISTS)? +; + +create_namespace_statement: + CREATE NAMESPACE (IF NOT EXISTS)? identifier +; + +drop_namespace_statement: + DROP NAMESPACE (IF EXISTS)? identifier +; + +select_statement: + select_core + (compound_operator select_core)* + (ORDER BY ordering_term (COMMA ordering_term)*)? + (LIMIT limit=sql_expr)? + (OFFSET offset=sql_expr)? +; + +compound_operator: + UNION ALL? | INTERSECT | EXCEPT +; + +ordering_term: + sql_expr (ASC | DESC)? (NULLS (FIRST | LAST))? +; + +select_core: + SELECT DISTINCT? + result_column (COMMA result_column)* + (FROM relation join*)? + (WHERE where=sql_expr)? + ( + GROUP BY group_by=sql_expr_list + (HAVING having=sql_expr)? + )? + (WINDOW identifier AS window (COMMA identifier AS window)*)? +; + +relation: + (namespace=identifier PERIOD)? table_name=identifier (AS? alias=identifier)? # table_relation + // aliases are technically required in Kuneiform for subquery and function calls, + // but we allow it to pass here since it is standard SQL to not require it, and + // we can throw a better error message after parsing. + | LPAREN select_statement RPAREN (AS? alias=identifier)? # subquery_relation +; + +join: + (INNER| LEFT | RIGHT | FULL)? JOIN + relation ON sql_expr +; + +result_column: + sql_expr (AS? identifier)? # expression_result_column + | (table_name=identifier PERIOD)? STAR # wildcard_result_column +; + +update_statement: + UPDATE table_name=identifier (AS? alias=identifier)? + SET update_set_clause (COMMA update_set_clause)* + (FROM relation join*)? + (WHERE where=sql_expr)? +; + +update_set_clause: + column=identifier EQUALS sql_expr +; + +insert_statement: + INSERT INTO table_name=identifier (AS? alias=identifier)? + (LPAREN target_columns=identifier_list RPAREN)? + ( + (VALUES LPAREN sql_expr_list RPAREN (COMMA LPAREN sql_expr_list RPAREN)*) + | (select_statement) + ) + upsert_clause? +; + +upsert_clause: + ON CONFLICT + (LPAREN conflict_columns=identifier_list RPAREN (WHERE conflict_where=sql_expr)?)? + DO ( + NOTHING + | UPDATE SET update_set_clause (COMMA update_set_clause)* + (WHERE update_where=sql_expr)? + ) +; + +delete_statement: + DELETE FROM table_name=identifier (AS? alias=identifier)? + // (USING relation join*)? + (WHERE where=sql_expr)? +; + +// https://docs.kwil.com/docs/kuneiform/operators +sql_expr: + // highest precedence: + LPAREN sql_expr RPAREN type_cast? # paren_sql_expr + | sql_expr PERIOD identifier type_cast? # field_access_sql_expr + | array_element=sql_expr LBRACKET ( + // can be arr[1], arr[1:2], arr[1:], arr[:2], arr[:] + single=sql_expr + | (left=sql_expr? COL right=sql_expr?) + ) RBRACKET type_cast? # array_access_sql_expr + | (PLUS|MINUS) sql_expr # unary_sql_expr + | sql_expr COLLATE identifier # collate_sql_expr + | left=sql_expr (STAR | DIV | MOD) right=sql_expr # arithmetic_sql_expr + | left=sql_expr (PLUS | MINUS) right=sql_expr # arithmetic_sql_expr + + // any unspecified operator: + | literal type_cast? # literal_sql_expr + // direct function calls can have a type cast, but window functions cannot + | sql_function_call (FILTER LPAREN WHERE sql_expr RPAREN)? OVER (window|identifier) # window_function_call_sql_expr + | sql_function_call type_cast? # function_call_sql_expr + | variable type_cast? # variable_sql_expr + | ARRAY LBRACKET (sql_expr_list)? 
RBRACKET type_cast? # make_array_sql_expr + | (table=identifier PERIOD)? column=identifier type_cast? # column_sql_expr + | CASE case_clause=sql_expr? + (when_then_clause)+ + (ELSE else_clause=sql_expr)? END # case_expr + | (NOT? EXISTS)? LPAREN select_statement RPAREN type_cast? # subquery_sql_expr + // setting precedence for arithmetic operations: + | left=sql_expr CONCAT right=sql_expr # arithmetic_sql_expr + + // the rest: + | sql_expr NOT? IN LPAREN (sql_expr_list|select_statement) RPAREN # in_sql_expr + | left=sql_expr NOT? (LIKE|ILIKE) right=sql_expr # like_sql_expr + | element=sql_expr (NOT)? BETWEEN lower=sql_expr AND upper=sql_expr # between_sql_expr + | left=sql_expr (EQUALS | EQUATE | NEQ | LT | LTE | GT | GTE) right=sql_expr # comparison_sql_expr + | left=sql_expr IS NOT? ((DISTINCT FROM right=sql_expr) | NULL | TRUE | FALSE) # is_sql_expr + | (NOT) sql_expr # unary_sql_expr + | left=sql_expr AND right=sql_expr # logical_sql_expr + | left=sql_expr OR right=sql_expr # logical_sql_expr +; + +window: + LPAREN + (PARTITION BY partition=sql_expr_list)? + (ORDER BY ordering_term (COMMA ordering_term)*)? + RPAREN +; + + +when_then_clause: + WHEN when_condition=sql_expr THEN then=sql_expr +; + +sql_expr_list: + sql_expr (COMMA sql_expr)* +; + +sql_function_call: + identifier LPAREN (DISTINCT? sql_expr_list|STAR)? RPAREN #normal_call_sql +; + +/* + The following section includes parser rules for action blocks. +*/ + +/* + This section includes parser rules for actions +*/ + +// https://docs.kwil.com/docs/kuneiform/operators +action_expr: + // highest precedence: + LPAREN action_expr RPAREN type_cast? # paren_action_expr + | action_expr PERIOD identifier type_cast? # field_access_action_expr + | array_element=action_expr LBRACKET ( + // can be arr[1], arr[1:2], arr[1:], arr[:2], arr[:] + single=action_expr + | (left=action_expr? COL right=action_expr?) + ) RBRACKET type_cast? # array_access_action_expr + | (PLUS|MINUS|EXCL) action_expr # unary_action_expr + | action_expr (STAR | DIV | MOD) action_expr # action_expr_arithmetic + | action_expr (PLUS | MINUS) action_expr # action_expr_arithmetic + + // any unspecified operator: + | literal type_cast? # literal_action_expr + | action_function_call type_cast? # function_call_action_expr + | variable type_cast? # variable_action_expr + | ARRAY? LBRACKET (action_expr_list)? RBRACKET type_cast? # make_array_action_expr // array is optional for backwards compatibility + | action_expr CONCAT action_expr # action_expr_arithmetic + + // the rest: + | action_expr (EQUALS | EQUATE | NEQ | LT | LTE | GT | GTE) action_expr # comparison_action_expr + | left=action_expr IS NOT? ((DISTINCT FROM right=action_expr) | NULL | TRUE | FALSE) # is_action_expr + | (NOT) action_expr # unary_action_expr + | action_expr AND action_expr # logical_action_expr + | action_expr OR action_expr # logical_action_expr +; + +action_expr_list: + action_expr (COMMA action_expr)* +; + +// some of the action_statements have optional semicolons. This is for backwards compatibility. +action_statement: + VARIABLE type SCOL # stmt_variable_declaration + // stmt_action_call must go above stmt_variable_assignment due to lexer ambiguity + | ((variable_or_underscore) (COMMA (variable_or_underscore))* ASSIGN)? action_function_call SCOL # stmt_action_call + | action_expr type? ASSIGN action_expr SCOL # stmt_variable_assignment + | FOR receiver=VARIABLE IN (range|target_variable=variable|sql_statement) LBRACE action_statement* RBRACE SCOL? 
# stmt_for_loop + | IF if_then_block (ELSEIF if_then_block)* (ELSE LBRACE action_statement* RBRACE)? SCOL? # stmt_if + | sql_statement SCOL # stmt_sql + | BREAK SCOL # stmt_break + | RETURN (action_expr_list|sql_statement)? SCOL # stmt_return + | RETURN NEXT action_expr_list SCOL # stmt_return_next +; + +variable_or_underscore: + VARIABLE | UNDERSCORE +; + +action_function_call: + (namespace=identifier PERIOD)? function=identifier LPAREN (action_expr_list)? RPAREN #normal_call_action +; + +if_then_block: + action_expr LBRACE action_statement* RBRACE +; + +// range used for for loops +range: + action_expr RANGE action_expr +; \ No newline at end of file diff --git a/parse/grammar/README.md b/node/engine/parse/grammar/README.md similarity index 100% rename from parse/grammar/README.md rename to node/engine/parse/grammar/README.md diff --git a/parse/grammar/generate.sh b/node/engine/parse/grammar/generate.sh similarity index 100% rename from parse/grammar/generate.sh rename to node/engine/parse/grammar/generate.sh diff --git a/parse/grammar/rrdiagrams.html b/node/engine/parse/grammar/rrdiagrams.html similarity index 100% rename from parse/grammar/rrdiagrams.html rename to node/engine/parse/grammar/rrdiagrams.html diff --git a/node/engine/parse/parse.go b/node/engine/parse/parse.go new file mode 100644 index 000000000..f3c0e4684 --- /dev/null +++ b/node/engine/parse/parse.go @@ -0,0 +1,196 @@ +// package parse contains logic for parsing SQL, DDL, and Actions, +// and SQL. +package parse + +import ( + "fmt" + "reflect" + "runtime" + "strings" + + "github.com/antlr4-go/antlr/v4" + "github.com/kwilteam/kwil-db/node/engine/parse/gen" +) + +// ParseResult is the result of parsing a SQL statement. +// It can be any statement, including: +// - CREATE TABLE +// - SELECT/INSERT/UPDATE/DELETE +// - CREATE ACTION +// - etc. +type ParseResult struct { + // Statements are the individual statements, in the order they were encountered. + Statements []TopLevelStatement + // ParseErrs is the error listener that contains all the errors that occurred during parsing. + ParseErrs ParseErrs `json:"parse_errs,omitempty"` +} + +func (r *ParseResult) Err() error { + return r.ParseErrs.Err() +} + +// Parse parses a statement or set of statements separated by semicolons. +func Parse(sql string) (t []TopLevelStatement, err error) { + defer func() { + if r := recover(); r != nil { + switch x := r.(type) { + case error: + err = x + default: + err = fmt.Errorf("panic: %v", r) + } + } + }() + + res, err := ParseWithErrListener(sql) + if err != nil { + return nil, err + } + + if res.Err() != nil { + return nil, res.Err() + } + + return res.Statements, nil +} + +// ParseWithErrListener parses a statement or set of statements separated by semicolons. +// It returns the parsed statements, as well as an error listener with position information. +// Public consumers should opt for Parse instead, unless there is a specific need for the error listener. 
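+//
+// A minimal usage sketch; the statement text below is illustrative and not part of this package:
+//
+//	res, err := ParseWithErrListener("CREATE NAMESPACE IF NOT EXISTS demo;")
+//	if err != nil {
+//		// unexpected failure (e.g. an internal panic), not a syntax error
+//	}
+//	if res.Err() != nil {
+//		// syntax errors; position information is available via res.ParseErrs
+//	}
+//	_ = res.Statements // parsed top-level statements, in source order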
+func ParseWithErrListener(sql string) (p *ParseResult, err error) { + parser, errLis, parseVisitor, deferFn, err := setupParser(sql) + if err != nil { + return nil, err + } + p = &ParseResult{ + ParseErrs: errLis, + } + + defer func() { + err2 := deferFn(recover()) + if err2 != nil { + err = err2 + } + }() + + p.Statements = parser.Entry().Accept(parseVisitor).([]TopLevelStatement) + + return p, nil +} + +func setupParser(sql string) (parser *gen.KuneiformParser, errList *errorListener, parserVisitor *schemaVisitor, deferFn func(any) error, err error) { + // trim whitespace + sql = strings.TrimSpace(sql) + + // add semicolon to the end of the statement, if it is not there + if !strings.HasSuffix(sql, ";") { + sql += ";" + } + + errList = newErrorListener("sql") + stream := antlr.NewInputStream(sql) + + lexer := gen.NewKuneiformLexer(stream) + tokens := antlr.NewCommonTokenStream(lexer, antlr.TokenDefaultChannel) + parser = gen.NewKuneiformParser(tokens) + errList.toks = tokens + + // remove defaults + lexer.RemoveErrorListeners() + parser.RemoveErrorListeners() + lexer.AddErrorListener(errList) + parser.AddErrorListener(errList) + + parser.BuildParseTrees = true + + deferFn = func(e any) (err error) { + if e != nil { + var ok bool + err, ok = e.(error) + if !ok { + err = fmt.Errorf("panic: %v", e) + } + } + + // if there is a panic, it may be due to a syntax error. + // therefore, we should check for syntax errors first and if + // any occur, swallow the panic and return the syntax errors. + // If the issue persists past syntax errors, the else block + // will return the error. + if errList.Err() != nil { + return nil + } else if err != nil { + // stack trace since this is a core bug or unexpected error + buf := make([]byte, 1<<16) + + stackSize := runtime.Stack(buf, false) + err = fmt.Errorf("%w\n\n%s", err, buf[:stackSize]) + + return err + } + + return nil + } + + parserVisitor = newSchemaVisitor(stream, errList) + + return parser, errList, parserVisitor, deferFn, err +} + +// RecursivelyVisitPositions traverses a structure recursively, visiting all position struct types. +// It is used in both parsing tools, as well as in tests. +// WARNING: This function should NEVER be used in consensus, since it is non-deterministic. +func RecursivelyVisitPositions(v any, fn func(GetPositioner)) { + + visited := make(map[uintptr]struct{}) + visitRecursive(reflect.ValueOf(v), reflect.TypeOf((*GetPositioner)(nil)).Elem(), func(v reflect.Value) { + if v.CanInterface() { + a := v.Interface().(GetPositioner) + fn(a) + } + }, visited) +} + +// visitRecursive is a recursive function that visits all types that implement the target interface. 
+func visitRecursive(v reflect.Value, target reflect.Type, fn func(reflect.Value), visited map[uintptr]struct{}) { + if v.Type().Implements(target) { + // check if the value is nil + if !v.IsNil() { + fn(v) + } + } + + switch v.Kind() { + case reflect.Interface: + if v.IsNil() { + return + } + + visitRecursive(v.Elem(), target, fn, visited) + case reflect.Ptr: + if v.IsNil() { + return + } + + // check if we have visited this pointer before + ptr := v.Pointer() + if _, ok := visited[ptr]; ok { + return + } + visited[ptr] = struct{}{} + + visitRecursive(v.Elem(), target, fn, visited) + case reflect.Struct: + for i := range v.NumField() { + visitRecursive(v.Field(i), target, fn, visited) + } + case reflect.Slice, reflect.Array: + for i := range v.Len() { + visitRecursive(v.Index(i), target, fn, visited) + } + case reflect.Map: + for _, key := range v.MapKeys() { + visitRecursive(v.MapIndex(key), target, fn, visited) + } + } +} diff --git a/node/engine/parse/parse_test.go b/node/engine/parse/parse_test.go new file mode 100644 index 000000000..89d706e42 --- /dev/null +++ b/node/engine/parse/parse_test.go @@ -0,0 +1,1965 @@ +package parse + +import ( + "testing" + + "github.com/google/go-cmp/cmp" + "github.com/kwilteam/kwil-db/core/types" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +// assertPositionsAreSet asserts that all positions in the ast are set. +func assertPositionsAreSet(t *testing.T, v any) { + RecursivelyVisitPositions(v, func(gp GetPositioner) { + pos := gp.GetPosition() + // if not set, this will tell us the struct + assert.True(t, pos.IsSet, "position is not set. struct type: %T", gp) + }) +} + +// exprVar makes an ExpressionVariable. +func exprVar(n string) *ExpressionVariable { + if n[0] != '$' && n[0] != '@' { + panic("TEST ERROR: variable name must start with $ or @") + } + + return &ExpressionVariable{ + Name: n, + Prefix: VariablePrefix(n[0]), + } +} + +// exprLitCast makes an expression based on the type of v. +// If cast is true, it will add a typecast to the expression. +// Legacy Kwil (<=v0.9) auto-cast certain values. v0.10 leaves this +// to another layer. +func exprLitCast(v any, cast bool) Expression { + switch t := v.(type) { + case int: + isNeg := t < 0 + if isNeg { + t *= -1 + } + + liter := &ExpressionLiteral{ + Type: types.IntType, + Value: int64(t), + } + if cast { + liter.Typecastable = Typecastable{ + TypeCast: types.IntType, + } + } + + if isNeg { + return &ExpressionUnary{ + Operator: UnaryOperatorNeg, + Expression: liter, + } + } + + return liter + case int64: + isNeg := t < 0 + if isNeg { + t *= -1 + } + + liter := &ExpressionLiteral{ + Type: types.IntType, + Value: t, + } + if cast { + liter.Typecastable = Typecastable{ + TypeCast: types.IntType, + } + } + + if isNeg { + return &ExpressionUnary{ + Operator: UnaryOperatorNeg, + Expression: liter, + } + } + + return liter + case string: + ee := &ExpressionLiteral{ + Type: types.TextType, + Value: t, + } + if cast { + ee.Typecastable = Typecastable{ + TypeCast: types.TextType, + } + } + return ee + case bool: + ee := &ExpressionLiteral{ + Type: types.BoolType, + Value: t, + } + if cast { + ee.Typecastable = Typecastable{ + TypeCast: types.BoolType, + } + } + return ee + default: + panic("TEST ERROR: invalid type for literal") + } +} + +// exprLit makes an ExpressionLiteral. +// it can only make strings and ints. +// It will automatically add a typecast to the expression. 
+// This is legacy behavior from Kwil <=v0.9 +func exprLit(v any) Expression { + return exprLitCast(v, false) +} + +func exprFunctionCall(name string, args ...Expression) *ExpressionFunctionCall { + return &ExpressionFunctionCall{ + Name: name, + Args: args, + } +} + +func Test_DDL(t *testing.T) { + type testCase struct { + name string + sql string + want TopLevelStatement + err error + } + + tests := []testCase{ + // non-sensical foreign key but its just to test + { + name: "create table", + sql: `CREATE TABLE users ( + id int PRIMARY KEY, + name text CHECK(LENGTH(name) > 10), + address text NOT NULL DEFAULT 'usa', + email text NOT NULL UNIQUE , + city_id int, + group_id int REFERENCES groups(id) ON UPDATE RESTRICT ON DELETE CASCADE, + CONSTRAINT city_fk FOREIGN KEY (city_id, address) REFERENCES cities(id, address) ON UPDATE NO ACTION ON DELETE SET NULL, + CHECK(LENGTH(email) > 1), + UNIQUE (city_id, address) + );`, + want: &CreateTableStatement{ + Name: "users", + Columns: []*Column{ + { + Name: "id", + Type: types.IntType, + Constraints: []InlineConstraint{ + &PrimaryKeyInlineConstraint{}, + }, + }, + { + Name: "name", + Type: types.TextType, + Constraints: []InlineConstraint{ + &CheckConstraint{ + Expression: &ExpressionComparison{ + Left: exprFunctionCall("length", exprColumn("", "name")), + Right: exprLitCast(10, false), + Operator: ComparisonOperatorGreaterThan, + }, + }, + }, + }, + { + Name: "address", + Type: types.TextType, + Constraints: []InlineConstraint{ + &NotNullConstraint{}, + &DefaultConstraint{ + Value: &ExpressionLiteral{ + Type: types.TextType, + Value: "usa", + }, + }, + }, + }, + { + Name: "email", + Type: types.TextType, + Constraints: []InlineConstraint{ + &NotNullConstraint{}, + &UniqueInlineConstraint{}, + }, + }, + { + Name: "city_id", + Type: types.IntType, + }, + { + Name: "group_id", + Type: types.IntType, + Constraints: []InlineConstraint{ + &ForeignKeyReferences{ + RefTable: "groups", + RefColumns: []string{"id"}, + Actions: []*ForeignKeyAction{ + { + On: ON_UPDATE, + Do: DO_RESTRICT, + }, + { + On: ON_DELETE, + Do: DO_CASCADE, + }, + }, + }, + }, + }, + }, + Constraints: []*OutOfLineConstraint{ + { + Name: "city_fk", + Constraint: &ForeignKeyOutOfLineConstraint{ + Columns: []string{"city_id", "address"}, + References: &ForeignKeyReferences{ + RefTable: "cities", + RefColumns: []string{"id", "address"}, + Actions: []*ForeignKeyAction{ + { + On: ON_UPDATE, + Do: DO_NO_ACTION, + }, + { + On: ON_DELETE, + Do: DO_SET_NULL, + }, + }, + }, + }, + }, + { + Constraint: &CheckConstraint{ + Expression: &ExpressionComparison{ + Left: exprFunctionCall("length", exprColumn("", "email")), + Right: exprLitCast(1, false), + Operator: ComparisonOperatorGreaterThan, + }, + }, + }, + { + Constraint: &UniqueOutOfLineConstraint{ + Columns: []string{ + "city_id", + "address", + }, + }, + }, + }, + }, + }, + { + name: "create table if not exists", + sql: `CREATE TABLE IF NOT EXISTS users (id int primary key)`, + want: &CreateTableStatement{ + Name: "users", + IfNotExists: true, + Columns: []*Column{ + { + Name: "id", + Type: types.IntType, + Constraints: []InlineConstraint{ + &PrimaryKeyInlineConstraint{}, + }, + }, + }, + }, + }, + { + name: "alter table add column constraint NOT NULL", + sql: `ALTER TABLE user ALTER COLUMN name SET NOT NULL;`, + want: &AlterTableStatement{ + Table: "user", + Action: &AlterColumnSet{ + Column: "name", + Type: ConstraintTypeNotNull, + }, + }, + }, + { + name: "alter table add column constraint DEFAULT", + sql: `ALTER TABLE user ALTER COLUMN name 
SET DEFAULT 10;`, + want: &AlterTableStatement{ + Table: "user", + Action: &AlterColumnSet{ + Column: "name", + Type: ConstraintTypeDefault, + Value: &ExpressionLiteral{ + Type: types.IntType, + Value: int64(10), + }, + }, + }, + }, + { + name: "alter table drop column constraint NOT NULL", + sql: `ALTER TABLE user ALTER COLUMN name DROP NOT NULL;`, + want: &AlterTableStatement{ + Table: "user", + Action: &AlterColumnDrop{ + Column: "name", + Type: ConstraintTypeNotNull, + }, + }, + }, + { + name: "alter table drop column constraint DEFAULT", + sql: `ALTER TABLE user ALTER COLUMN name DROP DEFAULT;`, + want: &AlterTableStatement{ + Table: "user", + Action: &AlterColumnDrop{ + Column: "name", + Type: ConstraintTypeDefault, + }, + }, + }, + { + name: "alter table add column", + sql: `ALTER TABLE user ADD COLUMN abc int;`, + want: &AlterTableStatement{ + Table: "user", + Action: &AddColumn{ + Name: "abc", + Type: types.IntType, + }, + }, + }, + { + name: "alter table drop column", + sql: `ALTER TABLE user DROP COLUMN abc;`, + want: &AlterTableStatement{ + Table: "user", + Action: &DropColumn{ + Name: "abc", + }, + }, + }, + { + name: "alter table rename column", + sql: `ALTER TABLE user RENAME COLUMN abc TO def;`, + want: &AlterTableStatement{ + Table: "user", + Action: &RenameColumn{ + OldName: "abc", + NewName: "def", + }, + }, + }, + { + name: "alter table rename table", + sql: `ALTER TABLE user RENAME TO account;`, + want: &AlterTableStatement{ + Table: "user", + Action: &RenameTable{ + Name: "account", + }, + }, + }, + { + name: "alter table add constraint fk", + sql: `ALTER TABLE user ADD constraint new_fk FOREIGN KEY (city_id) REFERENCES cities(id) ON DELETE CASCADE;`, + want: &AlterTableStatement{ + Table: "user", + Action: &AddTableConstraint{ + Constraint: &OutOfLineConstraint{ + Name: "new_fk", + Constraint: &ForeignKeyOutOfLineConstraint{ + Columns: []string{"city_id"}, + References: &ForeignKeyReferences{ + RefTable: "cities", + RefColumns: []string{"id"}, + Actions: []*ForeignKeyAction{ + { + On: ON_DELETE, + Do: DO_CASCADE, + }, + }, + }, + }, + }, + }, + }, + }, + { + name: "alter table drop constraint", + sql: `ALTER TABLE user DROP CONSTRAINT abc;`, + want: &AlterTableStatement{ + Table: "user", + Action: &DropTableConstraint{ + Name: "abc", + }, + }, + }, + { + name: "drop table", + sql: `DROP TABLE users, posts;`, + want: &DropTableStatement{ + Tables: []string{"users", "posts"}, + Behavior: DropBehaviorDefault, + }, + }, + { + name: "drop table single table", + sql: `DROP TABLE users;`, + want: &DropTableStatement{ + Tables: []string{"users"}, + Behavior: DropBehaviorDefault, + }, + }, + { + name: "drop table if exists", + sql: `DROP TABLE IF EXISTS users, posts;`, + want: &DropTableStatement{ + Tables: []string{"users", "posts"}, + IfExists: true, + }, + }, + { + name: "drop table CASCADE", + sql: `DROP TABLE IF EXISTS users, posts CASCADE;`, + want: &DropTableStatement{ + Tables: []string{"users", "posts"}, + Behavior: DropBehaviorCascade, + IfExists: true, + }, + }, + { + name: "drop table RESTRICT ", + sql: `DROP TABLE users, posts RESTRICT;`, + want: &DropTableStatement{ + Tables: []string{"users", "posts"}, + Behavior: DropBehaviorRestrict, + }, + }, + { + name: "create index", + sql: `CREATE INDEX abc ON user(name);`, + want: &CreateIndexStatement{ + Name: "abc", + On: "user", + Columns: []string{"name"}, + Type: IndexTypeBTree, + }, + }, + { + name: "create unique index", + sql: `CREATE UNIQUE INDEX abc ON user(name);`, + want: &CreateIndexStatement{ + Name: 
"abc", + On: "user", + Columns: []string{"name"}, + Type: IndexTypeUnique, + }, + }, + { + name: "create index with no name", + sql: `CREATE INDEX ON user(name);`, + want: &CreateIndexStatement{ + On: "user", + Columns: []string{"name"}, + Type: IndexTypeBTree, + }, + }, + { + name: "create index if not exist", + sql: `CREATE INDEX IF NOT EXISTS abc ON user(name);`, + want: &CreateIndexStatement{ + IfNotExists: true, + Name: "abc", + On: "user", + Columns: []string{"name"}, + Type: IndexTypeBTree, + }, + }, + { + name: "drop index", + sql: `DROP INDEX abc;`, + want: &DropIndexStatement{ + Name: "abc", + }, + }, + + { + name: "drop index check exist", + sql: `DROP INDEX IF EXISTS abc;`, + want: &DropIndexStatement{ + Name: "abc", + CheckExist: true, + }, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + res, err := ParseWithErrListener(tt.sql) + require.NoError(t, err) + + if res.ParseErrs.Err() != nil { + if tt.err == nil { + t.Errorf("unexpected error: %v", res.ParseErrs.Err()) + } else { + require.ErrorIs(t, res.ParseErrs.Err(), tt.err) + } + + return + } + if tt.err != nil { + t.Errorf("expected error but got none") + return + } + + require.Len(t, res.Statements, 1) + + assertPositionsAreSet(t, res.Statements[0]) + + if !deepCompare(tt.want, res.Statements[0]) { + t.Errorf("unexpected AST:%s", diff(tt.want, res.Statements[0])) + } + }) + } +} + +func Test_SQL(t *testing.T) { + type testCase struct { + name string + sql string + want *SQLStatement + err error + } + + tests := []testCase{ + { + name: "simple select", + sql: "select *, id i, length(username) as name_len from users u where u.id = 1;", + want: &SQLStatement{ + SQL: &SelectStatement{ + SelectCores: []*SelectCore{ + { + Columns: []ResultColumn{ + &ResultColumnWildcard{}, + &ResultColumnExpression{ + Expression: exprColumn("", "id"), + Alias: "i", + }, + &ResultColumnExpression{ + Expression: &ExpressionFunctionCall{ + Name: "length", + Args: []Expression{ + exprColumn("", "username"), + }, + }, + Alias: "name_len", + }, + }, + From: &RelationTable{ + Table: "users", + Alias: "u", + }, + Where: &ExpressionComparison{ + Left: exprColumn("u", "id"), + Operator: ComparisonOperatorEqual, + Right: exprLitCast(1, false), + }, + }, + }, + }, + }, + }, + { + name: "insert", + sql: `insert into posts (id, author_id) values (1, 1), + (2, (SELECT id from users where username = 'user2' LIMIT 1));`, + want: &SQLStatement{ + SQL: &InsertStatement{ + Table: "posts", + Columns: []string{"id", "author_id"}, + Values: [][]Expression{ + { + exprLit(1), + exprLit(1), + }, + { + exprLit(2), + &ExpressionSubquery{ + Subquery: &SelectStatement{ + SelectCores: []*SelectCore{ + { + Columns: []ResultColumn{ + &ResultColumnExpression{ + Expression: exprColumn("", "id"), + }, + }, + From: &RelationTable{ + Table: "users", + }, + Where: &ExpressionComparison{ + Left: exprColumn("", "username"), + Operator: ComparisonOperatorEqual, + Right: exprLit("user2"), + }, + }, + }, + Limit: exprLit(1), + }, + }, + }, + }, + }, + }, + }, + { + name: "select join", + sql: `SELECT p.id as id, u.username as author FROM posts AS p + INNER JOIN users AS u ON p.author_id = u.id + WHERE u.username = 'satoshi' order by u.username DESC NULLS LAST;`, + want: &SQLStatement{ + SQL: &SelectStatement{ + SelectCores: []*SelectCore{ + { + Columns: []ResultColumn{ + &ResultColumnExpression{ + Expression: exprColumn("p", "id"), + Alias: "id", + }, + &ResultColumnExpression{ + Expression: exprColumn("u", "username"), + Alias: "author", + }, + }, + From: 
&RelationTable{ + Table: "posts", + Alias: "p", + }, + Joins: []*Join{ + { + Type: JoinTypeInner, + Relation: &RelationTable{ + Table: "users", + Alias: "u", + }, + On: &ExpressionComparison{ + Left: exprColumn("p", "author_id"), + Operator: ComparisonOperatorEqual, + Right: exprColumn("u", "id"), + }, + }, + }, + Where: &ExpressionComparison{ + Left: exprColumn("u", "username"), + Operator: ComparisonOperatorEqual, + Right: exprLit("satoshi"), + }, + }, + }, + + Ordering: []*OrderingTerm{ + { + Expression: exprColumn("u", "username"), + Order: OrderTypeDesc, + Nulls: NullOrderLast, + }, + }, + }, + }, + }, + { + name: "delete", + sql: "delete from users where id = 1;", + want: &SQLStatement{ + SQL: &DeleteStatement{ + Table: "users", + Where: &ExpressionComparison{ + Left: exprColumn("", "id"), + Operator: ComparisonOperatorEqual, + Right: exprLit(1), + }, + }, + }, + }, + { + name: "upsert with conflict - success", + sql: `INSERT INTO users (id) VALUES (1) ON CONFLICT (id) DO UPDATE SET id = users.id + excluded.id;`, + want: &SQLStatement{ + SQL: &InsertStatement{ + Table: "users", + Columns: []string{"id"}, + Values: [][]Expression{ + { + exprLit(1), + }, + }, + OnConflict: &OnConflict{ + ConflictColumns: []string{"id"}, + DoUpdate: []*UpdateSetClause{ + { + Column: "id", + Value: &ExpressionArithmetic{ + Left: exprColumn("users", "id"), + Operator: ArithmeticOperatorAdd, + Right: exprColumn("excluded", "id"), + }, + }, + }, + }, + }, + }, + }, + { + name: "compound select", + sql: `SELECT * FROM users union SELECT * FROM users;`, + want: &SQLStatement{ + SQL: &SelectStatement{ + SelectCores: []*SelectCore{ + { + Columns: []ResultColumn{ + &ResultColumnWildcard{}, + }, + From: &RelationTable{ + Table: "users", + }, + }, + { + Columns: []ResultColumn{ + &ResultColumnWildcard{}, + }, + From: &RelationTable{ + Table: "users", + }, + }, + }, + CompoundOperators: []CompoundOperator{ + CompoundOperatorUnion, + }, + }, + }, + }, + { + name: "compound selecting 1 column", + sql: `SELECT username FROM users union SELECT username FROM users;`, + want: &SQLStatement{ + SQL: &SelectStatement{ + SelectCores: []*SelectCore{ + { + Columns: []ResultColumn{ + &ResultColumnExpression{ + Expression: exprColumn("", "username"), + }, + }, + From: &RelationTable{ + Table: "users", + }, + }, + { + Columns: []ResultColumn{ + &ResultColumnExpression{ + Expression: exprColumn("", "username"), + }, + }, + From: &RelationTable{ + Table: "users", + }, + }, + }, + CompoundOperators: []CompoundOperator{CompoundOperatorUnion}, + }, + }, + }, + { + name: "group by", + sql: `SELECT u.username, count(u.id) FROM users as u GROUP BY u.username HAVING count(u.id) > 1;`, + want: &SQLStatement{ + SQL: &SelectStatement{ + SelectCores: []*SelectCore{ + { + Columns: []ResultColumn{ + &ResultColumnExpression{ + Expression: exprColumn("u", "username"), + }, + &ResultColumnExpression{ + Expression: &ExpressionFunctionCall{ + Name: "count", + Args: []Expression{ + exprColumn("u", "id"), + }, + }, + }, + }, + From: &RelationTable{ + Table: "users", + Alias: "u", + }, + GroupBy: []Expression{ + exprColumn("u", "username"), + }, + Having: &ExpressionComparison{ + Left: &ExpressionFunctionCall{ + Name: "count", + Args: []Expression{ + exprColumn("u", "id"), + }, + }, + Operator: ComparisonOperatorGreaterThan, + Right: exprLit(1), + }, + }, + }, + }, + }, + }, + { + name: "group by with having, having is in group by clause", + // there's a much easier way to write this query, but this is just to test the parser + sql: `SELECT username 
FROM users GROUP BY username HAVING length(username) > 1;`, + want: &SQLStatement{ + SQL: &SelectStatement{ + SelectCores: []*SelectCore{ + { + Columns: []ResultColumn{ + &ResultColumnExpression{ + Expression: exprColumn("", "username"), + }, + }, + From: &RelationTable{ + Table: "users", + }, + GroupBy: []Expression{ + exprColumn("", "username"), + }, + Having: &ExpressionComparison{ + Left: &ExpressionFunctionCall{ + Name: "length", + Args: []Expression{ + exprColumn("", "username"), + }, + }, + Operator: ComparisonOperatorGreaterThan, + Right: exprLit(1), + }, + }, + }, + }, + }, + }, + { + name: "aggregate with no group by returns one column", + sql: `SELECT count(*) FROM users;`, + want: &SQLStatement{ + SQL: &SelectStatement{ + SelectCores: []*SelectCore{ + { + Columns: []ResultColumn{ + &ResultColumnExpression{ + Expression: &ExpressionFunctionCall{ + Name: "count", + Star: true, + }, + }, + }, + From: &RelationTable{ + Table: "users", + }, + }, + }, + }, + }, + }, + { + name: "ordering for subqueries", + sql: `SELECT u.username, p.id FROM (SELECT * FROM users) as u inner join (SELECT * FROM posts) as p on u.id = p.author_id;`, + want: &SQLStatement{ + SQL: &SelectStatement{ + SelectCores: []*SelectCore{ + { + Columns: []ResultColumn{ + &ResultColumnExpression{ + Expression: exprColumn("u", "username"), + }, + &ResultColumnExpression{ + Expression: exprColumn("p", "id"), + }, + }, + From: &RelationSubquery{ + Subquery: &SelectStatement{ + SelectCores: []*SelectCore{ + { + Columns: []ResultColumn{ + &ResultColumnWildcard{}, + }, + From: &RelationTable{ + Table: "users", + }, + }, + }, + }, + Alias: "u", + }, + Joins: []*Join{ + { + Type: JoinTypeInner, + Relation: &RelationSubquery{ + Subquery: &SelectStatement{ + SelectCores: []*SelectCore{ + { + Columns: []ResultColumn{ + &ResultColumnWildcard{}, + }, + From: &RelationTable{ + Table: "posts", + }, + }, + }, + }, + Alias: "p", + }, + On: &ExpressionComparison{ + Left: exprColumn("u", "id"), + Operator: ComparisonOperatorEqual, + Right: exprColumn("p", "author_id"), + }, + }, + }, + }, + }, + }, + }, + }, + {name: "non utf-8", sql: "\xbd\xb2\x3d\xbc\x20\xe2\x8c\x98;", err: ErrSyntax}, + { + // this select doesn't make much sense, however + // it is a regression test for a previously known bug + // https://github.com/kwilteam/kwil-db/pull/810 + name: "offset and limit", + sql: `SELECT * FROM users LIMIT id OFFSET id;`, + want: &SQLStatement{ + SQL: &SelectStatement{ + SelectCores: []*SelectCore{ + { + Columns: []ResultColumn{ + &ResultColumnWildcard{}, + }, + From: &RelationTable{ + Table: "users", + }, + }, + }, + Offset: exprColumn("", "id"), + Limit: exprColumn("", "id"), + }, + }, + }, + { + // this is a regression test for a previous bug. 
+ // when parsing just SQL, we can have unknown variables + name: "unknown variable is ok", + sql: `SELECT $id;`, + want: &SQLStatement{ + SQL: &SelectStatement{ + SelectCores: []*SelectCore{ + { + Columns: []ResultColumn{ + &ResultColumnExpression{ + Expression: exprVar("$id"), + }, + }, + }, + }, + }, + }, + }, + { + name: "select JOIN, with no specified INNER/OUTER", + sql: `SELECT u.* FROM users as u + JOIN posts as p ON u.id = p.author_id;`, // default is INNER + want: &SQLStatement{ + SQL: &SelectStatement{ + SelectCores: []*SelectCore{ + { + Columns: []ResultColumn{ + &ResultColumnWildcard{ + Table: "u", + }, + }, + From: &RelationTable{ + Table: "users", + Alias: "u", + }, + Joins: []*Join{ + { + Type: JoinTypeInner, + Relation: &RelationTable{ + Table: "posts", + Alias: "p", + }, + On: &ExpressionComparison{ + Left: exprColumn("u", "id"), + Operator: ComparisonOperatorEqual, + Right: exprColumn("p", "author_id"), + }, + }, + }, + }, + }, + }, + }, + }, + { + // regression tests for a previous bug, where whitespace after + // the semicolon would cause the parser to add an extra semicolon + name: "whitespace after semicolon", + sql: "SELECT 1; ", + want: &SQLStatement{ + SQL: &SelectStatement{ + SelectCores: []*SelectCore{ + { + Columns: []ResultColumn{ + &ResultColumnExpression{ + Expression: exprLit(1), + }, + }, + }, + }, + }, + }, + }, + { + name: "cte", + sql: `WITH cte AS (SELECT id FROM users) SELECT * FROM cte;`, + want: &SQLStatement{ + CTEs: []*CommonTableExpression{ + { + Name: "cte", + Query: &SelectStatement{ + SelectCores: []*SelectCore{ + { + Columns: []ResultColumn{ + &ResultColumnExpression{ + Expression: exprColumn("", "id"), + }, + }, + From: &RelationTable{ + Table: "users", + }, + }, + }, + }, + }, + }, + SQL: &SelectStatement{ + SelectCores: []*SelectCore{ + { + Columns: []ResultColumn{ + &ResultColumnWildcard{}, + }, + From: &RelationTable{ + Table: "cte", + }, + }, + }, + }, + }, + }, + { + name: "cte with columns", + sql: `WITH cte (id2) AS (SELECT id FROM users) SELECT * FROM cte;`, + want: &SQLStatement{ + CTEs: []*CommonTableExpression{ + { + Name: "cte", + Columns: []string{"id2"}, + Query: &SelectStatement{ + SelectCores: []*SelectCore{ + { + Columns: []ResultColumn{ + &ResultColumnExpression{ + Expression: exprColumn("", "id"), + }, + }, + From: &RelationTable{ + Table: "users", + }, + }, + }, + }, + }, + }, + SQL: &SelectStatement{ + SelectCores: []*SelectCore{ + { + Columns: []ResultColumn{ + &ResultColumnWildcard{}, + }, + From: &RelationTable{ + Table: "cte", + }, + }, + }, + }, + }, + }, + { + name: "namespacing", + sql: `{test}SELECT * FROM users;`, + want: &SQLStatement{ + Namespacing: Namespacing{ + NamespacePrefix: "test", + }, + SQL: &SelectStatement{ + SelectCores: []*SelectCore{ + { + Columns: []ResultColumn{ + &ResultColumnWildcard{}, + }, + From: &RelationTable{ + Table: "users", + }, + }, + }, + }, + }, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + res, err := ParseWithErrListener(tt.sql) + require.NoError(t, err) + + if res.ParseErrs.Err() != nil { + if tt.err == nil { + t.Errorf("unexpected error: %v", res.ParseErrs.Err()) + } else { + require.ErrorIs(t, res.ParseErrs.Err(), tt.err) + } + + return + } + if tt.err != nil { + t.Errorf("expected %v but got none", tt.err) + return + } + + assertPositionsAreSet(t, res.Statements[0]) + res.Statements[0].(*SQLStatement).raw = nil + if !deepCompare(tt.want, res.Statements[0]) { + t.Errorf("unexpected AST:%s", diff(tt.want, res.Statements[0])) + } + }) + } +} + 
+func exprColumn(t, c string) *ExpressionColumn { + return &ExpressionColumn{ + Table: t, + Column: c, + } +} + +// deepCompare deep compares the values of two nodes. +// It ignores the parseTypes.Node field. +func deepCompare(node1, node2 any) bool { + // we return true for the parseTypes.Node field, + // we also need to ignore the unexported "schema" fields + return cmp.Equal(node1, node2, cmpOpts()...) +} + +// diff returns the diff between two nodes. +func diff(node1, node2 any) string { + return cmp.Diff(node1, node2, cmpOpts()...) +} + +func cmpOpts() []cmp.Option { + return []cmp.Option{ + cmp.AllowUnexported( + ExpressionLiteral{}, + ExpressionFunctionCall{}, + ExpressionVariable{}, + ExpressionArrayAccess{}, + ExpressionMakeArray{}, + ExpressionFieldAccess{}, + ExpressionParenthesized{}, + ExpressionColumn{}, + ExpressionSubquery{}, + ActionStmtDeclaration{}, + ActionStmtAssign{}, + ActionStmtCall{}, + ActionStmtForLoop{}, + ActionStmtIf{}, + ActionStmtSQL{}, + ActionStmtBreak{}, + ActionStmtReturn{}, + ActionStmtReturnNext{}, + LoopTermRange{}, + LoopTermSQL{}, + LoopTermVariable{}, + ActionStmtSQL{}, + SQLStatement{}, + ), + cmp.Comparer(func(x, y Position) bool { + return true + }), + cmp.Comparer(func(x, y *SQLStatement) bool { + if x == nil && y == nil { + return true + } + if x == nil || y == nil { + return false + } + + x.raw = nil + y.raw = nil + + eq := cmp.Equal(x.CTEs, y.CTEs, cmpOpts()...) + if !eq { + return false + } + + if x.Recursive != y.Recursive { + return false + } + + if x.Namespacing != y.Namespacing { + return false + } + + return cmp.Equal(x.SQL, y.SQL, cmpOpts()...) + }), + } +} + +func TestCreateActionStatements(t *testing.T) { + tests := []struct { + name string + input string + expect *CreateActionStatement + err error + }{ + { + name: "Basic create action with no params and no returns", + input: "CREATE ACTION my_action() PUBLIC {};", + expect: &CreateActionStatement{ + Name: "my_action", + Modifiers: []string{"public"}, + Parameters: nil, + Returns: nil, + IfNotExists: false, + OrReplace: false, + }, + }, + { + name: "Create action with parameters", + input: "CREATE ACTION my_action($param1 int, $param2 text) private {};", + expect: &CreateActionStatement{ + Name: "my_action", + Modifiers: []string{"private"}, + Parameters: []*NamedType{ + {Name: "$param1", Type: types.IntType}, + {Name: "$param2", Type: &types.DataType{Name: "text"}}, + }, + Returns: nil, + }, + }, + { + name: "Create action with owner and view modifiers", + input: `CREATE ACTION my_complex_action($user_id int) PUBLIC OWNER VIEW { + // body + };`, + expect: &CreateActionStatement{ + Name: "my_complex_action", + Parameters: []*NamedType{ + {Name: "$user_id", Type: types.IntType}, + }, + Modifiers: []string{"public", "owner", "view"}, + }, + }, + { + name: "Create action with IF NOT EXISTS", + input: `CREATE ACTION IF NOT EXISTS my_action() private {};`, + expect: &CreateActionStatement{ + IfNotExists: true, + Name: "my_action", + Modifiers: []string{"private"}, + }, + }, + { + name: "Create action with OR REPLACE", + input: `CREATE ACTION OR REPLACE my_action() PUBLIC {};`, + expect: &CreateActionStatement{ + OrReplace: true, + Name: "my_action", + Modifiers: []string{"public"}, + }, + }, + { + name: "Create action with return table", + input: `CREATE ACTION my_returns_action() PUBLIC RETURNS TABLE(id int, name text) {};`, + expect: &CreateActionStatement{ + Name: "my_returns_action", + Modifiers: []string{"public"}, + Returns: &ActionReturn{ + IsTable: true, + Fields: []*NamedType{ + 
{Name: "id", Type: types.IntType}, + {Name: "name", Type: &types.DataType{Name: "text"}}, + }, + }, + }, + }, + { + name: "Create action with unnamed return types", + input: `CREATE ACTION my_return_types() private RETURNS (int, text) {};`, + expect: &CreateActionStatement{ + Name: "my_return_types", + Modifiers: []string{"private"}, + Returns: &ActionReturn{ + IsTable: false, + Fields: []*NamedType{ + {Name: "", Type: types.IntType}, + {Name: "", Type: &types.DataType{Name: "text"}}, + }, + }, + }, + }, + { + name: "Create action with multiple parameters and complex body", + input: `CREATE ACTION do_something($a int, $b int) PUBLIC VIEW RETURNS (int) { + $c int; + $c := $a + $b; + return $c; + };`, + expect: &CreateActionStatement{ + Name: "do_something", + Parameters: []*NamedType{ + {Name: "$a", Type: types.IntType}, + {Name: "$b", Type: types.IntType}, + }, + Modifiers: []string{"public", "view"}, + Returns: &ActionReturn{ + IsTable: false, + Fields: []*NamedType{{Name: "", Type: types.IntType}}, + }, + Statements: []ActionStmt{ + &ActionStmtDeclaration{ + Variable: &ExpressionVariable{Name: "$c", Prefix: VariablePrefixDollar}, + Type: types.IntType, + }, + &ActionStmtAssign{ + Variable: &ExpressionVariable{Name: "$c", Prefix: VariablePrefixDollar}, + Value: &ExpressionArithmetic{ + Left: &ExpressionVariable{Name: "$a", Prefix: VariablePrefixDollar}, + Operator: ArithmeticOperatorAdd, + Right: &ExpressionVariable{Name: "$b", Prefix: VariablePrefixDollar}, + }, + }, + &ActionStmtReturn{ + Values: []Expression{ + &ExpressionVariable{Name: "$c", Prefix: VariablePrefixDollar}, + }, + }, + }, + }, + }, + { + name: "Create action with IF-ELSE and multiple statements", + input: ` + CREATE ACTION conditional_action($val int) PUBLIC { + $res int; + if $val > 10 { + $res := $val * 2; + } else { + $res := $val + 5; + } + return $res; + }; + `, + expect: &CreateActionStatement{ + Name: "conditional_action", + Modifiers: []string{"public"}, + Parameters: []*NamedType{ + {Name: "$val", Type: types.IntType}, + }, + Returns: nil, + Statements: []ActionStmt{ + &ActionStmtDeclaration{ + Variable: &ExpressionVariable{Name: "$res", Prefix: VariablePrefixDollar}, + Type: types.IntType, + }, + &ActionStmtIf{ + IfThens: []*IfThen{ + { + If: &ExpressionComparison{ + Left: &ExpressionVariable{Name: "$val", Prefix: VariablePrefixDollar}, + Operator: ComparisonOperatorGreaterThan, + Right: exprLit(10), + }, + Then: []ActionStmt{ + &ActionStmtAssign{ + Variable: &ExpressionVariable{Name: "$res", Prefix: VariablePrefixDollar}, + Value: &ExpressionArithmetic{ + Left: &ExpressionVariable{Name: "$val", Prefix: VariablePrefixDollar}, + Operator: ArithmeticOperatorMultiply, + Right: exprLit(2), + }, + }, + }, + }, + }, + Else: []ActionStmt{ + &ActionStmtAssign{ + Variable: &ExpressionVariable{Name: "$res", Prefix: VariablePrefixDollar}, + Value: &ExpressionArithmetic{ + Left: &ExpressionVariable{Name: "$val", Prefix: VariablePrefixDollar}, + Operator: ArithmeticOperatorAdd, + Right: exprLit(5), + }, + }, + }, + }, + &ActionStmtReturn{ + Values: []Expression{ + &ExpressionVariable{Name: "$res", Prefix: VariablePrefixDollar}, + }, + }, + }, + }, + }, + { + name: "Create action with a FOR loop over a range", + input: ` + CREATE ACTION loop_action() private { + $i int; + $sum int; + $sum := 0; + for $i in 1..5 { + $sum := $sum + $i; + } + return $sum; + }; + `, + expect: &CreateActionStatement{ + Name: "loop_action", + Modifiers: []string{"private"}, + Returns: nil, + Parameters: nil, + Statements: []ActionStmt{ + 
&ActionStmtDeclaration{ + Variable: &ExpressionVariable{Name: "$i", Prefix: VariablePrefixDollar}, + Type: types.IntType, + }, + &ActionStmtDeclaration{ + Variable: &ExpressionVariable{Name: "$sum", Prefix: VariablePrefixDollar}, + Type: types.IntType, + }, + &ActionStmtAssign{ + Variable: &ExpressionVariable{Name: "$sum", Prefix: VariablePrefixDollar}, + Value: exprLit(0), + }, + &ActionStmtForLoop{ + Receiver: &ExpressionVariable{Name: "$i", Prefix: VariablePrefixDollar}, + LoopTerm: &LoopTermRange{ + Start: exprLit(1), + End: exprLit(5), + }, + Body: []ActionStmt{ + &ActionStmtAssign{ + Variable: &ExpressionVariable{Name: "$sum", Prefix: VariablePrefixDollar}, + Value: &ExpressionArithmetic{ + Left: &ExpressionVariable{Name: "$sum", Prefix: VariablePrefixDollar}, + Operator: ArithmeticOperatorAdd, + Right: &ExpressionVariable{Name: "$i", Prefix: VariablePrefixDollar}, + }, + }, + }, + }, + &ActionStmtReturn{ + Values: []Expression{ + &ExpressionVariable{Name: "$sum", Prefix: VariablePrefixDollar}, + }, + }, + }, + }, + }, + { + name: "Create action with RETURN NEXT and multiple RETURNs", + input: ` + CREATE ACTION return_next_action($arr int) PUBLIC RETURNS (int) { + // Assume $arr is an array of ints. + $el int; + for $el in $arr { + return next $el; + } + // If loop finishes, return a default value + return 0; + }; + `, + expect: &CreateActionStatement{ + Name: "return_next_action", + Modifiers: []string{"public"}, + Parameters: []*NamedType{ + {Name: "$arr", Type: types.IntType}, + }, + Returns: &ActionReturn{ + IsTable: false, + Fields: []*NamedType{ + {Name: "", Type: types.IntType}, + }, + }, + Statements: []ActionStmt{ + &ActionStmtDeclaration{ + Variable: &ExpressionVariable{Name: "$el", Prefix: VariablePrefixDollar}, + Type: types.IntType, + }, + &ActionStmtForLoop{ + Receiver: &ExpressionVariable{Name: "$el", Prefix: VariablePrefixDollar}, + LoopTerm: &LoopTermVariable{ + Variable: &ExpressionVariable{Name: "$arr", Prefix: VariablePrefixDollar}, + }, + Body: []ActionStmt{ + &ActionStmtReturnNext{ + Values: []Expression{ + &ExpressionVariable{Name: "$el", Prefix: VariablePrefixDollar}, + }, + }, + }, + }, + &ActionStmtReturn{ + Values: []Expression{ + exprLit(0), + }, + }, + }, + }, + }, + { + name: "Create action with nested action calls", + input: ` + CREATE ACTION call_other_actions($x int) private { + $y int; + $y := $x + 10; + $z := my_other_action($y); + return $z; + }; + `, + expect: &CreateActionStatement{ + Name: "call_other_actions", + Modifiers: []string{"private"}, + Parameters: []*NamedType{ + {Name: "$x", Type: types.IntType}, + }, + Statements: []ActionStmt{ + &ActionStmtDeclaration{ + Variable: &ExpressionVariable{Name: "$y", Prefix: VariablePrefixDollar}, + Type: types.IntType, + }, + &ActionStmtAssign{ + Variable: &ExpressionVariable{Name: "$y", Prefix: VariablePrefixDollar}, + Value: &ExpressionArithmetic{ + Left: &ExpressionVariable{Name: "$x", Prefix: VariablePrefixDollar}, + Operator: ArithmeticOperatorAdd, + Right: exprLit(10), + }, + }, + &ActionStmtCall{ + Receivers: []*ExpressionVariable{ + {Name: "$z", Prefix: VariablePrefixDollar}, + }, + Call: &ExpressionFunctionCall{ + Name: "my_other_action", + Args: []Expression{ + &ExpressionVariable{Name: "$y", Prefix: VariablePrefixDollar}, + }, + }, + }, + &ActionStmtReturn{ + Values: []Expression{ + &ExpressionVariable{Name: "$z", Prefix: VariablePrefixDollar}, + }, + }, + }, + }, + }, + { + name: "Create action with IF, ELSEIF, ELSE conditions", + input: ` + CREATE ACTION complex_conditions($score int) 
PUBLIC RETURNS (text) { + // Score analysis + if $score > 90 { + return 'A'; + } elseif $score > 80 { + return 'B'; + } else { + return 'C'; + } + }; + `, + expect: &CreateActionStatement{ + Name: "complex_conditions", + Modifiers: []string{"public"}, + Parameters: []*NamedType{ + {Name: "$score", Type: types.IntType}, + }, + Returns: &ActionReturn{ + IsTable: false, + Fields: []*NamedType{ + {Name: "", Type: &types.DataType{Name: "text"}}, + }, + }, + Statements: []ActionStmt{ + &ActionStmtIf{ + IfThens: []*IfThen{ + { + If: &ExpressionComparison{ + Left: &ExpressionVariable{Name: "$score", Prefix: VariablePrefixDollar}, + Operator: ComparisonOperatorGreaterThan, + Right: exprLit(90), + }, + Then: []ActionStmt{ + &ActionStmtReturn{ + Values: []Expression{ + exprLit("A"), + }, + }, + }, + }, + { + If: &ExpressionComparison{ + Left: &ExpressionVariable{Name: "$score", Prefix: VariablePrefixDollar}, + Operator: ComparisonOperatorGreaterThan, + Right: exprLit(80), + }, + Then: []ActionStmt{ + &ActionStmtReturn{ + Values: []Expression{ + exprLit("B"), + }, + }, + }, + }, + }, + Else: []ActionStmt{ + &ActionStmtReturn{ + Values: []Expression{ + exprLit("C"), + }, + }, + }, + }, + }, + }, + }, + { + name: "Create action with array parameters and array return", + input: `CREATE ACTION array_manipulation($arr int[]) private RETURNS (int[]) { + // Just return the input array + return $arr; + };`, + expect: &CreateActionStatement{ + Name: "array_manipulation", + Modifiers: []string{"private"}, + Parameters: []*NamedType{{Name: "$arr", Type: &types.DataType{Name: "int8", IsArray: true}}}, + Returns: &ActionReturn{ + IsTable: false, + Fields: []*NamedType{{Name: "", Type: &types.DataType{Name: "int8", IsArray: true}}}, + }, + Statements: []ActionStmt{ + &ActionStmtReturn{ + Values: []Expression{ + &ExpressionVariable{Name: "$arr", Prefix: VariablePrefixDollar}, + }, + }, + }, + }, + }, + { + name: "Create action calling a function with distinct arguments", + input: `CREATE ACTION calc_distinct($vals int[]) PUBLIC { + $total int; + for $row in SELECT count(distinct $vals) as total { + $total := $row.total; + }; + return $total; + };`, + expect: &CreateActionStatement{ + Name: "calc_distinct", + Modifiers: []string{"public"}, + Parameters: []*NamedType{ + {Name: "$vals", Type: &types.DataType{Name: "int8", IsArray: true}}, + }, + Statements: []ActionStmt{ + &ActionStmtDeclaration{ + Variable: &ExpressionVariable{Name: "$total", Prefix: VariablePrefixDollar}, + Type: types.IntType, + }, + &ActionStmtForLoop{ + Receiver: &ExpressionVariable{Name: "$row", Prefix: VariablePrefixDollar}, + LoopTerm: &LoopTermSQL{ + Statement: &SQLStatement{ + SQL: &SelectStatement{ + SelectCores: []*SelectCore{ + { + Columns: []ResultColumn{ + &ResultColumnExpression{ + Expression: &ExpressionFunctionCall{ + Name: "count", + Distinct: true, + Args: []Expression{ + &ExpressionVariable{Name: "$vals", Prefix: VariablePrefixDollar}, + }, + }, + Alias: "total", + }, + }, + }, + }, + }, + }, + }, + Body: []ActionStmt{ + &ActionStmtAssign{ + Variable: &ExpressionVariable{Name: "$total", Prefix: VariablePrefixDollar}, + Value: &ExpressionFieldAccess{ + Record: &ExpressionVariable{Name: "$row", Prefix: VariablePrefixDollar}, + Field: "total", + }, + }, + }, + }, + &ActionStmtReturn{ + Values: []Expression{ + &ExpressionVariable{Name: "$total", Prefix: VariablePrefixDollar}, + }, + }, + }, + }, + }, + { + name: "Create action performing an UPDATE inside action", + input: `CREATE ACTION update_something($id int, $name text) PUBLIC { + 
update my_table set name = $name where id = $id; + return; + };`, + expect: &CreateActionStatement{ + Name: "update_something", + Modifiers: []string{"public"}, + Parameters: []*NamedType{ + {Name: "$id", Type: types.IntType}, + {Name: "$name", Type: &types.DataType{Name: "text"}}, + }, + Statements: []ActionStmt{ + &ActionStmtSQL{ + SQL: &SQLStatement{ + SQL: &UpdateStatement{ + Table: "my_table", + SetClause: []*UpdateSetClause{ + { + Column: "name", + Value: &ExpressionVariable{Name: "$name", Prefix: VariablePrefixDollar}, + }, + }, + Where: &ExpressionComparison{ + Left: &ExpressionColumn{Column: "id"}, + Operator: ComparisonOperatorEqual, + Right: &ExpressionVariable{Name: "$id", Prefix: VariablePrefixDollar}, + }, + }, + }, + }, + &ActionStmtReturn{}, + }, + }, + }, + { + name: "Create action with array indexing", + input: `CREATE ACTION array_access($arr int[]) PUBLIC RETURNS (int) { + $val int; + $val := $arr[2]; + return $val; + };`, + expect: &CreateActionStatement{ + Name: "array_access", + Modifiers: []string{"public"}, + Parameters: []*NamedType{ + {Name: "$arr", Type: &types.DataType{Name: "int8", IsArray: true}}, + }, + Returns: &ActionReturn{ + IsTable: false, + Fields: []*NamedType{ + {Name: "", Type: types.IntType}, + }, + }, + Statements: []ActionStmt{ + &ActionStmtDeclaration{ + Variable: &ExpressionVariable{Name: "$val", Prefix: VariablePrefixDollar}, + Type: types.IntType, + }, + &ActionStmtAssign{ + Variable: &ExpressionVariable{Name: "$val", Prefix: VariablePrefixDollar}, + Value: &ExpressionArrayAccess{ + Array: &ExpressionVariable{Name: "$arr", Prefix: VariablePrefixDollar}, + Index: exprLit(int64(2)), + }, + }, + &ActionStmtReturn{ + Values: []Expression{ + &ExpressionVariable{Name: "$val", Prefix: VariablePrefixDollar}, + }, + }, + }, + }, + }, + { + name: "Create action with casting inside the body", + input: `CREATE ACTION cast_stuff($val text) private { + $int_val int; + $int_val := ($val)::int; + return $int_val; + };`, + expect: &CreateActionStatement{ + Name: "cast_stuff", + Modifiers: []string{"private"}, + Parameters: []*NamedType{ + {Name: "$val", Type: &types.DataType{Name: "text"}}, + }, + Statements: []ActionStmt{ + &ActionStmtDeclaration{ + Variable: &ExpressionVariable{Name: "$int_val", Prefix: VariablePrefixDollar}, + Type: types.IntType, + }, + &ActionStmtAssign{ + Variable: &ExpressionVariable{Name: "$int_val", Prefix: VariablePrefixDollar}, + Value: &ExpressionParenthesized{ + Inner: &ExpressionVariable{Name: "$val", Prefix: VariablePrefixDollar}, + Typecastable: Typecastable{ + TypeCast: types.IntType, + }, + }, + }, + &ActionStmtReturn{ + Values: []Expression{ + &ExpressionVariable{Name: "$int_val", Prefix: VariablePrefixDollar}, + }, + }, + }, + }, + }, + { + name: "Create action with a FOR loop and BREAK", + input: `CREATE ACTION break_loop_example() PUBLIC { + $i int; + for $i in 1..10 { + if $i = 5 { + break; + } + } + return $i; + };`, + expect: &CreateActionStatement{ + Name: "break_loop_example", + Modifiers: []string{"public"}, + Statements: []ActionStmt{ + &ActionStmtDeclaration{ + Variable: &ExpressionVariable{Name: "$i", Prefix: VariablePrefixDollar}, + Type: types.IntType, + }, + &ActionStmtForLoop{ + Receiver: &ExpressionVariable{Name: "$i", Prefix: VariablePrefixDollar}, + LoopTerm: &LoopTermRange{ + Start: exprLit(int64(1)), + End: exprLit(int64(10)), + }, + Body: []ActionStmt{ + &ActionStmtIf{ + IfThens: []*IfThen{ + { + If: &ExpressionComparison{ + Left: &ExpressionVariable{Name: "$i", Prefix: VariablePrefixDollar}, + 
Operator: ComparisonOperatorEqual, + Right: exprLit(int64(5)), + }, + Then: []ActionStmt{ + &ActionStmtBreak{}, + }, + }, + }, + }, + }, + }, + &ActionStmtReturn{ + Values: []Expression{ + &ExpressionVariable{Name: "$i", Prefix: VariablePrefixDollar}, + }, + }, + }, + }, + }, + { + name: "Create action with just a SELECT statement in the body", + input: `CREATE ACTION just_select() PUBLIC { + select * from my_table; + return; + };`, + expect: &CreateActionStatement{ + Name: "just_select", + Modifiers: []string{"public"}, + Statements: []ActionStmt{ + &ActionStmtSQL{ + SQL: &SQLStatement{ + SQL: &SelectStatement{ + SelectCores: []*SelectCore{ + { + Columns: []ResultColumn{ + &ResultColumnWildcard{}, + }, + From: &RelationTable{Table: "my_table"}, + }, + }, + }, + }, + }, + &ActionStmtReturn{}, + }, + }, + }, + { + name: "create action with duplicate parameters", + input: `CREATE ACTION duplicate_params($a int, $a text) PUBLIC {};`, + err: ErrDuplicateParameterName, + }, + { + name: "create action with duplicate return names", + input: `CREATE ACTION duplicate_returns() PUBLIC RETURNS (id int, id text) {};`, + err: ErrDuplicateResultColumnName, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + res, err := Parse(tt.input) + if tt.err != nil { + require.ErrorIs(t, err, tt.err) + return + } + require.NoError(t, err) + + require.Len(t, res, 1) + + tt.expect.Raw = res[0].(*CreateActionStatement).Raw + + assertPositionsAreSet(t, res[0]) + + if !deepCompare(tt.expect, res[0]) { + t.Errorf("unexpected AST:%s", diff(tt.expect, res[0])) + } + }) + } +} diff --git a/parse/postgres/doc.go b/node/engine/parse/postgres/doc.go similarity index 100% rename from parse/postgres/doc.go rename to node/engine/parse/postgres/doc.go diff --git a/parse/postgres/parse.go b/node/engine/parse/postgres/parse.go similarity index 89% rename from parse/postgres/parse.go rename to node/engine/parse/postgres/parse.go index c6016e253..29f66c206 100644 --- a/parse/postgres/parse.go +++ b/node/engine/parse/postgres/parse.go @@ -15,7 +15,7 @@ func doNothing(_ string) error { // CheckSyntaxReplaceDollar replaces all bind parameters($x) with 1 to bypass // syntax check errors. -// ToSQL() method doesn't convert bind parameters to $1, $2, etc. so we need to +// () method doesn't convert bind parameters to $1, $2, etc. so we need to // replace them manually, just so we can do the syntax check. 
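A short sketch of what the syntax check described above amounts to in practice (the query text and the surrounding caller are invented; the import path "github.com/kwilteam/kwil-db/node/engine/parse/postgres" is the one introduced by this patch):

	// $id is not valid on its own for a bare Postgres syntax check, so the helper
	// substitutes the literal 1 first; effectively it validates
	// `SELECT * FROM users WHERE id = 1;`.
	if err := postgres.CheckSyntaxReplaceDollar(`SELECT * FROM users WHERE id = $id;`); err != nil {
		log.Fatalf("syntax check failed: %v", err)
	}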
func CheckSyntaxReplaceDollar(query string) error { // Replace all bind parameters($x) with 1 to bypass syntax check errors diff --git a/parse/postgres/_parse_cgo.go b/node/engine/parse/postgres/parse_cgo.go similarity index 100% rename from parse/postgres/_parse_cgo.go rename to node/engine/parse/postgres/parse_cgo.go diff --git a/parse/postgres/parse_test.go b/node/engine/parse/postgres/parse_test.go similarity index 82% rename from parse/postgres/parse_test.go rename to node/engine/parse/postgres/parse_test.go index 56d8c72d5..bf30c6678 100644 --- a/parse/postgres/parse_test.go +++ b/node/engine/parse/postgres/parse_test.go @@ -3,7 +3,7 @@ package postgres_test import ( "testing" - "github.com/kwilteam/kwil-db/parse/postgres" + "github.com/kwilteam/kwil-db/node/engine/parse/postgres" "github.com/stretchr/testify/assert" ) diff --git a/node/engine/parse/types.go b/node/engine/parse/types.go new file mode 100644 index 000000000..771e90b5d --- /dev/null +++ b/node/engine/parse/types.go @@ -0,0 +1,60 @@ +package parse + +import ( + antlr "github.com/antlr4-go/antlr/v4" +) + +// Position is a Position in the parse tree. It represents a range of line and column +// values in Kuneiform source code. +type Position struct { + // Set is true if the position of the Position has been set. + // This is useful for testing parsers. + IsSet bool `json:"-"` + StartLine int `json:"start_line"` + StartCol int `json:"start_col"` + EndLine int `json:"end_line"` + EndCol int `json:"end_col"` +} + +// Set sets the position of the Position based on the given parser rule context. +func (n *Position) Set(r antlr.ParserRuleContext) { + n.IsSet = true + n.StartLine = r.GetStart().GetLine() + n.StartCol = r.GetStart().GetColumn() + n.EndLine = r.GetStop().GetLine() + n.EndCol = r.GetStop().GetColumn() +} + +// SetToken sets the position of the Position based on the given token. +func (n *Position) SetToken(t antlr.Token) { + n.IsSet = true + n.StartLine = t.GetLine() + n.StartCol = t.GetColumn() + n.EndLine = t.GetLine() + n.EndCol = t.GetColumn() +} + +// GetPosition returns the Position. +// It is useful if the Position is embedded in another struct. +func (n *Position) GetPosition() *Position { + return n +} + +// Clear clears the position of the Position. +func (n *Position) Clear() { + n.IsSet = false + n.StartLine = 0 + n.StartCol = 0 + n.EndLine = 0 + n.EndCol = 0 +} + +// unaryNode creates a Position with the same start and end position. +func unaryNode(start, end int) *Position { + return &Position{ + StartLine: start, + StartCol: end, + EndLine: start, + EndCol: end, + } +} diff --git a/node/engine/generate/plpgsql.go b/node/engine/pg_generate/generate.go similarity index 53% rename from node/engine/generate/plpgsql.go rename to node/engine/pg_generate/generate.go index caa3dea0a..75bfd5708 100644 --- a/node/engine/generate/plpgsql.go +++ b/node/engine/pg_generate/generate.go @@ -1,4 +1,5 @@ -package generate +// pggenerate package is responsible for generating the Postgres-compatible SQL from the AST. +package pggenerate import ( "fmt" @@ -7,22 +8,36 @@ import ( "github.com/kwilteam/kwil-db/core/types" "github.com/kwilteam/kwil-db/core/types/decimal" - "github.com/kwilteam/kwil-db/parse" + "github.com/kwilteam/kwil-db/node/engine/parse" ) /* This file implements a visitor to generate Postgres compatible SQL and plpgsql */ +// GenerateSQL generates Postgres compatible SQL from an AST +// If orderParams is true, it will number the parameters as $1, $2, etc. 
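A rough sketch of how this generator entry point might be driven from the parser output (it assumes the parsed statement satisfies parse.Node, that the package is imported as pggenerate from node/engine/pg_generate, and that "ds_main" is an arbitrary schema name; the output shown in comments is indicative only):

	stmts, err := parse.Parse(`SELECT id, name FROM users WHERE id = $id;`)
	if err != nil {
		log.Fatal(err)
	}
	// Bind variables are numbered in order of first appearance, and the returned
	// params slice records which source variable each $N position maps to.
	sqlStr, params, err := pggenerate.GenerateSQL(stmts[0], "ds_main")
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(sqlStr) // e.g. SELECT id, name FROM users WHERE id = $1;
	fmt.Println(params) // e.g. [$id]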
+// It will return the ordered parameters in the order they appear in the statement. +// It will also qualify the table names with the pgSchema. +func GenerateSQL(ast parse.Node, pgSchema string) (stmt string, params []string, err error) { + defer func() { + if r := recover(); r != nil { + err = fmt.Errorf("error generating SQL: %v", r) + } + }() + + s := &sqlGenerator{ + pgSchema: pgSchema, + } + + stmt = ast.Accept(s).(string) + return stmt + ";", s.orderedParams, nil +} + // sqlVisitor creates Postgres compatible SQL from an AST type sqlGenerator struct { - parse.UnimplementedSqlVisitor // pgSchema is the schema name to prefix to the table names - pgSchema string - // numberParameters is a flag that indicates if we should number parameters as $1, $2, etc., - // instead of formatting their variable names. It should be set to true if we want to execute - // SQL directly against postgres, instead of using it in a procedure. - numberParameters bool + pgSchema string // TODO: this should be removed. Qualification needs to happen at the logical planner, to avoid qualifying CTEs // orderedParams is the order of parameters in the order they appear in the statement. // It is only set if numberParameters is true. For example, the statement SELECT $1, $2 // would have orderedParams = ["$1", "$2"] @@ -58,23 +73,19 @@ func (s *sqlGenerator) VisitExpressionFunctionCall(p0 *parse.ExpressionFunctionC // the schema name, since it is a local procedure fn, ok := parse.Functions[p0.Name] if !ok { - // if not found, it is a local procedure - str.WriteString(s.pgSchema) - str.WriteString(".") - str.WriteString(p0.Name) - str.WriteString("(") - for i, arg := range args { - if i > 0 { - str.WriteString(", ") - } - str.WriteString(arg) - } - str.WriteString(")") - typeCast(p0, &str) - return str.String() + panic("function " + p0.Name + " not found") } - pgFmt, err := fn.PGFormat(args, p0.Distinct, p0.Star) + var pgFmt string + var err error + switch fn := fn.(type) { + case *parse.ScalarFunctionDefinition: + pgFmt, err = fn.PGFormatFunc(args) + case *parse.AggregateFunctionDefinition: + pgFmt, err = fn.PGFormatFunc(args, p0.Distinct) + default: + panic("unknown function type " + fmt.Sprintf("%T", fn)) + } if err != nil { panic(err) } @@ -98,58 +109,73 @@ func typeCast(t interface{ GetTypeCast() *types.DataType }, s *strings.Builder) } } -func (s *sqlGenerator) VisitExpressionForeignCall(p0 *parse.ExpressionForeignCall) any { +func (s *sqlGenerator) VisitExpressionWindowFunctionCall(p0 *parse.ExpressionWindowFunctionCall) any { str := strings.Builder{} - str.WriteString(s.pgSchema) - str.WriteString(".") - str.WriteString(formatForeignProcedureName(p0.Name)) - str.WriteString("(") - for i, arg := range append(p0.ContextualArgs, p0.Args...) { - if i > 0 { - str.WriteString(", ") - } + str.WriteString(p0.FunctionCall.Accept(s).(string)) - str.WriteString(arg.Accept(s).(string)) + if p0.Filter != nil { + str.WriteString(" FILTER (WHERE ") + str.WriteString(p0.Filter.Accept(s).(string)) + str.WriteString(")") } - str.WriteString(")") - - typeCast(p0, &str) + str.WriteString(" OVER ") + str.WriteString(p0.Window.Accept(s).(string)) return str.String() } -func (s *sqlGenerator) VisitExpressionVariable(p0 *parse.ExpressionVariable) any { - // if a user param $, then we need to number it. - // Vars using @ get set and accessed using postgres's current_setting function - if s.numberParameters && p0.Prefix == parse.VariablePrefixDollar { - str := p0.String() - - // if it already exists, we write it as that index. 
- for i, v := range s.orderedParams { - if v == str { - res := strings.Builder{} - res.WriteString("$" + strconv.Itoa(i+1)) - typeCast(p0, &res) - return res.String() +func (s *sqlGenerator) VisitWindowImpl(p0 *parse.WindowImpl) any { + str := strings.Builder{} + str.WriteString("(") + + if len(p0.PartitionBy) > 0 { + str.WriteString("PARTITION BY ") + for i, arg := range p0.PartitionBy { + if i > 0 { + str.WriteString(", ") } + str.WriteString(arg.Accept(s).(string)) } + } - // otherwise, we add it to the list. - // Postgres uses $1, $2, etc. for numbered parameters. + if p0.OrderBy != nil { + str.WriteString(" ORDER BY ") + for i, arg := range p0.OrderBy { + if i > 0 { + str.WriteString(", ") + } + str.WriteString(arg.Accept(s).(string)) + } + } - s.orderedParams = append(s.orderedParams, str) + str.WriteString(")") + return str.String() +} + +func (s *sqlGenerator) VisitWindowReference(p0 *parse.WindowReference) any { + return p0.Name +} + +func (s *sqlGenerator) VisitExpressionVariable(p0 *parse.ExpressionVariable) any { + str := p0.String() - res := strings.Builder{} - res.WriteString("$") - res.WriteString(strconv.Itoa(len(s.orderedParams))) - typeCast(p0, &res) - return res.String() + // if it already exists, we write it as that index. + for i, v := range s.orderedParams { + if v == str { + return "$" + strconv.Itoa(i+1) + } } - str := strings.Builder{} - str.WriteString(formatVariable(p0)) - typeCast(p0, &str) - return str.String() + // otherwise, we add it to the list. + // Postgres uses $1, $2, etc. for numbered parameters. + + s.orderedParams = append(s.orderedParams, str) + + res := strings.Builder{} + res.WriteString("$") + res.WriteString(strconv.Itoa(len(s.orderedParams))) + typeCast(p0, &res) + return res.String() } func (s *sqlGenerator) VisitExpressionArrayAccess(p0 *parse.ExpressionArrayAccess) any { @@ -399,6 +425,9 @@ func (s *sqlGenerator) VisitSQLStatement(p0 *parse.SQLStatement) any { } if i == 0 { str.WriteString("WITH ") + if p0.Recursive { + str.WriteString("RECURSIVE ") + } } str.WriteString(cte.Accept(s).(string)) } @@ -488,6 +517,18 @@ func (s *sqlGenerator) VisitSelectCore(p0 *parse.SelectCore) any { } } + if len(p0.Windows) > 0 { + str.WriteString("\nWINDOW ") + for i, window := range p0.Windows { + if i > 0 { + str.WriteString(", ") + } + str.WriteString(window.Name) + str.WriteString(" AS ") + str.WriteString(window.Window.Accept(s).(string)) + } + } + return str.String() } @@ -513,10 +554,15 @@ func (s *sqlGenerator) VisitResultColumnWildcard(p0 *parse.ResultColumnWildcard) func (s *sqlGenerator) VisitRelationTable(p0 *parse.RelationTable) any { str := strings.Builder{} - if s.pgSchema != "" { - str.WriteString(s.pgSchema) + // we do not rely on the s.pgSchema here, since we want to this table might + // be a common table expression. The planner qualifies the table names. 
+ // If no Namespace is set, it is likely a CTE + if p0.Namespace != "" { + str.WriteString(p0.Namespace) str.WriteString(".") } + // we do not set the pgschema here because we want to allow for CTEs + // Therefore, the pgschema must be set here using the planner str.WriteString(p0.Table) if p0.Alias != "" { str.WriteString(" AS ") @@ -537,17 +583,6 @@ func (s *sqlGenerator) VisitRelationSubquery(p0 *parse.RelationSubquery) any { return str.String() } -func (s *sqlGenerator) VisitRelationFunctionCall(p0 *parse.RelationFunctionCall) any { - str := strings.Builder{} - str.WriteString(p0.FunctionCall.Accept(s).(string)) - str.WriteString(" ") - if p0.Alias != "" { - str.WriteString("AS ") - str.WriteString(p0.Alias) - } - return str.String() -} - func (s *sqlGenerator) VisitJoin(p0 *parse.Join) any { str := strings.Builder{} str.WriteString(string(p0.Type)) @@ -665,31 +700,36 @@ func (s *sqlGenerator) VisitInsertStatement(p0 *parse.InsertStatement) any { str.WriteString(") ") } - str.WriteString("\nVALUES ") - for i, val := range p0.Values { - if i > 0 { - str.WriteString(",") - } - str.WriteString("\n(") - for j, v := range val { - if j > 0 { - str.WriteString(", ") + str.WriteString("\n") + if p0.Select != nil { + str.WriteString(p0.Select.Accept(s).(string)) + } else { + str.WriteString("VALUES ") + for i, val := range p0.Values { + if i > 0 { + str.WriteString(",") } - str.WriteString(v.Accept(s).(string)) + str.WriteString("\n(") + for j, v := range val { + if j > 0 { + str.WriteString(", ") + } + str.WriteString(v.Accept(s).(string)) + } + str.WriteString(")") } - str.WriteString(")") } - if p0.Upsert != nil { + if p0.OnConflict != nil { str.WriteString("\n") - str.WriteString(p0.Upsert.Accept(s).(string)) + str.WriteString(p0.OnConflict.Accept(s).(string)) } return str.String() } -func (s *sqlGenerator) VisitUpsertClause(p0 *parse.UpsertClause) any { +func (s *sqlGenerator) VisitUpsertClause(p0 *parse.OnConflict) any { str := strings.Builder{} str.WriteString("ON CONFLICT ") if len(p0.ConflictColumns) > 0 { @@ -748,177 +788,328 @@ func (s *sqlGenerator) VisitOrderingTerm(p0 *parse.OrderingTerm) any { return str.String() } -// procedureGenerator is a visitor that generates plpgsql code. -type procedureGenerator struct { - sqlGenerator - // anonymousReceivers counts the amount of anonymous receivers - // we should declare. This will be cross-referenced with the - // analyzer to ensure we declare the correct amount. 
- anonymousReceivers int - // procedure is the procedure we are generating code for - procedure *types.Procedure -} +func (s *sqlGenerator) VisitCreateTableStatement(p0 *parse.CreateTableStatement) any { + str := strings.Builder{} + str.WriteString("CREATE TABLE ") + if p0.IfNotExists { + str.WriteString("IF NOT EXISTS ") + } -var _ parse.ProcedureVisitor = &procedureGenerator{} + if s.pgSchema != "" { + str.WriteString(s.pgSchema) + str.WriteString(".") + } -func (p *procedureGenerator) VisitProcedureStmtDeclaration(p0 *parse.ProcedureStmtDeclaration) any { - // plpgsql declares variables at the top of the procedure - return "" -} + str.WriteString(p0.Name) + str.WriteString(" (\n") + for i, col := range p0.Columns { + if i > 0 { + str.WriteString(", \n") + } -func (p *procedureGenerator) VisitProcedureStmtAssignment(p0 *parse.ProcedureStmtAssign) any { - varName := p0.Variable.Accept(p).(string) - return varName + " := " + p0.Value.Accept(p).(string) + ";\n" -} + str.WriteString(col.Accept(s).(string)) + } -func (p *procedureGenerator) VisitProcedureStmtCall(p0 *parse.ProcedureStmtCall) any { - call := p0.Call.Accept(p).(string) + for _, con := range p0.Constraints { + str.WriteString(",\n") - if len(p0.Receivers) == 0 { - return "PERFORM " + call + ";\n" + if con.Name != "" { + str.WriteString("CONSTRAINT ") + str.WriteString(con.Name) + str.WriteString(" ") + } + + str.WriteString(con.Constraint.Accept(s).(string)) } - s := strings.Builder{} - s.WriteString("SELECT * INTO ") + str.WriteString("\n)") + return str.String() +} - for i, rec := range p0.Receivers { +func (s *sqlGenerator) VisitAlterTableStatement(p0 *parse.AlterTableStatement) any { + str := strings.Builder{} + str.WriteString("ALTER TABLE ") + if s.pgSchema != "" { + str.WriteString(s.pgSchema) + str.WriteString(".") + } + str.WriteString(p0.Table) + str.WriteString(" ") + + str.WriteString(p0.Action.Accept(s).(string)) + + return str.String() +} + +func (s *sqlGenerator) VisitDropTableStatement(p0 *parse.DropTableStatement) any { + str := strings.Builder{} + str.WriteString("DROP TABLE ") + if p0.IfExists { + str.WriteString("IF EXISTS ") + } + + for i, table := range p0.Tables { if i > 0 { - s.WriteString(", ") + str.WriteString(", ") } - if rec == nil { - s.WriteString(formatAnonymousReceiver(p.anonymousReceivers)) - p.anonymousReceivers++ - } else { - s.WriteString(rec.Accept(p).(string)) - } - } - - s.WriteString(" FROM ") - s.WriteString(call) - s.WriteString(";\n") - return s.String() -} - -func (p *procedureGenerator) VisitProcedureStmtForLoop(p0 *parse.ProcedureStmtForLoop) any { - s := strings.Builder{} - // if we are iterating over an array, the syntax is different - switch v := p0.LoopTerm.(type) { - case *parse.LoopTermRange, *parse.LoopTermSQL: - s.WriteString("FOR ") - s.WriteString(p0.Receiver.Accept(p).(string)) - s.WriteString(" IN ") - s.WriteString(p0.LoopTerm.Accept(p).(string)) - case *parse.LoopTermVariable: - s.WriteString("FOREACH ") - s.WriteString(p0.Receiver.Accept(p).(string)) - s.WriteString(" IN ") - s.WriteString(p0.LoopTerm.Accept(p).(string)) + if s.pgSchema != "" { + str.WriteString(s.pgSchema) + str.WriteString(".") + } + str.WriteString(table) + } + + switch p0.Behavior { + case parse.DropBehaviorCascade: + str.WriteString(" CASCADE") + case parse.DropBehaviorRestrict: + str.WriteString(" RESTRICT") + case parse.DropBehaviorDefault: + // do nothing default: - panic("unknown loop term type: " + fmt.Sprintf("%T", v)) + panic("unknown drop behavior") } + return str.String() +} - 
s.WriteString(" LOOP\n") +func (s *sqlGenerator) VisitCreateIndexStatement(p0 *parse.CreateIndexStatement) any { + str := strings.Builder{} + str.WriteString("CREATE ") - for _, stmt := range p0.Body { - s.WriteString(stmt.Accept(p).(string)) + switch p0.Type { + case parse.IndexTypeBTree: + str.WriteString("INDEX ") + case parse.IndexTypeUnique: + str.WriteString("UNIQUE INDEX ") + default: + // should not happen + panic("unknown index type") } - s.WriteString(" END LOOP;\n") + if p0.IfNotExists { + str.WriteString("IF NOT EXISTS ") + } + if p0.Name != "" { + str.WriteString(p0.Name) + } + str.WriteString("ON ") + str.WriteString(s.qualify(p0.On)) + str.WriteString("(" + strings.Join(p0.Columns, ", ") + ")") - return s.String() + return str.String() } -func (p *procedureGenerator) VisitLoopTermRange(p0 *parse.LoopTermRange) any { - s := strings.Builder{} - s.WriteString(p0.Start.Accept(p).(string)) - s.WriteString("..") - s.WriteString(p0.End.Accept(p).(string)) - return s.String() +// qualify prefixes the table name with the schema name, if it exists +func (s *sqlGenerator) qualify(p0 string) string { + if s.pgSchema != "" { + return s.pgSchema + "." + p0 + } + return p0 } -func (p *procedureGenerator) VisitLoopTermSQL(p0 *parse.LoopTermSQL) any { - return p0.Statement.Accept(p).(string) +func (s *sqlGenerator) VisitDropIndexStatement(p0 *parse.DropIndexStatement) any { + str := strings.Builder{} + str.WriteString("DROP INDEX ") + if p0.CheckExist { + str.WriteString("IF EXISTS ") + } + str.WriteString(s.qualify(p0.Name)) + return str.String() } -func (p *procedureGenerator) VisitLoopTermVariable(p0 *parse.LoopTermVariable) any { - // we use coalesce here so that we do not error when looping on null arrays - return fmt.Sprintf("ARRAY COALESCE(%s, '{}')", p0.Variable.Accept(p).(string)) +func (s *sqlGenerator) VisitGrantOrRevokeStatement(p0 *parse.GrantOrRevokeStatement) any { + panic("generate should never be called on a grant or revoke statement") } -func (p *procedureGenerator) VisitProcedureStmtIf(p0 *parse.ProcedureStmtIf) any { - s := strings.Builder{} - for i, clause := range p0.IfThens { - if i == 0 { - s.WriteString("IF ") - } else { - s.WriteString("ELSIF ") - } +func (s *sqlGenerator) VisitAlterColumnSet(p0 *parse.AlterColumnSet) any { + str := strings.Builder{} + str.WriteString("ALTER COLUMN ") + str.WriteString(p0.Column) + str.WriteString(" SET ") + str.WriteString(p0.Type.String()) - s.WriteString(clause.Accept(p).(string)) + if p0.Type == parse.ConstraintTypeDefault { + str.WriteString(" ") + str.WriteString(p0.Value.Accept(s).(string)) } - if p0.Else != nil { - s.WriteString("ELSE\n") - for _, stmt := range p0.Else { + return str.String() +} - s.WriteString(stmt.Accept(p).(string)) - } +func (s *sqlGenerator) VisitAlterColumnDrop(p0 *parse.AlterColumnDrop) any { + str := strings.Builder{} + str.WriteString("ALTER COLUMN ") + str.WriteString(p0.Column) + str.WriteString(" DROP ") + str.WriteString(p0.Type.String()) + return str.String() +} + +func (s *sqlGenerator) VisitAddColumn(p0 *parse.AddColumn) any { + str := strings.Builder{} + str.WriteString("ADD COLUMN ") + str.WriteString(p0.Name) + str.WriteString(" ") + + typeStr, err := p0.Type.PGString() + if err != nil { + panic(err) } - s.WriteString("END IF;\n") - return s.String() + str.WriteString(typeStr) + return str.String() } -func (p *procedureGenerator) VisitIfThen(p0 *parse.IfThen) any { - s := strings.Builder{} - s.WriteString(p0.If.Accept(p).(string)) - s.WriteString(" THEN\n") - for _, stmt := range p0.Then { 
- s.WriteString(stmt.Accept(p).(string)) - } - s.WriteString("\n") +func (s *sqlGenerator) VisitDropColumn(p0 *parse.DropColumn) any { + str := strings.Builder{} + str.WriteString("DROP COLUMN ") + str.WriteString(p0.Name) + return str.String() +} - return s.String() +func (s *sqlGenerator) VisitRenameColumn(p0 *parse.RenameColumn) any { + str := strings.Builder{} + str.WriteString("RENAME COLUMN ") + str.WriteString(p0.OldName) + str.WriteString(" TO ") + str.WriteString(p0.NewName) + return str.String() } -func (p *procedureGenerator) VisitProcedureStmtSQL(p0 *parse.ProcedureStmtSQL) any { - return p0.SQL.Accept(p).(string) + ";\n" +func (s *sqlGenerator) VisitRenameTable(p0 *parse.RenameTable) any { + str := strings.Builder{} + str.WriteString("RENAME TO ") + str.WriteString(p0.Name) + return str.String() } -func (p *procedureGenerator) VisitProcedureStmtBreak(p0 *parse.ProcedureStmtBreak) any { - return "EXIT;\n" +func (s *sqlGenerator) VisitAddTableConstraint(p0 *parse.AddTableConstraint) any { + str := strings.Builder{} + str.WriteString("ADD ") + if p0.Constraint.Name != "" { + str.WriteString("CONSTRAINT ") + str.WriteString(p0.Constraint.Name) + str.WriteString(" ") + } + + str.WriteString(p0.Constraint.Constraint.Accept(s).(string)) + return str.String() } -func (p *procedureGenerator) VisitProcedureStmtReturn(p0 *parse.ProcedureStmtReturn) any { - if p0.SQL != nil { - return "RETURN QUERY " + p0.SQL.Accept(p).(string) + ";\n" +func (s *sqlGenerator) VisitDropTableConstraint(p0 *parse.DropTableConstraint) any { + str := strings.Builder{} + str.WriteString("DROP CONSTRAINT ") + str.WriteString(p0.Name) + return str.String() +} + +func (s *sqlGenerator) VisitColumn(p0 *parse.Column) any { + str := strings.Builder{} + str.WriteString(p0.Name) + str.WriteString(" ") + + typeStr, err := p0.Type.PGString() + if err != nil { + panic(err) } - s := strings.Builder{} - for i, expr := range p0.Values { - s.WriteString(formatReturnVar(i)) - s.WriteString(" := ") - s.WriteString(expr.Accept(p).(string)) - s.WriteString(";\n") + str.WriteString(typeStr) + for _, con := range p0.Constraints { + str.WriteString(" ") + str.WriteString(con.Accept(s).(string)) } - s.WriteString("RETURN;") - return s.String() + return str.String() +} + +func (s *sqlGenerator) VisitCreateRoleStatement(p0 *parse.CreateRoleStatement) any { + panic("create role should never be used within a generated SQL statement") +} + +func (s *sqlGenerator) VisitDropRoleStatement(p0 *parse.DropRoleStatement) any { + panic("drop role should never be used within a generated SQL statement") +} + +func (s *sqlGenerator) VisitTransferOwnershipStatement(p0 *parse.TransferOwnershipStatement) any { + panic("transfer ownership should never be used within a generated SQL statement") } -func (p *procedureGenerator) VisitProcedureStmtReturnNext(p0 *parse.ProcedureStmtReturnNext) any { - s := strings.Builder{} - for i, expr := range p0.Values { - // we do not format the return var for return next, but instead - // assign it to the column name directly - s.WriteString(p.procedure.Returns.Fields[i].Name) - s.WriteString(" := ") - s.WriteString(expr.Accept(p).(string)) - s.WriteString(";\n") +func (s *sqlGenerator) VisitPrimaryKeyInlineConstraint(p0 *parse.PrimaryKeyInlineConstraint) any { + return "PRIMARY KEY" +} + +func (s *sqlGenerator) VisitPrimaryKeyOutOfLineConstraint(p0 *parse.PrimaryKeyOutOfLineConstraint) any { + str := strings.Builder{} + str.WriteString("PRIMARY KEY(") + str.WriteString(strings.Join(p0.Columns, ", ")) + 
str.WriteString(")") + return str.String() +} + +func (s *sqlGenerator) VisitUniqueInlineConstraint(p0 *parse.UniqueInlineConstraint) any { + return "UNIQUE" +} + +func (s *sqlGenerator) VisitUniqueOutOfLineConstraint(p0 *parse.UniqueOutOfLineConstraint) any { + str := strings.Builder{} + str.WriteString("UNIQUE(") + str.WriteString(strings.Join(p0.Columns, ", ")) + str.WriteString(")") + return str.String() +} + +func (s *sqlGenerator) VisitDefaultConstraint(p0 *parse.DefaultConstraint) any { + str := strings.Builder{} + str.WriteString("DEFAULT ") + str.WriteString(p0.Value.Accept(s).(string)) + return str.String() +} + +func (s *sqlGenerator) VisitNotNullConstraint(p0 *parse.NotNullConstraint) any { + return "NOT NULL" +} + +func (s *sqlGenerator) VisitCheckConstraint(p0 *parse.CheckConstraint) any { + str := strings.Builder{} + str.WriteString("CHECK(") + str.WriteString(p0.Expression.Accept(s).(string)) + str.WriteString(")") + return str.String() +} + +func (s *sqlGenerator) VisitForeignKeyReferences(fk *parse.ForeignKeyReferences) any { + str := strings.Builder{} + str.WriteString("REFERENCES ") + + if fk.RefTableNamespace != "" { + str.WriteString(fk.RefTableNamespace) + str.WriteString(".") + } else if s.pgSchema != "" { + str.WriteString(s.pgSchema) + str.WriteString(".") + } + + str.WriteString(fk.RefTable) + str.WriteString("(") + str.WriteString(strings.Join(fk.RefColumns, ", ")) + str.WriteString(")") + + for _, action := range fk.Actions { + str.WriteString(" ON ") + str.WriteString(string(action.On)) // update or delete + str.WriteString(" ") + str.WriteString(string(action.Do)) // cascade, restrict, etc. } + return str.String() +} - s.WriteString("RETURN NEXT;\n") - return s.String() +func (s *sqlGenerator) VisitForeignKeyOutOfLineConstraint(p0 *parse.ForeignKeyOutOfLineConstraint) any { + str := strings.Builder{} + str.WriteString("FOREIGN KEY(") + str.WriteString(strings.Join(p0.Columns, ", ")) + str.WriteString(") ") + str.WriteString(p0.References.Accept(s).(string)) + return str.String() } // formatPGLiteral formats a literal for user in postgres. @@ -957,66 +1148,127 @@ func formatPGLiteral(value any) (string, error) { return str.String(), nil } -// FormatProcedureName formats a procedure name for usage in postgres. This -// simply prepends the name with _fp_ -func formatForeignProcedureName(name string) string { - return "_fp_" + name +func (s *sqlGenerator) VisitLoopTermRange(p0 *parse.LoopTermRange) any { + generateErr(s) + return nil } -// formatAnonymousReceiver creates a plpgsql variable name for anonymous receivers. -func formatAnonymousReceiver(index int) string { - return fmt.Sprintf("_anon_%d", index) +func (s *sqlGenerator) VisitLoopTermSQL(p0 *parse.LoopTermSQL) any { + generateErr(s) + return nil } -// formatReturnVar formats a return variable name for usage in postgres. -func formatReturnVar(i int) string { - return fmt.Sprintf("_out_%d", i) +func (s *sqlGenerator) VisitLoopTermVariable(p0 *parse.LoopTermVariable) any { + generateErr(s) + return nil } -// formatVariable formats an expression variable for usage in postgres. 
-func formatVariable(e *parse.ExpressionVariable) string { - switch e.Prefix { - case parse.VariablePrefixDollar: - return formatParameterName(e.Name) - case parse.VariablePrefixAt: - return formatContextualVariableName(e.Name) - default: - // should never happen - panic("invalid variable prefix: " + string(e.Prefix)) - } +func (s *sqlGenerator) VisitProcedureStmtIf(p0 *parse.ActionStmtIf) any { + generateErr(s) + return nil } -// formatParameterName formats a parameter name for usage in postgres. This -// simply prepends the name with _param_. It expects the name does not have -// the $ prefix -func formatParameterName(name string) string { - return "_param_" + name +func (s *sqlGenerator) VisitIfThen(p0 *parse.IfThen) any { + generateErr(s) + return nil } -// formatContextualVariableName formats a contextual variable name for usage in postgres. -// This uses the current_setting function to get the value of the variable. It also -// removes the @ prefix. If the type is not a text type, it will also type cast it. -// The type casting is necessary since current_setting returns all values as text. -func formatContextualVariableName(name string) string { - str := fmt.Sprintf("current_setting('%s.%s')", PgSessionPrefix, name) +func (s *sqlGenerator) VisitProcedureStmtSQL(p0 *parse.ActionStmtSQL) any { + generateErr(s) + return nil +} - dataType, ok := parse.SessionVars[name] - if !ok { - panic("unknown contextual variable: " + name) - } - - switch dataType { - case types.BlobType: - return fmt.Sprintf("decode(%s, 'base64')", str) - case types.IntType: - return str + "::int8" - case types.BoolType: - return str + "::bool" - case types.UUIDType: - return str + "::uuid" - case types.TextType: - return str - default: - panic("disallowed contextual variable type: " + dataType.String()) - } +func (s *sqlGenerator) VisitProcedureStmtBreak(p0 *parse.ActionStmtBreak) any { + generateErr(s) + return nil +} + +func (s *sqlGenerator) VisitProcedureStmtReturn(p0 *parse.ActionStmtReturn) any { + generateErr(s) + return nil +} + +func (s *sqlGenerator) VisitProcedureStmtReturnNext(p0 *parse.ActionStmtReturnNext) any { + generateErr(s) + return nil +} + +func (s *sqlGenerator) VisitUseExtensionStatement(p0 *parse.UseExtensionStatement) any { + generateErr(s) + return nil +} + +func (s *sqlGenerator) VisitUnuseExtensionStatement(p0 *parse.UnuseExtensionStatement) any { + generateErr(s) + return nil +} + +func (s *sqlGenerator) VisitCreateActionStatement(p0 *parse.CreateActionStatement) any { + generateErr(s) + return nil +} + +func (s *sqlGenerator) VisitDropActionStatement(p0 *parse.DropActionStatement) any { + generateErr(s) + return nil +} + +func (s *sqlGenerator) VisitActionStmtDeclaration(p0 *parse.ActionStmtDeclaration) any { + generateErr(s) + return nil +} + +func (s *sqlGenerator) VisitActionStmtAssignment(p0 *parse.ActionStmtAssign) any { + generateErr(s) + return nil +} + +func (s *sqlGenerator) VisitActionStmtCall(p0 *parse.ActionStmtCall) any { + generateErr(s) + return nil +} + +func (s *sqlGenerator) VisitActionStmtForLoop(p0 *parse.ActionStmtForLoop) any { + generateErr(s) + return nil +} + +func (s *sqlGenerator) VisitActionStmtIf(p0 *parse.ActionStmtIf) any { + generateErr(s) + return nil +} + +func (s *sqlGenerator) VisitActionStmtSQL(p0 *parse.ActionStmtSQL) any { + generateErr(s) + return nil +} + +func (s *sqlGenerator) VisitActionStmtBreak(p0 *parse.ActionStmtBreak) any { + generateErr(s) + return nil +} + +func (s *sqlGenerator) VisitActionStmtReturn(p0 *parse.ActionStmtReturn) any { + 
generateErr(s) + return nil +} + +func (s *sqlGenerator) VisitActionStmtReturnNext(p0 *parse.ActionStmtReturnNext) any { + generateErr(s) + return nil +} + +func (s *sqlGenerator) VisitCreateNamespaceStatement(p0 *parse.CreateNamespaceStatement) any { + generateErr(s) + return nil +} + +func (s *sqlGenerator) VisitDropNamespaceStatement(p0 *parse.DropNamespaceStatement) any { + generateErr(s) + return nil +} + +// generateErr is a helper function that panics when a Visit method that is unexpected is called. +func generateErr(t any) { + panic(fmt.Sprintf("SQL generate should never be called on %T", t)) } diff --git a/node/engine/pg_generate/generate_test.go b/node/engine/pg_generate/generate_test.go new file mode 100644 index 000000000..d1b4a2bd9 --- /dev/null +++ b/node/engine/pg_generate/generate_test.go @@ -0,0 +1,327 @@ +package pggenerate_test + +import ( + "strings" + "testing" + "unicode" + + "github.com/kwilteam/kwil-db/node/engine/parse" + pggenerate "github.com/kwilteam/kwil-db/node/engine/pg_generate" + "github.com/stretchr/testify/require" +) + +func Test_PgGenerate(t *testing.T) { + type testcase struct { + name string + sql string + want string + params []string + wantErr bool + } + + tests := []testcase{ + { + name: "Simple Insert with two params", + sql: "INSERT INTO tbl VALUES ($a, $b);", + want: "INSERT INTO kwil.tbl VALUES ($1, $2);", + params: []string{"$a", "$b"}, + }, + { + name: "select with @caller", + sql: "SELECT * FROM tbl WHERE col = @caller;", + want: "SELECT * FROM tbl WHERE col = $1;", + params: []string{"@caller"}, + }, + { + name: "Insert with named columns and params", + sql: "INSERT INTO tbl (col1, col2) VALUES ($foo, $bar);", + want: "INSERT INTO kwil.tbl (col1, col2) VALUES ($1, $2);", + params: []string{"$foo", "$bar"}, + }, + { + name: "Update statement", + sql: "UPDATE tbl SET col1 = $x, col2 = $y WHERE col3 = $z;", + want: "UPDATE kwil.tbl SET col1 = $1, col2 = $2 WHERE col3 = $3;", + params: []string{"$x", "$y", "$z"}, + }, + { + name: "Select with one param", + sql: "SELECT * FROM tbl WHERE col = $param;", + want: "SELECT * FROM tbl WHERE col = $1;", + params: []string{"$param"}, + }, + { + name: "Delete with a param", + sql: "DELETE FROM tbl WHERE id = $id;", + want: "DELETE FROM kwil.tbl WHERE id = $1;", + params: []string{"$id"}, + }, + { + name: "Complex select with multiple params", + sql: "SELECT col1, col2 FROM tbl WHERE col1 = $foo AND col2 IN ($bar, $baz);", + want: "SELECT col1, col2 FROM tbl WHERE col1 = $1 AND col2 IN ($2, $3);", + params: []string{"$foo", "$bar", "$baz"}, + }, + { + name: "Repeated parameter name", + sql: "SELECT * FROM tbl WHERE col1 = $foo AND col2 = $foo;", + want: "SELECT * FROM tbl WHERE col1 = $1 AND col2 = $1;", + params: []string{"$foo"}, + }, + { + name: "Mixed case parameter name", + sql: "SELECT * FROM tbl WHERE UserId = $UserId;", + want: "SELECT * FROM tbl WHERE userid = $1;", + params: []string{"$userid"}, + }, + { + name: "Parameter name with underscore", + sql: "SELECT * FROM tbl WHERE col = $some_param_name;", + want: "SELECT * FROM tbl WHERE col = $1;", + params: []string{"$some_param_name"}, + }, + { + name: "Multiple parameters used in multiple places", + sql: "UPDATE tbl SET col1 = $a, col2 = $b WHERE col3 = $a;", + want: "UPDATE kwil.tbl SET col1 = $1, col2 = $2 WHERE col3 = $1;", + params: []string{"$a", "$b"}, + }, + { + name: "No parameters", + sql: "SELECT * FROM tbl;", + want: "SELECT * FROM tbl;", + }, + { + name: "Parameter in function call", + sql: "SELECT * FROM tbl WHERE col = 
abs($pwd);", + want: "SELECT * FROM tbl WHERE col = abs($1);", + params: []string{"$pwd"}, + }, + { + name: "Parameter in JOIN condition", + sql: "SELECT t1.col, t2.col FROM t1 JOIN t2 ON t1.id = t2.id AND t1.name = $name;", + want: "SELECT t1.col, t2.col FROM t1 INNER JOIN t2 ON t1.id = t2.id AND t1.name = $1;", + params: []string{"$name"}, + }, + { + name: "window function", + sql: "SELECT col1, col2, SUM(col3) OVER (PARTITION BY col1 ORDER BY col2) FROM tbl;", + want: "SELECT col1, col2, sum(col3) OVER (PARTITION BY col1 ORDER BY col2 ASC NULLS LAST) FROM tbl;", + }, + { + name: "array access", + sql: "SELECT col1[1], col2[2] FROM tbl;", + want: "SELECT col1[1], col2[2] FROM tbl;", + }, + { + name: "array slice", + sql: "SELECT col1[1:2], col2[2:], col3[:3] FROM tbl;", + want: "SELECT col1[1:2], col2[2:], col3[:3] FROM tbl;", + }, + { + name: "make array", + sql: "SELECT ARRAY[col1, col2] FROM tbl;", + want: "SELECT ARRAY[col1, col2] FROM tbl;", + }, + { + name: "type cast", + sql: "SELECT col1::INT8, (col2::TEXT)::INT8 FROM tbl;", + want: "SELECT col1::INT8, (col2::TEXT)::INT8 FROM tbl;", + }, + { + name: "arithmetics", + sql: "SELECT col1 + col2, col1 - col2, col1 * col2, col1 / col2 FROM tbl;", + want: "SELECT col1 + col2, col1 - col2, col1 * col2, col1 / col2 FROM tbl;", + }, + { + name: "comparison", + sql: "SELECT col1 = col2, col1 <> col2, col1 < col2, col1 <= col2, col1 > col2, col1 >= col2 FROM tbl;", + want: "SELECT col1 = col2, col1 <> col2, col1 < col2, col1 <= col2, col1 > col2, col1 >= col2 FROM tbl;", + }, + { + name: "unary", + sql: "SELECT +col1, -col2 FROM tbl;", + want: "SELECT +col1, -col2 FROM tbl;", + }, + { + name: "logical", + sql: "SELECT col1 AND col2, col1 OR col2, NOT col1 FROM tbl;", + want: "SELECT col1 AND col2, col1 OR col2, NOT col1 FROM tbl;", + }, + { + name: "case", + sql: "SELECT CASE WHEN col1 = 1 THEN 'one' ELSE 'other' END FROM tbl;", + want: "SELECT CASE WHEN col1 = 1 THEN 'one' ELSE 'other' END FROM tbl;", + }, + { + name: "collate", + sql: "SELECT col1 from tbl where name = 'foo' collate nocase;", + want: "SELECT col1 FROM tbl WHERE name = 'foo' COLLATE nocase;", + }, + { + name: "is null", + sql: "SELECT col1 IS NULL, col2 IS NOT NULL FROM tbl;", + want: "SELECT col1 IS NULL, col2 IS NOT NULL FROM tbl;", + }, + { + name: "between", + sql: "SELECT col1 BETWEEN 1 AND 10 FROM tbl;", + want: "SELECT col1 BETWEEN 1 AND 10 FROM tbl;", + }, + { + name: "in", + sql: "SELECT col1 IN (1, 2, 3) FROM tbl;", + want: "SELECT col1 IN (1, 2, 3) FROM tbl;", + }, + { + name: "like", + sql: "SELECT col1 LIKE 'foo%' FROM tbl WHERE col2 NOT LIKE '%bar' AND col3 ILIKE 'baz%';", + want: "SELECT col1 LIKE 'foo%' FROM tbl WHERE col2 NOT LIKE '%bar' AND col3 ILIKE 'baz%';", + }, + { + name: "exists", + sql: "SELECT EXISTS (SELECT 1 FROM tbl WHERE col1 = 1);", + want: "SELECT EXISTS (SELECT 1 FROM tbl WHERE col1 = 1);", + }, + { + name: "subquery", + sql: "SELECT (SELECT col1 FROM tbl WHERE col2 = 1) FROM tbl2;", + want: "SELECT (SELECT col1 FROM tbl WHERE col2 = 1) FROM tbl2;", + }, + { + name: "common table expression", + sql: "WITH cte AS (SELECT * FROM tbl) SELECT * FROM cte;", + want: "WITH cte AS (SELECT * FROM tbl) SELECT * FROM cte;", + }, + { + name: "recursive common table expression", + sql: "WITH RECURSIVE cte AS (SELECT * FROM tbl) SELECT * FROM cte;", + want: "WITH RECURSIVE cte AS (SELECT * FROM tbl) SELECT * FROM cte;", + }, + // ddl + { + name: "Create table", + sql: `CREATE TABLE departments ( + department_id UUID, + department_code TEXT NOT NULL, + 
department_name TEXT NOT NULL, + location_id INT8 DEFAULT 1, + created_at INT8, + PRIMARY KEY (department_id, department_code), + UNIQUE (department_name), + CHECK (department_name <> ''), + FOREIGN KEY (location_id) REFERENCES locations(location_id) +);`, + want: `CREATE TABLE kwil.departments ( + department_id UUID, + department_code TEXT NOT NULL, + department_name TEXT NOT NULL, + location_id INT8 DEFAULT 1, + created_at INT8, + PRIMARY KEY (department_id, department_code), + UNIQUE (department_name), + CHECK (department_name <> ''), + FOREIGN KEY (location_id) REFERENCES kwil.locations(location_id) + );`, + }, + { + name: "Create table if not exists", + sql: `CREATE TABLE IF NOT EXISTS departments ( + department_id UUID PRIMARY KEY + );`, + want: `CREATE TABLE IF NOT EXISTS kwil.departments ( + department_id UUID PRIMARY KEY + );`, + }, + { + name: "add column", + sql: `ALTER TABLE departments ADD COLUMN department_head UUID;`, + want: `ALTER TABLE kwil.departments ADD COLUMN department_head UUID;`, + }, + { + name: "drop column", + sql: `ALTER TABLE departments DROP COLUMN department_head;`, + want: `ALTER TABLE kwil.departments DROP COLUMN department_head;`, + }, + { + name: "rename column", + sql: `ALTER TABLE departments RENAME COLUMN department_head TO head_department;`, + want: `ALTER TABLE kwil.departments RENAME COLUMN department_head TO head_department;`, + }, + { + name: "rename table", + sql: `ALTER TABLE departments RENAME TO division;`, + want: `ALTER TABLE kwil.departments RENAME TO division;`, + }, + { + name: "add table constraint", + sql: `ALTER TABLE departments ADD PRIMARY KEY (department_id);`, + want: `ALTER TABLE kwil.departments ADD PRIMARY KEY (department_id);`, + }, + { + name: "drop table constraint", + sql: `ALTER TABLE departments DROP CONSTRAINT department_id;`, + want: `ALTER TABLE kwil.departments DROP CONSTRAINT department_id;`, + }, + { + name: "add column constraint", + sql: `ALTER TABLE departments ALTER COLUMN department_head SET NOT NULL;`, + want: `ALTER TABLE kwil.departments ALTER COLUMN department_head SET NOT NULL;`, + }, + { + name: "drop column constraint", + sql: `ALTER TABLE departments ALTER COLUMN department_head DROP NOT NULL;`, + want: `ALTER TABLE kwil.departments ALTER COLUMN department_head DROP NOT NULL;`, + }, + { + name: "drop table", + sql: `DROP TABLE departments;`, + want: `DROP TABLE kwil.departments;`, + }, + { + name: "drop table if exists cascade", + sql: `DROP TABLE IF EXISTS departments CASCADE;`, + want: `DROP TABLE IF EXISTS kwil.departments CASCADE;`, + }, + { + name: "create index", + sql: `CREATE INDEX IF NOT EXISTS idx_department_name_id ON departments (department_name, department_id);`, + want: `CREATE INDEX IF NOT EXISTS idx_department_name_id ON kwil.departments (department_name, department_id);`, + }, + { + name: "drop index", + sql: `DROP INDEX IF EXISTS idx_department_name_id;`, + want: `DROP INDEX IF EXISTS kwil.idx_department_name_id;`, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + parsed, err := parse.Parse(tt.sql) + require.NoError(t, err) + require.Len(t, parsed, 1) + + got, ps, err := pggenerate.GenerateSQL(parsed[0], "kwil") + if err != nil { + if !tt.wantErr { + require.NoError(t, err) + } + return + } else { + require.Equal(t, tt.wantErr, false) + } + + require.Equal(t, removeWhitespace(tt.want), removeWhitespace(got)) + require.EqualValues(t, tt.params, ps) + }) + } +} + +func removeWhitespace(s string) string { + return strings.Map(func(r rune) rune { + if 
unicode.IsSpace(r) { + return -1 // skip this rune + } + return r + }, s) +} diff --git a/parse/planner/logical/errors.go b/node/engine/planner/logical/errors.go similarity index 66% rename from parse/planner/logical/errors.go rename to node/engine/planner/logical/errors.go index 8219febf6..d9a7ca38f 100644 --- a/parse/planner/logical/errors.go +++ b/node/engine/planner/logical/errors.go @@ -6,12 +6,16 @@ import ( var ( ErrIllegalAggregate = errors.New("illegal aggregate") + ErrIllegalWindowFunction = errors.New("illegal window function") ErrColumnNotFound = errors.New("column not found or cannot be referenced in this part of the query") ErrUpdateOrDeleteWithoutWhere = errors.New("UPDATE and DELETE statements with a FROM table require a WHERE clause") ErrUnknownTable = errors.New("unknown table") - ErrAggregateInWhere = errors.New("aggregate functions are not allowed in the WHERE clause") ErrSetIncompatibleSchemas = errors.New("incompatible schemas in COMPOUND operation") ErrNotNullableColumn = errors.New("column is not nullable") ErrIllegalConflictArbiter = errors.New("illegal conflict arbiter") ErrAmbiguousColumn = errors.New("ambiguous column") + ErrWindowAlreadyDefined = errors.New("window already defined") + ErrInvalidWindowFunction = errors.New("invalid window function") + ErrWindowNotDefined = errors.New("window not defined") + ErrFunctionDoesNotExist = errors.New("function does not exist") ) diff --git a/parse/planner/logical/mappings.go b/node/engine/planner/logical/mappings.go similarity index 97% rename from parse/planner/logical/mappings.go rename to node/engine/planner/logical/mappings.go index b55a208fb..5d2a9b00d 100644 --- a/parse/planner/logical/mappings.go +++ b/node/engine/planner/logical/mappings.go @@ -3,7 +3,7 @@ package logical import ( "fmt" - "github.com/kwilteam/kwil-db/parse" + "github.com/kwilteam/kwil-db/node/engine/parse" ) // the following maps map constants from parse to their logical diff --git a/parse/planner/logical/nodes.go b/node/engine/planner/logical/nodes.go similarity index 88% rename from parse/planner/logical/nodes.go rename to node/engine/planner/logical/nodes.go index 96bee485c..793b9f27b 100644 --- a/parse/planner/logical/nodes.go +++ b/node/engine/planner/logical/nodes.go @@ -39,7 +39,26 @@ type Traversable interface { // Accept is used to traverse the node. Accept(Visitor) any // Equal is used to compare two nodes. - Equal(other Traversable) bool + // Equal(other Traversable) bool +} + +// I am unsure if equal is needed, and so I am commenting it out for now. +// Its quite hard to implement, so I do not want to delete yet, but will leave it +// unused for now. +// This function allows us to keep the equal functionality in the nodes themselves, +// while removing it from the interface. +func eq(a, b Traversable) bool { + aEq, ok := a.(interface{ Equal(other Traversable) bool }) + if !ok { + panic("equal called, but it is not implemented") + } + + _, ok = b.(interface{ Equal(other Traversable) bool }) + if !ok { + panic("equal called, but it is not implemented") + } + + return aEq.Equal(b) } // ScanSource is a source of data that a Scan can be performed on. @@ -53,6 +72,9 @@ type ScanSource interface { // TableScanSource represents a scan of a physical table or a CTE. type TableScanSource struct { + // Namespace is the namespace of the table being scanned. + Namespace string + // TableName is the name of the table being scanned. 
TableName string Type TableSourceType @@ -69,7 +91,13 @@ func (t *TableScanSource) Children() []Traversable { } func (t *TableScanSource) FormatScan() string { - return t.TableName + " [" + t.Type.String() + "]" + str := strings.Builder{} + if t.Namespace != "" { + str.WriteString(t.Namespace) + str.WriteString(".") + } + str.WriteString(t.TableName + " [" + t.Type.String() + "]") + return str.String() } func (t *TableScanSource) Plans() []Plan { @@ -88,7 +116,7 @@ func (t *TableScanSource) Equal(other Traversable) bool { return false } - return t.TableName == o.TableName && t.Type == o.Type + return t.TableName == o.TableName && t.Type == o.Type && t.Namespace == o.Namespace } type TableSourceType int @@ -202,13 +230,13 @@ func (f *ProcedureScanSource) Equal(other Traversable) bool { } for i, arg := range f.Args { - if !arg.Equal(o.Args[i]) { + if !eq(arg, o.Args[i]) { return false } } for i, arg := range f.ContextualArgs { - if !arg.Equal(o.ContextualArgs[i]) { + if !eq(arg, o.ContextualArgs[i]) { return false } } @@ -357,7 +385,7 @@ func (s *Scan) String() string { str := strings.Builder{} switch t := s.Source.(type) { case *TableScanSource: - // if relation name == table name, remove the alias + // if relation name == table name, remove the alias because it is redundant if t.TableName == s.RelationName { alias = "" } @@ -408,7 +436,7 @@ func (s *Scan) Equal(other Traversable) bool { return false } - return s.Source.Equal(o.Source) + return eq(s.Source, o.Source) } type Project struct { @@ -476,12 +504,12 @@ func (p *Project) Equal(other Traversable) bool { } for i, expr := range p.Expressions { - if !expr.Equal(o.Expressions[i]) { + if !eq(expr, o.Expressions[i]) { return false } } - return p.Child.Equal(o.Child) + return eq(p.Child, o.Child) } type Filter struct { @@ -498,7 +526,7 @@ func (f *Filter) Children() []Traversable { } func (f *Filter) String() string { - return "Filter: " + f.Condition.String() + return fmt.Sprintf("Filter: %s", f.Condition.String()) } func (f *Filter) Plans() []Plan { @@ -515,7 +543,7 @@ func (f *Filter) Equal(other Traversable) bool { return false } - return f.Condition.Equal(o.Condition) && f.Child.Equal(o.Child) + return eq(f.Condition, o.Condition) && eq(f.Child, o.Child) } type Join struct { @@ -566,11 +594,11 @@ func (j *Join) Equal(other Traversable) bool { return false } - if !j.Condition.Equal(o.Condition) { + if !eq(j.Condition, o.Condition) { return false } - return j.Left.Equal(o.Left) && j.Right.Equal(o.Right) + return eq(j.Left, o.Left) && eq(j.Right, o.Right) } type Sort struct { @@ -585,27 +613,34 @@ type SortExpression struct { NullsLast bool } +func (s *SortExpression) String() string { + str := strings.Builder{} + str.WriteString("") + str.WriteString(s.Expr.String()) + str.WriteString(" ") + if s.Ascending { + str.WriteString("asc ") + } else { + str.WriteString("desc ") + } + if s.NullsLast { + str.WriteString("nulls last") + } else { + str.WriteString("nulls first") + } + + return str.String() +} + func (s *Sort) String() string { str := strings.Builder{} - str.WriteString("Sort:") + str.WriteString("Sort: ") for i, sortExpr := range s.SortExpressions { if i > 0 { - str.WriteString(";") + str.WriteString("; ") } - str.WriteString(" [") - str.WriteString(sortExpr.Expr.String()) - str.WriteString("] ") - if sortExpr.Ascending { - str.WriteString("asc ") - } else { - str.WriteString("desc ") - } - if sortExpr.NullsLast { - str.WriteString("nulls last") - } else { - str.WriteString("nulls first") - } + str.WriteString(sortExpr.String()) } 
return str.String() } @@ -655,12 +690,12 @@ func (s *Sort) Equal(other Traversable) bool { return false } - if !sortExpr.Expr.Equal(o.SortExpressions[i].Expr) { + if !eq(sortExpr.Expr, o.SortExpressions[i].Expr) { return false } } - return s.Child.Equal(o.Child) + return eq(s.Child, o.Child) } type Limit struct { @@ -717,15 +752,15 @@ func (l *Limit) Equal(other Traversable) bool { return false } - if l.Limit != nil && !l.Limit.Equal(o.Limit) { + if l.Limit != nil && !eq(l.Limit, o.Limit) { return false } - if l.Offset != nil && !l.Offset.Equal(o.Offset) { + if l.Offset != nil && !eq(l.Offset, o.Offset) { return false } - return l.Child.Equal(o.Child) + return eq(l.Child, o.Child) } type Distinct struct { @@ -758,7 +793,7 @@ func (d *Distinct) Equal(other Traversable) bool { return false } - return d.Child.Equal(other) + return eq(d.Child, other) } type SetOperation struct { @@ -805,7 +840,7 @@ func (s *SetOperation) Equal(other Traversable) bool { return false } - return s.OpType == o.OpType && s.Left.Equal(o.Left) && s.Right.Equal(o.Right) + return s.OpType == o.OpType && eq(s.Left, o.Left) && eq(s.Right, o.Right) } type Aggregate struct { @@ -815,7 +850,7 @@ type Aggregate struct { GroupingExpressions []Expression // AggregateExpressions are the expressions used // in the SELECT clause (e.g. SUM(x), COUNT(y)). - AggregateExpressions []Expression + AggregateExpressions []*IdentifiedExpr // Child is the input to the aggregation // (e.g. a Project node). Child Plan @@ -904,18 +939,197 @@ func (a *Aggregate) Equal(other Traversable) bool { } for i, expr := range a.GroupingExpressions { - if !expr.Equal(o.GroupingExpressions[i]) { + if !eq(expr, o.GroupingExpressions[i]) { return false } } for i, expr := range a.AggregateExpressions { - if !expr.Equal(o.AggregateExpressions[i]) { + if !eq(expr, o.AggregateExpressions[i]) { return false } } - return a.Child.Equal(o.Child) + return eq(a.Child, o.Child) +} + +// Window is a logical plan node that represents a window function. +// It applies the window function to the input relation, and returns +// both the input relation and the window function result as a single +// relation. +type Window struct { + baseLogicalPlan + // PartitionBy are the expressions used in the PARTITION BY clause. + PartitionBy []Expression + // OrderBy are the expressions used in the ORDER BY clause. + OrderBy []*SortExpression + // Functions are the window functions that are applied. + Functions []*IdentifiedExpr + // Frames are not yet supported, but when they are, they will + // be added here. + Child Plan +} + +func (s *Window) Accept(v Visitor) any { + return v.VisitWindow(s) +} + +func (w *Window) Children() []Traversable { + var c []Traversable + for _, expr := range w.PartitionBy { + c = append(c, expr) + } + for _, expr := range w.OrderBy { + c = append(c, expr.Expr.Children()...) + } + + for _, wf := range w.Functions { + c = append(c, wf.Children()...) + } + + c = append(c, w.Child) + + return c +} + +func (w *Window) Plans() []Plan { + var c []Plan + + for _, expr := range w.PartitionBy { + c = append(c, expr.Plans()...) + } + + for _, expr := range w.OrderBy { + c = append(c, expr.Expr.Plans()...) + } + + for _, wf := range w.Functions { + c = append(c, wf.Plans()...) 
+ } + + c = append(c, w.Child) + + return c +} + +func (w *Window) String() string { + str := strings.Builder{} + str.WriteString("Window") + + if len(w.PartitionBy) > 0 { + str.WriteString(" [partition_by=") + for i, expr := range w.PartitionBy { + if i > 0 { + str.WriteString(", ") + } + str.WriteString(expr.String()) + } + str.WriteString("]") + } + + if len(w.OrderBy) > 0 { + str.WriteString(" [order_by=") + for i, expr := range w.OrderBy { + if i > 0 { + str.WriteString(", ") + } + str.WriteString(expr.String()) + } + str.WriteString("]") + } + + if len(w.Functions) > 0 { + str.WriteString(": ") + for i, wf := range w.Functions { + if i > 0 { + str.WriteString("; ") + } + str.WriteString(wf.String()) + } + } + + return str.String() +} + +func (w *Window) Relation() *Relation { + // we return the input relation and the window function results + // as a single relation + r := w.Child.Relation() + for _, wf := range w.Functions { + r.Fields = append(r.Fields, wf.Field()) + } + + return r +} + +// WindowFunction references a function being applied in a window. +type WindowFunction struct { + // Name is the function being applied. + Name string + // Args are the arguments to the function. + Args []Expression + // Filter is the optional filter condition that can be applied to the function. + Filter Expression + // returnType is the return type of the window function. + returnType *types.DataType +} + +func (w *WindowFunction) Field() *Field { + return &Field{ + Name: w.Name, + val: w.returnType.Copy(), + } +} + +func (w *WindowFunction) String() string { + str := strings.Builder{} + str.WriteString(w.Name) + str.WriteString("(") + for i, arg := range w.Args { + if i > 0 { + str.WriteString(", ") + } + str.WriteString(arg.String()) + } + str.WriteString(")") + + if w.Filter != nil { + str.WriteString(" filter=[") + str.WriteString(w.Filter.String()) + str.WriteString("]") + } + + return str.String() +} + +func (w *WindowFunction) Children() []Traversable { + var c []Traversable + for _, arg := range w.Args { + c = append(c, arg) + } + + if w.Filter != nil { + c = append(c, w.Filter) + } + + return c +} + +func (w *WindowFunction) Plans() []Plan { + var c []Plan + for _, arg := range w.Args { + c = append(c, arg.Plans()...) + } + + if w.Filter != nil { + c = append(c, w.Filter.Plans()...) 
+ } + + return c +} + +func (w *WindowFunction) Accept(v Visitor) any { + return v.VisitWindowFunction(w) } type JoinType int @@ -1011,7 +1225,7 @@ func (s *Subplan) Equal(other Traversable) bool { return false } - return s.ID == o.ID && s.Type == o.Type && s.Plan.Equal(o.Plan) + return s.ID == o.ID && s.Type == o.Type && eq(s.Plan, o.Plan) } type SubplanType int @@ -1019,6 +1233,7 @@ type SubplanType int const ( SubplanTypeSubquery SubplanType = iota SubplanTypeCTE + SubplanTypeRecursiveCTE ) func (s SubplanType) String() string { @@ -1027,6 +1242,8 @@ func (s SubplanType) String() string { return "subquery" case SubplanTypeCTE: return "cte" + case SubplanTypeRecursiveCTE: + return "recursive cte" default: panic(fmt.Sprintf("unknown subplan type %d", s)) } @@ -1070,7 +1287,7 @@ func (c *CartesianProduct) Equal(other Traversable) bool { return false } - return c.Left.Equal(o.Left) && c.Right.Equal(o.Right) + return eq(c.Left, o.Left) && eq(c.Right, o.Right) } /* @@ -1100,6 +1317,8 @@ func (l *Literal) String() string { return "0x" + hex.EncodeToString(c) case nil: return "NULL" + case fmt.Stringer: + return c.String() default: return fmt.Sprintf("%v", l.Value) } @@ -1311,7 +1530,7 @@ func (a *AggregateFunctionCall) Equal(other Traversable) bool { } for i, arg := range a.Args { - if !arg.Equal(o.Args[i]) { + if !eq(arg, o.Args[i]) { return false } } @@ -1383,7 +1602,7 @@ func (f *ScalarFunctionCall) Equal(other Traversable) bool { } for i, arg := range f.Args { - if !arg.Equal(o.Args[i]) { + if !eq(arg, o.Args[i]) { return false } } @@ -1391,6 +1610,7 @@ func (f *ScalarFunctionCall) Equal(other Traversable) bool { return true } +// TODO: remove this since it wont be supported // ProcedureCall is a call to a procedure. // This can be a call to either a procedure in the same schema, or // to a foreign procedure. 
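Note on the eq helper used throughout the node comparisons above: Equal is dropped from the Traversable interface and recovered at runtime through an anonymous-interface assertion. A minimal standalone sketch of that pattern, using hypothetical Node and Literal types rather than the planner's real ones:

package main

import "fmt"

// Node stands in for Traversable; Equal is intentionally not part of the interface.
type Node interface{ String() string }

type Literal struct{ Value int }

func (l *Literal) String() string { return fmt.Sprint(l.Value) }

// Equal stays on the concrete type, so comparison logic lives with the node itself.
func (l *Literal) Equal(other Node) bool {
	o, ok := other.(*Literal)
	return ok && o.Value == l.Value
}

// eq asserts the optional Equal method at runtime, mirroring the helper above.
func eq(a, b Node) bool {
	ae, ok := a.(interface{ Equal(other Node) bool })
	if !ok {
		panic("Equal not implemented")
	}
	if _, ok := b.(interface{ Equal(other Node) bool }); !ok {
		panic("Equal not implemented")
	}
	return ae.Equal(b)
}

func main() {
	fmt.Println(eq(&Literal{Value: 1}, &Literal{Value: 1})) // true
	fmt.Println(eq(&Literal{Value: 1}, &Literal{Value: 2})) // false
}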
@@ -1476,7 +1696,7 @@ func (p *ProcedureCall) Equal(other Traversable) bool { } for i, arg := range p.Args { - if !arg.Equal(o.Args[i]) { + if !eq(arg, o.Args[i]) { return false } } @@ -1544,7 +1764,7 @@ func (a *ArithmeticOp) Equal(other Traversable) bool { return false } - return a.Op == o.Op && a.Left.Equal(o.Left) && a.Right.Equal(o.Right) + return a.Op == o.Op && eq(a.Left, o.Left) && eq(a.Right, o.Right) } type ComparisonOp struct { @@ -1620,7 +1840,7 @@ func (c *ComparisonOp) Equal(other Traversable) bool { return false } - return c.Op == o.Op && c.Left.Equal(o.Left) && c.Right.Equal(o.Right) + return c.Op == o.Op && eq(c.Left, o.Left) && eq(c.Right, o.Right) } type LogicalOp struct { @@ -1669,7 +1889,7 @@ func (l *LogicalOp) Equal(other Traversable) bool { return false } - return l.Op == o.Op && l.Left.Equal(o.Left) && l.Right.Equal(o.Right) + return l.Op == o.Op && eq(l.Left, o.Left) && eq(l.Right, o.Right) } type UnaryOp struct { @@ -1723,7 +1943,7 @@ func (u *UnaryOp) Equal(other Traversable) bool { return false } - return u.Op == o.Op && u.Expr.Equal(o.Expr) + return u.Op == o.Op && eq(u.Expr, o.Expr) } type TypeCast struct { @@ -1756,7 +1976,7 @@ func (t *TypeCast) Equal(other Traversable) bool { return false } - return t.Type.EqualsStrict(o.Type) && t.Expr.Equal(o.Expr) + return t.Type.EqualsStrict(o.Type) && eq(t.Expr, o.Expr) } type AliasExpr struct { @@ -1780,7 +2000,9 @@ func (a *AliasExpr) Plans() []Plan { } func (a *AliasExpr) Field() *Field { - return a.Expr.Field() + f := a.Expr.Field() + f.Name = a.Alias + return f } func (a *AliasExpr) Equal(other Traversable) bool { @@ -1789,7 +2011,7 @@ func (a *AliasExpr) Equal(other Traversable) bool { return false } - return a.Alias == o.Alias && a.Expr.Equal(o.Expr) + return a.Alias == o.Alias && eq(a.Expr, o.Expr) } type ArrayAccess struct { @@ -1828,7 +2050,7 @@ func (a *ArrayAccess) Equal(other Traversable) bool { return false } - return a.Array.Equal(o.Array) && a.Index.Equal(o.Index) + return eq(a.Array, o.Array) && eq(a.Index, o.Index) } type ArrayConstructor struct { @@ -1893,7 +2115,7 @@ func (a *ArrayConstructor) Equal(other Traversable) bool { } for i, elem := range a.Elements { - if !elem.Equal(o.Elements[i]) { + if !eq(elem, o.Elements[i]) { return false } } @@ -1941,7 +2163,7 @@ func (f *FieldAccess) Equal(other Traversable) bool { return false } - return f.Key == o.Key && f.Object.Equal(o.Object) + return f.Key == o.Key && eq(f.Object, o.Object) } type SubqueryExpr struct { @@ -2050,7 +2272,7 @@ func (c *Collate) Equal(other Traversable) bool { return false } - return c.Collation == o.Collation && c.Expr.Equal(o.Expr) + return c.Collation == o.Collation && eq(c.Expr, o.Expr) } type CollationType uint8 @@ -2139,7 +2361,7 @@ func (i *IsIn) Equal(other Traversable) bool { return false } - if !i.Left.Equal(o.Left) { + if !eq(i.Left, o.Left) { return false } @@ -2148,7 +2370,7 @@ func (i *IsIn) Equal(other Traversable) bool { } for j, expr := range i.Expressions { - if !expr.Equal(o.Expressions[j]) { + if !eq(expr, o.Expressions[j]) { return false } } @@ -2252,7 +2474,7 @@ func (c *Case) Equal(other Traversable) bool { } if c.Value != nil && o.Value != nil { - if !c.Value.Equal(o.Value) { + if !eq(c.Value, o.Value) { return false } } else if c.Value != nil || o.Value != nil { @@ -2264,16 +2486,16 @@ func (c *Case) Equal(other Traversable) bool { } for i, when := range c.WhenClauses { - if !when[0].Equal(o.WhenClauses[i][0]) { + if !eq(when[0], o.WhenClauses[i][0]) { return false } - if 
!when[1].Equal(o.WhenClauses[i][1]) { + if !eq(when[1], o.WhenClauses[i][1]) { return false } } if c.Else != nil && o.Else != nil { - return c.Else.Equal(o.Else) + return eq(c.Else, o.Else) } return c.Else == nil && o.Else == nil @@ -2355,7 +2577,7 @@ func (i *IdentifiedExpr) Equal(other Traversable) bool { return false } - return i.ID == o.ID && i.Expr.Equal(o.Expr) + return i.ID == o.ID && eq(i.Expr, o.Expr) } /* @@ -2436,7 +2658,7 @@ func (r *Return) Equal(t Traversable) bool { } } - return r.Child.Equal(o.Child) + return eq(r.Child, o.Child) } /* @@ -2524,12 +2746,12 @@ func (u *Update) Equal(t Traversable) bool { return false } - if !assign.Value.Equal(o.Assignments[i].Value) { + if !eq(assign.Value, o.Assignments[i].Value) { return false } } - return u.Child.Equal(o.Child) + return eq(u.Child, o.Child) } // Delete is a node that plans a delete operation. @@ -2571,7 +2793,7 @@ func (d *Delete) Equal(t Traversable) bool { return false } - return d.Child.Equal(o.Child) + return eq(d.Child, o.Child) } // TODO: I dont love this insert. Everything else feels very relational, but this @@ -2592,9 +2814,9 @@ type Insert struct { ReferencedAs string // Columns are the columns to insert into. Columns []*Field - // Values are the values to insert. - // The length of each second dimensional slice in Values must be equal to all others. - Values *Tuples + // InsertionValues are the values to insert. + // These can be either values specified using VALUES or a subquery. + InsertionValues Plan // ConflictResolution is the conflict resolution to use if there is a conflict. ConflictResolution ConflictResolution } @@ -2625,7 +2847,7 @@ func (s *Insert) Accept(v Visitor) any { return v.VisitInsert(s) } func (i *Insert) Children() []Traversable { - c := i.Values.Children() + c := i.InsertionValues.Children() if i.ConflictResolution != nil { c = append(c, i.ConflictResolution) @@ -2635,7 +2857,7 @@ func (i *Insert) Children() []Traversable { } func (i *Insert) Plans() []Plan { - c := []Plan{i.Values} + c := []Plan{i.InsertionValues} if i.ConflictResolution != nil { c = append(c, i.ConflictResolution) @@ -2658,7 +2880,7 @@ func (i *Insert) Equal(t Traversable) bool { return false } - if !i.Values.Equal(o.Values) { + if !eq(i.InsertionValues, o.InsertionValues) { return false } @@ -2670,7 +2892,7 @@ func (i *Insert) Equal(t Traversable) bool { return false } - return i.ConflictResolution.Equal(o.ConflictResolution) + return eq(i.ConflictResolution, o.ConflictResolution) } // Tuples is a list tuple being inserted into a table. 
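A rough sketch of why InsertionValues above is typed as a Plan rather than *Tuples: a VALUES list and a subquery become interchangeable sources behind the same field, which is what lets the generator emit either VALUES or a SELECT for an insert. The types below are hypothetical simplifications, not the package's real constructors:

package main

import "fmt"

// Plan is a stand-in for the logical plan interface.
type Plan interface{ Describe() string }

// Tuples models an INSERT ... VALUES source.
type Tuples struct{ Rows [][]string }

func (t *Tuples) Describe() string { return fmt.Sprintf("VALUES %v", t.Rows) }

// Scan models a subquery source, as in INSERT ... SELECT.
type Scan struct{ Table string }

func (s *Scan) Describe() string { return "SELECT * FROM " + s.Table }

// Insert holds either source behind the same field.
type Insert struct {
	Table           string
	InsertionValues Plan
}

func main() {
	byValues := Insert{Table: "users", InsertionValues: &Tuples{Rows: [][]string{{"1", "'alice'"}}}}
	bySelect := Insert{Table: "users", InsertionValues: &Scan{Table: "staged_users"}}
	fmt.Println(byValues.InsertionValues.Describe())
	fmt.Println(bySelect.InsertionValues.Describe())
}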
@@ -2696,7 +2918,7 @@ func (t *Tuples) Equal(other Traversable) bool { } for j, v := range val { - if !v.Equal(o.Values[i][j]) { + if !eq(v, o.Values[i][j]) { return false } } @@ -2848,7 +3070,7 @@ func (c *ConflictUpdate) Equal(other Traversable) bool { } if c.ConflictFilter != nil && o.ConflictFilter != nil { - if !c.ConflictFilter.Equal(o.ConflictFilter) { + if !eq(c.ConflictFilter, o.ConflictFilter) { return false } } @@ -2858,7 +3080,7 @@ func (c *ConflictUpdate) Equal(other Traversable) bool { } for i, assign := range c.Assignments { - if !assign.Value.Equal(o.Assignments[i].Value) { + if !eq(assign.Value, o.Assignments[i].Value) { return false } } @@ -3008,6 +3230,7 @@ type Visitor interface { VisitDistinct(*Distinct) any VisitSetOperation(*SetOperation) any VisitAggregate(*Aggregate) any + VisitWindow(*Window) any VisitSubplan(*Subplan) any VisitLiteral(*Literal) any VisitVariable(*Variable) any @@ -3038,6 +3261,7 @@ type Visitor interface { VisitConflictDoNothing(*ConflictDoNothing) any VisitConflictUpdate(*ConflictUpdate) any VisitTuples(*Tuples) any + VisitWindowFunction(*WindowFunction) any } /* diff --git a/parse/planner/logical/planner.go b/node/engine/planner/logical/planner.go similarity index 52% rename from parse/planner/logical/planner.go rename to node/engine/planner/logical/planner.go index 610c34e92..292b58ab4 100644 --- a/parse/planner/logical/planner.go +++ b/node/engine/planner/logical/planner.go @@ -8,13 +8,28 @@ import ( "strings" "github.com/kwilteam/kwil-db/core/types" - "github.com/kwilteam/kwil-db/core/utils/order" - "github.com/kwilteam/kwil-db/parse" + "github.com/kwilteam/kwil-db/node/engine" + "github.com/kwilteam/kwil-db/node/engine/parse" ) +// GetVarTypeFunc is a function that gets the type of a variable. +type GetVarTypeFunc = func(varName string) (dataType *types.DataType, found bool) + +// GetObjectFieldTypeFunc is a function that gets the type of a field in an object. +// TODO: we will need a way to either bind objects or pass in their fields. +type GetObjectFunc = func(objName string) (obj map[string]*types.DataType, found bool) + +// GetTableFunc is a function that gets a table by name. +// It can also be given a namespace to search in. If no namespace is given (passed as ""), it will search in the default namespace. +type GetTableFunc = func(namespace string, tableName string) (table *engine.Table, found bool) + // CreateLogicalPlan creates a logical plan from a SQL statement. -func CreateLogicalPlan(statement *parse.SQLStatement, schema *types.Schema, vars map[string]*types.DataType, - objects map[string]map[string]*types.DataType) (analyzed *AnalyzedPlan, err error) { +// If applyDefaultOrdering is true, it will rewrite the query to apply default ordering. +// Default ordering will modify the passed query. +// If defaultNamespace is not empty, it will be used as the default namespace for all tables. 
+func CreateLogicalPlan(statement *parse.SQLStatement, tables GetTableFunc, + vars GetVarTypeFunc, objects GetObjectFunc, applyDefaultOrdering bool, defaultNamespace string, +) (analyzed *AnalyzedPlan, err error) { defer func() { if r := recover(); r != nil { err2, ok := r.(error) @@ -25,24 +40,25 @@ func CreateLogicalPlan(statement *parse.SQLStatement, schema *types.Schema, vars } }() - if vars == nil { - vars = make(map[string]*types.DataType) - } - - if objects == nil { - objects = make(map[string]map[string]*types.DataType) - } - ctx := &planContext{ - Schema: schema, - CTEs: make(map[string]*Relation), - Variables: vars, - Objects: objects, + Tables: tables, + CTEs: make(map[string]*Relation), + Variables: vars, + Objects: objects, + applyDefaultOrdering: applyDefaultOrdering, + defaultNamespace: defaultNamespace, } scope := &scopeContext{ plan: ctx, OuterRelation: &Relation{}, + // intentionally leave preGroupRelation nil + onWindowFuncExpr: func(ewfc *parse.ExpressionWindowFunctionCall, _ *Relation, _ map[string]*IdentifiedExpr) (Expression, *Field, error) { + return nil, nil, fmt.Errorf("%w: cannot use window functions in this context", ErrIllegalWindowFunction) + }, + onAggregateFuncExpr: func(efc *parse.ExpressionFunctionCall, agg *parse.AggregateFunctionDefinition, _ map[string]*IdentifiedExpr) (Expression, *Field, error) { + return nil, nil, fmt.Errorf("%w: cannot use aggregate functions in this context", ErrIllegalAggregate) + }, } plan, err := scope.sqlStmt(statement) @@ -85,9 +101,8 @@ func (a *AnalyzedPlan) Format() string { // planContext holds information that is needed during the planning process. type planContext struct { - // Schema is the underlying database schema that the query should - // be evaluated against. - Schema *types.Schema + // Tables are the tables that the query can reference. + Tables GetTableFunc // CTEs are the common table expressions in the query. // This field should be updated as the query planner // processes the query. @@ -97,11 +112,11 @@ type planContext struct { // processes the query. CTEPlans []*Subplan // Variables are the variables in the query. - Variables map[string]*types.DataType + Variables GetVarTypeFunc // Objects are the objects in the query. // Kwil supports one-dimensional objects, so this would be // accessible via objname.fieldname. - Objects map[string]map[string]*types.DataType + Objects GetObjectFunc // SubqueryCount is the number of subqueries in the query. // This field should be updated as the query planner // processes the query. @@ -110,6 +125,11 @@ type planContext struct { // This field should be updated as the query planner // processes the query. ReferenceCount int + // applyDefaultOrdering is true if the query should be rewritten + // to apply default ordering. + applyDefaultOrdering bool + // defaultNamespace is the default namespace (schema in Postgres) for all tables. + defaultNamespace string } // scopeContext contains information about the current scope of the query. @@ -119,17 +139,65 @@ type scopeContext struct { // OuterRelation is the relation of all outer queries that can be // referenced from a subquery. OuterRelation *Relation + // preGroupRelation is the relation that is used before grouping. + // It is simply used to give more helpful error messages. + preGroupRelation *Relation // Correlations are the fields that are corellated to an outer query. Correlations []*Field + // onWindowFuncExpr is a function that is called when evaluating a window function. 
+ onWindowFuncExpr func(*parse.ExpressionWindowFunctionCall, *Relation, map[string]*IdentifiedExpr) (Expression, *Field, error) + // onAggregateFuncExpr is a function that is called when evaluating an aggregate function. + // It is NOT called if the aggregate function is being used as a window function; in this case, + // onWindowFuncExpr is called. + onAggregateFuncExpr func(*parse.ExpressionFunctionCall, *parse.AggregateFunctionDefinition, map[string]*IdentifiedExpr) (Expression, *Field, error) + // aggViolationColumn is the column that is causing an aggregate violation. + aggViolationColumn string + cteCtx cteContext +} + +// cteContext contains information about the common table expression context. +type cteContext struct { + // statementCanBeRecursive is true if the current statement can be recursive + // and we are processing a CTE. + statementCanBeRecursive bool + // recursiveCTEShape is the shape of the current CTE. + // It is set based off of the relation of the first select core. + // If it is nil, then the recursive CTE cannot be referenced here. + recursiveCTEShape *Relation + // currentCTEName is the current CTE being processed. + currentCTEName string + // usedRecursiveCTE is true if the recursive CTE has been used. + usedRecursiveCTE bool } +type QuerySection string + +const ( + querySectionUnknown QuerySection = "UNKNOWN" + querySectionWhere QuerySection = "WHERE" + querySectionGroupBy QuerySection = "GROUP BY" + querySectionJoin QuerySection = "JOIN" + querySectionWindow QuerySection = "WINDOW" + querySectionHaving QuerySection = "HAVING" + querySectionOrderBy QuerySection = "ORDER BY" + querySectionLimit QuerySection = "LIMIT" + querySectionOffset QuerySection = "OFFSET" + querySectionResults QuerySection = "RESULTS" + querySectionCompound QuerySection = "COMPOUND" +) + // sqlStmt builds a logical plan for a top-level SQL statement. func (s *scopeContext) sqlStmt(node *parse.SQLStatement) (TopLevelPlan, error) { + if node.Recursive { + s.cteCtx.statementCanBeRecursive = true + } for _, cte := range node.CTEs { + s.cteCtx.currentCTEName = cte.Name if err := s.cte(cte); err != nil { return nil, err } } + s.cteCtx = cteContext{} // reset the CTE context switch node := node.SQL.(type) { default: @@ -161,6 +229,10 @@ func (s *scopeContext) sqlStmt(node *parse.SQLStatement) (TopLevelPlan, error) { // cte builds a common table expression. 
func (s *scopeContext) cte(node *parse.CommonTableExpression) error { + s.cteCtx.usedRecursiveCTE = false + defer func() { + s.cteCtx.usedRecursiveCTE = false + }() plan, rel, err := s.selectStmt(node.Query) if err != nil { return err @@ -190,11 +262,16 @@ func (s *scopeContext) cte(node *parse.CommonTableExpression) error { } } + subplanType := SubplanTypeCTE + if s.cteCtx.usedRecursiveCTE { + subplanType = SubplanTypeRecursiveCTE + } + s.plan.CTEs[node.Name] = rel s.plan.CTEPlans = append(s.plan.CTEPlans, &Subplan{ Plan: plan, ID: node.Name, - Type: SubplanTypeCTE, + Type: subplanType, extraInfo: extraInfo, }) @@ -231,18 +308,28 @@ func (s *scopeContext) selectStmt(node *parse.SelectStatement) (plan Plan, rel * panic("no select cores") } + // isRecursive is true if the statement is recursive + isRecursive := false + var projectFunc func(Plan) Plan var preProjectRel, resultRel *Relation - plan, preProjectRel, resultRel, projectFunc, err = s.selectCore(node.SelectCores[0]) + var groupingTerms map[string]*IdentifiedExpr + plan, preProjectRel, groupingTerms, resultRel, projectFunc, err = s.selectCore(node.SelectCores[0]) if err != nil { return nil, nil, err } + logSection := false // basic flag to help us log the statement section when an error occurs + querySection := querySectionUnknown defer func() { - // the resulting relation will always be resultRel + // the resulting relation will always be resultRel, regardless of ordering and limiting rel = resultRel + if logSection { + if err != nil { + err = makeSectionErr(querySection, err) + } + } }() - // if there is one select core, thenr the sorting and limiting can refer // to both the preProjectRel and the resultRel. If it is a compound query, // then the sorting and resultRel can only refer to the resultRel. 
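// The querySection/logSection flags above exist purely for error reporting: when planning
// a clause fails, the deferred handler wraps the error via makeSectionErr (defined further
// below), so a failure while planning ORDER BY surfaces roughly as
//
//	error in ORDER BY section of sql statement: <underlying error>
//
// rather than as a bare error with no indication of which clause was at fault.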
@@ -266,33 +353,31 @@ func (s *scopeContext) selectStmt(node *parse.SelectStatement) (plan Plan, rel * field.Parent = "" } + querySection = querySectionCompound for i, core := range node.SelectCores[1:] { - rightPlan, _, rightRel, projectFunc, err := s.selectCore(core) + // if we are in the first UNION / INTERSECT / EXCEPT, we can reference a recursive CTE + if i == 0 && s.cteCtx.statementCanBeRecursive { + s.cteCtx.recursiveCTEShape = resultRel + isRecursive = true + } else { + s.cteCtx.recursiveCTEShape = nil + } + + // if a compound query, then old group by terms cannot be referenced in ORDER / LIMIT / OFFSET + groupingTerms = nil + + logSection = false // in case of error, the select core will log the section, so we don't want to log it again + rightPlan, _, _, rightRel, projectFunc, err := s.selectCore(core) if err != nil { return nil, nil, err } + logSection = true // project the result values to match the left side rightPlan = projectFunc(rightPlan) - if len(rightRel.Fields) != len(resultRel.Fields) { - return nil, nil, fmt.Errorf(`%w: the number of columns in the SELECT clauses must match`, ErrSetIncompatibleSchemas) - } - - for i, field := range rightRel.Fields { - rightScalar, err := field.Scalar() - if err != nil { - return nil, nil, err - } - - leftScalar, err := resultRel.Fields[i].Scalar() - if err != nil { - return nil, nil, err - } - - if !rightScalar.Equals(leftScalar) { - return nil, nil, fmt.Errorf(`%w: the types of columns in the SELECT clauses must match`, ErrSetIncompatibleSchemas) - } + if err := equalShape(resultRel, rightRel); err != nil { + return nil, nil, fmt.Errorf("%w: %s", ErrSetIncompatibleSchemas, err) } plan = &SetOperation{ @@ -302,32 +387,49 @@ func (s *scopeContext) selectStmt(node *parse.SelectStatement) (plan Plan, rel * } } } + logSection = true // will be true for the rest of the function - // apply order by, limit, and offset - if len(node.Ordering) > 0 { - sort := &Sort{ - Child: plan, + // if applyDefaultOrdering is true, we need to order all results. + // In postgres, this is simply done by adding ORDER BY 1, 2, 3, ... + // We don't apply default ordering to recursive CTEs, since they are + // not allowed to have an ORDER BY clause. This is ok, because they cannot + // be limited either, and so any referencing query will select the recursive + // cte's full result set and have its own ORDER BY clause. 
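	// For example (a sketch with two projected columns): with applyDefaultOrdering set,
	//
	//	SELECT name, age FROM users
	//
	// is planned as if it were written
	//
	//	SELECT name, age FROM users ORDER BY 1, 2
	//
	// so the result order is deterministic without referencing any column by name.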
+ if s.plan.applyDefaultOrdering && !isRecursive { + for i := range resultRel.Fields { + node.Ordering = append(node.Ordering, &parse.OrderingTerm{ + Expression: &parse.ExpressionLiteral{ + Value: i + 1, // 1-indexed + Type: types.IntType.Copy(), + }, + }) } + } - for _, order := range node.Ordering { - // ordering term can be of any type - sortExpr, _, err := s.expr(order.Expression, sortAndOrderRel) - if err != nil { - return nil, nil, err - } + // if it is recursive, we need to enforce no ORDER BY, LIMIT + if isRecursive { + if len(node.Ordering) > 0 { + return nil, nil, errors.New("recursive CTEs cannot have an ORDER BY clause") + } + if node.Limit != nil { + return nil, nil, errors.New("recursive CTEs cannot have a LIMIT clause") + } + } - sort.SortExpressions = append(sort.SortExpressions, &SortExpression{ - Expr: sortExpr, - Ascending: get(orderAsc, order.Order), - NullsLast: get(orderNullsLast, order.Nulls), - }) + querySection = querySectionOrderBy + // apply order by, limit, and offset + if len(node.Ordering) > 0 { + sort, err := s.buildSort(plan, sortAndOrderRel, node.Ordering, groupingTerms) + if err != nil { + return nil, nil, err } plan = sort } + querySection = querySectionLimit if node.Limit != nil { - limitExpr, limitField, err := s.expr(node.Limit, sortAndOrderRel) + limitExpr, limitField, err := s.expr(node.Limit, sortAndOrderRel, groupingTerms) if err != nil { return nil, nil, err } @@ -347,7 +449,8 @@ func (s *scopeContext) selectStmt(node *parse.SelectStatement) (plan Plan, rel * } if node.Offset != nil { - offsetExpr, offsetField, err := s.expr(node.Offset, sortAndOrderRel) + querySection = querySectionOffset + offsetExpr, offsetField, err := s.expr(node.Offset, sortAndOrderRel, groupingTerms) if err != nil { return nil, nil, err } @@ -370,6 +473,34 @@ func (s *scopeContext) selectStmt(node *parse.SelectStatement) (plan Plan, rel * return plan, rel, nil } +// ordering builds a logical plan for an ordering. +func (s *scopeContext) buildSort(plan Plan, rel *Relation, ordering []*parse.OrderingTerm, groupingTerms map[string]*IdentifiedExpr) (*Sort, error) { + sort := &Sort{ + Child: plan, + } + + for _, order := range ordering { + // ordering term can be of any type + sortExpr, _, err := s.expr(order.Expression, rel, groupingTerms) + if err != nil { + return nil, err + } + + sort.SortExpressions = append(sort.SortExpressions, &SortExpression{ + Expr: sortExpr, + Ascending: get(orderAsc, order.Order), + NullsLast: get(orderNullsLast, order.Nulls), + }) + } + + return sort, nil +} + +// makeSectionErr creates an error for a section of a query. +func makeSectionErr(sec QuerySection, err error) error { + return fmt.Errorf("error in %s section of sql statement: %w", sec, err) +} + // selectCore builds a logical plan for a select core. // The order of building is: // 1. from (combining any joins into single source plan) @@ -391,173 +522,97 @@ func (s *scopeContext) selectStmt(node *parse.SelectStatement) (plan Plan, rel * // 2. // "SELECT name FROM users UNION 'hello' ORDER BY id" - this is invalid in Postgres, since "id" is not in the // result set. We need to project before the UNION. 
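// The projectFunc returned by selectCore is applied by the caller only after any compound
// operators have been planned (e.g. rightPlan = projectFunc(rightPlan) in selectStmt above),
// so the projection, and any DISTINCT, is layered on top of whichever plan ultimately needs it.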
-func (s *scopeContext) selectCore(node *parse.SelectCore) (prePrjectPlan Plan, preProjectRel *Relation, resultRel *Relation, +func (s *scopeContext) selectCore(node *parse.SelectCore) (preProjectPlan Plan, preProjectRel *Relation, groupingTerms map[string]*IdentifiedExpr, resultRel *Relation, projectFunc func(Plan) Plan, err error) { - // if there is no from, we just project the columns and return - if node.From == nil { - var exprs []Expression - rel := &Relation{} - for _, resultCol := range node.Columns { - switch resultCol := resultCol.(type) { - default: - panic(fmt.Sprintf("unexpected result column type %T", resultCol)) - case *parse.ResultColumnExpression: - expr, field, err := s.expr(resultCol.Expression, rel) - if err != nil { - return nil, nil, nil, nil, err - } - - if resultCol.Alias != "" { - expr = &AliasExpr{ - Expr: expr, - Alias: resultCol.Alias, - } - field.Parent = "" - field.Name = resultCol.Alias - } - - exprs = append(exprs, expr) - rel.Fields = append(rel.Fields, field) - case *parse.ResultColumnWildcard: - // if there is no from, we cannot expand the wildcard - panic(`wildcard "*" cannot be used without a FROM clause`) - } + querySection := querySectionUnknown + defer func() { + if err != nil { + err = makeSectionErr(querySection, err) } + }() - return &EmptyScan{}, rel, rel, func(lp Plan) Plan { - var p Plan = &Project{ - Child: lp, - Expressions: exprs, - } - - if node.Distinct { - p = &Distinct{ - Child: p, - } - } - - return p - }, nil + // if there is no from, we just project the columns and return + if node.From == nil { + querySection = querySectionResults + return s.selectCoreWithoutFrom(node.Columns, node.Distinct) } + // applyPreProject is a set of functions that are run right before the projection. + var applyPreProject []func() + // otherwise, we need to build the from and join clauses scan, rel, err := s.table(node.From) if err != nil { - return nil, nil, nil, nil, err + return nil, nil, nil, nil, nil, err } var plan Plan = scan + querySection = querySectionJoin for _, join := range node.Joins { plan, rel, err = s.join(plan, rel, join) if err != nil { - return nil, nil, nil, nil, err + return nil, nil, nil, nil, nil, err } } + querySection = querySectionWhere if node.Where != nil { - whereExpr, whereType, err := s.expr(node.Where, rel) + whereExpr, whereType, err := s.expr(node.Where, rel, map[string]*IdentifiedExpr{}) if err != nil { - return nil, nil, nil, nil, err + return nil, nil, nil, nil, nil, err } scalar, err := whereType.Scalar() if err != nil { - return nil, nil, nil, nil, err + return nil, nil, nil, nil, nil, err } if !scalar.Equals(types.BoolType) { - return nil, nil, nil, nil, errors.New("WHERE must be a boolean") + return nil, nil, nil, nil, nil, errors.New("WHERE must be a boolean") } plan = &Filter{ Child: plan, Condition: whereExpr, } - - // we need to check that the where clause does not contain any aggregate functions - contains := false - Traverse(whereExpr, func(node Traversable) bool { - if _, ok := node.(*AggregateFunctionCall); ok { - contains = true - return false - } - return true - }) - if contains { - return nil, nil, nil, nil, ErrAggregateInWhere - } } - // at this point, we have the full relation for the select core, and can expand the columns + querySection = querySectionUnknown + // wildcards expand all columns found at this point. + // For example, if we have a table "users" with columns "id" and "name", + // "SELECT * FROM USERS GROUP BY id" is selecting both "id" and "name" (which will fail, but that's not the point). 
results, err := s.expandResultCols(rel, node.Columns) if err != nil { - return nil, nil, nil, nil, err - } - - containsAgg := false - for _, result := range results { - containsAgg = hasAggregate(result.Expr) - } - - var resExprs []Expression - var resFields []*Field - for _, result := range results { - resExprs = append(resExprs, result.Expr) - resFields = append(resFields, result.Field) + return nil, nil, nil, nil, nil, err } - // if there is no group by or aggregate, we can apply any distinct and return - if len(node.GroupBy) == 0 && !containsAgg { - return plan, rel, &Relation{Fields: resFields}, func(lp Plan) Plan { - var p Plan = &Project{ - Child: lp, - Expressions: resExprs, - } - - if node.Distinct { - p = &Distinct{ - Child: p, - } - } - - return p - }, nil - } - - // otherwise, we need to build the group by and having clauses. + // we need to build the group by and having clauses. // This means that for all result columns, we need to rewrite any // column references or aggregate usage as columnrefs to the aggregate // functions matching term. aggTerms := make(map[string]*exprFieldPair[*IdentifiedExpr]) // any aggregate function used in the result or having - groupingTerms := make(map[string]*IdentifiedExpr) // any grouping term used in the GROUP BY + groupingTerms = make(map[string]*IdentifiedExpr) // any grouping term used in the GROUP BY aggregateRel := &Relation{} // the relation resulting from the aggregation aggPlan := &Aggregate{ // defined separately so we can reference it in the below clauses Child: plan, } - plan = aggPlan + hasGroupBy := false - for _, groupTerm := range node.GroupBy { - groupExpr, field, err := s.expr(groupTerm, rel) - if err != nil { - return nil, nil, nil, nil, err - } + oldPreGroupRel := s.preGroupRelation + s.preGroupRelation = rel + applyPreProject = append(applyPreProject, func() { s.preGroupRelation = oldPreGroupRel }) - Traverse(groupExpr, func(node Traversable) bool { - switch node.(type) { - case *AggregateFunctionCall: - err = fmt.Errorf(`%w: aggregate functions are not allowed in GROUP BY`, ErrIllegalAggregate) - return false - case *Subquery: - err = fmt.Errorf(`%w: subqueries are not allowed in GROUP BY`, ErrIllegalAggregate) - return false - } - return true - }) + querySection = querySectionGroupBy + for _, groupTerm := range node.GroupBy { + hasGroupBy = true + // we do not pass the grouping terms yet because they cannot be referenced in the group by + groupExpr, field, err := s.expr(groupTerm, rel, map[string]*IdentifiedExpr{}) if err != nil { - return nil, nil, nil, nil, err + return nil, nil, nil, nil, nil, err } + // if this group term already exists, we can skip it to avoid duplicate columns _, ok := groupingTerms[groupExpr.String()] if ok { continue @@ -570,35 +625,44 @@ func (s *scopeContext) selectCore(node *parse.SelectCore) (prePrjectPlan Plan, p } aggPlan.GroupingExpressions = append(aggPlan.GroupingExpressions, identified) - field.ReferenceID = identified.ID aggregateRel.Fields = append(aggregateRel.Fields, field) groupingTerms[groupExpr.String()] = identified } + if hasGroupBy { + plan = aggPlan + rel = aggregateRel + } + + // if we use an agg without group by, we will have to later alter the plan to include the aggregate node + usesAggWithoutGroupBy := false + + // on each aggregate function, we will rewrite it to be a reference, and place the actual function itself on the Aggregate node + oldOnAggregate := s.onAggregateFuncExpr + applyPreProject = append(applyPreProject, func() { s.onAggregateFuncExpr = oldOnAggregate }) 
+	newOnAggregate := s.makeOnAggregateFunc(aggTerms, &aggPlan.AggregateExpressions)
+	s.onAggregateFuncExpr = func(efc *parse.ExpressionFunctionCall, afd *parse.AggregateFunctionDefinition, grouping map[string]*IdentifiedExpr) (Expression, *Field, error) {
+		if !hasGroupBy {
+			usesAggWithoutGroupBy = true
+		}
+		return newOnAggregate(efc, afd, grouping)
+	}
+
+	querySection = querySectionHaving
 	if node.Having != nil {
-		// hmmmmm this doesnt work because the having rel needs to be the aggregation rel,
-		// but we need to use this to build the aggregation rel :(
-		// 2: on second thought, maybe not. We will have to do some tree matching and rewriting,
-		// but it should be possible.
-		havingExpr, field, err := s.expr(node.Having, rel)
+		havingExpr, field, err := s.expr(node.Having, rel, groupingTerms)
 		if err != nil {
-			return nil, nil, nil, nil, err
+			return nil, nil, nil, nil, nil, err
 		}

 		scalar, err := field.Scalar()
 		if err != nil {
-			return nil, nil, nil, nil, err
+			return nil, nil, nil, nil, nil, err
 		}

 		if !scalar.Equals(types.BoolType) {
-			return nil, nil, nil, nil, errors.New("HAVING must evaluate to a boolean")
-		}
-
-		// rewrite the having expression to use the aggregate functions
-		havingExpr, err = s.rewriteAccordingToAggregate(havingExpr, groupingTerms, aggTerms)
-		if err != nil {
-			return nil, nil, nil, nil, err
+			return nil, nil, nil, nil, nil, errors.New("HAVING must evaluate to a boolean")
 		}

 		plan = &Filter{
@@ -607,31 +671,76 @@ func (s *scopeContext) selectCore(node *parse.SelectCore) (prePrjectPlan Plan, p
 		}
 	}

-	// now we need to rewrite the select list to use the aggregate functions
+	// now we plan all window functions
+	windows := make(map[string]*Window)
+	unappliedWindows := []*Window{} // we wait to apply these to the plan until after evaluating them all, since subsequent windows cannot reference previous ones
+	querySection = querySectionWindow
+	for _, window := range node.Windows {
+		_, ok := windows[window.Name]
+		if ok {
+			return nil, nil, nil, nil, nil, fmt.Errorf(`%w: window "%s" is already defined`, ErrWindowAlreadyDefined, window.Name)
+		}
+
+		win, err := s.planWindow(plan, rel, window.Window, groupingTerms)
+		if err != nil {
+			return nil, nil, nil, nil, nil, err
+		}
+
+		windows[window.Name] = win
+
+		unappliedWindows = append(unappliedWindows, win)
+	}
+
+	// on each window function, we will rewrite it to be a reference, and add the window function to
+	// the corresponding window node. If no window node exists (if the window is defined inline with the function),
+	// we will create a new window node.
+	oldOnWindow := s.onWindowFuncExpr
+	applyPreProject = append(applyPreProject, func() { s.onWindowFuncExpr = oldOnWindow })
+	s.onWindowFuncExpr = s.makeOnWindowFunc(&unappliedWindows, windows, plan)
+
+	// now we can evaluate all return columns.
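	// A sketch of what the installed callbacks do from here on: in
	//
	//	SELECT name, count(*) FROM users GROUP BY name
	//
	// the result column "name" is rewritten to a reference to its grouping term, while
	// count(*) is registered once on the Aggregate node and replaced by an ExprRef,
	// giving roughly SELECT #REF(name), #REF(count(*)) over the aggregated relation.
	// If there is no GROUP BY at all (e.g. SELECT count(*) FROM users),
	// usesAggWithoutGroupBy causes the Aggregate node to be added to the plan afterwards.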
+ + querySection = querySectionResults + resultFields := make([]*Field, len(results)) + resultColExprs := make([]Expression, len(results)) for i, resultCol := range results { - results[i].Expr, err = s.rewriteAccordingToAggregate(resultCol.Expr, groupingTerms, aggTerms) + expr, field, err := s.expr(resultCol.Expression, rel, groupingTerms) if err != nil { - return nil, nil, nil, nil, err + return nil, nil, nil, nil, nil, err } + + if resultCol.Alias != "" { + expr = &AliasExpr{ + Expr: expr, + Alias: resultCol.Alias, + } + field.Name = resultCol.Alias + field.Parent = "" + } + + resultColExprs[i] = expr + resultFields[i] = field } - // finally, all of the aggregated columns need to be added to the Aggregate node - for _, agg := range order.OrderMap(aggTerms) { - aggPlan.AggregateExpressions = append(aggPlan.AggregateExpressions, agg.Value.Expr) - aggregateRel.Fields = append(aggregateRel.Fields, agg.Value.Field) + if usesAggWithoutGroupBy { + // we need to add the aggregate node to the plan + plan = aggPlan } - var resultColExprs []Expression - var resultFields []*Field - for _, resultCol := range results { - resultColExprs = append(resultColExprs, resultCol.Expr) - resultFields = append(resultFields, resultCol.Field) + // apply the unnamed window functions + for _, window := range unappliedWindows { + window.Child = plan + plan = window } - return plan, aggregateRel, &Relation{ + return plan, rel, groupingTerms, &Relation{ Fields: resultFields, }, func(lp Plan) Plan { + for _, apply := range applyPreProject { + apply() + } + var p Plan = &Project{ Child: lp, Expressions: resultColExprs, @@ -647,19 +756,290 @@ func (s *scopeContext) selectCore(node *parse.SelectCore) (prePrjectPlan Plan, p }, nil } -// hasAggregate returns true if the expression contains an aggregate function. -func hasAggregate(expr LogicalNode) bool { - var hasAggregate bool - Traverse(expr, func(node Traversable) bool { - if _, ok := node.(*AggregateFunctionCall); ok { - hasAggregate = true - return false +// makeOnWindowFunc makes a function that can be used as the callback for onWindowFuncExpr. +// The passed in unnamedWindows will be used to store any windows that are defined inline with the function. +// The namedWindows should be any windows that were defined in the SELECT statement. +// Callers of this function should pass an empty slice which can be written to. +func (s *scopeContext) makeOnWindowFunc(unnamedWindows *[]*Window, namedWindows map[string]*Window, plan Plan) func(*parse.ExpressionWindowFunctionCall, *Relation, map[string]*IdentifiedExpr) (Expression, *Field, error) { + return func(ewfc *parse.ExpressionWindowFunctionCall, rel *Relation, groupingTerms map[string]*IdentifiedExpr) (Expression, *Field, error) { + // the referenced function here must be either an aggregate + // or a window function. + // We don't simply call expr on the function because we want to ensure + // it is a window and handle it differently. 
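	// For example (a sketch, assuming sum and avg are registered aggregate functions): in
	//
	//	SELECT sum(amount) OVER w, avg(amount) OVER (PARTITION BY region)
	//	FROM sales WINDOW w AS (ORDER BY created_at)
	//
	// the first call resolves the named window "w" declared in the WINDOW clause, while
	// the second defines its window inline, so a new Window node is created for it here.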
+ funcDef, ok := parse.Functions[ewfc.FunctionCall.Name] + if !ok { + return nil, nil, fmt.Errorf(`%w: "%s"`, ErrFunctionDoesNotExist, ewfc.FunctionCall.Name) } - return true - }) + if ewfc.FunctionCall.Star { + return nil, nil, fmt.Errorf(`%w: window functions do not support "*"`, ErrInvalidWindowFunction) + } + if ewfc.FunctionCall.Distinct { + return nil, nil, fmt.Errorf(`%w: window functions do not support DISTINCT`, ErrInvalidWindowFunction) + } - return hasAggregate + switch funcDef.(type) { + case *parse.AggregateFunctionDefinition, *parse.WindowFunctionDefinition: + // intentionally do nothing + default: + return nil, nil, fmt.Errorf(`function "%s" is not a window function`, ewfc.FunctionCall.Name) + } + + var args []Expression + var fields []*Field + for _, arg := range ewfc.FunctionCall.Args { + expr, field, err := s.expr(arg, rel, groupingTerms) + if err != nil { + return nil, nil, err + } + + args = append(args, expr) + fields = append(fields, field) + } + + dataTypes, err := dataTypes(fields) + if err != nil { + return nil, nil, err + } + + returnType, err := funcDef.ValidateArgs(dataTypes) + if err != nil { + return nil, nil, err + } + + // if a filter exists, ensure it is a boolean + var filterExpr Expression + if ewfc.Filter != nil { + var filterField *Field + filterExpr, filterField, err = s.expr(ewfc.Filter, rel, groupingTerms) + if err != nil { + return nil, nil, err + } + + scalar, err := filterField.Scalar() + if err != nil { + return nil, nil, err + } + + if !scalar.Equals(types.BoolType) { + return nil, nil, errors.New("filter expression evaluate to a boolean") + } + } + + // the window can either reference an already declared window, or it can be anonymous. + // If referencing an already declared window, we simply add the window function to that window. + // If it is anonymous, we create a new window node and add the function to that. + var identified *IdentifiedExpr + switch win := ewfc.Window.(type) { + default: + panic(fmt.Sprintf("unexpected window type %T", ewfc.Window)) + case *parse.WindowImpl: + // it is an anonymous window, so we need to create a new window node + window, err := s.planWindow(plan, rel, win, groupingTerms) + if err != nil { + return nil, nil, err + } + identified = &IdentifiedExpr{ + Expr: &WindowFunction{ + Name: ewfc.FunctionCall.Name, + Args: args, + Filter: filterExpr, + returnType: returnType, + }, + ID: s.plan.uniqueRefIdentifier(), + } + + window.Functions = append(window.Functions, identified) + *unnamedWindows = append(*unnamedWindows, window) + case *parse.WindowReference: + // it must be a reference to a window that has already been declared + window, ok := namedWindows[win.Name] + if !ok { + return nil, nil, fmt.Errorf(`%w: window "%s" is not defined`, ErrWindowNotDefined, win.Name) + } + + identified = &IdentifiedExpr{ + Expr: &WindowFunction{ + Name: ewfc.FunctionCall.Name, + Args: args, + Filter: filterExpr, + returnType: returnType, + }, + ID: s.plan.uniqueRefIdentifier(), + } + + window.Functions = append(window.Functions, identified) + } + + return &ExprRef{ + Identified: identified, + }, &Field{ + Name: ewfc.FunctionCall.Name, + val: returnType, + ReferenceID: identified.ID, + }, nil + } +} + +// makeOnAggregateFunc makes a function that can be used as the callback for onAggregateFuncExpr. +// The passed in aggTerms will be both read from and written to. +// The passed in aggregateExpressions slice will be written to with any new aggregate expressions. 
+func (s *scopeContext) makeOnAggregateFunc(aggTerms map[string]*exprFieldPair[*IdentifiedExpr], aggregateExpression *[]*IdentifiedExpr) func(*parse.ExpressionFunctionCall, *parse.AggregateFunctionDefinition, map[string]*IdentifiedExpr) (Expression, *Field, error) { + return func(efc *parse.ExpressionFunctionCall, afd *parse.AggregateFunctionDefinition, groupingTerms map[string]*IdentifiedExpr) (Expression, *Field, error) { + // if it matches any aggregate function, we should reference it. + // Otherwise, register it as a new aggregate + + if s.preGroupRelation == nil { + return nil, nil, errors.New("cannot use aggregate functions in this part of the query") + } + + args := make([]Expression, len(efc.Args)) + argTypes := make([]*types.DataType, len(efc.Args)) + for i, arg := range efc.Args { + expr, fields, err := s.expr(arg, s.preGroupRelation, groupingTerms) + if err != nil { + return nil, nil, err + } + + args[i] = expr + argTypes[i], err = fields.Scalar() + if err != nil { + return nil, nil, err + } + } + + returnType, err := afd.ValidateArgs(argTypes) + if err != nil { + return nil, nil, err + } + + rawExpr := &AggregateFunctionCall{ + FunctionName: efc.Name, + Args: args, + Star: efc.Star, + Distinct: efc.Distinct, + returnType: returnType, + } + + identified, ok := aggTerms[rawExpr.String()] + // if found, just use the reference + if ok { + return &ExprRef{ + Identified: identified.Expr, + }, identified.Field, nil + } + + // otherwise, register it as a new aggregate + newIdentified := &IdentifiedExpr{ + Expr: rawExpr, + ID: s.plan.uniqueRefIdentifier(), + } + + // add the expression to the aggregate node + *aggregateExpression = append(*aggregateExpression, newIdentified) + + field := newIdentified.Field() + aggTerms[rawExpr.String()] = &exprFieldPair[*IdentifiedExpr]{ + Expr: newIdentified, + Field: field, + } + + return &ExprRef{ + Identified: newIdentified, + }, field, nil + } +} + +// selectCoreWithoutFrom builds a logical plan for a select core without a FROM clause. +func (s *scopeContext) selectCoreWithoutFrom(cols []parse.ResultColumn, isDistinct bool) (preProjectPlan Plan, preProjectRel *Relation, groupingTerms map[string]*IdentifiedExpr, resultRel *Relation, + projectFunc func(Plan) Plan, err error) { + var exprs []Expression + rel := &Relation{} + + for _, resultCol := range cols { + switch resultCol := resultCol.(type) { + default: + panic(fmt.Sprintf("unexpected result column type %T", resultCol)) + case *parse.ResultColumnExpression: + expr, field, err := s.expr(resultCol.Expression, rel, nil) + if err != nil { + return nil, nil, nil, nil, nil, err + } + + if resultCol.Alias != "" { + expr = &AliasExpr{ + Expr: expr, + Alias: resultCol.Alias, + } + field.Parent = "" + field.Name = resultCol.Alias + } + + exprs = append(exprs, expr) + rel.Fields = append(rel.Fields, field) + case *parse.ResultColumnWildcard: + // if there is no from, we cannot expand the wildcard + return nil, nil, nil, nil, nil, fmt.Errorf(`wildcard "*" cannot be used without a FROM clause`) + } + } + + return &EmptyScan{}, rel, map[string]*IdentifiedExpr{}, rel, func(lp Plan) Plan { + var p Plan = &Project{ + Child: lp, + Expressions: exprs, + } + + if isDistinct { + p = &Distinct{ + Child: p, + } + } + + return p + }, nil +} + +// planWindow plans a window function. 
+func (s *scopeContext) planWindow(plan Plan, rel *Relation, win *parse.WindowImpl, groupingTerms map[string]*IdentifiedExpr) (*Window, error) { + var partitionBy []Expression + if len(win.PartitionBy) > 0 { + for _, partition := range win.PartitionBy { + partition, _, err := s.expr(partition, rel, groupingTerms) + if err != nil { + return nil, err + } + + partitionBy = append(partitionBy, partition) + } + } + + // to add default ordering, we will now add numbers to the window's order by + if s.plan.applyDefaultOrdering { + for i := range win.OrderBy { + win.OrderBy = append(win.OrderBy, &parse.OrderingTerm{ + Expression: &parse.ExpressionLiteral{ + Value: i + 1, + Type: types.IntType.Copy(), + }, + }) + } + } + + var orderBy []*SortExpression + if len(win.OrderBy) > 0 { + sort, err := s.buildSort(plan, rel, win.OrderBy, groupingTerms) + if err != nil { + return nil, err + } + + orderBy = sort.SortExpressions + } + + return &Window{ + PartitionBy: partitionBy, + OrderBy: orderBy, + Child: plan, + }, nil } // exprFieldPair is a helper struct that pairs an expression with a field. @@ -671,56 +1051,28 @@ type exprFieldPair[T Expression] struct { Field *Field } -// rewriteAccordingToAggregate rewrites an expression according to the rules of aggregation. -// This is used to rewrite both the select list and having clause to validate that all columns -// are either captured in aggregates or have an exactly matching expression in the group by. -func (s *scopeContext) rewriteAccordingToAggregate(expr Expression, groupingTerms map[string]*IdentifiedExpr, aggTerms map[string]*exprFieldPair[*IdentifiedExpr]) (Expression, error) { +// rewriteGroupingTerms rewrites all known grouping terms to be references. +// For example, in the query "SELECT name FROM users GROUP BY name", it rewrites the logical tree to be +// "SELECT #REF(A) FROM USERS GROUP BY name->#REF(A)". +func (s *scopeContext) rewriteGroupingTerms(expr Expression, groupingTerms map[string]*IdentifiedExpr) (Expression, error) { node, err := Rewrite(expr, &RewriteConfig{ ExprCallback: func(le Expression) (Expression, bool, error) { // if it matches any group by term, we need to rewrite it - // and stop traversing any children - identified, ok := groupingTerms[le.String()] - if ok { + if identified, ok := groupingTerms[le.String()]; ok { return &ExprRef{ Identified: identified, }, false, nil } switch le := le.(type) { + default: + return le, true, nil case *ColumnRef: - // if it is a column and in the current relation, it is an error, since - // it was not contained in an aggregate function or group by. + // if it is a column reference, then it was not found in the group by return nil, false, fmt.Errorf(`%w: column "%s" must appear in the GROUP BY clause or be used in an aggregate function`, ErrIllegalAggregate, le.String()) case *AggregateFunctionCall: - // TODO: do we need to check for the aggregate being called on a correlated column? - // if it matches any aggregate function, we need to rewrite it - // to that reference. 
Otherwise, register it as a new aggregate - identified, ok := aggTerms[le.String()] - if ok { - return &ExprRef{ - Identified: identified.Expr, - }, false, nil - } - - newIdentified := &IdentifiedExpr{ - Expr: le, - ID: s.plan.uniqueRefIdentifier(), - } - - aggTerms[le.String()] = &exprFieldPair[*IdentifiedExpr]{ - Expr: newIdentified, - Field: &Field{ - Name: le.FunctionName, - val: le.returnType.Copy(), - ReferenceID: newIdentified.ID, - }, - } - - return &ExprRef{ - Identified: newIdentified, - }, false, nil - default: - return le, true, nil + // if it is an aggregate, we dont need to keep searching because it does not need to be rewritten + return le, false, nil } }, }) @@ -731,33 +1083,16 @@ func (s *scopeContext) rewriteAccordingToAggregate(expr Expression, groupingTerm return node.(Expression), nil } -// expandResultCols takes a relation and result columns, and converts them to expressions -// in the order provided. This is used to expand a wildcard in a select statement. -func (s *scopeContext) expandResultCols(rel *Relation, cols []parse.ResultColumn) ([]*exprFieldPair[Expression], error) { - var resultCols []Expression - var resultFields []*Field +// expandResultCols expands all wildcards to their respective column references. +func (s *scopeContext) expandResultCols(rel *Relation, cols []parse.ResultColumn) ([]*parse.ResultColumnExpression, error) { + // TODO: we need to rewrite the statement here to explicitly reference the columns so that we can guarantee the order (maybe?) + var res []*parse.ResultColumnExpression for _, col := range cols { switch col := col.(type) { default: panic(fmt.Sprintf("unexpected result column type %T", col)) case *parse.ResultColumnExpression: - expr, field, err := s.expr(col.Expression, rel) - if err != nil { - return nil, err - } - - if col.Alias != "" { - expr = &AliasExpr{ - Expr: expr, - Alias: col.Alias, - } - // since it is aliased, we now ignore the parent - field.Parent = "" - field.Name = col.Alias - } - - resultFields = append(resultFields, field) - resultCols = append(resultCols, expr) + res = append(res, col) case *parse.ResultColumnWildcard: var newFields []*Field if col.Table != "" { @@ -767,33 +1102,51 @@ func (s *scopeContext) expandResultCols(rel *Relation, cols []parse.ResultColumn } for _, field := range newFields { - resultCols = append(resultCols, &ColumnRef{ - Parent: field.Parent, - ColumnName: field.Name, + res = append(res, &parse.ResultColumnExpression{ + Expression: &parse.ExpressionColumn{ + Table: col.Table, + Column: field.Name, + }, }) - resultFields = append(resultFields, field) } } } - var pairs []*exprFieldPair[Expression] - for i, expr := range resultCols { - pairs = append(pairs, &exprFieldPair[Expression]{ - Expr: expr, - Field: resultFields[i], - }) + return res, nil +} + +// expr visits an expression node. +// It returns the logical plan for the expression, the field that the expression represents, +// and an error if one occurred. The Expression and Field will be nil if an error occurred. +// If a group by is present, expressions will be rewritten to reference the group by terms. +// nil can be passed for the groupingTerms if there is no group by. 
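// For example (a sketch): given SELECT age / 2 FROM users GROUP BY age / 2, the bare
// column "age" is not itself a grouping term, so exprWithAggRewrite flags the expression
// for rewriting; expr then calls rewriteGroupingTerms, which matches the larger tree
// "age / 2" against the GROUP BY terms and rewrites it to a reference instead of raising
// an aggregation error.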
+func (s *scopeContext) expr(node parse.Expression, currentRel *Relation, groupingTerms map[string]*IdentifiedExpr) (Expression, *Field, error) { + if groupingTerms == nil { + groupingTerms = make(map[string]*IdentifiedExpr) + } + + e, f, r, err := s.exprWithAggRewrite(node, currentRel, groupingTerms) + if err != nil { + return nil, nil, err + } + + // if we should rewrite, then we will traverse the expression and see if we can rewrite it. + // We do this to ensure that we match the longest possible rewrite tree. + if r { + e2, err := s.rewriteGroupingTerms(e, groupingTerms) + return e2, f, err } - return pairs, nil + return e, f, nil } -// expr visits an expression node. -func (s *scopeContext) expr(node parse.Expression, currentRel *Relation) (Expression, *Field, error) { +func (s *scopeContext) exprWithAggRewrite(node parse.Expression, currentRel *Relation, groupingTerms map[string]*IdentifiedExpr, +) (resExpr Expression, resField *Field, shouldRewrite bool, err error) { // cast is a helper function for type casting results based on the current node - cast := func(expr Expression, field *Field) (Expression, *Field, error) { + cast := func(expr Expression, field *Field) (Expression, *Field, bool, error) { castable, ok := node.(interface{ GetTypeCast() *types.DataType }) if !ok { - return expr, field, nil + return expr, field, shouldRewrite, nil } if castable.GetTypeCast() != nil { @@ -803,10 +1156,39 @@ func (s *scopeContext) expr(node parse.Expression, currentRel *Relation) (Expres return &TypeCast{ Expr: expr, Type: castable.GetTypeCast(), - }, field2, nil + }, field2, shouldRewrite, nil + } + + return expr, field, shouldRewrite, nil + } + + // rExpr is a helper function that should be used to recursively call expr(). + // The returned boolean indicates whether the expression violates grouping rules, and should + // attempt to rewrite the expression to reference the group by terms before failing. + rExpr := func(node parse.Expression) (Expression, *Field, error) { + e, f, r, err2 := s.exprWithAggRewrite(node, currentRel, groupingTerms) + if err2 != nil { + return nil, nil, err2 } - return expr, field, nil + if r { + // // if we should rewrite, we should try to match the expression to a group by term + // // if successful, we can tell the caller to not rewrite + // // if not, we tell the caller to attempt to rewrite + // grouped, ok := groupingTerms[e.String()] + // if ok { + // s.aggViolationColumn = "" + // return &ExprRef{ + // Identified: grouped, + // }, f, nil + // } + + // if we could not find the expression in the group by terms, we should rewrite. 
+ // This will tell the caller to rewrite the returned expression + shouldRewrite = true + } + + return e, f, nil } switch node := node.(type) { @@ -818,67 +1200,48 @@ func (s *scopeContext) expr(node parse.Expression, currentRel *Relation) (Expres Type: node.Type, }, anonField(node.Type)) case *parse.ExpressionFunctionCall: - args, fields, err := s.manyExprs(node.Args, currentRel) - if err != nil { - return nil, nil, err - } - - // can be either a procedure call or a built-in function funcDef, ok := parse.Functions[node.Name] - if !ok { - if node.Star { - panic("star (*) not allowed in procedure calls") - } - if node.Distinct { - panic("DISTINCT not allowed in procedure calls") - } + // if it is an aggregate function, we need to handle it differently + if ok { + // now we need to apply rules depending on if it is aggregate or not + if aggFn, ok := funcDef.(*parse.AggregateFunctionDefinition); ok { + expr, field, err := s.onAggregateFuncExpr(node, aggFn, groupingTerms) + if err != nil { + return nil, nil, false, err + } - // must be a procedure call - proc, found := s.plan.Schema.FindProcedure(node.Name) - if !found { - panic(fmt.Sprintf(`no function or procedure "%s" found`, node.Name)) + return cast(expr, field) } + } - returns, err := procedureReturnExpr(proc.Returns) + var args []Expression + var fields []*Field + for _, arg := range node.Args { + expr, field, err := rExpr(arg) if err != nil { - return nil, nil, err - } - - if len(node.Args) != len(proc.Parameters) { - panic(fmt.Sprintf(`procedure "%s" expects %d arguments, but %d were provided`, node.Name, len(proc.Parameters), len(node.Args))) + return nil, nil, false, err } - for i, param := range proc.Parameters { - scalar, err := fields[i].Scalar() - if err != nil { - return nil, nil, err - } + args = append(args, expr) + fields = append(fields, field) + } - if !param.Type.Equals(scalar) { - return nil, nil, fmt.Errorf(`procedure "%s" expects argument %d to be of type %s, but %s was provided`, node.Name, i+1, param.Type, scalar) - } - } + // can be either a procedure call or a built-in function - return cast(&ProcedureCall{ - ProcedureName: node.Name, - Args: args, - returnType: returns, - }, &Field{ - Name: node.Name, - val: returns, - }) + if !ok { + return nil, nil, false, fmt.Errorf(`%w: "%s"`, ErrFunctionDoesNotExist, node.Name) } // it is a built-in function types, err := dataTypes(fields) if err != nil { - return nil, nil, err + return nil, nil, false, err } returnVal, err := funcDef.ValidateArgs(types) if err != nil { - return nil, nil, err + return nil, nil, false, err } returnField := &Field{ @@ -886,100 +1249,33 @@ func (s *scopeContext) expr(node parse.Expression, currentRel *Relation) (Expres val: returnVal, } - // now we need to apply rules depending on if it is aggregate or not - if funcDef.IsAggregate { - // we apply cast outside the reference because we want to keep the reference - // specific to the aggregate function call. 
- return cast(&AggregateFunctionCall{ - FunctionName: node.Name, - Args: args, - Star: node.Star, - Distinct: node.Distinct, - returnType: returnVal, - }, returnField) - } - if node.Star { - panic("star (*) not allowed in non-aggregate function calls") - } - if node.Distinct { - panic("DISTINCT not allowed in non-aggregate function calls") - } - - return cast(&ScalarFunctionCall{ - FunctionName: node.Name, - Args: args, - returnType: returnVal, - }, returnField) - case *parse.ExpressionForeignCall: - proc, found := s.plan.Schema.FindForeignProcedure(node.Name) - if !found { - return nil, nil, fmt.Errorf(`unknown foreign procedure "%s"`, node.Name) - } - - returns, err := procedureReturnExpr(proc.Returns) - if err != nil { - return nil, nil, err - } - - args, argFields, err := s.manyExprs(node.Args, currentRel) - if err != nil { - return nil, nil, err - } - - if len(node.Args) != len(proc.Parameters) { - return nil, nil, fmt.Errorf(`foreign procedure "%s" expects %d arguments, but %d were provided`, node.Name, len(proc.Parameters), len(node.Args)) - } - - for i, param := range proc.Parameters { - scalar, err := argFields[i].Scalar() - if err != nil { - return nil, nil, err - } - - if !param.Equals(scalar) { - return nil, nil, fmt.Errorf(`foreign procedure "%s" expects argument %d to be of type %s, but %s was provided`, node.Name, i+1, param, scalar) - } - } - - contextArgs, ctxFields, err := s.manyExprs(node.ContextualArgs, currentRel) - if err != nil { - return nil, nil, err - } - - if len(ctxFields) != 2 { - return nil, nil, fmt.Errorf("foreign calls must have 2 contextual arguments") - } - - for i, field := range ctxFields { - scalar, err := field.Scalar() - if err != nil { - return nil, nil, err - } + return nil, nil, false, fmt.Errorf("star (*) not allowed in non-aggregate function calls") + } + if node.Distinct { + return nil, nil, false, fmt.Errorf("DISTINCT not allowed in non-aggregate function calls") + } - if !scalar.Equals(types.TextType) { - return nil, nil, fmt.Errorf("foreign call contextual argument %d must be a string", i+1) - } + return cast(&ScalarFunctionCall{ + FunctionName: node.Name, + Args: args, + returnType: returnVal, + }, returnField) + case *parse.ExpressionWindowFunctionCall: + wind, field, err := s.onWindowFuncExpr(node, currentRel, groupingTerms) + if err != nil { + return nil, nil, false, err } - return cast(&ProcedureCall{ - ProcedureName: node.Name, - Foreign: true, - Args: args, - ContextArgs: contextArgs, - returnType: returns, - }, &Field{ - Name: node.Name, - val: returns, - }) + return cast(wind, field) case *parse.ExpressionVariable: var val any // can be a data type or object - dt, ok := s.plan.Variables[node.String()] + dt, ok := s.plan.Variables(node.Name) if !ok { // might be an object - obj, ok := s.plan.Objects[node.String()] + obj, ok := s.plan.Objects(node.Name) if !ok { - return nil, nil, fmt.Errorf(`unknown variable "%s"`, node.String()) + return nil, nil, false, fmt.Errorf(`unknown variable "%s"`, node.Name) } val = obj @@ -988,33 +1284,33 @@ func (s *scopeContext) expr(node parse.Expression, currentRel *Relation) (Expres } return cast(&Variable{ - VarName: node.String(), + VarName: node.Name, dataType: val, }, &Field{val: val}) case *parse.ExpressionArrayAccess: - array, field, err := s.expr(node.Array, currentRel) + array, field, err := rExpr(node.Array) if err != nil { - return nil, nil, err + return nil, nil, false, err } - index, idxField, err := s.expr(node.Index, currentRel) + index, idxField, err := rExpr(node.Index) if err != nil { 
- return nil, nil, err + return nil, nil, false, err } scalar, err := idxField.Scalar() if err != nil { - return nil, nil, err + return nil, nil, false, err } if !scalar.Equals(types.IntType) { - return nil, nil, fmt.Errorf("array index must be an int") + return nil, nil, false, fmt.Errorf("array index must be an int") } field2 := field.Copy() scalar2, err := field2.Scalar() if err != nil { - return nil, nil, err + return nil, nil, false, err } scalar2.IsArray = false // since we are accessing an array, it is no longer an array @@ -1025,27 +1321,34 @@ func (s *scopeContext) expr(node parse.Expression, currentRel *Relation) (Expres }, field2) case *parse.ExpressionMakeArray: if len(node.Values) == 0 { - return nil, nil, fmt.Errorf("array constructor must have at least one element") + return nil, nil, false, fmt.Errorf("array constructor must have at least one element") } - exprs, fields, err := s.manyExprs(node.Values, currentRel) - if err != nil { - return nil, nil, err + var exprs []Expression + var fields []*Field + for _, val := range node.Values { + expr, field, err := rExpr(val) + if err != nil { + return nil, nil, false, err + } + + exprs = append(exprs, expr) + fields = append(fields, field) } firstVal, err := fields[0].Scalar() if err != nil { - return nil, nil, err + return nil, nil, false, err } for _, field := range fields[1:] { scalar, err := field.Scalar() if err != nil { - return nil, nil, err + return nil, nil, false, err } if !firstVal.Equals(scalar) { - return nil, nil, fmt.Errorf("array constructor must have elements of the same type") + return nil, nil, false, fmt.Errorf("array constructor must have elements of the same type") } } @@ -1058,19 +1361,19 @@ func (s *scopeContext) expr(node parse.Expression, currentRel *Relation) (Expres val: firstValCopy, }) case *parse.ExpressionFieldAccess: - obj, field, err := s.expr(node.Record, currentRel) + obj, field, err := rExpr(node.Record) if err != nil { - return nil, nil, err + return nil, nil, false, err } objType, err := field.Object() if err != nil { - return nil, nil, err + return nil, nil, false, err } fieldType, ok := objType[node.Field] if !ok { - return nil, nil, fmt.Errorf(`object "%s" does not have field "%s"`, field.Name, node.Field) + return nil, nil, false, fmt.Errorf(`object "%s" does not have field "%s"`, field.Name, node.Field) } return cast(&FieldAccess{ @@ -1080,35 +1383,35 @@ func (s *scopeContext) expr(node parse.Expression, currentRel *Relation) (Expres val: fieldType, }) case *parse.ExpressionParenthesized: - expr, field, err := s.expr(node.Inner, currentRel) + expr, field, err := rExpr(node.Inner) if err != nil { - return nil, nil, err + return nil, nil, false, err } return cast(expr, field) case *parse.ExpressionComparison: - left, leftField, err := s.expr(node.Left, currentRel) + left, leftField, err := rExpr(node.Left) if err != nil { - return nil, nil, err + return nil, nil, false, err } - right, rightField, err := s.expr(node.Right, currentRel) + right, rightField, err := rExpr(node.Right) if err != nil { - return nil, nil, err + return nil, nil, false, err } leftScalar, err := leftField.Scalar() if err != nil { - return nil, nil, err + return nil, nil, false, err } rightScalar, err := rightField.Scalar() if err != nil { - return nil, nil, err + return nil, nil, false, err } if !leftScalar.Equals(rightScalar) { - return nil, nil, fmt.Errorf("comparison operands must be of the same type. 
%s != %s", leftScalar, rightScalar) + return nil, nil, false, fmt.Errorf("comparison operands must be of the same type. %s != %s", leftScalar, rightScalar) } var op []ComparisonOperator @@ -1131,100 +1434,100 @@ func (s *scopeContext) expr(node parse.Expression, currentRel *Relation) (Expres expr := applyOps(left, right, op, negate) - return expr, anonField(types.BoolType.Copy()), nil + return expr, anonField(types.BoolType.Copy()), shouldRewrite, nil case *parse.ExpressionLogical: - left, leftField, err := s.expr(node.Left, currentRel) + left, leftField, err := rExpr(node.Left) if err != nil { - return nil, nil, err + return nil, nil, false, err } - right, rightField, err := s.expr(node.Right, currentRel) + right, rightField, err := rExpr(node.Right) if err != nil { - return nil, nil, err + return nil, nil, false, err } scalar, err := leftField.Scalar() if err != nil { - return nil, nil, err + return nil, nil, false, err } if !scalar.Equals(types.BoolType) { - return nil, nil, fmt.Errorf("logical operators must be applied to boolean types") + return nil, nil, false, fmt.Errorf("logical operators must be applied to boolean types") } scalar, err = rightField.Scalar() if err != nil { - return nil, nil, err + return nil, nil, false, err } if !scalar.Equals(types.BoolType) { - return nil, nil, fmt.Errorf("logical operators must be applied to boolean types") + return nil, nil, false, fmt.Errorf("logical operators must be applied to boolean types") } return &LogicalOp{ Left: left, Right: right, Op: get(logicalOps, node.Operator), - }, anonField(types.BoolType.Copy()), nil + }, anonField(types.BoolType.Copy()), shouldRewrite, nil case *parse.ExpressionArithmetic: - left, leftField, err := s.expr(node.Left, currentRel) + left, leftField, err := rExpr(node.Left) if err != nil { - return nil, nil, err + return nil, nil, false, err } - right, rightField, err := s.expr(node.Right, currentRel) + right, rightField, err := rExpr(node.Right) if err != nil { - return nil, nil, err + return nil, nil, false, err } leftScalar, err := leftField.Scalar() if err != nil { - return nil, nil, err + return nil, nil, false, err } rightScalar, err := rightField.Scalar() if err != nil { - return nil, nil, err + return nil, nil, false, err } if !leftScalar.Equals(rightScalar) { - return nil, nil, fmt.Errorf("arithmetic operands must be of the same type. %s != %s", leftScalar, rightScalar) + return nil, nil, false, fmt.Errorf("arithmetic operands must be of the same type. 
%s != %s", leftScalar, rightScalar) } return &ArithmeticOp{ Left: left, Right: right, Op: get(arithmeticOps, node.Operator), - }, &Field{val: leftField.val}, nil + }, &Field{val: leftField.val}, shouldRewrite, nil case *parse.ExpressionUnary: - expr, field, err := s.expr(node.Expression, currentRel) + expr, field, err := rExpr(node.Expression) if err != nil { - return nil, nil, err + return nil, nil, false, err } op := get(unaryOps, node.Operator) scalar, err := field.Scalar() if err != nil { - return nil, nil, err + return nil, nil, false, err } switch op { case Negate: if !scalar.IsNumeric() { - return nil, nil, fmt.Errorf("negation can only be applied to numeric types") + return nil, nil, false, fmt.Errorf("negation can only be applied to numeric types") } if scalar.Equals(types.Uint256Type) { - return nil, nil, fmt.Errorf("negation cannot be applied to uint256") + return nil, nil, false, fmt.Errorf("negation cannot be applied to uint256") } case Not: if !scalar.Equals(types.BoolType) { - return nil, nil, fmt.Errorf("logical negation can only be applied to boolean types") + return nil, nil, false, fmt.Errorf("logical negation can only be applied to boolean types") } case Positive: if !scalar.IsNumeric() { - return nil, nil, fmt.Errorf("positive can only be applied to numeric types") + return nil, nil, false, fmt.Errorf("positive can only be applied to numeric types") } } @@ -1233,7 +1536,7 @@ func (s *scopeContext) expr(node parse.Expression, currentRel *Relation) (Expres return &UnaryOp{ Expr: expr, Op: op, - }, &Field{val: field.val}, nil + }, &Field{val: field.val}, shouldRewrite, nil case *parse.ExpressionColumn: field, err := currentRel.Search(node.Table, node.Column) // if no error, then we found the column in the current relation @@ -1241,27 +1544,55 @@ func (s *scopeContext) expr(node parse.Expression, currentRel *Relation) (Expres if err == nil { scalar, err := field.Scalar() if err != nil { - return nil, nil, err + return nil, nil, false, err } - return cast(&ColumnRef{ + casted, castField, rewrite, err := cast(&ColumnRef{ Parent: field.Parent, ColumnName: field.Name, dataType: scalar, }, field) + if err != nil { + return nil, nil, false, err + } + + // if the column is in the group by, then we should rewrite it. + _, ok := groupingTerms[field.String()] + + return casted, castField, rewrite || ok, nil } // If the error is not that the column was not found, check if // the column is in the outer relation if errors.Is(err, ErrColumnNotFound) { // might be in the outer relation, correlated field, err = s.OuterRelation.Search(node.Table, node.Column) - if err != nil { - return nil, nil, err + if errors.Is(err, ErrColumnNotFound) { + // if not found, see if it is in the relation but not grouped + field, err2 := s.preGroupRelation.Search(node.Table, node.Column) + // if the column exist in the outer relation, then it might be part of an expression + // contained in the group by. 
We should tell the caller to attempt to rewrite the expression + if err2 == nil { + // we return the column because the caller might try to handle the error + scalar, err := field.Scalar() + if err != nil { + return nil, nil, false, err + } + + s.aggViolationColumn = node.String() + return &ColumnRef{ + Parent: field.Parent, + ColumnName: field.Name, + dataType: scalar, + }, field, true, nil + } + return nil, nil, false, err + } else if err != nil { + return nil, nil, false, err } scalar, err := field.Scalar() if err != nil { - return nil, nil, err + return nil, nil, false, err } // mark as correlated @@ -1274,16 +1605,16 @@ func (s *scopeContext) expr(node parse.Expression, currentRel *Relation) (Expres }, field) } // otherwise, return the error - return nil, nil, err + return nil, nil, false, err case *parse.ExpressionCollate: - expr, field, err := s.expr(node.Expression, currentRel) + expr, field, err := rExpr(node.Expression) if err != nil { - return nil, nil, err + return nil, nil, false, err } scalar, err := field.Scalar() if err != nil { - return nil, nil, err + return nil, nil, false, err } c := &Collate{ @@ -1295,75 +1626,75 @@ func (s *scopeContext) expr(node parse.Expression, currentRel *Relation) (Expres c.Collation = NoCaseCollation if !scalar.Equals(types.TextType) { - return nil, nil, fmt.Errorf("NOCASE collation can only be applied to text types") + return nil, nil, false, fmt.Errorf("NOCASE collation can only be applied to text types") } default: - return nil, nil, fmt.Errorf(`unknown collation "%s"`, node.Collation) + return nil, nil, false, fmt.Errorf(`unknown collation "%s"`, node.Collation) } // return the whole field since collations don't overwrite the return value's name - return c, field, nil + return c, field, shouldRewrite, nil case *parse.ExpressionStringComparison: - left, leftField, err := s.expr(node.Left, currentRel) + left, leftField, err := rExpr(node.Left) if err != nil { - return nil, nil, err + return nil, nil, false, err } - right, rightField, err := s.expr(node.Right, currentRel) + right, rightField, err := rExpr(node.Right) if err != nil { - return nil, nil, err + return nil, nil, false, err } leftScalar, err := leftField.Scalar() if err != nil { - return nil, nil, err + return nil, nil, false, err } rightScalar, err := rightField.Scalar() if err != nil { - return nil, nil, err + return nil, nil, false, err } if !leftScalar.Equals(types.TextType) || !rightScalar.Equals(types.TextType) { - return nil, nil, fmt.Errorf("string comparison operands must be of type string. %s != %s", leftScalar, rightScalar) + return nil, nil, false, fmt.Errorf("string comparison operands must be of type string. 
%s != %s", leftScalar, rightScalar) } expr := applyOps(left, right, []ComparisonOperator{get(stringComparisonOps, node.Operator)}, node.Not) - return expr, anonField(types.BoolType.Copy()), nil + return expr, anonField(types.BoolType.Copy()), shouldRewrite, nil case *parse.ExpressionIs: op := Is if node.Distinct { op = IsDistinctFrom } - left, leftField, err := s.expr(node.Left, currentRel) + left, leftField, err := rExpr(node.Left) if err != nil { - return nil, nil, err + return nil, nil, false, err } - right, rightField, err := s.expr(node.Right, currentRel) + right, rightField, err := rExpr(node.Right) if err != nil { - return nil, nil, err + return nil, nil, false, err } leftScalar, err := leftField.Scalar() if err != nil { - return nil, nil, err + return nil, nil, false, err } rightScalar, err := rightField.Scalar() if err != nil { - return nil, nil, err + return nil, nil, false, err } if node.Distinct { if !leftScalar.Equals(rightScalar) { - return nil, nil, fmt.Errorf("IS DISTINCT FROM requires operands of the same type. %s != %s", leftScalar, rightScalar) + return nil, nil, false, fmt.Errorf("IS DISTINCT FROM requires operands of the same type. %s != %s", leftScalar, rightScalar) } } else { if !rightScalar.Equals(types.NullType) { - return nil, nil, fmt.Errorf("IS requires the right operand to be NULL") + return nil, nil, false, fmt.Errorf("IS requires the right operand to be NULL") } } @@ -1380,16 +1711,16 @@ func (s *scopeContext) expr(node parse.Expression, currentRel *Relation) (Expres } } - return expr, anonField(types.BoolType.Copy()), nil + return expr, anonField(types.BoolType.Copy()), shouldRewrite, nil case *parse.ExpressionIn: - left, lField, err := s.expr(node.Expression, currentRel) + left, lField, err := rExpr(node.Expression) if err != nil { - return nil, nil, err + return nil, nil, false, err } lScalar, err := lField.Scalar() if err != nil { - return nil, nil, err + return nil, nil, false, err } in := &IsIn{ @@ -1399,39 +1730,46 @@ func (s *scopeContext) expr(node parse.Expression, currentRel *Relation) (Expres if node.Subquery != nil { subq, rel, err := s.planSubquery(node.Subquery, currentRel) if err != nil { - return nil, nil, err + return nil, nil, false, err } if len(rel.Fields) != 1 { - return nil, nil, fmt.Errorf("subquery must return exactly one column") + return nil, nil, false, fmt.Errorf("subquery must return exactly one column") } scalar, err := rel.Fields[0].Scalar() if err != nil { - return nil, nil, err + return nil, nil, false, err } if !lScalar.Equals(scalar) { - return nil, nil, fmt.Errorf("IN subquery must return the same type as the left expression. %s != %s", lScalar, scalar) + return nil, nil, false, fmt.Errorf("IN subquery must return the same type as the left expression. %s != %s", lScalar, scalar) } in.Subquery = &SubqueryExpr{ Query: subq, } } else { - right, rFields, err := s.manyExprs(node.List, currentRel) - if err != nil { - return nil, nil, err + var right []Expression + var rFields []*Field + for _, expr := range node.List { + r, rField, err := rExpr(expr) + if err != nil { + return nil, nil, false, err + } + + right = append(right, r) + rFields = append(rFields, rField) } for _, r := range rFields { scalar, err := r.Scalar() if err != nil { - return nil, nil, err + return nil, nil, false, err } if !lScalar.Equals(scalar) { - return nil, nil, fmt.Errorf("IN list must contain elements of the same type as the left expression. 
%s != %s", lScalar, scalar) + return nil, nil, false, fmt.Errorf("IN list must contain elements of the same type as the left expression. %s != %s", lScalar, scalar) } } @@ -1447,7 +1785,7 @@ func (s *scopeContext) expr(node parse.Expression, currentRel *Relation) (Expres } } - return expr, anonField(types.BoolType.Copy()), nil + return expr, anonField(types.BoolType.Copy()), shouldRewrite, nil case *parse.ExpressionBetween: leftOps, rightOps := []ComparisonOperator{GreaterThan}, []ComparisonOperator{LessThan} if !node.Not { @@ -1455,126 +1793,126 @@ func (s *scopeContext) expr(node parse.Expression, currentRel *Relation) (Expres rightOps = append(rightOps, Equal) } - left, exprField, err := s.expr(node.Expression, currentRel) + left, exprField, err := rExpr(node.Expression) if err != nil { - return nil, nil, err + return nil, nil, false, err } exprScalar, err := exprField.Scalar() if err != nil { - return nil, nil, err + return nil, nil, false, err } - lower, lowerField, err := s.expr(node.Lower, currentRel) + lower, lowerField, err := rExpr(node.Lower) if err != nil { - return nil, nil, err + return nil, nil, false, err } lowerScalar, err := lowerField.Scalar() if err != nil { - return nil, nil, err + return nil, nil, false, err } - upper, upperField, err := s.expr(node.Upper, currentRel) + upper, upperField, err := rExpr(node.Upper) if err != nil { - return nil, nil, err + return nil, nil, false, err } upScalar, err := upperField.Scalar() if err != nil { - return nil, nil, err + return nil, nil, false, err } if !exprScalar.Equals(lowerScalar) { - return nil, nil, fmt.Errorf("BETWEEN lower bound must be of the same type as the expression. %s != %s", exprScalar, lowerScalar) + return nil, nil, false, fmt.Errorf("BETWEEN lower bound must be of the same type as the expression. %s != %s", exprScalar, lowerScalar) } if !exprScalar.Equals(upScalar) { - return nil, nil, fmt.Errorf("BETWEEN upper bound must be of the same type as the expression. %s != %s", exprScalar, upScalar) + return nil, nil, false, fmt.Errorf("BETWEEN upper bound must be of the same type as the expression. 
%s != %s", exprScalar, upScalar) } return &LogicalOp{ Left: applyOps(left, lower, leftOps, false), Right: applyOps(left, upper, rightOps, false), Op: And, - }, anonField(types.BoolType.Copy()), nil + }, anonField(types.BoolType.Copy()), shouldRewrite, nil case *parse.ExpressionCase: c := &Case{} // all whens must be bool unless an expression is used before CASE expectedWhenType := types.BoolType.Copy() if node.Case != nil { - caseExpr, field, err := s.expr(node.Case, currentRel) + caseExpr, field, err := rExpr(node.Case) if err != nil { - return nil, nil, err + return nil, nil, false, err } c.Value = caseExpr expectedWhenType, err = field.Scalar() if err != nil { - return nil, nil, err + return nil, nil, false, err } } var returnType *types.DataType for _, whenThen := range node.WhenThen { - whenExpr, whenField, err := s.expr(whenThen[0], currentRel) + whenExpr, whenField, err := rExpr(whenThen[0]) if err != nil { - return nil, nil, err + return nil, nil, false, err } - thenExpr, thenField, err := s.expr(whenThen[1], currentRel) + thenExpr, thenField, err := rExpr(whenThen[1]) if err != nil { - return nil, nil, err + return nil, nil, false, err } thenType, err := thenField.Scalar() if err != nil { - return nil, nil, err + return nil, nil, false, err } if returnType == nil { returnType = thenType } else { if !returnType.Equals(thenType) { - return nil, nil, fmt.Errorf(`all THEN expressions must be of the same type %s, received %s`, returnType, thenType) + return nil, nil, false, fmt.Errorf(`all THEN expressions must be of the same type %s, received %s`, returnType, thenType) } } whenScalar, err := whenField.Scalar() if err != nil { - return nil, nil, err + return nil, nil, false, err } if !expectedWhenType.Equals(whenScalar) { - return nil, nil, fmt.Errorf(`WHEN expression must be of type %s, received %s`, expectedWhenType, whenScalar) + return nil, nil, false, fmt.Errorf(`WHEN expression must be of type %s, received %s`, expectedWhenType, whenScalar) } c.WhenClauses = append(c.WhenClauses, [2]Expression{whenExpr, thenExpr}) } if node.Else != nil { - elseExpr, elseField, err := s.expr(node.Else, currentRel) + elseExpr, elseField, err := rExpr(node.Else) if err != nil { - return nil, nil, err + return nil, nil, false, err } elseType, err := elseField.Scalar() if err != nil { - return nil, nil, err + return nil, nil, false, err } if !returnType.Equals(elseType) { - return nil, nil, fmt.Errorf(`ELSE expression must be of the same type of THEN expressions %s, received %s`, returnType, elseExpr) + return nil, nil, false, fmt.Errorf(`ELSE expression must be of the same type of THEN expressions %s, received %s`, returnType, elseExpr) } c.Else = elseExpr } - return c, anonField(returnType), nil + return c, anonField(returnType), shouldRewrite, nil case *parse.ExpressionSubquery: subq, rel, err := s.planSubquery(node.Subquery, currentRel) if err != nil { - return nil, nil, err + return nil, nil, false, err } subqExpr := &SubqueryExpr{ @@ -1591,14 +1929,14 @@ func (s *scopeContext) expr(node parse.Expression, currentRel *Relation) (Expres } } - return plan, anonField(types.BoolType.Copy()), nil + return plan, anonField(types.BoolType.Copy()), shouldRewrite, nil } else { if len(rel.Fields) != 1 { - return nil, nil, fmt.Errorf("scalar subquery must return exactly one column") + return nil, nil, false, fmt.Errorf("scalar subquery must return exactly one column") } } - return subqExpr, rel.Fields[0], nil + return subqExpr, rel.Fields[0], shouldRewrite, nil } } @@ -1677,40 +2015,6 @@ func (s 
*scopeContext) planSubquery(node *parse.SelectStatement, currentRel *Rel }, rel, nil } -// manyExprs is a helper function that applies the expr function to many expressions. -func (s *scopeContext) manyExprs(nodes []parse.Expression, currentRel *Relation) ([]Expression, []*Field, error) { - var exprs []Expression - var fields []*Field - for _, node := range nodes { - expr, field, err := s.expr(node, currentRel) - if err != nil { - return nil, nil, err - } - - exprs = append(exprs, expr) - fields = append(fields, field) - } - - return exprs, fields, nil -} - -// procedureReturnExpr gets the returned data type from a procedure return. -func procedureReturnExpr(node *types.ProcedureReturn) (*types.DataType, error) { - if node == nil { - return nil, fmt.Errorf("procedure does not return a value") - } - - if node.IsTable { - return nil, fmt.Errorf("procedure returns a table, not a scalar value") - } - - if len(node.Fields) != 1 { - return nil, fmt.Errorf("procedures in expressions must return exactly one value, received %d", len(node.Fields)) - } - - return node.Fields[0].Type.Copy(), nil -} - // applyComparisonOps applies a series of comparison operators to the left and right expressions. // If negate is true, then the final expression is negated. func applyOps(left, right Expression, ops []ComparisonOperator, negate bool) Expression { @@ -1763,14 +2067,37 @@ func (s *scopeContext) table(node parse.Table) (*Scan, *Relation, error) { var scanTblType TableSourceType var rel *Relation - if physicalTbl, ok := s.plan.Schema.FindTable(node.Table); ok { - scanTblType = TableSourcePhysical - rel = relationFromTable(physicalTbl) - } else if cte, ok := s.plan.CTEs[node.Table]; ok { + + // we first check if it is a recursive CTE, then any CTE, then a physical table + if node.Table == s.cteCtx.currentCTEName && node.Namespace == "" { + // if the shape is nil, then we are in a recursive CTE but we cannot reference it here. + // For example, this might be the case if we have: + // WITH RECURSIVE cte AS (SELECT * FROM cte) ... + // In this case, the recursive CTE cannot reference itself in the subquery, because + // it needs a base case followed by a UNION ALL to reference itself. 
+ if s.cteCtx.recursiveCTEShape == nil { + return nil, nil, fmt.Errorf("recursive CTE %s cannot reference itself in this context", node.Table) + } + + scanTblType = TableSourceCTE + rel = s.cteCtx.recursiveCTEShape + s.cteCtx.usedRecursiveCTE = true + } else if cte, ok := s.plan.CTEs[node.Table]; ok && node.Namespace == "" { // only use a cte if no namespace is specified scanTblType = TableSourceCTE rel = cte } else { - return nil, nil, fmt.Errorf(`unknown table "%s"`, node.Table) + // if it is a physical table, then we need to qualify it + if node.Namespace == "" { + node.Namespace = s.plan.defaultNamespace + } + + physicalTbl, ok := s.plan.Tables(node.Namespace, node.Table) + if !ok { + return nil, nil, fmt.Errorf(`%w: "%s"`, ErrUnknownTable, node.Table) + } + + scanTblType = TableSourcePhysical + rel = relationFromTable(physicalTbl) } for _, col := range rel.Fields { @@ -1779,6 +2106,7 @@ func (s *scopeContext) table(node parse.Table) (*Scan, *Relation, error) { return &Scan{ Source: &TableScanSource{ + Namespace: node.Namespace, TableName: node.Table, Type: scanTblType, rel: rel.Copy(), @@ -1810,142 +2138,6 @@ func (s *scopeContext) table(node parse.Table) (*Scan, *Relation, error) { Source: subq, RelationName: node.Alias, }, rel, nil - case *parse.RelationFunctionCall: - if node.Alias == "" { - return nil, nil, fmt.Errorf("join against procedure calls must have an alias") - } - - // the function call must either be a procedure or foreign procedure that returns - // a table. - - var args []Expression - var contextArgs []Expression - var procReturns *types.ProcedureReturn - var isForeign bool - if proc, ok := s.plan.Schema.FindProcedure(node.FunctionCall.FunctionName()); ok { - procReturns = proc.Returns - - procCall, ok := node.FunctionCall.(*parse.ExpressionFunctionCall) - if !ok { - // I don't think this is possible, but just in case - return nil, nil, fmt.Errorf(`unexpected procedure type "%T"`, node.FunctionCall) - } - - var fields []*Field - var err error - // we pass an empty relation because the subquery can't - // refer to the current relation, but they can be correlated against some - // outer relation. - args, fields, err = s.manyExprs(procCall.Args, &Relation{}) - if err != nil { - return nil, nil, err - } - - if len(fields) != len(proc.Parameters) { - return nil, nil, fmt.Errorf(`procedure "%s" expects %d arguments, received %d`, node.FunctionCall.FunctionName(), len(proc.Parameters), len(fields)) - } - - for i, field := range fields { - scalar, err := field.Scalar() - if err != nil { - return nil, nil, err - } - - if !scalar.Equals(proc.Parameters[i].Type) { - return nil, nil, fmt.Errorf(`procedure "%s" expects argument %d to be of type %s, received %s`, node.FunctionCall.FunctionName(), i+1, proc.Parameters[i].Type, field) - } - } - - } else if proc, ok := s.plan.Schema.FindForeignProcedure(node.FunctionCall.FunctionName()); ok { - procReturns = proc.Returns - isForeign = true - - procCall, ok := node.FunctionCall.(*parse.ExpressionForeignCall) - if !ok { - // this is possible if the user doesn't pass contextual arguments, - // (the parser will parse it as a regular function call instead of a foreign call) - return nil, nil, fmt.Errorf(`procedure "%s" is a foreign procedure and must have contextual arguments passed with []`, node.FunctionCall.FunctionName()) - } - - var fields []*Field - var err error - // we pass an empty relation because the subquery can't - // refer to the current relation, but they can be correlated against some - // outer relation. 
- args, fields, err = s.manyExprs(procCall.Args, &Relation{}) - if err != nil { - return nil, nil, err - } - - if len(fields) != len(proc.Parameters) { - return nil, nil, fmt.Errorf(`foreign procedure "%s" expects %d arguments, received %d`, node.FunctionCall.FunctionName(), len(proc.Parameters), len(fields)) - } - - for i, field := range fields { - scalar, err := field.Scalar() - if err != nil { - return nil, nil, err - } - - if !scalar.Equals(proc.Parameters[i]) { - return nil, nil, fmt.Errorf(`foreign procedure "%s" expects argument %d to be of type %s, received %s`, node.FunctionCall.FunctionName(), i+1, proc.Parameters[i], field) - } - } - - // must have 2 contextual arguments - if len(procCall.ContextualArgs) != 2 { - return nil, nil, fmt.Errorf(`foreign procedure "%s" must have 2 contextual arguments`, node.FunctionCall.FunctionName()) - } - - contextArgs, fields, err = s.manyExprs(procCall.ContextualArgs, &Relation{}) - if err != nil { - return nil, nil, err - } - - if len(fields) != 2 { - return nil, nil, fmt.Errorf(`foreign procedure "%s" expects 2 contextual arguments, received %d`, node.FunctionCall.FunctionName(), len(fields)) - } - - for i, field := range fields { - scalar, err := field.Scalar() - if err != nil { - return nil, nil, err - } - - if !scalar.Equals(types.TextType) { - return nil, nil, fmt.Errorf(`foreign procedure "%s" expects contextual argument %d to be of type %s, received %s`, node.FunctionCall.FunctionName(), i+1, types.TextType, field) - } - } - } else { - return nil, nil, fmt.Errorf(`unknown procedure "%s"`, node.FunctionCall.FunctionName()) - } - - if procReturns == nil { - return nil, nil, fmt.Errorf(`procedure "%s" does not return a table`, node.FunctionCall.FunctionName()) - } - if !procReturns.IsTable { - return nil, nil, fmt.Errorf(`procedure "%s" does not return a table`, node.FunctionCall.FunctionName()) - } - - rel := &Relation{} - for _, field := range procReturns.Fields { - rel.Fields = append(rel.Fields, &Field{ - Parent: node.Alias, - Name: field.Name, - val: field.Type.Copy(), - }) - } - - return &Scan{ - Source: &ProcedureScanSource{ - ProcedureName: node.FunctionCall.FunctionName(), - Args: args, - ContextualArgs: contextArgs, - IsForeign: isForeign, - rel: rel.Copy(), - }, - RelationName: node.Alias, - }, rel, nil } } @@ -1958,7 +2150,7 @@ func (s *scopeContext) join(child Plan, childRel *Relation, join *parse.Join) (P newRel := joinRels(childRel, tblRel) - onExpr, joinField, err := s.expr(join.On, newRel) + onExpr, joinField, err := s.expr(join.On, newRel, nil) if err != nil { return nil, nil, err } @@ -2021,7 +2213,7 @@ func (s *scopeContext) insert(node *parse.InsertStatement) (*Insert, error) { ReferencedAs: node.Alias, } - tbl, found := s.plan.Schema.FindTable(node.Table) + tbl, found := s.plan.Tables("", node.Table) if !found { return nil, fmt.Errorf(`%w: "%s"`, ErrUnknownTable, node.Table) } @@ -2084,7 +2276,7 @@ func (s *scopeContext) insert(node *parse.InsertStatement) (*Insert, error) { Field: &Field{ Parent: tbl.Name, Name: col.Name, - val: col.Type.Copy(), + val: col.DataType.Copy(), }, } } @@ -2094,68 +2286,81 @@ func (s *scopeContext) insert(node *parse.InsertStatement) (*Insert, error) { } else { expectedColLen = len(tbl.Columns) for _, col := range tbl.Columns { - expectedColTypes = append(expectedColTypes, col.Type.Copy()) + expectedColTypes = append(expectedColTypes, col.DataType.Copy()) } } rel := relationFromTable(tbl) ins.Columns = rel.Fields - tup := &Tuples{ - rel: &Relation{}, - } - - // check the value types and 
lengths - for i, vals := range node.Values { - if len(vals) != expectedColLen { - return nil, fmt.Errorf(`insert has %d columns but %d values were supplied`, expectedColLen, len(vals)) + if node.Select != nil { + // if a select statement is present, we need to plan it + plan, newRel, err := s.selectStmt(node.Select) + if err != nil { + return nil, err } - var row []*exprFieldPair[Expression] + // check that the select statement returns the correct number of columns and types + if err = equalShape(rel, newRel); err != nil { + return nil, err + } - for j, val := range vals { - expr, field, err := s.expr(val, rel) - if err != nil { - return nil, err + ins.InsertionValues = plan + } else { + tup := &Tuples{ + rel: &Relation{}, + } + // check the value types and lengths + for i, vals := range node.Values { + if len(vals) != expectedColLen { + return nil, fmt.Errorf(`insert has %d columns but %d values were supplied`, expectedColLen, len(vals)) } - scalar, err := field.Scalar() - if err != nil { - return nil, err - } + var row []*exprFieldPair[Expression] - if !scalar.Equals(expectedColTypes[j]) { - return nil, fmt.Errorf(`insert value %d must be of type %s, received %s`, j+1, expectedColTypes[j], field.val) - } + for j, val := range vals { + expr, field, err := s.expr(val, rel, nil) + if err != nil { + return nil, err + } - field.Name = tbl.Columns[j].Name - field.Parent = tbl.Name - row = append(row, &exprFieldPair[Expression]{ - Expr: expr, - Field: field, - }) - } + scalar, err := field.Scalar() + if err != nil { + return nil, err + } - pairs := orderAndFillNulls(row) - var newRow []Expression - for _, pair := range pairs { - newRow = append(newRow, pair.Expr) + if !scalar.Equals(expectedColTypes[j]) { + return nil, fmt.Errorf(`insert value %d must be of type %s, received %s`, j+1, expectedColTypes[j], field.val) + } - // if we are on the first row, we should build the tuple's relation - if i == 0 { - tup.rel.Fields = append(tup.rel.Fields, pair.Field) + field.Name = tbl.Columns[j].Name + field.Parent = tbl.Name + row = append(row, &exprFieldPair[Expression]{ + Expr: expr, + Field: field, + }) } - } - tup.Values = append(tup.Values, newRow) - } + pairs := orderAndFillNulls(row) + var newRow []Expression + for _, pair := range pairs { + newRow = append(newRow, pair.Expr) + + // if we are on the first row, we should build the tuple's relation + if i == 0 { + tup.rel.Fields = append(tup.rel.Fields, pair.Field) + } + } - ins.Values = tup + tup.Values = append(tup.Values, newRow) + } + ins.InsertionValues = tup + } // finally, we need to check if there is an ON CONFLICT clause, // and if so, we need to process it. - if node.Upsert != nil { - conflict, err := s.buildUpsert(node.Upsert, tbl, tup) + if node.OnConflict != nil { + conflict, err := s.buildUpsert(node.OnConflict, tbl, ins.InsertionValues) if err != nil { return nil, err } @@ -2167,8 +2372,8 @@ func (s *scopeContext) insert(node *parse.InsertStatement) (*Insert, error) { } // buildUpsert builds the conflict resolution for an upsert statement. -// It takes the upsert clause, the table, and the tuples that might cause a conflict. -func (s *scopeContext) buildUpsert(node *parse.UpsertClause, table *types.Table, tuples *Tuples) (ConflictResolution, error) { +// It takes the upsert clause, the table, and the plan that is being inserted (either VALUES or SELECT). +func (s *scopeContext) buildUpsert(node *parse.OnConflict, table *engine.Table, insertFrom Plan) (ConflictResolution, error) { // all DO UPDATE upserts need to have an arbiter index. 
// DO NOTHING can optionally have one, but it is not required. var arbiterIndex Index @@ -2178,30 +2383,41 @@ func (s *scopeContext) buildUpsert(node *parse.UpsertClause, table *types.Table, // do nothing case 1: // check the column for a unique or pk contraint, as well as all indexes - col, ok := table.FindColumn(node.ConflictColumns[0]) + col, ok := table.Column(node.ConflictColumns[0]) if !ok { return nil, fmt.Errorf(`conflict column "%s" not found in table`, node.ConflictColumns[0]) } - if col.HasAttribute(types.PRIMARY_KEY) { + if table.HasPrimaryKey(col.Name) { arbiterIndex = &IndexColumnConstraint{ Table: table.Name, Column: col.Name, ConstraintType: PrimaryKeyConstraintIndex, } - } else if col.HasAttribute(types.UNIQUE) { - arbiterIndex = &IndexColumnConstraint{ - Table: table.Name, - Column: col.Name, - ConstraintType: UniqueConstraintIndex, - } } else { + done := false // check all indexes for unique indexes that match the column for _, idx := range table.Indexes { - if (idx.Type == types.UNIQUE_BTREE || idx.Type == types.PRIMARY) && len(idx.Columns) == 1 && idx.Columns[0] == col.Name { + if (idx.Type == engine.UNIQUE_BTREE || idx.Type == engine.PRIMARY) && len(idx.Columns) == 1 && idx.Columns[0] == col.Name { arbiterIndex = &IndexNamed{ Name: idx.Name, } + done = true + break + } + } + + if !done { + // check all constraints for unique constraints that match the column + for _, con := range table.Constraints { + if con.Type == engine.ConstraintUnique && len(con.Columns) == 1 && con.Columns[0] == col.Name { + arbiterIndex = &IndexColumnConstraint{ + Table: table.Name, + Column: col.Name, + ConstraintType: UniqueConstraintIndex, + } + break + } } } } @@ -2210,33 +2426,59 @@ func (s *scopeContext) buildUpsert(node *parse.UpsertClause, table *types.Table, return nil, fmt.Errorf(`%w: conflict column "%s" must have a unique index or be a primary key`, ErrIllegalConflictArbiter, node.ConflictColumns[0]) } default: + // colsMatch checks if two slices of strings are equal + // It is order-independent + colsMatch := func(want, have []string) bool { + if len(want) != len(have) { + return false + } + + found := make(map[string]struct{}, len(want)) + for _, col := range want { + found[col] = struct{}{} + } + + for _, col := range have { + _, ok := found[col] + if !ok { + return false + } + } + + return true + } + + found := false + // check all indexes for a unique or pk index that matches the columns for _, idx := range table.Indexes { - if idx.Type != types.UNIQUE_BTREE && idx.Type != types.PRIMARY { + if idx.Type != engine.UNIQUE_BTREE && idx.Type != engine.PRIMARY { continue } - if len(idx.Columns) != len(node.ConflictColumns) { + if !colsMatch(node.ConflictColumns, idx.Columns) { continue } - inIdxCols := make(map[string]struct{}, len(idx.Columns)) - for _, col := range idx.Columns { - inIdxCols[col] = struct{}{} + arbiterIndex = &IndexNamed{ + Name: idx.Name, } + break + } - hasAllCols := true - for _, col := range node.ConflictColumns { - _, ok := inIdxCols[col] - if !ok { - hasAllCols = false - break + if !found { + // check all constraints for a unique constraint that matches the columns + for name, con := range table.Constraints { + if con.Type != engine.ConstraintUnique { + continue + } + + if !colsMatch(node.ConflictColumns, con.Columns) { + continue } - } - if hasAllCols { arbiterIndex = &IndexNamed{ - Name: idx.Name, + Name: name, } break } @@ -2272,7 +2514,7 @@ func (s *scopeContext) buildUpsert(node *parse.UpsertClause, table *types.Table, // we need to use the tuples to 
create a "excluded" relation // https://www.jooq.org/doc/latest/manual/sql-building/sql-statements/insert-statement/insert-on-conflict-excluded/ - excluded := tuples.Relation() + excluded := insertFrom.Relation() for _, col := range excluded.Fields { col.Parent = "excluded" } @@ -2286,7 +2528,7 @@ func (s *scopeContext) buildUpsert(node *parse.UpsertClause, table *types.Table, } if node.UpdateWhere != nil { - conflictFilter, field, err := s.expr(node.UpdateWhere, referenceRel) + conflictFilter, field, err := s.expr(node.UpdateWhere, referenceRel, nil) if err != nil { return nil, err } @@ -2310,26 +2552,24 @@ func (s *scopeContext) buildUpsert(node *parse.UpsertClause, table *types.Table, // if the columns are nullable. If they are not nullable, it returns an error. // If they are nullable, it returns their data types in the order that they // were passed in. -func checkNullableColumns(tbl *types.Table, cols []string) ([]*types.DataType, error) { +func checkNullableColumns(tbl *engine.Table, cols []string) ([]*types.DataType, error) { specifiedColSet := make(map[string]struct{}, len(cols)) for _, col := range cols { specifiedColSet[col] = struct{}{} } - pks, err := tbl.GetPrimaryKey() - if err != nil { - return nil, err - } - pkSet := make(map[string]struct{}, len(pks)) - for _, pk := range pks { - pkSet[pk] = struct{}{} + pkSet := make(map[string]struct{}) + for _, col := range tbl.Columns { + if col.IsPrimaryKey { + pkSet[col.Name] = struct{}{} + } } // we will build a set of columns to decrease the time complexity // for checking if a column is in the set. tblColSet := make(map[string]*types.DataType, len(tbl.Columns)) for _, col := range tbl.Columns { - tblColSet[col.Name] = col.Type.Copy() + tblColSet[col.Name] = col.DataType.Copy() _, ok := specifiedColSet[col.Name] if ok { @@ -2337,7 +2577,10 @@ func checkNullableColumns(tbl *types.Table, cols []string) ([]*types.DataType, e } // the column is not in the set, so we need to check if it is nullable - if col.HasAttribute(types.NOT_NULL) || col.HasAttribute(types.PRIMARY_KEY) { + // All columns are nullable unless they are specified as not null + // or they are part of the primary key. + _, isPk := pkSet[col.Name] + if !col.Nullable && isPk { return nil, fmt.Errorf(`%w: column "%s" must be specified as an insert column`, ErrNotNullableColumn, col.Name) } @@ -2370,8 +2613,7 @@ func checkNullableColumns(tbl *types.Table, cols []string) ([]*types.DataType, e // between the target and the FROM + JOIN tables, and an error if one occurred. 
func (s *scopeContext) cartesian(targetTable, alias string, from parse.Table, joins []*parse.Join, filter parse.Expression) (plan Plan, targetRel *Relation, cartesianRel *Relation, err error) { - - tbl, ok := s.plan.Schema.FindTable(targetTable) + tbl, ok := s.plan.Tables("", targetTable) if !ok { return nil, nil, nil, fmt.Errorf(`unknown table "%s"`, targetTable) } @@ -2386,6 +2628,7 @@ func (s *scopeContext) cartesian(targetTable, alias string, from parse.Table, jo // plan the target table var targetPlan Plan = &Scan{ Source: &TableScanSource{ + Namespace: s.plan.defaultNamespace, // target table of update or delete is always in the default namespace TableName: targetTable, Type: TableSourcePhysical, rel: rel.Copy(), @@ -2396,7 +2639,7 @@ func (s *scopeContext) cartesian(targetTable, alias string, from parse.Table, jo // if there is no FROM clause, we can simply apply the filter and return if from == nil { if filter != nil { - expr, field, err := s.expr(filter, rel) + expr, field, err := s.expr(filter, rel, nil) if err != nil { return nil, nil, nil, err } @@ -2449,7 +2692,7 @@ func (s *scopeContext) cartesian(targetTable, alias string, from parse.Table, jo rel = joinRels(fromRel, rel) - expr, field, err := s.expr(filter, rel) + expr, field, err := s.expr(filter, rel, nil) if err != nil { return nil, nil, nil, err } @@ -2480,7 +2723,7 @@ func (s *scopeContext) assignments(assignments []*parse.UpdateSetClause, targetR return nil, err } - expr, assignType, err := s.expr(assign.Value, referenceRel) + expr, assignType, err := s.expr(assign.Value, referenceRel, nil) if err != nil { return nil, err } diff --git a/parse/planner/logical/planner_test.go b/node/engine/planner/logical/planner_test.go similarity index 72% rename from parse/planner/logical/planner_test.go rename to node/engine/planner/logical/planner_test.go index f2e8274bd..58ebc4e25 100644 --- a/parse/planner/logical/planner_test.go +++ b/node/engine/planner/logical/planner_test.go @@ -2,11 +2,13 @@ package logical_test import ( "errors" + "fmt" "testing" "github.com/kwilteam/kwil-db/core/types" - "github.com/kwilteam/kwil-db/parse" - "github.com/kwilteam/kwil-db/parse/planner/logical" + "github.com/kwilteam/kwil-db/node/engine" + "github.com/kwilteam/kwil-db/node/engine/parse" + "github.com/kwilteam/kwil-db/node/engine/planner/logical" "github.com/stretchr/testify/require" ) @@ -24,7 +26,7 @@ func Test_Planner(t *testing.T) { { name: "basic select", sql: "select 1", - wt: "Return: ?column? [int]\n" + + wt: "Return: ?column? [int8]\n" + "└─Project: 1\n" + " └─Empty Scan\n", }, @@ -37,10 +39,17 @@ func Test_Planner(t *testing.T) { objects: map[string]map[string]*types.DataType{ "$a": {"b": types.IntType}, }, - wt: "Return: ?column? [int], c1 [int[]]\n" + + wt: "Return: ?column? [int8], c1 [int8[]]\n" + "└─Project: $a.b; $c AS c1\n" + " └─Empty Scan\n", }, + { + name: "select array", + sql: "select ARRAY[1, 2, 3]", + wt: "Return: ?column? 
[int8[]]\n" + + "└─Project: [1, 2, 3]\n" + + " └─Empty Scan\n", + }, { name: "select with filter", sql: "select id, name from users where age > 18", @@ -156,23 +165,24 @@ func Test_Planner(t *testing.T) { }, { name: "aggregate with group by", - sql: "select name, sum(age) from users where name = 'a' group by name having sum(age)::int > 100", + sql: "select name, sum(age) from users where name = 'a' group by name having sum(age)::int8 > 100", wt: "Return: name [text], sum [decimal(1000,0)]\n" + "└─Project: {#ref(A)}; {#ref(B)}\n" + - " └─Filter: {#ref(B)}::int > 100\n" + + " └─Filter: {#ref(B)}::int8 > 100\n" + " └─Aggregate [{#ref(A) = users.name}]: {#ref(B) = sum(users.age)}\n" + " └─Filter: users.name = 'a'\n" + " └─Scan Table: users [physical]\n", }, { name: "complex group by and aggregate", - sql: "select sum(u.age)::int/(p.created_at/100) as res from users u inner join posts p on u.id=p.owner_id group by (p.created_at/100)", - wt: "Return: res [int]\n" + - "└─Project: {#ref(B)}::int / {#ref(A)} AS res\n" + - " └─Aggregate [{#ref(A) = p.created_at / 100}]: {#ref(B) = sum(u.age)}\n" + - " └─Join [inner]: u.id = p.owner_id\n" + - " ├─Scan Table [alias=\"u\"]: users [physical]\n" + - " └─Scan Table [alias=\"p\"]: posts [physical]\n", + sql: "select sum(u.age)::int/(p.created_at/100) as res from users u inner join posts p on u.id=p.owner_id group by (p.created_at/100) having (p.created_at/100)>10", + wt: "Return: res [int8]\n" + + "└─Project: {#ref(B)}::int8 / {#ref(A)} AS res\n" + + " └─Filter: {#ref(A)} > 10\n" + + " └─Aggregate [{#ref(A) = p.created_at / 100}]: {#ref(B) = sum(u.age)}\n" + + " └─Join [inner]: u.id = p.owner_id\n" + + " ├─Scan Table [alias=\"u\"]: users [physical]\n" + + " └─Scan Table [alias=\"p\"]: posts [physical]\n", }, { name: "invalid group by column", @@ -186,13 +196,13 @@ func Test_Planner(t *testing.T) { }, { name: "aggregate in where clause", - sql: "select sum(age) from users where sum(age)::int > 100", - err: logical.ErrAggregateInWhere, + sql: "select sum(age) from users where sum(age)::int8 > 100", + err: logical.ErrIllegalAggregate, }, { name: "complex group by", sql: "select age/2, age*3 from users group by age/2, age*3", - wt: "Return: ?column? [int], ?column? [int]\n" + + wt: "Return: ?column? [int8], ?column? [int8]\n" + "└─Project: {#ref(A)}; {#ref(B)}\n" + " └─Aggregate [{#ref(A) = users.age / 2}] [{#ref(B) = users.age * 3}]\n" + " └─Scan Table: users [physical]\n", @@ -200,7 +210,7 @@ func Test_Planner(t *testing.T) { { name: "select * with group by", sql: "select * from users group by name, age, id", - wt: "Return: id [uuid], name [text], age [int]\n" + + wt: "Return: id [uuid], name [text], age [int8]\n" + "└─Project: {#ref(C)}; {#ref(A)}; {#ref(B)}\n" + " └─Aggregate [{#ref(A) = users.name}] [{#ref(B) = users.age}] [{#ref(C) = users.id}]\n" + " └─Scan Table: users [physical]\n", @@ -212,17 +222,17 @@ func Test_Planner(t *testing.T) { }, { name: "complex having", - sql: "select name, sum(age/2)+sum(age*10) from users group by name having sum(age)::int > 100 or sum(age/2)::int > 10", + sql: "select name, sum(age/2)+sum(age*10) from users group by name having sum(age)::int8 > 100 or sum(age/2)::int8 > 10", wt: "Return: name [text], ?column? 
[decimal(1000,0)]\n" + "└─Project: {#ref(A)}; {#ref(C)} + {#ref(D)}\n" + - " └─Filter: {#ref(B)}::int > 100 OR {#ref(C)}::int > 10\n" + - " └─Aggregate [{#ref(A) = users.name}]: {#ref(D) = sum(users.age * 10)}; {#ref(C) = sum(users.age / 2)}; {#ref(B) = sum(users.age)}\n" + + " └─Filter: {#ref(B)}::int8 > 100 OR {#ref(C)}::int8 > 10\n" + + " └─Aggregate [{#ref(A) = users.name}]: {#ref(B) = sum(users.age)}; {#ref(C) = sum(users.age / 2)}; {#ref(D) = sum(users.age * 10)}\n" + " └─Scan Table: users [physical]\n", }, { name: "duplicate group by columns", sql: "select name, age from users group by name, name, age", - wt: "Return: name [text], age [int]\n" + + wt: "Return: name [text], age [int8]\n" + "└─Project: {#ref(A)}; {#ref(B)}\n" + " └─Aggregate [{#ref(A) = users.name}] [{#ref(B) = users.age}]\n" + " └─Scan Table: users [physical]\n", @@ -241,29 +251,39 @@ func Test_Planner(t *testing.T) { "$id": types.IntType, "$name": types.TextType, }, - sql: `select c.brand, pu.content, u.name, u2.id, count(p.id) from users u + sql: `select u.name, u2.id, count(p.id) from users u inner join posts p on u.id = p.owner_id - left join owned_cars['dbid', 'proc']($id) c on c.owner_name = u.name - right join posts_by_user($name) pu on pu.content = p.content full join (select id from users where age > 18) u2 on u2.id = u.id - group by c.brand, pu.content, u.name, u2.id;`, - wt: "Return: brand [text], content [text], name [text], id [uuid], count [int]\n" + - "└─Project: {#ref(A)}; {#ref(B)}; {#ref(C)}; {#ref(D)}; {#ref(E)}\n" + - " └─Aggregate [{#ref(A) = c.brand}] [{#ref(B) = pu.content}] [{#ref(C) = u.name}] [{#ref(D) = u2.id}]: {#ref(E) = count(p.id)}\n" + + group by u.name, u2.id;`, + wt: "Return: name [text], id [uuid], count [int8]\n" + + "└─Project: {#ref(A)}; {#ref(B)}; {#ref(C)}\n" + + " └─Aggregate [{#ref(A) = u.name}] [{#ref(B) = u2.id}]: {#ref(C) = count(p.id)}\n" + " └─Join [outer]: u2.id = u.id\n" + - " ├─Join [right]: pu.content = p.content\n" + - " │ ├─Join [left]: c.owner_name = u.name\n" + - " │ │ ├─Join [inner]: u.id = p.owner_id\n" + - " │ │ │ ├─Scan Table [alias=\"u\"]: users [physical]\n" + - " │ │ │ └─Scan Table [alias=\"p\"]: posts [physical]\n" + - " │ │ └─Scan Procedure [alias=\"c\"]: [foreign=true] [dbid='dbid'] [proc='proc'] owned_cars($id)\n" + - " │ └─Scan Procedure [alias=\"pu\"]: [foreign=false] posts_by_user($name)\n" + + " ├─Join [inner]: u.id = p.owner_id\n" + + " │ ├─Scan Table [alias=\"u\"]: users [physical]\n" + + " │ └─Scan Table [alias=\"p\"]: posts [physical]\n" + " └─Scan Subquery [alias=\"u2\"]: [subplan_id=0] (uncorrelated)\n" + "Subplan [subquery] [id=0]\n" + "└─Project: users.id\n" + " └─Filter: users.age > 18\n" + " └─Scan Table: users [physical]\n", }, + { + name: "basic inline window", + sql: "select name, sum(age) over (partition by name) from users", + wt: "Return: name [text], sum [decimal(1000,0)]\n" + + "└─Project: users.name; {#ref(A)}\n" + + " └─Window [partition_by=users.name]: {#ref(A) = sum(users.age)}\n" + + " └─Scan Table: users [physical]\n", + }, + { + name: "named window used several times", + sql: "select name, sum(age) over w1, array_agg(name) over w1 from users window w1 as (partition by name order by age)", + wt: "Return: name [text], sum [decimal(1000,0)], array_agg [text[]]\n" + + "└─Project: users.name; {#ref(A)}; {#ref(B)}\n" + + " └─Window [partition_by=users.name] [order_by=users.age asc nulls last]: {#ref(A) = sum(users.age)}; {#ref(B) = array_agg(users.name)}\n" + + " └─Scan Table: users [physical]\n", + }, { name: "common table 
expressions", sql: `with a (id2, name2) as (select id, name from users), @@ -327,7 +347,7 @@ func Test_Planner(t *testing.T) { select id, content from posts order by name desc;`, wt: "Return: id [uuid], name [text]\n" + - "└─Sort: [name] desc nulls last\n" + + "└─Sort: name desc nulls last\n" + " └─Set: union\n" + " ├─Project: users.id; users.name\n" + " │ └─Scan Table: users [physical]\n" + @@ -337,11 +357,30 @@ func Test_Planner(t *testing.T) { { name: "sort", sql: "select name, age from users order by name desc nulls last, id asc", - wt: "Return: name [text], age [int]\n" + + wt: "Return: name [text], age [int8]\n" + "└─Project: users.name; users.age\n" + - " └─Sort: [users.name] desc nulls last; [users.id] asc nulls last\n" + + " └─Sort: users.name desc nulls last; users.id asc nulls last\n" + " └─Scan Table: users [physical]\n", }, + { + name: "sort with group by", + sql: "select name, sum(age) from users group by name order by name, sum(age)", + wt: "Return: name [text], sum [decimal(1000,0)]\n" + + "└─Project: {#ref(A)}; {#ref(B)}\n" + + " └─Sort: {#ref(A)} asc nulls last; {#ref(B)} asc nulls last\n" + + " └─Aggregate [{#ref(A) = users.name}]: {#ref(B) = sum(users.age)}\n" + + " └─Scan Table: users [physical]\n", + }, + { + // unlike the above, this tests that we can make new aggregate references from the ORDER BY clause + name: "sort with aggregate", + sql: "select name from users group by name order by sum(age)", + wt: "Return: name [text]\n" + + "└─Project: {#ref(A)}\n" + + " └─Sort: {#ref(B)} asc nulls last\n" + + " └─Aggregate [{#ref(A) = users.name}]: {#ref(B) = sum(users.age)}\n" + + " └─Scan Table: users [physical]\n", + }, { name: "sort invalid column", sql: "select name, age from users order by wallet", @@ -350,7 +389,7 @@ func Test_Planner(t *testing.T) { { name: "limit and offset", sql: "select name, age from users limit 10 offset 5", - wt: "Return: name [text], age [int]\n" + + wt: "Return: name [text], age [int8]\n" + "└─Project: users.name; users.age\n" + " └─Limit [offset=5]: 10\n" + " └─Scan Table: users [physical]\n", @@ -358,22 +397,15 @@ func Test_Planner(t *testing.T) { { name: "distinct", sql: "select distinct name, age from users", - wt: "Return: name [text], age [int]\n" + + wt: "Return: name [text], age [int8]\n" + "└─Distinct\n" + " └─Project: users.name; users.age\n" + " └─Scan Table: users [physical]\n", }, - { - name: "scalar function, procedure, foreign procedure", - sql: "select car_count['dbid', 'proc'](id), post_count(id), abs(age) from users", - wt: "Return: car_count [int], post_count [int], abs [int]\n" + - "└─Project: car_count['dbid', 'proc'](users.id); post_count(users.id); abs(users.age)\n" + - " └─Scan Table: users [physical]\n", - }, { name: "distinct aggregate", sql: "select count(distinct name), sum(age) from users", - wt: "Return: count [int], sum [decimal(1000,0)]\n" + + wt: "Return: count [int8], sum [decimal(1000,0)]\n" + "└─Project: {#ref(A)}; {#ref(B)}\n" + " └─Aggregate: {#ref(A) = count(distinct users.name)}; {#ref(B) = sum(users.age)}\n" + " └─Scan Table: users [physical]\n", @@ -381,16 +413,16 @@ func Test_Planner(t *testing.T) { { name: "unary and alias", sql: "select age as pos_age, -age from users", - wt: "Return: pos_age [int], ?column? [int]\n" + + wt: "Return: pos_age [int8], ?column? 
[int8]\n" + "└─Project: users.age AS pos_age; -users.age\n" + " └─Scan Table: users [physical]\n", }, { name: "order by alias", sql: "select age as pos_age from users order by pos_age", - wt: "Return: pos_age [int]\n" + + wt: "Return: pos_age [int8]\n" + "└─Project: users.age AS pos_age\n" + - " └─Sort: [pos_age] asc nulls last\n" + + " └─Sort: pos_age asc nulls last\n" + " └─Scan Table: users [physical]\n", }, { @@ -444,7 +476,11 @@ func Test_Planner(t *testing.T) { }, { name: "update from with join", - sql: "update users set name = pu.content from posts p inner join posts_by_user('satoshi') pu on p.content = pu.content where p.owner_id = users.id", + sql: `update users set name = pu.content from posts p inner join ( + select p.content from posts p + inner join users u on p.owner_id = u.id + where u.name = 'satoshi' + ) pu on p.content = pu.content where p.owner_id = users.id`, // will be unoptimized, so it will use a cartesian product // optimization could re-write the filter to a join, as well as // add projections. @@ -454,11 +490,17 @@ func Test_Planner(t *testing.T) { " ├─Scan Table: users [physical]\n" + " └─Join [inner]: p.content = pu.content\n" + " ├─Scan Table [alias=\"p\"]: posts [physical]\n" + - " └─Scan Procedure [alias=\"pu\"]: [foreign=false] posts_by_user('satoshi')\n", + " └─Scan Subquery [alias=\"pu\"]: [subplan_id=0] (uncorrelated)\n" + + "Subplan [subquery] [id=0]\n" + + "└─Project: p.content\n" + + " └─Filter: u.name = 'satoshi'\n" + + " └─Join [inner]: p.owner_id = u.id\n" + + " ├─Scan Table [alias=\"p\"]: posts [physical]\n" + + " └─Scan Table [alias=\"u\"]: users [physical]\n", }, { name: "update with from without where", - sql: "update users set name = pu.content from posts_by_user('satoshi') pu", + sql: "update users set name = pu.content from posts pu", err: logical.ErrUpdateOrDeleteWithoutWhere, }, { @@ -471,13 +513,13 @@ func Test_Planner(t *testing.T) { { name: "insert", sql: "insert into users values ('123e4567-e89b-12d3-a456-426614174000'::uuid, 'satoshi', 1), ('123e4567-e89b-12d3-a456-426614174001'::uuid, 'satoshi2', 2)", - wt: "Insert [users]: id [uuid], name [text], age [int]\n" + + wt: "Insert [users]: id [uuid], name [text], age [int8]\n" + "└─Values: ('123e4567-e89b-12d3-a456-426614174000'::uuid, 'satoshi', 1); ('123e4567-e89b-12d3-a456-426614174001'::uuid, 'satoshi2', 2)\n", }, { name: "insert with null", sql: "insert into users (id, name) values ('123e4567-e89b-12d3-a456-426614174000'::uuid, 'satoshi')", - wt: "Insert [users]: id [uuid], name [text], age [int]\n" + + wt: "Insert [users]: id [uuid], name [text], age [int8]\n" + "└─Values: ('123e4567-e89b-12d3-a456-426614174000'::uuid, 'satoshi', NULL)\n", }, { @@ -488,28 +530,28 @@ func Test_Planner(t *testing.T) { { name: "on conflict do nothing", sql: "insert into users values ('123e4567-e89b-12d3-a456-426614174000'::uuid, 'satoshi', 1) on conflict do nothing", - wt: "Insert [users]: id [uuid], name [text], age [int]\n" + + wt: "Insert [users]: id [uuid], name [text], age [int8]\n" + "├─Values: ('123e4567-e89b-12d3-a456-426614174000'::uuid, 'satoshi', 1)\n" + "└─Conflict [nothing]\n", }, { name: "on conflict do update (arbiter index primary key)", sql: "insert into users values ('123e4567-e89b-12d3-a456-426614174000'::uuid, 'satoshi', 1) on conflict (id) do update set name = 'satoshi'", - wt: "Insert [users]: id [uuid], name [text], age [int]\n" + + wt: "Insert [users]: id [uuid], name [text], age [int8]\n" + "├─Values: ('123e4567-e89b-12d3-a456-426614174000'::uuid, 'satoshi', 1)\n" + "└─Conflict 
[update] [arbiter=users.id (primary key)]: [name = 'satoshi']\n", }, { name: "on conflict do update (arbiter unique constraint)", sql: "insert into posts values ('123e4567-e89b-12d3-a456-426614174000'::uuid, '123e4567-e89b-12d3-a456-426614174001'::uuid, 'hello', 1) on conflict (content) do update set owner_id = '123e4567-e89b-12d3-a456-426614174001'::uuid", - wt: "Insert [posts]: id [uuid], owner_id [uuid], content [text], created_at [int]\n" + + wt: "Insert [posts]: id [uuid], owner_id [uuid], content [text], created_at [int8]\n" + "├─Values: ('123e4567-e89b-12d3-a456-426614174000'::uuid, '123e4567-e89b-12d3-a456-426614174001'::uuid, 'hello', 1)\n" + "└─Conflict [update] [arbiter=posts.content (unique)]: [owner_id = '123e4567-e89b-12d3-a456-426614174001'::uuid]\n", }, { name: "on conflict do update (arbiter index non-primary key)", sql: "insert into users values ('123e4567-e89b-12d3-a456-426614174000'::uuid, 'satoshi', 1) on conflict (name) do update set name = 'satoshi' WHERE users.age = 1", - wt: "Insert [users]: id [uuid], name [text], age [int]\n" + + wt: "Insert [users]: id [uuid], name [text], age [int8]\n" + "├─Values: ('123e4567-e89b-12d3-a456-426614174000'::uuid, 'satoshi', 1)\n" + "└─Conflict [update] [arbiter=name_idx (index)]: [name = 'satoshi'] where [users.age = 1]\n", }, @@ -529,16 +571,16 @@ func Test_Planner(t *testing.T) { err: logical.ErrIllegalConflictArbiter, }, { - name: "conflict on compound unique index", + name: "conflict on composite unique index", sql: "insert into posts values ('123e4567-e89b-12d3-a456-426614174000'::uuid, '123e4567-e89b-12d3-a456-426614174001'::uuid, 'hello', 1) on conflict (owner_id, created_at) do update set content = 'hello'", - wt: "Insert [posts]: id [uuid], owner_id [uuid], content [text], created_at [int]\n" + + wt: "Insert [posts]: id [uuid], owner_id [uuid], content [text], created_at [int8]\n" + "├─Values: ('123e4567-e89b-12d3-a456-426614174000'::uuid, '123e4567-e89b-12d3-a456-426614174001'::uuid, 'hello', 1)\n" + "└─Conflict [update] [arbiter=owner_created_idx (index)]: [content = 'hello']\n", }, { name: "excluded clause", sql: "insert into users (id, name) values ('123e4567-e89b-12d3-a456-426614174000'::uuid, 'satoshi') on conflict (id) do update set name = excluded.name where (excluded.age/2) = 0", - wt: "Insert [users]: id [uuid], name [text], age [int]\n" + + wt: "Insert [users]: id [uuid], name [text], age [int8]\n" + "├─Values: ('123e4567-e89b-12d3-a456-426614174000'::uuid, 'satoshi', NULL)\n" + "└─Conflict [update] [arbiter=users.id (primary key)]: [name = excluded.name] where [excluded.age / 2 = 0]\n", }, @@ -548,18 +590,56 @@ func Test_Planner(t *testing.T) { sql: "insert into users values ('123e4567-e89b-12d3-a456-426614174000'::uuid, 'satoshi', 1) on conflict (name) do update set name = 'satoshi' WHERE age = 1", err: logical.ErrAmbiguousColumn, }, + { + name: "insert with select", + sql: "insert into users select * from users", + wt: "Insert [users]: id [uuid], name [text], age [int8]\n" + + "└─Project: users.id; users.name; users.age\n" + + " └─Scan Table: users [physical]\n", + }, + { + name: "recursive CTE", + sql: `with recursive r as ( + select 1 as n + union all + select n+1 from r where n < 10 + ) + select * from r;`, + wt: "Return: n [int8]\n" + + "└─Project: r.n\n" + + " └─Scan Table: r [cte]\n" + + "Subplan [recursive cte] [id=r] [r.n -> n]\n" + + "└─Set: union all\n" + + " ├─Project: 1 AS n\n" + + " │ └─Empty Scan\n" + + " └─Project: r.n + 1\n" + + " └─Filter: r.n < 10\n" + + " └─Scan Table: r [cte]\n", + }, } 
for _, test := range tests { t.Run(test.name, func(t *testing.T) { - schema, err := parse.Parse([]byte(testSchema)) + parsedSql, err := parse.Parse(test.sql) require.NoError(t, err) + require.Len(t, parsedSql, 1) - parsedSql, err := parse.ParseSQL(test.sql, schema, true) - require.NoError(t, err) - require.NoError(t, parsedSql.ParseErrs.Err()) + sqlPlan, ok := parsedSql[0].(*parse.SQLStatement) + require.True(t, ok) - plan, err := logical.CreateLogicalPlan(parsedSql.AST, schema, test.vars, test.objects) + plan, err := logical.CreateLogicalPlan(sqlPlan, + func(namespace, tableName string) (table *engine.Table, found bool) { + table, found = testTables[tableName] + return table, found + }, func(varName string) (dataType *types.DataType, found bool) { + dataType, found = test.vars[varName] + return dataType, found + }, + func(objName string) (obj map[string]*types.DataType, found bool) { + obj, ok := test.objects[objName] + return obj, ok + }, + false, "") if test.err != nil { require.Error(t, err) @@ -571,6 +651,16 @@ func Test_Planner(t *testing.T) { require.ErrorIs(t, err, test.err) } else { require.NoError(t, err) + + if plan.Format() != test.wt { + fmt.Println(test.name) + fmt.Println("Expected:") + fmt.Println(test.wt) + fmt.Println("----") + fmt.Println("Received:") + fmt.Println(plan.Format()) + } + require.Equal(t, test.wt, plan.Format()) // check that Relation() works @@ -587,40 +677,72 @@ func Test_Planner(t *testing.T) { } } -// special error for testing that will match any error -var errAny = errors.New("any error") - -var testSchema = `database planner; - -table users { - id uuid primary key, - name text, - age int max(150), - #name_idx unique(name), - #age_idx index(age) -} - -table posts { - id uuid primary key, - owner_id uuid not null, - content text maxlen(300) unique, - created_at int not null, - foreign key (owner_id) references users(id) on delete cascade on update cascade, - #owner_created_idx unique(owner_id, created_at) -} - -procedure posts_by_user($name text) public view returns table(content text) { - return select p.content from posts p - inner join users u on p.owner_id = u.id - where u.name = $name; -} - -procedure post_count($id uuid) public view returns (int) { - for $row in select count(*) as count from posts where owner_id = $id { - return $row.count; - } +var testTables = map[string]*engine.Table{ + "users": { + Name: "users", + Columns: []*engine.Column{ + { + Name: "id", + DataType: types.UUIDType, + IsPrimaryKey: true, + }, + { + Name: "name", + DataType: types.TextType, + }, + { + Name: "age", + DataType: types.IntType, + }, + }, + Indexes: []*engine.Index{ + { + Name: "name_idx", + Type: engine.UNIQUE_BTREE, + Columns: []string{ + "name", + }, + }, + }, + }, + "posts": { + Name: "posts", + Columns: []*engine.Column{ + { + Name: "id", + DataType: types.UUIDType, + IsPrimaryKey: true, + }, + { + Name: "owner_id", + DataType: types.UUIDType, + }, + { + Name: "content", + DataType: types.TextType, + }, + { + Name: "created_at", + DataType: types.IntType, + }, + }, + Constraints: map[string]*engine.Constraint{ + "content_unique": { + Type: engine.ConstraintUnique, + Columns: []string{ + "content", + }, + }, + "owner_created_idx": { + Type: engine.ConstraintUnique, + Columns: []string{ + "owner_id", + "created_at", + }, + }, + }, + }, } -foreign procedure owned_cars($id int) returns table(owner_name text, brand text, model text) -foreign procedure car_count($id uuid) returns (int) -` +// special error for testing that will match any error +var errAny = 
errors.New("any error") diff --git a/parse/planner/logical/relation.go b/node/engine/planner/logical/relation.go similarity index 86% rename from parse/planner/logical/relation.go rename to node/engine/planner/logical/relation.go index df6e2322f..9828a767c 100644 --- a/parse/planner/logical/relation.go +++ b/node/engine/planner/logical/relation.go @@ -5,6 +5,7 @@ import ( "strings" "github.com/kwilteam/kwil-db/core/types" + "github.com/kwilteam/kwil-db/node/engine" ) // Relation is the current relation in the query plan. @@ -92,18 +93,47 @@ func (r *Relation) FindReference(id string) (*Field, error) { return found[0], nil } -func relationFromTable(tbl *types.Table) *Relation { +func relationFromTable(tbl *engine.Table) *Relation { s := &Relation{} for _, col := range tbl.Columns { s.Fields = append(s.Fields, &Field{ Parent: tbl.Name, Name: col.Name, - val: col.Type.Copy(), + val: col.DataType.Copy(), }) } return s } +// equalShape checks that two relations have the same shape. +// This means that they have the same number of fields and that +// the fields have the same data types. The fields do NOT need +// to have the same names. If the relations do not have the same +// shape, an error is returned. +func equalShape(r1, r2 *Relation) error { + if len(r1.Fields) != len(r2.Fields) { + return fmt.Errorf("relations do not have the same number of fields") + } + + for i, f := range r1.Fields { + scalarR, err := f.Scalar() + if err != nil { + return err + } + + scalarL, err := r2.Fields[i].Scalar() + if err != nil { + return err + } + + if !scalarR.Equals(scalarL) { + return fmt.Errorf("field %d (named %s and %s) does not have the same data type", i, f.Name, r2.Fields[i].Name) + } + } + + return nil +} + // Field is a field in a relation. // Parent and Name can be empty, if the expression // is a constant. If this is the last expression in a relation, @@ -268,13 +298,13 @@ func joinRels(rels ...*Relation) *Relation { // dataTypes returns a slice of data types from a slice of fields. func dataTypes(fields []*Field) ([]*types.DataType, error) { - var types []*types.DataType + var types1 []*types.DataType for _, f := range fields { dt, err := f.Scalar() if err != nil { return nil, err } - types = append(types, dt) + types1 = append(types1, dt) } - return types, nil + return types1, nil } diff --git a/parse/planner/logical/rewriter.go b/node/engine/planner/logical/rewriter.go similarity index 96% rename from parse/planner/logical/rewriter.go rename to node/engine/planner/logical/rewriter.go index 6e1be1dcc..732fc835f 100644 --- a/parse/planner/logical/rewriter.go +++ b/node/engine/planner/logical/rewriter.go @@ -376,7 +376,7 @@ func (r *rewriteVisitor) VisitDelete(p0 *Delete) any { func (r *rewriteVisitor) VisitInsert(p0 *Insert) any { return r.plan(p0, func() { - p0.Values = p0.Values.Accept(r).(*Tuples) + p0.InsertionValues = p0.InsertionValues.Accept(r).(*Tuples) }, func() { if p0.ConflictResolution != nil { @@ -414,6 +414,25 @@ func (r *rewriteVisitor) VisitTuples(p0 *Tuples) any { return p0 } +func (r *rewriteVisitor) VisitWindow(p0 *Window) any { + return r.plan(p0, + func() { p0.Child = p0.Child.Accept(r).(Plan) }, + func() { r.slice(p0.PartitionBy) }, + func() { r.slice(p0.OrderBy) }, + ) +} + +func (r *rewriteVisitor) VisitWindowFunction(p0 *WindowFunction) any { + return r.expr(p0, + func() { r.slice(p0.Args) }, + func() { + if p0.Filter != nil { + p0.Filter = p0.Filter.Accept(r).(Expression) + } + }, + ) +} + // execFields executes the given fields in the correct order. 
func (r *rewriteVisitor) execFields(fields []func()) { if r.reverseFieldOrder { diff --git a/parse/planner/optimizer/pushdown.go b/node/engine/planner/optimizer/pushdown.go similarity index 96% rename from parse/planner/optimizer/pushdown.go rename to node/engine/planner/optimizer/pushdown.go index bc52910f0..657bc8c69 100644 --- a/parse/planner/optimizer/pushdown.go +++ b/node/engine/planner/optimizer/pushdown.go @@ -4,7 +4,7 @@ import ( "errors" "fmt" - "github.com/kwilteam/kwil-db/parse/planner/logical" + "github.com/kwilteam/kwil-db/node/engine/planner/logical" ) // PushdownPredicates pushes down filters to the lowest possible level. @@ -223,6 +223,18 @@ func push(n logical.Plan, expr logical.Expression) (logical.Plan, error) { return nil, err } + n.Child = res + return n, nil + case *logical.Window: + if expr != nil { + return nil, errCannotPush + } + + res, err := push(n.Child, nil) + if err != nil { + return nil, err + } + n.Child = res return n, nil case *logical.SetOperation: @@ -273,12 +285,12 @@ func push(n logical.Plan, expr logical.Expression) (logical.Plan, error) { return nil, errCannotPush } - res, err := push(n.Values, nil) + res, err := push(n.InsertionValues, nil) if err != nil { return nil, err } - n.Values = res.(*logical.Tuples) + n.InsertionValues = res.(*logical.Tuples) if n.ConflictResolution != nil { res, err = push(n.ConflictResolution, nil) diff --git a/parse/planner/optimizer/pushdown_test.go b/node/engine/planner/optimizer/pushdown_test.go similarity index 65% rename from parse/planner/optimizer/pushdown_test.go rename to node/engine/planner/optimizer/pushdown_test.go index 278d94244..805233cbb 100644 --- a/parse/planner/optimizer/pushdown_test.go +++ b/node/engine/planner/optimizer/pushdown_test.go @@ -5,8 +5,9 @@ import ( "testing" "github.com/kwilteam/kwil-db/core/types" - "github.com/kwilteam/kwil-db/parse" - "github.com/kwilteam/kwil-db/parse/planner/logical" + "github.com/kwilteam/kwil-db/node/engine" + "github.com/kwilteam/kwil-db/node/engine/parse" + "github.com/kwilteam/kwil-db/node/engine/planner/logical" "github.com/stretchr/testify/require" ) @@ -22,24 +23,24 @@ func Test_Pushdown(t *testing.T) { { name: "simple pushdown", sql: "select * from users where name = 'foo'", - wt: "Return: id [uuid], name [text], age [int]\n" + + wt: "Return: id [uuid], name [text], age [int8]\n" + "└─Project: users.id; users.name; users.age\n" + " └─Scan Table: users [physical] filter=[users.name = 'foo']\n", }, { name: "push down join", - sql: "select * from users u inner join posts p on u.id = p.owner_id where u.name = p.content", - wt: "Return: id [uuid], name [text], age [int], id [uuid], owner_id [uuid], content [text], created_at [int]\n" + - "└─Project: u.id; u.name; u.age; p.id; p.owner_id; p.content; p.created_at\n" + + sql: "select u.* from users u inner join posts p on u.id = p.owner_id where u.name = p.content", + wt: "Return: id [uuid], name [text], age [int8]\n" + + "└─Project: u.id; u.name; u.age\n" + " └─Join [inner]: u.id = p.owner_id AND u.name = p.content\n" + " ├─Scan Table [alias=\"u\"]: users [physical]\n" + " └─Scan Table [alias=\"p\"]: posts [physical]\n", }, { name: "pushdown through join", - sql: "select * from users u inner join posts p on u.id = p.owner_id where u.name = 'foo'", - wt: "Return: id [uuid], name [text], age [int], id [uuid], owner_id [uuid], content [text], created_at [int]\n" + - "└─Project: u.id; u.name; u.age; p.id; p.owner_id; p.content; p.created_at\n" + + sql: "select u.* from users u inner join posts p on u.id = 
p.owner_id where u.name = 'foo'", + wt: "Return: id [uuid], name [text], age [int8]\n" + + "└─Project: u.id; u.name; u.age\n" + " └─Join [inner]: u.id = p.owner_id\n" + " ├─Scan Table [alias=\"u\"]: users [physical] filter=[u.name = 'foo']\n" + " └─Scan Table [alias=\"p\"]: posts [physical]\n", @@ -70,7 +71,7 @@ func Test_Pushdown(t *testing.T) { // there is no need to push down any aggregate function. name: "aggregate having", sql: "select u.id, count(p.id) from users u inner join posts p on u.id = p.owner_id group by u.id having count(p.id) > 10", - wt: "Return: id [uuid], count [int]\n" + + wt: "Return: id [uuid], count [int8]\n" + "└─Project: {#ref(A)}; {#ref(B)}\n" + " └─Filter: {#ref(B)} > 10\n" + " └─Aggregate [{#ref(A) = u.id}]: {#ref(B) = count(p.id)}\n" + @@ -100,14 +101,18 @@ func Test_Pushdown(t *testing.T) { for _, test := range tests { t.Run(test.name, func(t *testing.T) { - schema, err := parse.Parse([]byte(testSchema)) - require.NoError(t, err) - parsedSql, err := parse.ParseSQL(test.sql, schema, true) + parsedSql, err := parse.Parse(test.sql) require.NoError(t, err) - require.NoError(t, parsedSql.ParseErrs.Err()) - plan, err := logical.CreateLogicalPlan(parsedSql.AST, schema, map[string]*types.DataType{}, map[string]map[string]*types.DataType{}) + plan, err := logical.CreateLogicalPlan(parsedSql[0].(*parse.SQLStatement), + func(namespace, tableName string) (table *engine.Table, found bool) { + t, found := testTables[tableName] + return t, found + }, + func(varName string) (dataType *types.DataType, found bool) { return nil, false }, + func(objName string) (obj map[string]*types.DataType, found bool) { return nil, false }, + false, "") require.NoError(t, err) newPlan, err := PushdownPredicates(plan.Plan) @@ -132,37 +137,69 @@ func Test_Pushdown(t *testing.T) { // special error for testing that will match any error var errAny = errors.New("any error") -var testSchema = `database planner; - -table users { - id uuid primary key, - name text, - age int max(150), - #name_idx unique(name), - #age_idx index(age) -} - -table posts { - id uuid primary key, - owner_id uuid not null, - content text maxlen(300) unique, - created_at int not null, - foreign key (owner_id) references users(id) on delete cascade on update cascade, - #owner_created_idx unique(owner_id, created_at) -} - -procedure posts_by_user($name text) public view returns table(content text) { - return select p.content from posts p - inner join users u on p.owner_id = u.id - where u.name = $name; -} - -procedure post_count($id uuid) public view returns (int) { - for $row in select count(*) as count from posts where owner_id = $id { - return $row.count; - } +var testTables = map[string]*engine.Table{ + "users": { + Name: "users", + Columns: []*engine.Column{ + { + Name: "id", + DataType: types.UUIDType, + IsPrimaryKey: true, + }, + { + Name: "name", + DataType: types.TextType, + }, + { + Name: "age", + DataType: types.IntType, + }, + }, + Indexes: []*engine.Index{ + { + Name: "name_idx", + Type: engine.UNIQUE_BTREE, + Columns: []string{ + "name", + }, + }, + }, + }, + "posts": { + Name: "posts", + Columns: []*engine.Column{ + { + Name: "id", + DataType: types.UUIDType, + IsPrimaryKey: true, + }, + { + Name: "owner_id", + DataType: types.UUIDType, + }, + { + Name: "content", + DataType: types.TextType, + }, + { + Name: "created_at", + DataType: types.IntType, + }, + }, + Constraints: map[string]*engine.Constraint{ + "content_unique": { + Type: engine.ConstraintUnique, + Columns: []string{ + "content", + }, + }, + 
"owner_created_idx": { + Type: engine.ConstraintUnique, + Columns: []string{ + "owner_id", + "created_at", + }, + }, + }, + }, } - -foreign procedure owned_cars($id int) returns table(owner_name text, brand text, model text) -foreign procedure car_count($id uuid) returns (int) -` diff --git a/node/engine/testdata/actions.go b/node/engine/testdata/actions.go deleted file mode 100644 index 4cfb676de..000000000 --- a/node/engine/testdata/actions.go +++ /dev/null @@ -1,98 +0,0 @@ -package testdata - -import "github.com/kwilteam/kwil-db/core/types" - -var ( - ActionCreateUser = &types.Action{ - Name: "create_user", - Parameters: []string{"$id", "$username", "$age"}, - Public: true, - Body: "INSERT INTO users (id, username, age, address) VALUES ($id, $username, $age, @caller);", - } - - ActionGetUserByAddress = &types.Action{ - Name: "get_user_by_address", - Parameters: []string{"$address"}, - Public: true, - Modifiers: []types.Modifier{ - types.ModifierView, - }, - Body: "SELECT id, username, age FROM users WHERE address = $address;", - } - - ActionCreatePost = &types.Action{ - Name: "create_post", - Parameters: []string{"$id", "$title", "$content", "$post_date"}, - Public: true, - Body: `INSERT INTO posts (id, title, content, author_id, post_date) VALUES ( - $id, $title, $content, - (SELECT id FROM users WHERE address = @caller LIMIT 1), - $post_date);`, - } - - ActionGetPosts = &types.Action{ - Name: "get_posts", - Parameters: []string{"$username"}, - Public: true, - Modifiers: []types.Modifier{ - types.ModifierView, - }, - Body: `SELECT p.id as id, p.title as title, p.content as content, p.post_date as post_date, u.username as author FROM posts AS p - INNER JOIN users AS u ON p.author_id = u.id - WHERE u.username = $username;`, - } - - // ActionAdminDeleteUser is a procedure that can only be called by the owner of the schema - ActionAdminDeleteUser = &types.Action{ - Name: "admin_delete_user", - Parameters: []string{"$id"}, - Public: true, - Modifiers: []types.Modifier{ - types.ModifierOwner, - }, - Body: "DELETE FROM users WHERE id = $id;", - } - - // ActionCallsPrivate is a procedure that calls a private procedure - ActionCallsPrivate = &types.Action{ - Name: "calls_private", - Parameters: []string{}, - Public: true, - Body: "private_procedure();", - } - - // ActionPrivate is a private procedure - ActionPrivate = &types.Action{ - Name: "private_procedure", - Parameters: []string{}, - Public: false, - Body: "SELECT * FROM users;", - } - - // ActionRecursive is a recursive procedure that should hit a max stack - // depth error before using the system's max stack memory, which is fatal. - ActionRecursive = &types.Action{ - Name: "recursive_procedure", - Parameters: []string{"$id", "$a", "$b"}, - Public: true, - Body: "recursive_procedure($id, $a, $b);", - } - - // ActionRecursiveSneakyA is procedure that calls - // ProcedureRecursiveSneakyB, which calls ActionRecursiveSneakyA, which - // calls ProcedureRecursiveSneakyB, which calls... - ActionRecursiveSneakyA = &types.Action{ - Name: "recursive_procedure_a", - Parameters: []string{}, - Public: true, - Body: "recursive_procedure_b();", - } - - // ActionRecursiveSneakyB is procedure that calls ProcedureRecursiveSneakyA. 
- ActionRecursiveSneakyB = &types.Action{ - Name: "recursive_procedure_b", - Parameters: []string{}, - Public: true, - Body: "recursive_procedure_a();", - } -) diff --git a/node/engine/testdata/extension.go b/node/engine/testdata/extension.go deleted file mode 100644 index 8e8ab775f..000000000 --- a/node/engine/testdata/extension.go +++ /dev/null @@ -1,16 +0,0 @@ -package testdata - -import "github.com/kwilteam/kwil-db/core/types" - -var ( - ExtensionErc20 = &types.Extension{ - Name: "erc20", - Initialization: []*types.ExtensionConfig{ - { - Key: "address", - Value: "0x1234567890", - }, - }, - Alias: "token", - } -) diff --git a/node/engine/testdata/procedures.go b/node/engine/testdata/procedures.go deleted file mode 100644 index 46af74e3b..000000000 --- a/node/engine/testdata/procedures.go +++ /dev/null @@ -1,71 +0,0 @@ -package testdata - -import "github.com/kwilteam/kwil-db/core/types" - -var ( - ProcCreateUser = &types.Procedure{ - Name: "proc_create_user", - Parameters: []*types.ProcedureParameter{ - {Name: "$id", Type: types.IntType}, - {Name: "$username", Type: types.TextType}, - {Name: "$age", Type: types.IntType}, - }, - Public: true, - Body: "INSERT INTO users (id, username, age, address) VALUES ($id, $username, $age, @caller);", - } - - ProcGetUserByAddress = &types.Procedure{ - Name: "proc_get_user_by_address", - Parameters: []*types.ProcedureParameter{ - {Name: "$address", Type: types.TextType}, - }, - Public: true, - Modifiers: []types.Modifier{ - types.ModifierView, - }, - Body: ` - for $row in SELECT id, username, age FROM users WHERE address = $address { - return $row.id, $row.username, $row.age; - } - error('User not found'); - `, - Returns: &types.ProcedureReturn{ - Fields: []*types.NamedType{ - { - Name: "id", - Type: types.IntType, - }, - { - Name: "username", - Type: types.TextType, - }, - { - Name: "age", - Type: types.IntType, - }, - }, - }, - } - - ProcGetUsersByAge = &types.Procedure{ - Name: "proc_get_users_by_age", - Parameters: []*types.ProcedureParameter{ - {Name: "$age", Type: types.IntType}, - }, - Public: true, - Modifiers: []types.Modifier{ - types.ModifierView, - }, - Body: ` - return SELECT id, username, age FROM users WHERE age = $age; - `, - Returns: &types.ProcedureReturn{ - IsTable: true, - Fields: []*types.NamedType{ - {Name: "id", Type: types.IntType}, - {Name: "username", Type: types.TextType}, - {Name: "age", Type: types.IntType}, - }, - }, - } -) diff --git a/node/engine/testdata/schema.go b/node/engine/testdata/schema.go deleted file mode 100644 index 05c965fe8..000000000 --- a/node/engine/testdata/schema.go +++ /dev/null @@ -1,33 +0,0 @@ -package testdata - -import ( - "github.com/kwilteam/kwil-db/core/types" -) - -// TestSchema is a test schema that mocks a social media application -var TestSchema = &types.Schema{ - Name: "test_schema", - Owner: []byte("test_owner"), - Tables: []*types.Table{ - TableUsers, - TablePosts, - }, - Actions: []*types.Action{ - ActionCreateUser, - ActionCreatePost, - ActionGetUserByAddress, - ActionGetPosts, - ActionAdminDeleteUser, - ActionCallsPrivate, - ActionPrivate, - ActionRecursive, - ActionRecursiveSneakyA, - ActionRecursiveSneakyB, - }, - Procedures: []*types.Procedure{ - ProcCreateUser, - ProcGetUserByAddress, - ProcGetUsersByAge, - }, - Extensions: []*types.Extension{}, -} diff --git a/node/engine/testdata/tables.go b/node/engine/testdata/tables.go deleted file mode 100644 index 7fcf523e9..000000000 --- a/node/engine/testdata/tables.go +++ /dev/null @@ -1,183 +0,0 @@ -package testdata - -import ( - 
"github.com/kwilteam/kwil-db/core/types" -) - -var ( - TableUsers = &types.Table{ - Name: "users", - Columns: []*types.Column{ - { - Name: "id", - Type: types.IntType, - Attributes: []*types.Attribute{ - { - Type: types.PRIMARY_KEY, - }, - { - Type: types.NOT_NULL, - }, - }, - }, - { - Name: "username", - Type: types.TextType, - Attributes: []*types.Attribute{ - { - Type: types.NOT_NULL, - }, - { - Type: types.UNIQUE, - }, - { - Type: types.MIN_LENGTH, - Value: "5", - }, - { - Type: types.MAX_LENGTH, - Value: "32", - }, - }, - }, - { - Name: "age", - Type: types.IntType, - Attributes: []*types.Attribute{ - { - Type: types.NOT_NULL, - }, - { - Type: types.MIN, - Value: "13", - }, - { - Type: types.MAX, - Value: "420", - }, - }, - }, - { - Name: "address", - Type: types.TextType, - Attributes: []*types.Attribute{ - { - Type: types.NOT_NULL, - }, - { - Type: types.UNIQUE, - }, - }, - }, - }, - Indexes: []*types.Index{ - { - Name: "age_idx", - Columns: []string{ - "age", - }, - Type: types.BTREE, - }, - }, - } - - TablePosts = &types.Table{ - Name: "posts", - Columns: []*types.Column{ - { - Name: "id", - Type: types.IntType, - Attributes: []*types.Attribute{ - { - Type: types.PRIMARY_KEY, - }, - { - Type: types.NOT_NULL, - }, - }, - }, - { - Name: "title", - Type: types.TextType, - Attributes: []*types.Attribute{ - { - Type: types.NOT_NULL, - }, - { - Type: types.MAX_LENGTH, - Value: "300", - }, - }, - }, - { - Name: "content", - Type: types.TextType, - Attributes: []*types.Attribute{ - { - Type: types.NOT_NULL, - }, - { - Type: types.MAX_LENGTH, - Value: "10000", - }, - }, - }, - { - Name: "author_id", - Type: types.IntType, - Attributes: []*types.Attribute{ - { - Type: types.NOT_NULL, - }, - }, - }, - { - Name: "post_date", - Type: types.TextType, - Attributes: []*types.Attribute{ - { - Type: types.NOT_NULL, - }, - }, - }, - }, - Indexes: []*types.Index{ - { - Name: "author_idx", - Columns: []string{ - "author_id", - }, - Type: types.BTREE, - }, - { // author post names must be unique - Name: "author_title_idx", - Columns: []string{ - "author_id", - "title", - }, - Type: types.UNIQUE_BTREE, - }, - }, - ForeignKeys: []*types.ForeignKey{ - { - ChildKeys: []string{ - "author_id", - }, - ParentKeys: []string{ - "id", - }, - ParentTable: "users", - Actions: []*types.ForeignKeyAction{ - { - On: types.ON_UPDATE, - Do: types.DO_CASCADE, - }, - { - On: types.ON_DELETE, - Do: types.DO_CASCADE, - }, - }, - }, - }, - } -) diff --git a/node/engine/types.go b/node/engine/types.go new file mode 100644 index 000000000..4be185068 --- /dev/null +++ b/node/engine/types.go @@ -0,0 +1,183 @@ +package engine + +import ( + "strings" + + "github.com/kwilteam/kwil-db/core/types" +) + +// Table is a table in the schema. +type Table struct { + // Name is the name of the table. + Name string + // Columns is a list of columns in the table. + Columns []*Column + // Indexes is a list of indexes on the table. + Indexes []*Index + // Constraints are constraints on the table. + Constraints map[string]*Constraint +} + +// Copy deep copies the table. 
+func (t *Table) Copy() *Table { + table := &Table{ + Name: t.Name, + Columns: make([]*Column, len(t.Columns)), + Indexes: make([]*Index, len(t.Indexes)), + Constraints: make(map[string]*Constraint), + } + + for i, col := range t.Columns { + table.Columns[i] = col.Copy() + } + + for i, idx := range t.Indexes { + table.Indexes[i] = idx.Copy() + } + + for name, constraint := range t.Constraints { + table.Constraints[name] = constraint.Copy() + } + + return table +} + +func (t *Table) PrimaryKeyCols() []*Column { + var pkCols []*Column + for _, col := range t.Columns { + if col.IsPrimaryKey { + pkCols = append(pkCols, col) + } + } + + return pkCols +} + +// HasPrimaryKey returns true if the column is part of the primary key. +func (t *Table) HasPrimaryKey(col string) bool { + col = strings.ToLower(col) + for _, c := range t.Columns { + if c.Name == col && c.IsPrimaryKey { + return true + } + } + return false +} + +// Column returns a column by name. +// If the column is not found, the second return value is false. +func (t *Table) Column(name string) (*Column, bool) { + for _, col := range t.Columns { + if col.Name == name { + return col, true + } + } + return nil, false +} + +// SearchConstraint returns a list of constraints that match the given column and type. +func (t *Table) SearchConstraint(column string, constraint ConstraintType) []*Constraint { + var constraints []*Constraint + for _, c := range t.Constraints { + if c.Type == constraint { + for _, col := range c.Columns { + if col == column { + constraints = append(constraints, c) + } + } + } + } + return constraints +} + +// Column is a column in a table. +type Column struct { + // Name is the name of the column. + Name string + // DataType is the data type of the column. + DataType *types.DataType + // Nullable is true if the column can be null. + Nullable bool + // IsPrimaryKey is true if the column is part of the primary key. + IsPrimaryKey bool +} + +func (c *Column) Copy() *Column { + return &Column{ + Name: c.Name, + DataType: c.DataType.Copy(), + Nullable: c.Nullable, + IsPrimaryKey: c.IsPrimaryKey, + } +} + +// Constraint is a constraint in the schema. +type Constraint struct { + // Type is the type of the constraint. + Type ConstraintType + // Columns is a list of column names that the constraint is on. + Columns []string +} + +func (c *Constraint) Copy() *Constraint { + return &Constraint{ + Type: c.Type, + Columns: append([]string{}, c.Columns...), + } +} + +func (c *Constraint) ContainsColumn(col string) bool { + for _, column := range c.Columns { + if column == col { + return true + } + } + return false +} + +type ConstraintType string + +const ( + ConstraintUnique ConstraintType = "unique" + ConstraintCheck ConstraintType = "check" + ConstraintFK ConstraintType = "foreign_key" +) + +// IndexType is a type of index (e.g. BTREE, UNIQUE_BTREE, PRIMARY) +type IndexType string + +// Index is an index on a table. +type Index struct { + Name string `json:"name"` + Columns []string `json:"columns"` + Type IndexType `json:"type"` +} + +func (i *Index) Copy() *Index { + return &Index{ + Name: i.Name, + Columns: append([]string{}, i.Columns...), + Type: i.Type, + } +} + +func (i *Index) ContainsColumn(col string) bool { + for _, column := range i.Columns { + if column == col { + return true + } + } + return false +} + +// index types +const ( + // BTREE is the default index type. + BTREE IndexType = "BTREE" + // UNIQUE_BTREE is a unique BTREE index. + UNIQUE_BTREE IndexType = "UNIQUE_BTREE" + // PRIMARY is a primary index. 
+ // Only one primary index is allowed per table. + // A primary index cannot exist on a table that also has a primary key. + PRIMARY IndexType = "PRIMARY" +) diff --git a/node/pg/db.go b/node/pg/db.go index abebf6fad..97093ccb4 100644 --- a/node/pg/db.go +++ b/node/pg/db.go @@ -736,3 +736,27 @@ func (db *DB) Execute(ctx context.Context, stmt string, args ...any) (*sql.Resul } // TODO: require rw with target_session_attrs=read-write ? + +// Exec executes a Postgres SQL statement against the database. Unlike the Execute method, +// this function does not use `query` internally, which allows for many statements delimited by +// semicolons to be executed in one call. +func Exec(ctx context.Context, tx sql.Executor, stmt string) error { + var conn *pgx.Conn + switch tx := tx.(type) { + case *DB: + tx.mtx.Lock() + defer tx.mtx.Unlock() + if tx.tx == nil { + return sql.ErrNoTransaction + } + + conn = tx.tx.(conner).Conn() + case conner: + conn = tx.Conn() + default: + return fmt.Errorf("unsupported type %T", tx) + } + + _, err := conn.Exec(ctx, stmt) + return err +} diff --git a/node/pg/repl_changeset.go b/node/pg/repl_changeset.go index b3d59d841..da144421d 100644 --- a/node/pg/repl_changeset.go +++ b/node/pg/repl_changeset.go @@ -14,7 +14,6 @@ import ( "strings" "github.com/jackc/pglogrepl" - "github.com/kwilteam/kwil-db/core/types" "github.com/kwilteam/kwil-db/node/types/sql" ) diff --git a/node/pg/repl_changeset_test.go b/node/pg/repl_changeset_test.go index fc1d7ce40..ca879b611 100644 --- a/node/pg/repl_changeset_test.go +++ b/node/pg/repl_changeset_test.go @@ -6,10 +6,9 @@ import ( "strings" "testing" + "github.com/kwilteam/kwil-db/core/types" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" - - "github.com/kwilteam/kwil-db/core/types" ) func TestChangesetEntry_Serialize(t *testing.T) { @@ -514,7 +513,7 @@ func TestRelation_SerializeSize(t *testing.T) { {Name: "col3", Type: types.BoolType}, }, }, - want: 2 + 4 + 4 + 4 + 5 + 4 + (2 + 4 + 4 + (2 + 4 + 3 + 1 + 4)) + 2*(2+4+4+(2+4+4+1+4)), + want: 2 + 4 + 4 + 4 + 5 + 4 + (2 + 4 + 5 + (2 + 4 + 3 + 1 + 4)) + 2*(2+4+4+(2+4+4+1+4)), }, } diff --git a/node/pg/types.go b/node/pg/types.go index 42f61b4ff..55de6e0d4 100644 --- a/node/pg/types.go +++ b/node/pg/types.go @@ -11,7 +11,6 @@ import ( "strings" "github.com/jackc/pgx/v5/pgtype" - "github.com/kwilteam/kwil-db/core/types" "github.com/kwilteam/kwil-db/core/types/decimal" "github.com/kwilteam/kwil-db/node/types/sql" diff --git a/node/services/jsonrpc/usersvc/service.go b/node/services/jsonrpc/usersvc/service.go index b75d584b1..7f25e5f73 100644 --- a/node/services/jsonrpc/usersvc/service.go +++ b/node/services/jsonrpc/usersvc/service.go @@ -21,18 +21,16 @@ import ( "github.com/kwilteam/kwil-db/node/types/sql" "github.com/kwilteam/kwil-db/node/voting" - "github.com/kwilteam/kwil-db/node/engine/execution" // errors from engine + // errors from engine + "github.com/kwilteam/kwil-db/node/engine/interpreter" rpcserver "github.com/kwilteam/kwil-db/node/services/jsonrpc" "github.com/kwilteam/kwil-db/node/services/jsonrpc/ratelimit" - "github.com/kwilteam/kwil-db/parse" "github.com/kwilteam/kwil-db/version" ) type EngineReader interface { - Procedure(ctx *common.TxContext, tx sql.DB, options *common.ExecutionData) (*sql.ResultSet, error) - GetSchema(dbid string) (*types.Schema, error) - ListDatasets(owner []byte) ([]*types.DatasetIdentifier, error) - Execute(ctx *common.TxContext, tx sql.DB, dbid string, query string, values map[string]any) (*sql.ResultSet, error) + Call(ctx 
*common.TxContext, tx sql.DB, namespace, action string, args []any, resultFn func(*common.Row) error) (*common.CallResult, error) + Execute(ctx *common.TxContext, tx sql.DB, query string, params map[string]any, resultFn func(*common.Row) error) error } type BlockchainTransactor interface { @@ -307,11 +305,6 @@ func (svc *Service) Methods() map[jsonrpc.Method]rpcserver.MethodDef { "get current blockchain info", "chain info including chain ID and best block", ), - userjson.MethodDatabases: rpcserver.MakeMethodDef( - svc.ListDatabases, - "list databases", - "an array of matching databases", - ), userjson.MethodPing: rpcserver.MakeMethodDef( svc.Ping, "ping the server", @@ -327,11 +320,6 @@ func (svc *Service) Methods() map[jsonrpc.Method]rpcserver.MethodDef { "perform an ad-hoc SQL query", "the result of the query as a encoded records", ), - userjson.MethodSchema: rpcserver.MakeMethodDef( - svc.Schema, - "get a deployed database's kuneiform schema definition", - "the kuneiform schema", - ), userjson.MethodTxQuery: rpcserver.MakeMethodDef( svc.TxQuery, "query for the status of a transaction", @@ -522,25 +510,22 @@ func (svc *Service) Query(ctx context.Context, req *userjson.QueryRequest) (*use readTx := svc.db.BeginDelayedReadTx() defer readTx.Rollback(ctx) - result, err := svc.engine.Execute(&common.TxContext{ + r := &rowReader{} + err := svc.engine.Execute(&common.TxContext{ Ctx: ctxExec, BlockContext: &common.BlockContext{ Height: -1, // cannot know the height here. }, - }, readTx, req.DBID, req.Query, nil) + }, readTx, req.Query, req.Params, r.read) if err != nil { // We don't know for sure that it's an invalid argument, but an invalid // user-provided query isn't an internal server error. return nil, engineError(err) } - - bts, err := json.Marshal(resultMap(result)) // marshalling the map is less efficient, but necessary for backwards compatibility - if err != nil { - return nil, jsonrpc.NewError(jsonrpc.ErrorResultEncoding, "failed to marshal call result", nil) - } - return &userjson.QueryResponse{ - Result: bts, + ColumnNames: r.qr.ColumnNames, + ColumnTypes: r.qr.ColumnTypes, + Values: r.qr.Values, }, nil } @@ -579,27 +564,6 @@ func (svc *Service) Ping(ctx context.Context, req *userjson.PingRequest) (*userj }, nil } -func (svc *Service) ListDatabases(ctx context.Context, req *userjson.ListDatabasesRequest) (*userjson.ListDatabasesResponse, *jsonrpc.Error) { - dbs, err := svc.engine.ListDatasets(req.Owner) - if err != nil { - svc.log.Error("ListDatasets failed", "error", err) - return nil, engineError(err) - } - - pbDatasets := make([]*userjson.DatasetInfo, len(dbs)) - for i, db := range dbs { - pbDatasets[i] = &userjson.DatasetInfo{ - DBID: db.DBID, - Name: db.Name, - Owner: db.Owner, - } - } - - return &userjson.ListDatabasesResponse{ - Databases: pbDatasets, - }, nil -} - func checkEngineError(err error) (jsonrpc.ErrorCode, string) { if err == nil { return 0, "" // would not be constructing a jsonrpc.Error @@ -607,14 +571,11 @@ func checkEngineError(err error) (jsonrpc.ErrorCode, string) { if errors.Is(err, context.DeadlineExceeded) { return jsonrpc.ErrorTimeout, "db timeout" } - if errors.Is(err, execution.ErrDatasetExists) { - return jsonrpc.ErrorEngineDatasetExists, execution.ErrDatasetExists.Error() - } - if errors.Is(err, execution.ErrDatasetNotFound) { - return jsonrpc.ErrorEngineDatasetNotFound, execution.ErrDatasetNotFound.Error() + if errors.Is(err, interpreter.ErrNamespaceExists) { + return jsonrpc.ErrorEngineDatasetExists, err.Error() } - if errors.Is(err, 
execution.ErrInvalidSchema) { - return jsonrpc.ErrorEngineInvalidSchema, execution.ErrInvalidSchema.Error() + if errors.Is(err, interpreter.ErrNamespaceNotFound) { + return jsonrpc.ErrorEngineDatasetNotFound, err.Error() } return jsonrpc.ErrorEngineInternal, err.Error() @@ -631,20 +592,6 @@ func engineError(err error) *jsonrpc.Error { } } -func (svc *Service) Schema(ctx context.Context, req *userjson.SchemaRequest) (*userjson.SchemaResponse, *jsonrpc.Error) { - // logger := svc.log.With(log.String("rpc", "GetSchema"), log.String("dbid", req.DBID)) - logger := svc.log - schema, err := svc.engine.GetSchema(req.DBID) - if err != nil { - logger.Debug("failed to get schema", "error", err) - return nil, engineError(err) - } - - return &userjson.SchemaResponse{ - Schema: schema, - }, nil -} - func unmarshalActionCall(req *userjson.CallRequest) (*types.ActionCall, *types.CallMessage, error) { var actionPayload types.ActionCall err := actionPayload.UnmarshalBinary(req.Body.Payload) @@ -660,20 +607,6 @@ func unmarshalActionCall(req *userjson.CallRequest) (*types.ActionCall, *types.C return &actionPayload, &cm, nil } -func resultMap(r *sql.ResultSet) []map[string]any { - m := make([]map[string]any, len(r.Rows)) - for i, row := range r.Rows { - m2 := make(map[string]any) - for j, col := range row { - m2[r.Columns[j]] = col - } - - m[i] = m2 - } - - return m -} - func (svc *Service) verifyCallChallenge(challenge [32]byte) *jsonrpc.Error { svc.challengeMtx.Lock() challengeTime, ok := svc.challenges[challenge] @@ -701,31 +634,9 @@ func (svc *Service) Call(ctx context.Context, req *userjson.CallRequest) (*userj return nil, jsonrpc.NewError(jsonrpc.ErrorInvalidParams, "failed to convert action call: "+err.Error(), nil) } - // Authenticate by validating the challenge was server-issued, and verify - // the signature on the serialized call message that include the challenge. - if svc.privateMode { - // The message must have a sig, sender, and challenge. - if msg.Signature == nil || len(msg.Sender) == 0 { - return nil, jsonrpc.NewError(jsonrpc.ErrorCallChallengeNotFound, "signed call message with challenge required", nil) - } - if len(msg.Body.Challenge) != 32 { - return nil, jsonrpc.NewError(jsonrpc.ErrorInvalidCallChallenge, "incorrect challenge data length", nil) - } - // The call message sender must be interpreted consistently with - // signature verification, so ensure the auth types match. - if msg.AuthType != msg.Signature.Type { - return nil, jsonrpc.NewError(jsonrpc.ErrorMismatchCallAuthType, "different authentication schemes in signature and caller", nil) - } - // Ensure we issued the message's challenge. 
- if err := svc.verifyCallChallenge([32]byte(msg.Body.Challenge)); err != nil { - return nil, err - } - sigtxt := types.CallSigText(body.DBID, body.Action, - msg.Body.Payload, msg.Body.Challenge) - err = ident.VerifySignature(msg.Sender, []byte(sigtxt), msg.Signature) - if err != nil { - return nil, jsonrpc.NewError(jsonrpc.ErrorInvalidCallSignature, "invalid signature on call message", nil) - } + if jsonRPCErr := svc.authenticate(msg, types.CallSigText(body.DBID, body.Action, + msg.Body.Payload, msg.Body.Challenge)); jsonRPCErr != nil { + return nil, jsonRPCErr + } args := make([]any, len(body.Arguments)) @@ -736,9 +647,55 @@ func (svc *Service) Call(ctx context.Context, req *userjson.CallRequest) (*userj } } + ctxExec, cancel := context.WithTimeout(ctx, svc.readTxTimeout) + defer cancel() + + txContext, jsonRPCErr := svc.txCtx(ctxExec, msg) + if jsonRPCErr != nil { + return nil, jsonRPCErr + } + + // we use a basic read tx since we are subscribing to notices, + // and it is therefore pointless to use a delayed tx + readTx, err := svc.db.BeginReadTx(ctx) + if err != nil { + return nil, jsonrpc.NewError(jsonrpc.ErrorNodeInternal, "failed to start read tx", nil) + } + defer readTx.Rollback(ctx) + + r := &rowReader{} + callRes, err := svc.engine.Call(txContext, readTx, body.DBID, body.Action, args, r.read) + if err != nil { + return nil, engineError(err) + } + + return &userjson.CallResponse{ + QueryResult: &r.qr, + Logs: callRes.Logs, + }, nil +} + +// rowReader is a helper struct that writes data for a query response +type rowReader struct { + qr types.QueryResult +} + +func (r *rowReader) read(row *common.Row) error { + if r.qr.ColumnNames == nil { + r.qr.ColumnNames = row.ColumnNames + r.qr.ColumnTypes = row.ColumnTypes + } + r.qr.Values = append(r.qr.Values, row.Values) + return nil +} + +// txCtx creates a transaction context from the given context and call message. +// It will do its best to determine the caller and signer, and the block context. +func (svc *Service) txCtx(ctx context.Context, msg *types.CallMessage) (*common.TxContext, *jsonrpc.Error) { signer := msg.Sender caller := "" // string representation of sender, if signed.
Otherwise, empty string if signer != nil && msg.AuthType != "" { + var err error caller, err = ident.Identifier(msg.AuthType, signer) if err != nil { return nil, jsonrpc.NewError(jsonrpc.ErrorIdentInvalid, "failed to get caller: "+err.Error(), nil) @@ -755,84 +712,50 @@ func (svc *Service) Call(ctx context.Context, req *userjson.CallRequest) (*userj stamp = -1 } - ctxExec, cancel := context.WithTimeout(ctx, svc.readTxTimeout) - defer cancel() - - // we use a basic read tx since we are subscribing to notices, - // and it is therefore pointless to use a delayed tx - readTx, err := svc.db.BeginReadTx(ctx) - if err != nil { - return nil, jsonrpc.NewError(jsonrpc.ErrorNodeInternal, "failed to start read tx", nil) - } - defer readTx.Rollback(ctx) - - logCh, done, err := readTx.Subscribe(ctx) - if err != nil { - return nil, jsonrpc.NewError(jsonrpc.ErrorNodeInternal, "failed to subscribe to notices", nil) - } - defer done(ctx) - - var logs []string - wg := sync.WaitGroup{} - wg.Add(1) - go func() { - for { - select { - case <-ctxExec.Done(): - wg.Done() - return - case logMsg, ok := <-logCh: - if !ok { - wg.Done() - return - } - - _, notc, err := parse.ParseNotice(logMsg) - if err != nil { - svc.log.Error("failed to parse notice", "error", err) - continue - } - - logs = append(logs, notc) - } - } - }() - - executeResult, err := svc.engine.Procedure(&common.TxContext{ - Ctx: ctxExec, - Signer: signer, - Caller: caller, + return &common.TxContext{ + Ctx: ctx, + Signer: signer, + Caller: caller, + Authenticator: msg.AuthType, BlockContext: &common.BlockContext{ Height: height, Timestamp: stamp, }, - Authenticator: msg.AuthType, - }, readTx, &common.ExecutionData{ - Dataset: body.DBID, - Procedure: body.Action, - Args: args, - }) - if err != nil { - return nil, engineError(err) - } + }, nil +} - err = done(ctx) - if err != nil { - return nil, jsonrpc.NewError(jsonrpc.ErrorNodeInternal, "failed to unsubscribe from notices", nil) +// authenticate enforces authentication for the given context and message +// if private mode is enabled. It returns an error if authentication fails. +func (svc *Service) authenticate(msg *types.CallMessage, sigTxt string) *jsonrpc.Error { + if !svc.privateMode { + return nil } - // marshalling the map is less efficient, but necessary for backwards compatibility - btsResult, err := json.Marshal(resultMap(executeResult)) + // Authenticate by validating the challenge was server-issued, and verify + // the signature on the serialized call message that include the challenge. + + // The message must have a sig, sender, and challenge. + if msg.Signature == nil || len(msg.Sender) == 0 { + return jsonrpc.NewError(jsonrpc.ErrorCallChallengeNotFound, "signed call message with challenge required", nil) + } + if len(msg.Body.Challenge) != 32 { + return jsonrpc.NewError(jsonrpc.ErrorInvalidCallChallenge, "incorrect challenge data length", nil) + } + // The call message sender must be interpreted consistently with + // signature verification, so ensure the auth types match. + if msg.AuthType != msg.Signature.Type { + return jsonrpc.NewError(jsonrpc.ErrorMismatchCallAuthType, "different authentication schemes in signature and caller", nil) + } + // Ensure we issued the message's challenge. 
+ if err := svc.verifyCallChallenge([32]byte(msg.Body.Challenge)); err != nil { + return err + } + err := ident.VerifySignature(msg.Sender, []byte(sigTxt), msg.Signature) if err != nil { - return nil, jsonrpc.NewError(jsonrpc.ErrorResultEncoding, "failed to marshal call result", nil) + return jsonrpc.NewError(jsonrpc.ErrorInvalidCallSignature, "invalid signature on call message", nil) } - wg.Wait() - - return &userjson.CallResponse{ - Result: btsResult, - Logs: logs, - }, nil + return nil } func (svc *Service) TxQuery(ctx context.Context, req *userjson.TxQueryRequest) (*userjson.TxQueryResponse, *jsonrpc.Error) { diff --git a/node/txapp/interfaces.go b/node/txapp/interfaces.go index b167eb8cf..86abb1d87 100644 --- a/node/txapp/interfaces.go +++ b/node/txapp/interfaces.go @@ -4,7 +4,6 @@ import ( "context" "math/big" - "github.com/kwilteam/kwil-db/common" "github.com/kwilteam/kwil-db/core/types" "github.com/kwilteam/kwil-db/node/types/sql" "github.com/kwilteam/kwil-db/node/voting" @@ -43,8 +42,6 @@ type DB interface { sql.SnapshotTxMaker } -type Engine = common.Engine // ok? or can reduce this? - var ( // getEvents gets all events, even if they have been // marked received diff --git a/node/txapp/mempool.go b/node/txapp/mempool.go index 3cd23e38a..0b9983034 100644 --- a/node/txapp/mempool.go +++ b/node/txapp/mempool.go @@ -80,10 +80,8 @@ func (m *mempool) applyTransaction(ctx *common.TxContext, tx *types.Transaction, return errors.New("validator vote ids are not allowed during migration") case types.PayloadTypeValidatorVoteBodies: return errors.New("validator vote bodies are not allowed during migration") - case types.PayloadTypeDeploySchema: - return errors.New("deploy schema transactions are not allowed during migration") - case types.PayloadTypeDropSchema: - return errors.New("drop schema transactions are not allowed during migration") + case types.PayloadTypeRawStatement: + return errors.New("raw statement transactions are not allowed during migration") case types.PayloadTypeTransfer: return errors.New("transfer transactions are not allowed during migration") } diff --git a/node/txapp/routes.go b/node/txapp/routes.go index 13be8743d..f75be347f 100644 --- a/node/txapp/routes.go +++ b/node/txapp/routes.go @@ -13,16 +13,14 @@ import ( "github.com/kwilteam/kwil-db/extensions/consensus" "github.com/kwilteam/kwil-db/extensions/resolutions" "github.com/kwilteam/kwil-db/node/accounts" - "github.com/kwilteam/kwil-db/node/engine/execution" - "github.com/kwilteam/kwil-db/node/ident" + "github.com/kwilteam/kwil-db/node/engine/interpreter" "github.com/kwilteam/kwil-db/node/types/sql" "github.com/kwilteam/kwil-db/node/voting" ) func init() { err := errors.Join( - RegisterRoute(types.PayloadTypeDeploySchema, NewRoute(&deployDatasetRoute{})), - RegisterRoute(types.PayloadTypeDropSchema, NewRoute(&dropDatasetRoute{})), + RegisterRoute(types.PayloadTypeRawStatement, NewRoute(&rawStatementRoute{})), RegisterRoute(types.PayloadTypeExecute, NewRoute(&executeActionRoute{})), RegisterRoute(types.PayloadTypeTransfer, NewRoute(&transferRoute{})), RegisterRoute(types.PayloadTypeValidatorJoin, NewRoute(&validatorJoinRoute{})), @@ -204,99 +202,46 @@ func codeForEngineError(err error) types.TxCode { if err == nil { return types.CodeOk } - if errors.Is(err, execution.ErrDatasetExists) { + if errors.Is(err, interpreter.ErrNamespaceExists) { return types.CodeDatasetExists } - if errors.Is(err, execution.ErrDatasetNotFound) { + if errors.Is(err, interpreter.ErrNamespaceNotFound) { return types.CodeDatasetMissing } - if 
errors.Is(err, execution.ErrInvalidSchema) { - return types.CodeInvalidSchema - } return types.CodeUnknownError } -type deployDatasetRoute struct { - schema *types.Schema // set by PreTx - identifier string - authType string +type rawStatementRoute struct { + statement string } -var _ consensus.Route = (*deployDatasetRoute)(nil) - -func (d *deployDatasetRoute) Name() string { - return types.PayloadTypeDeploySchema.String() -} +var _ consensus.Route = (*rawStatementRoute)(nil) -func (d *deployDatasetRoute) Price(ctx context.Context, app *common.App, tx *types.Transaction) (*big.Int, error) { - return big.NewInt(1000000000000000000), nil +func (d *rawStatementRoute) Name() string { + return types.PayloadTypeRawStatement.String() } -func (d *deployDatasetRoute) PreTx(ctx *common.TxContext, svc *common.Service, tx *types.Transaction) (types.TxCode, error) { - if ctx.BlockContext.ChainContext.NetworkParameters.MigrationStatus == types.MigrationInProgress || - ctx.BlockContext.ChainContext.NetworkParameters.MigrationStatus == types.MigrationCompleted { - return types.CodeNetworkInMigration, errors.New("cannot deploy dataset during migration") - } - - schemaPayload := &types.Schema{} - err := schemaPayload.UnmarshalBinary(tx.Body.Payload) - if err != nil { - return types.CodeEncodingError, err - } - - d.schema = schemaPayload - - d.identifier, err = ident.Identifier(tx.Signature.Type, tx.Sender) - if err != nil { - return types.CodeUnknownError, err - } - - d.authType = tx.Signature.Type - - return 0, nil -} - -func (d *deployDatasetRoute) InTx(ctx *common.TxContext, app *common.App, tx *types.Transaction) (types.TxCode, error) { - err := app.Engine.CreateDataset(ctx, app.DB, d.schema) - if err != nil { - return codeForEngineError(err), err - } - return 0, nil -} - -type dropDatasetRoute struct { - dbid string -} - -var _ consensus.Route = (*dropDatasetRoute)(nil) - -func (d *dropDatasetRoute) Name() string { - return types.PayloadTypeDropSchema.String() -} - -func (d *dropDatasetRoute) Price(ctx context.Context, app *common.App, tx *types.Transaction) (*big.Int, error) { +func (d *rawStatementRoute) Price(ctx context.Context, app *common.App, tx *types.Transaction) (*big.Int, error) { return big.NewInt(10000000000000), nil } -func (d *dropDatasetRoute) PreTx(ctx *common.TxContext, svc *common.Service, tx *types.Transaction) (types.TxCode, error) { - if ctx.BlockContext.ChainContext.NetworkParameters.MigrationStatus == types.MigrationInProgress || - ctx.BlockContext.ChainContext.NetworkParameters.MigrationStatus == types.MigrationCompleted { - return types.CodeNetworkInMigration, errors.New("cannot drop dataset during migration") - } - - drop := &types.DropSchema{} - err := drop.UnmarshalBinary(tx.Body.Payload) +func (d *rawStatementRoute) PreTx(ctx *common.TxContext, svc *common.Service, tx *types.Transaction) (types.TxCode, error) { + raw := &types.RawStatement{} + err := raw.UnmarshalBinary(tx.Body.Payload) if err != nil { return types.CodeEncodingError, err } - d.dbid = drop.DBID + d.statement = raw.Statement return 0, nil } -func (d *dropDatasetRoute) InTx(ctx *common.TxContext, app *common.App, tx *types.Transaction) (types.TxCode, error) { - err := app.Engine.DeleteDataset(ctx, app.DB, d.dbid) +func (d *rawStatementRoute) InTx(ctx *common.TxContext, app *common.App, tx *types.Transaction) (types.TxCode, error) { + err := app.Engine.Execute(ctx, app.DB, d.statement, nil, func(r *common.Row) error { + // we throw away all results for raw statements in a block + return nil + }) if err != nil 
{ return codeForEngineError(err), err } @@ -304,9 +249,9 @@ func (d *dropDatasetRoute) InTx(ctx *common.TxContext, app *common.App, tx *type } type executeActionRoute struct { - dbid string - action string - args [][]any + namespace string + action string + args [][]any } var _ consensus.Route = (*executeActionRoute)(nil) @@ -327,7 +272,7 @@ func (d *executeActionRoute) PreTx(ctx *common.TxContext, svc *common.Service, t } d.action = action.Action - d.dbid = action.DBID + d.namespace = action.DBID // here, we decode the [][]types.EncodedTypes into [][]any args := make([][]any, len(action.Arguments)) @@ -355,10 +300,11 @@ func (d *executeActionRoute) PreTx(ctx *common.TxContext, svc *common.Service, t func (d *executeActionRoute) InTx(ctx *common.TxContext, app *common.App, tx *types.Transaction) (types.TxCode, error) { for i := range d.args { - _, err := app.Engine.Procedure(ctx, app.DB, &common.ExecutionData{ - Dataset: d.dbid, - Procedure: d.action, - Args: d.args[i], + // TODO: once we are able to store execution logs in the block store, we should propagate the discarded + // return value here. + _, err := app.Engine.Call(ctx, app.DB, d.namespace, d.action, d.args[i], func(r *common.Row) error { + // we throw away all results for execute actions + return nil }) if err != nil { return codeForEngineError(err), err diff --git a/node/txapp/txapp.go b/node/txapp/txapp.go index 16a32e2ae..a978ef3d2 100644 --- a/node/txapp/txapp.go +++ b/node/txapp/txapp.go @@ -26,9 +26,9 @@ import ( // maintaining a mempool for uncommitted accounts, pricing transactions, // managing atomicity of the database, and managing the validator set. type TxApp struct { - Engine Engine // tracks deployed schemas - Accounts Accounts // tracks account balances and nonces - Validators Validators // tracks validator power + Engine common.Engine // tracks deployed schemas + Accounts Accounts // tracks account balances and nonces + Validators Validators // tracks validator power service *common.Service // forks forks.Forks diff --git a/parse/actions.go b/parse/actions.go deleted file mode 100644 index 07ef96b50..000000000 --- a/parse/actions.go +++ /dev/null @@ -1,69 +0,0 @@ -package parse - -import "github.com/kwilteam/kwil-db/core/types" - -type actionAnalyzer struct { - sqlAnalyzer - // Mutative is true if the action mutates state. - Mutative bool - // schema is the database schema that contains the action. - schema *types.Schema - // inSQL is true if the visitor is in an SQL statement. - inSQL bool -} - -var _ Visitor = (*actionAnalyzer)(nil) - -func (a *actionAnalyzer) VisitActionStmtSQL(p0 *ActionStmtSQL) any { - // we simply need to call the sql analyzer to make it check the statement - // and rewrite it to be deterministic. We can ignore the result. 
- a.inSQL = true - p0.SQL.Accept(&a.sqlAnalyzer) - a.inSQL = false - - if a.sqlAnalyzer.sqlResult.Mutative { - a.Mutative = true - } - - return nil -} - -func (a *actionAnalyzer) VisitActionStmtExtensionCall(p0 *ActionStmtExtensionCall) any { - a.sqlCtx.isInlineAction = true - for _, arg := range p0.Args { - arg.Accept(&a.sqlAnalyzer) - } - a.sqlCtx.isInlineAction = false - - _, ok := a.schema.FindExtensionImport(p0.Extension) - if !ok { - a.errs.AddErr(p0, ErrActionNotFound, p0.Extension) - } - - // we need to add all receivers to the known variables - for _, rec := range p0.Receivers { - a.blockContext.variables[rec] = types.UnknownType - } - - return nil -} - -func (a *actionAnalyzer) VisitActionStmtActionCall(p0 *ActionStmtActionCall) any { - a.sqlCtx.isInlineAction = true - for _, arg := range p0.Args { - arg.Accept(&a.sqlAnalyzer) - } - a.sqlCtx.isInlineAction = false - - act, ok := a.schema.FindAction(p0.Action) - if !ok { - a.errs.AddErr(p0, ErrActionNotFound, p0.Action) - return nil - } - - if !act.IsView() { - a.Mutative = true - } - - return nil -} diff --git a/parse/analyze.go b/parse/analyze.go deleted file mode 100644 index 8c78da12a..000000000 --- a/parse/analyze.go +++ /dev/null @@ -1,2783 +0,0 @@ -package parse - -import ( - "fmt" - "maps" - - "github.com/kwilteam/kwil-db/core/types" -) - -/* - this file performs analysis of SQL and procedures. It performs several main types of validation: - 1. Type checking: it ensures that all statements and expressions return correct types. - This is critical because plpgsql only throws type errors at runtime, which is really bad - for a smart contract language. - 2. Deterministic ordering: it ensures that all queries have deterministic ordering, even if - not specified by the query writer. It adds necessary ordering clauses to achieve this. - 3. Aggregate checks: it ensures that aggregate functions are used correctly, and that they - can not be used to create non-determinism, and also that they return errors that would otherwise - be thrown by Postgres at runtime. - 4. Mutative checks: it analyzes whether or not a procedure / sql statement is attempting to - modify state. It does not return an error if it does, but will return a boolean indicating - whether or not it is mutative. This can be used by callers to ensure that VIEW procedures - are not mutative, which would otherwise only be checked at runtime when executing them with - a read-only transaction. - 5. Contextual statement checks: Procedure statements that can only be used in certain contexts - (e.g. loop breaks and RETURN NEXT) are checked to ensure that they are only used in loops. - 6. PLPGSQL Variable Declarations: It analyzes what variables should be declared at the top - of a PLPGSQL statement, and what types they should be. - 7. Cartesian Join Checks: All joins must be joined using =, and one side of the join condition - must be a unique column with no other math applied to it. Primary keys are also counted as unique, - unless it is a compound primary key. - - DETERMINISTIC ORDERING RULES: - If a SELECT statement is a simple select (e.g. does not use compound operators): - - 1. All joined tables that are physical (and not subqueries or procedure calls) are ordered by their primary keys, - in the order they are joined. - - 2. If a SELECT has a DISTINCT clause, it will order by all columns being returned. The reason - for this can be seen here: https://stackoverflow.com/questions/3492093/does-postgresql-support-distinct-on-multiple-columns. 
- All previous rules do not apply. - - 3. If a SELECT has a GROUP BY clause, all columns specified in the GROUP BY clause will be ordered. - All previous rules do not apply. - - If a SELECT statement is a compound select (e.g. uses UNION, UNION ALL INTERSECT, EXCEPT): - - 1. All returned columns are ordered by their position in the SELECT statement. - - 2. If any compound SELECT statement has a GROUP BY, then it will return an error. - This is a remnant of SQLite's rudimentary indexing, but these queries are fairly uncommon, - and therefore not allowed for the time being - - - AGGREGATE FUNCTION RULES: - - 1. Aggregate functions can only be used in the SELECT clause, and not in the WHERE clause. - - 2. All columns referenced in HAVING or return columns must be in the GROUP BY clause, unless they are - in an aggregate function. - - 3. All columns used within aggregate functions cannot be specified in the GROUP BY clause. - - 4. If there is an aggregate in the return columns and no GROUP BY clause, then there can only - be one column in the return columns (the column with the aggregate function). -*/ - -// unknownPosition is used to signal that a position is unknown in the AST because -// it was not present in the parse statement. This is used for when we make modifications -// to the ast, e.g. for enforcing ordering. -func unknownPosition() Position { - return Position{ - IsSet: true, - StartLine: -1, - StartCol: -1, - EndLine: -1, - EndCol: -1, - } -} - -// blockContext is the context for the current block. This is can be an action, procedure, -// or sql block. -type blockContext struct { - // schema is the current schema - schema *types.Schema - // variables holds information about all variable declarations in the block - // It holds both user variables like $arg1, $arg2, and contextual variables, - // like @caller and @txid. It will be populated while the analyzer is running, - // but is prepopulated with the procedure's arguments and contextual variables. - variables map[string]*types.DataType - // anonymousVariables holds information about all anonymous variable declarations in the block. - // Anonymous variables are objects with fields, such as the receiver of loops. - // The map maps the name to the fields to their data types. - // The map will be populated while the analyzer is running. - anonymousVariables map[string]map[string]*types.DataType - // errs is used for passing errors back to the caller. - errs *errorListener -} - -// variableExists checks if a variable exists in the current block. -// It will check both user variables and anonymous variables. -func (b *blockContext) variableExists(name string) bool { - _, ok := b.variables[name] - if ok { - return true - } - - _, ok = b.anonymousVariables[name] - return ok -} - -// copyVariables copies both the user variables and anonymous variables. -func (b *blockContext) copyVariables() (map[string]*types.DataType, map[string]map[string]*types.DataType) { - // we do not need to deep copy anonymousVariables because anonymousVariables maps an object name - // to an objects fields and their data types. The only way to declare an object in Kuneiform - // is for $row in SELECT ..., the $row will have fields. Since these variables can only be declared once - // per procedure, we do not need to worry about the object having different fields throughout the - // procedure. 
- return maps.Clone(b.variables), maps.Clone(b.anonymousVariables) -} - -// sqlContext is the context of the current SQL statement -type sqlContext struct { - // joinedRelations tracks all relations joined on the current SQL statement. - joinedRelations []*Relation - // outerRelations are relations that are not joined on the scope, but are available. - // These are typically outer queries in a subquery. Calling these will be a correlated subquery. - outerRelations []*Relation - // joinedTables maps all used table names/aliases to their table definitions. - // The tables named here are also included in joinedRelations, but not - // all joinedRelations are in this map. This map ONLY includes actual SQL - // tables joined in this context, not joined subqueries or procedure calls. - // These are used for default ordering. - joinedTables map[string]*types.Table - // ctes are the common table expressions in the current scope. - ctes []*Relation - // outerScope is the scope of the outer query. - outerScope *sqlContext - // isInlineAction is true if the visitor is analyzing a SQL expression within an in-line - // statement in an action - isInlineAction bool - // inConflict is true if we are in an ON CONFLICT clause - inConflict bool - // targetTable is the name (or alias) of the table being inserted, updated, or deleted to/from. - // It is not set if we are not in an insert, update, or delete statement. - targetTable string - // hasAnonymousTable is true if an unnamed table has been joined. If this is true, - // it can be the only table joined in a select statement. - hasAnonymousTable bool - // inSelect is true if we are in a select statement. - inSelect bool - // inLoneSQL is true if this is being parsed as a lone SQL query, and - // not as part of an action or procedure. This allows us to bypass certain - // checks, such as that variables are declared as part of the procedure. - inLoneSQL bool - - // temp are values that are temporary and not even saved within the same scope. - // they are used in highly specific contexts, and shouldn't be relied on unless - // specifically documented. All temp values are denoted with a _. - - // inAggregate is true if we are within an aggregate functions parameters. - // it should only be used in ExpressionColumn, and set in ExpressionFunctionCall. - _inAggregate bool - // containsAggregate is true if the current expression contains an aggregate function. - // it is set in ExpressionFunctionCall, and accessed/reset in SelectCore. - _containsAggregate bool - // containsAggregateWithoutGroupBy is true if the current expression contains an aggregate function, - // but there is no GROUP BY clause. This is set in SelectCore, and accessed in SelectStatement. - _containsAggregateWithoutGroupBy bool - // columnInAggregate is the column found within an aggregate function, - // comprised of the relation and attribute. - // It is set in ExpressionColumn, and accessed/reset in - // SelectCore. It is nil if none are found. - _columnInAggregate *[2]string - // columnsOutsideAggregate are columns found outside of an aggregate function. - // It is set in ExpressionColumn, and accessed/reset in - // SelectCore - _columnsOutsideAggregate [][2]string - // inOrdering is true if we are in an ordering clause - _inOrdering bool - // result is the result of a query. 
It is only set when analyzing the ordering clause - _result []*Attribute -} - -func newSQLContext() sqlContext { - return sqlContext{ - joinedTables: make(map[string]*types.Table), - } -} - -// setTempValuesToZero resets all temp values to their zero values. -func (s *sqlContext) setTempValuesToZero() { - s._inAggregate = false - s._containsAggregate = false - s._columnInAggregate = nil - s._columnsOutsideAggregate = nil - s._inOrdering = false - s._result = nil -} - -// copy copies the sqlContext. -// it does not copy the outer scope. -func (c *sqlContext) copy() sqlContext { - joinedRelations := make([]*Relation, len(c.joinedRelations)) - for i, r := range c.joinedRelations { - joinedRelations[i] = r.Copy() - } - - outerRelations := make([]*Relation, len(c.outerRelations)) - for i, r := range c.outerRelations { - outerRelations[i] = r.Copy() - } - - // ctes don't need to be copied right now since they are not modified. - colsOutsideAgg := make([][2]string, len(c._columnsOutsideAggregate)) - copy(colsOutsideAgg, c._columnsOutsideAggregate) - - return sqlContext{ - joinedRelations: joinedRelations, - outerRelations: outerRelations, - ctes: c.ctes, - joinedTables: c.joinedTables, - _containsAggregateWithoutGroupBy: c._containsAggregateWithoutGroupBy, // we want to carry this over - } -} - -// joinRelation adds a relation to the context. -func (c *sqlContext) joinRelation(r *Relation) error { - // check if the relation is already joined - _, ok := c.getJoinedRelation(r.Name) - if ok { - return ErrTableAlreadyJoined - } - - c.joinedRelations = append(c.joinedRelations, r) - return nil -} - -// join joins a table. It will return an error if the table is already joined. -func (c *sqlContext) join(name string, t *types.Table) error { - _, ok := c.joinedTables[name] - if ok { - return ErrTableAlreadyJoined - } - - c.joinedTables[name] = t - return nil -} - -// getJoinedRelation returns the relation with the given name. -func (c *sqlContext) getJoinedRelation(name string) (*Relation, bool) { - for _, r := range c.joinedRelations { - if r.Name == name { - return r, true - } - } - - return nil, false -} - -// getOuterRelation returns the relation with the given name. -func (c *sqlContext) getOuterRelation(name string) (*Relation, bool) { - for _, r := range c.outerRelations { - if r.Name == name { - return r, true - } - } - - return nil, false -} - -// the following special table names track table names that mean something in the context of the SQL statement. -const ( - tableExcluded = "excluded" -) - -// findAttribute searches for a attribute in the specified relation. -// if the relation is empty, it will search all joined relations. -// It does NOT search the outer relations unless specifically specified; -// this matches Postgres' behavior. -// If the relation is empty and many columns are found, it will return an error. -// It returns both an error and an error message in case of an error. -// This is because it is meant to pass errors back to the error listener. 
-func (c *sqlContext) findAttribute(relation string, column string) (relName string, attr *Attribute, msg string, err error) { - if relation == "" { - foundAttrs := make([]*Attribute, 0) - - for _, r := range c.joinedRelations { - for _, a := range r.Attributes { - if a.Name == column { - relName = r.Name - foundAttrs = append(foundAttrs, a) - } - } - } - - switch len(foundAttrs) { - case 0: - return "", nil, column, ErrUnknownColumn - case 1: - return relName, foundAttrs[0], "", nil - default: - return "", nil, column, ErrAmbiguousColumn - } - } - - // if referencing excluded, we should instead look at the target table, - // since the excluded data will always match the failed insert. - if relation == tableExcluded { - // excluded can only be used in an ON CONFLICT clause - if !c.inConflict { - return "", nil, relation, fmt.Errorf("%w: excluded table can only be used in an ON CONFLICT clause", ErrInvalidExcludedTable) - } - relation = c.targetTable - } - - r, ok := c.getJoinedRelation(relation) - if !ok { - r, ok = c.getOuterRelation(relation) - if !ok { - return "", nil, relation, ErrUnknownTable - } - } - - for _, a := range r.Attributes { - if a.Name == column { - return r.Name, a, "", nil - } - } - - return "", nil, relation + "." + column, ErrUnknownColumn -} - -// scope moves the current scope to outer scope, -// and sets the current scope to a new scope. -func (c *sqlContext) scope() { - c2 := &sqlContext{ - joinedRelations: make([]*Relation, len(c.joinedRelations)), - outerRelations: make([]*Relation, len(c.outerRelations)), - joinedTables: make(map[string]*types.Table), - // we do not need to copy ctes since they are not ever modified. - targetTable: c.targetTable, - isInlineAction: c.isInlineAction, - inConflict: c.inConflict, - inSelect: c.inSelect, - hasAnonymousTable: c.hasAnonymousTable, - } - // copy all non-temp values - for i, r := range c.outerRelations { - c2.outerRelations[i] = r.Copy() - } - - for i, r := range c.joinedRelations { - c2.joinedRelations[i] = r.Copy() - } - - for k, t := range c.joinedTables { - c2.joinedTables[k] = t.Copy() - } - - // move joined relations to the outside - c.outerRelations = append(c.outerRelations, c.joinedRelations...) - - // zero everything else - c.joinedRelations = nil - c.joinedTables = make(map[string]*types.Table) - c.setTempValuesToZero() - - // we do NOT change the inAction, inConflict, or targetTable values, - // since these apply in all nested scopes. - - // we do not alter inSelect, but we do alter hasAnonymousTable. - c2.hasAnonymousTable = false - - c2.outerScope = c.outerScope - c.outerScope = c2 -} - -// popScope moves the current scope to the outer scope. -func (c *sqlContext) popScope() { - *c = *c.outerScope -} - -/* - this visitor breaks down nodes into 4 different types: - - Expressions: expressions simply return *Attribute. The name on all of these will be empty UNLESS it is a column reference. - - CommonTableExpressions: the only node that can directly add tables to outerRelations slice. - -*/ - -// sqlAnalyzer visits SQL nodes and analyzes them. -type sqlAnalyzer struct { - UnimplementedSqlVisitor - blockContext - sqlCtx sqlContext - sqlResult sqlAnalyzeResult -} - -// reset resets the sqlAnalyzer. -func (s *sqlAnalyzer) reset() { - // we don't need to touch the block context, since it does not change here. - s.sqlCtx = newSQLContext() - s.sqlResult = sqlAnalyzeResult{} -} - -type sqlAnalyzeResult struct { - Mutative bool -} - -// startSQLAnalyze initializes all fields of the sqlAnalyzer. 
-func (s *sqlAnalyzer) startSQLAnalyze() { - s.sqlCtx = sqlContext{ - joinedTables: make(map[string]*types.Table), - } -} - -// endSQLAnalyze is called at the end of the analysis. -func (s *sqlAnalyzer) endSQLAnalyze() *sqlAnalyzeResult { - res := s.sqlResult - s.sqlCtx = sqlContext{} - return &res -} - -var _ Visitor = (*sqlAnalyzer)(nil) - -// typeErr should be used when a type error is encountered. -// It returns an unknown attribute and adds an error to the error listener. -func (s *sqlAnalyzer) typeErr(node Node, t1, t2 *types.DataType) *types.DataType { - s.errs.AddErr(node, ErrType, "%s != %s", t1, t2) - return cast(node, types.UnknownType) -} - -// expect is a helper function that expects a certain type, and adds an error if it is not found. -func (s *sqlAnalyzer) expect(node Node, t *types.DataType, expected *types.DataType) { - if !t.Equals(expected) { - s.errs.AddErr(node, ErrType, "expected %s, received %s", expected, t) - } -} - -// expectedNumeric is a helper function that expects a numeric type, and adds an error if it is not found. -func (s *sqlAnalyzer) expectedNumeric(node Node, t *types.DataType) { - if !t.IsNumeric() { - s.errs.AddErr(node, ErrType, "expected numeric type, received %s", t) - } -} - -// expressionTypeErr should be used if we expect an expression to return a *types.DataType, -// but it returns something else. It will attempt to read the actual type and create an error -// message that is helpful for the end user. -func (s *sqlAnalyzer) expressionTypeErr(e Expression) *types.DataType { - - // prefixMsg is a function used to attempt to infer more information about - // the error. expressionTypeErr is typically triggered when someone uses a function/procedure - // with an incompatible return type. prefixMsg will attempt to get the name of the function/procedure - prefixMsg := func() string { - msg := "expression" - if call, ok := e.(ExpressionCall); ok { - msg = fmt.Sprintf(`function/procedure "%s"`, call.FunctionName()) - } - return msg - } - - switch v := e.Accept(s).(type) { - case *types.DataType: - // if it is a basic expression returning a scalar (e.g. "'hello'" or "abs(-1)"), - // or a procedure that returns exactly one scalar value. - // This should never happen, since expressionTypeErr is called when the expression - // does not return a *types.DataType. - panic("api misuse: expressionTypeErr should only be called when the expression does not return a *types.DataType") - case map[string]*types.DataType: - // if it is a loop receiver on a select statement (e.g. "for $row in select * from table") - s.errs.AddErr(e, ErrType, "invalid usage of compound type. you must reference a field using $compound.field notation") - case []*types.DataType: - // if it is a procedure than returns several scalar values - s.errs.AddErr(e, ErrType, "expected %s to return a single value, returns %d values", prefixMsg(), len(v)) - case *returnsTable: - // if it is a procedure that returns a table - s.errs.AddErr(e, ErrType, "%s returns table, not scalar values", prefixMsg()) - case nil: - // if it is a procedure that returns nothing - s.errs.AddErr(e, ErrType, "%s does not return any value", prefixMsg()) - default: - // unknown - s.errs.AddErr(e, ErrType, "internal bug: could not infer expected type") - } - - return cast(e, types.UnknownType) -} - -// cast will return the type case if one exists. If not, it will simply -// return the passed type. 
-func cast(castable any, fallback *types.DataType) *types.DataType { - if castable == nil { - return fallback - } - - c, ok := castable.(interface{ GetTypeCast() *types.DataType }) - if !ok { - return fallback - } - - if c.GetTypeCast() == nil { - return fallback - } - - return c.GetTypeCast() -} - -func (s *sqlAnalyzer) VisitExpressionLiteral(p0 *ExpressionLiteral) any { - // if type casted by the user, we should just use their value. If not, - // we should assert the type since Postgres might detect it incorrectly. - if p0.TypeCast == nil && !p0.Type.EqualsStrict(types.NullType) { - // cannot cast to null - p0.TypeCast = p0.Type - } else { - return cast(p0, p0.Type) - } - return p0.TypeCast -} - -func (s *sqlAnalyzer) VisitExpressionFunctionCall(p0 *ExpressionFunctionCall) any { - // function call should either be against a known function, or a procedure. - fn, ok := Functions[p0.Name] - if !ok { - // if not found, it might be a schema procedure. - proc, found := s.schema.FindProcedure(p0.Name) - if !found { - s.errs.AddErr(p0, ErrUnknownFunctionOrProcedure, p0.Name) - return cast(p0, types.UnknownType) - } - - if !proc.IsView() { - s.sqlResult.Mutative = true - } - - // if it is a procedure, it cannot use distinct or * - if p0.Distinct { - s.errs.AddErr(p0, ErrFunctionSignature, "cannot use DISTINCT when calling procedure %s", p0.Name) - return cast(p0, types.UnknownType) - } - if p0.Star { - s.errs.AddErr(p0, ErrFunctionSignature, "cannot use * when calling procedure %s", p0.Name) - return cast(p0, types.UnknownType) - } - - // verify the inputs - if len(p0.Args) != len(proc.Parameters) { - s.errs.AddErr(p0, ErrFunctionSignature, "expected %d arguments, received %d", len(proc.Parameters), len(p0.Args)) - return cast(p0, types.UnknownType) - } - - for i, arg := range p0.Args { - dt, ok := arg.Accept(s).(*types.DataType) - if !ok { - return s.expressionTypeErr(arg) - } - - if !dt.Equals(proc.Parameters[i].Type) { - return s.typeErr(arg, dt, proc.Parameters[i].Type) - } - } - - return s.returnProcedureReturnExpr(p0, p0.Name, proc.Returns) - } - - if s.sqlCtx._inOrdering && s.sqlCtx._inAggregate { - s.errs.AddErr(p0, ErrOrdering, "cannot use aggregate functions in ORDER BY clause") - return cast(p0, types.UnknownType) - } - - // the function is a built in function. If using DISTINCT, it needs to be an aggregate - // if using *, it needs to support it. - if p0.Distinct && !fn.IsAggregate { - s.errs.AddErr(p0, ErrFunctionSignature, "DISTINCT can only be used with aggregate functions") - return cast(p0, types.UnknownType) - } - - if fn.IsAggregate { - s.sqlCtx._inAggregate = true - s.sqlCtx._containsAggregate = true - defer func() { s.sqlCtx._inAggregate = false }() - } - - // if the function is called with *, we need to ensure it supports it. - // If not, then we validate all args and return the type. 
- var returnType *types.DataType - if p0.Star { - if fn.StarArgReturn == nil { - s.errs.AddErr(p0, ErrFunctionSignature, "function does not support *") - return cast(p0, types.UnknownType) - } - - // if calling with *, it must have no args - if len(p0.Args) != 0 { - s.errs.AddErr(p0, ErrFunctionSignature, "function does not accept arguments when using *") - return cast(p0, types.UnknownType) - } - - returnType = fn.StarArgReturn - } else { - argTyps := make([]*types.DataType, len(p0.Args)) - for i, arg := range p0.Args { - dt, ok := arg.Accept(s).(*types.DataType) - if !ok { - return s.expressionTypeErr(arg) - } - - argTyps[i] = dt - } - - var err error - returnType, err = fn.ValidateArgs(argTyps) - if err != nil { - s.errs.AddErr(p0, ErrFunctionSignature, err.Error()) - return cast(p0, types.UnknownType) - } - } - - // callers of this visitor know that a nil return means a function does not - // return anything. We explicitly return nil instead of a nil *types.DataType - if returnType == nil { - return nil - } - - return cast(p0, returnType) -} - -func (s *sqlAnalyzer) VisitExpressionForeignCall(p0 *ExpressionForeignCall) any { - if s.sqlCtx.isInlineAction { - s.errs.AddErr(p0, ErrFunctionSignature, "foreign calls are not supported in in-line action statements") - } - - // foreign call must be defined as a foreign procedure - proc, found := s.schema.FindForeignProcedure(p0.Name) - if !found { - s.errs.AddErr(p0, ErrUnknownFunctionOrProcedure, p0.Name) - return cast(p0, types.UnknownType) - } - - if len(p0.ContextualArgs) != 2 { - s.errs.AddErr(p0, ErrFunctionSignature, "expected 2 contextual arguments, received %d", len(p0.ContextualArgs)) - return cast(p0, types.UnknownType) - } - - // contextual args have to be strings - for _, ctxArgs := range p0.ContextualArgs { - dt, ok := ctxArgs.Accept(s).(*types.DataType) - if !ok { - return s.expressionTypeErr(ctxArgs) - } - - s.expect(ctxArgs, dt, types.TextType) - } - - // verify the inputs - if len(p0.Args) != len(proc.Parameters) { - s.errs.AddErr(p0, ErrFunctionSignature, "expected %d arguments, received %d", len(proc.Parameters), len(p0.Args)) - return cast(p0, types.UnknownType) - } - - for i, arg := range p0.Args { - dt, ok := arg.Accept(s).(*types.DataType) - if !ok { - return s.expressionTypeErr(arg) - } - - if !dt.Equals(proc.Parameters[i]) { - return s.typeErr(arg, dt, proc.Parameters[i]) - } - } - - return s.returnProcedureReturnExpr(p0, p0.Name, proc.Returns) -} - -// returnProcedureReturnExpr handles a procedure return used as an expression return. It mandates -// that the procedure returns a single value, or a table. -func (s *sqlAnalyzer) returnProcedureReturnExpr(p0 ExpressionCall, procedureName string, ret *types.ProcedureReturn) any { - // if an expression calls a function, it should return exactly one value or a table. - if ret == nil { - if p0.GetTypeCast() != nil { - s.errs.AddErr(p0, ErrType, "cannot typecast procedure %s because does not return a value", procedureName) - } - return nil - } - - // if it returns a table, we need to return it as a set of attributes. 
- if ret.IsTable { - attrs := make([]*Attribute, len(ret.Fields)) - for i, f := range ret.Fields { - attrs[i] = &Attribute{ - Name: f.Name, - Type: f.Type, - } - } - - return &returnsTable{ - attrs: attrs, - } - } - - switch len(ret.Fields) { - case 0: - s.errs.AddErr(p0, ErrFunctionSignature, "procedure %s does not return a value", procedureName) - return cast(p0, types.UnknownType) - case 1: - return cast(p0, ret.Fields[0].Type) - default: - if p0.GetTypeCast() != nil { - s.errs.AddErr(p0, ErrType, "cannot type cast multiple return values") - } - - retVals := make([]*types.DataType, len(ret.Fields)) - for i, f := range ret.Fields { - retVals[i] = f.Type.Copy() - } - - return retVals - } -} - -// returnsTable is a special struct returned by returnProcedureReturnExpr when a procedure returns a table. -// It is used internally to detect when a procedure returns a table, so that we can properly throw type errors -// with helpful messages when a procedure returning a table is used in a position where a scalar value is expected. -type returnsTable struct { - attrs []*Attribute -} - -func (s *sqlAnalyzer) VisitExpressionVariable(p0 *ExpressionVariable) any { - dt, ok := s.blockContext.variables[p0.String()] - if !ok { - // if not found, it could be an anonymous variable. - anonVar, ok := s.blockContext.anonymousVariables[p0.String()] - if ok { - // if it is anonymous, we cannot type cast, since it is a compound type. - if p0.GetTypeCast() != nil { - s.errs.AddErr(p0, ErrType, "cannot type cast compound variable") - } - - return anonVar - } - - // if not found, then var does not exist. - // for raw SQL queries, this is ok. For procedures and actions, this is an error. - if !s.sqlCtx.inLoneSQL { - s.errs.AddErr(p0, ErrUndeclaredVariable, p0.String()) - } - return cast(p0, types.UnknownType) - } - - return cast(p0, dt) -} - -func (s *sqlAnalyzer) VisitExpressionArrayAccess(p0 *ExpressionArrayAccess) any { - if s.sqlCtx.isInlineAction { - s.errs.AddErr(p0, ErrAssignment, "array access is not supported in in-line action statements") - } - - var isArray bool - if p0.Index != nil { - // if single index, result is not an array - idxAttr, ok := p0.Index.Accept(s).(*types.DataType) - if !ok { - return s.expressionTypeErr(p0.Index) - } - if !idxAttr.Equals(types.IntType) { - return s.typeErr(p0.Index, idxAttr, types.IntType) - } - } else { - // if multiple indexes, result is an array - isArray = true - for _, idx := range p0.FromTo { - if idx == nil { - continue - } - idxAttr, ok := idx.Accept(s).(*types.DataType) - if !ok { - return s.expressionTypeErr(idx) - } - if !idxAttr.Equals(types.IntType) { - return s.typeErr(idx, idxAttr, types.IntType) - } - } - } - - arrAttr, ok := p0.Array.Accept(s).(*types.DataType) - if !ok { - return s.expressionTypeErr(p0.Array) - } - - if !arrAttr.IsArray { - s.errs.AddErr(p0.Array, ErrType, "expected array") - return cast(p0, types.UnknownType) - } - - return cast(p0, &types.DataType{ - Name: arrAttr.Name, - Metadata: arrAttr.Metadata, - IsArray: isArray, - }) -} - -func (s *sqlAnalyzer) VisitExpressionMakeArray(p0 *ExpressionMakeArray) any { - if s.sqlCtx.isInlineAction { - s.errs.AddErr(p0, ErrAssignment, "array instantiation is not supported in in-line action statements") - } - - if len(p0.Values) == 0 { - s.errs.AddErr(p0, ErrAssignment, "array instantiation must have at least one element") - return cast(p0, types.UnknownType) - } - - first, ok := p0.Values[0].Accept(s).(*types.DataType) - if !ok { - return s.expressionTypeErr(p0.Values[0]) - } - - for _, v := range 
p0.Values { - typ, ok := v.Accept(s).(*types.DataType) - if !ok { - return s.expressionTypeErr(v) - } - - if !typ.Equals(first) { - return s.typeErr(v, typ, first) - } - } - - return cast(p0, &types.DataType{ - Name: first.Name, - Metadata: first.Metadata, - IsArray: true, - }) -} - -func (s *sqlAnalyzer) VisitExpressionFieldAccess(p0 *ExpressionFieldAccess) any { - if s.sqlCtx.isInlineAction { - s.errs.AddErr(p0, ErrAssignment, "field access is not supported in in-line action statements") - } - - // field access needs to be accessing a compound type. - // currently, compound types can only be anonymous variables declared - // as loop receivers. - anonType, ok := p0.Record.Accept(s).(map[string]*types.DataType) - if !ok { - s.errs.AddErr(p0.Record, ErrType, "cannot access field on non-compound type") - return cast(p0, types.UnknownType) - } - - dt, ok := anonType[p0.Field] - if !ok { - s.errs.AddErr(p0, ErrType, "unknown field %s", p0.Field) - return cast(p0, types.UnknownType) - } - - return cast(p0, dt) -} - -func (s *sqlAnalyzer) VisitExpressionParenthesized(p0 *ExpressionParenthesized) any { - dt, ok := p0.Inner.Accept(s).(*types.DataType) - if !ok { - return s.expressionTypeErr(p0.Inner) - } - - return cast(p0, dt) -} - -func (s *sqlAnalyzer) VisitExpressionComparison(p0 *ExpressionComparison) any { - left, ok := p0.Left.Accept(s).(*types.DataType) - if !ok { - return s.expressionTypeErr(p0.Left) - } - - right, ok := p0.Right.Accept(s).(*types.DataType) - if !ok { - return s.expressionTypeErr(p0.Right) - } - - if !left.Equals(right) { - return s.typeErr(p0.Right, right, left) - } - - return cast(p0, types.BoolType) -} - -func (s *sqlAnalyzer) VisitExpressionLogical(p0 *ExpressionLogical) any { - if s.sqlCtx.isInlineAction { - s.errs.AddErr(p0, ErrAssignment, "logical expressions are not supported in in-line action statements") - } - - left, ok := p0.Left.Accept(s).(*types.DataType) - if !ok { - return s.expressionTypeErr(p0.Left) - } - - right, ok := p0.Right.Accept(s).(*types.DataType) - if !ok { - return s.expressionTypeErr(p0.Right) - } - - if !left.Equals(types.BoolType) { - return s.typeErr(p0.Left, left, types.BoolType) - } - - if !right.Equals(types.BoolType) { - return s.typeErr(p0.Right, right, types.BoolType) - } - - return cast(p0, types.BoolType) -} - -func (s *sqlAnalyzer) VisitExpressionArithmetic(p0 *ExpressionArithmetic) any { - left, ok := p0.Left.Accept(s).(*types.DataType) - if !ok { - return s.expressionTypeErr(p0.Left) - } - - right, ok := p0.Right.Accept(s).(*types.DataType) - if !ok { - return s.expressionTypeErr(p0.Right) - } - - // both must be numeric UNLESS it is a concat - if p0.Operator == ArithmeticOperatorConcat { - if !left.Equals(types.TextType) || !right.Equals(types.TextType) { - // Postgres supports concatenation on non-text types, but we do not, - // so we give a more descriptive error here. - // see the note at the top of: https://www.postgresql.org/docs/16.1/functions-string.html - s.errs.AddErr(p0.Left, ErrType, "concatenation only allowed on text types. received %s and %s", left.String(), right.String()) - return cast(p0, types.UnknownType) - } - } else { - s.expectedNumeric(p0.Left, left) - } - - // we check this after to return a more helpful error message if - // the user is not concatenating strings. 
-	if !left.Equals(right) {
-		return s.typeErr(p0.Right, right, left)
-	}
-
-	return cast(p0, left)
-}
-
-func (s *sqlAnalyzer) VisitExpressionUnary(p0 *ExpressionUnary) any {
-	e, ok := p0.Expression.Accept(s).(*types.DataType)
-	if !ok {
-		return s.expressionTypeErr(p0.Expression)
-	}
-
-	switch p0.Operator {
-	default:
-		panic("unknown unary operator")
-	case UnaryOperatorPos:
-		s.expectedNumeric(p0.Expression, e)
-	case UnaryOperatorNeg:
-		s.expectedNumeric(p0.Expression, e)
-
-		if e.Equals(types.Uint256Type) {
-			s.errs.AddErr(p0.Expression, ErrType, "cannot negate uint256")
-			return cast(p0, types.UnknownType)
-		}
-	case UnaryOperatorNot:
-		s.expect(p0.Expression, e, types.BoolType)
-	}
-
-	return cast(p0, e)
-}
-
-func (s *sqlAnalyzer) VisitExpressionColumn(p0 *ExpressionColumn) any {
-	if s.sqlCtx.isInlineAction {
-		s.errs.AddErr(p0, ErrAssignment, "column references are not supported in in-line action statements")
-	}
-
-	// There is a special case: if we are within an ORDER BY clause,
-	// we can access columns in the result set. We should search that first
-	// before searching all joined tables, as result set columns with conflicting
-	// names are given precedence over joined tables.
-	if s.sqlCtx._inOrdering && p0.Table == "" {
-		attr := findAttribute(s.sqlCtx._result, p0.Column)
-		// short-circuit if we find the column, otherwise proceed to normal search
-		if attr != nil {
-			return cast(p0, attr.Type)
-		}
-	}
-
-	// If we are in an upsert and the column references a column name in the target table
-	// AND the table is not specified, we need to throw an ambiguity error. For conflict tables,
-	// the user HAS to specify whether the upsert value is from the existing table or excluded table.
-	if s.sqlCtx.inConflict && p0.Table == "" {
-		mainTbl, ok := s.sqlCtx.joinedTables[s.sqlCtx.targetTable]
-		// if not ok, then we are in a subquery or something else, and we can ignore this check.
-		if ok {
-			if _, ok = mainTbl.FindColumn(p0.Column); ok {
-				s.errs.AddErr(p0, ErrAmbiguousConflictTable, `upsert value is ambiguous. specify whether the column is from "%s" or "%s"`, s.sqlCtx.targetTable, tableExcluded)
-				return cast(p0, types.UnknownType)
-			}
-		}
-	}
-
-	// findColumn accounts for empty tables in search, so we do not have to
-	// worry about it being qualified or not.
- relName, col, msg, err := s.sqlCtx.findAttribute(p0.Table, p0.Column) - if err != nil { - s.errs.AddErr(p0, err, msg) - return cast(p0, types.UnknownType) - } - - if s.sqlCtx._inAggregate { - if s.sqlCtx._columnInAggregate != nil { - s.errs.AddErr(p0, ErrAggregate, "cannot use multiple columns in aggregate function args") - return cast(p0, types.UnknownType) - } - - s.sqlCtx._columnInAggregate = &[2]string{relName, col.Name} - } else { - s.sqlCtx._columnsOutsideAggregate = append(s.sqlCtx._columnsOutsideAggregate, [2]string{relName, col.Name}) - } - - return cast(p0, col.Type) -} - -var supportedCollations = map[string]struct{}{ - "nocase": {}, -} - -func (s *sqlAnalyzer) VisitExpressionCollate(p0 *ExpressionCollate) any { - if s.sqlCtx.isInlineAction { - s.errs.AddErr(p0, ErrAssignment, "collate is not supported in in-line action statements") - } - - e, ok := p0.Expression.Accept(s).(*types.DataType) - if !ok { - return s.expressionTypeErr(p0.Expression) - } - - if !e.Equals(types.TextType) { - return s.typeErr(p0.Expression, e, types.TextType) - } - - _, ok = supportedCollations[p0.Collation] - if !ok { - s.errs.AddErr(p0, ErrCollation, `unsupported collation "%s"`, p0.Collation) - } - - return cast(p0, e) -} - -func (s *sqlAnalyzer) VisitExpressionStringComparison(p0 *ExpressionStringComparison) any { - if s.sqlCtx.isInlineAction { - s.errs.AddErr(p0, ErrAssignment, "string comparison is not supported in in-line action statements") - } - - left, ok := p0.Left.Accept(s).(*types.DataType) - if !ok { - return s.expressionTypeErr(p0.Left) - } - - right, ok := p0.Right.Accept(s).(*types.DataType) - if !ok { - return s.expressionTypeErr(p0.Right) - } - - if !left.Equals(types.TextType) { - return s.typeErr(p0.Left, left, types.TextType) - } - - if !right.Equals(types.TextType) { - return s.typeErr(p0.Right, right, types.TextType) - } - - return cast(p0, types.BoolType) -} - -func (s *sqlAnalyzer) VisitExpressionIs(p0 *ExpressionIs) any { - if s.sqlCtx.isInlineAction { - s.errs.AddErr(p0, ErrAssignment, "IS expression is not supported in in-line action statements") - } - - left, ok := p0.Left.Accept(s).(*types.DataType) - if !ok { - return s.expressionTypeErr(p0.Left) - } - - right, ok := p0.Right.Accept(s).(*types.DataType) - if !ok { - return s.expressionTypeErr(p0.Right) - } - - // right has to be null, unless distinct is true. 
If distinct is true, - // then left and right must be the same type - if p0.Distinct { - if !left.Equals(right) { - return s.typeErr(p0.Right, right, left) - } - } else { - if !right.Equals(types.NullType) { - return s.typeErr(p0.Right, right, types.NullType) - } - } - - return cast(p0, types.BoolType) -} - -func (s *sqlAnalyzer) VisitExpressionIn(p0 *ExpressionIn) any { - if s.sqlCtx.isInlineAction { - s.errs.AddErr(p0, ErrAssignment, "IN expression is not supported in in-line action statements") - } - - exprType, ok := p0.Expression.Accept(s).(*types.DataType) - if !ok { - return s.expressionTypeErr(p0.Expression) - } - - switch { - case len(p0.List) > 0: - for _, e := range p0.List { - dt, ok := e.Accept(s).(*types.DataType) - if !ok { - return s.expressionTypeErr(e) - } - - if !dt.Equals(exprType) { - return s.typeErr(e, dt, exprType) - } - } - case p0.Subquery != nil: - rel, ok := p0.Subquery.Accept(s).([]*Attribute) - if !ok { - panic("expected query to return attributes") - } - - if len(rel) != 1 { - s.errs.AddErr(p0.Subquery, ErrResultShape, "subquery expressions must return exactly 1 column, received %d", len(rel)) - return cast(p0, types.UnknownType) - } - - if !rel[0].Type.Equals(exprType) { - return s.typeErr(p0.Subquery, rel[0].Type, exprType) - } - default: - panic("list or subquery must be set for in expression") - } - - return cast(p0, types.BoolType) -} - -func (s *sqlAnalyzer) VisitExpressionBetween(p0 *ExpressionBetween) any { - if s.sqlCtx.isInlineAction { - s.errs.AddErr(p0, ErrAssignment, "BETWEEN expression is not supported in in-line action statements") - } - - between, ok := p0.Expression.Accept(s).(*types.DataType) - if !ok { - return s.expressionTypeErr(p0.Expression) - } - - lower, ok := p0.Lower.Accept(s).(*types.DataType) - if !ok { - return s.expressionTypeErr(p0.Lower) - } - - upper, ok := p0.Upper.Accept(s).(*types.DataType) - if !ok { - return s.expressionTypeErr(p0.Upper) - } - - if !between.Equals(lower) { - return s.typeErr(p0.Lower, lower, between) - } - - if !between.Equals(upper) { - return s.typeErr(p0.Upper, upper, between) - } - - s.expectedNumeric(p0.Expression, between) - - return cast(p0, types.BoolType) -} - -func (s *sqlAnalyzer) VisitExpressionSubquery(p0 *ExpressionSubquery) any { - if s.sqlCtx.isInlineAction { - s.errs.AddErr(p0, ErrAssignment, "subquery is not supported in in-line action statements") - } - - // subquery should return a table - rel, ok := p0.Subquery.Accept(s).([]*Attribute) - if !ok { - panic("expected query to return attributes") - } - - if len(rel) != 1 { - s.errs.AddErr(p0, ErrResultShape, "subquery expressions must return exactly 1 column, received %d", len(rel)) - return cast(p0, types.UnknownType) - } - - if p0.Exists { - if p0.GetTypeCast() != nil { - s.errs.AddErr(p0, ErrType, "cannot type cast subquery with EXISTS") - } - return types.BoolType - } - - return cast(p0, rel[0].Type) -} - -func (s *sqlAnalyzer) VisitExpressionCase(p0 *ExpressionCase) any { - if s.sqlCtx.isInlineAction { - s.errs.AddErr(p0, ErrAssignment, "CASE expression is not supported in in-line action statements") - } - - // all whens in a case statement must be bool, unless there is an expression - // that occurs after CASE. In that case, whens all must match the case expression type. - expectedWhenType := types.BoolType - if p0.Case != nil { - caseType, ok := p0.Case.Accept(s).(*types.DataType) - if !ok { - return s.expressionTypeErr(p0.Case) - } - - expectedWhenType = caseType - } - - // all thens and else must return the same type. 
- var returnType *types.DataType - for _, w := range p0.WhenThen { - when, ok := w[0].Accept(s).(*types.DataType) - if !ok { - return s.expressionTypeErr(w[0]) - } - - if !when.Equals(expectedWhenType) { - return s.typeErr(w[0], when, expectedWhenType) - } - - then, ok := w[1].Accept(s).(*types.DataType) - if !ok { - return s.expressionTypeErr(w[1]) - } - - // if return type is not set, set it to the first then - if returnType == nil { - returnType = then - } - // if the return type is of type null, we should keep trying - // to reset until we get a non-null type - if returnType.EqualsStrict(types.NullType) { - returnType = then - } - - if !then.Equals(returnType) { - return s.typeErr(w[1], then, returnType) - } - } - - if p0.Else != nil { - elseType, ok := p0.Else.Accept(s).(*types.DataType) - if !ok { - return s.expressionTypeErr(p0.Else) - } - - if returnType != nil && !elseType.Equals(returnType) { - return s.typeErr(p0.Else, elseType, returnType) - } - } - - return cast(p0, returnType) -} - -// The below methods are responsible for manipulating the sql context and identifying -// the resulting relations. - -func (s *sqlAnalyzer) VisitCommonTableExpression(p0 *CommonTableExpression) any { - // check that the table does not already exist - _, ok := s.sqlCtx.getOuterRelation(p0.Name) - if ok { - s.errs.AddErr(p0, ErrTableAlreadyExists, p0.Name) - return nil - } - - _, ok = s.schema.FindTable(p0.Name) - if ok { - s.errs.AddErr(p0, ErrTableAlreadyExists, p0.Name) - return nil - } - - rel, ok := p0.Query.Accept(s).([]*Attribute) - if !ok { - // panic because it is an internal error. - // I guess we could just let it panic without ok, - // but this is more descriptive. - panic("expected query to return attributes") - } - - // cte columns are optional - if len(p0.Columns) > 0 { - if len(p0.Columns) != len(rel) { - s.errs.AddErr(p0, ErrResultShape, "expected %d columns, received %d", len(p0.Columns), len(rel)) - return nil - } - - // rename the columns and add the relation to the outer scope - for i, col := range p0.Columns { - rel[i].Name = col - } - } - - s.sqlCtx.outerRelations = append(s.sqlCtx.outerRelations, &Relation{ - Name: p0.Name, - Attributes: rel, - }) - - return nil -} - -func (s *sqlAnalyzer) VisitSQLStatement(p0 *SQLStatement) any { - for _, cte := range p0.CTEs { - cte.Accept(s) - } - - rel, ok := p0.SQL.Accept(s).([]*Attribute) - if !ok { - panic("expected query to return attributes") - } - - return rel -} - -func (s *sqlAnalyzer) VisitSelectStatement(p0 *SelectStatement) any { - // for each subquery, we need to create a new scope. - s.sqlCtx.inSelect = true - - // all select cores will need their own scope. They all also need to have the - // same shape as each other - s.sqlCtx.scope() - rel1, ok := p0.SelectCores[0].Accept(s).([]*Attribute) - if !ok { - panic("expected query to return attributes") - } - // keep the rel1 scope as we may need to reference the joined - // tables later. 
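The CASE handling above infers one result type across all THEN/ELSE branches, skipping untyped NULL branches until a concrete type appears and then requiring every later branch to agree with it. A compact sketch of that inference under a simplified string-based type representation (an assumption made only for illustration):

package main

import (
	"errors"
	"fmt"
)

// Minimal sketch; branchType is an illustrative stand-in for a resolved data type.
// An empty string models an untyped NULL branch.
type branchType string

const nullType branchType = ""

// inferCaseType picks the CASE result type: the first non-NULL branch type
// wins, and every other non-NULL branch must agree with it.
func inferCaseType(branches []branchType) (branchType, error) {
	var result branchType = nullType
	for _, b := range branches {
		if result == nullType {
			result = b
			continue
		}
		if b != nullType && b != result {
			return nullType, fmt.Errorf("branch type %q does not match %q", b, result)
		}
	}
	if result == nullType {
		return nullType, errors.New("could not infer a type from the CASE branches")
	}
	return result, nil
}

func main() {
	t, err := inferCaseType([]branchType{nullType, "int", "int"})
	fmt.Println(t, err) // int <nil>

	_, err = inferCaseType([]branchType{"int", "text"})
	fmt.Println(err) // mismatch error
}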
- rel1Scope := s.sqlCtx.copy() - s.sqlCtx.popScope() - - isCompound := false - compoundHasGroupBy := false - // we visit the rest of the select cores to check the shape - for _, core := range p0.SelectCores[1:] { - isCompound = true - if core.GroupBy != nil { - compoundHasGroupBy = true - } - - s.sqlCtx.scope() - rel2, ok := core.Accept(s).([]*Attribute) - if !ok { - panic("expected query to return attributes") - } - s.sqlCtx.popScope() - - if !ShapesMatch(rel1, rel2) { - s.errs.AddErr(core, ErrResultShape, "expected shape to match previous select core") - return rel1 - } - } - - // we want to re-set the rel1 scope, since it is used in ordering, - // as well as grouping re-checks if the statement is not a compound select. - // e.g. "select a, b from t1 union select c, d from t2 order by a" - oldScope := s.sqlCtx - s.sqlCtx = rel1Scope - defer func() { s.sqlCtx = oldScope }() - - // If it is not a compound select, we should use the scope from the first select core, - // so that we can analyze joined tables in the order and limit clauses. It if is a compound - // select, then we should flatten all joined tables into a single anonymous table. This can - // then be referenced in order bys and limits. If there are column conflicts in the flattened column, - // we should return an error, since there will be no way for us to inform postgres of our default ordering. - if isCompound { - // we can simply assign this to the rel1Scope, since we will not - // need it past this point. We can add it as an unnamed relation. - rel1Scope.joinedRelations = []*Relation{{Attributes: rel1}} - - // if a compound select, then we have the following default ordering rules: - // 1. All columns returned will be ordered in the order they are returned. - // 2. If the statement includes a group by in one of the select cores, then - // we throw an error. This is a relic of SQLite's rudimentary referencing, however - // since it is such an uncommon query anyways, we have decided to not support it - // until we have time for more testing. - if compoundHasGroupBy || p0.SelectCores[0].GroupBy != nil { - s.errs.AddErr(p0, ErrAggregate, "cannot use group by in compound select") - return rel1 - } - - // order all flattened returns - for _, attr := range rel1 { - p0.Ordering = append(p0.Ordering, &OrderingTerm{ - Position: unknownPosition(), - Expression: &ExpressionColumn{ - Position: unknownPosition(), - // leave column blank, since we are referencing a column that no - // longer knows what table it is from due to the compound. - Column: attr.Name, - }, - }) - } - } else { - // if it is not a compound, then we apply the following default ordering rules (after the user defined): - // 1. Each primary key for each schema table joined is ordered in ascending order. - // The tables and columns for all joined tables will be sorted alphabetically. - // If table aliases are used, they will be used instead of the name. This must include - // subqueries and function joins; even though those are ordered, they still need to - // be ordered in the outermost select. - // see: https://www.reddit.com/r/PostgreSQL/comments/u6icv9/is_original_sort_order_preserve_after_joining/ - // TODO: we can likely make some significant optimizations here by only applying ordering - // on the outermost query UNLESS aggregates are used in the subquery, but that is a future - // optimization. - // 2. If the select core contains DISTINCT, then the above does not apply, and - // we order by all columns returned, in the order they are returned. - // 3. 
If there is a group by clause, none of the above apply, and instead we order by - // all columns specified in the group by. - // 4. If there is an aggregate clause with no group by, then no ordering is applied. - - // addressing point 4: if there is an aggregate clause with no group by, then no ordering is applied. - if s.sqlCtx._containsAggregateWithoutGroupBy { - // do nothing. - } else if p0.SelectCores[0].GroupBy != nil { - // reset and visit the group by to get the columns - var colsToOrder [][2]string - for _, g := range p0.SelectCores[0].GroupBy { - s.sqlCtx.setTempValuesToZero() - g.Accept(s) - - if len(s.sqlCtx._columnsOutsideAggregate) > 1 { - s.errs.AddErr(g, ErrAggregate, "cannot use multiple columns in group by") - return rel1 - } - - colsToOrder = append(colsToOrder, s.sqlCtx._columnsOutsideAggregate...) - } - - // order the columns - for _, col := range colsToOrder { - p0.Ordering = append(p0.Ordering, &OrderingTerm{ - Position: unknownPosition(), - Expression: &ExpressionColumn{ - Position: unknownPosition(), - Table: col[0], - Column: col[1], - }, - }) - } - } else if p0.SelectCores[0].Distinct { - // if distinct, order by all columns returned - for _, attr := range rel1 { - p0.Ordering = append(p0.Ordering, &OrderingTerm{ - Position: unknownPosition(), - Expression: &ExpressionColumn{ - Position: unknownPosition(), - Table: "", - Column: attr.Name, - }, - }) - } - } else { - // if not distinct, order by primary keys in all joined tables - for _, rel := range rel1Scope.joinedRelations { - // if it is a table, we only order by primary key. - // otherwise, order by all columns. - tbl, ok := rel1Scope.joinedTables[rel.Name] - if ok { - pks, err := tbl.GetPrimaryKey() - if err != nil { - s.errs.AddErr(p0, err, "could not get primary key for table %s", rel.Name) - } - - for _, pk := range pks { - p0.Ordering = append(p0.Ordering, &OrderingTerm{ - Position: unknownPosition(), - Expression: &ExpressionColumn{ - Position: unknownPosition(), - Table: rel.Name, - Column: pk, - }, - }) - } - - continue - } - - // if not a table, order by all columns - for _, attr := range rel.Attributes { - p0.Ordering = append(p0.Ordering, &OrderingTerm{ - Position: unknownPosition(), - Expression: &ExpressionColumn{ - Position: unknownPosition(), - Table: rel.Name, - Column: attr.Name, - }, - }) - } - } - } - } - - // we need to inform the analyzer that we are in ordering - s.sqlCtx._inOrdering = true - s.sqlCtx._result = rel1 - - // if the user is trying to order and there is an aggregate without group by, we should throw an error. - if s.sqlCtx._containsAggregateWithoutGroupBy && len(p0.Ordering) > 0 { - s.errs.AddErr(p0, ErrAggregate, "cannot use order by with aggregate function without group by") - return rel1 - } - // analyze the ordering, limit, and offset - for _, o := range p0.Ordering { - o.Accept(s) - } - - // unset the ordering context - s.sqlCtx._inOrdering = false - s.sqlCtx._result = nil - - if p0.Limit != nil { - dt, ok := p0.Limit.Accept(s).(*types.DataType) - if !ok { - s.expressionTypeErr(p0.Limit) - return rel1 - } - - s.expectedNumeric(p0.Limit, dt) - } - - if p0.Offset != nil { - dt, ok := p0.Offset.Accept(s).(*types.DataType) - if !ok { - s.expressionTypeErr(p0.Offset) - return rel1 - } - - s.expectedNumeric(p0.Offset, dt) - - } - - return rel1 -} - -// There are some rules for select cores that are necessary for non-determinism: -// 1. If a SELECT is DISTINCT and contains a GROUP BY, we return an error since we cannot -// order it. -// 2. 
If a result column uses an aggregate function AND there is no GROUP BY, then all -// result columns must be aggregate functions if they reference a column in a table. -// 3. If there is a GROUP BY, then all result columns must be aggregate functions UNLESS -// the column is specified in the GROUP BY -func (s *sqlAnalyzer) VisitSelectCore(p0 *SelectCore) any { - // we first need to visit the from and join in order to join - // all tables to the context. - // we will visit columns last since it will determine our return type. - if p0.From != nil { - p0.From.Accept(s) - for _, j := range p0.Joins { - j.Accept(s) - } - } - - if p0.Where != nil { - s.sqlCtx.setTempValuesToZero() - whereType, ok := p0.Where.Accept(s).(*types.DataType) - if !ok { - return s.expressionTypeErr(p0.Where) - } - - // if it contains an aggregate, throw an error - if s.sqlCtx._containsAggregate { - s.errs.AddErr(p0.Where, ErrAggregate, "cannot use aggregate function in WHERE") - return []*Attribute{} - } - - s.expect(p0.Where, whereType, types.BoolType) - } - - hasGroupBy := false - // colsInGroupBy tracks the table and column names that are in the group by. - colsInGroupBy := make(map[[2]string]struct{}) - for _, g := range p0.GroupBy { - hasGroupBy = true - - // we need to get all columns used in the group by. - // If more than one column is used per group by, or if an aggregate is - // used, we return an error. - s.sqlCtx.setTempValuesToZero() - - // group by return type is not important - g.Accept(s) - - if s.sqlCtx._containsAggregate { - s.errs.AddErr(g, ErrAggregate, "cannot use aggregate function in group by") - return []*Attribute{} - } - if len(s.sqlCtx._columnsOutsideAggregate) != 1 { - s.errs.AddErr(g, ErrAggregate, "group by must reference exactly one column") - return []*Attribute{} - } - - _, ok := colsInGroupBy[s.sqlCtx._columnsOutsideAggregate[0]] - if ok { - s.errs.AddErr(g, ErrAggregate, "cannot use column in group by more than once") - return []*Attribute{} - } - colsInGroupBy[s.sqlCtx._columnsOutsideAggregate[0]] = struct{}{} - - if p0.Having != nil { - s.sqlCtx.setTempValuesToZero() - havingType, ok := p0.Having.Accept(s).(*types.DataType) - if !ok { - return s.expressionTypeErr(p0.Having) - } - - // columns in having must be in the group by if not in aggregate - for _, col := range s.sqlCtx._columnsOutsideAggregate { - if _, ok := colsInGroupBy[col]; !ok { - s.errs.AddErr(p0.Having, ErrAggregate, "column used in having must be in group by, or must be in aggregate function") - } - } - - // COMMENTING THIS OUT: if a column is in an aggregate in the having, then it is ok if it is not in the group by - // if s.sqlCtx._columnInAggregate != nil { - // if _, ok := colsInGroupBy[*s.sqlCtx._columnInAggregate]; !ok { - // s.errs.AddErr(p0.Having, ErrAggregate, "cannot use column in having if not in group by or in aggregate function") - // } - // } - - s.expect(p0.Having, havingType, types.BoolType) - } - } - - if hasGroupBy && p0.Distinct { - s.errs.AddErr(p0, ErrAggregate, "cannot use DISTINCT with GROUP BY") - return []*Attribute{} - } - - var res []*Attribute - for _, c := range p0.Columns { - // for each result column, we need to check that: - // IF THERE IS A GROUP BY: - // 1. if it is an aggregate, then its column is not in the group by - // 2. for any column that occurs outside of an aggregate, it is also in the group by - // IF THERE IS NOT A GROUP BY: - // 3. 
if there is an aggregate, then it can be the only return column - - // reset to be sure - s.sqlCtx.setTempValuesToZero() - - attrs, ok := c.Accept(s).([]*Attribute) - if !ok { - panic("expected query to return attributes") - } - - if !hasGroupBy && s.sqlCtx._containsAggregate { - if len(p0.Columns) != 1 { - s.errs.AddErr(c, ErrAggregate, "cannot return multiple values in SELECT that uses aggregate function and no group by") - } - s.sqlCtx._containsAggregateWithoutGroupBy = true - } else if hasGroupBy { - // if column used in aggregate, ensure it is not in group by - if s.sqlCtx._columnInAggregate != nil { - if _, ok := colsInGroupBy[*s.sqlCtx._columnInAggregate]; ok { - s.errs.AddErr(c, ErrAggregate, "cannot use column in aggregate function and in group by") - } - } - - // ensure all columns used outside aggregate are in group by - for _, col := range s.sqlCtx._columnsOutsideAggregate { - if _, ok := colsInGroupBy[col]; !ok { - s.errs.AddErr(c, ErrAggregate, "column used outside aggregate must be included in group by") - } - } - } - - var amiguousCol string - var err error - res, amiguousCol, err = Coalesce(append(res, attrs...)...) - if err != nil { - s.errs.AddErr(c, err, amiguousCol) - return res - } - } - - return res -} - -func (s *sqlAnalyzer) VisitRelationTable(p0 *RelationTable) any { - if s.sqlCtx.hasAnonymousTable { - s.errs.AddErr(p0, ErrUnnamedJoin, "statement uses an unnamed subquery or procedure join. to join another table, alias the subquery or procedure") - return []*Attribute{} - } - - // table must either be a common table expression, or a table in the schema. - var rel *Relation - tbl, ok := s.schema.FindTable(p0.Table) - if !ok { - cte, ok := s.sqlCtx.getOuterRelation(p0.Table) - if !ok { - s.errs.AddErr(p0, ErrUnknownTable, p0.Table) - return []*Attribute{} - } - - rel = cte.Copy() - } else { - rel = tableToRelation(tbl) - - // since we have joined a new table, we need to add it to the joined tables. - name := p0.Table - if p0.Alias != "" { - name = p0.Alias - } - - err := s.sqlCtx.join(name, tbl) - if err != nil { - s.errs.AddErr(p0, err, name) - return []*Attribute{} - } - } - - // if there is an alias, we rename the relation - if p0.Alias != "" { - rel.Name = p0.Alias - } - - err := s.sqlCtx.joinRelation(rel) - if err != nil { - s.errs.AddErr(p0, err, p0.Table) - return []*Attribute{} - } - - return nil -} - -func (s *sqlAnalyzer) VisitRelationSubquery(p0 *RelationSubquery) any { - if s.sqlCtx.hasAnonymousTable { - s.errs.AddErr(p0, ErrUnnamedJoin, "statement uses an unnamed subquery or procedure join. 
to join another table, alias the subquery or procedure") - return []*Attribute{} - } - - relation, ok := p0.Subquery.Accept(s).([]*Attribute) - if !ok { - panic("expected query to return attributes") - } - - // alias is usually required for subquery joins - if p0.Alias == "" { - // if alias is not given, then this must be a select and there must be exactly one table joined - if !s.sqlCtx.inSelect { - s.errs.AddErr(p0, ErrUnnamedJoin, "joins against subqueries must be aliased") - return []*Attribute{} - } - - // must be no relations, since this needs to be the first and only relation - if len(s.sqlCtx.joinedRelations) != 0 { - s.errs.AddErr(p0, ErrUnnamedJoin, "joins against subqueries must be aliased") - return []*Attribute{} - } - - s.sqlCtx.hasAnonymousTable = true - } - - err := s.sqlCtx.joinRelation(&Relation{ - Name: p0.Alias, - Attributes: relation, - }) - if err != nil { - s.errs.AddErr(p0, err, p0.Alias) - return []*Attribute{} - } - - return nil -} - -func (s *sqlAnalyzer) VisitRelationFunctionCall(p0 *RelationFunctionCall) any { - if s.sqlCtx.hasAnonymousTable { - s.errs.AddErr(p0, ErrUnnamedJoin, "statement uses an unnamed subquery or procedure join. to join another table, alias the subquery or procedure") - return []*Attribute{} - } - - // the function call here must return []*Attribute - // this logic is handled in returnProcedureReturnExpr. - ret, ok := p0.FunctionCall.Accept(s).(*returnsTable) - if !ok { - s.errs.AddErr(p0, ErrType, "cannot join procedure that does not return type table") - } - - // alias is usually required for subquery joins - if p0.Alias == "" { - // if alias is not given, then this must be a select and there must be exactly one table joined - if !s.sqlCtx.inSelect { - s.errs.AddErr(p0, ErrUnnamedJoin, "joins against procedures must be aliased") - return []*Attribute{} - } - - // must be no relations, since this needs to be the first and only relation - if len(s.sqlCtx.joinedRelations) != 0 { - s.errs.AddErr(p0, ErrUnnamedJoin, "joins against procedures must be aliased") - return []*Attribute{} - } - - s.sqlCtx.hasAnonymousTable = true - } - - err := s.sqlCtx.joinRelation(&Relation{ - Name: p0.Alias, - Attributes: ret.attrs, - }) - if err != nil { - s.errs.AddErr(p0, err, p0.Alias) - return []*Attribute{} - } - - return nil -} - -func (s *sqlAnalyzer) VisitJoin(p0 *Join) any { - // call visit on the comparison to perform regular type checking - p0.Relation.Accept(s) - dt, ok := p0.On.Accept(s).(*types.DataType) - if !ok { - s.expressionTypeErr(p0.On) - return nil - } - - s.expect(p0.On, dt, types.BoolType) - - return nil -} - -func (s *sqlAnalyzer) VisitUpdateStatement(p0 *UpdateStatement) any { - s.sqlResult.Mutative = true - - tbl, msg, err := s.setTargetTable(p0.Table, p0.Alias) - if err != nil { - s.errs.AddErr(p0, err, msg) - return []*Attribute{} - } - - if p0.From != nil { - // we visit from and joins first to fill out the context, since those tables can be - // referenced in the set expression. - p0.From.Accept(s) - for _, j := range p0.Joins { - j.Accept(s) - } - } - - for _, set := range p0.SetClause { - // this calls VisitUpdateSetClause, defined directly below. - attr := set.Accept(s).(*Attribute) - - // we will see if the table being updated has this column, and if it - // is of the correct type. 
- col, ok := tbl.FindColumn(attr.Name) - if !ok { - s.errs.AddErr(set, ErrUnknownColumn, attr.Name) - continue - } - - if !col.Type.Equals(attr.Type) { - s.typeErr(set, attr.Type, col.Type) - } - } - - if p0.Where != nil { - whereType, ok := p0.Where.Accept(s).(*types.DataType) - if !ok { - s.expressionTypeErr(p0.Where) - return []*Attribute{} - } - - s.expect(p0.Where, whereType, types.BoolType) - } - - return []*Attribute{} -} - -// UpdateSetClause will map the updated column to the type it is being -// set to. Since it does not have context as to the table being acted on, -// it is the responsibility of the caller to validate the types. It will simply -// return the column and the type it is being set to, as an attribute. -func (s *sqlAnalyzer) VisitUpdateSetClause(p0 *UpdateSetClause) any { - dt, ok := p0.Value.Accept(s).(*types.DataType) - if !ok { - return s.expressionTypeErr(p0.Value) - } - - return &Attribute{ - Name: p0.Column, - Type: dt, - } -} - -// result columns return []*Attribute -func (s *sqlAnalyzer) VisitResultColumnExpression(p0 *ResultColumnExpression) any { - e, ok := p0.Expression.Accept(s).(*types.DataType) - if !ok { - return s.expressionTypeErr(p0.Expression) - } - - attr := &Attribute{ - Name: p0.Alias, - Type: e, - } - - // ResultColumnExpressions always need to have aliases, unless the expression - // is a column. - if attr.Name == "" { - col, ok := p0.Expression.(*ExpressionColumn) - // if returning a column and not aliased, we give it the column name. - // otherwise, we simply leave the name blank. It will not be referenceable - if ok { - attr.Name = col.Column - } - } - - return []*Attribute{attr} -} - -func (s *sqlAnalyzer) VisitResultColumnWildcard(p0 *ResultColumnWildcard) any { - // if the table is specified, we need to return all columns from that table. - if p0.Table != "" { - tbl, ok := s.sqlCtx.getJoinedRelation(p0.Table) - if !ok { - s.errs.AddErr(p0, ErrUnknownTable, p0.Table) - return []*Attribute{} - } - - return tbl.Attributes - } - - // if table is empty, we flatten all joined relations. - flattened, conflictCol, err := Flatten(s.sqlCtx.joinedRelations...) - if err != nil { - s.errs.AddErr(p0, err, conflictCol) - return []*Attribute{} - } - - return flattened -} - -func (s *sqlAnalyzer) VisitDeleteStatement(p0 *DeleteStatement) any { - s.sqlResult.Mutative = true - - _, msg, err := s.setTargetTable(p0.Table, p0.Alias) - if err != nil { - s.errs.AddErr(p0, err, msg) - return []*Attribute{} - - } - - if p0.From != nil { - p0.From.Accept(s) - for _, j := range p0.Joins { - j.Accept(s) - } - } - - if p0.Where != nil { - whereType, ok := p0.Where.Accept(s).(*types.DataType) - if !ok { - s.expressionTypeErr(p0.Where) - return []*Attribute{} - } - - s.expect(p0.Where, whereType, types.BoolType) - } - - return []*Attribute{} - -} - -func (s *sqlAnalyzer) VisitInsertStatement(p0 *InsertStatement) any { - s.sqlResult.Mutative = true - - tbl, msg, err := s.setTargetTable(p0.Table, p0.Alias) - if err != nil { - s.errs.AddErr(p0, err, msg) - return []*Attribute{} - } - - // all columns specified need to exist within the table - // we will keep track of the types of columns in the order - // they are specified, to match against the values. If columns - // are not specified, we simply get call the table's columns. 
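As the comment above describes, INSERT analysis builds the list of expected column types (either the named columns or, if none are given, every table column in order) and then checks each VALUES tuple against it. A small self-contained sketch of that shape-and-type check, using plain strings as hypothetical stand-ins for the engine's column and type metadata:

package main

import "fmt"

// Minimal sketch; types are represented as plain strings for illustration only.

// checkInsertValues validates VALUES tuples against the expected column types,
// mirroring the arity check and per-value type check performed above.
func checkInsertValues(colTypes []string, rows [][]string) []error {
	var errs []error
	for i, row := range rows {
		if len(row) != len(colTypes) {
			errs = append(errs, fmt.Errorf("row %d: expected %d values, received %d", i+1, len(colTypes), len(row)))
			continue
		}
		for j, valType := range row {
			if valType != colTypes[j] {
				errs = append(errs, fmt.Errorf("row %d, column %d: expected %s, received %s", i+1, j+1, colTypes[j], valType))
			}
		}
	}
	return errs
}

func main() {
	cols := []string{"int", "text"} // e.g. (id, name)
	rows := [][]string{
		{"int", "text"},  // ok
		{"int"},          // wrong arity
		{"text", "text"}, // type mismatch in the first position
	}
	for _, err := range checkInsertValues(cols, rows) {
		fmt.Println(err)
	}
}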
- var colTypes []*types.DataType - if len(p0.Columns) == 0 { - for _, col := range tbl.Columns { - colTypes = append(colTypes, col.Type) - } - } else { - for _, col := range p0.Columns { - c, ok := tbl.FindColumn(col) - if !ok { - s.errs.AddErr(p0, ErrUnknownColumn, col) - return []*Attribute{} - } - colTypes = append(colTypes, c.Type) - } - } - - for _, valList := range p0.Values { - if len(valList) != len(colTypes) { - s.errs.AddErr(p0, ErrResultShape, "expected %d values, received %d", len(colTypes), len(valList)) - return []*Attribute{} - } - - for i, val := range valList { - dt, ok := val.Accept(s).(*types.DataType) - if !ok { - s.expressionTypeErr(val) - return []*Attribute{} - } - - if !dt.Equals(colTypes[i]) { - s.typeErr(val, dt, colTypes[i]) - } - } - } - - if p0.Upsert != nil { - s.sqlCtx.inConflict = true - p0.Upsert.Accept(s) - s.sqlCtx.inConflict = false - } - - return []*Attribute{} - -} - -// setTargetTable joins a table from the schema to the sql context, for -// usage in an insert, delete, or update statement. -// It will return an error if the table is already joined, or if the table -// is not in the schema. Optionally, an alias can be passed, which will join -// the table with the alias name. If there is an error, it returns the error -// and a message. It should only be used in INSERT, DELETE, and UPDATE statements. -func (s *sqlAnalyzer) setTargetTable(table string, alias string) (*types.Table, string, error) { - tbl, ok := s.schema.FindTable(table) - if !ok { - return nil, table, ErrUnknownTable - } - - name := tbl.Name - if alias != "" { - name = alias - } - - err := s.sqlCtx.join(name, tbl) - if err != nil { - return nil, name, err - } - - rel := tableToRelation(tbl) - rel.Name = name - - err = s.sqlCtx.joinRelation(rel) - if err != nil { - return nil, name, err - } - - s.sqlCtx.targetTable = name - - return tbl, "", nil -} - -func (s *sqlAnalyzer) VisitUpsertClause(p0 *UpsertClause) any { - // upsert clause can only be called in an insert. Inserts will - // always have exactly 1 table joined to the context. We will - // need to retrieve the one table, verify all conflict columns - // are valid columns, and validate that any DoUpdate clause - // references a real column and is assigning it to the correct type. - if len(s.sqlCtx.joinedRelations) != 1 { - // panicking because this is an internal bug in context scoping - panic("expected exactly 1 table to be joined in upsert clause") - } - - rel := s.sqlCtx.joinedRelations[0] - for _, col := range p0.ConflictColumns { - _, ok := rel.FindAttribute(col) - if !ok { - s.errs.AddErr(p0, ErrUnknownColumn, "conflict column %s", col) - return nil - } - } - - for _, set := range p0.DoUpdate { - attr := set.Accept(s).(*Attribute) - - foundAttr, ok := rel.FindAttribute(attr.Name) - if !ok { - s.errs.AddErr(p0, ErrUnknownColumn, "update column %s", attr.Name) - continue - } - - if !foundAttr.Type.Equals(attr.Type) { - s.typeErr(set, attr.Type, foundAttr.Type) - return nil - } - } - - if p0.ConflictWhere != nil { - dt, ok := p0.ConflictWhere.Accept(s).(*types.DataType) - if !ok { - s.expressionTypeErr(p0.ConflictWhere) - return nil - } - - s.expect(p0.ConflictWhere, dt, types.BoolType) - } - - if p0.UpdateWhere != nil { - dt, ok := p0.UpdateWhere.Accept(s).(*types.DataType) - if !ok { - s.expressionTypeErr(p0.UpdateWhere) - return nil - } - - s.expect(p0.UpdateWhere, dt, types.BoolType) - } - - return nil -} - -func (s *sqlAnalyzer) VisitOrderingTerm(p0 *OrderingTerm) any { - // visit the expression. 
We do not have to worry about what - // it returns though - p0.Expression.Accept(s) - return nil -} - -// tableToRelation converts a table to a relation. -func tableToRelation(t *types.Table) *Relation { - attrs := make([]*Attribute, len(t.Columns)) - for i, col := range t.Columns { - attrs[i] = &Attribute{ - Name: col.Name, - Type: col.Type.Copy(), - } - } - - return &Relation{ - Name: t.Name, - Attributes: attrs, - } -} - -// procedureContext holds context for the procedure analyzer. -type procedureContext struct { - // procedureDefinition is the definition for the procedure that we are - // currently analyzing. - procedureDefinition *types.Procedure - // activeLoopReceivers track the variable name for the current loop. - // The innermost nested loop will be at the 0-index. If we are - // not in a loop, the slice will be empty. - activeLoopReceivers []string -} - -func newProcedureContext(proc *types.Procedure) *procedureContext { - return &procedureContext{ - procedureDefinition: proc, - } -} - -// loopTargetTracker is used to track the target of a loop. -type loopTargetTracker struct { - // name is the variable name of the loop target. - name *ExpressionVariable - // dataType is the data type of the loop target. - // If the loop target is an anonymous variable, then it will be nil. - dataType *types.DataType -} - -// procedureAnalyzer analyzes the procedural language. Since the procedural -// language can execute sql statements, it uses the sqlAnalyzer. -type procedureAnalyzer struct { - sqlAnalyzer - procCtx *procedureContext - // procResult stores data that the analyzer will return with the parsed procedure. - // The information is used by the code generator to generate the plpgsql code. - procResult struct { - // allLoopReceivers tracks all loop receivers that have occurred over the lifetime - // of the procedure. This is used to generate variables to hold the loop target - // in plpgsql. - allLoopReceivers []*loopTargetTracker - // anonymousReceivers track the data types of procedure return values - // that the user throws away. In the procedure call - // `$var1, _, $var2 := proc_that_returns_3_values()`, the underscore is - // the anonymous receiver. This slice tracks the types for each of the - // receivers as it encounters them, so that it can generate a throw-away - // variable in plpgsql - anonymousReceivers []*types.DataType - // allVariables is a map of all variables declared in the procedure. - // The key is the variable name, and the value is the data type. - // This does not include any variable declared by a FOR LOOP. - allVariables map[string]*types.DataType - } -} - -// markDeclared checks if the variable has been declared in the same procedure body, -// but in a different scope. PLPGSQL cannot handle redeclaration, so we need to check for this. -// It will throw the error in the method, and let the caller continue since this is not a critical -// parsing bug. It will mark the variable as declared if it has not been declared yet. -func (p *procedureAnalyzer) markDeclared(p0 Node, name string, typ *types.DataType) { - dt, ok := p.procResult.allVariables[name] - if !ok { - p.procResult.allVariables[name] = typ - return - } - - if !dt.Equals(typ) { - p.errs.AddErr(p0, ErrCrossScopeDeclaration, `variable %s is declared in a different scope in this procedure as a different type. - This is not supported.`, name) - } -} - -// startProcedureAnalyze starts the analysis of a procedure. 
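markDeclared above exists because the generated PL/pgSQL hoists declarations to the top of the function body, so one name cannot be declared with two different types in different scopes of the same procedure. A minimal sketch of that bookkeeping, with plain strings standing in for data types (hypothetical names, not the engine's API):

package main

import "fmt"

// Minimal sketch; declTracker and its string types are illustrative stand-ins.

// declTracker records every variable declared anywhere in a procedure body,
// mirroring the allVariables map used by the procedure analyzer.
type declTracker struct {
	declared map[string]string // variable name -> type name
}

// markDeclared registers a declaration. Redeclaring a name with the same type
// is tolerated (it maps to a single hoisted variable); redeclaring it with a
// different type is reported as a conflict.
func (d *declTracker) markDeclared(name, typ string) error {
	if existing, ok := d.declared[name]; ok {
		if existing != typ {
			return fmt.Errorf("variable %s already declared as %s, cannot redeclare as %s", name, existing, typ)
		}
		return nil
	}
	d.declared[name] = typ
	return nil
}

func main() {
	t := &declTracker{declared: make(map[string]string)}
	fmt.Println(t.markDeclared("$total", "int"))  // <nil>
	fmt.Println(t.markDeclared("$total", "int"))  // <nil>: same type in another scope is fine
	fmt.Println(t.markDeclared("$total", "text")) // conflict
}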
-func (p *procedureAnalyzer) startSQLAnalyze() { - p.sqlAnalyzer.startSQLAnalyze() -} - -// endProcedureAnalyze ends the analysis of a procedure. -func (p *procedureAnalyzer) endSQLAnalyze(node Node) { - sqlRes := p.sqlAnalyzer.endSQLAnalyze() - if sqlRes.Mutative { - if p.procCtx.procedureDefinition.IsView() { - p.errs.AddErr(node, ErrViewMutatesState, "SQL statement mutates state in view procedure") - } - } -} - -var _ Visitor = (*procedureAnalyzer)(nil) - -func (p *procedureAnalyzer) VisitProcedureStmtDeclaration(p0 *ProcedureStmtDeclaration) any { - // we will check if the variable has already been declared, and if so, error. - - if p.variableExists(p0.Variable.String()) { - p.errs.AddErr(p0, ErrVariableAlreadyDeclared, p0.Variable.String()) - return zeroProcedureReturn() - } - - // TODO: we need to figure out how to undeclare a variable if it is declared in a loop/if block - - p.variables[p0.Variable.String()] = p0.Type - p.markDeclared(p0.Variable, p0.Variable.String(), p0.Type) - - // now that it is declared, we can visit it - p0.Variable.Accept(p) - - return zeroProcedureReturn() -} - -func (p *procedureAnalyzer) VisitProcedureStmtAssignment(p0 *ProcedureStmtAssign) any { - // visit the value first to get the data type - dt, ok := p0.Value.Accept(p).(*types.DataType) - if !ok { - p.expressionTypeErr(p0.Value) - return zeroProcedureReturn() - } - - // the variable can be either an ExpressionVariable or an ExpressionArrayAccess - // If it is an ExpressionVariable, we need to declare it - - exprVar, ok := p0.Variable.(*ExpressionVariable) - if ok { - _, ok = p.variables[exprVar.String()] - if !ok { - // if it does not exist, we can declare it here. - p.variables[exprVar.String()] = dt - p.markDeclared(p0.Variable, exprVar.String(), dt) - return zeroProcedureReturn() - } - } - - // the type can be inferred from the value. - // If the user explicitly declared a type, the inferred - // type should match - if p0.Type != nil { - if !p0.Type.Equals(dt) { - p.errs.AddErr(p0, ErrType, "declared type: %s, inferred type: %s", p0.Type.String(), dt.String()) - return zeroProcedureReturn() - } - } - - // ensure the variable already exists, and we are assigning the correct type. - dt2, ok := p0.Variable.Accept(p).(*types.DataType) - if !ok { - p.expressionTypeErr(p0.Variable) - return zeroProcedureReturn() - } - - if !dt2.Equals(dt) { - p.typeErr(p0, dt2, dt) - } - - return zeroProcedureReturn() -} - -func (p *procedureAnalyzer) VisitProcedureStmtCall(p0 *ProcedureStmtCall) any { - // we track if sqlResult has already been set to alreadyMutative to avoid throwing - // an incorrect error below. - alreadyMutative := p.sqlResult.Mutative - - var callReturns []*types.DataType - - // procedure calls can return many different types of values. - switch v := p0.Call.Accept(p).(type) { - case *types.DataType: - callReturns = []*types.DataType{v} - case []*types.DataType: - callReturns = v - case *returnsTable: - // if a procedure that returns a table is being called in a - // procedure, we need to ensure there are no receivers, since - // it is impossible to assign a table to a variable. 
- // we will also not add these to the callReturns, since they are - // table columns, and not assignable variables - if len(p0.Receivers) != 0 { - p.errs.AddErr(p0, ErrResultShape, "procedure returns table, cannot assign to variable(s)") - return zeroProcedureReturn() - } - case nil: - // do nothing - default: - p.expressionTypeErr(p0.Call) - return zeroProcedureReturn() - } - - // if calling the `error` function, then this branch will return - exits := false - if p0.Call.FunctionName() == "error" { - exits = true - } - - // if calling a non-view procedure, the above will set the sqlResult to be mutative - // if this procedure is a view, we should throw an error. - if !alreadyMutative && p.sqlResult.Mutative && p.procCtx.procedureDefinition.IsView() { - p.errs.AddErr(p0, ErrViewMutatesState, `view procedure calls non-view procedure "%s"`, p0.Call.FunctionName()) - } - - // users can discard returns by simply not having receivers. - // if there are no receivers, we can return early. - if len(p0.Receivers) == 0 { - return &procedureStmtResult{ - willReturn: exits, - } - } - - // we do not have to capture all return values, but we need to ensure - // we do not have more receivers than return values. - if len(p0.Receivers) != len(callReturns) { - p.errs.AddErr(p0, ErrResultShape, `function/procedure "%s" returns %d value(s), statement expects %d value(s)`, p0.Call.FunctionName(), len(callReturns), len(p0.Receivers)) - return zeroProcedureReturn() - } - - for i, r := range p0.Receivers { - // if the receiver is nil, we will not assign it to a variable, as it is an - // anonymous receiver. - if r == nil { - p.procResult.anonymousReceivers = append(p.procResult.anonymousReceivers, callReturns[i]) - continue - } - - // ensure the receiver is not already an anonymous variable - if _, ok := p.anonymousVariables[r.String()]; ok { - p.errs.AddErr(r, ErrVariableAlreadyDeclared, r.String()) - continue - } - - // if the variable has been declared, the type must match. otherwise, declare it and infer the type. - declaredType, ok := p.variables[r.String()] - if ok { - if !declaredType.Equals(callReturns[i]) { - p.typeErr(r, callReturns[i], declaredType) - continue - } - } else { - p.variables[r.String()] = callReturns[i] - p.markDeclared(r, r.String(), callReturns[i]) - } - } - - return &procedureStmtResult{ - willReturn: exits, - } -} - -// VisitProcedureStmtForLoop visits a for loop statement. -// This function is a bit convoluted, but it handles a lot of logic. It checks that the loop -// target variable can actually be declared by plpgsql, and then has to allow it to be accessed -// in the current block context. Once we exit the for loop, it has to make it no longer accessible -// in the context, BUT needs to still keep track of it. It needs to keep track of its data type, -// and whether it is a compound type, so that plpgsql knows whether to declare it as a RECORD -// or as a scalar type. -func (p *procedureAnalyzer) VisitProcedureStmtForLoop(p0 *ProcedureStmtForLoop) any { - // check to make sure the receiver has not already been declared - if p.variableExists(p0.Receiver.String()) { - p.errs.AddErr(p0.Receiver, ErrVariableAlreadyDeclared, p0.Receiver.String()) - return zeroProcedureReturn() - } - - tracker := &loopTargetTracker{ - name: p0.Receiver, - } - - // get the type from the loop term. - // can be a scalar if the term is a range or array, - // and an object if it is a sql statement. 
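The call-statement handling above matches receivers against the call's return values, letting an anonymous receiver discard a value while still recording its type so a throwaway PL/pgSQL variable can be generated for it. A small, self-contained sketch of that matching logic (the real analyzer also handles declaration, type inference, and view checks, which are omitted here):

package main

import "fmt"

// Minimal sketch; receiver and return types are plain strings for illustration.

// bindReceivers assigns each return type to its receiver name. An empty
// receiver name models the anonymous "_" receiver: the value is discarded,
// but its type is still recorded.
func bindReceivers(receivers []string, returns []string) (named map[string]string, anonymous []string, err error) {
	if len(receivers) != len(returns) {
		return nil, nil, fmt.Errorf("call returns %d value(s), statement expects %d value(s)", len(returns), len(receivers))
	}

	named = make(map[string]string)
	for i, r := range receivers {
		if r == "" {
			anonymous = append(anonymous, returns[i])
			continue
		}
		named[r] = returns[i]
	}
	return named, anonymous, nil
}

func main() {
	named, anon, err := bindReceivers([]string{"$id", "", "$name"}, []string{"int", "bool", "text"})
	fmt.Println(named, anon, err) // map[$id:int $name:text] [bool] <nil>

	_, _, err = bindReceivers([]string{"$id"}, []string{"int", "text"})
	fmt.Println(err) // arity mismatch
}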
- res := p0.LoopTerm.Accept(p) - scalarVal, ok := res.(*types.DataType) - - // we copy the variables to ensure that the loop target is only accessible in the loop. - vars, anonVars := p.copyVariables() - defer func() { - p.variables = vars - p.anonymousVariables = anonVars - }() - - // we do not mark declared here since these are loop receivers, - // and they get tracked in a separate slice than other variables. - if ok { - // if here, we are looping over an array or range. - // we need to use the returned type, but remove the IsArray - rec := scalarVal.Copy() - rec.IsArray = false - p.variables[p0.Receiver.String()] = rec - tracker.dataType = rec - } else { - // if we are here, we are looping over a select. - compound, ok := res.(map[string]*types.DataType) - if !ok { - p.expressionTypeErr(p0.LoopTerm) - return zeroProcedureReturn() - } - p.anonymousVariables[p0.Receiver.String()] = compound - // we do not set the tracker type here, since it is an anonymous variable. - } - - // we now need to add the loop target. - // if it already has been used, we will error. - for _, t := range p.procResult.allLoopReceivers { - if t.name.String() == p0.Receiver.String() { - p.errs.AddErr(p0.Receiver, ErrVariableAlreadyDeclared, p0.Receiver.String()) - return zeroProcedureReturn() - } - } - - p.procCtx.activeLoopReceivers = append([]string{tracker.name.String()}, p.procCtx.activeLoopReceivers...) - p.procResult.allLoopReceivers = append(p.procResult.allLoopReceivers, tracker) - - // returns tracks whether this loop is guaranteed to exit. - returns := false - canBreakPrematurely := false - // we will now visit the statements in the loop. - for _, stmt := range p0.Body { - res := stmt.Accept(p).(*procedureStmtResult) - if res.canBreak { - canBreakPrematurely = true - } - if res.willReturn { - returns = true - } - } - // if it is possible for a for loop to break prematurely, then it is possible - // that it does not include a return, and so we need to inform the caller - // that it does not guarantee a return. 
- if canBreakPrematurely { - returns = false - } - - // pop the loop target - if len(p.procCtx.activeLoopReceivers) == 1 { - p.procCtx.activeLoopReceivers = nil - } else { - p.procCtx.activeLoopReceivers = p.procCtx.activeLoopReceivers[1:] - } - - return &procedureStmtResult{ - willReturn: returns, - } -} - -func (p *procedureAnalyzer) VisitLoopTermRange(p0 *LoopTermRange) any { - // range loops are always integers - start, ok := p0.Start.Accept(p).(*types.DataType) - if !ok { - return p.expressionTypeErr(p0.Start) - } - - end, ok := p0.End.Accept(p).(*types.DataType) - if !ok { - return p.expressionTypeErr(p0.End) - } - - // the types have to be ints - - p.expect(p0.Start, start, types.IntType) - p.expect(p0.End, end, types.IntType) - - return types.IntType -} - -func (p *procedureAnalyzer) VisitLoopTermSQL(p0 *LoopTermSQL) any { - p.startSQLAnalyze() - rels, ok := p0.Statement.Accept(p).([]*Attribute) - if !ok { - panic("expected query to return attributes") - } - p.endSQLAnalyze(p0.Statement) - - // we need to convert the attributes into an object - obj := make(map[string]*types.DataType) - for _, rel := range rels { - obj[rel.Name] = rel.Type - } - - return obj -} - -func (p *procedureAnalyzer) VisitLoopTermVariable(p0 *LoopTermVariable) any { - // we need to ensure the variable exists - dt, ok := p0.Variable.Accept(p).(*types.DataType) - if !ok { - return p.expressionTypeErr(p0.Variable) - } - - return dt -} - -func (p *procedureAnalyzer) VisitProcedureStmtIf(p0 *ProcedureStmtIf) any { - canBreak := false - - allThensReturn := true - for _, c := range p0.IfThens { - res := c.Accept(p).(*procedureStmtResult) - if !res.willReturn { - allThensReturn = false - } - if res.canBreak { - canBreak = true - } - } - - // initialize to true, so that if else does not exist, we know we still exit. - // It gets set to false if we encounter an else block. 
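The three loop-term visitors above determine what type the loop receiver gets: a range always yields an int, an array variable yields its element type (with the array flag stripped), and a SQL statement yields a compound row keyed by result column. A rough standalone sketch of that mapping, with a simplified stand-in for the engine's type model:

package main

import "fmt"

// dataType is a simplified stand-in for the engine's data types.
type dataType struct {
	name    string
	isArray bool
}

// loopReceiverType mirrors the loop-term rules above: ranges yield int
// receivers, arrays yield their element type, and SQL statements yield a
// compound row object keyed by column name.
func loopReceiverType(term any) (scalar *dataType, row map[string]dataType, err error) {
	if cols, ok := term.(map[string]dataType); ok { // SQL term
		return nil, cols, nil
	}
	if v, ok := term.(dataType); ok { // variable term: must be an array
		if !v.isArray {
			return nil, nil, fmt.Errorf("cannot loop over non-array %s", v.name)
		}
		v.isArray = false // the receiver is a single element
		return &v, nil, nil
	}
	if _, ok := term.([2]int); ok { // range term, e.g. 1..10
		return &dataType{name: "int"}, nil, nil
	}
	return nil, nil, fmt.Errorf("unsupported loop term %T", term)
}

func main() {
	elem, _, _ := loopReceiverType(dataType{name: "text", isArray: true})
	fmt.Println(elem.name, elem.isArray) // text false
	_, cols, _ := loopReceiverType(map[string]dataType{"id": {name: "int"}})
	fmt.Println(cols["id"].name) // int
}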
- elseReturns := true - if p0.Else != nil { - vars, anonVars := p.copyVariables() - defer func() { - p.variables = vars - p.anonymousVariables = anonVars - }() - - elseReturns = false - for _, stmt := range p0.Else { - res := stmt.Accept(p).(*procedureStmtResult) - if res.willReturn { - elseReturns = true - } - if res.canBreak { - canBreak = true - } - } - } - - return &procedureStmtResult{ - willReturn: allThensReturn && elseReturns, - canBreak: canBreak, - } -} - -func (p *procedureAnalyzer) VisitIfThen(p0 *IfThen) any { - dt, ok := p0.If.Accept(p).(*types.DataType) - if !ok { - p.expressionTypeErr(p0.If) - return zeroProcedureReturn() - } - - p.expect(p0.If, dt, types.BoolType) - - canBreak := false - returns := false - - vars, anonVars := p.copyVariables() - defer func() { - p.variables = vars - p.anonymousVariables = anonVars - }() - - for _, stmt := range p0.Then { - res := stmt.Accept(p).(*procedureStmtResult) - if res.willReturn { - returns = true - } - if res.canBreak { - canBreak = true - } - } - - return &procedureStmtResult{ - willReturn: returns, - canBreak: canBreak, - } -} - -func (p *procedureAnalyzer) VisitProcedureStmtSQL(p0 *ProcedureStmtSQL) any { - p.startSQLAnalyze() - defer p.endSQLAnalyze(p0.SQL) - - _, ok := p0.SQL.Accept(p).([]*Attribute) - if !ok { - panic("expected query to return attributes") - } - - return zeroProcedureReturn() -} - -func (p *procedureAnalyzer) VisitProcedureStmtBreak(p0 *ProcedureStmtBreak) any { - if len(p.procCtx.activeLoopReceivers) == 0 { - p.errs.AddErr(p0, ErrBreak, "break statement outside of loop") - } - - return &procedureStmtResult{ - canBreak: true, - } -} - -func (p *procedureAnalyzer) VisitProcedureStmtReturn(p0 *ProcedureStmtReturn) any { - if p.procCtx.procedureDefinition.Returns == nil { - if len(p0.Values) != 0 { - p.errs.AddErr(p0, ErrFunctionSignature, "procedure does not return any values") - } - if p0.SQL != nil { - p.errs.AddErr(p0, ErrFunctionSignature, "cannot return SQL statement from procedure that does not return any values") - } - return &procedureStmtResult{ - willReturn: true, - } - } - returns := p.procCtx.procedureDefinition.Returns - - if p0.SQL != nil { - if !returns.IsTable { - p.errs.AddErr(p0, ErrReturn, "procedure expects scalar returns, cannot return SQL statement") - return &procedureStmtResult{ - willReturn: true, - } - } - - p.startSQLAnalyze() - defer p.endSQLAnalyze(p0.SQL) - - res, ok := p0.SQL.Accept(p).([]*Attribute) - if !ok { - panic("expected query to return attributes") - } - - if len(res) != len(returns.Fields) { - p.errs.AddErr(p0, ErrReturn, "expected %d return table columns, received %d", len(returns.Fields), len(res)) - return &procedureStmtResult{ - willReturn: true, - } - } - - // we will compare the return types to the procedure definition - for i, r := range res { - retField := returns.Fields[i] - if !r.Type.Equals(retField.Type) { - p.errs.AddErr(p0, ErrReturn, "expected column type %s, received column type %s", retField.Type.String(), r.Type.String()) - } - - if r.Name != retField.Name { - p.errs.AddErr(p0, ErrReturn, "expected column name %s, received column name %s", retField.Name, r.Name) - } - } - - return &procedureStmtResult{ - willReturn: true, - } - } - if returns.IsTable { - // if the procedure is expecting a table return, it can return nothing. 
- if len(p0.Values) == 0 { - return &procedureStmtResult{ - willReturn: true, - } - } - - p.errs.AddErr(p0, ErrReturn, "procedure expects table returns, cannot return scalar values") - return &procedureStmtResult{ - willReturn: true, - } - } - - if len(p0.Values) != len(returns.Fields) { - p.errs.AddErr(p0, ErrReturn, "expected %d return values, received %d", len(returns.Fields), len(p0.Values)) - return &procedureStmtResult{ - willReturn: true, - } - } - - for i, v := range p0.Values { - dt, ok := v.Accept(p).(*types.DataType) - if !ok { - p.expressionTypeErr(v) - return &procedureStmtResult{ - willReturn: true, - } - } - - if !dt.Equals(returns.Fields[i].Type) { - p.typeErr(v, dt, returns.Fields[i].Type) - } - } - - return &procedureStmtResult{ - willReturn: true, - } -} - -func (p *procedureAnalyzer) VisitProcedureStmtReturnNext(p0 *ProcedureStmtReturnNext) any { - if p.procCtx.procedureDefinition.Returns == nil { - p.errs.AddErr(p0, ErrFunctionSignature, "procedure does not return any values") - return &procedureStmtResult{ - willReturn: true, - } - } - - if !p.procCtx.procedureDefinition.Returns.IsTable { - p.errs.AddErr(p0, ErrReturn, "procedure expects scalar returns, cannot return next") - return &procedureStmtResult{ - willReturn: true, - } - } - - if len(p0.Values) != len(p.procCtx.procedureDefinition.Returns.Fields) { - p.errs.AddErr(p0, ErrReturn, "expected %d return values, received %d", len(p.procCtx.procedureDefinition.Returns.Fields), len(p0.Values)) - return &procedureStmtResult{ - willReturn: true, - } - } - - for i, v := range p0.Values { - dt, ok := v.Accept(p).(*types.DataType) - if !ok { - p.expressionTypeErr(v) - return &procedureStmtResult{ - willReturn: true, - } - } - - if !dt.Equals(p.procCtx.procedureDefinition.Returns.Fields[i].Type) { - p.typeErr(v, dt, p.procCtx.procedureDefinition.Returns.Fields[i].Type) - } - } - - return &procedureStmtResult{ - willReturn: true, - } -} - -// zeroProcedureReturn creates a new procedure return with all 0 values. -func zeroProcedureReturn() *procedureStmtResult { - return &procedureStmtResult{} -} - -// procedureStmtResult is returned from each procedure statement visit. -type procedureStmtResult struct { - // willReturn is true if the statement contains a return statement that it will - // always hit. This is used to determine if a path will exit a procedure. - // it is used to tell whether or not a statement can potentially exit a procedure, - // since all procedures that have an expected return must always return that value. - // It only tells us whether or not a return is guaranteed to be hit from a statement. - // The return types are checked at the point of the return statement. - willReturn bool - // canBreak is true if the statement that can break a for loop it is in. - // For example, an IF statement that breaks a for loop will set canBreak to true. 
- canBreak bool -} diff --git a/parse/analyze_test.go b/parse/analyze_test.go deleted file mode 100644 index acdf14906..000000000 --- a/parse/analyze_test.go +++ /dev/null @@ -1,29 +0,0 @@ -package parse - -import ( - "testing" - - "github.com/kwilteam/kwil-db/core/types" - "github.com/stretchr/testify/require" -) - -func Test_Scope(t *testing.T) { - tbl := &types.Table{} - s := sqlContext{ - joinedTables: map[string]*types.Table{ - "table1": tbl, - }, - } - - s.scope() - s.scope() - - require.EqualValues(t, s.joinedTables, map[string]*types.Table{}) - - s.popScope() - s.popScope() - - require.EqualValues(t, s.joinedTables, map[string]*types.Table{ - "table1": tbl, - }) -} diff --git a/parse/ast.go b/parse/ast.go deleted file mode 100644 index 3e937ccbe..000000000 --- a/parse/ast.go +++ /dev/null @@ -1,1151 +0,0 @@ -package parse - -import ( - "encoding/hex" - "fmt" - "strings" - - "github.com/antlr4-go/antlr/v4" - "github.com/kwilteam/kwil-db/core/types" - "github.com/kwilteam/kwil-db/core/types/decimal" -) - -// this file contains the ASTs for SQL, procedures, and actions. - -// Node is a node in the AST. -type Node interface { - GetPositioner - Accept(Visitor) any - Set(r antlr.ParserRuleContext) - SetToken(t antlr.Token) -} - -type GetPositioner interface { - GetPosition() *Position - Clear() -} - -type Typecastable struct { - TypeCast *types.DataType -} - -func (t *Typecastable) Cast(t2 *types.DataType) { - t.TypeCast = t2 -} - -func (t *Typecastable) GetTypeCast() *types.DataType { - return t.TypeCast -} - -// Expression is an interface for all expressions. -type Expression interface { - Node -} - -// ExpressionLiteral is a literal expression. -type ExpressionLiteral struct { - Position - Typecastable - Type *types.DataType - // Value is the value of the literal. - // It must be of type string, int64, bool, *uint256.Int, *decimal.Decimal, - // or nil - Value any -} - -func (e *ExpressionLiteral) Accept(v Visitor) any { - return v.VisitExpressionLiteral(e) -} - -// String returns the string representation of the literal. -func (e *ExpressionLiteral) String() string { - s, err := literalToString(e.Value) - if err != nil { - panic(err.Error() + ": " + fmt.Sprintf("%T", e.Value)) - } - return s -} - -// literalToString formats a literal value to be used in a SQL / DDL statement. -func literalToString(value any) (string, error) { - str := strings.Builder{} - switch v := value.(type) { - case string: // for text type - str.WriteString("'" + v + "'") - case int64, int, int32: // for int type - str.WriteString(fmt.Sprint(v)) - case *types.Uint256: - str.WriteString(v.String()) - case *decimal.Decimal: - str.WriteString(v.String()) - case bool: // for bool type - if v { - str.WriteString("true") - } - str.WriteString("false") - case []byte: - str.WriteString("0x" + hex.EncodeToString(v)) - case nil: - // do nothing - default: - return "", fmt.Errorf("unsupported literal type: %T", v) - } - - return str.String(), nil -} - -type ExpressionCall interface { - Expression - Cast(*types.DataType) - GetTypeCast() *types.DataType - FunctionName() string -} - -// ExpressionFunctionCall is a function call expression. -type ExpressionFunctionCall struct { - Position - Typecastable - // Name is the name of the function. - Name string - // Args are the arguments to the function call. - // They are passed using () - Args []Expression - // Distinct is true if the function call is a DISTINCT function call. - Distinct bool - // Star is true if the function call is a * function call. 
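The willReturn/canBreak flags above compose in a simple way: an IF only guarantees a return when every branch (including the ELSE) guarantees one, and a loop whose body can break early cannot guarantee a return even if some path inside it returns. A compact sketch of that composition, under the assumption that each visited statement reports these two flags:

package main

import "fmt"

// result is a stand-in for the per-statement flags described above.
type result struct{ willReturn, canBreak bool }

// ifStmt guarantees a return only when every branch does; a break anywhere
// propagates upward so an enclosing loop can see it.
func ifStmt(branches ...result) result {
	out := result{willReturn: true}
	for _, b := range branches {
		out.willReturn = out.willReturn && b.willReturn
		out.canBreak = out.canBreak || b.canBreak
	}
	return out
}

// forLoop aggregates its body, but a possible premature break cancels the
// return guarantee; the break itself is consumed by the loop.
func forLoop(body ...result) result {
	var out result
	for _, s := range body {
		out.willReturn = out.willReturn || s.willReturn
		out.canBreak = out.canBreak || s.canBreak
	}
	if out.canBreak {
		out.willReturn = false
	}
	out.canBreak = false
	return out
}

func main() {
	fmt.Println(ifStmt(result{willReturn: true}, result{}))                // {false false}
	fmt.Println(forLoop(result{willReturn: true}, result{canBreak: true})) // {false false}
}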
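The bool case in literalToString above is missing an else, so a true value would render as "truefalse". A corrected standalone sketch of the same formatting rules (omitting the engine's Uint256 and Decimal cases):

package main

import (
	"encoding/hex"
	"fmt"
)

// literalToString formats a literal for use in SQL/DDL text: strings are
// single-quoted, booleans are true/false (note the explicit early return that
// the version above lacks), byte slices are 0x-prefixed hex, and nil renders
// as an empty string.
func literalToString(value any) (string, error) {
	switch v := value.(type) {
	case string:
		return "'" + v + "'", nil
	case int, int32, int64:
		return fmt.Sprint(v), nil
	case bool:
		if v {
			return "true", nil
		}
		return "false", nil
	case []byte:
		return "0x" + hex.EncodeToString(v), nil
	case nil:
		return "", nil
	default:
		return "", fmt.Errorf("unsupported literal type: %T", v)
	}
}

func main() {
	for _, v := range []any{"hello", int64(42), true, []byte{0xde, 0xad}} {
		s, err := literalToString(v)
		fmt.Println(s, err)
	}
}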
- // If it is set, then Args must be empty. - Star bool -} - -var _ ExpressionCall = (*ExpressionFunctionCall)(nil) - -func (e *ExpressionFunctionCall) Accept(v Visitor) any { - return v.VisitExpressionFunctionCall(e) -} - -func (e *ExpressionFunctionCall) FunctionName() string { - return e.Name -} - -// ExpressionForeignCall is a call to an external procedure. -type ExpressionForeignCall struct { - Position - Typecastable - // Name is the name of the function. - Name string - // ContextualArgs are arguments that are contextual to the function call. - // They are passed using [] - ContextualArgs []Expression - // Args are the arguments to the function call. - // They are passed using () - Args []Expression -} - -func (e *ExpressionForeignCall) Accept(v Visitor) any { - return v.VisitExpressionForeignCall(e) -} - -func (e *ExpressionForeignCall) FunctionName() string { - return e.Name -} - -var _ ExpressionCall = (*ExpressionForeignCall)(nil) - -// ExpressionVariable is a variable. -// This can either be $ or @ variables. -type ExpressionVariable struct { - Position - Typecastable - // Name is the naem of the variable, - // without the $ or @. - Name string - // Prefix is the $ or @ prefix. - Prefix VariablePrefix -} - -func (e *ExpressionVariable) Accept(v Visitor) any { - return v.VisitExpressionVariable(e) -} - -// String returns the string representation, as it was passed -// in Kuneiform. -func (e *ExpressionVariable) String() string { - return string(e.Prefix) + e.Name -} - -type VariablePrefix string - -const ( - VariablePrefixDollar VariablePrefix = "$" - VariablePrefixAt VariablePrefix = "@" -) - -// ExpressionArrayAccess accesses an array value. -type ExpressionArrayAccess struct { - Position - Typecastable - // Array is the array that is being accessed. - Array Expression - // Index is the index that is being accessed. - // Either Index or FromTo is set, but not both. - Index Expression - // FromTo is the range that is being accessed. - // Either Index or FromTo is set, but not both. - // If FromTo is set, then it is a range access. - // If both values are set, then it is arr[FROM:TO]. - // If only From is set, then it is arr[FROM:]. - // If only To is set, then it is arr[:TO]. - // If neither are set and index is not set, then it is arr[:]. - FromTo [2]Expression -} - -func (e *ExpressionArrayAccess) Accept(v Visitor) any { - return v.VisitExpressionArrayAccess(e) -} - -// ExpressionMakeArray makes a new array. -type ExpressionMakeArray struct { - Position - Typecastable - Values []Expression -} - -func (e *ExpressionMakeArray) Accept(v Visitor) any { - return v.VisitExpressionMakeArray(e) -} - -// ExpressionFieldAccess accesses a field in a record. -type ExpressionFieldAccess struct { - Position - Typecastable - // Record is the record that is being accessed. - Record Expression - // Field is the field that is being accessed. - Field string -} - -func (e *ExpressionFieldAccess) Accept(v Visitor) any { - return v.VisitExpressionFieldAccess(e) -} - -// ExpressionParenthesized is a parenthesized expression. -type ExpressionParenthesized struct { - Position - Typecastable - // Inner is the inner expression. - Inner Expression -} - -func (e *ExpressionParenthesized) Accept(v Visitor) any { - return v.VisitExpressionParenthesized(e) -} - -// ExpressionComparison is a comparison expression. -type ExpressionComparison struct { - Position - // Left is the left side of the comparison. - Left Expression - // Right is the right side of the comparison. 
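The Index/FromTo split on ExpressionArrayAccess above encodes both plain indexing and every slice form: arr[i], arr[from:to], arr[from:], arr[:to], and arr[:] when nothing is set. A small illustrative renderer (hypothetical, not part of the parser) that makes the encoding concrete:

package main

import "fmt"

// arrayAccess mirrors the Index-or-FromTo encoding above; a nil bound in
// fromTo leaves that side of the slice open.
type arrayAccess struct {
	array  string
	index  *int    // arr[i]
	fromTo [2]*int // arr[from:to], either bound may be nil
}

func (a arrayAccess) String() string {
	if a.index != nil {
		return fmt.Sprintf("%s[%d]", a.array, *a.index)
	}
	bound := func(p *int) string {
		if p == nil {
			return ""
		}
		return fmt.Sprint(*p)
	}
	return fmt.Sprintf("%s[%s:%s]", a.array, bound(a.fromTo[0]), bound(a.fromTo[1]))
}

func main() {
	one, two := 1, 2
	fmt.Println(arrayAccess{array: "$arr", index: &one})                // $arr[1]
	fmt.Println(arrayAccess{array: "$arr", fromTo: [2]*int{&two, nil}}) // $arr[2:]
	fmt.Println(arrayAccess{array: "$arr"})                             // $arr[:]
}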
- Right Expression - // Operator is the operator of the comparison. - Operator ComparisonOperator -} - -func (e *ExpressionComparison) Accept(v Visitor) any { - return v.VisitExpressionComparison(e) -} - -type ComparisonOperator string - -const ( - ComparisonOperatorEqual ComparisonOperator = "=" - ComparisonOperatorNotEqual ComparisonOperator = "!=" - ComparisonOperatorGreaterThan ComparisonOperator = ">" - ComparisonOperatorLessThan ComparisonOperator = "<" - ComparisonOperatorGreaterThanOrEqual ComparisonOperator = ">=" - ComparisonOperatorLessThanOrEqual ComparisonOperator = "<=" -) - -// ExpressionLogical is a logical expression. -type ExpressionLogical struct { - Position - // Left is the left side of the logical expression. - Left Expression - // Right is the right side of the logical expression. - Right Expression - // Operator is the operator of the logical expression. - Operator LogicalOperator -} - -func (e *ExpressionLogical) Accept(v Visitor) any { - return v.VisitExpressionLogical(e) -} - -type LogicalOperator string - -const ( - LogicalOperatorAnd LogicalOperator = "and" - LogicalOperatorOr LogicalOperator = "or" -) - -// ExpressionArithmetic is an arithmetic expression. -type ExpressionArithmetic struct { - Position - // Left is the left side of the arithmetic expression. - Left Expression - // Right is the right side of the arithmetic expression. - Right Expression - // Operator is the operator of the arithmetic expression. - Operator ArithmeticOperator -} - -func (e *ExpressionArithmetic) Accept(v Visitor) any { - return v.VisitExpressionArithmetic(e) -} - -type ArithmeticOperator string - -const ( - ArithmeticOperatorAdd ArithmeticOperator = "+" - ArithmeticOperatorSubtract ArithmeticOperator = "-" - ArithmeticOperatorMultiply ArithmeticOperator = "*" - ArithmeticOperatorDivide ArithmeticOperator = "/" - ArithmeticOperatorModulo ArithmeticOperator = "%" - ArithmeticOperatorConcat ArithmeticOperator = "||" -) - -type ExpressionUnary struct { - Position - // Expression is the expression that is being operated on. - Expression Expression - // Operator is the operator of the unary expression. - Operator UnaryOperator -} - -func (e *ExpressionUnary) Accept(v Visitor) any { - return v.VisitExpressionUnary(e) -} - -type UnaryOperator string - -const ( - // Not can be either NOT or ! - UnaryOperatorNot UnaryOperator = "not" - UnaryOperatorNeg UnaryOperator = "-" - UnaryOperatorPos UnaryOperator = "+" -) - -// ExpressionColumn is a column in a table. -type ExpressionColumn struct { - Position - Typecastable - // Table is the table that the column is in. - Table string // can be empty - // Column is the name of the column. - Column string -} - -func (e *ExpressionColumn) Accept(v Visitor) any { - return v.VisitExpressionColumn(e) -} - -// ExpressionCollate is an expression with a collation. -type ExpressionCollate struct { - Position - // Expression is the expression that is being collated. - Expression Expression - // Collation is the collation that is being used. - Collation string -} - -func (e *ExpressionCollate) Accept(v Visitor) any { - return v.VisitExpressionCollate(e) -} - -// ExpressionStringComparison is a string comparison expression. -type ExpressionStringComparison struct { - Position - // Left is the left side of the comparison. - Left Expression - // Right is the right side of the comparison. - Right Expression - Not bool - // Operator is the operator of the comparison. 
- Operator StringComparisonOperator -} - -func (e *ExpressionStringComparison) Accept(v Visitor) any { - return v.VisitExpressionStringComparison(e) -} - -type StringComparisonOperator string - -const ( - StringComparisonOperatorLike StringComparisonOperator = "LIKE" - StringComparisonOperatorILike StringComparisonOperator = "ILIKE" -) - -// ExpressionIs is an IS expression. -type ExpressionIs struct { - Position - // Left is the left side of the IS expression. - Left Expression - // Right is the right side of the IS expression. - Right Expression - // Not is true if the IS expression is a NOT IS expression. - Not bool - // Distinct is true if the IS expression is a DISTINCT IS expression. - Distinct bool -} - -func (e *ExpressionIs) Accept(v Visitor) any { - return v.VisitExpressionIs(e) -} - -// ExpressionBetween is a BETWEEN expression. -type ExpressionBetween struct { - Position - // Expression is the expression that is being compared. - Expression Expression - // Lower is the left side of the BETWEEN expression. - Lower Expression - // Upper is the right side of the BETWEEN expression. - Upper Expression - // Not is true if the BETWEEN expression is a NOT BETWEEN expression. - Not bool -} - -func (e *ExpressionBetween) Accept(v Visitor) any { - return v.VisitExpressionBetween(e) -} - -type ExpressionIn struct { - Position - // Expression is the expression that is being compared. - Expression Expression - // List is the list of expressions that the expression is being compared to. - // Either List or Subquery is set, but not both. - List []Expression - // Subquery is the subquery that the expression is being compared to. - // Either List or Subquery is set, but not both. - Subquery *SelectStatement - // Not is true if the IN expression is a NOT IN expression. - Not bool -} - -func (e *ExpressionIn) Accept(v Visitor) any { - return v.VisitExpressionIn(e) -} - -// ExpressionSubquery is a subquery expression. -type ExpressionSubquery struct { - Position - Typecastable - Not bool - Exists bool - Subquery *SelectStatement -} - -func (e *ExpressionSubquery) Accept(v Visitor) any { - return v.VisitExpressionSubquery(e) -} - -// ExpressionCase is a CASE expression. -type ExpressionCase struct { - Position - Case Expression - WhenThen [][2]Expression - Else Expression -} - -func (e *ExpressionCase) Accept(v Visitor) any { - return v.VisitExpressionCase(e) -} - -// CommonTableExpression is a common table expression. -type CommonTableExpression struct { - Position - // Name is the name of the CTE. - Name string - // Columns are the columns of the CTE. - Columns []string - // Query is the query of the CTE. - Query *SelectStatement -} - -func (c *CommonTableExpression) Accept(v Visitor) any { - return v.VisitCommonTableExpression(c) -} - -// SQLStatement is a SQL statement. -type SQLStatement struct { - Position - CTEs []*CommonTableExpression - // SQL can be an insert, update, delete, or select statement. - SQL SQLCore -} - -func (s *SQLStatement) Accept(v Visitor) any { - return v.VisitSQLStatement(s) -} - -// SQLCore is a top-level statement. -// It can be INSERT, UPDATE, DELETE, SELECT. -type SQLCore interface { - Node - StmtType() SQLStatementType -} - -type SQLStatementType string - -const ( - SQLStatementTypeInsert SQLStatementType = "insert" - SQLStatementTypeUpdate SQLStatementType = "update" - SQLStatementTypeDelete SQLStatementType = "delete" - SQLStatementTypeSelect SQLStatementType = "select" -) - -// SelectStatement is a SELECT statement. 
-type SelectStatement struct { - Position - SelectCores []*SelectCore - CompoundOperators []CompoundOperator - Ordering []*OrderingTerm - Limit Expression - Offset Expression -} - -func (s *SelectStatement) Accept(v Visitor) any { - return v.VisitSelectStatement(s) -} - -func (SelectStatement) StmtType() SQLStatementType { - return SQLStatementTypeSelect -} - -type CompoundOperator string - -const ( - CompoundOperatorUnion CompoundOperator = "UNION" - CompoundOperatorUnionAll CompoundOperator = "UNION ALL" - CompoundOperatorIntersect CompoundOperator = "INTERSECT" - CompoundOperatorExcept CompoundOperator = "EXCEPT" -) - -// OrderingTerm is a term in an order by clause -type OrderingTerm struct { - Position - Expression Expression - Order OrderType - Nulls NullOrder -} - -func (o *OrderingTerm) Accept(v Visitor) any { - return v.VisitOrderingTerm(o) -} - -type OrderType string - -const ( - OrderTypeAsc OrderType = "ASC" - OrderTypeDesc OrderType = "DESC" -) - -type NullOrder string - -const ( - NullOrderFirst NullOrder = "FIRST" - NullOrderLast NullOrder = "LAST" -) - -type SelectCore struct { - Position - // Distinct is true if the SELECT statement is a DISTINCT SELECT statement. - Distinct bool - Columns []ResultColumn - From Table // can be nil - Joins []*Join // can be nil - Where Expression // can be nil - GroupBy []Expression // can be nil - Having Expression // can be nil -} - -func (s *SelectCore) Accept(v Visitor) any { - return v.VisitSelectCore(s) -} - -type ResultColumn interface { - Node - ResultColumnType() ResultColumnType -} - -type ResultColumnType string - -const ( - ResultColumnTypeExpression ResultColumnType = "expression" - ResultColumnTypeWildcard ResultColumnType = "wildcare" -) - -type ResultColumnExpression struct { - Position - - Expression Expression - Alias string // can be empty -} - -func (r *ResultColumnExpression) Accept(v Visitor) any { - return v.VisitResultColumnExpression(r) -} - -func (r *ResultColumnExpression) ResultColumnType() ResultColumnType { - return ResultColumnTypeExpression -} - -type ResultColumnWildcard struct { - Position - Table string // can be empty -} - -func (r *ResultColumnWildcard) Accept(v Visitor) any { - return v.VisitResultColumnWildcard(r) -} - -func (r *ResultColumnWildcard) ResultColumnType() ResultColumnType { - return ResultColumnTypeWildcard -} - -type Table interface { - Node - table() -} - -type RelationTable struct { - Position - Table string - Alias string // can be empty -} - -func (r *RelationTable) Accept(v Visitor) any { - return v.VisitRelationTable(r) -} - -func (RelationTable) table() {} - -type RelationSubquery struct { - Position - Subquery *SelectStatement - // Alias cannot be empty, as our syntax - // forces it for subqueries. - Alias string -} - -func (r *RelationSubquery) Accept(v Visitor) any { - return v.VisitRelationSubquery(r) -} - -func (RelationSubquery) table() {} - -type RelationFunctionCall struct { - Position - FunctionCall ExpressionCall - // The alias cannot be empty, as our syntax forces - // it for function calls - Alias string -} - -func (r *RelationFunctionCall) Accept(v Visitor) any { - return v.VisitRelationFunctionCall(r) -} - -func (RelationFunctionCall) table() {} - -// Join is a join in a SELECT statement. 
-type Join struct { - Position - Type JoinType - Relation Table - On Expression -} - -func (j *Join) Accept(v Visitor) any { - return v.VisitJoin(j) -} - -type JoinType string - -const ( - JoinTypeInner JoinType = "INNER" - JoinTypeLeft JoinType = "LEFT" - JoinTypeRight JoinType = "RIGHT" - JoinTypeFull JoinType = "FULL" -) - -type UpdateStatement struct { - Position - Table string - Alias string // can be empty - SetClause []*UpdateSetClause - From Table // can be nil - Joins []*Join // can be nil - Where Expression // can be nil -} - -func (u *UpdateStatement) Accept(v Visitor) any { - return v.VisitUpdateStatement(u) -} - -func (u *UpdateStatement) StmtType() SQLStatementType { - return SQLStatementTypeUpdate -} - -type UpdateSetClause struct { - Position - Column string - Value Expression -} - -func (u *UpdateSetClause) Accept(v Visitor) any { - return v.VisitUpdateSetClause(u) -} - -type DeleteStatement struct { - Position - - Table string - Alias string // can be empty - From Table // can be nil - Joins []*Join // can be nil - Where Expression // can be nil -} - -func (d *DeleteStatement) StmtType() SQLStatementType { - return SQLStatementTypeDelete -} - -func (d *DeleteStatement) Accept(v Visitor) any { - return v.VisitDeleteStatement(d) -} - -type InsertStatement struct { - Position - Table string - Alias string // can be empty - Columns []string // can be empty - Values [][]Expression - Upsert *UpsertClause // can be nil -} - -func (i *InsertStatement) Accept(v Visitor) any { - return v.VisitInsertStatement(i) -} - -func (i *InsertStatement) StmtType() SQLStatementType { - return SQLStatementTypeInsert -} - -type UpsertClause struct { - Position - ConflictColumns []string // can be empty - ConflictWhere Expression // can be nil - DoUpdate []*UpdateSetClause // if nil, then do nothing - UpdateWhere Expression // can be nil -} - -func (u *UpsertClause) Accept(v Visitor) any { - return v.VisitUpsertClause(u) -} - -// action ast: - -type ActionStmt interface { - Node - ActionStmt() ActionStatementTypes -} - -type ActionStatementTypes string - -const ( - ActionStatementTypeExtensionCall ActionStatementTypes = "extension_call" - ActionStatementTypeActionCall ActionStatementTypes = "action_call" - ActionStatementTypeSQL ActionStatementTypes = "sql" -) - -type ActionStmtSQL struct { - Position - SQL *SQLStatement -} - -func (a *ActionStmtSQL) Accept(v Visitor) any { - return v.VisitActionStmtSQL(a) -} - -func (a *ActionStmtSQL) ActionStmt() ActionStatementTypes { - return ActionStatementTypeSQL -} - -type ActionStmtExtensionCall struct { - Position - Receivers []string - Extension string - Method string - Args []Expression -} - -func (a *ActionStmtExtensionCall) Accept(v Visitor) any { - return v.VisitActionStmtExtensionCall(a) -} - -func (a *ActionStmtExtensionCall) ActionStmt() ActionStatementTypes { - return ActionStatementTypeExtensionCall -} - -type ActionStmtActionCall struct { - Position - Action string - Args []Expression -} - -func (a *ActionStmtActionCall) Accept(v Visitor) any { - return v.VisitActionStmtActionCall(a) -} - -func (a *ActionStmtActionCall) ActionStmt() ActionStatementTypes { - return ActionStatementTypeActionCall -} - -// procedure ast: - -// ProcedureStmt is a statement in a procedure. -// it is the top-level interface for all procedure statements. 
-type ProcedureStmt interface { - Node - procedureStmt() -} - -type baseProcedureStmt struct { - Position -} - -func (baseProcedureStmt) procedureStmt() {} - -// ProcedureStmtDeclaration is a variable declaration in a procedure. -type ProcedureStmtDeclaration struct { - baseProcedureStmt - // Variable is the variable that is being declared. - Variable *ExpressionVariable - Type *types.DataType -} - -func (p *ProcedureStmtDeclaration) Accept(v Visitor) any { - return v.VisitProcedureStmtDeclaration(p) -} - -// ProcedureStmtAssign is a variable assignment in a procedure. -// It should only be called on variables that have already been declared. -type ProcedureStmtAssign struct { - baseProcedureStmt - // Variable is the variable that is being assigned. - Variable Expression - // Type is the type of the variable. - // It can be nil if the variable is not being assigned, - // or if the type should be inferred. - Type *types.DataType - // Value is the value that is being assigned. - Value Expression -} - -func (p *ProcedureStmtAssign) Accept(v Visitor) any { - return v.VisitProcedureStmtAssignment(p) -} - -// ProcedureStmtCall is a call to another procedure or built-in function. -type ProcedureStmtCall struct { - baseProcedureStmt - // Receivers are the variables being assigned. If nil, then the - // receiver can be ignored. - Receivers []*ExpressionVariable - Call ExpressionCall -} - -func (p *ProcedureStmtCall) Accept(v Visitor) any { - return v.VisitProcedureStmtCall(p) -} - -type ProcedureStmtForLoop struct { - baseProcedureStmt - // Receiver is the variable that is assigned on each iteration. - Receiver *ExpressionVariable - // LoopTerm is what the loop is looping through. - LoopTerm LoopTerm - // Body is the body of the loop. - Body []ProcedureStmt -} - -func (p *ProcedureStmtForLoop) Accept(v Visitor) any { - return v.VisitProcedureStmtForLoop(p) -} - -// LoopTerm what the loop is looping through. -type LoopTerm interface { - Node - loopTerm() -} - -type baseLoopTerm struct { - Position -} - -func (baseLoopTerm) loopTerm() {} - -type LoopTermRange struct { - baseLoopTerm - // Start is the start of the range. - Start Expression - // End is the end of the range. - End Expression -} - -func (e *LoopTermRange) Accept(v Visitor) interface{} { - return v.VisitLoopTermRange(e) -} - -type LoopTermSQL struct { - baseLoopTerm - // Statement is the Statement statement to execute. - Statement *SQLStatement -} - -func (e *LoopTermSQL) Accept(v Visitor) interface{} { - return v.VisitLoopTermSQL(e) -} - -type LoopTermVariable struct { - baseLoopTerm - // Variable is the variable to loop through. - // It must be an array. - Variable *ExpressionVariable -} - -func (e *LoopTermVariable) Accept(v Visitor) interface{} { - return v.VisitLoopTermVariable(e) -} - -type ProcedureStmtIf struct { - baseProcedureStmt - // IfThens are the if statements. - // They are evaluated in order, as - // IF ... THEN ... ELSEIF ... THEN ... - IfThens []*IfThen - // Else is the else statement. - // It is evaluated if no other if statement - // is true. 
- Else []ProcedureStmt -} - -func (p *ProcedureStmtIf) Accept(v Visitor) any { - return v.VisitProcedureStmtIf(p) -} - -type IfThen struct { - Position - If Expression - Then []ProcedureStmt -} - -func (i *IfThen) Accept(v Visitor) any { - return v.VisitIfThen(i) -} - -type ProcedureStmtSQL struct { - baseProcedureStmt - SQL *SQLStatement -} - -func (p *ProcedureStmtSQL) Accept(v Visitor) any { - return v.VisitProcedureStmtSQL(p) -} - -type ProcedureStmtBreak struct { - baseProcedureStmt -} - -func (p *ProcedureStmtBreak) Accept(v Visitor) any { - return v.VisitProcedureStmtBreak(p) -} - -type ProcedureStmtReturn struct { - baseProcedureStmt - // Values are the values to return. - // Either values is set or SQL is set, but not both. - Values []Expression - // SQL is the SQL statement to return. - // Either values is set or SQL is set, but not both. - SQL *SQLStatement -} - -func (p *ProcedureStmtReturn) Accept(v Visitor) any { - return v.VisitProcedureStmtReturn(p) -} - -type ProcedureStmtReturnNext struct { - baseProcedureStmt - // Values are the values to return. - Values []Expression -} - -func (p *ProcedureStmtReturnNext) Accept(v Visitor) any { - return v.VisitProcedureStmtReturnNext(p) -} - -/* - There are three types of visitors, all which compose on each other: - - Visitor: top-level visitor capable of visiting actions, procedures, and SQL. - - ProcedureVisitor: a visitor capable of only visiting procedures and SQL. It must include - SQL because procedures themselves rely on SQL/ - - SQLVisitor: a visitor capable of only visiting SQL. -*/ - -// Visitor is an interface for visiting nodes in the parse tree. -type Visitor interface { - ProcedureVisitor - VisitActionStmtSQL(*ActionStmtSQL) any - VisitActionStmtExtensionCall(*ActionStmtExtensionCall) any - VisitActionStmtActionCall(*ActionStmtActionCall) any -} - -// ProcedureVisitor includes visit methods only needed to analyze procedures. -// It does not need visit methods for structs that are for the schema or actions -type ProcedureVisitor interface { - SQLVisitor - VisitProcedureStmtDeclaration(*ProcedureStmtDeclaration) any - VisitProcedureStmtAssignment(*ProcedureStmtAssign) any - VisitProcedureStmtCall(*ProcedureStmtCall) any - VisitProcedureStmtForLoop(*ProcedureStmtForLoop) any - VisitLoopTermRange(*LoopTermRange) any - VisitLoopTermSQL(*LoopTermSQL) any - VisitLoopTermVariable(*LoopTermVariable) any - VisitProcedureStmtIf(*ProcedureStmtIf) any - VisitIfThen(*IfThen) any - VisitProcedureStmtSQL(*ProcedureStmtSQL) any - VisitProcedureStmtBreak(*ProcedureStmtBreak) any - VisitProcedureStmtReturn(*ProcedureStmtReturn) any - VisitProcedureStmtReturnNext(*ProcedureStmtReturnNext) any -} - -// SQLVisitor is a visitor that only has methods for SQL nodes. 
-type SQLVisitor interface { - VisitExpressionLiteral(*ExpressionLiteral) any - VisitExpressionFunctionCall(*ExpressionFunctionCall) any - VisitExpressionForeignCall(*ExpressionForeignCall) any - VisitExpressionVariable(*ExpressionVariable) any - VisitExpressionArrayAccess(*ExpressionArrayAccess) any - VisitExpressionMakeArray(*ExpressionMakeArray) any - VisitExpressionFieldAccess(*ExpressionFieldAccess) any - VisitExpressionParenthesized(*ExpressionParenthesized) any - VisitExpressionComparison(*ExpressionComparison) any - VisitExpressionLogical(*ExpressionLogical) any - VisitExpressionArithmetic(*ExpressionArithmetic) any - VisitExpressionUnary(*ExpressionUnary) any - VisitExpressionColumn(*ExpressionColumn) any - VisitExpressionCollate(*ExpressionCollate) any - VisitExpressionStringComparison(*ExpressionStringComparison) any - VisitExpressionIs(*ExpressionIs) any - VisitExpressionIn(*ExpressionIn) any - VisitExpressionBetween(*ExpressionBetween) any - VisitExpressionSubquery(*ExpressionSubquery) any - VisitExpressionCase(*ExpressionCase) any - VisitCommonTableExpression(*CommonTableExpression) any - VisitSQLStatement(*SQLStatement) any - VisitSelectStatement(*SelectStatement) any - VisitSelectCore(*SelectCore) any - VisitResultColumnExpression(*ResultColumnExpression) any - VisitResultColumnWildcard(*ResultColumnWildcard) any - VisitRelationTable(*RelationTable) any - VisitRelationSubquery(*RelationSubquery) any - VisitRelationFunctionCall(*RelationFunctionCall) any - VisitJoin(*Join) any - VisitUpdateStatement(*UpdateStatement) any - VisitUpdateSetClause(*UpdateSetClause) any - VisitDeleteStatement(*DeleteStatement) any - VisitInsertStatement(*InsertStatement) any - VisitUpsertClause(*UpsertClause) any - VisitOrderingTerm(*OrderingTerm) any -} - -// UnimplementedSqlVisitor is meant to be used when an implementing visitor only intends -// to implement the SQLVisitor interface. It will implement the full visitor interface, -// but will panic if any of the methods are called. It does not implement the SQLVisitor -// interface, so it alone cannot be used as a visitor. -type UnimplementedSqlVisitor struct { - UnimplementedProcedureVisitor -} - -func (s *UnimplementedSqlVisitor) VisitActionStmtSQL(p0 *ActionStmtSQL) any { - panic(fmt.Sprintf("api misuse: cannot visit %T in constrained visitor", s)) -} - -func (s *UnimplementedSqlVisitor) VisitActionStmtExtensionCall(p0 *ActionStmtExtensionCall) any { - panic(fmt.Sprintf("api misuse: cannot visit %T in constrained visitor", s)) -} - -func (s *UnimplementedSqlVisitor) VisitActionStmtActionCall(p0 *ActionStmtActionCall) any { - panic(fmt.Sprintf("api misuse: cannot visit %T in constrained visitor", s)) -} - -// UnimplementedProcedureVisitor is meant to be used when an implementing visitor only intends -// to implement the ProcedureVisitor interface. It will implement the full visitor interface, -// but will panic if any of the methods are called. It does not implement the SQLVisitor or -// ProcedureVisitor interfaces, so it alone cannot be used as a visitor. 
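The UnimplementedSqlVisitor/UnimplementedProcedureVisitor pattern above exists so a visitor that only cares about a subset of nodes can still satisfy the full Visitor interface: it embeds a base whose out-of-scope methods panic, instead of hand-writing stubs. A generic sketch of the embedding trick with made-up interface names:

package main

import "fmt"

// fullVisitor stands in for the wide interface; sqlOnlyVisitor only wants to
// handle SQL nodes, so it embeds a base whose action method panics.
type fullVisitor interface {
	visitSQL(stmt string) any
	visitAction(stmt string) any
}

type unimplementedActionVisitor struct{}

func (unimplementedActionVisitor) visitAction(string) any {
	panic("api misuse: this visitor does not handle action nodes")
}

type sqlOnlyVisitor struct {
	unimplementedActionVisitor // satisfies visitAction for us
}

func (sqlOnlyVisitor) visitSQL(stmt string) any { return "visited: " + stmt }

func main() {
	var v fullVisitor = sqlOnlyVisitor{}
	fmt.Println(v.visitSQL("SELECT 1"))
	// calling v.visitAction would panic, matching the constrained-visitor behavior above
}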
-type UnimplementedProcedureVisitor struct{} - -func (s *UnimplementedProcedureVisitor) VisitProcedureStmtDeclaration(p0 *ProcedureStmtDeclaration) any { - panic(fmt.Sprintf("api misuse: cannot visit %T in constrained visitor", s)) -} - -func (s *UnimplementedProcedureVisitor) VisitProcedureStmtAssignment(p0 *ProcedureStmtAssign) any { - panic(fmt.Sprintf("api misuse: cannot visit %T in constrained visitor", s)) -} - -func (s *UnimplementedProcedureVisitor) VisitProcedureStmtCall(p0 *ProcedureStmtCall) any { - panic(fmt.Sprintf("api misuse: cannot visit %T in constrained visitor", s)) -} - -func (s *UnimplementedProcedureVisitor) VisitProcedureStmtForLoop(p0 *ProcedureStmtForLoop) any { - panic(fmt.Sprintf("api misuse: cannot visit %T in constrained visitor", s)) -} - -func (s *UnimplementedProcedureVisitor) VisitLoopTermRange(p0 *LoopTermRange) any { - panic(fmt.Sprintf("api misuse: cannot visit %T in constrained visitor", s)) -} - -func (s *UnimplementedProcedureVisitor) VisitLoopTermSQL(p0 *LoopTermSQL) any { - panic(fmt.Sprintf("api misuse: cannot visit %T in constrained visitor", s)) -} - -func (s *UnimplementedProcedureVisitor) VisitLoopTermVariable(p0 *LoopTermVariable) any { - panic(fmt.Sprintf("api misuse: cannot visit %T in constrained visitor", s)) -} - -func (s *UnimplementedProcedureVisitor) VisitProcedureStmtIf(p0 *ProcedureStmtIf) any { - panic(fmt.Sprintf("api misuse: cannot visit %T in constrained visitor", s)) -} - -func (s *UnimplementedProcedureVisitor) VisitIfThen(p0 *IfThen) any { - panic(fmt.Sprintf("api misuse: cannot visit %T in constrained visitor", s)) -} - -func (s *UnimplementedProcedureVisitor) VisitProcedureStmtSQL(p0 *ProcedureStmtSQL) any { - panic(fmt.Sprintf("api misuse: cannot visit %T in constrained visitor", s)) -} - -func (s *UnimplementedProcedureVisitor) VisitProcedureStmtBreak(p0 *ProcedureStmtBreak) any { - panic(fmt.Sprintf("api misuse: cannot visit %T in constrained visitor", s)) -} - -func (s *UnimplementedProcedureVisitor) VisitProcedureStmtReturn(p0 *ProcedureStmtReturn) any { - panic(fmt.Sprintf("api misuse: cannot visit %T in constrained visitor", s)) -} - -func (s *UnimplementedProcedureVisitor) VisitProcedureStmtReturnNext(p0 *ProcedureStmtReturnNext) any { - panic(fmt.Sprintf("api misuse: cannot visit %T in constrained visitor", s)) -} diff --git a/parse/gen/kuneiform_lexer.go b/parse/gen/kuneiform_lexer.go deleted file mode 100644 index 8c6fb80e0..000000000 --- a/parse/gen/kuneiform_lexer.go +++ /dev/null @@ -1,698 +0,0 @@ -// Code generated from KuneiformLexer.g4 by ANTLR 4.13.1. DO NOT EDIT. 
- -package gen - -import ( - "fmt" - "sync" - "unicode" - - "github.com/antlr4-go/antlr/v4" -) - -// Suppress unused import error -var _ = fmt.Printf -var _ = sync.Once{} -var _ = unicode.IsLetter - -type KuneiformLexer struct { - *antlr.BaseLexer - channelNames []string - modeNames []string - // TODO: EOF string -} - -var KuneiformLexerLexerStaticData struct { - once sync.Once - serializedATN []int32 - ChannelNames []string - ModeNames []string - LiteralNames []string - SymbolicNames []string - RuleNames []string - PredictionContextCache *antlr.PredictionContextCache - atn *antlr.ATN - decisionToDFA []*antlr.DFA -} - -func kuneiformlexerLexerInit() { - staticData := &KuneiformLexerLexerStaticData - staticData.ChannelNames = []string{ - "DEFAULT_TOKEN_CHANNEL", "HIDDEN", - } - staticData.ModeNames = []string{ - "DEFAULT_MODE", - } - staticData.LiteralNames = []string{ - "", "'{'", "'}'", "'['", "']'", "':'", "';'", "'('", "')'", "','", "'@'", - "'!'", "'.'", "'||'", "'*'", "'='", "'=='", "'#'", "'$'", "'%'", "'+'", - "'-'", "'/'", "", "'<'", "'<='", "'>'", "'>='", "'::'", "'_'", "':='", - "'..'", "'\"'", "'database'", "'use'", "'table'", "'action'", "'procedure'", - "'public'", "'private'", "'view'", "'owner'", "'foreign'", "'primary'", - "'key'", "'on'", "'do'", "'unique'", "'cascade'", "'restrict'", "'set'", - "'default'", "'null'", "'delete'", "'update'", "'references'", "'ref'", - "'not'", "'index'", "'and'", "'or'", "'like'", "'ilike'", "'in'", "'between'", - "'is'", "'exists'", "'all'", "'any'", "'join'", "'left'", "'right'", - "'inner'", "'as'", "'asc'", "'desc'", "'limit'", "'offset'", "'order'", - "'by'", "'group'", "'having'", "'returns'", "'no'", "'with'", "'case'", - "'when'", "'then'", "'end'", "'distinct'", "'from'", "'where'", "'collate'", - "'select'", "'insert'", "'values'", "'full'", "'union'", "'intersect'", - "'except'", "'nulls'", "'first'", "'last'", "'returning'", "'into'", - "'conflict'", "'nothing'", "'for'", "'if'", "'elseif'", "'else'", "'break'", - "'return'", "'next'", "", "'true'", "'false'", "", "", "", "'on_update'", - "'on_delete'", "'set_default'", "'set_null'", "'no_action'", - } - staticData.SymbolicNames = []string{ - "", "LBRACE", "RBRACE", "LBRACKET", "RBRACKET", "COL", "SCOL", "LPAREN", - "RPAREN", "COMMA", "AT", "EXCL", "PERIOD", "CONCAT", "STAR", "EQUALS", - "EQUATE", "HASH", "DOLLAR", "MOD", "PLUS", "MINUS", "DIV", "NEQ", "LT", - "LTE", "GT", "GTE", "TYPE_CAST", "UNDERSCORE", "ASSIGN", "RANGE", "DOUBLE_QUOTE", - "DATABASE", "USE", "TABLE", "ACTION", "PROCEDURE", "PUBLIC", "PRIVATE", - "VIEW", "OWNER", "FOREIGN", "PRIMARY", "KEY", "ON", "DO", "UNIQUE", - "CASCADE", "RESTRICT", "SET", "DEFAULT", "NULL", "DELETE", "UPDATE", - "REFERENCES", "REF", "NOT", "INDEX", "AND", "OR", "LIKE", "ILIKE", "IN", - "BETWEEN", "IS", "EXISTS", "ALL", "ANY", "JOIN", "LEFT", "RIGHT", "INNER", - "AS", "ASC", "DESC", "LIMIT", "OFFSET", "ORDER", "BY", "GROUP", "HAVING", - "RETURNS", "NO", "WITH", "CASE", "WHEN", "THEN", "END", "DISTINCT", - "FROM", "WHERE", "COLLATE", "SELECT", "INSERT", "VALUES", "FULL", "UNION", - "INTERSECT", "EXCEPT", "NULLS", "FIRST", "LAST", "RETURNING", "INTO", - "CONFLICT", "NOTHING", "FOR", "IF", "ELSEIF", "ELSE", "BREAK", "RETURN", - "NEXT", "STRING_", "TRUE", "FALSE", "DIGITS_", "BINARY_", "LEGACY_FOREIGN_KEY", - "LEGACY_ON_UPDATE", "LEGACY_ON_DELETE", "LEGACY_SET_DEFAULT", "LEGACY_SET_NULL", - "LEGACY_NO_ACTION", "IDENTIFIER", "VARIABLE", "CONTEXTUAL_VARIABLE", - "HASH_IDENTIFIER", "WS", "BLOCK_COMMENT", "LINE_COMMENT", - } - 
staticData.RuleNames = []string{ - "LBRACE", "RBRACE", "LBRACKET", "RBRACKET", "COL", "SCOL", "LPAREN", - "RPAREN", "COMMA", "AT", "EXCL", "PERIOD", "CONCAT", "STAR", "EQUALS", - "EQUATE", "HASH", "DOLLAR", "MOD", "PLUS", "MINUS", "DIV", "NEQ", "LT", - "LTE", "GT", "GTE", "TYPE_CAST", "UNDERSCORE", "ASSIGN", "RANGE", "DOUBLE_QUOTE", - "DATABASE", "USE", "TABLE", "ACTION", "PROCEDURE", "PUBLIC", "PRIVATE", - "VIEW", "OWNER", "FOREIGN", "PRIMARY", "KEY", "ON", "DO", "UNIQUE", - "CASCADE", "RESTRICT", "SET", "DEFAULT", "NULL", "DELETE", "UPDATE", - "REFERENCES", "REF", "NOT", "INDEX", "AND", "OR", "LIKE", "ILIKE", "IN", - "BETWEEN", "IS", "EXISTS", "ALL", "ANY", "JOIN", "LEFT", "RIGHT", "INNER", - "AS", "ASC", "DESC", "LIMIT", "OFFSET", "ORDER", "BY", "GROUP", "HAVING", - "RETURNS", "NO", "WITH", "CASE", "WHEN", "THEN", "END", "DISTINCT", - "FROM", "WHERE", "COLLATE", "SELECT", "INSERT", "VALUES", "FULL", "UNION", - "INTERSECT", "EXCEPT", "NULLS", "FIRST", "LAST", "RETURNING", "INTO", - "CONFLICT", "NOTHING", "FOR", "IF", "ELSEIF", "ELSE", "BREAK", "RETURN", - "NEXT", "STRING_", "TRUE", "FALSE", "DIGITS_", "BINARY_", "LEGACY_FOREIGN_KEY", - "LEGACY_ON_UPDATE", "LEGACY_ON_DELETE", "LEGACY_SET_DEFAULT", "LEGACY_SET_NULL", - "LEGACY_NO_ACTION", "IDENTIFIER", "VARIABLE", "CONTEXTUAL_VARIABLE", - "HASH_IDENTIFIER", "WS", "BLOCK_COMMENT", "LINE_COMMENT", - } - staticData.PredictionContextCache = antlr.NewPredictionContextCache() - staticData.serializedATN = []int32{ - 4, 0, 131, 966, 6, -1, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, - 2, 4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, - 2, 10, 7, 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 2, 14, 7, 14, 2, - 15, 7, 15, 2, 16, 7, 16, 2, 17, 7, 17, 2, 18, 7, 18, 2, 19, 7, 19, 2, 20, - 7, 20, 2, 21, 7, 21, 2, 22, 7, 22, 2, 23, 7, 23, 2, 24, 7, 24, 2, 25, 7, - 25, 2, 26, 7, 26, 2, 27, 7, 27, 2, 28, 7, 28, 2, 29, 7, 29, 2, 30, 7, 30, - 2, 31, 7, 31, 2, 32, 7, 32, 2, 33, 7, 33, 2, 34, 7, 34, 2, 35, 7, 35, 2, - 36, 7, 36, 2, 37, 7, 37, 2, 38, 7, 38, 2, 39, 7, 39, 2, 40, 7, 40, 2, 41, - 7, 41, 2, 42, 7, 42, 2, 43, 7, 43, 2, 44, 7, 44, 2, 45, 7, 45, 2, 46, 7, - 46, 2, 47, 7, 47, 2, 48, 7, 48, 2, 49, 7, 49, 2, 50, 7, 50, 2, 51, 7, 51, - 2, 52, 7, 52, 2, 53, 7, 53, 2, 54, 7, 54, 2, 55, 7, 55, 2, 56, 7, 56, 2, - 57, 7, 57, 2, 58, 7, 58, 2, 59, 7, 59, 2, 60, 7, 60, 2, 61, 7, 61, 2, 62, - 7, 62, 2, 63, 7, 63, 2, 64, 7, 64, 2, 65, 7, 65, 2, 66, 7, 66, 2, 67, 7, - 67, 2, 68, 7, 68, 2, 69, 7, 69, 2, 70, 7, 70, 2, 71, 7, 71, 2, 72, 7, 72, - 2, 73, 7, 73, 2, 74, 7, 74, 2, 75, 7, 75, 2, 76, 7, 76, 2, 77, 7, 77, 2, - 78, 7, 78, 2, 79, 7, 79, 2, 80, 7, 80, 2, 81, 7, 81, 2, 82, 7, 82, 2, 83, - 7, 83, 2, 84, 7, 84, 2, 85, 7, 85, 2, 86, 7, 86, 2, 87, 7, 87, 2, 88, 7, - 88, 2, 89, 7, 89, 2, 90, 7, 90, 2, 91, 7, 91, 2, 92, 7, 92, 2, 93, 7, 93, - 2, 94, 7, 94, 2, 95, 7, 95, 2, 96, 7, 96, 2, 97, 7, 97, 2, 98, 7, 98, 2, - 99, 7, 99, 2, 100, 7, 100, 2, 101, 7, 101, 2, 102, 7, 102, 2, 103, 7, 103, - 2, 104, 7, 104, 2, 105, 7, 105, 2, 106, 7, 106, 2, 107, 7, 107, 2, 108, - 7, 108, 2, 109, 7, 109, 2, 110, 7, 110, 2, 111, 7, 111, 2, 112, 7, 112, - 2, 113, 7, 113, 2, 114, 7, 114, 2, 115, 7, 115, 2, 116, 7, 116, 2, 117, - 7, 117, 2, 118, 7, 118, 2, 119, 7, 119, 2, 120, 7, 120, 2, 121, 7, 121, - 2, 122, 7, 122, 2, 123, 7, 123, 2, 124, 7, 124, 2, 125, 7, 125, 2, 126, - 7, 126, 2, 127, 7, 127, 2, 128, 7, 128, 2, 129, 7, 129, 2, 130, 7, 130, - 1, 0, 1, 0, 1, 1, 1, 1, 1, 2, 1, 2, 1, 3, 1, 3, 1, 4, 1, 4, 1, 5, 1, 5, - 1, 6, 1, 6, 1, 7, 1, 7, 1, 8, 1, 8, 1, 9, 1, 
9, 1, 10, 1, 10, 1, 11, 1, - 11, 1, 12, 1, 12, 1, 12, 1, 13, 1, 13, 1, 14, 1, 14, 1, 15, 1, 15, 1, 15, - 1, 16, 1, 16, 1, 17, 1, 17, 1, 18, 1, 18, 1, 19, 1, 19, 1, 20, 1, 20, 1, - 21, 1, 21, 1, 22, 1, 22, 1, 22, 1, 22, 3, 22, 314, 8, 22, 1, 23, 1, 23, - 1, 24, 1, 24, 1, 24, 1, 25, 1, 25, 1, 26, 1, 26, 1, 26, 1, 27, 1, 27, 1, - 27, 1, 28, 1, 28, 1, 29, 1, 29, 1, 29, 1, 30, 1, 30, 1, 30, 1, 31, 1, 31, - 1, 32, 1, 32, 1, 32, 1, 32, 1, 32, 1, 32, 1, 32, 1, 32, 1, 32, 1, 33, 1, - 33, 1, 33, 1, 33, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 35, 1, 35, - 1, 35, 1, 35, 1, 35, 1, 35, 1, 35, 1, 36, 1, 36, 1, 36, 1, 36, 1, 36, 1, - 36, 1, 36, 1, 36, 1, 36, 1, 36, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, - 1, 37, 1, 38, 1, 38, 1, 38, 1, 38, 1, 38, 1, 38, 1, 38, 1, 38, 1, 39, 1, - 39, 1, 39, 1, 39, 1, 39, 1, 40, 1, 40, 1, 40, 1, 40, 1, 40, 1, 40, 1, 41, - 1, 41, 1, 41, 1, 41, 1, 41, 1, 41, 1, 41, 1, 41, 1, 42, 1, 42, 1, 42, 1, - 42, 1, 42, 1, 42, 1, 42, 1, 42, 1, 43, 1, 43, 1, 43, 1, 43, 1, 44, 1, 44, - 1, 44, 1, 45, 1, 45, 1, 45, 1, 46, 1, 46, 1, 46, 1, 46, 1, 46, 1, 46, 1, - 46, 1, 47, 1, 47, 1, 47, 1, 47, 1, 47, 1, 47, 1, 47, 1, 47, 1, 48, 1, 48, - 1, 48, 1, 48, 1, 48, 1, 48, 1, 48, 1, 48, 1, 48, 1, 49, 1, 49, 1, 49, 1, - 49, 1, 50, 1, 50, 1, 50, 1, 50, 1, 50, 1, 50, 1, 50, 1, 50, 1, 51, 1, 51, - 1, 51, 1, 51, 1, 51, 1, 52, 1, 52, 1, 52, 1, 52, 1, 52, 1, 52, 1, 52, 1, - 53, 1, 53, 1, 53, 1, 53, 1, 53, 1, 53, 1, 53, 1, 54, 1, 54, 1, 54, 1, 54, - 1, 54, 1, 54, 1, 54, 1, 54, 1, 54, 1, 54, 1, 54, 1, 55, 1, 55, 1, 55, 1, - 55, 1, 56, 1, 56, 1, 56, 1, 56, 1, 57, 1, 57, 1, 57, 1, 57, 1, 57, 1, 57, - 1, 58, 1, 58, 1, 58, 1, 58, 1, 59, 1, 59, 1, 59, 1, 60, 1, 60, 1, 60, 1, - 60, 1, 60, 1, 61, 1, 61, 1, 61, 1, 61, 1, 61, 1, 61, 1, 62, 1, 62, 1, 62, - 1, 63, 1, 63, 1, 63, 1, 63, 1, 63, 1, 63, 1, 63, 1, 63, 1, 64, 1, 64, 1, - 64, 1, 65, 1, 65, 1, 65, 1, 65, 1, 65, 1, 65, 1, 65, 1, 66, 1, 66, 1, 66, - 1, 66, 1, 67, 1, 67, 1, 67, 1, 67, 1, 68, 1, 68, 1, 68, 1, 68, 1, 68, 1, - 69, 1, 69, 1, 69, 1, 69, 1, 69, 1, 70, 1, 70, 1, 70, 1, 70, 1, 70, 1, 70, - 1, 71, 1, 71, 1, 71, 1, 71, 1, 71, 1, 71, 1, 72, 1, 72, 1, 72, 1, 73, 1, - 73, 1, 73, 1, 73, 1, 74, 1, 74, 1, 74, 1, 74, 1, 74, 1, 75, 1, 75, 1, 75, - 1, 75, 1, 75, 1, 75, 1, 76, 1, 76, 1, 76, 1, 76, 1, 76, 1, 76, 1, 76, 1, - 77, 1, 77, 1, 77, 1, 77, 1, 77, 1, 77, 1, 78, 1, 78, 1, 78, 1, 79, 1, 79, - 1, 79, 1, 79, 1, 79, 1, 79, 1, 80, 1, 80, 1, 80, 1, 80, 1, 80, 1, 80, 1, - 80, 1, 81, 1, 81, 1, 81, 1, 81, 1, 81, 1, 81, 1, 81, 1, 81, 1, 82, 1, 82, - 1, 82, 1, 83, 1, 83, 1, 83, 1, 83, 1, 83, 1, 84, 1, 84, 1, 84, 1, 84, 1, - 84, 1, 85, 1, 85, 1, 85, 1, 85, 1, 85, 1, 86, 1, 86, 1, 86, 1, 86, 1, 86, - 1, 87, 1, 87, 1, 87, 1, 87, 1, 88, 1, 88, 1, 88, 1, 88, 1, 88, 1, 88, 1, - 88, 1, 88, 1, 88, 1, 89, 1, 89, 1, 89, 1, 89, 1, 89, 1, 90, 1, 90, 1, 90, - 1, 90, 1, 90, 1, 90, 1, 91, 1, 91, 1, 91, 1, 91, 1, 91, 1, 91, 1, 91, 1, - 91, 1, 92, 1, 92, 1, 92, 1, 92, 1, 92, 1, 92, 1, 92, 1, 93, 1, 93, 1, 93, - 1, 93, 1, 93, 1, 93, 1, 93, 1, 94, 1, 94, 1, 94, 1, 94, 1, 94, 1, 94, 1, - 94, 1, 95, 1, 95, 1, 95, 1, 95, 1, 95, 1, 96, 1, 96, 1, 96, 1, 96, 1, 96, - 1, 96, 1, 97, 1, 97, 1, 97, 1, 97, 1, 97, 1, 97, 1, 97, 1, 97, 1, 97, 1, - 97, 1, 98, 1, 98, 1, 98, 1, 98, 1, 98, 1, 98, 1, 98, 1, 99, 1, 99, 1, 99, - 1, 99, 1, 99, 1, 99, 1, 100, 1, 100, 1, 100, 1, 100, 1, 100, 1, 100, 1, - 101, 1, 101, 1, 101, 1, 101, 1, 101, 1, 102, 1, 102, 1, 102, 1, 102, 1, - 102, 1, 102, 1, 102, 1, 102, 1, 102, 1, 102, 1, 103, 1, 103, 1, 103, 1, - 103, 1, 103, 1, 104, 1, 104, 1, 104, 1, 104, 1, 104, 1, 
104, 1, 104, 1, - 104, 1, 104, 1, 105, 1, 105, 1, 105, 1, 105, 1, 105, 1, 105, 1, 105, 1, - 105, 1, 106, 1, 106, 1, 106, 1, 106, 1, 107, 1, 107, 1, 107, 1, 108, 1, - 108, 1, 108, 1, 108, 1, 108, 1, 108, 1, 108, 1, 109, 1, 109, 1, 109, 1, - 109, 1, 109, 1, 110, 1, 110, 1, 110, 1, 110, 1, 110, 1, 110, 1, 111, 1, - 111, 1, 111, 1, 111, 1, 111, 1, 111, 1, 111, 1, 112, 1, 112, 1, 112, 1, - 112, 1, 112, 1, 113, 1, 113, 1, 113, 1, 113, 5, 113, 825, 8, 113, 10, 113, - 12, 113, 828, 9, 113, 1, 113, 1, 113, 1, 114, 1, 114, 1, 114, 1, 114, 1, - 114, 1, 115, 1, 115, 1, 115, 1, 115, 1, 115, 1, 115, 1, 116, 4, 116, 844, - 8, 116, 11, 116, 12, 116, 845, 1, 117, 1, 117, 1, 117, 1, 117, 4, 117, - 852, 8, 117, 11, 117, 12, 117, 853, 1, 118, 1, 118, 1, 118, 1, 118, 1, - 118, 1, 118, 1, 118, 1, 118, 1, 118, 1, 118, 1, 118, 1, 118, 1, 118, 3, - 118, 869, 8, 118, 1, 119, 1, 119, 1, 119, 1, 119, 1, 119, 1, 119, 1, 119, - 1, 119, 1, 119, 1, 119, 1, 120, 1, 120, 1, 120, 1, 120, 1, 120, 1, 120, - 1, 120, 1, 120, 1, 120, 1, 120, 1, 121, 1, 121, 1, 121, 1, 121, 1, 121, - 1, 121, 1, 121, 1, 121, 1, 121, 1, 121, 1, 121, 1, 121, 1, 122, 1, 122, - 1, 122, 1, 122, 1, 122, 1, 122, 1, 122, 1, 122, 1, 122, 1, 123, 1, 123, - 1, 123, 1, 123, 1, 123, 1, 123, 1, 123, 1, 123, 1, 123, 1, 123, 1, 124, - 1, 124, 5, 124, 924, 8, 124, 10, 124, 12, 124, 927, 9, 124, 1, 125, 1, - 125, 1, 125, 1, 126, 1, 126, 1, 126, 1, 127, 1, 127, 1, 127, 1, 128, 1, - 128, 1, 128, 1, 128, 1, 129, 1, 129, 1, 129, 1, 129, 5, 129, 946, 8, 129, - 10, 129, 12, 129, 949, 9, 129, 1, 129, 1, 129, 1, 129, 1, 129, 1, 129, - 1, 130, 1, 130, 1, 130, 1, 130, 5, 130, 960, 8, 130, 10, 130, 12, 130, - 963, 9, 130, 1, 130, 1, 130, 1, 947, 0, 131, 1, 1, 3, 2, 5, 3, 7, 4, 9, - 5, 11, 6, 13, 7, 15, 8, 17, 9, 19, 10, 21, 11, 23, 12, 25, 13, 27, 14, - 29, 15, 31, 16, 33, 17, 35, 18, 37, 19, 39, 20, 41, 21, 43, 22, 45, 23, - 47, 24, 49, 25, 51, 26, 53, 27, 55, 28, 57, 29, 59, 30, 61, 31, 63, 32, - 65, 33, 67, 34, 69, 35, 71, 36, 73, 37, 75, 38, 77, 39, 79, 40, 81, 41, - 83, 42, 85, 43, 87, 44, 89, 45, 91, 46, 93, 47, 95, 48, 97, 49, 99, 50, - 101, 51, 103, 52, 105, 53, 107, 54, 109, 55, 111, 56, 113, 57, 115, 58, - 117, 59, 119, 60, 121, 61, 123, 62, 125, 63, 127, 64, 129, 65, 131, 66, - 133, 67, 135, 68, 137, 69, 139, 70, 141, 71, 143, 72, 145, 73, 147, 74, - 149, 75, 151, 76, 153, 77, 155, 78, 157, 79, 159, 80, 161, 81, 163, 82, - 165, 83, 167, 84, 169, 85, 171, 86, 173, 87, 175, 88, 177, 89, 179, 90, - 181, 91, 183, 92, 185, 93, 187, 94, 189, 95, 191, 96, 193, 97, 195, 98, - 197, 99, 199, 100, 201, 101, 203, 102, 205, 103, 207, 104, 209, 105, 211, - 106, 213, 107, 215, 108, 217, 109, 219, 110, 221, 111, 223, 112, 225, 113, - 227, 114, 229, 115, 231, 116, 233, 117, 235, 118, 237, 119, 239, 120, 241, - 121, 243, 122, 245, 123, 247, 124, 249, 125, 251, 126, 253, 127, 255, 128, - 257, 129, 259, 130, 261, 131, 1, 0, 32, 2, 0, 68, 68, 100, 100, 2, 0, 65, - 65, 97, 97, 2, 0, 84, 84, 116, 116, 2, 0, 66, 66, 98, 98, 2, 0, 83, 83, - 115, 115, 2, 0, 69, 69, 101, 101, 2, 0, 85, 85, 117, 117, 2, 0, 76, 76, - 108, 108, 2, 0, 67, 67, 99, 99, 2, 0, 73, 73, 105, 105, 2, 0, 79, 79, 111, - 111, 2, 0, 78, 78, 110, 110, 2, 0, 80, 80, 112, 112, 2, 0, 82, 82, 114, - 114, 2, 0, 86, 86, 118, 118, 2, 0, 87, 87, 119, 119, 2, 0, 70, 70, 102, - 102, 2, 0, 71, 71, 103, 103, 2, 0, 77, 77, 109, 109, 2, 0, 89, 89, 121, - 121, 2, 0, 75, 75, 107, 107, 2, 0, 81, 81, 113, 113, 2, 0, 88, 88, 120, - 120, 2, 0, 74, 74, 106, 106, 2, 0, 72, 72, 104, 104, 2, 0, 39, 39, 92, - 92, 1, 0, 48, 57, 3, 0, 48, 57, 65, 70, 
97, 102, 2, 0, 65, 90, 97, 122, - 4, 0, 48, 57, 65, 90, 95, 95, 97, 122, 3, 0, 9, 11, 13, 13, 32, 32, 2, - 0, 10, 10, 13, 13, 974, 0, 1, 1, 0, 0, 0, 0, 3, 1, 0, 0, 0, 0, 5, 1, 0, - 0, 0, 0, 7, 1, 0, 0, 0, 0, 9, 1, 0, 0, 0, 0, 11, 1, 0, 0, 0, 0, 13, 1, - 0, 0, 0, 0, 15, 1, 0, 0, 0, 0, 17, 1, 0, 0, 0, 0, 19, 1, 0, 0, 0, 0, 21, - 1, 0, 0, 0, 0, 23, 1, 0, 0, 0, 0, 25, 1, 0, 0, 0, 0, 27, 1, 0, 0, 0, 0, - 29, 1, 0, 0, 0, 0, 31, 1, 0, 0, 0, 0, 33, 1, 0, 0, 0, 0, 35, 1, 0, 0, 0, - 0, 37, 1, 0, 0, 0, 0, 39, 1, 0, 0, 0, 0, 41, 1, 0, 0, 0, 0, 43, 1, 0, 0, - 0, 0, 45, 1, 0, 0, 0, 0, 47, 1, 0, 0, 0, 0, 49, 1, 0, 0, 0, 0, 51, 1, 0, - 0, 0, 0, 53, 1, 0, 0, 0, 0, 55, 1, 0, 0, 0, 0, 57, 1, 0, 0, 0, 0, 59, 1, - 0, 0, 0, 0, 61, 1, 0, 0, 0, 0, 63, 1, 0, 0, 0, 0, 65, 1, 0, 0, 0, 0, 67, - 1, 0, 0, 0, 0, 69, 1, 0, 0, 0, 0, 71, 1, 0, 0, 0, 0, 73, 1, 0, 0, 0, 0, - 75, 1, 0, 0, 0, 0, 77, 1, 0, 0, 0, 0, 79, 1, 0, 0, 0, 0, 81, 1, 0, 0, 0, - 0, 83, 1, 0, 0, 0, 0, 85, 1, 0, 0, 0, 0, 87, 1, 0, 0, 0, 0, 89, 1, 0, 0, - 0, 0, 91, 1, 0, 0, 0, 0, 93, 1, 0, 0, 0, 0, 95, 1, 0, 0, 0, 0, 97, 1, 0, - 0, 0, 0, 99, 1, 0, 0, 0, 0, 101, 1, 0, 0, 0, 0, 103, 1, 0, 0, 0, 0, 105, - 1, 0, 0, 0, 0, 107, 1, 0, 0, 0, 0, 109, 1, 0, 0, 0, 0, 111, 1, 0, 0, 0, - 0, 113, 1, 0, 0, 0, 0, 115, 1, 0, 0, 0, 0, 117, 1, 0, 0, 0, 0, 119, 1, - 0, 0, 0, 0, 121, 1, 0, 0, 0, 0, 123, 1, 0, 0, 0, 0, 125, 1, 0, 0, 0, 0, - 127, 1, 0, 0, 0, 0, 129, 1, 0, 0, 0, 0, 131, 1, 0, 0, 0, 0, 133, 1, 0, - 0, 0, 0, 135, 1, 0, 0, 0, 0, 137, 1, 0, 0, 0, 0, 139, 1, 0, 0, 0, 0, 141, - 1, 0, 0, 0, 0, 143, 1, 0, 0, 0, 0, 145, 1, 0, 0, 0, 0, 147, 1, 0, 0, 0, - 0, 149, 1, 0, 0, 0, 0, 151, 1, 0, 0, 0, 0, 153, 1, 0, 0, 0, 0, 155, 1, - 0, 0, 0, 0, 157, 1, 0, 0, 0, 0, 159, 1, 0, 0, 0, 0, 161, 1, 0, 0, 0, 0, - 163, 1, 0, 0, 0, 0, 165, 1, 0, 0, 0, 0, 167, 1, 0, 0, 0, 0, 169, 1, 0, - 0, 0, 0, 171, 1, 0, 0, 0, 0, 173, 1, 0, 0, 0, 0, 175, 1, 0, 0, 0, 0, 177, - 1, 0, 0, 0, 0, 179, 1, 0, 0, 0, 0, 181, 1, 0, 0, 0, 0, 183, 1, 0, 0, 0, - 0, 185, 1, 0, 0, 0, 0, 187, 1, 0, 0, 0, 0, 189, 1, 0, 0, 0, 0, 191, 1, - 0, 0, 0, 0, 193, 1, 0, 0, 0, 0, 195, 1, 0, 0, 0, 0, 197, 1, 0, 0, 0, 0, - 199, 1, 0, 0, 0, 0, 201, 1, 0, 0, 0, 0, 203, 1, 0, 0, 0, 0, 205, 1, 0, - 0, 0, 0, 207, 1, 0, 0, 0, 0, 209, 1, 0, 0, 0, 0, 211, 1, 0, 0, 0, 0, 213, - 1, 0, 0, 0, 0, 215, 1, 0, 0, 0, 0, 217, 1, 0, 0, 0, 0, 219, 1, 0, 0, 0, - 0, 221, 1, 0, 0, 0, 0, 223, 1, 0, 0, 0, 0, 225, 1, 0, 0, 0, 0, 227, 1, - 0, 0, 0, 0, 229, 1, 0, 0, 0, 0, 231, 1, 0, 0, 0, 0, 233, 1, 0, 0, 0, 0, - 235, 1, 0, 0, 0, 0, 237, 1, 0, 0, 0, 0, 239, 1, 0, 0, 0, 0, 241, 1, 0, - 0, 0, 0, 243, 1, 0, 0, 0, 0, 245, 1, 0, 0, 0, 0, 247, 1, 0, 0, 0, 0, 249, - 1, 0, 0, 0, 0, 251, 1, 0, 0, 0, 0, 253, 1, 0, 0, 0, 0, 255, 1, 0, 0, 0, - 0, 257, 1, 0, 0, 0, 0, 259, 1, 0, 0, 0, 0, 261, 1, 0, 0, 0, 1, 263, 1, - 0, 0, 0, 3, 265, 1, 0, 0, 0, 5, 267, 1, 0, 0, 0, 7, 269, 1, 0, 0, 0, 9, - 271, 1, 0, 0, 0, 11, 273, 1, 0, 0, 0, 13, 275, 1, 0, 0, 0, 15, 277, 1, - 0, 0, 0, 17, 279, 1, 0, 0, 0, 19, 281, 1, 0, 0, 0, 21, 283, 1, 0, 0, 0, - 23, 285, 1, 0, 0, 0, 25, 287, 1, 0, 0, 0, 27, 290, 1, 0, 0, 0, 29, 292, - 1, 0, 0, 0, 31, 294, 1, 0, 0, 0, 33, 297, 1, 0, 0, 0, 35, 299, 1, 0, 0, - 0, 37, 301, 1, 0, 0, 0, 39, 303, 1, 0, 0, 0, 41, 305, 1, 0, 0, 0, 43, 307, - 1, 0, 0, 0, 45, 313, 1, 0, 0, 0, 47, 315, 1, 0, 0, 0, 49, 317, 1, 0, 0, - 0, 51, 320, 1, 0, 0, 0, 53, 322, 1, 0, 0, 0, 55, 325, 1, 0, 0, 0, 57, 328, - 1, 0, 0, 0, 59, 330, 1, 0, 0, 0, 61, 333, 1, 0, 0, 0, 63, 336, 1, 0, 0, - 0, 65, 338, 1, 0, 0, 0, 67, 347, 1, 0, 0, 0, 69, 351, 1, 0, 0, 0, 71, 357, - 1, 0, 0, 0, 73, 364, 1, 0, 
0, 0, 75, 374, 1, 0, 0, 0, 77, 381, 1, 0, 0, - 0, 79, 389, 1, 0, 0, 0, 81, 394, 1, 0, 0, 0, 83, 400, 1, 0, 0, 0, 85, 408, - 1, 0, 0, 0, 87, 416, 1, 0, 0, 0, 89, 420, 1, 0, 0, 0, 91, 423, 1, 0, 0, - 0, 93, 426, 1, 0, 0, 0, 95, 433, 1, 0, 0, 0, 97, 441, 1, 0, 0, 0, 99, 450, - 1, 0, 0, 0, 101, 454, 1, 0, 0, 0, 103, 462, 1, 0, 0, 0, 105, 467, 1, 0, - 0, 0, 107, 474, 1, 0, 0, 0, 109, 481, 1, 0, 0, 0, 111, 492, 1, 0, 0, 0, - 113, 496, 1, 0, 0, 0, 115, 500, 1, 0, 0, 0, 117, 506, 1, 0, 0, 0, 119, - 510, 1, 0, 0, 0, 121, 513, 1, 0, 0, 0, 123, 518, 1, 0, 0, 0, 125, 524, - 1, 0, 0, 0, 127, 527, 1, 0, 0, 0, 129, 535, 1, 0, 0, 0, 131, 538, 1, 0, - 0, 0, 133, 545, 1, 0, 0, 0, 135, 549, 1, 0, 0, 0, 137, 553, 1, 0, 0, 0, - 139, 558, 1, 0, 0, 0, 141, 563, 1, 0, 0, 0, 143, 569, 1, 0, 0, 0, 145, - 575, 1, 0, 0, 0, 147, 578, 1, 0, 0, 0, 149, 582, 1, 0, 0, 0, 151, 587, - 1, 0, 0, 0, 153, 593, 1, 0, 0, 0, 155, 600, 1, 0, 0, 0, 157, 606, 1, 0, - 0, 0, 159, 609, 1, 0, 0, 0, 161, 615, 1, 0, 0, 0, 163, 622, 1, 0, 0, 0, - 165, 630, 1, 0, 0, 0, 167, 633, 1, 0, 0, 0, 169, 638, 1, 0, 0, 0, 171, - 643, 1, 0, 0, 0, 173, 648, 1, 0, 0, 0, 175, 653, 1, 0, 0, 0, 177, 657, - 1, 0, 0, 0, 179, 666, 1, 0, 0, 0, 181, 671, 1, 0, 0, 0, 183, 677, 1, 0, - 0, 0, 185, 685, 1, 0, 0, 0, 187, 692, 1, 0, 0, 0, 189, 699, 1, 0, 0, 0, - 191, 706, 1, 0, 0, 0, 193, 711, 1, 0, 0, 0, 195, 717, 1, 0, 0, 0, 197, - 727, 1, 0, 0, 0, 199, 734, 1, 0, 0, 0, 201, 740, 1, 0, 0, 0, 203, 746, - 1, 0, 0, 0, 205, 751, 1, 0, 0, 0, 207, 761, 1, 0, 0, 0, 209, 766, 1, 0, - 0, 0, 211, 775, 1, 0, 0, 0, 213, 783, 1, 0, 0, 0, 215, 787, 1, 0, 0, 0, - 217, 790, 1, 0, 0, 0, 219, 797, 1, 0, 0, 0, 221, 802, 1, 0, 0, 0, 223, - 808, 1, 0, 0, 0, 225, 815, 1, 0, 0, 0, 227, 820, 1, 0, 0, 0, 229, 831, - 1, 0, 0, 0, 231, 836, 1, 0, 0, 0, 233, 843, 1, 0, 0, 0, 235, 847, 1, 0, - 0, 0, 237, 868, 1, 0, 0, 0, 239, 870, 1, 0, 0, 0, 241, 880, 1, 0, 0, 0, - 243, 890, 1, 0, 0, 0, 245, 902, 1, 0, 0, 0, 247, 911, 1, 0, 0, 0, 249, - 921, 1, 0, 0, 0, 251, 928, 1, 0, 0, 0, 253, 931, 1, 0, 0, 0, 255, 934, - 1, 0, 0, 0, 257, 937, 1, 0, 0, 0, 259, 941, 1, 0, 0, 0, 261, 955, 1, 0, - 0, 0, 263, 264, 5, 123, 0, 0, 264, 2, 1, 0, 0, 0, 265, 266, 5, 125, 0, - 0, 266, 4, 1, 0, 0, 0, 267, 268, 5, 91, 0, 0, 268, 6, 1, 0, 0, 0, 269, - 270, 5, 93, 0, 0, 270, 8, 1, 0, 0, 0, 271, 272, 5, 58, 0, 0, 272, 10, 1, - 0, 0, 0, 273, 274, 5, 59, 0, 0, 274, 12, 1, 0, 0, 0, 275, 276, 5, 40, 0, - 0, 276, 14, 1, 0, 0, 0, 277, 278, 5, 41, 0, 0, 278, 16, 1, 0, 0, 0, 279, - 280, 5, 44, 0, 0, 280, 18, 1, 0, 0, 0, 281, 282, 5, 64, 0, 0, 282, 20, - 1, 0, 0, 0, 283, 284, 5, 33, 0, 0, 284, 22, 1, 0, 0, 0, 285, 286, 5, 46, - 0, 0, 286, 24, 1, 0, 0, 0, 287, 288, 5, 124, 0, 0, 288, 289, 5, 124, 0, - 0, 289, 26, 1, 0, 0, 0, 290, 291, 5, 42, 0, 0, 291, 28, 1, 0, 0, 0, 292, - 293, 5, 61, 0, 0, 293, 30, 1, 0, 0, 0, 294, 295, 5, 61, 0, 0, 295, 296, - 5, 61, 0, 0, 296, 32, 1, 0, 0, 0, 297, 298, 5, 35, 0, 0, 298, 34, 1, 0, - 0, 0, 299, 300, 5, 36, 0, 0, 300, 36, 1, 0, 0, 0, 301, 302, 5, 37, 0, 0, - 302, 38, 1, 0, 0, 0, 303, 304, 5, 43, 0, 0, 304, 40, 1, 0, 0, 0, 305, 306, - 5, 45, 0, 0, 306, 42, 1, 0, 0, 0, 307, 308, 5, 47, 0, 0, 308, 44, 1, 0, - 0, 0, 309, 310, 5, 33, 0, 0, 310, 314, 5, 61, 0, 0, 311, 312, 5, 60, 0, - 0, 312, 314, 5, 62, 0, 0, 313, 309, 1, 0, 0, 0, 313, 311, 1, 0, 0, 0, 314, - 46, 1, 0, 0, 0, 315, 316, 5, 60, 0, 0, 316, 48, 1, 0, 0, 0, 317, 318, 5, - 60, 0, 0, 318, 319, 5, 61, 0, 0, 319, 50, 1, 0, 0, 0, 320, 321, 5, 62, - 0, 0, 321, 52, 1, 0, 0, 0, 322, 323, 5, 62, 0, 0, 323, 324, 5, 61, 0, 0, - 324, 54, 1, 0, 0, 0, 325, 
326, 5, 58, 0, 0, 326, 327, 5, 58, 0, 0, 327, - 56, 1, 0, 0, 0, 328, 329, 5, 95, 0, 0, 329, 58, 1, 0, 0, 0, 330, 331, 5, - 58, 0, 0, 331, 332, 5, 61, 0, 0, 332, 60, 1, 0, 0, 0, 333, 334, 5, 46, - 0, 0, 334, 335, 5, 46, 0, 0, 335, 62, 1, 0, 0, 0, 336, 337, 5, 34, 0, 0, - 337, 64, 1, 0, 0, 0, 338, 339, 7, 0, 0, 0, 339, 340, 7, 1, 0, 0, 340, 341, - 7, 2, 0, 0, 341, 342, 7, 1, 0, 0, 342, 343, 7, 3, 0, 0, 343, 344, 7, 1, - 0, 0, 344, 345, 7, 4, 0, 0, 345, 346, 7, 5, 0, 0, 346, 66, 1, 0, 0, 0, - 347, 348, 7, 6, 0, 0, 348, 349, 7, 4, 0, 0, 349, 350, 7, 5, 0, 0, 350, - 68, 1, 0, 0, 0, 351, 352, 7, 2, 0, 0, 352, 353, 7, 1, 0, 0, 353, 354, 7, - 3, 0, 0, 354, 355, 7, 7, 0, 0, 355, 356, 7, 5, 0, 0, 356, 70, 1, 0, 0, - 0, 357, 358, 7, 1, 0, 0, 358, 359, 7, 8, 0, 0, 359, 360, 7, 2, 0, 0, 360, - 361, 7, 9, 0, 0, 361, 362, 7, 10, 0, 0, 362, 363, 7, 11, 0, 0, 363, 72, - 1, 0, 0, 0, 364, 365, 7, 12, 0, 0, 365, 366, 7, 13, 0, 0, 366, 367, 7, - 10, 0, 0, 367, 368, 7, 8, 0, 0, 368, 369, 7, 5, 0, 0, 369, 370, 7, 0, 0, - 0, 370, 371, 7, 6, 0, 0, 371, 372, 7, 13, 0, 0, 372, 373, 7, 5, 0, 0, 373, - 74, 1, 0, 0, 0, 374, 375, 7, 12, 0, 0, 375, 376, 7, 6, 0, 0, 376, 377, - 7, 3, 0, 0, 377, 378, 7, 7, 0, 0, 378, 379, 7, 9, 0, 0, 379, 380, 7, 8, - 0, 0, 380, 76, 1, 0, 0, 0, 381, 382, 7, 12, 0, 0, 382, 383, 7, 13, 0, 0, - 383, 384, 7, 9, 0, 0, 384, 385, 7, 14, 0, 0, 385, 386, 7, 1, 0, 0, 386, - 387, 7, 2, 0, 0, 387, 388, 7, 5, 0, 0, 388, 78, 1, 0, 0, 0, 389, 390, 7, - 14, 0, 0, 390, 391, 7, 9, 0, 0, 391, 392, 7, 5, 0, 0, 392, 393, 7, 15, - 0, 0, 393, 80, 1, 0, 0, 0, 394, 395, 7, 10, 0, 0, 395, 396, 7, 15, 0, 0, - 396, 397, 7, 11, 0, 0, 397, 398, 7, 5, 0, 0, 398, 399, 7, 13, 0, 0, 399, - 82, 1, 0, 0, 0, 400, 401, 7, 16, 0, 0, 401, 402, 7, 10, 0, 0, 402, 403, - 7, 13, 0, 0, 403, 404, 7, 5, 0, 0, 404, 405, 7, 9, 0, 0, 405, 406, 7, 17, - 0, 0, 406, 407, 7, 11, 0, 0, 407, 84, 1, 0, 0, 0, 408, 409, 7, 12, 0, 0, - 409, 410, 7, 13, 0, 0, 410, 411, 7, 9, 0, 0, 411, 412, 7, 18, 0, 0, 412, - 413, 7, 1, 0, 0, 413, 414, 7, 13, 0, 0, 414, 415, 7, 19, 0, 0, 415, 86, - 1, 0, 0, 0, 416, 417, 7, 20, 0, 0, 417, 418, 7, 5, 0, 0, 418, 419, 7, 19, - 0, 0, 419, 88, 1, 0, 0, 0, 420, 421, 7, 10, 0, 0, 421, 422, 7, 11, 0, 0, - 422, 90, 1, 0, 0, 0, 423, 424, 7, 0, 0, 0, 424, 425, 7, 10, 0, 0, 425, - 92, 1, 0, 0, 0, 426, 427, 7, 6, 0, 0, 427, 428, 7, 11, 0, 0, 428, 429, - 7, 9, 0, 0, 429, 430, 7, 21, 0, 0, 430, 431, 7, 6, 0, 0, 431, 432, 7, 5, - 0, 0, 432, 94, 1, 0, 0, 0, 433, 434, 7, 8, 0, 0, 434, 435, 7, 1, 0, 0, - 435, 436, 7, 4, 0, 0, 436, 437, 7, 8, 0, 0, 437, 438, 7, 1, 0, 0, 438, - 439, 7, 0, 0, 0, 439, 440, 7, 5, 0, 0, 440, 96, 1, 0, 0, 0, 441, 442, 7, - 13, 0, 0, 442, 443, 7, 5, 0, 0, 443, 444, 7, 4, 0, 0, 444, 445, 7, 2, 0, - 0, 445, 446, 7, 13, 0, 0, 446, 447, 7, 9, 0, 0, 447, 448, 7, 8, 0, 0, 448, - 449, 7, 2, 0, 0, 449, 98, 1, 0, 0, 0, 450, 451, 7, 4, 0, 0, 451, 452, 7, - 5, 0, 0, 452, 453, 7, 2, 0, 0, 453, 100, 1, 0, 0, 0, 454, 455, 7, 0, 0, - 0, 455, 456, 7, 5, 0, 0, 456, 457, 7, 16, 0, 0, 457, 458, 7, 1, 0, 0, 458, - 459, 7, 6, 0, 0, 459, 460, 7, 7, 0, 0, 460, 461, 7, 2, 0, 0, 461, 102, - 1, 0, 0, 0, 462, 463, 7, 11, 0, 0, 463, 464, 7, 6, 0, 0, 464, 465, 7, 7, - 0, 0, 465, 466, 7, 7, 0, 0, 466, 104, 1, 0, 0, 0, 467, 468, 7, 0, 0, 0, - 468, 469, 7, 5, 0, 0, 469, 470, 7, 7, 0, 0, 470, 471, 7, 5, 0, 0, 471, - 472, 7, 2, 0, 0, 472, 473, 7, 5, 0, 0, 473, 106, 1, 0, 0, 0, 474, 475, - 7, 6, 0, 0, 475, 476, 7, 12, 0, 0, 476, 477, 7, 0, 0, 0, 477, 478, 7, 1, - 0, 0, 478, 479, 7, 2, 0, 0, 479, 480, 7, 5, 0, 0, 480, 108, 1, 0, 0, 0, - 481, 
482, 7, 13, 0, 0, 482, 483, 7, 5, 0, 0, 483, 484, 7, 16, 0, 0, 484, - 485, 7, 5, 0, 0, 485, 486, 7, 13, 0, 0, 486, 487, 7, 5, 0, 0, 487, 488, - 7, 11, 0, 0, 488, 489, 7, 8, 0, 0, 489, 490, 7, 5, 0, 0, 490, 491, 7, 4, - 0, 0, 491, 110, 1, 0, 0, 0, 492, 493, 7, 13, 0, 0, 493, 494, 7, 5, 0, 0, - 494, 495, 7, 16, 0, 0, 495, 112, 1, 0, 0, 0, 496, 497, 7, 11, 0, 0, 497, - 498, 7, 10, 0, 0, 498, 499, 7, 2, 0, 0, 499, 114, 1, 0, 0, 0, 500, 501, - 7, 9, 0, 0, 501, 502, 7, 11, 0, 0, 502, 503, 7, 0, 0, 0, 503, 504, 7, 5, - 0, 0, 504, 505, 7, 22, 0, 0, 505, 116, 1, 0, 0, 0, 506, 507, 7, 1, 0, 0, - 507, 508, 7, 11, 0, 0, 508, 509, 7, 0, 0, 0, 509, 118, 1, 0, 0, 0, 510, - 511, 7, 10, 0, 0, 511, 512, 7, 13, 0, 0, 512, 120, 1, 0, 0, 0, 513, 514, - 7, 7, 0, 0, 514, 515, 7, 9, 0, 0, 515, 516, 7, 20, 0, 0, 516, 517, 7, 5, - 0, 0, 517, 122, 1, 0, 0, 0, 518, 519, 7, 9, 0, 0, 519, 520, 7, 7, 0, 0, - 520, 521, 7, 9, 0, 0, 521, 522, 7, 20, 0, 0, 522, 523, 7, 5, 0, 0, 523, - 124, 1, 0, 0, 0, 524, 525, 7, 9, 0, 0, 525, 526, 7, 11, 0, 0, 526, 126, - 1, 0, 0, 0, 527, 528, 7, 3, 0, 0, 528, 529, 7, 5, 0, 0, 529, 530, 7, 2, - 0, 0, 530, 531, 7, 15, 0, 0, 531, 532, 7, 5, 0, 0, 532, 533, 7, 5, 0, 0, - 533, 534, 7, 11, 0, 0, 534, 128, 1, 0, 0, 0, 535, 536, 7, 9, 0, 0, 536, - 537, 7, 4, 0, 0, 537, 130, 1, 0, 0, 0, 538, 539, 7, 5, 0, 0, 539, 540, - 7, 22, 0, 0, 540, 541, 7, 9, 0, 0, 541, 542, 7, 4, 0, 0, 542, 543, 7, 2, - 0, 0, 543, 544, 7, 4, 0, 0, 544, 132, 1, 0, 0, 0, 545, 546, 7, 1, 0, 0, - 546, 547, 7, 7, 0, 0, 547, 548, 7, 7, 0, 0, 548, 134, 1, 0, 0, 0, 549, - 550, 7, 1, 0, 0, 550, 551, 7, 11, 0, 0, 551, 552, 7, 19, 0, 0, 552, 136, - 1, 0, 0, 0, 553, 554, 7, 23, 0, 0, 554, 555, 7, 10, 0, 0, 555, 556, 7, - 9, 0, 0, 556, 557, 7, 11, 0, 0, 557, 138, 1, 0, 0, 0, 558, 559, 7, 7, 0, - 0, 559, 560, 7, 5, 0, 0, 560, 561, 7, 16, 0, 0, 561, 562, 7, 2, 0, 0, 562, - 140, 1, 0, 0, 0, 563, 564, 7, 13, 0, 0, 564, 565, 7, 9, 0, 0, 565, 566, - 7, 17, 0, 0, 566, 567, 7, 24, 0, 0, 567, 568, 7, 2, 0, 0, 568, 142, 1, - 0, 0, 0, 569, 570, 7, 9, 0, 0, 570, 571, 7, 11, 0, 0, 571, 572, 7, 11, - 0, 0, 572, 573, 7, 5, 0, 0, 573, 574, 7, 13, 0, 0, 574, 144, 1, 0, 0, 0, - 575, 576, 7, 1, 0, 0, 576, 577, 7, 4, 0, 0, 577, 146, 1, 0, 0, 0, 578, - 579, 7, 1, 0, 0, 579, 580, 7, 4, 0, 0, 580, 581, 7, 8, 0, 0, 581, 148, - 1, 0, 0, 0, 582, 583, 7, 0, 0, 0, 583, 584, 7, 5, 0, 0, 584, 585, 7, 4, - 0, 0, 585, 586, 7, 8, 0, 0, 586, 150, 1, 0, 0, 0, 587, 588, 7, 7, 0, 0, - 588, 589, 7, 9, 0, 0, 589, 590, 7, 18, 0, 0, 590, 591, 7, 9, 0, 0, 591, - 592, 7, 2, 0, 0, 592, 152, 1, 0, 0, 0, 593, 594, 7, 10, 0, 0, 594, 595, - 7, 16, 0, 0, 595, 596, 7, 16, 0, 0, 596, 597, 7, 4, 0, 0, 597, 598, 7, - 5, 0, 0, 598, 599, 7, 2, 0, 0, 599, 154, 1, 0, 0, 0, 600, 601, 7, 10, 0, - 0, 601, 602, 7, 13, 0, 0, 602, 603, 7, 0, 0, 0, 603, 604, 7, 5, 0, 0, 604, - 605, 7, 13, 0, 0, 605, 156, 1, 0, 0, 0, 606, 607, 7, 3, 0, 0, 607, 608, - 7, 19, 0, 0, 608, 158, 1, 0, 0, 0, 609, 610, 7, 17, 0, 0, 610, 611, 7, - 13, 0, 0, 611, 612, 7, 10, 0, 0, 612, 613, 7, 6, 0, 0, 613, 614, 7, 12, - 0, 0, 614, 160, 1, 0, 0, 0, 615, 616, 7, 24, 0, 0, 616, 617, 7, 1, 0, 0, - 617, 618, 7, 14, 0, 0, 618, 619, 7, 9, 0, 0, 619, 620, 7, 11, 0, 0, 620, - 621, 7, 17, 0, 0, 621, 162, 1, 0, 0, 0, 622, 623, 7, 13, 0, 0, 623, 624, - 7, 5, 0, 0, 624, 625, 7, 2, 0, 0, 625, 626, 7, 6, 0, 0, 626, 627, 7, 13, - 0, 0, 627, 628, 7, 11, 0, 0, 628, 629, 7, 4, 0, 0, 629, 164, 1, 0, 0, 0, - 630, 631, 7, 11, 0, 0, 631, 632, 7, 10, 0, 0, 632, 166, 1, 0, 0, 0, 633, - 634, 7, 15, 0, 0, 634, 635, 7, 9, 0, 0, 635, 636, 7, 2, 0, 0, 
636, 637, - 7, 24, 0, 0, 637, 168, 1, 0, 0, 0, 638, 639, 7, 8, 0, 0, 639, 640, 7, 1, - 0, 0, 640, 641, 7, 4, 0, 0, 641, 642, 7, 5, 0, 0, 642, 170, 1, 0, 0, 0, - 643, 644, 7, 15, 0, 0, 644, 645, 7, 24, 0, 0, 645, 646, 7, 5, 0, 0, 646, - 647, 7, 11, 0, 0, 647, 172, 1, 0, 0, 0, 648, 649, 7, 2, 0, 0, 649, 650, - 7, 24, 0, 0, 650, 651, 7, 5, 0, 0, 651, 652, 7, 11, 0, 0, 652, 174, 1, - 0, 0, 0, 653, 654, 7, 5, 0, 0, 654, 655, 7, 11, 0, 0, 655, 656, 7, 0, 0, - 0, 656, 176, 1, 0, 0, 0, 657, 658, 7, 0, 0, 0, 658, 659, 7, 9, 0, 0, 659, - 660, 7, 4, 0, 0, 660, 661, 7, 2, 0, 0, 661, 662, 7, 9, 0, 0, 662, 663, - 7, 11, 0, 0, 663, 664, 7, 8, 0, 0, 664, 665, 7, 2, 0, 0, 665, 178, 1, 0, - 0, 0, 666, 667, 7, 16, 0, 0, 667, 668, 7, 13, 0, 0, 668, 669, 7, 10, 0, - 0, 669, 670, 7, 18, 0, 0, 670, 180, 1, 0, 0, 0, 671, 672, 7, 15, 0, 0, - 672, 673, 7, 24, 0, 0, 673, 674, 7, 5, 0, 0, 674, 675, 7, 13, 0, 0, 675, - 676, 7, 5, 0, 0, 676, 182, 1, 0, 0, 0, 677, 678, 7, 8, 0, 0, 678, 679, - 7, 10, 0, 0, 679, 680, 7, 7, 0, 0, 680, 681, 7, 7, 0, 0, 681, 682, 7, 1, - 0, 0, 682, 683, 7, 2, 0, 0, 683, 684, 7, 5, 0, 0, 684, 184, 1, 0, 0, 0, - 685, 686, 7, 4, 0, 0, 686, 687, 7, 5, 0, 0, 687, 688, 7, 7, 0, 0, 688, - 689, 7, 5, 0, 0, 689, 690, 7, 8, 0, 0, 690, 691, 7, 2, 0, 0, 691, 186, - 1, 0, 0, 0, 692, 693, 7, 9, 0, 0, 693, 694, 7, 11, 0, 0, 694, 695, 7, 4, - 0, 0, 695, 696, 7, 5, 0, 0, 696, 697, 7, 13, 0, 0, 697, 698, 7, 2, 0, 0, - 698, 188, 1, 0, 0, 0, 699, 700, 7, 14, 0, 0, 700, 701, 7, 1, 0, 0, 701, - 702, 7, 7, 0, 0, 702, 703, 7, 6, 0, 0, 703, 704, 7, 5, 0, 0, 704, 705, - 7, 4, 0, 0, 705, 190, 1, 0, 0, 0, 706, 707, 7, 16, 0, 0, 707, 708, 7, 6, - 0, 0, 708, 709, 7, 7, 0, 0, 709, 710, 7, 7, 0, 0, 710, 192, 1, 0, 0, 0, - 711, 712, 7, 6, 0, 0, 712, 713, 7, 11, 0, 0, 713, 714, 7, 9, 0, 0, 714, - 715, 7, 10, 0, 0, 715, 716, 7, 11, 0, 0, 716, 194, 1, 0, 0, 0, 717, 718, - 7, 9, 0, 0, 718, 719, 7, 11, 0, 0, 719, 720, 7, 2, 0, 0, 720, 721, 7, 5, - 0, 0, 721, 722, 7, 13, 0, 0, 722, 723, 7, 4, 0, 0, 723, 724, 7, 5, 0, 0, - 724, 725, 7, 8, 0, 0, 725, 726, 7, 2, 0, 0, 726, 196, 1, 0, 0, 0, 727, - 728, 7, 5, 0, 0, 728, 729, 7, 22, 0, 0, 729, 730, 7, 8, 0, 0, 730, 731, - 7, 5, 0, 0, 731, 732, 7, 12, 0, 0, 732, 733, 7, 2, 0, 0, 733, 198, 1, 0, - 0, 0, 734, 735, 7, 11, 0, 0, 735, 736, 7, 6, 0, 0, 736, 737, 7, 7, 0, 0, - 737, 738, 7, 7, 0, 0, 738, 739, 7, 4, 0, 0, 739, 200, 1, 0, 0, 0, 740, - 741, 7, 16, 0, 0, 741, 742, 7, 9, 0, 0, 742, 743, 7, 13, 0, 0, 743, 744, - 7, 4, 0, 0, 744, 745, 7, 2, 0, 0, 745, 202, 1, 0, 0, 0, 746, 747, 7, 7, - 0, 0, 747, 748, 7, 1, 0, 0, 748, 749, 7, 4, 0, 0, 749, 750, 7, 2, 0, 0, - 750, 204, 1, 0, 0, 0, 751, 752, 7, 13, 0, 0, 752, 753, 7, 5, 0, 0, 753, - 754, 7, 2, 0, 0, 754, 755, 7, 6, 0, 0, 755, 756, 7, 13, 0, 0, 756, 757, - 7, 11, 0, 0, 757, 758, 7, 9, 0, 0, 758, 759, 7, 11, 0, 0, 759, 760, 7, - 17, 0, 0, 760, 206, 1, 0, 0, 0, 761, 762, 7, 9, 0, 0, 762, 763, 7, 11, - 0, 0, 763, 764, 7, 2, 0, 0, 764, 765, 7, 10, 0, 0, 765, 208, 1, 0, 0, 0, - 766, 767, 7, 8, 0, 0, 767, 768, 7, 10, 0, 0, 768, 769, 7, 11, 0, 0, 769, - 770, 7, 16, 0, 0, 770, 771, 7, 7, 0, 0, 771, 772, 7, 9, 0, 0, 772, 773, - 7, 8, 0, 0, 773, 774, 7, 2, 0, 0, 774, 210, 1, 0, 0, 0, 775, 776, 7, 11, - 0, 0, 776, 777, 7, 10, 0, 0, 777, 778, 7, 2, 0, 0, 778, 779, 7, 24, 0, - 0, 779, 780, 7, 9, 0, 0, 780, 781, 7, 11, 0, 0, 781, 782, 7, 17, 0, 0, - 782, 212, 1, 0, 0, 0, 783, 784, 7, 16, 0, 0, 784, 785, 7, 10, 0, 0, 785, - 786, 7, 13, 0, 0, 786, 214, 1, 0, 0, 0, 787, 788, 7, 9, 0, 0, 788, 789, - 7, 16, 0, 0, 789, 216, 1, 0, 0, 0, 790, 791, 7, 5, 0, 0, 
791, 792, 7, 7, - 0, 0, 792, 793, 7, 4, 0, 0, 793, 794, 7, 5, 0, 0, 794, 795, 7, 9, 0, 0, - 795, 796, 7, 16, 0, 0, 796, 218, 1, 0, 0, 0, 797, 798, 7, 5, 0, 0, 798, - 799, 7, 7, 0, 0, 799, 800, 7, 4, 0, 0, 800, 801, 7, 5, 0, 0, 801, 220, - 1, 0, 0, 0, 802, 803, 7, 3, 0, 0, 803, 804, 7, 13, 0, 0, 804, 805, 7, 5, - 0, 0, 805, 806, 7, 1, 0, 0, 806, 807, 7, 20, 0, 0, 807, 222, 1, 0, 0, 0, - 808, 809, 7, 13, 0, 0, 809, 810, 7, 5, 0, 0, 810, 811, 7, 2, 0, 0, 811, - 812, 7, 6, 0, 0, 812, 813, 7, 13, 0, 0, 813, 814, 7, 11, 0, 0, 814, 224, - 1, 0, 0, 0, 815, 816, 7, 11, 0, 0, 816, 817, 7, 5, 0, 0, 817, 818, 7, 22, - 0, 0, 818, 819, 7, 2, 0, 0, 819, 226, 1, 0, 0, 0, 820, 826, 5, 39, 0, 0, - 821, 825, 8, 25, 0, 0, 822, 823, 5, 92, 0, 0, 823, 825, 9, 0, 0, 0, 824, - 821, 1, 0, 0, 0, 824, 822, 1, 0, 0, 0, 825, 828, 1, 0, 0, 0, 826, 824, - 1, 0, 0, 0, 826, 827, 1, 0, 0, 0, 827, 829, 1, 0, 0, 0, 828, 826, 1, 0, - 0, 0, 829, 830, 5, 39, 0, 0, 830, 228, 1, 0, 0, 0, 831, 832, 7, 2, 0, 0, - 832, 833, 7, 13, 0, 0, 833, 834, 7, 6, 0, 0, 834, 835, 7, 5, 0, 0, 835, - 230, 1, 0, 0, 0, 836, 837, 7, 16, 0, 0, 837, 838, 7, 1, 0, 0, 838, 839, - 7, 7, 0, 0, 839, 840, 7, 4, 0, 0, 840, 841, 7, 5, 0, 0, 841, 232, 1, 0, - 0, 0, 842, 844, 7, 26, 0, 0, 843, 842, 1, 0, 0, 0, 844, 845, 1, 0, 0, 0, - 845, 843, 1, 0, 0, 0, 845, 846, 1, 0, 0, 0, 846, 234, 1, 0, 0, 0, 847, - 848, 5, 48, 0, 0, 848, 849, 7, 22, 0, 0, 849, 851, 1, 0, 0, 0, 850, 852, - 7, 27, 0, 0, 851, 850, 1, 0, 0, 0, 852, 853, 1, 0, 0, 0, 853, 851, 1, 0, - 0, 0, 853, 854, 1, 0, 0, 0, 854, 236, 1, 0, 0, 0, 855, 856, 7, 16, 0, 0, - 856, 857, 7, 10, 0, 0, 857, 858, 7, 13, 0, 0, 858, 859, 7, 5, 0, 0, 859, - 860, 7, 9, 0, 0, 860, 861, 7, 17, 0, 0, 861, 862, 7, 11, 0, 0, 862, 863, - 5, 95, 0, 0, 863, 864, 7, 20, 0, 0, 864, 865, 7, 5, 0, 0, 865, 869, 7, - 19, 0, 0, 866, 867, 7, 16, 0, 0, 867, 869, 7, 20, 0, 0, 868, 855, 1, 0, - 0, 0, 868, 866, 1, 0, 0, 0, 869, 238, 1, 0, 0, 0, 870, 871, 7, 10, 0, 0, - 871, 872, 7, 11, 0, 0, 872, 873, 5, 95, 0, 0, 873, 874, 7, 6, 0, 0, 874, - 875, 7, 12, 0, 0, 875, 876, 7, 0, 0, 0, 876, 877, 7, 1, 0, 0, 877, 878, - 7, 2, 0, 0, 878, 879, 7, 5, 0, 0, 879, 240, 1, 0, 0, 0, 880, 881, 7, 10, - 0, 0, 881, 882, 7, 11, 0, 0, 882, 883, 5, 95, 0, 0, 883, 884, 7, 0, 0, - 0, 884, 885, 7, 5, 0, 0, 885, 886, 7, 7, 0, 0, 886, 887, 7, 5, 0, 0, 887, - 888, 7, 2, 0, 0, 888, 889, 7, 5, 0, 0, 889, 242, 1, 0, 0, 0, 890, 891, - 7, 4, 0, 0, 891, 892, 7, 5, 0, 0, 892, 893, 7, 2, 0, 0, 893, 894, 5, 95, - 0, 0, 894, 895, 7, 0, 0, 0, 895, 896, 7, 5, 0, 0, 896, 897, 7, 16, 0, 0, - 897, 898, 7, 1, 0, 0, 898, 899, 7, 6, 0, 0, 899, 900, 7, 7, 0, 0, 900, - 901, 7, 2, 0, 0, 901, 244, 1, 0, 0, 0, 902, 903, 7, 4, 0, 0, 903, 904, - 7, 5, 0, 0, 904, 905, 7, 2, 0, 0, 905, 906, 5, 95, 0, 0, 906, 907, 7, 11, - 0, 0, 907, 908, 7, 6, 0, 0, 908, 909, 7, 7, 0, 0, 909, 910, 7, 7, 0, 0, - 910, 246, 1, 0, 0, 0, 911, 912, 7, 11, 0, 0, 912, 913, 7, 10, 0, 0, 913, - 914, 5, 95, 0, 0, 914, 915, 7, 1, 0, 0, 915, 916, 7, 8, 0, 0, 916, 917, - 7, 2, 0, 0, 917, 918, 7, 9, 0, 0, 918, 919, 7, 10, 0, 0, 919, 920, 7, 11, - 0, 0, 920, 248, 1, 0, 0, 0, 921, 925, 7, 28, 0, 0, 922, 924, 7, 29, 0, - 0, 923, 922, 1, 0, 0, 0, 924, 927, 1, 0, 0, 0, 925, 923, 1, 0, 0, 0, 925, - 926, 1, 0, 0, 0, 926, 250, 1, 0, 0, 0, 927, 925, 1, 0, 0, 0, 928, 929, - 3, 35, 17, 0, 929, 930, 3, 249, 124, 0, 930, 252, 1, 0, 0, 0, 931, 932, - 3, 19, 9, 0, 932, 933, 3, 249, 124, 0, 933, 254, 1, 0, 0, 0, 934, 935, - 3, 33, 16, 0, 935, 936, 3, 249, 124, 0, 936, 256, 1, 0, 0, 0, 937, 938, - 7, 30, 0, 0, 938, 939, 1, 0, 0, 0, 939, 
940, 6, 128, 0, 0, 940, 258, 1, - 0, 0, 0, 941, 942, 5, 47, 0, 0, 942, 943, 5, 42, 0, 0, 943, 947, 1, 0, - 0, 0, 944, 946, 9, 0, 0, 0, 945, 944, 1, 0, 0, 0, 946, 949, 1, 0, 0, 0, - 947, 948, 1, 0, 0, 0, 947, 945, 1, 0, 0, 0, 948, 950, 1, 0, 0, 0, 949, - 947, 1, 0, 0, 0, 950, 951, 5, 42, 0, 0, 951, 952, 5, 47, 0, 0, 952, 953, - 1, 0, 0, 0, 953, 954, 6, 129, 0, 0, 954, 260, 1, 0, 0, 0, 955, 956, 5, - 47, 0, 0, 956, 957, 5, 47, 0, 0, 957, 961, 1, 0, 0, 0, 958, 960, 8, 31, - 0, 0, 959, 958, 1, 0, 0, 0, 960, 963, 1, 0, 0, 0, 961, 959, 1, 0, 0, 0, - 961, 962, 1, 0, 0, 0, 962, 964, 1, 0, 0, 0, 963, 961, 1, 0, 0, 0, 964, - 965, 6, 130, 0, 0, 965, 262, 1, 0, 0, 0, 10, 0, 313, 824, 826, 845, 853, - 868, 925, 947, 961, 1, 0, 1, 0, - } - deserializer := antlr.NewATNDeserializer(nil) - staticData.atn = deserializer.Deserialize(staticData.serializedATN) - atn := staticData.atn - staticData.decisionToDFA = make([]*antlr.DFA, len(atn.DecisionToState)) - decisionToDFA := staticData.decisionToDFA - for index, state := range atn.DecisionToState { - decisionToDFA[index] = antlr.NewDFA(state, index) - } -} - -// KuneiformLexerInit initializes any static state used to implement KuneiformLexer. By default the -// static state used to implement the lexer is lazily initialized during the first call to -// NewKuneiformLexer(). You can call this function if you wish to initialize the static state ahead -// of time. -func KuneiformLexerInit() { - staticData := &KuneiformLexerLexerStaticData - staticData.once.Do(kuneiformlexerLexerInit) -} - -// NewKuneiformLexer produces a new lexer instance for the optional input antlr.CharStream. -func NewKuneiformLexer(input antlr.CharStream) *KuneiformLexer { - KuneiformLexerInit() - l := new(KuneiformLexer) - l.BaseLexer = antlr.NewBaseLexer(input) - staticData := &KuneiformLexerLexerStaticData - l.Interpreter = antlr.NewLexerATNSimulator(l, staticData.atn, staticData.decisionToDFA, staticData.PredictionContextCache) - l.channelNames = staticData.ChannelNames - l.modeNames = staticData.ModeNames - l.RuleNames = staticData.RuleNames - l.LiteralNames = staticData.LiteralNames - l.SymbolicNames = staticData.SymbolicNames - l.GrammarFileName = "KuneiformLexer.g4" - // TODO: l.EOF = antlr.TokenEOF - - return l -} - -// KuneiformLexer tokens. 
-const ( - KuneiformLexerLBRACE = 1 - KuneiformLexerRBRACE = 2 - KuneiformLexerLBRACKET = 3 - KuneiformLexerRBRACKET = 4 - KuneiformLexerCOL = 5 - KuneiformLexerSCOL = 6 - KuneiformLexerLPAREN = 7 - KuneiformLexerRPAREN = 8 - KuneiformLexerCOMMA = 9 - KuneiformLexerAT = 10 - KuneiformLexerEXCL = 11 - KuneiformLexerPERIOD = 12 - KuneiformLexerCONCAT = 13 - KuneiformLexerSTAR = 14 - KuneiformLexerEQUALS = 15 - KuneiformLexerEQUATE = 16 - KuneiformLexerHASH = 17 - KuneiformLexerDOLLAR = 18 - KuneiformLexerMOD = 19 - KuneiformLexerPLUS = 20 - KuneiformLexerMINUS = 21 - KuneiformLexerDIV = 22 - KuneiformLexerNEQ = 23 - KuneiformLexerLT = 24 - KuneiformLexerLTE = 25 - KuneiformLexerGT = 26 - KuneiformLexerGTE = 27 - KuneiformLexerTYPE_CAST = 28 - KuneiformLexerUNDERSCORE = 29 - KuneiformLexerASSIGN = 30 - KuneiformLexerRANGE = 31 - KuneiformLexerDOUBLE_QUOTE = 32 - KuneiformLexerDATABASE = 33 - KuneiformLexerUSE = 34 - KuneiformLexerTABLE = 35 - KuneiformLexerACTION = 36 - KuneiformLexerPROCEDURE = 37 - KuneiformLexerPUBLIC = 38 - KuneiformLexerPRIVATE = 39 - KuneiformLexerVIEW = 40 - KuneiformLexerOWNER = 41 - KuneiformLexerFOREIGN = 42 - KuneiformLexerPRIMARY = 43 - KuneiformLexerKEY = 44 - KuneiformLexerON = 45 - KuneiformLexerDO = 46 - KuneiformLexerUNIQUE = 47 - KuneiformLexerCASCADE = 48 - KuneiformLexerRESTRICT = 49 - KuneiformLexerSET = 50 - KuneiformLexerDEFAULT = 51 - KuneiformLexerNULL = 52 - KuneiformLexerDELETE = 53 - KuneiformLexerUPDATE = 54 - KuneiformLexerREFERENCES = 55 - KuneiformLexerREF = 56 - KuneiformLexerNOT = 57 - KuneiformLexerINDEX = 58 - KuneiformLexerAND = 59 - KuneiformLexerOR = 60 - KuneiformLexerLIKE = 61 - KuneiformLexerILIKE = 62 - KuneiformLexerIN = 63 - KuneiformLexerBETWEEN = 64 - KuneiformLexerIS = 65 - KuneiformLexerEXISTS = 66 - KuneiformLexerALL = 67 - KuneiformLexerANY = 68 - KuneiformLexerJOIN = 69 - KuneiformLexerLEFT = 70 - KuneiformLexerRIGHT = 71 - KuneiformLexerINNER = 72 - KuneiformLexerAS = 73 - KuneiformLexerASC = 74 - KuneiformLexerDESC = 75 - KuneiformLexerLIMIT = 76 - KuneiformLexerOFFSET = 77 - KuneiformLexerORDER = 78 - KuneiformLexerBY = 79 - KuneiformLexerGROUP = 80 - KuneiformLexerHAVING = 81 - KuneiformLexerRETURNS = 82 - KuneiformLexerNO = 83 - KuneiformLexerWITH = 84 - KuneiformLexerCASE = 85 - KuneiformLexerWHEN = 86 - KuneiformLexerTHEN = 87 - KuneiformLexerEND = 88 - KuneiformLexerDISTINCT = 89 - KuneiformLexerFROM = 90 - KuneiformLexerWHERE = 91 - KuneiformLexerCOLLATE = 92 - KuneiformLexerSELECT = 93 - KuneiformLexerINSERT = 94 - KuneiformLexerVALUES = 95 - KuneiformLexerFULL = 96 - KuneiformLexerUNION = 97 - KuneiformLexerINTERSECT = 98 - KuneiformLexerEXCEPT = 99 - KuneiformLexerNULLS = 100 - KuneiformLexerFIRST = 101 - KuneiformLexerLAST = 102 - KuneiformLexerRETURNING = 103 - KuneiformLexerINTO = 104 - KuneiformLexerCONFLICT = 105 - KuneiformLexerNOTHING = 106 - KuneiformLexerFOR = 107 - KuneiformLexerIF = 108 - KuneiformLexerELSEIF = 109 - KuneiformLexerELSE = 110 - KuneiformLexerBREAK = 111 - KuneiformLexerRETURN = 112 - KuneiformLexerNEXT = 113 - KuneiformLexerSTRING_ = 114 - KuneiformLexerTRUE = 115 - KuneiformLexerFALSE = 116 - KuneiformLexerDIGITS_ = 117 - KuneiformLexerBINARY_ = 118 - KuneiformLexerLEGACY_FOREIGN_KEY = 119 - KuneiformLexerLEGACY_ON_UPDATE = 120 - KuneiformLexerLEGACY_ON_DELETE = 121 - KuneiformLexerLEGACY_SET_DEFAULT = 122 - KuneiformLexerLEGACY_SET_NULL = 123 - KuneiformLexerLEGACY_NO_ACTION = 124 - KuneiformLexerIDENTIFIER = 125 - KuneiformLexerVARIABLE = 126 - KuneiformLexerCONTEXTUAL_VARIABLE = 127 
- KuneiformLexerHASH_IDENTIFIER = 128 - KuneiformLexerWS = 129 - KuneiformLexerBLOCK_COMMENT = 130 - KuneiformLexerLINE_COMMENT = 131 -) diff --git a/parse/go.mod b/parse/go.mod deleted file mode 100644 index f62d01571..000000000 --- a/parse/go.mod +++ /dev/null @@ -1,23 +0,0 @@ -module github.com/kwilteam/kwil-db/parse - -go 1.22.0 - -require ( - github.com/antlr4-go/antlr/v4 v4.13.0 - github.com/google/go-cmp v0.6.0 - github.com/kwilteam/kwil-db/core v0.3.0 - github.com/stretchr/testify v1.9.0 -) - -require ( - github.com/cockroachdb/apd/v3 v3.2.1 // indirect - github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc // indirect - github.com/google/uuid v1.6.0 // indirect - github.com/holiman/uint256 v1.3.1 // indirect - github.com/kr/pretty v0.3.1 // indirect - github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 // indirect - github.com/rogpeppe/go-internal v1.12.0 // indirect - golang.org/x/exp v0.0.0-20240506185415-9bf2ced13842 // indirect - gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c // indirect - gopkg.in/yaml.v3 v3.0.1 // indirect -) diff --git a/parse/go.sum b/parse/go.sum deleted file mode 100644 index 7d1e5ef3f..000000000 --- a/parse/go.sum +++ /dev/null @@ -1,39 +0,0 @@ -github.com/antlr4-go/antlr/v4 v4.13.0 h1:lxCg3LAv+EUK6t1i0y1V6/SLeUi0eKEKdhQAlS8TVTI= -github.com/antlr4-go/antlr/v4 v4.13.0/go.mod h1:pfChB/xh/Unjila75QW7+VU4TSnWnnk9UTnmpPaOR2g= -github.com/cockroachdb/apd/v3 v3.2.1 h1:U+8j7t0axsIgvQUqthuNm82HIrYXodOV2iWLWtEaIwg= -github.com/cockroachdb/apd/v3 v3.2.1/go.mod h1:klXJcjp+FffLTHlhIG69tezTDvdP065naDsHzKhYSqc= -github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E= -github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc h1:U9qPSI2PIWSS1VwoXQT9A3Wy9MM3WgvqSxFWenqJduM= -github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= -github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI= -github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= -github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0= -github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= -github.com/holiman/uint256 v1.3.1 h1:JfTzmih28bittyHM8z360dCjIA9dbPIBlcTI6lmctQs= -github.com/holiman/uint256 v1.3.1/go.mod h1:EOMSn4q6Nyt9P6efbI3bueV4e1b3dGlUCXeiRV4ng7E= -github.com/kr/pretty v0.2.1/go.mod h1:ipq/a2n7PKx3OHsz4KJII5eveXtPO4qwEXGdVfWzfnI= -github.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE= -github.com/kr/pretty v0.3.1/go.mod h1:hoEshYVHaxMs3cyo3Yncou5ZscifuDolrwPKZanG3xk= -github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= -github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= -github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= -github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= -github.com/kwilteam/kwil-db/core v0.3.0 h1:exeFwTfv7vLvrIb5pDvk5gmHsXsQEYDRWiaYn9s2LXQ= -github.com/kwilteam/kwil-db/core v0.3.0/go.mod h1:rTXHWgWannGuOaR0vK2o7/kBXu5opLWZOqlAhLSRP1Y= -github.com/lib/pq v1.10.7 h1:p7ZhMD+KsSRozJr34udlUrhboJwWAgCg34+/ZZNvZZw= -github.com/lib/pq v1.10.7/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o= -github.com/pkg/diff v0.0.0-20210226163009-20ebb0f2a09e/go.mod h1:pJLUxLENpZxwdsKMEsNbx1VGcRFpLqf3715MtcvvzbA= -github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 h1:Jamvg5psRIccs7FGNTlIRMkT8wgtp5eCXdBlqhYGL6U= 
-github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= -github.com/rogpeppe/go-internal v1.9.0/go.mod h1:WtVeX8xhTBvf0smdhujwtBcq4Qrzq/fJaraNFVN+nFs= -github.com/rogpeppe/go-internal v1.12.0 h1:exVL4IDcn6na9z1rAb56Vxr+CgyK3nn3O+epU5NdKM8= -github.com/rogpeppe/go-internal v1.12.0/go.mod h1:E+RYuTGaKKdloAfM02xzb0FW3Paa99yedzYV+kq4uf4= -github.com/stretchr/testify v1.9.0 h1:HtqpIVDClZ4nwg75+f6Lvsy/wHu+3BoSGCbBAcpTsTg= -github.com/stretchr/testify v1.9.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY= -golang.org/x/exp v0.0.0-20240506185415-9bf2ced13842 h1:vr/HnozRka3pE4EsMEg1lgkXJkTFJCVUX+S/ZT6wYzM= -golang.org/x/exp v0.0.0-20240506185415-9bf2ced13842/go.mod h1:XtvwrStGgqGPLc4cjQfWqZHG1YFdYs6swckp8vpsjnc= -gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= -gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk= -gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q= -gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= -gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= diff --git a/parse/grammar/KuneiformParser.g4 b/parse/grammar/KuneiformParser.g4 deleted file mode 100644 index 8a37163fe..000000000 --- a/parse/grammar/KuneiformParser.g4 +++ /dev/null @@ -1,423 +0,0 @@ -/* - * A ANTLR4 grammar for Kuneiform. - * Developed by the Kwil team. -*/ -parser grammar KuneiformParser; - -options { - tokenVocab = KuneiformLexer; -} - -// there are 4 top-level entry points for the parser: -// 1. schema_entry -// 2. sql_entry -// 3. action_entry -// 4. procedure_entry -// It is necessary to keep each type of entry separate, since some statements -// can be ambiguous between the different types of entries. Callers will know -// which entry to use based on when they are parsing. - -schema_entry: - schema EOF -; - -sql_entry: - sql EOF -; - -action_entry: - action_block EOF -; - -procedure_entry: - procedure_block EOF -; - -/* - The following section includes the parser rules that are commonly - used among all sections of the grammar. These include literals, -*/ - -literal: - STRING_ # string_literal - | (PLUS | MINUS)? DIGITS_ # integer_literal - | (PLUS | MINUS)? DIGITS_ PERIOD DIGITS_ # decimal_literal - | (TRUE | FALSE) # boolean_literal - | NULL # null_literal - | BINARY_ # binary_literal -; - -// identifier is used for table / column names -identifier: - (DOUBLE_QUOTE IDENTIFIER DOUBLE_QUOTE) | IDENTIFIER -; - -identifier_list: - identifier (COMMA identifier)* -; - -type: - IDENTIFIER (LPAREN DIGITS_ COMMA DIGITS_ RPAREN)? (LBRACKET RBRACKET)? // Handles arrays of any type, including nested arrays -; - -type_cast: - TYPE_CAST type -; - -variable: - VARIABLE | CONTEXTUAL_VARIABLE -; - -variable_list: - variable (COMMA variable)* -; - -/* - The following section includes parser rules for top-level Kuneiform. - These are the rules that parse the schema / DDL, and are used pre-consensus. -*/ - -// schema is the parser entrypoint for an entire -// Kuneiform schema. 
-schema: - database_declaration - (use_declaration | table_declaration - | action_declaration | procedure_declaration - | foreign_procedure_declaration - )* -; - -annotation: - // sort've a hack; annotations don't technically use contextual variables, but they have - // the same syntax of @identifier - CONTEXTUAL_VARIABLE LPAREN (IDENTIFIER EQUALS literal (COMMA IDENTIFIER EQUALS literal)*)? RPAREN -; - -database_declaration: - DATABASE IDENTIFIER SCOL -; - -use_declaration: - USE IDENTIFIER - (LBRACE IDENTIFIER COL literal (COMMA IDENTIFIER COL literal)* RBRACE)? - AS IDENTIFIER SCOL -; - -table_declaration: - TABLE IDENTIFIER LBRACE - column_def (COMMA (column_def | index_def | foreign_key_def))* - RBRACE - ; - -column_def: - name=IDENTIFIER type constraint* -; - -index_def: - HASH_IDENTIFIER - (UNIQUE | INDEX | PRIMARY) - LPAREN columns=identifier_list RPAREN -; - -foreign_key_def: - (FOREIGN KEY|LEGACY_FOREIGN_KEY) // for backwards compatibility - LPAREN child_keys=identifier_list RPAREN - (REFERENCES|REF) parent_table=IDENTIFIER LPAREN parent_keys=identifier_list RPAREN - foreign_key_action* -; - -// variability here is to support legacy syntax -foreign_key_action: - ((ON UPDATE|LEGACY_ON_UPDATE)|(ON DELETE|LEGACY_ON_DELETE)) DO? ((NO ACTION|LEGACY_NO_ACTION)|CASCADE|(SET NULL|LEGACY_SET_NULL)|(SET DEFAULT|LEGACY_SET_DEFAULT)|RESTRICT) -; - -type_list: - type (COMMA type)* -; - -named_type_list: - IDENTIFIER type (COMMA IDENTIFIER type)* -; - -typed_variable_list: - variable type (COMMA variable type)* -; - -constraint: - // conditionally allow some tokens, since they are used elsewhere - (IDENTIFIER| PRIMARY KEY? | NOT NULL | DEFAULT | UNIQUE) (LPAREN literal RPAREN)? -; - -access_modifier: - PUBLIC | PRIVATE | VIEW | OWNER -; - -action_declaration: - annotation* - ACTION IDENTIFIER - LPAREN variable_list? RPAREN - (access_modifier)+ - LBRACE action_block RBRACE -; - -procedure_declaration: - annotation* - PROCEDURE IDENTIFIER - LPAREN (typed_variable_list)? RPAREN - (access_modifier)+ - (procedure_return)? - LBRACE procedure_block RBRACE -; - - -foreign_procedure_declaration: - FOREIGN PROCEDURE IDENTIFIER - LPAREN (unnamed_params=type_list|named_params=typed_variable_list)? RPAREN - (procedure_return)? -; - -procedure_return: - RETURNS (TABLE? LPAREN return_columns=named_type_list RPAREN - | LPAREN unnamed_return_types=type_list RPAREN) -; - -/* - The following section includes parser rules for SQL. -*/ - -// sql is a top-level SQL statement. -sql: - sql_statement SCOL -; - -sql_statement: - (WITH common_table_expression (COMMA common_table_expression)*)? - (select_statement | update_statement | insert_statement | delete_statement) -; - -common_table_expression: - identifier (LPAREN (identifier (COMMA identifier)*)? RPAREN)? AS LPAREN select_statement RPAREN -; - -select_statement: - select_core - (compound_operator select_core)* - (ORDER BY ordering_term (COMMA ordering_term)*)? - (LIMIT limit=sql_expr)? - (OFFSET offset=sql_expr)? -; - -compound_operator: - UNION ALL? | INTERSECT | EXCEPT -; - -ordering_term: - sql_expr (ASC | DESC)? (NULLS (FIRST | LAST))? -; - -select_core: - SELECT DISTINCT? - result_column (COMMA result_column)* - (FROM relation join*)? - (WHERE where=sql_expr)? - ( - GROUP BY group_by=sql_expr_list - (HAVING having=sql_expr)? - )? -; - -relation: - table_name=identifier (AS? alias=identifier)? 
# table_relation - // aliases are technically required in Kuneiform for subquery and function calls, - // but we allow it to pass here since it is standard SQL to not require it, and - // we can throw a better error message after parsing. - | LPAREN select_statement RPAREN (AS? alias=identifier)? # subquery_relation - | sql_function_call (AS? alias=identifier?) # function_relation -; - -join: - (INNER| LEFT | RIGHT | FULL)? JOIN - relation ON sql_expr -; - -result_column: - sql_expr (AS? identifier)? # expression_result_column - | (table_name=identifier PERIOD)? STAR # wildcard_result_column -; - -update_statement: - UPDATE table_name=identifier (AS? alias=identifier)? - SET update_set_clause (COMMA update_set_clause)* - (FROM relation join*)? - (WHERE where=sql_expr)? -; - -update_set_clause: - column=identifier EQUALS sql_expr -; - -insert_statement: - INSERT INTO table_name=identifier (AS? alias=identifier)? - (LPAREN target_columns=identifier_list RPAREN)? - VALUES LPAREN sql_expr_list RPAREN (COMMA LPAREN sql_expr_list RPAREN)* - upsert_clause? -; - -upsert_clause: - ON CONFLICT - (LPAREN conflict_columns=identifier_list RPAREN (WHERE conflict_where=sql_expr)?)? - DO ( - NOTHING - | UPDATE SET update_set_clause (COMMA update_set_clause)* - (WHERE update_where=sql_expr)? - ) -; - -delete_statement: - DELETE FROM table_name=identifier (AS? alias=identifier)? - // (USING relation join*)? - (WHERE where=sql_expr)? -; - -// https://docs.kwil.com/docs/kuneiform/operators -sql_expr: - // highest precedence: - LPAREN sql_expr RPAREN type_cast? # paren_sql_expr - | sql_expr PERIOD identifier type_cast? # field_access_sql_expr - | array_element=sql_expr LBRACKET ( - // can be arr[1], arr[1:2], arr[1:], arr[:2], arr[:] - single=sql_expr - | (left=sql_expr? COL right=sql_expr?) - ) RBRACKET type_cast? # array_access_sql_expr - | (PLUS|MINUS) sql_expr # unary_sql_expr - | sql_expr COLLATE identifier # collate_sql_expr - | left=sql_expr (STAR | DIV | MOD) right=sql_expr # arithmetic_sql_expr - | left=sql_expr (PLUS | MINUS) right=sql_expr # arithmetic_sql_expr - - // any unspecified operator: - | literal type_cast? # literal_sql_expr - | sql_function_call type_cast? # function_call_sql_expr - | variable type_cast? # variable_sql_expr - | (table=identifier PERIOD)? column=identifier type_cast? # column_sql_expr - | CASE case_clause=sql_expr? - (when_then_clause)+ - (ELSE else_clause=sql_expr)? END # case_expr - | (NOT? EXISTS)? LPAREN select_statement RPAREN type_cast? # subquery_sql_expr - // setting precedence for arithmetic operations: - | left=sql_expr CONCAT right=sql_expr # arithmetic_sql_expr - - // the rest: - | sql_expr NOT? IN LPAREN (sql_expr_list|select_statement) RPAREN # in_sql_expr - | left=sql_expr NOT? (LIKE|ILIKE) right=sql_expr # like_sql_expr - | element=sql_expr (NOT)? BETWEEN lower=sql_expr AND upper=sql_expr # between_sql_expr - | left=sql_expr (EQUALS | EQUATE | NEQ | LT | LTE | GT | GTE) right=sql_expr # comparison_sql_expr - | left=sql_expr IS NOT? ((DISTINCT FROM right=sql_expr) | NULL | TRUE | FALSE) # is_sql_expr - | (NOT) sql_expr # unary_sql_expr - | left=sql_expr AND right=sql_expr # logical_sql_expr - | left=sql_expr OR right=sql_expr # logical_sql_expr -; - - -when_then_clause: - WHEN when_condition=sql_expr THEN then=sql_expr -; - -sql_expr_list: - sql_expr (COMMA sql_expr)* -; - -sql_function_call: - identifier LPAREN (DISTINCT? sql_expr_list|STAR)? 
RPAREN #normal_call_sql - | identifier LBRACKET dbid=sql_expr COMMA procedure=sql_expr RBRACKET LPAREN (sql_expr_list)? RPAREN #foreign_call_sql -; - -/* - The following section includes parser rules for action blocks. -*/ -// action_block is the top-level rule for an action block. -action_block: - (action_statement SCOL)* -; - -// action statements can only be 3 things: -// 1. a sql statement -// 2. a local action/procedure call. -// 3. an extension call -action_statement: - sql_statement # sql_action - | IDENTIFIER LPAREN (procedure_expr_list)? RPAREN # local_action - | (variable_list EQUALS)? IDENTIFIER PERIOD IDENTIFIER LPAREN (procedure_expr_list)? RPAREN # extension_action -; - -/* - This section includes parser rules for procedures -*/ - -// procedure_block is the top-level rule for a procedure. -procedure_block: - proc_statement* -; - -// https://docs.kwil.com/docs/kuneiform/operators -procedure_expr: - // highest precedence: - LPAREN procedure_expr RPAREN type_cast? # paren_procedure_expr - | procedure_expr PERIOD IDENTIFIER type_cast? # field_access_procedure_expr - | array_element=procedure_expr LBRACKET ( - // can be arr[1], arr[1:2], arr[1:], arr[:2], arr[:] - single=procedure_expr - | (left=procedure_expr? COL right=procedure_expr?) - ) RBRACKET type_cast? # array_access_procedure_expr - | (PLUS|MINUS|EXCL) procedure_expr # unary_procedure_expr - | procedure_expr (STAR | DIV | MOD) procedure_expr # procedure_expr_arithmetic - | procedure_expr (PLUS | MINUS) procedure_expr # procedure_expr_arithmetic - - // any unspecified operator: - | literal type_cast? # literal_procedure_expr - | procedure_function_call type_cast? # function_call_procedure_expr - | variable type_cast? # variable_procedure_expr - | LBRACKET (procedure_expr_list)? RBRACKET type_cast? # make_array_procedure_expr - | procedure_expr CONCAT procedure_expr # procedure_expr_arithmetic - - // the rest: - | procedure_expr (EQUALS | EQUATE | NEQ | LT | LTE | GT | GTE) procedure_expr # comparison_procedure_expr - | left=procedure_expr IS NOT? ((DISTINCT FROM right=procedure_expr) | NULL | TRUE | FALSE) # is_procedure_expr - | (NOT) procedure_expr # unary_procedure_expr - | procedure_expr AND procedure_expr # logical_procedure_expr - | procedure_expr OR procedure_expr # logical_procedure_expr -; - -procedure_expr_list: - procedure_expr (COMMA procedure_expr)* -; - -proc_statement: - VARIABLE type SCOL # stmt_variable_declaration - // stmt_procedure_call must go above stmt_variable_assignment due to lexer ambiguity - | ((variable_or_underscore) (COMMA (variable_or_underscore))* ASSIGN)? procedure_function_call SCOL # stmt_procedure_call - | procedure_expr type? ASSIGN procedure_expr SCOL # stmt_variable_assignment - | FOR receiver=VARIABLE IN (range|target_variable=variable|sql_statement) LBRACE proc_statement* RBRACE # stmt_for_loop - | IF if_then_block (ELSEIF if_then_block)* (ELSE LBRACE proc_statement* RBRACE)? # stmt_if - | sql_statement SCOL # stmt_sql - | BREAK SCOL # stmt_break - | RETURN (procedure_expr_list|sql_statement)? SCOL # stmt_return - | RETURN NEXT procedure_expr_list SCOL # stmt_return_next -; - -variable_or_underscore: - VARIABLE | UNDERSCORE -; - -procedure_function_call: - IDENTIFIER LPAREN (procedure_expr_list)? RPAREN #normal_call_procedure - | IDENTIFIER LBRACKET dbid=procedure_expr COMMA procedure=procedure_expr RBRACKET LPAREN (procedure_expr_list)? 
RPAREN #foreign_call_procedure -; - -if_then_block: - procedure_expr LBRACE proc_statement* RBRACE -; - -// range used for for loops -range: - procedure_expr RANGE procedure_expr -; \ No newline at end of file diff --git a/parse/parse.go b/parse/parse.go deleted file mode 100644 index c47e1c613..000000000 --- a/parse/parse.go +++ /dev/null @@ -1,559 +0,0 @@ -// package parse contains logic for parsing Kuneiform schemas, procedures, actions, -// and SQL. -package parse - -import ( - "fmt" - "reflect" - "runtime" - "strings" - - "github.com/antlr4-go/antlr/v4" - "github.com/kwilteam/kwil-db/core/types" - "github.com/kwilteam/kwil-db/parse/gen" -) - -// Parse parses a Kuneiform schema. It will perform syntax, semantic, and type -// analysis, and return any errors. -func Parse(kf []byte) (*types.Schema, error) { - res, err := ParseAndValidate(kf) - if err != nil { - return nil, err - } - - if res.Err() != nil { - return nil, res.Err() - } - - return res.Schema, nil -} - -// SchemaParseResult is the result of parsing a schema. -// It returns the resulting schema, as well as any expected errors that occurred during parsing. -// Unexpected errors will not be returned here, but instead in the function returning this type. -type SchemaParseResult struct { - // Schema is the parsed schema. - // The schema can be nil if there are errors. - Schema *types.Schema `json:"schema"` - // ParseErrs is the error listener that contains all the errors that occurred during parsing. - ParseErrs ParseErrs `json:"parse_errs,omitempty"` - // SchemaInfo is the information about the schema. - SchemaInfo *SchemaInfo `json:"schema_info,omitempty"` - // ParsedActions is the ASTs of the parsed actions. - ParsedActions map[string][]ActionStmt `json:"parsed_actions,omitempty"` - // ParsedProcedures is the ASTs of the parsed procedures. - ParsedProcedures map[string][]ProcedureStmt `json:"parsed_procedures,omitempty"` -} - -func (r *SchemaParseResult) Err() error { - return r.ParseErrs.Err() -} - -// ParseAndValidate parses and validates an entire schema. -// It returns the parsed schema, as well as any errors that occurred during parsing and validation. -// It is meant to be used by parsing tools and the CLI. Most external users should use Parse instead. -func ParseAndValidate(kf []byte) (*SchemaParseResult, error) { - res, err := ParseSchemaWithoutValidation(kf) - if err != nil { - return nil, err - } - - // if there is a syntax error, we shouldn't continue with validation. - // We should still return a nil error, as the caller should read the error - // from the ParseErrs field. - if res.ParseErrs.Err() != nil { - return res, nil - } - - // we clean the schema only after checking for parser errors, since parser errors - // might be the reason the schema is invalid in the first place. - err = res.Schema.Clean() - if err != nil { - // all clean validations should get caught before this point, however if they don't - // this will throw an error during parsing, instead of during transaction execution. - return nil, err - } - - for _, proc := range res.Schema.Procedures { - ast := res.ParsedProcedures[proc.Name] - block := res.SchemaInfo.Blocks[proc.Name] - - procRes, err := analyzeProcedureAST(proc, res.Schema, ast, &block.Position) - if err != nil { - return nil, err - } - - res.ParseErrs.Add(procRes.ParseErrs.Errors()...) 
- } - - for _, act := range res.Schema.Actions { - ast := res.ParsedActions[act.Name] - actRes, err := analyzeActionAST(act, res.Schema, ast) - if err != nil { - return nil, err - } - - res.ParseErrs.Add(actRes.ParseErrs.Errors()...) - } - - return res, nil -} - -// ParseSchemaWithoutValidation parses a Kuneiform schema. -// It will not perform validations on the actions and procedures. -// Most users should use ParseAndValidate instead. -func ParseSchemaWithoutValidation(kf []byte) (res *SchemaParseResult, err error) { - errLis, stream, parser, deferFn := setupParser(string(kf), "schema") - res = &SchemaParseResult{ - ParseErrs: errLis, - ParsedActions: make(map[string][]ActionStmt), - ParsedProcedures: make(map[string][]ProcedureStmt), - } - - visitor := newSchemaVisitor(stream, errLis) - visitor.actions = res.ParsedActions - visitor.procedures = res.ParsedProcedures - - defer func() { - err2 := deferFn(recover()) - if err2 != nil { - err = err2 - } - }() - - schema, ok := parser.Schema_entry().Accept(visitor).(*types.Schema) - if !ok { - err = fmt.Errorf("error parsing schema: could not detect return schema. this is likely a bug in the parser") - } - - res.Schema = schema - res.SchemaInfo = visitor.schemaInfo - - if errLis.Err() != nil { - return res, nil - } - - return res, err -} - -// ProcedureParseResult is the result of parsing a procedure. -// It returns the procedure body AST, as well as any errors that occurred during parsing. -// Unexpected errors will not be returned here, but instead in the function returning this type. -type ProcedureParseResult struct { - // AST is the abstract syntax tree of the procedure. - AST []ProcedureStmt - // Errs are the errors that occurred during parsing and analysis. - // These include syntax errors, type errors, etc. - ParseErrs ParseErrs - // Variables are all variables that are used in the procedure. - Variables map[string]*types.DataType - // CompoundVariables are variables that are created in the procedure. - CompoundVariables map[string]struct{} - // AnonymousReceivers are the anonymous receivers that are used in the procedure, - // in the order they appear - AnonymousReceivers []*types.DataType -} - -// ParseProcedure parses a procedure. -// It takes the procedure definition, as well as the schema. -// It performs type and semantic checks on the procedure. -func ParseProcedure(proc *types.Procedure, schema *types.Schema) (res *ProcedureParseResult, err error) { - return analyzeProcedureAST(proc, schema, nil, &Position{}) // zero position is fine here -} - -// analyzeProcedureAST analyzes the AST of a procedure. -// If AST is nil, it will parse it from the provided body. This is useful because ASTs -// with custom error positions can be passed in. -func analyzeProcedureAST(proc *types.Procedure, schema *types.Schema, ast []ProcedureStmt, procPos *Position) (res *ProcedureParseResult, err error) { - errLis, stream, parser, deferFn := setupParser(proc.Body, "procedure") - defer func() { - err2 := deferFn(recover()) - if err2 != nil { - err = err2 - } - }() - - res = &ProcedureParseResult{ - ParseErrs: errLis, - Variables: make(map[string]*types.DataType), - CompoundVariables: make(map[string]struct{}), - } - - if ast == nil { - schemaVisitor := newSchemaVisitor(stream, errLis) - // first parse the body, then visit it. - res.AST = parser.Procedure_entry().Accept(schemaVisitor).([]ProcedureStmt) - } else { - res.AST = ast - } - - // if there are expected errors, return the parse errors. 
- if errLis.Err() != nil { - return res, nil - } - - // set the parameters as the initial vars - vars := makeSessionVars() - for _, v := range proc.Parameters { - vars[v.Name] = v.Type - } - - visitor := &procedureAnalyzer{ - sqlAnalyzer: sqlAnalyzer{ - blockContext: blockContext{ - schema: schema, - variables: vars, - anonymousVariables: make(map[string]map[string]*types.DataType), - errs: errLis, - }, - sqlCtx: newSQLContext(), - }, - procCtx: newProcedureContext(proc), - procResult: struct { - allLoopReceivers []*loopTargetTracker - anonymousReceivers []*types.DataType - allVariables map[string]*types.DataType - }{ - allVariables: make(map[string]*types.DataType), - }, - } - - // visit the AST - returns := false - for _, stmt := range res.AST { - res := stmt.Accept(visitor).(*procedureStmtResult) - if res.willReturn { - returns = true - } - } - - // if the procedure is expecting a return that is not a table, and it does not guarantee - // returning a value, we should add an error. - if proc.Returns != nil && !returns && !proc.Returns.IsTable { - if len(res.AST) == 0 { - errLis.AddErr(procPos, ErrReturn, "procedure does not return a value") - } else { - errLis.AddErr(res.AST[len(res.AST)-1], ErrReturn, "procedure does not return a value") - } - } - - for k, v := range visitor.procResult.allVariables { - res.Variables[k] = v - } - - for _, v := range visitor.procResult.allLoopReceivers { - // if type is nil, it is a compound variable, and we add it to the loop variables - // if not nil, it is a value, and we add it to the other variables - if v.dataType == nil { - res.CompoundVariables[v.name.String()] = struct{}{} - } else { - res.Variables[v.name.String()] = v.dataType - } - } - - // we also need to add all input variables to the variables list - for _, v := range proc.Parameters { - res.Variables[v.Name] = v.Type - } - - res.AnonymousReceivers = visitor.procResult.anonymousReceivers - - return res, err -} - -// SQLParseResult is the result of parsing an SQL statement. -// It returns the SQL AST, as well as any errors that occurred during parsing. -// Unexpected errors will not be returned here, but instead in the function returning this type. -type SQLParseResult struct { - // AST is the abstract syntax tree of the SQL statement. - AST *SQLStatement - // Errs are the errors that occurred during parsing and analysis. - // These include syntax errors, type errors, etc. - ParseErrs ParseErrs - - // Mutative is true if the statement mutates state. - Mutative bool -} - -// ParseSQL parses an SQL statement. -// It requires a schema to be passed in, since SQL statements may reference -// schema objects. -// If skipValidation is true, the AST will not be validated or analyzed. -// TODO: once we get farther on the planner, we should remove all validation, in favor -// of the planner. -func ParseSQL(sql string, schema *types.Schema, skipValidation bool) (res *SQLParseResult, err error) { - parser, errLis, sqlVis, parseVis, deferFn, err := setupSQLParser(sql, schema) - - res = &SQLParseResult{ - ParseErrs: errLis, - } - - defer func() { - err2 := deferFn(recover()) - if err2 != nil { - err = err2 - } - }() - - res.AST = parser.Sql_entry().Accept(parseVis).(*SQLStatement) - - if errLis.Err() != nil { - return res, nil - } - - if skipValidation { - return res, nil - } - - res.AST.Accept(sqlVis) - res.Mutative = sqlVis.sqlResult.Mutative - - return res, err -} - -// ParseSQLWithoutValidation parses a SQL AST, but does not perform any validation -// or analysis. 
ASTs returned from this should not be used in production, as they -// might contain errors, and are not deterministically ordered. -func ParseSQLWithoutValidation(sql string, schema *types.Schema) (res *SQLStatement, err error) { - defer func() { - err2 := recover() - if err2 != nil { - err = fmt.Errorf("panic: %v", err2) - } - }() - - parser, errLis, _, parseVis, deferFn, err := setupSQLParser(sql, schema) - if err != nil { - return nil, err - } - - defer func() { - err2 := deferFn(recover()) - if err2 != nil { - err = err2 - } - }() - - res = parser.Sql_entry().Accept(parseVis).(*SQLStatement) - - if errLis.Err() != nil { - return nil, errLis.Err() - } - - return res, nil -} - -// setupSQLParser sets up the SQL parser. -func setupSQLParser(sql string, schema *types.Schema) (parser *gen.KuneiformParser, errLis *errorListener, sqlVisitor *sqlAnalyzer, parserVisitor *schemaVisitor, deferFn func(any) error, err error) { - if sql == "" { - return nil, nil, nil, nil, nil, fmt.Errorf("empty SQL statement") - } - - // trim whitespace - sql = strings.TrimSpace(sql) - - // add semicolon to the end of the statement, if it is not there - if !strings.HasSuffix(sql, ";") { - sql += ";" - } - - errLis, stream, parser, deferFn := setupParser(sql, "sql") - - sqlVisitor = &sqlAnalyzer{ - blockContext: blockContext{ - schema: schema, - variables: make(map[string]*types.DataType), // no variables exist for pure SQL calls - anonymousVariables: make(map[string]map[string]*types.DataType), - errs: errLis, - }, - sqlCtx: newSQLContext(), - } - sqlVisitor.sqlCtx.inLoneSQL = true - - parserVisitor = newSchemaVisitor(stream, errLis) - - return parser, errLis, sqlVisitor, parserVisitor, deferFn, err -} - -// ActionParseResult is the result of parsing an action. -// It returns the action body AST, as well as any errors that occurred during parsing. -// Unexpected errors will not be returned here, but instead in the function returning this type. -type ActionParseResult struct { - AST []ActionStmt - // Errs are the errors that occurred during parsing and analysis. - // These include syntax errors, type errors, etc. - ParseErrs ParseErrs -} - -// ParseAction parses a Kuneiform action. -// It requires a schema to be passed in, since actions may reference -// schema objects. -func ParseAction(action *types.Action, schema *types.Schema) (res *ActionParseResult, err error) { - return analyzeActionAST(action, schema, nil) -} - -// analyzeActionAST analyzes the AST of an action. -// If AST is nil, it will parse it from the provided body. This is useful because ASTs -// with custom error positions can be passed in. 
-func analyzeActionAST(action *types.Action, schema *types.Schema, ast []ActionStmt) (res *ActionParseResult, err error) { - errLis, stream, parser, deferFn := setupParser(action.Body, "action") - - res = &ActionParseResult{ - ParseErrs: errLis, - } - - defer func() { - err2 := deferFn(recover()) - if err2 != nil { - err = err2 - } - }() - - if ast == nil { - schemaVisitor := newSchemaVisitor(stream, errLis) - res.AST = parser.Action_entry().Accept(schemaVisitor).([]ActionStmt) - } else { - res.AST = ast - } - - if errLis.Err() != nil { - return res, nil - } - - vars := makeSessionVars() - for _, v := range action.Parameters { - vars[v] = types.UnknownType - } - - visitor := &actionAnalyzer{ - sqlAnalyzer: sqlAnalyzer{ - blockContext: blockContext{ - schema: schema, - variables: vars, - anonymousVariables: make(map[string]map[string]*types.DataType), - errs: errLis, - }, - sqlCtx: newSQLContext(), - }, - schema: schema, - } - - for _, stmt := range res.AST { - stmt.Accept(visitor) - visitor.sqlAnalyzer.reset() - } - - return res, err -} - -// setupParser sets up the necessary antlr objects for parsing. -// It returns an error listener, an input stream, a parser, and a function that -// handles returned errors. The function should be called within deferred panic catch. -// The deferFn will decide whether errors should be swallowed based on the error listener. -func setupParser(inputStream string, errLisName string) (errLis *errorListener, - stream *antlr.InputStream, parser *gen.KuneiformParser, deferFn func(any) error) { - errLis = newErrorListener(errLisName) - stream = antlr.NewInputStream(inputStream) - - lexer := gen.NewKuneiformLexer(stream) - tokens := antlr.NewCommonTokenStream(lexer, antlr.TokenDefaultChannel) - parser = gen.NewKuneiformParser(tokens) - errLis.toks = tokens - - // remove defaults - lexer.RemoveErrorListeners() - parser.RemoveErrorListeners() - lexer.AddErrorListener(errLis) - parser.AddErrorListener(errLis) - - parser.BuildParseTrees = true - - deferFn = func(e any) (err error) { - if e != nil { - var ok bool - err, ok = e.(error) - if !ok { - err = fmt.Errorf("panic: %v", e) - } - } - - // if there is a panic, it may be due to a syntax error. - // therefore, we should check for syntax errors first and if - // any occur, swallow the panic and return the syntax errors. - // If the issue persists past syntax errors, the else block - // will return the error. - if errLis.Err() != nil { - return nil - } else if err != nil { - // stack trace since this - buf := make([]byte, 1<<16) - - stackSize := runtime.Stack(buf, false) - err = fmt.Errorf("%w\n\n%s", err, buf[:stackSize]) - - return err - } - - return nil - } - - return errLis, stream, parser, deferFn -} - -// RecursivelyVisitPositions traverses a structure recursively, visiting all position struct types. -// It is used in both parsing tools, as well as in tests. -// WARNING: This function should NEVER be used in consensus, since it is non-deterministic. -func RecursivelyVisitPositions(v any, fn func(GetPositioner)) { - - visited := make(map[uintptr]struct{}) - visitRecursive(reflect.ValueOf(v), reflect.TypeOf((*GetPositioner)(nil)).Elem(), func(v reflect.Value) { - if v.CanInterface() { - a := v.Interface().(GetPositioner) - fn(a) - } - }, visited) -} - -// visitRecursive is a recursive function that visits all types that implement the target interface. 
-func visitRecursive(v reflect.Value, target reflect.Type, fn func(reflect.Value), visited map[uintptr]struct{}) { - if v.Type().Implements(target) { - // check if the value is nil - if !v.IsNil() { - fn(v) - } - } - - switch v.Kind() { - case reflect.Interface: - if v.IsNil() { - return - } - - visitRecursive(v.Elem(), target, fn, visited) - case reflect.Ptr: - if v.IsNil() { - return - } - - // check if we have visited this pointer before - ptr := v.Pointer() - if _, ok := visited[ptr]; ok { - return - } - visited[ptr] = struct{}{} - - visitRecursive(v.Elem(), target, fn, visited) - case reflect.Struct: - for i := range v.NumField() { - visitRecursive(v.Field(i), target, fn, visited) - } - case reflect.Slice, reflect.Array: - for i := range v.Len() { - visitRecursive(v.Index(i), target, fn, visited) - } - case reflect.Map: - for _, key := range v.MapKeys() { - visitRecursive(v.MapIndex(key), target, fn, visited) - } - } -} diff --git a/parse/parse_test.go b/parse/parse_test.go deleted file mode 100644 index fcc676081..000000000 --- a/parse/parse_test.go +++ /dev/null @@ -1,3223 +0,0 @@ -package parse_test - -import ( - "encoding/json" - "testing" - - "github.com/google/go-cmp/cmp" - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" - - "github.com/kwilteam/kwil-db/core/types" - "github.com/kwilteam/kwil-db/core/utils/order" - "github.com/kwilteam/kwil-db/parse" -) - -// Test_Kuneiform tests the Kuneiform parser. -func Test_Kuneiform(t *testing.T) { - type testCase struct { - name string - kf string - want *types.Schema - err error // can be nil - // checkAfterErr will continue with the schema comparison - // after an error is encountered. - checkAfterErr bool - } - - tests := []testCase{ - { - name: "simple schema", - kf: ` - database mydb; - - table users { - id int primary_key notnull, - username text not null unique minlen(5) maxlen(32) - } - - action create_user ($id, $username) public { - insert into users (id, username) values ($id, $username); - } - - procedure get_username ($id int) public view RETURNS (name text) { - return select username from users where id = $id; // this is a comment - } - `, - want: &types.Schema{ - Name: "mydb", - Tables: []*types.Table{ - tblUsers, - }, - Actions: []*types.Action{ - { - Name: "create_user", - Parameters: []string{ - "$id", - "$username", - }, - Public: true, - Body: `insert into users (id, username) values ($id, $username);`, - }, - }, - Procedures: []*types.Procedure{ - { - Name: "get_username", - Parameters: []*types.ProcedureParameter{ - { - Name: "$id", - Type: types.IntType, - }, - }, - Public: true, - Modifiers: []types.Modifier{ - types.ModifierView, - }, - Body: `return select username from users where id = $id;`, - Returns: &types.ProcedureReturn{Fields: []*types.NamedType{ - { - Name: "name", - Type: types.TextType, - }, - }}, - }, - }, - }, - }, - { - name: "foreign key and index", - kf: ` - database mydb; - - table users { - id int primary not null, - username text not null unique minlen(5) maxlen(32) - } - - table posts { - id int primary key, - author_id int not null, - foreign key (author_id) references users (id) on delete cascade on update cascade, - #idx index(author_id) - } - `, - want: &types.Schema{ - Name: "mydb", - Tables: []*types.Table{ - tblUsers, - { - Name: "posts", - Columns: []*types.Column{ - { - Name: "id", - Type: types.IntType, - Attributes: []*types.Attribute{ - { - Type: types.PRIMARY_KEY, - }, - }, - }, - { - Name: "author_id", - Type: types.IntType, - Attributes: 
[]*types.Attribute{ - { - Type: types.NOT_NULL, - }, - }, - }, - }, - Indexes: []*types.Index{ - { - Name: "idx", - Type: types.BTREE, - Columns: []string{"author_id"}, - }, - }, - ForeignKeys: []*types.ForeignKey{ - { - ChildKeys: []string{"author_id"}, - ParentTable: "users", - ParentKeys: []string{"id"}, - Actions: []*types.ForeignKeyAction{ - { - On: types.ON_DELETE, - Do: types.DO_CASCADE, - }, - { - On: types.ON_UPDATE, - Do: types.DO_CASCADE, - }, - }, - }, - }, - }, - }, - }, - }, - { - name: "procedure returns table", - kf: ` - database mydb; - - procedure get_users() public view RETURNS table(id int) { - return select id from users; - } - `, - want: &types.Schema{ - Name: "mydb", - Procedures: []*types.Procedure{ - { - Name: "get_users", - Public: true, - Modifiers: []types.Modifier{ - types.ModifierView, - }, - Body: `return select id from users;`, - Returns: &types.ProcedureReturn{ - IsTable: true, - Fields: []*types.NamedType{ - { - Name: "id", - Type: types.IntType, - }, - }, - }, - }, - }, - }, - }, - { - name: "use", - kf: ` - database mydb; - - uSe myext AS db1; - use myext { - a: 'b', - c: 1 - } aS db2; - `, - want: &types.Schema{ - Name: "mydb", - Extensions: []*types.Extension{ - { - Name: "myext", - Alias: "db1", - }, - { - Name: "myext", - Initialization: []*types.ExtensionConfig{ - { - Key: "a", - Value: "'b'", - }, - { - Key: "c", - Value: "1", - }, - }, - Alias: "db2", - }, - }, - }, - }, - { - name: "annotations", - kf: ` - database mydb; - - @kgw(authn='true') - procedure get_users() public view {} - `, - want: &types.Schema{ - Name: "mydb", - Procedures: []*types.Procedure{ - { - Name: "get_users", - Public: true, - Modifiers: []types.Modifier{ - types.ModifierView, - }, - Annotations: []string{"@kgw(authn='true')"}, - }, - }, - }, - }, - { - name: "all possible constraints", - kf: ` - database mydb; - - table other_users { - id int pk, - username text not null unique minlen(5) maxlen(32), - age int max(100) min(18) default(18) - } - - table users { - id int primary key, - username text not null unique minlen(5) maxlen(32), - age int max(100) min(18) default(18), - bts blob default(0x00), - foreign key (id) references other_users (id) on delete cascade on update set null, - foreign key (username) references other_users (username) on delete set default on update no action, - foreign key (age) references other_users (age) on delete restrict - } - - table other_uses { - id int primary key, - username text unique, - age int unique - } - `, - want: &types.Schema{ - Name: "mydb", - Tables: []*types.Table{ - { - Name: "other_users", - Columns: []*types.Column{ - { - Name: "id", - Type: types.IntType, - Attributes: []*types.Attribute{ - { - Type: types.PRIMARY_KEY, - }, - }, - }, - { - Name: "username", - Type: types.TextType, - Attributes: []*types.Attribute{ - { - Type: types.NOT_NULL, - }, - { - Type: types.UNIQUE, - }, - { - Type: types.MIN_LENGTH, - Value: "5", - }, - { - Type: types.MAX_LENGTH, - Value: "32", - }, - }, - }, - { - Name: "age", - Type: types.IntType, - Attributes: []*types.Attribute{ - { - Type: types.MAX, - Value: "100", - }, - { - Type: types.MIN, - Value: "18", - }, - { - Type: types.DEFAULT, - Value: "18", - }, - }, - }, - }, - }, - { - Name: "users", - Columns: []*types.Column{ - { - Name: "id", - Type: types.IntType, - Attributes: []*types.Attribute{ - { - Type: types.PRIMARY_KEY, - }, - }, - }, - { - Name: "username", - Type: types.TextType, - Attributes: []*types.Attribute{ - { - Type: types.NOT_NULL, - }, - { - Type: types.UNIQUE, - }, - { 
- Type: types.MIN_LENGTH, - Value: "5", - }, - { - Type: types.MAX_LENGTH, - Value: "32", - }, - }, - }, - { - Name: "age", - Type: types.IntType, - Attributes: []*types.Attribute{ - { - Type: types.MAX, - Value: "100", - }, - { - Type: types.MIN, - Value: "18", - }, - { - Type: types.DEFAULT, - Value: "18", - }, - }, - }, - { - Name: "bts", - Type: types.BlobType, - Attributes: []*types.Attribute{ - { - Type: types.DEFAULT, - Value: "0x00", - }, - }, - }, - }, - ForeignKeys: []*types.ForeignKey{ - { - ChildKeys: []string{"id"}, - ParentTable: "other_users", - ParentKeys: []string{"id"}, - Actions: []*types.ForeignKeyAction{ - { - On: types.ON_DELETE, - Do: types.DO_CASCADE, - }, - { - On: types.ON_UPDATE, - Do: types.DO_SET_NULL, - }, - }, - }, - { - ChildKeys: []string{"username"}, - ParentTable: "other_users", - ParentKeys: []string{"username"}, - Actions: []*types.ForeignKeyAction{ - { - On: types.ON_DELETE, - Do: types.DO_SET_DEFAULT, - }, - { - On: types.ON_UPDATE, - Do: types.DO_NO_ACTION, - }, - }, - }, - { - ChildKeys: []string{"age"}, - ParentTable: "other_users", - ParentKeys: []string{"age"}, - Actions: []*types.ForeignKeyAction{ - { - On: types.ON_DELETE, - Do: types.DO_RESTRICT, - }, - }, - }, - }, - }, - { - Name: "other_uses", - Columns: []*types.Column{ - { - Name: "id", - Type: types.IntType, - Attributes: []*types.Attribute{ - { - Type: types.PRIMARY_KEY, - }, - }, - }, - { - Name: "username", - Type: types.TextType, - Attributes: []*types.Attribute{ - { - Type: types.UNIQUE, - }, - }, - }, - { - Name: "age", - Type: types.IntType, - Attributes: []*types.Attribute{ - { - Type: types.UNIQUE, - }, - }, - }, - }, - }, - }, - }, - }, - { - name: "foreign, no parameters, returns nothing", - kf: ` - database mydb; - - foreign procedure get_users() - `, - want: &types.Schema{ - Name: "mydb", - ForeignProcedures: []*types.ForeignProcedure{ - { - Name: "get_users", - }, - }, - }, - }, - { - name: "foreign, with parameters, returns unnamed types", - kf: ` - database mydb; - - foreign procedure get_users(int, text) RETURNS (int, text) - `, - want: &types.Schema{ - Name: "mydb", - ForeignProcedures: []*types.ForeignProcedure{ - { - Name: "get_users", - Parameters: []*types.DataType{ - types.IntType, - types.TextType, - }, - Returns: &types.ProcedureReturn{ - Fields: []*types.NamedType{ - { - Name: "col0", - Type: types.IntType, - }, - { - Name: "col1", - Type: types.TextType, - }, - }, - }, - }, - }, - }, - }, - { - name: "foreign, with parameters, returns named types", - kf: ` - database mydb; - - foreign procedure get_users() RETURNS (id int, name text) - `, - want: &types.Schema{ - Name: "mydb", - ForeignProcedures: []*types.ForeignProcedure{ - { - Name: "get_users", - Returns: &types.ProcedureReturn{ - Fields: []*types.NamedType{ - { - Name: "id", - Type: types.IntType, - }, - { - Name: "name", - Type: types.TextType, - }, - }, - }, - }, - }, - }, - }, - { - name: "foreign, returns table", - kf: ` - database mydb; - - foreign procedure get_users() RETURNS table(id int) - `, - want: &types.Schema{ - Name: "mydb", - ForeignProcedures: []*types.ForeignProcedure{ - { - Name: "get_users", - Returns: &types.ProcedureReturn{ - IsTable: true, - Fields: []*types.NamedType{ - { - Name: "id", - Type: types.IntType, - }, - }, - }, - }, - }, - }, - }, - { - name: "named foreign parameters", - kf: ` - database mydb; - - foreign procedure get_users($id int, $name text) returns (id int, name text) - `, - want: &types.Schema{ - Name: "mydb", - ForeignProcedures: []*types.ForeignProcedure{ - { - 
Name: "get_users", - Parameters: []*types.DataType{ - types.IntType, - types.TextType, - }, - Returns: &types.ProcedureReturn{ - Fields: []*types.NamedType{ - { - Name: "id", - Type: types.IntType, - }, - { - Name: "name", - Type: types.TextType, - }, - }, - }, - }, - }, - }, - }, - { - // this test tries to break case sensitivity in every way possible - name: "case insensitive", - kf: ` - database myDB; - - table UsErS { - iD inT pRimaRy kEy nOt nUll - } - - table posts { - id int primary key, - author_id int not null, - ForEign key (author_ID) references usErs (Id) On delEte cAscade on Update cascadE, - #iDx inDex(author_iD) - } - - uSe myeXt As dB1; - - pRoceDure get_Users($nAme tExt) Public viEw ReTURNS tablE(iD iNt) { - return select id from users; // this wont actually get parsed in this test - } - - fOreign proceduRe get_othEr_Users($Id inT, $nAme Text) RETURNS table(iD inT, Name tExt) - - @kGw( autHn='tRue' ) - AcTion create_User ($Id, $usErname) Public { - insert into users (id, username) values ($id, $username); - } - `, - want: &types.Schema{ - Name: "mydb", - Tables: []*types.Table{ - { - Name: "users", - Columns: []*types.Column{ - { - Name: "id", - Type: types.IntType, - Attributes: []*types.Attribute{ - { - Type: types.PRIMARY_KEY, - }, - { - Type: types.NOT_NULL, - }, - }, - }, - }, - }, - { - Name: "posts", - Columns: []*types.Column{ - { - Name: "id", - Type: types.IntType, - Attributes: []*types.Attribute{ - { - Type: types.PRIMARY_KEY, - }, - }, - }, - { - Name: "author_id", - Type: types.IntType, - Attributes: []*types.Attribute{ - { - Type: types.NOT_NULL, - }, - }, - }, - }, - Indexes: []*types.Index{ - { - Name: "idx", - Type: types.BTREE, - Columns: []string{"author_id"}, - }, - }, - ForeignKeys: []*types.ForeignKey{ - { - ChildKeys: []string{"author_id"}, - ParentTable: "users", - ParentKeys: []string{"id"}, - Actions: []*types.ForeignKeyAction{ - { - On: types.ON_DELETE, - Do: types.DO_CASCADE, - }, - { - On: types.ON_UPDATE, - Do: types.DO_CASCADE, - }, - }, - }, - }, - }, - }, - Extensions: []*types.Extension{ - { - Name: "myext", - Alias: "db1", - }, - }, - Procedures: []*types.Procedure{ - { - Name: "get_users", - Public: true, - Modifiers: []types.Modifier{ - types.ModifierView, - }, - Parameters: []*types.ProcedureParameter{ - { - Name: "$name", - Type: types.TextType, - }, - }, - Returns: &types.ProcedureReturn{ - IsTable: true, - Fields: []*types.NamedType{ - { - Name: "id", - Type: types.IntType, - }, - }, - }, - Body: `return select id from users;`, // comments will not be parsed - }, - }, - ForeignProcedures: []*types.ForeignProcedure{ - { - Name: "get_other_users", - Parameters: []*types.DataType{ - types.IntType, - types.TextType, - }, - Returns: &types.ProcedureReturn{ - IsTable: true, - Fields: []*types.NamedType{ - { - Name: "id", - Type: types.IntType, - }, - { - Name: "name", - Type: types.TextType, - }, - }, - }, - }, - }, - Actions: []*types.Action{ - { - Annotations: []string{"@kgw(authn='tRue')"}, - Name: "create_user", - Parameters: []string{ - "$id", - "$username", - }, - Public: true, - Body: `insert into users (id, username) values ($id, $username);`, - }, - }, - }, - }, - { - name: "two database blocks", - kf: `database a; - database b;`, - err: parse.ErrSyntax, - }, - { - // tests for https://github.com/kwilteam/kwil-db/issues/752 - name: "incomplete database block", - kf: `datab`, - want: &types.Schema{}, - err: parse.ErrSyntax, - checkAfterErr: true, - }, - { - // similar to the above test, the same edge case existed for foreign 
procedures - name: "incomplete foreign procedure", - kf: `database a; - foreign proce`, - want: &types.Schema{ - Name: "a", - ForeignProcedures: []*types.ForeignProcedure{ - {}, // there will be one empty foreign procedure - }, - }, - err: parse.ErrSyntax, - checkAfterErr: true, - }, - { - // this test tests for properly handling errors for missing primary keys - name: "missing primary key", - kf: ` - database mydb; - - table users { - id int not null - } - `, - want: &types.Schema{ - Name: "mydb", - Tables: []*types.Table{ - { - Name: "users", - Columns: []*types.Column{ - { - Name: "id", - Type: types.IntType, - Attributes: []*types.Attribute{ - { - Type: types.NOT_NULL, - }, - }, - }, - }, - }, - }, - }, - err: parse.ErrNoPrimaryKey, - }, - { - name: "empty body", - kf: ` - database mydb; - - procedure get_users() public view {} - `, - want: &types.Schema{ - Name: "mydb", - Procedures: []*types.Procedure{ - { - Name: "get_users", - Public: true, - Modifiers: []types.Modifier{ - types.ModifierView, - }, - }, - }, - }, - }, - { - name: "foreign key to non-existent table", - kf: `database mydb; - - table a { - id int primary key, - foreign key (id) references b (id) - } - `, - err: parse.ErrUnknownTable, - }, - { - name: "foreign key to non-existent column", - kf: `database mydb; - - table a { - id int primary key - } - - table b { - id int primary key, - foreign key (id) references a (id_not_exist) - } - `, - err: parse.ErrUnknownColumn, - }, - { - name: "foreign key on non-existent column", - kf: `database mydb; - - table a { - id int primary key - } - - table b { - id int primary key, - foreign key (id_not_exist) references a (id) - } - `, - err: parse.ErrUnknownColumn, - }, - { - name: "index on non-existent column", - kf: `database mydb; - - table a { - id int primary key, - #idx index(id_not_exist) - } - `, - err: parse.ErrUnknownColumn, - }, - { - // regression test for https://github.com/kwilteam/kwil-db/issues/896#issue-2423754035 - name: "invalid foreign key", - kf: `database glow; - - table data { - id uuid primary key, - owner_id uuid notnull, - foreign key (owner_id) references users(id) on update cascade - // TODO: add other columns - } - `, - err: parse.ErrUnknownTable, - }, - { - // regression test for https://github.com/kwilteam/kwil-db/issues/896#issue-2423754035 - name: "invalid foreign key", - kf: `database mydb; - - table a { - id int primary key - } - - table b { - id int primary key, - id2 int, - foreign key (id2) references a(id2) - } - `, - err: parse.ErrUnknownColumn, - }, - { - // regression test for https://github.com/kwilteam/kwil-db/issues/896#issuecomment-2243806123 - name: "max on non-numeric type", - kf: `database mydb; - - table a { - id uuid primary key, - age text max(100) - } - `, - err: parse.ErrColumnConstraint, - }, - { - // regression test for https://github.com/kwilteam/kwil-db/issues/896#issuecomment-2243835819 - name: "mex_len on blob", - kf: `database mydb; - - table a { - id uuid primary key, - bts blob maxlen(100) - } - `, - want: &types.Schema{ - Name: "mydb", - Tables: []*types.Table{ - { - Name: "a", - Columns: []*types.Column{ - { - Name: "id", - Type: types.UUIDType, - Attributes: []*types.Attribute{ - { - Type: types.PRIMARY_KEY, - }, - }, - }, - { - Name: "bts", - Type: types.BlobType, - Attributes: []*types.Attribute{ - { - Type: types.MAX_LENGTH, - Value: "100", - }, - }, - }, - }, - }, - }, - }, - }, - { - name: "conflict with function", - kf: `database mydb; - - table a { - id int primary key, - age int max(100) - } - - procedure 
max() public view {} - `, - err: parse.ErrReservedKeyword, - }, - } - - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - res, err := parse.ParseSchemaWithoutValidation([]byte(tt.kf)) - require.NoError(t, err) - if tt.err != nil { - parseErrs := res.ParseErrs.Errors() - if len(parseErrs) == 0 { - require.Fail(t, "expected parse errors") - } - - require.ErrorIs(t, parseErrs[0], tt.err) - if !tt.checkAfterErr { - return - } - } else { - require.NoError(t, res.ParseErrs.Err()) - if tt.checkAfterErr { - panic("cannot use checkAfterErr without an error") - } - } - - assertPositionsAreSet(t, res.ParsedActions) - assertPositionsAreSet(t, res.ParsedProcedures) - - require.EqualValues(t, tt.want, res.Schema) - - // we will also test that the schemas were properly cleaned. - // we test this by copying the schema to a new schema, cleaning the new schema, and comparing the two. - bts, err := json.Marshal(res.Schema) - require.NoError(t, err) - - var got2 types.Schema - err = json.Unmarshal(bts, &got2) - require.NoError(t, err) - - // since checkAfterErr means we expect a parser error, we shouldn't clean since - // it will likely fail since the schema is invalid anyways - if !tt.checkAfterErr { - err = got2.Clean() - require.NoError(t, err) - } - - got2.Owner = nil // unmarshal sets Owner to empty array, so we need to set it to nil to compare - - require.EqualValues(t, res.Schema, &got2) - }) - } -} - -// assertPositionsAreSet asserts that all positions in the ast are set. -func assertPositionsAreSet(t *testing.T, v any) { - parse.RecursivelyVisitPositions(v, func(gp parse.GetPositioner) { - pos := gp.GetPosition() - // if not set, this will tell us the struct - assert.True(t, pos.IsSet, "position is not set. struct type: %T", gp) - }) -} - -// some default tables and procedures for testing -var ( - tblUsers = &types.Table{ - Name: "users", - Columns: []*types.Column{ - { - Name: "id", - Type: types.IntType, - Attributes: []*types.Attribute{ - { - Type: types.PRIMARY_KEY, - }, - { - Type: types.NOT_NULL, - }, - }, - }, - { - Name: "username", - Type: types.TextType, - Attributes: []*types.Attribute{ - { - Type: types.NOT_NULL, - }, - { - Type: types.UNIQUE, - }, - { - Type: types.MIN_LENGTH, - Value: "5", - }, - { - Type: types.MAX_LENGTH, - Value: "32", - }, - }, - }, - }, - } - - tblPosts = &types.Table{ - Name: "posts", - Columns: []*types.Column{ - { - Name: "id", - Type: types.IntType, - Attributes: []*types.Attribute{ - { - Type: types.PRIMARY_KEY, - }, - }, - }, - { - Name: "author_id", - Type: types.IntType, - Attributes: []*types.Attribute{ - { - Type: types.NOT_NULL, - }, - }, - }, - }, - Indexes: []*types.Index{ - { - Name: "idx", - Type: types.BTREE, - Columns: []string{"author_id"}, - }, - }, - ForeignKeys: []*types.ForeignKey{ - { - ChildKeys: []string{"author_id"}, - ParentTable: "users", - ParentKeys: []string{"id"}, - Actions: []*types.ForeignKeyAction{ - { - On: types.ON_DELETE, - Do: types.DO_CASCADE, - }, - { - On: types.ON_UPDATE, - Do: types.DO_CASCADE, - }, - }, - }, - }, - } - - procGetAllUserIds = &types.Procedure{ - Name: "get_all_user_ids", - Public: true, - Modifiers: []types.Modifier{ - types.ModifierView, - }, - Returns: &types.ProcedureReturn{ - IsTable: true, - Fields: []*types.NamedType{ - { - Name: "id", - Type: types.IntType, - }, - }, - }, - Body: `return select id from users;`, - } - - foreignProcGetUser = &types.ForeignProcedure{ - Name: "get_user_id", - Parameters: []*types.DataType{ - types.TextType, - }, - Returns: &types.ProcedureReturn{ - 
Fields: []*types.NamedType{ - { - Name: "id", - Type: types.IntType, - }, - }, - }, - } - - foreignProcCreateUser = &types.ForeignProcedure{ - Name: "foreign_create_user", - Parameters: []*types.DataType{ - types.IntType, - types.TextType, - }, - } -) - -func Test_Procedure(t *testing.T) { - type testCase struct { - name string - proc string - // inputs should be a map of $var to type - inputs map[string]*types.DataType - // returns is the expected return type - // it can be left nil if there is no return type. - returns *types.ProcedureReturn - // want is the desired output. - // Errs should be left nil for this test, - // and passed in the test case. - // inputs will automatically be added - // to the expected output as variables. - want *parse.ProcedureParseResult - err error - } - - tests := []testCase{ - { - name: "simple procedure", - proc: `$a int := 1;`, - want: &parse.ProcedureParseResult{ - Variables: map[string]*types.DataType{ - "$a": types.IntType, - }, - AST: []parse.ProcedureStmt{ - &parse.ProcedureStmtAssign{ - Variable: exprVar("$a"), - Type: types.IntType, - Value: exprLit(1), - }, - }, - }, - }, - { - name: "procedure applies default ordering to selects", - proc: ` - select * from users; - `, - want: &parse.ProcedureParseResult{ - AST: []parse.ProcedureStmt{ - &parse.ProcedureStmtSQL{ - SQL: &parse.SQLStatement{ - SQL: &parse.SelectStatement{ - SelectCores: []*parse.SelectCore{ - { - Columns: []parse.ResultColumn{ - &parse.ResultColumnWildcard{}, - }, - From: &parse.RelationTable{ - Table: "users", - }, - }, - }, - Ordering: []*parse.OrderingTerm{ - { - Expression: exprColumn("users", "id"), - }, - }, - }, - }, - }, - }, - }, - }, - { - name: "for loop", - proc: ` - $found := false; - for $row in SELECT * FROM users { - $found := true; - INSERT INTO posts (id, author_id) VALUES ($row.id, $row.username::int); - } - if !$found { - error('no users found'); - } - `, - want: &parse.ProcedureParseResult{ - CompoundVariables: map[string]struct{}{ - "$row": {}, - }, - Variables: map[string]*types.DataType{ - "$found": types.BoolType, - }, - AST: []parse.ProcedureStmt{ - &parse.ProcedureStmtAssign{ - Variable: exprVar("$found"), - Value: exprLit(false), - }, - &parse.ProcedureStmtForLoop{ - Receiver: exprVar("$row"), - LoopTerm: &parse.LoopTermSQL{ - Statement: &parse.SQLStatement{ - SQL: &parse.SelectStatement{ - SelectCores: []*parse.SelectCore{ - { - Columns: []parse.ResultColumn{ - &parse.ResultColumnWildcard{}, - }, - From: &parse.RelationTable{ - Table: "users", - }, - }, - }, - // apply default ordering - Ordering: []*parse.OrderingTerm{ - { - Expression: &parse.ExpressionColumn{ - Table: "users", - Column: "id", - }, - }, - }, - }, - }, - }, - Body: []parse.ProcedureStmt{ - &parse.ProcedureStmtAssign{ - Variable: exprVar("$found"), - Value: exprLit(true), - }, - &parse.ProcedureStmtSQL{ - SQL: &parse.SQLStatement{ - SQL: &parse.InsertStatement{ - Table: "posts", - Columns: []string{"id", "author_id"}, - Values: [][]parse.Expression{ - { - &parse.ExpressionFieldAccess{ - Record: exprVar("$row"), - Field: "id", - }, - &parse.ExpressionFieldAccess{ - Record: exprVar("$row"), - Field: "username", - Typecastable: parse.Typecastable{ - TypeCast: types.IntType, - }, - }, - }, - }, - }, - }, - }, - }, - }, - &parse.ProcedureStmtIf{ - IfThens: []*parse.IfThen{ - { - If: &parse.ExpressionUnary{ - Operator: parse.UnaryOperatorNot, - Expression: exprVar("$found"), - }, - Then: []parse.ProcedureStmt{ - &parse.ProcedureStmtCall{ - Call: &parse.ExpressionFunctionCall{ - Name: "error", - 
Args: []parse.Expression{ - exprLit("no users found"), - }, - }, - }, - }, - }, - }, - }, - }, - }, - }, - { - name: "arrays", - proc: ` - $arr2 := array_append($arr, 2); - $arr3 int[] := array_prepend(3, $arr2); - $arr4 := [4,5]; - - $arr5 := array_cat($arr3, $arr4); - $arr6 := $arr5[1:2]; - $arr7 := $arr5[1:]; - `, - inputs: map[string]*types.DataType{ - "$arr": types.ArrayType(types.IntType), - }, - want: &parse.ProcedureParseResult{ - Variables: map[string]*types.DataType{ - "$arr2": types.ArrayType(types.IntType), - "$arr3": types.ArrayType(types.IntType), - "$arr4": types.ArrayType(types.IntType), - "$arr5": types.ArrayType(types.IntType), - "$arr6": types.ArrayType(types.IntType), - "$arr7": types.ArrayType(types.IntType), - }, - AST: []parse.ProcedureStmt{ - &parse.ProcedureStmtCall{ - Receivers: []*parse.ExpressionVariable{ - exprVar("$arr2"), - }, - Call: &parse.ExpressionFunctionCall{ - Name: "array_append", - Args: []parse.Expression{ - exprVar("$arr"), - exprLit(2), - }, - }, - }, - &parse.ProcedureStmtAssign{ - Variable: exprVar("$arr3"), - Type: types.ArrayType(types.IntType), - Value: &parse.ExpressionFunctionCall{ - Name: "array_prepend", - Args: []parse.Expression{ - exprLit(3), - exprVar("$arr2"), - }, - }, - }, - &parse.ProcedureStmtAssign{ - Variable: exprVar("$arr4"), - Value: &parse.ExpressionMakeArray{ - Values: []parse.Expression{ - exprLit(4), - exprLit(5), - }, - }, - }, - &parse.ProcedureStmtCall{ - Receivers: []*parse.ExpressionVariable{ - exprVar("$arr5"), - }, - Call: &parse.ExpressionFunctionCall{ - Name: "array_cat", - Args: []parse.Expression{ - exprVar("$arr3"), - exprVar("$arr4"), - }, - }, - }, - &parse.ProcedureStmtAssign{ - Variable: exprVar("$arr6"), - Value: &parse.ExpressionArrayAccess{ - Array: exprVar("$arr5"), - FromTo: [2]parse.Expression{ - exprLit(1), - exprLit(2), - }, - }, - }, - &parse.ProcedureStmtAssign{ - Variable: exprVar("$arr7"), - Value: &parse.ExpressionArrayAccess{ - Array: exprVar("$arr5"), - FromTo: [2]parse.Expression{ - exprLit(1), - }, - }, - }, - }, - }, - }, - { - name: "loop", - proc: ` - $arr := [1,2,3]; - $rec int; - for $i in $arr { - $rec := $i; - } - `, - want: &parse.ProcedureParseResult{ - Variables: map[string]*types.DataType{ - "$arr": types.ArrayType(types.IntType), - "$rec": types.IntType, - "$i": types.IntType, - }, - AST: []parse.ProcedureStmt{ - &parse.ProcedureStmtAssign{ - Variable: exprVar("$arr"), - Value: &parse.ExpressionMakeArray{ - Values: []parse.Expression{ - exprLit(1), - exprLit(2), - exprLit(3), - }, - }, - }, - &parse.ProcedureStmtDeclaration{ - Variable: exprVar("$rec"), - Type: types.IntType, - }, - &parse.ProcedureStmtForLoop{ - Receiver: exprVar("$i"), - LoopTerm: &parse.LoopTermVariable{ - Variable: exprVar("$arr"), - }, - Body: []parse.ProcedureStmt{ - &parse.ProcedureStmtAssign{ - Variable: exprVar("$rec"), - Value: exprVar("$i"), - }, - }, - }, - }, - }, - }, - { - name: "and/or", - proc: ` - if $a and $b or $c {} - `, - inputs: map[string]*types.DataType{ - "$a": types.BoolType, - "$b": types.BoolType, - "$c": types.BoolType, - }, - want: &parse.ProcedureParseResult{ - AST: []parse.ProcedureStmt{ - &parse.ProcedureStmtIf{ - IfThens: []*parse.IfThen{ - { - If: &parse.ExpressionLogical{ - Left: &parse.ExpressionLogical{ - Left: exprVar("$a"), - Operator: parse.LogicalOperatorAnd, - Right: exprVar("$b"), - }, - Operator: parse.LogicalOperatorOr, - Right: exprVar("$c"), - }, - }, - }, - }, - }, - }, - }, - { - name: "is distinct", - proc: ` - $a := 1 is distinct from null; - `, - want: 
&parse.ProcedureParseResult{ - Variables: map[string]*types.DataType{ - "$a": types.BoolType, - }, - AST: []parse.ProcedureStmt{ - &parse.ProcedureStmtAssign{ - Variable: exprVar("$a"), - Value: &parse.ExpressionIs{ - Left: exprLit(1), - Right: &parse.ExpressionLiteral{ - Type: types.NullType, - }, - Distinct: true, - }, - }, - }, - }, - }, - { - name: "missing return values", - returns: &types.ProcedureReturn{ - Fields: []*types.NamedType{ - { - Name: "id", - Type: types.IntType, - }, - { - Name: "name", - Type: types.TextType, - }, - }, - }, - proc: `return 1;`, - err: parse.ErrReturn, - }, - { - name: "no return values", - returns: &types.ProcedureReturn{ - Fields: []*types.NamedType{{ - Name: "id", - Type: types.IntType, - }}, - }, - proc: `$a := 1;`, - err: parse.ErrReturn, - }, - { - name: "if/then missing return in one branch", - returns: &types.ProcedureReturn{ - Fields: []*types.NamedType{{ - Name: "id", - Type: types.IntType, - }}, - }, - proc: ` - if true { - return 1; - } else { - $a := 1; - } - `, - err: parse.ErrReturn, - }, - { - name: "for loop with if return", - returns: &types.ProcedureReturn{ - Fields: []*types.NamedType{{ - Name: "id", - Type: types.IntType, - }}, - }, - proc: ` - $arr := [1,2,3]; - for $i in $arr { - if $i == -1 { - break; - } - return $i; - } - `, - err: parse.ErrReturn, - }, - { - name: "nested for loop", - returns: &types.ProcedureReturn{ - Fields: []*types.NamedType{{ - Name: "id", - Type: types.IntType, - }}, - }, - proc: ` - $arr int[]; - for $i in $arr { - for $j in 1..$i { - break; // only breaks the inner loop - } - - return $i; // this will always exit on first $i iteration - } - `, - want: &parse.ProcedureParseResult{ - Variables: map[string]*types.DataType{ - "$arr": types.ArrayType(types.IntType), - "$i": types.IntType, - "$j": types.IntType, - }, - AST: []parse.ProcedureStmt{ - &parse.ProcedureStmtDeclaration{ - Variable: exprVar("$arr"), - Type: types.ArrayType(types.IntType), - }, - &parse.ProcedureStmtForLoop{ - Receiver: exprVar("$i"), - LoopTerm: &parse.LoopTermVariable{ - Variable: exprVar("$arr"), - }, - Body: []parse.ProcedureStmt{ - &parse.ProcedureStmtForLoop{ - Receiver: exprVar("$j"), - LoopTerm: &parse.LoopTermRange{ - Start: exprLit(1), - End: exprVar("$i"), - }, - Body: []parse.ProcedureStmt{ - &parse.ProcedureStmtBreak{}, - }, - }, - &parse.ProcedureStmtReturn{ - Values: []parse.Expression{exprVar("$i")}, - }, - }, - }, - }, - }, - }, - { - name: "returns table incorrect", - returns: &types.ProcedureReturn{ - Fields: []*types.NamedType{{ - Name: "id", - Type: types.IntType, - }}, - }, - proc: `return select id from users;`, // this is intentional- plpgsql treats this as a table return - err: parse.ErrReturn, - }, - { - name: "returns table correct", - returns: &types.ProcedureReturn{ - IsTable: true, - Fields: []*types.NamedType{{ - Name: "id", - Type: types.IntType, - }}, - }, - proc: `return select 1 as id;`, - want: &parse.ProcedureParseResult{ - AST: []parse.ProcedureStmt{ - &parse.ProcedureStmtReturn{ - SQL: &parse.SQLStatement{ - SQL: &parse.SelectStatement{ - SelectCores: []*parse.SelectCore{ - { - Columns: []parse.ResultColumn{ - &parse.ResultColumnExpression{ - Expression: exprLit(1), - Alias: "id", - }, - }, - }, - }, - }, - }, - }, - }, - }, - }, - { - name: "returns next incorrect", - returns: &types.ProcedureReturn{ - Fields: []*types.NamedType{{ - Name: "id", - Type: types.IntType, - }}, - }, - proc: `$a int[]; - for $row in $a { - return next $row; - } - `, - err: parse.ErrReturn, - }, - { - name: "returns 
next correct", - returns: &types.ProcedureReturn{ - IsTable: true, - Fields: []*types.NamedType{{ - Name: "id", - Type: types.IntType, - }}, - }, - proc: ` - for $row in select * from get_all_user_ids() { - return next $row.id; - } - `, - want: &parse.ProcedureParseResult{ - CompoundVariables: map[string]struct{}{ - "$row": {}, - }, - AST: []parse.ProcedureStmt{ - &parse.ProcedureStmtForLoop{ - Receiver: exprVar("$row"), - LoopTerm: &parse.LoopTermSQL{ - Statement: &parse.SQLStatement{ - SQL: &parse.SelectStatement{ - SelectCores: []*parse.SelectCore{ - { - Columns: []parse.ResultColumn{&parse.ResultColumnWildcard{}}, - From: &parse.RelationFunctionCall{ - FunctionCall: &parse.ExpressionFunctionCall{ - Name: "get_all_user_ids", - }, - }, - }, - }, - Ordering: []*parse.OrderingTerm{ - { - Expression: exprColumn("", "id"), - }, - }, - }, - }, - }, - Body: []parse.ProcedureStmt{ - &parse.ProcedureStmtReturnNext{ - Values: []parse.Expression{ - &parse.ExpressionFieldAccess{ - Record: exprVar("$row"), - Field: "id", - }, - }, - }, - }, - }, - }, - }, - }, - { - name: "error func exits", - proc: `error('error message');`, - returns: &types.ProcedureReturn{ - Fields: []*types.NamedType{{ - Name: "id", - Type: types.IntType, - }}, - }, - want: &parse.ProcedureParseResult{ - AST: []parse.ProcedureStmt{ - &parse.ProcedureStmtCall{ - Call: &parse.ExpressionFunctionCall{ - Name: "error", - Args: []parse.Expression{ - exprLit("error message"), - }, - }, - }, - }, - }, - }, - { - // this tests for regression on a previously known bug - name: "foreign procedure returning nothing to a variable", - returns: &types.ProcedureReturn{ - Fields: []*types.NamedType{ - { - Name: "id", - Type: types.IntType, - }, - }, - }, - proc: ` - return foreign_create_user['xbd', 'create_user'](1, 'user1'); - `, - err: parse.ErrType, - }, - { - // regression test for a previously known bug - name: "calling a procedure that returns nothing works fine", - proc: ` - foreign_create_user['xbd', 'create_user'](1, 'user1'); - `, - want: &parse.ProcedureParseResult{ - AST: []parse.ProcedureStmt{ - &parse.ProcedureStmtCall{ - Call: &parse.ExpressionForeignCall{ - Name: "foreign_create_user", - ContextualArgs: []parse.Expression{ - exprLit("xbd"), - exprLit("create_user"), - }, - Args: []parse.Expression{ - exprLit(1), - exprLit("user1"), - }, - }, - }, - }, - }, - }, - { - // this is a regression test for a previous bug - name: "discarding return values of a function is ok", - proc: `abs(-1);`, - want: &parse.ProcedureParseResult{ - AST: []parse.ProcedureStmt{ - &parse.ProcedureStmtCall{ - Call: &parse.ExpressionFunctionCall{ - Name: "abs", - Args: []parse.Expression{ - exprLit(-1), - }, - }, - }, - }, - }, - }, - { - name: "sum types - failure", - proc: ` - $sum := 0; - for $row in select sum(id) as id from users { - $sum := $sum + $row.id; - } - `, - // this should error, since sum returns numeric - err: parse.ErrType, - }, - { - name: "sum types - success", - proc: ` - $sum decimal(1000,0); - for $row in select sum(id) as id from users { - $sum := $sum + $row.id; - } - `, - want: &parse.ProcedureParseResult{ - Variables: map[string]*types.DataType{ - "$sum": mustNewDecimal(1000, 0), - }, - CompoundVariables: map[string]struct{}{ - "$row": {}, - }, - AST: []parse.ProcedureStmt{ - &parse.ProcedureStmtDeclaration{ - Variable: exprVar("$sum"), - Type: mustNewDecimal(1000, 0), - }, - &parse.ProcedureStmtForLoop{ - Receiver: exprVar("$row"), - LoopTerm: &parse.LoopTermSQL{ - Statement: &parse.SQLStatement{ - SQL: &parse.SelectStatement{ 
- SelectCores: []*parse.SelectCore{ - { - Columns: []parse.ResultColumn{ - &parse.ResultColumnExpression{ - Expression: &parse.ExpressionFunctionCall{ - Name: "sum", - Args: []parse.Expression{ - exprColumn("", "id"), - }, - }, - Alias: "id", - }, - }, - From: &parse.RelationTable{ - Table: "users", - }, - }, - }, - // If there is an aggregate clause with no group by, then no ordering is applied. - }, - }, - }, - Body: []parse.ProcedureStmt{ - &parse.ProcedureStmtAssign{ - Variable: exprVar("$sum"), - Value: &parse.ExpressionArithmetic{ - Left: exprVar("$sum"), - Operator: parse.ArithmeticOperatorAdd, - Right: &parse.ExpressionFieldAccess{Record: exprVar("$row"), Field: "id"}, - }, - }, - }, - }, - }, - }, - }, - { - // this is a regression test for a previous bug - name: "adding arrays", - proc: ` - $arr1 := [1,2,3]; - $arr2 := [4,5,6]; - $arr3 := $arr1 + $arr2; - `, - err: parse.ErrType, - }, - { - // this is a regression test for a previous bug - name: "early return", - proc: ` - return; - `, - want: &parse.ProcedureParseResult{ - AST: []parse.ProcedureStmt{ - &parse.ProcedureStmtReturn{}, - }, - }, - }, - { - name: "variable scoping", - proc: ` - for $i in 1..10 { - } - $j := $i; - `, - err: parse.ErrUndeclaredVariable, - }, - { - name: "scoping 2", - proc: ` - for $i in 1..10 { - $j := $i; - } - $k := $j; - `, - err: parse.ErrUndeclaredVariable, - }, - { - name: "if scoping", - proc: ` - if true { - $i := 1; - } - $j := $i; - `, - err: parse.ErrUndeclaredVariable, - }, - { - name: "else scoping", - proc: ` - if false { - } else { - $i := 1; - } - $j := $i; - `, - err: parse.ErrUndeclaredVariable, - }, - { // regression test - name: "equals order of operations", - proc: ` - $a := 1+2 == 3; - `, - want: &parse.ProcedureParseResult{ - Variables: map[string]*types.DataType{ - "$a": types.BoolType, - }, - AST: []parse.ProcedureStmt{ - &parse.ProcedureStmtAssign{ - Variable: exprVar("$a"), - Value: &parse.ExpressionComparison{ - Left: &parse.ExpressionArithmetic{Left: exprLit(1), Operator: parse.ArithmeticOperatorAdd, Right: exprLit(2)}, - Operator: parse.ComparisonOperatorEqual, - Right: exprLit(3), - }, - }, - }, - }, - }, - { - // regression test https://github.com/kwilteam/kwil-db/pull/947 - name: "string literal", - proc: ` - $a := '\'hello\''; - `, - want: &parse.ProcedureParseResult{ - Variables: map[string]*types.DataType{ - "$a": types.TextType, - }, - AST: []parse.ProcedureStmt{ - &parse.ProcedureStmtAssign{ - Variable: exprVar("$a"), - Value: exprLit("\\'hello\\'"), - }, - }, - }, - }, - } - - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - var params []*types.ProcedureParameter - for _, in := range order.OrderMap(tt.inputs) { - params = append(params, &types.ProcedureParameter{ - Name: in.Key, - Type: in.Value, - }) - } - - proc := &types.Procedure{ - Name: "test", - Parameters: params, - Public: true, - Returns: tt.returns, - Body: tt.proc, - } - - res, err := parse.ParseProcedure(proc, &types.Schema{ - Name: "mydb", - Tables: []*types.Table{ - tblUsers, - tblPosts, - }, - Procedures: []*types.Procedure{ - proc, - procGetAllUserIds, - }, - ForeignProcedures: []*types.ForeignProcedure{ - foreignProcGetUser, - foreignProcCreateUser, - }, - }) - require.NoError(t, err) - - if tt.err != nil { - require.ErrorIs(t, res.ParseErrs.Err(), tt.err) - return - } - require.NoError(t, res.ParseErrs.Err()) - - assertPositionsAreSet(t, res.AST) - - // set res errs to nil to match test - res.ParseErrs = nil - - if tt.want.CompoundVariables == nil { - tt.want.CompoundVariables 
= make(map[string]struct{}) - } - if tt.want.Variables == nil { - tt.want.Variables = make(map[string]*types.DataType) - } - - // add the inputs to the expected output - for k, v := range tt.inputs { - tt.want.Variables[k] = v - } - - if !deepCompare(tt.want, res) { - t.Errorf("unexpected output: %s", diff(tt.want, res)) - } - }) - } -} - -// exprVar makes an ExpressionVariable. -func exprVar(n string) *parse.ExpressionVariable { - if n[0] != '$' && n[0] != '@' { - panic("TEST ERROR: variable name must start with $ or @") - } - pref := parse.VariablePrefix(n[0]) - - return &parse.ExpressionVariable{ - Name: n[1:], - Prefix: pref, - } -} - -func mustNewDecimal(precision, scale uint16) *types.DataType { - dt, err := types.NewDecimalType(precision, scale) - if err != nil { - panic(err) - } - return dt -} - -// exprLit makes an ExpressionLiteral. -// it can only make strings and ints -func exprLit(v any) parse.Expression { - switch t := v.(type) { - case int: - isNeg := t < 0 - if isNeg { - t *= -1 - } - - liter := &parse.ExpressionLiteral{ - Type: types.IntType, - Value: int64(t), - Typecastable: parse.Typecastable{ - TypeCast: types.IntType, - }, - } - - if isNeg { - return &parse.ExpressionUnary{ - Operator: parse.UnaryOperatorNeg, - Expression: liter, - } - } - - return liter - case int64: - isNeg := t < 0 - if isNeg { - t *= -1 - } - - liter := &parse.ExpressionLiteral{ - Type: types.IntType, - Value: t, - Typecastable: parse.Typecastable{ - TypeCast: types.IntType, - }, - } - - if isNeg { - return &parse.ExpressionUnary{ - Operator: parse.UnaryOperatorNeg, - Expression: liter, - } - } - - return liter - case string: - return &parse.ExpressionLiteral{ - Type: types.TextType, - Value: t, - Typecastable: parse.Typecastable{ - TypeCast: types.TextType, - }, - } - case bool: - return &parse.ExpressionLiteral{ - Type: types.BoolType, - Value: t, - Typecastable: parse.Typecastable{ - TypeCast: types.BoolType, - }, - } - default: - panic("TEST ERROR: invalid type for literal") - } -} - -func exprFunctionCall(name string, args ...parse.Expression) *parse.ExpressionFunctionCall { - return &parse.ExpressionFunctionCall{ - Name: name, - Args: args, - } -} - -func Test_SQL(t *testing.T) { - type testCase struct { - name string - sql string - want *parse.SQLStatement - err error - } - - tests := []testCase{ - { - name: "simple select", - sql: "select *, id i, length(username) as name_len from users u where u.id = 1;", - want: &parse.SQLStatement{ - SQL: &parse.SelectStatement{ - SelectCores: []*parse.SelectCore{ - { - Columns: []parse.ResultColumn{ - &parse.ResultColumnWildcard{}, - &parse.ResultColumnExpression{ - Expression: exprColumn("", "id"), - Alias: "i", - }, - &parse.ResultColumnExpression{ - Expression: &parse.ExpressionFunctionCall{ - Name: "length", - Args: []parse.Expression{ - exprColumn("", "username"), - }, - }, - Alias: "name_len", - }, - }, - From: &parse.RelationTable{ - Table: "users", - Alias: "u", - }, - Where: &parse.ExpressionComparison{ - Left: exprColumn("u", "id"), - Operator: parse.ComparisonOperatorEqual, - Right: exprLit(1), - }, - }, - }, - // apply default ordering - Ordering: []*parse.OrderingTerm{ - { - Expression: exprColumn("u", "id"), - }, - }, - }, - }, - }, - { - name: "insert", - sql: `insert into posts (id, author_id) values (1, 1), - (2, (SELECT id from users where username = 'user2' LIMIT 1));`, - want: &parse.SQLStatement{ - SQL: &parse.InsertStatement{ - Table: "posts", - Columns: []string{"id", "author_id"}, - Values: [][]parse.Expression{ - { - 
exprLit(1), - exprLit(1), - }, - { - exprLit(2), - &parse.ExpressionSubquery{ - Subquery: &parse.SelectStatement{ - SelectCores: []*parse.SelectCore{ - { - Columns: []parse.ResultColumn{ - &parse.ResultColumnExpression{ - Expression: exprColumn("", "id"), - }, - }, - From: &parse.RelationTable{ - Table: "users", - }, - Where: &parse.ExpressionComparison{ - Left: exprColumn("", "username"), - Operator: parse.ComparisonOperatorEqual, - Right: exprLit("user2"), - }, - }, - }, - Limit: exprLit(1), - // apply default ordering - Ordering: []*parse.OrderingTerm{ - { - Expression: exprColumn("users", "id"), - }, - }, - }, - }, - }, - }, - }, - }, - }, - { - name: "select join", - sql: `SELECT p.id as id, u.username as author FROM posts AS p - INNER JOIN users AS u ON p.author_id = u.id - WHERE u.username = 'satoshi' order by u.username DESC NULLS LAST;`, - want: &parse.SQLStatement{ - SQL: &parse.SelectStatement{ - SelectCores: []*parse.SelectCore{ - { - Columns: []parse.ResultColumn{ - &parse.ResultColumnExpression{ - Expression: exprColumn("p", "id"), - Alias: "id", - }, - &parse.ResultColumnExpression{ - Expression: exprColumn("u", "username"), - Alias: "author", - }, - }, - From: &parse.RelationTable{ - Table: "posts", - Alias: "p", - }, - Joins: []*parse.Join{ - { - Type: parse.JoinTypeInner, - Relation: &parse.RelationTable{ - Table: "users", - Alias: "u", - }, - On: &parse.ExpressionComparison{ - Left: exprColumn("p", "author_id"), - Operator: parse.ComparisonOperatorEqual, - Right: exprColumn("u", "id"), - }, - }, - }, - Where: &parse.ExpressionComparison{ - Left: exprColumn("u", "username"), - Operator: parse.ComparisonOperatorEqual, - Right: exprLit("satoshi"), - }, - }, - }, - - Ordering: []*parse.OrderingTerm{ - { - Expression: exprColumn("u", "username"), - Order: parse.OrderTypeDesc, - Nulls: parse.NullOrderLast, - }, - // apply default ordering - { - Expression: exprColumn("p", "id"), - }, - { - Expression: exprColumn("u", "id"), - }, - }, - }, - }, - }, - { - name: "delete", - sql: "delete from users where id = 1;", - want: &parse.SQLStatement{ - SQL: &parse.DeleteStatement{ - Table: "users", - Where: &parse.ExpressionComparison{ - Left: exprColumn("", "id"), - Operator: parse.ComparisonOperatorEqual, - Right: exprLit(1), - }, - }, - }, - }, - { - name: "upsert with conflict - success", - sql: `INSERT INTO users (id) VALUES (1) ON CONFLICT (id) DO UPDATE SET id = users.id + excluded.id;`, - want: &parse.SQLStatement{ - SQL: &parse.InsertStatement{ - Table: "users", - Columns: []string{"id"}, - Values: [][]parse.Expression{ - { - exprLit(1), - }, - }, - Upsert: &parse.UpsertClause{ - ConflictColumns: []string{"id"}, - DoUpdate: []*parse.UpdateSetClause{ - { - Column: "id", - Value: &parse.ExpressionArithmetic{ - Left: exprColumn("users", "id"), - Operator: parse.ArithmeticOperatorAdd, - Right: exprColumn("excluded", "id"), - }, - }, - }, - }, - }, - }, - }, - { - name: "upsert with conflict - ambiguous error", - sql: `INSERT INTO users (id) VALUES (1) ON CONFLICT (id) DO UPDATE SET id = id + 1;`, - err: parse.ErrAmbiguousConflictTable, - }, - { - name: "select against unnamed procedure", - sql: "select * from get_all_user_ids();", - want: &parse.SQLStatement{ - SQL: &parse.SelectStatement{ - SelectCores: []*parse.SelectCore{ - { - Columns: []parse.ResultColumn{ - &parse.ResultColumnWildcard{}, - }, - From: &parse.RelationFunctionCall{ - FunctionCall: &parse.ExpressionFunctionCall{ - Name: "get_all_user_ids", - }, - }, - }, - }, - // apply default ordering - Ordering: 
[]*parse.OrderingTerm{ - { - Expression: exprColumn("", "id"), - }, - }, - }, - }, - }, - { - name: "select join with unnamed subquery", - sql: `SELECT p.id as id, u.username as author FROM posts AS p - INNER JOIN (SELECT id as uid FROM users WHERE id = 1) ON p.author_id = uid;`, - err: parse.ErrUnnamedJoin, - }, - { - name: "compound select", - sql: `SELECT * FROM users union SELECT * FROM users;`, - want: &parse.SQLStatement{ - SQL: &parse.SelectStatement{ - SelectCores: []*parse.SelectCore{ - { - Columns: []parse.ResultColumn{ - &parse.ResultColumnWildcard{}, - }, - From: &parse.RelationTable{ - Table: "users", - }, - }, - { - Columns: []parse.ResultColumn{ - &parse.ResultColumnWildcard{}, - }, - From: &parse.RelationTable{ - Table: "users", - }, - }, - }, - CompoundOperators: []parse.CompoundOperator{ - parse.CompoundOperatorUnion, - }, - // apply default ordering - Ordering: []*parse.OrderingTerm{ - { - Expression: exprColumn("", "id"), - }, - { - Expression: exprColumn("", "username"), - }, - }, - }, - }, - }, - { - name: "compound with mismatched shape", - sql: `SELECT username, id FROM users union SELECT id, username FROM users;`, - err: parse.ErrResultShape, - }, - { - name: "compound selecting 1 column", - sql: `SELECT username FROM users union SELECT username FROM users;`, - want: &parse.SQLStatement{ - SQL: &parse.SelectStatement{ - SelectCores: []*parse.SelectCore{ - { - Columns: []parse.ResultColumn{ - &parse.ResultColumnExpression{ - Expression: exprColumn("", "username"), - }, - }, - From: &parse.RelationTable{ - Table: "users", - }, - }, - { - Columns: []parse.ResultColumn{ - &parse.ResultColumnExpression{ - Expression: exprColumn("", "username"), - }, - }, - From: &parse.RelationTable{ - Table: "users", - }, - }, - }, - CompoundOperators: []parse.CompoundOperator{parse.CompoundOperatorUnion}, - // apply default ordering - Ordering: []*parse.OrderingTerm{ - { - Expression: exprColumn("", "username"), - }, - }, - }, - }, - }, - { - name: "group by", - sql: `SELECT u.username, count(u.id) FROM users as u GROUP BY u.username HAVING count(u.id) > 1;`, - want: &parse.SQLStatement{ - SQL: &parse.SelectStatement{ - SelectCores: []*parse.SelectCore{ - { - Columns: []parse.ResultColumn{ - &parse.ResultColumnExpression{ - Expression: exprColumn("u", "username"), - }, - &parse.ResultColumnExpression{ - Expression: &parse.ExpressionFunctionCall{ - Name: "count", - Args: []parse.Expression{ - exprColumn("u", "id"), - }, - }, - }, - }, - From: &parse.RelationTable{ - Table: "users", - Alias: "u", - }, - GroupBy: []parse.Expression{ - exprColumn("u", "username"), - }, - Having: &parse.ExpressionComparison{ - Left: &parse.ExpressionFunctionCall{ - Name: "count", - Args: []parse.Expression{ - exprColumn("u", "id"), - }, - }, - Operator: parse.ComparisonOperatorGreaterThan, - Right: exprLit(1), - }, - }, - }, - // apply default ordering - Ordering: []*parse.OrderingTerm{ - { - Expression: exprColumn("u", "username"), - }, - }, - }, - }, - }, - { - name: "group by with having, having is in group by clause", - // there's a much easier way to write this query, but this is just to test the parser - sql: `SELECT username FROM users GROUP BY username HAVING length(username) > 1;`, - want: &parse.SQLStatement{ - SQL: &parse.SelectStatement{ - SelectCores: []*parse.SelectCore{ - { - Columns: []parse.ResultColumn{ - &parse.ResultColumnExpression{ - Expression: exprColumn("", "username"), - }, - }, - From: &parse.RelationTable{ - Table: "users", - }, - GroupBy: []parse.Expression{ - exprColumn("", 
"username"), - }, - Having: &parse.ExpressionComparison{ - Left: &parse.ExpressionFunctionCall{ - Name: "length", - Args: []parse.Expression{ - exprColumn("", "username"), - }, - }, - Operator: parse.ComparisonOperatorGreaterThan, - Right: exprLit(1), - }, - }, - }, - // apply default ordering - Ordering: []*parse.OrderingTerm{ - { - Expression: exprColumn("users", "username"), - }, - }, - }, - }, - }, - { - name: "group by with having, invalid column without aggregate", - sql: `SELECT u.username, count(u.id) FROM users as u GROUP BY u.username HAVING u.id > 1;`, - err: parse.ErrAggregate, - }, - { - name: "compound select with group by", - sql: `SELECT u.username, count(u.id) FROM users as u GROUP BY u.username HAVING count(u.id) > 1 UNION SELECT u.username, count(u.id) FROM users as u GROUP BY u.username HAVING count(u.id) > 1;`, - err: parse.ErrAggregate, - }, - { - name: "aggregate with no group by returns many columns", - sql: `SELECT count(u.id), u.username FROM users as u;`, - err: parse.ErrAggregate, - }, - { - name: "aggregate with no group by returns one column", - sql: `SELECT count(*) FROM users;`, - want: &parse.SQLStatement{ - SQL: &parse.SelectStatement{ - SelectCores: []*parse.SelectCore{ - { - Columns: []parse.ResultColumn{ - &parse.ResultColumnExpression{ - Expression: &parse.ExpressionFunctionCall{ - Name: "count", - Star: true, - }, - }, - }, - From: &parse.RelationTable{ - Table: "users", - }, - }, - }, - }, - }, - }, - { - name: "aggregate with no group by and ordering fails", - sql: `SELECT count(*) FROM users order by count(*) DESC;`, - err: parse.ErrAggregate, - }, - { - name: "ordering for subqueries", - sql: `SELECT u.username, p.id FROM (SELECT * FROM users) as u inner join (SELECT * FROM posts) as p on u.id = p.author_id;`, - want: &parse.SQLStatement{ - SQL: &parse.SelectStatement{ - SelectCores: []*parse.SelectCore{ - { - Columns: []parse.ResultColumn{ - &parse.ResultColumnExpression{ - Expression: exprColumn("u", "username"), - }, - &parse.ResultColumnExpression{ - Expression: exprColumn("p", "id"), - }, - }, - From: &parse.RelationSubquery{ - Subquery: &parse.SelectStatement{ - SelectCores: []*parse.SelectCore{ - { - Columns: []parse.ResultColumn{ - &parse.ResultColumnWildcard{}, - }, - From: &parse.RelationTable{ - Table: "users", - }, - }, - }, - Ordering: []*parse.OrderingTerm{ - { - Expression: exprColumn("users", "id"), - }, - }, - }, - Alias: "u", - }, - Joins: []*parse.Join{ - { - Type: parse.JoinTypeInner, - Relation: &parse.RelationSubquery{ - Subquery: &parse.SelectStatement{ - SelectCores: []*parse.SelectCore{ - { - Columns: []parse.ResultColumn{ - &parse.ResultColumnWildcard{}, - }, - From: &parse.RelationTable{ - Table: "posts", - }, - }, - }, - Ordering: []*parse.OrderingTerm{ - { - Expression: exprColumn("posts", "id"), - }, - }, - }, - Alias: "p", - }, - On: &parse.ExpressionComparison{ - Left: exprColumn("u", "id"), - Operator: parse.ComparisonOperatorEqual, - Right: exprColumn("p", "author_id"), - }, - }, - }, - }, - }, - Ordering: []*parse.OrderingTerm{ - { - Expression: exprColumn("u", "id"), - }, - { - Expression: exprColumn("u", "username"), - }, - { - Expression: exprColumn("p", "id"), - }, - { - Expression: exprColumn("p", "author_id"), - }, - }, - }, - }, - }, - { - name: "select against subquery with table join", - sql: `SELECT u.username, p.id FROM (SELECT * FROM users) inner join posts as p on users.id = p.author_id;`, - err: parse.ErrUnnamedJoin, - }, - { - name: "default ordering on procedure call", - sql: `SELECT * FROM 
get_all_user_ids();`, - want: &parse.SQLStatement{ - SQL: &parse.SelectStatement{ - SelectCores: []*parse.SelectCore{ - { - Columns: []parse.ResultColumn{ - &parse.ResultColumnWildcard{}, - }, - From: &parse.RelationFunctionCall{ - FunctionCall: &parse.ExpressionFunctionCall{ - Name: "get_all_user_ids", - }, - }, - }, - }, - Ordering: []*parse.OrderingTerm{ - { - Expression: exprColumn("", "id"), - }, - }, - }, - }, - }, - { - name: "join against unnamed function call fails", - sql: `SELECT * FROM users inner join get_all_user_ids() on users.id = u.id;`, - err: parse.ErrUnnamedJoin, - }, - {name: "non utf-8", sql: "\xbd\xb2\x3d\xbc\x20\xe2\x8c\x98;", err: parse.ErrSyntax}, - { - // this select doesn't make much sense, however - // it is a regression test for a previously known bug - // https://github.com/kwilteam/kwil-db/pull/810 - name: "offset and limit", - sql: `SELECT * FROM users LIMIT id OFFSET id;`, - want: &parse.SQLStatement{ - SQL: &parse.SelectStatement{ - SelectCores: []*parse.SelectCore{ - { - Columns: []parse.ResultColumn{ - &parse.ResultColumnWildcard{}, - }, - From: &parse.RelationTable{ - Table: "users", - }, - }, - }, - Offset: exprColumn("", "id"), - Limit: exprColumn("", "id"), - // apply default ordering - Ordering: []*parse.OrderingTerm{ - { - Expression: exprColumn("users", "id"), - }, - }, - }, - }, - }, - { - // this is a regression test for a previous bug. - // when parsing just SQL, we can have unknown variables - name: "unknown variable is ok", - sql: `SELECT $id;`, - want: &parse.SQLStatement{ - SQL: &parse.SelectStatement{ - SelectCores: []*parse.SelectCore{ - { - Columns: []parse.ResultColumn{ - &parse.ResultColumnExpression{ - Expression: exprVar("$id"), - }, - }, - }, - }, - }, - }, - }, - { - name: "select JOIN, with no specified INNER/OUTER", - sql: `SELECT u.* FROM users as u - JOIN posts as p ON u.id = p.author_id;`, // default is INNER - want: &parse.SQLStatement{ - SQL: &parse.SelectStatement{ - SelectCores: []*parse.SelectCore{ - { - Columns: []parse.ResultColumn{ - &parse.ResultColumnWildcard{ - Table: "u", - }, - }, - From: &parse.RelationTable{ - Table: "users", - Alias: "u", - }, - Joins: []*parse.Join{ - { - Type: parse.JoinTypeInner, - Relation: &parse.RelationTable{ - Table: "posts", - Alias: "p", - }, - On: &parse.ExpressionComparison{ - Left: exprColumn("u", "id"), - Operator: parse.ComparisonOperatorEqual, - Right: exprColumn("p", "author_id"), - }, - }, - }, - }, - }, - // apply default ordering - Ordering: []*parse.OrderingTerm{ - { - Expression: exprColumn("u", "id"), - }, - { - Expression: exprColumn("p", "id"), - }, - }, - }, - }, - }, - { - // regression tests for a previous bug, where whitespace after - // the semicolon would cause the parser to add an extra semicolon - name: "whitespace after semicolon", - sql: "SELECT 1; ", - want: &parse.SQLStatement{ - SQL: &parse.SelectStatement{ - SelectCores: []*parse.SelectCore{ - { - Columns: []parse.ResultColumn{ - &parse.ResultColumnExpression{ - Expression: exprLit(1), - }, - }, - }, - }, - }, - }, - }, - { - name: "cte", - sql: `WITH cte AS (SELECT id FROM users) SELECT * FROM cte;`, - want: &parse.SQLStatement{ - CTEs: []*parse.CommonTableExpression{ - { - Name: "cte", - Query: &parse.SelectStatement{ - SelectCores: []*parse.SelectCore{ - { - Columns: []parse.ResultColumn{ - &parse.ResultColumnExpression{ - Expression: exprColumn("", "id"), - }, - }, - From: &parse.RelationTable{ - Table: "users", - }, - }, - }, - // apply default ordering - Ordering: []*parse.OrderingTerm{ - { - 
Expression: exprColumn("users", "id"), - }, - }, - }, - }, - }, - SQL: &parse.SelectStatement{ - SelectCores: []*parse.SelectCore{ - { - Columns: []parse.ResultColumn{ - &parse.ResultColumnWildcard{}, - }, - From: &parse.RelationTable{ - Table: "cte", - }, - }, - }, - // apply default ordering - Ordering: []*parse.OrderingTerm{ - { - Expression: exprColumn("cte", "id"), - }, - }, - }, - }, - }, - { - name: "cte with columns", - sql: `WITH cte (id2) AS (SELECT id FROM users) SELECT * FROM cte;`, - want: &parse.SQLStatement{ - CTEs: []*parse.CommonTableExpression{ - { - Name: "cte", - Columns: []string{"id2"}, - Query: &parse.SelectStatement{ - SelectCores: []*parse.SelectCore{ - { - Columns: []parse.ResultColumn{ - &parse.ResultColumnExpression{ - Expression: exprColumn("", "id"), - }, - }, - From: &parse.RelationTable{ - Table: "users", - }, - }, - }, - // apply default ordering - Ordering: []*parse.OrderingTerm{ - { - Expression: exprColumn("users", "id"), - }, - }, - }, - }, - }, - SQL: &parse.SelectStatement{ - SelectCores: []*parse.SelectCore{ - { - Columns: []parse.ResultColumn{ - &parse.ResultColumnWildcard{}, - }, - From: &parse.RelationTable{ - Table: "cte", - }, - }, - }, - // apply default ordering - Ordering: []*parse.OrderingTerm{ - { - Expression: exprColumn("cte", "id2"), - }, - }, - }, - }, - }, - } - - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - res, err := parse.ParseSQL(tt.sql, &types.Schema{ - Name: "mydb", - Tables: []*types.Table{ - tblUsers, - tblPosts, - }, - Procedures: []*types.Procedure{ - procGetAllUserIds, - }, - }, false) - require.NoError(t, err) - - if res.ParseErrs.Err() != nil { - if tt.err == nil { - t.Errorf("unexpected error: %v", res.ParseErrs.Err()) - } else { - require.ErrorIs(t, res.ParseErrs.Err(), tt.err) - } - - return - } - - assertPositionsAreSet(t, res.AST) - - if !deepCompare(tt.want, res.AST) { - t.Errorf("unexpected AST:%s", diff(tt.want, res.AST)) - } - }) - } -} - -func exprColumn(t, c string) *parse.ExpressionColumn { - return &parse.ExpressionColumn{ - Table: t, - Column: c, - } -} - -// deepCompare deep compares the values of two nodes. -// It ignores the parseTypes.Node field. -func deepCompare(node1, node2 any) bool { - // we return true for the parseTypes.Node field, - // we also need to ignore the unexported "schema" fields - return cmp.Equal(node1, node2, cmpOpts()...) -} - -// diff returns the diff between two nodes. -func diff(node1, node2 any) string { - return cmp.Diff(node1, node2, cmpOpts()...) 
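// Note: cmpOpts (defined just below) installs a Comparer that treats any two
// parse.Position values as equal, so deepCompare and diff compare AST shape
// rather than source locations.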
-} - -func cmpOpts() []cmp.Option { - return []cmp.Option{ - cmp.AllowUnexported( - parse.ExpressionLiteral{}, - parse.ExpressionFunctionCall{}, - parse.ExpressionForeignCall{}, - parse.ExpressionVariable{}, - parse.ExpressionArrayAccess{}, - parse.ExpressionMakeArray{}, - parse.ExpressionFieldAccess{}, - parse.ExpressionParenthesized{}, - parse.ExpressionColumn{}, - parse.ExpressionSubquery{}, - parse.ProcedureStmtDeclaration{}, - parse.ProcedureStmtAssign{}, - parse.ProcedureStmtCall{}, - parse.ProcedureStmtForLoop{}, - parse.ProcedureStmtIf{}, - parse.ProcedureStmtSQL{}, - parse.ProcedureStmtBreak{}, - parse.ProcedureStmtReturn{}, - parse.ProcedureStmtReturnNext{}, - parse.LoopTermRange{}, - parse.LoopTermSQL{}, - parse.LoopTermVariable{}, - ), - cmp.Comparer(func(x, y parse.Position) bool { - return true - }), - } -} - -func Test_Actions(t *testing.T) { - type testcase struct { - name string - tables []*types.Table - action *types.Action - want *parse.ActionParseResult - err error - } - - tests := []testcase{ - { - name: "return value", - tables: []*types.Table{tableBalances}, - action: &types.Action{ - Name: "check_balance", - Public: false, - Modifiers: []types.Modifier{ - types.ModifierView, - }, - Body: "SELECT CASE WHEN balance < 10 THEN ERROR('insufficient balance') ELSE null END FROM balances WHERE wallet = @caller;", - }, - want: &parse.ActionParseResult{ - AST: []parse.ActionStmt{ - &parse.ActionStmtSQL{ - SQL: &parse.SQLStatement{ - SQL: &parse.SelectStatement{ - SelectCores: []*parse.SelectCore{ - { - Columns: []parse.ResultColumn{ - &parse.ResultColumnExpression{ - Expression: &parse.ExpressionCase{ - WhenThen: [][2]parse.Expression{ - { - &parse.ExpressionComparison{ - Left: exprColumn("", "balance"), - Operator: parse.ComparisonOperatorLessThan, - Right: exprLit(10), - }, - exprFunctionCall("error", exprLit("insufficient balance")), - }, - }, - Else: &parse.ExpressionLiteral{ - Value: nil, - Type: types.NullType, - }, - }, - }, - }, - From: &parse.RelationTable{ - Table: "balances", - }, - Where: &parse.ExpressionComparison{ - Left: exprColumn("", "wallet"), - Operator: parse.ComparisonOperatorEqual, - Right: exprVar("@caller"), - }, - }, - }, - Ordering: []*parse.OrderingTerm{ - { - Expression: exprColumn("balances", "wallet"), - }, - }, - }, - }, - }, - }, - }, - }, - { - name: "action in-line statement calls select", - action: &types.Action{ - Name: "check_balance", - Parameters: []string{"$arg"}, - Body: "$res = my_ext.my_method($arg[0]);", - }, - err: parse.ErrAssignment, - }, - } - - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - res, err := parse.ParseAction(tt.action, &types.Schema{ - Name: "mydb", - Tables: tt.tables, - Procedures: []*types.Procedure{ - procGetAllUserIds, - }, - }) - require.NoError(t, err) - - if tt.err != nil { - require.ErrorIs(t, res.ParseErrs.Err(), tt.err) - return - } - require.NoError(t, res.ParseErrs.Err()) - res.ParseErrs = nil - - assertPositionsAreSet(t, res.AST) - - if !deepCompare(tt.want, res) { - t.Errorf("unexpected output: %s", diff(tt.want, res)) - } - }) - } -} - -var tableBalances = &types.Table{ - Name: "balances", - Columns: []*types.Column{ - { - Name: "wallet", - Type: types.TextType, - Attributes: []*types.Attribute{ - { - Type: types.PRIMARY_KEY, - }, - }, - }, - { - Name: "balance", - Type: types.IntType, - }, - }, -} - -// this tests full end-to-end parsing of a schema, with full validation. 
-// It is mostly necessary to test for bugs that slip through the cracks -// of testing individual components. -func Test_FullParse(t *testing.T) { - type testcase struct { - name string - kf string - err error // if nil, no error is expected - } - - tests := []testcase{ - { - // this is a regression test for a previous bug where the parser would panic - // when a procedure had a return statement with no body. - name: "empty body with returns", - kf: `database proxy; -// admin simply tracks the schema admins -table admin { - address text primary key -} -// add_admin adds a new admin to the schema. -// only current admins can add new admins -procedure add_admin ($address text) public {} -procedure is_admin ($address text) public view returns (bool) {} - `, - err: parse.ErrReturn, - }, - } - - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - _, err := parse.Parse([]byte(tt.kf)) - if tt.err != nil { - require.ErrorIs(t, err, tt.err) - return - } - require.NoError(t, err) - }) - } -} diff --git a/parse/types.go b/parse/types.go deleted file mode 100644 index 5bae3e8e4..000000000 --- a/parse/types.go +++ /dev/null @@ -1,215 +0,0 @@ -package parse - -import ( - "github.com/antlr4-go/antlr/v4" - "github.com/kwilteam/kwil-db/core/types" -) - -// Position is a Position in the parse tree. It represents a range of line and column -// values in Kuneiform source code. -type Position struct { - // Set is true if the position of the Position has been set. - // This is useful for testing parsers. - IsSet bool `json:"-"` - StartLine int `json:"start_line"` - StartCol int `json:"start_col"` - EndLine int `json:"end_line"` - EndCol int `json:"end_col"` -} - -// Set sets the position of the Position based on the given parser rule context. -func (n *Position) Set(r antlr.ParserRuleContext) { - n.IsSet = true - n.StartLine = r.GetStart().GetLine() - n.StartCol = r.GetStart().GetColumn() - n.EndLine = r.GetStop().GetLine() - n.EndCol = r.GetStop().GetColumn() -} - -// SetToken sets the position of the Position based on the given token. -func (n *Position) SetToken(t antlr.Token) { - n.IsSet = true - n.StartLine = t.GetLine() - n.StartCol = t.GetColumn() - n.EndLine = t.GetLine() - n.EndCol = t.GetColumn() -} - -// GetPosition returns the Position. -// It is useful if the Position is embedded in another struct. -func (n *Position) GetPosition() *Position { - return n -} - -// Clear clears the position of the Position. -func (n *Position) Clear() { - n.IsSet = false - n.StartLine = 0 - n.StartCol = 0 - n.EndLine = 0 - n.EndCol = 0 -} - -// unaryNode creates a Position with the same start and end position. -func unaryNode(start, end int) *Position { - return &Position{ - StartLine: start, - StartCol: end, - EndLine: start, - EndCol: end, - } -} - -// MergeNodes merges two Positions into a single Position. -// It starts at the left Position and ends at the right Position. -func MergeNodes(left, right *Position) *Position { - return &Position{ - StartLine: left.StartLine, - StartCol: left.StartCol, - EndLine: right.EndLine, - EndCol: right.EndCol, - } -} - -// SchemaInfo contains information about a parsed schema -type SchemaInfo struct { - // Blocks maps declared block names to their Positions. - // Block names include: - // - tables - // - extensions - // - actions - // - procedures - // - foreign procedures - Blocks map[string]*Block `json:"blocks"` -} - -type Block struct { - Position - // AbsStart is the absolute start position of the block in the source code. 
- AbsStart int `json:"abs_start"` - // AbsEnd is the absolute end position of the block in the source code. - AbsEnd int `json:"abs_end"` -} - -// Relation represents a relation in a sql statement. -// It is meant to represent the shape of a relation, not the data. -type Relation struct { - Name string - // Attributes holds the attributes of the relation. - Attributes []*Attribute -} - -// ShapesMatch checks if the shapes of two relations match. -// It only checks types and order, and ignores the names of the attributes, -// as well as uniques and primary keys. -func (r *Relation) ShapesMatch(r2 *Relation) bool { - return ShapesMatch(r.Attributes, r2.Attributes) -} - -// FindAttribute finds an attribute by name. -// If it is not found, it returns nil and false -func (r *Relation) FindAttribute(name string) (*Attribute, bool) { - for _, a := range r.Attributes { - if a.Name == name { - return a, true - } - } - return nil, false -} - -// Copy returns a copy of the relation. -func (r *Relation) Copy() *Relation { - attrs := make([]*Attribute, len(r.Attributes)) - for i, a := range r.Attributes { - attrs[i] = &Attribute{ - Name: a.Name, - Type: a.Type.Copy(), - } - } - - return &Relation{ - Name: r.Name, - Attributes: attrs, - } -} - -// Attribute represents an attribute in a relation. -type Attribute struct { - Name string - Type *types.DataType -} - -// ShapesMatch checks if the shapes of two relations match. -// It only checks types and order, and ignores the names of the attributes, -// as well as uniques and primary keys. -func ShapesMatch(a1, a2 []*Attribute) bool { - if len(a1) != len(a2) { - return false - } - for i := range a1 { - if !a1[i].Type.EqualsStrict(a2[i].Type) { - return false - } - } - return true -} - -// Flatten flattens many relations into one unnamed relation. -// If there are column conflicts / ambiguities, it will return an error, -// and the name that caused the conflict. It will also discard any -// primary keys, and will unmark any unique columns. -func Flatten(rels ...*Relation) (res []*Attribute, col string, err error) { - var attrs []*Attribute - for _, r := range rels { - attrs = append(attrs, r.Attributes...) - } - - res, col, err = Coalesce(attrs...) - if err != nil { - return nil, col, err - } - - return res, "", nil -} - -// Coalesce coalesces sets of attributes. If there are ambiguities, it will -// return an error, and the name that caused the conflict. It will also discard -// any primary keys, and will unmark any unique columns. -func Coalesce(attrs ...*Attribute) (res []*Attribute, ambigousCol string, err error) { - colNames := make(map[string]struct{}) - - for _, a := range attrs { - // if unnamed, then we can just add and not worry about conflicts, - // since it cannot be referenced - if a.Name == "" { - res = append(res, &Attribute{ - Name: a.Name, - Type: a.Type.Copy(), - }) - - continue - } - - if _, ok := colNames[a.Name]; ok { - return nil, a.Name, ErrDuplicateResultColumnName - } - - colNames[a.Name] = struct{}{} - res = append(res, &Attribute{ - Name: a.Name, - Type: a.Type.Copy(), - }) - } - - return res, "", nil -} - -// findAttribute finds an attribute by name. 
-func findAttribute(attrs []*Attribute, name string) *Attribute { - for _, a := range attrs { - if a.Name == name { - return a - } - } - return nil -} diff --git a/parse/wasm/wasm.go b/parse/wasm/wasm.go deleted file mode 100644 index 422295d66..000000000 --- a/parse/wasm/wasm.go +++ /dev/null @@ -1,59 +0,0 @@ -//go:build js && wasm - -//go:generate env GOOS=js CGO_ENABLED=0 GOARCH=wasm go build -o kuneiform.wasm -ldflags "-s -w" -trimpath -tags netgo wasm.go -package main - -import ( - "encoding/json" - "fmt" - "syscall/js" - - "github.com/kwilteam/kwil-db/parse" -) - -func parseAndMarshal(input string) (jsonStr string, err error) { - schema, err := parse.ParseAndValidate([]byte(input)) - if err != nil { - return "", err - } - - // remove parsed action and procedure asts - schema.ParsedActions = nil - schema.ParsedProcedures = nil - - jsonBytes, err := json.Marshal(schema) - if err != nil { - return "", err - } - - jsonStr = string(jsonBytes) - return -} - -// parseWrapper wraps the parse function to be exposed to the global scope -// returns a map {"json": "json output", "error": "error string"} -func parseWrapper() js.Func { - parseFunc := js.FuncOf(func(this js.Value, args []js.Value) any { - if len(args) != 1 { - return "Invalid no of arguments passed" - } - input := args[0].String() - result := map[string]any{} - - jsonOut, err := parseAndMarshal(input) - if err != nil { - errStr := fmt.Sprintf("parsing failed: %s\n", err) - result["error"] = errStr - } - result["json"] = jsonOut - return result - }) - return parseFunc -} - -func main() { - fmt.Println("Loading Kuneiform parser...") - // expose the parse function to the global scope - js.Global().Set("parseKuneiform", parseWrapper()) - <-make(chan bool) -} diff --git a/testing/proxy/impl_1.kf b/testing/proxy/impl_1.kf deleted file mode 100644 index 49ffbbb80..000000000 --- a/testing/proxy/impl_1.kf +++ /dev/null @@ -1,20 +0,0 @@ -database impl_1; - -table users { - id uuid primary key, - name text not null unique, - address text not null unique -} - -procedure create_user ($name text) public { - // derive a deterministic uuid from the blockchain transaction ID - // https://www.postgresql.org/docs/16.1/uuid-ossp.html#UUID-OSSP-FUNCTIONS-SECT - $uuid := uuid_generate_v5('f541de32-5ede-4083-bdbc-b29c3f02be9e'::uuid, @txid); - - insert into users (id, name, address) - values ($uuid, $name, @caller); -} - -procedure get_users() public view returns table (name text, address text) { - return select name, address from users; -} \ No newline at end of file diff --git a/testing/proxy/proxy_test.go b/testing/proxy/proxy_test.go index e6c3aab3b..af9bef6d1 100644 --- a/testing/proxy/proxy_test.go +++ b/testing/proxy/proxy_test.go @@ -1,4 +1,4 @@ -//go:build rely_docker +//go:build pglive // package proxy is an example of how the testing package can be used. It tests // three contracts that are used to form a proxy pattern. An explanation of @@ -7,209 +7,68 @@ package proxy import ( - "context" _ "embed" - "encoding/json" "testing" - "github.com/kwilteam/kwil-db/common" - "github.com/kwilteam/kwil-db/core/utils" + "github.com/kwilteam/kwil-db/node/pg" kwilTesting "github.com/kwilteam/kwil-db/testing" - "github.com/stretchr/testify/require" ) // Test_Impl_1 tests the impl_1.kf file. 
func Test_Impl_1(t *testing.T) { kwilTesting.RunSchemaTest(t, kwilTesting.SchemaTest{ Name: "impl_1", - SchemaFiles: []string{"./impl_1.kf"}, - SeedStatements: map[string][]string{ - "impl_1": { - `INSERT INTO users (id, name, address) - VALUES ('42f856df-b212-4bdc-a396-f8fb6eae6901'::uuid, 'satoshi', '0xAddress'), - ('d68e737d-708f-45f8-9311-317afcaccc63'::uuid, 'zeus', 'zeus.eth')`, - }, + SeedScripts: []string{"./seed_1.sql"}, + SeedStatements: []string{ + "{users}INSERT INTO users (id, name, owner_address) VALUES (-1, 'satoshi', '0xAddress');", }, TestCases: []kwilTesting.TestCase{ { // should create a user - happy case - Name: "create user - success", - Database: "impl_1", - Target: "create_user", - Args: []any{"gilgamesh"}, + Name: "create user - success", + Namespace: "users", + Action: "create_user", + Args: []any{1, "gilgamesh"}, }, { // conflicting with the name "satoshi" in the "name" column, // which is unique. - Name: "conflicting username - failure", - Database: "impl_1", - Target: "create_user", - Args: []any{"satoshi"}, - ErrMsg: "duplicate key value", + Name: "conflicting username - failure", + Namespace: "users", + Action: "create_user", + Args: []any{1, "satoshi"}, + ErrMsg: "duplicate key value", }, { // conflicting with the wallet address provided by @caller // in the "address" column, which is unique - Name: "conflicting wallet address - failure", - Database: "impl_1", - Target: "create_user", - Args: []any{"poseidon"}, - Caller: "0xAddress", // same address as satoshi - ErrMsg: "duplicate key value", + Name: "conflicting wallet address - failure", + Namespace: "users", + Action: "create_user", + Args: []any{1, "poseidon"}, + Caller: "0xAddress", // same address as satoshi + ErrMsg: "duplicate key value", }, { // tests get_users, expecting the users that were seeded. - Name: "reading a table of users - success", - Database: "impl_1", - Target: "get_users", + Name: "reading a table of users - success", + Namespace: "users", + Action: "get_users", Returns: [][]any{ { "satoshi", "0xAddress", }, - { - "zeus", "zeus.eth", - }, }, }, }, - }) -} - -//go:embed impl_2_test.json -var impl2TestJson []byte - -// Test_Impl_2 tests the impl_2.kf file. -// It uses the impl_2_test.json file to show how tests -// can be done using json files as well. -func Test_Impl_2(t *testing.T) { - var schemaTest kwilTesting.SchemaTest - err := json.Unmarshal(impl2TestJson, &schemaTest) - require.NoError(t, err) - - kwilTesting.RunSchemaTest(t, schemaTest) -} - -// Test_Proxy tests proxy.kf to ensure that proxy functionality -// works as expected. -func Test_Proxy(t *testing.T) { - kwilTesting.RunSchemaTest(t, kwilTesting.SchemaTest{ - Name: "proxy", - SchemaFiles: []string{"./proxy.kf", "./impl_1.kf", "./impl_2.kf"}, - SeedStatements: map[string][]string{ - "impl_1": { - `INSERT INTO users (id, name, address) - VALUES ('42f856df-b212-4bdc-a396-f8fb6eae6901'::uuid, 'satoshi', '0xAddress')`, - }, - }, - // since this is a more complex test, we use the function test to - // allow us to code arbitrary logic against the engine. 
- FunctionTests: []kwilTesting.TestFunc{ - func(ctx context.Context, platform *kwilTesting.Platform) error { - proxyDbid := utils.GenerateDBID("proxy", platform.Deployer) - impl1Dbid := utils.GenerateDBID("impl_1", platform.Deployer) - impl2Dbid := utils.GenerateDBID("impl_2", platform.Deployer) - - // register the owner - _, err := platform.Engine.Procedure(ctx, platform.DB, &common.ExecutionData{ - TransactionData: common.TransactionData{ - Signer: platform.Deployer, - Caller: string(platform.Deployer), - TxID: platform.Txid(), - Height: 1, - }, - Dataset: proxyDbid, - Procedure: "register_owner", - }) - require.NoError(t, err) - - // set the proxy to schema 1 - _, err = platform.Engine.Procedure(ctx, platform.DB, &common.ExecutionData{ - TransactionData: common.TransactionData{ - Signer: platform.Deployer, - Caller: string(platform.Deployer), - TxID: platform.Txid(), - Height: 1, - }, - Dataset: proxyDbid, - Procedure: "set_target", - Args: []any{impl1Dbid}, - }) - require.NoError(t, err) - - // get the user from schema 1 - res, err := platform.Engine.Procedure(ctx, platform.DB, &common.ExecutionData{ - TransactionData: common.TransactionData{ - Signer: platform.Deployer, - Caller: string(platform.Deployer), - TxID: platform.Txid(), - Height: 1, - }, - Dataset: proxyDbid, - Procedure: "get_users", - }) - require.NoError(t, err) - - require.EqualValues(t, [][]any{ - {"satoshi", "0xAddress"}, - }, res.Rows) - - // set the proxy to schema 2 - _, err = platform.Engine.Procedure(ctx, platform.DB, &common.ExecutionData{ - TransactionData: common.TransactionData{ - Signer: platform.Deployer, - Caller: string(platform.Deployer), - TxID: platform.Txid(), - Height: 2, - }, - Dataset: proxyDbid, - Procedure: "set_target", - Args: []any{impl2Dbid}, - }) - require.NoError(t, err) - - // migrate schema 2 from schema 1 - _, err = platform.Engine.Procedure(ctx, platform.DB, &common.ExecutionData{ - TransactionData: common.TransactionData{ - Signer: platform.Deployer, - Caller: string(platform.Deployer), - TxID: platform.Txid(), - Height: 2, - }, - Dataset: impl2Dbid, - Procedure: "migrate", - Args: []any{impl1Dbid, "get_users"}, - }) - require.NoError(t, err) - - // drop the old schema - err = platform.Engine.DeleteDataset(ctx, platform.DB, impl1Dbid, - &common.TransactionData{ - Signer: platform.Deployer, - Caller: string(platform.Deployer), - TxID: platform.Txid(), - Height: 2, - }) - require.NoError(t, err) - - // check that the users exist in schema 2 - res, err = platform.Engine.Procedure(ctx, platform.DB, &common.ExecutionData{ - TransactionData: common.TransactionData{ - Signer: platform.Deployer, - Caller: string(platform.Deployer), - TxID: platform.Txid(), - Height: 2, - }, - Dataset: proxyDbid, - Procedure: "get_users", - }) - require.NoError(t, err) - - require.EqualValues(t, [][]any{ - {"satoshi", "0xAddress"}, - }, res.Rows) - - return nil - }, + }, &kwilTesting.Options{ + Conn: &pg.ConnConfig{ + Host: "127.0.0.1", + Port: "5432", + User: "kwild", + Pass: "kwild", // would be ignored if pg_hba.conf set with trust + DBName: "kwil_test_db", }, + Logger: t, }) } diff --git a/testing/proxy/seed_1.sql b/testing/proxy/seed_1.sql new file mode 100644 index 000000000..2fc3aee2f --- /dev/null +++ b/testing/proxy/seed_1.sql @@ -0,0 +1,15 @@ +CREATE NAMESPACE users; + +{users}CREATE TABLE users ( + id INT8 PRIMARY KEY, + name TEXT UNIQUE, + owner_address TEXT NOT NULL +); + +{users}CREATE ACTION create_user($id int, $name TEXT) public { + INSERT INTO users (id, name, owner_address) VALUES ($id, 
$name, @caller);
+};
+
+{users}CREATE ACTION get_users() public view returns (name text, address text) {
+    return SELECT name, owner_address FROM users;
+};
\ No newline at end of file
diff --git a/testing/testing.go b/testing/testing.go
index 9166fd52f..713cbe78d 100644
--- a/testing/testing.go
+++ b/testing/testing.go
@@ -9,7 +9,6 @@ import (
 	"encoding/hex"
 	"errors"
 	"fmt"
-	"maps"
 	"os"
 	"os/exec"
 	"reflect"
@@ -22,22 +21,26 @@ import (
 	"github.com/kwilteam/kwil-db/common"
 	"github.com/kwilteam/kwil-db/config"
 	"github.com/kwilteam/kwil-db/core/log"
-	"github.com/kwilteam/kwil-db/core/types"
-	"github.com/kwilteam/kwil-db/core/utils"
-	"github.com/kwilteam/kwil-db/extensions/precompiles"
-	"github.com/kwilteam/kwil-db/node/engine/execution"
+	"github.com/kwilteam/kwil-db/node/engine/interpreter"
 	"github.com/kwilteam/kwil-db/node/pg"
 	"github.com/kwilteam/kwil-db/node/types/sql"
-	"github.com/kwilteam/kwil-db/parse"
 )
 
 // RunSchemaTest runs a SchemaTest.
 // It is meant to be used with Go's testing package.
-func RunSchemaTest(t *testing.T, s SchemaTest) {
-	err := s.Run(context.Background(), &Options{
-		UseTestContainer: true,
-		Logger:           t,
-	})
+func RunSchemaTest(t *testing.T, s SchemaTest, options *Options) {
+	if options == nil {
+		options = &Options{
+			UseTestContainer: true,
+			Logger:           t,
+		}
+	}
+
+	if s.Owner == "" {
+		s.Owner = string(deployer)
+	}
+
+	err := s.Run(context.Background(), options)
 	if err != nil {
 		t.Fatalf("test failed: %s", err.Error())
 	}
@@ -49,17 +52,16 @@ func RunSchemaTest(t *testing.T, s SchemaTest) {
 type SchemaTest struct {
 	// Name is the name of the test case.
 	Name string `json:"name"`
-	// Schemas are plain text schemas to deploy as
-	// part of the text.
-	Schemas []string `json:"-"`
-	// SchemaFiles are paths to the schema files to deploy.
-	SchemaFiles []string `json:"schema_files"`
+	// Owner is a public identifier of the user that owns the database.
+	// If empty, a pre-defined deployer will be used.
+	Owner string `json:"owner"`
+	// SeedScripts are paths to the files containing SQL
+	// scripts that are run before each test to seed the database
+	SeedScripts []string `json:"seed_scripts"`
 	// SeedStatements are SQL statements run before each test that are
-	// meant to seed the database with data. It maps the database name
-	// to the SQL statements to run. The name is the database name,
-	// defined using "database <name>;". The test case will derive the
-	// DBID from the name.
-	SeedStatements map[string][]string `json:"seed_statements"`
+	// meant to seed the database with data. They are run after the
+	// SeedScripts.
+	SeedStatements []string `json:"seed_statements"`
 	// TestCases execute actions or procedures against the database
 	// engine, taking certain inputs and expecting certain outputs or
 	// errors. These run separately from the functions, and separately
@@ -103,27 +105,20 @@ func (tc SchemaTest) Run(ctx context.Context, opts *Options) error {
 		return fmt.Errorf("test configuration error: %w", err)
 	}
 
-	schemas := tc.Schemas
-	for _, schemaFile := range tc.SchemaFiles {
+	// read in the seed scripts
+	seedStmts := []string{}
+	for _, schemaFile := range tc.SeedScripts {
 		bts, err := os.ReadFile(schemaFile)
 		if err != nil {
 			return err
 		}
-		schemas = append(schemas, string(bts))
-	}
-
-	var parsedSchemas []*types.Schema
-	for _, schema := range schemas {
-		s, err := parse.Parse([]byte(schema))
-		if err != nil {
-			return fmt.Errorf(`error parsing schema: %w`, err)
-		}
-		parsedSchemas = append(parsedSchemas, s)
-		s.Owner = deployer
+		opts.Logger.Logf(`reading seed script "%s"`, schemaFile)
 
-		opts.Logger.Logf(`using schema "%s" (DBID: "%s")`, s.Name, s.DBID())
+		seedStmts = append(seedStmts, string(bts))
 	}
 
+	// after the scripts are read in, append the ad hoc seed statements
+	seedStmts = append(seedStmts, tc.SeedStatements...)
 
 	// connect to Postgres, and run each test case in its
 	// own transaction that is rolled back.
@@ -162,11 +157,6 @@ func (tc SchemaTest) Run(ctx context.Context, opts *Options) error {
 	// always rollback the outer transaction to reset the database
 	defer outerTx.Rollback(ctx)
 
-	err = execution.InitializeEngine(ctx, outerTx)
-	if err != nil {
-		return err
-	}
-
 	var logger log.Logger
 	// if this is a kwil logger, we can keep using it.
 	// If it is from testing.T, we should make a Kwil logger.
@@ -176,26 +166,36 @@ func (tc SchemaTest) Run(ctx context.Context, opts *Options) error {
 		logger = log.New(log.WithLevel(log.LevelInfo))
 	}
 
-	engine, err := execution.NewGlobalContext(ctx, outerTx, maps.Clone(precompiles.RegisteredPrecompiles()),
-		&common.Service{
-			Logger:      logger,
-			LocalConfig: &config.Config{},
-			Identity:    []byte("node"),
-		})
+	interp, err := interpreter.NewInterpreter(ctx, outerTx, &common.Service{
+		Logger:      logger,
+		LocalConfig: &config.Config{},
+		Identity:    []byte("node"),
+	})
 	if err != nil {
 		return err
 	}
 
+	err = interp.SetOwner(ctx, outerTx, tc.Owner)
+	if err != nil {
+		return err
+	}
+
+	tx2, err := outerTx.BeginTx(ctx)
+	if err != nil {
+		return err
+	}
+	defer tx2.Rollback(ctx)
+
 	platform := &Platform{
-		Engine:   engine,
-		DB:       outerTx,
+		Engine:   interp,
+		DB:       tx2,
 		Deployer: deployer,
 		Logger:   opts.Logger,
 	}
 
 	// deploy schemas
-	for _, schema := range parsedSchemas {
-		err := engine.CreateDataset(&common.TxContext{
+	for _, stmt := range seedStmts {
+		err = interp.Execute(&common.TxContext{
 			Ctx:    ctx,
 			Signer: deployer,
 			Caller: string(deployer),
@@ -203,36 +203,15 @@ func (tc SchemaTest) Run(ctx context.Context, opts *Options) error {
 			BlockContext: &common.BlockContext{
 				Height: 0,
 			},
-		}, outerTx, schema)
+		}, tx2, stmt, nil, func(r *common.Row) error {
+			// do nothing
+			return nil
+		})
 		if err != nil {
 			return err
 		}
 	}
 
-	// seed data
-	for dbName, seed := range tc.SeedStatements {
-		if strings.HasSuffix(dbName, ".kf") {
-			// while I was testing this, I hit this twice by accident, so I
-			// figured I should add in a helpful error message
-			return fmt.Errorf(`seed statement target must be the schema name, not the file name.
-Received "%s"`, dbName)
-		}
-
-		for _, sql := range seed {
-			dbid := utils.GenerateDBID(dbName, deployer)
-			_, err = engine.Execute(&common.TxContext{
-				Signer: deployer,
-				Caller: string(deployer),
-				TxID:   platform.Txid(),
-				BlockContext: &common.BlockContext{
-					Height: 0,
-				},
-			}, outerTx, dbid, sql, nil)
-			if err != nil {
-				return fmt.Errorf(`error executing seed query "%s" on schema "%s": %s`, sql, dbName, err)
-			}
-		}
-	}
-
 	// run test function
 	err = testFn(ctx, platform)
 	if err != nil {
@@ -264,13 +243,11 @@ type TestFunc func(ctx context.Context, platform *Platform) error
 type TestCase struct {
 	// Name is a name that the test will be identified by if it fails.
 	Name string `json:"name"`
-	// Database is the name of the database schema to execute the
-	// action/procedure against. This is the database NAME,
-	// defined using "database <name>;". The test case will
-	// derive the DBID from the name.
-	Database string `json:"database"`
-	// Name is the name of the action/procedure.
-	Target string `json:"target"`
+	// Namespace is the name of the database schema to execute the
+	// action/procedure against.
+	Namespace string `json:"database"`
+	// Action is the name of the action/procedure.
+	Action string `json:"action"`
 	// Args are the inputs to the action/procedure.
 	// If the action/procedure takes no parameters, this should be nil.
 	Args []any `json:"args"`
@@ -300,12 +277,11 @@ func (e *TestCase) runExecution(ctx context.Context, platform *Platform) error {
 		caller = e.Caller
 	}
 
-	dbid := utils.GenerateDBID(e.Database, deployer)
-
 	// log to help users debug failed tests
-	platform.Logger.Logf(`executing action/procedure "%s" against schema "%s" (DBID: %s)`, e.Target, e.Database, dbid)
+	platform.Logger.Logf(`executing action/procedure "%s" against namespace "%s"`, e.Action, e.Namespace)
 
-	res, err := platform.Engine.Procedure(&common.TxContext{
+	var results [][]any
+	_, err := platform.Engine.Call(&common.TxContext{
 		Ctx:    ctx,
 		Signer: []byte(caller),
 		Caller: caller,
@@ -317,10 +293,9 @@ func (e *TestCase) runExecution(ctx context.Context, platform *Platform) error {
 			NetworkParameters: &common.NetworkParameters{},
 		},
 	},
-	}, platform.DB, &common.ExecutionData{
-		Dataset:   dbid,
-		Procedure: e.Target,
-		Args:      e.Args,
+	}, platform.DB, e.Namespace, e.Action, e.Args, func(r *common.Row) error {
+		results = append(results, r.Values)
+		return nil
 	})
 	if err != nil {
 		// if error is not nil, the test should only pass if either
@@ -347,11 +322,11 @@ func (e *TestCase) runExecution(ctx context.Context, platform *Platform) error {
 		return nil
 	}
 
-	if len(res.Rows) != len(e.Returns) {
-		return fmt.Errorf("expected %d rows to be returned, received %d", len(e.Returns), len(res.Rows))
+	if len(results) != len(e.Returns) {
+		return fmt.Errorf("expected %d rows to be returned, received %d", len(e.Returns), len(results))
 	}
 
-	for i, row := range res.Rows {
+	for i, row := range results {
 		if len(row) != len(e.Returns[i]) {
 			return fmt.Errorf("expected %d columns to be returned, received %d", len(e.Returns[i]), len(row))
 		}