commit 369ffc8be8ae854806f6ffedd226bc19ab1b196b Author: fg-admin Date: Wed Jan 25 13:40:44 2023 +0300 Initial commit diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..b556a97 --- /dev/null +++ b/.gitignore @@ -0,0 +1,4 @@ +/qdsl +/cmd/qdsl/qdsl +/cmdb-agent +/cmd/cmdb-agent/cmdb-agent diff --git a/.goreleaser.yml b/.goreleaser.yml new file mode 100644 index 0000000..81a45d3 --- /dev/null +++ b/.goreleaser.yml @@ -0,0 +1,34 @@ +project_name: cmdb +gitea_urls: + api: http://git.fg-tech.ru/api/v1 + download: http://git.fg-tech.ru + skip_tls_verify: true + +builds: + - main: ./cmd/qdsl + id: qdsl + binary: qdsl + goos: + - linux + goarch: + - amd64 + ldflags: + - -X main.version={{ .Version }} + - -X main.release={{ .ShortCommit }} + env: + - CGO_ENABLED=0 + +nfpms: + - id: qdsl + file_name_template: "qdsl-{{ .Version }}-{{ .Os }}-{{ .Arch }}" + package_name: qdsl + maintainer: listware + description: QDSL + license: "Apache 2.0" + bindir: /usr/bin + builds: + - qdsl + formats: + - rpm + - deb + contents: [] diff --git a/LICENSE b/LICENSE new file mode 100644 index 0000000..7a4a3ea --- /dev/null +++ b/LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. 
For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. 
You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. 
Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. \ No newline at end of file diff --git a/README.md b/README.md new file mode 100644 index 0000000..2c0d27e --- /dev/null +++ b/README.md @@ -0,0 +1,3 @@ +# Cluster Management Data Base [CMDB](./cmd/cmdb-agent) + +# Query Database Search Language [QDSL](./cmd/qdsl) \ No newline at end of file diff --git a/cmd/cmdb-agent/README.md b/cmd/cmdb-agent/README.md new file mode 100644 index 0000000..707f5e5 --- /dev/null +++ b/cmd/cmdb-agent/README.md @@ -0,0 +1,9 @@ +# Cluster Management Data Base (CMDB) + +## Stack +* [ArangoDB](https://github.com/arangodb/arangodb) +* [cmdb-agent](./cmd/cmdb-agent/main.go) + +###### Feature +* All CRUD are performed on the graph +* Removing objects also destroys object links \ No newline at end of file diff --git a/cmd/cmdb-agent/main.go b/cmd/cmdb-agent/main.go new file mode 100644 index 0000000..669fb69 --- /dev/null +++ b/cmd/cmdb-agent/main.go @@ -0,0 +1,11 @@ +// Copyright 2022 Listware + +package main + +import ( + "git.fg-tech.ru/listware/cmdb/internal/server" +) + +func main() { + server.New() +} diff --git a/cmd/qdsl/README.md b/cmd/qdsl/README.md new file mode 100644 index 0000000..e9a4a1a --- /dev/null +++ b/cmd/qdsl/README.md @@ -0,0 +1,19 @@ +# Query Database Search Language +* search and remove objects/links by qdsl query + +###### Example +``` +qdsl *.root +qdsl --remove --id *.root +qdsl --remove --linkid *.root +``` +###### Flags +* `id - get 
vertex '_id', default true` +* `key - get vertex '_key', default false` +* `object - get vertex 'object', default false` +* `link - get edge 'object', default false` +* `linkId - get edge 'id', default false` +* `name - get edge '_name', default false` +* `type - get edge '_type', default false` +* `remove - remove all results, default false` +* `confirm - confirm remove, default false` \ No newline at end of file diff --git a/cmd/qdsl/main.go b/cmd/qdsl/main.go new file mode 100644 index 0000000..ee79c13 --- /dev/null +++ b/cmd/qdsl/main.go @@ -0,0 +1,7 @@ +// Copyright 2022 Listware + +package main + +func main() { + execute() +} diff --git a/cmd/qdsl/qdsl.go b/cmd/qdsl/qdsl.go new file mode 100644 index 0000000..9fd573d --- /dev/null +++ b/cmd/qdsl/qdsl.go @@ -0,0 +1,85 @@ +// Copyright 2022 Listware + +package main + +import ( + "context" + "encoding/json" + "fmt" + "regexp" + "strings" + + "git.fg-tech.ru/listware/cmdb/pkg/cmdb/qdsl" + "github.com/manifoldco/promptui" + "github.com/sirupsen/logrus" + "github.com/spf13/cobra" +) + +var options = qdsl.NewOptions() + +var byPT string +var customFilter []string + +var confirm bool + +func buildFilter(query string, filters []string) string { + if len(filters) == 0 { + return query + } + // find *. or <. at the start + r, err := regexp.Compile(`^[\*\<]\.`) + if err != nil { + log.Error("Can't add filter, parse error: ", err) + return query + } + num := r.FindStringIndex(query) + if num != nil { + i := num[1] - 1 + query = query[:i] + "[?" 
+ filters[0] + "?]" + query[i:] + filters = filters[1:] + } + + if strings.Index(query, "[?") != -1 { + i := strings.Index(query, "?]") + if i == -1 { + log.Error("Error while parsing filter: can't find close filter operator '?]'") + return query + } + newQuery := query[:i] + for _, filter := range filters { + newQuery += " && " + filter + } + newQuery += query[i:] + query = newQuery + // return newQuery + } + log.WithFields(logrus.Fields{"cli": "qdsl"}).Debug(query) + + return query +} + +func qdslQuery(cmd *cobra.Command, args []string) { + log.WithFields(logrus.Fields{"cli": "qdsl"}).Debug("QDSL called with argument: ", args[0]) + query := buildFilter(args[0], customFilter) + + if options.Remove && !confirm { + prompt := promptui.Prompt{ + Label: fmt.Sprintf("Confirm remove %s", query), + IsConfirm: true, + } + + _, err := prompt.Run() + if err != nil { + return + } + } + + elements, err := qdsl.RawQdsl(context.Background(), query, options) + if err != nil { + log.Error(err) + return + } + + s, _ := json.Marshal(elements) + fmt.Println(string(s)) +} diff --git a/cmd/qdsl/root.go b/cmd/qdsl/root.go new file mode 100644 index 0000000..a672f90 --- /dev/null +++ b/cmd/qdsl/root.go @@ -0,0 +1,76 @@ +// Copyright 2022 Listware + +package main + +import ( + "errors" + "fmt" + "os" + + "github.com/sirupsen/logrus" + "github.com/spf13/cobra" +) + +var log = logrus.New() + +var ( + version = "v1.0.0" + release = "dev" + versionTemplate = `{{printf "%s" .Short}} +{{printf "Version: %s" .Version}} +Release: ` + release + ` +` +) + +// rootCmd represents the base command when called without any subcommands +var rootCmd = &cobra.Command{ + Version: version, + Use: "qdsl QUERY", + Short: "CMDB query language", + Long: `CMDB query language for getting information about nodes`, + Args: func(cmd *cobra.Command, args []string) error { + if len(args) < 1 { + return errors.New("requires at least one arg") + } + return nil + }, + Run: qdslQuery, +} + +// Execute adds all child 
commands to the root command and sets flags appropriately. +// This is called by main.main(). It only needs to happen once to the rootCmd. +func execute() { + if err := rootCmd.Execute(); err != nil { + fmt.Println(err) + os.Exit(1) + } +} + +func init() { + rootCmd.SetVersionTemplate(versionTemplate) + + // Add commands + rootCmd.AddCommand(autoShellCmd) + + rootCmd.Flags().BoolVarP(&options.Key, "key", "k", false, "add key to result") + rootCmd.Flags().BoolVarP(&options.Id, "id", "i", false, "add id to result") + rootCmd.Flags().BoolVarP(&options.Type, "type", "t", false, `add type to result`) + rootCmd.Flags().BoolVarP(&options.Object, "object", "o", false, "add object to result") + rootCmd.Flags().BoolVarP(&options.Link, "link", "l", false, `add link to result`) + rootCmd.Flags().BoolVarP(&options.LinkId, "linkid", "I", false, "add link id to result") + rootCmd.Flags().BoolVarP(&options.Name, "name", "n", false, "add name in particular topology to result") + rootCmd.Flags().BoolVarP(&options.Path, "path", "p", false, `add path to result`) + + rootCmd.Flags().BoolVarP(&options.Remove, "remove", "r", false, "remove result") + rootCmd.Flags().BoolVarP(&confirm, "confirm", "y", false, "confirm remove") +} + +var autoShellCmd = &cobra.Command{ + Use: "autoshell", + Short: "Generate bash completion script", + Long: "Generate bash completion script", + Hidden: true, + RunE: func(cmd *cobra.Command, args []string) error { + return cmd.Root().GenBashCompletionFile("/etc/bash_completion.d/qdsl.sh") + }, +} diff --git a/genpeg.go b/genpeg.go new file mode 100644 index 0000000..498e338 --- /dev/null +++ b/genpeg.go @@ -0,0 +1,5 @@ +// Copyright 2022 Listware + +package genpeg + +//go:generate pigeon -o ./internal/cmdb/qdsl/qdslpeg.go qdsl.peg diff --git a/go.mod b/go.mod new file mode 100644 index 0000000..51b5bf0 --- /dev/null +++ b/go.mod @@ -0,0 +1,27 @@ +module git.fg-tech.ru/listware/cmdb + +go 1.19 + +require ( + git.fg-tech.ru/listware/proto v0.1.1 + 
github.com/arangodb/go-driver v1.4.1 + github.com/bbuck/go-lexer v1.0.0 + github.com/manifoldco/promptui v0.9.0 + github.com/sirupsen/logrus v1.9.0 + github.com/spf13/cobra v1.6.1 + google.golang.org/grpc v1.52.1 +) + +require ( + github.com/arangodb/go-velocypack v0.0.0-20200318135517-5af53c29c67e // indirect + github.com/chzyer/readline v0.0.0-20180603132655-2972be24d48e // indirect + github.com/golang/protobuf v1.5.2 // indirect + github.com/inconshreveable/mousetrap v1.0.1 // indirect + github.com/pkg/errors v0.9.1 // indirect + github.com/spf13/pflag v1.0.5 // indirect + golang.org/x/net v0.4.0 // indirect + golang.org/x/sys v0.3.0 // indirect + golang.org/x/text v0.5.0 // indirect + google.golang.org/genproto v0.0.0-20221118155620-16455021b5e6 // indirect + google.golang.org/protobuf v1.28.1 // indirect +) diff --git a/go.sum b/go.sum new file mode 100644 index 0000000..be2f57a --- /dev/null +++ b/go.sum @@ -0,0 +1,64 @@ +git.fg-tech.ru/listware/proto v0.1.1 h1:CSqteAtgysiJe7+KtLOEXSIvxypmlJCKwQtla1d2v+A= +git.fg-tech.ru/listware/proto v0.1.1/go.mod h1:t5lyMTuX/if05HI/na9tJAlHCHHMdhdPLBTkhvscedQ= +github.com/arangodb/go-driver v1.4.1 h1:Jg0N7XKxiKwjswmAcMCnefWmt81KJEqybqRAGJDRWlo= +github.com/arangodb/go-driver v1.4.1/go.mod h1:UTtaxTUMmyPWzKc2dsWWOZzZ3yM6aHWxn/eubGa3YmQ= +github.com/arangodb/go-velocypack v0.0.0-20200318135517-5af53c29c67e h1:Xg+hGrY2LcQBbxd0ZFdbGSyRKTYMZCfBbw/pMJFOk1g= +github.com/arangodb/go-velocypack v0.0.0-20200318135517-5af53c29c67e/go.mod h1:mq7Shfa/CaixoDxiyAAc5jZ6CVBAyPaNQCGS7mkj4Ho= +github.com/bbuck/go-lexer v1.0.0 h1:ZwzxWHnxQslJ/5I01nlhZwE7MtmvXbtEyRwrgQ70Qew= +github.com/bbuck/go-lexer v1.0.0/go.mod h1:JOt4Q0nNqWxYEy+spld4SJGe9r8G8suXd1dukur9O90= +github.com/chzyer/logex v1.1.10 h1:Swpa1K6QvQznwJRcfTfQJmTE72DqScAa40E+fbHEXEE= +github.com/chzyer/logex v1.1.10/go.mod h1:+Ywpsq7O8HXn0nuIou7OrIPyXbp3wmkHB+jjWRnGsAI= +github.com/chzyer/readline v0.0.0-20180603132655-2972be24d48e h1:fY5BOSpyZCqRo5OhCuC+XN+r/bBCmeuuJtjz+bCNIf8= 
+github.com/chzyer/readline v0.0.0-20180603132655-2972be24d48e/go.mod h1:nSuG5e5PlCu98SY8svDHJxuZscDgtXS6KTTbou5AhLI= +github.com/chzyer/test v0.0.0-20180213035817-a1ea475d72b1 h1:q763qf9huN11kDQavWsoZXJNW3xEE4JJyHa5Q25/sd8= +github.com/chzyer/test v0.0.0-20180213035817-a1ea475d72b1/go.mod h1:Q3SI9o4m/ZMnBNeIyt5eFwwo7qiLfzFZmjNmxjkiQlU= +github.com/cpuguy83/go-md2man/v2 v2.0.2/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o= +github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= +github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/golang/protobuf v1.5.0/go.mod h1:FsONVRAS9T7sI+LIUmWTfcYkHO4aIWwzhcaSAoJOfIk= +github.com/golang/protobuf v1.5.2 h1:ROPKBNFfQgOUMifHyP+KYbvpjbdoFNs+aK7DXlji0Tw= +github.com/golang/protobuf v1.5.2/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY= +github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.9 h1:O2Tfq5qg4qc4AmwVlvv0oLiVAGB7enBSJ2x2DqQFi38= +github.com/inconshreveable/mousetrap v1.0.1 h1:U3uMjPSQEBMNp1lFxmllqCPM6P5u/Xq7Pgzkat/bFNc= +github.com/inconshreveable/mousetrap v1.0.1/go.mod h1:vpF70FUmC8bwa3OWnCshd2FqLfsEA9PFc4w1p2J65bw= +github.com/manifoldco/promptui v0.9.0 h1:3V4HzJk1TtXW1MTZMP7mdlwbBpIinw3HztaIlYthEiA= +github.com/manifoldco/promptui v0.9.0/go.mod h1:ka04sppxSGFAtxX0qhlYQjISsg9mR4GWtQEhdbn6Pgg= +github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4= +github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= +github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= +github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= +github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= +github.com/sirupsen/logrus v1.9.0 
h1:trlNQbNUG3OdDrDil03MCb1H2o9nJ1x4/5LYw7byDE0= +github.com/sirupsen/logrus v1.9.0/go.mod h1:naHLuLoDiP4jHNo9R0sCBMtWGeIprob74mVsIT4qYEQ= +github.com/spf13/cobra v1.6.1 h1:o94oiPyS4KD1mPy2fmcYYHHfCxLqYjJOhGsCHFZtEzA= +github.com/spf13/cobra v1.6.1/go.mod h1:IOw/AERYS7UzyrGinqmz6HLUo219MORXGxhbaJUqzrY= +github.com/spf13/pflag v1.0.5 h1:iy+VFUOCP1a+8yFto/drg2CJ5u0yRoB7fZw3DKv/JXA= +github.com/spf13/pflag v1.0.5/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg= +github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= +github.com/stretchr/testify v1.5.1/go.mod h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5cxcmMvtA= +github.com/stretchr/testify v1.7.0 h1:nwc3DEeHmmLAfoZucVR881uASk0Mfjw8xYJ99tb5CcY= +github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= +golang.org/x/net v0.4.0 h1:Q5QPcMlvfxFTAPV0+07Xz/MpK9NTXu2VDUuy0FeMfaU= +golang.org/x/net v0.4.0/go.mod h1:MBQ8lrhLObU/6UmLb4fmbmk5OcyYmqtbGd/9yIeKjEE= +golang.org/x/sys v0.0.0-20181122145206-62eef0e2fa9b/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20220715151400-c0bba94af5f8/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.3.0 h1:w8ZOecv6NaNa/zC8944JTU3vz4u6Lagfk4RPQxv92NQ= +golang.org/x/sys v0.3.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/text v0.5.0 h1:OLmvp0KP+FVG99Ct/qFiL/Fhk4zp4QQnZ7b2U+5piUM= +golang.org/x/text v0.5.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= +golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +google.golang.org/genproto v0.0.0-20221118155620-16455021b5e6 h1:a2S6M0+660BgMNl++4JPlcAO/CjkqYItDEZwkoDQK7c= +google.golang.org/genproto v0.0.0-20221118155620-16455021b5e6/go.mod h1:rZS5c/ZVYMaOGBfO68GWtjOw/eLaZM1X6iVtgjZ+EWg= +google.golang.org/grpc v1.52.1 h1:2NpOPk5g5Xtb0qebIEs7hNIa++PdtZLo2AQUpc1YnSU= +google.golang.org/grpc v1.52.1/go.mod 
h1:pu6fVzoFb+NBYNAvQL08ic+lvB2IojljRYuun5vorUY= +google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw= +google.golang.org/protobuf v1.26.0/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc= +google.golang.org/protobuf v1.28.1 h1:d0NfwRgPtno5B1Wa6L2DAG+KivqkdutMf1UhdNx175w= +google.golang.org/protobuf v1.28.1/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I= +gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= +gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= diff --git a/internal/arangodb/arangodb.go b/internal/arangodb/arangodb.go new file mode 100644 index 0000000..aea279c --- /dev/null +++ b/internal/arangodb/arangodb.go @@ -0,0 +1,70 @@ +// Copyright 2022 Listware + +package arangodb + +import ( + "context" + "crypto/tls" + "net/http" + "os" + + driver "github.com/arangodb/go-driver" + arangohttp "github.com/arangodb/go-driver/http" +) + +const ( + cmdbName = "CMDB" + systemGraphName = "system" +) + +var ( + arangoAddr string + arangoUser string + arangoPassword string +) + +func init() { + if value, ok := os.LookupEnv("ARANGO_ADDR"); ok { + arangoAddr = value + } + if value, ok := os.LookupEnv("ARANGO_USER"); ok { + arangoUser = value + } + if value, ok := os.LookupEnv("ARANGO_PASSWORD"); ok { + arangoPassword = value + } + +} + +func Connect() (client driver.Client, err error) { + tr := &http.Transport{ + TLSClientConfig: &tls.Config{InsecureSkipVerify: true}, + } + + // Open a client connection + conn, err := arangohttp.NewConnection(arangohttp.ConnectionConfig{ + Transport: tr, + Endpoints: []string{arangoAddr}, + }) + if err != nil { + return + } + + return 
driver.NewClient(driver.ClientConfig{ + Connection: conn, + Authentication: driver.BasicAuthentication(arangoUser, arangoPassword), + }) + +} + +func Database(ctx context.Context, client driver.Client) (driver.Database, error) { + return client.Database(ctx, cmdbName) +} + +func Graph(ctx context.Context, client driver.Client) (graph driver.Graph, err error) { + db, err := client.Database(ctx, cmdbName) + if err != nil { + return + } + return db.Graph(ctx, systemGraphName) +} diff --git a/internal/arangodb/bootstrap.go b/internal/arangodb/bootstrap.go new file mode 100644 index 0000000..eea5649 --- /dev/null +++ b/internal/arangodb/bootstrap.go @@ -0,0 +1,128 @@ +// Copyright 2022 Listware + +package arangodb + +import ( + "context" + + driver "github.com/arangodb/go-driver" +) + +const ( + systemCollection = "system" + typesCollection = "types" + objectsCollection = "objects" + linksCollection = "links" +) + +var ( + allowUserKeysPtr = true +) + +func Bootstrap(ctx context.Context) (err error) { + client, err := Connect() + if err != nil { + return + } + + ok, err := client.DatabaseExists(ctx, cmdbName) + if err != nil { + return + } + + if !ok { + options := &driver.CreateDatabaseOptions{} + if _, err = client.CreateDatabase(ctx, cmdbName, options); err != nil { + return + } + } + + db, err := client.Database(ctx, cmdbName) + if err != nil { + return + } + + // system collection + if ok, err = db.CollectionExists(ctx, systemCollection); err != nil { + return + } + + if !ok { + options := &driver.CreateCollectionOptions{ + IsSystem: true, + KeyOptions: &driver.CollectionKeyOptions{ + AllowUserKeysPtr: &allowUserKeysPtr, + }, + } + if _, err = db.CreateCollection(ctx, systemCollection, options); err != nil { + return + } + } + + // types collection + if ok, err = db.CollectionExists(ctx, typesCollection); err != nil { + return + } + + if !ok { + options := &driver.CreateCollectionOptions{ + KeyOptions: &driver.CollectionKeyOptions{ + AllowUserKeysPtr: 
&allowUserKeysPtr, + }, + } + if _, err = db.CreateCollection(ctx, typesCollection, options); err != nil { + return + } + } + + // objects collection + if ok, err = db.CollectionExists(ctx, objectsCollection); err != nil { + return + } + + if !ok { + options := &driver.CreateCollectionOptions{ + KeyOptions: &driver.CollectionKeyOptions{ + Type: driver.KeyGeneratorTraditional, + }, + } + if _, err = db.CreateCollection(ctx, objectsCollection, options); err != nil { + return + } + } + + // links collection + if ok, err = db.CollectionExists(ctx, linksCollection); err != nil { + return + } + + if !ok { + options := &driver.CreateCollectionOptions{ + Type: driver.CollectionTypeEdge, + } + if _, err = db.CreateCollection(ctx, linksCollection, options); err != nil { + return + } + } + + // system graph + if ok, err = db.GraphExists(ctx, systemGraphName); err != nil { + return + } + + if !ok { + options := &driver.CreateGraphOptions{ + EdgeDefinitions: []driver.EdgeDefinition{ + driver.EdgeDefinition{ + Collection: linksCollection, + From: []string{systemCollection, typesCollection, objectsCollection}, + To: []string{typesCollection, objectsCollection}, + }, + }, + } + if _, err = db.CreateGraphV2(ctx, systemGraphName, options); err != nil { + return + } + } + return +} diff --git a/internal/arangodb/edge/create.go b/internal/arangodb/edge/create.go new file mode 100644 index 0000000..0c1b59f --- /dev/null +++ b/internal/arangodb/edge/create.go @@ -0,0 +1,38 @@ +// Copyright 2022 Listware + +package edge + +import ( + "context" + "encoding/json" + "fmt" + + "git.fg-tech.ru/listware/cmdb/internal/arangodb" + driver "github.com/arangodb/go-driver" +) + +func Create(ctx context.Context, client driver.Client, name string, payload any) (meta driver.DocumentMeta, resp map[string]any, err error) { + graph, err := arangodb.Graph(ctx, client) + if err != nil { + return + } + + collection, _, err := graph.EdgeCollection(ctx, name) + if err != nil { + return + } + + ctx = 
driver.WithReturnNew(ctx, &resp) + + if b, ok := payload.([]byte); ok { + var req map[string]any + if err = json.Unmarshal(b, &req); err != nil { + return + } + fmt.Println("create edge bytes ", req) + meta, err = collection.CreateDocument(ctx, req) + return + } + meta, err = collection.CreateDocument(ctx, payload) + return +} diff --git a/internal/arangodb/edge/read.go b/internal/arangodb/edge/read.go new file mode 100644 index 0000000..01cf48a --- /dev/null +++ b/internal/arangodb/edge/read.go @@ -0,0 +1,23 @@ +// Copyright 2022 Listware + +package edge + +import ( + "context" + + "git.fg-tech.ru/listware/cmdb/internal/arangodb" + driver "github.com/arangodb/go-driver" +) + +func Read(ctx context.Context, client driver.Client, name, key string) (meta driver.DocumentMeta, resp map[string]any, err error) { + graph, err := arangodb.Graph(ctx, client) + if err != nil { + return + } + collection, _, err := graph.EdgeCollection(ctx, name) + if err != nil { + return + } + meta, err = collection.ReadDocument(ctx, key, &resp) + return +} diff --git a/internal/arangodb/edge/remove.go b/internal/arangodb/edge/remove.go new file mode 100644 index 0000000..87003a4 --- /dev/null +++ b/internal/arangodb/edge/remove.go @@ -0,0 +1,22 @@ +// Copyright 2022 Listware + +package edge + +import ( + "context" + + "git.fg-tech.ru/listware/cmdb/internal/arangodb" + driver "github.com/arangodb/go-driver" +) + +func Remove(ctx context.Context, client driver.Client, name, key string) (meta driver.DocumentMeta, err error) { + graph, err := arangodb.Graph(ctx, client) + if err != nil { + return + } + collection, _, err := graph.EdgeCollection(ctx, name) + if err != nil { + return + } + return collection.RemoveDocument(ctx, key) +} diff --git a/internal/arangodb/edge/update.go b/internal/arangodb/edge/update.go new file mode 100644 index 0000000..64a57c1 --- /dev/null +++ b/internal/arangodb/edge/update.go @@ -0,0 +1,33 @@ +// Copyright 2022 Listware + +package edge + +import ( + "context" + 
"encoding/json" + + "git.fg-tech.ru/listware/cmdb/internal/arangodb" + driver "github.com/arangodb/go-driver" +) + +func Update(ctx context.Context, client driver.Client, name, key string, payload any) (meta driver.DocumentMeta, resp map[string]any, err error) { + graph, err := arangodb.Graph(ctx, client) + if err != nil { + return + } + collection, _, err := graph.EdgeCollection(ctx, name) + if err != nil { + return + } + ctx = driver.WithReturnNew(ctx, &resp) + if b, ok := payload.([]byte); ok { + var req map[string]any + if err = json.Unmarshal(b, &req); err != nil { + return + } + meta, err = collection.ReplaceDocument(ctx, key, req) + return + } + meta, err = collection.ReplaceDocument(ctx, key, payload) + return +} diff --git a/internal/arangodb/query/query.go b/internal/arangodb/query/query.go new file mode 100644 index 0000000..cdb3804 --- /dev/null +++ b/internal/arangodb/query/query.go @@ -0,0 +1,35 @@ +// Copyright 2022 Listware + +package query + +import ( + "context" + + "git.fg-tech.ru/listware/cmdb/internal/arangodb" + driver "github.com/arangodb/go-driver" +) + +func Query(ctx context.Context, client driver.Client, query string, vars map[string]any) (metas []driver.DocumentMeta, resp []map[string]any, err error) { + db, err := arangodb.Database(ctx, client) + if err != nil { + return + } + + cursor, err := db.Query(ctx, query, vars) + if err != nil { + return + } + defer cursor.Close() + + for cursor.HasMore() { + var obj map[string]any + + meta, err := cursor.ReadDocument(ctx, &obj) + if err != nil { + return nil, nil, err + } + resp = append(resp, obj) + metas = append(metas, meta) + } + return +} diff --git a/internal/arangodb/vertex/create.go b/internal/arangodb/vertex/create.go new file mode 100644 index 0000000..ffc786c --- /dev/null +++ b/internal/arangodb/vertex/create.go @@ -0,0 +1,37 @@ +// Copyright 2022 Listware + +package vertex + +import ( + "context" + "encoding/json" + + "git.fg-tech.ru/listware/cmdb/internal/arangodb" + driver 
"github.com/arangodb/go-driver" +) + +func Create(ctx context.Context, client driver.Client, name string, payload any) (meta driver.DocumentMeta, resp map[string]any, err error) { + graph, err := arangodb.Graph(ctx, client) + if err != nil { + return + } + + collection, err := graph.VertexCollection(ctx, name) + if err != nil { + return + } + + ctx = driver.WithReturnNew(ctx, &resp) + + if b, ok := payload.([]byte); ok { + var req map[string]any + if err = json.Unmarshal(b, &req); err != nil { + return + } + meta, err = collection.CreateDocument(ctx, req) + return + } + + meta, err = collection.CreateDocument(ctx, payload) + return +} diff --git a/internal/arangodb/vertex/read.go b/internal/arangodb/vertex/read.go new file mode 100644 index 0000000..e2a4be9 --- /dev/null +++ b/internal/arangodb/vertex/read.go @@ -0,0 +1,24 @@ +// Copyright 2022 Listware + +package vertex + +import ( + "context" + + "git.fg-tech.ru/listware/cmdb/internal/arangodb" + driver "github.com/arangodb/go-driver" +) + +func Read(ctx context.Context, client driver.Client, name, key string) (meta driver.DocumentMeta, resp map[string]any, err error) { + graph, err := arangodb.Graph(ctx, client) + if err != nil { + return + } + collection, err := graph.VertexCollection(ctx, name) + if err != nil { + return + } + + meta, err = collection.ReadDocument(ctx, key, &resp) + return +} diff --git a/internal/arangodb/vertex/remove.go b/internal/arangodb/vertex/remove.go new file mode 100644 index 0000000..15fd57c --- /dev/null +++ b/internal/arangodb/vertex/remove.go @@ -0,0 +1,22 @@ +// Copyright 2022 Listware + +package vertex + +import ( + "context" + + "git.fg-tech.ru/listware/cmdb/internal/arangodb" + driver "github.com/arangodb/go-driver" +) + +func Remove(ctx context.Context, client driver.Client, name, key string) (meta driver.DocumentMeta, err error) { + graph, err := arangodb.Graph(ctx, client) + if err != nil { + return + } + collection, err := graph.VertexCollection(ctx, name) + if err != nil 
{ + return + } + return collection.RemoveDocument(ctx, key) +} diff --git a/internal/arangodb/vertex/update.go b/internal/arangodb/vertex/update.go new file mode 100644 index 0000000..55f7581 --- /dev/null +++ b/internal/arangodb/vertex/update.go @@ -0,0 +1,33 @@ +// Copyright 2022 Listware + +package vertex + +import ( + "context" + "encoding/json" + + "git.fg-tech.ru/listware/cmdb/internal/arangodb" + driver "github.com/arangodb/go-driver" +) + +func Update(ctx context.Context, client driver.Client, name, key string, payload any) (meta driver.DocumentMeta, resp map[string]any, err error) { + graph, err := arangodb.Graph(ctx, client) + if err != nil { + return + } + collection, err := graph.VertexCollection(ctx, name) + if err != nil { + return + } + ctx = driver.WithReturnNew(ctx, &resp) + if b, ok := payload.([]byte); ok { + var req map[string]any + if err = json.Unmarshal(b, &req); err != nil { + return + } + meta, err = collection.ReplaceDocument(ctx, key, req) + return + } + meta, err = collection.ReplaceDocument(ctx, key, payload) + return +} diff --git a/internal/cmdb/edge/cmdb.go b/internal/cmdb/edge/cmdb.go new file mode 100644 index 0000000..35ae0d1 --- /dev/null +++ b/internal/cmdb/edge/cmdb.go @@ -0,0 +1,23 @@ +// Copyright 2022 Listware + +package edge + +import ( + "context" + + "git.fg-tech.ru/listware/cmdb/internal/arangodb" + "git.fg-tech.ru/listware/proto/sdk/pbcmdb" + driver "github.com/arangodb/go-driver" +) + +type Server struct { + pbcmdb.UnimplementedEdgeServiceServer + + client driver.Client +} + +func New(ctx context.Context) (s *Server, err error) { + s = &Server{} + s.client, err = arangodb.Connect() + return +} diff --git a/internal/cmdb/edge/create.go b/internal/cmdb/edge/create.go new file mode 100644 index 0000000..524711a --- /dev/null +++ b/internal/cmdb/edge/create.go @@ -0,0 +1,26 @@ +// Copyright 2022 Listware + +package edge + +import ( + "context" + "encoding/json" + + "git.fg-tech.ru/listware/cmdb/internal/arangodb/edge" + 
"git.fg-tech.ru/listware/proto/sdk/pbcmdb" +) + +func (s *Server) Create(ctx context.Context, request *pbcmdb.Request) (response *pbcmdb.Response, err error) { + response = &pbcmdb.Response{} + meta, resp, err := edge.Create(ctx, s.client, request.GetCollection(), request.GetPayload()) + if err != nil { + return + } + response.Meta = &pbcmdb.Meta{ + Key: meta.Key, + Id: meta.ID.String(), + Rev: meta.Rev, + } + response.Payload, err = json.Marshal(resp) + return +} diff --git a/internal/cmdb/edge/read.go b/internal/cmdb/edge/read.go new file mode 100644 index 0000000..b983597 --- /dev/null +++ b/internal/cmdb/edge/read.go @@ -0,0 +1,27 @@ +// Copyright 2022 Listware + +package edge + +import ( + "context" + "encoding/json" + + "git.fg-tech.ru/listware/cmdb/internal/arangodb/edge" + "git.fg-tech.ru/listware/proto/sdk/pbcmdb" +) + +func (s *Server) Read(ctx context.Context, request *pbcmdb.Request) (response *pbcmdb.Response, err error) { + response = &pbcmdb.Response{} + meta, resp, err := edge.Read(ctx, s.client, request.GetCollection(), request.GetKey()) + if err != nil { + return + } + response.Meta = &pbcmdb.Meta{ + Key: meta.Key, + Id: meta.ID.String(), + Rev: meta.Rev, + } + + response.Payload, err = json.Marshal(resp) + return +} diff --git a/internal/cmdb/edge/remove.go b/internal/cmdb/edge/remove.go new file mode 100644 index 0000000..2f2c051 --- /dev/null +++ b/internal/cmdb/edge/remove.go @@ -0,0 +1,26 @@ +// Copyright 2022 Listware + +package edge + +import ( + "context" + + "git.fg-tech.ru/listware/cmdb/internal/arangodb/edge" + "git.fg-tech.ru/listware/proto/sdk/pbcmdb" +) + +func (s *Server) Remove(ctx context.Context, request *pbcmdb.Request) (response *pbcmdb.Response, err error) { + response = &pbcmdb.Response{} + + meta, err := edge.Remove(ctx, s.client, request.GetCollection(), request.GetKey()) + if err != nil { + return + } + + response.Meta = &pbcmdb.Meta{ + Key: meta.Key, + Id: meta.ID.String(), + Rev: meta.Rev, + } + return +} diff --git 
a/internal/cmdb/edge/update.go b/internal/cmdb/edge/update.go new file mode 100644 index 0000000..fe48698 --- /dev/null +++ b/internal/cmdb/edge/update.go @@ -0,0 +1,26 @@ +// Copyright 2022 Listware + +package edge + +import ( + "context" + "encoding/json" + + "git.fg-tech.ru/listware/cmdb/internal/arangodb/edge" + "git.fg-tech.ru/listware/proto/sdk/pbcmdb" +) + +func (s *Server) Update(ctx context.Context, request *pbcmdb.Request) (response *pbcmdb.Response, err error) { + response = &pbcmdb.Response{} + meta, resp, err := edge.Update(ctx, s.client, request.GetCollection(), request.GetKey(), request.GetPayload()) + if err != nil { + return + } + response.Meta = &pbcmdb.Meta{ + Key: meta.Key, + Id: meta.ID.String(), + Rev: meta.Rev, + } + response.Payload, err = json.Marshal(resp) + return +} diff --git a/internal/cmdb/finder/finder.go b/internal/cmdb/finder/finder.go new file mode 100644 index 0000000..1148d91 --- /dev/null +++ b/internal/cmdb/finder/finder.go @@ -0,0 +1,23 @@ +// Copyright 2022 Listware + +package finder + +import ( + "context" + + "git.fg-tech.ru/listware/cmdb/internal/arangodb" + "git.fg-tech.ru/listware/proto/sdk/pbcmdb/pbfinder" + driver "github.com/arangodb/go-driver" +) + +type Server struct { + pbfinder.UnimplementedFinderServiceServer + + client driver.Client +} + +func New(ctx context.Context) (s *Server, err error) { + s = &Server{} + s.client, err = arangodb.Connect() + return +} diff --git a/internal/cmdb/finder/links.go b/internal/cmdb/finder/links.go new file mode 100644 index 0000000..070f49d --- /dev/null +++ b/internal/cmdb/finder/links.go @@ -0,0 +1,58 @@ +// Copyright 2022 Listware + +package finder + +import ( + "context" + "encoding/json" + "fmt" + "strings" + + "git.fg-tech.ru/listware/cmdb/internal/arangodb/query" + "git.fg-tech.ru/listware/proto/sdk/pbcmdb" + "git.fg-tech.ru/listware/proto/sdk/pbcmdb/pbfinder" +) + +func (s *Server) Links(ctx context.Context, request *pbfinder.Request) (response *pbfinder.Response, err 
error) { + response = &pbfinder.Response{} + + vars := make(map[string]any) + var args []string + + if request.From != "" { + args = append(args, "t._from == @from") + vars["from"] = request.From + } + if request.To != "" { + args = append(args, "t._to == @to") + vars["to"] = request.To + } + if request.Name != "" { + args = append(args, "t._name == @name") + vars["name"] = request.Name + } + + metas, resp, err := query.Query(ctx, s.client, fmt.Sprintf("FOR t IN links FILTER %s RETURN t", strings.Join(args, " && ")), vars) + if err != nil { + return + } + + for i, meta := range metas { + r := &pbcmdb.Response{} + + r.Meta = &pbcmdb.Meta{ + Key: meta.Key, + Id: meta.ID.String(), + Rev: meta.Rev, + } + + r.Payload, err = json.Marshal(resp[i]) + if err != nil { + return + } + response.Links = append(response.Links, r) + + } + + return +} diff --git a/internal/cmdb/qdsl/aql.go b/internal/cmdb/qdsl/aql.go new file mode 100644 index 0000000..7910c02 --- /dev/null +++ b/internal/cmdb/qdsl/aql.go @@ -0,0 +1,378 @@ +// Copyright 2022 Listware + +package qdsl + +import ( + "fmt" + "math" + "strconv" + "strings" + + "git.fg-tech.ru/listware/proto/sdk/pbcmdb/pbqdsl" +) + +var ( + pathLinks = []string{"links"} + rootName = "root" +) + +type refSpec struct { + specialLevels, queryLevel int + hasCatchall bool +} +type refPath struct { + v, e string +} + +func omitRoot(qdsl Path) Path { + rootBlock := &Block{Node: &Node{Name: &rootName}} + + max := len(qdsl) - 1 + if max == -1 { + return append(qdsl, rootBlock) + } + + last := qdsl[max] + if last.Node == nil { + return append(qdsl, rootBlock) + } + + if last.Node.Name == nil { + return append(qdsl, rootBlock) + } + + if *last.Node.Name != rootName { + return append(qdsl, rootBlock) + } + + return qdsl +} + +func pathToAql(element *Element, options *pbqdsl.Options) { + path := omitRoot(element.Path) + + max := len(path) - 1 + + root := path[max] + + qdsl := path[:max] + + length := len(qdsl) + + hasPath := length > 0 + + // <... 
+ hasCatchallValue := 0 + + hasCatchall := hasPath && !!qdsl[0].Catchall + if hasCatchall { + hasCatchallValue = 1 + } + + specialLevels := getSpecialDepth(options) + + queryLevel := int(math.Max(float64(length-hasCatchallValue-specialLevels), 0)) + queryLevelMax := queryLevel + + qdslBase := make(Path, len(qdsl)) + copy(qdslBase, qdsl) + + if hasCatchall { + queryLevelMax = int(math.Max(float64(queryLevel), 10)) + + qdslBase := make(Path, len(qdsl)-1) + copy(qdslBase, qdsl[1:]) + } + + var aqlFraments []string + + // объект, линк, путь на уровень 1..1 + // начальный объект + // коллекиця линков и/или граф + aqlFraments = append(aqlFraments, fmt.Sprintf("for object, link, path in %d..%d outbound 'system/%s' graph system\n", queryLevel, queryLevelMax, *root.Node.Name)) + + // непонятная магия, которая вроде бы не влияет на результат, но добавляет задержку + // aqlFraments = append(aqlFraments, autoRestrict("path", false)) + + // protect against missing object + aqlFraments = append(aqlFraments, "filter object\n") + + reverse(qdslBase) + + for i, level := range qdslBase { + res := convertLevelToAQL(level, i, refSpec{specialLevels, queryLevel, hasCatchall}) + aqlFraments = append(aqlFraments, res) + } + + if hasCatchall { + res := convertLevelToAQL(qdsl[0], -1, refSpec{specialLevels, queryLevel, hasCatchall}) + aqlFraments = append(aqlFraments, res) + } + + querySearch := assembleFrags(aqlFraments, "\n") + + var returnList []string + + if options.Object { + returnList = append(returnList, "object: object") + } + + if options.Id { + returnList = append(returnList, "id: object._id") + } + + if options.Key { + returnList = append(returnList, "key: object._key") + } + + if options.Link { + returnList = append(returnList, "link: link") + } + + if options.LinkId { + returnList = append(returnList, "link_id: link._id") + } + + if options.Name { + returnList = append(returnList, "name: link._name") + } + + if options.Type { + returnList = append(returnList, "type: 
link._type") + } + + if options.Path { + returnList = append(returnList, "path: path") + } + + returnExpr := assembleFrags(returnList, ",\n ") + element.Query = fmt.Sprintf(` +%s + +return { + %s +} + `, querySearch, returnExpr) +} + +func getSpecialDepth(options *pbqdsl.Options) int { + return 0 +} + +func autoRestrict(pathVar string, reverse bool) string { + var reverseStr = "iv" + if reverse { + reverseStr = "0, iv" + } + return fmt.Sprintf( + ` +filter (for iv in 0..(length(%s.vertices) - 1) + let r = %s.vertices[iv]._meta.restrict + return iv < 0 || !r || (for e in slice(%s.edges, %s) + return parse_identifier(e).collection) all in r + ) all == true`, pathVar, pathVar, pathVar, reverseStr) +} + +func assembleFrags(frags []string, separator string) string { + // FIXME remove '\n' from array + return strings.Join(frags, separator) +} + +func convertLevelToAQL(block *Block, i int, refSpec refSpec) string { + refs := getRefPath(i, refSpec) + + var nameFilter string + + if block.Node != nil { + names := enrollNodeName(block.Node) + var condition string + if len(names) == 1 { + condition = fmt.Sprintf("== '%s'", names[0]) + } else { + condition = fmt.Sprintf("in [%s]", strings.Join(names, ",")) + } + nameFilter = fmt.Sprintf("filter %s._name %s\n", refs.e, condition) + } + var attrFilter []string + if block.Filter != nil { + for _, filter := range block.Filter.Filter { + var items []string + for _, exp := range filter { + prefix := exp.Expression.Variable[0] + propPath := exp.Expression.Variable[1:] + + var vertexPivot bool + if prefix == "@" || prefix == "object" { + vertexPivot = true + } + + var edgePivot bool + if prefix == "$" || prefix == "link" { + edgePivot = true + } + + pivot := prefix + if vertexPivot { + pivot = refs.v + } else if edgePivot { + pivot = refs.e + } + + var propAccess string + + for _, value := range propPath { + propAccess = fmt.Sprintf("%s.%s", propAccess, value) + } + + var result []string + result = append(result, fmt.Sprintf("%s%s", 
pivot, propAccess)) + + result = append(result, exp.Expression.Op) + result = append(result, exp.Expression.Evaluation) + result = append(result, exp.BoolOp) + + items = append(items, assembleFrags(result, " ")) + } + + attrFilter = append(attrFilter, "filter "+assembleFrags(items, " ")) + } + } + + return nameFilter + assembleFrags(attrFilter, "\n") +} + +func single(pathVar string, i int) refPath { + j := i + //if i > 0 { + j = i + 1 + //} + return refPath{ + v: fmt.Sprintf("%s.vertices[%d]", pathVar, j), + e: fmt.Sprintf("%s.edges[%d]", pathVar, i), + } +} + +func getRefPath(i int, refSpec refSpec) refPath { + if refSpec.specialLevels == 0 || (i >= 0 && i < refSpec.queryLevel) { + return single("path", i) + } + + if !refSpec.hasCatchall && i >= refSpec.queryLevel { + return single("specialPath", i-refSpec.queryLevel) + } + + if i < 0 { + if i < -refSpec.specialLevels { + return single("path", i+refSpec.specialLevels) + } else { + return single("specialPath", i) + } + } + // includeDescendants && specialLevels > 0 && i >= queryLevel + return refPath{ + v: fmt.Sprintf("(path.vertices[%d] || specialPath.vertices[%d - length(path.vertices)])", i+1, i+1), + e: fmt.Sprintf("(path.edges[%d] || specialPath.edges[%d - length(path.edges)])", i, i), + } +} + +func enrollNodeName(node *Node) (names []string) { + if node.Ranges == nil { + names = append(names, *node.Name) + return + } + + for _, ranges := range node.Ranges { + from := *ranges.From + + if ranges.To == nil { + if node.Name == nil { + names = append(names, from) + } else { + names = append(names, *node.Name+from) + } + } else { + to := *ranges.To + + var isPadded bool + var fixedSize int + + if from[0] == '0' { + isPadded = true + fixedSize = len(from) + } + + fromInt, _ := strconv.Atoi(from) + toInt, _ := strconv.Atoi(to) + + for i := fromInt; i <= toInt; i++ { + if isPadded { + names = append(names, fmt.Sprintf("\"%s%s\"", *node.Name, padZeroes(i, fixedSize))) + } else { + names = append(names, 
fmt.Sprintf("\"%s%d\"", *node.Name, i)) + } + + } + } + + } + return +} + +func padZeroes(i, paddedSize int) (s string) { + s = fmt.Sprint(i) + + for len(s) < paddedSize { + s = "0" + s + } + + return +} + +func getQuerySort(qdsl *Block) (s string) { + if qdsl == nil { + return + } + + return +} + +/* +function getQueryLimit(qdsl) { + const l = (qdsl && qdsl.limits) ? qdsl.limits : null; + return l + ? `limit ${l.offset}, ${l.limit}` + : ''; +} + +function getQuerySort(qdsl) { + const sortStr = (!qdsl || !qdsl.sort || qdsl.sort.length === 0) + ? null + : qdsl.sort.reduce((str, el) => { + let currentPrefix; + const [prefix, ...rest] = el.field; + + switch(prefix) { + case '@': + case 'object': + currentPrefix = 'rObject'; + break; + case '$': + case 'link': + currentPrefix = 'rLink'; + break; + case 'path': + currentPrefix = 'rPath'; + break; + default: + currentPrefix = prefix; + } + + const fieldStr = [currentPrefix, ...rest].join('.'); + + return str + ? `${str}, ${fieldStr} ${el.direction}` + : `sort ${fieldStr} ${el.direction}`; + }, null); + + return sortStr ? 
sortStr + '\n' : ''; +}*/ diff --git a/internal/cmdb/qdsl/aql_test.go b/internal/cmdb/qdsl/aql_test.go new file mode 100644 index 0000000..dcea845 --- /dev/null +++ b/internal/cmdb/qdsl/aql_test.go @@ -0,0 +1,27 @@ +// Copyright 2022 Listware + +package qdsl + +import ( + "testing" + + "git.fg-tech.ru/listware/proto/sdk/pbcmdb/pbqdsl" +) + +func TestPathToAql(t *testing.T) { + query := &pbqdsl.Query{ + Query: "*[?$._name == 'init'?].exmt.functions.objects", + Options: &pbqdsl.Options{ + LinkId: true, + Object: true, + }, + } + elements, err := parse(query) + if err != nil { + t.Fatal(err) + } + + for _, element := range elements { + t.Log(element.Query) + } +} diff --git a/internal/cmdb/qdsl/lexer/lexer.go b/internal/cmdb/qdsl/lexer/lexer.go new file mode 100644 index 0000000..80b4f9f --- /dev/null +++ b/internal/cmdb/qdsl/lexer/lexer.go @@ -0,0 +1,11 @@ +// Copyright 2022 Listware + +package lexer + +import ( + "github.com/bbuck/go-lexer" +) + +type QdslLexer struct { + lexer.L +} diff --git a/internal/cmdb/qdsl/link.go b/internal/cmdb/qdsl/link.go new file mode 100644 index 0000000..3011515 --- /dev/null +++ b/internal/cmdb/qdsl/link.go @@ -0,0 +1,5 @@ +// Copyright 2022 Listware + +package qdsl + +type Link map[string]any diff --git a/internal/cmdb/qdsl/object.go b/internal/cmdb/qdsl/object.go new file mode 100644 index 0000000..1cafc2b --- /dev/null +++ b/internal/cmdb/qdsl/object.go @@ -0,0 +1,5 @@ +// Copyright 2022 Listware + +package qdsl + +type Object map[string]any diff --git a/internal/cmdb/qdsl/qdsl.go b/internal/cmdb/qdsl/qdsl.go new file mode 100644 index 0000000..43d3a84 --- /dev/null +++ b/internal/cmdb/qdsl/qdsl.go @@ -0,0 +1,62 @@ +// Copyright 2022 Listware + +package qdsl + +import ( + "context" + "sort" + "strings" + + "git.fg-tech.ru/listware/cmdb/internal/arangodb" + "git.fg-tech.ru/listware/cmdb/pkg/cmdb/documents" + "git.fg-tech.ru/listware/proto/sdk/pbcmdb/pbqdsl" +) + +func parse(query *pbqdsl.Query) (elements []*Element, err error) { + 
got, err := ParseReader("", strings.NewReader(query.GetQuery())) + if err != nil { + return + } + + if query.GetOptions() == nil { + query.Options = &pbqdsl.Options{} + } + + normalizeOptions(query.GetOptions()) + + for _, i := range got.([]any) { + if element, ok := i.(*Element); ok { + pathToAql(element, query.GetOptions()) + elements = append(elements, element) + } + } + + sort.Slice(elements, func(i, j int) bool { + return elements[i].Action > elements[j].Action + }) + return +} + +func (s *Server) query(ctx context.Context, element *Element) (nodes documents.Nodes, err error) { + db, err := arangodb.Database(ctx, s.client) + if err != nil { + return + } + cursor, err := db.Query(ctx, element.Query, nil) + if err != nil { + return + } + defer cursor.Close() + + for cursor.HasMore() { + var node documents.Node + + if _, err = cursor.ReadDocument(ctx, &node); err != nil { + return + } + + nodes.Add(&node) + } + + return +} diff --git a/internal/cmdb/qdsl/qdsl_test.go b/internal/cmdb/qdsl/qdsl_test.go new file mode 100644 index 0000000..17c1ad9 --- /dev/null +++ b/internal/cmdb/qdsl/qdsl_test.go @@ -0,0 +1,24 @@ +// Copyright 2022 Listware + +package qdsl + +import ( + "testing" + + "git.fg-tech.ru/listware/proto/sdk/pbcmdb/pbqdsl" +) + +func TestQdslToAql(t *testing.T) { + query := &pbqdsl.Query{ + Query: "*[?$._from == '47e98408-3d47-4730-ba94-c2314ce1982e'?]", + Options: &pbqdsl.Options{}, + } + elements, err := parse(query) + if err != nil { + t.Fatal(err) + } + + for _, element := range elements { + t.Log(element.Query) + } +} diff --git a/internal/cmdb/qdsl/qdslpeg.go b/internal/cmdb/qdsl/qdslpeg.go new file mode 100644 index 0000000..93a5e3b --- /dev/null +++ b/internal/cmdb/qdsl/qdslpeg.go @@ -0,0 +1,3392 @@ +// Code generated by pigeon; DO NOT EDIT. 
+ +package qdsl + +import ( + "bytes" + "errors" + "fmt" + "io" + "io/ioutil" + "math" + "os" + "sort" + "strconv" + "strings" + "sync" + "unicode" + "unicode/utf8" +) + +type Limits struct { + Limit int `json:"limit"` + Offset int `json:"offset"` +} + +type Limit struct { + Sort any `json:"sort"` + Limits *Limits `json:"limits"` +} + +type Direction struct { + Direction string `json:"direction"` + Field any `json:"field"` +} + +type Variable struct { + Variable []string `json:"variable"` + Op string `json:"op"` + Evaluation string `json:"evaluation"` +} + +type Expression struct { + Expression *Variable `json:"expression"` + BoolOp string `json:"boolOp"` +} + +type Filter struct { + Filter [][]*Expression `json:"filter"` + + // ...limits +} + +type Range struct { + From *string `json:"from"` + To *string `json:"to"` +} + +type Node struct { + Name *string `json:"name"` + Ranges []*Range `json:"ranges"` +} + +type Block struct { + *Filter `json:"filter"` + Any bool `json:"any"` + Catchall bool `json:"catchall"` + Node *Node `json:"node"` + IsGroup bool `json:"isGroup"` + Children []*Element `json:"children"` +} + +type Path []*Block + +type Element struct { + Action string `json:"action"` + Path Path `json:"path"` + RootExpand bool `json:"rootExpand"` + Query string `json:"query"` +} + +func toString(i interface{}) string { + if i == nil { + return "" + } + switch i.(type) { + case string: + return i.(string) + default: + return string(i.([]byte)) + } +} + +func arrayToStringArray(arr interface{}) (result []string) { + for _, i := range arr.([]interface{}) { + result = append(result, toString(i)) + } + return +} + +var g = &grammar{ + rules: []*rule{ + { + name: "start", + pos: position{line: 85, col: 1, offset: 1518}, + expr: &ruleRefExpr{ + pos: position{line: 85, col: 9, offset: 1526}, + name: "QUERY", + }, + }, + { + name: "QUERY", + pos: position{line: 87, col: 1, offset: 1533}, + expr: &actionExpr{ + pos: position{line: 87, col: 9, offset: 1541}, + run: 
(*parser).callonQUERY1, + expr: &seqExpr{ + pos: position{line: 87, col: 9, offset: 1541}, + exprs: []interface{}{ + &labeledExpr{ + pos: position{line: 87, col: 9, offset: 1541}, + label: "base", + expr: &zeroOrMoreExpr{ + pos: position{line: 87, col: 14, offset: 1546}, + expr: &actionExpr{ + pos: position{line: 87, col: 15, offset: 1547}, + run: (*parser).callonQUERY5, + expr: &seqExpr{ + pos: position{line: 87, col: 15, offset: 1547}, + exprs: []interface{}{ + &labeledExpr{ + pos: position{line: 87, col: 15, offset: 1547}, + label: "e", + expr: &ruleRefExpr{ + pos: position{line: 87, col: 17, offset: 1549}, + name: "ELEMENT", + }, + }, + &choiceExpr{ + pos: position{line: 87, col: 26, offset: 1558}, + alternatives: []interface{}{ + &seqExpr{ + pos: position{line: 87, col: 26, offset: 1558}, + exprs: []interface{}{ + &ruleRefExpr{ + pos: position{line: 87, col: 26, offset: 1558}, + name: "__", + }, + &litMatcher{ + pos: position{line: 87, col: 29, offset: 1561}, + val: ",", + ignoreCase: false, + want: "\",\"", + }, + &ruleRefExpr{ + pos: position{line: 87, col: 33, offset: 1565}, + name: "__", + }, + }, + }, + &oneOrMoreExpr{ + pos: position{line: 87, col: 38, offset: 1570}, + expr: &charClassMatcher{ + pos: position{line: 87, col: 38, offset: 1570}, + val: "[ ]", + chars: []rune{' '}, + ignoreCase: false, + inverted: false, + }, + }, + }, + }, + }, + }, + }, + }, + }, + &labeledExpr{ + pos: position{line: 87, col: 64, offset: 1596}, + label: "last", + expr: &ruleRefExpr{ + pos: position{line: 87, col: 69, offset: 1601}, + name: "ELEMENT", + }, + }, + }, + }, + }, + }, + { + name: "ELEMENT", + pos: position{line: 91, col: 1, offset: 1654}, + expr: &actionExpr{ + pos: position{line: 91, col: 11, offset: 1664}, + run: (*parser).callonELEMENT1, + expr: &seqExpr{ + pos: position{line: 91, col: 11, offset: 1664}, + exprs: []interface{}{ + &labeledExpr{ + pos: position{line: 91, col: 11, offset: 1664}, + label: "action", + expr: &ruleRefExpr{ + pos: position{line: 91, 
col: 18, offset: 1671}, + name: "UNARY", + }, + }, + &labeledExpr{ + pos: position{line: 91, col: 24, offset: 1677}, + label: "levels", + expr: &zeroOrMoreExpr{ + pos: position{line: 91, col: 31, offset: 1684}, + expr: &actionExpr{ + pos: position{line: 91, col: 32, offset: 1685}, + run: (*parser).callonELEMENT7, + expr: &seqExpr{ + pos: position{line: 91, col: 32, offset: 1685}, + exprs: []interface{}{ + &labeledExpr{ + pos: position{line: 91, col: 32, offset: 1685}, + label: "l", + expr: &ruleRefExpr{ + pos: position{line: 91, col: 34, offset: 1687}, + name: "LEVEL", + }, + }, + &litMatcher{ + pos: position{line: 91, col: 40, offset: 1693}, + val: ".", + ignoreCase: false, + want: "\".\"", + }, + }, + }, + }, + }, + }, + &labeledExpr{ + pos: position{line: 91, col: 64, offset: 1717}, + label: "last", + expr: &choiceExpr{ + pos: position{line: 91, col: 70, offset: 1723}, + alternatives: []interface{}{ + &ruleRefExpr{ + pos: position{line: 91, col: 70, offset: 1723}, + name: "LEVEL", + }, + &litMatcher{ + pos: position{line: 91, col: 78, offset: 1731}, + val: "_", + ignoreCase: false, + want: "\"_\"", + }, + }, + }, + }, + }, + }, + }, + }, + { + name: "UNARY", + pos: position{line: 107, col: 1, offset: 2103}, + expr: &actionExpr{ + pos: position{line: 107, col: 9, offset: 2111}, + run: (*parser).callonUNARY1, + expr: &labeledExpr{ + pos: position{line: 107, col: 9, offset: 2111}, + label: "op", + expr: &zeroOrOneExpr{ + pos: position{line: 107, col: 12, offset: 2114}, + expr: &litMatcher{ + pos: position{line: 107, col: 13, offset: 2115}, + val: "-", + ignoreCase: false, + want: "\"-\"", + }, + }, + }, + }, + }, + { + name: "NODE", + pos: position{line: 114, col: 1, offset: 2190}, + expr: &choiceExpr{ + pos: position{line: 114, col: 8, offset: 2197}, + alternatives: []interface{}{ + &actionExpr{ + pos: position{line: 114, col: 8, offset: 2197}, + run: (*parser).callonNODE2, + expr: &seqExpr{ + pos: position{line: 114, col: 8, offset: 2197}, + exprs: []interface{}{ 
+ &labeledExpr{ + pos: position{line: 114, col: 8, offset: 2197}, + label: "nodename", + expr: &ruleRefExpr{ + pos: position{line: 114, col: 17, offset: 2206}, + name: "NODENAME", + }, + }, + &labeledExpr{ + pos: position{line: 114, col: 26, offset: 2215}, + label: "ranges", + expr: &zeroOrOneExpr{ + pos: position{line: 114, col: 33, offset: 2222}, + expr: &ruleRefExpr{ + pos: position{line: 114, col: 33, offset: 2222}, + name: "NODERANGE", + }, + }, + }, + }, + }, + }, + &actionExpr{ + pos: position{line: 124, col: 3, offset: 2440}, + run: (*parser).callonNODE9, + expr: &labeledExpr{ + pos: position{line: 124, col: 3, offset: 2440}, + label: "ranges", + expr: &ruleRefExpr{ + pos: position{line: 124, col: 10, offset: 2447}, + name: "NODERANGE", + }, + }, + }, + }, + }, + }, + { + name: "NODENAME", + pos: position{line: 134, col: 1, offset: 2635}, + expr: &labeledExpr{ + pos: position{line: 134, col: 12, offset: 2646}, + label: "nodename", + expr: &actionExpr{ + pos: position{line: 134, col: 22, offset: 2656}, + run: (*parser).callonNODENAME2, + expr: &seqExpr{ + pos: position{line: 134, col: 22, offset: 2656}, + exprs: []interface{}{ + &labeledExpr{ + pos: position{line: 134, col: 22, offset: 2656}, + label: "head", + expr: &charClassMatcher{ + pos: position{line: 134, col: 27, offset: 2661}, + val: "[a-z0-9]", + ranges: []rune{'a', 'z', '0', '9'}, + ignoreCase: false, + inverted: false, + }, + }, + &labeledExpr{ + pos: position{line: 134, col: 36, offset: 2670}, + label: "tail", + expr: &zeroOrMoreExpr{ + pos: position{line: 134, col: 41, offset: 2675}, + expr: &charClassMatcher{ + pos: position{line: 134, col: 41, offset: 2675}, + val: "[a-z_0-9\\\\-]i", + chars: []rune{'_', '\\', '-'}, + ranges: []rune{'a', 'z', '0', '9'}, + ignoreCase: true, + inverted: false, + }, + }, + }, + }, + }, + }, + }, + }, + { + name: "NODERANGE", + pos: position{line: 138, col: 1, offset: 2770}, + expr: &actionExpr{ + pos: position{line: 138, col: 13, offset: 2782}, + run: 
(*parser).callonNODERANGE1, + expr: &seqExpr{ + pos: position{line: 138, col: 13, offset: 2782}, + exprs: []interface{}{ + &litMatcher{ + pos: position{line: 138, col: 13, offset: 2782}, + val: "[", + ignoreCase: false, + want: "\"[\"", + }, + &labeledExpr{ + pos: position{line: 138, col: 17, offset: 2786}, + label: "ranges", + expr: &oneOrMoreExpr{ + pos: position{line: 138, col: 24, offset: 2793}, + expr: &actionExpr{ + pos: position{line: 138, col: 25, offset: 2794}, + run: (*parser).callonNODERANGE6, + expr: &seqExpr{ + pos: position{line: 138, col: 25, offset: 2794}, + exprs: []interface{}{ + &labeledExpr{ + pos: position{line: 138, col: 25, offset: 2794}, + label: "from", + expr: &ruleRefExpr{ + pos: position{line: 138, col: 30, offset: 2799}, + name: "INT", + }, + }, + &labeledExpr{ + pos: position{line: 138, col: 34, offset: 2803}, + label: "to", + expr: &zeroOrOneExpr{ + pos: position{line: 138, col: 37, offset: 2806}, + expr: &actionExpr{ + pos: position{line: 138, col: 38, offset: 2807}, + run: (*parser).callonNODERANGE12, + expr: &seqExpr{ + pos: position{line: 138, col: 38, offset: 2807}, + exprs: []interface{}{ + &litMatcher{ + pos: position{line: 138, col: 38, offset: 2807}, + val: "-", + ignoreCase: false, + want: "\"-\"", + }, + &labeledExpr{ + pos: position{line: 138, col: 42, offset: 2811}, + label: "to", + expr: &ruleRefExpr{ + pos: position{line: 138, col: 45, offset: 2814}, + name: "INT", + }, + }, + }, + }, + }, + }, + }, + &ruleRefExpr{ + pos: position{line: 140, col: 5, offset: 2839}, + name: "__", + }, + &zeroOrOneExpr{ + pos: position{line: 140, col: 8, offset: 2842}, + expr: &litMatcher{ + pos: position{line: 140, col: 8, offset: 2842}, + val: ",", + ignoreCase: false, + want: "\",\"", + }, + }, + &ruleRefExpr{ + pos: position{line: 140, col: 13, offset: 2847}, + name: "__", + }, + }, + }, + }, + }, + }, + &litMatcher{ + pos: position{line: 144, col: 5, offset: 2962}, + val: "]", + ignoreCase: false, + want: "\"]\"", + }, + }, + }, + }, 
+ }, + { + name: "INT", + pos: position{line: 148, col: 1, offset: 2991}, + expr: &actionExpr{ + pos: position{line: 148, col: 7, offset: 2997}, + run: (*parser).callonINT1, + expr: &labeledExpr{ + pos: position{line: 148, col: 7, offset: 2997}, + label: "num", + expr: &oneOrMoreExpr{ + pos: position{line: 148, col: 11, offset: 3001}, + expr: &charClassMatcher{ + pos: position{line: 148, col: 11, offset: 3001}, + val: "[0-9a-z]i", + ranges: []rune{'0', '9', 'a', 'z'}, + ignoreCase: true, + inverted: false, + }, + }, + }, + }, + }, + { + name: "LEVEL", + pos: position{line: 152, col: 1, offset: 3072}, + expr: &choiceExpr{ + pos: position{line: 152, col: 9, offset: 3080}, + alternatives: []interface{}{ + &actionExpr{ + pos: position{line: 152, col: 9, offset: 3080}, + run: (*parser).callonLEVEL2, + expr: &seqExpr{ + pos: position{line: 152, col: 9, offset: 3080}, + exprs: []interface{}{ + &litMatcher{ + pos: position{line: 152, col: 9, offset: 3080}, + val: "(", + ignoreCase: false, + want: "\"(\"", + }, + &labeledExpr{ + pos: position{line: 152, col: 13, offset: 3084}, + label: "children", + expr: &ruleRefExpr{ + pos: position{line: 152, col: 22, offset: 3093}, + name: "QUERY", + }, + }, + &litMatcher{ + pos: position{line: 152, col: 28, offset: 3099}, + val: ")", + ignoreCase: false, + want: "\")\"", + }, + }, + }, + }, + &actionExpr{ + pos: position{line: 161, col: 3, offset: 3293}, + run: (*parser).callonLEVEL8, + expr: &seqExpr{ + pos: position{line: 161, col: 3, offset: 3293}, + exprs: []interface{}{ + &litMatcher{ + pos: position{line: 161, col: 3, offset: 3293}, + val: "<", + ignoreCase: false, + want: "\"<\"", + }, + &labeledExpr{ + pos: position{line: 161, col: 7, offset: 3297}, + label: "block", + expr: &ruleRefExpr{ + pos: position{line: 161, col: 13, offset: 3303}, + name: "BLOCK", + }, + }, + }, + }, + }, + &actionExpr{ + pos: position{line: 164, col: 3, offset: 3387}, + run: (*parser).callonLEVEL13, + expr: &seqExpr{ + pos: position{line: 164, col: 3, 
offset: 3387}, + exprs: []interface{}{ + &litMatcher{ + pos: position{line: 164, col: 3, offset: 3387}, + val: "*", + ignoreCase: false, + want: "\"*\"", + }, + &labeledExpr{ + pos: position{line: 164, col: 7, offset: 3391}, + label: "block", + expr: &ruleRefExpr{ + pos: position{line: 164, col: 13, offset: 3397}, + name: "BLOCK", + }, + }, + }, + }, + }, + &actionExpr{ + pos: position{line: 167, col: 3, offset: 3464}, + run: (*parser).callonLEVEL18, + expr: &seqExpr{ + pos: position{line: 167, col: 3, offset: 3464}, + exprs: []interface{}{ + &labeledExpr{ + pos: position{line: 167, col: 3, offset: 3464}, + label: "node", + expr: &ruleRefExpr{ + pos: position{line: 167, col: 8, offset: 3469}, + name: "NODE", + }, + }, + &labeledExpr{ + pos: position{line: 167, col: 13, offset: 3474}, + label: "block", + expr: &ruleRefExpr{ + pos: position{line: 167, col: 19, offset: 3480}, + name: "BLOCK", + }, + }, + }, + }, + }, + }, + }, + }, + { + name: "BLOCK", + pos: position{line: 171, col: 1, offset: 3567}, + expr: &choiceExpr{ + pos: position{line: 171, col: 9, offset: 3575}, + alternatives: []interface{}{ + &actionExpr{ + pos: position{line: 171, col: 9, offset: 3575}, + run: (*parser).callonBLOCK2, + expr: &seqExpr{ + pos: position{line: 171, col: 9, offset: 3575}, + exprs: []interface{}{ + &labeledExpr{ + pos: position{line: 171, col: 9, offset: 3575}, + label: "limits", + expr: &ruleRefExpr{ + pos: position{line: 171, col: 16, offset: 3582}, + name: "LIMIT", + }, + }, + &labeledExpr{ + pos: position{line: 171, col: 22, offset: 3588}, + label: "filter", + expr: &ruleRefExpr{ + pos: position{line: 171, col: 29, offset: 3595}, + name: "SEARCH", + }, + }, + }, + }, + }, + &actionExpr{ + pos: position{line: 183, col: 3, offset: 3871}, + run: (*parser).callonBLOCK8, + expr: &seqExpr{ + pos: position{line: 183, col: 3, offset: 3871}, + exprs: []interface{}{ + &labeledExpr{ + pos: position{line: 183, col: 3, offset: 3871}, + label: "filter", + expr: &ruleRefExpr{ + pos: 
position{line: 183, col: 10, offset: 3878}, + name: "SEARCH", + }, + }, + &labeledExpr{ + pos: position{line: 183, col: 17, offset: 3885}, + label: "limits", + expr: &ruleRefExpr{ + pos: position{line: 183, col: 24, offset: 3892}, + name: "LIMIT", + }, + }, + }, + }, + }, + &actionExpr{ + pos: position{line: 195, col: 3, offset: 4166}, + run: (*parser).callonBLOCK14, + expr: &labeledExpr{ + pos: position{line: 195, col: 3, offset: 4166}, + label: "filter", + expr: &ruleRefExpr{ + pos: position{line: 195, col: 10, offset: 4173}, + name: "SEARCH", + }, + }, + }, + }, + }, + }, + { + name: "LIMIT", + pos: position{line: 208, col: 1, offset: 4447}, + expr: &choiceExpr{ + pos: position{line: 208, col: 9, offset: 4455}, + alternatives: []interface{}{ + &actionExpr{ + pos: position{line: 208, col: 9, offset: 4455}, + run: (*parser).callonLIMIT2, + expr: &seqExpr{ + pos: position{line: 208, col: 9, offset: 4455}, + exprs: []interface{}{ + &litMatcher{ + pos: position{line: 208, col: 9, offset: 4455}, + val: "{", + ignoreCase: false, + want: "\"{\"", + }, + &labeledExpr{ + pos: position{line: 208, col: 13, offset: 4459}, + label: "sort", + expr: &ruleRefExpr{ + pos: position{line: 208, col: 18, offset: 4464}, + name: "SORT", + }, + }, + &labeledExpr{ + pos: position{line: 208, col: 23, offset: 4469}, + label: "limit", + expr: &ruleRefExpr{ + pos: position{line: 208, col: 29, offset: 4475}, + name: "NUMBER", + }, + }, + &litMatcher{ + pos: position{line: 208, col: 36, offset: 4482}, + val: "..", + ignoreCase: false, + want: "\"..\"", + }, + &labeledExpr{ + pos: position{line: 208, col: 41, offset: 4487}, + label: "offset", + expr: &ruleRefExpr{ + pos: position{line: 208, col: 48, offset: 4494}, + name: "NUMBER", + }, + }, + &litMatcher{ + pos: position{line: 208, col: 55, offset: 4501}, + val: "}", + ignoreCase: false, + want: "\"}\"", + }, + }, + }, + }, + &actionExpr{ + pos: position{line: 210, col: 3, offset: 4589}, + run: (*parser).callonLIMIT13, + expr: &seqExpr{ + pos: 
position{line: 210, col: 3, offset: 4589}, + exprs: []interface{}{ + &litMatcher{ + pos: position{line: 210, col: 3, offset: 4589}, + val: "{", + ignoreCase: false, + want: "\"{\"", + }, + &labeledExpr{ + pos: position{line: 210, col: 7, offset: 4593}, + label: "sort", + expr: &ruleRefExpr{ + pos: position{line: 210, col: 12, offset: 4598}, + name: "SORT", + }, + }, + &labeledExpr{ + pos: position{line: 210, col: 17, offset: 4603}, + label: "limit", + expr: &ruleRefExpr{ + pos: position{line: 210, col: 23, offset: 4609}, + name: "NUMBER", + }, + }, + &litMatcher{ + pos: position{line: 210, col: 30, offset: 4616}, + val: "}", + ignoreCase: false, + want: "\"}\"", + }, + }, + }, + }, + &actionExpr{ + pos: position{line: 212, col: 3, offset: 4694}, + run: (*parser).callonLIMIT21, + expr: &seqExpr{ + pos: position{line: 212, col: 3, offset: 4694}, + exprs: []interface{}{ + &litMatcher{ + pos: position{line: 212, col: 3, offset: 4694}, + val: "{", + ignoreCase: false, + want: "\"{\"", + }, + &labeledExpr{ + pos: position{line: 212, col: 7, offset: 4698}, + label: "sort", + expr: &ruleRefExpr{ + pos: position{line: 212, col: 12, offset: 4703}, + name: "SORT", + }, + }, + &litMatcher{ + pos: position{line: 212, col: 17, offset: 4708}, + val: "}", + ignoreCase: false, + want: "\"}\"", + }, + }, + }, + }, + }, + }, + }, + { + name: "SORT", + pos: position{line: 215, col: 1, offset: 4750}, + expr: &zeroOrMoreExpr{ + pos: position{line: 215, col: 8, offset: 4757}, + expr: &actionExpr{ + pos: position{line: 215, col: 9, offset: 4758}, + run: (*parser).callonSORT2, + expr: &seqExpr{ + pos: position{line: 215, col: 9, offset: 4758}, + exprs: []interface{}{ + &labeledExpr{ + pos: position{line: 215, col: 9, offset: 4758}, + label: "direction", + expr: &ruleRefExpr{ + pos: position{line: 215, col: 19, offset: 4768}, + name: "DIRECTION", + }, + }, + &labeledExpr{ + pos: position{line: 215, col: 29, offset: 4778}, + label: "v", + expr: &ruleRefExpr{ + pos: position{line: 215, col: 
31, offset: 4780}, + name: "VARIABLE", + }, + }, + &zeroOrOneExpr{ + pos: position{line: 215, col: 40, offset: 4789}, + expr: &litMatcher{ + pos: position{line: 215, col: 40, offset: 4789}, + val: ",", + ignoreCase: false, + want: "\",\"", + }, + }, + &ruleRefExpr{ + pos: position{line: 215, col: 45, offset: 4794}, + name: "__", + }, + }, + }, + }, + }, + }, + { + name: "DIRECTION", + pos: position{line: 218, col: 1, offset: 4852}, + expr: &actionExpr{ + pos: position{line: 218, col: 13, offset: 4864}, + run: (*parser).callonDIRECTION1, + expr: &labeledExpr{ + pos: position{line: 218, col: 13, offset: 4864}, + label: "d", + expr: &zeroOrOneExpr{ + pos: position{line: 218, col: 15, offset: 4866}, + expr: &litMatcher{ + pos: position{line: 218, col: 16, offset: 4867}, + val: "^", + ignoreCase: false, + want: "\"^\"", + }, + }, + }, + }, + }, + { + name: "SEARCH", + pos: position{line: 225, col: 1, offset: 4936}, + expr: &actionExpr{ + pos: position{line: 225, col: 10, offset: 4945}, + run: (*parser).callonSEARCH1, + expr: &labeledExpr{ + pos: position{line: 225, col: 10, offset: 4945}, + label: "filters", + expr: &zeroOrMoreExpr{ + pos: position{line: 225, col: 18, offset: 4953}, + expr: &actionExpr{ + pos: position{line: 225, col: 19, offset: 4954}, + run: (*parser).callonSEARCH4, + expr: &seqExpr{ + pos: position{line: 225, col: 19, offset: 4954}, + exprs: []interface{}{ + &litMatcher{ + pos: position{line: 225, col: 19, offset: 4954}, + val: "[?", + ignoreCase: false, + want: "\"[?\"", + }, + &labeledExpr{ + pos: position{line: 225, col: 24, offset: 4959}, + label: "filter", + expr: &ruleRefExpr{ + pos: position{line: 225, col: 31, offset: 4966}, + name: "FILTER", + }, + }, + &litMatcher{ + pos: position{line: 225, col: 38, offset: 4973}, + val: "?]", + ignoreCase: false, + want: "\"?]\"", + }, + }, + }, + }, + }, + }, + }, + }, + { + name: "FILTER", + pos: position{line: 231, col: 1, offset: 5035}, + expr: &oneOrMoreExpr{ + pos: position{line: 231, col: 10, 
offset: 5044}, + expr: &actionExpr{ + pos: position{line: 231, col: 11, offset: 5045}, + run: (*parser).callonFILTER2, + expr: &seqExpr{ + pos: position{line: 231, col: 11, offset: 5045}, + exprs: []interface{}{ + &labeledExpr{ + pos: position{line: 231, col: 11, offset: 5045}, + label: "expression", + expr: &ruleRefExpr{ + pos: position{line: 231, col: 22, offset: 5056}, + name: "EXPRESSION", + }, + }, + &ruleRefExpr{ + pos: position{line: 231, col: 33, offset: 5067}, + name: "__", + }, + &labeledExpr{ + pos: position{line: 231, col: 36, offset: 5070}, + label: "boolOp", + expr: &zeroOrOneExpr{ + pos: position{line: 231, col: 43, offset: 5077}, + expr: &choiceExpr{ + pos: position{line: 231, col: 44, offset: 5078}, + alternatives: []interface{}{ + &litMatcher{ + pos: position{line: 231, col: 44, offset: 5078}, + val: "&&", + ignoreCase: false, + want: "\"&&\"", + }, + &litMatcher{ + pos: position{line: 231, col: 51, offset: 5085}, + val: "||", + ignoreCase: false, + want: "\"||\"", + }, + }, + }, + }, + }, + &ruleRefExpr{ + pos: position{line: 231, col: 58, offset: 5092}, + name: "__", + }, + }, + }, + }, + }, + }, + { + name: "EXPRESSION", + pos: position{line: 236, col: 1, offset: 5208}, + expr: &actionExpr{ + pos: position{line: 236, col: 14, offset: 5221}, + run: (*parser).callonEXPRESSION1, + expr: &seqExpr{ + pos: position{line: 236, col: 14, offset: 5221}, + exprs: []interface{}{ + &labeledExpr{ + pos: position{line: 236, col: 14, offset: 5221}, + label: "variable", + expr: &ruleRefExpr{ + pos: position{line: 236, col: 23, offset: 5230}, + name: "VARIABLE", + }, + }, + &ruleRefExpr{ + pos: position{line: 236, col: 32, offset: 5239}, + name: "__", + }, + &labeledExpr{ + pos: position{line: 236, col: 35, offset: 5242}, + label: "op", + expr: &ruleRefExpr{ + pos: position{line: 236, col: 38, offset: 5245}, + name: "OP", + }, + }, + &ruleRefExpr{ + pos: position{line: 236, col: 41, offset: 5248}, + name: "__", + }, + &labeledExpr{ + pos: position{line: 236, 
col: 44, offset: 5251}, + label: "evaluation", + expr: &ruleRefExpr{ + pos: position{line: 236, col: 55, offset: 5262}, + name: "EVALUATION", + }, + }, + }, + }, + }, + }, + { + name: "OP", + pos: position{line: 244, col: 1, offset: 5434}, + expr: &choiceExpr{ + pos: position{line: 244, col: 6, offset: 5439}, + alternatives: []interface{}{ + &litMatcher{ + pos: position{line: 244, col: 6, offset: 5439}, + val: "==", + ignoreCase: false, + want: "\"==\"", + }, + &litMatcher{ + pos: position{line: 245, col: 3, offset: 5446}, + val: "=~", + ignoreCase: false, + want: "\"=~\"", + }, + &litMatcher{ + pos: position{line: 246, col: 3, offset: 5453}, + val: "!~", + ignoreCase: false, + want: "\"!~\"", + }, + &litMatcher{ + pos: position{line: 247, col: 3, offset: 5460}, + val: "<=", + ignoreCase: false, + want: "\"<=\"", + }, + &litMatcher{ + pos: position{line: 248, col: 3, offset: 5467}, + val: ">=", + ignoreCase: false, + want: "\">=\"", + }, + &litMatcher{ + pos: position{line: 249, col: 3, offset: 5474}, + val: "<", + ignoreCase: false, + want: "\"<\"", + }, + &litMatcher{ + pos: position{line: 250, col: 3, offset: 5480}, + val: ">", + ignoreCase: false, + want: "\">\"", + }, + &litMatcher{ + pos: position{line: 251, col: 3, offset: 5486}, + val: "!=", + ignoreCase: false, + want: "\"!=\"", + }, + &litMatcher{ + pos: position{line: 252, col: 3, offset: 5493}, + val: "in", + ignoreCase: true, + want: "\"IN\"i", + }, + &litMatcher{ + pos: position{line: 253, col: 3, offset: 5501}, + val: "like", + ignoreCase: true, + want: "\"LIKE\"i", + }, + &litMatcher{ + pos: position{line: 254, col: 3, offset: 5511}, + val: "not like", + ignoreCase: true, + want: "\"NOT LIKE\"i", + }, + }, + }, + }, + { + name: "VARIABLE", + pos: position{line: 256, col: 1, offset: 5524}, + expr: &actionExpr{ + pos: position{line: 256, col: 12, offset: 5535}, + run: (*parser).callonVARIABLE1, + expr: &seqExpr{ + pos: position{line: 256, col: 12, offset: 5535}, + exprs: []interface{}{ + &labeledExpr{ 
+ pos: position{line: 256, col: 12, offset: 5535}, + label: "variable", + expr: &choiceExpr{ + pos: position{line: 256, col: 22, offset: 5545}, + alternatives: []interface{}{ + &litMatcher{ + pos: position{line: 256, col: 22, offset: 5545}, + val: "object", + ignoreCase: false, + want: "\"object\"", + }, + &litMatcher{ + pos: position{line: 256, col: 33, offset: 5556}, + val: "link", + ignoreCase: false, + want: "\"link\"", + }, + &litMatcher{ + pos: position{line: 256, col: 42, offset: 5565}, + val: "path", + ignoreCase: false, + want: "\"path\"", + }, + &litMatcher{ + pos: position{line: 256, col: 51, offset: 5574}, + val: "@", + ignoreCase: false, + want: "\"@\"", + }, + &litMatcher{ + pos: position{line: 256, col: 57, offset: 5580}, + val: "$", + ignoreCase: false, + want: "\"$\"", + }, + }, + }, + }, + &labeledExpr{ + pos: position{line: 256, col: 62, offset: 5585}, + label: "attribute", + expr: &oneOrMoreExpr{ + pos: position{line: 256, col: 72, offset: 5595}, + expr: &ruleRefExpr{ + pos: position{line: 256, col: 72, offset: 5595}, + name: "ATTRIBUTE", + }, + }, + }, + }, + }, + }, + }, + { + name: "ATTRIBUTE", + pos: position{line: 260, col: 1, offset: 5673}, + expr: &choiceExpr{ + pos: position{line: 260, col: 13, offset: 5685}, + alternatives: []interface{}{ + &actionExpr{ + pos: position{line: 260, col: 14, offset: 5686}, + run: (*parser).callonATTRIBUTE2, + expr: &seqExpr{ + pos: position{line: 260, col: 14, offset: 5686}, + exprs: []interface{}{ + &litMatcher{ + pos: position{line: 260, col: 14, offset: 5686}, + val: ".", + ignoreCase: false, + want: "\".\"", + }, + &labeledExpr{ + pos: position{line: 260, col: 18, offset: 5690}, + label: "attrname", + expr: &ruleRefExpr{ + pos: position{line: 260, col: 27, offset: 5699}, + name: "ATTRNAME", + }, + }, + }, + }, + }, + &actionExpr{ + pos: position{line: 262, col: 4, offset: 5738}, + run: (*parser).callonATTRIBUTE7, + expr: &seqExpr{ + pos: position{line: 262, col: 4, offset: 5738}, + exprs: 
[]interface{}{ + &litMatcher{ + pos: position{line: 262, col: 4, offset: 5738}, + val: "[", + ignoreCase: false, + want: "\"[\"", + }, + &labeledExpr{ + pos: position{line: 262, col: 8, offset: 5742}, + label: "attrname", + expr: &choiceExpr{ + pos: position{line: 262, col: 18, offset: 5752}, + alternatives: []interface{}{ + &ruleRefExpr{ + pos: position{line: 262, col: 18, offset: 5752}, + name: "STRING_LITERAL", + }, + &ruleRefExpr{ + pos: position{line: 262, col: 35, offset: 5769}, + name: "INT", + }, + }, + }, + }, + &litMatcher{ + pos: position{line: 262, col: 40, offset: 5774}, + val: "]", + ignoreCase: false, + want: "\"]\"", + }, + }, + }, + }, + }, + }, + }, + { + name: "ATTRNAME", + pos: position{line: 265, col: 1, offset: 5806}, + expr: &actionExpr{ + pos: position{line: 265, col: 12, offset: 5817}, + run: (*parser).callonATTRNAME1, + expr: &labeledExpr{ + pos: position{line: 265, col: 12, offset: 5817}, + label: "attrname", + expr: &oneOrMoreExpr{ + pos: position{line: 265, col: 21, offset: 5826}, + expr: &charClassMatcher{ + pos: position{line: 265, col: 21, offset: 5826}, + val: "[a-z\\\\*0-9_\\\\-]i", + chars: []rune{'\\', '*', '_', '\\', '-'}, + ranges: []rune{'a', 'z', '0', '9'}, + ignoreCase: true, + inverted: false, + }, + }, + }, + }, + }, + { + name: "STRING_LITERAL", + pos: position{line: 275, col: 1, offset: 5978}, + expr: &actionExpr{ + pos: position{line: 275, col: 18, offset: 5995}, + run: (*parser).callonSTRING_LITERAL1, + expr: &seqExpr{ + pos: position{line: 275, col: 18, offset: 5995}, + exprs: []interface{}{ + &litMatcher{ + pos: position{line: 275, col: 18, offset: 5995}, + val: "'", + ignoreCase: false, + want: "\"'\"", + }, + &labeledExpr{ + pos: position{line: 275, col: 22, offset: 5999}, + label: "text", + expr: &oneOrMoreExpr{ + pos: position{line: 275, col: 27, offset: 6004}, + expr: &choiceExpr{ + pos: position{line: 275, col: 28, offset: 6005}, + alternatives: []interface{}{ + &litMatcher{ + pos: position{line: 275, col: 28, 
offset: 6005}, + val: "\\'", + ignoreCase: false, + want: "\"\\\\'\"", + }, + &charClassMatcher{ + pos: position{line: 275, col: 34, offset: 6011}, + val: "[^']", + chars: []rune{'\''}, + ignoreCase: false, + inverted: true, + }, + }, + }, + }, + }, + &litMatcher{ + pos: position{line: 275, col: 41, offset: 6018}, + val: "'", + ignoreCase: false, + want: "\"'\"", + }, + }, + }, + }, + }, + { + name: "BOOL", + pos: position{line: 279, col: 1, offset: 6074}, + expr: &choiceExpr{ + pos: position{line: 279, col: 8, offset: 6081}, + alternatives: []interface{}{ + &litMatcher{ + pos: position{line: 279, col: 8, offset: 6081}, + val: "true", + ignoreCase: false, + want: "\"true\"", + }, + &litMatcher{ + pos: position{line: 279, col: 17, offset: 6090}, + val: "false", + ignoreCase: false, + want: "\"false\"", + }, + }, + }, + }, + { + name: "NULL", + pos: position{line: 280, col: 1, offset: 6098}, + expr: &litMatcher{ + pos: position{line: 280, col: 8, offset: 6105}, + val: "null", + ignoreCase: false, + want: "\"null\"", + }, + }, + { + name: "EVALUATION", + pos: position{line: 282, col: 1, offset: 6113}, + expr: &ruleRefExpr{ + pos: position{line: 282, col: 14, offset: 6126}, + name: "LITERAL", + }, + }, + { + name: "LITERAL", + pos: position{line: 284, col: 1, offset: 6135}, + expr: &choiceExpr{ + pos: position{line: 284, col: 11, offset: 6145}, + alternatives: []interface{}{ + &ruleRefExpr{ + pos: position{line: 284, col: 11, offset: 6145}, + name: "DBL_LITERAL", + }, + &ruleRefExpr{ + pos: position{line: 284, col: 25, offset: 6159}, + name: "SNG_LITERAL", + }, + &ruleRefExpr{ + pos: position{line: 284, col: 39, offset: 6173}, + name: "NUMBER", + }, + &ruleRefExpr{ + pos: position{line: 284, col: 48, offset: 6182}, + name: "BOOL", + }, + &ruleRefExpr{ + pos: position{line: 284, col: 55, offset: 6189}, + name: "NULL", + }, + &ruleRefExpr{ + pos: position{line: 284, col: 62, offset: 6196}, + name: "ARR", + }, + }, + }, + }, + { + name: "SNG_LITERAL", + pos: 
position{line: 286, col: 1, offset: 6201}, + expr: &actionExpr{ + pos: position{line: 286, col: 15, offset: 6215}, + run: (*parser).callonSNG_LITERAL1, + expr: &seqExpr{ + pos: position{line: 286, col: 15, offset: 6215}, + exprs: []interface{}{ + &labeledExpr{ + pos: position{line: 286, col: 15, offset: 6215}, + label: "q1", + expr: &litMatcher{ + pos: position{line: 286, col: 18, offset: 6218}, + val: "'", + ignoreCase: false, + want: "\"'\"", + }, + }, + &labeledExpr{ + pos: position{line: 286, col: 22, offset: 6222}, + label: "cc", + expr: &zeroOrMoreExpr{ + pos: position{line: 286, col: 25, offset: 6225}, + expr: &charClassMatcher{ + pos: position{line: 286, col: 25, offset: 6225}, + val: "[^\\\\']", + chars: []rune{'\\', '\''}, + ignoreCase: false, + inverted: true, + }, + }, + }, + &labeledExpr{ + pos: position{line: 286, col: 33, offset: 6233}, + label: "q2", + expr: &litMatcher{ + pos: position{line: 286, col: 36, offset: 6236}, + val: "'", + ignoreCase: false, + want: "\"'\"", + }, + }, + }, + }, + }, + }, + { + name: "DBL_LITERAL", + pos: position{line: 287, col: 1, offset: 6327}, + expr: &actionExpr{ + pos: position{line: 287, col: 15, offset: 6341}, + run: (*parser).callonDBL_LITERAL1, + expr: &seqExpr{ + pos: position{line: 287, col: 15, offset: 6341}, + exprs: []interface{}{ + &labeledExpr{ + pos: position{line: 287, col: 15, offset: 6341}, + label: "q1", + expr: &litMatcher{ + pos: position{line: 287, col: 18, offset: 6344}, + val: "\"", + ignoreCase: false, + want: "\"\\\"\"", + }, + }, + &labeledExpr{ + pos: position{line: 287, col: 22, offset: 6348}, + label: "cc", + expr: &zeroOrMoreExpr{ + pos: position{line: 287, col: 25, offset: 6351}, + expr: &charClassMatcher{ + pos: position{line: 287, col: 25, offset: 6351}, + val: "[^\\\\\"]", + chars: []rune{'\\', '"'}, + ignoreCase: false, + inverted: true, + }, + }, + }, + &labeledExpr{ + pos: position{line: 287, col: 33, offset: 6359}, + label: "q2", + expr: &litMatcher{ + pos: position{line: 287, 
col: 36, offset: 6362}, + val: "\"", + ignoreCase: false, + want: "\"\\\"\"", + }, + }, + }, + }, + }, + }, + { + name: "NUMBER", + pos: position{line: 289, col: 1, offset: 6455}, + expr: &actionExpr{ + pos: position{line: 289, col: 10, offset: 6464}, + run: (*parser).callonNUMBER1, + expr: &seqExpr{ + pos: position{line: 289, col: 10, offset: 6464}, + exprs: []interface{}{ + &labeledExpr{ + pos: position{line: 289, col: 10, offset: 6464}, + label: "num", + expr: &oneOrMoreExpr{ + pos: position{line: 289, col: 14, offset: 6468}, + expr: &charClassMatcher{ + pos: position{line: 289, col: 14, offset: 6468}, + val: "[0-9]", + ranges: []rune{'0', '9'}, + ignoreCase: false, + inverted: false, + }, + }, + }, + &labeledExpr{ + pos: position{line: 289, col: 21, offset: 6475}, + label: "tail", + expr: &zeroOrOneExpr{ + pos: position{line: 289, col: 26, offset: 6480}, + expr: &seqExpr{ + pos: position{line: 289, col: 27, offset: 6481}, + exprs: []interface{}{ + &litMatcher{ + pos: position{line: 289, col: 27, offset: 6481}, + val: ".", + ignoreCase: false, + want: "\".\"", + }, + &oneOrMoreExpr{ + pos: position{line: 289, col: 31, offset: 6485}, + expr: &charClassMatcher{ + pos: position{line: 289, col: 31, offset: 6485}, + val: "[0-9]", + ranges: []rune{'0', '9'}, + ignoreCase: false, + inverted: false, + }, + }, + }, + }, + }, + }, + }, + }, + }, + }, + { + name: "ARR", + pos: position{line: 301, col: 1, offset: 6712}, + expr: &actionExpr{ + pos: position{line: 301, col: 7, offset: 6718}, + run: (*parser).callonARR1, + expr: &seqExpr{ + pos: position{line: 301, col: 7, offset: 6718}, + exprs: []interface{}{ + &litMatcher{ + pos: position{line: 301, col: 7, offset: 6718}, + val: "[", + ignoreCase: false, + want: "\"[\"", + }, + &ruleRefExpr{ + pos: position{line: 301, col: 11, offset: 6722}, + name: "__", + }, + &labeledExpr{ + pos: position{line: 302, col: 3, offset: 6727}, + label: "body", + expr: &zeroOrOneExpr{ + pos: position{line: 302, col: 8, offset: 6732}, + expr: 
&actionExpr{ + pos: position{line: 302, col: 9, offset: 6733}, + run: (*parser).callonARR7, + expr: &seqExpr{ + pos: position{line: 302, col: 9, offset: 6733}, + exprs: []interface{}{ + &labeledExpr{ + pos: position{line: 302, col: 9, offset: 6733}, + label: "hd", + expr: &ruleRefExpr{ + pos: position{line: 302, col: 12, offset: 6736}, + name: "LITERAL", + }, + }, + &labeledExpr{ + pos: position{line: 302, col: 20, offset: 6744}, + label: "items", + expr: &zeroOrMoreExpr{ + pos: position{line: 302, col: 26, offset: 6750}, + expr: &actionExpr{ + pos: position{line: 302, col: 27, offset: 6751}, + run: (*parser).callonARR13, + expr: &seqExpr{ + pos: position{line: 302, col: 27, offset: 6751}, + exprs: []interface{}{ + &ruleRefExpr{ + pos: position{line: 302, col: 27, offset: 6751}, + name: "__", + }, + &litMatcher{ + pos: position{line: 302, col: 30, offset: 6754}, + val: ",", + ignoreCase: false, + want: "\",\"", + }, + &ruleRefExpr{ + pos: position{line: 302, col: 34, offset: 6758}, + name: "__", + }, + &labeledExpr{ + pos: position{line: 302, col: 37, offset: 6761}, + label: "e", + expr: &ruleRefExpr{ + pos: position{line: 302, col: 39, offset: 6763}, + name: "LITERAL", + }, + }, + }, + }, + }, + }, + }, + &ruleRefExpr{ + pos: position{line: 302, col: 67, offset: 6791}, + name: "__", + }, + }, + }, + }, + }, + }, + &litMatcher{ + pos: position{line: 306, col: 1, offset: 6850}, + val: "]", + ignoreCase: false, + want: "\"]\"", + }, + }, + }, + }, + }, + { + name: "__", + pos: position{line: 315, col: 1, offset: 6997}, + expr: &zeroOrMoreExpr{ + pos: position{line: 315, col: 6, offset: 7002}, + expr: &charClassMatcher{ + pos: position{line: 315, col: 6, offset: 7002}, + val: "[ ]", + chars: []rune{' '}, + ignoreCase: false, + inverted: false, + }, + }, + }, + }, +} + +func (c *current) onQUERY5(e interface{}) (interface{}, error) { + return e, nil +} + +func (p *parser) callonQUERY5() (interface{}, error) { + stack := p.vstack[len(p.vstack)-1] + _ = stack + return 
p.cur.onQUERY5(stack["e"]) +} + +func (c *current) onQUERY1(base, last interface{}) (interface{}, error) { + return append(base.([]any), last), nil +} + +func (p *parser) callonQUERY1() (interface{}, error) { + stack := p.vstack[len(p.vstack)-1] + _ = stack + return p.cur.onQUERY1(stack["base"], stack["last"]) +} + +func (c *current) onELEMENT7(l interface{}) (interface{}, error) { + return l, nil +} + +func (p *parser) callonELEMENT7() (interface{}, error) { + stack := p.vstack[len(p.vstack)-1] + _ = stack + return p.cur.onELEMENT7(stack["l"]) +} + +func (c *current) onELEMENT1(action, levels, last interface{}) (interface{}, error) { + var blocks []*Block + + for _, level := range levels.([]any) { + if block, ok := level.(*Block); ok { + blocks = append(blocks, block) + } + } + block, ok := last.(*Block) + // if last == "_" { + if !ok { + return &Element{Action: toString(action), Path: blocks, RootExpand: true}, nil + } + + return &Element{Action: toString(action), Path: append(blocks, block)}, nil +} + +func (p *parser) callonELEMENT1() (interface{}, error) { + stack := p.vstack[len(p.vstack)-1] + _ = stack + return p.cur.onELEMENT1(stack["action"], stack["levels"], stack["last"]) +} + +func (c *current) onUNARY1(op interface{}) (interface{}, error) { + + if op == "-" { + return "subtract", nil + } + return "add", nil +} + +func (p *parser) callonUNARY1() (interface{}, error) { + stack := p.vstack[len(p.vstack)-1] + _ = stack + return p.cur.onUNARY1(stack["op"]) +} + +func (c *current) onNODE2(nodename, ranges interface{}) (interface{}, error) { + name := toString(nodename) + var arr []*Range + if val, ok := ranges.([]interface{}); ok { + for _, a := range val { + arr = append(arr, a.(*Range)) + } + } + return &Node{Name: &name, Ranges: arr}, nil +} + +func (p *parser) callonNODE2() (interface{}, error) { + stack := p.vstack[len(p.vstack)-1] + _ = stack + return p.cur.onNODE2(stack["nodename"], stack["ranges"]) +} + +func (c *current) onNODE9(ranges interface{}) 
(interface{}, error) { + + var arr []*Range + if val, ok := ranges.([]interface{}); ok { + for _, a := range val { + arr = append(arr, a.(*Range)) + } + } + return &Node{Name: nil, Ranges: arr}, nil + +} + +func (p *parser) callonNODE9() (interface{}, error) { + stack := p.vstack[len(p.vstack)-1] + _ = stack + return p.cur.onNODE9(stack["ranges"]) +} + +func (c *current) onNODENAME2(head, tail interface{}) (interface{}, error) { + return toString(head) + strings.Join(arrayToStringArray(tail), ""), nil +} + +func (p *parser) callonNODENAME2() (interface{}, error) { + stack := p.vstack[len(p.vstack)-1] + _ = stack + return p.cur.onNODENAME2(stack["head"], stack["tail"]) +} + +func (c *current) onNODERANGE12(to interface{}) (interface{}, error) { + return to, nil +} + +func (p *parser) callonNODERANGE12() (interface{}, error) { + stack := p.vstack[len(p.vstack)-1] + _ = stack + return p.cur.onNODERANGE12(stack["to"]) +} + +func (c *current) onNODERANGE6(from, to interface{}) (interface{}, error) { + fromValue := toString(from) + toValue := toString(to) + return &Range{From: &fromValue, To: &toValue}, nil +} + +func (p *parser) callonNODERANGE6() (interface{}, error) { + stack := p.vstack[len(p.vstack)-1] + _ = stack + return p.cur.onNODERANGE6(stack["from"], stack["to"]) +} + +func (c *current) onNODERANGE1(ranges interface{}) (interface{}, error) { + return ranges, nil +} + +func (p *parser) callonNODERANGE1() (interface{}, error) { + stack := p.vstack[len(p.vstack)-1] + _ = stack + return p.cur.onNODERANGE1(stack["ranges"]) +} + +func (c *current) onINT1(num interface{}) (interface{}, error) { + return strings.Join(arrayToStringArray(num), ""), nil +} + +func (p *parser) callonINT1() (interface{}, error) { + stack := p.vstack[len(p.vstack)-1] + _ = stack + return p.cur.onINT1(stack["num"]) +} + +func (c *current) onLEVEL2(children interface{}) (interface{}, error) { + var arr []*Element + if val, ok := children.([]interface{}); ok { + for _, a := range val { + arr = 
append(arr, a.(*Element)) + } + } + return &Block{IsGroup: true, Children: arr}, nil +} + +func (p *parser) callonLEVEL2() (interface{}, error) { + stack := p.vstack[len(p.vstack)-1] + _ = stack + return p.cur.onLEVEL2(stack["children"]) +} + +func (c *current) onLEVEL8(block interface{}) (interface{}, error) { + return &Block{Any: false, Catchall: true, Filter: block.(*Filter)}, nil +} + +func (p *parser) callonLEVEL8() (interface{}, error) { + stack := p.vstack[len(p.vstack)-1] + _ = stack + return p.cur.onLEVEL8(stack["block"]) +} + +func (c *current) onLEVEL13(block interface{}) (interface{}, error) { + return &Block{Any: true, Filter: block.(*Filter)}, nil +} + +func (p *parser) callonLEVEL13() (interface{}, error) { + stack := p.vstack[len(p.vstack)-1] + _ = stack + return p.cur.onLEVEL13(stack["block"]) +} + +func (c *current) onLEVEL18(node, block interface{}) (interface{}, error) { + return &Block{Any: false, Node: node.(*Node), Filter: block.(*Filter)}, nil +} + +func (p *parser) callonLEVEL18() (interface{}, error) { + stack := p.vstack[len(p.vstack)-1] + _ = stack + return p.cur.onLEVEL18(stack["node"], stack["block"]) +} + +func (c *current) onBLOCK2(limits, filter interface{}) (interface{}, error) { + + var filters [][]*Expression + + for _, i := range filter.([]any) { + var expressions []*Expression + for _, j := range i.([]any) { + expressions = append(expressions, j.(*Expression)) + } + filters = append(filters, expressions) + } + + return &Filter{filters}, nil +} + +func (p *parser) callonBLOCK2() (interface{}, error) { + stack := p.vstack[len(p.vstack)-1] + _ = stack + return p.cur.onBLOCK2(stack["limits"], stack["filter"]) +} + +func (c *current) onBLOCK8(filter, limits interface{}) (interface{}, error) { + var filters [][]*Expression + + for _, i := range filter.([]any) { + var expressions []*Expression + for _, j := range i.([]any) { + expressions = append(expressions, j.(*Expression)) + } + filters = append(filters, expressions) + } + + 
return &Filter{filters}, nil +} + +func (p *parser) callonBLOCK8() (interface{}, error) { + stack := p.vstack[len(p.vstack)-1] + _ = stack + return p.cur.onBLOCK8(stack["filter"], stack["limits"]) +} + +func (c *current) onBLOCK14(filter interface{}) (interface{}, error) { + var filters [][]*Expression + + for _, i := range filter.([]any) { + var expressions []*Expression + for _, j := range i.([]any) { + expressions = append(expressions, j.(*Expression)) + } + filters = append(filters, expressions) + } + + return &Filter{filters}, nil +} + +func (p *parser) callonBLOCK14() (interface{}, error) { + stack := p.vstack[len(p.vstack)-1] + _ = stack + return p.cur.onBLOCK14(stack["filter"]) +} + +func (c *current) onLIMIT2(sort, limit, offset interface{}) (interface{}, error) { + return &Limit{Sort: sort, Limits: &Limits{limit.(int), offset.(int)}}, nil +} + +func (p *parser) callonLIMIT2() (interface{}, error) { + stack := p.vstack[len(p.vstack)-1] + _ = stack + return p.cur.onLIMIT2(stack["sort"], stack["limit"], stack["offset"]) +} + +func (c *current) onLIMIT13(sort, limit interface{}) (interface{}, error) { + return &Limit{Sort: sort, Limits: &Limits{limit.(int), 0}}, nil +} + +func (p *parser) callonLIMIT13() (interface{}, error) { + stack := p.vstack[len(p.vstack)-1] + _ = stack + return p.cur.onLIMIT13(stack["sort"], stack["limit"]) +} + +func (c *current) onLIMIT21(sort interface{}) (interface{}, error) { + + return &Limit{Sort: sort}, nil +} + +func (p *parser) callonLIMIT21() (interface{}, error) { + stack := p.vstack[len(p.vstack)-1] + _ = stack + return p.cur.onLIMIT21(stack["sort"]) +} + +func (c *current) onSORT2(direction, v interface{}) (interface{}, error) { + return &Direction{toString(direction), v}, nil +} + +func (p *parser) callonSORT2() (interface{}, error) { + stack := p.vstack[len(p.vstack)-1] + _ = stack + return p.cur.onSORT2(stack["direction"], stack["v"]) +} + +func (c *current) onDIRECTION1(d interface{}) (interface{}, error) { + if d == 
nil { + return "ASC", nil + } + return "DESC", nil +} + +func (p *parser) callonDIRECTION1() (interface{}, error) { + stack := p.vstack[len(p.vstack)-1] + _ = stack + return p.cur.onDIRECTION1(stack["d"]) +} + +func (c *current) onSEARCH4(filter interface{}) (interface{}, error) { + return filter, nil +} + +func (p *parser) callonSEARCH4() (interface{}, error) { + stack := p.vstack[len(p.vstack)-1] + _ = stack + return p.cur.onSEARCH4(stack["filter"]) +} + +func (c *current) onSEARCH1(filters interface{}) (interface{}, error) { + return filters, nil +} + +func (p *parser) callonSEARCH1() (interface{}, error) { + stack := p.vstack[len(p.vstack)-1] + _ = stack + return p.cur.onSEARCH1(stack["filters"]) +} + +func (c *current) onFILTER2(expression, boolOp interface{}) (interface{}, error) { + expressionValue, _ := expression.(*Variable) + return &Expression{expressionValue, toString(boolOp)}, nil +} + +func (p *parser) callonFILTER2() (interface{}, error) { + stack := p.vstack[len(p.vstack)-1] + _ = stack + return p.cur.onFILTER2(stack["expression"], stack["boolOp"]) +} + +func (c *current) onEXPRESSION1(variable, op, evaluation interface{}) (interface{}, error) { + var arr []string + for _, i := range variable.([]any) { + arr = append(arr, toString(i)) + } + return &Variable{arr, toString(op), toString(evaluation)}, nil +} + +func (p *parser) callonEXPRESSION1() (interface{}, error) { + stack := p.vstack[len(p.vstack)-1] + _ = stack + return p.cur.onEXPRESSION1(stack["variable"], stack["op"], stack["evaluation"]) +} + +func (c *current) onVARIABLE1(variable, attribute interface{}) (interface{}, error) { + return append([]any{variable}, attribute.([]any)...), nil +} + +func (p *parser) callonVARIABLE1() (interface{}, error) { + stack := p.vstack[len(p.vstack)-1] + _ = stack + return p.cur.onVARIABLE1(stack["variable"], stack["attribute"]) +} + +func (c *current) onATTRIBUTE2(attrname interface{}) (interface{}, error) { + return attrname, nil +} + +func (p *parser) 
callonATTRIBUTE2() (interface{}, error) { + stack := p.vstack[len(p.vstack)-1] + _ = stack + return p.cur.onATTRIBUTE2(stack["attrname"]) +} + +func (c *current) onATTRIBUTE7(attrname interface{}) (interface{}, error) { + + return attrname, nil +} + +func (p *parser) callonATTRIBUTE7() (interface{}, error) { + stack := p.vstack[len(p.vstack)-1] + _ = stack + return p.cur.onATTRIBUTE7(stack["attrname"]) +} + +func (c *current) onATTRNAME1(attrname interface{}) (interface{}, error) { + var arr []string + + for _, a := range attrname.([]any) { + arr = append(arr, toString(a)) + } + + return strings.Join(arr, ""), nil +} + +func (p *parser) callonATTRNAME1() (interface{}, error) { + stack := p.vstack[len(p.vstack)-1] + _ = stack + return p.cur.onATTRNAME1(stack["attrname"]) +} + +func (c *current) onSTRING_LITERAL1(text interface{}) (interface{}, error) { + return strings.Join(text.([]string), ""), nil +} + +func (p *parser) callonSTRING_LITERAL1() (interface{}, error) { + stack := p.vstack[len(p.vstack)-1] + _ = stack + return p.cur.onSTRING_LITERAL1(stack["text"]) +} + +func (c *current) onSNG_LITERAL1(q1, cc, q2 interface{}) (interface{}, error) { + return toString(q1) + strings.Join(arrayToStringArray(cc), "") + toString(q2), nil +} + +func (p *parser) callonSNG_LITERAL1() (interface{}, error) { + stack := p.vstack[len(p.vstack)-1] + _ = stack + return p.cur.onSNG_LITERAL1(stack["q1"], stack["cc"], stack["q2"]) +} + +func (c *current) onDBL_LITERAL1(q1, cc, q2 interface{}) (interface{}, error) { + return toString(q1) + strings.Join(arrayToStringArray(cc), "") + toString(q2), nil +} + +func (p *parser) callonDBL_LITERAL1() (interface{}, error) { + stack := p.vstack[len(p.vstack)-1] + _ = stack + return p.cur.onDBL_LITERAL1(stack["q1"], stack["cc"], stack["q2"]) +} + +func (c *current) onNUMBER1(num, tail interface{}) (interface{}, error) { + fmt.Println("AAAAAAaa") + arr := tail.([]string) + var end string + if len(arr) > 0 { + // FIXME [][]arr? + // end = "." 
+ strings.Join(arr[1], "") + } + + return strings.Join(num.([]string), "") + end, nil +} + +func (p *parser) callonNUMBER1() (interface{}, error) { + stack := p.vstack[len(p.vstack)-1] + _ = stack + return p.cur.onNUMBER1(stack["num"], stack["tail"]) +} + +func (c *current) onARR13(e interface{}) (interface{}, error) { + return e, nil +} + +func (p *parser) callonARR13() (interface{}, error) { + stack := p.vstack[len(p.vstack)-1] + _ = stack + return p.cur.onARR13(stack["e"]) +} + +func (c *current) onARR7(hd, items interface{}) (interface{}, error) { + arr := []any{hd} + return append(arr, items), nil +} + +func (p *parser) callonARR7() (interface{}, error) { + stack := p.vstack[len(p.vstack)-1] + _ = stack + return p.cur.onARR7(stack["hd"], stack["items"]) +} + +func (c *current) onARR1(body interface{}) (interface{}, error) { + bodyArr := body.([]string) + arr := []string{} + if len(bodyArr) > 0 { + arr = append(arr, strings.Join(bodyArr, ",")) + } + return arr, nil +} + +func (p *parser) callonARR1() (interface{}, error) { + stack := p.vstack[len(p.vstack)-1] + _ = stack + return p.cur.onARR1(stack["body"]) +} + +var ( + // errNoRule is returned when the grammar to parse has no rule. + errNoRule = errors.New("grammar has no rule") + + // errInvalidEntrypoint is returned when the specified entrypoint rule + // does not exit. + errInvalidEntrypoint = errors.New("invalid entrypoint") + + // errInvalidEncoding is returned when the source is not properly + // utf8-encoded. + errInvalidEncoding = errors.New("invalid encoding") + + // errMaxExprCnt is used to signal that the maximum number of + // expressions have been parsed. + errMaxExprCnt = errors.New("max number of expresssions parsed") +) + +// Option is a function that can set an option on the parser. It returns +// the previous setting as an Option. 
+type Option func(*parser) Option + +// MaxExpressions creates an Option to stop parsing after the provided +// number of expressions have been parsed, if the value is 0 then the parser will +// parse for as many steps as needed (possibly an infinite number). +// +// The default for maxExprCnt is 0. +func MaxExpressions(maxExprCnt uint64) Option { + return func(p *parser) Option { + oldMaxExprCnt := p.maxExprCnt + p.maxExprCnt = maxExprCnt + return MaxExpressions(oldMaxExprCnt) + } +} + +// Entrypoint creates an Option to set the rule name to use as entrypoint. +// The rule name must have been specified in the -alternate-entrypoints +// if generating the parser with the -optimize-grammar flag, otherwise +// it may have been optimized out. Passing an empty string sets the +// entrypoint to the first rule in the grammar. +// +// The default is to start parsing at the first rule in the grammar. +func Entrypoint(ruleName string) Option { + return func(p *parser) Option { + oldEntrypoint := p.entrypoint + p.entrypoint = ruleName + if ruleName == "" { + p.entrypoint = g.rules[0].name + } + return Entrypoint(oldEntrypoint) + } +} + +// Statistics adds a user provided Stats struct to the parser to allow +// the user to process the results after the parsing has finished. +// Also the key for the "no match" counter is set. 
+// +// Example usage: +// +// input := "input" +// stats := Stats{} +// _, err := Parse("input-file", []byte(input), Statistics(&stats, "no match")) +// if err != nil { +// log.Panicln(err) +// } +// b, err := json.MarshalIndent(stats.ChoiceAltCnt, "", " ") +// if err != nil { +// log.Panicln(err) +// } +// fmt.Println(string(b)) +func Statistics(stats *Stats, choiceNoMatch string) Option { + return func(p *parser) Option { + oldStats := p.Stats + p.Stats = stats + oldChoiceNoMatch := p.choiceNoMatch + p.choiceNoMatch = choiceNoMatch + if p.Stats.ChoiceAltCnt == nil { + p.Stats.ChoiceAltCnt = make(map[string]map[string]int) + } + return Statistics(oldStats, oldChoiceNoMatch) + } +} + +// Debug creates an Option to set the debug flag to b. When set to true, +// debugging information is printed to stdout while parsing. +// +// The default is false. +func Debug(b bool) Option { + return func(p *parser) Option { + old := p.debug + p.debug = b + return Debug(old) + } +} + +// Memoize creates an Option to set the memoize flag to b. When set to true, +// the parser will cache all results so each expression is evaluated only +// once. This guarantees linear parsing time even for pathological cases, +// at the expense of more memory and slower times for typical cases. +// +// The default is false. +func Memoize(b bool) Option { + return func(p *parser) Option { + old := p.memoize + p.memoize = b + return Memoize(old) + } +} + +// AllowInvalidUTF8 creates an Option to allow invalid UTF-8 bytes. +// Every invalid UTF-8 byte is treated as a utf8.RuneError (U+FFFD) +// by character class matchers and is matched by the any matcher. +// The returned matched value, c.text and c.offset are NOT affected. +// +// The default is false. +func AllowInvalidUTF8(b bool) Option { + return func(p *parser) Option { + old := p.allowInvalidUTF8 + p.allowInvalidUTF8 = b + return AllowInvalidUTF8(old) + } +} + +// Recover creates an Option to set the recover flag to b. 
When set to +// true, this causes the parser to recover from panics and convert it +// to an error. Setting it to false can be useful while debugging to +// access the full stack trace. +// +// The default is true. +func Recover(b bool) Option { + return func(p *parser) Option { + old := p.recover + p.recover = b + return Recover(old) + } +} + +// GlobalStore creates an Option to set a key to a certain value in +// the globalStore. +func GlobalStore(key string, value interface{}) Option { + return func(p *parser) Option { + old := p.cur.globalStore[key] + p.cur.globalStore[key] = value + return GlobalStore(key, old) + } +} + +// InitState creates an Option to set a key to a certain value in +// the global "state" store. +func InitState(key string, value interface{}) Option { + return func(p *parser) Option { + old := p.cur.state[key] + p.cur.state[key] = value + return InitState(key, old) + } +} + +// ParseFile parses the file identified by filename. +func ParseFile(filename string, opts ...Option) (i interface{}, err error) { + f, err := os.Open(filename) + if err != nil { + return nil, err + } + defer func() { + if closeErr := f.Close(); closeErr != nil { + err = closeErr + } + }() + return ParseReader(filename, f, opts...) +} + +// ParseReader parses the data from r using filename as information in the +// error messages. +func ParseReader(filename string, r io.Reader, opts ...Option) (interface{}, error) { + b, err := ioutil.ReadAll(r) + if err != nil { + return nil, err + } + + return Parse(filename, b, opts...) +} + +// Parse parses the data from b using filename as information in the +// error messages. +func Parse(filename string, b []byte, opts ...Option) (interface{}, error) { + return newParser(filename, b, opts...).parse(g) +} + +// position records a position in the text. 
+type position struct { + line, col, offset int +} + +func (p position) String() string { + return strconv.Itoa(p.line) + ":" + strconv.Itoa(p.col) + " [" + strconv.Itoa(p.offset) + "]" +} + +// savepoint stores all state required to go back to this point in the +// parser. +type savepoint struct { + position + rn rune + w int +} + +type current struct { + pos position // start position of the match + text []byte // raw text of the match + + // state is a store for arbitrary key,value pairs that the user wants to be + // tied to the backtracking of the parser. + // This is always rolled back if a parsing rule fails. + state storeDict + + // globalStore is a general store for the user to store arbitrary key-value + // pairs that they need to manage and that they do not want tied to the + // backtracking of the parser. This is only modified by the user and never + // rolled back by the parser. It is always up to the user to keep this in a + // consistent state. + globalStore storeDict +} + +type storeDict map[string]interface{} + +// the AST types... 
+ +type grammar struct { + pos position + rules []*rule +} + +type rule struct { + pos position + name string + displayName string + expr interface{} +} + +type choiceExpr struct { + pos position + alternatives []interface{} +} + +type actionExpr struct { + pos position + expr interface{} + run func(*parser) (interface{}, error) +} + +type recoveryExpr struct { + pos position + expr interface{} + recoverExpr interface{} + failureLabel []string +} + +type seqExpr struct { + pos position + exprs []interface{} +} + +type throwExpr struct { + pos position + label string +} + +type labeledExpr struct { + pos position + label string + expr interface{} +} + +type expr struct { + pos position + expr interface{} +} + +type andExpr expr +type notExpr expr +type zeroOrOneExpr expr +type zeroOrMoreExpr expr +type oneOrMoreExpr expr + +type ruleRefExpr struct { + pos position + name string +} + +type stateCodeExpr struct { + pos position + run func(*parser) error +} + +type andCodeExpr struct { + pos position + run func(*parser) (bool, error) +} + +type notCodeExpr struct { + pos position + run func(*parser) (bool, error) +} + +type litMatcher struct { + pos position + val string + ignoreCase bool + want string +} + +type charClassMatcher struct { + pos position + val string + basicLatinChars [128]bool + chars []rune + ranges []rune + classes []*unicode.RangeTable + ignoreCase bool + inverted bool +} + +type anyMatcher position + +// errList cumulates the errors found by the parser. 
+type errList []error + +func (e *errList) add(err error) { + *e = append(*e, err) +} + +func (e errList) err() error { + if len(e) == 0 { + return nil + } + e.dedupe() + return e +} + +func (e *errList) dedupe() { + var cleaned []error + set := make(map[string]bool) + for _, err := range *e { + if msg := err.Error(); !set[msg] { + set[msg] = true + cleaned = append(cleaned, err) + } + } + *e = cleaned +} + +func (e errList) Error() string { + switch len(e) { + case 0: + return "" + case 1: + return e[0].Error() + default: + var buf bytes.Buffer + + for i, err := range e { + if i > 0 { + buf.WriteRune('\n') + } + buf.WriteString(err.Error()) + } + return buf.String() + } +} + +// parserError wraps an error with a prefix indicating the rule in which +// the error occurred. The original error is stored in the Inner field. +type parserError struct { + Inner error + pos position + prefix string + expected []string +} + +// Error returns the error message. +func (p *parserError) Error() string { + return p.prefix + ": " + p.Inner.Error() +} + +// newParser creates a parser with the specified input source and options. +func newParser(filename string, b []byte, opts ...Option) *parser { + stats := Stats{ + ChoiceAltCnt: make(map[string]map[string]int), + } + + p := &parser{ + filename: filename, + errs: new(errList), + data: b, + pt: savepoint{position: position{line: 1}}, + recover: true, + cur: current{ + state: make(storeDict), + globalStore: make(storeDict), + }, + maxFailPos: position{col: 1, line: 1}, + maxFailExpected: make([]string, 0, 20), + Stats: &stats, + // start rule is rule [0] unless an alternate entrypoint is specified + entrypoint: g.rules[0].name, + } + p.setOptions(opts) + + if p.maxExprCnt == 0 { + p.maxExprCnt = math.MaxUint64 + } + + return p +} + +// setOptions applies the options to the parser. 
+func (p *parser) setOptions(opts []Option) { + for _, opt := range opts { + opt(p) + } +} + +type resultTuple struct { + v interface{} + b bool + end savepoint +} + +const choiceNoMatch = -1 + +// Stats stores some statistics, gathered during parsing +type Stats struct { + // ExprCnt counts the number of expressions processed during parsing + // This value is compared to the maximum number of expressions allowed + // (set by the MaxExpressions option). + ExprCnt uint64 + + // ChoiceAltCnt is used to count for each ordered choice expression, + // which alternative is used how may times. + // These numbers allow to optimize the order of the ordered choice expression + // to increase the performance of the parser + // + // The outer key of ChoiceAltCnt is composed of the name of the rule as well + // as the line and the column of the ordered choice. + // The inner key of ChoiceAltCnt is the number (one-based) of the matching alternative. + // For each alternative the number of matches are counted. If an ordered choice does not + // match, a special counter is incremented. The name of this counter is set with + // the parser option Statistics. + // For an alternative to be included in ChoiceAltCnt, it has to match at least once. 
+ ChoiceAltCnt map[string]map[string]int +} + +type parser struct { + filename string + pt savepoint + cur current + + data []byte + errs *errList + + depth int + recover bool + debug bool + + memoize bool + // memoization table for the packrat algorithm: + // map[offset in source] map[expression or rule] {value, match} + memo map[int]map[interface{}]resultTuple + + // rules table, maps the rule identifier to the rule node + rules map[string]*rule + // variables stack, map of label to value + vstack []map[string]interface{} + // rule stack, allows identification of the current rule in errors + rstack []*rule + + // parse fail + maxFailPos position + maxFailExpected []string + maxFailInvertExpected bool + + // max number of expressions to be parsed + maxExprCnt uint64 + // entrypoint for the parser + entrypoint string + + allowInvalidUTF8 bool + + *Stats + + choiceNoMatch string + // recovery expression stack, keeps track of the currently available recovery expression, these are traversed in reverse + recoveryStack []map[string]interface{} +} + +// push a variable set on the vstack. +func (p *parser) pushV() { + if cap(p.vstack) == len(p.vstack) { + // create new empty slot in the stack + p.vstack = append(p.vstack, nil) + } else { + // slice to 1 more + p.vstack = p.vstack[:len(p.vstack)+1] + } + + // get the last args set + m := p.vstack[len(p.vstack)-1] + if m != nil && len(m) == 0 { + // empty map, all good + return + } + + m = make(map[string]interface{}) + p.vstack[len(p.vstack)-1] = m +} + +// pop a variable set from the vstack. 
+func (p *parser) popV() { + // if the map is not empty, clear it + m := p.vstack[len(p.vstack)-1] + if len(m) > 0 { + // GC that map + p.vstack[len(p.vstack)-1] = nil + } + p.vstack = p.vstack[:len(p.vstack)-1] +} + +// push a recovery expression with its labels to the recoveryStack +func (p *parser) pushRecovery(labels []string, expr interface{}) { + if cap(p.recoveryStack) == len(p.recoveryStack) { + // create new empty slot in the stack + p.recoveryStack = append(p.recoveryStack, nil) + } else { + // slice to 1 more + p.recoveryStack = p.recoveryStack[:len(p.recoveryStack)+1] + } + + m := make(map[string]interface{}, len(labels)) + for _, fl := range labels { + m[fl] = expr + } + p.recoveryStack[len(p.recoveryStack)-1] = m +} + +// pop a recovery expression from the recoveryStack +func (p *parser) popRecovery() { + // GC that map + p.recoveryStack[len(p.recoveryStack)-1] = nil + + p.recoveryStack = p.recoveryStack[:len(p.recoveryStack)-1] +} + +func (p *parser) print(prefix, s string) string { + if !p.debug { + return s + } + + fmt.Printf("%s %d:%d:%d: %s [%#U]\n", + prefix, p.pt.line, p.pt.col, p.pt.offset, s, p.pt.rn) + return s +} + +func (p *parser) in(s string) string { + p.depth++ + return p.print(strings.Repeat(" ", p.depth)+">", s) +} + +func (p *parser) out(s string) string { + p.depth-- + return p.print(strings.Repeat(" ", p.depth)+"<", s) +} + +func (p *parser) addErr(err error) { + p.addErrAt(err, p.pt.position, []string{}) +} + +func (p *parser) addErrAt(err error, pos position, expected []string) { + var buf bytes.Buffer + if p.filename != "" { + buf.WriteString(p.filename) + } + if buf.Len() > 0 { + buf.WriteString(":") + } + buf.WriteString(fmt.Sprintf("%d:%d (%d)", pos.line, pos.col, pos.offset)) + if len(p.rstack) > 0 { + if buf.Len() > 0 { + buf.WriteString(": ") + } + rule := p.rstack[len(p.rstack)-1] + if rule.displayName != "" { + buf.WriteString("rule " + rule.displayName) + } else { + buf.WriteString("rule " + rule.name) + } + } + pe := 
&parserError{Inner: err, pos: pos, prefix: buf.String(), expected: expected} + p.errs.add(pe) +} + +func (p *parser) failAt(fail bool, pos position, want string) { + // process fail if parsing fails and not inverted or parsing succeeds and invert is set + if fail == p.maxFailInvertExpected { + if pos.offset < p.maxFailPos.offset { + return + } + + if pos.offset > p.maxFailPos.offset { + p.maxFailPos = pos + p.maxFailExpected = p.maxFailExpected[:0] + } + + if p.maxFailInvertExpected { + want = "!" + want + } + p.maxFailExpected = append(p.maxFailExpected, want) + } +} + +// read advances the parser to the next rune. +func (p *parser) read() { + p.pt.offset += p.pt.w + rn, n := utf8.DecodeRune(p.data[p.pt.offset:]) + p.pt.rn = rn + p.pt.w = n + p.pt.col++ + if rn == '\n' { + p.pt.line++ + p.pt.col = 0 + } + + if rn == utf8.RuneError && n == 1 { // see utf8.DecodeRune + if !p.allowInvalidUTF8 { + p.addErr(errInvalidEncoding) + } + } +} + +// restore parser position to the savepoint pt. +func (p *parser) restore(pt savepoint) { + if p.debug { + defer p.out(p.in("restore")) + } + if pt.offset == p.pt.offset { + return + } + p.pt = pt +} + +// Cloner is implemented by any value that has a Clone method, which returns a +// copy of the value. This is mainly used for types which are not passed by +// value (e.g map, slice, chan) or structs that contain such types. +// +// This is used in conjunction with the global state feature to create proper +// copies of the state to allow the parser to properly restore the state in +// the case of backtracking. +type Cloner interface { + Clone() interface{} +} + +var statePool = &sync.Pool{ + New: func() interface{} { return make(storeDict) }, +} + +func (sd storeDict) Discard() { + for k := range sd { + delete(sd, k) + } + statePool.Put(sd) +} + +// clone and return parser current state. 
+func (p *parser) cloneState() storeDict { + if p.debug { + defer p.out(p.in("cloneState")) + } + + state := statePool.Get().(storeDict) + for k, v := range p.cur.state { + if c, ok := v.(Cloner); ok { + state[k] = c.Clone() + } else { + state[k] = v + } + } + return state +} + +// restore parser current state to the state storeDict. +// every restoreState should applied only one time for every cloned state +func (p *parser) restoreState(state storeDict) { + if p.debug { + defer p.out(p.in("restoreState")) + } + p.cur.state.Discard() + p.cur.state = state +} + +// get the slice of bytes from the savepoint start to the current position. +func (p *parser) sliceFrom(start savepoint) []byte { + return p.data[start.position.offset:p.pt.position.offset] +} + +func (p *parser) getMemoized(node interface{}) (resultTuple, bool) { + if len(p.memo) == 0 { + return resultTuple{}, false + } + m := p.memo[p.pt.offset] + if len(m) == 0 { + return resultTuple{}, false + } + res, ok := m[node] + return res, ok +} + +func (p *parser) setMemoized(pt savepoint, node interface{}, tuple resultTuple) { + if p.memo == nil { + p.memo = make(map[int]map[interface{}]resultTuple) + } + m := p.memo[pt.offset] + if m == nil { + m = make(map[interface{}]resultTuple) + p.memo[pt.offset] = m + } + m[node] = tuple +} + +func (p *parser) buildRulesTable(g *grammar) { + p.rules = make(map[string]*rule, len(g.rules)) + for _, r := range g.rules { + p.rules[r.name] = r + } +} + +func (p *parser) parse(g *grammar) (val interface{}, err error) { + if len(g.rules) == 0 { + p.addErr(errNoRule) + return nil, p.errs.err() + } + + // TODO : not super critical but this could be generated + p.buildRulesTable(g) + + if p.recover { + // panic can be used in action code to stop parsing immediately + // and return the panic as an error. 
+ defer func() { + if e := recover(); e != nil { + if p.debug { + defer p.out(p.in("panic handler")) + } + val = nil + switch e := e.(type) { + case error: + p.addErr(e) + default: + p.addErr(fmt.Errorf("%v", e)) + } + err = p.errs.err() + } + }() + } + + startRule, ok := p.rules[p.entrypoint] + if !ok { + p.addErr(errInvalidEntrypoint) + return nil, p.errs.err() + } + + p.read() // advance to first rune + val, ok = p.parseRule(startRule) + if !ok { + if len(*p.errs) == 0 { + // If parsing fails, but no errors have been recorded, the expected values + // for the farthest parser position are returned as error. + maxFailExpectedMap := make(map[string]struct{}, len(p.maxFailExpected)) + for _, v := range p.maxFailExpected { + maxFailExpectedMap[v] = struct{}{} + } + expected := make([]string, 0, len(maxFailExpectedMap)) + eof := false + if _, ok := maxFailExpectedMap["!."]; ok { + delete(maxFailExpectedMap, "!.") + eof = true + } + for k := range maxFailExpectedMap { + expected = append(expected, k) + } + sort.Strings(expected) + if eof { + expected = append(expected, "EOF") + } + p.addErrAt(errors.New("no match found, expected: "+listJoin(expected, ", ", "or")), p.maxFailPos, expected) + } + + return nil, p.errs.err() + } + return val, p.errs.err() +} + +func listJoin(list []string, sep string, lastSep string) string { + switch len(list) { + case 0: + return "" + case 1: + return list[0] + default: + return strings.Join(list[:len(list)-1], sep) + " " + lastSep + " " + list[len(list)-1] + } +} + +func (p *parser) parseRule(rule *rule) (interface{}, bool) { + if p.debug { + defer p.out(p.in("parseRule " + rule.name)) + } + + if p.memoize { + res, ok := p.getMemoized(rule) + if ok { + p.restore(res.end) + return res.v, res.b + } + } + + start := p.pt + p.rstack = append(p.rstack, rule) + p.pushV() + val, ok := p.parseExpr(rule.expr) + p.popV() + p.rstack = p.rstack[:len(p.rstack)-1] + if ok && p.debug { + p.print(strings.Repeat(" ", p.depth)+"MATCH", 
string(p.sliceFrom(start))) + } + + if p.memoize { + p.setMemoized(start, rule, resultTuple{val, ok, p.pt}) + } + return val, ok +} + +func (p *parser) parseExpr(expr interface{}) (interface{}, bool) { + var pt savepoint + + if p.memoize { + res, ok := p.getMemoized(expr) + if ok { + p.restore(res.end) + return res.v, res.b + } + pt = p.pt + } + + p.ExprCnt++ + if p.ExprCnt > p.maxExprCnt { + panic(errMaxExprCnt) + } + + var val interface{} + var ok bool + switch expr := expr.(type) { + case *actionExpr: + val, ok = p.parseActionExpr(expr) + case *andCodeExpr: + val, ok = p.parseAndCodeExpr(expr) + case *andExpr: + val, ok = p.parseAndExpr(expr) + case *anyMatcher: + val, ok = p.parseAnyMatcher(expr) + case *charClassMatcher: + val, ok = p.parseCharClassMatcher(expr) + case *choiceExpr: + val, ok = p.parseChoiceExpr(expr) + case *labeledExpr: + val, ok = p.parseLabeledExpr(expr) + case *litMatcher: + val, ok = p.parseLitMatcher(expr) + case *notCodeExpr: + val, ok = p.parseNotCodeExpr(expr) + case *notExpr: + val, ok = p.parseNotExpr(expr) + case *oneOrMoreExpr: + val, ok = p.parseOneOrMoreExpr(expr) + case *recoveryExpr: + val, ok = p.parseRecoveryExpr(expr) + case *ruleRefExpr: + val, ok = p.parseRuleRefExpr(expr) + case *seqExpr: + val, ok = p.parseSeqExpr(expr) + case *stateCodeExpr: + val, ok = p.parseStateCodeExpr(expr) + case *throwExpr: + val, ok = p.parseThrowExpr(expr) + case *zeroOrMoreExpr: + val, ok = p.parseZeroOrMoreExpr(expr) + case *zeroOrOneExpr: + val, ok = p.parseZeroOrOneExpr(expr) + default: + panic(fmt.Sprintf("unknown expression type %T", expr)) + } + if p.memoize { + p.setMemoized(pt, expr, resultTuple{val, ok, p.pt}) + } + return val, ok +} + +func (p *parser) parseActionExpr(act *actionExpr) (interface{}, bool) { + if p.debug { + defer p.out(p.in("parseActionExpr")) + } + + start := p.pt + val, ok := p.parseExpr(act.expr) + if ok { + p.cur.pos = start.position + p.cur.text = p.sliceFrom(start) + state := p.cloneState() + actVal, err := 
act.run(p) + if err != nil { + p.addErrAt(err, start.position, []string{}) + } + p.restoreState(state) + + val = actVal + } + if ok && p.debug { + p.print(strings.Repeat(" ", p.depth)+"MATCH", string(p.sliceFrom(start))) + } + return val, ok +} + +func (p *parser) parseAndCodeExpr(and *andCodeExpr) (interface{}, bool) { + if p.debug { + defer p.out(p.in("parseAndCodeExpr")) + } + + state := p.cloneState() + + ok, err := and.run(p) + if err != nil { + p.addErr(err) + } + p.restoreState(state) + + return nil, ok +} + +func (p *parser) parseAndExpr(and *andExpr) (interface{}, bool) { + if p.debug { + defer p.out(p.in("parseAndExpr")) + } + + pt := p.pt + state := p.cloneState() + p.pushV() + _, ok := p.parseExpr(and.expr) + p.popV() + p.restoreState(state) + p.restore(pt) + + return nil, ok +} + +func (p *parser) parseAnyMatcher(any *anyMatcher) (interface{}, bool) { + if p.debug { + defer p.out(p.in("parseAnyMatcher")) + } + + if p.pt.rn == utf8.RuneError && p.pt.w == 0 { + // EOF - see utf8.DecodeRune + p.failAt(false, p.pt.position, ".") + return nil, false + } + start := p.pt + p.read() + p.failAt(true, start.position, ".") + return p.sliceFrom(start), true +} + +func (p *parser) parseCharClassMatcher(chr *charClassMatcher) (interface{}, bool) { + if p.debug { + defer p.out(p.in("parseCharClassMatcher")) + } + + cur := p.pt.rn + start := p.pt + + // can't match EOF + if cur == utf8.RuneError && p.pt.w == 0 { // see utf8.DecodeRune + p.failAt(false, start.position, chr.val) + return nil, false + } + + if chr.ignoreCase { + cur = unicode.ToLower(cur) + } + + // try to match in the list of available chars + for _, rn := range chr.chars { + if rn == cur { + if chr.inverted { + p.failAt(false, start.position, chr.val) + return nil, false + } + p.read() + p.failAt(true, start.position, chr.val) + return p.sliceFrom(start), true + } + } + + // try to match in the list of ranges + for i := 0; i < len(chr.ranges); i += 2 { + if cur >= chr.ranges[i] && cur <= 
chr.ranges[i+1] { + if chr.inverted { + p.failAt(false, start.position, chr.val) + return nil, false + } + p.read() + p.failAt(true, start.position, chr.val) + return p.sliceFrom(start), true + } + } + + // try to match in the list of Unicode classes + for _, cl := range chr.classes { + if unicode.Is(cl, cur) { + if chr.inverted { + p.failAt(false, start.position, chr.val) + return nil, false + } + p.read() + p.failAt(true, start.position, chr.val) + return p.sliceFrom(start), true + } + } + + if chr.inverted { + p.read() + p.failAt(true, start.position, chr.val) + return p.sliceFrom(start), true + } + p.failAt(false, start.position, chr.val) + return nil, false +} + +func (p *parser) incChoiceAltCnt(ch *choiceExpr, altI int) { + choiceIdent := fmt.Sprintf("%s %d:%d", p.rstack[len(p.rstack)-1].name, ch.pos.line, ch.pos.col) + m := p.ChoiceAltCnt[choiceIdent] + if m == nil { + m = make(map[string]int) + p.ChoiceAltCnt[choiceIdent] = m + } + // We increment altI by 1, so the keys do not start at 0 + alt := strconv.Itoa(altI + 1) + if altI == choiceNoMatch { + alt = p.choiceNoMatch + } + m[alt]++ +} + +func (p *parser) parseChoiceExpr(ch *choiceExpr) (interface{}, bool) { + if p.debug { + defer p.out(p.in("parseChoiceExpr")) + } + + for altI, alt := range ch.alternatives { + // dummy assignment to prevent compile error if optimized + _ = altI + + state := p.cloneState() + + p.pushV() + val, ok := p.parseExpr(alt) + p.popV() + if ok { + p.incChoiceAltCnt(ch, altI) + return val, ok + } + p.restoreState(state) + } + p.incChoiceAltCnt(ch, choiceNoMatch) + return nil, false +} + +func (p *parser) parseLabeledExpr(lab *labeledExpr) (interface{}, bool) { + if p.debug { + defer p.out(p.in("parseLabeledExpr")) + } + + p.pushV() + val, ok := p.parseExpr(lab.expr) + p.popV() + if ok && lab.label != "" { + m := p.vstack[len(p.vstack)-1] + m[lab.label] = val + } + return val, ok +} + +func (p *parser) parseLitMatcher(lit *litMatcher) (interface{}, bool) { + if p.debug { + defer 
p.out(p.in("parseLitMatcher")) + } + + start := p.pt + for _, want := range lit.val { + cur := p.pt.rn + if lit.ignoreCase { + cur = unicode.ToLower(cur) + } + if cur != want { + p.failAt(false, start.position, lit.want) + p.restore(start) + return nil, false + } + p.read() + } + p.failAt(true, start.position, lit.want) + return p.sliceFrom(start), true +} + +func (p *parser) parseNotCodeExpr(not *notCodeExpr) (interface{}, bool) { + if p.debug { + defer p.out(p.in("parseNotCodeExpr")) + } + + state := p.cloneState() + + ok, err := not.run(p) + if err != nil { + p.addErr(err) + } + p.restoreState(state) + + return nil, !ok +} + +func (p *parser) parseNotExpr(not *notExpr) (interface{}, bool) { + if p.debug { + defer p.out(p.in("parseNotExpr")) + } + + pt := p.pt + state := p.cloneState() + p.pushV() + p.maxFailInvertExpected = !p.maxFailInvertExpected + _, ok := p.parseExpr(not.expr) + p.maxFailInvertExpected = !p.maxFailInvertExpected + p.popV() + p.restoreState(state) + p.restore(pt) + + return nil, !ok +} + +func (p *parser) parseOneOrMoreExpr(expr *oneOrMoreExpr) (interface{}, bool) { + if p.debug { + defer p.out(p.in("parseOneOrMoreExpr")) + } + + var vals []interface{} + + for { + p.pushV() + val, ok := p.parseExpr(expr.expr) + p.popV() + if !ok { + if len(vals) == 0 { + // did not match once, no match + return nil, false + } + return vals, true + } + vals = append(vals, val) + } +} + +func (p *parser) parseRecoveryExpr(recover *recoveryExpr) (interface{}, bool) { + if p.debug { + defer p.out(p.in("parseRecoveryExpr (" + strings.Join(recover.failureLabel, ",") + ")")) + } + + p.pushRecovery(recover.failureLabel, recover.recoverExpr) + val, ok := p.parseExpr(recover.expr) + p.popRecovery() + + return val, ok +} + +func (p *parser) parseRuleRefExpr(ref *ruleRefExpr) (interface{}, bool) { + if p.debug { + defer p.out(p.in("parseRuleRefExpr " + ref.name)) + } + + if ref.name == "" { + panic(fmt.Sprintf("%s: invalid rule: missing name", ref.pos)) + } + + rule := 
p.rules[ref.name] + if rule == nil { + p.addErr(fmt.Errorf("undefined rule: %s", ref.name)) + return nil, false + } + return p.parseRule(rule) +} + +func (p *parser) parseSeqExpr(seq *seqExpr) (interface{}, bool) { + if p.debug { + defer p.out(p.in("parseSeqExpr")) + } + + vals := make([]interface{}, 0, len(seq.exprs)) + + pt := p.pt + state := p.cloneState() + for _, expr := range seq.exprs { + val, ok := p.parseExpr(expr) + if !ok { + p.restoreState(state) + p.restore(pt) + return nil, false + } + vals = append(vals, val) + } + return vals, true +} + +func (p *parser) parseStateCodeExpr(state *stateCodeExpr) (interface{}, bool) { + if p.debug { + defer p.out(p.in("parseStateCodeExpr")) + } + + err := state.run(p) + if err != nil { + p.addErr(err) + } + return nil, true +} + +func (p *parser) parseThrowExpr(expr *throwExpr) (interface{}, bool) { + if p.debug { + defer p.out(p.in("parseThrowExpr")) + } + + for i := len(p.recoveryStack) - 1; i >= 0; i-- { + if recoverExpr, ok := p.recoveryStack[i][expr.label]; ok { + if val, ok := p.parseExpr(recoverExpr); ok { + return val, ok + } + } + } + + return nil, false +} + +func (p *parser) parseZeroOrMoreExpr(expr *zeroOrMoreExpr) (interface{}, bool) { + if p.debug { + defer p.out(p.in("parseZeroOrMoreExpr")) + } + + var vals []interface{} + + for { + p.pushV() + val, ok := p.parseExpr(expr.expr) + p.popV() + if !ok { + return vals, true + } + vals = append(vals, val) + } +} + +func (p *parser) parseZeroOrOneExpr(expr *zeroOrOneExpr) (interface{}, bool) { + if p.debug { + defer p.out(p.in("parseZeroOrOneExpr")) + } + + p.pushV() + val, _ := p.parseExpr(expr.expr) + p.popV() + // whether it matched or not, consider it a match + return val, true +} diff --git a/internal/cmdb/qdsl/server.go b/internal/cmdb/qdsl/server.go new file mode 100644 index 0000000..af65871 --- /dev/null +++ b/internal/cmdb/qdsl/server.go @@ -0,0 +1,114 @@ +// Copyright 2022 Listware + +package qdsl + +import ( + "context" + "crypto/tls" + "net/http" 
+ "os" + + "git.fg-tech.ru/listware/cmdb/internal/arangodb" + "git.fg-tech.ru/listware/proto/sdk/pbcmdb/pbqdsl" + driver "github.com/arangodb/go-driver" + arangohttp "github.com/arangodb/go-driver/http" +) + +var ( + arangoAddr string + arangoUser string + arangoPassword string +) + +func init() { + if value, ok := os.LookupEnv("ARANGO_ADDR"); ok { + arangoAddr = value + } + if value, ok := os.LookupEnv("ARANGO_USER"); ok { + arangoUser = value + } + if value, ok := os.LookupEnv("ARANGO_PASSWORD"); ok { + arangoPassword = value + } +} + +type Server struct { + pbqdsl.UnimplementedQdslServiceServer + + client driver.Client +} + +func New(ctx context.Context) (s *Server, err error) { + s = &Server{} + + tr := &http.Transport{ + TLSClientConfig: &tls.Config{InsecureSkipVerify: true}, + } + + // Open a client connection + conn, err := arangohttp.NewConnection(arangohttp.ConnectionConfig{ + Transport: tr, + Endpoints: []string{arangoAddr}, + }) + if err != nil { + return + } + + s.client, err = driver.NewClient(driver.ClientConfig{ + Connection: conn, + Authentication: driver.BasicAuthentication(arangoUser, arangoPassword), + }) + + return +} + +func (s *Server) Qdsl(ctx context.Context, query *pbqdsl.Query) (elements *pbqdsl.Elements, err error) { + elements = &pbqdsl.Elements{Elements: make([]*pbqdsl.Element, 0)} + + qdslElements, err := parse(query) + if err != nil { + return + } + + graph, err := arangodb.Graph(ctx, s.client) + if err != nil { + return + } + + for _, element := range qdslElements { + documents, err := s.query(ctx, element) + if err != nil { + return elements, err + } + + for _, document := range documents { + elements.Elements = append(elements.Elements, document.ToElement()) + + if query.Options.Remove { + + if query.Options.Id { + collection, err := graph.VertexCollection(ctx, document.Id.Collection()) + if err != nil { + return elements, err + } + if _, err := collection.RemoveDocument(ctx, document.Id.Key()); err != nil { + return elements, err 
+ } + } + + if query.Options.LinkId { + collection, _, err := graph.EdgeCollection(ctx, document.LinkId.Collection()) + if err != nil { + return elements, err + } + if _, err := collection.RemoveDocument(ctx, document.LinkId.Key()); err != nil { + return elements, err + } + } + + } + } + } + + return +} diff --git a/internal/cmdb/qdsl/utils.go b/internal/cmdb/qdsl/utils.go new file mode 100644 index 0000000..0bd1a11 --- /dev/null +++ b/internal/cmdb/qdsl/utils.go @@ -0,0 +1,25 @@ +// Copyright 2022 Listware + +package qdsl + +import ( + "git.fg-tech.ru/listware/proto/sdk/pbcmdb/pbqdsl" +) + +func normalizeOptions(options *pbqdsl.Options) { + if !options.GetId() && + !options.GetKey() && + !options.GetName() && + !options.GetType() && + !options.GetLink() && + !options.GetLinkId() && + !options.GetPath() && + !options.GetObject() { + options.Id = true + } +} +func reverse[S ~[]E, E any](s S) { + for i, j := 0, len(s)-1; i < j; i, j = i+1, j-1 { + s[i], s[j] = s[j], s[i] + } +} diff --git a/internal/cmdb/vertex/cmdb.go b/internal/cmdb/vertex/cmdb.go new file mode 100644 index 0000000..6b7a197 --- /dev/null +++ b/internal/cmdb/vertex/cmdb.go @@ -0,0 +1,23 @@ +// Copyright 2022 Listware + +package vertex + +import ( + "context" + + "git.fg-tech.ru/listware/cmdb/internal/arangodb" + "git.fg-tech.ru/listware/proto/sdk/pbcmdb" + driver "github.com/arangodb/go-driver" +) + +type Server struct { + pbcmdb.UnimplementedVertexServiceServer + + client driver.Client +} + +func New(ctx context.Context) (s *Server, err error) { + s = &Server{} + s.client, err = arangodb.Connect() + return +} diff --git a/internal/cmdb/vertex/create.go b/internal/cmdb/vertex/create.go new file mode 100644 index 0000000..5a5dfa8 --- /dev/null +++ b/internal/cmdb/vertex/create.go @@ -0,0 +1,26 @@ +// Copyright 2022 Listware + +package vertex + +import ( + "context" + "encoding/json" + + vertex "git.fg-tech.ru/listware/cmdb/internal/arangodb/vertex" + "git.fg-tech.ru/listware/proto/sdk/pbcmdb" +) + 
+func (s *Server) Create(ctx context.Context, request *pbcmdb.Request) (response *pbcmdb.Response, err error) { + response = &pbcmdb.Response{} + meta, resp, err := vertex.Create(ctx, s.client, request.GetCollection(), request.GetPayload()) + if err != nil { + return + } + response.Meta = &pbcmdb.Meta{ + Key: meta.Key, + Id: meta.ID.String(), + Rev: meta.Rev, + } + response.Payload, err = json.Marshal(resp) + return +} diff --git a/internal/cmdb/vertex/read.go b/internal/cmdb/vertex/read.go new file mode 100644 index 0000000..47773f2 --- /dev/null +++ b/internal/cmdb/vertex/read.go @@ -0,0 +1,26 @@ +// Copyright 2022 Listware + +package vertex + +import ( + "context" + "encoding/json" + + vertex "git.fg-tech.ru/listware/cmdb/internal/arangodb/vertex" + "git.fg-tech.ru/listware/proto/sdk/pbcmdb" +) + +func (s *Server) Read(ctx context.Context, request *pbcmdb.Request) (response *pbcmdb.Response, err error) { + response = &pbcmdb.Response{} + meta, resp, err := vertex.Read(ctx, s.client, request.GetCollection(), request.GetKey()) + if err != nil { + return + } + response.Meta = &pbcmdb.Meta{ + Key: meta.Key, + Id: meta.ID.String(), + Rev: meta.Rev, + } + response.Payload, err = json.Marshal(resp) + return +} diff --git a/internal/cmdb/vertex/remove.go b/internal/cmdb/vertex/remove.go new file mode 100644 index 0000000..69fd87b --- /dev/null +++ b/internal/cmdb/vertex/remove.go @@ -0,0 +1,24 @@ +// Copyright 2022 Listware + +package vertex + +import ( + "context" + + vertex "git.fg-tech.ru/listware/cmdb/internal/arangodb/vertex" + "git.fg-tech.ru/listware/proto/sdk/pbcmdb" +) + +func (s *Server) Remove(ctx context.Context, request *pbcmdb.Request) (response *pbcmdb.Response, err error) { + response = &pbcmdb.Response{} + meta, err := vertex.Remove(ctx, s.client, request.GetCollection(), request.GetKey()) + if err != nil { + return + } + response.Meta = &pbcmdb.Meta{ + Key: meta.Key, + Id: meta.ID.String(), + Rev: meta.Rev, + } + return +} diff --git 
a/internal/cmdb/vertex/update.go b/internal/cmdb/vertex/update.go new file mode 100644 index 0000000..3950fc3 --- /dev/null +++ b/internal/cmdb/vertex/update.go @@ -0,0 +1,26 @@ +// Copyright 2022 Listware + +package vertex + +import ( + "context" + "encoding/json" + + vertex "git.fg-tech.ru/listware/cmdb/internal/arangodb/vertex" + "git.fg-tech.ru/listware/proto/sdk/pbcmdb" +) + +func (s *Server) Update(ctx context.Context, request *pbcmdb.Request) (response *pbcmdb.Response, err error) { + response = &pbcmdb.Response{} + meta, resp, err := vertex.Update(ctx, s.client, request.GetCollection(), request.GetKey(), request.GetPayload()) + if err != nil { + return + } + response.Meta = &pbcmdb.Meta{ + Key: meta.Key, + Id: meta.ID.String(), + Rev: meta.Rev, + } + response.Payload, err = json.Marshal(resp) + return +} diff --git a/internal/schema/reflect.go b/internal/schema/reflect.go new file mode 100644 index 0000000..770fb16 --- /dev/null +++ b/internal/schema/reflect.go @@ -0,0 +1,513 @@ +package schema + +import ( + "encoding/json" + "net" + "net/url" + "reflect" + "regexp" + "strconv" + "strings" + "time" +) + +// Version is the JSON Schema version. +// If extending JSON Schema with custom values use a custom URI. +// RFC draft-wright-json-schema-00, section 6 +var Version = "http://json-schema.org/draft-04/schema#" + +// Schema is the root schema. +// RFC draft-wright-json-schema-00, section 4.5 +type Schema struct { + *Type + Definitions Definitions `json:"definitions,omitempty"` +} + +// Type represents a JSON Schema object type. 
+type Type struct { + // RFC draft-wright-json-schema-00 + // Version string `json:"$schema,omitempty"` // section 6.1 + Version string `json:"-"` // section 6.1 + Ref string `json:"$ref,omitempty"` // section 7 + // RFC draft-wright-json-schema-validation-00, section 5 + MultipleOf int `json:"multipleOf,omitempty"` // section 5.1 + Maximum int `json:"maximum,omitempty"` // section 5.2 + ExclusiveMaximum bool `json:"exclusiveMaximum,omitempty"` // section 5.3 + Minimum int `json:"minimum,omitempty"` // section 5.4 + ExclusiveMinimum bool `json:"exclusiveMinimum,omitempty"` // section 5.5 + MaxLength int `json:"maxLength,omitempty"` // section 5.6 + MinLength int `json:"minLength,omitempty"` // section 5.7 + Pattern string `json:"pattern,omitempty"` // section 5.8 + AdditionalItems *Type `json:"additionalItems,omitempty"` // section 5.9 + Items *Type `json:"items,omitempty"` // section 5.9 + MaxItems int `json:"maxItems,omitempty"` // section 5.10 + MinItems int `json:"minItems,omitempty"` // section 5.11 + UniqueItems bool `json:"uniqueItems,omitempty"` // section 5.12 + MaxProperties int `json:"maxProperties,omitempty"` // section 5.13 + MinProperties int `json:"minProperties,omitempty"` // section 5.14 + Required []string `json:"required,omitempty"` // section 5.15 + Properties map[string]*Type `json:"properties,omitempty"` // section 5.16 + PatternProperties map[string]*Type `json:"patternProperties,omitempty"` // section 5.17 + AdditionalProperties json.RawMessage `json:"additionalProperties,omitempty"` // section 5.18 + Dependencies map[string]*Type `json:"dependencies,omitempty"` // section 5.19 + Enum []interface{} `json:"enum,omitempty"` // section 5.20 + Type string `json:"type,omitempty"` // section 5.21 + AllOf []*Type `json:"allOf,omitempty"` // section 5.22 + AnyOf []*Type `json:"anyOf,omitempty"` // section 5.23 + OneOf []*Type `json:"oneOf,omitempty"` // section 5.24 + Not *Type `json:"not,omitempty"` // section 5.25 + Definitions Definitions 
`json:"definitions,omitempty"` // section 5.26 + // RFC draft-wright-json-schema-validation-00, section 6, 7 + Title string `json:"title,omitempty"` // section 6.1 + Description string `json:"description,omitempty"` // section 6.1 + Default interface{} `json:"default,omitempty"` // section 6.2 + Format string `json:"format,omitempty"` // section 7 + // RFC draft-wright-json-schema-hyperschema-00, section 4 + Media *Type `json:"media,omitempty"` // section 4.3 + BinaryEncoding string `json:"binaryEncoding,omitempty"` // section 4.3 +} + +// Reflect reflects to Schema from a value using the default Reflector +func Reflect(v interface{}) *Schema { + return ReflectFromType(reflect.TypeOf(v)) +} + +// ReflectFromType generates root schema using the default Reflector +func ReflectFromType(t reflect.Type) *Schema { + r := &Reflector{} + return r.ReflectFromType(t) +} + +// A Reflector reflects values into a Schema. +type Reflector struct { + // AllowAdditionalProperties will cause the Reflector to generate a schema + // with additionalProperties to 'true' for all struct types. This means + // the presence of additional keys in JSON objects will not cause validation + // to fail. Note said additional keys will simply be dropped when the + // validated JSON is unmarshaled. + AllowAdditionalProperties bool + + // RequiredFromJSONSchemaTags will cause the Reflector to generate a schema + // that requires any key tagged with `jsonschema:required`, overriding the + // default of requiring any key *not* tagged with `json:,omitempty`. + RequiredFromJSONSchemaTags bool + + // ExpandedStruct will cause the toplevel definitions of the schema not + // be referenced itself to a definition. + ExpandedStruct bool + + // TitleNotation will cause the Reflector to generate a schema + // with different title notations. This means + // the title will be transform your structure name from + // camelcase(go-case) notation to the declared. There are + // two supported notations: snake and dash. 
+ // P.S. first letter capitalisation will be ignored (converted to lower-case) + TitleNotation string +} + +// Reflect reflects to Schema from a value. +func (r *Reflector) Reflect(v interface{}) *Schema { + return r.ReflectFromType(reflect.TypeOf(v)) +} + +// ReflectTitle return only structure name as profile type title +func (r *Reflector) ReflectTitle(v interface{}) string { + return r.getTitle(reflect.TypeOf(v)) +} + +func (r *Reflector) getTitle(t reflect.Type) string { + name := t.Name() + if t.Kind() == reflect.Ptr { + name = t.Elem().Name() + } + var matchAllCap = regexp.MustCompile("([a-z0-9])([A-Z])") + switch r.TitleNotation { + case "dash": + return strings.ToLower(matchAllCap.ReplaceAllString(name, "${1}-${2}")) + case "snake": + return strings.ToLower(matchAllCap.ReplaceAllString(name, "${1}_${2}")) + default: + return name + } +} + +// ReflectFromType generates root schema +func (r *Reflector) ReflectFromType(t reflect.Type) *Schema { + definitions := Definitions{} + if r.ExpandedStruct { + + st := &Type{ + Version: Version, + Title: r.getTitle(t), + Type: "object", + Properties: map[string]*Type{}, + AdditionalProperties: []byte("false"), + } + if r.AllowAdditionalProperties { + st.AdditionalProperties = []byte("true") + } + r.reflectStructFields(st, definitions, t) + r.reflectStruct(definitions, t) + delete(definitions, t.Name()) + return &Schema{Type: st, Definitions: definitions} + } + + s := &Schema{ + Type: r.reflectTypeToSchema(definitions, t), + Definitions: definitions, + } + return s +} + +// Definitions hold schema definitions. +// http://json-schema.org/latest/json-schema-validation.html#rfc.section.5.26 +// RFC draft-wright-json-schema-validation-00, section 5.26 +type Definitions map[string]*Type + +// Available Go defined types for JSON Schema Validation. 
+// RFC draft-wright-json-schema-validation-00, section 7.3 +var ( + timeType = reflect.TypeOf(time.Time{}) // date-time RFC section 7.3.1 + ipType = reflect.TypeOf(net.IP{}) // ipv4 and ipv6 RFC section 7.3.4, 7.3.5 + uriType = reflect.TypeOf(url.URL{}) // uri RFC section 7.3.6 +) + +// Byte slices will be encoded as base64 +var byteSliceType = reflect.TypeOf([]byte(nil)) + +// Go code generated from protobuf enum types should fulfil this interface. +type protoEnum interface { + EnumDescriptor() ([]byte, []int) +} + +var protoEnumType = reflect.TypeOf((*protoEnum)(nil)).Elem() + +func (r *Reflector) reflectTypeToSchema(definitions Definitions, t reflect.Type) *Type { + // Already added to definitions? + if _, ok := definitions[t.Name()]; ok { + return &Type{Ref: "#/definitions/" + t.Name()} + } + + // jsonpb will marshal protobuf enum options as either strings or integers. + // It will unmarshal either. + if t.Implements(protoEnumType) { + return &Type{OneOf: []*Type{ + {Type: "string"}, + {Type: "integer"}, + }} + } + + // Defined format types for JSON Schema Validation + // RFC draft-wright-json-schema-validation-00, section 7.3 + // TODO email RFC section 7.3.2, hostname RFC section 7.3.3, uriref RFC section 7.3.7 + switch t { + case ipType: + + // TODO differentiate ipv4 and ipv6 RFC section 7.3.4, 7.3.5 + return &Type{Type: "string", Format: "ipv4"} // ipv4 RFC section 7.3.4 + } + + switch t.Kind() { + case reflect.Struct: + + switch t { + case timeType: // date-time RFC section 7.3.1 + return &Type{Type: "string", Format: "date-time"} + case uriType: // uri RFC section 7.3.6 + return &Type{Type: "string", Format: "uri"} + default: + return r.reflectStruct(definitions, t) + } + + case reflect.Map: + rt := &Type{ + Type: "object", + PatternProperties: map[string]*Type{ + ".*": r.reflectTypeToSchema(definitions, t.Elem()), + }, + } + delete(rt.PatternProperties, "additionalProperties") + return rt + + case reflect.Slice, reflect.Array: + returnType := &Type{} + 
if t.Kind() == reflect.Array { + returnType.MinItems = t.Len() + returnType.MaxItems = returnType.MinItems + } + switch t { + case byteSliceType: + returnType.Type = "string" + returnType.Media = &Type{BinaryEncoding: "base64"} + return returnType + default: + returnType.Type = "array" + returnType.Items = r.reflectTypeToSchema(definitions, t.Elem()) + return returnType + } + + case reflect.Interface: + return &Type{ + Type: "object", + AdditionalProperties: []byte("true"), + } + + case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64, + reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64: + return &Type{Type: "integer"} + + case reflect.Float32, reflect.Float64: + return &Type{Type: "number"} + + case reflect.Bool: + return &Type{Type: "boolean"} + + case reflect.String: + return &Type{Type: "string"} + + case reflect.Ptr: + return r.reflectTypeToSchema(definitions, t.Elem()) + } + panic("unsupported type " + t.String()) +} + +// Refects a struct to a JSON Schema type. 
+func (r *Reflector) reflectStruct(definitions Definitions, t reflect.Type) *Type { + st := &Type{ + Type: "object", + Properties: map[string]*Type{}, + AdditionalProperties: []byte("false"), + } + if r.AllowAdditionalProperties { + st.AdditionalProperties = []byte("true") + } + definitions[t.Name()] = st + r.reflectStructFields(st, definitions, t) + + return &Type{ + Version: Version, + Ref: "#/definitions/" + t.Name(), + } +} + +func (r *Reflector) reflectStructFields(st *Type, definitions Definitions, t reflect.Type) { + if t.Kind() == reflect.Ptr { + t = t.Elem() + } + + // Skip non Struct types + // https://cs.opensource.google/go/go/+/refs/tags/go1.18:src/reflect/value.go;l=1219 + if t.Kind() != reflect.Struct { + return + } + + for i := 0; i < t.NumField(); i++ { + f := t.Field(i) + if !f.IsExported() { + continue + } + + // anonymous and exported type should be processed recursively + // current type should inherit properties of anonymous one + if f.Anonymous && (f.Type.Kind() == reflect.Struct || f.Type.Kind() == reflect.Ptr) { + r.reflectStructFields(st, definitions, f.Type) + continue + } + + name, required := r.reflectFieldName(f) + if name == "" { + continue + } + property := r.reflectTypeToSchema(definitions, f.Type) + property.structKeywordsFromTags(f) + property.defaultValueFromTags(f) + st.Properties[name] = property + if required { + st.Required = append(st.Required, name) + } + } +} + +func (t *Type) defaultValueFromTags(f reflect.StructField) { + def := f.Tag.Get("default") // Get default value + switch t.Type { + case "bool", "boolean": + if def == "true" { + t.Default = true + } else if def == "false" { + t.Default = false + } + case "string": + if def != "" { + t.Default = def + } + case "number", "integer": + if num, err := strconv.Atoi(def); err == nil { + t.Default = num + } + case "array": + // TODO: implement default values for arrays + } +} + +func (t *Type) structKeywordsFromTags(f reflect.StructField) { + tags := 
strings.Split(f.Tag.Get("jsonschema"), ",") + switch t.Type { + case "string": + t.stringKeywords(tags) + case "number", "integer": + t.numbericKeywords(tags) + case "array": + t.arrayKeywords(tags) + } +} + +// read struct tags for string type keyworks +func (t *Type) stringKeywords(tags []string) { + for _, tag := range tags { + nameValue := strings.Split(tag, "=") + if len(nameValue) == 2 { + name, val := nameValue[0], nameValue[1] + switch name { + case "minLength": + i, _ := strconv.Atoi(val) + t.MinLength = i + case "maxLength": + i, _ := strconv.Atoi(val) + t.MaxLength = i + case "format": + switch val { + case "date-time", "email", "hostname", "ipv4", "ipv6", "uri": + t.Format = val + break + } + } + } + } +} + +// read struct tags for numberic type keyworks +func (t *Type) numbericKeywords(tags []string) { + for _, tag := range tags { + nameValue := strings.Split(tag, "=") + if len(nameValue) == 2 { + name, val := nameValue[0], nameValue[1] + switch name { + case "multipleOf": + i, _ := strconv.Atoi(val) + t.MultipleOf = i + case "minimum": + i, _ := strconv.Atoi(val) + t.Minimum = i + case "maximum": + i, _ := strconv.Atoi(val) + t.Maximum = i + case "exclusiveMaximum": + b, _ := strconv.ParseBool(val) + t.ExclusiveMaximum = b + case "exclusiveMinimum": + b, _ := strconv.ParseBool(val) + t.ExclusiveMinimum = b + } + } + } +} + +// read struct tags for object type keyworks +// func (t *Type) objectKeywords(tags []string) { +// for _, tag := range tags{ +// nameValue := strings.Split(tag, "=") +// name, val := nameValue[0], nameValue[1] +// switch name{ +// case "dependencies": +// t.Dependencies = val +// break; +// case "patternProperties": +// t.PatternProperties = val +// break; +// } +// } +// } + +// read struct tags for array type keyworks +func (t *Type) arrayKeywords(tags []string) { + for _, tag := range tags { + nameValue := strings.Split(tag, "=") + if len(nameValue) == 2 { + name, val := nameValue[0], nameValue[1] + switch name { + case 
"minItems": + i, _ := strconv.Atoi(val) + t.MinItems = i + case "maxItems": + i, _ := strconv.Atoi(val) + t.MaxItems = i + case "uniqueItems": + t.UniqueItems = true + } + } + } +} + +func requiredFromJSONTags(tags []string) bool { + if ignoredByJSONTags(tags) { + return false + } + + for _, tag := range tags[1:] { + if tag == "omitempty" { + return false + } + } + return true +} + +func requiredFromJSONSchemaTags(tags []string) bool { + if ignoredByJSONSchemaTags(tags) { + return false + } + for _, tag := range tags { + if tag == "required" { + return true + } + } + return false +} + +func ignoredByJSONTags(tags []string) bool { + return tags[0] == "-" +} + +func ignoredByJSONSchemaTags(tags []string) bool { + return tags[0] == "-" +} + +func (r *Reflector) reflectFieldName(f reflect.StructField) (string, bool) { + if !f.IsExported() { // unexported field, ignore it + return "", false + } + + jsonTags := strings.Split(f.Tag.Get("json"), ",") + + if ignoredByJSONTags(jsonTags) { + return "", false + } + + jsonSchemaTags := strings.Split(f.Tag.Get("jsonschema"), ",") + if ignoredByJSONSchemaTags(jsonSchemaTags) { + return "", false + } + + name := f.Name + required := requiredFromJSONTags(jsonTags) + + if r.RequiredFromJSONSchemaTags { + required = requiredFromJSONSchemaTags(jsonSchemaTags) + } + + if jsonTags[0] != "" { + name = jsonTags[0] + } + + return name, required +} diff --git a/internal/server/server.go b/internal/server/server.go new file mode 100644 index 0000000..66f7d26 --- /dev/null +++ b/internal/server/server.go @@ -0,0 +1,130 @@ +// Copyright 2022 Listware + +package server + +import ( + "context" + "fmt" + "net" + "os" + "os/signal" + "syscall" + + "git.fg-tech.ru/listware/cmdb/internal/arangodb" + "git.fg-tech.ru/listware/cmdb/internal/cmdb/edge" + "git.fg-tech.ru/listware/cmdb/internal/cmdb/finder" + "git.fg-tech.ru/listware/cmdb/internal/cmdb/qdsl" + "git.fg-tech.ru/listware/cmdb/internal/cmdb/vertex" + "git.fg-tech.ru/listware/proto/sdk/pbcmdb" 
+ "git.fg-tech.ru/listware/proto/sdk/pbcmdb/pbfinder" + "git.fg-tech.ru/listware/proto/sdk/pbcmdb/pbqdsl" + "google.golang.org/grpc" +) + +// set max 100 MB +const maxMsgSize = 100 * 1024 * 1024 + +var ( + cmdbAddr = "127.0.0.1" + cmdbPort = "31415" +) + +func init() { + if value, ok := os.LookupEnv("CMDB_ADDR"); ok { + cmdbAddr = value + } + if value, ok := os.LookupEnv("CMDB_PORT"); ok { + cmdbPort = value + } +} + +func New() { + ctx, cancel := context.WithCancel(context.Background()) + + sigChan := make(chan os.Signal, 1) + signal.Notify(sigChan, + syscall.SIGINT, + syscall.SIGTERM, + syscall.SIGQUIT, + syscall.SIGHUP, + syscall.SIGUSR1, + syscall.SIGUSR2, + ) + + go func() { + for { + select { + case sig := <-sigChan: + switch sig { + case syscall.SIGTERM, syscall.SIGQUIT, syscall.SIGINT: + cancel() + } + case <-ctx.Done(): + return + } + } + }() + + if err := serve(ctx); err != nil { + fmt.Println(err) + return + } + return +} + +func serve(ctx context.Context) (err error) { + + if err = arangodb.Bootstrap(ctx); err != nil { + return + } + + pc, err := net.Listen("tcp", fmt.Sprintf(":%s", cmdbPort)) + if err != nil { + return + } + defer pc.Close() + + server := grpc.NewServer( + grpc.MaxMsgSize(maxMsgSize), + grpc.MaxRecvMsgSize(maxMsgSize), + grpc.MaxSendMsgSize(maxMsgSize), + ) + defer server.Stop() + + qdsl, err := qdsl.New(ctx) + if err != nil { + return + } + pbqdsl.RegisterQdslServiceServer(server, pbqdsl.QdslServiceServer(qdsl)) + + finder, err := finder.New(ctx) + if err != nil { + return + } + pbfinder.RegisterFinderServiceServer(server, pbfinder.FinderServiceServer(finder)) + + edge, err := edge.New(ctx) + if err != nil { + return + } + + pbcmdb.RegisterEdgeServiceServer(server, pbcmdb.EdgeServiceServer(edge)) + + vertex, err := vertex.New(ctx) + if err != nil { + return + } + + pbcmdb.RegisterVertexServiceServer(server, pbcmdb.VertexServiceServer(vertex)) + + go server.Serve(pc) + + <-ctx.Done() + + return +} + +func Client() (conn 
*grpc.ClientConn, err error) { + + return grpc.Dial(fmt.Sprintf("%s:%s", cmdbAddr, cmdbPort), grpc.WithInsecure(), grpc.WithDefaultCallOptions(grpc.MaxCallRecvMsgSize(maxMsgSize), grpc.MaxCallSendMsgSize(maxMsgSize))) +} diff --git a/pkg/cmdb/documents/base.go b/pkg/cmdb/documents/base.go new file mode 100644 index 0000000..51cfaba --- /dev/null +++ b/pkg/cmdb/documents/base.go @@ -0,0 +1,22 @@ +// Copyright 2022 Listware + +package documents + +import ( + "git.fg-tech.ru/listware/proto/sdk/pbcmdb" +) + +// BaseDocument is a minimal document for use in non-edge collection. +type BaseDocument struct { + Key string `json:"_key,omitempty"` + ID DocumentID `json:"_id,omitempty"` + Rev string `json:"_rev,omitempty"` +} + +func NewBaseDocument(meta *pbcmdb.Meta) *BaseDocument { + return &BaseDocument{ + Key: meta.GetKey(), + ID: DocumentID(meta.GetId()), + Rev: meta.GetRev(), + } +} diff --git a/pkg/cmdb/documents/edge.go b/pkg/cmdb/documents/edge.go new file mode 100644 index 0000000..053c009 --- /dev/null +++ b/pkg/cmdb/documents/edge.go @@ -0,0 +1,11 @@ +// Copyright 2022 Listware + +package documents + +// EdgeDocument is a minimal document for use in edge collection. 
+type EdgeDocument struct { + BaseDocument + From DocumentID `json:"_from,omitempty"` + To DocumentID `json:"_to,omitempty"` + Type string `json:"_type,omitempty"` +} diff --git a/pkg/cmdb/documents/meta.go b/pkg/cmdb/documents/meta.go new file mode 100644 index 0000000..ef2d8ba --- /dev/null +++ b/pkg/cmdb/documents/meta.go @@ -0,0 +1,81 @@ +// Copyright 2022 Listware + +package documents + +import ( + "fmt" + "net/url" + "path" + "strings" +) + +// DocumentID is a document ID +// Consists of two parts - collection name and key +type DocumentID string + +// Topic representation of DocumentID +func (id DocumentID) Topic() string { + return strings.Replace(id.String(), "/", ".", -1) +} + +// Parent - return parent's Document ID +func (id DocumentID) Parent() DocumentID { + return DocumentID(path.Dir(id.String())) +} + +// Validate validates the given id +func (id DocumentID) Validate() error { + if id == "" { + return fmt.Errorf("DocumentID is empty") + } + parts := strings.Split(string(id), "/") + if len(parts) < 2 { + return fmt.Errorf("Expected 'collection/key[/profile_name]', got '%s'", string(id)) + } + if parts[0] == "" { + return fmt.Errorf("Collection part of '%s' is empty", string(id)) + } + if parts[1] == "" { + return fmt.Errorf("Key part of '%s' is empty", string(id)) + } + return nil +} + +// pathEscape escapes the given value for use in a URL path. +func pathEscape(s string) string { + return url.QueryEscape(s) +} + +// pathUnescape unescapes the given value for use in a URL path. +func pathUnescape(s string) string { + r, _ := url.QueryUnescape(s) + return r +} + +// Collection returns the collection part of the ID. +func (id DocumentID) Collection() string { + parts := strings.Split(string(id), "/") + return pathUnescape(parts[0]) +} + +// Key returns the key part of the ID. 
+func (id DocumentID) Key() string { + parts := strings.Split(string(id), "/") + if len(parts) >= 2 { + return pathUnescape(parts[1]) + } + return "" +} + +// ProfileName returns the profile name part of the ID. +func (id DocumentID) ProfileName() string { + parts := strings.Split(string(id), "/") + if len(parts) >= 3 { + return pathUnescape(parts[2]) + } + return "" +} + +func (id DocumentID) String() string { + return string(id) +} diff --git a/pkg/cmdb/documents/node.go b/pkg/cmdb/documents/node.go new file mode 100644 index 0000000..1db0495 --- /dev/null +++ b/pkg/cmdb/documents/node.go @@ -0,0 +1,59 @@ +// Copyright 2022 Listware + +package documents + +import ( + "encoding/json" + + "git.fg-tech.ru/listware/proto/sdk/pbcmdb/pbqdsl" +) + +type Node struct { + Id DocumentID `json:"id,omitempty"` + LinkId DocumentID `json:"link_id,omitempty"` + Key string `json:"key,omitempty"` + Name string `json:"name,omitempty"` + Type string `json:"type,omitempty"` + Object json.RawMessage `json:"object,omitempty"` + Link json.RawMessage `json:"link,omitempty"` + Path json.RawMessage `json:"path,omitempty"` +} + +type Nodes []*Node + +func NewNodes() Nodes { + return make([]*Node, 0) +} + +func (nodes *Nodes) AddElement(element *pbqdsl.Element) { + *nodes = append(*nodes, NewNode(element)) +} +func (nodes *Nodes) Add(node ...*Node) { + *nodes = append(*nodes, node...) 
+}
+
+func NewNode(element *pbqdsl.Element) *Node {
+	return &Node{
+		Id:     DocumentID(element.Id),
+		Key:    element.Key,
+		Name:   element.Name,
+		Type:   element.Type,
+		Object: element.Object,
+		LinkId: DocumentID(element.LinkId),
+		Link:   element.Link,
+		Path:   element.Path,
+	}
+}
+
+func (node *Node) ToElement() *pbqdsl.Element {
+	return &pbqdsl.Element{
+		Id:     node.Id.String(),
+		Key:    node.Key,
+		Name:   node.Name,
+		Type:   node.Type,
+		Object: node.Object,
+		Link:   node.Link,
+		LinkId: node.LinkId.String(),
+		Path:   node.Path,
+	}
+}
diff --git a/pkg/cmdb/edge/client.go b/pkg/cmdb/edge/client.go
new file mode 100644
index 0000000..b6e5c35
--- /dev/null
+++ b/pkg/cmdb/edge/client.go
@@ -0,0 +1,63 @@
+// Copyright 2022 Listware
+
+package edge
+
+import (
+	"context"
+	"encoding/json"
+	"errors"
+
+	"git.fg-tech.ru/listware/cmdb/internal/server"
+	"git.fg-tech.ru/listware/proto/sdk/pbcmdb"
+)
+
+var (
+	// ErrEmptyPayload is returned when Create or Update receives a nil payload.
+	ErrEmptyPayload = errors.New("empty payload")
+)
+
+// Create stores payload as a new edge document in collection.
+// The payload is marshalled to JSON and sent in the request body; the
+// original built the request without it, silently dropping the edge body
+// (vertex.Create in this same package tree marshals it — kept consistent).
+func Create(ctx context.Context, collection string, payload any) (resp *pbcmdb.Response, err error) {
+	if payload == nil {
+		return nil, ErrEmptyPayload
+	}
+
+	payloadRaw, err := json.Marshal(payload)
+	if err != nil {
+		return
+	}
+
+	conn, err := server.Client()
+	if err != nil {
+		return
+	}
+	defer conn.Close()
+
+	client := pbcmdb.NewEdgeServiceClient(conn)
+
+	request := &pbcmdb.Request{Collection: collection, Payload: payloadRaw}
+	return client.Create(ctx, request)
+}
+
+// Read returns the edge document stored under key in collection.
+func Read(ctx context.Context, key, collection string) (resp *pbcmdb.Response, err error) {
+	conn, err := server.Client()
+	if err != nil {
+		return
+	}
+	defer conn.Close()
+
+	client := pbcmdb.NewEdgeServiceClient(conn)
+
+	return client.Read(ctx, &pbcmdb.Request{Key: key, Collection: collection})
+}
+
+// Update replaces the edge document stored under key in collection with
+// payload. As with Create, the payload was previously dropped from the
+// request; it is now marshalled and sent.
+func Update(ctx context.Context, key, collection string, payload any) (resp *pbcmdb.Response, err error) {
+	if payload == nil {
+		return nil, ErrEmptyPayload
+	}
+
+	payloadRaw, err := json.Marshal(payload)
+	if err != nil {
+		return
+	}
+
+	conn, err := server.Client()
+	if err != nil {
+		return
+	}
+	defer conn.Close()
+
+	client := pbcmdb.NewEdgeServiceClient(conn)
+
+	request := &pbcmdb.Request{Key: key, Collection: collection, Payload: payloadRaw}
+
+	return client.Update(ctx, request)
+}
+
+// Remove deletes the edge document stored under key in collection.
+func Remove(ctx context.Context, key, collection string) (resp *pbcmdb.Response, err error) {
+	conn, err := server.Client()
+	if err != nil {
+		return
+	}
+	defer conn.Close()
+
+	client := pbcmdb.NewEdgeServiceClient(conn)
+
+	request := &pbcmdb.Request{Key: key, Collection: collection}
+
+	return client.Remove(ctx, request)
+}
diff --git a/pkg/cmdb/edge/links/client.go b/pkg/cmdb/edge/links/client.go
new file mode 100644
index 0000000..1c7faeb
--- /dev/null
+++ b/pkg/cmdb/edge/links/client.go
@@ -0,0 +1,51 @@
+// Copyright 2022 Listware
+
+package links
+
+import (
+	"context"
+	"encoding/json"
+
+	"git.fg-tech.ru/listware/cmdb/pkg/cmdb/documents"
+	"git.fg-tech.ru/listware/cmdb/pkg/cmdb/edge"
+)
+
+const (
+	collection = "links"
+)
+
+func Create(ctx context.Context, payload any) (meta *documents.BaseDocument, err error) {
+	response, err := edge.Create(ctx, collection, payload)
+	if err != nil {
+		return
+	}
+	meta = documents.NewBaseDocument(response.Meta)
+	return
+}
+
+func Read(ctx context.Context, key string) (payload json.RawMessage, err error) {
+	response, err := edge.Read(ctx, key, collection)
+	if err != nil {
+		return
+	}
+	payload = response.GetPayload()
+	return
+}
+
+func Update(ctx context.Context, key string, payload any) (meta *documents.BaseDocument, err error) {
+	response, err := edge.Update(ctx, key, collection, payload)
+	if err != nil {
+		return
+	}
+	meta = documents.NewBaseDocument(response.Meta)
+	return
+}
+
+func Remove(ctx context.Context, key string) (meta *documents.BaseDocument, err error) {
+	response, err := edge.Remove(ctx, key, collection)
+	if err != nil {
+		return
+	}
+	meta = documents.NewBaseDocument(response.Meta)
+	return
+}
diff --git a/pkg/cmdb/qdsl/client.go b/pkg/cmdb/qdsl/client.go
new file mode 100644
index 0000000..f5b167b
--- /dev/null
+++ b/pkg/cmdb/qdsl/client.go
@@ -0,0 +1,38 @@
+// Copyright 2022 Listware
+
+package qdsl
+
+import (
+	"context"
+
+	"git.fg-tech.ru/listware/cmdb/internal/server"
+	"git.fg-tech.ru/listware/cmdb/pkg/cmdb/documents"
+	"git.fg-tech.ru/listware/proto/sdk/pbcmdb/pbqdsl"
+)
+
+// RawQdsl query with options as object
+func RawQdsl(ctx context.Context, query string, options *pbqdsl.Options) (nodes documents.Nodes, err error) {
+	conn, err := server.Client()
+	if err != nil {
+		return
+	}
+	defer conn.Close()
+	client := pbqdsl.NewQdslServiceClient(conn)
+
+	elements, err := client.Qdsl(ctx, &pbqdsl.Query{Query: query, Options: options})
+	if err != nil {
+		return
+	}
+
+	nodes = documents.NewNodes()
+
+	for _, element := range elements.GetElements() {
+		nodes.AddElement(element)
+	}
+	return
+}
+
+// Qdsl query with options OptionsOption array
+func Qdsl(ctx context.Context, query string, options ...OptionsOption) (documents.Nodes, error) {
+	return RawQdsl(ctx, query, NewOptions(options...))
+}
diff --git a/pkg/cmdb/qdsl/options.go b/pkg/cmdb/qdsl/options.go
new file mode 100644
index 0000000..b16d47a
--- /dev/null
+++ b/pkg/cmdb/qdsl/options.go
@@ -0,0 +1,82 @@
+// Copyright 2022 Listware
+
+package qdsl
+
+import (
+	"git.fg-tech.ru/listware/proto/sdk/pbcmdb/pbqdsl"
+)
+
+// OptionsOption query options
+type OptionsOption func(*pbqdsl.Options)
+
+// NewOptions return new query options
+func NewOptions(opts ...OptionsOption) *pbqdsl.Options {
+	h := &pbqdsl.Options{}
+	for _, opt := range opts {
+		opt(h)
+	}
+	return h
+}
+
+// WithId return vertex '_id', default true
+func WithId() OptionsOption {
+	return func(h *pbqdsl.Options) {
+		h.Id = true
+	}
+}
+
+// WithKey return vertex '_key', default false
+func WithKey() OptionsOption {
+	return func(h *pbqdsl.Options) {
+		h.Key = true
+	}
+}
+
+// WithName return edge '_name', default false
+func WithName() OptionsOption {
+	return func(h *pbqdsl.Options) {
+		h.Name = true
+	}
+}
+
+// WithObject return vertex 'object', default false
+func WithObject() OptionsOption {
+	return func(h *pbqdsl.Options) {
+		h.Object = true
+	}
+}
+
+// WithLink return edge 'object', default false
+func WithLink() OptionsOption {
+	return func(h *pbqdsl.Options) {
+		h.Link = true
+	}
+}
+
+// WithLinkId return edge '_id', default false
+func WithLinkId() OptionsOption {
+	return func(h *pbqdsl.Options) {
+		h.LinkId = true
+	}
+}
+
+// WithType return edge '_type', default false
+func WithType() OptionsOption {
+	return func(h *pbqdsl.Options) {
+		h.Type = true
+	}
+}
+
+// WithPath return 'path', default false
+// (the previous comment here was a stale copy-paste about WithType)
+func WithPath() OptionsOption {
+	return func(h *pbqdsl.Options) {
+		h.Path = true
+	}
+}
+
+// WithRemove remove all found results
+func WithRemove() OptionsOption {
+	return func(h *pbqdsl.Options) {
+		h.Remove = true
+	}
+}
diff --git a/pkg/cmdb/vertex/client.go b/pkg/cmdb/vertex/client.go
new file mode 100644
index 0000000..a8305b4
--- /dev/null
+++ b/pkg/cmdb/vertex/client.go
@@ -0,0 +1,87 @@
+// Copyright 2022 Listware
+
+package vertex
+
+import (
+	"context"
+	"encoding/json"
+	"errors"
+
+	"git.fg-tech.ru/listware/cmdb/internal/server"
+	"git.fg-tech.ru/listware/proto/sdk/pbcmdb"
+)
+
+var (
+	ErrEmptyPayload = errors.New("empty payload")
+)
+
+func Create(ctx context.Context, collection string, payload any) (resp *pbcmdb.Response, err error) {
+	if payload == nil {
+		return nil, ErrEmptyPayload
+	}
+
+	payloadRaw, err := json.Marshal(payload)
+	if err != nil {
+		return
+	}
+
+	conn, err := server.Client()
+	if err != nil {
+		return
+	}
+	defer conn.Close()
+
+	client := pbcmdb.NewVertexServiceClient(conn)
+
+	request := &pbcmdb.Request{Collection: collection, Payload: payloadRaw}
+	return client.Create(ctx, request)
+}
+
+func Read(ctx context.Context, key, collection string) (resp *pbcmdb.Response, err error) {
+	conn, err := server.Client()
+	if err != nil {
+		return
+	}
+	defer conn.Close()
+
+	client := pbcmdb.NewVertexServiceClient(conn)
+
+	return client.Read(ctx, &pbcmdb.Request{Key: key, Collection: collection})
+}
+
+func Update(ctx context.Context, key, collection string, payload any) (resp *pbcmdb.Response, err error) {
+	if payload == nil {
+		return nil, ErrEmptyPayload
+	}
+
+	payloadRaw, err := json.Marshal(payload)
+	if err != nil {
+		return
+	}
+
+	conn, err := server.Client()
+	if err != nil {
+		return
+	}
+	defer conn.Close()
+
+	client := pbcmdb.NewVertexServiceClient(conn)
+
+	request := &pbcmdb.Request{Key: key, Collection: collection, Payload: payloadRaw}
+
+	return client.Update(ctx, request)
+}
+
+func Remove(ctx context.Context, key, collection string) (resp *pbcmdb.Response, err error) {
+	conn, err := server.Client()
+	if err != nil {
+		return
+	}
+	defer conn.Close()
+
+	client := pbcmdb.NewVertexServiceClient(conn)
+
+	request := &pbcmdb.Request{Key: key, Collection: collection}
+
+	return client.Remove(ctx, request)
+}
diff --git a/pkg/cmdb/vertex/objects/client.go b/pkg/cmdb/vertex/objects/client.go
new file mode 100644
index 0000000..96cdb84
--- /dev/null
+++ b/pkg/cmdb/vertex/objects/client.go
@@ -0,0 +1,52 @@
+// Copyright 2022 Listware
+
+package objects
+
+import (
+	"context"
+	"encoding/json"
+
+	"git.fg-tech.ru/listware/cmdb/pkg/cmdb/documents"
+	"git.fg-tech.ru/listware/cmdb/pkg/cmdb/vertex"
+)
+
+const (
+	collection = "objects"
+)
+
+func Create(ctx context.Context, payload any) (meta *documents.BaseDocument, err error) {
+	response, err := vertex.Create(ctx, collection, payload)
+	if err != nil {
+		return
+	}
+	meta = documents.NewBaseDocument(response.Meta)
+	return
+}
+
+func Read(ctx context.Context, key string) (meta *documents.BaseDocument, payload json.RawMessage, err error) {
+	response, err := vertex.Read(ctx, key, collection)
+	if err != nil {
+		return
+	}
+	meta = documents.NewBaseDocument(response.Meta)
+	payload = response.GetPayload()
+	return
+}
+
+func Update(ctx context.Context, key string, payload any) (meta *documents.BaseDocument, err error) {
+	response, err := vertex.Update(ctx, key, collection, payload)
+	if err != nil {
+		return
+	}
+	meta = documents.NewBaseDocument(response.Meta)
+	return
+}
+
+func Remove(ctx context.Context, key string) (meta *documents.BaseDocument, err error) {
+	response, err := vertex.Remove(ctx, key, collection)
+	if err != nil {
+		return
+	}
+	meta = documents.NewBaseDocument(response.Meta)
+	return
+}
diff --git a/pkg/cmdb/vertex/types/client.go b/pkg/cmdb/vertex/types/client.go
new file mode 100644
index 0000000..c60b083
--- /dev/null
+++ b/pkg/cmdb/vertex/types/client.go
@@ -0,0 +1,52 @@
+// Copyright 2022 Listware
+
+package types
+
+import (
+	"context"
+	"encoding/json"
+
+	"git.fg-tech.ru/listware/cmdb/pkg/cmdb/documents"
+	"git.fg-tech.ru/listware/cmdb/pkg/cmdb/vertex"
+)
+
+const (
+	collection = "types"
+)
+
+func Create(ctx context.Context, payload any) (meta *documents.BaseDocument, err error) {
+	response, err := vertex.Create(ctx, collection, payload)
+	if err != nil {
+		return
+	}
+	meta = documents.NewBaseDocument(response.Meta)
+	return
+}
+
+func Read(ctx context.Context, key string) (meta *documents.BaseDocument, payload json.RawMessage, err error) {
+	response, err := vertex.Read(ctx, key, collection)
+	if err != nil {
+		return
+	}
+	meta = documents.NewBaseDocument(response.Meta)
+	payload = response.GetPayload()
+	return
+}
+
+func Update(ctx context.Context, key string, payload any) (meta *documents.BaseDocument, err error) {
+	response, err := vertex.Update(ctx, key, collection, payload)
+	if err != nil {
+		return
+	}
+	meta = documents.NewBaseDocument(response.Meta)
+	return
+}
+
+func Remove(ctx context.Context, key string) (meta *documents.BaseDocument, err error) {
+	response, err := vertex.Remove(ctx, key, collection)
+	if err != nil {
+		return
+	}
+	meta = documents.NewBaseDocument(response.Meta)
+	return
+}
diff --git a/pkg/cmdb/vertex/types/type.go b/pkg/cmdb/vertex/types/type.go
new file mode 100644
index 0000000..3e3108c
--- /dev/null
+++ b/pkg/cmdb/vertex/types/type.go
@@ -0,0 +1,35 @@
+// Copyright 2022 Listware
+
+package types
+
+import (
+	"git.fg-tech.ru/listware/cmdb/internal/schema"
+	"git.fg-tech.ru/listware/proto/sdk/pbcmdb"
+)
+
+// Type is a object type struct
+type Type struct {
+	Schema   *schema.Schema                        `json:"schema"`
+	Triggers map[string]map[string]*pbcmdb.Trigger `json:"triggers"`
+}
+
+// NewType return new object type
+// (parameter renamed from `schema` to `s` — it shadowed the imported package)
+func NewType(s *schema.Schema) Type {
+	return Type{
+		Schema:   s,
+		Triggers: make(map[string]map[string]*pbcmdb.Trigger),
+	}
+}
+
+// ReflectType get reflected profile type
+func ReflectType(v interface{}) *Type {
+	r := &schema.Reflector{
+		AllowAdditionalProperties: true,
+		ExpandedStruct:            true,
+		TitleNotation:             "dash",
+	}
+	return &Type{
+		Schema:   r.Reflect(v),
+		Triggers: make(map[string]map[string]*pbcmdb.Trigger),
+	}
+}
diff --git a/qdsl.peg b/qdsl.peg
new file mode 100644
index 0000000..b9e0542
--- /dev/null
+++ b/qdsl.peg
@@ -0,0 +1,315 @@
+{
+package qdsl
+
+type Limits struct {
+	Limit  int `json:"limit"`
+	Offset int `json:"offset"`
+}
+
+type Limit struct {
+	Sort   any     `json:"sort"`
+	Limits *Limits `json:"limits"`
+}
+
+type Direction struct {
+	Direction string `json:"direction"`
+	Field     any    `json:"field"`
+}
+
+type Variable struct {
+	Variable   []string `json:"variable"`
+	Op         string   `json:"op"`
+	Evaluation string   `json:"evaluation"`
+}
+
+type Expression struct {
+	Expression *Variable `json:"expression"`
+	BoolOp     string    `json:"boolOp"`
+}
+
+type Filter struct {
+	Filter [][]*Expression `json:"filter"`
+
+	// ...limits
+}
+
+type Range struct {
+	From *string `json:"from"`
+	To   *string `json:"to"`
+}
+
+type Node struct {
+	Name   *string  `json:"name"`
+	Ranges []*Range `json:"ranges"`
+}
+
+type Block struct {
+	*Filter  `json:"filter"`
+	Any      bool       `json:"any"`
+	Catchall bool       `json:"catchall"`
+	Node     *Node      `json:"node"`
+	IsGroup  bool       `json:"isGroup"`
+	Children []*Element `json:"children"`
+}
+
+type Path []*Block
+
+type Element struct {
+	Action     string `json:"action"`
+	Path       Path   `json:"path"`
+	RootExpand bool   `json:"rootExpand"`
+	Query      string `json:"query"`
+}
+
+func toString(i interface{}) string {
+	if i == nil {
+		return ""
+	}
+	switch i.(type) {
+	case string:
+		return i.(string)
+	default:
+		return string(i.([]byte))
+	}
+}
+
+func arrayToStringArray(arr interface{}) (result []string) {
+	for _, i := range arr.([]interface{}) {
+		result = append(result, toString(i))
+	}
+	return
+}
+
+}
+
+start = QUERY
+
+QUERY = base:(e:ELEMENT (__ "," __ / [ ]+) { return e, nil })* last:ELEMENT {
+	return append(base.([]any), last), nil
+}
+
+ELEMENT = action:UNARY levels:(l:LEVEL "." { return l, nil })* last:(LEVEL / "_") {
+	var blocks []*Block
+
+	for _, level := range levels.([]any) {
+		if block, ok := level.(*Block); ok {
+			blocks = append(blocks, block)
+		}
+	}
+	block, ok := last.(*Block)
+	// if last == "_" {
+	if !ok {
+		return &Element{Action: toString(action), Path: blocks, RootExpand: true}, nil
+	}
+
+	return &Element{Action: toString(action), Path: append(blocks, block)}, nil
+}
+UNARY = op:("-")? {
+	// pigeon yields []byte for a matched literal, so compare the text form;
+	// the original `op == "-"` compared any([]byte) with string and was
+	// always false, making "subtract" unreachable.
+	if toString(op) == "-" {
+		return "subtract", nil
+	}
+	return "add", nil
+}
+
+NODE = nodename:NODENAME ranges:NODERANGE? {
+	name := toString(nodename)
+	var arr []*Range
+	if val, ok := ranges.([]interface{}); ok {
+		for _, a := range val {
+			arr = append(arr, a.(*Range))
+		}
+	}
+	return &Node{Name: &name, Ranges: arr}, nil
+}
+/ ranges:NODERANGE {
+	var arr []*Range
+	if val, ok := ranges.([]interface{}); ok {
+		for _, a := range val {
+			arr = append(arr, a.(*Range))
+		}
+	}
+	return &Node{Name: nil, Ranges: arr}, nil
+}
+
+NODENAME = nodename:(head:[a-z0-9] tail:[a-z_0-9\\-]i* {
+	return toString(head) + strings.Join(arrayToStringArray(tail), ""), nil
+} )
+
+NODERANGE = "[" ranges:(from:INT to:("-" to:INT {
+	return to, nil
+})? __ ","? __ {
+	fromValue := toString(from)
+	toValue := toString(to)
+	return &Range{From: &fromValue, To: &toValue}, nil
+})+ "]" {
+	return ranges, nil
+}
+
+INT = num:[0-9a-z]i+ {
+	return strings.Join(arrayToStringArray(num), ""), nil
+}
+
+LEVEL = "(" children:QUERY ")" {
+	var arr []*Element
+	if val, ok := children.([]interface{}); ok {
+		for _, a := range val {
+			arr = append(arr, a.(*Element))
+		}
+	}
+	return &Block{IsGroup: true, Children: arr}, nil
+}
+/ "<" block:BLOCK {
+	return &Block{Any: false, Catchall: true, Filter: block.(*Filter)}, nil
+}
+/ "*" block:BLOCK {
+	return &Block{Any: true, Filter: block.(*Filter)}, nil
+}
+/ node:NODE block:BLOCK {
+	return &Block{Any: false, Node: node.(*Node), Filter: block.(*Filter)}, nil
+}
+
+BLOCK = limits:LIMIT filter:SEARCH {
+	// NOTE(review): limits is parsed but discarded in every BLOCK variant —
+	// Filter has no limits field yet (see "// ...limits" above); confirm intent.
+	var filters [][]*Expression
+
+	for _, i := range filter.([]any) {
+		var expressions []*Expression
+		for _, j := range i.([]any) {
+			expressions = append(expressions, j.(*Expression))
+		}
+		filters = append(filters, expressions)
+	}
+
+	return &Filter{filters}, nil}
+/ filter:SEARCH limits:LIMIT {
+	var filters [][]*Expression
+
+	for _, i := range filter.([]any) {
+		var expressions []*Expression
+		for _, j := range i.([]any) {
+			expressions = append(expressions, j.(*Expression))
+		}
+		filters = append(filters, expressions)
+	}
+
+	return &Filter{filters}, nil}
+/ filter:SEARCH {
+	var filters [][]*Expression
+
+	for _, i := range filter.([]any) {
+		var expressions []*Expression
+		for _, j := range i.([]any) {
+			expressions = append(expressions, j.(*Expression))
+		}
+		filters = append(filters, expressions)
+	}
+
+	return &Filter{filters}, nil}
+
+LIMIT = "{" sort:SORT limit:NUMBER ".." offset:NUMBER "}" {
+	// NOTE(review): NUMBER returns a string, so limit.(int)/offset.(int)
+	// would panic if a LIMIT clause is ever parsed — confirm and convert.
+	return &Limit{ Sort:sort, Limits: &Limits{ limit.(int), offset.(int) }}, nil }
+/ "{" sort:SORT limit:NUMBER "}" {
+	return &Limit{ Sort:sort, Limits: &Limits{ limit.(int) , 0 }}, nil }
+/ "{" sort:SORT "}" {
+	return &Limit{ Sort:sort }, nil }
+
+SORT = (direction:DIRECTION v:VARIABLE ","? __ {
+	return &Direction{toString(direction), v}, nil
+})*
+
+DIRECTION = d:("^")? {
+	if d == nil {
+		return "ASC", nil
+	}
+	return "DESC", nil
+}
+
+SEARCH = filters:("[?" filter:FILTER "?]" {
+	return filter, nil
+})* {
+	return filters, nil
+}
+
+FILTER = (expression:EXPRESSION __ boolOp:("&&" / "||")? __ {
+	expressionValue, _ := expression.(*Variable)
+	return &Expression{expressionValue, toString(boolOp)}, nil
+})+
+
+EXPRESSION = variable:VARIABLE __ op:OP __ evaluation:EVALUATION {
+	var arr []string
+	for _, i := range variable.([]any) {
+		arr = append(arr, toString(i))
+	}
+	return &Variable{arr, toString(op), toString(evaluation)}, nil
+}
+
+OP = "=="
+/ "=~"
+/ "!~"
+/ "<="
+/ ">="
+/ "<"
+/ ">"
+/ "!="
+/ "IN"i
+/ "LIKE"i
+/ "NOT LIKE"i
+
+VARIABLE = variable:("object" / "link" / "path" / "@" / "$") attribute:ATTRIBUTE+ {
+	return append([]any{variable}, attribute.([]any)...), nil
+}
+
+ATTRIBUTE = ("." attrname:ATTRNAME {
+	return attrname, nil })
+/ ("[" attrname:(STRING_LITERAL / INT) "]" {
+	return attrname, nil })
+
+ATTRNAME = attrname:[a-z\\*0-9_\\-]i+ {
+	var arr []string
+
+	for _, a := range attrname.([]any) {
+		arr = append(arr, toString(a))
+	}
+
+	return strings.Join(arr, ""), nil
+}
+
+STRING_LITERAL = "'" text:("\\'"/[^'])+ "'" {
+	// text is a []interface{} of []byte matches; the previous []string
+	// assertion always panicked under pigeon.
+	return strings.Join(arrayToStringArray(text), ""), nil
+}
+
+BOOL = "true" / "false" / "True" / "False"
+NULL = "null"
+
+EVALUATION = LITERAL
+
+LITERAL = DBL_LITERAL / SNG_LITERAL / NUMBER / BOOL / NULL / ARR
+
+SNG_LITERAL = q1:"'" cc:[^\\']* q2:"'" { return toString(q1) + strings.Join(arrayToStringArray(cc), "") + toString(q2), nil }
+DBL_LITERAL = q1:'"' cc:[^\\"]* q2:'"' { return toString(q1) + strings.Join(arrayToStringArray(cc), "") + toString(q2), nil }
+
+NUMBER = num:[0-9]+ tail:('.' [0-9]+)? {
+	// Debug Println removed; num/tail are []interface{} under pigeon, so the
+	// previous []string assertions always panicked. tail, when present, is
+	// []any{".", digits}.
+	result := strings.Join(arrayToStringArray(num), "")
+	if arr, ok := tail.([]interface{}); ok && len(arr) == 2 {
+		result += "." + strings.Join(arrayToStringArray(arr[1]), "")
+	}
+	return result, nil
+}
+
+ARR = '[' __
+	body:(hd:LITERAL items:(__ ',' __ e:LITERAL { return e, nil })* __ {
+	// flatten head + tail items into one []any (the original appended the
+	// items slice as a single element)
+	arr := []any{hd}
+	return append(arr, items.([]interface{})...), nil
+})?
+']' {
+	// body is []any (nil for an empty array); the previous []string
+	// assertion always panicked under pigeon.
+	arr := []string{}
+	if items, ok := body.([]interface{}); ok && len(items) > 0 {
+		parts := make([]string, 0, len(items))
+		for _, item := range items {
+			parts = append(parts, toString(item))
+		}
+		arr = append(arr, strings.Join(parts, ","))
+	}
+	return arr, nil
+}
+
+__ = [ ]*