diff --git a/.gitignore b/.gitignore index 4c58316..62a497e 100644 --- a/.gitignore +++ b/.gitignore @@ -18,3 +18,13 @@ # screen logs screenlog.* + +# Temp files +*.swo +*.swp +*.orig + + +**/mocks/* +!**/mocks/.gitkeep + diff --git a/.golangci.yml b/.golangci.yml index 6bad1bb..9f7b999 100644 --- a/.golangci.yml +++ b/.golangci.yml @@ -24,13 +24,14 @@ run: # from this option's value: # vendor$, third_party$, testdata$, examples$, Godeps$, builtin$ skip-dirs: + - src/core/mocks # which files to skip: they will be analyzed, but issues from them # won't be reported. Default value is empty list, but there is # no need to include all autogenerated files, we confidently recognize # autogenerated files. If it's not please let us know. skip-files: - + - src/rxt/grammar/lexer.nn.go # output configuration options output: diff --git a/.travis.yml b/.travis.yml index 4346412..436f195 100644 --- a/.travis.yml +++ b/.travis.yml @@ -18,6 +18,7 @@ install: - if [[ ! -d $GOPATH/src/github.com/simelo/rextporter ]]; then mkdir -p $GOPATH/src/github.com/simelo; ln -s $TRAVIS_BUILD_DIR $GOPATH/src/github.com/simelo/rextporter; fi - cd $GOPATH/src/github.com/simelo/rextporter - go get -t ./... + - go get github.com/vektra/mockery/.../ - make install-linters diff --git a/Gopkg.toml b/Gopkg.toml index 518df12..3497d81 100644 --- a/Gopkg.toml +++ b/Gopkg.toml @@ -56,7 +56,3 @@ [[constraint]] branch = "master" name = "github.com/cznic/goyacc" - -[[constraint]] - branch = "master" - name = "github.com/cznic/goyacc" diff --git a/Makefile b/Makefile index 8379be6..4fb67d3 100644 --- a/Makefile +++ b/Makefile @@ -1,9 +1,21 @@ .DEFAULT_GOAL := help .PHONY: test install-linters test-386 test-amd64 lint + +build-grammar: ## Generate source code for REXT grammar + nex -s src/rxt/grammar/lexer.nex -test: ## Run test with GOARCH=Default +mocks: ## Create all mock files for unit tests + echo "Generating mock files" + cd src/core/ ; mockery -all ; cd ../../ + +test-grammar: build-grammar ## Test cases for REXT lexer and parser + go run cmd/rxtc/lexer.go < src/rxt/testdata/skyexample.rxt 2> src/rxt/testdata/skyexample.golden.orig + diff -u src/rxt/testdata/skyexample.golden src/rxt/testdata/skyexample.golden.orig + +test: mocks ## Run test with GOARCH=Default go test -count=1 github.com/simelo/rextporter/src/config go test -count=1 github.com/simelo/rextporter/src/scrapper + go test -count=1 github.com/simelo/rextporter/src/memconfig if ! screen -list | grep -q "fakeSkycoinForIntegrationTest"; then echo "creating screen fakeSkycoinForIntegrationTest"; screen -L -dm -S fakeSkycoinForIntegrationTest go run test/integration/fake_skycoin_node.go; else echo "fakeSkycoinForIntegrationTest screen already exist. quiting it to create a new one"; screen -S fakeSkycoinForIntegrationTest -X quit; screen -dm -S fakeSkycoinForIntegrationTest go run test/integration/fake_skycoin_node.go; fi sleep 3 go test -count=1 -cpu=1 -parallel=1 github.com/simelo/rextporter/test/integration -args -test.v @@ -13,9 +25,10 @@ test: ## Run test with GOARCH=Default cat screenlog.0 -test-386: ## Run tests with GOARCH=386 +test-386: mocks ## Run tests with GOARCH=386 GOARCH=386 go test -count=1 github.com/simelo/rextporter/src/config GOARCH=386 go test -count=1 github.com/simelo/rextporter/src/scrapper + GOARCH=386 go test -count=1 github.com/simelo/rextporter/src/memconfig if ! 
screen -list | grep -q "fakeSkycoinForIntegrationTest"; then echo "creating screen fakeSkycoinForIntegrationTest"; screen -L -dm -S fakeSkycoinForIntegrationTest go run test/integration/fake_skycoin_node.go; else echo "fakeSkycoinForIntegrationTest screen already exist. quiting it to create a new one"; screen -S fakeSkycoinForIntegrationTest -X quit; screen -dm -S fakeSkycoinForIntegrationTest go run test/integration/fake_skycoin_node.go; fi sleep 3 GOARCH=386 go test -cpu=1 -parallel=1 -count=1 github.com/simelo/rextporter/test/integration -args -test.v @@ -24,9 +37,10 @@ test-386: ## Run tests with GOARCH=386 screen -S fakeSkycoinForIntegrationTest -X quit cat screenlog.0 -test-amd64: ## Run tests with GOARCH=amd64 +test-amd64: mocks ## Run tests with GOARCH=amd64 GOARCH=amd64 go test -count=1 github.com/simelo/rextporter/src/config GOARCH=amd64 go test -count=1 github.com/simelo/rextporter/src/scrapper + GOARCH=amd64 go test -count=1 github.com/simelo/rextporter/src/memconfig if ! screen -list | grep -q "fakeSkycoinForIntegrationTest"; then echo "creating screen fakeSkycoinForIntegrationTest"; screen -L -dm -S fakeSkycoinForIntegrationTest go run test/integration/fake_skycoin_node.go; else echo "fakeSkycoinForIntegrationTest screen already exist. quiting it to create a new one"; screen -S fakeSkycoinForIntegrationTest -X quit; screen -dm -S fakeSkycoinForIntegrationTest go run test/integration/fake_skycoin_node.go; fi sleep 3 GOARCH=amd64 go test -cpu=1 -parallel=1 -count=1 github.com/simelo/rextporter/test/integration -args -test.v diff --git a/cmd/rextporter/main.go b/cmd/rextporter/main.go index 36e79ca..673cf57 100644 --- a/cmd/rextporter/main.go +++ b/cmd/rextporter/main.go @@ -2,20 +2,35 @@ package main import ( "flag" + "os" - "github.com/simelo/rextporter/src/config" + "github.com/simelo/rextporter/src/core" "github.com/simelo/rextporter/src/exporter" + "github.com/simelo/rextporter/src/toml2config" + "github.com/simelo/rextporter/src/tomlconfig" + log "github.com/sirupsen/logrus" ) func main() { - mainConfigFile := flag.String("config", "", "Metrics main config file path.") + // log.SetFlags(log.LstdFlags | log.Lshortfile) + mainConfigFile := flag.String("config", "/home/adacosta/.config/simelo/rextporter/main.toml", "Metrics main config file path.") defaultListenPort := 8080 listenPort := flag.Uint("port", uint(defaultListenPort), "Listen port.") defaultHandlerEndpoint := "/metrics" handlerEndpoint := flag.String("handler", defaultHandlerEndpoint, "Handler endpoint.") flag.Parse() - conf := config.MustConfigFromFileSystem(*mainConfigFile) - exporter.MustExportMetrics(*handlerEndpoint, uint16(*listenPort), conf) + conf, err := tomlconfig.ReadConfigFromFileSystem(*mainConfigFile) + if err != nil { + log.WithError(err).Errorln("error reading config from file system") + os.Exit(1) + } + var rootConf core.RextRoot + rootConf, err = toml2config.Fill(conf) + if err != nil { + log.WithError(err).Errorln("error filling config info") + os.Exit(1) + } + exporter.MustExportMetrics(*handlerEndpoint, uint16(*listenPort), rootConf) waitForEver := make(chan bool) <-waitForEver } diff --git a/cmd/rxtc/lexer.go b/cmd/rxtc/lexer.go new file mode 100644 index 0000000..3bc72d9 --- /dev/null +++ b/cmd/rxtc/lexer.go @@ -0,0 +1,10 @@ +package main + +import ( + "github.com/simelo/rextporter/src/rxt" + "github.com/simelo/rextporter/src/rxt/grammar" +) + +func main() { + grammar.LexTheRxt(&rxt.TokenWriter{}, "LEX") +} diff --git a/src/client/api_rest.go b/src/client/api_rest.go index e61f799..38bfc1c 
100644 --- a/src/client/api_rest.go +++ b/src/client/api_rest.go @@ -10,7 +10,7 @@ import ( "github.com/oliveagle/jsonpath" "github.com/prometheus/client_golang/prometheus" - "github.com/simelo/rextporter/src/config" + "github.com/simelo/rextporter/src/core" "github.com/simelo/rextporter/src/util" log "github.com/sirupsen/logrus" ) @@ -26,19 +26,59 @@ type APIRestCreator struct { } // CreateAPIRestCreator create an APIRestCreator -func CreateAPIRestCreator(metric config.Metric, service config.Service, dataSourceResponseDurationDesc *prometheus.Desc) (cf CacheableFactory, err error) { +func CreateAPIRestCreator(resConf core.RextResourceDef, srvConf core.RextServiceDef, dataSourceResponseDurationDesc *prometheus.Desc) (cf CacheableFactory, err error) { + resOptions := resConf.GetOptions() + httpMethod, err := resOptions.GetString(core.OptKeyRextResourceDefHTTPMethod) + if err != nil { + log.WithError(err).Errorln("Can not find httpMethod") + return cf, err + } + resURI := strings.TrimPrefix(resConf.GetResourcePATH(srvConf.GetBasePath()), srvConf.GetBasePath()) + auth := resConf.GetAuth(srvConf.GetAuthForBaseURL()) + var tkHeaderKey, tkKeyFromEndpoint, tkKeyGenEndpoint string + if auth != nil { + authOpts := auth.GetOptions() + tkHeaderKey, err = authOpts.GetString(core.OptKeyRextAuthDefTokenHeaderKey) + if err != nil { + log.WithError(err).Errorln("Can not find tokenHeaderKey") + return cf, err + } + tkKeyFromEndpoint, err = authOpts.GetString(core.OptKeyRextAuthDefTokenKeyFromEndpoint) + if err != nil { + log.WithError(err).Errorln("Can not find tokenKeyFromEndpoint") + return cf, err + } + tkKeyGenEndpoint, err = authOpts.GetString(core.OptKeyRextAuthDefTokenGenEndpoint) + if err != nil { + log.WithError(err).Errorln("Can not find tkKeyGenEndpoint") + return cf, err + } + } else { + log.Warnln("you have an empty auth") + } + srvOpts := srvConf.GetOptions() + jobName, err := srvOpts.GetString(core.OptKeyRextServiceDefJobName) + if err != nil { + log.WithError(err).Errorln("Can not find jobName") + return cf, err + } + instanceName, err := srvOpts.GetString(core.OptKeyRextServiceDefInstanceName) + if err != nil { + log.WithError(err).Errorln("Can not find instanceName") + return cf, err + } cf = APIRestCreator{ baseFactory: baseFactory{ - jobName: service.JobName(), - instanceName: service.InstanceName(), - dataSource: metric.URL, + jobName: jobName, + instanceName: instanceName, + dataSource: resURI, dataSourceResponseDurationDesc: dataSourceResponseDurationDesc, }, - httpMethod: metric.HTTPMethod, - dataPath: service.URIToGetMetric(metric), - tokenPath: service.URIToGetToken(), - tokenHeaderKey: service.TokenHeaderKey, - tokenKeyFromEndpoint: service.TokenKeyFromEndpoint, + httpMethod: httpMethod, + dataPath: resConf.GetResourcePATH(srvConf.GetBasePath()), + tokenPath: tkKeyGenEndpoint, + tokenHeaderKey: tkHeaderKey, + tokenKeyFromEndpoint: tkKeyFromEndpoint, } return cf, err } diff --git a/src/client/metrics_forwader.go b/src/client/metrics_forwader.go index 25571d6..e624192 100644 --- a/src/client/metrics_forwader.go +++ b/src/client/metrics_forwader.go @@ -2,16 +2,16 @@ package client import ( "compress/gzip" - "errors" "fmt" "io" "io/ioutil" "net/http" "time" - "github.com/simelo/rextporter/src/config" + "github.com/simelo/rextporter/src/core" "github.com/simelo/rextporter/src/util" "github.com/simelo/rextporter/src/util/metrics" + log "github.com/sirupsen/logrus" ) // ProxyMetricClientCreator create a metrics fordwader client @@ -23,15 +23,24 @@ type ProxyMetricClientCreator struct { 
} // CreateProxyMetricClientCreator create a ProxyMetricClientCreator with required info to create a metrics fordwader client -func CreateProxyMetricClientCreator(service config.Service, fDefMetrics *metrics.DefaultFordwaderMetrics) (cf ProxyMetricClientCreator, err error) { - if !util.StrSliceContains(service.Modes, config.ServiceTypeProxy) { - return ProxyMetricClientCreator{}, errors.New("can not create a forward_metrics metric client from a service whitout type " + config.ServiceTypeProxy) +func CreateProxyMetricClientCreator(resConf core.RextResourceDef, srvConf core.RextServiceDef, fDefMetrics *metrics.DefaultFordwaderMetrics) (cf ProxyMetricClientCreator, err error) { + srvOpts := srvConf.GetOptions() + jobName, err := srvOpts.GetString(core.OptKeyRextServiceDefJobName) + if err != nil { + log.WithError(err).Errorln("Can not find jobName") + return cf, err } + instanceName, err := srvOpts.GetString(core.OptKeyRextServiceDefInstanceName) + if err != nil { + log.WithError(err).Errorln("Can not find instanceName") + return cf, err + } + resPath := resConf.GetResourcePATH(srvConf.GetBasePath()) cf = ProxyMetricClientCreator{ defFordwaderMetrics: fDefMetrics, - dataPath: service.URIToGetExposedMetric(), - JobName: service.JobName(), - InstanceName: service.InstanceName(), + dataPath: resPath, + JobName: jobName, + InstanceName: instanceName, } return cf, err } diff --git a/src/config/config.go b/src/config/config.go deleted file mode 100644 index 862fbed..0000000 --- a/src/config/config.go +++ /dev/null @@ -1,161 +0,0 @@ -package config - -import ( - "bytes" - "container/list" - "fmt" - "net/url" - - "github.com/simelo/rextporter/src/util" - log "github.com/sirupsen/logrus" - "github.com/spf13/viper" -) - -// RootConfig is the top level node for the config tree, it has a list of metrics and a -// service from which get this metrics. 
-type RootConfig struct { - Services []Service `json:"services"` -} - -// MustConfigFromRawString allow you to define a `.toml` config in the fly, a raw string with the "config content" -func MustConfigFromRawString(strConf string) (conf RootConfig, err error) { - const generalScopeErr = "error creating a config instance" - viper.SetConfigType("toml") - buff := bytes.NewBuffer([]byte(strConf)) - if err = viper.ReadConfig(buff); err != nil { - errCause := fmt.Sprintln("can not read the buffer: ", err.Error()) - return conf, util.ErrorFromThisScope(errCause, generalScopeErr) - } - if err = viper.Unmarshal(&conf); err != nil { - errCause := fmt.Sprintln("can not decode the config data: ", err.Error()) - return conf, util.ErrorFromThisScope(errCause, generalScopeErr) - } - if !conf.isValid() { - log.Panic("config is not valid") - } - return conf, err -} - -// newMetricsConfig desserialize a metrics config from the 'toml' file path -func newMetricsConfig(path string) (metricsConf []Metric, err error) { - const generalScopeErr = "error reading metrics config" - if len(path) == 0 { - errCause := "path should not be null" - return metricsConf, util.ErrorFromThisScope(errCause, generalScopeErr) - } - viper.SetConfigFile(path) - if err := viper.ReadInConfig(); err != nil { - errCause := fmt.Sprintln("error reading config file: ", path, err.Error()) - return metricsConf, util.ErrorFromThisScope(errCause, generalScopeErr) - } - type metricsForService struct { - Metrics []Metric - } - var root metricsForService - if err := viper.Unmarshal(&root); err != nil { - errCause := fmt.Sprintln("can not decode the config data: ", err.Error()) - return metricsConf, util.ErrorFromThisScope(errCause, generalScopeErr) - } - metricsConf = root.Metrics - return metricsConf, nil -} - -// newServicesConfigFromFile desserialize a service config from the 'toml' file path -func newServicesConfigFromFile(path string, conf mainConfigData) (servicesConf []Service, err error) { - const generalScopeErr = "error reading service config" - servicesConfReader := NewServicesConfigFromFile(path) - if servicesConf, err = servicesConfReader.GetConfig(); err != nil { - errCause := "error reading service config" - return servicesConf, util.ErrorFromThisScope(errCause, generalScopeErr) - } - for idxService, service := range servicesConf { - if util.StrSliceContains(service.Modes, ServiceTypeAPIRest) { - if servicesConf[idxService].Metrics, err = newMetricsConfig(conf.MetricsConfigPath(service.Name)); err != nil { - errCause := "error reading metrics config: " + err.Error() - return servicesConf, util.ErrorFromThisScope(errCause, generalScopeErr) - } - } - } - return servicesConf, err -} - -// MustConfigFromFileSystem will read the config from the file system, you should send the -// metric config file path and service config file path into metricsPath, servicePath respectively. -// This function can cause a panic. 
-func MustConfigFromFileSystem(mainConfigPath string) (rootConf RootConfig) { - const generalScopeErr = "error getting config values from file system" - var conf mainConfigData - var err error - if conf, err = newMainConfigData(mainConfigPath); err != nil { - errCause := "root cause: " + err.Error() - log.WithError(util.ErrorFromThisScope(errCause, generalScopeErr)).Panicln("can not read main config") - } - if rootConf.Services, err = newServicesConfigFromFile(conf.ServicesConfigPath(), conf); err != nil { - errCause := "root cause: " + err.Error() - log.WithError(util.ErrorFromThisScope(errCause, generalScopeErr)).Errorln("can not read services config file") - } - if !rootConf.isValid() { - log.Panic("config is not valid") - } - return rootConf -} - -// FilterMetricsByType will return all the metrics who match with the 't' parameter. -func (conf RootConfig) FilterMetricsByType(t string) (metrics []Metric) { - tmpMetrics := list.New() - for _, service := range conf.Services { - metricsForService := service.FilterMetricsByType(t) - for _, metric := range metricsForService { - tmpMetrics.PushBack(metric) - } - } - metrics = make([]Metric, tmpMetrics.Len()) - idxLink := 0 - for it := tmpMetrics.Front(); it != nil; it = it.Next() { - metrics[idxLink] = it.Value.(Metric) - idxLink++ - } - return metrics -} - -// FilterServicesByType will return all the services who match with the 't' parameter. -func (conf RootConfig) FilterServicesByType(t string) (services []Service) { - return filterServicesByType(t, conf.Services) -} - -func filterServicesByType(t string, services []Service) (filteredService []Service) { - tmpServices := list.New() - for _, service := range services { - if util.StrSliceContains(service.Modes, t) { - tmpServices.PushBack(service) - } - } - filteredService = make([]Service, tmpServices.Len()) - idxLink := 0 - for it := tmpServices.Front(); it != nil; it = it.Next() { - filteredService[idxLink] = it.Value.(Service) - idxLink++ - } - return filteredService -} - -func (conf RootConfig) isValid() bool { - var errs []error - for _, service := range conf.Services { - errs = append(errs, service.validate()...) - } - if len(errs) != 0 { - for _, err := range errs { - log.WithError(err).Errorln("Error") - } - } - return len(errs) == 0 -} - -// isValidUrl tests a string to determine if it is a valid URL or not. 
-func isValidURL(toTest string) bool { - if _, err := url.ParseRequestURI(toTest); err != nil { - return false - } - return true -} diff --git a/src/config/config_render.go b/src/config/config_render.go deleted file mode 100644 index 66bf5a4..0000000 --- a/src/config/config_render.go +++ /dev/null @@ -1,431 +0,0 @@ -package config - -import ( - "fmt" - "os" - "path/filepath" - "text/template" - - "github.com/shibukawa/configdir" - "github.com/simelo/rextporter/src/util" - "github.com/simelo/rextporter/src/util/file" - "github.com/spf13/viper" -) - -type templateData struct { - ServicesConfigPath string - MetricsForServicesPath string -} - -type metricsForServiceTemplateData struct { - ServiceNameToMetricsConfPath map[string]string -} - -type metricsForServiceConfigTemplateData struct { - TmplData metricsForServiceTemplateData -} - -type mainConfigData struct { - mainConfigPath string - tmplData templateData - metricsForServiceConfigTmplData metricsForServiceConfigTemplateData -} - -func (confData mainConfigData) ServicesConfigPath() string { - return confData.tmplData.ServicesConfigPath -} - -func (confData mainConfigData) metricsForServicesPath() string { - return confData.tmplData.MetricsForServicesPath -} - -func (confData mainConfigData) MetricsConfigPath(serviceName string) string { - return confData.metricsForServiceConfigTmplData.TmplData.ServiceNameToMetricsConfPath[serviceName] -} - -func (confData mainConfigData) MainConfigPath() string { - return confData.mainConfigPath -} - -const mainConfigFileContentTemplate = ` -servicesConfigTransport = "file" # "file" | "consulCatalog" -servicesConfigPath = "{{.ServicesConfigPath}}" -servicesConfigPath = "{{.ServicesConfigPath}}" -metricsForServicesPath = "{{.MetricsForServicesPath}}" -` - -const serviceConfigFileContentTemplate = ` -# Services configuration. -[[services]] - name = "skycoin" - modes = ["rest_api"] - scheme = "http" - port = 8080 - authType = "CSRF" - tokenHeaderKey = "X-CSRF-Token" - genTokenEndpoint = "/api/v1/csrf" - tokenKeyFromEndpoint = "csrf_token" - - [services.location] - location = "localhost" -` -const skycoinMetricsConfigFileContentTemplate = ` -# All metrics to be measured. 
-[[metrics]] - name = "seq" - url = "/api/v1/health" - httpMethod = "GET" - path = "/blockchain/head/seq" - - [metrics.options] - type = "Counter" - description = "I am running since" - -[[metrics]] - name = "openConnections" - url = "/api/v1/network/connections" - httpMethod = "GET" - path = "/connections/unconfirmed_verify_transaction/burn_factor" - - [metrics.options] - type = "Histogram" - description = "Connections amount" - - [metrics.histogramOptions] - buckets = [1, 2, 3] - - -[[metrics]] - name = "burn_factor_by_service" - url = "/api/v1/network/connections" - httpMethod = "GET" - path = "/connections" - - [metrics.options] - type = "Gauge" - itemPath = "/unconfirmed_verify_transaction/burn_factor" - description = "I am running since" - - [[metrics.options.labels]] - name = "ip_port" - path = "/address" - -[[metrics]] - name = "connectedAtBySocketAndByBurnFactor" - url = "/api/v1/network/connections" - httpMethod = "GET" - path = "/connections" - - [metrics.options] - type = "Gauge" - itemPath = "/unconfirmed_verify_transaction/burn_factor" - description = "Connections amount" - - [[metrics.options.labels]] - name = "ip:port" - path = "/address" - - [[metrics.options.labels]] - name = "ip:port" - path = "/address" - - -# TODO(denisacostaq@gmail.com): -# if you refer(under "metrics_for_host") to a not previously defined host or metric it will be raise an error and the process will not start -# if in all your definition you not use some host or metric the process will raise a warning and the process will start normally. -# from https://github.com/simelo/rextporter/pull/17 -# @denisacostaq services should be match against whole templates , rather than individual metrics. -# The match is not for hosts directly . The match is for service types . Works like this -# metrics <- m:1 -> templates <- 1:n -> services <- m:n -> (physical | virtual) hosts -` - -const metricsForServiceMappingConfFileContentTemplate = ` -serviceNameToMetricsConfPath = [{{range $key, $value := .}} - { {{$key}} = "{{$value}}" },{{end}} -] -` - -const ( - systemVendorName = "simelo" - // SystemProgramName is the program's name - SystemProgramName = "rextporter" - mainConfigFileName = "main.toml" - servicesConfigFileName = "services.toml" - metricsForServicesConfigFileName = "metricsForServices.toml" - skycoinMetricsConfigFileName = "skycoinMetrics.toml" - walletMetricsConfigFileName = "walletMetrics.toml" -) - -func (confData mainConfigData) existServicesConfigFile() bool { - return file.ExistFile(confData.ServicesConfigPath()) -} - -// createServicesConfigFile creates the service file or return an error if any, -// if the file already exist does no thin. 
-func (confData mainConfigData) createServicesConfigFile() (err error) { - generalScopeErr := "error creating service config file" - if confData.existServicesConfigFile() { - return nil - } - tmpl := template.New("serviceConfig") - var templateEngine *template.Template - if templateEngine, err = tmpl.Parse(serviceConfigFileContentTemplate); err != nil { - errCause := "error parsing service config: " + err.Error() - return util.ErrorFromThisScope(errCause, generalScopeErr) - } - if err = file.CreateFullPathForFile(confData.ServicesConfigPath()); err != nil { - errCause := "error creating directory for service file: " + err.Error() - return util.ErrorFromThisScope(errCause, generalScopeErr) - } - var servicesConfigFile *os.File - if servicesConfigFile, err = os.Create(confData.ServicesConfigPath()); err != nil { - errCause := "error creating service config file: " + err.Error() - return util.ErrorFromThisScope(errCause, generalScopeErr) - } - if err = templateEngine.Execute(servicesConfigFile, nil); err != nil { - errCause := "error writing main config file: " + err.Error() - return util.ErrorFromThisScope(errCause, generalScopeErr) - } - return err -} - -// createMetricsConfigFile creates the metrics file or return an error if any, -// if the file already exist does no thin. -func createMetricsConfigFile(metricConfPath string) (err error) { - generalScopeErr := "error creating metrics config file" - if file.ExistFile(metricConfPath) { - return nil - } - tmpl := template.New("metricsConfig") - var templateEngine *template.Template - if templateEngine, err = tmpl.Parse(skycoinMetricsConfigFileContentTemplate); err != nil { - errCause := "error parsing metrics config: " + err.Error() - return util.ErrorFromThisScope(errCause, generalScopeErr) - } - if err = file.CreateFullPathForFile(metricConfPath); err != nil { - errCause := "error creating directory for metrics file: " + err.Error() - return util.ErrorFromThisScope(errCause, generalScopeErr) - } - var metricsConfigFile *os.File - if metricsConfigFile, err = os.Create(metricConfPath); err != nil { - errCause := "error creating metrics config file: " + err.Error() - return util.ErrorFromThisScope(errCause, generalScopeErr) - } - if err = templateEngine.Execute(metricsConfigFile, nil); err != nil { - errCause := "error writing metrics config file: " + err.Error() - return util.ErrorFromThisScope(errCause, generalScopeErr) - } - return err -} - -func (confData mainConfigData) existMetricsForServicesConfigFile() bool { - return file.ExistFile(confData.tmplData.MetricsForServicesPath) -} - -// createMetricsForServicesConfFile creates the metrics for services file or return an error if any, -// if the file already exist does no thin. 
-func (confData mainConfigData) createMetricsForServicesConfFile() (err error) { - generalScopeErr := "error creating metrics for services config file" - if confData.existMetricsForServicesConfigFile() { - // FIXME(denisacostaq@gmail.com): check each metric file and create one of not exist - return nil - } - tmpl := template.New("metricsForServiceConfig") - var templateEngine *template.Template - if templateEngine, err = tmpl.Parse(metricsForServiceMappingConfFileContentTemplate); err != nil { - errCause := "error parsing metrics for services config: " + err.Error() - return util.ErrorFromThisScope(errCause, generalScopeErr) - } - if err = file.CreateFullPathForFile(confData.metricsForServicesPath()); err != nil { - errCause := "error creating directory for metrics for services file: " + err.Error() - return util.ErrorFromThisScope(errCause, generalScopeErr) - } - var metricsForServiceConfigFile *os.File - if metricsForServiceConfigFile, err = os.Create(confData.metricsForServicesPath()); err != nil { - errCause := "error creating metrics for services config file: " + err.Error() - return util.ErrorFromThisScope(errCause, generalScopeErr) - } - if err = templateEngine.Execute(metricsForServiceConfigFile, confData.metricsForServiceConfigTmplData.TmplData.ServiceNameToMetricsConfPath); err != nil { - errCause := "error writing metrics for services config file: " + err.Error() - return util.ErrorFromThisScope(errCause, generalScopeErr) - } - for key, val := range confData.metricsForServiceConfigTmplData.TmplData.ServiceNameToMetricsConfPath { - if err = createMetricsConfigFile(val); err != nil { - errCause := fmt.Sprintf("error creating metrics config file for service %s: %s", key, err.Error()) - return util.ErrorFromThisScope(errCause, generalScopeErr) - } - } - return err -} - -func (confData mainConfigData) existMainConfigFile() bool { - return file.ExistFile(confData.MainConfigPath()) -} - -// createMainConfigFile creates the main file or return an error if any, -// if the file already exist does no thin. 
-func (confData mainConfigData) createMainConfigFile() (err error) { - generalScopeErr := "error creating main config file" - if confData.existMainConfigFile() { - return nil - } - tmpl := template.New("mainConfig") - var templateEngine *template.Template - if templateEngine, err = tmpl.Parse(mainConfigFileContentTemplate); err != nil { - errCause := "error parsing main config: " + err.Error() - return util.ErrorFromThisScope(errCause, generalScopeErr) - } - if err = file.CreateFullPathForFile(confData.MainConfigPath()); err != nil { - errCause := "error creating directory for main file: " + err.Error() - return util.ErrorFromThisScope(errCause, generalScopeErr) - } - var mainConfigFile *os.File - if mainConfigFile, err = os.Create(confData.MainConfigPath()); err != nil { - errCause := "error creating main config file: " + err.Error() - return util.ErrorFromThisScope(errCause, generalScopeErr) - } - if err = templateEngine.Execute(mainConfigFile, confData.tmplData); err != nil { - errCause := "error writing main config file: " + err.Error() - return util.ErrorFromThisScope(errCause, generalScopeErr) - } - return err -} - -func servicesDefaultConfigPath(conf *configdir.Config) (path string) { - return file.DefaultConfigPath(servicesConfigFileName, conf) -} - -func mainDefaultConfigPath(conf *configdir.Config) (path string) { - return file.DefaultConfigPath(mainConfigFileName, conf) -} - -func metricsForServicesDefaultConfigPath(conf *configdir.Config) (path string) { - return file.DefaultConfigPath(metricsForServicesConfigFileName, conf) -} - -func skycoinMetricsConfigPath(conf *configdir.Config) (path string) { - return file.DefaultConfigPath(skycoinMetricsConfigFileName, conf) -} - -func walletMetricsConfigPath(conf *configdir.Config) (path string) { - return file.DefaultConfigPath(walletMetricsConfigFileName, conf) -} - -func defaultTmplData(conf *configdir.Config) (tmplData templateData) { - tmplData = templateData{ - ServicesConfigPath: servicesDefaultConfigPath(conf), - MetricsForServicesPath: metricsForServicesDefaultConfigPath(conf), - } - return tmplData -} - -func defaultMetricsForServiceTmplData(conf *configdir.Config) (tmplData metricsForServiceConfigTemplateData) { - tmplData = metricsForServiceConfigTemplateData{ - TmplData: metricsForServiceTemplateData{ - ServiceNameToMetricsConfPath: map[string]string{ - "skycoin": skycoinMetricsConfigPath(conf), - "wallet": walletMetricsConfigPath(conf), - }, - }, - } - return tmplData -} - -func tmplDataFromMainFile(mainConfigFilePath string) (tmpl templateData, err error) { - generalScopeErr := "error filling template data" - viper.SetConfigFile(mainConfigFilePath) - viper.SetConfigType("toml") - if err := viper.ReadInConfig(); err != nil { - errCause := fmt.Sprintln("error reading config file: ", mainConfigFilePath, err.Error()) - return tmpl, util.ErrorFromThisScope(errCause, generalScopeErr) - } - var mainConf templateData - if err := viper.Unmarshal(&mainConf); err != nil { - errCause := fmt.Sprintln("can not decode the config data: ", err.Error()) - return tmpl, util.ErrorFromThisScope(errCause, generalScopeErr) - } - tmpl.ServicesConfigPath, tmpl.MetricsForServicesPath = mainConf.ServicesConfigPath, mainConf.MetricsForServicesPath - return tmpl, err -} - -func (tmpl templateData) metricsForServicesTmplDataFromFile() (metricsForServicesTmpl metricsForServiceConfigTemplateData, err error) { - generalScopeErr := "error filling template data" - viper.SetConfigFile(tmpl.MetricsForServicesPath) - viper.SetConfigType("toml") - if err := 
viper.ReadInConfig(); err != nil { - errCause := fmt.Sprintln("error reading config file: ", tmpl.MetricsForServicesPath, err.Error()) - return metricsForServicesTmpl, util.ErrorFromThisScope(errCause, generalScopeErr) - } - if err := viper.Unmarshal(&(metricsForServicesTmpl.TmplData)); err != nil { - errCause := fmt.Sprintln("can not decode the config data: ", err.Error()) - return metricsForServicesTmpl, util.ErrorFromThisScope(errCause, generalScopeErr) - } - return metricsForServicesTmpl, err -} - -func metricsForServicesTmplData(conf *configdir.Config) metricsForServiceConfigTemplateData { - return defaultMetricsForServiceTmplData(conf) -} - -func newMainConfigData(path string) (mainConf mainConfigData, err error) { - generalScopeErr := "can not create main config instance" - if file.IsADirectoryPath(path) { - path = filepath.Join(path, mainConfigFileName) - } - var tmplData templateData - var metricsForServiceTmplData metricsForServiceConfigTemplateData - if len(path) == 0 || !file.ExistFile(path) { - // TODO(denisacostaq@gmail.com): move homeConf to fn defaultTmplData - var homeConf *configdir.Config - if homeConf, err = file.HomeConfigFolder(systemVendorName, SystemProgramName); err != nil { - errCause := "error looking for config folder under home: " + err.Error() - return mainConf, util.ErrorFromThisScope(errCause, generalScopeErr) - } - path = mainDefaultConfigPath(homeConf) - tmplData = defaultTmplData(homeConf) - metricsForServiceTmplData = metricsForServicesTmplData(homeConf) - } else { - if tmplData, err = tmplDataFromMainFile(path); err != nil { - errCause := "error reading template data from file: " + err.Error() - return mainConf, util.ErrorFromThisScope(errCause, generalScopeErr) - } - // BUG(denisacostaq@gmail.com): if file not exist, metricsForServicesTmplDataFromFile panics - if metricsForServiceTmplData, err = tmplData.metricsForServicesTmplDataFromFile(); err != nil { - errCause := "error reading template data from file: " + err.Error() - return mainConf, util.ErrorFromThisScope(errCause, generalScopeErr) - } - } - if len(tmplData.ServicesConfigPath) == 0 || len(tmplData.MetricsForServicesPath) == 0 { - var homeConf *configdir.Config - if homeConf, err = file.HomeConfigFolder(systemVendorName, SystemProgramName); err != nil { - errCause := "error looking for config folder under home: " + err.Error() - return mainConf, util.ErrorFromThisScope(errCause, generalScopeErr) - } - tmpTmplData := defaultTmplData(homeConf) - if len(tmplData.ServicesConfigPath) == 0 { - tmplData.ServicesConfigPath = tmpTmplData.ServicesConfigPath - } - if len(tmplData.MetricsForServicesPath) == 0 { - tmplData.MetricsForServicesPath = tmpTmplData.MetricsForServicesPath - } - metricsForServiceTmplData = metricsForServicesTmplData(homeConf) - } - mainConf = mainConfigData{ - mainConfigPath: path, - tmplData: tmplData, - metricsForServiceConfigTmplData: metricsForServiceTmplData, - } - if err = mainConf.createMainConfigFile(); err != nil { - errCause := "error creating main config file: " + err.Error() - return mainConf, util.ErrorFromThisScope(errCause, generalScopeErr) - } - if err = mainConf.createServicesConfigFile(); err != nil { - errCause := "error creating service config file: " + err.Error() - return mainConf, util.ErrorFromThisScope(errCause, generalScopeErr) - } - if err = mainConf.createMetricsForServicesConfFile(); err != nil { - errCause := "error creating metrics for services config file: " + err.Error() - return mainConf, util.ErrorFromThisScope(errCause, generalScopeErr) - } - 
return mainConf, err -} diff --git a/src/config/metric.go b/src/config/metric.go deleted file mode 100644 index c052ad0..0000000 --- a/src/config/metric.go +++ /dev/null @@ -1,145 +0,0 @@ -package config - -import ( - "errors" - "fmt" -) - -const ( - // KeyTypeCounter is the key you should define in the config file for counters. - KeyTypeCounter = "Counter" - // KeyTypeGauge is the key you should define in the config file for gauges. - KeyTypeGauge = "Gauge" - // KeyTypeHistogram is the key you should define in the config file for histograms. - KeyTypeHistogram = "Histogram" - // KeyTypeSummary is the key you should define in the config file for summaries. - KeyTypeSummary = "Summary" -) - -// Metric keep the metric name as an instance of MetricOptions -type Metric struct { - Name string `json:"name"` - URL string `json:"url"` - HTTPMethod string `json:"http_method"` - Path string `json:"path,omitempty"` - Options MetricOptions `json:"options"` - HistogramOptions HistogramOptions `json:"histogram_options"` -} - -// LabelNames return a slice with all the labels name -func (metric Metric) LabelNames() []string { - labelNames := make([]string, len(metric.Options.Labels)) - for idxLabel, label := range metric.Options.Labels { - labelNames[idxLabel] = label.Name - } - return labelNames -} - -func (metric Metric) isHistogram() bool { - hasBuckets := len(metric.HistogramOptions.ExponentialBuckets) != 0 || len(metric.HistogramOptions.Buckets) != 0 - return hasBuckets || metric.Options.Type == "Histogram" -} - -func (metric Metric) validate() (errs []error) { - if len(metric.Name) == 0 { - errs = append(errs, errors.New("name is required in metric")) - } - if len(metric.URL) == 0 { - errs = append(errs, errors.New("url is required in metric")) - } - if len(metric.HTTPMethod) == 0 { - errs = append(errs, errors.New("HttpMethod is required in metric")) - } - if len(metric.Path) == 0 { - errs = append(errs, errors.New("path is required in metric")) - } - if metric.HistogramOptions.inferType() == "Histogram" && metric.Options.Type != "Histogram" { - errs = append(errs, errors.New("the buckets, only apply for metrics of type histogram")) - } - errs = append(errs, metric.Options.validate()...) - if metric.isHistogram() { - errs = append(errs, metric.HistogramOptions.validate()...) 
- } - return errs -} - -// Label to create metrics grouping by json path value, for example: -// {Name: "color", "/properties/color"} -type Label struct { - // Name the name of the label, different values can be assigned to it - Name string - // Path the json path from where you need to extract the label - Path string -} - -func (l *Label) validate() (errs []error) { - if len(l.Name) == 0 { - errs = append(errs, errors.New("Name is required in metric")) - } - if len(l.Path) == 0 { - errs = append(errs, errors.New("Path is required in metric")) - } - return errs -} - -// MetricOptions keep information you about the metric, mostly the type(Counter, Gauge, Summary, and Histogram) -type MetricOptions struct { - Type string `json:"type"` - ItemPath string `json:"item_path"` - Description string `json:"description"` - Labels []Label -} - -func (mo MetricOptions) validate() (errs []error) { - if len(mo.Type) == 0 { - errs = append(errs, errors.New("type is required in metric")) - } - switch mo.Type { - case KeyTypeCounter, KeyTypeGauge, KeyTypeHistogram: - case KeyTypeSummary: - errs = append(errs, fmt.Errorf("type %s is not supported yet", KeyTypeSummary)) - default: - errs = append(errs, fmt.Errorf("type should be one of %s, %s, %s or %s", KeyTypeCounter, KeyTypeGauge, KeyTypeSummary, KeyTypeHistogram)) - } - if len(mo.ItemPath) == 0 && len(mo.Labels) != 0 { - errs = append(errs, errors.New("if you define labels this is a vector and itemPath config is required")) - } - if len(mo.ItemPath) != 0 && len(mo.Labels) == 0 { - errs = append(errs, errors.New("if you define itemPath this is a vector and labels config is required")) - } - for _, label := range mo.Labels { - errs = append(errs, label.validate()...) - } - return errs -} - -// HistogramOptions allows you to define the histogram is buckets. -type HistogramOptions struct { - Buckets []float64 `json:"buckets"` - - // ExponentialBuckets is a len three array where: - // - The first value is the low bound start bucket. - // - The second vale is the growing factor. - // - The three one is the buckets amount. 
- ExponentialBuckets []float64 `json:"exponential_buckets"` -} - -func (ho HistogramOptions) validate() (errs []error) { - if len(ho.Buckets) > 0 && len(ho.ExponentialBuckets) > 0 { - errs = append(errs, errors.New("You should define only one betwen the 'buckets' and the 'exponentialBuckets'")) - } - if len(ho.Buckets) == 0 && len(ho.ExponentialBuckets) == 0 { - errs = append(errs, errors.New("At least one should be defined the 'buckets' or the 'exponentialBuckets'")) - } - if len(ho.ExponentialBuckets) != 0 && len(ho.ExponentialBuckets) != 3 { - errs = append(errs, errors.New("'exponentialBuckets' should have an exact length of 3(start, factor, amount)")) - } - return errs -} - -func (ho HistogramOptions) inferType() (t string) { - if len(ho.Buckets) != 0 || len(ho.ExponentialBuckets) != 0 { - t = "Histogram" - } - return t -} diff --git a/src/config/metric_test.go b/src/config/metric_test.go deleted file mode 100644 index 9ec5a7b..0000000 --- a/src/config/metric_test.go +++ /dev/null @@ -1,102 +0,0 @@ -package config - -import ( - "testing" - - "github.com/stretchr/testify/suite" -) - -type metricConfSuit struct { - suite.Suite - MetricConf *Metric -} - -func (suite *metricConfSuit) SetupTest() { - var conf RootConfig - conf.Services = []Service{ - Service{ - Name: "MySupperServer", - Modes: []string{"rest_api"}, - Scheme: "http", - Location: Server{Location: "http://localhost:8080"}, - Port: 8080, - BasePath: "/skycoin/node", - AuthType: "CSRF", - TokenHeaderKey: "X-CSRF-Token", - GenTokenEndpoint: "/api/v1/csrf", - TokenKeyFromEndpoint: "csrf_token", - Metrics: []Metric{ - Metric{ - Name: "MySupperMetric", - URL: "/api/v1/health", - HTTPMethod: "GET", - Path: "/blockchain/head/seq", - Options: MetricOptions{Type: KeyTypeCounter, Description: "It is all about ..."}, - HistogramOptions: HistogramOptions{}, - }}, - }, - } - suite.MetricConf = &(conf.Services[0].Metrics[0]) -} - -func TestMetricConfSuit(t *testing.T) { - suite.Run(t, new(metricConfSuit)) -} - -func (suite *metricConfSuit) TestEnsureDefaultSuitMetricConfIsValid() { - // NOTE(denisacostaq@gmail.com): Giving - // default - metricConf := suite.MetricConf - - // NOTE(denisacostaq@gmail.com): When - // test start - - // NOTE(denisacostaq@gmail.com): Assert - suite.Len(metricConf.validate(), 0) -} - -func (suite *metricConfSuit) TestNotEmptyName() { - // NOTE(denisacostaq@gmail.com): Giving - var metricConf = suite.MetricConf - metricConf.Name = string("") - - // NOTE(denisacostaq@gmail.com): When - - // NOTE(denisacostaq@gmail.com): Assert - suite.Len(metricConf.validate(), 1) -} - -func (suite *metricConfSuit) TestNotEmptyURL() { - // NOTE(denisacostaq@gmail.com): Giving - var metricConf = suite.MetricConf - metricConf.URL = string("") - - // NOTE(denisacostaq@gmail.com): When - - // NOTE(denisacostaq@gmail.com): Assert - suite.Len(metricConf.validate(), 1) -} - -func (suite *metricConfSuit) TestNotEmptyHTTPMethod() { - // NOTE(denisacostaq@gmail.com): Giving - var metricConf = suite.MetricConf - metricConf.HTTPMethod = string("") - - // NOTE(denisacostaq@gmail.com): When - - // NOTE(denisacostaq@gmail.com): Assert - suite.Len(metricConf.validate(), 1) -} - -func (suite *metricConfSuit) TestNotEmptyPath() { - // NOTE(denisacostaq@gmail.com): Giving - var metricConf = suite.MetricConf - metricConf.Path = string("") - - // NOTE(denisacostaq@gmail.com): When - - // NOTE(denisacostaq@gmail.com): Assert - suite.Len(metricConf.validate(), 1) -} - -// TODO(denisacostaq@gmail.com): test define buckets but declare type counter for 
example diff --git a/src/config/server.go b/src/config/server.go deleted file mode 100644 index ae1435c..0000000 --- a/src/config/server.go +++ /dev/null @@ -1,20 +0,0 @@ -package config - -import "errors" - -// Server the server where is running the service -type Server struct { - // Location should have the ip or URL. - Location string `json:"location"` -} - -func (server Server) validate() (errs []error) { - if len(server.Location) == 0 { - errs = append(errs, errors.New("location is required in server")) - } - // if !isValidURL(server.Location) { - // // TODO(denisacostaq@gmail.com): how to do this? - // // errs = append(errs, errors.New("location is not a valid url in server")) - // } - return errs -} diff --git a/src/config/server_test.go b/src/config/server_test.go deleted file mode 100644 index 7369e3a..0000000 --- a/src/config/server_test.go +++ /dev/null @@ -1,43 +0,0 @@ -package config - -import ( - "testing" - - "github.com/stretchr/testify/suite" -) - -type serverConfSuit struct { - suite.Suite - ServerConf Server -} - -func (suite *serverConfSuit) SetupTest() { - suite.ServerConf = Server{Location: "http://localhost:8080"} -} - -func TestServerConfSuit(t *testing.T) { - suite.Run(t, new(metricConfSuit)) -} - -func (suite *serverConfSuit) TestEnsureDefaultSuitServerConfIsValid() { - // NOTE(denisacostaq@gmail.com): Giving - // default - serverConf := suite.ServerConf - - // NOTE(denisacostaq@gmail.com): When - // test start - - // NOTE(denisacostaq@gmail.com): Assert - suite.Len(serverConf.validate(), 0) -} - -func (suite *serverConfSuit) TestNotEmptyName() { - // NOTE(denisacostaq@gmail.com): Giving - var serverConf = suite.ServerConf - serverConf.Location = string("") - - // NOTE(denisacostaq@gmail.com): When - - // NOTE(denisacostaq@gmail.com): Assert - suite.Len(serverConf.validate(), 1) -} diff --git a/src/config/service.go b/src/config/service.go deleted file mode 100644 index 909f2e6..0000000 --- a/src/config/service.go +++ /dev/null @@ -1,151 +0,0 @@ -package config - -import ( - "container/list" - "errors" - "fmt" -) - -const ( - // ServiceTypeAPIRest is the key you should define in the config file for a service who request remote data - // and uses this to build the metrics. - ServiceTypeAPIRest = "rest_api" - // ServiceTypeProxy is the key you should define in the config file for a service to work like a middleware/forward_metrics. - ServiceTypeProxy = "forward_metrics" -) - -// Service is a concept to grab information about a running server, for example: -// where is it http://localhost:1234 (Location + : + Port), what auth kind you need to use? -// what is the header key you in which you need to send the token, and so on. 
-type Service struct { - Name string `json:"name"` - Modes []string `json:"modes"` - // Scheme is http or https - Scheme string `json:"scheme"` - Port uint16 `json:"port"` - BasePath string `json:"basePath"` - MetricsToForwardPath string `json:"metrics_to_forward"` - AuthType string `json:"authType"` - TokenHeaderKey string `json:"tokenHeaderKey"` - GenTokenEndpoint string `json:"genTokenEndpoint"` - TokenKeyFromEndpoint string `json:"tokenKeyFromEndpoint"` - Location Server `json:"location"` - Metrics []Metric `json:"metrics"` -} - -// JobName returns the default label value for job -func (srv Service) JobName() string { - return srv.Name -} - -// InstanceName returns the default label value for instance -func (srv Service) InstanceName() string { - return fmt.Sprintf("%s:%d", srv.Location.Location, srv.Port) -} - -// URIToGetMetric build the URI from where you will to get metric information -func (srv Service) URIToGetMetric(metric Metric) string { - return fmt.Sprintf("%s://%s:%d%s%s", srv.Scheme, srv.Location.Location, srv.Port, srv.BasePath, metric.URL) -} - -// URIToGetExposedMetric build the URI from where you will to get the exposed metrics. -func (srv Service) URIToGetExposedMetric() string { - return fmt.Sprintf("%s://%s:%d%s%s", srv.Scheme, srv.Location.Location, srv.Port, srv.BasePath, srv.MetricsToForwardPath) -} - -// URIToGetToken build the URI from where you will to get the token -func (srv Service) URIToGetToken() string { - return fmt.Sprintf("%s://%s:%d%s%s", srv.Scheme, srv.Location.Location, srv.Port, srv.BasePath, srv.GenTokenEndpoint) -} - -// FilterMetricsByType will return all the metrics who match with the 't' parameter in this service. -func (srv Service) FilterMetricsByType(t string) (metrics []Metric) { - tmpMetrics := list.New() - for _, metric := range srv.Metrics { - if metric.Options.Type == t { - tmpMetrics.PushBack(metric) - } - } - metrics = make([]Metric, tmpMetrics.Len()) - idxLink := 0 - for it := tmpMetrics.Front(); it != nil; it = it.Next() { - metrics[idxLink] = it.Value.(Metric) - idxLink++ - } - return metrics -} - -// CountMetricsByType will return the number of metrics who match with the 't' parameter in this service. 
-func (srv Service) CountMetricsByType(t string) (amount int) { - for _, metric := range srv.Metrics { - if metric.Options.Type == t { - amount++ - } - } - return -} - -func (srv Service) validateProxy() (errs []error) { - if !isValidURL(srv.URIToGetExposedMetric()) { - errs = append(errs, errors.New("can not create a valid url to get the exposed metric")) - } - if len(srv.MetricsToForwardPath) == 0 { - errs = append(errs, errors.New("you need to define metricsToForwardPath if you enable proxy(forward_metrics) mode")) - } - return errs -} - -func (srv Service) validateAPIRest() (errs []error) { - if !isValidURL(srv.URIToGetToken()) { - errs = append(errs, errors.New("can not create a valid url to get token: "+srv.URIToGetToken())) - } - for _, metric := range srv.Metrics { - if !isValidURL(srv.URIToGetMetric(metric)) { - errs = append(errs, errors.New("can not create a valid url to get metric: "+srv.URIToGetMetric(metric))) - } - } - if srv.AuthType == "CSRF" && len(srv.TokenHeaderKey) == 0 { - errs = append(errs, errors.New("TokenHeaderKey is required if you are using CSRF")) - } - if srv.AuthType == "CSRF" && len(srv.TokenKeyFromEndpoint) == 0 { - errs = append(errs, errors.New("TokenKeyFromEndpoint is required if you are using CSRF")) - } - if srv.AuthType == "CSRF" && len(srv.GenTokenEndpoint) == 0 { - errs = append(errs, errors.New("GenTokenEndpoint is required if you are using CSRF")) - } - return errs -} - -func (srv Service) validate() (errs []error) { - if len(srv.Name) == 0 { - errs = append(errs, errors.New("name is required in service")) - } - if len(srv.Scheme) == 0 { - errs = append(errs, errors.New("scheme is required in service")) - } - if srv.Port < 1 || srv.Port > 65535 { - errs = append(errs, errors.New("port must be betwen 1 and 65535")) - } - if len(srv.Modes) == 0 { - errs = append(errs, fmt.Errorf("you you have to define at least a service mode, possibles are: %s or %s", ServiceTypeAPIRest, ServiceTypeProxy)) - } - for _, mode := range srv.Modes { - switch mode { - case ServiceTypeProxy: - errs = append(errs, srv.validateProxy()...) - case ServiceTypeAPIRest: - errs = append(errs, srv.validateAPIRest()...) - default: - if len(mode) == 0 { - errs = append(errs, fmt.Errorf("mode is required in service")) - } else { - errs = append(errs, fmt.Errorf("mode allow instances of %s or %s only", ServiceTypeAPIRest, ServiceTypeProxy)) - } - } - } - for _, metric := range srv.Metrics { - errs = append(errs, metric.validate()...) - } - errs = append(errs, srv.Location.validate()...) 
- return errs -} diff --git a/src/config/service_test.go b/src/config/service_test.go deleted file mode 100644 index 8a320a2..0000000 --- a/src/config/service_test.go +++ /dev/null @@ -1,137 +0,0 @@ -package config - -import ( - "testing" - - "github.com/stretchr/testify/suite" -) - -type serviceConfSuite struct { - suite.Suite - ServiceConf Service -} - -func (suite *serviceConfSuite) SetupTest() { - suite.ServiceConf = Service{ - Name: "MySupperServer", - Modes: []string{"rest_api"}, - Scheme: "http", - Location: Server{Location: "http://localhost:8080"}, - Port: 8080, - BasePath: "/skycoin/node", - AuthType: "CSRF", - TokenHeaderKey: "X-CSRF-Token", - GenTokenEndpoint: "/api/v1/csrf", - TokenKeyFromEndpoint: "csrf_token", - } -} - -func TestServiceConfSuite(t *testing.T) { - suite.Run(t, new(serviceConfSuite)) -} - -func (suite *serviceConfSuite) TestEnsureDefaultSuiteServiceConfIsValid() { - // NOTE(denisacostaq@gmail.com): Giving - // default - serviceConf := suite.ServiceConf - - // NOTE(denisacostaq@gmail.com): When - // test start - - // NOTE(denisacostaq@gmail.com): Assert - suite.Len(serviceConf.validate(), 0) -} - -func (suite *serviceConfSuite) TestNotEmptyName() { - // NOTE(denisacostaq@gmail.com): Giving - var serviceConf = suite.ServiceConf - serviceConf.Name = string("") - - // NOTE(denisacostaq@gmail.com): When - - // NOTE(denisacostaq@gmail.com): Assert - suite.Len(serviceConf.validate(), 1) -} - -func (suite *serviceConfSuite) TestNotEmptyMode() { - // NOTE(denisacostaq@gmail.com): Giving - var serviceConf = suite.ServiceConf - serviceConf.Modes = []string{} - - // NOTE(denisacostaq@gmail.com): When - - // NOTE(denisacostaq@gmail.com): Assert - suite.Len(serviceConf.validate(), 1) -} - -func (suite *serviceConfSuite) TestBothModes() { - // NOTE(denisacostaq@gmail.com): Giving - var serviceConf = suite.ServiceConf - serviceConf.MetricsToForwardPath = "/metrics" - serviceConf.Modes = []string{ServiceTypeProxy, ServiceTypeAPIRest} - - // NOTE(denisacostaq@gmail.com): When - - // NOTE(denisacostaq@gmail.com): Assert - suite.Len(serviceConf.validate(), 0) -} - -func (suite *serviceConfSuite) TestNotValidMode() { - // NOTE(denisacostaq@gmail.com): Giving - var serviceConf = suite.ServiceConf - serviceConf.Modes = []string{"acere"} - - // NOTE(denisacostaq@gmail.com): When - - // NOTE(denisacostaq@gmail.com): Assert - suite.Len(serviceConf.validate(), 1) -} - -func (suite *serviceConfSuite) TestValidateLocation() { - // NOTE(denisacostaq@gmail.com): Giving - var serviceConf = suite.ServiceConf - serviceConf.Location.Location = string("") - - // NOTE(denisacostaq@gmail.com): When - - // NOTE(denisacostaq@gmail.com): Assert - suite.NotEmpty(serviceConf.validate()) // empty, invalid and url + port invalid -} - -// TODO(denisacostaq@gmail.com): validate port if change type from uint16 to ... 
- -func (suite *serviceConfSuite) TestCsrfAuthButEmptyTokenKeyFromEndpoint() { - // NOTE(denisacostaq@gmail.com): Giving - var serviceConf = suite.ServiceConf - serviceConf.AuthType = "CSRF" - serviceConf.TokenKeyFromEndpoint = "" - - // NOTE(denisacostaq@gmail.com): When - - // NOTE(denisacostaq@gmail.com): Assert - suite.Len(serviceConf.validate(), 1) -} - -func (suite *serviceConfSuite) TestCsrfAuthButEmptyTokenHeaderKey() { - // NOTE(denisacostaq@gmail.com): Giving - var serviceConf = suite.ServiceConf - serviceConf.AuthType = "CSRF" - serviceConf.TokenHeaderKey = "" - - // NOTE(denisacostaq@gmail.com): When - - // NOTE(denisacostaq@gmail.com): Assert - suite.Len(serviceConf.validate(), 1) -} - -func (suite *serviceConfSuite) TestCsrfAuthButEmptyGenTokenEndpoint() { - // NOTE(denisacostaq@gmail.com): Giving - var serviceConf = suite.ServiceConf - serviceConf.AuthType = "CSRF" - serviceConf.GenTokenEndpoint = "" - - // NOTE(denisacostaq@gmail.com): When - - // NOTE(denisacostaq@gmail.com): Assert - suite.Len(serviceConf.validate(), 1) -} diff --git a/src/config/serviceconfigfromfile.go b/src/config/serviceconfigfromfile.go deleted file mode 100644 index e32350f..0000000 --- a/src/config/serviceconfigfromfile.go +++ /dev/null @@ -1,41 +0,0 @@ -package config - -import ( - "fmt" - - "github.com/simelo/rextporter/src/util" - "github.com/spf13/viper" -) - -// ServiceConfigFromFile get a service config from a file toml -type ServiceConfigFromFile struct { - filePath string -} - -// NewServicesConfigFromFile create a config reader configure to read config from the file in path parameter -func NewServicesConfigFromFile(path string) (conf *ServiceConfigFromFile) { - conf = &ServiceConfigFromFile{} - conf.filePath = path - return conf -} - -// GetConfig read the file 'filePath' and returns the services config or an error if any -func (srvConf ServiceConfigFromFile) GetConfig() (services []Service, err error) { - generalScopeErr := "error reading config from file" - if len(srvConf.filePath) == 0 { - errCause := fmt.Sprintln("file path should not be empty, are you using the 'NewServicesConfigFromFile' function to get an instance?") - return services, util.ErrorFromThisScope(errCause, generalScopeErr) - } - viper.SetConfigFile(srvConf.filePath) - if err := viper.ReadInConfig(); err != nil { - errCause := fmt.Sprintln("error reading config file: ", srvConf.filePath, err.Error()) - return services, util.ErrorFromThisScope(errCause, generalScopeErr) - } - var root RootConfig - if err := viper.Unmarshal(&root); err != nil { - errCause := fmt.Sprintln("can not decode the config data: ", err.Error()) - return services, util.ErrorFromThisScope(errCause, generalScopeErr) - } - services = root.Services - return services, err -} diff --git a/src/config/serviceconfigreader.go b/src/config/serviceconfigreader.go deleted file mode 100644 index 6c9906a..0000000 --- a/src/config/serviceconfigreader.go +++ /dev/null @@ -1,8 +0,0 @@ -package config - -// ServiceConfigReader is an interface to get a service config from for example: -// a file, a REST API, a stream and so on... 
-type ServiceConfigReader interface { - // GetConfig return a service config or an error if any - GetConfig() (Service, error) -} diff --git a/src/config/storage.go b/src/config/storage.go new file mode 100644 index 0000000..d912156 --- /dev/null +++ b/src/config/storage.go @@ -0,0 +1 @@ +package config diff --git a/src/configlocator/locator.go b/src/configlocator/locator.go new file mode 100644 index 0000000..1f13400 --- /dev/null +++ b/src/configlocator/locator.go @@ -0,0 +1,7 @@ +package configlocator + +// MainFile return the entry point path for a config +func MainFile() string { + // FIXME(denisacostaq@gmail.com): make this working + return "/home/adacosta/.config/simelo/rextporter/main.toml" +} diff --git a/src/core/config.go b/src/core/config.go new file mode 100644 index 0000000..5f2e7d5 --- /dev/null +++ b/src/core/config.go @@ -0,0 +1,221 @@ +package core + +import ( + "errors" +) + +var ( + // ErrKeyInvalidType for unexpected type + ErrKeyInvalidType = errors.New("Unsupported type") + // ErrKeyNotFound in key value store + ErrKeyNotFound = errors.New("Missing key") + // ErrKeyNotClonable in key value store + ErrKeyNotClonable = errors.New("Impossible to obtain a copy of object") + // ErrKeyConfigHaveSomeErrors for config validation + ErrKeyConfigHaveSomeErrors = errors.New("Config have some errors") + // ErrKeyEmptyValue values not allowed + ErrKeyEmptyValue = errors.New("A required value is missed (empty or nil)") + // ErrKeyDecodingFile can not parse or decode content + ErrKeyDecodingFile = errors.New("Error decoding/parsing read content") +) + +const ( + // KeyMetricTypeCounter is the key you should define in the config file for counters. + KeyMetricTypeCounter = "Counter" + // KeyMetricTypeGauge is the key you should define in the config file for gauges. + KeyMetricTypeGauge = "Gauge" + // KeyMetricTypeHistogram is the key you should define in the config file for histograms. + KeyMetricTypeHistogram = "Histogram" + // KeyMetricTypeSummary is the key you should define in the config file for summaries. 
+ KeyMetricTypeSummary = "Summary" +) + +const ( + // OptKeyRextResourceDefHTTPMethod key to define an http method inside a RextResourceDef + OptKeyRextResourceDefHTTPMethod = "d43e326a-3e5d-462c-ad92-39dc2272f1d8" + // OptKeyRextAuthDefTokenHeaderKey key to define a token header key inside a RextAuthDef + OptKeyRextAuthDefTokenHeaderKey = "768772f5-cbe7-4a61-96ba-72ab99aede59" + // OptKeyRextAuthDefTokenKeyFromEndpoint key to define a token key from a response auth API inside a RextAuthDef + OptKeyRextAuthDefTokenKeyFromEndpoint = "1cb99a48-c642-4234-af5e-7de88cb20271" + // OptKeyRextAuthDefTokenGenEndpoint key to define a token endpoint to get authenticated inside a RextAuthDef + OptKeyRextAuthDefTokenGenEndpoint = "3a5e1d2f-53c0-4c47-b0cb-13a3190ce97f" + // OptKeyRextServiceDefJobName key to define the job name, it is mandatory for all services + OptKeyRextServiceDefJobName = "555efe9a-fd0a-4f03-9724-fed758491e65" + // OptKeyRextServiceDefInstanceName key to define a instance name for a service, it is mandatory for all services + // a service can run in multiple nodes(physical or virtual), all these instances are mandatory, can be + // for example 127.0.0.0:8080 + OptKeyRextServiceDefInstanceName = "0a12a60a-6ed4-400b-af78-2664d6588233" + // OptKeyRextMetricDefHMetricBuckets key to hold the configured buckets inside a RextMetricDef if you are using + // a histogram kind + OptKeyRextMetricDefHMetricBuckets = "9983807d-13fe-4b1d-9363-4b844ea2f301" + // OptKeyRextMetricDefVecItemPath key to hold the path where you can find the items for a metrics vec + OptKeyRextMetricDefVecItemPath = "ca49882d-893f-4707-b195-2ab885e0f67f" +) + +// RextRoot hold a service list whit their configurations info +type RextRoot interface { + GetServices() []RextServiceDef + AddService(RextServiceDef) + Clone() (RextRoot, error) + Validate() (hasError bool) +} + +// RextServiceDef encapsulates all data for services +type RextServiceDef interface { + SetBasePath(path string) // can be an http server base path, a filesystem directory ... + GetBasePath() string + // file | http | ftp + GetProtocol() string + SetProtocol(string) // TODO(denisacostaq@gmail.com): move this to set base path, and add a port too + SetAuthForBaseURL(RextAuthDef) + GetAuthForBaseURL() RextAuthDef + AddResource(source RextResourceDef) + AddResources(sources ...RextResourceDef) + GetResources() []RextResourceDef + GetOptions() RextKeyValueStore + Clone() (RextServiceDef, error) + Validate() (hasError bool) +} + +// RextResourceDef for retrieving raw data +type RextResourceDef interface { + // GetResourcePATH should be used in the context of a service, so the service base path information + // have to be passed to this method, it returns a resource url = base_path + uri + GetResourcePATH(basePath string) string + + // GetAuth should be used in the context of a service, so the service auth information + // have to be passed to this method(can be null if not auth is required for major service calls) + // it returns the resource auth info or the general auth for service if the resource have not a specific + // one. + GetAuth(defAuth RextAuthDef) (auth RextAuthDef) + + // SetResourceURI set the path where live the resource inside a service, see examples below + // http -> /api/v1/network/connections | /api/v1/health + // file -> /path/to/a/file | /proc/$(pidof qtcreator)/status + // the retrieved resource can be a json file, a xml, a plain text, a .rar ... 
+ SetResourceURI(string) + + // SetAuth set a specific auth info for the resource if required, for example + // in a web server different resource path can have different different auth strategics|info, + // in a filesystem some special files may require root(admin) access + SetAuth(RextAuthDef) + + // GetDecoder return a decoder to parse the resource and get the info + GetDecoder() RextDecoderDef + + // SetDecoder set a decoder to parse the resource and get the info + SetDecoder(RextDecoderDef) + + // AddMetricDef set a metric definition for this resource path + AddMetricDef(RextMetricDef) + GetMetricDefs() []RextMetricDef + + SetType(string) // TODO(denisacostaq@gmail.com): remove this + GetType() string // TODO(denisacostaq@gmail.com): remove this + GetOptions() RextKeyValueStore + Clone() (RextResourceDef, error) + Validate() (hasError bool) +} + +// RextDecoderDef allow you to decode a resource from different formats +type RextDecoderDef interface { + // GetType return some kind of "encoding" like: json, xml, ini, plain_text, prometheus_exposed_metrics, + // .rar(even encrypted) + GetType() string + + // GetOptions return additional options for example if the retrieved content is encripted, get info + // about the algorithm, the key, and so on... + GetOptions() RextKeyValueStore + Clone() (RextDecoderDef, error) + Validate() (hasError bool) +} + +const ( + // RextNodeSolverTypeJSONPath var name to use node solver of json kind + RextNodeSolverTypeJSONPath = "jsonPath" +) + +// RextNodeSolver help you to get raw data(sample/s) to create a metric from a specific path inside a +// retrieved resource +type RextNodeSolver interface { + // GetType return the strategy to find the data, it can be: jpath, xpath, .ini, plain_text, .tar.gz + // it is different to RextDecoderDef.type in the sense of a decoder can work over a binary encoded + // content and after, the node solver over a .rar + GetType() string + + // GetNodePath return the path where you can find the value, it depends on the type, see some examples below: + // "json" -> "/blockchain/head/seq" | "/blockchain/head/fee" + // "xml" -> "/blockchain/head/seq" | "/blockchain/head/fee" + // "ini" -> "key_name" + // "plain_text" -> line number + // "directory" -> file_path + // ".rar" -> file_path | file_path + jpath for the specific file | file_path + key(.ini) for the specific file + GetNodePath() string + SetNodePath(string) + + // GetOptions return additional information for more complex data structures, like for example in the + // .rar example above + GetOptions() RextKeyValueStore + Clone() (RextNodeSolver, error) + Validate() (hasError bool) +} + +// RextMetricDef contains the metadata associated to the metrics +type RextMetricDef interface { + // GetMetricName return the metric name + GetMetricName() string + // GetMetricType return the metric type + GetMetricType() string + // GetMetricDescription return the metric description + GetMetricDescription() string + // GetLabels return the labels in which the metrics should be mapped in + GetLabels() []RextLabelDef + // GetNodeSolver return a solver able to get the metric sample/s + GetNodeSolver() RextNodeSolver + SetNodeSolver(RextNodeSolver) + SetMetricName(string) + SetMetricType(string) + SetMetricDescription(string) + AddLabel(RextLabelDef) + GetOptions() RextKeyValueStore + Clone() (RextMetricDef, error) + Validate() (hasError bool) +} + +// RextLabelDef define a label name and the way to get the value for metrics vec +type RextLabelDef interface { + SetName(string) + // GetName 
return the metric name + GetName() string + SetNodeSolver(RextNodeSolver) + // GetNodeSolver return the solver able to get the metric value + GetNodeSolver() RextNodeSolver + Clone() (RextLabelDef, error) + Validate() (hasError bool) +} + +// AuthTypeCSRF define a const name for auth of type CSRF +const AuthTypeCSRF = "CSRF" + +// RextAuthDef can store information about authentication requirements, how and where you can autenticate, +// using what values, all this info is stored inside a RextAuthDef +type RextAuthDef interface { + // SetAuthType set the auth type + SetAuthType(string) + // GetAuthType return the auth type + GetAuthType() string + GetOptions() RextKeyValueStore + Clone() (RextAuthDef, error) + Validate() (hasError bool) +} + +// RextKeyValueStore providing access to object settings, you give a key with a value(can be a string or +// a interface{}) for store this value and later you can get back this value trough the original key. +type RextKeyValueStore interface { + GetString(key string) (string, error) + SetString(key string, value string) (bool, error) + GetObject(key string) (interface{}, error) + SetObject(key string, value interface{}) (bool, error) + GetKeys() []string + Clone() (RextKeyValueStore, error) +} diff --git a/src/core/mocks/.gitkeep b/src/core/mocks/.gitkeep new file mode 100644 index 0000000..bbd7b75 --- /dev/null +++ b/src/core/mocks/.gitkeep @@ -0,0 +1,2 @@ +// This file allow you to make the github.com/simelo/rextporter/src/core/mocks package existent without content, +// an empty folder is present at the go get instant, later the mock files will be autogenerated. diff --git a/src/core/validations.go b/src/core/validations.go new file mode 100644 index 0000000..0b331d2 --- /dev/null +++ b/src/core/validations.go @@ -0,0 +1,189 @@ +package core + +import ( + log "github.com/sirupsen/logrus" +) + +// ValidateAuth check if the auth instance in parameter fill the required constraints +// to be considered as a valid RextAuthDef. +// Return true if any error is found +func ValidateAuth(auth RextAuthDef) (hasError bool) { + if len(auth.GetAuthType()) == 0 { + hasError = true + log.Errorln("type is required in auth config") + } + if auth.GetAuthType() == AuthTypeCSRF { + opts := auth.GetOptions() + if tkhk, err := opts.GetString(OptKeyRextAuthDefTokenHeaderKey); err != nil || len(tkhk) == 0 { + hasError = true + log.Errorln("token header key is required for CSRF auth type") + } + if tkge, err := opts.GetString(OptKeyRextAuthDefTokenGenEndpoint); err != nil || len(tkge) == 0 { + hasError = true + log.Errorln("token gen endpoint is required for CSRF auth type") + } + if tkfe, err := opts.GetString(OptKeyRextAuthDefTokenKeyFromEndpoint); err != nil || len(tkfe) == 0 { + hasError = true + log.Errorln("token from endpoint is required for CSRF auth type") + } + } + return hasError +} + +// ValidateResource check if the resource instance in parameter fill the required constraints +// to be considered as a valid RextResourceDef. 
+// Return true if any error is found +func ValidateResource(r RextResourceDef) (hasError bool) { + if len(r.GetType()) == 0 { + hasError = true + log.Errorln("type is required in metric config") + } + if len(r.GetResourcePATH("")) == 0 { + hasError = true + log.Errorln("resource path is required in metric config") + } + if r.GetDecoder() == nil { + hasError = true + log.Errorln("decoder is required in metric config") + } else if r.GetDecoder().Validate() { + hasError = true + } + if r.GetAuth(nil) != nil { + if r.GetAuth(nil).Validate() { + hasError = true + } + } + for _, mtrDef := range r.GetMetricDefs() { + if mtrDef.Validate() { + hasError = true + } + } + return hasError +} + +// ValidateService check if the resource instance in parameter fill the required constraints +// to be considered as a valid RextServiceDef. +// Return true if any error is found +func ValidateService(srv RextServiceDef) (hasError bool) { + srvOpts := srv.GetOptions() + jobName, err := srvOpts.GetString(OptKeyRextServiceDefJobName) + if err != nil { + hasError = true + log.WithError(err).Errorln("key for job name not present service config") + } + if len(jobName) == 0 { + hasError = true + log.Errorln("job name is required in service config") + } + var instanceName string + instanceName, err = srvOpts.GetString(OptKeyRextServiceDefInstanceName) + if err != nil { + hasError = true + log.WithError(err).Errorln("key for job name not present service config") + } + if len(instanceName) == 0 { + hasError = true + log.Errorln("instance name is required in service config") + } + if len(srv.GetProtocol()) == 0 { + hasError = true + log.Errorln("protocol should not be null in service config") + } + if srv.GetAuthForBaseURL() != nil { + if srv.GetAuthForBaseURL().Validate() { + hasError = true + } + } + for _, resource := range srv.GetResources() { + if resource.Validate() { + hasError = true + } + } + return hasError +} + +// ValidateNodeSolver check if the node solver instance in parameter fill the required constraints +// to be considered as a valid RextNodeSolver. +// Return true if any error is found +func ValidateNodeSolver(ns RextNodeSolver) (hasError bool) { + if len(ns.GetNodePath()) == 0 { + hasError = true + log.Errorln("node path is required in node solver config") + } + return hasError +} + +// ValidateLabel check if the label instance in parameter fill the required constraints +// to be considered as a valid RextLabelDef. +// Return true if any error is found +func ValidateLabel(l RextLabelDef) (hasError bool) { + if len(l.GetName()) == 0 { + hasError = true + log.Errorln("name is required in label config") + } + if l.GetNodeSolver() == nil { + hasError = true + log.Errorln("node solver is required in label config") + } else if l.GetNodeSolver().Validate() { + hasError = true + } + return hasError +} + +// ValidateDecoder check if the decoder instance in parameter fill the required constraints +// to be considered as a valid RextDecoderDef. +// Return true if any error is found +func ValidateDecoder(d RextDecoderDef) (hasError bool) { + if len(d.GetType()) == 0 { + hasError = true + log.Errorln("type is required in decoder config") + } + return hasError +} + +// ValidateMetric check if the metric instance in parameter fill the required constraints +// to be considered as a valid RextMetricDef. 
+// Return true if any error is found +func ValidateMetric(m RextMetricDef) (hasError bool) { + if len(m.GetMetricName()) == 0 { + hasError = true + log.Errorln("name is required in metric config") + } + if len(m.GetMetricType()) == 0 { + hasError = true + log.Errorln("type is required in metric config") + } + switch m.GetMetricType() { + case KeyMetricTypeCounter, KeyMetricTypeGauge, KeyMetricTypeHistogram: + case KeyMetricTypeSummary: + hasError = true + log.Errorf("type %s is not supported yet\n", KeyMetricTypeSummary) + default: + hasError = true + log.Errorf("type should be one of %s, %s, %s or %s", KeyMetricTypeCounter, KeyMetricTypeGauge, KeyMetricTypeSummary, KeyMetricTypeHistogram) + } + if m.GetNodeSolver() == nil { + hasError = true + log.Errorln("node solver is required in metric config") + } else if m.GetNodeSolver().Validate() { + hasError = true + } + for _, label := range m.GetLabels() { + if label.Validate() { + hasError = true + } + } + return hasError +} + +// ValidateRoot check if the root instance in parameter fill the required constraints +// to be considered as a valid RextRoot. +// Return true if any error is found +func ValidateRoot(r RextRoot) (hasError bool) { + for _, srv := range r.GetServices() { + if srv.Validate() { + hasError = true + } + } + return hasError +} diff --git a/src/exporter/collector.go b/src/exporter/collector.go index e003a0e..5d90b03 100644 --- a/src/exporter/collector.go +++ b/src/exporter/collector.go @@ -7,7 +7,7 @@ import ( "github.com/prometheus/client_golang/prometheus" "github.com/simelo/rextporter/src/cache" - "github.com/simelo/rextporter/src/config" + "github.com/simelo/rextporter/src/core" "github.com/simelo/rextporter/src/scrapper" "github.com/simelo/rextporter/src/util" log "github.com/sirupsen/logrus" @@ -20,11 +20,11 @@ type MetricsCollector struct { defMetrics *defaultMetrics } -func newMetricsCollector(c cache.Cache, conf config.RootConfig) (collector *MetricsCollector, err error) { +func newMetricsCollector(c cache.Cache, conf core.RextRoot) (collector *MetricsCollector, err error) { const generalScopeErr = "error creating collector" defMetrics := newDefaultMetrics() var metrics endpointData2MetricsConsumer - if metrics, err = createMetrics(c, conf.Services, defMetrics.dataSourceResponseDurationDesc); err != nil { + if metrics, err = createMetrics(c, conf, defMetrics.dataSourceResponseDurationDesc); err != nil { errCause := fmt.Sprintln("error creating metrics: ", err.Error()) return nil, util.ErrorFromThisScope(errCause, generalScopeErr) } @@ -289,9 +289,9 @@ func (collector *MetricsCollector) Collect(ch chan<- prometheus.Metric) { } collector.defMetrics.reset() for k := range collector.metrics { - counters := filterMetricsByKind(config.KeyTypeCounter, collector.metrics[k]) - gauges := filterMetricsByKind(config.KeyTypeGauge, collector.metrics[k]) - histograms := filterMetricsByKind(config.KeyTypeHistogram, collector.metrics[k]) + counters := filterMetricsByKind(core.KeyMetricTypeCounter, collector.metrics[k]) + gauges := filterMetricsByKind(core.KeyMetricTypeGauge, collector.metrics[k]) + histograms := filterMetricsByKind(core.KeyMetricTypeHistogram, collector.metrics[k]) collectCounters(counters, collector.defMetrics, ch) collectGauges(gauges, collector.defMetrics, ch) collectHistograms(histograms, collector.defMetrics, ch) diff --git a/src/exporter/exporter.go b/src/exporter/exporter.go index 994e18c..799613a 100644 --- a/src/exporter/exporter.go +++ b/src/exporter/exporter.go @@ -13,7 +13,7 @@ import ( 
"github.com/prometheus/client_golang/prometheus" "github.com/prometheus/client_golang/prometheus/promhttp" "github.com/simelo/rextporter/src/cache" - "github.com/simelo/rextporter/src/config" + "github.com/simelo/rextporter/src/core" "github.com/simelo/rextporter/src/scrapper" "github.com/simelo/rextporter/src/util" "github.com/simelo/rextporter/src/util/metrics" @@ -82,7 +82,7 @@ func exposedMetricsMiddleware(scrappers []scrapper.FordwaderScrapper, promHandle var fDefMetrics *metrics.DefaultFordwaderMetrics // MustExportMetrics will read the config from mainConfigFile if any or use a default one. -func MustExportMetrics(handlerEndpoint string, listenPort uint16, conf config.RootConfig) (srv *http.Server) { +func MustExportMetrics(handlerEndpoint string, listenPort uint16, conf core.RextRoot) (srv *http.Server) { c := cache.NewCache() if collector, err := newMetricsCollector(c, conf); err != nil { log.WithError(err).Panicln("Can not create metrics") diff --git a/src/exporter/metric.go b/src/exporter/metric.go index 526d962..7409953 100644 --- a/src/exporter/metric.go +++ b/src/exporter/metric.go @@ -6,23 +6,28 @@ import ( "github.com/prometheus/client_golang/prometheus" "github.com/simelo/rextporter/src/cache" "github.com/simelo/rextporter/src/client" - "github.com/simelo/rextporter/src/config" + "github.com/simelo/rextporter/src/core" "github.com/simelo/rextporter/src/scrapper" "github.com/simelo/rextporter/src/util" "github.com/simelo/rextporter/src/util/metrics" + log "github.com/sirupsen/logrus" ) -func createMetricsForwaders(conf config.RootConfig, fDefMetrics *metrics.DefaultFordwaderMetrics) (fordwaderScrappers []scrapper.FordwaderScrapper, err error) { +func createMetricsForwaders(conf core.RextRoot, fDefMetrics *metrics.DefaultFordwaderMetrics) (fordwaderScrappers []scrapper.FordwaderScrapper, err error) { generalScopeErr := "can not create metrics Middleware" - services := conf.FilterServicesByType(config.ServiceTypeProxy) - fordwaderScrappers = make([]scrapper.FordwaderScrapper, len(services)) - for idxService := range services { + services := conf.GetServices() + for _, srvConf := range services { var metricFordwaderCreator client.ProxyMetricClientCreator - if metricFordwaderCreator, err = client.CreateProxyMetricClientCreator(services[idxService], fDefMetrics); err != nil { - errCause := fmt.Sprintln("error creating metric client: ", err.Error()) - return nil, util.ErrorFromThisScope(errCause, generalScopeErr) + resources := srvConf.GetResources() + for _, resConf := range resources { + if resConf.GetType() == "metrics_fordwader" { + if metricFordwaderCreator, err = client.CreateProxyMetricClientCreator(resConf, srvConf, fDefMetrics); err != nil { + errCause := fmt.Sprintln("error creating metric client: ", err.Error()) + return nil, util.ErrorFromThisScope(errCause, generalScopeErr) + } + fordwaderScrappers = append(fordwaderScrappers, scrapper.NewMetricsForwader(metricFordwaderCreator, fDefMetrics)) + } } - fordwaderScrappers[idxService] = scrapper.NewMetricsForwader(metricFordwaderCreator, fDefMetrics) } return fordwaderScrappers, nil } @@ -36,42 +41,53 @@ type constMetric struct { type endpointData2MetricsConsumer map[string][]constMetric -func createMetrics(cache cache.Cache, srvsConf []config.Service, dataSourceResponseDurationDesc *prometheus.Desc) (metrics endpointData2MetricsConsumer, err error) { +func createMetrics(cache cache.Cache, conf core.RextRoot, dataSourceResponseDurationDesc *prometheus.Desc) (metrics endpointData2MetricsConsumer, err error) { 
generalScopeErr := "can not create metrics" metrics = make(endpointData2MetricsConsumer) - for _, srvConf := range srvsConf { - for _, mConf := range srvConf.Metrics { - k := srvConf.URIToGetMetric(mConf) + for _, srvConf := range conf.GetServices() { + for _, resConf := range srvConf.GetResources() { + k := resConf.GetResourcePATH(srvConf.GetBasePath()) var m constMetric - if m, err = createConstMetric(cache, mConf, srvConf, dataSourceResponseDurationDesc); err != nil { - errCause := fmt.Sprintln(fmt.Sprintf("error creating metric client for %s metric of kind %s. ", mConf.Name, mConf.Options.Type), err.Error()) - return metrics, util.ErrorFromThisScope(errCause, generalScopeErr) + for _, mtrConf := range resConf.GetMetricDefs() { + nSolver := mtrConf.GetNodeSolver() + if m, err = createConstMetric(cache, resConf, srvConf, mtrConf, nSolver, dataSourceResponseDurationDesc); err != nil { + errCause := fmt.Sprintln(fmt.Sprintf("error creating metric client for %s metric of kind %s. ", mtrConf.GetMetricName(), mtrConf.GetMetricType()), err.Error()) + return metrics, util.ErrorFromThisScope(errCause, generalScopeErr) + } + metrics[k] = append(metrics[k], m) } - metrics[k] = append(metrics[k], m) } } return metrics, err } -func createConstMetric(cache cache.Cache, metricConf config.Metric, srvConf config.Service, dataSourceResponseDurationDesc *prometheus.Desc) (metric constMetric, err error) { - generalScopeErr := "can not create metric " + metricConf.Name +func createConstMetric(cache cache.Cache, resConf core.RextResourceDef, srvConf core.RextServiceDef, mtrConf core.RextMetricDef, nSolver core.RextNodeSolver, dataSourceResponseDurationDesc *prometheus.Desc) (metric constMetric, err error) { + generalScopeErr := "can not create metric " + mtrConf.GetMetricName() + if len(mtrConf.GetMetricName()) == 0 { + log.Errorln("metric name is required") + return metric, core.ErrKeyEmptyValue + } var ccf client.CacheableFactory - if ccf, err = client.CreateAPIRestCreator(metricConf, srvConf, dataSourceResponseDurationDesc); err != nil { + if ccf, err = client.CreateAPIRestCreator(resConf, srvConf, dataSourceResponseDurationDesc); err != nil { errCause := fmt.Sprintln("error creating metric client: ", err.Error()) return metric, util.ErrorFromThisScope(errCause, generalScopeErr) } cc := client.CatcherCreator{Cache: cache, ClientFactory: ccf} var numScrapper scrapper.Scrapper - if numScrapper, err = scrapper.NewScrapper(cc, scrapper.JSONParser{}, metricConf, srvConf); err != nil { + if numScrapper, err = scrapper.NewScrapper(cc, scrapper.JSONParser{}, resConf, srvConf, mtrConf, nSolver); err != nil { errCause := fmt.Sprintln("error creating metric client: ", err.Error()) return metric, util.ErrorFromThisScope(errCause, generalScopeErr) } - labels := append(metricConf.LabelNames(), instance4JobLabels...) + var labelsNames []string + for _, label := range mtrConf.GetLabels() { + labelsNames = append(labelsNames, label.GetName()) + } + labels := append(labelsNames, instance4JobLabels...) metric = constMetric{ - kind: metricConf.Options.Type, + kind: mtrConf.GetMetricType(), scrapper: numScrapper, // FIXME(denisacostaq@gmail.com): if you use a duplicated name can panic? 
- metricDesc: prometheus.NewDesc(metricConf.Name, metricConf.Options.Description, labels, nil), + metricDesc: prometheus.NewDesc(mtrConf.GetMetricName(), mtrConf.GetMetricDescription(), labels, nil), } return metric, err } diff --git a/src/memconfig/decoder.go b/src/memconfig/decoder.go new file mode 100644 index 0000000..337c593 --- /dev/null +++ b/src/memconfig/decoder.go @@ -0,0 +1,49 @@ +package memconfig + +import ( + "github.com/simelo/rextporter/src/core" + log "github.com/sirupsen/logrus" +) + +// Decoder implements the interface core.RextMetricDef +type Decoder struct { + mType string + options core.RextKeyValueStore +} + +// Clone make a deep copy of Decoder or return an error if any +func (d Decoder) Clone() (cD core.RextDecoderDef, err error) { + var cOpts core.RextKeyValueStore + if cOpts, err = d.GetOptions().Clone(); err != nil { + log.WithError(err).Errorln("can not clone options in decoder") + return cD, err + } + cD = NewDecoder(d.mType, cOpts) + return cD, err +} + +// GetType return the decoder type +func (d Decoder) GetType() string { + return d.mType +} + +// GetOptions return key/value pairs for extra options +func (d *Decoder) GetOptions() core.RextKeyValueStore { + if d.options == nil { + d.options = NewOptionsMap() + } + return d.options +} + +// Validate the decoder, return true if any error is found +func (d Decoder) Validate() bool { + return core.ValidateDecoder(&d) +} + +// NewDecoder create a new decoder +func NewDecoder(mType string, options core.RextKeyValueStore) *Decoder { + return &Decoder{ + mType: mType, + options: options, + } +} diff --git a/src/memconfig/decoder_test.go b/src/memconfig/decoder_test.go new file mode 100644 index 0000000..3efc3ad --- /dev/null +++ b/src/memconfig/decoder_test.go @@ -0,0 +1,86 @@ +package memconfig + +import ( + "testing" + + "github.com/simelo/rextporter/src/core" + "github.com/stretchr/testify/suite" +) + +func newDecoder(suite *decoderSuit) core.RextDecoderDef { + return NewDecoder( + suite.decoderType, + suite.options, + ) +} + +type decoderSuit struct { + suite.Suite + decoder core.RextDecoderDef + decoderType string + options core.RextKeyValueStore +} + +func (suite *decoderSuit) SetupTest() { + suite.decoderType = "dfdf" + suite.options = NewOptionsMap() + _, err := suite.options.SetString("k1", "v1") + suite.Nil(err) + _, err = suite.options.SetString("k2", "v2") + suite.Nil(err) + suite.decoder = newDecoder(suite) +} + +func TestDecoderSuit(t *testing.T) { + suite.Run(t, new(decoderSuit)) +} + +func (suite *decoderSuit) TestNewDecoderDef() { + // NOTE(denisacostaq@gmail.com): Giving + + // NOTE(denisacostaq@gmail.com): When + decoderDef := newDecoder(suite) + opts, err := suite.options.Clone() + suite.Nil(err) + _, err = suite.options.SetString("k1", "v2") + suite.Nil(err) + + // NOTE(denisacostaq@gmail.com): Assert + suite.Equal(suite.decoderType, decoderDef.GetType()) + suite.Equal(suite.options, decoderDef.GetOptions()) + suite.NotEqual(opts, decoderDef.GetOptions()) +} + +func (suite *decoderSuit) TestInitializeEmptyOptionsInFly() { + // NOTE(denisacostaq@gmail.com): Giving + + // NOTE(denisacostaq@gmail.com): When + decoderDef := Decoder{} + + // NOTE(denisacostaq@gmail.com): Assert + suite.NotNil(decoderDef.GetOptions()) +} + +func (suite *decoderSuit) TestValidationClonedShouldBeValid() { + // NOTE(denisacostaq@gmail.com): Giving + + // NOTE(denisacostaq@gmail.com): When + cDecoderConf, err := suite.decoder.Clone() + suite.Nil(err) + suite.Equal(suite.decoder, cDecoderConf) + hasError := 
cDecoderConf.Validate() + + // NOTE(denisacostaq@gmail.com): Assert + suite.False(hasError) +} + +func (suite *decoderSuit) TestValidationTypeShouldNotBeEmpty() { + // NOTE(denisacostaq@gmail.com): Giving + decoderDef := NewDecoder("", nil) + + // NOTE(denisacostaq@gmail.com): When + hasError := decoderDef.Validate() + + // NOTE(denisacostaq@gmail.com): Assert + suite.True(hasError) +} diff --git a/src/memconfig/http_auth.go b/src/memconfig/http_auth.go new file mode 100644 index 0000000..803f528 --- /dev/null +++ b/src/memconfig/http_auth.go @@ -0,0 +1,56 @@ +package memconfig + +import ( + "github.com/simelo/rextporter/src/core" + log "github.com/sirupsen/logrus" +) + +// HTTPAuth implements the core.RextAuthDef interface +type HTTPAuth struct { + authType string + endpoint string + options core.RextKeyValueStore +} + +// Clone make a deep copy of HTTPAuth or return an error if any +func (auth HTTPAuth) Clone() (cAuth core.RextAuthDef, err error) { + var cOpts core.RextKeyValueStore + if cOpts, err = auth.GetOptions().Clone(); err != nil { + log.WithError(err).Errorln("Can not clone options in HTTPAuth") + return cAuth, err + } + cAuth = NewHTTPAuth(auth.authType, auth.endpoint, cOpts) + return cAuth, err +} + +// SetAuthType set the auth type +func (auth *HTTPAuth) SetAuthType(authType string) { + auth.authType = authType +} + +// GetAuthType return the auth type +func (auth HTTPAuth) GetAuthType() string { + return auth.authType +} + +// GetOptions return key/value pairs for extra options +func (auth *HTTPAuth) GetOptions() core.RextKeyValueStore { + if auth.options == nil { + auth.options = NewOptionsMap() + } + return auth.options +} + +// Validate the auth, return true if any error is found +func (auth HTTPAuth) Validate() (haveError bool) { + return core.ValidateAuth(&auth) +} + +// NewHTTPAuth create an auth +func NewHTTPAuth(aType, url string, options core.RextKeyValueStore) core.RextAuthDef { + return &HTTPAuth{ + authType: aType, + endpoint: url, + options: options, + } +} diff --git a/src/memconfig/http_auth_test.go b/src/memconfig/http_auth_test.go new file mode 100644 index 0000000..daabe45 --- /dev/null +++ b/src/memconfig/http_auth_test.go @@ -0,0 +1,172 @@ +package memconfig + +import ( + "testing" + + "github.com/simelo/rextporter/src/core" + "github.com/stretchr/testify/suite" +) + +func newAuth(suite *authConfSuit) core.RextAuthDef { + return NewHTTPAuth(suite.authType, suite.authURL, suite.options) +} + +type authConfSuit struct { + suite.Suite + authConf core.RextAuthDef + authType, authURL string + options OptionsMap +} + +func (suite *authConfSuit) SetupTest() { + suite.options = NewOptionsMap() + _, err := suite.options.SetString(core.OptKeyRextAuthDefTokenHeaderKey, "v1") + suite.Nil(err) + _, err = suite.options.SetString(core.OptKeyRextAuthDefTokenGenEndpoint, "v2") + suite.Nil(err) + _, err = suite.options.SetString(core.OptKeyRextAuthDefTokenKeyFromEndpoint, "v3") + suite.Nil(err) + suite.authType = "CSRF" + suite.authURL = "http://localhost:9000/hosted_in/auth" + suite.authConf = newAuth(suite) +} + +func TestAuthConfSuit(t *testing.T) { + suite.Run(t, new(authConfSuit)) +} + +func (suite *authConfSuit) TestNewHTTPAuth() { + // NOTE(denisacostaq@gmail.com): Giving + + // NOTE(denisacostaq@gmail.com): When + authConf := newAuth(suite) + opts, err := suite.options.Clone() + suite.Nil(err) + _, err = suite.options.SetString("k1", "v2") + + // NOTE(denisacostaq@gmail.com): Assert + suite.Equal(suite.authType, authConf.GetAuthType()) + suite.Equal(suite.options,
authConf.GetOptions()) + suite.NotEqual(opts, authConf.GetOptions()) +} + +func (suite *authConfSuit) TestAbleToSetType() { + // NOTE(denisacostaq@gmail.com): Giving + orgAuthType := suite.authConf.GetAuthType() + authType := "fgfg78" + suite.authConf.SetAuthType(authType) + + // NOTE(denisacostaq@gmail.com): When + authType2 := suite.authConf.GetAuthType() + + // NOTE(denisacostaq@gmail.com): Assert + suite.Equal(authType, authType2) + suite.NotEqual(orgAuthType, authType2) +} + +func (suite *authConfSuit) TestInitializeEmptyOptionsInFly() { + // NOTE(denisacostaq@gmail.com): Giving + + // NOTE(denisacostaq@gmail.com): When + authDef := HTTPAuth{} + + // NOTE(denisacostaq@gmail.com): Assert + suite.NotNil(authDef.GetOptions()) +} + +func (suite *authConfSuit) TestValidationClonedShouldBeValid() { + // NOTE(denisacostaq@gmail.com): Giving + + // NOTE(denisacostaq@gmail.com): When + cAuthConf, err := suite.authConf.Clone() + suite.Nil(err) + suite.Equal(suite.authConf, cAuthConf) + hasError := cAuthConf.Validate() + + // NOTE(denisacostaq@gmail.com): Assert + suite.False(hasError) +} + +func (suite *authConfSuit) TestValidationTypeShouldNotBeEmpty() { + // NOTE(denisacostaq@gmail.com): Giving + authDef, err := suite.authConf.Clone() + suite.Nil(err) + + // NOTE(denisacostaq@gmail.com): When + authDef.SetAuthType("") + hasError := authDef.Validate() + + // NOTE(denisacostaq@gmail.com): Assert + suite.True(hasError) +} + +func (suite *authConfSuit) TestValidationTokenHeaderKeyShouldNotBeEmptyInCSRF() { + // NOTE(denisacostaq@gmail.com): Giving + authDef, err := suite.authConf.Clone() + suite.Nil(err) + + // NOTE(denisacostaq@gmail.com): When + opts := authDef.GetOptions() + pe, err := opts.SetString(core.OptKeyRextAuthDefTokenHeaderKey, "") + suite.True(pe) + suite.Nil(err) + hasError := authDef.Validate() + + // NOTE(denisacostaq@gmail.com): Assert + suite.True(hasError) +} + +func (suite *authConfSuit) TestValidationTokenGenEndpointShouldNotBeEmptyInCSRF() { + // NOTE(denisacostaq@gmail.com): Giving + authDef, err := suite.authConf.Clone() + suite.Nil(err) + + // NOTE(denisacostaq@gmail.com): When + opts := authDef.GetOptions() + pe, err := opts.SetString(core.OptKeyRextAuthDefTokenGenEndpoint, "") + suite.True(pe) + suite.Nil(err) + hasError := authDef.Validate() + + // NOTE(denisacostaq@gmail.com): Assert + suite.True(hasError) +} + +func (suite *authConfSuit) TestValidationTokenKeyFromEndpointShouldNotBeEmptyInCSRF() { + // NOTE(denisacostaq@gmail.com): Giving + authDef, err := suite.authConf.Clone() + suite.Nil(err) + + // NOTE(denisacostaq@gmail.com): When + opts := authDef.GetOptions() + pe, err := opts.SetString(core.OptKeyRextAuthDefTokenKeyFromEndpoint, "") + suite.True(pe) + suite.Nil(err) + hasError := authDef.Validate() + + // NOTE(denisacostaq@gmail.com): Assert + suite.True(hasError) +} + +func (suite *authConfSuit) TestValidationTokenValsCanBeEmptyInNotCSRF() { + // NOTE(denisacostaq@gmail.com): Giving + authDef, err := suite.authConf.Clone() + suite.Nil(err) + authDef.SetAuthType("tt3") + + // NOTE(denisacostaq@gmail.com): When + opts := authDef.GetOptions() + pe, err := opts.SetString(core.OptKeyRextAuthDefTokenKeyFromEndpoint, "") + suite.True(pe) + suite.Nil(err) + pe, err = opts.SetString(core.OptKeyRextAuthDefTokenGenEndpoint, "") + suite.True(pe) + suite.Nil(err) + pe, err = opts.SetString(core.OptKeyRextAuthDefTokenHeaderKey, "") + suite.True(pe) + suite.Nil(err) + hasError := authDef.Validate() + + // NOTE(denisacostaq@gmail.com): Assert + suite.False(hasError) +} 
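For orientation, a minimal sketch, outside the patch, of how the pieces above are meant to compose: the CSRF option keys declared in src/core/config.go feed an HTTPAuth built with memconfig.NewHTTPAuth, and Validate (backed by core.ValidateAuth) reports true when a required option is missing. The header key, endpoint paths and token values below are placeholders, not taken from the patch.

package main

import (
	"fmt"

	"github.com/simelo/rextporter/src/core"
	"github.com/simelo/rextporter/src/memconfig"
)

func main() {
	opts := memconfig.NewOptionsMap()
	// A CSRF auth needs the token header key, the endpoint that generates the
	// token and the key under which the token comes back from that endpoint.
	if _, err := opts.SetString(core.OptKeyRextAuthDefTokenHeaderKey, "X-CSRF-Token"); err != nil {
		panic(err)
	}
	if _, err := opts.SetString(core.OptKeyRextAuthDefTokenGenEndpoint, "/api/v1/csrf"); err != nil {
		panic(err)
	}
	if _, err := opts.SetString(core.OptKeyRextAuthDefTokenKeyFromEndpoint, "csrf_token"); err != nil {
		panic(err)
	}
	auth := memconfig.NewHTTPAuth(core.AuthTypeCSRF, "http://localhost:9000/hosted_in/auth", opts)
	// Validate returns true when something required is missing, which is the
	// same contract the CSRF test cases above exercise by blanking each option.
	fmt.Println("auth config has errors:", auth.Validate())
}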
diff --git a/src/memconfig/label_def.go b/src/memconfig/label_def.go new file mode 100644 index 0000000..16e7e10 --- /dev/null +++ b/src/memconfig/label_def.go @@ -0,0 +1,58 @@ +package memconfig + +import ( + "github.com/simelo/rextporter/src/core" + log "github.com/sirupsen/logrus" +) + +// LabelDef implements the interface core.RextLabelDef +type LabelDef struct { + name string + nodeSolver core.RextNodeSolver +} + +// Clone make a deep copy of LabelDef or return an error if any +func (l LabelDef) Clone() (cL core.RextLabelDef, err error) { + var cNs core.RextNodeSolver + if l.GetNodeSolver() != nil { + if cNs, err = l.GetNodeSolver().Clone(); err != nil { + log.WithError(err).Errorln("can not clone node solver in label") + return cL, err + } + } + cL = NewLabelDef(l.name, cNs) + return cL, err +} + +// GetName return the label name +func (l LabelDef) GetName() string { + return l.name +} + +// SetName set the name for the label +func (l *LabelDef) SetName(name string) { + l.name = name +} + +// GetNodeSolver return the solver for the label value +func (l LabelDef) GetNodeSolver() core.RextNodeSolver { + return l.nodeSolver +} + +// SetNodeSolver set the solver for the label value +func (l *LabelDef) SetNodeSolver(nodeSolver core.RextNodeSolver) { + l.nodeSolver = nodeSolver +} + +// Validate the label, return true if any error is found +func (l LabelDef) Validate() bool { + return core.ValidateLabel(&l) +} + +// NewLabelDef create a new label definition +func NewLabelDef(name string, nodeSolver core.RextNodeSolver) *LabelDef { + return &LabelDef{ + name: name, + nodeSolver: nodeSolver, + } +} diff --git a/src/memconfig/label_def_test.go b/src/memconfig/label_def_test.go new file mode 100644 index 0000000..386d64f --- /dev/null +++ b/src/memconfig/label_def_test.go @@ -0,0 +1,133 @@ +package memconfig + +import ( + "testing" + + "github.com/simelo/rextporter/src/core" + "github.com/simelo/rextporter/src/core/mocks" + "github.com/stretchr/testify/suite" +) + +func newLabelDef(suite *labelDefConfSuit) core.RextLabelDef { + return NewLabelDef( + suite.name, + suite.nodeSolver, + ) +} + +type labelDefConfSuit struct { + suite.Suite + labelDef core.RextLabelDef + name string + nodeSolver core.RextNodeSolver +} + +func (suite *labelDefConfSuit) SetupTest() { + suite.name = "MySupperLabel" + suite.nodeSolver = NewNodeSolver("tr", "pat", NewOptionsMap()) + suite.labelDef = newLabelDef(suite) +} + +func TestLabelDefConfSuit(t *testing.T) { + suite.Run(t, new(labelDefConfSuit)) +} + +func (suite *labelDefConfSuit) TestNewLabelDef() { + // NOTE(denisacostaq@gmail.com): Giving + + // NOTE(denisacostaq@gmail.com): When + labelDef := newLabelDef(suite) + + // NOTE(denisacostaq@gmail.com): Assert + suite.Equal(suite.name, labelDef.GetName()) + suite.Equal(suite.nodeSolver, labelDef.GetNodeSolver()) +} + +func (suite *labelDefConfSuit) TestAbleToSetName() { + // NOTE(denisacostaq@gmail.com): Giving + orgName := suite.labelDef.GetName() + name := "fgfg78" + suite.labelDef.SetName(name) + + // NOTE(denisacostaq@gmail.com): When + name2 := suite.labelDef.GetName() + + // NOTE(denisacostaq@gmail.com): Assert + suite.Equal(name, name2) + suite.NotEqual(orgName, name2) +} + +func (suite *labelDefConfSuit) TestAbleToSetNodeSolver() { + // NOTE(denisacostaq@gmail.com): Giving + orgNs := suite.labelDef.GetNodeSolver() + ns := &NodeSolver{MType: "fee"} + suite.labelDef.SetNodeSolver(ns) + + // NOTE(denisacostaq@gmail.com): When + ns2 := suite.labelDef.GetNodeSolver() + + // NOTE(denisacostaq@gmail.com): Assert + 
suite.Equal(ns, ns2) + suite.NotEqual(orgNs, ns2) +} + +func (suite *labelDefConfSuit) TestValidationClonedShouldBeValid() { + // NOTE(denisacostaq@gmail.com): Giving + + // NOTE(denisacostaq@gmail.com): When + cLabelDef, err := suite.labelDef.Clone() + suite.Nil(err) + suite.Equal(suite.labelDef, cLabelDef) + hasError := cLabelDef.Validate() + + // NOTE(denisacostaq@gmail.com): Assert + suite.False(hasError) +} + +func (suite *labelDefConfSuit) TestValidationNameShouldNotBeEmpty() { + // NOTE(denisacostaq@gmail.com): Giving + cLabelDef, err := suite.labelDef.Clone() + suite.Nil(err) + setUpFakeValidationOn3rdPartyOverLabel(cLabelDef) + + // NOTE(denisacostaq@gmail.com): When + cLabelDef.SetName("") + hasError := cLabelDef.Validate() + + // NOTE(denisacostaq@gmail.com): Assert + suite.True(hasError) +} + +func (suite *labelDefConfSuit) TestValidationNodeSolverShouldNotBeEmpty() { + // NOTE(denisacostaq@gmail.com): Giving + cLabelDef, err := suite.labelDef.Clone() + suite.Nil(err) + + // NOTE(denisacostaq@gmail.com): When + cLabelDef.SetNodeSolver(nil) + hasError := cLabelDef.Validate() + + // NOTE(denisacostaq@gmail.com): Assert + suite.True(hasError) +} + +func (suite *labelDefConfSuit) TestValidationShouldGoDownTroughFields() { + // NOTE(denisacostaq@gmail.com): Giving + cLabelConf, err := suite.labelDef.Clone() + suite.Nil(err) + mockNodeSolver := new(mocks.RextNodeSolver) + mockNodeSolver.On("Validate").Return(false) + cLabelConf.SetNodeSolver(mockNodeSolver) + + // NOTE(denisacostaq@gmail.com): When + cLabelConf.Validate() + + // NOTE(denisacostaq@gmail.com): Assert + mockNodeSolver.AssertCalled(suite.T(), "Validate") +} + +func setUpFakeValidationOn3rdPartyOverLabel(labelDef core.RextLabelDef) { + nodeSolverStub := new(mocks.RextNodeSolver) + nodeSolverStub.On("Validate").Return(false) + labelDef.SetNodeSolver(nodeSolverStub) +} diff --git a/src/memconfig/metric_def.go b/src/memconfig/metric_def.go new file mode 100644 index 0000000..9418af2 --- /dev/null +++ b/src/memconfig/metric_def.go @@ -0,0 +1,118 @@ +package memconfig + +import ( + "github.com/simelo/rextporter/src/core" + log "github.com/sirupsen/logrus" +) + +// MetricDef implements the interface core.RextMetricDef +type MetricDef struct { + name string + mType string + nodeSolver core.RextNodeSolver + description string + labels []core.RextLabelDef + options core.RextKeyValueStore +} + +// Clone make a deep copy of MetricDef or return an error if any +func (m MetricDef) Clone() (cM core.RextMetricDef, err error) { + var cNs core.RextNodeSolver + if m.GetNodeSolver() != nil { + if cNs, err = m.GetNodeSolver().Clone(); err != nil { + log.WithError(err).Errorln("can not clone node solver in metric") + return cM, err + } + } + var cLabels []core.RextLabelDef + for _, label := range m.labels { + var cLabel core.RextLabelDef + if cLabel, err = label.Clone(); err != nil { + log.WithError(err).Errorln("can not clone labels in metric") + return cM, err + } + cLabels = append(cLabels, cLabel) + } + var cOpts core.RextKeyValueStore + if cOpts, err = m.GetOptions().Clone(); err != nil { + log.WithError(err).Errorln("can not clone options in metric") + return cM, err + } + cM = NewMetricDef(m.GetMetricName(), m.GetMetricType(), m.GetMetricDescription(), cNs, cOpts, cLabels) + return cM, err +} + +// GetMetricName return the metric name +func (m MetricDef) GetMetricName() string { + return m.name +} + +// GetMetricType return the metric type, one of: gauge, counter, histogram or summary +func (m MetricDef) GetMetricType() string { + 
return m.mType +} + +// GetNodeSolver return solver type +func (m MetricDef) GetNodeSolver() core.RextNodeSolver { + return m.nodeSolver +} + +// SetNodeSolver set the node solver +func (m *MetricDef) SetNodeSolver(nodeSolver core.RextNodeSolver) { + m.nodeSolver = nodeSolver +} + +// GetMetricDescription return the metric description +func (m MetricDef) GetMetricDescription() string { + return m.description +} + +// SetMetricName can set the metric name +func (m *MetricDef) SetMetricName(name string) { + m.name = name +} + +// SetMetricType can set the metric type +func (m *MetricDef) SetMetricType(tp string) { + m.mType = tp +} + +// SetMetricDescription can set the metric description +func (m *MetricDef) SetMetricDescription(description string) { + m.description = description +} + +// GetLabels return labels +func (m MetricDef) GetLabels() []core.RextLabelDef { + return m.labels +} + +// AddLabel receive label to be append to the current list +func (m *MetricDef) AddLabel(label core.RextLabelDef) { + m.labels = append(m.labels, label) +} + +// GetOptions return key/value pairs for extra options +func (m *MetricDef) GetOptions() core.RextKeyValueStore { + if m.options == nil { + m.options = NewOptionsMap() + } + return m.options +} + +// Validate the metric, return true if any error is found +func (m MetricDef) Validate() bool { + return core.ValidateMetric(&m) +} + +// NewMetricDef create a new metric definition +func NewMetricDef(name, mType, description string, nodeSolver core.RextNodeSolver, options core.RextKeyValueStore, labels []core.RextLabelDef) *MetricDef { + return &MetricDef{ + name: name, + mType: mType, + nodeSolver: nodeSolver, + description: description, + labels: labels, + options: options, + } +} diff --git a/src/memconfig/metric_def_test.go b/src/memconfig/metric_def_test.go new file mode 100644 index 0000000..ca7d191 --- /dev/null +++ b/src/memconfig/metric_def_test.go @@ -0,0 +1,250 @@ +package memconfig + +import ( + "testing" + + "github.com/simelo/rextporter/src/core" + "github.com/simelo/rextporter/src/core/mocks" + "github.com/stretchr/testify/suite" +) + +func newMetricDef(suite *metricDefConfSuit) core.RextMetricDef { + return NewMetricDef( + suite.metricName, + suite.metricType, + suite.metricDescription, + suite.nodeSolver, + suite.metricOptions, + suite.metricLabels, + ) +} + +type metricDefConfSuit struct { + suite.Suite + metricDef core.RextMetricDef + metricName, metricType, metricDescription string + nodeSolver core.RextNodeSolver + metricLabels []core.RextLabelDef + metricOptions core.RextKeyValueStore +} + +func (suite *metricDefConfSuit) SetupTest() { + suite.metricName = "MySupperMetric" + suite.metricType = core.KeyMetricTypeCounter + suite.metricDescription = "This is all about ..." 
+ suite.nodeSolver = &NodeSolver{nodePath: "sds"} + suite.nodeSolver.GetOptions() + suite.metricLabels = nil + suite.metricOptions = NewOptionsMap() + _, err := suite.metricOptions.SetString("k1", "v1") + suite.Nil(err) + _, err = suite.metricOptions.SetString("k2", "v2") + suite.Nil(err) + suite.metricDef = newMetricDef(suite) +} + +func TestMetricDefConfSuit(t *testing.T) { + suite.Run(t, new(metricDefConfSuit)) +} + +func (suite *metricDefConfSuit) TestNewMetricDef() { + // NOTE(denisacostaq@gmail.com): Giving + + // NOTE(denisacostaq@gmail.com): When + metricDef := newMetricDef(suite) + opts, err := suite.metricOptions.Clone() + suite.Nil(err) + _, err = suite.metricOptions.SetString("k1", "v2") + suite.Nil(err) + + // NOTE(denisacostaq@gmail.com): Assert + suite.Equal(suite.metricName, metricDef.GetMetricName()) + suite.Equal(suite.metricType, metricDef.GetMetricType()) + suite.Equal(suite.metricDescription, metricDef.GetMetricDescription()) + suite.Equal(suite.nodeSolver, metricDef.GetNodeSolver()) + suite.Equal(suite.metricOptions, metricDef.GetOptions()) + suite.NotEqual(opts, metricDef.GetOptions()) + suite.Equal(suite.metricLabels, metricDef.GetLabels()) +} + +func (suite *metricDefConfSuit) TestAbleToSetName() { + // NOTE(denisacostaq@gmail.com): Giving + orgName := suite.metricDef.GetMetricName() + name := "fgfg78" + suite.metricDef.SetMetricName(name) + + // NOTE(denisacostaq@gmail.com): When + name2 := suite.metricDef.GetMetricName() + + // NOTE(denisacostaq@gmail.com): Assert + suite.Equal(name, name2) + suite.NotEqual(orgName, name2) +} + +func (suite *metricDefConfSuit) TestAbleToSetType() { + // NOTE(denisacostaq@gmail.com): Giving + orgT := suite.metricDef.GetMetricType() + tp := "fgfg78" + suite.metricDef.SetMetricType(tp) + + // NOTE(denisacostaq@gmail.com): When + tp2 := suite.metricDef.GetMetricType() + + // NOTE(denisacostaq@gmail.com): Assert + suite.Equal(tp, tp2) + suite.NotEqual(orgT, tp2) +} + +func (suite *metricDefConfSuit) TestAbleToSetDescription() { + // NOTE(denisacostaq@gmail.com): Giving + orgDescription := suite.metricDef.GetMetricDescription() + description := "fgfg78" + suite.metricDef.SetMetricDescription(description) + + // NOTE(denisacostaq@gmail.com): When + description2 := suite.metricDef.GetMetricDescription() + + // NOTE(denisacostaq@gmail.com): Assert + suite.Equal(description, description2) + suite.NotEqual(orgDescription, description2) +} + +func (suite *metricDefConfSuit) TestAbleToSetNodeSolver() { + // NOTE(denisacostaq@gmail.com): Giving + orgNs := suite.metricDef.GetMetricName() + ns := &NodeSolver{nodePath: "dfdfd"} + suite.metricDef.SetNodeSolver(ns) + + // NOTE(denisacostaq@gmail.com): When + ns2 := suite.metricDef.GetNodeSolver() + + // NOTE(denisacostaq@gmail.com): Assert + suite.Equal(ns, ns2) + suite.NotEqual(orgNs, ns2) +} +func (suite *metricDefConfSuit) TestAbleToAddLabel() { + // NOTE(denisacostaq@gmail.com): Giving + orgLabels := suite.metricDef.GetLabels() + + // NOTE(denisacostaq@gmail.com): When + suite.metricDef.AddLabel(&LabelDef{}) + labels2 := suite.metricDef.GetLabels() + + // NOTE(denisacostaq@gmail.com): Assert + suite.Equal(len(orgLabels)+1, len(labels2)) +} + +func (suite *metricDefConfSuit) TestInitializeEmptyOptionsInFly() { + // NOTE(denisacostaq@gmail.com): Giving + + // NOTE(denisacostaq@gmail.com): When + metricDef := MetricDef{} + + // NOTE(denisacostaq@gmail.com): Assert + suite.NotNil(metricDef.GetOptions()) +} + +func (suite *metricDefConfSuit) TestValidationClonedShouldBeValid() { + // 
NOTE(denisacostaq@gmail.com): Giving + + // NOTE(denisacostaq@gmail.com): When + cMetricDef, err := suite.metricDef.Clone() + suite.Nil(err) + suite.Equal(suite.metricDef, cMetricDef) + setUpFakeValidationOn3rdPartyOverMetric(cMetricDef) + hasError := cMetricDef.Validate() + + // NOTE(denisacostaq@gmail.com): Assert + suite.False(hasError) +} + +func (suite *metricDefConfSuit) TestValidationNameShouldNotBeEmpty() { + // NOTE(denisacostaq@gmail.com): Giving + cMetricDef, err := suite.metricDef.Clone() + suite.Nil(err) + setUpFakeValidationOn3rdPartyOverMetric(cMetricDef) + + // NOTE(denisacostaq@gmail.com): When + cMetricDef.SetMetricName("") + hasError := cMetricDef.Validate() + + // NOTE(denisacostaq@gmail.com): Assert + suite.True(hasError) +} + +func (suite *metricDefConfSuit) TestValidationTypeShouldNotBeEmpty() { + // NOTE(denisacostaq@gmail.com): Giving + cMetricDef, err := suite.metricDef.Clone() + suite.Nil(err) + setUpFakeValidationOn3rdPartyOverMetric(cMetricDef) + + // NOTE(denisacostaq@gmail.com): When + cMetricDef.SetMetricType("") + hasError := cMetricDef.Validate() + + // NOTE(denisacostaq@gmail.com): Assert + suite.True(hasError) +} + +func (suite *metricDefConfSuit) TestValidationTypeShouldBeValid() { + // NOTE(denisacostaq@gmail.com): Giving + cMetricDef, err := suite.metricDef.Clone() + suite.Nil(err) + setUpFakeValidationOn3rdPartyOverMetric(cMetricDef) + + // NOTE(denisacostaq@gmail.com): When + cMetricDef.SetMetricType("fgfgfg") + hasError := cMetricDef.Validate() + + // NOTE(denisacostaq@gmail.com): Assert + suite.True(hasError) +} + +func (suite *metricDefConfSuit) TestValidationNodeSolverShouldNotBeNil() { + // NOTE(denisacostaq@gmail.com): Giving + cMetricDef, err := suite.metricDef.Clone() + suite.Nil(err) + setUpFakeValidationOn3rdPartyOverMetric(cMetricDef) + + // NOTE(denisacostaq@gmail.com): When + cMetricDef.SetNodeSolver(nil) + hasError := cMetricDef.Validate() + + // NOTE(denisacostaq@gmail.com): Assert + suite.True(hasError) +} + +func (suite *metricDefConfSuit) TestValidationShouldGoDownTroughFields() { + // NOTE(denisacostaq@gmail.com): Giving + cMetricDef, err := suite.metricDef.Clone() + suite.Nil(err) + mockNodeSolver := new(mocks.RextNodeSolver) + mockNodeSolver.On("Validate").Return(false) + cMetricDef.SetNodeSolver(mockNodeSolver) + mockLabel1 := new(mocks.RextLabelDef) + mockLabel1.On("Validate").Return(false) + cMetricDef.AddLabel(mockLabel1) + mockLabel2 := new(mocks.RextLabelDef) + mockLabel2.On("Validate").Return(false) + cMetricDef.AddLabel(mockLabel2) + + // NOTE(denisacostaq@gmail.com): When + cMetricDef.Validate() + + // NOTE(denisacostaq@gmail.com): Assert + mockNodeSolver.AssertCalled(suite.T(), "Validate") + mockLabel1.AssertCalled(suite.T(), "Validate") + mockLabel2.AssertCalled(suite.T(), "Validate") +} + +func setUpFakeValidationOn3rdPartyOverMetric(metricDef core.RextMetricDef) { + nodeSolverStub := new(mocks.RextNodeSolver) + nodeSolverStub.On("Validate").Return(false) + labelStub1 := new(mocks.RextLabelDef) + labelStub1.On("Validate").Return(false) + labelStub2 := new(mocks.RextLabelDef) + labelStub2.On("Validate").Return(false) + metricDef.SetNodeSolver(nodeSolverStub) + metricDef.AddLabel(labelStub1) + metricDef.AddLabel(labelStub2) +} diff --git a/src/memconfig/node_solver.go b/src/memconfig/node_solver.go new file mode 100644 index 0000000..18e6daa --- /dev/null +++ b/src/memconfig/node_solver.go @@ -0,0 +1,62 @@ +package memconfig + +import ( + "github.com/simelo/rextporter/src/core" + log "github.com/sirupsen/logrus" +) + 
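Before the NodeSolver implementation that follows, a minimal sketch, outside the patch, of how it composes with the LabelDef and MetricDef types above: a metric reaches its sample through a node solver, and each label carries its own solver for the label value. The metric name, description, label name and the /coin path are illustrative only; the /blockchain/head/seq path is borrowed from the RextNodeSolver interface comment.

package main

import (
	"fmt"

	"github.com/simelo/rextporter/src/core"
	"github.com/simelo/rextporter/src/memconfig"
)

func main() {
	// Solver for the metric sample, a jsonPath into the decoded resource.
	seqSolver := memconfig.NewNodeSolver(core.RextNodeSolverTypeJSONPath, "/blockchain/head/seq", memconfig.NewOptionsMap())
	// A label resolves its value through its own node solver.
	coinLabel := memconfig.NewLabelDef("coin", memconfig.NewNodeSolver(core.RextNodeSolverTypeJSONPath, "/coin", memconfig.NewOptionsMap()))
	// The metric definition ties name, type, description, solver and labels together.
	seqMetric := memconfig.NewMetricDef(
		"head_seq",
		core.KeyMetricTypeGauge,
		"Sequence number of the head block",
		seqSolver,
		memconfig.NewOptionsMap(),
		[]core.RextLabelDef{coinLabel},
	)
	// Validate (core.ValidateMetric underneath) returns true when something is
	// wrong, e.g. an empty metric name or a missing node solver.
	fmt.Println("metric config has errors:", seqMetric.Validate())
}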
+// NodeSolver implements the interface core.RextNodeSolver +type NodeSolver struct { + // FIXME(denisacostaq@gmail.com): lowercase + MType string + nodePath string + options core.RextKeyValueStore +} + +// Clone make a deep copy of NodeSolver or return an error if any +func (ns NodeSolver) Clone() (cNs core.RextNodeSolver, err error) { + var cOpts core.RextKeyValueStore + if cOpts, err = ns.GetOptions().Clone(); err != nil { + log.WithError(err).Errorln("can not clone options in node solver") + return cNs, err + } + cNs = NewNodeSolver(ns.MType, ns.nodePath, cOpts) + return cNs, err +} + +// GetType return solver type +func (ns NodeSolver) GetType() string { + return ns.MType +} + +// SetNodePath set the node path +func (ns *NodeSolver) SetNodePath(nodePath string) { + ns.nodePath = nodePath +} + +// GetNodePath return the node path +func (ns NodeSolver) GetNodePath() string { + return ns.nodePath +} + +// GetOptions return key/value pairs for extra options +func (ns *NodeSolver) GetOptions() core.RextKeyValueStore { + if ns.options == nil { + ns.options = NewOptionsMap() + } + return ns.options +} + +// Validate the node solver, return true if any error is found +func (ns NodeSolver) Validate() bool { + return core.ValidateNodeSolver(&ns) +} + +// NewNodeSolver create a new node solver +func NewNodeSolver(mType, nodePath string, options core.RextKeyValueStore) core.RextNodeSolver { + return &NodeSolver{ + MType: mType, + nodePath: nodePath, + options: options, + } +} diff --git a/src/memconfig/node_solver_test.go b/src/memconfig/node_solver_test.go new file mode 100644 index 0000000..c4633cc --- /dev/null +++ b/src/memconfig/node_solver_test.go @@ -0,0 +1,95 @@ +package memconfig + +import ( + "testing" + + "github.com/simelo/rextporter/src/core" + "github.com/stretchr/testify/suite" +) + +func newNodeSolver(suite *nodeSolverSuit) core.RextNodeSolver { + return NewNodeSolver( + suite.nodeSolverType, + suite.nodePath, + suite.options, + ) +} + +type nodeSolverSuit struct { + suite.Suite + nodeSolver core.RextNodeSolver + nodeSolverType, nodePath string + options core.RextKeyValueStore +} + +func (suite *nodeSolverSuit) SetupTest() { + suite.nodeSolverType = core.RextNodeSolverTypeJSONPath + suite.nodePath = "/tmp/a" + suite.options = NewOptionsMap() + _, err := suite.options.SetString("k1", "v1") + suite.Nil(err) + _, err = suite.options.SetString("k2", "v2") + suite.Nil(err) + suite.nodeSolver = newNodeSolver(suite) +} + +func TestNodeSolverSuit(t *testing.T) { + suite.Run(t, new(nodeSolverSuit)) +} + +func (suite *nodeSolverSuit) TestNewNodeSolver() { + // NOTE(denisacostaq@gmail.com): Giving + + // NOTE(denisacostaq@gmail.com): When + nodeSolver := newNodeSolver(suite) + opts, err := suite.options.Clone() + suite.Nil(err) + _, err = suite.options.SetString("k1", "v2") + suite.Nil(err) + + // NOTE(denisacostaq@gmail.com): Assert + suite.Equal(suite.nodeSolverType, nodeSolver.GetType()) + suite.Equal(suite.nodePath, nodeSolver.GetNodePath()) + suite.Equal(suite.options, nodeSolver.GetOptions()) + suite.NotEqual(opts, nodeSolver.GetOptions()) +} + +func (suite *nodeSolverSuit) TestAbleToSetNodePath() { + // NOTE(denisacostaq@gmail.com): Giving + orgNodePath := suite.nodeSolver.GetNodePath() + nodePath := "fgfg78" + suite.nodeSolver.SetNodePath(nodePath) + + // NOTE(denisacostaq@gmail.com): When + nodePath2 := suite.nodeSolver.GetNodePath() + + // NOTE(denisacostaq@gmail.com): Assert + suite.Equal(nodePath, nodePath2) + suite.NotEqual(orgNodePath, nodePath2) +} + +func (suite 
*nodeSolverSuit) TestValidationClonedShouldBeValid() { + // NOTE(denisacostaq@gmail.com): Giving + + // NOTE(denisacostaq@gmail.com): When + cNodeSolver, err := suite.nodeSolver.Clone() + suite.Nil(err) + suite.Equal(suite.nodeSolver, cNodeSolver) + hasError := cNodeSolver.Validate() + + // NOTE(denisacostaq@gmail.com): Assert + suite.False(hasError) +} + +func (suite *nodeSolverSuit) TestValidationTypeShouldNotBeEmpty() { + // NOTE(denisacostaq@gmail.com): Giving + nodeSolver, err := suite.nodeSolver.Clone() + suite.Nil(err) + + // NOTE(denisacostaq@gmail.com): When + nodeSolver.SetNodePath("") + hasError := nodeSolver.Validate() + + // NOTE(denisacostaq@gmail.com): Assert + suite.True(hasError) +} diff --git a/src/memconfig/resource_def.go b/src/memconfig/resource_def.go new file mode 100644 index 0000000..58b236d --- /dev/null +++ b/src/memconfig/resource_def.go @@ -0,0 +1,128 @@ +package memconfig + +import ( + "github.com/simelo/rextporter/src/core" + log "github.com/sirupsen/logrus" +) + +// ResourceDef implements the interface core.RextResourceDef +type ResourceDef struct { + mType string + resourceURI string + auth core.RextAuthDef + decoder core.RextDecoderDef + metrics []core.RextMetricDef + options core.RextKeyValueStore +} + +// Clone make a deep copy of ResourceDef or return an error if any +func (rd ResourceDef) Clone() (cRd core.RextResourceDef, err error) { + var cAuth core.RextAuthDef + if rd.GetAuth(nil) != nil { + if cAuth, err = rd.GetAuth(nil).Clone(); err != nil { + log.WithError(err).Errorln("can not clone http auth in resource") + return cRd, err + } + } + var cDecoder core.RextDecoderDef + if rd.GetDecoder() != nil { + if cDecoder, err = rd.GetDecoder().Clone(); err != nil { + log.WithError(err).Errorln("can not clone decoder in resource") + return cRd, err + } + } + var cMetrics []core.RextMetricDef + for _, metric := range rd.GetMetricDefs() { + var cMetric core.RextMetricDef + if cMetric, err = metric.Clone(); err != nil { + log.WithError(err).Errorln("can not clone metrics in resource") + return cRd, err + } + cMetrics = append(cMetrics, cMetric) + } + var cOpts core.RextKeyValueStore + if cOpts, err = rd.GetOptions().Clone(); err != nil { + log.WithError(err).Errorln("can not clone options in resource") + return cRd, err + } + cRd = NewResourceDef(rd.GetType(), rd.resourceURI, cAuth, cMetrics, cDecoder, cOpts) + return cRd, err + } + +// GetResourcePATH return the resource path against the service base path +func (rd ResourceDef) GetResourcePATH(basePath string) string { + return basePath + rd.resourceURI +} + +// GetType return the path type +func (rd ResourceDef) GetType() string { + return rd.mType +} + +// SetType set the type +func (rd *ResourceDef) SetType(t string) { + rd.mType = t +} + +// SetResourceURI set the resource path inside the service +func (rd *ResourceDef) SetResourceURI(uri string) { + rd.resourceURI = uri +} + +// GetAuth return the defAuth if this resource does not have a specific one +func (rd ResourceDef) GetAuth(defAuth core.RextAuthDef) core.RextAuthDef { + if rd.auth == nil { + return defAuth + } + return rd.auth +} + +// SetAuth set a specific auth for this resource +func (rd *ResourceDef) SetAuth(auth core.RextAuthDef) { + rd.auth = auth +} + +// SetDecoder set a decoder for this resource +func (rd *ResourceDef) SetDecoder(decoder core.RextDecoderDef) { + rd.decoder = decoder +} + +// GetDecoder return the decoder for this resource +func (rd ResourceDef) GetDecoder() core.RextDecoderDef { + return rd.decoder +} + +// AddMetricDef add a
metric definition inside the resource +func (rd *ResourceDef) AddMetricDef(mtrDef core.RextMetricDef) { + rd.metrics = append(rd.metrics, mtrDef) +} + +// GetMetricDefs return the metrics definitions associated with this resource +func (rd ResourceDef) GetMetricDefs() []core.RextMetricDef { + return rd.metrics +} + +// GetOptions return key/value pairs for extra options +func (rd *ResourceDef) GetOptions() core.RextKeyValueStore { + if rd.options == nil { + rd.options = NewOptionsMap() + } + return rd.options +} + +// Validate the resource, return true if any error is found +func (rd ResourceDef) Validate() bool { + return core.ValidateResource(&rd) +} + +// NewResourceDef create a new metric definition +func NewResourceDef(mType, resourceURI string, auth core.RextAuthDef, metrics []core.RextMetricDef, decoder core.RextDecoderDef, options core.RextKeyValueStore) core.RextResourceDef { + return &ResourceDef{ + mType: mType, + resourceURI: resourceURI, + auth: auth, + decoder: decoder, + metrics: metrics, + options: options, + } +} diff --git a/src/memconfig/resource_def_test.go b/src/memconfig/resource_def_test.go new file mode 100644 index 0000000..d28ecd3 --- /dev/null +++ b/src/memconfig/resource_def_test.go @@ -0,0 +1,233 @@ +package memconfig + +import ( + "testing" + + "github.com/simelo/rextporter/src/core" + "github.com/simelo/rextporter/src/core/mocks" + "github.com/stretchr/testify/suite" +) + +func newResourceDef(suite *resourceDefSuit) core.RextResourceDef { + return NewResourceDef( + suite.mType, + suite.resourceURI, + suite.auth, + suite.metrics, + suite.decoder, + suite.options, + ) +} + +type resourceDefSuit struct { + suite.Suite + mType string + resourceDef core.RextResourceDef + resourceURI string + auth core.RextAuthDef + decoder core.RextDecoderDef + metrics []core.RextMetricDef + options core.RextKeyValueStore +} + +func (suite *resourceDefSuit) SetupTest() { + suite.auth = &HTTPAuth{} + suite.auth.GetOptions() + suite.mType = "tt" + suite.resourceURI = "ddrer" + suite.decoder = &Decoder{} + suite.decoder.GetOptions() + suite.metrics = nil + suite.options = NewOptionsMap() + _, err := suite.options.SetString("k1", "v1") + suite.Nil(err) + _, err = suite.options.SetString("k2", "v2") + suite.Nil(err) + suite.resourceDef = newResourceDef(suite) +} + +func TestResourceDefSuitSuit(t *testing.T) { + suite.Run(t, new(resourceDefSuit)) +} + +func (suite *resourceDefSuit) TestNewResourceDefSuit() { + // NOTE(denisacostaq@gmail.com): Giving + + // NOTE(denisacostaq@gmail.com): When + resourceDef := newResourceDef(suite) + opts, err := suite.options.Clone() + suite.Nil(err) + _, err = suite.options.SetString("k1", "v2") + suite.Nil(err) + basePath := "dssds" + + // NOTE(denisacostaq@gmail.com): Assert + suite.Equal(suite.mType, resourceDef.GetType()) + suite.Equal(suite.auth, resourceDef.GetAuth(nil)) + suite.Equal(basePath+suite.resourceURI, resourceDef.GetResourcePATH(basePath)) + suite.Equal(suite.decoder, resourceDef.GetDecoder()) + suite.Equal(suite.options, resourceDef.GetOptions()) + suite.NotEqual(opts, resourceDef.GetOptions()) +} + +func (suite *resourceDefSuit) TestAbleToSetDecoder() { + // NOTE(denisacostaq@gmail.com): Giving + orgDecoder := suite.resourceDef.GetDecoder() + decoder := &Decoder{mType: "t1"} + suite.resourceDef.SetDecoder(decoder) + + // NOTE(denisacostaq@gmail.com): When + decoder2 := suite.resourceDef.GetDecoder() + + // NOTE(denisacostaq@gmail.com): Assert + suite.Equal(decoder, decoder2) + suite.NotEqual(orgDecoder, decoder2) +} + +func (suite 
*resourceDefSuit) TestAbleToSetURI() { + // NOTE(denisacostaq@gmail.com): Giving + basePath := "fffd" + orgPath := suite.resourceDef.GetResourcePATH(basePath) + resourceURI := "uri1" + suite.resourceDef.SetResourceURI(resourceURI) + + // NOTE(denisacostaq@gmail.com): When + resourceURL := suite.resourceDef.GetResourcePATH(basePath) + + // NOTE(denisacostaq@gmail.com): Assert + suite.Equal(basePath+resourceURI, resourceURL) + suite.NotEqual(orgPath, resourceURL) +} + +func (suite *resourceDefSuit) TestAbleToAddMetricDef() { + // NOTE(denisacostaq@gmail.com): Giving + orgMetrics := suite.resourceDef.GetMetricDefs() + metric := &MetricDef{} + suite.resourceDef.AddMetricDef(metric) + + // NOTE(denisacostaq@gmail.com): When + metric2 := suite.resourceDef.GetMetricDefs() + + // NOTE(denisacostaq@gmail.com): Assert + suite.Equal(len(orgMetrics)+1, len(metric2)) +} + +func (suite *resourceDefSuit) TestAbleToSetType() { + // NOTE(denisacostaq@gmail.com): Giving + orgType := suite.resourceDef.GetType() + mType := "dgfg" + suite.resourceDef.SetType(mType) + + // NOTE(denisacostaq@gmail.com): When + mType2 := suite.resourceDef.GetType() + + // NOTE(denisacostaq@gmail.com): Assert + suite.Equal(mType, mType2) + suite.NotEqual(orgType, mType2) +} + +func (suite *resourceDefSuit) TestInitializeEmptyOptionsInFly() { + // NOTE(denisacostaq@gmail.com): Giving + + // NOTE(denisacostaq@gmail.com): When + resDef := ResourceDef{} + + // NOTE(denisacostaq@gmail.com): Assert + suite.NotNil(resDef.GetOptions()) +} + +func (suite *resourceDefSuit) TestValidationClonedShouldBeValid() { + // NOTE(denisacostaq@gmail.com): Giving + + // NOTE(denisacostaq@gmail.com): When + cResConf, err := suite.resourceDef.Clone() + suite.Nil(err) + suite.Equal(suite.resourceDef, cResConf) + setUpFakeValidationOn3rdPartyOverResource(cResConf) + hasError := cResConf.Validate() + + // NOTE(denisacostaq@gmail.com): Assert + suite.False(hasError) +} + +func (suite *resourceDefSuit) TestValidationEmptyType() { + // NOTE(denisacostaq@gmail.com): Giving + cResConf, err := suite.resourceDef.Clone() + suite.Nil(err) + setUpFakeValidationOn3rdPartyOverResource(cResConf) + + // NOTE(denisacostaq@gmail.com): When + cResConf.SetType("") + hasError := cResConf.Validate() + + // NOTE(denisacostaq@gmail.com): Assert + suite.True(hasError) +} + +func (suite *resourceDefSuit) TestValidationEmptyResourceURI() { + // NOTE(denisacostaq@gmail.com): Giving + cResConf, err := suite.resourceDef.Clone() + suite.Nil(err) + setUpFakeValidationOn3rdPartyOverResource(cResConf) + + // NOTE(denisacostaq@gmail.com): When + cResConf.SetResourceURI("") + hasError := cResConf.Validate() + + // NOTE(denisacostaq@gmail.com): Assert + suite.True(hasError) +} + +func (suite *resourceDefSuit) TestValidationNilDecoder() { + // NOTE(denisacostaq@gmail.com): Giving + cResConf, err := suite.resourceDef.Clone() + suite.Nil(err) + setUpFakeValidationOn3rdPartyOverResource(cResConf) + + // NOTE(denisacostaq@gmail.com): When + cResConf.SetDecoder(nil) + hasError := cResConf.Validate() + + // NOTE(denisacostaq@gmail.com): Assert + suite.True(hasError) +} + +func (suite *resourceDefSuit) TestValidationShouldGoDownTroughFields() { + // NOTE(denisacostaq@gmail.com): Giving + cResConf, err := suite.resourceDef.Clone() + suite.Nil(err) + mockAuth := new(mocks.RextAuthDef) + mockAuth.On("Validate").Return(false) + mockDecoder := new(mocks.RextDecoderDef) + mockDecoder.On("Validate").Return(false) + mockMetric1 := new(mocks.RextMetricDef) + mockMetric1.On("Validate").Return(false) + 
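The validation tests in this patch stub out nested Validate calls with the test doubles under src/core/mocks, so a parent definition can be validated without building real children. Below is a minimal, self-contained sketch of the same stubbing pattern using testify's mock package; Validator, MockValidator and TestParentDelegatesValidation are illustrative names, not identifiers from this repository.

package example

import (
	"testing"

	"github.com/stretchr/testify/mock"
)

// Validator mirrors the Validate() bool contract shared by the config interfaces.
type Validator interface {
	Validate() bool
}

// MockValidator is a hand-written stand-in for a generated mock: it records
// calls and replays the canned return value configured with On/Return.
type MockValidator struct {
	mock.Mock
}

func (m *MockValidator) Validate() bool {
	args := m.Called()
	return args.Bool(0)
}

func TestParentDelegatesValidation(t *testing.T) {
	child := new(MockValidator)
	child.On("Validate").Return(false) // pretend the child has no errors

	// A parent Validate implementation would call child.Validate() while
	// walking its fields; here we call it directly to keep the sketch small.
	hasError := child.Validate()

	child.AssertCalled(t, "Validate")
	if hasError {
		t.Fatal("stubbed child should not report an error")
	}
}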
mockMetric2 := new(mocks.RextMetricDef) + mockMetric2.On("Validate").Return(false) + cResConf.SetAuth(mockAuth) + cResConf.SetDecoder(mockDecoder) + cResConf.AddMetricDef(mockMetric1) + cResConf.AddMetricDef(mockMetric2) + + // NOTE(denisacostaq@gmail.com): When + cResConf.Validate() + + // NOTE(denisacostaq@gmail.com): Assert + mockDecoder.AssertCalled(suite.T(), "Validate") + mockAuth.AssertCalled(suite.T(), "Validate") + suite.Len(cResConf.GetMetricDefs(), 2) + mockMetric1.AssertCalled(suite.T(), "Validate") + mockMetric2.AssertCalled(suite.T(), "Validate") +} + +func setUpFakeValidationOn3rdPartyOverResource(res core.RextResourceDef) { + authStub := new(mocks.RextAuthDef) + authStub.On("Validate").Return(false) + decoderStub := new(mocks.RextDecoderDef) + decoderStub.On("Validate").Return(false) + metricStub := new(mocks.RextMetricDef) + metricStub.On("Validate").Return(false) + res.SetAuth(authStub) + res.SetDecoder(decoderStub) + res.AddMetricDef(metricStub) +} diff --git a/src/memconfig/root_config.go b/src/memconfig/root_config.go new file mode 100644 index 0000000..b2b2a10 --- /dev/null +++ b/src/memconfig/root_config.go @@ -0,0 +1,50 @@ +package memconfig + +import ( + "github.com/simelo/rextporter/src/core" + log "github.com/sirupsen/logrus" +) + +// RootConfig implements core.RextRoot +type RootConfig struct { + services []core.RextServiceDef +} + +// Clone make a deep copy of RootConfig or return an error if any +func (root RootConfig) Clone() (cRc core.RextRoot, err error) { + var cSrvs []core.RextServiceDef + for _, srv := range root.GetServices() { + var cSrv core.RextServiceDef + if cSrv, err = srv.Clone(); err != nil { + log.WithError(err).Errorln("can not clone services in root config") + return cRc, err + } + cSrvs = append(cSrvs, cSrv) + } + cRc = NewRootConfig(cSrvs) + return cRc, err +} + +// GetServices return the services +func (root RootConfig) GetServices() []core.RextServiceDef { + services := make([]core.RextServiceDef, len(root.services)) + for idxSrv := range root.services { + services[idxSrv] = root.services[idxSrv] + } + return services +} + +// AddService add a service +func (root *RootConfig) AddService(srv core.RextServiceDef) { + root.services = append(root.services, srv) +} + +// Validate the root, return true if any error is found +func (root RootConfig) Validate() bool { + return core.ValidateRoot(&root) +} + +// NewRootConfig return a new root config instance +func NewRootConfig(services []core.RextServiceDef) core.RextRoot { + return &RootConfig{services: services} +} diff --git a/src/memconfig/root_config_test.go b/src/memconfig/root_config_test.go new file mode 100644 index 0000000..18b021d --- /dev/null +++ b/src/memconfig/root_config_test.go @@ -0,0 +1,92 @@ +package memconfig + +import ( + "testing" + + "github.com/simelo/rextporter/src/core" + "github.com/simelo/rextporter/src/core/mocks" + "github.com/stretchr/testify/suite" +) + +func newRootConfig(suite *rootConfigSuite) core.RextRoot { + return NewRootConfig(suite.services) +} + +type rootConfigSuite struct { + suite.Suite + services []core.RextServiceDef + rootConfig core.RextRoot +} + +func (suite *rootConfigSuite) SetupTest() { + suite.services = nil + suite.rootConfig = newRootConfig(suite) +} + +func TestRootConfig(t *testing.T) { + suite.Run(t, new(rootConfigSuite)) +} + +func (suite *rootConfigSuite) TestNewRootConf() { + // NOTE(denisacostaq@gmail.com): Giving + + // NOTE(denisacostaq@gmail.com): When + rootConfig := newRootConfig(suite) + services := rootConfig.GetServices() + + 
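RootConfig.GetServices in this file returns a freshly allocated slice rather than the internal one, so a caller that appends to or overwrites the returned value cannot disturb the root's own service list. A standalone illustration of that defensive-copy choice follows; holder and Items are placeholder names for the pattern, not repo types.

package main

import "fmt"

type holder struct {
	items []string
}

// Items returns a defensive copy of the backing slice, mirroring what
// RootConfig.GetServices does with its services field.
func (h holder) Items() []string {
	out := make([]string, len(h.items))
	copy(out, h.items)
	return out
}

func main() {
	h := holder{items: []string{"a", "b"}}
	got := h.Items()
	got[0] = "mutated"         // only the copy changes
	got = append(got, "extra") // only the copy grows
	fmt.Println(h.items, got)  // [a b] [mutated b extra]
}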
// NOTE(denisacostaq@gmail.com): Assert + suite.Equal(len(suite.services), len(services)) +} + +func (suite *rootConfigSuite) TestAbleToAddService() { + // NOTE(denisacostaq@gmail.com): Giving + orgServices := suite.rootConfig.GetServices() + service := &Service{} + suite.rootConfig.AddService(service) + + // NOTE(denisacostaq@gmail.com): When + services2 := suite.rootConfig.GetServices() + + // NOTE(denisacostaq@gmail.com): Assert + suite.Equal(len(orgServices)+1, len(services2)) +} + +func (suite *rootConfigSuite) TestValidationClonedShouldBeValid() { + // NOTE(denisacostaq@gmail.com): Giving + + // NOTE(denisacostaq@gmail.com): When + cRootConfig, err := suite.rootConfig.Clone() + suite.Nil(err) + suite.Equal(suite.rootConfig, cRootConfig) + setUpFakeValidationOn3rdPartyOverRootConfig(cRootConfig) + hasError := cRootConfig.Validate() + + // NOTE(denisacostaq@gmail.com): Assert + suite.False(hasError) +} + +func (suite *rootConfigSuite) TestValidationShouldGoDownTroughFields() { + // NOTE(denisacostaq@gmail.com): Giving + cRootConfig, err := suite.rootConfig.Clone() + suite.Nil(err) + mockService1 := new(mocks.RextServiceDef) + mockService1.On("Validate").Return(false) + mockService2 := new(mocks.RextServiceDef) + mockService2.On("Validate").Return(false) + cRootConfig.AddService(mockService1) + cRootConfig.AddService(mockService2) + + // NOTE(denisacostaq@gmail.com): When + cRootConfig.Validate() + + // NOTE(denisacostaq@gmail.com): Assert + suite.Len(cRootConfig.GetServices(), 2) + mockService2.AssertCalled(suite.T(), "Validate") + mockService2.AssertCalled(suite.T(), "Validate") +} + +func setUpFakeValidationOn3rdPartyOverRootConfig(root core.RextRoot) { + serviceStub := new(mocks.RextServiceDef) + serviceStub.On("Validate").Return(false) + root.AddService(serviceStub) +} diff --git a/src/memconfig/service.go b/src/memconfig/service.go new file mode 100644 index 0000000..1b6a2fb --- /dev/null +++ b/src/memconfig/service.go @@ -0,0 +1,125 @@ +package memconfig + +import ( + "github.com/simelo/rextporter/src/core" + "github.com/simelo/rextporter/src/util" + log "github.com/sirupsen/logrus" +) + +// Service implements core.RextServiceDef interface +type Service struct { + basePath string + // FIXME(denisacostaq@gmail.com): how to use base path, what about protocol, port, url + protocol string + auth core.RextAuthDef + // TODO(denisacostaq@gmail.com): rename to resources + resources []core.RextResourceDef + options core.RextKeyValueStore +} + +// Validate the service, return true if any error is found +func (srv Service) Validate() (hasError bool) { + if srv.GetProtocol() == "http" { + for _, res := range srv.GetResources() { + resPath := res.GetResourcePATH(srv.GetBasePath()) + if !util.IsValidURL(resPath) { + hasError = true + } + } + } + if core.ValidateService(&srv) { + hasError = true + } + return hasError +} + +// Clone make a deep copy of Service or return an error if any +func (srv Service) Clone() (cSrv core.RextServiceDef, err error) { + var cAuth core.RextAuthDef + if srv.auth != nil { + if cAuth, err = srv.auth.Clone(); err != nil { + log.WithError(err).Errorln("can not clone auth in service") + return cSrv, err + } + } + var cOpts core.RextKeyValueStore + if cOpts, err = srv.GetOptions().Clone(); err != nil { + log.WithError(err).Errorln("can not clone options in service") + return cSrv, err + } + var cResources []core.RextResourceDef + for _, resource := range srv.GetResources() { + var cResource core.RextResourceDef + if cResource, err = resource.Clone(); err != nil { + 
log.WithError(err).Errorln("can not clone resources in service") + return cSrv, err + } + cResources = append(cResources, cResource) + } + cSrv = NewServiceConf(srv.basePath, srv.protocol, cAuth, cResources, cOpts) + return cSrv, err +} + +// SetBasePath set the base path for the service +func (srv *Service) SetBasePath(path string) { + srv.basePath = path +} + +// GetProtocol return the service protocol +func (srv Service) GetProtocol() string { + return srv.protocol +} + +// GetBasePath return the base path +func (srv Service) GetBasePath() string { + return srv.basePath +} + +// SetProtocol set the protocol for the service +func (srv *Service) SetProtocol(protocol string) { + srv.protocol = protocol +} + +// SetAuthForBaseURL set an auth for the service +func (srv *Service) SetAuthForBaseURL(auth core.RextAuthDef) { + srv.auth = auth +} + +// GetAuthForBaseURL return the base auth +func (srv Service) GetAuthForBaseURL() core.RextAuthDef { + return srv.auth +} + +// AddResource add a resource +func (srv *Service) AddResource(source core.RextResourceDef) { + srv.resources = append(srv.resources, source) +} + +// AddResources add multiple resources +func (srv *Service) AddResources(sources ...core.RextResourceDef) { + srv.resources = append(srv.resources, sources...) +} + +// GetResources return the resources inside this service +func (srv Service) GetResources() []core.RextResourceDef { + return srv.resources +} + +// GetOptions return key/value pairs for extra options +func (srv *Service) GetOptions() core.RextKeyValueStore { + if srv.options == nil { + srv.options = NewOptionsMap() + } + return srv.options +} + +// NewServiceConf create a new service +func NewServiceConf(basePath, protocol string, auth core.RextAuthDef, resources []core.RextResourceDef, options core.RextKeyValueStore) core.RextServiceDef { + return &Service{ + basePath: basePath, + protocol: protocol, + auth: auth, + resources: resources, + options: options, + } +} diff --git a/src/memconfig/service_test.go b/src/memconfig/service_test.go new file mode 100644 index 0000000..a2c0261 --- /dev/null +++ b/src/memconfig/service_test.go @@ -0,0 +1,210 @@ +package memconfig + +import ( + "testing" + + "github.com/simelo/rextporter/src/core" + "github.com/simelo/rextporter/src/core/mocks" + "github.com/stretchr/testify/suite" +) + +type serviceConfSuit struct { + suite.Suite + srvConf core.RextServiceDef + basePath string + protocol string + auth core.RextAuthDef + resources []core.RextResourceDef + options core.RextKeyValueStore +} + +func newService(suite *serviceConfSuit) core.RextServiceDef { + return NewServiceConf(suite.basePath, suite.protocol, suite.auth, suite.resources, suite.options) +} + +func (suite *serviceConfSuit) SetupTest() { + suite.basePath = "/hosted_in/root" + suite.protocol = "file" + suite.auth = &HTTPAuth{} + suite.auth.GetOptions() + suite.resources = nil + suite.options = NewOptionsMap() + _, err := suite.options.SetString(core.OptKeyRextServiceDefJobName, "v1") + suite.Nil(err) + _, err = suite.options.SetString(core.OptKeyRextServiceDefInstanceName, "v2") + suite.Nil(err) + suite.srvConf = newService(suite) +} + +func TestServiceConfSuit(t *testing.T) { + suite.Run(t, new(serviceConfSuit)) +} + +func (suite *serviceConfSuit) TestNewServiceDef() { + // NOTE(denisacostaq@gmail.com): Giving + + // NOTE(denisacostaq@gmail.com): When + serviceDef := newService(suite) + opts, err := suite.options.Clone() + suite.Nil(err) + _, err = suite.options.SetString("k1", "v2") + suite.Nil(err) + + // 
NOTE(denisacostaq@gmail.com): Assert + suite.Equal(suite.protocol, serviceDef.GetProtocol()) + suite.Equal(suite.options, serviceDef.GetOptions()) + suite.NotEqual(opts, serviceDef.GetOptions()) +} + +func (suite *serviceConfSuit) TestAbleToSetProtocol() { + // NOTE(denisacostaq@gmail.com): Giving + protocol := "http" + suite.srvConf.SetProtocol(protocol) + + // NOTE(denisacostaq@gmail.com): When + protocol2 := suite.srvConf.GetProtocol() + + // NOTE(denisacostaq@gmail.com): Assert + suite.Equal(protocol, protocol2) +} + +func (suite *serviceConfSuit) TestAbleToSetBasePath() { + // NOTE(denisacostaq@gmail.com): Giving + basePath := "dfdf" + suite.srvConf.SetBasePath(basePath) + + // NOTE(denisacostaq@gmail.com): When + basePath2 := suite.srvConf.GetBasePath() + + // NOTE(denisacostaq@gmail.com): Assert + suite.Equal(basePath, basePath2) +} + +func (suite *serviceConfSuit) TestAbleToSetBaseAuth() { + // NOTE(denisacostaq@gmail.com): Giving + orgAuth := suite.srvConf.GetAuthForBaseURL() + auth := &HTTPAuth{authType: "ds"} + suite.srvConf.SetAuthForBaseURL(auth) + + // NOTE(denisacostaq@gmail.com): When + auth2 := suite.srvConf.GetAuthForBaseURL() + + // NOTE(denisacostaq@gmail.com): Assert + suite.Equal(auth, auth2) + suite.NotEqual(orgAuth, auth2) +} + +func (suite *serviceConfSuit) TestAbleToAddSource() { + // NOTE(denisacostaq@gmail.com): Giving + orgResource := suite.srvConf.GetResources() + resource := &ResourceDef{} + suite.srvConf.AddResource(resource) + + // NOTE(denisacostaq@gmail.com): When + resource2 := suite.srvConf.GetResources() + + // NOTE(denisacostaq@gmail.com): Assert + suite.Equal(len(orgResource)+1, len(resource2)) +} + +func (suite *serviceConfSuit) TestInitializeEmptyOptionsInFly() { + // NOTE(denisacostaq@gmail.com): Giving + + // NOTE(denisacostaq@gmail.com): When + srvConf := Service{} + + // NOTE(denisacostaq@gmail.com): Assert + suite.NotNil(srvConf.GetOptions()) +} + +func (suite *serviceConfSuit) TestValidationClonedShouldBeValid() { + // NOTE(denisacostaq@gmail.com): Giving + + // NOTE(denisacostaq@gmail.com): When + cSrvConf, err := suite.srvConf.Clone() + suite.Nil(err) + suite.Equal(suite.srvConf, cSrvConf) + setUpFakeValidationOn3rdPartyOverService(cSrvConf) + hasError := cSrvConf.Validate() + + // NOTE(denisacostaq@gmail.com): Assert + suite.False(hasError) +} + +func (suite *serviceConfSuit) TestValidationJobNameShouldNotBeEmpty() { + // NOTE(denisacostaq@gmail.com): Giving + srvConf, err := suite.srvConf.Clone() + suite.Nil(err) + + // NOTE(denisacostaq@gmail.com): When + opts := srvConf.GetOptions() + var pe bool + pe, err = opts.SetString(core.OptKeyRextServiceDefJobName, "") + suite.True(pe) + suite.Nil(err) + hasError := srvConf.Validate() + + // NOTE(denisacostaq@gmail.com): Assert + suite.True(hasError) +} + +func (suite *serviceConfSuit) TestValidationInstanceNameShouldNotBeEmpty() { + // NOTE(denisacostaq@gmail.com): Giving + srvConf, err := suite.srvConf.Clone() + suite.Nil(err) + + // NOTE(denisacostaq@gmail.com): When + opts := srvConf.GetOptions() + pe, err := opts.SetString(core.OptKeyRextServiceDefInstanceName, "") + suite.True(pe) + suite.Nil(err) + hasError := srvConf.Validate() + + // NOTE(denisacostaq@gmail.com): Assert + suite.True(hasError) +} + +func (suite *serviceConfSuit) TestValidationEmptyProtocol() { + // NOTE(denisacostaq@gmail.com): Giving + + // NOTE(denisacostaq@gmail.com): When + cSrvConf, err := suite.srvConf.Clone() + suite.Nil(err) + cSrvConf.SetProtocol("") + hasError := cSrvConf.Validate() + + // 
NOTE(denisacostaq@gmail.com): Assert + suite.True(hasError) +} + +func (suite *serviceConfSuit) TestValidationShouldGoDownTroughFields() { + // NOTE(denisacostaq@gmail.com): Giving + cSrvConf, err := suite.srvConf.Clone() + suite.Nil(err) + mockAuth := new(mocks.RextAuthDef) + mockAuth.On("Validate").Return(false) + mockResource1 := new(mocks.RextResourceDef) + mockResource1.On("Validate").Return(false) + mockResource2 := new(mocks.RextResourceDef) + mockResource2.On("Validate").Return(false) + cSrvConf.SetAuthForBaseURL(mockAuth) + cSrvConf.AddResources(mockResource1, mockResource2) + + // NOTE(denisacostaq@gmail.com): When + cSrvConf.Validate() + + // NOTE(denisacostaq@gmail.com): Assert + mockAuth.AssertCalled(suite.T(), "Validate") + suite.Len(cSrvConf.GetResources(), 2) + mockResource1.AssertCalled(suite.T(), "Validate") + mockResource2.AssertCalled(suite.T(), "Validate") +} + +func setUpFakeValidationOn3rdPartyOverService(srv core.RextServiceDef) { + authStub := new(mocks.RextAuthDef) + authStub.On("Validate").Return(false) + resourceStub := new(mocks.RextResourceDef) + resourceStub.On("Validate").Return(false) + srv.SetAuthForBaseURL(authStub) + srv.AddResource(resourceStub) +} diff --git a/src/memconfig/storage.go b/src/memconfig/storage.go new file mode 100644 index 0000000..1019e63 --- /dev/null +++ b/src/memconfig/storage.go @@ -0,0 +1,66 @@ +package memconfig + +import ( + "github.com/simelo/rextporter/src/core" +) + +// OptionsMap in-memory key value store +type OptionsMap map[string]interface{} + +// NewOptionsMap creates a new instance +func NewOptionsMap() (m OptionsMap) { + m = make(OptionsMap) + return +} + +// GetString return the string value for key +func (m OptionsMap) GetString(key string) (string, error) { + var err error + var val interface{} + if val, err = m.GetObject(key); err == nil { + strVal, okStrVal := val.(string) + if okStrVal { + return strVal, nil + } + return "", core.ErrKeyInvalidType + } + return "", err +} + +// SetString set a string value for key +func (m OptionsMap) SetString(key string, value string) (exists bool, err error) { + return m.SetObject(key, value) +} + +// GetObject return a saved object +func (m OptionsMap) GetObject(key string) (interface{}, error) { + if val, hasKey := m[key]; hasKey { + return val, nil + } + return "", core.ErrKeyNotFound +} + +// SetObject save an general object +func (m OptionsMap) SetObject(key string, value interface{}) (exists bool, err error) { + err = nil + _, exists = m[key] + m[key] = value + return +} + +// GetKeys return all the saved keys +func (m OptionsMap) GetKeys() (keys []string) { + for k := range m { + keys = append(keys, k) + } + return +} + +// Clone make a deep copy of the storage +func (m OptionsMap) Clone() (core.RextKeyValueStore, error) { + clone := NewOptionsMap() + for k := range m { + clone[k] = m[k] + } + return clone, nil +} diff --git a/src/rxt/ast.go b/src/rxt/ast.go new file mode 100644 index 0000000..ef0755a --- /dev/null +++ b/src/rxt/ast.go @@ -0,0 +1,304 @@ +package rxt + +import ( + "github.com/simelo/rextporter/src/config" + "github.com/simelo/rextporter/src/core" + "github.com/simelo/rextporter/src/util" +) + +// ASTDefEnv buildsthe syntax tree +type ASTDefEnv struct { + Options config.OptionsMap +} + +func NewASTDefEnv() *ASTDefEnv { + return &ASTDefEnv{ + Options: config.NewOptionsMap(), + } +} +// NewServiceScraper ... 
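OptionsMap, added in src/memconfig/storage.go, is the key/value store behind every GetOptions call in this patch. The short usage sketch below assumes core.RextKeyValueStore exposes the same GetString accessor that OptionsMap implements; because Clone copies entries key by key, string values are effectively deep-copied, while objects stored with SetObject would still be shared between the original and the clone.

package main

import (
	"fmt"

	"github.com/simelo/rextporter/src/memconfig"
)

func main() {
	opts := memconfig.NewOptionsMap()
	if _, err := opts.SetString("job_name", "skycoin"); err != nil {
		panic(err)
	}

	// Clone allocates a new map and copies every key into it.
	cloned, err := opts.Clone()
	if err != nil {
		panic(err)
	}

	// Writing to the original after cloning does not leak into the copy.
	if _, err := opts.SetString("job_name", "wallet"); err != nil {
		panic(err)
	}
	v1, _ := opts.GetString("job_name")
	v2, _ := cloned.GetString("job_name")
	fmt.Println(v1, v2) // wallet skycoin
}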
+func (env *ASTDefEnv) NewServiceScraper() (core.RextServiceScraper, error) { + return &ASTDefScraperDataset{ + SupportedServiceNames: nil, + SupportedStackNames: nil, + Definitions: make(map[string]interface{}), + Sources: nil, + Options: config.NewOptionsMap(), + }, nil +} + +// NewAuthStrategy ... +func (env *ASTDefEnv) NewAuthStrategy(authtype string, options core.RextKeyValueStore) (core.RextAuth, error) { + auth := ASTDefAuth{ + AuthType: authtype, + Options: config.NewOptionsMap(), + } + if err := util.MergeStoresInplace(auth.Options, options); err != nil { + return nil, err + } + return &auth, nil +} + +// NewMetricsExtractor ... +func (env *ASTDefEnv) NewMetricsExtractor(scraperType string, options core.RextKeyValueStore, metrics []core.RextMetricDef) (core.RextMetricsExtractor, error) { + astMetrics := make([]*ASTDefMetric, len(metrics)) + extractor := ASTDefExtract{ + Type: scraperType, + Metrics: astMetrics, + Options: config.NewOptionsMap(), + } + for idx, m := range metrics { + if astMetric, isAST := m.(*ASTDefMetric); isAST { + astMetrics[idx] = astMetric + } else { + astMetrics[idx] = &ASTDefMetric{ + Name: m.GetMetricName(), + Type: m.GetMetricType(), + Description: m.GetMetricDescription(), + Labels: m.GetMetricLabels(), + Options: config.NewOptionsMap(), + } + if err := util.MergeStoresInplace(astMetrics[idx].Options, m.GetOptions()); err != nil { + return nil, err + } + } + } + return &extractor, nil +} + +// NewMetricsDatasource ... +func (env *ASTDefEnv) NewMetricsDatasource(srcType string) core.RextDataSource { + return &ASTDefSource{ + Method: "", + Type: srcType, + Location: "", + Scrapers: nil, + Options: config.NewOptionsMap(), + } +} + +// RegisterScraperForServices ... +func (env *ASTDefEnv) RegisterScraperForServices(s core.RextServiceScraper, services ...string) error { + if ds, isAST := s.(*ASTDefScraperDataset); isAST { + ds.SupportedServiceNames = services + return nil + } + return core.ErrInvalidType +} + +// RegisterScraperForStacks ... +func (env *ASTDefEnv) RegisterScraperForStacks(s core.RextServiceScraper, stacks ...string) error { + if ds, isAST := s.(*ASTDefScraperDataset); isAST { + ds.SupportedStackNames = stacks + return nil + } + return core.ErrInvalidType +} + +// GetOptions ... +func (env *ASTDefEnv) GetOptions() core.RextKeyValueStore { + return env.Options +} + +// ASTDefScraperDataset parse tree node +type ASTDefScraperDataset struct { + SupportedServiceNames []string + SupportedStackNames []string + Definitions map[string]interface{} + Sources []*ASTDefSource + Options config.OptionsMap +} + +// AddAuthStrategy ... +func (ds *ASTDefScraperDataset) AddAuthStrategy(auth core.RextAuth, name string) { + ds.Definitions[name] = auth +} + +// AddSource ... +func (ds *ASTDefScraperDataset) AddSource(source core.RextDataSource) { + if astSrc, isAST := source.(*ASTDefSource); isAST { + ds.Sources = append(ds.Sources, astSrc) + } +} + +// AddSources ... +func (ds *ASTDefScraperDataset) AddSources(sources ...core.RextDataSource) { + for _, source := range sources { + ds.AddSource(source) + } +} + +// GetOptions ... +func (ds *ASTDefScraperDataset) GetOptions() core.RextKeyValueStore { + return ds.Options +} + +// ASTDefAuth parse tree node +type ASTDefAuth struct { + AuthType string + Options config.OptionsMap +} + +// GetOptions ... +func (auth *ASTDefAuth) GetOptions() core.RextKeyValueStore { + return auth.Options +} + +// GetOptions ... 
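RegisterScraperForServices and RegisterScraperForStacks only accept the AST-backed dataset and return core.ErrInvalidType for any other core.RextServiceScraper implementation. The standalone sketch below mirrors that type-assertion guard; scraper, astScraper, errInvalidType and registerForServices are illustrative names rather than repo identifiers.

package main

import (
	"errors"
	"fmt"
)

var errInvalidType = errors.New("invalid concrete type")

type scraper interface {
	Name() string
}

type astScraper struct {
	name     string
	services []string
}

func (a *astScraper) Name() string { return a.name }

// registerForServices only works with the AST-backed implementation, the same
// guard ASTDefEnv.RegisterScraperForServices applies before storing the list.
func registerForServices(s scraper, services ...string) error {
	ast, ok := s.(*astScraper)
	if !ok {
		return errInvalidType
	}
	ast.services = services
	return nil
}

func main() {
	s := &astScraper{name: "skycoin"}
	fmt.Println(registerForServices(s, "node", "wallet"), s.services)
}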
+func (auth *ASTDefAuth) GetAuthType() string { + return auth.AuthType +} + +// ASTDefSource parse tree node +type ASTDefSource struct { + Method string + Type string + Location string + Scrapers []*ASTDefExtract + Options config.OptionsMap +} + +// SetBaseURL ... +func (src *ASTDefSource) SetBaseURL(url string) { + // Do nothing. No info about this in AST +} + +// GetMethod ... +func (src *ASTDefSource) GetMethod() string { + return src.Method +} + +// SetMethod ... +func (src *ASTDefSource) SetMethod(s string) { + src.Type = s +} + +// GetResourceLocation ... + +func (src *ASTDefSource) GetResourceLocation() string { + return src.Location +} + +// SetResourceLocation ... +func (src *ASTDefSource) SetResourceLocation(s string) error { + src.Location = s + return nil +} + +// ActivateScraper ... +func (src *ASTDefSource) ActivateScraper(scraper core.RextMetricsExtractor) (err error) { + if extAST, isAST := scraper.(*ASTDefExtract); isAST { + src.Scrapers = append(src.Scrapers, extAST) + } else { + err = core.ErrInvalidType + } + return +} + +// GetOptions ... +func (src *ASTDefSource) GetOptions() core.RextKeyValueStore { + return src.Options +} + +// ASTDefExtract parse tree node +type ASTDefExtract struct { + Type string + Metrics []*ASTDefMetric + Options config.OptionsMap +} + +// Apply ... +func (scraper *ASTDefExtract) Apply(rule core.RextMetricDef) (core.RextMetricDef, error) { + var m *ASTDefMetric + if astRule, isAST := rule.(*ASTDefMetric); isAST { + m = astRule + } else { + m = NewMetricFromDef(rule) + } + scraper.Metrics = append(scraper.Metrics, m) + return m, nil +} + +// ApplyMany ... +func (scraper *ASTDefExtract) ApplyMany(rules []core.RextMetricDef) (newRules []core.RextMetricDef, err error) { + newRules = make([]core.RextMetricDef, len(rules)) + for idx, md := range rules { + if newRules[idx], err = scraper.Apply(md); err != nil { + return + } + } + return +} + +// ExtractMetrics ... +func (scraper *ASTDefExtract) ExtractMetrics(target interface{}) ([]core.RextMetric, error) { + return nil, nil +} + +// GetOptions ... +func (scraper *ASTDefExtract) GetOptions() core.RextKeyValueStore { + return scraper.Options +} + +// ASTDefMetric parse tree node +type ASTDefMetric struct { + Type string + Name string + Description string + Labels []string + Options config.OptionsMap +} + +func NewMetricFromDef(m core.RextMetricDef) *ASTDefMetric { + return &ASTDefMetric{ + Name: m.GetMetricName(), + Type: m.GetMetricType(), + Description: m.GetMetricDescription(), + Labels: m.GetMetricLabels(), + } +} + +// GetMetricName ... +func (m *ASTDefMetric) GetMetricName() string { + return m.Name +} + +// GetMetricType ... +func (m *ASTDefMetric) GetMetricType() string { + return m.Type +} + +// GetMetricDescription ... +func (m *ASTDefMetric) GetMetricDescription() string { + return m.Description +} + +// GetMetricLabels ... +func (m *ASTDefMetric) GetMetricLabels() []string { + return m.Labels +} + +// SetMetricName ... +func (m *ASTDefMetric) SetMetricName(s string) { + m.Name = s +} + +// SetMetricType ... +func (m *ASTDefMetric) SetMetricType(s string) { + m.Type = s +} + +// SetMetricDescription ... +func (m *ASTDefMetric) SetMetricDescription(s string) { + m.Description = s +} + +// SetMetricLabels ... +func (m *ASTDefMetric) SetMetricLabels(labels []string) { + m.Labels = labels +} + +// GetOptions ... 
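NewMetricFromDef copies an arbitrary core.RextMetricDef into the AST node purely through its getter methods, which keeps the AST independent of whatever concrete type produced the definition (note that, as written, it does not carry the source's options across). A self-contained sketch of that copy-via-interface move follows; metricDef, astMetric and fromDef are simplified placeholders.

package main

import "fmt"

// metricDef is a trimmed stand-in for the getter side of core.RextMetricDef.
type metricDef interface {
	GetMetricName() string
	GetMetricType() string
	GetMetricDescription() string
	GetMetricLabels() []string
}

type astMetric struct {
	name, mtype, description string
	labels                   []string
}

func (m *astMetric) GetMetricName() string        { return m.name }
func (m *astMetric) GetMetricType() string        { return m.mtype }
func (m *astMetric) GetMetricDescription() string { return m.description }
func (m *astMetric) GetMetricLabels() []string    { return m.labels }

// fromDef rebuilds the AST representation from any metricDef, reading it only
// through the interface, just as NewMetricFromDef does above.
func fromDef(m metricDef) *astMetric {
	return &astMetric{
		name:        m.GetMetricName(),
		mtype:       m.GetMetricType(),
		description: m.GetMetricDescription(),
		labels:      m.GetMetricLabels(),
	}
}

func main() {
	src := &astMetric{name: "tx_count", mtype: "COUNTER", labels: []string{"node"}}
	fmt.Printf("%+v\n", fromDef(src))
}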
+func (m *ASTDefMetric) GetOptions() core.RextKeyValueStore { + return m.Options +} diff --git a/src/rxt/grammar/interfaces.go b/src/rxt/grammar/interfaces.go new file mode 100644 index 0000000..ba39ec7 --- /dev/null +++ b/src/rxt/grammar/interfaces.go @@ -0,0 +1,8 @@ +package grammar + +// TokenHandler emits tokens discovered by the RXT lexer +type TokenHandler interface { + EmitInt(tokenid string, value int) + EmitStr(tokenid, value string) + EmitObj(tokenid string, value interface{}) +} diff --git a/src/rxt/grammar/lexer.nex b/src/rxt/grammar/lexer.nex new file mode 100644 index 0000000..70197f1 --- /dev/null +++ b/src/rxt/grammar/lexer.nex @@ -0,0 +1,62 @@ + +< { emit_obj("CTX", rootEnv) } +/[#].*\n/ { /* eat up comments */ } +/,[ \n\t]*/ { emit_str("PNC", token()[:1]) } +/[\n][ \t\n]*/ { indent( token() ) } +/[ \t]+/ { /* eat up whitespace */ } +/DATASET|FOR SERVICE|FOR STACK|DEFINE AUTH|AS|SET|TO|GET|POST|FROM|EXTRACT USING|METRIC|NAME|TYPE|GAUGE|COUNTER|HISTOGRAM|SUMMARY|DESCRIPTION|LABELS/ { emit_str("KEY", token()) } +/"[^"]*"/ { emit_str("STR", token()) } +/'[^']*'/ { emit_str("STR", token()) } +/[a-z_][a-z0-9_]*/ { emit_str("VAR", token()) } +/./ { emit_str("UNK", token()) } +> { /* nothing to do at end of file */ } +// +package grammar +import "os" +func LexTheRxt(handler TokenHandler, rootEnv interface{}) { + lex := NewLexer(os.Stdin) + indent_level := 0 + indent_stack := make([]int, 5) + token := func() string { return lex.Text() } + emit_str := handler.EmitStr + emit_int := handler.EmitInt + emit_obj := handler.EmitObj + indent := func(whitespace string) { + level := len(whitespace) - 1 + idx_last_eol := strings.LastIndexByte(whitespace, 10) + if idx_last_eol != -1 { + level -= idx_last_eol + } + if level > indent_level { + // Open block + indent_stack = append(indent_stack, indent_level) + indent_level = level + emit_int( "BLK" , indent_level ) + } else { + if level == indent_level { + // Same block + emit_int( "EOL" , indent_level ) + } else { + // Close block + if level == 0 && indent_level == 0 { + return + } + + idx := len(indent_stack) + for level < indent_level && idx > 0 { + emit_int( "EOB" , indent_level ) + idx = idx - 1 + indent_level = indent_stack[idx] + } + indent_stack = indent_stack[:idx] + if level == indent_level { + emit_int( "EOL" , indent_level ) + } else { + emit_int( "BIE" , indent_level ) + } + } + } + } + NN_FUN(lex) +} + diff --git a/src/rxt/grammar/lexer.nn.go b/src/rxt/grammar/lexer.nn.go new file mode 100644 index 0000000..f926a52 --- /dev/null +++ b/src/rxt/grammar/lexer.nn.go @@ -0,0 +1,6574 @@ +package grammar + +import "os" +import ( + "bufio" + "io" + "strings" +) + +type frame struct { + i int + s string + line, column int +} +type Lexer struct { + // The lexer runs in its own goroutine, and communicates via channel 'ch'. + ch chan frame + ch_stop chan bool + // We record the level of nesting because the action could return, and a + // subsequent call expects to pick up where it left off. In other words, + // we're simulating a coroutine. + // TODO: Support a channel-based variant that compatible with Go's yacc. + stack []frame + stale bool + + // The 'l' and 'c' fields were added for + // https://github.com/wagerlabs/docker/blob/65694e801a7b80930961d70c69cba9f2465459be/buildfile.nex + // Since then, I introduced the built-in Line() and Column() functions. + l, c int + + parseResult interface{} + + // The following line makes it easy for scripts to insert fields in the + // generated code. 
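The user code block in lexer.nex turns runs of newlines and indentation into block structure: deeper indentation emits BLK, the same level emits EOL, and a shallower level pops the indent stack with EOB until the levels match (BIE is emitted when no opening level does). The standalone sketch below reproduces that bookkeeping outside the generated lexer so it can be read and unit-tested in isolation; indentTracker and feed are illustrative names, and the sketch deliberately simplifies the stack handling.

package main

import "fmt"

// indentTracker mirrors the indent() helper from lexer.nex: it keeps the
// current indentation level plus a stack of enclosing levels and emits
// BLK/EOL/EOB/BIE tokens as the level changes.
type indentTracker struct {
	level int
	stack []int
	emit  func(tok string, level int)
}

func (t *indentTracker) feed(level int) {
	switch {
	case level > t.level:
		// Open a new block.
		t.stack = append(t.stack, t.level)
		t.level = level
		t.emit("BLK", t.level)
	case level == t.level:
		// Same block, just a new line.
		t.emit("EOL", t.level)
	default:
		// Close blocks until the levels match again.
		for level < t.level && len(t.stack) > 0 {
			t.emit("EOB", t.level)
			t.level = t.stack[len(t.stack)-1]
			t.stack = t.stack[:len(t.stack)-1]
		}
		if level == t.level {
			t.emit("EOL", t.level)
		} else {
			t.emit("BIE", t.level) // indentation does not match any open block
		}
	}
}

func main() {
	t := &indentTracker{emit: func(tok string, lvl int) { fmt.Println(tok, lvl) }}
	for _, lvl := range []int{0, 2, 2, 4, 0} {
		t.feed(lvl)
	}
	// Output: EOL 0, BLK 2, EOL 2, BLK 4, EOB 4, EOB 2, EOL 0
}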
+ // [NEX_END_OF_LEXER_STRUCT] +} + +// NewLexerWithInit creates a new Lexer object, runs the given callback on it, +// then returns it. +func NewLexerWithInit(in io.Reader, initFun func(*Lexer)) *Lexer { + yylex := new(Lexer) + if initFun != nil { + initFun(yylex) + } + yylex.ch = make(chan frame) + yylex.ch_stop = make(chan bool, 1) + var scan func(in *bufio.Reader, ch chan frame, ch_stop chan bool, family []dfa, line, column int) + scan = func(in *bufio.Reader, ch chan frame, ch_stop chan bool, family []dfa, line, column int) { + // Index of DFA and length of highest-precedence match so far. + matchi, matchn := 0, -1 + var buf []rune + n := 0 + checkAccept := func(i int, st int) bool { + // Higher precedence match? DFAs are run in parallel, so matchn is at most len(buf), hence we may omit the length equality check. + if family[i].acc[st] && (matchn < n || matchi > i) { + matchi, matchn = i, n + return true + } + return false + } + var state [][2]int + for i := 0; i < len(family); i++ { + mark := make([]bool, len(family[i].startf)) + // Every DFA starts at state 0. + st := 0 + for { + state = append(state, [2]int{i, st}) + mark[st] = true + // As we're at the start of input, follow all ^ transitions and append to our list of start states. + st = family[i].startf[st] + if -1 == st || mark[st] { + break + } + // We only check for a match after at least one transition. + checkAccept(i, st) + } + } + atEOF := false + stopped := false + for { + if n == len(buf) && !atEOF { + r, _, err := in.ReadRune() + switch err { + case io.EOF: + atEOF = true + case nil: + buf = append(buf, r) + default: + panic(err) + } + } + if !atEOF { + r := buf[n] + n++ + var nextState [][2]int + for _, x := range state { + x[1] = family[x[0]].f[x[1]](r) + if -1 == x[1] { + continue + } + nextState = append(nextState, x) + checkAccept(x[0], x[1]) + } + state = nextState + } else { + dollar: // Handle $. + for _, x := range state { + mark := make([]bool, len(family[x[0]].endf)) + for { + mark[x[1]] = true + x[1] = family[x[0]].endf[x[1]] + if -1 == x[1] || mark[x[1]] { + break + } + if checkAccept(x[0], x[1]) { + // Unlike before, we can break off the search. Now that we're at the end, there's no need to maintain the state of each DFA. + break dollar + } + } + } + state = nil + } + + if state == nil { + lcUpdate := func(r rune) { + if r == '\n' { + line++ + column = 0 + } else { + column++ + } + } + // All DFAs stuck. Return last match if it exists, otherwise advance by one rune and restart all DFAs. + if matchn == -1 { + if len(buf) == 0 { // This can only happen at the end of input. + break + } + lcUpdate(buf[0]) + buf = buf[1:] + } else { + text := string(buf[:matchn]) + buf = buf[matchn:] + matchn = -1 + for { + sent := false + select { + case ch <- frame{matchi, text, line, column}: + { + sent = true + } + case stopped = <-ch_stop: + { + } + default: + { + // nothing + } + } + if stopped || sent { + break + } + } + if stopped { + break + } + if len(family[matchi].nest) > 0 { + scan(bufio.NewReader(strings.NewReader(text)), ch, ch_stop, family[matchi].nest, line, column) + } + if atEOF { + break + } + for _, r := range text { + lcUpdate(r) + } + } + n = 0 + for i := 0; i < len(family); i++ { + state = append(state, [2]int{i, 0}) + } + } + } + ch <- frame{-1, "", line, column} + } + go scan(bufio.NewReader(in), yylex.ch, yylex.ch_stop, dfas, 0, 0) + return yylex +} + +type dfa struct { + acc []bool // Accepting states. + f []func(rune) int // Transitions. 
+ startf, endf []int // Transitions at start and end of input. + nest []dfa +} + +var dfas = []dfa{ + // [#].*\n + {[]bool{false, false, true, false}, []func(rune) int{ // Transitions + func(r rune) int { + switch r { + case 10: + return -1 + case 35: + return 1 + } + return -1 + }, + func(r rune) int { + switch r { + case 10: + return 2 + case 35: + return 3 + } + return 3 + }, + func(r rune) int { + switch r { + case 10: + return 2 + case 35: + return 3 + } + return 3 + }, + func(r rune) int { + switch r { + case 10: + return 2 + case 35: + return 3 + } + return 3 + }, + }, []int{ /* Start-of-input transitions */ -1, -1, -1, -1}, []int{ /* End-of-input transitions */ -1, -1, -1, -1}, nil}, + + // ,[ \n\t]* + {[]bool{false, true, true}, []func(rune) int{ // Transitions + func(r rune) int { + switch r { + case 9: + return -1 + case 10: + return -1 + case 32: + return -1 + case 44: + return 1 + } + return -1 + }, + func(r rune) int { + switch r { + case 9: + return 2 + case 10: + return 2 + case 32: + return 2 + case 44: + return -1 + } + return -1 + }, + func(r rune) int { + switch r { + case 9: + return 2 + case 10: + return 2 + case 32: + return 2 + case 44: + return -1 + } + return -1 + }, + }, []int{ /* Start-of-input transitions */ -1, -1, -1}, []int{ /* End-of-input transitions */ -1, -1, -1}, nil}, + + // [\n][ \t\n]* + {[]bool{false, true, true}, []func(rune) int{ // Transitions + func(r rune) int { + switch r { + case 9: + return -1 + case 10: + return 1 + case 32: + return -1 + } + return -1 + }, + func(r rune) int { + switch r { + case 9: + return 2 + case 10: + return 2 + case 32: + return 2 + } + return -1 + }, + func(r rune) int { + switch r { + case 9: + return 2 + case 10: + return 2 + case 32: + return 2 + } + return -1 + }, + }, []int{ /* Start-of-input transitions */ -1, -1, -1}, []int{ /* End-of-input transitions */ -1, -1, -1}, nil}, + + // [ \t]+ + {[]bool{false, true}, []func(rune) int{ // Transitions + func(r rune) int { + switch r { + case 9: + return 1 + case 32: + return 1 + } + return -1 + }, + func(r rune) int { + switch r { + case 9: + return 1 + case 32: + return 1 + } + return -1 + }, + }, []int{ /* Start-of-input transitions */ -1, -1}, []int{ /* End-of-input transitions */ -1, -1}, nil}, + + // DATASET|FOR SERVICE|FOR STACK|DEFINE AUTH|AS|SET|TO|GET|POST|FROM|EXTRACT USING|METRIC|NAME|TYPE|GAUGE|COUNTER|HISTOGRAM|SUMMARY|DESCRIPTION|LABELS + {[]bool{false, false, false, false, false, false, false, false, false, false, false, false, false, false, true, false, false, true, false, false, false, false, false, false, true, true, false, false, true, false, false, true, false, false, false, false, true, false, false, false, false, true, false, false, false, false, false, false, false, true, false, false, true, false, false, true, false, false, false, true, false, false, false, false, false, false, false, true, false, false, false, false, true, false, false, false, false, false, false, false, false, false, false, false, true, false, false, false, false, false, false, false, false, false, false, false, true, false, false, false, false, false, false, false, true, false, false, false, false, true, false, false, false, false, false, true, true}, []func(rune) int{ // Transitions + func(r rune) int { + switch r { + case 32: + return -1 + case 65: + return 1 + case 66: + return -1 + case 67: + return 2 + case 68: + return 3 + case 69: + return 4 + case 70: + return 5 + case 71: + return 6 + case 72: + return 7 + case 73: + return -1 + case 75: + return -1 + case 76: + 
return 8 + case 77: + return 9 + case 78: + return 10 + case 79: + return -1 + case 80: + return 11 + case 82: + return -1 + case 83: + return 12 + case 84: + return 13 + case 85: + return -1 + case 86: + return -1 + case 88: + return -1 + case 89: + return -1 + } + return -1 + }, + func(r rune) int { + switch r { + case 32: + return -1 + case 65: + return -1 + case 66: + return -1 + case 67: + return -1 + case 68: + return -1 + case 69: + return -1 + case 70: + return -1 + case 71: + return -1 + case 72: + return -1 + case 73: + return -1 + case 75: + return -1 + case 76: + return -1 + case 77: + return -1 + case 78: + return -1 + case 79: + return -1 + case 80: + return -1 + case 82: + return -1 + case 83: + return 116 + case 84: + return -1 + case 85: + return -1 + case 86: + return -1 + case 88: + return -1 + case 89: + return -1 + } + return -1 + }, + func(r rune) int { + switch r { + case 32: + return -1 + case 65: + return -1 + case 66: + return -1 + case 67: + return -1 + case 68: + return -1 + case 69: + return -1 + case 70: + return -1 + case 71: + return -1 + case 72: + return -1 + case 73: + return -1 + case 75: + return -1 + case 76: + return -1 + case 77: + return -1 + case 78: + return -1 + case 79: + return 110 + case 80: + return -1 + case 82: + return -1 + case 83: + return -1 + case 84: + return -1 + case 85: + return -1 + case 86: + return -1 + case 88: + return -1 + case 89: + return -1 + } + return -1 + }, + func(r rune) int { + switch r { + case 32: + return -1 + case 65: + return 85 + case 66: + return -1 + case 67: + return -1 + case 68: + return -1 + case 69: + return 86 + case 70: + return -1 + case 71: + return -1 + case 72: + return -1 + case 73: + return -1 + case 75: + return -1 + case 76: + return -1 + case 77: + return -1 + case 78: + return -1 + case 79: + return -1 + case 80: + return -1 + case 82: + return -1 + case 83: + return -1 + case 84: + return -1 + case 85: + return -1 + case 86: + return -1 + case 88: + return -1 + case 89: + return -1 + } + return -1 + }, + func(r rune) int { + switch r { + case 32: + return -1 + case 65: + return -1 + case 66: + return -1 + case 67: + return -1 + case 68: + return -1 + case 69: + return -1 + case 70: + return -1 + case 71: + return -1 + case 72: + return -1 + case 73: + return -1 + case 75: + return -1 + case 76: + return -1 + case 77: + return -1 + case 78: + return -1 + case 79: + return -1 + case 80: + return -1 + case 82: + return -1 + case 83: + return -1 + case 84: + return -1 + case 85: + return -1 + case 86: + return -1 + case 88: + return 73 + case 89: + return -1 + } + return -1 + }, + func(r rune) int { + switch r { + case 32: + return -1 + case 65: + return -1 + case 66: + return -1 + case 67: + return -1 + case 68: + return -1 + case 69: + return -1 + case 70: + return -1 + case 71: + return -1 + case 72: + return -1 + case 73: + return -1 + case 75: + return -1 + case 76: + return -1 + case 77: + return -1 + case 78: + return -1 + case 79: + return 56 + case 80: + return -1 + case 82: + return 57 + case 83: + return -1 + case 84: + return -1 + case 85: + return -1 + case 86: + return -1 + case 88: + return -1 + case 89: + return -1 + } + return -1 + }, + func(r rune) int { + switch r { + case 32: + return -1 + case 65: + return 50 + case 66: + return -1 + case 67: + return -1 + case 68: + return -1 + case 69: + return 51 + case 70: + return -1 + case 71: + return -1 + case 72: + return -1 + case 73: + return -1 + case 75: + return -1 + case 76: + return -1 + case 77: + return -1 + case 78: + 
return -1 + case 79: + return -1 + case 80: + return -1 + case 82: + return -1 + case 83: + return -1 + case 84: + return -1 + case 85: + return -1 + case 86: + return -1 + case 88: + return -1 + case 89: + return -1 + } + return -1 + }, + func(r rune) int { + switch r { + case 32: + return -1 + case 65: + return -1 + case 66: + return -1 + case 67: + return -1 + case 68: + return -1 + case 69: + return -1 + case 70: + return -1 + case 71: + return -1 + case 72: + return -1 + case 73: + return 42 + case 75: + return -1 + case 76: + return -1 + case 77: + return -1 + case 78: + return -1 + case 79: + return -1 + case 80: + return -1 + case 82: + return -1 + case 83: + return -1 + case 84: + return -1 + case 85: + return -1 + case 86: + return -1 + case 88: + return -1 + case 89: + return -1 + } + return -1 + }, + func(r rune) int { + switch r { + case 32: + return -1 + case 65: + return 37 + case 66: + return -1 + case 67: + return -1 + case 68: + return -1 + case 69: + return -1 + case 70: + return -1 + case 71: + return -1 + case 72: + return -1 + case 73: + return -1 + case 75: + return -1 + case 76: + return -1 + case 77: + return -1 + case 78: + return -1 + case 79: + return -1 + case 80: + return -1 + case 82: + return -1 + case 83: + return -1 + case 84: + return -1 + case 85: + return -1 + case 86: + return -1 + case 88: + return -1 + case 89: + return -1 + } + return -1 + }, + func(r rune) int { + switch r { + case 32: + return -1 + case 65: + return -1 + case 66: + return -1 + case 67: + return -1 + case 68: + return -1 + case 69: + return 32 + case 70: + return -1 + case 71: + return -1 + case 72: + return -1 + case 73: + return -1 + case 75: + return -1 + case 76: + return -1 + case 77: + return -1 + case 78: + return -1 + case 79: + return -1 + case 80: + return -1 + case 82: + return -1 + case 83: + return -1 + case 84: + return -1 + case 85: + return -1 + case 86: + return -1 + case 88: + return -1 + case 89: + return -1 + } + return -1 + }, + func(r rune) int { + switch r { + case 32: + return -1 + case 65: + return 29 + case 66: + return -1 + case 67: + return -1 + case 68: + return -1 + case 69: + return -1 + case 70: + return -1 + case 71: + return -1 + case 72: + return -1 + case 73: + return -1 + case 75: + return -1 + case 76: + return -1 + case 77: + return -1 + case 78: + return -1 + case 79: + return -1 + case 80: + return -1 + case 82: + return -1 + case 83: + return -1 + case 84: + return -1 + case 85: + return -1 + case 86: + return -1 + case 88: + return -1 + case 89: + return -1 + } + return -1 + }, + func(r rune) int { + switch r { + case 32: + return -1 + case 65: + return -1 + case 66: + return -1 + case 67: + return -1 + case 68: + return -1 + case 69: + return -1 + case 70: + return -1 + case 71: + return -1 + case 72: + return -1 + case 73: + return -1 + case 75: + return -1 + case 76: + return -1 + case 77: + return -1 + case 78: + return -1 + case 79: + return 26 + case 80: + return -1 + case 82: + return -1 + case 83: + return -1 + case 84: + return -1 + case 85: + return -1 + case 86: + return -1 + case 88: + return -1 + case 89: + return -1 + } + return -1 + }, + func(r rune) int { + switch r { + case 32: + return -1 + case 65: + return -1 + case 66: + return -1 + case 67: + return -1 + case 68: + return -1 + case 69: + return 18 + case 70: + return -1 + case 71: + return -1 + case 72: + return -1 + case 73: + return -1 + case 75: + return -1 + case 76: + return -1 + case 77: + return -1 + case 78: + return -1 + case 79: + return -1 + case 80: + 
return -1 + case 82: + return -1 + case 83: + return -1 + case 84: + return -1 + case 85: + return 19 + case 86: + return -1 + case 88: + return -1 + case 89: + return -1 + } + return -1 + }, + func(r rune) int { + switch r { + case 32: + return -1 + case 65: + return -1 + case 66: + return -1 + case 67: + return -1 + case 68: + return -1 + case 69: + return -1 + case 70: + return -1 + case 71: + return -1 + case 72: + return -1 + case 73: + return -1 + case 75: + return -1 + case 76: + return -1 + case 77: + return -1 + case 78: + return -1 + case 79: + return 14 + case 80: + return -1 + case 82: + return -1 + case 83: + return -1 + case 84: + return -1 + case 85: + return -1 + case 86: + return -1 + case 88: + return -1 + case 89: + return 15 + } + return -1 + }, + func(r rune) int { + switch r { + case 32: + return -1 + case 65: + return -1 + case 66: + return -1 + case 67: + return -1 + case 68: + return -1 + case 69: + return -1 + case 70: + return -1 + case 71: + return -1 + case 72: + return -1 + case 73: + return -1 + case 75: + return -1 + case 76: + return -1 + case 77: + return -1 + case 78: + return -1 + case 79: + return -1 + case 80: + return -1 + case 82: + return -1 + case 83: + return -1 + case 84: + return -1 + case 85: + return -1 + case 86: + return -1 + case 88: + return -1 + case 89: + return -1 + } + return -1 + }, + func(r rune) int { + switch r { + case 32: + return -1 + case 65: + return -1 + case 66: + return -1 + case 67: + return -1 + case 68: + return -1 + case 69: + return -1 + case 70: + return -1 + case 71: + return -1 + case 72: + return -1 + case 73: + return -1 + case 75: + return -1 + case 76: + return -1 + case 77: + return -1 + case 78: + return -1 + case 79: + return -1 + case 80: + return 16 + case 82: + return -1 + case 83: + return -1 + case 84: + return -1 + case 85: + return -1 + case 86: + return -1 + case 88: + return -1 + case 89: + return -1 + } + return -1 + }, + func(r rune) int { + switch r { + case 32: + return -1 + case 65: + return -1 + case 66: + return -1 + case 67: + return -1 + case 68: + return -1 + case 69: + return 17 + case 70: + return -1 + case 71: + return -1 + case 72: + return -1 + case 73: + return -1 + case 75: + return -1 + case 76: + return -1 + case 77: + return -1 + case 78: + return -1 + case 79: + return -1 + case 80: + return -1 + case 82: + return -1 + case 83: + return -1 + case 84: + return -1 + case 85: + return -1 + case 86: + return -1 + case 88: + return -1 + case 89: + return -1 + } + return -1 + }, + func(r rune) int { + switch r { + case 32: + return -1 + case 65: + return -1 + case 66: + return -1 + case 67: + return -1 + case 68: + return -1 + case 69: + return -1 + case 70: + return -1 + case 71: + return -1 + case 72: + return -1 + case 73: + return -1 + case 75: + return -1 + case 76: + return -1 + case 77: + return -1 + case 78: + return -1 + case 79: + return -1 + case 80: + return -1 + case 82: + return -1 + case 83: + return -1 + case 84: + return -1 + case 85: + return -1 + case 86: + return -1 + case 88: + return -1 + case 89: + return -1 + } + return -1 + }, + func(r rune) int { + switch r { + case 32: + return -1 + case 65: + return -1 + case 66: + return -1 + case 67: + return -1 + case 68: + return -1 + case 69: + return -1 + case 70: + return -1 + case 71: + return -1 + case 72: + return -1 + case 73: + return -1 + case 75: + return -1 + case 76: + return -1 + case 77: + return -1 + case 78: + return -1 + case 79: + return -1 + case 80: + return -1 + case 82: + return -1 + case 83: + 
+	// ... (elided) ...
+	// NOTE: the remaining auto-generated DFA transition functions for the
+	// uppercase keyword rule, together with that rule's start-of-input and
+	// end-of-input transition tables (all -1), are elided here; this lexer
+	// source is machine-generated.
+
+	// "[^"]*"
+	// (auto-generated 4-state DFA for double-quoted string literals, elided)
+
+	// '[^']*'
+	// (auto-generated 4-state DFA for single-quoted string literals, elided)
+
+	// [a-z_][a-z0-9_]*
+	// (auto-generated 3-state DFA for identifiers, elided)
+
+	// .
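+	// Rule order here matches the switch in LexTheRxt below: 0 comments,
+	// 1 punctuation (PNC), 2 indentation (BLK/EOL/EOB/BIE), 3 whitespace,
+	// 4 keywords (KEY), 5-6 string literals (STR), 7 identifiers (VAR),
+	// 8 catch-all (UNK).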
+ {[]bool{false, true}, []func(rune) int{ // Transitions + func(r rune) int { + return 1 + }, + func(r rune) int { + return -1 + }, + }, []int{ /* Start-of-input transitions */ -1, -1}, []int{ /* End-of-input transitions */ -1, -1}, nil}, +} + +func NewLexer(in io.Reader) *Lexer { + return NewLexerWithInit(in, nil) +} + +func (yyLex *Lexer) Stop() { + yyLex.ch_stop <- true +} + +// Text returns the matched text. +func (yylex *Lexer) Text() string { + return yylex.stack[len(yylex.stack)-1].s +} + +// Line returns the current line number. +// The first line is 0. +func (yylex *Lexer) Line() int { + if len(yylex.stack) == 0 { + return 0 + } + return yylex.stack[len(yylex.stack)-1].line +} + +// Column returns the current column number. +// The first column is 0. +func (yylex *Lexer) Column() int { + if len(yylex.stack) == 0 { + return 0 + } + return yylex.stack[len(yylex.stack)-1].column +} + +func (yylex *Lexer) next(lvl int) int { + if lvl == len(yylex.stack) { + l, c := 0, 0 + if lvl > 0 { + l, c = yylex.stack[lvl-1].line, yylex.stack[lvl-1].column + } + yylex.stack = append(yylex.stack, frame{0, "", l, c}) + } + if lvl == len(yylex.stack)-1 { + p := &yylex.stack[lvl] + *p = <-yylex.ch + yylex.stale = false + } else { + yylex.stale = true + } + return yylex.stack[lvl].i +} +func (yylex *Lexer) pop() { + yylex.stack = yylex.stack[:len(yylex.stack)-1] +} +func LexTheRxt(handler TokenHandler, rootEnv interface{}) { + lex := NewLexer(os.Stdin) + indent_level := 0 + indent_stack := make([]int, 5) + token := func() string { return lex.Text() } + emit_str := handler.EmitStr + emit_int := handler.EmitInt + emit_obj := handler.EmitObj + indent := func(whitespace string) { + level := len(whitespace) - 1 + idx_last_eol := strings.LastIndexByte(whitespace, 10) + if idx_last_eol != -1 { + level -= idx_last_eol + } + if level > indent_level { + // Open block + indent_stack = append(indent_stack, indent_level) + indent_level = level + emit_int("BLK", indent_level) + } else { + if level == indent_level { + // Same block + emit_int("EOL", indent_level) + } else { + // Close block + if level == 0 && indent_level == 0 { + return + } + + idx := len(indent_stack) + for level < indent_level && idx > 0 { + emit_int("EOB", indent_level) + idx = idx - 1 + indent_level = indent_stack[idx] + } + indent_stack = indent_stack[:idx] + if level == indent_level { + emit_int("EOL", indent_level) + } else { + emit_int("BIE", indent_level) + } + } + } + } + func(yylex *Lexer) { + if !yylex.stale { + { + emit_obj("CTX", rootEnv) + } + } + OUTER0: + for { + switch yylex.next(0) { + case 0: + { /* eat up comments */ + } + case 1: + { + emit_str("PNC", token()[:1]) + } + case 2: + { + indent(token()) + } + case 3: + { /* eat up whitespace */ + } + case 4: + { + emit_str("KEY", token()) + } + case 5: + { + emit_str("STR", token()) + } + case 6: + { + emit_str("STR", token()) + } + case 7: + { + emit_str("VAR", token()) + } + case 8: + { + emit_str("UNK", token()) + } + default: + break OUTER0 + } + continue + } + yylex.pop() + { /* nothing to do at end of file */ + } + }(lex) +} diff --git a/src/rxt/grammar/parser.go.y b/src/rxt/grammar/parser.go.y new file mode 100644 index 0000000..3471f3f --- /dev/null +++ b/src/rxt/grammar/parser.go.y @@ -0,0 +1,335 @@ + +{% +package grammar + +import ( + "errors" + + "github.com/simelo/rextporter/src/config" + "github.com/simelo/rextporter/src/core" + "github.com/simelo/rextporter/src/util" +) + +const ( + ErrBlockLevelUnderflow = errors.New("End of block is not possible beyond DATABASE") + 
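+	// ErrBlockLevelOverflow is reported when the input nests more syntax
+	// levels than the parser supports.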
ErrBlockLevelOverflow = errors.New("Too many nested syntax levels") +) + +type parserEnv struct { + env core.RextEnv + scraper core.RextServiceScraper +} + +type strTuple struct { + key string + val string +} + +type mainSecTuple struct { + src core.RextDataSource + key string + val interface{} +} + +type metricDef { + mname string + mtype string + mdesc string + mlbls []string + opts core.RextKeyValueStore +} + +// FIXME : Not global. Parser stack ? TLS ? +var root parserEnv +var metric metricDef + +// TODO: metricDef should implement core.RextMetricDef + +func value_for_str(str string) { + // FIXME: Support string literals + return string[1: len(str) - 1] +} + +func newOption() core.RextKeyValueStore { + return config.NewOptionsMap() +} + +func newStrTuple(s1, s2 string) *strTuple { + return &strTuple { + first: s1, + second: s2, + } +} + +func newMainDef(key string, value inrerface{}) *mainSecTuple { + return &mainSecTuple{ + src: nil, + key: key, + value: value, + } +} + +func newMainSrc(src core.RextDataSource) *mainSecTuple { + return &mainSecTuple{ + src: src, + key: "", + value: nil, + } +} + +func getRootEnv() *parserEnv { + return &root +} + +func (m *metricDef) GetMetricName() string { + return m.mname +} + +func (m *metricDef) GetMetricType() string { + return m.mtype +} + +func (m *metricDef) GetMetricDescription() string { + return m.mdesc +} + +func (m *metricDef) GetMetricLabels() []string { + return m.mlbls +} + +func (m *metricDef) SetMetricName(name string) { + m.mname = name +} + +func (m *metricDef) SetMetricType(typeid string) { + m.mtype = typeid +} + +func (m *metricDef) SetMetricDescription(desc string) { + m.mdesc = desc +} + +func (m *metricDef) SetMetricLabels(labels []string) { + m.mlbls = labels +} + +func (m *metricDef) GetOptions() RextKeyValueStore { + return nil +} + +%} + +%union{ + root core.RextServiceScraper + options core.RextKeyValueStore + mains []mainSecTuple + mainsec *mainSecTuple + exts []core.RextMetricsExtractor + extract core.RextMetricsExtractor + metrics []metricsDef + metric metricsDef + key string + strval string + strlist []string + pair *strTuple +} + +%type id mname mtype mhelp mhelpo +%type defverb srcverb mtvalue mfname +%type setcls +%type strlst idlst mlabels mlablso stkcls stkclso srvcls srvclso +%type optsblk optblkl optblkr optblko +%type metsec +%type metblk +%type extblk +%type srcsec defsec mainsec +%type mainblk +%type dataset + +%token STR VAR + +%% + +defverb : 'DEFINE AUTH' + { $$ = "AUTH" } + ; +srcverb : 'GET' + { $$ = $1 } + | 'POST' + { $$ = $1 } + ; +mtvalue : 'GAUGE' + { $$ = config.KeyTypeGauge } + | 'COUNTER' + { $$ = config.KeyTypeCounter } + | 'HISTOGRAM' + { $$ = config.KeyTypeHistogram } + | 'SUMMARY' + { $$ = config.KeyTypeSummary } + ; +id : VAR + { $$ = $1 } + | STR + { $$ = value_for_str($1) } + ; +setcls : 'SET' STR 'TO' STR + { $$ = newStrTuple($2, $4) } +optsblk : setcls + { + // TODO: Error handling + $$ = newOption() + _, _ = $$.SetString($1.first, $1.second) + } + | optsblk EOL setcls + { + // TODO: Error handling + _, _ = $1.top().SetString($3.first, $3.second) + $$ = $1 + } +strlst : STR + { $$ = []string{ $1 } } + | strlst ',' STR + { $$ = append($1, $3) } +idlst : id + { $$ = []string{ $1 } } + | idlst ',' id + { $$ = append($1, $3) } +mlabels : 'LABELS' strlst + { $$ = $2 } +mname : 'NAME' ID + { $$ = $2 } +mtype : 'TYPE' mtvalue + { $$ = $2 } +mhelp : 'HELP' STR + { $$ = $2 } +mhelpo : /* empty */ + { + $$ = "Metric extracted by [rextporter](https://github.com/simelo/rextporter)" + } + | EOL 
mhelp + { $$ = $2 } +mlablso : /* empty */ + { $$ = nil } + | EOL mlabels + { $$ = $2 } +optblkl : /* empty */ + { $$ = nil } + | EOL optsblk + { $$ = $2 } +optblkr : /* empty */ + { $$ = nil } + | optsblk EOL + { $$ = $1 } +metsec : 'METRIC' BLK mname EOL mtype mhelpo mlablso optblkl EOB + { + $$ = metricDef{ + mname: $3, + mtype: $5, + mdesc: $6, + mlbls: $7, + opts: $8, + } + } +metblk : metsec + { $$ = []metricsDef{ $1 } } + | metblk EOL metsec + { $$ = append($1, $3) } +extblk : 'EXTRACT USING' id BLK optblkr metblk EOB + { + env := getRootEnv() + $$ = env.NewMetricsExtractor($2, $4, $5) + for _, md := range $6 { + $$.AddMetricRule(&md) + } + } +ssec : extblk + { $$ = []core.RextMetricsExtractor{ $1 } } + | ssec EOL extblk + { $$ = append($1, $2) } +srcsec : srcverb VAR 'FROM' STR + { + env := getRootEnv() + ds := env.NewMetricsDataSource($2) + dsSetMethod($1) + dsSetLocation($4) + $$ = newMainSrc(ds) + } + | srcverb VAR 'FROM' STR BLK optblkr ssec EOB + { + env := getRootEnv() + ds := env.NewMetricsDataSource($2) + ds.SetMethod($1) + ds.SetLocation($4) + // FIXME: Error handling + _ = util.MergeStoresInplace(dsGetOptions(), $6) + $$ = newMainSrc(ds) + } +defsec : defverb VAR 'AS' id optblko + { + env := getRootEnv() + if defverb == 'AUTH' { + $$ = newMainDef($4, env.NewAuthStrategy($2, $5)) + } + // TODO: Error handling + $$ = nil + } +optblko : /* empty */ + { $$ = nil } + | BLK optsblk EOB + { $$ = $2 } +stkcls : 'FOR STACK' idlst + { $$ = $2 } +stkclso : /* empty */ + { $$ = nil } + | stkcls EOL + { $$ = $1 } +srvcls : 'FOR SERVICE' idlst + { $$ = $2 } +srvclso : /* empty */ + { $$ = nil } + | srvcls EOL + { $$ = $1 } +mainsec : defsec + { $$ = $1 } + | srcsec + { $$ = $1 } +mainblk : mainsec + { $$ = []mainSecTuple { $1 } } + | mainblk EOL mainsec + { $$ = append($1, $2) } +eolo : /* empty */ + | EOL +dataset : CTX eolo 'DATASET' BLK srvclso srvstko optblkr mainblk EOB eolo + { + env = $1 + $$ = env.NewServiceScraper() + if $5 != nil { + // TODO : Error handling + _ = env.RegisterScraperForServices($5...) + } + if $6 != nil { + // TODO : Error handling + _ = env.RegisterScraperForServices($6...) + } + if $7 != nil { + util.MergeStoresInplace($$.GetOptions(), $7) + } + for _, mainsec := range $8 { + if mainsec.src != nil { + $$.AddSource(mainsec.src) + } else if mainsec.value != nil { + if auth, isAuth := mainsec.value.(core.RextAuth); isAuth { + $$.AddAuthStrategy(auth, mainsec.key) + } + // TODO : Error handling + } + // TODO : Error handling + } + } +%% + + + + + + diff --git a/src/rxt/handlers.go b/src/rxt/handlers.go new file mode 100644 index 0000000..aaff31b --- /dev/null +++ b/src/rxt/handlers.go @@ -0,0 +1,22 @@ +package rxt + +import "fmt" + +// TokenWriter outputs tokens to stdout +type TokenWriter struct { +} + +// EmitInt ... +func (tw *TokenWriter) EmitInt(tokenid string, value int) { + println(tokenid, value) +} + +// EmitStr ... +func (tw *TokenWriter) EmitStr(tokenid, value string) { + println(tokenid, value) +} + +// EmitObj ... 
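+// EmitObj prints the token id followed by a fmt.Sprintf("%v") rendering of the value using the builtin println.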
+func (tw *TokenWriter) EmitObj(tokenid string, value interface{}) { + println(tokenid, fmt.Sprintf("%v", value)) +} diff --git a/src/rxt/testdata/skyexample.golden b/src/rxt/testdata/skyexample.golden new file mode 100644 index 0000000..9731469 --- /dev/null +++ b/src/rxt/testdata/skyexample.golden @@ -0,0 +1,173 @@ +CTX LEX +EOL 0 +KEY DATASET +BLK 4 +KEY FOR SERVICE +VAR skycoin +EOL 4 +KEY FOR STACK +VAR skyfiber +EOL 4 +KEY DEFINE AUTH +VAR rest_csrf +KEY AS +STR "skyauth" +BLK 8 +KEY SET +STR "url" +KEY TO +STR "/api/v1/csrf" +EOL 8 +KEY SET +STR "method" +KEY TO +STR "GET" +EOL 8 +KEY SET +STR "header" +KEY TO +STR "X-CSRF-Token" +EOL 8 +KEY SET +STR "json_path" +KEY TO +STR "csrf_token" +EOB 8 +EOL 4 +KEY GET +VAR forward_metrics +KEY FROM +STR '/api/v2/metrics' +BLK 8 +KEY SET +STR "prefix" +KEY TO +STR "skycoinexample" +EOB 8 +EOL 4 +KEY GET +VAR rest_api +KEY FROM +STR '/api/v1/health' +BLK 8 +KEY SET +STR "auth" +KEY TO +STR "skyauth" +EOL 8 +KEY EXTRACT USING +STR "jsonpath" +BLK 12 +KEY METRIC +BLK 16 +KEY NAME +STR "skycoin_auth_csrf_enabled" +EOL 16 +KEY TYPE +KEY GAUGE +EOL 16 +KEY SET +STR "path" +KEY TO +STR "csrf_enabled" +EOB 16 +EOL 12 +KEY METRIC +BLK 16 +KEY NAME +STR "skycoin_blockchain_burn_factor" +EOL 16 +KEY TYPE +KEY COUNTER +EOL 16 +KEY SET +STR "path" +KEY TO +STR "user_verify_transaction.burn_factor" +EOB 16 +EOL 12 +KEY METRIC +BLK 16 +KEY NAME +STR "skycoin_blockchain_block_head" +EOL 16 +KEY TYPE +KEY COUNTER +EOL 16 +KEY SET +STR "path" +KEY TO +STR "blockchain.head.seq" +EOB 16 +EOB 12 +EOB 8 +EOL 4 +KEY GET +VAR rest_api +KEY FROM +STR '/api/v1/network/connections' +BLK 8 +KEY SET +STR "auth" +KEY TO +STR "skyauth" +EOL 8 +KEY EXTRACT USING +STR "jsonpath" +BLK 12 +KEY METRIC +BLK 16 +KEY NAME +STR "skycoin_pex_connections" +EOL 16 +KEY TYPE +KEY COUNTER +EOL 16 +KEY LABELS +STR "state" +PNC , +STR "outgoing" +PNC , +STR "listen_port" +PNC , +STR "user_agent" +PNC , +STR "is_trusted" +PNC , +STR "burn_factor" +PNC , +STR "max_txn_size" +PNC , +STR "max_decimals" +PNC , +STR "height" +EOL 16 +KEY SET +STR "path" +KEY TO +STR "connections[*]" +EOL 16 +KEY SET +STR "label_path:is_trusted" +KEY TO +STR "is_trusted_peer" +EOL 16 +KEY SET +STR "label_path:burn_factor" +KEY TO +STR "unconfirmed_verify_transaction.burn_factor" +EOL 16 +KEY SET +STR "label_path:max_txn_size" +KEY TO +STR "unconfirmed_verify_transaction.max_transaction_size" +EOL 16 +KEY SET +STR "label_path:max_decimals" +KEY TO +STR "unconfirmed_verify_transaction.max_decimals" +EOB 16 +EOB 12 +EOB 8 +EOB 4 +EOL 0 diff --git a/src/rxt/testdata/skyexample.rxt b/src/rxt/testdata/skyexample.rxt new file mode 100644 index 0000000..2f430c7 --- /dev/null +++ b/src/rxt/testdata/skyexample.rxt @@ -0,0 +1,45 @@ + +DATASET + FOR SERVICE skycoin + FOR STACK skyfiber + + DEFINE AUTH rest_csrf AS "skyauth" + SET "url" TO "/api/v1/csrf" + SET "method" TO "GET" + SET "header" TO "X-CSRF-Token" + SET "json_path" TO "csrf_token" + + GET forward_metrics FROM '/api/v2/metrics' + SET "prefix" TO "skycoinexample" + + GET rest_api FROM '/api/v1/health' + SET "auth" TO "skyauth" + EXTRACT USING "jsonpath" + METRIC + NAME "skycoin_auth_csrf_enabled" + TYPE GAUGE + SET "path" TO "csrf_enabled" + METRIC + NAME "skycoin_blockchain_burn_factor" + TYPE COUNTER + SET "path" TO "user_verify_transaction.burn_factor" + METRIC + NAME "skycoin_blockchain_block_head" + TYPE COUNTER + SET "path" TO "blockchain.head.seq" + + GET rest_api FROM '/api/v1/network/connections' + SET "auth" TO "skyauth" + EXTRACT USING "jsonpath" + 
METRIC + NAME "skycoin_pex_connections" + TYPE COUNTER + LABELS "state", "outgoing", "listen_port", "user_agent", + "is_trusted", "burn_factor", "max_txn_size", + "max_decimals", "height" + SET "path" TO "connections[*]" + SET "label_path:is_trusted" TO "is_trusted_peer" + SET "label_path:burn_factor" TO "unconfirmed_verify_transaction.burn_factor" + SET "label_path:max_txn_size" TO "unconfirmed_verify_transaction.max_transaction_size" + SET "label_path:max_decimals" TO "unconfirmed_verify_transaction.max_decimals" + diff --git a/src/scrapper/histogram.go b/src/scrapper/histogram.go index fe05030..114391e 100644 --- a/src/scrapper/histogram.go +++ b/src/scrapper/histogram.go @@ -5,7 +5,6 @@ import ( "github.com/prometheus/client_golang/prometheus" "github.com/simelo/rextporter/src/client" - "github.com/simelo/rextporter/src/config" "github.com/simelo/rextporter/src/util" ) @@ -18,7 +17,7 @@ type Histogram struct { buckets histogramClientOptions } -func newHistogram(cf client.Factory, parser BodyParser, metric config.Metric, jobName, instanceName, dataSource string) Scrapper { +func newHistogram(cf client.Factory, parser BodyParser, datasource, jobName, instanceName, nodePath string, buckets histogramClientOptions) Scrapper { return Histogram{ baseAPIScrapper: baseAPIScrapper{ baseScrapper: baseScrapper{ @@ -26,11 +25,11 @@ func newHistogram(cf client.Factory, parser BodyParser, metric config.Metric, jo instanceName: instanceName, }, clientFactory: cf, - dataSource: dataSource, + dataSource: datasource, parser: parser, - jsonPath: metric.Path, + jsonPath: nodePath, }, - buckets: histogramClientOptions(metric.HistogramOptions.Buckets), + buckets: buckets, } } diff --git a/src/scrapper/metrics_forwader.go b/src/scrapper/metrics_forwader.go index fb431bc..4a4b431 100644 --- a/src/scrapper/metrics_forwader.go +++ b/src/scrapper/metrics_forwader.go @@ -58,12 +58,11 @@ func appendLables(metrics []byte, labels []*io_prometheus_client.LabelPair) ([]b writer := bufio.NewWriter(&buff) encoder := expfmt.NewEncoder(writer, expfmt.FmtText) for _, mf := range metricFamilies { - for idxMetrics := range mf.Metric { - mf.Metric[idxMetrics].Label = append(mf.Metric[idxMetrics].Label, labels...) + for idxMetric := range mf.Metric { + mf.Metric[idxMetric].Label = append(mf.Metric[idxMetric].Label, labels...) 
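+ // The extra label pairs passed by the caller (job and instance in GetMetric) are appended to every metric in the family.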
} - err := encoder.Encode(mf) - if err != nil { - log.WithFields(log.Fields{"err": err, "metric family": mf}).Errorln("can not encode metric family") + if err := encoder.Encode(mf); err != nil { + log.WithFields(log.Fields{"err": err, "metric_family": mf}).Errorln("can not encode metric family") return metrics, err } } @@ -95,6 +94,7 @@ func (scrapper MetricsForwader) GetMetric() (val interface{}, err error) { errCause := "can not get the data" return data, util.ErrorFromThisScope(errCause, generalScopeErr) } + // TODO(denisacostaq@gmail.com): use a global variable name for job and instance job := "job" instance := "instance" prefixed, err := appendLables( diff --git a/src/scrapper/numeric_vec.go b/src/scrapper/numeric_vec.go index be743cf..8c029f3 100644 --- a/src/scrapper/numeric_vec.go +++ b/src/scrapper/numeric_vec.go @@ -5,19 +5,18 @@ import ( "github.com/prometheus/client_golang/prometheus" "github.com/simelo/rextporter/src/client" - "github.com/simelo/rextporter/src/config" + "github.com/simelo/rextporter/src/core" "github.com/simelo/rextporter/src/util" ) // NumericVec implements the Client interface(is able to get numeric metrics through `GetMetric` like Gauge and Counter) type NumericVec struct { baseAPIScrapper - labels []config.Label - labelsName []string - itemPath string + labels []core.RextLabelDef + itemPath string } -func newNumericVec(cf client.Factory, p BodyParser, metric config.Metric, jobName, instanceName, dataSource string) Scrapper { +func newNumericVec(cf client.Factory, p BodyParser, jobName, instanceName, dataSource string, nSolver core.RextNodeSolver, mtrConf core.RextMetricDef, itemPath string) Scrapper { return NumericVec{ baseAPIScrapper: baseAPIScrapper{ baseScrapper: baseScrapper{ @@ -27,11 +26,10 @@ func newNumericVec(cf client.Factory, p BodyParser, metric config.Metric, jobNam clientFactory: cf, dataSource: dataSource, parser: p, - jsonPath: metric.Path, + jsonPath: nSolver.GetNodePath(), }, - labels: metric.Options.Labels, - labelsName: metric.LabelNames(), - itemPath: metric.Options.ItemPath, + labels: mtrConf.GetLabels(), + itemPath: itemPath, } } @@ -78,13 +76,14 @@ func (nv NumericVec) GetMetric(metricsCollector chan<- prometheus.Metric) (val i metricsVal[idxMetric].Labels = make([]string, len(nv.labels)) for idxLabel, label := range nv.labels { var iLabelVal interface{} - if iLabelVal, err = nv.parser.pathLookup(label.Path, metricItem); err != nil { + ns := label.GetNodeSolver() + if iLabelVal, err = nv.parser.pathLookup(ns.GetNodePath(), metricItem); err != nil { errCause := fmt.Sprintln("can not locate the path for label: ", err.Error()) return nil, util.ErrorFromThisScope(errCause, generalScopeErr) } labelVal, okLabelVal := iLabelVal.(string) if !okLabelVal { - errCause := fmt.Sprintf("can not assert metric label %s %+v as string", label.Name, iLabelVal) + errCause := fmt.Sprintf("can not assert metric label %s %+v as string", label.GetName(), iLabelVal) return nil, util.ErrorFromThisScope(errCause, generalScopeErr) } metricsVal[idxMetric].Labels[idxLabel] = labelVal diff --git a/src/scrapper/scrapper.go b/src/scrapper/scrapper.go index 7155ce6..0addf46 100644 --- a/src/scrapper/scrapper.go +++ b/src/scrapper/scrapper.go @@ -2,11 +2,13 @@ package scrapper import ( "errors" + "strings" "github.com/prometheus/client_golang/prometheus" "github.com/simelo/rextporter/src/client" - "github.com/simelo/rextporter/src/config" + "github.com/simelo/rextporter/src/core" "github.com/simelo/rextporter/src/util" + log "github.com/sirupsen/logrus" ) // 
Scrapper get metrics from raw data @@ -63,31 +65,56 @@ type BodyParser interface { } // NewScrapper will put all the required info to scrap metrics from the body returned by the client. -func NewScrapper(cf client.Factory, parser BodyParser, metric config.Metric, srvConf config.Service) (Scrapper, error) { - jobName := srvConf.JobName() - instanceName := srvConf.InstanceName() - dataSource := metric.URL - if len(metric.LabelNames()) > 0 { - return createVecScrapper(cf, parser, metric, jobName, instanceName, dataSource) +func NewScrapper(cf client.Factory, parser BodyParser, resConf core.RextResourceDef, srvConf core.RextServiceDef, mtrConf core.RextMetricDef, nSolver core.RextNodeSolver) (scrapper Scrapper, err error) { + dataSource := strings.TrimPrefix(resConf.GetResourcePATH(srvConf.GetBasePath()), srvConf.GetBasePath()) + srvOpts := srvConf.GetOptions() + jobName, err := srvOpts.GetString(core.OptKeyRextServiceDefJobName) + if err != nil { + log.WithError(err).Errorln("Can not find jobName") + return scrapper, err } - return createAtomicScrapper(cf, parser, metric, jobName, instanceName, dataSource) + instanceName, err := srvOpts.GetString(core.OptKeyRextServiceDefInstanceName) + if err != nil { + log.WithError(err).Errorln("Can not find instanceName") + return scrapper, err + } + if len(mtrConf.GetLabels()) > 0 { + return createVecScrapper(cf, parser, jobName, instanceName, dataSource, nSolver, mtrConf) + } + return createAtomicScrapper(cf, parser, jobName, instanceName, dataSource, mtrConf, nSolver) } -func createVecScrapper(cf client.Factory, parser BodyParser, metric config.Metric, jobName, instanceName, dataSource string) (Scrapper, error) { - if metric.Options.Type == config.KeyTypeCounter || metric.Options.Type == config.KeyTypeGauge { - return newNumericVec(cf, parser, metric, jobName, instanceName, dataSource), nil +func createVecScrapper(cf client.Factory, parser BodyParser, jobName, instanceName, dataSource string, nSolver core.RextNodeSolver, mtrConf core.RextMetricDef) (scrapper Scrapper, err error) { + if mtrConf.GetMetricType() == core.KeyMetricTypeCounter || mtrConf.GetMetricType() == core.KeyMetricTypeGauge { + mtrOptions := mtrConf.GetOptions() + itemPath, err := mtrOptions.GetString(core.OptKeyRextMetricDefVecItemPath) + if err != nil { + log.WithError(err).Errorln("unable to get item path for metric vec") + return scrapper, err + } + return newNumericVec(cf, parser, jobName, instanceName, dataSource, nSolver, mtrConf, itemPath), nil } return NumericVec{}, errors.New("histogram vec and summary vec are not supported yet") } -func createAtomicScrapper(cf client.Factory, parser BodyParser, metric config.Metric, jobName, instanceName, dataSource string) (Scrapper, error) { - if metric.Options.Type == config.KeyTypeSummary { +func createAtomicScrapper(cf client.Factory, parser BodyParser, jobName, instanceName, dataSource string, mtrConf core.RextMetricDef, nSolver core.RextNodeSolver) (scrapper Scrapper, err error) { + if mtrConf.GetMetricType() == core.KeyMetricTypeSummary { return Histogram{}, errors.New("summary scrapper is not supported yet") } - if metric.Options.Type == config.KeyTypeHistogram { - return newHistogram(cf, parser, metric, jobName, instanceName, dataSource), nil + if mtrConf.GetMetricType() == core.KeyMetricTypeHistogram { + bObj, err := mtrConf.GetOptions().GetObject(core.OptKeyRextMetricDefHMetricBuckets) + if err != nil { + log.WithError(err).Errorln("no buckets definitions found") + return scrapper, err + } + buckets, okBuckets := bObj.([]float64) 
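+ // GetObject returns an opaque interface{}, so the stored buckets must assert back to []float64 before the histogram scrapper can be built.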
+ if !okBuckets { + log.WithField("val", bObj).Errorln("value is not a float64 array(buckets)") + return scrapper, core.ErrKeyInvalidType + } + return newHistogram(cf, parser, jobName, instanceName, dataSource, nSolver.GetNodePath(), buckets), nil } - return newNumeric(cf, parser, metric.Path, jobName, instanceName, dataSource), nil + return newNumeric(cf, parser, nSolver.GetNodePath(), jobName, instanceName, dataSource), nil } func getData(cf client.Factory, p BodyParser, metricsCollector chan<- prometheus.Metric) (data interface{}, err error) { diff --git a/src/toml2config/filler.go b/src/toml2config/filler.go new file mode 100644 index 0000000..e419e78 --- /dev/null +++ b/src/toml2config/filler.go @@ -0,0 +1,157 @@ +package toml2config + +import ( + "fmt" + + "github.com/simelo/rextporter/src/core" + "github.com/simelo/rextporter/src/memconfig" + "github.com/simelo/rextporter/src/tomlconfig" + log "github.com/sirupsen/logrus" +) + +type metricName2Metric map[string]tomlconfig.Metric +type serviceName2MetricName2Metric map[string]metricName2Metric + +func buildMetricsMapping(conf tomlconfig.RootConfig) (metricsMapping serviceName2MetricName2Metric) { + metricsMapping = make(serviceName2MetricName2Metric) + for _, srv := range conf.Services { + mtrName2Metric := make(metricName2Metric) + for _, mtr := range srv.Metrics { + mtrName2Metric[mtr.Name] = mtr + } + metricsMapping[srv.Name] = mtrName2Metric + } + return metricsMapping +} + +func createService(srv tomlconfig.Service, metricsMapping serviceName2MetricName2Metric) (service core.RextServiceDef, err error) { + mtrN2Metric := metricsMapping[srv.Name] + service = &memconfig.Service{} + service.SetProtocol(srv.Protocol) + basePath := fmt.Sprintf("%s://%s:%d", service.GetProtocol(), srv.Location.Location, srv.Port) + service.SetBasePath(basePath) + srvOpts := service.GetOptions() + if _, err = srvOpts.SetString(core.OptKeyRextServiceDefJobName, srv.Name); err != nil { + log.WithFields(log.Fields{"key": core.OptKeyRextServiceDefJobName, "val": srv.Name}).Errorln("error saving job name") + return service, err + } + if _, err = srvOpts.SetString(core.OptKeyRextServiceDefInstanceName, fmt.Sprintf("%s:%d", srv.Location.Location, srv.Port)); err != nil { + log.WithFields(log.Fields{"key": core.OptKeyRextServiceDefInstanceName, "val": fmt.Sprintf("%s:%d", srv.Location.Location, srv.Port)}).Errorln("error saving instance name") + return service, err + } + auth := &memconfig.HTTPAuth{} + auth.SetAuthType(srv.AuthType) + authOpts := auth.GetOptions() + if _, err = authOpts.SetString(core.OptKeyRextAuthDefTokenHeaderKey, srv.TokenHeaderKey); err != nil { + log.WithFields(log.Fields{"key": core.OptKeyRextAuthDefTokenHeaderKey, "val": srv.TokenHeaderKey}).Errorln("error saving token header key") + return service, err + } + if _, err = authOpts.SetString(core.OptKeyRextAuthDefTokenKeyFromEndpoint, srv.TokenKeyFromEndpoint); err != nil { + log.WithFields(log.Fields{"key": core.OptKeyRextAuthDefTokenKeyFromEndpoint, "val": srv.TokenKeyFromEndpoint}).Errorln("error saving token key from endpoint") + return service, err + } + if _, err = authOpts.SetString(core.OptKeyRextAuthDefTokenGenEndpoint, srv.GenTokenEndpoint); err != nil { + log.WithFields(log.Fields{"key": core.OptKeyRextAuthDefTokenGenEndpoint, "val": srv.GenTokenEndpoint}).Errorln("error saving token endpoint") + return service, err + } + service.SetAuthForBaseURL(auth) + for _, resPath := range srv.ResourcePaths { + var resDef core.RextResourceDef + switch resPath.PathType { + case "rest_api": 
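+ // A rest_api path exposes JSON values that are mapped to metric definitions through the configured node solver (e.g. jsonPath).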
+ resDef = createResourceFrom4API(mtrN2Metric, resPath) + resDef.SetType(resPath.PathType) + resDef.SetResourceURI(resPath.Path) + decoder := memconfig.NewDecoder(resPath.PathType, nil) + resDef.SetDecoder(decoder) + resOpts := resDef.GetOptions() + // FIXME(denisacostaq@gmail.com): OptKeyRextResourceDefHTTPMethod should be inside the service or the resource + if _, err = resOpts.SetString(core.OptKeyRextResourceDefHTTPMethod, resPath.HTTPMethod); err != nil { + log.WithFields(log.Fields{"key": core.OptKeyRextResourceDefHTTPMethod, "val": resPath.HTTPMethod}).Errorln("error saving http method") + return service, err + } + case "metrics_fordwader": + resDef = createResourceFrom4ExposedMetrics(resPath) + decoder := memconfig.NewDecoder(resPath.PathType, nil) + resDef.SetDecoder(decoder) + default: + log.WithField("resource_path_type", resPath.PathType).Errorln("valid types are rest_api or metrics_fordwader") + return service, core.ErrKeyInvalidType + } + service.AddResource(resDef) + } + return service, err +} + +func createResourceFrom4API(mtrN2Metric map[string]tomlconfig.Metric, resPath tomlconfig.ResourcePath) (resDef core.RextResourceDef) { + resDef = &memconfig.ResourceDef{} + resDef.SetType(resPath.PathType) + resDef.SetResourceURI(resPath.Path) + resOpts := resDef.GetOptions() + if _, err := resOpts.SetString(core.OptKeyRextResourceDefHTTPMethod, resPath.HTTPMethod); err != nil { + log.WithFields(log.Fields{"key": core.OptKeyRextResourceDefHTTPMethod, "val": resPath.HTTPMethod}).Errorln("error saving http method") + return resDef + } + for _, mtrName := range resPath.MetricNames { + mtr /*, foundMetric*/ := mtrN2Metric[mtrName] + // if !foundMetric { + // continue + // } + nodeSolver := &memconfig.NodeSolver{MType: resPath.NodeSolverType} + nodeSolver.SetNodePath(mtr.Path) + metric := &memconfig.MetricDef{} + metric.SetMetricName(mtr.Name) + metric.SetMetricType(mtr.Options.Type) + metric.SetMetricDescription(mtr.Options.Description) + mtrOpts := metric.GetOptions() + for _, tomlLabel := range mtr.Options.Labels { + label := &memconfig.LabelDef{} + label.SetName(tomlLabel.Name) + lns := &memconfig.NodeSolver{} + lns.SetNodePath(tomlLabel.Path) + label.SetNodeSolver(lns) + metric.AddLabel(label) + } + if mtr.Options.Type == core.KeyMetricTypeHistogram { + if _, err := mtrOpts.SetObject(core.OptKeyRextMetricDefHMetricBuckets, mtr.HistogramOptions.Buckets); err != nil { + log.WithFields(log.Fields{"key": core.OptKeyRextMetricDefHMetricBuckets, "value": mtr.HistogramOptions.Buckets}).Errorln("error saving buckets for histogram") + return resDef + } + } + if mtr.Options.Labels != nil && len(mtr.Options.Labels) > 0 { + if _, err := mtrOpts.SetString(core.OptKeyRextMetricDefVecItemPath, mtr.Options.ItemPath); err != nil { + log.WithFields(log.Fields{"key": core.OptKeyRextMetricDefVecItemPath, "value": mtr.Options.ItemPath}).Errorln("error saving item path") + return resDef + } + } + metric.SetNodeSolver(nodeSolver) + resDef.AddMetricDef(metric) + } + return resDef +} + +func createResourceFrom4ExposedMetrics(resPath tomlconfig.ResourcePath) (resDef core.RextResourceDef) { + resDef = &memconfig.ResourceDef{} + resDef.SetType(resPath.PathType) + resDef.SetResourceURI(resPath.Path) + return resDef +} + +// Fill receive a tomlconfig.RootConfig and return an equivalent core.RextRoot +func Fill(conf tomlconfig.RootConfig) (root core.RextRoot, err error) { + root = &memconfig.RootConfig{} + metricsMapping := buildMetricsMapping(conf) + for _, srv := range conf.Services { + var service 
core.RextServiceDef + if service, err = createService(srv, metricsMapping); err != nil { + log.WithError(err).Errorln("can not fill service info") + return root, err + } + root.AddService(service) + } + if root.Validate() { + err = core.ErrKeyConfigHaveSomeErrors + return root, err + } + return root, err +} diff --git a/src/tomlconfig/config_render.go b/src/tomlconfig/config_render.go new file mode 100644 index 0000000..e95ebda --- /dev/null +++ b/src/tomlconfig/config_render.go @@ -0,0 +1,429 @@ +package tomlconfig + +// import ( +// "fmt" +// "os" +// "path/filepath" +// "text/template" + +// "github.com/shibukawa/configdir" +// "github.com/simelo/rextporter/src/util" +// "github.com/simelo/rextporter/src/util/file" +// "github.com/spf13/viper" +// ) + +// type templateData struct { +// ServicesConfigPath string +// MetricsForServicesPath string +// } + +// type metricsForServiceTemplateData struct { +// ServiceNameToMetricsConfPath map[string]string +// } + +// type metricsForServiceConfigTemplateData struct { +// TmplData metricsForServiceTemplateData +// } + +// type mainConfigData struct { +// mainConfigPath string +// tmplData templateData +// metricsForServiceConfigTmplData metricsForServiceConfigTemplateData +// } + +// func (confData mainConfigData) ServicesConfigPath() string { +// return confData.tmplData.ServicesConfigPath +// } + +// func (confData mainConfigData) metricsForServicesPath() string { +// return confData.tmplData.MetricsForServicesPath +// } + +// func (confData mainConfigData) MetricsConfigPath(serviceName string) string { +// return confData.metricsForServiceConfigTmplData.TmplData.ServiceNameToMetricsConfPath[serviceName] +// } + +// func (confData mainConfigData) MainConfigPath() string { +// return confData.mainConfigPath +// } + +// const mainConfigFileContentTemplate = ` +// servicesConfigTransport = "file" # "file" | "consulCatalog" +// servicesConfigPath = "{{.ServicesConfigPath}}" +// servicesConfigPath = "{{.ServicesConfigPath}}" +// metricsForServicesPath = "{{.MetricsForServicesPath}}" +// ` + +// const serviceConfigFileContentTemplate = ` +// # Services configuration. +// [[services]] +// name = "skycoin" +// modes = ["rest_api"] +// scheme = "http" +// port = 8080 +// authType = "CSRF" +// tokenHeaderKey = "X-CSRF-Token" +// genTokenEndpoint = "/api/v1/csrf" +// tokenKeyFromEndpoint = "csrf_token" + +// [services.location] +// location = "localhost" +// ` +// const skycoinMetricsConfigFileContentTemplate = ` +// # All metrics to be measured. 
+// [[metrics]] +// name = "seq" +// url = "/api/v1/health" +// httpMethod = "GET" +// path = "/blockchain/head/seq" + +// [metrics.options] +// type = "Counter" +// description = "I am running since" + +// [[metrics]] +// name = "openConnections" +// url = "/api/v1/network/connections" +// httpMethod = "GET" +// path = "/connections/unconfirmed_verify_transaction/burn_factor" + +// [metrics.options] +// type = "Histogram" +// description = "Connections amount" + +// [metrics.histogramOptions] +// buckets = [1, 2, 3] + +// [[metrics]] +// name = "burn_factor_by_service" +// url = "/api/v1/network/connections" +// httpMethod = "GET" +// path = "/connections" + +// [metrics.options] +// type = "Gauge" +// itemPath = "/unconfirmed_verify_transaction/burn_factor" +// description = "I am running since" + +// [[metrics.options.labels]] +// name = "ip_port" +// path = "/address" + +// [[metrics]] +// name = "connectedAtBySocketAndByBurnFactor" +// url = "/api/v1/network/connections" +// httpMethod = "GET" +// path = "/connections" + +// [metrics.options] +// type = "Gauge" +// itemPath = "/unconfirmed_verify_transaction/burn_factor" +// description = "Connections amount" + +// [[metrics.options.labels]] +// name = "ip:port" +// path = "/address" + +// [[metrics.options.labels]] +// name = "ip:port" +// path = "/address" + +// # TODO(denisacostaq@gmail.com): +// # if you refer(under "metrics_for_host") to a not previously defined host or metric it will be raise an error and the process will not start +// # if in all your definition you not use some host or metric the process will raise a warning and the process will start normally. +// # from https://github.com/simelo/rextporter/pull/17 +// # @denisacostaq services should be match against whole templates , rather than individual metrics. +// # The match is not for hosts directly . The match is for service types . Works like this +// # metrics <- m:1 -> templates <- 1:n -> services <- m:n -> (physical | virtual) hosts +// ` + +// const metricsForServiceMappingConfFileContentTemplate = ` +// serviceNameToMetricsConfPath = [{{range $key, $value := .}} +// { {{$key}} = "{{$value}}" },{{end}} +// ] +// ` + +// const ( +// systemVendorName = "simelo" +// // SystemProgramName is the program's name +// SystemProgramName = "rextporter" +// mainConfigFileName = "main.toml" +// servicesConfigFileName = "services.toml" +// metricsForServicesConfigFileName = "metricsForServices.toml" +// skycoinMetricsConfigFileName = "skycoinMetrics.toml" +// walletMetricsConfigFileName = "walletMetrics.toml" +// ) + +// func (confData mainConfigData) existServicesConfigFile() bool { +// return file.ExistFile(confData.ServicesConfigPath()) +// } + +// // createServicesConfigFile creates the service file or return an error if any, +// // if the file already exist does no thin. 
+// func (confData mainConfigData) createServicesConfigFile() (err error) { +// generalScopeErr := "error creating service config file" +// if confData.existServicesConfigFile() { +// return nil +// } +// tmpl := template.New("serviceConfig") +// var templateEngine *template.Template +// if templateEngine, err = tmpl.Parse(serviceConfigFileContentTemplate); err != nil { +// errCause := "error parsing service config: " + err.Error() +// return util.ErrorFromThisScope(errCause, generalScopeErr) +// } +// if err = file.CreateFullPathForFile(confData.ServicesConfigPath()); err != nil { +// errCause := "error creating directory for service file: " + err.Error() +// return util.ErrorFromThisScope(errCause, generalScopeErr) +// } +// var servicesConfigFile *os.File +// if servicesConfigFile, err = os.Create(confData.ServicesConfigPath()); err != nil { +// errCause := "error creating service config file: " + err.Error() +// return util.ErrorFromThisScope(errCause, generalScopeErr) +// } +// if err = templateEngine.Execute(servicesConfigFile, nil); err != nil { +// errCause := "error writing main config file: " + err.Error() +// return util.ErrorFromThisScope(errCause, generalScopeErr) +// } +// return err +// } + +// // createMetricsConfigFile creates the metrics file or return an error if any, +// // if the file already exist does no thin. +// func createMetricsConfigFile(metricConfPath string) (err error) { +// generalScopeErr := "error creating metrics config file" +// if file.ExistFile(metricConfPath) { +// return nil +// } +// tmpl := template.New("metricsConfig") +// var templateEngine *template.Template +// if templateEngine, err = tmpl.Parse(skycoinMetricsConfigFileContentTemplate); err != nil { +// errCause := "error parsing metrics config: " + err.Error() +// return util.ErrorFromThisScope(errCause, generalScopeErr) +// } +// if err = file.CreateFullPathForFile(metricConfPath); err != nil { +// errCause := "error creating directory for metrics file: " + err.Error() +// return util.ErrorFromThisScope(errCause, generalScopeErr) +// } +// var metricsConfigFile *os.File +// if metricsConfigFile, err = os.Create(metricConfPath); err != nil { +// errCause := "error creating metrics config file: " + err.Error() +// return util.ErrorFromThisScope(errCause, generalScopeErr) +// } +// if err = templateEngine.Execute(metricsConfigFile, nil); err != nil { +// errCause := "error writing metrics config file: " + err.Error() +// return util.ErrorFromThisScope(errCause, generalScopeErr) +// } +// return err +// } + +// func (confData mainConfigData) existMetricsForServicesConfigFile() bool { +// return file.ExistFile(confData.tmplData.MetricsForServicesPath) +// } + +// // createMetricsForServicesConfFile creates the metrics for services file or return an error if any, +// // if the file already exist does no thin. 
+// func (confData mainConfigData) createMetricsForServicesConfFile() (err error) { +// generalScopeErr := "error creating metrics for services config file" +// if confData.existMetricsForServicesConfigFile() { +// // FIXME(denisacostaq@gmail.com): check each metric file and create one of not exist +// return nil +// } +// tmpl := template.New("metricsForServiceConfig") +// var templateEngine *template.Template +// if templateEngine, err = tmpl.Parse(metricsForServiceMappingConfFileContentTemplate); err != nil { +// errCause := "error parsing metrics for services config: " + err.Error() +// return util.ErrorFromThisScope(errCause, generalScopeErr) +// } +// if err = file.CreateFullPathForFile(confData.metricsForServicesPath()); err != nil { +// errCause := "error creating directory for metrics for services file: " + err.Error() +// return util.ErrorFromThisScope(errCause, generalScopeErr) +// } +// var metricsForServiceConfigFile *os.File +// if metricsForServiceConfigFile, err = os.Create(confData.metricsForServicesPath()); err != nil { +// errCause := "error creating metrics for services config file: " + err.Error() +// return util.ErrorFromThisScope(errCause, generalScopeErr) +// } +// if err = templateEngine.Execute(metricsForServiceConfigFile, confData.metricsForServiceConfigTmplData.TmplData.ServiceNameToMetricsConfPath); err != nil { +// errCause := "error writing metrics for services config file: " + err.Error() +// return util.ErrorFromThisScope(errCause, generalScopeErr) +// } +// for key, val := range confData.metricsForServiceConfigTmplData.TmplData.ServiceNameToMetricsConfPath { +// if err = createMetricsConfigFile(val); err != nil { +// errCause := fmt.Sprintf("error creating metrics config file for service %s: %s", key, err.Error()) +// return util.ErrorFromThisScope(errCause, generalScopeErr) +// } +// } +// return err +// } + +// func (confData mainConfigData) existMainConfigFile() bool { +// return file.ExistFile(confData.MainConfigPath()) +// } + +// // createMainConfigFile creates the main file or return an error if any, +// // if the file already exist does no thin. 
+// func (confData mainConfigData) createMainConfigFile() (err error) { +// generalScopeErr := "error creating main config file" +// if confData.existMainConfigFile() { +// return nil +// } +// tmpl := template.New("mainConfig") +// var templateEngine *template.Template +// if templateEngine, err = tmpl.Parse(mainConfigFileContentTemplate); err != nil { +// errCause := "error parsing main config: " + err.Error() +// return util.ErrorFromThisScope(errCause, generalScopeErr) +// } +// if err = file.CreateFullPathForFile(confData.MainConfigPath()); err != nil { +// errCause := "error creating directory for main file: " + err.Error() +// return util.ErrorFromThisScope(errCause, generalScopeErr) +// } +// var mainConfigFile *os.File +// if mainConfigFile, err = os.Create(confData.MainConfigPath()); err != nil { +// errCause := "error creating main config file: " + err.Error() +// return util.ErrorFromThisScope(errCause, generalScopeErr) +// } +// if err = templateEngine.Execute(mainConfigFile, confData.tmplData); err != nil { +// errCause := "error writing main config file: " + err.Error() +// return util.ErrorFromThisScope(errCause, generalScopeErr) +// } +// return err +// } + +// func servicesDefaultConfigPath(conf *configdir.Config) (path string) { +// return file.DefaultConfigPath(servicesConfigFileName, conf) +// } + +// func mainDefaultConfigPath(conf *configdir.Config) (path string) { +// return file.DefaultConfigPath(mainConfigFileName, conf) +// } + +// func metricsForServicesDefaultConfigPath(conf *configdir.Config) (path string) { +// return file.DefaultConfigPath(metricsForServicesConfigFileName, conf) +// } + +// func skycoinMetricsConfigPath(conf *configdir.Config) (path string) { +// return file.DefaultConfigPath(skycoinMetricsConfigFileName, conf) +// } + +// func walletMetricsConfigPath(conf *configdir.Config) (path string) { +// return file.DefaultConfigPath(walletMetricsConfigFileName, conf) +// } + +// func defaultTmplData(conf *configdir.Config) (tmplData templateData) { +// tmplData = templateData{ +// ServicesConfigPath: servicesDefaultConfigPath(conf), +// MetricsForServicesPath: metricsForServicesDefaultConfigPath(conf), +// } +// return tmplData +// } + +// func defaultMetricsForServiceTmplData(conf *configdir.Config) (tmplData metricsForServiceConfigTemplateData) { +// tmplData = metricsForServiceConfigTemplateData{ +// TmplData: metricsForServiceTemplateData{ +// ServiceNameToMetricsConfPath: map[string]string{ +// "skycoin": skycoinMetricsConfigPath(conf), +// "wallet": walletMetricsConfigPath(conf), +// }, +// }, +// } +// return tmplData +// } + +// func tmplDataFromMainFile(mainConfigFilePath string) (tmpl templateData, err error) { +// generalScopeErr := "error filling template data" +// viper.SetConfigFile(mainConfigFilePath) +// viper.SetConfigType("toml") +// if err := viper.ReadInConfig(); err != nil { +// errCause := fmt.Sprintln("error reading config file: ", mainConfigFilePath, err.Error()) +// return tmpl, util.ErrorFromThisScope(errCause, generalScopeErr) +// } +// var mainConf templateData +// if err := viper.Unmarshal(&mainConf); err != nil { +// errCause := fmt.Sprintln("can not decode the config data: ", err.Error()) +// return tmpl, util.ErrorFromThisScope(errCause, generalScopeErr) +// } +// tmpl.ServicesConfigPath, tmpl.MetricsForServicesPath = mainConf.ServicesConfigPath, mainConf.MetricsForServicesPath +// return tmpl, err +// } + +// func (tmpl templateData) metricsForServicesTmplDataFromFile() (metricsForServicesTmpl 
metricsForServiceConfigTemplateData, err error) { +// generalScopeErr := "error filling template data" +// viper.SetConfigFile(tmpl.MetricsForServicesPath) +// viper.SetConfigType("toml") +// if err := viper.ReadInConfig(); err != nil { +// errCause := fmt.Sprintln("error reading config file: ", tmpl.MetricsForServicesPath, err.Error()) +// return metricsForServicesTmpl, util.ErrorFromThisScope(errCause, generalScopeErr) +// } +// if err := viper.Unmarshal(&(metricsForServicesTmpl.TmplData)); err != nil { +// errCause := fmt.Sprintln("can not decode the config data: ", err.Error()) +// return metricsForServicesTmpl, util.ErrorFromThisScope(errCause, generalScopeErr) +// } +// return metricsForServicesTmpl, err +// } + +// func metricsForServicesTmplData(conf *configdir.Config) metricsForServiceConfigTemplateData { +// return defaultMetricsForServiceTmplData(conf) +// } + +// func newMainConfigData(path string) (mainConf mainConfigData, err error) { +// generalScopeErr := "can not create main config instance" +// if file.IsADirectoryPath(path) { +// path = filepath.Join(path, mainConfigFileName) +// } +// var tmplData templateData +// var metricsForServiceTmplData metricsForServiceConfigTemplateData +// if len(path) == 0 || !file.ExistFile(path) { +// // TODO(denisacostaq@gmail.com): move homeConf to fn defaultTmplData +// var homeConf *configdir.Config +// if homeConf, err = file.HomeConfigFolder(systemVendorName, SystemProgramName); err != nil { +// errCause := "error looking for config folder under home: " + err.Error() +// return mainConf, util.ErrorFromThisScope(errCause, generalScopeErr) +// } +// path = mainDefaultConfigPath(homeConf) +// tmplData = defaultTmplData(homeConf) +// metricsForServiceTmplData = metricsForServicesTmplData(homeConf) +// } else { +// if tmplData, err = tmplDataFromMainFile(path); err != nil { +// errCause := "error reading template data from file: " + err.Error() +// return mainConf, util.ErrorFromThisScope(errCause, generalScopeErr) +// } +// // BUG(denisacostaq@gmail.com): if file not exist, metricsForServicesTmplDataFromFile panics +// if metricsForServiceTmplData, err = tmplData.metricsForServicesTmplDataFromFile(); err != nil { +// errCause := "error reading template data from file: " + err.Error() +// return mainConf, util.ErrorFromThisScope(errCause, generalScopeErr) +// } +// } +// if len(tmplData.ServicesConfigPath) == 0 || len(tmplData.MetricsForServicesPath) == 0 { +// var homeConf *configdir.Config +// if homeConf, err = file.HomeConfigFolder(systemVendorName, SystemProgramName); err != nil { +// errCause := "error looking for config folder under home: " + err.Error() +// return mainConf, util.ErrorFromThisScope(errCause, generalScopeErr) +// } +// tmpTmplData := defaultTmplData(homeConf) +// if len(tmplData.ServicesConfigPath) == 0 { +// tmplData.ServicesConfigPath = tmpTmplData.ServicesConfigPath +// } +// if len(tmplData.MetricsForServicesPath) == 0 { +// tmplData.MetricsForServicesPath = tmpTmplData.MetricsForServicesPath +// } +// metricsForServiceTmplData = metricsForServicesTmplData(homeConf) +// } +// mainConf = mainConfigData{ +// mainConfigPath: path, +// tmplData: tmplData, +// metricsForServiceConfigTmplData: metricsForServiceTmplData, +// } +// if err = mainConf.createMainConfigFile(); err != nil { +// errCause := "error creating main config file: " + err.Error() +// return mainConf, util.ErrorFromThisScope(errCause, generalScopeErr) +// } +// if err = mainConf.createServicesConfigFile(); err != nil { +// errCause := "error creating 
service config file: " + err.Error() +// return mainConf, util.ErrorFromThisScope(errCause, generalScopeErr) +// } +// if err = mainConf.createMetricsForServicesConfFile(); err != nil { +// errCause := "error creating metrics for services config file: " + err.Error() +// return mainConf, util.ErrorFromThisScope(errCause, generalScopeErr) +// } +// return mainConf, err +// } diff --git a/src/tomlconfig/example/main.toml b/src/tomlconfig/example/main.toml new file mode 100644 index 0000000..632052d --- /dev/null +++ b/src/tomlconfig/example/main.toml @@ -0,0 +1,4 @@ +servicesConfigTransport = "file" # "file" | "consulCatalog" +servicesConfigPath = "/home/adacosta/.config/simelo/rextporter/services.toml" +metricsForServicesConfigPath = "/home/adacosta/.config/simelo/rextporter/metricsForServices.toml" +resourcePathsForServicesConfPath = "/home/adacosta/.config/simelo/rextporter/resourcePathsForServices.toml" diff --git a/src/tomlconfig/example/metricsForServices.toml b/src/tomlconfig/example/metricsForServices.toml new file mode 100644 index 0000000..1d9b6ac --- /dev/null +++ b/src/tomlconfig/example/metricsForServices.toml @@ -0,0 +1,4 @@ +metricPathsForServicesConfig = [ + { skycoin = "/home/adacosta/.config/simelo/rextporter/skycoinMetrics.toml" }, + { wallet = "/home/adacosta/.config/simelo/rextporter/walletMetrics.toml" }, +] diff --git a/src/tomlconfig/example/resourcePathsForServices.toml b/src/tomlconfig/example/resourcePathsForServices.toml new file mode 100644 index 0000000..7fa96b4 --- /dev/null +++ b/src/tomlconfig/example/resourcePathsForServices.toml @@ -0,0 +1,4 @@ +resourcePathsForServicesConfig = [ + { skycoin = "/home/adacosta/.config/simelo/rextporter/resourcePathsForServices.toml" }, + { wallet = "/home/adacosta/.config/simelo/rextporter/walletResourcePaths.toml" }, +] diff --git a/src/tomlconfig/example/services.toml b/src/tomlconfig/example/services.toml new file mode 100644 index 0000000..642f941 --- /dev/null +++ b/src/tomlconfig/example/services.toml @@ -0,0 +1,30 @@ +# modes = ["rest_api", "forward_metrics"] +# Services configuration. +[[services]] + name = "skycoin" + modes = ["rest_api", "forward_metrics"] + protocol = "http" + port = 8000 + metricsToForwardPath = "/api/v1/metrics" + authType = "CSRF" + tokenHeaderKey = "X-CSRF-Token" + genTokenEndpoint = "/api/v1/csrf" + tokenKeyFromEndpoint = "csrf_token" + + [services.location] + location = "localhost" + + +[[services]] + name = "wallet" + modes = ["rest_api", "forward_metrics"] + protocol = "http" + port = 8000 + metricsToForwardPath = "/api/metrics2" + authType = "CSRF" + tokenHeaderKey = "X-CSRF-Token" + genTokenEndpoint = "/api/v1/csrf" + tokenKeyFromEndpoint = "csrf_token" + + [services.location] + location = "localhost" diff --git a/src/tomlconfig/example/skycoinMetrics.toml b/src/tomlconfig/example/skycoinMetrics.toml new file mode 100644 index 0000000..1d17ab9 --- /dev/null +++ b/src/tomlconfig/example/skycoinMetrics.toml @@ -0,0 +1,32 @@ +# All metrics to be measured. 
+[[metrics]] + name = "burnFactor" + path = "/connections/unconfirmed_verify_transaction/burn_factor" + nodeSolver = "connections" + + [metrics.options] + type = "Histogram" + description = "Connections amount 1" + + [metrics.histogramOptions] + buckets = [1, 2, 3] + + +[[metrics]] + name = "seq" + path = "/blockchain/head/seq" + nodeSolver = "health" + + [metrics.options] + type = "Counter" + description = "I am running since" + + +[[metrics]] + name = "fee" + path = "/blockchain/head/fee" + nodeSolver = "health" + + [metrics.options] + type = "Counter" + description = "I am running since" diff --git a/src/tomlconfig/example/skycoinResourcePaths.toml b/src/tomlconfig/example/skycoinResourcePaths.toml new file mode 100644 index 0000000..698332a --- /dev/null +++ b/src/tomlconfig/example/skycoinResourcePaths.toml @@ -0,0 +1,19 @@ +[[ResourcePaths]] + Name = "connections" + Path = "/api/v1/network/connections" + PathType = "rest_api" + nodeSolverType = "jsonPath" + MetricNames = ["burnFactor"] + +[[ResourcePaths]] + Name = "health" + Path = "/api/v1/health" + PathType = "rest_api" + nodeSolverType = "jsonPath" + MetricNames = ["seq", "fee"] + +[[ResourcePaths]] + Name = "metricsFordwader" + Path = "/api/v1/metrics" + PathType = "metrics_fordwader" + httpMethod = "GET" diff --git a/src/tomlconfig/example/walletMetrics.toml b/src/tomlconfig/example/walletMetrics.toml new file mode 100644 index 0000000..0832360 --- /dev/null +++ b/src/tomlconfig/example/walletMetrics.toml @@ -0,0 +1,12 @@ +# All metrics to be measured. +[[metrics]] + name = "burnFactor2" + path = "/connections/unconfirmed_verify_transaction/burn_factor" + nodeSolver = "connections" + + [metrics.options] + type = "Histogram" + description = "Connections amount 2" + + [metrics.histogramOptions] + buckets = [0.5, 1.0, 2.0, 3.0, 5.0] diff --git a/src/tomlconfig/example/walletResourcePaths.toml b/src/tomlconfig/example/walletResourcePaths.toml new file mode 100644 index 0000000..a534771 --- /dev/null +++ b/src/tomlconfig/example/walletResourcePaths.toml @@ -0,0 +1,13 @@ +[[ResourcePaths]] + Name = "connections" + Path = "/api/v1/network/connections" + PathType = "rest_api" + nodeSolverType = "jsonPath" + MetricNames = ["burnFactor2"] + +#[[ResourcePaths]] +# Name = "metricsFordwader" +# Path = "/metrics" +# PathType = "forward_metrics" +# nodeSolverType = "fordwader" + diff --git a/src/tomlconfig/fs_reader.go b/src/tomlconfig/fs_reader.go new file mode 100644 index 0000000..55735d2 --- /dev/null +++ b/src/tomlconfig/fs_reader.go @@ -0,0 +1,167 @@ +package tomlconfig + +import ( + "errors" + + "github.com/simelo/rextporter/src/configlocator" + "github.com/simelo/rextporter/src/core" + log "github.com/sirupsen/logrus" + "github.com/spf13/viper" +) + +var ( + // ErrKeyReadingFile check for empty and/or null + ErrKeyReadingFile = errors.New("Error reading file") +) + +type configFromFile struct { + filePath string +} + +type mainConfig struct { + ServicesConfigTransport string + ServicesConfigPath string + MetricsForServicesConfigPath string + ResourcePathsForServicesConfPath string +} + +func (cf configFromFile) readTomlFile(data interface{}) error { + if len(cf.filePath) == 0 { + log.Errorln("file path is required to read toml config") + return core.ErrKeyEmptyValue + } + viper.SetConfigType("toml") + viper.SetConfigFile(cf.filePath) + if err := viper.ReadInConfig(); err != nil { + log.WithFields(log.Fields{"err": err, "path": cf.filePath}).Errorln("error reading toml config file") + return ErrKeyReadingFile + } + if err := 
viper.Unmarshal(data); err != nil { + log.WithFields(log.Fields{"err": err, "path": cf.filePath}).Errorln("Error decoding toml config file content") + return ErrKeyReadingFile + } + return nil +} + +func (cf configFromFile) readMainConf() (mainConf mainConfig, err error) { + if err = cf.readTomlFile(&mainConf); err != nil { + log.Errorln("error reading main config") + return mainConf, err + } + return mainConf, err +} + +func (cf configFromFile) readServicesConf() (services []Service, err error) { + var root RootConfig + if err = cf.readTomlFile(&root); err != nil { + log.Errorln("error reading services config") + return services, err + } + services = root.Services + return services, err +} + +func (cf configFromFile) readMetricsForServiceConf() (metricsConf MetricsTemplate, err error) { + type metricsForServiceConfig struct { + Metrics MetricsTemplate + } + var metricsForServiceConf metricsForServiceConfig + if err = cf.readTomlFile(&metricsForServiceConf); err != nil { + log.Errorln("error reading metrics config") + return metricsConf, err + } + metricsConf = metricsForServiceConf.Metrics + return metricsConf, err +} + +func (cf configFromFile) readResourcePathsForServiceConf() (resPaths4Service ResourcePathTemplate, err error) { + type resourcePathsForServiceConfig struct { + ResourcePaths ResourcePathTemplate + } + var resourcePathsForServiceConf resourcePathsForServiceConfig + if err = cf.readTomlFile(&resourcePathsForServiceConf); err != nil { + log.Errorln("error reading resource path for services config") + return resPaths4Service, err + } + resPaths4Service = resourcePathsForServiceConf.ResourcePaths + return resPaths4Service, err +} + +func (cf configFromFile) readResourcePathsForServicesConf() (resPaths4Services map[string]string, err error) { + type resourcePathsForServicesConfig struct { + ResourcePathsForServicesConfig map[string]string + } + var resourcePathsForServicesConf resourcePathsForServicesConfig + if err = cf.readTomlFile(&resourcePathsForServicesConf); err != nil { + log.Errorln("error reading main config") + return resPaths4Services, err + } + resPaths4Services = resourcePathsForServicesConf.ResourcePathsForServicesConfig + return resPaths4Services, err +} + +func (cf configFromFile) readMetricsPathsForServicesConf() (mtrPaths4Services map[string]string, err error) { + type metricPathsForServicesConfig struct { + MetricPathsForServicesConfig map[string]string + } + var metricPathsForServicesConf metricPathsForServicesConfig + if err = cf.readTomlFile(&metricPathsForServicesConf); err != nil { + log.Errorln("error reading metric for services config") + return mtrPaths4Services, err + } + mtrPaths4Services = metricPathsForServicesConf.MetricPathsForServicesConfig + return mtrPaths4Services, err +} + +func readRootStructure(mainConf mainConfig) (rootConf RootConfig, err error) { + srvConfReader := configFromFile{filePath: mainConf.ServicesConfigPath} + rootConf.Services, err = srvConfReader.readServicesConf() + if err != nil { + log.WithError(err).Errorln("error reading services config") + return rootConf, ErrKeyReadingFile + } + resPaths4ServicesReader := configFromFile{filePath: mainConf.ResourcePathsForServicesConfPath} + var resPath4Service, mtrPath4Service map[string]string + if resPath4Service, err = resPaths4ServicesReader.readResourcePathsForServicesConf(); err != nil { + log.WithError(err).Errorln("error reading resource paths for services config") + return rootConf, ErrKeyReadingFile + } + mtrPaths4ServicesReader := configFromFile{filePath: 
mainConf.MetricsForServicesConfigPath} + if mtrPath4Service, err = mtrPaths4ServicesReader.readMetricsPathsForServicesConf(); err != nil { + log.WithError(err).Errorln("error reading metric paths for services config") + return rootConf, ErrKeyReadingFile + } + for idxService, service := range rootConf.Services { + resPath4ServiceReader := configFromFile{filePath: resPath4Service[service.Name]} + if rootConf.Services[idxService].ResourcePaths, err = resPath4ServiceReader.readResourcePathsForServiceConf(); err != nil { + log.WithFields(log.Fields{"err": err, "service": service.Name}).Warnln("error reading resource paths for service") + return rootConf, ErrKeyReadingFile + } + mtrPath4ServiceReader := configFromFile{filePath: mtrPath4Service[service.Name]} + if rootConf.Services[idxService].Metrics, err = mtrPath4ServiceReader.readMetricsForServiceConf(); err != nil { + log.WithFields(log.Fields{"err": err, "service": service.Name}).Warnln("error reading resource paths for service") + return rootConf, ErrKeyReadingFile + } + } + return rootConf, err +} + +// ReadConfigFromFileSystem will read the config from the file system. +func ReadConfigFromFileSystem(filePath string) (rootConf RootConfig, err error) { + const generalScopeErr = "error getting config values from file system" + mainConfigPath := configlocator.MainFile() + if len(filePath) != 0 { + mainConfigPath = filePath + } + mainConfReader := configFromFile{filePath: mainConfigPath} + var mainConf mainConfig + if mainConf, err = mainConfReader.readMainConf(); err != nil { + log.WithError(err).Errorln("error reading main config file") + return rootConf, ErrKeyReadingFile + } + if rootConf, err = readRootStructure(mainConf); err != nil { + log.WithError(err).Errorln("error reading root structure conf") + return rootConf, err + } + return rootConf, nil +} diff --git a/src/tomlconfig/structure.go b/src/tomlconfig/structure.go new file mode 100644 index 0000000..7d4c8a2 --- /dev/null +++ b/src/tomlconfig/structure.go @@ -0,0 +1,92 @@ +package tomlconfig + +// RootConfig is the top level node for the config tree, it has a list of services +type RootConfig struct { + Services []Service +} + +// Service is a concept to grab information about a datasource, for example: +// where is it http://localhost:1234 (Protocol + Location + : + Port + BasePath) +// what is the filesystem path(in case of file protocol)? +type Service struct { + Name string + // Protocol is file, http, https + Protocol string + Port uint16 + // FIXME(denisacostaq@gmial.com): use this base path? 
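+    // BasePath is reserved as a common URL prefix for the service's resources; it does not appear to be consumed yet (hence the FIXME above).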
+    BasePath string
+    AuthType string
+    TokenHeaderKey string
+    GenTokenEndpoint string
+    TokenKeyFromEndpoint string
+    Location Server
+    ResourcePaths ResourcePathTemplate
+    Metrics MetricsTemplate
+}
+
+// MetricsTemplate is a list of metric definitions, ready to be applied
+// multiple times to different services; to apply the same metric template to
+// services with different metrics, check out how ResourcePathTemplate can
+// define a subset of the metrics in a template
+type MetricsTemplate []Metric
+
+// Metric describes the metadata about how to load a real exposed metric
+type Metric struct {
+    Name string `json:"name"`
+    Path string `json:"path,omitempty"`
+    Options MetricOptions `json:"options"`
+    HistogramOptions HistogramOptions `json:"histogram_options"`
+}
+
+// MetricOptions keeps information about the metric, mostly its type (Counter, Gauge, Summary, or Histogram)
+type MetricOptions struct {
+    Type string `json:"type"`
+    ItemPath string `json:"item_path"`
+    Description string `json:"description"`
+    Labels []Label
+}
+
+// Label creates metric groupings by json path value, for example:
+// {Name: "color", Path: "/properties/color"}
+type Label struct {
+    // Name is the name of the label; different values can be assigned to it
+    Name string
+    // Path is the json path from where the label value is extracted
+    Path string
+}
+
+// HistogramOptions allows you to define the histogram's buckets.
+type HistogramOptions struct {
+    Buckets []float64 `json:"buckets"`
+
+    // ExponentialBuckets is a len three array where:
+    // - The first value is the low bound start bucket.
+    // - The second value is the growth factor.
+    // - The third one is the amount of buckets.
+    ExponentialBuckets []float64 `json:"exponential_buckets"`
+}
+
+// ResourcePath defines a node solver type for a given resource inside a service;
+// in addition, it has a metric names list that can work like a filter
+// over a subset of the defined metrics for a resource
+// TODO(denisacostaq@gmail.com) this filter should work for the metrics forwarder too
+type ResourcePath struct {
+    Name string
+    PathType string
+    Path string
+    NodeSolverType string
+    HTTPMethod string
+    // MetricNames TODO(denisacostaq@gmail.com): defining filtered metrics can introduce
+    // some redundancy with the other fields
+    MetricNames []string
+}
+
+// ResourcePathTemplate can be used to define a subset of metrics from MetricsTemplate in a given
+// service
+type ResourcePathTemplate []ResourcePath
+
+// Server is the server where the service is running
+type Server struct {
+    // Location should have the ip or URL.
+    Location string `json:"location"`
+}
diff --git a/src/util/config.go b/src/util/config.go
new file mode 100644
index 0000000..6f1838e
--- /dev/null
+++ b/src/util/config.go
@@ -0,0 +1,31 @@
+package util
+
+import (
+    "net/url"
+
+    "github.com/simelo/rextporter/src/core"
+)
+
+// MergeStoresInplace updates keys / values in destination with those in source
+func MergeStoresInplace(dst, src core.RextKeyValueStore) (err error) {
+    var value interface{}
+    err = nil
+    for _, k := range src.GetKeys() {
+        if value, err = src.GetObject(k); err == nil {
+            if _, err = dst.SetObject(k, value); err != nil {
+                return
+            }
+        } else {
+            return
+        }
+    }
+    return
+}
+
+// IsValidURL tests a string to determine if it is a valid URL or not.
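+// It relies on url.ParseRequestURI, so only absolute URLs and rooted request paths are reported as valid.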
+func IsValidURL(toTest string) bool { + if _, err := url.ParseRequestURI(toTest); err != nil { + return false + } + return true +} diff --git a/src/wip/dataset.go b/src/wip/dataset.go new file mode 100644 index 0000000..1920d11 --- /dev/null +++ b/src/wip/dataset.go @@ -0,0 +1,7 @@ +package wip + +// RextMetric provides access to values measured for a given metric +type RextMetric interface { + GetMetadata() RextMetricDef + // TODO: Methods to retrieve values measured for a given metric +} diff --git a/test/integration/fake_skycoin_node.go b/test/integration/fake_skycoin_node.go index 097bd18..31b3fc5 100644 --- a/test/integration/fake_skycoin_node.go +++ b/test/integration/fake_skycoin_node.go @@ -1,162 +1,21 @@ package main import ( + "bytes" "fmt" "net/http" "os" + "strings" + "github.com/prometheus/common/expfmt" + "github.com/simelo/rextporter/src/core" "github.com/simelo/rextporter/src/util/file" "github.com/simelo/rextporter/test/integration/testrand" log "github.com/sirupsen/logrus" ) -const exposedMetricsResponse = ` -# HELP go_gc_duration_seconds A summary of the GC invocation durations. -# TYPE go_gc_duration_seconds summary -go_gc_duration_seconds{quantile="0"} 0 -go_gc_duration_seconds{quantile="0.25"} 0 -go_gc_duration_seconds{quantile="0.5"} 0 -go_gc_duration_seconds{quantile="0.75"} 0 -go_gc_duration_seconds{quantile="1"} 0 -go_gc_duration_seconds_sum 0 -go_gc_duration_seconds_count 0 -# HELP go_goroutines Number of goroutines that currently exist. -# TYPE go_goroutines gauge -go_goroutines 7 -# HELP go_info Information about the Go environment. -# TYPE go_info gauge -go_info{version="go1.10.4"} 1 -# HELP go_memstats_alloc_bytes Number of bytes allocated and still in use. -# TYPE go_memstats_alloc_bytes gauge -go_memstats_alloc_bytes 1.060672e+06 -# HELP go_memstats_alloc_bytes_total Total number of bytes allocated, even if freed. -# TYPE go_memstats_alloc_bytes_total counter -go_memstats_alloc_bytes_total 1.060672e+06 -# HELP go_memstats_buck_hash_sys_bytes Number of bytes used by the profiling bucket hash table. -# TYPE go_memstats_buck_hash_sys_bytes gauge -go_memstats_buck_hash_sys_bytes 1.443364e+06 -# HELP go_memstats_frees_total Total number of frees. -# TYPE go_memstats_frees_total counter -go_memstats_frees_total 1007 -# HELP go_memstats_gc_cpu_fraction The fraction of this program's available CPU time used by the GC since the program started. -# TYPE go_memstats_gc_cpu_fraction gauge -go_memstats_gc_cpu_fraction 0 -# HELP go_memstats_gc_sys_bytes Number of bytes used for garbage collection system metadata. -# TYPE go_memstats_gc_sys_bytes gauge -go_memstats_gc_sys_bytes 169984 -# HELP go_memstats_heap_alloc_bytes Number of heap bytes allocated and still in use. -# TYPE go_memstats_heap_alloc_bytes gauge -go_memstats_heap_alloc_bytes 1.060672e+06 -# HELP go_memstats_heap_idle_bytes Number of heap bytes waiting to be used. -# TYPE go_memstats_heap_idle_bytes gauge -go_memstats_heap_idle_bytes 401408 -# HELP go_memstats_heap_inuse_bytes Number of heap bytes that are in use. -# TYPE go_memstats_heap_inuse_bytes gauge -go_memstats_heap_inuse_bytes 2.285568e+06 -# HELP go_memstats_heap_objects Number of allocated objects. -# TYPE go_memstats_heap_objects gauge -go_memstats_heap_objects 8042 -# HELP go_memstats_heap_released_bytes Number of heap bytes released to OS. -# TYPE go_memstats_heap_released_bytes gauge -go_memstats_heap_released_bytes 0 -# HELP go_memstats_heap_sys_bytes Number of heap bytes obtained from system. 
-# TYPE go_memstats_heap_sys_bytes gauge -go_memstats_heap_sys_bytes 2.686976e+06 -# HELP go_memstats_last_gc_time_seconds Number of seconds since 1970 of last garbage collection. -# TYPE go_memstats_last_gc_time_seconds gauge -go_memstats_last_gc_time_seconds 0 -# HELP go_memstats_lookups_total Total number of pointer lookups. -# TYPE go_memstats_lookups_total counter -go_memstats_lookups_total 15 -# HELP go_memstats_mallocs_total Total number of mallocs. -# TYPE go_memstats_mallocs_total counter -go_memstats_mallocs_total 9049 -# HELP go_memstats_mcache_inuse_bytes Number of bytes in use by mcache structures. -# TYPE go_memstats_mcache_inuse_bytes gauge -go_memstats_mcache_inuse_bytes 13888 -# HELP go_memstats_mcache_sys_bytes Number of bytes used for mcache structures obtained from system. -# TYPE go_memstats_mcache_sys_bytes gauge -go_memstats_mcache_sys_bytes 16384 -# HELP go_memstats_mspan_inuse_bytes Number of bytes in use by mspan structures. -# TYPE go_memstats_mspan_inuse_bytes gauge -go_memstats_mspan_inuse_bytes 31312 -# HELP go_memstats_mspan_sys_bytes Number of bytes used for mspan structures obtained from system. -# TYPE go_memstats_mspan_sys_bytes gauge -go_memstats_mspan_sys_bytes 32768 -# HELP go_memstats_next_gc_bytes Number of heap bytes when next garbage collection will take place. -# TYPE go_memstats_next_gc_bytes gauge -go_memstats_next_gc_bytes 4.473924e+06 -# HELP go_memstats_other_sys_bytes Number of bytes used for other system allocations. -# TYPE go_memstats_other_sys_bytes gauge -go_memstats_other_sys_bytes 1.05954e+06 -# HELP go_memstats_stack_inuse_bytes Number of bytes in use by the stack allocator. -# TYPE go_memstats_stack_inuse_bytes gauge -go_memstats_stack_inuse_bytes 458752 -# HELP go_memstats_stack_sys_bytes Number of bytes obtained from system for stack allocator. -# TYPE go_memstats_stack_sys_bytes gauge -go_memstats_stack_sys_bytes 458752 -# HELP go_memstats_sys_bytes Number of bytes obtained from system. -# TYPE go_memstats_sys_bytes gauge -go_memstats_sys_bytes 5.867768e+06 -# HELP go_threads Number of OS threads created. -# TYPE go_threads gauge -go_threads 7 -# HELP process_cpu_seconds_total Total user and system CPU time spent in seconds. -# TYPE process_cpu_seconds_total counter -process_cpu_seconds_total 0 -# HELP process_max_fds Maximum number of open file descriptors. -# TYPE process_max_fds gauge -process_max_fds 1024 -# HELP process_open_fds Number of open file descriptors. -# TYPE process_open_fds gauge -process_open_fds 9 -# HELP process_resident_memory_bytes Resident memory size in bytes. -# TYPE process_resident_memory_bytes gauge -process_resident_memory_bytes 1.2218368e+07 -# HELP process_start_time_seconds Start time of the process since unix epoch in seconds. -# TYPE process_start_time_seconds gauge -process_start_time_seconds 1.54180052506e+09 -# HELP process_virtual_memory_bytes Virtual memory size in bytes. -# TYPE process_virtual_memory_bytes gauge -process_virtual_memory_bytes 3.95628544e+08 -# HELP process_virtual_memory_max_bytes Maximum amount of virtual memory available in bytes. -# TYPE process_virtual_memory_max_bytes gauge -process_virtual_memory_max_bytes -1 -# HELP promhttp_metric_handler_requests_in_flight Current number of scrapes being served. -# TYPE promhttp_metric_handler_requests_in_flight gauge -promhttp_metric_handler_requests_in_flight 1 -# HELP promhttp_metric_handler_requests_total Total number of scrapes by HTTP status code. 
-# TYPE promhttp_metric_handler_requests_total counter -promhttp_metric_handler_requests_total{code="200"} 0 -promhttp_metric_handler_requests_total{code="500"} 0 -promhttp_metric_handler_requests_total{code="503"} 0 -# HELP skycoin_wallet1_seq1 I am running since -# TYPE skycoin_wallet1_seq1 counter -skycoin_wallet1_seq1 0 -# HELP skycoin_wallet1_seq1_up Says if the same name metric(skycoin_wallet1_seq1) was success updated, 1 for ok, 0 for failed. -# TYPE skycoin_wallet1_seq1_up gauge -skycoin_wallet1_seq1_up 1 -# HELP skycoin_wallet1_seq2 I am running since -# TYPE skycoin_wallet1_seq2 counter -skycoin_wallet1_seq2 0 -# HELP skycoin_wallet1_seq2_up Says if the same name metric(skycoin_wallet1_seq2) was success updated, 1 for ok, 0 for failed. -# TYPE skycoin_wallet1_seq2_up gauge -skycoin_wallet1_seq2_up 1 -# HELP skycoin_wallet2_seq1 I am running since -# TYPE skycoin_wallet2_seq1 counter -skycoin_wallet2_seq1 0 -# HELP skycoin_wallet2_seq1_up Says if the same name metric(skycoin_wallet2_seq1) was success updated, 1 for ok, 0 for failed. -# TYPE skycoin_wallet2_seq1_up gauge -skycoin_wallet2_seq1_up 1 -# HELP skycoin_wallet2_seq2 I am running since -# TYPE skycoin_wallet2_seq2 counter -skycoin_wallet2_seq2 0 -# HELP skycoin_wallet2_seq2_up Says if the same name metric(skycoin_wallet2_seq2) was success updated, 1 for ok, 0 for failed. -# TYPE skycoin_wallet2_seq2_up gauge -skycoin_wallet2_seq2_up 0 -` - -const jsonHealthResponse = ` +func apiHealthHandler(w http.ResponseWriter, r *http.Request) { + const jsonHealthResponse = ` { "blockchain": { "head": { @@ -188,21 +47,43 @@ const jsonHealthResponse = ` "json_rpc_enabled": false } ` - -func apiHealthHandler(w http.ResponseWriter, r *http.Request) { - log.Println("r.RequestURI\n\n", r.RequestURI) if _, err := w.Write([]byte(jsonHealthResponse)); err != nil { - log.WithError(err).Panicln("unable to write response") + log.WithError(err).Errorln("unable to write response") + w.WriteHeader(http.StatusInternalServerError) + } else { + w.WriteHeader(http.StatusOK) } - w.WriteHeader(http.StatusOK) } func exposedMetricHandler(w http.ResponseWriter, r *http.Request) { - log.Println("r.RequestURI\n\n", r.RequestURI) + const exposedMetricsResponse = ` +# HELP go_gc_duration_seconds1a18ac9b29c6 A summary of the GC invocation durations. +# TYPE go_gc_duration_seconds1a18ac9b29c6 summary +go_gc_duration_seconds1a18ac9b29c6{quantile="0"} 0 +go_gc_duration_seconds1a18ac9b29c6{quantile="0.25"} 0 +go_gc_duration_seconds1a18ac9b29c6{quantile="0.5"} 0 +go_gc_duration_seconds1a18ac9b29c6{quantile="0.75"} 0 +go_gc_duration_seconds1a18ac9b29c6{quantile="1"} 0 +go_gc_duration_seconds1a18ac9b29c6_sum 0 +go_gc_duration_seconds1a18ac9b29c6_count 0 +# HELP go_goroutines1a18ac9b29c6 Number of goroutines that currently exist. +# TYPE go_goroutines1a18ac9b29c6 gauge +go_goroutines1a18ac9b29c6 7 +# HELP go_memstats_mallocs_total1a18ac9b29c6 Total number of mallocs. +# TYPE go_memstats_mallocs_total1a18ac9b29c6 counter +go_memstats_mallocs_total1a18ac9b29c6 9049 +# HELP promhttp_metric_handler_requests_total1a18ac9b29c6 Total number of scrapes by HTTP status code. 
+# TYPE promhttp_metric_handler_requests_total1a18ac9b29c6 counter +promhttp_metric_handler_requests_total1a18ac9b29c6{code="200"} 0 +promhttp_metric_handler_requests_total1a18ac9b29c6{code="500"} 0 +promhttp_metric_handler_requests_total1a18ac9b29c6{code="503"} 0 +` if _, err := w.Write([]byte(exposedMetricsResponse)); err != nil { - log.WithError(err).Panicln("unable to write response") + log.WithError(err).Errorln("unable to write response") + w.WriteHeader(http.StatusInternalServerError) + } else { + w.WriteHeader(http.StatusOK) } - w.WriteHeader(http.StatusOK) } func exposedAFewMetrics(w http.ResponseWriter, r *http.Request) { @@ -221,9 +102,11 @@ seq 32 seq_up 0 ` if _, err := w.Write([]byte(metrics)); err != nil { - log.WithError(err).Panicln("unable to write response") + log.WithError(err).Errorln("unable to write response") + w.WriteHeader(http.StatusInternalServerError) + } else { + w.WriteHeader(http.StatusOK) } - w.WriteHeader(http.StatusOK) } func writeListenPortInFile(port uint16) (err error) { @@ -267,7 +150,42 @@ func main() { } log.WithField("port", fakeNodePort).Infoln("starting fake server") http.HandleFunc("/api/v1/health", apiHealthHandler) - http.HandleFunc("/metrics", exposedMetricHandler) + http.HandleFunc("/metrics2", exposedMetricHandler) http.HandleFunc("/a_few_metrics", exposedAFewMetrics) log.WithError(http.ListenAndServe(fmt.Sprintf(":%d", fakeNodePort), nil)).Fatalln("server fail") } + +func findMetric(metrics []byte, mtrName string) (bool, error) { + var parser expfmt.TextParser + in := bytes.NewReader(metrics) + metricFamilies, err := parser.TextToMetricFamilies(in) + if err != nil { + log.WithError(err).Errorln("error, reading text format failed") + return false, core.ErrKeyDecodingFile + } + for _, mf := range metricFamilies { + if mtrName == *mf.Name { + return true, nil + } + } + return false, err +} + +func getGaugeValue(metrics []byte, mtrName string) (float64, error) { + var parser expfmt.TextParser + in := bytes.NewReader(metrics) + metricFamilies, err := parser.TextToMetricFamilies(in) + if err != nil { + log.WithError(err).Errorln("error, reading text format failed") + return -1, core.ErrKeyDecodingFile + } + for _, mf := range metricFamilies { + if mtrName == *mf.Name { + if (*mf.Type).String() != strings.ToUpper(core.KeyMetricTypeGauge) { + return -1, core.ErrKeyInvalidType + } + return *mf.Metric[0].Gauge.Value, nil + } + } + return -1, core.ErrKeyNotFound +} diff --git a/test/integration/integration_test.go b/test/integration/integration_test.go index f5e966d..65d9395 100644 --- a/test/integration/integration_test.go +++ b/test/integration/integration_test.go @@ -3,370 +3,220 @@ package main import ( "context" "fmt" + "io/ioutil" "net/http" - "os" "path/filepath" "testing" "time" - "github.com/alecthomas/template" - "github.com/simelo/rextporter/src/config" "github.com/simelo/rextporter/src/exporter" - "github.com/simelo/rextporter/src/util" + "github.com/simelo/rextporter/src/tomlconfig" "github.com/simelo/rextporter/test/integration/testrand" log "github.com/sirupsen/logrus" "github.com/stretchr/testify/require" "github.com/stretchr/testify/suite" ) -const mainConfigFileContenTemplate = ` -serviceConfigTransport = "file" -# render a template with a portable path -servicesConfigPath = "{{.ServicesConfigPath}}" -metricsForServicesPath = "{{.MetricsForServicesPath}}" -` - -const servicesConfigFileContenTemplate = ` - # Service configuration.{{range .Services}} - [[services]] - name = "{{.Name}}" - metricsToForwardPath = "{{.ForwardPath}}" - 
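The new findMetric and getGaugeValue helpers above rely on expfmt.TextParser to turn a Prometheus text-format exposition into metric families keyed by name. A self-contained sketch of that parsing, using a made-up payload whose value mirrors the seq gauge asserted later in the suite:

package main

import (
	"fmt"
	"strings"

	dto "github.com/prometheus/client_model/go"
	"github.com/prometheus/common/expfmt"
)

func main() {
	// Illustrative payload only; the integration tests read the real body
	// from the rextporter /metrics handler.
	payload := `# HELP seq Head sequence number.
# TYPE seq gauge
seq 58894
`
	var parser expfmt.TextParser
	// TextToMetricFamilies returns map[string]*dto.MetricFamily keyed by metric name.
	families, err := parser.TextToMetricFamilies(strings.NewReader(payload))
	if err != nil {
		panic(err)
	}
	mf, ok := families["seq"]
	if !ok || mf.GetType() != dto.MetricType_GAUGE {
		panic("seq gauge not found")
	}
	fmt.Println(mf.GetMetric()[0].GetGauge().GetValue()) // 58894
}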
modes=[{{range .Modes}}"{{.}}" {{end}}] - scheme = "http" - port = {{.Port}} - basePath = "{{.BasePath}}" - authType = "CSRF" - tokenHeaderKey = "X-CSRF-Token" - genTokenEndpoint = "/api/v1/csrf" - tokenKeyFromEndpoint = "csrf_token" - - [services.location] - location = "localhost" - -{{end}} -` - -const metricsConfigFileContenTemplate = ` -# All metrics to be measured. -[[metrics]] - name = "open_connections_is_a_fake_name_for_test_purpose" - url = "/api/v1/health" - httpMethod = "GET" - path = "open_connections" - - [metrics.options] - type = "Gauge" - description = "Track the open connections in the system" -` - -const metricsForServicesConfFileContenTemplate = ` - serviceNameToMetricsConfPath = [{{range $key, $value := .}} - { {{$key}} = "{{$value}}" },{{end}} -] -` - -type Service struct { - Name string - Port uint16 - ForwardPath string - Modes []string - BasePath string -} - -type ServicesConfData struct { - Services []Service -} - type HealthSuit struct { suite.Suite - require *require.Assertions - mainConfFilePath string - mainConfTmplContent string - servicesConfFilePath string - servicesConfData ServicesConfData - metricsConfTmplContent string - metricsConfFilePath string - metricsForServiceConfTmplContent string - metricsForServicesConfData map[string]string - metricsForServicesConfFilePath string -} - -var fakeNodePort uint16 - -func createConfigFile(tmplContent, path string, data interface{}) (err error) { - generalScopeErr := "error creating config file for integration test" - if len(tmplContent) == 0 || len(path) == 0 { - return err - } - tmpl := template.New("fileConfig") - var templateEngine *template.Template - if templateEngine, err = tmpl.Parse(tmplContent); err != nil { - errCause := "error parsing config: " + err.Error() - return util.ErrorFromThisScope(errCause, generalScopeErr) - } - var configFile *os.File - if configFile, err = os.Create(path); err != nil { - errCause := "error creating config file: " + err.Error() - return util.ErrorFromThisScope(errCause, generalScopeErr) - } - if err = templateEngine.Execute(configFile, data); err != nil { - errCause := "error writing config file: " + err.Error() - return util.ErrorFromThisScope(errCause, generalScopeErr) - } - return err + require *require.Assertions + rextporterEndpoint string + rextporterServer *http.Server +} + +func (suite HealthSuit) rootConf(fakeNodePort uint16) tomlconfig.RootConfig { + root := tomlconfig.RootConfig{} + mtr1 := tomlconfig.Metric{ + Name: "burnFactor", + Path: "/connections/unconfirmed_verify_transaction/burn_factor", + // NodeSolverType: "ns0132", + Options: tomlconfig.MetricOptions{Type: "Gauge", Description: "This is a basic description"}, + } + mtr2 := tomlconfig.Metric{ + Name: "seq", + Path: "/blockchain/head/seq", + // NodeSolverType: "ns0132", + Options: tomlconfig.MetricOptions{Type: "Gauge", Description: "This is a basic description"}, + } + res1 := tomlconfig.ResourcePath{ + Name: "connections", + Path: "/api/v1/network/connections", + PathType: "rest_api", + NodeSolverType: "jsonPath", + MetricNames: []string{mtr1.Name}, + } + res2 := tomlconfig.ResourcePath{ + Name: "Fordwader", + Path: "/metrics2", + PathType: "metrics_fordwader", + } + res3 := tomlconfig.ResourcePath{ + Name: "health", + Path: "/api/v1/health", + PathType: "rest_api", + NodeSolverType: "jsonPath", + MetricNames: []string{mtr2.Name}, + } + srv1 := tomlconfig.Service{ + Name: "MySuperServer", + Protocol: "http", + Port: fakeNodePort, + BasePath: "", + AuthType: "CSRF", + TokenHeaderKey: "X-CSRF-Token", + 
GenTokenEndpoint: "/api/v1/csrf", + TokenKeyFromEndpoint: "csrf_token", + Location: tomlconfig.Server{Location: "localhost"}, + Metrics: []tomlconfig.Metric{mtr1, mtr2}, + ResourcePaths: []tomlconfig.ResourcePath{res1, res2, res3}, + } + root.Services = []tomlconfig.Service{srv1} + return root } -func (suite *HealthSuit) createServicesConfPath() (err error) { - generalScopeErr := "error creating service config file for integration test" - if err = createConfigFile(servicesConfigFileContenTemplate, suite.servicesConfFilePath, suite.servicesConfData); err != nil { - errCause := "error writing service config file: " + err.Error() - return util.ErrorFromThisScope(errCause, generalScopeErr) - } - return err -} +func (suite *HealthSuit) SetupSuite() { + suite.require = require.New(suite.T()) + mainConfigDir := testrand.RFolderPath() + err := createDirectoriesWithFullDepth([]string{mainConfigDir}) + suite.Nil(err) + mainConfFilePath := filepath.Join(mainConfigDir, testrand.RName()) + fakeNodePort, err := readListenPortFromFile() + suite.Nil(err) + err = createFullConfig(mainConfFilePath, suite.rootConf(fakeNodePort)) + suite.require.Nil(err) -func (suite *HealthSuit) createMainConfPath(tmplData interface{}) (err error) { - generalScopeErr := "error creating main config file for integration test" - if err = createConfigFile(suite.mainConfTmplContent, suite.mainConfFilePath, tmplData); err != nil { - errCause := "error writing service config file: " + err.Error() - return util.ErrorFromThisScope(errCause, generalScopeErr) - } - return err -} + // NOTE(denisacostaq@gmail.com): Giving -func (suite *HealthSuit) createMetricsForServicesConfPath() (err error) { - return createConfigFile( - suite.metricsForServiceConfTmplContent, - suite.metricsForServicesConfFilePath, - suite.metricsForServicesConfData) + conf, err := getConfig(mainConfFilePath) + suite.require.Nil(err) + suite.require.False(conf.Validate()) + listenPort := testrand.RandomPort() + suite.rextporterEndpoint = fmt.Sprintf("http://localhost:%d%s", listenPort, "/metdddrics2") + suite.rextporterServer = exporter.MustExportMetrics("/metdddrics2", listenPort, conf) + suite.require.NotNil(suite.rextporterServer) + // NOTE(denisacostaq@gmail.com): Wait for server starts + time.Sleep(time.Second * 2) } -func (suite *HealthSuit) createMetricsConfigPaths() (err error) { - return createConfigFile( - suite.metricsConfTmplContent, - suite.metricsConfFilePath, - nil) +func (suite *HealthSuit) TearDownSuite() { + log.Info("Shutting down server...") + suite.Nil(suite.rextporterServer.Shutdown(context.Context(nil))) } -func (suite *HealthSuit) createMainConfig() { - generalScopeErr := "error creating main config file for integration test" - type mainConfigData struct { - ServicesConfigPath string - MetricsForServicesPath string - } - confData := mainConfigData{ - ServicesConfigPath: suite.servicesConfFilePath, - MetricsForServicesPath: suite.metricsForServicesConfFilePath, - } - if err := suite.createMainConfPath(confData); err != nil { - errCause := "error writing main config file: " + err.Error() - suite.Nil(util.ErrorFromThisScope(errCause, generalScopeErr)) - } - if err := suite.createServicesConfPath(); err != nil { - errCause := "error writing services config file: " + err.Error() - suite.Nil(util.ErrorFromThisScope(errCause, generalScopeErr)) - } - if err := suite.createMetricsConfigPaths(); err != nil { - errCause := "error writing my monitored server metrics config file: " + err.Error() - suite.Nil(util.ErrorFromThisScope(errCause, 
generalScopeErr)) - } - if err := suite.createMetricsForServicesConfPath(); err != nil { - errCause := "error writing metrics for service config file: " + err.Error() - suite.Nil(util.ErrorFromThisScope(errCause, generalScopeErr)) - } +func TestSkycoinHealthSuit(t *testing.T) { + suite.Run(t, new(HealthSuit)) } -func (suite *HealthSuit) createDirectoriesWithFullDepth(dirs []string) { - for _, dir := range dirs { - suite.Nil(os.MkdirAll(dir, 0750)) - } -} +func (suite *HealthSuit) TestDefaultMetricsArePresent() { + // NOTE(denisacostaq@gmail.com): Giving -func readListenPortFromFile() (port uint16, err error) { - var path string - path, err = testrand.FilePathToSharePort() - var file *os.File - file, err = os.OpenFile(path, os.O_RDONLY, 0400) - if err != nil { - log.WithError(err).Errorln("error opening file") - return 0, err - } - defer file.Close() - _, err = fmt.Fscanf(file, "%d", &port) - if err != nil { - log.WithError(err).Errorln("error reading file") - return port, err - } - return port, err -} + // NOTE(denisacostaq@gmail.com): When + resp, err := http.Get(suite.rextporterEndpoint) -func TestSkycoinHealthSuit(t *testing.T) { - suite.Run(t, new(HealthSuit)) + // NOTE(denisacostaq@gmail.com): Assert + suite.Nil(err) + suite.Equal(http.StatusOK, resp.StatusCode) + suite.NotNil(resp.Body) + var respBody []byte + respBody, err = ioutil.ReadAll(resp.Body) + suite.Nil(err) + suite.NotNil(respBody) + mtrs := []string{ + "scrape_duration_seconds", + "scrape_samples_scraped", + "data_source_response_duration_seconds", + "data_source_scrape_duration_seconds", + "data_source_scrape_samples_scraped", + "fordwader_response_duration_seconds", + "fordwader_scrape_duration_seconds"} + for _, mtr := range mtrs { + var found bool + found, err = findMetric(respBody, mtr) + suite.Nil(err) + suite.True(found) + } + var found bool + found, err = findMetric(respBody, "fordwader_scrape_duration_secondss") + suite.Nil(err) + suite.False(found) } -func (suite *HealthSuit) SetupSuite() { - require := require.New(suite.T()) - var port uint16 - var err error - port, err = readListenPortFromFile() - require.Nil(err) - fakeNodePort = port -} +func (suite *HealthSuit) TestFordwadedMetricIsPresent() { + // NOTE(denisacostaq@gmail.com): Giving -func (suite *HealthSuit) SetupTest() { - suite.callSetUpTest() -} + // NOTE(denisacostaq@gmail.com): When + resp, err := http.Get(suite.rextporterEndpoint) -func (suite *HealthSuit) callSetUpTest() { - suite.metricsForServicesConfData = - map[string]string{ - "myMonitoredServer": suite.metricsConfFilePath, - "myMonitoredAsProxyServer": suite.metricsConfFilePath} - suite.servicesConfData = ServicesConfData{ - Services: []Service{Service{Name: "myMonitoredServer", Port: fakeNodePort, Modes: []string{"rest_api"}, BasePath: ""}}, - } + // NOTE(denisacostaq@gmail.com): Assert + suite.Nil(err) + suite.Equal(http.StatusOK, resp.StatusCode) + suite.NotNil(resp.Body) + var respBody []byte + respBody, err = ioutil.ReadAll(resp.Body) + suite.Nil(err) + suite.NotNil(respBody) + var found bool + found, err = findMetric(respBody, "go_memstats_mallocs_total1a18ac9b29c6") + suite.Nil(err) + suite.True(found) } -func (suite *HealthSuit) TestMetricMonitorHealth() { +func (suite *HealthSuit) TestConfiguredMetricIsPresent() { // NOTE(denisacostaq@gmail.com): Giving - suite.require = require.New(suite.T()) - mainConfigDir := testrand.RFolderPath() - servicesDir := testrand.RFolderPath() - myMonitoredServerMetricsDir := testrand.RFolderPath() - metricsForServicesDir := testrand.RFolderPath() - 
port := testrand.RandomPort() - suite.createDirectoriesWithFullDepth([]string{mainConfigDir, servicesDir, myMonitoredServerMetricsDir, metricsForServicesDir}) - suite.mainConfFilePath = filepath.Join(mainConfigDir, testrand.RName()) - suite.servicesConfFilePath = filepath.Join(servicesDir, testrand.RName()) - suite.metricsConfFilePath = filepath.Join(myMonitoredServerMetricsDir, testrand.RName()) - suite.metricsForServicesConfFilePath = filepath.Join(metricsForServicesDir, testrand.RName()) - suite.mainConfTmplContent = mainConfigFileContenTemplate - suite.metricsConfTmplContent = metricsConfigFileContenTemplate - suite.metricsForServiceConfTmplContent = metricsForServicesConfFileContenTemplate - suite.callSetUpTest() - suite.createMainConfig() - conf := config.MustConfigFromFileSystem(suite.mainConfFilePath) - srv := exporter.MustExportMetrics("/metrics2", port, conf) - suite.require.NotNil(srv) - // NOTE(denisacostaq@gmail.com): Wait for server starts - time.Sleep(time.Second * 2) // NOTE(denisacostaq@gmail.com): When - resp, err := http.Get(fmt.Sprintf("http://127.0.0.1:%d/metrics2", port)) + resp, err := http.Get(suite.rextporterEndpoint) // NOTE(denisacostaq@gmail.com): Assert - defer func() { suite.Nil(resp.Body.Close()) }() suite.Nil(err) suite.Equal(http.StatusOK, resp.StatusCode) - suite.Len(conf.Services, 1) - suite.Len(conf.Services[0].Metrics, 1) - metricName := conf.Services[0].Metrics[0].Name - suite.Equal(metricName, "open_connections_is_a_fake_name_for_test_purpose") - var usingAVariableToMakeLinterHappy = context.Context(nil) - suite.Nil(srv.Shutdown(usingAVariableToMakeLinterHappy)) + suite.NotNil(resp.Body) + var respBody []byte + respBody, err = ioutil.ReadAll(resp.Body) + suite.Nil(err) + suite.NotNil(respBody) + var found bool + found, err = findMetric(respBody, "seq") + suite.Nil(err) + suite.True(found) } -// func (suite *HealthSuit) TestMetricMonitorAsProxy() { -// // NOTE(denisacostaq@gmail.com): Giving -// suite.require = require.New(suite.T()) -// port := testrand.RandomPort() -// mainConfigDir := testrand.RFolderPath() -// servicesDir := testrand.RFolderPath() -// myMonitoredServerMetricsDir := testrand.RFolderPath() -// metricsForServicesDir := testrand.RFolderPath() -// suite.createDirectoriesWithFullDepth([]string{mainConfigDir, servicesDir, myMonitoredServerMetricsDir, metricsForServicesDir}) -// suite.mainConfFilePath = filepath.Join(mainConfigDir, testrand.RName()) -// suite.servicesConfFilePath = filepath.Join(servicesDir, testrand.RName()) -// suite.metricsConfFilePath = filepath.Join(myMonitoredServerMetricsDir, testrand.RName()) -// suite.metricsForServicesConfFilePath = filepath.Join(metricsForServicesDir, testrand.RName()) -// suite.mainConfTmplContent = mainConfigFileContenTemplate -// suite.metricsConfTmplContent = metricsConfigFileContenTemplate -// suite.metricsForServiceConfTmplContent = metricsForServicesConfFileContenTemplate -// suite.callSetUpTest() -// suite.metricsForServicesConfData = -// map[string]string{ -// "myMonitoredServer": suite.metricsConfFilePath, -// "myMonitoredAsProxyServer": suite.metricsConfFilePath} -// suite.servicesConfData = ServicesConfData{ -// Services: []Service{Service{ -// Name: "myMonitoredAsProxyServer", -// Port: fakeNodePort, -// Modes: []string{"forward_metrics"}, -// ForwardPath: "/metrics"}, -// }, -// } -// suite.createMainConfig() -// conf := config.MustConfigFromFileSystem(suite.mainConfFilePath) -// srv := exporter.MustExportMetrics("/metrics4", port, conf) -// suite.require.NotNil(srv) - -// // 
NOTE(denisacostaq@gmail.com): Wait for server starts -// time.Sleep(time.Second * 2) +func (suite *HealthSuit) TestConfiguredMetricValue() { + // NOTE(denisacostaq@gmail.com): Giving -// // NOTE(denisacostaq@gmail.com): When -// var resp *http.Response -// resp, err := http.Get(fmt.Sprintf("http://127.0.0.1:%d/metrics4", port)) -// suite.require.NotNil(resp) + // NOTE(denisacostaq@gmail.com): When + resp, err := http.Get(suite.rextporterEndpoint) -// // NOTE(denisacostaq@gmail.com): Assert -// suite.Nil(err) -// defer func() { suite.Nil(resp.Body.Close()) }() -// suite.Equal(http.StatusOK, resp.StatusCode) -// suite.require.Len(conf.Services, 1) -// suite.require.Len(conf.Services[0].Metrics, 0) -// metricName := conf.Services[0].Name + "_skycoin_wallet2_seq2" -// suite.require.Equal(metricName, "myMonitoredAsProxyServer_skycoin_wallet2_seq2") -// var usingAVariableToMakeLinterHappy = context.Context(nil) -// suite.require.Nil(srv.Shutdown(usingAVariableToMakeLinterHappy)) -// } + // NOTE(denisacostaq@gmail.com): Assert + suite.Nil(err) + suite.Equal(http.StatusOK, resp.StatusCode) + suite.NotNil(resp.Body) + var respBody []byte + respBody, err = ioutil.ReadAll(resp.Body) + suite.Nil(err) + suite.NotNil(respBody) + var val float64 + val, err = getGaugeValue(respBody, "seq") + suite.Nil(err) + suite.Equal(float64(58894), val) +} -// func (suite *HealthSuit) TestMetricMonitorAsProxyWithNonMetricsEndpoint() { -// // NOTE(denisacostaq@gmail.com): Giving -// suite.require = require.New(suite.T()) -// port := testrand.RandomPort() -// mainConfigDir := testrand.RFolderPath() -// servicesDir := testrand.RFolderPath() -// myMonitoredServerMetricsDir := testrand.RFolderPath() -// metricsForServicesDir := testrand.RFolderPath() -// suite.createDirectoriesWithFullDepth([]string{mainConfigDir, servicesDir, myMonitoredServerMetricsDir, metricsForServicesDir}) -// suite.mainConfFilePath = filepath.Join(mainConfigDir, testrand.RName()) -// suite.servicesConfFilePath = filepath.Join(servicesDir, testrand.RName()+".toml") -// suite.metricsConfFilePath = filepath.Join(myMonitoredServerMetricsDir, testrand.RName()+".toml") -// suite.metricsForServicesConfFilePath = filepath.Join(metricsForServicesDir, testrand.RName()+".toml") -// suite.mainConfTmplContent = mainConfigFileContenTemplate -// suite.callSetUpTest() -// suite.metricsForServicesConfData = -// map[string]string{ -// "myMonitoredAsProxyServer": suite.metricsConfFilePath} -// suite.servicesConfData = ServicesConfData{ -// Services: []Service{Service{ -// Name: "myMonitoredAsProxyServer", -// Port: fakeNodePort, -// Modes: []string{"forward_metrics"}, -// BasePath: "/api/v1/health", -// ForwardPath: "/metrics"}, -// }, -// } -// suite.metricsConfTmplContent = metricsConfigFileContenTemplate -// suite.metricsForServiceConfTmplContent = metricsForServicesConfFileContenTemplate -// suite.createMainConfig() -// conf := config.MustConfigFromFileSystem(suite.mainConfFilePath) -// srv := exporter.MustExportMetrics("/metrics5", port, conf) -// suite.require.NotNil(srv) -// // NOTE(denisacostaq@gmail.com): Wait for server starts -// time.Sleep(time.Second * 2) +func (suite *HealthSuit) TestConfiguredMetricIsNotPresentBecauseServerEndpointInaccesible() { + // NOTE(denisacostaq@gmail.com): Giving -// // NOTE(denisacostaq@gmail.com): When -// resp, err := http.Get(fmt.Sprintf("http://127.0.0.1:%d/metrics5", port)) + // NOTE(denisacostaq@gmail.com): When + resp, err := http.Get(suite.rextporterEndpoint) -// // NOTE(denisacostaq@gmail.com): Assert -// 
suite.Nil(err) -// defer func() { suite.Nil(resp.Body.Close()) }() -// suite.Equal(http.StatusOK, resp.StatusCode) -// suite.require.Len(conf.Services, 1) -// suite.require.Len(conf.Services[0].Metrics, 0) -// metricName := "skycoin_wallet2_seq2" -// suite.Equal(metricName, "skycoin_wallet2_seq2") -// var usingAVariableToMakeLinterHappy = context.Context(nil) -// suite.require.Nil(srv.Shutdown(usingAVariableToMakeLinterHappy)) -// } + // NOTE(denisacostaq@gmail.com): Assert + suite.Nil(err) + suite.Equal(http.StatusOK, resp.StatusCode) + suite.NotNil(resp.Body) + var respBody []byte + respBody, err = ioutil.ReadAll(resp.Body) + suite.Nil(err) + suite.NotNil(respBody) + var found bool + found, err = findMetric(respBody, "burnFactor") + suite.Nil(err) + suite.False(found) +} diff --git a/test/integration/system_setup.go b/test/integration/system_setup.go new file mode 100644 index 0000000..df94c52 --- /dev/null +++ b/test/integration/system_setup.go @@ -0,0 +1,209 @@ +package main + +import ( + "errors" + "fmt" + "os" + "path/filepath" + + "github.com/alecthomas/template" + "github.com/simelo/rextporter/src/core" + "github.com/simelo/rextporter/src/toml2config" + "github.com/simelo/rextporter/src/tomlconfig" + "github.com/simelo/rextporter/test/integration/testrand" + log "github.com/sirupsen/logrus" +) + +const mainConfigFileContenTemplate = `configTransport = "file" # "file" | "consulCatalog" +# render a template with a portable path +servicesConfigPath = "{{.ServicesConfPath}}" +metricsForServicesConfigPath = "{{.MetricsForServicesConfPath}}" +resourcePathsForServicesConfPath = "{{.ResourcePathsForServicesConfPath}}" +` + +const servicesConfigFileContenTemplate = `# Service configuration.{{range .Services}} +[[services]] + name = "{{.Name}}" + protocol = "http" + port = {{.Port}} + basePath = "{{.BasePath}}" + authType = "CSRF" + tokenHeaderKey = "X-CSRF-Token" + genTokenEndpoint = "/api/v1/csrf" + tokenKeyFromEndpoint = "csrf_token" + + [services.location] + location = "localhost" + +{{end}} +` + +const metricsConfigFileContenTemplate = `# All metrics to be measured.{{range .Metrics}} +[[metrics]] + name = "{{.Name}}" + path = "{{.Path}}" + nodeSolver = "ns0132" # FIXME(denisacostaq@gmail.com): make portable + + [metrics.options] + type = "{{.Options.Type}}" + description = "{{.Options.Description}}" + +{{end}} +` + +const metricsForServicesConfFileContenTemplate = `metricPathsForServicesConfig = [{{range $key, $value := .}} + { {{$key}} = "{{$value}}" },{{end}} +] +` + +const resourceForServicesConfFileContentTemplate = `resourcePathsForServicesConfig = [{{range $key, $value := .}} + { {{$key}} = "{{$value}}" },{{end}} +] +` + +const serviceResourcePathsFileContentTemplate = `{{range .}}[[ResourcePaths]] + Name = "{{.Name}}" + Path = "{{.Path}}" + PathType = "{{.PathType}}"{{if ne .NodeSolverType ""}} + nodeSolverType = "{{.NodeSolverType}}"{{end}}{{if gt (len .MetricNames) 0}} {{$len := (len .MetricNames)}} + MetricNames = [{{range $i, $v := .MetricNames}}"{{$v}}"{{if lt (inc $i) $len}}, {{end}}{{end}}] +{{end}} + +{{end}} +` + +func createConfigFile(tmplContent, path string, data interface{}) (err error) { + if len(tmplContent) == 0 || len(path) == 0 { + log.Errorln("template content should not be empty") + return core.ErrKeyEmptyValue + } + tmpl := template.New("fileConfig") + var templateEngine *template.Template + funcs := template.FuncMap{"inc": func(i int) int { return i + 1 }} + if templateEngine, err = tmpl.New("").Funcs(funcs).Parse(tmplContent); err != nil { + 
log.WithField("template", tmplContent).Errorln("Can not parse template content") + return core.ErrKeyDecodingFile + } + var configFile *os.File + if configFile, err = os.Create(path); err != nil { + log.WithFields(log.Fields{"err": err, "path": path}).Errorln("error creating config file") + return ErrKeyWritingFsStructure + } + if err = templateEngine.Execute(configFile, data); err != nil { + log.WithFields(log.Fields{"err": err, "data": data}).Errorln("error writing config file") + return ErrKeyWritingFile + } + return nil +} + +var ( + // ErrKeyWritingFsStructure tells about a fs change(creating a file and/or folder) + ErrKeyWritingFsStructure = errors.New("Error creating file/folder") + // ErrKeyWritingFile tells about a write error in a file + ErrKeyWritingFile = errors.New("Error writing file") +) + +func createFullConfig(mainConfFilePath string, conf tomlconfig.RootConfig) (err error) { + srvsConfDir := testrand.RFolderPath() + mtrs4ServiceConfDir := testrand.RFolderPath() + res4ServiceConfDir := testrand.RFolderPath() + dirs := []string{srvsConfDir, mtrs4ServiceConfDir, res4ServiceConfDir} + if err = createDirectoriesWithFullDepth(dirs); err != nil { + log.WithError(err).Errorln("error creating directory") + return ErrKeyWritingFsStructure + } + srvsConfPath := filepath.Join(srvsConfDir, testrand.RName()) + mtrs4ServiceConfPath := filepath.Join(mtrs4ServiceConfDir, testrand.RName()) + res4ServiceConfPath := filepath.Join(res4ServiceConfDir, testrand.RName()) + type mainConfigData struct { + ServicesConfPath string + MetricsForServicesConfPath string + ResourcePathsForServicesConfPath string + } + confData := mainConfigData{ + ServicesConfPath: srvsConfPath, + MetricsForServicesConfPath: mtrs4ServiceConfPath, + ResourcePathsForServicesConfPath: res4ServiceConfPath, + } + if err = createConfigFile(mainConfigFileContenTemplate, mainConfFilePath, confData); err != nil { + log.Errorln("error writing main config") + err = ErrKeyWritingFsStructure + } + if err = createConfigFile(servicesConfigFileContenTemplate, srvsConfPath, conf); err != nil { + log.Errorln("error writing service config") + err = ErrKeyWritingFsStructure + } + mtrs4Srvs := make(map[string]string) + res4Srvs := make(map[string]string) + for _, srv := range conf.Services { + mtrsConfDir := testrand.RFolderPath() + res4SrvsConfDir := testrand.RFolderPath() + dirs = []string{mtrsConfDir, res4SrvsConfDir} + if err = createDirectoriesWithFullDepth(dirs); err != nil { + log.WithError(err).Errorln("error creating directory") + return ErrKeyWritingFsStructure + } + mtrsConfPath := filepath.Join(mtrsConfDir, testrand.RName()) + res4SrvsConfPath := filepath.Join(res4SrvsConfDir, testrand.RName()) + mtrs4Srvs[srv.Name] = mtrsConfPath + res4Srvs[srv.Name] = res4SrvsConfPath + if err = createConfigFile(metricsConfigFileContenTemplate, mtrsConfPath, srv); err != nil { + log.Errorln("error writing metrics config") + err = ErrKeyWritingFsStructure + } + if err = createConfigFile(serviceResourcePathsFileContentTemplate, res4SrvsConfPath, srv.ResourcePaths); err != nil { + log.Errorln("error writing service resource paths config") + err = ErrKeyWritingFsStructure + } + } + if err = createConfigFile(resourceForServicesConfFileContentTemplate, res4ServiceConfPath, res4Srvs); err != nil { + log.Errorln("error writing resources paths for services config") + err = ErrKeyWritingFsStructure + } + if err = createConfigFile(metricsForServicesConfFileContenTemplate, mtrs4ServiceConfPath, mtrs4Srvs); err != nil { + log.Errorln("error writing metrics 
for service config") + err = ErrKeyWritingFsStructure + } + return err +} + +func createDirectoriesWithFullDepth(dirs []string) (err error) { + for _, dir := range dirs { + if err = os.MkdirAll(dir, 0750); err != nil { + log.WithFields(log.Fields{"err": err, "dir": dir}).Errorln("Error creating directory") + err = ErrKeyWritingFsStructure + } + } + return err +} + +func getConfig(mainConfFilePath string) (rootConf core.RextRoot, err error) { + rawConf, err := tomlconfig.ReadConfigFromFileSystem(mainConfFilePath) + if err != nil { + log.WithField("path", mainConfFilePath).Errorln("error reading config from file system") + return rootConf, err + } + if rootConf, err = toml2config.Fill(rawConf); err != nil { + log.WithField("conf", rawConf).Errorln("error filling config info") + return rootConf, err + } + return rootConf, err +} + +func readListenPortFromFile() (port uint16, err error) { + var path string + path, err = testrand.FilePathToSharePort() + var file *os.File + file, err = os.OpenFile(path, os.O_RDONLY, 0400) + if err != nil { + log.WithError(err).Errorln("error opening file") + return 0, err + } + defer file.Close() + _, err = fmt.Fscanf(file, "%d", &port) + if err != nil { + log.WithError(err).Errorln("error reading file") + return port, err + } + return port, err +} diff --git a/vendor/github.com/stretchr/objx/.codeclimate.yml b/vendor/github.com/stretchr/objx/.codeclimate.yml new file mode 100644 index 0000000..010d4cc --- /dev/null +++ b/vendor/github.com/stretchr/objx/.codeclimate.yml @@ -0,0 +1,13 @@ +engines: + gofmt: + enabled: true + golint: + enabled: true + govet: + enabled: true + +exclude_patterns: +- ".github/" +- "vendor/" +- "codegen/" +- "doc.go" diff --git a/vendor/github.com/stretchr/objx/.gitignore b/vendor/github.com/stretchr/objx/.gitignore new file mode 100644 index 0000000..ea58090 --- /dev/null +++ b/vendor/github.com/stretchr/objx/.gitignore @@ -0,0 +1,11 @@ +# Binaries for programs and plugins +*.exe +*.dll +*.so +*.dylib + +# Test binary, build with `go test -c` +*.test + +# Output of the go coverage tool, specifically when used with LiteIDE +*.out diff --git a/vendor/github.com/stretchr/objx/.travis.yml b/vendor/github.com/stretchr/objx/.travis.yml new file mode 100644 index 0000000..a63efa5 --- /dev/null +++ b/vendor/github.com/stretchr/objx/.travis.yml @@ -0,0 +1,25 @@ +language: go +go: + - 1.8 + - 1.9 + - tip + +env: + global: + - CC_TEST_REPORTER_ID=68feaa3410049ce73e145287acbcdacc525087a30627f96f04e579e75bd71c00 + +before_script: + - curl -L https://codeclimate.com/downloads/test-reporter/test-reporter-latest-linux-amd64 > ./cc-test-reporter + - chmod +x ./cc-test-reporter + - ./cc-test-reporter before-build + +install: +- go get github.com/go-task/task/cmd/task + +script: +- task dl-deps +- task lint +- task test-coverage + +after_script: + - ./cc-test-reporter after-build --exit-code $TRAVIS_TEST_RESULT diff --git a/vendor/github.com/stretchr/objx/Gopkg.lock b/vendor/github.com/stretchr/objx/Gopkg.lock new file mode 100644 index 0000000..eebe342 --- /dev/null +++ b/vendor/github.com/stretchr/objx/Gopkg.lock @@ -0,0 +1,30 @@ +# This file is autogenerated, do not edit; changes may be undone by the next 'dep ensure'. 
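The createConfigFile helper in system_setup.go registers an "inc" function so serviceResourcePathsFileContentTemplate can emit a comma after every metric name except the last. A standalone sketch of that trick, using the standard text/template package (the project imports github.com/alecthomas/template, which mirrors the same API); the template text here is simplified for illustration:

package main

import (
	"os"
	"text/template"
)

func main() {
	// "inc" bumps the zero-based range index so it can be compared against len.
	funcs := template.FuncMap{"inc": func(i int) int { return i + 1 }}
	const tmpl = `MetricNames = [{{$len := len .}}{{range $i, $v := .}}"{{$v}}"{{if lt (inc $i) $len}}, {{end}}{{end}}]
`
	t := template.Must(template.New("resourcePaths").Funcs(funcs).Parse(tmpl))
	// Renders: MetricNames = ["burnFactor", "seq"]
	if err := t.Execute(os.Stdout, []string{"burnFactor", "seq"}); err != nil {
		panic(err)
	}
}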
+ + +[[projects]] + name = "github.com/davecgh/go-spew" + packages = ["spew"] + revision = "346938d642f2ec3594ed81d874461961cd0faa76" + version = "v1.1.0" + +[[projects]] + name = "github.com/pmezard/go-difflib" + packages = ["difflib"] + revision = "792786c7400a136282c1664665ae0a8db921c6c2" + version = "v1.0.0" + +[[projects]] + name = "github.com/stretchr/testify" + packages = [ + "assert", + "require" + ] + revision = "b91bfb9ebec76498946beb6af7c0230c7cc7ba6c" + version = "v1.2.0" + +[solve-meta] + analyzer-name = "dep" + analyzer-version = 1 + inputs-digest = "2d160a7dea4ffd13c6c31dab40373822f9d78c73beba016d662bef8f7a998876" + solver-name = "gps-cdcl" + solver-version = 1 diff --git a/vendor/github.com/stretchr/objx/Gopkg.toml b/vendor/github.com/stretchr/objx/Gopkg.toml new file mode 100644 index 0000000..d70f157 --- /dev/null +++ b/vendor/github.com/stretchr/objx/Gopkg.toml @@ -0,0 +1,8 @@ +[prune] + unused-packages = true + non-go = true + go-tests = true + +[[constraint]] + name = "github.com/stretchr/testify" + version = "~1.2.0" diff --git a/vendor/github.com/stretchr/objx/LICENSE b/vendor/github.com/stretchr/objx/LICENSE new file mode 100644 index 0000000..44d4d9d --- /dev/null +++ b/vendor/github.com/stretchr/objx/LICENSE @@ -0,0 +1,22 @@ +The MIT License + +Copyright (c) 2014 Stretchr, Inc. +Copyright (c) 2017-2018 objx contributors + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/vendor/github.com/stretchr/objx/README.md b/vendor/github.com/stretchr/objx/README.md new file mode 100644 index 0000000..be5750c --- /dev/null +++ b/vendor/github.com/stretchr/objx/README.md @@ -0,0 +1,80 @@ +# Objx +[![Build Status](https://travis-ci.org/stretchr/objx.svg?branch=master)](https://travis-ci.org/stretchr/objx) +[![Go Report Card](https://goreportcard.com/badge/github.com/stretchr/objx)](https://goreportcard.com/report/github.com/stretchr/objx) +[![Maintainability](https://api.codeclimate.com/v1/badges/1d64bc6c8474c2074f2b/maintainability)](https://codeclimate.com/github/stretchr/objx/maintainability) +[![Test Coverage](https://api.codeclimate.com/v1/badges/1d64bc6c8474c2074f2b/test_coverage)](https://codeclimate.com/github/stretchr/objx/test_coverage) +[![Sourcegraph](https://sourcegraph.com/github.com/stretchr/objx/-/badge.svg)](https://sourcegraph.com/github.com/stretchr/objx) +[![GoDoc](https://godoc.org/github.com/stretchr/objx?status.svg)](https://godoc.org/github.com/stretchr/objx) + +Objx - Go package for dealing with maps, slices, JSON and other data. 
+ +Get started: + +- Install Objx with [one line of code](#installation), or [update it with another](#staying-up-to-date) +- Check out the API Documentation http://godoc.org/github.com/stretchr/objx + +## Overview +Objx provides the `objx.Map` type, which is a `map[string]interface{}` that exposes a powerful `Get` method (among others) that allows you to easily and quickly get access to data within the map, without having to worry too much about type assertions, missing data, default values etc. + +### Pattern +Objx uses a preditable pattern to make access data from within `map[string]interface{}` easy. Call one of the `objx.` functions to create your `objx.Map` to get going: + + m, err := objx.FromJSON(json) + +NOTE: Any methods or functions with the `Must` prefix will panic if something goes wrong, the rest will be optimistic and try to figure things out without panicking. + +Use `Get` to access the value you're interested in. You can use dot and array +notation too: + + m.Get("places[0].latlng") + +Once you have sought the `Value` you're interested in, you can use the `Is*` methods to determine its type. + + if m.Get("code").IsStr() { // Your code... } + +Or you can just assume the type, and use one of the strong type methods to extract the real value: + + m.Get("code").Int() + +If there's no value there (or if it's the wrong type) then a default value will be returned, or you can be explicit about the default value. + + Get("code").Int(-1) + +If you're dealing with a slice of data as a value, Objx provides many useful methods for iterating, manipulating and selecting that data. You can find out more by exploring the index below. + +### Reading data +A simple example of how to use Objx: + + // Use MustFromJSON to make an objx.Map from some JSON + m := objx.MustFromJSON(`{"name": "Mat", "age": 30}`) + + // Get the details + name := m.Get("name").Str() + age := m.Get("age").Int() + + // Get their nickname (or use their name if they don't have one) + nickname := m.Get("nickname").Str(name) + +### Ranging +Since `objx.Map` is a `map[string]interface{}` you can treat it as such. For example, to `range` the data, do what you would expect: + + m := objx.MustFromJSON(json) + for key, value := range m { + // Your code... + } + +## Installation +To install Objx, use go get: + + go get github.com/stretchr/objx + +### Staying up to date +To update Objx to the latest version, run: + + go get -u github.com/stretchr/objx + +### Supported go versions +We support the lastest two major Go versions, which are 1.8 and 1.9 at the moment. + +## Contributing +Please feel free to submit issues, fork the repository and send pull requests! diff --git a/vendor/github.com/stretchr/objx/Taskfile.yml b/vendor/github.com/stretchr/objx/Taskfile.yml new file mode 100644 index 0000000..f803564 --- /dev/null +++ b/vendor/github.com/stretchr/objx/Taskfile.yml @@ -0,0 +1,32 @@ +default: + deps: [test] + +dl-deps: + desc: Downloads cli dependencies + cmds: + - go get -u github.com/golang/lint/golint + - go get -u github.com/golang/dep/cmd/dep + +update-deps: + desc: Updates dependencies + cmds: + - dep ensure + - dep ensure -update + +lint: + desc: Runs golint + cmds: + - go fmt $(go list ./... | grep -v /vendor/) + - go vet $(go list ./... | grep -v /vendor/) + - golint $(ls *.go | grep -v "doc.go") + silent: true + +test: + desc: Runs go tests + cmds: + - go test -race . + +test-coverage: + desc: Runs go tests and calucates test coverage + cmds: + - go test -coverprofile=c.out . 
diff --git a/vendor/github.com/stretchr/objx/accessors.go b/vendor/github.com/stretchr/objx/accessors.go new file mode 100644 index 0000000..204356a --- /dev/null +++ b/vendor/github.com/stretchr/objx/accessors.go @@ -0,0 +1,148 @@ +package objx + +import ( + "regexp" + "strconv" + "strings" +) + +// arrayAccesRegexString is the regex used to extract the array number +// from the access path +const arrayAccesRegexString = `^(.+)\[([0-9]+)\]$` + +// arrayAccesRegex is the compiled arrayAccesRegexString +var arrayAccesRegex = regexp.MustCompile(arrayAccesRegexString) + +// Get gets the value using the specified selector and +// returns it inside a new Obj object. +// +// If it cannot find the value, Get will return a nil +// value inside an instance of Obj. +// +// Get can only operate directly on map[string]interface{} and []interface. +// +// Example +// +// To access the title of the third chapter of the second book, do: +// +// o.Get("books[1].chapters[2].title") +func (m Map) Get(selector string) *Value { + rawObj := access(m, selector, nil, false) + return &Value{data: rawObj} +} + +// Set sets the value using the specified selector and +// returns the object on which Set was called. +// +// Set can only operate directly on map[string]interface{} and []interface +// +// Example +// +// To set the title of the third chapter of the second book, do: +// +// o.Set("books[1].chapters[2].title","Time to Go") +func (m Map) Set(selector string, value interface{}) Map { + access(m, selector, value, true) + return m +} + +// access accesses the object using the selector and performs the +// appropriate action. +func access(current, selector, value interface{}, isSet bool) interface{} { + switch selector.(type) { + case int, int8, int16, int32, int64, uint, uint8, uint16, uint32, uint64: + if array, ok := current.([]interface{}); ok { + index := intFromInterface(selector) + if index >= len(array) { + return nil + } + return array[index] + } + return nil + + case string: + selStr := selector.(string) + selSegs := strings.SplitN(selStr, PathSeparator, 2) + thisSel := selSegs[0] + index := -1 + var err error + + if strings.Contains(thisSel, "[") { + arrayMatches := arrayAccesRegex.FindStringSubmatch(thisSel) + if len(arrayMatches) > 0 { + // Get the key into the map + thisSel = arrayMatches[1] + + // Get the index into the array at the key + index, err = strconv.Atoi(arrayMatches[2]) + + if err != nil { + // This should never happen. If it does, something has gone + // seriously wrong. Panic. + panic("objx: Array index is not an integer. Must use array[int].") + } + } + } + if curMap, ok := current.(Map); ok { + current = map[string]interface{}(curMap) + } + // get the object in question + switch current.(type) { + case map[string]interface{}: + curMSI := current.(map[string]interface{}) + if len(selSegs) <= 1 && isSet { + curMSI[thisSel] = value + return nil + } + current = curMSI[thisSel] + default: + current = nil + } + // do we need to access the item of an array? 
+ if index > -1 { + if array, ok := current.([]interface{}); ok { + if index < len(array) { + current = array[index] + } else { + current = nil + } + } + } + if len(selSegs) > 1 { + current = access(current, selSegs[1], value, isSet) + } + } + return current +} + +// intFromInterface converts an interface object to the largest +// representation of an unsigned integer using a type switch and +// assertions +func intFromInterface(selector interface{}) int { + var value int + switch selector.(type) { + case int: + value = selector.(int) + case int8: + value = int(selector.(int8)) + case int16: + value = int(selector.(int16)) + case int32: + value = int(selector.(int32)) + case int64: + value = int(selector.(int64)) + case uint: + value = int(selector.(uint)) + case uint8: + value = int(selector.(uint8)) + case uint16: + value = int(selector.(uint16)) + case uint32: + value = int(selector.(uint32)) + case uint64: + value = int(selector.(uint64)) + default: + return 0 + } + return value +} diff --git a/vendor/github.com/stretchr/objx/constants.go b/vendor/github.com/stretchr/objx/constants.go new file mode 100644 index 0000000..f9eb42a --- /dev/null +++ b/vendor/github.com/stretchr/objx/constants.go @@ -0,0 +1,13 @@ +package objx + +const ( + // PathSeparator is the character used to separate the elements + // of the keypath. + // + // For example, `location.address.city` + PathSeparator string = "." + + // SignatureSeparator is the character that is used to + // separate the Base64 string from the security signature. + SignatureSeparator = "_" +) diff --git a/vendor/github.com/stretchr/objx/conversions.go b/vendor/github.com/stretchr/objx/conversions.go new file mode 100644 index 0000000..5e020f3 --- /dev/null +++ b/vendor/github.com/stretchr/objx/conversions.go @@ -0,0 +1,108 @@ +package objx + +import ( + "bytes" + "encoding/base64" + "encoding/json" + "errors" + "fmt" + "net/url" +) + +// JSON converts the contained object to a JSON string +// representation +func (m Map) JSON() (string, error) { + result, err := json.Marshal(m) + if err != nil { + err = errors.New("objx: JSON encode failed with: " + err.Error()) + } + return string(result), err +} + +// MustJSON converts the contained object to a JSON string +// representation and panics if there is an error +func (m Map) MustJSON() string { + result, err := m.JSON() + if err != nil { + panic(err.Error()) + } + return result +} + +// Base64 converts the contained object to a Base64 string +// representation of the JSON string representation +func (m Map) Base64() (string, error) { + var buf bytes.Buffer + + jsonData, err := m.JSON() + if err != nil { + return "", err + } + + encoder := base64.NewEncoder(base64.StdEncoding, &buf) + _, err = encoder.Write([]byte(jsonData)) + if err != nil { + return "", err + } + _ = encoder.Close() + + return buf.String(), nil +} + +// MustBase64 converts the contained object to a Base64 string +// representation of the JSON string representation and panics +// if there is an error +func (m Map) MustBase64() string { + result, err := m.Base64() + if err != nil { + panic(err.Error()) + } + return result +} + +// SignedBase64 converts the contained object to a Base64 string +// representation of the JSON string representation and signs it +// using the provided key. 
+func (m Map) SignedBase64(key string) (string, error) { + base64, err := m.Base64() + if err != nil { + return "", err + } + + sig := HashWithKey(base64, key) + return base64 + SignatureSeparator + sig, nil +} + +// MustSignedBase64 converts the contained object to a Base64 string +// representation of the JSON string representation and signs it +// using the provided key and panics if there is an error +func (m Map) MustSignedBase64(key string) string { + result, err := m.SignedBase64(key) + if err != nil { + panic(err.Error()) + } + return result +} + +/* + URL Query + ------------------------------------------------ +*/ + +// URLValues creates a url.Values object from an Obj. This +// function requires that the wrapped object be a map[string]interface{} +func (m Map) URLValues() url.Values { + vals := make(url.Values) + for k, v := range m { + //TODO: can this be done without sprintf? + vals.Set(k, fmt.Sprintf("%v", v)) + } + return vals +} + +// URLQuery gets an encoded URL query representing the given +// Obj. This function requires that the wrapped object be a +// map[string]interface{} +func (m Map) URLQuery() (string, error) { + return m.URLValues().Encode(), nil +} diff --git a/vendor/github.com/stretchr/objx/doc.go b/vendor/github.com/stretchr/objx/doc.go new file mode 100644 index 0000000..6d6af1a --- /dev/null +++ b/vendor/github.com/stretchr/objx/doc.go @@ -0,0 +1,66 @@ +/* +Objx - Go package for dealing with maps, slices, JSON and other data. + +Overview + +Objx provides the `objx.Map` type, which is a `map[string]interface{}` that exposes +a powerful `Get` method (among others) that allows you to easily and quickly get +access to data within the map, without having to worry too much about type assertions, +missing data, default values etc. + +Pattern + +Objx uses a preditable pattern to make access data from within `map[string]interface{}` easy. +Call one of the `objx.` functions to create your `objx.Map` to get going: + + m, err := objx.FromJSON(json) + +NOTE: Any methods or functions with the `Must` prefix will panic if something goes wrong, +the rest will be optimistic and try to figure things out without panicking. + +Use `Get` to access the value you're interested in. You can use dot and array +notation too: + + m.Get("places[0].latlng") + +Once you have sought the `Value` you're interested in, you can use the `Is*` methods to determine its type. + + if m.Get("code").IsStr() { // Your code... } + +Or you can just assume the type, and use one of the strong type methods to extract the real value: + + m.Get("code").Int() + +If there's no value there (or if it's the wrong type) then a default value will be returned, +or you can be explicit about the default value. + + Get("code").Int(-1) + +If you're dealing with a slice of data as a value, Objx provides many useful methods for iterating, +manipulating and selecting that data. You can find out more by exploring the index below. + +Reading data + +A simple example of how to use Objx: + + // Use MustFromJSON to make an objx.Map from some JSON + m := objx.MustFromJSON(`{"name": "Mat", "age": 30}`) + + // Get the details + name := m.Get("name").Str() + age := m.Get("age").Int() + + // Get their nickname (or use their name if they don't have one) + nickname := m.Get("nickname").Str(name) + +Ranging + +Since `objx.Map` is a `map[string]interface{}` you can treat it as such. +For example, to `range` the data, do what you would expect: + + m := objx.MustFromJSON(json) + for key, value := range m { + // Your code... 
+ } +*/ +package objx diff --git a/vendor/github.com/stretchr/objx/map.go b/vendor/github.com/stretchr/objx/map.go new file mode 100644 index 0000000..406bc89 --- /dev/null +++ b/vendor/github.com/stretchr/objx/map.go @@ -0,0 +1,190 @@ +package objx + +import ( + "encoding/base64" + "encoding/json" + "errors" + "io/ioutil" + "net/url" + "strings" +) + +// MSIConvertable is an interface that defines methods for converting your +// custom types to a map[string]interface{} representation. +type MSIConvertable interface { + // MSI gets a map[string]interface{} (msi) representing the + // object. + MSI() map[string]interface{} +} + +// Map provides extended functionality for working with +// untyped data, in particular map[string]interface (msi). +type Map map[string]interface{} + +// Value returns the internal value instance +func (m Map) Value() *Value { + return &Value{data: m} +} + +// Nil represents a nil Map. +var Nil = New(nil) + +// New creates a new Map containing the map[string]interface{} in the data argument. +// If the data argument is not a map[string]interface, New attempts to call the +// MSI() method on the MSIConvertable interface to create one. +func New(data interface{}) Map { + if _, ok := data.(map[string]interface{}); !ok { + if converter, ok := data.(MSIConvertable); ok { + data = converter.MSI() + } else { + return nil + } + } + return Map(data.(map[string]interface{})) +} + +// MSI creates a map[string]interface{} and puts it inside a new Map. +// +// The arguments follow a key, value pattern. +// +// +// Returns nil if any key argument is non-string or if there are an odd number of arguments. +// +// Example +// +// To easily create Maps: +// +// m := objx.MSI("name", "Mat", "age", 29, "subobj", objx.MSI("active", true)) +// +// // creates an Map equivalent to +// m := objx.Map{"name": "Mat", "age": 29, "subobj": objx.Map{"active": true}} +func MSI(keyAndValuePairs ...interface{}) Map { + newMap := Map{} + keyAndValuePairsLen := len(keyAndValuePairs) + if keyAndValuePairsLen%2 != 0 { + return nil + } + for i := 0; i < keyAndValuePairsLen; i = i + 2 { + key := keyAndValuePairs[i] + value := keyAndValuePairs[i+1] + + // make sure the key is a string + keyString, keyStringOK := key.(string) + if !keyStringOK { + return nil + } + newMap[keyString] = value + } + return newMap +} + +// ****** Conversion Constructors + +// MustFromJSON creates a new Map containing the data specified in the +// jsonString. +// +// Panics if the JSON is invalid. +func MustFromJSON(jsonString string) Map { + o, err := FromJSON(jsonString) + if err != nil { + panic("objx: MustFromJSON failed with error: " + err.Error()) + } + return o +} + +// FromJSON creates a new Map containing the data specified in the +// jsonString. +// +// Returns an error if the JSON is invalid. +func FromJSON(jsonString string) (Map, error) { + var data interface{} + err := json.Unmarshal([]byte(jsonString), &data) + if err != nil { + return Nil, err + } + return New(data), nil +} + +// FromBase64 creates a new Obj containing the data specified +// in the Base64 string. +// +// The string is an encoded JSON string returned by Base64 +func FromBase64(base64String string) (Map, error) { + decoder := base64.NewDecoder(base64.StdEncoding, strings.NewReader(base64String)) + decoded, err := ioutil.ReadAll(decoder) + if err != nil { + return nil, err + } + return FromJSON(string(decoded)) +} + +// MustFromBase64 creates a new Obj containing the data specified +// in the Base64 string and panics if there is an error. 
+// +// The string is an encoded JSON string returned by Base64 +func MustFromBase64(base64String string) Map { + result, err := FromBase64(base64String) + if err != nil { + panic("objx: MustFromBase64 failed with error: " + err.Error()) + } + return result +} + +// FromSignedBase64 creates a new Obj containing the data specified +// in the Base64 string. +// +// The string is an encoded JSON string returned by SignedBase64 +func FromSignedBase64(base64String, key string) (Map, error) { + parts := strings.Split(base64String, SignatureSeparator) + if len(parts) != 2 { + return nil, errors.New("objx: Signed base64 string is malformed") + } + + sig := HashWithKey(parts[0], key) + if parts[1] != sig { + return nil, errors.New("objx: Signature for base64 data does not match") + } + return FromBase64(parts[0]) +} + +// MustFromSignedBase64 creates a new Obj containing the data specified +// in the Base64 string and panics if there is an error. +// +// The string is an encoded JSON string returned by Base64 +func MustFromSignedBase64(base64String, key string) Map { + result, err := FromSignedBase64(base64String, key) + if err != nil { + panic("objx: MustFromSignedBase64 failed with error: " + err.Error()) + } + return result +} + +// FromURLQuery generates a new Obj by parsing the specified +// query. +// +// For queries with multiple values, the first value is selected. +func FromURLQuery(query string) (Map, error) { + vals, err := url.ParseQuery(query) + if err != nil { + return nil, err + } + m := Map{} + for k, vals := range vals { + m[k] = vals[0] + } + return m, nil +} + +// MustFromURLQuery generates a new Obj by parsing the specified +// query. +// +// For queries with multiple values, the first value is selected. +// +// Panics if it encounters an error +func MustFromURLQuery(query string) Map { + o, err := FromURLQuery(query) + if err != nil { + panic("objx: MustFromURLQuery failed with error: " + err.Error()) + } + return o +} diff --git a/vendor/github.com/stretchr/objx/mutations.go b/vendor/github.com/stretchr/objx/mutations.go new file mode 100644 index 0000000..c3400a3 --- /dev/null +++ b/vendor/github.com/stretchr/objx/mutations.go @@ -0,0 +1,77 @@ +package objx + +// Exclude returns a new Map with the keys in the specified []string +// excluded. +func (m Map) Exclude(exclude []string) Map { + excluded := make(Map) + for k, v := range m { + if !contains(exclude, k) { + excluded[k] = v + } + } + return excluded +} + +// Copy creates a shallow copy of the Obj. +func (m Map) Copy() Map { + copied := Map{} + for k, v := range m { + copied[k] = v + } + return copied +} + +// Merge blends the specified map with a copy of this map and returns the result. +// +// Keys that appear in both will be selected from the specified map. +// This method requires that the wrapped object be a map[string]interface{} +func (m Map) Merge(merge Map) Map { + return m.Copy().MergeHere(merge) +} + +// MergeHere blends the specified map with this map and returns the current map. +// +// Keys that appear in both will be selected from the specified map. The original map +// will be modified. This method requires that +// the wrapped object be a map[string]interface{} +func (m Map) MergeHere(merge Map) Map { + for k, v := range merge { + m[k] = v + } + return m +} + +// Transform builds a new Obj giving the transformer a chance +// to change the keys and values as it goes. 
This method requires that +// the wrapped object be a map[string]interface{} +func (m Map) Transform(transformer func(key string, value interface{}) (string, interface{})) Map { + newMap := Map{} + for k, v := range m { + modifiedKey, modifiedVal := transformer(k, v) + newMap[modifiedKey] = modifiedVal + } + return newMap +} + +// TransformKeys builds a new map using the specified key mapping. +// +// Unspecified keys will be unaltered. +// This method requires that the wrapped object be a map[string]interface{} +func (m Map) TransformKeys(mapping map[string]string) Map { + return m.Transform(func(key string, value interface{}) (string, interface{}) { + if newKey, ok := mapping[key]; ok { + return newKey, value + } + return key, value + }) +} + +// Checks if a string slice contains a string +func contains(s []string, e string) bool { + for _, a := range s { + if a == e { + return true + } + } + return false +} diff --git a/vendor/github.com/stretchr/objx/security.go b/vendor/github.com/stretchr/objx/security.go new file mode 100644 index 0000000..692be8e --- /dev/null +++ b/vendor/github.com/stretchr/objx/security.go @@ -0,0 +1,12 @@ +package objx + +import ( + "crypto/sha1" + "encoding/hex" +) + +// HashWithKey hashes the specified string using the security key +func HashWithKey(data, key string) string { + d := sha1.Sum([]byte(data + ":" + key)) + return hex.EncodeToString(d[:]) +} diff --git a/vendor/github.com/stretchr/objx/tests.go b/vendor/github.com/stretchr/objx/tests.go new file mode 100644 index 0000000..d9e0b47 --- /dev/null +++ b/vendor/github.com/stretchr/objx/tests.go @@ -0,0 +1,17 @@ +package objx + +// Has gets whether there is something at the specified selector +// or not. +// +// If m is nil, Has will always return false. +func (m Map) Has(selector string) bool { + if m == nil { + return false + } + return !m.Get(selector).IsNil() +} + +// IsNil gets whether the data is nil or not. +func (v *Value) IsNil() bool { + return v == nil || v.data == nil +} diff --git a/vendor/github.com/stretchr/objx/type_specific_codegen.go b/vendor/github.com/stretchr/objx/type_specific_codegen.go new file mode 100644 index 0000000..202a91f --- /dev/null +++ b/vendor/github.com/stretchr/objx/type_specific_codegen.go @@ -0,0 +1,2501 @@ +package objx + +/* + Inter (interface{} and []interface{}) +*/ + +// Inter gets the value as a interface{}, returns the optionalDefault +// value or a system default object if the value is the wrong type. +func (v *Value) Inter(optionalDefault ...interface{}) interface{} { + if s, ok := v.data.(interface{}); ok { + return s + } + if len(optionalDefault) == 1 { + return optionalDefault[0] + } + return nil +} + +// MustInter gets the value as a interface{}. +// +// Panics if the object is not a interface{}. +func (v *Value) MustInter() interface{} { + return v.data.(interface{}) +} + +// InterSlice gets the value as a []interface{}, returns the optionalDefault +// value or nil if the value is not a []interface{}. +func (v *Value) InterSlice(optionalDefault ...[]interface{}) []interface{} { + if s, ok := v.data.([]interface{}); ok { + return s + } + if len(optionalDefault) == 1 { + return optionalDefault[0] + } + return nil +} + +// MustInterSlice gets the value as a []interface{}. +// +// Panics if the object is not a []interface{}. +func (v *Value) MustInterSlice() []interface{} { + return v.data.([]interface{}) +} + +// IsInter gets whether the object contained is a interface{} or not. 
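The mutation helpers in mutations.go above differ mainly in whether they touch the receiver: Merge works on a Copy, MergeHere writes into the original map, and Exclude, Transform and TransformKeys always build a new Map. HashWithKey in security.go is the hex-encoded SHA1 of data + ":" + key that SignedBase64 and FromSignedBase64 rely on, and Has in tests.go is a nil-safe existence check on top of Get. A short sketch with made-up keys:

	base := objx.MSI("host", "127.0.0.1", "port", 8080, "debug", true)
	override := objx.MSI("port", 9090)

	merged := base.Merge(override) // base untouched; merged gets port == 9090
	base.MergeHere(override)       // base itself now has port == 9090

	// Drop keys and remap the remaining ones without mutating the source.
	public := base.Exclude([]string{"debug"}).TransformKeys(map[string]string{
		"host": "address",
	})

	// Has is safe on nil maps and uses Get's selector syntax.
	if public.Has("address") {
		// ...
	}
	_ = merged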
+func (v *Value) IsInter() bool { + _, ok := v.data.(interface{}) + return ok +} + +// IsInterSlice gets whether the object contained is a []interface{} or not. +func (v *Value) IsInterSlice() bool { + _, ok := v.data.([]interface{}) + return ok +} + +// EachInter calls the specified callback for each object +// in the []interface{}. +// +// Panics if the object is the wrong type. +func (v *Value) EachInter(callback func(int, interface{}) bool) *Value { + for index, val := range v.MustInterSlice() { + carryon := callback(index, val) + if !carryon { + break + } + } + return v +} + +// WhereInter uses the specified decider function to select items +// from the []interface{}. The object contained in the result will contain +// only the selected items. +func (v *Value) WhereInter(decider func(int, interface{}) bool) *Value { + var selected []interface{} + v.EachInter(func(index int, val interface{}) bool { + shouldSelect := decider(index, val) + if !shouldSelect { + selected = append(selected, val) + } + return true + }) + return &Value{data: selected} +} + +// GroupInter uses the specified grouper function to group the items +// keyed by the return of the grouper. The object contained in the +// result will contain a map[string][]interface{}. +func (v *Value) GroupInter(grouper func(int, interface{}) string) *Value { + groups := make(map[string][]interface{}) + v.EachInter(func(index int, val interface{}) bool { + group := grouper(index, val) + if _, ok := groups[group]; !ok { + groups[group] = make([]interface{}, 0) + } + groups[group] = append(groups[group], val) + return true + }) + return &Value{data: groups} +} + +// ReplaceInter uses the specified function to replace each interface{}s +// by iterating each item. The data in the returned result will be a +// []interface{} containing the replaced items. +func (v *Value) ReplaceInter(replacer func(int, interface{}) interface{}) *Value { + arr := v.MustInterSlice() + replaced := make([]interface{}, len(arr)) + v.EachInter(func(index int, val interface{}) bool { + replaced[index] = replacer(index, val) + return true + }) + return &Value{data: replaced} +} + +// CollectInter uses the specified collector function to collect a value +// for each of the interface{}s in the slice. The data returned will be a +// []interface{}. +func (v *Value) CollectInter(collector func(int, interface{}) interface{}) *Value { + arr := v.MustInterSlice() + collected := make([]interface{}, len(arr)) + v.EachInter(func(index int, val interface{}) bool { + collected[index] = collector(index, val) + return true + }) + return &Value{data: collected} +} + +/* + MSI (map[string]interface{} and []map[string]interface{}) +*/ + +// MSI gets the value as a map[string]interface{}, returns the optionalDefault +// value or a system default object if the value is the wrong type. +func (v *Value) MSI(optionalDefault ...map[string]interface{}) map[string]interface{} { + if s, ok := v.data.(map[string]interface{}); ok { + return s + } + if len(optionalDefault) == 1 { + return optionalDefault[0] + } + return nil +} + +// MustMSI gets the value as a map[string]interface{}. +// +// Panics if the object is not a map[string]interface{}. +func (v *Value) MustMSI() map[string]interface{} { + return v.data.(map[string]interface{}) +} + +// MSISlice gets the value as a []map[string]interface{}, returns the optionalDefault +// value or nil if the value is not a []map[string]interface{}. 
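The rest of type_specific_codegen.go repeats one generated pattern per supported Go type, as the Inter section above and the sections that follow show: a lenient accessor that takes an optional default, a Must* variant that panics on a type mismatch, Is*/Is*Slice checks, and Each*/Where*/Group*/Replace*/Collect* helpers over the matching slice type. A brief sketch of that pattern on JSON-decoded data (field names are illustrative):

	m := objx.MustFromJSON(`{"name": "Mat", "tags": ["a", "b", "c"]}`)

	// Lenient access: the optional argument is returned on a missing key or a
	// type mismatch; with no argument a zero value is returned instead.
	name := m.Get("name").Str("unknown")      // "Mat"
	nickname := m.Get("nickname").Str("none") // "none"

	// Must* panics unless the underlying type matches exactly. JSON arrays
	// decode to []interface{}, so MustInterSlice is the safe choice here.
	for _, tag := range m.Get("tags").MustInterSlice() {
		_ = tag
	}

	// Each* walks the slice and stops as soon as the callback returns false.
	m.Get("tags").EachInter(func(i int, v interface{}) bool {
		return v != "b" // stop once "b" has been seen
	})
	_, _ = name, nickname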
+func (v *Value) MSISlice(optionalDefault ...[]map[string]interface{}) []map[string]interface{} { + if s, ok := v.data.([]map[string]interface{}); ok { + return s + } + if len(optionalDefault) == 1 { + return optionalDefault[0] + } + return nil +} + +// MustMSISlice gets the value as a []map[string]interface{}. +// +// Panics if the object is not a []map[string]interface{}. +func (v *Value) MustMSISlice() []map[string]interface{} { + return v.data.([]map[string]interface{}) +} + +// IsMSI gets whether the object contained is a map[string]interface{} or not. +func (v *Value) IsMSI() bool { + _, ok := v.data.(map[string]interface{}) + return ok +} + +// IsMSISlice gets whether the object contained is a []map[string]interface{} or not. +func (v *Value) IsMSISlice() bool { + _, ok := v.data.([]map[string]interface{}) + return ok +} + +// EachMSI calls the specified callback for each object +// in the []map[string]interface{}. +// +// Panics if the object is the wrong type. +func (v *Value) EachMSI(callback func(int, map[string]interface{}) bool) *Value { + for index, val := range v.MustMSISlice() { + carryon := callback(index, val) + if !carryon { + break + } + } + return v +} + +// WhereMSI uses the specified decider function to select items +// from the []map[string]interface{}. The object contained in the result will contain +// only the selected items. +func (v *Value) WhereMSI(decider func(int, map[string]interface{}) bool) *Value { + var selected []map[string]interface{} + v.EachMSI(func(index int, val map[string]interface{}) bool { + shouldSelect := decider(index, val) + if !shouldSelect { + selected = append(selected, val) + } + return true + }) + return &Value{data: selected} +} + +// GroupMSI uses the specified grouper function to group the items +// keyed by the return of the grouper. The object contained in the +// result will contain a map[string][]map[string]interface{}. +func (v *Value) GroupMSI(grouper func(int, map[string]interface{}) string) *Value { + groups := make(map[string][]map[string]interface{}) + v.EachMSI(func(index int, val map[string]interface{}) bool { + group := grouper(index, val) + if _, ok := groups[group]; !ok { + groups[group] = make([]map[string]interface{}, 0) + } + groups[group] = append(groups[group], val) + return true + }) + return &Value{data: groups} +} + +// ReplaceMSI uses the specified function to replace each map[string]interface{}s +// by iterating each item. The data in the returned result will be a +// []map[string]interface{} containing the replaced items. +func (v *Value) ReplaceMSI(replacer func(int, map[string]interface{}) map[string]interface{}) *Value { + arr := v.MustMSISlice() + replaced := make([]map[string]interface{}, len(arr)) + v.EachMSI(func(index int, val map[string]interface{}) bool { + replaced[index] = replacer(index, val) + return true + }) + return &Value{data: replaced} +} + +// CollectMSI uses the specified collector function to collect a value +// for each of the map[string]interface{}s in the slice. The data returned will be a +// []interface{}. 
+func (v *Value) CollectMSI(collector func(int, map[string]interface{}) interface{}) *Value { + arr := v.MustMSISlice() + collected := make([]interface{}, len(arr)) + v.EachMSI(func(index int, val map[string]interface{}) bool { + collected[index] = collector(index, val) + return true + }) + return &Value{data: collected} +} + +/* + ObjxMap ((Map) and [](Map)) +*/ + +// ObjxMap gets the value as a (Map), returns the optionalDefault +// value or a system default object if the value is the wrong type. +func (v *Value) ObjxMap(optionalDefault ...(Map)) Map { + if s, ok := v.data.((Map)); ok { + return s + } + if len(optionalDefault) == 1 { + return optionalDefault[0] + } + return New(nil) +} + +// MustObjxMap gets the value as a (Map). +// +// Panics if the object is not a (Map). +func (v *Value) MustObjxMap() Map { + return v.data.((Map)) +} + +// ObjxMapSlice gets the value as a [](Map), returns the optionalDefault +// value or nil if the value is not a [](Map). +func (v *Value) ObjxMapSlice(optionalDefault ...[](Map)) [](Map) { + if s, ok := v.data.([](Map)); ok { + return s + } + if len(optionalDefault) == 1 { + return optionalDefault[0] + } + return nil +} + +// MustObjxMapSlice gets the value as a [](Map). +// +// Panics if the object is not a [](Map). +func (v *Value) MustObjxMapSlice() [](Map) { + return v.data.([](Map)) +} + +// IsObjxMap gets whether the object contained is a (Map) or not. +func (v *Value) IsObjxMap() bool { + _, ok := v.data.((Map)) + return ok +} + +// IsObjxMapSlice gets whether the object contained is a [](Map) or not. +func (v *Value) IsObjxMapSlice() bool { + _, ok := v.data.([](Map)) + return ok +} + +// EachObjxMap calls the specified callback for each object +// in the [](Map). +// +// Panics if the object is the wrong type. +func (v *Value) EachObjxMap(callback func(int, Map) bool) *Value { + for index, val := range v.MustObjxMapSlice() { + carryon := callback(index, val) + if !carryon { + break + } + } + return v +} + +// WhereObjxMap uses the specified decider function to select items +// from the [](Map). The object contained in the result will contain +// only the selected items. +func (v *Value) WhereObjxMap(decider func(int, Map) bool) *Value { + var selected [](Map) + v.EachObjxMap(func(index int, val Map) bool { + shouldSelect := decider(index, val) + if !shouldSelect { + selected = append(selected, val) + } + return true + }) + return &Value{data: selected} +} + +// GroupObjxMap uses the specified grouper function to group the items +// keyed by the return of the grouper. The object contained in the +// result will contain a map[string][](Map). +func (v *Value) GroupObjxMap(grouper func(int, Map) string) *Value { + groups := make(map[string][](Map)) + v.EachObjxMap(func(index int, val Map) bool { + group := grouper(index, val) + if _, ok := groups[group]; !ok { + groups[group] = make([](Map), 0) + } + groups[group] = append(groups[group], val) + return true + }) + return &Value{data: groups} +} + +// ReplaceObjxMap uses the specified function to replace each (Map)s +// by iterating each item. The data in the returned result will be a +// [](Map) containing the replaced items. 
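The MSI and ObjxMap sections above apply the same generated pattern to slices of maps. One caveat worth noting: data unmarshalled from JSON arrives as []interface{}, not []map[string]interface{}, so the MSI slice helpers only match slices built with that concrete type. An illustrative sketch:

	// Build the []map[string]interface{} explicitly so the MSI helpers apply.
	records := objx.MSI("people", []map[string]interface{}{
		{"name": "Ana", "team": "core"},
		{"name": "Luis", "team": "core"},
		{"name": "Marta", "team": "ui"},
	})

	// GroupMSI buckets the records under the key returned by the grouper.
	byTeam := records.Get("people").GroupMSI(func(_ int, rec map[string]interface{}) string {
		return rec["team"].(string)
	})

	// CollectMSI derives one value per record into a []interface{}.
	names := records.Get("people").CollectMSI(func(_ int, rec map[string]interface{}) interface{} {
		return rec["name"]
	})
	_, _ = byTeam, names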
+func (v *Value) ReplaceObjxMap(replacer func(int, Map) Map) *Value { + arr := v.MustObjxMapSlice() + replaced := make([](Map), len(arr)) + v.EachObjxMap(func(index int, val Map) bool { + replaced[index] = replacer(index, val) + return true + }) + return &Value{data: replaced} +} + +// CollectObjxMap uses the specified collector function to collect a value +// for each of the (Map)s in the slice. The data returned will be a +// []interface{}. +func (v *Value) CollectObjxMap(collector func(int, Map) interface{}) *Value { + arr := v.MustObjxMapSlice() + collected := make([]interface{}, len(arr)) + v.EachObjxMap(func(index int, val Map) bool { + collected[index] = collector(index, val) + return true + }) + return &Value{data: collected} +} + +/* + Bool (bool and []bool) +*/ + +// Bool gets the value as a bool, returns the optionalDefault +// value or a system default object if the value is the wrong type. +func (v *Value) Bool(optionalDefault ...bool) bool { + if s, ok := v.data.(bool); ok { + return s + } + if len(optionalDefault) == 1 { + return optionalDefault[0] + } + return false +} + +// MustBool gets the value as a bool. +// +// Panics if the object is not a bool. +func (v *Value) MustBool() bool { + return v.data.(bool) +} + +// BoolSlice gets the value as a []bool, returns the optionalDefault +// value or nil if the value is not a []bool. +func (v *Value) BoolSlice(optionalDefault ...[]bool) []bool { + if s, ok := v.data.([]bool); ok { + return s + } + if len(optionalDefault) == 1 { + return optionalDefault[0] + } + return nil +} + +// MustBoolSlice gets the value as a []bool. +// +// Panics if the object is not a []bool. +func (v *Value) MustBoolSlice() []bool { + return v.data.([]bool) +} + +// IsBool gets whether the object contained is a bool or not. +func (v *Value) IsBool() bool { + _, ok := v.data.(bool) + return ok +} + +// IsBoolSlice gets whether the object contained is a []bool or not. +func (v *Value) IsBoolSlice() bool { + _, ok := v.data.([]bool) + return ok +} + +// EachBool calls the specified callback for each object +// in the []bool. +// +// Panics if the object is the wrong type. +func (v *Value) EachBool(callback func(int, bool) bool) *Value { + for index, val := range v.MustBoolSlice() { + carryon := callback(index, val) + if !carryon { + break + } + } + return v +} + +// WhereBool uses the specified decider function to select items +// from the []bool. The object contained in the result will contain +// only the selected items. +func (v *Value) WhereBool(decider func(int, bool) bool) *Value { + var selected []bool + v.EachBool(func(index int, val bool) bool { + shouldSelect := decider(index, val) + if !shouldSelect { + selected = append(selected, val) + } + return true + }) + return &Value{data: selected} +} + +// GroupBool uses the specified grouper function to group the items +// keyed by the return of the grouper. The object contained in the +// result will contain a map[string][]bool. +func (v *Value) GroupBool(grouper func(int, bool) string) *Value { + groups := make(map[string][]bool) + v.EachBool(func(index int, val bool) bool { + group := grouper(index, val) + if _, ok := groups[group]; !ok { + groups[group] = make([]bool, 0) + } + groups[group] = append(groups[group], val) + return true + }) + return &Value{data: groups} +} + +// ReplaceBool uses the specified function to replace each bools +// by iterating each item. The data in the returned result will be a +// []bool containing the replaced items. 
+func (v *Value) ReplaceBool(replacer func(int, bool) bool) *Value { + arr := v.MustBoolSlice() + replaced := make([]bool, len(arr)) + v.EachBool(func(index int, val bool) bool { + replaced[index] = replacer(index, val) + return true + }) + return &Value{data: replaced} +} + +// CollectBool uses the specified collector function to collect a value +// for each of the bools in the slice. The data returned will be a +// []interface{}. +func (v *Value) CollectBool(collector func(int, bool) interface{}) *Value { + arr := v.MustBoolSlice() + collected := make([]interface{}, len(arr)) + v.EachBool(func(index int, val bool) bool { + collected[index] = collector(index, val) + return true + }) + return &Value{data: collected} +} + +/* + Str (string and []string) +*/ + +// Str gets the value as a string, returns the optionalDefault +// value or a system default object if the value is the wrong type. +func (v *Value) Str(optionalDefault ...string) string { + if s, ok := v.data.(string); ok { + return s + } + if len(optionalDefault) == 1 { + return optionalDefault[0] + } + return "" +} + +// MustStr gets the value as a string. +// +// Panics if the object is not a string. +func (v *Value) MustStr() string { + return v.data.(string) +} + +// StrSlice gets the value as a []string, returns the optionalDefault +// value or nil if the value is not a []string. +func (v *Value) StrSlice(optionalDefault ...[]string) []string { + if s, ok := v.data.([]string); ok { + return s + } + if len(optionalDefault) == 1 { + return optionalDefault[0] + } + return nil +} + +// MustStrSlice gets the value as a []string. +// +// Panics if the object is not a []string. +func (v *Value) MustStrSlice() []string { + return v.data.([]string) +} + +// IsStr gets whether the object contained is a string or not. +func (v *Value) IsStr() bool { + _, ok := v.data.(string) + return ok +} + +// IsStrSlice gets whether the object contained is a []string or not. +func (v *Value) IsStrSlice() bool { + _, ok := v.data.([]string) + return ok +} + +// EachStr calls the specified callback for each object +// in the []string. +// +// Panics if the object is the wrong type. +func (v *Value) EachStr(callback func(int, string) bool) *Value { + for index, val := range v.MustStrSlice() { + carryon := callback(index, val) + if !carryon { + break + } + } + return v +} + +// WhereStr uses the specified decider function to select items +// from the []string. The object contained in the result will contain +// only the selected items. +func (v *Value) WhereStr(decider func(int, string) bool) *Value { + var selected []string + v.EachStr(func(index int, val string) bool { + shouldSelect := decider(index, val) + if !shouldSelect { + selected = append(selected, val) + } + return true + }) + return &Value{data: selected} +} + +// GroupStr uses the specified grouper function to group the items +// keyed by the return of the grouper. The object contained in the +// result will contain a map[string][]string. +func (v *Value) GroupStr(grouper func(int, string) string) *Value { + groups := make(map[string][]string) + v.EachStr(func(index int, val string) bool { + group := grouper(index, val) + if _, ok := groups[group]; !ok { + groups[group] = make([]string, 0) + } + groups[group] = append(groups[group], val) + return true + }) + return &Value{data: groups} +} + +// ReplaceStr uses the specified function to replace each strings +// by iterating each item. The data in the returned result will be a +// []string containing the replaced items. 
+func (v *Value) ReplaceStr(replacer func(int, string) string) *Value { + arr := v.MustStrSlice() + replaced := make([]string, len(arr)) + v.EachStr(func(index int, val string) bool { + replaced[index] = replacer(index, val) + return true + }) + return &Value{data: replaced} +} + +// CollectStr uses the specified collector function to collect a value +// for each of the strings in the slice. The data returned will be a +// []interface{}. +func (v *Value) CollectStr(collector func(int, string) interface{}) *Value { + arr := v.MustStrSlice() + collected := make([]interface{}, len(arr)) + v.EachStr(func(index int, val string) bool { + collected[index] = collector(index, val) + return true + }) + return &Value{data: collected} +} + +/* + Int (int and []int) +*/ + +// Int gets the value as a int, returns the optionalDefault +// value or a system default object if the value is the wrong type. +func (v *Value) Int(optionalDefault ...int) int { + if s, ok := v.data.(int); ok { + return s + } + if len(optionalDefault) == 1 { + return optionalDefault[0] + } + return 0 +} + +// MustInt gets the value as a int. +// +// Panics if the object is not a int. +func (v *Value) MustInt() int { + return v.data.(int) +} + +// IntSlice gets the value as a []int, returns the optionalDefault +// value or nil if the value is not a []int. +func (v *Value) IntSlice(optionalDefault ...[]int) []int { + if s, ok := v.data.([]int); ok { + return s + } + if len(optionalDefault) == 1 { + return optionalDefault[0] + } + return nil +} + +// MustIntSlice gets the value as a []int. +// +// Panics if the object is not a []int. +func (v *Value) MustIntSlice() []int { + return v.data.([]int) +} + +// IsInt gets whether the object contained is a int or not. +func (v *Value) IsInt() bool { + _, ok := v.data.(int) + return ok +} + +// IsIntSlice gets whether the object contained is a []int or not. +func (v *Value) IsIntSlice() bool { + _, ok := v.data.([]int) + return ok +} + +// EachInt calls the specified callback for each object +// in the []int. +// +// Panics if the object is the wrong type. +func (v *Value) EachInt(callback func(int, int) bool) *Value { + for index, val := range v.MustIntSlice() { + carryon := callback(index, val) + if !carryon { + break + } + } + return v +} + +// WhereInt uses the specified decider function to select items +// from the []int. The object contained in the result will contain +// only the selected items. +func (v *Value) WhereInt(decider func(int, int) bool) *Value { + var selected []int + v.EachInt(func(index int, val int) bool { + shouldSelect := decider(index, val) + if !shouldSelect { + selected = append(selected, val) + } + return true + }) + return &Value{data: selected} +} + +// GroupInt uses the specified grouper function to group the items +// keyed by the return of the grouper. The object contained in the +// result will contain a map[string][]int. +func (v *Value) GroupInt(grouper func(int, int) string) *Value { + groups := make(map[string][]int) + v.EachInt(func(index int, val int) bool { + group := grouper(index, val) + if _, ok := groups[group]; !ok { + groups[group] = make([]int, 0) + } + groups[group] = append(groups[group], val) + return true + }) + return &Value{data: groups} +} + +// ReplaceInt uses the specified function to replace each ints +// by iterating each item. The data in the returned result will be a +// []int containing the replaced items. 
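The Int accessors above, like the fixed-width and unsigned integer sections that follow, rely on exact type assertions. That matters for JSON input: encoding/json decodes every number into float64, so the integer accessors fall back to their defaults on such values, while values stored in the map as Go int literals keep their static type. A quick sketch of the difference (values are illustrative):

	fromJSON := objx.MustFromJSON(`{"port": 8080}`)
	direct := objx.MSI("port", 8080)

	// The JSON value is a float64, so the strict int assertion fails and the
	// explicit default is returned instead.
	a := fromJSON.Get("port").Int(-1) // -1
	b := direct.Get("port").Int(-1)   // 8080

	// The raw value is still reachable through the generic Inter accessor.
	raw := fromJSON.Get("port").Inter() // float64(8080)
	_, _, _ = a, b, raw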
+func (v *Value) ReplaceInt(replacer func(int, int) int) *Value { + arr := v.MustIntSlice() + replaced := make([]int, len(arr)) + v.EachInt(func(index int, val int) bool { + replaced[index] = replacer(index, val) + return true + }) + return &Value{data: replaced} +} + +// CollectInt uses the specified collector function to collect a value +// for each of the ints in the slice. The data returned will be a +// []interface{}. +func (v *Value) CollectInt(collector func(int, int) interface{}) *Value { + arr := v.MustIntSlice() + collected := make([]interface{}, len(arr)) + v.EachInt(func(index int, val int) bool { + collected[index] = collector(index, val) + return true + }) + return &Value{data: collected} +} + +/* + Int8 (int8 and []int8) +*/ + +// Int8 gets the value as a int8, returns the optionalDefault +// value or a system default object if the value is the wrong type. +func (v *Value) Int8(optionalDefault ...int8) int8 { + if s, ok := v.data.(int8); ok { + return s + } + if len(optionalDefault) == 1 { + return optionalDefault[0] + } + return 0 +} + +// MustInt8 gets the value as a int8. +// +// Panics if the object is not a int8. +func (v *Value) MustInt8() int8 { + return v.data.(int8) +} + +// Int8Slice gets the value as a []int8, returns the optionalDefault +// value or nil if the value is not a []int8. +func (v *Value) Int8Slice(optionalDefault ...[]int8) []int8 { + if s, ok := v.data.([]int8); ok { + return s + } + if len(optionalDefault) == 1 { + return optionalDefault[0] + } + return nil +} + +// MustInt8Slice gets the value as a []int8. +// +// Panics if the object is not a []int8. +func (v *Value) MustInt8Slice() []int8 { + return v.data.([]int8) +} + +// IsInt8 gets whether the object contained is a int8 or not. +func (v *Value) IsInt8() bool { + _, ok := v.data.(int8) + return ok +} + +// IsInt8Slice gets whether the object contained is a []int8 or not. +func (v *Value) IsInt8Slice() bool { + _, ok := v.data.([]int8) + return ok +} + +// EachInt8 calls the specified callback for each object +// in the []int8. +// +// Panics if the object is the wrong type. +func (v *Value) EachInt8(callback func(int, int8) bool) *Value { + for index, val := range v.MustInt8Slice() { + carryon := callback(index, val) + if !carryon { + break + } + } + return v +} + +// WhereInt8 uses the specified decider function to select items +// from the []int8. The object contained in the result will contain +// only the selected items. +func (v *Value) WhereInt8(decider func(int, int8) bool) *Value { + var selected []int8 + v.EachInt8(func(index int, val int8) bool { + shouldSelect := decider(index, val) + if !shouldSelect { + selected = append(selected, val) + } + return true + }) + return &Value{data: selected} +} + +// GroupInt8 uses the specified grouper function to group the items +// keyed by the return of the grouper. The object contained in the +// result will contain a map[string][]int8. +func (v *Value) GroupInt8(grouper func(int, int8) string) *Value { + groups := make(map[string][]int8) + v.EachInt8(func(index int, val int8) bool { + group := grouper(index, val) + if _, ok := groups[group]; !ok { + groups[group] = make([]int8, 0) + } + groups[group] = append(groups[group], val) + return true + }) + return &Value{data: groups} +} + +// ReplaceInt8 uses the specified function to replace each int8s +// by iterating each item. The data in the returned result will be a +// []int8 containing the replaced items. 
+func (v *Value) ReplaceInt8(replacer func(int, int8) int8) *Value { + arr := v.MustInt8Slice() + replaced := make([]int8, len(arr)) + v.EachInt8(func(index int, val int8) bool { + replaced[index] = replacer(index, val) + return true + }) + return &Value{data: replaced} +} + +// CollectInt8 uses the specified collector function to collect a value +// for each of the int8s in the slice. The data returned will be a +// []interface{}. +func (v *Value) CollectInt8(collector func(int, int8) interface{}) *Value { + arr := v.MustInt8Slice() + collected := make([]interface{}, len(arr)) + v.EachInt8(func(index int, val int8) bool { + collected[index] = collector(index, val) + return true + }) + return &Value{data: collected} +} + +/* + Int16 (int16 and []int16) +*/ + +// Int16 gets the value as a int16, returns the optionalDefault +// value or a system default object if the value is the wrong type. +func (v *Value) Int16(optionalDefault ...int16) int16 { + if s, ok := v.data.(int16); ok { + return s + } + if len(optionalDefault) == 1 { + return optionalDefault[0] + } + return 0 +} + +// MustInt16 gets the value as a int16. +// +// Panics if the object is not a int16. +func (v *Value) MustInt16() int16 { + return v.data.(int16) +} + +// Int16Slice gets the value as a []int16, returns the optionalDefault +// value or nil if the value is not a []int16. +func (v *Value) Int16Slice(optionalDefault ...[]int16) []int16 { + if s, ok := v.data.([]int16); ok { + return s + } + if len(optionalDefault) == 1 { + return optionalDefault[0] + } + return nil +} + +// MustInt16Slice gets the value as a []int16. +// +// Panics if the object is not a []int16. +func (v *Value) MustInt16Slice() []int16 { + return v.data.([]int16) +} + +// IsInt16 gets whether the object contained is a int16 or not. +func (v *Value) IsInt16() bool { + _, ok := v.data.(int16) + return ok +} + +// IsInt16Slice gets whether the object contained is a []int16 or not. +func (v *Value) IsInt16Slice() bool { + _, ok := v.data.([]int16) + return ok +} + +// EachInt16 calls the specified callback for each object +// in the []int16. +// +// Panics if the object is the wrong type. +func (v *Value) EachInt16(callback func(int, int16) bool) *Value { + for index, val := range v.MustInt16Slice() { + carryon := callback(index, val) + if !carryon { + break + } + } + return v +} + +// WhereInt16 uses the specified decider function to select items +// from the []int16. The object contained in the result will contain +// only the selected items. +func (v *Value) WhereInt16(decider func(int, int16) bool) *Value { + var selected []int16 + v.EachInt16(func(index int, val int16) bool { + shouldSelect := decider(index, val) + if !shouldSelect { + selected = append(selected, val) + } + return true + }) + return &Value{data: selected} +} + +// GroupInt16 uses the specified grouper function to group the items +// keyed by the return of the grouper. The object contained in the +// result will contain a map[string][]int16. +func (v *Value) GroupInt16(grouper func(int, int16) string) *Value { + groups := make(map[string][]int16) + v.EachInt16(func(index int, val int16) bool { + group := grouper(index, val) + if _, ok := groups[group]; !ok { + groups[group] = make([]int16, 0) + } + groups[group] = append(groups[group], val) + return true + }) + return &Value{data: groups} +} + +// ReplaceInt16 uses the specified function to replace each int16s +// by iterating each item. The data in the returned result will be a +// []int16 containing the replaced items. 
+func (v *Value) ReplaceInt16(replacer func(int, int16) int16) *Value { + arr := v.MustInt16Slice() + replaced := make([]int16, len(arr)) + v.EachInt16(func(index int, val int16) bool { + replaced[index] = replacer(index, val) + return true + }) + return &Value{data: replaced} +} + +// CollectInt16 uses the specified collector function to collect a value +// for each of the int16s in the slice. The data returned will be a +// []interface{}. +func (v *Value) CollectInt16(collector func(int, int16) interface{}) *Value { + arr := v.MustInt16Slice() + collected := make([]interface{}, len(arr)) + v.EachInt16(func(index int, val int16) bool { + collected[index] = collector(index, val) + return true + }) + return &Value{data: collected} +} + +/* + Int32 (int32 and []int32) +*/ + +// Int32 gets the value as a int32, returns the optionalDefault +// value or a system default object if the value is the wrong type. +func (v *Value) Int32(optionalDefault ...int32) int32 { + if s, ok := v.data.(int32); ok { + return s + } + if len(optionalDefault) == 1 { + return optionalDefault[0] + } + return 0 +} + +// MustInt32 gets the value as a int32. +// +// Panics if the object is not a int32. +func (v *Value) MustInt32() int32 { + return v.data.(int32) +} + +// Int32Slice gets the value as a []int32, returns the optionalDefault +// value or nil if the value is not a []int32. +func (v *Value) Int32Slice(optionalDefault ...[]int32) []int32 { + if s, ok := v.data.([]int32); ok { + return s + } + if len(optionalDefault) == 1 { + return optionalDefault[0] + } + return nil +} + +// MustInt32Slice gets the value as a []int32. +// +// Panics if the object is not a []int32. +func (v *Value) MustInt32Slice() []int32 { + return v.data.([]int32) +} + +// IsInt32 gets whether the object contained is a int32 or not. +func (v *Value) IsInt32() bool { + _, ok := v.data.(int32) + return ok +} + +// IsInt32Slice gets whether the object contained is a []int32 or not. +func (v *Value) IsInt32Slice() bool { + _, ok := v.data.([]int32) + return ok +} + +// EachInt32 calls the specified callback for each object +// in the []int32. +// +// Panics if the object is the wrong type. +func (v *Value) EachInt32(callback func(int, int32) bool) *Value { + for index, val := range v.MustInt32Slice() { + carryon := callback(index, val) + if !carryon { + break + } + } + return v +} + +// WhereInt32 uses the specified decider function to select items +// from the []int32. The object contained in the result will contain +// only the selected items. +func (v *Value) WhereInt32(decider func(int, int32) bool) *Value { + var selected []int32 + v.EachInt32(func(index int, val int32) bool { + shouldSelect := decider(index, val) + if !shouldSelect { + selected = append(selected, val) + } + return true + }) + return &Value{data: selected} +} + +// GroupInt32 uses the specified grouper function to group the items +// keyed by the return of the grouper. The object contained in the +// result will contain a map[string][]int32. +func (v *Value) GroupInt32(grouper func(int, int32) string) *Value { + groups := make(map[string][]int32) + v.EachInt32(func(index int, val int32) bool { + group := grouper(index, val) + if _, ok := groups[group]; !ok { + groups[group] = make([]int32, 0) + } + groups[group] = append(groups[group], val) + return true + }) + return &Value{data: groups} +} + +// ReplaceInt32 uses the specified function to replace each int32s +// by iterating each item. The data in the returned result will be a +// []int32 containing the replaced items. 
+func (v *Value) ReplaceInt32(replacer func(int, int32) int32) *Value { + arr := v.MustInt32Slice() + replaced := make([]int32, len(arr)) + v.EachInt32(func(index int, val int32) bool { + replaced[index] = replacer(index, val) + return true + }) + return &Value{data: replaced} +} + +// CollectInt32 uses the specified collector function to collect a value +// for each of the int32s in the slice. The data returned will be a +// []interface{}. +func (v *Value) CollectInt32(collector func(int, int32) interface{}) *Value { + arr := v.MustInt32Slice() + collected := make([]interface{}, len(arr)) + v.EachInt32(func(index int, val int32) bool { + collected[index] = collector(index, val) + return true + }) + return &Value{data: collected} +} + +/* + Int64 (int64 and []int64) +*/ + +// Int64 gets the value as a int64, returns the optionalDefault +// value or a system default object if the value is the wrong type. +func (v *Value) Int64(optionalDefault ...int64) int64 { + if s, ok := v.data.(int64); ok { + return s + } + if len(optionalDefault) == 1 { + return optionalDefault[0] + } + return 0 +} + +// MustInt64 gets the value as a int64. +// +// Panics if the object is not a int64. +func (v *Value) MustInt64() int64 { + return v.data.(int64) +} + +// Int64Slice gets the value as a []int64, returns the optionalDefault +// value or nil if the value is not a []int64. +func (v *Value) Int64Slice(optionalDefault ...[]int64) []int64 { + if s, ok := v.data.([]int64); ok { + return s + } + if len(optionalDefault) == 1 { + return optionalDefault[0] + } + return nil +} + +// MustInt64Slice gets the value as a []int64. +// +// Panics if the object is not a []int64. +func (v *Value) MustInt64Slice() []int64 { + return v.data.([]int64) +} + +// IsInt64 gets whether the object contained is a int64 or not. +func (v *Value) IsInt64() bool { + _, ok := v.data.(int64) + return ok +} + +// IsInt64Slice gets whether the object contained is a []int64 or not. +func (v *Value) IsInt64Slice() bool { + _, ok := v.data.([]int64) + return ok +} + +// EachInt64 calls the specified callback for each object +// in the []int64. +// +// Panics if the object is the wrong type. +func (v *Value) EachInt64(callback func(int, int64) bool) *Value { + for index, val := range v.MustInt64Slice() { + carryon := callback(index, val) + if !carryon { + break + } + } + return v +} + +// WhereInt64 uses the specified decider function to select items +// from the []int64. The object contained in the result will contain +// only the selected items. +func (v *Value) WhereInt64(decider func(int, int64) bool) *Value { + var selected []int64 + v.EachInt64(func(index int, val int64) bool { + shouldSelect := decider(index, val) + if !shouldSelect { + selected = append(selected, val) + } + return true + }) + return &Value{data: selected} +} + +// GroupInt64 uses the specified grouper function to group the items +// keyed by the return of the grouper. The object contained in the +// result will contain a map[string][]int64. +func (v *Value) GroupInt64(grouper func(int, int64) string) *Value { + groups := make(map[string][]int64) + v.EachInt64(func(index int, val int64) bool { + group := grouper(index, val) + if _, ok := groups[group]; !ok { + groups[group] = make([]int64, 0) + } + groups[group] = append(groups[group], val) + return true + }) + return &Value{data: groups} +} + +// ReplaceInt64 uses the specified function to replace each int64s +// by iterating each item. The data in the returned result will be a +// []int64 containing the replaced items. 
+func (v *Value) ReplaceInt64(replacer func(int, int64) int64) *Value { + arr := v.MustInt64Slice() + replaced := make([]int64, len(arr)) + v.EachInt64(func(index int, val int64) bool { + replaced[index] = replacer(index, val) + return true + }) + return &Value{data: replaced} +} + +// CollectInt64 uses the specified collector function to collect a value +// for each of the int64s in the slice. The data returned will be a +// []interface{}. +func (v *Value) CollectInt64(collector func(int, int64) interface{}) *Value { + arr := v.MustInt64Slice() + collected := make([]interface{}, len(arr)) + v.EachInt64(func(index int, val int64) bool { + collected[index] = collector(index, val) + return true + }) + return &Value{data: collected} +} + +/* + Uint (uint and []uint) +*/ + +// Uint gets the value as a uint, returns the optionalDefault +// value or a system default object if the value is the wrong type. +func (v *Value) Uint(optionalDefault ...uint) uint { + if s, ok := v.data.(uint); ok { + return s + } + if len(optionalDefault) == 1 { + return optionalDefault[0] + } + return 0 +} + +// MustUint gets the value as a uint. +// +// Panics if the object is not a uint. +func (v *Value) MustUint() uint { + return v.data.(uint) +} + +// UintSlice gets the value as a []uint, returns the optionalDefault +// value or nil if the value is not a []uint. +func (v *Value) UintSlice(optionalDefault ...[]uint) []uint { + if s, ok := v.data.([]uint); ok { + return s + } + if len(optionalDefault) == 1 { + return optionalDefault[0] + } + return nil +} + +// MustUintSlice gets the value as a []uint. +// +// Panics if the object is not a []uint. +func (v *Value) MustUintSlice() []uint { + return v.data.([]uint) +} + +// IsUint gets whether the object contained is a uint or not. +func (v *Value) IsUint() bool { + _, ok := v.data.(uint) + return ok +} + +// IsUintSlice gets whether the object contained is a []uint or not. +func (v *Value) IsUintSlice() bool { + _, ok := v.data.([]uint) + return ok +} + +// EachUint calls the specified callback for each object +// in the []uint. +// +// Panics if the object is the wrong type. +func (v *Value) EachUint(callback func(int, uint) bool) *Value { + for index, val := range v.MustUintSlice() { + carryon := callback(index, val) + if !carryon { + break + } + } + return v +} + +// WhereUint uses the specified decider function to select items +// from the []uint. The object contained in the result will contain +// only the selected items. +func (v *Value) WhereUint(decider func(int, uint) bool) *Value { + var selected []uint + v.EachUint(func(index int, val uint) bool { + shouldSelect := decider(index, val) + if !shouldSelect { + selected = append(selected, val) + } + return true + }) + return &Value{data: selected} +} + +// GroupUint uses the specified grouper function to group the items +// keyed by the return of the grouper. The object contained in the +// result will contain a map[string][]uint. +func (v *Value) GroupUint(grouper func(int, uint) string) *Value { + groups := make(map[string][]uint) + v.EachUint(func(index int, val uint) bool { + group := grouper(index, val) + if _, ok := groups[group]; !ok { + groups[group] = make([]uint, 0) + } + groups[group] = append(groups[group], val) + return true + }) + return &Value{data: groups} +} + +// ReplaceUint uses the specified function to replace each uints +// by iterating each item. The data in the returned result will be a +// []uint containing the replaced items. 
+func (v *Value) ReplaceUint(replacer func(int, uint) uint) *Value { + arr := v.MustUintSlice() + replaced := make([]uint, len(arr)) + v.EachUint(func(index int, val uint) bool { + replaced[index] = replacer(index, val) + return true + }) + return &Value{data: replaced} +} + +// CollectUint uses the specified collector function to collect a value +// for each of the uints in the slice. The data returned will be a +// []interface{}. +func (v *Value) CollectUint(collector func(int, uint) interface{}) *Value { + arr := v.MustUintSlice() + collected := make([]interface{}, len(arr)) + v.EachUint(func(index int, val uint) bool { + collected[index] = collector(index, val) + return true + }) + return &Value{data: collected} +} + +/* + Uint8 (uint8 and []uint8) +*/ + +// Uint8 gets the value as a uint8, returns the optionalDefault +// value or a system default object if the value is the wrong type. +func (v *Value) Uint8(optionalDefault ...uint8) uint8 { + if s, ok := v.data.(uint8); ok { + return s + } + if len(optionalDefault) == 1 { + return optionalDefault[0] + } + return 0 +} + +// MustUint8 gets the value as a uint8. +// +// Panics if the object is not a uint8. +func (v *Value) MustUint8() uint8 { + return v.data.(uint8) +} + +// Uint8Slice gets the value as a []uint8, returns the optionalDefault +// value or nil if the value is not a []uint8. +func (v *Value) Uint8Slice(optionalDefault ...[]uint8) []uint8 { + if s, ok := v.data.([]uint8); ok { + return s + } + if len(optionalDefault) == 1 { + return optionalDefault[0] + } + return nil +} + +// MustUint8Slice gets the value as a []uint8. +// +// Panics if the object is not a []uint8. +func (v *Value) MustUint8Slice() []uint8 { + return v.data.([]uint8) +} + +// IsUint8 gets whether the object contained is a uint8 or not. +func (v *Value) IsUint8() bool { + _, ok := v.data.(uint8) + return ok +} + +// IsUint8Slice gets whether the object contained is a []uint8 or not. +func (v *Value) IsUint8Slice() bool { + _, ok := v.data.([]uint8) + return ok +} + +// EachUint8 calls the specified callback for each object +// in the []uint8. +// +// Panics if the object is the wrong type. +func (v *Value) EachUint8(callback func(int, uint8) bool) *Value { + for index, val := range v.MustUint8Slice() { + carryon := callback(index, val) + if !carryon { + break + } + } + return v +} + +// WhereUint8 uses the specified decider function to select items +// from the []uint8. The object contained in the result will contain +// only the selected items. +func (v *Value) WhereUint8(decider func(int, uint8) bool) *Value { + var selected []uint8 + v.EachUint8(func(index int, val uint8) bool { + shouldSelect := decider(index, val) + if !shouldSelect { + selected = append(selected, val) + } + return true + }) + return &Value{data: selected} +} + +// GroupUint8 uses the specified grouper function to group the items +// keyed by the return of the grouper. The object contained in the +// result will contain a map[string][]uint8. +func (v *Value) GroupUint8(grouper func(int, uint8) string) *Value { + groups := make(map[string][]uint8) + v.EachUint8(func(index int, val uint8) bool { + group := grouper(index, val) + if _, ok := groups[group]; !ok { + groups[group] = make([]uint8, 0) + } + groups[group] = append(groups[group], val) + return true + }) + return &Value{data: groups} +} + +// ReplaceUint8 uses the specified function to replace each uint8s +// by iterating each item. The data in the returned result will be a +// []uint8 containing the replaced items. 
+func (v *Value) ReplaceUint8(replacer func(int, uint8) uint8) *Value { + arr := v.MustUint8Slice() + replaced := make([]uint8, len(arr)) + v.EachUint8(func(index int, val uint8) bool { + replaced[index] = replacer(index, val) + return true + }) + return &Value{data: replaced} +} + +// CollectUint8 uses the specified collector function to collect a value +// for each of the uint8s in the slice. The data returned will be a +// []interface{}. +func (v *Value) CollectUint8(collector func(int, uint8) interface{}) *Value { + arr := v.MustUint8Slice() + collected := make([]interface{}, len(arr)) + v.EachUint8(func(index int, val uint8) bool { + collected[index] = collector(index, val) + return true + }) + return &Value{data: collected} +} + +/* + Uint16 (uint16 and []uint16) +*/ + +// Uint16 gets the value as a uint16, returns the optionalDefault +// value or a system default object if the value is the wrong type. +func (v *Value) Uint16(optionalDefault ...uint16) uint16 { + if s, ok := v.data.(uint16); ok { + return s + } + if len(optionalDefault) == 1 { + return optionalDefault[0] + } + return 0 +} + +// MustUint16 gets the value as a uint16. +// +// Panics if the object is not a uint16. +func (v *Value) MustUint16() uint16 { + return v.data.(uint16) +} + +// Uint16Slice gets the value as a []uint16, returns the optionalDefault +// value or nil if the value is not a []uint16. +func (v *Value) Uint16Slice(optionalDefault ...[]uint16) []uint16 { + if s, ok := v.data.([]uint16); ok { + return s + } + if len(optionalDefault) == 1 { + return optionalDefault[0] + } + return nil +} + +// MustUint16Slice gets the value as a []uint16. +// +// Panics if the object is not a []uint16. +func (v *Value) MustUint16Slice() []uint16 { + return v.data.([]uint16) +} + +// IsUint16 gets whether the object contained is a uint16 or not. +func (v *Value) IsUint16() bool { + _, ok := v.data.(uint16) + return ok +} + +// IsUint16Slice gets whether the object contained is a []uint16 or not. +func (v *Value) IsUint16Slice() bool { + _, ok := v.data.([]uint16) + return ok +} + +// EachUint16 calls the specified callback for each object +// in the []uint16. +// +// Panics if the object is the wrong type. +func (v *Value) EachUint16(callback func(int, uint16) bool) *Value { + for index, val := range v.MustUint16Slice() { + carryon := callback(index, val) + if !carryon { + break + } + } + return v +} + +// WhereUint16 uses the specified decider function to select items +// from the []uint16. The object contained in the result will contain +// only the selected items. +func (v *Value) WhereUint16(decider func(int, uint16) bool) *Value { + var selected []uint16 + v.EachUint16(func(index int, val uint16) bool { + shouldSelect := decider(index, val) + if !shouldSelect { + selected = append(selected, val) + } + return true + }) + return &Value{data: selected} +} + +// GroupUint16 uses the specified grouper function to group the items +// keyed by the return of the grouper. The object contained in the +// result will contain a map[string][]uint16. +func (v *Value) GroupUint16(grouper func(int, uint16) string) *Value { + groups := make(map[string][]uint16) + v.EachUint16(func(index int, val uint16) bool { + group := grouper(index, val) + if _, ok := groups[group]; !ok { + groups[group] = make([]uint16, 0) + } + groups[group] = append(groups[group], val) + return true + }) + return &Value{data: groups} +} + +// ReplaceUint16 uses the specified function to replace each uint16s +// by iterating each item. 
The data in the returned result will be a +// []uint16 containing the replaced items. +func (v *Value) ReplaceUint16(replacer func(int, uint16) uint16) *Value { + arr := v.MustUint16Slice() + replaced := make([]uint16, len(arr)) + v.EachUint16(func(index int, val uint16) bool { + replaced[index] = replacer(index, val) + return true + }) + return &Value{data: replaced} +} + +// CollectUint16 uses the specified collector function to collect a value +// for each of the uint16s in the slice. The data returned will be a +// []interface{}. +func (v *Value) CollectUint16(collector func(int, uint16) interface{}) *Value { + arr := v.MustUint16Slice() + collected := make([]interface{}, len(arr)) + v.EachUint16(func(index int, val uint16) bool { + collected[index] = collector(index, val) + return true + }) + return &Value{data: collected} +} + +/* + Uint32 (uint32 and []uint32) +*/ + +// Uint32 gets the value as a uint32, returns the optionalDefault +// value or a system default object if the value is the wrong type. +func (v *Value) Uint32(optionalDefault ...uint32) uint32 { + if s, ok := v.data.(uint32); ok { + return s + } + if len(optionalDefault) == 1 { + return optionalDefault[0] + } + return 0 +} + +// MustUint32 gets the value as a uint32. +// +// Panics if the object is not a uint32. +func (v *Value) MustUint32() uint32 { + return v.data.(uint32) +} + +// Uint32Slice gets the value as a []uint32, returns the optionalDefault +// value or nil if the value is not a []uint32. +func (v *Value) Uint32Slice(optionalDefault ...[]uint32) []uint32 { + if s, ok := v.data.([]uint32); ok { + return s + } + if len(optionalDefault) == 1 { + return optionalDefault[0] + } + return nil +} + +// MustUint32Slice gets the value as a []uint32. +// +// Panics if the object is not a []uint32. +func (v *Value) MustUint32Slice() []uint32 { + return v.data.([]uint32) +} + +// IsUint32 gets whether the object contained is a uint32 or not. +func (v *Value) IsUint32() bool { + _, ok := v.data.(uint32) + return ok +} + +// IsUint32Slice gets whether the object contained is a []uint32 or not. +func (v *Value) IsUint32Slice() bool { + _, ok := v.data.([]uint32) + return ok +} + +// EachUint32 calls the specified callback for each object +// in the []uint32. +// +// Panics if the object is the wrong type. +func (v *Value) EachUint32(callback func(int, uint32) bool) *Value { + for index, val := range v.MustUint32Slice() { + carryon := callback(index, val) + if !carryon { + break + } + } + return v +} + +// WhereUint32 uses the specified decider function to select items +// from the []uint32. The object contained in the result will contain +// only the selected items. +func (v *Value) WhereUint32(decider func(int, uint32) bool) *Value { + var selected []uint32 + v.EachUint32(func(index int, val uint32) bool { + shouldSelect := decider(index, val) + if !shouldSelect { + selected = append(selected, val) + } + return true + }) + return &Value{data: selected} +} + +// GroupUint32 uses the specified grouper function to group the items +// keyed by the return of the grouper. The object contained in the +// result will contain a map[string][]uint32. 
+func (v *Value) GroupUint32(grouper func(int, uint32) string) *Value { + groups := make(map[string][]uint32) + v.EachUint32(func(index int, val uint32) bool { + group := grouper(index, val) + if _, ok := groups[group]; !ok { + groups[group] = make([]uint32, 0) + } + groups[group] = append(groups[group], val) + return true + }) + return &Value{data: groups} +} + +// ReplaceUint32 uses the specified function to replace each uint32s +// by iterating each item. The data in the returned result will be a +// []uint32 containing the replaced items. +func (v *Value) ReplaceUint32(replacer func(int, uint32) uint32) *Value { + arr := v.MustUint32Slice() + replaced := make([]uint32, len(arr)) + v.EachUint32(func(index int, val uint32) bool { + replaced[index] = replacer(index, val) + return true + }) + return &Value{data: replaced} +} + +// CollectUint32 uses the specified collector function to collect a value +// for each of the uint32s in the slice. The data returned will be a +// []interface{}. +func (v *Value) CollectUint32(collector func(int, uint32) interface{}) *Value { + arr := v.MustUint32Slice() + collected := make([]interface{}, len(arr)) + v.EachUint32(func(index int, val uint32) bool { + collected[index] = collector(index, val) + return true + }) + return &Value{data: collected} +} + +/* + Uint64 (uint64 and []uint64) +*/ + +// Uint64 gets the value as a uint64, returns the optionalDefault +// value or a system default object if the value is the wrong type. +func (v *Value) Uint64(optionalDefault ...uint64) uint64 { + if s, ok := v.data.(uint64); ok { + return s + } + if len(optionalDefault) == 1 { + return optionalDefault[0] + } + return 0 +} + +// MustUint64 gets the value as a uint64. +// +// Panics if the object is not a uint64. +func (v *Value) MustUint64() uint64 { + return v.data.(uint64) +} + +// Uint64Slice gets the value as a []uint64, returns the optionalDefault +// value or nil if the value is not a []uint64. +func (v *Value) Uint64Slice(optionalDefault ...[]uint64) []uint64 { + if s, ok := v.data.([]uint64); ok { + return s + } + if len(optionalDefault) == 1 { + return optionalDefault[0] + } + return nil +} + +// MustUint64Slice gets the value as a []uint64. +// +// Panics if the object is not a []uint64. +func (v *Value) MustUint64Slice() []uint64 { + return v.data.([]uint64) +} + +// IsUint64 gets whether the object contained is a uint64 or not. +func (v *Value) IsUint64() bool { + _, ok := v.data.(uint64) + return ok +} + +// IsUint64Slice gets whether the object contained is a []uint64 or not. +func (v *Value) IsUint64Slice() bool { + _, ok := v.data.([]uint64) + return ok +} + +// EachUint64 calls the specified callback for each object +// in the []uint64. +// +// Panics if the object is the wrong type. +func (v *Value) EachUint64(callback func(int, uint64) bool) *Value { + for index, val := range v.MustUint64Slice() { + carryon := callback(index, val) + if !carryon { + break + } + } + return v +} + +// WhereUint64 uses the specified decider function to select items +// from the []uint64. The object contained in the result will contain +// only the selected items. +func (v *Value) WhereUint64(decider func(int, uint64) bool) *Value { + var selected []uint64 + v.EachUint64(func(index int, val uint64) bool { + shouldSelect := decider(index, val) + if !shouldSelect { + selected = append(selected, val) + } + return true + }) + return &Value{data: selected} +} + +// GroupUint64 uses the specified grouper function to group the items +// keyed by the return of the grouper. 
The object contained in the +// result will contain a map[string][]uint64. +func (v *Value) GroupUint64(grouper func(int, uint64) string) *Value { + groups := make(map[string][]uint64) + v.EachUint64(func(index int, val uint64) bool { + group := grouper(index, val) + if _, ok := groups[group]; !ok { + groups[group] = make([]uint64, 0) + } + groups[group] = append(groups[group], val) + return true + }) + return &Value{data: groups} +} + +// ReplaceUint64 uses the specified function to replace each uint64s +// by iterating each item. The data in the returned result will be a +// []uint64 containing the replaced items. +func (v *Value) ReplaceUint64(replacer func(int, uint64) uint64) *Value { + arr := v.MustUint64Slice() + replaced := make([]uint64, len(arr)) + v.EachUint64(func(index int, val uint64) bool { + replaced[index] = replacer(index, val) + return true + }) + return &Value{data: replaced} +} + +// CollectUint64 uses the specified collector function to collect a value +// for each of the uint64s in the slice. The data returned will be a +// []interface{}. +func (v *Value) CollectUint64(collector func(int, uint64) interface{}) *Value { + arr := v.MustUint64Slice() + collected := make([]interface{}, len(arr)) + v.EachUint64(func(index int, val uint64) bool { + collected[index] = collector(index, val) + return true + }) + return &Value{data: collected} +} + +/* + Uintptr (uintptr and []uintptr) +*/ + +// Uintptr gets the value as a uintptr, returns the optionalDefault +// value or a system default object if the value is the wrong type. +func (v *Value) Uintptr(optionalDefault ...uintptr) uintptr { + if s, ok := v.data.(uintptr); ok { + return s + } + if len(optionalDefault) == 1 { + return optionalDefault[0] + } + return 0 +} + +// MustUintptr gets the value as a uintptr. +// +// Panics if the object is not a uintptr. +func (v *Value) MustUintptr() uintptr { + return v.data.(uintptr) +} + +// UintptrSlice gets the value as a []uintptr, returns the optionalDefault +// value or nil if the value is not a []uintptr. +func (v *Value) UintptrSlice(optionalDefault ...[]uintptr) []uintptr { + if s, ok := v.data.([]uintptr); ok { + return s + } + if len(optionalDefault) == 1 { + return optionalDefault[0] + } + return nil +} + +// MustUintptrSlice gets the value as a []uintptr. +// +// Panics if the object is not a []uintptr. +func (v *Value) MustUintptrSlice() []uintptr { + return v.data.([]uintptr) +} + +// IsUintptr gets whether the object contained is a uintptr or not. +func (v *Value) IsUintptr() bool { + _, ok := v.data.(uintptr) + return ok +} + +// IsUintptrSlice gets whether the object contained is a []uintptr or not. +func (v *Value) IsUintptrSlice() bool { + _, ok := v.data.([]uintptr) + return ok +} + +// EachUintptr calls the specified callback for each object +// in the []uintptr. +// +// Panics if the object is the wrong type. +func (v *Value) EachUintptr(callback func(int, uintptr) bool) *Value { + for index, val := range v.MustUintptrSlice() { + carryon := callback(index, val) + if !carryon { + break + } + } + return v +} + +// WhereUintptr uses the specified decider function to select items +// from the []uintptr. The object contained in the result will contain +// only the selected items. 
+func (v *Value) WhereUintptr(decider func(int, uintptr) bool) *Value { + var selected []uintptr + v.EachUintptr(func(index int, val uintptr) bool { + shouldSelect := decider(index, val) + if !shouldSelect { + selected = append(selected, val) + } + return true + }) + return &Value{data: selected} +} + +// GroupUintptr uses the specified grouper function to group the items +// keyed by the return of the grouper. The object contained in the +// result will contain a map[string][]uintptr. +func (v *Value) GroupUintptr(grouper func(int, uintptr) string) *Value { + groups := make(map[string][]uintptr) + v.EachUintptr(func(index int, val uintptr) bool { + group := grouper(index, val) + if _, ok := groups[group]; !ok { + groups[group] = make([]uintptr, 0) + } + groups[group] = append(groups[group], val) + return true + }) + return &Value{data: groups} +} + +// ReplaceUintptr uses the specified function to replace each uintptrs +// by iterating each item. The data in the returned result will be a +// []uintptr containing the replaced items. +func (v *Value) ReplaceUintptr(replacer func(int, uintptr) uintptr) *Value { + arr := v.MustUintptrSlice() + replaced := make([]uintptr, len(arr)) + v.EachUintptr(func(index int, val uintptr) bool { + replaced[index] = replacer(index, val) + return true + }) + return &Value{data: replaced} +} + +// CollectUintptr uses the specified collector function to collect a value +// for each of the uintptrs in the slice. The data returned will be a +// []interface{}. +func (v *Value) CollectUintptr(collector func(int, uintptr) interface{}) *Value { + arr := v.MustUintptrSlice() + collected := make([]interface{}, len(arr)) + v.EachUintptr(func(index int, val uintptr) bool { + collected[index] = collector(index, val) + return true + }) + return &Value{data: collected} +} + +/* + Float32 (float32 and []float32) +*/ + +// Float32 gets the value as a float32, returns the optionalDefault +// value or a system default object if the value is the wrong type. +func (v *Value) Float32(optionalDefault ...float32) float32 { + if s, ok := v.data.(float32); ok { + return s + } + if len(optionalDefault) == 1 { + return optionalDefault[0] + } + return 0 +} + +// MustFloat32 gets the value as a float32. +// +// Panics if the object is not a float32. +func (v *Value) MustFloat32() float32 { + return v.data.(float32) +} + +// Float32Slice gets the value as a []float32, returns the optionalDefault +// value or nil if the value is not a []float32. +func (v *Value) Float32Slice(optionalDefault ...[]float32) []float32 { + if s, ok := v.data.([]float32); ok { + return s + } + if len(optionalDefault) == 1 { + return optionalDefault[0] + } + return nil +} + +// MustFloat32Slice gets the value as a []float32. +// +// Panics if the object is not a []float32. +func (v *Value) MustFloat32Slice() []float32 { + return v.data.([]float32) +} + +// IsFloat32 gets whether the object contained is a float32 or not. +func (v *Value) IsFloat32() bool { + _, ok := v.data.(float32) + return ok +} + +// IsFloat32Slice gets whether the object contained is a []float32 or not. +func (v *Value) IsFloat32Slice() bool { + _, ok := v.data.([]float32) + return ok +} + +// EachFloat32 calls the specified callback for each object +// in the []float32. +// +// Panics if the object is the wrong type. 
+func (v *Value) EachFloat32(callback func(int, float32) bool) *Value { + for index, val := range v.MustFloat32Slice() { + carryon := callback(index, val) + if !carryon { + break + } + } + return v +} + +// WhereFloat32 uses the specified decider function to select items +// from the []float32. The object contained in the result will contain +// only the selected items. +func (v *Value) WhereFloat32(decider func(int, float32) bool) *Value { + var selected []float32 + v.EachFloat32(func(index int, val float32) bool { + shouldSelect := decider(index, val) + if !shouldSelect { + selected = append(selected, val) + } + return true + }) + return &Value{data: selected} +} + +// GroupFloat32 uses the specified grouper function to group the items +// keyed by the return of the grouper. The object contained in the +// result will contain a map[string][]float32. +func (v *Value) GroupFloat32(grouper func(int, float32) string) *Value { + groups := make(map[string][]float32) + v.EachFloat32(func(index int, val float32) bool { + group := grouper(index, val) + if _, ok := groups[group]; !ok { + groups[group] = make([]float32, 0) + } + groups[group] = append(groups[group], val) + return true + }) + return &Value{data: groups} +} + +// ReplaceFloat32 uses the specified function to replace each float32s +// by iterating each item. The data in the returned result will be a +// []float32 containing the replaced items. +func (v *Value) ReplaceFloat32(replacer func(int, float32) float32) *Value { + arr := v.MustFloat32Slice() + replaced := make([]float32, len(arr)) + v.EachFloat32(func(index int, val float32) bool { + replaced[index] = replacer(index, val) + return true + }) + return &Value{data: replaced} +} + +// CollectFloat32 uses the specified collector function to collect a value +// for each of the float32s in the slice. The data returned will be a +// []interface{}. +func (v *Value) CollectFloat32(collector func(int, float32) interface{}) *Value { + arr := v.MustFloat32Slice() + collected := make([]interface{}, len(arr)) + v.EachFloat32(func(index int, val float32) bool { + collected[index] = collector(index, val) + return true + }) + return &Value{data: collected} +} + +/* + Float64 (float64 and []float64) +*/ + +// Float64 gets the value as a float64, returns the optionalDefault +// value or a system default object if the value is the wrong type. +func (v *Value) Float64(optionalDefault ...float64) float64 { + if s, ok := v.data.(float64); ok { + return s + } + if len(optionalDefault) == 1 { + return optionalDefault[0] + } + return 0 +} + +// MustFloat64 gets the value as a float64. +// +// Panics if the object is not a float64. +func (v *Value) MustFloat64() float64 { + return v.data.(float64) +} + +// Float64Slice gets the value as a []float64, returns the optionalDefault +// value or nil if the value is not a []float64. +func (v *Value) Float64Slice(optionalDefault ...[]float64) []float64 { + if s, ok := v.data.([]float64); ok { + return s + } + if len(optionalDefault) == 1 { + return optionalDefault[0] + } + return nil +} + +// MustFloat64Slice gets the value as a []float64. +// +// Panics if the object is not a []float64. +func (v *Value) MustFloat64Slice() []float64 { + return v.data.([]float64) +} + +// IsFloat64 gets whether the object contained is a float64 or not. +func (v *Value) IsFloat64() bool { + _, ok := v.data.(float64) + return ok +} + +// IsFloat64Slice gets whether the object contained is a []float64 or not. 
+func (v *Value) IsFloat64Slice() bool { + _, ok := v.data.([]float64) + return ok +} + +// EachFloat64 calls the specified callback for each object +// in the []float64. +// +// Panics if the object is the wrong type. +func (v *Value) EachFloat64(callback func(int, float64) bool) *Value { + for index, val := range v.MustFloat64Slice() { + carryon := callback(index, val) + if !carryon { + break + } + } + return v +} + +// WhereFloat64 uses the specified decider function to select items +// from the []float64. The object contained in the result will contain +// only the selected items. +func (v *Value) WhereFloat64(decider func(int, float64) bool) *Value { + var selected []float64 + v.EachFloat64(func(index int, val float64) bool { + shouldSelect := decider(index, val) + if !shouldSelect { + selected = append(selected, val) + } + return true + }) + return &Value{data: selected} +} + +// GroupFloat64 uses the specified grouper function to group the items +// keyed by the return of the grouper. The object contained in the +// result will contain a map[string][]float64. +func (v *Value) GroupFloat64(grouper func(int, float64) string) *Value { + groups := make(map[string][]float64) + v.EachFloat64(func(index int, val float64) bool { + group := grouper(index, val) + if _, ok := groups[group]; !ok { + groups[group] = make([]float64, 0) + } + groups[group] = append(groups[group], val) + return true + }) + return &Value{data: groups} +} + +// ReplaceFloat64 uses the specified function to replace each float64s +// by iterating each item. The data in the returned result will be a +// []float64 containing the replaced items. +func (v *Value) ReplaceFloat64(replacer func(int, float64) float64) *Value { + arr := v.MustFloat64Slice() + replaced := make([]float64, len(arr)) + v.EachFloat64(func(index int, val float64) bool { + replaced[index] = replacer(index, val) + return true + }) + return &Value{data: replaced} +} + +// CollectFloat64 uses the specified collector function to collect a value +// for each of the float64s in the slice. The data returned will be a +// []interface{}. +func (v *Value) CollectFloat64(collector func(int, float64) interface{}) *Value { + arr := v.MustFloat64Slice() + collected := make([]interface{}, len(arr)) + v.EachFloat64(func(index int, val float64) bool { + collected[index] = collector(index, val) + return true + }) + return &Value{data: collected} +} + +/* + Complex64 (complex64 and []complex64) +*/ + +// Complex64 gets the value as a complex64, returns the optionalDefault +// value or a system default object if the value is the wrong type. +func (v *Value) Complex64(optionalDefault ...complex64) complex64 { + if s, ok := v.data.(complex64); ok { + return s + } + if len(optionalDefault) == 1 { + return optionalDefault[0] + } + return 0 +} + +// MustComplex64 gets the value as a complex64. +// +// Panics if the object is not a complex64. +func (v *Value) MustComplex64() complex64 { + return v.data.(complex64) +} + +// Complex64Slice gets the value as a []complex64, returns the optionalDefault +// value or nil if the value is not a []complex64. +func (v *Value) Complex64Slice(optionalDefault ...[]complex64) []complex64 { + if s, ok := v.data.([]complex64); ok { + return s + } + if len(optionalDefault) == 1 { + return optionalDefault[0] + } + return nil +} + +// MustComplex64Slice gets the value as a []complex64. +// +// Panics if the object is not a []complex64. 
+func (v *Value) MustComplex64Slice() []complex64 { + return v.data.([]complex64) +} + +// IsComplex64 gets whether the object contained is a complex64 or not. +func (v *Value) IsComplex64() bool { + _, ok := v.data.(complex64) + return ok +} + +// IsComplex64Slice gets whether the object contained is a []complex64 or not. +func (v *Value) IsComplex64Slice() bool { + _, ok := v.data.([]complex64) + return ok +} + +// EachComplex64 calls the specified callback for each object +// in the []complex64. +// +// Panics if the object is the wrong type. +func (v *Value) EachComplex64(callback func(int, complex64) bool) *Value { + for index, val := range v.MustComplex64Slice() { + carryon := callback(index, val) + if !carryon { + break + } + } + return v +} + +// WhereComplex64 uses the specified decider function to select items +// from the []complex64. The object contained in the result will contain +// only the selected items. +func (v *Value) WhereComplex64(decider func(int, complex64) bool) *Value { + var selected []complex64 + v.EachComplex64(func(index int, val complex64) bool { + shouldSelect := decider(index, val) + if !shouldSelect { + selected = append(selected, val) + } + return true + }) + return &Value{data: selected} +} + +// GroupComplex64 uses the specified grouper function to group the items +// keyed by the return of the grouper. The object contained in the +// result will contain a map[string][]complex64. +func (v *Value) GroupComplex64(grouper func(int, complex64) string) *Value { + groups := make(map[string][]complex64) + v.EachComplex64(func(index int, val complex64) bool { + group := grouper(index, val) + if _, ok := groups[group]; !ok { + groups[group] = make([]complex64, 0) + } + groups[group] = append(groups[group], val) + return true + }) + return &Value{data: groups} +} + +// ReplaceComplex64 uses the specified function to replace each complex64s +// by iterating each item. The data in the returned result will be a +// []complex64 containing the replaced items. +func (v *Value) ReplaceComplex64(replacer func(int, complex64) complex64) *Value { + arr := v.MustComplex64Slice() + replaced := make([]complex64, len(arr)) + v.EachComplex64(func(index int, val complex64) bool { + replaced[index] = replacer(index, val) + return true + }) + return &Value{data: replaced} +} + +// CollectComplex64 uses the specified collector function to collect a value +// for each of the complex64s in the slice. The data returned will be a +// []interface{}. +func (v *Value) CollectComplex64(collector func(int, complex64) interface{}) *Value { + arr := v.MustComplex64Slice() + collected := make([]interface{}, len(arr)) + v.EachComplex64(func(index int, val complex64) bool { + collected[index] = collector(index, val) + return true + }) + return &Value{data: collected} +} + +/* + Complex128 (complex128 and []complex128) +*/ + +// Complex128 gets the value as a complex128, returns the optionalDefault +// value or a system default object if the value is the wrong type. +func (v *Value) Complex128(optionalDefault ...complex128) complex128 { + if s, ok := v.data.(complex128); ok { + return s + } + if len(optionalDefault) == 1 { + return optionalDefault[0] + } + return 0 +} + +// MustComplex128 gets the value as a complex128. +// +// Panics if the object is not a complex128. +func (v *Value) MustComplex128() complex128 { + return v.data.(complex128) +} + +// Complex128Slice gets the value as a []complex128, returns the optionalDefault +// value or nil if the value is not a []complex128. 
+func (v *Value) Complex128Slice(optionalDefault ...[]complex128) []complex128 { + if s, ok := v.data.([]complex128); ok { + return s + } + if len(optionalDefault) == 1 { + return optionalDefault[0] + } + return nil +} + +// MustComplex128Slice gets the value as a []complex128. +// +// Panics if the object is not a []complex128. +func (v *Value) MustComplex128Slice() []complex128 { + return v.data.([]complex128) +} + +// IsComplex128 gets whether the object contained is a complex128 or not. +func (v *Value) IsComplex128() bool { + _, ok := v.data.(complex128) + return ok +} + +// IsComplex128Slice gets whether the object contained is a []complex128 or not. +func (v *Value) IsComplex128Slice() bool { + _, ok := v.data.([]complex128) + return ok +} + +// EachComplex128 calls the specified callback for each object +// in the []complex128. +// +// Panics if the object is the wrong type. +func (v *Value) EachComplex128(callback func(int, complex128) bool) *Value { + for index, val := range v.MustComplex128Slice() { + carryon := callback(index, val) + if !carryon { + break + } + } + return v +} + +// WhereComplex128 uses the specified decider function to select items +// from the []complex128. The object contained in the result will contain +// only the selected items. +func (v *Value) WhereComplex128(decider func(int, complex128) bool) *Value { + var selected []complex128 + v.EachComplex128(func(index int, val complex128) bool { + shouldSelect := decider(index, val) + if !shouldSelect { + selected = append(selected, val) + } + return true + }) + return &Value{data: selected} +} + +// GroupComplex128 uses the specified grouper function to group the items +// keyed by the return of the grouper. The object contained in the +// result will contain a map[string][]complex128. +func (v *Value) GroupComplex128(grouper func(int, complex128) string) *Value { + groups := make(map[string][]complex128) + v.EachComplex128(func(index int, val complex128) bool { + group := grouper(index, val) + if _, ok := groups[group]; !ok { + groups[group] = make([]complex128, 0) + } + groups[group] = append(groups[group], val) + return true + }) + return &Value{data: groups} +} + +// ReplaceComplex128 uses the specified function to replace each complex128s +// by iterating each item. The data in the returned result will be a +// []complex128 containing the replaced items. +func (v *Value) ReplaceComplex128(replacer func(int, complex128) complex128) *Value { + arr := v.MustComplex128Slice() + replaced := make([]complex128, len(arr)) + v.EachComplex128(func(index int, val complex128) bool { + replaced[index] = replacer(index, val) + return true + }) + return &Value{data: replaced} +} + +// CollectComplex128 uses the specified collector function to collect a value +// for each of the complex128s in the slice. The data returned will be a +// []interface{}. +func (v *Value) CollectComplex128(collector func(int, complex128) interface{}) *Value { + arr := v.MustComplex128Slice() + collected := make([]interface{}, len(arr)) + v.EachComplex128(func(index int, val complex128) bool { + collected[index] = collector(index, val) + return true + }) + return &Value{data: collected} +} diff --git a/vendor/github.com/stretchr/objx/value.go b/vendor/github.com/stretchr/objx/value.go new file mode 100644 index 0000000..e4b4a14 --- /dev/null +++ b/vendor/github.com/stretchr/objx/value.go @@ -0,0 +1,53 @@ +package objx + +import ( + "fmt" + "strconv" +) + +// Value provides methods for extracting interface{} data in various +// types. 
+type Value struct { + // data contains the raw data being managed by this Value + data interface{} +} + +// Data returns the raw data contained by this Value +func (v *Value) Data() interface{} { + return v.data +} + +// String returns the value always as a string +func (v *Value) String() string { + switch { + case v.IsStr(): + return v.Str() + case v.IsBool(): + return strconv.FormatBool(v.Bool()) + case v.IsFloat32(): + return strconv.FormatFloat(float64(v.Float32()), 'f', -1, 32) + case v.IsFloat64(): + return strconv.FormatFloat(v.Float64(), 'f', -1, 64) + case v.IsInt(): + return strconv.FormatInt(int64(v.Int()), 10) + case v.IsInt8(): + return strconv.FormatInt(int64(v.Int8()), 10) + case v.IsInt16(): + return strconv.FormatInt(int64(v.Int16()), 10) + case v.IsInt32(): + return strconv.FormatInt(int64(v.Int32()), 10) + case v.IsInt64(): + return strconv.FormatInt(v.Int64(), 10) + case v.IsUint(): + return strconv.FormatUint(uint64(v.Uint()), 10) + case v.IsUint8(): + return strconv.FormatUint(uint64(v.Uint8()), 10) + case v.IsUint16(): + return strconv.FormatUint(uint64(v.Uint16()), 10) + case v.IsUint32(): + return strconv.FormatUint(uint64(v.Uint32()), 10) + case v.IsUint64(): + return strconv.FormatUint(v.Uint64(), 10) + } + return fmt.Sprintf("%#v", v.Data()) +} diff --git a/vendor/github.com/stretchr/testify/mock/doc.go b/vendor/github.com/stretchr/testify/mock/doc.go new file mode 100644 index 0000000..7324128 --- /dev/null +++ b/vendor/github.com/stretchr/testify/mock/doc.go @@ -0,0 +1,44 @@ +// Package mock provides a system by which it is possible to mock your objects +// and verify calls are happening as expected. +// +// Example Usage +// +// The mock package provides an object, Mock, that tracks activity on another object. It is usually +// embedded into a test object as shown below: +// +// type MyTestObject struct { +// // add a Mock object instance +// mock.Mock +// +// // other fields go here as normal +// } +// +// When implementing the methods of an interface, you wire your functions up +// to call the Mock.Called(args...) method, and return the appropriate values. +// +// For example, to mock a method that saves the name and age of a person and returns +// the year of their birth or an error, you might write this: +// +// func (o *MyTestObject) SavePersonDetails(firstname, lastname string, age int) (int, error) { +// args := o.Called(firstname, lastname, age) +// return args.Int(0), args.Error(1) +// } +// +// The Int, Error and Bool methods are examples of strongly typed getters that take the argument +// index position. Given this argument list: +// +// (12, true, "Something") +// +// You could read them out strongly typed like this: +// +// args.Int(0) +// args.Bool(1) +// args.String(2) +// +// For objects of your own type, use the generic Arguments.Get(index) method and make a type assertion: +// +// return args.Get(0).(*MyObject), args.Get(1).(*AnotherObjectOfMine) +// +// This may cause a panic if the object you are getting is nil (the type assertion will fail), in those +// cases you should check for nil first. 
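Pulling the usage walkthrough above into one place, a minimal sketch of a test written against this package might look like the following; the KVStore interface, MockKVStore type, and TestSavesValue function are illustrative names only, not part of this patch:

    // kvstore_test.go (hypothetical)
    package kvstore

    import (
        "errors"
        "testing"

        "github.com/stretchr/testify/mock"
    )

    // KVStore is the interface under test (illustrative only).
    type KVStore interface {
        Put(key string, value int) error
    }

    // MockKVStore wires KVStore up to the mock package by embedding mock.Mock.
    type MockKVStore struct {
        mock.Mock
    }

    // Put records the call and returns whatever the expectation was configured with.
    func (m *MockKVStore) Put(key string, value int) error {
        args := m.Called(key, value)
        return args.Error(0)
    }

    func TestSavesValue(t *testing.T) {
        store := new(MockKVStore)

        // Expect exactly one Put("answer", 42) and have it fail.
        store.On("Put", "answer", 42).Return(errors.New("disk full")).Once()

        if err := store.Put("answer", 42); err == nil {
            t.Fatal("expected the configured error")
        }

        // Verify that every expectation set with On/Return was met.
        store.AssertExpectations(t)
    }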
+package mock diff --git a/vendor/github.com/stretchr/testify/mock/mock.go b/vendor/github.com/stretchr/testify/mock/mock.go new file mode 100644 index 0000000..cc4f642 --- /dev/null +++ b/vendor/github.com/stretchr/testify/mock/mock.go @@ -0,0 +1,885 @@ +package mock + +import ( + "errors" + "fmt" + "reflect" + "regexp" + "runtime" + "strings" + "sync" + "time" + + "github.com/davecgh/go-spew/spew" + "github.com/pmezard/go-difflib/difflib" + "github.com/stretchr/objx" + "github.com/stretchr/testify/assert" +) + +// TestingT is an interface wrapper around *testing.T +type TestingT interface { + Logf(format string, args ...interface{}) + Errorf(format string, args ...interface{}) + FailNow() +} + +/* + Call +*/ + +// Call represents a method call and is used for setting expectations, +// as well as recording activity. +type Call struct { + Parent *Mock + + // The name of the method that was or will be called. + Method string + + // Holds the arguments of the method. + Arguments Arguments + + // Holds the arguments that should be returned when + // this method is called. + ReturnArguments Arguments + + // Holds the caller info for the On() call + callerInfo []string + + // The number of times to return the return arguments when setting + // expectations. 0 means to always return the value. + Repeatability int + + // Amount of times this call has been called + totalCalls int + + // Call to this method can be optional + optional bool + + // Holds a channel that will be used to block the Return until it either + // receives a message or is closed. nil means it returns immediately. + WaitFor <-chan time.Time + + waitTime time.Duration + + // Holds a handler used to manipulate arguments content that are passed by + // reference. It's useful when mocking methods such as unmarshalers or + // decoders. + RunFn func(Arguments) +} + +func newCall(parent *Mock, methodName string, callerInfo []string, methodArguments ...interface{}) *Call { + return &Call{ + Parent: parent, + Method: methodName, + Arguments: methodArguments, + ReturnArguments: make([]interface{}, 0), + callerInfo: callerInfo, + Repeatability: 0, + WaitFor: nil, + RunFn: nil, + } +} + +func (c *Call) lock() { + c.Parent.mutex.Lock() +} + +func (c *Call) unlock() { + c.Parent.mutex.Unlock() +} + +// Return specifies the return arguments for the expectation. +// +// Mock.On("DoSomething").Return(errors.New("failed")) +func (c *Call) Return(returnArguments ...interface{}) *Call { + c.lock() + defer c.unlock() + + c.ReturnArguments = returnArguments + + return c +} + +// Once indicates that that the mock should only return the value once. +// +// Mock.On("MyMethod", arg1, arg2).Return(returnArg1, returnArg2).Once() +func (c *Call) Once() *Call { + return c.Times(1) +} + +// Twice indicates that that the mock should only return the value twice. +// +// Mock.On("MyMethod", arg1, arg2).Return(returnArg1, returnArg2).Twice() +func (c *Call) Twice() *Call { + return c.Times(2) +} + +// Times indicates that that the mock should only return the indicated number +// of times. +// +// Mock.On("MyMethod", arg1, arg2).Return(returnArg1, returnArg2).Times(5) +func (c *Call) Times(i int) *Call { + c.lock() + defer c.unlock() + c.Repeatability = i + return c +} + +// WaitUntil sets the channel that will block the mock's return until its closed +// or a message is received. 
+// +// Mock.On("MyMethod", arg1, arg2).WaitUntil(time.After(time.Second)) +func (c *Call) WaitUntil(w <-chan time.Time) *Call { + c.lock() + defer c.unlock() + c.WaitFor = w + return c +} + +// After sets how long to block until the call returns +// +// Mock.On("MyMethod", arg1, arg2).After(time.Second) +func (c *Call) After(d time.Duration) *Call { + c.lock() + defer c.unlock() + c.waitTime = d + return c +} + +// Run sets a handler to be called before returning. It can be used when +// mocking a method such as unmarshalers that takes a pointer to a struct and +// sets properties in such struct +// +// Mock.On("Unmarshal", AnythingOfType("*map[string]interface{}").Return().Run(func(args Arguments) { +// arg := args.Get(0).(*map[string]interface{}) +// arg["foo"] = "bar" +// }) +func (c *Call) Run(fn func(args Arguments)) *Call { + c.lock() + defer c.unlock() + c.RunFn = fn + return c +} + +// Maybe allows the method call to be optional. Not calling an optional method +// will not cause an error while asserting expectations +func (c *Call) Maybe() *Call { + c.lock() + defer c.unlock() + c.optional = true + return c +} + +// On chains a new expectation description onto the mocked interface. This +// allows syntax like. +// +// Mock. +// On("MyMethod", 1).Return(nil). +// On("MyOtherMethod", 'a', 'b', 'c').Return(errors.New("Some Error")) +func (c *Call) On(methodName string, arguments ...interface{}) *Call { + return c.Parent.On(methodName, arguments...) +} + +// Mock is the workhorse used to track activity on another object. +// For an example of its usage, refer to the "Example Usage" section at the top +// of this document. +type Mock struct { + // Represents the calls that are expected of + // an object. + ExpectedCalls []*Call + + // Holds the calls that were made to this mocked object. + Calls []Call + + // test is An optional variable that holds the test struct, to be used when an + // invalid mock call was made. + test TestingT + + // TestData holds any data that might be useful for testing. Testify ignores + // this data completely allowing you to do whatever you like with it. + testData objx.Map + + mutex sync.Mutex +} + +// TestData holds any data that might be useful for testing. Testify ignores +// this data completely allowing you to do whatever you like with it. +func (m *Mock) TestData() objx.Map { + + if m.testData == nil { + m.testData = make(objx.Map) + } + + return m.testData +} + +/* + Setting expectations +*/ + +// Test sets the test struct variable of the mock object +func (m *Mock) Test(t TestingT) { + m.mutex.Lock() + defer m.mutex.Unlock() + m.test = t +} + +// fail fails the current test with the given formatted format and args. +// In case that a test was defined, it uses the test APIs for failing a test, +// otherwise it uses panic. +func (m *Mock) fail(format string, args ...interface{}) { + m.mutex.Lock() + defer m.mutex.Unlock() + + if m.test == nil { + panic(fmt.Sprintf(format, args...)) + } + m.test.Errorf(format, args...) + m.test.FailNow() +} + +// On starts a description of an expectation of the specified method +// being called. +// +// Mock.On("MyMethod", arg1, arg2) +func (m *Mock) On(methodName string, arguments ...interface{}) *Call { + for _, arg := range arguments { + if v := reflect.ValueOf(arg); v.Kind() == reflect.Func { + panic(fmt.Sprintf("cannot use Func in expectations. Use mock.AnythingOfType(\"%T\")", arg)) + } + } + + m.mutex.Lock() + defer m.mutex.Unlock() + c := newCall(m, methodName, assert.CallerInfo(), arguments...) 
+ m.ExpectedCalls = append(m.ExpectedCalls, c) + return c +} + +// /* +// Recording and responding to activity +// */ + +func (m *Mock) findExpectedCall(method string, arguments ...interface{}) (int, *Call) { + for i, call := range m.ExpectedCalls { + if call.Method == method && call.Repeatability > -1 { + + _, diffCount := call.Arguments.Diff(arguments) + if diffCount == 0 { + return i, call + } + + } + } + return -1, nil +} + +func (m *Mock) findClosestCall(method string, arguments ...interface{}) (*Call, string) { + var diffCount int + var closestCall *Call + var err string + + for _, call := range m.expectedCalls() { + if call.Method == method { + + errInfo, tempDiffCount := call.Arguments.Diff(arguments) + if tempDiffCount < diffCount || diffCount == 0 { + diffCount = tempDiffCount + closestCall = call + err = errInfo + } + + } + } + + return closestCall, err +} + +func callString(method string, arguments Arguments, includeArgumentValues bool) string { + + var argValsString string + if includeArgumentValues { + var argVals []string + for argIndex, arg := range arguments { + argVals = append(argVals, fmt.Sprintf("%d: %#v", argIndex, arg)) + } + argValsString = fmt.Sprintf("\n\t\t%s", strings.Join(argVals, "\n\t\t")) + } + + return fmt.Sprintf("%s(%s)%s", method, arguments.String(), argValsString) +} + +// Called tells the mock object that a method has been called, and gets an array +// of arguments to return. Panics if the call is unexpected (i.e. not preceded by +// appropriate .On .Return() calls) +// If Call.WaitFor is set, blocks until the channel is closed or receives a message. +func (m *Mock) Called(arguments ...interface{}) Arguments { + // get the calling function's name + pc, _, _, ok := runtime.Caller(1) + if !ok { + panic("Couldn't get the caller information") + } + functionPath := runtime.FuncForPC(pc).Name() + //Next four lines are required to use GCCGO function naming conventions. + //For Ex: github_com_docker_libkv_store_mock.WatchTree.pN39_github_com_docker_libkv_store_mock.Mock + //uses interface information unlike golang github.com/docker/libkv/store/mock.(*Mock).WatchTree + //With GCCGO we need to remove interface information starting from pN
. + re := regexp.MustCompile("\\.pN\\d+_") + if re.MatchString(functionPath) { + functionPath = re.Split(functionPath, -1)[0] + } + parts := strings.Split(functionPath, ".") + functionName := parts[len(parts)-1] + return m.MethodCalled(functionName, arguments...) +} + +// MethodCalled tells the mock object that the given method has been called, and gets +// an array of arguments to return. Panics if the call is unexpected (i.e. not preceded +// by appropriate .On .Return() calls) +// If Call.WaitFor is set, blocks until the channel is closed or receives a message. +func (m *Mock) MethodCalled(methodName string, arguments ...interface{}) Arguments { + m.mutex.Lock() + //TODO: could combine expected and closes in single loop + found, call := m.findExpectedCall(methodName, arguments...) + + if found < 0 { + // we have to fail here - because we don't know what to do + // as the return arguments. This is because: + // + // a) this is a totally unexpected call to this method, + // b) the arguments are not what was expected, or + // c) the developer has forgotten to add an accompanying On...Return pair. + + closestCall, mismatch := m.findClosestCall(methodName, arguments...) + m.mutex.Unlock() + + if closestCall != nil { + m.fail("\n\nmock: Unexpected Method Call\n-----------------------------\n\n%s\n\nThe closest call I have is: \n\n%s\n\n%s\nDiff: %s", + callString(methodName, arguments, true), + callString(methodName, closestCall.Arguments, true), + diffArguments(closestCall.Arguments, arguments), + strings.TrimSpace(mismatch), + ) + } else { + m.fail("\nassert: mock: I don't know what to return because the method call was unexpected.\n\tEither do Mock.On(\"%s\").Return(...) first, or remove the %s() call.\n\tThis method was unexpected:\n\t\t%s\n\tat: %s", methodName, methodName, callString(methodName, arguments, true), assert.CallerInfo()) + } + } + + if call.Repeatability == 1 { + call.Repeatability = -1 + } else if call.Repeatability > 1 { + call.Repeatability-- + } + call.totalCalls++ + + // add the call + m.Calls = append(m.Calls, *newCall(m, methodName, assert.CallerInfo(), arguments...)) + m.mutex.Unlock() + + // block if specified + if call.WaitFor != nil { + <-call.WaitFor + } else { + time.Sleep(call.waitTime) + } + + m.mutex.Lock() + runFn := call.RunFn + m.mutex.Unlock() + + if runFn != nil { + runFn(arguments) + } + + m.mutex.Lock() + returnArgs := call.ReturnArguments + m.mutex.Unlock() + + return returnArgs +} + +/* + Assertions +*/ + +type assertExpectationser interface { + AssertExpectations(TestingT) bool +} + +// AssertExpectationsForObjects asserts that everything specified with On and Return +// of the specified objects was in fact called as expected. +// +// Calls may have occurred in any order. +func AssertExpectationsForObjects(t TestingT, testObjects ...interface{}) bool { + if h, ok := t.(tHelper); ok { + h.Helper() + } + for _, obj := range testObjects { + if m, ok := obj.(Mock); ok { + t.Logf("Deprecated mock.AssertExpectationsForObjects(myMock.Mock) use mock.AssertExpectationsForObjects(myMock)") + obj = &m + } + m := obj.(assertExpectationser) + if !m.AssertExpectations(t) { + t.Logf("Expectations didn't match for Mock: %+v", reflect.TypeOf(m)) + return false + } + } + return true +} + +// AssertExpectations asserts that everything specified with On and Return was +// in fact called as expected. Calls may have occurred in any order. 
+func (m *Mock) AssertExpectations(t TestingT) bool { + if h, ok := t.(tHelper); ok { + h.Helper() + } + m.mutex.Lock() + defer m.mutex.Unlock() + var somethingMissing bool + var failedExpectations int + + // iterate through each expectation + expectedCalls := m.expectedCalls() + for _, expectedCall := range expectedCalls { + if !expectedCall.optional && !m.methodWasCalled(expectedCall.Method, expectedCall.Arguments) && expectedCall.totalCalls == 0 { + somethingMissing = true + failedExpectations++ + t.Logf("FAIL:\t%s(%s)\n\t\tat: %s", expectedCall.Method, expectedCall.Arguments.String(), expectedCall.callerInfo) + } else { + if expectedCall.Repeatability > 0 { + somethingMissing = true + failedExpectations++ + t.Logf("FAIL:\t%s(%s)\n\t\tat: %s", expectedCall.Method, expectedCall.Arguments.String(), expectedCall.callerInfo) + } else { + t.Logf("PASS:\t%s(%s)", expectedCall.Method, expectedCall.Arguments.String()) + } + } + } + + if somethingMissing { + t.Errorf("FAIL: %d out of %d expectation(s) were met.\n\tThe code you are testing needs to make %d more call(s).\n\tat: %s", len(expectedCalls)-failedExpectations, len(expectedCalls), failedExpectations, assert.CallerInfo()) + } + + return !somethingMissing +} + +// AssertNumberOfCalls asserts that the method was called expectedCalls times. +func (m *Mock) AssertNumberOfCalls(t TestingT, methodName string, expectedCalls int) bool { + if h, ok := t.(tHelper); ok { + h.Helper() + } + m.mutex.Lock() + defer m.mutex.Unlock() + var actualCalls int + for _, call := range m.calls() { + if call.Method == methodName { + actualCalls++ + } + } + return assert.Equal(t, expectedCalls, actualCalls, fmt.Sprintf("Expected number of calls (%d) does not match the actual number of calls (%d).", expectedCalls, actualCalls)) +} + +// AssertCalled asserts that the method was called. +// It can produce a false result when an argument is a pointer type and the underlying value changed after calling the mocked method. +func (m *Mock) AssertCalled(t TestingT, methodName string, arguments ...interface{}) bool { + if h, ok := t.(tHelper); ok { + h.Helper() + } + m.mutex.Lock() + defer m.mutex.Unlock() + if !m.methodWasCalled(methodName, arguments) { + var calledWithArgs []string + for _, call := range m.calls() { + calledWithArgs = append(calledWithArgs, fmt.Sprintf("%v", call.Arguments)) + } + if len(calledWithArgs) == 0 { + return assert.Fail(t, "Should have called with given arguments", + fmt.Sprintf("Expected %q to have been called with:\n%v\nbut no actual calls happened", methodName, arguments)) + } + return assert.Fail(t, "Should have called with given arguments", + fmt.Sprintf("Expected %q to have been called with:\n%v\nbut actual calls were:\n %v", methodName, arguments, strings.Join(calledWithArgs, "\n"))) + } + return true +} + +// AssertNotCalled asserts that the method was not called. +// It can produce a false result when an argument is a pointer type and the underlying value changed after calling the mocked method. 
+func (m *Mock) AssertNotCalled(t TestingT, methodName string, arguments ...interface{}) bool { + if h, ok := t.(tHelper); ok { + h.Helper() + } + m.mutex.Lock() + defer m.mutex.Unlock() + if m.methodWasCalled(methodName, arguments) { + return assert.Fail(t, "Should not have called with given arguments", + fmt.Sprintf("Expected %q to not have been called with:\n%v\nbut actually it was.", methodName, arguments)) + } + return true +} + +func (m *Mock) methodWasCalled(methodName string, expected []interface{}) bool { + for _, call := range m.calls() { + if call.Method == methodName { + + _, differences := Arguments(expected).Diff(call.Arguments) + + if differences == 0 { + // found the expected call + return true + } + + } + } + // we didn't find the expected call + return false +} + +func (m *Mock) expectedCalls() []*Call { + return append([]*Call{}, m.ExpectedCalls...) +} + +func (m *Mock) calls() []Call { + return append([]Call{}, m.Calls...) +} + +/* + Arguments +*/ + +// Arguments holds an array of method arguments or return values. +type Arguments []interface{} + +const ( + // Anything is used in Diff and Assert when the argument being tested + // shouldn't be taken into consideration. + Anything = "mock.Anything" +) + +// AnythingOfTypeArgument is a string that contains the type of an argument +// for use when type checking. Used in Diff and Assert. +type AnythingOfTypeArgument string + +// AnythingOfType returns an AnythingOfTypeArgument object containing the +// name of the type to check for. Used in Diff and Assert. +// +// For example: +// Assert(t, AnythingOfType("string"), AnythingOfType("int")) +func AnythingOfType(t string) AnythingOfTypeArgument { + return AnythingOfTypeArgument(t) +} + +// argumentMatcher performs custom argument matching, returning whether or +// not the argument is matched by the expectation fixture function. +type argumentMatcher struct { + // fn is a function which accepts one argument, and returns a bool. + fn reflect.Value +} + +func (f argumentMatcher) Matches(argument interface{}) bool { + expectType := f.fn.Type().In(0) + expectTypeNilSupported := false + switch expectType.Kind() { + case reflect.Interface, reflect.Chan, reflect.Func, reflect.Map, reflect.Slice, reflect.Ptr: + expectTypeNilSupported = true + } + + argType := reflect.TypeOf(argument) + var arg reflect.Value + if argType == nil { + arg = reflect.New(expectType).Elem() + } else { + arg = reflect.ValueOf(argument) + } + + if argType == nil && !expectTypeNilSupported { + panic(errors.New("attempting to call matcher with nil for non-nil expected type")) + } + if argType == nil || argType.AssignableTo(expectType) { + result := f.fn.Call([]reflect.Value{arg}) + return result[0].Bool() + } + return false +} + +func (f argumentMatcher) String() string { + return fmt.Sprintf("func(%s) bool", f.fn.Type().In(0).Name()) +} + +// MatchedBy can be used to match a mock call based on only certain properties +// from a complex struct or some calculation. It takes a function that will be +// evaluated with the called argument and will return true when there's a match +// and false otherwise. +// +// Example: +// m.On("Do", MatchedBy(func(req *http.Request) bool { return req.Host == "example.com" })) +// +// |fn|, must be a function accepting a single argument (of the expected type) +// which returns a bool. If |fn| doesn't match the required signature, +// MatchedBy() panics. 
+func MatchedBy(fn interface{}) argumentMatcher { + fnType := reflect.TypeOf(fn) + + if fnType.Kind() != reflect.Func { + panic(fmt.Sprintf("assert: arguments: %s is not a func", fn)) + } + if fnType.NumIn() != 1 { + panic(fmt.Sprintf("assert: arguments: %s does not take exactly one argument", fn)) + } + if fnType.NumOut() != 1 || fnType.Out(0).Kind() != reflect.Bool { + panic(fmt.Sprintf("assert: arguments: %s does not return a bool", fn)) + } + + return argumentMatcher{fn: reflect.ValueOf(fn)} +} + +// Get Returns the argument at the specified index. +func (args Arguments) Get(index int) interface{} { + if index+1 > len(args) { + panic(fmt.Sprintf("assert: arguments: Cannot call Get(%d) because there are %d argument(s).", index, len(args))) + } + return args[index] +} + +// Is gets whether the objects match the arguments specified. +func (args Arguments) Is(objects ...interface{}) bool { + for i, obj := range args { + if obj != objects[i] { + return false + } + } + return true +} + +// Diff gets a string describing the differences between the arguments +// and the specified objects. +// +// Returns the diff string and number of differences found. +func (args Arguments) Diff(objects []interface{}) (string, int) { + //TODO: could return string as error and nil for No difference + + var output = "\n" + var differences int + + var maxArgCount = len(args) + if len(objects) > maxArgCount { + maxArgCount = len(objects) + } + + for i := 0; i < maxArgCount; i++ { + var actual, expected interface{} + var actualFmt, expectedFmt string + + if len(objects) <= i { + actual = "(Missing)" + actualFmt = "(Missing)" + } else { + actual = objects[i] + actualFmt = fmt.Sprintf("(%[1]T=%[1]v)", actual) + } + + if len(args) <= i { + expected = "(Missing)" + expectedFmt = "(Missing)" + } else { + expected = args[i] + expectedFmt = fmt.Sprintf("(%[1]T=%[1]v)", expected) + } + + if matcher, ok := expected.(argumentMatcher); ok { + if matcher.Matches(actual) { + output = fmt.Sprintf("%s\t%d: PASS: %s matched by %s\n", output, i, actualFmt, matcher) + } else { + differences++ + output = fmt.Sprintf("%s\t%d: PASS: %s not matched by %s\n", output, i, actualFmt, matcher) + } + } else if reflect.TypeOf(expected) == reflect.TypeOf((*AnythingOfTypeArgument)(nil)).Elem() { + + // type checking + if reflect.TypeOf(actual).Name() != string(expected.(AnythingOfTypeArgument)) && reflect.TypeOf(actual).String() != string(expected.(AnythingOfTypeArgument)) { + // not match + differences++ + output = fmt.Sprintf("%s\t%d: FAIL: type %s != type %s - %s\n", output, i, expected, reflect.TypeOf(actual).Name(), actualFmt) + } + + } else { + + // normal checking + + if assert.ObjectsAreEqual(expected, Anything) || assert.ObjectsAreEqual(actual, Anything) || assert.ObjectsAreEqual(actual, expected) { + // match + output = fmt.Sprintf("%s\t%d: PASS: %s == %s\n", output, i, actualFmt, expectedFmt) + } else { + // not match + differences++ + output = fmt.Sprintf("%s\t%d: FAIL: %s != %s\n", output, i, actualFmt, expectedFmt) + } + } + + } + + if differences == 0 { + return "No differences.", differences + } + + return output, differences + +} + +// Assert compares the arguments with the specified objects and fails if +// they do not exactly match. +func (args Arguments) Assert(t TestingT, objects ...interface{}) bool { + if h, ok := t.(tHelper); ok { + h.Helper() + } + + // get the differences + diff, diffCount := args.Diff(objects) + + if diffCount == 0 { + return true + } + + // there are differences... report them... 
+ t.Logf(diff) + t.Errorf("%sArguments do not match.", assert.CallerInfo()) + + return false + +} + +// String gets the argument at the specified index. Panics if there is no argument, or +// if the argument is of the wrong type. +// +// If no index is provided, String() returns a complete string representation +// of the arguments. +func (args Arguments) String(indexOrNil ...int) string { + + if len(indexOrNil) == 0 { + // normal String() method - return a string representation of the args + var argsStr []string + for _, arg := range args { + argsStr = append(argsStr, fmt.Sprintf("%s", reflect.TypeOf(arg))) + } + return strings.Join(argsStr, ",") + } else if len(indexOrNil) == 1 { + // Index has been specified - get the argument at that index + var index = indexOrNil[0] + var s string + var ok bool + if s, ok = args.Get(index).(string); !ok { + panic(fmt.Sprintf("assert: arguments: String(%d) failed because object wasn't correct type: %s", index, args.Get(index))) + } + return s + } + + panic(fmt.Sprintf("assert: arguments: Wrong number of arguments passed to String. Must be 0 or 1, not %d", len(indexOrNil))) + +} + +// Int gets the argument at the specified index. Panics if there is no argument, or +// if the argument is of the wrong type. +func (args Arguments) Int(index int) int { + var s int + var ok bool + if s, ok = args.Get(index).(int); !ok { + panic(fmt.Sprintf("assert: arguments: Int(%d) failed because object wasn't correct type: %v", index, args.Get(index))) + } + return s +} + +// Error gets the argument at the specified index. Panics if there is no argument, or +// if the argument is of the wrong type. +func (args Arguments) Error(index int) error { + obj := args.Get(index) + var s error + var ok bool + if obj == nil { + return nil + } + if s, ok = obj.(error); !ok { + panic(fmt.Sprintf("assert: arguments: Error(%d) failed because object wasn't correct type: %v", index, args.Get(index))) + } + return s +} + +// Bool gets the argument at the specified index. Panics if there is no argument, or +// if the argument is of the wrong type. +func (args Arguments) Bool(index int) bool { + var s bool + var ok bool + if s, ok = args.Get(index).(bool); !ok { + panic(fmt.Sprintf("assert: arguments: Bool(%d) failed because object wasn't correct type: %v", index, args.Get(index))) + } + return s +} + +func typeAndKind(v interface{}) (reflect.Type, reflect.Kind) { + t := reflect.TypeOf(v) + k := t.Kind() + + if k == reflect.Ptr { + t = t.Elem() + k = t.Kind() + } + return t, k +} + +func diffArguments(expected Arguments, actual Arguments) string { + if len(expected) != len(actual) { + return fmt.Sprintf("Provided %v arguments, mocked for %v arguments", len(expected), len(actual)) + } + + for x := range expected { + if diffString := diff(expected[x], actual[x]); diffString != "" { + return fmt.Sprintf("Difference found in argument %v:\n\n%s", x, diffString) + } + } + + return "" +} + +// diff returns a diff of both values as long as both are of the same type and +// are a struct, map, slice or array. Otherwise it returns an empty string. 
+func diff(expected interface{}, actual interface{}) string { + if expected == nil || actual == nil { + return "" + } + + et, ek := typeAndKind(expected) + at, _ := typeAndKind(actual) + + if et != at { + return "" + } + + if ek != reflect.Struct && ek != reflect.Map && ek != reflect.Slice && ek != reflect.Array { + return "" + } + + e := spewConfig.Sdump(expected) + a := spewConfig.Sdump(actual) + + diff, _ := difflib.GetUnifiedDiffString(difflib.UnifiedDiff{ + A: difflib.SplitLines(e), + B: difflib.SplitLines(a), + FromFile: "Expected", + FromDate: "", + ToFile: "Actual", + ToDate: "", + Context: 1, + }) + + return diff +} + +var spewConfig = spew.ConfigState{ + Indent: " ", + DisablePointerAddresses: true, + DisableCapacities: true, + SortKeys: true, +} + +type tHelper interface { + Helper() +}
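The objx Value helpers vendored above repeat one pattern per numeric type: Each iterates, Group buckets items by a key function, Replace maps elements in place, and Collect projects them into a []interface{}. A rough sketch of how they read in practice, assuming objx's Map accessors (New, Get), which sit outside this hunk:

    package main

    import (
        "fmt"

        "github.com/stretchr/objx"
    )

    func main() {
        m := objx.New(map[string]interface{}{
            "ports": []uint32{80, 443, 8080},
        })

        // Get returns a *Value; the typed helpers below come from the vendored code.
        ports := m.Get("ports")

        // Group the ports into privileged and unprivileged ranges.
        grouped := ports.GroupUint32(func(_ int, p uint32) string {
            if p < 1024 {
                return "privileged"
            }
            return "unprivileged"
        })
        fmt.Println(grouped.Data()) // map[string][]uint32

        // Collect projects every element into a []interface{}.
        labels := ports.CollectUint32(func(i int, p uint32) interface{} {
            return fmt.Sprintf("%d:%d", i, p)
        })
        fmt.Println(labels.Data()) // a []interface{} of formatted strings
    }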