diff --git a/Dockerfile b/Dockerfile
index a2f0e18..34e070a 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -1,16 +1,22 @@
 # Requires Docker v17.06 or later
-FROM golang:1.9 as builder
+FROM golang:1.15.5 as builder
 RUN mkdir -p /go/src/app
 WORKDIR /go/src/app
 COPY . /go/src/app
-RUN go-wrapper download -u github.com/golang/dep/cmd/dep
-RUN go-wrapper install github.com/golang/dep/cmd/dep
-RUN dep ensure
-RUN go-wrapper install
+RUN go build -v .
+
+FROM frolvlad/alpine-glibc:alpine-3.12_glibc-2.32
+
+ENV PROMSQL_BIND_ADDRESS="0.0.0.0"
+ENV PROMSQL_PORT="8080"
+
+COPY --from=builder /go/src/app/prometheus-sql /usr/local/bin/prometheus-sql
+COPY docker-entrypoint.sh /usr/local/bin/
+
+RUN chmod +x /usr/local/bin/*
 
-FROM frolvlad/alpine-glibc:alpine-3.6
-COPY --from=builder /go/bin/app /usr/local/bin/prometheus-sql
 EXPOSE 8080
-ENTRYPOINT ["/usr/local/bin/prometheus-sql", "-host", "0.0.0.0"]
+
+ENTRYPOINT ["/usr/local/bin/docker-entrypoint.sh"]
 # Default command assumes the SQL agent is linked.
 CMD ["-service", "http://sqlagent:5000"]
\ No newline at end of file
diff --git a/config.go b/config.go
index f8f0b1a..5ce2112 100644
--- a/config.go
+++ b/config.go
@@ -56,8 +56,12 @@ type Query struct {
 	Params       map[string]interface{}
 	Interval     time.Duration
 	Timeout      time.Duration
+	ValueCase    string            `yaml:"value-case"`
+	LabelCase    string            `yaml:"label-case"`
 	DataField    string            `yaml:"data-field"`
+	HelpText     string            `yaml:"help-text"`
 	SubMetrics   map[string]string `yaml:"sub-metrics"`
+	ExtraLabels  map[string]string `yaml:"labels"`
 	ValueOnError string            `yaml:"value-on-error"`
 }
 
@@ -193,7 +197,6 @@ func decodeQueries(r io.Reader, config *Config) (QueryList, error) {
 		if q.ValueOnError == "" && config.Defaults.QueryValueOnError != "" {
 			q.ValueOnError = config.Defaults.QueryValueOnError
 		}
-		q.DataField = strings.ToLower(q.DataField)
 		if err := validateQuery(q); err != nil {
 			return nil, err
 		}
diff --git a/examples/example-config-queries.yml b/examples/example-config-queries.yml
index a4fc048..2b5a443 100644
--- a/examples/example-config-queries.yml
+++ b/examples/example-config-queries.yml
@@ -19,6 +19,7 @@
 # insert into Companies (name, country) values ('Company1', 'IRL');
 # insert into Companies (name, country) values ('Company2', 'IRL');
 #
+#
 # select * from Companies;
 # +----------+---------+
 # | name     | country |
@@ -30,6 +31,18 @@
 # +----------+---------+
 # 4 rows in set (0.00 sec)
 #
+# -- Should you want to change the column name or upper-case / lower-case a column, use 'as'
+# select name as companyName from Companies;
+# +-------------+
+# | companyName |
+# +-------------+
+# | Company1    |
+# | Company1    |
+# | Company1    |
+# | Company2    |
+# +-------------+
+# 4 rows in set (0.00 sec)
+#
 # quit
 #
 # docker run -d -p 8080:8080 -v ${PWD}/example-config-queries.yml:/queries.yml -v ${PWD}/example-config.yml:/prometheus-sql.yml --link sqlagent:sqlagent --name prometheus-sql dbhi/prometheus-sql -service http://sqlagent:5000 -config prometheus-sql.yml
@@ -64,3 +77,47 @@
         count: cnt
         sum: rt
     interval: 30s
+
+# Histogram queries
+# This will register a set of metrics intended for a histogram:
+#   - response_times_bucket with 'le' label values of 1, 2, 3, and +Inf representing the bucket upper bound in seconds
+- response_times:
+    sql: >
+        select count(CASE WHEN response_time >= 0 AND response_time <= 1 THEN 1 END) as 'le#1',
+        count(CASE WHEN response_time > 1 AND response_time <= 2 THEN 1 END) as 'le#2',
+        count(CASE WHEN response_time > 2 AND response_time <= 3 THEN 1 END) as 'le#3',
+        count(CASE WHEN response_time > 3 THEN 1 END) as 'le#+Inf',
+        sum(response_time) as s, count(response_time) as c
+        from stats
+    sub-metrics:
+        bucket: 'le#'
+        total: 's'
+        count: 'c'
+    interval: 30s
+
+# Hard-setting a field for easy labeling
+# This will register an extra field intended to be used for labeling, such as the case when two or
+# more databases have similar records and you wish to set a custom field for identifying the database:
+#   - last_login_time is generated with overlapping metric label 'systemName'
+#   - label-case and value-case will alter the case of keys and values before being passed to Prometheus
+#     possible values for label-case and value-case are:
+#       - lower - lower case the string ('MyVal' -> 'myval') -- default --
+#       - upper - upper case the string ('MyVal' -> 'MYVAL')
+#       - first - lower case the initial character ('MyVal' -> 'myVal')
+#       - title - upper case the initial character ('myval' -> 'Myval')
+#       - keep  - leave the case alone and pass it on untouched ('MyVal' -> 'MyVal')
+- last_login_time:
+    label-case: lower
+    value-case: lower
+    sql: >
+        select *, 'user_portal_database' as 'systemName' from logins
+    interval: 30s
+- last_login_time:
+    label-case: first
+    value-case: keep
+    labels:
+        systemName: user_registration_database
+    sql: >
+        select * from logins
+    interval: 30s
+
diff --git a/examples/example-queries.yml b/examples/example-queries.yml
index 6a3286d..b0ad065 100644
--- a/examples/example-queries.yml
+++ b/examples/example-queries.yml
@@ -48,6 +48,9 @@
     # Name of the driver to use.
     driver: postgresql
 
+    # Help text about the query
+    help-text: "Result of an SQL query on example.org"
+
     # Connection information.
     connection:
         host: example.org
diff --git a/set.go b/set.go
index a99216c..de4cbc3 100644
--- a/set.go
+++ b/set.go
@@ -4,12 +4,15 @@ import (
 	"encoding/json"
 	"errors"
 	"fmt"
+	"github.com/prometheus/client_golang/prometheus"
+	"math"
 	"strconv"
 	"strings"
-
-	"github.com/prometheus/client_golang/prometheus"
 )
 
+type record map[string]interface{}
+type records []record
+
 type metricStatus int
 
 const (
@@ -17,12 +20,13 @@
 	unregistered
 )
 
+// QueryResult contains query results
 type QueryResult struct {
 	Query  *Query
 	Result map[string]prometheus.Gauge // Internally we represent each facet with a JSON-encoded string for simplicity
 }
 
-// NewSetMetrics initializes a new metrics collector.
+// NewQueryResult initializes a new metrics collector.
 func NewQueryResult(q *Query) *QueryResult {
 	r := &QueryResult{
 		Query:  q,
@@ -32,7 +36,7 @@ func NewQueryResult(q *Query) *QueryResult {
 		Result: make(map[string]prometheus.Gauge),
 	}
 
 	return r
 }
 
-func (r *QueryResult) registerMetric(facets map[string]interface{}, suffix string) (string, metricStatus) {
+func (r *QueryResult) registerMetric(facets map[string]interface{}, suffix string, valueCase string, helpText string) (string, metricStatus) {
 	labels := prometheus.Labels{}
 	metricName := r.Query.Name
 	if suffix != "" {
@@ -43,27 +47,31 @@ func (r *QueryResult) registerMetric(facets map[string]interface{}, suffix strin
 	resultKey := fmt.Sprintf("%s%s", metricName, string(jsonData))
 	for k, v := range facets {
-		labels[k] = strings.ToLower(fmt.Sprintf("%v", v))
+		labels[k] = CaseChange(fmt.Sprintf("%v", v), valueCase)
 	}
 
-	if _, ok := r.Result[resultKey]; ok { // A metric with this name is already registered
+	if _, ok := r.Result[resultKey]; ok {
+		// A metric with this key is already created and assumed to be registered
 		return resultKey, registered
 	}
 
+	if len(helpText) == 0 {
+		helpText = "Result of an SQL query"
+	}
+
 	fmt.Println("Creating", resultKey)
 	r.Result[resultKey] = prometheus.NewGauge(prometheus.GaugeOpts{
 		Name:        fmt.Sprintf("query_result_%s", metricName),
-		Help:        "Result of an SQL query",
+		Help:        helpText,
 		ConstLabels: labels,
 	})
 
 	return resultKey, unregistered
 }
 
-type record map[string]interface{}
-type records []record
-
 func setValueForResult(r prometheus.Gauge, v interface{}) error {
 	switch t := v.(type) {
+	case nil:
+		r.Set(math.NaN())
 	case string:
 		f, err := strconv.ParseFloat(t, 64)
 		if err != nil {
@@ -80,6 +88,7 @@
 	return nil
 }
 
+// SetMetrics sets and registers metrics
 func (r *QueryResult) SetMetrics(recs records) (map[string]metricStatus, error) {
 	// Queries that return only one record should only have one column
 	if len(recs) > 1 && len(recs[0]) == 1 {
@@ -91,6 +100,9 @@ func (r *QueryResult) SetMetrics(recs records) (map[string]metricStatus, error)
 	}
 
 	submetrics := map[string]string{}
+	extralabels := map[string]string{}
+	labelCase := r.Query.LabelCase
+	valueCase := r.Query.ValueCase
 
 	if len(r.Query.SubMetrics) > 0 {
 		submetrics = r.Query.SubMetrics
@@ -98,29 +110,49 @@ func (r *QueryResult) SetMetrics(recs records) (map[string]metricStatus, error)
 		submetrics = map[string]string{"": r.Query.DataField}
 	}
 
+	if len(r.Query.ExtraLabels) > 0 {
+		extralabels = r.Query.ExtraLabels
+	}
+
 	facetsWithResult := make(map[string]metricStatus, 0)
 	for _, row := range recs {
 		for suffix, datafield := range submetrics {
 			facet := make(map[string]interface{})
+			for k, v := range extralabels {
+				facet[k] = v
+			}
 			var (
 				dataVal   interface{}
 				dataFound bool
 			)
+			datafield = CaseChange(datafield, labelCase)
+			histogram_data := make(map[string]interface{})
+			histogram := (datafield[len(datafield)-1:] == "#")
 			for k, v := range row {
-				if len(row) > 1 && strings.ToLower(k) != datafield { // facet field, add to facets
-					submetric := false
-					for _, n := range submetrics {
-						if strings.ToLower(k) == n {
-							submetric = true
+				k := CaseChange(fmt.Sprintf("%v", k), labelCase)
+				if len(row) > 1 && k != datafield {
+					if histogram && strings.HasPrefix(k, datafield) {
+						// histogram field, add to histogram_data
+						histogram_data[k[len(datafield):]] = v
+						dataFound = true
+					} else {
+						// facet field, add to facets
+						submetric := false
+						for _, n := range submetrics {
+							if k == CaseChange(n, labelCase) {
+								submetric = true
+							} else if strings.Contains(n, "#") && strings.HasPrefix(k, CaseChange(n, labelCase)) {
+								submetric = true
+							}
+						}
+						// it is a facet field and not a submetric field
+						if !submetric {
+							facet[k] = v
 						}
-					}
-					// it is a facet field and not a submetric field
-					if !submetric {
-						facet[strings.ToLower(fmt.Sprintf("%v", k))] = v
 					}
 				} else {
 					// this is the actual gauge data
 					if dataFound {
-						return nil, errors.New("Data field not specified for multi-column query")
+						return nil, errors.New(fmt.Sprintf("Data field '%v' not specified for multi-column query", datafield))
 					}
 					dataVal = v
 					dataFound = true
@@ -128,21 +160,37 @@ func (r *QueryResult) SetMetrics(recs records) (map[string]metricStatus, error)
 			}
 
 			if !dataFound {
-				return nil, errors.New("Data field not found in result set")
+				return nil, errors.New(fmt.Sprintf("Data field '%v' not found in result set", datafield))
 			}
 
-			key, status := r.registerMetric(facet, suffix)
-			err := setValueForResult(r.Result[key], dataVal)
-			if err != nil {
-				return nil, err
+			if histogram {
+				histogram_field := datafield[0 : len(datafield)-1]
+				for k, dataVal := range histogram_data {
+					// loop over histogram data registering bins
+					facet[histogram_field] = k
+
+					key, status := r.registerMetric(facet, suffix, valueCase, r.Query.HelpText)
+					err := setValueForResult(r.Result[key], dataVal)
+					if err != nil {
+						return nil, err
+					}
+					facetsWithResult[key] = status
+				}
+			} else {
+				key, status := r.registerMetric(facet, suffix, valueCase, r.Query.HelpText)
+				err := setValueForResult(r.Result[key], dataVal)
+				if err != nil {
+					return nil, err
+				}
+				facetsWithResult[key] = status
 			}
-			facetsWithResult[key] = status
 		}
 	}
 
 	return facetsWithResult, nil
 }
 
+// RegisterMetrics registers and unregisters gauges
 func (r *QueryResult) RegisterMetrics(facetsWithResult map[string]metricStatus) {
 	for key, m := range r.Result {
 		status, ok := facetsWithResult[key]
@@ -160,3 +208,18 @@ func (r *QueryResult) RegisterMetrics(facetsWithResult map[string]metricStatus)
 		}
 	}
 }
+func CaseChange(str string, newCase string) string {
+	switch newCase {
+	case "lower":
+		return strings.ToLower(str)
+	case "upper":
+		return strings.ToUpper(str)
+	case "first":
+		return string(strings.ToLower(str[0:1])) + str[1:]
+	case "title":
+		return string(strings.ToUpper(str[0:1])) + str[1:]
+	case "keep":
+		return str
+	}
+	return strings.ToLower(str)
+}
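
Usage sketch (not part of the patch): a single queries.yml entry that combines the new options added above might look like the following, reusing the logins table and systemName label from the example file; the help-text value shown here is illustrative.

- last_login_time:
    # Used as the Prometheus HELP string instead of the default "Result of an SQL query"
    help-text: "Most recent login time per user"
    # Constant labels merged into every sample produced by this query
    labels:
        systemName: user_registration_database
    # Case handling for result column names (label-case) and label values (value-case);
    # one of lower, upper, first, title, keep
    label-case: lower
    value-case: keep
    sql: >
        select * from logins
    interval: 30s

With the code above, this query is exported as a gauge named query_result_last_login_time.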