diff --git a/.circleci/config.yml b/.circleci/config.yml index 91ace22d33b9..1d74e6e64316 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -73,7 +73,7 @@ jobs: end-to-end-test: docker: - image: circleci/node:8-browsers - - image: grafana/grafana:master + - image: grafana/grafana-dev:master-$CIRCLE_SHA1 steps: - run: dockerize -wait tcp://127.0.0.1:3000 -timeout 120s - checkout @@ -91,6 +91,12 @@ jobs: name: run end-to-end tests command: 'env BASE_URL=http://127.0.0.1:3000 yarn e2e-tests' no_output_timeout: 5m + - store_artifacts: + path: public/e2e-test/screenShots/theTruth + destination: expected-screenshots + - store_artifacts: + path: public/e2e-test/screenShots/theOutput + destination: output-screenshots codespell: docker: @@ -629,7 +635,7 @@ workflows: - mysql-integration-test - postgres-integration-test - build-oss-msi - filters: *filter-only-master + filters: *filter-only-master - grafana-docker-master: requires: - build-all @@ -662,7 +668,10 @@ workflows: - mysql-integration-test - postgres-integration-test filters: *filter-only-master - + - end-to-end-test: + requires: + - grafana-docker-master + filters: *filter-only-master release: jobs: - build-all: diff --git a/CHANGELOG.md b/CHANGELOG.md index 9224bb241aeb..64d8d41d5f5b 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,18 @@ +# 6.3.0 (unreleased) + +# 6.2.1 (2019-05-27) + +### Features / Enhancements + * **CLI**: Add command to migrate all datasources to use encrypted password fields . [#17118](https://github.com/grafana/grafana/pull/17118), [@aocenas](https://github.com/aocenas) + * **Gauge/BarGauge**: Improvements to auto value font size . [#17292](https://github.com/grafana/grafana/pull/17292), [@torkelo](https://github.com/torkelo) + +### Bug Fixes + * **Auth Proxy**: Resolve database is locked errors. [#17274](https://github.com/grafana/grafana/pull/17274), [@marefr](https://github.com/marefr) + * **Database**: Retry transaction if sqlite returns database is locked error. 
[#17276](https://github.com/grafana/grafana/pull/17276), [@marefr](https://github.com/marefr) + * **Explore**: Fixes so clicking in a Prometheus Table the query is filtered by clicked value. [#17083](https://github.com/grafana/grafana/pull/17083), [@hugohaggmark](https://github.com/hugohaggmark) + * **Singlestat**: Fixes issue with value placement and line wraps. [#17249](https://github.com/grafana/grafana/pull/17249), [@torkelo](https://github.com/torkelo) + * **Tech**: Update jQuery to 3.4.1 to fix issue on iOS 10 based browers as well as Chrome 53.x . [#17290](https://github.com/grafana/grafana/pull/17290), [@timbutler](https://github.com/timbutler) + # 6.2.0 (2019-05-22) ### Bug Fixes @@ -70,6 +85,7 @@ repo on July 1st. Make sure you have switched to the new repo by then. The new r * **Provisioning**: Add API endpoint to reload provisioning configs. [#16579](https://github.com/grafana/grafana/pull/16579), [@aocenas](https://github.com/aocenas) * **Provisioning**: Do not allow deletion of provisioned dashboards. [#16211](https://github.com/grafana/grafana/pull/16211), [@aocenas](https://github.com/aocenas) * **Provisioning**: Interpolate env vars in provisioning files. [#16499](https://github.com/grafana/grafana/pull/16499), [@aocenas](https://github.com/aocenas) + * **Provisioning**: Support FolderUid in Dashboard Provisioning Config. [#16559](https://github.com/grafana/grafana/pull/16559), [@swtch1](https://github.com/swtch1) * **Security**: Add new setting allow_embedding. [#16853](https://github.com/grafana/grafana/pull/16853), [@marefr](https://github.com/marefr) * **Security**: Store datasource passwords encrypted in secureJsonData. [#16175](https://github.com/grafana/grafana/pull/16175), [@aocenas](https://github.com/aocenas) * **UX**: Improve Grafana usage for smaller screens. 
[#16783](https://github.com/grafana/grafana/pull/16783), [@torkelo](https://github.com/torkelo) diff --git a/Makefile b/Makefile index 8cfc7ce26812..1d71675dbaba 100644 --- a/Makefile +++ b/Makefile @@ -1,6 +1,6 @@ -include local/Makefile -.PHONY: all deps-go deps-js deps build-go build-server build-cli build-js build build-docker-dev build-docker-full lint-go test-go test-js test run clean gosec revive +.PHONY: all deps-go deps-js deps build-go build-server build-cli build-js build build-docker-dev build-docker-full lint-go test-go test-js test run clean gosec revive devenv devenv-down revive-alerting GO := GO111MODULE=on go GO_FILES := ./pkg/... @@ -84,6 +84,34 @@ revive: scripts/go/bin/revive -config ./scripts/go/configs/revive.toml \ $(GO_FILES) +revive-alerting: scripts/go/bin/revive + @scripts/go/bin/revive \ + -formatter stylish \ + ./pkg/services/alerting/... + +# create docker-compose file with provided sources and start them +# example: make devenv sources=postgres,openldap +ifeq ($(sources),) +devenv: + @printf 'You have to define sources for this command \nexample: make devenv sources=postgres,openldap\n' +else +devenv: devenv-down + $(eval targets := $(shell echo '$(sources)' | tr "," " ")) + + @cd devenv; \ + ./create_docker_compose.sh $(targets) || \ + (rm -rf docker-compose.yaml; exit 1) + + @cd devenv; \ + docker-compose up -d --build +endif + +# drop down the envs +devenv-down: + @cd devenv; \ + test -f docker-compose.yaml && \ + docker-compose down || exit 0; + # TODO recheck the rules and leave only necessary exclusions gosec: scripts/go/bin/gosec @scripts/go/bin/gosec -quiet \ diff --git a/conf/defaults.ini b/conf/defaults.ini index ca49f1212698..ec83a5ea1a5a 100644 --- a/conf/defaults.ini +++ b/conf/defaults.ini @@ -47,6 +47,9 @@ enforce_domain = false # The full public facing url root_url = %(protocol)s://%(domain)s:%(http_port)s/ +# Serve Grafana from subpath specified in `root_url` setting. 
By default it is set to `false` for compatibility reasons. +serve_from_sub_path = false + # Log web requests router_logging = false diff --git a/conf/sample.ini b/conf/sample.ini index de684bc98f67..dc7d5bc54677 100644 --- a/conf/sample.ini +++ b/conf/sample.ini @@ -48,6 +48,9 @@ # If you use reverse proxy and sub path specify full url (with sub path) ;root_url = http://localhost:3000 +# Serve Grafana from subpath specified in `root_url` setting. By default it is set to `false` for compatibility reasons. +;serve_from_sub_path = false + # Log web requests ;router_logging = false diff --git a/devenv/datasources.yaml b/devenv/datasources.yaml index d5664d7136c0..33dde611bdcd 100644 --- a/devenv/datasources.yaml +++ b/devenv/datasources.yaml @@ -143,6 +143,17 @@ datasources: timeField: "@timestamp" esVersion: 70 + - name: gdev-elasticsearch-v7-metricbeat + type: elasticsearch + access: proxy + database: "[metricbeat-]YYYY.MM.DD" + url: http://localhost:12200 + jsonData: + interval: Daily + timeField: "@timestamp" + esVersion: 70 + timeInterval: "10s" + - name: gdev-mysql type: mysql url: localhost:3306 diff --git a/devenv/docker/blocks/elastic7/docker-compose.yaml b/devenv/docker/blocks/elastic7/docker-compose.yaml index 3ef922c890c3..45e2836f870e 100644 --- a/devenv/docker/blocks/elastic7/docker-compose.yaml +++ b/devenv/docker/blocks/elastic7/docker-compose.yaml @@ -21,3 +21,19 @@ - ./docker/blocks/elastic7/filebeat.yml:/usr/share/filebeat/filebeat.yml:ro - /var/log:/var/log:ro - ../data/log:/var/log/grafana:ro + + metricbeat7: + image: docker.elastic.co/beats/metricbeat-oss:7.0.0 + network_mode: host + command: metricbeat -e -strict.perms=false + user: root + volumes: + - ./docker/blocks/elastic7/metricbeat.yml:/usr/share/metricbeat/metricbeat.yml:ro + - /var/run/docker.sock:/var/run/docker.sock:ro + + kibana7: + image: docker.elastic.co/kibana/kibana-oss:7.0.0 + ports: + - "5601:5601" + environment: + ELASTICSEARCH_HOSTS: http://elasticsearch7:9200 diff --git 
a/devenv/docker/blocks/elastic7/metricbeat.yml b/devenv/docker/blocks/elastic7/metricbeat.yml new file mode 100644 index 000000000000..4788c0cdd9a5 --- /dev/null +++ b/devenv/docker/blocks/elastic7/metricbeat.yml @@ -0,0 +1,38 @@ +metricbeat.config: + modules: + path: ${path.config}/modules.d/*.yml + # Reload module configs as they change: + reload.enabled: false + +metricbeat.autodiscover: + providers: + - type: docker + hints.enabled: true + +metricbeat.modules: +- module: docker + metricsets: + - "container" + - "cpu" + - "diskio" + - "healthcheck" + - "info" + #- "image" + - "memory" + - "network" + hosts: ["unix:///var/run/docker.sock"] + period: 10s + enabled: true + +processors: + - add_cloud_metadata: ~ + +output.elasticsearch: + hosts: ["localhost:12200"] + index: "metricbeat-%{+yyyy.MM.dd}" + +setup.template.name: "metricbeat" +setup.template.pattern: "metricbeat-*" +setup.template.settings: + index.number_of_shards: 1 + index.number_of_replicas: 1 \ No newline at end of file diff --git a/devenv/docker/blocks/multiple-openldap/ldap-server/Dockerfile b/devenv/docker/blocks/multiple-openldap/admins-ldap-server.Dockerfile similarity index 74% rename from devenv/docker/blocks/multiple-openldap/ldap-server/Dockerfile rename to devenv/docker/blocks/multiple-openldap/admins-ldap-server.Dockerfile index 979d01c7dad4..29e581d2b137 100644 --- a/devenv/docker/blocks/multiple-openldap/ldap-server/Dockerfile +++ b/devenv/docker/blocks/multiple-openldap/admins-ldap-server.Dockerfile @@ -19,11 +19,11 @@ EXPOSE 389 VOLUME ["/etc/ldap", "/var/lib/ldap"] -COPY modules/ /etc/ldap.dist/modules -COPY prepopulate/ /etc/ldap.dist/prepopulate +COPY admins-ldap-server/modules/ /etc/ldap.dist/modules +COPY admins-ldap-server/prepopulate/ /etc/ldap.dist/prepopulate -COPY ../entrypoint.sh /entrypoint.sh -COPY ../prepopulate.sh /prepopulate.sh +COPY ./entrypoint.sh /entrypoint.sh +COPY ./prepopulate.sh /prepopulate.sh ENTRYPOINT ["/entrypoint.sh"] diff --git 
a/devenv/docker/blocks/multiple-openldap/docker-compose.yaml b/devenv/docker/blocks/multiple-openldap/docker-compose.yaml index 74f5d29a90ff..7ed0ca2e840a 100644 --- a/devenv/docker/blocks/multiple-openldap/docker-compose.yaml +++ b/devenv/docker/blocks/multiple-openldap/docker-compose.yaml @@ -1,5 +1,7 @@ admins-openldap: - build: docker/blocks/multiple-openldap/admins-ldap-server + build: + context: docker/blocks/multiple-openldap + dockerfile: ./admins-ldap-server.Dockerfile environment: SLAPD_PASSWORD: grafana SLAPD_DOMAIN: grafana.org @@ -8,7 +10,9 @@ - "389:389" openldap: - build: docker/blocks/multiple-openldap/ldap-server + build: + context: docker/blocks/multiple-openldap + dockerfile: ./ldap-server.Dockerfile environment: SLAPD_PASSWORD: grafana SLAPD_DOMAIN: grafana.org diff --git a/devenv/docker/blocks/multiple-openldap/admins-ldap-server/Dockerfile b/devenv/docker/blocks/multiple-openldap/ldap-server.Dockerfile similarity index 75% rename from devenv/docker/blocks/multiple-openldap/admins-ldap-server/Dockerfile rename to devenv/docker/blocks/multiple-openldap/ldap-server.Dockerfile index 979d01c7dad4..7604d1118a3f 100644 --- a/devenv/docker/blocks/multiple-openldap/admins-ldap-server/Dockerfile +++ b/devenv/docker/blocks/multiple-openldap/ldap-server.Dockerfile @@ -19,11 +19,11 @@ EXPOSE 389 VOLUME ["/etc/ldap", "/var/lib/ldap"] -COPY modules/ /etc/ldap.dist/modules -COPY prepopulate/ /etc/ldap.dist/prepopulate +COPY ldap-server/modules/ /etc/ldap.dist/modules +COPY ldap-server/prepopulate/ /etc/ldap.dist/prepopulate -COPY ../entrypoint.sh /entrypoint.sh -COPY ../prepopulate.sh /prepopulate.sh +COPY ./entrypoint.sh /entrypoint.sh +COPY ./prepopulate.sh /prepopulate.sh ENTRYPOINT ["/entrypoint.sh"] diff --git a/docs/sources/administration/provisioning.md b/docs/sources/administration/provisioning.md index 9b1f8a6c70f6..d09fb0bbc51b 100644 --- a/docs/sources/administration/provisioning.md +++ b/docs/sources/administration/provisioning.md @@ -179,6 
+179,24 @@ Secure json data is a map of settings that will be encrypted with [secret key](/ | accessKey | string | Cloudwatch | Access key for connecting to Cloudwatch | | secretKey | string | Cloudwatch | Secret key for connecting to Cloudwatch | +#### Custom HTTP headers for datasources +Datasources managed by Grafanas provisioning can be configured to add HTTP headers to all requests +going to that datasource. The header name is configured in the `jsonData` field and the header value should be +configured in `secureJsonData`. + +```yaml +apiVersion: 1 + +datasources: +- name: Graphite + jsonData: + httpHeaderName1: "HeaderName" + httpHeaderName2: "Authorization" + secureJsonData: + httpHeaderValue1: "HeaderValue" + httpHeaderValue2: "Bearer XXXXXXXXX" +``` + ### Dashboards It's possible to manage dashboards in Grafana by adding one or more yaml config files in the [`provisioning/dashboards`](/installation/configuration/#provisioning) directory. Each config file can contain a list of `dashboards providers` that will load dashboards into Grafana from the local filesystem. @@ -204,7 +222,7 @@ providers: # enable dashboard editing editable: true # how often Grafana will scan for changed dashboards - updateIntervalSeconds: 10 + updateIntervalSeconds: 10 options: # path to dashboard files on disk. Required path: /var/lib/grafana/dashboards diff --git a/docs/sources/features/datasources/azuremonitor.md b/docs/sources/features/datasources/azuremonitor.md index ee40248fe001..114187499f17 100644 --- a/docs/sources/features/datasources/azuremonitor.md +++ b/docs/sources/features/datasources/azuremonitor.md @@ -254,6 +254,10 @@ To make writing queries easier there are several Grafana macros that can be used `datetimeColumn ≥ datetime(2018-06-05T18:09:58.907Z) and` `datetimeColumn ≤ datetime(2018-06-05T20:09:58.907Z)` where the from and to datetimes are from the Grafana time picker. +- `$__timeFrom()` - Returns the From datetime from the Grafana picker. 
Example: `datetime(2018-06-05T18:09:58.907Z)`. + +- `$__timeTo()` - Returns the From datetime from the Grafana picker. Example: `datetime(2018-06-05T20:09:58.907Z)`. + - `$__escapeMulti($myVar)` - is to be used with multi-value template variables that contain illegal characters. If `$myVar` has the following two values as a string `'\\grafana-vm\Network(eth0)\Total','\\hello!'`, then it expands to: `@'\\grafana-vm\Network(eth0)\Total', @'\\hello!'`. If using single value variables there is no need for this macro, simply escape the variable inline instead - `@'\$myVar'`. - `$__contains(colName, $myVar)` - is to be used with multi-value template variables. If `$myVar` has the value `'value1','value2'`, it expands to: `colName in ('value1','value2')`. @@ -264,8 +268,6 @@ To make writing queries easier there are several Grafana macros that can be used There are also some Grafana variables that can be used in Azure Log Analytics queries: -- `$__from` - Returns the From datetime from the Grafana picker. Example: `datetime(2018-06-05T18:09:58.907Z)`. -- `$__to` - Returns the From datetime from the Grafana picker. Example: `datetime(2018-06-05T20:09:58.907Z)`. - `$__interval` - Grafana calculates the minimum time grain that can be used to group by time in queries. More details on how it works [here]({{< relref "reference/templating.md#interval-variables" >}}). It returns a time grain like `5m` or `1h` that can be used in the bin function. E.g. `summarize count() by bin(TimeGenerated, $__interval)` ### Azure Log Analytics Alerting diff --git a/docs/sources/guides/basic_concepts.md b/docs/sources/guides/basic_concepts.md index d3f8dd0ba633..3c610e21ce23 100644 --- a/docs/sources/guides/basic_concepts.md +++ b/docs/sources/guides/basic_concepts.md @@ -66,7 +66,7 @@ There are a wide variety of styling and formatting options that each Panel expos Panels can be dragged and dropped and rearranged on the Dashboard. They can also be resized. 
-There are currently four Panel types: [Graph](/reference/graph/), [Singlestat](/reference/singlestat/), [Dashlist](/reference/dashlist/), [Table](/reference/table_panel/),and [Text](/reference/text/). +There are currently five Panel types: [Graph](/reference/graph/), [Singlestat](/reference/singlestat/), [Dashlist](/reference/dashlist/), [Table](/reference/table_panel/), and [Text](/reference/text/). Panels like the [Graph](/reference/graph/) panel allow you to graph as many metrics and series as you want. Other panels like [Singlestat](/reference/singlestat/) require a reduction of a single query into a single number. [Dashlist](/reference/dashlist/) and [Text](/reference/text/) are special panels that do not connect to any Data Source. diff --git a/docs/sources/http_api/alerting_notification_channels.md b/docs/sources/http_api/alerting_notification_channels.md index b8db1595aaa1..aa6e7297ac2d 100644 --- a/docs/sources/http_api/alerting_notification_channels.md +++ b/docs/sources/http_api/alerting_notification_channels.md @@ -54,7 +54,7 @@ Content-Type: application/json "sendReminder": false, "disableResolveMessage": false, "settings": { - "addresses": "carl@grafana.com;dev@grafana.com" + "addresses": "dev@grafana.com" }, "created": "2018-04-23T14:44:09+02:00", "updated": "2018-08-20T15:47:49+02:00" @@ -93,7 +93,7 @@ Content-Type: application/json "sendReminder": false, "disableResolveMessage": false, "settings": { - "addresses": "carl@grafana.com;dev@grafana.com" + "addresses": "dev@grafana.com" }, "created": "2018-04-23T14:44:09+02:00", "updated": "2018-08-20T15:47:49+02:00" @@ -130,7 +130,7 @@ Content-Type: application/json "sendReminder": false, "disableResolveMessage": false, "settings": { - "addresses": "carl@grafana.com;dev@grafana.com" + "addresses": "dev@grafana.com" }, "created": "2018-04-23T14:44:09+02:00", "updated": "2018-08-20T15:47:49+02:00" @@ -158,7 +158,7 @@ Authorization: Bearer eyJrIjoiT0tTcG1pUlY2RnVKZTFVaDFsNFZXdE9ZWmNrMkZYbk "isDefault": 
false, "sendReminder": false, "settings": { - "addresses": "carl@grafana.com;dev@grafana.com" + "addresses": "dev@grafana.com" } } ``` @@ -177,7 +177,7 @@ Content-Type: application/json "isDefault": false, "sendReminder": false, "settings": { - "addresses": "carl@grafana.com;dev@grafana.com" + "addresses": "dev@grafana.com" }, "created": "2018-04-23T14:44:09+02:00", "updated": "2018-08-20T15:47:49+02:00" @@ -206,7 +206,7 @@ Authorization: Bearer eyJrIjoiT0tTcG1pUlY2RnVKZTFVaDFsNFZXdE9ZWmNrMkZYbk "sendReminder": true, "frequency": "15m", "settings": { - "addresses": "carl@grafana.com;dev@grafana.com" + "addresses": "dev@grafana.com" } } ``` @@ -226,7 +226,7 @@ Content-Type: application/json "sendReminder": true, "frequency": "15m", "settings": { - "addresses": "carl@grafana.com;dev@grafana.com" + "addresses": "dev@grafana.com" }, "created": "2017-01-01 12:34", "updated": "2017-01-01 12:34" @@ -256,7 +256,7 @@ Authorization: Bearer eyJrIjoiT0tTcG1pUlY2RnVKZTFVaDFsNFZXdE9ZWmNrMkZYbk "sendReminder": true, "frequency": "15m", "settings": { - "addresses": "carl@grafana.com;dev@grafana.com" + "addresses": "dev@grafana.com" } } ``` @@ -276,7 +276,7 @@ Content-Type: application/json "sendReminder": true, "frequency": "15m", "settings": { - "addresses": "carl@grafana.com;dev@grafana.com" + "addresses": "dev@grafana.com" }, "created": "2017-01-01 12:34", "updated": "2017-01-01 12:34" @@ -353,7 +353,7 @@ Authorization: Bearer eyJrIjoiT0tTcG1pUlY2RnVKZTFVaDFsNFZXdE9ZWmNrMkZYbk { "type": "email", "settings": { - "addresses": "carl@grafana.com;dev@grafana.com" + "addresses": "dev@grafana.com" } } ``` diff --git a/docs/sources/installation/configuration.md b/docs/sources/installation/configuration.md index a865234ebeba..af0261032d4b 100644 --- a/docs/sources/installation/configuration.md +++ b/docs/sources/installation/configuration.md @@ -154,6 +154,15 @@ callback URL to be correct). > in front of Grafana that exposes it through a subpath. 
In that > case add the subpath to the end of this URL setting. +### serve_from_sub_path + +Serve Grafana from subpath specified in `root_url` setting. By +default it is set to `false` for compatibility reasons. + +By enabling this setting and using a subpath in `root_url` above, e.g. +`root_url = http://localhost:3000/grafana`, Grafana will be accessible on +`http://localhost:3000/grafana`. + ### static_root_path The path to the directory where the front end files (HTML, JS, and CSS diff --git a/go.mod b/go.mod index 619f5183a5c7..1730235d6173 100644 --- a/go.mod +++ b/go.mod @@ -9,10 +9,8 @@ require ( github.com/aws/aws-sdk-go v1.18.5 github.com/benbjohnson/clock v0.0.0-20161215174838-7dc76406b6d3 github.com/bradfitz/gomemcache v0.0.0-20180710155616-bc664df96737 - github.com/brianvoe/gofakeit v3.17.0+incompatible github.com/codahale/hdrhistogram v0.0.0-20161010025455-3a0bb77429bd // indirect github.com/codegangsta/cli v1.20.0 - github.com/davecgh/go-spew v1.1.1 github.com/denisenkom/go-mssqldb v0.0.0-20190315220205-a8ed825ac853 github.com/facebookgo/ensure v0.0.0-20160127193407-b4ab57deab51 // indirect github.com/facebookgo/inject v0.0.0-20180706035515-f23751cae28b diff --git a/go.sum b/go.sum index 3c77812fbf7b..55223ecbc74e 100644 --- a/go.sum +++ b/go.sum @@ -16,8 +16,6 @@ github.com/beorn7/perks v0.0.0-20180321164747-3a771d992973 h1:xJ4a3vCFaGF/jqvzLM github.com/beorn7/perks v0.0.0-20180321164747-3a771d992973/go.mod h1:Dwedo/Wpr24TaqPxmxbtue+5NUziq4I4S80YR8gNf3Q= github.com/bradfitz/gomemcache v0.0.0-20180710155616-bc664df96737 h1:rRISKWyXfVxvoa702s91Zl5oREZTrR3yv+tXrrX7G/g= github.com/bradfitz/gomemcache v0.0.0-20180710155616-bc664df96737/go.mod h1:PmM6Mmwb0LSuEubjR8N7PtNe1KxZLtOUHtbeikc5h60= -github.com/brianvoe/gofakeit v3.17.0+incompatible h1:C1+30+c0GtjgGDtRC+iePZeP1WMiwsWCELNJhmc7aIc= -github.com/brianvoe/gofakeit v3.17.0+incompatible/go.mod h1:kfwdRA90vvNhPutZWfH7WPaDzUjz+CZFqG+rPkOjGOc= github.com/cockroachdb/apd v1.1.0 
h1:3LFP3629v+1aKXU5Q37mxmRxX/pIu1nijXydLShEq5I= github.com/cockroachdb/apd v1.1.0/go.mod h1:8Sl8LxpKi29FqWXR16WEFZRNSz3SoPzUzeMeY4+DwBQ= github.com/codahale/hdrhistogram v0.0.0-20161010025455-3a0bb77429bd h1:qMd81Ts1T2OTKmB4acZcyKaMtRnY5Y44NuXGX2GFJ1w= diff --git a/latest.json b/latest.json index 18770c57cce2..97724801882c 100644 --- a/latest.json +++ b/latest.json @@ -1,4 +1,4 @@ { - "stable": "6.2.0", - "testing": "6.2.0" + "stable": "6.2.1", + "testing": "6.2.1" } diff --git a/package.json b/package.json index e2153dcf3419..b4e4ee4f63ee 100644 --- a/package.json +++ b/package.json @@ -44,7 +44,7 @@ "@types/react-window": "1.7.0", "angular-mocks": "1.6.6", "autoprefixer": "9.5.0", - "axios": "0.18.0", + "axios": "0.19.0", "babel-core": "7.0.0-bridge.0", "babel-jest": "24.8.0", "babel-loader": "8.0.5", @@ -131,7 +131,7 @@ "tslint-react": "3.6.0", "typescript": "3.4.1", "webpack": "4.29.6", - "webpack-bundle-analyzer": "3.1.0", + "webpack-bundle-analyzer": "3.3.2", "webpack-cleanup-plugin": "0.5.1", "webpack-cli": "3.2.3", "webpack-dev-server": "3.2.1", @@ -140,9 +140,9 @@ }, "scripts": { "dev": "webpack --progress --colors --mode development --config scripts/webpack/webpack.dev.js", - "start": "ts-node --project ./scripts/cli/tsconfig.json ./scripts/cli/index.ts core:start --watchTheme", - "start:hot": "ts-node --project ./scripts/cli/tsconfig.json ./scripts/cli/index.ts core:start --hot --watchTheme", - "start:ignoreTheme": "ts-node --project ./scripts/cli/tsconfig.json ./scripts/cli/index.ts core:start --hot", + "start": "npm run cli -- core:start --watchTheme", + "start:hot": "npm run cli -- core:start --hot --watchTheme", + "start:ignoreTheme": "npm run cli -- core:start --hot", "watch": "yarn start -d watch,start core:start --watchTheme ", "build": "grunt build", "test": "grunt test", @@ -156,13 +156,13 @@ "themes:generate": "ts-node --project ./scripts/cli/tsconfig.json ./scripts/cli/generateSassVariableFiles.ts", "prettier:check": "prettier --list-different 
\"**/*.{ts,tsx,scss}\"", "prettier:write": "prettier --list-different \"**/*.{ts,tsx,scss}\" --write", + "cli": "ts-node --project ./scripts/cli/tsconfig.json ./scripts/cli/index.ts", "gui:tslint": "tslint -c ./packages/grafana-ui/tslint.json --project ./packages/grafana-ui/tsconfig.json", - "gui:build": "ts-node --project ./scripts/cli/tsconfig.json ./scripts/cli/index.ts gui:build", - "gui:releasePrepare": "ts-node --project ./scripts/cli/tsconfig.json ./scripts/cli/index.ts gui:release", + "gui:build": "npm run cli -- gui:build", + "gui:releasePrepare": "npm run cli -- gui:release", "gui:publish": "cd packages/grafana-ui/dist && npm publish --access public", - "gui:release": "ts-node --project ./scripts/cli/tsconfig.json ./scripts/cli/index.ts gui:release -p --createVersionCommit", - "precommit": "ts-node --project ./scripts/cli/tsconfig.json ./scripts/cli/index.ts precommit", - "cli": "ts-node --project ./scripts/cli/tsconfig.json ./scripts/cli/index.ts" + "gui:release": "npm run cli -- gui:release -p --createVersionCommit", + "precommit": "npm run cli -- precommit" }, "husky": { "hooks": { @@ -206,7 +206,7 @@ "fast-text-encoding": "^1.0.0", "file-saver": "1.3.8", "immutable": "3.8.2", - "jquery": "3.4.0", + "jquery": "3.4.1", "lodash": "4.17.11", "moment": "2.24.0", "mousetrap": "1.6.3", diff --git a/packages/grafana-runtime/README.md b/packages/grafana-runtime/README.md new file mode 100644 index 000000000000..f01cd35537c7 --- /dev/null +++ b/packages/grafana-runtime/README.md @@ -0,0 +1,3 @@ +# Grafana Runtime library + +Interfaces that let you use the runtime... 
\ No newline at end of file diff --git a/packages/grafana-runtime/index.js b/packages/grafana-runtime/index.js new file mode 100644 index 000000000000..d1a4363350e9 --- /dev/null +++ b/packages/grafana-runtime/index.js @@ -0,0 +1,7 @@ +'use strict' + +if (process.env.NODE_ENV === 'production') { + module.exports = require('./index.production.js'); +} else { + module.exports = require('./index.development.js'); +} diff --git a/packages/grafana-runtime/package.json b/packages/grafana-runtime/package.json new file mode 100644 index 000000000000..ed390d63b6af --- /dev/null +++ b/packages/grafana-runtime/package.json @@ -0,0 +1,37 @@ +{ + "name": "@grafana/runtime", + "version": "6.0.1-alpha.0", + "description": "Grafana Runtime Library", + "keywords": [ + "typescript", + "react", + "react-component" + ], + "main": "src/index.ts", + "scripts": { + "tslint": "tslint -c tslint.json --project tsconfig.json", + "typecheck": "tsc --noEmit", + "clean": "rimraf ./dist ./compiled", + "build": "rollup -c rollup.config.ts" + }, + "author": "Grafana Labs", + "license": "Apache-2.0", + "dependencies": { + }, + "devDependencies": { + "awesome-typescript-loader": "^5.2.1", + "lodash": "^4.17.10", + "pretty-format": "^24.5.0", + "rollup": "1.6.0", + "rollup-plugin-commonjs": "9.2.1", + "rollup-plugin-node-resolve": "4.0.1", + "rollup-plugin-sourcemaps": "0.4.2", + "rollup-plugin-terser": "4.0.4", + "rollup-plugin-typescript2": "0.19.3", + "rollup-plugin-visualizer": "0.9.2", + "typescript": "3.4.1" + }, + "resolutions": { + "@types/lodash": "4.14.119" + } +} diff --git a/packages/grafana-runtime/rollup.config.ts b/packages/grafana-runtime/rollup.config.ts new file mode 100644 index 000000000000..a2d6da109d96 --- /dev/null +++ b/packages/grafana-runtime/rollup.config.ts @@ -0,0 +1,50 @@ +import resolve from 'rollup-plugin-node-resolve'; +import commonjs from 'rollup-plugin-commonjs'; +import sourceMaps from 'rollup-plugin-sourcemaps'; +import { terser } from 'rollup-plugin-terser'; + 
+const pkg = require('./package.json'); + +const libraryName = pkg.name; + +const buildCjsPackage = ({ env }) => { + return { + input: `compiled/index.js`, + output: [ + { + file: `dist/index.${env}.js`, + name: libraryName, + format: 'cjs', + sourcemap: true, + exports: 'named', + globals: {}, + }, + ], + external: ['lodash'], // Use Lodash from grafana + plugins: [ + commonjs({ + include: /node_modules/, + namedExports: { + '../../node_modules/lodash/lodash.js': [ + 'flatten', + 'find', + 'upperFirst', + 'debounce', + 'isNil', + 'isNumber', + 'flattenDeep', + 'map', + 'chunk', + 'sortBy', + 'uniqueId', + 'zip', + ], + }, + }), + resolve(), + sourceMaps(), + env === 'production' && terser(), + ], + }; +}; +export default [buildCjsPackage({ env: 'development' }), buildCjsPackage({ env: 'production' })]; diff --git a/packages/grafana-runtime/src/index.ts b/packages/grafana-runtime/src/index.ts new file mode 100644 index 000000000000..e371345e62d8 --- /dev/null +++ b/packages/grafana-runtime/src/index.ts @@ -0,0 +1 @@ +export * from './services'; diff --git a/packages/grafana-runtime/src/services/AngularLoader.ts b/packages/grafana-runtime/src/services/AngularLoader.ts new file mode 100644 index 000000000000..9565a6d41f43 --- /dev/null +++ b/packages/grafana-runtime/src/services/AngularLoader.ts @@ -0,0 +1,19 @@ +export interface AngularComponent { + destroy(): void; + digest(): void; + getScope(): any; +} + +export interface AngularLoader { + load(elem: any, scopeProps: any, template: string): AngularComponent; +} + +let instance: AngularLoader; + +export function setAngularLoader(v: AngularLoader) { + instance = v; +} + +export function getAngularLoader(): AngularLoader { + return instance; +} diff --git a/packages/grafana-runtime/src/services/backendSrv.ts b/packages/grafana-runtime/src/services/backendSrv.ts new file mode 100644 index 000000000000..a30296eca8cc --- /dev/null +++ b/packages/grafana-runtime/src/services/backendSrv.ts @@ -0,0 +1,42 @@ +/** + * 
Currently implemented with: + * https://docs.angularjs.org/api/ng/service/$http#usage + * but that will likely change in the future + */ +export type BackendSrvRequest = { + url: string; + retry?: number; + headers?: any; + method?: string; + + // Show a message with the result + showSuccessAlert?: boolean; + + [key: string]: any; +}; + +export interface BackendSrv { + get(url: string, params?: any): Promise; + + delete(url: string): Promise; + + post(url: string, data: any): Promise; + + patch(url: string, data: any): Promise; + + put(url: string, data: any): Promise; + + // If there is an error, set: err.isHandled = true + // otherwise the backend will show a message for you + request(options: BackendSrvRequest): Promise; +} + +let singletonInstance: BackendSrv; + +export function setBackendSrv(instance: BackendSrv) { + singletonInstance = instance; +} + +export function getBackendSrv(): BackendSrv { + return singletonInstance; +} diff --git a/packages/grafana-runtime/src/services/dataSourceSrv.ts b/packages/grafana-runtime/src/services/dataSourceSrv.ts new file mode 100644 index 000000000000..1f3bbbb8436b --- /dev/null +++ b/packages/grafana-runtime/src/services/dataSourceSrv.ts @@ -0,0 +1,15 @@ +import { ScopedVars, DataSourceApi } from '@grafana/ui'; + +export interface DataSourceSrv { + get(name?: string, scopedVars?: ScopedVars): Promise; +} + +let singletonInstance: DataSourceSrv; + +export function setDataSourceSrv(instance: DataSourceSrv) { + singletonInstance = instance; +} + +export function getDataSourceSrv(): DataSourceSrv { + return singletonInstance; +} diff --git a/packages/grafana-runtime/src/services/index.ts b/packages/grafana-runtime/src/services/index.ts new file mode 100644 index 000000000000..08517c0650b5 --- /dev/null +++ b/packages/grafana-runtime/src/services/index.ts @@ -0,0 +1,3 @@ +export * from './backendSrv'; +export * from './AngularLoader'; +export * from './dataSourceSrv'; diff --git a/packages/grafana-runtime/tsconfig.build.json 
b/packages/grafana-runtime/tsconfig.build.json new file mode 100644 index 000000000000..34e37b5d0b84 --- /dev/null +++ b/packages/grafana-runtime/tsconfig.build.json @@ -0,0 +1,4 @@ +{ + "extends": "./tsconfig.json", + "exclude": ["dist", "node_modules", "**/*.test.ts", "**/*.test.tsx"] +} diff --git a/packages/grafana-runtime/tsconfig.json b/packages/grafana-runtime/tsconfig.json new file mode 100644 index 000000000000..dcc4fd974360 --- /dev/null +++ b/packages/grafana-runtime/tsconfig.json @@ -0,0 +1,19 @@ +{ + "extends": "../../tsconfig.json", + "include": ["src/**/*.ts", "src/**/*.tsx", "../../public/app/types/jquery/*.ts"], + "exclude": ["dist", "node_modules"], + "compilerOptions": { + "rootDirs": ["."], + "module": "esnext", + "outDir": "compiled", + "declaration": true, + "declarationDir": "dist", + "strict": true, + "alwaysStrict": true, + "noImplicitAny": true, + "strictNullChecks": true, + "typeRoots": ["./node_modules/@types", "types"], + "skipLibCheck": true, // Temp workaround for Duplicate identifier tsc errors, + "removeComments": false + } +} diff --git a/packages/grafana-runtime/tslint.json b/packages/grafana-runtime/tslint.json new file mode 100644 index 000000000000..f51293736244 --- /dev/null +++ b/packages/grafana-runtime/tslint.json @@ -0,0 +1,6 @@ +{ + "extends": "../../tslint.json", + "rules": { + "import-blacklist": [true, ["^@grafana/runtime.*"]] + } +} diff --git a/packages/grafana-ui/package.json b/packages/grafana-ui/package.json index 46b402c92f97..1e672eb46ebf 100644 --- a/packages/grafana-ui/package.json +++ b/packages/grafana-ui/package.json @@ -23,7 +23,7 @@ "@types/react-color": "2.17.0", "classnames": "2.2.6", "d3": "5.9.1", - "jquery": "3.4.0", + "jquery": "3.4.1", "lodash": "4.17.11", "moment": "2.24.0", "papaparse": "4.6.3", diff --git a/packages/grafana-ui/src/components/BarGauge/BarGauge.tsx b/packages/grafana-ui/src/components/BarGauge/BarGauge.tsx index 4351b6671d6e..cb08d1b15aba 100644 --- 
a/packages/grafana-ui/src/components/BarGauge/BarGauge.tsx +++ b/packages/grafana-ui/src/components/BarGauge/BarGauge.tsx @@ -11,8 +11,9 @@ import { DisplayValue, Themeable, TimeSeriesValue, Threshold, VizOrientation } f const MIN_VALUE_HEIGHT = 18; const MAX_VALUE_HEIGHT = 50; const MIN_VALUE_WIDTH = 50; -const MAX_VALUE_WIDTH = 100; -const LINE_HEIGHT = 1.5; +const MAX_VALUE_WIDTH = 150; +const TITLE_LINE_HEIGHT = 1.5; +const VALUE_LINE_HEIGHT = 1; export interface Props extends Themeable { height: number; @@ -227,7 +228,7 @@ function calculateTitleDimensions(props: Props): TitleDimensions { return { fontSize: 14, width: width, - height: 14 * LINE_HEIGHT, + height: 14 * TITLE_LINE_HEIGHT, placement: 'below', }; } @@ -238,7 +239,7 @@ function calculateTitleDimensions(props: Props): TitleDimensions { const titleHeight = Math.max(Math.min(height * maxTitleHeightRatio, MAX_VALUE_HEIGHT), 17); return { - fontSize: titleHeight / LINE_HEIGHT, + fontSize: titleHeight / TITLE_LINE_HEIGHT, width: 0, height: titleHeight, placement: 'above', @@ -251,7 +252,7 @@ function calculateTitleDimensions(props: Props): TitleDimensions { const titleHeight = Math.max(height * maxTitleHeightRatio, MIN_VALUE_HEIGHT); return { - fontSize: titleHeight / LINE_HEIGHT, + fontSize: titleHeight / TITLE_LINE_HEIGHT, height: 0, width: Math.min(Math.max(width * maxTitleWidthRatio, 50), 200), placement: 'left', @@ -485,7 +486,7 @@ export function getValueColor(props: Props): string { * Only exported to for unit test */ function getValueStyles(value: string, color: string, width: number, height: number): CSSProperties { - const heightFont = height / LINE_HEIGHT; + const heightFont = height / VALUE_LINE_HEIGHT; const guess = width / (value.length * 1.1); const fontSize = Math.min(Math.max(guess, 14), heightFont); @@ -495,6 +496,15 @@ function getValueStyles(value: string, color: string, width: number, height: num width: `${width}px`, display: 'flex', alignItems: 'center', - fontSize: 
fontSize.toFixed(2) + 'px', + lineHeight: VALUE_LINE_HEIGHT, + fontSize: fontSize.toFixed(4) + 'px', }; } + +// function getTextWidth(text: string): number { +// const canvas = getTextWidth.canvas || (getTextWidth.canvas = document.createElement("canvas")); +// var context = canvas.getContext("2d"); +// context.font = "'Roboto', 'Helvetica Neue', Arial, sans-serif"; +// var metrics = context.measureText(text); +// return metrics.width; +// } diff --git a/packages/grafana-ui/src/components/BarGauge/__snapshots__/BarGauge.test.tsx.snap b/packages/grafana-ui/src/components/BarGauge/__snapshots__/BarGauge.test.tsx.snap index 4bb9395dd96e..1d341a9b0d4c 100644 --- a/packages/grafana-ui/src/components/BarGauge/__snapshots__/BarGauge.test.tsx.snap +++ b/packages/grafana-ui/src/components/BarGauge/__snapshots__/BarGauge.test.tsx.snap @@ -18,8 +18,9 @@ exports[`BarGauge Render with basic options should render 1`] = ` "alignItems": "center", "color": "#73BF69", "display": "flex", - "fontSize": "27.27px", + "fontSize": "27.2727px", "height": "300px", + "lineHeight": 1, "paddingLeft": "10px", "width": "60px", } diff --git a/packages/grafana-ui/src/components/Gauge/Gauge.tsx b/packages/grafana-ui/src/components/Gauge/Gauge.tsx index eb49891d298e..0a0495c4848d 100644 --- a/packages/grafana-ui/src/components/Gauge/Gauge.tsx +++ b/packages/grafana-ui/src/components/Gauge/Gauge.tsx @@ -58,7 +58,7 @@ export class Gauge extends PureComponent { if (length > 12) { return FONT_SCALE - (length * 5) / 110; } - return FONT_SCALE - (length * 5) / 100; + return FONT_SCALE - (length * 5) / 101; } draw() { @@ -78,7 +78,8 @@ export class Gauge extends PureComponent { const gaugeWidthReduceRatio = showThresholdLabels ? 1.5 : 1; const gaugeWidth = Math.min(dimension / 5.5, 40) / gaugeWidthReduceRatio; const thresholdMarkersWidth = gaugeWidth / 5; - const fontSize = Math.min(dimension / 5.5, 100) * (value.text !== null ? 
this.getFontScale(value.text.length) : 1); + const fontSize = Math.min(dimension / 4, 100) * (value.text !== null ? this.getFontScale(value.text.length) : 1); + const thresholdLabelFontSize = fontSize / 2.5; const options: any = { diff --git a/packages/grafana-ui/src/components/SetInterval/SetInterval.tsx b/packages/grafana-ui/src/components/SetInterval/SetInterval.tsx index cdcc1f406bbb..026aa5600a15 100644 --- a/packages/grafana-ui/src/components/SetInterval/SetInterval.tsx +++ b/packages/grafana-ui/src/components/SetInterval/SetInterval.tsx @@ -38,7 +38,7 @@ export class SetInterval extends PureComponent { } componentDidUpdate(prevProps: Props) { - if (_.isEqual(prevProps, this.props)) { + if ((isLive(prevProps.interval) && isLive(this.props.interval)) || _.isEqual(prevProps, this.props)) { return; } diff --git a/packages/grafana-ui/src/types/datasource.ts b/packages/grafana-ui/src/types/datasource.ts index dd364a7d9c9e..a2629ec6f6de 100644 --- a/packages/grafana-ui/src/types/datasource.ts +++ b/packages/grafana-ui/src/types/datasource.ts @@ -75,9 +75,7 @@ export class DataSourcePlugin< export interface DataSourcePluginMeta extends PluginMeta { builtIn?: boolean; // Is this for all metrics?: boolean; - tables?: boolean; logs?: boolean; - explore?: boolean; annotations?: boolean; alerting?: boolean; mixed?: boolean; @@ -85,7 +83,7 @@ export interface DataSourcePluginMeta extends PluginMeta { category?: string; queryOptions?: PluginMetaQueryOptions; sort?: number; - supportsStreaming?: boolean; + streaming?: boolean; /** * By default, hidden queries are not passed to the datasource @@ -166,10 +164,6 @@ export abstract class DataSourceApi< */ abstract query(options: DataQueryRequest, observer?: DataStreamObserver): Promise; - convertToStreamTargets?(options: DataQueryRequest): Array<{ url: string; refId: string }>; - - resultToSeriesData?(data: any, refId: string): SeriesData[]; - /** * Test & verify datasource settings & connection details */ diff --git 
a/packages/grafana-ui/src/utils/logs.ts b/packages/grafana-ui/src/utils/logs.ts index fb8c7977e2ad..b5c45b635daf 100644 --- a/packages/grafana-ui/src/utils/logs.ts +++ b/packages/grafana-ui/src/utils/logs.ts @@ -23,6 +23,15 @@ export function getLogLevel(line: string): LogLevel { return LogLevel.unknown; } +export function getLogLevelFromKey(key: string): LogLevel { + const level = (LogLevel as any)[key]; + if (level) { + return level; + } + + return LogLevel.unknown; +} + export function addLogLevelToSeries(series: SeriesData, lineIndex: number): SeriesData { return { ...series, // Keeps Tags, RefID etc diff --git a/packages/grafana-ui/src/utils/processSeriesData.test.ts b/packages/grafana-ui/src/utils/processSeriesData.test.ts index 96afa79aa8c0..ea582e89b3a8 100644 --- a/packages/grafana-ui/src/utils/processSeriesData.test.ts +++ b/packages/grafana-ui/src/utils/processSeriesData.test.ts @@ -6,7 +6,7 @@ import { guessFieldTypes, guessFieldTypeFromValue, } from './processSeriesData'; -import { FieldType, TimeSeries } from '../types/data'; +import { FieldType, TimeSeries, SeriesData, TableData } from '../types/data'; import { dateTime } from './moment_wrapper'; describe('toSeriesData', () => { @@ -99,4 +99,25 @@ describe('SerisData backwards compatibility', () => { expect(isTableData(roundtrip)).toBeTruthy(); expect(roundtrip).toMatchObject(table); }); + + it('converts SeriesData to TableData to series and back again', () => { + const series: SeriesData = { + refId: 'Z', + meta: { + somethign: 8, + }, + fields: [ + { name: 'T', type: FieldType.time }, // first + { name: 'N', type: FieldType.number, filterable: true }, + { name: 'S', type: FieldType.string, filterable: true }, + ], + rows: [[1, 100, '1'], [2, 200, '2'], [3, 300, '3']], + }; + const table = toLegacyResponseData(series) as TableData; + expect(table.meta).toBe(series.meta); + expect(table.refId).toBe(series.refId); + + const names = table.columns.map(c => c.text); + expect(names).toEqual(['T', 'N', 
'S']); + }); }); diff --git a/packages/grafana-ui/src/utils/processSeriesData.ts b/packages/grafana-ui/src/utils/processSeriesData.ts index 1ba31e3bbdee..38e9abf91358 100644 --- a/packages/grafana-ui/src/utils/processSeriesData.ts +++ b/packages/grafana-ui/src/utils/processSeriesData.ts @@ -4,7 +4,7 @@ import isString from 'lodash/isString'; import isBoolean from 'lodash/isBoolean'; // Types -import { SeriesData, Field, TimeSeries, FieldType, TableData } from '../types/index'; +import { SeriesData, Field, TimeSeries, FieldType, TableData, Column } from '../types/index'; import { isDateTime } from './moment_wrapper'; function convertTableToSeriesData(table: TableData): SeriesData { @@ -160,6 +160,7 @@ export const toLegacyResponseData = (series: SeriesData): TimeSeries | TableData const type = guessFieldTypeFromSeries(series, 1); if (type === FieldType.time) { return { + alias: fields[0].name || series.name, target: fields[0].name || series.name, datapoints: rows, unit: fields[0].unit, @@ -171,14 +172,12 @@ export const toLegacyResponseData = (series: SeriesData): TimeSeries | TableData return { columns: fields.map(f => { - return { - text: f.name, - filterable: f.filterable, - unit: f.unit, - refId: series.refId, - meta: series.meta, - }; + const { name, ...column } = f; + (column as Column).text = name; + return column as Column; }), + refId: series.refId, + meta: series.meta, rows, }; }; diff --git a/packages/grafana-ui/tslint.json b/packages/grafana-ui/tslint.json index 937aa29800e5..1033e1962fc7 100644 --- a/packages/grafana-ui/tslint.json +++ b/packages/grafana-ui/tslint.json @@ -1,6 +1,6 @@ { "extends": "../../tslint.json", "rules": { - "import-blacklist": [true, "moment", ["^@grafana/ui.*"]] + "import-blacklist": [true, "moment", ["^@grafana/ui.*"], ["^@grafana/runtime.*"]] } } diff --git a/pkg/api/alerting.go b/pkg/api/alerting.go index 0cd00d3b015f..e5e943260275 100644 --- a/pkg/api/alerting.go +++ b/pkg/api/alerting.go @@ -131,9 +131,9 @@ func AlertTest(c 
*m.ReqContext, dto dtos.AlertTestCommand) Response { } backendCmd := alerting.AlertTestCommand{ - OrgId: c.OrgId, + OrgID: c.OrgId, Dashboard: dto.Dashboard, - PanelId: dto.PanelId, + PanelID: dto.PanelId, User: c.SignedInUser, } diff --git a/pkg/api/http_server.go b/pkg/api/http_server.go index 7ec4fbaa3b3c..d2094b33cb1a 100644 --- a/pkg/api/http_server.go +++ b/pkg/api/http_server.go @@ -30,7 +30,7 @@ import ( "github.com/grafana/grafana/pkg/setting" "github.com/prometheus/client_golang/prometheus" "github.com/prometheus/client_golang/prometheus/promhttp" - "gopkg.in/macaron.v1" + macaron "gopkg.in/macaron.v1" ) func init() { @@ -227,6 +227,10 @@ func (hs *HTTPServer) addMiddlewaresAndStaticRoutes() { m.Use(middleware.AddDefaultResponseHeaders()) + if setting.ServeFromSubPath && setting.AppSubUrl != "" { + m.SetURLPrefix(setting.AppSubUrl) + } + m.Use(macaron.Renderer(macaron.RenderOptions{ Directory: path.Join(setting.StaticRootPath, "views"), IndentJSON: macaron.Env != macaron.PROD, diff --git a/pkg/cmd/grafana-cli/commands/commands.go b/pkg/cmd/grafana-cli/commands/commands.go index d5add2b71684..ebaee5573482 100644 --- a/pkg/cmd/grafana-cli/commands/commands.go +++ b/pkg/cmd/grafana-cli/commands/commands.go @@ -7,14 +7,16 @@ import ( "github.com/codegangsta/cli" "github.com/fatih/color" "github.com/grafana/grafana/pkg/bus" + "github.com/grafana/grafana/pkg/cmd/grafana-cli/commands/datamigrations" "github.com/grafana/grafana/pkg/cmd/grafana-cli/logger" + "github.com/grafana/grafana/pkg/cmd/grafana-cli/utils" "github.com/grafana/grafana/pkg/services/sqlstore" "github.com/grafana/grafana/pkg/setting" ) -func runDbCommand(command func(commandLine CommandLine) error) func(context *cli.Context) { +func runDbCommand(command func(commandLine utils.CommandLine, sqlStore *sqlstore.SqlStore) error) func(context *cli.Context) { return func(context *cli.Context) { - cmd := &contextCommandLine{context} + cmd := &utils.ContextCommandLine{Context: context} cfg := 
setting.NewCfg() cfg.Load(&setting.CommandLineArgs{ @@ -28,7 +30,7 @@ func runDbCommand(command func(commandLine CommandLine) error) func(context *cli engine.Bus = bus.GetBus() engine.Init() - if err := command(cmd); err != nil { + if err := command(cmd, engine); err != nil { logger.Errorf("\n%s: ", color.RedString("Error")) logger.Errorf("%s\n\n", err) @@ -40,10 +42,10 @@ func runDbCommand(command func(commandLine CommandLine) error) func(context *cli } } -func runPluginCommand(command func(commandLine CommandLine) error) func(context *cli.Context) { +func runPluginCommand(command func(commandLine utils.CommandLine) error) func(context *cli.Context) { return func(context *cli.Context) { - cmd := &contextCommandLine{context} + cmd := &utils.ContextCommandLine{Context: context} if err := command(cmd); err != nil { logger.Errorf("\n%s: ", color.RedString("Error")) logger.Errorf("%s %s\n\n", color.RedString("✗"), err) @@ -107,6 +109,17 @@ var adminCommands = []cli.Command{ }, }, }, + { + Name: "data-migration", + Usage: "Runs a script that migrates or cleanups data in your db", + Subcommands: []cli.Command{ + { + Name: "encrypt-datasource-passwords", + Usage: "Migrates passwords from unsecured fields to secure_json_data field. Return ok unless there is an error. 
Safe to execute multiple times.", + Action: runDbCommand(datamigrations.EncryptDatasourcePaswords), + }, + }, + }, } var Commands = []cli.Command{ diff --git a/pkg/cmd/grafana-cli/commands/datamigrations/encrypt_datasource_passwords.go b/pkg/cmd/grafana-cli/commands/datamigrations/encrypt_datasource_passwords.go new file mode 100644 index 000000000000..e55fa2d70b88 --- /dev/null +++ b/pkg/cmd/grafana-cli/commands/datamigrations/encrypt_datasource_passwords.go @@ -0,0 +1,126 @@ +package datamigrations + +import ( + "context" + "encoding/json" + + "github.com/fatih/color" + "github.com/grafana/grafana/pkg/cmd/grafana-cli/logger" + + "github.com/grafana/grafana/pkg/cmd/grafana-cli/utils" + "github.com/grafana/grafana/pkg/services/sqlstore" + "github.com/grafana/grafana/pkg/setting" + "github.com/grafana/grafana/pkg/util" + "github.com/grafana/grafana/pkg/util/errutil" +) + +var ( + datasourceTypes = []string{ + "mysql", + "influxdb", + "elasticsearch", + "graphite", + "prometheus", + "opentsdb", + } +) + +// EncryptDatasourcePaswords migrates un-encrypted secrets on datasources +// to the secureJson Column. 
+func EncryptDatasourcePaswords(c utils.CommandLine, sqlStore *sqlstore.SqlStore) error { + return sqlStore.WithDbSession(context.Background(), func(session *sqlstore.DBSession) error { + passwordsUpdated, err := migrateColumn(session, "password") + if err != nil { + return err + } + + basicAuthUpdated, err := migrateColumn(session, "basic_auth_password") + if err != nil { + return err + } + + logger.Info("\n") + if passwordsUpdated > 0 { + logger.Infof("%s Encrypted password field for %d datasources \n", color.GreenString("✔"), passwordsUpdated) + } + + if basicAuthUpdated > 0 { + logger.Infof("%s Encrypted basic_auth_password field for %d datasources \n", color.GreenString("✔"), basicAuthUpdated) + } + + if passwordsUpdated == 0 && basicAuthUpdated == 0 { + logger.Infof("%s All datasources secrets are allready encrypted\n", color.GreenString("✔")) + } + + logger.Info("\n") + + logger.Warn("Warning: Datasource provisioning files need to be manually changed to prevent overwriting of " + + "the data during provisioning. 
See https://grafana.com/docs/installation/upgrading/#upgrading-to-v6-2 for " + + "details") + return nil + }) +} + +func migrateColumn(session *sqlstore.DBSession, column string) (int, error) { + var rows []map[string]string + + session.Cols("id", column, "secure_json_data") + session.Table("data_source") + session.In("type", datasourceTypes) + session.Where(column + " IS NOT NULL AND " + column + " != ''") + err := session.Find(&rows) + + if err != nil { + return 0, errutil.Wrapf(err, "failed to select column: %s", column) + } + + rowsUpdated, err := updateRows(session, rows, column) + return rowsUpdated, errutil.Wrapf(err, "failed to update column: %s", column) +} + +func updateRows(session *sqlstore.DBSession, rows []map[string]string, passwordFieldName string) (int, error) { + var rowsUpdated int + + for _, row := range rows { + newSecureJSONData, err := getUpdatedSecureJSONData(row, passwordFieldName) + if err != nil { + return 0, err + } + + data, err := json.Marshal(newSecureJSONData) + if err != nil { + return 0, errutil.Wrap("marshaling newSecureJsonData failed", err) + } + + newRow := map[string]interface{}{"secure_json_data": data, passwordFieldName: ""} + session.Table("data_source") + session.Where("id = ?", row["id"]) + // Setting both columns while having value only for secure_json_data should clear the [passwordFieldName] column + session.Cols("secure_json_data", passwordFieldName) + + _, err = session.Update(newRow) + if err != nil { + return 0, err + } + + rowsUpdated++ + } + return rowsUpdated, nil +} + +func getUpdatedSecureJSONData(row map[string]string, passwordFieldName string) (map[string]interface{}, error) { + encryptedPassword, err := util.Encrypt([]byte(row[passwordFieldName]), setting.SecretKey) + if err != nil { + return nil, err + } + + var secureJSONData map[string]interface{} + + if err := json.Unmarshal([]byte(row["secure_json_data"]), &secureJSONData); err != nil { + return nil, err + } + + jsonFieldName := 
util.ToCamelCase(passwordFieldName) + secureJSONData[jsonFieldName] = encryptedPassword + return secureJSONData, nil +} diff --git a/pkg/cmd/grafana-cli/commands/datamigrations/encrypt_datasource_passwords_test.go b/pkg/cmd/grafana-cli/commands/datamigrations/encrypt_datasource_passwords_test.go new file mode 100644 index 000000000000..64987423decd --- /dev/null +++ b/pkg/cmd/grafana-cli/commands/datamigrations/encrypt_datasource_passwords_test.go @@ -0,0 +1,67 @@ +package datamigrations + +import ( + "testing" + "time" + + "github.com/grafana/grafana/pkg/cmd/grafana-cli/commands/commandstest" + "github.com/grafana/grafana/pkg/components/securejsondata" + "github.com/grafana/grafana/pkg/models" + "github.com/grafana/grafana/pkg/services/sqlstore" + "github.com/stretchr/testify/assert" +) + +func TestPasswordMigrationCommand(t *testing.T) { + //setup datasources with password, basic_auth and none + sqlstore := sqlstore.InitTestDB(t) + session := sqlstore.NewSession() + defer session.Close() + + datasources := []*models.DataSource{ + {Type: "influxdb", Name: "influxdb", Password: "foobar"}, + {Type: "graphite", Name: "graphite", BasicAuthPassword: "foobar"}, + {Type: "prometheus", Name: "prometheus", SecureJsonData: securejsondata.GetEncryptedJsonData(map[string]string{})}, + } + + // set required default values + for _, ds := range datasources { + ds.Created = time.Now() + ds.Updated = time.Now() + ds.SecureJsonData = securejsondata.GetEncryptedJsonData(map[string]string{}) + } + + _, err := session.Insert(&datasources) + assert.Nil(t, err) + + //run migration + err = EncryptDatasourcePaswords(&commandstest.FakeCommandLine{}, sqlstore) + assert.Nil(t, err) + + //verify that no datasources still have password or basic_auth + var dss []*models.DataSource + err = session.SQL("select * from data_source").Find(&dss) + assert.Nil(t, err) + assert.Equal(t, len(dss), 3) + + for _, ds := range dss { + sj := ds.SecureJsonData.Decrypt() + + if ds.Name == "influxdb" { + 
assert.Equal(t, ds.Password, "") + v, exist := sj["password"] + assert.True(t, exist) + assert.Equal(t, v, "foobar", "expected password to be moved to securejson") + } + + if ds.Name == "graphite" { + assert.Equal(t, ds.BasicAuthPassword, "") + v, exist := sj["basicAuthPassword"] + assert.True(t, exist) + assert.Equal(t, v, "foobar", "expected basic_auth_password to be moved to securejson") + } + + if ds.Name == "prometheus" { + assert.Equal(t, len(sj), 0) + } + } +} diff --git a/pkg/cmd/grafana-cli/commands/install_command.go b/pkg/cmd/grafana-cli/commands/install_command.go index 99cef15e50e3..db3907682638 100644 --- a/pkg/cmd/grafana-cli/commands/install_command.go +++ b/pkg/cmd/grafana-cli/commands/install_command.go @@ -14,13 +14,14 @@ import ( "strings" "github.com/fatih/color" + "github.com/grafana/grafana/pkg/cmd/grafana-cli/utils" "github.com/grafana/grafana/pkg/cmd/grafana-cli/logger" m "github.com/grafana/grafana/pkg/cmd/grafana-cli/models" s "github.com/grafana/grafana/pkg/cmd/grafana-cli/services" ) -func validateInput(c CommandLine, pluginFolder string) error { +func validateInput(c utils.CommandLine, pluginFolder string) error { arg := c.Args().First() if arg == "" { return errors.New("please specify plugin to install") @@ -46,7 +47,7 @@ func validateInput(c CommandLine, pluginFolder string) error { return nil } -func installCommand(c CommandLine) error { +func installCommand(c utils.CommandLine) error { pluginFolder := c.PluginDirectory() if err := validateInput(c, pluginFolder); err != nil { return err @@ -60,7 +61,7 @@ func installCommand(c CommandLine) error { // InstallPlugin downloads the plugin code as a zip file from the Grafana.com API // and then extracts the zip into the plugins directory. 
-func InstallPlugin(pluginName, version string, c CommandLine) error { +func InstallPlugin(pluginName, version string, c utils.CommandLine) error { pluginFolder := c.PluginDirectory() downloadURL := c.PluginURL() if downloadURL == "" { diff --git a/pkg/cmd/grafana-cli/commands/listremote_command.go b/pkg/cmd/grafana-cli/commands/listremote_command.go index 4798369def11..7351ee58a371 100644 --- a/pkg/cmd/grafana-cli/commands/listremote_command.go +++ b/pkg/cmd/grafana-cli/commands/listremote_command.go @@ -3,9 +3,10 @@ package commands import ( "github.com/grafana/grafana/pkg/cmd/grafana-cli/logger" s "github.com/grafana/grafana/pkg/cmd/grafana-cli/services" + "github.com/grafana/grafana/pkg/cmd/grafana-cli/utils" ) -func listremoteCommand(c CommandLine) error { +func listremoteCommand(c utils.CommandLine) error { plugin, err := s.ListAllPlugins(c.RepoDirectory()) if err != nil { diff --git a/pkg/cmd/grafana-cli/commands/listversions_command.go b/pkg/cmd/grafana-cli/commands/listversions_command.go index 95c536e94f0a..78d681c06a3a 100644 --- a/pkg/cmd/grafana-cli/commands/listversions_command.go +++ b/pkg/cmd/grafana-cli/commands/listversions_command.go @@ -5,9 +5,10 @@ import ( "github.com/grafana/grafana/pkg/cmd/grafana-cli/logger" s "github.com/grafana/grafana/pkg/cmd/grafana-cli/services" + "github.com/grafana/grafana/pkg/cmd/grafana-cli/utils" ) -func validateVersionInput(c CommandLine) error { +func validateVersionInput(c utils.CommandLine) error { arg := c.Args().First() if arg == "" { return errors.New("please specify plugin to list versions for") @@ -16,7 +17,7 @@ func validateVersionInput(c CommandLine) error { return nil } -func listversionsCommand(c CommandLine) error { +func listversionsCommand(c utils.CommandLine) error { if err := validateVersionInput(c); err != nil { return err } diff --git a/pkg/cmd/grafana-cli/commands/ls_command.go b/pkg/cmd/grafana-cli/commands/ls_command.go index 30745ce3172d..63492d732e98 100644 --- 
a/pkg/cmd/grafana-cli/commands/ls_command.go +++ b/pkg/cmd/grafana-cli/commands/ls_command.go @@ -8,6 +8,7 @@ import ( "github.com/grafana/grafana/pkg/cmd/grafana-cli/logger" m "github.com/grafana/grafana/pkg/cmd/grafana-cli/models" s "github.com/grafana/grafana/pkg/cmd/grafana-cli/services" + "github.com/grafana/grafana/pkg/cmd/grafana-cli/utils" ) var ls_getPlugins func(path string) []m.InstalledPlugin = s.GetLocalPlugins @@ -31,7 +32,7 @@ var validateLsCommand = func(pluginDir string) error { return nil } -func lsCommand(c CommandLine) error { +func lsCommand(c utils.CommandLine) error { pluginDir := c.PluginDirectory() if err := validateLsCommand(pluginDir); err != nil { return err diff --git a/pkg/cmd/grafana-cli/commands/remove_command.go b/pkg/cmd/grafana-cli/commands/remove_command.go index e51929dc95cb..eb536d7b8c7a 100644 --- a/pkg/cmd/grafana-cli/commands/remove_command.go +++ b/pkg/cmd/grafana-cli/commands/remove_command.go @@ -5,12 +5,13 @@ import ( "fmt" "strings" - services "github.com/grafana/grafana/pkg/cmd/grafana-cli/services" + "github.com/grafana/grafana/pkg/cmd/grafana-cli/services" + "github.com/grafana/grafana/pkg/cmd/grafana-cli/utils" ) var removePlugin func(pluginPath, id string) error = services.RemoveInstalledPlugin -func removeCommand(c CommandLine) error { +func removeCommand(c utils.CommandLine) error { pluginPath := c.PluginDirectory() plugin := c.Args().First() diff --git a/pkg/cmd/grafana-cli/commands/reset_password_command.go b/pkg/cmd/grafana-cli/commands/reset_password_command.go index af2b8b3f89ae..4a6a4b674f2e 100644 --- a/pkg/cmd/grafana-cli/commands/reset_password_command.go +++ b/pkg/cmd/grafana-cli/commands/reset_password_command.go @@ -6,13 +6,15 @@ import ( "github.com/fatih/color" "github.com/grafana/grafana/pkg/bus" "github.com/grafana/grafana/pkg/cmd/grafana-cli/logger" + "github.com/grafana/grafana/pkg/cmd/grafana-cli/utils" "github.com/grafana/grafana/pkg/models" + "github.com/grafana/grafana/pkg/services/sqlstore" 
"github.com/grafana/grafana/pkg/util" ) const AdminUserId = 1 -func resetPasswordCommand(c CommandLine) error { +func resetPasswordCommand(c utils.CommandLine, sqlStore *sqlstore.SqlStore) error { newPassword := c.Args().First() password := models.Password(newPassword) diff --git a/pkg/cmd/grafana-cli/commands/upgrade_all_command.go b/pkg/cmd/grafana-cli/commands/upgrade_all_command.go index e01df2dab602..a5aadbbb0c23 100644 --- a/pkg/cmd/grafana-cli/commands/upgrade_all_command.go +++ b/pkg/cmd/grafana-cli/commands/upgrade_all_command.go @@ -4,6 +4,7 @@ import ( "github.com/grafana/grafana/pkg/cmd/grafana-cli/logger" m "github.com/grafana/grafana/pkg/cmd/grafana-cli/models" s "github.com/grafana/grafana/pkg/cmd/grafana-cli/services" + "github.com/grafana/grafana/pkg/cmd/grafana-cli/utils" "github.com/hashicorp/go-version" ) @@ -27,7 +28,7 @@ func ShouldUpgrade(installed string, remote m.Plugin) bool { return false } -func upgradeAllCommand(c CommandLine) error { +func upgradeAllCommand(c utils.CommandLine) error { pluginsDir := c.PluginDirectory() localPlugins := s.GetLocalPlugins(pluginsDir) diff --git a/pkg/cmd/grafana-cli/commands/upgrade_command.go b/pkg/cmd/grafana-cli/commands/upgrade_command.go index 396371d35772..f32961ce5895 100644 --- a/pkg/cmd/grafana-cli/commands/upgrade_command.go +++ b/pkg/cmd/grafana-cli/commands/upgrade_command.go @@ -4,9 +4,10 @@ import ( "github.com/fatih/color" "github.com/grafana/grafana/pkg/cmd/grafana-cli/logger" s "github.com/grafana/grafana/pkg/cmd/grafana-cli/services" + "github.com/grafana/grafana/pkg/cmd/grafana-cli/utils" ) -func upgradeCommand(c CommandLine) error { +func upgradeCommand(c utils.CommandLine) error { pluginsDir := c.PluginDirectory() pluginName := c.Args().First() diff --git a/pkg/cmd/grafana-cli/commands/command_line.go b/pkg/cmd/grafana-cli/utils/command_line.go similarity index 64% rename from pkg/cmd/grafana-cli/commands/command_line.go rename to pkg/cmd/grafana-cli/utils/command_line.go index 
d487aff8aaaa..d3142d0f195e 100644 --- a/pkg/cmd/grafana-cli/commands/command_line.go +++ b/pkg/cmd/grafana-cli/utils/command_line.go @@ -1,4 +1,4 @@ -package commands +package utils import ( "github.com/codegangsta/cli" @@ -22,30 +22,30 @@ type CommandLine interface { PluginURL() string } -type contextCommandLine struct { +type ContextCommandLine struct { *cli.Context } -func (c *contextCommandLine) ShowHelp() { +func (c *ContextCommandLine) ShowHelp() { cli.ShowCommandHelp(c.Context, c.Command.Name) } -func (c *contextCommandLine) ShowVersion() { +func (c *ContextCommandLine) ShowVersion() { cli.ShowVersion(c.Context) } -func (c *contextCommandLine) Application() *cli.App { +func (c *ContextCommandLine) Application() *cli.App { return c.App } -func (c *contextCommandLine) PluginDirectory() string { +func (c *ContextCommandLine) PluginDirectory() string { return c.GlobalString("pluginsDir") } -func (c *contextCommandLine) RepoDirectory() string { +func (c *ContextCommandLine) RepoDirectory() string { return c.GlobalString("repo") } -func (c *contextCommandLine) PluginURL() string { +func (c *ContextCommandLine) PluginURL() string { return c.GlobalString("pluginUrl") } diff --git a/pkg/components/gtime/gtime.go b/pkg/components/gtime/gtime.go new file mode 100644 index 000000000000..e3e4e449f661 --- /dev/null +++ b/pkg/components/gtime/gtime.go @@ -0,0 +1,28 @@ +package gtime + +import ( + "regexp" + "strconv" + "time" +) + +// ParseInterval parses and interval with support for all units that Grafana uses. 
+func ParseInterval(interval string) (time.Duration, error) { + re := regexp.MustCompile(`(\d+)([wdy])`) + result := re.FindSubmatch([]byte(interval)) + + if len(result) == 3 { + num, _ := strconv.Atoi(string(result[1])) + period := string(result[2]) + + if period == `d` { + return time.Hour * 24 * time.Duration(num), nil + } else if period == `w` { + return time.Hour * 24 * 7 * time.Duration(num), nil + } else { + return time.Hour * 24 * 7 * 365 * time.Duration(num), nil + } + } else { + return time.ParseDuration(interval) + } +} diff --git a/pkg/components/gtime/gtime_test.go b/pkg/components/gtime/gtime_test.go new file mode 100644 index 000000000000..e683184023fa --- /dev/null +++ b/pkg/components/gtime/gtime_test.go @@ -0,0 +1,34 @@ +package gtime + +import ( + "errors" + "fmt" + "testing" + "time" +) + +func TestParseInterval(t *testing.T) { + tcs := []struct { + interval string + duration time.Duration + err error + }{ + {interval: "1d", duration: time.Hour * 24}, + {interval: "1w", duration: time.Hour * 24 * 7}, + {interval: "1y", duration: time.Hour * 24 * 7 * 365}, + {interval: "1M", err: errors.New("time: unknown unit M in duration 1M")}, + {interval: "invalid-duration", err: errors.New("time: invalid duration invalid-duration")}, + } + + for i, tc := range tcs { + t.Run(fmt.Sprintf("testcase %d", i), func(t *testing.T) { + res, err := ParseInterval(tc.interval) + if err != nil && err.Error() != tc.err.Error() { + t.Fatalf("expected '%v' got '%v'", tc.err, err) + } + if res != tc.duration { + t.Errorf("expected %v got %v", tc.duration, res) + } + }) + } +} diff --git a/pkg/extensions/main.go b/pkg/extensions/main.go index cbe9ec2b7b07..6ee742a4d8e3 100644 --- a/pkg/extensions/main.go +++ b/pkg/extensions/main.go @@ -1,7 +1,6 @@ package extensions import ( - _ "github.com/brianvoe/gofakeit" _ "github.com/gobwas/glob" _ "github.com/robfig/cron" _ "gopkg.in/square/go-jose.v2" diff --git a/pkg/middleware/auth_proxy.go b/pkg/middleware/auth_proxy.go index 
890fd5e4f24b..9ec5852b73dc 100644 --- a/pkg/middleware/auth_proxy.go +++ b/pkg/middleware/auth_proxy.go @@ -31,6 +31,7 @@ func initContextWithAuthProxy(store *remotecache.RemoteCache, ctx *m.ReqContext, // Check if allowed to continue with this IP if result, err := auth.IsAllowedIP(); !result { + ctx.Logger.Error("auth proxy: failed to check whitelisted ip addresses", "message", err.Error(), "error", err.DetailsError) ctx.Handle(407, err.Error(), err.DetailsError) return true } @@ -38,6 +39,7 @@ func initContextWithAuthProxy(store *remotecache.RemoteCache, ctx *m.ReqContext, // Try to log in user from various providers id, err := auth.Login() if err != nil { + ctx.Logger.Error("auth proxy: failed to login", "message", err.Error(), "error", err.DetailsError) ctx.Handle(500, err.Error(), err.DetailsError) return true } @@ -45,6 +47,7 @@ func initContextWithAuthProxy(store *remotecache.RemoteCache, ctx *m.ReqContext, // Get full user info user, err := auth.GetSignedUser(id) if err != nil { + ctx.Logger.Error("auth proxy: failed to get signed in user", "message", err.Error(), "error", err.DetailsError) ctx.Handle(500, err.Error(), err.DetailsError) return true } @@ -55,6 +58,7 @@ func initContextWithAuthProxy(store *remotecache.RemoteCache, ctx *m.ReqContext, // Remember user data it in cache if err := auth.Remember(id); err != nil { + ctx.Logger.Error("auth proxy: failed to store user in cache", "message", err.Error(), "error", err.DetailsError) ctx.Handle(500, err.Error(), err.DetailsError) return true } diff --git a/pkg/models/user.go b/pkg/models/user.go index 5ced373f248f..de61150512d2 100644 --- a/pkg/models/user.go +++ b/pkg/models/user.go @@ -94,6 +94,11 @@ type DisableUserCommand struct { IsDisabled bool } +type BatchDisableUsersCommand struct { + UserIds []int64 + IsDisabled bool +} + type DeleteUserCommand struct { UserId int64 } diff --git a/pkg/plugins/datasource_plugin.go b/pkg/plugins/datasource_plugin.go index 8c846839edaf..1379daf5a6da 100644 --- 
a/pkg/plugins/datasource_plugin.go +++ b/pkg/plugins/datasource_plugin.go @@ -29,6 +29,7 @@ type DataSourcePlugin struct { BuiltIn bool `json:"builtIn,omitempty"` Mixed bool `json:"mixed,omitempty"` Routes []*AppPluginRoute `json:"routes"` + Streaming bool `json:"streaming"` Backend bool `json:"backend,omitempty"` Executable string `json:"executable,omitempty"` diff --git a/pkg/services/alerting/conditions/evaluator.go b/pkg/services/alerting/conditions/evaluator.go index eef593d39e23..3045b633f1e4 100644 --- a/pkg/services/alerting/conditions/evaluator.go +++ b/pkg/services/alerting/conditions/evaluator.go @@ -14,22 +14,25 @@ var ( rangedTypes = []string{"within_range", "outside_range"} ) +// AlertEvaluator evaluates the reduced value of a timeserie. +// Returning true if a timeserie is violating the condition +// ex: ThresholdEvaluator, NoValueEvaluator, RangeEvaluator type AlertEvaluator interface { Eval(reducedValue null.Float) bool } -type NoValueEvaluator struct{} +type noValueEvaluator struct{} -func (e *NoValueEvaluator) Eval(reducedValue null.Float) bool { +func (e *noValueEvaluator) Eval(reducedValue null.Float) bool { return !reducedValue.Valid } -type ThresholdEvaluator struct { +type thresholdEvaluator struct { Type string Threshold float64 } -func newThresholdEvaluator(typ string, model *simplejson.Json) (*ThresholdEvaluator, error) { +func newThresholdEvaluator(typ string, model *simplejson.Json) (*thresholdEvaluator, error) { params := model.Get("params").MustArray() if len(params) == 0 { return nil, fmt.Errorf("Evaluator missing threshold parameter") @@ -40,12 +43,12 @@ func newThresholdEvaluator(typ string, model *simplejson.Json) (*ThresholdEvalua return nil, fmt.Errorf("Evaluator has invalid parameter") } - defaultEval := &ThresholdEvaluator{Type: typ} + defaultEval := &thresholdEvaluator{Type: typ} defaultEval.Threshold, _ = firstParam.Float64() return defaultEval, nil } -func (e *ThresholdEvaluator) Eval(reducedValue null.Float) bool { +func 
(e *thresholdEvaluator) Eval(reducedValue null.Float) bool { if !reducedValue.Valid { return false } @@ -60,13 +63,13 @@ func (e *ThresholdEvaluator) Eval(reducedValue null.Float) bool { return false } -type RangedEvaluator struct { +type rangedEvaluator struct { Type string Lower float64 Upper float64 } -func newRangedEvaluator(typ string, model *simplejson.Json) (*RangedEvaluator, error) { +func newRangedEvaluator(typ string, model *simplejson.Json) (*rangedEvaluator, error) { params := model.Get("params").MustArray() if len(params) == 0 { return nil, alerting.ValidationError{Reason: "Evaluator missing threshold parameter"} @@ -82,13 +85,13 @@ func newRangedEvaluator(typ string, model *simplejson.Json) (*RangedEvaluator, e return nil, alerting.ValidationError{Reason: "Evaluator has invalid second parameter"} } - rangedEval := &RangedEvaluator{Type: typ} + rangedEval := &rangedEvaluator{Type: typ} rangedEval.Lower, _ = firstParam.Float64() rangedEval.Upper, _ = secondParam.Float64() return rangedEval, nil } -func (e *RangedEvaluator) Eval(reducedValue null.Float) bool { +func (e *rangedEvaluator) Eval(reducedValue null.Float) bool { if !reducedValue.Valid { return false } @@ -105,6 +108,8 @@ func (e *RangedEvaluator) Eval(reducedValue null.Float) bool { return false } +// NewAlertEvaluator is a factory function for returning +// an `AlertEvaluator` depending on the json model. 
func NewAlertEvaluator(model *simplejson.Json) (AlertEvaluator, error) { typ := model.Get("type").MustString() if typ == "" { @@ -120,7 +125,7 @@ func NewAlertEvaluator(model *simplejson.Json) (AlertEvaluator, error) { } if typ == "no_value" { - return &NoValueEvaluator{}, nil + return &noValueEvaluator{}, nil } return nil, fmt.Errorf("Evaluator invalid evaluator type: %s", typ) diff --git a/pkg/services/alerting/conditions/query.go b/pkg/services/alerting/conditions/query.go index 37dbd9b3f7a6..b29f39b49169 100644 --- a/pkg/services/alerting/conditions/query.go +++ b/pkg/services/alerting/conditions/query.go @@ -17,26 +17,31 @@ import ( func init() { alerting.RegisterCondition("query", func(model *simplejson.Json, index int) (alerting.Condition, error) { - return NewQueryCondition(model, index) + return newQueryCondition(model, index) }) } +// QueryCondition is responsible for issue and query, reduce the +// timeseries into single values and evaluate if they are firing or not. type QueryCondition struct { Index int Query AlertQuery - Reducer QueryReducer + Reducer *queryReducer Evaluator AlertEvaluator Operator string HandleRequest tsdb.HandleRequestFunc } +// AlertQuery contains information about what datasource a query +// should be sent to and the query object. type AlertQuery struct { Model *simplejson.Json - DatasourceId int64 + DatasourceID int64 From string To string } +// Eval evaluates the `QueryCondition`. 
func (c *QueryCondition) Eval(context *alerting.EvalContext) (*alerting.ConditionResult, error) { timeRange := tsdb.NewTimeRange(c.Query.From, c.Query.To) @@ -101,8 +106,8 @@ func (c *QueryCondition) Eval(context *alerting.EvalContext) (*alerting.Conditio func (c *QueryCondition) executeQuery(context *alerting.EvalContext, timeRange *tsdb.TimeRange) (tsdb.TimeSeriesSlice, error) { getDsInfo := &models.GetDataSourceByIdQuery{ - Id: c.Query.DatasourceId, - OrgId: context.Rule.OrgId, + Id: c.Query.DatasourceID, + OrgId: context.Rule.OrgID, } if err := bus.Dispatch(getDsInfo); err != nil { @@ -154,16 +159,16 @@ func (c *QueryCondition) getRequestForAlertRule(datasource *models.DataSource, t return req } -func NewQueryCondition(model *simplejson.Json, index int) (*QueryCondition, error) { +func newQueryCondition(model *simplejson.Json, index int) (*QueryCondition, error) { condition := QueryCondition{} condition.Index = index condition.HandleRequest = tsdb.HandleRequest - queryJson := model.Get("query") + queryJSON := model.Get("query") - condition.Query.Model = queryJson.Get("model") - condition.Query.From = queryJson.Get("params").MustArray()[1].(string) - condition.Query.To = queryJson.Get("params").MustArray()[2].(string) + condition.Query.Model = queryJSON.Get("model") + condition.Query.From = queryJSON.Get("params").MustArray()[1].(string) + condition.Query.To = queryJSON.Get("params").MustArray()[2].(string) if err := validateFromValue(condition.Query.From); err != nil { return nil, err @@ -173,20 +178,20 @@ func NewQueryCondition(model *simplejson.Json, index int) (*QueryCondition, erro return nil, err } - condition.Query.DatasourceId = queryJson.Get("datasourceId").MustInt64() + condition.Query.DatasourceID = queryJSON.Get("datasourceId").MustInt64() - reducerJson := model.Get("reducer") - condition.Reducer = NewSimpleReducer(reducerJson.Get("type").MustString()) + reducerJSON := model.Get("reducer") + condition.Reducer = 
newSimpleReducer(reducerJSON.Get("type").MustString()) - evaluatorJson := model.Get("evaluator") - evaluator, err := NewAlertEvaluator(evaluatorJson) + evaluatorJSON := model.Get("evaluator") + evaluator, err := NewAlertEvaluator(evaluatorJSON) if err != nil { return nil, err } condition.Evaluator = evaluator - operatorJson := model.Get("operator") - operator := operatorJson.Get("type").MustString("and") + operatorJSON := model.Get("operator") + operator := operatorJSON.Get("type").MustString("and") condition.Operator = operator return &condition, nil diff --git a/pkg/services/alerting/conditions/query_test.go b/pkg/services/alerting/conditions/query_test.go index 2e1ecf5f39c5..4c2b1689277a 100644 --- a/pkg/services/alerting/conditions/query_test.go +++ b/pkg/services/alerting/conditions/query_test.go @@ -27,16 +27,15 @@ func TestQueryCondition(t *testing.T) { So(ctx.condition.Query.From, ShouldEqual, "5m") So(ctx.condition.Query.To, ShouldEqual, "now") - So(ctx.condition.Query.DatasourceId, ShouldEqual, 1) + So(ctx.condition.Query.DatasourceID, ShouldEqual, 1) Convey("Can read query reducer", func() { - reducer, ok := ctx.condition.Reducer.(*SimpleReducer) - So(ok, ShouldBeTrue) + reducer := ctx.condition.Reducer So(reducer.Type, ShouldEqual, "avg") }) Convey("Can read evaluator", func() { - evaluator, ok := ctx.condition.Evaluator.(*ThresholdEvaluator) + evaluator, ok := ctx.condition.Evaluator.(*thresholdEvaluator) So(ok, ShouldBeTrue) So(evaluator.Type, ShouldEqual, "gt") }) @@ -163,7 +162,7 @@ func (ctx *queryConditionTestContext) exec() (*alerting.ConditionResult, error) }`)) So(err, ShouldBeNil) - condition, err := NewQueryCondition(jsonModel, 0) + condition, err := newQueryCondition(jsonModel, 0) So(err, ShouldBeNil) ctx.condition = condition diff --git a/pkg/services/alerting/conditions/reducer.go b/pkg/services/alerting/conditions/reducer.go index f55545be311f..bf57110ea1c9 100644 --- a/pkg/services/alerting/conditions/reducer.go +++ 
b/pkg/services/alerting/conditions/reducer.go @@ -9,15 +9,15 @@ import ( "github.com/grafana/grafana/pkg/tsdb" ) -type QueryReducer interface { - Reduce(timeSeries *tsdb.TimeSeries) null.Float -} +// queryReducer reduces an timeserie to a nullable float +type queryReducer struct { -type SimpleReducer struct { + // Type is how the timeserie should be reduced. + // Ex avg, sum, max, min, count Type string } -func (s *SimpleReducer) Reduce(series *tsdb.TimeSeries) null.Float { +func (s *queryReducer) Reduce(series *tsdb.TimeSeries) null.Float { if len(series.Points) == 0 { return null.FloatFromPtr(nil) } @@ -31,7 +31,7 @@ func (s *SimpleReducer) Reduce(series *tsdb.TimeSeries) null.Float { for _, point := range series.Points { if point[0].Valid { value += point[0].Float64 - validPointsCount += 1 + validPointsCount++ allNull = false } } @@ -117,8 +117,8 @@ func (s *SimpleReducer) Reduce(series *tsdb.TimeSeries) null.Float { return null.FloatFrom(value) } -func NewSimpleReducer(typ string) *SimpleReducer { - return &SimpleReducer{Type: typ} +func newSimpleReducer(t string) *queryReducer { + return &queryReducer{Type: t} } func calculateDiff(series *tsdb.TimeSeries, allNull bool, value float64, fn func(float64, float64) float64) (bool, float64) { diff --git a/pkg/services/alerting/conditions/reducer_test.go b/pkg/services/alerting/conditions/reducer_test.go index d2c21771d0b1..eac71378f3d7 100644 --- a/pkg/services/alerting/conditions/reducer_test.go +++ b/pkg/services/alerting/conditions/reducer_test.go @@ -53,7 +53,7 @@ func TestSimpleReducer(t *testing.T) { }) Convey("median should ignore null values", func() { - reducer := NewSimpleReducer("median") + reducer := newSimpleReducer("median") series := &tsdb.TimeSeries{ Name: "test time serie", } @@ -76,7 +76,7 @@ func TestSimpleReducer(t *testing.T) { }) Convey("avg with only nulls", func() { - reducer := NewSimpleReducer("avg") + reducer := newSimpleReducer("avg") series := &tsdb.TimeSeries{ Name: "test time serie", } 
@@ -87,7 +87,7 @@ func TestSimpleReducer(t *testing.T) { Convey("count_non_null", func() { Convey("with null values and real values", func() { - reducer := NewSimpleReducer("count_non_null") + reducer := newSimpleReducer("count_non_null") series := &tsdb.TimeSeries{ Name: "test time serie", } @@ -102,7 +102,7 @@ func TestSimpleReducer(t *testing.T) { }) Convey("with null values", func() { - reducer := NewSimpleReducer("count_non_null") + reducer := newSimpleReducer("count_non_null") series := &tsdb.TimeSeries{ Name: "test time serie", } @@ -115,7 +115,7 @@ func TestSimpleReducer(t *testing.T) { }) Convey("avg of number values and null values should ignore nulls", func() { - reducer := NewSimpleReducer("avg") + reducer := newSimpleReducer("avg") series := &tsdb.TimeSeries{ Name: "test time serie", } @@ -144,7 +144,7 @@ func TestSimpleReducer(t *testing.T) { }) Convey("diff with only nulls", func() { - reducer := NewSimpleReducer("diff") + reducer := newSimpleReducer("diff") series := &tsdb.TimeSeries{ Name: "test time serie", } @@ -171,7 +171,7 @@ func TestSimpleReducer(t *testing.T) { }) Convey("percent_diff with only nulls", func() { - reducer := NewSimpleReducer("percent_diff") + reducer := newSimpleReducer("percent_diff") series := &tsdb.TimeSeries{ Name: "test time serie", } @@ -184,8 +184,8 @@ func TestSimpleReducer(t *testing.T) { }) } -func testReducer(typ string, datapoints ...float64) float64 { - reducer := NewSimpleReducer(typ) +func testReducer(reducerType string, datapoints ...float64) float64 { + reducer := newSimpleReducer(reducerType) series := &tsdb.TimeSeries{ Name: "test time serie", } diff --git a/pkg/services/alerting/engine.go b/pkg/services/alerting/engine.go index f2fd002704ac..8794eb51c325 100644 --- a/pkg/services/alerting/engine.go +++ b/pkg/services/alerting/engine.go @@ -17,10 +17,10 @@ import ( "golang.org/x/sync/errgroup" ) -// AlertingService is the background process that +// AlertEngine is the background process that // schedules 
alert evaluations and makes sure notifications // are sent. -type AlertingService struct { +type AlertEngine struct { RenderService rendering.Service `inject:""` execQueue chan *Job @@ -33,16 +33,16 @@ type AlertingService struct { } func init() { - registry.RegisterService(&AlertingService{}) + registry.RegisterService(&AlertEngine{}) } // IsDisabled returns true if the alerting service is disable for this instance. -func (e *AlertingService) IsDisabled() bool { +func (e *AlertEngine) IsDisabled() bool { return !setting.AlertingEnabled || !setting.ExecuteAlerts } // Init initalizes the AlertingService. -func (e *AlertingService) Init() error { +func (e *AlertEngine) Init() error { e.ticker = NewTicker(time.Now(), time.Second*0, clock.New()) e.execQueue = make(chan *Job, 1000) e.scheduler = newScheduler() @@ -54,7 +54,7 @@ func (e *AlertingService) Init() error { } // Run starts the alerting service background process. -func (e *AlertingService) Run(ctx context.Context) error { +func (e *AlertEngine) Run(ctx context.Context) error { alertGroup, ctx := errgroup.WithContext(ctx) alertGroup.Go(func() error { return e.alertingTicker(ctx) }) alertGroup.Go(func() error { return e.runJobDispatcher(ctx) }) @@ -63,7 +63,7 @@ func (e *AlertingService) Run(ctx context.Context) error { return err } -func (e *AlertingService) alertingTicker(grafanaCtx context.Context) error { +func (e *AlertEngine) alertingTicker(grafanaCtx context.Context) error { defer func() { if err := recover(); err != nil { e.log.Error("Scheduler Panic: stopping alertingTicker", "error", err, "stack", log.Stack(1)) @@ -88,7 +88,7 @@ func (e *AlertingService) alertingTicker(grafanaCtx context.Context) error { } } -func (e *AlertingService) runJobDispatcher(grafanaCtx context.Context) error { +func (e *AlertEngine) runJobDispatcher(grafanaCtx context.Context) error { dispatcherGroup, alertCtx := errgroup.WithContext(grafanaCtx) for { @@ -105,7 +105,7 @@ var ( unfinishedWorkTimeout = time.Second * 5 ) -func 
(e *AlertingService) processJobWithRetry(grafanaCtx context.Context, job *Job) error { +func (e *AlertEngine) processJobWithRetry(grafanaCtx context.Context, job *Job) error { defer func() { if err := recover(); err != nil { e.log.Error("Alert Panic", "error", err, "stack", log.Stack(1)) @@ -140,7 +140,7 @@ func (e *AlertingService) processJobWithRetry(grafanaCtx context.Context, job *J } } -func (e *AlertingService) endJob(err error, cancelChan chan context.CancelFunc, job *Job) error { +func (e *AlertEngine) endJob(err error, cancelChan chan context.CancelFunc, job *Job) error { job.Running = false close(cancelChan) for cancelFn := range cancelChan { @@ -149,7 +149,7 @@ func (e *AlertingService) endJob(err error, cancelChan chan context.CancelFunc, return err } -func (e *AlertingService) processJob(attemptID int, attemptChan chan int, cancelChan chan context.CancelFunc, job *Job) { +func (e *AlertEngine) processJob(attemptID int, attemptChan chan int, cancelChan chan context.CancelFunc, job *Job) { defer func() { if err := recover(); err != nil { e.log.Error("Alert Panic", "error", err, "stack", log.Stack(1)) @@ -180,8 +180,8 @@ func (e *AlertingService) processJob(attemptID int, attemptChan chan int, cancel e.evalHandler.Eval(evalContext) - span.SetTag("alertId", evalContext.Rule.Id) - span.SetTag("dashboardId", evalContext.Rule.DashboardId) + span.SetTag("alertId", evalContext.Rule.ID) + span.SetTag("dashboardId", evalContext.Rule.DashboardID) span.SetTag("firing", evalContext.Firing) span.SetTag("nodatapoints", evalContext.NoDataFound) span.SetTag("attemptID", attemptID) @@ -194,7 +194,7 @@ func (e *AlertingService) processJob(attemptID int, attemptChan chan int, cancel ) if attemptID < setting.AlertingMaxAttempts { span.Finish() - e.log.Debug("Job Execution attempt triggered retry", "timeMs", evalContext.GetDurationMs(), "alertId", evalContext.Rule.Id, "name", evalContext.Rule.Name, "firing", evalContext.Firing, "attemptID", attemptID) + e.log.Debug("Job 
Execution attempt triggered retry", "timeMs", evalContext.GetDurationMs(), "alertId", evalContext.Rule.ID, "name", evalContext.Rule.Name, "firing", evalContext.Firing, "attemptID", attemptID) attemptChan <- (attemptID + 1) return } @@ -212,7 +212,7 @@ func (e *AlertingService) processJob(attemptID int, attemptChan chan int, cancel evalContext.Rule.State = evalContext.GetNewState() e.resultHandler.handle(evalContext) span.Finish() - e.log.Debug("Job Execution completed", "timeMs", evalContext.GetDurationMs(), "alertId", evalContext.Rule.Id, "name", evalContext.Rule.Name, "firing", evalContext.Firing, "attemptID", attemptID) + e.log.Debug("Job Execution completed", "timeMs", evalContext.GetDurationMs(), "alertId", evalContext.Rule.ID, "name", evalContext.Rule.Name, "firing", evalContext.Firing, "attemptID", attemptID) close(attemptChan) }() } diff --git a/pkg/services/alerting/engine_integration_test.go b/pkg/services/alerting/engine_integration_test.go index 7d0d3360ad5a..6b6fab389d02 100644 --- a/pkg/services/alerting/engine_integration_test.go +++ b/pkg/services/alerting/engine_integration_test.go @@ -17,7 +17,7 @@ import ( func TestEngineTimeouts(t *testing.T) { Convey("Alerting engine timeout tests", t, func() { - engine := &AlertingService{} + engine := &AlertEngine{} engine.Init() setting.AlertingNotificationTimeout = 30 * time.Second setting.AlertingMaxAttempts = 3 diff --git a/pkg/services/alerting/engine_test.go b/pkg/services/alerting/engine_test.go index 4ed317d982f4..86980c21bd48 100644 --- a/pkg/services/alerting/engine_test.go +++ b/pkg/services/alerting/engine_test.go @@ -39,7 +39,7 @@ func (handler *FakeResultHandler) handle(evalContext *EvalContext) error { func TestEngineProcessJob(t *testing.T) { Convey("Alerting engine job processing", t, func() { - engine := &AlertingService{} + engine := &AlertEngine{} engine.Init() setting.AlertingEvaluationTimeout = 30 * time.Second setting.AlertingNotificationTimeout = 30 * time.Second diff --git 
a/pkg/services/alerting/eval_context.go b/pkg/services/alerting/eval_context.go index 480303fa6d71..8436e9c9a780 100644 --- a/pkg/services/alerting/eval_context.go +++ b/pkg/services/alerting/eval_context.go @@ -26,7 +26,7 @@ type EvalContext struct { dashboardRef *models.DashboardRef - ImagePublicUrl string + ImagePublicURL string ImageOnDiskPath string NoDataFound bool PrevAlertState models.AlertStateType @@ -102,7 +102,7 @@ func (c *EvalContext) GetDashboardUID() (*models.DashboardRef, error) { return c.dashboardRef, nil } - uidQuery := &models.GetDashboardRefByIdQuery{Id: c.Rule.DashboardId} + uidQuery := &models.GetDashboardRefByIdQuery{Id: c.Rule.DashboardID} if err := bus.Dispatch(uidQuery); err != nil { return nil, err } @@ -113,8 +113,8 @@ func (c *EvalContext) GetDashboardUID() (*models.DashboardRef, error) { const urlFormat = "%s?fullscreen&edit&tab=alert&panelId=%d&orgId=%d" -// GetRuleUrl returns the url to the dashboard containing the alert. -func (c *EvalContext) GetRuleUrl() (string, error) { +// GetRuleURL returns the url to the dashboard containing the alert. +func (c *EvalContext) GetRuleURL() (string, error) { if c.IsTestRun { return setting.AppUrl, nil } @@ -123,7 +123,7 @@ func (c *EvalContext) GetRuleUrl() (string, error) { if err != nil { return "", err } - return fmt.Sprintf(urlFormat, models.GetFullDashboardUrl(ref.Uid, ref.Slug), c.Rule.PanelId, c.Rule.OrgId), nil + return fmt.Sprintf(urlFormat, models.GetFullDashboardUrl(ref.Uid, ref.Slug), c.Rule.PanelID, c.Rule.OrgID), nil } // GetNewState returns the new state from the alert rule evaluation. 
@@ -148,7 +148,7 @@ func (c *EvalContext) GetNewState() models.AlertStateType { func getNewStateInternal(c *EvalContext) models.AlertStateType { if c.Error != nil { c.log.Error("Alert Rule Result Error", - "ruleId", c.Rule.Id, + "ruleId", c.Rule.ID, "name", c.Rule.Name, "error", c.Error, "changing state to", c.Rule.ExecutionErrorState.ToAlertState()) @@ -165,7 +165,7 @@ func getNewStateInternal(c *EvalContext) models.AlertStateType { if c.NoDataFound { c.log.Info("Alert Rule returned no data", - "ruleId", c.Rule.Id, + "ruleId", c.Rule.ID, "name", c.Rule.Name, "changing state to", c.Rule.NoDataState.ToAlertState()) diff --git a/pkg/services/alerting/interfaces.go b/pkg/services/alerting/interfaces.go index be364d6f4cc2..93d3127d6bdd 100644 --- a/pkg/services/alerting/interfaces.go +++ b/pkg/services/alerting/interfaces.go @@ -25,7 +25,7 @@ type Notifier interface { // ShouldNotify checks this evaluation should send an alert notification ShouldNotify(ctx context.Context, evalContext *EvalContext, notificationState *models.AlertNotificationState) bool - GetNotifierUid() string + GetNotifierUID() string GetIsDefault() bool GetSendReminder() bool GetDisableResolveMessage() bool diff --git a/pkg/services/alerting/notifier.go b/pkg/services/alerting/notifier.go index de6d74239ae7..84a26e9a64a0 100644 --- a/pkg/services/alerting/notifier.go +++ b/pkg/services/alerting/notifier.go @@ -35,7 +35,7 @@ type notificationService struct { } func (n *notificationService) SendIfNeeded(context *EvalContext) error { - notifierStates, err := n.getNeededNotifiers(context.Rule.OrgId, context.Rule.Notifications, context) + notifierStates, err := n.getNeededNotifiers(context.Rule.OrgID, context.Rule.Notifications, context) if err != nil { return err } @@ -56,13 +56,13 @@ func (n *notificationService) SendIfNeeded(context *EvalContext) error { func (n *notificationService) sendAndMarkAsComplete(evalContext *EvalContext, notifierState *notifierState) error { notifier := 
notifierState.notifier - n.log.Debug("Sending notification", "type", notifier.GetType(), "uid", notifier.GetNotifierUid(), "isDefault", notifier.GetIsDefault()) + n.log.Debug("Sending notification", "type", notifier.GetType(), "uid", notifier.GetNotifierUID(), "isDefault", notifier.GetIsDefault()) metrics.M_Alerting_Notification_Sent.WithLabelValues(notifier.GetType()).Inc() err := notifier.Notify(evalContext) if err != nil { - n.log.Error("failed to send notification", "uid", notifier.GetNotifierUid(), "error", err) + n.log.Error("failed to send notification", "uid", notifier.GetNotifierUID(), "error", err) } if evalContext.IsTestRun { @@ -106,7 +106,7 @@ func (n *notificationService) sendNotifications(evalContext *EvalContext, notifi for _, notifierState := range notifierStates { err := n.sendNotification(evalContext, notifierState) if err != nil { - n.log.Error("failed to send notification", "uid", notifierState.notifier.GetNotifierUid(), "error", err) + n.log.Error("failed to send notification", "uid", notifierState.notifier.GetNotifierUID(), "error", err) } } @@ -123,7 +123,7 @@ func (n *notificationService) uploadImage(context *EvalContext) (err error) { Width: 1000, Height: 500, Timeout: setting.AlertingEvaluationTimeout, - OrgId: context.Rule.OrgId, + OrgId: context.Rule.OrgID, OrgRole: models.ROLE_ADMIN, ConcurrentLimit: setting.AlertingRenderLimit, } @@ -133,7 +133,7 @@ func (n *notificationService) uploadImage(context *EvalContext) (err error) { return err } - renderOpts.Path = fmt.Sprintf("d-solo/%s/%s?orgId=%d&panelId=%d", ref.Uid, ref.Slug, context.Rule.OrgId, context.Rule.PanelId) + renderOpts.Path = fmt.Sprintf("d-solo/%s/%s?orgId=%d&panelId=%d", ref.Uid, ref.Slug, context.Rule.OrgID, context.Rule.PanelID) result, err := n.renderService.Render(context.Ctx, renderOpts) if err != nil { @@ -141,13 +141,13 @@ func (n *notificationService) uploadImage(context *EvalContext) (err error) { } context.ImageOnDiskPath = result.FilePath - 
context.ImagePublicUrl, err = uploader.Upload(context.Ctx, context.ImageOnDiskPath) + context.ImagePublicURL, err = uploader.Upload(context.Ctx, context.ImageOnDiskPath) if err != nil { return err } - if context.ImagePublicUrl != "" { - n.log.Info("uploaded screenshot of alert to external image store", "url", context.ImagePublicUrl) + if context.ImagePublicURL != "" { + n.log.Info("uploaded screenshot of alert to external image store", "url", context.ImagePublicURL) } return nil @@ -170,8 +170,8 @@ func (n *notificationService) getNeededNotifiers(orgID int64, notificationUids [ query := &models.GetOrCreateNotificationStateQuery{ NotifierId: notification.Id, - AlertId: evalContext.Rule.Id, - OrgId: evalContext.Rule.OrgId, + AlertId: evalContext.Rule.ID, + OrgId: evalContext.Rule.OrgID, } err = bus.DispatchCtx(evalContext.Ctx, query) diff --git a/pkg/services/alerting/notifiers/alertmanager.go b/pkg/services/alerting/notifiers/alertmanager.go index bc2807d0d3cd..a8fd7db2f5ee 100644 --- a/pkg/services/alerting/notifiers/alertmanager.go +++ b/pkg/services/alerting/notifiers/alertmanager.go @@ -51,7 +51,7 @@ type AlertmanagerNotifier struct { // ShouldNotify returns true if the notifiers should be used depending on state func (am *AlertmanagerNotifier) ShouldNotify(ctx context.Context, evalContext *alerting.EvalContext, notificationState *models.AlertNotificationState) bool { - am.log.Debug("Should notify", "ruleId", evalContext.Rule.Id, "state", evalContext.Rule.State, "previousState", evalContext.PrevAlertState) + am.log.Debug("Should notify", "ruleId", evalContext.Rule.ID, "state", evalContext.Rule.State, "previousState", evalContext.PrevAlertState) // Do not notify when we become OK for the first time. 
if (evalContext.PrevAlertState == models.AlertStatePending) && (evalContext.Rule.State == models.AlertStateOK) { @@ -89,8 +89,8 @@ func (am *AlertmanagerNotifier) createAlert(evalContext *alerting.EvalContext, m if description != "" { alertJSON.SetPath([]string{"annotations", "description"}, description) } - if evalContext.ImagePublicUrl != "" { - alertJSON.SetPath([]string{"annotations", "image"}, evalContext.ImagePublicUrl) + if evalContext.ImagePublicURL != "" { + alertJSON.SetPath([]string{"annotations", "image"}, evalContext.ImagePublicURL) } // Labels (from metrics tags + mandatory alertname). @@ -111,9 +111,9 @@ func (am *AlertmanagerNotifier) createAlert(evalContext *alerting.EvalContext, m // Notify sends alert notifications to the alert manager func (am *AlertmanagerNotifier) Notify(evalContext *alerting.EvalContext) error { - am.log.Info("Sending Alertmanager alert", "ruleId", evalContext.Rule.Id, "notification", am.Name) + am.log.Info("Sending Alertmanager alert", "ruleId", evalContext.Rule.ID, "notification", am.Name) - ruleURL, err := evalContext.GetRuleUrl() + ruleURL, err := evalContext.GetRuleURL() if err != nil { am.log.Error("Failed get rule link", "error", err) return err diff --git a/pkg/services/alerting/notifiers/base.go b/pkg/services/alerting/notifiers/base.go index 3ebe23c2d1b3..f31c8b36d9c0 100644 --- a/pkg/services/alerting/notifiers/base.go +++ b/pkg/services/alerting/notifiers/base.go @@ -120,8 +120,8 @@ func (n *NotifierBase) NeedsImage() bool { return n.UploadImage } -// GetNotifierUid returns the notifier `uid`. -func (n *NotifierBase) GetNotifierUid() string { +// GetNotifierUID returns the notifier `uid`. 
+func (n *NotifierBase) GetNotifierUID() string { return n.UID } diff --git a/pkg/services/alerting/notifiers/dingding.go b/pkg/services/alerting/notifiers/dingding.go index a418adc7e651..fc8ce477ecb9 100644 --- a/pkg/services/alerting/notifiers/dingding.go +++ b/pkg/services/alerting/notifiers/dingding.go @@ -64,7 +64,7 @@ type DingDingNotifier struct { func (dd *DingDingNotifier) Notify(evalContext *alerting.EvalContext) error { dd.log.Info("Sending dingding") - messageURL, err := evalContext.GetRuleUrl() + messageURL, err := evalContext.GetRuleURL() if err != nil { dd.log.Error("Failed to get messageUrl", "error", err, "dingding", dd.Name) messageURL = "" @@ -82,7 +82,7 @@ func (dd *DingDingNotifier) Notify(evalContext *alerting.EvalContext) error { dd.log.Info("messageUrl:" + messageURL) message := evalContext.Rule.Message - picURL := evalContext.ImagePublicUrl + picURL := evalContext.ImagePublicURL title := evalContext.GetNotificationTitle() if message == "" { message = title diff --git a/pkg/services/alerting/notifiers/discord.go b/pkg/services/alerting/notifiers/discord.go index 160c76528dd9..e011ec0c3e93 100644 --- a/pkg/services/alerting/notifiers/discord.go +++ b/pkg/services/alerting/notifiers/discord.go @@ -21,7 +21,7 @@ func init() { Type: "discord", Name: "Discord", Description: "Sends notifications to Discord", - Factory: NewDiscordNotifier, + Factory: newDiscordNotifier, OptionsTemplate: `

Discord settings

@@ -43,7 +43,7 @@ func init() { }) } -func NewDiscordNotifier(model *models.AlertNotification) (alerting.Notifier, error) { +func newDiscordNotifier(model *models.AlertNotification) (alerting.Notifier, error) { content := model.Settings.Get("content").MustString() url := model.Settings.Get("url").MustString() if url == "" { @@ -58,6 +58,8 @@ func NewDiscordNotifier(model *models.AlertNotification) (alerting.Notifier, err }, nil } +// DiscordNotifier is responsible for sending alert +// notifications to discord. type DiscordNotifier struct { NotifierBase Content string @@ -65,20 +67,21 @@ type DiscordNotifier struct { log log.Logger } -func (this *DiscordNotifier) Notify(evalContext *alerting.EvalContext) error { - this.log.Info("Sending alert notification to", "webhook_url", this.WebhookURL) +// Notify send an alert notification to Discord. +func (dn *DiscordNotifier) Notify(evalContext *alerting.EvalContext) error { + dn.log.Info("Sending alert notification to", "webhook_url", dn.WebhookURL) - ruleUrl, err := evalContext.GetRuleUrl() + ruleURL, err := evalContext.GetRuleURL() if err != nil { - this.log.Error("Failed get rule link", "error", err) + dn.log.Error("Failed get rule link", "error", err) return err } bodyJSON := simplejson.New() bodyJSON.Set("username", "Grafana") - if this.Content != "" { - bodyJSON.Set("content", this.Content) + if dn.Content != "" { + bodyJSON.Set("content", dn.Content) } fields := make([]map[string]interface{}, 0) @@ -103,7 +106,7 @@ func (this *DiscordNotifier) Notify(evalContext *alerting.EvalContext) error { embed.Set("title", evalContext.GetNotificationTitle()) //Discord takes integer for color embed.Set("color", color) - embed.Set("url", ruleUrl) + embed.Set("url", ruleURL) embed.Set("description", evalContext.Rule.Message) embed.Set("type", "rich") embed.Set("fields", fields) @@ -112,9 +115,9 @@ func (this *DiscordNotifier) Notify(evalContext *alerting.EvalContext) error { var image map[string]interface{} var embeddedImage = 
false - if evalContext.ImagePublicUrl != "" { + if evalContext.ImagePublicURL != "" { image = map[string]interface{}{ - "url": evalContext.ImagePublicUrl, + "url": evalContext.ImagePublicURL, } embed.Set("image", image) } else { @@ -130,7 +133,7 @@ func (this *DiscordNotifier) Notify(evalContext *alerting.EvalContext) error { json, _ := bodyJSON.MarshalJSON() cmd := &models.SendWebhookSync{ - Url: this.WebhookURL, + Url: dn.WebhookURL, HttpMethod: "POST", ContentType: "application/json", } @@ -138,22 +141,22 @@ func (this *DiscordNotifier) Notify(evalContext *alerting.EvalContext) error { if !embeddedImage { cmd.Body = string(json) } else { - err := this.embedImage(cmd, evalContext.ImageOnDiskPath, json) + err := dn.embedImage(cmd, evalContext.ImageOnDiskPath, json) if err != nil { - this.log.Error("failed to embed image", "error", err) + dn.log.Error("failed to embed image", "error", err) return err } } if err := bus.DispatchCtx(evalContext.Ctx, cmd); err != nil { - this.log.Error("Failed to send notification to Discord", "error", err) + dn.log.Error("Failed to send notification to Discord", "error", err) return err } return nil } -func (this *DiscordNotifier) embedImage(cmd *models.SendWebhookSync, imagePath string, existingJSONBody []byte) error { +func (dn *DiscordNotifier) embedImage(cmd *models.SendWebhookSync, imagePath string, existingJSONBody []byte) error { f, err := os.Open(imagePath) defer f.Close() if err != nil { diff --git a/pkg/services/alerting/notifiers/discord_test.go b/pkg/services/alerting/notifiers/discord_test.go index 5fe700245e23..d1cbff6b859a 100644 --- a/pkg/services/alerting/notifiers/discord_test.go +++ b/pkg/services/alerting/notifiers/discord_test.go @@ -22,7 +22,7 @@ func TestDiscordNotifier(t *testing.T) { Settings: settingsJSON, } - _, err := NewDiscordNotifier(model) + _, err := newDiscordNotifier(model) So(err, ShouldNotBeNil) }) @@ -40,7 +40,7 @@ func TestDiscordNotifier(t *testing.T) { Settings: settingsJSON, } - not, err := 
NewDiscordNotifier(model) + not, err := newDiscordNotifier(model) discordNotifier := not.(*DiscordNotifier) So(err, ShouldBeNil) diff --git a/pkg/services/alerting/notifiers/email.go b/pkg/services/alerting/notifiers/email.go index 44a6b97653ea..5d3422e608b5 100644 --- a/pkg/services/alerting/notifiers/email.go +++ b/pkg/services/alerting/notifiers/email.go @@ -67,7 +67,7 @@ func NewEmailNotifier(model *models.AlertNotification) (alerting.Notifier, error func (en *EmailNotifier) Notify(evalContext *alerting.EvalContext) error { en.log.Info("Sending alert notification to", "addresses", en.Addresses) - ruleURL, err := evalContext.GetRuleUrl() + ruleURL, err := evalContext.GetRuleURL() if err != nil { en.log.Error("Failed get rule link", "error", err) return err @@ -100,8 +100,8 @@ func (en *EmailNotifier) Notify(evalContext *alerting.EvalContext) error { }, } - if evalContext.ImagePublicUrl != "" { - cmd.Data["ImageLink"] = evalContext.ImagePublicUrl + if evalContext.ImagePublicURL != "" { + cmd.Data["ImageLink"] = evalContext.ImagePublicURL } else { file, err := os.Stat(evalContext.ImageOnDiskPath) if err == nil { diff --git a/pkg/services/alerting/notifiers/googlechat.go b/pkg/services/alerting/notifiers/googlechat.go index a7e452991b04..2d81787fb916 100644 --- a/pkg/services/alerting/notifiers/googlechat.go +++ b/pkg/services/alerting/notifiers/googlechat.go @@ -120,7 +120,7 @@ func (gcn *GoogleChatNotifier) Notify(evalContext *alerting.EvalContext) error { "Content-Type": "application/json; charset=UTF-8", } - ruleURL, err := evalContext.GetRuleUrl() + ruleURL, err := evalContext.GetRuleURL() if err != nil { gcn.log.Error("evalContext returned an invalid rule URL") } @@ -152,10 +152,10 @@ func (gcn *GoogleChatNotifier) Notify(evalContext *alerting.EvalContext) error { widgets = append(widgets, fields) // if an image exists, add it as an image widget - if evalContext.ImagePublicUrl != "" { + if evalContext.ImagePublicURL != "" { widgets = append(widgets, 
imageWidget{ Image: image{ - ImageURL: evalContext.ImagePublicUrl, + ImageURL: evalContext.ImagePublicURL, }, }) } else { diff --git a/pkg/services/alerting/notifiers/hipchat.go b/pkg/services/alerting/notifiers/hipchat.go index e817fe9a076c..2e8be00576bb 100644 --- a/pkg/services/alerting/notifiers/hipchat.go +++ b/pkg/services/alerting/notifiers/hipchat.go @@ -81,9 +81,9 @@ type HipChatNotifier struct { // Notify sends an alert notification to HipChat func (hc *HipChatNotifier) Notify(evalContext *alerting.EvalContext) error { - hc.log.Info("Executing hipchat notification", "ruleId", evalContext.Rule.Id, "notification", hc.Name) + hc.log.Info("Executing hipchat notification", "ruleId", evalContext.Rule.ID, "notification", hc.Name) - ruleURL, err := evalContext.GetRuleUrl() + ruleURL, err := evalContext.GetRuleURL() if err != nil { hc.log.Error("Failed get rule link", "error", err) return err @@ -148,10 +148,10 @@ func (hc *HipChatNotifier) Notify(evalContext *alerting.EvalContext) error { "date": evalContext.EndTime.Unix(), "attributes": attributes, } - if evalContext.ImagePublicUrl != "" { + if evalContext.ImagePublicURL != "" { card["thumbnail"] = map[string]interface{}{ - "url": evalContext.ImagePublicUrl, - "url@2x": evalContext.ImagePublicUrl, + "url": evalContext.ImagePublicURL, + "url@2x": evalContext.ImagePublicURL, "width": 1193, "height": 564, } diff --git a/pkg/services/alerting/notifiers/kafka.go b/pkg/services/alerting/notifiers/kafka.go index 9761adf2f6ae..ed795453c42a 100644 --- a/pkg/services/alerting/notifiers/kafka.go +++ b/pkg/services/alerting/notifiers/kafka.go @@ -78,20 +78,20 @@ func (kn *KafkaNotifier) Notify(evalContext *alerting.EvalContext) error { bodyJSON.Set("description", evalContext.Rule.Name+" - "+evalContext.Rule.Message) bodyJSON.Set("client", "Grafana") bodyJSON.Set("details", customData) - bodyJSON.Set("incident_key", "alertId-"+strconv.FormatInt(evalContext.Rule.Id, 10)) + bodyJSON.Set("incident_key", 
"alertId-"+strconv.FormatInt(evalContext.Rule.ID, 10)) - ruleURL, err := evalContext.GetRuleUrl() + ruleURL, err := evalContext.GetRuleURL() if err != nil { kn.log.Error("Failed get rule link", "error", err) return err } bodyJSON.Set("client_url", ruleURL) - if evalContext.ImagePublicUrl != "" { + if evalContext.ImagePublicURL != "" { contexts := make([]interface{}, 1) imageJSON := simplejson.New() imageJSON.Set("type", "image") - imageJSON.Set("src", evalContext.ImagePublicUrl) + imageJSON.Set("src", evalContext.ImagePublicURL) contexts[0] = imageJSON bodyJSON.Set("contexts", contexts) } diff --git a/pkg/services/alerting/notifiers/line.go b/pkg/services/alerting/notifiers/line.go index 6b84ba8f091e..2048495b6465 100644 --- a/pkg/services/alerting/notifiers/line.go +++ b/pkg/services/alerting/notifiers/line.go @@ -56,7 +56,7 @@ type LineNotifier struct { // Notify send an alert notification to LINE func (ln *LineNotifier) Notify(evalContext *alerting.EvalContext) error { - ln.log.Info("Executing line notification", "ruleId", evalContext.Rule.Id, "notification", ln.Name) + ln.log.Info("Executing line notification", "ruleId", evalContext.Rule.ID, "notification", ln.Name) var err error switch evalContext.Rule.State { @@ -67,8 +67,8 @@ func (ln *LineNotifier) Notify(evalContext *alerting.EvalContext) error { } func (ln *LineNotifier) createAlert(evalContext *alerting.EvalContext) error { - ln.log.Info("Creating Line notify", "ruleId", evalContext.Rule.Id, "notification", ln.Name) - ruleURL, err := evalContext.GetRuleUrl() + ln.log.Info("Creating Line notify", "ruleId", evalContext.Rule.ID, "notification", ln.Name) + ruleURL, err := evalContext.GetRuleURL() if err != nil { ln.log.Error("Failed get rule link", "error", err) return err @@ -78,9 +78,9 @@ func (ln *LineNotifier) createAlert(evalContext *alerting.EvalContext) error { body := fmt.Sprintf("%s - %s\n%s", evalContext.Rule.Name, ruleURL, evalContext.Rule.Message) form.Add("message", body) - if 
evalContext.ImagePublicUrl != "" { - form.Add("imageThumbnail", evalContext.ImagePublicUrl) - form.Add("imageFullsize", evalContext.ImagePublicUrl) + if evalContext.ImagePublicURL != "" { + form.Add("imageThumbnail", evalContext.ImagePublicURL) + form.Add("imageFullsize", evalContext.ImagePublicURL) } cmd := &models.SendWebhookSync{ diff --git a/pkg/services/alerting/notifiers/opsgenie.go b/pkg/services/alerting/notifiers/opsgenie.go index 3adcdbc74ade..833927dee9f5 100644 --- a/pkg/services/alerting/notifiers/opsgenie.go +++ b/pkg/services/alerting/notifiers/opsgenie.go @@ -90,9 +90,9 @@ func (on *OpsGenieNotifier) Notify(evalContext *alerting.EvalContext) error { } func (on *OpsGenieNotifier) createAlert(evalContext *alerting.EvalContext) error { - on.log.Info("Creating OpsGenie alert", "ruleId", evalContext.Rule.Id, "notification", on.Name) + on.log.Info("Creating OpsGenie alert", "ruleId", evalContext.Rule.ID, "notification", on.Name) - ruleURL, err := evalContext.GetRuleUrl() + ruleURL, err := evalContext.GetRuleURL() if err != nil { on.log.Error("Failed get rule link", "error", err) return err @@ -106,13 +106,13 @@ func (on *OpsGenieNotifier) createAlert(evalContext *alerting.EvalContext) error bodyJSON := simplejson.New() bodyJSON.Set("message", evalContext.Rule.Name) bodyJSON.Set("source", "Grafana") - bodyJSON.Set("alias", "alertId-"+strconv.FormatInt(evalContext.Rule.Id, 10)) + bodyJSON.Set("alias", "alertId-"+strconv.FormatInt(evalContext.Rule.ID, 10)) bodyJSON.Set("description", fmt.Sprintf("%s - %s\n%s\n%s", evalContext.Rule.Name, ruleURL, evalContext.Rule.Message, customData)) details := simplejson.New() details.Set("url", ruleURL) - if evalContext.ImagePublicUrl != "" { - details.Set("image", evalContext.ImagePublicUrl) + if evalContext.ImagePublicURL != "" { + details.Set("image", evalContext.ImagePublicURL) } bodyJSON.Set("details", details) @@ -136,14 +136,14 @@ func (on *OpsGenieNotifier) createAlert(evalContext *alerting.EvalContext) error } 
func (on *OpsGenieNotifier) closeAlert(evalContext *alerting.EvalContext) error { - on.log.Info("Closing OpsGenie alert", "ruleId", evalContext.Rule.Id, "notification", on.Name) + on.log.Info("Closing OpsGenie alert", "ruleId", evalContext.Rule.ID, "notification", on.Name) bodyJSON := simplejson.New() bodyJSON.Set("source", "Grafana") body, _ := bodyJSON.MarshalJSON() cmd := &models.SendWebhookSync{ - Url: fmt.Sprintf("%s/alertId-%d/close?identifierType=alias", on.APIUrl, evalContext.Rule.Id), + Url: fmt.Sprintf("%s/alertId-%d/close?identifierType=alias", on.APIUrl, evalContext.Rule.ID), Body: string(body), HttpMethod: "POST", HttpHeader: map[string]string{ diff --git a/pkg/services/alerting/notifiers/pagerduty.go b/pkg/services/alerting/notifiers/pagerduty.go index 99302c1af778..d771bfd1ad68 100644 --- a/pkg/services/alerting/notifiers/pagerduty.go +++ b/pkg/services/alerting/notifiers/pagerduty.go @@ -100,10 +100,10 @@ func (pn *PagerdutyNotifier) Notify(evalContext *alerting.EvalContext) error { bodyJSON := simplejson.New() bodyJSON.Set("routing_key", pn.Key) bodyJSON.Set("event_action", eventType) - bodyJSON.Set("dedup_key", "alertId-"+strconv.FormatInt(evalContext.Rule.Id, 10)) + bodyJSON.Set("dedup_key", "alertId-"+strconv.FormatInt(evalContext.Rule.ID, 10)) bodyJSON.Set("payload", payloadJSON) - ruleURL, err := evalContext.GetRuleUrl() + ruleURL, err := evalContext.GetRuleURL() if err != nil { pn.log.Error("Failed get rule link", "error", err) return err @@ -116,10 +116,10 @@ func (pn *PagerdutyNotifier) Notify(evalContext *alerting.EvalContext) error { links[0] = linkJSON bodyJSON.Set("links", links) - if evalContext.ImagePublicUrl != "" { + if evalContext.ImagePublicURL != "" { contexts := make([]interface{}, 1) imageJSON := simplejson.New() - imageJSON.Set("src", evalContext.ImagePublicUrl) + imageJSON.Set("src", evalContext.ImagePublicURL) contexts[0] = imageJSON bodyJSON.Set("images", contexts) } diff --git a/pkg/services/alerting/notifiers/pushover.go 
b/pkg/services/alerting/notifiers/pushover.go index 19de6ce08a23..5da1a457e679 100644 --- a/pkg/services/alerting/notifiers/pushover.go +++ b/pkg/services/alerting/notifiers/pushover.go @@ -146,7 +146,7 @@ type PushoverNotifier struct { // Notify sends a alert notification to Pushover func (pn *PushoverNotifier) Notify(evalContext *alerting.EvalContext) error { - ruleURL, err := evalContext.GetRuleUrl() + ruleURL, err := evalContext.GetRuleURL() if err != nil { pn.log.Error("Failed get rule link", "error", err) return err diff --git a/pkg/services/alerting/notifiers/sensu.go b/pkg/services/alerting/notifiers/sensu.go index 7650cb222d92..7f60178d10f5 100644 --- a/pkg/services/alerting/notifiers/sensu.go +++ b/pkg/services/alerting/notifiers/sensu.go @@ -79,7 +79,7 @@ func (sn *SensuNotifier) Notify(evalContext *alerting.EvalContext) error { sn.log.Info("Sending sensu result") bodyJSON := simplejson.New() - bodyJSON.Set("ruleId", evalContext.Rule.Id) + bodyJSON.Set("ruleId", evalContext.Rule.ID) // Sensu alerts cannot have spaces in them bodyJSON.Set("name", strings.Replace(evalContext.Rule.Name, " ", "_", -1)) // Sensu alerts require a source. 
We set it to the user-specified value (optional), @@ -87,7 +87,7 @@ func (sn *SensuNotifier) Notify(evalContext *alerting.EvalContext) error { if sn.Source != "" { bodyJSON.Set("source", sn.Source) } else { - bodyJSON.Set("source", "grafana_rule_"+strconv.FormatInt(evalContext.Rule.Id, 10)) + bodyJSON.Set("source", "grafana_rule_"+strconv.FormatInt(evalContext.Rule.ID, 10)) } // Finally, sensu expects an output // We set it to a default output @@ -106,13 +106,13 @@ func (sn *SensuNotifier) Notify(evalContext *alerting.EvalContext) error { bodyJSON.Set("handler", sn.Handler) } - ruleURL, err := evalContext.GetRuleUrl() + ruleURL, err := evalContext.GetRuleURL() if err == nil { bodyJSON.Set("ruleUrl", ruleURL) } - if evalContext.ImagePublicUrl != "" { - bodyJSON.Set("imageUrl", evalContext.ImagePublicUrl) + if evalContext.ImagePublicURL != "" { + bodyJSON.Set("imageUrl", evalContext.ImagePublicURL) } if evalContext.Rule.Message != "" { diff --git a/pkg/services/alerting/notifiers/slack.go b/pkg/services/alerting/notifiers/slack.go index f8cad904270f..b9a10c4d5d10 100644 --- a/pkg/services/alerting/notifiers/slack.go +++ b/pkg/services/alerting/notifiers/slack.go @@ -145,9 +145,9 @@ type SlackNotifier struct { // Notify send alert notification to Slack. 
func (sn *SlackNotifier) Notify(evalContext *alerting.EvalContext) error { - sn.log.Info("Executing slack notification", "ruleId", evalContext.Rule.Id, "notification", sn.Name) + sn.log.Info("Executing slack notification", "ruleId", evalContext.Rule.ID, "notification", sn.Name) - ruleURL, err := evalContext.GetRuleUrl() + ruleURL, err := evalContext.GetRuleURL() if err != nil { sn.log.Error("Failed get rule link", "error", err) return err @@ -181,7 +181,7 @@ func (sn *SlackNotifier) Notify(evalContext *alerting.EvalContext) error { imageURL := "" // default to file.upload API method if a token is provided if sn.Token == "" { - imageURL = evalContext.ImagePublicUrl + imageURL = evalContext.ImagePublicURL } body := map[string]interface{}{ diff --git a/pkg/services/alerting/notifiers/teams.go b/pkg/services/alerting/notifiers/teams.go index 4a2cfa1ca911..4d0c47ddad23 100644 --- a/pkg/services/alerting/notifiers/teams.go +++ b/pkg/services/alerting/notifiers/teams.go @@ -50,9 +50,9 @@ type TeamsNotifier struct { // Notify send an alert notification to Microsoft teams. 
func (tn *TeamsNotifier) Notify(evalContext *alerting.EvalContext) error { - tn.log.Info("Executing teams notification", "ruleId", evalContext.Rule.Id, "notification", tn.Name) + tn.log.Info("Executing teams notification", "ruleId", evalContext.Rule.ID, "notification", tn.Name) - ruleURL, err := evalContext.GetRuleUrl() + ruleURL, err := evalContext.GetRuleURL() if err != nil { tn.log.Error("Failed get rule link", "error", err) return err @@ -83,9 +83,9 @@ func (tn *TeamsNotifier) Notify(evalContext *alerting.EvalContext) error { } images := make([]map[string]interface{}, 0) - if evalContext.ImagePublicUrl != "" { + if evalContext.ImagePublicURL != "" { images = append(images, map[string]interface{}{ - "image": evalContext.ImagePublicUrl, + "image": evalContext.ImagePublicURL, }) } @@ -122,7 +122,7 @@ func (tn *TeamsNotifier) Notify(evalContext *alerting.EvalContext) error { "name": "View Graph", "targets": []map[string]interface{}{ { - "os": "default", "uri": evalContext.ImagePublicUrl, + "os": "default", "uri": evalContext.ImagePublicURL, }, }, }, diff --git a/pkg/services/alerting/notifiers/telegram.go b/pkg/services/alerting/notifiers/telegram.go index 0c2e85579823..be354bc2733e 100644 --- a/pkg/services/alerting/notifiers/telegram.go +++ b/pkg/services/alerting/notifiers/telegram.go @@ -104,13 +104,13 @@ func (tn *TelegramNotifier) buildMessage(evalContext *alerting.EvalContext, send func (tn *TelegramNotifier) buildMessageLinkedImage(evalContext *alerting.EvalContext) *models.SendWebhookSync { message := fmt.Sprintf("%s\nState: %s\nMessage: %s\n", evalContext.GetNotificationTitle(), evalContext.Rule.Name, evalContext.Rule.Message) - ruleURL, err := evalContext.GetRuleUrl() + ruleURL, err := evalContext.GetRuleURL() if err == nil { message = message + fmt.Sprintf("URL: %s\n", ruleURL) } - if evalContext.ImagePublicUrl != "" { - message = message + fmt.Sprintf("Image: %s\n", evalContext.ImagePublicUrl) + if evalContext.ImagePublicURL != "" { + message = message 
+ fmt.Sprintf("Image: %s\n", evalContext.ImagePublicURL) } metrics := generateMetricsMessage(evalContext) @@ -141,7 +141,7 @@ func (tn *TelegramNotifier) buildMessageInlineImage(evalContext *alerting.EvalCo return nil, err } - ruleURL, err := evalContext.GetRuleUrl() + ruleURL, err := evalContext.GetRuleURL() if err != nil { return nil, err } @@ -232,7 +232,7 @@ func appendIfPossible(message string, extra string, sizeLimit int) string { // Notify send an alert notification to Telegram. func (tn *TelegramNotifier) Notify(evalContext *alerting.EvalContext) error { var cmd *models.SendWebhookSync - if evalContext.ImagePublicUrl == "" && tn.UploadImage { + if evalContext.ImagePublicURL == "" && tn.UploadImage { cmd = tn.buildMessage(evalContext, true) } else { cmd = tn.buildMessage(evalContext, false) diff --git a/pkg/services/alerting/notifiers/threema.go b/pkg/services/alerting/notifiers/threema.go index 621a04a85d5f..560e8c12e80b 100644 --- a/pkg/services/alerting/notifiers/threema.go +++ b/pkg/services/alerting/notifiers/threema.go @@ -143,12 +143,12 @@ func (notifier *ThreemaNotifier) Notify(evalContext *alerting.EvalContext) error message := fmt.Sprintf("%s%s\n\n*State:* %s\n*Message:* %s\n", stateEmoji, evalContext.GetNotificationTitle(), evalContext.Rule.Name, evalContext.Rule.Message) - ruleURL, err := evalContext.GetRuleUrl() + ruleURL, err := evalContext.GetRuleURL() if err == nil { message = message + fmt.Sprintf("*URL:* %s\n", ruleURL) } - if evalContext.ImagePublicUrl != "" { - message = message + fmt.Sprintf("*Image:* %s\n", evalContext.ImagePublicUrl) + if evalContext.ImagePublicURL != "" { + message = message + fmt.Sprintf("*Image:* %s\n", evalContext.ImagePublicURL) } data.Set("text", message) diff --git a/pkg/services/alerting/notifiers/victorops.go b/pkg/services/alerting/notifiers/victorops.go index c118d811b9c4..d19ea356547b 100644 --- a/pkg/services/alerting/notifiers/victorops.go +++ b/pkg/services/alerting/notifiers/victorops.go @@ -70,9 +70,9 
@@ type VictoropsNotifier struct { // Notify sends notification to Victorops via POST to URL endpoint func (vn *VictoropsNotifier) Notify(evalContext *alerting.EvalContext) error { - vn.log.Info("Executing victorops notification", "ruleId", evalContext.Rule.Id, "notification", vn.Name) + vn.log.Info("Executing victorops notification", "ruleId", evalContext.Rule.ID, "notification", vn.Name) - ruleURL, err := evalContext.GetRuleUrl() + ruleURL, err := evalContext.GetRuleURL() if err != nil { vn.log.Error("Failed get rule link", "error", err) return err @@ -116,8 +116,8 @@ func (vn *VictoropsNotifier) Notify(evalContext *alerting.EvalContext) error { bodyJSON.Set("error_message", evalContext.Error.Error()) } - if evalContext.ImagePublicUrl != "" { - bodyJSON.Set("image_url", evalContext.ImagePublicUrl) + if evalContext.ImagePublicURL != "" { + bodyJSON.Set("image_url", evalContext.ImagePublicURL) } data, _ := bodyJSON.MarshalJSON() diff --git a/pkg/services/alerting/notifiers/webhook.go b/pkg/services/alerting/notifiers/webhook.go index 3c10c50c5d03..f5ee99245127 100644 --- a/pkg/services/alerting/notifiers/webhook.go +++ b/pkg/services/alerting/notifiers/webhook.go @@ -76,18 +76,18 @@ func (wn *WebhookNotifier) Notify(evalContext *alerting.EvalContext) error { bodyJSON := simplejson.New() bodyJSON.Set("title", evalContext.GetNotificationTitle()) - bodyJSON.Set("ruleId", evalContext.Rule.Id) + bodyJSON.Set("ruleId", evalContext.Rule.ID) bodyJSON.Set("ruleName", evalContext.Rule.Name) bodyJSON.Set("state", evalContext.Rule.State) bodyJSON.Set("evalMatches", evalContext.EvalMatches) - ruleURL, err := evalContext.GetRuleUrl() + ruleURL, err := evalContext.GetRuleURL() if err == nil { bodyJSON.Set("ruleUrl", ruleURL) } - if evalContext.ImagePublicUrl != "" { - bodyJSON.Set("imageUrl", evalContext.ImagePublicUrl) + if evalContext.ImagePublicURL != "" { + bodyJSON.Set("imageUrl", evalContext.ImagePublicURL) } if evalContext.Rule.Message != "" { diff --git 
a/pkg/services/alerting/result_handler.go b/pkg/services/alerting/result_handler.go index 7141c6cec46a..814a3f8a21e2 100644 --- a/pkg/services/alerting/result_handler.go +++ b/pkg/services/alerting/result_handler.go @@ -46,11 +46,11 @@ func (handler *defaultResultHandler) handle(evalContext *EvalContext) error { metrics.M_Alerting_Result_State.WithLabelValues(string(evalContext.Rule.State)).Inc() if evalContext.shouldUpdateAlertState() { - handler.log.Info("New state change", "alertId", evalContext.Rule.Id, "newState", evalContext.Rule.State, "prev state", evalContext.PrevAlertState) + handler.log.Info("New state change", "alertId", evalContext.Rule.ID, "newState", evalContext.Rule.State, "prev state", evalContext.PrevAlertState) cmd := &models.SetAlertStateCommand{ - AlertId: evalContext.Rule.Id, - OrgId: evalContext.Rule.OrgId, + AlertId: evalContext.Rule.ID, + OrgId: evalContext.Rule.OrgID, State: evalContext.Rule.State, Error: executionError, EvalData: annotationData, @@ -81,10 +81,10 @@ func (handler *defaultResultHandler) handle(evalContext *EvalContext) error { // save annotation item := annotations.Item{ - OrgId: evalContext.Rule.OrgId, - DashboardId: evalContext.Rule.DashboardId, - PanelId: evalContext.Rule.PanelId, - AlertId: evalContext.Rule.Id, + OrgId: evalContext.Rule.OrgID, + DashboardId: evalContext.Rule.DashboardID, + PanelId: evalContext.Rule.PanelID, + AlertId: evalContext.Rule.ID, Text: "", NewState: string(evalContext.Rule.State), PrevState: string(evalContext.PrevAlertState), diff --git a/pkg/services/alerting/rule.go b/pkg/services/alerting/rule.go index 422148bc42f7..9a4065e279da 100644 --- a/pkg/services/alerting/rule.go +++ b/pkg/services/alerting/rule.go @@ -21,10 +21,10 @@ var ( // Rule is the in-memory version of an alert rule. 
type Rule struct { - Id int64 - OrgId int64 - DashboardId int64 - PanelId int64 + ID int64 + OrgID int64 + DashboardID int64 + PanelID int64 Frequency int64 Name string Message string @@ -44,23 +44,23 @@ type Rule struct { type ValidationError struct { Reason string Err error - Alertid int64 - DashboardId int64 - PanelId int64 + AlertID int64 + DashboardID int64 + PanelID int64 } func (e ValidationError) Error() string { extraInfo := e.Reason - if e.Alertid != 0 { - extraInfo = fmt.Sprintf("%s AlertId: %v", extraInfo, e.Alertid) + if e.AlertID != 0 { + extraInfo = fmt.Sprintf("%s AlertId: %v", extraInfo, e.AlertID) } - if e.PanelId != 0 { - extraInfo = fmt.Sprintf("%s PanelId: %v", extraInfo, e.PanelId) + if e.PanelID != 0 { + extraInfo = fmt.Sprintf("%s PanelId: %v", extraInfo, e.PanelID) } - if e.DashboardId != 0 { - extraInfo = fmt.Sprintf("%s DashboardId: %v", extraInfo, e.DashboardId) + if e.DashboardID != 0 { + extraInfo = fmt.Sprintf("%s DashboardId: %v", extraInfo, e.DashboardID) } if e.Err != nil { @@ -113,10 +113,10 @@ func getTimeDurationStringToSeconds(str string) (int64, error) { // alert to an in-memory version. 
func NewRuleFromDBAlert(ruleDef *models.Alert) (*Rule, error) { model := &Rule{} - model.Id = ruleDef.Id - model.OrgId = ruleDef.OrgId - model.DashboardId = ruleDef.DashboardId - model.PanelId = ruleDef.PanelId + model.ID = ruleDef.Id + model.OrgID = ruleDef.OrgId + model.DashboardID = ruleDef.DashboardId + model.PanelID = ruleDef.PanelId model.Name = ruleDef.Name model.Message = ruleDef.Message model.State = ruleDef.State @@ -140,7 +140,7 @@ func NewRuleFromDBAlert(ruleDef *models.Alert) (*Rule, error) { } else { uid, err := jsonModel.Get("uid").String() if err != nil { - return nil, ValidationError{Reason: "Neither id nor uid is specified, " + err.Error(), DashboardId: model.DashboardId, Alertid: model.Id, PanelId: model.PanelId} + return nil, ValidationError{Reason: "Neither id nor uid is specified, " + err.Error(), DashboardID: model.DashboardID, AlertID: model.ID, PanelID: model.PanelID} } model.Notifications = append(model.Notifications, uid) } @@ -151,11 +151,11 @@ func NewRuleFromDBAlert(ruleDef *models.Alert) (*Rule, error) { conditionType := conditionModel.Get("type").MustString() factory, exist := conditionFactories[conditionType] if !exist { - return nil, ValidationError{Reason: "Unknown alert condition: " + conditionType, DashboardId: model.DashboardId, Alertid: model.Id, PanelId: model.PanelId} + return nil, ValidationError{Reason: "Unknown alert condition: " + conditionType, DashboardID: model.DashboardID, AlertID: model.ID, PanelID: model.PanelID} } queryCondition, err := factory(conditionModel, index) if err != nil { - return nil, ValidationError{Err: err, DashboardId: model.DashboardId, Alertid: model.Id, PanelId: model.PanelId} + return nil, ValidationError{Err: err, DashboardID: model.DashboardID, AlertID: model.ID, PanelID: model.PanelID} } model.Conditions = append(model.Conditions, queryCondition) } diff --git a/pkg/services/alerting/scheduler.go b/pkg/services/alerting/scheduler.go index 62ef35298fcb..b01618f40955 100644 --- 
a/pkg/services/alerting/scheduler.go +++ b/pkg/services/alerting/scheduler.go @@ -27,8 +27,8 @@ func (s *schedulerImpl) Update(rules []*Rule) { for i, rule := range rules { var job *Job - if s.jobs[rule.Id] != nil { - job = s.jobs[rule.Id] + if s.jobs[rule.ID] != nil { + job = s.jobs[rule.ID] } else { job = &Job{ Running: false, @@ -42,7 +42,7 @@ func (s *schedulerImpl) Update(rules []*Rule) { if job.Offset == 0 { //zero offset causes division with 0 panics. job.Offset = 1 } - jobs[rule.Id] = job + jobs[rule.ID] = job } s.jobs = jobs @@ -73,6 +73,6 @@ func (s *schedulerImpl) Tick(tickTime time.Time, execQueue chan *Job) { } func (s *schedulerImpl) enqueue(job *Job, execQueue chan *Job) { - s.log.Debug("Scheduler: Putting job on to exec queue", "name", job.Rule.Name, "id", job.Rule.Id) + s.log.Debug("Scheduler: Putting job on to exec queue", "name", job.Rule.Name, "id", job.Rule.ID) execQueue <- job } diff --git a/pkg/services/alerting/test_notification.go b/pkg/services/alerting/test_notification.go index 3651fffa68bf..311109ed6078 100644 --- a/pkg/services/alerting/test_notification.go +++ b/pkg/services/alerting/test_notification.go @@ -49,8 +49,8 @@ func handleNotificationTestCommand(cmd *NotificationTestCommand) error { func createTestEvalContext(cmd *NotificationTestCommand) *EvalContext { testRule := &Rule{ - DashboardId: 1, - PanelId: 1, + DashboardID: 1, + PanelID: 1, Name: "Test notification", Message: "Someone is testing the alert notification within grafana.", State: models.AlertStateAlerting, @@ -58,7 +58,7 @@ func createTestEvalContext(cmd *NotificationTestCommand) *EvalContext { ctx := NewEvalContext(context.Background(), testRule) if cmd.Settings.Get("uploadImage").MustBool(true) { - ctx.ImagePublicUrl = "https://grafana.com/assets/img/blog/mixed_styles.png" + ctx.ImagePublicURL = "https://grafana.com/assets/img/blog/mixed_styles.png" } ctx.IsTestRun = true ctx.Firing = true diff --git a/pkg/services/alerting/test_rule.go 
b/pkg/services/alerting/test_rule.go index 7a44845614b1..1575490ea324 100644 --- a/pkg/services/alerting/test_rule.go +++ b/pkg/services/alerting/test_rule.go @@ -13,8 +13,8 @@ import ( // of an alert rule. type AlertTestCommand struct { Dashboard *simplejson.Json - PanelId int64 - OrgId int64 + PanelID int64 + OrgID int64 User *models.SignedInUser Result *EvalContext @@ -28,14 +28,14 @@ func handleAlertTestCommand(cmd *AlertTestCommand) error { dash := models.NewDashboardFromJson(cmd.Dashboard) - extractor := NewDashAlertExtractor(dash, cmd.OrgId, cmd.User) + extractor := NewDashAlertExtractor(dash, cmd.OrgID, cmd.User) alerts, err := extractor.GetAlerts() if err != nil { return err } for _, alert := range alerts { - if alert.PanelId == cmd.PanelId { + if alert.PanelId == cmd.PanelID { rule, err := NewRuleFromDBAlert(alert) if err != nil { return err @@ -46,7 +46,7 @@ func handleAlertTestCommand(cmd *AlertTestCommand) error { } } - return fmt.Errorf("Could not find alert with panel id %d", cmd.PanelId) + return fmt.Errorf("Could not find alert with panel id %d", cmd.PanelID) } func testAlertRule(rule *Rule) *EvalContext { diff --git a/pkg/services/auth/auth_token.go b/pkg/services/auth/auth_token.go index 527d054f6ee9..af23d773f65c 100644 --- a/pkg/services/auth/auth_token.go +++ b/pkg/services/auth/auth_token.go @@ -4,6 +4,7 @@ import ( "context" "crypto/sha256" "encoding/hex" + "strings" "time" "github.com/grafana/grafana/pkg/infra/serverlock" @@ -305,6 +306,36 @@ func (s *UserAuthTokenService) RevokeAllUserTokens(ctx context.Context, userId i }) } +func (s *UserAuthTokenService) BatchRevokeAllUserTokens(ctx context.Context, userIds []int64) error { + return s.SQLStore.WithTransactionalDbSession(ctx, func(dbSession *sqlstore.DBSession) error { + if len(userIds) == 0 { + return nil + } + + user_id_params := strings.Repeat(",?", len(userIds)-1) + sql := "DELETE from user_auth_token WHERE user_id IN (?" 
+ user_id_params + ")" + + params := []interface{}{sql} + for _, v := range userIds { + params = append(params, v) + } + + res, err := dbSession.Exec(params...) + if err != nil { + return err + } + + affected, err := res.RowsAffected() + if err != nil { + return err + } + + s.log.Debug("all user tokens for given users revoked", "usersCount", len(userIds), "count", affected) + + return err + }) +} + func (s *UserAuthTokenService) GetUserToken(ctx context.Context, userId, userTokenId int64) (*models.UserToken, error) { var result models.UserToken diff --git a/pkg/services/auth/auth_token_test.go b/pkg/services/auth/auth_token_test.go index 802b4602cbfc..bf12d914e970 100644 --- a/pkg/services/auth/auth_token_test.go +++ b/pkg/services/auth/auth_token_test.go @@ -117,6 +117,26 @@ func TestUserAuthToken(t *testing.T) { So(model2, ShouldBeNil) }) }) + + Convey("When revoking users tokens in a batch", func() { + Convey("Can revoke all users tokens", func() { + userIds := []int64{} + for i := 0; i < 3; i++ { + userId := userID + int64(i+1) + userIds = append(userIds, userId) + userAuthTokenService.CreateToken(context.Background(), userId, "192.168.10.11:1234", "some user agent") + } + + err := userAuthTokenService.BatchRevokeAllUserTokens(context.Background(), userIds) + So(err, ShouldBeNil) + + for _, v := range userIds { + tokens, err := userAuthTokenService.GetUserTokens(context.Background(), v) + So(err, ShouldBeNil) + So(len(tokens), ShouldEqual, 0) + } + }) + }) }) Convey("expires correctly", func() { diff --git a/pkg/services/ldap/ldap.go b/pkg/services/ldap/ldap.go index 9fa680d1e19b..418673be4463 100644 --- a/pkg/services/ldap/ldap.go +++ b/pkg/services/ldap/ldap.go @@ -29,18 +29,16 @@ type IConnection interface { // IServer is interface for LDAP authorization type IServer interface { Login(*models.LoginUserQuery) (*models.ExternalUserInfo, error) - Add(string, map[string][]string) error - Remove(string) error Users([]string) ([]*models.ExternalUserInfo, error) - 
ExtractGrafanaUser(*UserInfo) (*models.ExternalUserInfo, error) + InitialBind(string, string) error Dial() error Close() } // Server is basic struct of LDAP authorization type Server struct { - config *ServerConfig - connection IConnection + Config *ServerConfig + Connection IConnection requireSecondBind bool log log.Logger } @@ -49,7 +47,6 @@ var ( // ErrInvalidCredentials is returned if username and password do not match ErrInvalidCredentials = errors.New("Invalid Username or Password") - ErrLDAPUserNotFound = errors.New("LDAP user not found") ) var dial = func(network, addr string) (IConnection, error) { @@ -59,7 +56,7 @@ var dial = func(network, addr string) (IConnection, error) { // New creates the new LDAP auth func New(config *ServerConfig) IServer { return &Server{ - config: config, + Config: config, log: log.New("ldap"), } } @@ -68,9 +65,9 @@ func New(config *ServerConfig) IServer { func (server *Server) Dial() error { var err error var certPool *x509.CertPool - if server.config.RootCACert != "" { + if server.Config.RootCACert != "" { certPool = x509.NewCertPool() - for _, caCertFile := range strings.Split(server.config.RootCACert, " ") { + for _, caCertFile := range strings.Split(server.Config.RootCACert, " ") { pem, err := ioutil.ReadFile(caCertFile) if err != nil { return err @@ -81,35 +78,35 @@ func (server *Server) Dial() error { } } var clientCert tls.Certificate - if server.config.ClientCert != "" && server.config.ClientKey != "" { - clientCert, err = tls.LoadX509KeyPair(server.config.ClientCert, server.config.ClientKey) + if server.Config.ClientCert != "" && server.Config.ClientKey != "" { + clientCert, err = tls.LoadX509KeyPair(server.Config.ClientCert, server.Config.ClientKey) if err != nil { return err } } - for _, host := range strings.Split(server.config.Host, " ") { - address := fmt.Sprintf("%s:%d", host, server.config.Port) - if server.config.UseSSL { + for _, host := range strings.Split(server.Config.Host, " ") { + address := 
fmt.Sprintf("%s:%d", host, server.Config.Port) + if server.Config.UseSSL { tlsCfg := &tls.Config{ - InsecureSkipVerify: server.config.SkipVerifySSL, + InsecureSkipVerify: server.Config.SkipVerifySSL, ServerName: host, RootCAs: certPool, } if len(clientCert.Certificate) > 0 { tlsCfg.Certificates = append(tlsCfg.Certificates, clientCert) } - if server.config.StartTLS { - server.connection, err = dial("tcp", address) + if server.Config.StartTLS { + server.Connection, err = dial("tcp", address) if err == nil { - if err = server.connection.StartTLS(tlsCfg); err == nil { + if err = server.Connection.StartTLS(tlsCfg); err == nil { return nil } } } else { - server.connection, err = ldap.DialTLS("tcp", address, tlsCfg) + server.Connection, err = ldap.DialTLS("tcp", address, tlsCfg) } } else { - server.connection, err = dial("tcp", address) + server.Connection, err = dial("tcp", address) } if err == nil { @@ -121,16 +118,16 @@ func (server *Server) Dial() error { // Close closes the LDAP connection func (server *Server) Close() { - server.connection.Close() + server.Connection.Close() } -// Log in user by searching and serializing it +// Login user by searching and serializing it func (server *Server) Login(query *models.LoginUserQuery) ( *models.ExternalUserInfo, error, ) { // Perform initial authentication - err := server.initialBind(query.Username, query.Password) + err := server.InitialBind(query.Username, query.Password) if err != nil { return nil, err } @@ -150,6 +147,11 @@ func (server *Server) Login(query *models.LoginUserQuery) ( // Check if a second user bind is needed user := users[0] + + if err := server.validateGrafanaUser(user); err != nil { + return nil, err + } + if server.requireSecondBind { err = server.secondBind(user, query.Password) if err != nil { @@ -160,56 +162,6 @@ func (server *Server) Login(query *models.LoginUserQuery) ( return user, nil } -// Add adds stuff to LDAP -func (server *Server) Add(dn string, values map[string][]string) error { - err := 
server.initialBind( - server.config.BindDN, - server.config.BindPassword, - ) - if err != nil { - return err - } - - attributes := make([]ldap.Attribute, 0) - for key, value := range values { - attributes = append(attributes, ldap.Attribute{ - Type: key, - Vals: value, - }) - } - - request := &ldap.AddRequest{ - DN: dn, - Attributes: attributes, - } - - err = server.connection.Add(request) - if err != nil { - return err - } - - return nil -} - -// Remove removes stuff from LDAP -func (server *Server) Remove(dn string) error { - err := server.initialBind( - server.config.BindDN, - server.config.BindPassword, - ) - if err != nil { - return err - } - - request := ldap.NewDelRequest(dn, nil) - err = server.connection.Del(request) - if err != nil { - return err - } - - return nil -} - // Users gets LDAP users func (server *Server) Users(logins []string) ( []*models.ExternalUserInfo, @@ -217,10 +169,10 @@ func (server *Server) Users(logins []string) ( ) { var result *ldap.SearchResult var err error - var config = server.config + var Config = server.Config - for _, base := range config.SearchBaseDNs { - result, err = server.connection.Search( + for _, base := range Config.SearchBaseDNs { + result, err = server.Connection.Search( server.getSearchRequest(base, logins), ) if err != nil { @@ -240,21 +192,11 @@ func (server *Server) Users(logins []string) ( return serializedUsers, nil } -// ExtractGrafanaUser extracts external user info from LDAP user -func (server *Server) ExtractGrafanaUser(user *UserInfo) (*models.ExternalUserInfo, error) { - result := server.buildGrafanaUser(user) - if err := server.validateGrafanaUser(result); err != nil { - return nil, err - } - - return result, nil -} - // validateGrafanaUser validates user access. 
// If there are no ldap group mappings access is true // otherwise a single group must match func (server *Server) validateGrafanaUser(user *models.ExternalUserInfo) error { - if len(server.config.Groups) > 0 && len(user.OrgRoles) < 1 { + if len(server.Config.Groups) > 0 && len(user.OrgRoles) < 1 { server.log.Error( "user does not belong in any of the specified LDAP groups", "username", user.Login, @@ -301,7 +243,7 @@ func (server *Server) getSearchRequest( ) *ldap.SearchRequest { attributes := []string{} - inputs := server.config.Attr + inputs := server.Config.Attr attributes = appendIfNotEmpty( attributes, inputs.Username, @@ -314,7 +256,7 @@ func (server *Server) getSearchRequest( search := "" for _, login := range logins { query := strings.Replace( - server.config.SearchFilter, + server.Config.SearchFilter, "%s", ldap.EscapeFilter(login), -1, ) @@ -347,7 +289,7 @@ func (server *Server) buildGrafanaUser(user *UserInfo) *models.ExternalUserInfo OrgRoles: map[int64]models.RoleType{}, } - for _, group := range server.config.Groups { + for _, group := range server.Config.Groups { // only use the first match for each org if extUser.OrgRoles[group.OrgId] != "" { continue @@ -366,15 +308,15 @@ func (server *Server) buildGrafanaUser(user *UserInfo) *models.ExternalUserInfo func (server *Server) serverBind() error { bindFn := func() error { - return server.connection.Bind( - server.config.BindDN, - server.config.BindPassword, + return server.Connection.Bind( + server.Config.BindDN, + server.Config.BindPassword, ) } - if server.config.BindPassword == "" { + if server.Config.BindPassword == "" { bindFn = func() error { - return server.connection.UnauthenticatedBind(server.config.BindDN) + return server.Connection.UnauthenticatedBind(server.Config.BindDN) } } @@ -397,7 +339,7 @@ func (server *Server) secondBind( user *models.ExternalUserInfo, userPassword string, ) error { - err := server.connection.Bind(user.AuthId, userPassword) + err := 
server.Connection.Bind(user.AuthId, userPassword) if err != nil { server.log.Info("Second bind failed", "error", err) @@ -412,24 +354,25 @@ func (server *Server) secondBind( return nil } -func (server *Server) initialBind(username, userPassword string) error { - if server.config.BindPassword != "" || server.config.BindDN == "" { - userPassword = server.config.BindPassword +// InitialBind initiates first bind to LDAP server +func (server *Server) InitialBind(username, userPassword string) error { + if server.Config.BindPassword != "" || server.Config.BindDN == "" { + userPassword = server.Config.BindPassword server.requireSecondBind = true } - bindPath := server.config.BindDN + bindPath := server.Config.BindDN if strings.Contains(bindPath, "%s") { - bindPath = fmt.Sprintf(server.config.BindDN, username) + bindPath = fmt.Sprintf(server.Config.BindDN, username) } bindFn := func() error { - return server.connection.Bind(bindPath, userPassword) + return server.Connection.Bind(bindPath, userPassword) } if userPassword == "" { bindFn = func() error { - return server.connection.UnauthenticatedBind(bindPath) + return server.Connection.UnauthenticatedBind(bindPath) } } @@ -451,16 +394,16 @@ func (server *Server) initialBind(username, userPassword string) error { func (server *Server) requestMemberOf(searchResult *ldap.SearchResult) ([]string, error) { var memberOf []string - for _, groupSearchBase := range server.config.GroupSearchBaseDNs { + for _, groupSearchBase := range server.Config.GroupSearchBaseDNs { var filterReplace string - if server.config.GroupSearchFilterUserAttribute == "" { - filterReplace = getLDAPAttr(server.config.Attr.Username, searchResult) + if server.Config.GroupSearchFilterUserAttribute == "" { + filterReplace = getLDAPAttr(server.Config.Attr.Username, searchResult) } else { - filterReplace = getLDAPAttr(server.config.GroupSearchFilterUserAttribute, searchResult) + filterReplace = getLDAPAttr(server.Config.GroupSearchFilterUserAttribute, searchResult) 
} filter := strings.Replace( - server.config.GroupSearchFilter, "%s", + server.Config.GroupSearchFilter, "%s", ldap.EscapeFilter(filterReplace), -1, ) @@ -468,7 +411,7 @@ func (server *Server) requestMemberOf(searchResult *ldap.SearchResult) ([]string server.log.Info("Searching for user's groups", "filter", filter) // support old way of reading settings - groupIDAttribute := server.config.Attr.MemberOf + groupIDAttribute := server.Config.Attr.MemberOf // but prefer dn attribute if default settings are used if groupIDAttribute == "" || groupIDAttribute == "memberOf" { groupIDAttribute = "dn" @@ -482,7 +425,7 @@ func (server *Server) requestMemberOf(searchResult *ldap.SearchResult) ([]string Filter: filter, } - groupSearchResult, err := server.connection.Search(&groupSearchReq) + groupSearchResult, err := server.Connection.Search(&groupSearchReq) if err != nil { return nil, err } @@ -518,22 +461,22 @@ func (server *Server) serializeUsers( index, ), LastName: getLDAPAttrN( - server.config.Attr.Surname, + server.Config.Attr.Surname, users, index, ), FirstName: getLDAPAttrN( - server.config.Attr.Name, + server.Config.Attr.Name, users, index, ), Username: getLDAPAttrN( - server.config.Attr.Username, + server.Config.Attr.Username, users, index, ), Email: getLDAPAttrN( - server.config.Attr.Email, + server.Config.Attr.Email, users, index, ), @@ -553,8 +496,8 @@ func (server *Server) serializeUsers( func (server *Server) getMemberOf(search *ldap.SearchResult) ( []string, error, ) { - if server.config.GroupSearchFilter == "" { - memberOf := getLDAPAttrArray(server.config.Attr.MemberOf, search) + if server.Config.GroupSearchFilter == "" { + memberOf := getLDAPAttrArray(server.Config.Attr.MemberOf, search) return memberOf, nil } diff --git a/pkg/services/ldap/ldap_helpers_test.go b/pkg/services/ldap/ldap_helpers_test.go index 3f25633460c6..48e6bce8b5ba 100644 --- a/pkg/services/ldap/ldap_helpers_test.go +++ b/pkg/services/ldap/ldap_helpers_test.go @@ -13,7 +13,7 @@ func 
TestLDAPHelpers(t *testing.T) { Convey("serializeUsers()", t, func() { Convey("simple case", func() { server := &Server{ - config: &ServerConfig{ + Config: &ServerConfig{ Attr: AttributeMap{ Username: "username", Name: "name", @@ -22,7 +22,7 @@ func TestLDAPHelpers(t *testing.T) { }, SearchBaseDNs: []string{"BaseDNHere"}, }, - connection: &mockConnection{}, + Connection: &MockConnection{}, log: log.New("test-logger"), } @@ -46,7 +46,7 @@ func TestLDAPHelpers(t *testing.T) { Convey("without lastname", func() { server := &Server{ - config: &ServerConfig{ + Config: &ServerConfig{ Attr: AttributeMap{ Username: "username", Name: "name", @@ -55,7 +55,7 @@ func TestLDAPHelpers(t *testing.T) { }, SearchBaseDNs: []string{"BaseDNHere"}, }, - connection: &mockConnection{}, + Connection: &MockConnection{}, log: log.New("test-logger"), } @@ -75,74 +75,9 @@ func TestLDAPHelpers(t *testing.T) { }) }) - Convey("initialBind", t, func() { - Convey("Given bind dn and password configured", func() { - connection := &mockConnection{} - var actualUsername, actualPassword string - connection.bindProvider = func(username, password string) error { - actualUsername = username - actualPassword = password - return nil - } - server := &Server{ - connection: connection, - config: &ServerConfig{ - BindDN: "cn=%s,o=users,dc=grafana,dc=org", - BindPassword: "bindpwd", - }, - } - err := server.initialBind("user", "pwd") - So(err, ShouldBeNil) - So(server.requireSecondBind, ShouldBeTrue) - So(actualUsername, ShouldEqual, "cn=user,o=users,dc=grafana,dc=org") - So(actualPassword, ShouldEqual, "bindpwd") - }) - - Convey("Given bind dn configured", func() { - connection := &mockConnection{} - var actualUsername, actualPassword string - connection.bindProvider = func(username, password string) error { - actualUsername = username - actualPassword = password - return nil - } - server := &Server{ - connection: connection, - config: &ServerConfig{ - BindDN: "cn=%s,o=users,dc=grafana,dc=org", - }, - } - err := 
server.initialBind("user", "pwd") - So(err, ShouldBeNil) - So(server.requireSecondBind, ShouldBeFalse) - So(actualUsername, ShouldEqual, "cn=user,o=users,dc=grafana,dc=org") - So(actualPassword, ShouldEqual, "pwd") - }) - - Convey("Given empty bind dn and password", func() { - connection := &mockConnection{} - unauthenticatedBindWasCalled := false - var actualUsername string - connection.unauthenticatedBindProvider = func(username string) error { - unauthenticatedBindWasCalled = true - actualUsername = username - return nil - } - server := &Server{ - connection: connection, - config: &ServerConfig{}, - } - err := server.initialBind("user", "pwd") - So(err, ShouldBeNil) - So(server.requireSecondBind, ShouldBeTrue) - So(unauthenticatedBindWasCalled, ShouldBeTrue) - So(actualUsername, ShouldBeEmpty) - }) - }) - Convey("serverBind()", t, func() { Convey("Given bind dn and password configured", func() { - connection := &mockConnection{} + connection := &MockConnection{} var actualUsername, actualPassword string connection.bindProvider = func(username, password string) error { actualUsername = username @@ -150,8 +85,8 @@ func TestLDAPHelpers(t *testing.T) { return nil } server := &Server{ - connection: connection, - config: &ServerConfig{ + Connection: connection, + Config: &ServerConfig{ BindDN: "o=users,dc=grafana,dc=org", BindPassword: "bindpwd", }, @@ -163,7 +98,7 @@ func TestLDAPHelpers(t *testing.T) { }) Convey("Given bind dn configured", func() { - connection := &mockConnection{} + connection := &MockConnection{} unauthenticatedBindWasCalled := false var actualUsername string connection.unauthenticatedBindProvider = func(username string) error { @@ -172,8 +107,8 @@ func TestLDAPHelpers(t *testing.T) { return nil } server := &Server{ - connection: connection, - config: &ServerConfig{ + Connection: connection, + Config: &ServerConfig{ BindDN: "o=users,dc=grafana,dc=org", }, } @@ -184,7 +119,7 @@ func TestLDAPHelpers(t *testing.T) { }) Convey("Given empty bind dn and 
password", func() { - connection := &mockConnection{} + connection := &MockConnection{} unauthenticatedBindWasCalled := false var actualUsername string connection.unauthenticatedBindProvider = func(username string) error { @@ -193,8 +128,8 @@ func TestLDAPHelpers(t *testing.T) { return nil } server := &Server{ - connection: connection, - config: &ServerConfig{}, + Connection: connection, + Config: &ServerConfig{}, } err := server.serverBind() So(err, ShouldBeNil) diff --git a/pkg/services/ldap/ldap_login_test.go b/pkg/services/ldap/ldap_login_test.go index 5bd0edc79cb1..573a9a560e84 100644 --- a/pkg/services/ldap/ldap_login_test.go +++ b/pkg/services/ldap/ldap_login_test.go @@ -13,12 +13,12 @@ import ( func TestLDAPLogin(t *testing.T) { Convey("Login()", t, func() { - authScenario("When user is log in and updated", func(sc *scenarioContext) { + serverScenario("When user is log in and updated", func(sc *scenarioContext) { // arrange - mockConnection := &mockConnection{} + mockConnection := &MockConnection{} - auth := &Server{ - config: &ServerConfig{ + server := &Server{ + Config: &ServerConfig{ Host: "", RootCACert: "", Groups: []*GroupToOrgRole{ @@ -33,7 +33,7 @@ func TestLDAPLogin(t *testing.T) { }, SearchBaseDNs: []string{"BaseDNHere"}, }, - connection: mockConnection, + Connection: mockConnection, log: log.New("test-logger"), } @@ -61,7 +61,7 @@ func TestLDAPLogin(t *testing.T) { sc.userOrgsQueryReturns([]*models.UserOrgDTO{}) // act - extUser, _ := auth.Login(query) + extUser, _ := server.Login(query) userInfo, err := user.Upsert(&user.UpsertArgs{ SignupAllowed: true, ExternalUser: extUser, @@ -73,7 +73,7 @@ func TestLDAPLogin(t *testing.T) { So(err, ShouldBeNil) // User should be searched in ldap - So(mockConnection.searchCalled, ShouldBeTrue) + So(mockConnection.SearchCalled, ShouldBeTrue) // Info should be updated (email differs) So(userInfo.Email, ShouldEqual, "roel@test.com") @@ -82,8 +82,8 @@ func TestLDAPLogin(t *testing.T) { So(sc.addOrgUserCmd.Role, 
ShouldEqual, "Admin") }) - authScenario("When login with invalid credentials", func(scenario *scenarioContext) { - connection := &mockConnection{} + serverScenario("When login with invalid credentials", func(scenario *scenarioContext) { + connection := &MockConnection{} entry := ldap.Entry{} result := ldap.SearchResult{Entries: []*ldap.Entry{&entry}} connection.setSearchResult(&result) @@ -93,8 +93,8 @@ func TestLDAPLogin(t *testing.T) { ResultCode: 49, } } - auth := &Server{ - config: &ServerConfig{ + server := &Server{ + Config: &ServerConfig{ Attr: AttributeMap{ Username: "username", Name: "name", @@ -102,19 +102,19 @@ func TestLDAPLogin(t *testing.T) { }, SearchBaseDNs: []string{"BaseDNHere"}, }, - connection: connection, + Connection: connection, log: log.New("test-logger"), } - _, err := auth.Login(scenario.loginUserQuery) + _, err := server.Login(scenario.loginUserQuery) Convey("it should return invalid credentials error", func() { So(err, ShouldEqual, ErrInvalidCredentials) }) }) - authScenario("When login with valid credentials", func(scenario *scenarioContext) { - connection := &mockConnection{} + serverScenario("When login with valid credentials", func(scenario *scenarioContext) { + connection := &MockConnection{} entry := ldap.Entry{ DN: "dn", Attributes: []*ldap.EntryAttribute{ {Name: "username", Values: []string{"markelog"}}, @@ -130,8 +130,8 @@ func TestLDAPLogin(t *testing.T) { connection.bindProvider = func(username, password string) error { return nil } - auth := &Server{ - config: &ServerConfig{ + server := &Server{ + Config: &ServerConfig{ Attr: AttributeMap{ Username: "username", Name: "name", @@ -139,18 +139,18 @@ func TestLDAPLogin(t *testing.T) { }, SearchBaseDNs: []string{"BaseDNHere"}, }, - connection: connection, + Connection: connection, log: log.New("test-logger"), } - resp, err := auth.Login(scenario.loginUserQuery) + resp, err := server.Login(scenario.loginUserQuery) So(err, ShouldBeNil) So(resp.Login, ShouldEqual, "markelog") }) - 
authScenario("When user not found in LDAP, but exist in Grafana", func(scenario *scenarioContext) { - connection := &mockConnection{} + serverScenario("When user not found in LDAP, but exist in Grafana", func(scenario *scenarioContext) { + connection := &MockConnection{} result := ldap.SearchResult{Entries: []*ldap.Entry{}} connection.setSearchResult(&result) @@ -160,15 +160,15 @@ func TestLDAPLogin(t *testing.T) { connection.bindProvider = func(username, password string) error { return nil } - auth := &Server{ - config: &ServerConfig{ + server := &Server{ + Config: &ServerConfig{ SearchBaseDNs: []string{"BaseDNHere"}, }, - connection: connection, + Connection: connection, log: log.New("test-logger"), } - _, err := auth.Login(scenario.loginUserQuery) + _, err := server.Login(scenario.loginUserQuery) Convey("it should disable user", func() { So(scenario.disableExternalUserCalled, ShouldBeTrue) @@ -181,8 +181,8 @@ func TestLDAPLogin(t *testing.T) { }) }) - authScenario("When user not found in LDAP, and disabled in Grafana already", func(scenario *scenarioContext) { - connection := &mockConnection{} + serverScenario("When user not found in LDAP, and disabled in Grafana already", func(scenario *scenarioContext) { + connection := &MockConnection{} result := ldap.SearchResult{Entries: []*ldap.Entry{}} connection.setSearchResult(&result) @@ -192,15 +192,15 @@ func TestLDAPLogin(t *testing.T) { connection.bindProvider = func(username, password string) error { return nil } - auth := &Server{ - config: &ServerConfig{ + server := &Server{ + Config: &ServerConfig{ SearchBaseDNs: []string{"BaseDNHere"}, }, - connection: connection, + Connection: connection, log: log.New("test-logger"), } - _, err := auth.Login(scenario.loginUserQuery) + _, err := server.Login(scenario.loginUserQuery) Convey("it should't call disable function", func() { So(scenario.disableExternalUserCalled, ShouldBeFalse) @@ -211,8 +211,8 @@ func TestLDAPLogin(t *testing.T) { }) }) - authScenario("When user 
found in LDAP, and disabled in Grafana", func(scenario *scenarioContext) { - connection := &mockConnection{} + serverScenario("When user found in LDAP, and disabled in Grafana", func(scenario *scenarioContext) { + connection := &MockConnection{} entry := ldap.Entry{} result := ldap.SearchResult{Entries: []*ldap.Entry{&entry}} connection.setSearchResult(&result) @@ -221,15 +221,15 @@ func TestLDAPLogin(t *testing.T) { connection.bindProvider = func(username, password string) error { return nil } - auth := &Server{ - config: &ServerConfig{ + server := &Server{ + Config: &ServerConfig{ SearchBaseDNs: []string{"BaseDNHere"}, }, - connection: connection, + Connection: connection, log: log.New("test-logger"), } - extUser, _ := auth.Login(scenario.loginUserQuery) + extUser, _ := server.Login(scenario.loginUserQuery) _, err := user.Upsert(&user.UpsertArgs{ SignupAllowed: true, ExternalUser: extUser, diff --git a/pkg/services/ldap/ldap_test.go b/pkg/services/ldap/ldap_test.go index 266fe22a4fc7..98b15ec44576 100644 --- a/pkg/services/ldap/ldap_test.go +++ b/pkg/services/ldap/ldap_test.go @@ -9,114 +9,10 @@ import ( "github.com/grafana/grafana/pkg/infra/log" ) -func TestAuth(t *testing.T) { - Convey("Add()", t, func() { - connection := &mockConnection{} - - auth := &Server{ - config: &ServerConfig{ - SearchBaseDNs: []string{"BaseDNHere"}, - }, - connection: connection, - log: log.New("test-logger"), - } - - Convey("Adds user", func() { - err := auth.Add( - "cn=ldap-tuz,ou=users,dc=grafana,dc=org", - map[string][]string{ - "mail": {"ldap-viewer@grafana.com"}, - "userPassword": {"grafana"}, - "objectClass": { - "person", - "top", - "inetOrgPerson", - "organizationalPerson", - }, - "sn": {"ldap-tuz"}, - "cn": {"ldap-tuz"}, - }, - ) - - hasMail := false - hasUserPassword := false - hasObjectClass := false - hasSN := false - hasCN := false - - So(err, ShouldBeNil) - So(connection.addParams.Controls, ShouldBeNil) - So(connection.addCalled, ShouldBeTrue) - So( - 
connection.addParams.DN, - ShouldEqual, - "cn=ldap-tuz,ou=users,dc=grafana,dc=org", - ) - - attrs := connection.addParams.Attributes - for _, value := range attrs { - if value.Type == "mail" { - So(value.Vals, ShouldContain, "ldap-viewer@grafana.com") - hasMail = true - } - - if value.Type == "userPassword" { - hasUserPassword = true - So(value.Vals, ShouldContain, "grafana") - } - - if value.Type == "objectClass" { - hasObjectClass = true - So(value.Vals, ShouldContain, "person") - So(value.Vals, ShouldContain, "top") - So(value.Vals, ShouldContain, "inetOrgPerson") - So(value.Vals, ShouldContain, "organizationalPerson") - } - - if value.Type == "sn" { - hasSN = true - So(value.Vals, ShouldContain, "ldap-tuz") - } - - if value.Type == "cn" { - hasCN = true - So(value.Vals, ShouldContain, "ldap-tuz") - } - } - - So(hasMail, ShouldBeTrue) - So(hasUserPassword, ShouldBeTrue) - So(hasObjectClass, ShouldBeTrue) - So(hasSN, ShouldBeTrue) - So(hasCN, ShouldBeTrue) - }) - }) - - Convey("Remove()", t, func() { - connection := &mockConnection{} - - auth := &Server{ - config: &ServerConfig{ - SearchBaseDNs: []string{"BaseDNHere"}, - }, - connection: connection, - log: log.New("test-logger"), - } - - Convey("Removes the user", func() { - dn := "cn=ldap-tuz,ou=users,dc=grafana,dc=org" - err := auth.Remove(dn) - - So(err, ShouldBeNil) - So(connection.delCalled, ShouldBeTrue) - So(connection.delParams.Controls, ShouldBeNil) - So(connection.delParams.DN, ShouldEqual, dn) - }) - }) - +func TestPublicAPI(t *testing.T) { Convey("Users()", t, func() { Convey("find one user", func() { - mockConnection := &mockConnection{} + MockConnection := &MockConnection{} entry := ldap.Entry{ DN: "dn", Attributes: []*ldap.EntryAttribute{ {Name: "username", Values: []string{"roelgerrits"}}, @@ -126,11 +22,11 @@ func TestAuth(t *testing.T) { {Name: "memberof", Values: []string{"admins"}}, }} result := ldap.SearchResult{Entries: []*ldap.Entry{&entry}} - mockConnection.setSearchResult(&result) + 
MockConnection.setSearchResult(&result) // Set up attribute map without surname and email server := &Server{ - config: &ServerConfig{ + Config: &ServerConfig{ Attr: AttributeMap{ Username: "username", Name: "name", @@ -138,7 +34,7 @@ func TestAuth(t *testing.T) { }, SearchBaseDNs: []string{"BaseDNHere"}, }, - connection: mockConnection, + Connection: MockConnection, log: log.New("test-logger"), } @@ -148,10 +44,75 @@ func TestAuth(t *testing.T) { So(searchResult, ShouldNotBeNil) // User should be searched in ldap - So(mockConnection.searchCalled, ShouldBeTrue) + So(MockConnection.SearchCalled, ShouldBeTrue) // No empty attributes should be added to the search request - So(len(mockConnection.searchAttributes), ShouldEqual, 3) + So(len(MockConnection.SearchAttributes), ShouldEqual, 3) + }) + }) + + Convey("InitialBind", t, func() { + Convey("Given bind dn and password configured", func() { + connection := &MockConnection{} + var actualUsername, actualPassword string + connection.bindProvider = func(username, password string) error { + actualUsername = username + actualPassword = password + return nil + } + server := &Server{ + Connection: connection, + Config: &ServerConfig{ + BindDN: "cn=%s,o=users,dc=grafana,dc=org", + BindPassword: "bindpwd", + }, + } + err := server.InitialBind("user", "pwd") + So(err, ShouldBeNil) + So(server.requireSecondBind, ShouldBeTrue) + So(actualUsername, ShouldEqual, "cn=user,o=users,dc=grafana,dc=org") + So(actualPassword, ShouldEqual, "bindpwd") + }) + + Convey("Given bind dn configured", func() { + connection := &MockConnection{} + var actualUsername, actualPassword string + connection.bindProvider = func(username, password string) error { + actualUsername = username + actualPassword = password + return nil + } + server := &Server{ + Connection: connection, + Config: &ServerConfig{ + BindDN: "cn=%s,o=users,dc=grafana,dc=org", + }, + } + err := server.InitialBind("user", "pwd") + So(err, ShouldBeNil) + So(server.requireSecondBind, 
ShouldBeFalse) + So(actualUsername, ShouldEqual, "cn=user,o=users,dc=grafana,dc=org") + So(actualPassword, ShouldEqual, "pwd") + }) + + Convey("Given empty bind dn and password", func() { + connection := &MockConnection{} + unauthenticatedBindWasCalled := false + var actualUsername string + connection.unauthenticatedBindProvider = func(username string) error { + unauthenticatedBindWasCalled = true + actualUsername = username + return nil + } + server := &Server{ + Connection: connection, + Config: &ServerConfig{}, + } + err := server.InitialBind("user", "pwd") + So(err, ShouldBeNil) + So(server.requireSecondBind, ShouldBeTrue) + So(unauthenticatedBindWasCalled, ShouldBeTrue) + So(actualUsername, ShouldBeEmpty) }) }) } diff --git a/pkg/services/ldap/test.go b/pkg/services/ldap/test.go index daa6a3216641..6319cddd2807 100644 --- a/pkg/services/ldap/test.go +++ b/pkg/services/ldap/test.go @@ -12,22 +12,24 @@ import ( "github.com/grafana/grafana/pkg/services/login" ) -type mockConnection struct { - searchResult *ldap.SearchResult - searchCalled bool - searchAttributes []string +// MockConnection struct for testing +type MockConnection struct { + SearchResult *ldap.SearchResult + SearchCalled bool + SearchAttributes []string - addParams *ldap.AddRequest - addCalled bool + AddParams *ldap.AddRequest + AddCalled bool - delParams *ldap.DelRequest - delCalled bool + DelParams *ldap.DelRequest + DelCalled bool bindProvider func(username, password string) error unauthenticatedBindProvider func(username string) error } -func (c *mockConnection) Bind(username, password string) error { +// Bind mocks Bind connection function +func (c *MockConnection) Bind(username, password string) error { if c.bindProvider != nil { return c.bindProvider(username, password) } @@ -35,7 +37,8 @@ func (c *mockConnection) Bind(username, password string) error { return nil } -func (c *mockConnection) UnauthenticatedBind(username string) error { +// UnauthenticatedBind mocks UnauthenticatedBind 
connection function +func (c *MockConnection) UnauthenticatedBind(username string) error { if c.unauthenticatedBindProvider != nil { return c.unauthenticatedBindProvider(username) } @@ -43,35 +46,40 @@ func (c *mockConnection) UnauthenticatedBind(username string) error { return nil } -func (c *mockConnection) Close() {} +// Close mocks Close connection function +func (c *MockConnection) Close() {} -func (c *mockConnection) setSearchResult(result *ldap.SearchResult) { - c.searchResult = result +func (c *MockConnection) setSearchResult(result *ldap.SearchResult) { + c.SearchResult = result } -func (c *mockConnection) Search(sr *ldap.SearchRequest) (*ldap.SearchResult, error) { - c.searchCalled = true - c.searchAttributes = sr.Attributes - return c.searchResult, nil +// Search mocks Search connection function +func (c *MockConnection) Search(sr *ldap.SearchRequest) (*ldap.SearchResult, error) { + c.SearchCalled = true + c.SearchAttributes = sr.Attributes + return c.SearchResult, nil } -func (c *mockConnection) Add(request *ldap.AddRequest) error { - c.addCalled = true - c.addParams = request +// Add mocks Add connection function +func (c *MockConnection) Add(request *ldap.AddRequest) error { + c.AddCalled = true + c.AddParams = request return nil } -func (c *mockConnection) Del(request *ldap.DelRequest) error { - c.delCalled = true - c.delParams = request +// Del mocks Del connection function +func (c *MockConnection) Del(request *ldap.DelRequest) error { + c.DelCalled = true + c.DelParams = request return nil } -func (c *mockConnection) StartTLS(*tls.Config) error { +// StartTLS mocks StartTLS connection function +func (c *MockConnection) StartTLS(*tls.Config) error { return nil } -func authScenario(desc string, fn scenarioFunc) { +func serverScenario(desc string, fn scenarioFunc) { Convey(desc, func() { defer bus.ClearBusHandlers() diff --git a/pkg/services/multildap/multildap.go b/pkg/services/multildap/multildap.go index 1b309c646e17..6c2baf1671af 100644 --- 
a/pkg/services/multildap/multildap.go +++ b/pkg/services/multildap/multildap.go @@ -35,9 +35,6 @@ type IMultiLDAP interface { User(login string) ( *models.ExternalUserInfo, error, ) - - Add(dn string, values map[string][]string) error - Remove(dn string) error } // MultiLDAP is basic struct of LDAP authorization @@ -52,55 +49,6 @@ func New(configs []*ldap.ServerConfig) IMultiLDAP { } } -// Add adds user to the *first* defined LDAP -func (multiples *MultiLDAP) Add( - dn string, - values map[string][]string, -) error { - if len(multiples.configs) == 0 { - return ErrNoLDAPServers - } - - config := multiples.configs[0] - ldap := ldap.New(config) - - if err := ldap.Dial(); err != nil { - return err - } - - defer ldap.Close() - - err := ldap.Add(dn, values) - if err != nil { - return err - } - - return nil -} - -// Remove removes user from the *first* defined LDAP -func (multiples *MultiLDAP) Remove(dn string) error { - if len(multiples.configs) == 0 { - return ErrNoLDAPServers - } - - config := multiples.configs[0] - ldap := ldap.New(config) - - if err := ldap.Dial(); err != nil { - return err - } - - defer ldap.Close() - - err := ldap.Remove(dn) - if err != nil { - return err - } - - return nil -} - // Login tries to log in the user in multiples LDAP func (multiples *MultiLDAP) Login(query *models.LoginUserQuery) ( *models.ExternalUserInfo, error, diff --git a/pkg/services/sqlstore/sqlstore.go b/pkg/services/sqlstore/sqlstore.go index 58bcc5578593..675af5f02bb3 100644 --- a/pkg/services/sqlstore/sqlstore.go +++ b/pkg/services/sqlstore/sqlstore.go @@ -37,6 +37,11 @@ var ( const ContextSessionName = "db-session" func init() { + // This change will make xorm use an empty default schema for postgres and + // by that mimic the functionality of how it was functioning before + // xorm's changes above. 
+ xorm.DefaultPostgresSchema = "" + registry.Register(®istry.Descriptor{ Name: "SqlStore", Instance: &SqlStore{}, diff --git a/pkg/services/sqlstore/transactions.go b/pkg/services/sqlstore/transactions.go index a0f648043399..9b744fd32884 100644 --- a/pkg/services/sqlstore/transactions.go +++ b/pkg/services/sqlstore/transactions.go @@ -40,12 +40,12 @@ func inTransactionWithRetryCtx(ctx context.Context, engine *xorm.Engine, callbac err = callback(sess) - // special handling of database locked errors for sqlite, then we can retry 3 times + // special handling of database locked errors for sqlite, then we can retry 5 times if sqlError, ok := err.(sqlite3.Error); ok && retry < 5 { - if sqlError.Code == sqlite3.ErrLocked { + if sqlError.Code == sqlite3.ErrLocked || sqlError.Code == sqlite3.ErrBusy { sess.Rollback() time.Sleep(time.Millisecond * time.Duration(10)) - sqlog.Info("Database table locked, sleeping then retrying", "retry", retry) + sqlog.Info("Database locked, sleeping then retrying", "error", err, "retry", retry) return inTransactionWithRetry(callback, retry+1) } } diff --git a/pkg/services/sqlstore/user.go b/pkg/services/sqlstore/user.go index 3c94e0617f08..641fc5f1344f 100644 --- a/pkg/services/sqlstore/user.go +++ b/pkg/services/sqlstore/user.go @@ -28,6 +28,7 @@ func (ss *SqlStore) addUserQueryAndCommandHandlers() { bus.AddHandler("sql", SearchUsers) bus.AddHandler("sql", GetUserOrgList) bus.AddHandler("sql", DisableUser) + bus.AddHandler("sql", BatchDisableUsers) bus.AddHandler("sql", DeleteUser) bus.AddHandler("sql", UpdateUserPermissions) bus.AddHandler("sql", SetUserHelpFlag) @@ -487,6 +488,31 @@ func DisableUser(cmd *m.DisableUserCommand) error { return err } +func BatchDisableUsers(cmd *m.BatchDisableUsersCommand) error { + return inTransaction(func(sess *DBSession) error { + userIds := cmd.UserIds + + if len(userIds) == 0 { + return nil + } + + user_id_params := strings.Repeat(",?", len(userIds)-1) + disableSQL := "UPDATE " + dialect.Quote("user") + 
" SET is_disabled=? WHERE Id IN (?" + user_id_params + ")" + + disableParams := []interface{}{disableSQL, cmd.IsDisabled} + for _, v := range userIds { + disableParams = append(disableParams, v) + } + + _, err := sess.Exec(disableParams...) + if err != nil { + return err + } + + return nil + }) +} + func DeleteUser(cmd *m.DeleteUserCommand) error { return inTransaction(func(sess *DBSession) error { return deleteUserInTransaction(sess, cmd) diff --git a/pkg/services/sqlstore/user_test.go b/pkg/services/sqlstore/user_test.go index 84640687ed9f..e5807ea7bf57 100644 --- a/pkg/services/sqlstore/user_test.go +++ b/pkg/services/sqlstore/user_test.go @@ -175,6 +175,40 @@ func TestUserDataAccess(t *testing.T) { So(found, ShouldBeTrue) }) }) + + Convey("When batch disabling users", func() { + userIdsToDisable := []int64{} + for i := 0; i < 3; i++ { + userIdsToDisable = append(userIdsToDisable, users[i].Id) + } + disableCmd := m.BatchDisableUsersCommand{UserIds: userIdsToDisable, IsDisabled: true} + + err = BatchDisableUsers(&disableCmd) + So(err, ShouldBeNil) + + Convey("Should disable all provided users", func() { + query := m.SearchUsersQuery{} + err = SearchUsers(&query) + + So(query.Result.TotalCount, ShouldEqual, 5) + for _, user := range query.Result.Users { + shouldBeDisabled := false + + // Check if user id is in the userIdsToDisable list + for _, disabledUserId := range userIdsToDisable { + if user.Id == disabledUserId { + So(user.IsDisabled, ShouldBeTrue) + shouldBeDisabled = true + } + } + + // Otherwise user shouldn't be disabled + if !shouldBeDisabled { + So(user.IsDisabled, ShouldBeFalse) + } + } + }) + }) }) Convey("Given one grafana admin user", func() { diff --git a/pkg/setting/setting.go b/pkg/setting/setting.go index 65e4e0e25021..a6c07d232e10 100644 --- a/pkg/setting/setting.go +++ b/pkg/setting/setting.go @@ -47,10 +47,11 @@ var ( var ( // App settings. 
- Env = DEV - AppUrl string - AppSubUrl string - InstanceName string + Env = DEV + AppUrl string + AppSubUrl string + ServeFromSubPath bool + InstanceName string // build BuildVersion string @@ -205,8 +206,9 @@ type Cfg struct { Logger log.Logger // HTTP Server Settings - AppUrl string - AppSubUrl string + AppUrl string + AppSubUrl string + ServeFromSubPath bool // Paths ProvisioningPath string @@ -610,8 +612,11 @@ func (cfg *Cfg) Load(args *CommandLineArgs) error { if err != nil { return err } + ServeFromSubPath = server.Key("serve_from_sub_path").MustBool(false) + cfg.AppUrl = AppUrl cfg.AppSubUrl = AppSubUrl + cfg.ServeFromSubPath = ServeFromSubPath Protocol = HTTP protocolStr, err := valueAsString(server, "protocol", "http") diff --git a/pkg/tsdb/mssql/macros.go b/pkg/tsdb/mssql/macros.go index ec3f103a4ef2..c0794863efe5 100644 --- a/pkg/tsdb/mssql/macros.go +++ b/pkg/tsdb/mssql/macros.go @@ -6,6 +6,7 @@ import ( "strings" "time" + "github.com/grafana/grafana/pkg/components/gtime" "github.com/grafana/grafana/pkg/tsdb" ) @@ -74,7 +75,7 @@ func (m *msSqlMacroEngine) evaluateMacro(name string, args []string) (string, er if len(args) < 2 { return "", fmt.Errorf("macro %v needs time column and interval", name) } - interval, err := time.ParseDuration(strings.Trim(args[1], `'"`)) + interval, err := gtime.ParseInterval(strings.Trim(args[1], `'"`)) if err != nil { return "", fmt.Errorf("error parsing interval %v", args[1]) } @@ -109,7 +110,7 @@ func (m *msSqlMacroEngine) evaluateMacro(name string, args []string) (string, er if len(args) < 2 { return "", fmt.Errorf("macro %v needs time column and interval and optional fill value", name) } - interval, err := time.ParseDuration(strings.Trim(args[1], `'`)) + interval, err := gtime.ParseInterval(strings.Trim(args[1], `'`)) if err != nil { return "", fmt.Errorf("error parsing interval %v", args[1]) } diff --git a/pkg/tsdb/mysql/macros.go b/pkg/tsdb/mysql/macros.go index bbd928b05631..8cd83d2f9991 100644 --- 
a/pkg/tsdb/mysql/macros.go +++ b/pkg/tsdb/mysql/macros.go @@ -4,8 +4,8 @@ import ( "fmt" "regexp" "strings" - "time" + "github.com/grafana/grafana/pkg/components/gtime" "github.com/grafana/grafana/pkg/tsdb" ) @@ -69,7 +69,7 @@ func (m *mySqlMacroEngine) evaluateMacro(name string, args []string) (string, er if len(args) < 2 { return "", fmt.Errorf("macro %v needs time column and interval", name) } - interval, err := time.ParseDuration(strings.Trim(args[1], `'"`)) + interval, err := gtime.ParseInterval(strings.Trim(args[1], `'"`)) if err != nil { return "", fmt.Errorf("error parsing interval %v", args[1]) } @@ -104,7 +104,7 @@ func (m *mySqlMacroEngine) evaluateMacro(name string, args []string) (string, er if len(args) < 2 { return "", fmt.Errorf("macro %v needs time column and interval and optional fill value", name) } - interval, err := time.ParseDuration(strings.Trim(args[1], `'`)) + interval, err := gtime.ParseInterval(strings.Trim(args[1], `'`)) if err != nil { return "", fmt.Errorf("error parsing interval %v", args[1]) } diff --git a/pkg/tsdb/postgres/macros.go b/pkg/tsdb/postgres/macros.go index 2efba13d31ac..f7a194e63cd1 100644 --- a/pkg/tsdb/postgres/macros.go +++ b/pkg/tsdb/postgres/macros.go @@ -6,6 +6,7 @@ import ( "strings" "time" + "github.com/grafana/grafana/pkg/components/gtime" "github.com/grafana/grafana/pkg/tsdb" ) @@ -95,7 +96,7 @@ func (m *postgresMacroEngine) evaluateMacro(name string, args []string) (string, if len(args) < 2 { return "", fmt.Errorf("macro %v needs time column and interval and optional fill value", name) } - interval, err := time.ParseDuration(strings.Trim(args[1], `'`)) + interval, err := gtime.ParseInterval(strings.Trim(args[1], `'`)) if err != nil { return "", fmt.Errorf("error parsing interval %v", args[1]) } @@ -139,7 +140,7 @@ func (m *postgresMacroEngine) evaluateMacro(name string, args []string) (string, if len(args) < 2 { return "", fmt.Errorf("macro %v needs time column and interval and optional fill value", name) } - 
interval, err := time.ParseDuration(strings.Trim(args[1], `'`)) + interval, err := gtime.ParseInterval(strings.Trim(args[1], `'`)) if err != nil { return "", fmt.Errorf("error parsing interval %v", args[1]) } diff --git a/pkg/util/strings.go b/pkg/util/strings.go index 9eaa141edbfb..9ce5d03e126c 100644 --- a/pkg/util/strings.go +++ b/pkg/util/strings.go @@ -4,6 +4,7 @@ import ( "fmt" "math" "regexp" + "strings" "time" ) @@ -66,3 +67,19 @@ func GetAgeString(t time.Time) string { return "< 1m" } + +// ToCamelCase changes kebab case, snake case or mixed strings to camel case. See unit test for examples. +func ToCamelCase(str string) string { + var finalParts []string + parts := strings.Split(str, "_") + + for _, part := range parts { + finalParts = append(finalParts, strings.Split(part, "-")...) + } + + for index, part := range finalParts[1:] { + finalParts[index+1] = strings.Title(part) + } + + return strings.Join(finalParts, "") +} diff --git a/pkg/util/strings_test.go b/pkg/util/strings_test.go index 0cc1905baff8..4bc52ee75217 100644 --- a/pkg/util/strings_test.go +++ b/pkg/util/strings_test.go @@ -37,3 +37,12 @@ func TestDateAge(t *testing.T) { So(GetAgeString(time.Now().Add(-time.Hour*24*409)), ShouldEqual, "1y") }) } + +func TestToCamelCase(t *testing.T) { + Convey("ToCamelCase", t, func() { + So(ToCamelCase("kebab-case-string"), ShouldEqual, "kebabCaseString") + So(ToCamelCase("snake_case_string"), ShouldEqual, "snakeCaseString") + So(ToCamelCase("mixed-case_string"), ShouldEqual, "mixedCaseString") + So(ToCamelCase("alreadyCamelCase"), ShouldEqual, "alreadyCamelCase") + }) +} diff --git a/public/app/core/actions/application.ts b/public/app/core/actions/application.ts new file mode 100644 index 000000000000..9bde989e8ca6 --- /dev/null +++ b/public/app/core/actions/application.ts @@ -0,0 +1,3 @@ +import { noPayloadActionCreatorFactory } from 'app/core/redux'; + +export const toggleLogActions = noPayloadActionCreatorFactory('TOGGLE_LOG_ACTIONS').create(); diff 
--git a/public/app/core/components/PluginHelp/PluginHelp.tsx b/public/app/core/components/PluginHelp/PluginHelp.tsx index 677fb254314e..40aed4a6c0c8 100644 --- a/public/app/core/components/PluginHelp/PluginHelp.tsx +++ b/public/app/core/components/PluginHelp/PluginHelp.tsx @@ -1,7 +1,7 @@ import React, { PureComponent } from 'react'; // @ts-ignore import Remarkable from 'remarkable'; -import { getBackendSrv } from '../../services/backend_srv'; +import { getBackendSrv } from '@grafana/runtime'; interface Props { plugin: { diff --git a/public/app/core/components/SharedPreferences/SharedPreferences.tsx b/public/app/core/components/SharedPreferences/SharedPreferences.tsx index 3b804ba47051..b6d19f1f8af2 100644 --- a/public/app/core/components/SharedPreferences/SharedPreferences.tsx +++ b/public/app/core/components/SharedPreferences/SharedPreferences.tsx @@ -1,9 +1,9 @@ import React, { PureComponent } from 'react'; import { FormLabel, Select } from '@grafana/ui'; -import { getBackendSrv, BackendSrv } from 'app/core/services/backend_srv'; import { DashboardSearchHit, DashboardSearchHitType } from 'app/types'; +import { getBackendSrv } from 'app/core/services/backend_srv'; export interface Props { resourceUri: string; @@ -25,7 +25,7 @@ const timezones = [ ]; export class SharedPreferences extends PureComponent { - backendSrv: BackendSrv = getBackendSrv(); + backendSrv = getBackendSrv(); constructor(props: Props) { super(props); diff --git a/public/app/core/logs_model.ts b/public/app/core/logs_model.ts index d2a4780b62a7..5fe95a182d07 100644 --- a/public/app/core/logs_model.ts +++ b/public/app/core/logs_model.ts @@ -13,6 +13,7 @@ import { toLegacyResponseData, FieldCache, FieldType, + getLogLevelFromKey, LogRowModel, LogsModel, LogsMetaItem, @@ -368,7 +369,17 @@ export function processLogSeriesRow( const timeEpochMs = time.valueOf(); const timeFromNow = time.fromNow(); const timeLocal = time.format('YYYY-MM-DD HH:mm:ss'); - const logLevel = getLogLevel(message); + + let 
logLevel = LogLevel.unknown; + const logLevelField = fieldCache.getFieldByName('level'); + + if (logLevelField) { + logLevel = getLogLevelFromKey(row[logLevelField.index]); + } else if (series.labels && Object.keys(series.labels).indexOf('level') !== -1) { + logLevel = getLogLevelFromKey(series.labels['level']); + } else { + logLevel = getLogLevel(message); + } const hasAnsi = hasAnsiCodes(message); const searchWords = series.meta && series.meta.searchWords ? series.meta.searchWords : []; diff --git a/public/app/core/middlewares/application.ts b/public/app/core/middlewares/application.ts new file mode 100644 index 000000000000..3ca9768d626f --- /dev/null +++ b/public/app/core/middlewares/application.ts @@ -0,0 +1,27 @@ +import { Store, Dispatch } from 'redux'; +import { StoreState } from 'app/types/store'; +import { ActionOf } from '../redux/actionCreatorFactory'; +import { toggleLogActions } from '../actions/application'; + +export const toggleLogActionsMiddleware = (store: Store) => (next: Dispatch) => (action: ActionOf) => { + const isLogActionsAction = action.type === toggleLogActions.type; + if (isLogActionsAction) { + return next(action); + } + + const logActionsTrue = + window && window.location && window.location.search && window.location.search.indexOf('logActions=true') !== -1; + const logActionsFalse = + window && window.location && window.location.search && window.location.search.indexOf('logActions=false') !== -1; + const logActions = store.getState().application.logActions; + + if (logActionsTrue && !logActions) { + store.dispatch(toggleLogActions()); + } + + if (logActionsFalse && logActions) { + store.dispatch(toggleLogActions()); + } + + return next(action); +}; diff --git a/public/app/core/reducers/application.ts b/public/app/core/reducers/application.ts new file mode 100644 index 000000000000..458f49316191 --- /dev/null +++ b/public/app/core/reducers/application.ts @@ -0,0 +1,17 @@ +import { ApplicationState } from 'app/types/application'; 
+import { reducerFactory } from 'app/core/redux'; +import { toggleLogActions } from '../actions/application'; + +export const initialState: ApplicationState = { + logActions: false, +}; + +export const applicationReducer = reducerFactory(initialState) + .addMapper({ + filter: toggleLogActions, + mapper: (state): ApplicationState => ({ + ...state, + logActions: !state.logActions, + }), + }) + .create(); diff --git a/public/app/core/reducers/index.ts b/public/app/core/reducers/index.ts index 1c8670ed0d6c..cc0c950ec4a0 100644 --- a/public/app/core/reducers/index.ts +++ b/public/app/core/reducers/index.ts @@ -1,9 +1,11 @@ import { navIndexReducer as navIndex } from './navModel'; import { locationReducer as location } from './location'; import { appNotificationsReducer as appNotifications } from './appNotification'; +import { applicationReducer as application } from './application'; export default { navIndex, location, appNotifications, + application, }; diff --git a/public/app/core/services/AngularLoader.ts b/public/app/core/services/AngularLoader.ts index 817e9c9f3985..ea4487ca2967 100644 --- a/public/app/core/services/AngularLoader.ts +++ b/public/app/core/services/AngularLoader.ts @@ -2,13 +2,9 @@ import angular from 'angular'; import coreModule from 'app/core/core_module'; import _ from 'lodash'; -export interface AngularComponent { - destroy(): void; - digest(): void; - getScope(): any; -} +import { AngularComponent, AngularLoader } from '@grafana/runtime'; -export class AngularLoader { +export class AngularLoaderClass implements AngularLoader { /** @ngInject */ constructor(private $compile: any, private $rootScope: any) {} @@ -38,15 +34,4 @@ export class AngularLoader { } } -coreModule.service('angularLoader', AngularLoader); - -let angularLoaderInstance: AngularLoader; - -export function setAngularLoader(pl: AngularLoader) { - angularLoaderInstance = pl; -} - -// away to access it from react -export function getAngularLoader(): AngularLoader { - return 
angularLoaderInstance; -} +coreModule.service('angularLoader', AngularLoaderClass); diff --git a/public/app/core/services/backend_srv.ts b/public/app/core/services/backend_srv.ts index e14b5f57b288..0f099c93d767 100644 --- a/public/app/core/services/backend_srv.ts +++ b/public/app/core/services/backend_srv.ts @@ -7,8 +7,9 @@ import { DashboardModel } from 'app/features/dashboard/state/DashboardModel'; import { DashboardSearchHit } from 'app/types/search'; import { ContextSrv } from './context_srv'; import { FolderInfo, DashboardDTO } from 'app/types'; +import { BackendSrv as BackendService, getBackendSrv as getBackendService, BackendSrvRequest } from '@grafana/runtime'; -export class BackendSrv { +export class BackendSrv implements BackendService { private inFlightRequests: { [key: string]: Array> } = {}; private HTTP_REQUEST_CANCELED = -1; private noBackendCache: boolean; @@ -83,7 +84,7 @@ export class BackendSrv { throw data; } - request(options: any) { + request(options: BackendSrvRequest) { options.retry = options.retry || 0; const requestIsLocal = !options.url.match(/^http/); const firstAttempt = options.retry === 0; @@ -385,16 +386,7 @@ export class BackendSrv { coreModule.service('backendSrv', BackendSrv); -// -// Code below is to expore the service to react components -// - -let singletonInstance: BackendSrv; - -export function setBackendSrv(instance: BackendSrv) { - singletonInstance = instance; -} - +// Used for testing and things that really need BackendSrv export function getBackendSrv(): BackendSrv { - return singletonInstance; + return getBackendService() as BackendSrv; } diff --git a/public/app/core/specs/file_export.test.ts b/public/app/core/specs/file_export.test.ts index 9e2ff0a7ce16..ab254a94f2b4 100644 --- a/public/app/core/specs/file_export.test.ts +++ b/public/app/core/specs/file_export.test.ts @@ -92,6 +92,7 @@ describe('file_export', () => { [0x123, 'some string with \n in the middle', 10.01, false], [0b1011, 'some string with ; in the 
middle', -12.34, true], [123, 'some string with ;; in the middle', -12.34, true], + [1234, '=a bogus formula ', '-and another', '+another', '@ref'], ], }; @@ -108,7 +109,8 @@ describe('file_export', () => { '501;"some string with "" at the end""";0.01;false\r\n' + '291;"some string with \n in the middle";10.01;false\r\n' + '11;"some string with ; in the middle";-12.34;true\r\n' + - '123;"some string with ;; in the middle";-12.34;true'; + '123;"some string with ;; in the middle";-12.34;true\r\n' + + '1234;"\'=a bogus formula";"\'-and another";"\'+another";"\'@ref"'; expect(returnedText).toBe(expectedText); }); diff --git a/public/app/core/specs/logs_model.test.ts b/public/app/core/specs/logs_model.test.ts index a2d47412bd06..c83f0ce6c1c0 100644 --- a/public/app/core/specs/logs_model.test.ts +++ b/public/app/core/specs/logs_model.test.ts @@ -1,4 +1,4 @@ -import { SeriesData, FieldType, LogsModel, LogsMetaKind, LogsDedupStrategy } from '@grafana/ui'; +import { SeriesData, FieldType, LogsModel, LogsMetaKind, LogsDedupStrategy, LogLevel } from '@grafana/ui'; import { dedupLogRows, calculateFieldStats, @@ -460,8 +460,12 @@ describe('seriesDataToLogsModel', () => { name: 'message', type: FieldType.string, }, + { + name: 'level', + type: FieldType.string, + }, ], - rows: [['1970-01-01T00:00:01Z', 'WARN boooo']], + rows: [['1970-01-01T00:00:01Z', 'WARN boooo', 'dbug']], }, ]; const logsModel = seriesDataToLogsModel(series, 0); @@ -470,7 +474,7 @@ describe('seriesDataToLogsModel', () => { { entry: 'WARN boooo', labels: undefined, - logLevel: 'warning', + logLevel: LogLevel.debug, uniqueLabels: {}, }, ]); @@ -482,6 +486,7 @@ describe('seriesDataToLogsModel', () => { labels: { foo: 'bar', baz: '1', + level: 'dbug', }, fields: [ { @@ -500,6 +505,7 @@ describe('seriesDataToLogsModel', () => { labels: { foo: 'bar', baz: '2', + level: 'err', }, fields: [ { @@ -521,19 +527,19 @@ describe('seriesDataToLogsModel', () => { { entry: 'INFO 2', labels: { foo: 'bar', baz: '2' }, - 
logLevel: 'info', + logLevel: LogLevel.error, uniqueLabels: { baz: '2' }, }, { entry: 'WARN boooo', labels: { foo: 'bar', baz: '1' }, - logLevel: 'warning', + logLevel: LogLevel.debug, uniqueLabels: { baz: '1' }, }, { entry: 'INFO 1', labels: { foo: 'bar', baz: '2' }, - logLevel: 'info', + logLevel: LogLevel.error, uniqueLabels: { baz: '2' }, }, ]); diff --git a/public/app/core/time_series2.ts b/public/app/core/time_series2.ts index 05815ab7ab38..d7a57b77afc9 100644 --- a/public/app/core/time_series2.ts +++ b/public/app/core/time_series2.ts @@ -329,7 +329,7 @@ export default class TimeSeries { isMsResolutionNeeded() { for (let i = 0; i < this.datapoints.length; i++) { - if (this.datapoints[i][1] !== null) { + if (this.datapoints[i][1] !== null && this.datapoints[i][1] !== undefined) { const timestamp = this.datapoints[i][1].toString(); if (timestamp.length === 13 && timestamp % 1000 !== 0) { return true; diff --git a/public/app/core/utils/explore.ts b/public/app/core/utils/explore.ts index 99e168b8590f..4a4697d7d0a2 100644 --- a/public/app/core/utils/explore.ts +++ b/public/app/core/utils/explore.ts @@ -1,44 +1,35 @@ // Libraries import _ from 'lodash'; +import { from } from 'rxjs'; +import { toUtc } from '@grafana/ui/src/utils/moment_wrapper'; +import { isLive } from '@grafana/ui/src/components/RefreshPicker/RefreshPicker'; // Services & Utils import * as dateMath from '@grafana/ui/src/utils/datemath'; import { renderUrl } from 'app/core/utils/url'; import kbn from 'app/core/utils/kbn'; import store from 'app/core/store'; -import TableModel, { mergeTablesIntoModel } from 'app/core/table_model'; import { getNextRefIdChar } from './query'; // Types import { - colors, TimeRange, RawTimeRange, TimeZone, IntervalValues, DataQuery, DataSourceApi, - toSeriesData, - guessFieldTypes, TimeFragment, DataQueryError, LogRowModel, LogsModel, LogsDedupStrategy, + DataSourceJsonData, + DataQueryRequest, + DataStreamObserver, } from '@grafana/ui'; -import TimeSeries from 
'app/core/time_series2'; -import { - ExploreUrlState, - HistoryItem, - QueryTransaction, - ResultType, - QueryIntervals, - QueryOptions, - ResultGetter, -} from 'app/types/explore'; -import { seriesDataToLogsModel } from 'app/core/logs_model'; -import { toUtc } from '@grafana/ui/src/utils/moment_wrapper'; -import { isLive } from '@grafana/ui/src/components/RefreshPicker/RefreshPicker'; +import { ExploreUrlState, HistoryItem, QueryTransaction, QueryIntervals, QueryOptions } from 'app/types/explore'; +import { config } from '../config'; export const DEFAULT_RANGE = { from: 'now-6h', @@ -116,7 +107,6 @@ export async function getExploreUrl( export function buildQueryTransaction( queries: DataQuery[], - resultType: ResultType, queryOptions: QueryOptions, range: TimeRange, queryIntervals: QueryIntervals, @@ -137,7 +127,7 @@ export function buildQueryTransaction( // Using `format` here because it relates to the view panel that the request is for. // However, some datasources don't use `panelId + query.refId`, but only `panelId`. // Therefore panel id has to be unique. - const panelId = `${queryOptions.format}-${key}`; + const panelId = `${key}`; const options = { interval, @@ -156,7 +146,6 @@ export function buildQueryTransaction( return { queries, options, - resultType, scanning, id: generateKey(), // reusing for unique ID done: false, @@ -328,28 +317,6 @@ export function hasNonEmptyQuery(queries: TQuery ); } -export function calculateResultsFromQueryTransactions(result: any, resultType: ResultType, graphInterval: number) { - const flattenedResult: any[] = _.flatten(result); - const graphResult = resultType === 'Graph' && result ? result : null; - const tableResult = - resultType === 'Table' && result - ? mergeTablesIntoModel( - new TableModel(), - ...flattenedResult.filter((r: any) => r.columns && r.rows).map((r: any) => r as TableModel) - ) - : mergeTablesIntoModel(new TableModel()); - const logsResult = - resultType === 'Logs' && result - ? 
seriesDataToLogsModel(flattenedResult.map(r => guessFieldTypes(toSeriesData(r))), graphInterval) - : null; - - return { - graphResult, - tableResult, - logsResult, - }; -} - export function getIntervals(range: TimeRange, lowLimit: string, resolution: number): IntervalValues { if (!resolution) { return { interval: '1s', intervalMs: 1000 }; @@ -358,37 +325,6 @@ export function getIntervals(range: TimeRange, lowLimit: string, resolution: num return kbn.calculateInterval(range, resolution, lowLimit); } -export const makeTimeSeriesList: ResultGetter = (dataList, transaction, allTransactions) => { - // Prevent multiple Graph transactions to have the same colors - let colorIndexOffset = 0; - for (const other of allTransactions) { - // Only need to consider transactions that came before the current one - if (other === transaction) { - break; - } - // Count timeseries of previous query results - if (other.resultType === 'Graph' && other.done) { - colorIndexOffset += other.result.length; - } - } - - return dataList.map((seriesData, index: number) => { - const datapoints = seriesData.datapoints || []; - const alias = seriesData.target; - const colorIndex = (colorIndexOffset + index) % colors.length; - const color = colors[colorIndex]; - - const series = new TimeSeries({ - datapoints, - alias, - color, - unit: seriesData.unit, - }); - - return series; - }); -}; - /** * Update the query history. Side-effect: store history in local storage */ @@ -566,3 +502,20 @@ export const sortLogsResult = (logsResult: LogsModel, refreshInterval: string) = return result; }; + +export const convertToWebSocketUrl = (url: string) => { + const protocol = window.location.protocol === 'https:' ? 
'wss://' : 'ws://'; + let backend = `${protocol}${window.location.host}${config.appSubUrl}`; + if (backend.endsWith('/')) { + backend = backend.slice(0, backend.length - 1); + } + return `${backend}${url}`; +}; + +export const getQueryResponse = ( + datasourceInstance: DataSourceApi, + options: DataQueryRequest, + observer?: DataStreamObserver +) => { + return from(datasourceInstance.query(options, observer)); +}; diff --git a/public/app/core/utils/file_export.ts b/public/app/core/utils/file_export.ts index 6d341b5582e2..ae8d0ad06dea 100644 --- a/public/app/core/utils/file_export.ts +++ b/public/app/core/utils/file_export.ts @@ -17,7 +17,11 @@ function csvEscaped(text) { return text; } - return text.split(QUOTE).join(QUOTE + QUOTE); + return text + .split(QUOTE) + .join(QUOTE + QUOTE) + .replace(/^([-+=@])/, "'$1") + .replace(/\s+$/, ''); } const domParser = new DOMParser(); diff --git a/public/app/core/utils/kbn.ts b/public/app/core/utils/kbn.ts index d747fa37f579..1a1cf6f56ba1 100644 --- a/public/app/core/utils/kbn.ts +++ b/public/app/core/utils/kbn.ts @@ -1,4 +1,4 @@ -import _ from 'lodash'; +import { has } from 'lodash'; import { getValueFormat, getValueFormatterIndex, getValueFormats, stringToJsRegex } from '@grafana/ui'; import deprecationWarning from '@grafana/ui/src/utils/deprecationWarning'; @@ -133,7 +133,7 @@ kbn.secondsToHms = seconds => { }; kbn.secondsToHhmmss = seconds => { - const strings = []; + const strings: string[] = []; const numhours = Math.floor(seconds / 3600); const numminutes = Math.floor((seconds % 3600) / 60); const numseconds = Math.floor((seconds % 3600) % 60); @@ -193,7 +193,7 @@ kbn.calculateInterval = (range, resolution, lowLimitInterval) => { kbn.describe_interval = str => { const matches = str.match(kbn.interval_regex); - if (!matches || !_.has(kbn.intervals_in_seconds, matches[2])) { + if (!matches || !has(kbn.intervals_in_seconds, matches[2])) { throw new Error('Invalid interval string, expecting a number followed by one of 
"Mwdhmsy"'); } else { return { diff --git a/public/app/features/admin/state/apis.ts b/public/app/features/admin/state/apis.ts index 05321c6e7148..1166fa4dc011 100644 --- a/public/app/features/admin/state/apis.ts +++ b/public/app/features/admin/state/apis.ts @@ -1,4 +1,4 @@ -import { getBackendSrv } from 'app/core/services/backend_srv'; +import { getBackendSrv } from '@grafana/runtime'; export interface ServerStat { name: string; diff --git a/public/app/features/alerting/AlertTab.tsx b/public/app/features/alerting/AlertTab.tsx index c7d1a8e058d9..2f293010b907 100644 --- a/public/app/features/alerting/AlertTab.tsx +++ b/public/app/features/alerting/AlertTab.tsx @@ -2,7 +2,7 @@ import React, { PureComponent } from 'react'; // Services & Utils -import { AngularComponent, getAngularLoader } from 'app/core/services/AngularLoader'; +import { AngularComponent, getAngularLoader } from '@grafana/runtime'; import appEvents from 'app/core/app_events'; // Components diff --git a/public/app/features/alerting/StateHistory.tsx b/public/app/features/alerting/StateHistory.tsx index c0c804c8bd1e..2a114ec00d10 100644 --- a/public/app/features/alerting/StateHistory.tsx +++ b/public/app/features/alerting/StateHistory.tsx @@ -1,6 +1,6 @@ import React, { PureComponent } from 'react'; import alertDef from './state/alertDef'; -import { getBackendSrv } from 'app/core/services/backend_srv'; +import { getBackendSrv } from '@grafana/runtime'; import { DashboardModel } from '../dashboard/state/DashboardModel'; import appEvents from '../../core/app_events'; diff --git a/public/app/features/alerting/TestRuleResult.tsx b/public/app/features/alerting/TestRuleResult.tsx index e8f0551d7073..509ea1721cbd 100644 --- a/public/app/features/alerting/TestRuleResult.tsx +++ b/public/app/features/alerting/TestRuleResult.tsx @@ -1,6 +1,6 @@ import React, { PureComponent } from 'react'; import { JSONFormatter } from 'app/core/components/JSONFormatter/JSONFormatter'; -import { getBackendSrv } from 
'app/core/services/backend_srv'; +import { getBackendSrv } from '@grafana/runtime'; import { DashboardModel } from '../dashboard/state/DashboardModel'; import { LoadingPlaceholder } from '@grafana/ui/src'; diff --git a/public/app/features/alerting/state/actions.ts b/public/app/features/alerting/state/actions.ts index 5ec84fe051d4..3ca51d521344 100644 --- a/public/app/features/alerting/state/actions.ts +++ b/public/app/features/alerting/state/actions.ts @@ -1,4 +1,4 @@ -import { getBackendSrv } from 'app/core/services/backend_srv'; +import { getBackendSrv } from '@grafana/runtime'; import { AlertRuleDTO, StoreState } from 'app/types'; import { ThunkAction } from 'redux-thunk'; diff --git a/public/app/features/dashboard/components/DashNav/DashNav.tsx b/public/app/features/dashboard/components/DashNav/DashNav.tsx index f95e34d2d2e1..8db88e9ba55e 100644 --- a/public/app/features/dashboard/components/DashNav/DashNav.tsx +++ b/public/app/features/dashboard/components/DashNav/DashNav.tsx @@ -3,7 +3,7 @@ import React, { PureComponent } from 'react'; import { connect } from 'react-redux'; // Utils & Services -import { AngularComponent, getAngularLoader } from 'app/core/services/AngularLoader'; +import { AngularComponent, getAngularLoader } from '@grafana/runtime'; import { appEvents } from 'app/core/app_events'; import { PlaylistSrv } from 'app/features/playlist/playlist_srv'; diff --git a/public/app/features/dashboard/components/DashboardSettings/DashboardSettings.tsx b/public/app/features/dashboard/components/DashboardSettings/DashboardSettings.tsx index a043bc3e0daf..b724b89d9425 100644 --- a/public/app/features/dashboard/components/DashboardSettings/DashboardSettings.tsx +++ b/public/app/features/dashboard/components/DashboardSettings/DashboardSettings.tsx @@ -2,7 +2,7 @@ import React, { PureComponent } from 'react'; // Utils & Services -import { AngularComponent, getAngularLoader } from 'app/core/services/AngularLoader'; +import { AngularComponent, getAngularLoader } 
from '@grafana/runtime'; // Types import { DashboardModel } from '../../state/DashboardModel'; diff --git a/public/app/features/dashboard/components/SubMenu/SubMenu.tsx b/public/app/features/dashboard/components/SubMenu/SubMenu.tsx index bb18481d51a7..6f2a60f624ef 100644 --- a/public/app/features/dashboard/components/SubMenu/SubMenu.tsx +++ b/public/app/features/dashboard/components/SubMenu/SubMenu.tsx @@ -2,7 +2,7 @@ import React, { PureComponent } from 'react'; // Utils & Services -import { AngularComponent, getAngularLoader } from 'app/core/services/AngularLoader'; +import { AngularComponent, getAngularLoader } from '@grafana/runtime'; // Types import { DashboardModel } from '../../state/DashboardModel'; diff --git a/public/app/features/dashboard/dashgrid/DashboardPanel.tsx b/public/app/features/dashboard/dashgrid/DashboardPanel.tsx index e076ee5093cd..72977e7ebc15 100644 --- a/public/app/features/dashboard/dashgrid/DashboardPanel.tsx +++ b/public/app/features/dashboard/dashgrid/DashboardPanel.tsx @@ -3,7 +3,7 @@ import React, { PureComponent } from 'react'; import classNames from 'classnames'; // Utils & Services -import { getAngularLoader, AngularComponent } from 'app/core/services/AngularLoader'; +import { getAngularLoader, AngularComponent } from '@grafana/runtime'; import { importPanelPlugin } from 'app/features/plugins/plugin_loader'; // Components diff --git a/public/app/features/dashboard/panel_editor/GeneralTab.tsx b/public/app/features/dashboard/panel_editor/GeneralTab.tsx index 01a6e39cedba..ddbbb0d88798 100644 --- a/public/app/features/dashboard/panel_editor/GeneralTab.tsx +++ b/public/app/features/dashboard/panel_editor/GeneralTab.tsx @@ -1,6 +1,6 @@ import React, { PureComponent } from 'react'; -import { getAngularLoader, AngularComponent } from 'app/core/services/AngularLoader'; +import { getAngularLoader, AngularComponent } from '@grafana/runtime'; import { EditorTabBody } from './EditorTabBody'; import { PanelModel } from '../state/PanelModel'; 
diff --git a/public/app/features/dashboard/panel_editor/PanelEditor.tsx b/public/app/features/dashboard/panel_editor/PanelEditor.tsx index 722b211e4ef1..dde5f8440c17 100644 --- a/public/app/features/dashboard/panel_editor/PanelEditor.tsx +++ b/public/app/features/dashboard/panel_editor/PanelEditor.tsx @@ -9,7 +9,7 @@ import { AlertTab } from '../../alerting/AlertTab'; import config from 'app/core/config'; import { store } from 'app/store/store'; import { updateLocation } from 'app/core/actions'; -import { AngularComponent } from 'app/core/services/AngularLoader'; +import { AngularComponent } from '@grafana/runtime'; import { PanelModel } from '../state/PanelModel'; import { DashboardModel } from '../state/DashboardModel'; diff --git a/public/app/features/dashboard/panel_editor/QueryEditorRow.tsx b/public/app/features/dashboard/panel_editor/QueryEditorRow.tsx index 8b5f6b964f24..ca66d84ad784 100644 --- a/public/app/features/dashboard/panel_editor/QueryEditorRow.tsx +++ b/public/app/features/dashboard/panel_editor/QueryEditorRow.tsx @@ -5,7 +5,7 @@ import _ from 'lodash'; // Utils & Services import { getDatasourceSrv } from 'app/features/plugins/datasource_srv'; -import { AngularComponent, getAngularLoader } from 'app/core/services/AngularLoader'; +import { AngularComponent, getAngularLoader } from '@grafana/runtime'; import { Emitter } from 'app/core/utils/emitter'; import { getTimeSrv } from 'app/features/dashboard/services/TimeSrv'; diff --git a/public/app/features/dashboard/panel_editor/VisualizationTab.tsx b/public/app/features/dashboard/panel_editor/VisualizationTab.tsx index 0eb352ca8061..f67532dd3980 100644 --- a/public/app/features/dashboard/panel_editor/VisualizationTab.tsx +++ b/public/app/features/dashboard/panel_editor/VisualizationTab.tsx @@ -2,7 +2,7 @@ import React, { PureComponent } from 'react'; // Utils & Services -import { AngularComponent, getAngularLoader } from 'app/core/services/AngularLoader'; +import { AngularComponent, getAngularLoader } 
from '@grafana/runtime'; import { connectWithStore } from 'app/core/utils/connectWithReduxStore'; import { StoreState } from 'app/types'; import { updateLocation } from 'app/core/actions'; diff --git a/public/app/features/dashboard/state/actions.ts b/public/app/features/dashboard/state/actions.ts index 50f645095755..7b01975e29d3 100644 --- a/public/app/features/dashboard/state/actions.ts +++ b/public/app/features/dashboard/state/actions.ts @@ -1,5 +1,5 @@ // Services & Utils -import { getBackendSrv } from 'app/core/services/backend_srv'; +import { getBackendSrv } from '@grafana/runtime'; import { actionCreatorFactory, noPayloadActionCreatorFactory } from 'app/core/redux'; import { createSuccessNotification } from 'app/core/copy/appNotification'; diff --git a/public/app/features/datasources/NewDataSourcePage.tsx b/public/app/features/datasources/NewDataSourcePage.tsx index 6420225f2afe..bd16ba6e8840 100644 --- a/public/app/features/datasources/NewDataSourcePage.tsx +++ b/public/app/features/datasources/NewDataSourcePage.tsx @@ -6,7 +6,7 @@ import { StoreState } from 'app/types'; import { addDataSource, loadDataSourceTypes, setDataSourceTypeSearchQuery } from './state/actions'; import { getDataSourceTypes } from './state/selectors'; import { FilterInput } from 'app/core/components/FilterInput/FilterInput'; -import { NavModel, DataSourcePluginMeta, List } from '@grafana/ui'; +import { NavModel, DataSourcePluginMeta, List, PluginType } from '@grafana/ui'; export interface Props { navModel: NavModel; @@ -43,6 +43,7 @@ class NewDataSourcePage extends PureComponent { loki: 90, mysql: 80, postgres: 79, + gcloud: -1, }; componentDidMount() { @@ -114,6 +115,8 @@ class NewDataSourcePage extends PureComponent { {} as DataSourceCategories ); + categories['cloud'].push(getGrafanaCloudPhantomPlugin()); + return ( <> {this.categoryInfoList.map(category => ( @@ -174,7 +177,9 @@ interface DataSourceTypeCardProps { } const DataSourceTypeCard: FC = props => { - const { plugin, 
onClick, onLearnMoreClick } = props; + const { plugin, onLearnMoreClick } = props; + const canSelect = plugin.id !== 'gcloud'; + const onClick = canSelect ? props.onClick : () => {}; // find first plugin info link const learnMoreLink = plugin.info.links && plugin.info.links.length > 0 ? plugin.info.links[0].url : null; @@ -188,16 +193,45 @@ const DataSourceTypeCard: FC = props => {
{learnMoreLink && ( - - Learn more + + Learn more )} - + {canSelect && }
); }; +function getGrafanaCloudPhantomPlugin(): DataSourcePluginMeta { + return { + id: 'gcloud', + name: 'Grafana Cloud', + type: PluginType.datasource, + module: '', + baseUrl: '', + info: { + description: 'Hosted Graphite, Prometheus and Loki', + logos: { small: 'public/img/grafana_icon.svg', large: 'asd' }, + author: { name: 'Grafana Labs' }, + links: [ + { + url: 'https://grafana.com/cloud', + name: 'Learn more', + }, + ], + screenshots: [], + updated: '2019-05-10', + version: '1.0.0', + }, + }; +} + export function getNavModel(): NavModel { const main = { icon: 'gicon gicon-add-datasources', diff --git a/public/app/features/datasources/settings/DataSourceSettingsPage.tsx b/public/app/features/datasources/settings/DataSourceSettingsPage.tsx index 5c31b946149c..30d4d6ea38ca 100644 --- a/public/app/features/datasources/settings/DataSourceSettingsPage.tsx +++ b/public/app/features/datasources/settings/DataSourceSettingsPage.tsx @@ -276,7 +276,7 @@ export class DataSourceSettingsPage extends PureComponent {
{testingMessage && ( -
+
{testingStatus === 'error' ? ( @@ -285,7 +285,9 @@ export class DataSourceSettingsPage extends PureComponent { )}
-
{testingMessage}
+
+ {testingMessage} +
)} diff --git a/public/app/features/datasources/settings/PluginSettings.tsx b/public/app/features/datasources/settings/PluginSettings.tsx index a7462cbb45c7..58da3cc55f49 100644 --- a/public/app/features/datasources/settings/PluginSettings.tsx +++ b/public/app/features/datasources/settings/PluginSettings.tsx @@ -8,7 +8,7 @@ import { DataQuery, DataSourceJsonData, } from '@grafana/ui'; -import { getAngularLoader, AngularComponent } from 'app/core/services/AngularLoader'; +import { getAngularLoader, AngularComponent } from '@grafana/runtime'; export type GenericDataSourcePlugin = DataSourcePlugin>; diff --git a/public/app/features/datasources/state/actions.ts b/public/app/features/datasources/state/actions.ts index a09289500693..9fb003bc0c4c 100644 --- a/public/app/features/datasources/state/actions.ts +++ b/public/app/features/datasources/state/actions.ts @@ -1,6 +1,6 @@ import { ThunkAction } from 'redux-thunk'; import config from '../../../core/config'; -import { getBackendSrv } from 'app/core/services/backend_srv'; +import { getBackendSrv } from '@grafana/runtime'; import { getDatasourceSrv } from 'app/features/plugins/datasource_srv'; import { LayoutMode } from 'app/core/components/LayoutSelector/LayoutSelector'; import { updateLocation, updateNavIndex, UpdateNavIndexAction } from 'app/core/actions'; diff --git a/public/app/features/explore/Explore.tsx b/public/app/features/explore/Explore.tsx index eef4b8b21dc9..8028e8362d79 100644 --- a/public/app/features/explore/Explore.tsx +++ b/public/app/features/explore/Explore.tsx @@ -51,11 +51,11 @@ import { } from 'app/core/utils/explore'; import { Emitter } from 'app/core/utils/emitter'; import { ExploreToolbar } from './ExploreToolbar'; -import { scanStopAction } from './state/actionTypes'; import { NoDataSourceCallToAction } from './NoDataSourceCallToAction'; import { FadeIn } from 'app/core/components/Animations/FadeIn'; import { getTimeZone } from '../profile/state/selectors'; import { ErrorContainer } from 
'./ErrorContainer'; +import { scanStopAction } from './state/actionTypes'; interface ExploreProps { StartPage?: ComponentClass; diff --git a/public/app/features/explore/ExploreToolbar.tsx b/public/app/features/explore/ExploreToolbar.tsx index 9d6c4a1d3d96..9d3cb9841208 100644 --- a/public/app/features/explore/ExploreToolbar.tsx +++ b/public/app/features/explore/ExploreToolbar.tsx @@ -10,6 +10,7 @@ import { TimeZone, TimeRange, SelectOptionItem, + LoadingState, } from '@grafana/ui'; import { DataSourcePicker } from 'app/core/components/Select/DataSourcePicker'; import { StoreState } from 'app/types/store'; @@ -261,9 +262,7 @@ const mapStateToProps = (state: StoreState, { exploreId }: OwnProps): StateProps exploreDatasources, range, refreshInterval, - graphIsLoading, - logIsLoading, - tableIsLoading, + loadingState, supportedModes, mode, isLive, @@ -271,8 +270,9 @@ const mapStateToProps = (state: StoreState, { exploreId }: OwnProps): StateProps const selectedDatasource = datasourceInstance ? exploreDatasources.find(datasource => datasource.name === datasourceInstance.name) : undefined; - const loading = graphIsLoading || logIsLoading || tableIsLoading; - const hasLiveOption = datasourceInstance && datasourceInstance.convertToStreamTargets ? true : false; + const loading = loadingState === LoadingState.Loading || loadingState === LoadingState.Streaming; + const hasLiveOption = + datasourceInstance && datasourceInstance.meta && datasourceInstance.meta.streaming ? 
true : false; const supportedModeOptions: Array> = []; let selectedModeOption = null; diff --git a/public/app/features/explore/GraphContainer.tsx b/public/app/features/explore/GraphContainer.tsx index 0fba2ae6ded4..6d1bb6c4e387 100644 --- a/public/app/features/explore/GraphContainer.tsx +++ b/public/app/features/explore/GraphContainer.tsx @@ -1,7 +1,7 @@ import React, { PureComponent } from 'react'; import { hot } from 'react-hot-loader'; import { connect } from 'react-redux'; -import { TimeRange, TimeZone, AbsoluteTimeRange } from '@grafana/ui'; +import { TimeRange, TimeZone, AbsoluteTimeRange, LoadingState } from '@grafana/ui'; import { ExploreId, ExploreItemState } from 'app/types/explore'; import { StoreState } from 'app/types'; @@ -69,8 +69,8 @@ function mapStateToProps(state: StoreState, { exploreId }) { const explore = state.explore; const { split } = explore; const item: ExploreItemState = explore[exploreId]; - const { graphResult, graphIsLoading, range, showingGraph, showingTable } = item; - const loading = graphIsLoading; + const { graphResult, loadingState, range, showingGraph, showingTable } = item; + const loading = loadingState === LoadingState.Loading || loadingState === LoadingState.Streaming; return { graphResult, loading, range, showingGraph, showingTable, split, timeZone: getTimeZone(state.user) }; } diff --git a/public/app/features/explore/LogsContainer.tsx b/public/app/features/explore/LogsContainer.tsx index d8d85efcc135..79846e1d4bc9 100644 --- a/public/app/features/explore/LogsContainer.tsx +++ b/public/app/features/explore/LogsContainer.tsx @@ -13,6 +13,7 @@ import { LogsModel, LogRowModel, LogsDedupStrategy, + LoadingState, } from '@grafana/ui'; import { ExploreId, ExploreItemState } from 'app/types/explore'; @@ -151,14 +152,14 @@ function mapStateToProps(state: StoreState, { exploreId }) { const { logsHighlighterExpressions, logsResult, - logIsLoading, + loadingState, scanning, scanRange, range, datasourceInstance, isLive, } = item; - 
const loading = logIsLoading; + const loading = loadingState === LoadingState.Loading || loadingState === LoadingState.Streaming; const { dedupStrategy } = exploreItemUIStateSelector(item); const hiddenLogLevels = new Set(item.hiddenLogLevels); const dedupedResult = deduplicatedLogsSelector(item); diff --git a/public/app/features/explore/QueryEditor.tsx b/public/app/features/explore/QueryEditor.tsx index 5689f67ee13b..d29e8a0e8925 100644 --- a/public/app/features/explore/QueryEditor.tsx +++ b/public/app/features/explore/QueryEditor.tsx @@ -2,7 +2,7 @@ import React, { PureComponent } from 'react'; // Services -import { getAngularLoader, AngularComponent } from 'app/core/services/AngularLoader'; +import { getAngularLoader, AngularComponent } from '@grafana/runtime'; import { getTimeSrv } from 'app/features/dashboard/services/TimeSrv'; // Types diff --git a/public/app/features/explore/QueryRow.tsx b/public/app/features/explore/QueryRow.tsx index 2a0429dbd971..49880c11230a 100644 --- a/public/app/features/explore/QueryRow.tsx +++ b/public/app/features/explore/QueryRow.tsx @@ -20,7 +20,6 @@ import { QueryFixAction, DataSourceStatus, PanelData, - LoadingState, DataQueryError, } from '@grafana/ui'; import { HistoryItem, ExploreItemState, ExploreId } from 'app/types/explore'; @@ -180,9 +179,7 @@ function mapStateToProps(state: StoreState, { exploreId, index }: QueryRowProps) range, datasourceError, graphResult, - graphIsLoading, - tableIsLoading, - logIsLoading, + loadingState, latency, queryErrors, } = item; @@ -190,15 +187,9 @@ function mapStateToProps(state: StoreState, { exploreId, index }: QueryRowProps) const datasourceStatus = datasourceError ? DataSourceStatus.Disconnected : DataSourceStatus.Connected; const error = queryErrors.filter(queryError => queryError.refId === query.refId)[0]; const series = graphResult ? graphResult : []; // TODO: use SeriesData - const queryResponseState = - graphIsLoading || tableIsLoading || logIsLoading - ? 
LoadingState.Loading - : error - ? LoadingState.Error - : LoadingState.Done; const queryResponse: PanelData = { series, - state: queryResponseState, + state: loadingState, error, }; diff --git a/public/app/features/explore/TableContainer.tsx b/public/app/features/explore/TableContainer.tsx index 18ee70d8ee20..ea227e78b976 100644 --- a/public/app/features/explore/TableContainer.tsx +++ b/public/app/features/explore/TableContainer.tsx @@ -9,6 +9,7 @@ import { toggleTable } from './state/actions'; import Table from './Table'; import Panel from './Panel'; import TableModel from 'app/core/table_model'; +import { LoadingState } from '@grafana/ui'; interface TableContainerProps { exploreId: ExploreId; @@ -38,8 +39,11 @@ export class TableContainer extends PureComponent { function mapStateToProps(state: StoreState, { exploreId }) { const explore = state.explore; const item: ExploreItemState = explore[exploreId]; - const { tableIsLoading, showingTable, tableResult } = item; - const loading = tableIsLoading; + const { loadingState, showingTable, tableResult } = item; + const loading = + tableResult && tableResult.rows.length > 0 + ? 
false + : loadingState === LoadingState.Loading || loadingState === LoadingState.Streaming; return { loading, showingTable, tableResult }; } diff --git a/public/app/features/explore/state/actionTypes.ts b/public/app/features/explore/state/actionTypes.ts index ff7fdcb55dec..68b9ac604eb1 100644 --- a/public/app/features/explore/state/actionTypes.ts +++ b/public/app/features/explore/state/actionTypes.ts @@ -9,18 +9,23 @@ import { LogLevel, TimeRange, DataQueryError, + SeriesData, + LogsModel, + TimeSeries, + DataQueryResponseData, + LoadingState, } from '@grafana/ui/src/types'; import { ExploreId, ExploreItemState, HistoryItem, RangeScanner, - ResultType, - QueryTransaction, ExploreUIState, ExploreMode, + QueryOptions, } from 'app/types/explore'; import { actionCreatorFactory, noPayloadActionCreatorFactory, ActionOf } from 'app/core/redux/actionCreatorFactory'; +import TableModel from 'app/core/table_model'; /** Higher order actions * @@ -142,21 +147,19 @@ export interface ModifyQueriesPayload { export interface QueryFailurePayload { exploreId: ExploreId; response: DataQueryError; - resultType: ResultType; } export interface QueryStartPayload { exploreId: ExploreId; - resultType: ResultType; - rowIndex: number; - transaction: QueryTransaction; } export interface QuerySuccessPayload { exploreId: ExploreId; - result: any; - resultType: ResultType; latency: number; + loadingState: LoadingState; + graphResult: TimeSeries[]; + tableResult: TableModel; + logsResult: LogsModel; } export interface HistoryUpdatedPayload { @@ -230,6 +233,7 @@ export interface LoadExploreDataSourcesPayload { export interface RunQueriesPayload { exploreId: ExploreId; + range: TimeRange; } export interface ResetQueryErrorPayload { @@ -237,6 +241,41 @@ export interface ResetQueryErrorPayload { refIds: string[]; } +export interface SetUrlReplacedPayload { + exploreId: ExploreId; +} + +export interface ProcessQueryErrorsPayload { + exploreId: ExploreId; + response: any; + datasourceId: string; +} + 
+export interface ProcessQueryResultsPayload { + exploreId: ExploreId; + latency: number; + datasourceId: string; + loadingState: LoadingState; + series?: DataQueryResponseData[]; + delta?: SeriesData[]; +} + +export interface RunQueriesBatchPayload { + exploreId: ExploreId; + queryOptions: QueryOptions; +} + +export interface LimitMessageRatePayload { + series: SeriesData[]; + exploreId: ExploreId; + datasourceId: string; +} + +export interface ChangeRangePayload { + exploreId: ExploreId; + range: TimeRange; +} + /** * Adds a query row after the row with the given index. */ @@ -332,13 +371,6 @@ export const modifyQueriesAction = actionCreatorFactory('e */ export const queryFailureAction = actionCreatorFactory('explore/QUERY_FAILURE').create(); -/** - * Start a query transaction for the given result type. - * @param exploreId Explore area - * @param transaction Query options and `done` status. - * @param resultType Associate the transaction with a result viewer, e.g., Graph - * @param rowIndex Index is used to associate latency for this transaction with a query row - */ export const queryStartAction = actionCreatorFactory('explore/QUERY_START').create(); /** @@ -391,6 +423,7 @@ export const splitCloseAction = actionCreatorFactory('e * The copy keeps all query modifications but wipes the query results. 
*/ export const splitOpenAction = actionCreatorFactory('explore/SPLIT_OPEN').create(); + export const stateSaveAction = noPayloadActionCreatorFactory('explore/STATE_SAVE').create(); /** @@ -439,6 +472,24 @@ export const historyUpdatedAction = actionCreatorFactory( export const resetQueryErrorAction = actionCreatorFactory('explore/RESET_QUERY_ERROR').create(); +export const setUrlReplacedAction = actionCreatorFactory('explore/SET_URL_REPLACED').create(); + +export const processQueryErrorsAction = actionCreatorFactory( + 'explore/PROCESS_QUERY_ERRORS' +).create(); + +export const processQueryResultsAction = actionCreatorFactory( + 'explore/PROCESS_QUERY_RESULTS' +).create(); + +export const runQueriesBatchAction = actionCreatorFactory('explore/RUN_QUERIES_BATCH').create(); + +export const limitMessageRatePayloadAction = actionCreatorFactory( + 'explore/LIMIT_MESSAGE_RATE_PAYLOAD' +).create(); + +export const changeRangeAction = actionCreatorFactory('explore/CHANGE_RANGE').create(); + export type HigherOrderAction = | ActionOf | SplitOpenAction diff --git a/public/app/features/explore/state/actions.ts b/public/app/features/explore/state/actions.ts index 09f950905ae9..4f95744eb479 100644 --- a/public/app/features/explore/state/actions.ts +++ b/public/app/features/explore/state/actions.ts @@ -7,25 +7,14 @@ import { getDatasourceSrv } from 'app/features/plugins/datasource_srv'; import { Emitter } from 'app/core/core'; import { LAST_USED_DATASOURCE_KEY, - clearQueryKeys, ensureQueries, generateEmptyQuery, - hasNonEmptyQuery, - makeTimeSeriesList, - updateHistory, - buildQueryTransaction, - serializeStateToUrlParam, parseUrlState, getTimeRange, getTimeRangeFromUrl, generateNewKeyAndAddRefIdIfMissing, - instanceOfDataQueryError, - getRefIds, } from 'app/core/utils/explore'; -// Actions -import { updateLocation } from 'app/core/actions'; - // Types import { ThunkResult } from 'app/types'; import { @@ -34,19 +23,9 @@ import { DataQuery, DataSourceSelectItem, QueryFixAction, - 
TimeRange, LogsDedupStrategy, } from '@grafana/ui'; -import { - ExploreId, - ExploreUrlState, - RangeScanner, - ResultType, - QueryOptions, - ExploreUIState, - QueryTransaction, - ExploreMode, -} from 'app/types/explore'; +import { ExploreId, RangeScanner, ExploreUIState, QueryTransaction, ExploreMode } from 'app/types/explore'; import { updateDatasourceInstanceAction, changeQueryAction, @@ -55,7 +34,6 @@ import { changeSizeAction, ChangeSizePayload, changeTimeAction, - scanStopAction, clearQueriesAction, initializeExploreAction, loadDatasourceMissingAction, @@ -64,9 +42,6 @@ import { LoadDatasourceReadyPayload, loadDatasourceReadyAction, modifyQueriesAction, - queryFailureAction, - querySuccessAction, - scanRangeAction, scanStartAction, setQueriesAction, splitCloseAction, @@ -77,21 +52,19 @@ import { ToggleGraphPayload, ToggleTablePayload, updateUIStateAction, - runQueriesAction, testDataSourcePendingAction, testDataSourceSuccessAction, testDataSourceFailureAction, loadExploreDatasources, - queryStartAction, - historyUpdatedAction, - resetQueryErrorAction, changeModeAction, + scanStopAction, + scanRangeAction, + runQueriesAction, + stateSaveAction, } from './actionTypes'; import { ActionOf, ActionCreator } from 'app/core/redux/actionCreatorFactory'; import { getTimeZone } from 'app/features/profile/state/selectors'; -import { isDateTime } from '@grafana/ui/src/utils/moment_wrapper'; -import { toDataQueryError } from 'app/features/dashboard/state/PanelQueryState'; -import { startSubscriptionsAction, subscriptionDataReceivedAction } from 'app/features/explore/state/epics'; +import { offOption } from '@grafana/ui/src/components/RefreshPicker/RefreshPicker'; /** * Updates UI state and save it to the URL @@ -99,7 +72,7 @@ import { startSubscriptionsAction, subscriptionDataReceivedAction } from 'app/fe const updateExploreUIState = (exploreId: ExploreId, uiStateFragment: Partial): ThunkResult => { return dispatch => { dispatch(updateUIStateAction({ exploreId, 
...uiStateFragment })); - dispatch(stateSave()); + dispatch(stateSaveAction()); }; }; @@ -118,7 +91,7 @@ export function addQueryRow(exploreId: ExploreId, index: number): ThunkResult { +export function changeDatasource(exploreId: ExploreId, datasource: string): ThunkResult { return async (dispatch, getState) => { let newDataSourceInstance: DataSourceApi = null; @@ -135,8 +108,12 @@ export function changeDatasource(exploreId: ExploreId, datasource: string, repla dispatch(updateDatasourceInstanceAction({ exploreId, datasourceInstance: newDataSourceInstance })); + if (getState().explore[exploreId].isLive) { + dispatch(changeRefreshInterval(exploreId, offOption.value)); + } + await dispatch(loadDatasource(exploreId, newDataSourceInstance)); - dispatch(runQueries(exploreId, false, replaceUrl)); + dispatch(runQueries(exploreId)); }; } @@ -215,7 +192,7 @@ export function clearQueries(exploreId: ExploreId): ThunkResult { return dispatch => { dispatch(scanStopAction({ exploreId })); dispatch(clearQueriesAction({ exploreId })); - dispatch(stateSave()); + dispatch(stateSaveAction()); }; } @@ -242,7 +219,7 @@ export function loadExploreDatasourcesAndSetDatasource( dispatch(loadExploreDatasources({ exploreId, exploreDatasources })); if (exploreDatasources.length >= 1) { - dispatch(changeDatasource(exploreId, datasourceName, true)); + dispatch(changeDatasource(exploreId, datasourceName)); } else { dispatch(loadDatasourceMissingAction({ exploreId })); } @@ -419,197 +396,17 @@ export function modifyQueries( }; } -export function processQueryErrors( - exploreId: ExploreId, - response: any, - resultType: ResultType, - datasourceId: string -): ThunkResult { - return (dispatch, getState) => { - const { datasourceInstance } = getState().explore[exploreId]; - - if (datasourceInstance.meta.id !== datasourceId || response.cancelled) { - // Navigated away, queries did not matter - return; - } - - console.error(response); // To help finding problems with query syntax - - if 
(!instanceOfDataQueryError(response)) { - response = toDataQueryError(response); - } - - dispatch( - queryFailureAction({ - exploreId, - response, - resultType, - }) - ); - }; -} - -/** - * @param exploreId Explore area - * @param response Response from `datasourceInstance.query()` - * @param latency Duration between request and response - * @param resultType The type of result - * @param datasourceId Origin datasource instance, used to discard results if current datasource is different - */ -export function processQueryResults( - exploreId: ExploreId, - response: any, - latency: number, - resultType: ResultType, - datasourceId: string -): ThunkResult { - return (dispatch, getState) => { - const { datasourceInstance, scanning, scanner } = getState().explore[exploreId]; - - // If datasource already changed, results do not matter - if (datasourceInstance.meta.id !== datasourceId) { - return; - } - - const series: any[] = response.data; - const refIds = getRefIds(series); - - // Clears any previous errors that now have a successful query, important so Angular editors are updated correctly - dispatch( - resetQueryErrorAction({ - exploreId, - refIds, - }) - ); - - const resultGetter = - resultType === 'Graph' ? makeTimeSeriesList : resultType === 'Table' ? (data: any[]) => data : null; - const result = resultGetter ? 
resultGetter(series, null, []) : series; - - dispatch( - querySuccessAction({ - exploreId, - result, - resultType, - latency, - }) - ); - - // Keep scanning for results if this was the last scanning transaction - if (scanning) { - if (_.size(result) === 0) { - const range = scanner(); - dispatch(scanRangeAction({ exploreId, range })); - } else { - // We can stop scanning if we have a result - dispatch(scanStopAction({ exploreId })); - } - } - }; -} - /** * Main action to run queries and dispatches sub-actions based on which result viewers are active */ -export function runQueries(exploreId: ExploreId, ignoreUIState = false, replaceUrl = false): ThunkResult { +export function runQueries(exploreId: ExploreId): ThunkResult { return (dispatch, getState) => { - const { - datasourceInstance, - queries, - showingGraph, - showingTable, - datasourceError, - containerWidth, - mode, - } = getState().explore[exploreId]; - - if (datasourceError) { - // let's not run any queries if data source is in a faulty state - return; - } - - if (!hasNonEmptyQuery(queries)) { - dispatch(clearQueriesAction({ exploreId })); - dispatch(stateSave(replaceUrl)); // Remember to save to state and update location - return; - } - - // Some datasource's query builders allow per-query interval limits, - // but we're using the datasource interval limit for now - const interval = datasourceInstance.interval; - - dispatch(runQueriesAction({ exploreId })); - // Keep table queries first since they need to return quickly - if ((ignoreUIState || showingTable) && mode === ExploreMode.Metrics) { - dispatch( - runQueriesForType(exploreId, 'Table', { - interval, - format: 'table', - instant: true, - valueWithRefId: true, - }) - ); - } - if ((ignoreUIState || showingGraph) && mode === ExploreMode.Metrics) { - dispatch( - runQueriesForType(exploreId, 'Graph', { - interval, - format: 'time_series', - instant: false, - maxDataPoints: containerWidth, - }) - ); - } - if (mode === ExploreMode.Logs) { - 
dispatch(runQueriesForType(exploreId, 'Logs', { interval, format: 'logs' })); - } - - dispatch(stateSave(replaceUrl)); - }; -} + const { range } = getState().explore[exploreId]; -/** - * Helper action to build a query transaction object and handing the query to the datasource. - * @param exploreId Explore area - * @param resultType Result viewer that will be associated with this query result - * @param queryOptions Query options as required by the datasource's `query()` function. - * @param resultGetter Optional result extractor, e.g., if the result is a list and you only need the first element. - */ -function runQueriesForType( - exploreId: ExploreId, - resultType: ResultType, - queryOptions: QueryOptions -): ThunkResult { - return async (dispatch, getState) => { - const { datasourceInstance, eventBridge, queries, queryIntervals, range, scanning, history } = getState().explore[ - exploreId - ]; - - if (resultType === 'Logs' && datasourceInstance.convertToStreamTargets) { - dispatch( - startSubscriptionsAction({ - exploreId, - dataReceivedActionCreator: subscriptionDataReceivedAction, - }) - ); - } + const timeZone = getTimeZone(getState().user); + const updatedRange = getTimeRange(timeZone, range.raw); - const datasourceId = datasourceInstance.meta.id; - const transaction = buildQueryTransaction(queries, resultType, queryOptions, range, queryIntervals, scanning); - dispatch(queryStartAction({ exploreId, resultType, rowIndex: 0, transaction })); - try { - const now = Date.now(); - const response = await datasourceInstance.query(transaction.options); - eventBridge.emit('data-received', response.data || []); - const latency = Date.now() - now; - // Side-effect: Saving history in localstorage - const nextHistory = updateHistory(history, datasourceId, queries); - dispatch(historyUpdatedAction({ exploreId, history: nextHistory })); - dispatch(processQueryResults(exploreId, response, latency, resultType, datasourceId)); - } catch (err) { - eventBridge.emit('data-error', 
err); - dispatch(processQueryErrors(exploreId, err, resultType, datasourceId)); - } + dispatch(runQueriesAction({ exploreId, range: updatedRange })); }; } @@ -649,7 +446,7 @@ export function setQueries(exploreId: ExploreId, rawQueries: DataQuery[]): Thunk export function splitClose(itemId: ExploreId): ThunkResult { return dispatch => { dispatch(splitCloseAction({ itemId })); - dispatch(stateSave()); + dispatch(stateSaveAction()); }; } @@ -673,64 +470,7 @@ export function splitOpen(): ThunkResult { urlState, }; dispatch(splitOpenAction({ itemState })); - dispatch(stateSave()); - }; -} - -const toRawTimeRange = (range: TimeRange): RawTimeRange => { - let from = range.raw.from; - if (isDateTime(from)) { - from = from.valueOf().toString(10); - } - - let to = range.raw.to; - if (isDateTime(to)) { - to = to.valueOf().toString(10); - } - - return { - from, - to, - }; -}; - -/** - * Saves Explore state to URL using the `left` and `right` parameters. - * If split view is not active, `right` will not be set. 
- */ -export function stateSave(replaceUrl = false): ThunkResult { - return (dispatch, getState) => { - const { left, right, split } = getState().explore; - const urlStates: { [index: string]: string } = {}; - const leftUrlState: ExploreUrlState = { - datasource: left.datasourceInstance.name, - queries: left.queries.map(clearQueryKeys), - range: toRawTimeRange(left.range), - ui: { - showingGraph: left.showingGraph, - showingLogs: true, - showingTable: left.showingTable, - dedupStrategy: left.dedupStrategy, - }, - }; - urlStates.left = serializeStateToUrlParam(leftUrlState, true); - if (split) { - const rightUrlState: ExploreUrlState = { - datasource: right.datasourceInstance.name, - queries: right.queries.map(clearQueryKeys), - range: toRawTimeRange(right.range), - ui: { - showingGraph: right.showingGraph, - showingLogs: true, - showingTable: right.showingTable, - dedupStrategy: right.dedupStrategy, - }, - }; - - urlStates.right = serializeStateToUrlParam(rightUrlState, true); - } - - dispatch(updateLocation({ query: urlStates, replace: replaceUrl })); + dispatch(stateSaveAction()); }; } diff --git a/public/app/features/explore/state/epics.test.ts b/public/app/features/explore/state/epics.test.ts deleted file mode 100644 index fbfb934a43ae..000000000000 --- a/public/app/features/explore/state/epics.test.ts +++ /dev/null @@ -1,550 +0,0 @@ -import { liveOption } from '@grafana/ui/src/components/RefreshPicker/RefreshPicker'; -import { DataSourceApi, DataQuery } from '@grafana/ui/src/types/datasource'; - -import { ExploreId, ExploreState } from 'app/types'; -import { actionCreatorFactory } from 'app/core/redux/actionCreatorFactory'; -import { - startSubscriptionsEpic, - startSubscriptionsAction, - SubscriptionDataReceivedPayload, - startSubscriptionAction, - startSubscriptionEpic, - limitMessageRatePayloadAction, -} from './epics'; -import { makeExploreItemState } from './reducers'; -import { epicTester } from 'test/core/redux/epicTester'; -import { - 
resetExploreAction, - updateDatasourceInstanceAction, - changeRefreshIntervalAction, - clearQueriesAction, -} from './actionTypes'; - -const setup = (options: any = {}) => { - const url = '/api/datasources/proxy/20/api/prom/tail?query=%7Bfilename%3D%22%2Fvar%2Flog%2Fdocker.log%22%7D'; - const webSocketUrl = 'ws://localhost' + url; - const refId = options.refId || 'A'; - const exploreId = ExploreId.left; - const datasourceInstance: DataSourceApi = options.datasourceInstance || { - id: 1337, - query: jest.fn(), - name: 'test', - testDatasource: jest.fn(), - convertToStreamTargets: () => [ - { - url, - refId, - }, - ], - resultToSeriesData: data => [data], - }; - const itemState = makeExploreItemState(); - const explore: Partial = { - [exploreId]: { - ...itemState, - datasourceInstance, - refreshInterval: options.refreshInterval || liveOption.value, - queries: [{} as DataQuery], - }, - }; - const state: any = { - explore, - }; - - return { url, state, refId, webSocketUrl, exploreId }; -}; - -const dataReceivedActionCreator = actionCreatorFactory('test').create(); - -describe('startSubscriptionsEpic', () => { - describe('when startSubscriptionsAction is dispatched', () => { - describe('and datasource supports convertToStreamTargets', () => { - describe('and explore is Live', () => { - it('then correct actions should be dispatched', () => { - const { state, refId, webSocketUrl, exploreId } = setup(); - - epicTester(startSubscriptionsEpic, state) - .whenActionIsDispatched(startSubscriptionsAction({ exploreId, dataReceivedActionCreator })) - .thenResultingActionsEqual( - startSubscriptionAction({ - exploreId, - refId, - url: webSocketUrl, - dataReceivedActionCreator, - }) - ); - }); - }); - - describe('and explore is not Live', () => { - it('then no actions should be dispatched', () => { - const { state, exploreId } = setup({ refreshInterval: '10s' }); - - epicTester(startSubscriptionsEpic, state) - .whenActionIsDispatched(startSubscriptionsAction({ exploreId, 
dataReceivedActionCreator })) - .thenNoActionsWhereDispatched(); - }); - }); - }); - - describe('and datasource does not support streaming', () => { - it('then no actions should be dispatched', () => { - const { state, exploreId } = setup({ datasourceInstance: {} }); - - epicTester(startSubscriptionsEpic, state) - .whenActionIsDispatched(startSubscriptionsAction({ exploreId, dataReceivedActionCreator })) - .thenNoActionsWhereDispatched(); - }); - }); - }); -}); - -describe('startSubscriptionEpic', () => { - describe('when startSubscriptionAction is dispatched', () => { - describe('and datasource supports resultToSeriesData', () => { - it('then correct actions should be dispatched', () => { - const { state, webSocketUrl, refId, exploreId } = setup(); - - epicTester(startSubscriptionEpic, state) - .whenActionIsDispatched( - startSubscriptionAction({ url: webSocketUrl, refId, exploreId, dataReceivedActionCreator }) - ) - .thenNoActionsWhereDispatched() - .whenWebSocketReceivesData({ data: [1, 2, 3] }) - .thenResultingActionsEqual( - limitMessageRatePayloadAction({ - exploreId, - data: { data: [1, 2, 3] } as any, - dataReceivedActionCreator, - }) - ) - .whenWebSocketReceivesData({ data: [4, 5, 6] }) - .thenResultingActionsEqual( - limitMessageRatePayloadAction({ - exploreId, - data: { data: [1, 2, 3] } as any, - dataReceivedActionCreator, - }), - limitMessageRatePayloadAction({ - exploreId, - data: { data: [4, 5, 6] } as any, - dataReceivedActionCreator, - }) - ); - }); - }); - - describe('and datasource does not support resultToSeriesData', () => { - it('then no actions should be dispatched', () => { - const { state, webSocketUrl, refId, exploreId } = setup({ datasourceInstance: {} }); - - epicTester(startSubscriptionEpic, state) - .whenActionIsDispatched( - startSubscriptionAction({ url: webSocketUrl, refId, exploreId, dataReceivedActionCreator }) - ) - .thenNoActionsWhereDispatched() - .whenWebSocketReceivesData({ data: [1, 2, 3] }) - 
.thenNoActionsWhereDispatched(); - }); - }); - }); - - describe('when an subscription is active', () => { - describe('and resetExploreAction is dispatched', () => { - it('then subscription should be unsubscribed', () => { - const { state, webSocketUrl, refId, exploreId } = setup(); - - epicTester(startSubscriptionEpic, state) - .whenActionIsDispatched( - startSubscriptionAction({ url: webSocketUrl, refId, exploreId, dataReceivedActionCreator }) - ) - .thenNoActionsWhereDispatched() - .whenWebSocketReceivesData({ data: [1, 2, 3] }) - .thenResultingActionsEqual( - limitMessageRatePayloadAction({ - exploreId, - data: { data: [1, 2, 3] } as any, - dataReceivedActionCreator, - }) - ) - .whenActionIsDispatched(resetExploreAction()) - .whenWebSocketReceivesData({ data: [4, 5, 6] }) - .thenResultingActionsEqual( - limitMessageRatePayloadAction({ - exploreId, - data: { data: [1, 2, 3] } as any, - dataReceivedActionCreator, - }) - ); - }); - }); - - describe('and updateDatasourceInstanceAction is dispatched', () => { - describe('and exploreId matches the websockets', () => { - it('then subscription should be unsubscribed', () => { - const { state, webSocketUrl, refId, exploreId } = setup(); - - epicTester(startSubscriptionEpic, state) - .whenActionIsDispatched( - startSubscriptionAction({ - url: webSocketUrl, - refId, - exploreId, - dataReceivedActionCreator, - }) - ) - .thenNoActionsWhereDispatched() - .whenWebSocketReceivesData({ data: [1, 2, 3] }) - .thenResultingActionsEqual( - limitMessageRatePayloadAction({ - exploreId, - data: { data: [1, 2, 3] } as any, - dataReceivedActionCreator, - }) - ) - .whenActionIsDispatched(updateDatasourceInstanceAction({ exploreId, datasourceInstance: null })) - .whenWebSocketReceivesData({ data: [4, 5, 6] }) - .thenResultingActionsEqual( - limitMessageRatePayloadAction({ - exploreId, - data: { data: [1, 2, 3] } as any, - dataReceivedActionCreator, - }) - ); - }); - }); - - describe('and exploreId does not match the websockets', () => { - 
it('then subscription should not be unsubscribed', () => { - const { state, webSocketUrl, refId, exploreId } = setup(); - - epicTester(startSubscriptionEpic, state) - .whenActionIsDispatched( - startSubscriptionAction({ - url: webSocketUrl, - refId, - exploreId, - dataReceivedActionCreator, - }) - ) - .thenNoActionsWhereDispatched() - .whenWebSocketReceivesData({ data: [1, 2, 3] }) - .thenResultingActionsEqual( - limitMessageRatePayloadAction({ - exploreId, - data: { data: [1, 2, 3] } as any, - dataReceivedActionCreator, - }) - ) - .whenActionIsDispatched( - updateDatasourceInstanceAction({ exploreId: ExploreId.right, datasourceInstance: null }) - ) - .whenWebSocketReceivesData({ data: [4, 5, 6] }) - .thenResultingActionsEqual( - limitMessageRatePayloadAction({ - exploreId, - data: { data: [1, 2, 3] } as any, - dataReceivedActionCreator, - }), - limitMessageRatePayloadAction({ - exploreId, - data: { data: [4, 5, 6] } as any, - dataReceivedActionCreator, - }) - ); - }); - }); - }); - - describe('and changeRefreshIntervalAction is dispatched', () => { - describe('and exploreId matches the websockets', () => { - describe('and refreshinterval is not "Live"', () => { - it('then subscription should be unsubscribed', () => { - const { state, webSocketUrl, refId, exploreId } = setup(); - - epicTester(startSubscriptionEpic, state) - .whenActionIsDispatched( - startSubscriptionAction({ - url: webSocketUrl, - refId, - exploreId, - dataReceivedActionCreator, - }) - ) - .thenNoActionsWhereDispatched() - .whenWebSocketReceivesData({ data: [1, 2, 3] }) - .thenResultingActionsEqual( - limitMessageRatePayloadAction({ - exploreId, - data: { data: [1, 2, 3] } as any, - dataReceivedActionCreator, - }) - ) - .whenActionIsDispatched(changeRefreshIntervalAction({ exploreId, refreshInterval: '10s' })) - .whenWebSocketReceivesData({ data: [4, 5, 6] }) - .thenResultingActionsEqual( - limitMessageRatePayloadAction({ - exploreId, - data: { data: [1, 2, 3] } as any, - 
dataReceivedActionCreator, - }) - ); - }); - }); - - describe('and refreshinterval is "Live"', () => { - it('then subscription should not be unsubscribed', () => { - const { state, webSocketUrl, refId, exploreId } = setup(); - - epicTester(startSubscriptionEpic, state) - .whenActionIsDispatched( - startSubscriptionAction({ - url: webSocketUrl, - refId, - exploreId, - dataReceivedActionCreator, - }) - ) - .thenNoActionsWhereDispatched() - .whenWebSocketReceivesData({ data: [1, 2, 3] }) - .thenResultingActionsEqual( - limitMessageRatePayloadAction({ - exploreId, - data: { data: [1, 2, 3] } as any, - dataReceivedActionCreator, - }) - ) - .whenActionIsDispatched(changeRefreshIntervalAction({ exploreId, refreshInterval: liveOption.value })) - .whenWebSocketReceivesData({ data: [4, 5, 6] }) - .thenResultingActionsEqual( - limitMessageRatePayloadAction({ - exploreId, - data: { data: [1, 2, 3] } as any, - dataReceivedActionCreator, - }), - limitMessageRatePayloadAction({ - exploreId, - data: { data: [4, 5, 6] } as any, - dataReceivedActionCreator, - }) - ); - }); - }); - }); - - describe('and exploreId does not match the websockets', () => { - it('then subscription should not be unsubscribed', () => { - const { state, webSocketUrl, refId, exploreId } = setup(); - - epicTester(startSubscriptionEpic, state) - .whenActionIsDispatched( - startSubscriptionAction({ - url: webSocketUrl, - refId, - exploreId, - dataReceivedActionCreator, - }) - ) - .thenNoActionsWhereDispatched() - .whenWebSocketReceivesData({ data: [1, 2, 3] }) - .thenResultingActionsEqual( - limitMessageRatePayloadAction({ - exploreId, - data: { data: [1, 2, 3] } as any, - dataReceivedActionCreator, - }) - ) - .whenActionIsDispatched(changeRefreshIntervalAction({ exploreId: ExploreId.right, refreshInterval: '10s' })) - .whenWebSocketReceivesData({ data: [4, 5, 6] }) - .thenResultingActionsEqual( - limitMessageRatePayloadAction({ - exploreId, - data: { data: [1, 2, 3] } as any, - dataReceivedActionCreator, - }), 
- limitMessageRatePayloadAction({ - exploreId, - data: { data: [4, 5, 6] } as any, - dataReceivedActionCreator, - }) - ); - }); - }); - }); - - describe('and clearQueriesAction is dispatched', () => { - describe('and exploreId matches the websockets', () => { - it('then subscription should be unsubscribed', () => { - const { state, webSocketUrl, refId, exploreId } = setup(); - - epicTester(startSubscriptionEpic, state) - .whenActionIsDispatched( - startSubscriptionAction({ - url: webSocketUrl, - refId, - exploreId, - dataReceivedActionCreator, - }) - ) - .thenNoActionsWhereDispatched() - .whenWebSocketReceivesData({ data: [1, 2, 3] }) - .thenResultingActionsEqual( - limitMessageRatePayloadAction({ - exploreId, - data: { data: [1, 2, 3] } as any, - dataReceivedActionCreator, - }) - ) - .whenActionIsDispatched(clearQueriesAction({ exploreId })) - .whenWebSocketReceivesData({ data: [4, 5, 6] }) - .thenResultingActionsEqual( - limitMessageRatePayloadAction({ - exploreId, - data: { data: [1, 2, 3] } as any, - dataReceivedActionCreator, - }) - ); - }); - }); - - describe('and exploreId does not match the websockets', () => { - it('then subscription should not be unsubscribed', () => { - const { state, webSocketUrl, refId, exploreId } = setup(); - - epicTester(startSubscriptionEpic, state) - .whenActionIsDispatched( - startSubscriptionAction({ - url: webSocketUrl, - refId, - exploreId, - dataReceivedActionCreator, - }) - ) - .thenNoActionsWhereDispatched() - .whenWebSocketReceivesData({ data: [1, 2, 3] }) - .thenResultingActionsEqual( - limitMessageRatePayloadAction({ - exploreId, - data: { data: [1, 2, 3] } as any, - dataReceivedActionCreator, - }) - ) - .whenActionIsDispatched(clearQueriesAction({ exploreId: ExploreId.right })) - .whenWebSocketReceivesData({ data: [4, 5, 6] }) - .thenResultingActionsEqual( - limitMessageRatePayloadAction({ - exploreId, - data: { data: [1, 2, 3] } as any, - dataReceivedActionCreator, - }), - limitMessageRatePayloadAction({ - exploreId, - 
data: { data: [4, 5, 6] } as any, - dataReceivedActionCreator, - }) - ); - }); - }); - }); - - describe('and startSubscriptionAction is dispatched', () => { - describe('and exploreId and refId matches the websockets', () => { - it('then subscription should be unsubscribed', () => { - const { state, webSocketUrl, refId, exploreId } = setup(); - - epicTester(startSubscriptionEpic, state) - .whenActionIsDispatched( - startSubscriptionAction({ - url: webSocketUrl, - refId, - exploreId, - dataReceivedActionCreator, - }) - ) - .thenNoActionsWhereDispatched() - .whenWebSocketReceivesData({ data: [1, 2, 3] }) - .thenResultingActionsEqual( - limitMessageRatePayloadAction({ - exploreId, - data: { data: [1, 2, 3] } as any, - dataReceivedActionCreator, - }) - ) - .whenActionIsDispatched( - startSubscriptionAction({ - url: webSocketUrl, - refId, - exploreId, - dataReceivedActionCreator, - }) - ) - .whenWebSocketReceivesData({ data: [4, 5, 6] }) - .thenResultingActionsEqual( - limitMessageRatePayloadAction({ - exploreId, - data: { data: [1, 2, 3] } as any, - dataReceivedActionCreator, - }), - limitMessageRatePayloadAction({ - exploreId, - data: { data: [4, 5, 6] } as any, - dataReceivedActionCreator, - }) - // This looks like we haven't stopped the subscription but we actually started the same again - ); - }); - - describe('and exploreId or refId does not match the websockets', () => { - it('then subscription should not be unsubscribed and another websocket is started', () => { - const { state, webSocketUrl, refId, exploreId } = setup(); - - epicTester(startSubscriptionEpic, state) - .whenActionIsDispatched( - startSubscriptionAction({ - url: webSocketUrl, - refId, - exploreId, - dataReceivedActionCreator, - }) - ) - .thenNoActionsWhereDispatched() - .whenWebSocketReceivesData({ data: [1, 2, 3] }) - .thenResultingActionsEqual( - limitMessageRatePayloadAction({ - exploreId, - data: { data: [1, 2, 3] } as any, - dataReceivedActionCreator, - }) - ) - .whenActionIsDispatched( - 
startSubscriptionAction({ - url: webSocketUrl, - refId: 'B', - exploreId, - dataReceivedActionCreator, - }) - ) - .whenWebSocketReceivesData({ data: [4, 5, 6] }) - .thenResultingActionsEqual( - limitMessageRatePayloadAction({ - exploreId, - data: { data: [1, 2, 3] } as any, - dataReceivedActionCreator, - }), - limitMessageRatePayloadAction({ - exploreId, - data: { data: [4, 5, 6] } as any, - dataReceivedActionCreator, - }), - limitMessageRatePayloadAction({ - exploreId, - data: { data: [4, 5, 6] } as any, - dataReceivedActionCreator, - }) - ); - }); - }); - }); - }); - }); -}); diff --git a/public/app/features/explore/state/epics.ts b/public/app/features/explore/state/epics.ts deleted file mode 100644 index a31474f81ccf..000000000000 --- a/public/app/features/explore/state/epics.ts +++ /dev/null @@ -1,159 +0,0 @@ -import { Epic } from 'redux-observable'; -import { NEVER } from 'rxjs'; -import { takeUntil, mergeMap, tap, filter, map, throttleTime } from 'rxjs/operators'; - -import { StoreState, ExploreId } from 'app/types'; -import { ActionOf, ActionCreator, actionCreatorFactory } from '../../../core/redux/actionCreatorFactory'; -import { config } from '../../../core/config'; -import { - updateDatasourceInstanceAction, - resetExploreAction, - changeRefreshIntervalAction, - clearQueriesAction, -} from './actionTypes'; -import { isLive } from '@grafana/ui/src/components/RefreshPicker/RefreshPicker'; -import { SeriesData } from '@grafana/ui/src/types/data'; -import { EpicDependencies } from 'app/store/configureStore'; - -const convertToWebSocketUrl = (url: string) => { - const protocol = window.location.protocol === 'https:' ? 
'wss://' : 'ws://'; - let backend = `${protocol}${window.location.host}${config.appSubUrl}`; - if (backend.endsWith('/')) { - backend = backend.slice(0, backend.length - 1); - } - return `${backend}${url}`; -}; - -export interface StartSubscriptionsPayload { - exploreId: ExploreId; - dataReceivedActionCreator: ActionCreator; -} - -export const startSubscriptionsAction = actionCreatorFactory( - 'explore/START_SUBSCRIPTIONS' -).create(); - -export interface StartSubscriptionPayload { - url: string; - refId: string; - exploreId: ExploreId; - dataReceivedActionCreator: ActionCreator; -} - -export const startSubscriptionAction = actionCreatorFactory( - 'explore/START_SUBSCRIPTION' -).create(); - -export interface SubscriptionDataReceivedPayload { - data: SeriesData; - exploreId: ExploreId; -} - -export const subscriptionDataReceivedAction = actionCreatorFactory( - 'explore/SUBSCRIPTION_DATA_RECEIVED' -).create(); - -export interface LimitMessageRatePayload { - data: SeriesData; - exploreId: ExploreId; - dataReceivedActionCreator: ActionCreator; -} - -export const limitMessageRatePayloadAction = actionCreatorFactory( - 'explore/LIMIT_MESSAGE_RATE_PAYLOAD' -).create(); - -export const startSubscriptionsEpic: Epic, ActionOf, StoreState> = (action$, state$) => { - return action$.ofType(startSubscriptionsAction.type).pipe( - mergeMap((action: ActionOf) => { - const { exploreId, dataReceivedActionCreator } = action.payload; - const { datasourceInstance, queries, refreshInterval } = state$.value.explore[exploreId]; - - if (!datasourceInstance || !datasourceInstance.convertToStreamTargets) { - return NEVER; //do nothing if datasource does not support streaming - } - - if (!refreshInterval || !isLive(refreshInterval)) { - return NEVER; //do nothing if refresh interval is not 'LIVE' - } - - const request: any = { targets: queries }; - return datasourceInstance.convertToStreamTargets(request).map(target => - startSubscriptionAction({ - url: convertToWebSocketUrl(target.url), - 
refId: target.refId, - exploreId, - dataReceivedActionCreator, - }) - ); - }) - ); -}; - -export const startSubscriptionEpic: Epic, ActionOf, StoreState, EpicDependencies> = ( - action$, - state$, - { getWebSocket } -) => { - return action$.ofType(startSubscriptionAction.type).pipe( - mergeMap((action: ActionOf) => { - const { url, exploreId, refId, dataReceivedActionCreator } = action.payload; - return getWebSocket(url).pipe( - takeUntil( - action$ - .ofType( - startSubscriptionAction.type, - resetExploreAction.type, - updateDatasourceInstanceAction.type, - changeRefreshIntervalAction.type, - clearQueriesAction.type - ) - .pipe( - filter(action => { - if (action.type === resetExploreAction.type) { - return true; // stops all subscriptions if user navigates away - } - - if (action.type === updateDatasourceInstanceAction.type && action.payload.exploreId === exploreId) { - return true; // stops subscriptions if user changes data source - } - - if (action.type === changeRefreshIntervalAction.type && action.payload.exploreId === exploreId) { - return !isLive(action.payload.refreshInterval); // stops subscriptions if user changes refresh interval away from 'Live' - } - - if (action.type === clearQueriesAction.type && action.payload.exploreId === exploreId) { - return true; // stops subscriptions if user clears all queries - } - - return action.payload.exploreId === exploreId && action.payload.refId === refId; - }), - tap(value => console.log('Stopping subscription', value)) - ) - ), - mergeMap((result: any) => { - const { datasourceInstance } = state$.value.explore[exploreId]; - - if (!datasourceInstance || !datasourceInstance.resultToSeriesData) { - return [null]; //do nothing if datasource does not support streaming - } - - return datasourceInstance - .resultToSeriesData(result, refId) - .map(data => limitMessageRatePayloadAction({ exploreId, data, dataReceivedActionCreator })); - }), - filter(action => action !== null) - ); - }) - ); -}; - -export const 
limitMessageRateEpic: Epic, ActionOf, StoreState, EpicDependencies> = action$ => { - return action$.ofType(limitMessageRatePayloadAction.type).pipe( - throttleTime(1), - map((action: ActionOf) => { - const { exploreId, data, dataReceivedActionCreator } = action.payload; - return dataReceivedActionCreator({ exploreId, data }); - }) - ); -}; diff --git a/public/app/features/explore/state/epics/limitMessageRateEpic.ts b/public/app/features/explore/state/epics/limitMessageRateEpic.ts new file mode 100644 index 000000000000..620137069687 --- /dev/null +++ b/public/app/features/explore/state/epics/limitMessageRateEpic.ts @@ -0,0 +1,25 @@ +import { Epic } from 'redux-observable'; +import { map, throttleTime } from 'rxjs/operators'; +import { LoadingState } from '@grafana/ui'; + +import { StoreState } from 'app/types'; +import { ActionOf } from '../../../../core/redux/actionCreatorFactory'; +import { limitMessageRatePayloadAction, LimitMessageRatePayload, processQueryResultsAction } from '../actionTypes'; +import { EpicDependencies } from 'app/store/configureStore'; + +export const limitMessageRateEpic: Epic, ActionOf, StoreState, EpicDependencies> = action$ => { + return action$.ofType(limitMessageRatePayloadAction.type).pipe( + throttleTime(1), + map((action: ActionOf) => { + const { exploreId, series, datasourceId } = action.payload; + return processQueryResultsAction({ + exploreId, + latency: 0, + datasourceId, + loadingState: LoadingState.Streaming, + series: null, + delta: series, + }); + }) + ); +}; diff --git a/public/app/features/explore/state/epics/processQueryErrorsEpic.test.ts b/public/app/features/explore/state/epics/processQueryErrorsEpic.test.ts new file mode 100644 index 000000000000..7cdaca78f7d0 --- /dev/null +++ b/public/app/features/explore/state/epics/processQueryErrorsEpic.test.ts @@ -0,0 +1,67 @@ +import { mockExploreState } from 'test/mocks/mockExploreState'; +import { epicTester } from 'test/core/redux/epicTester'; +import { 
processQueryErrorsAction, queryFailureAction } from '../actionTypes'; +import { processQueryErrorsEpic } from './processQueryErrorsEpic'; + +describe('processQueryErrorsEpic', () => { + let originalConsoleError = console.error; + + beforeEach(() => { + originalConsoleError = console.error; + console.error = jest.fn(); + }); + + afterEach(() => { + console.error = originalConsoleError; + }); + + describe('when processQueryErrorsAction is dispatched', () => { + describe('and datasourceInstance is the same', () => { + describe('and the response is not cancelled', () => { + it('then queryFailureAction is dispatched', () => { + const { datasourceId, exploreId, state, eventBridge } = mockExploreState(); + const response = { message: 'Something went terribly wrong!' }; + + epicTester(processQueryErrorsEpic, state) + .whenActionIsDispatched(processQueryErrorsAction({ exploreId, datasourceId, response })) + .thenResultingActionsEqual(queryFailureAction({ exploreId, response })); + + expect(console.error).toBeCalledTimes(1); + expect(console.error).toBeCalledWith(response); + expect(eventBridge.emit).toBeCalledTimes(1); + expect(eventBridge.emit).toBeCalledWith('data-error', response); + }); + }); + + describe('and the response is cancelled', () => { + it('then no actions are dispatched', () => { + const { datasourceId, exploreId, state, eventBridge } = mockExploreState(); + const response = { cancelled: true, message: 'Something went terribly wrong!' 
}; + + epicTester(processQueryErrorsEpic, state) + .whenActionIsDispatched(processQueryErrorsAction({ exploreId, datasourceId, response })) + .thenNoActionsWhereDispatched(); + + expect(console.error).not.toBeCalled(); + expect(eventBridge.emit).not.toBeCalled(); + }); + }); + }); + + describe('and datasourceInstance is not the same', () => { + describe('and the response is not cancelled', () => { + it('then no actions are dispatched', () => { + const { exploreId, state, eventBridge } = mockExploreState(); + const response = { message: 'Something went terribly wrong!' }; + + epicTester(processQueryErrorsEpic, state) + .whenActionIsDispatched(processQueryErrorsAction({ exploreId, datasourceId: 'other id', response })) + .thenNoActionsWhereDispatched(); + + expect(console.error).not.toBeCalled(); + expect(eventBridge.emit).not.toBeCalled(); + }); + }); + }); + }); +}); diff --git a/public/app/features/explore/state/epics/processQueryErrorsEpic.ts b/public/app/features/explore/state/epics/processQueryErrorsEpic.ts new file mode 100644 index 000000000000..ea029186dc89 --- /dev/null +++ b/public/app/features/explore/state/epics/processQueryErrorsEpic.ts @@ -0,0 +1,40 @@ +import { Epic } from 'redux-observable'; +import { mergeMap } from 'rxjs/operators'; +import { NEVER, of } from 'rxjs'; + +import { ActionOf } from 'app/core/redux/actionCreatorFactory'; +import { StoreState } from 'app/types/store'; +import { instanceOfDataQueryError } from 'app/core/utils/explore'; +import { toDataQueryError } from 'app/features/dashboard/state/PanelQueryState'; +import { processQueryErrorsAction, ProcessQueryErrorsPayload, queryFailureAction } from '../actionTypes'; + +export const processQueryErrorsEpic: Epic, ActionOf, StoreState> = (action$, state$) => { + return action$.ofType(processQueryErrorsAction.type).pipe( + mergeMap((action: ActionOf) => { + const { exploreId, datasourceId } = action.payload; + let { response } = action.payload; + const { datasourceInstance, eventBridge } 
= state$.value.explore[exploreId]; + + if (datasourceInstance.meta.id !== datasourceId || response.cancelled) { + // Navigated away, queries did not matter + return NEVER; + } + + // For Angular editors + eventBridge.emit('data-error', response); + + console.error(response); // To help finding problems with query syntax + + if (!instanceOfDataQueryError(response)) { + response = toDataQueryError(response); + } + + return of( + queryFailureAction({ + exploreId, + response, + }) + ); + }) + ); +}; diff --git a/public/app/features/explore/state/epics/processQueryResultsEpic.test.ts b/public/app/features/explore/state/epics/processQueryResultsEpic.test.ts new file mode 100644 index 000000000000..c5da93081aa5 --- /dev/null +++ b/public/app/features/explore/state/epics/processQueryResultsEpic.test.ts @@ -0,0 +1,119 @@ +import { mockExploreState } from 'test/mocks/mockExploreState'; +import { epicTester } from 'test/core/redux/epicTester'; +import { + processQueryResultsAction, + resetQueryErrorAction, + querySuccessAction, + scanStopAction, + scanRangeAction, +} from '../actionTypes'; +import { SeriesData, LoadingState } from '@grafana/ui'; +import { processQueryResultsEpic } from './processQueryResultsEpic'; +import TableModel from 'app/core/table_model'; + +const testContext = () => { + const serieA: SeriesData = { + fields: [], + refId: 'A', + rows: [], + }; + const serieB: SeriesData = { + fields: [], + refId: 'B', + rows: [], + }; + const series = [serieA, serieB]; + const latency = 0; + const loadingState = LoadingState.Done; + + return { + latency, + series, + loadingState, + }; +}; + +describe('processQueryResultsEpic', () => { + describe('when processQueryResultsAction is dispatched', () => { + describe('and datasourceInstance is the same', () => { + describe('and explore is not scanning', () => { + it('then resetQueryErrorAction and querySuccessAction are dispatched and eventBridge emits correct message', () => { + const { datasourceId, exploreId, state, 
eventBridge } = mockExploreState(); + const { latency, series, loadingState } = testContext(); + const graphResult = []; + const tableResult = new TableModel(); + const logsResult = null; + + epicTester(processQueryResultsEpic, state) + .whenActionIsDispatched( + processQueryResultsAction({ exploreId, datasourceId, loadingState, series, latency }) + ) + .thenResultingActionsEqual( + resetQueryErrorAction({ exploreId, refIds: ['A', 'B'] }), + querySuccessAction({ exploreId, loadingState, graphResult, tableResult, logsResult, latency }) + ); + + expect(eventBridge.emit).toBeCalledTimes(1); + expect(eventBridge.emit).toBeCalledWith('data-received', series); + }); + }); + + describe('and explore is scanning', () => { + describe('and we have a result', () => { + it('then correct actions are dispatched', () => { + const { datasourceId, exploreId, state } = mockExploreState({ scanning: true }); + const { latency, series, loadingState } = testContext(); + const graphResult = []; + const tableResult = new TableModel(); + const logsResult = null; + + epicTester(processQueryResultsEpic, state) + .whenActionIsDispatched( + processQueryResultsAction({ exploreId, datasourceId, loadingState, series, latency }) + ) + .thenResultingActionsEqual( + resetQueryErrorAction({ exploreId, refIds: ['A', 'B'] }), + querySuccessAction({ exploreId, loadingState, graphResult, tableResult, logsResult, latency }), + scanStopAction({ exploreId }) + ); + }); + }); + + describe('and we do not have a result', () => { + it('then correct actions are dispatched', () => { + const { datasourceId, exploreId, state, scanner } = mockExploreState({ scanning: true }); + const { latency, loadingState } = testContext(); + const graphResult = []; + const tableResult = new TableModel(); + const logsResult = null; + + epicTester(processQueryResultsEpic, state) + .whenActionIsDispatched( + processQueryResultsAction({ exploreId, datasourceId, loadingState, series: [], latency }) + ) + .thenResultingActionsEqual( + 
resetQueryErrorAction({ exploreId, refIds: [] }), + querySuccessAction({ exploreId, loadingState, graphResult, tableResult, logsResult, latency }), + scanRangeAction({ exploreId, range: scanner() }) + ); + }); + }); + }); + }); + + describe('and datasourceInstance is not the same', () => { + it('then no actions are dispatched and eventBridge does not emit message', () => { + const { exploreId, state, eventBridge } = mockExploreState(); + const { series, loadingState } = testContext(); + + epicTester(processQueryResultsEpic, state) + .whenActionIsDispatched( + processQueryResultsAction({ exploreId, datasourceId: 'other id', loadingState, series, latency: 0 }) + ) + .thenNoActionsWhereDispatched(); + + expect(eventBridge.emit).not.toBeCalled(); + }); + }); + }); +}); diff --git a/public/app/features/explore/state/epics/processQueryResultsEpic.ts b/public/app/features/explore/state/epics/processQueryResultsEpic.ts new file mode 100644 index 000000000000..76e767c36a09 --- /dev/null +++ b/public/app/features/explore/state/epics/processQueryResultsEpic.ts @@ -0,0 +1,76 @@ +import _ from 'lodash'; +import { Epic } from 'redux-observable'; +import { mergeMap } from 'rxjs/operators'; +import { NEVER } from 'rxjs'; +import { LoadingState } from '@grafana/ui'; + +import { ActionOf } from 'app/core/redux/actionCreatorFactory'; +import { StoreState } from 'app/types/store'; +import { getRefIds } from 'app/core/utils/explore'; +import { + processQueryResultsAction, + ProcessQueryResultsPayload, + querySuccessAction, + scanRangeAction, + resetQueryErrorAction, + scanStopAction, +} from '../actionTypes'; +import { ResultProcessor } from '../../utils/ResultProcessor'; + +export const processQueryResultsEpic: Epic, ActionOf, StoreState> = (action$, state$) => { + return action$.ofType(processQueryResultsAction.type).pipe( + mergeMap((action: ActionOf) => { + const { exploreId, datasourceId, latency, loadingState, series, delta } = action.payload; + const { datasourceInstance, 
scanning, scanner, eventBridge } = state$.value.explore[exploreId]; + + // If datasource already changed, results do not matter + if (datasourceInstance.meta.id !== datasourceId) { + return NEVER; + } + + const result = series || delta || []; + const replacePreviousResults = loadingState === LoadingState.Done && series && !delta ? true : false; + const resultProcessor = new ResultProcessor(state$.value.explore[exploreId], replacePreviousResults, result); + const graphResult = resultProcessor.getGraphResult(); + const tableResult = resultProcessor.getTableResult(); + const logsResult = resultProcessor.getLogsResult(); + const refIds = getRefIds(result); + const actions: Array> = []; + + // For Angular editors + eventBridge.emit('data-received', resultProcessor.getRawData()); + + // Clears any previous errors that now have a successful query, important so Angular editors are updated correctly + actions.push( + resetQueryErrorAction({ + exploreId, + refIds, + }) + ); + + actions.push( + querySuccessAction({ + exploreId, + latency, + loadingState, + graphResult, + tableResult, + logsResult, + }) + ); + + // Keep scanning for results if this was the last scanning transaction + if (scanning) { + if (_.size(result) === 0) { + const range = scanner(); + actions.push(scanRangeAction({ exploreId, range })); + } else { + // We can stop scanning if we have a result + actions.push(scanStopAction({ exploreId })); + } + } + + return actions; + }) + ); +}; diff --git a/public/app/features/explore/state/epics/runQueriesBatchEpic.test.ts b/public/app/features/explore/state/epics/runQueriesBatchEpic.test.ts new file mode 100644 index 000000000000..6ddada2bc32a --- /dev/null +++ b/public/app/features/explore/state/epics/runQueriesBatchEpic.test.ts @@ -0,0 +1,421 @@ +import { mockExploreState } from 'test/mocks/mockExploreState'; +import { epicTester } from 'test/core/redux/epicTester'; +import { runQueriesBatchEpic } from './runQueriesBatchEpic'; +import { + runQueriesBatchAction, + 
queryStartAction, + historyUpdatedAction, + processQueryResultsAction, + processQueryErrorsAction, + limitMessageRatePayloadAction, + resetExploreAction, + updateDatasourceInstanceAction, + changeRefreshIntervalAction, + clearQueriesAction, + stateSaveAction, +} from '../actionTypes'; +import { LoadingState, DataQueryRequest, SeriesData, FieldType } from '@grafana/ui'; + +const testContext = () => { + const series: SeriesData[] = [ + { + fields: [ + { + name: 'Value', + }, + { + name: 'Time', + type: FieldType.time, + unit: 'dateTimeAsIso', + }, + ], + rows: [], + refId: 'A', + }, + ]; + const response = { data: series }; + + return { + response, + series, + }; +}; + +describe('runQueriesBatchEpic', () => { + let originalDateNow = Date.now; + beforeEach(() => { + originalDateNow = Date.now; + Date.now = () => 1337; + }); + + afterEach(() => { + Date.now = originalDateNow; + }); + + describe('when runQueriesBatchAction is dispatched', () => { + describe('and query targets are not live', () => { + describe('and query is successful', () => { + it('then correct actions are dispatched', () => { + const { response, series } = testContext(); + const { exploreId, state, history, datasourceId } = mockExploreState(); + + epicTester(runQueriesBatchEpic, state) + .whenActionIsDispatched( + runQueriesBatchAction({ exploreId, queryOptions: { live: false, interval: '', maxDataPoints: 1980 } }) + ) + .whenQueryReceivesResponse(response) + .thenResultingActionsEqual( + queryStartAction({ exploreId }), + historyUpdatedAction({ exploreId, history }), + processQueryResultsAction({ + exploreId, + delta: null, + series, + latency: 0, + datasourceId, + loadingState: LoadingState.Done, + }), + stateSaveAction() + ); + }); + }); + + describe('and query is not successful', () => { + it('then correct actions are dispatched', () => { + const error = { + message: 'Error parsing line x', + }; + const { exploreId, state, datasourceId } = mockExploreState(); + + epicTester(runQueriesBatchEpic, 
state) + .whenActionIsDispatched( + runQueriesBatchAction({ exploreId, queryOptions: { live: false, interval: '', maxDataPoints: 1980 } }) + ) + .whenQueryThrowsError(error) + .thenResultingActionsEqual( + queryStartAction({ exploreId }), + processQueryErrorsAction({ exploreId, response: error, datasourceId }) + ); + }); + }); + }); + + describe('and query targets are live', () => { + describe('and state equals Streaming', () => { + it('then correct actions are dispatched', () => { + const { exploreId, state, datasourceId } = mockExploreState(); + const unsubscribe = jest.fn(); + const serieA = { + fields: [], + rows: [], + refId: 'A', + }; + const serieB = { + fields: [], + rows: [], + refId: 'B', + }; + + epicTester(runQueriesBatchEpic, state) + .whenActionIsDispatched( + runQueriesBatchAction({ exploreId, queryOptions: { live: true, interval: '', maxDataPoints: 1980 } }) + ) + .whenQueryObserverReceivesEvent({ + state: LoadingState.Streaming, + delta: [serieA], + key: 'some key', + request: {} as DataQueryRequest, + unsubscribe, + }) + .whenQueryObserverReceivesEvent({ + state: LoadingState.Streaming, + delta: [serieB], + key: 'some key', + request: {} as DataQueryRequest, + unsubscribe, + }) + .thenResultingActionsEqual( + queryStartAction({ exploreId }), + limitMessageRatePayloadAction({ exploreId, series: [serieA], datasourceId }), + limitMessageRatePayloadAction({ exploreId, series: [serieB], datasourceId }) + ); + }); + }); + + describe('and state equals Error', () => { + it('then correct actions are dispatched', () => { + const { exploreId, state, datasourceId } = mockExploreState(); + const unsubscribe = jest.fn(); + const error = { message: 'Something went really wrong!' 
}; + + epicTester(runQueriesBatchEpic, state) + .whenActionIsDispatched( + runQueriesBatchAction({ exploreId, queryOptions: { live: true, interval: '', maxDataPoints: 1980 } }) + ) + .whenQueryObserverReceivesEvent({ + state: LoadingState.Error, + error, + key: 'some key', + request: {} as DataQueryRequest, + unsubscribe, + }) + .thenResultingActionsEqual( + queryStartAction({ exploreId }), + processQueryErrorsAction({ exploreId, response: error, datasourceId }) + ); + }); + }); + + describe('and state equals Done', () => { + it('then correct actions are dispatched', () => { + const { exploreId, state, datasourceId, history } = mockExploreState(); + const unsubscribe = jest.fn(); + const serieA = { + fields: [], + rows: [], + refId: 'A', + }; + const serieB = { + fields: [], + rows: [], + refId: 'B', + }; + const delta = [serieA, serieB]; + + epicTester(runQueriesBatchEpic, state) + .whenActionIsDispatched( + runQueriesBatchAction({ exploreId, queryOptions: { live: true, interval: '', maxDataPoints: 1980 } }) + ) + .whenQueryObserverReceivesEvent({ + state: LoadingState.Done, + series: null, + delta, + key: 'some key', + request: {} as DataQueryRequest, + unsubscribe, + }) + .thenResultingActionsEqual( + queryStartAction({ exploreId }), + historyUpdatedAction({ exploreId, history }), + processQueryResultsAction({ + exploreId, + delta, + series: null, + latency: 0, + datasourceId, + loadingState: LoadingState.Done, + }), + stateSaveAction() + ); + }); + }); + }); + + describe('and another runQueriesBatchAction is dispatched', () => { + it('then the observable should be unsubscribed', () => { + const { response, series } = testContext(); + const { exploreId, state, history, datasourceId } = mockExploreState(); + const unsubscribe = jest.fn(); + + epicTester(runQueriesBatchEpic, state) + .whenActionIsDispatched( + runQueriesBatchAction({ exploreId, queryOptions: { live: false, interval: '', maxDataPoints: 1980 } }) // first observable + ) + 
.whenQueryReceivesResponse(response) + .whenQueryObserverReceivesEvent({ + key: 'some key', + request: {} as DataQueryRequest, + state: LoadingState.Loading, // fake just to setup and test unsubscribe + unsubscribe, + }) + .whenActionIsDispatched( + // second observable and unsubscribes the first observable + runQueriesBatchAction({ exploreId, queryOptions: { live: true, interval: '', maxDataPoints: 800 } }) + ) + .whenQueryReceivesResponse(response) + .whenQueryObserverReceivesEvent({ + key: 'some key', + request: {} as DataQueryRequest, + state: LoadingState.Loading, // fake just to setup and test unsubscribe + unsubscribe, + }) + .thenResultingActionsEqual( + queryStartAction({ exploreId }), // output from first observable + historyUpdatedAction({ exploreId, history }), // output from first observable + processQueryResultsAction({ + exploreId, + delta: null, + series, + latency: 0, + datasourceId, + loadingState: LoadingState.Done, + }), + stateSaveAction(), + // output from first observable + queryStartAction({ exploreId }), // output from second observable + historyUpdatedAction({ exploreId, history }), // output from second observable + processQueryResultsAction({ + exploreId, + delta: null, + series, + latency: 0, + datasourceId, + loadingState: LoadingState.Done, + }), + stateSaveAction() + // output from second observable + ); + + expect(unsubscribe).toBeCalledTimes(1); // first unsubscribe should be called but not second as that isn't unsubscribed + }); + }); + + describe('and resetExploreAction is dispatched', () => { + it('then the observable should be unsubscribed', () => { + const { response, series } = testContext(); + const { exploreId, state, history, datasourceId } = mockExploreState(); + const unsubscribe = jest.fn(); + + epicTester(runQueriesBatchEpic, state) + .whenActionIsDispatched( + runQueriesBatchAction({ exploreId, queryOptions: { live: false, interval: '', maxDataPoints: 1980 } }) + ) + .whenQueryReceivesResponse(response) + 
.whenQueryObserverReceivesEvent({ + key: 'some key', + request: {} as DataQueryRequest, + state: LoadingState.Loading, // fake just to setup and test unsubscribe + unsubscribe, + }) + .whenActionIsDispatched(resetExploreAction()) // unsubscribes the observable + .whenQueryReceivesResponse(response) // new updates will not reach anywhere + .thenResultingActionsEqual( + queryStartAction({ exploreId }), + historyUpdatedAction({ exploreId, history }), + processQueryResultsAction({ + exploreId, + delta: null, + series, + latency: 0, + datasourceId, + loadingState: LoadingState.Done, + }), + stateSaveAction() + ); + + expect(unsubscribe).toBeCalledTimes(1); + }); + }); + + describe('and updateDatasourceInstanceAction is dispatched', () => { + it('then the observable should be unsubscribed', () => { + const { response, series } = testContext(); + const { exploreId, state, history, datasourceId, datasourceInstance } = mockExploreState(); + const unsubscribe = jest.fn(); + + epicTester(runQueriesBatchEpic, state) + .whenActionIsDispatched( + runQueriesBatchAction({ exploreId, queryOptions: { live: false, interval: '', maxDataPoints: 1980 } }) + ) + .whenQueryReceivesResponse(response) + .whenQueryObserverReceivesEvent({ + key: 'some key', + request: {} as DataQueryRequest, + state: LoadingState.Loading, // fake just to setup and test unsubscribe + unsubscribe, + }) + .whenActionIsDispatched(updateDatasourceInstanceAction({ exploreId, datasourceInstance })) // unsubscribes the observable + .whenQueryReceivesResponse(response) // new updates will not reach anywhere + .thenResultingActionsEqual( + queryStartAction({ exploreId }), + historyUpdatedAction({ exploreId, history }), + processQueryResultsAction({ + exploreId, + delta: null, + series, + latency: 0, + datasourceId, + loadingState: LoadingState.Done, + }), + stateSaveAction() + ); + + expect(unsubscribe).toBeCalledTimes(1); + }); + }); + + describe('and changeRefreshIntervalAction is dispatched', () => { + it('then the 
observable should be unsubscribed', () => { + const { response, series } = testContext(); + const { exploreId, state, history, datasourceId } = mockExploreState(); + const unsubscribe = jest.fn(); + + epicTester(runQueriesBatchEpic, state) + .whenActionIsDispatched( + runQueriesBatchAction({ exploreId, queryOptions: { live: false, interval: '', maxDataPoints: 1980 } }) + ) + .whenQueryReceivesResponse(response) + .whenQueryObserverReceivesEvent({ + key: 'some key', + request: {} as DataQueryRequest, + state: LoadingState.Loading, // fake just to setup and test unsubscribe + unsubscribe, + }) + .whenActionIsDispatched(changeRefreshIntervalAction({ exploreId, refreshInterval: '' })) // unsubscribes the observable + .whenQueryReceivesResponse(response) // new updates will not reach anywhere + .thenResultingActionsEqual( + queryStartAction({ exploreId }), + historyUpdatedAction({ exploreId, history }), + processQueryResultsAction({ + exploreId, + delta: null, + series, + latency: 0, + datasourceId, + loadingState: LoadingState.Done, + }), + stateSaveAction() + ); + + expect(unsubscribe).toBeCalledTimes(1); + }); + }); + + describe('and clearQueriesAction is dispatched', () => { + it('then the observable should be unsubscribed', () => { + const { response, series } = testContext(); + const { exploreId, state, history, datasourceId } = mockExploreState(); + const unsubscribe = jest.fn(); + + epicTester(runQueriesBatchEpic, state) + .whenActionIsDispatched( + runQueriesBatchAction({ exploreId, queryOptions: { live: false, interval: '', maxDataPoints: 1980 } }) + ) + .whenQueryReceivesResponse(response) + .whenQueryObserverReceivesEvent({ + key: 'some key', + request: {} as DataQueryRequest, + state: LoadingState.Loading, // fake just to setup and test unsubscribe + unsubscribe, + }) + .whenActionIsDispatched(clearQueriesAction({ exploreId })) // unsubscribes the observable + .whenQueryReceivesResponse(response) // new updates will not reach anywhere + 
.thenResultingActionsEqual( + queryStartAction({ exploreId }), + historyUpdatedAction({ exploreId, history }), + processQueryResultsAction({ + exploreId, + delta: null, + series, + latency: 0, + datasourceId, + loadingState: LoadingState.Done, + }), + stateSaveAction() + ); + + expect(unsubscribe).toBeCalledTimes(1); + }); + }); + }); +}); diff --git a/public/app/features/explore/state/epics/runQueriesBatchEpic.ts b/public/app/features/explore/state/epics/runQueriesBatchEpic.ts new file mode 100644 index 000000000000..8e2642f193f8 --- /dev/null +++ b/public/app/features/explore/state/epics/runQueriesBatchEpic.ts @@ -0,0 +1,220 @@ +import { Epic } from 'redux-observable'; +import { Observable, Subject } from 'rxjs'; +import { mergeMap, catchError, takeUntil, filter } from 'rxjs/operators'; +import _, { isString } from 'lodash'; +import { isLive } from '@grafana/ui/src/components/RefreshPicker/RefreshPicker'; +import { DataStreamState, LoadingState, DataQueryResponse, SeriesData, DataQueryResponseData } from '@grafana/ui'; +import * as dateMath from '@grafana/ui/src/utils/datemath'; + +import { ActionOf } from 'app/core/redux/actionCreatorFactory'; +import { StoreState } from 'app/types/store'; +import { buildQueryTransaction, updateHistory } from 'app/core/utils/explore'; +import { + clearQueriesAction, + historyUpdatedAction, + resetExploreAction, + updateDatasourceInstanceAction, + changeRefreshIntervalAction, + processQueryErrorsAction, + processQueryResultsAction, + runQueriesBatchAction, + RunQueriesBatchPayload, + queryStartAction, + limitMessageRatePayloadAction, + stateSaveAction, + changeRangeAction, +} from '../actionTypes'; +import { ExploreId, ExploreItemState } from 'app/types'; + +const publishActions = (outerObservable: Subject, actions: Array>) => { + for (const action of actions) { + outerObservable.next(action); + } +}; + +interface ProcessResponseConfig { + exploreId: ExploreId; + exploreItemState: ExploreItemState; + datasourceId: string; + now: 
number; + loadingState: LoadingState; + series?: DataQueryResponseData[]; + delta?: SeriesData[]; +} + +const processResponse = (config: ProcessResponseConfig) => { + const { exploreId, exploreItemState, datasourceId, now, loadingState, series, delta } = config; + const { queries, history } = exploreItemState; + const latency = Date.now() - now; + + // Side-effect: Saving history in localstorage + const nextHistory = updateHistory(history, datasourceId, queries); + return [ + historyUpdatedAction({ exploreId, history: nextHistory }), + processQueryResultsAction({ exploreId, latency, datasourceId, loadingState, series, delta }), + stateSaveAction(), + ]; +}; + +interface ProcessErrorConfig { + exploreId: ExploreId; + datasourceId: string; + error: any; +} + +const processError = (config: ProcessErrorConfig) => { + const { exploreId, datasourceId, error } = config; + + return [processQueryErrorsAction({ exploreId, response: error, datasourceId })]; +}; + +export const runQueriesBatchEpic: Epic, ActionOf, StoreState> = ( + action$, + state$, + { getQueryResponse } +) => { + return action$.ofType(runQueriesBatchAction.type).pipe( + mergeMap((action: ActionOf) => { + const { exploreId, queryOptions } = action.payload; + const exploreItemState = state$.value.explore[exploreId]; + const { datasourceInstance, queries, queryIntervals, range, scanning } = exploreItemState; + + // Create an observable per run queries action + // Within the observable create two subscriptions + // First subscription: 'querySubscription' subscribes to the call to query method on datasourceinstance + // Second subscription: 'streamSubscription' subscribes to events from the query methods observer callback + const observable: Observable> = Observable.create((outerObservable: Subject) => { + const datasourceId = datasourceInstance.meta.id; + const transaction = buildQueryTransaction(queries, queryOptions, range, queryIntervals, scanning); + outerObservable.next(queryStartAction({ exploreId })); + 
+ const now = Date.now(); + let datasourceUnsubscribe: Function = null; + const streamHandler = new Subject(); + const observer = (event: DataStreamState) => { + datasourceUnsubscribe = event.unsubscribe; + if (!streamHandler.closed) { + // their might be a race condition when unsubscribing + streamHandler.next(event); + } + }; + + // observer subscription, handles datasourceInstance.query observer events and pushes that forward + const streamSubscription = streamHandler.subscribe({ + next: event => { + const { state, error, series, delta } = event; + if (!series && !delta && !error) { + return; + } + + if (state === LoadingState.Error) { + const actions = processError({ exploreId, datasourceId, error }); + publishActions(outerObservable, actions); + } + + if (state === LoadingState.Streaming) { + if (event.request && event.request.range) { + let newRange = event.request.range; + if (isString(newRange.raw.from)) { + newRange = { + from: dateMath.parse(newRange.raw.from, false), + to: dateMath.parse(newRange.raw.to, true), + raw: newRange.raw, + }; + } + outerObservable.next(changeRangeAction({ exploreId, range: newRange })); + } + outerObservable.next( + limitMessageRatePayloadAction({ + exploreId, + series: delta, + datasourceId, + }) + ); + } + + if (state === LoadingState.Done || state === LoadingState.Loading) { + const actions = processResponse({ + exploreId, + exploreItemState, + datasourceId, + now, + loadingState: state, + series: null, + delta, + }); + publishActions(outerObservable, actions); + } + }, + }); + + // query subscription, handles datasourceInstance.query response and pushes that forward + const querySubscription = getQueryResponse(datasourceInstance, transaction.options, observer) + .pipe( + mergeMap((response: DataQueryResponse) => { + return processResponse({ + exploreId, + exploreItemState, + datasourceId, + now, + loadingState: LoadingState.Done, + series: response && response.data ? 
response.data : [], + delta: null, + }); + }), + catchError(error => { + return processError({ exploreId, datasourceId, error }); + }) + ) + .subscribe({ next: (action: ActionOf) => outerObservable.next(action) }); + + // this unsubscribe method will be called when any of the takeUntil actions below happen + const unsubscribe = () => { + if (datasourceUnsubscribe) { + datasourceUnsubscribe(); + } + querySubscription.unsubscribe(); + streamSubscription.unsubscribe(); + streamHandler.unsubscribe(); + outerObservable.unsubscribe(); + }; + + return unsubscribe; + }); + + return observable.pipe( + takeUntil( + action$ + .ofType( + runQueriesBatchAction.type, + resetExploreAction.type, + updateDatasourceInstanceAction.type, + changeRefreshIntervalAction.type, + clearQueriesAction.type + ) + .pipe( + filter(action => { + if (action.type === resetExploreAction.type) { + return true; // stops all subscriptions if user navigates away + } + + if (action.type === updateDatasourceInstanceAction.type && action.payload.exploreId === exploreId) { + return true; // stops subscriptions if user changes data source + } + + if (action.type === changeRefreshIntervalAction.type && action.payload.exploreId === exploreId) { + return !isLive(action.payload.refreshInterval); // stops subscriptions if user changes refresh interval away from 'Live' + } + + if (action.type === clearQueriesAction.type && action.payload.exploreId === exploreId) { + return true; // stops subscriptions if user clears all queries + } + + return action.payload.exploreId === exploreId; + }) + ) + ) + ); + }) + ); +}; diff --git a/public/app/features/explore/state/epics/runQueriesEpic.test.ts b/public/app/features/explore/state/epics/runQueriesEpic.test.ts new file mode 100644 index 000000000000..87b1f86513f1 --- /dev/null +++ b/public/app/features/explore/state/epics/runQueriesEpic.test.ts @@ -0,0 +1,71 @@ +import { mockExploreState } from 'test/mocks/mockExploreState'; +import { epicTester } from 
'test/core/redux/epicTester'; +import { runQueriesAction, stateSaveAction, runQueriesBatchAction, clearQueriesAction } from '../actionTypes'; +import { runQueriesEpic } from './runQueriesEpic'; + +describe('runQueriesEpic', () => { + describe('when runQueriesAction is dispatched', () => { + describe('and there is no datasourceError', () => { + describe('and we have non empty queries', () => { + describe('and explore is not live', () => { + it('then runQueriesBatchAction and stateSaveAction are dispatched', () => { + const queries = [{ refId: 'A', key: '123456', expr: '{__filename__="some.log"}' }]; + const { exploreId, state, datasourceInterval, containerWidth } = mockExploreState({ queries }); + + epicTester(runQueriesEpic, state) + .whenActionIsDispatched(runQueriesAction({ exploreId, range: null })) + .thenResultingActionsEqual( + runQueriesBatchAction({ + exploreId, + queryOptions: { interval: datasourceInterval, maxDataPoints: containerWidth, live: false }, + }) + ); + }); + }); + + describe('and explore is live', () => { + it('then runQueriesBatchAction and stateSaveAction are dispatched', () => { + const queries = [{ refId: 'A', key: '123456', expr: '{__filename__="some.log"}' }]; + const { exploreId, state, datasourceInterval, containerWidth } = mockExploreState({ + queries, + isLive: true, + streaming: true, + }); + + epicTester(runQueriesEpic, state) + .whenActionIsDispatched(runQueriesAction({ exploreId, range: null })) + .thenResultingActionsEqual( + runQueriesBatchAction({ + exploreId, + queryOptions: { interval: datasourceInterval, maxDataPoints: containerWidth, live: true }, + }) + ); + }); + }); + }); + + describe('and we have no queries', () => { + it('then clearQueriesAction and stateSaveAction are dispatched', () => { + const queries = []; + const { exploreId, state } = mockExploreState({ queries }); + + epicTester(runQueriesEpic, state) + .whenActionIsDispatched(runQueriesAction({ exploreId, range: null })) + 
.thenResultingActionsEqual(clearQueriesAction({ exploreId }), stateSaveAction()); + }); + }); + }); + + describe('and there is a datasourceError', () => { + it('then no actions are dispatched', () => { + const { exploreId, state } = mockExploreState({ + datasourceError: { message: 'Some error' }, + }); + + epicTester(runQueriesEpic, state) + .whenActionIsDispatched(runQueriesAction({ exploreId, range: null })) + .thenNoActionsWhereDispatched(); + }); + }); + }); +}); diff --git a/public/app/features/explore/state/epics/runQueriesEpic.ts b/public/app/features/explore/state/epics/runQueriesEpic.ts new file mode 100644 index 000000000000..2102c11b103c --- /dev/null +++ b/public/app/features/explore/state/epics/runQueriesEpic.ts @@ -0,0 +1,39 @@ +import { Epic } from 'redux-observable'; +import { NEVER } from 'rxjs'; +import { mergeMap } from 'rxjs/operators'; + +import { ActionOf } from 'app/core/redux/actionCreatorFactory'; +import { StoreState } from 'app/types/store'; +import { hasNonEmptyQuery } from 'app/core/utils/explore'; +import { + clearQueriesAction, + runQueriesAction, + RunQueriesPayload, + runQueriesBatchAction, + stateSaveAction, +} from '../actionTypes'; + +export const runQueriesEpic: Epic, ActionOf, StoreState> = (action$, state$) => { + return action$.ofType(runQueriesAction.type).pipe( + mergeMap((action: ActionOf) => { + const { exploreId } = action.payload; + const { datasourceInstance, queries, datasourceError, containerWidth, isLive } = state$.value.explore[exploreId]; + + if (datasourceError) { + // let's not run any queries if data source is in a faulty state + return NEVER; + } + + if (!hasNonEmptyQuery(queries)) { + return [clearQueriesAction({ exploreId }), stateSaveAction()]; // Remember to save to state and update location + } + + // Some datasource's query builders allow per-query interval limits, + // but we're using the datasource interval limit for now + const interval = datasourceInstance.interval; + const live = isLive; + + return 
[runQueriesBatchAction({ exploreId, queryOptions: { interval, maxDataPoints: containerWidth, live } })]; + }) + ); +}; diff --git a/public/app/features/explore/state/epics/stateSaveEpic.test.ts b/public/app/features/explore/state/epics/stateSaveEpic.test.ts new file mode 100644 index 000000000000..bee12ad92a9e --- /dev/null +++ b/public/app/features/explore/state/epics/stateSaveEpic.test.ts @@ -0,0 +1,61 @@ +import { epicTester } from 'test/core/redux/epicTester'; +import { stateSaveEpic } from './stateSaveEpic'; +import { stateSaveAction, setUrlReplacedAction } from '../actionTypes'; +import { updateLocation } from 'app/core/actions/location'; +import { mockExploreState } from 'test/mocks/mockExploreState'; + +describe('stateSaveEpic', () => { + describe('when stateSaveAction is dispatched', () => { + describe('and there is a left state', () => { + describe('and no split', () => { + it('then the correct actions are dispatched', () => { + const { exploreId, state } = mockExploreState(); + + epicTester(stateSaveEpic, state) + .whenActionIsDispatched(stateSaveAction()) + .thenResultingActionsEqual( + updateLocation({ + query: { left: '["now-6h","now","test",{"ui":[true,true,true,null]}]' }, + replace: true, + }), + setUrlReplacedAction({ exploreId }) + ); + }); + }); + + describe('and explore is splitted', () => { + it('then the correct actions are dispatched', () => { + const { exploreId, state } = mockExploreState({ split: true }); + + epicTester(stateSaveEpic, state) + .whenActionIsDispatched(stateSaveAction()) + .thenResultingActionsEqual( + updateLocation({ + query: { + left: '["now-6h","now","test",{"ui":[true,true,true,null]}]', + right: '["now-6h","now","test",{"ui":[true,true,true,null]}]', + }, + replace: true, + }), + setUrlReplacedAction({ exploreId }) + ); + }); + }); + }); + + describe('and urlReplaced is true', () => { + it('then setUrlReplacedAction should not be dispatched', () => { + const { state } = mockExploreState({ urlReplaced: true }); + + 
epicTester(stateSaveEpic, state) + .whenActionIsDispatched(stateSaveAction()) + .thenResultingActionsEqual( + updateLocation({ + query: { left: '["now-6h","now","test",{"ui":[true,true,true,null]}]' }, + replace: false, + }) + ); + }); + }); + }); +}); diff --git a/public/app/features/explore/state/epics/stateSaveEpic.ts b/public/app/features/explore/state/epics/stateSaveEpic.ts new file mode 100644 index 000000000000..107f1de547b4 --- /dev/null +++ b/public/app/features/explore/state/epics/stateSaveEpic.ts @@ -0,0 +1,72 @@ +import { Epic } from 'redux-observable'; +import { mergeMap } from 'rxjs/operators'; +import { RawTimeRange, TimeRange } from '@grafana/ui/src/types/time'; +import { isDateTime } from '@grafana/ui/src/utils/moment_wrapper'; + +import { ActionOf } from 'app/core/redux/actionCreatorFactory'; +import { StoreState } from 'app/types/store'; +import { ExploreUrlState, ExploreId } from 'app/types/explore'; +import { clearQueryKeys, serializeStateToUrlParam } from 'app/core/utils/explore'; +import { updateLocation } from 'app/core/actions/location'; +import { setUrlReplacedAction, stateSaveAction } from '../actionTypes'; + +const toRawTimeRange = (range: TimeRange): RawTimeRange => { + let from = range.raw.from; + if (isDateTime(from)) { + from = from.valueOf().toString(10); + } + + let to = range.raw.to; + if (isDateTime(to)) { + to = to.valueOf().toString(10); + } + + return { + from, + to, + }; +}; + +export const stateSaveEpic: Epic, ActionOf, StoreState> = (action$, state$) => { + return action$.ofType(stateSaveAction.type).pipe( + mergeMap(() => { + const { left, right, split } = state$.value.explore; + const replace = left && left.urlReplaced === false; + const urlStates: { [index: string]: string } = {}; + const leftUrlState: ExploreUrlState = { + datasource: left.datasourceInstance.name, + queries: left.queries.map(clearQueryKeys), + range: toRawTimeRange(left.range), + ui: { + showingGraph: left.showingGraph, + showingLogs: true, + 
showingTable: left.showingTable, + dedupStrategy: left.dedupStrategy, + }, + }; + urlStates.left = serializeStateToUrlParam(leftUrlState, true); + if (split) { + const rightUrlState: ExploreUrlState = { + datasource: right.datasourceInstance.name, + queries: right.queries.map(clearQueryKeys), + range: toRawTimeRange(right.range), + ui: { + showingGraph: right.showingGraph, + showingLogs: true, + showingTable: right.showingTable, + dedupStrategy: right.dedupStrategy, + }, + }; + + urlStates.right = serializeStateToUrlParam(rightUrlState, true); + } + + const actions: Array> = [updateLocation({ query: urlStates, replace })]; + if (replace) { + actions.push(setUrlReplacedAction({ exploreId: ExploreId.left })); + } + + return actions; + }) + ); +}; diff --git a/public/app/features/explore/state/reducers.test.ts b/public/app/features/explore/state/reducers.test.ts index da9bdfabe261..1f553313f807 100644 --- a/public/app/features/explore/state/reducers.test.ts +++ b/public/app/features/explore/state/reducers.test.ts @@ -4,6 +4,7 @@ import { exploreReducer, makeInitialUpdateState, initialExploreState, + DEFAULT_RANGE, } from './reducers'; import { ExploreId, @@ -16,7 +17,6 @@ import { import { reducerTester } from 'test/core/redux/reducerTester'; import { scanStartAction, - scanStopAction, testDataSourcePendingAction, testDataSourceSuccessAction, testDataSourceFailureAction, @@ -24,6 +24,7 @@ import { splitOpenAction, splitCloseAction, changeModeAction, + scanStopAction, runQueriesAction, } from './actionTypes'; import { Reducer } from 'redux'; @@ -31,7 +32,7 @@ import { ActionOf } from 'app/core/redux/actionCreatorFactory'; import { updateLocation } from 'app/core/actions/location'; import { serializeStateToUrlParam } from 'app/core/utils/explore'; import TableModel from 'app/core/table_model'; -import { DataSourceApi, DataQuery, LogsModel, LogsDedupStrategy } from '@grafana/ui'; +import { DataSourceApi, DataQuery, LogsModel, LogsDedupStrategy, LoadingState, dateTime } 
from '@grafana/ui'; describe('Explore item reducer', () => { describe('scanning', () => { @@ -143,7 +144,6 @@ describe('Explore item reducer', () => { meta: { metrics: true, logs: true, - tables: true, }, components: { ExploreStartPage: StartPage, @@ -153,9 +153,6 @@ describe('Explore item reducer', () => { const queryKeys: string[] = []; const initalState: Partial = { datasourceInstance: null, - supportsGraph: false, - supportsLogs: false, - supportsTable: false, StartPage: null, showingStartPage: false, queries, @@ -163,18 +160,13 @@ describe('Explore item reducer', () => { }; const expectedState = { datasourceInstance, - supportsGraph: true, - supportsLogs: true, - supportsTable: true, StartPage, showingStartPage: true, queries, queryKeys, supportedModes: [ExploreMode.Metrics, ExploreMode.Logs], mode: ExploreMode.Metrics, - graphIsLoading: false, - tableIsLoading: false, - logIsLoading: false, + loadingState: LoadingState.NotStarted, latency: 0, queryErrors: [], }; @@ -193,6 +185,7 @@ describe('Explore item reducer', () => { it('then it should set correct state', () => { const initalState: Partial = { showingStartPage: true, + range: null, }; const expectedState = { queryIntervals: { @@ -200,11 +193,16 @@ describe('Explore item reducer', () => { intervalMs: 1000, }, showingStartPage: false, + range: { + from: dateTime(), + to: dateTime(), + raw: DEFAULT_RANGE, + }, }; reducerTester() .givenReducer(itemReducer, initalState) - .whenActionIsDispatched(runQueriesAction({ exploreId: ExploreId.left })) + .whenActionIsDispatched(runQueriesAction({ exploreId: ExploreId.left, range: expectedState.range })) .thenStateShouldEqual(expectedState); }); }); diff --git a/public/app/features/explore/state/reducers.ts b/public/app/features/explore/state/reducers.ts index 1291f3d749bb..67775b9626bd 100644 --- a/public/app/features/explore/state/reducers.ts +++ b/public/app/features/explore/state/reducers.ts @@ -1,6 +1,5 @@ import _ from 'lodash'; import { - 
calculateResultsFromQueryTransactions, getIntervals, ensureQueries, getQueryKeys, @@ -10,7 +9,7 @@ import { sortLogsResult, } from 'app/core/utils/explore'; import { ExploreItemState, ExploreState, ExploreId, ExploreUpdateState, ExploreMode } from 'app/types/explore'; -import { DataQuery, LogsModel } from '@grafana/ui'; +import { DataQuery, LoadingState } from '@grafana/ui'; import { HigherOrderAction, ActionTypes, @@ -20,10 +19,17 @@ import { splitCloseAction, SplitCloseActionPayload, loadExploreDatasources, - runQueriesAction, historyUpdatedAction, - resetQueryErrorAction, changeModeAction, + queryFailureAction, + setUrlReplacedAction, + querySuccessAction, + scanRangeAction, + scanStopAction, + resetQueryErrorAction, + queryStartAction, + runQueriesAction, + changeRangeAction, } from './actionTypes'; import { reducerFactory } from 'app/core/redux'; import { @@ -40,13 +46,8 @@ import { loadDatasourcePendingAction, loadDatasourceReadyAction, modifyQueriesAction, - queryFailureAction, - queryStartAction, - querySuccessAction, removeQueryRowAction, - scanRangeAction, scanStartAction, - scanStopAction, setQueriesAction, toggleTableAction, queriesImportedAction, @@ -57,8 +58,6 @@ import { updateLocation } from 'app/core/actions/location'; import { LocationUpdate } from 'app/types'; import TableModel from 'app/core/table_model'; import { isLive } from '@grafana/ui/src/components/RefreshPicker/RefreshPicker'; -import { subscriptionDataReceivedAction, startSubscriptionAction } from './epics'; -import { seriesDataToLogsModel } from 'app/core/logs_model'; export const DEFAULT_RANGE = { from: 'now-6h', @@ -100,12 +99,7 @@ export const makeExploreItemState = (): ExploreItemState => ({ scanRange: null, showingGraph: true, showingTable: true, - graphIsLoading: false, - logIsLoading: false, - tableIsLoading: false, - supportsGraph: null, - supportsLogs: null, - supportsTable: null, + loadingState: LoadingState.NotStarted, queryKeys: [], urlState: null, update: 
makeInitialUpdateState(), @@ -114,6 +108,7 @@ export const makeExploreItemState = (): ExploreItemState => ({ supportedModes: [], mode: null, isLive: false, + urlReplaced: false, }); /** @@ -194,10 +189,8 @@ export const itemReducer = reducerFactory({} as ExploreItemSta return { ...state, - refreshInterval: refreshInterval, - graphIsLoading: live ? true : false, - tableIsLoading: live ? true : false, - logIsLoading: live ? true : false, + refreshInterval, + loadingState: live ? LoadingState.Streaming : LoadingState.NotStarted, isLive: live, logsResult, }; @@ -246,7 +239,6 @@ export const itemReducer = reducerFactory({} as ExploreItemSta // Capabilities const supportsGraph = datasourceInstance.meta.metrics; const supportsLogs = datasourceInstance.meta.logs; - const supportsTable = datasourceInstance.meta.tables; let mode = ExploreMode.Metrics; const supportedModes: ExploreMode[] = []; @@ -271,12 +263,7 @@ export const itemReducer = reducerFactory({} as ExploreItemSta datasourceInstance, queryErrors: [], latency: 0, - graphIsLoading: false, - logIsLoading: false, - tableIsLoading: false, - supportsGraph, - supportsLogs, - supportsTable, + loadingState: LoadingState.NotStarted, StartPage, showingStartPage: Boolean(StartPage), queryKeys: getQueryKeys(state.queries, datasourceInstance), @@ -353,35 +340,29 @@ export const itemReducer = reducerFactory({} as ExploreItemSta .addMapper({ filter: queryFailureAction, mapper: (state, action): ExploreItemState => { - const { resultType, response } = action.payload; + const { response } = action.payload; const queryErrors = state.queryErrors.concat(response); return { ...state, - graphResult: resultType === 'Graph' ? null : state.graphResult, - tableResult: resultType === 'Table' ? null : state.tableResult, - logsResult: resultType === 'Logs' ? null : state.logsResult, + graphResult: null, + tableResult: null, + logsResult: null, latency: 0, queryErrors, - graphIsLoading: resultType === 'Graph' ? 
false : state.graphIsLoading, - logIsLoading: resultType === 'Logs' ? false : state.logIsLoading, - tableIsLoading: resultType === 'Table' ? false : state.tableIsLoading, + loadingState: LoadingState.Error, update: makeInitialUpdateState(), }; }, }) .addMapper({ filter: queryStartAction, - mapper: (state, action): ExploreItemState => { - const { resultType } = action.payload; - + mapper: (state): ExploreItemState => { return { ...state, queryErrors: [], latency: 0, - graphIsLoading: resultType === 'Graph' ? true : state.graphIsLoading, - logIsLoading: resultType === 'Logs' ? true : state.logIsLoading, - tableIsLoading: resultType === 'Table' ? true : state.tableIsLoading, + loadingState: LoadingState.Loading, update: makeInitialUpdateState(), }; }, @@ -389,80 +370,20 @@ export const itemReducer = reducerFactory({} as ExploreItemSta .addMapper({ filter: querySuccessAction, mapper: (state, action): ExploreItemState => { - const { queryIntervals, refreshInterval } = state; - const { result, resultType, latency } = action.payload; - const results = calculateResultsFromQueryTransactions(result, resultType, queryIntervals.intervalMs); - const live = isLive(refreshInterval); - - if (live) { - return state; - } - - return { - ...state, - graphResult: resultType === 'Graph' ? results.graphResult : state.graphResult, - tableResult: resultType === 'Table' ? results.tableResult : state.tableResult, - logsResult: - resultType === 'Logs' - ? sortLogsResult(results.logsResult, refreshInterval) - : sortLogsResult(state.logsResult, refreshInterval), - latency, - graphIsLoading: live ? true : false, - logIsLoading: live ? true : false, - tableIsLoading: live ? 
true : false, - showingStartPage: false, - update: makeInitialUpdateState(), - }; - }, - }) - .addMapper({ - filter: startSubscriptionAction, - mapper: (state): ExploreItemState => { - const logsResult = sortLogsResult(state.logsResult, state.refreshInterval); + const { latency, loadingState, graphResult, tableResult, logsResult } = action.payload; return { ...state, + loadingState, + graphResult, + tableResult, logsResult, - graphIsLoading: true, - logIsLoading: true, - tableIsLoading: true, + latency, showingStartPage: false, update: makeInitialUpdateState(), }; }, }) - .addMapper({ - filter: subscriptionDataReceivedAction, - mapper: (state, action): ExploreItemState => { - const { queryIntervals, refreshInterval } = state; - const { data } = action.payload; - const live = isLive(refreshInterval); - - if (!live) { - return state; - } - - const newResults = seriesDataToLogsModel([data], queryIntervals.intervalMs); - const rowsInState = sortLogsResult(state.logsResult, state.refreshInterval).rows; - - const processedRows = []; - for (const row of rowsInState) { - processedRows.push({ ...row, fresh: false }); - } - for (const row of newResults.rows) { - processedRows.push({ ...row, fresh: true }); - } - - const rows = processedRows.slice(processedRows.length - 1000, 1000); - - const logsResult: LogsModel = state.logsResult ? 
{ ...state.logsResult, rows } : { hasUniqueLabels: false, rows }; - - return { - ...state, - logsResult, - }; - }, - }) .addMapper({ filter: removeQueryRowAction, mapper: (state, action): ExploreItemState => { @@ -599,8 +520,9 @@ export const itemReducer = reducerFactory({} as ExploreItemSta }) .addMapper({ filter: runQueriesAction, - mapper: (state): ExploreItemState => { - const { range, datasourceInstance, containerWidth } = state; + mapper: (state, action): ExploreItemState => { + const { range } = action.payload; + const { datasourceInstance, containerWidth } = state; let interval = '1s'; if (datasourceInstance && datasourceInstance.interval) { interval = datasourceInstance.interval; @@ -608,6 +530,7 @@ export const itemReducer = reducerFactory({} as ExploreItemSta const queryIntervals = getIntervals(range, interval, containerWidth); return { ...state, + range, queryIntervals, showingStartPage: false, }; @@ -640,6 +563,24 @@ export const itemReducer = reducerFactory({} as ExploreItemSta }; }, }) + .addMapper({ + filter: setUrlReplacedAction, + mapper: (state): ExploreItemState => { + return { + ...state, + urlReplaced: true, + }; + }, + }) + .addMapper({ + filter: changeRangeAction, + mapper: (state, action): ExploreItemState => { + return { + ...state, + range: action.payload.range, + }; + }, + }) .create(); export const updateChildRefreshState = ( diff --git a/public/app/features/explore/utils/ResultProcessor.test.ts b/public/app/features/explore/utils/ResultProcessor.test.ts new file mode 100644 index 000000000000..4979afa538cb --- /dev/null +++ b/public/app/features/explore/utils/ResultProcessor.test.ts @@ -0,0 +1,453 @@ +jest.mock('@grafana/ui/src/utils/moment_wrapper', () => ({ + dateTime: (ts: any) => { + return { + valueOf: () => ts, + fromNow: () => 'fromNow() jest mocked', + format: (fmt: string) => 'format() jest mocked', + }; + }, +})); + +import { ResultProcessor } from './ResultProcessor'; +import { ExploreItemState, ExploreMode } from 
'app/types/explore'; +import TableModel from 'app/core/table_model'; +import { toFixed } from '@grafana/ui'; + +const testContext = (options: any = {}) => { + const response = [ + { + target: 'A-series', + alias: 'A-series', + datapoints: [[39.91264531864214, 1559038518831], [40.35179822906545, 1559038519831]], + refId: 'A', + }, + { + columns: [ + { + text: 'Time', + }, + { + text: 'Message', + }, + { + text: 'Description', + }, + { + text: 'Value', + }, + ], + rows: [ + [1559038518831, 'This is a message', 'Description', 23.1], + [1559038519831, 'This is a message', 'Description', 23.1], + ], + refId: 'B', + }, + ]; + const defaultOptions = { + mode: ExploreMode.Metrics, + replacePreviousResults: true, + result: { data: response }, + graphResult: [], + tableResult: new TableModel(), + logsResult: { hasUniqueLabels: false, rows: [] }, + }; + const combinedOptions = { ...defaultOptions, ...options }; + const state = ({ + mode: combinedOptions.mode, + graphResult: combinedOptions.graphResult, + tableResult: combinedOptions.tableResult, + logsResult: combinedOptions.logsResult, + queryIntervals: { intervalMs: 10 }, + } as any) as ExploreItemState; + const resultProcessor = new ResultProcessor(state, combinedOptions.replacePreviousResults, combinedOptions.result); + + return { + result: combinedOptions.result, + resultProcessor, + }; +}; + +describe('ResultProcessor', () => { + describe('constructed without result', () => { + describe('when calling getRawData', () => { + it('then it should return an empty array', () => { + const { resultProcessor } = testContext({ result: null }); + const theResult = resultProcessor.getRawData(); + + expect(theResult).toEqual([]); + }); + }); + + describe('when calling getGraphResult', () => { + it('then it should return an empty array', () => { + const { resultProcessor } = testContext({ result: null }); + const theResult = resultProcessor.getGraphResult(); + + expect(theResult).toEqual([]); + }); + }); + + describe('when calling 
getTableResult', () => { + it('then it should return an empty TableModel', () => { + const { resultProcessor } = testContext({ result: null }); + const theResult = resultProcessor.getTableResult(); + + expect(theResult).toEqual(new TableModel()); + }); + }); + + describe('when calling getLogsResult', () => { + it('then it should return null', () => { + const { resultProcessor } = testContext({ result: null }); + const theResult = resultProcessor.getLogsResult(); + + expect(theResult).toBeNull(); + }); + }); + }); + + describe('constructed with a result that is a DataQueryResponse', () => { + describe('when calling getRawData', () => { + it('then it should return result.data', () => { + const { result, resultProcessor } = testContext(); + const theResult = resultProcessor.getRawData(); + + expect(theResult).toEqual(result.data); + }); + }); + + describe('when calling getGraphResult', () => { + it('then it should return correct graph result', () => { + const { resultProcessor } = testContext(); + const theResult = resultProcessor.getGraphResult(); + + expect(theResult).toEqual([ + { + alias: 'A-series', + aliasEscaped: 'A-series', + bars: { + fillColor: '#7EB26D', + }, + hasMsResolution: true, + id: 'A-series', + label: 'A-series', + legend: true, + stats: {}, + color: '#7EB26D', + datapoints: [[39.91264531864214, 1559038518831], [40.35179822906545, 1559038519831]], + unit: undefined, + valueFormater: toFixed, + }, + ]); + }); + }); + + describe('when calling getTableResult', () => { + it('then it should return correct table result', () => { + const { resultProcessor } = testContext(); + const theResult = resultProcessor.getTableResult(); + + expect(theResult).toEqual({ + columnMap: {}, + columns: [{ text: 'Time' }, { text: 'Message' }, { text: 'Description' }, { text: 'Value' }], + rows: [ + [1559038518831, 'This is a message', 'Description', 23.1], + [1559038519831, 'This is a message', 'Description', 23.1], + ], + type: 'table', + }); + }); + }); + + 
describe('when calling getLogsResult', () => { + it('then it should return correct logs result', () => { + const { resultProcessor } = testContext({ mode: ExploreMode.Logs, observerResponse: null }); + const theResult = resultProcessor.getLogsResult(); + + expect(theResult).toEqual({ + hasUniqueLabels: false, + meta: [], + rows: [ + { + entry: 'This is a message', + hasAnsi: false, + labels: undefined, + logLevel: 'unknown', + raw: 'This is a message', + searchWords: [], + timeEpochMs: 1559038519831, + timeFromNow: 'fromNow() jest mocked', + timeLocal: 'format() jest mocked', + timestamp: 1559038519831, + uniqueLabels: {}, + }, + { + entry: 'This is a message', + hasAnsi: false, + labels: undefined, + logLevel: 'unknown', + raw: 'This is a message', + searchWords: [], + timeEpochMs: 1559038518831, + timeFromNow: 'fromNow() jest mocked', + timeLocal: 'format() jest mocked', + timestamp: 1559038518831, + uniqueLabels: {}, + }, + ], + series: [ + { + alias: 'A-series', + datapoints: [[39.91264531864214, 1559038518831], [40.35179822906545, 1559038519831]], + meta: undefined, + refId: 'A', + target: 'A-series', + unit: undefined, + }, + ], + }); + }); + }); + }); + + describe('constructed with result that is a DataQueryResponse and merging with previous results', () => { + describe('when calling getRawData', () => { + it('then it should return result.data', () => { + const { result, resultProcessor } = testContext(); + const theResult = resultProcessor.getRawData(); + + expect(theResult).toEqual(result.data); + }); + }); + + describe('when calling getGraphResult', () => { + it('then it should return correct graph result', () => { + const { resultProcessor } = testContext({ + replacePreviousResults: false, + graphResult: [ + { + alias: 'A-series', + aliasEscaped: 'A-series', + bars: { + fillColor: '#7EB26D', + }, + hasMsResolution: true, + id: 'A-series', + label: 'A-series', + legend: true, + stats: {}, + color: '#7EB26D', + datapoints: [[19.91264531864214, 
1558038518831], [20.35179822906545, 1558038519831]], + unit: undefined, + valueFormater: toFixed, + }, + ], + }); + const theResult = resultProcessor.getGraphResult(); + + expect(theResult).toEqual([ + { + alias: 'A-series', + aliasEscaped: 'A-series', + bars: { + fillColor: '#7EB26D', + }, + hasMsResolution: true, + id: 'A-series', + label: 'A-series', + legend: true, + stats: {}, + color: '#7EB26D', + datapoints: [ + [19.91264531864214, 1558038518831], + [20.35179822906545, 1558038519831], + [39.91264531864214, 1559038518831], + [40.35179822906545, 1559038519831], + ], + unit: undefined, + valueFormater: toFixed, + }, + ]); + }); + }); + + describe('when calling getTableResult', () => { + it('then it should return correct table result', () => { + const { resultProcessor } = testContext({ + replacePreviousResults: false, + tableResult: { + columnMap: {}, + columns: [{ text: 'Time' }, { text: 'Message' }, { text: 'Description' }, { text: 'Value' }], + rows: [ + [1558038518831, 'This is a previous message 1', 'Previous Description 1', 21.1], + [1558038519831, 'This is a previous message 2', 'Previous Description 2', 22.1], + ], + type: 'table', + }, + }); + const theResult = resultProcessor.getTableResult(); + + expect(theResult).toEqual({ + columnMap: {}, + columns: [{ text: 'Time' }, { text: 'Message' }, { text: 'Description' }, { text: 'Value' }], + rows: [ + [1558038518831, 'This is a previous message 1', 'Previous Description 1', 21.1], + [1558038519831, 'This is a previous message 2', 'Previous Description 2', 22.1], + [1559038518831, 'This is a message', 'Description', 23.1], + [1559038519831, 'This is a message', 'Description', 23.1], + ], + type: 'table', + }); + }); + }); + + describe('when calling getLogsResult', () => { + it('then it should return correct logs result', () => { + const { resultProcessor } = testContext({ + mode: ExploreMode.Logs, + replacePreviousResults: false, + logsResult: { + hasUniqueLabels: false, + meta: [], + rows: [ + { + entry: 
'This is a previous message 1', + fresh: true, + hasAnsi: false, + labels: { cluster: 'some-cluster' }, + logLevel: 'unknown', + raw: 'This is a previous message 1', + searchWords: [], + timeEpochMs: 1558038519831, + timeFromNow: 'fromNow() jest mocked', + timeLocal: 'format() jest mocked', + timestamp: 1558038519831, + uniqueLabels: {}, + }, + { + entry: 'This is a previous message 2', + fresh: true, + hasAnsi: false, + labels: { cluster: 'some-cluster' }, + logLevel: 'unknown', + raw: 'This is a previous message 2', + searchWords: [], + timeEpochMs: 1558038518831, + timeFromNow: 'fromNow() jest mocked', + timeLocal: 'format() jest mocked', + timestamp: 1558038518831, + uniqueLabels: {}, + }, + ], + series: [ + { + alias: 'A-series', + aliasEscaped: 'A-series', + bars: { + fillColor: '#7EB26D', + }, + hasMsResolution: true, + id: 'A-series', + label: 'A-series', + legend: true, + stats: {}, + color: '#7EB26D', + datapoints: [[37.91264531864214, 1558038518831], [38.35179822906545, 1558038519831]], + unit: undefined, + valueFormater: toFixed, + }, + ], + }, + }); + const theResult = resultProcessor.getLogsResult(); + const expected = { + hasUniqueLabels: false, + meta: [], + rows: [ + { + entry: 'This is a previous message 1', + fresh: false, + hasAnsi: false, + labels: { cluster: 'some-cluster' }, + logLevel: 'unknown', + raw: 'This is a previous message 1', + searchWords: [], + timeEpochMs: 1558038519831, + timeFromNow: 'fromNow() jest mocked', + timeLocal: 'format() jest mocked', + timestamp: 1558038519831, + uniqueLabels: {}, + }, + { + entry: 'This is a previous message 2', + fresh: false, + hasAnsi: false, + labels: { cluster: 'some-cluster' }, + logLevel: 'unknown', + raw: 'This is a previous message 2', + searchWords: [], + timeEpochMs: 1558038518831, + timeFromNow: 'fromNow() jest mocked', + timeLocal: 'format() jest mocked', + timestamp: 1558038518831, + uniqueLabels: {}, + }, + { + entry: 'This is a message', + fresh: true, + hasAnsi: false, + labels: 
undefined, + logLevel: 'unknown', + raw: 'This is a message', + searchWords: [], + timeEpochMs: 1559038519831, + timeFromNow: 'fromNow() jest mocked', + timeLocal: 'format() jest mocked', + timestamp: 1559038519831, + uniqueLabels: {}, + }, + { + entry: 'This is a message', + fresh: true, + hasAnsi: false, + labels: undefined, + logLevel: 'unknown', + raw: 'This is a message', + searchWords: [], + timeEpochMs: 1559038518831, + timeFromNow: 'fromNow() jest mocked', + timeLocal: 'format() jest mocked', + timestamp: 1559038518831, + uniqueLabels: {}, + }, + ], + series: [ + { + alias: 'A-series', + aliasEscaped: 'A-series', + bars: { + fillColor: '#7EB26D', + }, + hasMsResolution: true, + id: 'A-series', + label: 'A-series', + legend: true, + stats: {}, + color: '#7EB26D', + datapoints: [ + [37.91264531864214, 1558038518831], + [38.35179822906545, 1558038519831], + [39.91264531864214, 1559038518831], + [40.35179822906545, 1559038519831], + ], + unit: undefined, + valueFormater: toFixed, + }, + ], + }; + + expect(theResult).toEqual(expected); + }); + }); + }); +}); diff --git a/public/app/features/explore/utils/ResultProcessor.ts b/public/app/features/explore/utils/ResultProcessor.ts new file mode 100644 index 000000000000..2521c4914f8e --- /dev/null +++ b/public/app/features/explore/utils/ResultProcessor.ts @@ -0,0 +1,176 @@ +import { + DataQueryResponse, + TableData, + isTableData, + LogsModel, + toSeriesData, + guessFieldTypes, + DataQueryResponseData, + TimeSeries, +} from '@grafana/ui'; + +import { ExploreItemState, ExploreMode } from 'app/types/explore'; +import { getProcessedSeriesData } from 'app/features/dashboard/state/PanelQueryState'; +import TableModel, { mergeTablesIntoModel } from 'app/core/table_model'; +import { sortLogsResult } from 'app/core/utils/explore'; +import { seriesDataToLogsModel } from 'app/core/logs_model'; +import { default as TimeSeries2 } from 'app/core/time_series2'; +import { DataProcessor } from 
'app/plugins/panel/graph/data_processor'; + +export class ResultProcessor { + private rawData: DataQueryResponseData[] = []; + private metrics: TimeSeries[] = []; + private tables: TableData[] = []; + + constructor( + private state: ExploreItemState, + private replacePreviousResults: boolean, + result?: DataQueryResponse | DataQueryResponseData[] + ) { + if (result && result.hasOwnProperty('data')) { + this.rawData = (result as DataQueryResponse).data; + } else { + this.rawData = (result as DataQueryResponseData[]) || []; + } + + if (this.state.mode !== ExploreMode.Metrics) { + return; + } + + for (let index = 0; index < this.rawData.length; index++) { + const res: any = this.rawData[index]; + const isTable = isTableData(res); + if (isTable) { + this.tables.push(res); + } else { + this.metrics.push(res); + } + } + } + + getRawData = (): any[] => { + return this.rawData; + }; + + getGraphResult = (): TimeSeries[] => { + if (this.state.mode !== ExploreMode.Metrics) { + return []; + } + + const newResults = this.makeTimeSeriesList(this.metrics); + return this.mergeGraphResults(newResults, this.state.graphResult); + }; + + getTableResult = (): TableModel => { + if (this.state.mode !== ExploreMode.Metrics) { + return new TableModel(); + } + + const prevTableResults = this.state.tableResult || []; + const tablesToMerge = this.replacePreviousResults ? this.tables : [].concat(prevTableResults, this.tables); + + return mergeTablesIntoModel(new TableModel(), ...tablesToMerge); + }; + + getLogsResult = (): LogsModel => { + if (this.state.mode !== ExploreMode.Logs) { + return null; + } + const graphInterval = this.state.queryIntervals.intervalMs; + const seriesData = this.rawData.map(result => guessFieldTypes(toSeriesData(result))); + const newResults = this.rawData ? 
seriesDataToLogsModel(seriesData, graphInterval) : null; + + if (this.replacePreviousResults) { + return newResults; + } + + const prevLogsResult: LogsModel = this.state.logsResult || { hasUniqueLabels: false, rows: [] }; + const sortedLogResult = sortLogsResult(prevLogsResult, this.state.refreshInterval); + const rowsInState = sortedLogResult.rows; + const seriesInState = sortedLogResult.series || []; + + const processedRows = []; + for (const row of rowsInState) { + processedRows.push({ ...row, fresh: false }); + } + for (const row of newResults.rows) { + processedRows.push({ ...row, fresh: true }); + } + + const processedSeries = this.mergeGraphResults(newResults.series, seriesInState); + + const slice = -1000; + const rows = processedRows.slice(slice); + const series = processedSeries.slice(slice); + + return { ...newResults, rows, series }; + }; + + private makeTimeSeriesList = (rawData: any[]) => { + const dataList = getProcessedSeriesData(rawData); + const dataProcessor = new DataProcessor({ xaxis: {}, aliasColors: [] }); // Hack before we use GraphSeriesXY instead + const timeSeries = dataProcessor.getSeriesList({ dataList }); + + return (timeSeries as any) as TimeSeries[]; // Hack before we use GraphSeriesXY instead + }; + + private isSameTimeSeries = (a: TimeSeries | TimeSeries2, b: TimeSeries | TimeSeries2) => { + if (a.hasOwnProperty('id') && b.hasOwnProperty('id')) { + if (a['id'] !== undefined && b['id'] !== undefined && a['id'] === b['id']) { + return true; + } + } + + if (a.hasOwnProperty('alias') && b.hasOwnProperty('alias')) { + if (a['alias'] !== undefined && b['alias'] !== undefined && a['alias'] === b['alias']) { + return true; + } + } + + return false; + }; + + private mergeGraphResults = ( + newResults: TimeSeries[] | TimeSeries2[], + prevResults: TimeSeries[] | TimeSeries2[] + ): TimeSeries[] => { + if (!prevResults || prevResults.length === 0 || this.replacePreviousResults) { + return (newResults as any) as TimeSeries[]; // Hack before we 
use GraphSeriesXY instead + } + + const results: TimeSeries[] = prevResults.slice() as TimeSeries[]; + + // update existing results + for (let index = 0; index < results.length; index++) { + const prevResult = results[index]; + for (const newResult of newResults) { + const isSame = this.isSameTimeSeries(prevResult, newResult); + + if (isSame) { + prevResult.datapoints = prevResult.datapoints.concat(newResult.datapoints); + break; + } + } + } + + // add new results + for (const newResult of newResults) { + let isNew = true; + for (const prevResult of results) { + const isSame = this.isSameTimeSeries(prevResult, newResult); + if (isSame) { + isNew = false; + break; + } + } + + if (isNew) { + const timeSeries2Result = new TimeSeries2({ ...newResult }); + + const result = (timeSeries2Result as any) as TimeSeries; // Hack before we use GraphSeriesXY instead + results.push(result); + } + } + return results; + }; +} diff --git a/public/app/features/org/state/actions.ts b/public/app/features/org/state/actions.ts index fc8742d12226..214674783cef 100644 --- a/public/app/features/org/state/actions.ts +++ b/public/app/features/org/state/actions.ts @@ -1,5 +1,5 @@ import { Organization, ThunkResult } from 'app/types'; -import { getBackendSrv } from 'app/core/services/backend_srv'; +import { getBackendSrv } from '@grafana/runtime'; export enum ActionTypes { LoadOrganization = 'LOAD_ORGANIZATION', diff --git a/public/app/features/plugins/datasource_srv.ts b/public/app/features/plugins/datasource_srv.ts index 1a84355ac028..c1c45f8acc40 100644 --- a/public/app/features/plugins/datasource_srv.ts +++ b/public/app/features/plugins/datasource_srv.ts @@ -5,11 +5,12 @@ import coreModule from 'app/core/core_module'; // Services & Utils import config from 'app/core/config'; import { importDataSourcePlugin } from './plugin_loader'; +import { DataSourceSrv as DataSourceService, getDataSourceSrv as getDataSourceService } from '@grafana/runtime'; // Types import { DataSourceApi, 
DataSourceSelectItem, ScopedVars } from '@grafana/ui/src/types'; -export class DatasourceSrv { +export class DatasourceSrv implements DataSourceService { datasources: { [name: string]: DataSourceApi }; /** @ngInject */ @@ -175,14 +176,8 @@ export class DatasourceSrv { } } -let singleton: DatasourceSrv; - -export function setDatasourceSrv(srv: DatasourceSrv) { - singleton = srv; -} - export function getDatasourceSrv(): DatasourceSrv { - return singleton; + return getDataSourceService() as DatasourceSrv; } coreModule.service('datasourceSrv', DatasourceSrv); diff --git a/public/app/features/plugins/plugin_loader.ts b/public/app/features/plugins/plugin_loader.ts index 9c3bc4ca553b..74986466a49a 100644 --- a/public/app/features/plugins/plugin_loader.ts +++ b/public/app/features/plugins/plugin_loader.ts @@ -29,6 +29,7 @@ import impressionSrv from 'app/core/services/impression_srv'; import builtInPlugins from './built_in_plugins'; import * as d3 from 'd3'; import * as grafanaUI from '@grafana/ui'; +import * as grafanaRT from '@grafana/runtime'; // rxjs import { Observable, Subject } from 'rxjs'; @@ -68,6 +69,7 @@ function exposeToPlugin(name: string, component: any) { } exposeToPlugin('@grafana/ui', grafanaUI); +exposeToPlugin('@grafana/runtime', grafanaRT); exposeToPlugin('lodash', _); exposeToPlugin('moment', moment); exposeToPlugin('jquery', jquery); diff --git a/public/app/features/plugins/state/actions.ts b/public/app/features/plugins/state/actions.ts index 9a1dbde7bffc..da0e14717633 100644 --- a/public/app/features/plugins/state/actions.ts +++ b/public/app/features/plugins/state/actions.ts @@ -1,6 +1,6 @@ import { StoreState } from 'app/types'; import { ThunkAction } from 'redux-thunk'; -import { getBackendSrv } from '../../../core/services/backend_srv'; +import { getBackendSrv } from '@grafana/runtime'; import { LayoutMode } from '../../../core/components/LayoutSelector/LayoutSelector'; import { PluginDashboard } from '../../../types/plugins'; import { PluginMeta } 
from '@grafana/ui'; diff --git a/public/app/features/plugins/wrappers/AppConfigWrapper.tsx b/public/app/features/plugins/wrappers/AppConfigWrapper.tsx index de6c670679d6..eb9afa9cf679 100644 --- a/public/app/features/plugins/wrappers/AppConfigWrapper.tsx +++ b/public/app/features/plugins/wrappers/AppConfigWrapper.tsx @@ -5,7 +5,7 @@ import extend from 'lodash/extend'; import { PluginMeta, AppPlugin, Button } from '@grafana/ui'; -import { AngularComponent, getAngularLoader } from 'app/core/services/AngularLoader'; +import { AngularComponent, getAngularLoader } from '@grafana/runtime'; import { getBackendSrv } from 'app/core/services/backend_srv'; import { ButtonVariant } from '@grafana/ui/src/components/Button/AbstractButton'; import { css } from 'emotion'; diff --git a/public/app/features/teams/state/actions.ts b/public/app/features/teams/state/actions.ts index e2582839233f..cd369b86e922 100644 --- a/public/app/features/teams/state/actions.ts +++ b/public/app/features/teams/state/actions.ts @@ -1,5 +1,5 @@ import { ThunkAction } from 'redux-thunk'; -import { getBackendSrv } from 'app/core/services/backend_srv'; +import { getBackendSrv } from '@grafana/runtime'; import { StoreState, Team, TeamGroup, TeamMember } from 'app/types'; import { updateNavIndex, UpdateNavIndexAction } from 'app/core/actions'; import { buildNavModel } from './navModel'; diff --git a/public/app/features/users/state/actions.ts b/public/app/features/users/state/actions.ts index 5c50aa290965..3d69e6638596 100644 --- a/public/app/features/users/state/actions.ts +++ b/public/app/features/users/state/actions.ts @@ -1,6 +1,6 @@ import { ThunkAction } from 'redux-thunk'; import { StoreState } from '../../../types'; -import { getBackendSrv } from '../../../core/services/backend_srv'; +import { getBackendSrv } from '@grafana/runtime'; import { Invitee, OrgUser } from 'app/types'; export enum ActionTypes { diff --git a/public/app/plugins/datasource/cloudwatch/datasource.ts 
b/public/app/plugins/datasource/cloudwatch/datasource.ts index bd2bd67248ca..5a92122f221c 100644 --- a/public/app/plugins/datasource/cloudwatch/datasource.ts +++ b/public/app/plugins/datasource/cloudwatch/datasource.ts @@ -149,12 +149,14 @@ export default class CloudWatchDatasource extends DataSourceApi if (res.results) { for (const query of request.queries) { const queryRes = res.results[query.refId]; - for (const series of queryRes.series) { - const s = { target: series.name, datapoints: series.points } as any; - if (queryRes.meta.unit) { - s.unit = queryRes.meta.unit; + if (queryRes) { + for (const series of queryRes.series) { + const s = { target: series.name, datapoints: series.points } as any; + if (queryRes.meta.unit) { + s.unit = queryRes.meta.unit; + } + data.push(s); } - data.push(s); } } } diff --git a/public/app/plugins/datasource/grafana-azure-monitor-datasource/editor/kusto/kusto.ts b/public/app/plugins/datasource/grafana-azure-monitor-datasource/editor/kusto/kusto.ts index 5bf7dfb19eb5..172aa5ee077a 100644 --- a/public/app/plugins/datasource/grafana-azure-monitor-datasource/editor/kusto/kusto.ts +++ b/public/app/plugins/datasource/grafana-azure-monitor-datasource/editor/kusto/kusto.ts @@ -649,6 +649,16 @@ export const grafanaMacros = [ display: '$__timeFilter()', hint: 'Macro that uses the selected timerange in Grafana to filter the query.', }, + { + text: '$__timeTo', + display: '$__timeTo()', + hint: 'Returns the To datetime from the Grafana picker. Example: datetime(2018-06-05T20:09:58.907Z).', + }, + { + text: '$__timeFrom', + display: '$__timeFrom()', + hint: 'Returns the From datetime from the Grafana picker. 
Example: datetime(2018-06-05T18:09:58.907Z).', + }, { text: '$__escapeMulti', display: '$__escapeMulti()', diff --git a/public/app/plugins/datasource/grafana-azure-monitor-datasource/log_analytics/querystring_builder.test.ts b/public/app/plugins/datasource/grafana-azure-monitor-datasource/log_analytics/querystring_builder.test.ts index fab268a34401..186c78743f8c 100644 --- a/public/app/plugins/datasource/grafana-azure-monitor-datasource/log_analytics/querystring_builder.test.ts +++ b/public/app/plugins/datasource/grafana-azure-monitor-datasource/log_analytics/querystring_builder.test.ts @@ -90,27 +90,27 @@ describe('LogAnalyticsDatasource', () => { }); }); - describe('when using $__from and $__to is in the query and range is until now', () => { + describe('when using $__timeFrom and $__timeTo is in the query and range is until now', () => { beforeEach(() => { - builder.rawQueryString = 'query=Tablename | where myTime >= $__from and myTime <= $__to'; + builder.rawQueryString = 'query=Tablename | where myTime >= $__timeFrom() and myTime <= $__timeTo()'; }); - it('should replace $__from and $__to with a datetime and the now() function', () => { + it('should replace $__timeFrom and $__timeTo with datetimes', () => { const query = builder.generate().uriString; expect(query).toContain('where%20myTime%20%3E%3D%20datetime('); - expect(query).toContain('myTime%20%3C%3D%20now()'); + expect(query).toContain('myTime%20%3C%3D%20datetime('); }); }); - describe('when using $__from and $__to is in the query and range is a specific interval', () => { + describe('when using $__timeFrom and $__timeTo is in the query and range is a specific interval', () => { beforeEach(() => { - builder.rawQueryString = 'query=Tablename | where myTime >= $__from and myTime <= $__to'; + builder.rawQueryString = 'query=Tablename | where myTime >= $__timeFrom() and myTime <= $__timeTo()'; builder.options.range.to = dateTime().subtract(1, 'hour'); builder.options.rangeRaw.to = 
'now-1h'; }); - it('should replace $__from and $__to with datetimes', () => { + it('should replace $__timeFrom and $__timeTo with datetimes', () => { const query = builder.generate().uriString; expect(query).toContain('where%20myTime%20%3E%3D%20datetime('); diff --git a/public/app/plugins/datasource/grafana-azure-monitor-datasource/log_analytics/querystring_builder.ts b/public/app/plugins/datasource/grafana-azure-monitor-datasource/log_analytics/querystring_builder.ts index afb64da8f4c6..ad72c4eb2eb9 100644 --- a/public/app/plugins/datasource/grafana-azure-monitor-datasource/log_analytics/querystring_builder.ts +++ b/public/app/plugins/datasource/grafana-azure-monitor-datasource/log_analytics/querystring_builder.ts @@ -21,12 +21,16 @@ export default class LogAnalyticsQuerystringBuilder { if (p1 === 'timeFilter') { return this.getTimeFilter(p2, this.options); } + if (p1 === 'timeFrom') { + return this.getFrom(this.options); + } + if (p1 === 'timeTo') { + return this.getUntil(this.options); + } return match; }); queryString = queryString.replace(/\$__interval/gi, this.options.interval); - queryString = queryString.replace(/\$__from/gi, this.getFrom(this.options)); - queryString = queryString.replace(/\$__to/gi, this.getUntil(this.options)); } const rawQuery = queryString; queryString = encodeURIComponent(queryString); @@ -44,7 +48,10 @@ export default class LogAnalyticsQuerystringBuilder { getUntil(options) { if (options.rangeRaw.to === 'now') { - return 'now()'; + const now = Date.now(); + return `datetime(${dateTime(now) + .startOf('minute') + .toISOString()})`; } else { const until = options.range.to; return `datetime(${dateTime(until) diff --git a/public/app/plugins/datasource/grafana-azure-monitor-datasource/partials/annotations.editor.html b/public/app/plugins/datasource/grafana-azure-monitor-datasource/partials/annotations.editor.html index a5b2b2adc5be..7a855a10b44b 100644 --- 
a/public/app/plugins/datasource/grafana-azure-monitor-datasource/partials/annotations.editor.html +++ b/public/app/plugins/datasource/grafana-azure-monitor-datasource/partials/annotations.editor.html @@ -67,8 +67,8 @@ - $__timeFilter(datetimeColumn) -> datetimeColumn ≥ datetime(2018-06-05T18:09:58.907Z) and datetimeColumn ≤ datetime(2018-06-05T20:09:58.907Z) Or build your own conditionals using these built-in variables which just return the values: - - $__from -> datetime(2018-06-05T18:09:58.907Z) - - $__to -> datetime(2018-06-05T20:09:58.907Z) + - $__timeFrom -> datetime(2018-06-05T18:09:58.907Z) + - $__timeTo -> datetime(2018-06-05T20:09:58.907Z) - $__interval -> 5m
diff --git a/public/app/plugins/datasource/grafana-azure-monitor-datasource/partials/query.editor.html b/public/app/plugins/datasource/grafana-azure-monitor-datasource/partials/query.editor.html index 4690bc5be26c..1c2b14f366ed 100644 --- a/public/app/plugins/datasource/grafana-azure-monitor-datasource/partials/query.editor.html +++ b/public/app/plugins/datasource/grafana-azure-monitor-datasource/partials/query.editor.html @@ -189,13 +189,13 @@ If using the All option, then check the Include All Option checkbox and in the Custom all value field type in: all. If All is chosen -> 1 == 1 Or build your own conditionals using these built-in variables which just return the values: - - $__from -> datetime(2018-06-05T18:09:58.907Z) - - $__to -> datetime(2018-06-05T20:09:58.907Z) + - $__timeFrom -> datetime(2018-06-05T18:09:58.907Z) + - $__timeTo -> datetime(2018-06-05T20:09:58.907Z) - $__interval -> 5m Examples: - ¡ where $__timeFilter - - | where TimeGenerated ≥ $__from and TimeGenerated ≤ $__to + - | where TimeGenerated ≥ $__timeFrom and TimeGenerated ≤ $__timeTo - | summarize count() by Category, bin(TimeGenerated, $__interval)
diff --git a/public/app/plugins/datasource/grafana-azure-monitor-datasource/query_ctrl.ts b/public/app/plugins/datasource/grafana-azure-monitor-datasource/query_ctrl.ts index cc623d8df981..9fc12e9b9169 100644 --- a/public/app/plugins/datasource/grafana-azure-monitor-datasource/query_ctrl.ts +++ b/public/app/plugins/datasource/grafana-azure-monitor-datasource/query_ctrl.ts @@ -110,6 +110,8 @@ export class AzureMonitorQueryCtrl extends QueryCtrl { this.migrateTimeGrains(); + this.migrateToFromTimes(); + this.panelCtrl.events.on('data-received', this.onDataReceived.bind(this), $scope); this.panelCtrl.events.on('data-error', this.onDataError.bind(this), $scope); this.resultFormats = [{ text: 'Time series', value: 'time_series' }, { text: 'Table', value: 'table' }]; @@ -171,6 +173,11 @@ export class AzureMonitorQueryCtrl extends QueryCtrl { } } + migrateToFromTimes() { + this.target.azureLogAnalytics.query = this.target.azureLogAnalytics.query.replace(/\$__from\s/gi, '$__timeFrom() '); + this.target.azureLogAnalytics.query = this.target.azureLogAnalytics.query.replace(/\$__to\s/gi, '$__timeTo() '); + } + replace(variable: string) { return this.templateSrv.replace(variable, this.panelCtrl.panel.scopedVars); } diff --git a/public/app/plugins/datasource/grafana/partials/annotations.editor.html b/public/app/plugins/datasource/grafana/partials/annotations.editor.html index e5a67d6a7dc7..c1164f7f8c74 100644 --- a/public/app/plugins/datasource/grafana/partials/annotations.editor.html +++ b/public/app/plugins/datasource/grafana/partials/annotations.editor.html @@ -7,7 +7,7 @@
  • Dashboard: This will fetch annotation and alert state changes for whole dashboard and show them only on the event's originating panel.
  • -
  • All: This will fetch any annotation events that match the tags filter.
  • +
  • Tags: This will fetch any annotation events that match the tags filter.
@@ -32,10 +32,19 @@ label-class="width-9" checked="ctrl.annotation.matchAny" on-change="ctrl.refresh()" - tooltip="By default Grafana will only show annotation that matches all tags in the query. Enabling this will make Grafana return any annotation with the tags you specify."> + tooltip="By default Grafana only shows annotations that match all tags in the query. Enabling this returns annotations that match any of the tags in the query.">
- Tags + + Tags + + A tag entered here as 'foo' will match +
    +
  • annotation tags 'foo'
  • +
  • annotation key-value tags formatted as 'foo:bar'
  • +
+
+
diff --git a/public/app/plugins/datasource/graphite/plugin.json b/public/app/plugins/datasource/graphite/plugin.json index 01a95727ee79..a1cc0335b68e 100644 --- a/public/app/plugins/datasource/graphite/plugin.json +++ b/public/app/plugins/datasource/graphite/plugin.json @@ -10,7 +10,6 @@ "metrics": true, "alerting": true, "annotations": true, - "tables": false, "queryOptions": { "maxDataPoints": true, diff --git a/public/app/plugins/datasource/influxdb/influx_query.ts b/public/app/plugins/datasource/influxdb/influx_query.ts index 3ee9d703c54b..a655705ffcb6 100644 --- a/public/app/plugins/datasource/influxdb/influx_query.ts +++ b/public/app/plugins/datasource/influxdb/influx_query.ts @@ -146,7 +146,7 @@ export default class InfluxQuery { value = this.templateSrv.replace(value, this.scopedVars); } if (operator !== '>' && operator !== '<') { - value = "'" + value.replace(/\\/g, '\\\\') + "'"; + value = "'" + value.replace(/\\/g, '\\\\').replace(/\'/g, "\\'") + "'"; } } else if (interpolate) { value = this.templateSrv.replace(value, this.scopedVars, 'regex'); diff --git a/public/app/plugins/datasource/influxdb/plugin.json b/public/app/plugins/datasource/influxdb/plugin.json index fa660ee12329..785706dfc401 100644 --- a/public/app/plugins/datasource/influxdb/plugin.json +++ b/public/app/plugins/datasource/influxdb/plugin.json @@ -8,7 +8,6 @@ "metrics": true, "annotations": true, "alerting": true, - "tables": true, "queryOptions": { "minInterval": true diff --git a/public/app/plugins/datasource/influxdb/specs/influx_query.test.ts b/public/app/plugins/datasource/influxdb/specs/influx_query.test.ts index f8e65c21f2d2..ad76ea5309c5 100644 --- a/public/app/plugins/datasource/influxdb/specs/influx_query.test.ts +++ b/public/app/plugins/datasource/influxdb/specs/influx_query.test.ts @@ -139,6 +139,26 @@ describe('InfluxQuery', () => { }); }); + describe('field name with single quote should be escaped and', () => { + it('should generate correct query', () => { + const query = 
new InfluxQuery( + { + measurement: 'cpu', + groupBy: [{ type: 'time', params: ['auto'] }], + tags: [{ key: 'name', value: "Let's encrypt." }, { key: 'hostname', value: 'server2', condition: 'OR' }], + }, + templateSrv, + {} + ); + + const queryText = query.render(); + expect(queryText).toBe( + 'SELECT mean("value") FROM "cpu" WHERE ("name" = \'Let\\\'s encrypt.\' OR "hostname" = \'server2\') AND ' + + '$timeFilter GROUP BY time($__interval)' + ); + }); + }); + describe('query with value condition', () => { it('should not quote value', () => { const query = new InfluxQuery( diff --git a/public/app/plugins/datasource/input/plugin.json b/public/app/plugins/datasource/input/plugin.json index 91782a348065..dbfa0ad489a4 100644 --- a/public/app/plugins/datasource/input/plugin.json +++ b/public/app/plugins/datasource/input/plugin.json @@ -8,7 +8,6 @@ "alerting": false, "annotations": false, "logs": false, - "explore": false, "info": { "description": "Data source that supports manual table & CSV input", diff --git a/public/app/plugins/datasource/loki/datasource.ts b/public/app/plugins/datasource/loki/datasource.ts index d86e5fe1922a..b689d02ba135 100644 --- a/public/app/plugins/datasource/loki/datasource.ts +++ b/public/app/plugins/datasource/loki/datasource.ts @@ -1,5 +1,8 @@ // Libraries import _ from 'lodash'; +import { Subscription, of } from 'rxjs'; +import { webSocket } from 'rxjs/webSocket'; +import { catchError, map } from 'rxjs/operators'; // Services & Utils import * as dateMath from '@grafana/ui/src/utils/datemath'; @@ -17,11 +20,14 @@ import { DataSourceInstanceSettings, DataQueryError, LogRowModel, + DataStreamObserver, + LoadingState, + DataStreamState, } from '@grafana/ui'; import { LokiQuery, LokiOptions } from './types'; import { BackendSrv } from 'app/core/services/backend_srv'; import { TemplateSrv } from 'app/features/templating/template_srv'; -import { safeStringifyValue } from 'app/core/utils/explore'; +import { safeStringifyValue, 
convertToWebSocketUrl } from 'app/core/utils/explore'; export const DEFAULT_MAX_LINES = 1000; @@ -47,6 +53,7 @@ interface LokiContextQueryOptions { } export class LokiDatasource extends DataSourceApi { + private subscriptions: { [key: string]: Subscription } = null; languageProvider: LanguageProvider; maxLines: number; @@ -60,6 +67,7 @@ export class LokiDatasource extends DataSourceApi { this.languageProvider = new LanguageProvider(this); const settingsData = instanceSettings.jsonData || {}; this.maxLines = parseInt(settingsData.maxLines, 10) || DEFAULT_MAX_LINES; + this.subscriptions = {}; } _request(apiUrl: string, data?, options?: any) { @@ -73,41 +81,20 @@ export class LokiDatasource extends DataSourceApi { return this.backendSrv.datasourceRequest(req); } - convertToStreamTargets = (options: DataQueryRequest): Array<{ url: string; refId: string }> => { - return options.targets - .filter(target => target.expr && !target.hide) - .map(target => { - const interpolated = this.templateSrv.replace(target.expr); - const { query, regexp } = parseQuery(interpolated); - const refId = target.refId; - const baseUrl = this.instanceSettings.url; - const params = serializeParams({ query, regexp }); - const url = `${baseUrl}/api/prom/tail?${params}`; - - return { - url, - refId, - }; - }); - }; - - resultToSeriesData = (data: any, refId: string): SeriesData[] => { - const toSeriesData = (stream: any, refId: string) => ({ - ...logStreamToSeriesData(stream), + prepareLiveTarget(target: LokiQuery, options: DataQueryRequest) { + const interpolated = this.templateSrv.replace(target.expr); + const { query, regexp } = parseQuery(interpolated); + const refId = target.refId; + const baseUrl = this.instanceSettings.url; + const params = serializeParams({ query, regexp }); + const url = convertToWebSocketUrl(`${baseUrl}/api/prom/tail?${params}`); + return { + query, + regexp, + url, refId, - }); - - if (data.streams) { - // new Loki API purposed in https://github.com/grafana/loki/pull/590 
- const series: SeriesData[] = []; - for (const stream of data.streams || []) { - series.push(toSeriesData(stream, refId)); - } - return series; - } - - return [toSeriesData(data, refId)]; - }; + }; + } prepareQueryTarget(target: LokiQuery, options: DataQueryRequest) { const interpolated = this.templateSrv.replace(target.expr); @@ -126,9 +113,106 @@ export class LokiDatasource extends DataSourceApi { }; } - async query(options: DataQueryRequest) { + unsubscribe = (refId: string) => { + const subscription = this.subscriptions[refId]; + if (subscription && !subscription.closed) { + subscription.unsubscribe(); + delete this.subscriptions[refId]; + } + }; + + processError = (err: any, target: any): DataQueryError => { + const error: DataQueryError = { + message: 'Unknown error during query transaction. Please check JS console logs.', + refId: target.refId, + }; + + if (err.data) { + if (typeof err.data === 'string') { + error.message = err.data; + } else if (err.data.error) { + error.message = safeStringifyValue(err.data.error); + } + } else if (err.message) { + error.message = err.message; + } else if (typeof err === 'string') { + error.message = err; + } + + error.status = err.status; + error.statusText = err.statusText; + + return error; + }; + + processResult = (data: any, target: any): SeriesData[] => { + const series: SeriesData[] = []; + + if (Object.keys(data).length === 0) { + return series; + } + + if (!data.streams) { + return [{ ...logStreamToSeriesData(data), refId: target.refId }]; + } + + for (const stream of data.streams || []) { + const seriesData = logStreamToSeriesData(stream); + seriesData.refId = target.refId; + seriesData.meta = { + searchWords: getHighlighterExpressionsFromQuery(formatQuery(target.query, target.regexp)), + limit: this.maxLines, + }; + series.push(seriesData); + } + + return series; + }; + + runLiveQueries = (options: DataQueryRequest, observer?: DataStreamObserver) => { + const liveTargets = options.targets + .filter(target => 
target.expr && !target.hide && target.live) + .map(target => this.prepareLiveTarget(target, options)); + + for (const liveTarget of liveTargets) { + const subscription = webSocket(liveTarget.url) + .pipe( + map((results: any[]) => { + const delta = this.processResult(results, liveTarget); + const state: DataStreamState = { + key: `loki-${liveTarget.refId}`, + request: options, + state: LoadingState.Streaming, + delta, + unsubscribe: () => this.unsubscribe(liveTarget.refId), + }; + + return state; + }), + catchError(err => { + const error = this.processError(err, liveTarget); + const state: DataStreamState = { + key: `loki-${liveTarget.refId}`, + request: options, + state: LoadingState.Error, + error, + unsubscribe: () => this.unsubscribe(liveTarget.refId), + }; + + return of(state); + }) + ) + .subscribe({ + next: state => observer(state), + }); + + this.subscriptions[liveTarget.refId] = subscription; + } + }; + + runQueries = async (options: DataQueryRequest) => { const queryTargets = options.targets - .filter(target => target.expr && !target.hide) + .filter(target => target.expr && !target.hide && !target.live) .map(target => this.prepareQueryTarget(target, options)); if (queryTargets.length === 0) { @@ -141,53 +225,29 @@ export class LokiDatasource extends DataSourceApi { return err; } - const error: DataQueryError = { - message: 'Unknown error during query transaction. 
Please check JS console logs.', - refId: target.refId, - }; - - if (err.data) { - if (typeof err.data === 'string') { - error.message = err.data; - } else if (err.data.error) { - error.message = safeStringifyValue(err.data.error); - } - } else if (err.message) { - error.message = err.message; - } else if (typeof err === 'string') { - error.message = err; - } - - error.status = err.status; - error.statusText = err.statusText; - + const error: DataQueryError = this.processError(err, target); throw error; }) ); return Promise.all(queries).then((results: any[]) => { - const series: Array = []; + let series: SeriesData[] = []; for (let i = 0; i < results.length; i++) { const result = results[i]; if (result.data) { - const refId = queryTargets[i].refId; - for (const stream of result.data.streams || []) { - const seriesData = logStreamToSeriesData(stream); - seriesData.refId = refId; - seriesData.meta = { - searchWords: getHighlighterExpressionsFromQuery( - formatQuery(queryTargets[i].query, queryTargets[i].regexp) - ), - limit: this.maxLines, - }; - series.push(seriesData); - } + series = series.concat(this.processResult(result.data, queryTargets[i])); } } return { data: series }; }); + }; + + async query(options: DataQueryRequest, observer?: DataStreamObserver) { + this.runLiveQueries(options, observer); + + return this.runQueries(options); } async importQueries(queries: LokiQuery[], originMeta: PluginMeta): Promise { diff --git a/public/app/plugins/datasource/loki/language_provider.ts b/public/app/plugins/datasource/loki/language_provider.ts index 64bf876f2c77..ff187bd88420 100644 --- a/public/app/plugins/datasource/loki/language_provider.ts +++ b/public/app/plugins/datasource/loki/language_provider.ts @@ -16,6 +16,7 @@ import { } from 'app/types/explore'; import { LokiQuery } from './types'; import { dateTime } from '@grafana/ui/src/utils/moment_wrapper'; +import { PromQuery } from '../prometheus/types'; const DEFAULT_KEYS = ['job', 'namespace']; const EMPTY_SELECTOR 
= '{}'; @@ -168,8 +169,9 @@ export default class LokiLanguageProvider extends LanguageProvider { return Promise.all( queries.map(async query => { const expr = await this.importPrometheusQuery(query.expr); + const { context, ...rest } = query as PromQuery; return { - ...query, + ...rest, expr, }; }) diff --git a/public/app/plugins/datasource/loki/plugin.json b/public/app/plugins/datasource/loki/plugin.json index cd14a7fe48ad..ca630b56bc73 100644 --- a/public/app/plugins/datasource/loki/plugin.json +++ b/public/app/plugins/datasource/loki/plugin.json @@ -8,8 +8,7 @@ "alerting": false, "annotations": false, "logs": true, - "explore": true, - "tables": false, + "streaming": true, "info": { "description": "Like Prometheus but for logs. OSS logging solution from Grafana Labs", diff --git a/public/app/plugins/datasource/loki/types.ts b/public/app/plugins/datasource/loki/types.ts index 4c973f8a79ed..e733c3b47cb6 100644 --- a/public/app/plugins/datasource/loki/types.ts +++ b/public/app/plugins/datasource/loki/types.ts @@ -2,6 +2,9 @@ import { DataQuery, Labels, DataSourceJsonData } from '@grafana/ui/src/types'; export interface LokiQuery extends DataQuery { expr: string; + live?: boolean; + query?: string; + regexp?: string; } export interface LokiOptions extends DataSourceJsonData { diff --git a/public/app/plugins/datasource/mssql/plugin.json b/public/app/plugins/datasource/mssql/plugin.json index b3269b91100e..ef280e9209ee 100644 --- a/public/app/plugins/datasource/mssql/plugin.json +++ b/public/app/plugins/datasource/mssql/plugin.json @@ -19,7 +19,6 @@ "alerting": true, "annotations": true, "metrics": true, - "tables": true, "queryOptions": { "minInterval": true diff --git a/public/app/plugins/datasource/mysql/plugin.json b/public/app/plugins/datasource/mysql/plugin.json index 49d1996332fa..be0714560927 100644 --- a/public/app/plugins/datasource/mysql/plugin.json +++ b/public/app/plugins/datasource/mysql/plugin.json @@ -19,7 +19,6 @@ "alerting": true, "annotations": 
true, "metrics": true, - "tables": true, "queryOptions": { "minInterval": true diff --git a/public/app/plugins/datasource/opentsdb/plugin.json b/public/app/plugins/datasource/opentsdb/plugin.json index e7cae327c5b5..a19916482369 100644 --- a/public/app/plugins/datasource/opentsdb/plugin.json +++ b/public/app/plugins/datasource/opentsdb/plugin.json @@ -8,7 +8,6 @@ "defaultMatchFormat": "pipe", "annotations": true, "alerting": true, - "tables": false, "info": { "description": "Open source time series database", diff --git a/public/app/plugins/datasource/postgres/plugin.json b/public/app/plugins/datasource/postgres/plugin.json index 994578a7f2c8..ce72d3b0f2f8 100644 --- a/public/app/plugins/datasource/postgres/plugin.json +++ b/public/app/plugins/datasource/postgres/plugin.json @@ -19,7 +19,6 @@ "alerting": true, "annotations": true, "metrics": true, - "tables": true, "queryOptions": { "minInterval": true diff --git a/public/app/plugins/datasource/prometheus/components/PromQueryField.tsx b/public/app/plugins/datasource/prometheus/components/PromQueryField.tsx index 14d03df6d388..c432e9d58b4b 100644 --- a/public/app/plugins/datasource/prometheus/components/PromQueryField.tsx +++ b/public/app/plugins/datasource/prometheus/components/PromQueryField.tsx @@ -223,7 +223,7 @@ class PromQueryField extends React.PureComponent { type: string; @@ -83,7 +87,7 @@ export class PrometheusDatasource extends DataSourceApi } } - _request(url, data?, options?: any) { + _request(url: string, data?: any, options?: any) { options = _.defaults(options || {}, { url: this.url + url, method: this.httpMethod, @@ -119,11 +123,11 @@ export class PrometheusDatasource extends DataSourceApi } // Use this for tab completion features, wont publish response to other components - metadataRequest(url) { + metadataRequest(url: string) { return this._request(url, null, { method: 'GET', silent: true }); } - interpolateQueryExpr(value, variable, defaultFormatFn) { + interpolateQueryExpr(value: any, variable: 
any, defaultFormatFn: any) { // if no multi or include all do not regexEscape if (!variable.multi && !variable.includeAll) { return prometheusRegularEscape(value); @@ -141,34 +145,132 @@ export class PrometheusDatasource extends DataSourceApi return this.templateSrv.variableExists(target.expr); } - query(options: DataQueryRequest): Promise<{ data: any }> { - const start = this.getPrometheusTime(options.range.from, false); - const end = this.getPrometheusTime(options.range.to, true); + processResult = (response: any, query: PromQueryRequest, target: PromQuery, responseListLength: number) => { + // Keeping original start/end for transformers + const transformerOptions = { + format: target.format, + step: query.step, + legendFormat: target.legendFormat, + start: query.start, + end: query.end, + query: query.expr, + responseListLength, + refId: target.refId, + valueWithRefId: target.valueWithRefId, + }; + const series = this.resultTransformer.transform(response, transformerOptions); - const queries = []; - const activeTargets = []; + return series; + }; - options = _.clone(options); + runObserverQueries = ( + options: DataQueryRequest, + observer: DataStreamObserver, + queries: PromQueryRequest[], + activeTargets: PromQuery[], + end: number + ) => { + for (let index = 0; index < queries.length; index++) { + const query = queries[index]; + const target = activeTargets[index]; + let observable: Observable = null; + + if (query.instant) { + observable = from(this.performInstantQuery(query, end)); + } else { + observable = from(this.performTimeSeriesQuery(query, query.start, query.end)); + } + + observable + .pipe( + single(), // unsubscribes automatically after first result + filter((response: any) => (response.cancelled ? 
false : true)), + map((response: any) => { + return this.processResult(response, query, target, queries.length); + }) + ) + .subscribe({ + next: series => { + if (query.instant) { + observer({ + key: `prometheus-${target.refId}`, + state: LoadingState.Loading, + request: options, + series: null, + delta: series, + unsubscribe: () => undefined, + }); + } else { + observer({ + key: `prometheus-${target.refId}`, + state: LoadingState.Done, + request: options, + series: null, + delta: series, + unsubscribe: () => undefined, + }); + } + }, + }); + } + }; + + prepareTargets = (options: DataQueryRequest, start: number, end: number) => { + const queries: PromQueryRequest[] = []; + const activeTargets: PromQuery[] = []; for (const target of options.targets) { if (!target.expr || target.hide) { continue; } + if (target.context === 'explore') { + target.format = 'time_series'; + target.instant = false; + const instantTarget: any = _.cloneDeep(target); + instantTarget.format = 'table'; + instantTarget.instant = true; + instantTarget.valueWithRefId = true; + delete instantTarget.maxDataPoints; + instantTarget.requestId += '_instant'; + instantTarget.refId += '_instant'; + activeTargets.push(instantTarget); + queries.push(this.createQuery(instantTarget, options, start, end)); + } + activeTargets.push(target); queries.push(this.createQuery(target, options, start, end)); } + return { + queries, + activeTargets, + }; + }; + + query(options: DataQueryRequest, observer?: DataStreamObserver): Promise<{ data: any }> { + const start = this.getPrometheusTime(options.range.from, false); + const end = this.getPrometheusTime(options.range.to, true); + + options = _.clone(options); + const { queries, activeTargets } = this.prepareTargets(options, start, end); + // No valid targets, return the empty result to save a round trip. 
if (_.isEmpty(queries)) { return this.$q.when({ data: [] }) as Promise<{ data: any }>; } + if (observer && options.targets.filter(target => target.context === 'explore').length === options.targets.length) { + // using observer to make the instant query return immediately + this.runObserverQueries(options, observer, queries, activeTargets, end); + return this.$q.when({ data: [] }) as Promise<{ data: any }>; + } + const allQueryPromise = _.map(queries, query => { - if (!query.instant) { - return this.performTimeSeriesQuery(query, query.start, query.end); - } else { + if (query.instant) { return this.performInstantQuery(query, end); + } else { + return this.performTimeSeriesQuery(query, query.start, query.end); } }); @@ -180,19 +282,10 @@ export class PrometheusDatasource extends DataSourceApi return; } - // Keeping original start/end for transformers - const transformerOptions = { - format: activeTargets[index].format, - step: queries[index].step, - legendFormat: activeTargets[index].legendFormat, - start: queries[index].start, - end: queries[index].end, - query: queries[index].expr, - responseListLength: responseList.length, - refId: activeTargets[index].refId, - valueWithRefId: activeTargets[index].valueWithRefId, - }; - const series = this.resultTransformer.transform(response, transformerOptions); + const target = activeTargets[index]; + const query = queries[index]; + const series = this.processResult(response, query, target, queries.length); + result = [...result, ...series]; }); @@ -202,10 +295,16 @@ export class PrometheusDatasource extends DataSourceApi return allPromise as Promise<{ data: any }>; } - createQuery(target, options, start, end) { - const query: any = { + createQuery(target: PromQuery, options: DataQueryRequest, start: number, end: number) { + const query: PromQueryRequest = { hinting: target.hinting, instant: target.instant, + step: 0, + expr: '', + requestId: '', + refId: '', + start: 0, + end: 0, }; const range = Math.ceil(end - start); @@ 
-398,7 +497,7 @@ export class PrometheusDatasource extends DataSourceApi }; // Unsetting min interval for accurate event resolution const minStep = '1s'; - const query = this.createQuery({ expr, interval: minStep }, queryOptions, start, end); + const query = this.createQuery({ expr, interval: minStep, refId: 'X' }, queryOptions, start, end); const self = this; return this.performTimeSeriesQuery(query, query.start, query.end).then(results => { diff --git a/public/app/plugins/datasource/prometheus/plugin.json b/public/app/plugins/datasource/prometheus/plugin.json index fb9ebbb52b10..ba1144549489 100644 --- a/public/app/plugins/datasource/prometheus/plugin.json +++ b/public/app/plugins/datasource/prometheus/plugin.json @@ -24,8 +24,6 @@ "metrics": true, "alerting": true, "annotations": true, - "explore": true, - "tables": true, "queryOptions": { "minInterval": true }, diff --git a/public/app/plugins/datasource/prometheus/specs/completer.test.ts b/public/app/plugins/datasource/prometheus/specs/completer.test.ts index 2580b87f6d7f..8a7b3b8c7c33 100644 --- a/public/app/plugins/datasource/prometheus/specs/completer.test.ts +++ b/public/app/plugins/datasource/prometheus/specs/completer.test.ts @@ -7,7 +7,7 @@ import { TemplateSrv } from 'app/features/templating/template_srv'; import { TimeSrv } from 'app/features/dashboard/services/TimeSrv'; import { IQService } from 'angular'; jest.mock('../datasource'); -jest.mock('app/core/services/backend_srv'); +jest.mock('@grafana/ui'); describe('Prometheus editor completer', () => { function getSessionStub(data) { diff --git a/public/app/plugins/datasource/prometheus/types.ts b/public/app/plugins/datasource/prometheus/types.ts index e83029df8356..a256f289cfe2 100644 --- a/public/app/plugins/datasource/prometheus/types.ts +++ b/public/app/plugins/datasource/prometheus/types.ts @@ -2,6 +2,14 @@ import { DataQuery, DataSourceJsonData } from '@grafana/ui/src/types'; export interface PromQuery extends DataQuery { expr: string; + 
context?: 'explore' | 'panel'; + format?: string; + instant?: boolean; + hinting?: boolean; + interval?: string; + intervalFactor?: number; + legendFormat?: string; + valueWithRefId?: boolean; } export interface PromOptions extends DataSourceJsonData { @@ -10,3 +18,10 @@ export interface PromOptions extends DataSourceJsonData { httpMethod: string; directUrl: string; } + +export interface PromQueryRequest extends PromQuery { + step?: number; + requestId?: string; + start: number; + end: number; +} diff --git a/public/app/plugins/datasource/stackdriver/components/Filter.tsx b/public/app/plugins/datasource/stackdriver/components/Filter.tsx index 6c63f1ed8913..08134789d3da 100644 --- a/public/app/plugins/datasource/stackdriver/components/Filter.tsx +++ b/public/app/plugins/datasource/stackdriver/components/Filter.tsx @@ -3,7 +3,7 @@ import _ from 'lodash'; import appEvents from 'app/core/app_events'; import { QueryMeta } from '../types'; -import { getAngularLoader, AngularComponent } from 'app/core/services/AngularLoader'; +import { getAngularLoader, AngularComponent } from '@grafana/runtime'; import { TemplateSrv } from 'app/features/templating/template_srv'; import StackdriverDatasource from '../datasource'; import '../query_filter_ctrl'; diff --git a/public/app/plugins/datasource/stackdriver/plugin.json b/public/app/plugins/datasource/stackdriver/plugin.json index 620a7b1c8cea..20cac315400a 100644 --- a/public/app/plugins/datasource/stackdriver/plugin.json +++ b/public/app/plugins/datasource/stackdriver/plugin.json @@ -7,7 +7,6 @@ "metrics": true, "alerting": true, "annotations": true, - "tables": false, "queryOptions": { "maxDataPoints": true, "cacheTimeout": true diff --git a/public/app/plugins/datasource/testdata/QueryEditor.tsx b/public/app/plugins/datasource/testdata/QueryEditor.tsx index f14d976ca384..324848400ffa 100644 --- a/public/app/plugins/datasource/testdata/QueryEditor.tsx +++ b/public/app/plugins/datasource/testdata/QueryEditor.tsx @@ -3,7 +3,7 @@ 
import React, { PureComponent } from 'react'; import _ from 'lodash'; // Services & Utils -import { getBackendSrv, BackendSrv } from 'app/core/services/backend_srv'; +import { getBackendSrv } from '@grafana/runtime'; // Components import { FormLabel, Select, SelectOptionItem } from '@grafana/ui'; @@ -21,7 +21,7 @@ interface State { type Props = QueryEditorProps; export class QueryEditor extends PureComponent { - backendSrv: BackendSrv = getBackendSrv(); + backendSrv = getBackendSrv(); state: State = { scenarioList: [], diff --git a/public/app/routes/GrafanaCtrl.ts b/public/app/routes/GrafanaCtrl.ts index a37222091d05..c3c5b71ca68e 100644 --- a/public/app/routes/GrafanaCtrl.ts +++ b/public/app/routes/GrafanaCtrl.ts @@ -5,15 +5,15 @@ import Drop from 'tether-drop'; // Utils and servies import { colors } from '@grafana/ui'; +import { setBackendSrv, BackendSrv, setDataSourceSrv } from '@grafana/runtime'; import config from 'app/core/config'; import coreModule from 'app/core/core_module'; import { profiler } from 'app/core/profiler'; import appEvents from 'app/core/app_events'; -import { BackendSrv, setBackendSrv } from 'app/core/services/backend_srv'; import { TimeSrv, setTimeSrv } from 'app/features/dashboard/services/TimeSrv'; -import { DatasourceSrv, setDatasourceSrv } from 'app/features/plugins/datasource_srv'; +import { DatasourceSrv } from 'app/features/plugins/datasource_srv'; import { KeybindingSrv, setKeybindingSrv } from 'app/core/services/keybindingSrv'; -import { AngularLoader, setAngularLoader } from 'app/core/services/AngularLoader'; +import { AngularLoader, setAngularLoader } from '@grafana/runtime'; import { configureStore } from 'app/store/configureStore'; // Types @@ -37,7 +37,7 @@ export class GrafanaCtrl { // make angular loader service available to react components setAngularLoader(angularLoader); setBackendSrv(backendSrv); - setDatasourceSrv(datasourceSrv); + setDataSourceSrv(datasourceSrv); setTimeSrv(timeSrv); setKeybindingSrv(keybindingSrv); 
configureStore(); diff --git a/public/app/store/configureStore.ts b/public/app/store/configureStore.ts index e561a7f5e592..63d8eaaf718d 100644 --- a/public/app/store/configureStore.ts +++ b/public/app/store/configureStore.ts @@ -1,7 +1,7 @@ import { createStore, applyMiddleware, compose, combineReducers } from 'redux'; import thunk from 'redux-thunk'; import { combineEpics, createEpicMiddleware } from 'redux-observable'; -// import { createLogger } from 'redux-logger'; +import { createLogger } from 'redux-logger'; import sharedReducers from 'app/core/reducers'; import alertingReducers from 'app/features/alerting/state/reducers'; import teamsReducers from 'app/features/teams/state/reducers'; @@ -15,8 +15,24 @@ import usersReducers from 'app/features/users/state/reducers'; import userReducers from 'app/features/profile/state/reducers'; import organizationReducers from 'app/features/org/state/reducers'; import { setStore } from './store'; -import { startSubscriptionsEpic, startSubscriptionEpic, limitMessageRateEpic } from 'app/features/explore/state/epics'; -import { WebSocketSubject, webSocket } from 'rxjs/webSocket'; +import { limitMessageRateEpic } from 'app/features/explore/state/epics/limitMessageRateEpic'; +import { stateSaveEpic } from 'app/features/explore/state/epics/stateSaveEpic'; +import { processQueryResultsEpic } from 'app/features/explore/state/epics/processQueryResultsEpic'; +import { processQueryErrorsEpic } from 'app/features/explore/state/epics/processQueryErrorsEpic'; +import { runQueriesEpic } from 'app/features/explore/state/epics/runQueriesEpic'; +import { runQueriesBatchEpic } from 'app/features/explore/state/epics/runQueriesBatchEpic'; +import { + DataSourceApi, + DataQueryResponse, + DataQuery, + DataSourceJsonData, + DataQueryRequest, + DataStreamObserver, +} from '@grafana/ui'; +import { Observable } from 'rxjs'; +import { getQueryResponse } from 'app/core/utils/explore'; +import { StoreState } from 'app/types/store'; +import { 
toggleLogActionsMiddleware } from 'app/core/middlewares/application'; const rootReducers = { ...sharedReducers, @@ -37,29 +53,42 @@ export function addRootReducer(reducers) { Object.assign(rootReducers, ...reducers); } -export const rootEpic: any = combineEpics(startSubscriptionsEpic, startSubscriptionEpic, limitMessageRateEpic); +export const rootEpic: any = combineEpics( + limitMessageRateEpic, + stateSaveEpic, + runQueriesEpic, + runQueriesBatchEpic, + processQueryResultsEpic, + processQueryErrorsEpic +); export interface EpicDependencies { - getWebSocket: (urlConfigOrSource: string) => WebSocketSubject; + getQueryResponse: ( + datasourceInstance: DataSourceApi, + options: DataQueryRequest, + observer?: DataStreamObserver + ) => Observable; } const dependencies: EpicDependencies = { - getWebSocket: webSocket, + getQueryResponse, }; const epicMiddleware = createEpicMiddleware({ dependencies }); export function configureStore() { const composeEnhancers = (window as any).__REDUX_DEVTOOLS_EXTENSION_COMPOSE__ || compose; - const rootReducer = combineReducers(rootReducers); + const logger = createLogger({ + predicate: (getState: () => StoreState) => { + return getState().application.logActions; + }, + }); + const storeEnhancers = + process.env.NODE_ENV !== 'production' + ? 
applyMiddleware(toggleLogActionsMiddleware, thunk, epicMiddleware, logger) + : applyMiddleware(thunk, epicMiddleware); - if (process.env.NODE_ENV !== 'production') { - // DEV builds we had the logger middleware - setStore(createStore(rootReducer, {}, composeEnhancers(applyMiddleware(thunk, epicMiddleware)))); - } else { - setStore(createStore(rootReducer, {}, composeEnhancers(applyMiddleware(thunk, epicMiddleware)))); - } - + setStore(createStore(rootReducer, {}, composeEnhancers(storeEnhancers))); epicMiddleware.run(rootEpic); } diff --git a/public/app/types/application.ts b/public/app/types/application.ts new file mode 100644 index 000000000000..d4562d68c448 --- /dev/null +++ b/public/app/types/application.ts @@ -0,0 +1,3 @@ +export interface ApplicationState { + logActions: boolean; +} diff --git a/public/app/types/explore.ts b/public/app/types/explore.ts index c852d92ee29a..98d137f1e7a5 100644 --- a/public/app/types/explore.ts +++ b/public/app/types/explore.ts @@ -3,7 +3,6 @@ import { Value } from 'slate'; import { RawTimeRange, DataQuery, - DataQueryResponseData, DataSourceSelectItem, DataSourceApi, QueryHint, @@ -13,9 +12,10 @@ import { DataQueryError, LogsModel, LogsDedupStrategy, + LoadingState, } from '@grafana/ui'; -import { Emitter, TimeSeries } from 'app/core/core'; +import { Emitter } from 'app/core/core'; import TableModel from 'app/core/table_model'; export enum ExploreMode { @@ -214,22 +214,8 @@ export interface ExploreItemState { * True if table result viewer is expanded. Query runs will contain table queries. */ showingTable: boolean; - /** - * True if `datasourceInstance` supports graph queries. - */ - supportsGraph: boolean | null; - /** - * True if `datasourceInstance` supports logs queries. - */ - supportsLogs: boolean | null; - /** - * True if `datasourceInstance` supports table queries. 
- */ - supportsTable: boolean | null; - graphIsLoading: boolean; - logIsLoading: boolean; - tableIsLoading: boolean; + loadingState: LoadingState; /** * Table model that combines all query table results into a single table. */ @@ -266,6 +252,7 @@ export interface ExploreItemState { mode: ExploreMode; isLive: boolean; + urlReplaced: boolean; } export interface ExploreUpdateState { @@ -326,11 +313,8 @@ export interface QueryIntervals { export interface QueryOptions { interval: string; - format: string; - hinting?: boolean; - instant?: boolean; - valueWithRefId?: boolean; maxDataPoints?: number; + live?: boolean; } export interface QueryTransaction { @@ -342,23 +326,14 @@ export interface QueryTransaction { options: any; queries: DataQuery[]; result?: any; // Table model / Timeseries[] / Logs - resultType: ResultType; scanning?: boolean; } export type RangeScanner = () => RawTimeRange; -export type ResultGetter = ( - result: DataQueryResponseData, - transaction: QueryTransaction, - allTransactions: QueryTransaction[] -) => TimeSeries; - export interface TextMatch { text: string; start: number; length: number; end: number; } - -export type ResultType = 'Graph' | 'Logs' | 'Table'; diff --git a/public/app/types/store.ts b/public/app/types/store.ts index 975cd40ae71c..66a3db1a3cb9 100644 --- a/public/app/types/store.ts +++ b/public/app/types/store.ts @@ -13,6 +13,7 @@ import { OrganizationState } from './organization'; import { AppNotificationsState } from './appNotifications'; import { PluginsState } from './plugins'; import { NavIndex } from '@grafana/ui'; +import { ApplicationState } from './application'; export interface StoreState { navIndex: NavIndex; @@ -29,6 +30,7 @@ export interface StoreState { appNotifications: AppNotificationsState; user: UserState; plugins: PluginsState; + application: ApplicationState; } /* diff --git a/public/e2e-test/core/images.ts b/public/e2e-test/core/images.ts index eb4ca3538d23..2897ba8aa2df 100644 --- a/public/e2e-test/core/images.ts 
+++ b/public/e2e-test/core/images.ts @@ -23,8 +23,21 @@ export const compareScreenShots = async (fileName: string) => return; } - expect(screenShotFromTest.width).toEqual(screenShotFromTruth.width); - expect(screenShotFromTest.height).toEqual(screenShotFromTruth.height); + if (screenShotFromTest.width !== screenShotFromTruth.width) { + throw new Error( + `The screenshot:[${fileName}] taken during the test has a width:[${ + screenShotFromTest.width + }] that differs from the expected: [${screenShotFromTruth.width}].` + ); + } + + if (screenShotFromTest.height !== screenShotFromTruth.height) { + throw new Error( + `The screenshot:[${fileName}] taken during the test has a height:[${ + screenShotFromTest.height + }] that differs from the expected: [${screenShotFromTruth.height}].` + ); + } const diff = new PNG({ width: screenShotFromTest.width, height: screenShotFromTruth.height }); const numDiffPixels = pixelmatch( @@ -36,7 +49,27 @@ export const compareScreenShots = async (fileName: string) => { threshold: 0.1 } ); - expect(numDiffPixels).toBe(0); + if (numDiffPixels !== 0) { + const localMessage = `\nCompare the output from expected:[${constants.screenShotsTruthDir}] with outcome:[${ + constants.screenShotsOutputDir + }]`; + const circleCIMessage = '\nCheck the Artifacts tab in the CircleCI build output for the actual screenshots.'; + const checkMessage = process.env.CIRCLE_SHA1 ?
circleCIMessage : localMessage; + let msg = `\nThe screenshot:[${ + constants.screenShotsOutputDir + }/${fileName}.png] taken during the test differs by:[${numDiffPixels}] pixels from the expected.`; + msg += '\n'; + msg += checkMessage; + msg += '\n'; + msg += '\n If the difference between expected and outcome is NOT acceptable then do the following:'; + msg += '\n - Check the code for changes that cause this difference, fix that and retry.'; + msg += '\n'; + msg += '\n If the difference between expected and outcome is acceptable then do the following:'; + msg += '\n - Replace the expected image with the outcome and retry.'; + msg += '\n'; + throw new Error(msg); + } + resolve(); }; diff --git a/public/e2e-test/install/install.ts b/public/e2e-test/install/install.ts index fa71acfb7851..61bfca0bfca1 100644 --- a/public/e2e-test/install/install.ts +++ b/public/e2e-test/install/install.ts @@ -11,7 +11,9 @@ export const downloadBrowserIfNeeded = async (): Promise => { console.log('Did not find any local revisions for browser, downloading latest this might take a while.'); await browserFetcher.download(constants.chromiumRevision, (downloaded, total) => { - console.log(`Downloaded ${downloaded}bytes of ${total}bytes.`); + if (downloaded === total) { + console.log('Chromium successfully downloaded'); + } }); }; diff --git a/public/e2e-test/screenShots/theTruth/smoke-test-scenario.png b/public/e2e-test/screenShots/theTruth/smoke-test-scenario.png index 8ea1294d4d6c..832163502300 100644 Binary files a/public/e2e-test/screenShots/theTruth/smoke-test-scenario.png and b/public/e2e-test/screenShots/theTruth/smoke-test-scenario.png differ diff --git a/public/sass/components/_add_data_source.scss b/public/sass/components/_add_data_source.scss index c14455d35c14..9e1bcb6ed778 100644 --- a/public/sass/components/_add_data_source.scss +++ b/public/sass/components/_add_data_source.scss @@ -77,6 +77,10 @@ } } +.add-datasource-item-actions__btn-icon { + margin-left: $space-sm; +} + 
.add-data-source-more { text-align: center; margin: $space-xl; diff --git a/public/sass/components/_buttons.scss b/public/sass/components/_buttons.scss index 1a9936bceab0..254ac5906abb 100644 --- a/public/sass/components/_buttons.scss +++ b/public/sass/components/_buttons.scss @@ -70,6 +70,7 @@ @include button-size($btn-padding-y-lg, $space-lg, $font-size-lg, $border-radius-sm); font-weight: normal; height: $height-lg; + .gicon { //font-size: 31px; margin-right: $space-sm; diff --git a/public/sass/pages/_explore.scss b/public/sass/pages/_explore.scss index fba4a7333462..c06af5864c76 100644 --- a/public/sass/pages/_explore.scss +++ b/public/sass/pages/_explore.scss @@ -22,6 +22,11 @@ .ds-picker { min-width: 200px; max-width: 200px; + + .gf-form-select-box__img-value { + max-width: 150px; + overflow: hidden; + } } } diff --git a/public/test/core/redux/epicTester.ts b/public/test/core/redux/epicTester.ts index 5c2a42469435..88638f556c68 100644 --- a/public/test/core/redux/epicTester.ts +++ b/public/test/core/redux/epicTester.ts @@ -1,6 +1,14 @@ import { Epic, ActionsObservable, StateObservable } from 'redux-observable'; import { Subject } from 'rxjs'; -import { WebSocketSubject } from 'rxjs/webSocket'; +import { + DataSourceApi, + DataQuery, + DataSourceJsonData, + DataQueryRequest, + DataStreamObserver, + DataQueryResponse, + DataStreamState, +} from '@grafana/ui'; import { ActionOf } from 'app/core/redux/actionCreatorFactory'; import { StoreState } from 'app/types/store'; @@ -8,21 +16,30 @@ import { EpicDependencies } from 'app/store/configureStore'; export const epicTester = ( epic: Epic, ActionOf, StoreState, EpicDependencies>, - state?: StoreState + state?: Partial ) => { const resultingActions: Array> = []; const action$ = new Subject>(); const state$ = new Subject(); const actionObservable$ = new ActionsObservable(action$); - const stateObservable$ = new StateObservable(state$, state || ({} as StoreState)); - const websockets$: Array> = []; + const 
stateObservable$ = new StateObservable(state$, (state as StoreState) || ({} as StoreState)); + const queryResponse$ = new Subject(); + const observer$ = new Subject(); + const getQueryResponse = ( + datasourceInstance: DataSourceApi, + options: DataQueryRequest, + observer?: DataStreamObserver + ) => { + if (observer) { + observer$.subscribe({ next: event => observer(event) }); + } + return queryResponse$; + }; + const dependencies: EpicDependencies = { - getWebSocket: () => { - const webSocket$ = new Subject(); - websockets$.push(webSocket$); - return webSocket$ as WebSocketSubject; - }, + getQueryResponse, }; + epic(actionObservable$, stateObservable$, dependencies).subscribe({ next: action => resultingActions.push(action) }); const whenActionIsDispatched = (action: ActionOf) => { @@ -31,14 +48,26 @@ export const epicTester = ( return instance; }; - const whenWebSocketReceivesData = (data: any) => { - websockets$.forEach(websocket$ => websocket$.next(data)); + const whenQueryReceivesResponse = (response: DataQueryResponse) => { + queryResponse$.next(response); + + return instance; + }; + + const whenQueryThrowsError = (error: any) => { + queryResponse$.error(error); + + return instance; + }; + + const whenQueryObserverReceivesEvent = (event: DataStreamState) => { + observer$.next(event); return instance; }; const thenResultingActionsEqual = (...actions: Array>) => { - expect(resultingActions).toEqual(actions); + expect(actions).toEqual(resultingActions); return instance; }; @@ -51,7 +80,9 @@ export const epicTester = ( const instance = { whenActionIsDispatched, - whenWebSocketReceivesData, + whenQueryReceivesResponse, + whenQueryThrowsError, + whenQueryObserverReceivesEvent, thenResultingActionsEqual, thenNoActionsWhereDispatched, }; diff --git a/public/test/mocks/mockExploreState.ts b/public/test/mocks/mockExploreState.ts new file mode 100644 index 000000000000..981f1fb2dbe4 --- /dev/null +++ b/public/test/mocks/mockExploreState.ts @@ -0,0 +1,86 @@ +import { 
DataSourceApi } from '@grafana/ui/src/types/datasource'; + +import { ExploreId, ExploreItemState, ExploreState } from 'app/types/explore'; +import { makeExploreItemState } from 'app/features/explore/state/reducers'; +import { StoreState } from 'app/types'; + +export const mockExploreState = (options: any = {}) => { + const isLive = options.isLive || false; + const history = []; + const eventBridge = { + emit: jest.fn(), + }; + const streaming = options.streaming || undefined; + const datasourceInterval = options.datasourceInterval || ''; + const refreshInterval = options.refreshInterval || ''; + const containerWidth = options.containerWidth || 1980; + const queries = options.queries || []; + const datasourceError = options.datasourceError || null; + const scanner = options.scanner || jest.fn(); + const scanning = options.scanning || false; + const datasourceId = options.datasourceId || '1337'; + const exploreId = ExploreId.left; + const datasourceInstance: DataSourceApi = options.datasourceInstance || { + id: 1337, + query: jest.fn(), + name: 'test', + testDatasource: jest.fn(), + meta: { + id: datasourceId, + streaming, + }, + interval: datasourceInterval, + }; + const urlReplaced = options.urlReplaced || false; + const left: ExploreItemState = options.left || { + ...makeExploreItemState(), + containerWidth, + datasourceError, + datasourceInstance, + eventBridge, + history, + isLive, + queries, + refreshInterval, + scanner, + scanning, + urlReplaced, + }; + const right: ExploreItemState = options.right || { + ...makeExploreItemState(), + containerWidth, + datasourceError, + datasourceInstance, + eventBridge, + history, + isLive, + queries, + refreshInterval, + scanner, + scanning, + urlReplaced, + }; + const split: boolean = options.split || false; + const explore: ExploreState = { + left, + right, + split, + }; + const state: Partial = { + explore, + }; + + return { + containerWidth, + datasourceId, + datasourceInstance, + datasourceInterval, + eventBridge, + 
exploreId, + history, + queries, + refreshInterval, + state, + scanner, + }; +}; diff --git a/scripts/backend-lint.sh b/scripts/backend-lint.sh index 6e7305364fdf..09b035bff6ee 100755 --- a/scripts/backend-lint.sh +++ b/scripts/backend-lint.sh @@ -36,4 +36,5 @@ exit_if_fail golangci-lint run --deadline 10m --disable-all \ exit_if_fail go vet ./pkg/... exit_if_fail make revive +exit_if_fail make revive-alerting exit_if_fail make gosec diff --git a/scripts/cli/tasks/cherrypick.ts b/scripts/cli/tasks/cherrypick.ts index 966781951828..ac92f223a7eb 100644 --- a/scripts/cli/tasks/cherrypick.ts +++ b/scripts/cli/tasks/cherrypick.ts @@ -7,6 +7,10 @@ const cherryPickRunner: TaskRunner = async () => { let client = axios.create({ baseURL: 'https://api.github.com/repos/grafana/grafana', timeout: 10000, + // auth: { + // username: '', + // password: '', + // }, }); const res = await client.get('/issues', { diff --git a/scripts/grunt/default_task.js b/scripts/grunt/default_task.js index 95a2522ccfc5..f910941d630d 100644 --- a/scripts/grunt/default_task.js +++ b/scripts/grunt/default_task.js @@ -34,7 +34,8 @@ module.exports = function(grunt) { grunt.registerTask('no-only-tests', function() { var files = grunt.file.expand( 'public/**/*@(_specs|.test).@(ts|js|tsx|jsx)', - 'packages/grafana-ui/**/*@(_specs|.test).@(ts|js|tsx|jsx)' + 'packages/grafana-ui/**/*@(_specs|.test).@(ts|js|tsx|jsx)', + 'packages/grafana-runtime/**/*@(_specs|.test).@(ts|js|tsx|jsx)' ); grepFiles(files, '.only(', 'found only statement in test: '); }); diff --git a/vendor/github.com/brianvoe/gofakeit/BENCHMARKS.md b/vendor/github.com/brianvoe/gofakeit/BENCHMARKS.md deleted file mode 100644 index ec6e6d7a3767..000000000000 --- a/vendor/github.com/brianvoe/gofakeit/BENCHMARKS.md +++ /dev/null @@ -1,134 +0,0 @@ -go test -bench=. 
-benchmem -goos: darwin -goarch: amd64 -pkg: github.com/brianvoe/gofakeit -Table generated with tablesgenerator.com/markdown_tables - -| Benchmark | Ops | CPU | MEM | MEM alloc | -|---------------------------------|-----------|-------------|------------|--------------| -| BenchmarkAddress-4 | 1000000 | 1998 ns/op | 248 B/op | 7 allocs/op | -| BenchmarkStreet-4 | 1000000 | 1278 ns/op | 62 B/op | 3 allocs/op | -| BenchmarkStreetNumber-4 | 5000000 | 344 ns/op | 36 B/op | 2 allocs/op | -| BenchmarkStreetPrefix-4 | 10000000 | 121 ns/op | 0 B/op | 0 allocs/op | -| BenchmarkStreetName-4 | 10000000 | 122 ns/op | 0 B/op | 0 allocs/op | -| BenchmarkStreetSuffix-4 | 10000000 | 122 ns/op | 0 B/op | 0 allocs/op | -| BenchmarkCity-4 | 5000000 | 326 ns/op | 15 B/op | 1 allocs/op | -| BenchmarkState-4 | 10000000 | 120 ns/op | 0 B/op | 0 allocs/op | -| BenchmarkStateAbr-4 | 10000000 | 122 ns/op | 0 B/op | 0 allocs/op | -| BenchmarkZip-4 | 5000000 | 315 ns/op | 5 B/op | 1 allocs/op | -| BenchmarkCountry-4 | 10000000 | 126 ns/op | 0 B/op | 0 allocs/op | -| BenchmarkCountryAbr-4 | 10000000 | 123 ns/op | 0 B/op | 0 allocs/op | -| BenchmarkLatitude-4 | 100000000 | 23.6 ns/op | 0 B/op | 0 allocs/op | -| BenchmarkLongitude-4 | 100000000 | 23.6 ns/op | 0 B/op | 0 allocs/op | -| BenchmarkLatitudeInRange-4 | 50000000 | 27.7 ns/op | 0 B/op | 0 allocs/op | -| BenchmarkLongitudeInRange-4 | 50000000 | 27.8 ns/op | 0 B/op | 0 allocs/op | -| BenchmarkBeerName-4 | 20000000 | 104 ns/op | 0 B/op | 0 allocs/op | -| BenchmarkBeerStyle-4 | 10000000 | 119 ns/op | 0 B/op | 0 allocs/op | -| BenchmarkBeerHop-4 | 20000000 | 105 ns/op | 0 B/op | 0 allocs/op | -| BenchmarkBeerYeast-4 | 20000000 | 106 ns/op | 0 B/op | 0 allocs/op | -| BenchmarkBeerMalt-4 | 20000000 | 114 ns/op | 0 B/op | 0 allocs/op | -| BenchmarkBeerIbu-4 | 20000000 | 71.0 ns/op | 8 B/op | 1 allocs/op | -| BenchmarkBeerAlcohol-4 | 5000000 | 335 ns/op | 40 B/op | 3 allocs/op | -| BenchmarkBeerBlg-4 | 5000000 | 338 ns/op | 48 B/op | 3 allocs/op 
| -| BenchmarkBool-4 | 50000000 | 34.2 ns/op | 0 B/op | 0 allocs/op | -| BenchmarkColor-4 | 20000000 | 112 ns/op | 0 B/op | 0 allocs/op | -| BenchmarkSafeColor-4 | 20000000 | 102 ns/op | 0 B/op | 0 allocs/op | -| BenchmarkHexColor-4 | 3000000 | 491 ns/op | 24 B/op | 3 allocs/op | -| BenchmarkRGBColor-4 | 20000000 | 103 ns/op | 32 B/op | 1 allocs/op | -| BenchmarkCompany-4 | 5000000 | 353 ns/op | 22 B/op | 1 allocs/op | -| BenchmarkCompanySuffix-4 | 20000000 | 89.6 ns/op | 0 B/op | 0 allocs/op | -| BenchmarkBuzzWord-4 | 20000000 | 99.0 ns/op | 0 B/op | 0 allocs/op | -| BenchmarkBS-4 | 20000000 | 100 ns/op | 0 B/op | 0 allocs/op | -| BenchmarkContact-4 | 1000000 | 1121 ns/op | 178 B/op | 7 allocs/op | -| BenchmarkPhone-4 | 5000000 | 346 ns/op | 16 B/op | 1 allocs/op | -| BenchmarkPhoneFormatted-4 | 3000000 | 456 ns/op | 16 B/op | 1 allocs/op | -| BenchmarkEmail-4 | 2000000 | 715 ns/op | 130 B/op | 5 allocs/op | -| BenchmarkCurrency-4 | 10000000 | 125 ns/op | 32 B/op | 1 allocs/op | -| BenchmarkCurrencyShort-4 | 20000000 | 104 ns/op | 0 B/op | 0 allocs/op | -| BenchmarkCurrencyLong-4 | 20000000 | 105 ns/op | 0 B/op | 0 allocs/op | -| BenchmarkPrice-4 | 50000000 | 27.2 ns/op | 0 B/op | 0 allocs/op | -| BenchmarkDate-4 | 5000000 | 371 ns/op | 0 B/op | 0 allocs/op | -| BenchmarkDateRange-4 | 10000000 | 238 ns/op | 0 B/op | 0 allocs/op | -| BenchmarkMonth-4 | 30000000 | 44.6 ns/op | 0 B/op | 0 allocs/op | -| BenchmarkDay-4 | 50000000 | 39.2 ns/op | 0 B/op | 0 allocs/op | -| BenchmarkWeekDay-4 | 30000000 | 44.7 ns/op | 0 B/op | 0 allocs/op | -| BenchmarkYear-4 | 20000000 | 115 ns/op | 0 B/op | 0 allocs/op | -| BenchmarkHour-4 | 30000000 | 39.9 ns/op | 0 B/op | 0 allocs/op | -| BenchmarkMinute-4 | 50000000 | 40.4 ns/op | 0 B/op | 0 allocs/op | -| BenchmarkSecond-4 | 30000000 | 40.6 ns/op | 0 B/op | 0 allocs/op | -| BenchmarkNanoSecond-4 | 30000000 | 42.2 ns/op | 0 B/op | 0 allocs/op | -| BenchmarkTimeZone-4 | 20000000 | 105 ns/op | 0 B/op | 0 allocs/op | -| 
BenchmarkTimeZoneFull-4 | 20000000 | 118 ns/op | 0 B/op | 0 allocs/op | -| BenchmarkTimeZoneAbv-4 | 20000000 | 105 ns/op | 0 B/op | 0 allocs/op | -| BenchmarkTimeZoneOffset-4 | 10000000 | 147 ns/op | 0 B/op | 0 allocs/op | -| BenchmarkMimeType-4 | 20000000 | 99.9 ns/op | 0 B/op | 0 allocs/op | -| BenchmarkExtension-4 | 20000000 | 109 ns/op | 0 B/op | 0 allocs/op | -| BenchmarkGenerate-4 | 1000000 | 1588 ns/op | 414 B/op | 11 allocs/op | -| BenchmarkHackerPhrase-4 | 300000 | 4576 ns/op | 2295 B/op | 26 allocs/op | -| BenchmarkHackerAbbreviation-4 | 20000000 | 101 ns/op | 0 B/op | 0 allocs/op | -| BenchmarkHackerAdjective-4 | 20000000 | 101 ns/op | 0 B/op | 0 allocs/op | -| BenchmarkHackerNoun-4 | 20000000 | 104 ns/op | 0 B/op | 0 allocs/op | -| BenchmarkHackerVerb-4 | 20000000 | 113 ns/op | 0 B/op | 0 allocs/op | -| BenchmarkHackerIngverb-4 | 20000000 | 98.6 ns/op | 0 B/op | 0 allocs/op | -| BenchmarkHipsterWord-4 | 20000000 | 100 ns/op | 0 B/op | 0 allocs/op | -| BenchmarkHipsterSentence-4 | 1000000 | 1636 ns/op | 353 B/op | 3 allocs/op | -| BenchmarkHipsterParagraph-4 | 50000 | 31677 ns/op | 12351 B/op | 64 allocs/op | -| BenchmarkImageURL-4 | 20000000 | 108 ns/op | 38 B/op | 3 allocs/op | -| BenchmarkDomainName-4 | 3000000 | 491 ns/op | 76 B/op | 3 allocs/op | -| BenchmarkDomainSuffix-4 | 20000000 | 99.4 ns/op | 0 B/op | 0 allocs/op | -| BenchmarkURL-4 | 1000000 | 1201 ns/op | 278 B/op | 8 allocs/op | -| BenchmarkHTTPMethod-4 | 20000000 | 100 ns/op | 0 B/op | 0 allocs/op | -| BenchmarkIPv4Address-4 | 3000000 | 407 ns/op | 48 B/op | 5 allocs/op | -| BenchmarkIPv6Address-4 | 3000000 | 552 ns/op | 96 B/op | 7 allocs/op | -| BenchmarkUsername-4 | 5000000 | 307 ns/op | 16 B/op | 2 allocs/op | -| BenchmarkJob-4 | 2000000 | 726 ns/op | 86 B/op | 2 allocs/op | -| BenchmarkJobTitle-4 | 20000000 | 98.7 ns/op | 0 B/op | 0 allocs/op | -| BenchmarkJobDescriptor-4 | 20000000 | 98.9 ns/op | 0 B/op | 0 allocs/op | -| BenchmarkJobLevel-4 | 20000000 | 110 ns/op | 0 B/op | 0 
allocs/op | -| BenchmarkLogLevel-4 | 20000000 | 107 ns/op | 0 B/op | 0 allocs/op | -| BenchmarkReplaceWithNumbers-4 | 3000000 | 570 ns/op | 32 B/op | 1 allocs/op | -| BenchmarkName-4 | 5000000 | 285 ns/op | 17 B/op | 1 allocs/op | -| BenchmarkFirstName-4 | 20000000 | 102 ns/op | 0 B/op | 0 allocs/op | -| BenchmarkLastName-4 | 20000000 | 100 ns/op | 0 B/op | 0 allocs/op | -| BenchmarkNamePrefix-4 | 20000000 | 98.0 ns/op | 0 B/op | 0 allocs/op | -| BenchmarkNameSuffix-4 | 20000000 | 109 ns/op | 0 B/op | 0 allocs/op | -| BenchmarkNumber-4 | 50000000 | 34.5 ns/op | 0 B/op | 0 allocs/op | -| BenchmarkUint8-4 | 50000000 | 28.5 ns/op | 0 B/op | 0 allocs/op | -| BenchmarkUint16-4 | 50000000 | 28.5 ns/op | 0 B/op | 0 allocs/op | -| BenchmarkUint32-4 | 50000000 | 27.0 ns/op | 0 B/op | 0 allocs/op | -| BenchmarkUint64-4 | 50000000 | 34.6 ns/op | 0 B/op | 0 allocs/op | -| BenchmarkInt8-4 | 50000000 | 28.5 ns/op | 0 B/op | 0 allocs/op | -| BenchmarkInt16-4 | 50000000 | 28.4 ns/op | 0 B/op | 0 allocs/op | -| BenchmarkInt32-4 | 50000000 | 27.0 ns/op | 0 B/op | 0 allocs/op | -| BenchmarkInt64-4 | 50000000 | 34.9 ns/op | 0 B/op | 0 allocs/op | -| BenchmarkFloat32-4 | 50000000 | 27.7 ns/op | 0 B/op | 0 allocs/op | -| BenchmarkFloat32Range-4 | 50000000 | 27.9 ns/op | 0 B/op | 0 allocs/op | -| BenchmarkFloat64-4 | 50000000 | 25.9 ns/op | 0 B/op | 0 allocs/op | -| BenchmarkFloat64Range-4 | 50000000 | 26.5 ns/op | 0 B/op | 0 allocs/op | -| BenchmarkNumerify-4 | 5000000 | 354 ns/op | 16 B/op | 1 allocs/op | -| BenchmarkShuffleInts-4 | 10000000 | 226 ns/op | 0 B/op | 0 allocs/op | -| BenchmarkPassword-4 | 2000000 | 655 ns/op | 304 B/op | 6 allocs/op | -| BenchmarkCreditCard-4 | 2000000 | 997 ns/op | 88 B/op | 4 allocs/op | -| BenchmarkCreditCardType-4 | 20000000 | 92.7 ns/op | 0 B/op | 0 allocs/op | -| BenchmarkCreditCardNumber-4 | 3000000 | 572 ns/op | 16 B/op | 1 allocs/op | -| BenchmarkCreditCardNumberLuhn-4 | 300000 | 5815 ns/op | 159 B/op | 9 allocs/op | -| BenchmarkCreditCardExp-4 | 
10000000 | 129 ns/op | 5 B/op | 1 allocs/op | -| BenchmarkCreditCardCvv-4 | 10000000 | 128 ns/op | 3 B/op | 1 allocs/op | -| BenchmarkSSN-4 | 20000000 | 84.2 ns/op | 16 B/op | 1 allocs/op | -| BenchmarkGender-4 | 50000000 | 38.0 ns/op | 0 B/op | 0 allocs/op | -| BenchmarkPerson-4 | 300000 | 5563 ns/op | 805 B/op | 26 allocs/op | -| BenchmarkSimpleStatusCode-4 | 20000000 | 72.9 ns/op | 0 B/op | 0 allocs/op | -| BenchmarkStatusCode-4 | 20000000 | 75.8 ns/op | 0 B/op | 0 allocs/op | -| BenchmarkLetter-4 | 50000000 | 38.4 ns/op | 0 B/op | 0 allocs/op | -| BenchmarkDigit-4 | 50000000 | 38.2 ns/op | 0 B/op | 0 allocs/op | -| BenchmarkLexify-4 | 10000000 | 222 ns/op | 8 B/op | 1 allocs/op | -| BenchmarkShuffleStrings-4 | 10000000 | 197 ns/op | 0 B/op | 0 allocs/op | -| BenchmarkUUID-4 | 20000000 | 106 ns/op | 48 B/op | 1 allocs/op | -| BenchmarkUserAgent-4 | 1000000 | 1236 ns/op | 305 B/op | 5 allocs/op | -| BenchmarkChromeUserAgent-4 | 2000000 | 881 ns/op | 188 B/op | 5 allocs/op | -| BenchmarkFirefoxUserAgent-4 | 1000000 | 1595 ns/op | 386 B/op | 7 allocs/op | -| BenchmarkSafariUserAgent-4 | 1000000 | 1396 ns/op | 551 B/op | 7 allocs/op | -| BenchmarkOperaUserAgent-4 | 2000000 | 950 ns/op | 216 B/op | 5 allocs/op | -| BenchmarkWord-4 | 20000000 | 99.1 ns/op | 0 B/op | 0 allocs/op | -| BenchmarkSentence-4 | 1000000 | 1540 ns/op | 277 B/op | 2 allocs/op | -| BenchmarkParagraph-4 | 50000 | 30978 ns/op | 11006 B/op | 61 allocs/op | \ No newline at end of file diff --git a/vendor/github.com/brianvoe/gofakeit/CODE_OF_CONDUCT.md b/vendor/github.com/brianvoe/gofakeit/CODE_OF_CONDUCT.md deleted file mode 100644 index 99d12c90fecf..000000000000 --- a/vendor/github.com/brianvoe/gofakeit/CODE_OF_CONDUCT.md +++ /dev/null @@ -1,46 +0,0 @@ -# Contributor Covenant Code of Conduct - -## Our Pledge - -In the interest of fostering an open and welcoming environment, we as contributors and maintainers pledge to making participation in our project and our community a harassment-free 
experience for everyone, regardless of age, body size, disability, ethnicity, gender identity and expression, level of experience, nationality, personal appearance, race, religion, or sexual identity and orientation. - -## Our Standards - -Examples of behavior that contributes to creating a positive environment include: - -* Using welcoming and inclusive language -* Being respectful of differing viewpoints and experiences -* Gracefully accepting constructive criticism -* Focusing on what is best for the community -* Showing empathy towards other community members - -Examples of unacceptable behavior by participants include: - -* The use of sexualized language or imagery and unwelcome sexual attention or advances -* Trolling, insulting/derogatory comments, and personal or political attacks -* Public or private harassment -* Publishing others' private information, such as a physical or electronic address, without explicit permission -* Other conduct which could reasonably be considered inappropriate in a professional setting - -## Our Responsibilities - -Project maintainers are responsible for clarifying the standards of acceptable behavior and are expected to take appropriate and fair corrective action in response to any instances of unacceptable behavior. - -Project maintainers have the right and responsibility to remove, edit, or reject comments, commits, code, wiki edits, issues, and other contributions that are not aligned to this Code of Conduct, or to ban temporarily or permanently any contributor for other behaviors that they deem inappropriate, threatening, offensive, or harmful. - -## Scope - -This Code of Conduct applies both within project spaces and in public spaces when an individual is representing the project or its community. Examples of representing a project or community include using an official project e-mail address, posting via an official social media account, or acting as an appointed representative at an online or offline event. 
Representation of a project may be further defined and clarified by project maintainers. - -## Enforcement - -Instances of abusive, harassing, or otherwise unacceptable behavior may be reported by contacting the project team at brian@webiswhatido.com. The project team will review and investigate all complaints, and will respond in a way that it deems appropriate to the circumstances. The project team is obligated to maintain confidentiality with regard to the reporter of an incident. Further details of specific enforcement policies may be posted separately. - -Project maintainers who do not follow or enforce the Code of Conduct in good faith may face temporary or permanent repercussions as determined by other members of the project's leadership. - -## Attribution - -This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4, available at [http://contributor-covenant.org/version/1/4][version] - -[homepage]: http://contributor-covenant.org -[version]: http://contributor-covenant.org/version/1/4/ diff --git a/vendor/github.com/brianvoe/gofakeit/CONTRIBUTING.md b/vendor/github.com/brianvoe/gofakeit/CONTRIBUTING.md deleted file mode 100644 index 5a4812c28ee8..000000000000 --- a/vendor/github.com/brianvoe/gofakeit/CONTRIBUTING.md +++ /dev/null @@ -1 +0,0 @@ -# Make a pull request and submit it and ill take a look at it. Thanks! 
diff --git a/vendor/github.com/brianvoe/gofakeit/LICENSE.txt b/vendor/github.com/brianvoe/gofakeit/LICENSE.txt deleted file mode 100644 index 21984c9d5eaa..000000000000 --- a/vendor/github.com/brianvoe/gofakeit/LICENSE.txt +++ /dev/null @@ -1,20 +0,0 @@ -The MIT License (MIT) - -Copyright (c) [year] [fullname] - -Permission is hereby granted, free of charge, to any person obtaining a copy of -this software and associated documentation files (the "Software"), to deal in -the Software without restriction, including without limitation the rights to -use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of -the Software, and to permit persons to whom the Software is furnished to do so, -subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS -FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR -COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER -IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
\ No newline at end of file diff --git a/vendor/github.com/brianvoe/gofakeit/README.md b/vendor/github.com/brianvoe/gofakeit/README.md deleted file mode 100644 index 4e3723fd5117..000000000000 --- a/vendor/github.com/brianvoe/gofakeit/README.md +++ /dev/null @@ -1,254 +0,0 @@ -![alt text](https://raw.githubusercontent.com/brianvoe/gofakeit/master/logo.png) - -# gofakeit [![Go Report Card](https://goreportcard.com/badge/github.com/brianvoe/gofakeit)](https://goreportcard.com/report/github.com/brianvoe/gofakeit) [![Build Status](https://travis-ci.org/brianvoe/gofakeit.svg?branch=master)](https://travis-ci.org/brianvoe/gofakeit) [![codecov.io](https://codecov.io/github/brianvoe/gofakeit/branch/master/graph/badge.svg)](https://codecov.io/github/brianvoe/gofakeit) [![GoDoc](https://godoc.org/github.com/brianvoe/gofakeit?status.svg)](https://godoc.org/github.com/brianvoe/gofakeit) [![license](http://img.shields.io/badge/license-MIT-green.svg?style=flat)](https://raw.githubusercontent.com/brianvoe/gofakeit/master/LICENSE.txt) -Random data generator written in go - -Buy Me A Coffee - -### Features -- Every function has an example and a benchmark, -[see benchmarks](https://github.com/brianvoe/gofakeit/blob/master/BENCHMARKS.md) -- Zero dependencies -- Randomizes user defined structs -- Numerous functions for regular use - -### 120+ Functions!!! -If there is something that is generic enough missing from this package [add an issue](https://github.com/brianvoe/gofakeit/issues) and let me know what you need. -Most of the time i'll add it! 
- -## Person -```go -Person() *PersonInfo -Name() string -NamePrefix() string -NameSuffix() string -FirstName() string -LastName() string -Gender() string -SSN() string -Contact() *ContactInfo -Email() string -Phone() string -PhoneFormatted() string -Username() string -Password(lower bool, upper bool, numeric bool, special bool, space bool, num int) string -``` - -## Address -```go -Address() *AddressInfo -City() string -Country() string -CountryAbr() string -State() string -StateAbr() string -StatusCode() string -Street() string -StreetName() string -StreetNumber() string -StreetPrefix() string -StreetSuffix() string -Zip() string -Latitude() float64 -LatitudeInRange() (float64, error) -Longitude() float64 -LongitudeInRange() (float64, error) -``` - -## Beer -```go -BeerAlcohol() string -BeerBlg() string -BeerHop() string -BeerIbu() string -BeerMalt() string -BeerName() string -BeerStyle() string -BeerYeast() string -``` - -## Cars -```go -Vehicle() *VehicleInfo -CarMaker() string -CarModel() string -VehicleType() string -FuelType() string -TransmissionGearType() string -``` - -## Words -```go -Word() string -Sentence(wordCount int) string -Paragraph(paragraphCount int, sentenceCount int, wordCount int, separator string) string -Question() string -Quote() string -``` - -## Misc -```go -Struct(v interface{}) -Generate() string -Bool() bool -UUID() string -``` - -## Colors -```go -Color() string -HexColor() string -RGBColor() string -SafeColor() string -``` - -## Internet -```go -URL() string -ImageURL(width int, height int) string -DomainName() string -DomainSuffix() string -IPv4Address() string -IPv6Address() string -SimpleStatusCode() int -LogLevel(logType string) string -HTTPMethod() string -UserAgent() string -ChromeUserAgent() string -FirefoxUserAgent() string -OperaUserAgent() string -SafariUserAgent() string -``` - -## Date/Time -```go -Date() time.Time -DateRange(start, end time.Time) time.Time -NanoSecond() int -Second() int -Minute() int -Hour() int 
-Month() string -Day() int -WeekDay() string -Year() int -TimeZone() string -TimeZoneAbv() string -TimeZoneFull() string -TimeZoneOffset() float32 -``` - -## Payment -```go -Price(min, max float64) float64 -CreditCard() *CreditCardInfo -CreditCardCvv() string -CreditCardExp() string -CreditCardNumber() int -CreditCardNumberLuhn() int -CreditCardType() string -Currency() *CurrencyInfo -CurrencyLong() string -CurrencyShort() string -``` - -## Company -```go -BS() string -BuzzWord() string -Company() string -CompanySuffix() string -Job() *JobInfo -JobDescriptor() string -JobLevel() string -JobTitle() string -``` - -## Hacker -```go -HackerAbbreviation() string -HackerAdjective() string -HackerIngverb() string -HackerNoun() string -HackerPhrase() string -HackerVerb() string -``` - -## Hipster -```go -HipsterWord() string -HipsterSentence(wordCount int) string -HipsterParagraph(paragraphCount int, sentenceCount int, wordCount int, separator string) string -``` - -## File -```go -Extension() string -MimeType() string -``` - -## Numbers -```go -Number(min int, max int) int -Numerify(str string) string -Int8() int8 -Int16() int16 -Int32() int32 -Int64() int64 -Uint8() uint8 -Uint16() uint16 -Uint32() uint32 -Uint64() uint64 -Float32() float32 -Float32Range(min, max float32) float32 -Float64() float64 -Float64Range(min, max float64) float64 -ShuffleInts(a []int) -``` - -## String -```go -Digit() string -Letter() string -Lexify(str string) string -RandString(a []string) string -ShuffleStrings(a []string) -``` - -## Documentation -[![GoDoc](https://godoc.org/github.com/brianvoe/gofakeit?status.svg)](https://godoc.org/github.com/brianvoe/gofakeit) - -## Example -```go -import "github.com/brianvoe/gofakeit" - -gofakeit.Name() // Markus Moen -gofakeit.Email() // alaynawuckert@kozey.biz -gofakeit.Phone() // (570)245-7485 -gofakeit.BS() // front-end -gofakeit.BeerName() // Duvel -gofakeit.Color() // MediumOrchid -gofakeit.Company() // Moen, Pagac and Wuckert 
-gofakeit.CreditCardNumber() // 4287271570245748 -gofakeit.HackerPhrase() // Connecting the array won't do anything, we need to generate the haptic COM driver! -gofakeit.JobTitle() // Director -gofakeit.Password(true, true, true, true, true, 32) // WV10MzLxq2DX79w1omH97_0ga59j8!kj -gofakeit.CurrencyShort() // USD -// 120+ more!!! - -// Create structs with random injected data -type Foo struct { - Bar string - Baz string - Int int - Pointer *int - Skip *string `fake:"skip"` // Set to "skip" to not generate data for -} -var f Foo -gofakeit.Struct(&f) -fmt.Printf("f.Bar:%s\n", f.Bar) // f.Bar:hrukpttuezptneuvunh -fmt.Printf("f.Baz:%s\n", f.Baz) // f.Baz:uksqvgzadxlgghejkmv -fmt.Printf("f.Int:%d\n", f.Int) // f.Int:-7825289004089916589 -fmt.Printf("f.Pointer:%d\n", *f.Pointer) // f.Pointer:-343806609094473732 -fmt.Printf("f.Skip:%v\n", f.Skip) // f.Skip: -``` diff --git a/vendor/github.com/brianvoe/gofakeit/TODO.txt b/vendor/github.com/brianvoe/gofakeit/TODO.txt deleted file mode 100644 index 7a492842136b..000000000000 --- a/vendor/github.com/brianvoe/gofakeit/TODO.txt +++ /dev/null @@ -1,3 +0,0 @@ -* Take a look at [chance.js](http://chancejs.com/) and see if i missed anything. -* Look into [National Baby Name List](http://www.ssa.gov/oact/babynames/limits.html) and see if that makes sense to replace over what we currently have. -* Look at [data list](https://github.com/dariusk/corpora/tree/master/data) and see if it makes sense to add that data in or if it seems unncessary. 
diff --git a/vendor/github.com/brianvoe/gofakeit/address.go b/vendor/github.com/brianvoe/gofakeit/address.go deleted file mode 100644 index 82fc6b00e191..000000000000 --- a/vendor/github.com/brianvoe/gofakeit/address.go +++ /dev/null @@ -1,131 +0,0 @@ -package gofakeit - -import ( - "errors" - "math/rand" - "strings" -) - -// AddressInfo is a struct full of address information -type AddressInfo struct { - Address string - Street string - City string - State string - Zip string - Country string - Latitude float64 - Longitude float64 -} - -// Address will generate a struct of address information -func Address() *AddressInfo { - street := Street() - city := City() - state := State() - zip := Zip() - - return &AddressInfo{ - Address: street + ", " + city + ", " + state + " " + zip, - Street: street, - City: city, - State: state, - Zip: zip, - Country: Country(), - Latitude: Latitude(), - Longitude: Longitude(), - } -} - -// Street will generate a random address street string -func Street() (street string) { - switch randInt := randIntRange(1, 2); randInt { - case 1: - street = StreetNumber() + " " + StreetPrefix() + " " + StreetName() + StreetSuffix() - case 2: - street = StreetNumber() + " " + StreetName() + StreetSuffix() - } - - return -} - -// StreetNumber will generate a random address street number string -func StreetNumber() string { - return strings.TrimLeft(replaceWithNumbers(getRandValue([]string{"address", "number"})), "0") -} - -// StreetPrefix will generate a random address street prefix string -func StreetPrefix() string { - return getRandValue([]string{"address", "street_prefix"}) -} - -// StreetName will generate a random address street name string -func StreetName() string { - return getRandValue([]string{"address", "street_name"}) -} - -// StreetSuffix will generate a random address street suffix string -func StreetSuffix() string { - return getRandValue([]string{"address", "street_suffix"}) -} - -// City will generate a random city string -func 
City() (city string) { - switch randInt := randIntRange(1, 3); randInt { - case 1: - city = FirstName() + StreetSuffix() - case 2: - city = LastName() + StreetSuffix() - case 3: - city = StreetPrefix() + " " + LastName() - } - - return -} - -// State will generate a random state string -func State() string { - return getRandValue([]string{"address", "state"}) -} - -// StateAbr will generate a random abbreviated state string -func StateAbr() string { - return getRandValue([]string{"address", "state_abr"}) -} - -// Zip will generate a random Zip code string -func Zip() string { - return replaceWithNumbers(getRandValue([]string{"address", "zip"})) -} - -// Country will generate a random country string -func Country() string { - return getRandValue([]string{"address", "country"}) -} - -// CountryAbr will generate a random abbreviated country string -func CountryAbr() string { - return getRandValue([]string{"address", "country_abr"}) -} - -// Latitude will generate a random latitude float64 -func Latitude() float64 { return (rand.Float64() * 180) - 90 } - -// LatitudeInRange will generate a random latitude within the input range -func LatitudeInRange(min, max float64) (float64, error) { - if min > max || min < -90 || min > 90 || max < -90 || max > 90 { - return 0, errors.New("input range is invalid") - } - return randFloat64Range(min, max), nil -} - -// Longitude will generate a random longitude float64 -func Longitude() float64 { return (rand.Float64() * 360) - 180 } - -// LongitudeInRange will generate a random longitude within the input range -func LongitudeInRange(min, max float64) (float64, error) { - if min > max || min < -180 || min > 180 || max < -180 || max > 180 { - return 0, errors.New("input range is invalid") - } - return randFloat64Range(min, max), nil -} diff --git a/vendor/github.com/brianvoe/gofakeit/beer.go b/vendor/github.com/brianvoe/gofakeit/beer.go deleted file mode 100644 index 53297d537809..000000000000 --- 
a/vendor/github.com/brianvoe/gofakeit/beer.go +++ /dev/null @@ -1,45 +0,0 @@ -package gofakeit - -import "strconv" - -// Faker::Beer.blg #=> "18.5°Blg" - -// BeerName will return a random beer name -func BeerName() string { - return getRandValue([]string{"beer", "name"}) -} - -// BeerStyle will return a random beer style -func BeerStyle() string { - return getRandValue([]string{"beer", "style"}) -} - -// BeerHop will return a random beer hop -func BeerHop() string { - return getRandValue([]string{"beer", "hop"}) -} - -// BeerYeast will return a random beer yeast -func BeerYeast() string { - return getRandValue([]string{"beer", "yeast"}) -} - -// BeerMalt will return a random beer malt -func BeerMalt() string { - return getRandValue([]string{"beer", "malt"}) -} - -// BeerIbu will return a random beer ibu value between 10 and 100 -func BeerIbu() string { - return strconv.Itoa(randIntRange(10, 100)) + " IBU" -} - -// BeerAlcohol will return a random beer alcohol level between 2.0 and 10.0 -func BeerAlcohol() string { - return strconv.FormatFloat(randFloat64Range(2.0, 10.0), 'f', 1, 64) + "%" -} - -// BeerBlg will return a random beer blg between 5.0 and 20.0 -func BeerBlg() string { - return strconv.FormatFloat(randFloat64Range(5.0, 20.0), 'f', 1, 64) + "°Blg" -} diff --git a/vendor/github.com/brianvoe/gofakeit/bool.go b/vendor/github.com/brianvoe/gofakeit/bool.go deleted file mode 100644 index f63eeedd3241..000000000000 --- a/vendor/github.com/brianvoe/gofakeit/bool.go +++ /dev/null @@ -1,10 +0,0 @@ -package gofakeit - -// Bool will generate a random boolean value -func Bool() bool { - if randIntRange(0, 1) == 1 { - return true - } - - return false -} diff --git a/vendor/github.com/brianvoe/gofakeit/color.go b/vendor/github.com/brianvoe/gofakeit/color.go deleted file mode 100644 index 63a737e99a62..000000000000 --- a/vendor/github.com/brianvoe/gofakeit/color.go +++ /dev/null @@ -1,44 +0,0 @@ -package gofakeit - -import "math/rand" - -// Color will generate a random 
color string -func Color() string { - return getRandValue([]string{"color", "full"}) -} - -// SafeColor will generate a random safe color string -func SafeColor() string { - return getRandValue([]string{"color", "safe"}) -} - -// HexColor will generate a random hexadecimal color string -func HexColor() string { - color := make([]byte, 6) - hashQuestion := []byte("?#") - for i := 0; i < 6; i++ { - color[i] = hashQuestion[rand.Intn(2)] - } - - return "#" + replaceWithLetters(replaceWithNumbers(string(color))) - - // color := "" - // for i := 1; i <= 6; i++ { - // color += RandString([]string{"?", "#"}) - // } - - // // Replace # with number - // color = replaceWithNumbers(color) - - // // Replace ? with letter - // for strings.Count(color, "?") > 0 { - // color = strings.Replace(color, "?", RandString(letters), 1) - // } - - // return "#" + color -} - -// RGBColor will generate a random int slice color -func RGBColor() []int { - return []int{randIntRange(0, 255), randIntRange(0, 255), randIntRange(0, 255)} -} diff --git a/vendor/github.com/brianvoe/gofakeit/company.go b/vendor/github.com/brianvoe/gofakeit/company.go deleted file mode 100644 index abdb2aa698f1..000000000000 --- a/vendor/github.com/brianvoe/gofakeit/company.go +++ /dev/null @@ -1,30 +0,0 @@ -package gofakeit - -// Company will generate a random company name string -func Company() (company string) { - switch randInt := randIntRange(1, 3); randInt { - case 1: - company = LastName() + ", " + LastName() + " and " + LastName() - case 2: - company = LastName() + "-" + LastName() - case 3: - company = LastName() + " " + CompanySuffix() - } - - return -} - -// CompanySuffix will generate a random company suffix string -func CompanySuffix() string { - return getRandValue([]string{"company", "suffix"}) -} - -// BuzzWord will generate a random company buzz word string -func BuzzWord() string { - return getRandValue([]string{"company", "buzzwords"}) -} - -// BS will generate a random company bs string -func BS() 
string { - return getRandValue([]string{"company", "bs"}) -} diff --git a/vendor/github.com/brianvoe/gofakeit/contact.go b/vendor/github.com/brianvoe/gofakeit/contact.go deleted file mode 100644 index 1eb0ae05303d..000000000000 --- a/vendor/github.com/brianvoe/gofakeit/contact.go +++ /dev/null @@ -1,40 +0,0 @@ -package gofakeit - -import ( - "strings" -) - -// ContactInfo struct full of contact info -type ContactInfo struct { - Phone string - Email string -} - -// Contact will generate a struct with information randomly populated contact information -func Contact() *ContactInfo { - return &ContactInfo{ - Phone: Phone(), - Email: Email(), - } -} - -// Phone will generate a random phone number string -func Phone() string { - return replaceWithNumbers("##########") -} - -// PhoneFormatted will generate a random phone number string -func PhoneFormatted() string { - return replaceWithNumbers(getRandValue([]string{"contact", "phone"})) -} - -// Email will generate a random email string -func Email() string { - var email string - - email = getRandValue([]string{"person", "first"}) + getRandValue([]string{"person", "last"}) - email += "@" - email += getRandValue([]string{"person", "last"}) + "." 
+ getRandValue([]string{"internet", "domain_suffix"}) - - return strings.ToLower(email) -} diff --git a/vendor/github.com/brianvoe/gofakeit/currency.go b/vendor/github.com/brianvoe/gofakeit/currency.go deleted file mode 100644 index c25e4d62a7aa..000000000000 --- a/vendor/github.com/brianvoe/gofakeit/currency.go +++ /dev/null @@ -1,38 +0,0 @@ -package gofakeit - -import ( - "math" - "math/rand" - - "github.com/brianvoe/gofakeit/data" -) - -// CurrencyInfo is a struct of currency information -type CurrencyInfo struct { - Short string - Long string -} - -// Currency will generate a struct with random currency information -func Currency() *CurrencyInfo { - index := rand.Intn(len(data.Data["currency"]["short"])) - return &CurrencyInfo{ - Short: data.Data["currency"]["short"][index], - Long: data.Data["currency"]["long"][index], - } -} - -// CurrencyShort will generate a random short currency value -func CurrencyShort() string { - return getRandValue([]string{"currency", "short"}) -} - -// CurrencyLong will generate a random long currency name -func CurrencyLong() string { - return getRandValue([]string{"currency", "long"}) -} - -// Price will take in a min and max value and return a formatted price -func Price(min, max float64) float64 { - return math.Floor(randFloat64Range(min, max)*100) / 100 -} diff --git a/vendor/github.com/brianvoe/gofakeit/data/address.go b/vendor/github.com/brianvoe/gofakeit/data/address.go deleted file mode 100644 index 671cdda91375..000000000000 --- a/vendor/github.com/brianvoe/gofakeit/data/address.go +++ /dev/null @@ -1,15 +0,0 @@ -package data - -// Address consists of address information -var Address = map[string][]string{ - "number": {"#####", "####", "###"}, - "street_prefix": {"North", "East", "West", "South", "New", "Lake", "Port"}, - "street_name": {"Alley", "Avenue", "Branch", "Bridge", "Brook", "Brooks", "Burg", "Burgs", "Bypass", "Camp", "Canyon", "Cape", "Causeway", "Center", "Centers", "Circle", "Circles", "Cliff", "Cliffs", 
"Club", "Common", "Corner", "Corners", "Course", "Court", "Courts", "Cove", "Coves", "Creek", "Crescent", "Crest", "Crossing", "Crossroad", "Curve", "Dale", "Dam", "Divide", "Drive", "Drive", "Drives", "Estate", "Estates", "Expressway", "Extension", "Extensions", "Fall", "Falls", "Ferry", "Field", "Fields", "Flat", "Flats", "Ford", "Fords", "Forest", "Forge", "Forges", "Fork", "Forks", "Fort", "Freeway", "Garden", "Gardens", "Gateway", "Glen", "Glens", "Green", "Greens", "Grove", "Groves", "Harbor", "Harbors", "Haven", "Heights", "Highway", "Hill", "Hills", "Hollow", "Inlet", "Inlet", "Island", "Island", "Islands", "Islands", "Isle", "Isle", "Junction", "Junctions", "Key", "Keys", "Knoll", "Knolls", "Lake", "Lakes", "Land", "Landing", "Lane", "Light", "Lights", "Loaf", "Lock", "Locks", "Locks", "Lodge", "Lodge", "Loop", "Mall", "Manor", "Manors", "Meadow", "Meadows", "Mews", "Mill", "Mills", "Mission", "Mission", "Motorway", "Mount", "Mountain", "Mountain", "Mountains", "Mountains", "Neck", "Orchard", "Oval", "Overpass", "Park", "Parks", "Parkway", "Parkways", "Pass", "Passage", "Path", "Pike", "Pine", "Pines", "Place", "Plain", "Plains", "Plains", "Plaza", "Plaza", "Point", "Points", "Port", "Port", "Ports", "Ports", "Prairie", "Prairie", "Radial", "Ramp", "Ranch", "Rapid", "Rapids", "Rest", "Ridge", "Ridges", "River", "Road", "Road", "Roads", "Roads", "Route", "Row", "Rue", "Run", "Shoal", "Shoals", "Shore", "Shores", "Skyway", "Spring", "Springs", "Springs", "Spur", "Spurs", "Square", "Square", "Squares", "Squares", "Station", "Station", "Stravenue", "Stravenue", "Stream", "Stream", "Street", "Street", "Streets", "Summit", "Summit", "Terrace", "Throughway", "Trace", "Track", "Trafficway", "Trail", "Trail", "Tunnel", "Tunnel", "Turnpike", "Turnpike", "Underpass", "Union", "Unions", "Valley", "Valleys", "Via", "Viaduct", "View", "Views", "Village", "Village", "Villages", "Ville", "Vista", "Vista", "Walk", "Walks", "Wall", "Way", "Ways", "Well", "Wells"}, - 
"street_suffix": {"town", "ton", "land", "ville", "berg", "burgh", "borough", "bury", "view", "port", "mouth", "stad", "furt", "chester", "mouth", "fort", "haven", "side", "shire"}, - "city": {"{address.street_prefix} {name.first}{address.street_suffix}", "{address.street_prefix} {name.first}", "{name.first}{address.street_suffix}", "{name.last}{address.street_suffix}"}, - "state": {"Alabama", "Alaska", "Arizona", "Arkansas", "California", "Colorado", "Connecticut", "Delaware", "Florida", "Georgia", "Hawaii", "Idaho", "Illinois", "Indiana", "Iowa", "Kansas", "Kentucky", "Louisiana", "Maine", "Maryland", "Massachusetts", "Michigan", "Minnesota", "Mississippi", "Missouri", "Montana", "Nebraska", "Nevada", "New Hampshire", "New Jersey", "New Mexico", "New York", "North Carolina", "North Dakota", "Ohio", "Oklahoma", "Oregon", "Pennsylvania", "Rhode Island", "South Carolina", "South Dakota", "Tennessee", "Texas", "Utah", "Vermont", "Virginia", "Washington", "West Virginia", "Wisconsin", "Wyoming"}, - "state_abr": {"AL", "AK", "AS", "AZ", "AR", "CA", "CO", "CT", "DE", "DC", "FM", "FL", "GA", "GU", "HI", "ID", "IL", "IN", "IA", "KS", "KY", "LA", "ME", "MH", "MD", "MA", "MI", "MN", "MS", "MO", "MT", "NE", "NV", "NH", "NJ", "NM", "NY", "NC", "ND", "MP", "OH", "OK", "OR", "PW", "PA", "PR", "RI", "SC", "SD", "TN", "TX", "UT", "VT", "VI", "VA", "WA", "WV", "WI", "WY", "AE", "AA", "AP"}, - "zip": {"#####"}, - "country": {"Afghanistan", "Albania", "Algeria", "American Samoa", "Andorra", "Angola", "Anguilla", "Antarctica", "Antigua and Barbuda", "Argentina", "Armenia", "Aruba", "Australia", "Austria", "Azerbaijan", "Bahamas", "Bahrain", "Bangladesh", "Barbados", "Belarus", "Belgium", "Belize", "Benin", "Bermuda", "Bhutan", "Bolivia", "Bosnia and Herzegovina", "Botswana", "Bouvet Island", "Brazil", "British Indian Ocean Territory", "British Virgin Islands", "Brunei Darussalam", "Bulgaria", "Burkina Faso", "Burundi", "Cambodia", "Cameroon", "Canada", "Cape Verde", "Cayman Islands", 
"Central African Republic", "Chad", "Chile", "China", "Christmas Island", "Cocos (Keeling) Islands", "Colombia", "Comoros", "Congo", "Congo", "Cook Islands", "Costa Rica", "Cote Divoire", "Croatia", "Cuba", "Cyprus", "Czech Republic", "Denmark", "Djibouti", "Dominica", "Dominican Republic", "Ecuador", "Egypt", "El Salvador", "Equatorial Guinea", "Eritrea", "Estonia", "Ethiopia", "Faroe Islands", "Falkland Islands", "Fiji", "Finland", "France", "French Guiana", "French Polynesia", "French Southern Territories", "Gabon", "Gambia", "Georgia", "Germany", "Ghana", "Gibraltar", "Greece", "Greenland", "Grenada", "Guadeloupe", "Guam", "Guatemala", "Guernsey", "Guinea", "Guinea-Bissau", "Guyana", "Haiti", "Heard Island and McDonald Islands", "Holy See (Vatican City State)", "Honduras", "Hong Kong", "Hungary", "Iceland", "India", "Indonesia", "Iran", "Iraq", "Ireland", "Isle of Man", "Israel", "Italy", "Jamaica", "Japan", "Jersey", "Jordan", "Kazakhstan", "Kenya", "Kiribati", "Korea", "Korea", "Kuwait", "Kyrgyz Republic", "Lao Peoples Democratic Republic", "Latvia", "Lebanon", "Lesotho", "Liberia", "Libyan Arab Jamahiriya", "Liechtenstein", "Lithuania", "Luxembourg", "Macao", "Macedonia", "Madagascar", "Malawi", "Malaysia", "Maldives", "Mali", "Malta", "Marshall Islands", "Martinique", "Mauritania", "Mauritius", "Mayotte", "Mexico", "Micronesia", "Moldova", "Monaco", "Mongolia", "Montenegro", "Montserrat", "Morocco", "Mozambique", "Myanmar", "Namibia", "Nauru", "Nepal", "Netherlands Antilles", "Netherlands", "New Caledonia", "New Zealand", "Nicaragua", "Niger", "Nigeria", "Niue", "Norfolk Island", "Northern Mariana Islands", "Norway", "Oman", "Pakistan", "Palau", "Palestinian Territory", "Panama", "Papua New Guinea", "Paraguay", "Peru", "Philippines", "Pitcairn Islands", "Poland", "Portugal", "Puerto Rico", "Qatar", "Reunion", "Romania", "Russian Federation", "Rwanda", "Saint Barthelemy", "Saint Helena", "Saint Kitts and Nevis", "Saint Lucia", "Saint Martin", "Saint Pierre 
and Miquelon", "Saint Vincent and the Grenadines", "Samoa", "San Marino", "Sao Tome and Principe", "Saudi Arabia", "Senegal", "Serbia", "Seychelles", "Sierra Leone", "Singapore", "Slovakia (Slovak Republic)", "Slovenia", "Solomon Islands", "Somalia", "South Africa", "South Georgia and the South Sandwich Islands", "Spain", "Sri Lanka", "Sudan", "Suriname", "Svalbard & Jan Mayen Islands", "Swaziland", "Sweden", "Switzerland", "Syrian Arab Republic", "Taiwan", "Tajikistan", "Tanzania", "Thailand", "Timor-Leste", "Togo", "Tokelau", "Tonga", "Trinidad and Tobago", "Tunisia", "Turkey", "Turkmenistan", "Turks and Caicos Islands", "Tuvalu", "Uganda", "Ukraine", "United Arab Emirates", "United Kingdom", "United States of America", "United States Minor Outlying Islands", "United States Virgin Islands", "Uruguay", "Uzbekistan", "Vanuatu", "Venezuela", "Vietnam", "Wallis and Futuna", "Western Sahara", "Yemen", "Zambia", "Zimbabwe"}, - "country_abr": {"AF", "AL", "DZ", "AS", "AD", "AO", "AI", "AQ", "AG", "AR", "AM", "AW", "AU", "AT", "AZ", "BS", "BH", "BD", "BB", "BY", "BE", "BZ", "BJ", "BM", "BT", "BO", "BA", "BW", "BV", "BR", "IO", "BN", "BG", "BF", "BI", "KH", "CM", "CA", "CV", "KY", "CF", "TD", "CL", "CN", "CX", "CC", "CO", "KM", "CG", "CK", "CR", "CI", "HR", "CU", "CY", "CZ", "DK", "DJ", "DM", "DO", "TL", "EC", "EG", "SV", "GQ", "ER", "EE", "ET", "FK", "FO", "FJ", "FI", "FR", "FX", "GF", "PF", "TF", "GA", "GM", "GE", "DE", "GH", "GI", "GR", "GL", "GD", "GP", "GU", "GT", "GN", "GW", "GY", "HT", "HM", "HN", "HK", "HU", "IS", "IN", "ID", "IR", "IQ", "IE", "IL", "IT", "JM", "JP", "JO", "KZ", "KE", "KI", "KP", "KR", "KW", "KG", "LA", "LV", "LB", "LS", "LR", "LY", "LI", "LT", "LU", "MO", "MK", "MG", "MW", "MY", "MV", "ML", "MT", "MH", "MQ", "MR", "MU", "YT", "MX", "FM", "MD", "MC", "MN", "MS", "MA", "MZ", "MM", "NA", "NR", "NP", "NL", "AN", "NC", "NZ", "NI", "NE", "NG", "NU", "NF", "MP", "NO", "OM", "PK", "PW", "PA", "PG", "PY", "PE", "PH", "PN", "PL", "PT", "PR", "QA", "RE", 
"RO", "RU", "RW", "KN", "LC", "VC", "WS", "SM", "ST", "SA", "SN", "RS", "SC", "SL", "SG", "SK", "SI", "SB", "SO", "ZA", "ES", "LK", "SH", "PM", "SD", "SR", "SJ", "SZ", "SE", "CH", "SY", "TW", "TJ", "TZ", "TH", "TG", "TK", "TO", "TT", "TN", "TR", "TM", "TC", "TV", "UG", "UA", "AE", "GB", "US", "UM", "UY", "UZ", "VU", "VA", "VE", "VN", "VG", "VI", "WF", "EH", "YE", "YU", "ZR", "ZM", "ZW"}, -} diff --git a/vendor/github.com/brianvoe/gofakeit/data/beer.go b/vendor/github.com/brianvoe/gofakeit/data/beer.go deleted file mode 100644 index 1192907d5f29..000000000000 --- a/vendor/github.com/brianvoe/gofakeit/data/beer.go +++ /dev/null @@ -1,10 +0,0 @@ -package data - -// Beer consists of various beer information -var Beer = map[string][]string{ - "name": {"Pliny The Elder", "Founders Kentucky Breakfast", "Trappistes Rochefort 10", "HopSlam Ale", "Stone Imperial Russian Stout", "St. Bernardus Abt 12", "Founders Breakfast Stout", "Weihenstephaner Hefeweissbier", "Péché Mortel", "Celebrator Doppelbock", "Duvel", "Dreadnaught IPA", "Nugget Nectar", "La Fin Du Monde", "Bourbon County Stout", "Old Rasputin Russian Imperial Stout", "Two Hearted Ale", "Ruination IPA", "Schneider Aventinus", "Double Bastard Ale", "90 Minute IPA", "Hop Rod Rye", "Trappistes Rochefort 8", "Chimay Grande Réserve", "Stone IPA", "Arrogant Bastard Ale", "Edmund Fitzgerald Porter", "Chocolate St", "Oak Aged Yeti Imperial Stout", "Ten FIDY", "Storm King Stout", "Shakespeare Oatmeal", "Alpha King Pale Ale", "Westmalle Trappist Tripel", "Samuel Smith’s Imperial IPA", "Yeti Imperial Stout", "Hennepin", "Samuel Smith’s Oatmeal Stout", "Brooklyn Black", "Oaked Arrogant Bastard Ale", "Sublimely Self-Righteous Ale", "Trois Pistoles", "Bell’s Expedition", "Sierra Nevada Celebration Ale", "Sierra Nevada Bigfoot Barleywine Style Ale", "Racer 5 India Pale Ale, Bear Republic Bre", "Orval Trappist Ale", "Hercules Double IPA", "Maharaj", "Maudite"}, - "hop": {"Ahtanum", "Amarillo", "Bitter Gold", "Bravo", "Brewer’s 
Gold", "Bullion", "Cascade", "Cashmere", "Centennial", "Chelan", "Chinook", "Citra", "Cluster", "Columbia", "Columbus", "Comet", "Crystal", "Equinox", "Eroica", "Fuggle", "Galena", "Glacier", "Golding", "Hallertau", "Horizon", "Liberty", "Magnum", "Millennium", "Mosaic", "Mt. Hood", "Mt. Rainier", "Newport", "Northern Brewer", "Nugget", "Olympic", "Palisade", "Perle", "Saaz", "Santiam", "Simcoe", "Sorachi Ace", "Sterling", "Summit", "Tahoma", "Tettnang", "TriplePearl", "Ultra", "Vanguard", "Warrior", "Willamette", "Yakima Gol"}, - "yeast": {"1007 - German Ale", "1010 - American Wheat", "1028 - London Ale", "1056 - American Ale", "1084 - Irish Ale", "1098 - British Ale", "1099 - Whitbread Ale", "1187 - Ringwood Ale", "1272 - American Ale II", "1275 - Thames Valley Ale", "1318 - London Ale III", "1332 - Northwest Ale", "1335 - British Ale II", "1450 - Dennys Favorite 50", "1469 - West Yorkshire Ale", "1728 - Scottish Ale", "1968 - London ESB Ale", "2565 - Kölsch", "1214 - Belgian Abbey", "1388 - Belgian Strong Ale", "1762 - Belgian Abbey II", "3056 - Bavarian Wheat Blend", "3068 - Weihenstephan Weizen", "3278 - Belgian Lambic Blend", "3333 - German Wheat", "3463 - Forbidden Fruit", "3522 - Belgian Ardennes", "3638 - Bavarian Wheat", "3711 - French Saison", "3724 - Belgian Saison", "3763 - Roeselare Ale Blend", "3787 - Trappist High Gravity", "3942 - Belgian Wheat", "3944 - Belgian Witbier", "2000 - Budvar Lager", "2001 - Urquell Lager", "2007 - Pilsen Lager", "2035 - American Lager", "2042 - Danish Lager", "2112 - California Lager", "2124 - Bohemian Lager", "2206 - Bavarian Lager", "2278 - Czech Pils", "2308 - Munich Lager", "2633 - Octoberfest Lager Blend", "5112 - Brettanomyces bruxellensis", "5335 - Lactobacillus", "5526 - Brettanomyces lambicus", "5733 - Pediococcus"}, - "malt": {"Black malt", "Caramel", "Carapils", "Chocolate", "Munich", "Caramel", "Carapils", "Chocolate malt", "Munich", "Pale", "Roasted barley", "Rye malt", "Special roast", "Victory", "Vienna", 
"Wheat mal"}, - "style": {"Light Lager", "Pilsner", "European Amber Lager", "Dark Lager", "Bock", "Light Hybrid Beer", "Amber Hybrid Beer", "English Pale Ale", "Scottish And Irish Ale", "Merican Ale", "English Brown Ale", "Porter", "Stout", "India Pale Ale", "German Wheat And Rye Beer", "Belgian And French Ale", "Sour Ale", "Belgian Strong Ale", "Strong Ale", "Fruit Beer", "Vegetable Beer", "Smoke-flavored", "Wood-aged Beer"}, -} diff --git a/vendor/github.com/brianvoe/gofakeit/data/colors.go b/vendor/github.com/brianvoe/gofakeit/data/colors.go deleted file mode 100644 index 3aca817d69f3..000000000000 --- a/vendor/github.com/brianvoe/gofakeit/data/colors.go +++ /dev/null @@ -1,7 +0,0 @@ -package data - -// Colors consists of color information -var Colors = map[string][]string{ - "safe": {"black", "maroon", "green", "navy", "olive", "purple", "teal", "lime", "blue", "silver", "gray", "yellow", "fuchsia", "aqua", "white"}, - "full": {"AliceBlue", "AntiqueWhite", "Aqua", "Aquamarine", "Azure", "Beige", "Bisque", "Black", "BlanchedAlmond", "Blue", "BlueViolet", "Brown", "BurlyWood", "CadetBlue", "Chartreuse", "Chocolate", "Coral", "CornflowerBlue", "Cornsilk", "Crimson", "Cyan", "DarkBlue", "DarkCyan", "DarkGoldenRod", "DarkGray", "DarkGreen", "DarkKhaki", "DarkMagenta", "DarkOliveGreen", "Darkorange", "DarkOrchid", "DarkRed", "DarkSalmon", "DarkSeaGreen", "DarkSlateBlue", "DarkSlateGray", "DarkTurquoise", "DarkViolet", "DeepPink", "DeepSkyBlue", "DimGray", "DimGrey", "DodgerBlue", "FireBrick", "FloralWhite", "ForestGreen", "Fuchsia", "Gainsboro", "GhostWhite", "Gold", "GoldenRod", "Gray", "Green", "GreenYellow", "HoneyDew", "HotPink", "IndianRed ", "Indigo ", "Ivory", "Khaki", "Lavender", "LavenderBlush", "LawnGreen", "LemonChiffon", "LightBlue", "LightCoral", "LightCyan", "LightGoldenRodYellow", "LightGray", "LightGreen", "LightPink", "LightSalmon", "LightSeaGreen", "LightSkyBlue", "LightSlateGray", "LightSteelBlue", "LightYellow", "Lime", "LimeGreen", "Linen", 
"Magenta", "Maroon", "MediumAquaMarine", "MediumBlue", "MediumOrchid", "MediumPurple", "MediumSeaGreen", "MediumSlateBlue", "MediumSpringGreen", "MediumTurquoise", "MediumVioletRed", "MidnightBlue", "MintCream", "MistyRose", "Moccasin", "NavajoWhite", "Navy", "OldLace", "Olive", "OliveDrab", "Orange", "OrangeRed", "Orchid", "PaleGoldenRod", "PaleGreen", "PaleTurquoise", "PaleVioletRed", "PapayaWhip", "PeachPuff", "Peru", "Pink", "Plum", "PowderBlue", "Purple", "Red", "RosyBrown", "RoyalBlue", "SaddleBrown", "Salmon", "SandyBrown", "SeaGreen", "SeaShell", "Sienna", "Silver", "SkyBlue", "SlateBlue", "SlateGray", "Snow", "SpringGreen", "SteelBlue", "Tan", "Teal", "Thistle", "Tomato", "Turquoise", "Violet", "Wheat", "White", "WhiteSmoke", "Yellow", "YellowGreen"}, -} diff --git a/vendor/github.com/brianvoe/gofakeit/data/company.go b/vendor/github.com/brianvoe/gofakeit/data/company.go deleted file mode 100644 index b2a3790c7c68..000000000000 --- a/vendor/github.com/brianvoe/gofakeit/data/company.go +++ /dev/null @@ -1,9 +0,0 @@ -package data - -// Company consists of company information -var Company = map[string][]string{ - "name": {"{person.last} {company.suffix}", "{person.last}-{person.last}", "{person.last}, {person.last} and {person.last}"}, - "suffix": {"Inc", "and Sons", "LLC", "Group"}, - "buzzwords": {"Adaptive", "Advanced", "Ameliorated", "Assimilated", "Automated", "Balanced", "Business-focused", "Centralized", "Cloned", "Compatible", "Configurable", "Cross-group", "Cross-platform", "Customer-focused", "Customizable", "De-engineered", "Decentralized", "Devolved", "Digitized", "Distributed", "Diverse", "Down-sized", "Enhanced", "Enterprise-wide", "Ergonomic", "Exclusive", "Expanded", "Extended", "Face to face", "Focused", "Front-line", "Fully-configurable", "Function-based", "Fundamental", "Future-proofed", "Grass-roots", "Horizontal", "Implemented", "Innovative", "Integrated", "Intuitive", "Inverse", "Managed", "Mandatory", "Monitored", "Multi-channelled", 
"Multi-lateral", "Multi-layered", "Multi-tiered", "Networked", "Object-based", "Open-architected", "Open-source", "Operative", "Optimized", "Optional", "Organic", "Organized", "Persevering", "Persistent", "Phased", "Polarised", "Pre-emptive", "Proactive", "Profit-focused", "Profound", "Programmable", "Progressive", "Public-key", "Quality-focused", "Re-contextualized", "Re-engineered", "Reactive", "Realigned", "Reduced", "Reverse-engineered", "Right-sized", "Robust", "Seamless", "Secured", "Self-enabling", "Sharable", "Stand-alone", "Streamlined", "Switchable", "Synchronised", "Synergistic", "Synergized", "Team-oriented", "Total", "Triple-buffered", "Universal", "Up-sized", "Upgradable", "User-centric", "User-friendly", "Versatile", "Virtual", "Vision-oriented", "Visionary", "24 hour", "24/7", "3rd generation", "4th generation", "5th generation", "6th generation", "actuating", "analyzing", "asymmetric", "asynchronous", "attitude-oriented", "background", "bandwidth-monitored", "bi-directional", "bifurcated", "bottom-line", "clear-thinking", "client-driven", "client-server", "coherent", "cohesive", "composite", "content-based", "context-sensitive", "contextually-based", "dedicated", "demand-driven", "didactic", "directional", "discrete", "disintermediate", "dynamic", "eco-centric", "empowering", "encompassing", "even-keeled", "executive", "explicit", "exuding", "fault-tolerant", "foreground", "fresh-thinking", "full-range", "global", "grid-enabled", "heuristic", "high-level", "holistic", "homogeneous", "human-resource", "hybrid", "impactful", "incremental", "intangible", "interactive", "intermediate", "leading edge", "local", "logistical", "maximized", "methodical", "mission-critical", "mobile", "modular", "motivating", "multi-state", "multi-tasking", "multimedia", "national", "needs-based", "neutral", "next generation", "non-volatile", "object-oriented", "optimal", "optimizing", "radical", "real-time", "reciprocal", "regional", "responsive", "scalable", "secondary", 
"solution-oriented", "stable", "static", "system-worthy", "systematic", "systemic", "tangible", "tertiary", "transitional", "uniform", "upward-trending", "user-facing", "value-added", "web-enabled", "well-modulated", "zero administration", "zero defect", "zero tolerance", "Graphic Interface", "Graphical User Interface", "ability", "access", "adapter", "algorithm", "alliance", "analyzer", "application", "approach", "architecture", "archive", "array", "artificial intelligence", "attitude", "benchmark", "budgetary management", "capability", "capacity", "challenge", "circuit", "collaboration", "complexity", "concept", "conglomeration", "contingency", "core", "customer loyalty", "data-warehouse", "database", "definition", "emulation", "encoding", "encryption", "extranet", "firmware", "flexibility", "focus group", "forecast", "frame", "framework", "function", "functionalities", "groupware", "hardware", "help-desk", "hierarchy", "hub", "implementation", "info-mediaries", "infrastructure", "initiative", "installation", "instruction set", "interface", "internet solution", "intranet", "knowledge base", "knowledge user", "leverage", "local area network", "matrices", "matrix", "methodology", "middleware", "migration", "model", "moderator", "monitoring", "moratorium", "neural-net", "open architecture", "open system", "orchestration", "paradigm", "parallelism", "policy", "portal", "pricing structure", "process improvement", "product", "productivity", "project", "projection", "protocol", "secured line", "service-desk", "software", "solution", "standardization", "strategy", "structure", "success", "superstructure", "support", "synergy", "system engine", "task-force", "throughput", "time-frame", "toolset", "utilisation", "website", "workforce"}, - "bs": {"aggregate", "architect", "benchmark", "brand", "cultivate", "deliver", "deploy", "disintermediate", "drive", "e-enable", "embrace", "empower", "enable", "engage", "engineer", "enhance", "envisioneer", "evolve", "expedite", 
"exploit", "extend", "facilitate", "generate", "grow", "harness", "implement", "incentivize", "incubate", "innovate", "integrate", "iterate", "leverage", "matrix", "maximize", "mesh", "monetize", "morph", "optimize", "orchestrate", "productize", "recontextualize", "redefine", "reintermediate", "reinvent", "repurpose", "revolutionize", "scale", "seize", "strategize", "streamline", "syndicate", "synergize", "synthesize", "target", "transform", "transition", "unleash", "utilize", "visualize", "whiteboard", "24/365", "24/7", "B2B", "B2C", "back-end", "best-of-breed", "bleeding-edge", "bricks-and-clicks", "clicks-and-mortar", "collaborative", "compelling", "cross-media", "cross-platform", "customized", "cutting-edge", "distributed", "dot-com", "dynamic", "e-business", "efficient", "end-to-end", "enterprise", "extensible", "frictionless", "front-end", "global", "granular", "holistic", "impactful", "innovative", "integrated", "interactive", "intuitive", "killer", "leading-edge", "magnetic", "mission-critical", "next-generation", "one-to-one", "open-source", "out-of-the-box", "plug-and-play", "proactive", "real-time", "revolutionary", "rich", "robust", "scalable", "seamless", "sexy", "sticky", "strategic", "synergistic", "transparent", "turn-key", "ubiquitous", "user-centric", "value-added", "vertical", "viral", "virtual", "visionary", "web-enabled", "wireless", "world-class", "ROI", "action-items", "applications", "architectures", "bandwidth", "channels", "communities", "content", "convergence", "deliverables", "e-business", "e-commerce", "e-markets", "e-services", "e-tailers", "experiences", "eyeballs", "functionalities", "infomediaries", "infrastructures", "initiatives", "interfaces", "markets", "methodologies", "metrics", "mindshare", "models", "networks", "niches", "paradigms", "partnerships", "platforms", "portals", "relationships", "schemas", "solutions", "supply-chains", "synergies", "systems", "technologies", "users", "vortals", "web services", "web-readiness"}, 
-} diff --git a/vendor/github.com/brianvoe/gofakeit/data/computer.go b/vendor/github.com/brianvoe/gofakeit/data/computer.go deleted file mode 100644 index b682c6f820cc..000000000000 --- a/vendor/github.com/brianvoe/gofakeit/data/computer.go +++ /dev/null @@ -1,8 +0,0 @@ -package data - -// Computer consists of computer information -var Computer = map[string][]string{ - "linux_processor": {"i686", "x86_64"}, - "mac_processor": {"Intel", "PPC", "U; Intel", "U; PPC"}, - "windows_platform": {"Windows NT 6.2", "Windows NT 6.1", "Windows NT 6.0", "Windows NT 5.2", "Windows NT 5.1", "Windows NT 5.01", "Windows NT 5.0", "Windows NT 4.0", "Windows 98; Win 9x 4.90", "Windows 98", "Windows 95", "Windows CE"}, -} diff --git a/vendor/github.com/brianvoe/gofakeit/data/contact.go b/vendor/github.com/brianvoe/gofakeit/data/contact.go deleted file mode 100644 index 88b957961dbb..000000000000 --- a/vendor/github.com/brianvoe/gofakeit/data/contact.go +++ /dev/null @@ -1,6 +0,0 @@ -package data - -// Contact consists of contact information -var Contact = map[string][]string{ - "phone": {"###-###-####", "(###)###-####", "1-###-###-####", "###.###.####"}, -} diff --git a/vendor/github.com/brianvoe/gofakeit/data/currency.go b/vendor/github.com/brianvoe/gofakeit/data/currency.go deleted file mode 100644 index 13b8019973ca..000000000000 --- a/vendor/github.com/brianvoe/gofakeit/data/currency.go +++ /dev/null @@ -1,7 +0,0 @@ -package data - -// Currency consists of currency information -var Currency = map[string][]string{ - "short": {"AED", "AFN", "ALL", "AMD", "ANG", "AOA", "ARS", "AUD", "AWG", "AZN", "BAM", "BBD", "BDT", "BGN", "BHD", "BIF", "BMD", "BND", "BOB", "BRL", "BSD", "BTN", "BWP", "BYR", "BZD", "CAD", "CDF", "CHF", "CLP", "CNY", "COP", "CRC", "CUC", "CUP", "CVE", "CZK", "DJF", "DKK", "DOP", "DZD", "EGP", "ERN", "ETB", "EUR", "FJD", "FKP", "GBP", "GEL", "GGP", "GHS", "GIP", "GMD", "GNF", "GTQ", "GYD", "HKD", "HNL", "HRK", "HTG", "HUF", "IDR", "ILS", "IMP", "INR", "IQD", "IRR", 
"ISK", "JEP", "JMD", "JOD", "JPY", "KES", "KGS", "KHR", "KMF", "KPW", "KRW", "KWD", "KYD", "KZT", "LAK", "LBP", "LKR", "LRD", "LSL", "LTL", "LYD", "MAD", "MDL", "MGA", "MKD", "MMK", "MNT", "MOP", "MRO", "MUR", "MVR", "MWK", "MXN", "MYR", "MZN", "NAD", "NGN", "NIO", "NOK", "NPR", "NZD", "OMR", "PAB", "PEN", "PGK", "PHP", "PKR", "PLN", "PYG", "QAR", "RON", "RSD", "RUB", "RWF", "SAR", "SBD", "SCR", "SDG", "SEK", "SGD", "SHP", "SLL", "SOS", "SPL", "SRD", "STD", "SVC", "SYP", "SZL", "THB", "TJS", "TMT", "TND", "TOP", "TRY", "TTD", "TVD", "TWD", "TZS", "UAH", "UGX", "USD", "UYU", "UZS", "VEF", "VND", "VUV", "WST", "XAF", "XCD", "XDR", "XOF", "XPF", "YER", "ZAR", "ZMW", "ZWD"}, - "long": {"United Arab Emirates Dirham", "Afghanistan Afghani", "Albania Lek", "Armenia Dram", "Netherlands Antilles Guilder", "Angola Kwanza", "Argentina Peso", "Australia Dollar", "Aruba Guilder", "Azerbaijan New Manat", "Bosnia and Herzegovina Convertible Marka", "Barbados Dollar", "Bangladesh Taka", "Bulgaria Lev", "Bahrain Dinar", "Burundi Franc", "Bermuda Dollar", "Brunei Darussalam Dollar", "Bolivia Boliviano", "Brazil Real", "Bahamas Dollar", "Bhutan Ngultrum", "Botswana Pula", "Belarus Ruble", "Belize Dollar", "Canada Dollar", "Congo/Kinshasa Franc", "Switzerland Franc", "Chile Peso", "China Yuan Renminbi", "Colombia Peso", "Costa Rica Colon", "Cuba Convertible Peso", "Cuba Peso", "Cape Verde Escudo", "Czech Republic Koruna", "Djibouti Franc", "Denmark Krone", "Dominican Republic Peso", "Algeria Dinar", "Egypt Pound", "Eritrea Nakfa", "Ethiopia Birr", "Euro Member Countries", "Fiji Dollar", "Falkland Islands (Malvinas) Pound", "United Kingdom Pound", "Georgia Lari", "Guernsey Pound", "Ghana Cedi", "Gibraltar Pound", "Gambia Dalasi", "Guinea Franc", "Guatemala Quetzal", "Guyana Dollar", "Hong Kong Dollar", "Honduras Lempira", "Croatia Kuna", "Haiti Gourde", "Hungary Forint", "Indonesia Rupiah", "Israel Shekel", "Isle of Man Pound", "India Rupee", "Iraq Dinar", "Iran Rial", "Iceland Krona", 
"Jersey Pound", "Jamaica Dollar", "Jordan Dinar", "Japan Yen", "Kenya Shilling", "Kyrgyzstan Som", "Cambodia Riel", "Comoros Franc", "Korea (North) Won", "Korea (South) Won", "Kuwait Dinar", "Cayman Islands Dollar", "Kazakhstan Tenge", "Laos Kip", "Lebanon Pound", "Sri Lanka Rupee", "Liberia Dollar", "Lesotho Loti", "Lithuania Litas", "Libya Dinar", "Morocco Dirham", "Moldova Leu", "Madagascar Ariary", "Macedonia Denar", "Myanmar (Burma) Kyat", "Mongolia Tughrik", "Macau Pataca", "Mauritania Ouguiya", "Mauritius Rupee", "Maldives (Maldive Islands) Rufiyaa", "Malawi Kwacha", "Mexico Peso", "Malaysia Ringgit", "Mozambique Metical", "Namibia Dollar", "Nigeria Naira", "Nicaragua Cordoba", "Norway Krone", "Nepal Rupee", "New Zealand Dollar", "Oman Rial", "Panama Balboa", "Peru Nuevo Sol", "Papua New Guinea Kina", "Philippines Peso", "Pakistan Rupee", "Poland Zloty", "Paraguay Guarani", "Qatar Riyal", "Romania New Leu", "Serbia Dinar", "Russia Ruble", "Rwanda Franc", "Saudi Arabia Riyal", "Solomon Islands Dollar", "Seychelles Rupee", "Sudan Pound", "Sweden Krona", "Singapore Dollar", "Saint Helena Pound", "Sierra Leone Leone", "Somalia Shilling", "Seborga Luigino", "Suriname Dollar", "São Tomé and Príncipe Dobra", "El Salvador Colon", "Syria Pound", "Swaziland Lilangeni", "Thailand Baht", "Tajikistan Somoni", "Turkmenistan Manat", "Tunisia Dinar", "Tonga Pa'anga", "Turkey Lira", "Trinidad and Tobago Dollar", "Tuvalu Dollar", "Taiwan New Dollar", "Tanzania Shilling", "Ukraine Hryvnia", "Uganda Shilling", "United States Dollar", "Uruguay Peso", "Uzbekistan Som", "Venezuela Bolivar", "Viet Nam Dong", "Vanuatu Vatu", "Samoa Tala", "Communauté Financière Africaine (BEAC) CFA Franc BEAC", "East Caribbean Dollar", "International Monetary Fund (IMF) Special Drawing Rights", "Communauté Financière Africaine (BCEAO) Franc", "Comptoirs Français du Pacifique (CFP) Franc", "Yemen Rial", "South Africa Rand", "Zambia Kwacha", "Zimbabwe Dollar"}, -} diff --git 
a/vendor/github.com/brianvoe/gofakeit/data/data.go b/vendor/github.com/brianvoe/gofakeit/data/data.go deleted file mode 100644 index d751c9994356..000000000000 --- a/vendor/github.com/brianvoe/gofakeit/data/data.go +++ /dev/null @@ -1,28 +0,0 @@ -package data - -// Data consists of the main set of fake information -var Data = map[string]map[string][]string{ - "person": Person, - "contact": Contact, - "address": Address, - "company": Company, - "job": Job, - "lorem": Lorem, - "internet": Internet, - "file": Files, - "color": Colors, - "computer": Computer, - "payment": Payment, - "hipster": Hipster, - "beer": Beer, - "hacker": Hacker, - "currency": Currency, - "log_level": LogLevels, - "timezone": TimeZone, - "vehicle": Vehicle, -} - -// IntData consists of the main set of fake information (integer only) -var IntData = map[string]map[string][]int{ - "status_code": StatusCodes, -} diff --git a/vendor/github.com/brianvoe/gofakeit/data/datetime.go b/vendor/github.com/brianvoe/gofakeit/data/datetime.go deleted file mode 100644 index 3347120a67e2..000000000000 --- a/vendor/github.com/brianvoe/gofakeit/data/datetime.go +++ /dev/null @@ -1,9 +0,0 @@ -package data - -// TimeZone is an array of short and long timezones -var TimeZone = map[string][]string{ - "offset": {"-12", "-11", "-10", "-8", "-7", "-7", "-8", "-7", "-6", "-6", "-6", "-5", "-5", "-6", "-5", "-4", "-4", "-4.5", "-4", "-3", "-4", "-4", "-4", "-2.5", "-3", "-3", "-3", "-3", "-3", "-3", "-2", "-1", "0", "-1", "1", "0", "0", "1", "1", "0", "2", "2", "2", "2", "1", "1", "3", "3", "2", "3", "3", "2", "3", "3", "3", "2", "3", "3", "3", "3", "3", "3", "4", "4.5", "4", "5", "4", "4", "4", "4.5", "5", "5", "5", "5.5", "5.5", "5.75", "6", "6", "6.5", "7", "7", "8", "8", "8", "8", "8", "8", "9", "9", "9", "9.5", "9.5", "10", "10", "10", "10", "10", "11", "11", "12", "12", "12", "12", "13", "13", "13"}, - "abr": {"DST", "U", "HST", "AKDT", "PDT", "PDT", "PST", "UMST", "MDT", "MDT", "CAST", "CDT", "CDT", "CCST", "SPST", 
"EDT", "UEDT", "VST", "PYT", "ADT", "CBST", "SWST", "PSST", "NDT", "ESAST", "AST", "SEST", "GDT", "MST", "BST", "U", "MDT", "ADT", "CVST", "MDT", "UTC", "GMT", "BST", "GDT", "GST", "WEDT", "CEDT", "RDT", "CEDT", "WCAST", "NST", "GDT", "MEDT", "EST", "SDT", "EEDT", "SAST", "FDT", "TDT", "JDT", "LST", "JST", "AST", "KST", "AST", "EAST", "MSK", "SAMT", "IDT", "AST", "ADT", "MST", "GST", "CST", "AST", "WAST", "YEKT", "PKT", "IST", "SLST", "NST", "CAST", "BST", "MST", "SAST", "NCAST", "CST", "NAST", "MPST", "WAST", "TST", "UST", "NAEST", "JST", "KST", "CAST", "ACST", "EAST", "AEST", "WPST", "TST", "YST", "CPST", "VST", "NZST", "U", "FST", "MST", "KDT", "TST", "SST"}, - "text": {"Dateline Standard Time", "UTC-11", "Hawaiian Standard Time", "Alaskan Standard Time", "Pacific Standard Time (Mexico)", "Pacific Daylight Time", "Pacific Standard Time", "US Mountain Standard Time", "Mountain Standard Time (Mexico)", "Mountain Standard Time", "Central America Standard Time", "Central Standard Time", "Central Standard Time (Mexico)", "Canada Central Standard Time", "SA Pacific Standard Time", "Eastern Standard Time", "US Eastern Standard Time", "Venezuela Standard Time", "Paraguay Standard Time", "Atlantic Standard Time", "Central Brazilian Standard Time", "SA Western Standard Time", "Pacific SA Standard Time", "Newfoundland Standard Time", "E. South America Standard Time", "Argentina Standard Time", "SA Eastern Standard Time", "Greenland Standard Time", "Montevideo Standard Time", "Bahia Standard Time", "UTC-02", "Mid-Atlantic Standard Time", "Azores Standard Time", "Cape Verde Standard Time", "Morocco Standard Time", "UTC", "Greenwich Mean Time", "British Summer Time", "GMT Standard Time", "Greenwich Standard Time", "W. Europe Standard Time", "Central Europe Standard Time", "Romance Standard Time", "Central European Standard Time", "W. 
Central Africa Standard Time", "Namibia Standard Time", "GTB Standard Time", "Middle East Standard Time", "Egypt Standard Time", "Syria Standard Time", "E. Europe Standard Time", "South Africa Standard Time", "FLE Standard Time", "Turkey Standard Time", "Israel Standard Time", "Libya Standard Time", "Jordan Standard Time", "Arabic Standard Time", "Kaliningrad Standard Time", "Arab Standard Time", "E. Africa Standard Time", "Moscow Standard Time", "Samara Time", "Iran Standard Time", "Arabian Standard Time", "Azerbaijan Standard Time", "Mauritius Standard Time", "Georgian Standard Time", "Caucasus Standard Time", "Afghanistan Standard Time", "West Asia Standard Time", "Yekaterinburg Time", "Pakistan Standard Time", "India Standard Time", "Sri Lanka Standard Time", "Nepal Standard Time", "Central Asia Standard Time", "Bangladesh Standard Time", "Myanmar Standard Time", "SE Asia Standard Time", "N. Central Asia Standard Time", "China Standard Time", "North Asia Standard Time", "Singapore Standard Time", "W. Australia Standard Time", "Taipei Standard Time", "Ulaanbaatar Standard Time", "North Asia East Standard Time", "Japan Standard Time", "Korea Standard Time", "Cen. Australia Standard Time", "AUS Central Standard Time", "E. 
Australia Standard Time", "AUS Eastern Standard Time", "West Pacific Standard Time", "Tasmania Standard Time", "Yakutsk Standard Time", "Central Pacific Standard Time", "Vladivostok Standard Time", "New Zealand Standard Time", "UTC+12", "Fiji Standard Time", "Magadan Standard Time", "Kamchatka Standard Time", "Tonga Standard Time", "Samoa Standard Time"}, - "full": {"(UTC-12:00) International Date Line West", "(UTC-11:00) Coordinated Universal Time-11", "(UTC-10:00) Hawaii", "(UTC-09:00) Alaska", "(UTC-08:00) Baja California", "(UTC-07:00) Pacific Time (US & Canada)", "(UTC-08:00) Pacific Time (US & Canada)", "(UTC-07:00) Arizona", "(UTC-07:00) Chihuahua, La Paz, Mazatlan", "(UTC-07:00) Mountain Time (US & Canada)", "(UTC-06:00) Central America", "(UTC-06:00) Central Time (US & Canada)", "(UTC-06:00) Guadalajara, Mexico City, Monterrey", "(UTC-06:00) Saskatchewan", "(UTC-05:00) Bogota, Lima, Quito", "(UTC-05:00) Eastern Time (US & Canada)", "(UTC-05:00) Indiana (East)", "(UTC-04:30) Caracas", "(UTC-04:00) Asuncion", "(UTC-04:00) Atlantic Time (Canada)", "(UTC-04:00) Cuiaba", "(UTC-04:00) Georgetown, La Paz, Manaus, San Juan", "(UTC-04:00) Santiago", "(UTC-03:30) Newfoundland", "(UTC-03:00) Brasilia", "(UTC-03:00) Buenos Aires", "(UTC-03:00) Cayenne, Fortaleza", "(UTC-03:00) Greenland", "(UTC-03:00) Montevideo", "(UTC-03:00) Salvador", "(UTC-02:00) Coordinated Universal Time-02", "(UTC-02:00) Mid-Atlantic - Old", "(UTC-01:00) Azores", "(UTC-01:00) Cape Verde Is.", "(UTC) Casablanca", "(UTC) Coordinated Universal Time", "(UTC) Edinburgh, London", "(UTC+01:00) Edinburgh, London", "(UTC) Dublin, Lisbon", "(UTC) Monrovia, Reykjavik", "(UTC+01:00) Amsterdam, Berlin, Bern, Rome, Stockholm, Vienna", "(UTC+01:00) Belgrade, Bratislava, Budapest, Ljubljana, Prague", "(UTC+01:00) Brussels, Copenhagen, Madrid, Paris", "(UTC+01:00) Sarajevo, Skopje, Warsaw, Zagreb", "(UTC+01:00) West Central Africa", "(UTC+01:00) Windhoek", "(UTC+02:00) Athens, Bucharest", "(UTC+02:00) Beirut", 
"(UTC+02:00) Cairo", "(UTC+02:00) Damascus", "(UTC+02:00) E. Europe", "(UTC+02:00) Harare, Pretoria", "(UTC+02:00) Helsinki, Kyiv, Riga, Sofia, Tallinn, Vilnius", "(UTC+03:00) Istanbul", "(UTC+02:00) Jerusalem", "(UTC+02:00) Tripoli", "(UTC+03:00) Amman", "(UTC+03:00) Baghdad", "(UTC+03:00) Kaliningrad, Minsk", "(UTC+03:00) Kuwait, Riyadh", "(UTC+03:00) Nairobi", "(UTC+03:00) Moscow, St. Petersburg, Volgograd", "(UTC+04:00) Samara, Ulyanovsk, Saratov", "(UTC+03:30) Tehran", "(UTC+04:00) Abu Dhabi, Muscat", "(UTC+04:00) Baku", "(UTC+04:00) Port Louis", "(UTC+04:00) Tbilisi", "(UTC+04:00) Yerevan", "(UTC+04:30) Kabul", "(UTC+05:00) Ashgabat, Tashkent", "(UTC+05:00) Yekaterinburg", "(UTC+05:00) Islamabad, Karachi", "(UTC+05:30) Chennai, Kolkata, Mumbai, New Delhi", "(UTC+05:30) Sri Jayawardenepura", "(UTC+05:45) Kathmandu", "(UTC+06:00) Astana", "(UTC+06:00) Dhaka", "(UTC+06:30) Yangon (Rangoon)", "(UTC+07:00) Bangkok, Hanoi, Jakarta", "(UTC+07:00) Novosibirsk", "(UTC+08:00) Beijing, Chongqing, Hong Kong, Urumqi", "(UTC+08:00) Krasnoyarsk", "(UTC+08:00) Kuala Lumpur, Singapore", "(UTC+08:00) Perth", "(UTC+08:00) Taipei", "(UTC+08:00) Ulaanbaatar", "(UTC+09:00) Irkutsk", "(UTC+09:00) Osaka, Sapporo, Tokyo", "(UTC+09:00) Seoul", "(UTC+09:30) Adelaide", "(UTC+09:30) Darwin", "(UTC+10:00) Brisbane", "(UTC+10:00) Canberra, Melbourne, Sydney", "(UTC+10:00) Guam, Port Moresby", "(UTC+10:00) Hobart", "(UTC+10:00) Yakutsk", "(UTC+11:00) Solomon Is., New Caledonia", "(UTC+11:00) Vladivostok", "(UTC+12:00) Auckland, Wellington", "(UTC+12:00) Coordinated Universal Time+12", "(UTC+12:00) Fiji", "(UTC+12:00) Magadan", "(UTC+12:00) Petropavlovsk-Kamchatsky - Old", "(UTC+13:00) Nuku'alofa", "(UTC+13:00) Samoa"}, -} diff --git a/vendor/github.com/brianvoe/gofakeit/data/files.go b/vendor/github.com/brianvoe/gofakeit/data/files.go deleted file mode 100644 index 363b840017f5..000000000000 --- a/vendor/github.com/brianvoe/gofakeit/data/files.go +++ /dev/null @@ -1,7 +0,0 @@ -package data 
- -// Files consists of file information -var Files = map[string][]string{ - "mime_type": {"x-world/x-3dmf", "application/octet-stream", "application/x-authorware-bin", "application/x-authorware-map", "application/x-authorware-seg", "text/vnd.abc", "text/html", "video/animaflex", "application/postscript", "audio/aiff", "audio/x-aiff", "audio/aiff", "audio/x-aiff", "audio/aiff", "audio/x-aiff", "application/x-aim", "text/x-audiosoft-intra", "application/x-navi-animation", "application/x-nokia-9000-communicator-add-on-software", "application/mime", "application/octet-stream", "application/arj", "application/octet-stream", "image/x-jg", "video/x-ms-asf", "text/x-asm", "text/asp", "application/x-mplayer2", "video/x-ms-asf", "video/x-ms-asf-plugin", "audio/basic", "audio/x-au", "application/x-troff-msvideo", "video/avi", "video/msvideo", "video/x-msvideo", "video/avs-video", "application/x-bcpio", "application/mac-binary", "application/macbinary", "application/octet-stream", "application/x-binary", "application/x-macbinary", "image/bmp", "image/bmp", "image/x-windows-bmp", "application/book", "application/book", "application/x-bzip2", "application/x-bsh", "application/x-bzip", "application/x-bzip2", "text/plain", "text/x-c", "text/plain", "application/vnd.ms-pki.seccat", "text/plain", "text/x-c", "application/clariscad", "application/x-cocoa", "application/cdf", "application/x-cdf", "application/x-netcdf", "application/pkix-cert", "application/x-x509-ca-cert", "application/x-chat", "application/x-chat", "application/java", "application/java-byte-code", "application/x-java-class", "application/octet-stream", "text/plain", "text/plain", "application/x-cpio", "text/x-c", "application/mac-compactpro", "application/x-compactpro", "application/x-cpt", "application/pkcs-crl", "application/pkix-crl", "application/pkix-cert", "application/x-x509-ca-cert", "application/x-x509-user-cert", "application/x-csh", "text/x-script.csh", "application/x-pointplus", "text/css", 
"text/plain", "application/x-director", "application/x-deepv", "text/plain", "application/x-x509-ca-cert", "video/x-dv", "application/x-director", "video/dl", "video/x-dl", "application/msword", "application/msword", "application/commonground", "application/drafting", "application/octet-stream", "video/x-dv", "application/x-dvi", "drawing/x-dwf (old)", "model/vnd.dwf", "application/acad", "image/vnd.dwg", "image/x-dwg", "application/dxf", "image/vnd.dwg", "image/x-dwg", "application/x-director", "text/x-script.elisp", "application/x-bytecode.elisp (compiled elisp)", "application/x-elc", "application/x-envoy", "application/postscript", "application/x-esrehber", "text/x-setext", "application/envoy", "application/x-envoy", "application/octet-stream", "text/plain", "text/x-fortran", "text/x-fortran", "text/plain", "text/x-fortran", "application/vnd.fdf", "application/fractals", "image/fif", "video/fli", "video/x-fli", "image/florian", "text/vnd.fmi.flexstor", "video/x-atomic3d-feature", "text/plain", "text/x-fortran", "image/vnd.fpx", "image/vnd.net-fpx", "application/freeloader", "audio/make", "text/plain", "image/g3fax", "image/gif", "video/gl", "video/x-gl", "audio/x-gsm", "audio/x-gsm", "application/x-gsp", "application/x-gss", "application/x-gtar", "application/x-compressed", "application/x-gzip", "application/x-gzip", "multipart/x-gzip", "text/plain", "text/x-h", "application/x-hdf", "application/x-helpfile", "application/vnd.hp-hpgl", "text/plain", "text/x-h", "text/x-script", "application/hlp", "application/x-helpfile", "application/x-winhelp", "application/vnd.hp-hpgl", "application/vnd.hp-hpgl", "application/binhex", "application/binhex4", "application/mac-binhex", "application/mac-binhex40", "application/x-binhex40", "application/x-mac-binhex40", "application/hta", "text/x-component", "text/html", "text/html", "text/html", "text/webviewhtml", "text/html", "x-conference/x-cooltalk", "image/x-icon", "text/plain", "image/ief", "image/ief", "application/iges", 
"model/iges", "application/iges", "model/iges", "application/x-ima", "application/x-httpd-imap", "application/inf", "application/x-internett-signup", "application/x-ip2", "video/x-isvideo", "audio/it", "application/x-inventor", "i-world/i-vrml", "application/x-livescreen", "audio/x-jam", "text/plain", "text/x-java-source", "text/plain", "text/x-java-source", "application/x-java-commerce", "image/jpeg", "image/pjpeg", "image/jpeg", "image/jpeg", "image/pjpeg", "image/jpeg", "image/pjpeg", "image/jpeg", "image/pjpeg", "image/x-jps", "application/x-javascript", "image/jutvision", "audio/midi", "music/x-karaoke", "application/x-ksh", "text/x-script.ksh", "audio/nspaudio", "audio/x-nspaudio", "audio/x-liveaudio", "application/x-latex", "application/lha", "application/octet-stream", "application/x-lha", "application/octet-stream", "text/plain", "audio/nspaudio", "audio/x-nspaudio", "text/plain", "application/x-lisp", "text/x-script.lisp", "text/plain", "text/x-la-asf", "application/x-latex", "application/octet-stream", "application/x-lzh", "application/lzx", "application/octet-stream", "application/x-lzx", "text/plain", "text/x-m", "video/mpeg", "audio/mpeg", "video/mpeg", "audio/x-mpequrl", "application/x-troff-man", "application/x-navimap", "text/plain", "application/mbedlet", "application/mcad", "application/x-mathcad", "image/vasa", "text/mcf", "application/netmc", "application/x-troff-me", "message/rfc822", "message/rfc822", "application/x-midi", "audio/midi", "audio/x-mid", "audio/x-midi", "music/crescendo", "x-music/x-midi", "application/x-midi", "audio/midi", "audio/x-mid", "audio/x-midi", "music/crescendo", "x-music/x-midi", "application/x-frame", "application/x-mif", "message/rfc822", "www/mime", "video/x-motion-jpeg", "application/base64", "application/x-meme", "application/base64", "audio/mod", "audio/x-mod", "video/quicktime", "video/quicktime", "video/x-sgi-movie", "audio/mpeg", "audio/x-mpeg", "video/mpeg", "video/x-mpeg", "video/x-mpeq2a", "audio/mpeg3", 
"audio/x-mpeg-3", "video/mpeg", "video/x-mpeg", "audio/mpeg", "video/mpeg", "application/x-project", "video/mpeg", "video/mpeg", "audio/mpeg", "video/mpeg", "audio/mpeg", "application/vnd.ms-project", "application/x-project", "application/x-project", "application/x-project", "application/marc", "application/x-troff-ms", "video/x-sgi-movie", "audio/make", "application/x-vnd.audioexplosion.mzz", "image/naplps", "image/naplps", "application/x-netcdf", "application/vnd.nokia.configuration-message", "image/x-niff", "image/x-niff", "application/x-mix-transfer", "application/x-conference", "application/x-navidoc", "application/octet-stream", "application/oda", "application/x-omc", "application/x-omcdatamaker", "application/x-omcregerator", "text/x-pascal", "application/pkcs10", "application/x-pkcs10", "application/pkcs-12", "application/x-pkcs12", "application/x-pkcs7-signature", "application/pkcs7-mime", "application/x-pkcs7-mime", "application/pkcs7-mime", "application/x-pkcs7-mime", "application/x-pkcs7-certreqresp", "application/pkcs7-signature", "application/pro_eng", "text/pascal", "image/x-portable-bitmap", "application/vnd.hp-pcl", "application/x-pcl", "image/x-pict", "image/x-pcx", "chemical/x-pdb", "application/pdf", "audio/make", "audio/make.my.funk", "image/x-portable-graymap", "image/x-portable-greymap", "image/pict", "image/pict", "application/x-newton-compatible-pkg", "application/vnd.ms-pki.pko", "text/plain", "text/x-script.perl", "application/x-pixclscript", "image/x-xpixmap", "text/x-script.perl-module", "application/x-pagemaker", "application/x-pagemaker", "image/png", "application/x-portable-anymap", "image/x-portable-anymap", "application/mspowerpoint", "application/vnd.ms-powerpoint", "model/x-pov", "application/vnd.ms-powerpoint", "image/x-portable-pixmap", "application/mspowerpoint", "application/vnd.ms-powerpoint", "application/mspowerpoint", "application/powerpoint", "application/vnd.ms-powerpoint", "application/x-mspowerpoint", 
"application/mspowerpoint", "application/x-freelance", "application/pro_eng", "application/postscript", "application/octet-stream", "paleovu/x-pv", "application/vnd.ms-powerpoint", "text/x-script.phyton", "application/x-bytecode.python", "audio/vnd.qcelp", "x-world/x-3dmf", "x-world/x-3dmf", "image/x-quicktime", "video/quicktime", "video/x-qtc", "image/x-quicktime", "image/x-quicktime", "audio/x-pn-realaudio", "audio/x-pn-realaudio-plugin", "audio/x-realaudio", "audio/x-pn-realaudio", "application/x-cmu-raster", "image/cmu-raster", "image/x-cmu-raster", "image/cmu-raster", "text/x-script.rexx", "image/vnd.rn-realflash", "image/x-rgb", "application/vnd.rn-realmedia", "audio/x-pn-realaudio", "audio/mid", "audio/x-pn-realaudio", "audio/x-pn-realaudio", "audio/x-pn-realaudio-plugin", "application/ringing-tones", "application/vnd.nokia.ringing-tone", "application/vnd.rn-realplayer", "application/x-troff", "image/vnd.rn-realpix", "audio/x-pn-realaudio-plugin", "text/richtext", "text/vnd.rn-realtext", "application/rtf", "application/x-rtf", "text/richtext", "application/rtf", "text/richtext", "video/vnd.rn-realvideo", "text/x-asm", "audio/s3m", "application/octet-stream", "application/x-tbook", "application/x-lotusscreencam", "text/x-script.guile", "text/x-script.scheme", "video/x-scm", "text/plain", "application/sdp", "application/x-sdp", "application/sounder", "application/sea", "application/x-sea", "application/set", "text/sgml", "text/x-sgml", "text/sgml", "text/x-sgml", "application/x-bsh", "application/x-sh", "application/x-shar", "text/x-script.sh", "application/x-bsh", "application/x-shar", "text/html", "text/x-server-parsed-html", "audio/x-psid", "application/x-sit", "application/x-stuffit", "application/x-koan", "application/x-koan", "application/x-koan", "application/x-koan", "application/x-seelogo", "application/smil", "application/smil", "audio/basic", "audio/x-adpcm", "application/solids", "application/x-pkcs7-certificates", "text/x-speech", 
"application/futuresplash", "application/x-sprite", "application/x-sprite", "application/x-wais-source", "text/x-server-parsed-html", "application/streamingmedia", "application/vnd.ms-pki.certstore", "application/step", "application/sla", "application/vnd.ms-pki.stl", "application/x-navistyle", "application/step", "application/x-sv4cpio", "application/x-sv4crc", "image/vnd.dwg", "image/x-dwg", "application/x-world", "x-world/x-svr", "application/x-shockwave-flash", "application/x-troff", "text/x-speech", "application/x-tar", "application/toolbook", "application/x-tbook", "application/x-tcl", "text/x-script.tcl", "text/x-script.tcsh", "application/x-tex", "application/x-texinfo", "application/x-texinfo", "application/plain", "text/plain", "application/gnutar", "application/x-compressed", "image/tiff", "image/x-tiff", "image/tiff", "image/x-tiff", "application/x-troff", "audio/tsp-audio", "application/dsptype", "audio/tsplayer", "text/tab-separated-values", "image/florian", "text/plain", "text/x-uil", "text/uri-list", "text/uri-list", "application/i-deas", "text/uri-list", "text/uri-list", "application/x-ustar", "multipart/x-ustar", "application/octet-stream", "text/x-uuencode", "text/x-uuencode", "application/x-cdlink", "text/x-vcalendar", "application/vda", "video/vdo", "application/groupwise", "video/vivo", "video/vnd.vivo", "video/vivo", "video/vnd.vivo", "application/vocaltec-media-desc", "application/vocaltec-media-file", "audio/voc", "audio/x-voc", "video/vosaic", "audio/voxware", "audio/x-twinvq-plugin", "audio/x-twinvq", "audio/x-twinvq-plugin", "application/x-vrml", "model/vrml", "x-world/x-vrml", "x-world/x-vrt", "application/x-visio", "application/x-visio", "application/x-visio", "application/wordperfect6.0", "application/wordperfect6.1", "application/msword", "audio/wav", "audio/x-wav", "application/x-qpro", "image/vnd.wap.wbmp", "application/vnd.xara", "application/msword", "application/x-123", "windows/metafile", "text/vnd.wap.wml", 
"application/vnd.wap.wmlc", "text/vnd.wap.wmlscript", "application/vnd.wap.wmlscriptc", "application/msword", "application/wordperfect", "application/wordperfect", "application/wordperfect6.0", "application/wordperfect", "application/wordperfect", "application/x-wpwin", "application/x-lotus", "application/mswrite", "application/x-wri", "application/x-world", "model/vrml", "x-world/x-vrml", "model/vrml", "x-world/x-vrml", "text/scriplet", "application/x-wais-source", "application/x-wintalk", "image/x-xbitmap", "image/x-xbm", "image/xbm", "video/x-amt-demorun", "xgl/drawing", "image/vnd.xiff", "application/excel", "application/excel", "application/x-excel", "application/x-msexcel", "application/excel", "application/vnd.ms-excel", "application/x-excel", "application/excel", "application/vnd.ms-excel", "application/x-excel", "application/excel", "application/x-excel", "application/excel", "application/x-excel", "application/excel", "application/vnd.ms-excel", "application/x-excel", "application/excel", "application/vnd.ms-excel", "application/x-excel", "application/excel", "application/vnd.ms-excel", "application/x-excel", "application/x-msexcel", "application/excel", "application/x-excel", "application/excel", "application/x-excel", "application/excel", "application/vnd.ms-excel", "application/x-excel", "application/x-msexcel", "audio/xm", "application/xml", "text/xml", "xgl/movie", "application/x-vnd.ls-xpix", "image/x-xpixmap", "image/xpm", "image/png", "video/x-amt-showrun", "image/x-xwd", "image/x-xwindowdump", "chemical/x-pdb", "application/x-compress", "application/x-compressed", "application/x-compressed", "application/x-zip-compressed", "application/zip", "multipart/x-zip", "application/octet-stream", "text/x-script.zsh"}, - "extension": {"doc", "docx", "log", "msg", "odt", "pages", "rtf", "tex", "txt", "wpd", "wps", "csv", "dat", "gbr", "ged", "key", "keychain", "pps", "ppt", "pptx", "sdf", "tar", "vcf", "xml", "aif", "iff", "mid", "mpa", "ra", "wav", "wma", 
"asf", "asx", "avi", "flv", "mov", "mpg", "rm", "srt", "swf", "vob", "wmv", "max", "obj", "bmp", "dds", "gif", "jpg", "png", "psd", "pspimage", "tga", "thm", "tif", "tiff", "yuv", "ai", "eps", "ps", "svg", "indd", "pct", "pdf", "xlr", "xls", "xlsx", "accdb", "db", "dbf", "mdb", "pdb", "sql", "apk", "app", "bat", "cgi", "com", "exe", "gadget", "jar", "pif", "vb", "wsf", "dem", "gam", "nes", "rom", "sav", "dwg", "dxf", "gpx", "kml", "kmz", "asp", "aspx", "cer", "cfm", "csr", "css", "htm", "html", "js", "jsp", "php", "rss", "xhtml", "crx", "plugin", "fnt", "fon", "otf", "ttf", "cab", "cpl", "cur", "deskthemepack", "dll", "dmp", "drv", "icns", "ico", "lnk", "sys", "cfg", "ini", "prf", "hqx", "mim", "uue", "cbr", "deb", "gz", "pkg", "rar", "rpm", "sitx", "gz", "zip", "zipx", "bin", "cue", "dmg", "iso", "mdf", "toast", "vcd", "class", "cpp", "cs", "dtd", "fla", "java", "lua", "pl", "py", "sh", "sln", "swift", "vcxproj", "xcodeproj", "bak", "tmp", "crdownload", "ics", "msi", "part", "torrent"}, -} diff --git a/vendor/github.com/brianvoe/gofakeit/data/hacker.go b/vendor/github.com/brianvoe/gofakeit/data/hacker.go deleted file mode 100644 index 4735f7d560af..000000000000 --- a/vendor/github.com/brianvoe/gofakeit/data/hacker.go +++ /dev/null @@ -1,20 +0,0 @@ -package data - -// Hacker consists of random hacker phrases -var Hacker = map[string][]string{ - "abbreviation": {"TCP", "HTTP", "SDD", "RAM", "GB", "CSS", "SSL", "AGP", "SQL", "FTP", "PCI", "AI", "ADP", "RSS", "XML", "EXE", "COM", "HDD", "THX", "SMTP", "SMS", "USB", "PNG", "SAS", "IB", "SCSI", "JSON", "XSS", "JBOD"}, - "adjective": {"auxiliary", "primary", "back-end", "digital", "open-source", "virtual", "cross-platform", "redundant", "online", "haptic", "multi-byte", "bluetooth", "wireless", "1080p", "neural", "optical", "solid state", "mobile"}, - "noun": {"driver", "protocol", "bandwidth", "panel", "microchip", "program", "port", "card", "array", "interface", "system", "sensor", "firewall", "hard drive", "pixel", 
"alarm", "feed", "monitor", "application", "transmitter", "bus", "circuit", "capacitor", "matrix"}, - "verb": {"back up", "bypass", "hack", "override", "compress", "copy", "navigate", "index", "connect", "generate", "quantify", "calculate", "synthesize", "input", "transmit", "program", "reboot", "parse"}, - "ingverb": {"backing up", "bypassing", "hacking", "overriding", "compressing", "copying", "navigating", "indexing", "connecting", "generating", "quantifying", "calculating", "synthesizing", "transmitting", "programming", "parsing"}, - "phrase": { - "If we {hacker.verb} the {hacker.noun}, we can get to the {hacker.abbreviation} {hacker.noun} through the {hacker.adjective} {hacker.abbreviation} {hacker.noun}!", - "We need to {hacker.verb} the {hacker.adjective} {hacker.abbreviation} {hacker.noun}!", - "Try to {hacker.verb} the {hacker.abbreviation} {hacker.noun}, maybe it will {hacker.verb} the {hacker.adjective} {hacker.noun}!", - "You can't {hacker.verb} the {hacker.noun} without {hacker.ingverb} the {hacker.adjective} {hacker.abbreviation} {hacker.noun}!", - "Use the {hacker.adjective} {hacker.abbreviation} {hacker.noun}, then you can {hacker.verb} the {hacker.adjective} {hacker.noun}!", - "The {hacker.abbreviation} {hacker.noun} is down, {hacker.verb} the {hacker.adjective} {hacker.noun} so we can {hacker.verb} the {hacker.abbreviation} {hacker.noun}!", - "{hacker.ingverb} the {hacker.noun} won't do anything, we need to {hacker.verb} the {hacker.adjective} {hacker.abbreviation} {hacker.noun}!", - "I'll {hacker.verb} the {hacker.adjective} {hacker.abbreviation} {hacker.noun}, that should {hacker.verb} the {hacker.abbreviation} {hacker.noun}!", - }, -} diff --git a/vendor/github.com/brianvoe/gofakeit/data/hipster.go b/vendor/github.com/brianvoe/gofakeit/data/hipster.go deleted file mode 100644 index f036f4639bc8..000000000000 --- a/vendor/github.com/brianvoe/gofakeit/data/hipster.go +++ /dev/null @@ -1,6 +0,0 @@ -package data - -// Hipster consists of random 
hipster words -var Hipster = map[string][]string{ - "word": {"Wes Anderson", "chicharrones", "narwhal", "food truck", "marfa", "aesthetic", "keytar", "art party", "sustainable", "forage", "mlkshk", "gentrify", "locavore", "swag", "hoodie", "microdosing", "VHS", "before they sold out", "pabst", "plaid", "Thundercats", "freegan", "scenester", "hella", "occupy", "truffaut", "raw denim", "beard", "post-ironic", "photo booth", "twee", "90's", "pitchfork", "cray", "cornhole", "kale chips", "pour-over", "yr", "five dollar toast", "kombucha", "you probably haven't heard of them", "mustache", "fixie", "try-hard", "franzen", "kitsch", "austin", "stumptown", "keffiyeh", "whatever", "tumblr", "DIY", "shoreditch", "biodiesel", "vegan", "pop-up", "banjo", "kogi", "cold-pressed", "letterpress", "chambray", "butcher", "synth", "trust fund", "hammock", "farm-to-table", "intelligentsia", "loko", "ugh", "offal", "poutine", "gastropub", "Godard", "jean shorts", "sriracha", "dreamcatcher", "leggings", "fashion axe", "church-key", "meggings", "tote bag", "disrupt", "readymade", "helvetica", "flannel", "meh", "roof", "hashtag", "knausgaard", "cronut", "schlitz", "green juice", "waistcoat", "normcore", "viral", "ethical", "actually", "fingerstache", "humblebrag", "deep v", "wayfarers", "tacos", "taxidermy", "selvage", "put a bird on it", "ramps", "portland", "retro", "kickstarter", "bushwick", "brunch", "distillery", "migas", "flexitarian", "XOXO", "small batch", "messenger bag", "heirloom", "tofu", "bicycle rights", "bespoke", "salvia", "wolf", "selfies", "echo", "park", "listicle", "craft beer", "chartreuse", "sartorial", "pinterest", "mumblecore", "kinfolk", "vinyl", "etsy", "umami", "8-bit", "polaroid", "banh mi", "crucifix", "bitters", "brooklyn", "PBR&B", "drinking", "vinegar", "squid", "tattooed", "skateboard", "vice", "authentic", "literally", "lomo", "celiac", "health", "goth", "artisan", "chillwave", "blue bottle", "pickled", "next level", "neutra", "organic", "Yuccie", "paleo", 
"blog", "single-origin coffee", "seitan", "street", "gluten-free", "mixtape", "venmo", "irony", "everyday", "carry", "slow-carb", "3 wolf moon", "direct trade", "lo-fi", "tousled", "tilde", "semiotics", "cred", "chia", "master", "cleanse", "ennui", "quinoa", "pug", "iPhone", "fanny pack", "cliche", "cardigan", "asymmetrical", "meditation", "YOLO", "typewriter", "pork belly", "shabby chic", "+1", "lumbersexual", "williamsburg"}, -} diff --git a/vendor/github.com/brianvoe/gofakeit/data/internet.go b/vendor/github.com/brianvoe/gofakeit/data/internet.go deleted file mode 100644 index 1f16db95c765..000000000000 --- a/vendor/github.com/brianvoe/gofakeit/data/internet.go +++ /dev/null @@ -1,8 +0,0 @@ -package data - -// Internet consists of various internet information -var Internet = map[string][]string{ - "browser": {"firefox", "chrome", "internetExplorer", "opera", "safari"}, - "domain_suffix": {"com", "biz", "info", "name", "net", "org", "io"}, - "http_method": {"HEAD", "GET", "POST", "PUT", "PATCH", "DELETE"}, -} diff --git a/vendor/github.com/brianvoe/gofakeit/data/job.go b/vendor/github.com/brianvoe/gofakeit/data/job.go deleted file mode 100644 index 905dd74ee023..000000000000 --- a/vendor/github.com/brianvoe/gofakeit/data/job.go +++ /dev/null @@ -1,8 +0,0 @@ -package data - -// Job consists of job data -var Job = map[string][]string{ - "title": {"Administrator", "Agent", "Analyst", "Architect", "Assistant", "Associate", "Consultant", "Coordinator", "Designer", "Developer", "Director", "Engineer", "Executive", "Facilitator", "Liaison", "Manager", "Officer", "Orchestrator", "Planner", "Producer", "Representative", "Specialist", "Strategist", "Supervisor", "Technician"}, - "descriptor": {"Central", "Chief", "Corporate", "Customer", "Direct", "District", "Dynamic", "Dynamic", "Forward", "Future", "Global", "Human", "Internal", "International", "Investor", "Lead", "Legacy", "National", "Principal", "Product", "Regional", "Senior"}, - "level": {"Accountability", 
"Accounts", "Applications", "Assurance", "Brand", "Branding", "Communications", "Configuration", "Creative", "Data", "Directives", "Division", "Factors", "Functionality", "Group", "Identity", "Implementation", "Infrastructure", "Integration", "Interactions", "Intranet", "Marketing", "Markets", "Metrics", "Mobility", "Operations", "Optimization", "Paradigm", "Program", "Quality", "Research", "Response", "Security", "Solutions", "Tactics", "Usability", "Web"}, -} diff --git a/vendor/github.com/brianvoe/gofakeit/data/log_level.go b/vendor/github.com/brianvoe/gofakeit/data/log_level.go deleted file mode 100644 index 01d98b63c6b6..000000000000 --- a/vendor/github.com/brianvoe/gofakeit/data/log_level.go +++ /dev/null @@ -1,8 +0,0 @@ -package data - -// LogLevels consists of log levels for several types -var LogLevels = map[string][]string{ - "general": {"error", "warning", "info", "fatal", "trace", "debug"}, - "syslog": {"emerg", "alert", "crit", "err", "warning", "notice", "info", "debug"}, - "apache": {"emerg", "alert", "crit", "error", "warn", "notice", "info", "debug", "trace1-8"}, -} diff --git a/vendor/github.com/brianvoe/gofakeit/data/lorem.go b/vendor/github.com/brianvoe/gofakeit/data/lorem.go deleted file mode 100644 index b0a8f8a1378f..000000000000 --- a/vendor/github.com/brianvoe/gofakeit/data/lorem.go +++ /dev/null @@ -1,6 +0,0 @@ -package data - -// Lorem consists of lorem ipsum information -var Lorem = map[string][]string{ - "word": {"alias", "consequatur", "aut", "perferendis", "sit", "voluptatem", "accusantium", "doloremque", "aperiam", "eaque", "ipsa", "quae", "ab", "illo", "inventore", "veritatis", "et", "quasi", "architecto", "beatae", "vitae", "dicta", "sunt", "explicabo", "aspernatur", "aut", "odit", "aut", "fugit", "sed", "quia", "consequuntur", "magni", "dolores", "eos", "qui", "ratione", "voluptatem", "sequi", "nesciunt", "neque", "dolorem", "ipsum", "quia", "dolor", "sit", "amet", "consectetur", "adipisci", "velit", "sed", "quia", "non", 
"numquam", "eius", "modi", "tempora", "incidunt", "ut", "labore", "et", "dolore", "magnam", "aliquam", "quaerat", "voluptatem", "ut", "enim", "ad", "minima", "veniam", "quis", "nostrum", "exercitationem", "ullam", "corporis", "nemo", "enim", "ipsam", "voluptatem", "quia", "voluptas", "sit", "suscipit", "laboriosam", "nisi", "ut", "aliquid", "ex", "ea", "commodi", "consequatur", "quis", "autem", "vel", "eum", "iure", "reprehenderit", "qui", "in", "ea", "voluptate", "velit", "esse", "quam", "nihil", "molestiae", "et", "iusto", "odio", "dignissimos", "ducimus", "qui", "blanditiis", "praesentium", "laudantium", "totam", "rem", "voluptatum", "deleniti", "atque", "corrupti", "quos", "dolores", "et", "quas", "molestias", "excepturi", "sint", "occaecati", "cupiditate", "non", "provident", "sed", "ut", "perspiciatis", "unde", "omnis", "iste", "natus", "error", "similique", "sunt", "in", "culpa", "qui", "officia", "deserunt", "mollitia", "animi", "id", "est", "laborum", "et", "dolorum", "fuga", "et", "harum", "quidem", "rerum", "facilis", "est", "et", "expedita", "distinctio", "nam", "libero", "tempore", "cum", "soluta", "nobis", "est", "eligendi", "optio", "cumque", "nihil", "impedit", "quo", "porro", "quisquam", "est", "qui", "minus", "id", "quod", "maxime", "placeat", "facere", "possimus", "omnis", "voluptas", "assumenda", "est", "omnis", "dolor", "repellendus", "temporibus", "autem", "quibusdam", "et", "aut", "consequatur", "vel", "illum", "qui", "dolorem", "eum", "fugiat", "quo", "voluptas", "nulla", "pariatur", "at", "vero", "eos", "et", "accusamus", "officiis", "debitis", "aut", "rerum", "necessitatibus", "saepe", "eveniet", "ut", "et", "voluptates", "repudiandae", "sint", "et", "molestiae", "non", "recusandae", "itaque", "earum", "rerum", "hic", "tenetur", "a", "sapiente", "delectus", "ut", "aut", "reiciendis", "voluptatibus", "maiores", "doloribus", "asperiores", "repellat"}, -} diff --git a/vendor/github.com/brianvoe/gofakeit/data/payment.go 
b/vendor/github.com/brianvoe/gofakeit/data/payment.go deleted file mode 100644 index e50903a72af6..000000000000 --- a/vendor/github.com/brianvoe/gofakeit/data/payment.go +++ /dev/null @@ -1,20 +0,0 @@ -package data - -// Payment contains payment information -var Payment = map[string][]string{ - "card_type": {"Visa", "MasterCard", "American Express", "Discover"}, - "number": { - // Visa - "4###############", - "4###############", - // Mastercard - "222100##########", - "272099##########", - // American Express - "34#############", - "37#############", - // Discover - "65##############", - "65##############", - }, -} diff --git a/vendor/github.com/brianvoe/gofakeit/data/person.go b/vendor/github.com/brianvoe/gofakeit/data/person.go deleted file mode 100644 index 129b59ba6e3c..000000000000 --- a/vendor/github.com/brianvoe/gofakeit/data/person.go +++ /dev/null @@ -1,9 +0,0 @@ -package data - -// Person consists of a slice of people information -var Person = map[string][]string{ - "prefix": {"Mr.", "Mrs.", "Ms.", "Miss", "Dr."}, - "suffix": {"Jr.", "Sr.", "I", "II", "III", "IV", "V", "MD", "DDS", "PhD", "DVM"}, - "first": {"Aaliyah", "Aaron", "Abagail", "Abbey", "Abbie", "Abbigail", "Abby", "Abdiel", "Abdul", "Abdullah", "Abe", "Abel", "Abelardo", "Abigail", "Abigale", "Abigayle", "Abner", "Abraham", "Ada", "Adah", "Adalberto", "Adaline", "Adam", "Adan", "Addie", "Addison", "Adela", "Adelbert", "Adele", "Adelia", "Adeline", "Adell", "Adella", "Adelle", "Aditya", "Adolf", "Adolfo", "Adolph", "Adolphus", "Adonis", "Adrain", "Adrian", "Adriana", "Adrianna", "Adriel", "Adrien", "Adrienne", "Afton", "Aglae", "Agnes", "Agustin", "Agustina", "Ahmad", "Ahmed", "Aida", "Aidan", "Aiden", "Aileen", "Aimee", "Aisha", "Aiyana", "Akeem", "Al", "Alaina", "Alan", "Alana", "Alanis", "Alanna", "Alayna", "Alba", "Albert", "Alberta", "Albertha", "Alberto", "Albin", "Albina", "Alda", "Alden", "Alec", "Aleen", "Alejandra", "Alejandrin", "Alek", "Alena", "Alene", "Alessandra", "Alessandro", 
"Alessia", "Aletha", "Alex", "Alexa", "Alexander", "Alexandra", "Alexandre", "Alexandrea", "Alexandria", "Alexandrine", "Alexandro", "Alexane", "Alexanne", "Alexie", "Alexis", "Alexys", "Alexzander", "Alf", "Alfonso", "Alfonzo", "Alford", "Alfred", "Alfreda", "Alfredo", "Ali", "Alia", "Alice", "Alicia", "Alisa", "Alisha", "Alison", "Alivia", "Aliya", "Aliyah", "Aliza", "Alize", "Allan", "Allen", "Allene", "Allie", "Allison", "Ally", "Alphonso", "Alta", "Althea", "Alva", "Alvah", "Alvena", "Alvera", "Alverta", "Alvina", "Alvis", "Alyce", "Alycia", "Alysa", "Alysha", "Alyson", "Alysson", "Amalia", "Amanda", "Amani", "Amara", "Amari", "Amaya", "Amber", "Ambrose", "Amelia", "Amelie", "Amely", "America", "Americo", "Amie", "Amina", "Amir", "Amira", "Amiya", "Amos", "Amparo", "Amy", "Amya", "Ana", "Anabel", "Anabelle", "Anahi", "Anais", "Anastacio", "Anastasia", "Anderson", "Andre", "Andreane", "Andreanne", "Andres", "Andrew", "Andy", "Angel", "Angela", "Angelica", "Angelina", "Angeline", "Angelita", "Angelo", "Angie", "Angus", "Anibal", "Anika", "Anissa", "Anita", "Aniya", "Aniyah", "Anjali", "Anna", "Annabel", "Annabell", "Annabelle", "Annalise", "Annamae", "Annamarie", "Anne", "Annetta", "Annette", "Annie", "Ansel", "Ansley", "Anthony", "Antoinette", "Antone", "Antonetta", "Antonette", "Antonia", "Antonietta", "Antonina", "Antonio", "Antwan", "Antwon", "Anya", "April", "Ara", "Araceli", "Aracely", "Arch", "Archibald", "Ardella", "Arden", "Ardith", "Arely", "Ari", "Ariane", "Arianna", "Aric", "Ariel", "Arielle", "Arjun", "Arlene", "Arlie", "Arlo", "Armand", "Armando", "Armani", "Arnaldo", "Arne", "Arno", "Arnold", "Arnoldo", "Arnulfo", "Aron", "Art", "Arthur", "Arturo", "Arvel", "Arvid", "Arvilla", "Aryanna", "Asa", "Asha", "Ashlee", "Ashleigh", "Ashley", "Ashly", "Ashlynn", "Ashton", "Ashtyn", "Asia", "Assunta", "Astrid", "Athena", "Aubree", "Aubrey", "Audie", "Audra", "Audreanne", "Audrey", "August", "Augusta", "Augustine", "Augustus", "Aurelia", "Aurelie", 
"Aurelio", "Aurore", "Austen", "Austin", "Austyn", "Autumn", "Ava", "Avery", "Avis", "Axel", "Ayana", "Ayden", "Ayla", "Aylin", "Baby", "Bailee", "Bailey", "Barbara", "Barney", "Baron", "Barrett", "Barry", "Bart", "Bartholome", "Barton", "Baylee", "Beatrice", "Beau", "Beaulah", "Bell", "Bella", "Belle", "Ben", "Benedict", "Benjamin", "Bennett", "Bennie", "Benny", "Benton", "Berenice", "Bernadette", "Bernadine", "Bernard", "Bernardo", "Berneice", "Bernhard", "Bernice", "Bernie", "Berniece", "Bernita", "Berry", "Bert", "Berta", "Bertha", "Bertram", "Bertrand", "Beryl", "Bessie", "Beth", "Bethany", "Bethel", "Betsy", "Bette", "Bettie", "Betty", "Bettye", "Beulah", "Beverly", "Bianka", "Bill", "Billie", "Billy", "Birdie", "Blair", "Blaise", "Blake", "Blanca", "Blanche", "Blaze", "Bo", "Bobbie", "Bobby", "Bonita", "Bonnie", "Boris", "Boyd", "Brad", "Braden", "Bradford", "Bradley", "Bradly", "Brady", "Braeden", "Brain", "Brandi", "Brando", "Brandon", "Brandt", "Brandy", "Brandyn", "Brannon", "Branson", "Brant", "Braulio", "Braxton", "Brayan", "Breana", "Breanna", "Breanne", "Brenda", "Brendan", "Brenden", "Brendon", "Brenna", "Brennan", "Brennon", "Brent", "Bret", "Brett", "Bria", "Brian", "Briana", "Brianne", "Brice", "Bridget", "Bridgette", "Bridie", "Brielle", "Brigitte", "Brionna", "Brisa", "Britney", "Brittany", "Brock", "Broderick", "Brody", "Brook", "Brooke", "Brooklyn", "Brooks", "Brown", "Bruce", "Bryana", "Bryce", "Brycen", "Bryon", "Buck", "Bud", "Buddy", "Buford", "Bulah", "Burdette", "Burley", "Burnice", "Buster", "Cade", "Caden", "Caesar", "Caitlyn", "Cale", "Caleb", "Caleigh", "Cali", "Calista", "Callie", "Camden", "Cameron", "Camila", "Camilla", "Camille", "Camren", "Camron", "Camryn", "Camylle", "Candace", "Candelario", "Candice", "Candida", "Candido", "Cara", "Carey", "Carissa", "Carlee", "Carleton", "Carley", "Carli", "Carlie", "Carlo", "Carlos", "Carlotta", "Carmel", "Carmela", "Carmella", "Carmelo", "Carmen", "Carmine", "Carol", "Carolanne", 
"Carole", "Carolina", "Caroline", "Carolyn", "Carolyne", "Carrie", "Carroll", "Carson", "Carter", "Cary", "Casandra", "Casey", "Casimer", "Casimir", "Casper", "Cassandra", "Cassandre", "Cassidy", "Cassie", "Catalina", "Caterina", "Catharine", "Catherine", "Cathrine", "Cathryn", "Cathy", "Cayla", "Ceasar", "Cecelia", "Cecil", "Cecile", "Cecilia", "Cedrick", "Celestine", "Celestino", "Celia", "Celine", "Cesar", "Chad", "Chadd", "Chadrick", "Chaim", "Chance", "Chandler", "Chanel", "Chanelle", "Charity", "Charlene", "Charles", "Charley", "Charlie", "Charlotte", "Chase", "Chasity", "Chauncey", "Chaya", "Chaz", "Chelsea", "Chelsey", "Chelsie", "Chesley", "Chester", "Chet", "Cheyanne", "Cheyenne", "Chloe", "Chris", "Christ", "Christa", "Christelle", "Christian", "Christiana", "Christina", "Christine", "Christop", "Christophe", "Christopher", "Christy", "Chyna", "Ciara", "Cicero", "Cielo", "Cierra", "Cindy", "Citlalli", "Clair", "Claire", "Clara", "Clarabelle", "Clare", "Clarissa", "Clark", "Claud", "Claude", "Claudia", "Claudie", "Claudine", "Clay", "Clemens", "Clement", "Clementina", "Clementine", "Clemmie", "Cleo", "Cleora", "Cleta", "Cletus", "Cleve", "Cleveland", "Clifford", "Clifton", "Clint", "Clinton", "Clotilde", "Clovis", "Cloyd", "Clyde", "Coby", "Cody", "Colby", "Cole", "Coleman", "Colin", "Colleen", "Collin", "Colt", "Colten", "Colton", "Columbus", "Concepcion", "Conner", "Connie", "Connor", "Conor", "Conrad", "Constance", "Constantin", "Consuelo", "Cooper", "Cora", "Coralie", "Corbin", "Cordelia", "Cordell", "Cordia", "Cordie", "Corene", "Corine", "Cornelius", "Cornell", "Corrine", "Cortez", "Cortney", "Cory", "Coty", "Courtney", "Coy", "Craig", "Crawford", "Creola", "Cristal", "Cristian", "Cristina", "Cristobal", "Cristopher", "Cruz", "Crystal", "Crystel", "Cullen", "Curt", "Curtis", "Cydney", "Cynthia", "Cyril", "Cyrus", "Dagmar", "Dahlia", "Daija", "Daisha", "Daisy", "Dakota", "Dale", "Dallas", "Dallin", "Dalton", "Damaris", "Dameon", "Damian", "Damien", 
"Damion", "Damon", "Dan", "Dana", "Dandre", "Dane", "Dangelo", "Dangelo", "Danial", "Daniela", "Daniella", "Danielle", "Danika", "Dannie", "Danny", "Dante", "Danyka", "Daphne", "Daphnee", "Daphney", "Darby", "Daren", "Darian", "Dariana", "Darien", "Dario", "Darion", "Darius", "Darlene", "Daron", "Darrel", "Darrell", "Darren", "Darrick", "Darrin", "Darrion", "Darron", "Darryl", "Darwin", "Daryl", "Dashawn", "Dasia", "Dave", "David", "Davin", "Davion", "Davon", "Davonte", "Dawn", "Dawson", "Dax", "Dayana", "Dayna", "Dayne", "Dayton", "Dean", "Deangelo", "Deanna", "Deborah", "Declan", "Dedric", "Dedrick", "Dee", "Deion", "Deja", "Dejah", "Dejon", "Dejuan", "Delaney", "Delbert", "Delfina", "Delia", "Delilah", "Dell", "Della", "Delmer", "Delores", "Delpha", "Delphia", "Delphine", "Delta", "Demarco", "Demarcus", "Demario", "Demetris", "Demetrius", "Demond", "Dena", "Denis", "Dennis", "Deon", "Deondre", "Deontae", "Deonte", "Dereck", "Derek", "Derick", "Deron", "Derrick", "Deshaun", "Deshawn", "Desiree", "Desmond", "Dessie", "Destany", "Destin", "Destinee", "Destiney", "Destini", "Destiny", "Devan", "Devante", "Deven", "Devin", "Devon", "Devonte", "Devyn", "Dewayne", "Dewitt", "Dexter", "Diamond", "Diana", "Dianna", "Diego", "Dillan", "Dillon", "Dimitri", "Dina", "Dino", "Dion", "Dixie", "Dock", "Dolly", "Dolores", "Domenic", "Domenica", "Domenick", "Domenico", "Domingo", "Dominic", "Dominique", "Don", "Donald", "Donato", "Donavon", "Donna", "Donnell", "Donnie", "Donny", "Dora", "Dorcas", "Dorian", "Doris", "Dorothea", "Dorothy", "Dorris", "Dortha", "Dorthy", "Doug", "Douglas", "Dovie", "Doyle", "Drake", "Drew", "Duane", "Dudley", "Dulce", "Duncan", "Durward", "Dustin", "Dusty", "Dwight", "Dylan", "Earl", "Earlene", "Earline", "Earnest", "Earnestine", "Easter", "Easton", "Ebba", "Ebony", "Ed", "Eda", "Edd", "Eddie", "Eden", "Edgar", "Edgardo", "Edison", "Edmond", "Edmund", "Edna", "Eduardo", "Edward", "Edwardo", "Edwin", "Edwina", "Edyth", "Edythe", "Effie", "Efrain", 
"Efren", "Eileen", "Einar", "Eino", "Eladio", "Elaina", "Elbert", "Elda", "Eldon", "Eldora", "Eldred", "Eldridge", "Eleanora", "Eleanore", "Eleazar", "Electa", "Elena", "Elenor", "Elenora", "Eleonore", "Elfrieda", "Eli", "Elian", "Eliane", "Elias", "Eliezer", "Elijah", "Elinor", "Elinore", "Elisa", "Elisabeth", "Elise", "Eliseo", "Elisha", "Elissa", "Eliza", "Elizabeth", "Ella", "Ellen", "Ellie", "Elliot", "Elliott", "Ellis", "Ellsworth", "Elmer", "Elmira", "Elmo", "Elmore", "Elna", "Elnora", "Elody", "Eloisa", "Eloise", "Elouise", "Eloy", "Elroy", "Elsa", "Else", "Elsie", "Elta", "Elton", "Elva", "Elvera", "Elvie", "Elvis", "Elwin", "Elwyn", "Elyse", "Elyssa", "Elza", "Emanuel", "Emelia", "Emelie", "Emely", "Emerald", "Emerson", "Emery", "Emie", "Emil", "Emile", "Emilia", "Emiliano", "Emilie", "Emilio", "Emily", "Emma", "Emmalee", "Emmanuel", "Emmanuelle", "Emmet", "Emmett", "Emmie", "Emmitt", "Emmy", "Emory", "Ena", "Enid", "Enoch", "Enola", "Enos", "Enrico", "Enrique", "Ephraim", "Era", "Eriberto", "Eric", "Erica", "Erich", "Erick", "Ericka", "Erik", "Erika", "Erin", "Erling", "Erna", "Ernest", "Ernestina", "Ernestine", "Ernesto", "Ernie", "Ervin", "Erwin", "Eryn", "Esmeralda", "Esperanza", "Esta", "Esteban", "Estefania", "Estel", "Estell", "Estella", "Estelle", "Estevan", "Esther", "Estrella", "Etha", "Ethan", "Ethel", "Ethelyn", "Ethyl", "Ettie", "Eudora", "Eugene", "Eugenia", "Eula", "Eulah", "Eulalia", "Euna", "Eunice", "Eusebio", "Eva", "Evalyn", "Evan", "Evangeline", "Evans", "Eve", "Eveline", "Evelyn", "Everardo", "Everett", "Everette", "Evert", "Evie", "Ewald", "Ewell", "Ezekiel", "Ezequiel", "Ezra", "Fabian", "Fabiola", "Fae", "Fannie", "Fanny", "Fatima", "Faustino", "Fausto", "Favian", "Fay", "Faye", "Federico", "Felicia", "Felicita", "Felicity", "Felipa", "Felipe", "Felix", "Felton", "Fermin", "Fern", "Fernando", "Ferne", "Fidel", "Filiberto", "Filomena", "Finn", "Fiona", "Flavie", "Flavio", "Fleta", "Fletcher", "Flo", "Florence", "Florencio", 
"Florian", "Florida", "Florine", "Flossie", "Floy", "Floyd", "Ford", "Forest", "Forrest", "Foster", "Frances", "Francesca", "Francesco", "Francis", "Francisca", "Francisco", "Franco", "Frank", "Frankie", "Franz", "Fred", "Freda", "Freddie", "Freddy", "Frederic", "Frederick", "Frederik", "Frederique", "Fredrick", "Fredy", "Freeda", "Freeman", "Freida", "Frida", "Frieda", "Friedrich", "Fritz", "Furman", "Gabe", "Gabriel", "Gabriella", "Gabrielle", "Gaetano", "Gage", "Gail", "Gardner", "Garett", "Garfield", "Garland", "Garnet", "Garnett", "Garret", "Garrett", "Garrick", "Garrison", "Garry", "Garth", "Gaston", "Gavin", "Gay", "Gayle", "Gaylord", "Gene", "General", "Genesis", "Genevieve", "Gennaro", "Genoveva", "Geo", "Geoffrey", "George", "Georgette", "Georgiana", "Georgianna", "Geovanni", "Geovanny", "Geovany", "Gerald", "Geraldine", "Gerard", "Gerardo", "Gerda", "Gerhard", "Germaine", "German", "Gerry", "Gerson", "Gertrude", "Gia", "Gianni", "Gideon", "Gilbert", "Gilberto", "Gilda", "Giles", "Gillian", "Gina", "Gino", "Giovani", "Giovanna", "Giovanni", "Giovanny", "Gisselle", "Giuseppe", "Gladyce", "Gladys", "Glen", "Glenda", "Glenna", "Glennie", "Gloria", "Godfrey", "Golda", "Golden", "Gonzalo", "Gordon", "Grace", "Gracie", "Graciela", "Grady", "Graham", "Grant", "Granville", "Grayce", "Grayson", "Green", "Greg", "Gregg", "Gregoria", "Gregorio", "Gregory", "Greta", "Gretchen", "Greyson", "Griffin", "Grover", "Guadalupe", "Gudrun", "Guido", "Guillermo", "Guiseppe", "Gunnar", "Gunner", "Gus", "Gussie", "Gust", "Gustave", "Guy", "Gwen", "Gwendolyn", "Hadley", "Hailee", "Hailey", "Hailie", "Hal", "Haleigh", "Haley", "Halie", "Halle", "Hallie", "Hank", "Hanna", "Hannah", "Hans", "Hardy", "Harley", "Harmon", "Harmony", "Harold", "Harrison", "Harry", "Harvey", "Haskell", "Hassan", "Hassie", "Hattie", "Haven", "Hayden", "Haylee", "Hayley", "Haylie", "Hazel", "Hazle", "Heath", "Heather", "Heaven", "Heber", "Hector", "Heidi", "Helen", "Helena", "Helene", "Helga", "Hellen", 
"Helmer", "Heloise", "Henderson", "Henri", "Henriette", "Henry", "Herbert", "Herman", "Hermann", "Hermina", "Herminia", "Herminio", "Hershel", "Herta", "Hertha", "Hester", "Hettie", "Hilario", "Hilbert", "Hilda", "Hildegard", "Hillard", "Hillary", "Hilma", "Hilton", "Hipolito", "Hiram", "Hobart", "Holden", "Hollie", "Hollis", "Holly", "Hope", "Horace", "Horacio", "Hortense", "Hosea", "Houston", "Howard", "Howell", "Hoyt", "Hubert", "Hudson", "Hugh", "Hulda", "Humberto", "Hunter", "Hyman", "Ian", "Ibrahim", "Icie", "Ida", "Idell", "Idella", "Ignacio", "Ignatius", "Ike", "Ila", "Ilene", "Iliana", "Ima", "Imani", "Imelda", "Immanuel", "Imogene", "Ines", "Irma", "Irving", "Irwin", "Isaac", "Isabel", "Isabell", "Isabella", "Isabelle", "Isac", "Isadore", "Isai", "Isaiah", "Isaias", "Isidro", "Ismael", "Isobel", "Isom", "Israel", "Issac", "Itzel", "Iva", "Ivah", "Ivory", "Ivy", "Izabella", "Izaiah", "Jabari", "Jace", "Jacey", "Jacinthe", "Jacinto", "Jack", "Jackeline", "Jackie", "Jacklyn", "Jackson", "Jacky", "Jaclyn", "Jacquelyn", "Jacques", "Jacynthe", "Jada", "Jade", "Jaden", "Jadon", "Jadyn", "Jaeden", "Jaida", "Jaiden", "Jailyn", "Jaime", "Jairo", "Jakayla", "Jake", "Jakob", "Jaleel", "Jalen", "Jalon", "Jalyn", "Jamaal", "Jamal", "Jamar", "Jamarcus", "Jamel", "Jameson", "Jamey", "Jamie", "Jamil", "Jamir", "Jamison", "Jammie", "Jan", "Jana", "Janae", "Jane", "Janelle", "Janessa", "Janet", "Janice", "Janick", "Janie", "Janis", "Janiya", "Jannie", "Jany", "Jaquan", "Jaquelin", "Jaqueline", "Jared", "Jaren", "Jarod", "Jaron", "Jarred", "Jarrell", "Jarret", "Jarrett", "Jarrod", "Jarvis", "Jasen", "Jasmin", "Jason", "Jasper", "Jaunita", "Javier", "Javon", "Javonte", "Jay", "Jayce", "Jaycee", "Jayda", "Jayde", "Jayden", "Jaydon", "Jaylan", "Jaylen", "Jaylin", "Jaylon", "Jayme", "Jayne", "Jayson", "Jazlyn", "Jazmin", "Jazmyn", "Jazmyne", "Jean", "Jeanette", "Jeanie", "Jeanne", "Jed", "Jedediah", "Jedidiah", "Jeff", "Jefferey", "Jeffery", "Jeffrey", "Jeffry", "Jena", 
"Jenifer", "Jennie", "Jennifer", "Jennings", "Jennyfer", "Jensen", "Jerad", "Jerald", "Jeramie", "Jeramy", "Jerel", "Jeremie", "Jeremy", "Jermain", "Jermaine", "Jermey", "Jerod", "Jerome", "Jeromy", "Jerrell", "Jerrod", "Jerrold", "Jerry", "Jess", "Jesse", "Jessica", "Jessie", "Jessika", "Jessy", "Jessyca", "Jesus", "Jett", "Jettie", "Jevon", "Jewel", "Jewell", "Jillian", "Jimmie", "Jimmy", "Jo", "Joan", "Joana", "Joanie", "Joanne", "Joannie", "Joanny", "Joany", "Joaquin", "Jocelyn", "Jodie", "Jody", "Joe", "Joel", "Joelle", "Joesph", "Joey", "Johan", "Johann", "Johanna", "Johathan", "John", "Johnathan", "Johnathon", "Johnnie", "Johnny", "Johnpaul", "Johnson", "Jolie", "Jon", "Jonas", "Jonatan", "Jonathan", "Jonathon", "Jordan", "Jordane", "Jordi", "Jordon", "Jordy", "Jordyn", "Jorge", "Jose", "Josefa", "Josefina", "Joseph", "Josephine", "Josh", "Joshua", "Joshuah", "Josiah", "Josiane", "Josianne", "Josie", "Josue", "Jovan", "Jovani", "Jovanny", "Jovany", "Joy", "Joyce", "Juana", "Juanita", "Judah", "Judd", "Jude", "Judge", "Judson", "Judy", "Jules", "Julia", "Julian", "Juliana", "Julianne", "Julie", "Julien", "Juliet", "Julio", "Julius", "June", "Junior", "Junius", "Justen", "Justice", "Justina", "Justine", "Juston", "Justus", "Justyn", "Juvenal", "Juwan", "Kacey", "Kaci", "Kacie", "Kade", "Kaden", "Kadin", "Kaela", "Kaelyn", "Kaia", "Kailee", "Kailey", "Kailyn", "Kaitlin", "Kaitlyn", "Kale", "Kaleb", "Kaleigh", "Kaley", "Kali", "Kallie", "Kameron", "Kamille", "Kamren", "Kamron", "Kamryn", "Kane", "Kara", "Kareem", "Karelle", "Karen", "Kari", "Kariane", "Karianne", "Karina", "Karine", "Karl", "Karlee", "Karley", "Karli", "Karlie", "Karolann", "Karson", "Kasandra", "Kasey", "Kassandra", "Katarina", "Katelin", "Katelyn", "Katelynn", "Katharina", "Katherine", "Katheryn", "Kathleen", "Kathlyn", "Kathryn", "Kathryne", "Katlyn", "Katlynn", "Katrina", "Katrine", "Kattie", "Kavon", "Kay", "Kaya", "Kaycee", "Kayden", "Kayla", "Kaylah", "Kaylee", "Kayleigh", "Kayley", 
"Kayli", "Kaylie", "Kaylin", "Keagan", "Keanu", "Keara", "Keaton", "Keegan", "Keeley", "Keely", "Keenan", "Keira", "Keith", "Kellen", "Kelley", "Kelli", "Kellie", "Kelly", "Kelsi", "Kelsie", "Kelton", "Kelvin", "Ken", "Kendall", "Kendra", "Kendrick", "Kenna", "Kennedi", "Kennedy", "Kenneth", "Kennith", "Kenny", "Kenton", "Kenya", "Kenyatta", "Kenyon", "Keon", "Keshaun", "Keshawn", "Keven", "Kevin", "Kevon", "Keyon", "Keyshawn", "Khalid", "Khalil", "Kian", "Kiana", "Kianna", "Kiara", "Kiarra", "Kiel", "Kiera", "Kieran", "Kiley", "Kim", "Kimberly", "King", "Kip", "Kira", "Kirk", "Kirsten", "Kirstin", "Kitty", "Kobe", "Koby", "Kody", "Kolby", "Kole", "Korbin", "Korey", "Kory", "Kraig", "Kris", "Krista", "Kristian", "Kristin", "Kristina", "Kristofer", "Kristoffer", "Kristopher", "Kristy", "Krystal", "Krystel", "Krystina", "Kurt", "Kurtis", "Kyla", "Kyle", "Kylee", "Kyleigh", "Kyler", "Kylie", "Kyra", "Lacey", "Lacy", "Ladarius", "Lafayette", "Laila", "Laisha", "Lamar", "Lambert", "Lamont", "Lance", "Landen", "Lane", "Laney", "Larissa", "Laron", "Larry", "Larue", "Laura", "Laurel", "Lauren", "Laurence", "Lauretta", "Lauriane", "Laurianne", "Laurie", "Laurine", "Laury", "Lauryn", "Lavada", "Lavern", "Laverna", "Laverne", "Lavina", "Lavinia", "Lavon", "Lavonne", "Lawrence", "Lawson", "Layla", "Layne", "Lazaro", "Lea", "Leann", "Leanna", "Leanne", "Leatha", "Leda", "Lee", "Leif", "Leila", "Leilani", "Lela", "Lelah", "Leland", "Lelia", "Lempi", "Lemuel", "Lenna", "Lennie", "Lenny", "Lenora", "Lenore", "Leo", "Leola", "Leon", "Leonard", "Leonardo", "Leone", "Leonel", "Leonie", "Leonor", "Leonora", "Leopold", "Leopoldo", "Leora", "Lera", "Lesley", "Leslie", "Lesly", "Lessie", "Lester", "Leta", "Letha", "Letitia", "Levi", "Lew", "Lewis", "Lexi", "Lexie", "Lexus", "Lia", "Liam", "Liana", "Libbie", "Libby", "Lila", "Lilian", "Liliana", "Liliane", "Lilla", "Lillian", "Lilliana", "Lillie", "Lilly", "Lily", "Lilyan", "Lina", "Lincoln", "Linda", "Lindsay", "Lindsey", "Linnea", 
"Linnie", "Linwood", "Lionel", "Lisa", "Lisandro", "Lisette", "Litzy", "Liza", "Lizeth", "Lizzie", "Llewellyn", "Lloyd", "Logan", "Lois", "Lola", "Lolita", "Loma", "Lon", "London", "Lonie", "Lonnie", "Lonny", "Lonzo", "Lora", "Loraine", "Loren", "Lorena", "Lorenz", "Lorenza", "Lorenzo", "Lori", "Lorine", "Lorna", "Lottie", "Lou", "Louie", "Louisa", "Lourdes", "Louvenia", "Lowell", "Loy", "Loyal", "Loyce", "Lucas", "Luciano", "Lucie", "Lucienne", "Lucile", "Lucinda", "Lucio", "Lucious", "Lucius", "Lucy", "Ludie", "Ludwig", "Lue", "Luella", "Luigi", "Luis", "Luisa", "Lukas", "Lula", "Lulu", "Luna", "Lupe", "Lura", "Lurline", "Luther", "Luz", "Lyda", "Lydia", "Lyla", "Lynn", "Lyric", "Lysanne", "Mabel", "Mabelle", "Mable", "Mac", "Macey", "Maci", "Macie", "Mack", "Mackenzie", "Macy", "Madaline", "Madalyn", "Maddison", "Madeline", "Madelyn", "Madelynn", "Madge", "Madie", "Madilyn", "Madisen", "Madison", "Madisyn", "Madonna", "Madyson", "Mae", "Maegan", "Maeve", "Mafalda", "Magali", "Magdalen", "Magdalena", "Maggie", "Magnolia", "Magnus", "Maia", "Maida", "Maiya", "Major", "Makayla", "Makenna", "Makenzie", "Malachi", "Malcolm", "Malika", "Malinda", "Mallie", "Mallory", "Malvina", "Mandy", "Manley", "Manuel", "Manuela", "Mara", "Marc", "Marcel", "Marcelina", "Marcelino", "Marcella", "Marcelle", "Marcellus", "Marcelo", "Marcia", "Marco", "Marcos", "Marcus", "Margaret", "Margarete", "Margarett", "Margaretta", "Margarette", "Margarita", "Marge", "Margie", "Margot", "Margret", "Marguerite", "Maria", "Mariah", "Mariam", "Marian", "Mariana", "Mariane", "Marianna", "Marianne", "Mariano", "Maribel", "Marie", "Mariela", "Marielle", "Marietta", "Marilie", "Marilou", "Marilyne", "Marina", "Mario", "Marion", "Marisa", "Marisol", "Maritza", "Marjolaine", "Marjorie", "Marjory", "Mark", "Markus", "Marlee", "Marlen", "Marlene", "Marley", "Marlin", "Marlon", "Marques", "Marquis", "Marquise", "Marshall", "Marta", "Martin", "Martina", "Martine", "Marty", "Marvin", "Mary", "Maryam", 
"Maryjane", "Maryse", "Mason", "Mateo", "Mathew", "Mathias", "Mathilde", "Matilda", "Matilde", "Matt", "Matteo", "Mattie", "Maud", "Maude", "Maudie", "Maureen", "Maurice", "Mauricio", "Maurine", "Maverick", "Mavis", "Max", "Maxie", "Maxime", "Maximilian", "Maximillia", "Maximillian", "Maximo", "Maximus", "Maxine", "Maxwell", "May", "Maya", "Maybell", "Maybelle", "Maye", "Maymie", "Maynard", "Mayra", "Mazie", "Mckayla", "Mckenna", "Mckenzie", "Meagan", "Meaghan", "Meda", "Megane", "Meggie", "Meghan", "Mekhi", "Melany", "Melba", "Melisa", "Melissa", "Mellie", "Melody", "Melvin", "Melvina", "Melyna", "Melyssa", "Mercedes", "Meredith", "Merl", "Merle", "Merlin", "Merritt", "Mertie", "Mervin", "Meta", "Mia", "Micaela", "Micah", "Michael", "Michaela", "Michale", "Micheal", "Michel", "Michele", "Michelle", "Miguel", "Mikayla", "Mike", "Mikel", "Milan", "Miles", "Milford", "Miller", "Millie", "Milo", "Milton", "Mina", "Minerva", "Minnie", "Miracle", "Mireille", "Mireya", "Misael", "Missouri", "Misty", "Mitchel", "Mitchell", "Mittie", "Modesta", "Modesto", "Mohamed", "Mohammad", "Mohammed", "Moises", "Mollie", "Molly", "Mona", "Monica", "Monique", "Monroe", "Monserrat", "Monserrate", "Montana", "Monte", "Monty", "Morgan", "Moriah", "Morris", "Mortimer", "Morton", "Mose", "Moses", "Moshe", "Mossie", "Mozell", "Mozelle", "Muhammad", "Muriel", "Murl", "Murphy", "Murray", "Mustafa", "Mya", "Myah", "Mylene", "Myles", "Myra", "Myriam", "Myrl", "Myrna", "Myron", "Myrtice", "Myrtie", "Myrtis", "Myrtle", "Nadia", "Nakia", "Name", "Nannie", "Naomi", "Naomie", "Napoleon", "Narciso", "Nash", "Nasir", "Nat", "Natalia", "Natalie", "Natasha", "Nathan", "Nathanael", "Nathanial", "Nathaniel", "Nathen", "Nayeli", "Neal", "Ned", "Nedra", "Neha", "Neil", "Nelda", "Nella", "Nelle", "Nellie", "Nels", "Nelson", "Neoma", "Nestor", "Nettie", "Neva", "Newell", "Newton", "Nia", "Nicholas", "Nicholaus", "Nichole", "Nick", "Nicklaus", "Nickolas", "Nico", "Nicola", "Nicolas", "Nicole", "Nicolette", 
"Nigel", "Nikita", "Nikki", "Nikko", "Niko", "Nikolas", "Nils", "Nina", "Noah", "Noble", "Noe", "Noel", "Noelia", "Noemi", "Noemie", "Noemy", "Nola", "Nolan", "Nona", "Nora", "Norbert", "Norberto", "Norene", "Norma", "Norris", "Norval", "Norwood", "Nova", "Novella", "Nya", "Nyah", "Nyasia", "Obie", "Oceane", "Ocie", "Octavia", "Oda", "Odell", "Odessa", "Odie", "Ofelia", "Okey", "Ola", "Olaf", "Ole", "Olen", "Oleta", "Olga", "Olin", "Oliver", "Ollie", "Oma", "Omari", "Omer", "Ona", "Onie", "Opal", "Ophelia", "Ora", "Oral", "Oran", "Oren", "Orie", "Orin", "Orion", "Orland", "Orlando", "Orlo", "Orpha", "Orrin", "Orval", "Orville", "Osbaldo", "Osborne", "Oscar", "Osvaldo", "Oswald", "Oswaldo", "Otha", "Otho", "Otilia", "Otis", "Ottilie", "Ottis", "Otto", "Ova", "Owen", "Ozella", "Pablo", "Paige", "Palma", "Pamela", "Pansy", "Paolo", "Paris", "Parker", "Pascale", "Pasquale", "Pat", "Patience", "Patricia", "Patrick", "Patsy", "Pattie", "Paul", "Paula", "Pauline", "Paxton", "Payton", "Pearl", "Pearlie", "Pearline", "Pedro", "Peggie", "Penelope", "Percival", "Percy", "Perry", "Pete", "Peter", "Petra", "Peyton", "Philip", "Phoebe", "Phyllis", "Pierce", "Pierre", "Pietro", "Pink", "Pinkie", "Piper", "Polly", "Porter", "Precious", "Presley", "Preston", "Price", "Prince", "Princess", "Priscilla", "Providenci", "Prudence", "Queen", "Queenie", "Quentin", "Quincy", "Quinn", "Quinten", "Quinton", "Rachael", "Rachel", "Rachelle", "Rae", "Raegan", "Rafael", "Rafaela", "Raheem", "Rahsaan", "Rahul", "Raina", "Raleigh", "Ralph", "Ramiro", "Ramon", "Ramona", "Randal", "Randall", "Randi", "Randy", "Ransom", "Raoul", "Raphael", "Raphaelle", "Raquel", "Rashad", "Rashawn", "Rasheed", "Raul", "Raven", "Ray", "Raymond", "Raymundo", "Reagan", "Reanna", "Reba", "Rebeca", "Rebecca", "Rebeka", "Rebekah", "Reece", "Reed", "Reese", "Regan", "Reggie", "Reginald", "Reid", "Reilly", "Reina", "Reinhold", "Remington", "Rene", "Renee", "Ressie", "Reta", "Retha", "Retta", "Reuben", "Reva", "Rex", "Rey", 
"Reyes", "Reymundo", "Reyna", "Reynold", "Rhea", "Rhett", "Rhianna", "Rhiannon", "Rhoda", "Ricardo", "Richard", "Richie", "Richmond", "Rick", "Rickey", "Rickie", "Ricky", "Rico", "Rigoberto", "Riley", "Rita", "River", "Robb", "Robbie", "Robert", "Roberta", "Roberto", "Robin", "Robyn", "Rocio", "Rocky", "Rod", "Roderick", "Rodger", "Rodolfo", "Rodrick", "Rodrigo", "Roel", "Rogelio", "Roger", "Rogers", "Rolando", "Rollin", "Roma", "Romaine", "Roman", "Ron", "Ronaldo", "Ronny", "Roosevelt", "Rory", "Rosa", "Rosalee", "Rosalia", "Rosalind", "Rosalinda", "Rosalyn", "Rosamond", "Rosanna", "Rosario", "Roscoe", "Rose", "Rosella", "Roselyn", "Rosemarie", "Rosemary", "Rosendo", "Rosetta", "Rosie", "Rosina", "Roslyn", "Ross", "Rossie", "Rowan", "Rowena", "Rowland", "Roxane", "Roxanne", "Roy", "Royal", "Royce", "Rozella", "Ruben", "Rubie", "Ruby", "Rubye", "Rudolph", "Rudy", "Rupert", "Russ", "Russel", "Russell", "Rusty", "Ruth", "Ruthe", "Ruthie", "Ryan", "Ryann", "Ryder", "Rylan", "Rylee", "Ryleigh", "Ryley", "Sabina", "Sabrina", "Sabryna", "Sadie", "Sadye", "Sage", "Saige", "Sallie", "Sally", "Salma", "Salvador", "Salvatore", "Sam", "Samanta", "Samantha", "Samara", "Samir", "Sammie", "Sammy", "Samson", "Sandra", "Sandrine", "Sandy", "Sanford", "Santa", "Santiago", "Santina", "Santino", "Santos", "Sarah", "Sarai", "Sarina", "Sasha", "Saul", "Savanah", "Savanna", "Savannah", "Savion", "Scarlett", "Schuyler", "Scot", "Scottie", "Scotty", "Seamus", "Sean", "Sebastian", "Sedrick", "Selena", "Selina", "Selmer", "Serena", "Serenity", "Seth", "Shad", "Shaina", "Shakira", "Shana", "Shane", "Shanel", "Shanelle", "Shania", "Shanie", "Shaniya", "Shanna", "Shannon", "Shanny", "Shanon", "Shany", "Sharon", "Shaun", "Shawn", "Shawna", "Shaylee", "Shayna", "Shayne", "Shea", "Sheila", "Sheldon", "Shemar", "Sheridan", "Sherman", "Sherwood", "Shirley", "Shyann", "Shyanne", "Sibyl", "Sid", "Sidney", "Sienna", "Sierra", "Sigmund", "Sigrid", "Sigurd", "Silas", "Sim", "Simeon", "Simone", 
"Sincere", "Sister", "Skye", "Skyla", "Skylar", "Sofia", "Soledad", "Solon", "Sonia", "Sonny", "Sonya", "Sophia", "Sophie", "Spencer", "Stacey", "Stacy", "Stan", "Stanford", "Stanley", "Stanton", "Stefan", "Stefanie", "Stella", "Stephan", "Stephania", "Stephanie", "Stephany", "Stephen", "Stephon", "Sterling", "Steve", "Stevie", "Stewart", "Stone", "Stuart", "Summer", "Sunny", "Susan", "Susana", "Susanna", "Susie", "Suzanne", "Sven", "Syble", "Sydnee", "Sydney", "Sydni", "Sydnie", "Sylvan", "Sylvester", "Sylvia", "Tabitha", "Tad", "Talia", "Talon", "Tamara", "Tamia", "Tania", "Tanner", "Tanya", "Tara", "Taryn", "Tate", "Tatum", "Tatyana", "Taurean", "Tavares", "Taya", "Taylor", "Teagan", "Ted", "Telly", "Terence", "Teresa", "Terrance", "Terrell", "Terrence", "Terrill", "Terry", "Tess", "Tessie", "Tevin", "Thad", "Thaddeus", "Thalia", "Thea", "Thelma", "Theo", "Theodora", "Theodore", "Theresa", "Therese", "Theresia", "Theron", "Thomas", "Thora", "Thurman", "Tia", "Tiana", "Tianna", "Tiara", "Tierra", "Tiffany", "Tillman", "Timmothy", "Timmy", "Timothy", "Tina", "Tito", "Titus", "Tobin", "Toby", "Tod", "Tom", "Tomas", "Tomasa", "Tommie", "Toney", "Toni", "Tony", "Torey", "Torrance", "Torrey", "Toy", "Trace", "Tracey", "Tracy", "Travis", "Travon", "Tre", "Tremaine", "Tremayne", "Trent", "Trenton", "Tressa", "Tressie", "Treva", "Trever", "Trevion", "Trevor", "Trey", "Trinity", "Trisha", "Tristian", "Tristin", "Triston", "Troy", "Trudie", "Trycia", "Trystan", "Turner", "Twila", "Tyler", "Tyra", "Tyree", "Tyreek", "Tyrel", "Tyrell", "Tyrese", "Tyrique", "Tyshawn", "Tyson", "Ubaldo", "Ulices", "Ulises", "Una", "Unique", "Urban", "Uriah", "Uriel", "Ursula", "Vada", "Valentin", "Valentina", "Valentine", "Valerie", "Vallie", "Van", "Vance", "Vanessa", "Vaughn", "Veda", "Velda", "Vella", "Velma", "Velva", "Vena", "Verda", "Verdie", "Vergie", "Verla", "Verlie", "Vern", "Verna", "Verner", "Vernice", "Vernie", "Vernon", "Verona", "Veronica", "Vesta", "Vicenta", "Vicente", 
"Vickie", "Vicky", "Victor", "Victoria", "Vida", "Vidal", "Vilma", "Vince", "Vincent", "Vincenza", "Vincenzo", "Vinnie", "Viola", "Violet", "Violette", "Virgie", "Virgil", "Virginia", "Virginie", "Vita", "Vito", "Viva", "Vivian", "Viviane", "Vivianne", "Vivien", "Vivienne", "Vladimir", "Wade", "Waino", "Waldo", "Walker", "Wallace", "Walter", "Walton", "Wanda", "Ward", "Warren", "Watson", "Wava", "Waylon", "Wayne", "Webster", "Weldon", "Wellington", "Wendell", "Wendy", "Werner", "Westley", "Weston", "Whitney", "Wilber", "Wilbert", "Wilburn", "Wiley", "Wilford", "Wilfred", "Wilfredo", "Wilfrid", "Wilhelm", "Wilhelmine", "Will", "Willa", "Willard", "William", "Willie", "Willis", "Willow", "Willy", "Wilma", "Wilmer", "Wilson", "Wilton", "Winfield", "Winifred", "Winnifred", "Winona", "Winston", "Woodrow", "Wyatt", "Wyman", "Xander", "Xavier", "Xzavier", "Yadira", "Yasmeen", "Yasmin", "Yasmine", "Yazmin", "Yesenia", "Yessenia", "Yolanda", "Yoshiko", "Yvette", "Yvonne", "Zachariah", "Zachary", "Zachery", "Zack", "Zackary", "Zackery", "Zakary", "Zander", "Zane", "Zaria", "Zechariah", "Zelda", "Zella", "Zelma", "Zena", "Zetta", "Zion", "Zita", "Zoe", "Zoey", "Zoie", "Zoila", "Zola", "Zora", "Zula"}, - "last": {"Abbott", "Abernathy", "Abshire", "Adams", "Altenwerth", "Anderson", "Ankunding", "Armstrong", "Auer", "Aufderhar", "Bahringer", "Bailey", "Balistreri", "Barrows", "Bartell", "Bartoletti", "Barton", "Bashirian", "Batz", "Bauch", "Baumbach", "Bayer", "Beahan", "Beatty", "Bechtelar", "Becker", "Bednar", "Beer", "Beier", "Berge", "Bergnaum", "Bergstrom", "Bernhard", "Bernier", "Bins", "Blanda", "Blick", "Block", "Bode", "Boehm", "Bogan", "Bogisich", "Borer", "Bosco", "Botsford", "Boyer", "Boyle", "Bradtke", "Brakus", "Braun", "Breitenberg", "Brekke", "Brown", "Bruen", "Buckridge", "Carroll", "Carter", "Cartwright", "Casper", "Cassin", "Champlin", "Christiansen", "Cole", "Collier", "Collins", "Conn", "Connelly", "Conroy", "Considine", "Corkery", "Cormier", "Corwin", 
"Cremin", "Crist", "Crona", "Cronin", "Crooks", "Cruickshank", "Cummerata", "Cummings", "Dach", "Damore", "Daniel", "Dare", "Daugherty", "Davis", "Deckow", "Denesik", "Dibbert", "Dickens", "Dicki", "Dickinson", "Dietrich", "Donnelly", "Dooley", "Douglas", "Doyle", "DuBuque", "Durgan", "Ebert", "Effertz", "Eichmann", "Emard", "Emmerich", "Erdman", "Ernser", "Fadel", "Fahey", "Farrell", "Fay", "Feeney", "Feest", "Feil", "Ferry", "Fisher", "Flatley", "Frami", "Franecki", "Friesen", "Fritsch", "Funk", "Gaylord", "Gerhold", "Gerlach", "Gibson", "Gislason", "Gleason", "Gleichner", "Glover", "Goldner", "Goodwin", "Gorczany", "Gottlieb", "Goyette", "Grady", "Graham", "Grant", "Green", "Greenfelder", "Greenholt", "Grimes", "Gulgowski", "Gusikowski", "Gutkowski", "Gutmann", "Haag", "Hackett", "Hagenes", "Hahn", "Haley", "Halvorson", "Hamill", "Hammes", "Hand", "Hane", "Hansen", "Harber", "Harris", "Hartmann", "Harvey", "Hauck", "Hayes", "Heaney", "Heathcote", "Hegmann", "Heidenreich", "Heller", "Herman", "Hermann", "Hermiston", "Herzog", "Hessel", "Hettinger", "Hickle", "Hilll", "Hills", "Hilpert", "Hintz", "Hirthe", "Hodkiewicz", "Hoeger", "Homenick", "Hoppe", "Howe", "Howell", "Hudson", "Huel", "Huels", "Hyatt", "Jacobi", "Jacobs", "Jacobson", "Jakubowski", "Jaskolski", "Jast", "Jenkins", "Jerde", "Jewess", "Johns", "Johnson", "Johnston", "Jones", "Kassulke", "Kautzer", "Keebler", "Keeling", "Kemmer", "Kerluke", "Kertzmann", "Kessler", "Kiehn", "Kihn", "Kilback", "King", "Kirlin", "Klein", "Kling", "Klocko", "Koch", "Koelpin", "Koepp", "Kohler", "Konopelski", "Koss", "Kovacek", "Kozey", "Krajcik", "Kreiger", "Kris", "Kshlerin", "Kub", "Kuhic", "Kuhlman", "Kuhn", "Kulas", "Kunde", "Kunze", "Kuphal", "Kutch", "Kuvalis", "Labadie", "Lakin", "Lang", "Langosh", "Langworth", "Larkin", "Larson", "Leannon", "Lebsack", "Ledner", "Leffler", "Legros", "Lehner", "Lemke", "Lesch", "Leuschke", "Lind", "Lindgren", "Littel", "Little", "Lockman", "Lowe", "Lubowitz", "Lueilwitz", 
"Luettgen", "Lynch", "Macejkovic", "Maggio", "Mann", "Mante", "Marks", "Marquardt", "Marvin", "Mayer", "Mayert", "McClure", "McCullough", "McDermott", "McGlynn", "McKenzie", "McLaughlin", "Medhurst", "Mertz", "Metz", "Miller", "Mills", "Mitchell", "Moen", "Mohr", "Monahan", "Moore", "Morar", "Morissette", "Mosciski", "Mraz", "Mueller", "Muller", "Murazik", "Murphy", "Murray", "Nader", "Nicolas", "Nienow", "Nikolaus", "Nitzsche", "Nolan", "Oberbrunner", "Okuneva", "Olson", "Ondricka", "OReilly", "Orn", "Ortiz", "Osinski", "Pacocha", "Padberg", "Pagac", "Parisian", "Parker", "Paucek", "Pfannerstill", "Pfeffer", "Pollich", "Pouros", "Powlowski", "Predovic", "Price", "Prohaska", "Prosacco", "Purdy", "Quigley", "Quitzon", "Rath", "Ratke", "Rau", "Raynor", "Reichel", "Reichert", "Reilly", "Reinger", "Rempel", "Renner", "Reynolds", "Rice", "Rippin", "Ritchie", "Robel", "Roberts", "Rodriguez", "Rogahn", "Rohan", "Rolfson", "Romaguera", "Roob", "Rosenbaum", "Rowe", "Ruecker", "Runolfsdottir", "Runolfsson", "Runte", "Russel", "Rutherford", "Ryan", "Sanford", "Satterfield", "Sauer", "Sawayn", "Schaden", "Schaefer", "Schamberger", "Schiller", "Schimmel", "Schinner", "Schmeler", "Schmidt", "Schmitt", "Schneider", "Schoen", "Schowalter", "Schroeder", "Schulist", "Schultz", "Schumm", "Schuppe", "Schuster", "Senger", "Shanahan", "Shields", "Simonis", "Sipes", "Skiles", "Smith", "Smitham", "Spencer", "Spinka", "Sporer", "Stamm", "Stanton", "Stark", "Stehr", "Steuber", "Stiedemann", "Stokes", "Stoltenberg", "Stracke", "Streich", "Stroman", "Strosin", "Swaniawski", "Swift", "Terry", "Thiel", "Thompson", "Tillman", "Torp", "Torphy", "Towne", "Toy", "Trantow", "Tremblay", "Treutel", "Tromp", "Turcotte", "Turner", "Ullrich", "Upton", "Vandervort", "Veum", "Volkman", "Von", "VonRueden", "Waelchi", "Walker", "Walsh", "Walter", "Ward", "Waters", "Watsica", "Weber", "Wehner", "Weimann", "Weissnat", "Welch", "West", "White", "Wiegand", "Wilderman", "Wilkinson", "Will", "Williamson", 
"Willms", "Windler", "Wintheiser", "Wisoky", "Wisozk", "Witting", "Wiza", "Wolf", "Wolff", "Wuckert", "Wunsch", "Wyman", "Yost", "Yundt", "Zboncak", "Zemlak", "Ziemann", "Zieme", "Zulauf"}, -} diff --git a/vendor/github.com/brianvoe/gofakeit/data/status_code.go b/vendor/github.com/brianvoe/gofakeit/data/status_code.go deleted file mode 100644 index 7d78fd995026..000000000000 --- a/vendor/github.com/brianvoe/gofakeit/data/status_code.go +++ /dev/null @@ -1,7 +0,0 @@ -package data - -// StatusCodes consists of commonly used HTTP status codes -var StatusCodes = map[string][]int{ - "simple": {200, 301, 302, 400, 404, 500}, - "general": {100, 200, 201, 203, 204, 205, 301, 302, 304, 400, 401, 403, 404, 405, 406, 416, 500, 501, 502, 503, 504}, -} diff --git a/vendor/github.com/brianvoe/gofakeit/data/vehicle.go b/vendor/github.com/brianvoe/gofakeit/data/vehicle.go deleted file mode 100644 index 3b96728bccad..000000000000 --- a/vendor/github.com/brianvoe/gofakeit/data/vehicle.go +++ /dev/null @@ -1,10 +0,0 @@ -package data - -// Vehicle Beer consists of various beer information -var Vehicle = map[string][]string{ - "vehicle_type": {"Passenger car mini", "Passenger car light", "Passenger car compact", "Passenger car medium", "Passenger car heavy", "Sport utility vehicle", "Pickup truck", "Van"}, - "fuel_type": {"Gasoline", "Methanol", "Ethanol", "Diesel", "LPG", "CNG", "Electric"}, - "transmission_type": {"Manual", "Automatic"}, - "maker": {"Alfa Romeo", "Aston Martin", "Audi", "Bentley", "Benz", "BMW", "Bugatti", "Cadillac", "Chevrolet", "Chrysler", "Citroen", "Corvette", "DAF", "Dacia", "Daewoo", "Daihatsu", "Datsun", "De Lorean", "Dino", "Dodge", "Farboud", "Ferrari", "Fiat", "Ford", "Honda", "Hummer", "Hyundai", "Jaguar", "Jeep", "KIA", "Koenigsegg", "Lada", "Lamborghini", "Lancia", "Land Rover", "Lexus", "Ligier", "Lincoln", "Lotus", "Martini", "Maserati", "Maybach", "Mazda", "McLaren", "Mercedes", "Mercedes-Benz", "Mini", "Mitsubishi", "Nissan", "Noble", "Opel", 
"Peugeot", "Pontiac", "Porsche", "Renault", "Rolls-Royce", "Rover", "Saab", "Seat", "Skoda", "Smart", "Spyker", "Subaru", "Suzuki", "Toyota", "Tesla", "Vauxhall", "Volkswagen", "Volvo"}, - "model": {"Db9 Coupe", "Db9 Coupe Manual", "Db9 Volante", "V12 Vanquish S", "V8 Vantage", "A3", "A4", "A4 Avant Quattro", "A4 Cabriolet", "A4 Cabriolet Quattro", "A4 Quattro", "A6", "A6 Avant Quattro", "A6 Quattro", "A8 L", "Gti", "Passat", "S4", "S4 Avant", "S4 Cabriolet", "Tt Coupe", "Tt Roadster", "Bentley Arnage", "Continental Flying Spur", "Continental Gt", " 325ci Convertible", " 325i", " 325xi", " 325xi Sport Wagon", " 330ci Convertible", " 330i", " 330xi", " 525i", " 525xi", " 530i", " 530xi", " 530xi Sport Wagon", " 550i", " 650ci", " 650ci Convertible", " 750li", " 760li", " M3", " M3 Convertible", " M5", " M6", " Mini Cooper", " Mini Cooper Convertible", " Mini Cooper S", " Mini Cooper S Convertible", " X3", " X5", " X5 4.8is", " Z4 3.0 Si Coupe", " Z4 3.0i", " Z4 3.0si", " Z4 M Roadster", "Veyron", "300c/srt-8", "Caravan 2wd", "Charger", "Commander 4wd", "Crossfire Roadster", "Dakota Pickup 2wd", "Dakota Pickup 4wd", "Durango 2wd", "Durango 4wd", "Grand Cherokee 2wd", "Grand Cherokee 4wd", "Liberty/cherokee 2wd", "Liberty/cherokee 4wd", "Pacifica 2wd", "Pacifica Awd", "Pt Cruiser", "Ram 1500 Pickup 2wd", "Ram 1500 Pickup 4wd", "Sebring 4-dr", "Stratus 4-dr", "Town & Country 2wd", "Viper Convertible", "Wrangler/tj 4wd", "F430", "Ferrari 612 Scaglietti", "Ferrari F141", "B4000 4wd", "Crown Victoria Police", "E150 Club Wagon", "E150 Econoline 2wd", "Escape 4wd", "Escape Fwd", "Escape Hybrid 4wd", "Escape Hybrid Fwd", "Expedition 2wd", "Explorer 2wd", "Explorer 4wd", "F150 Ffv 2wd", "F150 Ffv 4wd", "F150 Pickup 2wd", "F150 Pickup 4wd", "Five Hundred Awd", "Focus Fwd", "Focus Station Wag", "Freestar Wagon Fwd", "Freestyle Awd", "Freestyle Fwd", "Grand Marquis", "Gt 2wd", "Ls", "Mark Lt", "Milan", "Monterey Wagon Fwd", "Mountaineer 4wd", "Mustang", "Navigator 2wd", "Ranger 
Pickup 2wd", "Ranger Pickup 4wd", "Taurus", "Taurus Ethanol Ffv", "Thunderbird", "Town Car", "Zephyr", "B9 Tribeca Awd", "Baja Awd", "Forester Awd", "Impreza Awd", "Impreza Wgn/outback Spt Awd", "Legacy Awd", "Legacy Wagon Awd", "Outback Awd", "Outback Wagon Awd", "9-3 Convertible", "9-3 Sport Sedan", "9-5 Sedan", "C15 Silverado Hybrid 2wd", "C1500 Silverado 2wd", "C1500 Suburban 2wd", "C1500 Tahoe 2wd", "C1500 Yukon 2wd", "Cobalt", "Colorado 2wd", "Colorado 4wd", "Colorado Cab Chassis Inc 2wd", "Colorado Crew Cab 2wd", "Colorado Crew Cab 4wd", "Corvette", "Cts", "Dts", "Envoy 2wd", "Envoy Xl 4wd", "Equinox Awd", "Equinox Fwd", "Escalade 2wd", "Escalade Esv Awd", "G15/25chev Van 2wd Conv", "G1500/2500 Chevy Express 2wd", "G1500/2500 Chevy Van 2wd", "G6", "G6 Gt/gtp Convertible", "Grand Prix", "Gto", "H3 4wd", "Hhr Fwd", "I-280 2wd Ext Cab", "Impala", "K15 Silverado Hybrid 4wd", "K1500 Avalanche 4wd", "K1500 Silverado 4wd", "K1500 Tahoe 4wd", "Lacrosse/allure", "Limousine", "Malibu", "Montana Sv6 Awd", "Monte Carlo", "Rendezvous Awd", "Rendezvous Fwd", "Solstice", "Srx 2wd", "Srx Awd", "Ssr Pickup 2wd", "Sts", "Sts Awd", "Terraza Fwd", "Trailblazer 2wd", "Trailblazer 4wd", "Trailblazer Awd", "Trailblazer Ext 4wd", "Uplander Fwd", "Vue Awd", "Vue Fwd", "Xlr", "Aveo", "Forenza", "Forenza Wagon", "Verona", "Accord", "Accord Hybrid", "Civic", "Civic Hybrid", "Cr-v 4wd", "Element 2wd", "Element 4wd", "Insight", "Mdx 4wd", "Odyssey 2wd", "Pilot 2wd", "Pilot 4wd", "Ridgeline 4wd", "Rl", "Rsx", "S2000", "Tl", "Tsx", "Accent", "Azera", "Elantra", "Santafe 2wd", "Santafe 4wd", "Sonata", "Tiburon", "Tucson 2wd", "Tucson 4wd", "S-type 3.0 Litre", "S-type 4.2 Litre", "S-type R", "Vdp Lwb", "Xj8", "Xk8 Convertible", "Xkr Convertible", "X-type", "X-type Sport Brake", "Amanti", "Optima", "Optima(ms)", "Rio", "Sedona", "Sorento 2wd", "Sorento 4wd", "Spectra(ld)", "Sportage 2wd", "Sportage 4wd", "L-140/715 Gallardo", "L-147/148 Murcielago", "Lr3", "Range Rover", "Range Rover Sport", 
"Elise/exige", "Coupe Cambiocorsa/gt/g-sport", "Quattroporte", "Mazda 3", "Mazda 5", "Mazda 6", "Mazda 6 Sport Wagon", "Mazda Rx-8", "Mpv", "Mx-5", "C230", "C280", "C280 4matic", "C350", "C350 4matic", "C55 Amg", "Cl65 Amg", "Clk350", "Clk350 (cabriolet)", "Clk55 Amg (cabriolet)", "Cls500", "Cls55 Amg", "E320 Cdi", "E350", "E350 (wagon)", "E350 4matic", "E350 4matic (wagon)", "E500", "E55 Amg", "E55 Amg (wagon)", "Maybach 57s", "Maybach 62", "Ml350", "Ml500", "R350", "R500", "S350", "S430", "Sl500", "Sl600", "Sl65 Amg", "Slk280", "Slk350", "Slr", "Eclipse", "Endeavor 2wd", "Endeavor 4wd", "Galant", "Lancer", "Lancer Evolution", "Lancer Sportback", "Montero", "Outlander 2wd", "Outlander 4wd", "Vibe", "350z", "350z Roadster", "Altima", "Armada 2wd", "Armada 4wd", "Frontier 2wd", "Frontier V6-2wd", "Frontier V6-4wd", "Fx35 Awd", "Fx35 Rwd", "Fx45 Awd", "G35", "M35", "M35x", "M45", "Maxima", "Murano Awd", "Murano Fwd", "Pathfinder 2wd", "Pathfinder 4wd", "Q45", "Q45 Sport", "Quest", "Qx56 4wd", "Sentra", "Titan 2wd", "Titan 4wd", "Xterra 2wd", "Xterra 4wd", "Boxster", "Boxster S", "Carrera 2 Coupe", "Cayenne", "Cayenne S", "Cayenne Turbo", "Cayman S", "Phantom", "F150 Supercrew 4wd", "C8 Spyder", "Aerio", "Aerio Sx", "Aerio Sx Awd", "Grand Vitara Xl-7", "Grand Vitara Xl-7 4wd", "Grand Vitara Xv6", "Grand Vitara Xv6 Awd", "4runner 2wd", "4runner 4wd", "Avalon", "Camry", "Camry Solara", "Camry Solara Convertible", "Corolla", "Corolla Matrix", "Es 330", "Gs 300 4wd", "Gs 300/gs 430", "Gx 470", "Highlander 2wd", "Highlander 4wd", "Highlander Hybrid 2wd", "Highlander Hybrid 4wd", "Is 250", "Is 250 Awd", "Is 350", "Ls 430", "Lx 470", "Prius", "Rav4 2wd", "Rav4 4wd", "Rx 330 2wd", "Rx 330 4wd", "Rx 400h 4wd", "Sc 430", "Scion Tc", "Scion Xa", "Scion Xb", "Sequoia 2wd", "Sequoia 4wd", "Sienna 2wd", "Sienna 4wd", "Toyota Tacoma 2wd", "Toyota Tacoma 4wd", "Toyota Tundra 2wd", "Toyota Tundra 4wd", "Yaris", "A3 Quattro", "Golf", "Jetta", "New Beetle", "New Beetle Convertible", 
"Passat Wagon 4motion", "Phaeton", "Rabbit", "Touareg", "Tt Coupe Quattro", "Tt Roadster Quattro", "C70 Convertible", "S40 Awd", "S40 Fwd", "S60 Awd", "S60 Fwd", "S60 R Awd", "S80 Fwd", "V50 Awd", "V70 Fwd", "V70 R Awd", "Xc 70 Awd", "Xc 90 Awd", "Xc 90 Fwd"}, -} diff --git a/vendor/github.com/brianvoe/gofakeit/datetime.go b/vendor/github.com/brianvoe/gofakeit/datetime.go deleted file mode 100644 index 8c064473d3d8..000000000000 --- a/vendor/github.com/brianvoe/gofakeit/datetime.go +++ /dev/null @@ -1,77 +0,0 @@ -package gofakeit - -import ( - "strconv" - "time" -) - -// Date will generate a random time.Time struct -func Date() time.Time { - return time.Date(Year(), time.Month(Number(0, 12)), Day(), Hour(), Minute(), Second(), NanoSecond(), time.UTC) -} - -// DateRange will generate a random time.Time struct between a start and end date -func DateRange(start, end time.Time) time.Time { - return time.Unix(0, int64(Number(int(start.UnixNano()), int(end.UnixNano())))).UTC() -} - -// Month will generate a random month string -func Month() string { - return time.Month(Number(1, 12)).String() -} - -// Day will generate a random day between 1 - 31 -func Day() int { - return Number(1, 31) -} - -// WeekDay will generate a random weekday string (Monday-Sunday) -func WeekDay() string { - return time.Weekday(Number(0, 6)).String() -} - -// Year will generate a random year between 1900 - current year -func Year() int { - return Number(1900, time.Now().Year()) -} - -// Hour will generate a random hour - in military time -func Hour() int { - return Number(0, 23) -} - -// Minute will generate a random minute -func Minute() int { - return Number(0, 59) -} - -// Second will generate a random second -func Second() int { - return Number(0, 59) -} - -// NanoSecond will generate a random nano second -func NanoSecond() int { - return Number(0, 999999999) -} - -// TimeZone will select a random timezone string -func TimeZone() string { - return getRandValue([]string{"timezone", "text"}) -} 
- -// TimeZoneFull will select a random full timezone string -func TimeZoneFull() string { - return getRandValue([]string{"timezone", "full"}) -} - -// TimeZoneAbv will select a random timezone abbreviation string -func TimeZoneAbv() string { - return getRandValue([]string{"timezone", "abr"}) -} - -// TimeZoneOffset will select a random timezone offset -func TimeZoneOffset() float32 { - value, _ := strconv.ParseFloat(getRandValue([]string{"timezone", "offset"}), 32) - return float32(value) -} diff --git a/vendor/github.com/brianvoe/gofakeit/doc.go b/vendor/github.com/brianvoe/gofakeit/doc.go deleted file mode 100644 index c53335e634f1..000000000000 --- a/vendor/github.com/brianvoe/gofakeit/doc.go +++ /dev/null @@ -1,10 +0,0 @@ -/* -Package gofakeit is a random data generator written in go - -Every function has an example and a benchmark - -See the full list here https://godoc.org/github.com/brianvoe/gofakeit - -80+ Functions!!! -*/ -package gofakeit diff --git a/vendor/github.com/brianvoe/gofakeit/faker.go b/vendor/github.com/brianvoe/gofakeit/faker.go deleted file mode 100644 index 38062d5cdf91..000000000000 --- a/vendor/github.com/brianvoe/gofakeit/faker.go +++ /dev/null @@ -1,15 +0,0 @@ -package gofakeit - -import ( - "math/rand" - "time" -) - -// Seed random. 
Setting seed to 0 will use time.Now().UnixNano() -func Seed(seed int64) { - if seed == 0 { - rand.Seed(time.Now().UTC().UnixNano()) - } else { - rand.Seed(seed) - } -} diff --git a/vendor/github.com/brianvoe/gofakeit/file.go b/vendor/github.com/brianvoe/gofakeit/file.go deleted file mode 100644 index 6c1e8d56cba1..000000000000 --- a/vendor/github.com/brianvoe/gofakeit/file.go +++ /dev/null @@ -1,11 +0,0 @@ -package gofakeit - -// MimeType will generate a random mime file type -func MimeType() string { - return getRandValue([]string{"file", "mime_type"}) -} - -// Extension will generate a random file extension -func Extension() string { - return getRandValue([]string{"file", "extension"}) -} diff --git a/vendor/github.com/brianvoe/gofakeit/generate.go b/vendor/github.com/brianvoe/gofakeit/generate.go deleted file mode 100644 index 284eef8bb108..000000000000 --- a/vendor/github.com/brianvoe/gofakeit/generate.go +++ /dev/null @@ -1,41 +0,0 @@ -package gofakeit - -import ( - "strings" -) - -// Generate fake information from given string. String should contain {category.subcategory} -// -// Ex: {person.first} - random firstname -// -// Ex: {person.first}###{person.last}@{person.last}.{internet.domain_suffix} - billy834smith@smith.com -// -// Ex: ### - 481 - random numbers -// -// Ex: ??? - fda - random letters -// -// For a complete list possible categories use the Categories() function. 
-func Generate(dataVal string) string { - // Identify items between brackets: {person.first} - for strings.Count(dataVal, "{") > 0 && strings.Count(dataVal, "}") > 0 { - catValue := "" - startIndex := strings.Index(dataVal, "{") - endIndex := strings.Index(dataVal, "}") - replace := dataVal[(startIndex + 1):endIndex] - categories := strings.Split(replace, ".") - - if len(categories) >= 2 && dataCheck([]string{categories[0], categories[1]}) { - catValue = getRandValue([]string{categories[0], categories[1]}) - } - - dataVal = strings.Replace(dataVal, "{"+replace+"}", catValue, 1) - } - - // Replace # with numbers - dataVal = replaceWithNumbers(dataVal) - - // Replace ? with letters - dataVal = replaceWithLetters(dataVal) - - return dataVal -} diff --git a/vendor/github.com/brianvoe/gofakeit/hacker.go b/vendor/github.com/brianvoe/gofakeit/hacker.go deleted file mode 100644 index 0ac73b7109f3..000000000000 --- a/vendor/github.com/brianvoe/gofakeit/hacker.go +++ /dev/null @@ -1,35 +0,0 @@ -package gofakeit - -import "strings" - -// HackerPhrase will return a random hacker sentence -func HackerPhrase() string { - words := strings.Split(Generate(getRandValue([]string{"hacker", "phrase"})), " ") - words[0] = strings.Title(words[0]) - return strings.Join(words, " ") -} - -// HackerAbbreviation will return a random hacker abbreviation -func HackerAbbreviation() string { - return getRandValue([]string{"hacker", "abbreviation"}) -} - -// HackerAdjective will return a random hacker adjective -func HackerAdjective() string { - return getRandValue([]string{"hacker", "adjective"}) -} - -// HackerNoun will return a random hacker noun -func HackerNoun() string { - return getRandValue([]string{"hacker", "noun"}) -} - -// HackerVerb will return a random hacker verb -func HackerVerb() string { - return getRandValue([]string{"hacker", "verb"}) -} - -// HackerIngverb will return a random hacker ingverb -func HackerIngverb() string { - return getRandValue([]string{"hacker", "ingverb"}) -} 
diff --git a/vendor/github.com/brianvoe/gofakeit/hipster.go b/vendor/github.com/brianvoe/gofakeit/hipster.go deleted file mode 100644 index 3166a9966a13..000000000000 --- a/vendor/github.com/brianvoe/gofakeit/hipster.go +++ /dev/null @@ -1,20 +0,0 @@ -package gofakeit - -// HipsterWord will return a single hipster word -func HipsterWord() string { - return getRandValue([]string{"hipster", "word"}) -} - -// HipsterSentence will generate a random sentence -func HipsterSentence(wordCount int) string { - return sentence(wordCount, HipsterWord) -} - -// HipsterParagraph will generate a random paragraphGenerator -// Set Paragraph Count -// Set Sentence Count -// Set Word Count -// Set Paragraph Separator -func HipsterParagraph(paragraphCount int, sentenceCount int, wordCount int, separator string) string { - return paragraphGenerator(paragrapOptions{paragraphCount, sentenceCount, wordCount, separator}, HipsterSentence) -} diff --git a/vendor/github.com/brianvoe/gofakeit/image.go b/vendor/github.com/brianvoe/gofakeit/image.go deleted file mode 100644 index de5a2e6d916c..000000000000 --- a/vendor/github.com/brianvoe/gofakeit/image.go +++ /dev/null @@ -1,8 +0,0 @@ -package gofakeit - -import "strconv" - -// ImageURL will generate a random Image Based Upon Height And Width. https://picsum.photos/ -func ImageURL(width int, height int) string { - return "https://picsum.photos/" + strconv.Itoa(width) + "/" + strconv.Itoa(height) -} diff --git a/vendor/github.com/brianvoe/gofakeit/internet.go b/vendor/github.com/brianvoe/gofakeit/internet.go deleted file mode 100644 index 69dd700e5231..000000000000 --- a/vendor/github.com/brianvoe/gofakeit/internet.go +++ /dev/null @@ -1,55 +0,0 @@ -package gofakeit - -import ( - "fmt" - "math/rand" - "strings" -) - -// DomainName will generate a random url domain name -func DomainName() string { - return strings.ToLower(JobDescriptor()+BS()) + "." 
+ DomainSuffix() -} - -// DomainSuffix will generate a random domain suffix -func DomainSuffix() string { - return getRandValue([]string{"internet", "domain_suffix"}) -} - -// URL will generate a random url string -func URL() string { - url := "http" + RandString([]string{"s", ""}) + "://www." - url += DomainName() - - // Slugs - num := Number(1, 4) - slug := make([]string, num) - for i := 0; i < num; i++ { - slug[i] = BS() - } - url += "/" + strings.ToLower(strings.Join(slug, "/")) - - return url -} - -// HTTPMethod will generate a random http method -func HTTPMethod() string { - return getRandValue([]string{"internet", "http_method"}) -} - -// IPv4Address will generate a random version 4 ip address -func IPv4Address() string { - num := func() int { return 2 + rand.Intn(254) } - return fmt.Sprintf("%d.%d.%d.%d", num(), num(), num(), num()) -} - -// IPv6Address will generate a random version 6 ip address -func IPv6Address() string { - num := 65536 - return fmt.Sprintf("2001:cafe:%x:%x:%x:%x:%x:%x", rand.Intn(num), rand.Intn(num), rand.Intn(num), rand.Intn(num), rand.Intn(num), rand.Intn(num)) -} - -// Username will genrate a random username based upon picking a random lastname and random numbers at the end -func Username() string { - return getRandValue([]string{"person", "last"}) + replaceWithNumbers("####") -} diff --git a/vendor/github.com/brianvoe/gofakeit/job.go b/vendor/github.com/brianvoe/gofakeit/job.go deleted file mode 100644 index c156bde77243..000000000000 --- a/vendor/github.com/brianvoe/gofakeit/job.go +++ /dev/null @@ -1,34 +0,0 @@ -package gofakeit - -// JobInfo is a struct of job information -type JobInfo struct { - Company string - Title string - Descriptor string - Level string -} - -// Job will generate a struct with random job information -func Job() *JobInfo { - return &JobInfo{ - Company: Company(), - Title: JobTitle(), - Descriptor: JobDescriptor(), - Level: JobLevel(), - } -} - -// JobTitle will generate a random job title string -func 
JobTitle() string { - return getRandValue([]string{"job", "title"}) -} - -// JobDescriptor will generate a random job descriptor string -func JobDescriptor() string { - return getRandValue([]string{"job", "descriptor"}) -} - -// JobLevel will generate a random job level string -func JobLevel() string { - return getRandValue([]string{"job", "level"}) -} diff --git a/vendor/github.com/brianvoe/gofakeit/log_level.go b/vendor/github.com/brianvoe/gofakeit/log_level.go deleted file mode 100644 index bde9bf310588..000000000000 --- a/vendor/github.com/brianvoe/gofakeit/log_level.go +++ /dev/null @@ -1,15 +0,0 @@ -package gofakeit - -import ( - "github.com/brianvoe/gofakeit/data" -) - -// LogLevel will generate a random log level -// See data/LogLevels for list of available levels -func LogLevel(logType string) string { - if _, ok := data.LogLevels[logType]; ok { - return getRandValue([]string{"log_level", logType}) - } - - return getRandValue([]string{"log_level", "general"}) -} diff --git a/vendor/github.com/brianvoe/gofakeit/logo.png b/vendor/github.com/brianvoe/gofakeit/logo.png deleted file mode 100644 index a97962030afd..000000000000 Binary files a/vendor/github.com/brianvoe/gofakeit/logo.png and /dev/null differ diff --git a/vendor/github.com/brianvoe/gofakeit/misc.go b/vendor/github.com/brianvoe/gofakeit/misc.go deleted file mode 100644 index 8449afe0293c..000000000000 --- a/vendor/github.com/brianvoe/gofakeit/misc.go +++ /dev/null @@ -1,132 +0,0 @@ -package gofakeit - -import ( - "math/rand" - - "github.com/brianvoe/gofakeit/data" -) - -const hashtag = '#' -const questionmark = '?' 
- -// Check if in lib -func dataCheck(dataVal []string) bool { - var checkOk bool - - if len(dataVal) == 2 { - _, checkOk = data.Data[dataVal[0]] - if checkOk { - _, checkOk = data.Data[dataVal[0]][dataVal[1]] - } - } - - return checkOk -} - -// Check if in lib -func intDataCheck(dataVal []string) bool { - if len(dataVal) != 2 { - return false - } - - _, checkOk := data.IntData[dataVal[0]] - if checkOk { - _, checkOk = data.IntData[dataVal[0]][dataVal[1]] - } - - return checkOk -} - -// Get Random Value -func getRandValue(dataVal []string) string { - if !dataCheck(dataVal) { - return "" - } - return data.Data[dataVal[0]][dataVal[1]][rand.Intn(len(data.Data[dataVal[0]][dataVal[1]]))] -} - -// Get Random Integer Value -func getRandIntValue(dataVal []string) int { - if !intDataCheck(dataVal) { - return 0 - } - return data.IntData[dataVal[0]][dataVal[1]][rand.Intn(len(data.IntData[dataVal[0]][dataVal[1]]))] -} - -// Replace # with numbers -func replaceWithNumbers(str string) string { - if str == "" { - return str - } - bytestr := []byte(str) - for i := 0; i < len(bytestr); i++ { - if bytestr[i] == hashtag { - bytestr[i] = byte(randDigit()) - } - } - if bytestr[0] == '0' { - bytestr[0] = byte(rand.Intn(8)+1) + '0' - } - - return string(bytestr) -} - -// Replace ? 
with ASCII lowercase letters -func replaceWithLetters(str string) string { - if str == "" { - return str - } - bytestr := []byte(str) - for i := 0; i < len(bytestr); i++ { - if bytestr[i] == questionmark { - bytestr[i] = byte(randLetter()) - } - } - - return string(bytestr) -} - -// Generate random lowercase ASCII letter -func randLetter() rune { - return rune(byte(rand.Intn(26)) + 'a') -} - -// Generate random ASCII digit -func randDigit() rune { - return rune(byte(rand.Intn(10)) + '0') -} - -// Generate random integer between min and max -func randIntRange(min, max int) int { - if min == max { - return min - } - return rand.Intn((max+1)-min) + min -} - -func randFloat32Range(min, max float32) float32 { - if min == max { - return min - } - return rand.Float32()*(max-min) + min -} - -func randFloat64Range(min, max float64) float64 { - if min == max { - return min - } - return rand.Float64()*(max-min) + min -} - -// Categories will return a map string array of available data categories and sub categories -func Categories() map[string][]string { - types := make(map[string][]string) - for category, subCategoriesMap := range data.Data { - subCategories := make([]string, 0) - for subType := range subCategoriesMap { - subCategories = append(subCategories, subType) - } - types[category] = subCategories - } - return types -} diff --git a/vendor/github.com/brianvoe/gofakeit/name.go b/vendor/github.com/brianvoe/gofakeit/name.go deleted file mode 100644 index 559d6cfb8ad9..000000000000 --- a/vendor/github.com/brianvoe/gofakeit/name.go +++ /dev/null @@ -1,26 +0,0 @@ -package gofakeit - -// Name will generate a random First and Last Name -func Name() string { - return getRandValue([]string{"person", "first"}) + " " + getRandValue([]string{"person", "last"}) -} - -// FirstName will generate a random first name -func FirstName() string { - return getRandValue([]string{"person", "first"}) -} - -// LastName will generate a random last name -func LastName() string { - return 
getRandValue([]string{"person", "last"}) -} - -// NamePrefix will generate a random name prefix -func NamePrefix() string { - return getRandValue([]string{"person", "prefix"}) -} - -// NameSuffix will generate a random name suffix -func NameSuffix() string { - return getRandValue([]string{"person", "suffix"}) -} diff --git a/vendor/github.com/brianvoe/gofakeit/number.go b/vendor/github.com/brianvoe/gofakeit/number.go deleted file mode 100644 index 7c78f0b0a6e6..000000000000 --- a/vendor/github.com/brianvoe/gofakeit/number.go +++ /dev/null @@ -1,84 +0,0 @@ -package gofakeit - -import ( - "math" - "math/rand" -) - -// Number will generate a random number between given min And max -func Number(min int, max int) int { - return randIntRange(min, max) -} - -// Uint8 will generate a random uint8 value -func Uint8() uint8 { - return uint8(randIntRange(0, math.MaxUint8)) -} - -// Uint16 will generate a random uint16 value -func Uint16() uint16 { - return uint16(randIntRange(0, math.MaxUint16)) -} - -// Uint32 will generate a random uint32 value -func Uint32() uint32 { - return uint32(randIntRange(0, math.MaxInt32)) -} - -// Uint64 will generate a random uint64 value -func Uint64() uint64 { - return uint64(rand.Int63n(math.MaxInt64)) -} - -// Int8 will generate a random Int8 value -func Int8() int8 { - return int8(randIntRange(math.MinInt8, math.MaxInt8)) -} - -// Int16 will generate a random int16 value -func Int16() int16 { - return int16(randIntRange(math.MinInt16, math.MaxInt16)) -} - -// Int32 will generate a random int32 value -func Int32() int32 { - return int32(randIntRange(math.MinInt32, math.MaxInt32)) -} - -// Int64 will generate a random int64 value -func Int64() int64 { - return rand.Int63n(math.MaxInt64) + math.MinInt64 -} - -// Float32 will generate a random float32 value -func Float32() float32 { - return randFloat32Range(math.SmallestNonzeroFloat32, math.MaxFloat32) -} - -// Float32Range will generate a random float32 value between min and max -func 
Float32Range(min, max float32) float32 { - return randFloat32Range(min, max) -} - -// Float64 will generate a random float64 value -func Float64() float64 { - return randFloat64Range(math.SmallestNonzeroFloat64, math.MaxFloat64) -} - -// Float64Range will generate a random float64 value between min and max -func Float64Range(min, max float64) float64 { - return randFloat64Range(min, max) -} - -// Numerify will replace # with random numerical values -func Numerify(str string) string { - return replaceWithNumbers(str) -} - -// ShuffleInts will randomize a slice of ints -func ShuffleInts(a []int) { - for i := range a { - j := rand.Intn(i + 1) - a[i], a[j] = a[j], a[i] - } -} diff --git a/vendor/github.com/brianvoe/gofakeit/password.go b/vendor/github.com/brianvoe/gofakeit/password.go deleted file mode 100644 index 4f66f77cc5eb..000000000000 --- a/vendor/github.com/brianvoe/gofakeit/password.go +++ /dev/null @@ -1,68 +0,0 @@ -package gofakeit - -import ( - "math/rand" -) - -const lowerStr = "abcdefghijklmnopqrstuvwxyz" -const upperStr = "ABCDEFGHIJKLMNOPQRSTUVWXYZ" -const numericStr = "0123456789" -const specialStr = "!@#$%&*+-=?" 
-const spaceStr = " " - -// Password will generate a random password -// Minimum number length of 5 if less than -func Password(lower bool, upper bool, numeric bool, special bool, space bool, num int) string { - // Make sure the num minimun is at least 5 - if num < 5 { - num = 5 - } - i := 0 - b := make([]byte, num) - var passString string - - if lower { - passString += lowerStr - b[i] = lowerStr[rand.Int63()%int64(len(lowerStr))] - i++ - } - if upper { - passString += upperStr - b[i] = upperStr[rand.Int63()%int64(len(upperStr))] - i++ - } - if numeric { - passString += numericStr - b[i] = numericStr[rand.Int63()%int64(len(numericStr))] - i++ - } - if special { - passString += specialStr - b[i] = specialStr[rand.Int63()%int64(len(specialStr))] - i++ - } - if space { - passString += spaceStr - b[i] = spaceStr[rand.Int63()%int64(len(spaceStr))] - i++ - } - - // Set default if empty - if passString == "" { - passString = lowerStr + numericStr - } - - // Loop through and add it up - for i <= num-1 { - b[i] = passString[rand.Int63()%int64(len(passString))] - i++ - } - - // Shuffle bytes - for i := range b { - j := rand.Intn(i + 1) - b[i], b[j] = b[j], b[i] - } - - return string(b) -} diff --git a/vendor/github.com/brianvoe/gofakeit/payment.go b/vendor/github.com/brianvoe/gofakeit/payment.go deleted file mode 100644 index 57cff1d6d35d..000000000000 --- a/vendor/github.com/brianvoe/gofakeit/payment.go +++ /dev/null @@ -1,81 +0,0 @@ -package gofakeit - -import "strconv" -import "time" - -var currentYear = time.Now().Year() - 2000 - -// CreditCardInfo is a struct containing credit variables -type CreditCardInfo struct { - Type string - Number int - Exp string - Cvv string -} - -// CreditCard will generate a struct full of credit card information -func CreditCard() *CreditCardInfo { - return &CreditCardInfo{ - Type: CreditCardType(), - Number: CreditCardNumber(), - Exp: CreditCardExp(), - Cvv: CreditCardCvv(), - } -} - -// CreditCardType will generate a random credit card 
type string -func CreditCardType() string { - return getRandValue([]string{"payment", "card_type"}) -} - -// CreditCardNumber will generate a random credit card number int -func CreditCardNumber() int { - integer, _ := strconv.Atoi(replaceWithNumbers(getRandValue([]string{"payment", "number"}))) - return integer -} - -// CreditCardNumberLuhn will generate a random credit card number int that passes luhn test -func CreditCardNumberLuhn() int { - cc := "" - for i := 0; i < 100000; i++ { - cc = replaceWithNumbers(getRandValue([]string{"payment", "number"})) - if luhn(cc) { - break - } - } - integer, _ := strconv.Atoi(cc) - return integer -} - -// CreditCardExp will generate a random credit card expiration date string -// Exp date will always be a future date -func CreditCardExp() string { - month := strconv.Itoa(randIntRange(1, 12)) - if len(month) == 1 { - month = "0" + month - } - return month + "/" + strconv.Itoa(randIntRange(currentYear+1, currentYear+10)) -} - -// CreditCardCvv will generate a random CVV number - Its a string because you could have 017 as an exp date -func CreditCardCvv() string { - return Numerify("###") -} - -// luhn check is used for checking if credit card is valid -func luhn(s string) bool { - var t = [...]int{0, 2, 4, 6, 8, 1, 3, 5, 7, 9} - odd := len(s) & 1 - var sum int - for i, c := range s { - if c < '0' || c > '9' { - return false - } - if i&1 == odd { - sum += t[c-'0'] - } else { - sum += int(c - '0') - } - } - return sum%10 == 0 -} diff --git a/vendor/github.com/brianvoe/gofakeit/person.go b/vendor/github.com/brianvoe/gofakeit/person.go deleted file mode 100644 index 5fd6cbe22a11..000000000000 --- a/vendor/github.com/brianvoe/gofakeit/person.go +++ /dev/null @@ -1,45 +0,0 @@ -package gofakeit - -import "strconv" - -// SSN will generate a random Social Security Number -func SSN() string { - return strconv.Itoa(randIntRange(100000000, 999999999)) -} - -// Gender will generate a random gender string -func Gender() string { - if Bool() 
== true { - return "male" - } - - return "female" -} - -// PersonInfo is a struct of person information -type PersonInfo struct { - FirstName string - LastName string - Gender string - SSN string - Image string - Job *JobInfo - Address *AddressInfo - Contact *ContactInfo - CreditCard *CreditCardInfo -} - -// Person will generate a struct with person information -func Person() *PersonInfo { - return &PersonInfo{ - FirstName: FirstName(), - LastName: LastName(), - Gender: Gender(), - SSN: SSN(), - Image: ImageURL(300, 300) + "/people", - Job: Job(), - Address: Address(), - Contact: Contact(), - CreditCard: CreditCard(), - } -} diff --git a/vendor/github.com/brianvoe/gofakeit/status_code.go b/vendor/github.com/brianvoe/gofakeit/status_code.go deleted file mode 100644 index 1751c0fbe401..000000000000 --- a/vendor/github.com/brianvoe/gofakeit/status_code.go +++ /dev/null @@ -1,11 +0,0 @@ -package gofakeit - -// SimpleStatusCode will generate a random simple status code -func SimpleStatusCode() int { - return getRandIntValue([]string{"status_code", "simple"}) -} - -// StatusCode will generate a random status code -func StatusCode() int { - return getRandIntValue([]string{"status_code", "general"}) -} diff --git a/vendor/github.com/brianvoe/gofakeit/string.go b/vendor/github.com/brianvoe/gofakeit/string.go deleted file mode 100644 index fc646cf38ac1..000000000000 --- a/vendor/github.com/brianvoe/gofakeit/string.go +++ /dev/null @@ -1,48 +0,0 @@ -package gofakeit - -import ( - "math/rand" -) - -// Letter will generate a single random lower case ASCII letter -func Letter() string { - return string(randLetter()) -} - -// Digit will generate a single ASCII digit -func Digit() string { - return string(randDigit()) -} - -// Lexify will replace ? 
will random generated letters -func Lexify(str string) string { - return replaceWithLetters(str) -} - -// ShuffleStrings will randomize a slice of strings -func ShuffleStrings(a []string) { - swap := func(i, j int) { - a[i], a[j] = a[j], a[i] - } - //to avoid upgrading to 1.10 I copied the algorithm - n := len(a) - if n <= 1 { - return - } - - //if size is > int32 probably it will never finish, or ran out of entropy - i := n - 1 - for ; i > 0; i-- { - j := int(rand.Int31n(int32(i + 1))) - swap(i, j) - } -} - -// RandString will take in a slice of string and return a randomly selected value -func RandString(a []string) string { - size := len(a) - if size == 0 { - return "" - } - return a[rand.Intn(size)] -} diff --git a/vendor/github.com/brianvoe/gofakeit/struct.go b/vendor/github.com/brianvoe/gofakeit/struct.go deleted file mode 100644 index 2c68a9a3cb21..000000000000 --- a/vendor/github.com/brianvoe/gofakeit/struct.go +++ /dev/null @@ -1,87 +0,0 @@ -package gofakeit - -import ( - "reflect" -) - -// Struct fills in exported elements of a struct with random data -// based on the value of `fake` tag of exported elements. -// Use `fake:"skip"` to explicitly skip an element. -// All built-in types are supported, with templating support -// for string types. 
-func Struct(v interface{}) { - r(reflect.TypeOf(v), reflect.ValueOf(v), "") -} - -func r(t reflect.Type, v reflect.Value, template string) { - switch t.Kind() { - case reflect.Ptr: - rPointer(t, v, template) - case reflect.Struct: - rStruct(t, v) - case reflect.String: - rString(template, v) - case reflect.Uint8: - v.SetUint(uint64(Uint8())) - case reflect.Uint16: - v.SetUint(uint64(Uint16())) - case reflect.Uint32: - v.SetUint(uint64(Uint32())) - case reflect.Uint64: - //capped at [0, math.MaxInt64) - v.SetUint(uint64(Uint64())) - case reflect.Int: - v.SetInt(int64(Int64())) - case reflect.Int8: - v.SetInt(int64(Int8())) - case reflect.Int16: - v.SetInt(int64(Int16())) - case reflect.Int32: - v.SetInt(int64(Int32())) - case reflect.Int64: - v.SetInt(int64(Int64())) - case reflect.Float64: - v.SetFloat(Float64()) - case reflect.Float32: - v.SetFloat(float64(Float32())) - case reflect.Bool: - v.SetBool(Bool()) - } -} - -func rString(template string, v reflect.Value) { - if template != "" { - r := Generate(template) - v.SetString(r) - } else { - v.SetString(Generate("???????????????????")) - // we don't have a String(len int) string function!! 
- } -} - -func rStruct(t reflect.Type, v reflect.Value) { - n := t.NumField() - for i := 0; i < n; i++ { - elementT := t.Field(i) - elementV := v.Field(i) - fake := true - t, ok := elementT.Tag.Lookup("fake") - if ok && t == "skip" { - fake = false - } - if fake && elementV.CanSet() { - r(elementT.Type, elementV, t) - } - } -} - -func rPointer(t reflect.Type, v reflect.Value, template string) { - elemT := t.Elem() - if v.IsNil() { - nv := reflect.New(elemT) - r(elemT, nv.Elem(), template) - v.Set(nv) - } else { - r(elemT, v.Elem(), template) - } -} diff --git a/vendor/github.com/brianvoe/gofakeit/unique.go b/vendor/github.com/brianvoe/gofakeit/unique.go deleted file mode 100644 index 4b969a7e9b8f..000000000000 --- a/vendor/github.com/brianvoe/gofakeit/unique.go +++ /dev/null @@ -1,34 +0,0 @@ -package gofakeit - -import ( - "encoding/hex" - "math/rand" -) - -// UUID (version 4) will generate a random unique identifier based upon random nunbers -// Format: xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx -func UUID() string { - version := byte(4) - uuid := make([]byte, 16) - rand.Read(uuid) - - // Set version - uuid[6] = (uuid[6] & 0x0f) | (version << 4) - - // Set variant - uuid[8] = (uuid[8] & 0xbf) | 0x80 - - buf := make([]byte, 36) - var dash byte = '-' - hex.Encode(buf[0:8], uuid[0:4]) - buf[8] = dash - hex.Encode(buf[9:13], uuid[4:6]) - buf[13] = dash - hex.Encode(buf[14:18], uuid[6:8]) - buf[18] = dash - hex.Encode(buf[19:23], uuid[8:10]) - buf[23] = dash - hex.Encode(buf[24:], uuid[10:]) - - return string(buf) -} diff --git a/vendor/github.com/brianvoe/gofakeit/user_agent.go b/vendor/github.com/brianvoe/gofakeit/user_agent.go deleted file mode 100644 index 2ba334121452..000000000000 --- a/vendor/github.com/brianvoe/gofakeit/user_agent.go +++ /dev/null @@ -1,92 +0,0 @@ -package gofakeit - -import "strconv" - -// UserAgent will generate a random broswer user agent -func UserAgent() string { - randNum := randIntRange(0, 4) - switch randNum { - case 0: - return 
ChromeUserAgent() - case 1: - return FirefoxUserAgent() - case 2: - return SafariUserAgent() - case 3: - return OperaUserAgent() - default: - return ChromeUserAgent() - } -} - -// ChromeUserAgent will generate a random chrome browser user agent string -func ChromeUserAgent() string { - randNum1 := strconv.Itoa(randIntRange(531, 536)) + strconv.Itoa(randIntRange(0, 2)) - randNum2 := strconv.Itoa(randIntRange(36, 40)) - randNum3 := strconv.Itoa(randIntRange(800, 899)) - return "Mozilla/5.0 " + "(" + randomPlatform() + ") AppleWebKit/" + randNum1 + " (KHTML, like Gecko) Chrome/" + randNum2 + ".0." + randNum3 + ".0 Mobile Safari/" + randNum1 -} - -// FirefoxUserAgent will generate a random firefox broswer user agent string -func FirefoxUserAgent() string { - ver := "Gecko/" + Date().Format("2006-02-01") + " Firefox/" + strconv.Itoa(randIntRange(35, 37)) + ".0" - platforms := []string{ - "(" + windowsPlatformToken() + "; " + "en-US" + "; rv:1.9." + strconv.Itoa(randIntRange(0, 3)) + ".20) " + ver, - "(" + linuxPlatformToken() + "; rv:" + strconv.Itoa(randIntRange(5, 8)) + ".0) " + ver, - "(" + macPlatformToken() + " rv:" + strconv.Itoa(randIntRange(2, 7)) + ".0) " + ver, - } - - return "Mozilla/5.0 " + RandString(platforms) -} - -// SafariUserAgent will generate a random safari browser user agent string -func SafariUserAgent() string { - randNum := strconv.Itoa(randIntRange(531, 536)) + "." + strconv.Itoa(randIntRange(1, 51)) + "." + strconv.Itoa(randIntRange(1, 8)) - ver := strconv.Itoa(randIntRange(4, 6)) + "." 
+ strconv.Itoa(randIntRange(0, 2)) - - mobileDevices := []string{ - "iPhone; CPU iPhone OS", - "iPad; CPU OS", - } - - platforms := []string{ - "(Windows; U; " + windowsPlatformToken() + ") AppleWebKit/" + randNum + " (KHTML, like Gecko) Version/" + ver + " Safari/" + randNum, - "(" + macPlatformToken() + " rv:" + strconv.Itoa(randIntRange(4, 7)) + ".0; en-US) AppleWebKit/" + randNum + " (KHTML, like Gecko) Version/" + ver + " Safari/" + randNum, - "(" + RandString(mobileDevices) + " " + strconv.Itoa(randIntRange(7, 9)) + "_" + strconv.Itoa(randIntRange(0, 3)) + "_" + strconv.Itoa(randIntRange(1, 3)) + " like Mac OS X; " + "en-US" + ") AppleWebKit/" + randNum + " (KHTML, like Gecko) Version/" + strconv.Itoa(randIntRange(3, 5)) + ".0.5 Mobile/8B" + strconv.Itoa(randIntRange(111, 120)) + " Safari/6" + randNum, - } - - return "Mozilla/5.0 " + RandString(platforms) -} - -// OperaUserAgent will generate a random opera browser user agent string -func OperaUserAgent() string { - platform := "(" + randomPlatform() + "; en-US) Presto/2." + strconv.Itoa(randIntRange(8, 13)) + "." + strconv.Itoa(randIntRange(160, 355)) + " Version/" + strconv.Itoa(randIntRange(10, 13)) + ".00" - - return "Opera/" + strconv.Itoa(randIntRange(8, 10)) + "." 
+ strconv.Itoa(randIntRange(10, 99)) + " " + platform -} - -// linuxPlatformToken will generate a random linux platform -func linuxPlatformToken() string { - return "X11; Linux " + getRandValue([]string{"computer", "linux_processor"}) -} - -// macPlatformToken will generate a random mac platform -func macPlatformToken() string { - return "Macintosh; " + getRandValue([]string{"computer", "mac_processor"}) + " Mac OS X 10_" + strconv.Itoa(randIntRange(5, 9)) + "_" + strconv.Itoa(randIntRange(0, 10)) -} - -// windowsPlatformToken will generate a random windows platform -func windowsPlatformToken() string { - return getRandValue([]string{"computer", "windows_platform"}) -} - -// randomPlatform will generate a random platform -func randomPlatform() string { - platforms := []string{ - linuxPlatformToken(), - macPlatformToken(), - windowsPlatformToken(), - } - - return RandString(platforms) -} diff --git a/vendor/github.com/brianvoe/gofakeit/vehicle.go b/vendor/github.com/brianvoe/gofakeit/vehicle.go deleted file mode 100644 index 093fe3a1d84c..000000000000 --- a/vendor/github.com/brianvoe/gofakeit/vehicle.go +++ /dev/null @@ -1,55 +0,0 @@ -package gofakeit - -// VehicleInfo is a struct dataset of all vehicle information -type VehicleInfo struct { - // Vehicle type - VehicleType string - // Fuel type - Fuel string - // Transmission type - TransmissionGear string - // Brand name - Brand string - // Vehicle model - Model string - // Vehicle model year - Year int -} - -// Vehicle will generate a struct with vehicle information -func Vehicle() *VehicleInfo { - return &VehicleInfo{ - VehicleType: VehicleType(), - Fuel: FuelType(), - TransmissionGear: TransmissionGearType(), - Brand: CarMaker(), - Model: CarModel(), - Year: Year(), - } - -} - -// VehicleType will generate a random vehicle type string -func VehicleType() string { - return getRandValue([]string{"vehicle", "vehicle_type"}) -} - -// FuelType will return a random fuel type -func FuelType() string { - return 
getRandValue([]string{"vehicle", "fuel_type"}) -} - -// TransmissionGearType will return a random transmission gear type -func TransmissionGearType() string { - return getRandValue([]string{"vehicle", "transmission_type"}) -} - -// CarMaker will return a random car maker -func CarMaker() string { - return getRandValue([]string{"vehicle", "maker"}) -} - -// CarModel will return a random car model -func CarModel() string { - return getRandValue([]string{"vehicle", "model"}) -} diff --git a/vendor/github.com/brianvoe/gofakeit/words.go b/vendor/github.com/brianvoe/gofakeit/words.go deleted file mode 100644 index 631e45c7ddd7..000000000000 --- a/vendor/github.com/brianvoe/gofakeit/words.go +++ /dev/null @@ -1,100 +0,0 @@ -package gofakeit - -import ( - "bytes" - "strings" - "unicode" -) - -type paragrapOptions struct { - paragraphCount int - sentenceCount int - wordCount int - separator string -} - -const bytesPerWordEstimation = 6 - -type sentenceGenerator func(wordCount int) string -type wordGenerator func() string - -// Word will generate a random word -func Word() string { - return getRandValue([]string{"lorem", "word"}) -} - -// Sentence will generate a random sentence -func Sentence(wordCount int) string { - return sentence(wordCount, Word) -} - -// Paragraph will generate a random paragraphGenerator -// Set Paragraph Count -// Set Sentence Count -// Set Word Count -// Set Paragraph Separator -func Paragraph(paragraphCount int, sentenceCount int, wordCount int, separator string) string { - return paragraphGenerator(paragrapOptions{paragraphCount, sentenceCount, wordCount, separator}, Sentence) -} - -func sentence(wordCount int, word wordGenerator) string { - if wordCount <= 0 { - return "" - } - - wordSeparator := ' ' - sentence := bytes.Buffer{} - sentence.Grow(wordCount * bytesPerWordEstimation) - - for i := 0; i < wordCount; i++ { - word := word() - if i == 0 { - runes := []rune(word) - runes[0] = unicode.ToTitle(runes[0]) - word = string(runes) - } - 
sentence.WriteString(word) - if i < wordCount-1 { - sentence.WriteRune(wordSeparator) - } - } - sentence.WriteRune('.') - return sentence.String() -} - -func paragraphGenerator(opts paragrapOptions, sentecer sentenceGenerator) string { - if opts.paragraphCount <= 0 || opts.sentenceCount <= 0 || opts.wordCount <= 0 { - return "" - } - - //to avoid making Go 1.10 dependency, we cannot use strings.Builder - paragraphs := bytes.Buffer{} - //we presume the length - paragraphs.Grow(opts.paragraphCount * opts.sentenceCount * opts.wordCount * bytesPerWordEstimation) - wordSeparator := ' ' - - for i := 0; i < opts.paragraphCount; i++ { - for e := 0; e < opts.sentenceCount; e++ { - paragraphs.WriteString(sentecer(opts.wordCount)) - if e < opts.sentenceCount-1 { - paragraphs.WriteRune(wordSeparator) - } - } - - if i < opts.paragraphCount-1 { - paragraphs.WriteString(opts.separator) - } - } - - return paragraphs.String() -} - -// Question will return a random question -func Question() string { - return strings.Replace(HipsterSentence(Number(3, 10)), ".", "?", 1) -} - -// Quote will return a random quote from a random person -func Quote() string { - return `"` + HipsterSentence(Number(3, 10)) + `" - ` + FirstName() + " " + LastName() -} diff --git a/vendor/github.com/robfig/cron/README.md b/vendor/github.com/robfig/cron/README.md index 4e0ae1c25f39..ec40c95fcb9d 100644 --- a/vendor/github.com/robfig/cron/README.md +++ b/vendor/github.com/robfig/cron/README.md @@ -1,4 +1,4 @@ -[![GoDoc](http://godoc.org/github.com/robfig/cron?status.png)](http://godoc.org/github.com/robfig/cron) +[![GoDoc](http://godoc.org/github.com/robfig/cron?status.png)](http://godoc.org/github.com/robfig/cron) [![Build Status](https://travis-ci.org/robfig/cron.svg?branch=master)](https://travis-ci.org/robfig/cron) # cron diff --git a/vendor/github.com/robfig/cron/doc.go b/vendor/github.com/robfig/cron/doc.go index 1ce84f7bf462..d02ec2f3b563 100644 --- a/vendor/github.com/robfig/cron/doc.go +++ 
b/vendor/github.com/robfig/cron/doc.go @@ -84,7 +84,7 @@ You may use one of several pre-defined schedules in place of a cron expression. Intervals -You may also schedule a job to execute at fixed intervals, starting at the time it's added +You may also schedule a job to execute at fixed intervals, starting at the time it's added or cron is run. This is supported by formatting the cron spec like this: @every diff --git a/vendor/modules.txt b/vendor/modules.txt index 1cf623aa57d4..19e66874848b 100644 --- a/vendor/modules.txt +++ b/vendor/modules.txt @@ -56,9 +56,6 @@ github.com/benbjohnson/clock github.com/beorn7/perks/quantile # github.com/bradfitz/gomemcache v0.0.0-20180710155616-bc664df96737 github.com/bradfitz/gomemcache/memcache -# github.com/brianvoe/gofakeit v3.17.0+incompatible -github.com/brianvoe/gofakeit -github.com/brianvoe/gofakeit/data # github.com/codegangsta/cli v1.20.0 github.com/codegangsta/cli # github.com/davecgh/go-spew v1.1.1 diff --git a/yarn.lock b/yarn.lock index e092d5045691..57a84a7d860d 100644 --- a/yarn.lock +++ b/yarn.lock @@ -2284,10 +2284,10 @@ resolved "https://registry.yarnpkg.com/@types/jquery/-/jquery-1.10.35.tgz#4e5c2b1e5b3bf0b863efb8c5e70081f52e6c9518" integrity sha512-SVtqEcudm7yjkTwoRA1gC6CNMhGDdMx4Pg8BPdiqI7bXXdCn1BPmtxgeWYQOgDxrq53/5YTlhq5ULxBEAlWIBg== -"@types/lodash@4.14.119", "@types/lodash@4.14.123": - version "4.14.119" - resolved "https://registry.yarnpkg.com/@types/lodash/-/lodash-4.14.119.tgz#be847e5f4bc3e35e46d041c394ead8b603ad8b39" - integrity sha512-Z3TNyBL8Vd/M9D9Ms2S3LmFq2sSMzahodD6rCS9V2N44HUMINb75jNkSuwAx7eo2ufqTdfOdtGQpNbieUjPQmw== +"@types/lodash@4.14.123": + version "4.14.123" + resolved "https://registry.yarnpkg.com/@types/lodash/-/lodash-4.14.123.tgz#39be5d211478c8dd3bdae98ee75bb7efe4abfe4d" + integrity sha512-pQvPkc4Nltyx7G1Ww45OjVqUsJP4UsZm+GWJpigXgkikZqJgRm4c48g027o6tdgubWHwFRF15iFd+Y4Pmqv6+Q== "@types/minimatch@*": version "3.0.3" @@ -3510,13 +3510,13 @@ aws4@^1.8.0: resolved 
"https://registry.yarnpkg.com/aws4/-/aws4-1.8.0.tgz#f0e003d9ca9e7f59c7a508945d7b2ef9a04a542f" integrity sha512-ReZxvNHIOv88FlT7rxcXIIC0fPt4KZqZbOlivyWtXLt8ESx84zd3kMC6iK5jVeS2qt+g7ftS7ye4fi06X5rtRQ== -axios@0.18.0: - version "0.18.0" - resolved "https://registry.yarnpkg.com/axios/-/axios-0.18.0.tgz#32d53e4851efdc0a11993b6cd000789d70c05102" - integrity sha1-MtU+SFHv3AoRmTts0AB4nXDAUQI= +axios@0.19.0: + version "0.19.0" + resolved "https://registry.yarnpkg.com/axios/-/axios-0.19.0.tgz#8e09bff3d9122e133f7b8101c8fbdd00ed3d2ab8" + integrity sha512-1uvKqKQta3KBxIz14F2v06AEHZ/dIoeKfbTRkK1E5oqjDnuEerLmYTgJB5AiQZHJcljpg1TuRzdjDR06qNk0DQ== dependencies: - follow-redirects "^1.3.0" - is-buffer "^1.1.5" + follow-redirects "1.5.10" + is-buffer "^2.0.2" babel-code-frame@^6.22.0: version "6.26.0" @@ -4470,6 +4470,11 @@ caniuse-api@^3.0.0: lodash.memoize "^4.1.2" lodash.uniq "^4.5.0" +caniuse-db@1.0.30000772: + version "1.0.30000772" + resolved "https://registry.yarnpkg.com/caniuse-db/-/caniuse-db-1.0.30000772.tgz#51aae891768286eade4a3d8319ea76d6a01b512b" + integrity sha1-UarokXaChureSj2DGep21qAbUSs= + caniuse-lite@^1.0.0, caniuse-lite@^1.0.30000929, caniuse-lite@^1.0.30000947, caniuse-lite@^1.0.30000957, caniuse-lite@^1.0.30000963: version "1.0.30000966" resolved "https://registry.yarnpkg.com/caniuse-lite/-/caniuse-lite-1.0.30000966.tgz#f3c6fefacfbfbfb981df6dfa68f2aae7bff41b64" @@ -6108,7 +6113,7 @@ debug@2.6.9, debug@^2.1.1, debug@^2.1.3, debug@^2.2.0, debug@^2.3.3, debug@^2.6. 
dependencies: ms "2.0.0" -debug@3.1.0: +debug@3.1.0, debug@=3.1.0: version "3.1.0" resolved "https://registry.yarnpkg.com/debug/-/debug-3.1.0.tgz#5bb5a0672628b64149566ba16819e61518c67261" integrity sha512-OX8XqP7/1a9cqkxYw2yXss15f26NKWBpDXQd0/uK/KPqdQhxbPa994hnzjcE2VqQpDslf55723cKPUOGSmMY3g== @@ -7631,7 +7636,14 @@ focus-lock@^0.6.3: resolved "https://registry.yarnpkg.com/focus-lock/-/focus-lock-0.6.3.tgz#ef0e82ebac0023f841039d60bf329725d6438028" integrity sha512-EU6ePgEauhWrzJEN5RtG1d1ayrWXhEnfzTjnieHj+jG9tNHDEhKTAnCn1TN3gs9h6XWCDH6cpeX1VXY/lzLwZg== -follow-redirects@^1.0.0, follow-redirects@^1.3.0: +follow-redirects@1.5.10: + version "1.5.10" + resolved "https://registry.yarnpkg.com/follow-redirects/-/follow-redirects-1.5.10.tgz#7b7a9f9aea2fdff36786a94ff643ed07f4ff5e2a" + integrity sha512-0V5l4Cizzvqt5D44aTXbFZz+FtyXV1vrDN6qrelxtfYQKW0KO0W2T/hkE8xvGa/540LkZlkaUjO4ailYTFtHVQ== + dependencies: + debug "=3.1.0" + +follow-redirects@^1.0.0: version "1.7.0" resolved "https://registry.yarnpkg.com/follow-redirects/-/follow-redirects-1.7.0.tgz#489ebc198dc0e7f64167bd23b03c4c19b5784c76" integrity sha512-m/pZQy4Gj287eNy94nivy5wchN3Kp+Q5WgUPNy5lJSZ3sgkVKSYV/ZChMAQVIgx1SqfZ2zBZtPA2YlXIWxxJOQ== @@ -9247,7 +9259,7 @@ is-buffer@^1.0.2, is-buffer@^1.1.5: resolved "https://registry.yarnpkg.com/is-buffer/-/is-buffer-1.1.6.tgz#efaa2ea9daa0d7ab2ea13a97b2b8ad51fefbe8be" integrity sha512-NcdALwpXkTm5Zvvbk7owOUSvVvBKDgKP5/ewfXEznmQFfs4ZRmanOeKBTjRVjka3QFoN6XJ+9F3USqfHqTaU5w== -is-buffer@^2.0.0: +is-buffer@^2.0.0, is-buffer@^2.0.2: version "2.0.3" resolved "https://registry.yarnpkg.com/is-buffer/-/is-buffer-2.0.3.tgz#4ecf3fcf749cbd1e472689e109ac66261a25e725" integrity sha512-U15Q7MXTuZlrbymiz95PJpZxu8IlipAp4dtS3wOdgPXx3mqBnslrWU14kxfHB+Py/+2PVKSr37dMAgM2A4uArw== @@ -10138,10 +10150,10 @@ jest@24.8.0: import-local "^2.0.0" jest-cli "^24.8.0" -jquery@3.4.0: - version "3.4.0" - resolved "https://registry.yarnpkg.com/jquery/-/jquery-3.4.0.tgz#8de513fa0fa4b2c7d2e48a530e26f0596936efdf" - 
integrity sha512-ggRCXln9zEqv6OqAGXFEcshF5dSBvCkzj6Gm2gzuR5fWawaX8t7cxKVkkygKODrDAzKdoYw3l/e3pm3vlT4IbQ== +jquery@3.4.1: + version "3.4.1" + resolved "https://registry.yarnpkg.com/jquery/-/jquery-3.4.1.tgz#714f1f8d9dde4bdfa55764ba37ef214630d80ef2" + integrity sha512-36+AdBzCL+y6qjw5Tx7HgzeGCzC81MDDgaUP8ld2zhx58HdqXGoBd+tHdrBMiyjGQs0Hxs/MLZTu/eHNJJuWPw== js-base64@^2.1.8, js-base64@^2.1.9: version "2.5.1" @@ -17029,6 +17041,11 @@ tryor@~0.1.2: resolved "https://registry.yarnpkg.com/tryor/-/tryor-0.1.2.tgz#8145e4ca7caff40acde3ccf946e8b8bb75b4172b" integrity sha1-gUXkynyv9ArN48z5Rui4u3W0Fys= +ts-easing@^0.2.0: + version "0.2.0" + resolved "https://registry.yarnpkg.com/ts-easing/-/ts-easing-0.2.0.tgz#c8a8a35025105566588d87dbda05dd7fbfa5a4ec" + integrity sha512-Z86EW+fFFh/IFB1fqQ3/+7Zpf9t2ebOAxNI/V6Wo7r5gqiqtxmgTlQ1qbqQcjLKYeSHPTsEmvlJUDg/EuL0uHQ== + ts-jest@24.0.2: version "24.0.2" resolved "https://registry.yarnpkg.com/ts-jest/-/ts-jest-24.0.2.tgz#8dde6cece97c31c03e80e474c749753ffd27194d" @@ -17696,10 +17713,10 @@ webidl-conversions@^4.0.2: resolved "https://registry.yarnpkg.com/webidl-conversions/-/webidl-conversions-4.0.2.tgz#a855980b1f0b6b359ba1d5d9fb39ae941faa63ad" integrity sha512-YQ+BmxuTgd6UXZW3+ICGfyqRyHXVlD5GtQr5+qjiNW7bF0cqrzX500HVXPBOvgXb5YnzDd+h0zqyv61KUD7+Sg== -webpack-bundle-analyzer@3.1.0: - version "3.1.0" - resolved "https://registry.yarnpkg.com/webpack-bundle-analyzer/-/webpack-bundle-analyzer-3.1.0.tgz#2f19cbb87bb6d4f3cb4e59cb67c837bd9436e89d" - integrity sha512-nyDyWEs7C6DZlgvu1pR1zzJfIWSiGPbtaByZr8q+Fd2xp70FuM/8ngCJzj3Er1TYRLSFmp1F1OInbEm4DZH8NA== +webpack-bundle-analyzer@3.3.2: + version "3.3.2" + resolved "https://registry.yarnpkg.com/webpack-bundle-analyzer/-/webpack-bundle-analyzer-3.3.2.tgz#3da733a900f515914e729fcebcd4c40dde71fc6f" + integrity sha512-7qvJLPKB4rRWZGjVp5U1KEjwutbDHSKboAl0IfafnrdXMrgC0tOtZbQD6Rw0u4cmpgRN4O02Fc0t8eAT+FgGzA== dependencies: acorn "^6.0.7" acorn-walk "^6.1.1"